refactor(providers): centralize native provider detection (#60341)

* refactor(providers): centralize native provider detection

* fix(providers): preserve openrouter thinking format

* fix(providers): preserve openrouter host thinking format
This commit is contained in:
Vincent Koc
2026-04-03 23:46:21 +09:00
committed by GitHub
parent 24e10e6e45
commit 66825c0969
6 changed files with 249 additions and 14 deletions

View File

@@ -117,4 +117,140 @@ describe("openai transport stream", () => {
totalTokens: 9,
});
});
it("keeps OpenRouter thinking format for declared OpenRouter providers on custom proxy URLs", async () => {
  // A model that declares provider "openrouter" must keep the OpenRouter
  // thinking/reasoning payload shape even when its base URL is a custom
  // proxy host rather than openrouter.ai.
  const attributedModel = attachModelProviderRequestTransport(
    {
      id: "anthropic/claude-sonnet-4",
      name: "Claude Sonnet 4",
      api: "openai-completions",
      provider: "openrouter",
      baseUrl: "https://proxy.example.com/v1",
      reasoning: true,
      input: ["text"],
      cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      contextWindow: 200000,
      maxTokens: 8192,
    } satisfies Model<"openai-completions">,
    {
      proxy: { mode: "explicit-proxy", url: "http://proxy.internal:8443" },
    },
  );
  const streamFn = buildTransportAwareSimpleStreamFn(attributedModel);
  expect(streamFn).toBeTypeOf("function");

  // Capture the outgoing request payload via the onPayload hook; the
  // promise resolves once the hook has fired.
  let observedPayload: Record<string, unknown> | undefined;
  let markPayloadSeen!: () => void;
  const payloadSeen = new Promise<void>((resolve) => {
    markPayloadSeen = resolve;
  });
  void streamFn!(
    {
      id: "anthropic/claude-sonnet-4",
      name: "Claude Sonnet 4",
      api: "openclaw-openai-completions-transport",
      provider: "openrouter",
      baseUrl: "https://proxy.example.com/v1",
      reasoning: true,
      input: ["text"],
      cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      contextWindow: 200000,
      maxTokens: 8192,
    } as Model<"openclaw-openai-completions-transport">,
    { systemPrompt: "system", messages: [], tools: [] } as never,
    {
      reasoningEffort: "high",
      onPayload: async (payload) => {
        observedPayload = payload as Record<string, unknown>;
        markPayloadSeen();
        return payload;
      },
    } as never,
  );
  await payloadSeen;

  // OpenRouter format nests the effort under a top-level "reasoning" object.
  expect(observedPayload).toMatchObject({
    reasoning: { effort: "high" },
  });
});
it("keeps OpenRouter thinking format for native OpenRouter hosts behind custom provider ids", async () => {
  // Here the provider id is a custom string ("custom-openrouter"), so the
  // native openrouter.ai host must be what selects the OpenRouter
  // thinking/reasoning payload shape.
  const attributedModel = attachModelProviderRequestTransport(
    {
      id: "anthropic/claude-sonnet-4",
      name: "Claude Sonnet 4",
      api: "openai-completions",
      provider: "custom-openrouter",
      baseUrl: "https://openrouter.ai/api/v1",
      reasoning: true,
      input: ["text"],
      cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      contextWindow: 200000,
      maxTokens: 8192,
    } satisfies Model<"openai-completions">,
    {
      proxy: { mode: "explicit-proxy", url: "http://proxy.internal:8443" },
    },
  );
  const streamFn = buildTransportAwareSimpleStreamFn(attributedModel);
  expect(streamFn).toBeTypeOf("function");

  // Capture the outgoing request payload via the onPayload hook; the
  // promise resolves once the hook has fired.
  let observedPayload: Record<string, unknown> | undefined;
  let markPayloadSeen!: () => void;
  const payloadSeen = new Promise<void>((resolve) => {
    markPayloadSeen = resolve;
  });
  void streamFn!(
    {
      id: "anthropic/claude-sonnet-4",
      name: "Claude Sonnet 4",
      api: "openclaw-openai-completions-transport",
      provider: "custom-openrouter",
      baseUrl: "https://openrouter.ai/api/v1",
      reasoning: true,
      input: ["text"],
      cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      contextWindow: 200000,
      maxTokens: 8192,
    } as Model<"openclaw-openai-completions-transport">,
    { systemPrompt: "system", messages: [], tools: [] } as never,
    {
      reasoningEffort: "high",
      onPayload: async (payload) => {
        observedPayload = payload as Record<string, unknown>;
        markPayloadSeen();
        return payload;
      },
    } as never,
  );
  await payloadSeen;

  // OpenRouter format nests the effort under a top-level "reasoning" object.
  expect(observedPayload).toMatchObject({
    reasoning: { effort: "high" },
  });
});
});

View File

@@ -13,6 +13,7 @@ import OpenAI, { AzureOpenAI } from "openai";
import type { ChatCompletionChunk } from "openai/resources/chat/completions.js";
import type { ResponseCreateParamsStreaming } from "openai/resources/responses/responses.js";
import { fetchWithSsrFGuard } from "../infra/net/fetch-guard.js";
import { resolveProviderRequestCapabilities } from "./provider-attribution.js";
import {
buildProviderRequestDispatcherPolicy,
getModelProviderRequestTransport,
@@ -1317,6 +1318,18 @@ async function processOpenAICompletionsStream(
function detectCompat(model: OpenAIModeModel) {
const provider = model.provider;
const baseUrl = model.baseUrl ?? "";
const capabilities = resolveProviderRequestCapabilities({
provider,
api: model.api,
baseUrl: model.baseUrl,
capability: "llm",
transport: "stream",
modelId: model.id,
compat:
model.compat && typeof model.compat === "object"
? (model.compat as { supportsStore?: boolean })
: undefined,
});
const isZai = provider === "zai" || baseUrl.includes("api.z.ai");
const isNonStandard =
provider === "cerebras" ||
@@ -1353,7 +1366,9 @@ function detectCompat(model: OpenAIModeModel) {
requiresThinkingAsText: false,
thinkingFormat: isZai
? "zai"
: provider === "openrouter" || baseUrl.includes("openrouter.ai")
: provider === "openrouter" ||
capabilities.endpointClass === "openrouter" ||
capabilities.attributionProvider === "openrouter"
? "openrouter"
: "openai",
openRouterRouting: {},

View File

@@ -206,6 +206,27 @@ describe("provider attribution", () => {
});
});
it("classifies native Mistral hosts centrally", () => {
  // Host-only classification should already tag the public Mistral endpoint.
  const endpoint = resolveProviderEndpoint("https://api.mistral.ai/v1");
  expect(endpoint).toMatchObject({
    endpointClass: "mistral-public",
    hostname: "api.mistral.ai",
  });

  // Full capability resolution must agree with the host classification and
  // flag the endpoint as a known native one for the mistral family.
  const capabilities = resolveProviderRequestCapabilities({
    provider: "mistral",
    api: "openai-completions",
    baseUrl: "https://api.mistral.ai/v1",
    capability: "llm",
    transport: "stream",
  });
  expect(capabilities).toMatchObject({
    endpointClass: "mistral-public",
    isKnownNativeEndpoint: true,
    knownProviderFamily: "mistral",
  });
});
it("treats OpenRouter-hosted Responses routes as explicit proxy-like endpoints", () => {
expect(
resolveProviderRequestPolicy({

View File

@@ -35,6 +35,7 @@ export type ProviderEndpointClass =
| "default"
| "anthropic-public"
| "github-copilot-native"
| "mistral-public"
| "moonshot-native"
| "modelstudio-native"
| "openai-public"
@@ -202,6 +203,9 @@ export function resolveProviderEndpoint(
if (host === "api.anthropic.com") {
return { endpointClass: "anthropic-public", hostname: host };
}
if (host === "api.mistral.ai") {
return { endpointClass: "mistral-public", hostname: host };
}
if (host.endsWith(".githubcopilot.com")) {
return { endpointClass: "github-copilot-native", hostname: host };
}
@@ -498,6 +502,7 @@ export function resolveProviderRequestCapabilities(
const isKnownNativeEndpoint =
endpointClass === "anthropic-public" ||
endpointClass === "github-copilot-native" ||
endpointClass === "mistral-public" ||
endpointClass === "moonshot-native" ||
endpointClass === "modelstudio-native" ||
endpointClass === "openai-public" ||