refactor(providers): centralize native provider detection (#60341)

* refactor(providers): centralize native provider detection

* fix(providers): preserve openrouter thinking format

* fix(providers): preserve openrouter host thinking format
This commit is contained in:
Vincent Koc
2026-04-03 23:46:21 +09:00
committed by GitHub
parent 24e10e6e45
commit 66825c0969
6 changed files with 249 additions and 14 deletions

View File

@@ -1,5 +1,6 @@
import { describe, expect, it } from "vitest";
import { applyMistralModelCompat } from "./api.js";
import { default as mistralPlugin } from "./index.js";
function supportsStore(model: { compat?: unknown }): boolean | undefined {
return (model.compat as { supportsStore?: boolean } | undefined)?.supportsStore;
@@ -48,4 +49,53 @@ describe("applyMistralModelCompat", () => {
};
expect(applyMistralModelCompat(model)).toBe(model);
});
// Verifies the plugin's compat-contribution hook fires for three routing cases:
// a declared "mistral" provider behind a proxy URL, a custom provider id on the
// native api.mistral.ai host, and a "mistralai/" model-id hint via OpenRouter.
it("contributes Mistral compat for native, provider-family, and hinted custom routes", () => {
// Grab the plugin's register() entry point without importing its full type.
const registerProvider = (mistralPlugin as { register?: (api: unknown) => void }).register;
let contributeResolvedModelCompat:
| ((params: { modelId: string; model: Record<string, unknown> }) => unknown)
| undefined;
// Register against a stub API so the hook the plugin wires up can be captured.
registerProvider?.({
registerProvider: (provider: {
contributeResolvedModelCompat?: typeof contributeResolvedModelCompat;
}) => {
contributeResolvedModelCompat = provider.contributeResolvedModelCompat;
},
registerMediaUnderstandingProvider: () => {},
});
// Case 1: provider family "mistral" wins even though the baseUrl is a proxy.
expect(
contributeResolvedModelCompat?.({
modelId: "mistral-large-latest",
model: {
provider: "mistral",
api: "openai-completions",
baseUrl: "https://proxy.example/v1",
},
}),
).toBeDefined();
// Case 2: the native api.mistral.ai host wins even under a custom provider id.
expect(
contributeResolvedModelCompat?.({
modelId: "custom-model",
model: {
provider: "custom-mistral-host",
api: "openai-completions",
baseUrl: "https://api.mistral.ai/v1",
},
}),
).toBeDefined();
// Case 3: the "mistralai/" model-id hint wins on an OpenRouter route.
expect(
contributeResolvedModelCompat?.({
modelId: "mistralai/mistral-small-3.2",
model: {
provider: "openrouter",
api: "openai-completions",
baseUrl: "https://openrouter.ai/api/v1",
},
}),
).toBeDefined();
});
});

View File

@@ -1,4 +1,5 @@
import { defineSingleProviderPluginEntry } from "openclaw/plugin-sdk/provider-entry";
import { resolveProviderRequestCapabilities } from "openclaw/plugin-sdk/provider-http";
import { applyMistralModelCompat, MISTRAL_MODEL_COMPAT_PATCH } from "./api.js";
import { mistralMediaUnderstandingProvider } from "./media-understanding-provider.js";
import { applyMistralConfig, MISTRAL_DEFAULT_MODEL_REF } from "./onboard.js";
@@ -15,17 +16,6 @@ const MISTRAL_MODEL_HINTS = [
"ministral",
] as const;
// Returns true when the given base URL points at the native Mistral API host.
// Non-string or blank input is rejected; a strict URL parse with an exact,
// case-insensitive hostname comparison is tried first, falling back to a
// best-effort substring match when the value is not a parseable URL.
// NOTE(review): this local helper is superseded in this commit by the
// centralized resolveProviderRequestCapabilities endpoint classification.
function isMistralBaseUrl(baseUrl: unknown): boolean {
if (typeof baseUrl !== "string" || !baseUrl.trim()) {
return false;
}
try {
// Exact hostname match — avoids false positives like "api.mistral.ai.evil.com" in the path.
return new URL(baseUrl).hostname.toLowerCase() === "api.mistral.ai";
} catch {
// Not a valid URL — fall back to a lenient substring check.
return baseUrl.toLowerCase().includes("api.mistral.ai");
}
}
function isMistralModelHint(modelId: string): boolean {
const normalized = modelId.trim().toLowerCase();
return MISTRAL_MODEL_HINTS.some(
@@ -39,12 +29,30 @@ function isMistralModelHint(modelId: string): boolean {
function shouldContributeMistralCompat(params: {
modelId: string;
model: { api?: unknown; baseUrl?: unknown };
model: { api?: unknown; baseUrl?: unknown; provider?: unknown; compat?: unknown };
}): boolean {
if (params.model.api !== "openai-completions") {
return false;
}
return isMistralBaseUrl(params.model.baseUrl) || isMistralModelHint(params.modelId);
const capabilities = resolveProviderRequestCapabilities({
provider: typeof params.model.provider === "string" ? params.model.provider : undefined,
api: "openai-completions",
baseUrl: typeof params.model.baseUrl === "string" ? params.model.baseUrl : undefined,
capability: "llm",
transport: "stream",
modelId: params.modelId,
compat:
params.model.compat && typeof params.model.compat === "object"
? (params.model.compat as { supportsStore?: boolean })
: undefined,
});
return (
capabilities.knownProviderFamily === "mistral" ||
capabilities.endpointClass === "mistral-public" ||
isMistralModelHint(params.modelId)
);
}
function buildMistralReplayPolicy() {

View File

@@ -117,4 +117,140 @@ describe("openai transport stream", () => {
totalTokens: 9,
});
});
// Regression test: a model that declares provider "openrouter" must emit the
// OpenRouter-style `reasoning: { effort }` payload shape even when its baseUrl
// is a custom proxy rather than openrouter.ai.
it("keeps OpenRouter thinking format for declared OpenRouter providers on custom proxy URLs", async () => {
// Build a stream function with an explicit-proxy transport attached to the model.
const streamFn = buildTransportAwareSimpleStreamFn(
attachModelProviderRequestTransport(
{
id: "anthropic/claude-sonnet-4",
name: "Claude Sonnet 4",
api: "openai-completions",
provider: "openrouter",
baseUrl: "https://proxy.example.com/v1",
reasoning: true,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 200000,
maxTokens: 8192,
} satisfies Model<"openai-completions">,
{
proxy: {
mode: "explicit-proxy",
url: "http://proxy.internal:8443",
},
},
),
);
expect(streamFn).toBeTypeOf("function");
// Capture the outgoing request payload via onPayload; resolve a promise once
// it arrives so the test can await the capture without consuming the stream.
let capturedPayload: Record<string, unknown> | undefined;
let resolveCaptured!: () => void;
const captured = new Promise<void>((resolve) => {
resolveCaptured = resolve;
});
// Fire-and-forget the stream call — only the outgoing payload matters here.
void streamFn!(
{
id: "anthropic/claude-sonnet-4",
name: "Claude Sonnet 4",
api: "openclaw-openai-completions-transport",
provider: "openrouter",
baseUrl: "https://proxy.example.com/v1",
reasoning: true,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 200000,
maxTokens: 8192,
} as Model<"openclaw-openai-completions-transport">,
{
systemPrompt: "system",
messages: [],
tools: [],
} as never,
{
reasoningEffort: "high",
onPayload: async (payload) => {
capturedPayload = payload as Record<string, unknown>;
resolveCaptured();
return payload;
},
} as never,
);
await captured;
// OpenRouter format: effort is nested under `reasoning`, not a flat field.
expect(capturedPayload).toMatchObject({
reasoning: {
effort: "high",
},
});
});
// Complementary regression test: a model routed to the native openrouter.ai
// host must keep the OpenRouter `reasoning: { effort }` payload shape even
// when its declared provider id is a custom one ("custom-openrouter").
it("keeps OpenRouter thinking format for native OpenRouter hosts behind custom provider ids", async () => {
// Build a stream function with an explicit-proxy transport attached to the model.
const streamFn = buildTransportAwareSimpleStreamFn(
attachModelProviderRequestTransport(
{
id: "anthropic/claude-sonnet-4",
name: "Claude Sonnet 4",
api: "openai-completions",
provider: "custom-openrouter",
baseUrl: "https://openrouter.ai/api/v1",
reasoning: true,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 200000,
maxTokens: 8192,
} satisfies Model<"openai-completions">,
{
proxy: {
mode: "explicit-proxy",
url: "http://proxy.internal:8443",
},
},
),
);
expect(streamFn).toBeTypeOf("function");
// Capture the outgoing request payload via onPayload; resolve a promise once
// it arrives so the test can await the capture without consuming the stream.
let capturedPayload: Record<string, unknown> | undefined;
let resolveCaptured!: () => void;
const captured = new Promise<void>((resolve) => {
resolveCaptured = resolve;
});
// Fire-and-forget the stream call — only the outgoing payload matters here.
void streamFn!(
{
id: "anthropic/claude-sonnet-4",
name: "Claude Sonnet 4",
api: "openclaw-openai-completions-transport",
provider: "custom-openrouter",
baseUrl: "https://openrouter.ai/api/v1",
reasoning: true,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 200000,
maxTokens: 8192,
} as Model<"openclaw-openai-completions-transport">,
{
systemPrompt: "system",
messages: [],
tools: [],
} as never,
{
reasoningEffort: "high",
onPayload: async (payload) => {
capturedPayload = payload as Record<string, unknown>;
resolveCaptured();
return payload;
},
} as never,
);
await captured;
// OpenRouter format: effort is nested under `reasoning`, not a flat field.
expect(capturedPayload).toMatchObject({
reasoning: {
effort: "high",
},
});
});
});

View File

@@ -13,6 +13,7 @@ import OpenAI, { AzureOpenAI } from "openai";
import type { ChatCompletionChunk } from "openai/resources/chat/completions.js";
import type { ResponseCreateParamsStreaming } from "openai/resources/responses/responses.js";
import { fetchWithSsrFGuard } from "../infra/net/fetch-guard.js";
import { resolveProviderRequestCapabilities } from "./provider-attribution.js";
import {
buildProviderRequestDispatcherPolicy,
getModelProviderRequestTransport,
@@ -1317,6 +1318,18 @@ async function processOpenAICompletionsStream(
function detectCompat(model: OpenAIModeModel) {
const provider = model.provider;
const baseUrl = model.baseUrl ?? "";
const capabilities = resolveProviderRequestCapabilities({
provider,
api: model.api,
baseUrl: model.baseUrl,
capability: "llm",
transport: "stream",
modelId: model.id,
compat:
model.compat && typeof model.compat === "object"
? (model.compat as { supportsStore?: boolean })
: undefined,
});
const isZai = provider === "zai" || baseUrl.includes("api.z.ai");
const isNonStandard =
provider === "cerebras" ||
@@ -1353,7 +1366,9 @@ function detectCompat(model: OpenAIModeModel) {
requiresThinkingAsText: false,
thinkingFormat: isZai
? "zai"
: provider === "openrouter" || baseUrl.includes("openrouter.ai")
: provider === "openrouter" ||
capabilities.endpointClass === "openrouter" ||
capabilities.attributionProvider === "openrouter"
? "openrouter"
: "openai",
openRouterRouting: {},

View File

@@ -206,6 +206,27 @@ describe("provider attribution", () => {
});
});
// Verifies api.mistral.ai is recognized at both layers of the centralized
// detection added by this commit: the raw endpoint classifier and the
// aggregated request-capabilities resolver.
it("classifies native Mistral hosts centrally", () => {
// Layer 1: endpoint classification by URL alone.
expect(resolveProviderEndpoint("https://api.mistral.ai/v1")).toMatchObject({
endpointClass: "mistral-public",
hostname: "api.mistral.ai",
});
// Layer 2: the capabilities resolver should mark the host as a known native
// endpoint and map it to the "mistral" provider family.
expect(
resolveProviderRequestCapabilities({
provider: "mistral",
api: "openai-completions",
baseUrl: "https://api.mistral.ai/v1",
capability: "llm",
transport: "stream",
}),
).toMatchObject({
endpointClass: "mistral-public",
isKnownNativeEndpoint: true,
knownProviderFamily: "mistral",
});
});
it("treats OpenRouter-hosted Responses routes as explicit proxy-like endpoints", () => {
expect(
resolveProviderRequestPolicy({

View File

@@ -35,6 +35,7 @@ export type ProviderEndpointClass =
| "default"
| "anthropic-public"
| "github-copilot-native"
| "mistral-public"
| "moonshot-native"
| "modelstudio-native"
| "openai-public"
@@ -202,6 +203,9 @@ export function resolveProviderEndpoint(
if (host === "api.anthropic.com") {
return { endpointClass: "anthropic-public", hostname: host };
}
if (host === "api.mistral.ai") {
return { endpointClass: "mistral-public", hostname: host };
}
if (host.endsWith(".githubcopilot.com")) {
return { endpointClass: "github-copilot-native", hostname: host };
}
@@ -498,6 +502,7 @@ export function resolveProviderRequestCapabilities(
const isKnownNativeEndpoint =
endpointClass === "anthropic-public" ||
endpointClass === "github-copilot-native" ||
endpointClass === "mistral-public" ||
endpointClass === "moonshot-native" ||
endpointClass === "modelstudio-native" ||
endpointClass === "openai-public" ||