fix(openrouter): expose DeepSeek V4 xhigh thinking

Fixes #74788.
This commit is contained in:
Vincent Koc
2026-05-03 10:51:05 -07:00
parent 96886381a3
commit 63ebe372e8
7 changed files with 114 additions and 16 deletions

View File

@@ -29,6 +29,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Plugins/OpenRouter: advertise DeepSeek V4 thinking levels, including `xhigh` and `max`, through the runtime and lightweight provider policy surfaces so `/think` validation no longer rejects OpenRouter-routed DeepSeek V4 models. Fixes #74788. Thanks @vincentkoc.
- Status/sessions: ignore malformed non-string persisted session provider/model metadata instead of throwing while rendering status summaries. Thanks @vincentkoc.
- CLI/config: remove only the targeted array element for `openclaw config unset array[index]` instead of replaying the unset during config write and deleting the shifted next element. Fixes #76290. Thanks @SymbolStar and @vincentkoc.
- Agents/tools: stop treating `tools.deny: ["write"]` as an implicit `apply_patch` deny; operators who want to block patch writes should deny `apply_patch` or `group:fs` explicitly. Fixes #76749. (#76795) Thanks @Nek-12 and @hclsys.

View File

@@ -9,6 +9,7 @@ import {
buildOpenrouterProvider,
isOpenRouterProxyReasoningUnsupportedModel,
} from "./provider-catalog.js";
import { resolveThinkingProfile } from "./provider-policy-api.js";
describe("openrouter provider hooks", () => {
it("registers OpenRouter speech alongside model and media providers", async () => {
@@ -70,6 +71,53 @@ describe("openrouter provider hooks", () => {
).toBe("native");
});
it("advertises xhigh thinking for OpenRouter-routed DeepSeek V4 models", async () => {
  const provider = await registerSingleProviderPlugin(openrouterPlugin);
  // Shorthand: resolve the thinking profile the runtime hook advertises for a model ref.
  const profileFor = (modelId: string) =>
    provider.resolveThinkingProfile?.({ provider: "openrouter", modelId } as never);
  // DeepSeek V4 exposes the full ladder, including xhigh and max.
  expect(profileFor("deepseek/deepseek-v4-pro")?.levels.map((level) => level.id)).toEqual([
    "off",
    "minimal",
    "low",
    "medium",
    "high",
    "xhigh",
    "max",
  ]);
  // The "openrouter/" alias form resolves too, defaulting to high.
  expect(profileFor("openrouter/deepseek/deepseek-v4-flash")?.defaultLevel).toBe("high");
  expect(
    provider.supportsXHighThinking?.({
      provider: "openrouter",
      modelId: "openrouter/deepseek/deepseek-v4-pro",
    } as never),
  ).toBe(true);
  // Non-DeepSeek models advertise no profile at all.
  expect(profileFor("openai/gpt-5.4")).toBe(undefined);
});
it("exposes DeepSeek V4 thinking levels through the lightweight policy artifact", () => {
  // The standalone policy surface mirrors the runtime hook for DeepSeek V4…
  const deepSeekProfile = resolveThinkingProfile({
    provider: "openrouter",
    modelId: "openrouter/deepseek/deepseek-v4-pro",
  });
  expect(deepSeekProfile?.levels.map((level) => level.id)).toContain("xhigh");
  // …and stays silent for unrelated models.
  const unrelatedProfile = resolveThinkingProfile({
    provider: "openrouter",
    modelId: "openai/gpt-5.4",
  });
  expect(unrelatedProfile).toBe(undefined);
});
it("canonicalizes stale OpenRouter /v1 config and runtime metadata", async () => {
const provider = await registerSingleProviderPlugin(openrouterPlugin);

View File

@@ -23,6 +23,10 @@ import {
} from "./provider-catalog.js";
import { buildOpenRouterSpeechProvider } from "./speech-provider.js";
import { wrapOpenRouterProviderStream } from "./stream.js";
import {
resolveOpenRouterThinkingProfile,
supportsOpenRouterXHighThinking,
} from "./thinking-policy.js";
import { buildOpenRouterVideoGenerationProvider } from "./video-generation-provider.js";
const PROVIDER_ID = "openrouter";
@@ -150,6 +154,8 @@ export default definePluginEntry({
},
...PASSTHROUGH_GEMINI_REPLAY_HOOKS,
resolveReasoningOutputMode: () => "native",
supportsXHighThinking: ({ modelId }) => supportsOpenRouterXHighThinking(modelId),
resolveThinkingProfile: ({ modelId }) => resolveOpenRouterThinkingProfile(modelId),
isModernModelRef: () => true,
wrapStreamFn: wrapOpenRouterProviderStream,
isCacheTtlEligible: (ctx) => isOpenRouterCacheTtlModel(ctx.modelId),

View File

@@ -0,0 +1,18 @@
import { normalizeLowercaseStringOrEmpty } from "openclaw/plugin-sdk/text-runtime";
/**
 * Lowercase an OpenRouter model ref and strip a leading `openrouter/` alias
 * prefix so downstream checks compare against canonical model ids.
 *
 * @returns The normalized id, or `undefined` for non-string input.
 */
export function normalizeOpenRouterModelId(modelId: unknown): string | undefined {
  if (typeof modelId !== "string") {
    return undefined;
  }
  const lowered = normalizeLowercaseStringOrEmpty(modelId);
  const aliasPrefix = "openrouter/";
  return lowered.startsWith(aliasPrefix) ? lowered.slice(aliasPrefix.length) : lowered;
}
/**
 * True when the (possibly `openrouter/`-prefixed) model ref points at a
 * DeepSeek V4 model (flash or pro). Any `:variant` suffix on the id is ignored.
 */
export function isOpenRouterDeepSeekV4ModelId(modelId: unknown): boolean {
  const normalized = normalizeOpenRouterModelId(modelId);
  const namespace = "deepseek/";
  if (!normalized?.startsWith(namespace)) {
    return false;
  }
  // split(..., 1) keeps only the segment before any ":variant" suffix.
  const [baseModelId] = normalized.slice(namespace.length).split(":", 1);
  switch (baseModelId) {
    case "deepseek-v4-flash":
    case "deepseek-v4-pro":
      return true;
    default:
      return false;
  }
}

View File

@@ -0,0 +1,5 @@
import { resolveOpenRouterThinkingProfile } from "./thinking-policy.js";
/**
 * Lightweight policy-artifact entry point: resolve the thinking profile for an
 * OpenRouter-routed model ref.
 *
 * NOTE(review): `provider` is accepted but unused — this artifact appears to be
 * OpenRouter-specific; confirm callers only route OpenRouter refs through it.
 */
export function resolveThinkingProfile(params: { provider?: string; modelId: string }) {
  const { modelId } = params;
  return resolveOpenRouterThinkingProfile(modelId);
}

View File

@@ -7,6 +7,7 @@ import {
stripTrailingAssistantPrefillMessages,
} from "openclaw/plugin-sdk/provider-stream-shared";
import { createSubsystemLogger } from "openclaw/plugin-sdk/runtime-env";
import { isOpenRouterDeepSeekV4ModelId } from "./models.js";
import {
isOpenRouterProxyReasoningUnsupportedModel,
normalizeOpenRouterBaseUrl,
@@ -27,22 +28,6 @@ function isOpenRouterAnthropicModelId(modelId: unknown): boolean {
);
}
// Lowercase the model ref and strip a leading "openrouter/" alias prefix;
// non-string input (readString -> undefined) yields undefined.
function normalizeOpenRouterModelId(modelId: unknown): string | undefined {
  const lowered = readString(modelId)?.toLowerCase();
  if (lowered === undefined) {
    return undefined;
  }
  const aliasPrefix = "openrouter/";
  return lowered.startsWith(aliasPrefix) ? lowered.slice(aliasPrefix.length) : lowered;
}
// DeepSeek V4 detection (flash/pro), tolerant of an "openrouter/" alias
// prefix and of ":variant" suffixes on the model id.
function isOpenRouterDeepSeekV4ModelId(modelId: unknown): boolean {
  const normalized = normalizeOpenRouterModelId(modelId);
  const namespace = "deepseek/";
  if (!normalized?.startsWith(namespace)) {
    return false;
  }
  // split(..., 1) keeps only the segment before any ":variant" suffix.
  const [baseModelId] = normalized.slice(namespace.length).split(":", 1);
  return baseModelId === "deepseek-v4-pro" || baseModelId === "deepseek-v4-flash";
}
function isVerifiedOpenRouterRoute(model: Parameters<StreamFn>[0]): boolean {
const provider = readString(model.provider)?.toLowerCase();
const baseUrl = readString(model.baseUrl);

View File

@@ -0,0 +1,35 @@
import type { ProviderThinkingProfile } from "openclaw/plugin-sdk/plugin-entry";
import { isOpenRouterDeepSeekV4ModelId } from "./models.js";
// Thinking levels advertised for OpenRouter-routed DeepSeek V4 models,
// ordered from lowest to highest effort.
const OPENROUTER_DEEPSEEK_V4_THINKING_LEVEL_IDS = [
  "off",
  "minimal",
  "low",
  "medium",
  "high",
  "xhigh",
  "max",
] as const;
type OpenRouterDeepSeekV4ThinkingLevelId =
  (typeof OPENROUTER_DEEPSEEK_V4_THINKING_LEVEL_IDS)[number];
// Wraps a level id in the level-object shape the profile expects.
function buildOpenRouterDeepSeekV4ThinkingLevel(id: OpenRouterDeepSeekV4ThinkingLevelId) {
  return { id };
}
// Single shared profile instance for every DeepSeek V4 model; defaults to "high".
const OPENROUTER_DEEPSEEK_V4_THINKING_PROFILE = {
  levels: OPENROUTER_DEEPSEEK_V4_THINKING_LEVEL_IDS.map(buildOpenRouterDeepSeekV4ThinkingLevel),
  defaultLevel: "high",
} satisfies ProviderThinkingProfile;
/**
 * Whether an OpenRouter model ref may request the `xhigh` thinking level.
 * Exactly the DeepSeek V4 models that carry a thinking profile qualify.
 */
export function supportsOpenRouterXHighThinking(modelId: string): boolean {
  return isOpenRouterDeepSeekV4ModelId(modelId) === true;
}
/**
 * Resolve the thinking profile advertised for an OpenRouter model ref.
 *
 * @returns The shared DeepSeek V4 profile, or `undefined` for any other model.
 */
export function resolveOpenRouterThinkingProfile(
  modelId: string,
): ProviderThinkingProfile | undefined {
  if (!isOpenRouterDeepSeekV4ModelId(modelId)) {
    return undefined;
  }
  return OPENROUTER_DEEPSEEK_V4_THINKING_PROFILE;
}