fix(openrouter): keep DeepSeek V4 reasoning effort valid (#77423)

Summary:
- The PR removes `max` from OpenRouter DeepSeek V4 thinking profiles, maps stale OpenRouter `max` overrides to `xhigh`, preserves direct DeepSeek behavior, and updates docs, tests, and changelog.
- Reproducibility: yes. Source inspection on current main shows OpenRouter DeepSeek V4 advertises `max` and sends `reasoning_effort: "max"`, matching the linked 400 logs; I did not need a live OpenRouter request for this assist pass.

Automerge notes:
- Ran the ClawSweeper repair loop before final review.
- Addressed earlier ClawSweeper review findings before merge.
- Included post-review commit in the final squash: docs(changelog): credit OpenRouter duplicate fix
- Included post-review commit in the final squash: fix(openrouter): keep DeepSeek V4 reasoning effort valid

Validation:
- ClawSweeper review passed for head becdea4223.
- Required merge gates passed before the squash merge.

Prepared head SHA: becdea4223
Review: https://github.com/openclaw/openclaw/pull/77423#issuecomment-4372880583

Co-authored-by: sallyom <somalley@redhat.com>
Co-authored-by: clawsweeper <274271284+clawsweeper[bot]@users.noreply.github.com>
This commit is contained in:
Sally O'Malley
2026-05-04 17:05:05 -04:00
committed by GitHub
parent a9817a5f97
commit 02ac7dc5a6
7 changed files with 83 additions and 16 deletions

View File

@@ -73,7 +73,7 @@ describe("openrouter provider hooks", () => {
it("advertises xhigh thinking for OpenRouter-routed DeepSeek V4 models", async () => {
const provider = await registerSingleProviderPlugin(openrouterPlugin);
const expectedV4Levels = ["off", "minimal", "low", "medium", "high", "xhigh", "max"];
const expectedV4Levels = ["off", "minimal", "low", "medium", "high", "xhigh"];
expect(
provider
@@ -309,7 +309,7 @@ describe("openrouter provider hooks", () => {
expect(capturedPayload).toMatchObject({
thinking: { type: "enabled" },
reasoning_effort: "max",
reasoning_effort: "xhigh",
messages: [
{ role: "user", content: "read file" },
{
@@ -324,6 +324,50 @@ describe("openrouter provider hooks", () => {
expect(baseStreamFn).toHaveBeenCalledOnce();
});
// Regression test for #77423: OpenRouter returns a 400 for reasoning_effort "max",
// so the OpenRouter DeepSeek V4 wrapper must map "max" -> "xhigh" while passing
// every other effort level through unchanged.
it("keeps OpenRouter DeepSeek V4 reasoning_effort within OpenRouter values", async () => {
const provider = await registerSingleProviderPlugin(openrouterPlugin);
const payloads: Array<Record<string, unknown>> = [];
// Stub stream function: captures each patched payload via onPayload and
// yields an empty stream so no real network/LLM call is made.
const baseStreamFn = vi.fn(
(
...args: Parameters<import("@mariozechner/pi-agent-core").StreamFn>
): ReturnType<import("@mariozechner/pi-agent-core").StreamFn> => {
const payload = { messages: [] };
void args[2]?.onPayload?.(payload, args[0]);
payloads.push(payload);
return { async *[Symbol.asyncIterator]() {} } as never;
},
);
// Exercise every supported thinking level, including the no-longer-advertised
// "max", against an OpenRouter-routed DeepSeek V4 model.
for (const thinkingLevel of ["minimal", "low", "medium", "high", "xhigh", "max"] as const) {
const wrapped = provider.wrapStreamFn?.({
provider: "openrouter",
modelId: "openrouter/deepseek/deepseek-v4-pro",
streamFn: baseStreamFn,
thinkingLevel,
} as never);
void wrapped?.(
{
provider: "openrouter",
api: "openai-completions",
id: "openrouter/deepseek/deepseek-v4-pro",
baseUrl: "https://openrouter.ai/api/v1",
compat: {},
} as never,
{ messages: [] } as never,
{},
);
}
// "max" (last entry) must be clamped to "xhigh"; all others pass through.
expect(payloads.map((payload) => payload.reasoning_effort)).toEqual([
"minimal",
"low",
"medium",
"high",
"xhigh",
"xhigh",
]);
});
it("recognizes full OpenRouter DeepSeek V4 refs but skips custom proxy routes", async () => {
const provider = await registerSingleProviderPlugin(openrouterPlugin);
const payloads: Array<Record<string, unknown>> = [];

View File

@@ -3,6 +3,8 @@ import type { ProviderWrapStreamFnContext } from "openclaw/plugin-sdk/plugin-ent
import { OPENROUTER_THINKING_STREAM_HOOKS } from "openclaw/plugin-sdk/provider-stream-family";
import {
createDeepSeekV4OpenAICompatibleThinkingWrapper,
type DeepSeekV4ReasoningEffort,
type DeepSeekV4ThinkingLevel,
createPayloadPatchStreamWrapper,
stripTrailingAssistantPrefillMessages,
} from "openclaw/plugin-sdk/provider-stream-shared";
@@ -55,6 +57,27 @@ function shouldPatchDeepSeekV4OpenRouterPayload(model: Parameters<StreamFn>[0]):
);
}
/**
 * Maps an OpenClaw thinking level to a reasoning_effort value that OpenRouter
 * accepts for DeepSeek V4 routes. OpenRouter rejects "max" with a 400 (direct
 * DeepSeek is unaffected), so "max" is clamped to the highest OpenRouter
 * value, "xhigh".
 *
 * @param thinkingLevel - level selected by the user/session config.
 * @returns an effort value within OpenRouter's accepted set.
 */
function resolveOpenRouterDeepSeekV4ReasoningEffort(
  thinkingLevel: DeepSeekV4ThinkingLevel,
): DeepSeekV4ReasoningEffort {
  switch (thinkingLevel) {
    // Levels OpenRouter accepts verbatim.
    case "minimal":
    case "low":
    case "medium":
    case "high":
    case "xhigh":
      return thinkingLevel;
    // OpenRouter has no "max"; clamp to its top value instead of 400ing.
    case "max":
      return "xhigh";
    // Adaptive has no OpenRouter equivalent; settle on the middle effort.
    case "adaptive":
      return "medium";
    // "off", undefined, or any future level: conservative default.
    // (Replaces the original's duplicated fallback — an explicit
    // `case "off": case undefined:` plus an unreachable trailing return.)
    default:
      return "high";
  }
}
function isEnabledReasoningValue(value: unknown): boolean {
if (value === undefined || value === null || value === false) {
return false;
@@ -125,6 +148,7 @@ function createOpenRouterDeepSeekV4ThinkingWrapper(
baseStreamFn,
thinkingLevel,
shouldPatchModel: shouldPatchDeepSeekV4OpenRouterPayload,
resolveReasoningEffort: resolveOpenRouterDeepSeekV4ReasoningEffort,
});
}
@@ -156,12 +180,3 @@ export function wrapOpenRouterProviderStream(
createOpenRouterDeepSeekV4ThinkingWrapper(wrappedStreamFn, ctx.thinkingLevel),
);
}
// Internal predicates re-exported for unit tests only; not part of the
// plugin's public API. Do not import from application code.
export const __testing = {
isOpenRouterDeepSeekV4ModelId,
isOpenRouterAnthropicModelId,
isOpenRouterReasoningPayloadEnabled,
isVerifiedOpenRouterRoute,
shouldPatchDeepSeekV4OpenRouterPayload,
shouldPatchAnthropicOpenRouterPayload,
};

View File

@@ -8,7 +8,6 @@ const OPENROUTER_DEEPSEEK_V4_THINKING_LEVEL_IDS = [
"medium",
"high",
"xhigh",
"max",
] as const;
function buildOpenRouterDeepSeekV4ThinkingLevel(