fix(deepseek): expose V4 max thinking levels (#73008)

Merged via squash.

Prepared head SHA: ef561a59de
Co-authored-by: ai-hpc <183861985+ai-hpc@users.noreply.github.com>
Co-authored-by: hxy91819 <8814856+hxy91819@users.noreply.github.com>
Reviewed-by: @hxy91819
This commit is contained in:
NVIDIAN
2026-04-30 08:34:05 -07:00
committed by GitHub
parent 0eb8f34000
commit 797d574dfd
7 changed files with 67 additions and 9 deletions

View File

@@ -110,6 +110,37 @@ describe("deepseek provider plugin", () => {
);
});
it("advertises max thinking levels for DeepSeek V4 models only", async () => {
  const provider = await registerSingleProviderPlugin(deepseekPlugin);
  const resolveThinkingProfile = provider.resolveThinkingProfile!;
  // Shorthand for looking up the thinking profile of a DeepSeek model id.
  const profileFor = (modelId: string) =>
    resolveThinkingProfile({ provider: "deepseek", modelId } as never);
  const expectedV4Levels = ["off", "minimal", "low", "medium", "high", "xhigh", "max"];
  // Both V4 models advertise the full level ladder…
  expect(profileFor("deepseek-v4-pro")?.levels.map((level) => level.id)).toEqual(
    expectedV4Levels,
  );
  expect(profileFor("deepseek-v4-flash")?.levels.map((level) => level.id)).toEqual(
    expectedV4Levels,
  );
  // …with "high" as the default level.
  expect(profileFor("deepseek-v4-flash")?.defaultLevel).toBe("high");
  // Non-V4 models expose no thinking profile at all.
  expect(profileFor("deepseek-chat")).toBe(undefined);
  expect(profileFor("deepseek-reasoner")).toBe(undefined);
});
it("maps thinking levels to DeepSeek V4 payload controls", async () => {
let capturedPayload: Record<string, unknown> | undefined;
const baseStreamFn = (

View File

@@ -1,11 +1,27 @@
import type { ProviderThinkingProfile } from "openclaw/plugin-sdk/plugin-entry";
import { readConfiguredProviderCatalogEntries } from "openclaw/plugin-sdk/provider-catalog-shared";
import { defineSingleProviderPluginEntry } from "openclaw/plugin-sdk/provider-entry";
import { buildProviderReplayFamilyHooks } from "openclaw/plugin-sdk/provider-model-shared";
import { isDeepSeekV4ModelId } from "./models.js";
import { applyDeepSeekConfig, DEEPSEEK_DEFAULT_MODEL_REF } from "./onboard.js";
import { buildDeepSeekProvider } from "./provider-catalog.js";
import { createDeepSeekV4ThinkingWrapper } from "./stream.js";
// Id under which this plugin registers its provider.
const PROVIDER_ID = "deepseek";
// Thinking-level ids advertised for DeepSeek V4 models; presumably ordered
// from least to most effort — TODO confirm against the provider SDK contract.
const V4_THINKING_LEVEL_IDS = ["off", "minimal", "low", "medium", "high", "xhigh", "max"] as const;
/** Wraps a level id in the `{ id }` object shape used by the profile's `levels` array. */
function buildDeepSeekV4ThinkingLevel(id: (typeof V4_THINKING_LEVEL_IDS)[number]) {
  const level = { id };
  return level;
}
// Single shared profile advertised for every DeepSeek V4 model: the full
// level ladder from V4_THINKING_LEVEL_IDS, defaulting to "high".
const DEEPSEEK_V4_THINKING_PROFILE = {
  levels: V4_THINKING_LEVEL_IDS.map(buildDeepSeekV4ThinkingLevel),
  defaultLevel: "high",
} satisfies ProviderThinkingProfile;
/**
 * Returns the shared V4 thinking profile when `modelId` names a DeepSeek V4
 * model, and `undefined` for every other model id.
 */
function resolveDeepSeekV4ThinkingProfile(modelId: string): ProviderThinkingProfile | undefined {
  if (!isDeepSeekV4ModelId(modelId)) {
    return undefined;
  }
  return DEEPSEEK_V4_THINKING_PROFILE;
}
export default defineSingleProviderPluginEntry({
id: PROVIDER_ID,
@@ -46,9 +62,7 @@ export default defineSingleProviderPluginEntry({
/\bdeepseek\b.*(?:input.*too long|context.*exceed)/i.test(errorMessage),
...buildProviderReplayFamilyHooks({ family: "openai-compatible" }),
wrapStreamFn: (ctx) => createDeepSeekV4ThinkingWrapper(ctx.streamFn, ctx.thinkingLevel),
isModernModelRef: ({ modelId }) => {
const lower = modelId.toLowerCase();
return lower === "deepseek-v4-flash" || lower === "deepseek-v4-pro";
},
resolveThinkingProfile: ({ modelId }) => resolveDeepSeekV4ThinkingProfile(modelId),
isModernModelRef: ({ modelId }) => Boolean(resolveDeepSeekV4ThinkingProfile(modelId)),
},
});

View File

@@ -19,3 +19,15 @@ export function buildDeepSeekModelDefinition(
api: "openai-completions",
};
}
// Lowercase ids of the models that make up the DeepSeek V4 family.
const DEEPSEEK_V4_MODEL_IDS = new Set(["deepseek-v4-flash", "deepseek-v4-pro"]);

/** Case-insensitive check for membership in the DeepSeek V4 model family. */
export function isDeepSeekV4ModelId(modelId: string): boolean {
  const normalized = modelId.toLowerCase();
  return DEEPSEEK_V4_MODEL_IDS.has(normalized);
}

/**
 * True when `model` is a DeepSeek-provider ref whose id names a V4 model.
 * Refs from other providers, or with a non-string id, are rejected.
 */
export function isDeepSeekV4ModelRef(model: { provider?: string; id?: unknown }): boolean {
  const { provider, id } = model;
  if (provider !== "deepseek" || typeof id !== "string") {
    return false;
  }
  return isDeepSeekV4ModelId(id);
}

View File

@@ -1,9 +1,6 @@
import type { ProviderWrapStreamFnContext } from "openclaw/plugin-sdk/plugin-entry";
import { createDeepSeekV4OpenAICompatibleThinkingWrapper } from "openclaw/plugin-sdk/provider-stream-shared";
function isDeepSeekV4ModelId(modelId: unknown): boolean {
return modelId === "deepseek-v4-flash" || modelId === "deepseek-v4-pro";
}
import { isDeepSeekV4ModelRef } from "./models.js";
export function createDeepSeekV4ThinkingWrapper(
baseStreamFn: ProviderWrapStreamFnContext["streamFn"],
@@ -12,6 +9,6 @@ export function createDeepSeekV4ThinkingWrapper(
return createDeepSeekV4OpenAICompatibleThinkingWrapper({
baseStreamFn,
thinkingLevel,
shouldPatchModel: (model) => model.provider === "deepseek" && isDeepSeekV4ModelId(model.id),
shouldPatchModel: isDeepSeekV4ModelRef,
});
}