fix(opencode-go): route DeepSeek V4 through OpenAI transport

This commit is contained in:
Peter Steinberger
2026-04-25 18:56:52 +01:00
parent 275c128e99
commit afd6b5d6fc
11 changed files with 161 additions and 77 deletions

View File

@@ -1,76 +1,17 @@
import type { ProviderWrapStreamFnContext } from "openclaw/plugin-sdk/plugin-entry";
import { streamWithPayloadPatch } from "openclaw/plugin-sdk/provider-stream-shared";
type DeepSeekThinkingLevel = ProviderWrapStreamFnContext["thinkingLevel"];
import { createDeepSeekV4OpenAICompatibleThinkingWrapper } from "openclaw/plugin-sdk/provider-stream-shared";
function isDeepSeekV4ModelId(modelId: unknown): boolean {
  // The two DeepSeek V4 variants this wrapper knows how to patch.
  const v4ModelIds = ["deepseek-v4-flash", "deepseek-v4-pro"];
  return typeof modelId === "string" && v4ModelIds.includes(modelId);
}
function isDisabledThinkingLevel(thinkingLevel: DeepSeekThinkingLevel): boolean {
  // "off" and "none" (case-insensitive) both mean thinking is disabled;
  // non-string levels are never treated as disabled.
  if (typeof thinkingLevel !== "string") {
    return false;
  }
  switch (thinkingLevel.toLowerCase()) {
    case "off":
    case "none":
      return true;
    default:
      return false;
  }
}
function resolveDeepSeekReasoningEffort(thinkingLevel: DeepSeekThinkingLevel): "high" | "max" {
  // Only the two strongest thinking levels map to "max" effort;
  // every other level (including non-strings) defaults to "high".
  if (thinkingLevel === "max" || thinkingLevel === "xhigh") {
    return "max";
  }
  return "high";
}
function stripDeepSeekReasoningContent(payload: Record<string, unknown>): void {
  // Delete the `reasoning_content` field from every message object so a
  // thinking-disabled request does not carry prior reasoning back to the API.
  // Non-object entries (e.g. null) are left untouched.
  const messages = payload.messages;
  if (!Array.isArray(messages)) {
    return;
  }
  for (const message of messages) {
    if (message && typeof message === "object") {
      delete (message as Record<string, unknown>).reasoning_content;
    }
  }
}
function ensureDeepSeekToolCallReasoningContent(payload: Record<string, unknown>): void {
  // Backfill an empty `reasoning_content` on assistant messages that carry
  // tool_calls but lack the field (presumably required by the DeepSeek API
  // when thinking is enabled — confirm against the provider docs).
  // Messages that already have the field, non-assistant messages, and
  // non-object entries are left as-is.
  const messages = payload.messages;
  if (!Array.isArray(messages)) {
    return;
  }
  for (const message of messages) {
    if (!message || typeof message !== "object") {
      continue;
    }
    const entry = message as Record<string, unknown>;
    const isAssistantToolCall = entry.role === "assistant" && Array.isArray(entry.tool_calls);
    if (isAssistantToolCall && !("reasoning_content" in entry)) {
      entry.reasoning_content = "";
    }
  }
}
/**
 * Wraps a provider stream function so DeepSeek V4 requests are patched for
 * the OpenAI-compatible transport according to the requested thinking level.
 *
 * Delegates to the shared `createDeepSeekV4OpenAICompatibleThinkingWrapper`
 * helper; only models whose provider is "deepseek" with a recognized V4
 * model id are patched — every other model streams through unchanged.
 *
 * NOTE(review): the original span was a merge/diff artifact — it declared
 * `thinkingLevel` twice (a compile error) and left the legacy inline
 * implementation as unreachable code after the first `return`. Resolved to
 * the shared-helper form that the rest of this change set uses.
 */
export function createDeepSeekV4ThinkingWrapper(
  baseStreamFn: ProviderWrapStreamFnContext["streamFn"],
  thinkingLevel: ProviderWrapStreamFnContext["thinkingLevel"],
): ProviderWrapStreamFnContext["streamFn"] {
  return createDeepSeekV4OpenAICompatibleThinkingWrapper({
    baseStreamFn,
    thinkingLevel,
    shouldPatchModel: (model) => model.provider === "deepseek" && isDeepSeekV4ModelId(model.id),
  });
}

View File

@@ -119,15 +119,56 @@ describe("opencode-go provider plugin", () => {
} as never),
).toMatchObject({
id: "deepseek-v4-pro",
api: "anthropic-messages",
api: "openai-completions",
provider: "opencode-go",
baseUrl: "https://opencode.ai/zen/go",
baseUrl: "https://opencode.ai/zen/go/v1",
reasoning: true,
contextWindow: 1_000_000,
maxTokens: 384_000,
compat: {
supportsUsageInStreaming: true,
supportsReasoningEffort: true,
maxTokensField: "max_tokens",
},
});
});
// Regression test: when thinkingLevel is "off", the OpenCode Go wrapper must
// strip the reasoning_effort/reasoning fields from the payload and replace
// them with `thinking: { type: "disabled" }`.
it("disables invalid DeepSeek V4 reasoning_effort off payloads on OpenCode Go", async () => {
const provider = await registerSingleProviderPlugin(plugin);
const capturedPayloads: Record<string, unknown>[] = [];
// Fake stream fn: hands the payload to the wrapper's onPayload hook, then
// records it so the assertions below see the patched result.
const baseStreamFn = (_model: unknown, _context: unknown, options: unknown) => {
const payload = {
model: "deepseek-v4-flash",
reasoning_effort: "off",
reasoning: "off",
};
(options as { onPayload?: (payload: Record<string, unknown>) => void })?.onPayload?.(payload);
capturedPayloads.push(payload);
return {} as never;
};
// Wrap the fake stream fn with the same context shape the plugin entry uses.
const streamFn = provider.wrapStreamFn?.({
streamFn: baseStreamFn as never,
providerId: "opencode-go",
modelId: "deepseek-v4-flash",
thinkingLevel: "off",
} as never);
expect(streamFn).toBeTypeOf("function");
await streamFn?.(
{ provider: "opencode-go", id: "deepseek-v4-flash" } as never,
{} as never,
{},
);
// Both reasoning fields must be gone, replaced by the disabled marker.
expect(capturedPayloads).toEqual([
{
model: "deepseek-v4-flash",
thinking: { type: "disabled" },
},
]);
});
it("canonicalizes stale OpenCode Go base URLs", async () => {
const provider = await registerSingleProviderPlugin(plugin);

View File

@@ -8,6 +8,7 @@ import {
normalizeOpencodeGoBaseUrl,
resolveOpencodeGoSupplementalModel,
} from "./provider-catalog.js";
import { createOpencodeGoDeepSeekV4Wrapper } from "./stream.js";
const PROVIDER_ID = "opencode-go";
export default definePluginEntry({
@@ -67,6 +68,7 @@ export default definePluginEntry({
resolveDynamicModel: ({ modelId }) => resolveOpencodeGoSupplementalModel(modelId),
augmentModelCatalog: () => listOpencodeGoSupplementalModelCatalogEntries(),
...PASSTHROUGH_GEMINI_REPLAY_HOOKS,
wrapStreamFn: (ctx) => createOpencodeGoDeepSeekV4Wrapper(ctx.streamFn, ctx.thinkingLevel),
isModernModelRef: () => true,
});
api.registerMediaUnderstandingProvider(opencodeGoMediaUnderstandingProvider);

View File

@@ -12,9 +12,9 @@ const OPENCODE_GO_SUPPLEMENTAL_MODELS = (
{
id: "deepseek-v4-pro",
name: "DeepSeek V4 Pro",
api: "anthropic-messages",
api: "openai-completions",
provider: PROVIDER_ID,
baseUrl: OPENCODE_GO_ANTHROPIC_BASE_URL,
baseUrl: OPENCODE_GO_OPENAI_BASE_URL,
reasoning: true,
input: ["text"],
cost: {
@@ -25,13 +25,18 @@ const OPENCODE_GO_SUPPLEMENTAL_MODELS = (
},
contextWindow: 1_000_000,
maxTokens: 384_000,
compat: {
supportsUsageInStreaming: true,
supportsReasoningEffort: true,
maxTokensField: "max_tokens",
},
},
{
id: "deepseek-v4-flash",
name: "DeepSeek V4 Flash",
api: "anthropic-messages",
api: "openai-completions",
provider: PROVIDER_ID,
baseUrl: OPENCODE_GO_ANTHROPIC_BASE_URL,
baseUrl: OPENCODE_GO_OPENAI_BASE_URL,
reasoning: true,
input: ["text"],
cost: {
@@ -42,6 +47,11 @@ const OPENCODE_GO_SUPPLEMENTAL_MODELS = (
},
contextWindow: 1_000_000,
maxTokens: 384_000,
compat: {
supportsUsageInStreaming: true,
supportsReasoningEffort: true,
maxTokensField: "max_tokens",
},
},
] satisfies ProviderRuntimeModel[]
).map((model) => normalizeModelCompat(model));

View File

@@ -0,0 +1,18 @@
import type { ProviderWrapStreamFnContext } from "openclaw/plugin-sdk/plugin-entry";
import { createDeepSeekV4OpenAICompatibleThinkingWrapper } from "openclaw/plugin-sdk/provider-stream-shared";
function isOpencodeGoDeepSeekV4ModelId(modelId: unknown): boolean {
  // The two DeepSeek V4 variants exposed through the OpenCode Go provider.
  const knownIds = ["deepseek-v4-pro", "deepseek-v4-flash"];
  return typeof modelId === "string" && knownIds.includes(modelId);
}
/**
 * Builds the OpenCode Go stream wrapper for DeepSeek V4 models.
 *
 * Delegates to the shared OpenAI-compatible DeepSeek V4 thinking wrapper;
 * only models whose provider is "opencode-go" and whose id is a known
 * DeepSeek V4 variant are patched — everything else passes through.
 */
export function createOpencodeGoDeepSeekV4Wrapper(
  baseStreamFn: ProviderWrapStreamFnContext["streamFn"],
  thinkingLevel: ProviderWrapStreamFnContext["thinkingLevel"],
): ProviderWrapStreamFnContext["streamFn"] {
  return createDeepSeekV4OpenAICompatibleThinkingWrapper({
    baseStreamFn,
    thinkingLevel,
    shouldPatchModel: (model) => {
      if (model.provider !== "opencode-go") {
        return false;
      }
      return isOpencodeGoDeepSeekV4ModelId(model.id);
    },
  });
}