fix(opencode): expose Claude thinking levels (#72778)

* fix(opencode): expose claude thinking levels

* test(opencode): cover adaptive claude thinking bounds

* docs(changelog): credit opencode thinking contributor

---------

Co-authored-by: haishmg <4529977+haishmg@users.noreply.github.com>
Co-authored-by: Peter Steinberger <steipete@gmail.com>
This commit is contained in:
harish ganeshmurthy
2026-04-27 17:03:26 +04:00
committed by GitHub
parent 1b1916053f
commit f75d8827f2
3 changed files with 89 additions and 2 deletions

View File

@@ -48,6 +48,7 @@ Docs: https://docs.openclaw.ai
- Control UI: keep session-specific assistant identity loads authoritative after WebSocket connect, so non-main agent chat sessions do not show the main agent name in the header after bootstrap refreshes. Fixes #72776. Thanks @rockytian-top.
- Agents/Qwen: preserve exact custom `modelstudio` provider configs with foreign `api` owners so explicit OpenAI-compatible Model Studio endpoints no longer get normalized into the bundled Qwen plugin path. Fixes #64483. Thanks @FiredMosquito831.
- MCP/bundle-mcp: normalize CLI-native `type: "http"` MCP server entries to OpenClaw `transport: "streamable-http"` on save, repair existing configs with doctor, and keep embedded Pi from falling back to legacy SSE GET-first startup for those servers. Fixes #72757. Thanks @Studioscale.
- OpenCode: expose Anthropic Opus/Sonnet 4.x thinking levels for proxied Claude models, so `/think xhigh`, `/think adaptive`, and `/think max` validate consistently with the direct Anthropic provider. Fixes #72729. Thanks @haishmg and @aaajiao.
- Media-understanding/audio: migrate deprecated `{input}` placeholders in legacy `audio.transcription.command` configs to `{{MediaPath}}`, so custom audio transcribers no longer receive the literal placeholder after doctor repair. Fixes #72760. Thanks @krisfanue3-hash.
- Ollama/WSL2: warn when GPU-backed WSL2 installs combine CUDA visibility with an autostarting `ollama.service` using `Restart=always`, and document the systemd, `.wslconfig`, and keep-alive mitigation for crash loops. Carries forward #61022; fixes #61185. Thanks @yhyatt.
- Ollama/onboarding: de-dupe suggested bare local models against installed `:latest` tags and skip redundant pulls, so setup shows the installed model once and no longer says it is downloading an already available model. Fixes #68952. Thanks @tleyden.

View File

@@ -1,5 +1,8 @@
import { describe, expect, it } from "vitest";
import { registerProviderPlugin } from "../../test/helpers/plugins/provider-registration.js";
import {
registerProviderPlugin,
requireRegisteredProvider,
} from "../../test/helpers/plugins/provider-registration.js";
import { expectPassthroughReplayPolicy } from "../../test/helpers/provider-replay-policy.ts";
import plugin from "./index.js";
@@ -40,4 +43,46 @@ describe("opencode provider plugin", () => {
modelId: "claude-opus-4.6",
});
});
it("exposes Anthropic thinking levels for proxied Claude models", async () => {
  const { providers } = await registerProviderPlugin({
    plugin,
    id: "opencode",
    name: "OpenCode Zen Provider",
  });
  const provider = requireRegisteredProvider(providers, "opencode");
  const resolveThinkingProfile = provider.resolveThinkingProfile!;

  // Opus 4.7 gets the full extended ladder (xhigh/adaptive/max) and stays off by default.
  const opus47Profile = resolveThinkingProfile({
    provider: "opencode",
    modelId: "claude-opus-4-7",
  });
  expect(opus47Profile).toMatchObject({
    levels: expect.arrayContaining([{ id: "xhigh" }, { id: "adaptive" }, { id: "max" }]),
    defaultLevel: "off",
  });

  // Opus 4.6 and Sonnet 4.6 get adaptive (as the default) but never xhigh or max.
  for (const modelId of ["claude-opus-4.6", "claude-sonnet-4-6"]) {
    const profile = resolveThinkingProfile({
      provider: "opencode",
      modelId,
    });
    expect(profile).toMatchObject({
      levels: expect.arrayContaining([{ id: "adaptive" }]),
      defaultLevel: "adaptive",
    });
    const hasExtendedLevels = profile?.levels.some(
      (level) => level.id === "xhigh" || level.id === "max",
    );
    expect(hasExtendedLevels).toBe(false);
  }
});
});

View File

@@ -1,4 +1,4 @@
import { definePluginEntry } from "openclaw/plugin-sdk/plugin-entry";
import { definePluginEntry, type ProviderThinkingProfile } from "openclaw/plugin-sdk/plugin-entry";
import { createProviderApiKeyAuthMethod } from "openclaw/plugin-sdk/provider-auth-api-key";
import {
matchesExactOrPrefix,
@@ -17,6 +17,20 @@ const OPENCODE_SHARED_WIZARD_GROUP = {
groupLabel: "OpenCode",
groupHint: OPENCODE_SHARED_HINT,
} as const;
// Claude Opus 4.7 model-id prefixes (both dash and dot version spellings); these
// models receive the full extended thinking ladder including xhigh/adaptive/max.
const ANTHROPIC_OPUS_47_MODEL_PREFIXES = ["claude-opus-4-7", "claude-opus-4.7"] as const;
// Claude Opus/Sonnet 4.6 model-id prefixes; these models get only the adaptive
// extra level on top of the base ladder (no xhigh/max).
const ANTHROPIC_ADAPTIVE_MODEL_PREFIXES = [
  "claude-opus-4-6",
  "claude-opus-4.6",
  "claude-sonnet-4-6",
  "claude-sonnet-4.6",
] as const;
// Base thinking levels shared by every Anthropic model proxied through OpenCode.
// `satisfies` validates the shape against ProviderThinkingProfile["levels"] while
// keeping the literal level ids for inference.
const BASE_ANTHROPIC_THINKING_LEVELS = [
  { id: "off" },
  { id: "minimal" },
  { id: "low" },
  { id: "medium" },
  { id: "high" },
] as const satisfies ProviderThinkingProfile["levels"];
function isModernOpencodeModel(modelId: string): boolean {
const lower = normalizeLowercaseStringOrEmpty(modelId);
@@ -26,6 +40,32 @@ function isModernOpencodeModel(modelId: string): boolean {
return !matchesExactOrPrefix(lower, MINIMAX_MODERN_MODEL_MATCHERS);
}
/**
 * Returns true when the model id, normalized to lowercase, starts with any of
 * the supplied prefixes.
 */
function matchesAnyPrefix(modelId: string, prefixes: readonly string[]): boolean {
  const normalized = normalizeLowercaseStringOrEmpty(modelId);
  for (const prefix of prefixes) {
    if (normalized.startsWith(prefix)) {
      return true;
    }
  }
  return false;
}
/**
 * Builds the thinking-level profile for an OpenCode-proxied model.
 *
 * Opus 4.7 models expose the full extended ladder (base + xhigh + adaptive +
 * max) with thinking off by default; Opus/Sonnet 4.6 models add only the
 * adaptive level and default to it; everything else gets the base ladder with
 * no default level.
 */
function resolveOpencodeThinkingProfile(modelId: string): ProviderThinkingProfile {
  // Opus 4.7: full extended ladder, off by default.
  if (matchesAnyPrefix(modelId, ANTHROPIC_OPUS_47_MODEL_PREFIXES)) {
    const extendedLevels = [
      ...BASE_ANTHROPIC_THINKING_LEVELS,
      { id: "xhigh" },
      { id: "adaptive" },
      { id: "max" },
    ];
    return { levels: extendedLevels, defaultLevel: "off" };
  }
  // Opus/Sonnet 4.6: adaptive only, and adaptive is the default.
  if (matchesAnyPrefix(modelId, ANTHROPIC_ADAPTIVE_MODEL_PREFIXES)) {
    const adaptiveLevels = [...BASE_ANTHROPIC_THINKING_LEVELS, { id: "adaptive" }];
    return { levels: adaptiveLevels, defaultLevel: "adaptive" };
  }
  // All other models: base ladder, no default.
  return { levels: BASE_ANTHROPIC_THINKING_LEVELS };
}
export default definePluginEntry({
id: PROVIDER_ID,
name: "OpenCode Zen Provider",
@@ -66,6 +106,7 @@ export default definePluginEntry({
],
...PASSTHROUGH_GEMINI_REPLAY_HOOKS,
isModernModelRef: ({ modelId }) => isModernOpencodeModel(modelId),
resolveThinkingProfile: ({ modelId }) => resolveOpencodeThinkingProfile(modelId),
});
api.registerMediaUnderstandingProvider(opencodeMediaUnderstandingProvider);
},