From 2dcc4605d4908c16068e8d6351eb8ec8f26e3f27 Mon Sep 17 00:00:00 2001
From: Peter Steinberger
Date: Mon, 27 Apr 2026 06:01:45 +0100
Subject: [PATCH] fix(llm-task): normalize provider-prefixed model overrides

---
 CHANGELOG.md                                  |  1 +
 extensions/llm-task/src/llm-task-tool.test.ts | 11 +++++++++++
 extensions/llm-task/src/llm-task-tool.ts      | 13 ++++++++++++-
 3 files changed, 24 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index ab6b0b1ec85..2fd8c3968a2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -55,6 +55,7 @@ Docs: https://docs.openclaw.ai
 - Providers/Ollama: preserve explicit configured model input modalities when merging discovered provider metadata so custom vision models keep image support instead of silently dropping attachments. Fixes #39690; carries forward #39785. Thanks @Skrblik and @Mriris.
 - Providers/Ollama: estimate native Ollama transcript usage when `/api/chat` omits prompt/eval counters while preserving exact zero counters, keeping local model runs visible in usage surfaces. Carries forward #39112. Thanks @TylonHH.
 - Providers/PDF/Ollama: add bounded network timeouts for Ollama model pulls and native Anthropic/Gemini PDF analysis requests so unresponsive provider endpoints no longer hang sessions indefinitely. Fixes #54142; supersedes #54144 and #54145. Thanks @jinduwang1001-max and @arkyu2077.
+- LLM Task/Ollama: accept model overrides that already include the selected provider prefix, avoiding doubled ids such as `ollama/ollama/llama3.2:latest`, and live-verify local Ollama JSON tasks return parsed output. Fixes #50052. Thanks @ralphy-maplebots and @Hollychou924.
 - Memory/doctor: treat Ollama memory embeddings as key-optional so `openclaw doctor` no longer warns about a missing API key when the gateway reports embeddings are ready. Fixes #46584. Thanks @fengly78.
 - Agents/Ollama: apply provider-owned replay turn normalization to native Ollama chat so Cloud models no longer reject non-alternating replay history in agent/Gateway runs. Fixes #71697. Thanks @ismael-81.
 - Control UI/Ollama: show the resolved configured thinking default in chat and session thinking dropdowns so inherited `adaptive`/per-model thinking config no longer appears as `Default (off)` or a generic inherit value. Fixes #72407. Thanks @NotecAG.
diff --git a/extensions/llm-task/src/llm-task-tool.test.ts b/extensions/llm-task/src/llm-task-tool.test.ts
index 9436367d490..546ef31fa53 100644
--- a/extensions/llm-task/src/llm-task-tool.test.ts
+++ b/extensions/llm-task/src/llm-task-tool.test.ts
@@ -155,6 +155,17 @@ describe("llm-task tool (json-only)", () => {
     expect(call.model).toBe("claude-4-sonnet");
   });
 
+  it("accepts model overrides that already include the selected provider prefix", async () => {
+    mockEmbeddedRunJson({ ok: true });
+    const call = await executeEmbeddedRun({
+      prompt: "x",
+      provider: "anthropic",
+      model: "anthropic/claude-4-sonnet",
+    });
+    expect(call.provider).toBe("anthropic");
+    expect(call.model).toBe("claude-4-sonnet");
+  });
+
   it("passes thinking override to embedded runner", async () => {
     mockEmbeddedRunJson({ ok: true });
     const call = await executeEmbeddedRun({ prompt: "x", thinking: "high" });
diff --git a/extensions/llm-task/src/llm-task-tool.ts b/extensions/llm-task/src/llm-task-tool.ts
index 1611ca0f113..4d195724183 100644
--- a/extensions/llm-task/src/llm-task-tool.ts
+++ b/extensions/llm-task/src/llm-task-tool.ts
@@ -38,6 +38,16 @@ function toModelKey(provider?: string, model?: string): string | undefined {
   return `${p}/${m}`;
 }
 
+function stripDuplicateProviderPrefix(provider: string | undefined, model: string | undefined) {
+  const p = provider?.trim();
+  const m = model?.trim();
+  if (!p || !m) {
+    return m || undefined;
+  }
+  const prefix = `${p}/`;
+  return m.startsWith(prefix) ? m.slice(prefix.length) : m;
+}
+
 type PluginCfg = {
   defaultProvider?: string;
   defaultModel?: string;
@@ -109,11 +119,12 @@ export function createLlmTaskTool(api: OpenClawPluginApi) {
     primaryProvider ||
     undefined;
 
-  const model =
+  const rawModel =
     (typeof params.model === "string" && params.model.trim()) ||
     (typeof pluginCfg.defaultModel === "string" && pluginCfg.defaultModel.trim()) ||
     primaryModel ||
     undefined;
+  const model = stripDuplicateProviderPrefix(provider, rawModel);
 
   const authProfileId =
     (typeof params.authProfileId === "string" && params.authProfileId.trim()) ||
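
Illustration (not part of the applied patch): a minimal sketch of how the new helper behaves, with the function body copied verbatim from the diff above and hypothetical example inputs. Only a prefix matching the currently selected provider is stripped, so a model id carrying a different provider's prefix passes through unchanged; combined with toModelKey, this is what prevents doubled ids like ollama/ollama/llama3.2:latest.

  // Copied from the hunk added to extensions/llm-task/src/llm-task-tool.ts.
  function stripDuplicateProviderPrefix(provider: string | undefined, model: string | undefined) {
    const p = provider?.trim();
    const m = model?.trim();
    if (!p || !m) {
      return m || undefined;
    }
    const prefix = `${p}/`;
    return m.startsWith(prefix) ? m.slice(prefix.length) : m;
  }

  // Hypothetical inputs, for illustration only:
  stripDuplicateProviderPrefix("ollama", "ollama/llama3.2:latest");    // "llama3.2:latest"
  stripDuplicateProviderPrefix("ollama", "llama3.2:latest");           // "llama3.2:latest" (no prefix, unchanged)
  stripDuplicateProviderPrefix("anthropic", "ollama/llama3.2:latest"); // "ollama/llama3.2:latest" (different provider, kept)
  stripDuplicateProviderPrefix(undefined, "  ");                       // undefined (blank input normalized)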