From 702e23835d4b7ada25b9ea7681428d3f911b699b Mon Sep 17 00:00:00 2001 From: Peter Steinberger Date: Fri, 1 May 2026 11:51:41 +0100 Subject: [PATCH] fix(agent): default missing model cost metadata --- src/agents/pi-embedded-runner/model.test.ts | 32 +++++++++++++++++++ src/agents/pi-embedded-runner/model.ts | 35 +++++++++++++++++++++ 2 files changed, 67 insertions(+) diff --git a/src/agents/pi-embedded-runner/model.test.ts b/src/agents/pi-embedded-runner/model.test.ts index 48f1f958ce0..0a4dbbdc719 100644 --- a/src/agents/pi-embedded-runner/model.test.ts +++ b/src/agents/pi-embedded-runner/model.test.ts @@ -254,6 +254,38 @@ describe("resolveModel", () => { expect(result.model?.input).toEqual(["text"]); }); + it("defaults missing model cost before handing models to PI", () => { + const cfg = { + models: { + providers: { + openai: { + api: "openai-responses", + models: [ + { + id: "gpt-5.5", + name: "GPT-5.5", + api: "openai-responses", + reasoning: true, + input: ["text"], + contextWindow: 400_000, + maxTokens: 128_000, + }, + ], + }, + }, + }, + } as unknown as OpenClawConfig; + + const result = resolveModelForTest("openai", "gpt-5.5", "/tmp/agent", cfg); + + expect(result.error).toBeUndefined(); + expect(result.model).toMatchObject({ + provider: "openai", + id: "gpt-5.5", + cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 }, + }); + }); + it("includes provider baseUrl in fallback model", () => { const cfg = { models: { diff --git a/src/agents/pi-embedded-runner/model.ts b/src/agents/pi-embedded-runner/model.ts index 4ace0982c56..81bbfcb8504 100644 --- a/src/agents/pi-embedded-runner/model.ts +++ b/src/agents/pi-embedded-runner/model.ts @@ -190,6 +190,40 @@ function normalizeResolvedModel(params: { agentDir?: string; runtimeHooks?: ProviderRuntimeHooks; }): Model { + const normalizeModelCost = (cost: unknown): Model["cost"] => { + if (!cost || typeof cost !== "object" || Array.isArray(cost)) { + return { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 }; + 
} + const record = cost as Partial<Model["cost"]>; + const input = + typeof record.input === "number" && Number.isFinite(record.input) ? record.input : 0; + const output = + typeof record.output === "number" && Number.isFinite(record.output) ? record.output : 0; + const cacheRead = + typeof record.cacheRead === "number" && Number.isFinite(record.cacheRead) + ? record.cacheRead + : 0; + const cacheWrite = + typeof record.cacheWrite === "number" && Number.isFinite(record.cacheWrite) + ? record.cacheWrite + : 0; + if ( + input === record.input && + output === record.output && + cacheRead === record.cacheRead && + cacheWrite === record.cacheWrite + ) { + return record as Model["cost"]; + } + return { + ...cost, + input, + output, + cacheRead, + cacheWrite, + }; + }; + const normalizedInputModel = { ...params.model, input: resolveProviderModelInput({ @@ -198,6 +232,7 @@ function normalizeResolvedModel(params: { modelName: params.model.name, input: params.model.input, }), + cost: normalizeModelCost((params.model as { cost?: unknown }).cost), } as Model; const runtimeHooks = params.runtimeHooks ?? DEFAULT_PROVIDER_RUNTIME_HOOKS; const pluginNormalized = runtimeHooks.normalizeProviderResolvedModelWithPlugin({