From 482ff924ef637886e8513201240d8b9e0f064b19 Mon Sep 17 00:00:00 2001
From: neilofneils404
Date: Mon, 27 Apr 2026 18:43:38 -0400
Subject: [PATCH] fix: pass directories to provider stream wrappers (#67843)

* fix: pass directories to provider stream wrappers

* fix: pass directories to provider stream wrappers

---------

Co-authored-by: neilofneils404 <258699186+neilofneils404@users.noreply.github.com>
Co-authored-by: vincentkoc <25068+vincentkoc@users.noreply.github.com>
---
 CHANGELOG.md                                  |  1 +
 package.json                                  |  1 +
 scripts/test-built-status-message-runtime.mjs | 47 +++++++++++++++++++
 .../pi-embedded-runner-extraparams.test.ts    | 34 ++++++++++++++
 src/agents/pi-embedded-runner/extra-params.ts |  2 +
 5 files changed, 85 insertions(+)
 create mode 100644 scripts/test-built-status-message-runtime.mjs

diff --git a/CHANGELOG.md b/CHANGELOG.md
index a4633b24150..b4d599e637a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -17,6 +17,7 @@ Docs: https://docs.openclaw.ai
 ### Fixes
 
 - Control UI/Agents: redact tool-call args, partial/final results, derived exec output, and configured custom secret patterns before streaming tool events to the Control UI, so tool output cannot expose provider or channel credentials. Fixes #72283. (#72319) Thanks @volcano303 and @BunsDev.
+- Providers/Codex: pass agent and workspace directories into provider stream wrappers so Codex native `web_search` activation can evaluate the correct auth context, and smoke-test the built status-message runtime by resolving the emitted bundle name. Carries forward #67843; refs #65909. Thanks @neilofneils404.
 - Models/fallbacks: treat user-selected session models as exact choices, so `/model ollama/...` and model-picker switches fail visibly when the selected provider is unreachable instead of answering from an unrelated configured fallback. Fixes #73023. Thanks @pavelyortho-cyber.
 - CLI/model probes: fail local `infer model run` probes when the provider returns no text output, so unreachable local providers and empty completions no longer look like successful smoke tests. Refs #73023. Thanks @pavelyortho-cyber.
 - CLI/Ollama: run local `infer model run` through the lean provider completion path and skip global model discovery for one-shot local probes, so Ollama smoke tests no longer pay full chat-agent/tool startup cost or hang before the native `/api/chat` request. Fixes #72851. Thanks @TotalRes2020.
diff --git a/package.json b/package.json
index 9f7a4267142..2ca48bf6b0a 100644
--- a/package.json
+++ b/package.json
@@ -1568,6 +1568,7 @@
     "test:auth:compat": "node scripts/run-vitest.mjs run --config test/vitest/vitest.gateway.config.ts src/gateway/server.auth.compat-baseline.test.ts src/gateway/client.test.ts src/gateway/reconnect-gating.test.ts src/gateway/protocol/connect-error-details.test.ts",
     "test:build:bundled-runtime-deps": "node scripts/test-built-bundled-runtime-deps.mjs",
     "test:build:singleton": "node scripts/test-built-plugin-singleton.mjs",
+    "test:build:status-message-runtime": "node scripts/test-built-status-message-runtime.mjs",
     "test:bundled": "node scripts/run-vitest.mjs run --config test/vitest/vitest.bundled.config.ts",
     "test:changed": "node scripts/test-projects.mjs --changed origin/main",
     "test:changed:max": "OPENCLAW_VITEST_MAX_WORKERS=8 node scripts/test-projects.mjs --changed origin/main",
diff --git a/scripts/test-built-status-message-runtime.mjs b/scripts/test-built-status-message-runtime.mjs
new file mode 100644
index 00000000000..e081354f128
--- /dev/null
+++ b/scripts/test-built-status-message-runtime.mjs
@@ -0,0 +1,47 @@
+import assert from "node:assert/strict";
+import fs from "node:fs";
+import path from "node:path";
+import { pathToFileURL } from "node:url";
+import { parsePackageRootArg } from "./lib/package-root-args.mjs";
+
+const STATUS_MESSAGE_RUNTIME_RE = /^status-message\.runtime(?:-[A-Za-z0-9_-]+)?\.js$/u;
+
+const { packageRoot } = parsePackageRootArg(
+  process.argv.slice(2),
+  "OPENCLAW_STATUS_MESSAGE_RUNTIME_ROOT",
+);
+
+function findBuiltStatusMessageRuntimePath(distDir) {
+  const candidates = fs
+    .readdirSync(distDir, { withFileTypes: true })
+    .filter((entry) => entry.isFile() && STATUS_MESSAGE_RUNTIME_RE.test(entry.name))
+    .map((entry) => entry.name)
+    .toSorted((left, right) => {
+      const leftHasHash = left !== "status-message.runtime.js";
+      const rightHasHash = right !== "status-message.runtime.js";
+      if (leftHasHash !== rightHasHash) {
+        return leftHasHash ? -1 : 1;
+      }
+      return left.localeCompare(right);
+    });
+
+  assert.ok(candidates.length > 0, `missing built status-message runtime bundle under ${distDir}`);
+
+  return path.join(distDir, candidates[0]);
+}
+
+const runtimePath = findBuiltStatusMessageRuntimePath(path.join(packageRoot, "dist"));
+const runtimeModule = await import(pathToFileURL(runtimePath).href);
+
+assert.equal(
+  typeof runtimeModule.loadStatusMessageRuntimeModule,
+  "function",
+  `built status-message runtime did not export loadStatusMessageRuntimeModule: ${runtimePath}`,
+);
+
+const statusModule = await runtimeModule.loadStatusMessageRuntimeModule();
+assert.equal(
+  typeof statusModule.buildStatusMessage,
+  "function",
+  "status-message runtime did not load buildStatusMessage",
+);
diff --git a/src/agents/pi-embedded-runner-extraparams.test.ts b/src/agents/pi-embedded-runner-extraparams.test.ts
index 22852257130..5d7067d9fd7 100644
--- a/src/agents/pi-embedded-runner-extraparams.test.ts
+++ b/src/agents/pi-embedded-runner-extraparams.test.ts
@@ -479,6 +479,40 @@ describe("applyExtraParamsToAgent", () => {
     };
   }
 
+  it("passes agentDir and workspaceDir to provider stream wrappers", () => {
+    let capturedContext: WrapProviderStreamFnParams["context"] | undefined;
+    extraParamsTesting.setProviderRuntimeDepsForTest({
+      prepareProviderExtraParams: () => undefined,
+      wrapProviderStreamFn: (params) => {
+        capturedContext = params.context;
+        return params.context.streamFn;
+      },
+    });
+
+    const agent = { streamFn: (() => ({}) as ReturnType<StreamFn>) as StreamFn };
+    const model = {
+      api: "openai-codex-responses",
+      provider: "openai-codex",
+      id: "gpt-5.4",
+    } as Model<"openai-codex-responses">;
+
+    applyExtraParamsToAgent(
+      agent,
+      undefined,
+      "openai-codex",
+      "gpt-5.4",
+      undefined,
+      "high",
+      "cass",
+      "/tmp/openclaw-workspace",
+      model,
+      "/tmp/openclaw-agent",
+    );
+
+    expect(capturedContext?.agentDir).toBe("/tmp/openclaw-agent");
+    expect(capturedContext?.workspaceDir).toBe("/tmp/openclaw-workspace");
+  });
+
   function runResponsesPayloadMutationCase(params: {
     applyProvider: string;
     applyModelId: string;
diff --git a/src/agents/pi-embedded-runner/extra-params.ts b/src/agents/pi-embedded-runner/extra-params.ts
index aa9cc94a569..58484a6403d 100644
--- a/src/agents/pi-embedded-runner/extra-params.ts
+++ b/src/agents/pi-embedded-runner/extra-params.ts
@@ -682,6 +682,8 @@ export function applyExtraParamsToAgent(
       config: cfg,
       context: {
         config: cfg,
+        agentDir,
+        workspaceDir,
         provider,
         modelId,
         extraParams: effectiveExtraParams,
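
Reviewer sketch (not part of the patch): what a provider stream wrapper can do
with the directories this change threads through. The context shape below is
inferred from the new test; the wrapper body itself is hypothetical and not an
OpenClaw API.

    // sketch.ts — assumes the context fields exercised by the new test
    type StreamFn = (...args: unknown[]) => unknown;

    interface WrapContext {
      provider: string;
      modelId: string;
      agentDir?: string; // populated by applyExtraParamsToAgent after this patch
      workspaceDir?: string; // likewise
      streamFn: StreamFn;
    }

    function wrapProviderStreamFnSketch({ context }: { context: WrapContext }): StreamFn {
      // Before the fix these fields were undefined here, so a Codex wrapper could
      // not tell which auth context a native web_search activation should use.
      if (context.provider !== "openai-codex" || context.agentDir === undefined) {
        return context.streamFn;
      }
      const { agentDir, workspaceDir, streamFn } = context;
      return (...args: unknown[]) => {
        // A real wrapper would evaluate auth material under agentDir and scope
        // tool activation to workspaceDir before delegating; this only tags the call.
        console.debug(`codex stream (agent=${agentDir}, workspace=${workspaceDir ?? "n/a"})`);
        return streamFn(...args);
      };
    }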
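
Reviewer note (not part of the patch): the new smoke test runs against a built
package via `npm run test:build:status-message-runtime`. Its bundle matcher
accepts both the plain and hash-suffixed emissions, and the sort in
findBuiltStatusMessageRuntimePath prefers hashed names over the plain
fallback; the hash suffix below is illustrative only.

    const STATUS_MESSAGE_RUNTIME_RE = /^status-message\.runtime(?:-[A-Za-z0-9_-]+)?\.js$/u;

    console.log(STATUS_MESSAGE_RUNTIME_RE.test("status-message.runtime.js"));        // true
    console.log(STATUS_MESSAGE_RUNTIME_RE.test("status-message.runtime-AB12cd.js")); // true
    console.log(STATUS_MESSAGE_RUNTIME_RE.test("status-message.runtime.js.map"));    // false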