diff --git a/src/gateway/gateway-cli-backend.live.test.ts b/src/gateway/gateway-cli-backend.live.test.ts
index fd725e6c9c1..f70a6de78b1 100644
--- a/src/gateway/gateway-cli-backend.live.test.ts
+++ b/src/gateway/gateway-cli-backend.live.test.ts
@@ -74,6 +74,26 @@ function sleep(ms: number): Promise<void> {
   return new Promise((resolve) => setTimeout(resolve, ms));
 }
 
+function openAiProviderConfigForCodexCli(
+  modelKey: string,
+): NonNullable<NonNullable<OpenClawConfig["models"]>["providers"]>["openai"] {
+  const parsed = parseModelRef(modelKey, DEFAULT_PROVIDER);
+  const modelId = parsed?.model?.trim() || "gpt-5.5";
+  return {
+    api: "openai-responses",
+    baseUrl: "https://api.openai.com/v1",
+    models: [
+      {
+        contextWindow: 1_047_576,
+        id: modelId,
+        maxTokens: 32_768,
+        name: modelId,
+      },
+    ],
+    timeoutSeconds: Math.ceil(CLI_BACKEND_REQUEST_TIMEOUT_MS / 1000),
+  };
+}
+
 function isProviderCapacityError(error: unknown): boolean {
   const message = error instanceof Error ? `${error.name}: ${error.message}` : String(error);
   const normalized = message.toLowerCase();
@@ -289,8 +309,8 @@ describeLive("gateway live (cli backend)", () => {
       providers: {
         ...cfg.models?.providers,
         openai: {
+          ...openAiProviderConfigForCodexCli(modelKey),
           ...cfg.models?.providers?.openai,
-          timeoutSeconds: Math.ceil(CLI_BACKEND_REQUEST_TIMEOUT_MS / 1000),
         },
       },
     }
diff --git a/src/gateway/gateway-codex-harness.live-helpers.test.ts b/src/gateway/gateway-codex-harness.live-helpers.test.ts
index 7c05ce31ef5..80259d7bdbd 100644
--- a/src/gateway/gateway-codex-harness.live-helpers.test.ts
+++ b/src/gateway/gateway-codex-harness.live-helpers.test.ts
@@ -78,6 +78,22 @@ describe("gateway codex harness live helpers", () => {
     expect(isExpectedCodexStatusCommandText(text)).toBe(true);
   });
 
+  it("accepts the OpenAI Codex status card emitted by the GPT-5.5 Docker harness", () => {
+    const text = [
+      "OpenClaw 2026.4.30-beta.1 is running on `openai/gpt-5.5`.",
+      "",
+      "Session is healthy:",
+      "- Context: `21k/272k` used, `8%`",
+      "- Cache: `19%` hit",
+      "- Runtime: `OpenAI Codex`",
+      "- Execution: `direct`",
+      "- Active tasks: `1` (`/codex status`)",
+      "- Queue: `steer`, depth `0`",
+    ].join("\n");
+
+    expect(isExpectedCodexStatusCommandText(text)).toBe(true);
+  });
+
   it("rejects status prose for a different codex session", () => {
     const text =
       "OpenClaw is running on `openai/gpt-5.5` with low reasoning/text settings. Context is at `22k/272k` tokens, no compactions, and the current session is `agent:dev:other`.";
diff --git a/src/gateway/gateway-codex-harness.live-helpers.ts b/src/gateway/gateway-codex-harness.live-helpers.ts
index 53b9136037b..62a3b378a69 100644
--- a/src/gateway/gateway-codex-harness.live-helpers.ts
+++ b/src/gateway/gateway-codex-harness.live-helpers.ts
@@ -91,6 +91,7 @@ export function isExpectedCodexStatusCommandText(text: string): boolean {
   const normalized = text.toLowerCase();
   const mentionsOpenClawStatus =
     normalized.includes("openclaw is running on") ||
+    /openclaw\s+\S+\s+is running on/u.test(normalized) ||
     normalized.includes("openclaw status:") ||
     normalized.includes("status: running on") ||
     normalized.includes("session status: running on");
@@ -103,6 +104,7 @@ export function isExpectedCodexStatusCommandText(text: string): boolean {
     normalized.includes("current session is `agent:dev:live-codex-harness`") ||
     normalized.includes("current session is agent:dev:live-codex-harness") ||
     normalized.includes("session context is healthy") ||
+    normalized.includes("session is healthy:") ||
     ((normalized.includes("session context") || normalized.includes("context is at")) &&
       normalized.includes("active task: `/codex status`"));
   const mentionsModel =