test: accept gpt-5.5 release live output

This commit is contained in:
Peter Steinberger
2026-05-02 08:32:17 +01:00
parent d13a2063c4
commit 45d0268f9a
3 changed files with 39 additions and 1 deletions

View File

@@ -74,6 +74,26 @@ function sleep(ms: number): Promise<void> {
return new Promise((resolve) => setTimeout(resolve, ms));
}
/**
 * Builds the `openai` provider entry used when exercising the Codex CLI
 * backend in live tests.
 *
 * The model id is extracted from `modelKey` via `parseModelRef`; when the
 * key yields no usable model (missing or whitespace-only), the config falls
 * back to the literal `"gpt-5.5"`. The request timeout mirrors
 * `CLI_BACKEND_REQUEST_TIMEOUT_MS`, rounded up to whole seconds.
 *
 * @param modelKey - Provider-qualified model reference, e.g. "openai/gpt-5.5".
 * @returns A provider config shaped like `models.providers.openai`.
 */
function openAiProviderConfigForCodexCli(
  modelKey: string,
): NonNullable<NonNullable<OpenClawConfig["models"]>["providers"]>["openai"] {
  const ref = parseModelRef(modelKey, DEFAULT_PROVIDER);
  const trimmed = ref?.model?.trim();
  // Empty string and undefined both fall back to the default model id.
  const id = trimmed ? trimmed : "gpt-5.5";
  const modelEntry = {
    contextWindow: 1_047_576,
    id,
    maxTokens: 32_768,
    name: id,
  };
  return {
    api: "openai-responses",
    baseUrl: "https://api.openai.com/v1",
    models: [modelEntry],
    timeoutSeconds: Math.ceil(CLI_BACKEND_REQUEST_TIMEOUT_MS / 1000),
  };
}
function isProviderCapacityError(error: unknown): boolean {
const message = error instanceof Error ? `${error.name}: ${error.message}` : String(error);
const normalized = message.toLowerCase();
@@ -289,8 +309,8 @@ describeLive("gateway live (cli backend)", () => {
providers: {
...cfg.models?.providers,
openai: {
...openAiProviderConfigForCodexCli(modelKey),
...cfg.models?.providers?.openai,
timeoutSeconds: Math.ceil(CLI_BACKEND_REQUEST_TIMEOUT_MS / 1000),
},
},
}

View File

@@ -78,6 +78,22 @@ describe("gateway codex harness live helpers", () => {
expect(isExpectedCodexStatusCommandText(text)).toBe(true);
});
// Regression coverage: the GPT-5.5 Docker harness emits a multi-line status
// "card" (a "Session is healthy:" header followed by bulleted details) rather
// than the older single-sentence prose. The matcher must accept this shape.
it("accepts the OpenAI Codex status card emitted by the GPT-5.5 Docker harness", () => {
// Fixture reproduces the card verbatim, joined with newlines — do not edit
// the literals; they must match real harness output byte-for-byte.
const text = [
"OpenClaw 2026.4.30-beta.1 is running on `openai/gpt-5.5`.",
"",
"Session is healthy:",
"- Context: `21k/272k` used, `8%`",
"- Cache: `19%` hit",
"- Runtime: `OpenAI Codex`",
"- Execution: `direct`",
"- Active tasks: `1` (`/codex status`)",
"- Queue: `steer`, depth `0`",
].join("\n");
expect(isExpectedCodexStatusCommandText(text)).toBe(true);
});
it("rejects status prose for a different codex session", () => {
const text =
"OpenClaw is running on `openai/gpt-5.5` with low reasoning/text settings. Context is at `22k/272k` tokens, no compactions, and the current session is `agent:dev:other`.";

View File

@@ -91,6 +91,7 @@ export function isExpectedCodexStatusCommandText(text: string): boolean {
const normalized = text.toLowerCase();
const mentionsOpenClawStatus =
normalized.includes("openclaw is running on") ||
/openclaw\s+\S+\s+is running on/u.test(normalized) ||
normalized.includes("openclaw status:") ||
normalized.includes("status: running on") ||
normalized.includes("session status: running on");
@@ -103,6 +104,7 @@ export function isExpectedCodexStatusCommandText(text: string): boolean {
normalized.includes("current session is `agent:dev:live-codex-harness`") ||
normalized.includes("current session is agent:dev:live-codex-harness") ||
normalized.includes("session context is healthy") ||
normalized.includes("session is healthy:") ||
((normalized.includes("session context") || normalized.includes("context is at")) &&
normalized.includes("active task: `/codex status`"));
const mentionsModel =