fix(openai-codex): use /backend-api/codex/ base URL

OpenAI removed the /backend-api/responses alias on chatgpt.com server-side.
The OpenAI SDK appends /responses to the configured baseUrl, so OpenClaw's
current baseUrl ("https://chatgpt.com/backend-api") now resolves to
/backend-api/responses and hits a Cloudflare HTML 403 block page. The
provider's 403+HTML error classifier then surfaces this as an auth-scope
failure, triggering fruitless OAuth re-login loops for every GPT-5.4
sub-agent call.

- Point OPENAI_CODEX_BASE_URL at https://chatgpt.com/backend-api/codex
  (both the catalog constant and the sibling local constant in the provider).
- Extend isOpenAICodexBaseUrl to accept the new /codex segment while keeping
  the legacy path recognized so pre-existing user configs and persisted
  model metadata still round-trip through the normalizer correctly.
- Add positive-case test coverage for the new base URL; update existing
  normalization tests whose expected canonical output now includes /codex.

Verified with live curl using the exact OAuth access token stored by
OpenClaw: the /codex/responses path returns HTTP 200 with streaming SSE,
while the old /responses alias returns HTTP 403 HTML regardless of auth
headers. Scoped tests (base-url, openai-codex-provider, transport-policy,
openai-provider, index) pass; pnpm tsgo and pnpm build are clean.
This commit is contained in:
methazoo
2026-04-20 06:01:27 -07:00
committed by Peter Steinberger
parent 8150c363b5
commit 8a2d7f2541
6 changed files with 16 additions and 8 deletions

View File

@@ -15,6 +15,14 @@ describe("openai base URL helpers", () => {
});
it("recognizes Codex ChatGPT backend routes", () => {
+// New canonical form (includes /codex segment; OpenAI removed the
+// /backend-api/responses alias server-side on 2026-04).
+expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api/codex")).toBe(true);
+expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api/codex/")).toBe(true);
+expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api/codex/v1")).toBe(true);
+expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api/codex/v1/")).toBe(true);
+// Legacy form still recognized as a Codex baseURL for backward
+// compatibility with existing user configs.
expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api")).toBe(true);
expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api/")).toBe(true);
expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api/v1")).toBe(true);
@@ -25,7 +33,7 @@ describe("openai base URL helpers", () => {
expect(isOpenAICodexBaseUrl("https://api.openai.com/v1")).toBe(false);
expect(isOpenAICodexBaseUrl("https://chatgpt.com")).toBe(false);
expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api/v2")).toBe(false);
-expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api/codex")).toBe(false);
+expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api/codex/v2")).toBe(false);
expect(isOpenAICodexBaseUrl(undefined)).toBe(false);
});
});

View File

@@ -13,5 +13,5 @@ export function isOpenAICodexBaseUrl(baseUrl?: string): boolean {
if (!trimmed) {
return false;
}
-return /^https?:\/\/chatgpt\.com\/backend-api(?:\/v1)?\/?$/i.test(trimmed);
+return /^https?:\/\/chatgpt\.com\/backend-api(?:\/codex)?(?:\/v1)?\/?$/i.test(trimmed);
}

View File

@@ -1,6 +1,6 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
-export const OPENAI_CODEX_BASE_URL = "https://chatgpt.com/backend-api";
+export const OPENAI_CODEX_BASE_URL = "https://chatgpt.com/backend-api/codex";
export function buildOpenAICodexProvider(): ModelProviderConfig {
return {

View File

@@ -465,7 +465,7 @@ describe("openai codex provider", () => {
expect(model).toMatchObject({
api: "openai-codex-responses",
-baseUrl: "https://chatgpt.com/backend-api",
+baseUrl: "https://chatgpt.com/backend-api/codex",
});
});
@@ -491,7 +491,7 @@ describe("openai codex provider", () => {
expect(model).toMatchObject({
api: "openai-codex-responses",
-baseUrl: "https://chatgpt.com/backend-api",
+baseUrl: "https://chatgpt.com/backend-api/codex",
});
});
@@ -506,7 +506,7 @@ describe("openai codex provider", () => {
} as never),
).toEqual({
api: "openai-codex-responses",
-baseUrl: "https://chatgpt.com/backend-api",
+baseUrl: "https://chatgpt.com/backend-api/codex",
});
});
});

View File

@@ -34,7 +34,7 @@ import {
} from "./shared.js";
const PROVIDER_ID = "openai-codex";
-const OPENAI_CODEX_BASE_URL = "https://chatgpt.com/backend-api";
+const OPENAI_CODEX_BASE_URL = "https://chatgpt.com/backend-api/codex";
const OPENAI_CODEX_GPT_54_MODEL_ID = "gpt-5.4";
const OPENAI_CODEX_GPT_54_LEGACY_MODEL_ID = "gpt-5.4-codex";
const OPENAI_CODEX_GPT_54_PRO_MODEL_ID = "gpt-5.4-pro";

View File

@@ -209,7 +209,7 @@ describe("buildOpenAIProvider", () => {
provider: "openai-codex",
id: "gpt-5.4",
api: "openai-codex-responses",
-baseUrl: "https://chatgpt.com/backend-api",
+baseUrl: "https://chatgpt.com/backend-api/codex",
contextWindow: 1_050_000,
maxTokens: 128_000,
});