fix(openai-codex): normalize legacy copilot transport

This commit is contained in:
Peter Steinberger
2026-04-21 08:02:27 +01:00
parent 85d86ebc4b
commit c197b3fef4
3 changed files with 51 additions and 2 deletions

View File

@@ -20,7 +20,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Exec/YOLO: stop rejecting gateway-host exec in `security=full` plus `ask=off` mode via the Python/Node script preflight hardening path, so promptless YOLO exec once again runs direct interpreter stdin and heredoc forms such as `node <<'NODE' ... NODE`.
- OpenAI Codex: normalize legacy `openai-completions` transport overrides on default OpenAI/Codex hosts back to the native Codex Responses transport while leaving custom proxies untouched. (#45304) Thanks @dyss1992.
- OpenAI Codex: normalize legacy `openai-completions` transport overrides on default OpenAI/Codex and GitHub Copilot-compatible hosts back to the native Codex Responses transport while leaving custom proxies untouched. (#45304, #42194) Thanks @dyss1992 and @DeadlySilent.
- Anthropic/plugins: scope Anthropic `api: "anthropic-messages"` defaulting to Anthropic-owned providers, so `openai-codex` and other providers without an explicit `api` no longer get rewritten to the wrong transport. Fixes #64534.
- fix(qqbot): add SSRF guard to direct-upload URL paths in uploadC2CMedia and uploadGroupMedia [AI-assisted]. (#69595) Thanks @pgondhi987.
- fix(gateway): enforce allowRequestSessionKey gate on template-rendered mapping sessionKeys. (#69381) Thanks @pgondhi987.

View File

@@ -521,6 +521,32 @@ describe("openai codex provider", () => {
});
});
it("normalizes legacy GitHub Copilot Codex metadata to the codex transport", () => {
  const plugin = buildOpenAICodexProviderPlugin();
  // Resolved-model input carrying the legacy `openai-completions` transport
  // pointed at the GitHub Copilot compat host; normalization should rewrite it.
  const legacyResolvedModel = {
    provider: "openai-codex",
    model: {
      id: "gpt-5.4",
      name: "gpt-5.4",
      provider: "openai-codex",
      api: "openai-completions",
      baseUrl: "https://api.githubcopilot.com",
      reasoning: true,
      input: ["text", "image"],
      cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      contextWindow: 1_050_000,
      contextTokens: 272_000,
      maxTokens: 128_000,
    },
  } as never;
  const normalized = plugin.normalizeResolvedModel?.(legacyResolvedModel);
  // Expect the native Codex Responses transport and its canonical endpoint.
  expect(normalized).toMatchObject({
    api: "openai-codex-responses",
    baseUrl: "https://chatgpt.com/backend-api/codex",
  });
});
it("normalizes transport metadata for stale /backend-api/v1 codex routes", () => {
const provider = buildOpenAICodexProviderPlugin();
@@ -551,6 +577,21 @@ describe("openai codex provider", () => {
});
});
it("normalizes transport metadata for legacy GitHub Copilot Codex routes", () => {
  const plugin = buildOpenAICodexProviderPlugin();
  // A bare transport descriptor on the Copilot compat /v1 route should be
  // rewritten to the native Codex Responses transport and endpoint.
  const result = plugin.normalizeTransport?.({
    provider: "openai-codex",
    api: "openai-completions",
    baseUrl: "https://api.githubcopilot.com/v1",
  } as never);
  expect(result).toEqual({
    api: "openai-codex-responses",
    baseUrl: "https://chatgpt.com/backend-api/codex",
  });
});
it("leaves custom proxy completions transport metadata unchanged", () => {
const provider = buildOpenAICodexProviderPlugin();

View File

@@ -96,6 +96,11 @@ const OPENAI_CODEX_MODERN_MODEL_IDS = [
OPENAI_CODEX_GPT_53_SPARK_MODEL_ID,
] as const;
/**
 * Whether `baseUrl` points at the GitHub Copilot compat host
 * (`api.githubcopilot.com`, optionally `/v1`, optional trailing slash),
 * matched case-insensitively after trimming surrounding whitespace.
 * Missing/blank values are never legacy-compat.
 */
function isLegacyCodexCompatBaseUrl(baseUrl?: string): boolean {
  const candidate = baseUrl?.trim() ?? "";
  if (candidate === "") {
    return false;
  }
  return /^https?:\/\/api\.githubcopilot\.com(?:\/v1)?\/?$/iu.test(candidate);
}
function normalizeCodexTransportFields(params: {
api?: ProviderRuntimeModel["api"] | null;
baseUrl?: string;
@@ -104,7 +109,10 @@ function normalizeCodexTransportFields(params: {
baseUrl?: string;
} {
const useCodexTransport =
!params.baseUrl || isOpenAIApiBaseUrl(params.baseUrl) || isOpenAICodexBaseUrl(params.baseUrl);
!params.baseUrl ||
isOpenAIApiBaseUrl(params.baseUrl) ||
isOpenAICodexBaseUrl(params.baseUrl) ||
isLegacyCodexCompatBaseUrl(params.baseUrl);
const api =
useCodexTransport &&
(!params.api || params.api === "openai-responses" || params.api === "openai-completions")