fix(openai-codex): normalize completions transport drift

This commit is contained in:
Peter Steinberger
2026-04-21 07:57:25 +01:00
parent dc6ecd571a
commit 97534372f8
3 changed files with 56 additions and 1 deletion

View File

@@ -20,6 +20,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Exec/YOLO: stop rejecting gateway-host exec in `security=full` plus `ask=off` mode via the Python/Node script preflight hardening path, so promptless YOLO exec once again runs direct interpreter stdin and heredoc forms such as `node <<'NODE' ... NODE`.
- OpenAI Codex: normalize legacy `openai-completions` transport overrides on default OpenAI/Codex hosts back to the native Codex Responses transport while leaving custom proxies untouched. (#45304) Thanks @dyss1992.
- Anthropic/plugins: scope Anthropic `api: "anthropic-messages"` defaulting to Anthropic-owned providers, so `openai-codex` and other providers without an explicit `api` no longer get rewritten to the wrong transport. Fixes #64534.
- fix(qqbot): add SSRF guard to direct-upload URL paths in uploadC2CMedia and uploadGroupMedia [AI-assisted]. (#69595) Thanks @pgondhi987.
- fix(gateway): enforce allowRequestSessionKey gate on template-rendered mapping sessionKeys. (#69381) Thanks @pgondhi987.

View File

@@ -495,6 +495,32 @@ describe("openai codex provider", () => {
});
});
it("normalizes legacy completions metadata to the codex transport", () => {
  const provider = buildOpenAICodexProviderPlugin();
  // Legacy config: a Codex model whose resolved metadata still points at the
  // plain OpenAI completions API on the default api.openai.com host.
  const legacyResolvedModel = {
    provider: "openai-codex",
    model: {
      id: "gpt-5.4",
      name: "gpt-5.4",
      provider: "openai-codex",
      api: "openai-completions",
      baseUrl: "https://api.openai.com/v1",
      reasoning: true,
      input: ["text", "image"],
      cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      contextWindow: 1_050_000,
      contextTokens: 272_000,
      maxTokens: 128_000,
    },
  } as never;
  const normalized = provider.normalizeResolvedModel?.(legacyResolvedModel);
  // Both the API kind and the base URL must be rewritten to the native
  // Codex Responses transport.
  expect(normalized).toMatchObject({
    api: "openai-codex-responses",
    baseUrl: "https://chatgpt.com/backend-api/codex",
  });
});
it("normalizes transport metadata for stale /backend-api/v1 codex routes", () => {
const provider = buildOpenAICodexProviderPlugin();
@@ -509,4 +535,31 @@ describe("openai codex provider", () => {
baseUrl: "https://chatgpt.com/backend-api/codex",
});
});
it("normalizes transport metadata for legacy completions codex routes", () => {
  const provider = buildOpenAICodexProviderPlugin();
  // A stale `openai-completions` override on the default OpenAI host is
  // expected to be rewritten to the native Codex Responses transport.
  const normalized = provider.normalizeTransport?.({
    provider: "openai-codex",
    api: "openai-completions",
    baseUrl: "https://api.openai.com/v1",
  } as never);
  expect(normalized).toEqual({
    api: "openai-codex-responses",
    baseUrl: "https://chatgpt.com/backend-api/codex",
  });
});
it("leaves custom proxy completions transport metadata unchanged", () => {
  const provider = buildOpenAICodexProviderPlugin();
  // A non-OpenAI host is treated as a deliberate custom proxy: the plugin
  // must return undefined so the user's transport override is preserved.
  const normalized = provider.normalizeTransport?.({
    provider: "openai-codex",
    api: "openai-completions",
    baseUrl: "https://proxy.example.com/v1",
  } as never);
  expect(normalized).toBeUndefined();
});
});

View File

@@ -106,7 +106,8 @@ function normalizeCodexTransportFields(params: {
const useCodexTransport =
!params.baseUrl || isOpenAIApiBaseUrl(params.baseUrl) || isOpenAICodexBaseUrl(params.baseUrl);
const api =
useCodexTransport && (!params.api || params.api === "openai-responses")
useCodexTransport &&
(!params.api || params.api === "openai-responses" || params.api === "openai-completions")
? "openai-codex-responses"
: (params.api ?? undefined);
const baseUrl =