fix(openai): suppress stale Codex OAuth models

This commit is contained in:
Peter Steinberger
2026-05-06 09:37:59 +01:00
parent 85ded4d444
commit 5cf55ed3f1
9 changed files with 226 additions and 32 deletions

View File

@@ -110,6 +110,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- OpenAI/Codex: suppress stale `openai-codex` GPT-5.1/5.2/5.3 model refs that ChatGPT/Codex OAuth accounts now reject, keeping model lists, config validation, and forward-compat resolution on current 5.4/5.5 routes. Fixes #67158. Thanks @drpau.
- Google Meet/Voice Call: wait longer before playing PIN-derived Twilio DTMF for Meet dial-in prompts and retire stale delegated phone sessions instead of reusing completed calls.
- PDF/Codex: include extraction-fallback instructions for `openai-codex/*` PDF tool requests so Codex Responses receives its required system prompt. Fixes #77872. Thanks @anyech.
- Onboard/channels: recover externalized channel plugins from stale `channels.<id>` config by falling back to `ensureChannelSetupPluginInstalled` via the trusted catalog when the plugin is missing on disk, so leftover `appId`/token entries no longer dead-end onboard with "<channel> plugin not available." (#78328) Thanks @sliverp.

View File

@@ -150,6 +150,7 @@ Anthropic staff told us OpenClaw-style Claude CLI usage is allowed again, so Ope
- Policy note: OpenAI Codex OAuth is explicitly supported for external tools/workflows like OpenClaw.
- For the common subscription plus native Codex runtime route, sign in with `openai-codex` auth but configure `openai/gpt-5.5` plus `agents.defaults.agentRuntime.id: "codex"`.
- Use `openai-codex/gpt-5.5` only when you want the Codex OAuth/subscription route through PI; use `openai/gpt-5.5` without the Codex runtime override when your API-key setup and local catalog expose the public API route.
- Older `openai-codex/gpt-5.1*`, `openai-codex/gpt-5.2*`, and `openai-codex/gpt-5.3*` refs are suppressed because ChatGPT/Codex OAuth accounts reject them; use `openai-codex/gpt-5.5` or the native Codex runtime route instead.
```json5
{

View File

@@ -218,6 +218,14 @@ Choose your preferred auth method and follow the setup steps.
| `openai-codex/gpt-5.4-mini` | omitted / `runtime: "pi"` | ChatGPT/Codex OAuth through PI | Codex sign-in |
| `openai-codex/gpt-5.5` | `runtime: "auto"` | Still PI unless a plugin explicitly claims `openai-codex` | Codex sign-in |
<Warning>
Do not configure older `openai-codex/gpt-5.1*`, `openai-codex/gpt-5.2*`, or
`openai-codex/gpt-5.3*` model refs. ChatGPT/Codex OAuth accounts now reject
those models. Use `openai-codex/gpt-5.5` for the PI OAuth route, or
`openai/gpt-5.5` with `agentRuntime.id: "codex"` for native Codex runtime
execution.
</Warning>
<Note>
Keep using the `openai-codex` provider id for auth/profile commands. The
`openai-codex/*` model prefix is also the explicit PI route for Codex OAuth.

View File

@@ -98,17 +98,12 @@ const OPENAI_CODEX_GPT_55_PRO_TEMPLATE_MODEL_IDS = [
OPENAI_CODEX_GPT_54_PRO_MODEL_ID,
...OPENAI_CODEX_GPT_54_TEMPLATE_MODEL_IDS,
] as const;
const OPENAI_CODEX_GPT_53_MODEL_ID = "gpt-5.3-codex";
const OPENAI_CODEX_TEMPLATE_MODEL_IDS = ["gpt-5.2-codex"] as const;
const OPENAI_CODEX_MODERN_MODEL_IDS = [
OPENAI_CODEX_GPT_55_MODEL_ID,
OPENAI_CODEX_GPT_55_PRO_MODEL_ID,
OPENAI_CODEX_GPT_54_MODEL_ID,
OPENAI_CODEX_GPT_54_PRO_MODEL_ID,
OPENAI_CODEX_GPT_54_MINI_MODEL_ID,
"gpt-5.2",
"gpt-5.2-codex",
OPENAI_CODEX_GPT_53_MODEL_ID,
] as const;
function isLegacyCodexCompatBaseUrl(baseUrl?: string): boolean {
@@ -237,8 +232,6 @@ function resolveCodexForwardCompatModel(ctx: ProviderResolveDynamicModelContext)
maxTokens: OPENAI_CODEX_GPT_54_MAX_TOKENS,
cost: OPENAI_CODEX_GPT_54_MINI_COST,
};
} else if (lower === OPENAI_CODEX_GPT_53_MODEL_ID) {
templateIds = OPENAI_CODEX_TEMPLATE_MODEL_IDS;
} else {
return undefined;
}

View File

@@ -703,6 +703,56 @@
"provider": "openai-codex",
"model": "gpt-5.3-codex-spark",
"reason": "gpt-5.3-codex-spark is no longer exposed by the OpenAI or Codex catalogs. Use openai/gpt-5.5."
},
{
"provider": "openai-codex",
"model": "gpt-5.1",
"reason": "gpt-5.1 is no longer supported for ChatGPT/Codex OAuth accounts. Use openai-codex/gpt-5.5 for PI OAuth, or openai/gpt-5.5 with agentRuntime.id=\"codex\" for the native Codex runtime."
},
{
"provider": "openai-codex",
"model": "gpt-5.1-codex",
"reason": "gpt-5.1-codex is no longer supported for ChatGPT/Codex OAuth accounts. Use openai-codex/gpt-5.5 for PI OAuth, or openai/gpt-5.5 with agentRuntime.id=\"codex\" for the native Codex runtime."
},
{
"provider": "openai-codex",
"model": "gpt-5.1-codex-mini",
"reason": "gpt-5.1-codex-mini is no longer supported for ChatGPT/Codex OAuth accounts. Use openai-codex/gpt-5.5 for PI OAuth, or openai/gpt-5.5 with agentRuntime.id=\"codex\" for the native Codex runtime."
},
{
"provider": "openai-codex",
"model": "gpt-5.1-codex-max",
"reason": "gpt-5.1-codex-max is no longer supported for ChatGPT/Codex OAuth accounts. Use openai-codex/gpt-5.5 for PI OAuth, or openai/gpt-5.5 with agentRuntime.id=\"codex\" for the native Codex runtime."
},
{
"provider": "openai-codex",
"model": "gpt-5.2",
"reason": "gpt-5.2 is no longer supported for ChatGPT/Codex OAuth accounts. Use openai-codex/gpt-5.5 for PI OAuth, or openai/gpt-5.5 with agentRuntime.id=\"codex\" for the native Codex runtime."
},
{
"provider": "openai-codex",
"model": "gpt-5.2-codex",
"reason": "gpt-5.2-codex is no longer supported for ChatGPT/Codex OAuth accounts. Use openai-codex/gpt-5.5 for PI OAuth, or openai/gpt-5.5 with agentRuntime.id=\"codex\" for the native Codex runtime."
},
{
"provider": "openai-codex",
"model": "gpt-5.2-pro",
"reason": "gpt-5.2-pro is no longer supported for ChatGPT/Codex OAuth accounts. Use openai-codex/gpt-5.5 for PI OAuth, or openai/gpt-5.5 with agentRuntime.id=\"codex\" for the native Codex runtime."
},
{
"provider": "openai-codex",
"model": "gpt-5.3",
"reason": "gpt-5.3 is no longer supported for ChatGPT/Codex OAuth accounts. Use openai-codex/gpt-5.5 for PI OAuth, or openai/gpt-5.5 with agentRuntime.id=\"codex\" for the native Codex runtime."
},
{
"provider": "openai-codex",
"model": "gpt-5.3-codex",
"reason": "gpt-5.3-codex is no longer supported for ChatGPT/Codex OAuth accounts. Use openai-codex/gpt-5.5 for PI OAuth, or openai/gpt-5.5 with agentRuntime.id=\"codex\" for the native Codex runtime."
},
{
"provider": "openai-codex",
"model": "gpt-5.3-chat-latest",
"reason": "gpt-5.3-chat-latest is no longer supported for ChatGPT/Codex OAuth accounts. Use openai-codex/gpt-5.5 for PI OAuth, or openai/gpt-5.5 with agentRuntime.id=\"codex\" for the native Codex runtime."
}
]
},

View File

@@ -18,15 +18,7 @@ const OPENAI_XHIGH_MODEL_IDS = [
"gpt-5.2",
] as const;
const OPENAI_CODEX_XHIGH_MODEL_IDS = [
"gpt-5.5",
"gpt-5.5-pro",
"gpt-5.4",
"gpt-5.4-pro",
"gpt-5.3-codex",
"gpt-5.2-codex",
"gpt-5.1-codex",
] as const;
const OPENAI_CODEX_XHIGH_MODEL_IDS = ["gpt-5.5", "gpt-5.5-pro", "gpt-5.4", "gpt-5.4-pro"] as const;
function normalizeModelId(value: string): string {
return value.trim().toLowerCase();

View File

@@ -19,17 +19,41 @@ let readFileMock: ReturnType<typeof vi.fn>;
vi.mock("./model-suppression.runtime.js", () => ({
shouldSuppressBuiltInModel: (params: { provider?: string; id?: string }) =>
(params.provider === "openai" ||
params.provider === "azure-openai-responses" ||
params.provider === "openai-codex") &&
params.id === "gpt-5.3-codex-spark",
isSuppressedModel(params.provider, params.id),
buildShouldSuppressBuiltInModel: () => (params: { provider?: string; id?: string }) =>
(params.provider === "openai" ||
params.provider === "azure-openai-responses" ||
params.provider === "openai-codex") &&
params.id === "gpt-5.3-codex-spark",
isSuppressedModel(params.provider, params.id),
}));
/**
 * Test double for the suppression rules: hides `gpt-5.3-codex-spark` for every
 * OpenAI-family provider, and the stale 5.1/5.2/5.3-era refs for the
 * `openai-codex` (ChatGPT/Codex OAuth) provider only.
 */
function isSuppressedModel(provider?: string, id?: string): boolean {
  const normalized = id?.trim().toLowerCase() ?? "";
  if (normalized.length === 0) {
    return false;
  }
  const isOpenAiFamily =
    provider === "openai" ||
    provider === "azure-openai-responses" ||
    provider === "openai-codex";
  // gpt-5.3-codex-spark is gone from every OpenAI-family catalog.
  if (isOpenAiFamily && normalized === "gpt-5.3-codex-spark") {
    return true;
  }
  // The remaining suppressions apply only to the Codex OAuth provider.
  if (provider !== "openai-codex") {
    return false;
  }
  const staleCodexIds = new Set([
    "gpt-5.1",
    "gpt-5.1-codex",
    "gpt-5.1-codex-mini",
    "gpt-5.1-codex-max",
    "gpt-5.2",
    "gpt-5.2-codex",
    "gpt-5.2-pro",
    "gpt-5.3",
    "gpt-5.3-codex",
    "gpt-5.3-chat-latest",
  ]);
  return staleCodexIds.has(normalized);
}
function mockCatalogImportFailThenRecover() {
let call = 0;
__setModelCatalogImportForTest(async () => {
@@ -524,6 +548,57 @@ describe("loadModelCatalog", () => {
);
});
// Seeds PI discovery with three retired Codex OAuth model ids plus the current
// gpt-5.5, then verifies the catalog loader drops only the stale entries.
it("filters stale openai-codex 5.1/5.2/5.3 built-ins from the catalog", async () => {
// Stale refs: one representative from each retired generation (5.1/5.2/5.3).
mockPiDiscoveryModels([
{
id: "gpt-5.1-codex-mini",
provider: "openai-codex",
name: "GPT-5.1 Codex Mini",
reasoning: true,
contextWindow: 400000,
input: ["text", "image"],
},
{
id: "gpt-5.2-codex",
provider: "openai-codex",
name: "GPT-5.2 Codex",
reasoning: true,
contextWindow: 400000,
input: ["text", "image"],
},
{
id: "gpt-5.3-codex",
provider: "openai-codex",
name: "GPT-5.3 Codex",
reasoning: true,
contextWindow: 400000,
input: ["text", "image"],
},
// Current model: must survive the suppression filter unchanged.
{
id: "gpt-5.5",
provider: "openai-codex",
name: "GPT-5.5",
reasoning: true,
contextWindow: 400000,
input: ["text", "image"],
},
]);
const result = await loadModelCatalog({ config: {} as OpenClawConfig });
// All three stale refs are filtered out of the loaded catalog...
expect(result).not.toContainEqual(
expect.objectContaining({ provider: "openai-codex", id: "gpt-5.1-codex-mini" }),
);
expect(result).not.toContainEqual(
expect.objectContaining({ provider: "openai-codex", id: "gpt-5.2-codex" }),
);
expect(result).not.toContainEqual(
expect.objectContaining({ provider: "openai-codex", id: "gpt-5.3-codex" }),
);
// ...while the supported gpt-5.5 entry is kept.
expect(result).toContainEqual(
expect.objectContaining({ provider: "openai-codex", id: "gpt-5.5" }),
);
});
it("does not synthesize gpt-5.4 OpenAI forward-compat entries from template models", async () => {
mockPiDiscoveryModels([
{

View File

@@ -40,6 +40,25 @@ vi.mock("../model-suppression.js", () => {
return undefined;
}
// Model ids that ChatGPT/Codex OAuth accounts now reject outright
// (the retired 5.1/5.2/5.3-era refs).
const staleOpenAICodexModelIds = new Set([
  "gpt-5.1",
  "gpt-5.1-codex",
  "gpt-5.1-codex-mini",
  "gpt-5.1-codex-max",
  "gpt-5.2",
  "gpt-5.2-codex",
  "gpt-5.2-pro",
  "gpt-5.3",
  "gpt-5.3-codex",
  "gpt-5.3-chat-latest",
]);
// True when the (provider, id) pair names a retired Codex OAuth model.
// Ids are normalized (trimmed, lowercased) before lookup; a missing or
// empty id never matches.
function isStaleOpenAICodexModel(provider?: string, id?: string): boolean {
  if (provider !== "openai-codex") {
    return false;
  }
  const normalized = (id ?? "").trim().toLowerCase();
  return staleOpenAICodexModelIds.has(normalized);
}
return {
shouldSuppressBuiltInModel: ({
provider,
@@ -52,6 +71,9 @@ vi.mock("../model-suppression.js", () => {
baseUrl?: string;
config?: unknown;
}) => {
if (isStaleOpenAICodexModel(provider, id)) {
return true;
}
if (
(provider === "openai" ||
provider === "azure-openai-responses" ||
@@ -67,6 +89,9 @@ vi.mock("../model-suppression.js", () => {
);
},
shouldUnconditionallySuppress: ({ provider, id }: { provider?: string; id?: string }) => {
if (isStaleOpenAICodexModel(provider, id)) {
return true;
}
if (
(provider === "openai" ||
provider === "azure-openai-responses" ||
@@ -93,6 +118,10 @@ vi.mock("../model-suppression.js", () => {
) {
return "Unknown model: qwen/qwen3.6-plus. qwen3.6-plus is not supported on the Qwen Coding Plan endpoint; use a Standard pay-as-you-go Qwen endpoint or choose qwen/qwen3.5-plus.";
}
if (isStaleOpenAICodexModel(provider, id)) {
const modelId = id?.trim().toLowerCase() ?? "";
return `Unknown model: openai-codex/${modelId}. ${modelId} is no longer supported for ChatGPT/Codex OAuth accounts. Use openai-codex/gpt-5.5 for PI OAuth, or openai/gpt-5.5 with agentRuntime.id="codex" for the native Codex runtime.`;
}
if (
(provider === "openai" ||
provider === "azure-openai-responses" ||
@@ -1455,7 +1484,7 @@ describe("resolveModel", () => {
});
});
it("does not downgrade exact openai-codex gpt-5.3-codex registry metadata", () => {
it("rejects stale exact openai-codex gpt-5.3-codex registry metadata", () => {
vi.mocked(discoverModels).mockReturnValue({
find: vi.fn((provider: string, modelId: string) => {
if (provider !== "openai-codex") {
@@ -1475,13 +1504,10 @@ describe("resolveModel", () => {
const result = resolveModelForTest("openai-codex", "gpt-5.3-codex", "/tmp/agent");
expect(result.error).toBeUndefined();
expect(result.model).toMatchObject({
provider: "openai-codex",
id: "gpt-5.3-codex",
contextWindow: 272000,
maxTokens: 128000,
});
expect(result.model).toBeUndefined();
expect(result.error).toBe(
'Unknown model: openai-codex/gpt-5.3-codex. gpt-5.3-codex is no longer supported for ChatGPT/Codex OAuth accounts. Use openai-codex/gpt-5.5 for PI OAuth, or openai/gpt-5.5 with agentRuntime.id="codex" for the native Codex runtime.',
);
});
it("canonicalizes the legacy openai-codex gpt-5.4-codex alias at runtime", () => {

View File

@@ -2,6 +2,9 @@ import { describe, expect, it } from "vitest";
import type { PluginManifestRegistry } from "../plugins/manifest-registry.js";
import { validateConfigObjectWithPlugins } from "./validation.js";
const staleOpenAICodexReason =
'is no longer supported for ChatGPT/Codex OAuth accounts. Use openai-codex/gpt-5.5 for PI OAuth, or openai/gpt-5.5 with agentRuntime.id="codex" for the native Codex runtime.';
function createModelSuppressionRegistry(): PluginManifestRegistry {
return {
diagnostics: [],
@@ -26,6 +29,16 @@ function createModelSuppressionRegistry(): PluginManifestRegistry {
reason:
"gpt-5.3-codex-spark is no longer exposed by the OpenAI or Codex catalogs. Use openai/gpt-5.5.",
},
{
provider: "openai-codex",
model: "gpt-5.2-codex",
reason: `gpt-5.2-codex ${staleOpenAICodexReason}`,
},
{
provider: "openai-codex",
model: "gpt-5.3-codex",
reason: `gpt-5.3-codex ${staleOpenAICodexReason}`,
},
],
},
},
@@ -83,4 +96,39 @@ describe("config model reference validation", () => {
expect(res.ok).toBe(true);
});
// Verifies that config fallback model refs pointing at retired Codex OAuth
// models fail validation with the suppression reason from the manifest registry.
it("rejects stale openai-codex fallback model pairs", () => {
const res = validateConfigObjectWithPlugins(
{
agents: {
defaults: {
model: {
// Primary is a supported model; only the fallbacks are stale.
primary: "openai-codex/gpt-5.4-mini",
fallbacks: ["openai-codex/gpt-5.2-codex", "openai-codex/gpt-5.3-codex"],
},
},
},
},
{
pluginMetadataSnapshot: {
manifestRegistry: createModelSuppressionRegistry(),
},
},
);
expect(res.ok).toBe(false);
// Narrow the result type: issues are only available on the failure branch.
if (res.ok) {
return;
}
// Each stale fallback yields its own issue, indexed by list position.
expect(res.issues).toContainEqual({
path: "agents.defaults.model.fallbacks.0",
message:
'Unknown model: openai-codex/gpt-5.2-codex. gpt-5.2-codex is no longer supported for ChatGPT/Codex OAuth accounts. Use openai-codex/gpt-5.5 for PI OAuth, or openai/gpt-5.5 with agentRuntime.id="codex" for the native Codex runtime.',
});
expect(res.issues).toContainEqual({
path: "agents.defaults.model.fallbacks.1",
message:
'Unknown model: openai-codex/gpt-5.3-codex. gpt-5.3-codex is no longer supported for ChatGPT/Codex OAuth accounts. Use openai-codex/gpt-5.5 for PI OAuth, or openai/gpt-5.5 with agentRuntime.id="codex" for the native Codex runtime.',
});
});
});