diff --git a/docs/concepts/model-providers.md b/docs/concepts/model-providers.md
index 13e8e6715c2..050107892c1 100644
--- a/docs/concepts/model-providers.md
+++ b/docs/concepts/model-providers.md
@@ -68,7 +68,7 @@ OpenClaw ships with the pi‑ai catalog. These providers require **no**
generic OpenAI-compatible proxies
- Native OpenAI routes also keep Responses `store`, prompt-cache hints, and
OpenAI reasoning-compat payload shaping; proxy routes do not
-- `openai/gpt-5.3-codex-spark` is intentionally suppressed in OpenClaw because the live OpenAI API rejects it; Spark is treated as Codex-only
+- `openai/gpt-5.3-codex-spark` is intentionally suppressed in OpenClaw because the live OpenAI API rejects requests for it and the current Codex catalog does not expose it
```json5
{
@@ -107,7 +107,6 @@ OpenClaw ships with the pi‑ai catalog. These providers require **no**
`User-Agent`) are only attached on native Codex traffic to
`chatgpt.com/backend-api`, not generic OpenAI-compatible proxies
- Shares the same `/fast` toggle and `params.fastMode` config as direct `openai/*`; OpenClaw maps that to `service_tier=priority`
-- `openai/gpt-5.3-codex-spark` remains available through Codex OAuth when the catalog exposes it; entitlement-dependent
- `openai/gpt-5.5` keeps native `contextWindow = 1000000` and a default runtime `contextTokens = 272000`; override the runtime cap with `models.providers.openai-codex.models[].contextTokens`
- Policy note: OpenAI Codex OAuth is explicitly supported for external tools/workflows like OpenClaw.
diff --git a/docs/providers/openai.md b/docs/providers/openai.md
index 41d8c428211..f01790f7a83 100644
--- a/docs/providers/openai.md
+++ b/docs/providers/openai.md
@@ -80,7 +80,7 @@ Choose your preferred auth method and follow the setup steps.
```
- OpenClaw does **not** expose `openai/gpt-5.3-codex-spark` on the direct API path. Live OpenAI API requests reject that model. Spark is Codex-only.
+ OpenClaw does **not** expose `openai/gpt-5.3-codex-spark`. The live OpenAI API rejects requests for that model, and the current Codex catalog does not expose it either.
@@ -123,7 +123,6 @@ Choose your preferred auth method and follow the setup steps.
| Model ref | Route | Auth |
|-----------|-------|------|
| `openai/gpt-5.5` | ChatGPT/Codex OAuth | Codex sign-in |
- | `openai/gpt-5.3-codex-spark` | ChatGPT/Codex OAuth | Codex sign-in (entitlement-dependent) |
`openai-codex/*` and `codex/*` model refs are legacy compatibility aliases. Keep using the `openai-codex` provider id for auth/profile commands.
diff --git a/extensions/openai/openai-codex-provider.ts b/extensions/openai/openai-codex-provider.ts
index 84c31e7fb25..dff190c30c4 100644
--- a/extensions/openai/openai-codex-provider.ts
+++ b/extensions/openai/openai-codex-provider.ts
@@ -103,9 +103,6 @@ const OPENAI_CODEX_GPT_54_MINI_TEMPLATE_MODEL_IDS = [
...OPENAI_CODEX_GPT_54_TEMPLATE_MODEL_IDS,
] as const;
const OPENAI_CODEX_GPT_53_MODEL_ID = "gpt-5.3-codex";
-const OPENAI_CODEX_GPT_53_SPARK_MODEL_ID = "gpt-5.3-codex-spark";
-const OPENAI_CODEX_GPT_53_SPARK_CONTEXT_TOKENS = 128_000;
-const OPENAI_CODEX_GPT_53_SPARK_MAX_TOKENS = 128_000;
const OPENAI_CODEX_TEMPLATE_MODEL_IDS = ["gpt-5.2-codex"] as const;
const OPENAI_CODEX_XHIGH_MODEL_IDS = [
OPENAI_CODEX_GPT_55_MODEL_ID,
@@ -114,7 +111,6 @@ const OPENAI_CODEX_XHIGH_MODEL_IDS = [
OPENAI_CODEX_GPT_54_PRO_MODEL_ID,
OPENAI_CODEX_GPT_54_MINI_MODEL_ID,
OPENAI_CODEX_GPT_53_MODEL_ID,
- OPENAI_CODEX_GPT_53_SPARK_MODEL_ID,
"gpt-5.2-codex",
"gpt-5.1-codex",
] as const;
@@ -127,7 +123,6 @@ const OPENAI_CODEX_MODERN_MODEL_IDS = [
"gpt-5.2",
"gpt-5.2-codex",
OPENAI_CODEX_GPT_53_MODEL_ID,
- OPENAI_CODEX_GPT_53_SPARK_MODEL_ID,
] as const;
function isLegacyCodexCompatBaseUrl(baseUrl?: string): boolean {
@@ -236,18 +231,6 @@ function resolveCodexForwardCompatModel(ctx: ProviderResolveDynamicModelContext)
maxTokens: OPENAI_CODEX_GPT_54_MAX_TOKENS,
cost: OPENAI_CODEX_GPT_54_MINI_COST,
};
- } else if (lower === OPENAI_CODEX_GPT_53_SPARK_MODEL_ID) {
- templateIds = [OPENAI_CODEX_GPT_53_MODEL_ID, ...OPENAI_CODEX_TEMPLATE_MODEL_IDS];
- patch = {
- api: "openai-codex-responses",
- provider: PROVIDER_ID,
- baseUrl: OPENAI_CODEX_BASE_URL,
- reasoning: true,
- input: ["text"],
- cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
- contextWindow: OPENAI_CODEX_GPT_53_SPARK_CONTEXT_TOKENS,
- maxTokens: OPENAI_CODEX_GPT_53_SPARK_MAX_TOKENS,
- };
} else if (lower === OPENAI_CODEX_GPT_53_MODEL_ID) {
templateIds = OPENAI_CODEX_TEMPLATE_MODEL_IDS;
} else {
@@ -485,6 +468,15 @@ export function buildOpenAICodexProviderPlugin(): ProviderPlugin {
],
}),
isModernModelRef: ({ modelId }) => matchesExactOrPrefix(modelId, OPENAI_CODEX_MODERN_MODEL_IDS),
+ suppressBuiltInModel: ({ provider, modelId }) =>
+ normalizeProviderId(provider) === PROVIDER_ID &&
+ normalizeLowercaseStringOrEmpty(modelId) === "gpt-5.3-codex-spark"
+ ? {
+ suppress: true,
+ errorMessage:
+ "gpt-5.3-codex-spark is no longer exposed by the OpenAI or Codex catalogs. Use openai/gpt-5.5.",
+ }
+ : undefined,
preferRuntimeResolvedModel: (ctx) => {
if (normalizeProviderId(ctx.provider) !== PROVIDER_ID) {
return false;
@@ -530,11 +522,6 @@ export function buildOpenAICodexProviderPlugin(): ProviderPlugin {
providerId: PROVIDER_ID,
templateIds: OPENAI_CODEX_GPT_54_MINI_TEMPLATE_MODEL_IDS,
});
- const sparkTemplate = findCatalogTemplate({
- entries: ctx.entries,
- providerId: PROVIDER_ID,
- templateIds: [OPENAI_CODEX_GPT_53_MODEL_ID, ...OPENAI_CODEX_TEMPLATE_MODEL_IDS],
- });
return [
buildOpenAISyntheticCatalogEntry(gpt55Template, {
id: OPENAI_CODEX_GPT_55_MODEL_ID,
@@ -575,12 +562,6 @@ export function buildOpenAICodexProviderPlugin(): ProviderPlugin {
contextWindow: OPENAI_CODEX_GPT_54_MINI_CONTEXT_TOKENS,
cost: OPENAI_CODEX_GPT_54_MINI_COST,
}),
- buildOpenAISyntheticCatalogEntry(sparkTemplate, {
- id: OPENAI_CODEX_GPT_53_SPARK_MODEL_ID,
- reasoning: true,
- input: ["text"],
- contextWindow: OPENAI_CODEX_GPT_53_SPARK_CONTEXT_TOKENS,
- }),
].filter((entry): entry is NonNullable => entry !== undefined);
},
};
diff --git a/extensions/openai/openai-provider.ts b/extensions/openai/openai-provider.ts
index 0eb75ee6dbc..773c3241423 100644
--- a/extensions/openai/openai-provider.ts
+++ b/extensions/openai/openai-provider.ts
@@ -284,7 +284,7 @@ export function buildOpenAIProvider(): ProviderPlugin {
}
return {
suppress: true,
- errorMessage: `Unknown model: ${ctx.provider}/${OPENAI_DIRECT_SPARK_MODEL_ID}. ${OPENAI_DIRECT_SPARK_MODEL_ID} is only supported through Codex OAuth. Use openai/${OPENAI_DIRECT_SPARK_MODEL_ID} with the Codex OAuth profile.`,
+ errorMessage: `Unknown model: ${ctx.provider}/${OPENAI_DIRECT_SPARK_MODEL_ID}. ${OPENAI_DIRECT_SPARK_MODEL_ID} is no longer exposed by the OpenAI or Codex catalogs. Use openai/gpt-5.5.`,
};
},
augmentModelCatalog: (ctx) => {
diff --git a/src/agents/model-catalog.test.ts b/src/agents/model-catalog.test.ts
index 38031b97352..8585741995a 100644
--- a/src/agents/model-catalog.test.ts
+++ b/src/agents/model-catalog.test.ts
@@ -12,7 +12,9 @@ let augmentCatalogMock: ReturnType;
vi.mock("./model-suppression.runtime.js", () => ({
shouldSuppressBuiltInModel: (params: { provider?: string; id?: string }) =>
- (params.provider === "openai" || params.provider === "azure-openai-responses") &&
+ (params.provider === "openai" ||
+ params.provider === "azure-openai-responses" ||
+ params.provider === "openai-codex") &&
params.id === "gpt-5.3-codex-spark",
}));
@@ -177,7 +179,7 @@ describe("loadModelCatalog", () => {
);
});
- it("filters stale openai gpt-5.3-codex-spark built-ins from the catalog", async () => {
+ it("filters stale gpt-5.3-codex-spark built-ins from the catalog", async () => {
mockPiDiscoveryModels([
{
id: "gpt-5.3-codex-spark",
@@ -218,7 +220,7 @@ describe("loadModelCatalog", () => {
id: "gpt-5.3-codex-spark",
}),
);
- expect(result).toContainEqual(
+ expect(result).not.toContainEqual(
expect.objectContaining({
provider: "openai-codex",
id: "gpt-5.3-codex-spark",
diff --git a/src/agents/pi-embedded-runner/model.forward-compat.errors-and-overrides.test.ts b/src/agents/pi-embedded-runner/model.forward-compat.errors-and-overrides.test.ts
index ed564cb7e85..ff708c3a0b1 100644
--- a/src/agents/pi-embedded-runner/model.forward-compat.errors-and-overrides.test.ts
+++ b/src/agents/pi-embedded-runner/model.forward-compat.errors-and-overrides.test.ts
@@ -22,16 +22,20 @@ vi.mock("../../plugins/provider-runtime.js", async () => {
vi.mock("../model-suppression.js", () => ({
shouldSuppressBuiltInModel: ({ provider, id }: { provider?: string; id?: string }) =>
- (provider === "openai" || provider === "azure-openai-responses") &&
+ (provider === "openai" ||
+ provider === "azure-openai-responses" ||
+ provider === "openai-codex") &&
id?.trim().toLowerCase() === "gpt-5.3-codex-spark",
buildSuppressedBuiltInModelError: ({ provider, id }: { provider?: string; id?: string }) => {
if (
- (provider !== "openai" && provider !== "azure-openai-responses") ||
+ (provider !== "openai" &&
+ provider !== "azure-openai-responses" &&
+ provider !== "openai-codex") ||
id?.trim().toLowerCase() !== "gpt-5.3-codex-spark"
) {
return undefined;
}
- return `Unknown model: ${provider}/gpt-5.3-codex-spark. gpt-5.3-codex-spark is only supported through Codex OAuth. Use openai/gpt-5.3-codex-spark with the Codex OAuth profile.`;
+ return `Unknown model: ${provider}/gpt-5.3-codex-spark. gpt-5.3-codex-spark is no longer exposed by the OpenAI or Codex catalogs. Use openai/gpt-5.5.`;
},
}));
@@ -140,7 +144,7 @@ describe("resolveModel forward-compat errors and overrides", () => {
expect(result.model).toBeUndefined();
expect(result.error).toBe(
- "Unknown model: openai/gpt-5.3-codex-spark. gpt-5.3-codex-spark is only supported through Codex OAuth. Use openai/gpt-5.3-codex-spark with the Codex OAuth profile.",
+ "Unknown model: openai/gpt-5.3-codex-spark. gpt-5.3-codex-spark is no longer exposed by the OpenAI or Codex catalogs. Use openai/gpt-5.5.",
);
});
@@ -161,7 +165,7 @@ describe("resolveModel forward-compat errors and overrides", () => {
expect(result.model).toBeUndefined();
expect(result.error).toBe(
- "Unknown model: openai/gpt-5.3-codex-spark. gpt-5.3-codex-spark is only supported through Codex OAuth. Use openai/gpt-5.3-codex-spark with the Codex OAuth profile.",
+ "Unknown model: openai/gpt-5.3-codex-spark. gpt-5.3-codex-spark is no longer exposed by the OpenAI or Codex catalogs. Use openai/gpt-5.5.",
);
});
@@ -174,7 +178,7 @@ describe("resolveModel forward-compat errors and overrides", () => {
expect(result.model).toBeUndefined();
expect(result.error).toBe(
- "Unknown model: azure-openai-responses/gpt-5.3-codex-spark. gpt-5.3-codex-spark is only supported through Codex OAuth. Use openai/gpt-5.3-codex-spark with the Codex OAuth profile.",
+ "Unknown model: azure-openai-responses/gpt-5.3-codex-spark. gpt-5.3-codex-spark is no longer exposed by the OpenAI or Codex catalogs. Use openai/gpt-5.5.",
);
});
diff --git a/src/agents/pi-embedded-runner/model.test.ts b/src/agents/pi-embedded-runner/model.test.ts
index 2a3f18a4f30..3ab89178e82 100644
--- a/src/agents/pi-embedded-runner/model.test.ts
+++ b/src/agents/pi-embedded-runner/model.test.ts
@@ -4,16 +4,20 @@ import { createProviderRuntimeTestMock } from "./model.provider-runtime.test-sup
vi.mock("../model-suppression.js", () => ({
shouldSuppressBuiltInModel: ({ provider, id }: { provider?: string; id?: string }) =>
- (provider === "openai" || provider === "azure-openai-responses") &&
+ (provider === "openai" ||
+ provider === "azure-openai-responses" ||
+ provider === "openai-codex") &&
id?.trim().toLowerCase() === "gpt-5.3-codex-spark",
buildSuppressedBuiltInModelError: ({ provider, id }: { provider?: string; id?: string }) => {
if (
- (provider !== "openai" && provider !== "azure-openai-responses") ||
+ (provider !== "openai" &&
+ provider !== "azure-openai-responses" &&
+ provider !== "openai-codex") ||
id?.trim().toLowerCase() !== "gpt-5.3-codex-spark"
) {
return undefined;
}
- return `Unknown model: ${provider}/gpt-5.3-codex-spark. gpt-5.3-codex-spark is only supported through Codex OAuth. Use openai/gpt-5.3-codex-spark with the Codex OAuth profile.`;
+ return `Unknown model: ${provider}/gpt-5.3-codex-spark. gpt-5.3-codex-spark is no longer exposed by the OpenAI or Codex catalogs. Use openai/gpt-5.5.`;
},
}));
@@ -987,18 +991,18 @@ describe("resolveModel", () => {
expect(result.model).toMatchObject(buildOpenAICodexForwardCompatExpectation("gpt-5.4-mini"));
});
- it("builds an openai-codex fallback for gpt-5.3-codex-spark", () => {
+ it("does not build an openai-codex fallback for removed gpt-5.3-codex-spark", () => {
mockOpenAICodexTemplateModel(discoverModels);
const result = resolveModelForTest("openai-codex", "gpt-5.3-codex-spark", "/tmp/agent");
- expect(result.error).toBeUndefined();
- expect(result.model).toMatchObject(
- buildOpenAICodexForwardCompatExpectation("gpt-5.3-codex-spark"),
+ expect(result.model).toBeUndefined();
+ expect(result.error).toBe(
+ "Unknown model: openai-codex/gpt-5.3-codex-spark. gpt-5.3-codex-spark is no longer exposed by the OpenAI or Codex catalogs. Use openai/gpt-5.5.",
);
});
- it("keeps openai-codex gpt-5.3-codex-spark when discovery provides it", () => {
+ it("rejects stale openai-codex gpt-5.3-codex-spark discovery rows", () => {
mockDiscoveredModel(discoverModels, {
provider: "openai-codex",
modelId: "gpt-5.3-codex-spark",
@@ -1011,13 +1015,10 @@ describe("resolveModel", () => {
const result = resolveModelForTest("openai-codex", "gpt-5.3-codex-spark", "/tmp/agent");
- expect(result.error).toBeUndefined();
- expect(result.model).toMatchObject({
- provider: "openai-codex",
- id: "gpt-5.3-codex-spark",
- api: "openai-codex-responses",
- baseUrl: "https://chatgpt.com/backend-api",
- });
+ expect(result.model).toBeUndefined();
+ expect(result.error).toBe(
+ "Unknown model: openai-codex/gpt-5.3-codex-spark. gpt-5.3-codex-spark is no longer exposed by the OpenAI or Codex catalogs. Use openai/gpt-5.5.",
+ );
});
it("prefers runtime-resolved openai-codex gpt-5.4 metadata when it has a larger context window", () => {
@@ -1260,7 +1261,7 @@ describe("resolveModel", () => {
expect(result.model).toBeUndefined();
expect(result.error).toBe(
- "Unknown model: openai/gpt-5.3-codex-spark. gpt-5.3-codex-spark is only supported through Codex OAuth. Use openai/gpt-5.3-codex-spark with the Codex OAuth profile.",
+ "Unknown model: openai/gpt-5.3-codex-spark. gpt-5.3-codex-spark is no longer exposed by the OpenAI or Codex catalogs. Use openai/gpt-5.5.",
);
});
diff --git a/src/commands/models.list.e2e.test.ts b/src/commands/models.list.e2e.test.ts
index fa1e43b71fd..71bb10fab8c 100644
--- a/src/commands/models.list.e2e.test.ts
+++ b/src/commands/models.list.e2e.test.ts
@@ -180,14 +180,6 @@ describe("models list/status", () => {
baseUrl: "https://api.openai.com/v1",
contextWindow: 128000,
};
- const OPENAI_CODEX_SPARK_MODEL = {
- provider: "openai-codex",
- id: "gpt-5.3-codex-spark",
- name: "GPT-5.3 Codex Spark",
- input: ["text"],
- baseUrl: "https://chatgpt.com/backend-api",
- contextWindow: 128000,
- };
const MOONSHOT_MODEL = {
provider: "moonshot",
id: "kimi-k2.6",
@@ -448,37 +440,31 @@ describe("models list/status", () => {
expect(ensureOpenClawModelsJson).not.toHaveBeenCalled();
});
- it("filters stale direct OpenAI spark rows from models list and registry views", async () => {
+ it("filters stale spark rows from models list and registry views", async () => {
shouldSuppressBuiltInModel.mockImplementation(
({ provider, id }: { provider?: string | null; id?: string | null }) =>
id === "gpt-5.3-codex-spark" &&
- (provider === "openai" || provider === "azure-openai-responses"),
+ (provider === "openai" ||
+ provider === "azure-openai-responses" ||
+ provider === "openai-codex"),
);
- setDefaultModel("openai-codex/gpt-5.3-codex-spark");
- modelRegistryState.models = [
- OPENAI_SPARK_MODEL,
- AZURE_OPENAI_SPARK_MODEL,
- OPENAI_CODEX_SPARK_MODEL,
- ];
- modelRegistryState.available = [
- OPENAI_SPARK_MODEL,
- AZURE_OPENAI_SPARK_MODEL,
- OPENAI_CODEX_SPARK_MODEL,
- ];
+ setDefaultModel("openai/gpt-5.5");
+ modelRegistryState.models = [OPENAI_MODEL, OPENAI_SPARK_MODEL, AZURE_OPENAI_SPARK_MODEL];
+ modelRegistryState.available = [OPENAI_MODEL, OPENAI_SPARK_MODEL, AZURE_OPENAI_SPARK_MODEL];
const runtime = makeRuntime();
await modelsListCommand({ all: true, json: true }, runtime);
const payload = parseJsonLog(runtime);
expect(payload.models.map((model: { key: string }) => model.key)).toEqual([
- "openai-codex/gpt-5.3-codex-spark",
+ "openai/gpt-4.1-mini",
]);
const loaded = await loadModelRegistry({} as never);
expect(loaded.models.map((model) => `${model.provider}/${model.id}`)).toEqual([
- "openai-codex/gpt-5.3-codex-spark",
+ "openai/gpt-4.1-mini",
]);
- expect(Array.from(loaded.availableKeys ?? [])).toEqual(["openai-codex/gpt-5.3-codex-spark"]);
+ expect(Array.from(loaded.availableKeys ?? [])).toEqual(["openai/gpt-4.1-mini"]);
});
it("modelsListCommand persists using the source snapshot config when provided", async () => {
diff --git a/src/plugins/provider-runtime.test-support.ts b/src/plugins/provider-runtime.test-support.ts
index 3fd7485557f..184add6d262 100644
--- a/src/plugins/provider-runtime.test-support.ts
+++ b/src/plugins/provider-runtime.test-support.ts
@@ -16,11 +16,6 @@ export const expectedAugmentedOpenaiCodexCatalogEntries = [
{ provider: "openai-codex", id: "gpt-5.4", name: "gpt-5.4" },
{ provider: "openai-codex", id: "gpt-5.4-pro", name: "gpt-5.4-pro" },
{ provider: "openai-codex", id: "gpt-5.4-mini", name: "gpt-5.4-mini" },
- {
- provider: "openai-codex",
- id: "gpt-5.3-codex-spark",
- name: "gpt-5.3-codex-spark",
- },
];
export const expectedAugmentedOpenaiCodexCatalogEntriesWithGpt55 = [
@@ -77,9 +72,7 @@ export function expectCodexBuiltInSuppression(
}),
).toMatchObject({
suppress: true,
- errorMessage: expect.stringContaining(
- "openai/gpt-5.3-codex-spark with the Codex OAuth profile",
- ),
+ errorMessage: expect.stringContaining("gpt-5.3-codex-spark"),
});
}
diff --git a/src/plugins/provider-runtime.test.ts b/src/plugins/provider-runtime.test.ts
index da9ffcc75ae..8430d2221c5 100644
--- a/src/plugins/provider-runtime.test.ts
+++ b/src/plugins/provider-runtime.test.ts
@@ -136,7 +136,8 @@ function createOpenAiCatalogProviderPlugin(
modelId === "gpt-5.3-codex-spark"
? {
suppress: true,
- errorMessage: "openai/gpt-5.3-codex-spark with the Codex OAuth profile",
+ errorMessage:
+ "gpt-5.3-codex-spark is no longer exposed by the OpenAI or Codex catalogs. Use openai/gpt-5.5.",
}
: undefined,
augmentModelCatalog: () => [
@@ -147,11 +148,6 @@ function createOpenAiCatalogProviderPlugin(
{ provider: "openai-codex", id: "gpt-5.4", name: "gpt-5.4" },
{ provider: "openai-codex", id: "gpt-5.4-pro", name: "gpt-5.4-pro" },
{ provider: "openai-codex", id: "gpt-5.4-mini", name: "gpt-5.4-mini" },
- {
- provider: "openai-codex",
- id: "gpt-5.3-codex-spark",
- name: "gpt-5.3-codex-spark",
- },
],
...overrides,
};