diff --git a/src/agents/live-model-filter.ts b/src/agents/live-model-filter.ts index 8230485bb3a..97d22da9742 100644 --- a/src/agents/live-model-filter.ts +++ b/src/agents/live-model-filter.ts @@ -14,6 +14,7 @@ const CODEX_MODELS = [ "gpt-5.2", "gpt-5.2-codex", "gpt-5.3-codex", + "gpt-5.3-codex-spark", "gpt-5.1-codex", "gpt-5.1-codex-mini", "gpt-5.1-codex-max", diff --git a/src/agents/model-catalog.test.ts b/src/agents/model-catalog.test.ts index 3e90d8ee488..42ebee14917 100644 --- a/src/agents/model-catalog.test.ts +++ b/src/agents/model-catalog.test.ts @@ -84,4 +84,43 @@ describe("loadModelCatalog", () => { expect(result).toEqual([{ id: "gpt-4.1", name: "GPT-4.1", provider: "openai" }]); expect(warnSpy).toHaveBeenCalledTimes(1); }); + + it("adds openai-codex/gpt-5.3-codex-spark when base gpt-5.3-codex exists", async () => { + __setModelCatalogImportForTest( + async () => + ({ + AuthStorage: class {}, + ModelRegistry: class { + getAll() { + return [ + { + id: "gpt-5.3-codex", + provider: "openai-codex", + name: "GPT-5.3 Codex", + reasoning: true, + contextWindow: 200000, + input: ["text"], + }, + { + id: "gpt-5.2-codex", + provider: "openai-codex", + name: "GPT-5.2 Codex", + }, + ]; + } + }, + }) as unknown as PiSdkModule, + ); + + const result = await loadModelCatalog({ config: {} as OpenClawConfig }); + expect(result).toContainEqual( + expect.objectContaining({ + provider: "openai-codex", + id: "gpt-5.3-codex-spark", + }), + ); + const spark = result.find((entry) => entry.id === "gpt-5.3-codex-spark"); + expect(spark?.name).toBe("gpt-5.3-codex-spark"); + expect(spark?.reasoning).toBe(true); + }); }); diff --git a/src/agents/model-catalog.ts b/src/agents/model-catalog.ts index 3ae2a12045c..15862015564 100644 --- a/src/agents/model-catalog.ts +++ b/src/agents/model-catalog.ts @@ -27,6 +27,35 @@ let hasLoggedModelCatalogError = false; const defaultImportPiSdk = () => import("./pi-model-discovery.js"); let importPiSdk = defaultImportPiSdk; +const CODEX_PROVIDER = 
"openai-codex"; +const OPENAI_CODEX_GPT53_MODEL_ID = "gpt-5.3-codex"; +const OPENAI_CODEX_GPT53_SPARK_MODEL_ID = "gpt-5.3-codex-spark"; + +function applyOpenAICodexSparkFallback(models: ModelCatalogEntry[]): void { + const hasSpark = models.some( + (entry) => + entry.provider === CODEX_PROVIDER && + entry.id.toLowerCase() === OPENAI_CODEX_GPT53_SPARK_MODEL_ID, + ); + if (hasSpark) { + return; + } + + const baseModel = models.find( + (entry) => + entry.provider === CODEX_PROVIDER && entry.id.toLowerCase() === OPENAI_CODEX_GPT53_MODEL_ID, + ); + if (!baseModel) { + return; + } + + models.push({ + ...baseModel, + id: OPENAI_CODEX_GPT53_SPARK_MODEL_ID, + name: OPENAI_CODEX_GPT53_SPARK_MODEL_ID, + }); +} + export function resetModelCatalogCacheForTest() { modelCatalogPromise = null; hasLoggedModelCatalogError = false; @@ -94,6 +123,7 @@ export async function loadModelCatalog(params?: { const input = Array.isArray(entry?.input) ? entry.input : undefined; models.push({ id, name, provider, contextWindow, reasoning, input }); } + applyOpenAICodexSparkFallback(models); if (models.length === 0) { // If we found nothing, don't cache this result so we can try again. 
diff --git a/src/agents/pi-embedded-runner/model.test.ts b/src/agents/pi-embedded-runner/model.test.ts index 5f9ba96a69b..69c93ca8cfd 100644 --- a/src/agents/pi-embedded-runner/model.test.ts +++ b/src/agents/pi-embedded-runner/model.test.ts @@ -172,6 +172,43 @@ describe("resolveModel", () => { }); }); + + it("builds an openai-codex fallback for gpt-5.3-codex-spark", () => { + const templateModel = { + id: "gpt-5.2-codex", + name: "GPT-5.2 Codex", + provider: "openai-codex", + api: "openai-codex-responses", + baseUrl: "https://chatgpt.com/backend-api", + reasoning: true, + input: ["text", "image"] as const, + cost: { input: 1.75, output: 14, cacheRead: 0.175, cacheWrite: 0 }, + contextWindow: 272000, + maxTokens: 128000, + }; + + vi.mocked(discoverModels).mockReturnValue({ + find: vi.fn((provider: string, modelId: string) => { + if (provider === "openai-codex" && modelId === "gpt-5.2-codex") { + return templateModel; + } + return null; + }), + } as unknown as ReturnType<typeof discoverModels>); + + const result = resolveModel("openai-codex", "gpt-5.3-codex-spark", "/tmp/agent"); + + expect(result.error).toBeUndefined(); + expect(result.model).toMatchObject({ + provider: "openai-codex", + id: "gpt-5.3-codex-spark", + api: "openai-codex-responses", + baseUrl: "https://chatgpt.com/backend-api", + reasoning: true, + contextWindow: 272000, + maxTokens: 128000, + }); + }); + it("builds an anthropic forward-compat fallback for claude-opus-4-6", () => { const templateModel = { id: "claude-opus-4-5", @@ -283,6 +320,12 @@ describe("resolveModel", () => { expect(result.error).toBe("Unknown model: openai-codex/gpt-4.1-mini"); }); + + it("errors for unknown gpt-5.3-codex-* variants", () => { + const result = resolveModel("openai-codex", "gpt-5.3-codex-unknown", "/tmp/agent"); + expect(result.model).toBeUndefined(); + expect(result.error).toBe("Unknown model: openai-codex/gpt-5.3-codex-unknown"); + }); + it("uses codex fallback even when openai-codex provider is configured", () => { // This test verifies
the ordering: codex fallback must fire BEFORE the generic providerCfg fallback. // If ordering is wrong, the generic fallback would use api: "openai-responses" (the default) diff --git a/src/agents/pi-embedded-runner/model.ts b/src/agents/pi-embedded-runner/model.ts index e4b3d5c950f..2d45d1116f2 100644 --- a/src/agents/pi-embedded-runner/model.ts +++ b/src/agents/pi-embedded-runner/model.ts @@ -20,6 +20,7 @@ type InlineProviderConfig = { }; const OPENAI_CODEX_GPT_53_MODEL_ID = "gpt-5.3-codex"; +const OPENAI_CODEX_GPT_53_SPARK_MODEL_ID = "gpt-5.3-codex-spark"; const OPENAI_CODEX_TEMPLATE_MODEL_IDS = ["gpt-5.2-codex"] as const; @@ -39,7 +40,11 @@ function resolveOpenAICodexGpt53FallbackModel( if (normalizedProvider !== "openai-codex") { return undefined; } - if (trimmedModelId.toLowerCase() !== OPENAI_CODEX_GPT_53_MODEL_ID) { + const loweredModelId = trimmedModelId.toLowerCase(); + if ( + loweredModelId !== OPENAI_CODEX_GPT_53_MODEL_ID && + loweredModelId !== OPENAI_CODEX_GPT_53_SPARK_MODEL_ID + ) { return undefined; } diff --git a/src/auto-reply/reply.directive.directive-behavior.accepts-thinking-xhigh-codex-models.e2e.test.ts b/src/auto-reply/reply.directive.directive-behavior.accepts-thinking-xhigh-codex-models.e2e.test.ts index 45d5d56bf18..f94ba609242 100644 --- a/src/auto-reply/reply.directive.directive-behavior.accepts-thinking-xhigh-codex-models.e2e.test.ts +++ b/src/auto-reply/reply.directive.directive-behavior.accepts-thinking-xhigh-codex-models.e2e.test.ts @@ -154,7 +154,7 @@ describe("directive behavior", () => { const texts = (Array.isArray(res) ? 
res : [res]).map((entry) => entry?.text).filter(Boolean); expect(texts).toContain( - 'Thinking level "xhigh" is only supported for openai/gpt-5.2, openai-codex/gpt-5.3-codex, openai-codex/gpt-5.2-codex, openai-codex/gpt-5.1-codex, github-copilot/gpt-5.2-codex or github-copilot/gpt-5.2.', + 'Thinking level "xhigh" is only supported for openai/gpt-5.2, openai-codex/gpt-5.3-codex, openai-codex/gpt-5.3-codex-spark, openai-codex/gpt-5.2-codex, openai-codex/gpt-5.1-codex, github-copilot/gpt-5.2-codex or github-copilot/gpt-5.2.', ); }); }); diff --git a/src/auto-reply/thinking.test.ts b/src/auto-reply/thinking.test.ts index f1c6b850440..dd0523fcc3f 100644 --- a/src/auto-reply/thinking.test.ts +++ b/src/auto-reply/thinking.test.ts @@ -44,6 +44,7 @@ describe("listThinkingLevels", () => { it("includes xhigh for codex models", () => { expect(listThinkingLevels(undefined, "gpt-5.2-codex")).toContain("xhigh"); expect(listThinkingLevels(undefined, "gpt-5.3-codex")).toContain("xhigh"); + expect(listThinkingLevels(undefined, "gpt-5.3-codex-spark")).toContain("xhigh"); }); it("includes xhigh for openai gpt-5.2", () => { diff --git a/src/auto-reply/thinking.ts b/src/auto-reply/thinking.ts index 02c12fa9a67..5b10374b6ac 100644 --- a/src/auto-reply/thinking.ts +++ b/src/auto-reply/thinking.ts @@ -24,6 +24,7 @@ export function isBinaryThinkingProvider(provider?: string | null): boolean { export const XHIGH_MODEL_REFS = [ "openai/gpt-5.2", "openai-codex/gpt-5.3-codex", + "openai-codex/gpt-5.3-codex-spark", "openai-codex/gpt-5.2-codex", "openai-codex/gpt-5.1-codex", "github-copilot/gpt-5.2-codex",