feat(codex): add gpt-5.4-pro forward compat (#66453)

* feat(openai-codex): add gpt-5.4-pro forward-compat (#63404)

* openai-codex: use patch.cost when forward-compat falls back to normalizeModelCompat

* feat(codex): add gpt-5.4-pro forward compat

* fix(codex): reuse gpt-5.4 fallback for gpt-5.4-pro

---------

Co-authored-by: jepson-liu <jepsonliu@gmail.com>
Authored by Vincent Koc on 2026-04-14 11:05:24 +01:00; committed by GitHub.
parent 8820a43818
commit 5a5ca6d62c
12 changed files with 235 additions and 13 deletions

View File

@@ -401,7 +401,8 @@ describe("isModernModelRef", () => {
provider === "openai" &&
["gpt-5.4", "gpt-5.4-pro", "gpt-5.4-mini", "gpt-5.4-nano"].includes(context.modelId)
? true
: provider === "openai-codex" && ["gpt-5.4", "gpt-5.4-mini"].includes(context.modelId)
: provider === "openai-codex" &&
["gpt-5.4", "gpt-5.4-pro", "gpt-5.4-mini"].includes(context.modelId)
? true
: provider === "opencode" && ["claude-opus-4-6", "gemini-3-pro"].includes(context.modelId)
? true
@@ -415,6 +416,7 @@ describe("isModernModelRef", () => {
expect(isModernModelRef({ provider: "openai", id: "gpt-5.4-mini" })).toBe(true);
expect(isModernModelRef({ provider: "openai", id: "gpt-5.4-nano" })).toBe(true);
expect(isModernModelRef({ provider: "openai-codex", id: "gpt-5.4" })).toBe(true);
expect(isModernModelRef({ provider: "openai-codex", id: "gpt-5.4-pro" })).toBe(true);
expect(isModernModelRef({ provider: "openai-codex", id: "gpt-5.4-mini" })).toBe(true);
expect(isModernModelRef({ provider: "opencode", id: "claude-opus-4-6" })).toBe(true);
expect(isModernModelRef({ provider: "opencode", id: "gemini-3-pro" })).toBe(true);

View File

@@ -184,17 +184,17 @@ function buildDynamicModel(
case "openai-codex": {
const isLegacyGpt54Alias = lower === "gpt-5.4-codex";
const template =
lower === "gpt-5.4" || isLegacyGpt54Alias
? findTemplate(params, "openai-codex", ["gpt-5.4", "gpt-5.4"])
lower === "gpt-5.4" || isLegacyGpt54Alias || lower === "gpt-5.4-pro"
? findTemplate(params, "openai-codex", ["gpt-5.4", "gpt-5.3-codex", "gpt-5.2-codex"])
: lower === "gpt-5.4-mini"
? findTemplate(params, "openai-codex", [
"gpt-5.4",
"gpt-5.1-codex-mini",
"gpt-5.3-codex",
"gpt-5.4",
"gpt-5.2-codex",
])
: lower === "gpt-5.3-codex-spark"
? findTemplate(params, "openai-codex", ["gpt-5.4", "gpt-5.4"])
? findTemplate(params, "openai-codex", ["gpt-5.4", "gpt-5.3-codex", "gpt-5.2-codex"])
: findTemplate(params, "openai-codex", ["gpt-5.4"]);
const fallback = {
provider: "openai-codex",
@@ -222,6 +222,22 @@ function buildDynamicModel(
fallback,
);
}
if (lower === "gpt-5.4-pro") {
return cloneTemplate(
template,
modelId,
{
provider: "openai-codex",
api: "openai-codex-responses",
baseUrl: OPENAI_CODEX_BASE_URL,
cost: { input: 30, output: 180, cacheRead: 0, cacheWrite: 0 },
contextWindow: 1_050_000,
contextTokens: 272_000,
maxTokens: 128_000,
},
fallback,
);
}
if (lower === "gpt-5.4-mini") {
return cloneTemplate(
template,

View File

@@ -39,6 +39,7 @@ export const DEFAULT_TEST_MODEL_CATALOG: Array<{
{ id: "gpt-5.4-mini", name: "GPT-5.4 Mini", provider: "openai" },
{ id: "gpt-5.4-nano", name: "GPT-5.4 Nano", provider: "openai" },
{ id: "gpt-5.4", name: "GPT-5.4 (Codex)", provider: "openai-codex" },
{ id: "gpt-5.4-pro", name: "GPT-5.4 Pro (Codex)", provider: "openai-codex" },
{ id: "gpt-5.4-mini", name: "GPT-5.4 Mini (Codex)", provider: "openai-codex" },
{ id: "gpt-4.1-mini", name: "GPT-4.1 Mini", provider: "openai" },
];
@@ -55,6 +56,7 @@ const OPENAI_XHIGH_MODEL_IDS = [
const OPENAI_CODEX_XHIGH_MODEL_IDS = [
"gpt-5.4",
"gpt-5.4-pro",
"gpt-5.4-mini",
"gpt-5.3-codex",
"gpt-5.3-codex-spark",

View File

@@ -85,7 +85,7 @@ describe("listThinkingLevels", () => {
providerRuntimeMocks.resolveProviderXHighThinking.mockImplementation(({ provider, context }) =>
(provider === "openai" && ["gpt-5.4", "gpt-5.4", "gpt-5.4-pro"].includes(context.modelId)) ||
(provider === "openai-codex" &&
["gpt-5.4", "gpt-5.4", "gpt-5.3-codex-spark"].includes(context.modelId)) ||
["gpt-5.4", "gpt-5.4-pro", "gpt-5.3-codex-spark"].includes(context.modelId)) ||
(provider === "github-copilot" && ["gpt-5.4", "gpt-5.4"].includes(context.modelId))
? true
: undefined,
@@ -94,6 +94,7 @@ describe("listThinkingLevels", () => {
expect(listThinkingLevels("openai-codex", "gpt-5.4")).toContain("xhigh");
expect(listThinkingLevels("openai-codex", "gpt-5.4")).toContain("xhigh");
expect(listThinkingLevels("openai-codex", "gpt-5.3-codex-spark")).toContain("xhigh");
expect(listThinkingLevels("openai-codex", "gpt-5.4-pro")).toContain("xhigh");
expect(listThinkingLevels("openai", "gpt-5.4")).toContain("xhigh");
expect(listThinkingLevels("openai", "gpt-5.4")).toContain("xhigh");
expect(listThinkingLevels("openai", "gpt-5.4-pro")).toContain("xhigh");

View File

@@ -19,6 +19,12 @@ const OPENAI_CODEX_MINI_MODEL = {
contextWindow: 272_000,
};
// Test fixture for the forward-compat "Pro" tier: reuses the base
// OPENAI_CODEX_MODEL shape and overrides only the id and display name.
const OPENAI_CODEX_PRO_MODEL = {
...OPENAI_CODEX_MODEL,
id: "gpt-5.4-pro",
name: "GPT-5.4 Pro",
};
const OPENAI_CODEX_53_MODEL = {
...OPENAI_CODEX_MODEL,
id: "gpt-5.4",
@@ -234,6 +240,35 @@ describe("modelsListCommand forward-compat", () => {
expect(codexMini?.tags).not.toContain("missing");
});
// Regression test for gpt-5.4-pro forward-compat: a configured
// openai-codex/gpt-5.4-pro entry must not be tagged "missing" when the
// registry can resolve a fallback model for it.
it("does not mark configured codex gpt-5.4-pro as missing when forward-compat can build a fallback", async () => {
// Simulate a user config that pins the pro model on the codex provider.
mocks.resolveConfiguredEntries.mockReturnValueOnce({
entries: [
{
key: "openai-codex/gpt-5.4-pro",
ref: { provider: "openai-codex", model: "gpt-5.4-pro" },
tags: new Set(["configured"]),
aliases: [],
},
],
});
// Registry resolution succeeds via the forward-compat fixture (copied so
// the mock cannot leak mutations into the shared fixture).
mocks.resolveModelWithRegistry.mockReturnValueOnce({ ...OPENAI_CODEX_PRO_MODEL });
const runtime = createRuntime();
await modelsListCommand({ json: true }, runtime as never);
expect(mocks.printModelTable).toHaveBeenCalled();
const rows = lastPrintedRows<{
key: string;
tags: string[];
missing: boolean;
}>();
// The pro row must be present, resolved, and free of the "missing" marker.
const codexPro = rows.find((row) => row.key === "openai-codex/gpt-5.4-pro");
expect(codexPro).toBeTruthy();
expect(codexPro?.missing).toBe(false);
expect(codexPro?.tags).not.toContain("missing");
});
it("passes source config to model registry loading for persistence safety", async () => {
const runtime = createRuntime();

View File

@@ -14,6 +14,7 @@ export const expectedAugmentedOpenaiCodexCatalogEntries = [
{ provider: "openai", id: "gpt-5.4-mini", name: "gpt-5.4-mini" },
{ provider: "openai", id: "gpt-5.4-nano", name: "gpt-5.4-nano" },
{ provider: "openai-codex", id: "gpt-5.4", name: "gpt-5.4" },
{ provider: "openai-codex", id: "gpt-5.4-pro", name: "gpt-5.4-pro" },
{ provider: "openai-codex", id: "gpt-5.4-mini", name: "gpt-5.4-mini" },
{
provider: "openai-codex",

View File

@@ -132,6 +132,7 @@ function createOpenAiCatalogProviderPlugin(
{ provider: "openai", id: "gpt-5.4-mini", name: "gpt-5.4-mini" },
{ provider: "openai", id: "gpt-5.4-nano", name: "gpt-5.4-nano" },
{ provider: "openai-codex", id: "gpt-5.4", name: "gpt-5.4" },
{ provider: "openai-codex", id: "gpt-5.4-pro", name: "gpt-5.4-pro" },
{ provider: "openai-codex", id: "gpt-5.4-mini", name: "gpt-5.4-mini" },
{
provider: "openai-codex",