fix(openai-codex): normalize stale transport metadata in resolution and discovery (#67635)

Merged via squash.

Supersedes:
- #66969 by @saamuelng601-pixel
- #67159 by @hclsys

Co-authored-by: saamuelng601-pixel <274746699+saamuelng601-pixel@users.noreply.github.com>
Co-authored-by: hclsys <7755017+hclsys@users.noreply.github.com>
This commit is contained in:
Nimrod Gutman
2026-04-16 14:30:05 +03:00
committed by GitHub
parent f697b01747
commit 90801ba400
8 changed files with 245 additions and 20 deletions

View File

@@ -17,11 +17,15 @@ describe("openai base URL helpers", () => {
it("recognizes Codex ChatGPT backend routes", () => {
expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api")).toBe(true);
expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api/")).toBe(true);
expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api/v1")).toBe(true);
expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api/v1/")).toBe(true);
});
it("rejects non-Codex backend routes", () => {
expect(isOpenAICodexBaseUrl("https://api.openai.com/v1")).toBe(false);
expect(isOpenAICodexBaseUrl("https://chatgpt.com")).toBe(false);
expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api/v2")).toBe(false);
expect(isOpenAICodexBaseUrl("https://chatgpt.com/backend-api/codex")).toBe(false);
expect(isOpenAICodexBaseUrl(undefined)).toBe(false);
});
});

View File

@@ -13,5 +13,5 @@ export function isOpenAICodexBaseUrl(baseUrl?: string): boolean {
if (!trimmed) {
return false;
}
return /^https?:\/\/chatgpt\.com\/backend-api\/?$/i.test(trimmed);
return /^https?:\/\/chatgpt\.com\/backend-api(?:\/v1)?\/?$/i.test(trimmed);
}

View File

@@ -375,4 +375,70 @@ describe("openai codex provider", () => {
name: "gpt-5.4",
});
});
it("defaults missing codex api metadata to openai-codex-responses", () => {
const provider = buildOpenAICodexProviderPlugin();
const model = provider.normalizeResolvedModel?.({
provider: "openai-codex",
model: {
id: "gpt-5.4",
name: "gpt-5.4",
provider: "openai-codex",
baseUrl: "https://chatgpt.com/backend-api",
reasoning: true,
input: ["text", "image"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 1_050_000,
contextTokens: 272_000,
maxTokens: 128_000,
},
} as never);
expect(model).toMatchObject({
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api",
});
});
it("normalizes stale /backend-api/v1 codex metadata to the canonical base url", () => {
const provider = buildOpenAICodexProviderPlugin();
const model = provider.normalizeResolvedModel?.({
provider: "openai-codex",
model: {
id: "gpt-5.4",
name: "gpt-5.4",
provider: "openai-codex",
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api/v1",
reasoning: true,
input: ["text", "image"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 1_050_000,
contextTokens: 272_000,
maxTokens: 128_000,
},
} as never);
expect(model).toMatchObject({
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api",
});
});
it("normalizes transport metadata for stale /backend-api/v1 codex routes", () => {
const provider = buildOpenAICodexProviderPlugin();
expect(
provider.normalizeTransport?.({
provider: "openai-codex",
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api/v1",
} as never),
).toEqual({
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api",
});
});
});

View File

@@ -94,6 +94,25 @@ const OPENAI_CODEX_MODERN_MODEL_IDS = [
OPENAI_CODEX_GPT_53_MODEL_ID,
OPENAI_CODEX_GPT_53_SPARK_MODEL_ID,
] as const;
/**
 * Canonicalizes the transport metadata (api + baseUrl) for Codex models.
 *
 * A model is considered to target the Codex transport when it carries no base
 * URL at all, or points at a recognized OpenAI API / Codex backend route. In
 * that case a missing or generic "openai-responses" api is upgraded to
 * "openai-codex-responses", and the base URL is pinned to the canonical
 * OPENAI_CODEX_BASE_URL (normalizing stale routes such as /backend-api/v1).
 * Non-Codex transports pass through unchanged, with a null api mapped to
 * undefined.
 */
function normalizeCodexTransportFields(params: {
  api?: ProviderRuntimeModel["api"] | null;
  baseUrl?: string;
}): {
  api?: ProviderRuntimeModel["api"];
  baseUrl?: string;
} {
  const { api: rawApi, baseUrl: rawBaseUrl } = params;
  const targetsCodex =
    !rawBaseUrl || isOpenAIApiBaseUrl(rawBaseUrl) || isOpenAICodexBaseUrl(rawBaseUrl);
  let api: ProviderRuntimeModel["api"] | undefined = rawApi ?? undefined;
  if (targetsCodex && (!rawApi || rawApi === "openai-responses")) {
    api = "openai-codex-responses";
  }
  let baseUrl = rawBaseUrl;
  if (targetsCodex && api === "openai-codex-responses") {
    // Pin the canonical route so stale variants (e.g. /backend-api/v1) converge.
    baseUrl = OPENAI_CODEX_BASE_URL;
  }
  return { api, baseUrl };
}
function normalizeCodexTransport(model: ProviderRuntimeModel): ProviderRuntimeModel {
const lowerModelId = normalizeLowercaseStringOrEmpty(model.id);
const canonicalModelId =
@@ -102,14 +121,12 @@ function normalizeCodexTransport(model: ProviderRuntimeModel): ProviderRuntimeMo
normalizeLowercaseStringOrEmpty(model.name) === OPENAI_CODEX_GPT_54_LEGACY_MODEL_ID
? OPENAI_CODEX_GPT_54_MODEL_ID
: model.name;
const useCodexTransport =
!model.baseUrl || isOpenAIApiBaseUrl(model.baseUrl) || isOpenAICodexBaseUrl(model.baseUrl);
const api =
useCodexTransport && model.api === "openai-responses" ? "openai-codex-responses" : model.api;
const baseUrl =
api === "openai-codex-responses" && (!model.baseUrl || isOpenAIApiBaseUrl(model.baseUrl))
? OPENAI_CODEX_BASE_URL
: model.baseUrl;
const normalizedTransport = normalizeCodexTransportFields({
api: model.api,
baseUrl: model.baseUrl,
});
const api = normalizedTransport.api ?? model.api;
const baseUrl = normalizedTransport.baseUrl ?? model.baseUrl;
if (
api === model.api &&
baseUrl === model.baseUrl &&
@@ -335,6 +352,16 @@ export function buildOpenAICodexProviderPlugin(): ProviderPlugin {
}
return normalizeCodexTransport(ctx.model);
},
normalizeTransport: ({ provider, api, baseUrl }) => {
if (normalizeProviderId(provider) !== PROVIDER_ID) {
return undefined;
}
const normalized = normalizeCodexTransportFields({ api, baseUrl });
if (normalized.api === api && normalized.baseUrl === baseUrl) {
return undefined;
}
return normalized;
},
resolveUsageAuth: async (ctx) => await ctx.resolveOAuthToken(),
fetchUsageSnapshot: async (ctx) =>
await fetchCodexUsage(ctx.token, ctx.accountId, ctx.timeoutMs, ctx.fetchFn),

View File

@@ -3,6 +3,7 @@ import type { OpenRouterModelCapabilities } from "./openrouter-model-capabilitie
const OPENAI_BASE_URL = "https://api.openai.com/v1";
const OPENAI_CODEX_BASE_URL = "https://chatgpt.com/backend-api";
const OPENAI_CODEX_LEGACY_BASE_URL = "https://chatgpt.com/backend-api/v1";
const OPENROUTER_BASE_URL = "https://openrouter.ai/api/v1";
const ANTHROPIC_BASE_URL = "https://api.anthropic.com";
const XAI_BASE_URL = "https://api.x.ai/v1";
@@ -64,20 +65,23 @@ function cloneTemplate(
} as ResolvedModelLike;
}
/**
 * Returns true when the given base URL is one of the native Codex backend
 * routes: the canonical base URL or the legacy /backend-api/v1 variant.
 * Undefined (or any other URL) yields false.
 */
function isNativeOpenAICodexBaseUrl(baseUrl?: string): boolean {
  if (baseUrl === undefined) {
    return false;
  }
  const nativeBaseUrls: readonly string[] = [OPENAI_CODEX_BASE_URL, OPENAI_CODEX_LEGACY_BASE_URL];
  return nativeBaseUrls.includes(baseUrl);
}
function normalizeDynamicModel(params: { provider: string; model: ResolvedModelLike }) {
if (params.provider !== "openai-codex") {
return undefined;
}
const baseUrl = typeof params.model.baseUrl === "string" ? params.model.baseUrl : undefined;
const useCodexTransport =
!baseUrl || baseUrl === OPENAI_BASE_URL || isNativeOpenAICodexBaseUrl(baseUrl);
const nextApi =
params.model.api === "openai-responses" &&
(!baseUrl || baseUrl === OPENAI_BASE_URL || baseUrl === OPENAI_CODEX_BASE_URL)
useCodexTransport && (!params.model.api || params.model.api === "openai-responses")
? "openai-codex-responses"
: params.model.api;
const nextBaseUrl =
nextApi === "openai-codex-responses" && (!baseUrl || baseUrl === OPENAI_BASE_URL)
? OPENAI_CODEX_BASE_URL
: baseUrl;
nextApi === "openai-codex-responses" && useCodexTransport ? OPENAI_CODEX_BASE_URL : baseUrl;
if (nextApi !== params.model.api || nextBaseUrl !== baseUrl) {
return { ...params.model, api: nextApi, baseUrl: nextBaseUrl };
}
@@ -96,6 +100,14 @@ function normalizeTransport(params: {
params.context.api === "openai-completions" &&
(params.context.baseUrl === XAI_BASE_URL ||
(params.provider === "xai" && !params.context.baseUrl));
const isNativeOpenAICodexTransport =
params.provider === "openai-codex" &&
((!params.context.api &&
(!params.context.baseUrl || isNativeOpenAICodexBaseUrl(params.context.baseUrl))) ||
(params.context.api === "openai-responses" &&
(!params.context.baseUrl ||
params.context.baseUrl === OPENAI_BASE_URL ||
isNativeOpenAICodexBaseUrl(params.context.baseUrl))));
if (
params.context.api === "google-generative-ai" &&
params.context.baseUrl === "https://generativelanguage.googleapis.com"
@@ -117,6 +129,12 @@ function normalizeTransport(params: {
baseUrl: params.context.baseUrl,
};
}
if (isNativeOpenAICodexTransport) {
return {
api: "openai-codex-responses",
baseUrl: OPENAI_CODEX_BASE_URL,
};
}
return undefined;
}

View File

@@ -1069,6 +1069,50 @@ describe("resolveModel", () => {
});
});
it("normalizes stale discovered openai-codex /backend-api/v1 metadata", () => {
mockDiscoveredModel(discoverModels, {
provider: "openai-codex",
modelId: "gpt-5.4",
templateModel: {
...buildOpenAICodexForwardCompatExpectation("gpt-5.4"),
name: "GPT-5.4",
baseUrl: "https://chatgpt.com/backend-api/v1",
},
});
const result = resolveModelForTest("openai-codex", "gpt-5.4", "/tmp/agent");
expect(result.error).toBeUndefined();
expect(result.model).toMatchObject({
provider: "openai-codex",
id: "gpt-5.4",
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api",
});
});
it("normalizes discovered openai-codex metadata when api is missing", () => {
mockDiscoveredModel(discoverModels, {
provider: "openai-codex",
modelId: "gpt-5.4",
templateModel: {
...buildOpenAICodexForwardCompatExpectation("gpt-5.4"),
name: "GPT-5.4",
api: undefined,
},
});
const result = resolveModelForTest("openai-codex", "gpt-5.4", "/tmp/agent");
expect(result.error).toBeUndefined();
expect(result.model).toMatchObject({
provider: "openai-codex",
id: "gpt-5.4",
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api",
});
});
it("passes configured workspaceDir to runtime preference hooks", () => {
mockDiscoveredModel(discoverModels, {
provider: "openai-codex",

View File

@@ -5,6 +5,7 @@ import { describe, expect, it } from "vitest";
import { resolvePiCredentialMapFromStore } from "./pi-auth-credentials.js";
import {
addEnvBackedPiCredentials,
normalizeDiscoveredPiModel,
scrubLegacyStaticAuthJsonEntriesForDiscovery,
} from "./pi-model-discovery.js";
@@ -153,4 +154,57 @@ describe("discoverAuthStorage", () => {
}
}
});
it("normalizes stale discovered openai-codex rows when api metadata is missing", () => {
const normalized = normalizeDiscoveredPiModel(
{
id: "gpt-5.4",
name: "gpt-5.4",
provider: "openai-codex",
baseUrl: "https://chatgpt.com/backend-api",
reasoning: true,
input: ["text", "image"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 1_050_000,
contextTokens: 272_000,
maxTokens: 128_000,
},
"/tmp/agent",
) as {
api?: string;
baseUrl?: string;
};
expect(normalized).toMatchObject({
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api",
});
});
it("canonicalizes stale discovered openai-codex backend-api/v1 rows", () => {
const normalized = normalizeDiscoveredPiModel(
{
id: "gpt-5.4",
name: "gpt-5.4",
provider: "openai-codex",
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api/v1",
reasoning: true,
input: ["text", "image"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 1_050_000,
contextTokens: 272_000,
maxTokens: 128_000,
},
"/tmp/agent",
) as {
api?: string;
baseUrl?: string;
};
expect(normalized).toMatchObject({
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api",
});
});
});

View File

@@ -29,6 +29,10 @@ type ProviderRuntimeModelLike = Model<Api> & {
contextTokens?: number;
};
type DiscoveredProviderRuntimeModelLike = Omit<ProviderRuntimeModelLike, "api"> & {
api?: string | null;
};
type InMemoryAuthStorageBackendLike = {
withLock<T>(
update: (current: string) => {
@@ -65,19 +69,18 @@ export function normalizeDiscoveredPiModel<T>(value: T, agentDir: string): T {
if (
typeof value.id !== "string" ||
typeof value.name !== "string" ||
typeof value.provider !== "string" ||
typeof value.api !== "string"
typeof value.provider !== "string"
) {
return value;
}
const model = value as unknown as ProviderRuntimeModelLike;
const model = value as unknown as DiscoveredProviderRuntimeModelLike;
const pluginNormalized =
normalizeProviderResolvedModelWithPlugin({
provider: model.provider,
context: {
provider: model.provider,
modelId: model.id,
model,
model: model as unknown as ProviderRuntimeModelLike,
agentDir,
},
}) ?? model;
@@ -87,7 +90,7 @@ export function normalizeDiscoveredPiModel<T>(value: T, agentDir: string): T {
context: {
provider: model.provider,
modelId: model.id,
model: pluginNormalized,
model: pluginNormalized as unknown as ProviderRuntimeModelLike,
agentDir,
},
}) ?? pluginNormalized;
@@ -97,10 +100,19 @@ export function normalizeDiscoveredPiModel<T>(value: T, agentDir: string): T {
context: {
provider: model.provider,
modelId: model.id,
model: compatNormalized,
model: compatNormalized as unknown as ProviderRuntimeModelLike,
agentDir,
},
}) ?? compatNormalized;
if (
!isRecord(transportNormalized) ||
typeof transportNormalized.id !== "string" ||
typeof transportNormalized.name !== "string" ||
typeof transportNormalized.provider !== "string" ||
typeof transportNormalized.api !== "string"
) {
return value;
}
return normalizeModelCompat(transportNormalized as Model<Api>) as T;
}