fix: prefer codex gpt-5.4 runtime metadata (#62694) (thanks @ruclaw7)

* Agents: prefer runtime codex gpt-5.4 metadata

* Agents: move codex gpt-5.4 override into provider hook

* fix: repair codex runtime preference hooks

* fix: use workspace dir for codex runtime preference

* test: cover codex workspace dir hook

* fix: prefer codex gpt-5.4 runtime metadata (#62694) (thanks @ruclaw7)

---------

Co-authored-by: Rudi Cilibrasi <cilibrar@gmail.com>
Co-authored-by: Rudi Cilibrasi <rudi@metagood.com>
Co-authored-by: Ayaan Zaidi <hi@obviy.us>
This commit is contained in:
ruclaw7
2026-04-07 19:46:23 -07:00
committed by GitHub
parent b8f12d99b2
commit 81969c7a91
7 changed files with 234 additions and 3 deletions

View File

@@ -487,6 +487,12 @@ export function createProviderRuntimeTestMock(options: ProviderRuntimeTestMockOp
},
)
: undefined,
// Test-mock stand-in for the provider runtime preference hook: opts in only
// when the provider is openai-codex and the (trimmed, case-insensitive)
// model id is exactly "gpt-5.4".
shouldPreferProviderRuntimeResolvedModel: (params: {
  provider: string;
  context: { modelId: string };
}) => {
  const normalizedModelId = params.context.modelId.trim().toLowerCase();
  return params.provider === "openai-codex" && normalizedModelId === "gpt-5.4";
},
prepareProviderDynamicModel: async (params: {
provider: string;
context: { modelId: string };

View File

@@ -869,6 +869,133 @@ describe("resolveModel", () => {
});
});
it("prefers runtime-resolved openai-codex gpt-5.4 metadata when it has a larger context window", () => {
  // Stale discovered row: context figures are well below the runtime values.
  const discoveredTemplate = {
    ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"),
    name: "GPT-5.4",
    contextWindow: 128_000,
    contextTokens: 32_000,
    input: ["text"],
  };
  mockDiscoveredModel(discoverModels, {
    provider: "openai-codex",
    modelId: "gpt-5.4",
    templateModel: discoveredTemplate,
  });

  const { error, model } = resolveModelForTest("openai-codex", "gpt-5.4", "/tmp/agent");

  expect(error).toBeUndefined();
  // The runtime metadata (larger window) must win over the discovered row.
  expect(model).toMatchObject({
    provider: "openai-codex",
    id: "gpt-5.4",
    api: "openai-codex-responses",
    baseUrl: "https://chatgpt.com/backend-api",
    contextWindow: 1_050_000,
    contextTokens: 272_000,
  });
});
it("prefers runtime-resolved openai-codex gpt-5.4 metadata during async resolution too", async () => {
  // Same stale discovered metadata as the sync case, exercised through the
  // async resolution path.
  mockDiscoveredModel(discoverModels, {
    provider: "openai-codex",
    modelId: "gpt-5.4",
    templateModel: {
      ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"),
      name: "GPT-5.4",
      contextWindow: 128_000,
      contextTokens: 32_000,
    },
  });

  const resolution = await resolveModelAsyncForTest("openai-codex", "gpt-5.4", "/tmp/agent");

  expect(resolution.error).toBeUndefined();
  expect(resolution.model).toMatchObject({
    provider: "openai-codex",
    id: "gpt-5.4",
    contextWindow: 1_050_000,
    contextTokens: 272_000,
  });
});
// Verifies that the resolver forwards the configured agents.defaults.workspace
// directory to the runtime preference hook — both as the top-level
// `workspaceDir` argument and nested inside the hook `context`.
it("passes configured workspaceDir to runtime preference hooks", () => {
// Stale discovered metadata: smaller context figures than the runtime values.
mockDiscoveredModel(discoverModels, {
provider: "openai-codex",
modelId: "gpt-5.4",
templateModel: {
...buildOpenAICodexForwardCompatExpectation("gpt-5.4"),
name: "GPT-5.4",
contextWindow: 128_000,
contextTokens: 32_000,
},
});
// Spy hook that opts in only when both directories arrive as configured below.
const shouldPreferRuntimeResolvedModel = vi.fn(
(params: { workspaceDir?: string; context: { agentDir?: string } }) =>
params.workspaceDir === "/tmp/workspace" && params.context.agentDir === "/tmp/agent-state",
);
const runtimeHooks = {
...createRuntimeHooks(),
shouldPreferProviderRuntimeResolvedModel: shouldPreferRuntimeResolvedModel,
};
// Minimal config carrying only the workspace default the hook should observe.
const cfg = {
agents: {
defaults: {
workspace: "/tmp/workspace",
},
},
} as OpenClawConfig;
const result = resolveModel("openai-codex", "gpt-5.4", "/tmp/agent-state", cfg, {
authStorage: { mocked: true } as never,
modelRegistry: discoverModels({ mocked: true } as never, "/tmp/agent-state"),
runtimeHooks,
});
// Both the flat argument and the nested context must carry the workspace dir.
expect(shouldPreferRuntimeResolvedModel).toHaveBeenCalledWith(
expect.objectContaining({
provider: "openai-codex",
workspaceDir: "/tmp/workspace",
context: expect.objectContaining({
agentDir: "/tmp/agent-state",
workspaceDir: "/tmp/workspace",
}),
}),
);
expect(result.error).toBeUndefined();
// Because the hook opted in, the runtime metadata (larger window) won.
expect(result.model).toMatchObject({
provider: "openai-codex",
id: "gpt-5.4",
contextWindow: 1_050_000,
contextTokens: 272_000,
});
});
it("keeps exact discovered metadata for other openai-codex models", () => {
  // A non-gpt-5.4 model: the preference hook should not opt it in, so the
  // discovered row must pass through untouched.
  const template = {
    ...buildOpenAICodexForwardCompatExpectation("gpt-5.4-mini"),
    name: "GPT-5.4 Mini",
    contextWindow: 64_000,
    input: ["text"],
  };
  mockDiscoveredModel(discoverModels, {
    provider: "openai-codex",
    modelId: "gpt-5.4-mini",
    templateModel: template,
  });

  const { error, model } = resolveModelForTest("openai-codex", "gpt-5.4-mini", "/tmp/agent");

  expect(error).toBeUndefined();
  expect(model).toMatchObject({
    provider: "openai-codex",
    id: "gpt-5.4-mini",
    api: "openai-codex-responses",
    baseUrl: "https://chatgpt.com/backend-api",
    contextWindow: 64_000,
    input: ["text"],
  });
});
it("rejects stale direct openai gpt-5.3-codex-spark discovery rows", () => {
mockDiscoveredModel(discoverModels, {
provider: "openai",

View File

@@ -10,6 +10,7 @@ import {
prepareProviderDynamicModel,
runProviderDynamicModel,
normalizeProviderResolvedModelWithPlugin,
shouldPreferProviderRuntimeResolvedModel,
} from "../../plugins/provider-runtime.js";
import type { ProviderRuntimeModel } from "../../plugins/types.js";
import { resolveOpenClawAgentDir } from "../agent-paths.js";
@@ -51,6 +52,9 @@ type ProviderRuntimeHooks = {
params: Parameters<typeof prepareProviderDynamicModel>[0],
) => Promise<void>;
runProviderDynamicModel: (params: Parameters<typeof runProviderDynamicModel>[0]) => unknown;
shouldPreferProviderRuntimeResolvedModel?: (
params: Parameters<typeof shouldPreferProviderRuntimeResolvedModel>[0],
) => boolean;
normalizeProviderResolvedModelWithPlugin: (
params: Parameters<typeof normalizeProviderResolvedModelWithPlugin>[0],
) => unknown;
@@ -66,6 +70,7 @@ const DEFAULT_PROVIDER_RUNTIME_HOOKS: ProviderRuntimeHooks = {
clearProviderRuntimeHookCache,
prepareProviderDynamicModel,
runProviderDynamicModel,
shouldPreferProviderRuntimeResolvedModel,
normalizeProviderResolvedModelWithPlugin,
normalizeProviderTransportWithPlugin,
};
@@ -526,6 +531,43 @@ function resolveConfiguredFallbackModel(params: {
});
}
/**
 * Asks the installed provider runtime hook whether the explicitly configured
 * model should be compared against runtime-resolved metadata instead of being
 * returned as-is.
 *
 * Returns false when no hook is installed. The configured workspace directory
 * (read from `cfg.agents.defaults.workspace`, if present) is forwarded both
 * as a top-level argument and inside the hook context.
 */
function shouldCompareProviderRuntimeResolvedModel(params: {
  provider: string;
  modelId: string;
  cfg?: OpenClawConfig;
  agentDir?: string;
  runtimeHooks: ProviderRuntimeHooks;
}): boolean {
  const { provider, modelId, cfg, agentDir, runtimeHooks } = params;
  const workspaceDir = cfg?.agents?.defaults?.workspace;
  const verdict = runtimeHooks.shouldPreferProviderRuntimeResolvedModel?.({
    provider,
    config: cfg,
    workspaceDir,
    context: {
      provider,
      modelId,
      config: cfg,
      agentDir,
      workspaceDir,
    },
  });
  return verdict === true;
}
/**
 * Chooses between the explicitly configured model and the runtime-resolved
 * variant. The runtime model wins only when it is present and advertises a
 * strictly larger context window; otherwise the explicit entry is returned
 * unchanged.
 */
function preferProviderRuntimeResolvedModel(params: {
  explicitModel: Model<Api>;
  runtimeResolvedModel?: Model<Api>;
}): Model<Api> {
  const { explicitModel, runtimeResolvedModel } = params;
  if (!runtimeResolvedModel) {
    return explicitModel;
  }
  return runtimeResolvedModel.contextWindow > explicitModel.contextWindow
    ? runtimeResolvedModel
    : explicitModel;
}
export function resolveModelWithRegistry(params: {
provider: string;
modelId: string;
@@ -543,14 +585,29 @@ export function resolveModelWithRegistry(params: {
provider: normalizedRef.provider,
modelId: normalizedRef.model,
};
const runtimeHooks = params.runtimeHooks ?? DEFAULT_PROVIDER_RUNTIME_HOOKS;
const explicitModel = resolveExplicitModelWithRegistry(normalizedParams);
if (explicitModel?.kind === "suppressed") {
return undefined;
}
if (explicitModel?.kind === "resolved") {
return explicitModel.model;
if (
!shouldCompareProviderRuntimeResolvedModel({
provider: normalizedParams.provider,
modelId: normalizedParams.modelId,
cfg: normalizedParams.cfg,
agentDir: normalizedParams.agentDir,
runtimeHooks,
})
) {
return explicitModel.model;
}
const pluginDynamicModel = resolvePluginDynamicModelWithRegistry(normalizedParams);
return preferProviderRuntimeResolvedModel({
explicitModel: explicitModel.model,
runtimeResolvedModel: pluginDynamicModel,
});
}
const pluginDynamicModel = resolvePluginDynamicModelWithRegistry(normalizedParams);
if (pluginDynamicModel) {
return pluginDynamicModel;
@@ -683,7 +740,16 @@ export async function resolveModelAsync(
});
};
let model =
explicitModel?.kind === "resolved" ? explicitModel.model : await resolveDynamicAttempt();
explicitModel?.kind === "resolved" &&
!shouldCompareProviderRuntimeResolvedModel({
provider: normalizedRef.provider,
modelId: normalizedRef.model,
cfg,
agentDir: resolvedAgentDir,
runtimeHooks,
})
? explicitModel.model
: await resolveDynamicAttempt();
if (!model && !explicitModel && options?.retryTransientProviderRuntimeMiss) {
// Startup can race the first provider-runtime snapshot load on a fresh
// gateway boot. Retry once with a cleared hook cache before surfacing a

View File

@@ -36,6 +36,7 @@ import type {
ProviderModernModelPolicyContext,
ProviderPrepareExtraParamsContext,
ProviderPrepareDynamicModelContext,
ProviderPreferRuntimeResolvedModelContext,
ProviderResolveExternalAuthProfilesContext,
ProviderResolveExternalOAuthProfilesContext,
ProviderPrepareRuntimeAuthContext,
@@ -251,6 +252,18 @@ export async function prepareProviderDynamicModel(params: {
await resolveProviderRuntimePlugin(params)?.prepareDynamicModel?.(params.context);
}
/**
 * Asks the provider's runtime plugin whether an exactly configured model
 * should be compared against runtime-resolved metadata before the explicit
 * entry is returned unchanged. Defaults to false when no plugin resolves for
 * the provider or the plugin does not implement the hook.
 */
export function shouldPreferProviderRuntimeResolvedModel(params: {
  provider: string;
  config?: OpenClawConfig;
  workspaceDir?: string;
  env?: NodeJS.ProcessEnv;
  context: ProviderPreferRuntimeResolvedModelContext;
}): boolean {
  const plugin = resolveProviderRuntimePlugin(params);
  if (!plugin?.preferRuntimeResolvedModel) {
    return false;
  }
  return plugin.preferRuntimeResolvedModel(params.context);
}
export function normalizeProviderResolvedModelWithPlugin(params: {
provider: string;
config?: OpenClawConfig;

View File

@@ -379,6 +379,14 @@ export type ProviderResolveDynamicModelContext = {
*/
export type ProviderPrepareDynamicModelContext = ProviderResolveDynamicModelContext;
/**
 * Context handed to `preferRuntimeResolvedModel` on a provider plugin.
 * Identifies the provider/model pair being resolved; `config`, `agentDir`,
 * and `workspaceDir` are optional extras forwarded from the resolver when
 * available.
 */
export type ProviderPreferRuntimeResolvedModelContext = {
config?: OpenClawConfig;
agentDir?: string;
workspaceDir?: string;
provider: string;
modelId: string;
};
/**
* Last-chance rewrite hook for provider-owned transport normalization.
*
@@ -1138,6 +1146,12 @@ export type ProviderPlugin = {
* completes, `resolveDynamicModel` is called again.
*/
prepareDynamicModel?: (ctx: ProviderPrepareDynamicModelContext) => Promise<void>;
/**
* Lets a provider plugin opt exact configured models into a runtime
* metadata comparison pass before the embedded runner returns the explicit
* entry unchanged.
*/
preferRuntimeResolvedModel?: (ctx: ProviderPreferRuntimeResolvedModelContext) => boolean;
/**
* Provider-owned transport normalization.
*