test(openai): cover GPT-5.5 defaults

This commit is contained in:
Peter Steinberger
2026-04-23 20:00:51 +01:00
parent a36903b94c
commit cd5bc2fc93
65 changed files with 437 additions and 181 deletions

View File

@@ -99,8 +99,8 @@ const modelCatalogMocks = getSharedMocks("openclaw.trigger-handling.model-catalo
contextWindow: 200000,
},
{ provider: "openai", id: "gpt-4.1-mini", name: "GPT-4.1 mini" },
{ provider: "openai", id: "gpt-5.4", name: "GPT-5.2" },
{ provider: "openai-codex", id: "gpt-5.4", name: "GPT-5.2 (Codex)" },
{ provider: "openai", id: "gpt-5.5", name: "GPT-5.5" },
{ provider: "openai-codex", id: "gpt-5.5", name: "GPT-5.5 (Codex)" },
{ provider: "minimax", id: "MiniMax-M2.7", name: "MiniMax M2.7" },
]),
resetModelCatalogCacheForTest: vi.fn(),

View File

@@ -10,7 +10,7 @@ import {
import type { PluginRuntime } from "../../../src/plugins/runtime/types.js";
const DEFAULT_PROVIDER = "openai";
const DEFAULT_MODEL = "gpt-5.4";
const DEFAULT_MODEL = "gpt-5.5";
type DeepPartial<T> = {
[K in keyof T]?: T[K] extends (...args: never[]) => unknown

View File

@@ -119,12 +119,12 @@ function buildOpenAICodexOAuthResult(params: {
agents: {
defaults: {
models: {
"openai-codex/gpt-5.4": {},
"openai-codex/gpt-5.5": {},
},
},
},
},
defaultModel: "openai-codex/gpt-5.4",
defaultModel: "openai-codex/gpt-5.5",
notes: undefined,
};
}

View File

@@ -1,5 +1,6 @@
export {
expectAugmentedCodexCatalog,
expectedAugmentedOpenaiCodexCatalogEntriesWithGpt55,
expectCodexBuiltInSuppression,
expectCodexMissingAuthHint,
} from "../../../src/plugins/provider-runtime.test-support.js";

View File

@@ -445,6 +445,34 @@ export function describeOpenAIProviderRuntimeContract(load: ProviderRuntimeContr
});
});
// Verifies that the "openai" provider resolves the not-yet-cataloged id
// "gpt-5.5" via forward-compat: the stub registry only knows "gpt-5.4",
// so the provider must synthesize a gpt-5.5 model from that baseline.
it("owns openai gpt-5.5 forward-compat resolution", () => {
const provider = requireProviderContractProvider("openai");
// resolveDynamicModel is optional on the provider contract, hence `?.`.
const model = provider.resolveDynamicModel?.({
provider: "openai",
modelId: "gpt-5.5",
// Minimal registry stub: resolves only "gpt-5.4"; any other id yields null.
modelRegistry: {
find: (_provider: string, id: string) =>
id === "gpt-5.4"
? createModel({
id,
provider: "openai",
baseUrl: "https://api.openai.com/v1",
input: ["text", "image"],
})
: null,
} as never, // NOTE(review): `as never` sidesteps the registry type for a partial stub — confirm intentional.
});
// Partial match: the resolved model must carry the requested id and keep the
// openai-responses API and baseUrl, with the asserted window/token limits.
expect(model).toMatchObject({
id: "gpt-5.5",
provider: "openai",
api: "openai-responses",
baseUrl: "https://api.openai.com/v1",
contextWindow: 1_000_000,
maxTokens: 128_000,
});
});
it("owns openai gpt-5.4 mini forward-compat resolution", () => {
const provider = requireProviderContractProvider("openai");
const model = provider.resolveDynamicModel?.({
@@ -542,6 +570,34 @@ export function describeOpenAIProviderRuntimeContract(load: ProviderRuntimeContr
});
});
// Same forward-compat check for the "openai-codex" provider: requesting the
// unknown id "gpt-5.5" against a registry that only knows "gpt-5.4" must
// produce a gpt-5.5 model derived from the codex baseline.
it("owns forward-compat codex gpt-5.5 models", () => {
const provider = requireProviderContractProvider("openai-codex");
// Optional contract hook — `?.` keeps the test tolerant of its absence.
const model = provider.resolveDynamicModel?.({
provider: "openai-codex",
modelId: "gpt-5.5",
// Registry stub: only "gpt-5.4" resolves, using the codex-responses API.
modelRegistry: {
find: (_provider: string, id: string) =>
id === "gpt-5.4"
? createModel({
id,
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: "https://chatgpt.com/backend-api",
})
: null,
} as never, // NOTE(review): `as never` sidesteps the registry type for a partial stub — confirm intentional.
});
// Asserted limits: 1M context window, 272K context tokens, 128K max tokens,
// with the codex-responses API retained on the synthesized model.
expect(model).toMatchObject({
id: "gpt-5.5",
provider: "openai-codex",
api: "openai-codex-responses",
contextWindow: 1_000_000,
contextTokens: 272_000,
maxTokens: 128_000,
});
});
it("owns forward-compat codex mini models", () => {
const provider = requireProviderContractProvider("openai-codex");
const model = provider.resolveDynamicModel?.({