Files
openclaw/src/agents/pi-embedded-runner/model.e2e.test.ts
2026-02-14 22:06:04 +00:00

64 lines
1.8 KiB
TypeScript

import { beforeEach, describe, expect, it, vi } from "vitest";
vi.mock("../pi-model-discovery.js", () => ({
discoverAuthStorage: vi.fn(() => ({ mocked: true })),
discoverModels: vi.fn(() => ({ find: vi.fn(() => null) })),
}));
import { buildInlineProviderModels, resolveModel } from "./model.js";
import {
makeModel,
mockDiscoveredModel,
OPENAI_CODEX_TEMPLATE_MODEL,
resetMockDiscoverModels,
} from "./model.test-harness.js";
// Start every test from a clean discover-models mock state; kept as a
// braced arrow so no value is returned (vitest would treat a returned
// function as a per-test cleanup callback).
beforeEach(() => {
  resetMockDiscoverModels();
});
describe("pi embedded model e2e smoke", () => {
it("attaches provider ids and provider-level baseUrl for inline models", () => {
const providers = {
custom: {
baseUrl: "http://localhost:8000",
models: [makeModel("custom-model")],
},
};
const result = buildInlineProviderModels(providers);
expect(result).toEqual([
{
...makeModel("custom-model"),
provider: "custom",
baseUrl: "http://localhost:8000",
api: undefined,
},
]);
});
it("builds an openai-codex forward-compat fallback for gpt-5.3-codex", () => {
mockDiscoveredModel({
provider: "openai-codex",
modelId: "gpt-5.2-codex",
templateModel: OPENAI_CODEX_TEMPLATE_MODEL,
});
const result = resolveModel("openai-codex", "gpt-5.3-codex", "/tmp/agent");
expect(result.error).toBeUndefined();
expect(result.model).toMatchObject({
provider: "openai-codex",
id: "gpt-5.3-codex",
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api",
reasoning: true,
});
});
it("keeps unknown-model errors for non-forward-compat IDs", () => {
const result = resolveModel("openai-codex", "gpt-4.1-mini", "/tmp/agent");
expect(result.model).toBeUndefined();
expect(result.error).toBe("Unknown model: openai-codex/gpt-4.1-mini");
});
});