perf: slim vllm discovery contract test

This commit is contained in:
Peter Steinberger
2026-04-23 23:39:42 +01:00
parent c2cf3c49d3
commit 707e13f966

View File

@@ -1,7 +1,90 @@
import { fileURLToPath } from "node:url";
import { describeVllmProviderDiscoveryContract } from "../../test/helpers/plugins/provider-discovery-contract.js";
import type { OpenClawPluginApi } from "openclaw/plugin-sdk/plugin-entry";
import { beforeEach, describe, expect, it, vi } from "vitest";
describeVllmProviderDiscoveryContract({
load: () => import("./index.js"),
apiModuleId: fileURLToPath(new URL("./api.js", import.meta.url)),
// Hoisted so the mock function already exists when the vi.mock factory below
// runs — Vitest moves vi.mock calls to the top of the module at transform time,
// ahead of ordinary const initializers.
const buildVllmProviderMock = vi.hoisted(() => vi.fn());
// Stub out ./api.js: re-export the provider constants verbatim and route
// buildVllmProvider through the hoisted mock so the test can program its
// resolved value and inspect the arguments it was called with.
vi.mock("./api.js", () => ({
VLLM_DEFAULT_API_KEY_ENV_VAR: "VLLM_API_KEY",
VLLM_DEFAULT_BASE_URL: "http://127.0.0.1:8000/v1",
VLLM_MODEL_PLACEHOLDER: "meta-llama/Meta-Llama-3-8B-Instruct",
VLLM_PROVIDER_LABEL: "vLLM",
buildVllmProvider: (...args: unknown[]) => buildVllmProviderMock(...args),
}));
/**
 * Signature of the provider-owned discovery hook exercised by this test.
 *
 * NOTE(review): this is a local structural mirror of the runtime contract of
 * `provider.discovery.run` — confirm it stays in sync with the plugin SDK's
 * own types if those change.
 */
type ProviderDiscoveryRun = (ctx: {
config: Record<string, unknown>;
env: NodeJS.ProcessEnv;
// Resolves the key material for discovery; per the assertions in this file,
// `apiKey` carries the env-var name and `discoveryApiKey` the resolved secret.
resolveProviderApiKey: () => {
apiKey: string | undefined;
discoveryApiKey?: string;
};
// Fuller auth resolution variant, adding how the credential is supplied
// (mode) and where it came from (source).
resolveProviderAuth: () => {
apiKey: string | undefined;
discoveryApiKey?: string;
mode: "api_key" | "oauth" | "token" | "none";
source: "env" | "profile" | "none";
};
}) => Promise<unknown>;
/**
 * Minimal view of the object the plugin hands to `registerProvider`,
 * narrowed to exactly the fields this contract test asserts on.
 */
type RegisteredVllmProvider = {
id: string;
discovery?: {
// Scheduling hint for the discovery pass; only "late" is asserted here.
order?: string;
run: ProviderDiscoveryRun;
};
};
// Contract: the vLLM plugin owns its discovery step end-to-end — the provider
// it registers carries a late-ordered discovery.run that delegates to the
// (mocked) buildVllmProvider and attaches the resolved apiKey to the result.
describe("vllm provider discovery contract", () => {
  // Fresh mock state per test so programmed values and call counts never leak.
  beforeEach(() => {
    buildVllmProviderMock.mockReset();
  });

  it("keeps self-hosted discovery provider-owned", async () => {
    // Dynamic import so the vi.mock("./api.js") factory is in effect.
    const { default: plugin } = await import("./index.js");

    // Capture whatever the plugin hands to registerProvider.
    let registered: RegisteredVllmProvider | undefined;
    plugin.register({
      registerProvider: (candidate) => {
        registered = candidate as RegisteredVllmProvider;
      },
    } as OpenClawPluginApi);

    expect(registered?.id).toBe("vllm");
    expect(registered?.discovery?.order).toBe("late");

    const discovery = registered?.discovery;
    expect(discovery).toBeDefined();

    // Program the stubbed builder with the payload discovery should pass
    // through (the expected result below additionally carries apiKey).
    buildVllmProviderMock.mockResolvedValueOnce({
      baseUrl: "http://127.0.0.1:8000/v1",
      api: "openai-completions",
      models: [{ id: "meta-llama/Meta-Llama-3-8B-Instruct", name: "Meta Llama 3" }],
    });

    const outcome = discovery!.run({
      config: {},
      env: {
        VLLM_API_KEY: "env-vllm-key",
      } as NodeJS.ProcessEnv,
      resolveProviderApiKey: () => ({
        apiKey: "VLLM_API_KEY",
        discoveryApiKey: "env-vllm-key",
      }),
      resolveProviderAuth: () => ({
        apiKey: "VLLM_API_KEY",
        discoveryApiKey: "env-vllm-key",
        mode: "api_key",
        source: "env",
      }),
    });

    await expect(outcome).resolves.toEqual({
      provider: {
        baseUrl: "http://127.0.0.1:8000/v1",
        api: "openai-completions",
        apiKey: "VLLM_API_KEY",
        models: [{ id: "meta-llama/Meta-Llama-3-8B-Instruct", name: "Meta Llama 3" }],
      },
    });

    // The builder receives the resolved secret, not the env-var name.
    expect(buildVllmProviderMock).toHaveBeenCalledWith({
      apiKey: "env-vllm-key",
    });
  });
});