From 707e13f9663200ae1f72ea220e3516c70a8ad552 Mon Sep 17 00:00:00 2001
From: Peter Steinberger
Date: Thu, 23 Apr 2026 23:39:42 +0100
Subject: [PATCH] perf: slim vllm discovery contract test

---
 .../vllm/provider-discovery.contract.test.ts | 93 ++++++++++++++++++-
 1 file changed, 88 insertions(+), 5 deletions(-)

diff --git a/extensions/vllm/provider-discovery.contract.test.ts b/extensions/vllm/provider-discovery.contract.test.ts
index c62404f7893..74990e08baf 100644
--- a/extensions/vllm/provider-discovery.contract.test.ts
+++ b/extensions/vllm/provider-discovery.contract.test.ts
@@ -1,7 +1,90 @@
-import { fileURLToPath } from "node:url";
-import { describeVllmProviderDiscoveryContract } from "../../test/helpers/plugins/provider-discovery-contract.js";
 
-describeVllmProviderDiscoveryContract({
-  load: () => import("./index.js"),
-  apiModuleId: fileURLToPath(new URL("./api.js", import.meta.url)),
+import type { OpenClawPluginApi } from "openclaw/plugin-sdk/plugin-entry";
+import { beforeEach, describe, expect, it, vi } from "vitest";
+
+const buildVllmProviderMock = vi.hoisted(() => vi.fn());
+
+vi.mock("./api.js", () => ({
+  VLLM_DEFAULT_API_KEY_ENV_VAR: "VLLM_API_KEY",
+  VLLM_DEFAULT_BASE_URL: "http://127.0.0.1:8000/v1",
+  VLLM_MODEL_PLACEHOLDER: "meta-llama/Meta-Llama-3-8B-Instruct",
+  VLLM_PROVIDER_LABEL: "vLLM",
+  buildVllmProvider: (...args: unknown[]) => buildVllmProviderMock(...args),
+}));
+
+type ProviderDiscoveryRun = (ctx: {
+  config: Record<string, unknown>;
+  env: NodeJS.ProcessEnv;
+  resolveProviderApiKey: () => {
+    apiKey: string | undefined;
+    discoveryApiKey?: string;
+  };
+  resolveProviderAuth: () => {
+    apiKey: string | undefined;
+    discoveryApiKey?: string;
+    mode: "api_key" | "oauth" | "token" | "none";
+    source: "env" | "profile" | "none";
+  };
+}) => Promise<unknown>;
+
+type RegisteredVllmProvider = {
+  id: string;
+  discovery?: {
+    order?: string;
+    run: ProviderDiscoveryRun;
+  };
+};
+
+describe("vllm provider discovery contract", () => {
+  beforeEach(() => {
+    buildVllmProviderMock.mockReset();
+  });
+
+  it("keeps self-hosted discovery provider-owned", async () => {
+    const { default: plugin } = await import("./index.js");
+    let provider: RegisteredVllmProvider | undefined;
+    plugin.register({
+      registerProvider: (registeredProvider) => {
+        provider = registeredProvider as RegisteredVllmProvider;
+      },
+    } as OpenClawPluginApi);
+    expect(provider?.id).toBe("vllm");
+    expect(provider?.discovery?.order).toBe("late");
+    const discovery = provider?.discovery;
+    expect(discovery).toBeDefined();
+
+    buildVllmProviderMock.mockResolvedValueOnce({
+      baseUrl: "http://127.0.0.1:8000/v1",
+      api: "openai-completions",
+      models: [{ id: "meta-llama/Meta-Llama-3-8B-Instruct", name: "Meta Llama 3" }],
+    });
+
+    await expect(
+      discovery!.run({
+        config: {},
+        env: {
+          VLLM_API_KEY: "env-vllm-key",
+        } as NodeJS.ProcessEnv,
+        resolveProviderApiKey: () => ({
+          apiKey: "VLLM_API_KEY",
+          discoveryApiKey: "env-vllm-key",
+        }),
+        resolveProviderAuth: () => ({
+          apiKey: "VLLM_API_KEY",
+          discoveryApiKey: "env-vllm-key",
+          mode: "api_key",
+          source: "env",
+        }),
+      }),
+    ).resolves.toEqual({
+      provider: {
+        baseUrl: "http://127.0.0.1:8000/v1",
+        api: "openai-completions",
+        apiKey: "VLLM_API_KEY",
+        models: [{ id: "meta-llama/Meta-Llama-3-8B-Instruct", name: "Meta Llama 3" }],
+      },
+    });
+    expect(buildVllmProviderMock).toHaveBeenCalledWith({
+      apiKey: "env-vllm-key",
+    });
+  });
 });