refactor(plugin-sdk): remove bundled provider setup shims

This commit is contained in:
Peter Steinberger
2026-03-29 20:23:00 +01:00
parent e133924047
commit fc5fdcb091
15 changed files with 278 additions and 372 deletions

View File

@@ -145,6 +145,11 @@ describe("provider discovery contract", () => {
pluginId: "github-copilot",
artifactBasename: "token.js",
});
const ollamaApiModuleId = resolveRelativeBundledPluginPublicModuleId({
fromModuleUrl: import.meta.url,
pluginId: "ollama",
artifactBasename: "api.js",
});
const vllmApiModuleId = resolveRelativeBundledPluginPublicModuleId({
fromModuleUrl: import.meta.url,
pluginId: "vllm",
@@ -181,23 +186,11 @@ describe("provider discovery contract", () => {
resolveCopilotApiToken: resolveCopilotApiTokenMock,
};
});
vi.doMock("openclaw/plugin-sdk/provider-setup", async () => {
const actual = await vi.importActual<object>("openclaw/plugin-sdk/provider-setup");
vi.doMock(ollamaApiModuleId, async () => {
const actual = await vi.importActual<object>(ollamaApiModuleId);
return {
...actual,
buildOllamaProvider: (...args: unknown[]) => buildOllamaProviderMock(...args),
buildVllmProvider: (...args: unknown[]) => buildVllmProviderMock(...args),
buildSglangProvider: (...args: unknown[]) => buildSglangProviderMock(...args),
};
});
vi.doMock("openclaw/plugin-sdk/self-hosted-provider-setup", async () => {
const actual = await vi.importActual<object>(
"openclaw/plugin-sdk/self-hosted-provider-setup",
);
return {
...actual,
buildVllmProvider: (...args: unknown[]) => buildVllmProviderMock(...args),
buildSglangProvider: (...args: unknown[]) => buildSglangProviderMock(...args),
};
});
vi.doMock(vllmApiModuleId, async () => {

View File

@@ -1,14 +0,0 @@
import { describe, expect, it } from "vitest";

// Regression guard: loading the provider-setup barrel must not re-enter the
// generated vLLM facade while the facade's public surface is initializing.
// The imports are awaited sequentially on purpose — reaching the assertions
// at all is part of the check (a recursive load would hang or throw here).
describe("provider-vllm-setup", () => {
  it("loads provider-setup without recursing through the vllm facade", async () => {
    const setupBarrel = await import("../plugin-sdk/provider-setup.js");
    const vllmSurface = await import("../plugin-sdk/vllm.js");

    // Setup-side exports remain reachable from the barrel.
    expect(setupBarrel.VLLM_DEFAULT_BASE_URL).toBe("http://127.0.0.1:8000/v1");
    expect(typeof setupBarrel.buildVllmProvider).toBe("function");

    // Facade constants keep their published values.
    expect(vllmSurface.VLLM_DEFAULT_API_KEY_ENV_VAR).toBe("VLLM_API_KEY");
    expect(vllmSurface.VLLM_MODEL_PLACEHOLDER).toBe("meta-llama/Meta-Llama-3-8B-Instruct");
    expect(vllmSurface.VLLM_PROVIDER_LABEL).toBe("vLLM");
  });
});

View File

@@ -1,41 +0,0 @@
import type { OpenClawConfig } from "../config/config.js";
import type { WizardPrompter } from "../wizard/prompts.js";
import {
applyProviderDefaultModel,
SELF_HOSTED_DEFAULT_CONTEXT_WINDOW,
SELF_HOSTED_DEFAULT_COST,
SELF_HOSTED_DEFAULT_MAX_TOKENS,
promptAndConfigureOpenAICompatibleSelfHostedProvider,
} from "./provider-self-hosted-setup.js";
// Keep setup-side defaults local so the provider-setup barrel does not recurse
// back through the generated plugin facade while vLLM's public surface loads.
// vLLM's stock OpenAI-compatible server listens on port 8000 under the /v1 prefix.
export const VLLM_DEFAULT_BASE_URL = "http://127.0.0.1:8000/v1";
// Human-readable label shown by the setup wizard.
export const VLLM_PROVIDER_LABEL = "vLLM";
// Env var consulted for the API key when the server requires one.
export const VLLM_DEFAULT_API_KEY_ENV_VAR = "VLLM_API_KEY";
// Example model id shown as the wizard's input placeholder.
export const VLLM_MODEL_PLACEHOLDER = "meta-llama/Meta-Llama-3-8B-Instruct";
// Re-export the shared self-hosted limits under vLLM-specific names so callers
// never need to import from provider-self-hosted-setup directly.
export const VLLM_DEFAULT_CONTEXT_WINDOW = SELF_HOSTED_DEFAULT_CONTEXT_WINDOW;
export const VLLM_DEFAULT_MAX_TOKENS = SELF_HOSTED_DEFAULT_MAX_TOKENS;
export const VLLM_DEFAULT_COST = SELF_HOSTED_DEFAULT_COST;
/**
 * Run the interactive setup flow for a vLLM provider.
 *
 * Thin wrapper around the shared OpenAI-compatible self-hosted flow that
 * pre-fills the vLLM defaults (base URL, key env var, model placeholder).
 *
 * @param params.cfg      Current configuration to extend.
 * @param params.prompter Wizard used to ask the user for missing values.
 * @returns The updated config plus the chosen model id and model ref.
 */
export async function promptAndConfigureVllm(params: {
  cfg: OpenClawConfig;
  prompter: WizardPrompter;
}): Promise<{ config: OpenClawConfig; modelId: string; modelRef: string }> {
  const { config, modelId, modelRef } =
    await promptAndConfigureOpenAICompatibleSelfHostedProvider({
      cfg: params.cfg,
      prompter: params.prompter,
      providerId: "vllm",
      providerLabel: VLLM_PROVIDER_LABEL,
      defaultBaseUrl: VLLM_DEFAULT_BASE_URL,
      defaultApiKeyEnvVar: VLLM_DEFAULT_API_KEY_ENV_VAR,
      modelPlaceholder: VLLM_MODEL_PLACEHOLDER,
    });
  // Surface only the documented fields, even if the helper returns extras.
  return { config, modelId, modelRef };
}
export { applyProviderDefaultModel as applyVllmDefaultModel };