fix(plugin-sdk): break vllm setup recursion

This commit is contained in:
Peter Steinberger
2026-03-29 17:54:49 +01:00
parent 2d2e386b94
commit 56640a6725
2 changed files with 20 additions and 7 deletions

View File

@@ -0,0 +1,14 @@
import { describe, expect, it } from "vitest";
describe("provider-vllm-setup", () => {
  it("loads provider-setup without recursing through the vllm facade", async () => {
    // Pull in the setup barrel first: if it still re-exported through the
    // generated facade, this import would recurse during module evaluation.
    const setupModule = await import("../plugin-sdk/provider-setup.js");
    const vllmFacade = await import("../plugin-sdk/vllm.js");

    // Setup-side surface: local default plus the provider factory.
    expect(setupModule.VLLM_DEFAULT_BASE_URL).toBe("http://127.0.0.1:8000/v1");
    expect(typeof setupModule.buildVllmProvider).toBe("function");

    // Facade surface: public constants stay intact after the split.
    expect(vllmFacade.VLLM_DEFAULT_API_KEY_ENV_VAR).toBe("VLLM_API_KEY");
    expect(vllmFacade.VLLM_MODEL_PLACEHOLDER).toBe(
      "meta-llama/Meta-Llama-3-8B-Instruct",
    );
    expect(vllmFacade.VLLM_PROVIDER_LABEL).toBe("vLLM");
  });
});

View File

@@ -1,10 +1,4 @@
import type { OpenClawConfig } from "../config/config.js";
import {
VLLM_DEFAULT_API_KEY_ENV_VAR,
VLLM_DEFAULT_BASE_URL,
VLLM_MODEL_PLACEHOLDER,
VLLM_PROVIDER_LABEL,
} from "../plugin-sdk/vllm.js";
import type { WizardPrompter } from "../wizard/prompts.js";
import {
applyProviderDefaultModel,
@@ -14,7 +8,12 @@ import {
promptAndConfigureOpenAICompatibleSelfHostedProvider,
} from "./provider-self-hosted-setup.js";
export { VLLM_DEFAULT_BASE_URL } from "../plugin-sdk/vllm.js";
// Keep setup-side defaults local so the provider-setup barrel does not recurse
// back through the generated plugin facade while vLLM's public surface loads.
// NOTE(review): these literals intentionally duplicate the values exported by
// ../plugin-sdk/vllm.js — keep the two in sync (the provider-vllm-setup test
// pins both sides).
// Default OpenAI-compatible endpoint a local vLLM server listens on.
export const VLLM_DEFAULT_BASE_URL = "http://127.0.0.1:8000/v1";
// Human-readable provider name (presumably surfaced by the setup wizard — verify against callers).
export const VLLM_PROVIDER_LABEL = "vLLM";
// Environment variable consulted for the vLLM API key.
export const VLLM_DEFAULT_API_KEY_ENV_VAR = "VLLM_API_KEY";
// Example model id used as a placeholder when prompting for a model.
export const VLLM_MODEL_PLACEHOLDER = "meta-llama/Meta-Llama-3-8B-Instruct";
// vLLM reuses the generic self-hosted context/token/cost defaults unchanged.
export const VLLM_DEFAULT_CONTEXT_WINDOW = SELF_HOSTED_DEFAULT_CONTEXT_WINDOW;
export const VLLM_DEFAULT_MAX_TOKENS = SELF_HOSTED_DEFAULT_MAX_TOKENS;
export const VLLM_DEFAULT_COST = SELF_HOSTED_DEFAULT_COST;