mirror of
https://github.com/openclaw/openclaw.git
synced 2026-03-30 19:32:27 +00:00
fix(plugin-sdk): break vllm setup recursion
This commit is contained in:
14
src/plugins/provider-vllm-setup.test.ts
Normal file
14
src/plugins/provider-vllm-setup.test.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
|
||||
describe("provider-vllm-setup", () => {
|
||||
it("loads provider-setup without recursing through the vllm facade", async () => {
|
||||
const providerSetup = await import("../plugin-sdk/provider-setup.js");
|
||||
const vllm = await import("../plugin-sdk/vllm.js");
|
||||
|
||||
expect(providerSetup.VLLM_DEFAULT_BASE_URL).toBe("http://127.0.0.1:8000/v1");
|
||||
expect(typeof providerSetup.buildVllmProvider).toBe("function");
|
||||
expect(vllm.VLLM_DEFAULT_API_KEY_ENV_VAR).toBe("VLLM_API_KEY");
|
||||
expect(vllm.VLLM_MODEL_PLACEHOLDER).toBe("meta-llama/Meta-Llama-3-8B-Instruct");
|
||||
expect(vllm.VLLM_PROVIDER_LABEL).toBe("vLLM");
|
||||
});
|
||||
});
|
||||
@@ -1,10 +1,4 @@
|
||||
import type { OpenClawConfig } from "../config/config.js";
|
||||
import {
|
||||
VLLM_DEFAULT_API_KEY_ENV_VAR,
|
||||
VLLM_DEFAULT_BASE_URL,
|
||||
VLLM_MODEL_PLACEHOLDER,
|
||||
VLLM_PROVIDER_LABEL,
|
||||
} from "../plugin-sdk/vllm.js";
|
||||
import type { WizardPrompter } from "../wizard/prompts.js";
|
||||
import {
|
||||
applyProviderDefaultModel,
|
||||
@@ -14,7 +8,12 @@ import {
|
||||
promptAndConfigureOpenAICompatibleSelfHostedProvider,
|
||||
} from "./provider-self-hosted-setup.js";
|
||||
|
||||
export { VLLM_DEFAULT_BASE_URL } from "../plugin-sdk/vllm.js";
|
||||
// Keep setup-side defaults local so the provider-setup barrel does not recurse
|
||||
// back through the generated plugin facade while vLLM's public surface loads.
|
||||
export const VLLM_DEFAULT_BASE_URL = "http://127.0.0.1:8000/v1";
|
||||
export const VLLM_PROVIDER_LABEL = "vLLM";
|
||||
export const VLLM_DEFAULT_API_KEY_ENV_VAR = "VLLM_API_KEY";
|
||||
export const VLLM_MODEL_PLACEHOLDER = "meta-llama/Meta-Llama-3-8B-Instruct";
|
||||
export const VLLM_DEFAULT_CONTEXT_WINDOW = SELF_HOSTED_DEFAULT_CONTEXT_WINDOW;
|
||||
export const VLLM_DEFAULT_MAX_TOKENS = SELF_HOSTED_DEFAULT_MAX_TOKENS;
|
||||
export const VLLM_DEFAULT_COST = SELF_HOSTED_DEFAULT_COST;
|
||||
|
||||
Reference in New Issue
Block a user