refactor: pluginize litellm auth onboarding

This commit is contained in:
Peter Steinberger
2026-03-27 12:25:55 +00:00
parent 324cddee4c
commit bcfddcc768
21 changed files with 271 additions and 328 deletions

View File

@@ -0,0 +1,41 @@
import { defineSingleProviderPluginEntry } from "openclaw/plugin-sdk/provider-entry";
import { applyLitellmConfig, LITELLM_DEFAULT_MODEL_REF } from "./onboard.js";
import { buildLitellmProvider } from "./provider-catalog.js";

const PROVIDER_ID = "litellm";

/**
 * Bundled LiteLLM provider plugin.
 *
 * Registers a single provider with one API-key auth method. The onboarding
 * wizard reads the `auth` entry to wire up its CLI flag, env var, and prompt;
 * the `catalog` entry supplies the provider/model defaults.
 */
export default defineSingleProviderPluginEntry({
  id: PROVIDER_ID,
  name: "LiteLLM Provider",
  description: "Bundled LiteLLM provider plugin",
  provider: {
    label: "LiteLLM",
    docsPath: "/providers/litellm",
    auth: [
      {
        methodId: "api-key",
        label: "LiteLLM API key",
        hint: "Unified gateway for 100+ LLM providers",
        optionKey: "litellmApiKey",
        flagName: "--litellm-api-key",
        envVar: "LITELLM_API_KEY",
        promptMessage: "Enter LiteLLM API key",
        defaultModel: LITELLM_DEFAULT_MODEL_REF,
        // Pass the applier directly — the previous `(cfg) => applyLitellmConfig(cfg)`
        // wrapper added nothing over the function reference.
        applyConfig: applyLitellmConfig,
        noteTitle: "LiteLLM",
        noteMessage: [
          "LiteLLM provides a unified API to 100+ LLM providers.",
          "Get your API key from your LiteLLM proxy or https://litellm.ai",
          "Default proxy runs on http://localhost:4000",
        ].join("\n"),
        wizard: {
          groupHint: "Unified LLM gateway (100+ providers)",
        },
      },
    ],
    catalog: {
      buildProvider: buildLitellmProvider,
      // Allow pointing at a remote LiteLLM proxy instead of the localhost default.
      allowExplicitBaseUrl: true,
    },
  },
});

View File

@@ -0,0 +1,55 @@
import {
createDefaultModelPresetAppliers,
type ModelDefinitionConfig,
type OpenClawConfig,
} from "openclaw/plugin-sdk/provider-onboard";
// Default base URL of a locally running LiteLLM proxy.
export const LITELLM_BASE_URL = "http://localhost:4000";
// Model preselected for new LiteLLM setups.
export const LITELLM_DEFAULT_MODEL_ID = "claude-opus-4-6";
// Fully-qualified "<provider>/<model>" reference used in config and aliases.
export const LITELLM_DEFAULT_MODEL_REF = `litellm/${LITELLM_DEFAULT_MODEL_ID}`;
// Limits advertised for the bundled default model entry.
const LITELLM_DEFAULT_CONTEXT_WINDOW = 128_000;
const LITELLM_DEFAULT_MAX_TOKENS = 8_192;
// Costs zeroed — presumably billing happens at the upstream provider behind
// the proxy; confirm before relying on these for cost reporting.
const LITELLM_DEFAULT_COST = {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
};
/**
 * Builds the bundled default model definition advertised for LiteLLM
 * (text + vision input, reasoning enabled, zero-cost placeholder pricing).
 */
export function buildLitellmModelDefinition(): ModelDefinitionConfig {
  const definition: ModelDefinitionConfig = {
    id: LITELLM_DEFAULT_MODEL_ID,
    name: "Claude Opus 4.6",
    reasoning: true,
    input: ["text", "image"],
    cost: LITELLM_DEFAULT_COST,
    contextWindow: LITELLM_DEFAULT_CONTEXT_WINDOW,
    maxTokens: LITELLM_DEFAULT_MAX_TOKENS,
  };
  return definition;
}
// Preset appliers shared by onboarding: they write the litellm provider block
// and (for applyConfig) promote the default model to the agent's primary model.
const litellmPresetAppliers = createDefaultModelPresetAppliers({
  primaryModelRef: LITELLM_DEFAULT_MODEL_REF,
  resolveParams: (cfg: OpenClawConfig) => {
    // Respect a baseUrl the user already configured; otherwise fall back to
    // the local proxy default.
    const provider = cfg.models?.providers?.litellm as { baseUrl?: unknown } | undefined;
    const configured = typeof provider?.baseUrl === "string" ? provider.baseUrl.trim() : "";
    const baseUrl = configured !== "" ? configured : LITELLM_BASE_URL;
    return {
      providerId: "litellm",
      api: "openai-completions" as const,
      baseUrl,
      defaultModel: buildLitellmModelDefinition(),
      defaultModelId: LITELLM_DEFAULT_MODEL_ID,
      aliases: [{ modelRef: LITELLM_DEFAULT_MODEL_REF, alias: "LiteLLM" }],
    };
  },
});
/** Writes the litellm provider/model config without changing the agent's primary model. */
export function applyLitellmProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
return litellmPresetAppliers.applyProviderConfig(cfg);
}
/** Writes the litellm provider/model config and promotes the default model to the agent's primary model. */
export function applyLitellmConfig(cfg: OpenClawConfig): OpenClawConfig {
return litellmPresetAppliers.applyConfig(cfg);
}

View File

@@ -0,0 +1,28 @@
{
"id": "litellm",
"providers": ["litellm"],
"providerAuthEnvVars": {
"litellm": ["LITELLM_API_KEY"]
},
"providerAuthChoices": [
{
"provider": "litellm",
"method": "api-key",
"choiceId": "litellm-api-key",
"choiceLabel": "LiteLLM API key",
"choiceHint": "Unified gateway for 100+ LLM providers",
"groupId": "litellm",
"groupLabel": "LiteLLM",
"groupHint": "Unified LLM gateway (100+ providers)",
"optionKey": "litellmApiKey",
"cliFlag": "--litellm-api-key",
"cliOption": "--litellm-api-key <key>",
"cliDescription": "LiteLLM API key"
}
],
"configSchema": {
"type": "object",
"additionalProperties": false,
"properties": {}
}
}

View File

@@ -0,0 +1,12 @@
{
"name": "@openclaw/litellm-provider",
"version": "2026.3.26",
"private": true,
"description": "OpenClaw LiteLLM provider plugin",
"type": "module",
"openclaw": {
"extensions": [
"./index.ts"
]
}
}

View File

@@ -0,0 +1,10 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-models";
import { buildLitellmModelDefinition, LITELLM_BASE_URL } from "./onboard.js";
/**
 * Catalog factory for the LiteLLM provider: OpenAI-compatible completions API
 * served from the default local proxy URL, exposing the bundled default model.
 */
export function buildLitellmProvider(): ModelProviderConfig {
  const defaultModels = [buildLitellmModelDefinition()];
  return {
    api: "openai-completions",
    baseUrl: LITELLM_BASE_URL,
    models: defaultModels,
  };
}

View File

@@ -20,21 +20,6 @@ export type AuthChoiceGroup = {
};
export const CORE_AUTH_CHOICE_OPTIONS: ReadonlyArray<AuthChoiceOption> = [
{
value: "chutes",
label: "Chutes (OAuth)",
groupId: "chutes",
groupLabel: "Chutes",
groupHint: "OAuth",
},
{
value: "litellm-api-key",
label: "LiteLLM API key",
hint: "Unified gateway for 100+ LLM providers",
groupId: "litellm",
groupLabel: "LiteLLM",
groupHint: "Unified LLM gateway (100+ providers)",
},
{
value: "custom-api-key",
label: "Custom Provider",

View File

@@ -39,6 +39,15 @@ describe("buildAuthChoiceOptions", () => {
it("includes core and provider-specific auth choices", () => {
resolveManifestProviderAuthChoices.mockReturnValue([
{
pluginId: "chutes",
providerId: "chutes",
methodId: "oauth",
choiceId: "chutes",
choiceLabel: "Chutes (OAuth)",
groupId: "chutes",
groupLabel: "Chutes",
},
{
pluginId: "github-copilot",
providerId: "github-copilot",
@@ -66,6 +75,15 @@ describe("buildAuthChoiceOptions", () => {
groupId: "openai",
groupLabel: "OpenAI",
},
{
pluginId: "litellm",
providerId: "litellm",
methodId: "api-key",
choiceId: "litellm-api-key",
choiceLabel: "LiteLLM API key",
groupId: "litellm",
groupLabel: "LiteLLM",
},
{
pluginId: "moonshot",
providerId: "moonshot",
@@ -206,6 +224,20 @@ describe("buildAuthChoiceOptions", () => {
it("builds cli help choices from the same catalog", () => {
resolveManifestProviderAuthChoices.mockReturnValue([
{
pluginId: "chutes",
providerId: "chutes",
methodId: "oauth",
choiceId: "chutes",
choiceLabel: "Chutes (OAuth)",
},
{
pluginId: "litellm",
providerId: "litellm",
methodId: "api-key",
choiceId: "litellm-api-key",
choiceLabel: "LiteLLM API key",
},
{
pluginId: "openai",
providerId: "openai",
@@ -277,11 +309,24 @@ describe("buildAuthChoiceOptions", () => {
expect(cliChoices).not.toContain("ollama");
expect(cliChoices).not.toContain("openai-api-key");
expect(cliChoices).not.toContain("chutes");
expect(cliChoices).not.toContain("litellm-api-key");
expect(cliChoices).toContain("custom-api-key");
expect(cliChoices).toContain("skip");
});
it("shows Chutes in grouped provider selection", () => {
resolveManifestProviderAuthChoices.mockReturnValue([]);
resolveManifestProviderAuthChoices.mockReturnValue([
{
pluginId: "chutes",
providerId: "chutes",
methodId: "oauth",
choiceId: "chutes",
choiceLabel: "Chutes (OAuth)",
groupId: "chutes",
groupLabel: "Chutes",
},
]);
const { groups } = buildAuthChoiceGroups({
store: EMPTY_STORE,
includeSkip: false,
@@ -292,6 +337,28 @@ describe("buildAuthChoiceOptions", () => {
expect(chutesGroup?.options.some((opt) => opt.value === "chutes")).toBe(true);
});
it("shows LiteLLM in grouped provider selection", () => {
resolveManifestProviderAuthChoices.mockReturnValue([
{
pluginId: "litellm",
providerId: "litellm",
methodId: "api-key",
choiceId: "litellm-api-key",
choiceLabel: "LiteLLM API key",
groupId: "litellm",
groupLabel: "LiteLLM",
},
]);
const { groups } = buildAuthChoiceGroups({
store: EMPTY_STORE,
includeSkip: false,
});
const litellmGroup = groups.find((group) => group.value === "litellm");
expect(litellmGroup).toBeDefined();
expect(litellmGroup?.options.some((opt) => opt.value === "litellm-api-key")).toBe(true);
});
it("groups OpenCode Zen and Go under one OpenCode entry", () => {
resolveManifestProviderAuthChoices.mockReturnValue([
{

View File

@@ -1,96 +0,0 @@
import { ensureAuthProfileStore, resolveAuthProfileOrder } from "../agents/auth-profiles.js";
import { applyAuthProfileConfig } from "../plugins/provider-auth-helpers.js";
import { LITELLM_DEFAULT_MODEL_REF, setLitellmApiKey } from "../plugins/provider-auth-storage.js";
import { normalizeApiKeyInput, validateApiKeyInput } from "./auth-choice.api-key.js";
import { ensureApiKeyFromOptionEnvOrPrompt } from "./auth-choice.apply-helpers.js";
import type { ApplyAuthChoiceParams, ApplyAuthChoiceResult } from "./auth-choice.apply.js";
import { applyLitellmConfig, applyLitellmProviderConfig } from "./onboard-auth.config-litellm.js";
import type { SecretInputMode } from "./onboard-types.js";
// Applies a provider-specific transform to the onboarding config.
type ApiKeyProviderConfigApplier = (
config: ApplyAuthChoiceParams["config"],
) => ApplyAuthChoiceParams["config"];
// Callback that records the provider's default model in the mutable onboarding state.
type ApplyProviderDefaultModel = (args: {
defaultModel: string;
applyDefaultConfig: ApiKeyProviderConfigApplier;
applyProviderConfig: ApiKeyProviderConfigApplier;
noteDefault?: string;
}) => Promise<void>;
// Mutable-state bundle threaded through the per-provider auth-choice handlers.
type ApplyApiKeyProviderParams = {
params: ApplyAuthChoiceParams;
authChoice: string;
config: ApplyAuthChoiceParams["config"];
setConfig: (config: ApplyAuthChoiceParams["config"]) => void;
getConfig: () => ApplyAuthChoiceParams["config"];
normalizedTokenProvider?: string;
requestedSecretInputMode?: SecretInputMode;
applyProviderDefaultModel: ApplyProviderDefaultModel;
getAgentModelOverride: () => string | undefined;
};
/**
 * Handles the "litellm-api-key" auth choice: reuses an existing stored API-key
 * profile when one exists, otherwise resolves a key from the token option, the
 * LITELLM_API_KEY env var, or an interactive prompt, then records the auth
 * profile and applies the litellm provider + default-model config.
 *
 * Returns null when the choice is not "litellm-api-key" so the caller can try
 * the next handler.
 */
export async function applyLiteLlmApiKeyProvider({
params,
authChoice,
config,
setConfig,
getConfig,
normalizedTokenProvider,
requestedSecretInputMode,
applyProviderDefaultModel,
getAgentModelOverride,
}: ApplyApiKeyProviderParams): Promise<ApplyAuthChoiceResult | null> {
if (authChoice !== "litellm-api-key") {
return null;
}
let nextConfig = config;
// Look for an already-stored litellm API-key profile, without prompting the keychain.
const store = ensureAuthProfileStore(params.agentDir, { allowKeychainPrompt: false });
const profileOrder = resolveAuthProfileOrder({ cfg: nextConfig, store, provider: "litellm" });
const existingProfileId = profileOrder.find((profileId) => Boolean(store.profiles[profileId]));
const existingCred = existingProfileId ? store.profiles[existingProfileId] : undefined;
let profileId = "litellm:default";
let hasCredential = Boolean(existingProfileId && existingCred?.type === "api_key");
if (hasCredential && existingProfileId) {
// Reuse the existing profile id instead of creating "litellm:default".
profileId = existingProfileId;
}
if (!hasCredential) {
// No stored credential: resolve from --token / env / prompt and persist it.
await ensureApiKeyFromOptionEnvOrPrompt({
token: params.opts?.token,
tokenProvider: normalizedTokenProvider,
secretInputMode: requestedSecretInputMode,
config: nextConfig,
expectedProviders: ["litellm"],
provider: "litellm",
envLabel: "LITELLM_API_KEY",
promptMessage: "Enter LiteLLM API key",
normalize: normalizeApiKeyInput,
validate: validateApiKeyInput,
prompter: params.prompter,
setCredential: async (apiKey, mode) =>
setLitellmApiKey(apiKey, params.agentDir, { secretInputMode: mode }),
noteMessage:
"LiteLLM provides a unified API to 100+ LLM providers.\nGet your API key from your LiteLLM proxy or https://litellm.ai\nDefault proxy runs on http://localhost:4000",
noteTitle: "LiteLLM",
});
hasCredential = true;
}
if (hasCredential) {
// Point the config's auth profile at the stored api_key credential.
nextConfig = applyAuthProfileConfig(nextConfig, {
profileId,
provider: "litellm",
mode: "api_key",
});
}
setConfig(nextConfig);
// Record the litellm default model (and provider config) in onboarding state.
await applyProviderDefaultModel({
defaultModel: LITELLM_DEFAULT_MODEL_REF,
applyDefaultConfig: applyLitellmConfig,
applyProviderConfig: applyLitellmProviderConfig,
noteDefault: LITELLM_DEFAULT_MODEL_REF,
});
return { config: getConfig(), agentModelOverride: getAgentModelOverride() };
}

View File

@@ -1,17 +1,8 @@
import { resolveManifestProviderApiKeyChoice } from "../plugins/provider-auth-choices.js";
import {
createAuthChoiceDefaultModelApplierForMutableState,
normalizeSecretInputModeInput,
normalizeTokenProviderInput,
} from "./auth-choice.apply-helpers.js";
import { applyLiteLlmApiKeyProvider } from "./auth-choice.apply.api-key-providers.js";
import { normalizeTokenProviderInput } from "./auth-choice.apply-helpers.js";
import type { ApplyAuthChoiceParams, ApplyAuthChoiceResult } from "./auth-choice.apply.js";
import type { AuthChoice } from "./onboard-types.js";
const CORE_API_KEY_TOKEN_PROVIDER_AUTH_CHOICES: Partial<Record<string, AuthChoice>> = {
litellm: "litellm-api-key",
};
export function normalizeApiKeyTokenProviderAuthChoice(params: {
authChoice: AuthChoice;
tokenProvider?: string;
@@ -32,48 +23,12 @@ export function normalizeApiKeyTokenProviderAuthChoice(params: {
config: params.config,
workspaceDir: params.workspaceDir,
env: params.env,
})?.choiceId as AuthChoice | undefined) ??
CORE_API_KEY_TOKEN_PROVIDER_AUTH_CHOICES[normalizedTokenProvider] ??
params.authChoice
})?.choiceId as AuthChoice | undefined) ?? params.authChoice
);
}
export async function applyAuthChoiceApiProviders(
params: ApplyAuthChoiceParams,
_params: ApplyAuthChoiceParams,
): Promise<ApplyAuthChoiceResult | null> {
let nextConfig = params.config;
let agentModelOverride: string | undefined;
const applyProviderDefaultModel = createAuthChoiceDefaultModelApplierForMutableState(
params,
() => nextConfig,
(config) => (nextConfig = config),
() => agentModelOverride,
(model) => (agentModelOverride = model),
);
const authChoice = normalizeApiKeyTokenProviderAuthChoice({
authChoice: params.authChoice,
tokenProvider: params.opts?.tokenProvider,
config: params.config,
env: process.env,
});
const normalizedTokenProvider = normalizeTokenProviderInput(params.opts?.tokenProvider);
const requestedSecretInputMode = normalizeSecretInputModeInput(params.opts?.secretInputMode);
const litellmResult = await applyLiteLlmApiKeyProvider({
params,
authChoice,
config: nextConfig,
setConfig: (config) => (nextConfig = config),
getConfig: () => nextConfig,
normalizedTokenProvider,
requestedSecretInputMode,
applyProviderDefaultModel,
getAgentModelOverride: () => agentModelOverride,
});
if (litellmResult) {
return litellmResult;
}
return null;
}

View File

@@ -8,6 +8,7 @@ import googlePlugin from "../../extensions/google/index.js";
import huggingfacePlugin from "../../extensions/huggingface/index.js";
import kimiCodingPlugin from "../../extensions/kimi-coding/index.js";
import minimaxPlugin from "../../extensions/minimax/index.js";
import litellmPlugin from "../../extensions/litellm/index.js";
import mistralPlugin from "../../extensions/mistral/index.js";
import moonshotPlugin from "../../extensions/moonshot/index.js";
import ollamaPlugin from "../../extensions/ollama/index.js";
@@ -94,6 +95,7 @@ function createDefaultProviderPlugins() {
googlePlugin,
huggingfacePlugin,
kimiCodingPlugin,
litellmPlugin,
minimaxPlugin,
mistralPlugin,
moonshotPlugin,

View File

@@ -1,65 +0,0 @@
import type { OpenClawConfig } from "../config/config.js";
import { LITELLM_DEFAULT_MODEL_REF } from "../plugins/provider-auth-storage.js";
import {
applyAgentDefaultModelPrimary,
applyProviderConfigWithDefaultModel,
} from "../plugins/provider-onboarding-config.js";
// Default base URL of a locally running LiteLLM proxy.
export const LITELLM_BASE_URL = "http://localhost:4000";
// Model preselected for new LiteLLM setups.
export const LITELLM_DEFAULT_MODEL_ID = "claude-opus-4-6";
// Limits advertised for the bundled default model entry.
const LITELLM_DEFAULT_CONTEXT_WINDOW = 128_000;
const LITELLM_DEFAULT_MAX_TOKENS = 8_192;
// Costs zeroed — presumably billed by the upstream provider behind the proxy; confirm.
const LITELLM_DEFAULT_COST = {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
};
// Builds the bundled default model definition advertised for LiteLLM.
function buildLitellmModelDefinition(): {
id: string;
name: string;
reasoning: boolean;
input: Array<"text" | "image">;
cost: { input: number; output: number; cacheRead: number; cacheWrite: number };
contextWindow: number;
maxTokens: number;
} {
return {
id: LITELLM_DEFAULT_MODEL_ID,
name: "Claude Opus 4.6",
reasoning: true,
input: ["text", "image"],
cost: LITELLM_DEFAULT_COST,
contextWindow: LITELLM_DEFAULT_CONTEXT_WINDOW,
maxTokens: LITELLM_DEFAULT_MAX_TOKENS,
};
}
/**
 * Writes the litellm provider/model config (preserving any user-configured
 * baseUrl and model alias) without changing the agent's primary model.
 */
export function applyLitellmProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
// Keep an existing alias if present; otherwise label the default model "LiteLLM".
const models = { ...cfg.agents?.defaults?.models };
models[LITELLM_DEFAULT_MODEL_REF] = {
...models[LITELLM_DEFAULT_MODEL_REF],
alias: models[LITELLM_DEFAULT_MODEL_REF]?.alias ?? "LiteLLM",
};
const defaultModel = buildLitellmModelDefinition();
// Respect a baseUrl the user already configured; fall back to the local proxy.
const existingProvider = cfg.models?.providers?.litellm as { baseUrl?: unknown } | undefined;
const resolvedBaseUrl =
typeof existingProvider?.baseUrl === "string" ? existingProvider.baseUrl.trim() : "";
return applyProviderConfigWithDefaultModel(cfg, {
agentModels: models,
providerId: "litellm",
api: "openai-completions",
baseUrl: resolvedBaseUrl || LITELLM_BASE_URL,
defaultModel,
defaultModelId: LITELLM_DEFAULT_MODEL_ID,
});
}
/** Same as applyLitellmProviderConfig, but also makes the default model the agent's primary model. */
export function applyLitellmConfig(cfg: OpenClawConfig): OpenClawConfig {
const next = applyLitellmProviderConfig(cfg);
return applyAgentDefaultModelPrimary(next, LITELLM_DEFAULT_MODEL_REF);
}

View File

@@ -36,6 +36,7 @@ import {
} from "../../extensions/xai/onboard.js";
import { applyXiaomiConfig, applyXiaomiProviderConfig } from "../../extensions/xiaomi/onboard.js";
import { applyZaiConfig, applyZaiProviderConfig } from "../../extensions/zai/onboard.js";
import { applyLitellmProviderConfig } from "../../extensions/litellm/onboard.js";
import { SYNTHETIC_DEFAULT_MODEL_ID } from "../agents/synthetic-models.js";
import type { OpenClawConfig } from "../config/config.js";
import {
@@ -55,7 +56,6 @@ import {
ZAI_CODING_CN_BASE_URL,
ZAI_GLOBAL_BASE_URL,
} from "../plugins/provider-model-definitions.js";
import { applyLitellmProviderConfig } from "./onboard-auth.config-litellm.js";
import {
createAuthTestLifecycle,
readAuthProfilesForAgent,

View File

@@ -10,12 +10,4 @@ export type OnboardCoreAuthFlag = {
description: string;
};
export const CORE_ONBOARD_AUTH_FLAGS: ReadonlyArray<OnboardCoreAuthFlag> = [
{
optionKey: "litellmApiKey",
authChoice: "litellm-api-key",
cliFlag: "--litellm-api-key",
cliOption: "--litellm-api-key <key>",
description: "LiteLLM API key",
},
];
export const CORE_ONBOARD_AUTH_FLAGS: ReadonlyArray<OnboardCoreAuthFlag> = [];

View File

@@ -37,6 +37,7 @@ vi.mock("./onboard-non-interactive/local/auth-choice.plugin-providers.js", async
{ default: volcenginePlugin },
{ default: byteplusPlugin },
{ default: anthropicPlugin },
{ default: litellmPlugin },
{ default: mistralPlugin },
{ default: togetherPlugin },
{ default: qianfanPlugin },
@@ -57,6 +58,7 @@ vi.mock("./onboard-non-interactive/local/auth-choice.plugin-providers.js", async
import("../../extensions/volcengine/index.ts"),
import("../../extensions/byteplus/index.ts"),
import("../../extensions/anthropic/index.ts"),
import("../../extensions/litellm/index.ts"),
import("../../extensions/mistral/index.ts"),
import("../../extensions/together/index.ts"),
import("../../extensions/qianfan/index.ts"),
@@ -100,6 +102,7 @@ vi.mock("./onboard-non-interactive/local/auth-choice.plugin-providers.js", async
volcenginePlugin,
byteplusPlugin,
anthropicPlugin,
litellmPlugin,
mistralPlugin,
togetherPlugin,
qianfanPlugin,

View File

@@ -1,67 +0,0 @@
import type { OpenClawConfig } from "../../../config/config.js";
import type { SecretInput } from "../../../config/types.secrets.js";
import { applyAuthProfileConfig } from "../../../plugins/provider-auth-helpers.js";
import { setLitellmApiKey } from "../../../plugins/provider-auth-storage.js";
import type { RuntimeEnv } from "../../../runtime.js";
import { applyLitellmConfig } from "../../onboard-auth.config-litellm.js";
import type { AuthChoice, OnboardOptions } from "../../onboard-types.js";
// How a resolved API key gets persisted: raw value or a secret reference.
type ApiKeyStorageOptions = {
secretInputMode: "plaintext" | "ref";
};
// A resolved key plus where it came from, for precedence decisions.
type ResolvedNonInteractiveApiKey = {
key: string;
source: "profile" | "env" | "flag";
};
/**
 * Non-interactive handler for the "litellm-api-key" auth choice.
 *
 * Return contract: undefined when the choice is not litellm's (caller falls
 * through to the next handler), null when no key could be resolved or stored
 * (non-interactive onboarding cannot proceed), and the updated config on success.
 */
export async function applySimpleNonInteractiveApiKeyChoice(params: {
authChoice: AuthChoice;
nextConfig: OpenClawConfig;
baseConfig: OpenClawConfig;
opts: OnboardOptions;
runtime: RuntimeEnv;
apiKeyStorageOptions?: ApiKeyStorageOptions;
resolveApiKey: (input: {
provider: string;
cfg: OpenClawConfig;
flagValue?: string;
flagName: `--${string}`;
envVar: string;
runtime: RuntimeEnv;
}) => Promise<ResolvedNonInteractiveApiKey | null>;
maybeSetResolvedApiKey: (
resolved: ResolvedNonInteractiveApiKey,
setter: (value: SecretInput) => Promise<void> | void,
) => Promise<boolean>;
}): Promise<OpenClawConfig | null | undefined> {
// Not our choice — let the next handler run.
if (params.authChoice !== "litellm-api-key") {
return undefined;
}
// Resolve the key from profile / env / flag via the injected resolver.
const resolved = await params.resolveApiKey({
provider: "litellm",
cfg: params.baseConfig,
flagValue: params.opts.litellmApiKey,
flagName: "--litellm-api-key",
envVar: "LITELLM_API_KEY",
runtime: params.runtime,
});
if (!resolved) {
return null;
}
// Persist the key; abort if the caller declined or storage failed.
if (
!(await params.maybeSetResolvedApiKey(resolved, (value) =>
setLitellmApiKey(value, undefined, params.apiKeyStorageOptions),
))
) {
return null;
}
// Record the auth profile, then apply the litellm provider/default-model config.
return applyLitellmConfig(
applyAuthProfileConfig(params.nextConfig, {
profileId: "litellm:default",
provider: "litellm",
mode: "api_key",
}),
);
}

View File

@@ -2,13 +2,6 @@ import { beforeEach, describe, expect, it, vi } from "vitest";
import type { OpenClawConfig } from "../../../config/config.js";
import { applyNonInteractiveAuthChoice } from "./auth-choice.js";
const applySimpleNonInteractiveApiKeyChoice = vi.hoisted(() =>
vi.fn<() => Promise<OpenClawConfig | null | undefined>>(async () => undefined),
);
vi.mock("./auth-choice.api-key-providers.js", () => ({
applySimpleNonInteractiveApiKeyChoice,
}));
const applyNonInteractivePluginProviderChoice = vi.hoisted(() => vi.fn(async () => undefined));
vi.mock("./auth-choice.plugin-providers.js", () => ({
applyNonInteractivePluginProviderChoice,
@@ -32,7 +25,7 @@ function createRuntime() {
}
describe("applyNonInteractiveAuthChoice", () => {
it("resolves plugin provider auth before builtin API key fallbacks", async () => {
it("resolves plugin provider auth before builtin custom-provider handling", async () => {
const runtime = createRuntime();
const nextConfig = { agents: { defaults: {} } } as OpenClawConfig;
const resolvedConfig = { auth: { profiles: { "openai:default": { mode: "api_key" } } } };
@@ -48,6 +41,5 @@ describe("applyNonInteractiveAuthChoice", () => {
expect(result).toBe(resolvedConfig);
expect(applyNonInteractivePluginProviderChoice).toHaveBeenCalledOnce();
expect(applySimpleNonInteractiveApiKeyChoice).not.toHaveBeenCalled();
});
});

View File

@@ -20,7 +20,6 @@ import {
} from "../../onboard-custom.js";
import type { AuthChoice, OnboardOptions } from "../../onboard-types.js";
import { resolveNonInteractiveApiKey } from "../api-keys.js";
import { applySimpleNonInteractiveApiKeyChoice } from "./auth-choice.api-key-providers.js";
import { applyNonInteractivePluginProviderChoice } from "./auth-choice.plugin-providers.js";
type ResolvedNonInteractiveApiKey = NonNullable<
@@ -172,20 +171,6 @@ export async function applyNonInteractiveAuthChoice(params: {
return pluginProviderChoice;
}
const simpleApiKeyChoice = await applySimpleNonInteractiveApiKeyChoice({
authChoice,
nextConfig,
baseConfig,
opts,
runtime,
apiKeyStorageOptions,
resolveApiKey,
maybeSetResolvedApiKey,
});
if (simpleApiKeyChoice !== undefined) {
return simpleApiKeyChoice;
}
if (authChoice === "cloudflare-ai-gateway-api-key") {
const accountId = opts.cloudflareAiGatewayAccountId?.trim() ?? "";
const gatewayId = opts.cloudflareAiGatewayGatewayId?.trim() ?? "";

View File

@@ -33,6 +33,7 @@ export async function loadGeneratedBundledPluginEntries() {
kilocodePluginModule,
kimiCodingPluginModule,
linePluginModule,
litellmPluginModule,
llmTaskPluginModule,
lobsterPluginModule,
matrixPluginModule,
@@ -111,6 +112,7 @@ export async function loadGeneratedBundledPluginEntries() {
import("../../extensions/kilocode/index.js"),
import("../../extensions/kimi-coding/index.js"),
import("../../extensions/line/index.js"),
import("../../extensions/litellm/index.js"),
import("../../extensions/llm-task/index.js"),
import("../../extensions/lobster/index.js"),
import("../../extensions/matrix/index.js"),
@@ -190,6 +192,7 @@ export async function loadGeneratedBundledPluginEntries() {
kilocodePluginModule.default,
kimiCodingPluginModule.default,
linePluginModule.default,
litellmPluginModule.default,
llmTaskPluginModule.default,
lobsterPluginModule.default,
matrixPluginModule.default,

View File

@@ -8010,6 +8010,48 @@ export const GENERATED_BUNDLED_PLUGIN_METADATA = [
},
},
},
{
dirName: "litellm",
idHint: "litellm",
source: {
source: "./index.ts",
built: "index.js",
},
packageName: "@openclaw/litellm-provider",
packageVersion: "2026.3.26",
packageDescription: "OpenClaw LiteLLM provider plugin",
packageManifest: {
extensions: ["./index.ts"],
},
manifest: {
id: "litellm",
configSchema: {
type: "object",
additionalProperties: false,
properties: {},
},
providers: ["litellm"],
providerAuthEnvVars: {
litellm: ["LITELLM_API_KEY"],
},
providerAuthChoices: [
{
provider: "litellm",
method: "api-key",
choiceId: "litellm-api-key",
choiceLabel: "LiteLLM API key",
choiceHint: "Unified gateway for 100+ LLM providers",
groupId: "litellm",
groupLabel: "LiteLLM",
groupHint: "Unified LLM gateway (100+ providers)",
optionKey: "litellmApiKey",
cliFlag: "--litellm-api-key",
cliOption: "--litellm-api-key <key>",
cliDescription: "LiteLLM API key",
},
],
},
},
{
dirName: "llm-task",
idHint: "llm-task",

View File

@@ -16,6 +16,7 @@ export const BUNDLED_PROVIDER_AUTH_ENV_VAR_CANDIDATES = {
kilocode: ["KILOCODE_API_KEY"],
kimi: ["KIMI_API_KEY", "KIMICODE_API_KEY"],
"kimi-coding": ["KIMI_API_KEY", "KIMICODE_API_KEY"],
litellm: ["LITELLM_API_KEY"],
"microsoft-foundry": ["AZURE_OPENAI_API_KEY"],
minimax: ["MINIMAX_API_KEY"],
"minimax-portal": ["MINIMAX_OAUTH_TOKEN", "MINIMAX_API_KEY"],

View File

@@ -3,8 +3,6 @@ import type { OpenClawConfig } from "../config/config.js";
import { resolveManifestProviderAuthChoice } from "./provider-auth-choices.js";
const PREFERRED_PROVIDER_BY_AUTH_CHOICE: Partial<Record<string, string>> = {
chutes: "chutes",
"litellm-api-key": "litellm",
"custom-api-key": "custom",
};