From 56529d7850854a7ad40f363ef934146dbda4d580 Mon Sep 17 00:00:00 2001
From: Peter Steinberger <steipete@gmail.com>
Date: Mon, 20 Apr 2026 21:37:20 +0100
Subject: [PATCH] refactor: share ollama provider builder

---
 extensions/ollama/provider-discovery.ts  | 32 +-----------------------
 extensions/ollama/src/provider-models.ts | 24 ++++++++++++++++++
 extensions/ollama/src/setup.ts           | 31 +++--------------------
 3 files changed, 28 insertions(+), 59 deletions(-)

diff --git a/extensions/ollama/provider-discovery.ts b/extensions/ollama/provider-discovery.ts
index cb8671efc6d..75c43248d96 100644
--- a/extensions/ollama/provider-discovery.ts
+++ b/extensions/ollama/provider-discovery.ts
@@ -1,18 +1,10 @@
 import type { ProviderCatalogContext } from "openclaw/plugin-sdk/provider-catalog-shared";
-import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
 import {
   OLLAMA_PROVIDER_ID,
   resolveOllamaDiscoveryResult,
   type OllamaPluginConfig,
 } from "./src/discovery-shared.js";
-import {
-  buildOllamaModelDefinition,
-  enrichOllamaModelsWithContext,
-  fetchOllamaModels,
-  resolveOllamaApiBase,
-} from "./src/provider-models.js";
-
-const OLLAMA_CONTEXT_ENRICH_LIMIT = 200;
+import { buildOllamaProvider } from "./src/provider-models.js";
 
 type OllamaProviderPlugin = {
   id: string;
@@ -26,28 +18,6 @@
   };
 };
 
-async function buildOllamaProvider(
-  configuredBaseUrl?: string,
-  opts?: { quiet?: boolean },
-): Promise<ModelProviderConfig> {
-  const apiBase = resolveOllamaApiBase(configuredBaseUrl);
-  const { reachable, models } = await fetchOllamaModels(apiBase);
-  if (!reachable && !opts?.quiet) {
-    console.warn(`Ollama could not be reached at ${apiBase}.`);
-  }
-  const discovered = await enrichOllamaModelsWithContext(
-    apiBase,
-    models.slice(0, OLLAMA_CONTEXT_ENRICH_LIMIT),
-  );
-  return {
-    baseUrl: apiBase,
-    api: "ollama",
-    models: discovered.map((model) =>
-      buildOllamaModelDefinition(model.name, model.contextWindow, model.capabilities),
-    ),
-  };
-}
-
 function resolveOllamaPluginConfig(ctx: ProviderCatalogContext): OllamaPluginConfig {
   const entries = (ctx.config.plugins?.entries ?? {}) as Record<
     string,
diff --git a/extensions/ollama/src/provider-models.ts b/extensions/ollama/src/provider-models.ts
index 818bad79804..82ccd88fde5 100644
--- a/extensions/ollama/src/provider-models.ts
+++ b/extensions/ollama/src/provider-models.ts
@@ -1,3 +1,4 @@
+import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
 import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-onboard";
 import { fetchWithSsrFGuard } from "openclaw/plugin-sdk/ssrf-runtime";
 import {
@@ -29,6 +30,7 @@ export type OllamaModelWithContext = OllamaTagModel & {
 };
 
 const OLLAMA_SHOW_CONCURRENCY = 8;
+const OLLAMA_CONTEXT_ENRICH_LIMIT = 200;
 const MAX_OLLAMA_SHOW_CACHE_ENTRIES = 256;
 const ollamaModelShowInfoCache = new Map<string, Promise<OllamaShowInfo | null>>();
 const OLLAMA_ALWAYS_BLOCKED_HOSTNAMES = new Set(["metadata.google.internal"]);
@@ -255,6 +257,28 @@
   }
 }
 
+export async function buildOllamaProvider(
+  configuredBaseUrl?: string,
+  opts?: { quiet?: boolean },
+): Promise<ModelProviderConfig> {
+  const apiBase = resolveOllamaApiBase(configuredBaseUrl);
+  const { reachable, models } = await fetchOllamaModels(apiBase);
+  if (!reachable && !opts?.quiet) {
+    console.warn(`Ollama could not be reached at ${apiBase}.`);
+  }
+  const discovered = await enrichOllamaModelsWithContext(
+    apiBase,
+    models.slice(0, OLLAMA_CONTEXT_ENRICH_LIMIT),
+  );
+  return {
+    baseUrl: apiBase,
+    api: "ollama",
+    models: discovered.map((model) =>
+      buildOllamaModelDefinition(model.name, model.contextWindow, model.capabilities),
+    ),
+  };
+}
+
 export function resetOllamaModelShowInfoCacheForTest(): void {
   ollamaModelShowInfoCache.clear();
 }
diff --git a/extensions/ollama/src/setup.ts b/extensions/ollama/src/setup.ts
index eb7c4fd0007..779fd2706c5 100644
--- a/extensions/ollama/src/setup.ts
+++ b/extensions/ollama/src/setup.ts
@@ -27,6 +27,7 @@
 } from "./defaults.js";
 import {
   buildOllamaBaseUrlSsrFPolicy,
+  buildOllamaProvider,
   buildOllamaModelDefinition,
   enrichOllamaModelsWithContext,
   fetchOllamaModels,
@@ -34,6 +35,8 @@ import {
   type OllamaModelWithContext,
 } from "./provider-models.js";
 
+export { buildOllamaProvider };
+
 const OLLAMA_SUGGESTED_MODELS_LOCAL = [OLLAMA_DEFAULT_MODEL];
 const OLLAMA_SUGGESTED_MODELS_CLOUD = ["kimi-k2.5:cloud", "minimax-m2.7:cloud", "glm-5.1:cloud"];
 const OLLAMA_CONTEXT_ENRICH_LIMIT = 200;
@@ -49,12 +52,6 @@ type OllamaSetupResult = {
   credentialMode?: SecretInputMode;
 };
 
-type ProviderConfig = {
-  baseUrl: string;
-  api: "ollama";
-  models: ReturnType<typeof buildOllamaModelDefinition>[];
-};
-
 type OllamaInteractiveMode = "cloud-local" | "cloud-only" | "local-only";
 
 type HostBackedOllamaInteractiveMode = Exclude<OllamaInteractiveMode, "cloud-only">;
@@ -473,28 +470,6 @@ async function promptAndConfigureHostBackedOllama(params: {
   };
 }
 
-export async function buildOllamaProvider(
-  configuredBaseUrl?: string,
-  opts?: { quiet?: boolean },
-): Promise<ProviderConfig> {
-  const apiBase = resolveOllamaApiBase(configuredBaseUrl);
-  const { reachable, models } = await fetchOllamaModels(apiBase);
-  if (!reachable && !opts?.quiet) {
-    console.warn(`Ollama could not be reached at ${apiBase}.`);
-  }
-  const discovered = await enrichOllamaModelsWithContext(
-    apiBase,
-    models.slice(0, OLLAMA_CONTEXT_ENRICH_LIMIT),
-  );
-  return {
-    baseUrl: apiBase,
-    api: "ollama",
-    models: discovered.map((model) =>
-      buildOllamaModelDefinition(model.name, model.contextWindow, model.capabilities),
-    ),
-  };
-}
-
 export async function promptAndConfigureOllama(params: {
   cfg: OpenClawConfig;
   env?: NodeJS.ProcessEnv;