mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-06 08:50:43 +00:00
refactor: share ollama provider builder
This commit is contained in:
@@ -1,18 +1,10 @@
|
||||
import type { ProviderCatalogContext } from "openclaw/plugin-sdk/provider-catalog-shared";
|
||||
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
|
||||
import {
|
||||
OLLAMA_PROVIDER_ID,
|
||||
resolveOllamaDiscoveryResult,
|
||||
type OllamaPluginConfig,
|
||||
} from "./src/discovery-shared.js";
|
||||
import {
|
||||
buildOllamaModelDefinition,
|
||||
enrichOllamaModelsWithContext,
|
||||
fetchOllamaModels,
|
||||
resolveOllamaApiBase,
|
||||
} from "./src/provider-models.js";
|
||||
|
||||
// Cap on how many discovered models get context-window enrichment per pass;
// the list is truncated via slice(0, OLLAMA_CONTEXT_ENRICH_LIMIT) before
// enrichOllamaModelsWithContext is called, bounding per-model lookups.
const OLLAMA_CONTEXT_ENRICH_LIMIT = 200;
|
||||
import { buildOllamaProvider } from "./src/provider-models.js";
|
||||
|
||||
type OllamaProviderPlugin = {
|
||||
id: string;
|
||||
@@ -26,28 +18,6 @@ type OllamaProviderPlugin = {
|
||||
};
|
||||
};
|
||||
|
||||
async function buildOllamaProvider(
|
||||
configuredBaseUrl?: string,
|
||||
opts?: { quiet?: boolean },
|
||||
): Promise<ModelProviderConfig> {
|
||||
const apiBase = resolveOllamaApiBase(configuredBaseUrl);
|
||||
const { reachable, models } = await fetchOllamaModels(apiBase);
|
||||
if (!reachable && !opts?.quiet) {
|
||||
console.warn(`Ollama could not be reached at ${apiBase}.`);
|
||||
}
|
||||
const discovered = await enrichOllamaModelsWithContext(
|
||||
apiBase,
|
||||
models.slice(0, OLLAMA_CONTEXT_ENRICH_LIMIT),
|
||||
);
|
||||
return {
|
||||
baseUrl: apiBase,
|
||||
api: "ollama",
|
||||
models: discovered.map((model) =>
|
||||
buildOllamaModelDefinition(model.name, model.contextWindow, model.capabilities),
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
function resolveOllamaPluginConfig(ctx: ProviderCatalogContext): OllamaPluginConfig {
|
||||
const entries = (ctx.config.plugins?.entries ?? {}) as Record<
|
||||
string,
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
|
||||
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-onboard";
|
||||
import { fetchWithSsrFGuard } from "openclaw/plugin-sdk/ssrf-runtime";
|
||||
import {
|
||||
@@ -29,6 +30,7 @@ export type OllamaModelWithContext = OllamaTagModel & {
|
||||
};
|
||||
|
||||
// Concurrency bound for model-detail lookups (presumably Ollama's /api/show
// endpoint — the consuming code is outside this view; confirm).
const OLLAMA_SHOW_CONCURRENCY = 8;
// Cap on how many discovered models get context-window enrichment per pass.
const OLLAMA_CONTEXT_ENRICH_LIMIT = 200;
// Upper bound on memoized show-info entries — NOTE(review): eviction logic is
// not visible in this chunk; verify the cache actually honors this cap.
const MAX_OLLAMA_SHOW_CACHE_ENTRIES = 256;
// Memoizes in-flight/completed show-info requests per model name; cleared via
// resetOllamaModelShowInfoCacheForTest(). Storing the Promise (not the value)
// deduplicates concurrent lookups for the same model.
const ollamaModelShowInfoCache = new Map<string, Promise<OllamaModelShowInfo>>();
// Hostnames rejected unconditionally — metadata.google.internal is the GCP
// instance-metadata endpoint, a classic SSRF target (see fetchWithSsrFGuard).
const OLLAMA_ALWAYS_BLOCKED_HOSTNAMES = new Set(["metadata.google.internal"]);
|
||||
@@ -255,6 +257,28 @@ export async function fetchOllamaModels(
|
||||
}
|
||||
}
|
||||
|
||||
export async function buildOllamaProvider(
|
||||
configuredBaseUrl?: string,
|
||||
opts?: { quiet?: boolean },
|
||||
): Promise<ModelProviderConfig> {
|
||||
const apiBase = resolveOllamaApiBase(configuredBaseUrl);
|
||||
const { reachable, models } = await fetchOllamaModels(apiBase);
|
||||
if (!reachable && !opts?.quiet) {
|
||||
console.warn(`Ollama could not be reached at ${apiBase}.`);
|
||||
}
|
||||
const discovered = await enrichOllamaModelsWithContext(
|
||||
apiBase,
|
||||
models.slice(0, OLLAMA_CONTEXT_ENRICH_LIMIT),
|
||||
);
|
||||
return {
|
||||
baseUrl: apiBase,
|
||||
api: "ollama",
|
||||
models: discovered.map((model) =>
|
||||
buildOllamaModelDefinition(model.name, model.contextWindow, model.capabilities),
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Test-only helper: discards every memoized show-info lookup so tests start
 * from a cold cache.
 */
export function resetOllamaModelShowInfoCacheForTest(): void {
  ollamaModelShowInfoCache.clear();
}
|
||||
|
||||
@@ -27,6 +27,7 @@ import {
|
||||
} from "./defaults.js";
|
||||
import {
|
||||
buildOllamaBaseUrlSsrFPolicy,
|
||||
buildOllamaProvider,
|
||||
buildOllamaModelDefinition,
|
||||
enrichOllamaModelsWithContext,
|
||||
fetchOllamaModels,
|
||||
@@ -34,6 +35,8 @@ import {
|
||||
type OllamaModelWithContext,
|
||||
} from "./provider-models.js";
|
||||
|
||||
// Re-export the shared builder so existing importers of this module keep
// working after the refactor.
export { buildOllamaProvider };

// Model suggestions surfaced during setup: the configured default for local
// installs, and a fixed trio of `:cloud` models for Ollama-cloud users.
const OLLAMA_SUGGESTED_MODELS_LOCAL = [OLLAMA_DEFAULT_MODEL];
const OLLAMA_SUGGESTED_MODELS_CLOUD = ["kimi-k2.5:cloud", "minimax-m2.7:cloud", "glm-5.1:cloud"];
// Cap on models enriched with context metadata in one discovery pass.
const OLLAMA_CONTEXT_ENRICH_LIMIT = 200;
|
||||
@@ -49,12 +52,6 @@ type OllamaSetupResult = {
|
||||
credentialMode?: SecretInputMode;
|
||||
};
|
||||
|
||||
// Shape of the provider entry this module writes into the user config.
// NOTE(review): this structurally mirrors ModelProviderConfig from the plugin
// SDK — presumably kept local to avoid the import here; consider unifying.
type ProviderConfig = {
  baseUrl: string;
  api: "ollama";
  models: ReturnType<typeof buildOllamaModelDefinition>[];
};
|
||||
|
||||
type OllamaInteractiveMode = "cloud-local" | "cloud-only" | "local-only";
|
||||
type HostBackedOllamaInteractiveMode = Exclude<OllamaInteractiveMode, "cloud-only">;
|
||||
|
||||
@@ -473,28 +470,6 @@ async function promptAndConfigureHostBackedOllama(params: {
|
||||
};
|
||||
}
|
||||
|
||||
export async function buildOllamaProvider(
|
||||
configuredBaseUrl?: string,
|
||||
opts?: { quiet?: boolean },
|
||||
): Promise<ProviderConfig> {
|
||||
const apiBase = resolveOllamaApiBase(configuredBaseUrl);
|
||||
const { reachable, models } = await fetchOllamaModels(apiBase);
|
||||
if (!reachable && !opts?.quiet) {
|
||||
console.warn(`Ollama could not be reached at ${apiBase}.`);
|
||||
}
|
||||
const discovered = await enrichOllamaModelsWithContext(
|
||||
apiBase,
|
||||
models.slice(0, OLLAMA_CONTEXT_ENRICH_LIMIT),
|
||||
);
|
||||
return {
|
||||
baseUrl: apiBase,
|
||||
api: "ollama",
|
||||
models: discovered.map((model) =>
|
||||
buildOllamaModelDefinition(model.name, model.contextWindow, model.capabilities),
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
export async function promptAndConfigureOllama(params: {
|
||||
cfg: OpenClawConfig;
|
||||
env?: NodeJS.ProcessEnv;
|
||||
|
||||
Reference in New Issue
Block a user