fix(ollama): skip localhost discovery for remote providers

This commit is contained in:
Peter Steinberger
2026-04-27 06:24:43 +01:00
parent b94ad7c9d8
commit 52249927ac
4 changed files with 117 additions and 5 deletions

View File

@@ -48,6 +48,7 @@ Docs: https://docs.openclaw.ai
- Providers/Ollama: strip the active custom Ollama provider prefix before native chat and embedding requests, so custom provider ids like `ollama-spark/qwen3:32b` reach Ollama as the real model name. Fixes #72353. Thanks @maximus-dss and @hclsys.
- Providers/Ollama: parse stringified native tool-call arguments before dispatch, preserving unsafe integer values so Ollama tool use receives structured parameters. Fixes #69735; supersedes #69910. Thanks @rongshuzhao and @yfge.
- Providers/Ollama: skip ambient localhost discovery unless Ollama auth or meaningful config opts in, preventing unexpected probes to `127.0.0.1:11434` for users who are not using Ollama. Fixes #56939; supersedes #57116. Thanks @IanxDev and @tsukhani.
- Providers/Ollama: skip implicit localhost discovery when a custom remote `api: "ollama"` provider is configured, while still treating `127/8` loopback hosts as local. Carries forward #43224. Thanks @issacthekaylon.
- Providers/Ollama: move memory embeddings to Ollama's current `/api/embed` endpoint with batched `input` requests while preserving vector normalization and custom provider auth/header overrides. Fixes #39983. Thanks @sskkcc and @LiudengZhang.
- Providers/Ollama: route local web search through Ollama's signed `/api/experimental/web_search` daemon proxy, use hosted `/api/web_search` directly for `ollama.com`, and keep `OLLAMA_API_KEY` scoped to cloud fallback auth. Fixes #69132. Thanks @yoon1012 and @hyspacex.
- Providers/Ollama: accept OpenAI SDK-style `baseURL` as an alias for `baseUrl` across discovery, streaming, setup pulls, embeddings, and web search so remote Ollama hosts are not silently ignored. Fixes #62533; supersedes #62549. Thanks @Julien-BKK and @Linux2010.

View File

@@ -174,7 +174,7 @@ Choose your preferred setup method and mode.
## Model discovery (implicit provider)
When you set `OLLAMA_API_KEY` (or an auth profile) and **do not** define `models.providers.ollama`, OpenClaw discovers models from the local Ollama instance at `http://127.0.0.1:11434`.
When you set `OLLAMA_API_KEY` (or an auth profile) and **do not** define `models.providers.ollama` or another custom remote provider with `api: "ollama"`, OpenClaw discovers models from the local Ollama instance at `http://127.0.0.1:11434`.
| Behavior | Detail |
| -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
@@ -202,7 +202,7 @@ ollama pull mistral
The new model will be automatically discovered and available to use.
<Note>
If you set `models.providers.ollama` explicitly, auto-discovery is skipped and you must define models manually. See the explicit config section below.
If you set `models.providers.ollama` explicitly, or configure a custom remote provider such as `models.providers.ollama-cloud` with `api: "ollama"`, auto-discovery is skipped and you must define models manually. Loopback custom providers such as `http://127.0.0.2:11434` are still treated as local. See the explicit config section below.
</Note>
## Vision and image description

View File

@@ -369,6 +369,64 @@ describe("ollama plugin", () => {
});
});
// A non-loopback custom provider declaring api: "ollama" must suppress
// the implicit probe of the local daemon entirely.
it("skips implicit localhost discovery when a custom remote Ollama provider is configured", async () => {
  const plugin = registerProvider();
  const discoveryContext = {
    config: {
      models: {
        providers: {
          "ollama-cloud": {
            api: "ollama",
            baseUrl: "https://ollama.com",
            models: [{ id: "kimi-k2.5", name: "Kimi K2.5" }],
          },
        },
      },
    },
    env: { NODE_ENV: "development", OLLAMA_API_KEY: "ollama-live" },
    resolveProviderApiKey: () => ({ apiKey: "ollama-live" }),
  } as never;
  const discovered = await plugin.discovery.run(discoveryContext);
  expect(discovered).toBeNull();
  expect(buildOllamaProviderMock).not.toHaveBeenCalled();
});
// A custom provider on another 127/8 address is still "local", so the
// implicit discovery of the default daemon must proceed as usual.
it("treats custom 127/8 Ollama providers as loopback for implicit discovery", async () => {
  const plugin = registerProvider();
  buildOllamaProviderMock.mockResolvedValueOnce({
    baseUrl: "http://127.0.0.1:11434",
    api: "ollama",
    models: [],
  });
  const discoveryContext = {
    config: {
      models: {
        providers: {
          "ollama-alt-local": {
            api: "ollama",
            baseUrl: "http://127.0.0.2:11434",
            models: [{ id: "llama3.2", name: "Llama 3.2" }],
          },
        },
      },
    },
    env: { NODE_ENV: "development", OLLAMA_API_KEY: "ollama-live" },
    resolveProviderApiKey: () => ({ apiKey: "ollama-live" }),
  } as never;
  const discovered = await plugin.discovery.run(discoveryContext);
  expect(discovered).toMatchObject({
    provider: {
      baseUrl: "http://127.0.0.1:11434",
      api: "ollama",
    },
  });
  expect(buildOllamaProviderMock).toHaveBeenCalledWith(undefined, {
    quiet: false,
  });
});
it("does not mint synthetic auth for empty default-ish provider stubs", () => {
const provider = registerProvider();

View File

@@ -15,9 +15,7 @@ export type OllamaPluginConfig = {
type OllamaDiscoveryContext = {
config: {
models?: {
providers?: {
ollama?: ModelProviderConfig;
};
providers?: Record<string, ModelProviderConfig | undefined>;
ollamaDiscovery?: {
enabled?: boolean;
};
@@ -73,6 +71,17 @@ function shouldSkipAmbientOllamaDiscovery(env: NodeJS.ProcessEnv): boolean {
// Hostnames that always mean "this machine" for discovery purposes.
const LOCAL_OLLAMA_HOSTNAMES = new Set(["localhost", "127.0.0.1", "0.0.0.0", "::1", "::"]);

/**
 * True when `host` is a dotted-quad IPv4 address inside the 127/8
 * loopback range. Non-IPv4 strings and out-of-range octets return false.
 */
function isIpv4Loopback(host: string): boolean {
  const match = /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/.exec(host);
  if (match === null) {
    return false;
  }
  const octets = match.slice(1).map((digits) => Number.parseInt(digits, 10));
  const allOctetsValid = octets.every(
    (octet) => Number.isInteger(octet) && octet >= 0 && octet <= 255,
  );
  // Only the first octet decides loopback membership (127.0.0.0/8).
  return allOctetsValid && octets[0] === 127;
}
function isIpv4PrivateRange(host: string): boolean {
if (!/^\d+\.\d+\.\d+\.\d+$/.test(host)) {
return false;
@@ -113,6 +122,44 @@ export function isLocalOllamaBaseUrl(baseUrl: string | undefined | null): boolea
);
}
/**
 * True when `baseUrl` points at the local machine. A missing base URL
 * counts as loopback (discovery defaults to 127.0.0.1); an unparseable
 * URL does not.
 */
function isLoopbackOllamaBaseUrl(baseUrl: string | undefined | null): boolean {
  if (!baseUrl) {
    return true;
  }
  let parsed: URL;
  try {
    parsed = new URL(baseUrl);
  } catch {
    return false;
  }
  const rawHost = parsed.hostname.toLowerCase();
  // WHATWG URL keeps IPv6 hosts wrapped in brackets; unwrap before matching.
  const host =
    rawHost.startsWith("[") && rawHost.endsWith("]") ? rawHost.slice(1, -1) : rawHost;
  return LOCAL_OLLAMA_HOSTNAMES.has(host) || isIpv4Loopback(host);
}
/**
 * True when the config declares at least one custom provider (any id
 * other than the canonical Ollama provider id) that speaks the Ollama
 * API from a non-loopback base URL. Entries without a usable base URL,
 * and loopback hosts, do not count as remote.
 */
function hasExplicitRemoteOllamaApiProvider(
  providers: Record<string, ModelProviderConfig | undefined> | undefined,
): boolean {
  if (!providers) {
    return false;
  }
  return Object.entries(providers).some(([id, candidate]) => {
    // The canonical provider is handled separately by explicit-config logic.
    if (id === OLLAMA_PROVIDER_ID || !candidate) {
      return false;
    }
    if (normalizeOptionalString(candidate.api)?.toLowerCase() !== "ollama") {
      return false;
    }
    const baseUrl = readProviderBaseUrl(candidate);
    return Boolean(baseUrl) && !isLoopbackOllamaBaseUrl(baseUrl);
  });
}
export function shouldUseSyntheticOllamaAuth(
providerConfig: ModelProviderConfig | undefined,
): boolean {
@@ -171,6 +218,9 @@ export async function resolveOllamaDiscoveryResult(params: {
const explicit = params.ctx.config.models?.providers?.ollama;
const hasExplicitModels = Array.isArray(explicit?.models) && explicit.models.length > 0;
const hasMeaningfulExplicitConfig = hasMeaningfulExplicitOllamaConfig(explicit);
const hasRemoteOllamaApiProvider = hasExplicitRemoteOllamaApiProvider(
params.ctx.config.models?.providers,
);
const discoveryEnabled =
params.pluginConfig.discovery?.enabled ?? params.ctx.config.models?.ollamaDiscovery?.enabled;
if (!hasExplicitModels && discoveryEnabled === false) {
@@ -202,6 +252,9 @@ export async function resolveOllamaDiscoveryResult(params: {
},
};
}
if (!hasMeaningfulExplicitConfig && hasRemoteOllamaApiProvider) {
return null;
}
if (!hasOllamaDiscoveryOptIn && !hasMeaningfulExplicitConfig) {
return null;
}