fix(ollama): honor baseURL provider aliases

This commit is contained in:
Peter Steinberger
2026-04-27 03:28:14 +01:00
parent ae89d44760
commit dc78d58448
14 changed files with 231 additions and 13 deletions

View File

@@ -34,6 +34,7 @@ Docs: https://docs.openclaw.ai
- Providers/Ollama: skip ambient localhost discovery unless Ollama auth or meaningful config opts in, preventing unexpected probes to `127.0.0.1:11434` for users who are not using Ollama. Fixes #56939; supersedes #57116. Thanks @IanxDev and @tsukhani.
- Providers/Ollama: move memory embeddings to Ollama's current `/api/embed` endpoint with batched `input` requests while preserving vector normalization and custom provider auth/header overrides. Fixes #39983. Thanks @sskkcc and @LiudengZhang.
- Providers/Ollama: route local web search through Ollama's signed `/api/experimental/web_search` daemon proxy, use hosted `/api/web_search` directly for `ollama.com`, and keep `OLLAMA_API_KEY` scoped to cloud fallback auth. Fixes #69132. Thanks @yoon1012 and @hyspacex.
- Providers/Ollama: accept OpenAI SDK-style `baseURL` as an alias for `baseUrl` across discovery, streaming, setup pulls, embeddings, and web search so remote Ollama hosts are not silently ignored. Fixes #62533; supersedes #62549. Thanks @Julien-BKK and @Linux2010.
- Memory/doctor: treat Ollama memory embeddings as key-optional so `openclaw doctor` no longer warns about a missing API key when the gateway reports embeddings are ready. Fixes #46584. Thanks @fengly78.
- Agents/Ollama: apply provider-owned replay turn normalization to native Ollama chat so Cloud models no longer reject non-alternating replay history in agent/Gateway runs. Fixes #71697. Thanks @ismael-81.
- Control UI/Ollama: show the resolved configured thinking default in chat and session thinking dropdowns so inherited `adaptive`/per-model thinking config no longer appears as `Default (off)` or a generic inherit value. Fixes #72407. Thanks @NotecAG.

View File

@@ -13,6 +13,8 @@ OpenClaw integrates with Ollama's native API (`/api/chat`) for hosted cloud mode
**Remote Ollama users**: Do not use the `/v1` OpenAI-compatible URL (`http://host:11434/v1`) with OpenClaw. This breaks tool calling and models may output raw tool JSON as plain text. Use the native Ollama API URL instead: `baseUrl: "http://host:11434"` (no `/v1`).
</Warning>
Ollama provider config uses `baseUrl` as the canonical key. OpenClaw also accepts `baseURL` for compatibility with OpenAI SDK-style examples, but new config should prefer `baseUrl`.
## Getting started
Choose your preferred setup method and mode.

View File

@@ -97,6 +97,8 @@ reuse that host instead:
}
```
The Ollama model provider uses `baseUrl` as the canonical key. The web-search provider also honors `baseURL` on `models.providers.ollama` for compatibility with OpenAI SDK-style config examples.
If no explicit Ollama base URL is set, OpenClaw uses `http://127.0.0.1:11434`.
If your Ollama host expects bearer auth, OpenClaw reuses

View File

@@ -312,6 +312,36 @@ describe("ollama plugin", () => {
});
});
// Regression coverage for #62533: an OpenAI SDK-style `baseURL` key on the
// ollama provider entry must count as explicit discovery config rather than
// being ignored (which previously fell back to ambient localhost discovery).
it("accepts baseURL alias as explicit discovery config", async () => {
const provider = registerProvider();
buildOllamaProviderMock.mockResolvedValueOnce({
baseUrl: "http://remote-ollama:11434",
api: "ollama",
models: [],
});
const result = await provider.discovery.run({
config: {
models: {
providers: {
ollama: {
// Alias key only — canonical `baseUrl` is intentionally absent.
baseURL: "http://remote-ollama:11434",
api: "ollama",
models: [],
},
},
},
},
env: { NODE_ENV: "development" },
resolveProviderApiKey: () => ({ apiKey: "" }),
} as never);
expect(result).toBeNull();
// The aliased host must reach the provider builder, and `quiet` stays false
// because explicit (meaningful) config opts in to non-quiet discovery.
expect(buildOllamaProviderMock).toHaveBeenCalledWith("http://remote-ollama:11434", {
quiet: false,
});
});
it("keeps stored ollama-local marker auth on the quiet ambient path", async () => {
const provider = registerProvider();
buildOllamaProviderMock.mockResolvedValueOnce({
@@ -371,6 +401,24 @@ describe("ollama plugin", () => {
});
});
// A non-default host supplied via the `baseURL` alias is meaningful config,
// so synthetic local auth must be minted exactly as it is for `baseUrl`.
it("mints synthetic auth for non-default baseURL alias config", () => {
const provider = registerProvider();
const auth = provider.resolveSyntheticAuth?.({
providerConfig: {
baseURL: "http://remote-ollama:11434",
api: "ollama",
models: [],
} as never,
});
// Marker key used for local/remote Ollama hosts that need no real API key.
expect(auth).toEqual({
apiKey: "ollama-local",
source: "models.providers.ollama (synthetic local key)",
mode: "api-key",
});
});
it("wraps OpenAI-compatible payloads with num_ctx for Ollama compat routes", () => {
const provider = registerProvider();
let payloadSeen: Record<string, unknown> | undefined;
@@ -513,6 +561,28 @@ describe("ollama plugin", () => {
);
});
// Custom provider ids (here "ollama2") with `api: "ollama"` must also honor
// the `baseURL` alias when the stream function resolves the provider host.
it("routes createStreamFn through baseURL alias for custom Ollama providers", () => {
const provider = registerProvider();
const config = {
models: {
providers: {
ollama2: {
api: "ollama",
baseURL: "http://127.0.0.1:11435",
models: [],
},
},
},
};
// model.baseUrl is undefined so the provider-level alias is the only source.
const model = { id: "llama3.2", provider: "ollama2", baseUrl: undefined };
provider.createStreamFn?.({ config, model, provider: "ollama2" } as never);
expect(createConfiguredOllamaStreamFnMock).toHaveBeenCalledWith(
expect.objectContaining({ providerBaseUrl: "http://127.0.0.1:11435" }),
);
});
it("uses ollama provider baseUrl when provider is ollama (backward compat)", () => {
const provider = registerProvider();
const config = {

View File

@@ -31,6 +31,7 @@ import {
} from "./src/embedding-provider.js";
import { ollamaMediaUnderstandingProvider } from "./src/media-understanding-provider.js";
import { ollamaMemoryEmbeddingProviderAdapter } from "./src/memory-embedding-adapter.js";
import { readProviderBaseUrl } from "./src/provider-base-url.js";
import {
createConfiguredOllamaCompatStreamWrapper,
createConfiguredOllamaStreamFn,
@@ -161,8 +162,9 @@ export default definePluginEntry({
createStreamFn: ({ config, model, provider }) => {
return createConfiguredOllamaStreamFn({
model,
providerBaseUrl: resolveConfiguredOllamaProviderConfig({ config, providerId: provider })
?.baseUrl,
providerBaseUrl: readProviderBaseUrl(
resolveConfiguredOllamaProviderConfig({ config, providerId: provider }),
),
});
},
...OPENAI_COMPATIBLE_REPLAY_HOOKS,

View File

@@ -1,5 +1,6 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { OLLAMA_DEFAULT_BASE_URL } from "./defaults.js";
import { readProviderBaseUrl } from "./provider-base-url.js";
import { resolveOllamaApiBase } from "./provider-models.js";
export const OLLAMA_PROVIDER_ID = "ollama";
@@ -63,8 +64,9 @@ export function hasMeaningfulExplicitOllamaConfig(
if (Array.isArray(providerConfig.models) && providerConfig.models.length > 0) {
return true;
}
if (typeof providerConfig.baseUrl === "string" && providerConfig.baseUrl.trim()) {
return resolveOllamaApiBase(providerConfig.baseUrl) !== OLLAMA_DEFAULT_BASE_URL;
const baseUrl = readProviderBaseUrl(providerConfig);
if (baseUrl) {
return resolveOllamaApiBase(baseUrl) !== OLLAMA_DEFAULT_BASE_URL;
}
if (readStringValue(providerConfig.apiKey)) {
return true;
@@ -118,10 +120,7 @@ export async function resolveOllamaDiscoveryResult(params: {
return {
provider: {
...explicit,
baseUrl:
typeof explicit.baseUrl === "string" && explicit.baseUrl.trim()
? resolveOllamaApiBase(explicit.baseUrl)
: OLLAMA_DEFAULT_BASE_URL,
baseUrl: resolveOllamaApiBase(readProviderBaseUrl(explicit) ?? OLLAMA_DEFAULT_BASE_URL),
api: explicit.api ?? "ollama",
apiKey: resolveOllamaDiscoveryApiKey({
env: params.ctx.env,
@@ -142,7 +141,7 @@ export async function resolveOllamaDiscoveryResult(params: {
return null;
}
const provider = await params.buildProvider(explicit?.baseUrl, {
const provider = await params.buildProvider(readProviderBaseUrl(explicit), {
quiet: !hasRealOllamaKey && !hasMeaningfulExplicitConfig,
});
if (provider.models?.length === 0 && !ollamaKey && !explicit?.apiKey) {

View File

@@ -109,6 +109,33 @@ describe("ollama embedding provider", () => {
);
});
// Embeddings must honor the `baseURL` alias and normalize away a trailing
// `/v1` (OpenAI-compat path) before targeting the native `/api/embed` route.
it("resolves configured baseURL alias", async () => {
const fetchMock = mockEmbeddingFetch([1, 0]);
const { provider } = await createOllamaEmbeddingProvider({
config: {
models: {
providers: {
ollama: {
// Alias key with an OpenAI-style `/v1` suffix on purpose.
baseURL: "http://remote-ollama:11434/v1",
models: [],
},
},
},
} as unknown as OpenClawConfig,
provider: "ollama",
model: "nomic-embed-text",
fallback: "none",
});
await provider.embedQuery("hello");
// Expect the aliased host with `/v1` stripped and the native embed path.
expect(fetchMock).toHaveBeenCalledWith(
"http://remote-ollama:11434/api/embed",
expect.objectContaining({ method: "POST" }),
);
});
it("fails fast when memory-search remote apiKey is an unresolved SecretRef", async () => {
await expect(
createOllamaEmbeddingProvider({

View File

@@ -13,6 +13,7 @@ import {
type SsrFPolicy,
} from "openclaw/plugin-sdk/ssrf-runtime";
import { normalizeOllamaWireModelId } from "./model-id.js";
import { readProviderBaseUrl } from "./provider-base-url.js";
import { resolveOllamaApiBase } from "./provider-models.js";
export type OllamaEmbeddingProvider = {
@@ -138,7 +139,7 @@ function resolveOllamaEmbeddingClient(
options: OllamaEmbeddingOptions,
): OllamaEmbeddingClientConfig {
const providerConfig = resolveConfiguredProvider(options);
const rawBaseUrl = options.remote?.baseUrl?.trim() || providerConfig?.baseUrl?.trim();
const rawBaseUrl = options.remote?.baseUrl?.trim() || readProviderBaseUrl(providerConfig);
const baseUrl = resolveOllamaApiBase(rawBaseUrl);
const model = normalizeEmbeddingModel(options.model, options.provider);
const headerOverrides = Object.assign({}, providerConfig?.headers, options.remote?.headers);

View File

@@ -0,0 +1,44 @@
import { describe, expect, it } from "vitest";
import { readProviderBaseUrl } from "./provider-base-url.js";
describe("readProviderBaseUrl", () => {
// Small shim so each case can pass plain literals without repeating the cast.
const read = (value: unknown) => readProviderBaseUrl(value as Parameters<typeof readProviderBaseUrl>[0]);

it("reads canonical baseUrl and trims whitespace", () => {
expect(read({ baseUrl: " http://host:11434/v1 ", models: [] })).toBe("http://host:11434/v1");
});

it("falls back to OpenAI SDK-style baseURL", () => {
expect(read({ baseURL: " http://remote-ollama:11434 ", models: [] })).toBe("http://remote-ollama:11434");
});

it("prefers canonical baseUrl over baseURL", () => {
const entry = {
baseUrl: "http://canonical:11434",
baseURL: "http://alternate:11434",
models: [],
};
expect(read(entry)).toBe("http://canonical:11434");
});

it("ignores inherited baseUrl aliases", () => {
// Only own properties count, so a prototype-supplied baseUrl is invisible.
const entry = Object.create({ baseUrl: "http://inherited:11434" }) as Record<string, unknown>;
entry.models = [];
expect(read(entry)).toBeUndefined();
});

it("returns undefined for empty or missing values", () => {
expect(read(undefined)).toBeUndefined();
expect(read({ models: [] })).toBeUndefined();
expect(read({ baseUrl: " ", models: [] })).toBeUndefined();
});
});

View File

@@ -0,0 +1,23 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
/**
 * Resolve the configured base URL for a provider config entry.
 *
 * Accepts the canonical `baseUrl` key and falls back to the OpenAI
 * SDK-style `baseURL` alias. Only own (non-inherited) string properties
 * are considered; values are trimmed and blank strings are treated as
 * unset so the next candidate key is consulted.
 *
 * @param provider - The provider config entry, possibly undefined.
 * @returns The trimmed base URL, or undefined when none is configured.
 */
export function readProviderBaseUrl(provider: ModelProviderConfig | undefined): string | undefined {
  if (!provider) {
    return undefined;
  }
  const record = provider as unknown as Record<string, unknown>;
  // Canonical key first; the alias is only consulted when it yields nothing.
  for (const key of ["baseUrl", "baseURL"] as const) {
    if (!Object.hasOwn(record, key)) {
      continue;
    }
    const value = record[key];
    if (typeof value !== "string") {
      continue;
    }
    const trimmed = value.trim();
    if (trimmed) {
      return trimmed;
    }
  }
  return undefined;
}

View File

@@ -434,6 +434,38 @@ describe("ollama setup", () => {
expect(fetchMock).toHaveBeenCalledTimes(1);
});
// Setup-wizard pulls must target the host from the `baseURL` alias for both
// the inventory check (/api/tags) and the pull request (/api/pull).
it("uses baseURL alias when checking and pulling models", async () => {
const progress = { update: vi.fn(), stop: vi.fn() };
const prompter = {
progress: vi.fn(() => progress),
} as unknown as WizardPrompter;
// Empty tags list forces a pull; the pull stream reports success.
const fetchMock = createOllamaFetchMock({
tags: [],
pullResponse: new Response('{"status":"success"}\n', { status: 200 }),
});
vi.stubGlobal("fetch", fetchMock);
await ensureOllamaModelPulled({
config: {
agents: { defaults: { model: { primary: "ollama/gemma4" } } },
models: {
providers: {
ollama: {
// Alias key on a non-default port to prove it is honored.
baseURL: "http://127.0.0.1:11435",
models: [],
} as never,
},
},
},
model: "ollama/gemma4",
prompter,
});
// First call checks installed models, second call pulls the missing one.
expect(fetchMock.mock.calls[0]?.[0]).toBe("http://127.0.0.1:11435/api/tags");
expect(fetchMock.mock.calls[1]?.[0]).toBe("http://127.0.0.1:11435/api/pull");
});
it("skips pull for cloud models", async () => {
const prompter = {} as unknown as WizardPrompter;
const fetchMock = vi.fn();

View File

@@ -25,6 +25,7 @@ import {
OLLAMA_DEFAULT_BASE_URL,
OLLAMA_DEFAULT_MODEL,
} from "./defaults.js";
import { readProviderBaseUrl } from "./provider-base-url.js";
import {
buildOllamaBaseUrlSsrFPolicy,
buildOllamaProvider,
@@ -631,7 +632,8 @@ export async function ensureOllamaModelPulled(params: {
if (!params.model.startsWith("ollama/")) {
return;
}
const baseUrl = params.config.models?.providers?.ollama?.baseUrl ?? OLLAMA_DEFAULT_BASE_URL;
const baseUrl =
readProviderBaseUrl(params.config.models?.providers?.ollama) ?? OLLAMA_DEFAULT_BASE_URL;
const modelName = params.model.slice("ollama/".length);
if (isOllamaCloudModel(modelName)) {
return;

View File

@@ -19,6 +19,7 @@ type OllamaProviderConfigOverride = Partial<{
api: "ollama";
apiKey: string;
baseUrl: string;
baseURL: string;
models: NonNullable<
NonNullable<NonNullable<OpenClawConfig["models"]>["providers"]>[string]
>["models"];
@@ -125,6 +126,17 @@ describe("ollama web search provider", () => {
).toBe("https://ollama.com");
});
// Web search must honor the `baseURL` alias when `baseUrl` is unset, and
// normalize the `/v1` suffix off the resolved host.
it("uses the model provider baseURL alias for web search", () => {
expect(
testing.resolveOllamaWebSearchBaseUrl(
createOllamaConfig({
baseUrl: undefined,
baseURL: "http://remote-ollama:11434/v1",
} as OllamaProviderConfigOverride),
),
).toBe("http://remote-ollama:11434");
});
it("maps generic search args into the local Ollama proxy endpoint", async () => {
const release = vi.fn(async () => {});
fetchWithSsrFGuardMock.mockResolvedValue({

View File

@@ -20,6 +20,7 @@ import { fetchWithSsrFGuard } from "openclaw/plugin-sdk/ssrf-runtime";
import { normalizeOptionalString } from "openclaw/plugin-sdk/text-runtime";
import { Type } from "typebox";
import { OLLAMA_DEFAULT_BASE_URL } from "./defaults.js";
import { readProviderBaseUrl } from "./provider-base-url.js";
import {
buildOllamaBaseUrlSsrFPolicy,
fetchOllamaModels,
@@ -96,8 +97,8 @@ function resolveOllamaWebSearchBaseUrl(config?: OpenClawConfig): string {
if (pluginBaseUrl) {
return resolveOllamaApiBase(pluginBaseUrl);
}
const configuredBaseUrl = config?.models?.providers?.ollama?.baseUrl;
if (normalizeOptionalString(configuredBaseUrl)) {
const configuredBaseUrl = readProviderBaseUrl(config?.models?.providers?.ollama);
if (configuredBaseUrl) {
return resolveOllamaApiBase(configuredBaseUrl);
}
return OLLAMA_DEFAULT_BASE_URL;