fix(ollama): scope auth to local hosts

This commit is contained in:
Peter Steinberger
2026-04-27 03:53:57 +01:00
parent 29af4add2a
commit 313a19c940
10 changed files with 471 additions and 39 deletions

View File

@@ -40,6 +40,7 @@ Docs: https://docs.openclaw.ai
- Providers/Ollama: move memory embeddings to Ollama's current `/api/embed` endpoint with batched `input` requests while preserving vector normalization and custom provider auth/header overrides. Fixes #39983. Thanks @sskkcc and @LiudengZhang.
- Providers/Ollama: route local web search through Ollama's signed `/api/experimental/web_search` daemon proxy, use hosted `/api/web_search` directly for `ollama.com`, and keep `OLLAMA_API_KEY` scoped to cloud fallback auth. Fixes #69132. Thanks @yoon1012 and @hyspacex.
- Providers/Ollama: accept OpenAI SDK-style `baseURL` as an alias for `baseUrl` across discovery, streaming, setup pulls, embeddings, and web search so remote Ollama hosts are not silently ignored. Fixes #62533; supersedes #62549. Thanks @Julien-BKK and @Linux2010.
- Providers/Ollama: scope synthetic local auth and embedding bearer headers to declared Ollama host boundaries so cloud keys are not sent to local/self-hosted embedding endpoints and remote/cloud Ollama endpoints no longer receive the `ollama-local` marker as if it were a real token. Supersedes #69261 and #69857; refs #43945. Thanks @hyspacex, @maxramsay, and @Meli73.
- Providers/PDF/Ollama: add bounded network timeouts for Ollama model pulls and native Anthropic/Gemini PDF analysis requests so unresponsive provider endpoints no longer hang sessions indefinitely. Fixes #54142; supersedes #54144 and #54145. Thanks @jinduwang1001-max and @arkyu2077.
- Memory/doctor: treat Ollama memory embeddings as key-optional so `openclaw doctor` no longer warns about a missing API key when the gateway reports embeddings are ready. Fixes #46584. Thanks @fengly78.
- Agents/Ollama: apply provider-owned replay turn normalization to native Ollama chat so Cloud models no longer reject non-alternating replay history in agent/Gateway runs. Fixes #71697. Thanks @ismael-81.
@@ -70,7 +71,6 @@ Docs: https://docs.openclaw.ai
- Plugins/memory-core: respect configured memory-search embedding concurrency during non-batch indexing so local Ollama embedding backends can serialize indexing instead of flooding the server. Fixes #66822. (#66931) Thanks @oliviareid-svg and @LyraInTheFlesh.
- Docker/update smoke: keep the package-derived update-channel fixture on package-shipped files and make its UI build stub create the asset the updater verifies. Thanks @vincentkoc.
## 2026.4.26
### Fixes

View File

@@ -15,6 +15,10 @@ OpenClaw integrates with Ollama's native API (`/api/chat`) for hosted cloud mode
Ollama provider config uses `baseUrl` as the canonical key. OpenClaw also accepts `baseURL` for compatibility with OpenAI SDK-style examples, but new config should prefer `baseUrl`.
Local and LAN Ollama hosts do not need a real bearer token; OpenClaw uses the local `ollama-local` marker only for loopback, private-network, `.local`, and bare-hostname Ollama base URLs. Remote public hosts and Ollama Cloud (`https://ollama.com`) require a real credential through `OLLAMA_API_KEY`, an auth profile, or the provider's `apiKey`.
When Ollama is used for memory embeddings, bearer auth is scoped to the host where it was declared. A provider-level key is sent only to that provider's Ollama host; `agents.*.memorySearch.remote.apiKey` is sent only to its remote embedding host; and a pure `OLLAMA_API_KEY` env value is treated as the Ollama Cloud convention rather than being sent to local/self-hosted hosts by default.
## Getting started
Choose your preferred setup method and mode.

View File

@@ -236,7 +236,7 @@ describe("ollama plugin", () => {
baseUrl: "http://127.0.0.1:11434",
api: "ollama",
models: [{ id: "llama3.2", name: "Llama 3.2" }],
apiKey: "OLLAMA_API_KEY",
apiKey: "ollama-local",
},
});
});
@@ -419,6 +419,34 @@ describe("ollama plugin", () => {
});
});
it("does not mint synthetic auth for Ollama Cloud baseUrl", () => {
const provider = registerProvider();
const auth = provider.resolveSyntheticAuth?.({
providerConfig: {
baseUrl: "https://ollama.com",
api: "ollama",
models: [],
},
});
expect(auth).toBeUndefined();
});
it("does not mint synthetic auth for public IPv4 baseUrl", () => {
const provider = registerProvider();
const auth = provider.resolveSyntheticAuth?.({
providerConfig: {
baseUrl: "http://8.8.8.8:11434",
api: "ollama",
models: [],
},
});
expect(auth).toBeUndefined();
});
it("wraps OpenAI-compatible payloads with num_ctx for Ollama compat routes", () => {
const provider = registerProvider();
let payloadSeen: Record<string, unknown> | undefined;

View File

@@ -21,8 +21,8 @@ import {
import {
OLLAMA_DEFAULT_API_KEY,
OLLAMA_PROVIDER_ID,
hasMeaningfulExplicitOllamaConfig,
resolveOllamaDiscoveryResult,
shouldUseSyntheticOllamaAuth,
type OllamaPluginConfig,
} from "./src/discovery-shared.js";
import {
@@ -199,7 +199,7 @@ export default definePluginEntry({
/\bollama\b.*(?:context length|too many tokens|context window)/i.test(errorMessage) ||
/\btruncating input\b.*\btoo long\b/i.test(errorMessage),
resolveSyntheticAuth: ({ providerConfig }) => {
if (!hasMeaningfulExplicitOllamaConfig(providerConfig)) {
if (!shouldUseSyntheticOllamaAuth(providerConfig)) {
return undefined;
}
return {

View File

@@ -119,7 +119,7 @@ describe("Ollama provider", () => {
const provider = await runOllamaCatalog({});
expect(provider).toBeDefined();
expect(provider?.apiKey).toBe("OLLAMA_API_KEY");
expect(provider?.apiKey).toBe(OLLAMA_LOCAL_AUTH_MARKER);
expect(provider?.api).toBe("ollama");
expect(provider?.baseUrl).toBe("http://127.0.0.1:11434");
expectDiscoveryCallCounts(fetchMock, { tags: 1, show: 0 });
@@ -213,7 +213,7 @@ describe("Ollama provider", () => {
env: { OLLAMA_API_KEY: OLLAMA_LOCAL_AUTH_MARKER, VITEST: "", NODE_ENV: "development" },
});
expect(provider?.apiKey).toBe("OLLAMA_API_KEY");
expect(provider?.apiKey).toBe(OLLAMA_LOCAL_AUTH_MARKER);
expect(provider?.api).toBe("ollama");
expect(provider?.baseUrl).toBe("http://127.0.0.1:11434");
expect(provider?.models).toHaveLength(2);
@@ -428,6 +428,43 @@ describe("Ollama provider", () => {
});
});
it("should not use synthetic local auth for configured cloud providers without apiKey", async () => {
await withoutAmbientOllamaEnv(async () => {
const fetchMock = vi.fn();
vi.stubGlobal("fetch", withFetchPreconnect(fetchMock));
const provider = await runOllamaCatalog({
config: {
models: {
providers: {
ollama: {
baseUrl: "https://ollama.com/v1",
models: [
{
id: "gpt-oss:20b",
name: "GPT-OSS 20B",
reasoning: false,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 8192,
maxTokens: 81920,
},
],
},
},
},
},
env: { VITEST: "", NODE_ENV: "development" },
});
expect(fetchMock).not.toHaveBeenCalled();
expect(provider?.baseUrl).toBe("https://ollama.com");
expect(provider?.api).toBe("ollama");
expect(provider?.apiKey).toBeUndefined();
expect(provider?.models).toHaveLength(1);
});
});
it("should preserve explicit apiKey from configured remote providers", async () => {
await withoutAmbientOllamaEnv(async () => {
const fetchMock = vi.fn(async (input: unknown) => {

View File

@@ -3,8 +3,8 @@ import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-sha
import {
OLLAMA_DEFAULT_API_KEY,
OLLAMA_PROVIDER_ID,
hasMeaningfulExplicitOllamaConfig,
resolveOllamaDiscoveryResult,
shouldUseSyntheticOllamaAuth,
type OllamaPluginConfig,
} from "./src/discovery-shared.js";
import { buildOllamaProvider } from "./src/provider-models.js";
@@ -51,7 +51,7 @@ export const ollamaProviderDiscovery: OllamaProviderPlugin = {
envVars: ["OLLAMA_API_KEY"],
auth: [],
resolveSyntheticAuth: ({ providerConfig }) => {
if (!hasMeaningfulExplicitOllamaConfig(providerConfig)) {
if (!shouldUseSyntheticOllamaAuth(providerConfig)) {
return undefined;
}
return {

View File

@@ -0,0 +1,38 @@
import { describe, expect, it } from "vitest";
import { isLocalOllamaBaseUrl } from "./discovery-shared.js";
describe("isLocalOllamaBaseUrl", () => {
it.each([
undefined,
"",
"http://localhost:11434",
"http://127.0.0.1:11434",
"http://0.0.0.0:11434",
"http://[::1]:11434",
"http://10.0.0.5:11434",
"http://172.16.0.10:11434",
"http://172.31.255.254:11434",
"http://192.168.1.100:11434",
"http://gpu-node-1:11434",
"http://mac-studio.local:11434",
"http://[fd00::1]:11434",
"http://[fe90::1]:11434",
])("classifies %s as local", (baseUrl) => {
expect(isLocalOllamaBaseUrl(baseUrl)).toBe(true);
});
it.each([
"https://ollama.com",
"https://api.ollama.com/v1",
"https://ollama.example.com:11434",
"http://8.8.8.8:11434",
"http://172.15.255.254:11434",
"http://172.32.0.1:11434",
"http://193.168.1.1:11434",
"http://[2001:4860:4860::8888]:11434",
"http://10.example.com:11434",
"not a url",
])("classifies %s as remote", (baseUrl) => {
expect(isLocalOllamaBaseUrl(baseUrl)).toBe(false);
});
});

View File

@@ -43,18 +43,85 @@ function readStringValue(value: unknown): string | undefined {
export function resolveOllamaDiscoveryApiKey(params: {
env: NodeJS.ProcessEnv;
baseUrl?: string;
explicitApiKey?: string;
hasDeclaredApiKey?: boolean;
resolvedApiKey?: unknown;
}): string {
const envApiKey = params.env.OLLAMA_API_KEY?.trim() ? "OLLAMA_API_KEY" : undefined;
}): string | undefined {
const envValue = normalizeOptionalString(params.env.OLLAMA_API_KEY);
const envApiKey = envValue ? "OLLAMA_API_KEY" : undefined;
const resolvedApiKey = normalizeOptionalString(params.resolvedApiKey);
return envApiKey ?? params.explicitApiKey ?? resolvedApiKey ?? OLLAMA_DEFAULT_API_KEY;
const explicitApiKey = normalizeOptionalString(params.explicitApiKey);
if (explicitApiKey) {
return explicitApiKey;
}
if (params.hasDeclaredApiKey && resolvedApiKey) {
return resolvedApiKey;
}
if (!isLocalOllamaBaseUrl(params.baseUrl)) {
return envApiKey ?? (resolvedApiKey !== OLLAMA_DEFAULT_API_KEY ? resolvedApiKey : undefined);
}
if (resolvedApiKey && resolvedApiKey !== envValue && resolvedApiKey !== OLLAMA_DEFAULT_API_KEY) {
return resolvedApiKey;
}
return OLLAMA_DEFAULT_API_KEY;
}
/** Ambient (unconfigured) Ollama discovery is suppressed inside test runs. */
function shouldSkipAmbientOllamaDiscovery(env: NodeJS.ProcessEnv): boolean {
  if (env.VITEST) {
    return true;
  }
  return env.NODE_ENV === "test";
}
// Hostnames that always resolve to the machine itself (or "any interface").
const LOCAL_OLLAMA_HOSTNAMES = new Set(["localhost", "127.0.0.1", "0.0.0.0", "::1", "::"]);
/** Parses a dotted-quad IPv4 literal; returns undefined for anything else. */
function parseIpv4Octets(host: string): number[] | undefined {
  if (!/^\d+\.\d+\.\d+\.\d+$/.test(host)) {
    return undefined;
  }
  const octets = host.split(".").map((part) => Number.parseInt(part, 10));
  if (octets.some((part) => !Number.isInteger(part) || part < 0 || part > 255)) {
    return undefined;
  }
  return octets;
}
/** RFC 1918 private ranges: 10/8, 172.16/12, 192.168/16. */
function isIpv4PrivateRange(host: string): boolean {
  const octets = parseIpv4Octets(host);
  if (!octets) {
    return false;
  }
  const [a, b] = octets;
  return a === 10 || (a === 172 && b >= 16 && b <= 31) || (a === 192 && b === 168);
}
/**
 * Loopback (127/8, RFC 5735) and link-local (169.254/16, RFC 3927) IPv4
 * addresses. Previously only the literal `127.0.0.1` was matched, so other
 * loopback addresses were misclassified as remote — inconsistent with the
 * documented "loopback is local" contract and with the IPv6 link-local
 * handling below.
 */
function isIpv4LoopbackOrLinkLocal(host: string): boolean {
  const octets = parseIpv4Octets(host);
  if (!octets) {
    return false;
  }
  const [a, b] = octets;
  return a === 127 || (a === 169 && b === 254);
}
/** IPv6 link-local (fe80::/10) and unique-local (fc00::/7) prefixes. */
function isIpv6LocalRange(host: string): boolean {
  const lower = host.toLowerCase();
  return /^fe[89ab][0-9a-f]:/.test(lower) || /^f[cd][0-9a-f]{2}:/.test(lower);
}
/**
 * Classifies an Ollama base URL as "local" — loopback, private network,
 * link-local, `.local` mDNS, or a bare single-label hostname — versus
 * remote/cloud. Local hosts may receive the synthetic `ollama-local` marker;
 * remote hosts require a real credential.
 */
export function isLocalOllamaBaseUrl(baseUrl: string | undefined | null): boolean {
  if (!baseUrl) {
    // Absent baseUrl falls back to the default local daemon.
    return true;
  }
  let parsed: URL;
  try {
    parsed = new URL(baseUrl);
  } catch {
    // Unparseable input is never granted local trust.
    return false;
  }
  let host = parsed.hostname.toLowerCase();
  if (host.startsWith("[") && host.endsWith("]")) {
    // Strip IPv6 brackets so the prefix checks see the raw address.
    host = host.slice(1, -1);
  }
  return (
    LOCAL_OLLAMA_HOSTNAMES.has(host) ||
    host.endsWith(".local") ||
    isIpv4PrivateRange(host) ||
    isIpv4LoopbackOrLinkLocal(host) ||
    isIpv6LocalRange(host) ||
    // A bare hostname (no dot, no colon) is assumed to be a LAN machine.
    (!host.includes(".") && !host.includes(":"))
  );
}
/**
 * Synthetic `ollama-local` auth is minted only when the user explicitly
 * configured an Ollama provider AND its base URL resolves to a local/LAN host.
 */
export function shouldUseSyntheticOllamaAuth(
  providerConfig: ModelProviderConfig | undefined,
): boolean {
  const isExplicitlyConfigured = hasMeaningfulExplicitOllamaConfig(providerConfig);
  if (!isExplicitlyConfigured) {
    return false;
  }
  const configuredBaseUrl = readProviderBaseUrl(providerConfig);
  return isLocalOllamaBaseUrl(configuredBaseUrl);
}
export function hasMeaningfulExplicitOllamaConfig(
providerConfig: ModelProviderConfig | undefined,
): boolean {
@@ -116,17 +183,22 @@ export async function resolveOllamaDiscoveryResult(params: {
ollamaKey.trim().length > 0 &&
ollamaKey.trim() !== OLLAMA_DEFAULT_API_KEY;
const explicitApiKey = readStringValue(explicit?.apiKey);
const hasDeclaredApiKey = explicit?.apiKey !== undefined;
if (hasExplicitModels && explicit) {
const baseUrl = resolveOllamaApiBase(readProviderBaseUrl(explicit) ?? OLLAMA_DEFAULT_BASE_URL);
const apiKey = resolveOllamaDiscoveryApiKey({
env: params.ctx.env,
baseUrl,
explicitApiKey,
hasDeclaredApiKey,
resolvedApiKey: ollamaKey,
});
return {
provider: {
...explicit,
baseUrl: resolveOllamaApiBase(readProviderBaseUrl(explicit) ?? OLLAMA_DEFAULT_BASE_URL),
baseUrl,
api: explicit.api ?? "ollama",
apiKey: resolveOllamaDiscoveryApiKey({
env: params.ctx.env,
explicitApiKey,
resolvedApiKey: ollamaKey,
}),
...(apiKey ? { apiKey } : {}),
},
};
}
@@ -141,20 +213,24 @@ export async function resolveOllamaDiscoveryResult(params: {
return null;
}
const provider = await params.buildProvider(readProviderBaseUrl(explicit), {
const configuredBaseUrl = readProviderBaseUrl(explicit);
const provider = await params.buildProvider(configuredBaseUrl, {
quiet: !hasRealOllamaKey && !hasMeaningfulExplicitConfig,
});
if (provider.models?.length === 0 && !ollamaKey && !explicit?.apiKey) {
return null;
}
const apiKey = resolveOllamaDiscoveryApiKey({
env: params.ctx.env,
baseUrl: provider.baseUrl ?? configuredBaseUrl,
explicitApiKey,
hasDeclaredApiKey,
resolvedApiKey: ollamaKey,
});
return {
provider: {
...provider,
apiKey: resolveOllamaDiscoveryApiKey({
env: params.ctx.env,
explicitApiKey,
resolvedApiKey: ollamaKey,
}),
...(apiKey ? { apiKey } : {}),
},
};
}

View File

@@ -72,7 +72,7 @@ describe("ollama embedding provider", () => {
expect(vector[1]).toBeCloseTo(0.8, 5);
});
it("resolves configured base URL, API key, and headers", async () => {
it("resolves configured base URL and headers without sending local marker auth", async () => {
const fetchMock = mockEmbeddingFetch([1, 0]);
const { provider } = await createOllamaEmbeddingProvider({
@@ -102,11 +102,16 @@ describe("ollama embedding provider", () => {
method: "POST",
headers: expect.objectContaining({
"Content-Type": "application/json",
Authorization: "Bearer ollama-local",
"X-Provider-Header": "provider",
}),
}),
);
const [, init] = (fetchMock.mock.calls[0] ?? []) as unknown as [
string,
RequestInit | undefined,
];
const headers = init?.headers as Record<string, string> | undefined;
expect(headers?.Authorization).toBeUndefined();
});
it("resolves configured baseURL alias", async () => {
@@ -256,6 +261,137 @@ describe("ollama embedding provider", () => {
);
});
it("does not attach pure env OLLAMA_API_KEY to a local host", async () => {
const fetchMock = mockEmbeddingFetch([1, 0]);
vi.stubEnv("OLLAMA_API_KEY", "ollama-cloud-key");
const { provider } = await createOllamaEmbeddingProvider({
config: {} as OpenClawConfig,
provider: "ollama",
model: "nomic-embed-text",
fallback: "none",
remote: { baseUrl: "http://127.0.0.1:11434" },
});
await provider.embedQuery("hello");
const [, init] = (fetchMock.mock.calls[0] ?? []) as unknown as [
string,
RequestInit | undefined,
];
const headers = init?.headers as Record<string, string> | undefined;
expect(headers?.Authorization).toBeUndefined();
});
it("attaches pure env OLLAMA_API_KEY to Ollama Cloud", async () => {
const fetchMock = mockEmbeddingFetch([1, 0]);
vi.stubEnv("OLLAMA_API_KEY", "ollama-cloud-key");
const { provider } = await createOllamaEmbeddingProvider({
config: {} as OpenClawConfig,
provider: "ollama",
model: "nomic-embed-text",
fallback: "none",
remote: { baseUrl: "https://ollama.com" },
});
await provider.embedQuery("hello");
expect(fetchMock).toHaveBeenCalledWith(
"https://ollama.com/api/embed",
expect.objectContaining({
headers: expect.objectContaining({
Authorization: "Bearer ollama-cloud-key",
}),
}),
);
});
it("does not attach provider apiKey to a different remote embedding host", async () => {
const fetchMock = mockEmbeddingFetch([1, 0]);
const { provider } = await createOllamaEmbeddingProvider({
config: {
models: {
providers: {
ollama: {
baseUrl: "http://127.0.0.1:11434",
apiKey: "provider-host-key",
models: [],
},
},
},
} as unknown as OpenClawConfig,
provider: "ollama",
model: "nomic-embed-text",
fallback: "none",
remote: { baseUrl: "https://memory.example.com" },
});
await provider.embedQuery("hello");
const [, init] = (fetchMock.mock.calls[0] ?? []) as unknown as [
string,
RequestInit | undefined,
];
const headers = init?.headers as Record<string, string> | undefined;
expect(headers?.Authorization).toBeUndefined();
});
it("attaches remote apiKey to a remote embedding host", async () => {
const fetchMock = mockEmbeddingFetch([1, 0]);
const { provider } = await createOllamaEmbeddingProvider({
config: {} as OpenClawConfig,
provider: "ollama",
model: "nomic-embed-text",
fallback: "none",
remote: { baseUrl: "https://memory.example.com", apiKey: "remote-host-key" },
});
await provider.embedQuery("hello");
expect(fetchMock).toHaveBeenCalledWith(
"https://memory.example.com/api/embed",
expect.objectContaining({
headers: expect.objectContaining({
Authorization: "Bearer remote-host-key",
}),
}),
);
});
it("honors remote local marker as an explicit no-auth opt-out", async () => {
const fetchMock = mockEmbeddingFetch([1, 0]);
const { provider } = await createOllamaEmbeddingProvider({
config: {
models: {
providers: {
ollama: {
baseUrl: "http://127.0.0.1:11434",
apiKey: "provider-host-key",
models: [],
},
},
},
} as unknown as OpenClawConfig,
provider: "ollama",
model: "nomic-embed-text",
fallback: "none",
remote: { apiKey: "ollama-local" }, // pragma: allowlist secret
});
await provider.embedQuery("hello");
const [, init] = (fetchMock.mock.calls[0] ?? []) as unknown as [
string,
RequestInit | undefined,
];
const headers = init?.headers as Record<string, string> | undefined;
expect(headers?.Authorization).toBeUndefined();
});
it("marks inline memory batches as local-server timeout work", async () => {
const result = await ollamaMemoryEmbeddingProviderAdapter.create({
config: {} as OpenClawConfig,

View File

@@ -1,5 +1,9 @@
import type { OpenClawConfig } from "openclaw/plugin-sdk/provider-auth";
import { normalizeOptionalSecretInput } from "openclaw/plugin-sdk/provider-auth";
import {
isKnownEnvApiKeyMarker,
isNonSecretApiKeyMarker,
normalizeOptionalSecretInput,
} from "openclaw/plugin-sdk/provider-auth";
import { resolveEnvApiKey } from "openclaw/plugin-sdk/provider-auth-runtime";
import { normalizeProviderId } from "openclaw/plugin-sdk/provider-model-shared";
import {
@@ -12,6 +16,7 @@ import {
ssrfPolicyFromHttpBaseUrlAllowedHostname,
type SsrFPolicy,
} from "openclaw/plugin-sdk/ssrf-runtime";
import { OLLAMA_CLOUD_BASE_URL } from "./defaults.js";
import { normalizeOllamaWireModelId } from "./model-id.js";
import { readProviderBaseUrl } from "./provider-base-url.js";
import { resolveOllamaApiBase } from "./provider-models.js";
@@ -120,34 +125,142 @@ function resolveMemorySecretInputString(params: {
});
}
function resolveOllamaApiKey(options: OllamaEmbeddingOptions): string | undefined {
const remoteApiKey = resolveMemorySecretInputString({
value: options.remote?.apiKey,
path: "agents.*.memorySearch.remote.apiKey",
type OllamaEmbeddingBaseUrlOrigin = "remote-config" | "provider-config" | "default";
type OllamaEmbeddingSourceResolution = "unset" | "opt-out" | { apiKey: string };
type OllamaEmbeddingResolvedKeys = {
remote: OllamaEmbeddingSourceResolution;
provider: OllamaEmbeddingSourceResolution;
env: string | undefined;
};
/**
 * Resolves one configured key source (remote or provider) to a usable secret,
 * an explicit "opt-out" (send no auth), or "unset" (source not declared).
 * Env-marker values are dereferenced through the ollama env key.
 */
function resolveSourcedOllamaEmbeddingKey(params: {
  configString: string | undefined;
  declared: boolean;
}): OllamaEmbeddingSourceResolution {
  // Shared fallback: use the ambient env key when it holds a real secret,
  // otherwise treat the source as an explicit opt-out.
  const envKeyOrOptOut = (): OllamaEmbeddingSourceResolution => {
    const envKey = resolveEnvApiKey("ollama")?.apiKey;
    if (envKey && !isNonSecretApiKeyMarker(envKey)) {
      return { apiKey: envKey };
    }
    return "opt-out";
  };
  const { configString, declared } = params;
  if (configString === undefined) {
    // Declared-but-unresolvable config still opts in to env resolution.
    return declared ? envKeyOrOptOut() : "unset";
  }
  if (!isNonSecretApiKeyMarker(configString)) {
    // A real secret string is used as-is.
    return { apiKey: configString };
  }
  // Known env markers defer to the environment; unknown markers opt out.
  return isKnownEnvApiKeyMarker(configString) ? envKeyOrOptOut() : "opt-out";
}
function resolveOllamaEmbeddingResolvedKeys(
options: OllamaEmbeddingOptions,
providerConfig: ReturnType<typeof resolveConfiguredProvider>,
): OllamaEmbeddingResolvedKeys {
const remoteValue = options.remote?.apiKey;
const remote = resolveSourcedOllamaEmbeddingKey({
configString: resolveMemorySecretInputString({
value: remoteValue,
path: "agents.*.memorySearch.remote.apiKey",
}),
declared: hasConfiguredSecretInput(remoteValue),
});
if (remoteApiKey) {
return remoteApiKey;
const providerValue = providerConfig?.apiKey;
const provider = resolveSourcedOllamaEmbeddingKey({
configString: normalizeOptionalSecretInput(providerValue),
declared: hasConfiguredSecretInput(providerValue),
});
const envKey = resolveEnvApiKey("ollama")?.apiKey;
const env = envKey && !isNonSecretApiKeyMarker(envKey) ? envKey : undefined;
return { remote, provider, env };
}
function resolveOllamaEmbeddingBaseUrl(params: {
remoteBaseUrl?: string;
providerConfig: ReturnType<typeof resolveConfiguredProvider>;
}): { baseUrl: string; origin: OllamaEmbeddingBaseUrlOrigin } {
const remoteBaseUrl = params.remoteBaseUrl?.trim();
if (remoteBaseUrl) {
return { baseUrl: resolveOllamaApiBase(remoteBaseUrl), origin: "remote-config" };
}
const providerApiKey = normalizeOptionalSecretInput(resolveConfiguredProvider(options)?.apiKey);
if (providerApiKey) {
return providerApiKey;
const providerBaseUrl = readProviderBaseUrl(params.providerConfig);
if (providerBaseUrl) {
return { baseUrl: resolveOllamaApiBase(providerBaseUrl), origin: "provider-config" };
}
return resolveEnvApiKey("ollama")?.apiKey;
return { baseUrl: resolveOllamaApiBase(undefined), origin: "default" };
}
/**
 * Canonicalizes a base URL to `protocol//host:port[/path]` for host-equality
 * comparison: lowercases the hostname, folds localhost and IPv6 loopback onto
 * 127.0.0.1, fills in the protocol-default port, and drops a bare "/" path or
 * a trailing slash. Returns undefined for unparseable input.
 */
function normalizeOllamaHostKey(baseUrl: string): string | undefined {
  let parsed: URL;
  try {
    parsed = new URL(baseUrl);
  } catch {
    return undefined;
  }
  const rawHost = parsed.hostname.toLowerCase();
  const isLoopbackAlias = rawHost === "localhost" || rawHost === "::1" || rawHost === "[::1]";
  const host = isLoopbackAlias ? "127.0.0.1" : rawHost;
  const port = parsed.port !== "" ? parsed.port : parsed.protocol === "https:" ? "443" : "80";
  let path = parsed.pathname;
  path = path === "/" ? "" : path.replace(/\/$/, "");
  return `${parsed.protocol}//${host}:${port}${path}`;
}
/** True when both URLs parse and normalize to the same host key. */
function areOllamaHostsEquivalent(a: string, b: string): boolean {
  const left = normalizeOllamaHostKey(a);
  if (left === undefined) {
    return false;
  }
  return left === normalizeOllamaHostKey(b);
}
// True when `baseUrl` points at the hosted Ollama Cloud endpoint
// (OLLAMA_CLOUD_BASE_URL), compared via normalized protocol/host/port/path.
function isOllamaCloudBaseUrl(baseUrl: string): boolean {
  return areOllamaHostsEquivalent(baseUrl, OLLAMA_CLOUD_BASE_URL);
}
/**
 * Chooses the bearer key for an embedding request, scoping each credential to
 * the host where it was declared:
 *   1. remote (memorySearch) config is most specific and always wins when
 *      declared — including an explicit opt-out, which sends no auth at all;
 *   2. the provider-level key is used only when the request actually reaches
 *      the provider's own host;
 *   3. a pure env key is reserved for Ollama Cloud.
 * Returns undefined when no credential should be attached.
 */
function selectOllamaEmbeddingApiKey(params: {
  resolved: OllamaEmbeddingResolvedKeys;
  baseUrl: string;
  baseUrlOrigin: OllamaEmbeddingBaseUrlOrigin;
  providerOwnedHost: string;
}): string | undefined {
  // Remote config decides outright: a resolved key is sent, "opt-out" sends none.
  if (params.resolved.remote !== "unset") {
    return typeof params.resolved.remote === "object" ? params.resolved.remote.apiKey : undefined;
  }
  // The provider key may only travel to the provider's own host: either the
  // base URL came from provider config / default, or it normalizes to the
  // same host as the provider-owned base URL.
  const reachesProviderHost =
    params.baseUrlOrigin === "provider-config" ||
    params.baseUrlOrigin === "default" ||
    areOllamaHostsEquivalent(params.baseUrl, params.providerOwnedHost);
  if (params.resolved.provider !== "unset" && reachesProviderHost) {
    return typeof params.resolved.provider === "object"
      ? params.resolved.provider.apiKey
      : undefined;
  }
  // A bare env OLLAMA_API_KEY is treated as the Ollama Cloud convention.
  if (params.resolved.env && isOllamaCloudBaseUrl(params.baseUrl)) {
    return params.resolved.env;
  }
  return undefined;
}
function resolveOllamaEmbeddingClient(
options: OllamaEmbeddingOptions,
): OllamaEmbeddingClientConfig {
const providerConfig = resolveConfiguredProvider(options);
const rawBaseUrl = options.remote?.baseUrl?.trim() || readProviderBaseUrl(providerConfig);
const baseUrl = resolveOllamaApiBase(rawBaseUrl);
const { baseUrl, origin: baseUrlOrigin } = resolveOllamaEmbeddingBaseUrl({
remoteBaseUrl: options.remote?.baseUrl,
providerConfig,
});
const model = normalizeEmbeddingModel(options.model, options.provider);
const headerOverrides = Object.assign({}, providerConfig?.headers, options.remote?.headers);
const headers: Record<string, string> = {
"Content-Type": "application/json",
...headerOverrides,
};
const apiKey = resolveOllamaApiKey(options);
const apiKey = selectOllamaEmbeddingApiKey({
resolved: resolveOllamaEmbeddingResolvedKeys(options, providerConfig),
baseUrl,
baseUrlOrigin,
providerOwnedHost: resolveOllamaApiBase(readProviderBaseUrl(providerConfig)),
});
if (apiKey) {
headers.Authorization = `Bearer ${apiKey}`;
}