From 7597fc556ca2d44776540c3c602d55943f8ea0cf Mon Sep 17 00:00:00 2001
From: echoVic
Date: Mon, 23 Feb 2026 16:56:15 +0800
Subject: [PATCH] fix(ollama): pass provider headers to Ollama stream function
 (#24285)

createOllamaStreamFn() only accepted baseUrl, ignoring custom headers
configured in models.providers.<id>.headers. This caused 403 errors when
Ollama endpoints are behind reverse proxies that require auth headers
(e.g. X-OLLAMA-KEY via HAProxy).

Add optional defaultHeaders parameter to createOllamaStreamFn() and
merge them into every fetch request. Provider headers from config are
now passed through at the call site in the embedded runner.

Fixes #24285
---
 src/agents/ollama-stream.ts                  | 6 +++++-
 src/agents/pi-embedded-runner/run/attempt.ts | 2 +-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/src/agents/ollama-stream.ts b/src/agents/ollama-stream.ts
index 5040b37737a..fdff0b2ae65 100644
--- a/src/agents/ollama-stream.ts
+++ b/src/agents/ollama-stream.ts
@@ -405,7 +405,10 @@ function resolveOllamaChatUrl(baseUrl: string): string {
   return `${apiBase}/api/chat`;
 }
 
-export function createOllamaStreamFn(baseUrl: string): StreamFn {
+export function createOllamaStreamFn(
+  baseUrl: string,
+  defaultHeaders?: Record<string, string>,
+): StreamFn {
   const chatUrl = resolveOllamaChatUrl(baseUrl);
 
   return (model, context, options) => {
@@ -440,6 +443,7 @@ export function createOllamaStreamFn(baseUrl: string): StreamFn {
 
     const headers: Record<string, string> = {
       "Content-Type": "application/json",
+      ...defaultHeaders,
       ...options?.headers,
     };
     if (options?.apiKey) {
diff --git a/src/agents/pi-embedded-runner/run/attempt.ts b/src/agents/pi-embedded-runner/run/attempt.ts
index 2f65542a171..c34043a5351 100644
--- a/src/agents/pi-embedded-runner/run/attempt.ts
+++ b/src/agents/pi-embedded-runner/run/attempt.ts
@@ -1022,7 +1022,7 @@ export async function runEmbeddedAttempt(
         modelBaseUrl,
         providerBaseUrl,
       });
-      activeSession.agent.streamFn = createOllamaStreamFn(ollamaBaseUrl);
+      activeSession.agent.streamFn = createOllamaStreamFn(ollamaBaseUrl, params.model.headers);
     } else if (params.model.api === "openai-responses" && params.provider === "openai") {
       const wsApiKey = await params.authStorage.getApiKey(params.provider);
       if (wsApiKey) {