fix(ollama): map the configured `max` thinking level to a compatible `think` value

This commit is contained in:
Peter Steinberger
2026-04-27 03:13:14 +01:00
parent 1882a8e5ea
commit 0b46227d6c
2 changed files with 37 additions and 2 deletions

View File

@@ -1246,6 +1246,35 @@ describe("createOllamaStreamFn", () => {
);
});
// Regression test: a natively configured `thinking: "max"` must be emitted as
// the stable top-level `think: "high"` request field, and must not leak into
// the request's `options` object.
it("maps configured native Ollama params.thinking=max to the stable top-level think value", async () => {
  const ndjsonChunks = [
    '{"model":"m","created_at":"t","message":{"role":"assistant","content":"ok"},"done":false}',
    '{"model":"m","created_at":"t","message":{"role":"assistant","content":""},"done":true,"prompt_eval_count":1,"eval_count":1}',
  ];
  await withMockNdjsonFetch(ndjsonChunks, async (mockedFetch) => {
    // Drive a full stream so the request is actually issued and completes.
    const ollamaStream = await createOllamaTestStream({
      baseUrl: "http://ollama-host:11434",
      model: { params: { thinking: "max" } },
    });
    const collected = await collectStreamEvents(ollamaStream);
    expect(collected.at(-1)?.type).toBe("done");

    // Inspect the captured fetch request body.
    const init = getGuardedFetchCall(mockedFetch).init ?? {};
    const { body } = init;
    if (typeof body !== "string") {
      throw new Error("Expected string request body");
    }
    const parsed = JSON.parse(body) as {
      think?: string;
      options?: { think?: string };
    };
    expect(parsed.think).toBe("high");
    expect(parsed.options?.think).toBeUndefined();
  });
});
it("uses the default loopback policy when baseUrl is empty", async () => {
await withMockNdjsonFetch(
[

View File

@@ -152,7 +152,7 @@ export function wrapOllamaCompatNumCtx(baseFn: StreamFn | undefined, numCtx: num
});
}
type OllamaThinkValue = boolean | "low" | "medium" | "high" | "max";
type OllamaThinkValue = boolean | "low" | "medium" | "high";
const OLLAMA_OPTION_PARAM_KEYS = new Set([
"num_keep",
@@ -215,9 +215,15 @@ function resolveOllamaThinkParamValue(
if (raw === "off") {
return false;
}
if (raw === "low" || raw === "medium" || raw === "high" || raw === "max") {
if (raw === "low" || raw === "medium" || raw === "high") {
return raw;
}
if (raw === "minimal") {
return "low";
}
if (raw === "xhigh" || raw === "adaptive" || raw === "max") {
return "high";
}
return undefined;
}