fix: add ollama discovery regression tests and changelog (#14131)

This commit is contained in:
Gustavo Madeira Santana
2026-02-11 10:45:50 -05:00
parent 5dd910dd1e
commit 2292d2de6d
3 changed files with 48 additions and 2 deletions

View File

@@ -10,6 +10,10 @@ Docs: https://docs.openclaw.ai
- CLI: add `openclaw logs --local-time` to display log timestamps in local timezone. (#13818) Thanks @xialonglee.
- Config: avoid redacting `maxTokens`-like fields during config snapshot redaction, preventing round-trip validation failures in `/config`. (#14006) Thanks @constansino.
### Fixes
- Ollama: use configured `models.providers.ollama.baseUrl` for model discovery and normalize `/v1` endpoints to the native Ollama API root. (#14131) Thanks @shtse8.
## 2026.2.9
### Added

View File

@@ -2,7 +2,27 @@ import { mkdtempSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { describe, expect, it } from "vitest";
import { resolveImplicitProviders } from "./models-config.providers.js";
import { resolveImplicitProviders, resolveOllamaApiBase } from "./models-config.providers.js";
// Canonicalization rules for the Ollama discovery base URL: the native API
// lives at the server root, so OpenAI-compatible "/v1" suffixes are removed.
describe("resolveOllamaApiBase", () => {
	it("returns default localhost base when no configured URL is provided", () => {
		// With no argument, the well-known local Ollama endpoint is used.
		const base = resolveOllamaApiBase();
		expect(base).toBe("http://127.0.0.1:11434");
	});
	it("strips /v1 suffix from OpenAI-compatible URLs", () => {
		// The suffix match is case-insensitive ("/v1" and "/V1").
		expect(resolveOllamaApiBase("http://ollama-host:11434/v1")).toBe(
			"http://ollama-host:11434",
		);
		expect(resolveOllamaApiBase("http://ollama-host:11434/V1")).toBe(
			"http://ollama-host:11434",
		);
	});
	it("keeps URLs without /v1 unchanged", () => {
		// A bare host:port base is already canonical and passes through as-is.
		const untouched = resolveOllamaApiBase("http://ollama-host:11434");
		expect(untouched).toBe("http://ollama-host:11434");
	});
	it("handles trailing slash before canonicalizing", () => {
		// Trailing slashes are normalized whether or not "/v1" is present.
		expect(resolveOllamaApiBase("http://ollama-host:11434/v1/")).toBe(
			"http://ollama-host:11434",
		);
		expect(resolveOllamaApiBase("http://ollama-host:11434/")).toBe(
			"http://ollama-host:11434",
		);
	});
});
describe("Ollama provider", () => {
it("should not include ollama when no API key is configured", async () => {
@@ -33,6 +53,28 @@ describe("Ollama provider", () => {
}
});
it("should preserve explicit ollama baseUrl on implicit provider injection", async () => {
	// A user-configured baseUrl (even an OpenAI-style "/v1" one) must survive
	// implicit provider resolution untouched — normalization happens only at
	// discovery time, not in the stored config.
	const agentDir = mkdtempSync(join(tmpdir(), "openclaw-test-"));
	process.env.OLLAMA_API_KEY = "test-key";
	try {
		const resolved = await resolveImplicitProviders({
			agentDir,
			explicitProviders: {
				ollama: {
					baseUrl: "http://192.168.20.14:11434/v1",
					api: "openai-completions",
					models: [],
				},
			},
		});
		// The explicit value round-trips verbatim.
		expect(resolved?.ollama?.baseUrl).toBe("http://192.168.20.14:11434/v1");
	} finally {
		// Always restore the environment so sibling tests start clean.
		delete process.env.OLLAMA_API_KEY;
	}
});
it("should have correct model structure with streaming disabled (unit test)", () => {
// This test directly verifies the model configuration structure
// since discoverOllamaModels() returns empty array in test mode

View File

@@ -119,7 +119,7 @@ interface OllamaTagsResponse {
* The native Ollama API lives at the root (e.g. `/api/tags`), so we
* strip the `/v1` suffix when present.
*/
function resolveOllamaApiBase(configuredBaseUrl?: string): string {
export function resolveOllamaApiBase(configuredBaseUrl?: string): string {
if (!configuredBaseUrl) {
return OLLAMA_API_BASE_URL;
}