fix(ollama): resolve cloud models omitted from tags

This commit is contained in:
Peter Steinberger
2026-04-29 04:37:45 +01:00
parent eb5adc3cd2
commit 04f6ffd8be
4 changed files with 165 additions and 9 deletions

View File

@@ -20,6 +20,21 @@ const promptAndConfigureOllamaMock = vi.hoisted(() =>
);
const ensureOllamaModelPulledMock = vi.hoisted(() => vi.fn(async () => {}));
const buildOllamaProviderMock = vi.hoisted(() => vi.fn());
// Stubbed ./api.js exports: the `show` metadata lookup and the model-definition builder.
const queryOllamaModelShowInfoMock = vi.hoisted(() => vi.fn());
// Mirrors the real builder closely enough for assertions: capabilities drive the
// reasoning/vision/tools flags, and contextWindow falls back to 8192 when absent.
const buildOllamaModelDefinitionMock = vi.hoisted(() =>
  vi.fn((modelId: string, contextWindow?: number, capabilities?: string[]) => {
    const has = (capability: string) => capabilities?.includes(capability) ?? false;
    return {
      id: modelId,
      name: modelId,
      reasoning: has("thinking"),
      input: has("vision") ? ["text", "image"] : ["text"],
      cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      contextWindow: contextWindow ?? 8192,
      maxTokens: 8192,
      compat: capabilities
        ? { supportsTools: capabilities.includes("tools"), supportsUsageInStreaming: true }
        : { supportsUsageInStreaming: true },
    };
  }),
);
// Stub for the configured stream-fn factory; returns an opaque placeholder
// (cast to never) since these tests never invoke the stream itself.
const createConfiguredOllamaStreamFnMock = vi.hoisted(() =>
vi.fn((_params: { model: unknown; providerBaseUrl?: string }) => ({}) as never),
);
@@ -29,6 +44,8 @@ vi.mock("./api.js", () => ({
ensureOllamaModelPulled: ensureOllamaModelPulledMock,
configureOllamaNonInteractive: vi.fn(),
buildOllamaProvider: buildOllamaProviderMock,
queryOllamaModelShowInfo: queryOllamaModelShowInfoMock,
buildOllamaModelDefinition: buildOllamaModelDefinitionMock,
}));
vi.mock("./src/stream.js", async (importOriginal) => {
@@ -43,6 +60,8 @@ beforeEach(() => {
promptAndConfigureOllamaMock.mockClear();
ensureOllamaModelPulledMock.mockClear();
buildOllamaProviderMock.mockReset();
queryOllamaModelShowInfoMock.mockReset();
buildOllamaModelDefinitionMock.mockClear();
createConfiguredOllamaStreamFnMock.mockClear();
});
@@ -420,6 +439,102 @@ describe("ollama plugin", () => {
}
});
it("resolves requested Ollama cloud models that are omitted from tags but confirmed by show", async () => {
  const provider = registerProvider();
  const savedApiKey = process.env.OLLAMA_API_KEY;
  process.env.OLLAMA_API_KEY = "ollama-local";
  // /api/tags only advertises an unrelated cloud model; the requested one is absent.
  buildOllamaProviderMock.mockResolvedValueOnce({
    baseUrl: "http://127.0.0.1:11434",
    api: "ollama",
    models: [
      {
        id: "kimi-k2.5:cloud",
        name: "kimi-k2.5:cloud",
        reasoning: true,
        input: ["text"],
        cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
        contextWindow: 262144,
        maxTokens: 8192,
      },
    ],
  });
  // `show` confirms the requested model exists and reports richer metadata.
  queryOllamaModelShowInfoMock.mockResolvedValueOnce({
    contextWindow: 1048576,
    capabilities: ["completion", "tools", "thinking"],
  });
  // Both prepare and resolve receive an equivalent context; build it fresh per call.
  const makeCtx = () =>
    ({
      config: {},
      provider: "ollama",
      modelId: "deepseek-v4-pro:cloud",
      modelRegistry: { find: vi.fn(() => null) },
    }) as never;
  try {
    await provider.prepareDynamicModel?.(makeCtx());
    expect(queryOllamaModelShowInfoMock).toHaveBeenCalledWith(
      "http://127.0.0.1:11434",
      "deepseek-v4-pro:cloud",
    );
    expect(provider.resolveDynamicModel?.(makeCtx())).toMatchObject({
      provider: "ollama",
      id: "deepseek-v4-pro:cloud",
      api: "ollama",
      baseUrl: "http://127.0.0.1:11434",
      reasoning: true,
      compat: { supportsTools: true },
    });
  } finally {
    // Restore the environment exactly as it was before the test ran.
    if (savedApiKey === undefined) delete process.env.OLLAMA_API_KEY;
    else process.env.OLLAMA_API_KEY = savedApiKey;
  }
});
it("keeps unknown requested Ollama models unresolved when show has no metadata", async () => {
  const provider = registerProvider();
  const savedApiKey = process.env.OLLAMA_API_KEY;
  process.env.OLLAMA_API_KEY = "ollama-local";
  // Tags report nothing, and `show` yields no metadata either.
  buildOllamaProviderMock.mockResolvedValueOnce({
    baseUrl: "http://127.0.0.1:11434",
    api: "ollama",
    models: [],
  });
  queryOllamaModelShowInfoMock.mockResolvedValueOnce({});
  // Both prepare and resolve receive an equivalent context; build it fresh per call.
  const makeCtx = () =>
    ({
      config: {},
      provider: "ollama",
      modelId: "depseek-v4-pro:cloud",
      modelRegistry: { find: vi.fn(() => null) },
    }) as never;
  try {
    await provider.prepareDynamicModel?.(makeCtx());
    expect(provider.resolveDynamicModel?.(makeCtx())).toBeUndefined();
  } finally {
    // Restore the environment exactly as it was before the test ran.
    if (savedApiKey === undefined) delete process.env.OLLAMA_API_KEY;
    else process.env.OLLAMA_API_KEY = savedApiKey;
  }
});
it("skips implicit localhost discovery when a custom remote Ollama provider is configured", async () => {
const provider = registerProvider();

View File

@@ -19,10 +19,13 @@ import {
OPENAI_COMPATIBLE_REPLAY_HOOKS,
} from "openclaw/plugin-sdk/provider-model-shared";
import {
OLLAMA_DEFAULT_BASE_URL,
buildOllamaModelDefinition,
buildOllamaProvider,
configureOllamaNonInteractive,
ensureOllamaModelPulled,
promptAndConfigureOllama,
queryOllamaModelShowInfo,
} from "./api.js";
import {
OLLAMA_DEFAULT_API_KEY,
@@ -100,6 +103,29 @@ function toDynamicOllamaModel(params: {
};
}
/**
 * Attempt to resolve a model the user requested that `/api/tags` did not list
 * (e.g. a cloud model) by asking the Ollama `show` endpoint for its metadata.
 *
 * Returns `undefined` when `show` provides neither a numeric context window
 * nor any capabilities — i.e. the model is effectively unknown.
 */
async function resolveRequestedDynamicOllamaModel(params: {
  provider: string;
  providerConfig: ModelProviderConfig;
  modelId: string;
}): Promise<ProviderRuntimeModel | undefined> {
  const baseUrl = readProviderBaseUrl(params.providerConfig) ?? OLLAMA_DEFAULT_BASE_URL;
  const showInfo = await queryOllamaModelShowInfo(baseUrl, params.modelId);
  const hasContextWindow = typeof showInfo.contextWindow === "number";
  const hasCapabilities = (showInfo.capabilities?.length ?? 0) > 0;
  if (!hasContextWindow && !hasCapabilities) {
    return undefined;
  }
  return toDynamicOllamaModel({
    provider: params.provider,
    providerConfig: params.providerConfig,
    model: buildOllamaModelDefinition(
      params.modelId,
      showInfo.contextWindow,
      showInfo.capabilities,
    ),
  });
}
export default definePluginEntry({
id: "ollama",
name: "Ollama Provider",
@@ -268,16 +294,24 @@ export default definePluginEntry({
}
const baseUrl = readProviderBaseUrl(providerConfig);
const provider = await buildOllamaProvider(baseUrl, { quiet: true });
dynamicModelCache.set(
buildDynamicCacheKey(ctx.provider, baseUrl),
(provider.models ?? []).map((model) =>
toDynamicOllamaModel({
provider: ctx.provider,
providerConfig: provider,
model,
}),
),
const dynamicModels = (provider.models ?? []).map((model) =>
toDynamicOllamaModel({
provider: ctx.provider,
providerConfig: provider,
model,
}),
);
if (!dynamicModels.some((model) => model.id === ctx.modelId)) {
const requestedModel = await resolveRequestedDynamicOllamaModel({
provider: ctx.provider,
providerConfig: provider,
modelId: ctx.modelId,
});
if (requestedModel) {
dynamicModels.push(requestedModel);
}
}
dynamicModelCache.set(buildDynamicCacheKey(ctx.provider, baseUrl), dynamicModels);
},
resolveDynamicModel: (ctx) => {
const providerConfig = resolveConfiguredOllamaProviderConfig({