refactor: remove ollama core facades

This commit is contained in:
Peter Steinberger
2026-04-18 18:52:50 +01:00
parent 6b525023d4
commit 235cdb3f81
11 changed files with 113 additions and 119 deletions

View File

@@ -301,6 +301,35 @@ describe("ollama plugin", () => {
expect((payloadSeen?.options as Record<string, unknown> | undefined)?.num_ctx).toBe(202752);
});
it("declares streaming usage support for OpenAI-compatible Ollama routes", () => {
const provider = registerProvider();
expect(
provider.contributeResolvedModelCompat?.({
modelId: "qwen3:32b",
provider: "ollama",
model: {
api: "openai-completions",
provider: "ollama",
id: "qwen3:32b",
baseUrl: "http://127.0.0.1:11434/v1",
},
} as never),
).toEqual({ supportsUsageInStreaming: true });
expect(
provider.contributeResolvedModelCompat?.({
modelId: "qwen3:32b",
provider: "custom",
model: {
api: "openai-completions",
provider: "custom",
id: "qwen3:32b",
baseUrl: "https://proxy.example.com/v1",
},
} as never),
).toBeUndefined();
});
it("owns replay policy for OpenAI-compatible Ollama routes only", () => {
const provider = registerProvider();

View File

@@ -28,6 +28,7 @@ import { resolveOllamaApiBase } from "./src/provider-models.js";
import {
createConfiguredOllamaCompatStreamWrapper,
createConfiguredOllamaStreamFn,
isOllamaCompatProvider,
resolveConfiguredOllamaProviderConfig,
} from "./src/stream.js";
import { createOllamaWebSearchProvider } from "./src/web-search-provider.js";
@@ -93,6 +94,21 @@ function hasMeaningfulExplicitOllamaConfig(providerConfig?: OllamaProviderLikeCo
return false;
}
/**
 * Whether a resolved model is routed over Ollama's OpenAI-compatible
 * transport. True only when the model uses the `openai-completions` API
 * and its provider/baseUrl pair is recognized by `isOllamaCompatProvider`.
 */
function usesOllamaOpenAICompatTransport(model: {
  api?: unknown;
  provider?: unknown;
  baseUrl?: unknown;
}): boolean {
  // Only the OpenAI-completions API can be an Ollama-compatible route.
  if (model.api !== "openai-completions") {
    return false;
  }
  // Narrow the loosely-typed fields before delegating the provider check.
  const providerName =
    typeof model.provider === "string" ? model.provider : undefined;
  const providerBaseUrl =
    typeof model.baseUrl === "string" ? model.baseUrl : undefined;
  return isOllamaCompatProvider({
    provider: providerName,
    baseUrl: providerBaseUrl,
    api: "openai-completions",
  });
}
export default definePluginEntry({
id: "ollama",
name: "Ollama Provider",
@@ -248,6 +264,8 @@ export default definePluginEntry({
});
},
...OPENAI_COMPATIBLE_REPLAY_HOOKS,
contributeResolvedModelCompat: ({ model }) =>
usesOllamaOpenAICompatTransport(model) ? { supportsUsageInStreaming: true } : undefined,
resolveReasoningOutputMode: () => "native",
wrapStreamFn: createConfiguredOllamaCompatStreamWrapper,
createEmbeddingProvider: async ({ config, model, remote }) => {