Mirror of https://github.com/openclaw/openclaw.git (synced 2026-05-06 07:20:43 +00:00)

refactor: remove ollama core facades
@@ -301,6 +301,35 @@ describe("ollama plugin", () => {
    expect((payloadSeen?.options as Record<string, unknown> | undefined)?.num_ctx).toBe(202752);
  });

  it("declares streaming usage support for OpenAI-compatible Ollama routes", () => {
    const provider = registerProvider();

    expect(
      provider.contributeResolvedModelCompat?.({
        modelId: "qwen3:32b",
        provider: "ollama",
        model: {
          api: "openai-completions",
          provider: "ollama",
          id: "qwen3:32b",
          baseUrl: "http://127.0.0.1:11434/v1",
        },
      } as never),
    ).toEqual({ supportsUsageInStreaming: true });
    expect(
      provider.contributeResolvedModelCompat?.({
        modelId: "qwen3:32b",
        provider: "custom",
        model: {
          api: "openai-completions",
          provider: "custom",
          id: "qwen3:32b",
          baseUrl: "https://proxy.example.com/v1",
        },
      } as never),
    ).toBeUndefined();
  });

  it("owns replay policy for OpenAI-compatible Ollama routes only", () => {
    const provider = registerProvider();

@@ -28,6 +28,7 @@ import { resolveOllamaApiBase } from "./src/provider-models.js";
import {
  createConfiguredOllamaCompatStreamWrapper,
  createConfiguredOllamaStreamFn,
  isOllamaCompatProvider,
  resolveConfiguredOllamaProviderConfig,
} from "./src/stream.js";
import { createOllamaWebSearchProvider } from "./src/web-search-provider.js";
@@ -93,6 +94,21 @@ function hasMeaningfulExplicitOllamaConfig(providerConfig?: OllamaProviderLikeConfig
  return false;
}

function usesOllamaOpenAICompatTransport(model: {
  api?: unknown;
  provider?: unknown;
  baseUrl?: unknown;
}): boolean {
  return (
    model.api === "openai-completions" &&
    isOllamaCompatProvider({
      provider: typeof model.provider === "string" ? model.provider : undefined,
      baseUrl: typeof model.baseUrl === "string" ? model.baseUrl : undefined,
      api: "openai-completions",
    })
  );
}

export default definePluginEntry({
  id: "ollama",
  name: "Ollama Provider",
@@ -248,6 +264,8 @@ export default definePluginEntry({
      });
    },
    ...OPENAI_COMPATIBLE_REPLAY_HOOKS,
    contributeResolvedModelCompat: ({ model }) =>
      usesOllamaOpenAICompatTransport(model) ? { supportsUsageInStreaming: true } : undefined,
    resolveReasoningOutputMode: () => "native",
    wrapStreamFn: createConfiguredOllamaCompatStreamWrapper,
    createEmbeddingProvider: async ({ config, model, remote }) => {
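
The new contributeResolvedModelCompat hook above only opts a model into supportsUsageInStreaming when it rides the OpenAI-compatible Ollama transport. Below is a minimal standalone sketch of that gate; the helper is illustrative only, since the real check delegates to isOllamaCompatProvider from ./src/stream.js, whose exact heuristics are not shown in this diff.

// Illustrative stand-in for the transport gate above; assumes an Ollama-compatible
// route is marked either by the "ollama" provider id or a local :11434 base URL.
type ModelLike = { api?: unknown; provider?: unknown; baseUrl?: unknown };

function looksLikeOllamaOpenAICompatRoute(model: ModelLike): boolean {
  if (model.api !== "openai-completions") return false;
  if (model.provider === "ollama") return true;
  return typeof model.baseUrl === "string" && model.baseUrl.includes(":11434");
}

const contributeResolvedModelCompat = (model: ModelLike) =>
  looksLikeOllamaOpenAICompatRoute(model) ? { supportsUsageInStreaming: true } : undefined;

// Mirrors the two test cases above: the Ollama route opts in, the custom proxy does not.
console.log(contributeResolvedModelCompat({
  api: "openai-completions", provider: "ollama", baseUrl: "http://127.0.0.1:11434/v1",
})); // -> { supportsUsageInStreaming: true }
console.log(contributeResolvedModelCompat({
  api: "openai-completions", provider: "custom", baseUrl: "https://proxy.example.com/v1",
})); // -> undefined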

@@ -10,4 +10,3 @@ export type {
} from "./models-config.providers.secrets.js";
export { applyNativeStreamingUsageCompat } from "./models-config.providers.policy.js";
export { enforceSourceManagedProviderSecrets } from "./models-config.providers.source-managed.js";
export { resolveOllamaApiBase } from "../plugin-sdk/ollama.js";

@@ -2,22 +2,24 @@ import { describe, expect, it } from "vitest";
import { resolveOpenAICompletionsCompatDefaults } from "./openai-completions-compat.js";

describe("resolveOpenAICompletionsCompatDefaults", () => {
  it("enables streaming usage for local ollama OpenAI-compat endpoints", () => {
  it("keeps streaming usage enabled for provider-declared compatible endpoints", () => {
    expect(
      resolveOpenAICompletionsCompatDefaults({
        provider: "ollama",
        provider: "custom-local",
        endpointClass: "local",
        knownProviderFamily: "ollama",
        knownProviderFamily: "custom-local",
        supportsNativeStreamingUsageCompat: true,
      }).supportsUsageInStreaming,
    ).toBe(true);
  });

  it("keeps streaming usage enabled for custom ollama OpenAI-compat endpoints", () => {
  it("keeps streaming usage enabled for custom provider-declared compatible endpoints", () => {
    expect(
      resolveOpenAICompletionsCompatDefaults({
        provider: "ollama",
        provider: "custom-local",
        endpointClass: "custom",
        knownProviderFamily: "ollama",
        knownProviderFamily: "custom-local",
        supportsNativeStreamingUsageCompat: true,
      }).supportsUsageInStreaming,
    ).toBe(true);
  });

@@ -67,8 +67,6 @@ export function resolveOpenAICompletionsCompatDefaults(
    endpointClass === "mistral-public" ||
    knownProviderFamily === "mistral" ||
    (isDefaultRoute && isDefaultRouteProvider(provider, "chutes"));
  const isOllamaCompatProvider = provider === "ollama";

  return {
    supportsStore:
      !isNonStandard && knownProviderFamily !== "mistral" && !usesExplicitProxyLikeEndpoint,
@@ -79,8 +77,7 @@ export function resolveOpenAICompletionsCompatDefaults(
      endpointClass !== "xai-native" &&
      !usesExplicitProxyLikeEndpoint,
    supportsUsageInStreaming:
      isOllamaCompatProvider ||
      (!isNonStandard && (!usesConfiguredNonOpenAIEndpoint || supportsNativeStreamingUsageCompat)),
      !isNonStandard && (!usesConfiguredNonOpenAIEndpoint || supportsNativeStreamingUsageCompat),
    maxTokensField: usesMaxTokens ? "max_tokens" : "max_completion_tokens",
    thinkingFormat: isZai ? "zai" : isOpenRouterLike ? "openrouter" : "openai",
    visibleReasoningDetailTypes: isOpenRouterLike ? ["response.output_text", "response.text"] : [],
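
Read alongside the reworded tests, the hunk above appears to drop the hard-coded provider === "ollama" escape hatch, so streaming usage now hinges only on the endpoint shape and the provider-declared supportsNativeStreamingUsageCompat flag. A simplified sketch of the resulting decision follows; the field names come from the diff, but the input type here is reduced for illustration.

// Simplified sketch of the supportsUsageInStreaming decision after this change.
interface StreamingUsageInput {
  isNonStandard: boolean;
  usesConfiguredNonOpenAIEndpoint: boolean;
  supportsNativeStreamingUsageCompat: boolean;
}

function supportsUsageInStreaming(input: StreamingUsageInput): boolean {
  // No provider-name special case: a route must either be a standard OpenAI-style
  // endpoint or explicitly declare native streaming-usage compatibility.
  return (
    !input.isNonStandard &&
    (!input.usesConfiguredNonOpenAIEndpoint || input.supportsNativeStreamingUsageCompat)
  );
}

// Matches the updated tests: the declared flag, not the "ollama" provider id, keeps this true.
console.log(supportsUsageInStreaming({
  isNonStandard: false,
  usesConfiguredNonOpenAIEndpoint: true,
  supportsNativeStreamingUsageCompat: true,
})); // -> true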

@@ -13,12 +13,6 @@ import { formatErrorMessage } from "../../../infra/errors.js";
import { resolveHeartbeatSummaryForAgent } from "../../../infra/heartbeat-summary.js";
import { getMachineDisplayName } from "../../../infra/machine-name.js";
import { MAX_IMAGE_BYTES } from "../../../media/constants.js";
import {
  isOllamaCompatProvider,
  resolveOllamaCompatNumCtxEnabled,
  shouldInjectOllamaCompatNumCtx,
  wrapOllamaCompatNumCtx,
} from "../../../plugin-sdk/ollama-runtime.js";
import { getGlobalHookRunner } from "../../../plugins/hook-runner-global.js";
import { resolveToolCallArgumentsEncoding } from "../../../plugins/provider-model-compat.js";
import {
@@ -279,13 +273,6 @@ export {
  queueSessionsYieldInterruptMessage,
  stripSessionsYieldArtifacts,
} from "./attempt.sessions-yield.js";
export {
  isOllamaCompatProvider,
  resolveOllamaCompatNumCtxEnabled,
  shouldInjectOllamaCompatNumCtx,
  wrapOllamaCompatNumCtx,
} from "../../../plugin-sdk/ollama-runtime.js";

export {
  decodeHtmlEntitiesInObject,
  wrapStreamFnRepairMalformedToolCallArguments,

@@ -709,17 +709,6 @@ describe("provider attribution", () => {
      endpointClass: "modelstudio-native",
      supportsNativeStreamingUsageCompat: true,
    });

    expect(
      resolveProviderRequestCapabilities({
        provider: "ollama",
        modelId: "kimi-k2.5:cloud",
        capability: "llm",
        transport: "stream",
      }),
    ).toMatchObject({
      compatibilityFamily: "moonshot",
    });
  });

  it("treats native GitHub Copilot base URLs as known native endpoints", () => {

@@ -549,7 +549,6 @@ export function resolveProviderRequestCapabilities(
  const policy = resolveProviderRequestPolicy(input, env);
  const provider = policy.provider;
  const api = normalizeOptionalLowercaseString(input.api);
  const normalizedModelId = normalizeOptionalLowercaseString(input.modelId);
  const endpointClass = policy.endpointClass;
  const isKnownNativeEndpoint =
    endpointClass === "anthropic-public" ||
@@ -574,12 +573,6 @@ export function resolveProviderRequestCapabilities(
  let compatibilityFamily: ProviderRequestCompatibilityFamily | undefined;
  if (provider && MOONSHOT_COMPAT_PROVIDERS.has(provider)) {
    compatibilityFamily = "moonshot";
  } else if (
    provider === "ollama" &&
    normalizedModelId?.startsWith("kimi-k") &&
    normalizedModelId.includes(":cloud")
  ) {
    compatibilityFamily = "moonshot";
  }

  const isResponsesApi = isOpenAIResponsesApi(api);
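
Judging from the hunk header counts (-574,12 +573,6) and the deleted provider-attribution test above, the ollama-specific kimi-k*:cloud fallback appears to be removed, leaving MOONSHOT_COMPAT_PROVIDERS as the only source of the "moonshot" compatibility family on the core side. A rough sketch of the surviving logic follows; the set contents below are assumed, not taken from this diff.

// Assumed contents; the real MOONSHOT_COMPAT_PROVIDERS set is not shown in this diff.
const MOONSHOT_COMPAT_PROVIDERS = new Set(["moonshot"]);

type CompatibilityFamily = "moonshot" | undefined;

function resolveCompatibilityFamily(provider?: string): CompatibilityFamily {
  // Only explicitly registered providers map to "moonshot"; no core-side model-id
  // sniffing for ollama "kimi-k*:cloud" models remains after this change.
  return provider && MOONSHOT_COMPAT_PROVIDERS.has(provider) ? "moonshot" : undefined;
}

console.log(resolveCompatibilityFamily("moonshot")); // -> "moonshot"
console.log(resolveCompatibilityFamily("ollama"));   // -> undefined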

@@ -1,65 +0,0 @@
type FacadeModule = typeof import("@openclaw/ollama/runtime-api.js");
import { loadBundledPluginPublicSurfaceModuleSync } from "./facade-loader.js";

function loadFacadeModule(): FacadeModule {
  return loadBundledPluginPublicSurfaceModuleSync<FacadeModule>({
    dirName: "ollama",
    artifactBasename: "runtime-api.js",
  });
}

export type OllamaEmbeddingClient = import("@openclaw/ollama/runtime-api.js").OllamaEmbeddingClient;
export const DEFAULT_OLLAMA_EMBEDDING_MODEL = "nomic-embed-text";
export const buildAssistantMessage: FacadeModule["buildAssistantMessage"] = ((...args) =>
  loadFacadeModule().buildAssistantMessage(...args)) as FacadeModule["buildAssistantMessage"];
export const buildOllamaChatRequest: FacadeModule["buildOllamaChatRequest"] = ((...args) =>
  loadFacadeModule().buildOllamaChatRequest(...args)) as FacadeModule["buildOllamaChatRequest"];
export const convertToOllamaMessages: FacadeModule["convertToOllamaMessages"] = ((...args) =>
  loadFacadeModule().convertToOllamaMessages(...args)) as FacadeModule["convertToOllamaMessages"];
export const createConfiguredOllamaCompatNumCtxWrapper: FacadeModule["createConfiguredOllamaCompatNumCtxWrapper"] =
  ((...args) =>
    loadFacadeModule().createConfiguredOllamaCompatNumCtxWrapper(
      ...args,
    )) as FacadeModule["createConfiguredOllamaCompatNumCtxWrapper"];
export const createConfiguredOllamaCompatStreamWrapper: FacadeModule["createConfiguredOllamaCompatStreamWrapper"] =
  ((...args) =>
    loadFacadeModule().createConfiguredOllamaCompatStreamWrapper(
      ...args,
    )) as FacadeModule["createConfiguredOllamaCompatStreamWrapper"];
export const createConfiguredOllamaStreamFn: FacadeModule["createConfiguredOllamaStreamFn"] = ((
  ...args
) =>
  loadFacadeModule().createConfiguredOllamaStreamFn(
    ...args,
  )) as FacadeModule["createConfiguredOllamaStreamFn"];
export const createOllamaStreamFn: FacadeModule["createOllamaStreamFn"] = ((...args) =>
  loadFacadeModule().createOllamaStreamFn(...args)) as FacadeModule["createOllamaStreamFn"];

export const createOllamaEmbeddingProvider: FacadeModule["createOllamaEmbeddingProvider"] = ((
  ...args
) =>
  loadFacadeModule().createOllamaEmbeddingProvider(
    ...args,
  )) as FacadeModule["createOllamaEmbeddingProvider"];
export const isOllamaCompatProvider: FacadeModule["isOllamaCompatProvider"] = ((...args) =>
  loadFacadeModule().isOllamaCompatProvider(...args)) as FacadeModule["isOllamaCompatProvider"];
export const resolveOllamaCompatNumCtxEnabled: FacadeModule["resolveOllamaCompatNumCtxEnabled"] = ((
  ...args
) =>
  loadFacadeModule().resolveOllamaCompatNumCtxEnabled(
    ...args,
  )) as FacadeModule["resolveOllamaCompatNumCtxEnabled"];
export const shouldInjectOllamaCompatNumCtx: FacadeModule["shouldInjectOllamaCompatNumCtx"] = ((
  ...args
) =>
  loadFacadeModule().shouldInjectOllamaCompatNumCtx(
    ...args,
  )) as FacadeModule["shouldInjectOllamaCompatNumCtx"];
export const parseNdjsonStream: FacadeModule["parseNdjsonStream"] = ((...args) =>
  loadFacadeModule().parseNdjsonStream(...args)) as FacadeModule["parseNdjsonStream"];
export const resolveOllamaBaseUrlForRun: FacadeModule["resolveOllamaBaseUrlForRun"] = ((...args) =>
  loadFacadeModule().resolveOllamaBaseUrlForRun(
    ...args,
  )) as FacadeModule["resolveOllamaBaseUrlForRun"];
export const wrapOllamaCompatNumCtx: FacadeModule["wrapOllamaCompatNumCtx"] = ((...args) =>
  loadFacadeModule().wrapOllamaCompatNumCtx(...args)) as FacadeModule["wrapOllamaCompatNumCtx"];

@@ -1,12 +0,0 @@
type FacadeModule = typeof import("@openclaw/ollama/api.js");
import { loadBundledPluginPublicSurfaceModuleSync } from "./facade-loader.js";

function loadFacadeModule(): FacadeModule {
  return loadBundledPluginPublicSurfaceModuleSync<FacadeModule>({
    dirName: "ollama",
    artifactBasename: "api.js",
  });
}

export const resolveOllamaApiBase: FacadeModule["resolveOllamaApiBase"] = ((...args) =>
  loadFacadeModule().resolveOllamaApiBase(...args)) as FacadeModule["resolveOllamaApiBase"];

src/plugins/contracts/core-extension-facade-boundary.test.ts (new file, 57 lines)
@@ -0,0 +1,57 @@
import fs from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import { describe, expect, it } from "vitest";

const repoRoot = fileURLToPath(new URL("../../..", import.meta.url));
const srcRoot = path.join(repoRoot, "src");
const forbiddenOllamaFacadeFiles = [
  "src/plugin-sdk/ollama.ts",
  "src/plugin-sdk/ollama-runtime.ts",
] as const;
const importSpecifierPattern =
  /\b(?:import|export)\s+(?:type\s+)?(?:[^'"]*?\s+from\s+)?["']([^"']+)["']|import\(\s*["']([^"']+)["']\s*\)/g;

function collectSourceFiles(dir: string, files: string[] = []): string[] {
  for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
    if (entry.name === "plugin-sdk") {
      continue;
    }
    const entryPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      collectSourceFiles(entryPath, files);
      continue;
    }
    if (entry.isFile() && entry.name.endsWith(".ts")) {
      files.push(entryPath);
    }
  }
  return files;
}

function toRepoRelative(filePath: string): string {
  return path.relative(repoRoot, filePath).split(path.sep).join("/");
}

describe("core extension facade boundary", () => {
  it("does not expose Ollama plugin facades from core plugin-sdk", () => {
    expect(
      forbiddenOllamaFacadeFiles.filter((file) => fs.existsSync(path.join(repoRoot, file))),
    ).toEqual([]);
  });

  it("does not import Ollama plugin facades from core code", () => {
    const violations: string[] = [];
    for (const filePath of collectSourceFiles(srcRoot)) {
      const source = fs.readFileSync(filePath, "utf8");
      for (const match of source.matchAll(importSpecifierPattern)) {
        const specifier = match[1] ?? match[2];
        if (specifier?.includes("plugin-sdk/ollama")) {
          violations.push(`${toRepoRelative(filePath)} -> ${specifier}`);
        }
      }
    }

    expect(violations).toEqual([]);
  });
});
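
The boundary test's importSpecifierPattern has to catch static imports, re-exports, type-only forms, and dynamic import() calls. Here is a quick standalone check of that same regex against made-up source lines; the sample strings are illustrative only and are not taken from the repository.

// Same regex as the new test above; the sample source lines are fabricated for illustration.
const importSpecifierPattern =
  /\b(?:import|export)\s+(?:type\s+)?(?:[^'"]*?\s+from\s+)?["']([^"']+)["']|import\(\s*["']([^"']+)["']\s*\)/g;

const sampleSource = [
  'import { wrapOllamaCompatNumCtx } from "../../../plugin-sdk/ollama-runtime.js";',
  'export type { OllamaEmbeddingClient } from "./plugin-sdk/ollama.js";',
  'const mod = await import("../plugin-sdk/ollama.js");',
].join("\n");

for (const match of sampleSource.matchAll(importSpecifierPattern)) {
  const specifier = match[1] ?? match[2]; // group 1: static forms, group 2: dynamic import()
  if (specifier?.includes("plugin-sdk/ollama")) {
    console.log("would be flagged:", specifier); // all three sample lines match
  }
}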