refactor: remove ollama legacy shims

This commit is contained in:
Peter Steinberger
2026-03-27 06:38:10 +00:00
parent bd2c208689
commit a3e73daa6b
18 changed files with 15 additions and 195 deletions

View File

@@ -1 +0,0 @@
export { DEFAULT_OLLAMA_EMBEDDING_MODEL } from "./embeddings.js";

View File

@@ -3,7 +3,7 @@ import os from "node:os";
import path from "node:path";
import type { OpenClawConfig } from "openclaw/plugin-sdk/memory-core-host-engine-foundation";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { DEFAULT_OLLAMA_EMBEDDING_MODEL } from "./embeddings-ollama.js";
import { DEFAULT_OLLAMA_EMBEDDING_MODEL } from "./embeddings.js";
import type {
EmbeddingProvider,
EmbeddingProviderRuntime,

View File

@@ -44,10 +44,6 @@
"types": "./dist/plugin-sdk/core.d.ts",
"default": "./dist/plugin-sdk/core.js"
},
"./plugin-sdk/ollama-setup": {
"types": "./dist/plugin-sdk/ollama-setup.d.ts",
"default": "./dist/plugin-sdk/ollama-setup.js"
},
"./plugin-sdk/provider-setup": {
"types": "./dist/plugin-sdk/provider-setup.d.ts",
"default": "./dist/plugin-sdk/provider-setup.js"

View File

@@ -1,7 +1,6 @@
[
"index",
"core",
"ollama-setup",
"provider-setup",
"sandbox",
"self-hosted-provider-setup",

View File

@@ -1 +0,0 @@
export { OLLAMA_DEFAULT_BASE_URL } from "../plugin-sdk/provider-models.js";

View File

@@ -1,10 +1,10 @@
import { afterEach, describe, expect, it, vi } from "vitest";
import { jsonResponse, requestBodyText, requestUrl } from "../test-helpers/http.js";
import {
enrichOllamaModelsWithContext,
resolveOllamaApiBase,
type OllamaTagModel,
} from "./ollama-models.js";
} from "../../extensions/ollama/src/provider-models.js";
import { jsonResponse, requestBodyText, requestUrl } from "../test-helpers/http.js";
describe("ollama-models", () => {
afterEach(() => {

View File

@@ -1,18 +0,0 @@
export type {
OllamaModelWithContext,
OllamaTagModel,
OllamaTagsResponse,
} from "../plugin-sdk/provider-models.js";
export {
buildOllamaModelDefinition,
enrichOllamaModelsWithContext,
fetchOllamaModels,
queryOllamaContextWindow,
resolveOllamaApiBase,
} from "../plugin-sdk/provider-models.js";
export { isReasoningModelHeuristic } from "../plugin-sdk/provider-reasoning.js";
export {
OLLAMA_DEFAULT_CONTEXT_WINDOW,
OLLAMA_DEFAULT_COST,
OLLAMA_DEFAULT_MAX_TOKENS,
} from "../plugin-sdk/provider-models.js";

View File

@@ -6,7 +6,7 @@ import {
buildAssistantMessage,
parseNdjsonStream,
resolveOllamaBaseUrlForRun,
} from "./ollama-stream.js";
} from "../../extensions/ollama/src/stream.js";
describe("convertToOllamaMessages", () => {
it("converts user text messages", () => {

View File

@@ -1,9 +0,0 @@
export {
buildAssistantMessage,
convertToOllamaMessages,
createConfiguredOllamaStreamFn,
createOllamaStreamFn,
OLLAMA_NATIVE_BASE_URL,
parseNdjsonStream,
resolveOllamaBaseUrlForRun,
} from "../plugin-sdk/ollama-stream.js";

View File

@@ -1,100 +0,0 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
import { streamSimple } from "@mariozechner/pi-ai";
import type { OpenClawConfig } from "../../../config/config.js";
import { normalizeProviderId } from "../../model-selection.js";
/**
 * Heuristically decides whether a model definition points at an Ollama
 * (OpenAI-compatible) endpoint.
 *
 * A model matches when:
 *  - its normalized provider id is exactly "ollama", or
 *  - its base URL targets localhost on Ollama's default port 11434, or
 *  - its provider id merely mentions "ollama" AND the base URL uses port
 *    11434 with a root or /v1 path (remote/LAN deployments).
 *
 * Returns false when there is no base URL or it cannot be parsed.
 */
export function isOllamaCompatProvider(model: {
  provider?: string;
  baseUrl?: string;
  api?: string;
}): boolean {
  const providerId = normalizeProviderId(model.provider ?? "");
  if (providerId === "ollama") {
    return true;
  }
  if (!model.baseUrl) {
    return false;
  }
  let url: URL;
  try {
    url = new URL(model.baseUrl);
  } catch {
    // Unparsable base URL — cannot be a recognizable Ollama endpoint.
    return false;
  }
  const host = url.hostname.toLowerCase();
  const onOllamaPort = url.port === "11434";
  // Node's WHATWG URL keeps IPv6 brackets in hostname, hence both forms.
  const isLoopback =
    host === "localhost" || host === "127.0.0.1" || host === "::1" || host === "[::1]";
  if (isLoopback && onOllamaPort) {
    return true;
  }
  // Allow remote/LAN Ollama OpenAI-compatible endpoints when the provider id
  // itself indicates Ollama usage (e.g. "my-ollama").
  const providerHintsOllama = providerId.includes("ollama");
  const hasCompatPath = url.pathname === "/" || /^\/v1\/?$/i.test(url.pathname);
  return providerHintsOllama && onOllamaPort && hasCompatPath;
}
/**
 * Resolves whether `num_ctx` injection is enabled for the given provider.
 *
 * Defaults to true (inject) unless the matching entry under
 * `config.models.providers` explicitly sets
 * `injectNumCtxForOpenAICompat: false`. An exact provider-id key match wins;
 * otherwise the first entry whose normalized id matches is used.
 */
export function resolveOllamaCompatNumCtxEnabled(params: {
  config?: OpenClawConfig;
  providerId?: string;
}): boolean {
  const id = params.providerId?.trim();
  if (!id) {
    return true;
  }
  const providers = params.config?.models?.providers;
  if (!providers) {
    return true;
  }
  // Exact key match takes precedence over normalized matching.
  const exact = providers[id];
  if (exact) {
    return exact.injectNumCtxForOpenAICompat ?? true;
  }
  const wanted = normalizeProviderId(id);
  const match = Object.entries(providers).find(
    ([candidateId]) => normalizeProviderId(candidateId) === wanted,
  );
  return match ? (match[1].injectNumCtxForOpenAICompat ?? true) : true;
}
/**
 * Combines all gating checks for `num_ctx` injection: the request must use
 * the "openai-completions" API, target an Ollama-compatible provider, and
 * the provider config must not have opted out of injection.
 */
export function shouldInjectOllamaCompatNumCtx(params: {
  model: { api?: string; provider?: string; baseUrl?: string };
  config?: OpenClawConfig;
  providerId?: string;
}): boolean {
  const { model, config, providerId } = params;
  return (
    model.api === "openai-completions" &&
    isOllamaCompatProvider(model) &&
    resolveOllamaCompatNumCtxEnabled({ config, providerId })
  );
}
/**
 * Wraps a stream function so every outgoing object payload carries
 * `options.num_ctx = numCtx` (Ollama's context-window setting). Non-object
 * payloads are forwarded to the original `onPayload` untouched. Falls back
 * to `streamSimple` when no base stream function is supplied.
 */
export function wrapOllamaCompatNumCtx(baseFn: StreamFn | undefined, numCtx: number): StreamFn {
  const delegate = baseFn ?? streamSimple;
  return (model, context, options) => {
    const injectNumCtx = (payload: unknown) => {
      if (!payload || typeof payload !== "object") {
        // Nothing mutable here; pass straight through.
        return options?.onPayload?.(payload, model);
      }
      const record = payload as Record<string, unknown>;
      if (!record.options || typeof record.options !== "object") {
        record.options = {};
      }
      (record.options as Record<string, unknown>).num_ctx = numCtx;
      return options?.onPayload?.(payload, model);
    };
    return delegate(model, context, { ...options, onPayload: injectNumCtx });
  };
}

View File

@@ -7,6 +7,12 @@ import {
DefaultResourceLoader,
SessionManager,
} from "@mariozechner/pi-coding-agent";
import {
isOllamaCompatProvider,
resolveOllamaCompatNumCtxEnabled,
shouldInjectOllamaCompatNumCtx,
wrapOllamaCompatNumCtx,
} from "../../../../extensions/ollama/src/stream.js";
import {
resolveTelegramInlineButtonsScope,
resolveTelegramReactionLevel,
@@ -131,7 +137,6 @@ import {
finalizeAttemptContextEngineTurn,
runAttemptContextEngineBootstrap,
} from "./attempt.context-engine-helpers.js";
import { shouldInjectOllamaCompatNumCtx, wrapOllamaCompatNumCtx } from "./attempt.ollama-compat.js";
import {
buildAfterTurnRuntimeContext,
prependSystemPromptAddition,
@@ -197,7 +202,7 @@ export {
resolveOllamaCompatNumCtxEnabled,
shouldInjectOllamaCompatNumCtx,
wrapOllamaCompatNumCtx,
} from "./attempt.ollama-compat.js";
} from "../../../../extensions/ollama/src/stream.js";
export {
decodeHtmlEntitiesInObject,
wrapStreamFnRepairMalformedToolCallArguments,

View File

@@ -1,12 +1,12 @@
import { afterEach, describe, expect, it, vi } from "vitest";
import type { RuntimeEnv } from "../runtime.js";
import { jsonResponse, requestBodyText, requestUrl } from "../test-helpers/http.js";
import type { WizardPrompter } from "../wizard/prompts.js";
import {
configureOllamaNonInteractive,
ensureOllamaModelPulled,
promptAndConfigureOllama,
} from "./ollama-setup.js";
} from "../../extensions/ollama/src/setup.js";
import type { RuntimeEnv } from "../runtime.js";
import { jsonResponse, requestBodyText, requestUrl } from "../test-helpers/http.js";
import type { WizardPrompter } from "../wizard/prompts.js";
const upsertAuthProfileWithLock = vi.hoisted(() => vi.fn(async () => {}));
vi.mock("../agents/auth-profiles.js", () => ({

View File

@@ -1,6 +0,0 @@
export {
buildOllamaProvider,
configureOllamaNonInteractive,
ensureOllamaModelPulled,
promptAndConfigureOllama,
} from "../plugin-sdk/provider-setup.js";

View File

@@ -1,19 +0,0 @@
export type {
OpenClawPluginApi,
ProviderAuthContext,
ProviderAuthMethodNonInteractiveContext,
ProviderAuthResult,
ProviderDiscoveryContext,
} from "../plugins/types.js";
export {
OLLAMA_DEFAULT_BASE_URL,
OLLAMA_DEFAULT_MODEL,
} from "../../extensions/ollama/src/defaults.js";
export {
buildOllamaProvider,
configureOllamaNonInteractive,
ensureOllamaModelPulled,
promptAndConfigureOllama,
} from "../../extensions/ollama/src/setup.js";

View File

@@ -1,9 +0,0 @@
export {
buildAssistantMessage,
convertToOllamaMessages,
createConfiguredOllamaStreamFn,
createOllamaStreamFn,
OLLAMA_NATIVE_BASE_URL,
parseNdjsonStream,
resolveOllamaBaseUrlForRun,
} from "../../extensions/ollama/src/stream.js";

View File

@@ -549,7 +549,6 @@ describe("plugin-sdk subpath exports", () => {
"buildSglangProvider",
"configureOpenAICompatibleSelfHostedProviderNonInteractive",
]);
expectSourceMentions("ollama-setup", ["buildOllamaProvider", "configureOllamaNonInteractive"]);
expectSourceMentions("sandbox", ["registerSandboxBackend", "runPluginCommandWithTimeout"]);
expectSourceMentions("secret-input", [

View File

@@ -142,14 +142,6 @@ describe("provider discovery contract", () => {
buildSglangProvider: (...args: unknown[]) => buildSglangProviderMock(...args),
};
});
vi.doMock("openclaw/plugin-sdk/ollama-setup", async () => {
const actual = await vi.importActual<object>("openclaw/plugin-sdk/ollama-setup");
return {
...actual,
buildOllamaProvider: (...args: unknown[]) => buildOllamaProviderMock(...args),
};
});
({ runProviderCatalog } = await import("../provider-discovery.js"));
const [
{ default: githubCopilotPlugin },

View File

@@ -1,8 +0,0 @@
export {
OLLAMA_DEFAULT_BASE_URL,
OLLAMA_DEFAULT_MODEL,
buildOllamaProvider,
configureOllamaNonInteractive,
ensureOllamaModelPulled,
promptAndConfigureOllama,
} from "../plugin-sdk/provider-setup.js";