refactor: move memory engine behind plugin adapters

This commit is contained in:
Peter Steinberger
2026-03-27 00:40:45 +00:00
parent aed6283faa
commit dbf78de7c6
142 changed files with 1610 additions and 966 deletions

View File

@@ -59,6 +59,7 @@ function fakeApi(overrides: Partial<OpenClawPluginApi> = {}): OpenClawPluginApi
registerMemoryPromptSection() {},
registerMemoryFlushPlan() {},
registerMemoryRuntime() {},
registerMemoryEmbeddingProvider() {},
on() {},
resolvePath: (p) => p,
...overrides,

View File

@@ -59,12 +59,14 @@ describe("plugin registration", () => {
const registerMemoryPromptSection = vi.fn();
const registerMemoryFlushPlan = vi.fn();
const registerMemoryRuntime = vi.fn();
const registerMemoryEmbeddingProvider = vi.fn();
const registerCli = vi.fn();
const api = {
registerTool,
registerMemoryPromptSection,
registerMemoryFlushPlan,
registerMemoryRuntime,
registerMemoryEmbeddingProvider,
registerCli,
};
@@ -73,6 +75,7 @@ describe("plugin registration", () => {
expect(registerMemoryPromptSection).toHaveBeenCalledWith(buildPromptSection);
expect(registerMemoryFlushPlan).toHaveBeenCalledWith(buildMemoryFlushPlan);
expect(registerMemoryRuntime).toHaveBeenCalledWith(memoryRuntime);
expect(registerMemoryEmbeddingProvider).toHaveBeenCalledTimes(6);
expect(registerTool).toHaveBeenCalledTimes(2);
expect(registerTool.mock.calls[0]?.[1]).toEqual({ names: ["memory_search"] });
expect(registerTool.mock.calls[1]?.[1]).toEqual({ names: ["memory_get"] });

View File

@@ -6,6 +6,7 @@ import {
DEFAULT_MEMORY_FLUSH_PROMPT,
DEFAULT_MEMORY_FLUSH_SOFT_TOKENS,
} from "./src/flush-plan.js";
import { registerBuiltInMemoryEmbeddingProviders } from "./src/memory/provider-adapters.js";
import { buildPromptSection } from "./src/prompt-section.js";
import { memoryRuntime } from "./src/runtime-provider.js";
import { createMemoryGetTool, createMemorySearchTool } from "./src/tools.js";
@@ -23,6 +24,7 @@ export default definePluginEntry({
description: "File-backed memory search tools and CLI",
kind: "memory",
register(api) {
registerBuiltInMemoryEmbeddingProviders(api);
api.registerMemoryPromptSection(buildPromptSection);
api.registerMemoryFlushPlan(buildMemoryFlushPlan);
api.registerMemoryRuntime(memoryRuntime);

View File

@@ -1 +1 @@
export * from "openclaw/plugin-sdk/memory-core-host";
export * from "openclaw/plugin-sdk/memory-core-host-runtime";

View File

@@ -0,0 +1 @@
export * from "openclaw/plugin-sdk/memory-core-host-engine";

View File

@@ -0,0 +1 @@
export { resolveMemoryBackendConfig } from "../engine-host-api.js";

View File

@@ -2,11 +2,11 @@ import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterAll, beforeAll, beforeEach, expect, vi, type Mock } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import type { MemoryIndexManager } from "../plugin-sdk/memory-core.js";
import type { OpenClawConfig } from "../engine-host-api.js";
import type { MemoryIndexManager, MemorySearchManager } from "./index.js";
type EmbeddingTestMocksModule = typeof import("./embedding.test-mocks.js");
type MemoryIndexModule = typeof import("../plugin-sdk/memory-core.js");
type MemoryIndexModule = typeof import("./index.js");
type MemorySearchManagerHandle = Awaited<
ReturnType<MemoryIndexModule["getMemorySearchManager"]>
>["manager"];
@@ -64,7 +64,7 @@ export function installEmbeddingManagerFixture(opts: {
const embeddingMocks = await import("./embedding.test-mocks.js");
embedBatch = embeddingMocks.getEmbedBatchMock();
resetEmbeddingMocks = embeddingMocks.resetEmbeddingMocks;
({ getMemorySearchManager } = await import("../plugin-sdk/memory-core.js"));
({ getMemorySearchManager } = await import("./index.js"));
fixtureRoot = await fs.mkdtemp(path.join(os.tmpdir(), opts.fixturePrefix));
workspaceDir = path.join(fixtureRoot, "workspace");
memoryDir = path.join(workspaceDir, "memory");

View File

@@ -0,0 +1 @@
export { DEFAULT_OLLAMA_EMBEDDING_MODEL } from "./embeddings.js";

View File

@@ -0,0 +1,175 @@
import {
DEFAULT_GEMINI_EMBEDDING_MODEL,
DEFAULT_LOCAL_MODEL,
DEFAULT_MISTRAL_EMBEDDING_MODEL,
DEFAULT_OLLAMA_EMBEDDING_MODEL,
DEFAULT_OPENAI_EMBEDDING_MODEL,
DEFAULT_VOYAGE_EMBEDDING_MODEL,
getMemoryEmbeddingProvider,
listMemoryEmbeddingProviders,
type MemoryEmbeddingProvider,
type MemoryEmbeddingProviderAdapter,
type MemoryEmbeddingProviderCreateOptions,
type MemoryEmbeddingProviderRuntime,
} from "../engine-host-api.js";
import { canAutoSelectLocal } from "./provider-adapters.js";
export {
DEFAULT_GEMINI_EMBEDDING_MODEL,
DEFAULT_LOCAL_MODEL,
DEFAULT_MISTRAL_EMBEDDING_MODEL,
DEFAULT_OLLAMA_EMBEDDING_MODEL,
DEFAULT_OPENAI_EMBEDDING_MODEL,
DEFAULT_VOYAGE_EMBEDDING_MODEL,
} from "../engine-host-api.js";
/** Re-export of the host's embedding provider shape under the legacy local name. */
export type EmbeddingProvider = MemoryEmbeddingProvider;
/** Identifier of a registered embedding provider adapter (e.g. "openai", "local"). */
export type EmbeddingProviderId = string;
/** Provider id requested by the caller; "auto" triggers priority-based selection. */
export type EmbeddingProviderRequest = string;
/** Provider id to fall back to when the primary fails; "none" disables fallback. */
export type EmbeddingProviderFallback = string;
/** Runtime handle (cache-key data, batch hooks) returned alongside a provider. */
export type EmbeddingProviderRuntime = MemoryEmbeddingProviderRuntime;
/**
 * Outcome of a provider-creation attempt.
 * `provider` is null when no provider could be created (see
 * `providerUnavailableReason`); `fallbackFrom`/`fallbackReason` are set when
 * the result came from the configured fallback rather than the request.
 */
export type EmbeddingProviderResult = {
provider: EmbeddingProvider | null;
requestedProvider: EmbeddingProviderRequest;
fallbackFrom?: string;
fallbackReason?: string;
providerUnavailableReason?: string;
runtime?: EmbeddingProviderRuntime;
};
/** Host create-options plus the requested provider id and fallback choice. */
type CreateEmbeddingProviderOptions = MemoryEmbeddingProviderCreateOptions & {
provider: EmbeddingProviderRequest;
fallback: EmbeddingProviderFallback;
};
/** Normalize an unknown thrown value into a human-readable message. */
function formatErrorMessage(err: unknown): string {
  if (err instanceof Error) {
    return err.message;
  }
  return String(err);
}
/**
 * Produce a setup-error message for a provider adapter, preferring the
 * adapter's own formatter when it implements one.
 */
function formatProviderError(adapter: MemoryEmbeddingProviderAdapter, err: unknown): string {
  const custom = adapter.formatSetupError?.(err);
  return custom ?? formatErrorMessage(err);
}
/**
 * Whether auto-selection should skip this adapter's failure and try the
 * next one. Adapters opt in via their own hook; the default is to stop.
 */
function shouldContinueAutoSelection(
  adapter: MemoryEmbeddingProviderAdapter,
  err: unknown,
): boolean {
  const verdict = adapter.shouldContinueAutoSelection?.(err);
  return verdict ?? false;
}
/**
 * Look up a registered embedding-provider adapter by id.
 * Throws a descriptive error when no adapter with that id is registered.
 */
function getAdapter(id: string): MemoryEmbeddingProviderAdapter {
  const adapter = getMemoryEmbeddingProvider(id);
  if (adapter) {
    return adapter;
  }
  throw new Error(`Unknown memory embedding provider: ${id}`);
}
/**
 * Registered adapters eligible for "auto" selection, ordered by their
 * declared priority (lower value first). Adapters without a numeric
 * priority never auto-select; the "local" adapter is additionally gated
 * on a usable local model path.
 */
function listAutoSelectAdapters(
  options: CreateEmbeddingProviderOptions,
): MemoryEmbeddingProviderAdapter[] {
  const eligible = listMemoryEmbeddingProviders().filter((adapter) => {
    if (typeof adapter.autoSelectPriority !== "number") {
      return false;
    }
    return adapter.id === "local" ? canAutoSelectLocal(options.local?.modelPath) : true;
  });
  return eligible.toSorted(
    (a, b) =>
      (a.autoSelectPriority ?? Number.MAX_SAFE_INTEGER) -
      (b.autoSelectPriority ?? Number.MAX_SAFE_INTEGER),
  );
}
/**
 * Resolve the effective model for an adapter: the caller's model
 * (whitespace-trimmed) when non-empty, otherwise the adapter's default,
 * otherwise the empty string.
 */
function resolveProviderModel(
  adapter: MemoryEmbeddingProviderAdapter,
  requestedModel: string,
): string {
  const explicit = requestedModel.trim();
  return explicit !== "" ? explicit : (adapter.defaultModel ?? "");
}
/**
 * Create a provider through a single adapter: resolve the effective model
 * first, then echo back which provider id the caller requested alongside
 * the adapter's provider and runtime handles.
 */
async function createWithAdapter(
  adapter: MemoryEmbeddingProviderAdapter,
  options: CreateEmbeddingProviderOptions,
): Promise<EmbeddingProviderResult> {
  const model = resolveProviderModel(adapter, options.model);
  const created = await adapter.create({ ...options, model });
  return {
    provider: created.provider,
    requestedProvider: options.provider,
    runtime: created.runtime,
  };
}
/**
 * Build an `Error` with its `cause` attached, without relying on the
 * ES2022 `Error` options-bag constructor.
 */
function errorWithCause(message: string, cause: unknown): Error {
  const wrapped = new Error(message) as Error & { cause?: unknown };
  wrapped.cause = cause;
  return wrapped;
}

/**
 * Handle the `provider: "auto"` path: try each eligible adapter in
 * priority order, collecting skippable failures. Returns a null-provider
 * result (with the accumulated reasons) when every adapter was skipped,
 * and rethrows immediately on a non-skippable failure.
 */
async function createAutoSelectedProvider(
  options: CreateEmbeddingProviderOptions,
): Promise<EmbeddingProviderResult> {
  const reasons: string[] = [];
  for (const adapter of listAutoSelectAdapters(options)) {
    try {
      const result = await createWithAdapter(adapter, {
        ...options,
        provider: adapter.id,
      });
      return { ...result, requestedProvider: "auto" };
    } catch (err) {
      const message = formatProviderError(adapter, err);
      if (!shouldContinueAutoSelection(adapter, err)) {
        throw errorWithCause(message, err);
      }
      reasons.push(message);
    }
  }
  return {
    provider: null,
    requestedProvider: "auto",
    providerUnavailableReason:
      reasons.length > 0 ? reasons.join("\n\n") : "No embeddings provider available.",
  };
}

/**
 * Create a memory embedding provider for the requested id.
 *
 * With `provider: "auto"`, adapters are tried in auto-select priority
 * order. Otherwise the named adapter is used; on failure the configured
 * `fallback` adapter is attempted (unless it is "none", empty, or the
 * same id), and the result records `fallbackFrom`/`fallbackReason`.
 *
 * @throws Error (with `cause` set to the primary failure) when the
 *   provider cannot be created and no fallback succeeds, or when an
 *   unknown provider id is requested.
 */
export async function createEmbeddingProvider(
  options: CreateEmbeddingProviderOptions,
): Promise<EmbeddingProviderResult> {
  if (options.provider === "auto") {
    return await createAutoSelectedProvider(options);
  }
  const primaryAdapter = getAdapter(options.provider);
  try {
    return await createWithAdapter(primaryAdapter, options);
  } catch (primaryErr) {
    const reason = formatProviderError(primaryAdapter, primaryErr);
    const fallback = options.fallback;
    if (!fallback || fallback === "none" || fallback === options.provider) {
      throw errorWithCause(reason, primaryErr);
    }
    const fallbackAdapter = getAdapter(fallback);
    try {
      const fallbackResult = await createWithAdapter(fallbackAdapter, {
        ...options,
        provider: fallback,
      });
      return {
        ...fallbackResult,
        requestedProvider: options.provider,
        fallbackFrom: options.provider,
        fallbackReason: reason,
      };
    } catch (fallbackErr) {
      const fallbackReason = formatProviderError(fallbackAdapter, fallbackErr);
      // Cause intentionally remains the primary failure (matches prior
      // behavior); the fallback failure is surfaced in the message text.
      throw errorWithCause(
        `${reason}\n\nFallback to ${fallback} failed: ${fallbackReason}`,
        primaryErr,
      );
    }
  }
}

View File

@@ -1,9 +1,5 @@
import { describe, expect, it } from "vitest";
import {
bm25RankToScore,
buildFtsQuery,
mergeHybridResults,
} from "../../extensions/memory-core/src/memory/hybrid.js";
import { bm25RankToScore, buildFtsQuery, mergeHybridResults } from "./hybrid.js";
describe("memory hybrid helpers", () => {
it("buildFtsQuery tokenizes and AND-joins", () => {

View File

@@ -5,9 +5,9 @@ import os from "node:os";
import path from "node:path";
import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
import "./test-runtime-mocks.js";
import type { MemoryIndexManager } from "../../extensions/memory-core/src/memory/index.js";
import type { MemoryIndexManager } from "./index.js";
type MemoryIndexModule = typeof import("../../extensions/memory-core/src/memory/index.js");
type MemoryIndexModule = typeof import("./index.js");
let getMemorySearchManager: MemoryIndexModule["getMemorySearchManager"];
let closeAllMemorySearchManagers: MemoryIndexModule["closeAllMemorySearchManagers"];
@@ -81,13 +81,15 @@ vi.mock("./embeddings.js", () => {
},
...(providerId === "gemini"
? {
gemini: {
baseUrl: "https://generativelanguage.googleapis.com/v1beta",
headers: {},
model,
modelPath: `models/${model}`,
apiKeys: ["test-key"],
outputDimensionality: options.outputDimensionality,
runtime: {
id: "gemini",
cacheKeyData: {
provider: "gemini",
baseUrl: "https://generativelanguage.googleapis.com/v1beta",
model,
outputDimensionality: options.outputDimensionality,
headers: [],
},
},
}
: {}),
@@ -131,8 +133,7 @@ describe("memory index", () => {
beforeAll(async () => {
vi.resetModules();
await import("./test-runtime-mocks.js");
({ getMemorySearchManager, closeAllMemorySearchManagers } =
await import("../../extensions/memory-core/src/memory/index.js"));
({ getMemorySearchManager, closeAllMemorySearchManagers } = await import("./index.js"));
fixtureRoot = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-mem-fixtures-"));
workspaceDir = path.join(fixtureRoot, "workspace");
memoryDir = path.join(workspaceDir, "memory");

View File

@@ -3,7 +3,7 @@ export type {
MemoryEmbeddingProbeResult,
MemorySearchManager,
MemorySearchResult,
} from "../api.js";
} from "../engine-host-api.js";
export {
closeAllMemorySearchManagers,
getMemorySearchManager,

View File

@@ -0,0 +1 @@
export { buildFileEntry } from "../engine-host-api.js";

View File

@@ -1,6 +1,5 @@
import fs from "node:fs/promises";
import {
buildGeminiEmbeddingRequest,
buildMultimodalChunkForIndexing,
chunkMarkdown,
createSubsystemLogger,
@@ -11,19 +10,12 @@ import {
hashText,
parseEmbedding,
remapChunkLines,
runGeminiEmbeddingBatches,
runOpenAiEmbeddingBatches,
runVoyageEmbeddingBatches,
type EmbeddingInput,
type GeminiBatchRequest,
type MemoryChunk,
type MemoryFileEntry,
type MemorySource,
type OpenAiBatchRequest,
type SessionFileEntry,
type VoyageBatchRequest,
OPENAI_BATCH_ENDPOINT,
} from "../api.js";
} from "../engine-host-api.js";
import { MemoryManagerSyncOps } from "./manager-sync-ops.js";
const VECTOR_TABLE = "chunks_vec";
@@ -229,59 +221,67 @@ export abstract class MemoryManagerEmbeddingOps extends MemoryManagerSyncOps {
if (!this.provider) {
return hashText(JSON.stringify({ provider: "none", model: "fts-only" }));
}
if (this.provider.id === "openai" && this.openAi) {
const entries = Object.entries(this.openAi.headers)
.filter(([key]) => key.toLowerCase() !== "authorization")
.toSorted(([a], [b]) => a.localeCompare(b))
.map(([key, value]) => [key, value]);
return hashText(
JSON.stringify({
provider: "openai",
baseUrl: this.openAi.baseUrl,
model: this.openAi.model,
headers: entries,
}),
);
}
if (this.provider.id === "gemini" && this.gemini) {
const entries = Object.entries(this.gemini.headers)
.filter(([key]) => {
const lower = key.toLowerCase();
return lower !== "authorization" && lower !== "x-goog-api-key";
})
.toSorted(([a], [b]) => a.localeCompare(b))
.map(([key, value]) => [key, value]);
return hashText(
JSON.stringify({
provider: "gemini",
baseUrl: this.gemini.baseUrl,
model: this.gemini.model,
outputDimensionality: this.gemini.outputDimensionality,
headers: entries,
}),
);
if (this.providerRuntime?.cacheKeyData) {
return hashText(JSON.stringify(this.providerRuntime.cacheKeyData));
}
return hashText(JSON.stringify({ provider: this.provider.id, model: this.provider.model }));
}
private buildBatchDebug(source: MemorySource, chunks: MemoryChunk[]) {
return (message: string, data?: Record<string, unknown>) =>
log.debug(
message,
data ? { ...data, source, chunks: chunks.length } : { source, chunks: chunks.length },
);
}
private async embedChunksWithBatch(
chunks: MemoryChunk[],
entry: MemoryFileEntry | SessionFileEntry,
_entry: MemoryFileEntry | SessionFileEntry,
source: MemorySource,
): Promise<number[][]> {
if (!this.provider) {
const batchEmbed = this.providerRuntime?.batchEmbed;
if (!this.provider || !batchEmbed) {
return this.embedChunksInBatches(chunks);
}
if (this.provider.id === "openai" && this.openAi) {
return this.embedChunksWithOpenAiBatch(chunks, entry, source);
if (chunks.length === 0) {
return [];
}
if (this.provider.id === "gemini" && this.gemini) {
return this.embedChunksWithGeminiBatch(chunks, entry, source);
const { embeddings, missing } = this.collectCachedEmbeddings(chunks);
if (missing.length === 0) {
return embeddings;
}
if (this.provider.id === "voyage" && this.voyage) {
return this.embedChunksWithVoyageBatch(chunks, entry, source);
const missingChunks = missing.map((item) => item.chunk);
const batchResult = await this.runBatchWithFallback({
provider: this.provider.id,
run: async () =>
await batchEmbed({
agentId: this.agentId,
chunks: missingChunks,
wait: this.batch.wait,
concurrency: this.batch.concurrency,
pollIntervalMs: this.batch.pollIntervalMs,
timeoutMs: this.batch.timeoutMs,
debug: this.buildBatchDebug(source, chunks),
}),
fallback: async () => await this.embedChunksInBatches(chunks),
});
if (!batchResult) {
return this.embedChunksInBatches(chunks);
}
return this.embedChunksInBatches(chunks);
const toCache: Array<{ hash: string; embedding: number[] }> = [];
for (let index = 0; index < missing.length; index += 1) {
const item = missing[index];
const embedding = batchResult[index] ?? [];
if (!item) {
continue;
}
embeddings[item.index] = embedding;
toCache.push({ hash: item.chunk.hash, embedding });
}
this.upsertEmbeddingCache(toCache);
return embeddings;
}
private collectCachedEmbeddings(chunks: MemoryChunk[]): {
@@ -305,221 +305,6 @@ export abstract class MemoryManagerEmbeddingOps extends MemoryManagerSyncOps {
return { embeddings, missing };
}
private buildBatchCustomId(params: {
source: MemorySource;
entry: MemoryFileEntry | SessionFileEntry;
chunk: MemoryChunk;
index: number;
}): string {
return hashText(
`${params.source}:${params.entry.path}:${params.chunk.startLine}:${params.chunk.endLine}:${params.chunk.hash}:${params.index}`,
);
}
private buildBatchRequests<T extends { custom_id: string }>(params: {
missing: Array<{ index: number; chunk: MemoryChunk }>;
entry: MemoryFileEntry | SessionFileEntry;
source: MemorySource;
build: (chunk: MemoryChunk) => Omit<T, "custom_id">;
}): { requests: T[]; mapping: Map<string, { index: number; hash: string }> } {
const requests: T[] = [];
const mapping = new Map<string, { index: number; hash: string }>();
for (const item of params.missing) {
const chunk = item.chunk;
const customId = this.buildBatchCustomId({
source: params.source,
entry: params.entry,
chunk,
index: item.index,
});
mapping.set(customId, { index: item.index, hash: chunk.hash });
const built = params.build(chunk);
requests.push({ custom_id: customId, ...built } as T);
}
return { requests, mapping };
}
private applyBatchEmbeddings(params: {
byCustomId: Map<string, number[]>;
mapping: Map<string, { index: number; hash: string }>;
embeddings: number[][];
}): void {
const toCache: Array<{ hash: string; embedding: number[] }> = [];
for (const [customId, embedding] of params.byCustomId.entries()) {
const mapped = params.mapping.get(customId);
if (!mapped) {
continue;
}
params.embeddings[mapped.index] = embedding;
toCache.push({ hash: mapped.hash, embedding });
}
this.upsertEmbeddingCache(toCache);
}
private buildEmbeddingBatchRunnerOptions<TRequest>(params: {
requests: TRequest[];
chunks: MemoryChunk[];
source: MemorySource;
}): {
agentId: string;
requests: TRequest[];
wait: boolean;
concurrency: number;
pollIntervalMs: number;
timeoutMs: number;
debug: (message: string, data?: Record<string, unknown>) => void;
} {
const { requests, chunks, source } = params;
return {
agentId: this.agentId,
requests,
wait: this.batch.wait,
concurrency: this.batch.concurrency,
pollIntervalMs: this.batch.pollIntervalMs,
timeoutMs: this.batch.timeoutMs,
debug: (message, data) =>
log.debug(
message,
data ? { ...data, source, chunks: chunks.length } : { source, chunks: chunks.length },
),
};
}
private async embedChunksWithProviderBatch<TRequest extends { custom_id: string }>(params: {
chunks: MemoryChunk[];
entry: MemoryFileEntry | SessionFileEntry;
source: MemorySource;
provider: "voyage" | "openai" | "gemini";
enabled: boolean;
buildRequest: (chunk: MemoryChunk) => Omit<TRequest, "custom_id">;
runBatch: (runnerOptions: {
agentId: string;
requests: TRequest[];
wait: boolean;
concurrency: number;
pollIntervalMs: number;
timeoutMs: number;
debug: (message: string, data?: Record<string, unknown>) => void;
}) => Promise<Map<string, number[]> | number[][]>;
}): Promise<number[][]> {
if (!params.enabled) {
return this.embedChunksInBatches(params.chunks);
}
if (params.chunks.length === 0) {
return [];
}
const { embeddings, missing } = this.collectCachedEmbeddings(params.chunks);
if (missing.length === 0) {
return embeddings;
}
const { requests, mapping } = this.buildBatchRequests<TRequest>({
missing,
entry: params.entry,
source: params.source,
build: params.buildRequest,
});
const runnerOptions = this.buildEmbeddingBatchRunnerOptions({
requests,
chunks: params.chunks,
source: params.source,
});
const batchResult = await this.runBatchWithFallback({
provider: params.provider,
run: async () => await params.runBatch(runnerOptions),
fallback: async () => await this.embedChunksInBatches(params.chunks),
});
if (Array.isArray(batchResult)) {
return batchResult;
}
this.applyBatchEmbeddings({ byCustomId: batchResult, mapping, embeddings });
return embeddings;
}
private async embedChunksWithVoyageBatch(
chunks: MemoryChunk[],
entry: MemoryFileEntry | SessionFileEntry,
source: MemorySource,
): Promise<number[][]> {
const voyage = this.voyage;
return await this.embedChunksWithProviderBatch<VoyageBatchRequest>({
chunks,
entry,
source,
provider: "voyage",
enabled: Boolean(voyage),
buildRequest: (chunk) => ({
body: { input: chunk.text },
}),
runBatch: async (runnerOptions) =>
await runVoyageEmbeddingBatches({
client: voyage!,
...runnerOptions,
}),
});
}
private async embedChunksWithOpenAiBatch(
chunks: MemoryChunk[],
entry: MemoryFileEntry | SessionFileEntry,
source: MemorySource,
): Promise<number[][]> {
const openAi = this.openAi;
return await this.embedChunksWithProviderBatch<OpenAiBatchRequest>({
chunks,
entry,
source,
provider: "openai",
enabled: Boolean(openAi),
buildRequest: (chunk) => ({
method: "POST",
url: OPENAI_BATCH_ENDPOINT,
body: {
model: openAi?.model ?? this.provider?.model ?? "text-embedding-3-small",
input: chunk.text,
},
}),
runBatch: async (runnerOptions) =>
await runOpenAiEmbeddingBatches({
openAi: openAi!,
...runnerOptions,
}),
});
}
private async embedChunksWithGeminiBatch(
chunks: MemoryChunk[],
entry: MemoryFileEntry | SessionFileEntry,
source: MemorySource,
): Promise<number[][]> {
const gemini = this.gemini;
if (chunks.some((chunk) => hasNonTextEmbeddingParts(chunk.embeddingInput))) {
return await this.embedChunksInBatches(chunks);
}
return await this.embedChunksWithProviderBatch<GeminiBatchRequest>({
chunks,
entry,
source,
provider: "gemini",
enabled: Boolean(gemini),
buildRequest: (chunk) => ({
request: buildGeminiEmbeddingRequest({
input: chunk.embeddingInput ?? { text: chunk.text },
taskType: "RETRIEVAL_DOCUMENT",
modelPath: this.gemini?.modelPath,
outputDimensionality: this.gemini?.outputDimensionality,
}),
}),
runBatch: async (runnerOptions) =>
await runGeminiEmbeddingBatches({
gemini: gemini!,
...runnerOptions,
}),
});
}
protected async embedBatchWithRetry(texts: string[]): Promise<number[][]> {
if (texts.length === 0) {
return [];

View File

@@ -1,5 +1,5 @@
import type { DatabaseSync } from "node:sqlite";
import { cosineSimilarity, parseEmbedding, truncateUtf16Safe } from "../api.js";
import { cosineSimilarity, parseEmbedding, truncateUtf16Safe } from "../engine-host-api.js";
const vectorToBlob = (embedding: number[]): Buffer =>
Buffer.from(new Float32Array(embedding).buffer);

View File

@@ -5,15 +5,8 @@ import path from "node:path";
import type { DatabaseSync } from "node:sqlite";
import chokidar, { FSWatcher } from "chokidar";
import {
DEFAULT_GEMINI_EMBEDDING_MODEL,
DEFAULT_MISTRAL_EMBEDDING_MODEL,
DEFAULT_OLLAMA_EMBEDDING_MODEL,
DEFAULT_OPENAI_EMBEDDING_MODEL,
DEFAULT_VOYAGE_EMBEDDING_MODEL,
buildCaseInsensitiveExtensionGlob,
buildFileEntry,
classifyMemoryMultimodalPath,
createEmbeddingProvider,
createSubsystemLogger,
ensureDir,
ensureMemoryIndexSchema,
@@ -22,10 +15,8 @@ import {
isFileMissingError,
listMemoryFiles,
listSessionFilesForAgent,
loadSqliteVecExtension,
normalizeExtraMemoryPaths,
onSessionTranscriptUpdate,
requireNodeSqlite,
resolveAgentDir,
resolveSessionTranscriptsDirForAgent,
resolveUserPath,
@@ -37,14 +28,22 @@ import {
type OpenClawConfig,
type ResolvedMemorySearchConfig,
type SessionFileEntry,
type EmbeddingProvider,
type GeminiEmbeddingClient,
type MistralEmbeddingClient,
type OllamaEmbeddingClient,
type OpenAiEmbeddingClient,
type VoyageEmbeddingClient,
buildSessionEntry,
} from "../api.js";
} from "../engine-host-api.js";
import {
createEmbeddingProvider,
DEFAULT_GEMINI_EMBEDDING_MODEL,
DEFAULT_MISTRAL_EMBEDDING_MODEL,
DEFAULT_OLLAMA_EMBEDDING_MODEL,
DEFAULT_OPENAI_EMBEDDING_MODEL,
DEFAULT_VOYAGE_EMBEDDING_MODEL,
type EmbeddingProvider,
type EmbeddingProviderId,
type EmbeddingProviderRuntime,
} from "./embeddings.js";
import { buildFileEntry } from "./internal.js";
import { loadSqliteVecExtension } from "./sqlite-vec.js";
import { requireNodeSqlite } from "./sqlite.js";
type MemoryIndexMeta = {
model: string;
@@ -101,12 +100,8 @@ export abstract class MemoryManagerSyncOps {
protected abstract readonly workspaceDir: string;
protected abstract readonly settings: ResolvedMemorySearchConfig;
protected provider: EmbeddingProvider | null = null;
protected fallbackFrom?: "openai" | "local" | "gemini" | "voyage" | "mistral" | "ollama";
protected openAi?: OpenAiEmbeddingClient;
protected gemini?: GeminiEmbeddingClient;
protected voyage?: VoyageEmbeddingClient;
protected mistral?: MistralEmbeddingClient;
protected ollama?: OllamaEmbeddingClient;
protected fallbackFrom?: EmbeddingProviderId;
protected providerRuntime?: EmbeddingProviderRuntime;
protected abstract batch: {
enabled: boolean;
wait: boolean;
@@ -1104,13 +1099,7 @@ export abstract class MemoryManagerSyncOps {
timeoutMs: number;
} {
const batch = this.settings.remote?.batch;
const enabled = Boolean(
batch?.enabled &&
this.provider &&
((this.openAi && this.provider.id === "openai") ||
(this.gemini && this.provider.id === "gemini") ||
(this.voyage && this.provider.id === "voyage")),
);
const enabled = Boolean(batch?.enabled && this.provider && this.providerRuntime?.batchEmbed);
return {
enabled,
wait: batch?.wait ?? true,
@@ -1128,13 +1117,7 @@ export abstract class MemoryManagerSyncOps {
if (this.fallbackFrom) {
return false;
}
const fallbackFrom = this.provider.id as
| "openai"
| "gemini"
| "local"
| "voyage"
| "mistral"
| "ollama";
const fallbackFrom = this.provider.id as EmbeddingProviderId;
const fallbackModel =
fallback === "gemini"
@@ -1163,11 +1146,7 @@ export abstract class MemoryManagerSyncOps {
this.fallbackFrom = fallbackFrom;
this.fallbackReason = reason;
this.provider = fallbackResult.provider;
this.openAi = fallbackResult.openAi;
this.gemini = fallbackResult.gemini;
this.voyage = fallbackResult.voyage;
this.mistral = fallbackResult.mistral;
this.ollama = fallbackResult.ollama;
this.providerRuntime = fallbackResult.runtime;
this.providerKey = this.computeProviderKey();
this.batch = this.resolveBatchConfig();
log.warn(`memory embeddings: switched to fallback provider (${fallback})`, { reason });

View File

@@ -2,9 +2,9 @@ import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { MemoryIndexManager } from "../../extensions/memory-core/src/memory/index.js";
import { closeAllMemorySearchManagers } from "../../extensions/memory-core/src/memory/index.js";
import type { OpenClawConfig } from "../config/config.js";
import type { OpenClawConfig } from "../engine-host-api.js";
import type { MemoryIndexManager } from "./index.js";
import { closeAllMemorySearchManagers } from "./index.js";
import { createOpenAIEmbeddingProviderMock } from "./test-embeddings-mock.js";
import { createMemoryManagerOrThrow } from "./test-manager.js";

View File

@@ -2,14 +2,14 @@ import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
import type { MemoryIndexManager } from "../../extensions/memory-core/src/memory/index.js";
import type { OpenClawConfig } from "../config/config.js";
import type { OpenClawConfig } from "../engine-host-api.js";
import type { MemoryIndexManager } from "./index.js";
let shouldFail = false;
type EmbeddingTestMocksModule = typeof import("./embedding.test-mocks.js");
type TestManagerHelpersModule = typeof import("./test-manager-helpers.js");
type MemoryIndexModule = typeof import("../../extensions/memory-core/src/memory/index.js");
type MemoryIndexModule = typeof import("./index.js");
describe("memory manager atomic reindex", () => {
let fixtureRoot = "";
@@ -32,8 +32,7 @@ describe("memory manager atomic reindex", () => {
embedBatch = embeddingMocks.getEmbedBatchMock();
resetEmbeddingMocks = embeddingMocks.resetEmbeddingMocks;
({ getRequiredMemoryIndexManager } = await import("./test-manager-helpers.js"));
({ closeAllMemorySearchManagers } =
await import("../../extensions/memory-core/src/memory/index.js"));
({ closeAllMemorySearchManagers } = await import("./index.js"));
vi.stubEnv("OPENCLAW_TEST_MEMORY_UNSAFE_REINDEX", "0");
resetEmbeddingMocks();
shouldFail = false;

View File

@@ -2,14 +2,13 @@ import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
import { useFastShortTimeouts } from "../../test/helpers/fast-short-timeouts.js";
import type { OpenClawConfig } from "../config/config.js";
import { useFastShortTimeouts } from "../../../../test/helpers/fast-short-timeouts.js";
import type { OpenClawConfig } from "../engine-host-api.js";
import { createOpenAIEmbeddingProviderMock } from "./test-embeddings-mock.js";
import { mockPublicPinnedHostname } from "./test-helpers/ssrf.js";
type MemoryIndexManager =
import("../../extensions/memory-core/src/memory/index.js").MemoryIndexManager;
type MemoryIndexModule = typeof import("../../extensions/memory-core/src/memory/index.js");
type MemoryIndexManager = import("./index.js").MemoryIndexManager;
type MemoryIndexModule = typeof import("./index.js");
const embedBatch = vi.fn(async (_texts: string[]) => [] as number[][]);
const embedQuery = vi.fn(async () => [0.5, 0.5, 0.5]);
@@ -122,7 +121,7 @@ describe("memory indexing with OpenAI batches", () => {
}),
}));
await import("./test-runtime-mocks.js");
({ getMemorySearchManager } = await import("../../extensions/memory-core/src/memory/index.js"));
({ getMemorySearchManager } = await import("./index.js"));
fixtureRoot = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-mem-batch-"));
workspaceDir = path.join(fixtureRoot, "workspace");

View File

@@ -1,7 +1,7 @@
import fs from "node:fs/promises";
import path from "node:path";
import { describe, expect, it } from "vitest";
import { useFastShortTimeouts } from "../../test/helpers/fast-short-timeouts.js";
import { useFastShortTimeouts } from "../../../../test/helpers/fast-short-timeouts.js";
import { installEmbeddingManagerFixture } from "./embedding-manager.test-harness.js";
const fx = installEmbeddingManagerFixture({

View File

@@ -3,12 +3,12 @@ import os from "node:os";
import path from "node:path";
import { setTimeout as sleep } from "node:timers/promises";
import { afterEach, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
import type { MemoryIndexManager } from "../../extensions/memory-core/src/memory/index.js";
import type { OpenClawConfig } from "../engine-host-api.js";
import "./test-runtime-mocks.js";
import type { OpenClawConfig } from "../config/config.js";
import type { MemoryIndexManager } from "./index.js";
type MemoryIndexModule = typeof import("../../extensions/memory-core/src/memory/index.js");
type ManagerModule = typeof import("../../extensions/memory-core/src/memory/manager.js");
type MemoryIndexModule = typeof import("./index.js");
type ManagerModule = typeof import("./manager.js");
const hoisted = vi.hoisted(() => ({
providerCreateCalls: 0,
@@ -43,10 +43,9 @@ describe("memory manager cache hydration", () => {
let workspaceDir = "";
beforeAll(async () => {
({ getMemorySearchManager, closeAllMemorySearchManagers } =
await import("../../extensions/memory-core/src/memory/index.js"));
({ getMemorySearchManager, closeAllMemorySearchManagers } = await import("./index.js"));
({ closeAllMemoryIndexManagers, MemoryIndexManager: RawMemoryIndexManager } =
await import("../../extensions/memory-core/src/memory/manager.js"));
await import("./manager.js"));
});
beforeEach(async () => {

View File

@@ -2,31 +2,32 @@ import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { MemoryIndexManager } from "../../extensions/memory-core/src/memory/index.js";
import type { OpenClawConfig } from "../config/config.js";
import type { OpenClawConfig } from "../engine-host-api.js";
import { DEFAULT_OLLAMA_EMBEDDING_MODEL } from "./embeddings-ollama.js";
import type {
EmbeddingProvider,
EmbeddingProviderRuntime,
EmbeddingProviderResult,
MistralEmbeddingClient,
OllamaEmbeddingClient,
OpenAiEmbeddingClient,
} from "./embeddings.js";
import type { MemoryIndexManager } from "./index.js";
type MemoryIndexModule = typeof import("./index.js");
const { createEmbeddingProviderMock } = vi.hoisted(() => ({
createEmbeddingProviderMock: vi.fn(),
}));
vi.mock("./embeddings.js", () => ({
createEmbeddingProvider: createEmbeddingProviderMock,
}));
vi.mock("./embeddings.js", async (importOriginal) => {
const actual = await importOriginal<typeof import("./embeddings.js")>();
return {
...actual,
createEmbeddingProvider: createEmbeddingProviderMock,
};
});
vi.mock("./sqlite-vec.js", () => ({
loadSqliteVecExtension: async () => ({ ok: false, error: "sqlite-vec disabled in tests" }),
}));
type MemoryIndexModule = typeof import("../../extensions/memory-core/src/memory/index.js");
let getMemorySearchManager: MemoryIndexModule["getMemorySearchManager"];
let closeAllMemorySearchManagers: MemoryIndexModule["closeAllMemorySearchManagers"];
@@ -78,8 +79,7 @@ describe("memory manager mistral provider wiring", () => {
beforeEach(async () => {
vi.resetModules();
({ getMemorySearchManager, closeAllMemorySearchManagers } =
await import("../../extensions/memory-core/src/memory/index.js"));
({ getMemorySearchManager, closeAllMemorySearchManagers } = await import("./index.js"));
vi.clearAllMocks();
createEmbeddingProviderMock.mockReset();
workspaceDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-memory-mistral-"));
@@ -102,15 +102,14 @@ describe("memory manager mistral provider wiring", () => {
});
it("stores mistral client when mistral provider is selected", async () => {
const mistralClient: MistralEmbeddingClient = {
baseUrl: "https://api.mistral.ai/v1",
headers: { authorization: "Bearer test-key" },
model: "mistral-embed",
const mistralRuntime: EmbeddingProviderRuntime = {
id: "mistral",
cacheKeyData: { provider: "mistral", model: "mistral-embed" },
};
const providerResult: EmbeddingProviderResult = {
requestedProvider: "mistral",
provider: createProvider("mistral"),
mistral: mistralClient,
runtime: mistralRuntime,
};
createEmbeddingProviderMock.mockResolvedValueOnce(providerResult);
@@ -124,32 +123,30 @@ describe("memory manager mistral provider wiring", () => {
const internal = manager as unknown as {
ensureProviderInitialized: () => Promise<void>;
mistral?: MistralEmbeddingClient;
providerRuntime?: EmbeddingProviderRuntime;
};
await internal.ensureProviderInitialized();
expect(internal.mistral).toBe(mistralClient);
expect(internal.providerRuntime).toBe(mistralRuntime);
});
it("stores mistral client after fallback activation", async () => {
const openAiClient: OpenAiEmbeddingClient = {
baseUrl: "https://api.openai.com/v1",
headers: { authorization: "Bearer openai-key" },
model: "text-embedding-3-small",
const openAiRuntime: EmbeddingProviderRuntime = {
id: "openai",
cacheKeyData: { provider: "openai", model: "text-embedding-3-small" },
};
const mistralClient: MistralEmbeddingClient = {
baseUrl: "https://api.mistral.ai/v1",
headers: { authorization: "Bearer mistral-key" },
model: "mistral-embed",
const mistralRuntime: EmbeddingProviderRuntime = {
id: "mistral",
cacheKeyData: { provider: "mistral", model: "mistral-embed" },
};
createEmbeddingProviderMock.mockResolvedValueOnce({
requestedProvider: "openai",
provider: createProvider("openai"),
openAi: openAiClient,
runtime: openAiRuntime,
} as EmbeddingProviderResult);
createEmbeddingProviderMock.mockResolvedValueOnce({
requestedProvider: "mistral",
provider: createProvider("mistral"),
mistral: mistralClient,
runtime: mistralRuntime,
} as EmbeddingProviderResult);
const cfg = buildConfig({ workspaceDir, indexPath, provider: "openai", fallback: "mistral" });
@@ -162,38 +159,34 @@ describe("memory manager mistral provider wiring", () => {
const internal = manager as unknown as {
ensureProviderInitialized: () => Promise<void>;
activateFallbackProvider: (reason: string) => Promise<boolean>;
openAi?: OpenAiEmbeddingClient;
mistral?: MistralEmbeddingClient;
providerRuntime?: EmbeddingProviderRuntime;
};
await internal.ensureProviderInitialized();
expect(internal.providerRuntime?.id).toBe("openai");
const activated = await internal.activateFallbackProvider("forced test");
expect(activated).toBe(true);
expect(internal.openAi).toBeUndefined();
expect(internal.mistral).toBe(mistralClient);
expect(internal.providerRuntime).toBe(mistralRuntime);
});
it("uses default ollama model when activating ollama fallback", async () => {
const openAiClient: OpenAiEmbeddingClient = {
baseUrl: "https://api.openai.com/v1",
headers: { authorization: "Bearer openai-key" },
model: "text-embedding-3-small",
const openAiRuntime: EmbeddingProviderRuntime = {
id: "openai",
cacheKeyData: { provider: "openai", model: "text-embedding-3-small" },
};
const ollamaClient: OllamaEmbeddingClient = {
baseUrl: "http://127.0.0.1:11434",
headers: {},
model: DEFAULT_OLLAMA_EMBEDDING_MODEL,
embedBatch: async (texts: string[]) => texts.map(() => [0.1, 0.2, 0.3]),
const ollamaRuntime: EmbeddingProviderRuntime = {
id: "ollama",
cacheKeyData: { provider: "ollama", model: DEFAULT_OLLAMA_EMBEDDING_MODEL },
};
createEmbeddingProviderMock.mockResolvedValueOnce({
requestedProvider: "openai",
provider: createProvider("openai"),
openAi: openAiClient,
runtime: openAiRuntime,
} as EmbeddingProviderResult);
createEmbeddingProviderMock.mockResolvedValueOnce({
requestedProvider: "ollama",
provider: createProvider("ollama"),
ollama: ollamaClient,
runtime: ollamaRuntime,
} as EmbeddingProviderResult);
const cfg = buildConfig({ workspaceDir, indexPath, provider: "openai", fallback: "ollama" });
@@ -206,15 +199,14 @@ describe("memory manager mistral provider wiring", () => {
const internal = manager as unknown as {
ensureProviderInitialized: () => Promise<void>;
activateFallbackProvider: (reason: string) => Promise<boolean>;
openAi?: OpenAiEmbeddingClient;
ollama?: OllamaEmbeddingClient;
providerRuntime?: EmbeddingProviderRuntime;
};
await internal.ensureProviderInitialized();
expect(internal.providerRuntime?.id).toBe("openai");
const activated = await internal.activateFallbackProvider("forced ollama fallback");
expect(activated).toBe(true);
expect(internal.openAi).toBeUndefined();
expect(internal.ollama).toBe(ollamaClient);
expect(internal.providerRuntime).toBe(ollamaRuntime);
const fallbackCall = createEmbeddingProviderMock.mock.calls[1]?.[0] as
| { provider?: string; model?: string }

View File

@@ -2,9 +2,9 @@ import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterAll, afterEach, beforeAll, describe, expect, it, vi } from "vitest";
import type { MemoryIndexManager } from "../../extensions/memory-core/src/memory/index.js";
import type { OpenClawConfig } from "../config/config.js";
import type { OpenClawConfig } from "../engine-host-api.js";
import { resetEmbeddingMocks } from "./embedding.test-mocks.js";
import type { MemoryIndexManager } from "./index.js";
import { getRequiredMemoryIndexManager } from "./test-manager-helpers.js";
function createMemorySearchCfg(options: {

View File

@@ -3,9 +3,9 @@ import os from "node:os";
import path from "node:path";
import type { DatabaseSync } from "node:sqlite";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { MemoryIndexManager } from "../../extensions/memory-core/src/memory/manager.js";
import type { OpenClawConfig } from "../config/config.js";
import type { OpenClawConfig } from "../engine-host-api.js";
import { resetEmbeddingMocks } from "./embedding.test-mocks.js";
import { MemoryIndexManager } from "./manager.js";
import { getRequiredMemoryIndexManager } from "./test-manager-helpers.js";
type ReadonlyRecoveryHarness = {

View File

@@ -1,5 +1,5 @@
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { runDetachedMemorySync } from "../../extensions/memory-core/src/memory/manager-sync-ops.js";
import { runDetachedMemorySync } from "./manager-sync-ops.js";
describe("memory manager sync failures", () => {
beforeEach(() => {

View File

@@ -1,15 +1,6 @@
import type { DatabaseSync } from "node:sqlite";
import { type FSWatcher } from "chokidar";
import {
createEmbeddingProvider,
type EmbeddingProvider,
type EmbeddingProviderRequest,
type EmbeddingProviderResult,
type GeminiEmbeddingClient,
type MistralEmbeddingClient,
type OllamaEmbeddingClient,
type OpenAiEmbeddingClient,
type VoyageEmbeddingClient,
extractKeywords,
readMemoryFile,
resolveAgentDir,
@@ -25,7 +16,15 @@ import {
type OpenClawConfig,
type ResolvedMemorySearchConfig,
createSubsystemLogger,
} from "../api.js";
} from "../engine-host-api.js";
import {
createEmbeddingProvider,
type EmbeddingProvider,
type EmbeddingProviderId,
type EmbeddingProviderRequest,
type EmbeddingProviderResult,
type EmbeddingProviderRuntime,
} from "./embeddings.js";
import { bm25RankToScore, buildFtsQuery, mergeHybridResults } from "./hybrid.js";
import { MemoryManagerEmbeddingOps } from "./manager-embedding-ops.js";
import { searchKeyword, searchVector } from "./manager-search.js";
@@ -83,14 +82,10 @@ export class MemoryIndexManager extends MemoryManagerEmbeddingOps implements Mem
private readonly requestedProvider: EmbeddingProviderRequest;
private providerInitPromise: Promise<void> | null = null;
private providerInitialized = false;
protected fallbackFrom?: "openai" | "local" | "gemini" | "voyage" | "mistral" | "ollama";
protected fallbackFrom?: EmbeddingProviderId;
protected fallbackReason?: string;
private providerUnavailableReason?: string;
protected openAi?: OpenAiEmbeddingClient;
protected gemini?: GeminiEmbeddingClient;
protected voyage?: VoyageEmbeddingClient;
protected mistral?: MistralEmbeddingClient;
protected ollama?: OllamaEmbeddingClient;
protected providerRuntime?: EmbeddingProviderRuntime;
protected batch: {
enabled: boolean;
wait: boolean;
@@ -270,11 +265,7 @@ export class MemoryIndexManager extends MemoryManagerEmbeddingOps implements Mem
this.fallbackFrom = providerResult.fallbackFrom;
this.fallbackReason = providerResult.fallbackReason;
this.providerUnavailableReason = providerResult.providerUnavailableReason;
this.openAi = providerResult.openAi;
this.gemini = providerResult.gemini;
this.voyage = providerResult.voyage;
this.mistral = providerResult.mistral;
this.ollama = providerResult.ollama;
this.providerRuntime = providerResult.runtime;
this.providerInitialized = true;
}

View File

@@ -2,8 +2,8 @@ import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { MemoryIndexManager } from "../../extensions/memory-core/src/memory/index.js";
import type { OpenClawConfig } from "../config/config.js";
import type { OpenClawConfig } from "../engine-host-api.js";
import type { MemoryIndexManager } from "./index.js";
vi.mock("./embeddings.js", () => {
return {
@@ -21,7 +21,7 @@ vi.mock("./embeddings.js", () => {
type MemoryInternalModule = typeof import("./internal.js");
type TestManagerModule = typeof import("./test-manager.js");
type MemoryIndexModule = typeof import("../../extensions/memory-core/src/memory/index.js");
type MemoryIndexModule = typeof import("./index.js");
let buildFileEntry: MemoryInternalModule["buildFileEntry"];
let createMemoryManagerOrThrow: TestManagerModule["createMemoryManagerOrThrow"];
@@ -57,8 +57,7 @@ describe("memory vector dedupe", () => {
vi.resetModules();
({ buildFileEntry } = await import("./internal.js"));
({ createMemoryManagerOrThrow } = await import("./test-manager.js"));
({ closeAllMemorySearchManagers } =
await import("../../extensions/memory-core/src/memory/index.js"));
({ closeAllMemorySearchManagers } = await import("./index.js"));
workspaceDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-mem-"));
indexPath = path.join(workspaceDir, "index.sqlite");
await seedMemoryWorkspace(workspaceDir);

View File

@@ -2,9 +2,9 @@ import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import type { MemoryIndexManager } from "../../extensions/memory-core/src/memory/index.js";
import type { OpenClawConfig } from "../config/config.js";
import type { MemorySearchConfig } from "../config/types.tools.js";
import type { OpenClawConfig } from "../engine-host-api.js";
import type { MemorySearchConfig } from "../engine-host-api.js";
import type { MemoryIndexManager } from "./index.js";
const { watchMock } = vi.hoisted(() => ({
watchMock: vi.fn(() => ({
@@ -34,7 +34,7 @@ vi.mock("./embeddings.js", () => ({
}),
}));
type MemoryIndexModule = typeof import("../../extensions/memory-core/src/memory/index.js");
type MemoryIndexModule = typeof import("./index.js");
let getMemorySearchManager: MemoryIndexModule["getMemorySearchManager"];
let closeAllMemorySearchManagers: MemoryIndexModule["closeAllMemorySearchManagers"];
@@ -46,8 +46,7 @@ describe("memory watcher config", () => {
beforeEach(async () => {
vi.resetModules();
({ getMemorySearchManager, closeAllMemorySearchManagers } =
await import("../../extensions/memory-core/src/memory/index.js"));
({ getMemorySearchManager, closeAllMemorySearchManagers } = await import("./index.js"));
vi.clearAllMocks();
});

View File

@@ -8,7 +8,7 @@ import {
applyMMRToHybridResults,
DEFAULT_MMR_CONFIG,
type MMRItem,
} from "../../extensions/memory-core/src/memory/mmr.js";
} from "./mmr.js";
describe("tokenize", () => {
it("normalizes, filters, and deduplicates token sets", () => {

View File

@@ -0,0 +1,359 @@
import fsSync from "node:fs";
import {
DEFAULT_GEMINI_EMBEDDING_MODEL,
DEFAULT_LOCAL_MODEL,
DEFAULT_MISTRAL_EMBEDDING_MODEL,
DEFAULT_OLLAMA_EMBEDDING_MODEL,
DEFAULT_OPENAI_EMBEDDING_MODEL,
DEFAULT_VOYAGE_EMBEDDING_MODEL,
OPENAI_BATCH_ENDPOINT,
buildGeminiEmbeddingRequest,
createGeminiEmbeddingProvider,
createLocalEmbeddingProvider,
createMistralEmbeddingProvider,
createOllamaEmbeddingProvider,
createOpenAiEmbeddingProvider,
createVoyageEmbeddingProvider,
hasNonTextEmbeddingParts,
listMemoryEmbeddingProviders,
resolveUserPath,
runGeminiEmbeddingBatches,
runOpenAiEmbeddingBatches,
runVoyageEmbeddingBatches,
type MemoryEmbeddingProviderAdapter,
} from "../engine-host-api.js";
/** Render an unknown thrown value as a human-readable message. */
function formatErrorMessage(err: unknown): string {
  if (err instanceof Error) {
    return err.message;
  }
  return String(err);
}
/** True when the failure is the host's "No API key found for provider" error. */
function isMissingApiKeyError(err: unknown): boolean {
  const message = err instanceof Error ? err.message : String(err);
  return message.includes("No API key found for provider");
}
/**
 * Prepare a header map for use as cache-key material: drop excluded
 * (sensitive) headers case-insensitively and emit the remainder as
 * key-sorted [name, value] pairs for a stable ordering.
 */
function sanitizeHeaders(
  headers: Record<string, string>,
  excludedHeaderNames: string[],
): Array<[string, string]> {
  const excluded = new Set<string>();
  for (const name of excludedHeaderNames) {
    excluded.add(name.toLowerCase());
  }
  const kept = Object.entries(headers).filter(([name]) => !excluded.has(name.toLowerCase()));
  kept.sort(([left], [right]) => left.localeCompare(right));
  return kept.map(([name, value]): [string, string] => [name, value]);
}
/**
 * Reassemble batch results keyed by stringified chunk index into a dense
 * array of length `count`; indexes with no result become empty vectors.
 */
function mapBatchEmbeddingsByIndex(byCustomId: Map<string, number[]>, count: number): number[][] {
  return Array.from({ length: count }, (_, index) => byCustomId.get(String(index)) ?? []);
}
/** Detect the "optional dependency node-llama-cpp is not installed" failure mode. */
function isNodeLlamaCppMissing(err: unknown): boolean {
  if (!(err instanceof Error)) {
    return false;
  }
  const { code } = err as Error & { code?: unknown };
  if (code !== "ERR_MODULE_NOT_FOUND") {
    return false;
  }
  return err.message.includes("node-llama-cpp");
}
/**
 * Build the multi-line setup-help message shown when local embeddings
 * cannot be initialized. Tailors the "Reason"/remediation lines to whether
 * node-llama-cpp is missing, then lists remote provider alternatives.
 */
function formatLocalSetupError(err: unknown): string {
  const detail = formatErrorMessage(err);
  const missing = isNodeLlamaCppMissing(err);
  const lines: Array<string | null | undefined> = [];
  lines.push("Local embeddings unavailable.");
  if (missing) {
    lines.push("Reason: optional dependency node-llama-cpp is missing (or failed to install).");
  } else {
    lines.push(detail ? `Reason: ${detail}` : undefined);
  }
  // When the dependency is missing the raw detail is still useful context.
  lines.push(missing && detail ? `Detail: ${detail}` : null);
  lines.push("To enable local embeddings:");
  lines.push(
    "1) Use Node 24 (recommended for installs/updates; Node 22 LTS, currently 22.14+, remains supported)",
  );
  if (missing) {
    lines.push("2) Reinstall OpenClaw (this should install node-llama-cpp): npm i -g openclaw@latest");
  } else {
    lines.push(null);
  }
  lines.push(
    "3) If you use pnpm: pnpm approve-builds (select node-llama-cpp), then pnpm rebuild node-llama-cpp",
  );
  for (const provider of ["openai", "gemini", "voyage", "mistral"]) {
    lines.push(`Or set agents.defaults.memorySearch.provider = "${provider}" (remote).`);
  }
  return lines.filter(Boolean).join("\n");
}
/**
 * A local model may be auto-selected only when the configured model path
 * points at an existing regular file on disk. Remote references
 * (hf:/http:/https:) and empty values never qualify.
 */
function canAutoSelectLocal(modelPath?: string): boolean {
  const candidate = modelPath?.trim();
  if (!candidate) {
    return false;
  }
  if (/^(hf:|https?:)/i.test(candidate)) {
    return false;
  }
  const resolved = resolveUserPath(candidate);
  try {
    return fsSync.statSync(resolved).isFile();
  } catch {
    // stat failures (missing path, permissions) simply disqualify auto-select.
    return false;
  }
}
/**
 * Only the "gemini-embedding-2-preview" model supports multimodal inputs.
 * Accepts the model in its bare form or with a "models/", "gemini/" or
 * "google/" prefix.
 */
function supportsGeminiMultimodalEmbeddings(model: string): boolean {
  let normalized = model.trim();
  normalized = normalized.replace(/^models\//, "");
  normalized = normalized.replace(/^(gemini|google)\//, "");
  return normalized === "gemini-embedding-2-preview";
}
/**
 * OpenAI remote embedding adapter.
 *
 * Auto-selection priority 20; when creation fails because no API key is
 * configured, auto-selection continues to the next adapter. The runtime's
 * cacheKeyData excludes the authorization header so credentials never leak
 * into index cache keys. batchEmbed submits one OpenAI batch request per
 * chunk (custom_id = chunk index) and maps results back by index.
 */
const openAiAdapter: MemoryEmbeddingProviderAdapter = {
  id: "openai",
  defaultModel: DEFAULT_OPENAI_EMBEDDING_MODEL,
  transport: "remote",
  autoSelectPriority: 20,
  allowExplicitWhenConfiguredAuto: true,
  shouldContinueAutoSelection: isMissingApiKeyError,
  create: async (options) => {
    // fallback: "none" — fallback chaining is handled by the caller, not here.
    const { provider, client } = await createOpenAiEmbeddingProvider({
      ...options,
      provider: "openai",
      fallback: "none",
    });
    return {
      provider,
      runtime: {
        id: "openai",
        cacheKeyData: {
          provider: "openai",
          baseUrl: client.baseUrl,
          model: client.model,
          headers: sanitizeHeaders(client.headers, ["authorization"]),
        },
        batchEmbed: async (batch) => {
          const byCustomId = await runOpenAiEmbeddingBatches({
            openAi: client,
            agentId: batch.agentId,
            requests: batch.chunks.map((chunk, index) => ({
              custom_id: String(index),
              method: "POST",
              url: OPENAI_BATCH_ENDPOINT,
              body: {
                model: client.model,
                input: chunk.text,
              },
            })),
            wait: batch.wait,
            concurrency: batch.concurrency,
            pollIntervalMs: batch.pollIntervalMs,
            timeoutMs: batch.timeoutMs,
            debug: batch.debug,
          });
          // Missing results come back as empty vectors, never holes.
          return mapBatchEmbeddingsByIndex(byCustomId, batch.chunks.length);
        },
      },
    };
  },
};
/**
 * Gemini remote embedding adapter.
 *
 * Auto-selection priority 30; a missing API key lets auto-selection
 * continue. Multimodal embeddings are advertised only for the
 * gemini-embedding-2-preview model. cacheKeyData strips both
 * authorization and x-goog-api-key headers to keep credentials out of
 * cache keys.
 */
const geminiAdapter: MemoryEmbeddingProviderAdapter = {
  id: "gemini",
  defaultModel: DEFAULT_GEMINI_EMBEDDING_MODEL,
  transport: "remote",
  autoSelectPriority: 30,
  allowExplicitWhenConfiguredAuto: true,
  supportsMultimodalEmbeddings: ({ model }) => supportsGeminiMultimodalEmbeddings(model),
  shouldContinueAutoSelection: isMissingApiKeyError,
  create: async (options) => {
    const { provider, client } = await createGeminiEmbeddingProvider({
      ...options,
      provider: "gemini",
      fallback: "none",
    });
    return {
      provider,
      runtime: {
        id: "gemini",
        cacheKeyData: {
          provider: "gemini",
          baseUrl: client.baseUrl,
          model: client.model,
          outputDimensionality: client.outputDimensionality,
          headers: sanitizeHeaders(client.headers, ["authorization", "x-goog-api-key"]),
        },
        batchEmbed: async (batch) => {
          // Batch path handles text-only chunks; returning null signals the
          // caller to use a non-batch path instead (presumably per-chunk
          // embedding — confirm against the manager's batch handling).
          if (batch.chunks.some((chunk) => hasNonTextEmbeddingParts(chunk.embeddingInput))) {
            return null;
          }
          const byCustomId = await runGeminiEmbeddingBatches({
            gemini: client,
            agentId: batch.agentId,
            requests: batch.chunks.map((chunk, index) => ({
              custom_id: String(index),
              request: buildGeminiEmbeddingRequest({
                input: chunk.embeddingInput ?? { text: chunk.text },
                taskType: "RETRIEVAL_DOCUMENT",
                modelPath: client.modelPath,
                outputDimensionality: client.outputDimensionality,
              }),
            })),
            wait: batch.wait,
            concurrency: batch.concurrency,
            pollIntervalMs: batch.pollIntervalMs,
            timeoutMs: batch.timeoutMs,
            debug: batch.debug,
          });
          return mapBatchEmbeddingsByIndex(byCustomId, batch.chunks.length);
        },
      },
    };
  },
};
/**
 * Voyage AI remote embedding adapter.
 *
 * Auto-selection priority 40; a missing API key lets auto-selection
 * continue. batchEmbed submits one Voyage batch request per chunk
 * (custom_id = chunk index) and maps results back by index.
 */
const voyageAdapter: MemoryEmbeddingProviderAdapter = {
  id: "voyage",
  defaultModel: DEFAULT_VOYAGE_EMBEDDING_MODEL,
  transport: "remote",
  autoSelectPriority: 40,
  allowExplicitWhenConfiguredAuto: true,
  shouldContinueAutoSelection: isMissingApiKeyError,
  create: async (options) => {
    const { provider, client } = await createVoyageEmbeddingProvider({
      ...options,
      provider: "voyage",
      fallback: "none",
    });
    return {
      provider,
      runtime: {
        id: "voyage",
        // Fix: every other adapter supplies cacheKeyData so index caches can
        // be keyed per provider/model; voyage previously omitted it. Uses the
        // minimal provider+model shape, matching the mistral/ollama adapters.
        cacheKeyData: {
          provider: "voyage",
          model: provider.model,
        },
        batchEmbed: async (batch) => {
          const byCustomId = await runVoyageEmbeddingBatches({
            client,
            agentId: batch.agentId,
            requests: batch.chunks.map((chunk, index) => ({
              custom_id: String(index),
              body: {
                input: chunk.text,
              },
            })),
            wait: batch.wait,
            concurrency: batch.concurrency,
            pollIntervalMs: batch.pollIntervalMs,
            timeoutMs: batch.timeoutMs,
            debug: batch.debug,
          });
          // Missing results come back as empty vectors, never holes.
          return mapBatchEmbeddingsByIndex(byCustomId, batch.chunks.length);
        },
      },
    };
  },
};
/**
 * Mistral remote embedding adapter.
 *
 * Auto-selection priority 50; a missing API key lets auto-selection
 * continue. The runtime carries only cacheKeyData — no batchEmbed, so
 * mistral presumably embeds through the provider's regular path only
 * (confirm against the manager's batch handling).
 */
const mistralAdapter: MemoryEmbeddingProviderAdapter = {
  id: "mistral",
  defaultModel: DEFAULT_MISTRAL_EMBEDDING_MODEL,
  transport: "remote",
  autoSelectPriority: 50,
  allowExplicitWhenConfiguredAuto: true,
  shouldContinueAutoSelection: isMissingApiKeyError,
  create: async (options) => {
    const { provider, client } = await createMistralEmbeddingProvider({
      ...options,
      provider: "mistral",
      fallback: "none",
    });
    return {
      provider,
      runtime: {
        id: "mistral",
        cacheKeyData: {
          provider: "mistral",
          model: client.model,
        },
      },
    };
  },
};
/**
 * Ollama embedding adapter (HTTP to a local/remote Ollama server, so
 * transport is "remote").
 *
 * NOTE(review): no autoSelectPriority or shouldContinueAutoSelection is
 * set, so this adapter presumably never participates in auto-selection
 * and must be configured explicitly — confirm against the host's
 * selection logic.
 */
const ollamaAdapter: MemoryEmbeddingProviderAdapter = {
  id: "ollama",
  defaultModel: DEFAULT_OLLAMA_EMBEDDING_MODEL,
  transport: "remote",
  create: async (options) => {
    const { provider, client } = await createOllamaEmbeddingProvider({
      ...options,
      provider: "ollama",
      fallback: "none",
    });
    return {
      provider,
      runtime: {
        id: "ollama",
        cacheKeyData: {
          provider: "ollama",
          model: client.model,
        },
      },
    };
  },
};
/**
 * Local (node-llama-cpp backed) embedding adapter.
 *
 * Auto-selection priority 10; shouldContinueAutoSelection always returns
 * true, so any local setup failure lets auto-selection move on to the
 * remote adapters. formatSetupError produces the detailed
 * install/remediation message for local setup failures. Unlike the remote
 * adapters, create returns the provider directly (no separate client).
 */
const localAdapter: MemoryEmbeddingProviderAdapter = {
  id: "local",
  defaultModel: DEFAULT_LOCAL_MODEL,
  transport: "local",
  autoSelectPriority: 10,
  formatSetupError: formatLocalSetupError,
  shouldContinueAutoSelection: () => true,
  create: async (options) => {
    const provider = await createLocalEmbeddingProvider({
      ...options,
      provider: "local",
      fallback: "none",
    });
    return {
      provider,
      runtime: {
        id: "local",
        cacheKeyData: {
          provider: "local",
          model: provider.model,
        },
      },
    };
  },
};
/**
 * All built-in embedding provider adapters shipped by this plugin.
 * Auto-selection ordering is driven by each adapter's autoSelectPriority
 * field, not by position in this list.
 */
export const builtinMemoryEmbeddingProviderAdapters = [
  localAdapter,
  openAiAdapter,
  geminiAdapter,
  voyageAdapter,
  mistralAdapter,
  ollamaAdapter,
] as const;
/**
 * Register every built-in adapter with the host registry, skipping any id
 * that is already registered (e.g. by another plugin or an earlier call).
 */
export function registerBuiltInMemoryEmbeddingProviders(register: {
  registerMemoryEmbeddingProvider: (adapter: MemoryEmbeddingProviderAdapter) => void;
}): void {
  const knownIds = new Set(listMemoryEmbeddingProviders().map((existing) => existing.id));
  const pending = builtinMemoryEmbeddingProviderAdapters.filter(
    (adapter) => !knownIds.has(adapter.id),
  );
  for (const adapter of pending) {
    register.registerMemoryEmbeddingProvider(adapter);
  }
}
export {
DEFAULT_GEMINI_EMBEDDING_MODEL,
DEFAULT_LOCAL_MODEL,
DEFAULT_MISTRAL_EMBEDDING_MODEL,
DEFAULT_OLLAMA_EMBEDDING_MODEL,
DEFAULT_OPENAI_EMBEDDING_MODEL,
DEFAULT_VOYAGE_EMBEDDING_MODEL,
canAutoSelectLocal,
formatLocalSetupError,
isMissingApiKeyError,
};

View File

@@ -67,17 +67,23 @@ function isMcporterCommand(cmd: unknown): boolean {
return /(^|[\\/])mcporter(?:\.cmd)?$/i.test(cmd);
}
vi.mock("../logging/subsystem.js", () => ({
createSubsystemLogger: () => {
const logger = {
warn: logWarnMock,
debug: logDebugMock,
info: logInfoMock,
child: () => logger,
};
return logger;
},
}));
vi.mock("openclaw/plugin-sdk/memory-core-host-engine", async () => {
const actual = await vi.importActual<
typeof import("openclaw/plugin-sdk/memory-core-host-engine")
>("openclaw/plugin-sdk/memory-core-host-engine");
return {
...actual,
createSubsystemLogger: () => {
const logger = {
warn: logWarnMock,
debug: logDebugMock,
info: logInfoMock,
child: () => logger,
};
return logger;
},
};
});
vi.mock("node:child_process", async (importOriginal) => {
const actual = await importOriginal<typeof import("node:child_process")>();
@@ -88,10 +94,12 @@ vi.mock("node:child_process", async (importOriginal) => {
});
import { spawn as mockedSpawn } from "node:child_process";
import { QmdMemoryManager } from "../../extensions/memory-core/src/memory/qmd-manager.js";
import type { OpenClawConfig } from "../config/config.js";
import { resolveMemoryBackendConfig } from "./backend-config.js";
import { requireNodeSqlite } from "./sqlite.js";
import {
requireNodeSqlite,
resolveMemoryBackendConfig,
type OpenClawConfig,
} from "../engine-host-api.js";
import { QmdMemoryManager } from "./qmd-manager.js";
const spawnMock = mockedSpawn as unknown as Mock;
const originalPath = process.env.PATH;

View File

@@ -12,7 +12,6 @@ import {
isQmdScopeAllowed,
listSessionFilesForAgent,
parseQmdQueryJson,
requireNodeSqlite,
resolveAgentWorkspaceDir,
resolveCliSpawnInvocation,
resolveGlobalSingleton,
@@ -32,7 +31,8 @@ import {
type ResolvedQmdMcporterConfig,
type SessionFileEntry,
writeFileWithinRoot,
} from "../api.js";
} from "../engine-host-api.js";
import { requireNodeSqlite } from "./sqlite.js";
type SqliteDatabase = import("node:sqlite").DatabaseSync;

View File

@@ -1,5 +1,5 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import type { OpenClawConfig } from "../engine-host-api.js";
function createManagerStatus(params: {
backend: "qmd" | "builtin";
@@ -96,7 +96,7 @@ const fallbackSearch = fallbackManager.search;
const mockMemoryIndexGet = vi.hoisted(() => vi.fn(async () => fallbackManager));
const mockCloseAllMemoryIndexManagers = vi.hoisted(() => vi.fn(async () => {}));
vi.mock("../../extensions/memory-core/src/memory/qmd-manager.js", () => ({
vi.mock("./qmd-manager.js", () => ({
QmdMemoryManager: {
create: vi.fn(async () => mockPrimary),
},
@@ -109,11 +109,8 @@ vi.mock("../../extensions/memory-core/src/memory/manager-runtime.js", () => ({
closeAllMemoryIndexManagers: mockCloseAllMemoryIndexManagers,
}));
import { QmdMemoryManager } from "../../extensions/memory-core/src/memory/qmd-manager.js";
import {
closeAllMemorySearchManagers,
getMemorySearchManager,
} from "../../extensions/memory-core/src/memory/search-manager.js";
import { QmdMemoryManager } from "./qmd-manager.js";
import { closeAllMemorySearchManagers, getMemorySearchManager } from "./search-manager.js";
// eslint-disable-next-line @typescript-eslint/unbound-method -- mocked static function
const createQmdManagerMock = vi.mocked(QmdMemoryManager.create);

View File

@@ -7,7 +7,7 @@ import {
type MemorySyncProgressUpdate,
type OpenClawConfig,
type ResolvedQmdConfig,
} from "../api.js";
} from "../engine-host-api.js";
const MEMORY_SEARCH_MANAGER_CACHE_KEY = Symbol.for("openclaw.memorySearchManagerCache");
type MemorySearchManagerCacheStore = {

View File

@@ -0,0 +1 @@
export { loadSqliteVecExtension } from "../engine-host-api.js";

View File

@@ -0,0 +1 @@
export { requireNodeSqlite } from "../engine-host-api.js";

View File

@@ -2,12 +2,12 @@ import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, describe, expect, it } from "vitest";
import { mergeHybridResults } from "../../extensions/memory-core/src/memory/hybrid.js";
import { mergeHybridResults } from "./hybrid.js";
import {
applyTemporalDecayToHybridResults,
applyTemporalDecayToScore,
calculateTemporalDecayMultiplier,
} from "../../extensions/memory-core/src/memory/temporal-decay.js";
} from "./temporal-decay.js";
const DAY_MS = 24 * 60 * 60 * 1000;
const NOW_MS = Date.UTC(2026, 1, 10, 0, 0, 0);

View File

@@ -0,0 +1,64 @@
import {
OPENAI_BATCH_ENDPOINT,
runOpenAiEmbeddingBatches,
type MemoryChunk,
} from "../engine-host-api.js";
/**
 * Build a mock EmbeddingProviderResult shaped like the real OpenAI
 * provider: fixed test client (baseUrl/headers/model), caller-supplied
 * embedQuery/embedBatch, and a runtime whose batchEmbed still drives the
 * real runOpenAiEmbeddingBatches pipeline with index-keyed custom_ids.
 *
 * @param params.embedQuery - stub for single-query embedding.
 * @param params.embedBatch - stub for non-batch multi-input embedding.
 */
export function createOpenAIEmbeddingProviderMock(params: {
  embedQuery: (input: string) => Promise<number[]>;
  embedBatch: (input: string[]) => Promise<number[][]>;
}) {
  // Static test credentials; never hits the network unless
  // runOpenAiEmbeddingBatches itself is unmocked.
  const openAiClient = {
    baseUrl: "https://api.openai.com/v1",
    headers: { Authorization: "Bearer test", "Content-Type": "application/json" },
    model: "text-embedding-3-small",
  };
  return {
    requestedProvider: "openai",
    provider: {
      id: "openai",
      model: "text-embedding-3-small",
      embedQuery: params.embedQuery,
      embedBatch: params.embedBatch,
    },
    runtime: {
      id: "openai",
      cacheKeyData: {
        provider: "openai",
        baseUrl: openAiClient.baseUrl,
        model: openAiClient.model,
      },
      batchEmbed: async (options: {
        agentId: string;
        chunks: MemoryChunk[];
        wait: boolean;
        concurrency: number;
        pollIntervalMs: number;
        timeoutMs: number;
        debug: (message: string, data?: Record<string, unknown>) => void;
      }) => {
        // custom_id is the chunk index so results can be mapped back below.
        const byCustomId = await runOpenAiEmbeddingBatches({
          openAi: openAiClient,
          agentId: options.agentId,
          requests: options.chunks.map((chunk: MemoryChunk, index: number) => ({
            custom_id: String(index),
            method: "POST",
            url: OPENAI_BATCH_ENDPOINT,
            body: {
              model: openAiClient.model,
              input: chunk.text,
            },
          })),
          wait: options.wait,
          concurrency: options.concurrency,
          pollIntervalMs: options.pollIntervalMs,
          timeoutMs: options.timeoutMs,
          debug: options.debug,
        });
        // Chunks with no batch result map to empty vectors.
        return options.chunks.map(
          (_: MemoryChunk, index: number) => byCustomId.get(String(index)) ?? [],
        );
      },
    },
  };
}

View File

@@ -0,0 +1,34 @@
import * as ssrf from "openclaw/plugin-sdk/ssrf-runtime";
import { vi } from "vitest";
/**
 * Spy on ssrf.resolvePinnedHostnameWithPolicy so any hostname resolves to
 * a fixed public IPv4 address (93.184.216.34), letting SSRF-guarded code
 * run in tests without DNS. Returns the vitest spy so callers can restore
 * or inspect it.
 */
export function mockPublicPinnedHostname() {
  return vi.spyOn(ssrf, "resolvePinnedHostnameWithPolicy").mockImplementation(async (hostname) => {
    // Normalize the same way the lookup shim below does: trim, lowercase,
    // strip a single trailing dot (FQDN form).
    const normalized = hostname.trim().toLowerCase().replace(/\.$/, "");
    const addresses = ["93.184.216.34"];
    // dns.lookup-compatible shim supporting both (host, cb) and
    // (host, options, cb) call shapes.
    const lookup = ((host: string, options?: unknown, callback?: unknown) => {
      const cb =
        typeof options === "function"
          ? (options as (err: NodeJS.ErrnoException | null, address: unknown) => void)
          : (callback as (err: NodeJS.ErrnoException | null, address: unknown) => void);
      if (!cb) {
        return;
      }
      // Any hostname other than the pinned one resolves to no addresses.
      if (host.trim().toLowerCase().replace(/\.$/, "") !== normalized) {
        cb(null, []);
        return;
      }
      cb(
        null,
        addresses.map((address) => ({
          address,
          family: address.includes(":") ? 6 : 4,
        })),
      );
    }) as never;
    return {
      hostname: normalized,
      addresses,
      lookup,
    };
  });
}

View File

@@ -1,5 +1,5 @@
import type { OpenClawConfig } from "../config/config.js";
import type { MemoryIndexManager } from "../plugin-sdk/memory-core.js";
import type { OpenClawConfig } from "../engine-host-api.js";
import type { MemoryIndexManager } from "./index.js";
export async function getRequiredMemoryIndexManager(params: {
cfg: OpenClawConfig;
@@ -7,7 +7,7 @@ export async function getRequiredMemoryIndexManager(params: {
purpose?: "default" | "status";
}): Promise<MemoryIndexManager> {
await import("./embedding.test-mocks.js");
const { getMemorySearchManager } = await import("../plugin-sdk/memory-core.js");
const { getMemorySearchManager } = await import("./index.js");
const result = await getMemorySearchManager({
cfg: params.cfg,
agentId: params.agentId ?? "main",

View File

@@ -1,6 +1,5 @@
import type { OpenClawConfig } from "../config/config.js";
import { getMemorySearchManager } from "../plugin-sdk/memory-core.js";
import type { MemoryIndexManager } from "../plugin-sdk/memory-core.js";
import type { OpenClawConfig } from "../engine-host-api.js";
import { getMemorySearchManager, type MemoryIndexManager } from "./index.js";
export async function createMemoryManagerOrThrow(
cfg: OpenClawConfig,

View File

@@ -424,6 +424,14 @@
"types": "./dist/plugin-sdk/memory-core-host.d.ts",
"default": "./dist/plugin-sdk/memory-core-host.js"
},
"./plugin-sdk/memory-core-host-engine": {
"types": "./dist/plugin-sdk/memory-core-host-engine.d.ts",
"default": "./dist/plugin-sdk/memory-core-host-engine.js"
},
"./plugin-sdk/memory-core-host-runtime": {
"types": "./dist/plugin-sdk/memory-core-host-runtime.d.ts",
"default": "./dist/plugin-sdk/memory-core-host-runtime.js"
},
"./plugin-sdk/memory-lancedb": {
"types": "./dist/plugin-sdk/memory-lancedb.d.ts",
"default": "./dist/plugin-sdk/memory-lancedb.js"

View File

@@ -96,6 +96,8 @@
"mattermost",
"memory-core",
"memory-core-host",
"memory-core-host-engine",
"memory-core-host-runtime",
"memory-lancedb",
"msteams",
"nextcloud-talk",

View File

@@ -1,13 +1,64 @@
import { describe, expect, it } from "vitest";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import type { OpenClawConfig } from "../config/config.js";
import {
clearMemoryEmbeddingProviders,
registerMemoryEmbeddingProvider,
} from "../plugins/memory-embedding-providers.js";
import { resolveMemorySearchConfig } from "./memory-search.js";
const asConfig = (cfg: OpenClawConfig): OpenClawConfig => cfg;
describe("memory search config", () => {
function configWithDefaultProvider(
provider: "openai" | "local" | "gemini" | "mistral" | "ollama",
): OpenClawConfig {
beforeEach(() => {
clearMemoryEmbeddingProviders();
registerMemoryEmbeddingProvider({
id: "openai",
defaultModel: "text-embedding-3-small",
transport: "remote",
create: async () => ({ provider: null }),
});
registerMemoryEmbeddingProvider({
id: "local",
defaultModel: "local-default",
transport: "local",
create: async () => ({ provider: null }),
});
registerMemoryEmbeddingProvider({
id: "gemini",
defaultModel: "gemini-embedding-001",
transport: "remote",
supportsMultimodalEmbeddings: ({ model }) =>
model
.trim()
.replace(/^models\//, "")
.replace(/^(gemini|google)\//, "") === "gemini-embedding-2-preview",
create: async () => ({ provider: null }),
});
registerMemoryEmbeddingProvider({
id: "voyage",
defaultModel: "voyage-4-large",
transport: "remote",
create: async () => ({ provider: null }),
});
registerMemoryEmbeddingProvider({
id: "mistral",
defaultModel: "mistral-embed",
transport: "remote",
create: async () => ({ provider: null }),
});
registerMemoryEmbeddingProvider({
id: "ollama",
defaultModel: "nomic-embed-text",
transport: "remote",
create: async () => ({ provider: null }),
});
});
afterEach(() => {
clearMemoryEmbeddingProviders();
});
function configWithDefaultProvider(provider: string): OpenClawConfig {
return asConfig({
agents: {
defaults: {
@@ -258,7 +309,7 @@ describe("memory search config", () => {
},
});
expect(() => resolveMemorySearchConfig(cfg, "main")).toThrow(
/memorySearch\.multimodal requires memorySearch\.provider = "gemini"/,
/memorySearch\.multimodal requires a provider adapter that supports multimodal embeddings/,
);
});

View File

@@ -3,12 +3,12 @@ import path from "node:path";
import type { OpenClawConfig, MemorySearchConfig } from "../config/config.js";
import { resolveStateDir } from "../config/paths.js";
import type { SecretInput } from "../config/types.secrets.js";
import { getMemoryEmbeddingProvider } from "../plugins/memory-embedding-providers.js";
import {
isMemoryMultimodalEnabled,
normalizeMemoryMultimodalSettings,
supportsMemoryMultimodalEmbeddings,
type MemoryMultimodalSettings,
} from "../memory/multimodal.js";
} from "../plugins/memory-host/multimodal.js";
import { clampInt, clampNumber, resolveUserPath } from "../utils.js";
import { resolveAgentConfig } from "./agent-scope.js";
@@ -17,7 +17,7 @@ export type ResolvedMemorySearchConfig = {
sources: Array<"memory" | "sessions">;
extraPaths: string[];
multimodal: MemoryMultimodalSettings;
provider: "openai" | "local" | "gemini" | "voyage" | "mistral" | "ollama" | "auto";
provider: string;
remote?: {
baseUrl?: string;
apiKey?: SecretInput;
@@ -33,7 +33,7 @@ export type ResolvedMemorySearchConfig = {
experimental: {
sessionMemory: boolean;
};
fallback: "openai" | "gemini" | "local" | "voyage" | "mistral" | "ollama" | "none";
fallback: string;
model: string;
outputDimensionality?: number;
local: {
@@ -88,11 +88,6 @@ export type ResolvedMemorySearchConfig = {
};
};
const DEFAULT_OPENAI_MODEL = "text-embedding-3-small";
const DEFAULT_GEMINI_MODEL = "gemini-embedding-001";
const DEFAULT_VOYAGE_MODEL = "voyage-4-large";
const DEFAULT_MISTRAL_MODEL = "mistral-embed";
const DEFAULT_OLLAMA_MODEL = "nomic-embed-text";
const DEFAULT_CHUNK_TOKENS = 400;
const DEFAULT_CHUNK_OVERLAP = 80;
const DEFAULT_WATCH_DEBOUNCE_MS = 1500;
@@ -150,8 +145,12 @@ function mergeConfig(
const sessionMemory =
overrides?.experimental?.sessionMemory ?? defaults?.experimental?.sessionMemory ?? false;
const provider = overrides?.provider ?? defaults?.provider ?? "auto";
const primaryAdapter = provider === "auto" ? undefined : getMemoryEmbeddingProvider(provider);
const defaultRemote = defaults?.remote;
const overrideRemote = overrides?.remote;
const fallback = overrides?.fallback ?? defaults?.fallback ?? "none";
const fallbackAdapter =
fallback && fallback !== "none" ? getMemoryEmbeddingProvider(fallback) : undefined;
const hasRemoteConfig = Boolean(
overrideRemote?.baseUrl ||
overrideRemote?.apiKey ||
@@ -162,12 +161,9 @@ function mergeConfig(
);
const includeRemote =
hasRemoteConfig ||
provider === "openai" ||
provider === "gemini" ||
provider === "voyage" ||
provider === "mistral" ||
provider === "ollama" ||
provider === "auto";
provider === "auto" ||
primaryAdapter?.transport !== "local" ||
fallbackAdapter?.transport === "remote";
const batch = {
enabled: overrideRemote?.batch?.enabled ?? defaultRemote?.batch?.enabled ?? false,
wait: overrideRemote?.batch?.wait ?? defaultRemote?.batch?.wait ?? true,
@@ -188,19 +184,7 @@ function mergeConfig(
batch,
}
: undefined;
const fallback = overrides?.fallback ?? defaults?.fallback ?? "none";
const modelDefault =
provider === "gemini"
? DEFAULT_GEMINI_MODEL
: provider === "openai"
? DEFAULT_OPENAI_MODEL
: provider === "voyage"
? DEFAULT_VOYAGE_MODEL
: provider === "mistral"
? DEFAULT_MISTRAL_MODEL
: provider === "ollama"
? DEFAULT_OLLAMA_MODEL
: undefined;
const modelDefault = provider === "auto" ? undefined : primaryAdapter?.defaultModel;
const model = overrides?.model ?? defaults?.model ?? modelDefault ?? "";
const outputDimensionality = overrides?.outputDimensionality ?? defaults?.outputDimensionality;
const local = {
@@ -386,15 +370,16 @@ export function resolveMemorySearchConfig(
return null;
}
const multimodalActive = isMemoryMultimodalEnabled(resolved.multimodal);
const multimodalProvider =
resolved.provider === "auto" ? undefined : getMemoryEmbeddingProvider(resolved.provider);
if (
multimodalActive &&
!supportsMemoryMultimodalEmbeddings({
provider: resolved.provider,
!multimodalProvider?.supportsMultimodalEmbeddings?.({
model: resolved.model,
})
) {
throw new Error(
'agents.*.memorySearch.multimodal requires memorySearch.provider = "gemini" and model = "gemini-embedding-2-preview".',
"agents.*.memorySearch.multimodal requires a provider adapter that supports multimodal embeddings for the configured model.",
);
}
if (multimodalActive && resolved.fallback !== "none") {

View File

@@ -5,7 +5,10 @@ import type { ExtensionAPI, FileOperations } from "@mariozechner/pi-coding-agent
import { extractSections } from "../../auto-reply/reply/post-compaction-context.js";
import { openBoundaryFile } from "../../infra/boundary-file-read.js";
import { createSubsystemLogger } from "../../logging/subsystem.js";
import { extractKeywords, isQueryStopWordToken } from "../../memory/query-expansion.js";
import {
extractKeywords,
isQueryStopWordToken,
} from "../../plugins/memory-host/query-expansion.js";
import {
hasMeaningfulConversationContent,
isRealConversationMessage,

View File

@@ -4,8 +4,8 @@ import { resolveMemorySearchConfig } from "../agents/memory-search.js";
import { resolveApiKeyForProvider } from "../agents/model-auth.js";
import { formatCliCommand } from "../cli/command-format.js";
import type { OpenClawConfig } from "../config/config.js";
import { DEFAULT_LOCAL_MODEL } from "../memory/embeddings.js";
import { hasConfiguredMemorySecretInput } from "../memory/secret-input.js";
import { DEFAULT_LOCAL_MODEL } from "../plugins/memory-host/embeddings.js";
import { hasConfiguredMemorySecretInput } from "../plugins/memory-host/secret-input.js";
import { resolveActiveMemoryBackendConfig } from "../plugins/memory-runtime.js";
import { note } from "../terminal/note.js";
import { resolveUserPath } from "../utils.js";
@@ -191,7 +191,7 @@ function hasLocalEmbeddings(local: { modelPath?: string }, useDefaultFallback =
}
async function hasApiKeyForProvider(
provider: "openai" | "gemini" | "voyage" | "mistral" | "ollama",
provider: string,
cfg: OpenClawConfig,
agentDir: string,
): Promise<boolean> {

View File

@@ -12,7 +12,7 @@ import {
resolveMemoryFtsState,
resolveMemoryVectorState,
type Tone,
} from "../memory/status-format.js";
} from "../plugins/memory-host/status-format.js";
import {
formatPluginCompatibilityNotice,
summarizePluginCompatibility,

View File

@@ -1,6 +1,6 @@
import type { OpenClawConfig } from "../config/config.js";
import { getTailnetHostname } from "../infra/tailscale.js";
import type { MemoryProviderStatus } from "../memory/types.js";
import type { MemoryProviderStatus } from "../plugins/memory-host/types.js";
import { getActiveMemorySearchManager } from "../plugins/memory-runtime.js";
export { getTailnetHostname };

View File

@@ -3,7 +3,7 @@ import type { OpenClawConfig } from "../config/types.js";
import { buildGatewayConnectionDetails } from "../gateway/call.js";
import { normalizeControlUiBasePath } from "../gateway/control-ui-shared.js";
import { probeGateway } from "../gateway/probe.js";
import type { MemoryProviderStatus } from "../memory/types.js";
import type { MemoryProviderStatus } from "../plugins/memory-host/types.js";
import {
pickGatewaySelfPresence,
resolveGatewayProbeAuthResolution,

View File

@@ -1882,32 +1882,7 @@ export const GENERATED_BASE_CONFIG_SCHEMA = {
additionalProperties: false,
},
provider: {
anyOf: [
{
type: "string",
const: "openai",
},
{
type: "string",
const: "local",
},
{
type: "string",
const: "gemini",
},
{
type: "string",
const: "voyage",
},
{
type: "string",
const: "mistral",
},
{
type: "string",
const: "ollama",
},
],
type: "string",
},
remote: {
type: "object",
@@ -2021,36 +1996,7 @@ export const GENERATED_BASE_CONFIG_SCHEMA = {
additionalProperties: false,
},
fallback: {
anyOf: [
{
type: "string",
const: "openai",
},
{
type: "string",
const: "gemini",
},
{
type: "string",
const: "local",
},
{
type: "string",
const: "voyage",
},
{
type: "string",
const: "mistral",
},
{
type: "string",
const: "ollama",
},
{
type: "string",
const: "none",
},
],
type: "string",
},
model: {
type: "string",
@@ -3499,32 +3445,7 @@ export const GENERATED_BASE_CONFIG_SCHEMA = {
additionalProperties: false,
},
provider: {
anyOf: [
{
type: "string",
const: "openai",
},
{
type: "string",
const: "local",
},
{
type: "string",
const: "gemini",
},
{
type: "string",
const: "voyage",
},
{
type: "string",
const: "mistral",
},
{
type: "string",
const: "ollama",
},
],
type: "string",
},
remote: {
type: "object",
@@ -3638,36 +3559,7 @@ export const GENERATED_BASE_CONFIG_SCHEMA = {
additionalProperties: false,
},
fallback: {
anyOf: [
{
type: "string",
const: "openai",
},
{
type: "string",
const: "gemini",
},
{
type: "string",
const: "local",
},
{
type: "string",
const: "voyage",
},
{
type: "string",
const: "mistral",
},
{
type: "string",
const: "ollama",
},
{
type: "string",
const: "none",
},
],
type: "string",
},
model: {
type: "string",

View File

@@ -340,8 +340,8 @@ export type MemorySearchConfig = {
/** Enable session transcript indexing (experimental, default: false). */
sessionMemory?: boolean;
};
/** Embedding provider mode. */
provider?: "openai" | "gemini" | "local" | "voyage" | "mistral" | "ollama";
/** Memory embedding provider adapter id. */
provider?: string;
remote?: {
baseUrl?: string;
apiKey?: SecretInput;
@@ -359,8 +359,8 @@ export type MemorySearchConfig = {
timeoutMinutes?: number;
};
};
/** Fallback behavior when embeddings fail. */
fallback?: "openai" | "gemini" | "local" | "voyage" | "mistral" | "ollama" | "none";
/** Fallback memory embedding provider adapter id when embeddings fail. */
fallback?: string;
/** Embedding model id (remote) or alias (local). */
model?: string;
/**

View File

@@ -606,16 +606,7 @@ export const MemorySearchSchema = z
})
.strict()
.optional(),
provider: z
.union([
z.literal("openai"),
z.literal("local"),
z.literal("gemini"),
z.literal("voyage"),
z.literal("mistral"),
z.literal("ollama"),
])
.optional(),
provider: z.string().optional(),
remote: z
.object({
baseUrl: z.string().optional(),
@@ -634,17 +625,7 @@ export const MemorySearchSchema = z
})
.strict()
.optional(),
fallback: z
.union([
z.literal("openai"),
z.literal("gemini"),
z.literal("local"),
z.literal("voyage"),
z.literal("mistral"),
z.literal("ollama"),
z.literal("none"),
])
.optional(),
fallback: z.string().optional(),
model: z.string().optional(),
outputDimensionality: z.number().int().positive().optional(),
local: z

View File

@@ -1,33 +1,60 @@
import { afterAll, beforeAll, describe, expect, it, vi } from "vitest";
import { resolveAgentDir } from "../agents/agent-scope.js";
import type { MemoryEmbeddingProviderAdapter } from "../plugins/memory-embedding-providers.js";
import { getFreePort, installGatewayTestHooks } from "./test-helpers.js";
installGatewayTestHooks({ scope: "suite" });
let startGatewayServer: typeof import("./server.js").startGatewayServer;
let createEmbeddingProviderMock: ReturnType<typeof vi.fn>;
let createEmbeddingProviderMock: ReturnType<
typeof vi.fn<
(options: { provider: string; model: string; agentDir?: string }) => Promise<{
provider: {
id: string;
model: string;
embedQuery: (text: string) => Promise<number[]>;
embedBatch: (texts: string[]) => Promise<number[][]>;
};
}>
>
>;
let clearMemoryEmbeddingProviders: typeof import("../plugins/memory-embedding-providers.js").clearMemoryEmbeddingProviders;
let registerMemoryEmbeddingProvider: typeof import("../plugins/memory-embedding-providers.js").registerMemoryEmbeddingProvider;
let enabledServer: Awaited<ReturnType<typeof startServer>>;
let enabledPort: number;
beforeAll(async () => {
vi.resetModules();
createEmbeddingProviderMock = vi.fn(async (options: { provider: string; model: string }) => ({
provider: {
id: options.provider,
model: options.model,
embedQuery: async () => [0.1, 0.2],
embedBatch: async (texts: string[]) =>
texts.map((_text, index) => [index + 0.1, index + 0.2]),
({ clearMemoryEmbeddingProviders, registerMemoryEmbeddingProvider } =
await import("../plugins/memory-embedding-providers.js"));
createEmbeddingProviderMock = vi.fn(
async (options: { provider: string; model: string; agentDir?: string }) => ({
provider: {
id: options.provider,
model: options.model,
embedQuery: async () => [0.1, 0.2],
embedBatch: async (texts: string[]) =>
texts.map((_text, index) => [index + 0.1, index + 0.2]),
},
}),
);
clearMemoryEmbeddingProviders();
const openAiAdapter: MemoryEmbeddingProviderAdapter = {
id: "openai",
defaultModel: "text-embedding-3-small",
transport: "remote",
autoSelectPriority: 20,
allowExplicitWhenConfiguredAuto: true,
create: async (options) => {
const result = await createEmbeddingProviderMock({
provider: "openai",
model: options.model,
agentDir: options.agentDir,
});
return result;
},
}));
vi.doMock("../memory/embeddings.js", async () => {
const actual =
await vi.importActual<typeof import("../memory/embeddings.js")>("../memory/embeddings.js");
return {
...actual,
createEmbeddingProvider: createEmbeddingProviderMock,
};
});
};
registerMemoryEmbeddingProvider(openAiAdapter);
({ startGatewayServer } = await import("./server.js"));
enabledPort = await getFreePort();
enabledServer = await startServer(enabledPort, { openAiChatCompletionsEnabled: true });
@@ -35,6 +62,7 @@ beforeAll(async () => {
afterAll(async () => {
await enabledServer.close({ reason: "embeddings http enabled suite done" });
clearMemoryEmbeddingProviders();
vi.resetModules();
});
@@ -120,10 +148,9 @@ describe("OpenAI-compatible embeddings HTTP API (e2e)", () => {
expect(typeof json.data?.[0]?.embedding).toBe("string");
expect(createEmbeddingProviderMock).toHaveBeenCalled();
const lastCall = createEmbeddingProviderMock.mock.calls.at(-1)?.[0] as
| { provider?: string; model?: string; fallback?: string; agentDir?: string }
| { provider?: string; model?: string; agentDir?: string }
| undefined;
expect(typeof lastCall?.model).toBe("string");
expect(lastCall?.fallback).toBe("none");
expect(lastCall?.agentDir).toBe(resolveAgentDir({}, "beta"));
});

View File

@@ -5,11 +5,11 @@ import { resolveMemorySearchConfig } from "../agents/memory-search.js";
import { loadConfig } from "../config/config.js";
import { logWarn } from "../logger.js";
import {
createEmbeddingProvider,
type EmbeddingProviderOptions,
type EmbeddingProviderId,
type EmbeddingProviderRequest,
} from "../memory/embeddings.js";
getMemoryEmbeddingProvider,
listMemoryEmbeddingProviders,
type MemoryEmbeddingProvider,
type MemoryEmbeddingProviderAdapter,
} from "../plugins/memory-embedding-providers.js";
import type { AuthRateLimiter } from "./auth-rate-limit.js";
import type { ResolvedGatewayAuth } from "./auth.js";
import { sendJson } from "./http-common.js";
@@ -41,12 +41,7 @@ const DEFAULT_EMBEDDINGS_BODY_BYTES = 5 * 1024 * 1024;
const MAX_EMBEDDING_INPUTS = 128;
const MAX_EMBEDDING_INPUT_CHARS = 8_192;
const MAX_EMBEDDING_TOTAL_CHARS = 65_536;
const SAFE_AUTO_EXPLICIT_PROVIDERS = new Set<EmbeddingProviderId>([
"openai",
"gemini",
"voyage",
"mistral",
]);
type EmbeddingProviderRequest = string;
function coerceRequest(value: unknown): EmbeddingsRequest {
return value && typeof value === "object" ? (value as EmbeddingsRequest) : {};
@@ -87,6 +82,88 @@ function validateInputTexts(texts: string[]): string | undefined {
return undefined;
}
/** Render an unknown thrown value as a human-readable message string. */
function formatErrorMessage(err: unknown): string {
  if (err instanceof Error) {
    return err.message;
  }
  return String(err);
}
/**
 * Collect the ids of registered embedding adapters that may be selected
 * explicitly even when the configured provider is "auto".
 */
function resolveAutoExplicitProviders(): Set<string> {
  const ids = new Set<string>();
  for (const adapter of listMemoryEmbeddingProviders()) {
    if (adapter.allowExplicitWhenConfiguredAuto) {
      ids.add(adapter.id);
    }
  }
  return ids;
}
/**
 * Ask an adapter whether auto-selection should move on to the next adapter
 * after a creation failure. Adapters that do not implement the hook opt out
 * (auto-selection stops and the error propagates).
 */
function shouldContinueAutoSelection(
  adapter: MemoryEmbeddingProviderAdapter,
  err: unknown,
): boolean {
  const hook = adapter.shouldContinueAutoSelection;
  if (hook == null) {
    return false;
  }
  // Preserve the adapter as `this`, matching the optional-call form.
  return hook.call(adapter, err) ?? false;
}
/**
 * Resolve and instantiate a memory embedding provider for an HTTP embeddings
 * request.
 *
 * When `params.provider` is "auto", walks the registered adapters that opt in
 * to auto-selection (those with a numeric `autoSelectPriority`, ascending) and
 * returns the first one that yields a provider; adapters may ask to continue
 * past their own failures via `shouldContinueAutoSelection`. Otherwise looks
 * up the named adapter directly.
 *
 * @throws Error when no adapter yields a provider ("auto"), the named adapter
 *   is unknown, or the named adapter returns no provider.
 */
async function createConfiguredEmbeddingProvider(params: {
  cfg: ReturnType<typeof loadConfig>;
  agentDir: string;
  provider: EmbeddingProviderRequest;
  model: string;
  memorySearch?: Pick<
    NonNullable<ReturnType<typeof resolveMemorySearchConfig>>,
    "local" | "remote" | "outputDimensionality"
  >;
}): Promise<MemoryEmbeddingProvider> {
  // Shared creation path: feed the adapter the request model (falling back to
  // the adapter's default) plus any local/remote memory-search settings.
  const createWithAdapter = async (adapter: MemoryEmbeddingProviderAdapter) => {
    const result = await adapter.create({
      config: params.cfg,
      agentDir: params.agentDir,
      model: params.model || adapter.defaultModel || "",
      local: params.memorySearch?.local,
      remote: params.memorySearch?.remote
        ? {
            baseUrl: params.memorySearch?.remote.baseUrl,
            apiKey: params.memorySearch?.remote.apiKey,
            headers: params.memorySearch?.remote.headers,
          }
        : undefined,
      outputDimensionality: params.memorySearch?.outputDimensionality,
    });
    return result.provider;
  };
  if (params.provider === "auto") {
    // Only adapters that declare a priority participate in auto-selection;
    // toSorted keeps the registry list itself unmutated.
    const adapters = listMemoryEmbeddingProviders()
      .filter((adapter) => typeof adapter.autoSelectPriority === "number")
      .toSorted(
        (a, b) =>
          (a.autoSelectPriority ?? Number.MAX_SAFE_INTEGER) -
          (b.autoSelectPriority ?? Number.MAX_SAFE_INTEGER),
      );
    for (const adapter of adapters) {
      try {
        const provider = await createWithAdapter(adapter);
        if (provider) {
          return provider;
        }
        // A null/undefined provider (no error) falls through to the next adapter.
      } catch (err) {
        // Adapter decides whether its failure is skippable; otherwise fail fast.
        if (shouldContinueAutoSelection(adapter, err)) {
          continue;
        }
        throw err;
      }
    }
    throw new Error("No embeddings provider available.");
  }
  // Explicit provider id: must resolve to a registered adapter.
  const adapter = getMemoryEmbeddingProvider(params.provider);
  if (!adapter) {
    throw new Error(`Unknown memory embedding provider: ${params.provider}`);
  }
  const provider = await createWithAdapter(adapter);
  if (!provider) {
    throw new Error(`Memory embedding provider ${params.provider} is unavailable.`);
  }
  return provider;
}
function resolveEmbeddingsTarget(params: {
requestModel: string;
configuredProvider: EmbeddingProviderRequest;
@@ -97,17 +174,18 @@ function resolveEmbeddingsTarget(params: {
return { provider: params.configuredProvider, model: raw };
}
const provider = raw.slice(0, slash).trim().toLowerCase() as EmbeddingProviderRequest;
const provider = raw.slice(0, slash).trim().toLowerCase();
const model = raw.slice(slash + 1).trim();
if (!model) {
return { errorMessage: "Unsupported embedding model reference." };
}
if (params.configuredProvider === "auto") {
const safeAutoExplicitProviders = resolveAutoExplicitProviders();
if (provider === "auto") {
return { provider: "auto", model };
}
if (SAFE_AUTO_EXPLICIT_PROVIDERS.has(provider)) {
if (safeAutoExplicitProviders.has(provider)) {
return { provider, model };
}
return {
@@ -185,7 +263,7 @@ export async function handleOpenAiEmbeddingsHttpRequest(
const agentId = resolveAgentIdForRequest({ req, model: requestModel });
const agentDir = resolveAgentDir(cfg, agentId);
const memorySearch = resolveMemorySearchConfig(cfg, agentId);
const configuredProvider = (memorySearch?.provider ?? "openai") as EmbeddingProviderRequest;
const configuredProvider = memorySearch?.provider ?? "openai";
const overrideModel = getHeader(req, "x-openclaw-model")?.trim() || memorySearch?.model || "";
const target = resolveEmbeddingsTarget({ requestModel: overrideModel, configuredProvider });
if ("errorMessage" in target) {
@@ -198,41 +276,23 @@ export async function handleOpenAiEmbeddingsHttpRequest(
return true;
}
const options: EmbeddingProviderOptions = {
config: cfg,
agentDir,
provider: target.provider,
model: target.model,
// Public HTTP embeddings should fail closed rather than silently mixing
// vector spaces across fallback providers/models.
fallback: "none",
local: memorySearch?.local,
remote: memorySearch?.remote
? {
baseUrl: memorySearch.remote.baseUrl,
apiKey: memorySearch.remote.apiKey,
headers: memorySearch.remote.headers,
}
: undefined,
outputDimensionality:
typeof payload.dimensions === "number" && payload.dimensions > 0
? Math.floor(payload.dimensions)
: memorySearch?.outputDimensionality,
};
try {
const result = await createEmbeddingProvider(options);
if (!result.provider) {
sendJson(res, 503, {
error: {
message: result.providerUnavailableReason ?? "Embeddings provider unavailable.",
type: "api_error",
},
});
return true;
}
const embeddings = await result.provider.embedBatch(texts);
const provider = await createConfiguredEmbeddingProvider({
cfg,
agentDir,
provider: target.provider,
model: target.model,
memorySearch: memorySearch
? {
...memorySearch,
outputDimensionality:
typeof payload.dimensions === "number" && payload.dimensions > 0
? Math.floor(payload.dimensions)
: memorySearch.outputDimensionality,
}
: undefined,
});
const embeddings = await provider.embedBatch(texts);
const encodingFormat = payload.encoding_format === "base64" ? "base64" : "float";
sendJson(res, 200, {
@@ -249,7 +309,7 @@ export async function handleOpenAiEmbeddingsHttpRequest(
},
});
} catch (err) {
logWarn(`openai-compat: embeddings request failed: ${String(err)}`);
logWarn(`openai-compat: embeddings request failed: ${formatErrorMessage(err)}`);
sendJson(res, 500, {
error: {
message: "internal error",

View File

@@ -1,19 +0,0 @@
/**
 * Build a canned OpenAI embedding-provider fixture for tests, wiring the
 * supplied embed callbacks into a provider shaped like the real adapter
 * result (provider + openAi transport details).
 */
export function createOpenAIEmbeddingProviderMock(params: {
  embedQuery: (input: string) => Promise<number[]>;
  embedBatch: (input: string[]) => Promise<number[][]>;
}) {
  const model = "text-embedding-3-small";
  const provider = {
    id: "openai",
    model,
    embedQuery: params.embedQuery,
    embedBatch: params.embedBatch,
  };
  const openAi = {
    baseUrl: "https://api.openai.com/v1",
    headers: { Authorization: "Bearer test", "Content-Type": "application/json" },
    model,
  };
  return { requestedProvider: "openai", provider, openAi };
}

View File

@@ -0,0 +1,168 @@
// Narrow engine surface for the bundled memory-core plugin.
// Keep this limited to host utilities needed by the memory engine cluster.
export {
resolveAgentDir,
resolveAgentWorkspaceDir,
resolveDefaultAgentId,
resolveSessionAgentId,
} from "../agents/agent-scope.js";
export {
resolveMemorySearchConfig,
type ResolvedMemorySearchConfig,
} from "../agents/memory-search.js";
export { parseDurationMs } from "../cli/parse-duration.js";
export { loadConfig } from "../config/config.js";
export { resolveStateDir } from "../config/paths.js";
export { resolveSessionTranscriptsDirForAgent } from "../config/sessions/paths.js";
export {
hasConfiguredSecretInput,
normalizeResolvedSecretInputString,
} from "../config/types.secrets.js";
export { writeFileWithinRoot } from "../infra/fs-safe.js";
export { createSubsystemLogger } from "../logging/subsystem.js";
export { resolveGlobalSingleton } from "../shared/global-singleton.js";
export { onSessionTranscriptUpdate } from "../sessions/transcript-events.js";
export {
buildFileEntry,
buildMultimodalChunkForIndexing,
chunkMarkdown,
cosineSimilarity,
ensureDir,
hashText,
listMemoryFiles,
normalizeExtraMemoryPaths,
parseEmbedding,
remapChunkLines,
runWithConcurrency,
type MemoryChunk,
type MemoryFileEntry,
} from "../plugins/memory-host/internal.js";
export { readMemoryFile } from "../plugins/memory-host/read-file.js";
export { resolveMemoryBackendConfig } from "../plugins/memory-host/backend-config.js";
export type {
ResolvedMemoryBackendConfig,
ResolvedQmdConfig,
ResolvedQmdMcporterConfig,
} from "../plugins/memory-host/backend-config.js";
export type {
MemoryEmbeddingProbeResult,
MemoryProviderStatus,
MemorySearchManager,
MemorySearchResult,
MemorySource,
MemorySyncProgressUpdate,
} from "../plugins/memory-host/types.js";
export {
getMemoryEmbeddingProvider,
listMemoryEmbeddingProviders,
} from "../plugins/memory-embedding-providers.js";
export type {
MemoryEmbeddingBatchChunk,
MemoryEmbeddingBatchOptions,
MemoryEmbeddingProvider,
MemoryEmbeddingProviderAdapter,
MemoryEmbeddingProviderCreateOptions,
MemoryEmbeddingProviderCreateResult,
MemoryEmbeddingProviderRuntime,
} from "../plugins/memory-embedding-providers.js";
export {
createLocalEmbeddingProvider,
createEmbeddingProvider,
DEFAULT_LOCAL_MODEL,
type EmbeddingProvider,
type EmbeddingProviderRequest,
type EmbeddingProviderResult,
type GeminiEmbeddingClient,
type MistralEmbeddingClient,
type OllamaEmbeddingClient,
type OpenAiEmbeddingClient,
type VoyageEmbeddingClient,
} from "../plugins/memory-host/embeddings.js";
export {
createGeminiEmbeddingProvider,
DEFAULT_GEMINI_EMBEDDING_MODEL,
buildGeminiEmbeddingRequest,
} from "../plugins/memory-host/embeddings-gemini.js";
export {
createMistralEmbeddingProvider,
DEFAULT_MISTRAL_EMBEDDING_MODEL,
} from "../plugins/memory-host/embeddings-mistral.js";
export {
createOllamaEmbeddingProvider,
DEFAULT_OLLAMA_EMBEDDING_MODEL,
} from "../plugins/memory-host/embeddings-ollama.js";
export {
createOpenAiEmbeddingProvider,
DEFAULT_OPENAI_EMBEDDING_MODEL,
} from "../plugins/memory-host/embeddings-openai.js";
export {
createVoyageEmbeddingProvider,
DEFAULT_VOYAGE_EMBEDDING_MODEL,
} from "../plugins/memory-host/embeddings-voyage.js";
export {
runGeminiEmbeddingBatches,
type GeminiBatchRequest,
} from "../plugins/memory-host/batch-gemini.js";
export {
OPENAI_BATCH_ENDPOINT,
runOpenAiEmbeddingBatches,
type OpenAiBatchRequest,
} from "../plugins/memory-host/batch-openai.js";
export {
runVoyageEmbeddingBatches,
type VoyageBatchRequest,
} from "../plugins/memory-host/batch-voyage.js";
export { enforceEmbeddingMaxInputTokens } from "../plugins/memory-host/embedding-chunk-limits.js";
export {
estimateStructuredEmbeddingInputBytes,
estimateUtf8Bytes,
} from "../plugins/memory-host/embedding-input-limits.js";
export {
hasNonTextEmbeddingParts,
type EmbeddingInput,
} from "../plugins/memory-host/embedding-inputs.js";
export {
buildCaseInsensitiveExtensionGlob,
classifyMemoryMultimodalPath,
getMemoryMultimodalExtensions,
} from "../plugins/memory-host/multimodal.js";
export { ensureMemoryIndexSchema } from "../plugins/memory-host/memory-schema.js";
export { loadSqliteVecExtension } from "../plugins/memory-host/sqlite-vec.js";
export { requireNodeSqlite } from "../plugins/memory-host/sqlite.js";
export { extractKeywords, isQueryStopWordToken } from "../plugins/memory-host/query-expansion.js";
export {
buildSessionEntry,
listSessionFilesForAgent,
sessionPathForFile,
type SessionFileEntry,
} from "../plugins/memory-host/session-files.js";
export { parseQmdQueryJson, type QmdQueryResult } from "../plugins/memory-host/qmd-query-parser.js";
export {
deriveQmdScopeChannel,
deriveQmdScopeChatType,
isQmdScopeAllowed,
} from "../plugins/memory-host/qmd-scope.js";
export { isFileMissingError, statRegularFile } from "../plugins/memory-host/fs-utils.js";
export { resolveCliSpawnInvocation, runCliCommand } from "../plugins/memory-host/qmd-process.js";
export { detectMime } from "../media/mime.js";
export { splitShellArgs } from "../utils/shell-argv.js";
export { runTasksWithConcurrency } from "../utils/run-with-concurrency.js";
export {
shortenHomeInString,
shortenHomePath,
resolveUserPath,
truncateUtf16Safe,
} from "../utils.js";
export type { OpenClawConfig } from "../config/config.js";
export type { SessionSendPolicyConfig } from "../config/types.base.js";
export type {
MemoryBackend,
MemoryCitationsMode,
MemoryQmdConfig,
MemoryQmdIndexPath,
MemoryQmdMcporterConfig,
MemoryQmdSearchMode,
} from "../config/types.memory.js";
export type { MemorySearchConfig } from "../config/types.tools.js";
export type { SecretInput } from "../config/types.secrets.js";

View File

@@ -0,0 +1,38 @@
// Narrow runtime/helper surface for the bundled memory-core plugin.
// Keep this focused on non-engine plugin wiring: CLI, tools, prompt, flush.
export type { AnyAgentTool } from "../agents/tools/common.js";
export { resolveCronStyleNow } from "../agents/current-time.js";
export { DEFAULT_PI_COMPACTION_RESERVE_TOKENS_FLOOR } from "../agents/pi-settings.js";
export { resolveDefaultAgentId, resolveSessionAgentId } from "../agents/agent-scope.js";
export { resolveMemorySearchConfig } from "../agents/memory-search.js";
export { jsonResult, readNumberParam, readStringParam } from "../agents/tools/common.js";
export { SILENT_REPLY_TOKEN } from "../auto-reply/tokens.js";
export { formatErrorMessage, withManager } from "../cli/cli-utils.js";
export { formatHelpExamples } from "../cli/help-format.js";
export { resolveCommandSecretRefsViaGateway } from "../cli/command-secret-gateway.js";
export { withProgress, withProgressTotals } from "../cli/progress.js";
export { parseNonNegativeByteSize } from "../config/byte-size.js";
export { loadConfig } from "../config/config.js";
export { resolveStateDir } from "../config/paths.js";
export { resolveSessionTranscriptsDirForAgent } from "../config/sessions/paths.js";
export { listMemoryFiles, normalizeExtraMemoryPaths } from "../plugins/memory-host/internal.js";
export { readAgentMemoryFile } from "../plugins/memory-host/read-file.js";
export { resolveMemoryBackendConfig } from "../plugins/memory-host/backend-config.js";
export { emptyPluginConfigSchema } from "../plugins/config-schema.js";
export { parseAgentSessionKey } from "../routing/session-key.js";
export { defaultRuntime } from "../runtime.js";
export { formatDocsLink } from "../terminal/links.js";
export { colorize, isRich, theme } from "../terminal/theme.js";
export { isVerbose, setVerbose } from "../globals.js";
export { shortenHomeInString, shortenHomePath } from "../utils.js";
export type { OpenClawConfig } from "../config/config.js";
export type { MemoryCitationsMode } from "../config/types.memory.js";
export type { MemorySearchResult } from "../plugins/memory-host/types.js";
export type {
MemoryFlushPlan,
MemoryFlushPlanResolver,
MemoryPluginRuntime,
MemoryPromptSectionBuilder,
} from "../plugins/memory-state.js";
export type { OpenClawPluginApi } from "../plugins/types.js";

View File

@@ -1,153 +1,2 @@
// Narrow helper surface for the bundled memory-core plugin implementation.
// Keep this focused on generic host seams and shared utilities.
export type { AnyAgentTool } from "../agents/tools/common.js";
export { resolveCronStyleNow } from "../agents/current-time.js";
export { DEFAULT_PI_COMPACTION_RESERVE_TOKENS_FLOOR } from "../agents/pi-settings.js";
export {
resolveAgentDir,
resolveAgentWorkspaceDir,
resolveDefaultAgentId,
resolveSessionAgentId,
} from "../agents/agent-scope.js";
// Barrel of host-side helpers re-exported for the memory-core extension.
// NOTE(review): export order is preserved as-is — re-export order determines
// module side-effect evaluation order, so do not reorder these lines.

// --- Agent / memory-search configuration ---
export {
  resolveMemorySearchConfig,
  type ResolvedMemorySearchConfig,
} from "../agents/memory-search.js";

// --- Tool and CLI plumbing ---
export { jsonResult, readNumberParam, readStringParam } from "../agents/tools/common.js";
export { SILENT_REPLY_TOKEN } from "../auto-reply/tokens.js";
export { formatErrorMessage, withManager } from "../cli/cli-utils.js";
export { formatHelpExamples } from "../cli/help-format.js";
export { parseDurationMs } from "../cli/parse-duration.js";
export { resolveCommandSecretRefsViaGateway } from "../cli/command-secret-gateway.js";
export { withProgress, withProgressTotals } from "../cli/progress.js";

// --- Config, filesystem, logging, session infrastructure ---
export { parseNonNegativeByteSize } from "../config/byte-size.js";
export { loadConfig } from "../config/config.js";
export { resolveStateDir } from "../config/paths.js";
export { resolveSessionTranscriptsDirForAgent } from "../config/sessions/paths.js";
export { writeFileWithinRoot } from "../infra/fs-safe.js";
export { createSubsystemLogger } from "../logging/subsystem.js";
export { resolveGlobalSingleton } from "../shared/global-singleton.js";
export { onSessionTranscriptUpdate } from "../sessions/transcript-events.js";

// --- Memory indexing internals (chunking, hashing, file discovery) ---
export {
  buildFileEntry,
  buildMultimodalChunkForIndexing,
  chunkMarkdown,
  cosineSimilarity,
  ensureDir,
  hashText,
  listMemoryFiles,
  normalizeExtraMemoryPaths,
  parseEmbedding,
  remapChunkLines,
  runWithConcurrency,
  type MemoryChunk,
  type MemoryFileEntry,
} from "../memory/internal.js";
export { readAgentMemoryFile, readMemoryFile } from "../memory/read-file.js";
export { resolveMemoryBackendConfig } from "../memory/backend-config.js";
export type {
  ResolvedMemoryBackendConfig,
  ResolvedQmdConfig,
  ResolvedQmdMcporterConfig,
} from "../memory/backend-config.js";
export type {
  MemoryEmbeddingProbeResult,
  MemoryProviderStatus,
  MemorySearchManager,
  MemorySearchResult,
  MemorySource,
  MemorySyncProgressUpdate,
} from "../memory/types.js";

// --- Embedding providers (per-vendor clients, defaults, batch runners) ---
export {
  createEmbeddingProvider,
  type EmbeddingProvider,
  type EmbeddingProviderRequest,
  type EmbeddingProviderResult,
  type GeminiEmbeddingClient,
  type MistralEmbeddingClient,
  type OllamaEmbeddingClient,
  type OpenAiEmbeddingClient,
  type VoyageEmbeddingClient,
} from "../memory/embeddings.js";
export {
  DEFAULT_GEMINI_EMBEDDING_MODEL,
  buildGeminiEmbeddingRequest,
} from "../memory/embeddings-gemini.js";
export { DEFAULT_MISTRAL_EMBEDDING_MODEL } from "../memory/embeddings-mistral.js";
export { DEFAULT_OLLAMA_EMBEDDING_MODEL } from "../memory/embeddings-ollama.js";
export { DEFAULT_OPENAI_EMBEDDING_MODEL } from "../memory/embeddings-openai.js";
export { DEFAULT_VOYAGE_EMBEDDING_MODEL } from "../memory/embeddings-voyage.js";
export { runGeminiEmbeddingBatches, type GeminiBatchRequest } from "../memory/batch-gemini.js";
export {
  OPENAI_BATCH_ENDPOINT,
  runOpenAiEmbeddingBatches,
  type OpenAiBatchRequest,
} from "../memory/batch-openai.js";
export { runVoyageEmbeddingBatches, type VoyageBatchRequest } from "../memory/batch-voyage.js";
export { enforceEmbeddingMaxInputTokens } from "../memory/embedding-chunk-limits.js";
export {
  estimateStructuredEmbeddingInputBytes,
  estimateUtf8Bytes,
} from "../memory/embedding-input-limits.js";
export { hasNonTextEmbeddingParts, type EmbeddingInput } from "../memory/embedding-inputs.js";
export {
  buildCaseInsensitiveExtensionGlob,
  classifyMemoryMultimodalPath,
  getMemoryMultimodalExtensions,
} from "../memory/multimodal.js";

// --- SQLite index, query expansion, session files, qmd backend ---
export { ensureMemoryIndexSchema } from "../memory/memory-schema.js";
export { loadSqliteVecExtension } from "../memory/sqlite-vec.js";
export { requireNodeSqlite } from "../memory/sqlite.js";
export { extractKeywords, isQueryStopWordToken } from "../memory/query-expansion.js";
export {
  buildSessionEntry,
  listSessionFilesForAgent,
  sessionPathForFile,
  type SessionFileEntry,
} from "../memory/session-files.js";
export { parseQmdQueryJson, type QmdQueryResult } from "../memory/qmd-query-parser.js";
export {
  deriveQmdScopeChannel,
  deriveQmdScopeChatType,
  isQmdScopeAllowed,
} from "../memory/qmd-scope.js";
export { isFileMissingError, statRegularFile } from "../memory/fs-utils.js";
export { resolveCliSpawnInvocation, runCliCommand } from "../memory/qmd-process.js";

// --- Secrets, plugin schema, routing, runtime, terminal helpers ---
export {
  hasConfiguredSecretInput,
  normalizeResolvedSecretInputString,
} from "../config/types.secrets.js";
export { emptyPluginConfigSchema } from "../plugins/config-schema.js";
export { parseAgentSessionKey } from "../routing/session-key.js";
export { defaultRuntime } from "../runtime.js";
export { colorize, isRich, theme } from "../terminal/theme.js";
export { formatDocsLink } from "../terminal/links.js";
export { detectMime } from "../media/mime.js";
export { setVerbose, isVerbose } from "../globals.js";
export {
  shortenHomeInString,
  shortenHomePath,
  resolveUserPath,
  truncateUtf16Safe,
} from "../utils.js";
export { splitShellArgs } from "../utils/shell-argv.js";
export { runTasksWithConcurrency } from "../utils/run-with-concurrency.js";

// --- Type-only re-exports consumed by the memory plugin surface ---
export type { OpenClawConfig } from "../config/config.js";
export type { SessionSendPolicyConfig } from "../config/types.base.js";
export type {
  MemoryBackend,
  MemoryCitationsMode,
  MemoryQmdConfig,
  MemoryQmdIndexPath,
  MemoryQmdMcporterConfig,
  MemoryQmdSearchMode,
} from "../config/types.memory.js";
export type { SecretInput } from "../config/types.secrets.js";
export type {
  MemoryFlushPlan,
  MemoryFlushPlanResolver,
  MemoryPluginRuntime,
  MemoryPromptSectionBuilder,
} from "../plugins/memory-state.js";
export type { OpenClawPluginApi } from "../plugins/types.js";

// Wildcard surfaces: host runtime + host engine for the memory-core extension.
export * from "./memory-core-host-runtime.js";
export * from "./memory-core-host-engine.js";

View File

@@ -18,9 +18,12 @@ export {
getMemorySearchManager,
MemoryIndexManager,
} from "../../extensions/memory-core/src/memory/index.js";
export { listMemoryFiles, normalizeExtraMemoryPaths } from "../memory/internal.js";
export { readAgentMemoryFile } from "../memory/read-file.js";
export { resolveMemoryBackendConfig } from "../memory/backend-config.js";
export {
listMemoryFiles,
normalizeExtraMemoryPaths,
readAgentMemoryFile,
resolveMemoryBackendConfig,
} from "./memory-core-host-runtime.js";
export { setVerbose, isVerbose } from "../globals.js";
export { defaultRuntime } from "../runtime.js";
export { colorize, isRich, theme } from "../terminal/theme.js";
@@ -32,7 +35,7 @@ export { withProgress, withProgressTotals } from "../cli/progress.js";
export { shortenHomeInString, shortenHomePath } from "../utils.js";
export type { OpenClawConfig } from "../config/config.js";
export type { MemoryCitationsMode } from "../config/types.memory.js";
export type { MemorySearchResult } from "../memory/types.js";
export type { MemorySearchResult } from "./memory-core-host-runtime.js";
export type {
MemoryFlushPlan,
MemoryFlushPlanResolver,

View File

@@ -59,7 +59,8 @@ export function createCapturedPluginRegistration(): CapturedPluginRegistration {
registerTool(tool: AnyAgentTool) {
tools.push(tool);
},
} as OpenClawPluginApi,
registerMemoryEmbeddingProvider() {},
} as unknown as OpenClawPluginApi,
};
}

View File

@@ -10,6 +10,11 @@ import { getGlobalHookRunner, resetGlobalHookRunner } from "./hook-runner-global
import { createHookRunner } from "./hooks.js";
import { __testing, clearPluginLoaderCache, loadOpenClawPlugins } from "./loader.js";
import { clearPluginManifestRegistryCache } from "./manifest-registry.js";
import {
getMemoryEmbeddingProvider,
listMemoryEmbeddingProviders,
registerMemoryEmbeddingProvider,
} from "./memory-embedding-providers.js";
import {
buildMemoryPromptSection,
getMemoryRuntime,
@@ -1057,6 +1062,10 @@ module.exports = { id: "skipped-scoped-only", register() { throw new Error("skip
it("does not replace active memory plugin registries during non-activating loads", () => {
useNoBundledPlugins();
registerMemoryEmbeddingProvider({
id: "active",
create: async () => ({ provider: null }),
});
registerMemoryPromptSection(() => ["active memory section"]);
registerMemoryFlushPlanResolver(() => ({
softThresholdTokens: 1,
@@ -1082,6 +1091,10 @@ module.exports = { id: "skipped-scoped-only", register() { throw new Error("skip
id: "snapshot-memory",
kind: "memory",
register(api) {
api.registerMemoryEmbeddingProvider({
id: "snapshot",
create: async () => ({ provider: null }),
});
api.registerMemoryPromptSection(() => ["snapshot memory section"]);
api.registerMemoryFlushPlan(() => ({
softThresholdTokens: 10,
@@ -1123,6 +1136,7 @@ module.exports = { id: "skipped-scoped-only", register() { throw new Error("skip
]);
expect(resolveMemoryFlushPlan({})?.relativePath).toBe("memory/active.md");
expect(getMemoryRuntime()).toBe(activeRuntime);
expect(listMemoryEmbeddingProviders().map((adapter) => adapter.id)).toEqual(["active"]);
});
it("clears newly-registered memory plugin registries when plugin register fails", () => {
@@ -1134,6 +1148,10 @@ module.exports = { id: "skipped-scoped-only", register() { throw new Error("skip
id: "failing-memory",
kind: "memory",
register(api) {
api.registerMemoryEmbeddingProvider({
id: "failed",
create: async () => ({ provider: null }),
});
api.registerMemoryPromptSection(() => ["stale failure section"]);
api.registerMemoryFlushPlan(() => ({
softThresholdTokens: 10,
@@ -1173,6 +1191,7 @@ module.exports = { id: "skipped-scoped-only", register() { throw new Error("skip
expect(buildMemoryPromptSection({ availableTools: new Set() })).toEqual([]);
expect(resolveMemoryFlushPlan({})).toBeNull();
expect(getMemoryRuntime()).toBeUndefined();
expect(listMemoryEmbeddingProviders()).toEqual([]);
});
it("throws when activate:false is used without cache:false", () => {
@@ -3435,6 +3454,10 @@ export const runtimeValue = helperValue;`,
describe("clearPluginLoaderCache", () => {
it("resets registered memory plugin registries", () => {
registerMemoryEmbeddingProvider({
id: "stale",
create: async () => ({ provider: null }),
});
registerMemoryPromptSection(() => ["stale memory section"]);
registerMemoryFlushPlanResolver(() => ({
softThresholdTokens: 1,
@@ -3457,11 +3480,13 @@ describe("clearPluginLoaderCache", () => {
]);
expect(resolveMemoryFlushPlan({})?.relativePath).toBe("memory/stale.md");
expect(getMemoryRuntime()).toBeDefined();
expect(getMemoryEmbeddingProvider("stale")).toBeDefined();
clearPluginLoaderCache();
expect(buildMemoryPromptSection({ availableTools: new Set() })).toEqual([]);
expect(resolveMemoryFlushPlan({})).toBeNull();
expect(getMemoryRuntime()).toBeUndefined();
expect(getMemoryEmbeddingProvider("stale")).toBeUndefined();
});
});

View File

@@ -22,6 +22,11 @@ import { discoverOpenClawPlugins } from "./discovery.js";
import { initializeGlobalHookRunner } from "./hook-runner-global.js";
import { clearPluginInteractiveHandlers } from "./interactive.js";
import { loadPluginManifestRegistry } from "./manifest-registry.js";
import {
clearMemoryEmbeddingProviders,
listMemoryEmbeddingProviders,
restoreMemoryEmbeddingProviders,
} from "./memory-embedding-providers.js";
import {
clearMemoryPluginState,
getMemoryFlushPlanResolver,
@@ -99,6 +104,7 @@ export class PluginLoadFailureError extends Error {
type CachedPluginState = {
registry: PluginRegistry;
memoryEmbeddingProviders: ReturnType<typeof listMemoryEmbeddingProviders>;
memoryFlushPlanResolver: ReturnType<typeof getMemoryFlushPlanResolver>;
memoryPromptBuilder: ReturnType<typeof getMemoryPromptSectionBuilder>;
memoryRuntime: ReturnType<typeof getMemoryRuntime>;
@@ -127,6 +133,7 @@ const LAZY_RUNTIME_REFLECTION_KEYS = [
export function clearPluginLoaderCache(): void {
registryCache.clear();
openAllowlistWarningCache.clear();
clearMemoryEmbeddingProviders();
clearMemoryPluginState();
}
@@ -711,6 +718,7 @@ export function loadOpenClawPlugins(options: PluginLoadOptions = {}): PluginRegi
if (cacheEnabled) {
const cached = getCachedPluginRegistry(cacheKey);
if (cached) {
restoreMemoryEmbeddingProviders(cached.memoryEmbeddingProviders);
restoreMemoryPluginState({
promptBuilder: cached.memoryPromptBuilder,
flushPlanResolver: cached.memoryFlushPlanResolver,
@@ -1226,6 +1234,7 @@ export function loadOpenClawPlugins(options: PluginLoadOptions = {}): PluginRegi
hookPolicy: entry?.hooks,
registrationMode,
});
const previousMemoryEmbeddingProviders = listMemoryEmbeddingProviders();
const previousMemoryFlushPlanResolver = getMemoryFlushPlanResolver();
const previousMemoryPromptBuilder = getMemoryPromptSectionBuilder();
const previousMemoryRuntime = getMemoryRuntime();
@@ -1242,6 +1251,7 @@ export function loadOpenClawPlugins(options: PluginLoadOptions = {}): PluginRegi
}
// Snapshot loads should not replace process-global runtime prompt state.
if (!shouldActivate) {
restoreMemoryEmbeddingProviders(previousMemoryEmbeddingProviders);
restoreMemoryPluginState({
promptBuilder: previousMemoryPromptBuilder,
flushPlanResolver: previousMemoryFlushPlanResolver,
@@ -1251,6 +1261,7 @@ export function loadOpenClawPlugins(options: PluginLoadOptions = {}): PluginRegi
registry.plugins.push(record);
seenIds.set(pluginId, candidate.origin);
} catch (err) {
restoreMemoryEmbeddingProviders(previousMemoryEmbeddingProviders);
restoreMemoryPluginState({
promptBuilder: previousMemoryPromptBuilder,
flushPlanResolver: previousMemoryFlushPlanResolver,
@@ -1291,6 +1302,7 @@ export function loadOpenClawPlugins(options: PluginLoadOptions = {}): PluginRegi
if (cacheEnabled) {
setCachedPluginRegistry(cacheKey, {
registry,
memoryEmbeddingProviders: listMemoryEmbeddingProviders(),
memoryFlushPlanResolver: getMemoryFlushPlanResolver(),
memoryPromptBuilder: getMemoryPromptSectionBuilder(),
memoryRuntime: getMemoryRuntime(),

View File

@@ -0,0 +1,49 @@
import { afterEach, describe, expect, it } from "vitest";
import {
clearMemoryEmbeddingProviders,
getMemoryEmbeddingProvider,
listMemoryEmbeddingProviders,
registerMemoryEmbeddingProvider,
restoreMemoryEmbeddingProviders,
type MemoryEmbeddingProviderAdapter,
} from "./memory-embedding-providers.js";
/** Builds a minimal adapter stub whose create() resolves to a null provider. */
function createAdapter(id: string): MemoryEmbeddingProviderAdapter {
  const create = async () => ({ provider: null });
  return { id, create };
}
// The registry is module-global state; wipe it after each case so tests stay independent.
afterEach(() => clearMemoryEmbeddingProviders());

describe("memory embedding provider registry", () => {
  it("registers and lists adapters in insertion order", () => {
    for (const id of ["alpha", "beta"]) {
      registerMemoryEmbeddingProvider(createAdapter(id));
    }
    expect(getMemoryEmbeddingProvider("alpha")?.id).toBe("alpha");
    const listedIds = listMemoryEmbeddingProviders().map((entry) => entry.id);
    expect(listedIds).toEqual(["alpha", "beta"]);
  });

  it("restores a previous snapshot", () => {
    registerMemoryEmbeddingProvider(createAdapter("alpha"));
    const beta = createAdapter("beta");
    restoreMemoryEmbeddingProviders([beta]);
    // Restore replaces (not merges) the registry contents.
    expect(getMemoryEmbeddingProvider("alpha")).toBeUndefined();
    expect(getMemoryEmbeddingProvider("beta")).toBe(beta);
  });

  it("clears the registry", () => {
    registerMemoryEmbeddingProvider(createAdapter("alpha"));
    clearMemoryEmbeddingProviders();
    expect(listMemoryEmbeddingProviders()).toEqual([]);
  });
});

View File

@@ -0,0 +1,95 @@
import type { OpenClawConfig } from "../config/config.js";
import type { SecretInput } from "../config/types.secrets.js";
import type { EmbeddingInput } from "./memory-host/embedding-inputs.js";
/** One chunk of memory content queued for embedding. */
export type MemoryEmbeddingBatchChunk = {
  /** Plain-text form of the chunk. */
  text: string;
  /** Optional structured input (presumably multimodal) — TODO confirm precedence over `text` at call sites. */
  embeddingInput?: EmbeddingInput;
};

/** Options passed to a runtime's batch embedding hook. */
export type MemoryEmbeddingBatchOptions = {
  /** Agent whose memory is being indexed. */
  agentId: string;
  /** Chunks to embed in this batch. */
  chunks: MemoryEmbeddingBatchChunk[];
  /** Whether the caller waits for batch completion — assumed; confirm against batch runners. */
  wait: boolean;
  /** Maximum concurrent requests. */
  concurrency: number;
  /** Delay between status polls, in milliseconds. */
  pollIntervalMs: number;
  /** Overall deadline for the batch, in milliseconds. */
  timeoutMs: number;
  /** Debug logger supplied by the host. */
  debug: (message: string, data?: Record<string, unknown>) => void;
};

/** Runtime-side handle returned alongside a created provider. */
export type MemoryEmbeddingProviderRuntime = {
  id: string;
  /** Extra data mixed into cache keys — presumably to invalidate caches when provider config changes. */
  cacheKeyData?: Record<string, unknown>;
  /** Optional batch API; `null` result signals the caller to fall back — TODO confirm fallback semantics. */
  batchEmbed?: (options: MemoryEmbeddingBatchOptions) => Promise<number[][] | null>;
};

/** A ready-to-use embedding provider instance. */
export type MemoryEmbeddingProvider = {
  id: string;
  /** Model identifier used for embeddings. */
  model: string;
  /** Per-input token cap, when the backend imposes one. */
  maxInputTokens?: number;
  /** Embeds a single search query. */
  embedQuery: (text: string) => Promise<number[]>;
  /** Embeds a batch of plain-text inputs. */
  embedBatch: (texts: string[]) => Promise<number[][]>;
  /** Embeds structured (possibly multimodal) inputs, when supported. */
  embedBatchInputs?: (inputs: EmbeddingInput[]) => Promise<number[][]>;
};

/** Options an adapter receives when asked to construct a provider. */
export type MemoryEmbeddingProviderCreateOptions = {
  config: OpenClawConfig;
  /** Agent workspace directory, when scoped to one agent. */
  agentDir?: string;
  /** Remote transport settings (base URL, credentials, extra headers). */
  remote?: {
    baseUrl?: string;
    apiKey?: SecretInput;
    headers?: Record<string, string>;
  };
  /** Model to instantiate. */
  model: string;
  /** Local transport settings (model file and cache locations). */
  local?: {
    modelPath?: string;
    modelCacheDir?: string;
  };
  /** Requested embedding vector length, when the backend supports truncation. */
  outputDimensionality?: number;
};

/** Result of an adapter's `create`; `provider: null` means the adapter declined — TODO confirm. */
export type MemoryEmbeddingProviderCreateResult = {
  provider: MemoryEmbeddingProvider | null;
  runtime?: MemoryEmbeddingProviderRuntime;
};

/** Pluggable adapter describing one embedding backend registered by a plugin. */
export type MemoryEmbeddingProviderAdapter = {
  /** Stable id the registry keys on. */
  id: string;
  /** Model used when the configuration names none. */
  defaultModel?: string;
  /** Whether the backend runs in-process or over the network. */
  transport?: "local" | "remote";
  /** Ordering hint for automatic provider selection — lower/higher precedence not visible here; verify in the selector. */
  autoSelectPriority?: number;
  /** Presumably lets an explicitly-named adapter win over an `auto` configuration; confirm at the selection site. */
  allowExplicitWhenConfiguredAuto?: boolean;
  /** Reports whether a given model can embed non-text inputs. */
  supportsMultimodalEmbeddings?: (params: { model: string }) => boolean;
  /** Constructs the provider (and optional runtime handle) from resolved options. */
  create: (
    options: MemoryEmbeddingProviderCreateOptions,
  ) => Promise<MemoryEmbeddingProviderCreateResult>;
  /** Turns a setup failure into a user-facing message. */
  formatSetupError?: (err: unknown) => string;
  /** When true for an error, auto-selection moves on to the next adapter — assumed from the name; verify. */
  shouldContinueAutoSelection?: (err: unknown) => boolean;
};
/** Process-global registry of embedding provider adapters, keyed by adapter id. */
const adapterRegistry = new Map<string, MemoryEmbeddingProviderAdapter>();

/**
 * Registers (or replaces) an adapter under its id.
 * Re-registering an existing id keeps the adapter's original insertion position
 * (standard `Map.set` semantics), so listing order stays stable.
 */
export function registerMemoryEmbeddingProvider(adapter: MemoryEmbeddingProviderAdapter): void {
  adapterRegistry.set(adapter.id, adapter);
}

/** Looks up an adapter by id; returns `undefined` when none is registered. */
export function getMemoryEmbeddingProvider(id: string): MemoryEmbeddingProviderAdapter | undefined {
  return adapterRegistry.get(id);
}

/** Returns a fresh array of all registered adapters, in insertion order. */
export function listMemoryEmbeddingProviders(): MemoryEmbeddingProviderAdapter[] {
  return [...adapterRegistry.values()];
}

/** Replaces the registry contents with a previously captured snapshot. */
export function restoreMemoryEmbeddingProviders(adapters: MemoryEmbeddingProviderAdapter[]): void {
  adapterRegistry.clear();
  adapters.forEach((adapter) => adapterRegistry.set(adapter.id, adapter));
}

/** Empties the registry (invoked by the plugin loader cache reset). */
export function clearMemoryEmbeddingProviders(): void {
  adapterRegistry.clear();
}

/** Backward-compatible alias for clearing the registry. */
export const _resetMemoryEmbeddingProviders = clearMemoryEmbeddingProviders;

View File

@@ -1,7 +1,7 @@
import path from "node:path";
import { describe, expect, it } from "vitest";
import { resolveAgentWorkspaceDir } from "../agents/agent-scope.js";
import type { OpenClawConfig } from "../config/config.js";
import { resolveAgentWorkspaceDir } from "../../agents/agent-scope.js";
import type { OpenClawConfig } from "../../config/config.js";
import { resolveMemoryBackendConfig } from "./backend-config.js";
describe("resolveMemoryBackendConfig", () => {

View File

@@ -1,8 +1,8 @@
import path from "node:path";
import { resolveAgentWorkspaceDir } from "../agents/agent-scope.js";
import { parseDurationMs } from "../cli/parse-duration.js";
import type { OpenClawConfig } from "../config/config.js";
import type { SessionSendPolicyConfig } from "../config/types.base.js";
import { resolveAgentWorkspaceDir } from "../../agents/agent-scope.js";
import { parseDurationMs } from "../../cli/parse-duration.js";
import type { OpenClawConfig } from "../../config/config.js";
import type { SessionSendPolicyConfig } from "../../config/types.base.js";
import type {
MemoryBackend,
MemoryCitationsMode,
@@ -10,9 +10,9 @@ import type {
MemoryQmdIndexPath,
MemoryQmdMcporterConfig,
MemoryQmdSearchMode,
} from "../config/types.memory.js";
import { resolveUserPath } from "../utils.js";
import { splitShellArgs } from "../utils/shell-argv.js";
} from "../../config/types.memory.js";
import { resolveUserPath } from "../../utils.js";
import { splitShellArgs } from "../../utils/shell-argv.js";
export type ResolvedMemoryBackendConfig = {
backend: MemoryBackend;

View File

@@ -1,6 +1,6 @@
import { beforeEach, describe, expect, it, vi } from "vitest";
vi.mock("../infra/retry.js", () => ({
vi.mock("../../infra/retry.js", () => ({
retryAsync: vi.fn(async (run: () => Promise<unknown>) => await run()),
}));
@@ -9,7 +9,9 @@ vi.mock("./post-json.js", () => ({
}));
describe("postJsonWithRetry", () => {
let retryAsyncMock: ReturnType<typeof vi.mocked<typeof import("../infra/retry.js").retryAsync>>;
let retryAsyncMock: ReturnType<
typeof vi.mocked<typeof import("../../infra/retry.js").retryAsync>
>;
let postJsonMock: ReturnType<typeof vi.mocked<typeof import("./post-json.js").postJson>>;
let postJsonWithRetry: typeof import("./batch-http.js").postJsonWithRetry;
@@ -18,7 +20,7 @@ describe("postJsonWithRetry", () => {
vi.clearAllMocks();
vi.resetModules();
({ postJsonWithRetry } = await import("./batch-http.js"));
const retryModule = await import("../infra/retry.js");
const retryModule = await import("../../infra/retry.js");
const postJsonModule = await import("./post-json.js");
retryAsyncMock = vi.mocked(retryModule.retryAsync);
postJsonMock = vi.mocked(postJsonModule.postJson);

View File

@@ -1,5 +1,5 @@
import type { SsrFPolicy } from "../infra/net/ssrf.js";
import { retryAsync } from "../infra/retry.js";
import type { SsrFPolicy } from "../../infra/net/ssrf.js";
import { retryAsync } from "../../infra/retry.js";
import { postJson } from "./post-json.js";
export async function postJsonWithRetry<T>(params: {

View File

@@ -1,4 +1,4 @@
import type { SsrFPolicy } from "../infra/net/ssrf.js";
import type { SsrFPolicy } from "../../infra/net/ssrf.js";
export type BatchHttpClientConfig = {
baseUrl?: string;

View File

@@ -1,5 +1,5 @@
import { isTruthyEnvValue } from "../infra/env.js";
import { createSubsystemLogger } from "../logging/subsystem.js";
import { isTruthyEnvValue } from "../../infra/env.js";
import { createSubsystemLogger } from "../../logging/subsystem.js";
const debugEmbeddings = isTruthyEnvValue(process.env.OPENCLAW_DEBUG_MEMORY_EMBEDDINGS);
const log = createSubsystemLogger("memory/embeddings");

View File

@@ -1,9 +1,9 @@
import { afterEach, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
import * as authModule from "../agents/model-auth.js";
import * as authModule from "../../agents/model-auth.js";
import { mockPublicPinnedHostname } from "./test-helpers/ssrf.js";
vi.mock("../agents/model-auth.js", async () => {
const { createModelAuthMockModule } = await import("../test-utils/model-auth-mock.js");
vi.mock("../../agents/model-auth.js", async () => {
const { createModelAuthMockModule } = await import("../../test-utils/model-auth-mock.js");
return createModelAuthMockModule();
});

View File

@@ -1,14 +1,14 @@
import {
collectProviderApiKeysForExecution,
executeWithApiKeyRotation,
} from "../agents/api-key-rotation.js";
import { requireApiKey, resolveApiKeyForProvider } from "../agents/model-auth.js";
import { parseGeminiAuth } from "../infra/gemini-auth.js";
} from "../../agents/api-key-rotation.js";
import { requireApiKey, resolveApiKeyForProvider } from "../../agents/model-auth.js";
import { parseGeminiAuth } from "../../infra/gemini-auth.js";
import {
DEFAULT_GOOGLE_API_BASE_URL,
normalizeGoogleApiBaseUrl,
} from "../infra/google-api-base-url.js";
import type { SsrFPolicy } from "../infra/net/ssrf.js";
} from "../../infra/google-api-base-url.js";
import type { SsrFPolicy } from "../../infra/net/ssrf.js";
import type { EmbeddingInput } from "./embedding-inputs.js";
import { sanitizeAndNormalizeEmbedding } from "./embedding-vectors.js";
import { debugEmbeddingsLog } from "./embeddings-debug.js";

View File

@@ -1,4 +1,4 @@
import type { SsrFPolicy } from "../infra/net/ssrf.js";
import type { SsrFPolicy } from "../../infra/net/ssrf.js";
import { normalizeEmbeddingModelWithPrefixes } from "./embeddings-model-normalize.js";
import {
createRemoteEmbeddingProvider,

Some files were not shown because too many files have changed in this diff Show More