perf: extract memory session sync state helpers

This commit is contained in:
Peter Steinberger
2026-04-07 00:13:23 +01:00
parent a690eafdf7
commit d2a03eca1a
4 changed files with 104 additions and 104 deletions

View File

@@ -391,100 +391,6 @@ describe("memory index", () => {
await manager.close?.();
});
// Regression test: a sync scoped via `sessionFiles` must refresh only the
// listed transcripts, leaving every other session row's stored hash intact.
it("targets explicit session files during post-compaction sync", async () => {
// Unique state/store paths per run so parallel test workers cannot collide.
const stateDir = path.join(fixtureRoot, `state-targeted-${randomUUID()}`);
const sessionDir = path.join(stateDir, "agents", "main", "sessions");
const firstSessionPath = path.join(sessionDir, "targeted-first.jsonl");
const secondSessionPath = path.join(sessionDir, "targeted-second.jsonl");
const storePath = path.join(workspaceDir, `index-targeted-${randomUUID()}.sqlite`);
// Redirect session discovery to the fixture dir; restored in the `finally` below.
const previousStateDir = process.env.OPENCLAW_STATE_DIR;
process.env.OPENCLAW_STATE_DIR = stateDir;
await fs.mkdir(sessionDir, { recursive: true });
// Seed two v1 transcripts so the initial full sync indexes both files.
await fs.writeFile(
firstSessionPath,
`${JSON.stringify({
type: "message",
message: { role: "user", content: [{ type: "text", text: "first transcript v1" }] },
})}\n`,
);
await fs.writeFile(
secondSessionPath,
`${JSON.stringify({
type: "message",
message: { role: "user", content: [{ type: "text", text: "second transcript v1" }] },
})}\n`,
);
try {
const result = await getMemorySearchManager({
cfg: createCfg({
storePath,
sources: ["sessions"],
sessionMemory: true,
}),
agentId: "main",
});
const manager = requireManager(result);
// First sync: full scan, records a hash per session file.
await manager.sync?.({ reason: "test" });
// Reach into the manager's private sqlite handle to read indexed file hashes.
// NOTE(review): relies on internal schema (`files` table with path/source/hash).
const db = (
manager as unknown as {
db: {
prepare: (sql: string) => {
get: (path: string, source: string) => { hash: string } | undefined;
all?: (...args: unknown[]) => unknown;
};
};
}
).db;
const getSessionHash = (sessionPath: string) =>
db
.prepare(`SELECT hash FROM files WHERE path = ? AND source = ?`)
.get(sessionPath, "sessions")?.hash;
// Baseline hashes captured before the transcripts are rewritten.
const firstOriginalHash = getSessionHash("sessions/targeted-first.jsonl");
const secondOriginalHash = getSessionHash("sessions/targeted-second.jsonl");
// Rewrite BOTH transcripts on disk (simulating compaction output) ...
await fs.writeFile(
firstSessionPath,
`${JSON.stringify({
type: "message",
message: {
role: "user",
content: [{ type: "text", text: "first transcript v2 after compaction" }],
},
})}\n`,
);
await fs.writeFile(
secondSessionPath,
`${JSON.stringify({
type: "message",
message: {
role: "user",
content: [{ type: "text", text: "second transcript v2 should stay untouched" }],
},
})}\n`,
);
// ... but only request a targeted sync for the FIRST file.
await manager.sync?.({
reason: "post-compaction",
sessionFiles: [firstSessionPath],
});
// Targeted file was re-indexed (hash changed); the untargeted file kept
// its original hash even though its bytes on disk changed.
expect(getSessionHash("sessions/targeted-first.jsonl")).not.toBe(firstOriginalHash);
expect(getSessionHash("sessions/targeted-second.jsonl")).toBe(secondOriginalHash);
await manager.close?.();
} finally {
// Restore the env var exactly as found and drop the fixture state dir.
if (previousStateDir === undefined) {
delete process.env.OPENCLAW_STATE_DIR;
} else {
process.env.OPENCLAW_STATE_DIR = previousStateDir;
}
await fs.rm(stateDir, { recursive: true, force: true });
}
});
it.skip("finds keyword matches via hybrid search when query embedding is zero", async () => {
await expectHybridKeywordSearchFindsMemory(
createCfg({

View File

@@ -0,0 +1,64 @@
import { describe, expect, it } from "vitest";
import { resolveMemorySessionSyncPlan } from "./manager-session-sync-state.js";
// Unit coverage for resolveMemorySessionSyncPlan: full scans, targeted
// (post-compaction) syncs, and the dirty-only incremental path.
describe("memory session sync state", () => {
  // Shared mapping from an absolute transcript path to its index key.
  const toSessionPath = (file: string): string => `sessions/${file.split("/").at(-1)}`;

  it("tracks active paths and bulk hashes for full scans", () => {
    const rows = [
      { path: "sessions/a.jsonl", hash: "hash-a" },
      { path: "sessions/b.jsonl", hash: "hash-b" },
    ];
    const plan = resolveMemorySessionSyncPlan({
      needsFullReindex: false,
      files: ["/tmp/a.jsonl", "/tmp/b.jsonl"],
      targetSessionFiles: null,
      sessionsDirtyFiles: new Set(),
      existingRows: rows,
      sessionPathForFile: toSessionPath,
    });
    // Empty dirty set forces a full index pass over every session file.
    expect(plan.indexAll).toBe(true);
    expect(plan.activePaths).toEqual(new Set(["sessions/a.jsonl", "sessions/b.jsonl"]));
    expect(plan.existingRows).toEqual([
      { path: "sessions/a.jsonl", hash: "hash-a" },
      { path: "sessions/b.jsonl", hash: "hash-b" },
    ]);
    expect(plan.existingHashes).toEqual(
      new Map([
        ["sessions/a.jsonl", "hash-a"],
        ["sessions/b.jsonl", "hash-b"],
      ]),
    );
  });

  it("treats targeted session syncs as refresh-only and skips unrelated pruning", () => {
    const targeted = new Set(["/tmp/targeted-first.jsonl"]);
    const plan = resolveMemorySessionSyncPlan({
      needsFullReindex: false,
      files: ["/tmp/targeted-first.jsonl"],
      targetSessionFiles: targeted,
      sessionsDirtyFiles: new Set(["/tmp/targeted-first.jsonl"]),
      existingRows: [
        { path: "sessions/targeted-first.jsonl", hash: "hash-first" },
        { path: "sessions/targeted-second.jsonl", hash: "hash-second" },
      ],
      sessionPathForFile: toSessionPath,
    });
    expect(plan.indexAll).toBe(true);
    // Null tracking state signals "refresh only, prune nothing".
    expect(plan.activePaths).toBeNull();
    expect(plan.existingRows).toBeNull();
    expect(plan.existingHashes).toBeNull();
  });

  it("keeps dirty-only incremental mode when no targeted sync is requested", () => {
    const plan = resolveMemorySessionSyncPlan({
      needsFullReindex: false,
      files: ["/tmp/incremental.jsonl"],
      targetSessionFiles: null,
      sessionsDirtyFiles: new Set(["/tmp/incremental.jsonl"]),
      existingRows: [],
      sessionPathForFile: toSessionPath,
    });
    expect(plan.indexAll).toBe(false);
    expect(plan.activePaths).toEqual(new Set(["sessions/incremental.jsonl"]));
  });
});

View File

@@ -0,0 +1,29 @@
import { type MemorySourceFileStateRow } from "./manager-source-state.js";
/**
 * Decides how a session-source sync pass should run.
 *
 * Two modes fall out of `targetSessionFiles`:
 * - Targeted sync (set provided): refresh only the listed files. All tracking
 *   state (`activePaths`, `existingRows`, `existingHashes`) is `null` so the
 *   caller skips pruning/diffing of unrelated rows, and `indexAll` is forced on.
 * - Full/incremental scan (`null`): map every discovered file to its index
 *   path, surface the stored rows (defaulting to an empty list), and build a
 *   path→hash lookup for change detection.
 *
 * `indexAll` is also on when a full reindex was requested or when the dirty
 * set is empty (nothing tracked as changed, so everything is rescanned).
 */
export function resolveMemorySessionSyncPlan(params: {
  needsFullReindex: boolean;
  files: string[];
  targetSessionFiles: Set<string> | null;
  sessionsDirtyFiles: Set<string>;
  existingRows?: MemorySourceFileStateRow[] | null;
  sessionPathForFile: (file: string) => string;
}): {
  activePaths: Set<string> | null;
  existingRows: MemorySourceFileStateRow[] | null;
  existingHashes: Map<string, string> | null;
  indexAll: boolean;
} {
  const { needsFullReindex, files, targetSessionFiles, sessionsDirtyFiles } = params;
  const indexAll =
    needsFullReindex || Boolean(targetSessionFiles) || sessionsDirtyFiles.size === 0;

  // Targeted mode: no pruning state at all — refresh the listed files only.
  if (targetSessionFiles) {
    return { activePaths: null, existingRows: null, existingHashes: null, indexAll };
  }

  const activePaths = new Set<string>();
  for (const file of files) {
    activePaths.add(params.sessionPathForFile(file));
  }
  const existingRows = params.existingRows ?? [];
  const existingHashes = new Map<string, string>();
  for (const row of existingRows) {
    existingHashes.set(row.path, row.hash);
  }
  return { activePaths, existingRows, existingHashes, indexAll };
}

View File

@@ -55,6 +55,7 @@ import {
type MemoryIndexMeta,
} from "./manager-reindex-state.js";
import { shouldSyncSessionsForReindex } from "./manager-session-reindex.js";
import { resolveMemorySessionSyncPlan } from "./manager-session-sync-state.js";
import {
loadMemorySourceFileState,
resolveMemorySourceExistingHash,
@@ -753,20 +754,20 @@ export abstract class MemoryManagerSyncOps {
const files = targetSessionFiles
? Array.from(targetSessionFiles)
: await listSessionFilesForAgent(this.agentId);
const activePaths = targetSessionFiles
? null
: new Set(files.map((file) => sessionPathForFile(file)));
const existingRows =
activePaths === null
const sessionPlan = resolveMemorySessionSyncPlan({
needsFullReindex: params.needsFullReindex,
files,
targetSessionFiles,
sessionsDirtyFiles: this.sessionsDirtyFiles,
existingRows: targetSessionFiles
? null
: loadMemorySourceFileState({
db: this.db,
source: "sessions",
}).rows;
const existingHashes =
existingRows === null ? null : new Map(existingRows.map((row) => [row.path, row.hash]));
const indexAll =
params.needsFullReindex || Boolean(targetSessionFiles) || this.sessionsDirtyFiles.size === 0;
}).rows,
sessionPathForFile,
});
const { activePaths, existingRows, existingHashes, indexAll } = sessionPlan;
log.debug("memory sync: indexing session files", {
files: files.length,
indexAll,