perf(test): reduce import and fixture overhead in hot tests

This commit is contained in:
Peter Steinberger
2026-02-14 02:42:12 +00:00
parent 2583de5305
commit dd08ca97bb
4 changed files with 77 additions and 176 deletions

View File

@@ -236,110 +236,6 @@ describe("memory index", () => {
expect(results[0]?.path).toContain("memory/2026-01-12.md");
});
it("hybrid weights shift ranking between vector and keyword matches", async () => {
  // Two fixture docs: one that should win on embedding similarity alone, and
  // one stuffed with repeated keywords (plus the rare token "id123") that
  // should win on full-text scoring.
  const manyAlpha = Array.from({ length: 50 }, () => "Alpha").join(" ");
  await fs.writeFile(
    path.join(workspaceDir, "memory", "vector-only.md"),
    "Alpha beta. Alpha beta. Alpha beta. Alpha beta.",
  );
  await fs.writeFile(
    path.join(workspaceDir, "memory", "keyword-only.md"),
    `${manyAlpha} beta id123.`,
  );
  // Builds a config whose hybrid scoring weights tilt toward either the vector
  // or the keyword side; every other field is identical between the two runs.
  const makeHybridCfg = ({ vectorWeight, textWeight, onSearch }) => ({
    agents: {
      defaults: {
        workspace: workspaceDir,
        memorySearch: {
          provider: "openai",
          model: "mock-embed",
          store: { path: indexPath, vector: { enabled: false } },
          sync: { watch: false, onSessionStart: false, onSearch },
          query: {
            minScore: 0,
            maxResults: 200,
            hybrid: {
              enabled: true,
              vectorWeight,
              textWeight,
              candidateMultiplier: 10,
            },
          },
        },
      },
      list: [{ id: "main", default: true }],
    },
  });

  // Vector-dominant weighting: the embedding-similar doc should outrank the
  // keyword-stuffed one.
  const vectorWeighted = await getMemorySearchManager({
    cfg: makeHybridCfg({ vectorWeight: 0.99, textWeight: 0.01, onSearch: true }),
    agentId: "main",
  });
  expect(vectorWeighted.manager).not.toBeNull();
  if (!vectorWeighted.manager) {
    throw new Error("manager missing");
  }
  manager = vectorWeighted.manager;
  const status = manager.status();
  if (!status.fts?.available) {
    // Hybrid ranking requires full-text search; skip quietly when the FTS
    // backend is unavailable in this environment.
    return;
  }
  await manager.sync({ force: true });
  const vectorResults = await manager.search("alpha beta id123");
  expect(vectorResults.length).toBeGreaterThan(0);
  const vectorPaths = vectorResults.map((r) => r.path);
  expect(vectorPaths).toContain("memory/vector-only.md");
  expect(vectorPaths).toContain("memory/keyword-only.md");
  const vectorOnly = vectorResults.find((r) => r.path === "memory/vector-only.md");
  const keywordOnly = vectorResults.find((r) => r.path === "memory/keyword-only.md");
  // toBeGreaterThan reports both scores on failure, unlike asserting a bare
  // boolean comparison.
  expect(vectorOnly?.score ?? 0).toBeGreaterThan(keywordOnly?.score ?? 0);
  await manager.close();
  manager = null;

  // Keyword-dominant weighting. onSearch is false here: the second manager
  // reuses the index already built at the same store path by the sync above.
  const textWeighted = await getMemorySearchManager({
    cfg: makeHybridCfg({ vectorWeight: 0.01, textWeight: 0.99, onSearch: false }),
    agentId: "main",
  });
  expect(textWeighted.manager).not.toBeNull();
  if (!textWeighted.manager) {
    throw new Error("manager missing");
  }
  manager = textWeighted.manager;
  const keywordResults = await manager.search("alpha beta id123");
  expect(keywordResults.length).toBeGreaterThan(0);
  const keywordPaths = keywordResults.map((r) => r.path);
  expect(keywordPaths).toContain("memory/vector-only.md");
  expect(keywordPaths).toContain("memory/keyword-only.md");
  const vectorOnlyAfter = keywordResults.find((r) => r.path === "memory/vector-only.md");
  const keywordOnlyAfter = keywordResults.find((r) => r.path === "memory/keyword-only.md");
  expect(keywordOnlyAfter?.score ?? 0).toBeGreaterThan(vectorOnlyAfter?.score ?? 0);
});
it("reports vector availability after probe", async () => {
const cfg = {
agents: {