feat(memory-wiki): add claim health reports

This commit is contained in:
Vincent Koc
2026-04-07 08:10:33 +01:00
parent 947a43dae3
commit 44fd8b0d6e
8 changed files with 873 additions and 102 deletions

View File

@@ -20,6 +20,7 @@ Docs: https://docs.openclaw.ai
- Memory/dreaming: ingest redacted session transcripts into the dreaming corpus with per-day session-corpus notes, cursor checkpointing, and promotion/doctor support. (#62227) Thanks @vignesh07.
- Plugins/memory: add a public memory-artifact export seam to the unified memory capability so companion plugins like `memory-wiki` can bridge the active memory plugin without reaching into `memory-core` internals. Thanks @vincentkoc.
- Memory/wiki: add structured claim/evidence fields plus compiled agent digest artifacts so `memory-wiki` behaves more like a persistent knowledge layer and less like markdown-only page storage. Thanks @vincentkoc.
- Memory/wiki: add claim-health linting, contradiction clustering, staleness-aware dashboards, and freshness-weighted wiki search so `memory-wiki` can act more like a maintained belief layer than a passive markdown dump. Thanks @vincentkoc.
### Fixes

View File

@@ -0,0 +1,240 @@
import type { WikiClaim, WikiPageSummary } from "./markdown.js";
// One day in milliseconds; freshness is bucketed in whole days.
const DAY_MS = 24 * 60 * 60 * 1000;
// Days since last touch before a page/claim is flagged "aging".
export const WIKI_AGING_DAYS = 30;
// Days since last touch before a page/claim is flagged "stale".
export const WIKI_STALE_DAYS = 90;
// Normalized claim statuses that indicate the claim is disputed.
const CONTESTED_CLAIM_STATUSES = new Set(["contested", "contradicted", "refuted", "superseded"]);
// Freshness buckets; "unknown" means no parseable timestamp was available.
export type WikiFreshnessLevel = "fresh" | "aging" | "stale" | "unknown";
// Freshness assessment for a page or claim.
export type WikiFreshness = {
  level: WikiFreshnessLevel;
  // Human-readable explanation (e.g. "last touched <ts>" or "missing updatedAt").
  reason: string;
  // Whole days since the latest relevant timestamp; absent when level is "unknown".
  daysSinceTouch?: number;
  // Timestamp string that drove the assessment; absent when level is "unknown".
  lastTouchedAt?: string;
};
// Per-claim health record consumed by lint, dashboards, and the agent digest.
export type WikiClaimHealth = {
  // Stable identifier: "<pagePath>#<claimId or claim-N>".
  key: string;
  pagePath: string;
  pageTitle: string;
  pageId?: string;
  claimId?: string;
  text: string;
  // Normalized (trimmed, lowercased) claim status; defaults to "supported".
  status: string;
  confidence?: number;
  evidenceCount: number;
  // True when the claim carries no structured evidence entries.
  missingEvidence: boolean;
  freshness: WikiFreshness;
};
// Claims sharing one claim id but with competing text/status across pages.
export type WikiClaimContradictionCluster = {
  key: string;
  label: string;
  entries: WikiClaimHealth[];
};
// Pages whose contradiction notes normalize to the same text key.
export type WikiPageContradictionCluster = {
  key: string;
  label: string;
  entries: Array<{
    pagePath: string;
    pageTitle: string;
    pageId?: string;
    note: string;
  }>;
};
// Parse an ISO-ish timestamp string to epoch milliseconds.
// Returns null for missing, blank, or unparseable input.
function parseTimestamp(value?: string): number | null {
  if (!value || value.trim() === "") {
    return null;
  }
  const ms = Date.parse(value);
  return Number.isFinite(ms) ? ms : null;
}
// Clamp a day delta to be non-negative: timestamps slightly in the future
// (clock skew) count as touched "today" rather than a negative age.
function clampDaysSinceTouch(daysSinceTouch: number): number {
  return daysSinceTouch < 0 ? 0 : daysSinceTouch;
}
// Case-insensitive, whitespace-collapsed key for comparing claim texts.
function normalizeClaimTextKey(text: string): string {
  const collapsed = text.trim().split(/\s+/).join(" ");
  return collapsed.toLowerCase();
}
// Build a case-insensitive comparison key: lowercase the text, collapse every
// run of non-alphanumeric characters (punctuation and whitespace alike) into a
// single space, then trim. The final trim fixes a defect in the previous
// version, where trailing/leading punctuation left a stray space in the key so
// "...source.beta" and "...source.beta." produced distinct cluster keys.
function normalizeTextKey(text: string): string {
  return text
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, " ")
    .trim();
}
function buildFreshnessFromTimestamp(params: { timestamp?: string; now?: Date }): WikiFreshness {
const now = params.now ?? new Date();
const timestampMs = parseTimestamp(params.timestamp);
if (timestampMs === null || !params.timestamp) {
return {
level: "unknown",
reason: "missing updatedAt",
};
}
const daysSinceTouch = clampDaysSinceTouch(Math.floor((now.getTime() - timestampMs) / DAY_MS));
if (daysSinceTouch >= WIKI_STALE_DAYS) {
return {
level: "stale",
reason: `last touched ${params.timestamp}`,
daysSinceTouch,
lastTouchedAt: params.timestamp,
};
}
if (daysSinceTouch >= WIKI_AGING_DAYS) {
return {
level: "aging",
reason: `last touched ${params.timestamp}`,
daysSinceTouch,
lastTouchedAt: params.timestamp,
};
}
return {
level: "fresh",
reason: `last touched ${params.timestamp}`,
daysSinceTouch,
lastTouchedAt: params.timestamp,
};
}
// Return the candidate string whose parsed timestamp is most recent,
// skipping missing/blank/unparseable entries. Ties keep the earliest
// candidate in iteration order (strictly-greater comparison).
function resolveLatestTimestamp(candidates: Array<string | undefined>): string | undefined {
  let latest: { value: string; ms: number } | undefined;
  for (const candidate of candidates) {
    if (!candidate) {
      continue;
    }
    const ms = parseTimestamp(candidate);
    if (ms === null) {
      continue;
    }
    if (!latest || ms > latest.ms) {
      latest = { value: candidate, ms };
    }
  }
  return latest?.value;
}
// Normalize a claim status to trimmed lowercase; blank or absent statuses
// default to "supported".
export function normalizeClaimStatus(status?: string): string {
  const normalized = status?.trim().toLowerCase();
  return normalized ? normalized : "supported";
}
// A claim is contested when its normalized status is one of the disputed
// states (contested/contradicted/refuted/superseded).
export function isClaimContestedStatus(status?: string): boolean {
  const normalized = normalizeClaimStatus(status);
  return CONTESTED_CLAIM_STATUSES.has(normalized);
}
// Page freshness is driven solely by the page's own updatedAt stamp.
export function assessPageFreshness(page: WikiPageSummary, now?: Date): WikiFreshness {
  return buildFreshnessFromTimestamp({ now, timestamp: page.updatedAt });
}
export function assessClaimFreshness(params: {
page: WikiPageSummary;
claim: WikiClaim;
now?: Date;
}): WikiFreshness {
const latestTimestamp = resolveLatestTimestamp([
params.claim.updatedAt,
params.page.updatedAt,
...params.claim.evidence.map((evidence) => evidence.updatedAt),
]);
return buildFreshnessFromTimestamp({ timestamp: latestTimestamp, now: params.now });
}
// Build the health record for one claim on one page. The record's key is
// "<pagePath>#<claimId>", falling back to a 1-based positional anchor
// ("claim-N") when the claim has no trimmed id. Optional fields (pageId,
// claimId, confidence) are only present when they carry a value.
export function buildWikiClaimHealth(params: {
  page: WikiPageSummary;
  claim: WikiClaim;
  index: number;
  now?: Date;
}): WikiClaimHealth {
  const { page, claim, index, now } = params;
  const claimId = claim.id?.trim();
  const anchor = claimId ?? `claim-${index + 1}`;
  const health: WikiClaimHealth = {
    key: `${page.relativePath}#${anchor}`,
    pagePath: page.relativePath,
    pageTitle: page.title,
    text: claim.text,
    status: normalizeClaimStatus(claim.status),
    evidenceCount: claim.evidence.length,
    missingEvidence: claim.evidence.length === 0,
    freshness: assessClaimFreshness({ page, claim, now }),
  };
  if (page.id) {
    health.pageId = page.id;
  }
  if (claimId) {
    health.claimId = claimId;
  }
  if (typeof claim.confidence === "number") {
    health.confidence = claim.confidence;
  }
  return health;
}
// Flatten every claim on every page into a list of per-claim health records.
export function collectWikiClaimHealth(pages: WikiPageSummary[], now?: Date): WikiClaimHealth[] {
  const results: WikiClaimHealth[] = [];
  for (const page of pages) {
    page.claims.forEach((claim, index) => {
      results.push(buildWikiClaimHealth({ page, claim, index, now }));
    });
  }
  return results;
}
export function buildClaimContradictionClusters(params: {
pages: WikiPageSummary[];
now?: Date;
}): WikiClaimContradictionCluster[] {
const claimHealth = collectWikiClaimHealth(params.pages, params.now);
const byId = new Map<string, WikiClaimHealth[]>();
for (const claim of claimHealth) {
if (!claim.claimId) {
continue;
}
const current = byId.get(claim.claimId) ?? [];
current.push(claim);
byId.set(claim.claimId, current);
}
return [...byId.entries()]
.flatMap(([claimId, entries]) => {
if (entries.length < 2) {
return [];
}
const distinctTexts = new Set(entries.map((entry) => normalizeClaimTextKey(entry.text)));
const distinctStatuses = new Set(entries.map((entry) => entry.status));
if (distinctTexts.size < 2 && distinctStatuses.size < 2) {
return [];
}
return [
{
key: claimId,
label: claimId,
entries: [...entries].toSorted((left, right) =>
left.pagePath.localeCompare(right.pagePath),
),
},
];
})
.toSorted((left, right) => left.label.localeCompare(right.label));
}
// Bucket page-level contradiction notes by normalized text so the same note
// appearing on multiple pages collapses into a single cluster. Notes that
// normalize to an empty key (pure punctuation/whitespace) are ignored.
// Entries are sorted by page path and clusters by label for stable output.
export function buildPageContradictionClusters(
  pages: WikiPageSummary[],
): WikiPageContradictionCluster[] {
  const buckets = new Map<string, WikiPageContradictionCluster["entries"]>();
  for (const page of pages) {
    for (const note of page.contradictions) {
      const key = normalizeTextKey(note);
      if (!key) {
        continue;
      }
      const entry: WikiPageContradictionCluster["entries"][number] = {
        pagePath: page.relativePath,
        pageTitle: page.title,
        note,
      };
      if (page.id) {
        entry.pageId = page.id;
      }
      const bucket = buckets.get(key);
      if (bucket) {
        bucket.push(entry);
      } else {
        buckets.set(key, [entry]);
      }
    }
  }
  const clusters = [...buckets].map(([key, entries]) => ({
    key,
    // Use the first note's original casing/punctuation as the display label.
    label: entries[0]?.note ?? key,
    entries: [...entries].toSorted((a, b) => a.pagePath.localeCompare(b.pagePath)),
  }));
  return clusters.toSorted((a, b) => a.label.localeCompare(b.label));
}

View File

@@ -187,11 +187,49 @@ describe("compileMemoryWikiVault", () => {
questions: ["What changed after launch?"],
contradictions: ["Conflicts with source.beta"],
confidence: 0.3,
claims: [
{
id: "claim.alpha.db",
text: "Alpha uses PostgreSQL for production writes.",
status: "supported",
confidence: 0.4,
evidence: [],
},
],
},
body: "# Alpha\n",
}),
"utf8",
);
await fs.writeFile(
path.join(rootDir, "concepts", "alpha-db.md"),
renderWikiMarkdown({
frontmatter: {
pageType: "concept",
id: "concept.alpha.db",
title: "Alpha DB",
sourceIds: ["source.alpha"],
updatedAt: "2025-10-01T00:00:00.000Z",
claims: [
{
id: "claim.alpha.db",
text: "Alpha uses MySQL for production writes.",
status: "contested",
confidence: 0.62,
evidence: [
{
sourceId: "source.alpha",
lines: "9-11",
updatedAt: "2025-10-01T00:00:00.000Z",
},
],
},
],
},
body: "# Alpha DB\n",
}),
"utf8",
);
await fs.writeFile(
path.join(rootDir, "sources", "alpha.md"),
renderWikiMarkdown({
@@ -208,19 +246,42 @@ describe("compileMemoryWikiVault", () => {
const result = await compileMemoryWikiVault(config);
expect(result.pageCounts.report).toBeGreaterThanOrEqual(4);
expect(result.pageCounts.report).toBeGreaterThanOrEqual(5);
await expect(
fs.readFile(path.join(rootDir, "reports", "open-questions.md"), "utf8"),
).resolves.toContain("[Alpha](entities/alpha.md): What changed after launch?");
await expect(
fs.readFile(path.join(rootDir, "reports", "contradictions.md"), "utf8"),
).resolves.toContain("[Alpha](entities/alpha.md): Conflicts with source.beta");
).resolves.toContain("Conflicts with source.beta: [Alpha](entities/alpha.md)");
await expect(
fs.readFile(path.join(rootDir, "reports", "contradictions.md"), "utf8"),
).resolves.toContain("`claim.alpha.db`");
await expect(
fs.readFile(path.join(rootDir, "reports", "low-confidence.md"), "utf8"),
).resolves.toContain("[Alpha](entities/alpha.md): confidence 0.30");
await expect(
fs.readFile(path.join(rootDir, "reports", "low-confidence.md"), "utf8"),
).resolves.toContain("Alpha uses PostgreSQL for production writes.");
await expect(
fs.readFile(path.join(rootDir, "reports", "claim-health.md"), "utf8"),
).resolves.toContain("Missing Evidence");
await expect(
fs.readFile(path.join(rootDir, "reports", "claim-health.md"), "utf8"),
).resolves.toContain("Alpha uses PostgreSQL for production writes.");
await expect(
fs.readFile(path.join(rootDir, "reports", "stale-pages.md"), "utf8"),
).resolves.toContain("[Alpha](entities/alpha.md): missing updatedAt");
const agentDigest = JSON.parse(
await fs.readFile(path.join(rootDir, ".openclaw-wiki", "cache", "agent-digest.json"), "utf8"),
) as {
claimHealth: { missingEvidence: number; freshness: { unknown: number } };
contradictionClusters: Array<{ key: string }>;
};
expect(agentDigest.claimHealth.missingEvidence).toBeGreaterThanOrEqual(1);
expect(agentDigest.claimHealth.freshness.unknown).toBeGreaterThanOrEqual(1);
expect(agentDigest.contradictionClusters).toContainEqual(
expect.objectContaining({ key: "claim.alpha.db" }),
);
});
it("skips dashboard report pages when createDashboards is disabled", async () => {

View File

@@ -4,6 +4,21 @@ import {
replaceManagedMarkdownBlock,
withTrailingNewline,
} from "openclaw/plugin-sdk/memory-host-markdown";
import {
assessClaimFreshness,
assessPageFreshness,
buildClaimContradictionClusters,
buildPageContradictionClusters,
collectWikiClaimHealth,
isClaimContestedStatus,
normalizeClaimStatus,
WIKI_AGING_DAYS,
type WikiClaimContradictionCluster,
type WikiClaimHealth,
type WikiFreshness,
type WikiFreshnessLevel,
type WikiPageContradictionCluster,
} from "./claim-health.js";
import type { ResolvedMemoryWikiConfig } from "./config.js";
import { appendMemoryWikiLog } from "./log.js";
import {
@@ -26,7 +41,6 @@ const COMPILE_PAGE_GROUPS: Array<{ kind: WikiPageKind; dir: string; heading: str
{ kind: "synthesis", dir: "syntheses", heading: "Syntheses" },
{ kind: "report", dir: "reports", heading: "Reports" },
];
const DASHBOARD_STALE_PAGE_DAYS = 30;
const AGENT_DIGEST_PATH = ".openclaw-wiki/cache/agent-digest.json";
const CLAIMS_DIGEST_PATH = ".openclaw-wiki/cache/claims.jsonl";
@@ -69,48 +83,108 @@ const DASHBOARD_PAGES: DashboardPageDefinition[] = [
id: "report.contradictions",
title: "Contradictions",
relativePath: "reports/contradictions.md",
buildBody: ({ config, pages }) => {
const matches = pages.filter((page) => page.contradictions.length > 0);
if (matches.length === 0) {
buildBody: ({ config, pages, now }) => {
const pageClusters = buildPageContradictionClusters(pages);
const claimClusters = buildClaimContradictionClusters({ pages, now });
if (pageClusters.length === 0 && claimClusters.length === 0) {
return "- No contradictions flagged right now.";
}
return [
`- Pages with contradictions: ${matches.length}`,
"",
...matches.map(
(page) =>
`- ${formatWikiLink({
renderMode: config.vault.renderMode,
relativePath: page.relativePath,
title: page.title,
})}: ${page.contradictions.join(" | ")}`,
),
].join("\n");
const lines = [
`- Contradiction note clusters: ${pageClusters.length}`,
`- Competing claim clusters: ${claimClusters.length}`,
];
if (pageClusters.length > 0) {
lines.push("", "### Page Notes");
for (const cluster of pageClusters) {
lines.push(formatPageContradictionClusterLine(config, cluster));
}
}
if (claimClusters.length > 0) {
lines.push("", "### Claim Clusters");
for (const cluster of claimClusters) {
lines.push(formatClaimContradictionClusterLine(config, cluster));
}
}
return lines.join("\n");
},
},
{
id: "report.low-confidence",
title: "Low Confidence",
relativePath: "reports/low-confidence.md",
buildBody: ({ config, pages }) => {
const matches = pages
buildBody: ({ config, pages, now }) => {
const pageMatches = pages
.filter((page) => typeof page.confidence === "number" && page.confidence < 0.5)
.toSorted((left, right) => (left.confidence ?? 1) - (right.confidence ?? 1));
if (matches.length === 0) {
return "- No low-confidence pages right now.";
const claimMatches = collectWikiClaimHealth(pages, now)
.filter((claim) => typeof claim.confidence === "number" && claim.confidence < 0.5)
.toSorted((left, right) => (left.confidence ?? 1) - (right.confidence ?? 1));
if (pageMatches.length === 0 && claimMatches.length === 0) {
return "- No low-confidence pages or claims right now.";
}
return [
`- Low-confidence pages: ${matches.length}`,
"",
...matches.map(
(page) =>
`- ${formatWikiLink({
renderMode: config.vault.renderMode,
relativePath: page.relativePath,
title: page.title,
})}: confidence ${(page.confidence ?? 0).toFixed(2)}`,
),
].join("\n");
const lines = [
`- Low-confidence pages: ${pageMatches.length}`,
`- Low-confidence claims: ${claimMatches.length}`,
];
if (pageMatches.length > 0) {
lines.push("", "### Pages");
for (const page of pageMatches) {
lines.push(
`- ${formatPageLink(config, page)}: confidence ${(page.confidence ?? 0).toFixed(2)}`,
);
}
}
if (claimMatches.length > 0) {
lines.push("", "### Claims");
for (const claim of claimMatches) {
lines.push(`- ${formatClaimHealthLine(config, claim)}`);
}
}
return lines.join("\n");
},
},
{
id: "report.claim-health",
title: "Claim Health",
relativePath: "reports/claim-health.md",
buildBody: ({ config, pages, now }) => {
const claimHealth = collectWikiClaimHealth(pages, now);
const missingEvidence = claimHealth.filter((claim) => claim.missingEvidence);
const contestedClaims = claimHealth.filter((claim) => isClaimHealthContested(claim));
const staleClaims = claimHealth.filter(
(claim) => claim.freshness.level === "stale" || claim.freshness.level === "unknown",
);
if (
missingEvidence.length === 0 &&
contestedClaims.length === 0 &&
staleClaims.length === 0
) {
return "- No claim health issues right now.";
}
const lines = [
`- Claims missing evidence: ${missingEvidence.length}`,
`- Contested claims: ${contestedClaims.length}`,
`- Stale or unknown claims: ${staleClaims.length}`,
];
if (missingEvidence.length > 0) {
lines.push("", "### Missing Evidence");
for (const claim of missingEvidence) {
lines.push(`- ${formatClaimHealthLine(config, claim)}`);
}
}
if (contestedClaims.length > 0) {
lines.push("", "### Contested Claims");
for (const claim of contestedClaims) {
lines.push(`- ${formatClaimHealthLine(config, claim)}`);
}
}
if (staleClaims.length > 0) {
lines.push("", "### Stale Claims");
for (const claim of staleClaims) {
lines.push(`- ${formatClaimHealthLine(config, claim)}`);
}
}
return lines.join("\n");
},
},
{
@@ -118,33 +192,25 @@ const DASHBOARD_PAGES: DashboardPageDefinition[] = [
title: "Stale Pages",
relativePath: "reports/stale-pages.md",
buildBody: ({ config, pages, now }) => {
const staleBeforeMs = now.getTime() - DASHBOARD_STALE_PAGE_DAYS * 24 * 60 * 60 * 1000;
const matches = pages
.filter((page) => page.kind !== "report")
.flatMap((page) => {
if (!page.updatedAt) {
return [{ page, reason: "missing updatedAt" }];
}
const updatedAtMs = Date.parse(page.updatedAt);
if (!Number.isFinite(updatedAtMs) || updatedAtMs > staleBeforeMs) {
const freshness = assessPageFreshness(page, now);
if (freshness.level === "fresh") {
return [];
}
return [{ page, reason: `updated ${page.updatedAt}` }];
return [{ page, freshness }];
})
.toSorted((left, right) => left.page.title.localeCompare(right.page.title));
if (matches.length === 0) {
return `- No stale pages older than ${DASHBOARD_STALE_PAGE_DAYS} days.`;
return `- No aging or stale pages older than ${WIKI_AGING_DAYS} days.`;
}
return [
`- Stale pages: ${matches.length}`,
"",
...matches.map(
({ page, reason }) =>
`- ${formatWikiLink({
renderMode: config.vault.renderMode,
relativePath: page.relativePath,
title: page.title,
})}: ${reason}`,
({ page, freshness }) =>
`- ${formatPageLink(config, page)}: ${formatFreshnessLabel(freshness)}`,
),
].join("\n");
},
@@ -203,6 +269,78 @@ function buildPageCounts(pages: WikiPageSummary[]): Record<WikiPageKind, number>
};
}
// Render a wiki link for a page using the vault's configured render mode.
function formatPageLink(config: ResolvedMemoryWikiConfig, page: WikiPageSummary): string {
  return formatWikiLink({
    renderMode: config.vault.renderMode,
    relativePath: page.relativePath,
    title: page.title,
  });
}
// Human-readable freshness label for dashboard bullets, e.g. "stale (<ts>)".
function formatFreshnessLabel(freshness: WikiFreshness): string {
  switch (freshness.level) {
    case "fresh":
      return `fresh (${freshness.lastTouchedAt ?? "recent"})`;
    case "aging":
      return `aging (${freshness.lastTouchedAt ?? "unknown"})`;
    case "stale":
      return `stale (${freshness.lastTouchedAt ?? "unknown"})`;
    case "unknown":
      // No timestamp to show; surface the reason text (e.g. "missing updatedAt").
      return freshness.reason;
  }
}
// Prefix the claim text with its backticked id when one exists.
function formatClaimIdentity(claim: WikiClaimHealth): string {
  return claim.claimId ? `\`${claim.claimId}\`: ${claim.text}` : claim.text;
}
// Thin adapter: contested-ness of a health record is decided by its status.
function isClaimHealthContested(claim: WikiClaimHealth): boolean {
  return isClaimContestedStatus(claim.status);
}
// One dashboard bullet for a claim: page link, claim identity, then a
// parenthesized detail list (status, optional confidence, evidence count,
// freshness). filter(Boolean) drops the confidence slot when absent.
function formatClaimHealthLine(config: ResolvedMemoryWikiConfig, claim: WikiClaimHealth): string {
  const details = [
    `status ${claim.status}`,
    typeof claim.confidence === "number" ? `confidence ${claim.confidence.toFixed(2)}` : null,
    claim.missingEvidence ? "missing evidence" : `${claim.evidenceCount} evidence`,
    formatFreshnessLabel(claim.freshness),
  ].filter(Boolean);
  return `${formatWikiLink({
    renderMode: config.vault.renderMode,
    relativePath: claim.pagePath,
    title: claim.pageTitle,
  })}: ${formatClaimIdentity(claim)} (${details.join(", ")})`;
}
// One report line for a page-note cluster: the note label followed by
// pipe-separated links to every page carrying that note.
function formatPageContradictionClusterLine(
  config: ResolvedMemoryWikiConfig,
  cluster: WikiPageContradictionCluster,
): string {
  const pageRefs = cluster.entries.map((entry) =>
    formatWikiLink({
      renderMode: config.vault.renderMode,
      relativePath: entry.pagePath,
      title: entry.pageTitle,
    }),
  );
  return `- ${cluster.label}: ${pageRefs.join(" | ")}`;
}
// One report line for a claim-id cluster: the backticked claim id followed by
// pipe-separated "page -> claim (status, freshness)" entries.
function formatClaimContradictionClusterLine(
  config: ResolvedMemoryWikiConfig,
  cluster: WikiClaimContradictionCluster,
): string {
  const entries = cluster.entries.map(
    (entry) =>
      `${formatWikiLink({
        renderMode: config.vault.renderMode,
        relativePath: entry.pagePath,
        title: entry.pageTitle,
      })} -> ${formatClaimIdentity(entry)} (${entry.status}, ${formatFreshnessLabel(entry.freshness)})`,
  );
  return `- \`${cluster.label}\`: ${entries.join(" | ")}`;
}
function normalizeComparableTarget(value: string): string {
return value
.trim()
@@ -557,8 +695,10 @@ type AgentDigestClaim = {
status: string;
confidence?: number;
evidenceCount: number;
missingEvidence: boolean;
evidence: WikiClaim["evidence"];
updatedAt?: string;
freshnessLevel: WikiFreshnessLevel;
lastTouchedAt?: string;
};
type AgentDigestPage = {
@@ -570,64 +710,167 @@ type AgentDigestPage = {
questions: string[];
contradictions: string[];
confidence?: number;
updatedAt?: string;
freshnessLevel: WikiFreshnessLevel;
lastTouchedAt?: string;
claimCount: number;
topClaims: AgentDigestClaim[];
};
type AgentDigestClaimHealthSummary = {
freshness: Record<WikiFreshnessLevel, number>;
contested: number;
lowConfidence: number;
missingEvidence: number;
};
type AgentDigestContradictionCluster = {
key: string;
label: string;
kind: "claim-id" | "page-note";
entryCount: number;
paths: string[];
};
type AgentDigest = {
pageCounts: Record<WikiPageKind, number>;
claimCount: number;
claimHealth: AgentDigestClaimHealthSummary;
contradictionClusters: AgentDigestContradictionCluster[];
pages: AgentDigestPage[];
};
function normalizeClaimStatus(claim: WikiClaim): string {
return claim.status?.trim() || "supported";
// Zeroed per-level counter map used when tallying freshness distributions.
function createFreshnessSummary(): Record<WikiFreshnessLevel, number> {
  return {
    fresh: 0,
    aging: 0,
    stale: 0,
    unknown: 0,
  };
}
function sortClaims(claims: WikiClaim[]): WikiClaim[] {
return [...claims].toSorted((left, right) => {
// Numeric rank for sorting: fresher levels sort ahead of staler/unknown ones.
function rankFreshnessLevel(level: WikiFreshnessLevel): number {
  switch (level) {
    case "fresh":
      return 3;
    case "aging":
      return 2;
    case "stale":
      return 1;
    case "unknown":
      return 0;
  }
}
// Order a page's claims for digest output: highest confidence first
// (missing confidence ranks below any real value via the -1 sentinel),
// then freshest, then alphabetical by text as a deterministic tie-breaker.
// Returns a new array; the page's claim list is not mutated.
function sortClaims(page: WikiPageSummary): WikiClaim[] {
  return [...page.claims].toSorted((left, right) => {
    const leftConfidence = left.confidence ?? -1;
    const rightConfidence = right.confidence ?? -1;
    if (leftConfidence !== rightConfidence) {
      return rightConfidence - leftConfidence;
    }
    const leftFreshness = rankFreshnessLevel(assessClaimFreshness({ page, claim: left }).level);
    const rightFreshness = rankFreshnessLevel(assessClaimFreshness({ page, claim: right }).level);
    if (leftFreshness !== rightFreshness) {
      return rightFreshness - leftFreshness;
    }
    return left.text.localeCompare(right.text);
  });
}
// Tally claim health across all pages for the agent digest: a per-level
// freshness histogram plus counts of contested, low-confidence (< 0.5), and
// evidence-less claims. The categories are not mutually exclusive.
function buildAgentDigestClaimHealthSummary(
  pages: WikiPageSummary[],
): AgentDigestClaimHealthSummary {
  const freshness = createFreshnessSummary();
  let contested = 0;
  let lowConfidence = 0;
  let missingEvidence = 0;
  for (const claim of collectWikiClaimHealth(pages)) {
    freshness[claim.freshness.level] += 1;
    if (isClaimHealthContested(claim)) {
      contested += 1;
    }
    if (typeof claim.confidence === "number" && claim.confidence < 0.5) {
      lowConfidence += 1;
    }
    if (claim.missingEvidence) {
      missingEvidence += 1;
    }
  }
  return {
    freshness,
    contested,
    lowConfidence,
    missingEvidence,
  };
}
// Flatten both cluster kinds (page contradiction notes and competing claim
// ids) into one digest-friendly list: each entry keeps its kind tag, entry
// count, and a deduplicated, sorted list of page paths. The merged list is
// sorted by label for deterministic JSON output.
function buildAgentDigestContradictionClusters(
  pages: WikiPageSummary[],
): AgentDigestContradictionCluster[] {
  const pageClusters = buildPageContradictionClusters(pages).map((cluster) => ({
    key: cluster.key,
    label: cluster.label,
    kind: "page-note" as const,
    entryCount: cluster.entries.length,
    paths: [...new Set(cluster.entries.map((entry) => entry.pagePath))].toSorted(),
  }));
  const claimClusters = buildClaimContradictionClusters({ pages }).map((cluster) => ({
    key: cluster.key,
    label: cluster.label,
    kind: "claim-id" as const,
    entryCount: cluster.entries.length,
    paths: [...new Set(cluster.entries.map((entry) => entry.pagePath))].toSorted(),
  }));
  return [...pageClusters, ...claimClusters].toSorted((left, right) =>
    left.label.localeCompare(right.label),
  );
}
function buildAgentDigest(params: {
pages: WikiPageSummary[];
pageCounts: Record<WikiPageKind, number>;
}): AgentDigest {
const pages = [...params.pages]
.toSorted((left, right) => left.relativePath.localeCompare(right.relativePath))
.map((page) => ({
...(page.id ? { id: page.id } : {}),
title: page.title,
kind: page.kind,
path: page.relativePath,
sourceIds: [...page.sourceIds],
questions: [...page.questions],
contradictions: [...page.contradictions],
...(typeof page.confidence === "number" ? { confidence: page.confidence } : {}),
...(page.updatedAt ? { updatedAt: page.updatedAt } : {}),
claimCount: page.claims.length,
topClaims: sortClaims(page.claims)
.slice(0, 5)
.map((claim) => ({
...(claim.id ? { id: claim.id } : {}),
text: claim.text,
status: normalizeClaimStatus(claim),
...(typeof claim.confidence === "number" ? { confidence: claim.confidence } : {}),
evidenceCount: claim.evidence.length,
evidence: [...claim.evidence],
...(claim.updatedAt ? { updatedAt: claim.updatedAt } : {}),
})),
}));
.map((page) => {
const pageFreshness = assessPageFreshness(page);
return {
...(page.id ? { id: page.id } : {}),
title: page.title,
kind: page.kind,
path: page.relativePath,
sourceIds: [...page.sourceIds],
questions: [...page.questions],
contradictions: [...page.contradictions],
...(typeof page.confidence === "number" ? { confidence: page.confidence } : {}),
freshnessLevel: pageFreshness.level,
...(pageFreshness.lastTouchedAt ? { lastTouchedAt: pageFreshness.lastTouchedAt } : {}),
claimCount: page.claims.length,
topClaims: sortClaims(page)
.slice(0, 5)
.map((claim) => {
const freshness = assessClaimFreshness({ page, claim });
return {
...(claim.id ? { id: claim.id } : {}),
text: claim.text,
status: normalizeClaimStatus(claim.status),
...(typeof claim.confidence === "number" ? { confidence: claim.confidence } : {}),
evidenceCount: claim.evidence.length,
missingEvidence: claim.evidence.length === 0,
evidence: [...claim.evidence],
freshnessLevel: freshness.level,
...(freshness.lastTouchedAt ? { lastTouchedAt: freshness.lastTouchedAt } : {}),
};
}),
};
});
return {
pageCounts: params.pageCounts,
claimCount: params.pages.reduce((total, page) => total + page.claims.length, 0),
claimHealth: buildAgentDigestClaimHealthSummary(params.pages),
contradictionClusters: buildAgentDigestContradictionClusters(params.pages),
pages,
};
}
@@ -635,21 +878,25 @@ function buildAgentDigest(params: {
function buildClaimsDigestLines(params: { pages: WikiPageSummary[] }): string[] {
return params.pages
.flatMap((page) =>
sortClaims(page.claims).map((claim) =>
JSON.stringify({
sortClaims(page).map((claim) => {
const freshness = assessClaimFreshness({ page, claim });
return JSON.stringify({
...(claim.id ? { id: claim.id } : {}),
pageId: page.id,
pageTitle: page.title,
pageKind: page.kind,
pagePath: page.relativePath,
text: claim.text,
status: normalizeClaimStatus(claim),
status: normalizeClaimStatus(claim.status),
confidence: claim.confidence,
sourceIds: page.sourceIds,
evidenceCount: claim.evidence.length,
missingEvidence: claim.evidence.length === 0,
evidence: claim.evidence,
updatedAt: claim.updatedAt ?? page.updatedAt,
}),
),
freshnessLevel: freshness.level,
lastTouchedAt: freshness.lastTouchedAt,
});
}),
)
.toSorted((left, right) => left.localeCompare(right));
}

View File

@@ -16,7 +16,7 @@ describe("lintMemoryWikiVault", () => {
},
});
await Promise.all(
["entities", "concepts", "sources"].map((dir) =>
["entities", "concepts", "sources", "syntheses"].map((dir) =>
fs.mkdir(path.join(rootDir, dir), { recursive: true }),
),
);
@@ -29,6 +29,14 @@ describe("lintMemoryWikiVault", () => {
contradictions: ["Conflicts with source.beta"],
questions: ["Is Alpha still active?"],
confidence: 0.2,
claims: [
{
id: "claim.alpha.db",
text: "Alpha uses PostgreSQL for production writes.",
confidence: 0.2,
evidence: [],
},
],
},
body: "# Alpha\n\n[[missing-page]]\n",
});
@@ -47,6 +55,35 @@ describe("lintMemoryWikiVault", () => {
}),
"utf8",
);
await fs.writeFile(
path.join(rootDir, "syntheses", "alpha-db.md"),
renderWikiMarkdown({
frontmatter: {
pageType: "synthesis",
id: "synthesis.alpha.db",
title: "Alpha Database",
sourceIds: ["source.bridge.alpha"],
updatedAt: "2025-10-01T00:00:00.000Z",
claims: [
{
id: "claim.alpha.db",
text: "Alpha uses MySQL for production writes.",
status: "contested",
confidence: 0.7,
evidence: [
{
sourceId: "source.bridge.alpha",
lines: "1-3",
updatedAt: "2025-10-01T00:00:00.000Z",
},
],
},
],
},
body: "# Alpha Database\n",
}),
"utf8",
);
const result = await lintMemoryWikiVault(config);
@@ -56,15 +93,25 @@ describe("lintMemoryWikiVault", () => {
expect(result.issues.map((issue) => issue.code)).toContain("missing-import-provenance");
expect(result.issues.map((issue) => issue.code)).toContain("broken-wikilink");
expect(result.issues.map((issue) => issue.code)).toContain("contradiction-present");
expect(result.issues.map((issue) => issue.code)).toContain("claim-conflict");
expect(result.issues.map((issue) => issue.code)).toContain("open-question");
expect(result.issues.map((issue) => issue.code)).toContain("low-confidence");
expect(result.issuesByCategory.contradictions).toHaveLength(2);
expect(result.issuesByCategory["open-questions"]).toHaveLength(2);
expect(result.issues.map((issue) => issue.code)).toContain("claim-missing-evidence");
expect(result.issues.map((issue) => issue.code)).toContain("claim-low-confidence");
expect(result.issues.map((issue) => issue.code)).toContain("stale-page");
expect(result.issues.map((issue) => issue.code)).toContain("stale-claim");
expect(
result.issuesByCategory.contradictions.some((issue) => issue.code === "claim-conflict"),
).toBe(true);
expect(result.issuesByCategory["open-questions"].length).toBeGreaterThanOrEqual(2);
expect(
result.issuesByCategory.provenance.some(
(issue) => issue.code === "missing-import-provenance",
),
).toBe(true);
expect(
result.issuesByCategory.provenance.some((issue) => issue.code === "claim-missing-evidence"),
).toBe(true);
await expect(fs.readFile(result.reportPath, "utf8")).resolves.toContain("### Errors");
await expect(fs.readFile(result.reportPath, "utf8")).resolves.toContain("### Contradictions");
await expect(fs.readFile(result.reportPath, "utf8")).resolves.toContain("### Open Questions");

View File

@@ -4,6 +4,11 @@ import {
replaceManagedMarkdownBlock,
withTrailingNewline,
} from "openclaw/plugin-sdk/memory-host-markdown";
import {
assessPageFreshness,
buildClaimContradictionClusters,
collectWikiClaimHealth,
} from "./claim-health.js";
import { compileMemoryWikiVault } from "./compile.js";
import type { ResolvedMemoryWikiConfig } from "./config.js";
import { appendMemoryWikiLog } from "./log.js";
@@ -22,8 +27,13 @@ export type MemoryWikiLintIssue = {
| "missing-import-provenance"
| "broken-wikilink"
| "contradiction-present"
| "claim-conflict"
| "open-question"
| "low-confidence";
| "low-confidence"
| "claim-low-confidence"
| "claim-missing-evidence"
| "stale-page"
| "stale-claim";
path: string;
message: string;
};
@@ -68,6 +78,7 @@ function collectBrokenLinkIssues(pages: WikiPageSummary[]): MemoryWikiLintIssue[
function collectPageIssues(pages: WikiPageSummary[]): MemoryWikiLintIssue[] {
const issues: MemoryWikiLintIssue[] = [];
const pagesById = new Map<string, WikiPageSummary[]>();
const claimHealth = collectWikiClaimHealth(pages);
for (const page of pages) {
if (!page.id) {
@@ -179,6 +190,59 @@ function collectPageIssues(pages: WikiPageSummary[]): MemoryWikiLintIssue[] {
message: `Page confidence is low (${page.confidence.toFixed(2)}).`,
});
}
const freshness = assessPageFreshness(page);
if (page.kind !== "report" && (freshness.level === "stale" || freshness.level === "unknown")) {
issues.push({
severity: "warning",
category: "quality",
code: "stale-page",
path: page.relativePath,
message: `Page freshness needs review (${freshness.reason}).`,
});
}
}
for (const claim of claimHealth) {
if (claim.missingEvidence) {
issues.push({
severity: "warning",
category: "provenance",
code: "claim-missing-evidence",
path: claim.pagePath,
message: `Claim ${claim.claimId ? `\`${claim.claimId}\`` : `\`${claim.text}\``} is missing structured evidence.`,
});
}
if (typeof claim.confidence === "number" && claim.confidence < 0.5) {
issues.push({
severity: "warning",
category: "quality",
code: "claim-low-confidence",
path: claim.pagePath,
message: `Claim ${claim.claimId ? `\`${claim.claimId}\`` : `\`${claim.text}\``} has low confidence (${claim.confidence.toFixed(2)}).`,
});
}
if (claim.freshness.level === "stale" || claim.freshness.level === "unknown") {
issues.push({
severity: "warning",
category: "quality",
code: "stale-claim",
path: claim.pagePath,
message: `Claim ${claim.claimId ? `\`${claim.claimId}\`` : `\`${claim.text}\``} freshness needs review (${claim.freshness.reason}).`,
});
}
}
for (const cluster of buildClaimContradictionClusters({ pages })) {
for (const entry of cluster.entries) {
issues.push({
severity: "warning",
category: "contradictions",
code: "claim-conflict",
path: entry.pagePath,
message: `Claim cluster \`${cluster.label}\` has competing variants across ${cluster.entries.length} pages.`,
});
}
}
for (const [id, matches] of pagesById.entries()) {

View File

@@ -152,6 +152,74 @@ describe("searchMemoryWiki", () => {
});
});
it("ranks fresh supported claims ahead of stale contested claims", async () => {
const { rootDir, config } = await createQueryVault({
initialize: true,
});
await fs.writeFile(
path.join(rootDir, "entities", "alpha-fresh.md"),
renderWikiMarkdown({
frontmatter: {
pageType: "entity",
id: "entity.alpha.fresh",
title: "Alpha Fresh",
updatedAt: "2026-04-01T00:00:00.000Z",
claims: [
{
id: "claim.alpha.db.fresh",
text: "Alpha uses PostgreSQL for production writes.",
status: "supported",
confidence: 0.91,
evidence: [
{
sourceId: "source.alpha",
lines: "4-7",
updatedAt: "2026-04-01T00:00:00.000Z",
},
],
},
],
},
body: "# Alpha Fresh\n\nsummary without the keyword\n",
}),
"utf8",
);
await fs.writeFile(
path.join(rootDir, "entities", "alpha-stale.md"),
renderWikiMarkdown({
frontmatter: {
pageType: "entity",
id: "entity.alpha.stale",
title: "Alpha Stale",
updatedAt: "2025-10-01T00:00:00.000Z",
claims: [
{
id: "claim.alpha.db.stale",
text: "Alpha uses PostgreSQL for production writes.",
status: "contested",
confidence: 0.92,
evidence: [
{
sourceId: "source.alpha.old",
lines: "1-2",
updatedAt: "2025-10-01T00:00:00.000Z",
},
],
},
],
},
body: "# Alpha Stale\n\nsummary without the keyword\n",
}),
"utf8",
);
const results = await searchMemoryWiki({ config, query: "postgresql" });
expect(results).toHaveLength(2);
expect(results[0]?.path).toBe("entities/alpha-fresh.md");
expect(results[1]?.path).toBe("entities/alpha-stale.md");
});
it("surfaces bridge provenance for imported source pages", async () => {
const { rootDir, config } = await createQueryVault({
initialize: true,

View File

@@ -4,8 +4,14 @@ import { resolveDefaultAgentId, resolveSessionAgentId } from "openclaw/plugin-sd
import type { MemorySearchResult } from "openclaw/plugin-sdk/memory-host-files";
import { getActiveMemorySearchManager } from "openclaw/plugin-sdk/memory-host-search";
import type { OpenClawConfig } from "../api.js";
import { assessClaimFreshness, isClaimContestedStatus } from "./claim-health.js";
import type { ResolvedMemoryWikiConfig, WikiSearchBackend, WikiSearchCorpus } from "./config.js";
import { parseWikiMarkdown, toWikiPageSummary, type WikiPageSummary } from "./markdown.js";
import {
parseWikiMarkdown,
toWikiPageSummary,
type WikiClaim,
type WikiPageSummary,
} from "./markdown.js";
import { initializeMemoryWikiVault } from "./vault.js";
const QUERY_DIRS = ["entities", "concepts", "sources", "syntheses", "reports"] as const;
@@ -114,14 +120,55 @@ function buildPageSearchText(page: QueryableWikiPage): string {
.join("\n");
}
/**
 * True when the (already lower-cased) query appears in the claim's text
 * or in its optional id.
 */
function isClaimMatch(claim: WikiClaim, queryLower: string): boolean {
  const haystacks = [claim.text, claim.id ?? ""];
  return haystacks.some((value) => value.toLowerCase().includes(queryLower));
}
/**
 * Scores one claim's relevance to the (lower-cased) query.
 *
 * Signals, in order: text match (+25), id match (+10), stated confidence
 * (up to +10), freshness level (+8 fresh down to -4 unknown), and a
 * contested-status penalty (-6) versus an uncontested bonus (+4).
 */
function rankClaimMatch(page: QueryableWikiPage, claim: WikiClaim, queryLower: string): number {
  let score = claim.text.toLowerCase().includes(queryLower) ? 25 : 0;
  const idLower = claim.id?.toLowerCase();
  if (idLower !== undefined && idLower.includes(queryLower)) {
    score += 10;
  }
  if (typeof claim.confidence === "number") {
    score += Math.round(claim.confidence * 10);
  }
  // One weight per freshness level; unknown is penalized harder than stale
  // because it cannot even be dated.
  const freshnessWeight = { fresh: 8, aging: 4, stale: -2, unknown: -4 } as const;
  score += freshnessWeight[assessClaimFreshness({ page, claim }).level];
  // Contested claims are demoted; settled ones get a small boost.
  score += isClaimContestedStatus(claim.status) ? -6 : 4;
  return score;
}
/**
 * Returns the claims on `page` that match the query, best match first.
 *
 * Fix: the previous version invoked `rankClaimMatch` inside the sort
 * comparator, re-scoring each claim on every comparison (O(n log n) rank
 * calls, each of which runs a freshness assessment). Scores are now
 * computed exactly once per claim (decorate-sort-undecorate), which is
 * both cheaper and keeps the same descending-score order (`toSorted` is
 * stable, so equal-score claims keep their original relative order, as
 * before).
 */
function getMatchingClaims(page: QueryableWikiPage, queryLower: string): WikiClaim[] {
  return page.claims
    .filter((claim) => isClaimMatch(claim, queryLower))
    .map((claim) => ({ claim, score: rankClaimMatch(page, claim, queryLower) }))
    .toSorted((left, right) => right.score - left.score)
    .map((entry) => entry.claim);
}
function buildPageSnippet(page: QueryableWikiPage, query: string): string {
const queryLower = query.toLowerCase();
const matchingClaim = page.claims.find((claim) => {
if (claim.text.toLowerCase().includes(queryLower)) {
return true;
}
return claim.id?.toLowerCase().includes(queryLower);
});
const matchingClaim = getMatchingClaims(page, queryLower)[0];
if (matchingClaim) {
return matchingClaim.text;
}
@@ -162,14 +209,10 @@ function scorePage(page: QueryableWikiPage, query: string): number {
if (page.sourceIds.some((sourceId) => sourceId.toLowerCase().includes(queryLower))) {
score += 12;
}
const matchingClaimCount = page.claims.filter((claim) => {
if (claim.text.toLowerCase().includes(queryLower)) {
return true;
}
return claim.id?.toLowerCase().includes(queryLower);
}).length;
if (matchingClaimCount > 0) {
score += 25 + Math.min(20, matchingClaimCount * 5);
const matchingClaims = getMatchingClaims(page, queryLower);
if (matchingClaims.length > 0) {
score += rankClaimMatch(page, matchingClaims[0], queryLower);
score += Math.min(10, (matchingClaims.length - 1) * 2);
}
const bodyOccurrences = rawLower.split(queryLower).length - 1;
score += Math.min(10, bodyOccurrences);