diff --git a/packages/memory-host-sdk/src/host/batch-upload.ts b/packages/memory-host-sdk/src/host/batch-upload.ts index efe4aa7000a..9b5b91531df 100644 --- a/packages/memory-host-sdk/src/host/batch-upload.ts +++ b/packages/memory-host-sdk/src/host/batch-upload.ts @@ -3,7 +3,7 @@ import { normalizeBatchBaseUrl, type BatchHttpClientConfig, } from "./batch-utils.js"; -import { hashText } from "./internal.js"; +import { hashText } from "./hash.js"; import { withRemoteHttpResponse } from "./remote-http.js"; export async function uploadBatchJsonlFile(params: { diff --git a/packages/memory-host-sdk/src/host/embedding-chunk-limits.ts b/packages/memory-host-sdk/src/host/embedding-chunk-limits.ts index 5c8cf9020f3..0156b97ef77 100644 --- a/packages/memory-host-sdk/src/host/embedding-chunk-limits.ts +++ b/packages/memory-host-sdk/src/host/embedding-chunk-limits.ts @@ -2,7 +2,8 @@ import { estimateUtf8Bytes, splitTextToUtf8ByteLimit } from "./embedding-input-l import { hasNonTextEmbeddingParts } from "./embedding-inputs.js"; import { resolveEmbeddingMaxInputTokens } from "./embedding-model-limits.js"; import type { EmbeddingProvider } from "./embeddings.js"; -import { hashText, type MemoryChunk } from "./internal.js"; +import { hashText } from "./hash.js"; +import type { MemoryChunk } from "./internal.js"; export function enforceEmbeddingMaxInputTokens( provider: EmbeddingProvider, diff --git a/packages/memory-host-sdk/src/host/hash.ts b/packages/memory-host-sdk/src/host/hash.ts new file mode 100644 index 00000000000..458a6b48ed2 --- /dev/null +++ b/packages/memory-host-sdk/src/host/hash.ts @@ -0,0 +1,5 @@ +import crypto from "node:crypto"; + +export function hashText(value: string): string { + return crypto.createHash("sha256").update(value).digest("hex"); +} diff --git a/packages/memory-host-sdk/src/host/internal.ts b/packages/memory-host-sdk/src/host/internal.ts index 228bb3fffdc..1a97e100ffd 100644 --- a/packages/memory-host-sdk/src/host/internal.ts +++ 
b/packages/memory-host-sdk/src/host/internal.ts @@ -20,6 +20,9 @@ import { type MemoryMultimodalSettings, } from "./multimodal.js"; +export { hashText } from "./hash.js"; +import { hashText } from "./hash.js"; + export type MemoryFileEntry = { path: string; absPath: string; @@ -204,10 +207,6 @@ export async function listMemoryFiles( return deduped; } -export function hashText(value: string): string { - return crypto.createHash("sha256").update(value).digest("hex"); -} - export async function buildFileEntry( absPath: string, workspaceDir: string, diff --git a/packages/memory-host-sdk/src/host/session-files.ts b/packages/memory-host-sdk/src/host/session-files.ts index d2a1116189e..3eb3af5a8e1 100644 --- a/packages/memory-host-sdk/src/host/session-files.ts +++ b/packages/memory-host-sdk/src/host/session-files.ts @@ -4,10 +4,7 @@ import { stripInboundMetadata } from "../../../../src/auto-reply/reply/strip-inb import { isUsageCountedSessionTranscriptFileName } from "../../../../src/config/sessions/artifacts.js"; import { resolveSessionTranscriptsDirForAgent } from "../../../../src/config/sessions/paths.js"; import { redactSensitiveText } from "../../../../src/logging/redact.js"; -import { createSubsystemLogger } from "../../../../src/logging/subsystem.js"; -import { hashText } from "./internal.js"; - -const log = createSubsystemLogger("memory"); +import { hashText } from "./hash.js"; export type SessionFileEntry = { path: string; @@ -62,6 +59,11 @@ export function sessionPathForFile(absPath: string): string { return path.join("sessions", path.basename(absPath)).replace(/\\/g, "/"); } +async function logSessionFileReadFailure(absPath: string, err: unknown): Promise<void> { + const { createSubsystemLogger } = await import("../../../../src/logging/subsystem.js"); + createSubsystemLogger("memory").debug(`Failed reading session file ${absPath}: ${String(err)}`); +} + function normalizeSessionText(value: string): string { return value .replace(/\s*\n+\s*/g, " ") @@ -174,7 +176,7 @@
export async function buildSessionEntry(absPath: string): Promise; }; +type SessionTranscriptStoreEntry = { + sessionFile?: unknown; + sessionId?: unknown; +}; + function isCheckpointTranscriptFileName(fileName: string): boolean { return fileName.endsWith(".jsonl") && fileName.includes(".checkpoint."); } @@ -169,7 +172,7 @@ export function loadSessionTranscriptClassificationForSessionsDir( sessionsDir: string, ): SessionTranscriptClassification { const storePath = path.join(sessionsDir, "sessions.json"); - const store = loadSessionStore(storePath); + const store = readSessionTranscriptClassificationStore(storePath); const dreamingTranscriptPaths = new Set<string>(); const cronRunTranscriptPaths = new Set<string>(); for (const [sessionKey, entry] of Object.entries(store)) { @@ -190,6 +193,20 @@ loadSessionTranscriptClassificationForSessionsDir( }; } +function readSessionTranscriptClassificationStore( + storePath: string, +): Record<string, SessionTranscriptStoreEntry> { + try { + const parsed = JSON.parse(fsSync.readFileSync(storePath, "utf-8")) as unknown; + if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) { + return {}; + } + return parsed as Record<string, SessionTranscriptStoreEntry>; + } catch { + return {}; + } +} + export function loadDreamingNarrativeTranscriptPathSetForAgent( agentId: string, ): ReadonlySet<string> { @@ -236,6 +253,11 @@ export function sessionPathForFile(absPath: string): string { return path.join("sessions", path.basename(absPath)).replace(/\\/g, "/"); } +async function logSessionFileReadFailure(absPath: string, err: unknown): Promise<void> { + const { createSubsystemLogger } = await import("../../logging/subsystem.js"); + createSubsystemLogger("memory").debug(`Failed reading session file ${absPath}: ${String(err)}`); +} + function normalizeSessionText(value: string): string { return value .replace(/\s*\n+\s*/g, " ") @@ -528,7 +550,7 @@ export async function buildSessionEntry( ...(generatedByCronRun ?
{ generatedByCronRun: true } : {}), }; } catch (err) { - log.debug(`Failed reading session file ${absPath}: ${String(err)}`); + void logSessionFileReadFailure(absPath, err); return null; } }