diff --git a/extensions/file-transfer/src/shared/policy.test.ts b/extensions/file-transfer/src/shared/policy.test.ts index dbfb65845ac..d5417233d2b 100644 --- a/extensions/file-transfer/src/shared/policy.test.ts +++ b/extensions/file-transfer/src/shared/policy.test.ts @@ -325,6 +325,32 @@ describe("persistAllowAlways", () => { expect(root.gateway.nodes.fileTransfer["Lobster"].allowReadPaths).toEqual(["/srv/added.png"]); }); + it("rejects unsafe keys (__proto__, prototype, constructor) that would mutate prototype chain", async () => { + mutateConfigFileMock.mockImplementation( + async ({ mutate }: { mutate: (draft: Record<string, unknown>) => void }) => { + const draft: Record<string, unknown> = {}; + mutate(draft); + }, + ); + + await expect( + persistAllowAlways({ + nodeId: "n1", + nodeDisplayName: "__proto__", + kind: "read", + path: "/etc/passwd", + }), + ).rejects.toThrow(/unsafe key.*__proto__/); + + await expect( + persistAllowAlways({ + nodeId: "constructor", + kind: "read", + path: "/etc/passwd", + }), + ).rejects.toThrow(/unsafe key.*constructor/); + }); + it("dedupes when path already present", async () => { let captured: Record<string, unknown> | null = null; mutateConfigFileMock.mockImplementation( diff --git a/extensions/file-transfer/src/shared/policy.ts b/extensions/file-transfer/src/shared/policy.ts index 028d9790808..f4b863fb082 100644 --- a/extensions/file-transfer/src/shared/policy.ts +++ b/extensions/file-transfer/src/shared/policy.ts @@ -264,6 +264,20 @@ export function evaluateFilePolicy(input: { * the "*" wildcard if that's what was hit). If no entry exists yet, * creates one keyed by nodeDisplayName ?? nodeId. */ +/** + * Reject special object keys that would mutate the prototype chain when + * used as a property name (e.g. `__proto__` setter on a plain object). + * The nodeDisplayName comes from paired-node metadata which we don't + * fully control; refuse to persist policy under a key that could corrupt + * the fileTransfer container's prototype. 
+ */ +function assertSafeConfigKey(key: string): string { + if (key === "__proto__" || key === "prototype" || key === "constructor") { + throw new Error(`refusing to persist file-transfer policy under unsafe key: ${key}`); + } + return key; +} + export async function persistAllowAlways(input: { nodeId: string; nodeDisplayName?: string; @@ -288,9 +302,11 @@ export async function persistAllowAlways(input: { const candidates = [input.nodeId, input.nodeDisplayName].filter( (k): k is string => typeof k === "string" && k.length > 0, ); - let key = candidates.find((c) => fileTransfer[c]); + // Use hasOwnProperty so a node with displayName "constructor" doesn't + // accidentally hit Object.prototype.constructor and pretend to match. + let key = candidates.find((c) => Object.prototype.hasOwnProperty.call(fileTransfer, c)); if (!key) { - key = input.nodeDisplayName ?? input.nodeId; + key = assertSafeConfigKey(input.nodeDisplayName ?? input.nodeId); fileTransfer[key] = {}; } const entry = fileTransfer[key]; diff --git a/extensions/file-transfer/src/tools/dir-fetch-tool.ts b/extensions/file-transfer/src/tools/dir-fetch-tool.ts index ccb7a2d5328..c21837b73cd 100644 --- a/extensions/file-transfer/src/tools/dir-fetch-tool.ts +++ b/extensions/file-transfer/src/tools/dir-fetch-tool.ts @@ -42,6 +42,13 @@ const TAR_UNPACK_TIMEOUT_MS = 60_000; // that walk can do. const TAR_UNPACK_MAX_ENTRIES = 5000; +// Hard caps on uncompressed extraction. Defends against decompression-bomb +// archives that compress to <16MB but expand to gigabytes. Both caps are +// enforced during the post-extract walk: total bytes summed across entries +// and per-file size to bound any single fs.stat / hash operation. +const DIR_FETCH_MAX_UNCOMPRESSED_BYTES = 64 * 1024 * 1024; +const DIR_FETCH_MAX_SINGLE_FILE_BYTES = 16 * 1024 * 1024; + const DirFetchToolSchema = Type.Object({ node: Type.String({ description: "Node id, name, or IP. 
Resolves the same way as the nodes tool.", }), @@ -544,6 +551,28 @@ export function createDirFetchTool(): AnyAgentTool { const walked = await walkDir(rootDir, rootDir); const files: UnpackedFileEntry[] = []; + // Defense-in-depth budget on the *uncompressed* extraction. Compressed + // tar is bounded upstream; an attacker can still send a highly + // compressible bomb (gigabytes of zeros) that fits under that cap. + // Stop walking + clean up if the unpacked tree busts the budget. + let totalUncompressed = 0; + const abortAndCleanup = async (reason: string): Promise<never> => { + await fs.rm(rootDir, { recursive: true, force: true }).catch(() => {}); + await appendFileTransferAudit({ + op: "dir.fetch", + nodeId, + nodeDisplayName, + requestedPath: dirPath, + canonicalPath, + decision: "error", + errorCode: "TREE_TOO_LARGE", + errorMessage: reason, + sizeBytes: tarBytes, + sha256, + durationMs: Date.now() - startedAt, + }); + throw new Error(`dir.fetch UNCOMPRESSED_TOO_LARGE: ${reason}`); + }; for (const { relPath, absPath } of walked) { let size = 0; try { @@ -552,6 +581,17 @@ } catch { continue; } + if (size > DIR_FETCH_MAX_SINGLE_FILE_BYTES) { + await abortAndCleanup( + `extracted file ${relPath} is ${size} bytes (limit ${DIR_FETCH_MAX_SINGLE_FILE_BYTES})`, + ); + } + totalUncompressed += size; + if (totalUncompressed > DIR_FETCH_MAX_UNCOMPRESSED_BYTES) { + await abortAndCleanup( + `extracted tree exceeds uncompressed budget ${DIR_FETCH_MAX_UNCOMPRESSED_BYTES} bytes (decompression bomb?)`, + ); + } const mimeType = mimeFromExtension(relPath); const fileSha256 = await computeFileSha256(absPath); files.push({ relPath, size, mimeType, sha256: fileSha256, localPath: absPath }); diff --git a/src/agents/tools/nodes-tool-media.ts b/src/agents/tools/nodes-tool-media.ts index c21e6325cef..fcca2d7fa25 100644 --- a/src/agents/tools/nodes-tool-media.ts +++ b/src/agents/tools/nodes-tool-media.ts @@ -27,7 +27,14 @@ export const 
MEDIA_INVOKE_ACTIONS = { "camera.clip": "camera_clip", "photos.latest": "photos_latest", "screen.record": "screen_record", + // file-transfer commands: redirect to dedicated tools so the path policy + // + operator approval flow always runs. Without this, an agent could + // call them via the generic nodes.action="invoke" surface and skip + // gatekeep() entirely. "file.fetch": "file_fetch", + "dir.list": "dir_list", + "dir.fetch": "dir_fetch", + "file.write": "file_write", } as const; export type NodeMediaAction = "camera_snap" | "photos_latest" | "camera_clip" | "screen_record";