fix(file-transfer): third-round PR review feedback

Aisle's re-analysis on b63daa6a05 surfaced 3 actionable findings:

- nodes.invoke bypass (HIGH CWE-285): generic nodes.action="invoke" let
  agents call dir.list/dir.fetch/file.write directly, skipping the
  file-transfer plugin's gatekeep + policy + approval flow. Only file.fetch
  was redirected to its dedicated tool. Add the other three to
  MEDIA_INVOKE_ACTIONS so the redirect-or-deny logic in
  nodes-tool-commands fires for all four. The dedicated tools enforce
  policy; the generic invoke surface no longer has a way to skip them
  without an explicit allowMediaInvokeCommands opt-in.
- prototype pollution in persistAllowAlways (MED CWE-1321): a paired
  node with displayName "__proto__" / "prototype" / "constructor" would
  mutate the fileTransfer object's prototype when persisting allow-always.
  Reject those keys explicitly. Switch the existing-key lookup to
  Object.prototype.hasOwnProperty.call so a key like "constructor"
  doesn't accidentally match Object.prototype.constructor.
- decompression-bomb cap in dir_fetch (MED CWE-409): compressed tar is
  bounded upstream, but a highly compressible bomb can still expand to
  gigabytes. Enforce DIR_FETCH_MAX_UNCOMPRESSED_BYTES (64MB) summed
  across extracted files and DIR_FETCH_MAX_SINGLE_FILE_BYTES (16MB) per
  entry, both checked during the post-extract walk. On bust, rm -rf the
  rootDir and audit-log + throw UNCOMPRESSED_TOO_LARGE.

Tests: 85/85 passing (added prototype-pollution rejection test).

Aisle's HIGH parent-symlink finding remains documented as deferred — full
rollback requires a node-side file.unlink command which is out of scope
for this PR. The gateway-side post-flight policy check still detects and
loudly errors on canonical-path mismatches.
This commit is contained in:
Omar Shahine
2026-04-29 07:45:23 +00:00
parent c657974d44
commit 069bd66243
4 changed files with 91 additions and 2 deletions

View File

@@ -325,6 +325,32 @@ describe("persistAllowAlways", () => {
expect(root.gateway.nodes.fileTransfer["Lobster"].allowReadPaths).toEqual(["/srv/added.png"]);
});
it("rejects unsafe keys (__proto__, prototype, constructor) that would mutate prototype chain", async () => {
  // Hand the mutator a fresh empty draft; persistAllowAlways must refuse
  // before it ever writes a key that collides with Object.prototype.
  mutateConfigFileMock.mockImplementation(
    async ({ mutate }: { mutate: (draft: Record<string, unknown>) => void }) => {
      const emptyDraft: Record<string, unknown> = {};
      mutate(emptyDraft);
    },
  );
  // Case 1: displayName is preferred as the new config key, so an unsafe
  // displayName must be rejected even when the nodeId is harmless.
  const viaDisplayName = persistAllowAlways({
    nodeId: "n1",
    nodeDisplayName: "__proto__",
    kind: "read",
    path: "/etc/passwd",
  });
  await expect(viaDisplayName).rejects.toThrow(/unsafe key.*__proto__/);
  // Case 2: with no displayName the nodeId itself becomes the key and is
  // subject to the same check.
  const viaNodeId = persistAllowAlways({
    nodeId: "constructor",
    kind: "read",
    path: "/etc/passwd",
  });
  await expect(viaNodeId).rejects.toThrow(/unsafe key.*constructor/);
});
it("dedupes when path already present", async () => {
let captured: Record<string, unknown> | null = null;
mutateConfigFileMock.mockImplementation(

View File

@@ -264,6 +264,20 @@ export function evaluateFilePolicy(input: {
* the "*" wildcard if that's what was hit). If no entry exists yet,
* creates one keyed by nodeDisplayName ?? nodeId.
*/
/**
 * Property names that collide with the prototype chain of a plain object.
 * Writing policy under any of these (most notably the `__proto__` setter)
 * could corrupt the fileTransfer container's prototype.
 */
const UNSAFE_CONFIG_KEYS = new Set(["__proto__", "prototype", "constructor"]);

/**
 * Validate a config key before it is used as a property name.
 *
 * The nodeDisplayName comes from paired-node metadata which we don't fully
 * control, so refuse outright rather than silently remapping.
 *
 * @param key - Candidate property name for the fileTransfer config object.
 * @returns The same key, unchanged, when it is safe to use.
 * @throws Error when the key would touch the prototype chain.
 */
function assertSafeConfigKey(key: string): string {
  if (UNSAFE_CONFIG_KEYS.has(key)) {
    throw new Error(`refusing to persist file-transfer policy under unsafe key: ${key}`);
  }
  return key;
}
export async function persistAllowAlways(input: {
nodeId: string;
nodeDisplayName?: string;
@@ -288,9 +302,11 @@ export async function persistAllowAlways(input: {
const candidates = [input.nodeId, input.nodeDisplayName].filter(
(k): k is string => typeof k === "string" && k.length > 0,
);
let key = candidates.find((c) => fileTransfer[c]);
// Use hasOwnProperty so a node with displayName "constructor" doesn't
// accidentally hit Object.prototype.constructor and pretend to match.
let key = candidates.find((c) => Object.prototype.hasOwnProperty.call(fileTransfer, c));
if (!key) {
key = input.nodeDisplayName ?? input.nodeId;
key = assertSafeConfigKey(input.nodeDisplayName ?? input.nodeId);
fileTransfer[key] = {};
}
const entry = fileTransfer[key];

View File

@@ -42,6 +42,13 @@ const TAR_UNPACK_TIMEOUT_MS = 60_000;
// that walk can do.
const TAR_UNPACK_MAX_ENTRIES = 5000;
// Hard caps on the *uncompressed* extraction size. A decompression-bomb
// archive can fit under the compressed-size cap yet expand to gigabytes;
// both limits below are enforced during the post-extract walk — the first
// sums bytes across every extracted entry, the second bounds any single
// fs.stat / hash operation.
const DIR_FETCH_MAX_UNCOMPRESSED_BYTES = 64 * 1024 * 1024; // 64 MiB total
const DIR_FETCH_MAX_SINGLE_FILE_BYTES = 16 * 1024 * 1024; // 16 MiB per entry
const DirFetchToolSchema = Type.Object({
node: Type.String({
description: "Node id, name, or IP. Resolves the same way as the nodes tool.",
@@ -544,6 +551,28 @@ export function createDirFetchTool(): AnyAgentTool {
const walked = await walkDir(rootDir, rootDir);
const files: UnpackedFileEntry[] = [];
// Defense-in-depth budget on the *uncompressed* extraction. Compressed
// tar is bounded upstream; an attacker can still send a highly
// compressible bomb (gigabytes of zeros) that fits under that cap.
// Stop walking + clean up if the unpacked tree busts the budget.
let totalUncompressed = 0;
/**
 * Remove the partially extracted tree, audit the failure, and throw.
 * Never returns; the thrown error aborts the surrounding walk.
 *
 * @param reason - Human-readable description of which cap was busted.
 */
const abortAndCleanup = async (reason: string): Promise<never> => {
  // Best-effort cleanup: a failed rm must not mask the size-limit error
  // we are about to raise.
  await fs.rm(rootDir, { recursive: true, force: true }).catch(() => {});
  await appendFileTransferAudit({
    op: "dir.fetch",
    nodeId,
    nodeDisplayName,
    requestedPath: dirPath,
    canonicalPath,
    decision: "error",
    // Keep the audit errorCode identical to the thrown code so a log
    // search for UNCOMPRESSED_TOO_LARGE finds both records. (Was
    // "TREE_TOO_LARGE", which disagreed with the thrown error below.)
    errorCode: "UNCOMPRESSED_TOO_LARGE",
    errorMessage: reason,
    sizeBytes: tarBytes,
    sha256,
    durationMs: Date.now() - startedAt,
  });
  throw new Error(`dir.fetch UNCOMPRESSED_TOO_LARGE: ${reason}`);
};
for (const { relPath, absPath } of walked) {
let size = 0;
try {
@@ -552,6 +581,17 @@ export function createDirFetchTool(): AnyAgentTool {
} catch {
continue;
}
if (size > DIR_FETCH_MAX_SINGLE_FILE_BYTES) {
await abortAndCleanup(
`extracted file ${relPath} is ${size} bytes (limit ${DIR_FETCH_MAX_SINGLE_FILE_BYTES})`,
);
}
totalUncompressed += size;
if (totalUncompressed > DIR_FETCH_MAX_UNCOMPRESSED_BYTES) {
await abortAndCleanup(
`extracted tree exceeds uncompressed budget ${DIR_FETCH_MAX_UNCOMPRESSED_BYTES} bytes (decompression bomb?)`,
);
}
const mimeType = mimeFromExtension(relPath);
const fileSha256 = await computeFileSha256(absPath);
files.push({ relPath, size, mimeType, sha256: fileSha256, localPath: absPath });

View File

@@ -27,7 +27,14 @@ export const MEDIA_INVOKE_ACTIONS = {
"camera.clip": "camera_clip",
"photos.latest": "photos_latest",
"screen.record": "screen_record",
// file-transfer commands: redirect to dedicated tools so the path policy
// + operator approval flow always runs. Without this, an agent could
// call them via the generic nodes.action="invoke" surface and skip
// gatekeep() entirely.
"file.fetch": "file_fetch",
"dir.list": "dir_list",
"dir.fetch": "dir_fetch",
"file.write": "file_write",
} as const;
// NOTE(review): deliberately narrower than the value set of
// MEDIA_INVOKE_ACTIONS — the file-transfer redirects ("file_fetch",
// "dir_list", "dir_fetch", "file_write") are excluded here. Confirm that
// is intended (i.e. nothing downstream uses this union to enumerate every
// redirect target), or derive it from the const object instead.
export type NodeMediaAction = "camera_snap" | "photos_latest" | "camera_clip" | "screen_record";