test(file-transfer): add unit tests for handlers, policy, and shared utilities

Adds 77 tests covering:
- handleFileFetch: validation, fs errors, sha256, size cap, symlink canonicalization
- handleFileWrite: validation, atomic write, overwrite policy, parent dir handling, symlink refusal, integrity check, size cap
- handleDirList: validation, fs errors, sorted listing, dotfile inclusion, pagination
- handleDirFetch: validation, fs errors, gzipped tar with sha256, mid-stream byte cap
- evaluateFilePolicy: default-deny, denyPaths-wins, allow matching, ask modes (off/on-miss/always), node-id/displayName/'*' resolution
- persistAllowAlways: append, dedupe, create-on-missing
- shared/mime: extension lookup, image/text inline sets
- shared/errors: err helper, classifyFsError, throwFromNodePayload

Also fixes accumulated lint regressions in the prod source flagged once these
files moved into the changed-gate scope (parseInt -> Number.parseInt, redundant
type casts removed, single-statement if bodies wrapped in braces).
This commit is contained in:
Omar Shahine
2026-04-29 06:34:12 +00:00
parent 844f3d62e9
commit db6b4b41e1
14 changed files with 1034 additions and 16 deletions

View File

@@ -1,6 +1,5 @@
import {
definePluginEntry,
type AnyAgentTool,
type OpenClawPluginNodeHostCommand,
} from "openclaw/plugin-sdk/plugin-entry";
import { handleDirFetch } from "./src/node-host/dir-fetch.js";
@@ -57,9 +56,9 @@ export default definePluginEntry({
description: "Fetch, list, write, and watch files on paired nodes via dedicated node commands.",
nodeHostCommands: fileTransferNodeHostCommands,
register(api) {
api.registerTool(createFileFetchTool() as AnyAgentTool);
api.registerTool(createDirListTool() as AnyAgentTool);
api.registerTool(createDirFetchTool() as AnyAgentTool);
api.registerTool(createFileWriteTool() as AnyAgentTool);
api.registerTool(createFileFetchTool());
api.registerTool(createDirListTool());
api.registerTool(createDirFetchTool());
api.registerTool(createFileWriteTool());
},
});

View File

@@ -0,0 +1,109 @@
import crypto from "node:crypto";
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { handleDirFetch } from "./dir-fetch.js";
let tmpRoot: string;

// Each test gets a throwaway temp directory, torn down afterwards.
beforeEach(async () => {
  const prefix = path.join(os.tmpdir(), "dir-fetch-test-");
  tmpRoot = await fs.mkdtemp(prefix);
});
afterEach(async () => {
  await fs.rm(tmpRoot, { recursive: true, force: true });
});

// dir-fetch drives /usr/bin/tar under the hood, which Windows CI lacks.
// Guard tar-dependent test bodies so they still register but no-op there.
const HAS_TAR = process.platform !== "win32";
describe("handleDirFetch — input validation", () => {
  it("rejects empty / non-string path", async () => {
    const result = await handleDirFetch({ path: "" });
    expect(result).toMatchObject({ ok: false, code: "INVALID_PATH" });
  });
  it("rejects relative paths", async () => {
    const result = await handleDirFetch({ path: "relative" });
    expect(result).toMatchObject({ ok: false, code: "INVALID_PATH" });
  });
  it("rejects paths with NUL bytes", async () => {
    const result = await handleDirFetch({ path: "/tmp/foo\0bar" });
    expect(result).toMatchObject({ ok: false, code: "INVALID_PATH" });
  });
});
describe("handleDirFetch — fs errors", () => {
  it.runIf(HAS_TAR)("returns NOT_FOUND for a missing directory", async () => {
    const missing = path.join(tmpRoot, "missing");
    expect(await handleDirFetch({ path: missing })).toMatchObject({
      ok: false,
      code: "NOT_FOUND",
    });
  });
  it.runIf(HAS_TAR)("returns IS_FILE when path resolves to a file", async () => {
    const filePath = path.join(tmpRoot, "f.txt");
    await fs.writeFile(filePath, "x");
    const result = await handleDirFetch({ path: filePath });
    expect(result).toMatchObject({ ok: false, code: "IS_FILE" });
  });
});
describe("handleDirFetch — happy path", () => {
  it.runIf(HAS_TAR)("returns a gzipped tar with byte count and sha256", async () => {
    await fs.writeFile(path.join(tmpRoot, "a.txt"), "alpha\n");
    await fs.writeFile(path.join(tmpRoot, "b.txt"), "beta\n");
    await fs.mkdir(path.join(tmpRoot, "sub"));
    await fs.writeFile(path.join(tmpRoot, "sub", "c.txt"), "gamma\n");
    const result = await handleDirFetch({ path: tmpRoot });
    if (!result.ok) {
      throw new Error(`expected ok, got ${result.code}: ${result.message}`);
    }
    expect(result.tarBytes).toBeGreaterThan(0);
    expect(result.tarBase64.length).toBeGreaterThan(0);
    const archive = Buffer.from(result.tarBase64, "base64");
    expect(archive.byteLength).toBe(result.tarBytes);
    const wantSha = crypto.createHash("sha256").update(archive).digest("hex");
    expect(result.sha256).toBe(wantSha);
    // The payload must start with the gzip magic number.
    expect(archive[0]).toBe(0x1f);
    expect(archive[1]).toBe(0x8b);
    // Three regular files were created; BSD tar may additionally count
    // directory entries, so only assert a lower bound.
    expect(result.fileCount).toBeGreaterThanOrEqual(3);
  });
});
describe("handleDirFetch — size cap", () => {
  it.runIf(HAS_TAR)(
    "returns TREE_TOO_LARGE when content exceeds the cap mid-stream",
    async () => {
      // Incompressible random payloads keep the gzip stream roughly as
      // large as the input, so three half-MB files comfortably exceed a
      // 64KB cap.
      const blob = crypto.randomBytes(512 * 1024);
      await fs.writeFile(path.join(tmpRoot, "big1.bin"), blob);
      await fs.writeFile(path.join(tmpRoot, "big2.bin"), blob);
      await fs.writeFile(path.join(tmpRoot, "big3.bin"), blob);
      // Either the du preflight or the streaming SIGTERM should trip first.
      const result = await handleDirFetch({ path: tmpRoot, maxBytes: 64 * 1024 });
      expect(result).toMatchObject({ ok: false, code: "TREE_TOO_LARGE" });
    },
    30_000,
  );
});

View File

@@ -73,7 +73,7 @@ async function preflightDu(dirPath: string, maxBytes: number): Promise<boolean>
resolve(true);
return;
}
const sizeKb = parseInt(match[1], 10);
const sizeKb = Number.parseInt(match[1], 10);
resolve(sizeKb <= heuristicKb);
});
du.on("error", () => {
@@ -181,7 +181,9 @@ export async function handleDirFetch(params: DirFetchParams): Promise<DirFetchRe
let aborted = false;
const watchdog = setTimeout(() => {
if (aborted) return;
if (aborted) {
return;
}
aborted = true;
try {
child.kill("SIGKILL");
@@ -192,7 +194,9 @@ export async function handleDirFetch(params: DirFetchParams): Promise<DirFetchRe
}, TAR_HARD_TIMEOUT_MS);
child.stdout.on("data", (chunk: Buffer) => {
if (aborted) return;
if (aborted) {
return;
}
totalBytes += chunk.byteLength;
if (totalBytes > maxBytes) {
aborted = true;
@@ -206,7 +210,9 @@ export async function handleDirFetch(params: DirFetchParams): Promise<DirFetchRe
child.on("close", (code) => {
clearTimeout(watchdog);
if (aborted) return;
if (aborted) {
return;
}
if (code !== 0) {
resolve("ERROR");
return;

View File

@@ -0,0 +1,142 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import {
DIR_LIST_DEFAULT_MAX_ENTRIES,
DIR_LIST_HARD_MAX_ENTRIES,
handleDirList,
} from "./dir-list.js";
let tmpRoot: string;

// Fresh temp directory per test; removed again in afterEach.
beforeEach(async () => {
  const prefix = path.join(os.tmpdir(), "dir-list-test-");
  tmpRoot = await fs.mkdtemp(prefix);
});
afterEach(async () => {
  await fs.rm(tmpRoot, { recursive: true, force: true });
});
describe("handleDirList — input validation", () => {
  it("rejects empty / non-string path", async () => {
    const emptyResult = await handleDirList({ path: "" });
    expect(emptyResult).toMatchObject({ ok: false, code: "INVALID_PATH" });
    const missingResult = await handleDirList({ path: undefined });
    expect(missingResult).toMatchObject({ ok: false, code: "INVALID_PATH" });
  });
  it("rejects relative paths", async () => {
    const result = await handleDirList({ path: "relative" });
    expect(result).toMatchObject({ ok: false, code: "INVALID_PATH" });
  });
  it("rejects paths with NUL bytes", async () => {
    const result = await handleDirList({ path: "/tmp/foo\0bar" });
    expect(result).toMatchObject({ ok: false, code: "INVALID_PATH" });
  });
});
describe("handleDirList — fs errors", () => {
  it("returns NOT_FOUND for a missing directory", async () => {
    const missing = path.join(tmpRoot, "does-not-exist");
    expect(await handleDirList({ path: missing })).toMatchObject({
      ok: false,
      code: "NOT_FOUND",
    });
  });
  it("returns IS_FILE when path resolves to a regular file", async () => {
    const filePath = path.join(tmpRoot, "f.txt");
    await fs.writeFile(filePath, "x");
    expect(await handleDirList({ path: filePath })).toMatchObject({ ok: false, code: "IS_FILE" });
  });
});
describe("handleDirList — happy path", () => {
  it("lists files and subdirs with metadata, sorted by name", async () => {
    await fs.writeFile(path.join(tmpRoot, "z.txt"), "Z");
    await fs.writeFile(path.join(tmpRoot, "a.png"), "PNG-bytes");
    await fs.mkdir(path.join(tmpRoot, "subdir"));
    const r = await handleDirList({ path: tmpRoot });
    if (!r.ok) {
      throw new Error("expected ok");
    }
    // Entries come back sorted by name regardless of creation order.
    expect(r.entries.map((e) => e.name)).toEqual(["a.png", "subdir", "z.txt"]);
    const a = r.entries.find((e) => e.name === "a.png")!;
    expect(a.isDir).toBe(false);
    expect(a.size).toBeGreaterThan(0);
    expect(a.mimeType).toBe("image/png");
    const sub = r.entries.find((e) => e.name === "subdir")!;
    expect(sub.isDir).toBe(true);
    // Directories report size 0 and a synthetic inode/directory mime type.
    expect(sub.size).toBe(0);
    expect(sub.mimeType).toBe("inode/directory");
    expect(r.truncated).toBe(false);
    expect(r.nextPageToken).toBeUndefined();
  });
  it("includes dotfiles in the listing", async () => {
    await fs.writeFile(path.join(tmpRoot, ".hidden"), "x");
    await fs.writeFile(path.join(tmpRoot, "visible"), "x");
    const r = await handleDirList({ path: tmpRoot });
    if (!r.ok) {
      throw new Error("expected ok");
    }
    expect(r.entries.map((e) => e.name)).toEqual([".hidden", "visible"]);
  });
  it("paginates via pageToken (offset-based)", async () => {
    for (let i = 0; i < 7; i++) {
      // Single-digit suffixes sort lexicographically in creation order, so
      // no zero-padding is needed for the expected page ordering below.
      await fs.writeFile(path.join(tmpRoot, `f-${i}.txt`), "x");
    }
    const page1 = await handleDirList({ path: tmpRoot, maxEntries: 3 });
    if (!page1.ok) {
      throw new Error("page1");
    }
    expect(page1.entries.map((e) => e.name)).toEqual(["f-0.txt", "f-1.txt", "f-2.txt"]);
    expect(page1.truncated).toBe(true);
    // The page token is the plain numeric offset of the next entry.
    expect(page1.nextPageToken).toBe("3");
    const page2 = await handleDirList({
      path: tmpRoot,
      maxEntries: 3,
      pageToken: page1.nextPageToken,
    });
    if (!page2.ok) {
      throw new Error("page2");
    }
    expect(page2.entries.map((e) => e.name)).toEqual(["f-3.txt", "f-4.txt", "f-5.txt"]);
    expect(page2.truncated).toBe(true);
    const page3 = await handleDirList({
      path: tmpRoot,
      maxEntries: 3,
      pageToken: page2.nextPageToken,
    });
    if (!page3.ok) {
      throw new Error("page3");
    }
    // Final page: one leftover entry, no further token.
    expect(page3.entries.map((e) => e.name)).toEqual(["f-6.txt"]);
    expect(page3.truncated).toBe(false);
    expect(page3.nextPageToken).toBeUndefined();
  });
});
describe("handleDirList — limits", () => {
  // The old title promised clamping behavior, but the body only pins the
  // exported constants — actual clamping is exercised via the pagination
  // tests above. Name the test for what it asserts so a silent change to
  // the defaults fails loudly and honestly.
  it("exposes the documented default and hard-ceiling entry limits", () => {
    expect(DIR_LIST_DEFAULT_MAX_ENTRIES).toBe(200);
    expect(DIR_LIST_HARD_MAX_ENTRIES).toBe(5000);
    expect(DIR_LIST_DEFAULT_MAX_ENTRIES).toBeLessThan(DIR_LIST_HARD_MAX_ENTRIES);
  });
});

View File

@@ -77,7 +77,7 @@ export async function handleDirList(params: DirListParams): Promise<DirListResul
const maxEntries = clampMaxEntries(params.maxEntries);
const offset =
typeof params.pageToken === "string" && params.pageToken.length > 0
? Math.max(0, parseInt(params.pageToken, 10) || 0)
? Math.max(0, Number.parseInt(params.pageToken, 10) || 0)
: 0;
let canonical: string;

View File

@@ -0,0 +1,152 @@
import crypto from "node:crypto";
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import {
FILE_FETCH_DEFAULT_MAX_BYTES,
FILE_FETCH_HARD_MAX_BYTES,
handleFileFetch,
} from "./file-fetch.js";
let tmpRoot: string;

// Per-test scratch directory, cleaned up unconditionally.
beforeEach(async () => {
  const prefix = path.join(os.tmpdir(), "file-fetch-test-");
  tmpRoot = await fs.mkdtemp(prefix);
});
afterEach(async () => {
  await fs.rm(tmpRoot, { recursive: true, force: true });
});
describe("handleFileFetch — input validation", () => {
  it("returns INVALID_PATH for empty / non-string path", async () => {
    const emptyResult = await handleFileFetch({ path: "" });
    expect(emptyResult).toMatchObject({ ok: false, code: "INVALID_PATH" });
    const missingResult = await handleFileFetch({ path: undefined });
    expect(missingResult).toMatchObject({ ok: false, code: "INVALID_PATH" });
    const numericResult = await handleFileFetch({ path: 42 as unknown });
    expect(numericResult).toMatchObject({ ok: false, code: "INVALID_PATH" });
  });
  it("rejects relative paths", async () => {
    const r = await handleFileFetch({ path: "relative/file.txt" });
    expect(r).toMatchObject({ ok: false, code: "INVALID_PATH" });
    // The message should point the caller at the absolute-path requirement.
    expect(r.ok ? "" : r.message).toMatch(/absolute/);
  });
  it("rejects paths with NUL bytes", async () => {
    const r = await handleFileFetch({ path: "/tmp/foo\0bar" });
    expect(r).toMatchObject({ ok: false, code: "INVALID_PATH" });
    expect(r.ok ? "" : r.message).toMatch(/NUL/);
  });
});
describe("handleFileFetch — fs errors", () => {
  it("returns NOT_FOUND for a missing file", async () => {
    const missing = path.join(tmpRoot, "missing.txt");
    expect(await handleFileFetch({ path: missing })).toMatchObject({
      ok: false,
      code: "NOT_FOUND",
    });
  });
  it("returns IS_DIRECTORY when the path resolves to a directory", async () => {
    const r = await handleFileFetch({ path: tmpRoot });
    expect(r).toMatchObject({ ok: false, code: "IS_DIRECTORY" });
    // canonical path is reported back so the caller can re-check policy
    expect(r.ok ? null : r.canonicalPath).toBeTruthy();
  });
});
describe("handleFileFetch — happy path", () => {
  it("reads a small file and returns size + sha256 + base64", async () => {
    const filePath = path.join(tmpRoot, "hello.txt");
    const body = "hello world\n";
    await fs.writeFile(filePath, body);
    const result = await handleFileFetch({ path: filePath });
    if (!result.ok) {
      throw new Error(`expected ok, got ${result.code}: ${result.message}`);
    }
    expect(result.size).toBe(body.length);
    expect(Buffer.from(result.base64, "base64").toString("utf-8")).toBe(body);
    const wantSha = crypto.createHash("sha256").update(body).digest("hex");
    expect(result.sha256).toBe(wantSha);
    // macOS may canonicalize /tmp to /private/tmp, so only pin the basename.
    expect(path.basename(result.path)).toBe("hello.txt");
  });
  it("returns a sensible mime type for known extensions", async () => {
    const filePath = path.join(tmpRoot, "readme.md");
    await fs.writeFile(filePath, "# heading\n");
    const result = await handleFileFetch({ path: filePath });
    if (!result.ok) {
      throw new Error("expected ok");
    }
    // libmagic ("file" cli) typically reports text/plain or text/markdown
    // for a one-line markdown file; the extension fallback yields
    // text/markdown. Accept either.
    expect(result.mimeType).toMatch(/^text\/(plain|markdown)$/);
  });
});
describe("handleFileFetch — size enforcement", () => {
  it("returns FILE_TOO_LARGE when stat size exceeds the cap", async () => {
    const filePath = path.join(tmpRoot, "big.bin");
    await fs.writeFile(filePath, Buffer.alloc(2048, 0xab));
    const result = await handleFileFetch({ path: filePath, maxBytes: 1024 });
    expect(result).toMatchObject({ ok: false, code: "FILE_TOO_LARGE" });
  });
  it("clamps maxBytes to the hard ceiling", async () => {
    expect(FILE_FETCH_HARD_MAX_BYTES).toBe(16 * 1024 * 1024);
    expect(FILE_FETCH_DEFAULT_MAX_BYTES).toBeLessThanOrEqual(FILE_FETCH_HARD_MAX_BYTES);
    // A maxBytes far above the hard ceiling must still succeed for a small
    // file (it is clamped, not rejected).
    const filePath = path.join(tmpRoot, "tiny.bin");
    await fs.writeFile(filePath, Buffer.from([0x01, 0x02, 0x03]));
    const result = await handleFileFetch({ path: filePath, maxBytes: Number.MAX_SAFE_INTEGER });
    expect(result.ok).toBe(true);
  });
  it("uses default cap when maxBytes is not finite or non-positive", async () => {
    const filePath = path.join(tmpRoot, "small.bin");
    await fs.writeFile(filePath, Buffer.from([0xff]));
    expect(await handleFileFetch({ path: filePath, maxBytes: -1 })).toMatchObject({ ok: true });
    expect(await handleFileFetch({ path: filePath, maxBytes: Number.NaN })).toMatchObject({
      ok: true,
    });
    expect(await handleFileFetch({ path: filePath, maxBytes: "8" as unknown })).toMatchObject({
      ok: true,
    });
  });
});
describe("handleFileFetch — symlink canonicalization", () => {
  it("returns the canonical (realpath) target path, not the symlink path", async () => {
    const realFile = path.join(tmpRoot, "real.txt");
    const linkFile = path.join(tmpRoot, "link.txt");
    await fs.writeFile(realFile, "data");
    await fs.symlink(realFile, linkFile);
    const result = await handleFileFetch({ path: linkFile });
    if (!result.ok) {
      throw new Error(`expected ok, got ${result.code}`);
    }
    // Both inputs canonicalize through the OS; asserting the basename shows
    // the symlink was resolved to its target rather than echoed back.
    expect(path.basename(result.path)).toBe("real.txt");
  });
});

View File

@@ -0,0 +1,206 @@
import crypto from "node:crypto";
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { handleFileWrite } from "./file-write.js";
let tmpRoot: string;

// Isolated scratch directory per test, force-removed afterwards.
beforeEach(async () => {
  const prefix = path.join(os.tmpdir(), "file-write-test-");
  tmpRoot = await fs.mkdtemp(prefix);
});
afterEach(async () => {
  await fs.rm(tmpRoot, { recursive: true, force: true });
});
// Encode a UTF-8 string as base64, the wire format handleFileWrite expects.
function b64(s: string): string {
  const bytes = Buffer.from(s, "utf-8");
  return bytes.toString("base64");
}
describe("handleFileWrite — input validation", () => {
  it("rejects empty / non-string path", async () => {
    const r = await handleFileWrite({ path: "", contentBase64: b64("x") });
    expect(r).toMatchObject({ ok: false, code: "INVALID_PATH" });
  });
  it("rejects relative paths", async () => {
    const r = await handleFileWrite({ path: "relative.txt", contentBase64: b64("x") });
    expect(r).toMatchObject({ ok: false, code: "INVALID_PATH" });
  });
  it("rejects paths with NUL bytes", async () => {
    const r = await handleFileWrite({ path: "/tmp/foo\0bar", contentBase64: b64("x") });
    expect(r).toMatchObject({ ok: false, code: "INVALID_PATH" });
  });
});
describe("handleFileWrite — happy path", () => {
  it("writes a new file and returns size + sha256 + overwritten=false", async () => {
    const dest = path.join(tmpRoot, "out.txt");
    const body = "hello write\n";
    const result = await handleFileWrite({ path: dest, contentBase64: b64(body) });
    if (!result.ok) {
      throw new Error(`expected ok, got ${result.code}: ${result.message}`);
    }
    expect(result.size).toBe(body.length);
    expect(result.overwritten).toBe(false);
    const wantSha = crypto.createHash("sha256").update(body).digest("hex");
    expect(result.sha256).toBe(wantSha);
    expect(await fs.readFile(dest, "utf-8")).toBe(body);
  });
  it("does not leave .tmp files behind on success", async () => {
    const dest = path.join(tmpRoot, "atomic.txt");
    const result = await handleFileWrite({ path: dest, contentBase64: b64("body") });
    expect(result.ok).toBe(true);
    // The atomic write's staging file must be renamed away, not orphaned.
    const leftovers = (await fs.readdir(tmpRoot)).filter((n) => n.includes(".tmp"));
    expect(leftovers).toEqual([]);
  });
});
describe("handleFileWrite — overwrite policy", () => {
  it("refuses to overwrite an existing file when overwrite=false", async () => {
    const dest = path.join(tmpRoot, "exists.txt");
    await fs.writeFile(dest, "before");
    const result = await handleFileWrite({
      path: dest,
      contentBase64: b64("after"),
      overwrite: false,
    });
    expect(result).toMatchObject({ ok: false, code: "EXISTS_NO_OVERWRITE" });
    // The original content must survive the refused write.
    expect(await fs.readFile(dest, "utf-8")).toBe("before");
  });
  it("overwrites and reports overwritten=true when overwrite=true", async () => {
    const dest = path.join(tmpRoot, "exists.txt");
    await fs.writeFile(dest, "before");
    const result = await handleFileWrite({
      path: dest,
      contentBase64: b64("after"),
      overwrite: true,
    });
    if (!result.ok) {
      throw new Error("expected ok");
    }
    expect(result.overwritten).toBe(true);
    expect(await fs.readFile(dest, "utf-8")).toBe("after");
  });
});
describe("handleFileWrite — parent directory handling", () => {
  it("returns PARENT_NOT_FOUND when parent is missing and createParents=false", async () => {
    const dest = path.join(tmpRoot, "nested", "child.txt");
    const result = await handleFileWrite({
      path: dest,
      contentBase64: b64("x"),
      createParents: false,
    });
    expect(result).toMatchObject({ ok: false, code: "PARENT_NOT_FOUND" });
  });
  it("creates missing parents when createParents=true", async () => {
    const dest = path.join(tmpRoot, "deep", "nested", "child.txt");
    const result = await handleFileWrite({
      path: dest,
      contentBase64: b64("x"),
      createParents: true,
    });
    expect(result.ok).toBe(true);
    expect(await fs.readFile(dest, "utf-8")).toBe("x");
  });
});
describe("handleFileWrite — symlink protection", () => {
  it("refuses to write through an existing symlink (lstat)", async () => {
    const realFile = path.join(tmpRoot, "real.txt");
    const linkFile = path.join(tmpRoot, "link.txt");
    await fs.writeFile(realFile, "untouched");
    await fs.symlink(realFile, linkFile);
    const result = await handleFileWrite({
      path: linkFile,
      contentBase64: b64("evil"),
      overwrite: true,
    });
    expect(result).toMatchObject({ ok: false, code: "SYMLINK_TARGET_DENIED" });
    // The symlink's target must be left untouched by the refused write.
    expect(await fs.readFile(realFile, "utf-8")).toBe("untouched");
  });
  it("refuses to overwrite a directory", async () => {
    const dest = path.join(tmpRoot, "is-a-dir");
    await fs.mkdir(dest);
    const result = await handleFileWrite({
      path: dest,
      contentBase64: b64("x"),
      overwrite: true,
    });
    expect(result).toMatchObject({ ok: false, code: "IS_DIRECTORY" });
  });
});
describe("handleFileWrite — integrity check", () => {
  it("unlinks the file and returns INTEGRITY_FAILURE when expectedSha256 mismatches", async () => {
    const dest = path.join(tmpRoot, "checked.txt");
    const result = await handleFileWrite({
      path: dest,
      contentBase64: b64("real-content"),
      expectedSha256: "0".repeat(64),
    });
    expect(result).toMatchObject({ ok: false, code: "INTEGRITY_FAILURE" });
    // A mismatched write must not leave the file on disk.
    await expect(fs.access(dest)).rejects.toMatchObject({ code: "ENOENT" });
  });
  it("accepts a matching expectedSha256 and keeps the file", async () => {
    const dest = path.join(tmpRoot, "checked.txt");
    const body = "real-content";
    const sha = crypto.createHash("sha256").update(body).digest("hex");
    const result = await handleFileWrite({
      path: dest,
      contentBase64: b64(body),
      expectedSha256: sha,
    });
    expect(result.ok).toBe(true);
    expect(await fs.readFile(dest, "utf-8")).toBe(body);
  });
  it("treats expectedSha256 as case-insensitive", async () => {
    const dest = path.join(tmpRoot, "checked.txt");
    const body = "abc";
    const sha = crypto.createHash("sha256").update(body).digest("hex").toUpperCase();
    const result = await handleFileWrite({
      path: dest,
      contentBase64: b64(body),
      expectedSha256: sha,
    });
    expect(result.ok).toBe(true);
  });
});
describe("handleFileWrite — size cap", () => {
  it("rejects content larger than the 16MB cap", async () => {
    const dest = path.join(tmpRoot, "big.bin");
    // The cap applies to the decoded buffer length, so 17MB of zero bytes
    // trips it regardless of base64's ~4/3 inflation.
    const payload = Buffer.alloc(17 * 1024 * 1024, 0);
    const result = await handleFileWrite({
      path: dest,
      contentBase64: payload.toString("base64"),
    });
    expect(result).toMatchObject({ ok: false, code: "FILE_TOO_LARGE" });
  });
});

View File

@@ -0,0 +1,62 @@
import { describe, expect, it } from "vitest";
import { classifyFsError, err, throwFromNodePayload } from "./errors.js";
describe("err", () => {
  it("returns an error envelope without canonicalPath when omitted", () => {
    const envelope = err("INVALID_PATH", "path required");
    expect(envelope).toEqual({ ok: false, code: "INVALID_PATH", message: "path required" });
    // The key must be absent entirely, not merely undefined.
    expect("canonicalPath" in envelope).toBe(false);
  });
  it("includes canonicalPath only when provided non-empty", () => {
    const withPath = err("NOT_FOUND", "missing", "/tmp/x");
    expect(withPath.canonicalPath).toBe("/tmp/x");
    const blankPath = err("NOT_FOUND", "missing", "");
    expect("canonicalPath" in blankPath).toBe(false);
  });
});
describe("classifyFsError", () => {
  it("maps ENOENT to NOT_FOUND", () => {
    expect(classifyFsError({ code: "ENOENT" })).toBe("NOT_FOUND");
  });
  it("maps EACCES and EPERM to PERMISSION_DENIED", () => {
    for (const code of ["EACCES", "EPERM"]) {
      expect(classifyFsError({ code })).toBe("PERMISSION_DENIED");
    }
  });
  it("maps EISDIR to IS_DIRECTORY", () => {
    expect(classifyFsError({ code: "EISDIR" })).toBe("IS_DIRECTORY");
  });
  it("falls back to READ_ERROR for unknown / null / non-object input", () => {
    for (const input of [{ code: "EUNKNOWN" }, null, undefined, "nope"]) {
      expect(classifyFsError(input)).toBe("READ_ERROR");
    }
  });
});
describe("throwFromNodePayload", () => {
  it("preserves code and message in the thrown Error", () => {
    const payload = { code: "NOT_FOUND", message: "file not found" };
    expect(() => throwFromNodePayload("file.fetch", payload)).toThrow(
      /file\.fetch NOT_FOUND: file not found/,
    );
  });
  it("appends canonicalPath when present", () => {
    const payload = {
      code: "POLICY_DENIED",
      message: "blocked",
      canonicalPath: "/tmp/x",
    };
    expect(() => throwFromNodePayload("file.fetch", payload)).toThrow(/canonical=\/tmp\/x/);
  });
  it("falls back to ERROR / generic message when fields are missing", () => {
    expect(() => throwFromNodePayload("dir.list", {})).toThrow(/dir\.list ERROR: dir\.list failed/);
  });
});

View File

@@ -48,8 +48,7 @@ export async function gatekeep(input: {
// ask=always: prompt even on a match.
// Or: ask=on-miss + no allow match: prompt.
const shouldAsk =
(decision.ok && decision.reason === "ask-always") ||
(!decision.ok && decision.askable === true);
(decision.ok && decision.reason === "ask-always") || (!decision.ok && decision.askable);
if (shouldAsk) {
const verb = input.promptVerb;

View File

@@ -0,0 +1,58 @@
import { describe, expect, it } from "vitest";
import {
EXTENSION_MIME,
IMAGE_MIME_INLINE_SET,
TEXT_INLINE_MAX_BYTES,
TEXT_INLINE_MIME_SET,
mimeFromExtension,
} from "./mime.js";
describe("mimeFromExtension", () => {
  it("returns the mapped mime for known extensions", () => {
    const cases: Array<[string, string]> = [
      ["foo.png", "image/png"],
      ["/abs/path/bar.JPG", "image/jpeg"],
      ["doc.pdf", "application/pdf"],
      ["notes.md", "text/markdown"],
    ];
    for (const [input, want] of cases) {
      expect(mimeFromExtension(input)).toBe(want);
    }
  });
  it("falls back to application/octet-stream for unknown extensions", () => {
    expect(mimeFromExtension("blob.xyz")).toBe("application/octet-stream");
    // No extension at all also falls through to the generic type.
    expect(mimeFromExtension("Makefile")).toBe("application/octet-stream");
  });
  it("is case-insensitive on the extension", () => {
    expect(mimeFromExtension("foo.PNG")).toBe("image/png");
    expect(mimeFromExtension("foo.WeBp")).toBe("image/webp");
  });
});
describe("MIME constants", () => {
  it("EXTENSION_MIME includes the v1 image set", () => {
    expect(EXTENSION_MIME[".png"]).toBe("image/png");
    expect(EXTENSION_MIME[".jpg"]).toBe("image/jpeg");
    expect(EXTENSION_MIME[".jpeg"]).toBe("image/jpeg");
    expect(EXTENSION_MIME[".webp"]).toBe("image/webp");
    expect(EXTENSION_MIME[".gif"]).toBe("image/gif");
  });
  it("IMAGE_MIME_INLINE_SET is the inline-renderable image set", () => {
    for (const mime of ["image/png", "image/jpeg", "image/webp", "image/gif"]) {
      expect(IMAGE_MIME_INLINE_SET.has(mime)).toBe(true);
    }
    // heic/heif intentionally excluded
    expect(IMAGE_MIME_INLINE_SET.has("image/heic")).toBe(false);
    expect(IMAGE_MIME_INLINE_SET.has("image/heif")).toBe(false);
  });
  it("TEXT_INLINE_MIME_SET covers small-text inlining types", () => {
    for (const mime of ["text/plain", "text/markdown", "application/json", "text/csv"]) {
      expect(TEXT_INLINE_MIME_SET.has(mime)).toBe(true);
    }
  });
  it("TEXT_INLINE_MAX_BYTES is the documented 8KB cap", () => {
    expect(TEXT_INLINE_MAX_BYTES).toBe(8 * 1024);
  });
});

View File

@@ -0,0 +1,280 @@
import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
// Mock the plugin-sdk config-runtime surface so we can drive the policy
// reader from the test without booting a gateway. mutateConfigFile is also
// mocked so persistAllowAlways tests can assert what would have been written
// without touching ~/.openclaw/openclaw.json.
const getRuntimeConfigMock = vi.fn();
const mutateConfigFileMock = vi.fn();
vi.mock("openclaw/plugin-sdk/config-runtime", () => ({
  getRuntimeConfig: () => getRuntimeConfigMock(),
  mutateConfigFile: (input: unknown) => mutateConfigFileMock(input),
}));
// Imported AFTER vi.mock so the mocked module is what policy.ts binds to.
// (vitest hoists vi.mock calls to the top of the file; the top-level-await
// dynamic import is what defers policy.ts evaluation until the mock
// registry is in place — do not convert this to a static import.)
const { evaluateFilePolicy, persistAllowAlways } = await import("./policy.js");
beforeEach(() => {
  // Fresh call history per test so assertions never see cross-test state.
  getRuntimeConfigMock.mockReset();
  mutateConfigFileMock.mockReset();
});
afterEach(() => {
  vi.restoreAllMocks();
});
// Prime getRuntimeConfig with a gateway config containing the given
// fileTransfer block, or with an empty config when undefined is passed.
function withConfig(fileTransfer: Record<string, unknown> | undefined) {
  const config =
    fileTransfer === undefined ? {} : { gateway: { nodes: { fileTransfer } } };
  getRuntimeConfigMock.mockReturnValue(config);
}
describe("evaluateFilePolicy — default deny", () => {
  it("returns NO_POLICY when no gateway block is present", () => {
    getRuntimeConfigMock.mockReturnValue({});
    const decision = evaluateFilePolicy({ nodeId: "n1", kind: "read", path: "/tmp/x" });
    expect(decision).toMatchObject({ ok: false, code: "NO_POLICY", askable: false });
  });
  it("returns NO_POLICY when fileTransfer block is missing", () => {
    getRuntimeConfigMock.mockReturnValue({ gateway: { nodes: {} } });
    const decision = evaluateFilePolicy({ nodeId: "n1", kind: "read", path: "/tmp/x" });
    expect(decision).toMatchObject({ ok: false, code: "NO_POLICY" });
  });
  it("returns NO_POLICY when no entry exists for the node and no '*' fallback", () => {
    withConfig({ "other-node": { allowReadPaths: ["/tmp/**"] } });
    const decision = evaluateFilePolicy({ nodeId: "n1", kind: "read", path: "/tmp/x" });
    expect(decision).toMatchObject({ ok: false, code: "NO_POLICY" });
  });
});
describe("evaluateFilePolicy — denyPaths always wins", () => {
  it("denies even when allowReadPaths matches", () => {
    withConfig({
      n1: {
        allowReadPaths: ["/tmp/**"],
        denyPaths: ["**/.ssh/**"],
      },
    });
    const decision = evaluateFilePolicy({
      nodeId: "n1",
      kind: "read",
      path: "/tmp/.ssh/id_rsa",
    });
    expect(decision).toMatchObject({ ok: false, code: "POLICY_DENIED", askable: false });
    // The reason string should name the deny rule as the cause.
    expect(decision.ok ? "" : decision.reason).toMatch(/deny/);
  });
  it("denies even with ask=always (denyPaths is hard)", () => {
    withConfig({
      n1: {
        ask: "always",
        denyPaths: ["**/secrets/**"],
      },
    });
    const decision = evaluateFilePolicy({
      nodeId: "n1",
      kind: "read",
      path: "/var/secrets/api.key",
    });
    expect(decision).toMatchObject({ ok: false, code: "POLICY_DENIED", askable: false });
  });
});
describe("evaluateFilePolicy — allow matching", () => {
  it("allows on matched-allow with ask=off (default)", () => {
    withConfig({ n1: { allowReadPaths: ["/tmp/**"] } });
    const decision = evaluateFilePolicy({ nodeId: "n1", kind: "read", path: "/tmp/foo/bar.png" });
    expect(decision).toEqual({ ok: true, reason: "matched-allow", maxBytes: undefined });
  });
  it("propagates per-node maxBytes on matched-allow", () => {
    withConfig({ n1: { allowReadPaths: ["/tmp/**"], maxBytes: 1024 } });
    const decision = evaluateFilePolicy({ nodeId: "n1", kind: "read", path: "/tmp/x" });
    expect(decision).toMatchObject({ ok: true, maxBytes: 1024 });
  });
  it("uses kind=write to consult allowWritePaths, not allowReadPaths", () => {
    withConfig({ n1: { allowReadPaths: ["/tmp/**"], allowWritePaths: ["/srv/**"] } });
    const allowed = evaluateFilePolicy({ nodeId: "n1", kind: "write", path: "/srv/out.txt" });
    expect(allowed).toMatchObject({ ok: true });
    const denied = evaluateFilePolicy({ nodeId: "n1", kind: "write", path: "/tmp/out.txt" });
    expect(denied).toMatchObject({ ok: false, code: "POLICY_DENIED" });
  });
  it("expands tilde in patterns relative to homedir", () => {
    const home = os.homedir();
    withConfig({ n1: { allowReadPaths: ["~/Screenshots/**"] } });
    const decision = evaluateFilePolicy({
      nodeId: "n1",
      kind: "read",
      path: path.join(home, "Screenshots", "shot.png"),
    });
    expect(decision).toMatchObject({ ok: true });
  });
});
describe("evaluateFilePolicy — ask modes", () => {
  it("ask=on-miss returns askable POLICY_DENIED on miss", () => {
    withConfig({ n1: { ask: "on-miss", allowReadPaths: ["/var/log/**"] } });
    const decision = evaluateFilePolicy({ nodeId: "n1", kind: "read", path: "/tmp/x" });
    expect(decision).toMatchObject({
      ok: false,
      code: "POLICY_DENIED",
      askable: true,
      askMode: "on-miss",
    });
  });
  it("ask=on-miss still silent-allows on a match", () => {
    withConfig({ n1: { ask: "on-miss", allowReadPaths: ["/tmp/**"] } });
    const decision = evaluateFilePolicy({ nodeId: "n1", kind: "read", path: "/tmp/x" });
    expect(decision).toMatchObject({ ok: true, reason: "matched-allow" });
  });
  it("ask=always always returns ask-always (prompt on every call)", () => {
    withConfig({ n1: { ask: "always", allowReadPaths: ["/tmp/**"] } });
    const decision = evaluateFilePolicy({ nodeId: "n1", kind: "read", path: "/tmp/x" });
    expect(decision).toMatchObject({ ok: true, reason: "ask-always", askMode: "always" });
  });
  it("ask=off returns non-askable POLICY_DENIED on miss", () => {
    withConfig({ n1: { ask: "off", allowReadPaths: ["/var/log/**"] } });
    const decision = evaluateFilePolicy({ nodeId: "n1", kind: "read", path: "/tmp/x" });
    expect(decision).toMatchObject({ ok: false, code: "POLICY_DENIED", askable: false });
  });
  it("invalid ask values normalize to off", () => {
    withConfig({ n1: { ask: "sometimes", allowReadPaths: ["/var/log/**"] } });
    const decision = evaluateFilePolicy({ nodeId: "n1", kind: "read", path: "/tmp/x" });
    expect(decision).toMatchObject({ ok: false, askable: false });
  });
});
describe("evaluateFilePolicy — node-id resolution", () => {
  it("resolves by displayName when nodeId has no entry", () => {
    // Config keyed by human-readable display name, not the opaque node id.
    withConfig({ "Lobster MacBook": { allowReadPaths: ["/tmp/**"] } });
    const verdict = evaluateFilePolicy({
      nodeId: "node-abc-123",
      nodeDisplayName: "Lobster MacBook",
      kind: "read",
      path: "/tmp/x",
    });
    expect(verdict).toMatchObject({ ok: true });
  });

  it("falls back to '*' wildcard when neither id nor displayName matches", () => {
    withConfig({ "*": { allowReadPaths: ["/tmp/**"] } });
    const verdict = evaluateFilePolicy({
      nodeId: "n1",
      nodeDisplayName: "anything",
      kind: "read",
      path: "/tmp/x",
    });
    expect(verdict).toMatchObject({ ok: true });
  });
});
describe("persistAllowAlways", () => {
  // Installs a mutateConfigFile mock that hands `seed` to the mutator and
  // records the post-mutation draft; returns a holder the test can inspect.
  const installDraft = (seed: Record<string, unknown>) => {
    const holder: { draft: Record<string, unknown> | null } = { draft: null };
    mutateConfigFileMock.mockImplementation(
      async ({ mutate }: { mutate: (draft: Record<string, unknown>) => void }) => {
        mutate(seed);
        holder.draft = seed;
      },
    );
    return holder;
  };

  it("appends path to allowReadPaths under the existing matching key", async () => {
    const holder = installDraft({
      gateway: { nodes: { fileTransfer: { n1: { allowReadPaths: ["/tmp/**"] } } } },
    });
    await persistAllowAlways({ nodeId: "n1", kind: "read", path: "/srv/added.png" });
    expect(mutateConfigFileMock).toHaveBeenCalledOnce();
    // Drill back into the captured draft to assert the added path.
    const root = holder.draft as unknown as {
      gateway: { nodes: { fileTransfer: Record<string, { allowReadPaths: string[] }> } };
    };
    expect(root.gateway.nodes.fileTransfer.n1.allowReadPaths).toContain("/srv/added.png");
  });

  it("creates a new node entry keyed by displayName when no entry exists", async () => {
    // Empty draft: persistAllowAlways must build the whole gateway.nodes subtree.
    const holder = installDraft({});
    await persistAllowAlways({
      nodeId: "n1",
      nodeDisplayName: "Lobster",
      kind: "write",
      path: "/srv/out.txt",
    });
    const root = holder.draft as unknown as {
      gateway: { nodes: { fileTransfer: Record<string, { allowWritePaths: string[] }> } };
    };
    expect(root.gateway.nodes.fileTransfer["Lobster"].allowWritePaths).toContain("/srv/out.txt");
  });

  it("dedupes when path already present", async () => {
    const holder = installDraft({
      gateway: { nodes: { fileTransfer: { n1: { allowReadPaths: ["/tmp/x"] } } } },
    });
    await persistAllowAlways({ nodeId: "n1", kind: "read", path: "/tmp/x" });
    const root = holder.draft as unknown as {
      gateway: { nodes: { fileTransfer: Record<string, { allowReadPaths: string[] }> } };
    };
    const entries = root.gateway.nodes.fileTransfer.n1.allowReadPaths;
    const occurrences = entries.filter((entry) => entry === "/tmp/x").length;
    expect(occurrences).toBe(1);
  });
});

View File

@@ -259,7 +259,7 @@ export async function persistAllowAlways(input: {
fileTransfer[key] = {};
}
const entry = fileTransfer[key];
const list = Array.isArray(entry[field]) ? (entry[field] as string[]) : [];
const list = Array.isArray(entry[field]) ? entry[field] : [];
if (!list.includes(input.path)) {
list.push(input.path);
}

View File

@@ -70,11 +70,10 @@ export function createFileWriteTool(): AnyAgentTool {
"Write file bytes to a paired node by absolute path. Atomic write (temp + rename). Refuses to overwrite by default — pass overwrite=true to replace. Refuses to write through symlink targets (the node will reject if the path resolves to a symlink). Pair with file_fetch to round-trip a file from one node to another: file_fetch returns base64 in the image content block (.data) and as inline content for small text — pass that base64 directly as contentBase64 here. DO NOT use exec/cp/system.run for file copies; this tool IS the same-machine copy. Requires operator opt-in: gateway.nodes.allowCommands must include 'file.write' AND gateway.nodes.fileTransfer.<node>.allowWritePaths must match the destination path. Without policy configured, every call is denied.",
parameters: FILE_WRITE_SCHEMA,
async execute(_toolCallId, params) {
const raw = (
const raw: Record<string, unknown> =
params && typeof params === "object" && !Array.isArray(params)
? (params as Record<string, unknown>)
: {}
) as Record<string, unknown>;
: {};
const nodeQuery = readTrimmedString(raw, "node");
const filePath = readTrimmedString(raw, "path");

6
pnpm-lock.yaml generated
View File

@@ -592,6 +592,12 @@ importers:
specifier: workspace:*
version: link:../..
extensions/file-transfer:
devDependencies:
'@openclaw/plugin-sdk':
specifier: workspace:*
version: link:../../packages/plugin-sdk
extensions/firecrawl:
dependencies:
typebox: