mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-03 18:10:24 +00:00
Merge branch 'main' into dashboard-v2-views-refactor
This commit is contained in:
143
src/agents/sandbox/fs-bridge-mutation-helper.test.ts
Normal file
143
src/agents/sandbox/fs-bridge-mutation-helper.test.ts
Normal file
@@ -0,0 +1,143 @@
|
||||
import { spawnSync } from "node:child_process";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { SANDBOX_PINNED_MUTATION_PYTHON } from "./fs-bridge-mutation-helper.js";
|
||||
|
||||
async function withTempRoot<T>(prefix: string, run: (root: string) => Promise<T>): Promise<T> {
|
||||
const root = await fs.mkdtemp(path.join(os.tmpdir(), prefix));
|
||||
try {
|
||||
return await run(root);
|
||||
} finally {
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
function runMutation(args: string[], input?: string) {
|
||||
return spawnSync("python3", ["-c", SANDBOX_PINNED_MUTATION_PYTHON, ...args], {
|
||||
input,
|
||||
encoding: "utf8",
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
});
|
||||
}
|
||||
|
||||
describe("sandbox pinned mutation helper", () => {
|
||||
it("writes through a pinned directory fd", async () => {
|
||||
await withTempRoot("openclaw-mutation-helper-", async (root) => {
|
||||
const workspace = path.join(root, "workspace");
|
||||
await fs.mkdir(workspace, { recursive: true });
|
||||
|
||||
const result = runMutation(["write", workspace, "nested/deeper", "note.txt", "1"], "hello");
|
||||
|
||||
expect(result.status).toBe(0);
|
||||
await expect(
|
||||
fs.readFile(path.join(workspace, "nested", "deeper", "note.txt"), "utf8"),
|
||||
).resolves.toBe("hello");
|
||||
});
|
||||
});
|
||||
|
||||
it.runIf(process.platform !== "win32")(
|
||||
"rejects symlink-parent writes instead of materializing a temp file outside the mount",
|
||||
async () => {
|
||||
await withTempRoot("openclaw-mutation-helper-", async (root) => {
|
||||
const workspace = path.join(root, "workspace");
|
||||
const outside = path.join(root, "outside");
|
||||
await fs.mkdir(workspace, { recursive: true });
|
||||
await fs.mkdir(outside, { recursive: true });
|
||||
await fs.symlink(outside, path.join(workspace, "alias"));
|
||||
|
||||
const result = runMutation(["write", workspace, "alias", "escape.txt", "0"], "owned");
|
||||
|
||||
expect(result.status).not.toBe(0);
|
||||
await expect(fs.readFile(path.join(outside, "escape.txt"), "utf8")).rejects.toThrow();
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
it.runIf(process.platform !== "win32")("rejects symlink segments during mkdirp", async () => {
|
||||
await withTempRoot("openclaw-mutation-helper-", async (root) => {
|
||||
const workspace = path.join(root, "workspace");
|
||||
const outside = path.join(root, "outside");
|
||||
await fs.mkdir(workspace, { recursive: true });
|
||||
await fs.mkdir(outside, { recursive: true });
|
||||
await fs.symlink(outside, path.join(workspace, "alias"));
|
||||
|
||||
const result = runMutation(["mkdirp", workspace, "alias/nested"]);
|
||||
|
||||
expect(result.status).not.toBe(0);
|
||||
await expect(fs.readFile(path.join(outside, "nested"), "utf8")).rejects.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
it.runIf(process.platform !== "win32")("remove unlinks the symlink itself", async () => {
|
||||
await withTempRoot("openclaw-mutation-helper-", async (root) => {
|
||||
const workspace = path.join(root, "workspace");
|
||||
const outside = path.join(root, "outside");
|
||||
await fs.mkdir(workspace, { recursive: true });
|
||||
await fs.mkdir(outside, { recursive: true });
|
||||
await fs.writeFile(path.join(outside, "secret.txt"), "classified", "utf8");
|
||||
await fs.symlink(path.join(outside, "secret.txt"), path.join(workspace, "link.txt"));
|
||||
|
||||
const result = runMutation(["remove", workspace, "", "link.txt", "0", "0"]);
|
||||
|
||||
expect(result.status).toBe(0);
|
||||
await expect(fs.readlink(path.join(workspace, "link.txt"))).rejects.toThrow();
|
||||
await expect(fs.readFile(path.join(outside, "secret.txt"), "utf8")).resolves.toBe(
|
||||
"classified",
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it.runIf(process.platform !== "win32")(
|
||||
"rejects symlink destination parents during rename",
|
||||
async () => {
|
||||
await withTempRoot("openclaw-mutation-helper-", async (root) => {
|
||||
const workspace = path.join(root, "workspace");
|
||||
const outside = path.join(root, "outside");
|
||||
await fs.mkdir(workspace, { recursive: true });
|
||||
await fs.mkdir(outside, { recursive: true });
|
||||
await fs.writeFile(path.join(workspace, "from.txt"), "payload", "utf8");
|
||||
await fs.symlink(outside, path.join(workspace, "alias"));
|
||||
|
||||
const result = runMutation([
|
||||
"rename",
|
||||
workspace,
|
||||
"",
|
||||
"from.txt",
|
||||
workspace,
|
||||
"alias",
|
||||
"escape.txt",
|
||||
"1",
|
||||
]);
|
||||
|
||||
expect(result.status).not.toBe(0);
|
||||
await expect(fs.readFile(path.join(workspace, "from.txt"), "utf8")).resolves.toBe(
|
||||
"payload",
|
||||
);
|
||||
await expect(fs.readFile(path.join(outside, "escape.txt"), "utf8")).rejects.toThrow();
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
it.runIf(process.platform !== "win32")(
|
||||
"copies directories across different mount roots during rename fallback",
|
||||
async () => {
|
||||
await withTempRoot("openclaw-mutation-helper-", async (root) => {
|
||||
const sourceRoot = path.join(root, "source");
|
||||
const destRoot = path.join(root, "dest");
|
||||
await fs.mkdir(path.join(sourceRoot, "dir", "nested"), { recursive: true });
|
||||
await fs.mkdir(destRoot, { recursive: true });
|
||||
await fs.writeFile(path.join(sourceRoot, "dir", "nested", "file.txt"), "payload", "utf8");
|
||||
|
||||
const result = runMutation(["rename", sourceRoot, "", "dir", destRoot, "", "moved", "1"]);
|
||||
|
||||
expect(result.status).toBe(0);
|
||||
await expect(
|
||||
fs.readFile(path.join(destRoot, "moved", "nested", "file.txt"), "utf8"),
|
||||
).resolves.toBe("payload");
|
||||
await expect(fs.stat(path.join(sourceRoot, "dir"))).rejects.toThrow();
|
||||
});
|
||||
},
|
||||
);
|
||||
});
|
||||
347
src/agents/sandbox/fs-bridge-mutation-helper.ts
Normal file
347
src/agents/sandbox/fs-bridge-mutation-helper.ts
Normal file
@@ -0,0 +1,347 @@
|
||||
import { PATH_ALIAS_POLICIES } from "../../infra/path-alias-guards.js";
|
||||
import type {
|
||||
PathSafetyCheck,
|
||||
PinnedSandboxDirectoryEntry,
|
||||
PinnedSandboxEntry,
|
||||
} from "./fs-bridge-path-safety.js";
|
||||
import type { SandboxFsCommandPlan } from "./fs-bridge-shell-command-plans.js";
|
||||
|
||||
export const SANDBOX_PINNED_MUTATION_PYTHON = [
|
||||
"import errno",
|
||||
"import os",
|
||||
"import secrets",
|
||||
"import stat",
|
||||
"import sys",
|
||||
"",
|
||||
"operation = sys.argv[1]",
|
||||
"",
|
||||
"DIR_FLAGS = os.O_RDONLY",
|
||||
"if hasattr(os, 'O_DIRECTORY'):",
|
||||
" DIR_FLAGS |= os.O_DIRECTORY",
|
||||
"if hasattr(os, 'O_NOFOLLOW'):",
|
||||
" DIR_FLAGS |= os.O_NOFOLLOW",
|
||||
"",
|
||||
"READ_FLAGS = os.O_RDONLY",
|
||||
"if hasattr(os, 'O_NOFOLLOW'):",
|
||||
" READ_FLAGS |= os.O_NOFOLLOW",
|
||||
"",
|
||||
"WRITE_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_EXCL",
|
||||
"if hasattr(os, 'O_NOFOLLOW'):",
|
||||
" WRITE_FLAGS |= os.O_NOFOLLOW",
|
||||
"",
|
||||
"def split_relative(path_value):",
|
||||
" segments = []",
|
||||
" for segment in path_value.split('/'):",
|
||||
" if not segment or segment == '.':",
|
||||
" continue",
|
||||
" if segment == '..':",
|
||||
" raise OSError(errno.EPERM, 'path traversal is not allowed', segment)",
|
||||
" segments.append(segment)",
|
||||
" return segments",
|
||||
"",
|
||||
"def open_dir(path_value, dir_fd=None):",
|
||||
" return os.open(path_value, DIR_FLAGS, dir_fd=dir_fd)",
|
||||
"",
|
||||
"def walk_dir(root_fd, rel_path, mkdir_enabled):",
|
||||
" current_fd = os.dup(root_fd)",
|
||||
" try:",
|
||||
" for segment in split_relative(rel_path):",
|
||||
" try:",
|
||||
" next_fd = open_dir(segment, dir_fd=current_fd)",
|
||||
" except FileNotFoundError:",
|
||||
" if not mkdir_enabled:",
|
||||
" raise",
|
||||
" os.mkdir(segment, 0o777, dir_fd=current_fd)",
|
||||
" next_fd = open_dir(segment, dir_fd=current_fd)",
|
||||
" os.close(current_fd)",
|
||||
" current_fd = next_fd",
|
||||
" return current_fd",
|
||||
" except Exception:",
|
||||
" os.close(current_fd)",
|
||||
" raise",
|
||||
"",
|
||||
"def create_temp_file(parent_fd, basename):",
|
||||
" prefix = '.openclaw-write-' + basename + '.'",
|
||||
" for _ in range(128):",
|
||||
" candidate = prefix + secrets.token_hex(6)",
|
||||
" try:",
|
||||
" fd = os.open(candidate, WRITE_FLAGS, 0o600, dir_fd=parent_fd)",
|
||||
" return candidate, fd",
|
||||
" except FileExistsError:",
|
||||
" continue",
|
||||
" raise RuntimeError('failed to allocate sandbox temp file')",
|
||||
"",
|
||||
"def create_temp_dir(parent_fd, basename, mode):",
|
||||
" prefix = '.openclaw-move-' + basename + '.'",
|
||||
" for _ in range(128):",
|
||||
" candidate = prefix + secrets.token_hex(6)",
|
||||
" try:",
|
||||
" os.mkdir(candidate, mode, dir_fd=parent_fd)",
|
||||
" return candidate",
|
||||
" except FileExistsError:",
|
||||
" continue",
|
||||
" raise RuntimeError('failed to allocate sandbox temp directory')",
|
||||
"",
|
||||
"def write_atomic(parent_fd, basename, stdin_buffer):",
|
||||
" temp_fd = None",
|
||||
" temp_name = None",
|
||||
" try:",
|
||||
" temp_name, temp_fd = create_temp_file(parent_fd, basename)",
|
||||
" while True:",
|
||||
" chunk = stdin_buffer.read(65536)",
|
||||
" if not chunk:",
|
||||
" break",
|
||||
" os.write(temp_fd, chunk)",
|
||||
" os.fsync(temp_fd)",
|
||||
" os.close(temp_fd)",
|
||||
" temp_fd = None",
|
||||
" os.replace(temp_name, basename, src_dir_fd=parent_fd, dst_dir_fd=parent_fd)",
|
||||
" temp_name = None",
|
||||
" os.fsync(parent_fd)",
|
||||
" finally:",
|
||||
" if temp_fd is not None:",
|
||||
" os.close(temp_fd)",
|
||||
" if temp_name is not None:",
|
||||
" try:",
|
||||
" os.unlink(temp_name, dir_fd=parent_fd)",
|
||||
" except FileNotFoundError:",
|
||||
" pass",
|
||||
"",
|
||||
"def remove_tree(parent_fd, basename):",
|
||||
" entry_stat = os.lstat(basename, dir_fd=parent_fd)",
|
||||
" if not stat.S_ISDIR(entry_stat.st_mode) or stat.S_ISLNK(entry_stat.st_mode):",
|
||||
" os.unlink(basename, dir_fd=parent_fd)",
|
||||
" return",
|
||||
" dir_fd = open_dir(basename, dir_fd=parent_fd)",
|
||||
" try:",
|
||||
" for child in os.listdir(dir_fd):",
|
||||
" remove_tree(dir_fd, child)",
|
||||
" finally:",
|
||||
" os.close(dir_fd)",
|
||||
" os.rmdir(basename, dir_fd=parent_fd)",
|
||||
"",
|
||||
"def move_entry(src_parent_fd, src_basename, dst_parent_fd, dst_basename):",
|
||||
" try:",
|
||||
" os.rename(src_basename, dst_basename, src_dir_fd=src_parent_fd, dst_dir_fd=dst_parent_fd)",
|
||||
" os.fsync(dst_parent_fd)",
|
||||
" os.fsync(src_parent_fd)",
|
||||
" return",
|
||||
" except OSError as err:",
|
||||
" if err.errno != errno.EXDEV:",
|
||||
" raise",
|
||||
" src_stat = os.lstat(src_basename, dir_fd=src_parent_fd)",
|
||||
" if stat.S_ISDIR(src_stat.st_mode) and not stat.S_ISLNK(src_stat.st_mode):",
|
||||
" temp_dir_name = create_temp_dir(dst_parent_fd, dst_basename, stat.S_IMODE(src_stat.st_mode) or 0o755)",
|
||||
" temp_dir_fd = open_dir(temp_dir_name, dir_fd=dst_parent_fd)",
|
||||
" src_dir_fd = open_dir(src_basename, dir_fd=src_parent_fd)",
|
||||
" try:",
|
||||
" for child in os.listdir(src_dir_fd):",
|
||||
" move_entry(src_dir_fd, child, temp_dir_fd, child)",
|
||||
" finally:",
|
||||
" os.close(src_dir_fd)",
|
||||
" os.close(temp_dir_fd)",
|
||||
" os.rename(temp_dir_name, dst_basename, src_dir_fd=dst_parent_fd, dst_dir_fd=dst_parent_fd)",
|
||||
" os.rmdir(src_basename, dir_fd=src_parent_fd)",
|
||||
" os.fsync(dst_parent_fd)",
|
||||
" os.fsync(src_parent_fd)",
|
||||
" return",
|
||||
" if stat.S_ISLNK(src_stat.st_mode):",
|
||||
" link_target = os.readlink(src_basename, dir_fd=src_parent_fd)",
|
||||
" try:",
|
||||
" os.unlink(dst_basename, dir_fd=dst_parent_fd)",
|
||||
" except FileNotFoundError:",
|
||||
" pass",
|
||||
" os.symlink(link_target, dst_basename, dir_fd=dst_parent_fd)",
|
||||
" os.unlink(src_basename, dir_fd=src_parent_fd)",
|
||||
" os.fsync(dst_parent_fd)",
|
||||
" os.fsync(src_parent_fd)",
|
||||
" return",
|
||||
" src_fd = os.open(src_basename, READ_FLAGS, dir_fd=src_parent_fd)",
|
||||
" temp_fd = None",
|
||||
" temp_name = None",
|
||||
" try:",
|
||||
" temp_name, temp_fd = create_temp_file(dst_parent_fd, dst_basename)",
|
||||
" while True:",
|
||||
" chunk = os.read(src_fd, 65536)",
|
||||
" if not chunk:",
|
||||
" break",
|
||||
" os.write(temp_fd, chunk)",
|
||||
" try:",
|
||||
" os.fchmod(temp_fd, stat.S_IMODE(src_stat.st_mode))",
|
||||
" except AttributeError:",
|
||||
" pass",
|
||||
" os.fsync(temp_fd)",
|
||||
" os.close(temp_fd)",
|
||||
" temp_fd = None",
|
||||
" os.replace(temp_name, dst_basename, src_dir_fd=dst_parent_fd, dst_dir_fd=dst_parent_fd)",
|
||||
" temp_name = None",
|
||||
" os.unlink(src_basename, dir_fd=src_parent_fd)",
|
||||
" os.fsync(dst_parent_fd)",
|
||||
" os.fsync(src_parent_fd)",
|
||||
" finally:",
|
||||
" if temp_fd is not None:",
|
||||
" os.close(temp_fd)",
|
||||
" if temp_name is not None:",
|
||||
" try:",
|
||||
" os.unlink(temp_name, dir_fd=dst_parent_fd)",
|
||||
" except FileNotFoundError:",
|
||||
" pass",
|
||||
" os.close(src_fd)",
|
||||
"",
|
||||
"if operation == 'write':",
|
||||
" root_fd = open_dir(sys.argv[2])",
|
||||
" parent_fd = None",
|
||||
" try:",
|
||||
" parent_fd = walk_dir(root_fd, sys.argv[3], sys.argv[5] == '1')",
|
||||
" write_atomic(parent_fd, sys.argv[4], sys.stdin.buffer)",
|
||||
" finally:",
|
||||
" if parent_fd is not None:",
|
||||
" os.close(parent_fd)",
|
||||
" os.close(root_fd)",
|
||||
"elif operation == 'mkdirp':",
|
||||
" root_fd = open_dir(sys.argv[2])",
|
||||
" target_fd = None",
|
||||
" try:",
|
||||
" target_fd = walk_dir(root_fd, sys.argv[3], True)",
|
||||
" os.fsync(target_fd)",
|
||||
" finally:",
|
||||
" if target_fd is not None:",
|
||||
" os.close(target_fd)",
|
||||
" os.close(root_fd)",
|
||||
"elif operation == 'remove':",
|
||||
" root_fd = open_dir(sys.argv[2])",
|
||||
" parent_fd = None",
|
||||
" try:",
|
||||
" parent_fd = walk_dir(root_fd, sys.argv[3], False)",
|
||||
" try:",
|
||||
" if sys.argv[5] == '1':",
|
||||
" remove_tree(parent_fd, sys.argv[4])",
|
||||
" else:",
|
||||
" entry_stat = os.lstat(sys.argv[4], dir_fd=parent_fd)",
|
||||
" if stat.S_ISDIR(entry_stat.st_mode) and not stat.S_ISLNK(entry_stat.st_mode):",
|
||||
" os.rmdir(sys.argv[4], dir_fd=parent_fd)",
|
||||
" else:",
|
||||
" os.unlink(sys.argv[4], dir_fd=parent_fd)",
|
||||
" os.fsync(parent_fd)",
|
||||
" except FileNotFoundError:",
|
||||
" if sys.argv[6] != '1':",
|
||||
" raise",
|
||||
" finally:",
|
||||
" if parent_fd is not None:",
|
||||
" os.close(parent_fd)",
|
||||
" os.close(root_fd)",
|
||||
"elif operation == 'rename':",
|
||||
" src_root_fd = open_dir(sys.argv[2])",
|
||||
" dst_root_fd = open_dir(sys.argv[5])",
|
||||
" src_parent_fd = None",
|
||||
" dst_parent_fd = None",
|
||||
" try:",
|
||||
" src_parent_fd = walk_dir(src_root_fd, sys.argv[3], False)",
|
||||
" dst_parent_fd = walk_dir(dst_root_fd, sys.argv[6], sys.argv[8] == '1')",
|
||||
" move_entry(src_parent_fd, sys.argv[4], dst_parent_fd, sys.argv[7])",
|
||||
" finally:",
|
||||
" if src_parent_fd is not None:",
|
||||
" os.close(src_parent_fd)",
|
||||
" if dst_parent_fd is not None:",
|
||||
" os.close(dst_parent_fd)",
|
||||
" os.close(src_root_fd)",
|
||||
" os.close(dst_root_fd)",
|
||||
"else:",
|
||||
" raise RuntimeError('unknown sandbox mutation operation: ' + operation)",
|
||||
].join("\n");
|
||||
|
||||
function buildPinnedMutationPlan(params: {
|
||||
args: string[];
|
||||
checks: PathSafetyCheck[];
|
||||
}): SandboxFsCommandPlan {
|
||||
return {
|
||||
checks: params.checks,
|
||||
recheckBeforeCommand: true,
|
||||
script: ["set -eu", "python3 - \"$@\" <<'PY'", SANDBOX_PINNED_MUTATION_PYTHON, "PY"].join("\n"),
|
||||
args: params.args,
|
||||
};
|
||||
}
|
||||
|
||||
export function buildPinnedWritePlan(params: {
|
||||
check: PathSafetyCheck;
|
||||
pinned: PinnedSandboxEntry;
|
||||
mkdir: boolean;
|
||||
}): SandboxFsCommandPlan {
|
||||
return buildPinnedMutationPlan({
|
||||
checks: [params.check],
|
||||
args: [
|
||||
"write",
|
||||
params.pinned.mountRootPath,
|
||||
params.pinned.relativeParentPath,
|
||||
params.pinned.basename,
|
||||
params.mkdir ? "1" : "0",
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
export function buildPinnedMkdirpPlan(params: {
|
||||
check: PathSafetyCheck;
|
||||
pinned: PinnedSandboxDirectoryEntry;
|
||||
}): SandboxFsCommandPlan {
|
||||
return buildPinnedMutationPlan({
|
||||
checks: [params.check],
|
||||
args: ["mkdirp", params.pinned.mountRootPath, params.pinned.relativePath],
|
||||
});
|
||||
}
|
||||
|
||||
export function buildPinnedRemovePlan(params: {
|
||||
check: PathSafetyCheck;
|
||||
pinned: PinnedSandboxEntry;
|
||||
recursive?: boolean;
|
||||
force?: boolean;
|
||||
}): SandboxFsCommandPlan {
|
||||
return buildPinnedMutationPlan({
|
||||
checks: [
|
||||
{
|
||||
target: params.check.target,
|
||||
options: {
|
||||
...params.check.options,
|
||||
aliasPolicy: PATH_ALIAS_POLICIES.unlinkTarget,
|
||||
},
|
||||
},
|
||||
],
|
||||
args: [
|
||||
"remove",
|
||||
params.pinned.mountRootPath,
|
||||
params.pinned.relativeParentPath,
|
||||
params.pinned.basename,
|
||||
params.recursive ? "1" : "0",
|
||||
params.force === false ? "0" : "1",
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
export function buildPinnedRenamePlan(params: {
|
||||
fromCheck: PathSafetyCheck;
|
||||
toCheck: PathSafetyCheck;
|
||||
from: PinnedSandboxEntry;
|
||||
to: PinnedSandboxEntry;
|
||||
}): SandboxFsCommandPlan {
|
||||
return buildPinnedMutationPlan({
|
||||
checks: [
|
||||
{
|
||||
target: params.fromCheck.target,
|
||||
options: {
|
||||
...params.fromCheck.options,
|
||||
aliasPolicy: PATH_ALIAS_POLICIES.unlinkTarget,
|
||||
},
|
||||
},
|
||||
params.toCheck,
|
||||
],
|
||||
args: [
|
||||
"rename",
|
||||
params.from.mountRootPath,
|
||||
params.from.relativeParentPath,
|
||||
params.from.basename,
|
||||
params.to.mountRootPath,
|
||||
params.to.relativeParentPath,
|
||||
params.to.basename,
|
||||
"1",
|
||||
],
|
||||
});
|
||||
}
|
||||
190
src/agents/sandbox/fs-bridge-mutation-python-source.ts
Normal file
190
src/agents/sandbox/fs-bridge-mutation-python-source.ts
Normal file
@@ -0,0 +1,190 @@
|
||||
// language=python
|
||||
export const SANDBOX_PINNED_FS_MUTATION_PYTHON = String.raw`import os
|
||||
import secrets
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
operation = sys.argv[1]
|
||||
|
||||
DIR_FLAGS = os.O_RDONLY
|
||||
if hasattr(os, "O_DIRECTORY"):
|
||||
DIR_FLAGS |= os.O_DIRECTORY
|
||||
if hasattr(os, "O_NOFOLLOW"):
|
||||
DIR_FLAGS |= os.O_NOFOLLOW
|
||||
|
||||
WRITE_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_EXCL
|
||||
if hasattr(os, "O_NOFOLLOW"):
|
||||
WRITE_FLAGS |= os.O_NOFOLLOW
|
||||
|
||||
|
||||
def open_dir(path, dir_fd=None):
|
||||
return os.open(path, DIR_FLAGS, dir_fd=dir_fd)
|
||||
|
||||
|
||||
def walk_parent(root_fd, rel_parent, mkdir_enabled):
|
||||
current_fd = os.dup(root_fd)
|
||||
try:
|
||||
segments = [segment for segment in rel_parent.split("/") if segment and segment != "."]
|
||||
for segment in segments:
|
||||
if segment == "..":
|
||||
raise OSError("path traversal is not allowed")
|
||||
try:
|
||||
next_fd = open_dir(segment, dir_fd=current_fd)
|
||||
except FileNotFoundError:
|
||||
if not mkdir_enabled:
|
||||
raise
|
||||
os.mkdir(segment, 0o777, dir_fd=current_fd)
|
||||
next_fd = open_dir(segment, dir_fd=current_fd)
|
||||
os.close(current_fd)
|
||||
current_fd = next_fd
|
||||
return current_fd
|
||||
except Exception:
|
||||
os.close(current_fd)
|
||||
raise
|
||||
|
||||
|
||||
def create_temp_file(parent_fd, basename):
|
||||
prefix = ".openclaw-write-" + basename + "."
|
||||
for _ in range(128):
|
||||
candidate = prefix + secrets.token_hex(6)
|
||||
try:
|
||||
fd = os.open(candidate, WRITE_FLAGS, 0o600, dir_fd=parent_fd)
|
||||
return candidate, fd
|
||||
except FileExistsError:
|
||||
continue
|
||||
raise RuntimeError("failed to allocate sandbox temp file")
|
||||
|
||||
|
||||
def fd_path(fd, basename=None):
|
||||
base = f"/proc/self/fd/{fd}"
|
||||
if basename is None:
|
||||
return base
|
||||
return f"{base}/{basename}"
|
||||
|
||||
|
||||
def run_command(argv, pass_fds):
|
||||
subprocess.run(argv, check=True, pass_fds=tuple(pass_fds))
|
||||
|
||||
|
||||
def write_stdin_to_fd(fd):
|
||||
while True:
|
||||
chunk = sys.stdin.buffer.read(65536)
|
||||
if not chunk:
|
||||
break
|
||||
os.write(fd, chunk)
|
||||
|
||||
|
||||
def run_write(args):
|
||||
mount_root, relative_parent, basename, mkdir_enabled_raw = args
|
||||
mkdir_enabled = mkdir_enabled_raw == "1"
|
||||
root_fd = open_dir(mount_root)
|
||||
parent_fd = None
|
||||
temp_fd = None
|
||||
temp_name = None
|
||||
try:
|
||||
parent_fd = walk_parent(root_fd, relative_parent, mkdir_enabled)
|
||||
temp_name, temp_fd = create_temp_file(parent_fd, basename)
|
||||
write_stdin_to_fd(temp_fd)
|
||||
os.fsync(temp_fd)
|
||||
os.close(temp_fd)
|
||||
temp_fd = None
|
||||
os.replace(temp_name, basename, src_dir_fd=parent_fd, dst_dir_fd=parent_fd)
|
||||
os.fsync(parent_fd)
|
||||
except Exception:
|
||||
if temp_fd is not None:
|
||||
os.close(temp_fd)
|
||||
temp_fd = None
|
||||
if temp_name is not None and parent_fd is not None:
|
||||
try:
|
||||
os.unlink(temp_name, dir_fd=parent_fd)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
raise
|
||||
finally:
|
||||
if parent_fd is not None:
|
||||
os.close(parent_fd)
|
||||
os.close(root_fd)
|
||||
|
||||
|
||||
def run_mkdirp(args):
|
||||
mount_root, relative_parent, basename = args
|
||||
root_fd = open_dir(mount_root)
|
||||
parent_fd = None
|
||||
try:
|
||||
parent_fd = walk_parent(root_fd, relative_parent, True)
|
||||
run_command(["mkdir", "-p", "--", fd_path(parent_fd, basename)], [parent_fd])
|
||||
os.fsync(parent_fd)
|
||||
finally:
|
||||
if parent_fd is not None:
|
||||
os.close(parent_fd)
|
||||
os.close(root_fd)
|
||||
|
||||
|
||||
def run_remove(args):
|
||||
mount_root, relative_parent, basename, recursive_raw, force_raw = args
|
||||
root_fd = open_dir(mount_root)
|
||||
parent_fd = None
|
||||
try:
|
||||
parent_fd = walk_parent(root_fd, relative_parent, False)
|
||||
argv = ["rm"]
|
||||
if force_raw == "1":
|
||||
argv.append("-f")
|
||||
if recursive_raw == "1":
|
||||
argv.append("-r")
|
||||
argv.extend(["--", fd_path(parent_fd, basename)])
|
||||
run_command(argv, [parent_fd])
|
||||
os.fsync(parent_fd)
|
||||
finally:
|
||||
if parent_fd is not None:
|
||||
os.close(parent_fd)
|
||||
os.close(root_fd)
|
||||
|
||||
|
||||
def run_rename(args):
|
||||
(
|
||||
from_mount_root,
|
||||
from_relative_parent,
|
||||
from_basename,
|
||||
to_mount_root,
|
||||
to_relative_parent,
|
||||
to_basename,
|
||||
) = args
|
||||
from_root_fd = open_dir(from_mount_root)
|
||||
to_root_fd = open_dir(to_mount_root)
|
||||
from_parent_fd = None
|
||||
to_parent_fd = None
|
||||
try:
|
||||
from_parent_fd = walk_parent(from_root_fd, from_relative_parent, False)
|
||||
to_parent_fd = walk_parent(to_root_fd, to_relative_parent, True)
|
||||
run_command(
|
||||
[
|
||||
"mv",
|
||||
"--",
|
||||
fd_path(from_parent_fd, from_basename),
|
||||
fd_path(to_parent_fd, to_basename),
|
||||
],
|
||||
[from_parent_fd, to_parent_fd],
|
||||
)
|
||||
os.fsync(from_parent_fd)
|
||||
if to_parent_fd != from_parent_fd:
|
||||
os.fsync(to_parent_fd)
|
||||
finally:
|
||||
if from_parent_fd is not None:
|
||||
os.close(from_parent_fd)
|
||||
if to_parent_fd is not None:
|
||||
os.close(to_parent_fd)
|
||||
os.close(from_root_fd)
|
||||
os.close(to_root_fd)
|
||||
|
||||
|
||||
OPERATIONS = {
|
||||
"write": run_write,
|
||||
"mkdirp": run_mkdirp,
|
||||
"remove": run_remove,
|
||||
"rename": run_rename,
|
||||
}
|
||||
|
||||
if operation not in OPERATIONS:
|
||||
raise RuntimeError(f"unknown sandbox fs mutation: {operation}")
|
||||
|
||||
OPERATIONS[operation](sys.argv[2:])`;
|
||||
@@ -18,17 +18,17 @@ export type PathSafetyCheck = {
|
||||
options: PathSafetyOptions;
|
||||
};
|
||||
|
||||
export type AnchoredSandboxEntry = {
|
||||
canonicalParentPath: string;
|
||||
basename: string;
|
||||
};
|
||||
|
||||
export type PinnedSandboxWriteEntry = {
|
||||
export type PinnedSandboxEntry = {
|
||||
mountRootPath: string;
|
||||
relativeParentPath: string;
|
||||
basename: string;
|
||||
};
|
||||
|
||||
export type PinnedSandboxDirectoryEntry = {
|
||||
mountRootPath: string;
|
||||
relativePath: string;
|
||||
};
|
||||
|
||||
type RunCommand = (
|
||||
script: string,
|
||||
options?: {
|
||||
@@ -134,23 +134,7 @@ export class SandboxFsPathGuard {
|
||||
return guarded;
|
||||
}
|
||||
|
||||
async resolveAnchoredSandboxEntry(target: SandboxResolvedFsPath): Promise<AnchoredSandboxEntry> {
|
||||
const basename = path.posix.basename(target.containerPath);
|
||||
if (!basename || basename === "." || basename === "/") {
|
||||
throw new Error(`Invalid sandbox entry target: ${target.containerPath}`);
|
||||
}
|
||||
const parentPath = normalizeContainerPath(path.posix.dirname(target.containerPath));
|
||||
const canonicalParentPath = await this.resolveCanonicalContainerPath({
|
||||
containerPath: parentPath,
|
||||
allowFinalSymlinkForUnlink: false,
|
||||
});
|
||||
return {
|
||||
canonicalParentPath,
|
||||
basename,
|
||||
};
|
||||
}
|
||||
|
||||
resolvePinnedWriteEntry(target: SandboxResolvedFsPath, action: string): PinnedSandboxWriteEntry {
|
||||
resolvePinnedEntry(target: SandboxResolvedFsPath, action: string): PinnedSandboxEntry {
|
||||
const basename = path.posix.basename(target.containerPath);
|
||||
if (!basename || basename === "." || basename === "/") {
|
||||
throw new Error(`Invalid sandbox entry target: ${target.containerPath}`);
|
||||
@@ -170,6 +154,23 @@ export class SandboxFsPathGuard {
|
||||
};
|
||||
}
|
||||
|
||||
resolvePinnedDirectoryEntry(
|
||||
target: SandboxResolvedFsPath,
|
||||
action: string,
|
||||
): PinnedSandboxDirectoryEntry {
|
||||
const mount = this.resolveRequiredMount(target.containerPath, action);
|
||||
const relativePath = path.posix.relative(mount.containerRoot, target.containerPath);
|
||||
if (relativePath.startsWith("..") || path.posix.isAbsolute(relativePath)) {
|
||||
throw new Error(
|
||||
`Sandbox path escapes allowed mounts; cannot ${action}: ${target.containerPath}`,
|
||||
);
|
||||
}
|
||||
return {
|
||||
mountRootPath: mount.containerRoot,
|
||||
relativePath: relativePath === "." ? "" : relativePath,
|
||||
};
|
||||
}
|
||||
|
||||
private pathIsExistingDirectory(hostPath: string): boolean {
|
||||
try {
|
||||
return fs.statSync(hostPath).isDirectory();
|
||||
|
||||
@@ -1,95 +1,15 @@
|
||||
import { PATH_ALIAS_POLICIES } from "../../infra/path-alias-guards.js";
|
||||
import type { AnchoredSandboxEntry, PathSafetyCheck } from "./fs-bridge-path-safety.js";
|
||||
import type { PathSafetyCheck } from "./fs-bridge-path-safety.js";
|
||||
import type { SandboxResolvedFsPath } from "./fs-paths.js";
|
||||
|
||||
export type SandboxFsCommandPlan = {
|
||||
checks: PathSafetyCheck[];
|
||||
script: string;
|
||||
args?: string[];
|
||||
stdin?: Buffer | string;
|
||||
recheckBeforeCommand?: boolean;
|
||||
allowFailure?: boolean;
|
||||
};
|
||||
|
||||
export function buildMkdirpPlan(
|
||||
target: SandboxResolvedFsPath,
|
||||
anchoredTarget: AnchoredSandboxEntry,
|
||||
): SandboxFsCommandPlan {
|
||||
return {
|
||||
checks: [
|
||||
{
|
||||
target,
|
||||
options: {
|
||||
action: "create directories",
|
||||
requireWritable: true,
|
||||
allowedType: "directory",
|
||||
},
|
||||
},
|
||||
],
|
||||
script: 'set -eu\ncd -- "$1"\nmkdir -p -- "$2"',
|
||||
args: [anchoredTarget.canonicalParentPath, anchoredTarget.basename],
|
||||
};
|
||||
}
|
||||
|
||||
export function buildRemovePlan(params: {
|
||||
target: SandboxResolvedFsPath;
|
||||
anchoredTarget: AnchoredSandboxEntry;
|
||||
recursive?: boolean;
|
||||
force?: boolean;
|
||||
}): SandboxFsCommandPlan {
|
||||
const flags = [params.force === false ? "" : "-f", params.recursive ? "-r" : ""].filter(Boolean);
|
||||
const rmCommand = flags.length > 0 ? `rm ${flags.join(" ")}` : "rm";
|
||||
return {
|
||||
checks: [
|
||||
{
|
||||
target: params.target,
|
||||
options: {
|
||||
action: "remove files",
|
||||
requireWritable: true,
|
||||
aliasPolicy: PATH_ALIAS_POLICIES.unlinkTarget,
|
||||
},
|
||||
},
|
||||
],
|
||||
recheckBeforeCommand: true,
|
||||
script: `set -eu\ncd -- "$1"\n${rmCommand} -- "$2"`,
|
||||
args: [params.anchoredTarget.canonicalParentPath, params.anchoredTarget.basename],
|
||||
};
|
||||
}
|
||||
|
||||
export function buildRenamePlan(params: {
|
||||
from: SandboxResolvedFsPath;
|
||||
to: SandboxResolvedFsPath;
|
||||
anchoredFrom: AnchoredSandboxEntry;
|
||||
anchoredTo: AnchoredSandboxEntry;
|
||||
}): SandboxFsCommandPlan {
|
||||
return {
|
||||
checks: [
|
||||
{
|
||||
target: params.from,
|
||||
options: {
|
||||
action: "rename files",
|
||||
requireWritable: true,
|
||||
aliasPolicy: PATH_ALIAS_POLICIES.unlinkTarget,
|
||||
},
|
||||
},
|
||||
{
|
||||
target: params.to,
|
||||
options: {
|
||||
action: "rename files",
|
||||
requireWritable: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
recheckBeforeCommand: true,
|
||||
script: ["set -eu", 'mkdir -p -- "$2"', 'cd -- "$1"', 'mv -- "$3" "$2/$4"'].join("\n"),
|
||||
args: [
|
||||
params.anchoredFrom.canonicalParentPath,
|
||||
params.anchoredTo.canonicalParentPath,
|
||||
params.anchoredFrom.basename,
|
||||
params.anchoredTo.basename,
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
export function buildStatPlan(target: SandboxResolvedFsPath): SandboxFsCommandPlan {
|
||||
return {
|
||||
checks: [{ target, options: { action: "stat files" } }],
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
import { spawnSync } from "node:child_process";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { SANDBOX_PINNED_WRITE_PYTHON } from "./fs-bridge-write-helper.js";
|
||||
|
||||
async function withTempRoot<T>(prefix: string, run: (root: string) => Promise<T>): Promise<T> {
|
||||
const root = await fs.mkdtemp(path.join(os.tmpdir(), prefix));
|
||||
try {
|
||||
return await run(root);
|
||||
} finally {
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
function runPinnedWrite(params: {
|
||||
mountRoot: string;
|
||||
relativeParentPath: string;
|
||||
basename: string;
|
||||
mkdir: boolean;
|
||||
input: string;
|
||||
}) {
|
||||
return spawnSync(
|
||||
"python3",
|
||||
[
|
||||
"-c",
|
||||
SANDBOX_PINNED_WRITE_PYTHON,
|
||||
params.mountRoot,
|
||||
params.relativeParentPath,
|
||||
params.basename,
|
||||
params.mkdir ? "1" : "0",
|
||||
],
|
||||
{
|
||||
input: params.input,
|
||||
encoding: "utf8",
|
||||
stdio: ["pipe", "pipe", "pipe"],
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
describe("sandbox pinned write helper", () => {
|
||||
it("creates missing parents and writes through a pinned directory fd", async () => {
|
||||
await withTempRoot("openclaw-write-helper-", async (root) => {
|
||||
const workspace = path.join(root, "workspace");
|
||||
await fs.mkdir(workspace, { recursive: true });
|
||||
|
||||
const result = runPinnedWrite({
|
||||
mountRoot: workspace,
|
||||
relativeParentPath: "nested/deeper",
|
||||
basename: "note.txt",
|
||||
mkdir: true,
|
||||
input: "hello",
|
||||
});
|
||||
|
||||
expect(result.status).toBe(0);
|
||||
await expect(
|
||||
fs.readFile(path.join(workspace, "nested", "deeper", "note.txt"), "utf8"),
|
||||
).resolves.toBe("hello");
|
||||
});
|
||||
});
|
||||
|
||||
it.runIf(process.platform !== "win32")(
|
||||
"rejects symlink-parent writes instead of materializing a temp file outside the mount",
|
||||
async () => {
|
||||
await withTempRoot("openclaw-write-helper-", async (root) => {
|
||||
const workspace = path.join(root, "workspace");
|
||||
const outside = path.join(root, "outside");
|
||||
await fs.mkdir(workspace, { recursive: true });
|
||||
await fs.mkdir(outside, { recursive: true });
|
||||
await fs.symlink(outside, path.join(workspace, "alias"));
|
||||
|
||||
const result = runPinnedWrite({
|
||||
mountRoot: workspace,
|
||||
relativeParentPath: "alias",
|
||||
basename: "escape.txt",
|
||||
mkdir: false,
|
||||
input: "owned",
|
||||
});
|
||||
|
||||
expect(result.status).not.toBe(0);
|
||||
await expect(fs.readFile(path.join(outside, "escape.txt"), "utf8")).rejects.toThrow();
|
||||
});
|
||||
},
|
||||
);
|
||||
});
|
||||
@@ -1,109 +0,0 @@
|
||||
import type { PathSafetyCheck, PinnedSandboxWriteEntry } from "./fs-bridge-path-safety.js";
|
||||
import type { SandboxFsCommandPlan } from "./fs-bridge-shell-command-plans.js";
|
||||
|
||||
export const SANDBOX_PINNED_WRITE_PYTHON = [
|
||||
"import errno",
|
||||
"import os",
|
||||
"import secrets",
|
||||
"import sys",
|
||||
"",
|
||||
"mount_root = sys.argv[1]",
|
||||
"relative_parent = sys.argv[2]",
|
||||
"basename = sys.argv[3]",
|
||||
'mkdir_enabled = sys.argv[4] == "1"',
|
||||
"",
|
||||
"DIR_FLAGS = os.O_RDONLY",
|
||||
"if hasattr(os, 'O_DIRECTORY'):",
|
||||
" DIR_FLAGS |= os.O_DIRECTORY",
|
||||
"if hasattr(os, 'O_NOFOLLOW'):",
|
||||
" DIR_FLAGS |= os.O_NOFOLLOW",
|
||||
"",
|
||||
"WRITE_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_EXCL",
|
||||
"if hasattr(os, 'O_NOFOLLOW'):",
|
||||
" WRITE_FLAGS |= os.O_NOFOLLOW",
|
||||
"",
|
||||
"def open_dir(path, dir_fd=None):",
|
||||
" return os.open(path, DIR_FLAGS, dir_fd=dir_fd)",
|
||||
"",
|
||||
"def walk_parent(root_fd, rel_parent, mkdir_enabled):",
|
||||
" current_fd = os.dup(root_fd)",
|
||||
" try:",
|
||||
" segments = [segment for segment in rel_parent.split('/') if segment and segment != '.']",
|
||||
" for segment in segments:",
|
||||
" if segment == '..':",
|
||||
" raise OSError(errno.EPERM, 'path traversal is not allowed', segment)",
|
||||
" try:",
|
||||
" next_fd = open_dir(segment, dir_fd=current_fd)",
|
||||
" except FileNotFoundError:",
|
||||
" if not mkdir_enabled:",
|
||||
" raise",
|
||||
" os.mkdir(segment, 0o777, dir_fd=current_fd)",
|
||||
" next_fd = open_dir(segment, dir_fd=current_fd)",
|
||||
" os.close(current_fd)",
|
||||
" current_fd = next_fd",
|
||||
" return current_fd",
|
||||
" except Exception:",
|
||||
" os.close(current_fd)",
|
||||
" raise",
|
||||
"",
|
||||
"def create_temp_file(parent_fd, basename):",
|
||||
" prefix = '.openclaw-write-' + basename + '.'",
|
||||
" for _ in range(128):",
|
||||
" candidate = prefix + secrets.token_hex(6)",
|
||||
" try:",
|
||||
" fd = os.open(candidate, WRITE_FLAGS, 0o600, dir_fd=parent_fd)",
|
||||
" return candidate, fd",
|
||||
" except FileExistsError:",
|
||||
" continue",
|
||||
" raise RuntimeError('failed to allocate sandbox temp file')",
|
||||
"",
|
||||
"root_fd = open_dir(mount_root)",
|
||||
"parent_fd = None",
|
||||
"temp_fd = None",
|
||||
"temp_name = None",
|
||||
"try:",
|
||||
" parent_fd = walk_parent(root_fd, relative_parent, mkdir_enabled)",
|
||||
" temp_name, temp_fd = create_temp_file(parent_fd, basename)",
|
||||
" while True:",
|
||||
" chunk = sys.stdin.buffer.read(65536)",
|
||||
" if not chunk:",
|
||||
" break",
|
||||
" os.write(temp_fd, chunk)",
|
||||
" os.fsync(temp_fd)",
|
||||
" os.close(temp_fd)",
|
||||
" temp_fd = None",
|
||||
" os.replace(temp_name, basename, src_dir_fd=parent_fd, dst_dir_fd=parent_fd)",
|
||||
" os.fsync(parent_fd)",
|
||||
"except Exception:",
|
||||
" if temp_fd is not None:",
|
||||
" os.close(temp_fd)",
|
||||
" temp_fd = None",
|
||||
" if temp_name is not None and parent_fd is not None:",
|
||||
" try:",
|
||||
" os.unlink(temp_name, dir_fd=parent_fd)",
|
||||
" except FileNotFoundError:",
|
||||
" pass",
|
||||
" raise",
|
||||
"finally:",
|
||||
" if parent_fd is not None:",
|
||||
" os.close(parent_fd)",
|
||||
" os.close(root_fd)",
|
||||
].join("\n");
|
||||
|
||||
export function buildPinnedWritePlan(params: {
|
||||
check: PathSafetyCheck;
|
||||
pinned: PinnedSandboxWriteEntry;
|
||||
mkdir: boolean;
|
||||
}): SandboxFsCommandPlan & { stdin?: Buffer | string } {
|
||||
return {
|
||||
checks: [params.check],
|
||||
recheckBeforeCommand: true,
|
||||
script: ["set -eu", "python3 - \"$@\" <<'PY'", SANDBOX_PINNED_WRITE_PYTHON, "PY"].join("\n"),
|
||||
args: [
|
||||
params.pinned.mountRootPath,
|
||||
params.pinned.relativeParentPath,
|
||||
params.pinned.basename,
|
||||
params.mkdir ? "1" : "0",
|
||||
],
|
||||
};
|
||||
}
|
||||
@@ -4,8 +4,6 @@ import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
createSandbox,
|
||||
createSandboxFsBridge,
|
||||
findCallByScriptFragment,
|
||||
findCallsByScriptFragment,
|
||||
getDockerArg,
|
||||
installFsBridgeTestHarness,
|
||||
mockedExecDockerRaw,
|
||||
@@ -67,54 +65,60 @@ describe("sandbox fs bridge anchored ops", () => {
|
||||
});
|
||||
});
|
||||
|
||||
const anchoredCases = [
|
||||
const pinnedCases = [
|
||||
{
|
||||
name: "mkdirp anchors parent + basename",
|
||||
name: "mkdirp pins mount root + relative path",
|
||||
invoke: (bridge: ReturnType<typeof createSandboxFsBridge>) =>
|
||||
bridge.mkdirp({ filePath: "nested/leaf" }),
|
||||
scriptFragment: 'mkdir -p -- "$2"',
|
||||
expectedArgs: ["/workspace/nested", "leaf"],
|
||||
expectedArgs: ["mkdirp", "/workspace", "nested/leaf"],
|
||||
forbiddenArgs: ["/workspace/nested/leaf"],
|
||||
canonicalProbe: "/workspace/nested",
|
||||
},
|
||||
{
|
||||
name: "remove anchors parent + basename",
|
||||
name: "remove pins mount root + parent/basename",
|
||||
invoke: (bridge: ReturnType<typeof createSandboxFsBridge>) =>
|
||||
bridge.remove({ filePath: "nested/file.txt" }),
|
||||
scriptFragment: 'rm -f -- "$2"',
|
||||
expectedArgs: ["/workspace/nested", "file.txt"],
|
||||
expectedArgs: ["remove", "/workspace", "nested", "file.txt", "0", "1"],
|
||||
forbiddenArgs: ["/workspace/nested/file.txt"],
|
||||
canonicalProbe: "/workspace/nested",
|
||||
},
|
||||
{
|
||||
name: "rename anchors both parents + basenames",
|
||||
name: "rename pins both parents + basenames",
|
||||
invoke: (bridge: ReturnType<typeof createSandboxFsBridge>) =>
|
||||
bridge.rename({ from: "from.txt", to: "nested/to.txt" }),
|
||||
scriptFragment: 'mv -- "$3" "$2/$4"',
|
||||
expectedArgs: ["/workspace", "/workspace/nested", "from.txt", "to.txt"],
|
||||
expectedArgs: ["rename", "/workspace", "", "from.txt", "/workspace", "nested", "to.txt", "1"],
|
||||
forbiddenArgs: ["/workspace/from.txt", "/workspace/nested/to.txt"],
|
||||
canonicalProbe: "/workspace/nested",
|
||||
},
|
||||
] as const;
|
||||
|
||||
it.each(anchoredCases)("$name", async (testCase) => {
|
||||
const bridge = createSandboxFsBridge({ sandbox: createSandbox() });
|
||||
it.each(pinnedCases)("$name", async (testCase) => {
|
||||
await withTempDir("openclaw-fs-bridge-contract-write-", async (stateDir) => {
|
||||
const workspaceDir = path.join(stateDir, "workspace");
|
||||
await fs.mkdir(path.join(workspaceDir, "nested"), { recursive: true });
|
||||
await fs.writeFile(path.join(workspaceDir, "from.txt"), "hello", "utf8");
|
||||
await fs.writeFile(path.join(workspaceDir, "nested", "file.txt"), "bye", "utf8");
|
||||
|
||||
await testCase.invoke(bridge);
|
||||
const bridge = createSandboxFsBridge({
|
||||
sandbox: createSandbox({
|
||||
workspaceDir,
|
||||
agentWorkspaceDir: workspaceDir,
|
||||
}),
|
||||
});
|
||||
|
||||
const opCall = findCallByScriptFragment(testCase.scriptFragment);
|
||||
expect(opCall).toBeDefined();
|
||||
const args = opCall?.[0] ?? [];
|
||||
testCase.expectedArgs.forEach((value, index) => {
|
||||
expect(getDockerArg(args, index + 1)).toBe(value);
|
||||
await testCase.invoke(bridge);
|
||||
|
||||
const opCall = mockedExecDockerRaw.mock.calls.find(
|
||||
([args]) =>
|
||||
typeof args[5] === "string" &&
|
||||
args[5].includes("python3 - \"$@\" <<'PY'") &&
|
||||
getDockerArg(args, 1) === testCase.expectedArgs[0],
|
||||
);
|
||||
expect(opCall).toBeDefined();
|
||||
const args = opCall?.[0] ?? [];
|
||||
testCase.expectedArgs.forEach((value, index) => {
|
||||
expect(getDockerArg(args, index + 1)).toBe(value);
|
||||
});
|
||||
testCase.forbiddenArgs.forEach((value) => {
|
||||
expect(args).not.toContain(value);
|
||||
});
|
||||
});
|
||||
testCase.forbiddenArgs.forEach((value) => {
|
||||
expect(args).not.toContain(value);
|
||||
});
|
||||
|
||||
const canonicalCalls = findCallsByScriptFragment('readlink -f -- "$cursor"');
|
||||
expect(
|
||||
canonicalCalls.some(([callArgs]) => getDockerArg(callArgs, 1) === testCase.canonicalProbe),
|
||||
).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -6,7 +6,7 @@ import {
|
||||
createSandbox,
|
||||
createSandboxFsBridge,
|
||||
expectMkdirpAllowsExistingDirectory,
|
||||
getScriptsFromCalls,
|
||||
findCallByDockerArg,
|
||||
installFsBridgeTestHarness,
|
||||
mockedExecDockerRaw,
|
||||
withTempDir,
|
||||
@@ -55,8 +55,7 @@ describe("sandbox fs bridge boundary validation", () => {
|
||||
await expect(bridge.mkdirp({ filePath: "memory/kemik" })).rejects.toThrow(
|
||||
/cannot create directories/i,
|
||||
);
|
||||
const scripts = getScriptsFromCalls();
|
||||
expect(scripts.some((script) => script.includes('mkdir -p -- "$2"'))).toBe(false);
|
||||
expect(findCallByDockerArg(1, "mkdirp")).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -111,7 +110,6 @@ describe("sandbox fs bridge boundary validation", () => {
|
||||
it("rejects missing files before any docker read command runs", async () => {
|
||||
const bridge = createSandboxFsBridge({ sandbox: createSandbox() });
|
||||
await expect(bridge.readFile({ filePath: "a.txt" })).rejects.toThrow(/ENOENT|no such file/i);
|
||||
const scripts = getScriptsFromCalls();
|
||||
expect(scripts.some((script) => script.includes('cat -- "$1"'))).toBe(false);
|
||||
expect(mockedExecDockerRaw).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
89
src/agents/sandbox/fs-bridge.e2e-docker.test.ts
Normal file
89
src/agents/sandbox/fs-bridge.e2e-docker.test.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { DEFAULT_SANDBOX_IMAGE } from "./constants.js";
|
||||
import { buildSandboxCreateArgs, execDocker, execDockerRaw } from "./docker.js";
|
||||
import { createSandboxFsBridge } from "./fs-bridge.js";
|
||||
import { createSandboxTestContext } from "./test-fixtures.js";
|
||||
import { appendWorkspaceMountArgs } from "./workspace-mounts.js";
|
||||
|
||||
async function sandboxImageReady(): Promise<boolean> {
|
||||
try {
|
||||
const dockerVersion = await execDockerRaw(["version"], { allowFailure: true });
|
||||
if (dockerVersion.code !== 0) {
|
||||
return false;
|
||||
}
|
||||
const pythonCheck = await execDockerRaw(
|
||||
["run", "--rm", "--entrypoint", "python3", DEFAULT_SANDBOX_IMAGE, "--version"],
|
||||
{ allowFailure: true },
|
||||
);
|
||||
return pythonCheck.code === 0;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
describe("sandbox fs bridge docker e2e", () => {
|
||||
it.runIf(process.platform !== "win32")(
|
||||
"writes through docker exec using the pinned mutation helper",
|
||||
async () => {
|
||||
if (!(await sandboxImageReady())) {
|
||||
return;
|
||||
}
|
||||
|
||||
const stateDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-fsbridge-e2e-"));
|
||||
const workspaceDir = path.join(stateDir, "workspace");
|
||||
await fs.mkdir(workspaceDir, { recursive: true });
|
||||
|
||||
const suffix = `${process.pid}-${Date.now()}`;
|
||||
const containerName = `openclaw-fsbridge-${suffix}`.slice(0, 63);
|
||||
|
||||
try {
|
||||
const sandbox = createSandboxTestContext({
|
||||
overrides: {
|
||||
workspaceDir,
|
||||
agentWorkspaceDir: workspaceDir,
|
||||
containerName,
|
||||
containerWorkdir: "/workspace",
|
||||
},
|
||||
dockerOverrides: {
|
||||
image: DEFAULT_SANDBOX_IMAGE,
|
||||
containerPrefix: "openclaw-fsbridge-",
|
||||
user: "",
|
||||
},
|
||||
});
|
||||
|
||||
const createArgs = buildSandboxCreateArgs({
|
||||
name: containerName,
|
||||
cfg: sandbox.docker,
|
||||
scopeKey: sandbox.sessionKey,
|
||||
includeBinds: false,
|
||||
bindSourceRoots: [workspaceDir],
|
||||
});
|
||||
createArgs.push("--workdir", sandbox.containerWorkdir);
|
||||
appendWorkspaceMountArgs({
|
||||
args: createArgs,
|
||||
workspaceDir,
|
||||
agentWorkspaceDir: workspaceDir,
|
||||
workdir: sandbox.containerWorkdir,
|
||||
workspaceAccess: sandbox.workspaceAccess,
|
||||
});
|
||||
createArgs.push(sandbox.docker.image, "sleep", "infinity");
|
||||
|
||||
await execDocker(createArgs);
|
||||
await execDocker(["start", containerName]);
|
||||
|
||||
const bridge = createSandboxFsBridge({ sandbox });
|
||||
await bridge.writeFile({ filePath: "nested/hello.txt", data: "from-docker" });
|
||||
|
||||
await expect(
|
||||
fs.readFile(path.join(workspaceDir, "nested", "hello.txt"), "utf8"),
|
||||
).resolves.toBe("from-docker");
|
||||
} finally {
|
||||
await execDocker(["rm", "-f", containerName], { allowFailure: true });
|
||||
await fs.rm(stateDir, { recursive: true, force: true });
|
||||
}
|
||||
},
|
||||
);
|
||||
});
|
||||
@@ -45,10 +45,10 @@ describe("sandbox fs bridge shell compatibility", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("resolveCanonicalContainerPath script is valid POSIX sh (no do; token)", async () => {
|
||||
it("path canonicalization recheck script is valid POSIX sh", async () => {
|
||||
const bridge = createSandboxFsBridge({ sandbox: createSandbox() });
|
||||
|
||||
await bridge.mkdirp({ filePath: "nested" });
|
||||
await bridge.writeFile({ filePath: "b.txt", data: "hello" });
|
||||
|
||||
const scripts = getScriptsFromCalls();
|
||||
const canonicalScript = scripts.find((script) => script.includes("allow_final"));
|
||||
@@ -134,6 +134,32 @@ describe("sandbox fs bridge shell compatibility", () => {
|
||||
expect(scripts.some((script) => script.includes("os.replace("))).toBe(true);
|
||||
});
|
||||
|
||||
it("routes mkdirp, remove, and rename through the pinned mutation helper", async () => {
|
||||
await withTempDir("openclaw-fs-bridge-shell-write-", async (stateDir) => {
|
||||
const workspaceDir = path.join(stateDir, "workspace");
|
||||
await fs.mkdir(path.join(workspaceDir, "nested"), { recursive: true });
|
||||
await fs.writeFile(path.join(workspaceDir, "a.txt"), "hello", "utf8");
|
||||
await fs.writeFile(path.join(workspaceDir, "nested", "file.txt"), "bye", "utf8");
|
||||
|
||||
const bridge = createSandboxFsBridge({
|
||||
sandbox: createSandbox({
|
||||
workspaceDir,
|
||||
agentWorkspaceDir: workspaceDir,
|
||||
}),
|
||||
});
|
||||
|
||||
await bridge.mkdirp({ filePath: "nested" });
|
||||
await bridge.remove({ filePath: "nested/file.txt" });
|
||||
await bridge.rename({ from: "a.txt", to: "nested/b.txt" });
|
||||
|
||||
const scripts = getScriptsFromCalls();
|
||||
expect(scripts.filter((script) => script.includes("operation = sys.argv[1]")).length).toBe(3);
|
||||
expect(scripts.some((script) => script.includes('mkdir -p -- "$2"'))).toBe(false);
|
||||
expect(scripts.some((script) => script.includes('rm -f -- "$2"'))).toBe(false);
|
||||
expect(scripts.some((script) => script.includes('mv -- "$3" "$2/$4"'))).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
it("re-validates target before the pinned write helper runs", async () => {
|
||||
const { mockedOpenBoundaryFile } = await import("./fs-bridge.test-helpers.js");
|
||||
mockedOpenBoundaryFile
|
||||
|
||||
@@ -48,6 +48,10 @@ export function findCallByScriptFragment(fragment: string) {
|
||||
return mockedExecDockerRaw.mock.calls.find(([args]) => getDockerScript(args).includes(fragment));
|
||||
}
|
||||
|
||||
export function findCallByDockerArg(position: number, value: string) {
|
||||
return mockedExecDockerRaw.mock.calls.find(([args]) => getDockerArg(args, position) === value);
|
||||
}
|
||||
|
||||
export function findCallsByScriptFragment(fragment: string) {
|
||||
return mockedExecDockerRaw.mock.calls.filter(([args]) =>
|
||||
getDockerScript(args).includes(fragment),
|
||||
@@ -142,12 +146,16 @@ export async function expectMkdirpAllowsExistingDirectory(params?: {
|
||||
|
||||
await expect(bridge.mkdirp({ filePath: "memory/kemik" })).resolves.toBeUndefined();
|
||||
|
||||
const mkdirCall = findCallByScriptFragment('mkdir -p -- "$2"');
|
||||
const mkdirCall = mockedExecDockerRaw.mock.calls.find(
|
||||
([args]) =>
|
||||
getDockerScript(args).includes("operation = sys.argv[1]") &&
|
||||
getDockerArg(args, 1) === "mkdirp",
|
||||
);
|
||||
expect(mkdirCall).toBeDefined();
|
||||
const mkdirParent = mkdirCall ? getDockerArg(mkdirCall[0], 1) : "";
|
||||
const mkdirBase = mkdirCall ? getDockerArg(mkdirCall[0], 2) : "";
|
||||
expect(mkdirParent).toBe("/workspace/memory");
|
||||
expect(mkdirBase).toBe("kemik");
|
||||
const mountRoot = mkdirCall ? getDockerArg(mkdirCall[0], 2) : "";
|
||||
const relativePath = mkdirCall ? getDockerArg(mkdirCall[0], 3) : "";
|
||||
expect(mountRoot).toBe("/workspace");
|
||||
expect(relativePath).toBe("memory/kemik");
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
import fs from "node:fs";
|
||||
import { execDockerRaw, type ExecDockerRawResult } from "./docker.js";
|
||||
import { SandboxFsPathGuard } from "./fs-bridge-path-safety.js";
|
||||
import {
|
||||
buildMkdirpPlan,
|
||||
buildRemovePlan,
|
||||
buildRenamePlan,
|
||||
buildStatPlan,
|
||||
type SandboxFsCommandPlan,
|
||||
} from "./fs-bridge-shell-command-plans.js";
|
||||
import { buildPinnedWritePlan } from "./fs-bridge-write-helper.js";
|
||||
buildPinnedMkdirpPlan,
|
||||
buildPinnedRemovePlan,
|
||||
buildPinnedRenamePlan,
|
||||
buildPinnedWritePlan,
|
||||
} from "./fs-bridge-mutation-helper.js";
|
||||
import { SandboxFsPathGuard } from "./fs-bridge-path-safety.js";
|
||||
import { buildStatPlan, type SandboxFsCommandPlan } from "./fs-bridge-shell-command-plans.js";
|
||||
import {
|
||||
buildSandboxFsMounts,
|
||||
resolveSandboxFsPathWithMounts,
|
||||
@@ -119,7 +118,7 @@ class SandboxFsBridgeImpl implements SandboxFsBridge {
|
||||
const buffer = Buffer.isBuffer(params.data)
|
||||
? params.data
|
||||
: Buffer.from(params.data, params.encoding ?? "utf8");
|
||||
const pinnedWriteTarget = this.pathGuard.resolvePinnedWriteEntry(target, "write files");
|
||||
const pinnedWriteTarget = this.pathGuard.resolvePinnedEntry(target, "write files");
|
||||
await this.runCheckedCommand({
|
||||
...buildPinnedWritePlan({
|
||||
check: writeCheck,
|
||||
@@ -134,8 +133,21 @@ class SandboxFsBridgeImpl implements SandboxFsBridge {
|
||||
async mkdirp(params: { filePath: string; cwd?: string; signal?: AbortSignal }): Promise<void> {
|
||||
const target = this.resolveResolvedPath(params);
|
||||
this.ensureWriteAccess(target, "create directories");
|
||||
const anchoredTarget = await this.pathGuard.resolveAnchoredSandboxEntry(target);
|
||||
await this.runPlannedCommand(buildMkdirpPlan(target, anchoredTarget), params.signal);
|
||||
const mkdirCheck = {
|
||||
target,
|
||||
options: {
|
||||
action: "create directories",
|
||||
requireWritable: true,
|
||||
allowedType: "directory",
|
||||
} as const,
|
||||
};
|
||||
await this.runCheckedCommand({
|
||||
...buildPinnedMkdirpPlan({
|
||||
check: mkdirCheck,
|
||||
pinned: this.pathGuard.resolvePinnedDirectoryEntry(target, "create directories"),
|
||||
}),
|
||||
signal: params.signal,
|
||||
});
|
||||
}
|
||||
|
||||
async remove(params: {
|
||||
@@ -147,16 +159,22 @@ class SandboxFsBridgeImpl implements SandboxFsBridge {
|
||||
}): Promise<void> {
|
||||
const target = this.resolveResolvedPath(params);
|
||||
this.ensureWriteAccess(target, "remove files");
|
||||
const anchoredTarget = await this.pathGuard.resolveAnchoredSandboxEntry(target);
|
||||
await this.runPlannedCommand(
|
||||
buildRemovePlan({
|
||||
target,
|
||||
anchoredTarget,
|
||||
const removeCheck = {
|
||||
target,
|
||||
options: {
|
||||
action: "remove files",
|
||||
requireWritable: true,
|
||||
} as const,
|
||||
};
|
||||
await this.runCheckedCommand({
|
||||
...buildPinnedRemovePlan({
|
||||
check: removeCheck,
|
||||
pinned: this.pathGuard.resolvePinnedEntry(target, "remove files"),
|
||||
recursive: params.recursive,
|
||||
force: params.force,
|
||||
}),
|
||||
params.signal,
|
||||
);
|
||||
signal: params.signal,
|
||||
});
|
||||
}
|
||||
|
||||
async rename(params: {
|
||||
@@ -169,17 +187,29 @@ class SandboxFsBridgeImpl implements SandboxFsBridge {
|
||||
const to = this.resolveResolvedPath({ filePath: params.to, cwd: params.cwd });
|
||||
this.ensureWriteAccess(from, "rename files");
|
||||
this.ensureWriteAccess(to, "rename files");
|
||||
const anchoredFrom = await this.pathGuard.resolveAnchoredSandboxEntry(from);
|
||||
const anchoredTo = await this.pathGuard.resolveAnchoredSandboxEntry(to);
|
||||
await this.runPlannedCommand(
|
||||
buildRenamePlan({
|
||||
from,
|
||||
to,
|
||||
anchoredFrom,
|
||||
anchoredTo,
|
||||
const fromCheck = {
|
||||
target: from,
|
||||
options: {
|
||||
action: "rename files",
|
||||
requireWritable: true,
|
||||
} as const,
|
||||
};
|
||||
const toCheck = {
|
||||
target: to,
|
||||
options: {
|
||||
action: "rename files",
|
||||
requireWritable: true,
|
||||
} as const,
|
||||
};
|
||||
await this.runCheckedCommand({
|
||||
...buildPinnedRenamePlan({
|
||||
fromCheck,
|
||||
toCheck,
|
||||
from: this.pathGuard.resolvePinnedEntry(from, "rename files"),
|
||||
to: this.pathGuard.resolvePinnedEntry(to, "rename files"),
|
||||
}),
|
||||
params.signal,
|
||||
);
|
||||
signal: params.signal,
|
||||
});
|
||||
}
|
||||
|
||||
async stat(params: {
|
||||
|
||||
@@ -36,17 +36,16 @@ const renderGatewayPortHealthDiagnostics = vi.fn(() => ["diag: unhealthy port"])
|
||||
const renderRestartDiagnostics = vi.fn(() => ["diag: unhealthy runtime"]);
|
||||
const resolveGatewayPort = vi.fn(() => 18789);
|
||||
const findGatewayPidsOnPortSync = vi.fn<(port: number) => number[]>(() => []);
|
||||
const probeGateway =
|
||||
vi.fn<
|
||||
(opts: {
|
||||
url: string;
|
||||
auth?: { token?: string; password?: string };
|
||||
timeoutMs: number;
|
||||
}) => Promise<{
|
||||
ok: boolean;
|
||||
configSnapshot: unknown;
|
||||
}>
|
||||
>();
|
||||
const probeGateway = vi.fn<
|
||||
(opts: {
|
||||
url: string;
|
||||
auth?: { token?: string; password?: string };
|
||||
timeoutMs: number;
|
||||
}) => Promise<{
|
||||
ok: boolean;
|
||||
configSnapshot: unknown;
|
||||
}>
|
||||
>();
|
||||
const isRestartEnabled = vi.fn<(config?: { commands?: unknown }) => boolean>(() => true);
|
||||
const loadConfig = vi.fn(() => ({}));
|
||||
|
||||
|
||||
@@ -198,16 +198,6 @@ function appendCronDeliveryInstruction(params: {
|
||||
return `${params.commandBody}\n\nReturn your summary as plain text; it will be delivered automatically. If the task explicitly calls for messaging a specific external recipient, note who/where it should go instead of sending it yourself.`.trim();
|
||||
}
|
||||
|
||||
function resolveCronEmbeddedAgentLane(lane?: string) {
|
||||
const trimmed = lane?.trim();
|
||||
// Cron jobs already execute inside the cron command lane. Reusing that same
|
||||
// lane for the nested embedded-agent run deadlocks: the outer cron task holds
|
||||
// the lane while the inner run waits to reacquire it.
|
||||
if (!trimmed || trimmed === "cron") {
|
||||
return CommandLane.Nested;
|
||||
}
|
||||
return trimmed;
|
||||
}
|
||||
export async function runCronIsolatedAgentTurn(params: {
|
||||
cfg: OpenClawConfig;
|
||||
deps: CliDeps;
|
||||
|
||||
@@ -655,6 +655,7 @@ describe("callGateway password resolution", () => {
|
||||
envSnapshot = captureEnv([
|
||||
"OPENCLAW_GATEWAY_PASSWORD",
|
||||
"OPENCLAW_GATEWAY_TOKEN",
|
||||
"LOCAL_REMOTE_FALLBACK_TOKEN",
|
||||
"LOCAL_REF_PASSWORD",
|
||||
"REMOTE_REF_TOKEN",
|
||||
"REMOTE_REF_PASSWORD",
|
||||
@@ -662,6 +663,7 @@ describe("callGateway password resolution", () => {
|
||||
resetGatewayCallMocks();
|
||||
delete process.env.OPENCLAW_GATEWAY_PASSWORD;
|
||||
delete process.env.OPENCLAW_GATEWAY_TOKEN;
|
||||
delete process.env.LOCAL_REMOTE_FALLBACK_TOKEN;
|
||||
delete process.env.LOCAL_REF_PASSWORD;
|
||||
delete process.env.REMOTE_REF_TOKEN;
|
||||
delete process.env.REMOTE_REF_PASSWORD;
|
||||
@@ -813,6 +815,30 @@ describe("callGateway password resolution", () => {
|
||||
expect(lastClientOptions?.password).toBe("resolved-local-fallback-password"); // pragma: allowlist secret
|
||||
});
|
||||
|
||||
it("fails closed when unresolved local token SecretRef would otherwise fall back to remote token", async () => {
|
||||
process.env.LOCAL_REMOTE_FALLBACK_TOKEN = "resolved-local-remote-fallback-token";
|
||||
loadConfig.mockReturnValue({
|
||||
gateway: {
|
||||
mode: "local",
|
||||
bind: "loopback",
|
||||
auth: {
|
||||
mode: "token",
|
||||
token: { source: "env", provider: "default", id: "MISSING_LOCAL_REF_TOKEN" },
|
||||
},
|
||||
remote: {
|
||||
token: { source: "env", provider: "default", id: "LOCAL_REMOTE_FALLBACK_TOKEN" },
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
providers: {
|
||||
default: { source: "env" },
|
||||
},
|
||||
},
|
||||
} as unknown as OpenClawConfig);
|
||||
|
||||
await expect(callGateway({ method: "health" })).rejects.toThrow("gateway.auth.token");
|
||||
});
|
||||
|
||||
it.each(["none", "trusted-proxy"] as const)(
|
||||
"ignores unresolved local password ref when auth mode is %s",
|
||||
async (mode) => {
|
||||
|
||||
@@ -416,4 +416,74 @@ describe("resolveGatewayConnectionAuth", () => {
|
||||
}),
|
||||
).toThrow("gateway.auth.password");
|
||||
});
|
||||
|
||||
it("fails closed when local token SecretRef is unresolved and remote token fallback exists", async () => {
|
||||
const config = cfg({
|
||||
gateway: {
|
||||
mode: "local",
|
||||
auth: {
|
||||
mode: "token",
|
||||
token: { source: "env", provider: "default", id: "MISSING_LOCAL_TOKEN" },
|
||||
},
|
||||
remote: {
|
||||
token: "remote-token",
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
providers: {
|
||||
default: { source: "env" },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
await expect(
|
||||
resolveGatewayConnectionAuth({
|
||||
config,
|
||||
env: {} as NodeJS.ProcessEnv,
|
||||
includeLegacyEnv: false,
|
||||
}),
|
||||
).rejects.toThrow("gateway.auth.token");
|
||||
expect(() =>
|
||||
resolveGatewayConnectionAuthFromConfig({
|
||||
cfg: config,
|
||||
env: {} as NodeJS.ProcessEnv,
|
||||
includeLegacyEnv: false,
|
||||
}),
|
||||
).toThrow("gateway.auth.token");
|
||||
});
|
||||
|
||||
it("fails closed when local password SecretRef is unresolved and remote password fallback exists", async () => {
|
||||
const config = cfg({
|
||||
gateway: {
|
||||
mode: "local",
|
||||
auth: {
|
||||
mode: "password",
|
||||
password: { source: "env", provider: "default", id: "MISSING_LOCAL_PASSWORD" },
|
||||
},
|
||||
remote: {
|
||||
password: "remote-password", // pragma: allowlist secret
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
providers: {
|
||||
default: { source: "env" },
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
await expect(
|
||||
resolveGatewayConnectionAuth({
|
||||
config,
|
||||
env: {} as NodeJS.ProcessEnv,
|
||||
includeLegacyEnv: false,
|
||||
}),
|
||||
).rejects.toThrow("gateway.auth.password");
|
||||
expect(() =>
|
||||
resolveGatewayConnectionAuthFromConfig({
|
||||
cfg: config,
|
||||
env: {} as NodeJS.ProcessEnv,
|
||||
includeLegacyEnv: false,
|
||||
}),
|
||||
).toThrow("gateway.auth.password");
|
||||
});
|
||||
});
|
||||
|
||||
@@ -33,7 +33,6 @@ export type GatewayCredentialPlan = {
|
||||
remoteMode: boolean;
|
||||
remoteUrlConfigured: boolean;
|
||||
tailscaleRemoteExposure: boolean;
|
||||
remoteEnabled: boolean;
|
||||
remoteConfiguredSurface: boolean;
|
||||
remoteTokenFallbackActive: boolean;
|
||||
remoteTokenActive: boolean;
|
||||
@@ -187,7 +186,6 @@ export function createGatewayCredentialPlan(params: {
|
||||
const remoteUrlConfigured = Boolean(trimToUndefined(remote?.url));
|
||||
const tailscaleRemoteExposure =
|
||||
gateway?.tailscale?.mode === "serve" || gateway?.tailscale?.mode === "funnel";
|
||||
const remoteEnabled = remote?.enabled !== false;
|
||||
const remoteConfiguredSurface = remoteMode || remoteUrlConfigured || tailscaleRemoteExposure;
|
||||
const remoteTokenFallbackActive = localTokenCanWin && !envToken && !localToken.configured;
|
||||
const remotePasswordFallbackActive = !envPassword && !localPassword.configured && passwordCanWin;
|
||||
@@ -209,12 +207,10 @@ export function createGatewayCredentialPlan(params: {
|
||||
remoteMode,
|
||||
remoteUrlConfigured,
|
||||
tailscaleRemoteExposure,
|
||||
remoteEnabled,
|
||||
remoteConfiguredSurface,
|
||||
remoteTokenFallbackActive,
|
||||
remoteTokenActive: remoteEnabled && (remoteConfiguredSurface || remoteTokenFallbackActive),
|
||||
remoteTokenActive: remoteConfiguredSurface || remoteTokenFallbackActive,
|
||||
remotePasswordFallbackActive,
|
||||
remotePasswordActive:
|
||||
remoteEnabled && (remoteConfiguredSurface || remotePasswordFallbackActive),
|
||||
remotePasswordActive: remoteConfiguredSurface || remotePasswordFallbackActive,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -158,6 +158,58 @@ describe("resolveGatewayCredentialsFromConfig", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("fails closed when local token SecretRef is unresolved and remote token fallback exists", () => {
|
||||
expect(() =>
|
||||
resolveGatewayCredentialsFromConfig({
|
||||
cfg: {
|
||||
gateway: {
|
||||
mode: "local",
|
||||
auth: {
|
||||
mode: "token",
|
||||
token: { source: "env", provider: "default", id: "MISSING_LOCAL_TOKEN" },
|
||||
},
|
||||
remote: {
|
||||
token: "remote-token",
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
providers: {
|
||||
default: { source: "env" },
|
||||
},
|
||||
},
|
||||
} as unknown as OpenClawConfig,
|
||||
env: {} as NodeJS.ProcessEnv,
|
||||
includeLegacyEnv: false,
|
||||
}),
|
||||
).toThrow("gateway.auth.token");
|
||||
});
|
||||
|
||||
it("fails closed when local password SecretRef is unresolved and remote password fallback exists", () => {
|
||||
expect(() =>
|
||||
resolveGatewayCredentialsFromConfig({
|
||||
cfg: {
|
||||
gateway: {
|
||||
mode: "local",
|
||||
auth: {
|
||||
mode: "password",
|
||||
password: { source: "env", provider: "default", id: "MISSING_LOCAL_PASSWORD" },
|
||||
},
|
||||
remote: {
|
||||
password: "remote-password", // pragma: allowlist secret
|
||||
},
|
||||
},
|
||||
secrets: {
|
||||
providers: {
|
||||
default: { source: "env" },
|
||||
},
|
||||
},
|
||||
} as unknown as OpenClawConfig,
|
||||
env: {} as NodeJS.ProcessEnv,
|
||||
includeLegacyEnv: false,
|
||||
}),
|
||||
).toThrow("gateway.auth.password");
|
||||
});
|
||||
|
||||
it("throws when local password auth relies on an unresolved SecretRef", () => {
|
||||
expect(() =>
|
||||
resolveGatewayCredentialsFromConfig({
|
||||
|
||||
@@ -8,7 +8,7 @@ const mocks = vi.hoisted(() => ({
|
||||
updateSessionStore: vi.fn(),
|
||||
agentCommand: vi.fn(),
|
||||
registerAgentRunContext: vi.fn(),
|
||||
sessionsResetHandler: vi.fn(),
|
||||
performGatewaySessionReset: vi.fn(),
|
||||
loadConfigReturn: {} as Record<string, unknown>,
|
||||
}));
|
||||
|
||||
@@ -62,11 +62,9 @@ vi.mock("../../infra/agent-events.js", () => ({
|
||||
onAgentEvent: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("./sessions.js", () => ({
|
||||
sessionsHandlers: {
|
||||
"sessions.reset": (...args: unknown[]) =>
|
||||
(mocks.sessionsResetHandler as (...args: unknown[]) => unknown)(...args),
|
||||
},
|
||||
vi.mock("../session-reset-service.js", () => ({
|
||||
performGatewaySessionReset: (...args: unknown[]) =>
|
||||
(mocks.performGatewaySessionReset as (...args: unknown[]) => unknown)(...args),
|
||||
}));
|
||||
|
||||
vi.mock("../../sessions/send-policy.js", () => ({
|
||||
@@ -158,7 +156,7 @@ function resetTimeConfig() {
|
||||
|
||||
async function expectResetCall(expectedMessage: string) {
|
||||
await vi.waitFor(() => expect(mocks.agentCommand).toHaveBeenCalled());
|
||||
expect(mocks.sessionsResetHandler).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.performGatewaySessionReset).toHaveBeenCalledTimes(1);
|
||||
const call = readLastAgentCommandCall();
|
||||
expect(call?.message).toBe(expectedMessage);
|
||||
return call;
|
||||
@@ -208,18 +206,16 @@ function mockSessionResetSuccess(params: {
|
||||
}) {
|
||||
const key = params.key ?? "agent:main:main";
|
||||
const sessionId = params.sessionId ?? "reset-session-id";
|
||||
mocks.sessionsResetHandler.mockImplementation(
|
||||
async (opts: {
|
||||
params: { key: string; reason: string };
|
||||
respond: (ok: boolean, payload?: unknown) => void;
|
||||
}) => {
|
||||
expect(opts.params.key).toBe(key);
|
||||
expect(opts.params.reason).toBe(params.reason);
|
||||
opts.respond(true, {
|
||||
mocks.performGatewaySessionReset.mockImplementation(
|
||||
async (opts: { key: string; reason: string; commandSource: string }) => {
|
||||
expect(opts.key).toBe(key);
|
||||
expect(opts.reason).toBe(params.reason);
|
||||
expect(opts.commandSource).toBe("gateway:agent");
|
||||
return {
|
||||
ok: true,
|
||||
key,
|
||||
entry: { sessionId },
|
||||
});
|
||||
};
|
||||
},
|
||||
);
|
||||
}
|
||||
@@ -560,7 +556,7 @@ describe("gateway agent handler", () => {
|
||||
);
|
||||
|
||||
await vi.waitFor(() => expect(mocks.agentCommand).toHaveBeenCalled());
|
||||
expect(mocks.sessionsResetHandler).toHaveBeenCalledTimes(1);
|
||||
expect(mocks.performGatewaySessionReset).toHaveBeenCalledTimes(1);
|
||||
const call = readLastAgentCommandCall();
|
||||
// Message is now dynamically built with current date — check key substrings
|
||||
expect(call?.message).toContain("Execute your Session Startup sequence now");
|
||||
@@ -572,7 +568,7 @@ describe("gateway agent handler", () => {
|
||||
it("uses /reset suffix as the post-reset message and still injects timestamp", async () => {
|
||||
setupNewYorkTimeConfig("2026-01-29T01:30:00.000Z");
|
||||
mockSessionResetSuccess({ reason: "reset" });
|
||||
mocks.sessionsResetHandler.mockClear();
|
||||
mocks.performGatewaySessionReset.mockClear();
|
||||
primeMainAgentRun({
|
||||
sessionId: "reset-session-id",
|
||||
cfg: mocks.loadConfigReturn,
|
||||
|
||||
@@ -46,6 +46,7 @@ import {
|
||||
validateAgentParams,
|
||||
validateAgentWaitParams,
|
||||
} from "../protocol/index.js";
|
||||
import { performGatewaySessionReset } from "../session-reset-service.js";
|
||||
import {
|
||||
canonicalizeSpawnedByForAgent,
|
||||
loadSessionEntry,
|
||||
@@ -62,7 +63,6 @@ import {
|
||||
waitForTerminalGatewayDedupe,
|
||||
} from "./agent-wait-dedupe.js";
|
||||
import { normalizeRpcAttachmentsToChatAttachments } from "./attachment-normalize.js";
|
||||
import { sessionsHandlers } from "./sessions.js";
|
||||
import type { GatewayRequestHandlerOptions, GatewayRequestHandlers } from "./types.js";
|
||||
|
||||
const RESET_COMMAND_RE = /^\/(new|reset)(?:\s+([\s\S]*))?$/i;
|
||||
@@ -72,101 +72,26 @@ function resolveSenderIsOwnerFromClient(client: GatewayRequestHandlerOptions["cl
|
||||
return scopes.includes(ADMIN_SCOPE);
|
||||
}
|
||||
|
||||
function isGatewayErrorShape(value: unknown): value is { code: string; message: string } {
|
||||
if (!value || typeof value !== "object") {
|
||||
return false;
|
||||
}
|
||||
const candidate = value as { code?: unknown; message?: unknown };
|
||||
return typeof candidate.code === "string" && typeof candidate.message === "string";
|
||||
}
|
||||
|
||||
async function runSessionResetFromAgent(params: {
|
||||
key: string;
|
||||
reason: "new" | "reset";
|
||||
idempotencyKey: string;
|
||||
context: GatewayRequestHandlerOptions["context"];
|
||||
client: GatewayRequestHandlerOptions["client"];
|
||||
isWebchatConnect: GatewayRequestHandlerOptions["isWebchatConnect"];
|
||||
}): Promise<
|
||||
| { ok: true; key: string; sessionId?: string }
|
||||
| { ok: false; error: ReturnType<typeof errorShape> }
|
||||
> {
|
||||
return await new Promise((resolve) => {
|
||||
let settled = false;
|
||||
const settle = (
|
||||
result:
|
||||
| { ok: true; key: string; sessionId?: string }
|
||||
| { ok: false; error: ReturnType<typeof errorShape> },
|
||||
) => {
|
||||
if (settled) {
|
||||
return;
|
||||
}
|
||||
settled = true;
|
||||
resolve(result);
|
||||
};
|
||||
|
||||
const respond: GatewayRequestHandlerOptions["respond"] = (ok, payload, error) => {
|
||||
if (!ok) {
|
||||
settle({
|
||||
ok: false,
|
||||
error: isGatewayErrorShape(error)
|
||||
? error
|
||||
: errorShape(ErrorCodes.UNAVAILABLE, String(error ?? "sessions.reset failed")),
|
||||
});
|
||||
return;
|
||||
}
|
||||
const payloadObj = payload as
|
||||
| {
|
||||
key?: unknown;
|
||||
entry?: {
|
||||
sessionId?: unknown;
|
||||
};
|
||||
}
|
||||
| undefined;
|
||||
const key = typeof payloadObj?.key === "string" ? payloadObj.key : params.key;
|
||||
const sessionId =
|
||||
payloadObj?.entry && typeof payloadObj.entry.sessionId === "string"
|
||||
? payloadObj.entry.sessionId
|
||||
: undefined;
|
||||
settle({ ok: true, key, sessionId });
|
||||
};
|
||||
|
||||
const resetResult = sessionsHandlers["sessions.reset"]({
|
||||
req: {
|
||||
type: "req",
|
||||
id: `${params.idempotencyKey}:reset`,
|
||||
method: "sessions.reset",
|
||||
},
|
||||
params: {
|
||||
key: params.key,
|
||||
reason: params.reason,
|
||||
},
|
||||
context: params.context,
|
||||
client: params.client,
|
||||
isWebchatConnect: params.isWebchatConnect,
|
||||
respond,
|
||||
});
|
||||
|
||||
void (async () => {
|
||||
try {
|
||||
await resetResult;
|
||||
if (!settled) {
|
||||
settle({
|
||||
ok: false,
|
||||
error: errorShape(
|
||||
ErrorCodes.UNAVAILABLE,
|
||||
"sessions.reset completed without returning a response",
|
||||
),
|
||||
});
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
settle({
|
||||
ok: false,
|
||||
error: errorShape(ErrorCodes.UNAVAILABLE, String(err)),
|
||||
});
|
||||
}
|
||||
})();
|
||||
const result = await performGatewaySessionReset({
|
||||
key: params.key,
|
||||
reason: params.reason,
|
||||
commandSource: "gateway:agent",
|
||||
});
|
||||
if (!result.ok) {
|
||||
return result;
|
||||
}
|
||||
return {
|
||||
ok: true,
|
||||
key: result.key,
|
||||
sessionId: result.entry.sessionId,
|
||||
};
|
||||
}
|
||||
|
||||
function dispatchAgentRunFromGateway(params: {
|
||||
@@ -399,10 +324,6 @@ export const agentHandlers: GatewayRequestHandlers = {
|
||||
const resetResult = await runSessionResetFromAgent({
|
||||
key: requestedSessionKey,
|
||||
reason: resetReason,
|
||||
idempotencyKey: idem,
|
||||
context,
|
||||
client,
|
||||
isWebchatConnect,
|
||||
});
|
||||
if (!resetResult.ok) {
|
||||
respond(false, undefined, resetResult.error);
|
||||
|
||||
@@ -1,29 +1,13 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import { getAcpSessionManager } from "../../acp/control-plane/manager.js";
|
||||
import { resolveDefaultAgentId } from "../../agents/agent-scope.js";
|
||||
import { clearBootstrapSnapshot } from "../../agents/bootstrap-cache.js";
|
||||
import { abortEmbeddedPiRun, waitForEmbeddedPiRunEnd } from "../../agents/pi-embedded.js";
|
||||
import { stopSubagentsForRequester } from "../../auto-reply/reply/abort.js";
|
||||
import { clearSessionQueues } from "../../auto-reply/reply/queue.js";
|
||||
import { closeTrackedBrowserTabsForSessions } from "../../browser/session-tab-registry.js";
|
||||
import { loadConfig } from "../../config/config.js";
|
||||
import {
|
||||
loadSessionStore,
|
||||
snapshotSessionOrigin,
|
||||
resolveMainSessionKey,
|
||||
type SessionEntry,
|
||||
updateSessionStore,
|
||||
} from "../../config/sessions.js";
|
||||
import { unbindThreadBindingsBySessionKey } from "../../discord/monitor/thread-bindings.js";
|
||||
import { logVerbose } from "../../globals.js";
|
||||
import { createInternalHookEvent, triggerInternalHook } from "../../hooks/internal-hooks.js";
|
||||
import { getGlobalHookRunner } from "../../plugins/hook-runner-global.js";
|
||||
import {
|
||||
isSubagentSessionKey,
|
||||
normalizeAgentId,
|
||||
parseAgentSessionKey,
|
||||
} from "../../routing/session-key.js";
|
||||
import { normalizeAgentId, parseAgentSessionKey } from "../../routing/session-key.js";
|
||||
import { GATEWAY_CLIENT_IDS } from "../protocol/client-info.js";
|
||||
import {
|
||||
ErrorCodes,
|
||||
@@ -36,9 +20,14 @@ import {
|
||||
validateSessionsResetParams,
|
||||
validateSessionsResolveParams,
|
||||
} from "../protocol/index.js";
|
||||
import {
|
||||
archiveSessionTranscriptsForSession,
|
||||
cleanupSessionBeforeMutation,
|
||||
emitSessionUnboundLifecycleEvent,
|
||||
performGatewaySessionReset,
|
||||
} from "../session-reset-service.js";
|
||||
import {
|
||||
archiveFileOnDisk,
|
||||
archiveSessionTranscripts,
|
||||
listSessionsFromStore,
|
||||
loadCombinedSessionStoreForGateway,
|
||||
loadSessionEntry,
|
||||
@@ -128,219 +117,6 @@ function migrateAndPruneSessionStoreKey(params: {
|
||||
return { target, primaryKey, entry: params.store[primaryKey] };
|
||||
}
|
||||
|
||||
function stripRuntimeModelState(entry?: SessionEntry): SessionEntry | undefined {
|
||||
if (!entry) {
|
||||
return entry;
|
||||
}
|
||||
return {
|
||||
...entry,
|
||||
model: undefined,
|
||||
modelProvider: undefined,
|
||||
contextTokens: undefined,
|
||||
systemPromptReport: undefined,
|
||||
};
|
||||
}
|
||||
|
||||
function archiveSessionTranscriptsForSession(params: {
|
||||
sessionId: string | undefined;
|
||||
storePath: string;
|
||||
sessionFile?: string;
|
||||
agentId?: string;
|
||||
reason: "reset" | "deleted";
|
||||
}): string[] {
|
||||
if (!params.sessionId) {
|
||||
return [];
|
||||
}
|
||||
return archiveSessionTranscripts({
|
||||
sessionId: params.sessionId,
|
||||
storePath: params.storePath,
|
||||
sessionFile: params.sessionFile,
|
||||
agentId: params.agentId,
|
||||
reason: params.reason,
|
||||
});
|
||||
}
|
||||
|
||||
async function emitSessionUnboundLifecycleEvent(params: {
|
||||
targetSessionKey: string;
|
||||
reason: "session-reset" | "session-delete";
|
||||
emitHooks?: boolean;
|
||||
}) {
|
||||
const targetKind = isSubagentSessionKey(params.targetSessionKey) ? "subagent" : "acp";
|
||||
unbindThreadBindingsBySessionKey({
|
||||
targetSessionKey: params.targetSessionKey,
|
||||
targetKind,
|
||||
reason: params.reason,
|
||||
sendFarewell: true,
|
||||
});
|
||||
|
||||
if (params.emitHooks === false) {
|
||||
return;
|
||||
}
|
||||
|
||||
const hookRunner = getGlobalHookRunner();
|
||||
if (!hookRunner?.hasHooks("subagent_ended")) {
|
||||
return;
|
||||
}
|
||||
await hookRunner.runSubagentEnded(
|
||||
{
|
||||
targetSessionKey: params.targetSessionKey,
|
||||
targetKind,
|
||||
reason: params.reason,
|
||||
sendFarewell: true,
|
||||
outcome: params.reason === "session-reset" ? "reset" : "deleted",
|
||||
},
|
||||
{
|
||||
childSessionKey: params.targetSessionKey,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
async function ensureSessionRuntimeCleanup(params: {
|
||||
cfg: ReturnType<typeof loadConfig>;
|
||||
key: string;
|
||||
target: ReturnType<typeof resolveGatewaySessionStoreTarget>;
|
||||
sessionId?: string;
|
||||
}) {
|
||||
const closeTrackedBrowserTabs = async () => {
|
||||
const closeKeys = new Set<string>([
|
||||
params.key,
|
||||
params.target.canonicalKey,
|
||||
...params.target.storeKeys,
|
||||
params.sessionId ?? "",
|
||||
]);
|
||||
return await closeTrackedBrowserTabsForSessions({
|
||||
sessionKeys: [...closeKeys],
|
||||
onWarn: (message) => logVerbose(message),
|
||||
});
|
||||
};
|
||||
|
||||
const queueKeys = new Set<string>(params.target.storeKeys);
|
||||
queueKeys.add(params.target.canonicalKey);
|
||||
if (params.sessionId) {
|
||||
queueKeys.add(params.sessionId);
|
||||
}
|
||||
clearSessionQueues([...queueKeys]);
|
||||
stopSubagentsForRequester({ cfg: params.cfg, requesterSessionKey: params.target.canonicalKey });
|
||||
if (!params.sessionId) {
|
||||
clearBootstrapSnapshot(params.target.canonicalKey);
|
||||
await closeTrackedBrowserTabs();
|
||||
return undefined;
|
||||
}
|
||||
abortEmbeddedPiRun(params.sessionId);
|
||||
const ended = await waitForEmbeddedPiRunEnd(params.sessionId, 15_000);
|
||||
clearBootstrapSnapshot(params.target.canonicalKey);
|
||||
if (ended) {
|
||||
await closeTrackedBrowserTabs();
|
||||
return undefined;
|
||||
}
|
||||
return errorShape(
|
||||
ErrorCodes.UNAVAILABLE,
|
||||
`Session ${params.key} is still active; try again in a moment.`,
|
||||
);
|
||||
}
|
||||
|
||||
const ACP_RUNTIME_CLEANUP_TIMEOUT_MS = 15_000;
|
||||
|
||||
async function runAcpCleanupStep(params: {
|
||||
op: () => Promise<void>;
|
||||
}): Promise<{ status: "ok" } | { status: "timeout" } | { status: "error"; error: unknown }> {
|
||||
let timer: NodeJS.Timeout | undefined;
|
||||
const timeoutPromise = new Promise<{ status: "timeout" }>((resolve) => {
|
||||
timer = setTimeout(() => resolve({ status: "timeout" }), ACP_RUNTIME_CLEANUP_TIMEOUT_MS);
|
||||
});
|
||||
const opPromise = params
|
||||
.op()
|
||||
.then(() => ({ status: "ok" as const }))
|
||||
.catch((error: unknown) => ({ status: "error" as const, error }));
|
||||
const outcome = await Promise.race([opPromise, timeoutPromise]);
|
||||
if (timer) {
|
||||
clearTimeout(timer);
|
||||
}
|
||||
return outcome;
|
||||
}
|
||||
|
||||
async function closeAcpRuntimeForSession(params: {
|
||||
cfg: ReturnType<typeof loadConfig>;
|
||||
sessionKey: string;
|
||||
entry?: SessionEntry;
|
||||
reason: "session-reset" | "session-delete";
|
||||
}) {
|
||||
if (!params.entry?.acp) {
|
||||
return undefined;
|
||||
}
|
||||
const acpManager = getAcpSessionManager();
|
||||
const cancelOutcome = await runAcpCleanupStep({
|
||||
op: async () => {
|
||||
await acpManager.cancelSession({
|
||||
cfg: params.cfg,
|
||||
sessionKey: params.sessionKey,
|
||||
reason: params.reason,
|
||||
});
|
||||
},
|
||||
});
|
||||
if (cancelOutcome.status === "timeout") {
|
||||
return errorShape(
|
||||
ErrorCodes.UNAVAILABLE,
|
||||
`Session ${params.sessionKey} is still active; try again in a moment.`,
|
||||
);
|
||||
}
|
||||
if (cancelOutcome.status === "error") {
|
||||
logVerbose(
|
||||
`sessions.${params.reason}: ACP cancel failed for ${params.sessionKey}: ${String(cancelOutcome.error)}`,
|
||||
);
|
||||
}
|
||||
|
||||
const closeOutcome = await runAcpCleanupStep({
|
||||
op: async () => {
|
||||
await acpManager.closeSession({
|
||||
cfg: params.cfg,
|
||||
sessionKey: params.sessionKey,
|
||||
reason: params.reason,
|
||||
requireAcpSession: false,
|
||||
allowBackendUnavailable: true,
|
||||
});
|
||||
},
|
||||
});
|
||||
if (closeOutcome.status === "timeout") {
|
||||
return errorShape(
|
||||
ErrorCodes.UNAVAILABLE,
|
||||
`Session ${params.sessionKey} is still active; try again in a moment.`,
|
||||
);
|
||||
}
|
||||
if (closeOutcome.status === "error") {
|
||||
logVerbose(
|
||||
`sessions.${params.reason}: ACP runtime close failed for ${params.sessionKey}: ${String(closeOutcome.error)}`,
|
||||
);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async function cleanupSessionBeforeMutation(params: {
|
||||
cfg: ReturnType<typeof loadConfig>;
|
||||
key: string;
|
||||
target: ReturnType<typeof resolveGatewaySessionStoreTarget>;
|
||||
entry: SessionEntry | undefined;
|
||||
legacyKey?: string;
|
||||
canonicalKey?: string;
|
||||
reason: "session-reset" | "session-delete";
|
||||
}) {
|
||||
const cleanupError = await ensureSessionRuntimeCleanup({
|
||||
cfg: params.cfg,
|
||||
key: params.key,
|
||||
target: params.target,
|
||||
sessionId: params.entry?.sessionId,
|
||||
});
|
||||
if (cleanupError) {
|
||||
return cleanupError;
|
||||
}
|
||||
return await closeAcpRuntimeForSession({
|
||||
cfg: params.cfg,
|
||||
sessionKey: params.legacyKey ?? params.canonicalKey ?? params.target.canonicalKey ?? params.key,
|
||||
entry: params.entry,
|
||||
reason: params.reason,
|
||||
});
|
||||
}
|
||||
|
||||
export const sessionsHandlers: GatewayRequestHandlers = {
|
||||
"sessions.list": ({ params, respond }) => {
|
||||
if (!assertValidParams(params, validateSessionsListParams, "sessions.list", respond)) {
|
||||
@@ -486,89 +262,17 @@ export const sessionsHandlers: GatewayRequestHandlers = {
|
||||
return;
|
||||
}
|
||||
|
||||
const { cfg, target, storePath } = resolveGatewaySessionTargetFromKey(key);
|
||||
const { entry, legacyKey, canonicalKey } = loadSessionEntry(key);
|
||||
const hadExistingEntry = Boolean(entry);
|
||||
const commandReason = p.reason === "new" ? "new" : "reset";
|
||||
const hookEvent = createInternalHookEvent(
|
||||
"command",
|
||||
commandReason,
|
||||
target.canonicalKey ?? key,
|
||||
{
|
||||
sessionEntry: entry,
|
||||
previousSessionEntry: entry,
|
||||
commandSource: "gateway:sessions.reset",
|
||||
cfg,
|
||||
},
|
||||
);
|
||||
await triggerInternalHook(hookEvent);
|
||||
const mutationCleanupError = await cleanupSessionBeforeMutation({
|
||||
cfg,
|
||||
const reason = p.reason === "new" ? "new" : "reset";
|
||||
const result = await performGatewaySessionReset({
|
||||
key,
|
||||
target,
|
||||
entry,
|
||||
legacyKey,
|
||||
canonicalKey,
|
||||
reason: "session-reset",
|
||||
reason,
|
||||
commandSource: "gateway:sessions.reset",
|
||||
});
|
||||
if (mutationCleanupError) {
|
||||
respond(false, undefined, mutationCleanupError);
|
||||
if (!result.ok) {
|
||||
respond(false, undefined, result.error);
|
||||
return;
|
||||
}
|
||||
let oldSessionId: string | undefined;
|
||||
let oldSessionFile: string | undefined;
|
||||
const next = await updateSessionStore(storePath, (store) => {
|
||||
const { primaryKey } = migrateAndPruneSessionStoreKey({ cfg, key, store });
|
||||
const entry = store[primaryKey];
|
||||
const resetEntry = stripRuntimeModelState(entry);
|
||||
const parsed = parseAgentSessionKey(primaryKey);
|
||||
const sessionAgentId = normalizeAgentId(parsed?.agentId ?? resolveDefaultAgentId(cfg));
|
||||
const resolvedModel = resolveSessionModelRef(cfg, resetEntry, sessionAgentId);
|
||||
oldSessionId = entry?.sessionId;
|
||||
oldSessionFile = entry?.sessionFile;
|
||||
const now = Date.now();
|
||||
const nextEntry: SessionEntry = {
|
||||
sessionId: randomUUID(),
|
||||
updatedAt: now,
|
||||
systemSent: false,
|
||||
abortedLastRun: false,
|
||||
thinkingLevel: entry?.thinkingLevel,
|
||||
verboseLevel: entry?.verboseLevel,
|
||||
reasoningLevel: entry?.reasoningLevel,
|
||||
responseUsage: entry?.responseUsage,
|
||||
model: resolvedModel.model,
|
||||
modelProvider: resolvedModel.provider,
|
||||
contextTokens: resetEntry?.contextTokens,
|
||||
sendPolicy: entry?.sendPolicy,
|
||||
label: entry?.label,
|
||||
origin: snapshotSessionOrigin(entry),
|
||||
lastChannel: entry?.lastChannel,
|
||||
lastTo: entry?.lastTo,
|
||||
skillsSnapshot: entry?.skillsSnapshot,
|
||||
// Reset token counts to 0 on session reset (#1523)
|
||||
inputTokens: 0,
|
||||
outputTokens: 0,
|
||||
totalTokens: 0,
|
||||
totalTokensFresh: true,
|
||||
};
|
||||
store[primaryKey] = nextEntry;
|
||||
return nextEntry;
|
||||
});
|
||||
// Archive old transcript so it doesn't accumulate on disk (#14869).
|
||||
archiveSessionTranscriptsForSession({
|
||||
sessionId: oldSessionId,
|
||||
storePath,
|
||||
sessionFile: oldSessionFile,
|
||||
agentId: target.agentId,
|
||||
reason: "reset",
|
||||
});
|
||||
if (hadExistingEntry) {
|
||||
await emitSessionUnboundLifecycleEvent({
|
||||
targetSessionKey: target.canonicalKey ?? key,
|
||||
reason: "session-reset",
|
||||
});
|
||||
}
|
||||
respond(true, { ok: true, key: target.canonicalKey, entry: next }, undefined);
|
||||
respond(true, { ok: true, key: result.key, entry: result.entry }, undefined);
|
||||
},
|
||||
"sessions.delete": async ({ params, respond, client, isWebchatConnect }) => {
|
||||
if (!assertValidParams(params, validateSessionsDeleteParams, "sessions.delete", respond)) {
|
||||
|
||||
@@ -293,6 +293,56 @@ describe("gateway server agent", () => {
|
||||
expect(call.sessionId).not.toBe("sess-main-before-reset");
|
||||
});
|
||||
|
||||
test("write-scoped callers cannot use sessions.reset directly but can still reset conversations via agent", async () => {
|
||||
await withGatewayServer(async ({ port }) => {
|
||||
await useTempSessionStorePath();
|
||||
const storePath = testState.sessionStorePath;
|
||||
if (!storePath) {
|
||||
throw new Error("missing session store path");
|
||||
}
|
||||
|
||||
await writeSessionStore({
|
||||
entries: {
|
||||
main: {
|
||||
sessionId: "sess-main-before-write-reset",
|
||||
updatedAt: Date.now(),
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const writeWs = new WebSocket(`ws://127.0.0.1:${port}`);
|
||||
trackConnectChallengeNonce(writeWs);
|
||||
await new Promise<void>((resolve) => writeWs.once("open", resolve));
|
||||
await connectOk(writeWs, { scopes: ["operator.write"] });
|
||||
|
||||
const directReset = await rpcReq(writeWs, "sessions.reset", { key: "main" });
|
||||
expect(directReset.ok).toBe(false);
|
||||
expect(directReset.error?.message).toContain("missing scope: operator.admin");
|
||||
|
||||
vi.mocked(agentCommand).mockClear();
|
||||
const viaAgent = await rpcReq(writeWs, "agent", {
|
||||
message: "/reset",
|
||||
sessionKey: "main",
|
||||
idempotencyKey: "idem-agent-write-reset",
|
||||
});
|
||||
expect(viaAgent.ok).toBe(true);
|
||||
|
||||
const store = JSON.parse(await fs.readFile(storePath, "utf-8")) as Record<
|
||||
string,
|
||||
{ sessionId?: string }
|
||||
>;
|
||||
expect(store["agent:main:main"]?.sessionId).toBeDefined();
|
||||
expect(store["agent:main:main"]?.sessionId).not.toBe("sess-main-before-write-reset");
|
||||
|
||||
await vi.waitFor(() => expect(vi.mocked(agentCommand)).toHaveBeenCalled());
|
||||
const call = readAgentCommandCall();
|
||||
expect(typeof call.sessionId).toBe("string");
|
||||
expect(call.sessionId).not.toBe("sess-main-before-write-reset");
|
||||
|
||||
writeWs.close();
|
||||
});
|
||||
});
|
||||
|
||||
test("agent ack response then final response", { timeout: 8000 }, async () => {
|
||||
const ackP = onceMessage(
|
||||
ws,
|
||||
|
||||
364
src/gateway/session-reset-service.ts
Normal file
364
src/gateway/session-reset-service.ts
Normal file
@@ -0,0 +1,364 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { getAcpSessionManager } from "../acp/control-plane/manager.js";
|
||||
import { resolveDefaultAgentId } from "../agents/agent-scope.js";
|
||||
import { clearBootstrapSnapshot } from "../agents/bootstrap-cache.js";
|
||||
import { abortEmbeddedPiRun, waitForEmbeddedPiRunEnd } from "../agents/pi-embedded.js";
|
||||
import { stopSubagentsForRequester } from "../auto-reply/reply/abort.js";
|
||||
import { clearSessionQueues } from "../auto-reply/reply/queue.js";
|
||||
import { closeTrackedBrowserTabsForSessions } from "../browser/session-tab-registry.js";
|
||||
import { loadConfig } from "../config/config.js";
|
||||
import {
|
||||
snapshotSessionOrigin,
|
||||
type SessionEntry,
|
||||
updateSessionStore,
|
||||
} from "../config/sessions.js";
|
||||
import { unbindThreadBindingsBySessionKey } from "../discord/monitor/thread-bindings.js";
|
||||
import { logVerbose } from "../globals.js";
|
||||
import { createInternalHookEvent, triggerInternalHook } from "../hooks/internal-hooks.js";
|
||||
import { getGlobalHookRunner } from "../plugins/hook-runner-global.js";
|
||||
import {
|
||||
isSubagentSessionKey,
|
||||
normalizeAgentId,
|
||||
parseAgentSessionKey,
|
||||
} from "../routing/session-key.js";
|
||||
import { ErrorCodes, errorShape } from "./protocol/index.js";
|
||||
import {
|
||||
archiveSessionTranscripts,
|
||||
loadSessionEntry,
|
||||
pruneLegacyStoreKeys,
|
||||
resolveGatewaySessionStoreTarget,
|
||||
resolveSessionModelRef,
|
||||
} from "./session-utils.js";
|
||||
|
||||
const ACP_RUNTIME_CLEANUP_TIMEOUT_MS = 15_000;
|
||||
|
||||
/**
 * Resolve the canonical store key for `key`, migrate any entry stored under
 * a legacy alias onto the canonical key, and prune the leftover alias keys
 * from the store (mutating `params.store` in place).
 *
 * Returns the resolved target, the canonical (primary) key, and whatever
 * entry now lives under it — `undefined` when no alias held one either.
 */
function migrateAndPruneSessionStoreKey(params: {
  cfg: ReturnType<typeof loadConfig>;
  key: string;
  store: Record<string, SessionEntry>;
}) {
  const target = resolveGatewaySessionStoreTarget({
    cfg: params.cfg,
    key: params.key,
    store: params.store,
  });
  const primaryKey = target.canonicalKey;
  if (!params.store[primaryKey]) {
    // Nothing under the canonical key yet — adopt the first legacy-keyed
    // entry (if any) before the aliases are pruned away.
    const existingKey = target.storeKeys.find((candidate) => Boolean(params.store[candidate]));
    if (existingKey) {
      params.store[primaryKey] = params.store[existingKey];
    }
  }
  pruneLegacyStoreKeys({
    store: params.store,
    canonicalKey: primaryKey,
    candidates: target.storeKeys,
  });
  return { target, primaryKey, entry: params.store[primaryKey] };
}
|
||||
|
||||
function stripRuntimeModelState(entry?: SessionEntry): SessionEntry | undefined {
|
||||
if (!entry) {
|
||||
return entry;
|
||||
}
|
||||
return {
|
||||
...entry,
|
||||
model: undefined,
|
||||
modelProvider: undefined,
|
||||
contextTokens: undefined,
|
||||
systemPromptReport: undefined,
|
||||
};
|
||||
}
|
||||
|
||||
export function archiveSessionTranscriptsForSession(params: {
|
||||
sessionId: string | undefined;
|
||||
storePath: string;
|
||||
sessionFile?: string;
|
||||
agentId?: string;
|
||||
reason: "reset" | "deleted";
|
||||
}): string[] {
|
||||
if (!params.sessionId) {
|
||||
return [];
|
||||
}
|
||||
return archiveSessionTranscripts({
|
||||
sessionId: params.sessionId,
|
||||
storePath: params.storePath,
|
||||
sessionFile: params.sessionFile,
|
||||
agentId: params.agentId,
|
||||
reason: params.reason,
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Fire the "session unbound" lifecycle side effects for a session key that
 * is being reset or deleted.
 *
 * Always unbinds any thread bindings pointing at the session (with a
 * farewell message). Unless `emitHooks` is explicitly false, it then runs
 * the plugin `subagent_ended` hook — when a runner is installed and has
 * listeners — so observers can react to the teardown.
 */
export async function emitSessionUnboundLifecycleEvent(params: {
  targetSessionKey: string;
  reason: "session-reset" | "session-delete";
  emitHooks?: boolean;
}) {
  // Subagent keys are reported as "subagent"; everything else as "acp".
  const targetKind = isSubagentSessionKey(params.targetSessionKey) ? "subagent" : "acp";
  unbindThreadBindingsBySessionKey({
    targetSessionKey: params.targetSessionKey,
    targetKind,
    reason: params.reason,
    sendFarewell: true,
  });

  if (params.emitHooks === false) {
    return;
  }

  const hookRunner = getGlobalHookRunner();
  if (!hookRunner?.hasHooks("subagent_ended")) {
    // No runner registered, or nobody listening — skip the hook dispatch.
    return;
  }
  await hookRunner.runSubagentEnded(
    {
      targetSessionKey: params.targetSessionKey,
      targetKind,
      reason: params.reason,
      sendFarewell: true,
      // Map the lifecycle reason onto the hook's outcome vocabulary.
      outcome: params.reason === "session-reset" ? "reset" : "deleted",
    },
    {
      childSessionKey: params.targetSessionKey,
    },
  );
}
|
||||
|
||||
/**
 * Stop all runtime activity tied to a session before its store entry is
 * mutated: clears queued auto-replies, stops subagents the session spawned,
 * aborts any embedded pi run, drops the bootstrap snapshot, and closes
 * browser tabs tracked under any of the session's keys.
 *
 * Returns an UNAVAILABLE error shape when an embedded run fails to end
 * within 15s (the session is "still active"); otherwise `undefined`.
 */
async function ensureSessionRuntimeCleanup(params: {
  cfg: ReturnType<typeof loadConfig>;
  key: string;
  target: ReturnType<typeof resolveGatewaySessionStoreTarget>;
  sessionId?: string;
}) {
  const closeTrackedBrowserTabs = async () => {
    // Tabs may be registered under the raw key, the canonical key, any
    // legacy store key, or the session id itself — close all of them.
    const closeKeys = new Set<string>([
      params.key,
      params.target.canonicalKey,
      ...params.target.storeKeys,
      params.sessionId ?? "",
    ]);
    return await closeTrackedBrowserTabsForSessions({
      sessionKeys: [...closeKeys],
      onWarn: (message) => logVerbose(message),
    });
  };

  // Queued replies may be keyed by store key, canonical key, or session id.
  const queueKeys = new Set<string>(params.target.storeKeys);
  queueKeys.add(params.target.canonicalKey);
  if (params.sessionId) {
    queueKeys.add(params.sessionId);
  }
  clearSessionQueues([...queueKeys]);
  stopSubagentsForRequester({ cfg: params.cfg, requesterSessionKey: params.target.canonicalKey });
  if (!params.sessionId) {
    // No live session id — nothing to abort; just drop caches and tabs.
    clearBootstrapSnapshot(params.target.canonicalKey);
    await closeTrackedBrowserTabs();
    return undefined;
  }
  abortEmbeddedPiRun(params.sessionId);
  // Give the embedded run up to 15s to wind down before declaring failure.
  const ended = await waitForEmbeddedPiRunEnd(params.sessionId, 15_000);
  clearBootstrapSnapshot(params.target.canonicalKey);
  if (ended) {
    await closeTrackedBrowserTabs();
    return undefined;
  }
  return errorShape(
    ErrorCodes.UNAVAILABLE,
    `Session ${params.key} is still active; try again in a moment.`,
  );
}
|
||||
|
||||
async function runAcpCleanupStep(params: {
|
||||
op: () => Promise<void>;
|
||||
}): Promise<{ status: "ok" } | { status: "timeout" } | { status: "error"; error: unknown }> {
|
||||
let timer: NodeJS.Timeout | undefined;
|
||||
const timeoutPromise = new Promise<{ status: "timeout" }>((resolve) => {
|
||||
timer = setTimeout(() => resolve({ status: "timeout" }), ACP_RUNTIME_CLEANUP_TIMEOUT_MS);
|
||||
});
|
||||
const opPromise = params
|
||||
.op()
|
||||
.then(() => ({ status: "ok" as const }))
|
||||
.catch((error: unknown) => ({ status: "error" as const, error }));
|
||||
const outcome = await Promise.race([opPromise, timeoutPromise]);
|
||||
if (timer) {
|
||||
clearTimeout(timer);
|
||||
}
|
||||
return outcome;
|
||||
}
|
||||
|
||||
/**
 * Cancel and then close the ACP runtime session bound to `sessionKey`,
 * when the store entry indicates one exists (`entry.acp`).
 *
 * Both steps run under the shared cleanup timeout via `runAcpCleanupStep`.
 * A timeout on either step returns an UNAVAILABLE error shape ("still
 * active"); step errors are logged verbosely but do not block the
 * reset/delete. Returns `undefined` when cleanup completed (or there was
 * nothing to tear down).
 */
async function closeAcpRuntimeForSession(params: {
  cfg: ReturnType<typeof loadConfig>;
  sessionKey: string;
  entry?: SessionEntry;
  reason: "session-reset" | "session-delete";
}) {
  if (!params.entry?.acp) {
    // Not an ACP-backed session — nothing to tear down.
    return undefined;
  }
  const acpManager = getAcpSessionManager();
  const cancelOutcome = await runAcpCleanupStep({
    op: async () => {
      await acpManager.cancelSession({
        cfg: params.cfg,
        sessionKey: params.sessionKey,
        reason: params.reason,
      });
    },
  });
  if (cancelOutcome.status === "timeout") {
    return errorShape(
      ErrorCodes.UNAVAILABLE,
      `Session ${params.sessionKey} is still active; try again in a moment.`,
    );
  }
  if (cancelOutcome.status === "error") {
    // Best-effort: a failed cancel must not prevent the close attempt below.
    logVerbose(
      `sessions.${params.reason}: ACP cancel failed for ${params.sessionKey}: ${String(cancelOutcome.error)}`,
    );
  }

  const closeOutcome = await runAcpCleanupStep({
    op: async () => {
      await acpManager.closeSession({
        cfg: params.cfg,
        sessionKey: params.sessionKey,
        reason: params.reason,
        // Tolerate a session/backend that is already gone.
        requireAcpSession: false,
        allowBackendUnavailable: true,
      });
    },
  });
  if (closeOutcome.status === "timeout") {
    return errorShape(
      ErrorCodes.UNAVAILABLE,
      `Session ${params.sessionKey} is still active; try again in a moment.`,
    );
  }
  if (closeOutcome.status === "error") {
    // Logged only — close failures are non-fatal for the mutation flow.
    logVerbose(
      `sessions.${params.reason}: ACP runtime close failed for ${params.sessionKey}: ${String(closeOutcome.error)}`,
    );
  }
  return undefined;
}
|
||||
|
||||
export async function cleanupSessionBeforeMutation(params: {
|
||||
cfg: ReturnType<typeof loadConfig>;
|
||||
key: string;
|
||||
target: ReturnType<typeof resolveGatewaySessionStoreTarget>;
|
||||
entry: SessionEntry | undefined;
|
||||
legacyKey?: string;
|
||||
canonicalKey?: string;
|
||||
reason: "session-reset" | "session-delete";
|
||||
}) {
|
||||
const cleanupError = await ensureSessionRuntimeCleanup({
|
||||
cfg: params.cfg,
|
||||
key: params.key,
|
||||
target: params.target,
|
||||
sessionId: params.entry?.sessionId,
|
||||
});
|
||||
if (cleanupError) {
|
||||
return cleanupError;
|
||||
}
|
||||
return await closeAcpRuntimeForSession({
|
||||
cfg: params.cfg,
|
||||
sessionKey: params.legacyKey ?? params.canonicalKey ?? params.target.canonicalKey ?? params.key,
|
||||
entry: params.entry,
|
||||
reason: params.reason,
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Reset a gateway session in place: fire the pre-mutation hook, tear down
 * runtime/ACP state, then replace the store entry with a freshly minted
 * session id while carrying over user-facing preferences (model, labels,
 * verbosity levels) and zeroing token counters.
 *
 * @returns `{ ok: false, error }` when pre-mutation cleanup could not
 *   complete; otherwise `{ ok: true }` with the canonical key and new entry.
 */
export async function performGatewaySessionReset(params: {
  key: string;
  reason: "new" | "reset";
  commandSource: string;
}): Promise<
  | { ok: true; key: string; entry: SessionEntry }
  | { ok: false; error: ReturnType<typeof errorShape> }
> {
  // Resolve config and the store target once, up front.
  const { cfg, target, storePath } = (() => {
    const cfg = loadConfig();
    const target = resolveGatewaySessionStoreTarget({ cfg, key: params.key });
    return { cfg, target, storePath: target.storePath };
  })();
  const { entry, legacyKey, canonicalKey } = loadSessionEntry(params.key);
  const hadExistingEntry = Boolean(entry);
  // Notify hooks before any mutation; the entry passed is the pre-reset state.
  const hookEvent = createInternalHookEvent(
    "command",
    params.reason,
    target.canonicalKey ?? params.key,
    {
      sessionEntry: entry,
      previousSessionEntry: entry,
      commandSource: params.commandSource,
      cfg,
    },
  );
  await triggerInternalHook(hookEvent);
  // NOTE(review): cleanup always uses "session-reset" even when params.reason
  // is "new" — confirm that is intentional for the "new" command path.
  const mutationCleanupError = await cleanupSessionBeforeMutation({
    cfg,
    key: params.key,
    target,
    entry,
    legacyKey,
    canonicalKey,
    reason: "session-reset",
  });
  if (mutationCleanupError) {
    return { ok: false, error: mutationCleanupError };
  }

  // Captured inside the store update so transcripts of the old session can be
  // archived after the new entry is committed.
  let oldSessionId: string | undefined;
  let oldSessionFile: string | undefined;
  const next = await updateSessionStore(storePath, (store) => {
    const { primaryKey } = migrateAndPruneSessionStoreKey({ cfg, key: params.key, store });
    const currentEntry = store[primaryKey];
    const resetEntry = stripRuntimeModelState(currentEntry);
    const parsed = parseAgentSessionKey(primaryKey);
    const sessionAgentId = normalizeAgentId(parsed?.agentId ?? resolveDefaultAgentId(cfg));
    const resolvedModel = resolveSessionModelRef(cfg, resetEntry, sessionAgentId);
    oldSessionId = currentEntry?.sessionId;
    oldSessionFile = currentEntry?.sessionFile;
    const now = Date.now();
    // New entry: fresh id, cleared run flags and token counters; preferences
    // and routing metadata carried over from the previous entry.
    const nextEntry: SessionEntry = {
      sessionId: randomUUID(),
      updatedAt: now,
      systemSent: false,
      abortedLastRun: false,
      thinkingLevel: currentEntry?.thinkingLevel,
      verboseLevel: currentEntry?.verboseLevel,
      reasoningLevel: currentEntry?.reasoningLevel,
      responseUsage: currentEntry?.responseUsage,
      model: resolvedModel.model,
      modelProvider: resolvedModel.provider,
      contextTokens: resetEntry?.contextTokens,
      sendPolicy: currentEntry?.sendPolicy,
      label: currentEntry?.label,
      origin: snapshotSessionOrigin(currentEntry),
      lastChannel: currentEntry?.lastChannel,
      lastTo: currentEntry?.lastTo,
      skillsSnapshot: currentEntry?.skillsSnapshot,
      inputTokens: 0,
      outputTokens: 0,
      totalTokens: 0,
      totalTokensFresh: true,
    };
    store[primaryKey] = nextEntry;
    return nextEntry;
  });

  // Archive the replaced session's transcripts (best effort, fire-and-forget).
  archiveSessionTranscriptsForSession({
    sessionId: oldSessionId,
    storePath,
    sessionFile: oldSessionFile,
    agentId: target.agentId,
    reason: "reset",
  });
  // Only emit the unbound lifecycle event if there was something to unbind.
  if (hadExistingEntry) {
    await emitSessionUnboundLifecycleEvent({
      targetSessionKey: target.canonicalKey ?? params.key,
      reason: "session-reset",
    });
  }
  // NOTE(review): assumes target.canonicalKey is defined here — elsewhere it
  // is defaulted with `?? params.key`; confirm the type guarantees this.
  return { ok: true, key: target.canonicalKey, entry: next };
}
|
||||
86
src/infra/fs-pinned-write-helper.test.ts
Normal file
86
src/infra/fs-pinned-write-helper.test.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
import fs from "node:fs/promises";
import path from "node:path";
import { afterEach, describe, expect, it } from "vitest";
import { createTrackedTempDirs } from "../test-utils/tracked-temp-dirs.js";
import { runPinnedWriteHelper } from "./fs-pinned-write-helper.js";

// Temp directories created per test; removed after every case.
const tempDirs = createTrackedTempDirs();

afterEach(async () => {
  await tempDirs.cleanup();
});

// The pinned write helper relies on a python3 child with dir_fd support, so
// every case is POSIX-only (skipped on win32).
describe("fs pinned write helper", () => {
  // Happy path: buffer input lands at root/<parent>/<basename> and the helper
  // reports a plausible dev/ino identity for the written file.
  it.runIf(process.platform !== "win32")("writes through a pinned parent directory", async () => {
    const root = await tempDirs.make("openclaw-fs-pinned-root-");

    const identity = await runPinnedWriteHelper({
      rootPath: root,
      relativeParentPath: "nested/deeper",
      basename: "note.txt",
      mkdir: true,
      mode: 0o600,
      input: {
        kind: "buffer",
        data: "hello",
      },
    });

    await expect(
      fs.readFile(path.join(root, "nested", "deeper", "note.txt"), "utf8"),
    ).resolves.toBe("hello");
    expect(identity.dev).toBeGreaterThanOrEqual(0);
    expect(identity.ino).toBeGreaterThan(0);
  });

  // Security: a symlinked parent segment must be rejected outright, and no
  // temp file may ever materialize in the symlink's target outside the root.
  it.runIf(process.platform !== "win32")(
    "rejects symlink-parent writes instead of creating a temp file outside root",
    async () => {
      const root = await tempDirs.make("openclaw-fs-pinned-root-");
      const outside = await tempDirs.make("openclaw-fs-pinned-outside-");
      await fs.symlink(outside, path.join(root, "alias"));

      await expect(
        runPinnedWriteHelper({
          rootPath: root,
          relativeParentPath: "alias",
          basename: "escape.txt",
          mkdir: false,
          mode: 0o600,
          input: {
            kind: "buffer",
            data: "owned",
          },
        }),
      ).rejects.toThrow();

      // Neither the final file nor any leftover temp file may exist outside.
      await expect(fs.stat(path.join(outside, "escape.txt"))).rejects.toThrow();
      const outsideFiles = await fs.readdir(outside);
      expect(outsideFiles).toEqual([]);
    },
  );

  // Streamed input goes through the same pinned write path as buffers.
  it.runIf(process.platform !== "win32")("accepts streamed input", async () => {
    const root = await tempDirs.make("openclaw-fs-pinned-root-");
    const sourcePath = path.join(await tempDirs.make("openclaw-fs-pinned-src-"), "source.txt");
    await fs.writeFile(sourcePath, "streamed", "utf8");
    const sourceHandle = await fs.open(sourcePath, "r");
    try {
      await runPinnedWriteHelper({
        rootPath: root,
        relativeParentPath: "",
        basename: "stream.txt",
        mkdir: true,
        mode: 0o600,
        input: {
          kind: "stream",
          stream: sourceHandle.createReadStream(),
        },
      });
    } finally {
      await sourceHandle.close();
    }

    await expect(fs.readFile(path.join(root, "stream.txt"), "utf8")).resolves.toBe("streamed");
  });
});
|
||||
230
src/infra/fs-pinned-write-helper.ts
Normal file
230
src/infra/fs-pinned-write-helper.ts
Normal file
@@ -0,0 +1,230 @@
|
||||
import { spawn } from "node:child_process";
|
||||
import { once } from "node:events";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import type { Readable } from "node:stream";
|
||||
import { pipeline } from "node:stream/promises";
|
||||
import type { FileIdentityStat } from "./file-identity.js";
|
||||
|
||||
/**
 * Payload for the pinned-write helpers: either an in-memory string/Buffer
 * (optional encoding applies to strings only) or a readable stream that is
 * piped to the helper child's stdin.
 */
export type PinnedWriteInput =
  | { kind: "buffer"; data: string | Buffer; encoding?: BufferEncoding }
  | { kind: "stream"; stream: Readable };
|
||||
|
||||
/**
 * Source of a python3 "pinned write" program, executed via `python3 -c`.
 * argv: [root, relativeParent, basename, mkdir("1"/"0"), octalMode]; the file
 * content arrives on stdin. The program walks the parent path one segment at
 * a time with dir_fd-relative opens using O_NOFOLLOW where available (so a
 * symlinked segment cannot redirect the write), creates a randomized
 * O_EXCL temp file in the pinned parent directory, fsyncs it, atomically
 * os.replace()s it over the basename, fsyncs the parent, and finally prints
 * "<st_dev>|<st_ino>" of the written file for the caller to verify.
 *
 * NOTE(review): the nested indentation of these Python lines appears to have
 * been collapsed by the diff rendering this was reviewed from — confirm the
 * real indentation against version control before editing these strings.
 */
const LOCAL_PINNED_WRITE_PYTHON = [
  "import errno",
  "import os",
  "import secrets",
  "import stat",
  "import sys",
  "",
  // Positional CLI arguments supplied by runPinnedWriteHelper.
  "root_path = sys.argv[1]",
  "relative_parent = sys.argv[2]",
  "basename = sys.argv[3]",
  'mkdir_enabled = sys.argv[4] == "1"',
  "file_mode = int(sys.argv[5], 8)",
  "",
  // Directory-open flags: require a directory and refuse to follow symlinks
  // where the platform supports those flags.
  "DIR_FLAGS = os.O_RDONLY",
  "if hasattr(os, 'O_DIRECTORY'):",
  " DIR_FLAGS |= os.O_DIRECTORY",
  "if hasattr(os, 'O_NOFOLLOW'):",
  " DIR_FLAGS |= os.O_NOFOLLOW",
  "",
  // Temp-file flags: exclusive create so an attacker cannot pre-place a file.
  "WRITE_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_EXCL",
  "if hasattr(os, 'O_NOFOLLOW'):",
  " WRITE_FLAGS |= os.O_NOFOLLOW",
  "",
  "def open_dir(path_value, dir_fd=None):",
  " return os.open(path_value, DIR_FLAGS, dir_fd=dir_fd)",
  "",
  // Walk the relative parent one segment at a time, always opening relative
  // to the previous fd; '..' is rejected outright.
  "def walk_parent(root_fd, rel_parent, mkdir_enabled):",
  " current_fd = os.dup(root_fd)",
  " try:",
  " for segment in [part for part in rel_parent.split('/') if part and part != '.']:",
  " if segment == '..':",
  " raise OSError(errno.EPERM, 'path traversal is not allowed', segment)",
  " try:",
  " next_fd = open_dir(segment, dir_fd=current_fd)",
  " except FileNotFoundError:",
  " if not mkdir_enabled:",
  " raise",
  " os.mkdir(segment, 0o777, dir_fd=current_fd)",
  " next_fd = open_dir(segment, dir_fd=current_fd)",
  " os.close(current_fd)",
  " current_fd = next_fd",
  " return current_fd",
  " except Exception:",
  " os.close(current_fd)",
  " raise",
  "",
  // Allocate a hidden, randomized temp name next to the target (bounded retries).
  "def create_temp_file(parent_fd, basename, mode):",
  " prefix = '.' + basename + '.'",
  " for _ in range(128):",
  " candidate = prefix + secrets.token_hex(6) + '.tmp'",
  " try:",
  " fd = os.open(candidate, WRITE_FLAGS, mode, dir_fd=parent_fd)",
  " return candidate, fd",
  " except FileExistsError:",
  " continue",
  " raise RuntimeError('failed to allocate pinned temp file')",
  "",
  // Main flow: pin the parent, stream stdin into the temp file, then replace.
  "root_fd = open_dir(root_path)",
  "parent_fd = None",
  "temp_fd = None",
  "temp_name = None",
  "try:",
  " parent_fd = walk_parent(root_fd, relative_parent, mkdir_enabled)",
  " temp_name, temp_fd = create_temp_file(parent_fd, basename, file_mode)",
  " while True:",
  " chunk = sys.stdin.buffer.read(65536)",
  " if not chunk:",
  " break",
  // NOTE(review): os.write may perform a partial write; a loop over the
  // returned byte count would be stricter — confirm whether this matters here.
  " os.write(temp_fd, chunk)",
  " os.fsync(temp_fd)",
  " os.close(temp_fd)",
  " temp_fd = None",
  " os.replace(temp_name, basename, src_dir_fd=parent_fd, dst_dir_fd=parent_fd)",
  " temp_name = None",
  " os.fsync(parent_fd)",
  " result_stat = os.stat(basename, dir_fd=parent_fd, follow_symlinks=False)",
  " print(f'{result_stat.st_dev}|{result_stat.st_ino}')",
  "finally:",
  // Cleanup: close stray fds and unlink the temp file if replace never ran.
  " if temp_fd is not None:",
  " os.close(temp_fd)",
  " if temp_name is not None and parent_fd is not None:",
  " try:",
  " os.unlink(temp_name, dir_fd=parent_fd)",
  " except FileNotFoundError:",
  " pass",
  " if parent_fd is not None:",
  " os.close(parent_fd)",
  " os.close(root_fd)",
].join("\n");
|
||||
|
||||
function parsePinnedIdentity(stdout: string): FileIdentityStat {
|
||||
const line = stdout
|
||||
.trim()
|
||||
.split(/\r?\n/)
|
||||
.map((value) => value.trim())
|
||||
.filter(Boolean)
|
||||
.at(-1);
|
||||
if (!line) {
|
||||
throw new Error("Pinned write helper returned no identity");
|
||||
}
|
||||
const [devRaw, inoRaw] = line.split("|");
|
||||
const dev = Number.parseInt(devRaw ?? "", 10);
|
||||
const ino = Number.parseInt(inoRaw ?? "", 10);
|
||||
if (!Number.isFinite(dev) || !Number.isFinite(ino)) {
|
||||
throw new Error(`Pinned write helper returned invalid identity: ${line}`);
|
||||
}
|
||||
return { dev, ino };
|
||||
}
|
||||
|
||||
/**
 * Run the embedded python3 pinned-write program to atomically write the given
 * input to `<rootPath>/<relativeParentPath>/<basename>` without following
 * symlinked parent segments.
 *
 * The input (buffer or stream) is fed to the child's stdin; the child's
 * stdout is parsed for the written file's dev/ino identity, and its stderr is
 * surfaced in the error message on non-zero exit.
 *
 * @returns the dev/ino identity reported by the helper, for post-write
 *   verification by the caller.
 */
export async function runPinnedWriteHelper(params: {
  rootPath: string;
  relativeParentPath: string;
  basename: string;
  mkdir: boolean;
  mode: number;
  input: PinnedWriteInput;
}): Promise<FileIdentityStat> {
  const child = spawn(
    "python3",
    [
      "-c",
      LOCAL_PINNED_WRITE_PYTHON,
      params.rootPath,
      params.relativeParentPath,
      params.basename,
      params.mkdir ? "1" : "0",
      // Mode is passed as an octal string; 0 falls back to 0o600.
      (params.mode || 0o600).toString(8),
    ],
    {
      stdio: ["pipe", "pipe", "pipe"],
    },
  );

  // Accumulate child output; optional chaining tolerates stream objects that
  // lack setEncoding.
  let stdout = "";
  let stderr = "";
  child.stdout.setEncoding?.("utf8");
  child.stderr.setEncoding?.("utf8");
  child.stdout.on("data", (chunk: string) => {
    stdout += chunk;
  });
  child.stderr.on("data", (chunk: string) => {
    stderr += chunk;
  });

  const exitPromise = once(child, "close") as Promise<[number | null, NodeJS.Signals | null]>;
  try {
    // Defensive: stdio was requested as "pipe", but if no stdin pipe exists,
    // fall back to the pure-Node implementation.
    if (!child.stdin) {
      const identity = await runPinnedWriteFallback(params);
      await exitPromise.catch(() => {});
      return identity;
    }

    if (params.input.kind === "buffer") {
      const input = params.input;
      // end() both writes the payload and closes stdin so the child sees EOF.
      await new Promise<void>((resolve, reject) => {
        child.stdin.once("error", reject);
        if (typeof input.data === "string") {
          child.stdin.end(input.data, input.encoding ?? "utf8", () => resolve());
          return;
        }
        child.stdin.end(input.data, () => resolve());
      });
    } else {
      // pipeline closes stdin when the source stream ends and propagates errors.
      await pipeline(params.input.stream, child.stdin);
    }

    const [code, signal] = await exitPromise;
    if (code !== 0) {
      // Prefer the child's own stderr; fall back to a generic exit message.
      throw new Error(
        stderr.trim() ||
          `Pinned write helper failed with code ${code ?? "null"} (${signal ?? "?"})`,
      );
    }
    return parsePinnedIdentity(stdout);
  } catch (error) {
    // On any failure make sure the child does not linger, then rethrow.
    child.kill("SIGKILL");
    await exitPromise.catch(() => {});
    throw error;
  }
}
|
||||
|
||||
async function runPinnedWriteFallback(params: {
|
||||
rootPath: string;
|
||||
relativeParentPath: string;
|
||||
basename: string;
|
||||
mkdir: boolean;
|
||||
mode: number;
|
||||
input: PinnedWriteInput;
|
||||
}): Promise<FileIdentityStat> {
|
||||
const parentPath = params.relativeParentPath
|
||||
? path.join(params.rootPath, ...params.relativeParentPath.split("/"))
|
||||
: params.rootPath;
|
||||
if (params.mkdir) {
|
||||
await fs.mkdir(parentPath, { recursive: true });
|
||||
}
|
||||
const targetPath = path.join(parentPath, params.basename);
|
||||
const tempPath = path.join(parentPath, `.${params.basename}.fallback.tmp`);
|
||||
if (params.input.kind === "buffer") {
|
||||
if (typeof params.input.data === "string") {
|
||||
await fs.writeFile(tempPath, params.input.data, {
|
||||
encoding: params.input.encoding ?? "utf8",
|
||||
mode: params.mode,
|
||||
});
|
||||
} else {
|
||||
await fs.writeFile(tempPath, params.input.data, { mode: params.mode });
|
||||
}
|
||||
} else {
|
||||
const handle = await fs.open(tempPath, "w", params.mode);
|
||||
try {
|
||||
await pipeline(params.input.stream, handle.createWriteStream());
|
||||
} finally {
|
||||
await handle.close().catch(() => {});
|
||||
}
|
||||
}
|
||||
await fs.rename(tempPath, targetPath);
|
||||
const stat = await fs.stat(targetPath);
|
||||
return { dev: stat.dev, ino: stat.ino };
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import {
|
||||
createRebindableDirectoryAlias,
|
||||
withRealpathSymlinkRebindRace,
|
||||
@@ -36,7 +36,9 @@ async function expectWriteOpenRaceIsBlocked(params: {
|
||||
symlinkTarget: params.outsideDir,
|
||||
timing: "before-realpath",
|
||||
run: async () => {
|
||||
await expect(params.runWrite()).rejects.toMatchObject({ code: "outside-workspace" });
|
||||
await expect(params.runWrite()).rejects.toMatchObject({
|
||||
code: expect.stringMatching(/outside-workspace|invalid-path/),
|
||||
});
|
||||
},
|
||||
});
|
||||
}
|
||||
@@ -263,120 +265,6 @@ describe("fs-safe", () => {
|
||||
await expect(fs.readFile(targetPath, "utf8")).resolves.toBe("seed\nnext");
|
||||
});
|
||||
|
||||
it("does not truncate existing target when atomic rename fails", async () => {
|
||||
const root = await tempDirs.make("openclaw-fs-safe-root-");
|
||||
const targetPath = path.join(root, "nested", "out.txt");
|
||||
await fs.mkdir(path.dirname(targetPath), { recursive: true });
|
||||
await fs.writeFile(targetPath, "existing-content");
|
||||
const renameSpy = vi
|
||||
.spyOn(fs, "rename")
|
||||
.mockRejectedValue(Object.assign(new Error("rename blocked"), { code: "EACCES" }));
|
||||
try {
|
||||
await expect(
|
||||
writeFileWithinRoot({
|
||||
rootDir: root,
|
||||
relativePath: "nested/out.txt",
|
||||
data: "new-content",
|
||||
}),
|
||||
).rejects.toMatchObject({ code: "EACCES" });
|
||||
} finally {
|
||||
renameSpy.mockRestore();
|
||||
}
|
||||
await expect(fs.readFile(targetPath, "utf8")).resolves.toBe("existing-content");
|
||||
});
|
||||
|
||||
it.runIf(process.platform !== "win32")(
|
||||
"rejects when a hardlink appears after atomic write rename",
|
||||
async () => {
|
||||
const root = await tempDirs.make("openclaw-fs-safe-root-");
|
||||
const targetPath = path.join(root, "nested", "out.txt");
|
||||
const aliasPath = path.join(root, "nested", "alias.txt");
|
||||
await fs.mkdir(path.dirname(targetPath), { recursive: true });
|
||||
await fs.writeFile(targetPath, "existing-content");
|
||||
const realRename = fs.rename.bind(fs);
|
||||
let linked = false;
|
||||
const renameSpy = vi.spyOn(fs, "rename").mockImplementation(async (...args) => {
|
||||
await realRename(...args);
|
||||
if (!linked) {
|
||||
linked = true;
|
||||
await fs.link(String(args[1]), aliasPath);
|
||||
}
|
||||
});
|
||||
try {
|
||||
await expect(
|
||||
writeFileWithinRoot({
|
||||
rootDir: root,
|
||||
relativePath: "nested/out.txt",
|
||||
data: "new-content",
|
||||
}),
|
||||
).rejects.toMatchObject({ code: "invalid-path" });
|
||||
} finally {
|
||||
renameSpy.mockRestore();
|
||||
}
|
||||
await expect(fs.readFile(aliasPath, "utf8")).resolves.toBe("new-content");
|
||||
},
|
||||
);
|
||||
|
||||
it("does not truncate existing target when atomic copy rename fails", async () => {
|
||||
const root = await tempDirs.make("openclaw-fs-safe-root-");
|
||||
const sourceDir = await tempDirs.make("openclaw-fs-safe-source-");
|
||||
const sourcePath = path.join(sourceDir, "in.txt");
|
||||
const targetPath = path.join(root, "nested", "copied.txt");
|
||||
await fs.mkdir(path.dirname(targetPath), { recursive: true });
|
||||
await fs.writeFile(sourcePath, "copy-new");
|
||||
await fs.writeFile(targetPath, "copy-existing");
|
||||
const renameSpy = vi
|
||||
.spyOn(fs, "rename")
|
||||
.mockRejectedValue(Object.assign(new Error("rename blocked"), { code: "EACCES" }));
|
||||
try {
|
||||
await expect(
|
||||
copyFileWithinRoot({
|
||||
sourcePath,
|
||||
rootDir: root,
|
||||
relativePath: "nested/copied.txt",
|
||||
}),
|
||||
).rejects.toMatchObject({ code: "EACCES" });
|
||||
} finally {
|
||||
renameSpy.mockRestore();
|
||||
}
|
||||
await expect(fs.readFile(targetPath, "utf8")).resolves.toBe("copy-existing");
|
||||
});
|
||||
|
||||
it.runIf(process.platform !== "win32")(
|
||||
"rejects when a hardlink appears after atomic copy rename",
|
||||
async () => {
|
||||
const root = await tempDirs.make("openclaw-fs-safe-root-");
|
||||
const sourceDir = await tempDirs.make("openclaw-fs-safe-source-");
|
||||
const sourcePath = path.join(sourceDir, "copy-source.txt");
|
||||
const targetPath = path.join(root, "nested", "copied.txt");
|
||||
const aliasPath = path.join(root, "nested", "alias.txt");
|
||||
await fs.mkdir(path.dirname(targetPath), { recursive: true });
|
||||
await fs.writeFile(sourcePath, "copy-new");
|
||||
await fs.writeFile(targetPath, "copy-existing");
|
||||
const realRename = fs.rename.bind(fs);
|
||||
let linked = false;
|
||||
const renameSpy = vi.spyOn(fs, "rename").mockImplementation(async (...args) => {
|
||||
await realRename(...args);
|
||||
if (!linked) {
|
||||
linked = true;
|
||||
await fs.link(String(args[1]), aliasPath);
|
||||
}
|
||||
});
|
||||
try {
|
||||
await expect(
|
||||
copyFileWithinRoot({
|
||||
sourcePath,
|
||||
rootDir: root,
|
||||
relativePath: "nested/copied.txt",
|
||||
}),
|
||||
).rejects.toMatchObject({ code: "invalid-path" });
|
||||
} finally {
|
||||
renameSpy.mockRestore();
|
||||
}
|
||||
await expect(fs.readFile(aliasPath, "utf8")).resolves.toBe("copy-new");
|
||||
},
|
||||
);
|
||||
|
||||
it("copies a file within root safely", async () => {
|
||||
const root = await tempDirs.make("openclaw-fs-safe-root-");
|
||||
const sourceDir = await tempDirs.make("openclaw-fs-safe-source-");
|
||||
@@ -537,47 +425,6 @@ describe("fs-safe", () => {
|
||||
await expect(fs.readFile(outsideTarget, "utf8")).resolves.toBe("X".repeat(4096));
|
||||
});
|
||||
|
||||
it("cleans up created out-of-root file when symlink retarget races create path", async () => {
|
||||
const root = await tempDirs.make("openclaw-fs-safe-root-");
|
||||
const inside = path.join(root, "inside");
|
||||
const outside = await tempDirs.make("openclaw-fs-safe-outside-");
|
||||
await fs.mkdir(inside, { recursive: true });
|
||||
const outsideTarget = path.join(outside, "target.txt");
|
||||
const slot = path.join(root, "slot");
|
||||
await createRebindableDirectoryAlias({
|
||||
aliasPath: slot,
|
||||
targetPath: inside,
|
||||
});
|
||||
|
||||
const realOpen = fs.open.bind(fs);
|
||||
let flipped = false;
|
||||
const openSpy = vi.spyOn(fs, "open").mockImplementation(async (...args) => {
|
||||
const [filePath] = args;
|
||||
if (!flipped && String(filePath).endsWith(path.join("slot", "target.txt"))) {
|
||||
flipped = true;
|
||||
await createRebindableDirectoryAlias({
|
||||
aliasPath: slot,
|
||||
targetPath: outside,
|
||||
});
|
||||
}
|
||||
return await realOpen(...args);
|
||||
});
|
||||
try {
|
||||
await expect(
|
||||
writeFileWithinRoot({
|
||||
rootDir: root,
|
||||
relativePath: path.join("slot", "target.txt"),
|
||||
data: "new-content",
|
||||
mkdir: false,
|
||||
}),
|
||||
).rejects.toMatchObject({ code: "outside-workspace" });
|
||||
} finally {
|
||||
openSpy.mockRestore();
|
||||
}
|
||||
|
||||
await expect(fs.stat(outsideTarget)).rejects.toMatchObject({ code: "ENOENT" });
|
||||
});
|
||||
|
||||
it("returns not-found for missing files", async () => {
|
||||
const dir = await tempDirs.make("openclaw-fs-safe-");
|
||||
const missing = path.join(dir, "missing.txt");
|
||||
|
||||
@@ -5,9 +5,9 @@ import type { FileHandle } from "node:fs/promises";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { pipeline } from "node:stream/promises";
|
||||
import { logWarn } from "../logger.js";
|
||||
import { sameFileIdentity } from "./file-identity.js";
|
||||
import { runPinnedWriteHelper } from "./fs-pinned-write-helper.js";
|
||||
import { expandHomePrefix } from "./home-dir.js";
|
||||
import { assertNoPathAliasEscape } from "./path-alias-guards.js";
|
||||
import {
|
||||
@@ -332,13 +332,13 @@ async function writeTempFileForAtomicReplace(params: {
|
||||
async function verifyAtomicWriteResult(params: {
|
||||
rootDir: string;
|
||||
targetPath: string;
|
||||
expectedStat: Stats;
|
||||
expectedIdentity: { dev: number | bigint; ino: number | bigint };
|
||||
}): Promise<void> {
|
||||
const rootReal = await fs.realpath(params.rootDir);
|
||||
const rootWithSep = ensureTrailingSep(rootReal);
|
||||
const opened = await openVerifiedLocalFile(params.targetPath, { rejectHardlinks: true });
|
||||
try {
|
||||
if (!sameFileIdentity(opened.stat, params.expectedStat)) {
|
||||
if (!sameFileIdentity(opened.stat, params.expectedIdentity)) {
|
||||
throw new SafeOpenError("path-mismatch", "path changed during write");
|
||||
}
|
||||
if (!isPathInside(rootWithSep, opened.realPath)) {
|
||||
@@ -550,6 +550,195 @@ export async function writeFileWithinRoot(params: {
|
||||
data: string | Buffer;
|
||||
encoding?: BufferEncoding;
|
||||
mkdir?: boolean;
|
||||
}): Promise<void> {
|
||||
if (process.platform === "win32") {
|
||||
await writeFileWithinRootLegacy(params);
|
||||
return;
|
||||
}
|
||||
|
||||
const pinned = await resolvePinnedWriteTargetWithinRoot({
|
||||
rootDir: params.rootDir,
|
||||
relativePath: params.relativePath,
|
||||
});
|
||||
|
||||
const identity = await runPinnedWriteHelper({
|
||||
rootPath: pinned.rootReal,
|
||||
relativeParentPath: pinned.relativeParentPath,
|
||||
basename: pinned.basename,
|
||||
mkdir: params.mkdir !== false,
|
||||
mode: pinned.mode,
|
||||
input: {
|
||||
kind: "buffer",
|
||||
data: params.data,
|
||||
encoding: params.encoding,
|
||||
},
|
||||
}).catch((error) => {
|
||||
throw normalizePinnedWriteError(error);
|
||||
});
|
||||
|
||||
try {
|
||||
await verifyAtomicWriteResult({
|
||||
rootDir: params.rootDir,
|
||||
targetPath: pinned.targetPath,
|
||||
expectedIdentity: identity,
|
||||
});
|
||||
} catch (err) {
|
||||
emitWriteBoundaryWarning(`post-write verification failed: ${String(err)}`);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Copy a local source file to `relativePath` under `rootDir`. The source is
 * opened and verified first (optionally rejecting hardlinked sources and
 * enforcing `maxBytes`); the write goes through the pinned helper on POSIX
 * and the legacy path on Windows, then the result is re-verified.
 *
 * @throws SafeOpenError "too-large" when maxBytes is exceeded; normalized
 *   pinned-write errors; verification failures from verifyAtomicWriteResult.
 */
export async function copyFileWithinRoot(params: {
  sourcePath: string;
  rootDir: string;
  relativePath: string;
  maxBytes?: number;
  mkdir?: boolean;
  rejectSourceHardlinks?: boolean;
}): Promise<void> {
  const source = await openVerifiedLocalFile(params.sourcePath, {
    rejectHardlinks: params.rejectSourceHardlinks,
  });
  // Size check happens before any write work; close the handle on rejection.
  if (params.maxBytes !== undefined && source.stat.size > params.maxBytes) {
    await source.handle.close().catch(() => {});
    throw new SafeOpenError(
      "too-large",
      `file exceeds limit of ${params.maxBytes} bytes (got ${source.stat.size})`,
    );
  }

  try {
    if (process.platform === "win32") {
      await copyFileWithinRootLegacy(params, source);
      return;
    }

    const pinned = await resolvePinnedWriteTargetWithinRoot({
      rootDir: params.rootDir,
      relativePath: params.relativePath,
    });
    // Stream from the already-verified handle, so the bytes copied are from
    // the same file that passed the size/hardlink checks above.
    const sourceStream = source.handle.createReadStream();
    const identity = await runPinnedWriteHelper({
      rootPath: pinned.rootReal,
      relativeParentPath: pinned.relativeParentPath,
      basename: pinned.basename,
      mkdir: params.mkdir !== false,
      mode: pinned.mode,
      input: {
        kind: "stream",
        stream: sourceStream,
      },
    }).catch((error) => {
      // Map arbitrary helper failures onto the SafeOpenError surface.
      throw normalizePinnedWriteError(error);
    });
    try {
      // Confirm the file now at the target path is exactly what we wrote.
      await verifyAtomicWriteResult({
        rootDir: params.rootDir,
        targetPath: pinned.targetPath,
        expectedIdentity: identity,
      });
    } catch (err) {
      emitWriteBoundaryWarning(`post-copy verification failed: ${String(err)}`);
      throw err;
    }
  } finally {
    // Always release the source handle, on both legacy and pinned paths.
    await source.handle.close().catch(() => {});
  }
}
|
||||
|
||||
export async function writeFileFromPathWithinRoot(params: {
|
||||
rootDir: string;
|
||||
relativePath: string;
|
||||
sourcePath: string;
|
||||
mkdir?: boolean;
|
||||
}): Promise<void> {
|
||||
await copyFileWithinRoot({
|
||||
sourcePath: params.sourcePath,
|
||||
rootDir: params.rootDir,
|
||||
relativePath: params.relativePath,
|
||||
mkdir: params.mkdir,
|
||||
rejectSourceHardlinks: true,
|
||||
});
|
||||
}
|
||||
|
||||
/**
 * Resolve `relativePath` under `rootDir` into the pieces the pinned-write
 * helper needs: the real root path, a POSIX-style relative parent path, the
 * basename, and the file mode to apply.
 *
 * Rejects path-alias escapes and anything that resolves outside the root.
 * When the target already exists, its current permission bits (mode & 0o777)
 * are preserved; a missing target is fine and defaults to 0o600.
 */
async function resolvePinnedWriteTargetWithinRoot(params: {
  rootDir: string;
  relativePath: string;
}): Promise<{
  rootReal: string;
  targetPath: string;
  relativeParentPath: string;
  basename: string;
  mode: number;
}> {
  const { rootReal, rootWithSep, resolved } = await resolvePathWithinRoot(params);
  // Alias (symlink/junction) escapes are surfaced as "invalid-path".
  try {
    await assertNoPathAliasEscape({
      absolutePath: resolved,
      rootPath: rootReal,
      boundaryLabel: "root",
    });
  } catch (err) {
    throw new SafeOpenError("invalid-path", "path alias escape blocked", { cause: err });
  }

  // Reject anything that resolves above or outside the real root.
  const relativeResolved = path.relative(rootReal, resolved);
  if (relativeResolved.startsWith("..") || path.isAbsolute(relativeResolved)) {
    throw new SafeOpenError("outside-workspace", "file is outside workspace root");
  }
  // Normalize to forward slashes for the helper regardless of host platform.
  const relativePosix = relativeResolved
    ? relativeResolved.split(path.sep).join(path.posix.sep)
    : "";
  const basename = path.posix.basename(relativePosix);
  if (!basename || basename === "." || basename === "/") {
    throw new SafeOpenError("invalid-path", "invalid target path");
  }
  // Preserve an existing target's permissions; missing target => 0o600.
  let mode = 0o600;
  try {
    const opened = await openFileWithinRoot({
      rootDir: params.rootDir,
      relativePath: params.relativePath,
      rejectHardlinks: true,
    });
    try {
      mode = opened.stat.mode & 0o777;
      // The opened file's real path must still be inside the root.
      if (!isPathInside(rootWithSep, opened.realPath)) {
        throw new SafeOpenError("outside-workspace", "file is outside workspace root");
      }
    } finally {
      await opened.handle.close().catch(() => {});
    }
  } catch (err) {
    // Only "not-found" is acceptable here; anything else propagates.
    if (!(err instanceof SafeOpenError) || err.code !== "not-found") {
      throw err;
    }
  }

  return {
    rootReal,
    targetPath: resolved,
    // dirname(".") means the target sits directly in the root.
    relativeParentPath:
      path.posix.dirname(relativePosix) === "." ? "" : path.posix.dirname(relativePosix),
    basename,
    // Guard against a pathological 0 mode; helper expects a non-zero octal.
    mode: mode || 0o600,
  };
}
|
||||
|
||||
function normalizePinnedWriteError(error: unknown): Error {
|
||||
if (error instanceof SafeOpenError) {
|
||||
return error;
|
||||
}
|
||||
return new SafeOpenError("invalid-path", "path is not a regular file under root", {
|
||||
cause: error instanceof Error ? error : undefined,
|
||||
});
|
||||
}
|
||||
|
||||
async function writeFileWithinRootLegacy(params: {
|
||||
rootDir: string;
|
||||
relativePath: string;
|
||||
data: string | Buffer;
|
||||
encoding?: BufferEncoding;
|
||||
mkdir?: boolean;
|
||||
}): Promise<void> {
|
||||
const target = await openWritableFileWithinRoot({
|
||||
rootDir: params.rootDir,
|
||||
@@ -575,7 +764,7 @@ export async function writeFileWithinRoot(params: {
|
||||
await verifyAtomicWriteResult({
|
||||
rootDir: params.rootDir,
|
||||
targetPath: destinationPath,
|
||||
expectedStat: writtenStat,
|
||||
expectedIdentity: writtenStat,
|
||||
});
|
||||
} catch (err) {
|
||||
emitWriteBoundaryWarning(`post-write verification failed: ${String(err)}`);
|
||||
@@ -588,25 +777,17 @@ export async function writeFileWithinRoot(params: {
|
||||
}
|
||||
}
|
||||
|
||||
export async function copyFileWithinRoot(params: {
|
||||
sourcePath: string;
|
||||
rootDir: string;
|
||||
relativePath: string;
|
||||
maxBytes?: number;
|
||||
mkdir?: boolean;
|
||||
rejectSourceHardlinks?: boolean;
|
||||
}): Promise<void> {
|
||||
const source = await openVerifiedLocalFile(params.sourcePath, {
|
||||
rejectHardlinks: params.rejectSourceHardlinks,
|
||||
});
|
||||
if (params.maxBytes !== undefined && source.stat.size > params.maxBytes) {
|
||||
await source.handle.close().catch(() => {});
|
||||
throw new SafeOpenError(
|
||||
"too-large",
|
||||
`file exceeds limit of ${params.maxBytes} bytes (got ${source.stat.size})`,
|
||||
);
|
||||
}
|
||||
|
||||
async function copyFileWithinRootLegacy(
|
||||
params: {
|
||||
sourcePath: string;
|
||||
rootDir: string;
|
||||
relativePath: string;
|
||||
maxBytes?: number;
|
||||
mkdir?: boolean;
|
||||
rejectSourceHardlinks?: boolean;
|
||||
},
|
||||
source: SafeOpenResult,
|
||||
): Promise<void> {
|
||||
let target: SafeWritableOpenResult | null = null;
|
||||
let sourceClosedByStream = false;
|
||||
let targetClosedByUs = false;
|
||||
@@ -635,7 +816,9 @@ export async function copyFileWithinRoot(params: {
|
||||
targetStream.once("close", () => {
|
||||
tempClosedByStream = true;
|
||||
});
|
||||
await pipeline(sourceStream, targetStream);
|
||||
await import("node:stream/promises").then(({ pipeline }) =>
|
||||
pipeline(sourceStream, targetStream),
|
||||
);
|
||||
const writtenStat = await fs.stat(tempPath);
|
||||
if (!tempClosedByStream) {
|
||||
await tempHandle.close().catch(() => {});
|
||||
@@ -648,7 +831,7 @@ export async function copyFileWithinRoot(params: {
|
||||
await verifyAtomicWriteResult({
|
||||
rootDir: params.rootDir,
|
||||
targetPath: destinationPath,
|
||||
expectedStat: writtenStat,
|
||||
expectedIdentity: writtenStat,
|
||||
});
|
||||
} catch (err) {
|
||||
emitWriteBoundaryWarning(`post-copy verification failed: ${String(err)}`);
|
||||
@@ -674,18 +857,3 @@ export async function copyFileWithinRoot(params: {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function writeFileFromPathWithinRoot(params: {
|
||||
rootDir: string;
|
||||
relativePath: string;
|
||||
sourcePath: string;
|
||||
mkdir?: boolean;
|
||||
}): Promise<void> {
|
||||
await copyFileWithinRoot({
|
||||
sourcePath: params.sourcePath,
|
||||
rootDir: params.rootDir,
|
||||
relativePath: params.relativePath,
|
||||
mkdir: params.mkdir,
|
||||
rejectSourceHardlinks: true,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -214,6 +214,38 @@ describe("hardenApprovedExecutionPaths", () => {
|
||||
}
|
||||
|
||||
const mutableOperandCases: RuntimeFixture[] = [
|
||||
{
|
||||
name: "python flagged file",
|
||||
binName: "python3",
|
||||
argv: ["python3", "-B", "./run.py"],
|
||||
scriptName: "run.py",
|
||||
initialBody: 'print("SAFE")\n',
|
||||
expectedArgvIndex: 2,
|
||||
},
|
||||
{
|
||||
name: "lua direct file",
|
||||
binName: "lua",
|
||||
argv: ["lua", "./run.lua"],
|
||||
scriptName: "run.lua",
|
||||
initialBody: 'print("SAFE")\n',
|
||||
expectedArgvIndex: 1,
|
||||
},
|
||||
{
|
||||
name: "pypy direct file",
|
||||
binName: "pypy",
|
||||
argv: ["pypy", "./run.py"],
|
||||
scriptName: "run.py",
|
||||
initialBody: 'print("SAFE")\n',
|
||||
expectedArgvIndex: 1,
|
||||
},
|
||||
{
|
||||
name: "versioned node alias file",
|
||||
binName: "node20",
|
||||
argv: ["node20", "./run.js"],
|
||||
scriptName: "run.js",
|
||||
initialBody: 'console.log("SAFE");\n',
|
||||
expectedArgvIndex: 1,
|
||||
},
|
||||
{
|
||||
name: "bun direct file",
|
||||
binName: "bun",
|
||||
@@ -238,6 +270,22 @@ describe("hardenApprovedExecutionPaths", () => {
|
||||
initialBody: 'console.log("SAFE");\n',
|
||||
expectedArgvIndex: 5,
|
||||
},
|
||||
{
|
||||
name: "bun test file",
|
||||
binName: "bun",
|
||||
argv: ["bun", "test", "./run.test.ts"],
|
||||
scriptName: "run.test.ts",
|
||||
initialBody: 'console.log("SAFE");\n',
|
||||
expectedArgvIndex: 2,
|
||||
},
|
||||
{
|
||||
name: "deno test file",
|
||||
binName: "deno",
|
||||
argv: ["deno", "test", "./run.test.ts"],
|
||||
scriptName: "run.test.ts",
|
||||
initialBody: 'console.log("SAFE");\n',
|
||||
expectedArgvIndex: 2,
|
||||
},
|
||||
];
|
||||
|
||||
for (const runtimeCase of mutableOperandCases) {
|
||||
@@ -296,7 +344,7 @@ describe("hardenApprovedExecutionPaths", () => {
|
||||
}
|
||||
});
|
||||
|
||||
it("does not snapshot bun package script names", () => {
|
||||
it("rejects bun package script names that do not bind a concrete file", () => {
|
||||
withFakeRuntimeBin({
|
||||
binName: "bun",
|
||||
run: () => {
|
||||
@@ -306,11 +354,11 @@ describe("hardenApprovedExecutionPaths", () => {
|
||||
command: ["bun", "run", "dev"],
|
||||
cwd: tmp,
|
||||
});
|
||||
expect(prepared.ok).toBe(true);
|
||||
if (!prepared.ok) {
|
||||
throw new Error("unreachable");
|
||||
}
|
||||
expect(prepared.plan.mutableFileOperand).toBeUndefined();
|
||||
expect(prepared).toEqual({
|
||||
ok: false,
|
||||
message:
|
||||
"SYSTEM_RUN_DENIED: approval cannot safely bind this interpreter/runtime command",
|
||||
});
|
||||
} finally {
|
||||
fs.rmSync(tmp, { recursive: true, force: true });
|
||||
}
|
||||
@@ -318,7 +366,7 @@ describe("hardenApprovedExecutionPaths", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("does not snapshot deno eval invocations", () => {
|
||||
it("rejects deno eval invocations that do not bind a concrete file", () => {
|
||||
withFakeRuntimeBin({
|
||||
binName: "deno",
|
||||
run: () => {
|
||||
@@ -328,11 +376,11 @@ describe("hardenApprovedExecutionPaths", () => {
|
||||
command: ["deno", "eval", "console.log('SAFE')"],
|
||||
cwd: tmp,
|
||||
});
|
||||
expect(prepared.ok).toBe(true);
|
||||
if (!prepared.ok) {
|
||||
throw new Error("unreachable");
|
||||
}
|
||||
expect(prepared.plan.mutableFileOperand).toBeUndefined();
|
||||
expect(prepared).toEqual({
|
||||
ok: false,
|
||||
message:
|
||||
"SYSTEM_RUN_DENIED: approval cannot safely bind this interpreter/runtime command",
|
||||
});
|
||||
} finally {
|
||||
fs.rmSync(tmp, { recursive: true, force: true });
|
||||
}
|
||||
|
||||
@@ -6,6 +6,7 @@ import type {
|
||||
SystemRunApprovalPlan,
|
||||
} from "../infra/exec-approvals.js";
|
||||
import { resolveCommandResolutionFromArgv } from "../infra/exec-command-resolution.js";
|
||||
import { isInterpreterLikeSafeBin } from "../infra/exec-safe-bin-runtime-policy.js";
|
||||
import {
|
||||
POSIX_SHELL_WRAPPERS,
|
||||
normalizeExecutableToken,
|
||||
@@ -316,6 +317,53 @@ function resolveOptionFilteredPositionalIndex(params: {
|
||||
return null;
|
||||
}
|
||||
|
||||
function collectExistingFileOperandIndexes(params: {
|
||||
argv: string[];
|
||||
startIndex: number;
|
||||
cwd: string | undefined;
|
||||
}): number[] {
|
||||
let afterDoubleDash = false;
|
||||
const hits: number[] = [];
|
||||
for (let i = params.startIndex; i < params.argv.length; i += 1) {
|
||||
const token = params.argv[i]?.trim() ?? "";
|
||||
if (!token) {
|
||||
continue;
|
||||
}
|
||||
if (afterDoubleDash) {
|
||||
if (resolvesToExistingFileSync(token, params.cwd)) {
|
||||
hits.push(i);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (token === "--") {
|
||||
afterDoubleDash = true;
|
||||
continue;
|
||||
}
|
||||
if (token === "-") {
|
||||
return [];
|
||||
}
|
||||
if (token.startsWith("-")) {
|
||||
continue;
|
||||
}
|
||||
if (resolvesToExistingFileSync(token, params.cwd)) {
|
||||
hits.push(i);
|
||||
}
|
||||
}
|
||||
return hits;
|
||||
}
|
||||
|
||||
function resolveGenericInterpreterScriptOperandIndex(params: {
|
||||
argv: string[];
|
||||
cwd: string | undefined;
|
||||
}): number | null {
|
||||
const hits = collectExistingFileOperandIndexes({
|
||||
argv: params.argv,
|
||||
startIndex: 1,
|
||||
cwd: params.cwd,
|
||||
});
|
||||
return hits.length === 1 ? hits[0] : null;
|
||||
}
|
||||
|
||||
function resolveBunScriptOperandIndex(params: {
|
||||
argv: string[];
|
||||
cwd: string | undefined;
|
||||
@@ -371,36 +419,76 @@ function resolveMutableFileOperandIndex(argv: string[], cwd: string | undefined)
|
||||
const shellIndex = resolvePosixShellScriptOperandIndex(unwrapped.argv);
|
||||
return shellIndex === null ? null : unwrapped.baseIndex + shellIndex;
|
||||
}
|
||||
if (!MUTABLE_ARGV1_INTERPRETER_PATTERNS.some((pattern) => pattern.test(executable))) {
|
||||
if (executable === "bun") {
|
||||
const bunIndex = resolveBunScriptOperandIndex({
|
||||
argv: unwrapped.argv,
|
||||
cwd,
|
||||
});
|
||||
return bunIndex === null ? null : unwrapped.baseIndex + bunIndex;
|
||||
if (MUTABLE_ARGV1_INTERPRETER_PATTERNS.some((pattern) => pattern.test(executable))) {
|
||||
const operand = unwrapped.argv[1]?.trim() ?? "";
|
||||
if (operand && operand !== "-" && !operand.startsWith("-")) {
|
||||
return unwrapped.baseIndex + 1;
|
||||
}
|
||||
if (executable === "deno") {
|
||||
const denoIndex = resolveDenoRunScriptOperandIndex({
|
||||
argv: unwrapped.argv,
|
||||
cwd,
|
||||
});
|
||||
return denoIndex === null ? null : unwrapped.baseIndex + denoIndex;
|
||||
}
|
||||
if (executable === "bun") {
|
||||
const bunIndex = resolveBunScriptOperandIndex({
|
||||
argv: unwrapped.argv,
|
||||
cwd,
|
||||
});
|
||||
if (bunIndex !== null) {
|
||||
return unwrapped.baseIndex + bunIndex;
|
||||
}
|
||||
}
|
||||
if (executable === "deno") {
|
||||
const denoIndex = resolveDenoRunScriptOperandIndex({
|
||||
argv: unwrapped.argv,
|
||||
cwd,
|
||||
});
|
||||
if (denoIndex !== null) {
|
||||
return unwrapped.baseIndex + denoIndex;
|
||||
}
|
||||
}
|
||||
if (!isInterpreterLikeSafeBin(executable)) {
|
||||
return null;
|
||||
}
|
||||
const operand = unwrapped.argv[1]?.trim() ?? "";
|
||||
if (!operand || operand === "-" || operand.startsWith("-")) {
|
||||
return null;
|
||||
const genericIndex = resolveGenericInterpreterScriptOperandIndex({
|
||||
argv: unwrapped.argv,
|
||||
cwd,
|
||||
});
|
||||
return genericIndex === null ? null : unwrapped.baseIndex + genericIndex;
|
||||
}
|
||||
|
||||
function requiresStableInterpreterApprovalBindingWithShellCommand(params: {
|
||||
argv: string[];
|
||||
shellCommand: string | null;
|
||||
}): boolean {
|
||||
if (params.shellCommand !== null) {
|
||||
return false;
|
||||
}
|
||||
return unwrapped.baseIndex + 1;
|
||||
const unwrapped = unwrapArgvForMutableOperand(params.argv);
|
||||
const executable = normalizeExecutableToken(unwrapped.argv[0] ?? "");
|
||||
if (!executable) {
|
||||
return false;
|
||||
}
|
||||
if ((POSIX_SHELL_WRAPPERS as ReadonlySet<string>).has(executable)) {
|
||||
return false;
|
||||
}
|
||||
return isInterpreterLikeSafeBin(executable);
|
||||
}
|
||||
|
||||
function resolveMutableFileOperandSnapshotSync(params: {
|
||||
argv: string[];
|
||||
cwd: string | undefined;
|
||||
shellCommand: string | null;
|
||||
}): { ok: true; snapshot: SystemRunApprovalFileOperand | null } | { ok: false; message: string } {
|
||||
const argvIndex = resolveMutableFileOperandIndex(params.argv, params.cwd);
|
||||
if (argvIndex === null) {
|
||||
if (
|
||||
requiresStableInterpreterApprovalBindingWithShellCommand({
|
||||
argv: params.argv,
|
||||
shellCommand: params.shellCommand,
|
||||
})
|
||||
) {
|
||||
return {
|
||||
ok: false,
|
||||
message: "SYSTEM_RUN_DENIED: approval cannot safely bind this interpreter/runtime command",
|
||||
};
|
||||
}
|
||||
return { ok: true, snapshot: null };
|
||||
}
|
||||
const rawOperand = params.argv[argvIndex]?.trim();
|
||||
@@ -658,6 +746,7 @@ export function buildSystemRunApprovalPlan(params: {
|
||||
const mutableFileOperand = resolveMutableFileOperandSnapshotSync({
|
||||
argv: hardening.argv,
|
||||
cwd: hardening.cwd,
|
||||
shellCommand: command.shellPayload,
|
||||
});
|
||||
if (!mutableFileOperand.ok) {
|
||||
return { ok: false, message: mutableFileOperand.message };
|
||||
|
||||
@@ -111,7 +111,6 @@ describe("evaluateGatewayAuthSurfaceStates", () => {
|
||||
gateway: {
|
||||
mode: "local",
|
||||
remote: {
|
||||
enabled: true,
|
||||
token: envRef("GW_REMOTE_TOKEN"),
|
||||
},
|
||||
},
|
||||
@@ -131,7 +130,6 @@ describe("evaluateGatewayAuthSurfaceStates", () => {
|
||||
mode: "password",
|
||||
},
|
||||
remote: {
|
||||
enabled: true,
|
||||
token: envRef("GW_REMOTE_TOKEN"),
|
||||
},
|
||||
},
|
||||
@@ -153,7 +151,6 @@ describe("evaluateGatewayAuthSurfaceStates", () => {
|
||||
token: envRef("GW_AUTH_TOKEN"),
|
||||
},
|
||||
remote: {
|
||||
enabled: true,
|
||||
token: envRef("GW_REMOTE_TOKEN"),
|
||||
},
|
||||
},
|
||||
@@ -170,7 +167,6 @@ describe("evaluateGatewayAuthSurfaceStates", () => {
|
||||
const states = evaluate({
|
||||
gateway: {
|
||||
remote: {
|
||||
enabled: true,
|
||||
url: "wss://gateway.example.com",
|
||||
password: envRef("GW_REMOTE_PASSWORD"),
|
||||
},
|
||||
@@ -190,7 +186,6 @@ describe("evaluateGatewayAuthSurfaceStates", () => {
|
||||
mode: "token",
|
||||
},
|
||||
remote: {
|
||||
enabled: true,
|
||||
password: envRef("GW_REMOTE_PASSWORD"),
|
||||
},
|
||||
},
|
||||
|
||||
@@ -166,9 +166,6 @@ export function evaluateGatewayAuthSurfaceStates(params: {
|
||||
if (!remote) {
|
||||
return "gateway.remote is not configured.";
|
||||
}
|
||||
if (!plan.remoteEnabled) {
|
||||
return "gateway.remote.enabled is false.";
|
||||
}
|
||||
if (plan.remoteConfiguredSurface) {
|
||||
return `remote surface is active: ${remoteSurfaceReason}.`;
|
||||
}
|
||||
@@ -191,9 +188,6 @@ export function evaluateGatewayAuthSurfaceStates(params: {
|
||||
if (!remote) {
|
||||
return "gateway.remote is not configured.";
|
||||
}
|
||||
if (!plan.remoteEnabled) {
|
||||
return "gateway.remote.enabled is false.";
|
||||
}
|
||||
if (plan.remoteConfiguredSurface) {
|
||||
return `remote surface is active: ${remoteSurfaceReason}.`;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user