Files
openclaw/src/agents/pi-tools.read.ts
stain lu ecfaf64526 fix: align host tilde paths with OS home (#62804) (thanks @stainlu)
* fix(tools): expand tilde in host edit/write paths (non-workspace mode)

* test: use it.runIf for visible skip when tmpdir is not under home

* fix(tools): address Codex P2 review on tilde host edit/write

Responds to two P2 findings from chatgpt-codex-connector on #62804:

1. Tests never ran in CI. The it.runIf(tmpdirUnderHome) guard always
   skipped on Linux runners where os.tmpdir() is /tmp, outside $HOME, so
   the regression tests reported green without executing. Tmpdirs now use
   the test-isolated HOME (process.env.HOME from test/test-env.ts) so
   tests run in every environment and match what expandHomePrefix
   resolves, keeping them hermetic.

2. Edit recovery path resolution was inconsistent. resolveEditPath
   inlined os.homedir() for tilde expansion, bypassing OPENCLAW_HOME,
   while the write/edit operations use expandHomePrefix. Under a custom
   OPENCLAW_HOME, wrapEditToolWithRecovery's readback targeted a
   different file than the edit actually touched, so successful edits
   could be reported as failures. resolveEditPath now uses the same
   expandHomePrefix helper.

* test(tools): verify tilde expansion honors OPENCLAW_HOME override

The prior tests covered tilde expansion but only under the default test
home, which matches os.homedir(). That passed whether the production code
used expandHomePrefix() or inlined os.homedir() — the behaviors only
diverge when OPENCLAW_HOME is set to a path outside $HOME.

Adds four tests that set OPENCLAW_HOME to a temp dir explicitly outside
$HOME and verify that write/mkdir/read/access tilde operations resolve
against OPENCLAW_HOME, not os.homedir(). These would fail if
pi-tools.read.ts or pi-tools.host-edit.ts reverted to os.homedir(),
directly covering the Codex P2 feedback about OPENCLAW_HOME consistency.

Uses the same env snapshot/restore pattern as test/helpers/temp-home.ts.

* Agents: resolve host tilde paths against OS home

* fix: align host tilde paths with OS home (#62804) (thanks @stainlu)

* fix: keep the changelog entry in the active block (#62804) (thanks @stainlu)

---------

Co-authored-by: Ayaan Zaidi <hi@obviy.us>
2026-04-16 14:37:55 +05:30

870 lines
28 KiB
TypeScript
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
import fs from "node:fs/promises";
import path from "node:path";
import { URL } from "node:url";
import type { AgentToolResult } from "@mariozechner/pi-agent-core";
import { createEditTool, createReadTool, createWriteTool } from "@mariozechner/pi-coding-agent";
import { isWindowsDrivePath } from "../infra/archive-path.js";
import {
appendFileWithinRoot,
SafeOpenError,
openFileWithinRoot,
readFileWithinRoot,
writeFileWithinRoot,
} from "../infra/fs-safe.js";
import { expandHomePrefix, resolveOsHomeDir } from "../infra/home-dir.js";
import { hasEncodedFileUrlSeparator, trySafeFileURLToPath } from "../infra/local-file-access.js";
import { detectMime } from "../media/mime.js";
import { sniffMimeFromBase64 } from "../media/sniff-mime-from-base64.js";
import type { ImageSanitizationLimits } from "./image-sanitization.js";
import { toRelativeWorkspacePath } from "./path-policy.js";
import { wrapEditToolWithRecovery } from "./pi-tools.host-edit.js";
import {
REQUIRED_PARAM_GROUPS,
assertRequiredParams,
getToolParamsRecord,
wrapToolParamValidation,
} from "./pi-tools.params.js";
import type { AnyAgentTool } from "./pi-tools.types.js";
import { assertSandboxPath } from "./sandbox-paths.js";
import type { SandboxFsBridge } from "./sandbox/fs-bridge.js";
import { sanitizeToolResultImages } from "./tool-images.js";
export {
REQUIRED_PARAM_GROUPS,
assertRequiredParams,
getToolParamsRecord,
wrapToolParamValidation,
} from "./pi-tools.params.js";
// NOTE(steipete): Upstream read now does file-magic MIME detection; we keep the wrapper
// to sanitize oversized images before they hit providers.
// Narrowed views of the upstream tool-result content union.
type ToolContentBlock = AgentToolResult<unknown>["content"][number];
type ImageContentBlock = Extract<ToolContentBlock, { type: "image" }>;
type TextContentBlock = Extract<ToolContentBlock, { type: "text" }>;
// Byte budget for a single read call when no context window is known.
const DEFAULT_READ_PAGE_MAX_BYTES = 32 * 1024;
// Hard ceiling on the adaptive per-call read budget.
const MAX_ADAPTIVE_READ_MAX_BYTES = 128 * 1024;
// Fraction of the model context window a single read may consume.
const ADAPTIVE_READ_CONTEXT_SHARE = 0.1;
// Rough chars-per-token estimate used to convert tokens to bytes.
const CHARS_PER_TOKEN_ESTIMATE = 4;
// Maximum number of pages auto-fetched within one read call.
const MAX_ADAPTIVE_READ_PAGES = 4;
// Options accepted by createOpenClawReadTool and the sandboxed read factory.
type OpenClawReadToolOptions = {
  modelContextWindowTokens?: number;
  imageSanitization?: ImageSanitizationLimits;
};
// Normalized shape extracted from a read result's `details.truncation`.
type ReadTruncationDetails = {
  truncated: boolean;
  outputLines: number;
  firstLineExceedsLimit: boolean;
};
// Matches the pagination notice the upstream read tool appends to a page.
const READ_CONTINUATION_NOTICE_RE =
  /\n\n\[(?:Showing lines [^\]]*?Use offset=\d+ to continue\.|\d+ more lines in file\. Use offset=\d+ to continue\.)\]\s*$/;
/** Restrict `value` to the inclusive range [min, max] (min wins on conflict). */
function clamp(value: number, min: number, max: number): number {
  const upperBounded = value > max ? max : value;
  return upperBounded < min ? min : upperBounded;
}
/**
 * Derive the per-call read byte budget from the model context window.
 * Without a usable window the default page size applies; otherwise a fixed
 * share of the window (converted to estimated characters) is used, clamped
 * between the default page size and the adaptive maximum.
 */
function resolveAdaptiveReadMaxBytes(options?: OpenClawReadToolOptions): number {
  const windowTokens = options?.modelContextWindowTokens;
  const hasUsableWindow =
    typeof windowTokens === "number" && Number.isFinite(windowTokens) && windowTokens > 0;
  if (!hasUsableWindow) {
    return DEFAULT_READ_PAGE_MAX_BYTES;
  }
  const shareBytes = Math.floor(
    windowTokens * CHARS_PER_TOKEN_ESTIMATE * ADAPTIVE_READ_CONTEXT_SHARE,
  );
  return clamp(shareBytes, DEFAULT_READ_PAGE_MAX_BYTES, MAX_ADAPTIVE_READ_MAX_BYTES);
}
/** Render a byte count as a short human-readable size (B, KB, or MB). */
function formatBytes(bytes: number): string {
  const KB = 1024;
  const MB = KB * KB;
  if (bytes >= MB) {
    return `${(bytes / MB).toFixed(1)}MB`;
  }
  return bytes >= KB ? `${Math.round(bytes / KB)}KB` : `${bytes}B`;
}
/**
 * Collect the text of every `text` content block in a tool result, joined
 * with newlines, or `undefined` when the result carries no text block.
 */
function getToolResultText(result: AgentToolResult<unknown>): string | undefined {
  const blocks = Array.isArray(result.content) ? result.content : [];
  const texts: string[] = [];
  for (const block of blocks) {
    if (!block || typeof block !== "object") {
      continue;
    }
    const candidate = block as { type?: unknown; text?: unknown };
    if (candidate.type === "text" && typeof candidate.text === "string") {
      texts.push(candidate.text);
    }
  }
  return texts.length > 0 ? texts.join("\n") : undefined;
}
/**
 * Return a copy of `result` whose first `text` content block carries `text`.
 * When no text block exists, the entire content is replaced with a single
 * text block holding `text`.
 */
function withToolResultText(
  result: AgentToolResult<unknown>,
  text: string,
): AgentToolResult<unknown> {
  const blocks = Array.isArray(result.content) ? result.content : [];
  const firstTextIndex = blocks.findIndex(
    (block) =>
      !!block && typeof block === "object" && (block as { type?: unknown }).type === "text",
  );
  if (firstTextIndex === -1) {
    // No text block to update: collapse content to a single synthetic one.
    const textBlock = { type: "text", text } as unknown as TextContentBlock;
    return {
      ...result,
      content: [textBlock] as unknown as AgentToolResult<unknown>["content"],
    };
  }
  const nextContent: ToolContentBlock[] = blocks.slice();
  nextContent[firstTextIndex] = {
    ...(blocks[firstTextIndex] as TextContentBlock),
    text,
  };
  return {
    ...result,
    content: nextContent as unknown as AgentToolResult<unknown>["content"],
  };
}
/**
 * Pull normalized truncation info out of a read result's `details`.
 * Returns null unless `details.truncation.truncated === true`; a missing
 * or non-finite `outputLines` normalizes to 0.
 */
function extractReadTruncationDetails(
  result: AgentToolResult<unknown>,
): ReadTruncationDetails | null {
  const asRecord = (value: unknown): Record<string, unknown> | null =>
    value && typeof value === "object" ? (value as Record<string, unknown>) : null;
  const details = asRecord((result as { details?: unknown }).details);
  const truncation = details ? asRecord(details.truncation) : null;
  if (!truncation || truncation.truncated !== true) {
    return null;
  }
  const rawLines = truncation.outputLines;
  const outputLines =
    typeof rawLines === "number" && Number.isFinite(rawLines)
      ? Math.max(0, Math.floor(rawLines))
      : 0;
  return {
    truncated: true,
    outputLines,
    firstLineExceedsLimit: truncation.firstLineExceedsLimit === true,
  };
}
/** Drop the trailing "[... Use offset=N to continue.]" pagination notice, if present. */
function stripReadContinuationNotice(text: string): string {
  const match = READ_CONTINUATION_NOTICE_RE.exec(text);
  // The regex is anchored at the end of the string, so cutting at the match
  // start is equivalent to replacing the match with "".
  return match ? text.slice(0, match.index) : text;
}
/**
 * Remove the bulky `content` field from `details.truncation`, keeping all
 * other detail fields. The result is returned unchanged when there is no
 * truncation object or it carries no own `content` key.
 */
function stripReadTruncationContentDetails(
  result: AgentToolResult<unknown>,
): AgentToolResult<unknown> {
  const detailsRaw = (result as { details?: unknown }).details;
  if (!detailsRaw || typeof detailsRaw !== "object") {
    return result;
  }
  const details = detailsRaw as Record<string, unknown>;
  const truncationRaw = details.truncation;
  if (!truncationRaw || typeof truncationRaw !== "object") {
    return result;
  }
  const truncation = truncationRaw as Record<string, unknown>;
  if (!Object.prototype.hasOwnProperty.call(truncation, "content")) {
    return result;
  }
  const restTruncation: Record<string, unknown> = {};
  for (const [key, value] of Object.entries(truncation)) {
    if (key !== "content") {
      restTruncation[key] = value;
    }
  }
  return {
    ...result,
    details: { ...details, truncation: restTruncation },
  };
}
/**
 * Execute the base read tool, transparently following its pagination until
 * the file is exhausted, MAX_ADAPTIVE_READ_PAGES is reached, or the
 * aggregated text hits `maxBytes`. Pages are joined with a blank line.
 * Callers that pass an explicit positive `limit` opt out of auto-paging.
 */
async function executeReadWithAdaptivePaging(params: {
  base: AnyAgentTool;
  toolCallId: string;
  args: Record<string, unknown>;
  signal?: AbortSignal;
  maxBytes: number;
}): Promise<AgentToolResult<unknown>> {
  const userLimit = params.args.limit;
  const hasExplicitLimit =
    typeof userLimit === "number" && Number.isFinite(userLimit) && userLimit > 0;
  if (hasExplicitLimit) {
    // The caller asked for a specific line count; honor it with a single call.
    return await params.base.execute(params.toolCallId, params.args, params.signal);
  }
  // Start from the caller-provided offset (positive integer), defaulting to 1.
  const offsetRaw = params.args.offset;
  let nextOffset =
    typeof offsetRaw === "number" && Number.isFinite(offsetRaw) && offsetRaw > 0
      ? Math.floor(offsetRaw)
      : 1;
  // First page's result is kept so its metadata/details survive aggregation.
  let firstResult: AgentToolResult<unknown> | null = null;
  let aggregatedText = "";
  let aggregatedBytes = 0;
  let capped = false; // true when we stopped because of the byte budget
  let continuationOffset: number | undefined;
  for (let page = 0; page < MAX_ADAPTIVE_READ_PAGES; page += 1) {
    const pageArgs = { ...params.args, offset: nextOffset };
    const pageResult = await params.base.execute(params.toolCallId, pageArgs, params.signal);
    firstResult ??= pageResult;
    const rawText = getToolResultText(pageResult);
    if (typeof rawText !== "string") {
      // Non-text result (e.g. an image): nothing to page through.
      return pageResult;
    }
    const truncation = extractReadTruncationDetails(pageResult);
    // Another page is fetched only when the tool reported truncation with a
    // positive line count, the first line itself fit, and pages remain.
    const canContinue =
      Boolean(truncation?.truncated) &&
      !truncation?.firstLineExceedsLimit &&
      (truncation?.outputLines ?? 0) > 0 &&
      page < MAX_ADAPTIVE_READ_PAGES - 1;
    // Strip the tool's own "[... Use offset=N to continue.]" notice from
    // intermediate pages; a single notice is emitted below if we cap.
    const pageText = canContinue ? stripReadContinuationNotice(rawText) : rawText;
    const delimiter = aggregatedText ? "\n\n" : "";
    const nextBytes = Buffer.byteLength(`${delimiter}${pageText}`, "utf-8");
    if (aggregatedText && aggregatedBytes + nextBytes > params.maxBytes) {
      // Including this page would exceed the budget; stop before adding it.
      capped = true;
      continuationOffset = nextOffset;
      break;
    }
    aggregatedText += `${delimiter}${pageText}`;
    aggregatedBytes += nextBytes;
    if (!canContinue || !truncation) {
      // Final page: reuse its result object, swapping in the aggregate text.
      return withToolResultText(pageResult, aggregatedText);
    }
    nextOffset += truncation.outputLines;
    continuationOffset = nextOffset;
    if (aggregatedBytes >= params.maxBytes) {
      capped = true;
      break;
    }
  }
  if (!firstResult) {
    // Defensive: the loop runs at least once for any positive
    // MAX_ADAPTIVE_READ_PAGES, so this fallback should be unreachable.
    return await params.base.execute(params.toolCallId, params.args, params.signal);
  }
  let finalText = aggregatedText;
  if (capped && continuationOffset) {
    finalText += `\n\n[Read output capped at ${formatBytes(params.maxBytes)} for this call. Use offset=${continuationOffset} to continue.]`;
  }
  return withToolResultText(firstResult, finalText);
}
/**
 * Rewrite pi-coding-agent's "Read image file [<mime>]" header line to carry
 * `mimeType`; any other text is returned untouched.
 */
function rewriteReadImageHeader(text: string, mimeType: string): string {
  const looksLikeHeader = text.startsWith("Read image file [") && text.endsWith("]");
  return looksLikeHeader ? `Read image file [${mimeType}]` : text;
}
/**
 * Cross-check a read result's image block against magic-byte sniffing.
 * Throws when the payload is empty or sniffs as a non-image type; when the
 * sniffed image MIME differs from the reported one, rewrites the image
 * block's mimeType (and the "Read image file [...]" text header) to match.
 */
async function normalizeReadImageResult(
  result: AgentToolResult<unknown>,
  filePath: string,
): Promise<AgentToolResult<unknown>> {
  const content = Array.isArray(result.content) ? result.content : [];
  // Find the first block that structurally looks like an image payload.
  const image = content.find(
    (b): b is ImageContentBlock =>
      !!b &&
      typeof b === "object" &&
      (b as { type?: unknown }).type === "image" &&
      typeof (b as { data?: unknown }).data === "string" &&
      typeof (b as { mimeType?: unknown }).mimeType === "string",
  );
  if (!image) {
    // No image block — nothing to validate or rewrite.
    return result;
  }
  if (!image.data.trim()) {
    throw new Error(`read: image payload is empty (${filePath})`);
  }
  const sniffed = await sniffMimeFromBase64(image.data);
  if (!sniffed) {
    // Unrecognized magic bytes: keep the reported MIME as-is.
    return result;
  }
  if (!sniffed.startsWith("image/")) {
    // The bytes are not an image at all; refuse to pass them along as one.
    throw new Error(
      `read: file looks like ${sniffed} but was treated as ${image.mimeType} (${filePath})`,
    );
  }
  if (sniffed === image.mimeType) {
    return result;
  }
  // Reported MIME disagrees with the bytes: rewrite image blocks and the
  // matching text header to the sniffed type.
  const nextContent = content.map((block) => {
    if (block && typeof block === "object" && (block as { type?: unknown }).type === "image") {
      const b = block as ImageContentBlock & { mimeType: string };
      return { ...b, mimeType: sniffed } satisfies ImageContentBlock;
    }
    if (
      block &&
      typeof block === "object" &&
      (block as { type?: unknown }).type === "text" &&
      typeof (block as { text?: unknown }).text === "string"
    ) {
      const b = block as TextContentBlock & { text: string };
      return {
        ...b,
        text: rewriteReadImageHeader(b.text, sniffed),
      } satisfies TextContentBlock;
    }
    return block;
  });
  return { ...result, content: nextContent };
}
/** Back-compat wrapper: guard `tool`'s path params against `root` with default options. */
export function wrapToolWorkspaceRootGuard(tool: AnyAgentTool, root: string): AnyAgentTool {
  return wrapToolWorkspaceRootGuardWithOptions(tool, root, undefined);
}
/**
 * Map a container path (e.g. "/workspace/foo", optionally written as an
 * "@" mention or a file:// URL) onto the host workspace root. Paths that
 * do not refer to the container workdir are returned unchanged.
 */
function mapContainerPathToWorkspaceRoot(params: {
  filePath: string;
  root: string;
  containerWorkdir?: string;
}): string {
  const containerWorkdir = params.containerWorkdir?.trim();
  if (!containerWorkdir) {
    return params.filePath;
  }
  // Normalize to forward slashes with no trailing slash for prefix checks.
  const normalizedWorkdir = containerWorkdir.replace(/\\/g, "/").replace(/\/+$/, "");
  if (!normalizedWorkdir.startsWith("/")) {
    return params.filePath;
  }
  if (!normalizedWorkdir) {
    // NOTE(review): appears unreachable — an empty string cannot start with
    // "/", so the previous guard already returned. Kept as a safety net.
    return params.filePath;
  }
  // "@path" is mention syntax; strip the marker before resolving.
  let candidate = params.filePath.startsWith("@") ? params.filePath.slice(1) : params.filePath;
  if (/^file:\/\//i.test(candidate)) {
    const localFilePath = trySafeFileURLToPath(candidate);
    if (localFilePath) {
      candidate = localFilePath;
    } else {
      // Windows rejects posix-style file:///workspace/... in fileURLToPath; map via URL pathname
      // when it clearly refers to the container workdir (same idea as sandbox-paths).
      let parsed: URL;
      try {
        parsed = new URL(candidate);
      } catch {
        return params.filePath;
      }
      if (parsed.protocol !== "file:") {
        return params.filePath;
      }
      const host = parsed.hostname.trim().toLowerCase();
      if (host && host !== "localhost") {
        // file://<remote-host>/... is not a local path; leave it untouched.
        return params.filePath;
      }
      if (hasEncodedFileUrlSeparator(parsed.pathname)) {
        // Percent-encoded separators could smuggle traversal; refuse to map.
        return params.filePath;
      }
      let normalizedPathname: string;
      try {
        normalizedPathname = decodeURIComponent(parsed.pathname).replace(/\\/g, "/");
      } catch {
        return params.filePath;
      }
      if (
        normalizedPathname !== normalizedWorkdir &&
        !normalizedPathname.startsWith(`${normalizedWorkdir}/`)
      ) {
        // URL points outside the container workdir; do not remap.
        return params.filePath;
      }
      candidate = normalizedPathname;
    }
  }
  const normalizedCandidate = candidate.replace(/\\/g, "/");
  if (normalizedCandidate === normalizedWorkdir) {
    // The workdir itself maps to the workspace root.
    return path.resolve(params.root);
  }
  const prefix = `${normalizedWorkdir}/`;
  if (!normalizedCandidate.startsWith(prefix)) {
    // Outside the workdir: return the (possibly @-stripped) path untouched.
    return candidate;
  }
  const relative = normalizedCandidate.slice(prefix.length);
  if (!relative) {
    return path.resolve(params.root);
  }
  // Re-root the workdir-relative remainder under the host workspace root.
  return path.resolve(params.root, ...relative.split("/").filter(Boolean));
}
/**
 * Resolve a tool-supplied path to an absolute host path, mapping container
 * workdir prefixes onto the workspace root first. Windows drive paths are
 * normalized with win32 semantics; relative paths resolve against `root`.
 */
export function resolveToolPathAgainstWorkspaceRoot(params: {
  filePath: string;
  root: string;
  containerWorkdir?: string;
}): string {
  const mapped = mapContainerPathToWorkspaceRoot(params);
  // Strip the "@" mention marker if present.
  const withoutAtPrefix = mapped.startsWith("@") ? mapped.slice(1) : mapped;
  if (isWindowsDrivePath(withoutAtPrefix)) {
    return path.win32.normalize(withoutAtPrefix);
  }
  return path.isAbsolute(withoutAtPrefix)
    ? path.resolve(withoutAtPrefix)
    : path.resolve(params.root, withoutAtPrefix || ".");
}
/**
 * Configuration for the memory-flush append-only write wrapper.
 * `relativePath` (resolved against `root`) is the only file the wrapped
 * tool may touch. When `sandbox` is present, reads and writes go through
 * its FS bridge instead of the host filesystem.
 */
type MemoryFlushAppendOnlyWriteOptions = {
  root: string;
  relativePath: string;
  containerWorkdir?: string;
  sandbox?: {
    root: string;
    bridge: SandboxFsBridge;
  };
};
/**
 * Read a file as UTF-8 — via the sandbox bridge when one is provided,
 * otherwise from the host filesystem. A missing file yields "".
 */
async function readOptionalUtf8File(params: {
  absolutePath: string;
  relativePath: string;
  sandbox?: MemoryFlushAppendOnlyWriteOptions["sandbox"];
  signal?: AbortSignal;
}): Promise<string> {
  const { sandbox } = params;
  try {
    if (!sandbox) {
      return await fs.readFile(params.absolutePath, "utf-8");
    }
    const stat = await sandbox.bridge.stat({
      filePath: params.relativePath,
      cwd: sandbox.root,
      signal: params.signal,
    });
    if (!stat) {
      return "";
    }
    const buffer = await sandbox.bridge.readFile({
      filePath: params.relativePath,
      cwd: sandbox.root,
      signal: params.signal,
    });
    return buffer.toString("utf-8");
  } catch (error) {
    // "File does not exist" counts as empty content; anything else is rethrown.
    if ((error as NodeJS.ErrnoException | undefined)?.code === "ENOENT") {
      return "";
    }
    throw error;
  }
}
/**
 * Append `content` to the memory-flush file.
 *
 * Host mode (no sandbox) delegates to appendFileWithinRoot, which creates
 * parent directories and inserts a separating newline when needed.
 * Sandbox mode emulates the same append through the FS bridge: read the
 * existing content, join with a "\n" separator when neither side already
 * supplies one, ensure the parent directory exists, and write it back.
 *
 * Fix over the previous version: the trailing host-filesystem fallback
 * (fs.mkdir + fs.writeFile) was unreachable — the no-sandbox branch
 * returns early, so the sandbox branch always executed and returned first.
 * The dead code and the redundant re-check of `params.sandbox` are removed.
 */
async function appendMemoryFlushContent(params: {
  absolutePath: string;
  root: string;
  relativePath: string;
  content: string;
  sandbox?: MemoryFlushAppendOnlyWriteOptions["sandbox"];
  signal?: AbortSignal;
}) {
  const { sandbox } = params;
  if (!sandbox) {
    await appendFileWithinRoot({
      rootDir: params.root,
      relativePath: params.relativePath,
      data: params.content,
      mkdir: true,
      prependNewlineIfNeeded: true,
    });
    return;
  }
  const existing = await readOptionalUtf8File({
    absolutePath: params.absolutePath,
    relativePath: params.relativePath,
    sandbox,
    signal: params.signal,
  });
  // Separate existing content from the appended chunk with exactly one newline.
  const separator =
    existing.length > 0 && !existing.endsWith("\n") && !params.content.startsWith("\n") ? "\n" : "";
  const next = `${existing}${separator}${params.content}`;
  const parent = path.posix.dirname(params.relativePath);
  if (parent && parent !== ".") {
    await sandbox.bridge.mkdirp({
      filePath: parent,
      cwd: sandbox.root,
      signal: params.signal,
    });
  }
  await sandbox.bridge.writeFile({
    filePath: params.relativePath,
    cwd: sandbox.root,
    data: next,
    mkdir: true,
    signal: params.signal,
  });
}
/**
 * Restrict a write tool so that, during memory flush, it may only append to
 * `options.relativePath` under `options.root`. Writes to any other resolved
 * path throw; a write to the allowed path is converted into an append.
 */
export function wrapToolMemoryFlushAppendOnlyWrite(
  tool: AnyAgentTool,
  options: MemoryFlushAppendOnlyWriteOptions,
): AnyAgentTool {
  const allowedAbsolutePath = path.resolve(options.root, options.relativePath);
  return {
    ...tool,
    description: `${tool.description} During memory flush, this tool may only append to ${options.relativePath}.`,
    execute: async (toolCallId, args, signal, onUpdate) => {
      const record = getToolParamsRecord(args);
      assertRequiredParams(record, REQUIRED_PARAM_GROUPS.write, tool.name);
      const filePath =
        typeof record?.path === "string" && record.path.trim() ? record.path : undefined;
      const content = typeof record?.content === "string" ? record.content : undefined;
      if (!filePath || content === undefined) {
        // Malformed args: defer to the base tool's own validation/behavior.
        return tool.execute(toolCallId, args, signal, onUpdate);
      }
      // Resolve exactly like the real write path so container-style and
      // relative paths compare correctly against the allowed target.
      const resolvedPath = resolveToolPathAgainstWorkspaceRoot({
        filePath,
        root: options.root,
        containerWorkdir: options.containerWorkdir,
      });
      if (resolvedPath !== allowedAbsolutePath) {
        throw new Error(
          `Memory flush writes are restricted to ${options.relativePath}; use that path only.`,
        );
      }
      await appendMemoryFlushContent({
        absolutePath: allowedAbsolutePath,
        root: options.root,
        relativePath: options.relativePath,
        content,
        sandbox: options.sandbox,
        signal,
      });
      return {
        content: [{ type: "text", text: `Appended content to ${options.relativePath}.` }],
        details: {
          path: options.relativePath,
          appendOnly: true,
        },
      };
    },
  };
}
/**
 * Guard a tool's path parameters against escaping the workspace root.
 * Each configured path param (default: "path") is mapped from container to
 * host space and validated via assertSandboxPath, which throws on escapes.
 * With `normalizeGuardedPathParams`, the validated absolute paths are
 * written back into the args passed to the wrapped tool.
 */
export function wrapToolWorkspaceRootGuardWithOptions(
  tool: AnyAgentTool,
  root: string,
  options?: {
    containerWorkdir?: string;
    pathParamKeys?: readonly string[];
    normalizeGuardedPathParams?: boolean;
  },
): AnyAgentTool {
  const pathParamKeys =
    options?.pathParamKeys && options.pathParamKeys.length > 0 ? options.pathParamKeys : ["path"];
  return {
    ...tool,
    execute: async (toolCallId, args, signal, onUpdate) => {
      const record = getToolParamsRecord(args);
      let normalizedRecord: Record<string, unknown> | undefined;
      for (const key of pathParamKeys) {
        const filePath = record?.[key];
        if (typeof filePath !== "string" || !filePath.trim()) {
          // Absent/blank params are left for the tool itself to handle.
          continue;
        }
        const sandboxPath = mapContainerPathToWorkspaceRoot({
          filePath,
          root,
          containerWorkdir: options?.containerWorkdir,
        });
        const sandboxResult = await assertSandboxPath({ filePath: sandboxPath, cwd: root, root });
        if (options?.normalizeGuardedPathParams && record) {
          normalizedRecord ??= { ...record };
          normalizedRecord[key] = sandboxResult.resolved;
        }
      }
      return tool.execute(toolCallId, normalizedRecord ?? args, signal, onUpdate);
    },
  };
}
// Shared configuration for the sandbox-backed read/write/edit tool factories.
type SandboxToolParams = {
  root: string; // workspace root the sandboxed tools operate under
  bridge: SandboxFsBridge; // FS bridge all file operations are routed through
  modelContextWindowTokens?: number; // sizes the adaptive read byte budget
  imageSanitization?: ImageSanitizationLimits; // limits applied to image results
};
/** Build the read tool whose file operations are routed through the sandbox bridge. */
export function createSandboxedReadTool(params: SandboxToolParams) {
  const operations = createSandboxReadOperations(params);
  const base = createReadTool(params.root, { operations }) as unknown as AnyAgentTool;
  const { modelContextWindowTokens, imageSanitization } = params;
  return createOpenClawReadTool(base, { modelContextWindowTokens, imageSanitization });
}
/** Build the write tool whose file operations are routed through the sandbox bridge. */
export function createSandboxedWriteTool(params: SandboxToolParams) {
  const operations = createSandboxWriteOperations(params);
  const base = createWriteTool(params.root, { operations }) as unknown as AnyAgentTool;
  return wrapToolParamValidation(base, REQUIRED_PARAM_GROUPS.write);
}
/** Build the sandbox-bridged edit tool, with post-edit readback recovery. */
export function createSandboxedEditTool(params: SandboxToolParams) {
  const operations = createSandboxEditOperations(params);
  const base = createEditTool(params.root, { operations }) as unknown as AnyAgentTool;
  const readFile = async (absolutePath: string) => {
    const buffer = await params.bridge.readFile({ filePath: absolutePath, cwd: params.root });
    return buffer.toString("utf8");
  };
  const withRecovery = wrapEditToolWithRecovery(base, { root: params.root, readFile });
  return wrapToolParamValidation(withRecovery, REQUIRED_PARAM_GROUPS.edit);
}
/** Build the host write tool; `workspaceOnly` confines writes to `root`. */
export function createHostWorkspaceWriteTool(root: string, options?: { workspaceOnly?: boolean }) {
  const operations = createHostWriteOperations(root, options);
  const base = createWriteTool(root, { operations }) as unknown as AnyAgentTool;
  return wrapToolParamValidation(base, REQUIRED_PARAM_GROUPS.write);
}
/**
 * Build the host edit tool with post-edit readback recovery;
 * `workspaceOnly` confines edits to `root`.
 */
export function createHostWorkspaceEditTool(root: string, options?: { workspaceOnly?: boolean }) {
  const operations = createHostEditOperations(root, options);
  const base = createEditTool(root, { operations }) as unknown as AnyAgentTool;
  const withRecovery = wrapEditToolWithRecovery(base, {
    root,
    readFile: (absolutePath: string) => fs.readFile(absolutePath, "utf-8"),
  });
  return wrapToolParamValidation(withRecovery, REQUIRED_PARAM_GROUPS.edit);
}
export function createOpenClawReadTool(
base: AnyAgentTool,
options?: OpenClawReadToolOptions,
): AnyAgentTool {
return {
...base,
execute: async (toolCallId, params, signal) => {
const record = getToolParamsRecord(params);
assertRequiredParams(record, REQUIRED_PARAM_GROUPS.read, base.name);
const result = await executeReadWithAdaptivePaging({
base,
toolCallId,
args: record ?? {},
signal,
maxBytes: resolveAdaptiveReadMaxBytes(options),
});
const filePath = typeof record?.path === "string" ? record.path : "<unknown>";
const strippedDetailsResult = stripReadTruncationContentDetails(result);
const normalizedResult = await normalizeReadImageResult(strippedDetailsResult, filePath);
return sanitizeToolResultImages(
normalizedResult,
`read:${filePath}`,
options?.imageSanitization,
);
},
};
}
/** File operations for the sandboxed read tool, routed through the FS bridge. */
function createSandboxReadOperations(params: SandboxToolParams) {
  const { bridge, root } = params;
  return {
    readFile: (absolutePath: string) => bridge.readFile({ filePath: absolutePath, cwd: root }),
    access: async (absolutePath: string) => {
      // stat() reports missing paths as a falsy result; surface that as ENOENT.
      const stat = await bridge.stat({ filePath: absolutePath, cwd: root });
      if (!stat) {
        throw createFsAccessError("ENOENT", absolutePath);
      }
    },
    detectImageMimeType: async (absolutePath: string) => {
      const buffer = await bridge.readFile({ filePath: absolutePath, cwd: root });
      const mime = await detectMime({ buffer, filePath: absolutePath });
      return mime?.startsWith("image/") ? mime : undefined;
    },
  } as const;
}
/** File operations for the sandboxed write tool, routed through the FS bridge. */
function createSandboxWriteOperations(params: SandboxToolParams) {
  const { bridge, root } = params;
  return {
    mkdir: async (dir: string): Promise<void> => {
      await bridge.mkdirp({ filePath: dir, cwd: root });
    },
    writeFile: async (absolutePath: string, content: string): Promise<void> => {
      await bridge.writeFile({ filePath: absolutePath, cwd: root, data: content });
    },
  } as const;
}
/** File operations for the sandboxed edit tool, routed through the FS bridge. */
function createSandboxEditOperations(params: SandboxToolParams) {
  const { bridge, root } = params;
  return {
    readFile: (absolutePath: string) => bridge.readFile({ filePath: absolutePath, cwd: root }),
    writeFile: (absolutePath: string, content: string) =>
      bridge.writeFile({ filePath: absolutePath, cwd: root, data: content }),
    access: async (absolutePath: string) => {
      // stat() reports missing paths as a falsy result; surface that as ENOENT.
      const stat = await bridge.stat({ filePath: absolutePath, cwd: root });
      if (!stat) {
        throw createFsAccessError("ENOENT", absolutePath);
      }
    },
  } as const;
}
/**
 * Expand a leading "~" against the real OS home directory. Returns the
 * path unchanged when no home directory can be resolved.
 */
function expandTildeToOsHome(filePath: string): string {
  const home = resolveOsHomeDir();
  if (!home) {
    return filePath;
  }
  return expandHomePrefix(filePath, { home });
}
/** Write `content` to a host path (tilde-expanded), creating parent directories. */
async function writeHostFile(absolutePath: string, content: string) {
  const target = path.resolve(expandTildeToOsHome(absolutePath));
  const parentDir = path.dirname(target);
  await fs.mkdir(parentDir, { recursive: true });
  await fs.writeFile(target, content, "utf-8");
}
/**
 * File operations for the host write tool. Without `workspaceOnly`, paths
 * (including "~" prefixes, expanded against the OS home) may target any
 * host location; with it, every path is confined to the workspace root.
 */
function createHostWriteOperations(root: string, options?: { workspaceOnly?: boolean }) {
  if (!(options?.workspaceOnly ?? false)) {
    // Unrestricted mode: allow writes anywhere on the host filesystem.
    return {
      mkdir: async (dir: string) => {
        await fs.mkdir(path.resolve(expandTildeToOsHome(dir)), { recursive: true });
      },
      writeFile: writeHostFile,
    } as const;
  }
  // Workspace-only mode: enforce the workspace boundary on every path.
  return {
    mkdir: async (dir: string) => {
      const relative = toRelativeWorkspacePath(root, dir, { allowRoot: true });
      const resolved = relative ? path.resolve(root, relative) : path.resolve(root);
      await assertSandboxPath({ filePath: resolved, cwd: root, root });
      await fs.mkdir(resolved, { recursive: true });
    },
    writeFile: async (absolutePath: string, content: string) => {
      await writeFileWithinRoot({
        rootDir: root,
        relativePath: toRelativeWorkspacePath(root, absolutePath),
        data: content,
        mkdir: true,
      });
    },
  } as const;
}
/**
 * File operations for the host edit tool. Without `workspaceOnly`, reads,
 * writes, and access checks may touch any host path (with "~" expanded
 * against the OS home); with it, every operation is confined to `root`
 * via the fs-safe helpers.
 */
function createHostEditOperations(root: string, options?: { workspaceOnly?: boolean }) {
  const workspaceOnly = options?.workspaceOnly ?? false;
  if (!workspaceOnly) {
    // When workspaceOnly is false, allow edits anywhere on the host.
    return {
      readFile: async (absolutePath: string) => {
        const resolved = path.resolve(expandTildeToOsHome(absolutePath));
        return await fs.readFile(resolved);
      },
      writeFile: writeHostFile,
      access: async (absolutePath: string) => {
        const resolved = path.resolve(expandTildeToOsHome(absolutePath));
        await fs.access(resolved);
      },
    } as const;
  }
  // When workspaceOnly is true, enforce the workspace boundary.
  return {
    readFile: async (absolutePath: string) => {
      const relative = toRelativeWorkspacePath(root, absolutePath);
      const safeRead = await readFileWithinRoot({
        rootDir: root,
        relativePath: relative,
      });
      return safeRead.buffer;
    },
    writeFile: async (absolutePath: string, content: string) => {
      const relative = toRelativeWorkspacePath(root, absolutePath);
      await writeFileWithinRoot({
        rootDir: root,
        relativePath: relative,
        data: content,
        mkdir: true,
      });
    },
    access: async (absolutePath: string) => {
      let relative: string;
      try {
        relative = toRelativeWorkspacePath(root, absolutePath);
      } catch {
        // Path escapes the workspace root. Don't throw here — the upstream
        // library replaces any `access` error with a misleading "File not
        // found" message. By returning silently, the subsequent `readFile`
        // call will throw the same "Path escapes workspace root" error
        // through a code path that propagates the original message.
        return;
      }
      try {
        const opened = await openFileWithinRoot({
          rootDir: root,
          relativePath: relative,
        });
        // Only existence/openability matters here; close immediately and
        // ignore close failures.
        await opened.handle.close().catch(() => {});
      } catch (error) {
        if (error instanceof SafeOpenError && error.code === "not-found") {
          throw createFsAccessError("ENOENT", absolutePath);
        }
        if (error instanceof SafeOpenError && error.code === "outside-workspace") {
          // Don't throw here — see the comment above about the upstream
          // library swallowing access errors as "File not found".
          return;
        }
        throw error;
      }
    },
  } as const;
}
/** Create a Node-style errno error (with `code` set) for sandbox FS failures. */
function createFsAccessError(code: string, filePath: string): NodeJS.ErrnoException {
  const error: NodeJS.ErrnoException = new Error(`Sandbox FS error (${code}): ${filePath}`);
  error.code = code;
  return error;
}