mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-09 21:10:42 +00:00
[codex] Extract filesystem safety primitives (#77918)
* refactor: extract filesystem safety primitives * refactor: use fs-safe for file access helpers * refactor: reuse fs-safe for media reads * refactor: use fs-safe for image reads * refactor: reuse fs-safe in qqbot media opener * refactor: reuse fs-safe for local media checks * refactor: consume cleaner fs-safe api * refactor: align fs-safe json option names * fix: preserve fs-safe migration contracts * refactor: use fs-safe primitive subpaths * refactor: use grouped fs-safe subpaths * refactor: align fs-safe api usage * refactor: adapt private state store api * chore: refresh proof gate * refactor: follow fs-safe json api split * refactor: follow reduced fs-safe surface * build: default fs-safe python helper off * fix: preserve fs-safe plugin sdk aliases * refactor: consolidate fs-safe usage * refactor: unify fs-safe store usage * refactor: trim fs-safe temp workspace usage * refactor: hide low-level fs-safe primitives * build: use published fs-safe package * fix: preserve outbound recovery durability after rebase * chore: refresh pr checks
This commit is contained in:
committed by
GitHub
parent
61481eb34f
commit
538605ff44
@@ -3,6 +3,7 @@ import path from "node:path";
|
||||
import { isKnownCoreToolId } from "../agents/tool-catalog.js";
|
||||
import { isMutatingToolCall } from "../agents/tool-mutation.js";
|
||||
import { resolveOwnerOnlyToolApprovalClass } from "../agents/tool-policy.js";
|
||||
import { isPathInside } from "../infra/path-guards.js";
|
||||
import {
|
||||
normalizeLowercaseStringOrEmpty,
|
||||
normalizeOptionalString,
|
||||
@@ -175,9 +176,7 @@ function isReadToolCallScopedToCwd(
|
||||
if (!absolutePath) {
|
||||
return false;
|
||||
}
|
||||
const root = path.resolve(cwd);
|
||||
const relative = path.relative(root, absolutePath);
|
||||
return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative));
|
||||
return isPathInside(path.resolve(cwd), absolutePath);
|
||||
}
|
||||
|
||||
export function classifyAcpToolApproval(params: {
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import { appendFile, mkdir } from "node:fs/promises";
|
||||
import { mkdir } from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { readAcpSessionEntry } from "../acp/runtime/session-meta.js";
|
||||
import { resolveSessionFilePath, resolveSessionFilePathOptions } from "../config/sessions/paths.js";
|
||||
import { onAgentEvent } from "../infra/agent-events.js";
|
||||
import { requestHeartbeat } from "../infra/heartbeat-wake.js";
|
||||
import { appendRegularFile } from "../infra/regular-file.js";
|
||||
import { enqueueSystemEvent } from "../infra/system-events.js";
|
||||
import { scopedHeartbeatWakeOptions } from "../routing/session-key.js";
|
||||
import { normalizeAssistantPhase } from "../shared/chat-message-content.js";
|
||||
@@ -130,10 +131,7 @@ export function startAcpSpawnParentStreamRelay(params: {
|
||||
});
|
||||
logDirReady = true;
|
||||
}
|
||||
await appendFile(logPath, chunk, {
|
||||
encoding: "utf-8",
|
||||
mode: 0o600,
|
||||
});
|
||||
await appendRegularFile({ filePath: logPath, content: chunk });
|
||||
})
|
||||
.catch(() => {
|
||||
// Best-effort diagnostics; never break relay flow.
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import type { OpenClawConfig } from "../config/types.openclaw.js";
|
||||
import { isPathInside } from "../infra/path-guards.js";
|
||||
import { normalizeAgentId } from "../routing/session-key.js";
|
||||
import { lowercasePreservingWhitespace } from "../shared/string-coerce.js";
|
||||
import { listAgentEntries, resolveAgentWorkspaceDir } from "./agent-scope.js";
|
||||
@@ -19,17 +20,11 @@ function normalizeWorkspacePathForComparison(input: string): string {
|
||||
return normalized;
|
||||
}
|
||||
|
||||
function isPathWithinRoot(candidatePath: string, rootPath: string): boolean {
|
||||
const relative = path.relative(rootPath, candidatePath);
|
||||
return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative));
|
||||
}
|
||||
|
||||
function workspacePathsOverlap(left: string, right: string): boolean {
|
||||
const normalizedLeft = normalizeWorkspacePathForComparison(left);
|
||||
const normalizedRight = normalizeWorkspacePathForComparison(right);
|
||||
return (
|
||||
isPathWithinRoot(normalizedLeft, normalizedRight) ||
|
||||
isPathWithinRoot(normalizedRight, normalizedLeft)
|
||||
isPathInside(normalizedRight, normalizedLeft) || isPathInside(normalizedLeft, normalizedRight)
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ import type { AgentDefaultsConfig } from "../config/types.agent-defaults.js";
|
||||
import type { AgentModelConfig } from "../config/types.agents-shared.js";
|
||||
import type { AgentConfig } from "../config/types.agents.js";
|
||||
import type { OpenClawConfig } from "../config/types.js";
|
||||
import { isPathInside } from "../infra/path-guards.js";
|
||||
import {
|
||||
normalizeAgentId,
|
||||
parseAgentSessionKey,
|
||||
@@ -239,11 +240,6 @@ function normalizePathForComparison(input: string): string {
|
||||
return normalized;
|
||||
}
|
||||
|
||||
function isPathWithinRoot(candidatePath: string, rootPath: string): boolean {
|
||||
const relative = path.relative(rootPath, candidatePath);
|
||||
return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative));
|
||||
}
|
||||
|
||||
export function resolveAgentIdsByWorkspacePath(
|
||||
cfg: OpenClawConfig,
|
||||
workspacePath: string,
|
||||
@@ -255,7 +251,7 @@ export function resolveAgentIdsByWorkspacePath(
|
||||
for (let index = 0; index < ids.length; index += 1) {
|
||||
const id = ids[index];
|
||||
const workspaceDir = normalizePathForComparison(resolveAgentWorkspaceDir(cfg, id));
|
||||
if (!isPathWithinRoot(normalizedWorkspacePath, workspaceDir)) {
|
||||
if (!isPathInside(workspaceDir, normalizedWorkspacePath)) {
|
||||
continue;
|
||||
}
|
||||
matches.push({ id, workspaceDir, order: index });
|
||||
|
||||
@@ -9,111 +9,6 @@ import {
|
||||
import { applyPatch } from "./apply-patch.js";
|
||||
import type { SandboxFsBridge } from "./sandbox/fs-bridge.js";
|
||||
|
||||
const pinnedPathHelper = vi.hoisted(() => {
|
||||
const fs = require("node:fs/promises") as typeof import("node:fs/promises");
|
||||
const path = require("node:path") as typeof import("node:path");
|
||||
const { pipeline } = require("node:stream/promises") as typeof import("node:stream/promises");
|
||||
|
||||
async function resolvePinnedParent(params: {
|
||||
rootPath: string;
|
||||
relativeParentPath?: string;
|
||||
mkdir?: boolean;
|
||||
}): Promise<string> {
|
||||
let current = params.rootPath;
|
||||
for (const segment of (params.relativeParentPath ?? "").split("/").filter(Boolean)) {
|
||||
const next = path.join(current, segment);
|
||||
try {
|
||||
const stat = await fs.lstat(next);
|
||||
if (stat.isSymbolicLink() || !stat.isDirectory()) {
|
||||
throw new Error("symbolic link or non-directory path segment");
|
||||
}
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code !== "ENOENT" || !params.mkdir) {
|
||||
throw error;
|
||||
}
|
||||
await fs.mkdir(next);
|
||||
}
|
||||
current = next;
|
||||
}
|
||||
return current;
|
||||
}
|
||||
|
||||
return {
|
||||
runPinnedPathHelper: vi.fn(
|
||||
async (params: {
|
||||
operation: "mkdirp" | "remove";
|
||||
rootPath: string;
|
||||
relativePath: string;
|
||||
}) => {
|
||||
const segments = params.relativePath.split("/").filter(Boolean);
|
||||
const targetPath = path.join(params.rootPath, ...segments);
|
||||
if (params.operation === "mkdirp") {
|
||||
await resolvePinnedParent({
|
||||
rootPath: params.rootPath,
|
||||
relativeParentPath: params.relativePath,
|
||||
mkdir: true,
|
||||
});
|
||||
return;
|
||||
}
|
||||
await resolvePinnedParent({
|
||||
rootPath: params.rootPath,
|
||||
relativeParentPath: segments.slice(0, -1).join("/"),
|
||||
mkdir: false,
|
||||
});
|
||||
const stat = await fs.lstat(targetPath);
|
||||
if (stat.isDirectory() && !stat.isSymbolicLink()) {
|
||||
await fs.rmdir(targetPath);
|
||||
return;
|
||||
}
|
||||
await fs.unlink(targetPath);
|
||||
},
|
||||
),
|
||||
runPinnedWriteHelper: vi.fn(
|
||||
async (params: {
|
||||
rootPath: string;
|
||||
relativeParentPath: string;
|
||||
basename: string;
|
||||
mkdir: boolean;
|
||||
mode: number;
|
||||
input:
|
||||
| { kind: "buffer"; data: string | Buffer; encoding?: BufferEncoding }
|
||||
| { kind: "stream"; stream: NodeJS.ReadableStream };
|
||||
}) => {
|
||||
const parentPath = await resolvePinnedParent({
|
||||
rootPath: params.rootPath,
|
||||
relativeParentPath: params.relativeParentPath,
|
||||
mkdir: params.mkdir,
|
||||
});
|
||||
const targetPath = path.join(parentPath, params.basename);
|
||||
if (params.input.kind === "buffer") {
|
||||
await fs.writeFile(targetPath, params.input.data, {
|
||||
encoding: params.input.encoding,
|
||||
mode: params.mode,
|
||||
});
|
||||
} else {
|
||||
const handle = await fs.open(targetPath, "w", params.mode);
|
||||
try {
|
||||
await pipeline(params.input.stream, handle.createWriteStream());
|
||||
} finally {
|
||||
await handle.close().catch(() => undefined);
|
||||
}
|
||||
}
|
||||
const stat = await fs.stat(targetPath);
|
||||
return { dev: stat.dev, ino: stat.ino };
|
||||
},
|
||||
),
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock("../infra/fs-pinned-path-helper.js", () => ({
|
||||
isPinnedPathHelperSpawnError: () => false,
|
||||
runPinnedPathHelper: pinnedPathHelper.runPinnedPathHelper,
|
||||
}));
|
||||
|
||||
vi.mock("../infra/fs-pinned-write-helper.js", () => ({
|
||||
runPinnedWriteHelper: pinnedPathHelper.runPinnedWriteHelper,
|
||||
}));
|
||||
|
||||
async function withTempDir<T>(fn: (dir: string) => Promise<T>) {
|
||||
const dir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-patch-"));
|
||||
try {
|
||||
|
||||
@@ -3,12 +3,8 @@ import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import type { AgentTool } from "@mariozechner/pi-agent-core";
|
||||
import { Type } from "typebox";
|
||||
import { openBoundaryFile, type BoundaryFileOpenResult } from "../infra/boundary-file-read.js";
|
||||
import {
|
||||
mkdirPathWithinRoot,
|
||||
removePathWithinRoot,
|
||||
writeFileWithinRoot,
|
||||
} from "../infra/fs-safe.js";
|
||||
import { openRootFile, type RootFileOpenResult } from "../infra/boundary-file-read.js";
|
||||
import { root as fsRoot } from "../infra/fs-safe.js";
|
||||
import { PATH_ALIAS_POLICIES, type PathAliasPolicy } from "../infra/path-alias-guards.js";
|
||||
import { applyUpdateHunk } from "./apply-patch-update.js";
|
||||
import { toRelativeSandboxPath, resolvePathFromInput } from "./path-policy.js";
|
||||
@@ -244,12 +240,13 @@ function resolvePatchFileOps(options: ApplyPatchOptions): PatchFileOps {
|
||||
};
|
||||
}
|
||||
const workspaceOnly = options.workspaceOnly !== false;
|
||||
const rootPromise = workspaceOnly ? fsRoot(options.cwd) : undefined;
|
||||
return {
|
||||
readFile: async (filePath) => {
|
||||
if (!workspaceOnly) {
|
||||
return await fs.readFile(filePath, "utf8");
|
||||
}
|
||||
const opened = await openBoundaryFile({
|
||||
const opened = await openRootFile({
|
||||
absolutePath: filePath,
|
||||
rootPath: options.cwd,
|
||||
boundaryLabel: "workspace root",
|
||||
@@ -267,12 +264,7 @@ function resolvePatchFileOps(options: ApplyPatchOptions): PatchFileOps {
|
||||
return;
|
||||
}
|
||||
const relative = toRelativeSandboxPath(options.cwd, filePath);
|
||||
await writeFileWithinRoot({
|
||||
rootDir: options.cwd,
|
||||
relativePath: relative,
|
||||
data: content,
|
||||
encoding: "utf8",
|
||||
});
|
||||
await (await rootPromise)?.write(relative, content, { encoding: "utf8" });
|
||||
},
|
||||
remove: async (filePath) => {
|
||||
if (!workspaceOnly) {
|
||||
@@ -280,10 +272,7 @@ function resolvePatchFileOps(options: ApplyPatchOptions): PatchFileOps {
|
||||
return;
|
||||
}
|
||||
const relative = toRelativeSandboxPath(options.cwd, filePath);
|
||||
await removePathWithinRoot({
|
||||
rootDir: options.cwd,
|
||||
relativePath: relative,
|
||||
});
|
||||
await (await rootPromise)?.remove(relative);
|
||||
},
|
||||
mkdirp: async (dir) => {
|
||||
if (!workspaceOnly) {
|
||||
@@ -291,11 +280,15 @@ function resolvePatchFileOps(options: ApplyPatchOptions): PatchFileOps {
|
||||
return;
|
||||
}
|
||||
const relative = toRelativeSandboxPath(options.cwd, dir, { allowRoot: true });
|
||||
await mkdirPathWithinRoot({
|
||||
rootDir: options.cwd,
|
||||
relativePath: relative,
|
||||
allowRoot: true,
|
||||
});
|
||||
const root = await rootPromise;
|
||||
if (!root) {
|
||||
return;
|
||||
}
|
||||
if (relative === "" || relative === ".") {
|
||||
await root.ensureRoot();
|
||||
return;
|
||||
}
|
||||
await root.mkdir(relative);
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -352,9 +345,9 @@ async function resolvePatchPath(
|
||||
}
|
||||
|
||||
function assertBoundaryRead(
|
||||
opened: BoundaryFileOpenResult,
|
||||
opened: RootFileOpenResult,
|
||||
targetPath: string,
|
||||
): asserts opened is Extract<BoundaryFileOpenResult, { ok: true }> {
|
||||
): asserts opened is Extract<RootFileOpenResult, { ok: true }> {
|
||||
if (opened.ok) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ import { constants as fsConstants } from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { __setFsSafeTestHooksForTest } from "../infra/fs-safe.js";
|
||||
import { __setFsSafeTestHooksForTest } from "../infra/fs-safe-test-hooks.js";
|
||||
import { withTempDir } from "../test-utils/temp-dir.js";
|
||||
import { __testing, createExecTool } from "./bash-tools.exec.js";
|
||||
|
||||
|
||||
@@ -139,9 +139,9 @@ async function loadFsSafeModule(): Promise<FsSafeModule> {
|
||||
|
||||
function shouldSkipScriptPreflightPathError(
|
||||
error: unknown,
|
||||
SafeOpenError: FsSafeModule["SafeOpenError"],
|
||||
FsSafeError: FsSafeModule["FsSafeError"],
|
||||
): boolean {
|
||||
if (error instanceof SafeOpenError) {
|
||||
if (error instanceof FsSafeError) {
|
||||
return true;
|
||||
}
|
||||
const errorCode = getNodeErrorCode(error);
|
||||
@@ -155,8 +155,8 @@ function resolvePreflightRelativePath(params: { rootDir: string; absPath: string
|
||||
if (/^\.\.(?:[\\/]|$)/u.test(relative) || path.isAbsolute(relative)) {
|
||||
return null;
|
||||
}
|
||||
// Preserve literal "~" path segments under the workdir. `readFileWithinRoot`
|
||||
// expands home prefixes for relative paths, so normalize `~/...` to `./~/...`.
|
||||
// Preserve literal "~" path segments under the workdir. Root reads
|
||||
// expand home prefixes for relative paths, so normalize `~/...` to `./~/...`.
|
||||
return /^~(?:$|[\\/])/u.test(relative) ? `.${path.sep}${relative}` : relative;
|
||||
}
|
||||
|
||||
@@ -973,7 +973,8 @@ async function validateScriptFileForShellBleed(params: {
|
||||
return;
|
||||
}
|
||||
|
||||
const { SafeOpenError, readFileWithinRoot } = await loadFsSafeModule();
|
||||
const { FsSafeError, root: fsRoot } = await loadFsSafeModule();
|
||||
const workspaceRoot = await fsRoot(params.workdir);
|
||||
for (const relOrAbsPath of target.relOrAbsPaths) {
|
||||
const absPath = path.isAbsolute(relOrAbsPath)
|
||||
? path.resolve(relOrAbsPath)
|
||||
@@ -992,16 +993,14 @@ async function validateScriptFileForShellBleed(params: {
|
||||
// Use non-blocking open to avoid stalls if a path is swapped to a FIFO.
|
||||
let content: string;
|
||||
try {
|
||||
const safeRead = await readFileWithinRoot({
|
||||
rootDir: params.workdir,
|
||||
relativePath,
|
||||
const safeRead = await workspaceRoot.read(relativePath, {
|
||||
nonBlockingRead: true,
|
||||
allowSymlinkTargetWithinRoot: true,
|
||||
symlinks: "follow-within-root",
|
||||
maxBytes: 512 * 1024,
|
||||
});
|
||||
content = safeRead.buffer.toString("utf-8");
|
||||
} catch (error) {
|
||||
if (shouldSkipScriptPreflightPathError(error, SafeOpenError)) {
|
||||
if (shouldSkipScriptPreflightPathError(error, FsSafeError)) {
|
||||
// Preflight validation is best-effort: skip path/read failures and
|
||||
// continue to execute the command normally.
|
||||
continue;
|
||||
|
||||
@@ -10,6 +10,8 @@ import type { SourceReplyDeliveryMode } from "../../auto-reply/get-reply-options
|
||||
import type { ThinkLevel } from "../../auto-reply/thinking.js";
|
||||
import type { CliBackendConfig } from "../../config/types.js";
|
||||
import type { OpenClawConfig } from "../../config/types.openclaw.js";
|
||||
import { privateFileStore } from "../../infra/private-file-store.js";
|
||||
import { tempWorkspace } from "../../infra/private-temp-workspace.js";
|
||||
import { resolvePreferredOpenClawTmpDir } from "../../infra/tmp-openclaw-dir.js";
|
||||
import { MAX_IMAGE_BYTES } from "../../media/constants.js";
|
||||
import { extensionForMime } from "../../media/mime.js";
|
||||
@@ -283,14 +285,14 @@ export async function writeCliImages(params: {
|
||||
workspaceDir: params.workspaceDir,
|
||||
});
|
||||
await fs.mkdir(imageRoot, { recursive: true, mode: 0o700 });
|
||||
const store = privateFileStore(imageRoot);
|
||||
const paths: string[] = [];
|
||||
for (let i = 0; i < params.images.length; i += 1) {
|
||||
const image = params.images[i];
|
||||
const fileName = path.basename(resolveCliImagePath(image));
|
||||
const filePath = path.join(imageRoot, fileName);
|
||||
const buffer = Buffer.from(image.data, "base64");
|
||||
await fs.writeFile(filePath, buffer, { mode: 0o600 });
|
||||
paths.push(filePath);
|
||||
await store.writeText(fileName, buffer);
|
||||
paths.push(store.path(fileName));
|
||||
}
|
||||
// Keep content-addressed image paths stable across Claude CLI runs so prompt
|
||||
// text and argv don't churn on every turn with fresh temp-dir suffixes.
|
||||
@@ -308,19 +310,17 @@ export async function writeCliSystemPromptFile(params: {
|
||||
) {
|
||||
return { cleanup: async () => {} };
|
||||
}
|
||||
const tempDir = await fs.mkdtemp(
|
||||
path.join(resolvePreferredOpenClawTmpDir(), "openclaw-cli-system-prompt-"),
|
||||
);
|
||||
const filePath = path.join(tempDir, "system-prompt.md");
|
||||
await fs.writeFile(filePath, stripSystemPromptCacheBoundary(params.systemPrompt), {
|
||||
encoding: "utf-8",
|
||||
mode: 0o600,
|
||||
const workspace = await tempWorkspace({
|
||||
rootDir: resolvePreferredOpenClawTmpDir(),
|
||||
prefix: "openclaw-cli-system-prompt-",
|
||||
});
|
||||
const filePath = await workspace.write(
|
||||
"system-prompt.md",
|
||||
stripSystemPromptCacheBoundary(params.systemPrompt),
|
||||
);
|
||||
return {
|
||||
filePath,
|
||||
cleanup: async () => {
|
||||
await fs.rm(tempDir, { recursive: true, force: true });
|
||||
},
|
||||
cleanup: async () => await workspace.cleanup(),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -6,6 +6,7 @@ import {
|
||||
resolveSessionFilePathOptions,
|
||||
} from "../../config/sessions/paths.js";
|
||||
import type { OpenClawConfig } from "../../config/types.openclaw.js";
|
||||
import { isPathInside } from "../../infra/path-guards.js";
|
||||
import { resolveSessionAgentIds } from "../agent-scope.js";
|
||||
import {
|
||||
limitAgentHookHistoryMessages,
|
||||
@@ -108,11 +109,6 @@ async function safeRealpath(filePath: string): Promise<string | undefined> {
|
||||
}
|
||||
}
|
||||
|
||||
function isPathWithinBase(basePath: string, targetPath: string): boolean {
|
||||
const relative = path.relative(basePath, targetPath);
|
||||
return Boolean(relative) && !relative.startsWith("..") && !path.isAbsolute(relative);
|
||||
}
|
||||
|
||||
function resolveSafeCliSessionFile(params: {
|
||||
sessionId: string;
|
||||
sessionFile: string;
|
||||
@@ -155,7 +151,11 @@ async function loadCliSessionEntries(params: {
|
||||
}
|
||||
const realSessionsDir = (await safeRealpath(sessionsDir)) ?? path.resolve(sessionsDir);
|
||||
const realSessionFile = await safeRealpath(sessionFile);
|
||||
if (!realSessionFile || !isPathWithinBase(realSessionsDir, realSessionFile)) {
|
||||
if (
|
||||
!realSessionFile ||
|
||||
realSessionFile === realSessionsDir ||
|
||||
!isPathInside(realSessionsDir, realSessionFile)
|
||||
) {
|
||||
return [];
|
||||
}
|
||||
const stat = await fsp.stat(realSessionFile);
|
||||
|
||||
@@ -1,14 +1,5 @@
|
||||
import { createHash, randomUUID } from "node:crypto";
|
||||
import {
|
||||
chmodSync,
|
||||
existsSync,
|
||||
lstatSync,
|
||||
mkdirSync,
|
||||
readFileSync,
|
||||
renameSync,
|
||||
rmSync,
|
||||
writeFileSync,
|
||||
} from "node:fs";
|
||||
import { chmodSync, existsSync, lstatSync, mkdirSync, readFileSync, rmSync } from "node:fs";
|
||||
import {
|
||||
createServer,
|
||||
request as httpRequest,
|
||||
@@ -20,6 +11,7 @@ import { tmpdir } from "node:os";
|
||||
import path from "node:path";
|
||||
import type { OpenClawConfig } from "../../config/types.openclaw.js";
|
||||
import { resolveOpenClawPackageRootSync } from "../../infra/openclaw-root.js";
|
||||
import { privateFileStoreSync } from "../../infra/private-file-store.js";
|
||||
import { createSubsystemLogger } from "../../logging/subsystem.js";
|
||||
import { PluginApprovalResolutions } from "../../plugins/types.js";
|
||||
import { runBeforeToolCallHook } from "../pi-tools.before-tool-call.js";
|
||||
@@ -823,18 +815,10 @@ function writeNativeHookRelayBridgeRecord(
|
||||
registryPath: string,
|
||||
record: NativeHookRelayBridgeRecord,
|
||||
): void {
|
||||
const tempPath = path.join(
|
||||
path.dirname(registryPath),
|
||||
`.${path.basename(registryPath)}.${process.pid}.${randomUUID()}.tmp`,
|
||||
privateFileStoreSync(path.dirname(registryPath)).writeText(
|
||||
path.basename(registryPath),
|
||||
`${JSON.stringify(record)}\n`,
|
||||
);
|
||||
try {
|
||||
writeFileSync(tempPath, `${JSON.stringify(record)}\n`, { mode: 0o600, flag: "wx" });
|
||||
renameSync(tempPath, registryPath);
|
||||
chmodSync(registryPath, 0o600);
|
||||
} catch (error) {
|
||||
rmSync(tempPath, { force: true });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
function nativeHookRelayBridgeRegistryPath(relayId: string): string {
|
||||
|
||||
@@ -7,6 +7,7 @@ import {
|
||||
type OpenClawConfig,
|
||||
} from "../config/config.js";
|
||||
import { createConfigRuntimeEnv } from "../config/env-vars.js";
|
||||
import { privateFileStore } from "../infra/private-file-store.js";
|
||||
import { getCurrentPluginMetadataSnapshot } from "../plugins/current-plugin-metadata-snapshot.js";
|
||||
import { resolveInstalledManifestRegistryIndexFingerprint } from "../plugins/manifest-registry-installed.js";
|
||||
import type { PluginMetadataSnapshot } from "../plugins/plugin-metadata-snapshot.js";
|
||||
@@ -85,7 +86,15 @@ async function readExistingModelsFile(pathname: string): Promise<{
|
||||
parsed: unknown;
|
||||
}> {
|
||||
try {
|
||||
const raw = await fs.readFile(pathname, "utf8");
|
||||
const raw = await privateFileStore(path.dirname(pathname)).readTextIfExists(
|
||||
path.basename(pathname),
|
||||
);
|
||||
if (raw === null) {
|
||||
return {
|
||||
raw: "",
|
||||
parsed: null,
|
||||
};
|
||||
}
|
||||
return {
|
||||
raw,
|
||||
parsed: JSON.parse(raw) as unknown,
|
||||
@@ -108,9 +117,7 @@ export async function writeModelsFileAtomicForModelsJson(
|
||||
targetPath: string,
|
||||
contents: string,
|
||||
): Promise<void> {
|
||||
const tempPath = `${targetPath}.${process.pid}.${Date.now()}.tmp`;
|
||||
await fs.writeFile(tempPath, contents, { mode: 0o600 });
|
||||
await fs.rename(tempPath, targetPath);
|
||||
await privateFileStore(path.dirname(targetPath)).writeText(path.basename(targetPath), contents);
|
||||
}
|
||||
|
||||
function resolveModelsConfigInput(config?: OpenClawConfig): {
|
||||
|
||||
@@ -12,6 +12,10 @@ import {
|
||||
import { readGeneratedModelsJson } from "./models-config.test-utils.js";
|
||||
|
||||
const planOpenClawModelsJsonMock = vi.fn();
|
||||
const writePrivateStoreTextWriteMock = vi.fn();
|
||||
let actualPrivateFileStore:
|
||||
| typeof import("../infra/private-file-store.js").privateFileStore
|
||||
| undefined;
|
||||
|
||||
installModelsConfigTestHooks();
|
||||
|
||||
@@ -66,6 +70,27 @@ beforeAll(async () => {
|
||||
vi.doMock("./models-config.plan.js", () => ({
|
||||
planOpenClawModelsJson: (...args: unknown[]) => planOpenClawModelsJsonMock(...args),
|
||||
}));
|
||||
vi.doMock("../infra/private-file-store.js", async () => {
|
||||
const actual = await vi.importActual<typeof import("../infra/private-file-store.js")>(
|
||||
"../infra/private-file-store.js",
|
||||
);
|
||||
actualPrivateFileStore = actual.privateFileStore;
|
||||
return {
|
||||
...actual,
|
||||
privateFileStore: (rootDir: string) => {
|
||||
const store = actual.privateFileStore(rootDir);
|
||||
return {
|
||||
...store,
|
||||
writeText: (relativePath: string, content: string | Uint8Array) =>
|
||||
writePrivateStoreTextWriteMock({
|
||||
rootDir,
|
||||
filePath: path.join(rootDir, relativePath),
|
||||
content,
|
||||
}),
|
||||
};
|
||||
},
|
||||
};
|
||||
});
|
||||
({ ensureOpenClawModelsJson } = await import("./models-config.js"));
|
||||
({ clearCurrentPluginMetadataSnapshot, setCurrentPluginMetadataSnapshot } =
|
||||
await import("../plugins/current-plugin-metadata-snapshot.js"));
|
||||
@@ -73,6 +98,19 @@ beforeAll(async () => {
|
||||
|
||||
beforeEach(() => {
|
||||
clearCurrentPluginMetadataSnapshot();
|
||||
writePrivateStoreTextWriteMock
|
||||
.mockReset()
|
||||
.mockImplementation(
|
||||
async (params: { filePath: string; rootDir: string; content: string | Uint8Array }) => {
|
||||
if (!actualPrivateFileStore) {
|
||||
throw new Error("private file store mock not initialized");
|
||||
}
|
||||
return await actualPrivateFileStore(params.rootDir).writeText(
|
||||
path.basename(params.filePath),
|
||||
params.content,
|
||||
);
|
||||
},
|
||||
);
|
||||
planOpenClawModelsJsonMock
|
||||
.mockReset()
|
||||
.mockImplementation(async (params: { cfg?: typeof CUSTOM_PROXY_MODELS_CONFIG }) => ({
|
||||
@@ -207,42 +245,35 @@ describe("models-config write serialization", () => {
|
||||
firstModel.name = "Proxy A";
|
||||
secondModel.name = "Proxy B with longer name";
|
||||
|
||||
const originalWriteFile = fs.writeFile.bind(fs);
|
||||
let inFlightWrites = 0;
|
||||
let maxInFlightWrites = 0;
|
||||
const writeSpy = vi.spyOn(fs, "writeFile").mockImplementation(async (...args) => {
|
||||
const targetArg = args[0];
|
||||
const targetPath =
|
||||
typeof targetArg === "string"
|
||||
? targetArg
|
||||
: targetArg instanceof URL
|
||||
? targetArg.pathname
|
||||
: undefined;
|
||||
const isModelsTempWrite =
|
||||
typeof targetPath === "string" &&
|
||||
path.basename(targetPath).startsWith("models.json.") &&
|
||||
targetPath.endsWith(".tmp");
|
||||
if (isModelsTempWrite) {
|
||||
inFlightWrites += 1;
|
||||
if (inFlightWrites > maxInFlightWrites) {
|
||||
maxInFlightWrites = inFlightWrites;
|
||||
writePrivateStoreTextWriteMock.mockImplementation(
|
||||
async (params: { filePath: string; rootDir: string; content: string | Uint8Array }) => {
|
||||
const isModelsWrite = path.basename(params.filePath) === "models.json";
|
||||
if (isModelsWrite) {
|
||||
inFlightWrites += 1;
|
||||
if (inFlightWrites > maxInFlightWrites) {
|
||||
maxInFlightWrites = inFlightWrites;
|
||||
}
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
}
|
||||
await new Promise((resolve) => setTimeout(resolve, 10));
|
||||
}
|
||||
try {
|
||||
return await originalWriteFile(...args);
|
||||
} finally {
|
||||
if (isModelsTempWrite) {
|
||||
inFlightWrites -= 1;
|
||||
try {
|
||||
if (!actualPrivateFileStore) {
|
||||
throw new Error("private file store mock not initialized");
|
||||
}
|
||||
return await actualPrivateFileStore(params.rootDir).writeText(
|
||||
path.basename(params.filePath),
|
||||
params.content,
|
||||
);
|
||||
} finally {
|
||||
if (isModelsWrite) {
|
||||
inFlightWrites -= 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
try {
|
||||
await Promise.all([ensureOpenClawModelsJson(first), ensureOpenClawModelsJson(second)]);
|
||||
} finally {
|
||||
writeSpy.mockRestore();
|
||||
}
|
||||
await Promise.all([ensureOpenClawModelsJson(first), ensureOpenClawModelsJson(second)]);
|
||||
|
||||
expect(maxInFlightWrites).toBe(1);
|
||||
const parsed = await readGeneratedModelsJson<{
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { z } from "zod";
|
||||
import { safeParseJsonWithSchema, safeParseWithSchema } from "../utils/zod-parse.js";
|
||||
import { privateFileStore } from "../infra/private-file-store.js";
|
||||
import { safeParseWithSchema } from "../utils/zod-parse.js";
|
||||
import { ensureAuthProfileStore } from "./auth-profiles/store.js";
|
||||
import {
|
||||
piCredentialsEqual,
|
||||
@@ -26,10 +26,12 @@ const PiCredentialSchema: z.ZodType<PiCredential> = z.discriminatedUnion("type",
|
||||
|
||||
const AuthJsonShapeSchema = z.record(z.string(), z.unknown());
|
||||
|
||||
async function readAuthJson(filePath: string): Promise<AuthJsonShape> {
|
||||
async function readAuthJson(rootDir: string, filePath: string): Promise<AuthJsonShape> {
|
||||
try {
|
||||
const raw = await fs.readFile(filePath, "utf8");
|
||||
return safeParseJsonWithSchema(AuthJsonShapeSchema, raw) ?? {};
|
||||
const parsed = await privateFileStore(rootDir).readJsonIfExists(
|
||||
path.relative(rootDir, filePath),
|
||||
);
|
||||
return safeParseWithSchema(AuthJsonShapeSchema, parsed) ?? {};
|
||||
} catch {
|
||||
return {};
|
||||
}
|
||||
@@ -58,7 +60,7 @@ export async function ensurePiAuthJsonFromAuthProfiles(agentDir: string): Promis
|
||||
return { wrote: false, authPath };
|
||||
}
|
||||
|
||||
const existing = await readAuthJson(authPath);
|
||||
const existing = await readAuthJson(agentDir, authPath);
|
||||
let changed = false;
|
||||
|
||||
for (const [provider, cred] of Object.entries(providerCredentials)) {
|
||||
@@ -73,8 +75,9 @@ export async function ensurePiAuthJsonFromAuthProfiles(agentDir: string): Promis
|
||||
return { wrote: false, authPath };
|
||||
}
|
||||
|
||||
await fs.mkdir(agentDir, { recursive: true, mode: 0o700 });
|
||||
await fs.writeFile(authPath, `${JSON.stringify(existing, null, 2)}\n`, { mode: 0o600 });
|
||||
await privateFileStore(agentDir).writeJson(path.basename(authPath), existing, {
|
||||
trailingNewline: true,
|
||||
});
|
||||
|
||||
return { wrote: true, authPath };
|
||||
}
|
||||
|
||||
@@ -18,11 +18,12 @@
|
||||
* capabilities instead of the text-only fallback.
|
||||
*/
|
||||
|
||||
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { existsSync, readFileSync } from "node:fs";
|
||||
import { basename, dirname, join } from "node:path";
|
||||
import { resolveStateDir } from "../../config/paths.js";
|
||||
import { formatErrorMessage } from "../../infra/errors.js";
|
||||
import { resolveProxyFetchFromEnv } from "../../infra/net/proxy-fetch.js";
|
||||
import { privateFileStoreSync } from "../../infra/private-file-store.js";
|
||||
import { createSubsystemLogger } from "../../logging/subsystem.js";
|
||||
|
||||
const log = createSubsystemLogger("openrouter-model-capabilities");
|
||||
@@ -89,14 +90,11 @@ function resolveDiskCachePath(): string {
|
||||
|
||||
function writeDiskCache(map: Map<string, OpenRouterModelCapabilities>): void {
|
||||
try {
|
||||
const cacheDir = resolveDiskCacheDir();
|
||||
if (!existsSync(cacheDir)) {
|
||||
mkdirSync(cacheDir, { recursive: true });
|
||||
}
|
||||
const cachePath = resolveDiskCachePath();
|
||||
const payload: DiskCachePayload = {
|
||||
models: Object.fromEntries(map),
|
||||
};
|
||||
writeFileSync(resolveDiskCachePath(), JSON.stringify(payload), "utf-8");
|
||||
privateFileStoreSync(dirname(cachePath)).writeJson(basename(cachePath), payload);
|
||||
} catch (err: unknown) {
|
||||
const message = formatErrorMessage(err);
|
||||
log.debug(`Failed to write OpenRouter disk cache: ${message}`);
|
||||
|
||||
@@ -11,6 +11,8 @@ import {
|
||||
type SessionEntry,
|
||||
type SessionHeader,
|
||||
} from "@mariozechner/pi-coding-agent";
|
||||
import { appendRegularFile } from "../../infra/fs-safe.js";
|
||||
import { privateFileStore } from "../../infra/private-file-store.js";
|
||||
|
||||
type BranchSummaryEntry = Extract<SessionEntry, { type: "branch_summary" }>;
|
||||
type CompactionEntry = Extract<SessionEntry, { type: "compaction" }>;
|
||||
@@ -293,20 +295,10 @@ export async function writeTranscriptFileAtomic(
|
||||
filePath: string,
|
||||
entries: Array<SessionHeader | SessionEntry>,
|
||||
): Promise<void> {
|
||||
const dir = path.dirname(filePath);
|
||||
await fs.mkdir(dir, { recursive: true });
|
||||
const tmpFile = path.join(dir, `.${path.basename(filePath)}.${process.pid}.${randomUUID()}.tmp`);
|
||||
try {
|
||||
await fs.writeFile(tmpFile, serializeTranscriptFileEntries(entries), {
|
||||
encoding: "utf-8",
|
||||
mode: 0o600,
|
||||
flag: "wx",
|
||||
});
|
||||
await fs.rename(tmpFile, filePath);
|
||||
} catch (err) {
|
||||
await fs.unlink(tmpFile).catch(() => undefined);
|
||||
throw err;
|
||||
}
|
||||
await privateFileStore(path.dirname(filePath)).writeText(
|
||||
path.basename(filePath),
|
||||
serializeTranscriptFileEntries(entries),
|
||||
);
|
||||
}
|
||||
|
||||
export async function persistTranscriptStateMutation(params: {
|
||||
@@ -324,9 +316,9 @@ export async function persistTranscriptStateMutation(params: {
|
||||
]);
|
||||
return;
|
||||
}
|
||||
await fs.appendFile(
|
||||
params.sessionFile,
|
||||
params.appendedEntries.map((entry) => JSON.stringify(entry)).join("\n") + "\n",
|
||||
"utf-8",
|
||||
);
|
||||
await appendRegularFile({
|
||||
filePath: params.sessionFile,
|
||||
content: `${params.appendedEntries.map((entry) => JSON.stringify(entry)).join("\n")}\n`,
|
||||
rejectSymlinkParents: true,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { resolveStateDir } from "../config/paths.js";
|
||||
import { isTruthyEnvValue } from "../infra/env.js";
|
||||
import { appendRegularFile } from "../infra/fs-safe.js";
|
||||
|
||||
let rawStreamReady = false;
|
||||
|
||||
@@ -30,7 +31,11 @@ export function appendRawStream(payload: Record<string, unknown>) {
|
||||
}
|
||||
}
|
||||
try {
|
||||
void fs.promises.appendFile(rawStreamPath, `${JSON.stringify(payload)}\n`);
|
||||
void appendRegularFile({
|
||||
filePath: rawStreamPath,
|
||||
content: `${JSON.stringify(payload)}\n`,
|
||||
rejectSymlinkParents: true,
|
||||
});
|
||||
} catch {
|
||||
// ignore raw stream write failures
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@ import path from "node:path";
|
||||
import type { AgentMessage } from "@mariozechner/pi-agent-core";
|
||||
import type { ExtensionAPI, ExtensionContext, FileOperations } from "@mariozechner/pi-coding-agent";
|
||||
import { extractSections } from "../../auto-reply/reply/post-compaction-context.js";
|
||||
import { openBoundaryFile } from "../../infra/boundary-file-read.js";
|
||||
import { openRootFile } from "../../infra/boundary-file-read.js";
|
||||
import { formatErrorMessage } from "../../infra/errors.js";
|
||||
import { isAbortError } from "../../infra/unhandled-rejections.js";
|
||||
import { createSubsystemLogger } from "../../logging/subsystem.js";
|
||||
@@ -735,7 +735,7 @@ async function readWorkspaceContextForSummary(): Promise<string> {
|
||||
const agentsPath = path.join(workspaceDir, "AGENTS.md");
|
||||
|
||||
try {
|
||||
const opened = await openBoundaryFile({
|
||||
const opened = await openRootFile({
|
||||
absolutePath: agentsPath,
|
||||
rootPath: workspaceDir,
|
||||
boundaryLabel: "workspace root",
|
||||
|
||||
@@ -3,7 +3,7 @@ import path from "node:path";
|
||||
import type { SettingsManager } from "@mariozechner/pi-coding-agent";
|
||||
import { applyMergePatch } from "../config/merge-patch.js";
|
||||
import type { OpenClawConfig } from "../config/types.openclaw.js";
|
||||
import { openBoundaryFileSync } from "../infra/boundary-file-read.js";
|
||||
import { openRootFileSync } from "../infra/boundary-file-read.js";
|
||||
import { createSubsystemLogger } from "../logging/subsystem.js";
|
||||
import type { BundleMcpServerConfig } from "../plugins/bundle-mcp.js";
|
||||
import {
|
||||
@@ -43,7 +43,7 @@ function loadBundleSettingsFile(params: {
|
||||
relativePath: string;
|
||||
}): PiSettingsSnapshot | null {
|
||||
const absolutePath = path.join(params.rootDir, params.relativePath);
|
||||
const opened = openBoundaryFileSync({
|
||||
const opened = openRootFileSync({
|
||||
absolutePath,
|
||||
rootPath: params.rootDir,
|
||||
boundaryLabel: "plugin root",
|
||||
|
||||
@@ -6,7 +6,7 @@ import { createTrackedTempDirs } from "../test-utils/tracked-temp-dirs.js";
|
||||
vi.mock("../infra/boundary-file-read.js", async () => {
|
||||
const fs = await import("node:fs");
|
||||
return {
|
||||
openBoundaryFileSync: ({ absolutePath }: { absolutePath: string }) => ({
|
||||
openRootFileSync: ({ absolutePath }: { absolutePath: string }) => ({
|
||||
ok: true,
|
||||
fd: fs.openSync(absolutePath, "r"),
|
||||
}),
|
||||
|
||||
@@ -4,13 +4,7 @@ import { URL } from "node:url";
|
||||
import type { AgentToolResult } from "@mariozechner/pi-agent-core";
|
||||
import { createEditTool, createReadTool, createWriteTool } from "@mariozechner/pi-coding-agent";
|
||||
import { isWindowsDrivePath } from "../infra/archive-path.js";
|
||||
import {
|
||||
appendFileWithinRoot,
|
||||
SafeOpenError,
|
||||
openFileWithinRoot,
|
||||
readFileWithinRoot,
|
||||
writeFileWithinRoot,
|
||||
} from "../infra/fs-safe.js";
|
||||
import { root as fsRoot, FsSafeError } from "../infra/fs-safe.js";
|
||||
import { expandHomePrefix, resolveOsHomeDir } from "../infra/home-dir.js";
|
||||
import { hasEncodedFileUrlSeparator, trySafeFileURLToPath } from "../infra/local-file-access.js";
|
||||
import { detectMime } from "../media/mime.js";
|
||||
@@ -491,10 +485,8 @@ async function appendMemoryFlushContent(params: {
|
||||
signal?: AbortSignal;
|
||||
}) {
|
||||
if (!params.sandbox) {
|
||||
await appendFileWithinRoot({
|
||||
rootDir: params.root,
|
||||
relativePath: params.relativePath,
|
||||
data: params.content,
|
||||
const root = await fsRoot(params.root);
|
||||
await root.append(params.relativePath, params.content, {
|
||||
mkdir: true,
|
||||
prependNewlineIfNeeded: true,
|
||||
});
|
||||
@@ -769,6 +761,7 @@ function createHostWriteOperations(root: string, options?: { workspaceOnly?: boo
|
||||
}
|
||||
|
||||
// When workspaceOnly is true, enforce workspace boundary
|
||||
const rootPromise = fsRoot(root);
|
||||
return {
|
||||
mkdir: async (dir: string) => {
|
||||
const relative = toRelativeWorkspacePath(root, dir, { allowRoot: true });
|
||||
@@ -778,12 +771,7 @@ function createHostWriteOperations(root: string, options?: { workspaceOnly?: boo
|
||||
},
|
||||
writeFile: async (absolutePath: string, content: string) => {
|
||||
const relative = toRelativeWorkspacePath(root, absolutePath);
|
||||
await writeFileWithinRoot({
|
||||
rootDir: root,
|
||||
relativePath: relative,
|
||||
data: content,
|
||||
mkdir: true,
|
||||
});
|
||||
await (await rootPromise).write(relative, content, { mkdir: true });
|
||||
},
|
||||
} as const;
|
||||
}
|
||||
@@ -807,23 +795,16 @@ function createHostEditOperations(root: string, options?: { workspaceOnly?: bool
|
||||
}
|
||||
|
||||
// When workspaceOnly is true, enforce workspace boundary
|
||||
const rootPromise = fsRoot(root);
|
||||
return {
|
||||
readFile: async (absolutePath: string) => {
|
||||
const relative = toRelativeWorkspacePath(root, absolutePath);
|
||||
const safeRead = await readFileWithinRoot({
|
||||
rootDir: root,
|
||||
relativePath: relative,
|
||||
});
|
||||
const safeRead = await (await rootPromise).read(relative);
|
||||
return safeRead.buffer;
|
||||
},
|
||||
writeFile: async (absolutePath: string, content: string) => {
|
||||
const relative = toRelativeWorkspacePath(root, absolutePath);
|
||||
await writeFileWithinRoot({
|
||||
rootDir: root,
|
||||
relativePath: relative,
|
||||
data: content,
|
||||
mkdir: true,
|
||||
});
|
||||
await (await rootPromise).write(relative, content, { mkdir: true });
|
||||
},
|
||||
access: async (absolutePath: string) => {
|
||||
let relative: string;
|
||||
@@ -838,16 +819,13 @@ function createHostEditOperations(root: string, options?: { workspaceOnly?: bool
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const opened = await openFileWithinRoot({
|
||||
rootDir: root,
|
||||
relativePath: relative,
|
||||
});
|
||||
const opened = await (await rootPromise).open(relative);
|
||||
await opened.handle.close().catch(() => {});
|
||||
} catch (error) {
|
||||
if (error instanceof SafeOpenError && error.code === "not-found") {
|
||||
if (error instanceof FsSafeError && error.code === "not-found") {
|
||||
throw createFsAccessError("ENOENT", absolutePath);
|
||||
}
|
||||
if (error instanceof SafeOpenError && error.code === "outside-workspace") {
|
||||
if (error instanceof FsSafeError && error.code === "outside-workspace") {
|
||||
// Don't throw here – see the comment above about the upstream
|
||||
// library swallowing access errors as "File not found".
|
||||
return;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import nodeFs from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { appendRegularFile, resolveRegularFileAppendFlags } from "../infra/fs-safe.js";
|
||||
|
||||
export type QueuedFileWriteResult = "queued" | "dropped";
|
||||
|
||||
@@ -16,103 +16,19 @@ type QueuedFileWriterOptions = {
|
||||
yieldBeforeWrite?: boolean;
|
||||
};
|
||||
|
||||
type QueuedFileAppendFlagConstants = Pick<
|
||||
typeof nodeFs.constants,
|
||||
"O_APPEND" | "O_CREAT" | "O_WRONLY"
|
||||
> &
|
||||
Partial<Pick<typeof nodeFs.constants, "O_NOFOLLOW">>;
|
||||
|
||||
export function resolveQueuedFileAppendFlags(
|
||||
constants: QueuedFileAppendFlagConstants = nodeFs.constants,
|
||||
): number {
|
||||
const noFollow = constants.O_NOFOLLOW;
|
||||
return (
|
||||
constants.O_CREAT |
|
||||
constants.O_APPEND |
|
||||
constants.O_WRONLY |
|
||||
(typeof noFollow === "number" ? noFollow : 0)
|
||||
);
|
||||
}
|
||||
|
||||
async function assertNoSymlinkParents(filePath: string): Promise<void> {
|
||||
const resolvedDir = path.resolve(path.dirname(filePath));
|
||||
const parsed = path.parse(resolvedDir);
|
||||
const relativeParts = path.relative(parsed.root, resolvedDir).split(path.sep).filter(Boolean);
|
||||
let current = parsed.root;
|
||||
for (const part of relativeParts) {
|
||||
current = path.join(current, part);
|
||||
const stat = await fs.lstat(current);
|
||||
if (stat.isSymbolicLink()) {
|
||||
if (path.dirname(current) === parsed.root) {
|
||||
continue;
|
||||
}
|
||||
throw new Error(`Refusing to write queued log under symlinked directory: ${current}`);
|
||||
}
|
||||
if (!stat.isDirectory()) {
|
||||
throw new Error(`Refusing to write queued log under non-directory: ${current}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function verifyStableOpenedFile(params: {
|
||||
preOpenStat?: nodeFs.Stats;
|
||||
postOpenStat: nodeFs.Stats;
|
||||
filePath: string;
|
||||
}): void {
|
||||
if (!params.postOpenStat.isFile()) {
|
||||
throw new Error(`Refusing to write queued log to non-file: ${params.filePath}`);
|
||||
}
|
||||
if (params.postOpenStat.nlink > 1) {
|
||||
throw new Error(`Refusing to write queued log to hardlinked file: ${params.filePath}`);
|
||||
}
|
||||
const pre = params.preOpenStat;
|
||||
if (pre && (pre.dev !== params.postOpenStat.dev || pre.ino !== params.postOpenStat.ino)) {
|
||||
throw new Error(`Refusing to write queued log after file changed: ${params.filePath}`);
|
||||
}
|
||||
}
|
||||
export const resolveQueuedFileAppendFlags = resolveRegularFileAppendFlags;
|
||||
|
||||
async function safeAppendFile(
|
||||
filePath: string,
|
||||
line: string,
|
||||
options: QueuedFileWriterOptions,
|
||||
): Promise<void> {
|
||||
await assertNoSymlinkParents(filePath);
|
||||
|
||||
let preOpenStat: nodeFs.Stats | undefined;
|
||||
try {
|
||||
const stat = await fs.lstat(filePath);
|
||||
if (stat.isSymbolicLink()) {
|
||||
throw new Error(`Refusing to write queued log through symlink: ${filePath}`);
|
||||
}
|
||||
if (!stat.isFile()) {
|
||||
throw new Error(`Refusing to write queued log to non-file: ${filePath}`);
|
||||
}
|
||||
preOpenStat = stat;
|
||||
} catch (err) {
|
||||
if ((err as NodeJS.ErrnoException).code !== "ENOENT") {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
const lineBytes = Buffer.byteLength(line, "utf8");
|
||||
if (
|
||||
options.maxFileBytes !== undefined &&
|
||||
(preOpenStat?.size ?? 0) + lineBytes > options.maxFileBytes
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
const handle = await fs.open(filePath, resolveQueuedFileAppendFlags(), 0o600);
|
||||
try {
|
||||
const stat = await handle.stat();
|
||||
verifyStableOpenedFile({ preOpenStat, postOpenStat: stat, filePath });
|
||||
if (options.maxFileBytes !== undefined && stat.size + lineBytes > options.maxFileBytes) {
|
||||
return;
|
||||
}
|
||||
await handle.chmod(0o600);
|
||||
await handle.appendFile(line, "utf8");
|
||||
} finally {
|
||||
await handle.close();
|
||||
}
|
||||
await appendRegularFile({
|
||||
filePath,
|
||||
content: line,
|
||||
maxFileBytes: options.maxFileBytes,
|
||||
rejectSymlinkParents: true,
|
||||
});
|
||||
}
|
||||
|
||||
function waitForImmediate(): Promise<void> {
|
||||
|
||||
@@ -108,10 +108,15 @@ function isManagedMediaPathUnderRoot(candidate: string): boolean {
|
||||
return false;
|
||||
}
|
||||
const mediaRoot = path.join(resolveConfigDir(), "media");
|
||||
const relative = path.relative(path.resolve(mediaRoot), path.resolve(expanded));
|
||||
if (!relative || relative.startsWith("..") || path.isAbsolute(relative)) {
|
||||
const resolvedMediaRoot = path.resolve(mediaRoot);
|
||||
const resolvedExpanded = path.resolve(expanded);
|
||||
if (
|
||||
resolvedExpanded === resolvedMediaRoot ||
|
||||
!isPathInside(resolvedMediaRoot, resolvedExpanded)
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
const relative = path.relative(resolvedMediaRoot, resolvedExpanded);
|
||||
const firstSegment = relative.split(path.sep)[0] ?? "";
|
||||
return MANAGED_MEDIA_SUBDIRS.has(firstSegment) || firstSegment.startsWith("tool-");
|
||||
}
|
||||
|
||||
@@ -1 +1 @@
|
||||
export { openBoundaryFile, type BoundaryFileOpenResult } from "../../infra/boundary-file-read.js";
|
||||
export { openRootFile, type RootFileOpenResult } from "../../infra/boundary-file-read.js";
|
||||
|
||||
@@ -1,16 +1,17 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import type { PathAliasPolicy } from "../../infra/path-alias-guards.js";
|
||||
import type { SafeOpenSyncAllowedType } from "../../infra/safe-open-sync.js";
|
||||
import { openBoundaryFile, type BoundaryFileOpenResult } from "./fs-bridge-path-safety.runtime.js";
|
||||
import { openRootFile, type RootFileOpenResult } from "./fs-bridge-path-safety.runtime.js";
|
||||
import type { SandboxResolvedFsPath, SandboxFsMount } from "./fs-paths.js";
|
||||
import { isPathInsideContainerRoot, normalizeContainerPath } from "./path-utils.js";
|
||||
|
||||
type BoundaryAllowedType = "file" | "directory";
|
||||
|
||||
export type PathSafetyOptions = {
|
||||
action: string;
|
||||
aliasPolicy?: PathAliasPolicy;
|
||||
requireWritable?: boolean;
|
||||
allowedType?: SafeOpenSyncAllowedType;
|
||||
allowedType?: BoundaryAllowedType;
|
||||
};
|
||||
|
||||
export type PathSafetyCheck = {
|
||||
@@ -69,7 +70,7 @@ export class SandboxFsPathGuard {
|
||||
|
||||
async openReadableFile(
|
||||
target: SandboxResolvedFsPath,
|
||||
): Promise<BoundaryFileOpenResult & { ok: true }> {
|
||||
): Promise<RootFileOpenResult & { ok: true }> {
|
||||
const opened = await this.openBoundaryWithinRequiredMount(target, "read files");
|
||||
if (!opened.ok) {
|
||||
throw opened.error instanceof Error
|
||||
@@ -110,7 +111,7 @@ export class SandboxFsPathGuard {
|
||||
private async assertGuardedPathSafety(
|
||||
target: SandboxResolvedFsPath,
|
||||
options: PathSafetyOptions,
|
||||
guarded: BoundaryFileOpenResult,
|
||||
guarded: RootFileOpenResult,
|
||||
) {
|
||||
if (!guarded.ok) {
|
||||
if (guarded.reason !== "path") {
|
||||
@@ -145,11 +146,11 @@ export class SandboxFsPathGuard {
|
||||
action: string,
|
||||
options?: {
|
||||
aliasPolicy?: PathAliasPolicy;
|
||||
allowedType?: SafeOpenSyncAllowedType;
|
||||
allowedType?: BoundaryAllowedType;
|
||||
},
|
||||
): Promise<BoundaryFileOpenResult> {
|
||||
): Promise<RootFileOpenResult> {
|
||||
const lexicalMount = this.resolveRequiredMount(target.containerPath, action);
|
||||
const guarded = await openBoundaryFile({
|
||||
const guarded = await openRootFile({
|
||||
absolutePath: target.hostPath,
|
||||
rootPath: lexicalMount.hostRoot,
|
||||
boundaryLabel: "sandbox mount root",
|
||||
|
||||
@@ -8,7 +8,7 @@ import {
|
||||
getScriptsFromCalls,
|
||||
installFsBridgeTestHarness,
|
||||
mockedExecDockerRaw,
|
||||
mockedOpenBoundaryFile,
|
||||
mockedOpenRootFile,
|
||||
withTempDir,
|
||||
} from "./fs-bridge.test-helpers.js";
|
||||
|
||||
@@ -159,7 +159,7 @@ describe("sandbox fs bridge shell compatibility", () => {
|
||||
});
|
||||
|
||||
it("re-validates target before the pinned write helper runs", async () => {
|
||||
mockedOpenBoundaryFile
|
||||
mockedOpenRootFile
|
||||
.mockImplementationOnce(async () => ({ ok: false, reason: "path" }))
|
||||
.mockImplementationOnce(async () => ({
|
||||
ok: false,
|
||||
|
||||
@@ -4,21 +4,21 @@ import path from "node:path";
|
||||
import { beforeEach, expect, vi, type Mock } from "vitest";
|
||||
|
||||
type ExecDockerRawFn = typeof import("./docker.js").execDockerRaw;
|
||||
type OpenBoundaryFileFn = typeof import("./fs-bridge-path-safety.runtime.js").openBoundaryFile;
|
||||
type OpenRootFileFn = typeof import("./fs-bridge-path-safety.runtime.js").openRootFile;
|
||||
type ExecDockerArgs = Parameters<ExecDockerRawFn>[0];
|
||||
type ExecDockerRawMock = Mock<ExecDockerRawFn>;
|
||||
type OpenBoundaryFileMock = Mock<OpenBoundaryFileFn>;
|
||||
type OpenRootFileMock = Mock<OpenRootFileFn>;
|
||||
type FsBridgeHoisted = {
|
||||
execDockerRaw: ExecDockerRawMock;
|
||||
openBoundaryFile: OpenBoundaryFileMock;
|
||||
openRootFile: OpenRootFileMock;
|
||||
};
|
||||
|
||||
let actualOpenBoundaryFile: OpenBoundaryFileFn | undefined;
|
||||
let actualOpenRootFile: OpenRootFileFn | undefined;
|
||||
|
||||
const hoisted = vi.hoisted(
|
||||
(): FsBridgeHoisted => ({
|
||||
execDockerRaw: vi.fn(),
|
||||
openBoundaryFile: vi.fn(),
|
||||
openRootFile: vi.fn(),
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -31,11 +31,10 @@ vi.mock("./fs-bridge-path-safety.runtime.js", async () => {
|
||||
const actual = await vi.importActual<typeof import("./fs-bridge-path-safety.runtime.js")>(
|
||||
"./fs-bridge-path-safety.runtime.js",
|
||||
);
|
||||
actualOpenBoundaryFile = actual.openBoundaryFile;
|
||||
actualOpenRootFile = actual.openRootFile;
|
||||
return {
|
||||
...actual,
|
||||
openBoundaryFile: (params: Parameters<OpenBoundaryFileFn>[0]) =>
|
||||
hoisted.openBoundaryFile(params),
|
||||
openRootFile: (params: Parameters<OpenRootFileFn>[0]) => hoisted.openRootFile(params),
|
||||
};
|
||||
});
|
||||
|
||||
@@ -54,11 +53,10 @@ async function loadFreshFsBridgeModuleForTest() {
|
||||
const actual = await vi.importActual<typeof import("./fs-bridge-path-safety.runtime.js")>(
|
||||
"./fs-bridge-path-safety.runtime.js",
|
||||
);
|
||||
actualOpenBoundaryFile = actual.openBoundaryFile;
|
||||
actualOpenRootFile = actual.openRootFile;
|
||||
return {
|
||||
...actual,
|
||||
openBoundaryFile: (params: Parameters<OpenBoundaryFileFn>[0]) =>
|
||||
hoisted.openBoundaryFile(params),
|
||||
openRootFile: (params: Parameters<OpenRootFileFn>[0]) => hoisted.openRootFile(params),
|
||||
};
|
||||
});
|
||||
({ createSandboxFsBridge: createSandboxFsBridgeImpl } = await import("./fs-bridge.js"));
|
||||
@@ -74,7 +72,7 @@ export function createSandboxFsBridge(
|
||||
}
|
||||
|
||||
export const mockedExecDockerRaw: ExecDockerRawMock = hoisted.execDockerRaw;
|
||||
export const mockedOpenBoundaryFile: OpenBoundaryFileMock = hoisted.openBoundaryFile;
|
||||
export const mockedOpenRootFile: OpenRootFileMock = hoisted.openRootFile;
|
||||
const DOCKER_SCRIPT_INDEX = 5;
|
||||
const DOCKER_FIRST_SCRIPT_ARG_INDEX = 7;
|
||||
|
||||
@@ -206,7 +204,7 @@ export async function expectMkdirpAllowsExistingDirectory(params?: {
|
||||
await fs.mkdir(nestedDir, { recursive: true });
|
||||
|
||||
if (params?.forceBoundaryIoFallback) {
|
||||
mockedOpenBoundaryFile.mockImplementationOnce(async () => ({
|
||||
mockedOpenRootFile.mockImplementationOnce(async () => ({
|
||||
ok: false,
|
||||
reason: "io",
|
||||
error: Object.assign(new Error("EISDIR"), { code: "EISDIR" }),
|
||||
@@ -239,9 +237,9 @@ export function installFsBridgeTestHarness() {
|
||||
beforeEach(async () => {
|
||||
await loadFreshFsBridgeModuleForTest();
|
||||
mockedExecDockerRaw.mockClear();
|
||||
mockedOpenBoundaryFile.mockClear();
|
||||
if (actualOpenBoundaryFile) {
|
||||
mockedOpenBoundaryFile.mockImplementation(actualOpenBoundaryFile);
|
||||
mockedOpenRootFile.mockClear();
|
||||
if (actualOpenRootFile) {
|
||||
mockedOpenRootFile.mockImplementation(actualOpenRootFile);
|
||||
}
|
||||
installDockerReadMock();
|
||||
});
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import path from "node:path";
|
||||
import { isPathInside } from "../../infra/path-guards.js";
|
||||
import { normalizeOptionalLowercaseString } from "../../shared/string-coerce.js";
|
||||
import { resolveSandboxInputPath, resolveSandboxPath } from "../sandbox-paths.js";
|
||||
import type { SandboxFsBridgeContext } from "./backend-handle.types.js";
|
||||
@@ -228,11 +229,7 @@ function isPathInsideHost(root: string, target: string): boolean {
|
||||
path.dirname(resolvedTarget),
|
||||
);
|
||||
const canonicalTarget = path.resolve(canonicalTargetParent, path.basename(resolvedTarget));
|
||||
const rel = path.relative(canonicalRoot, canonicalTarget);
|
||||
if (!rel) {
|
||||
return true;
|
||||
}
|
||||
return !(rel.startsWith("..") || path.isAbsolute(rel));
|
||||
return isPathInside(canonicalRoot, canonicalTarget);
|
||||
}
|
||||
|
||||
function toHostSegments(relativePosix: string): string[] {
|
||||
|
||||
@@ -46,10 +46,10 @@ vi.mock("../../infra/json-files.js", async () => {
|
||||
);
|
||||
return {
|
||||
...actual,
|
||||
writeJsonAtomic: async (
|
||||
writeJson: async (
|
||||
filePath: string,
|
||||
value: unknown,
|
||||
options?: Parameters<typeof actual.writeJsonAtomic>[2],
|
||||
options?: Parameters<typeof actual.writeJson>[2],
|
||||
) => {
|
||||
const payload = JSON.stringify(value);
|
||||
const gate = writeGateState.active;
|
||||
@@ -64,7 +64,7 @@ vi.mock("../../infra/json-files.js", async () => {
|
||||
}
|
||||
await gate.waitForRelease;
|
||||
}
|
||||
await actual.writeJsonAtomic(filePath, value, options);
|
||||
await actual.writeJson(filePath, value, options);
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { z } from "zod";
|
||||
import { writeJsonAtomic } from "../../infra/json-files.js";
|
||||
import { writeJson } from "../../infra/json-files.js";
|
||||
import { safeParseJsonWithSchema } from "../../utils/zod-parse.js";
|
||||
import { acquireSessionWriteLock } from "../session-write-lock.js";
|
||||
import {
|
||||
@@ -171,7 +171,7 @@ async function readShardedEntry<T extends RegistryEntry>(
|
||||
|
||||
async function writeShardedEntry(dir: string, entry: RegistryEntryPayload): Promise<void> {
|
||||
await fs.mkdir(dir, { recursive: true });
|
||||
await writeJsonAtomic(shardedEntryFilePath(dir, entry.containerName), entry, {
|
||||
await writeJson(shardedEntryFilePath(dir, entry.containerName), entry, {
|
||||
trailingNewline: true,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ import { spawn } from "node:child_process";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { resolveBoundaryPath } from "../../infra/boundary-path.js";
|
||||
import { resolveRootPath } from "../../infra/boundary-path.js";
|
||||
import { parseSshTarget } from "../../infra/ssh-tunnel.js";
|
||||
import { resolvePreferredOpenClawTmpDir } from "../../infra/tmp-openclaw-dir.js";
|
||||
import { resolveUserPath } from "../../utils.js";
|
||||
@@ -349,7 +349,7 @@ async function assertSafeUploadSymlinks(localDir: string): Promise<void> {
|
||||
const entryPath = path.join(currentDir, entry.name);
|
||||
if (entry.isSymbolicLink()) {
|
||||
try {
|
||||
await resolveBoundaryPath({
|
||||
await resolveRootPath({
|
||||
absolutePath: entryPath,
|
||||
rootPath: rootDir,
|
||||
boundaryLabel: "SSH sandbox upload tree",
|
||||
|
||||
@@ -2,7 +2,7 @@ import syncFs from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import type { OptionalBootstrapFileName } from "../../config/types.agent-defaults.js";
|
||||
import { openBoundaryFile } from "../../infra/boundary-file-read.js";
|
||||
import { openRootFile } from "../../infra/boundary-file-read.js";
|
||||
import { resolveUserPath } from "../../utils.js";
|
||||
import {
|
||||
DEFAULT_AGENTS_FILENAME,
|
||||
@@ -40,7 +40,7 @@ export async function ensureSandboxWorkspace(
|
||||
await fs.access(dest);
|
||||
} catch {
|
||||
try {
|
||||
const opened = await openBoundaryFile({
|
||||
const opened = await openRootFile({
|
||||
absolutePath: src,
|
||||
rootPath: seed,
|
||||
boundaryLabel: "sandbox seed workspace",
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { replaceFileAtomic } from "../infra/replace-file.js";
|
||||
import { STREAM_ERROR_FALLBACK_TEXT } from "./stream-message-shared.js";
|
||||
|
||||
/** Placeholder for blank user messages — preserves the user turn so strict
|
||||
@@ -278,28 +279,19 @@ export async function repairSessionFileIfNeeded(params: {
|
||||
|
||||
const cleaned = `${entries.map((entry) => JSON.stringify(entry)).join("\n")}\n`;
|
||||
const backupPath = `${sessionFile}.bak-${process.pid}-${Date.now()}`;
|
||||
const tmpPath = `${sessionFile}.repair-${process.pid}-${Date.now()}.tmp`;
|
||||
try {
|
||||
const stat = await fs.stat(sessionFile).catch(() => null);
|
||||
await fs.writeFile(backupPath, content, "utf-8");
|
||||
if (stat) {
|
||||
await fs.chmod(backupPath, stat.mode);
|
||||
}
|
||||
await fs.writeFile(tmpPath, cleaned, "utf-8");
|
||||
if (stat) {
|
||||
await fs.chmod(tmpPath, stat.mode);
|
||||
}
|
||||
await fs.rename(tmpPath, sessionFile);
|
||||
await replaceFileAtomic({
|
||||
filePath: sessionFile,
|
||||
content: cleaned,
|
||||
preserveExistingMode: true,
|
||||
tempPrefix: `${path.basename(sessionFile)}.repair`,
|
||||
});
|
||||
} catch (err) {
|
||||
try {
|
||||
await fs.unlink(tmpPath);
|
||||
} catch (cleanupErr) {
|
||||
params.warn?.(
|
||||
`session file repair cleanup failed: ${cleanupErr instanceof Error ? cleanupErr.message : "unknown error"} (${path.basename(
|
||||
tmpPath,
|
||||
)})`,
|
||||
);
|
||||
}
|
||||
return {
|
||||
repaired: false,
|
||||
droppedLines,
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import fsSync from "node:fs";
|
||||
import "../infra/fs-safe-defaults.js";
|
||||
import type fsSync from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { createFileLockManager } from "../infra/file-lock-manager.js";
|
||||
import { getProcessStartTime, isPidAlive } from "../shared/pid-alive.js";
|
||||
import { resolveProcessScopedMap } from "../shared/process-scoped-map.js";
|
||||
import { SessionWriteLockTimeoutError } from "./session-write-lock-error.js";
|
||||
|
||||
type LockFilePayload = {
|
||||
@@ -16,19 +17,6 @@ function isValidLockNumber(value: unknown): value is number {
|
||||
return typeof value === "number" && Number.isInteger(value) && value >= 0;
|
||||
}
|
||||
|
||||
type HeldLock = {
|
||||
count: number;
|
||||
handle: fs.FileHandle;
|
||||
lockPath: string;
|
||||
acquiredAt: number;
|
||||
maxHoldMs: number;
|
||||
releasePromise?: Promise<void>;
|
||||
};
|
||||
|
||||
type SyncClosableFileHandle = fs.FileHandle & {
|
||||
[key: symbol]: unknown;
|
||||
};
|
||||
|
||||
export type SessionLockInspection = {
|
||||
lockPath: string;
|
||||
pid: number | null;
|
||||
@@ -43,7 +31,6 @@ export type SessionLockInspection = {
|
||||
const CLEANUP_SIGNALS = ["SIGINT", "SIGTERM", "SIGQUIT", "SIGABRT"] as const;
|
||||
type CleanupSignal = (typeof CLEANUP_SIGNALS)[number];
|
||||
const CLEANUP_STATE_KEY = Symbol.for("openclaw.sessionWriteLockCleanupState");
|
||||
const HELD_LOCKS_KEY = Symbol.for("openclaw.sessionWriteLockHeldLocks");
|
||||
const WATCHDOG_STATE_KEY = Symbol.for("openclaw.sessionWriteLockWatchdogState");
|
||||
|
||||
const DEFAULT_STALE_MS = 30 * 60 * 1000;
|
||||
@@ -73,7 +60,7 @@ type LockInspectionDetails = Pick<
|
||||
"pid" | "pidAlive" | "createdAt" | "ageMs" | "stale" | "staleReasons"
|
||||
>;
|
||||
|
||||
const HELD_LOCKS = resolveProcessScopedMap<HeldLock>(HELD_LOCKS_KEY);
|
||||
const SESSION_LOCKS = createFileLockManager("openclaw.session-write-lock");
|
||||
|
||||
export type SessionWriteLockAcquireTimeoutConfig = {
|
||||
session?: {
|
||||
@@ -151,105 +138,30 @@ export function resolveSessionLockMaxHoldFromTimeout(params: {
|
||||
return Math.min(MAX_LOCK_HOLD_MS, Math.max(minMs, timeoutMs + graceMs));
|
||||
}
|
||||
|
||||
async function releaseHeldLock(
|
||||
normalizedSessionFile: string,
|
||||
held: HeldLock,
|
||||
opts: { force?: boolean } = {},
|
||||
): Promise<boolean> {
|
||||
const current = HELD_LOCKS.get(normalizedSessionFile);
|
||||
if (current !== held) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (opts.force) {
|
||||
held.count = 0;
|
||||
} else {
|
||||
held.count -= 1;
|
||||
if (held.count > 0) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (held.releasePromise) {
|
||||
await held.releasePromise.catch(() => undefined);
|
||||
return true;
|
||||
}
|
||||
|
||||
HELD_LOCKS.delete(normalizedSessionFile);
|
||||
held.releasePromise = (async () => {
|
||||
try {
|
||||
await held.handle.close();
|
||||
} catch {
|
||||
// Ignore errors during cleanup - best effort.
|
||||
}
|
||||
try {
|
||||
await fs.rm(held.lockPath, { force: true });
|
||||
} catch {
|
||||
// Ignore errors during cleanup - best effort.
|
||||
}
|
||||
})();
|
||||
|
||||
try {
|
||||
await held.releasePromise;
|
||||
return true;
|
||||
} finally {
|
||||
held.releasePromise = undefined;
|
||||
if (HELD_LOCKS.size === 0) {
|
||||
stopWatchdogTimer();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Synchronously release all held locks.
|
||||
* Used during process exit when async operations aren't reliable.
|
||||
*/
|
||||
function releaseAllLocksSync(): void {
|
||||
for (const [sessionFile, held] of HELD_LOCKS) {
|
||||
closeFileHandleSyncBestEffort(held.handle);
|
||||
try {
|
||||
fsSync.rmSync(held.lockPath, { force: true });
|
||||
} catch {
|
||||
// Ignore errors during cleanup - best effort
|
||||
}
|
||||
HELD_LOCKS.delete(sessionFile);
|
||||
}
|
||||
if (HELD_LOCKS.size === 0) {
|
||||
stopWatchdogTimer();
|
||||
}
|
||||
}
|
||||
|
||||
function closeFileHandleSyncBestEffort(handle: fs.FileHandle): void {
|
||||
const syncCloseSymbol = Object.getOwnPropertySymbols(Object.getPrototypeOf(handle)).find(
|
||||
(symbol) => symbol.description === "kCloseSync",
|
||||
);
|
||||
if (syncCloseSymbol) {
|
||||
const closeSync = (handle as SyncClosableFileHandle)[syncCloseSymbol];
|
||||
if (typeof closeSync === "function") {
|
||||
try {
|
||||
closeSync.call(handle);
|
||||
return;
|
||||
} catch {
|
||||
// Fall back to async close below.
|
||||
}
|
||||
}
|
||||
}
|
||||
void handle.close().catch(() => undefined);
|
||||
SESSION_LOCKS.reset();
|
||||
stopWatchdogTimer();
|
||||
}
|
||||
|
||||
async function runLockWatchdogCheck(nowMs = Date.now()): Promise<number> {
|
||||
let released = 0;
|
||||
for (const [sessionFile, held] of HELD_LOCKS.entries()) {
|
||||
for (const held of SESSION_LOCKS.heldEntries()) {
|
||||
const maxHoldMs =
|
||||
typeof held.metadata.maxHoldMs === "number" ? held.metadata.maxHoldMs : DEFAULT_MAX_HOLD_MS;
|
||||
const heldForMs = nowMs - held.acquiredAt;
|
||||
if (heldForMs <= held.maxHoldMs) {
|
||||
if (heldForMs <= maxHoldMs) {
|
||||
continue;
|
||||
}
|
||||
|
||||
process.stderr.write(
|
||||
`[session-write-lock] releasing lock held for ${heldForMs}ms (max=${held.maxHoldMs}ms): ${held.lockPath}\n`,
|
||||
`[session-write-lock] releasing lock held for ${heldForMs}ms (max=${maxHoldMs}ms): ${held.lockPath}\n`,
|
||||
);
|
||||
|
||||
const didRelease = await releaseHeldLock(sessionFile, held, { force: true });
|
||||
const didRelease = await held.forceRelease();
|
||||
if (didRelease) {
|
||||
released += 1;
|
||||
}
|
||||
@@ -458,14 +370,14 @@ async function shouldReclaimContendedLockFile(
|
||||
|
||||
function shouldTreatAsOrphanSelfLock(params: {
|
||||
payload: LockFilePayload | null;
|
||||
normalizedSessionFile: string;
|
||||
heldByThisProcess: boolean;
|
||||
reclaimLockWithoutStarttime: boolean;
|
||||
}): boolean {
|
||||
const pid = isValidLockNumber(params.payload?.pid) ? params.payload.pid : null;
|
||||
if (pid !== process.pid) {
|
||||
return false;
|
||||
}
|
||||
if (HELD_LOCKS.has(params.normalizedSessionFile)) {
|
||||
if (params.heldByThisProcess) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -484,14 +396,14 @@ function inspectLockPayloadForSession(params: {
|
||||
payload: LockFilePayload | null;
|
||||
staleMs: number;
|
||||
nowMs: number;
|
||||
normalizedSessionFile: string;
|
||||
heldByThisProcess: boolean;
|
||||
reclaimLockWithoutStarttime: boolean;
|
||||
}): LockInspectionDetails {
|
||||
const inspected = inspectLockPayload(params.payload, params.staleMs, params.nowMs);
|
||||
if (
|
||||
!shouldTreatAsOrphanSelfLock({
|
||||
payload: params.payload,
|
||||
normalizedSessionFile: params.normalizedSessionFile,
|
||||
heldByThisProcess: params.heldByThisProcess,
|
||||
reclaimLockWithoutStarttime: params.reclaimLockWithoutStarttime,
|
||||
})
|
||||
) {
|
||||
@@ -541,13 +453,11 @@ export async function cleanStaleLockFiles(params: {
|
||||
for (const entry of lockEntries) {
|
||||
const lockPath = path.join(sessionsDir, entry.name);
|
||||
const payload = await readLockPayload(lockPath);
|
||||
const sessionFile = lockPath.slice(0, -".lock".length);
|
||||
const normalizedSessionFile = await resolveNormalizedSessionFile(sessionFile);
|
||||
const inspected = inspectLockPayloadForSession({
|
||||
payload,
|
||||
staleMs,
|
||||
nowMs,
|
||||
normalizedSessionFile,
|
||||
heldByThisProcess: false,
|
||||
reclaimLockWithoutStarttime: false,
|
||||
});
|
||||
const lockInfo: SessionLockInspection = {
|
||||
@@ -589,97 +499,46 @@ export async function acquireSessionWriteLock(params: {
|
||||
const maxHoldMs = resolvePositiveMs(params.maxHoldMs, DEFAULT_MAX_HOLD_MS);
|
||||
const sessionFile = path.resolve(params.sessionFile);
|
||||
const sessionDir = path.dirname(sessionFile);
|
||||
await fs.mkdir(sessionDir, { recursive: true });
|
||||
const normalizedSessionFile = await resolveNormalizedSessionFile(sessionFile);
|
||||
const lockPath = `${normalizedSessionFile}.lock`;
|
||||
|
||||
const held = HELD_LOCKS.get(normalizedSessionFile);
|
||||
if (allowReentrant && held) {
|
||||
held.count += 1;
|
||||
return {
|
||||
release: async () => {
|
||||
await releaseHeldLock(normalizedSessionFile, held);
|
||||
await fs.mkdir(sessionDir, { recursive: true });
|
||||
try {
|
||||
const lock = await SESSION_LOCKS.acquire(sessionFile, {
|
||||
staleMs,
|
||||
timeoutMs,
|
||||
retry: { minTimeout: 50, maxTimeout: 1000, factor: 1 },
|
||||
allowReentrant,
|
||||
metadata: { maxHoldMs },
|
||||
payload: () => {
|
||||
const createdAt = new Date().toISOString();
|
||||
const starttime = getProcessStartTime(process.pid);
|
||||
const lockPayload: LockFilePayload = { pid: process.pid, createdAt };
|
||||
if (starttime !== null) {
|
||||
lockPayload.starttime = starttime;
|
||||
}
|
||||
return lockPayload as Record<string, unknown>;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
const startedAt = Date.now();
|
||||
let attempt = 0;
|
||||
while (Date.now() - startedAt < timeoutMs) {
|
||||
attempt += 1;
|
||||
let handle: fs.FileHandle | null = null;
|
||||
try {
|
||||
handle = await fs.open(lockPath, "wx");
|
||||
const createdHeld: HeldLock = {
|
||||
count: 1,
|
||||
handle,
|
||||
lockPath,
|
||||
acquiredAt: Date.now(),
|
||||
maxHoldMs,
|
||||
};
|
||||
HELD_LOCKS.set(normalizedSessionFile, createdHeld);
|
||||
const createdAt = new Date().toISOString();
|
||||
const starttime = getProcessStartTime(process.pid);
|
||||
const lockPayload: LockFilePayload = { pid: process.pid, createdAt };
|
||||
if (starttime !== null) {
|
||||
lockPayload.starttime = starttime;
|
||||
}
|
||||
await handle.writeFile(JSON.stringify(lockPayload, null, 2), "utf8");
|
||||
return {
|
||||
release: async () => {
|
||||
await releaseHeldLock(normalizedSessionFile, createdHeld);
|
||||
},
|
||||
};
|
||||
} catch (err) {
|
||||
if (handle) {
|
||||
const currentHeld = HELD_LOCKS.get(normalizedSessionFile);
|
||||
if (currentHeld?.handle === handle) {
|
||||
HELD_LOCKS.delete(normalizedSessionFile);
|
||||
if (HELD_LOCKS.size === 0) {
|
||||
stopWatchdogTimer();
|
||||
}
|
||||
}
|
||||
try {
|
||||
await handle.close();
|
||||
} catch {
|
||||
// Ignore cleanup errors on failed lock initialization.
|
||||
}
|
||||
try {
|
||||
await fs.rm(lockPath, { force: true });
|
||||
} catch {
|
||||
// Ignore cleanup errors on failed lock initialization.
|
||||
}
|
||||
}
|
||||
const code = (err as { code?: unknown }).code;
|
||||
if (code !== "EEXIST") {
|
||||
throw err;
|
||||
}
|
||||
const payload = await readLockPayload(lockPath);
|
||||
const nowMs = Date.now();
|
||||
const inspected = inspectLockPayloadForSession({
|
||||
payload,
|
||||
staleMs,
|
||||
nowMs,
|
||||
normalizedSessionFile,
|
||||
reclaimLockWithoutStarttime: true,
|
||||
});
|
||||
if (await shouldReclaimContendedLockFile(lockPath, inspected, staleMs, nowMs)) {
|
||||
await fs.rm(lockPath, { force: true });
|
||||
continue;
|
||||
}
|
||||
|
||||
const remainingMs = timeoutMs - (Date.now() - startedAt);
|
||||
if (remainingMs <= 0) {
|
||||
break;
|
||||
}
|
||||
const delay = Math.min(1000, 50 * attempt, remainingMs);
|
||||
await new Promise((r) => setTimeout(r, delay));
|
||||
shouldReclaim: async ({ payload, nowMs, heldByThisProcess }) => {
|
||||
const inspected = inspectLockPayloadForSession({
|
||||
payload: payload as LockFilePayload | null,
|
||||
staleMs,
|
||||
nowMs,
|
||||
heldByThisProcess,
|
||||
reclaimLockWithoutStarttime: true,
|
||||
});
|
||||
return await shouldReclaimContendedLockFile(lockPath, inspected, staleMs, nowMs);
|
||||
},
|
||||
});
|
||||
return { release: lock.release };
|
||||
} catch (err) {
|
||||
if ((err as { code?: unknown }).code !== "file_lock_timeout") {
|
||||
throw err;
|
||||
}
|
||||
const timeoutLockPath = (err as { lockPath?: string }).lockPath ?? lockPath;
|
||||
const payload = await readLockPayload(timeoutLockPath);
|
||||
const owner = typeof payload?.pid === "number" ? `pid=${payload.pid}` : "unknown";
|
||||
throw new SessionWriteLockTimeoutError({ timeoutMs, owner, lockPath: timeoutLockPath });
|
||||
}
|
||||
|
||||
const payload = await readLockPayload(lockPath);
|
||||
const owner = typeof payload?.pid === "number" ? `pid=${payload.pid}` : "unknown";
|
||||
throw new SessionWriteLockTimeoutError({ timeoutMs, owner, lockPath });
|
||||
}
|
||||
|
||||
export const __testing = {
|
||||
@@ -690,9 +549,7 @@ export const __testing = {
|
||||
};
|
||||
|
||||
export async function drainSessionWriteLockStateForTest(): Promise<void> {
|
||||
for (const [sessionFile, held] of Array.from(HELD_LOCKS.entries())) {
|
||||
await releaseHeldLock(sessionFile, held, { force: true }).catch(() => undefined);
|
||||
}
|
||||
await SESSION_LOCKS.drain();
|
||||
stopWatchdogTimer();
|
||||
unregisterCleanupHandlers();
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ const searchClawHubSkillsMock = vi.fn();
|
||||
const archiveCleanupMock = vi.fn();
|
||||
const withExtractedArchiveRootMock = vi.fn();
|
||||
const installPackageDirMock = vi.fn();
|
||||
const fileExistsMock = vi.fn();
|
||||
const pathExistsMock = vi.fn();
|
||||
|
||||
vi.mock("../infra/clawhub.js", () => ({
|
||||
fetchClawHubSkillDetail: fetchClawHubSkillDetailMock,
|
||||
@@ -29,8 +29,8 @@ vi.mock("../infra/install-package-dir.js", () => ({
|
||||
installPackageDir: installPackageDirMock,
|
||||
}));
|
||||
|
||||
vi.mock("../infra/archive.js", () => ({
|
||||
fileExists: fileExistsMock,
|
||||
vi.mock("../infra/fs-safe.js", () => ({
|
||||
pathExists: pathExistsMock,
|
||||
}));
|
||||
|
||||
const { installSkillFromClawHub, searchSkillsFromClawHub, updateSkillsFromClawHub } =
|
||||
@@ -46,10 +46,10 @@ describe("skills-clawhub", () => {
|
||||
archiveCleanupMock.mockReset();
|
||||
withExtractedArchiveRootMock.mockReset();
|
||||
installPackageDirMock.mockReset();
|
||||
fileExistsMock.mockReset();
|
||||
pathExistsMock.mockReset();
|
||||
|
||||
resolveClawHubBaseUrlMock.mockReturnValue("https://clawhub.ai");
|
||||
fileExistsMock.mockImplementation(async (input: string) => input.endsWith("SKILL.md"));
|
||||
pathExistsMock.mockImplementation(async (input: string) => input.endsWith("SKILL.md"));
|
||||
fetchClawHubSkillDetailMock.mockResolvedValue({
|
||||
skill: {
|
||||
slug: "agentreceipt",
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { fileExists } from "../infra/archive.js";
|
||||
import {
|
||||
downloadClawHubSkillArchive,
|
||||
fetchClawHubSkillDetail,
|
||||
@@ -10,6 +9,7 @@ import {
|
||||
type ClawHubSkillSearchResult,
|
||||
} from "../infra/clawhub.js";
|
||||
import { formatErrorMessage } from "../infra/errors.js";
|
||||
import { pathExists } from "../infra/fs-safe.js";
|
||||
import { withExtractedArchiveRoot } from "../infra/install-flow.js";
|
||||
import { installPackageDir } from "../infra/install-package-dir.js";
|
||||
import { resolveSafeInstallDir } from "../infra/install-safe-path.js";
|
||||
@@ -133,7 +133,7 @@ function resolveSkillInstallDir(workspaceDir: string, slug: string): string {
|
||||
|
||||
async function ensureSkillRoot(rootDir: string): Promise<void> {
|
||||
for (const candidate of ["SKILL.md", "skill.md", "skills.md", "SKILL.MD"]) {
|
||||
if (await fileExists(path.join(rootDir, candidate))) {
|
||||
if (await pathExists(path.join(rootDir, candidate))) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -274,7 +274,7 @@ async function performClawHubSkillInstall(
|
||||
baseUrl: params.baseUrl,
|
||||
});
|
||||
const targetDir = resolveSkillInstallDir(params.workspaceDir, params.slug);
|
||||
if (!params.force && (await fileExists(targetDir))) {
|
||||
if (!params.force && (await pathExists(targetDir))) {
|
||||
return {
|
||||
ok: false,
|
||||
error: `Skill already exists at ${targetDir}. Re-run with force/update.`,
|
||||
|
||||
@@ -6,7 +6,7 @@ import { pipeline } from "node:stream/promises";
|
||||
import type { ReadableStream as NodeReadableStream } from "node:stream/web";
|
||||
import { isWindowsDrivePath } from "../infra/archive-path.js";
|
||||
import { formatErrorMessage } from "../infra/errors.js";
|
||||
import { writeFileFromPathWithinRoot } from "../infra/fs-safe.js";
|
||||
import { root as fsRoot } from "../infra/fs-safe.js";
|
||||
import { assertCanonicalPathWithinBase } from "../infra/install-safe-path.js";
|
||||
import { fetchWithSsrFGuard } from "../infra/net/fetch-guard.js";
|
||||
import { isWithinDir } from "../infra/path-safety.js";
|
||||
@@ -29,21 +29,21 @@ function isNodeReadableStream(value: unknown): value is NodeJS.ReadableStream {
|
||||
}
|
||||
|
||||
function resolveDownloadTargetDir(entry: SkillEntry, spec: SkillInstallSpec): string {
|
||||
const safeRoot = resolveSkillToolsRootDir(entry);
|
||||
const root = resolveSkillToolsRootDir(entry);
|
||||
const raw = spec.targetDir?.trim();
|
||||
if (!raw) {
|
||||
return safeRoot;
|
||||
return root;
|
||||
}
|
||||
|
||||
// Treat non-absolute paths as relative to the per-skill tools root.
|
||||
const resolved =
|
||||
raw.startsWith("~") || path.isAbsolute(raw) || isWindowsDrivePath(raw)
|
||||
? resolveUserPath(raw)
|
||||
: path.resolve(safeRoot, raw);
|
||||
: path.resolve(root, raw);
|
||||
|
||||
if (!isWithinDir(safeRoot, resolved)) {
|
||||
if (!isWithinDir(root, resolved)) {
|
||||
throw new Error(
|
||||
`Refusing to install outside the skill tools directory. targetDir="${raw}" resolves to "${resolved}". Allowed root: "${safeRoot}".`,
|
||||
`Refusing to install outside the skill tools directory. targetDir="${raw}" resolves to "${resolved}". Allowed root: "${root}".`,
|
||||
);
|
||||
}
|
||||
return resolved;
|
||||
@@ -99,11 +99,8 @@ async function downloadFile(params: {
|
||||
? body
|
||||
: Readable.fromWeb(body as NodeReadableStream);
|
||||
await pipeline(readable, file);
|
||||
await writeFileFromPathWithinRoot({
|
||||
rootDir: params.rootDir,
|
||||
relativePath: params.relativePath,
|
||||
sourcePath: tempPath,
|
||||
});
|
||||
const root = await fsRoot(params.rootDir);
|
||||
await root.copyIn(params.relativePath, tempPath);
|
||||
const stat = await fs.promises.stat(destPath);
|
||||
return { bytes: stat.size };
|
||||
} finally {
|
||||
@@ -118,7 +115,7 @@ export async function installDownloadSpec(params: {
|
||||
timeoutMs: number;
|
||||
}): Promise<SkillInstallResult> {
|
||||
const { entry, spec, timeoutMs } = params;
|
||||
const safeRoot = resolveSkillToolsRootDir(entry);
|
||||
const root = resolveSkillToolsRootDir(entry);
|
||||
const url = spec.url?.trim();
|
||||
if (!url) {
|
||||
return {
|
||||
@@ -141,33 +138,33 @@ export async function installDownloadSpec(params: {
|
||||
filename = "download";
|
||||
}
|
||||
|
||||
let canonicalSafeRoot = "";
|
||||
let canonicalRoot = "";
|
||||
let targetDir = "";
|
||||
try {
|
||||
await ensureDir(safeRoot);
|
||||
await ensureDir(root);
|
||||
await assertCanonicalPathWithinBase({
|
||||
baseDir: safeRoot,
|
||||
candidatePath: safeRoot,
|
||||
baseDir: root,
|
||||
candidatePath: root,
|
||||
boundaryLabel: "skill tools directory",
|
||||
});
|
||||
canonicalSafeRoot = await fs.promises.realpath(safeRoot);
|
||||
canonicalRoot = await fs.promises.realpath(root);
|
||||
|
||||
const requestedTargetDir = resolveDownloadTargetDir(entry, spec);
|
||||
await ensureDir(requestedTargetDir);
|
||||
await assertCanonicalPathWithinBase({
|
||||
baseDir: safeRoot,
|
||||
baseDir: root,
|
||||
candidatePath: requestedTargetDir,
|
||||
boundaryLabel: "skill tools directory",
|
||||
});
|
||||
const targetRelativePath = path.relative(safeRoot, requestedTargetDir);
|
||||
targetDir = path.join(canonicalSafeRoot, targetRelativePath);
|
||||
const targetRelativePath = path.relative(root, requestedTargetDir);
|
||||
targetDir = path.join(canonicalRoot, targetRelativePath);
|
||||
} catch (err) {
|
||||
const message = formatErrorMessage(err);
|
||||
return { ok: false, message, stdout: "", stderr: message, code: null };
|
||||
}
|
||||
|
||||
const archivePath = path.join(targetDir, filename);
|
||||
const archiveRelativePath = path.relative(canonicalSafeRoot, archivePath);
|
||||
const archiveRelativePath = path.relative(canonicalRoot, archivePath);
|
||||
if (
|
||||
!archiveRelativePath ||
|
||||
archiveRelativePath === ".." ||
|
||||
@@ -186,7 +183,7 @@ export async function installDownloadSpec(params: {
|
||||
try {
|
||||
const result = await downloadFile({
|
||||
url,
|
||||
rootDir: canonicalSafeRoot,
|
||||
rootDir: canonicalRoot,
|
||||
relativePath: archiveRelativePath,
|
||||
timeoutMs,
|
||||
});
|
||||
@@ -220,7 +217,7 @@ export async function installDownloadSpec(params: {
|
||||
|
||||
try {
|
||||
await assertCanonicalPathWithinBase({
|
||||
baseDir: canonicalSafeRoot,
|
||||
baseDir: canonicalRoot,
|
||||
candidatePath: targetDir,
|
||||
boundaryLabel: "skill tools directory",
|
||||
});
|
||||
|
||||
@@ -22,60 +22,6 @@ vi.mock("../infra/net/fetch-guard.js", () => ({
|
||||
fetchWithSsrFGuard: (...args: unknown[]) => fetchWithSsrFGuardMock(...args),
|
||||
}));
|
||||
|
||||
// Download tests cover installer path handling; fs-safe has dedicated pinned-helper coverage.
|
||||
vi.mock("../infra/fs-pinned-write-helper.js", async () => {
|
||||
const fs = await import("node:fs/promises");
|
||||
const path = await import("node:path");
|
||||
const { pipeline } = await import("node:stream/promises");
|
||||
|
||||
type PinnedWriteParams = {
|
||||
rootPath: string;
|
||||
relativeParentPath: string;
|
||||
basename: string;
|
||||
mkdir: boolean;
|
||||
mode: number;
|
||||
input:
|
||||
| { kind: "buffer"; data: string | Buffer; encoding?: BufferEncoding }
|
||||
| { kind: "stream"; stream: NodeJS.ReadableStream };
|
||||
};
|
||||
|
||||
async function resolveParentPath(params: PinnedWriteParams): Promise<string> {
|
||||
const parentPath = params.relativeParentPath
|
||||
? path.join(params.rootPath, ...params.relativeParentPath.split("/"))
|
||||
: params.rootPath;
|
||||
if (params.mkdir) {
|
||||
await fs.mkdir(parentPath, { recursive: true });
|
||||
}
|
||||
return parentPath;
|
||||
}
|
||||
|
||||
async function writePinnedTarget(params: PinnedWriteParams, targetPath: string) {
|
||||
if (params.input.kind === "buffer") {
|
||||
await fs.writeFile(targetPath, params.input.data, {
|
||||
encoding: params.input.encoding,
|
||||
mode: params.mode,
|
||||
});
|
||||
return;
|
||||
}
|
||||
const handle = await fs.open(targetPath, "w", params.mode);
|
||||
try {
|
||||
await pipeline(params.input.stream, handle.createWriteStream());
|
||||
} finally {
|
||||
await handle.close().catch(() => undefined);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
runPinnedWriteHelper: async (params: PinnedWriteParams) => {
|
||||
const parentPath = await resolveParentPath(params);
|
||||
const targetPath = path.join(parentPath, params.basename);
|
||||
await writePinnedTarget(params, targetPath);
|
||||
const stat = await fs.stat(targetPath);
|
||||
return { dev: stat.dev, ino: stat.ino };
|
||||
},
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock("./skills.js", () => ({
|
||||
hasBinary: (bin: string) => hasBinaryMock(bin),
|
||||
}));
|
||||
@@ -262,7 +208,7 @@ describe("installDownloadSpec extraction safety", () => {
|
||||
"fails closed when the lexical tools root is rebound before the final copy",
|
||||
async () => {
|
||||
const entry = buildEntry("base-rebind");
|
||||
const safeRoot = resolveSkillToolsRootDir(entry);
|
||||
const safeToolsRoot = resolveSkillToolsRootDir(entry);
|
||||
const outsideRoot = path.join(workspaceDir, "outside-root");
|
||||
await fs.mkdir(outsideRoot, { recursive: true });
|
||||
|
||||
@@ -274,9 +220,9 @@ describe("installDownloadSpec extraction safety", () => {
|
||||
body: Readable.from(
|
||||
(async function* () {
|
||||
yield Buffer.from("payload");
|
||||
const reboundRoot = `${safeRoot}-rebound`;
|
||||
await fs.rename(safeRoot, reboundRoot);
|
||||
await fs.symlink(outsideRoot, safeRoot);
|
||||
const reboundRoot = `${safeToolsRoot}-rebound`;
|
||||
await fs.rename(safeToolsRoot, reboundRoot);
|
||||
await fs.symlink(outsideRoot, safeToolsRoot);
|
||||
})(),
|
||||
),
|
||||
},
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { openVerifiedFileSync } from "../../infra/safe-open-sync.js";
|
||||
import { openRootFileSync } from "../../infra/boundary-file-read.js";
|
||||
import { parseFrontmatter, resolveSkillInvocationPolicy } from "./frontmatter.js";
|
||||
import { createSyntheticSourceInfo, type Skill } from "./skill-contract.js";
|
||||
import type { ParsedSkillFrontmatter } from "./types.js";
|
||||
@@ -10,31 +10,22 @@ type LoadedLocalSkill = {
|
||||
frontmatter: ParsedSkillFrontmatter;
|
||||
};
|
||||
|
||||
function isPathWithinRoot(rootRealPath: string, candidatePath: string): boolean {
|
||||
const relative = path.relative(rootRealPath, candidatePath);
|
||||
return (
|
||||
relative === "" ||
|
||||
(!relative.startsWith(`..${path.sep}`) && relative !== ".." && !path.isAbsolute(relative))
|
||||
);
|
||||
}
|
||||
|
||||
function readSkillFileSync(params: {
|
||||
rootRealPath: string;
|
||||
filePath: string;
|
||||
maxBytes?: number;
|
||||
}): string | null {
|
||||
const opened = openVerifiedFileSync({
|
||||
filePath: params.filePath,
|
||||
rejectPathSymlink: true,
|
||||
const opened = openRootFileSync({
|
||||
absolutePath: params.filePath,
|
||||
rootPath: params.rootRealPath,
|
||||
rootRealPath: params.rootRealPath,
|
||||
boundaryLabel: "skill root",
|
||||
maxBytes: params.maxBytes,
|
||||
});
|
||||
if (!opened.ok) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
if (!isPathWithinRoot(params.rootRealPath, opened.path)) {
|
||||
return null;
|
||||
}
|
||||
return fs.readFileSync(opened.fd, "utf8");
|
||||
} finally {
|
||||
fs.closeSync(opened.fd);
|
||||
|
||||
@@ -2,6 +2,7 @@ import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { isAcpRuntimeSpawnAvailable } from "../../acp/runtime/availability.js";
|
||||
import type { OpenClawConfig } from "../../config/types.openclaw.js";
|
||||
import { walkDirectorySync } from "../../infra/fs-safe.js";
|
||||
import { createSubsystemLogger } from "../../logging/subsystem.js";
|
||||
import {
|
||||
normalizePluginsConfigWithResolver,
|
||||
@@ -130,15 +131,13 @@ function collectSkillTargets(dir: string, targets: Map<string, string>): void {
|
||||
return;
|
||||
}
|
||||
|
||||
let entries: fs.Dirent[];
|
||||
try {
|
||||
entries = fs.readdirSync(dir, { withFileTypes: true });
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
const entries = walkDirectorySync(dir, {
|
||||
maxDepth: 1,
|
||||
symlinks: "skip",
|
||||
include: (entry) => entry.kind === "directory",
|
||||
}).entries;
|
||||
for (const entry of entries) {
|
||||
if (!entry.isDirectory()) continue;
|
||||
const childPath = path.join(dir, entry.name);
|
||||
const childPath = entry.path;
|
||||
if (!hasPublishableSkillFile({ skillDir: childPath, rootDir: dir })) continue;
|
||||
const basename = entry.name;
|
||||
const existing = targets.get(basename);
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import fs, { type Dirent } from "node:fs";
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import type { OpenClawConfig } from "../../config/types.openclaw.js";
|
||||
import { walkDirectorySync } from "../../infra/fs-safe.js";
|
||||
import { resolveOsHomeDir } from "../../infra/home-dir.js";
|
||||
import { isPathInside } from "../../infra/path-guards.js";
|
||||
import { createSubsystemLogger } from "../../logging/subsystem.js";
|
||||
@@ -181,44 +182,21 @@ function listChildDirectories(
|
||||
opts?.maxRawEntriesToScan === undefined
|
||||
? resolveRawEntryScanLimit(opts?.maxCandidateDirs)
|
||||
: Math.max(0, opts.maxRawEntriesToScan);
|
||||
try {
|
||||
const dirs: string[] = [];
|
||||
let scannedEntryCount = 0;
|
||||
let truncated = false;
|
||||
const handle = fs.opendirSync(dir);
|
||||
try {
|
||||
let entry: Dirent | null;
|
||||
while ((entry = handle.readSync()) !== null) {
|
||||
if (scannedEntryCount >= maxRawEntriesToScan) {
|
||||
truncated = true;
|
||||
break;
|
||||
}
|
||||
scannedEntryCount += 1;
|
||||
|
||||
if (entry.name.startsWith(".")) continue;
|
||||
if (entry.name === "node_modules") continue;
|
||||
const fullPath = path.join(dir, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
dirs.push(entry.name);
|
||||
continue;
|
||||
}
|
||||
if (entry.isSymbolicLink()) {
|
||||
try {
|
||||
if (fs.statSync(fullPath).isDirectory()) {
|
||||
dirs.push(entry.name);
|
||||
}
|
||||
} catch {
|
||||
// ignore broken symlinks
|
||||
}
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
handle.closeSync();
|
||||
}
|
||||
return { dirs, scannedEntryCount, truncated };
|
||||
} catch {
|
||||
const scan = walkDirectorySync(dir, {
|
||||
maxDepth: 1,
|
||||
maxEntries: maxRawEntriesToScan,
|
||||
symlinks: "follow",
|
||||
include: (entry) =>
|
||||
entry.kind === "directory" && !entry.name.startsWith(".") && entry.name !== "node_modules",
|
||||
});
|
||||
if (scan.scannedEntryCount === 0 && scan.entries.length === 0) {
|
||||
return { dirs: [], scannedEntryCount: 0, truncated: false };
|
||||
}
|
||||
return {
|
||||
dirs: scan.entries.map((entry) => entry.name),
|
||||
scannedEntryCount: scan.scannedEntryCount,
|
||||
truncated: scan.truncated,
|
||||
};
|
||||
}
|
||||
|
||||
function resolveRawEntryScanLimit(maxCandidateDirs: number | undefined): number {
|
||||
|
||||
@@ -2,6 +2,7 @@ import crypto from "node:crypto";
|
||||
import { promises as fs } from "node:fs";
|
||||
import path from "node:path";
|
||||
import type { OpenClawConfig } from "../config/types.openclaw.js";
|
||||
import { privateFileStore } from "../infra/private-file-store.js";
|
||||
import { normalizeOptionalString } from "../shared/string-coerce.js";
|
||||
import { resolveAgentWorkspaceDir } from "./agent-scope.js";
|
||||
|
||||
@@ -131,6 +132,7 @@ export async function materializeSubagentAttachments(params: {
|
||||
|
||||
try {
|
||||
await fs.mkdir(absDir, { recursive: true, mode: 0o700 });
|
||||
const store = privateFileStore(absDir);
|
||||
|
||||
const seen = new Set<string>();
|
||||
const files: SubagentAttachmentReceiptFile[] = [];
|
||||
@@ -192,14 +194,11 @@ export async function materializeSubagentAttachments(params: {
|
||||
}
|
||||
|
||||
const sha256 = crypto.createHash("sha256").update(buf).digest("hex");
|
||||
const outPath = path.join(absDir, name);
|
||||
writeJobs.push({ outPath, buf });
|
||||
writeJobs.push({ outPath: name, buf });
|
||||
files.push({ name, bytes, sha256 });
|
||||
}
|
||||
|
||||
await Promise.all(
|
||||
writeJobs.map(({ outPath, buf }) => fs.writeFile(outPath, buf, { mode: 0o600, flag: "wx" })),
|
||||
);
|
||||
await Promise.all(writeJobs.map(({ outPath, buf }) => store.writeText(outPath, buf)));
|
||||
|
||||
const manifest = {
|
||||
relDir,
|
||||
@@ -207,14 +206,7 @@ export async function materializeSubagentAttachments(params: {
|
||||
totalBytes,
|
||||
files,
|
||||
};
|
||||
await fs.writeFile(
|
||||
path.join(absDir, ".manifest.json"),
|
||||
JSON.stringify(manifest, null, 2) + "\n",
|
||||
{
|
||||
mode: 0o600,
|
||||
flag: "wx",
|
||||
},
|
||||
);
|
||||
await store.writeJson(".manifest.json", manifest, { trailingNewline: true });
|
||||
|
||||
return {
|
||||
status: "ok",
|
||||
|
||||
@@ -1,12 +1,10 @@
|
||||
import crypto from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { Type } from "typebox";
|
||||
import { writeBase64ToFile } from "../../cli/nodes-camera.js";
|
||||
import { canvasSnapshotTempPath, parseCanvasSnapshotPayload } from "../../cli/nodes-canvas.js";
|
||||
import type { OpenClawConfig } from "../../config/types.openclaw.js";
|
||||
import { logVerbose, shouldLogVerbose } from "../../globals.js";
|
||||
import { isInboundPathAllowed } from "../../media/inbound-path-policy.js";
|
||||
import { readLocalFileFromRoots } from "../../infra/fs-safe.js";
|
||||
import { getDefaultMediaLocalRoots } from "../../media/local-roots.js";
|
||||
import { imageMimeFromFormat } from "../../media/mime.js";
|
||||
import { normalizeLowercaseStringOrEmpty } from "../../shared/string-coerce.js";
|
||||
@@ -33,22 +31,19 @@ async function readJsonlFromPath(jsonlPath: string): Promise<string> {
|
||||
if (!trimmed) {
|
||||
return "";
|
||||
}
|
||||
const resolved = path.resolve(trimmed);
|
||||
const roots = getDefaultMediaLocalRoots();
|
||||
if (!isInboundPathAllowed({ filePath: resolved, roots })) {
|
||||
const result = await readLocalFileFromRoots({
|
||||
filePath: trimmed,
|
||||
roots,
|
||||
label: "canvas jsonlPath",
|
||||
});
|
||||
if (!result) {
|
||||
if (shouldLogVerbose()) {
|
||||
logVerbose(`Blocked canvas jsonlPath outside allowed roots: ${resolved}`);
|
||||
logVerbose(`Blocked canvas jsonlPath outside allowed roots: ${trimmed}`);
|
||||
}
|
||||
throw new Error("jsonlPath outside allowed roots");
|
||||
}
|
||||
const canonical = await fs.realpath(resolved).catch(() => resolved);
|
||||
if (!isInboundPathAllowed({ filePath: canonical, roots })) {
|
||||
if (shouldLogVerbose()) {
|
||||
logVerbose(`Blocked canvas jsonlPath outside allowed roots: ${canonical}`);
|
||||
}
|
||||
throw new Error("jsonlPath outside allowed roots");
|
||||
}
|
||||
return await fs.readFile(canonical, "utf8");
|
||||
return result.buffer.toString("utf8");
|
||||
}
|
||||
|
||||
// Flattened schema: runtime validates per-action requirements.
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import fs from "node:fs/promises";
|
||||
import type {
|
||||
AgentTool,
|
||||
AgentToolResult,
|
||||
AgentToolUpdateCallback,
|
||||
} from "@mariozechner/pi-agent-core";
|
||||
import type { TSchema } from "typebox";
|
||||
import { readLocalFileSafely } from "../../infra/fs-safe.js";
|
||||
import { detectMime } from "../../media/mime.js";
|
||||
import { readSnakeCaseParamRaw } from "../../param-key.js";
|
||||
import type { ImageSanitizationLimits } from "../image-sanitization.js";
|
||||
@@ -345,7 +345,7 @@ export async function imageResultFromFile(params: {
|
||||
details?: Record<string, unknown>;
|
||||
imageSanitization?: ImageSanitizationLimits;
|
||||
}): Promise<AgentToolResult<unknown>> {
|
||||
const buf = await fs.readFile(params.path);
|
||||
const buf = (await readLocalFileSafely({ filePath: params.path })).buffer;
|
||||
const mimeType = (await detectMime({ buffer: buf.slice(0, 256) })) ?? "image/png";
|
||||
return await imageResult({
|
||||
label: params.label,
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import syncFs from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { openBoundaryFile } from "../infra/boundary-file-read.js";
|
||||
import { openRootFile } from "../infra/boundary-file-read.js";
|
||||
import { pathExists } from "../infra/fs-safe.js";
|
||||
import { replaceFileAtomic } from "../infra/replace-file.js";
|
||||
import {
|
||||
CANONICAL_ROOT_MEMORY_FILENAME,
|
||||
exactWorkspaceEntryExists,
|
||||
@@ -55,7 +57,7 @@ async function readWorkspaceFileWithGuards(params: {
|
||||
filePath: string;
|
||||
workspaceDir: string;
|
||||
}): Promise<WorkspaceGuardedReadResult> {
|
||||
const opened = await openBoundaryFile({
|
||||
const opened = await openRootFile({
|
||||
absolutePath: params.filePath,
|
||||
rootPath: params.workspaceDir,
|
||||
boundaryLabel: "workspace root",
|
||||
@@ -197,15 +199,6 @@ async function writeFileIfMissing(filePath: string, content: string): Promise<bo
|
||||
}
|
||||
}
|
||||
|
||||
async function fileExists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function fileContentDiffersFromTemplate(
|
||||
filePath: string,
|
||||
template: string,
|
||||
@@ -274,7 +267,7 @@ async function reconcileWorkspaceBootstrapCompletionState(params: {
|
||||
state: WorkspaceSetupState;
|
||||
bootstrapExists?: boolean;
|
||||
}): Promise<WorkspaceBootstrapCompletionReconcileResult> {
|
||||
const bootstrapExists = params.bootstrapExists ?? (await fileExists(params.bootstrapPath));
|
||||
const bootstrapExists = params.bootstrapExists ?? (await pathExists(params.bootstrapPath));
|
||||
if (
|
||||
typeof params.state.setupCompletedAt === "string" &&
|
||||
params.state.setupCompletedAt.trim().length > 0
|
||||
@@ -384,7 +377,7 @@ export async function resolveWorkspaceBootstrapStatus(
|
||||
return "complete";
|
||||
}
|
||||
const bootstrapPath = path.join(resolvedDir, DEFAULT_BOOTSTRAP_FILENAME);
|
||||
const bootstrapExists = await fileExists(bootstrapPath);
|
||||
const bootstrapExists = await pathExists(bootstrapPath);
|
||||
if (!bootstrapExists) {
|
||||
return "complete";
|
||||
}
|
||||
@@ -416,16 +409,11 @@ async function writeWorkspaceSetupState(
|
||||
statePath: string,
|
||||
state: WorkspaceSetupState,
|
||||
): Promise<void> {
|
||||
await fs.mkdir(path.dirname(statePath), { recursive: true });
|
||||
const payload = `${JSON.stringify(state, null, 2)}\n`;
|
||||
const tmpPath = `${statePath}.tmp-${process.pid}-${Date.now().toString(36)}`;
|
||||
try {
|
||||
await fs.writeFile(tmpPath, payload, { encoding: "utf-8" });
|
||||
await fs.rename(tmpPath, statePath);
|
||||
} catch (err) {
|
||||
await fs.unlink(tmpPath).catch(() => {});
|
||||
throw err;
|
||||
}
|
||||
await replaceFileAtomic({
|
||||
filePath: statePath,
|
||||
content: `${JSON.stringify(state, null, 2)}\n`,
|
||||
tempPrefix: ".workspace-state",
|
||||
});
|
||||
}
|
||||
|
||||
async function hasGitRepo(dir: string): Promise<boolean> {
|
||||
@@ -561,7 +549,7 @@ export async function ensureAgentWorkspace(params?: {
|
||||
};
|
||||
const nowIso = () => new Date().toISOString();
|
||||
|
||||
let bootstrapExists = await fileExists(bootstrapPath);
|
||||
let bootstrapExists = await pathExists(bootstrapPath);
|
||||
if (!state.bootstrapSeededAt && bootstrapExists) {
|
||||
markState({ bootstrapSeededAt: nowIso() });
|
||||
}
|
||||
@@ -596,7 +584,7 @@ export async function ensureAgentWorkspace(params?: {
|
||||
const bootstrapTemplate = await loadTemplate(DEFAULT_BOOTSTRAP_FILENAME);
|
||||
const wroteBootstrap = await writeFileIfMissing(bootstrapPath, bootstrapTemplate);
|
||||
if (!wroteBootstrap) {
|
||||
bootstrapExists = await fileExists(bootstrapPath);
|
||||
bootstrapExists = await pathExists(bootstrapPath);
|
||||
} else {
|
||||
bootstrapExists = true;
|
||||
}
|
||||
|
||||
@@ -17,19 +17,20 @@ const childProcessMocks = vi.hoisted(() => ({
|
||||
spawn: vi.fn(),
|
||||
}));
|
||||
const fsSafeMocks = vi.hoisted(() => {
|
||||
class MockSafeOpenError extends Error {
|
||||
class MockFsSafeError extends Error {
|
||||
readonly code: string;
|
||||
|
||||
constructor(code: string, message: string) {
|
||||
super(message);
|
||||
this.name = "SafeOpenError";
|
||||
this.name = "FsSafeError";
|
||||
this.code = code;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
SafeOpenError: MockSafeOpenError,
|
||||
copyFileWithinRoot: vi.fn(),
|
||||
FsSafeError: MockFsSafeError,
|
||||
rootCopyFrom: vi.fn(),
|
||||
root: vi.fn(),
|
||||
readLocalFileSafely: vi.fn(),
|
||||
};
|
||||
});
|
||||
@@ -51,7 +52,7 @@ vi.mock("node:child_process", async () => {
|
||||
vi.mock("../infra/fs-safe.js", () => fsSafeMocks);
|
||||
vi.mock("../media/channel-inbound-roots.js", () => mediaRootMocks);
|
||||
|
||||
async function copyFileWithinRootForTest({
|
||||
async function rootCopyFromForTest({
|
||||
sourcePath,
|
||||
rootDir,
|
||||
relativePath,
|
||||
@@ -64,7 +65,7 @@ async function copyFileWithinRootForTest({
|
||||
}) {
|
||||
const sourceStat = await fs.stat(sourcePath);
|
||||
if (typeof maxBytes === "number" && sourceStat.size > maxBytes) {
|
||||
throw new fsSafeMocks.SafeOpenError(
|
||||
throw new fsSafeMocks.FsSafeError(
|
||||
"too-large",
|
||||
`file exceeds limit of ${maxBytes} bytes (got ${sourceStat.size})`,
|
||||
);
|
||||
@@ -75,7 +76,7 @@ async function copyFileWithinRootForTest({
|
||||
const destPath = path.resolve(rootReal, relativePath);
|
||||
const rootPrefix = `${rootReal}${path.sep}`;
|
||||
if (destPath !== rootReal && !destPath.startsWith(rootPrefix)) {
|
||||
throw new fsSafeMocks.SafeOpenError("outside-workspace", "file is outside workspace root");
|
||||
throw new fsSafeMocks.FsSafeError("outside-workspace", "file is outside workspace root");
|
||||
}
|
||||
|
||||
const parentDir = dirname(destPath);
|
||||
@@ -87,7 +88,7 @@ async function copyFileWithinRootForTest({
|
||||
try {
|
||||
const stat = await fs.lstat(cursor);
|
||||
if (stat.isSymbolicLink()) {
|
||||
throw new fsSafeMocks.SafeOpenError("symlink", "symlink not allowed");
|
||||
throw new fsSafeMocks.FsSafeError("symlink", "symlink not allowed");
|
||||
}
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||
@@ -102,7 +103,7 @@ async function copyFileWithinRootForTest({
|
||||
try {
|
||||
const destStat = await fs.lstat(destPath);
|
||||
if (destStat.isSymbolicLink()) {
|
||||
throw new fsSafeMocks.SafeOpenError("symlink", "symlink not allowed");
|
||||
throw new fsSafeMocks.FsSafeError("symlink", "symlink not allowed");
|
||||
}
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
|
||||
@@ -117,7 +118,16 @@ beforeEach(() => {
|
||||
sandboxMocks.ensureSandboxWorkspaceForSession.mockReset();
|
||||
sandboxMocks.assertSandboxPath.mockReset().mockResolvedValue({ resolved: "", relative: "" });
|
||||
childProcessMocks.spawn.mockClear();
|
||||
fsSafeMocks.copyFileWithinRoot.mockReset().mockImplementation(copyFileWithinRootForTest);
|
||||
fsSafeMocks.rootCopyFrom.mockReset().mockImplementation(rootCopyFromForTest);
|
||||
fsSafeMocks.root.mockReset().mockImplementation(async (rootDir: string) => ({
|
||||
copyFrom: async (sourcePath: string, relativePath: string, options?: { maxBytes?: number }) =>
|
||||
await rootCopyFromForTest({
|
||||
sourcePath,
|
||||
rootDir,
|
||||
relativePath,
|
||||
maxBytes: options?.maxBytes,
|
||||
}),
|
||||
}));
|
||||
mediaRootMocks.resolveChannelRemoteInboundAttachmentRoots
|
||||
.mockReset()
|
||||
.mockReturnValue(["/Users/demo/Library/Messages/Attachments"]);
|
||||
|
||||
@@ -7,6 +7,7 @@ import {
|
||||
type SessionEntry as PiSessionEntry,
|
||||
type SessionHeader,
|
||||
} from "@mariozechner/pi-coding-agent";
|
||||
import { pathExists } from "../../infra/fs-safe.js";
|
||||
import type { ReplyPayload } from "../types.js";
|
||||
import {
|
||||
isReplyPayload,
|
||||
@@ -122,15 +123,6 @@ async function generateHtml(sessionData: SessionData): Promise<string> {
|
||||
].reduce((html, [name, value]) => replaceHtmlPlaceholder(html, name, value), template);
|
||||
}
|
||||
|
||||
async function fileExists(pathName: string): Promise<boolean> {
|
||||
try {
|
||||
await fsp.access(pathName);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function addCollisionSuffix(filePath: string, suffix: number): string {
|
||||
const ext = path.extname(filePath);
|
||||
const baseName = path.basename(filePath, ext);
|
||||
@@ -152,7 +144,6 @@ async function writeNewDefaultExportFile(filePath: string, html: string): Promis
|
||||
}
|
||||
throw new Error(`Could not find an unused export filename near ${filePath}`);
|
||||
}
|
||||
|
||||
async function readSessionDataFromTranscript(sessionFile: string): Promise<{
|
||||
header: SessionHeader | null;
|
||||
entries: PiSessionEntry[];
|
||||
@@ -183,7 +174,7 @@ export async function buildExportSessionReply(params: HandleCommandsParams): Pro
|
||||
}
|
||||
const { entry, sessionFile } = sessionTarget;
|
||||
|
||||
if (!(await fileExists(sessionFile))) {
|
||||
if (!(await pathExists(sessionFile))) {
|
||||
return { text: `❌ Session file not found: ${sessionFile}` };
|
||||
}
|
||||
|
||||
|
||||
@@ -27,6 +27,11 @@ const hoisted = await vi.hoisted(async () => {
|
||||
await actualAccess(file);
|
||||
},
|
||||
),
|
||||
statMock: vi.fn(
|
||||
async (file: fs.PathLike, actualStat: (path: fs.PathLike) => Promise<unknown>) => {
|
||||
return await actualStat(file);
|
||||
},
|
||||
),
|
||||
};
|
||||
});
|
||||
|
||||
@@ -59,6 +64,7 @@ vi.mock("node:fs/promises", async () => {
|
||||
const mockedFs = {
|
||||
...actual,
|
||||
access: (file: fs.PathLike) => hoisted.accessMock(file, actual.access),
|
||||
stat: (file: fs.PathLike) => hoisted.statMock(file, actual.stat),
|
||||
};
|
||||
return {
|
||||
...mockedFs,
|
||||
@@ -67,6 +73,7 @@ vi.mock("node:fs/promises", async () => {
|
||||
});
|
||||
|
||||
const tempDirs: string[] = [];
|
||||
const mockedSessionFile = "/tmp/target-store/session.jsonl";
|
||||
|
||||
function makeTempDir(): string {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-export-command-"));
|
||||
@@ -173,9 +180,20 @@ describe("buildExportTrajectoryReply", () => {
|
||||
await actualAccess(file);
|
||||
},
|
||||
);
|
||||
hoisted.statMock.mockImplementation(
|
||||
async (file: fs.PathLike, actualStat: (path: fs.PathLike) => Promise<unknown>) => {
|
||||
if (file.toString() === "/tmp/target-store/session.jsonl") {
|
||||
return {};
|
||||
}
|
||||
return await actualStat(file);
|
||||
},
|
||||
);
|
||||
fs.mkdirSync(path.dirname(mockedSessionFile), { recursive: true });
|
||||
fs.writeFileSync(mockedSessionFile, "{}\n");
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
fs.rmSync(mockedSessionFile, { force: true });
|
||||
for (const dir of tempDirs.splice(0)) {
|
||||
fs.rmSync(dir, { recursive: true, force: true });
|
||||
}
|
||||
@@ -238,6 +256,7 @@ describe("buildExportTrajectoryReply", () => {
|
||||
|
||||
it("does not echo absolute session paths when the transcript is missing", async () => {
|
||||
const { buildExportTrajectoryReply } = await import("./commands-export-trajectory.js");
|
||||
fs.rmSync(mockedSessionFile, { force: true });
|
||||
hoisted.accessMock.mockImplementation(
|
||||
async (file: fs.PathLike, actualAccess: (path: fs.PathLike) => Promise<void>) => {
|
||||
if (file.toString() === "/tmp/target-store/session.jsonl") {
|
||||
@@ -246,6 +265,14 @@ describe("buildExportTrajectoryReply", () => {
|
||||
await actualAccess(file);
|
||||
},
|
||||
);
|
||||
hoisted.statMock.mockImplementation(
|
||||
async (file: fs.PathLike, actualStat: (path: fs.PathLike) => Promise<unknown>) => {
|
||||
if (file.toString() === "/tmp/target-store/session.jsonl") {
|
||||
throw Object.assign(new Error("missing"), { code: "ENOENT" });
|
||||
}
|
||||
return await actualStat(file);
|
||||
},
|
||||
);
|
||||
|
||||
const reply = await buildExportTrajectoryReply(makeParams());
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
import fsp from "node:fs/promises";
|
||||
import { resolveSessionAgentId } from "../../agents/agent-scope.js";
|
||||
import { createExecTool } from "../../agents/bash-tools.js";
|
||||
import type { ExecToolDetails } from "../../agents/bash-tools.js";
|
||||
import { formatErrorMessage } from "../../infra/errors.js";
|
||||
import type { ExecApprovalRequest } from "../../infra/exec-approvals.js";
|
||||
import { pathExists } from "../../infra/fs-safe.js";
|
||||
import {
|
||||
exportTrajectoryForCommand,
|
||||
formatTrajectoryCommandExportSummary,
|
||||
@@ -56,15 +56,6 @@ const defaultExportTrajectoryCommandDeps: ExportTrajectoryCommandDeps = {
|
||||
deliverPrivateTrajectoryReply: deliverPrivateTrajectoryReply,
|
||||
};
|
||||
|
||||
async function fileExists(pathName: string): Promise<boolean> {
|
||||
try {
|
||||
await fsp.access(pathName);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export async function buildExportTrajectoryCommandReply(
|
||||
params: HandleCommandsParams,
|
||||
deps: Partial<ExportTrajectoryCommandDeps> = {},
|
||||
@@ -146,7 +137,7 @@ export async function buildExportTrajectoryReply(
|
||||
}
|
||||
const { entry, sessionFile } = sessionTarget;
|
||||
|
||||
if (!(await fileExists(sessionFile))) {
|
||||
if (!(await pathExists(sessionFile))) {
|
||||
return { text: "❌ Session file not found." };
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import { resolveAgentContextLimits } from "../../agents/agent-scope.js";
|
||||
import { resolveCronStyleNow } from "../../agents/current-time.js";
|
||||
import { resolveUserTimezone } from "../../agents/date-time.js";
|
||||
import type { OpenClawConfig } from "../../config/types.openclaw.js";
|
||||
import { openBoundaryFile } from "../../infra/boundary-file-read.js";
|
||||
import { openRootFile } from "../../infra/boundary-file-read.js";
|
||||
import { normalizeLowercaseStringOrEmpty } from "../../shared/string-coerce.js";
|
||||
|
||||
const MAX_CONTEXT_CHARS = 1800;
|
||||
@@ -78,7 +78,7 @@ export async function readPostCompactionContext(
|
||||
const agentsPath = path.join(workspaceDir, "AGENTS.md");
|
||||
|
||||
try {
|
||||
const opened = await openBoundaryFile({
|
||||
const opened = await openRootFile({
|
||||
absolutePath: agentsPath,
|
||||
rootPath: workspaceDir,
|
||||
boundaryLabel: "workspace root",
|
||||
|
||||
@@ -19,6 +19,7 @@ import {
|
||||
type SessionEntry as StoreSessionEntry,
|
||||
} from "../../config/sessions/types.js";
|
||||
import { readLatestRecentSessionUsageFromTranscriptAsync } from "../../gateway/session-utils.fs.js";
|
||||
import { readRegularFile } from "../../infra/fs-safe.js";
|
||||
|
||||
type ForkSourceTranscript = {
|
||||
cwd: string;
|
||||
@@ -169,7 +170,7 @@ function collectBranchLabels(params: {
|
||||
async function readForkSourceTranscript(
|
||||
parentSessionFile: string,
|
||||
): Promise<ForkSourceTranscript | null> {
|
||||
const raw = await fs.readFile(parentSessionFile, "utf-8");
|
||||
const raw = (await readRegularFile({ filePath: parentSessionFile })).buffer.toString("utf-8");
|
||||
const fileEntries = parseSessionEntries(raw);
|
||||
migrateSessionEntries(fileEntries);
|
||||
const header =
|
||||
@@ -281,15 +282,6 @@ async function writeBranchedSession(params: {
|
||||
return { sessionId, sessionFile };
|
||||
}
|
||||
|
||||
async function fileExists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
const stat = await fs.stat(filePath);
|
||||
return stat.isFile();
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export async function forkSessionFromParentRuntime(params: {
|
||||
parentEntry: StoreSessionEntry;
|
||||
agentId: string;
|
||||
@@ -300,7 +292,7 @@ export async function forkSessionFromParentRuntime(params: {
|
||||
params.parentEntry,
|
||||
{ agentId: params.agentId, sessionsDir: params.sessionsDir },
|
||||
);
|
||||
if (!parentSessionFile || !(await fileExists(parentSessionFile))) {
|
||||
if (!parentSessionFile) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
|
||||
@@ -7,7 +7,7 @@ import { ensureSandboxWorkspaceForSession } from "../../agents/sandbox.js";
|
||||
import { slugifySessionKey } from "../../agents/sandbox/shared.js";
|
||||
import type { OpenClawConfig } from "../../config/types.openclaw.js";
|
||||
import { logVerbose } from "../../globals.js";
|
||||
import { copyFileWithinRoot, SafeOpenError } from "../../infra/fs-safe.js";
|
||||
import { root as fsRoot, FsSafeError } from "../../infra/fs-safe.js";
|
||||
import { normalizeScpRemoteHost, normalizeScpRemotePath } from "../../infra/scp-host.js";
|
||||
import { resolvePreferredOpenClawTmpDir } from "../../infra/tmp-openclaw-dir.js";
|
||||
import { resolveChannelRemoteInboundAttachmentRoots } from "../../media/channel-inbound-roots.js";
|
||||
@@ -107,7 +107,7 @@ export async function stageSandboxMedia(params: {
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
if (err instanceof SafeOpenError && err.code === "too-large") {
|
||||
if (err instanceof FsSafeError && err.code === "too-large") {
|
||||
logVerbose(
|
||||
`Blocking inbound media staging above ${STAGED_MEDIA_MAX_BYTES} bytes: ${source}`,
|
||||
);
|
||||
@@ -139,10 +139,8 @@ async function stageLocalFileIntoRoot(params: {
|
||||
relativeDestPath: string;
|
||||
maxBytes?: number;
|
||||
}): Promise<void> {
|
||||
await copyFileWithinRoot({
|
||||
sourcePath: params.sourcePath,
|
||||
rootDir: params.rootDir,
|
||||
relativePath: params.relativeDestPath,
|
||||
const root = await fsRoot(params.rootDir);
|
||||
await root.copyIn(params.relativeDestPath, params.sourcePath, {
|
||||
maxBytes: params.maxBytes,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { resolveUserTimezone } from "../../agents/date-time.js";
|
||||
import type { OpenClawConfig } from "../../config/config.js";
|
||||
import { openBoundaryFile } from "../../infra/boundary-file-read.js";
|
||||
import { openRootFile } from "../../infra/boundary-file-read.js";
|
||||
|
||||
const STARTUP_MEMORY_FILE_MAX_BYTES = 16_384;
|
||||
const STARTUP_MEMORY_FILE_MAX_CHARS = 1_200;
|
||||
@@ -205,7 +205,7 @@ async function readStartupMemoryFile(params: {
|
||||
maxFileBytes: number;
|
||||
}): Promise<string | null> {
|
||||
const absolutePath = path.join(params.workspaceDir, params.relativePath);
|
||||
const opened = await openBoundaryFile({
|
||||
const opened = await openRootFile({
|
||||
absolutePath,
|
||||
rootPath: params.workspaceDir,
|
||||
boundaryLabel: "workspace root",
|
||||
|
||||
49
src/canvas-host/file-resolver.test.ts
Normal file
49
src/canvas-host/file-resolver.test.ts
Normal file
@@ -0,0 +1,49 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { createTrackedTempDirs } from "../test-utils/tracked-temp-dirs.js";
|
||||
import { normalizeUrlPath, resolveFileWithinRoot } from "./file-resolver.js";
|
||||
|
||||
const tempDirs = createTrackedTempDirs();
|
||||
|
||||
afterEach(async () => {
|
||||
await tempDirs.cleanup();
|
||||
});
|
||||
|
||||
describe("resolveFileWithinRoot", () => {
|
||||
it("normalizes URL paths", () => {
|
||||
expect(normalizeUrlPath("/nested/../file.txt")).toBe("/file.txt");
|
||||
expect(normalizeUrlPath("plain.txt")).toBe("/plain.txt");
|
||||
});
|
||||
|
||||
it("opens directory index files through the fs-safe root", async () => {
|
||||
const root = await tempDirs.make("openclaw-canvas-resolver-");
|
||||
await fs.mkdir(path.join(root, "docs"), { recursive: true });
|
||||
await fs.writeFile(path.join(root, "docs", "index.html"), "<h1>docs</h1>");
|
||||
|
||||
const result = await resolveFileWithinRoot(root, "/docs");
|
||||
expect(result).not.toBeNull();
|
||||
try {
|
||||
await expect(result?.handle.readFile({ encoding: "utf8" })).resolves.toBe("<h1>docs</h1>");
|
||||
} finally {
|
||||
await result?.handle.close().catch(() => {});
|
||||
}
|
||||
});
|
||||
|
||||
it("rejects traversal paths", async () => {
|
||||
const root = await tempDirs.make("openclaw-canvas-resolver-");
|
||||
|
||||
await expect(resolveFileWithinRoot(root, "/../outside.txt")).resolves.toBeNull();
|
||||
});
|
||||
|
||||
it.runIf(process.platform !== "win32")("rejects symlink entries", async () => {
|
||||
const root = await tempDirs.make("openclaw-canvas-resolver-");
|
||||
const outside = await tempDirs.make("openclaw-canvas-resolver-outside-");
|
||||
const target = path.join(outside, "outside.html");
|
||||
const link = path.join(root, "link.html");
|
||||
await fs.writeFile(target, "outside");
|
||||
await fs.symlink(target, link);
|
||||
|
||||
await expect(resolveFileWithinRoot(root, "/link.html")).resolves.toBeNull();
|
||||
});
|
||||
});
|
||||
@@ -1,6 +1,5 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { SafeOpenError, openFileWithinRoot, type SafeOpenResult } from "../infra/fs-safe.js";
|
||||
import { root as fsRoot, FsSafeError, type OpenResult } from "../infra/fs-safe.js";
|
||||
|
||||
export function normalizeUrlPath(rawPath: string): string {
|
||||
const decoded = decodeURIComponent(rawPath || "/");
|
||||
@@ -11,18 +10,19 @@ export function normalizeUrlPath(rawPath: string): string {
|
||||
export async function resolveFileWithinRoot(
|
||||
rootReal: string,
|
||||
urlPath: string,
|
||||
): Promise<SafeOpenResult | null> {
|
||||
): Promise<OpenResult | null> {
|
||||
const normalized = normalizeUrlPath(urlPath);
|
||||
const rel = normalized.replace(/^\/+/, "");
|
||||
if (rel.split("/").some((p) => p === "..")) {
|
||||
return null;
|
||||
}
|
||||
const root = await fsRoot(rootReal);
|
||||
|
||||
const tryOpen = async (relative: string) => {
|
||||
try {
|
||||
return await openFileWithinRoot({ rootDir: rootReal, relativePath: relative });
|
||||
return await root.open(relative);
|
||||
} catch (err) {
|
||||
if (err instanceof SafeOpenError) {
|
||||
if (err instanceof FsSafeError) {
|
||||
return null;
|
||||
}
|
||||
throw err;
|
||||
@@ -33,17 +33,19 @@ export async function resolveFileWithinRoot(
|
||||
return await tryOpen(path.posix.join(rel, "index.html"));
|
||||
}
|
||||
|
||||
const candidate = path.join(rootReal, rel);
|
||||
try {
|
||||
const st = await fs.lstat(candidate);
|
||||
if (st.isSymbolicLink()) {
|
||||
const st = await root.stat(rel);
|
||||
if (st.isSymbolicLink) {
|
||||
return null;
|
||||
}
|
||||
if (st.isDirectory()) {
|
||||
if (st.isDirectory) {
|
||||
return await tryOpen(path.posix.join(rel, "index.html"));
|
||||
}
|
||||
} catch {
|
||||
// ignore
|
||||
} catch (err) {
|
||||
if (err instanceof FsSafeError) {
|
||||
return null;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
|
||||
return await tryOpen(rel);
|
||||
|
||||
@@ -890,7 +890,7 @@ module.exports = {
|
||||
};
|
||||
});
|
||||
vi.doMock("../../infra/boundary-file-read.js", () => ({
|
||||
openBoundaryFileSync: ({ absolutePath }: { absolutePath: string }) => ({
|
||||
openRootFileSync: ({ absolutePath }: { absolutePath: string }) => ({
|
||||
ok: true,
|
||||
path: absolutePath,
|
||||
fd: fs.openSync(absolutePath, "r"),
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import fs from "node:fs";
|
||||
import { createRequire } from "node:module";
|
||||
import path from "node:path";
|
||||
import { openBoundaryFileSync } from "../../infra/boundary-file-read.js";
|
||||
import { openRootFileSync } from "../../infra/boundary-file-read.js";
|
||||
import { isJavaScriptModulePath } from "../../plugins/native-module-require.js";
|
||||
import {
|
||||
getCachedPluginModuleLoader,
|
||||
@@ -88,7 +88,7 @@ export function loadChannelPluginModule(params: {
|
||||
boundaryRootDir?: string;
|
||||
boundaryLabel?: string;
|
||||
}): unknown {
|
||||
const opened = openBoundaryFileSync({
|
||||
const opened = openRootFileSync({
|
||||
absolutePath: params.modulePath,
|
||||
rootPath: params.boundaryRootDir ?? params.rootDir,
|
||||
boundaryLabel: params.boundaryLabel ?? "plugin root",
|
||||
|
||||
@@ -27,6 +27,7 @@ import {
|
||||
type GatewayService,
|
||||
} from "../../daemon/service.js";
|
||||
import { createLowDiskSpaceWarning } from "../../infra/disk-space.js";
|
||||
import { pathExists } from "../../infra/fs-safe.js";
|
||||
import { runGlobalPackageUpdateSteps } from "../../infra/package-update-steps.js";
|
||||
import { getSelfAndAncestorPidsSync } from "../../infra/restart-stale-pids.js";
|
||||
import { nodeVersionSatisfiesEngine } from "../../infra/runtime-guard.js";
|
||||
@@ -179,15 +180,6 @@ function isTrackedPackageInstallRecord(record: PluginInstallRecord): boolean {
|
||||
);
|
||||
}
|
||||
|
||||
async function pathExists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export async function collectMissingPluginInstallPayloads(params: {
|
||||
records: Record<string, PluginInstallRecord>;
|
||||
config?: OpenClawConfig;
|
||||
|
||||
@@ -13,6 +13,7 @@ import { resolveAuthStorePath } from "../agents/auth-profiles/paths.js";
|
||||
import { loadPersistedAuthProfileStore } from "../agents/auth-profiles/persisted.js";
|
||||
import { commitConfigWithPendingPluginInstalls } from "../cli/plugins-install-record-commit.js";
|
||||
import { logConfigUpdated } from "../config/logging.js";
|
||||
import { pathExists } from "../infra/fs-safe.js";
|
||||
import { saveJsonFile } from "../infra/json-file.js";
|
||||
import { DEFAULT_AGENT_ID, normalizeAgentId } from "../routing/session-key.js";
|
||||
import { type RuntimeEnv, writeRuntimeJson } from "../runtime.js";
|
||||
@@ -48,15 +49,6 @@ type AgentsAddOptions = {
|
||||
json?: boolean;
|
||||
};
|
||||
|
||||
async function fileExists(pathname: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.stat(pathname);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function copyPortableAuthProfiles(params: {
|
||||
destAuthPath: string;
|
||||
sourceAgentDir: string;
|
||||
@@ -291,8 +283,8 @@ export async function agentsAddCommand(
|
||||
normalizeLowercaseStringOrEmpty(path.resolve(mainAuthPath));
|
||||
if (
|
||||
!sameAuthPath &&
|
||||
(await fileExists(sourceAuthPath)) &&
|
||||
!(await fileExists(destAuthPath))
|
||||
(await pathExists(sourceAuthPath)) &&
|
||||
!(await pathExists(destAuthPath))
|
||||
) {
|
||||
const sourceStore = loadPersistedAuthProfileStore(sourceAgentDir);
|
||||
const portable = sourceStore
|
||||
|
||||
@@ -2,6 +2,7 @@ import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { resolveDefaultAgentWorkspaceDir } from "../agents/workspace.js";
|
||||
import type { OpenClawConfig } from "../config/types.openclaw.js";
|
||||
import { isPathInside } from "../infra/path-guards.js";
|
||||
import type { RuntimeEnv } from "../runtime.js";
|
||||
import { resolveHomeDir, resolveUserPath, shortenHomeInString } from "../utils.js";
|
||||
|
||||
@@ -55,8 +56,7 @@ export function buildCleanupPlan(params: {
|
||||
}
|
||||
|
||||
export function isPathWithin(child: string, parent: string): boolean {
|
||||
const relative = path.relative(parent, child);
|
||||
return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative));
|
||||
return isPathInside(parent, child);
|
||||
}
|
||||
|
||||
function isUnsafeRemovalTarget(target: string): boolean {
|
||||
|
||||
@@ -164,13 +164,13 @@ vi.mock("../infra/json-files.js", async () => {
|
||||
writeTextAtomic: async (
|
||||
filePath: string,
|
||||
content: string,
|
||||
options?: { mode?: number; ensureDirMode?: number; appendTrailingNewline?: boolean },
|
||||
options?: { mode?: number; dirMode?: number; trailingNewline?: boolean },
|
||||
) => {
|
||||
const payload =
|
||||
options?.appendTrailingNewline && !content.endsWith("\n") ? `${content}\n` : content;
|
||||
options?.trailingNewline && !content.endsWith("\n") ? `${content}\n` : content;
|
||||
await fs.promises.mkdir(path.dirname(filePath), {
|
||||
recursive: true,
|
||||
...(typeof options?.ensureDirMode === "number" ? { mode: options.ensureDirMode } : {}),
|
||||
...(typeof options?.dirMode === "number" ? { mode: options.dirMode } : {}),
|
||||
});
|
||||
await fs.promises.writeFile(filePath, payload, {
|
||||
encoding: "utf8",
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import fsp from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import {
|
||||
resolveDefaultSessionStorePath,
|
||||
@@ -8,6 +7,7 @@ import {
|
||||
import { loadSessionStore } from "../config/sessions/store.js";
|
||||
import type { SessionEntry } from "../config/sessions/types.js";
|
||||
import { formatErrorMessage } from "../infra/errors.js";
|
||||
import { pathExists } from "../infra/fs-safe.js";
|
||||
import { resolveAgentIdFromSessionKey } from "../routing/session-key.js";
|
||||
import { type RuntimeEnv, writeRuntimeJson } from "../runtime.js";
|
||||
import {
|
||||
@@ -72,15 +72,6 @@ function resolveExportTrajectoryOptions(
|
||||
};
|
||||
}
|
||||
|
||||
async function fileExists(pathName: string): Promise<boolean> {
|
||||
try {
|
||||
await fsp.access(pathName);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export async function exportTrajectoryCommand(
|
||||
opts: ExportTrajectoryCommandOptions,
|
||||
runtime: RuntimeEnv,
|
||||
@@ -123,7 +114,7 @@ export async function exportTrajectoryCommand(
|
||||
runtime.exit(1);
|
||||
return;
|
||||
}
|
||||
if (!(await fileExists(sessionFile))) {
|
||||
if (!(await pathExists(sessionFile))) {
|
||||
runtime.error("Session file not found.");
|
||||
runtime.exit(1);
|
||||
return;
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { resolveAgentWorkspaceDir } from "../agents/agent-scope.js";
|
||||
import { resolveStorePath } from "../config/sessions/paths.js";
|
||||
import { readSessionStoreReadOnly } from "../config/sessions/store-read.js";
|
||||
import type { OpenClawConfig } from "../config/types.js";
|
||||
import { listGatewayAgentsBasic } from "../gateway/agent-list.js";
|
||||
import { pathExists } from "../infra/fs-safe.js";
|
||||
|
||||
export type AgentLocalStatus = {
|
||||
id: string;
|
||||
@@ -24,15 +24,6 @@ type AgentLocalStatusesResult = {
|
||||
bootstrapPendingCount: number;
|
||||
};
|
||||
|
||||
async function fileExists(p: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(p);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export async function getAgentLocalStatuses(
|
||||
cfg: OpenClawConfig,
|
||||
): Promise<AgentLocalStatusesResult> {
|
||||
@@ -51,7 +42,7 @@ export async function getAgentLocalStatuses(
|
||||
})();
|
||||
|
||||
const bootstrapPath = workspaceDir != null ? path.join(workspaceDir, "BOOTSTRAP.md") : null;
|
||||
const bootstrapPending = bootstrapPath != null ? await fileExists(bootstrapPath) : null;
|
||||
const bootstrapPending = bootstrapPath != null ? await pathExists(bootstrapPath) : null;
|
||||
|
||||
const sessionsPath = resolveStorePath(cfg.session?.store, { agentId });
|
||||
const store = readSessionStoreReadOnly(sessionsPath);
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
import { randomBytes } from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import type { OpenClawConfig } from "../config/config.js";
|
||||
import { resolveStateDir } from "../config/paths.js";
|
||||
import { expandHomePrefix } from "../infra/home-dir.js";
|
||||
import { privateFileStore } from "../infra/private-file-store.js";
|
||||
import {
|
||||
DEFAULT_COMMITMENT_EXPIRE_AFTER_HOURS,
|
||||
DEFAULT_COMMITMENT_MAX_PER_HEARTBEAT,
|
||||
@@ -111,8 +111,9 @@ function sanitizeStoreForWrite(store: CommitmentStoreFile): CommitmentStoreFile
|
||||
async function loadCommitmentStoreInternal(storePath?: string): Promise<LoadedCommitmentStore> {
|
||||
const resolved = resolveCommitmentStorePath(storePath);
|
||||
try {
|
||||
const raw = await fs.promises.readFile(resolved, "utf-8");
|
||||
const parsed = JSON.parse(raw) as unknown;
|
||||
const parsed = await privateFileStore(path.dirname(resolved)).readJsonIfExists(
|
||||
path.basename(resolved),
|
||||
);
|
||||
if (
|
||||
!isRecord(parsed) ||
|
||||
parsed.version !== STORE_VERSION ||
|
||||
@@ -149,15 +150,10 @@ export async function saveCommitmentStore(
|
||||
store: CommitmentStoreFile,
|
||||
): Promise<void> {
|
||||
const resolved = resolveCommitmentStorePath(storePath);
|
||||
const dir = path.dirname(resolved);
|
||||
await fs.promises.mkdir(dir, { recursive: true, mode: 0o700 });
|
||||
await fs.promises.chmod(dir, 0o700).catch(() => undefined);
|
||||
const json = JSON.stringify(sanitizeStoreForWrite(store), null, 2);
|
||||
const tmp = `${resolved}.${process.pid}.${randomBytes(6).toString("hex")}.tmp`;
|
||||
await fs.promises.writeFile(tmp, json, { encoding: "utf-8", mode: 0o600 });
|
||||
await fs.promises.chmod(tmp, 0o600).catch(() => undefined);
|
||||
await fs.promises.rename(tmp, resolved);
|
||||
await fs.promises.chmod(resolved, 0o600).catch(() => undefined);
|
||||
await privateFileStore(path.dirname(resolved)).writeJson(
|
||||
path.basename(resolved),
|
||||
sanitizeStoreForWrite(store),
|
||||
);
|
||||
}
|
||||
|
||||
function generateCommitmentId(nowMs: number): string {
|
||||
|
||||
@@ -4,6 +4,7 @@ import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { resolveOpenClawPackageRootSync } from "../infra/openclaw-root.js";
|
||||
import { replaceFileAtomicSync } from "../infra/replace-file.js";
|
||||
import type { ConfigSchemaResponse } from "./schema.js";
|
||||
import { schemaHasChildren } from "./schema.shared.js";
|
||||
|
||||
@@ -597,8 +598,13 @@ function readFileIfExists(filePath: string): string | null {
|
||||
}
|
||||
|
||||
function writeFileAtomic(filePath: string, content: string): void {
|
||||
fsSync.mkdirSync(path.dirname(filePath), { recursive: true });
|
||||
fsSync.writeFileSync(filePath, content, "utf8");
|
||||
replaceFileAtomicSync({
|
||||
filePath,
|
||||
content,
|
||||
dirMode: 0o755,
|
||||
mode: 0o644,
|
||||
tempPrefix: path.basename(filePath),
|
||||
});
|
||||
}
|
||||
|
||||
function sha256(content: string): string {
|
||||
|
||||
@@ -13,7 +13,7 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import JSON5 from "json5";
|
||||
import { canUseBoundaryFileOpen, openBoundaryFileSync } from "../infra/boundary-file-read.js";
|
||||
import { canUseRootFileOpen, openRootFileSync } from "../infra/boundary-file-read.js";
|
||||
import { isPathInside } from "../security/scan-paths.js";
|
||||
import { isPlainObject } from "../utils.js";
|
||||
import { isBlockedObjectKey } from "./prototype-keys.js";
|
||||
@@ -359,11 +359,11 @@ function isNotFoundError(error: unknown): boolean {
|
||||
export function readConfigIncludeFileWithGuards(params: IncludeFileReadParams): string {
|
||||
const ioFs = params.ioFs ?? fs;
|
||||
const maxBytes = params.maxBytes ?? MAX_INCLUDE_FILE_BYTES;
|
||||
if (!canUseBoundaryFileOpen(ioFs)) {
|
||||
if (!canUseRootFileOpen(ioFs)) {
|
||||
return ioFs.readFileSync(params.resolvedPath, "utf-8");
|
||||
}
|
||||
|
||||
const opened = openBoundaryFileSync({
|
||||
const opened = openRootFileSync({
|
||||
absolutePath: params.resolvedPath,
|
||||
rootPath: params.rootRealDir,
|
||||
rootRealPath: params.rootRealDir,
|
||||
|
||||
@@ -8,6 +8,7 @@ import { ensureOwnerDisplaySecret } from "../agents/owner-display.js";
|
||||
import { loadDotEnv } from "../infra/dotenv.js";
|
||||
import { formatErrorMessage } from "../infra/errors.js";
|
||||
import { resolveRequiredHomeDir } from "../infra/home-dir.js";
|
||||
import { replaceFileAtomic, replaceFileAtomicSync } from "../infra/replace-file.js";
|
||||
import {
|
||||
loadShellEnvFallback,
|
||||
resolveShellEnvFallbackTimeoutMs,
|
||||
@@ -1313,38 +1314,15 @@ export function createConfigIO(
|
||||
}
|
||||
|
||||
function replaceConfigFileSync(raw: string): void {
|
||||
const dir = path.dirname(configPath);
|
||||
deps.fs.mkdirSync(dir, { recursive: true, mode: 0o700 });
|
||||
const tmp = path.join(
|
||||
dir,
|
||||
`${path.basename(configPath)}.${process.pid}.${crypto.randomUUID()}.tmp`,
|
||||
);
|
||||
try {
|
||||
deps.fs.writeFileSync(tmp, raw, {
|
||||
encoding: "utf-8",
|
||||
mode: 0o600,
|
||||
});
|
||||
try {
|
||||
deps.fs.renameSync(tmp, configPath);
|
||||
} catch (err) {
|
||||
const code = (err as NodeJS.ErrnoException)?.code;
|
||||
if (code !== "EPERM" && code !== "EEXIST") {
|
||||
throw err;
|
||||
}
|
||||
deps.fs.copyFileSync(tmp, configPath);
|
||||
deps.fs.chmodSync(configPath, 0o600);
|
||||
deps.fs.unlinkSync(tmp);
|
||||
}
|
||||
} catch (err) {
|
||||
try {
|
||||
deps.fs.unlinkSync(tmp);
|
||||
} catch (cleanupErr) {
|
||||
if ((cleanupErr as NodeJS.ErrnoException)?.code !== "ENOENT") {
|
||||
deps.logger.warn(`Failed to clean temporary config file ${tmp}: ${String(cleanupErr)}`);
|
||||
}
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
replaceFileAtomicSync({
|
||||
filePath: configPath,
|
||||
content: raw,
|
||||
dirMode: 0o700,
|
||||
mode: 0o600,
|
||||
tempPrefix: path.basename(configPath),
|
||||
copyFallbackOnPermissionError: true,
|
||||
fileSystem: deps.fs,
|
||||
});
|
||||
}
|
||||
|
||||
function migrateAndStripShippedPluginInstallConfigRecords(
|
||||
@@ -2208,57 +2186,29 @@ export function createConfigIO(
|
||||
throw err;
|
||||
}
|
||||
|
||||
const tmp = path.join(
|
||||
dir,
|
||||
`${path.basename(configPath)}.${process.pid}.${crypto.randomUUID()}.tmp`,
|
||||
);
|
||||
|
||||
const pluginInstallConfigMigration =
|
||||
ensureShippedPluginInstallConfigRecordsMigratedForWrite(snapshot);
|
||||
let configCommitted = false;
|
||||
try {
|
||||
await deps.fs.promises.writeFile(tmp, json, {
|
||||
encoding: "utf-8",
|
||||
const result = await replaceFileAtomic({
|
||||
filePath: configPath,
|
||||
content: json,
|
||||
dirMode: 0o700,
|
||||
mode: 0o600,
|
||||
tempPrefix: path.basename(configPath),
|
||||
copyFallbackOnPermissionError: true,
|
||||
fileSystem: deps.fs,
|
||||
beforeRename: async () => {
|
||||
if (deps.fs.existsSync(configPath)) {
|
||||
await maintainConfigBackups(configPath, deps.fs.promises);
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
if (deps.fs.existsSync(configPath)) {
|
||||
await maintainConfigBackups(configPath, deps.fs.promises);
|
||||
}
|
||||
|
||||
try {
|
||||
await deps.fs.promises.rename(tmp, configPath);
|
||||
} catch (err) {
|
||||
const code = (err as { code?: string }).code;
|
||||
// Windows doesn't reliably support atomic replace via rename when dest exists.
|
||||
if (code === "EPERM" || code === "EEXIST") {
|
||||
await deps.fs.promises.copyFile(tmp, configPath);
|
||||
await deps.fs.promises.chmod(configPath, 0o600).catch(() => {
|
||||
// best-effort
|
||||
});
|
||||
await deps.fs.promises.unlink(tmp).catch(() => {
|
||||
// best-effort
|
||||
});
|
||||
configCommitted = true;
|
||||
logConfigOverwrite();
|
||||
logConfigWriteAnomalies();
|
||||
await appendWriteAudit(
|
||||
"copy-fallback",
|
||||
undefined,
|
||||
await deps.fs.promises.stat(configPath).catch(() => null),
|
||||
);
|
||||
return { persistedHash: nextHash, persistedConfig: stampedOutputConfig };
|
||||
}
|
||||
await deps.fs.promises.unlink(tmp).catch(() => {
|
||||
// best-effort
|
||||
});
|
||||
throw err;
|
||||
}
|
||||
configCommitted = true;
|
||||
logConfigOverwrite();
|
||||
logConfigWriteAnomalies();
|
||||
await appendWriteAudit(
|
||||
"rename",
|
||||
result.method,
|
||||
undefined,
|
||||
await deps.fs.promises.stat(configPath).catch(() => null),
|
||||
);
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import crypto from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { isDeepStrictEqual } from "node:util";
|
||||
import { formatErrorMessage } from "../infra/errors.js";
|
||||
import { replaceFileAtomic } from "../infra/replace-file.js";
|
||||
import { isPathInside } from "../security/scan-paths.js";
|
||||
import { isRecord } from "../utils.js";
|
||||
import { maintainConfigBackups } from "./backup-rotation.js";
|
||||
@@ -102,31 +102,19 @@ function getSingleTopLevelIncludeTarget(params: {
|
||||
}
|
||||
|
||||
async function writeJsonFileAtomic(filePath: string, value: unknown): Promise<void> {
|
||||
const dir = path.dirname(filePath);
|
||||
const tmp = path.join(
|
||||
dir,
|
||||
`${path.basename(filePath)}.${process.pid}.${crypto.randomUUID()}.tmp`,
|
||||
);
|
||||
try {
|
||||
await fs.mkdir(dir, { recursive: true });
|
||||
await fs.writeFile(tmp, `${JSON.stringify(value, null, 2)}\n`, {
|
||||
encoding: "utf-8",
|
||||
mode: 0o600,
|
||||
});
|
||||
await fs.access(filePath).then(
|
||||
async () => await maintainConfigBackups(filePath, fs),
|
||||
() => undefined,
|
||||
);
|
||||
await fs.rename(tmp, filePath);
|
||||
await fs.chmod(filePath, 0o600).catch(() => {
|
||||
// best-effort
|
||||
});
|
||||
} catch (err) {
|
||||
await fs.unlink(tmp).catch(() => {
|
||||
// best-effort
|
||||
});
|
||||
throw err;
|
||||
}
|
||||
await replaceFileAtomic({
|
||||
filePath,
|
||||
content: `${JSON.stringify(value, null, 2)}\n`,
|
||||
dirMode: 0o700,
|
||||
mode: 0o600,
|
||||
tempPrefix: path.basename(filePath),
|
||||
beforeRename: async () => {
|
||||
await fs.access(filePath).then(
|
||||
async () => await maintainConfigBackups(filePath, fs),
|
||||
() => undefined,
|
||||
);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
async function tryWriteSingleTopLevelIncludeMutation(params: {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import path from "node:path";
|
||||
import { resolveAgentWorkspaceDir, resolveDefaultAgentId } from "../agents/agent-scope.js";
|
||||
import { CHANNEL_IDS, normalizeChatChannelId } from "../channels/ids.js";
|
||||
import { isPathInside } from "../infra/path-guards.js";
|
||||
import { planManifestModelCatalogSuppressions } from "../model-catalog/index.js";
|
||||
import { withBundledPluginAllowlistCompat } from "../plugins/bundled-compat.js";
|
||||
import {
|
||||
@@ -1419,8 +1420,8 @@ function validateConfigObjectWithPluginsBase(
|
||||
}
|
||||
if (
|
||||
sourcePath === resolvedLoadPath ||
|
||||
sourcePath.startsWith(`${resolvedLoadPath}${path.sep}`) ||
|
||||
resolvedLoadPath.startsWith(`${sourcePath}${path.sep}`)
|
||||
isPathInside(resolvedLoadPath, sourcePath) ||
|
||||
isPathInside(sourcePath, resolvedLoadPath)
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { resolveStateDir } from "../config/paths.js";
|
||||
import { appendRegularFile } from "../infra/fs-safe.js";
|
||||
|
||||
type CrestodianAuditEntry = {
|
||||
timestamp: string;
|
||||
@@ -29,9 +30,10 @@ export async function appendCrestodianAuditEntry(
|
||||
timestamp: new Date().toISOString(),
|
||||
...entry,
|
||||
} satisfies CrestodianAuditEntry);
|
||||
await fs.appendFile(auditPath, `${line}\n`, { encoding: "utf8", mode: 0o600 });
|
||||
await fs.chmod(auditPath, 0o600).catch(() => {
|
||||
// Best-effort on platforms/filesystems without POSIX modes.
|
||||
await appendRegularFile({
|
||||
filePath: auditPath,
|
||||
content: `${line}\n`,
|
||||
rejectSymlinkParents: true,
|
||||
});
|
||||
return auditPath;
|
||||
}
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import { randomBytes } from "node:crypto";
|
||||
import fsSync from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { parseByteSize } from "../cli/parse-bytes.js";
|
||||
import type { CronConfig } from "../config/types.cron.js";
|
||||
import { appendRegularFile, isPathInside, pathExists, root as fsRoot } from "../infra/fs-safe.js";
|
||||
import { privateFileStore } from "../infra/private-file-store.js";
|
||||
import {
|
||||
normalizeLowercaseStringOrEmpty,
|
||||
normalizeOptionalString,
|
||||
@@ -84,7 +85,7 @@ export function resolveCronRunLogPath(params: { storePath: string; jobId: string
|
||||
const runsDir = path.resolve(dir, "runs");
|
||||
const safeJobId = assertSafeCronRunLogJobId(params.jobId);
|
||||
const resolvedPath = path.resolve(runsDir, `${safeJobId}.jsonl`);
|
||||
if (!resolvedPath.startsWith(`${runsDir}${path.sep}`)) {
|
||||
if (!isPathInside(runsDir, resolvedPath)) {
|
||||
throw new Error("invalid cron run log job id");
|
||||
}
|
||||
return resolvedPath;
|
||||
@@ -147,11 +148,10 @@ async function pruneIfNeeded(filePath: string, opts: { maxBytes: number; keepLin
|
||||
.map((l) => l.trim())
|
||||
.filter(Boolean);
|
||||
const kept = lines.slice(Math.max(0, lines.length - opts.keepLines));
|
||||
const tmp = `${filePath}.${process.pid}.${randomBytes(8).toString("hex")}.tmp`;
|
||||
await fs.writeFile(tmp, `${kept.join("\n")}\n`, { encoding: "utf-8", mode: 0o600 });
|
||||
await setSecureFileMode(tmp);
|
||||
await fs.rename(tmp, filePath);
|
||||
await setSecureFileMode(filePath);
|
||||
await privateFileStore(path.dirname(filePath)).writeText(
|
||||
path.basename(filePath),
|
||||
`${kept.join("\n")}\n`,
|
||||
);
|
||||
}
|
||||
|
||||
export async function appendCronRunLog(
|
||||
@@ -167,9 +167,10 @@ export async function appendCronRunLog(
|
||||
const runDir = path.dirname(resolved);
|
||||
await fs.mkdir(runDir, { recursive: true, mode: 0o700 });
|
||||
await fs.chmod(runDir, 0o700).catch(() => undefined);
|
||||
await fs.appendFile(resolved, `${JSON.stringify(entry)}\n`, {
|
||||
encoding: "utf-8",
|
||||
mode: 0o600,
|
||||
await appendRegularFile({
|
||||
filePath: resolved,
|
||||
content: `${JSON.stringify(entry)}\n`,
|
||||
rejectSymlinkParents: true,
|
||||
});
|
||||
await setSecureFileMode(resolved);
|
||||
await pruneIfNeeded(resolved, {
|
||||
@@ -447,10 +448,31 @@ export async function readCronRunLogEntriesPageAll(
|
||||
const query = normalizeLowercaseStringOrEmpty(opts.query);
|
||||
const sortDir: CronRunLogSortDir = opts.sortDir === "asc" ? "asc" : "desc";
|
||||
const runsDir = path.resolve(path.dirname(path.resolve(opts.storePath)), "runs");
|
||||
const files = await fs.readdir(runsDir, { withFileTypes: true }).catch(() => []);
|
||||
if (!(await pathExists(runsDir))) {
|
||||
return {
|
||||
entries: [],
|
||||
total: 0,
|
||||
offset: 0,
|
||||
limit,
|
||||
hasMore: false,
|
||||
nextOffset: null,
|
||||
};
|
||||
}
|
||||
const runsRoot = await fsRoot(runsDir).catch(() => null);
|
||||
if (!runsRoot) {
|
||||
return {
|
||||
entries: [],
|
||||
total: 0,
|
||||
offset: 0,
|
||||
limit,
|
||||
hasMore: false,
|
||||
nextOffset: null,
|
||||
};
|
||||
}
|
||||
const files = await runsRoot.list(".", { withFileTypes: true }).catch(() => []);
|
||||
const jsonlFiles = files
|
||||
.filter((entry) => entry.isFile() && entry.name.endsWith(".jsonl"))
|
||||
.map((entry) => path.join(runsDir, entry.name));
|
||||
.filter((entry) => entry.isFile && entry.name.endsWith(".jsonl"))
|
||||
.map((entry) => entry.name);
|
||||
if (jsonlFiles.length === 0) {
|
||||
return {
|
||||
entries: [],
|
||||
@@ -461,10 +483,10 @@ export async function readCronRunLogEntriesPageAll(
|
||||
nextOffset: null,
|
||||
};
|
||||
}
|
||||
await Promise.all(jsonlFiles.map((f) => drainPendingWrite(f)));
|
||||
await Promise.all(jsonlFiles.map((fileName) => drainPendingWrite(path.join(runsDir, fileName))));
|
||||
const chunks = await Promise.all(
|
||||
jsonlFiles.map(async (filePath) => {
|
||||
const raw = await fs.readFile(filePath, "utf-8").catch(() => "");
|
||||
jsonlFiles.map(async (fileName) => {
|
||||
const raw = await runsRoot.readText(fileName).catch(() => "");
|
||||
return parseAllRunLogEntries(raw);
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { randomBytes } from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { expandHomePrefix } from "../infra/home-dir.js";
|
||||
import { replaceFileAtomic } from "../infra/replace-file.js";
|
||||
import { resolveConfigDir } from "../utils.js";
|
||||
import { parseJsonWithJson5Fallback } from "../utils/parse-json-compat.js";
|
||||
import { tryCronScheduleIdentity } from "./schedule-identity.js";
|
||||
@@ -329,13 +329,15 @@ async function setSecureFileMode(filePath: string): Promise<void> {
|
||||
}
|
||||
|
||||
async function atomicWrite(filePath: string, content: string, dirMode = 0o700): Promise<void> {
|
||||
const dir = path.dirname(filePath);
|
||||
await fs.promises.mkdir(dir, { recursive: true, mode: dirMode });
|
||||
await fs.promises.chmod(dir, dirMode).catch(() => undefined);
|
||||
const tmp = `${filePath}.${process.pid}.${randomBytes(8).toString("hex")}.tmp`;
|
||||
await fs.promises.writeFile(tmp, content, { encoding: "utf-8", mode: 0o600 });
|
||||
await renameWithRetry(tmp, filePath);
|
||||
await setSecureFileMode(filePath);
|
||||
await replaceFileAtomic({
|
||||
filePath,
|
||||
content,
|
||||
dirMode,
|
||||
mode: 0o600,
|
||||
tempPrefix: ".openclaw-cron",
|
||||
renameMaxRetries: 3,
|
||||
copyFallbackOnPermissionError: true,
|
||||
});
|
||||
}
|
||||
|
||||
async function serializedFileNeedsWrite(
|
||||
@@ -409,28 +411,3 @@ export async function saveCronStore(
|
||||
}
|
||||
updatedCache.needsSplitMigration = stateOnly && migrating;
|
||||
}
|
||||
|
||||
const RENAME_MAX_RETRIES = 3;
|
||||
const RENAME_BASE_DELAY_MS = 50;
|
||||
|
||||
async function renameWithRetry(src: string, dest: string): Promise<void> {
|
||||
for (let attempt = 0; attempt <= RENAME_MAX_RETRIES; attempt++) {
|
||||
try {
|
||||
await fs.promises.rename(src, dest);
|
||||
return;
|
||||
} catch (err) {
|
||||
const code = (err as { code?: string }).code;
|
||||
if (code === "EBUSY" && attempt < RENAME_MAX_RETRIES) {
|
||||
await new Promise((resolve) => setTimeout(resolve, RENAME_BASE_DELAY_MS * 2 ** attempt));
|
||||
continue;
|
||||
}
|
||||
// Windows doesn't reliably support atomic replace via rename when dest exists.
|
||||
if (code === "EPERM" || code === "EEXIST") {
|
||||
await fs.promises.copyFile(src, dest);
|
||||
await fs.promises.unlink(src).catch(() => {});
|
||||
return;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { pathExists } from "../infra/fs-safe.js";
|
||||
import { readPackageName, readPackageVersion } from "../infra/package-json.js";
|
||||
import type { GatewayServiceCommandConfig } from "./service-types.js";
|
||||
|
||||
@@ -64,15 +65,6 @@ async function tryRealpath(value: string | undefined): Promise<string | undefine
|
||||
}
|
||||
}
|
||||
|
||||
async function pathExists(candidate: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(candidate);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function isSourceCheckoutRoot(candidate: string): Promise<boolean> {
|
||||
const hasRepoMarker =
|
||||
(await pathExists(path.join(candidate, ".git"))) ||
|
||||
|
||||
@@ -3,6 +3,7 @@ import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { CANVAS_HOST_PATH } from "../canvas-host/a2ui.js";
|
||||
import { resolveStateDir } from "../config/paths.js";
|
||||
import { root as fsRoot, sanitizeUntrustedFileName } from "../infra/fs-safe.js";
|
||||
import { resolveUserPath } from "../utils.js";
|
||||
|
||||
type CanvasDocumentKind = "html_bundle" | "url_embed" | "document" | "image" | "video_asset";
|
||||
@@ -74,12 +75,27 @@ function escapeHtml(value: string): string {
|
||||
function normalizeLogicalPath(value: string): string {
|
||||
const normalized = value.replaceAll("\\", "/").replace(/^\/+/, "");
|
||||
const parts = normalized.split("/").filter(Boolean);
|
||||
if (parts.length === 0 || parts.some((part) => part === "." || part === "..")) {
|
||||
if (
|
||||
parts.length === 0 ||
|
||||
parts.some(
|
||||
(part) => part === "." || part === ".." || part.includes(":") || hasControlCharacter(part),
|
||||
)
|
||||
) {
|
||||
throw new Error("canvas document logicalPath invalid");
|
||||
}
|
||||
return parts.join("/");
|
||||
}
|
||||
|
||||
function hasControlCharacter(value: string): boolean {
|
||||
for (const char of value) {
|
||||
const code = char.charCodeAt(0);
|
||||
if (code < 0x20 || code === 0x7f) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function canvasDocumentId(): string {
|
||||
return `cv_${randomUUID().replaceAll("-", "")}`;
|
||||
}
|
||||
@@ -172,16 +188,17 @@ export function resolveCanvasHttpPathToLocalPath(
|
||||
}
|
||||
}
|
||||
|
||||
async function writeManifest(rootDir: string, manifest: CanvasDocumentManifest): Promise<void> {
|
||||
await fs.writeFile(
|
||||
path.join(rootDir, "manifest.json"),
|
||||
`${JSON.stringify(manifest, null, 2)}\n`,
|
||||
"utf8",
|
||||
);
|
||||
type CanvasDocumentRoot = Awaited<ReturnType<typeof fsRoot>>;
|
||||
|
||||
async function writeManifest(
|
||||
root: CanvasDocumentRoot,
|
||||
manifest: CanvasDocumentManifest,
|
||||
): Promise<void> {
|
||||
await root.writeJson("manifest.json", manifest, { space: 2 });
|
||||
}
|
||||
|
||||
async function copyAssets(
|
||||
rootDir: string,
|
||||
root: CanvasDocumentRoot,
|
||||
assets: CanvasDocumentAsset[] | undefined,
|
||||
workspaceDir: string,
|
||||
): Promise<CanvasDocumentManifest["assets"]> {
|
||||
@@ -193,9 +210,7 @@ async function copyAssets(
|
||||
: path.isAbsolute(asset.sourcePath)
|
||||
? path.resolve(asset.sourcePath)
|
||||
: path.resolve(workspaceDir, asset.sourcePath);
|
||||
const destination = path.join(rootDir, logicalPath);
|
||||
await fs.mkdir(path.dirname(destination), { recursive: true });
|
||||
await fs.copyFile(sourcePath, destination);
|
||||
await root.copyIn(logicalPath, sourcePath);
|
||||
copied.push({
|
||||
logicalPath,
|
||||
...(asset.contentType ? { contentType: asset.contentType } : {}),
|
||||
@@ -206,6 +221,7 @@ async function copyAssets(
|
||||
|
||||
async function materializeEntrypoint(
|
||||
rootDir: string,
|
||||
root: CanvasDocumentRoot,
|
||||
input: CanvasDocumentCreateInput,
|
||||
workspaceDir: string,
|
||||
): Promise<Pick<CanvasDocumentManifest, "entryUrl" | "localEntrypoint" | "externalUrl">> {
|
||||
@@ -215,7 +231,7 @@ async function materializeEntrypoint(
|
||||
}
|
||||
if (entrypoint.type === "html") {
|
||||
const fileName = "index.html";
|
||||
await fs.writeFile(path.join(rootDir, fileName), entrypoint.value, "utf8");
|
||||
await root.write(fileName, entrypoint.value);
|
||||
return {
|
||||
localEntrypoint: fileName,
|
||||
entryUrl: buildCanvasDocumentEntryUrl(path.basename(rootDir), fileName),
|
||||
@@ -224,7 +240,7 @@ async function materializeEntrypoint(
|
||||
if (entrypoint.type === "url") {
|
||||
if (input.kind === "document" && isPdfPathLike(entrypoint.value)) {
|
||||
const fileName = "index.html";
|
||||
await fs.writeFile(path.join(rootDir, fileName), buildPdfWrapper(entrypoint.value), "utf8");
|
||||
await root.write(fileName, buildPdfWrapper(entrypoint.value));
|
||||
return {
|
||||
localEntrypoint: fileName,
|
||||
externalUrl: entrypoint.value,
|
||||
@@ -244,23 +260,23 @@ async function materializeEntrypoint(
|
||||
: path.resolve(workspaceDir, entrypoint.value);
|
||||
|
||||
if (input.kind === "image" || input.kind === "video_asset") {
|
||||
const copiedName = path.basename(resolvedPath);
|
||||
await fs.copyFile(resolvedPath, path.join(rootDir, copiedName));
|
||||
const copiedName = sanitizeUntrustedFileName(path.basename(resolvedPath), "asset");
|
||||
await root.copyIn(copiedName, resolvedPath);
|
||||
const wrapper =
|
||||
input.kind === "image"
|
||||
? `<!doctype html><html><body style="margin:0;background:#0f172a;display:flex;align-items:center;justify-content:center;"><img src="${escapeHtml(copiedName)}" style="max-width:100%;max-height:100vh;object-fit:contain;" /></body></html>`
|
||||
: `<!doctype html><html><body style="margin:0;background:#0f172a;"><video src="${escapeHtml(copiedName)}" controls autoplay style="width:100%;height:100vh;object-fit:contain;background:#000;"></video></body></html>`;
|
||||
await fs.writeFile(path.join(rootDir, "index.html"), wrapper, "utf8");
|
||||
await root.write("index.html", wrapper);
|
||||
return {
|
||||
localEntrypoint: "index.html",
|
||||
entryUrl: buildCanvasDocumentEntryUrl(path.basename(rootDir), "index.html"),
|
||||
};
|
||||
}
|
||||
|
||||
const fileName = path.basename(resolvedPath);
|
||||
await fs.copyFile(resolvedPath, path.join(rootDir, fileName));
|
||||
const fileName = sanitizeUntrustedFileName(path.basename(resolvedPath), "document");
|
||||
await root.copyIn(fileName, resolvedPath);
|
||||
if (input.kind === "document" && isPdfPathLike(fileName)) {
|
||||
await fs.writeFile(path.join(rootDir, "index.html"), buildPdfWrapper(fileName), "utf8");
|
||||
await root.write("index.html", buildPdfWrapper(fileName));
|
||||
return {
|
||||
localEntrypoint: "index.html",
|
||||
entryUrl: buildCanvasDocumentEntryUrl(path.basename(rootDir), "index.html"),
|
||||
@@ -284,8 +300,9 @@ export async function createCanvasDocument(
|
||||
});
|
||||
await fs.rm(rootDir, { recursive: true, force: true }).catch(() => undefined);
|
||||
await fs.mkdir(rootDir, { recursive: true });
|
||||
const assets = await copyAssets(rootDir, input.assets, workspaceDir);
|
||||
const entry = await materializeEntrypoint(rootDir, input, workspaceDir);
|
||||
const root = await fsRoot(rootDir);
|
||||
const assets = await copyAssets(root, input.assets, workspaceDir);
|
||||
const entry = await materializeEntrypoint(rootDir, root, input, workspaceDir);
|
||||
const manifest: CanvasDocumentManifest = {
|
||||
id,
|
||||
kind: input.kind,
|
||||
@@ -300,7 +317,7 @@ export async function createCanvasDocument(
|
||||
...(entry.externalUrl ? { externalUrl: entry.externalUrl } : {}),
|
||||
assets,
|
||||
};
|
||||
await writeManifest(rootDir, manifest);
|
||||
await writeManifest(root, manifest);
|
||||
return manifest;
|
||||
}
|
||||
|
||||
|
||||
@@ -4,17 +4,16 @@ import type { IncomingMessage, ServerResponse } from "node:http";
|
||||
import path from "node:path";
|
||||
import { resolveAgentAvatar, resolvePublicAgentAvatarSource } from "../agents/identity-avatar.js";
|
||||
import type { OpenClawConfig } from "../config/types.openclaw.js";
|
||||
import { matchBoundaryFileOpenFailure, openBoundaryFileSync } from "../infra/boundary-file-read.js";
|
||||
import { matchRootFileOpenFailure, openRootFileSync } from "../infra/boundary-file-read.js";
|
||||
import {
|
||||
isPackageProvenControlUiRootSync,
|
||||
resolveControlUiRootSync,
|
||||
} from "../infra/control-ui-assets.js";
|
||||
import { listDevicePairing, verifyDeviceToken } from "../infra/device-pairing.js";
|
||||
import { openLocalFileSafely, SafeOpenError } from "../infra/fs-safe.js";
|
||||
import { openLocalFileSafely, FsSafeError, readSecureFile } from "../infra/fs-safe.js";
|
||||
import { safeFileURLToPath } from "../infra/local-file-access.js";
|
||||
import { verifyPairingToken } from "../infra/pairing-token.js";
|
||||
import { isWithinDir } from "../infra/path-safety.js";
|
||||
import { openVerifiedFileSync } from "../infra/safe-open-sync.js";
|
||||
import { assertLocalMediaAllowed, getDefaultLocalRoots } from "../media/local-media-access.js";
|
||||
import { getAgentScopedMediaLocalRoots } from "../media/local-roots.js";
|
||||
import { resolveMediaReferenceLocalPath } from "../media/media-reference.js";
|
||||
@@ -441,7 +440,7 @@ function verifyAssistantMediaTicket(ticket: string | null, source: string, nowMs
|
||||
}
|
||||
|
||||
function classifyAssistantMediaError(err: unknown): AssistantMediaAvailability {
|
||||
if (err instanceof SafeOpenError) {
|
||||
if (err instanceof FsSafeError) {
|
||||
switch (err.code) {
|
||||
case "not-found":
|
||||
return { available: false, code: "file-not-found", reason: "File not found" };
|
||||
@@ -687,21 +686,17 @@ export async function handleControlUiAvatarRequest(
|
||||
return true;
|
||||
}
|
||||
|
||||
const safeAvatar = resolveSafeAvatarFile(resolved.filePath);
|
||||
const safeAvatar = await resolveSafeAvatarFile(resolved.filePath);
|
||||
if (!safeAvatar) {
|
||||
respondControlUiNotFound(res);
|
||||
return true;
|
||||
}
|
||||
try {
|
||||
if (respondHeadForFile(req, res, safeAvatar.path)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
serveResolvedFile(res, safeAvatar.path, fs.readFileSync(safeAvatar.fd));
|
||||
if (respondHeadForFile(req, res, safeAvatar.path)) {
|
||||
return true;
|
||||
} finally {
|
||||
fs.closeSync(safeAvatar.fd);
|
||||
}
|
||||
|
||||
serveResolvedFile(res, safeAvatar.path, safeAvatar.buffer);
|
||||
return true;
|
||||
}
|
||||
|
||||
function setStaticFileHeaders(res: ServerResponse, filePath: string) {
|
||||
@@ -736,16 +731,20 @@ function isExpectedSafePathError(error: unknown): boolean {
|
||||
return code === "ENOENT" || code === "ENOTDIR" || code === "ELOOP";
|
||||
}
|
||||
|
||||
function resolveSafeAvatarFile(filePath: string): { path: string; fd: number } | null {
|
||||
const opened = openVerifiedFileSync({
|
||||
filePath,
|
||||
rejectPathSymlink: true,
|
||||
maxBytes: AVATAR_MAX_BYTES,
|
||||
});
|
||||
if (!opened.ok) {
|
||||
async function resolveSafeAvatarFile(
|
||||
filePath: string,
|
||||
): Promise<{ path: string; buffer: Buffer } | null> {
|
||||
try {
|
||||
const read = await readSecureFile({
|
||||
filePath,
|
||||
label: "Control UI avatar",
|
||||
permissions: { allowInsecure: true, allowReadableByOthers: true },
|
||||
io: { maxBytes: AVATAR_MAX_BYTES },
|
||||
});
|
||||
return { path: read.realPath, buffer: read.buffer };
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
return { path: opened.path, fd: opened.fd };
|
||||
}
|
||||
|
||||
function resolveSafeControlUiFile(
|
||||
@@ -753,7 +752,7 @@ function resolveSafeControlUiFile(
|
||||
filePath: string,
|
||||
rejectHardlinks: boolean,
|
||||
): { path: string; fd: number } | null {
|
||||
const opened = openBoundaryFileSync({
|
||||
const opened = openRootFileSync({
|
||||
absolutePath: filePath,
|
||||
rootPath: rootReal,
|
||||
rootRealPath: rootReal,
|
||||
@@ -762,7 +761,7 @@ function resolveSafeControlUiFile(
|
||||
rejectHardlinks,
|
||||
});
|
||||
if (!opened.ok) {
|
||||
return matchBoundaryFileOpenFailure(opened, {
|
||||
return matchRootFileOpenFailure(opened, {
|
||||
io: (failure) => {
|
||||
throw failure.error;
|
||||
},
|
||||
|
||||
@@ -4,6 +4,7 @@ import type { IncomingMessage, ServerResponse } from "node:http";
|
||||
import path from "node:path";
|
||||
import { getLatestSubagentRunByChildSessionKey } from "../agents/subagent-registry.js";
|
||||
import { resolveStateDir } from "../config/paths.js";
|
||||
import { readLocalFileSafely } from "../infra/fs-safe.js";
|
||||
import { safeFileURLToPath } from "../infra/local-file-access.js";
|
||||
import {
|
||||
getImageMetadata,
|
||||
@@ -366,7 +367,7 @@ function parseImageDataUrl(
|
||||
}
|
||||
|
||||
async function getVariantStats(filePath: string) {
|
||||
const [stats, metadataBuffer] = await Promise.all([fs.stat(filePath), fs.readFile(filePath)]);
|
||||
const { buffer: metadataBuffer, stat } = await readLocalFileSafely({ filePath });
|
||||
const metadata = (await getImageMetadata(metadataBuffer).catch(() => null)) ?? {
|
||||
width: null,
|
||||
height: null,
|
||||
@@ -374,7 +375,7 @@ async function getVariantStats(filePath: string) {
|
||||
return {
|
||||
width: metadata.width ?? null,
|
||||
height: metadata.height ?? null,
|
||||
sizeBytes: Number.isFinite(stats.size) ? stats.size : null,
|
||||
sizeBytes: Number.isFinite(stat.size) ? stat.size : null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -866,7 +867,7 @@ export async function createManagedOutgoingImageBlocks(params: {
|
||||
let originalBuffer =
|
||||
parsedDataUrl.kind === "image-data-url"
|
||||
? parsedDataUrl.buffer
|
||||
: await fs.readFile(savedOriginal.path);
|
||||
: (await readLocalFileSafely({ filePath: savedOriginal.path })).buffer;
|
||||
validateManagedImageBuffer(originalBuffer, alt, limits);
|
||||
|
||||
let originalStats = await getVariantStats(savedOriginal.path);
|
||||
@@ -1081,7 +1082,7 @@ export async function handleManagedOutgoingImageHttpRequest(
|
||||
|
||||
let body: Buffer;
|
||||
try {
|
||||
body = await fs.readFile(record.original.path);
|
||||
body = (await readLocalFileSafely({ filePath: record.original.path })).buffer;
|
||||
} catch {
|
||||
sendStatus(res, 404, "not found");
|
||||
return true;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { describe, expect, it, vi, beforeEach } from "vitest";
|
||||
import { SafeOpenError } from "../../infra/fs-safe.js";
|
||||
import { FsSafeError } from "../../infra/fs-safe.js";
|
||||
/* ------------------------------------------------------------------ */
|
||||
/* Mocks */
|
||||
/* ------------------------------------------------------------------ */
|
||||
@@ -39,7 +39,17 @@ const mocks = vi.hoisted(() => ({
|
||||
fsRealpath: vi.fn(async (p: string) => p),
|
||||
fsReadlink: vi.fn(async () => ""),
|
||||
fsOpen: vi.fn(async () => ({}) as unknown),
|
||||
writeFileWithinRoot: vi.fn(async () => {}),
|
||||
rootRead: vi.fn(async (_params?: unknown) => ({
|
||||
buffer: Buffer.from(""),
|
||||
realPath: "/workspace/test-agent/AGENTS.md",
|
||||
stat: { size: 0, mtimeMs: 0 },
|
||||
})),
|
||||
rootOpen: vi.fn(async (_params?: unknown) => ({
|
||||
handle: { close: vi.fn(async () => {}) },
|
||||
realPath: "/workspace/test-agent/AGENTS.md",
|
||||
stat: { size: 0, mtimeMs: 0 },
|
||||
})),
|
||||
rootWrite: vi.fn(async (_params?: unknown) => {}),
|
||||
}));
|
||||
|
||||
vi.mock("../../config/config.js", async () => {
|
||||
@@ -106,7 +116,23 @@ vi.mock("../../infra/fs-safe.js", async () => {
|
||||
await vi.importActual<typeof import("../../infra/fs-safe.js")>("../../infra/fs-safe.js");
|
||||
return {
|
||||
...actual,
|
||||
writeFileWithinRoot: mocks.writeFileWithinRoot,
|
||||
root: vi.fn(async (rootDir: string) => ({
|
||||
open: async (relativePath: string, options?: Record<string, unknown>) =>
|
||||
await mocks.rootOpen({ rootDir, relativePath, ...options }),
|
||||
read: async (relativePath: string, options?: Record<string, unknown>) =>
|
||||
await mocks.rootRead({ rootDir, relativePath, ...options }),
|
||||
write: async (
|
||||
relativePath: string,
|
||||
data: string | Buffer,
|
||||
options?: Record<string, unknown>,
|
||||
) =>
|
||||
await mocks.rootWrite({
|
||||
rootDir,
|
||||
relativePath,
|
||||
data,
|
||||
...options,
|
||||
}),
|
||||
})),
|
||||
};
|
||||
});
|
||||
|
||||
@@ -154,9 +180,44 @@ beforeEach(() => {
|
||||
mocks.resolveAgentWorkspaceDir.mockImplementation((cfg: unknown, agentId?: string) =>
|
||||
resolveMockWorkspaceDir(cfg, agentId),
|
||||
);
|
||||
mocks.writeFileWithinRoot.mockResolvedValue(undefined);
|
||||
mocks.rootOpen.mockResolvedValue({
|
||||
handle: { close: vi.fn(async () => {}) },
|
||||
realPath: "/workspace/test-agent/AGENTS.md",
|
||||
stat: { size: 0, mtimeMs: 0 },
|
||||
});
|
||||
mocks.rootRead.mockResolvedValue({
|
||||
buffer: Buffer.from(""),
|
||||
realPath: "/workspace/test-agent/AGENTS.md",
|
||||
stat: { size: 0, mtimeMs: 0 },
|
||||
});
|
||||
mocks.rootWrite.mockResolvedValue(undefined);
|
||||
});
|
||||
|
||||
function makeRootForTest(overrides?: {
|
||||
open?: (params: Record<string, unknown>) => Promise<unknown>;
|
||||
read?: (params: Record<string, unknown>) => Promise<unknown>;
|
||||
write?: (params: Record<string, unknown>) => Promise<unknown>;
|
||||
}) {
|
||||
return async (rootDir: string) =>
|
||||
({
|
||||
open: async (relativePath: string, options?: Record<string, unknown>) =>
|
||||
await (overrides?.open ?? mocks.rootOpen)({ rootDir, relativePath, ...options }),
|
||||
read: async (relativePath: string, options?: Record<string, unknown>) =>
|
||||
await (overrides?.read ?? mocks.rootRead)({ rootDir, relativePath, ...options }),
|
||||
write: async (
|
||||
relativePath: string,
|
||||
data: string | Buffer,
|
||||
options?: Record<string, unknown>,
|
||||
) =>
|
||||
await (overrides?.write ?? mocks.rootWrite)({
|
||||
rootDir,
|
||||
relativePath,
|
||||
data,
|
||||
...options,
|
||||
}),
|
||||
}) as never;
|
||||
}
|
||||
|
||||
function makeCall(method: keyof typeof agentsHandlers, params: Record<string, unknown>) {
|
||||
const respond = vi.fn();
|
||||
const handler = agentsHandlers[method];
|
||||
@@ -466,7 +527,7 @@ describe("agents.create", () => {
|
||||
identity: expect.objectContaining({ name: "Plain Agent" }),
|
||||
}),
|
||||
);
|
||||
expect(mocks.writeFileWithinRoot).toHaveBeenCalledWith(
|
||||
expect(mocks.rootWrite).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
rootDir: "/resolved/tmp/ws",
|
||||
relativePath: "IDENTITY.md",
|
||||
@@ -494,7 +555,7 @@ describe("agents.create", () => {
|
||||
}),
|
||||
}),
|
||||
);
|
||||
expect(mocks.writeFileWithinRoot).toHaveBeenCalledWith(
|
||||
expect(mocks.rootWrite).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
rootDir: "/resolved/tmp/ws",
|
||||
relativePath: "IDENTITY.md",
|
||||
@@ -503,9 +564,9 @@ describe("agents.create", () => {
|
||||
);
|
||||
});
|
||||
|
||||
it("does not persist config when IDENTITY.md write fails with SafeOpenError", async () => {
|
||||
mocks.writeFileWithinRoot.mockRejectedValueOnce(
|
||||
new SafeOpenError("path-mismatch", "path escapes workspace root"),
|
||||
it("does not persist config when IDENTITY.md write fails with FsSafeError", async () => {
|
||||
mocks.rootWrite.mockRejectedValueOnce(
|
||||
new FsSafeError("path-mismatch", "path escapes workspace root"),
|
||||
);
|
||||
|
||||
const { respond, promise } = makeCall("agents.create", {
|
||||
@@ -524,9 +585,11 @@ describe("agents.create", () => {
|
||||
|
||||
it("does not persist config when IDENTITY.md read fails", async () => {
|
||||
agentsTesting.setDepsForTests({
|
||||
readFileWithinRoot: async () => {
|
||||
throw createErrnoError("EACCES");
|
||||
},
|
||||
root: makeRootForTest({
|
||||
read: async () => {
|
||||
throw createErrnoError("EACCES");
|
||||
},
|
||||
}),
|
||||
});
|
||||
mocks.ensureAgentWorkspace.mockResolvedValueOnce({
|
||||
dir: "/resolved/tmp/ws",
|
||||
@@ -540,14 +603,16 @@ describe("agents.create", () => {
|
||||
|
||||
await expect(promise).rejects.toMatchObject({ code: "EACCES" });
|
||||
expect(mocks.writeConfigFile).not.toHaveBeenCalled();
|
||||
expect(mocks.writeFileWithinRoot).not.toHaveBeenCalled();
|
||||
expect(mocks.rootWrite).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("treats unsafe IDENTITY.md reads as invalid create requests", async () => {
|
||||
agentsTesting.setDepsForTests({
|
||||
readFileWithinRoot: async () => {
|
||||
throw new SafeOpenError("invalid-path", "path is not a regular file under root");
|
||||
},
|
||||
root: makeRootForTest({
|
||||
read: async () => {
|
||||
throw new FsSafeError("invalid-path", "path is not a regular file under root");
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
const { respond, promise } = makeCall("agents.create", {
|
||||
@@ -564,14 +629,14 @@ describe("agents.create", () => {
|
||||
}),
|
||||
);
|
||||
expect(mocks.writeConfigFile).not.toHaveBeenCalled();
|
||||
expect(mocks.writeFileWithinRoot).not.toHaveBeenCalled();
|
||||
expect(mocks.rootWrite).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("uses non-blocking reads for IDENTITY.md during agents.create", async () => {
|
||||
const readFileWithinRoot = vi.fn(async () => {
|
||||
throw new SafeOpenError("not-found", "file not found");
|
||||
const rootRead = vi.fn(async () => {
|
||||
throw new FsSafeError("not-found", "file not found");
|
||||
});
|
||||
agentsTesting.setDepsForTests({ readFileWithinRoot });
|
||||
agentsTesting.setDepsForTests({ root: makeRootForTest({ read: rootRead }) });
|
||||
|
||||
const { promise } = makeCall("agents.create", {
|
||||
name: "NB Agent",
|
||||
@@ -579,7 +644,7 @@ describe("agents.create", () => {
|
||||
});
|
||||
await promise;
|
||||
|
||||
expect(readFileWithinRoot).toHaveBeenCalledWith(
|
||||
expect(rootRead).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
relativePath: "IDENTITY.md",
|
||||
nonBlockingRead: true,
|
||||
@@ -685,7 +750,7 @@ describe("agents.update", () => {
|
||||
}),
|
||||
}),
|
||||
);
|
||||
expect(mocks.writeFileWithinRoot).toHaveBeenCalledWith(
|
||||
expect(mocks.rootWrite).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
rootDir: "/workspace/test-agent",
|
||||
relativePath: "IDENTITY.md",
|
||||
@@ -710,7 +775,7 @@ describe("agents.update", () => {
|
||||
identity: expect.objectContaining({ emoji: "🦀" }),
|
||||
}),
|
||||
);
|
||||
expect(mocks.writeFileWithinRoot).toHaveBeenCalledWith(
|
||||
expect(mocks.rootWrite).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
rootDir: "/workspace/test-agent",
|
||||
relativePath: "IDENTITY.md",
|
||||
@@ -742,7 +807,7 @@ describe("agents.update", () => {
|
||||
}),
|
||||
}),
|
||||
);
|
||||
expect(mocks.writeFileWithinRoot).toHaveBeenCalledWith(
|
||||
expect(mocks.rootWrite).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
rootDir: "/workspace/test-agent",
|
||||
relativePath: "IDENTITY.md",
|
||||
@@ -759,50 +824,52 @@ describe("agents.update", () => {
|
||||
identityPathCreated: true,
|
||||
});
|
||||
agentsTesting.setDepsForTests({
|
||||
readFileWithinRoot: async ({ rootDir, relativePath }) => {
|
||||
const filePath = `${rootDir}/${relativePath}`;
|
||||
if (filePath === "/workspace/test-agent/IDENTITY.md") {
|
||||
return {
|
||||
buffer: Buffer.from(
|
||||
[
|
||||
"# IDENTITY.md - Agent Identity",
|
||||
"",
|
||||
"- **Name:** Current Agent",
|
||||
"- **Creature:** Steady Turtle",
|
||||
"- **Vibe:** Calm and methodical",
|
||||
"- **Emoji:** 🐢",
|
||||
"",
|
||||
"## Role",
|
||||
"",
|
||||
"Protect the queue.",
|
||||
"",
|
||||
].join("\n"),
|
||||
),
|
||||
realPath: filePath,
|
||||
stat: makeFileStat(),
|
||||
};
|
||||
}
|
||||
if (filePath === "/resolved/new/workspace/IDENTITY.md") {
|
||||
return {
|
||||
buffer: Buffer.from(
|
||||
[
|
||||
"# IDENTITY.md - Agent Identity",
|
||||
"",
|
||||
"- **Name:** C-3PO (Clawd's Third Protocol Observer)",
|
||||
"- **Creature:** Flustered Protocol Droid",
|
||||
"",
|
||||
"## Role",
|
||||
"",
|
||||
"Debug agent for `--dev` mode.",
|
||||
"",
|
||||
].join("\n"),
|
||||
),
|
||||
realPath: filePath,
|
||||
stat: makeFileStat(),
|
||||
};
|
||||
}
|
||||
throw createEnoentError();
|
||||
},
|
||||
root: makeRootForTest({
|
||||
read: async ({ rootDir, relativePath }) => {
|
||||
const filePath = `${String(rootDir)}/${String(relativePath)}`;
|
||||
if (filePath === "/workspace/test-agent/IDENTITY.md") {
|
||||
return {
|
||||
buffer: Buffer.from(
|
||||
[
|
||||
"# IDENTITY.md - Agent Identity",
|
||||
"",
|
||||
"- **Name:** Current Agent",
|
||||
"- **Creature:** Steady Turtle",
|
||||
"- **Vibe:** Calm and methodical",
|
||||
"- **Emoji:** 🐢",
|
||||
"",
|
||||
"## Role",
|
||||
"",
|
||||
"Protect the queue.",
|
||||
"",
|
||||
].join("\n"),
|
||||
),
|
||||
realPath: filePath,
|
||||
stat: makeFileStat(),
|
||||
};
|
||||
}
|
||||
if (filePath === "/resolved/new/workspace/IDENTITY.md") {
|
||||
return {
|
||||
buffer: Buffer.from(
|
||||
[
|
||||
"# IDENTITY.md - Agent Identity",
|
||||
"",
|
||||
"- **Name:** C-3PO (Clawd's Third Protocol Observer)",
|
||||
"- **Creature:** Flustered Protocol Droid",
|
||||
"",
|
||||
"## Role",
|
||||
"",
|
||||
"Debug agent for `--dev` mode.",
|
||||
"",
|
||||
].join("\n"),
|
||||
),
|
||||
realPath: filePath,
|
||||
stat: makeFileStat(),
|
||||
};
|
||||
}
|
||||
throw createEnoentError();
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
const { respond, promise } = makeCall("agents.update", {
|
||||
@@ -812,19 +879,19 @@ describe("agents.update", () => {
|
||||
await promise;
|
||||
|
||||
expect(respond).toHaveBeenCalledWith(true, { ok: true, agentId: "test-agent" }, undefined);
|
||||
expect(mocks.writeFileWithinRoot).toHaveBeenCalledWith(
|
||||
expect(mocks.rootWrite).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
rootDir: "/resolved/new/workspace",
|
||||
relativePath: "IDENTITY.md",
|
||||
data: expect.stringContaining("- **Creature:** Steady Turtle"),
|
||||
}),
|
||||
);
|
||||
expect(mocks.writeFileWithinRoot).toHaveBeenCalledWith(
|
||||
expect(mocks.rootWrite).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: expect.stringContaining("## Role"),
|
||||
}),
|
||||
);
|
||||
expect(mocks.writeFileWithinRoot).not.toHaveBeenCalledWith(
|
||||
expect(mocks.rootWrite).not.toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: expect.stringContaining("Flustered Protocol Droid"),
|
||||
}),
|
||||
@@ -837,48 +904,50 @@ describe("agents.update", () => {
|
||||
identityPathCreated: false,
|
||||
});
|
||||
agentsTesting.setDepsForTests({
|
||||
readFileWithinRoot: async ({ rootDir, relativePath }) => {
|
||||
const filePath = `${rootDir}/${relativePath}`;
|
||||
if (filePath === "/workspace/test-agent/IDENTITY.md") {
|
||||
return {
|
||||
buffer: Buffer.from(
|
||||
[
|
||||
"# IDENTITY.md - Agent Identity",
|
||||
"",
|
||||
"- **Name:** Current Agent",
|
||||
"- **Creature:** Old Turtle",
|
||||
"",
|
||||
"## Role",
|
||||
"",
|
||||
"Old workspace role.",
|
||||
"",
|
||||
].join("\n"),
|
||||
),
|
||||
realPath: filePath,
|
||||
stat: makeFileStat(),
|
||||
};
|
||||
}
|
||||
if (filePath === "/resolved/new/workspace/IDENTITY.md") {
|
||||
return {
|
||||
buffer: Buffer.from(
|
||||
[
|
||||
"# IDENTITY.md - Agent Identity",
|
||||
"",
|
||||
"- **Name:** Destination Agent",
|
||||
"- **Creature:** Destination Fox",
|
||||
"",
|
||||
"## Role",
|
||||
"",
|
||||
"Destination workspace role.",
|
||||
"",
|
||||
].join("\n"),
|
||||
),
|
||||
realPath: filePath,
|
||||
stat: makeFileStat(),
|
||||
};
|
||||
}
|
||||
throw createEnoentError();
|
||||
},
|
||||
root: makeRootForTest({
|
||||
read: async ({ rootDir, relativePath }) => {
|
||||
const filePath = `${String(rootDir)}/${String(relativePath)}`;
|
||||
if (filePath === "/workspace/test-agent/IDENTITY.md") {
|
||||
return {
|
||||
buffer: Buffer.from(
|
||||
[
|
||||
"# IDENTITY.md - Agent Identity",
|
||||
"",
|
||||
"- **Name:** Current Agent",
|
||||
"- **Creature:** Old Turtle",
|
||||
"",
|
||||
"## Role",
|
||||
"",
|
||||
"Old workspace role.",
|
||||
"",
|
||||
].join("\n"),
|
||||
),
|
||||
realPath: filePath,
|
||||
stat: makeFileStat(),
|
||||
};
|
||||
}
|
||||
if (filePath === "/resolved/new/workspace/IDENTITY.md") {
|
||||
return {
|
||||
buffer: Buffer.from(
|
||||
[
|
||||
"# IDENTITY.md - Agent Identity",
|
||||
"",
|
||||
"- **Name:** Destination Agent",
|
||||
"- **Creature:** Destination Fox",
|
||||
"",
|
||||
"## Role",
|
||||
"",
|
||||
"Destination workspace role.",
|
||||
"",
|
||||
].join("\n"),
|
||||
),
|
||||
realPath: filePath,
|
||||
stat: makeFileStat(),
|
||||
};
|
||||
}
|
||||
throw createEnoentError();
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
const { respond, promise } = makeCall("agents.update", {
|
||||
@@ -888,19 +957,19 @@ describe("agents.update", () => {
|
||||
await promise;
|
||||
|
||||
expect(respond).toHaveBeenCalledWith(true, { ok: true, agentId: "test-agent" }, undefined);
|
||||
expect(mocks.writeFileWithinRoot).toHaveBeenCalledWith(
|
||||
expect(mocks.rootWrite).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
rootDir: "/resolved/new/workspace",
|
||||
relativePath: "IDENTITY.md",
|
||||
data: expect.stringContaining("- **Creature:** Destination Fox"),
|
||||
}),
|
||||
);
|
||||
expect(mocks.writeFileWithinRoot).toHaveBeenCalledWith(
|
||||
expect(mocks.rootWrite).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: expect.stringContaining("Destination workspace role."),
|
||||
}),
|
||||
);
|
||||
expect(mocks.writeFileWithinRoot).not.toHaveBeenCalledWith(
|
||||
expect(mocks.rootWrite).not.toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
data: expect.stringContaining("Old workspace role."),
|
||||
}),
|
||||
@@ -908,8 +977,8 @@ describe("agents.update", () => {
|
||||
});
|
||||
|
||||
it("does not persist config when IDENTITY.md write fails on update", async () => {
|
||||
mocks.writeFileWithinRoot.mockRejectedValueOnce(
|
||||
new SafeOpenError("path-mismatch", "path escapes workspace root"),
|
||||
mocks.rootWrite.mockRejectedValueOnce(
|
||||
new FsSafeError("path-mismatch", "path escapes workspace root"),
|
||||
);
|
||||
|
||||
const { respond, promise } = makeCall("agents.update", {
|
||||
@@ -929,9 +998,11 @@ describe("agents.update", () => {
|
||||
|
||||
it("treats unsafe IDENTITY.md reads as invalid update requests", async () => {
|
||||
agentsTesting.setDepsForTests({
|
||||
readFileWithinRoot: async () => {
|
||||
throw new SafeOpenError("invalid-path", "path is not a regular file under root");
|
||||
},
|
||||
root: makeRootForTest({
|
||||
read: async () => {
|
||||
throw new FsSafeError("invalid-path", "path is not a regular file under root");
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
const { respond, promise } = makeCall("agents.update", {
|
||||
@@ -948,14 +1019,14 @@ describe("agents.update", () => {
|
||||
}),
|
||||
);
|
||||
expect(mocks.writeConfigFile).not.toHaveBeenCalled();
|
||||
expect(mocks.writeFileWithinRoot).not.toHaveBeenCalled();
|
||||
expect(mocks.rootWrite).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("uses non-blocking reads for IDENTITY.md during agents.update", async () => {
|
||||
const readFileWithinRoot = vi.fn(async () => {
|
||||
throw new SafeOpenError("not-found", "file not found");
|
||||
const rootRead = vi.fn(async () => {
|
||||
throw new FsSafeError("not-found", "file not found");
|
||||
});
|
||||
agentsTesting.setDepsForTests({ readFileWithinRoot });
|
||||
agentsTesting.setDepsForTests({ root: makeRootForTest({ read: rootRead }) });
|
||||
|
||||
const { promise } = makeCall("agents.update", {
|
||||
agentId: "test-agent",
|
||||
@@ -963,7 +1034,7 @@ describe("agents.update", () => {
|
||||
});
|
||||
await promise;
|
||||
|
||||
expect(readFileWithinRoot).toHaveBeenCalledWith(
|
||||
expect(rootRead).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
relativePath: "IDENTITY.md",
|
||||
nonBlockingRead: true,
|
||||
@@ -1082,10 +1153,10 @@ describe("agents.files.list", () => {
|
||||
});
|
||||
|
||||
it("reports unreadable workspace files as present in list responses", async () => {
|
||||
const openFileWithinRoot = vi.fn(async () => {
|
||||
const rootOpen = vi.fn(async () => {
|
||||
throw createErrnoError("EACCES");
|
||||
});
|
||||
agentsTesting.setDepsForTests({ openFileWithinRoot });
|
||||
agentsTesting.setDepsForTests({ root: makeRootForTest({ open: rootOpen }) });
|
||||
mocks.fsLstat.mockImplementation(async (...args: unknown[]) => {
|
||||
if (args[0] === "/workspace/main/AGENTS.md") {
|
||||
return makeFileStat({ size: 17, mtimeMs: 4567 });
|
||||
@@ -1112,7 +1183,7 @@ describe("agents.files.list", () => {
|
||||
size: 17,
|
||||
}),
|
||||
);
|
||||
expect(openFileWithinRoot).not.toHaveBeenCalled();
|
||||
expect(rootOpen).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1128,32 +1199,33 @@ describe("agents.files.get/set symlink safety", () => {
|
||||
});
|
||||
|
||||
function mockWorkspaceEscapeSymlink() {
|
||||
const safeOpenError = new SafeOpenError("invalid-path", "path escapes workspace root");
|
||||
const safeOpenError = new FsSafeError("invalid-path", "path escapes workspace root");
|
||||
agentsTesting.setDepsForTests({
|
||||
openFileWithinRoot: async () => {
|
||||
throw safeOpenError;
|
||||
},
|
||||
readFileWithinRoot: async () => {
|
||||
throw safeOpenError;
|
||||
},
|
||||
root: makeRootForTest({
|
||||
open: async () => {
|
||||
throw safeOpenError;
|
||||
},
|
||||
read: async () => {
|
||||
throw safeOpenError;
|
||||
},
|
||||
}),
|
||||
});
|
||||
mocks.writeFileWithinRoot.mockRejectedValue(safeOpenError);
|
||||
mocks.rootWrite.mockRejectedValue(safeOpenError);
|
||||
}
|
||||
|
||||
function mockInWorkspaceSymlinkAlias() {
|
||||
const safeOpenError = new SafeOpenError(
|
||||
"invalid-path",
|
||||
"path is not a regular file under root",
|
||||
);
|
||||
const safeOpenError = new FsSafeError("invalid-path", "path is not a regular file under root");
|
||||
agentsTesting.setDepsForTests({
|
||||
openFileWithinRoot: async () => {
|
||||
throw safeOpenError;
|
||||
},
|
||||
readFileWithinRoot: async () => {
|
||||
throw safeOpenError;
|
||||
},
|
||||
root: makeRootForTest({
|
||||
open: async () => {
|
||||
throw safeOpenError;
|
||||
},
|
||||
read: async () => {
|
||||
throw safeOpenError;
|
||||
},
|
||||
}),
|
||||
});
|
||||
mocks.writeFileWithinRoot.mockRejectedValue(safeOpenError);
|
||||
mocks.rootWrite.mockRejectedValue(safeOpenError);
|
||||
}
|
||||
|
||||
it.each([
|
||||
@@ -1179,16 +1251,18 @@ describe("agents.files.get/set symlink safety", () => {
|
||||
);
|
||||
|
||||
function mockHardlinkedWorkspaceAlias() {
|
||||
const safeOpenError = new SafeOpenError("invalid-path", "hardlinked path not allowed");
|
||||
const safeOpenError = new FsSafeError("invalid-path", "hardlinked path not allowed");
|
||||
agentsTesting.setDepsForTests({
|
||||
openFileWithinRoot: async () => {
|
||||
throw safeOpenError;
|
||||
},
|
||||
readFileWithinRoot: async () => {
|
||||
throw safeOpenError;
|
||||
},
|
||||
root: makeRootForTest({
|
||||
open: async () => {
|
||||
throw safeOpenError;
|
||||
},
|
||||
read: async () => {
|
||||
throw safeOpenError;
|
||||
},
|
||||
}),
|
||||
});
|
||||
mocks.writeFileWithinRoot.mockRejectedValue(safeOpenError);
|
||||
mocks.rootWrite.mockRejectedValue(safeOpenError);
|
||||
}
|
||||
|
||||
it.each([
|
||||
@@ -1206,12 +1280,12 @@ describe("agents.files.get/set symlink safety", () => {
|
||||
);
|
||||
|
||||
it("uses non-blocking safe reads for agents.files.get", async () => {
|
||||
const readFileWithinRoot = vi.fn(async () => ({
|
||||
const rootRead = vi.fn(async () => ({
|
||||
buffer: Buffer.from("hello"),
|
||||
realPath: "/workspace/test-agent/AGENTS.md",
|
||||
stat: makeFileStat({ size: 5 }),
|
||||
}));
|
||||
agentsTesting.setDepsForTests({ readFileWithinRoot });
|
||||
agentsTesting.setDepsForTests({ root: makeRootForTest({ read: rootRead }) });
|
||||
|
||||
const { respond, promise } = makeCall("agents.files.get", {
|
||||
agentId: "main",
|
||||
@@ -1219,11 +1293,11 @@ describe("agents.files.get/set symlink safety", () => {
|
||||
});
|
||||
await promise;
|
||||
|
||||
expect(readFileWithinRoot).toHaveBeenCalledWith(
|
||||
expect(rootRead).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
rootDir: "/workspace/test-agent",
|
||||
relativePath: "AGENTS.md",
|
||||
rejectHardlinks: true,
|
||||
hardlinks: "reject",
|
||||
nonBlockingRead: true,
|
||||
}),
|
||||
);
|
||||
|
||||
@@ -33,13 +33,8 @@ import {
|
||||
} from "../../config/sessions.js";
|
||||
import type { IdentityConfig } from "../../config/types.base.js";
|
||||
import type { OpenClawConfig } from "../../config/types.openclaw.js";
|
||||
import { sameFileIdentity } from "../../infra/file-identity.js";
|
||||
import {
|
||||
openFileWithinRoot,
|
||||
readFileWithinRoot,
|
||||
SafeOpenError,
|
||||
writeFileWithinRoot,
|
||||
} from "../../infra/fs-safe.js";
|
||||
import { sameFileIdentity } from "../../infra/fs-safe-advanced.js";
|
||||
import { root, FsSafeError, type ReadResult } from "../../infra/fs-safe.js";
|
||||
import { movePathToTrash } from "../../plugin-sdk/browser-maintenance.js";
|
||||
import { DEFAULT_AGENT_ID, normalizeAgentId } from "../../routing/session-key.js";
|
||||
import { resolveUserPath } from "../../utils.js";
|
||||
@@ -72,28 +67,27 @@ const BOOTSTRAP_FILE_NAMES_POST_ONBOARDING = BOOTSTRAP_FILE_NAMES.filter(
|
||||
);
|
||||
|
||||
const agentsHandlerDeps = {
|
||||
root,
|
||||
isWorkspaceSetupCompleted,
|
||||
openFileWithinRoot,
|
||||
readFileWithinRoot,
|
||||
writeFileWithinRoot,
|
||||
};
|
||||
|
||||
export const __testing = {
|
||||
setDepsForTests(
|
||||
overrides: Partial<{
|
||||
root: typeof root;
|
||||
isWorkspaceSetupCompleted: typeof isWorkspaceSetupCompleted;
|
||||
openFileWithinRoot: typeof openFileWithinRoot;
|
||||
readFileWithinRoot: typeof readFileWithinRoot;
|
||||
writeFileWithinRoot: typeof writeFileWithinRoot;
|
||||
}>,
|
||||
) {
|
||||
Object.assign(agentsHandlerDeps, overrides);
|
||||
if (overrides.isWorkspaceSetupCompleted) {
|
||||
agentsHandlerDeps.isWorkspaceSetupCompleted = overrides.isWorkspaceSetupCompleted;
|
||||
}
|
||||
if (overrides.root) {
|
||||
agentsHandlerDeps.root = overrides.root;
|
||||
}
|
||||
},
|
||||
resetDepsForTests() {
|
||||
agentsHandlerDeps.root = root;
|
||||
agentsHandlerDeps.isWorkspaceSetupCompleted = isWorkspaceSetupCompleted;
|
||||
agentsHandlerDeps.openFileWithinRoot = openFileWithinRoot;
|
||||
agentsHandlerDeps.readFileWithinRoot = readFileWithinRoot;
|
||||
agentsHandlerDeps.writeFileWithinRoot = writeFileWithinRoot;
|
||||
},
|
||||
};
|
||||
|
||||
@@ -315,14 +309,10 @@ async function writeWorkspaceFileOrRespond(params: {
|
||||
}): Promise<boolean> {
|
||||
await fs.mkdir(params.workspaceDir, { recursive: true });
|
||||
try {
|
||||
await agentsHandlerDeps.writeFileWithinRoot({
|
||||
rootDir: params.workspaceDir,
|
||||
relativePath: params.name,
|
||||
data: params.content,
|
||||
encoding: "utf8",
|
||||
});
|
||||
const workspaceRoot = await agentsHandlerDeps.root(params.workspaceDir);
|
||||
await workspaceRoot.write(params.name, params.content, { encoding: "utf8" });
|
||||
} catch (err) {
|
||||
if (err instanceof SafeOpenError) {
|
||||
if (err instanceof FsSafeError) {
|
||||
respondWorkspaceFileUnsafe(params.respond, params.name);
|
||||
return false;
|
||||
}
|
||||
@@ -354,15 +344,14 @@ async function readWorkspaceFileContent(
|
||||
name: string,
|
||||
): Promise<string | undefined> {
|
||||
try {
|
||||
const safeRead = await agentsHandlerDeps.readFileWithinRoot({
|
||||
rootDir: workspaceDir,
|
||||
relativePath: name,
|
||||
rejectHardlinks: true,
|
||||
const workspaceRoot = await agentsHandlerDeps.root(workspaceDir);
|
||||
const safeRead = await workspaceRoot.read(name, {
|
||||
hardlinks: "reject",
|
||||
nonBlockingRead: true,
|
||||
});
|
||||
return safeRead.buffer.toString("utf-8");
|
||||
} catch (err) {
|
||||
if (err instanceof SafeOpenError && err.code === "not-found") {
|
||||
if (err instanceof FsSafeError && err.code === "not-found") {
|
||||
return undefined;
|
||||
}
|
||||
throw err;
|
||||
@@ -407,7 +396,7 @@ async function buildIdentityMarkdownOrRespondUnsafe(params: {
|
||||
try {
|
||||
return await buildIdentityMarkdownForWrite(params);
|
||||
} catch (err) {
|
||||
if (err instanceof SafeOpenError) {
|
||||
if (err instanceof FsSafeError) {
|
||||
respondWorkspaceFileUnsafe(params.respond, DEFAULT_IDENTITY_FILENAME);
|
||||
return null;
|
||||
}
|
||||
@@ -716,20 +705,19 @@ export const agentsHandlers: GatewayRequestHandlers = {
|
||||
}
|
||||
const { agentId, workspaceDir, name } = resolved;
|
||||
const filePath = path.join(workspaceDir, name);
|
||||
let safeRead: Awaited<ReturnType<typeof readFileWithinRoot>>;
|
||||
let safeRead: ReadResult;
|
||||
try {
|
||||
safeRead = await agentsHandlerDeps.readFileWithinRoot({
|
||||
rootDir: workspaceDir,
|
||||
relativePath: name,
|
||||
rejectHardlinks: true,
|
||||
const workspaceRoot = await agentsHandlerDeps.root(workspaceDir);
|
||||
safeRead = await workspaceRoot.read(name, {
|
||||
hardlinks: "reject",
|
||||
nonBlockingRead: true,
|
||||
});
|
||||
} catch (err) {
|
||||
if (err instanceof SafeOpenError && err.code === "not-found") {
|
||||
if (err instanceof FsSafeError && err.code === "not-found") {
|
||||
respondWorkspaceFileMissing({ respond, agentId, workspaceDir, name, filePath });
|
||||
return;
|
||||
}
|
||||
if (err instanceof SafeOpenError) {
|
||||
if (err instanceof FsSafeError) {
|
||||
respondWorkspaceFileUnsafe(respond, name);
|
||||
return;
|
||||
}
|
||||
@@ -770,14 +758,10 @@ export const agentsHandlers: GatewayRequestHandlers = {
|
||||
const filePath = path.join(workspaceDir, name);
|
||||
const content = params.content;
|
||||
try {
|
||||
await agentsHandlerDeps.writeFileWithinRoot({
|
||||
rootDir: workspaceDir,
|
||||
relativePath: name,
|
||||
data: content,
|
||||
encoding: "utf8",
|
||||
});
|
||||
const workspaceRoot = await agentsHandlerDeps.root(workspaceDir);
|
||||
await workspaceRoot.write(name, content, { encoding: "utf8" });
|
||||
} catch (err) {
|
||||
if (!(err instanceof SafeOpenError)) {
|
||||
if (!(err instanceof FsSafeError)) {
|
||||
throw err;
|
||||
}
|
||||
respondWorkspaceFileUnsafe(respond, name);
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import fs from "node:fs";
|
||||
import fsPromises from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { pathToFileURL } from "node:url";
|
||||
@@ -116,8 +117,7 @@ describe("buildWebchatAudioContentBlocksFromReplyPayloads", () => {
|
||||
});
|
||||
|
||||
it("drops tool-result file:// URLs with remote hosts before touching the filesystem", async () => {
|
||||
const statSpy = vi.spyOn(fs, "statSync");
|
||||
const readSpy = vi.spyOn(fs, "readFileSync");
|
||||
const openSpy = vi.spyOn(fsPromises, "open");
|
||||
|
||||
const blocks = await buildWebchatAudioContentBlocksFromReplyPayloads([
|
||||
{
|
||||
@@ -128,11 +128,9 @@ describe("buildWebchatAudioContentBlocksFromReplyPayloads", () => {
|
||||
]);
|
||||
|
||||
expect(blocks).toHaveLength(0);
|
||||
expect(statSpy).not.toHaveBeenCalled();
|
||||
expect(readSpy).not.toHaveBeenCalled();
|
||||
expect(openSpy).not.toHaveBeenCalled();
|
||||
|
||||
statSpy.mockRestore();
|
||||
readSpy.mockRestore();
|
||||
openSpy.mockRestore();
|
||||
});
|
||||
|
||||
it("rejects a local audio file outside configured localRoots", async () => {
|
||||
@@ -174,19 +172,11 @@ describe("buildWebchatAudioContentBlocksFromReplyPayloads", () => {
|
||||
expect((blocks[0] as { type?: string }).type).toBe("audio");
|
||||
});
|
||||
|
||||
it("does not read file contents when stat reports size over the cap", async () => {
|
||||
it("skips local audio when the opened file stat is over the cap", async () => {
|
||||
tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-webchat-audio-"));
|
||||
const audioPath = path.join(tmpDir, "huge.mp3");
|
||||
fs.writeFileSync(audioPath, Buffer.from([0x02]));
|
||||
|
||||
const origStat = fs.statSync.bind(fs);
|
||||
const statSpy = vi.spyOn(fs, "statSync").mockImplementation((p: fs.PathLike) => {
|
||||
if (String(p) === audioPath) {
|
||||
return { isFile: () => true, size: 16 * 1024 * 1024 } as fs.Stats;
|
||||
}
|
||||
return origStat(p);
|
||||
});
|
||||
const readSpy = vi.spyOn(fs, "readFileSync");
|
||||
fs.truncateSync(audioPath, 16 * 1024 * 1024);
|
||||
|
||||
const blocks = await buildWebchatAudioContentBlocksFromReplyPayloads(
|
||||
[{ mediaUrl: audioPath, trustedLocalMedia: true }],
|
||||
@@ -194,10 +184,6 @@ describe("buildWebchatAudioContentBlocksFromReplyPayloads", () => {
|
||||
);
|
||||
|
||||
expect(blocks).toHaveLength(0);
|
||||
expect(readSpy).not.toHaveBeenCalled();
|
||||
|
||||
statSpy.mockRestore();
|
||||
readSpy.mockRestore();
|
||||
});
|
||||
|
||||
it("rejects untrusted local audio paths", async () => {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import type { ReplyPayload } from "../../auto-reply/reply-payload.js";
|
||||
import { openLocalFileSafely } from "../../infra/fs-safe.js";
|
||||
import { assertNoWindowsNetworkPath, safeFileURLToPath } from "../../infra/local-file-access.js";
|
||||
import { assertLocalMediaAllowed, LocalMediaAccessError } from "../../media/local-media-access.js";
|
||||
import { isAudioFileName } from "../../media/mime.js";
|
||||
@@ -41,6 +41,11 @@ type WebchatAudioEmbeddingOptions = {
|
||||
|
||||
type WebchatAssistantMediaOptions = WebchatAudioEmbeddingOptions;
|
||||
|
||||
type LocalAudioContentBlock = {
|
||||
path: string;
|
||||
block: Record<string, unknown>;
|
||||
};
|
||||
|
||||
/** Map `mediaUrl` strings to an absolute filesystem path for local embedding (plain paths or `file:` URLs). */
|
||||
function resolveLocalMediaPathForEmbedding(raw: string): string | null {
|
||||
const trimmed = raw.trim();
|
||||
@@ -75,12 +80,11 @@ function resolveLocalMediaPathForEmbedding(raw: string): string | null {
|
||||
return trimmed;
|
||||
}
|
||||
|
||||
/** Returns a readable local file path when it is a regular file and within the size cap (single stat before read). */
|
||||
async function resolveLocalAudioFileForEmbedding(
|
||||
async function readLocalAudioContentBlockForEmbedding(
|
||||
payload: ReplyPayload,
|
||||
raw: string,
|
||||
options: WebchatAudioEmbeddingOptions | undefined,
|
||||
): Promise<string | null> {
|
||||
): Promise<LocalAudioContentBlock | null> {
|
||||
if (payload.trustedLocalMedia !== true) {
|
||||
return null;
|
||||
}
|
||||
@@ -91,18 +95,36 @@ async function resolveLocalAudioFileForEmbedding(
|
||||
if (!isAudioFileName(resolved)) {
|
||||
return null;
|
||||
}
|
||||
let opened: Awaited<ReturnType<typeof openLocalFileSafely>> | undefined;
|
||||
try {
|
||||
await assertLocalMediaAllowed(resolved, options?.localRoots);
|
||||
const st = fs.statSync(resolved);
|
||||
if (!st.isFile() || st.size > MAX_WEBCHAT_AUDIO_BYTES) {
|
||||
opened = await openLocalFileSafely({ filePath: resolved });
|
||||
await assertLocalMediaAllowed(opened.realPath, options?.localRoots);
|
||||
if (opened.stat.size > MAX_WEBCHAT_AUDIO_BYTES) {
|
||||
return null;
|
||||
}
|
||||
return resolved;
|
||||
const buf = await opened.handle.readFile();
|
||||
if (buf.length > MAX_WEBCHAT_AUDIO_BYTES) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
path: opened.realPath,
|
||||
block: {
|
||||
type: "audio",
|
||||
source: {
|
||||
type: "base64",
|
||||
media_type: mimeTypeForPath(opened.realPath),
|
||||
data: buf.toString("base64"),
|
||||
},
|
||||
},
|
||||
};
|
||||
} catch (err) {
|
||||
if (err instanceof LocalMediaAccessError) {
|
||||
options?.onLocalAudioAccessDenied?.(err);
|
||||
}
|
||||
return null;
|
||||
} finally {
|
||||
await opened?.handle.close().catch(() => {});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -171,15 +193,12 @@ export async function buildWebchatAudioContentBlocksFromReplyPayloads(
|
||||
if (!url) {
|
||||
continue;
|
||||
}
|
||||
const resolved = await resolveLocalAudioFileForEmbedding(payload, url, options);
|
||||
if (!resolved || seen.has(resolved)) {
|
||||
const audio = await readLocalAudioContentBlockForEmbedding(payload, url, options);
|
||||
if (!audio || seen.has(audio.path)) {
|
||||
continue;
|
||||
}
|
||||
seen.add(resolved);
|
||||
const block = tryReadLocalAudioContentBlock(resolved);
|
||||
if (block) {
|
||||
blocks.push(block);
|
||||
}
|
||||
seen.add(audio.path);
|
||||
blocks.push(audio.block);
|
||||
}
|
||||
}
|
||||
return blocks;
|
||||
@@ -213,18 +232,15 @@ export async function buildWebchatAssistantMessageFromReplyPayloads(
|
||||
if (!url) {
|
||||
continue;
|
||||
}
|
||||
const resolvedAudioPath = await resolveLocalAudioFileForEmbedding(payload, url, options);
|
||||
if (resolvedAudioPath) {
|
||||
if (seenAudio.has(resolvedAudioPath)) {
|
||||
const audio = await readLocalAudioContentBlockForEmbedding(payload, url, options);
|
||||
if (audio) {
|
||||
if (seenAudio.has(audio.path)) {
|
||||
continue;
|
||||
}
|
||||
seenAudio.add(resolvedAudioPath);
|
||||
const block = tryReadLocalAudioContentBlock(resolvedAudioPath);
|
||||
if (block) {
|
||||
payloadMediaBlocks.push(block);
|
||||
hasAudio = true;
|
||||
payloadHasAudio = true;
|
||||
}
|
||||
seenAudio.add(audio.path);
|
||||
payloadMediaBlocks.push(audio.block);
|
||||
hasAudio = true;
|
||||
payloadHasAudio = true;
|
||||
continue;
|
||||
}
|
||||
const imageUrl = resolveEmbeddableImageUrl(url);
|
||||
@@ -270,20 +286,3 @@ export async function buildWebchatAssistantMessageFromReplyPayloads(
|
||||
}
|
||||
return { content, transcriptText };
|
||||
}
|
||||
|
||||
function tryReadLocalAudioContentBlock(filePath: string): Record<string, unknown> | null {
|
||||
try {
|
||||
const buf = fs.readFileSync(filePath);
|
||||
if (buf.length > MAX_WEBCHAT_AUDIO_BYTES) {
|
||||
return null;
|
||||
}
|
||||
const mediaType = mimeTypeForPath(filePath);
|
||||
const base64Data = buf.toString("base64");
|
||||
return {
|
||||
type: "audio",
|
||||
source: { type: "base64", media_type: mediaType, data: base64Data },
|
||||
};
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -184,24 +184,24 @@ export function registerControlUiAndPairingSuite(): void {
|
||||
};
|
||||
|
||||
const stripPairedMetadataRolesAndScopes = async (deviceId: string) => {
|
||||
const { resolvePairingPaths, readJsonFile } = await import("../infra/pairing-files.js");
|
||||
const { writeJsonAtomic } = await import("../infra/json-files.js");
|
||||
const { resolvePairingPaths, tryReadJson } = await import("../infra/pairing-files.js");
|
||||
const { writeJson } = await import("../infra/json-files.js");
|
||||
const { pairedPath } = resolvePairingPaths(undefined, "devices");
|
||||
const paired = (await readJsonFile<Record<string, Record<string, unknown>>>(pairedPath)) ?? {};
|
||||
const paired = (await tryReadJson<Record<string, Record<string, unknown>>>(pairedPath)) ?? {};
|
||||
const legacy = getRequiredPairedMetadata(paired, deviceId);
|
||||
delete legacy.roles;
|
||||
delete legacy.scopes;
|
||||
await writeJsonAtomic(pairedPath, paired);
|
||||
await writeJson(pairedPath, paired);
|
||||
};
|
||||
|
||||
const overwritePairedPublicKey = async (deviceId: string, publicKey: string) => {
|
||||
const { resolvePairingPaths, readJsonFile } = await import("../infra/pairing-files.js");
|
||||
const { writeJsonAtomic } = await import("../infra/json-files.js");
|
||||
const { resolvePairingPaths, tryReadJson } = await import("../infra/pairing-files.js");
|
||||
const { writeJson } = await import("../infra/json-files.js");
|
||||
const { pairedPath } = resolvePairingPaths(undefined, "devices");
|
||||
const paired = (await readJsonFile<Record<string, Record<string, unknown>>>(pairedPath)) ?? {};
|
||||
const paired = (await tryReadJson<Record<string, Record<string, unknown>>>(pairedPath)) ?? {};
|
||||
const metadata = getRequiredPairedMetadata(paired, deviceId);
|
||||
metadata.publicKey = publicKey;
|
||||
await writeJsonAtomic(pairedPath, paired);
|
||||
await writeJson(pairedPath, paired);
|
||||
};
|
||||
|
||||
const seedApprovedOperatorReadPairing = async (params: {
|
||||
|
||||
@@ -56,7 +56,7 @@ import {
|
||||
type SessionScope,
|
||||
} from "../config/sessions.js";
|
||||
import type { OpenClawConfig } from "../config/types.openclaw.js";
|
||||
import { openBoundaryFileSync } from "../infra/boundary-file-read.js";
|
||||
import { openRootFileSync } from "../infra/boundary-file-read.js";
|
||||
import { projectPluginSessionExtensionsSync } from "../plugins/host-hook-state.js";
|
||||
import {
|
||||
DEFAULT_AGENT_ID,
|
||||
@@ -168,7 +168,7 @@ function resolveIdentityAvatarUrl(
|
||||
return undefined;
|
||||
}
|
||||
try {
|
||||
const opened = openBoundaryFileSync({
|
||||
const opened = openRootFileSync({
|
||||
absolutePath: resolvedCandidate,
|
||||
rootPath: workspaceRoot,
|
||||
rootRealPath: workspaceRoot,
|
||||
|
||||
@@ -28,6 +28,7 @@ import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { resolveStateDir } from "../../../config/paths.js";
|
||||
import { formatErrorMessage } from "../../../infra/errors.js";
|
||||
import { appendRegularFile } from "../../../infra/fs-safe.js";
|
||||
import { createSubsystemLogger } from "../../../logging/subsystem.js";
|
||||
import type { HookHandler } from "../../hooks.js";
|
||||
|
||||
@@ -59,7 +60,11 @@ const logCommand: HookHandler = async (event) => {
|
||||
source: event.context.commandSource ?? "unknown",
|
||||
}) + "\n";
|
||||
|
||||
await fs.appendFile(logFile, logLine, "utf-8");
|
||||
await appendRegularFile({
|
||||
filePath: logFile,
|
||||
content: logLine,
|
||||
rejectSymlinkParents: true,
|
||||
});
|
||||
} catch (err) {
|
||||
const message = formatErrorMessage(err);
|
||||
log.error(`Failed to log command: ${message}`);
|
||||
|
||||
@@ -14,7 +14,7 @@ import {
|
||||
} from "../../../agents/agent-scope.js";
|
||||
import { resolveStateDir } from "../../../config/paths.js";
|
||||
import type { OpenClawConfig } from "../../../config/types.openclaw.js";
|
||||
import { writeFileWithinRoot } from "../../../infra/fs-safe.js";
|
||||
import { root } from "../../../infra/fs-safe.js";
|
||||
import { createSubsystemLogger } from "../../../logging/subsystem.js";
|
||||
import {
|
||||
parseAgentSessionKey,
|
||||
@@ -277,12 +277,8 @@ async function saveSessionMemoryNow(event: Parameters<HookHandler>[0]): Promise<
|
||||
const entry = entryParts.join("\n");
|
||||
|
||||
// Write under memory root with alias-safe file validation.
|
||||
await writeFileWithinRoot({
|
||||
rootDir: memoryDir,
|
||||
relativePath: filename,
|
||||
data: entry,
|
||||
encoding: "utf-8",
|
||||
});
|
||||
const memoryRoot = await root(memoryDir);
|
||||
await memoryRoot.write(filename, entry, { encoding: "utf-8" });
|
||||
log.debug("Memory file written successfully");
|
||||
|
||||
// Log completion (but don't send user-visible confirmation - it's internal housekeeping)
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { fileExists, readJsonFile, resolveArchiveKind } from "../infra/archive.js";
|
||||
import { resolveArchiveKind } from "../infra/archive.js";
|
||||
import { pathExists } from "../infra/fs-safe.js";
|
||||
import { resolveExistingInstallPath, withExtractedArchiveRoot } from "../infra/install-flow.js";
|
||||
import { installFromValidatedNpmSpecArchive } from "../infra/install-from-npm-spec.js";
|
||||
import {
|
||||
@@ -18,19 +19,20 @@ import {
|
||||
ensureInstallTargetAvailable,
|
||||
resolveCanonicalInstallTarget,
|
||||
} from "../infra/install-target.js";
|
||||
import { readJson } from "../infra/json-files.js";
|
||||
import { isPathInside, isPathInsideWithRealpath } from "../security/scan-paths.js";
|
||||
|
||||
export type { NpmIntegrityDrift, NpmSpecResolution };
|
||||
|
||||
export {
|
||||
ensureInstallTargetAvailable,
|
||||
fileExists,
|
||||
pathExists as fileExists,
|
||||
installFromValidatedNpmSpecArchive,
|
||||
installPackageDir,
|
||||
installPackageDirWithManifestDeps,
|
||||
isPathInside,
|
||||
isPathInsideWithRealpath,
|
||||
readJsonFile,
|
||||
readJson as readJsonFile,
|
||||
resolveArchiveKind,
|
||||
resolveArchiveSourcePath,
|
||||
resolveCanonicalInstallTarget,
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import type { OpenClawConfig } from "../config/types.openclaw.js";
|
||||
import { openBoundaryFile } from "../infra/boundary-file-read.js";
|
||||
import { openRootFile } from "../infra/boundary-file-read.js";
|
||||
import { formatErrorMessage } from "../infra/errors.js";
|
||||
import { createSubsystemLogger } from "../logging/subsystem.js";
|
||||
import { resolveGlobalSingleton } from "../shared/global-singleton.js";
|
||||
@@ -119,7 +119,7 @@ export async function loadInternalHooks(
|
||||
);
|
||||
continue;
|
||||
}
|
||||
const opened = await openBoundaryFile({
|
||||
const opened = await openRootFile({
|
||||
absolutePath: entry.hook.handlerPath,
|
||||
rootPath: hookBaseDir,
|
||||
boundaryLabel: "hook directory",
|
||||
@@ -215,7 +215,7 @@ export async function loadInternalHooks(
|
||||
log.error(`Handler module path must stay within workspaceDir: ${safeLogValue(rawModule)}`);
|
||||
continue;
|
||||
}
|
||||
const opened = await openBoundaryFile({
|
||||
const opened = await openRootFile({
|
||||
absolutePath: modulePathSafe,
|
||||
rootPath: baseDirReal,
|
||||
boundaryLabel: "workspace directory",
|
||||
|
||||
@@ -2,7 +2,7 @@ import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { MANIFEST_KEY } from "../compat/legacy-names.js";
|
||||
import type { OpenClawConfig } from "../config/types.openclaw.js";
|
||||
import { openBoundaryFileSync } from "../infra/boundary-file-read.js";
|
||||
import { openRootFileSync } from "../infra/boundary-file-read.js";
|
||||
import { createSubsystemLogger } from "../logging/subsystem.js";
|
||||
import { isPathInsideWithRealpath } from "../security/scan-paths.js";
|
||||
import { CONFIG_DIR, resolveUserPath } from "../utils.js";
|
||||
@@ -28,7 +28,7 @@ type LoadedHook = {
|
||||
|
||||
function readHookPackageManifest(dir: string): HookPackageManifest | null {
|
||||
const manifestPath = path.join(dir, "package.json");
|
||||
const raw = readBoundaryFileUtf8({
|
||||
const raw = readRootFileUtf8({
|
||||
absolutePath: manifestPath,
|
||||
rootPath: dir,
|
||||
boundaryLabel: "hook package directory",
|
||||
@@ -71,7 +71,7 @@ function loadHookFromDir(params: {
|
||||
nameHint?: string;
|
||||
}): LoadedHook | null {
|
||||
const hookMdPath = path.join(params.hookDir, "HOOK.md");
|
||||
const content = readBoundaryFileUtf8({
|
||||
const content = readRootFileUtf8({
|
||||
absolutePath: hookMdPath,
|
||||
rootPath: params.hookDir,
|
||||
boundaryLabel: "hook directory",
|
||||
@@ -89,7 +89,7 @@ function loadHookFromDir(params: {
|
||||
let handlerPath: string | undefined;
|
||||
for (const candidate of handlerCandidates) {
|
||||
const candidatePath = path.join(params.hookDir, candidate);
|
||||
const safeCandidatePath = resolveBoundaryFilePath({
|
||||
const safeCandidatePath = resolveRootFilePath({
|
||||
absolutePath: candidatePath,
|
||||
rootPath: params.hookDir,
|
||||
boundaryLabel: "hook directory",
|
||||
@@ -293,12 +293,12 @@ export function loadWorkspaceHookEntries(
|
||||
});
|
||||
}
|
||||
|
||||
function readBoundaryFileUtf8(params: {
|
||||
function readRootFileUtf8(params: {
|
||||
absolutePath: string;
|
||||
rootPath: string;
|
||||
boundaryLabel: string;
|
||||
}): string | null {
|
||||
return withOpenedBoundaryFileSync(params, (opened) => {
|
||||
return withOpenedRootFileSync(params, (opened) => {
|
||||
try {
|
||||
return fs.readFileSync(opened.fd, "utf-8");
|
||||
} catch {
|
||||
@@ -307,7 +307,7 @@ function readBoundaryFileUtf8(params: {
|
||||
});
|
||||
}
|
||||
|
||||
function withOpenedBoundaryFileSync<T>(
|
||||
function withOpenedRootFileSync<T>(
|
||||
params: {
|
||||
absolutePath: string;
|
||||
rootPath: string;
|
||||
@@ -315,7 +315,7 @@ function withOpenedBoundaryFileSync<T>(
|
||||
},
|
||||
read: (opened: { fd: number; path: string }) => T,
|
||||
): T | null {
|
||||
const opened = openBoundaryFileSync({
|
||||
const opened = openRootFileSync({
|
||||
absolutePath: params.absolutePath,
|
||||
rootPath: params.rootPath,
|
||||
boundaryLabel: params.boundaryLabel,
|
||||
@@ -330,10 +330,10 @@ function withOpenedBoundaryFileSync<T>(
|
||||
}
|
||||
}
|
||||
|
||||
function resolveBoundaryFilePath(params: {
|
||||
function resolveRootFilePath(params: {
|
||||
absolutePath: string;
|
||||
rootPath: string;
|
||||
boundaryLabel: string;
|
||||
}): string | null {
|
||||
return withOpenedBoundaryFileSync(params, (opened) => opened.path);
|
||||
return withOpenedRootFileSync(params, (opened) => opened.path);
|
||||
}
|
||||
|
||||
@@ -4,12 +4,11 @@ import { afterEach, describe, expect, it, vi } from "vitest";
|
||||
import { createTrackedTempDirs } from "../test-utils/tracked-temp-dirs.js";
|
||||
import {
|
||||
createTarEntryPreflightChecker,
|
||||
fileExists,
|
||||
readJsonFile,
|
||||
resolveArchiveKind,
|
||||
resolvePackedRootDir,
|
||||
withTimeout,
|
||||
} from "./archive.js";
|
||||
import { pathExists, withTimeout } from "./fs-safe.js";
|
||||
import { readJsonFileStrict } from "./json-files.js";
|
||||
|
||||
const tempDirs = createTrackedTempDirs();
|
||||
const createTempDir = () => tempDirs.make("openclaw-archive-helper-test-");
|
||||
@@ -159,9 +158,9 @@ describe("archive helpers", () => {
|
||||
await fs.writeFile(jsonPath, '{"ok":true}', "utf8");
|
||||
await fs.writeFile(badPath, "{not json", "utf8");
|
||||
|
||||
await expect(readJsonFile<{ ok: boolean }>(jsonPath)).resolves.toEqual({ ok: true });
|
||||
await expect(readJsonFile(badPath)).rejects.toThrow();
|
||||
await expect(fileExists(jsonPath)).resolves.toBe(true);
|
||||
await expect(fileExists(path.join(dir, "missing.json"))).resolves.toBe(false);
|
||||
await expect(readJsonFileStrict<{ ok: boolean }>(jsonPath)).resolves.toEqual({ ok: true });
|
||||
await expect(readJsonFileStrict(badPath)).rejects.toThrow();
|
||||
await expect(pathExists(jsonPath)).resolves.toBe(true);
|
||||
await expect(pathExists(path.join(dir, "missing.json"))).resolves.toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,63 +1,8 @@
|
||||
import path from "node:path";
|
||||
import { resolveSafeBaseDir } from "./path-safety.js";
|
||||
|
||||
export function isWindowsDrivePath(value: string): boolean {
|
||||
return /^[a-zA-Z]:[\\/]/.test(value);
|
||||
}
|
||||
|
||||
export function normalizeArchiveEntryPath(raw: string): string {
|
||||
return raw.replaceAll("\\", "/");
|
||||
}
|
||||
|
||||
export function validateArchiveEntryPath(
|
||||
entryPath: string,
|
||||
params?: { escapeLabel?: string },
|
||||
): void {
|
||||
if (!entryPath || entryPath === "." || entryPath === "./") {
|
||||
return;
|
||||
}
|
||||
if (isWindowsDrivePath(entryPath)) {
|
||||
throw new Error(`archive entry uses a drive path: ${entryPath}`);
|
||||
}
|
||||
const normalized = path.posix.normalize(normalizeArchiveEntryPath(entryPath));
|
||||
const escapeLabel = params?.escapeLabel ?? "destination";
|
||||
if (normalized === ".." || normalized.startsWith("../")) {
|
||||
throw new Error(`archive entry escapes ${escapeLabel}: ${entryPath}`);
|
||||
}
|
||||
if (path.posix.isAbsolute(normalized) || normalized.startsWith("//")) {
|
||||
throw new Error(`archive entry is absolute: ${entryPath}`);
|
||||
}
|
||||
}
|
||||
|
||||
export function stripArchivePath(entryPath: string, stripComponents: number): string | null {
|
||||
const raw = normalizeArchiveEntryPath(entryPath);
|
||||
if (!raw || raw === "." || raw === "./") {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Mimic tar --strip-components semantics (raw segments before normalization)
|
||||
// so strip-induced escapes like "a/../b" are visible to validators.
|
||||
const parts = raw.split("/").filter((part) => part.length > 0 && part !== ".");
|
||||
const strip = Math.max(0, Math.floor(stripComponents));
|
||||
const stripped = strip === 0 ? parts.join("/") : parts.slice(strip).join("/");
|
||||
const result = path.posix.normalize(stripped);
|
||||
if (!result || result === "." || result === "./") {
|
||||
return null;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
export function resolveArchiveOutputPath(params: {
|
||||
rootDir: string;
|
||||
relPath: string;
|
||||
originalPath: string;
|
||||
escapeLabel?: string;
|
||||
}): string {
|
||||
const safeBase = resolveSafeBaseDir(params.rootDir);
|
||||
const outPath = path.resolve(params.rootDir, params.relPath);
|
||||
const escapeLabel = params.escapeLabel ?? "destination";
|
||||
if (!outPath.startsWith(safeBase)) {
|
||||
throw new Error(`archive entry escapes ${escapeLabel}: ${params.originalPath}`);
|
||||
}
|
||||
return outPath;
|
||||
}
|
||||
import "./fs-safe-defaults.js";
|
||||
export {
|
||||
isWindowsDrivePath,
|
||||
normalizeArchiveEntryPath,
|
||||
resolveArchiveOutputPath,
|
||||
stripArchivePath,
|
||||
validateArchiveEntryPath,
|
||||
} from "@openclaw/fs-safe/archive";
|
||||
|
||||
@@ -1,218 +1,10 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { copyFileWithinRoot } from "./fs-safe.js";
|
||||
import { isNotFoundPathError, isPathInside } from "./path-guards.js";
|
||||
|
||||
const ERROR_ARCHIVE_ENTRY_TRAVERSES_SYMLINK = "archive entry traverses symlink in destination";
|
||||
|
||||
export type ArchiveSecurityErrorCode =
|
||||
| "destination-not-directory"
|
||||
| "destination-symlink"
|
||||
| "destination-symlink-traversal";
|
||||
|
||||
export class ArchiveSecurityError extends Error {
|
||||
code: ArchiveSecurityErrorCode;
|
||||
|
||||
constructor(code: ArchiveSecurityErrorCode, message: string, options?: ErrorOptions) {
|
||||
super(message, options);
|
||||
this.code = code;
|
||||
this.name = "ArchiveSecurityError";
|
||||
}
|
||||
}
|
||||
|
||||
function symlinkTraversalError(originalPath: string): ArchiveSecurityError {
|
||||
return new ArchiveSecurityError(
|
||||
"destination-symlink-traversal",
|
||||
`${ERROR_ARCHIVE_ENTRY_TRAVERSES_SYMLINK}: ${originalPath}`,
|
||||
);
|
||||
}
|
||||
|
||||
export async function prepareArchiveDestinationDir(destDir: string): Promise<string> {
|
||||
const stat = await fs.lstat(destDir);
|
||||
if (stat.isSymbolicLink()) {
|
||||
throw new ArchiveSecurityError("destination-symlink", "archive destination is a symlink");
|
||||
}
|
||||
if (!stat.isDirectory()) {
|
||||
throw new ArchiveSecurityError(
|
||||
"destination-not-directory",
|
||||
"archive destination is not a directory",
|
||||
);
|
||||
}
|
||||
return await fs.realpath(destDir);
|
||||
}
|
||||
|
||||
async function assertNoSymlinkTraversal(params: {
|
||||
rootDir: string;
|
||||
relPath: string;
|
||||
originalPath: string;
|
||||
}): Promise<void> {
|
||||
const parts = params.relPath.split(/[\\/]+/).filter(Boolean);
|
||||
let current = path.resolve(params.rootDir);
|
||||
for (const part of parts) {
|
||||
current = path.join(current, part);
|
||||
let stat: Awaited<ReturnType<typeof fs.lstat>>;
|
||||
try {
|
||||
stat = await fs.lstat(current);
|
||||
} catch (err) {
|
||||
if (isNotFoundPathError(err)) {
|
||||
continue;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
if (stat.isSymbolicLink()) {
|
||||
throw symlinkTraversalError(params.originalPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function assertResolvedInsideDestination(params: {
|
||||
destinationRealDir: string;
|
||||
targetPath: string;
|
||||
originalPath: string;
|
||||
}): Promise<void> {
|
||||
let resolved: string;
|
||||
try {
|
||||
resolved = await fs.realpath(params.targetPath);
|
||||
} catch (err) {
|
||||
if (isNotFoundPathError(err)) {
|
||||
return;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
if (!isPathInside(params.destinationRealDir, resolved)) {
|
||||
throw symlinkTraversalError(params.originalPath);
|
||||
}
|
||||
}
|
||||
|
||||
export async function prepareArchiveOutputPath(params: {
|
||||
destinationDir: string;
|
||||
destinationRealDir: string;
|
||||
relPath: string;
|
||||
outPath: string;
|
||||
originalPath: string;
|
||||
isDirectory: boolean;
|
||||
}): Promise<void> {
|
||||
await assertNoSymlinkTraversal({
|
||||
rootDir: params.destinationDir,
|
||||
relPath: params.relPath,
|
||||
originalPath: params.originalPath,
|
||||
});
|
||||
|
||||
if (params.isDirectory) {
|
||||
await fs.mkdir(params.outPath, { recursive: true });
|
||||
await assertResolvedInsideDestination({
|
||||
destinationRealDir: params.destinationRealDir,
|
||||
targetPath: params.outPath,
|
||||
originalPath: params.originalPath,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const parentDir = path.dirname(params.outPath);
|
||||
await fs.mkdir(parentDir, { recursive: true });
|
||||
await assertResolvedInsideDestination({
|
||||
destinationRealDir: params.destinationRealDir,
|
||||
targetPath: parentDir,
|
||||
originalPath: params.originalPath,
|
||||
});
|
||||
}
|
||||
|
||||
async function applyStagedEntryMode(params: {
|
||||
destinationRealDir: string;
|
||||
relPath: string;
|
||||
mode: number;
|
||||
originalPath: string;
|
||||
}): Promise<void> {
|
||||
const destinationPath = path.join(params.destinationRealDir, params.relPath);
|
||||
await assertResolvedInsideDestination({
|
||||
destinationRealDir: params.destinationRealDir,
|
||||
targetPath: destinationPath,
|
||||
originalPath: params.originalPath,
|
||||
});
|
||||
if (params.mode !== 0) {
|
||||
await fs.chmod(destinationPath, params.mode).catch(() => undefined);
|
||||
}
|
||||
}
|
||||
|
||||
export async function withStagedArchiveDestination<T>(params: {
|
||||
destinationRealDir: string;
|
||||
run: (stagingDir: string) => Promise<T>;
|
||||
}): Promise<T> {
|
||||
const stagingDir = await fs.mkdtemp(path.join(params.destinationRealDir, ".openclaw-archive-"));
|
||||
try {
|
||||
return await params.run(stagingDir);
|
||||
} finally {
|
||||
await fs.rm(stagingDir, { recursive: true, force: true }).catch(() => undefined);
|
||||
}
|
||||
}
|
||||
|
||||
export async function mergeExtractedTreeIntoDestination(params: {
|
||||
sourceDir: string;
|
||||
destinationDir: string;
|
||||
destinationRealDir: string;
|
||||
}): Promise<void> {
|
||||
const walk = async (currentSourceDir: string): Promise<void> => {
|
||||
const entries = await fs.readdir(currentSourceDir, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
const sourcePath = path.join(currentSourceDir, entry.name);
|
||||
const relPath = path.relative(params.sourceDir, sourcePath);
|
||||
const originalPath = relPath.split(path.sep).join("/");
|
||||
const destinationPath = path.join(params.destinationDir, relPath);
|
||||
const sourceStat = await fs.lstat(sourcePath);
|
||||
|
||||
if (sourceStat.isSymbolicLink()) {
|
||||
throw symlinkTraversalError(originalPath);
|
||||
}
|
||||
|
||||
if (sourceStat.isDirectory()) {
|
||||
await prepareArchiveOutputPath({
|
||||
destinationDir: params.destinationDir,
|
||||
destinationRealDir: params.destinationRealDir,
|
||||
relPath,
|
||||
outPath: destinationPath,
|
||||
originalPath,
|
||||
isDirectory: true,
|
||||
});
|
||||
await walk(sourcePath);
|
||||
await applyStagedEntryMode({
|
||||
destinationRealDir: params.destinationRealDir,
|
||||
relPath,
|
||||
mode: sourceStat.mode & 0o777,
|
||||
originalPath,
|
||||
});
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!sourceStat.isFile()) {
|
||||
throw new Error(`archive staging contains unsupported entry: ${originalPath}`);
|
||||
}
|
||||
|
||||
await prepareArchiveOutputPath({
|
||||
destinationDir: params.destinationDir,
|
||||
destinationRealDir: params.destinationRealDir,
|
||||
relPath,
|
||||
outPath: destinationPath,
|
||||
originalPath,
|
||||
isDirectory: false,
|
||||
});
|
||||
await copyFileWithinRoot({
|
||||
sourcePath,
|
||||
rootDir: params.destinationRealDir,
|
||||
relativePath: relPath,
|
||||
mkdir: true,
|
||||
});
|
||||
await applyStagedEntryMode({
|
||||
destinationRealDir: params.destinationRealDir,
|
||||
relPath,
|
||||
mode: sourceStat.mode & 0o777,
|
||||
originalPath,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
await walk(params.sourceDir);
|
||||
}
|
||||
|
||||
export function createArchiveSymlinkTraversalError(originalPath: string): ArchiveSecurityError {
|
||||
return symlinkTraversalError(originalPath);
|
||||
}
|
||||
import "./fs-safe-defaults.js";
|
||||
export {
|
||||
ArchiveSecurityError,
|
||||
createArchiveSymlinkTraversalError,
|
||||
mergeExtractedTreeIntoDestination,
|
||||
prepareArchiveDestinationDir,
|
||||
prepareArchiveOutputPath,
|
||||
withStagedArchiveDestination,
|
||||
type ArchiveSecurityErrorCode,
|
||||
} from "@openclaw/fs-safe/archive";
|
||||
|
||||
@@ -1,891 +1,27 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { constants as fsConstants } from "node:fs";
|
||||
import type { Stats } from "node:fs";
|
||||
import type { FileHandle } from "node:fs/promises";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { Readable, Transform } from "node:stream";
|
||||
import { pipeline } from "node:stream/promises";
|
||||
import JSZip from "jszip";
|
||||
import * as tar from "tar";
|
||||
import { normalizeLowercaseStringOrEmpty } from "../shared/string-coerce.js";
|
||||
import {
|
||||
resolveArchiveOutputPath,
|
||||
stripArchivePath,
|
||||
validateArchiveEntryPath,
|
||||
} from "./archive-path.js";
|
||||
import {
|
||||
createArchiveSymlinkTraversalError,
|
||||
mergeExtractedTreeIntoDestination,
|
||||
prepareArchiveDestinationDir,
|
||||
prepareArchiveOutputPath,
|
||||
withStagedArchiveDestination,
|
||||
} from "./archive-staging.js";
|
||||
import { sameFileIdentity } from "./file-identity.js";
|
||||
import { openFileWithinRoot, openWritableFileWithinRoot, SafeOpenError } from "./fs-safe.js";
|
||||
import { isNotFoundPathError } from "./path-guards.js";
|
||||
|
||||
export type ArchiveKind = "tar" | "zip";
|
||||
|
||||
export type ArchiveLogger = {
|
||||
info?: (message: string) => void;
|
||||
warn?: (message: string) => void;
|
||||
};
|
||||
|
||||
export type ArchiveExtractLimits = {
|
||||
/**
|
||||
* Max archive file bytes (compressed).
|
||||
*/
|
||||
maxArchiveBytes?: number;
|
||||
/** Max number of extracted entries (files + dirs). */
|
||||
maxEntries?: number;
|
||||
/** Max extracted bytes (sum of all files). */
|
||||
maxExtractedBytes?: number;
|
||||
/** Max extracted bytes for a single file entry. */
|
||||
maxEntryBytes?: number;
|
||||
};
|
||||
|
||||
export { ArchiveSecurityError, type ArchiveSecurityErrorCode } from "./archive-staging.js";
|
||||
import "./fs-safe-defaults.js";
|
||||
export {
|
||||
ARCHIVE_LIMIT_ERROR_CODE,
|
||||
ArchiveLimitError,
|
||||
ArchiveSecurityError,
|
||||
DEFAULT_MAX_ARCHIVE_BYTES_ZIP,
|
||||
DEFAULT_MAX_ENTRIES,
|
||||
DEFAULT_MAX_EXTRACTED_BYTES,
|
||||
DEFAULT_MAX_ENTRY_BYTES,
|
||||
createArchiveSymlinkTraversalError,
|
||||
createTarEntryPreflightChecker,
|
||||
extractArchive,
|
||||
loadZipArchiveWithPreflight,
|
||||
mergeExtractedTreeIntoDestination,
|
||||
prepareArchiveDestinationDir,
|
||||
prepareArchiveOutputPath,
|
||||
readZipCentralDirectoryEntryCount,
|
||||
resolveArchiveKind,
|
||||
resolvePackedRootDir,
|
||||
withStagedArchiveDestination,
|
||||
} from "./archive-staging.js";
|
||||
|
||||
/** @internal */
|
||||
export const DEFAULT_MAX_ARCHIVE_BYTES_ZIP = 256 * 1024 * 1024;
|
||||
/** @internal */
|
||||
export const DEFAULT_MAX_ENTRIES = 50_000;
|
||||
/** @internal */
|
||||
export const DEFAULT_MAX_EXTRACTED_BYTES = 512 * 1024 * 1024;
|
||||
/** @internal */
|
||||
export const DEFAULT_MAX_ENTRY_BYTES = 256 * 1024 * 1024;
|
||||
|
||||
export const ARCHIVE_LIMIT_ERROR_CODE = {
|
||||
ARCHIVE_SIZE_EXCEEDS_LIMIT: "archive-size-exceeds-limit",
|
||||
ENTRY_COUNT_EXCEEDS_LIMIT: "archive-entry-count-exceeds-limit",
|
||||
ENTRY_EXTRACTED_SIZE_EXCEEDS_LIMIT: "archive-entry-extracted-size-exceeds-limit",
|
||||
EXTRACTED_SIZE_EXCEEDS_LIMIT: "archive-extracted-size-exceeds-limit",
|
||||
} as const;
|
||||
|
||||
export type ArchiveLimitErrorCode =
|
||||
(typeof ARCHIVE_LIMIT_ERROR_CODE)[keyof typeof ARCHIVE_LIMIT_ERROR_CODE];
|
||||
|
||||
const ARCHIVE_LIMIT_ERROR_MESSAGE = {
|
||||
[ARCHIVE_LIMIT_ERROR_CODE.ARCHIVE_SIZE_EXCEEDS_LIMIT]: "archive size exceeds limit",
|
||||
[ARCHIVE_LIMIT_ERROR_CODE.ENTRY_COUNT_EXCEEDS_LIMIT]: "archive entry count exceeds limit",
|
||||
[ARCHIVE_LIMIT_ERROR_CODE.ENTRY_EXTRACTED_SIZE_EXCEEDS_LIMIT]:
|
||||
"archive entry extracted size exceeds limit",
|
||||
[ARCHIVE_LIMIT_ERROR_CODE.EXTRACTED_SIZE_EXCEEDS_LIMIT]: "archive extracted size exceeds limit",
|
||||
} as const satisfies Record<ArchiveLimitErrorCode, string>;
|
||||
|
||||
export class ArchiveLimitError extends Error {
|
||||
readonly code: ArchiveLimitErrorCode;
|
||||
|
||||
constructor(code: ArchiveLimitErrorCode) {
|
||||
super(ARCHIVE_LIMIT_ERROR_MESSAGE[code]);
|
||||
this.name = "ArchiveLimitError";
|
||||
this.code = code;
|
||||
}
|
||||
}
|
||||
|
||||
// Zip end-of-central-directory (EOCD) record signatures.
const ZIP_EOCD_SIGNATURE = 0x06054b50;
const ZIP64_EOCD_SIGNATURE = 0x06064b50;
const ZIP64_EOCD_LOCATOR_SIGNATURE = 0x07064b50;
// Minimum EOCD record size (with an empty archive comment).
const ZIP_EOCD_MIN_BYTES = 22;
// The archive comment length is a u16, so the EOCD sits at most this far from EOF.
const ZIP_EOCD_MAX_COMMENT_BYTES = 0xffff;
// Sentinel values meaning "the real value lives in the zip64 EOCD record".
const ZIP64_ENTRY_COUNT_SENTINEL = 0xffff;
const ZIP64_UINT32_SENTINEL = 0xffffffff;
// Central directory file header layout (fixed part + field offsets).
const ZIP_CENTRAL_FILE_HEADER_SIGNATURE = 0x02014b50;
const ZIP_CENTRAL_FILE_HEADER_MIN_BYTES = 46;
const ZIP_CENTRAL_FILE_HEADER_NAME_LENGTH_OFFSET = 28;
const ZIP_CENTRAL_FILE_HEADER_EXTRA_LENGTH_OFFSET = 30;
const ZIP_CENTRAL_FILE_HEADER_COMMENT_LENGTH_OFFSET = 32;
// Field offsets within the classic EOCD record.
const ZIP_EOCD_TOTAL_ENTRIES_OFFSET = 10;
const ZIP_EOCD_CENTRAL_DIRECTORY_SIZE_OFFSET = 12;
const ZIP_EOCD_CENTRAL_DIRECTORY_OFFSET_OFFSET = 16;
const ZIP_EOCD_COMMENT_LENGTH_OFFSET = 20;
// Zip64 EOCD locator/record layout.
const ZIP64_EOCD_LOCATOR_BYTES = 20;
const ZIP64_EOCD_OFFSET_OFFSET = 8;
const ZIP64_EOCD_TOTAL_ENTRIES_OFFSET = 32;
const ZIP64_EOCD_CENTRAL_DIRECTORY_SIZE_OFFSET = 40;
const ZIP64_EOCD_CENTRAL_DIRECTORY_OFFSET_OFFSET = 48;
// O_NOFOLLOW is unavailable on Windows; only OR it into open flags when present.
const SUPPORTS_NOFOLLOW = process.platform !== "win32" && "O_NOFOLLOW" in fsConstants;
// Exclusive-create write flags for extracted files: O_EXCL refuses pre-existing
// paths and O_NOFOLLOW (where supported) refuses writing through a symlink.
const OPEN_WRITE_CREATE_FLAGS =
  fsConstants.O_WRONLY |
  fsConstants.O_CREAT |
  fsConstants.O_EXCL |
  (SUPPORTS_NOFOLLOW ? fsConstants.O_NOFOLLOW : 0);

// File suffixes treated as tar archives (gzip'd or plain).
const TAR_SUFFIXES = [".tgz", ".tar.gz", ".tar"];
|
||||
|
||||
export function resolveArchiveKind(filePath: string): ArchiveKind | null {
|
||||
const lower = normalizeLowercaseStringOrEmpty(filePath);
|
||||
if (lower.endsWith(".zip")) {
|
||||
return "zip";
|
||||
}
|
||||
if (TAR_SUFFIXES.some((suffix) => lower.endsWith(suffix))) {
|
||||
return "tar";
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// Options for resolvePackedRootDir.
type ResolvePackedRootDirOptions = {
  // File/dir names whose presence marks extractDir itself as the package root.
  rootMarkers?: string[];
};
|
||||
|
||||
async function hasPackedRootMarker(extractDir: string, rootMarkers: string[]): Promise<boolean> {
|
||||
for (const marker of rootMarkers) {
|
||||
const trimmed = marker.trim();
|
||||
if (!trimmed) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
await fs.stat(path.join(extractDir, trimmed));
|
||||
return true;
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export async function resolvePackedRootDir(
|
||||
extractDir: string,
|
||||
options?: ResolvePackedRootDirOptions,
|
||||
): Promise<string> {
|
||||
const direct = path.join(extractDir, "package");
|
||||
try {
|
||||
const stat = await fs.stat(direct);
|
||||
if (stat.isDirectory()) {
|
||||
return direct;
|
||||
}
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
|
||||
if ((options?.rootMarkers?.length ?? 0) > 0) {
|
||||
const hasMarker = await hasPackedRootMarker(extractDir, options?.rootMarkers ?? []);
|
||||
if (hasMarker) {
|
||||
return extractDir;
|
||||
}
|
||||
}
|
||||
|
||||
const entries = await fs.readdir(extractDir, { withFileTypes: true });
|
||||
const dirs = entries.filter((entry) => entry.isDirectory()).map((entry) => entry.name);
|
||||
if (dirs.length !== 1) {
|
||||
throw new Error(`unexpected archive layout (dirs: ${dirs.join(", ")})`);
|
||||
}
|
||||
const onlyDir = dirs[0];
|
||||
if (!onlyDir) {
|
||||
throw new Error("unexpected archive layout (no package dir found)");
|
||||
}
|
||||
return path.join(extractDir, onlyDir);
|
||||
}
|
||||
|
||||
export async function withTimeout<T>(
|
||||
promise: Promise<T>,
|
||||
timeoutMs: number,
|
||||
label: string,
|
||||
): Promise<T> {
|
||||
let timeoutId: ReturnType<typeof setTimeout> | undefined;
|
||||
try {
|
||||
return await Promise.race([
|
||||
promise,
|
||||
new Promise<T>((_, reject) => {
|
||||
timeoutId = setTimeout(
|
||||
() => reject(new Error(`${label} timed out after ${timeoutMs}ms`)),
|
||||
timeoutMs,
|
||||
);
|
||||
}),
|
||||
]);
|
||||
} finally {
|
||||
if (timeoutId) {
|
||||
clearTimeout(timeoutId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extraction limits with every optional field resolved to a concrete value.
type ResolvedArchiveExtractLimits = Required<ArchiveExtractLimits>;
|
||||
|
||||
function clampLimit(value: number | undefined): number | undefined {
|
||||
if (typeof value !== "number" || !Number.isFinite(value)) {
|
||||
return undefined;
|
||||
}
|
||||
const v = Math.floor(value);
|
||||
return v > 0 ? v : undefined;
|
||||
}
|
||||
|
||||
function resolveExtractLimits(limits?: ArchiveExtractLimits): ResolvedArchiveExtractLimits {
|
||||
// Defaults: defensive, but should not break normal installs.
|
||||
return {
|
||||
maxArchiveBytes: clampLimit(limits?.maxArchiveBytes) ?? DEFAULT_MAX_ARCHIVE_BYTES_ZIP,
|
||||
maxEntries: clampLimit(limits?.maxEntries) ?? DEFAULT_MAX_ENTRIES,
|
||||
maxExtractedBytes: clampLimit(limits?.maxExtractedBytes) ?? DEFAULT_MAX_EXTRACTED_BYTES,
|
||||
maxEntryBytes: clampLimit(limits?.maxEntryBytes) ?? DEFAULT_MAX_ENTRY_BYTES,
|
||||
};
|
||||
}
|
||||
|
||||
function assertArchiveEntryCountWithinLimit(
|
||||
entryCount: number,
|
||||
limits: ResolvedArchiveExtractLimits,
|
||||
) {
|
||||
if (entryCount > limits.maxEntries) {
|
||||
throw new ArchiveLimitError(ARCHIVE_LIMIT_ERROR_CODE.ENTRY_COUNT_EXCEEDS_LIMIT);
|
||||
}
|
||||
}
|
||||
|
||||
function asBufferView(buffer: Buffer | Uint8Array): Buffer {
|
||||
if (Buffer.isBuffer(buffer)) {
|
||||
return buffer;
|
||||
}
|
||||
return Buffer.from(buffer.buffer, buffer.byteOffset, buffer.byteLength);
|
||||
}
|
||||
|
||||
function readSafeUInt64LE(buffer: Buffer, offset: number): number {
|
||||
const value = buffer.readBigUInt64LE(offset);
|
||||
if (value > BigInt(Number.MAX_SAFE_INTEGER)) {
|
||||
return Number.MAX_SAFE_INTEGER;
|
||||
}
|
||||
return Number(value);
|
||||
}
|
||||
|
||||
/**
 * Locate the zip end-of-central-directory (EOCD) record by scanning backwards
 * from EOF. A candidate offset is accepted only when its signature matches
 * AND its comment-length field makes the record end exactly at EOF, which
 * rejects signature bytes that merely appear inside entry data or a comment.
 * Returns the EOCD offset, or -1 when none is found.
 */
function findZipEndOfCentralDirectory(buffer: Buffer): number {
  if (buffer.byteLength < ZIP_EOCD_MIN_BYTES) {
    return -1;
  }
  // The archive comment is at most 0xffff bytes, bounding how far back to scan.
  const minOffset = Math.max(
    0,
    buffer.byteLength - ZIP_EOCD_MIN_BYTES - ZIP_EOCD_MAX_COMMENT_BYTES,
  );
  for (let offset = buffer.byteLength - ZIP_EOCD_MIN_BYTES; offset >= minOffset; offset -= 1) {
    if (buffer.readUInt32LE(offset) !== ZIP_EOCD_SIGNATURE) {
      continue;
    }
    const commentLength = buffer.readUInt16LE(offset + ZIP_EOCD_COMMENT_LENGTH_OFFSET);
    // Only accept a candidate whose declared comment ends exactly at EOF.
    if (offset + ZIP_EOCD_MIN_BYTES + commentLength === buffer.byteLength) {
      return offset;
    }
  }
  return -1;
}
|
||||
|
||||
// Summary of the zip central directory as declared by the (zip64) EOCD record.
type ZipCentralDirectoryInfo = {
  // Entry count claimed by the EOCD (may differ from the real header count).
  declaredEntryCount: number;
  // Byte offset where the central directory starts.
  centralDirectoryOffset: number;
  // Declared byte size of the central directory.
  centralDirectorySize: number;
  // Offset of the EOCD record itself (upper bound for walking headers).
  endOfCentralDirectoryOffset: number;
};
|
||||
|
||||
/**
 * Read the zip64 EOCD record via its locator (placed immediately before the
 * classic EOCD). Returns null when the locator or the zip64 record is
 * missing, out of bounds, or carries a bad signature, so the caller can fall
 * back to the classic (possibly sentinel-valued) EOCD fields.
 * 64-bit fields are clamped to Number.MAX_SAFE_INTEGER by readSafeUInt64LE.
 */
function readZip64CentralDirectoryInfo(
  buffer: Buffer,
  eocdOffset: number,
): ZipCentralDirectoryInfo | null {
  const locatorOffset = eocdOffset - ZIP64_EOCD_LOCATOR_BYTES;
  if (locatorOffset < 0 || buffer.readUInt32LE(locatorOffset) !== ZIP64_EOCD_LOCATOR_SIGNATURE) {
    return null;
  }
  const zip64EocdOffset = readSafeUInt64LE(buffer, locatorOffset + ZIP64_EOCD_OFFSET_OFFSET);
  // Bounds-check before dereferencing: the offset comes from untrusted input.
  if (
    zip64EocdOffset < 0 ||
    zip64EocdOffset + ZIP64_EOCD_CENTRAL_DIRECTORY_OFFSET_OFFSET + 8 > buffer.byteLength ||
    buffer.readUInt32LE(zip64EocdOffset) !== ZIP64_EOCD_SIGNATURE
  ) {
    return null;
  }
  return {
    declaredEntryCount: readSafeUInt64LE(buffer, zip64EocdOffset + ZIP64_EOCD_TOTAL_ENTRIES_OFFSET),
    centralDirectorySize: readSafeUInt64LE(
      buffer,
      zip64EocdOffset + ZIP64_EOCD_CENTRAL_DIRECTORY_SIZE_OFFSET,
    ),
    centralDirectoryOffset: readSafeUInt64LE(
      buffer,
      zip64EocdOffset + ZIP64_EOCD_CENTRAL_DIRECTORY_OFFSET_OFFSET,
    ),
    endOfCentralDirectoryOffset: eocdOffset,
  };
}
|
||||
|
||||
/**
 * Read central-directory info from the classic EOCD record, upgrading to the
 * zip64 record when any field holds its "see zip64" sentinel. If zip64 is
 * advertised but the zip64 record is unreadable, the sentinel values are kept
 * so a later sanity pass can still reject the archive.
 * Returns null when no EOCD record exists (not a zip, or truncated).
 */
function readZipCentralDirectoryInfo(buffer: Buffer): ZipCentralDirectoryInfo | null {
  const eocdOffset = findZipEndOfCentralDirectory(buffer);
  if (eocdOffset < 0) {
    return null;
  }
  const declaredEntryCount = buffer.readUInt16LE(eocdOffset + ZIP_EOCD_TOTAL_ENTRIES_OFFSET);
  const centralDirectorySize = buffer.readUInt32LE(
    eocdOffset + ZIP_EOCD_CENTRAL_DIRECTORY_SIZE_OFFSET,
  );
  const centralDirectoryOffset = buffer.readUInt32LE(
    eocdOffset + ZIP_EOCD_CENTRAL_DIRECTORY_OFFSET_OFFSET,
  );
  // A maxed-out field signals that the real value lives in the zip64 EOCD.
  const usesZip64 =
    declaredEntryCount === ZIP64_ENTRY_COUNT_SENTINEL ||
    centralDirectorySize === ZIP64_UINT32_SENTINEL ||
    centralDirectoryOffset === ZIP64_UINT32_SENTINEL;
  if (usesZip64) {
    return (
      readZip64CentralDirectoryInfo(buffer, eocdOffset) ?? {
        declaredEntryCount,
        centralDirectoryOffset,
        centralDirectorySize,
        endOfCentralDirectoryOffset: eocdOffset,
      }
    );
  }
  return {
    declaredEntryCount,
    centralDirectoryOffset,
    centralDirectorySize,
    endOfCentralDirectoryOffset: eocdOffset,
  };
}
|
||||
|
||||
/**
 * Walk the central directory and count file headers directly, as a
 * cross-check against the (spoofable) declared entry count.
 * Returns null when the declared layout is inconsistent (bad bounds, or a
 * header overrunning the scan range) — callers then distrust the walk.
 * A zero count is only trusted when the archive also declares zero entries.
 */
function countZipCentralDirectoryHeaders(
  buffer: Buffer,
  info: ZipCentralDirectoryInfo,
): number | null {
  const start = info.centralDirectoryOffset;
  const declaredEnd = start + info.centralDirectorySize;
  const scanEnd = info.endOfCentralDirectoryOffset;
  // Reject non-integer or out-of-range bounds before touching the buffer.
  if (
    !Number.isSafeInteger(start) ||
    !Number.isSafeInteger(declaredEnd) ||
    !Number.isSafeInteger(scanEnd) ||
    start < 0 ||
    declaredEnd < start ||
    scanEnd < start ||
    scanEnd > buffer.byteLength
  ) {
    return null;
  }
  let offset = start;
  let count = 0;
  while (offset < scanEnd) {
    if (scanEnd - offset < ZIP_CENTRAL_FILE_HEADER_MIN_BYTES) {
      break;
    }
    if (buffer.readUInt32LE(offset) !== ZIP_CENTRAL_FILE_HEADER_SIGNATURE) {
      break;
    }
    // Header size = fixed part + variable name/extra/comment fields.
    const nameLength = buffer.readUInt16LE(offset + ZIP_CENTRAL_FILE_HEADER_NAME_LENGTH_OFFSET);
    const extraLength = buffer.readUInt16LE(offset + ZIP_CENTRAL_FILE_HEADER_EXTRA_LENGTH_OFFSET);
    const commentLength = buffer.readUInt16LE(
      offset + ZIP_CENTRAL_FILE_HEADER_COMMENT_LENGTH_OFFSET,
    );
    const nextOffset =
      offset + ZIP_CENTRAL_FILE_HEADER_MIN_BYTES + nameLength + extraLength + commentLength;
    // A non-advancing or overrunning header means the directory is corrupt.
    if (nextOffset <= offset || nextOffset > scanEnd) {
      return null;
    }
    count += 1;
    offset = nextOffset;
  }
  return count > 0 || info.declaredEntryCount === 0 ? count : null;
}
|
||||
|
||||
/** @internal */
|
||||
export function readZipCentralDirectoryEntryCount(buffer: Buffer | Uint8Array): number | null {
|
||||
const view = asBufferView(buffer);
|
||||
const info = readZipCentralDirectoryInfo(view);
|
||||
if (!info) {
|
||||
return null;
|
||||
}
|
||||
const countedEntryCount = countZipCentralDirectoryHeaders(view, info);
|
||||
return countedEntryCount === null
|
||||
? info.declaredEntryCount
|
||||
: Math.max(info.declaredEntryCount, countedEntryCount);
|
||||
}
|
||||
|
||||
export async function loadZipArchiveWithPreflight(
|
||||
buffer: Buffer | Uint8Array,
|
||||
limits?: ArchiveExtractLimits,
|
||||
): Promise<JSZip> {
|
||||
const resolvedLimits = resolveExtractLimits(limits);
|
||||
if (buffer.byteLength > resolvedLimits.maxArchiveBytes) {
|
||||
throw new ArchiveLimitError(ARCHIVE_LIMIT_ERROR_CODE.ARCHIVE_SIZE_EXCEEDS_LIMIT);
|
||||
}
|
||||
const entryCount = readZipCentralDirectoryEntryCount(buffer);
|
||||
if (entryCount !== null) {
|
||||
assertArchiveEntryCountWithinLimit(entryCount, resolvedLimits);
|
||||
}
|
||||
return await JSZip.loadAsync(buffer);
|
||||
}
|
||||
|
||||
function createByteBudgetTracker(limits: ResolvedArchiveExtractLimits): {
|
||||
startEntry: () => void;
|
||||
addBytes: (bytes: number) => void;
|
||||
addEntrySize: (size: number) => void;
|
||||
} {
|
||||
let entryBytes = 0;
|
||||
let extractedBytes = 0;
|
||||
|
||||
const addBytes = (bytes: number) => {
|
||||
const b = Math.max(0, Math.floor(bytes));
|
||||
if (b === 0) {
|
||||
return;
|
||||
}
|
||||
entryBytes += b;
|
||||
if (entryBytes > limits.maxEntryBytes) {
|
||||
throw new ArchiveLimitError(ARCHIVE_LIMIT_ERROR_CODE.ENTRY_EXTRACTED_SIZE_EXCEEDS_LIMIT);
|
||||
}
|
||||
extractedBytes += b;
|
||||
if (extractedBytes > limits.maxExtractedBytes) {
|
||||
throw new ArchiveLimitError(ARCHIVE_LIMIT_ERROR_CODE.EXTRACTED_SIZE_EXCEEDS_LIMIT);
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
startEntry() {
|
||||
entryBytes = 0;
|
||||
},
|
||||
addBytes,
|
||||
addEntrySize(size: number) {
|
||||
const s = Math.max(0, Math.floor(size));
|
||||
if (s > limits.maxEntryBytes) {
|
||||
throw new ArchiveLimitError(ARCHIVE_LIMIT_ERROR_CODE.ENTRY_EXTRACTED_SIZE_EXCEEDS_LIMIT);
|
||||
}
|
||||
// Note: tar budgets are based on the header-declared size.
|
||||
addBytes(s);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
function createExtractBudgetTransform(params: {
|
||||
onChunkBytes: (bytes: number) => void;
|
||||
}): Transform {
|
||||
return new Transform({
|
||||
transform(chunk, _encoding, callback) {
|
||||
try {
|
||||
const buf = chunk instanceof Buffer ? chunk : Buffer.from(chunk as Uint8Array);
|
||||
params.onChunkBytes(buf.byteLength);
|
||||
callback(null, buf);
|
||||
} catch (err) {
|
||||
callback(err instanceof Error ? err : new Error(String(err)));
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
/** Build the canonical symlink-traversal rejection error for an archive entry path. */
function symlinkTraversalError(originalPath: string) {
  return createArchiveSymlinkTraversalError(originalPath);
}
|
||||
|
||||
// Result of safely opening a zip output file inside the destination root.
type OpenZipOutputFileResult = {
  // Open handle for the target file.
  handle: FileHandle;
  // Presumably true when this open created the file for writing — defined by
  // the fs-safe openWritableFileWithinRoot contract; confirm there.
  createdForWrite: boolean;
  // Canonical (resolved) path that was actually opened.
  openedRealPath: string;
  // Stat of the opened file, used for later identity checks.
  openedStat: Stats;
};
|
||||
|
||||
async function openZipOutputFile(params: {
|
||||
relPath: string;
|
||||
originalPath: string;
|
||||
destinationRealDir: string;
|
||||
}): Promise<OpenZipOutputFileResult> {
|
||||
try {
|
||||
return await openWritableFileWithinRoot({
|
||||
rootDir: params.destinationRealDir,
|
||||
relativePath: params.relPath,
|
||||
mkdir: false,
|
||||
mode: 0o666,
|
||||
});
|
||||
} catch (err) {
|
||||
if (
|
||||
err instanceof SafeOpenError &&
|
||||
(err.code === "invalid-path" ||
|
||||
err.code === "outside-workspace" ||
|
||||
err.code === "path-mismatch")
|
||||
) {
|
||||
throw symlinkTraversalError(params.originalPath);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async function cleanupPartialRegularFile(filePath: string): Promise<void> {
|
||||
let stat: Awaited<ReturnType<typeof fs.lstat>>;
|
||||
try {
|
||||
stat = await fs.lstat(filePath);
|
||||
} catch (err) {
|
||||
if (isNotFoundPathError(err)) {
|
||||
return;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
if (stat.isFile()) {
|
||||
await fs.unlink(filePath).catch(() => undefined);
|
||||
}
|
||||
}
|
||||
|
||||
function buildArchiveAtomicTempPath(targetPath: string): string {
|
||||
return path.join(
|
||||
path.dirname(targetPath),
|
||||
`.${path.basename(targetPath)}.${process.pid}.${randomUUID()}.tmp`,
|
||||
);
|
||||
}
|
||||
|
||||
async function verifyZipWriteResult(params: {
|
||||
destinationRealDir: string;
|
||||
relPath: string;
|
||||
expectedStat: Stats;
|
||||
}): Promise<string> {
|
||||
const opened = await openFileWithinRoot({
|
||||
rootDir: params.destinationRealDir,
|
||||
relativePath: params.relPath,
|
||||
rejectHardlinks: true,
|
||||
});
|
||||
try {
|
||||
if (!sameFileIdentity(opened.stat, params.expectedStat)) {
|
||||
throw new SafeOpenError("path-mismatch", "path changed during zip extract");
|
||||
}
|
||||
return opened.realPath;
|
||||
} finally {
|
||||
await opened.handle.close().catch(() => undefined);
|
||||
}
|
||||
}
|
||||
|
||||
// Structural view of a JSZip entry (covers both old and new JSZip versions).
type ZipEntry = {
  // Entry path as stored in the archive.
  name: string;
  // True for directory entries.
  dir: boolean;
  // Unix mode bits, when the archive recorded them.
  unixPermissions?: number;
  // Streaming reader (newer JSZip only).
  nodeStream?: () => NodeJS.ReadableStream;
  // Buffered reader (available in all supported JSZip versions).
  async: (type: "nodebuffer") => Promise<Buffer>;
};
|
||||
|
||||
// Byte-budget tracker shape shared by the zip extraction helpers.
type ZipExtractBudget = ReturnType<typeof createByteBudgetTracker>;
|
||||
|
||||
async function readZipEntryStream(entry: ZipEntry): Promise<NodeJS.ReadableStream> {
|
||||
if (typeof entry.nodeStream === "function") {
|
||||
return entry.nodeStream();
|
||||
}
|
||||
// Old JSZip: fall back to buffering, but still extract via a stream.
|
||||
const buf = await entry.async("nodebuffer");
|
||||
return Readable.from(buf);
|
||||
}
|
||||
|
||||
function resolveZipOutputPath(params: {
|
||||
entryPath: string;
|
||||
strip: number;
|
||||
destinationDir: string;
|
||||
}): { relPath: string; outPath: string } | null {
|
||||
validateArchiveEntryPath(params.entryPath);
|
||||
const relPath = stripArchivePath(params.entryPath, params.strip);
|
||||
if (!relPath) {
|
||||
return null;
|
||||
}
|
||||
validateArchiveEntryPath(relPath);
|
||||
return {
|
||||
relPath,
|
||||
outPath: resolveArchiveOutputPath({
|
||||
rootDir: params.destinationDir,
|
||||
relPath,
|
||||
originalPath: params.entryPath,
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Prepare the output location for one zip entry. Thin delegation to the
 * shared archive helper; the concrete preparation behavior is defined by
 * prepareArchiveOutputPath in the fs-safe layer.
 */
async function prepareZipOutputPath(params: {
  destinationDir: string;
  destinationRealDir: string;
  relPath: string;
  outPath: string;
  originalPath: string;
  isDirectory: boolean;
}): Promise<void> {
  await prepareArchiveOutputPath(params);
}
|
||||
|
||||
/**
 * Extract one zip file entry atomically inside the destination root:
 *  1. open the target through the safe-open boundary,
 *  2. stream the entry into a hidden temp file while charging byte budgets,
 *  3. rename the temp file over the target,
 *  4. re-open and verify the result is the exact file we wrote,
 *  5. best-effort restore of recorded unix permission bits.
 * On failure the temp file (or a partial regular file at the target) is
 * removed, and safe-open boundary violations surface as symlink-traversal
 * errors carrying the original entry name.
 */
async function writeZipFileEntry(params: {
  entry: ZipEntry;
  relPath: string;
  destinationRealDir: string;
  budget: ZipExtractBudget;
}): Promise<void> {
  const opened = await openZipOutputFile({
    relPath: params.relPath,
    originalPath: params.entry.name,
    destinationRealDir: params.destinationRealDir,
  });
  params.budget.startEntry();
  const readable = await readZipEntryStream(params.entry);
  // Remember where the boundary-checked open landed, then release the handle:
  // the actual bytes go to a temp file that is renamed over this path.
  const destinationPath = opened.openedRealPath;
  const targetMode = opened.openedStat.mode & 0o777;
  await opened.handle.close().catch(() => undefined);

  let tempHandle: FileHandle | null = null;
  let tempPath: string | null = null;
  let tempStat: Stats | null = null;
  // The write stream closes the fd itself on success; track that so we do
  // not double-close in the finally block.
  let handleClosedByStream = false;

  try {
    tempPath = buildArchiveAtomicTempPath(destinationPath);
    tempHandle = await fs.open(tempPath, OPEN_WRITE_CREATE_FLAGS, targetMode || 0o666);
    const writable = tempHandle.createWriteStream();
    writable.once("close", () => {
      handleClosedByStream = true;
    });

    // Budget transform throws mid-stream when a limit is exceeded,
    // failing the pipeline before more bytes hit disk.
    await pipeline(
      readable,
      createExtractBudgetTransform({ onChunkBytes: params.budget.addBytes }),
      writable,
    );
    tempStat = await fs.stat(tempPath);
    if (!tempStat) {
      throw new Error("zip temp write did not produce file metadata");
    }
    if (!handleClosedByStream) {
      await tempHandle.close().catch(() => undefined);
      handleClosedByStream = true;
    }
    tempHandle = null;
    await fs.rename(tempPath, destinationPath);
    // Null out tempPath: from here, failures clean up the destination instead.
    tempPath = null;
    const verifiedPath = await verifyZipWriteResult({
      destinationRealDir: params.destinationRealDir,
      relPath: params.relPath,
      expectedStat: tempStat,
    });

    // Best-effort permission restore for zip entries created on unix.
    if (typeof params.entry.unixPermissions === "number") {
      const mode = params.entry.unixPermissions & 0o777;
      if (mode !== 0) {
        await fs.chmod(verifiedPath, mode).catch(() => undefined);
      }
    }
  } catch (err) {
    if (tempPath) {
      await fs.rm(tempPath, { force: true }).catch(() => undefined);
    } else {
      await cleanupPartialRegularFile(destinationPath).catch(() => undefined);
    }
    if (err instanceof SafeOpenError) {
      throw symlinkTraversalError(params.entry.name);
    }
    throw err;
  } finally {
    if (tempHandle && !handleClosedByStream) {
      await tempHandle.close().catch(() => undefined);
    }
  }
}
|
||||
|
||||
/**
 * Extract a zip archive into destDir with layered defenses: a raw-size
 * preflight on the archive file, entry-count limits, per-entry path
 * validation, safe-open boundary checks, and streaming byte budgets.
 * Note: the archive is read fully into memory (bounded by maxArchiveBytes).
 */
async function extractZip(params: {
  archivePath: string;
  destDir: string;
  stripComponents?: number;
  limits?: ArchiveExtractLimits;
}): Promise<void> {
  const limits = resolveExtractLimits(params.limits);
  const destinationRealDir = await prepareArchiveDestinationDir(params.destDir);
  // Reject oversized archives before reading them into memory.
  const stat = await fs.stat(params.archivePath);
  if (stat.size > limits.maxArchiveBytes) {
    throw new ArchiveLimitError(ARCHIVE_LIMIT_ERROR_CODE.ARCHIVE_SIZE_EXCEEDS_LIMIT);
  }

  const buffer = await fs.readFile(params.archivePath);
  const zip = await loadZipArchiveWithPreflight(buffer, limits);
  const entries = Object.values(zip.files) as ZipEntry[];
  const strip = Math.max(0, Math.floor(params.stripComponents ?? 0));

  // Re-check with the parsed entry list (preflight used declared counts).
  assertArchiveEntryCountWithinLimit(entries.length, limits);

  const budget = createByteBudgetTracker(limits);

  for (const entry of entries) {
    const output = resolveZipOutputPath({
      entryPath: entry.name,
      strip,
      destinationDir: params.destDir,
    });
    // Entries fully consumed by strip-components are skipped.
    if (!output) {
      continue;
    }

    await prepareZipOutputPath({
      destinationDir: params.destDir,
      destinationRealDir,
      relPath: output.relPath,
      outPath: output.outPath,
      originalPath: entry.name,
      isDirectory: entry.dir,
    });
    if (entry.dir) {
      continue;
    }

    await writeZipFileEntry({
      entry,
      relPath: output.relPath,
      destinationRealDir,
      budget,
    });
  }
}
|
||||
|
||||
/** Minimal tar entry metadata used by the preflight checks. */
export type TarEntryInfo = { path: string; type: string; size: number };

// Tar entry types that are never extracted: links could escape the
// destination; device/FIFO/socket nodes are not legitimate package content.
const BLOCKED_TAR_ENTRY_TYPES = new Set([
  "SymbolicLink",
  "Link",
  "BlockDevice",
  "CharacterDevice",
  "FIFO",
  "Socket",
]);
|
||||
|
||||
function readTarEntryInfo(entry: unknown): TarEntryInfo {
|
||||
const p =
|
||||
typeof entry === "object" && entry !== null && "path" in entry
|
||||
? String((entry as { path: unknown }).path)
|
||||
: "";
|
||||
const t =
|
||||
typeof entry === "object" && entry !== null && "type" in entry
|
||||
? String((entry as { type: unknown }).type)
|
||||
: "";
|
||||
const s =
|
||||
typeof entry === "object" &&
|
||||
entry !== null &&
|
||||
"size" in entry &&
|
||||
typeof (entry as { size?: unknown }).size === "number" &&
|
||||
Number.isFinite((entry as { size: number }).size)
|
||||
? Math.max(0, Math.floor((entry as { size: number }).size))
|
||||
: 0;
|
||||
return { path: p, type: t, size: s };
|
||||
}
|
||||
|
||||
export function createTarEntryPreflightChecker(params: {
|
||||
rootDir: string;
|
||||
stripComponents?: number;
|
||||
limits?: ArchiveExtractLimits;
|
||||
escapeLabel?: string;
|
||||
}): (entry: TarEntryInfo) => void {
|
||||
const strip = Math.max(0, Math.floor(params.stripComponents ?? 0));
|
||||
const limits = resolveExtractLimits(params.limits);
|
||||
let entryCount = 0;
|
||||
const budget = createByteBudgetTracker(limits);
|
||||
|
||||
return (entry: TarEntryInfo) => {
|
||||
validateArchiveEntryPath(entry.path, { escapeLabel: params.escapeLabel });
|
||||
|
||||
const relPath = stripArchivePath(entry.path, strip);
|
||||
if (!relPath) {
|
||||
return;
|
||||
}
|
||||
validateArchiveEntryPath(relPath, { escapeLabel: params.escapeLabel });
|
||||
resolveArchiveOutputPath({
|
||||
rootDir: params.rootDir,
|
||||
relPath,
|
||||
originalPath: entry.path,
|
||||
escapeLabel: params.escapeLabel,
|
||||
});
|
||||
|
||||
if (BLOCKED_TAR_ENTRY_TYPES.has(entry.type)) {
|
||||
throw new Error(`tar entry is a link: ${entry.path}`);
|
||||
}
|
||||
|
||||
entryCount += 1;
|
||||
assertArchiveEntryCountWithinLimit(entryCount, limits);
|
||||
budget.addEntrySize(entry.size);
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Extract a zip or tar archive into destDir under a hard timeout.
 *
 * Tar archives are size-preflighted, then unpacked into a private staging
 * directory (with per-entry safety checks wired into tar's read path) and
 * merged into the destination through the same safe-open boundary used by
 * direct writes. Zip archives go through extractZip.
 *
 * @throws when the archive kind cannot be determined, a limit is exceeded,
 *   an entry fails a safety check, or the timeout elapses.
 */
export async function extractArchive(params: {
  archivePath: string;
  destDir: string;
  timeoutMs: number;
  kind?: ArchiveKind;
  stripComponents?: number;
  tarGzip?: boolean;
  limits?: ArchiveExtractLimits;
  logger?: ArchiveLogger;
}): Promise<void> {
  const kind = params.kind ?? resolveArchiveKind(params.archivePath);
  if (!kind) {
    throw new Error(`unsupported archive: ${params.archivePath}`);
  }

  const label = kind === "zip" ? "extract zip" : "extract tar";
  if (kind === "tar") {
    await withTimeout(
      (async () => {
        const limits = resolveExtractLimits(params.limits);
        // Reject oversized archives before unpacking anything.
        const stat = await fs.stat(params.archivePath);
        if (stat.size > limits.maxArchiveBytes) {
          throw new ArchiveLimitError(ARCHIVE_LIMIT_ERROR_CODE.ARCHIVE_SIZE_EXCEEDS_LIMIT);
        }

        const destinationRealDir = await prepareArchiveDestinationDir(params.destDir);
        await withStagedArchiveDestination({
          destinationRealDir,
          run: async (stagingDir) => {
            const checkTarEntrySafety = createTarEntryPreflightChecker({
              rootDir: destinationRealDir,
              stripComponents: params.stripComponents,
              limits,
            });
            // A canonical cwd is not enough here: tar can still follow
            // pre-existing child symlinks in the live destination tree.
            // Extract into a private staging dir first, then merge through
            // the same safe-open boundary checks used by direct file writes.
            await tar.x({
              file: params.archivePath,
              cwd: stagingDir,
              strip: Math.max(0, Math.floor(params.stripComponents ?? 0)),
              gzip: params.tarGzip,
              preservePaths: false,
              strict: true,
              // NOTE: must remain a regular function — `this` is the tar
              // Unpack emitter (see comment below); an arrow would break it.
              onReadEntry(entry) {
                try {
                  checkTarEntrySafety(readTarEntryInfo(entry));
                } catch (err) {
                  const error = err instanceof Error ? err : new Error(String(err));
                  // Node's EventEmitter calls listeners with `this` bound to the
                  // emitter (tar.Unpack), which exposes Parser.abort().
                  const emitter = this as unknown as { abort?: (error: Error) => void };
                  emitter.abort?.(error);
                }
              },
            });
            await mergeExtractedTreeIntoDestination({
              sourceDir: stagingDir,
              destinationDir: destinationRealDir,
              destinationRealDir,
            });
          },
        });
      })(),
      params.timeoutMs,
      label,
    );
    return;
  }

  await withTimeout(
    extractZip({
      archivePath: params.archivePath,
      destDir: params.destDir,
      stripComponents: params.stripComponents,
      limits: params.limits,
    }),
    params.timeoutMs,
    label,
  );
}
|
||||
|
||||
export async function fileExists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.stat(filePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
export async function readJsonFile<T>(filePath: string): Promise<T> {
|
||||
const raw = await fs.readFile(filePath, "utf-8");
|
||||
return JSON.parse(raw) as T;
|
||||
}
|
||||
type ArchiveExtractLimits,
|
||||
type ArchiveKind,
|
||||
type ArchiveLimitErrorCode,
|
||||
type ArchiveLogger,
|
||||
type ArchiveSecurityErrorCode,
|
||||
type TarEntryInfo,
|
||||
} from "@openclaw/fs-safe/archive";
|
||||
|
||||
@@ -1,240 +1,12 @@
|
||||
import path from "node:path";
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import * as upstream from "@openclaw/fs-safe/advanced";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import * as shim from "./boundary-file-read.js";
|
||||
|
||||
// Hoisted spy fns so the vi.mock factories below can reference them
// (vitest hoists vi.mock calls above ordinary declarations).
const resolveBoundaryPathSyncMock = vi.hoisted(() => vi.fn());
const resolveBoundaryPathMock = vi.hoisted(() => vi.fn());
const openVerifiedFileSyncMock = vi.hoisted(() => vi.fn());

// Replace the boundary-path resolvers with spies that forward all arguments.
vi.mock("./boundary-path.js", () => ({
  resolveBoundaryPathSync: (...args: unknown[]) => resolveBoundaryPathSyncMock(...args),
  resolveBoundaryPath: (...args: unknown[]) => resolveBoundaryPathMock(...args),
}));

// Stub the low-level verified-open primitive.
vi.mock("./safe-open-sync.js", () => ({
  openVerifiedFileSync: (...args: unknown[]) => openVerifiedFileSyncMock(...args),
}));

// Re-imported fresh per test (vi.resetModules in beforeEach) so each test
// gets a module instance wired to the freshly reset mocks.
let canUseBoundaryFileOpen: typeof import("./boundary-file-read.js").canUseBoundaryFileOpen;
let matchBoundaryFileOpenFailure: typeof import("./boundary-file-read.js").matchBoundaryFileOpenFailure;
let openBoundaryFile: typeof import("./boundary-file-read.js").openBoundaryFile;
let openBoundaryFileSync: typeof import("./boundary-file-read.js").openBoundaryFileSync;
|
||||
|
||||
describe("boundary-file-read", () => {
|
||||
beforeEach(async () => {
|
||||
vi.resetModules();
|
||||
({
|
||||
canUseBoundaryFileOpen,
|
||||
matchBoundaryFileOpenFailure,
|
||||
openBoundaryFile,
|
||||
openBoundaryFileSync,
|
||||
} = await import("./boundary-file-read.js"));
|
||||
resolveBoundaryPathSyncMock.mockReset();
|
||||
resolveBoundaryPathMock.mockReset();
|
||||
openVerifiedFileSyncMock.mockReset();
|
||||
});
|
||||
|
||||
it("recognizes the required sync fs surface", () => {
|
||||
const validFs = {
|
||||
openSync() {},
|
||||
closeSync() {},
|
||||
fstatSync() {},
|
||||
lstatSync() {},
|
||||
realpathSync() {},
|
||||
readFileSync() {},
|
||||
constants: {},
|
||||
};
|
||||
|
||||
expect(canUseBoundaryFileOpen(validFs as never)).toBe(true);
|
||||
expect(
|
||||
canUseBoundaryFileOpen({
|
||||
...validFs,
|
||||
openSync: undefined,
|
||||
} as never),
|
||||
).toBe(false);
|
||||
expect(
|
||||
canUseBoundaryFileOpen({
|
||||
...validFs,
|
||||
constants: null,
|
||||
} as never),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
it("maps sync boundary resolution into verified file opens", () => {
|
||||
const stat = { size: 3 } as never;
|
||||
const ioFs = { marker: "io" } as never;
|
||||
const absolutePath = path.resolve("plugin.json");
|
||||
|
||||
resolveBoundaryPathSyncMock.mockReturnValue({
|
||||
canonicalPath: "/real/plugin.json",
|
||||
rootCanonicalPath: "/real/root",
|
||||
});
|
||||
openVerifiedFileSyncMock.mockReturnValue({
|
||||
ok: true,
|
||||
path: "/real/plugin.json",
|
||||
fd: 7,
|
||||
stat,
|
||||
});
|
||||
|
||||
const opened = openBoundaryFileSync({
|
||||
absolutePath: "plugin.json",
|
||||
rootPath: "/workspace",
|
||||
boundaryLabel: "plugin root",
|
||||
ioFs,
|
||||
});
|
||||
|
||||
expect(resolveBoundaryPathSyncMock).toHaveBeenCalledWith({
|
||||
absolutePath,
|
||||
rootPath: "/workspace",
|
||||
rootCanonicalPath: undefined,
|
||||
boundaryLabel: "plugin root",
|
||||
skipLexicalRootCheck: undefined,
|
||||
});
|
||||
expect(openVerifiedFileSyncMock).toHaveBeenCalledWith({
|
||||
filePath: absolutePath,
|
||||
resolvedPath: "/real/plugin.json",
|
||||
rejectHardlinks: true,
|
||||
maxBytes: undefined,
|
||||
allowedType: undefined,
|
||||
ioFs,
|
||||
});
|
||||
expect(opened).toEqual({
|
||||
ok: true,
|
||||
path: "/real/plugin.json",
|
||||
fd: 7,
|
||||
stat,
|
||||
rootRealPath: "/real/root",
|
||||
});
|
||||
});
|
||||
|
||||
it("returns validation errors when sync boundary resolution throws", () => {
|
||||
const error = new Error("outside root");
|
||||
resolveBoundaryPathSyncMock.mockImplementation(() => {
|
||||
throw error;
|
||||
});
|
||||
|
||||
const opened = openBoundaryFileSync({
|
||||
absolutePath: "plugin.json",
|
||||
rootPath: "/workspace",
|
||||
boundaryLabel: "plugin root",
|
||||
});
|
||||
|
||||
expect(opened).toEqual({
|
||||
ok: false,
|
||||
reason: "validation",
|
||||
error,
|
||||
});
|
||||
expect(openVerifiedFileSyncMock).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("guards against unexpected async sync-resolution results", () => {
|
||||
resolveBoundaryPathSyncMock.mockReturnValue(
|
||||
Promise.resolve({
|
||||
canonicalPath: "/real/plugin.json",
|
||||
rootCanonicalPath: "/real/root",
|
||||
}),
|
||||
);
|
||||
|
||||
const opened = openBoundaryFileSync({
|
||||
absolutePath: "plugin.json",
|
||||
rootPath: "/workspace",
|
||||
boundaryLabel: "plugin root",
|
||||
});
|
||||
|
||||
expect(opened.ok).toBe(false);
|
||||
if (opened.ok) {
|
||||
return;
|
||||
}
|
||||
expect(opened.reason).toBe("validation");
|
||||
expect(String(opened.error)).toContain("Unexpected async boundary resolution");
|
||||
});
|
||||
|
||||
// Verifies the async open path end to end: the boundary resolution promise is
// awaited first, and its canonical result — together with the caller-supplied
// ioFs — is forwarded verbatim to the verified-open step.
it("awaits async boundary resolution before verifying the file", async () => {
  const ioFs = { marker: "io" } as never;
  const absolutePath = path.resolve("notes.txt");

  resolveBoundaryPathMock.mockResolvedValue({
    canonicalPath: "/real/notes.txt",
    rootCanonicalPath: "/real/root",
  });
  openVerifiedFileSyncMock.mockReturnValue({
    ok: false,
    reason: "validation",
    error: new Error("blocked"),
  });

  const opened = await openBoundaryFile({
    absolutePath: "notes.txt",
    rootPath: "/workspace",
    boundaryLabel: "workspace",
    aliasPolicy: { allowFinalSymlinkForUnlink: true },
    ioFs,
  });

  // The relative input path must be absolutized before hitting the resolver.
  expect(resolveBoundaryPathMock).toHaveBeenCalledWith({
    absolutePath,
    rootPath: "/workspace",
    rootCanonicalPath: undefined,
    boundaryLabel: "workspace",
    policy: { allowFinalSymlinkForUnlink: true },
    skipLexicalRootCheck: undefined,
  });
  // Hardlink rejection defaults to true when the caller does not specify it.
  expect(openVerifiedFileSyncMock).toHaveBeenCalledWith({
    filePath: absolutePath,
    resolvedPath: "/real/notes.txt",
    rejectHardlinks: true,
    maxBytes: undefined,
    allowedType: undefined,
    ioFs,
  });
  expect(opened).toEqual({
    ok: false,
    reason: "validation",
    error: expect.any(Error),
  });
});
|
||||
|
||||
it("maps async boundary resolution failures to validation errors", async () => {
|
||||
const error = new Error("escaped");
|
||||
resolveBoundaryPathMock.mockRejectedValue(error);
|
||||
|
||||
const opened = await openBoundaryFile({
|
||||
absolutePath: "notes.txt",
|
||||
rootPath: "/workspace",
|
||||
boundaryLabel: "workspace",
|
||||
});
|
||||
|
||||
expect(opened).toEqual({
|
||||
ok: false,
|
||||
reason: "validation",
|
||||
error,
|
||||
});
|
||||
expect(openVerifiedFileSyncMock).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("matches boundary file failures by reason with fallback support", () => {
|
||||
const missing = matchBoundaryFileOpenFailure(
|
||||
{ ok: false, reason: "path", error: new Error("missing") },
|
||||
{
|
||||
path: () => "missing",
|
||||
fallback: () => "fallback",
|
||||
},
|
||||
);
|
||||
const io = matchBoundaryFileOpenFailure(
|
||||
{ ok: false, reason: "io", error: new Error("io") },
|
||||
{
|
||||
io: () => "io",
|
||||
fallback: () => "fallback",
|
||||
},
|
||||
);
|
||||
const validation = matchBoundaryFileOpenFailure(
|
||||
{ ok: false, reason: "validation", error: new Error("blocked") },
|
||||
{
|
||||
fallback: (failure) => failure.reason,
|
||||
},
|
||||
);
|
||||
|
||||
expect(missing).toBe("missing");
|
||||
expect(io).toBe("io");
|
||||
expect(validation).toBe("validation");
|
||||
describe("root file open shim", () => {
|
||||
it("re-exports the fs-safe root file helpers", () => {
|
||||
expect(shim.canUseRootFileOpen).toBe(upstream.canUseRootFileOpen);
|
||||
expect(shim.matchRootFileOpenFailure).toBe(upstream.matchRootFileOpenFailure);
|
||||
expect(shim.openRootFile).toBe(upstream.openRootFile);
|
||||
expect(shim.openRootFileSync).toBe(upstream.openRootFileSync);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,224 +1,12 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import {
|
||||
resolveBoundaryPath,
|
||||
resolveBoundaryPathSync,
|
||||
type ResolvedBoundaryPath,
|
||||
} from "./boundary-path.js";
|
||||
import type { PathAliasPolicy } from "./path-alias-guards.js";
|
||||
import {
|
||||
openVerifiedFileSync,
|
||||
type SafeOpenSyncAllowedType,
|
||||
type SafeOpenSyncFailureReason,
|
||||
} from "./safe-open-sync.js";
|
||||
|
||||
// Minimal slice of the `node:fs` surface the boundary open helpers touch;
// keeps the required capability set explicit and lets tests substitute stubs.
type BoundaryReadFs = Pick<
  typeof fs,
  | "closeSync"
  | "constants"
  | "fstatSync"
  | "lstatSync"
  | "openSync"
  | "readFileSync"
  | "realpathSync"
>;

// Everything the verified-open step can report, plus "validation" for
// failures raised during boundary path resolution.
export type BoundaryFileOpenFailureReason = SafeOpenSyncFailureReason | "validation";

// Discriminated result of a boundary-scoped open: on success, an open fd with
// its stat and the boundary's canonical root; otherwise a tagged failure.
export type BoundaryFileOpenResult =
  | { ok: true; path: string; fd: number; stat: fs.Stats; rootRealPath: string }
  | { ok: false; reason: BoundaryFileOpenFailureReason; error?: unknown };

// Convenience alias for the failure arm of `BoundaryFileOpenResult`.
export type BoundaryFileOpenFailure = Extract<BoundaryFileOpenResult, { ok: false }>;

// Parameters for the synchronous boundary open.
// - `rootRealPath`: optional pre-computed canonical root (skips re-resolution).
// - `ioFs`: alternate filesystem implementation; defaults to `node:fs`.
export type OpenBoundaryFileSyncParams = {
  absolutePath: string;
  rootPath: string;
  boundaryLabel: string;
  rootRealPath?: string;
  maxBytes?: number;
  rejectHardlinks?: boolean;
  allowedType?: SafeOpenSyncAllowedType;
  skipLexicalRootCheck?: boolean;
  ioFs?: BoundaryReadFs;
};

// Async variant additionally accepts a path alias (symlink/hardlink) policy
// that is forwarded to the async boundary resolver.
export type OpenBoundaryFileParams = OpenBoundaryFileSyncParams & {
  aliasPolicy?: PathAliasPolicy;
};

// Internal shape produced by boundary resolution before the file is opened.
type ResolvedBoundaryFilePath = {
  absolutePath: string;
  resolvedPath: string;
  rootRealPath: string;
};
|
||||
|
||||
export function canUseBoundaryFileOpen(ioFs: typeof fs): boolean {
|
||||
return (
|
||||
typeof ioFs.openSync === "function" &&
|
||||
typeof ioFs.closeSync === "function" &&
|
||||
typeof ioFs.fstatSync === "function" &&
|
||||
typeof ioFs.lstatSync === "function" &&
|
||||
typeof ioFs.realpathSync === "function" &&
|
||||
typeof ioFs.readFileSync === "function" &&
|
||||
typeof ioFs.constants === "object" &&
|
||||
ioFs.constants !== null
|
||||
);
|
||||
}
|
||||
|
||||
export function openBoundaryFileSync(params: OpenBoundaryFileSyncParams): BoundaryFileOpenResult {
|
||||
const ioFs = params.ioFs ?? fs;
|
||||
const resolved = resolveBoundaryFilePathGeneric({
|
||||
absolutePath: params.absolutePath,
|
||||
resolve: (absolutePath) =>
|
||||
resolveBoundaryPathSync({
|
||||
absolutePath,
|
||||
rootPath: params.rootPath,
|
||||
rootCanonicalPath: params.rootRealPath,
|
||||
boundaryLabel: params.boundaryLabel,
|
||||
skipLexicalRootCheck: params.skipLexicalRootCheck,
|
||||
}),
|
||||
});
|
||||
if (resolved instanceof Promise) {
|
||||
return toBoundaryValidationError(new Error("Unexpected async boundary resolution"));
|
||||
}
|
||||
return finalizeBoundaryFileOpen({
|
||||
resolved,
|
||||
maxBytes: params.maxBytes,
|
||||
rejectHardlinks: params.rejectHardlinks,
|
||||
allowedType: params.allowedType,
|
||||
ioFs,
|
||||
});
|
||||
}
|
||||
|
||||
export function matchBoundaryFileOpenFailure<T>(
|
||||
failure: BoundaryFileOpenFailure,
|
||||
handlers: {
|
||||
path?: (failure: BoundaryFileOpenFailure) => T;
|
||||
validation?: (failure: BoundaryFileOpenFailure) => T;
|
||||
io?: (failure: BoundaryFileOpenFailure) => T;
|
||||
fallback: (failure: BoundaryFileOpenFailure) => T;
|
||||
},
|
||||
): T {
|
||||
switch (failure.reason) {
|
||||
case "path":
|
||||
return handlers.path ? handlers.path(failure) : handlers.fallback(failure);
|
||||
case "validation":
|
||||
return handlers.validation ? handlers.validation(failure) : handlers.fallback(failure);
|
||||
case "io":
|
||||
return handlers.io ? handlers.io(failure) : handlers.fallback(failure);
|
||||
}
|
||||
return handlers.fallback(failure);
|
||||
}
|
||||
|
||||
function openBoundaryFileResolved(params: {
|
||||
absolutePath: string;
|
||||
resolvedPath: string;
|
||||
rootRealPath: string;
|
||||
maxBytes?: number;
|
||||
rejectHardlinks?: boolean;
|
||||
allowedType?: SafeOpenSyncAllowedType;
|
||||
ioFs: BoundaryReadFs;
|
||||
}): BoundaryFileOpenResult {
|
||||
const opened = openVerifiedFileSync({
|
||||
filePath: params.absolutePath,
|
||||
resolvedPath: params.resolvedPath,
|
||||
rejectHardlinks: params.rejectHardlinks ?? true,
|
||||
maxBytes: params.maxBytes,
|
||||
allowedType: params.allowedType,
|
||||
ioFs: params.ioFs,
|
||||
});
|
||||
if (!opened.ok) {
|
||||
return opened;
|
||||
}
|
||||
return {
|
||||
ok: true,
|
||||
path: opened.path,
|
||||
fd: opened.fd,
|
||||
stat: opened.stat,
|
||||
rootRealPath: params.rootRealPath,
|
||||
};
|
||||
}
|
||||
|
||||
function finalizeBoundaryFileOpen(params: {
|
||||
resolved: ResolvedBoundaryFilePath | BoundaryFileOpenResult;
|
||||
maxBytes?: number;
|
||||
rejectHardlinks?: boolean;
|
||||
allowedType?: SafeOpenSyncAllowedType;
|
||||
ioFs: BoundaryReadFs;
|
||||
}): BoundaryFileOpenResult {
|
||||
if ("ok" in params.resolved) {
|
||||
return params.resolved;
|
||||
}
|
||||
return openBoundaryFileResolved({
|
||||
absolutePath: params.resolved.absolutePath,
|
||||
resolvedPath: params.resolved.resolvedPath,
|
||||
rootRealPath: params.resolved.rootRealPath,
|
||||
maxBytes: params.maxBytes,
|
||||
rejectHardlinks: params.rejectHardlinks,
|
||||
allowedType: params.allowedType,
|
||||
ioFs: params.ioFs,
|
||||
});
|
||||
}
|
||||
|
||||
export async function openBoundaryFile(
|
||||
params: OpenBoundaryFileParams,
|
||||
): Promise<BoundaryFileOpenResult> {
|
||||
const ioFs = params.ioFs ?? fs;
|
||||
const maybeResolved = resolveBoundaryFilePathGeneric({
|
||||
absolutePath: params.absolutePath,
|
||||
resolve: (absolutePath) =>
|
||||
resolveBoundaryPath({
|
||||
absolutePath,
|
||||
rootPath: params.rootPath,
|
||||
rootCanonicalPath: params.rootRealPath,
|
||||
boundaryLabel: params.boundaryLabel,
|
||||
policy: params.aliasPolicy,
|
||||
skipLexicalRootCheck: params.skipLexicalRootCheck,
|
||||
}),
|
||||
});
|
||||
const resolved = maybeResolved instanceof Promise ? await maybeResolved : maybeResolved;
|
||||
return finalizeBoundaryFileOpen({
|
||||
resolved,
|
||||
maxBytes: params.maxBytes,
|
||||
rejectHardlinks: params.rejectHardlinks,
|
||||
allowedType: params.allowedType,
|
||||
ioFs,
|
||||
});
|
||||
}
|
||||
|
||||
function toBoundaryValidationError(error: unknown): BoundaryFileOpenResult {
|
||||
return { ok: false, reason: "validation", error };
|
||||
}
|
||||
|
||||
function mapResolvedBoundaryPath(
|
||||
absolutePath: string,
|
||||
resolved: ResolvedBoundaryPath,
|
||||
): ResolvedBoundaryFilePath {
|
||||
return {
|
||||
absolutePath,
|
||||
resolvedPath: resolved.canonicalPath,
|
||||
rootRealPath: resolved.rootCanonicalPath,
|
||||
};
|
||||
}
|
||||
|
||||
function resolveBoundaryFilePathGeneric(params: {
|
||||
absolutePath: string;
|
||||
resolve: (absolutePath: string) => ResolvedBoundaryPath | Promise<ResolvedBoundaryPath>;
|
||||
}):
|
||||
| ResolvedBoundaryFilePath
|
||||
| BoundaryFileOpenResult
|
||||
| Promise<ResolvedBoundaryFilePath | BoundaryFileOpenResult> {
|
||||
const absolutePath = path.resolve(params.absolutePath);
|
||||
try {
|
||||
const resolved = params.resolve(absolutePath);
|
||||
if (resolved instanceof Promise) {
|
||||
return resolved
|
||||
.then((value) => mapResolvedBoundaryPath(absolutePath, value))
|
||||
.catch((error) => toBoundaryValidationError(error));
|
||||
}
|
||||
return mapResolvedBoundaryPath(absolutePath, resolved);
|
||||
} catch (error) {
|
||||
return toBoundaryValidationError(error);
|
||||
}
|
||||
}
|
||||
import "./fs-safe-defaults.js";
|
||||
export {
|
||||
canUseRootFileOpen,
|
||||
matchRootFileOpenFailure,
|
||||
openRootFile,
|
||||
openRootFileSync,
|
||||
type OpenRootFileParams,
|
||||
type OpenRootFileSyncParams,
|
||||
type RootFileOpenFailure,
|
||||
type RootFileOpenFailureReason,
|
||||
type RootFileOpenResult,
|
||||
} from "@openclaw/fs-safe/advanced";
|
||||
|
||||
@@ -2,7 +2,7 @@ import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { withTempDir } from "../test-helpers/temp-dir.js";
|
||||
import { resolveBoundaryPath, resolveBoundaryPathSync } from "./boundary-path.js";
|
||||
import { resolveRootPath, resolveRootPathSync } from "./boundary-path.js";
|
||||
import { isPathInside } from "./path-guards.js";
|
||||
|
||||
function createSeededRandom(seed: number): () => number {
|
||||
@@ -13,7 +13,7 @@ function createSeededRandom(seed: number): () => number {
|
||||
};
|
||||
}
|
||||
|
||||
describe("resolveBoundaryPath", () => {
|
||||
describe("resolveRootPath", () => {
|
||||
it("resolves symlink parents with non-existent leafs inside root", async () => {
|
||||
if (process.platform === "win32") {
|
||||
return;
|
||||
@@ -27,7 +27,7 @@ describe("resolveBoundaryPath", () => {
|
||||
await fs.symlink(targetDir, linkPath);
|
||||
|
||||
const unresolved = path.join(linkPath, "missing.txt");
|
||||
const result = await resolveBoundaryPath({
|
||||
const result = await resolveRootPath({
|
||||
absolutePath: unresolved,
|
||||
rootPath: root,
|
||||
boundaryLabel: "sandbox root",
|
||||
@@ -56,14 +56,14 @@ describe("resolveBoundaryPath", () => {
|
||||
const dangling = path.join(linkPath, "missing.txt");
|
||||
|
||||
await expect(
|
||||
resolveBoundaryPath({
|
||||
resolveRootPath({
|
||||
absolutePath: dangling,
|
||||
rootPath: root,
|
||||
boundaryLabel: "sandbox root",
|
||||
}),
|
||||
).rejects.toThrow(/Symlink escapes sandbox root/i);
|
||||
expect(() =>
|
||||
resolveBoundaryPathSync({
|
||||
resolveRootPathSync({
|
||||
absolutePath: dangling,
|
||||
rootPath: root,
|
||||
boundaryLabel: "sandbox root",
|
||||
@@ -88,14 +88,14 @@ describe("resolveBoundaryPath", () => {
|
||||
await fs.symlink(outsideFile, linkPath);
|
||||
|
||||
await expect(
|
||||
resolveBoundaryPath({
|
||||
resolveRootPath({
|
||||
absolutePath: linkPath,
|
||||
rootPath: root,
|
||||
boundaryLabel: "sandbox root",
|
||||
}),
|
||||
).rejects.toThrow(/Symlink escapes sandbox root/i);
|
||||
|
||||
const allowed = await resolveBoundaryPath({
|
||||
const allowed = await resolveRootPath({
|
||||
absolutePath: linkPath,
|
||||
rootPath: root,
|
||||
boundaryLabel: "sandbox root",
|
||||
@@ -121,7 +121,7 @@ describe("resolveBoundaryPath", () => {
|
||||
await fs.writeFile(path.join(root, fileName), "export default {}", "utf8");
|
||||
await fs.symlink(root, aliasRoot);
|
||||
|
||||
const resolved = await resolveBoundaryPath({
|
||||
const resolved = await resolveRootPath({
|
||||
absolutePath: path.join(aliasRoot, fileName),
|
||||
rootPath: await fs.realpath(root),
|
||||
boundaryLabel: "plugin root",
|
||||
@@ -129,7 +129,7 @@ describe("resolveBoundaryPath", () => {
|
||||
expect(resolved.exists).toBe(true);
|
||||
expect(isPathInside(resolved.rootCanonicalPath, resolved.canonicalPath)).toBe(true);
|
||||
|
||||
const resolvedSync = resolveBoundaryPathSync({
|
||||
const resolvedSync = resolveRootPathSync({
|
||||
absolutePath: path.join(aliasRoot, fileName),
|
||||
rootPath: await fs.realpath(root),
|
||||
boundaryLabel: "plugin root",
|
||||
@@ -167,7 +167,7 @@ describe("resolveBoundaryPath", () => {
|
||||
const useLink = rand() > 0.5;
|
||||
const safeBase = useLink ? safeLinkBase : safeRealBase;
|
||||
const safeCandidate = path.join(safeBase, `new-${token}.txt`);
|
||||
const safeResolved = await resolveBoundaryPath({
|
||||
const safeResolved = await resolveRootPath({
|
||||
absolutePath: safeCandidate,
|
||||
rootPath: root,
|
||||
boundaryLabel: "sandbox root",
|
||||
@@ -176,7 +176,7 @@ describe("resolveBoundaryPath", () => {
|
||||
|
||||
const unsafeCandidate = path.join(escapeLink, `new-${token}.txt`);
|
||||
await expect(
|
||||
resolveBoundaryPath({
|
||||
resolveRootPath({
|
||||
absolutePath: unsafeCandidate,
|
||||
rootPath: root,
|
||||
boundaryLabel: "sandbox root",
|
||||
|
||||
@@ -1,861 +1,9 @@
|
||||
import fs from "node:fs";
|
||||
import fsp from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { isNotFoundPathError, isPathInside } from "./path-guards.js";
|
||||
|
||||
// Access intent for a boundary resolution. NOTE(review): `intent` is accepted
// on the params below but not consumed in this module's visible logic —
// confirm whether it is used downstream before removing.
type BoundaryPathIntent = "read" | "write" | "create" | "delete" | "stat";

// Controls whether the FINAL path segment may remain a symlink/hardlink
// instead of being resolved — needed when the caller intends to unlink the
// alias itself rather than its target.
export type BoundaryPathAliasPolicy = {
  allowFinalSymlinkForUnlink?: boolean;
  allowFinalHardlinkForUnlink?: boolean;
};

// Canned alias policies: `strict` never preserves a terminal alias;
// `unlinkTarget` permits operating on the alias itself.
export const BOUNDARY_PATH_ALIAS_POLICIES = {
  strict: Object.freeze({
    allowFinalSymlinkForUnlink: false,
    allowFinalHardlinkForUnlink: false,
  }),
  unlinkTarget: Object.freeze({
    allowFinalSymlinkForUnlink: true,
    allowFinalHardlinkForUnlink: true,
  }),
} as const;

// Inputs for boundary resolution. `rootCanonicalPath` may carry a
// pre-computed canonical root; `skipLexicalRootCheck` relaxes the lexical
// containment requirement (canonical containment is still enforced).
type ResolveBoundaryPathParams = {
  absolutePath: string;
  rootPath: string;
  boundaryLabel: string;
  intent?: BoundaryPathIntent;
  policy?: BoundaryPathAliasPolicy;
  skipLexicalRootCheck?: boolean;
  rootCanonicalPath?: string;
};

// Filesystem object kind observed at the resolved path.
type ResolvedBoundaryPathKind = "missing" | "file" | "directory" | "symlink" | "other";

// Full result of a boundary resolution: the lexical and canonical forms of
// both the target and the root, plus existence/kind information.
export type ResolvedBoundaryPath = {
  absolutePath: string;
  canonicalPath: string;
  rootPath: string;
  rootCanonicalPath: string;
  relativePath: string;
  exists: boolean;
  kind: ResolvedBoundaryPathKind;
};
|
||||
|
||||
export async function resolveBoundaryPath(
|
||||
params: ResolveBoundaryPathParams,
|
||||
): Promise<ResolvedBoundaryPath> {
|
||||
const rootPath = path.resolve(params.rootPath);
|
||||
const absolutePath = path.resolve(params.absolutePath);
|
||||
const rootCanonicalPath = params.rootCanonicalPath
|
||||
? path.resolve(params.rootCanonicalPath)
|
||||
: await resolvePathViaExistingAncestor(rootPath);
|
||||
const context = createBoundaryResolutionContext({
|
||||
resolveParams: params,
|
||||
rootPath,
|
||||
absolutePath,
|
||||
rootCanonicalPath,
|
||||
outsideLexicalCanonicalPath: await resolveOutsideLexicalCanonicalPathAsync({
|
||||
rootPath,
|
||||
absolutePath,
|
||||
}),
|
||||
});
|
||||
|
||||
const outsideResult = await resolveOutsideBoundaryPathAsync({
|
||||
boundaryLabel: params.boundaryLabel,
|
||||
context,
|
||||
});
|
||||
if (outsideResult) {
|
||||
return outsideResult;
|
||||
}
|
||||
|
||||
return resolveBoundaryPathLexicalAsync({
|
||||
params,
|
||||
absolutePath: context.absolutePath,
|
||||
rootPath: context.rootPath,
|
||||
rootCanonicalPath: context.rootCanonicalPath,
|
||||
});
|
||||
}
|
||||
|
||||
export function resolveBoundaryPathSync(params: ResolveBoundaryPathParams): ResolvedBoundaryPath {
|
||||
const rootPath = path.resolve(params.rootPath);
|
||||
const absolutePath = path.resolve(params.absolutePath);
|
||||
const rootCanonicalPath = params.rootCanonicalPath
|
||||
? path.resolve(params.rootCanonicalPath)
|
||||
: resolvePathViaExistingAncestorSync(rootPath);
|
||||
const context = createBoundaryResolutionContext({
|
||||
resolveParams: params,
|
||||
rootPath,
|
||||
absolutePath,
|
||||
rootCanonicalPath,
|
||||
outsideLexicalCanonicalPath: resolveOutsideLexicalCanonicalPathSync({
|
||||
rootPath,
|
||||
absolutePath,
|
||||
}),
|
||||
});
|
||||
|
||||
const outsideResult = resolveOutsideBoundaryPathSync({
|
||||
boundaryLabel: params.boundaryLabel,
|
||||
context,
|
||||
});
|
||||
if (outsideResult) {
|
||||
return outsideResult;
|
||||
}
|
||||
|
||||
return resolveBoundaryPathLexicalSync({
|
||||
params,
|
||||
absolutePath: context.absolutePath,
|
||||
rootPath: context.rootPath,
|
||||
rootCanonicalPath: context.rootCanonicalPath,
|
||||
});
|
||||
}
|
||||
|
||||
// Mutable cursor state for the segment-by-segment lexical walk.
type LexicalTraversalState = {
  segments: string[]; // relative path segments still to be walked
  allowFinalSymlink: boolean; // policy: may the last segment stay a symlink?
  canonicalCursor: string; // canonical (symlink-resolved) position so far
  lexicalCursor: string; // lexical position as spelled by the caller
  preserveFinalSymlink: boolean; // set when a terminal symlink was kept, not followed
};

// Precomputed facts shared by the outside-boundary and lexical resolution
// phases of a single boundary resolution.
type BoundaryResolutionContext = {
  rootPath: string;
  absolutePath: string;
  rootCanonicalPath: string;
  lexicalInside: boolean; // absolutePath is lexically under rootPath
  canonicalOutsideLexicalPath: string; // canonical form used for outside-root checks
};
|
||||
|
||||
function isPromiseLike<T>(value: unknown): value is PromiseLike<T> {
|
||||
return Boolean(
|
||||
value &&
|
||||
(typeof value === "object" || typeof value === "function") &&
|
||||
"then" in value &&
|
||||
typeof (value as { then?: unknown }).then === "function",
|
||||
);
|
||||
}
|
||||
|
||||
function createLexicalTraversalState(params: {
|
||||
params: ResolveBoundaryPathParams;
|
||||
rootPath: string;
|
||||
rootCanonicalPath: string;
|
||||
absolutePath: string;
|
||||
}): LexicalTraversalState {
|
||||
const relative = path.relative(params.rootPath, params.absolutePath);
|
||||
return {
|
||||
segments: relative.split(path.sep).filter(Boolean),
|
||||
allowFinalSymlink: params.params.policy?.allowFinalSymlinkForUnlink === true,
|
||||
canonicalCursor: params.rootCanonicalPath,
|
||||
lexicalCursor: params.rootPath,
|
||||
preserveFinalSymlink: false,
|
||||
};
|
||||
}
|
||||
|
||||
function assertLexicalCursorInsideBoundary(params: {
|
||||
params: ResolveBoundaryPathParams;
|
||||
rootCanonicalPath: string;
|
||||
absolutePath: string;
|
||||
candidatePath: string;
|
||||
}): void {
|
||||
assertInsideBoundary({
|
||||
boundaryLabel: params.params.boundaryLabel,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
candidatePath: params.candidatePath,
|
||||
absolutePath: params.absolutePath,
|
||||
});
|
||||
}
|
||||
|
||||
function applyMissingSuffixToCanonicalCursor(params: {
|
||||
state: LexicalTraversalState;
|
||||
missingFromIndex: number;
|
||||
rootCanonicalPath: string;
|
||||
params: ResolveBoundaryPathParams;
|
||||
absolutePath: string;
|
||||
}): void {
|
||||
const missingSuffix = params.state.segments.slice(params.missingFromIndex);
|
||||
params.state.canonicalCursor = path.resolve(params.state.canonicalCursor, ...missingSuffix);
|
||||
assertLexicalCursorInsideBoundary({
|
||||
params: params.params,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
candidatePath: params.state.canonicalCursor,
|
||||
absolutePath: params.absolutePath,
|
||||
});
|
||||
}
|
||||
|
||||
function advanceCanonicalCursorForSegment(params: {
|
||||
state: LexicalTraversalState;
|
||||
segment: string;
|
||||
rootCanonicalPath: string;
|
||||
params: ResolveBoundaryPathParams;
|
||||
absolutePath: string;
|
||||
}): void {
|
||||
params.state.canonicalCursor = path.resolve(params.state.canonicalCursor, params.segment);
|
||||
assertLexicalCursorInsideBoundary({
|
||||
params: params.params,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
candidatePath: params.state.canonicalCursor,
|
||||
absolutePath: params.absolutePath,
|
||||
});
|
||||
}
|
||||
|
||||
function finalizeLexicalResolution(params: {
|
||||
params: ResolveBoundaryPathParams;
|
||||
rootPath: string;
|
||||
rootCanonicalPath: string;
|
||||
absolutePath: string;
|
||||
state: LexicalTraversalState;
|
||||
kind: { exists: boolean; kind: ResolvedBoundaryPathKind };
|
||||
}): ResolvedBoundaryPath {
|
||||
assertLexicalCursorInsideBoundary({
|
||||
params: params.params,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
candidatePath: params.state.canonicalCursor,
|
||||
absolutePath: params.absolutePath,
|
||||
});
|
||||
return buildResolvedBoundaryPath({
|
||||
absolutePath: params.absolutePath,
|
||||
canonicalPath: params.state.canonicalCursor,
|
||||
rootPath: params.rootPath,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
kind: params.kind,
|
||||
});
|
||||
}
|
||||
|
||||
function handleLexicalLstatFailure(params: {
|
||||
error: unknown;
|
||||
state: LexicalTraversalState;
|
||||
missingFromIndex: number;
|
||||
rootCanonicalPath: string;
|
||||
resolveParams: ResolveBoundaryPathParams;
|
||||
absolutePath: string;
|
||||
}): boolean {
|
||||
if (!isNotFoundPathError(params.error)) {
|
||||
return false;
|
||||
}
|
||||
applyMissingSuffixToCanonicalCursor({
|
||||
state: params.state,
|
||||
missingFromIndex: params.missingFromIndex,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
params: params.resolveParams,
|
||||
absolutePath: params.absolutePath,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
function handleLexicalStatReadFailure(params: {
|
||||
error: unknown;
|
||||
state: LexicalTraversalState;
|
||||
missingFromIndex: number;
|
||||
rootCanonicalPath: string;
|
||||
resolveParams: ResolveBoundaryPathParams;
|
||||
absolutePath: string;
|
||||
}): null {
|
||||
if (
|
||||
handleLexicalLstatFailure({
|
||||
error: params.error,
|
||||
state: params.state,
|
||||
missingFromIndex: params.missingFromIndex,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
resolveParams: params.resolveParams,
|
||||
absolutePath: params.absolutePath,
|
||||
})
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
throw params.error;
|
||||
}
|
||||
|
||||
function handleLexicalStatDisposition(params: {
|
||||
state: LexicalTraversalState;
|
||||
isSymbolicLink: boolean;
|
||||
segment: string;
|
||||
isLast: boolean;
|
||||
rootCanonicalPath: string;
|
||||
resolveParams: ResolveBoundaryPathParams;
|
||||
absolutePath: string;
|
||||
}): "continue" | "break" | "resolve-link" {
|
||||
if (!params.isSymbolicLink) {
|
||||
advanceCanonicalCursorForSegment({
|
||||
state: params.state,
|
||||
segment: params.segment,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
params: params.resolveParams,
|
||||
absolutePath: params.absolutePath,
|
||||
});
|
||||
return "continue";
|
||||
}
|
||||
|
||||
if (params.state.allowFinalSymlink && params.isLast) {
|
||||
params.state.preserveFinalSymlink = true;
|
||||
advanceCanonicalCursorForSegment({
|
||||
state: params.state,
|
||||
segment: params.segment,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
params: params.resolveParams,
|
||||
absolutePath: params.absolutePath,
|
||||
});
|
||||
return "break";
|
||||
}
|
||||
|
||||
return "resolve-link";
|
||||
}
|
||||
|
||||
function applyResolvedSymlinkHop(params: {
|
||||
state: LexicalTraversalState;
|
||||
linkCanonical: string;
|
||||
rootCanonicalPath: string;
|
||||
boundaryLabel: string;
|
||||
}): void {
|
||||
if (!isPathInside(params.rootCanonicalPath, params.linkCanonical)) {
|
||||
throw symlinkEscapeError({
|
||||
boundaryLabel: params.boundaryLabel,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
symlinkPath: params.state.lexicalCursor,
|
||||
});
|
||||
}
|
||||
params.state.canonicalCursor = params.linkCanonical;
|
||||
params.state.lexicalCursor = params.linkCanonical;
|
||||
}
|
||||
|
||||
function readLexicalStat(params: {
|
||||
state: LexicalTraversalState;
|
||||
missingFromIndex: number;
|
||||
rootCanonicalPath: string;
|
||||
resolveParams: ResolveBoundaryPathParams;
|
||||
absolutePath: string;
|
||||
read: (cursor: string) => fs.Stats | Promise<fs.Stats>;
|
||||
}): fs.Stats | null | Promise<fs.Stats | null> {
|
||||
try {
|
||||
const stat = params.read(params.state.lexicalCursor);
|
||||
if (isPromiseLike<fs.Stats>(stat)) {
|
||||
return Promise.resolve(stat).catch((error) =>
|
||||
handleLexicalStatReadFailure({ ...params, error }),
|
||||
);
|
||||
}
|
||||
return stat;
|
||||
} catch (error) {
|
||||
return handleLexicalStatReadFailure({ ...params, error });
|
||||
}
|
||||
}
|
||||
|
||||
function resolveAndApplySymlinkHop(params: {
|
||||
state: LexicalTraversalState;
|
||||
rootCanonicalPath: string;
|
||||
boundaryLabel: string;
|
||||
resolveLinkCanonical: (cursor: string) => string | Promise<string>;
|
||||
}): void | Promise<void> {
|
||||
const linkCanonical = params.resolveLinkCanonical(params.state.lexicalCursor);
|
||||
if (isPromiseLike<string>(linkCanonical)) {
|
||||
return Promise.resolve(linkCanonical).then((value) =>
|
||||
applyResolvedSymlinkHop({
|
||||
state: params.state,
|
||||
linkCanonical: value,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
boundaryLabel: params.boundaryLabel,
|
||||
}),
|
||||
);
|
||||
}
|
||||
applyResolvedSymlinkHop({
|
||||
state: params.state,
|
||||
linkCanonical,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
boundaryLabel: params.boundaryLabel,
|
||||
});
|
||||
}
|
||||
|
||||
type LexicalTraversalStep = {
|
||||
idx: number;
|
||||
segment: string;
|
||||
isLast: boolean;
|
||||
};
|
||||
|
||||
function* iterateLexicalTraversal(state: LexicalTraversalState): Iterable<LexicalTraversalStep> {
|
||||
for (let idx = 0; idx < state.segments.length; idx += 1) {
|
||||
const segment = state.segments[idx] ?? "";
|
||||
const isLast = idx === state.segments.length - 1;
|
||||
state.lexicalCursor = path.join(state.lexicalCursor, segment);
|
||||
yield { idx, segment, isLast };
|
||||
}
|
||||
}
|
||||
|
||||
async function resolveBoundaryPathLexicalAsync(params: {
|
||||
params: ResolveBoundaryPathParams;
|
||||
absolutePath: string;
|
||||
rootPath: string;
|
||||
rootCanonicalPath: string;
|
||||
}): Promise<ResolvedBoundaryPath> {
|
||||
const state = createLexicalTraversalState(params);
|
||||
const sharedStepParams = {
|
||||
state,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
resolveParams: params.params,
|
||||
absolutePath: params.absolutePath,
|
||||
};
|
||||
|
||||
for (const { idx, segment, isLast } of iterateLexicalTraversal(state)) {
|
||||
const stat = await readLexicalStat({
|
||||
...sharedStepParams,
|
||||
missingFromIndex: idx,
|
||||
read: (cursor) => fsp.lstat(cursor),
|
||||
});
|
||||
if (!stat) {
|
||||
break;
|
||||
}
|
||||
|
||||
const disposition = handleLexicalStatDisposition({
|
||||
...sharedStepParams,
|
||||
isSymbolicLink: stat.isSymbolicLink(),
|
||||
segment,
|
||||
isLast,
|
||||
});
|
||||
if (disposition === "continue") {
|
||||
continue;
|
||||
}
|
||||
if (disposition === "break") {
|
||||
break;
|
||||
}
|
||||
|
||||
await resolveAndApplySymlinkHop({
|
||||
state,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
boundaryLabel: params.params.boundaryLabel,
|
||||
resolveLinkCanonical: (cursor) => resolveSymlinkHopPath(cursor),
|
||||
});
|
||||
}
|
||||
|
||||
const kind = await getPathKind(params.absolutePath, state.preserveFinalSymlink);
|
||||
return finalizeLexicalResolution({
|
||||
...params,
|
||||
state,
|
||||
kind,
|
||||
});
|
||||
}
|
||||
|
||||
function resolveBoundaryPathLexicalSync(params: {
|
||||
params: ResolveBoundaryPathParams;
|
||||
absolutePath: string;
|
||||
rootPath: string;
|
||||
rootCanonicalPath: string;
|
||||
}): ResolvedBoundaryPath {
|
||||
const state = createLexicalTraversalState(params);
|
||||
for (let idx = 0; idx < state.segments.length; idx += 1) {
|
||||
const segment = state.segments[idx] ?? "";
|
||||
const isLast = idx === state.segments.length - 1;
|
||||
state.lexicalCursor = path.join(state.lexicalCursor, segment);
|
||||
const maybeStat = readLexicalStat({
|
||||
state,
|
||||
missingFromIndex: idx,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
resolveParams: params.params,
|
||||
absolutePath: params.absolutePath,
|
||||
read: (cursor) => fs.lstatSync(cursor),
|
||||
});
|
||||
if (isPromiseLike<fs.Stats | null>(maybeStat)) {
|
||||
throw new Error("Unexpected async lexical stat");
|
||||
}
|
||||
const stat = maybeStat;
|
||||
if (!stat) {
|
||||
break;
|
||||
}
|
||||
|
||||
const disposition = handleLexicalStatDisposition({
|
||||
state,
|
||||
isSymbolicLink: stat.isSymbolicLink(),
|
||||
segment,
|
||||
isLast,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
resolveParams: params.params,
|
||||
absolutePath: params.absolutePath,
|
||||
});
|
||||
if (disposition === "continue") {
|
||||
continue;
|
||||
}
|
||||
if (disposition === "break") {
|
||||
break;
|
||||
}
|
||||
|
||||
const maybeApplied = resolveAndApplySymlinkHop({
|
||||
state,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
boundaryLabel: params.params.boundaryLabel,
|
||||
resolveLinkCanonical: (cursor) => resolveSymlinkHopPathSync(cursor),
|
||||
});
|
||||
if (isPromiseLike<void>(maybeApplied)) {
|
||||
throw new Error("Unexpected async symlink resolution");
|
||||
}
|
||||
}
|
||||
|
||||
const kind = getPathKindSync(params.absolutePath, state.preserveFinalSymlink);
|
||||
return finalizeLexicalResolution({
|
||||
...params,
|
||||
state,
|
||||
kind,
|
||||
});
|
||||
}
|
||||
|
||||
function resolveCanonicalOutsideLexicalPath(params: {
|
||||
absolutePath: string;
|
||||
outsideLexicalCanonicalPath?: string;
|
||||
}): string {
|
||||
return params.outsideLexicalCanonicalPath ?? params.absolutePath;
|
||||
}
|
||||
|
||||
function createBoundaryResolutionContext(params: {
|
||||
resolveParams: ResolveBoundaryPathParams;
|
||||
rootPath: string;
|
||||
absolutePath: string;
|
||||
rootCanonicalPath: string;
|
||||
outsideLexicalCanonicalPath?: string;
|
||||
}): BoundaryResolutionContext {
|
||||
const lexicalInside = isPathInside(params.rootPath, params.absolutePath);
|
||||
const canonicalOutsideLexicalPath = resolveCanonicalOutsideLexicalPath({
|
||||
absolutePath: params.absolutePath,
|
||||
outsideLexicalCanonicalPath: params.outsideLexicalCanonicalPath,
|
||||
});
|
||||
assertLexicalBoundaryOrCanonicalAlias({
|
||||
skipLexicalRootCheck: params.resolveParams.skipLexicalRootCheck,
|
||||
lexicalInside,
|
||||
canonicalOutsideLexicalPath,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
boundaryLabel: params.resolveParams.boundaryLabel,
|
||||
rootPath: params.rootPath,
|
||||
absolutePath: params.absolutePath,
|
||||
});
|
||||
return {
|
||||
rootPath: params.rootPath,
|
||||
absolutePath: params.absolutePath,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
lexicalInside,
|
||||
canonicalOutsideLexicalPath,
|
||||
};
|
||||
}
|
||||
|
||||
async function resolveOutsideBoundaryPathAsync(params: {
|
||||
boundaryLabel: string;
|
||||
context: BoundaryResolutionContext;
|
||||
}): Promise<ResolvedBoundaryPath | null> {
|
||||
if (params.context.lexicalInside) {
|
||||
return null;
|
||||
}
|
||||
const kind = await getPathKind(params.context.absolutePath, false);
|
||||
return buildOutsideBoundaryPathFromContext({
|
||||
boundaryLabel: params.boundaryLabel,
|
||||
context: params.context,
|
||||
kind,
|
||||
});
|
||||
}
|
||||
|
||||
function resolveOutsideBoundaryPathSync(params: {
|
||||
boundaryLabel: string;
|
||||
context: BoundaryResolutionContext;
|
||||
}): ResolvedBoundaryPath | null {
|
||||
if (params.context.lexicalInside) {
|
||||
return null;
|
||||
}
|
||||
const kind = getPathKindSync(params.context.absolutePath, false);
|
||||
return buildOutsideBoundaryPathFromContext({
|
||||
boundaryLabel: params.boundaryLabel,
|
||||
context: params.context,
|
||||
kind,
|
||||
});
|
||||
}
|
||||
|
||||
function buildOutsideBoundaryPathFromContext(params: {
|
||||
boundaryLabel: string;
|
||||
context: BoundaryResolutionContext;
|
||||
kind: { exists: boolean; kind: ResolvedBoundaryPathKind };
|
||||
}): ResolvedBoundaryPath {
|
||||
return buildOutsideLexicalBoundaryPath({
|
||||
boundaryLabel: params.boundaryLabel,
|
||||
rootCanonicalPath: params.context.rootCanonicalPath,
|
||||
absolutePath: params.context.absolutePath,
|
||||
canonicalOutsideLexicalPath: params.context.canonicalOutsideLexicalPath,
|
||||
rootPath: params.context.rootPath,
|
||||
kind: params.kind,
|
||||
});
|
||||
}
|
||||
|
||||
async function resolveOutsideLexicalCanonicalPathAsync(params: {
|
||||
rootPath: string;
|
||||
absolutePath: string;
|
||||
}): Promise<string | undefined> {
|
||||
if (isPathInside(params.rootPath, params.absolutePath)) {
|
||||
return undefined;
|
||||
}
|
||||
return await resolvePathViaExistingAncestor(params.absolutePath);
|
||||
}
|
||||
|
||||
function resolveOutsideLexicalCanonicalPathSync(params: {
|
||||
rootPath: string;
|
||||
absolutePath: string;
|
||||
}): string | undefined {
|
||||
if (isPathInside(params.rootPath, params.absolutePath)) {
|
||||
return undefined;
|
||||
}
|
||||
return resolvePathViaExistingAncestorSync(params.absolutePath);
|
||||
}
|
||||
|
||||
function buildOutsideLexicalBoundaryPath(params: {
|
||||
boundaryLabel: string;
|
||||
rootCanonicalPath: string;
|
||||
absolutePath: string;
|
||||
canonicalOutsideLexicalPath: string;
|
||||
rootPath: string;
|
||||
kind: { exists: boolean; kind: ResolvedBoundaryPathKind };
|
||||
}): ResolvedBoundaryPath {
|
||||
assertInsideBoundary({
|
||||
boundaryLabel: params.boundaryLabel,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
candidatePath: params.canonicalOutsideLexicalPath,
|
||||
absolutePath: params.absolutePath,
|
||||
});
|
||||
return buildResolvedBoundaryPath({
|
||||
absolutePath: params.absolutePath,
|
||||
canonicalPath: params.canonicalOutsideLexicalPath,
|
||||
rootPath: params.rootPath,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
kind: params.kind,
|
||||
});
|
||||
}
|
||||
|
||||
function assertLexicalBoundaryOrCanonicalAlias(params: {
|
||||
skipLexicalRootCheck?: boolean;
|
||||
lexicalInside: boolean;
|
||||
canonicalOutsideLexicalPath: string;
|
||||
rootCanonicalPath: string;
|
||||
boundaryLabel: string;
|
||||
rootPath: string;
|
||||
absolutePath: string;
|
||||
}): void {
|
||||
if (params.skipLexicalRootCheck || params.lexicalInside) {
|
||||
return;
|
||||
}
|
||||
if (isPathInside(params.rootCanonicalPath, params.canonicalOutsideLexicalPath)) {
|
||||
return;
|
||||
}
|
||||
throw pathEscapeError({
|
||||
boundaryLabel: params.boundaryLabel,
|
||||
rootPath: params.rootPath,
|
||||
absolutePath: params.absolutePath,
|
||||
});
|
||||
}
|
||||
|
||||
function buildResolvedBoundaryPath(params: {
|
||||
absolutePath: string;
|
||||
canonicalPath: string;
|
||||
rootPath: string;
|
||||
rootCanonicalPath: string;
|
||||
kind: { exists: boolean; kind: ResolvedBoundaryPathKind };
|
||||
}): ResolvedBoundaryPath {
|
||||
return {
|
||||
absolutePath: params.absolutePath,
|
||||
canonicalPath: params.canonicalPath,
|
||||
rootPath: params.rootPath,
|
||||
rootCanonicalPath: params.rootCanonicalPath,
|
||||
relativePath: relativeInsideRoot(params.rootCanonicalPath, params.canonicalPath),
|
||||
exists: params.kind.exists,
|
||||
kind: params.kind.kind,
|
||||
};
|
||||
}
|
||||
|
||||
async function resolvePathViaExistingAncestor(targetPath: string): Promise<string> {
|
||||
const normalized = path.resolve(targetPath);
|
||||
let cursor = normalized;
|
||||
const missingSuffix: string[] = [];
|
||||
|
||||
while (!isFilesystemRoot(cursor) && !(await pathExists(cursor))) {
|
||||
missingSuffix.unshift(path.basename(cursor));
|
||||
const parent = path.dirname(cursor);
|
||||
if (parent === cursor) {
|
||||
break;
|
||||
}
|
||||
cursor = parent;
|
||||
}
|
||||
|
||||
if (!(await pathExists(cursor))) {
|
||||
return normalized;
|
||||
}
|
||||
|
||||
try {
|
||||
const resolvedAncestor = path.resolve(await fsp.realpath(cursor));
|
||||
if (missingSuffix.length === 0) {
|
||||
return resolvedAncestor;
|
||||
}
|
||||
return path.resolve(resolvedAncestor, ...missingSuffix);
|
||||
} catch {
|
||||
return normalized;
|
||||
}
|
||||
}
|
||||
|
||||
export function resolvePathViaExistingAncestorSync(targetPath: string): string {
|
||||
const normalized = path.resolve(targetPath);
|
||||
let cursor = normalized;
|
||||
const missingSuffix: string[] = [];
|
||||
|
||||
while (!isFilesystemRoot(cursor) && !fs.existsSync(cursor)) {
|
||||
missingSuffix.unshift(path.basename(cursor));
|
||||
const parent = path.dirname(cursor);
|
||||
if (parent === cursor) {
|
||||
break;
|
||||
}
|
||||
cursor = parent;
|
||||
}
|
||||
|
||||
if (!fs.existsSync(cursor)) {
|
||||
return normalized;
|
||||
}
|
||||
|
||||
try {
|
||||
// Keep sync behavior aligned with async (`fsp.realpath`) to avoid
|
||||
// platform-specific canonical alias drift (notably on Windows).
|
||||
const resolvedAncestor = path.resolve(fs.realpathSync(cursor));
|
||||
if (missingSuffix.length === 0) {
|
||||
return resolvedAncestor;
|
||||
}
|
||||
return path.resolve(resolvedAncestor, ...missingSuffix);
|
||||
} catch {
|
||||
return normalized;
|
||||
}
|
||||
}
|
||||
|
||||
async function getPathKind(
|
||||
absolutePath: string,
|
||||
preserveFinalSymlink: boolean,
|
||||
): Promise<{ exists: boolean; kind: ResolvedBoundaryPathKind }> {
|
||||
try {
|
||||
const stat = preserveFinalSymlink
|
||||
? await fsp.lstat(absolutePath)
|
||||
: await fsp.stat(absolutePath);
|
||||
return { exists: true, kind: toResolvedKind(stat) };
|
||||
} catch (error) {
|
||||
if (isNotFoundPathError(error)) {
|
||||
return { exists: false, kind: "missing" };
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
function getPathKindSync(
|
||||
absolutePath: string,
|
||||
preserveFinalSymlink: boolean,
|
||||
): { exists: boolean; kind: ResolvedBoundaryPathKind } {
|
||||
try {
|
||||
const stat = preserveFinalSymlink ? fs.lstatSync(absolutePath) : fs.statSync(absolutePath);
|
||||
return { exists: true, kind: toResolvedKind(stat) };
|
||||
} catch (error) {
|
||||
if (isNotFoundPathError(error)) {
|
||||
return { exists: false, kind: "missing" };
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
function toResolvedKind(stat: fs.Stats): ResolvedBoundaryPathKind {
|
||||
if (stat.isFile()) {
|
||||
return "file";
|
||||
}
|
||||
if (stat.isDirectory()) {
|
||||
return "directory";
|
||||
}
|
||||
if (stat.isSymbolicLink()) {
|
||||
return "symlink";
|
||||
}
|
||||
return "other";
|
||||
}
|
||||
|
||||
function relativeInsideRoot(rootPath: string, targetPath: string): string {
|
||||
const relative = path.relative(path.resolve(rootPath), path.resolve(targetPath));
|
||||
if (!relative || relative === ".") {
|
||||
return "";
|
||||
}
|
||||
if (relative.startsWith("..") || path.isAbsolute(relative)) {
|
||||
return "";
|
||||
}
|
||||
return relative;
|
||||
}
|
||||
|
||||
function assertInsideBoundary(params: {
|
||||
boundaryLabel: string;
|
||||
rootCanonicalPath: string;
|
||||
candidatePath: string;
|
||||
absolutePath: string;
|
||||
}): void {
|
||||
if (isPathInside(params.rootCanonicalPath, params.candidatePath)) {
|
||||
return;
|
||||
}
|
||||
throw new Error(
|
||||
`Path resolves outside ${params.boundaryLabel} (${shortPath(params.rootCanonicalPath)}): ${shortPath(params.absolutePath)}`,
|
||||
);
|
||||
}
|
||||
|
||||
function pathEscapeError(params: {
|
||||
boundaryLabel: string;
|
||||
rootPath: string;
|
||||
absolutePath: string;
|
||||
}): Error {
|
||||
return new Error(
|
||||
`Path escapes ${params.boundaryLabel} (${shortPath(params.rootPath)}): ${shortPath(params.absolutePath)}`,
|
||||
);
|
||||
}
|
||||
|
||||
function symlinkEscapeError(params: {
|
||||
boundaryLabel: string;
|
||||
rootCanonicalPath: string;
|
||||
symlinkPath: string;
|
||||
}): Error {
|
||||
return new Error(
|
||||
`Symlink escapes ${params.boundaryLabel} (${shortPath(params.rootCanonicalPath)}): ${shortPath(params.symlinkPath)}`,
|
||||
);
|
||||
}
|
||||
|
||||
function shortPath(value: string): string {
|
||||
const home = os.homedir();
|
||||
if (value.startsWith(home)) {
|
||||
return `~${value.slice(home.length)}`;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
function isFilesystemRoot(candidate: string): boolean {
|
||||
return path.parse(candidate).root === candidate;
|
||||
}
|
||||
|
||||
async function pathExists(targetPath: string): Promise<boolean> {
|
||||
try {
|
||||
await fsp.lstat(targetPath);
|
||||
return true;
|
||||
} catch (error) {
|
||||
if (isNotFoundPathError(error)) {
|
||||
return false;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async function resolveSymlinkHopPath(symlinkPath: string): Promise<string> {
|
||||
try {
|
||||
return path.resolve(await fsp.realpath(symlinkPath));
|
||||
} catch (error) {
|
||||
if (!isNotFoundPathError(error)) {
|
||||
throw error;
|
||||
}
|
||||
const linkTarget = await fsp.readlink(symlinkPath);
|
||||
const linkAbsolute = path.resolve(path.dirname(symlinkPath), linkTarget);
|
||||
return resolvePathViaExistingAncestor(linkAbsolute);
|
||||
}
|
||||
}
|
||||
|
||||
function resolveSymlinkHopPathSync(symlinkPath: string): string {
|
||||
try {
|
||||
return path.resolve(fs.realpathSync(symlinkPath));
|
||||
} catch (error) {
|
||||
if (!isNotFoundPathError(error)) {
|
||||
throw error;
|
||||
}
|
||||
const linkTarget = fs.readlinkSync(symlinkPath);
|
||||
const linkAbsolute = path.resolve(path.dirname(symlinkPath), linkTarget);
|
||||
return resolvePathViaExistingAncestorSync(linkAbsolute);
|
||||
}
|
||||
}
|
||||
import "./fs-safe-defaults.js";
|
||||
export {
|
||||
ROOT_PATH_ALIAS_POLICIES,
|
||||
resolvePathViaExistingAncestorSync,
|
||||
resolveRootPath,
|
||||
resolveRootPathSync,
|
||||
type ResolvedRootPath,
|
||||
type RootPathAliasPolicy,
|
||||
} from "@openclaw/fs-safe/advanced";
|
||||
|
||||
@@ -10,6 +10,7 @@ import {
|
||||
} from "../shared/device-auth-store.js";
|
||||
import type { DeviceAuthStore } from "../shared/device-auth.js";
|
||||
import { safeParseJsonWithSchema } from "../utils/zod-parse.js";
|
||||
import { privateFileStoreSync } from "./private-file-store.js";
|
||||
|
||||
const DEVICE_AUTH_FILE = "device-auth.json";
|
||||
const DeviceAuthStoreSchema = z.object({
|
||||
@@ -35,13 +36,9 @@ function readStore(filePath: string): DeviceAuthStore | null {
|
||||
}
|
||||
|
||||
function writeStore(filePath: string, store: DeviceAuthStore): void {
|
||||
fs.mkdirSync(path.dirname(filePath), { recursive: true });
|
||||
fs.writeFileSync(filePath, `${JSON.stringify(store, null, 2)}\n`, { mode: 0o600 });
|
||||
try {
|
||||
fs.chmodSync(filePath, 0o600);
|
||||
} catch {
|
||||
// best-effort
|
||||
}
|
||||
privateFileStoreSync(path.dirname(filePath)).writeJson(path.basename(filePath), store, {
|
||||
trailingNewline: true,
|
||||
});
|
||||
}
|
||||
|
||||
export function loadDeviceAuthToken(params: {
|
||||
|
||||
@@ -11,12 +11,7 @@ import {
|
||||
import { roleScopesAllow } from "../shared/operator-scope-compat.js";
|
||||
import { normalizeDevicePublicKeyBase64Url } from "./device-identity.js";
|
||||
import { resolvePairingPaths } from "./pairing-files.js";
|
||||
import {
|
||||
createAsyncLock,
|
||||
pruneExpiredPending,
|
||||
readJsonFile,
|
||||
writeJsonAtomic,
|
||||
} from "./pairing-files.js";
|
||||
import { createAsyncLock, pruneExpiredPending, tryReadJson, writeJson } from "./pairing-files.js";
|
||||
import { generatePairingToken, verifyPairingToken } from "./pairing-token.js";
|
||||
|
||||
export const DEVICE_BOOTSTRAP_TOKEN_TTL_MS = 10 * 60 * 1000;
|
||||
@@ -164,7 +159,7 @@ function normalizeBootstrapPublicKey(publicKey: string): string {
|
||||
|
||||
async function loadState(baseDir?: string): Promise<DeviceBootstrapStateFile> {
|
||||
const bootstrapPath = resolveBootstrapPath(baseDir);
|
||||
const rawState = (await readJsonFile<DeviceBootstrapStateFile>(bootstrapPath)) ?? {};
|
||||
const rawState = (await tryReadJson<DeviceBootstrapStateFile>(bootstrapPath)) ?? {};
|
||||
const state: DeviceBootstrapStateFile = {};
|
||||
if (!rawState || typeof rawState !== "object" || Array.isArray(rawState)) {
|
||||
return state;
|
||||
@@ -195,7 +190,7 @@ async function loadState(baseDir?: string): Promise<DeviceBootstrapStateFile> {
|
||||
|
||||
async function persistState(state: DeviceBootstrapStateFile, baseDir?: string): Promise<void> {
|
||||
const bootstrapPath = resolveBootstrapPath(baseDir);
|
||||
await writeJsonAtomic(bootstrapPath, state);
|
||||
await writeJson(bootstrapPath, state);
|
||||
}
|
||||
|
||||
export async function issueDeviceBootstrapToken(
|
||||
|
||||
@@ -2,6 +2,7 @@ import crypto from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { resolveStateDir } from "../config/paths.js";
|
||||
import { privateFileStoreSync } from "./private-file-store.js";
|
||||
|
||||
export type DeviceIdentity = {
|
||||
deviceId: string;
|
||||
@@ -21,10 +22,6 @@ function resolveDefaultIdentityPath(): string {
|
||||
return path.join(resolveStateDir(), "identity", "device.json");
|
||||
}
|
||||
|
||||
function ensureDir(filePath: string) {
|
||||
fs.mkdirSync(path.dirname(filePath), { recursive: true });
|
||||
}
|
||||
|
||||
const ED25519_SPKI_PREFIX = Buffer.from("302a300506032b6570032100", "hex");
|
||||
|
||||
function base64UrlEncode(buf: Buffer): string {
|
||||
@@ -81,12 +78,9 @@ export function loadOrCreateDeviceIdentity(
|
||||
...parsed,
|
||||
deviceId: derivedId,
|
||||
};
|
||||
fs.writeFileSync(filePath, `${JSON.stringify(updated, null, 2)}\n`, { mode: 0o600 });
|
||||
try {
|
||||
fs.chmodSync(filePath, 0o600);
|
||||
} catch {
|
||||
// best-effort
|
||||
}
|
||||
privateFileStoreSync(path.dirname(filePath)).writeJson(path.basename(filePath), updated, {
|
||||
trailingNewline: true,
|
||||
});
|
||||
return {
|
||||
deviceId: derivedId,
|
||||
publicKeyPem: parsed.publicKeyPem,
|
||||
@@ -105,7 +99,6 @@ export function loadOrCreateDeviceIdentity(
|
||||
}
|
||||
|
||||
const identity = generateIdentity();
|
||||
ensureDir(filePath);
|
||||
const stored: StoredIdentity = {
|
||||
version: 1,
|
||||
deviceId: identity.deviceId,
|
||||
@@ -113,12 +106,9 @@ export function loadOrCreateDeviceIdentity(
|
||||
privateKeyPem: identity.privateKeyPem,
|
||||
createdAtMs: Date.now(),
|
||||
};
|
||||
fs.writeFileSync(filePath, `${JSON.stringify(stored, null, 2)}\n`, { mode: 0o600 });
|
||||
try {
|
||||
fs.chmodSync(filePath, 0o600);
|
||||
} catch {
|
||||
// best-effort
|
||||
}
|
||||
privateFileStoreSync(path.dirname(filePath)).writeJson(path.basename(filePath), stored, {
|
||||
trailingNewline: true,
|
||||
});
|
||||
return identity;
|
||||
}
|
||||
|
||||
|
||||
@@ -13,11 +13,11 @@ import {
|
||||
import {
|
||||
createAsyncLock,
|
||||
pruneExpiredPending,
|
||||
readDurableJsonFile,
|
||||
readJsonIfExists,
|
||||
reconcilePendingPairingRequests,
|
||||
coercePairingStateRecord,
|
||||
resolvePairingPaths,
|
||||
writeJsonAtomic,
|
||||
writeJson,
|
||||
} from "./pairing-files.js";
|
||||
import { rejectPendingPairingRequest } from "./pairing-pending.js";
|
||||
import { generatePairingToken, verifyPairingToken } from "./pairing-token.js";
|
||||
@@ -154,8 +154,8 @@ export function formatDevicePairingForbiddenMessage(result: DevicePairingForbidd
|
||||
async function loadState(baseDir?: string): Promise<DevicePairingStateFile> {
|
||||
const { pendingPath, pairedPath } = resolvePairingPaths(baseDir, "devices");
|
||||
const [pending, paired] = await Promise.all([
|
||||
readDurableJsonFile<unknown>(pendingPath),
|
||||
readDurableJsonFile<unknown>(pairedPath),
|
||||
readJsonIfExists<unknown>(pendingPath),
|
||||
readJsonIfExists<unknown>(pairedPath),
|
||||
]);
|
||||
const state: DevicePairingStateFile = {
|
||||
pendingById: coercePairingStateRecord<DevicePairingPendingRequest>(pending),
|
||||
@@ -174,16 +174,16 @@ async function persistState(
|
||||
) {
|
||||
const { pendingPath, pairedPath } = resolvePairingPaths(baseDir, "devices");
|
||||
if (target === "pending") {
|
||||
await writeJsonAtomic(pendingPath, state.pendingById);
|
||||
await writeJson(pendingPath, state.pendingById);
|
||||
return;
|
||||
}
|
||||
if (target === "paired") {
|
||||
await writeJsonAtomic(pairedPath, state.pairedByDeviceId);
|
||||
await writeJson(pairedPath, state.pairedByDeviceId);
|
||||
return;
|
||||
}
|
||||
await Promise.all([
|
||||
writeJsonAtomic(pendingPath, state.pendingById),
|
||||
writeJsonAtomic(pairedPath, state.pairedByDeviceId),
|
||||
writeJson(pendingPath, state.pendingById),
|
||||
writeJson(pairedPath, state.pairedByDeviceId),
|
||||
]);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { appendFileSync, mkdirSync } from "node:fs";
|
||||
import { mkdirSync } from "node:fs";
|
||||
import { dirname } from "node:path";
|
||||
import { performance } from "node:perf_hooks";
|
||||
import type { OpenClawConfig } from "../config/types.openclaw.js";
|
||||
import { isDiagnosticFlagEnabled } from "./diagnostic-flags.js";
|
||||
import { isTruthyEnvValue } from "./env.js";
|
||||
import { appendRegularFileSync } from "./regular-file.js";
|
||||
|
||||
const OPENCLAW_DIAGNOSTICS_TIMELINE_SCHEMA_VERSION = "openclaw.diagnostics.v1";
|
||||
|
||||
@@ -167,7 +168,7 @@ export function emitDiagnosticsTimelineEvent(
|
||||
mkdirSync(dir, { recursive: true });
|
||||
createdTimelineDirs.add(dir);
|
||||
}
|
||||
appendFileSync(path, line, "utf8");
|
||||
appendRegularFileSync({ filePath: path, content: line });
|
||||
} catch (error) {
|
||||
if (!warnedAboutTimelineWrite) {
|
||||
warnedAboutTimelineWrite = true;
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user