[codex] Extract filesystem safety primitives (#77918)

* refactor: extract filesystem safety primitives

* refactor: use fs-safe for file access helpers

* refactor: reuse fs-safe for media reads

* refactor: use fs-safe for image reads

* refactor: reuse fs-safe in qqbot media opener

* refactor: reuse fs-safe for local media checks

* refactor: consume cleaner fs-safe api

* refactor: align fs-safe json option names

* fix: preserve fs-safe migration contracts

* refactor: use fs-safe primitive subpaths

* refactor: use grouped fs-safe subpaths

* refactor: align fs-safe api usage

* refactor: adapt private state store api

* chore: refresh proof gate

* refactor: follow fs-safe json api split

* refactor: follow reduced fs-safe surface

* build: default fs-safe python helper off

* fix: preserve fs-safe plugin sdk aliases

* refactor: consolidate fs-safe usage

* refactor: unify fs-safe store usage

* refactor: trim fs-safe temp workspace usage

* refactor: hide low-level fs-safe primitives

* build: use published fs-safe package

* fix: preserve outbound recovery durability after rebase

* chore: refresh pr checks
This commit is contained in:
Peter Steinberger
2026-05-06 02:15:17 +01:00
committed by GitHub
parent 61481eb34f
commit 538605ff44
356 changed files with 4918 additions and 11913 deletions

View File

@@ -3,6 +3,7 @@ import * as fs from "node:fs";
import * as os from "node:os";
import * as path from "node:path";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { normalizeSource } from "../messaging/media-source.js";
import {
ApiError,
MediaFileType,
@@ -333,4 +334,51 @@ describe("media-chunked: ChunkedMediaApi.uploadChunked", () => {
await fs.promises.rm(tmp, { recursive: true, force: true });
}
});
// Regression test (TOCTOU): normalizeSource() opens and validates the local
// file once; the chunked uploader must keep reading through that verified
// handle even if the on-disk path is removed and replaced afterwards.
it("uses the verified localPath handle if the path is replaced before chunked upload", async () => {
const tmp = await fs.promises.mkdtemp(path.join(os.tmpdir(), "chunked-verified-"));
const filePath = path.join(tmp, "fixture.bin");
await fs.promises.writeFile(filePath, FIXTURE_BUFFER);
// Validation happens here — the returned source carries the opened handle.
const source = await normalizeSource({ localPath: filePath }, { maxSize: 1_000_000 });
// Swap the file out from under the path AFTER validation. A naive uploader
// that reopens by path would now see "replacement bytes" instead.
await fs.promises.rm(filePath);
await fs.promises.writeFile(filePath, Buffer.from("replacement bytes"));
try {
const client = mockApiClient();
const tm = mockTokenManager();
stubFetchOk();
// Minimal happy-path API stubs for prepare / part-finish / complete.
client.request.mockImplementation(async (_t, _m, p) => {
if (p.endsWith("/upload_prepare")) {
return makePrepareResponse("uid-verified", 3);
}
if (p.endsWith("/upload_part_finish")) {
return {};
}
if (p.endsWith("/files")) {
return { file_uuid: "u", file_info: "fi", ttl: 10 } satisfies UploadMediaResponse;
}
throw new Error(`unexpected ${p}`);
});
const api = new ChunkedMediaApi(client, tm);
await api.uploadChunked({
scope: "c2c",
targetId: "u1",
fileType: MediaFileType.VIDEO,
source,
creds: { appId: "a", clientSecret: "s" },
});
// The md5 sent to upload_prepare must be the hash of the ORIGINAL bytes,
// proving the upload streamed from the verified handle, not the new path.
const prepareCall = client.request.mock.calls.find((c) =>
String(c[2]).endsWith("/upload_prepare"),
)!;
const prepareBody = prepareCall[3] as { md5: string };
expect(prepareBody.md5).toBe(crypto.createHash("md5").update(FIXTURE_BUFFER).digest("hex"));
} finally {
// The test owns the opened handle here (uploadChunked only closes handles
// it opened itself), so close it before removing the temp dir.
if (source.kind === "localPath") {
await source.opened?.close().catch(() => undefined);
}
await fs.promises.rm(tmp, { recursive: true, force: true });
}
});
});

View File

@@ -35,8 +35,9 @@
*/
import * as crypto from "node:crypto";
import * as fs from "node:fs";
import type { MediaSource } from "../messaging/media-source.js";
import type { FileHandle } from "node:fs/promises";
import type { MediaSource, OpenedLocalFile } from "../messaging/media-source.js";
import { openLocalFile } from "../messaging/media-source.js";
import {
ApiError,
MediaFileType,
@@ -178,138 +179,137 @@ export class ChunkedMediaApi {
async uploadChunked(opts: UploadChunkedOptions): Promise<UploadMediaResponse> {
const prefix = opts.logPrefix ?? "[qqbot:chunked-upload]";
// 1. Resolve input: size + local path (or temp buffer handle).
const input = resolveSource(opts.source, opts.fileName);
const displayName = input.fileName;
const fileSize = input.size;
const pathLabel = input.kind === "localPath" ? input.path : "<buffer>";
this.logger?.info?.(
`${prefix} Start: file=${displayName} size=${formatFileSize(fileSize)} type=${opts.fileType}`,
);
// 2. Compute md5 / sha1 / md5_10m. Identical for buffer and localPath,
// but the localPath path streams so it never has to materialize the
// whole file twice.
const hashes = await computeHashes(input);
this.logger?.debug?.(
`${prefix} hashes: md5=${hashes.md5} sha1=${hashes.sha1} md5_10m=${hashes.md5_10m}`,
);
// 3. Upload-cache fast path: the md5 hash is already a strong content
// identifier, so we can short-circuit before even calling upload_prepare.
if (this.cache) {
const cached = this.cache.get(hashes.md5, opts.scope, opts.targetId, opts.fileType);
if (cached) {
this.logger?.info?.(
`${prefix} cache HIT (md5=${hashes.md5.slice(0, 8)}) — skipping chunked upload`,
);
return { file_uuid: "", file_info: cached, ttl: 0 };
}
}
// 4. upload_prepare.
const fileNameForPrepare =
opts.fileType === MediaFileType.FILE ? this.sanitize(displayName) : displayName;
const prepareResp = await this.callUploadPrepare(
opts,
fileNameForPrepare,
fileSize,
hashes,
pathLabel,
);
const { upload_id, parts } = prepareResp;
const block_size = prepareResp.block_size;
const maxConcurrent = Math.min(
prepareResp.concurrency ? prepareResp.concurrency : DEFAULT_CONCURRENT_PARTS,
MAX_CONCURRENT_PARTS,
);
const retryTimeoutMs = prepareResp.retry_timeout
? Math.min(prepareResp.retry_timeout * 1000, MAX_PART_FINISH_RETRY_TIMEOUT_MS)
: undefined;
this.logger?.info?.(
`${prefix} prepared: upload_id=${upload_id} block=${formatFileSize(block_size)} parts=${parts.length} concurrency=${maxConcurrent}`,
);
// 5. Upload every part. Concurrency is per-upload, not global.
let completedParts = 0;
let uploadedBytes = 0;
const uploadPart = async (part: UploadPart): Promise<void> => {
const partIndex = part.index; // 1-based.
const offset = (partIndex - 1) * block_size;
const length = Math.min(block_size, fileSize - offset);
const partBuffer = await readPart(input, offset, length);
const md5Hex = crypto.createHash("md5").update(partBuffer).digest("hex");
this.logger?.debug?.(
`${prefix} part ${partIndex}/${parts.length}: ${formatFileSize(length)} offset=${offset} md5=${md5Hex}`,
);
// 5a. PUT to pre-signed COS URL.
await putToPresignedUrl(
part.presigned_url,
partBuffer,
partIndex,
parts.length,
this.logger,
prefix,
);
// 5b. upload_part_finish — fetch a fresh token each time to defend
// against long uploads exceeding the token TTL.
await this.callUploadPartFinish(opts, upload_id, partIndex, length, md5Hex, retryTimeoutMs);
completedParts++;
uploadedBytes += length;
this.logger?.info?.(
`${prefix} part ${partIndex}/${parts.length} done (${completedParts}/${parts.length})`,
);
opts.onProgress?.({
completedParts,
totalParts: parts.length,
uploadedBytes,
totalBytes: fileSize,
});
};
// 1. Resolve input: size + verified local file descriptor (or buffer).
const input = await resolveSource(opts.source, opts.fileName);
try {
const displayName = input.fileName;
const fileSize = input.size;
const pathLabel = input.kind === "localPath" ? input.path : "<buffer>";
this.logger?.info?.(
`${prefix} Start: file=${displayName} size=${formatFileSize(fileSize)} type=${opts.fileType}`,
);
// 2. Compute md5 / sha1 / md5_10m. Identical for buffer and localPath,
// but the localPath descriptor streams so it never has to materialize the
// whole file twice or reopen a path after validation.
const hashes = await computeHashes(input);
this.logger?.debug?.(
`${prefix} hashes: md5=${hashes.md5} sha1=${hashes.sha1} md5_10m=${hashes.md5_10m}`,
);
// 3. Upload-cache fast path: the md5 hash is already a strong content
// identifier, so we can short-circuit before even calling upload_prepare.
if (this.cache) {
const cached = this.cache.get(hashes.md5, opts.scope, opts.targetId, opts.fileType);
if (cached) {
this.logger?.info?.(
`${prefix} cache HIT (md5=${hashes.md5.slice(0, 8)}) — skipping chunked upload`,
);
return { file_uuid: "", file_info: cached, ttl: 0 };
}
}
// 4. upload_prepare.
const fileNameForPrepare =
opts.fileType === MediaFileType.FILE ? this.sanitize(displayName) : displayName;
const prepareResp = await this.callUploadPrepare(
opts,
fileNameForPrepare,
fileSize,
hashes,
pathLabel,
);
const { upload_id, parts } = prepareResp;
const block_size = prepareResp.block_size;
const maxConcurrent = Math.min(
prepareResp.concurrency ? prepareResp.concurrency : DEFAULT_CONCURRENT_PARTS,
MAX_CONCURRENT_PARTS,
);
const retryTimeoutMs = prepareResp.retry_timeout
? Math.min(prepareResp.retry_timeout * 1000, MAX_PART_FINISH_RETRY_TIMEOUT_MS)
: undefined;
this.logger?.info?.(
`${prefix} prepared: upload_id=${upload_id} block=${formatFileSize(block_size)} parts=${parts.length} concurrency=${maxConcurrent}`,
);
// 5. Upload every part. Concurrency is per-upload, not global.
let completedParts = 0;
let uploadedBytes = 0;
const uploadPart = async (part: UploadPart): Promise<void> => {
const partIndex = part.index; // 1-based.
const offset = (partIndex - 1) * block_size;
const length = Math.min(block_size, fileSize - offset);
const partBuffer = await readPart(input, offset, length);
const md5Hex = crypto.createHash("md5").update(partBuffer).digest("hex");
this.logger?.debug?.(
`${prefix} part ${partIndex}/${parts.length}: ${formatFileSize(length)} offset=${offset} md5=${md5Hex}`,
);
// 5a. PUT to pre-signed COS URL.
await putToPresignedUrl(
part.presigned_url,
partBuffer,
partIndex,
parts.length,
this.logger,
prefix,
);
// 5b. upload_part_finish — fetch a fresh token each time to defend
// against long uploads exceeding the token TTL.
await this.callUploadPartFinish(opts, upload_id, partIndex, length, md5Hex, retryTimeoutMs);
completedParts++;
uploadedBytes += length;
this.logger?.info?.(
`${prefix} part ${partIndex}/${parts.length} done (${completedParts}/${parts.length})`,
);
opts.onProgress?.({
completedParts,
totalParts: parts.length,
uploadedBytes,
totalBytes: fileSize,
});
};
await runWithConcurrency(
parts.map((part) => () => uploadPart(part)),
maxConcurrent,
);
this.logger?.info?.(`${prefix} all parts uploaded, completing...`);
// 6. complete_upload.
const result = await this.callCompleteUpload(opts, upload_id);
this.logger?.info?.(`${prefix} completed: file_uuid=${result.file_uuid} ttl=${result.ttl}s`);
// 7. Populate the shared upload cache so subsequent sends skip re-uploading.
if (this.cache && result.file_info && result.ttl > 0) {
this.cache.set(
hashes.md5,
opts.scope,
opts.targetId,
opts.fileType,
result.file_info,
result.file_uuid,
result.ttl,
);
}
return result;
} finally {
// Release the verified descriptor only when resolveSource opened it for
// this upload (closeWhenDone). When the caller supplied `source.opened`,
// ownership stays with the caller, who closes it in their own finally.
if (input.kind === "localPath" && input.closeWhenDone) {
await input.opened.close().catch(() => undefined);
}
}
this.logger?.info?.(`${prefix} all parts uploaded, completing...`);
// 6. complete_upload.
const result = await this.callCompleteUpload(opts, upload_id);
this.logger?.info?.(`${prefix} completed: file_uuid=${result.file_uuid} ttl=${result.ttl}s`);
// 7. Populate the shared upload cache so subsequent sends skip re-uploading.
if (this.cache && result.file_info && result.ttl > 0) {
this.cache.set(
hashes.md5,
opts.scope,
opts.targetId,
opts.fileType,
result.file_info,
result.file_uuid,
result.ttl,
);
}
return result;
}
// -------- Internal call wrappers --------
@@ -429,17 +429,31 @@ export function isChunkedUploadImplemented(): boolean {
* the bytes plus the metadata required by `upload_prepare`.
*/
type ChunkedInput =
| { kind: "localPath"; path: string; size: number; fileName: string }
| {
kind: "localPath";
path: string;
size: number;
fileName: string;
opened: OpenedLocalFile;
closeWhenDone: boolean;
}
| { kind: "buffer"; buffer: Buffer; size: number; fileName: string };
function resolveSource(source: MediaSource, fileNameOverride?: string): ChunkedInput {
async function resolveSource(
source: MediaSource,
fileNameOverride?: string,
): Promise<ChunkedInput> {
if (source.kind === "localPath") {
const inferredName = source.path.split(/[/\\]/).pop() || "file";
const opened =
source.opened ?? (await openLocalFile(source.path, { maxSize: Number.MAX_SAFE_INTEGER }));
return {
kind: "localPath",
path: source.path,
size: source.size,
size: opened.size,
fileName: fileNameOverride ?? inferredName,
opened,
closeWhenDone: source.opened === undefined,
};
}
if (source.kind === "buffer") {
@@ -460,14 +474,9 @@ async function readPart(input: ChunkedInput, offset: number, length: number): Pr
if (input.kind === "buffer") {
return input.buffer.subarray(offset, offset + length);
}
const handle = await fs.promises.open(input.path, "r");
try {
const buf = Buffer.alloc(length);
const { bytesRead } = await handle.read(buf, 0, length, offset);
return bytesRead < length ? buf.subarray(0, bytesRead) : buf;
} finally {
await handle.close();
}
const buf = Buffer.alloc(length);
const { bytesRead } = await input.opened.handle.read(buf, 0, length, offset);
return bytesRead < length ? buf.subarray(0, bytesRead) : buf;
}
// ============ Hash computation ============
@@ -476,8 +485,8 @@ async function readPart(input: ChunkedInput, offset: number, length: number): Pr
* Stream the source once to compute md5 + sha1 + md5_10m.
*
* For buffer inputs the three hashes are computed in a single pass over
* the existing memory. For localPath inputs a ReadStream drives the
* hashers so memory use stays constant.
* the existing memory. For localPath inputs the verified descriptor drives
* the hashers so memory use stays constant.
*/
async function computeHashes(input: ChunkedInput): Promise<UploadPrepareHashes> {
if (input.kind === "buffer") {
@@ -497,7 +506,7 @@ async function computeHashes(input: ChunkedInput): Promise<UploadPrepareHashes>
let consumed = 0;
const needsMd5_10m = input.size > MD5_10M_SIZE;
const stream = fs.createReadStream(input.path);
const stream = createReadStreamFromHandle(input.opened.handle);
stream.on("data", (chunk: Buffer | string) => {
const buf = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
md5.update(buf);
@@ -523,6 +532,10 @@ async function computeHashes(input: ChunkedInput): Promise<UploadPrepareHashes>
});
}
/**
 * Wrap an already-verified FileHandle in a ReadStream for single-pass hashing.
 *
 * `autoClose: false` is deliberate: the handle's lifetime is owned by the
 * caller (see the `closeWhenDone` bookkeeping in `uploadChunked`), so the
 * stream must not close it when it ends. `start: 0` rewinds to the beginning
 * regardless of any prior positioned reads on the same handle.
 */
function createReadStreamFromHandle(handle: FileHandle): NodeJS.ReadableStream {
return handle.createReadStream({ autoClose: false, start: 0 });
}
// ============ COS PUT ============
/** Per-part retry budget for the COS PUT call (exponential backoff). */

View File

@@ -26,7 +26,7 @@
*/
import fs from "node:fs";
import path from "node:path";
import { replaceFileAtomicSync } from "openclaw/plugin-sdk/security-runtime";
import { getCredentialBackupFile, getLegacyCredentialBackupFile } from "../utils/data-paths.js";
interface CredentialBackup {
@@ -43,16 +43,17 @@ export function saveCredentialBackup(accountId: string, appId: string, clientSec
}
try {
const backupPath = getCredentialBackupFile(accountId);
fs.mkdirSync(path.dirname(backupPath), { recursive: true });
const data: CredentialBackup = {
accountId,
appId,
clientSecret,
savedAt: new Date().toISOString(),
};
const tmpPath = `${backupPath}.tmp`;
fs.writeFileSync(tmpPath, `${JSON.stringify(data, null, 2)}\n`, "utf8");
fs.renameSync(tmpPath, backupPath);
replaceFileAtomicSync({
filePath: backupPath,
content: `${JSON.stringify(data, null, 2)}\n`,
tempPrefix: ".qqbot-credential-backup",
});
} catch {
/* best-effort — ignore */
}
@@ -89,10 +90,11 @@ export function loadCredentialBackup(accountId?: string): CredentialBackup | nul
if (data.accountId) {
try {
const backupPath = getCredentialBackupFile(data.accountId);
fs.mkdirSync(path.dirname(backupPath), { recursive: true });
const tmpPath = `${backupPath}.tmp`;
fs.writeFileSync(tmpPath, `${JSON.stringify(data, null, 2)}\n`, "utf8");
fs.renameSync(tmpPath, backupPath);
replaceFileAtomicSync({
filePath: backupPath,
content: `${JSON.stringify(data, null, 2)}\n`,
tempPrefix: ".qqbot-credential-backup",
});
fs.unlinkSync(legacy);
} catch {
/* ignore migration errors */

View File

@@ -9,9 +9,8 @@
*
* - `url` — remote http(s) URL that the QQ server can fetch directly.
* - `base64` — in-memory base64 string (typically from a `data:` URL).
* - `localPath` — on-disk file; kept as a path so a future chunked-upload
* implementation can stream it via `fs.createReadStream` without the 4/3×
* base64 memory overhead.
* - `localPath` — on-disk file; kept as a path plus an optional verified
* descriptor so uploaders can avoid reopening a path after validation.
* - `buffer` — in-memory raw bytes (e.g. TTS output, downloaded url-fallback).
*
* ## Security baseline (localPath branch)
@@ -29,7 +28,8 @@
* reading the whole file first.
*/
import * as fs from "node:fs";
import type { FileHandle } from "node:fs/promises";
import { FsSafeError, openLocalFileSafely } from "openclaw/plugin-sdk/security-runtime";
import { MAX_UPLOAD_SIZE, formatFileSize, getMimeType } from "../utils/file-utils.js";
// ============ Types ============
@@ -39,14 +39,14 @@ import { MAX_UPLOAD_SIZE, formatFileSize, getMimeType } from "../utils/file-util
*
* - `url`: remote URL — upload via `file_data=null; url=...`.
* - `base64`: already-encoded base64 — upload via `file_data=...`.
* - `localPath`: on-disk file — one-shot path reads it into a buffer;
* chunked path (future) streams it via `fs.createReadStream`.
* - `localPath`: on-disk file — uploaders should prefer `opened` when present
* and only reopen `path` for direct, already-normalized test/helper calls.
* - `buffer`: raw bytes in memory — same as above minus disk I/O.
*/
export type MediaSource =
| { kind: "url"; url: string }
| { kind: "base64"; data: string; mime?: string }
| { kind: "localPath"; path: string; size: number; mime?: string }
| { kind: "localPath"; path: string; size: number; mime?: string; opened?: OpenedLocalFile }
| { kind: "buffer"; buffer: Buffer; fileName?: string; mime?: string };
/**
@@ -92,8 +92,8 @@ function tryParseDataUrl(value: string): { mime: string; data: string } | null {
*
* Callers MUST call {@link OpenedLocalFile.close} (typically in a `finally`).
*/
interface OpenedLocalFile {
handle: fs.promises.FileHandle;
export interface OpenedLocalFile {
handle: FileHandle;
size: number;
close(): Promise<void>;
}
@@ -120,27 +120,26 @@ export async function openLocalFile(
opts: { maxSize?: number } = {},
): Promise<OpenedLocalFile> {
const maxSize = opts.maxSize ?? MAX_UPLOAD_SIZE;
const openFlags =
fs.constants.O_RDONLY | ("O_NOFOLLOW" in fs.constants ? fs.constants.O_NOFOLLOW : 0);
const handle = await fs.promises.open(filePath, openFlags);
try {
const stat = await handle.stat();
if (!stat.isFile()) {
throw new Error("Path is not a regular file");
const opened = await openLocalFileSafely({ filePath }).catch((err: unknown) => {
if (err instanceof FsSafeError && err.code === "not-file") {
throw new Error("Path is not a regular file", { cause: err });
}
if (stat.size > maxSize) {
throw err;
});
try {
if (opened.stat.size > maxSize) {
throw new Error(
`File is too large (${formatFileSize(stat.size)}); QQ Bot API limit is ${formatFileSize(maxSize)}`,
`File is too large (${formatFileSize(opened.stat.size)}); QQ Bot API limit is ${formatFileSize(maxSize)}`,
);
}
return {
handle,
size: stat.size,
close: () => handle.close(),
handle: opened.handle,
size: opened.stat.size,
close: () => opened.handle.close(),
};
} catch (err) {
// Close the handle on any validation failure to avoid fd leaks.
await handle.close().catch(() => undefined);
await opened.handle.close().catch(() => undefined);
throw err;
}
}
@@ -153,10 +152,9 @@ export async function openLocalFile(
* - Strings passed via `{ url }` that start with `data:` are auto-resolved
* to a `base64` branch (this is the unified `data:` URL support that was
* previously only implemented in `sendImage`).
* - `localPath` branches open the file with {@link openLocalFile} solely to
* validate size / regular-file / O_NOFOLLOW invariants. The handle is
* closed immediately — actual reading is deferred to the uploader so
* the chunked path can stream without double-reading.
* - `localPath` branches open the file with {@link openLocalFile} and carry
* that descriptor to the uploader, so later reads use the exact file that
* passed regular-file / O_NOFOLLOW / size validation.
* - `buffer` branches enforce the same ceiling inline.
*
* `maxSize` defaults to {@link MAX_UPLOAD_SIZE} (20MB, one-shot upload limit).
@@ -188,16 +186,13 @@ export async function normalizeSource(
if ("localPath" in raw) {
const opened = await openLocalFile(raw.localPath, { maxSize });
try {
return {
kind: "localPath",
path: raw.localPath,
size: opened.size,
mime: getMimeType(raw.localPath),
};
} finally {
await opened.close();
}
return {
kind: "localPath",
path: raw.localPath,
size: opened.size,
mime: getMimeType(raw.localPath),
opened,
};
}
// buffer branch

View File

@@ -2,8 +2,11 @@
* Low-level outbound media sends (photo, voice, video, document) and path resolution.
*/
import fs from "node:fs";
import path from "node:path";
import {
pathExistsSync,
resolveLocalPathFromRootsSync,
} from "openclaw/plugin-sdk/security-runtime";
import type { GatewayAccount } from "../types.js";
import { MediaFileType } from "../types.js";
import {
@@ -98,79 +101,32 @@ function isHttpOrDataSource(pathValue: string): boolean {
);
}
/**
 * Report whether `candidate` is `root` itself or lives underneath it.
 *
 * Works purely on path strings (no filesystem access): computes the relative
 * path from `root` to `candidate` and rejects anything that escapes upward
 * (`..` prefix) or lands on a different root/drive (absolute relative path).
 */
function isPathWithinRoot(candidate: string, root: string): boolean {
  const rel = path.relative(root, candidate);
  if (rel === "") {
    return true; // identical paths
  }
  if (path.isAbsolute(rel)) {
    return false; // e.g. different drive on Windows
  }
  return !rel.startsWith("..");
}
function resolveMissingPathWithinMediaRoot(normalizedPath: string): string | null {
const resolvedCandidate = path.resolve(normalizedPath);
if (fs.existsSync(resolvedCandidate)) {
if (pathExistsSync(resolvedCandidate)) {
return null;
}
const allowedRoot = path.resolve(getQQBotMediaDir());
let canonicalAllowedRoot: string;
try {
canonicalAllowedRoot = fs.realpathSync(allowedRoot);
} catch {
return null;
}
const missingSegments: string[] = [];
let cursor = resolvedCandidate;
while (!fs.existsSync(cursor)) {
const parent = path.dirname(cursor);
if (parent === cursor) {
break;
}
missingSegments.unshift(path.basename(cursor));
cursor = parent;
}
if (!fs.existsSync(cursor)) {
return null;
}
let canonicalCursor: string;
try {
canonicalCursor = fs.realpathSync(cursor);
} catch {
return null;
}
const canonicalCandidate =
missingSegments.length > 0 ? path.join(canonicalCursor, ...missingSegments) : canonicalCursor;
return isPathWithinRoot(canonicalCandidate, canonicalAllowedRoot) ? canonicalCandidate : null;
return (
resolveLocalPathFromRootsSync({
filePath: resolvedCandidate,
roots: [getQQBotMediaDir()],
label: "QQ Bot media storage",
allowMissing: true,
})?.path ?? null
);
}
function resolveExistingPathWithinRoots(
normalizedPath: string,
allowedRoots: readonly string[],
): string | null {
const resolvedCandidate = path.resolve(normalizedPath);
if (!fs.existsSync(resolvedCandidate)) {
return null;
}
let canonicalCandidate: string;
try {
canonicalCandidate = fs.realpathSync(resolvedCandidate);
} catch {
return null;
}
for (const root of allowedRoots) {
const resolvedRoot = path.resolve(root);
const canonicalRoot = fs.existsSync(resolvedRoot)
? fs.realpathSync(resolvedRoot)
: resolvedRoot;
if (isPathWithinRoot(canonicalCandidate, canonicalRoot)) {
return canonicalCandidate;
}
}
return null;
return (
resolveLocalPathFromRootsSync({
filePath: normalizedPath,
roots: allowedRoots,
label: "QQ Bot local roots",
})?.path ?? null
);
}
export function resolveOutboundMediaPath(

View File

@@ -596,33 +596,39 @@ async function sendMediaInternal(
maxSize: Number.MAX_SAFE_INTEGER,
});
const uploadResult = await dispatchUpload(
ctx,
scope,
opts.target.id,
KIND_TO_FILE_TYPE[opts.kind],
source,
c,
opts.fileName,
);
try {
const uploadResult = await dispatchUpload(
ctx,
scope,
opts.target.id,
KIND_TO_FILE_TYPE[opts.kind],
source,
c,
opts.fileName,
);
// Content is semantically meaningful only for image / video — the voice
// and file APIs ignore it.
const msgContent = opts.kind === "image" || opts.kind === "video" ? opts.content : undefined;
// Content is semantically meaningful only for image / video — the voice
// and file APIs ignore it.
const msgContent = opts.kind === "image" || opts.kind === "video" ? opts.content : undefined;
const result = await ctx.mediaApi.sendMediaMessage(
scope,
opts.target.id,
uploadResult.file_info,
c,
{
msgId: opts.msgId,
content: msgContent,
},
);
const result = await ctx.mediaApi.sendMediaMessage(
scope,
opts.target.id,
uploadResult.file_info,
c,
{
msgId: opts.msgId,
content: msgContent,
},
);
notifyMediaHook(opts.creds.appId, result, buildOutboundMeta(opts, source));
return result;
notifyMediaHook(opts.creds.appId, result, buildOutboundMeta(opts, source));
return result;
} finally {
if (source.kind === "localPath") {
await source.opened?.close().catch(() => undefined);
}
}
}
/**
@@ -668,6 +674,12 @@ async function dispatchUpload(
fileName,
});
}
if (source.opened) {
return ctx.mediaApi.uploadMedia(scope, targetId, fileType, creds, {
buffer: await source.opened.handle.readFile(),
fileName,
});
}
return ctx.mediaApi.uploadMedia(scope, targetId, fileType, creds, {
localPath: source.path,
fileName,

View File

@@ -7,6 +7,7 @@
import fs from "node:fs";
import path from "node:path";
import { appendRegularFileSync, replaceFileAtomicSync } from "openclaw/plugin-sdk/security-runtime";
import { formatErrorMessage } from "../utils/format.js";
import { debugLog, debugError } from "../utils/log.js";
import { getQQBotDataDir, getQQBotDataPath } from "../utils/platform.js";
@@ -88,7 +89,7 @@ function ensureDir(): void {
function appendLine(line: RefIndexLine): void {
try {
ensureDir();
fs.appendFileSync(getRefIndexFile(), JSON.stringify(line) + "\n", "utf-8");
appendRegularFileSync({ filePath: getRefIndexFile(), content: JSON.stringify(line) + "\n" });
totalLinesOnDisk++;
} catch (err) {
debugError(`[ref-index-store] Failed to append: ${formatErrorMessage(err)}`);
@@ -109,7 +110,6 @@ function compactFile(): void {
try {
ensureDir();
const refIndexFile = getRefIndexFile();
const tmpPath = refIndexFile + ".tmp";
const lines: string[] = [];
for (const [key, entry] of cache) {
lines.push(
@@ -127,8 +127,11 @@ function compactFile(): void {
}),
);
}
fs.writeFileSync(tmpPath, lines.join("\n") + "\n", "utf-8");
fs.renameSync(tmpPath, refIndexFile);
replaceFileAtomicSync({
filePath: refIndexFile,
content: `${lines.join("\n")}\n`,
tempPrefix: ".qqbot-ref-index",
});
totalLinesOnDisk = cache.size;
debugLog(`[ref-index-store] Compacted: ${before} lines → ${totalLinesOnDisk} lines`);
} catch (err) {

View File

@@ -5,8 +5,8 @@
* built-ins + log + platform (both zero plugin-sdk).
*/
import fs from "node:fs";
import path from "node:path";
import { privateFileStoreSync } from "openclaw/plugin-sdk/security-runtime";
import type { ChatScope } from "../types.js";
import { formatErrorMessage } from "../utils/format.js";
import { debugLog, debugError } from "../utils/log.js";
@@ -49,9 +49,10 @@ function loadUsersFromFile(): Map<string, KnownUser> {
usersCache = new Map();
try {
const knownUsersFile = getKnownUsersFile();
if (fs.existsSync(knownUsersFile)) {
const data = fs.readFileSync(knownUsersFile, "utf-8");
const users = JSON.parse(data) as KnownUser[];
const users = privateFileStoreSync(path.dirname(knownUsersFile)).readJsonIfExists<KnownUser[]>(
path.basename(knownUsersFile),
);
if (users) {
for (const user of users) {
usersCache.set(makeUserKey(user), user);
}
@@ -80,10 +81,10 @@ function doSaveUsersToFile(): void {
}
try {
ensureDir();
fs.writeFileSync(
getKnownUsersFile(),
JSON.stringify(Array.from(usersCache.values()), null, 2),
"utf-8",
const filePath = getKnownUsersFile();
privateFileStoreSync(path.dirname(filePath)).writeJson(
path.basename(filePath),
Array.from(usersCache.values()),
);
isDirty = false;
} catch (err) {

View File

@@ -7,6 +7,7 @@
import fs from "node:fs";
import path from "node:path";
import { privateFileStoreSync } from "openclaw/plugin-sdk/security-runtime";
import { formatErrorMessage } from "../utils/format.js";
import { debugLog, debugError } from "../utils/log.js";
import { getQQBotDataDir, getQQBotDataPath } from "../utils/platform.js";
@@ -66,18 +67,20 @@ function getCandidateSessionPaths(accountId: string): string[] {
export function loadSession(accountId: string, expectedAppId?: string): SessionState | null {
try {
let filePath: string | null = null;
let state: SessionState | null = null;
for (const candidatePath of getCandidateSessionPaths(accountId)) {
if (fs.existsSync(candidatePath)) {
state = privateFileStoreSync(path.dirname(candidatePath)).readJsonIfExists<SessionState>(
path.basename(candidatePath),
);
if (state) {
filePath = candidatePath;
break;
}
}
if (!filePath) {
if (!filePath || !state) {
return null;
}
const data = fs.readFileSync(filePath, "utf-8");
const state = JSON.parse(data) as SessionState;
const now = Date.now();
if (now - state.savedAt > SESSION_EXPIRE_TIME) {
@@ -162,7 +165,7 @@ function doSaveSession(state: SessionState): void {
try {
ensureDir();
const stateToSave: SessionState = { ...state, savedAt: Date.now() };
fs.writeFileSync(filePath, JSON.stringify(stateToSave, null, 2), "utf-8");
privateFileStoreSync(path.dirname(filePath)).writeJson(path.basename(filePath), stateToSave);
if (legacyPath !== filePath && fs.existsSync(legacyPath)) {
fs.unlinkSync(legacyPath);
}

View File

@@ -11,6 +11,7 @@
import * as fs from "node:fs";
import * as path from "node:path";
import { readRegularFileSync } from "openclaw/plugin-sdk/security-runtime";
import { formatErrorMessage } from "./format.js";
import { debugLog, debugError, debugWarn } from "./log.js";
import { normalizeLowercaseStringOrEmpty as normalizeLowercase } from "./string-normalize.js";
@@ -81,11 +82,13 @@ export async function convertSilkToWav(
inputPath: string,
outputDir?: string,
): Promise<{ wavPath: string; duration: number } | null> {
if (!fs.existsSync(inputPath)) {
let fileBuf: Buffer;
try {
fileBuf = readRegularFileSync({ filePath: inputPath }).buffer;
} catch {
return null;
}
const fileBuf = fs.readFileSync(inputPath);
const strippedBuf = stripAmrHeader(fileBuf);
const rawData = new Uint8Array(
strippedBuf.buffer,
@@ -188,11 +191,13 @@ export async function audioFileToSilkBase64(
filePath: string,
directUploadFormats?: string[],
): Promise<string | null> {
if (!fs.existsSync(filePath)) {
let buf: Buffer;
try {
buf = readRegularFileSync({ filePath }).buffer;
} catch {
return null;
}
const buf = fs.readFileSync(filePath);
if (buf.length === 0) {
debugError(`[audio-convert] file is empty: ${filePath}`);
return null;

View File

@@ -13,7 +13,13 @@ vi.mock("../adapter/index.js", () => ({
}),
}));
import { QQBOT_MEDIA_SSRF_POLICY, downloadFile } from "./file-utils.js";
import {
QQBOT_MEDIA_SSRF_POLICY,
checkFileSize,
downloadFile,
fileExistsAsync,
readFileAsync,
} from "./file-utils.js";
describe("qqbot file-utils downloadFile", () => {
let tempDir: string;
@@ -69,4 +75,15 @@ describe("qqbot file-utils downloadFile", () => {
expect(savedPath).toBeNull();
expect(adapterMocks.fetchMedia).not.toHaveBeenCalled();
});
// O_NOFOLLOW hardening: every local-file helper must refuse to follow a
// symlink, even one pointing at a perfectly valid file inside the temp dir.
// Skipped on win32 — NOTE(review): presumably because symlink creation there
// needs elevated rights and the no-follow open semantics differ; confirm.
it.skipIf(process.platform === "win32")("rejects symlinked local media helpers", async () => {
const targetPath = path.join(tempDir, "target.png");
const linkPath = path.join(tempDir, "link.png");
await fs.promises.writeFile(targetPath, "image-bytes");
await fs.promises.symlink(targetPath, linkPath);
// All three entry points must fail on the link, not resolve the target.
expect(checkFileSize(linkPath).ok).toBe(false);
await expect(readFileAsync(linkPath)).rejects.toThrow();
await expect(fileExistsAsync(linkPath)).resolves.toBe(false);
});
});

View File

@@ -1,6 +1,11 @@
import crypto from "node:crypto";
import * as fs from "node:fs";
import * as path from "node:path";
import {
openLocalFileSafely,
readRegularFile,
statRegularFileSync,
} from "openclaw/plugin-sdk/security-runtime";
import { getPlatformAdapter } from "../adapter/index.js";
import type { SsrfPolicyConfig } from "../adapter/types.js";
import { MediaFileType } from "../types.js";
@@ -72,17 +77,20 @@ interface FileSizeCheckResult {
/** Validate that a file is within the allowed upload size. */
export function checkFileSize(filePath: string, maxSize = MAX_UPLOAD_SIZE): FileSizeCheckResult {
try {
const stat = fs.statSync(filePath);
if (stat.size > maxSize) {
const sizeMB = (stat.size / (1024 * 1024)).toFixed(1);
const result = statRegularFileSync(filePath);
if (result.missing) {
throw Object.assign(new Error(`File not found: ${filePath}`), { code: "ENOENT" });
}
if (result.stat.size > maxSize) {
const sizeMB = (result.stat.size / (1024 * 1024)).toFixed(1);
const limitMB = (maxSize / (1024 * 1024)).toFixed(0);
return {
ok: false,
size: stat.size,
size: result.stat.size,
error: `File is too large (${sizeMB}MB); QQ Bot API limit is ${limitMB}MB`,
};
}
return { ok: true, size: stat.size };
return { ok: true, size: result.stat.size };
} catch (err) {
return {
ok: false,
@@ -94,16 +102,21 @@ export function checkFileSize(filePath: string, maxSize = MAX_UPLOAD_SIZE): File
/** Read file contents asynchronously. */
export async function readFileAsync(filePath: string): Promise<Buffer> {
return fs.promises.readFile(filePath);
return (await readRegularFile({ filePath })).buffer;
}
/** Check file readability asynchronously. */
export async function fileExistsAsync(filePath: string): Promise<boolean> {
const opened = await openLocalFileSafely({ filePath }).catch(() => null);
if (!opened) {
return false;
}
try {
await fs.promises.access(filePath, fs.constants.R_OK);
return true;
} catch {
return false;
} finally {
await opened.handle.close().catch(() => undefined);
}
}