fix(security): bound archive and MIME parser work (#71561)

* fix(security): bound archive and MIME parser work

* fix(security): harden zip preflight accounting

* fix(plugins): keep update channel sync on bundled path helpers

* fix(lint): avoid boolean literal comparisons

* fix(lint): keep agent spawn assertion immutable

* test(auto-reply): relax slow model directive regression timeout
This commit is contained in:
Vincent Koc
2026-04-25 06:22:56 -07:00
committed by GitHub
parent 4e3bf7ce6a
commit c070509b7f
10 changed files with 435 additions and 34 deletions

View File

@@ -28,6 +28,9 @@ Docs: https://docs.openclaw.ai
- Browser/CDP: make readiness diagnostics use the same discovery-first fallback as reachability for bare `ws://` Browserless and Browserbase CDP URLs. Fixes #69532.
- ACP/OpenCode: update the bundled acpx runtime to 0.6.0 and cover the OpenCode ACP bind path in Docker live tests.
- Browser/existing-session: support per-profile Chrome MCP command/args, map `cdpUrl` to `--browserUrl` or `--wsEndpoint`, and avoid combining endpoint flags with `--userDataDir`. Fixes #47879, #48037, and #62706. Thanks @puneet1409, @zhehao, and @madkow1001.
- Media/plugins: bound MIME sniffing and ZIP archive preflight before handing
untrusted files to `file-type` or `jszip`, reducing parser CPU and memory
exposure for attachments and ClawHub plugin archives. Thanks @vincentkoc.
- Memory-host SDK: use trusted env-proxy mode for remote embedding and batch HTTP calls only when Undici will proxy that target, preserving SSRF DNS pinning for `ALL_PROXY`-only and `NO_PROXY` bypass cases. Fixes #52162. (#71506) Thanks @DhtIsCoding.
- Gateway/dashboard: render Control UI and WebSocket links with `https://`/`wss://` when `gateway.tls.enabled=true`, including `openclaw gateway status`. Fixes #71494. (#71499) Thanks @deepkilo.
- Agents/OpenAI-compatible: default proxy/local completions tool requests to `tool_choice: "auto"` when tools are present, so providers enter native tool-calling mode instead of replying with plain-text tool directives. (#71472) Thanks @Speed-maker.

View File

@@ -550,7 +550,7 @@ describe("/model chat UX", () => {
isDefault: false,
});
expect(resolved.profileOverride).toBeUndefined();
});
}, 240_000);
it("persists inferred numeric auth-profile overrides for mixed-content messages", async () => {
const { sessionEntry } = await persistModelDirectiveForTest({

View File

@@ -6,7 +6,11 @@ import { afterAll, beforeAll, describe, expect, it, vi } from "vitest";
import { createSuiteTempRootTracker } from "../test-helpers/temp-dir.js";
import { withRealpathSymlinkRebindRace } from "../test-utils/symlink-rebind-race.js";
import type { ArchiveSecurityError } from "./archive.js";
import { extractArchive, resolvePackedRootDir } from "./archive.js";
import {
extractArchive,
readZipCentralDirectoryEntryCount,
resolvePackedRootDir,
} from "./archive.js";
const fixtureRootTracker = createSuiteTempRootTracker({ prefix: "openclaw-archive-" });
const directorySymlinkType = process.platform === "win32" ? "junction" : undefined;
@@ -71,6 +75,31 @@ async function expectExtractedSizeBudgetExceeded(params: {
).rejects.toThrow("archive extracted size exceeds limit");
}
/**
 * Builds a minimal in-memory ZIP byte sequence: a run of central directory
 * file headers followed by an End Of Central Directory record. Lets tests
 * declare an EOCD entry count / directory size that disagrees with the
 * headers actually present.
 */
function createZipCentralDirectoryArchive(params: {
  actualEntryCount: number;
  declaredEntryCount?: number;
  declaredCentralDirectorySize?: number;
}): Buffer {
  const headers: Buffer[] = [];
  for (let index = 0; index < params.actualEntryCount; index += 1) {
    const entryName = Buffer.from(`file-${index}.txt`);
    const record = Buffer.alloc(46 + entryName.byteLength);
    record.writeUInt32LE(0x02014b50, 0); // central file header signature
    record.writeUInt16LE(entryName.byteLength, 28); // file name length field
    entryName.copy(record, 46);
    headers.push(record);
  }
  const centralDirectory = Buffer.concat(headers);
  const declared = Math.min(params.declaredEntryCount ?? params.actualEntryCount, 0xffff);
  const eocd = Buffer.alloc(22);
  eocd.writeUInt32LE(0x06054b50, 0); // EOCD signature
  eocd.writeUInt16LE(declared, 8); // entries on this disk
  eocd.writeUInt16LE(declared, 10); // total entries
  eocd.writeUInt32LE(params.declaredCentralDirectorySize ?? centralDirectory.byteLength, 12);
  eocd.writeUInt32LE(0, 16); // central directory starts at offset 0
  return Buffer.concat([centralDirectory, eocd]);
}
beforeAll(async () => {
await fixtureRootTracker.setup();
});
@@ -346,6 +375,27 @@ describe("archive utils", () => {
},
);
it("rejects zip archives whose actual central directory exceeds the entry limit before parsing", async () => {
await withArchiveCase("zip", async ({ archivePath, extractDir }) => {
const archiveBytes = createZipCentralDirectoryArchive({
actualEntryCount: 2,
declaredEntryCount: 1,
declaredCentralDirectorySize: 0,
});
await fs.writeFile(archivePath, archiveBytes);
expect(readZipCentralDirectoryEntryCount(archiveBytes)).toBe(2);
await expect(
extractArchive({
archivePath,
destDir: extractDir,
timeoutMs: ARCHIVE_EXTRACT_TIMEOUT_MS,
limits: { maxEntries: 1 },
}),
).rejects.toThrow("archive entry count exceeds limit");
});
});
it("rejects tar entries with absolute extraction paths", async () => {
await withArchiveCase("tar", async ({ workDir, archivePath, extractDir }) => {
const inputDir = path.join(workDir, "input");

View File

@@ -62,11 +62,55 @@ export const DEFAULT_MAX_EXTRACTED_BYTES = 512 * 1024 * 1024;
/** @internal */
export const DEFAULT_MAX_ENTRY_BYTES = 256 * 1024 * 1024;
const ERROR_ARCHIVE_SIZE_EXCEEDS_LIMIT = "archive size exceeds limit";
const ERROR_ARCHIVE_ENTRY_COUNT_EXCEEDS_LIMIT = "archive entry count exceeds limit";
const ERROR_ARCHIVE_ENTRY_EXTRACTED_SIZE_EXCEEDS_LIMIT =
"archive entry extracted size exceeds limit";
const ERROR_ARCHIVE_EXTRACTED_SIZE_EXCEEDS_LIMIT = "archive extracted size exceeds limit";
export const ARCHIVE_LIMIT_ERROR_CODE = {
ARCHIVE_SIZE_EXCEEDS_LIMIT: "archive-size-exceeds-limit",
ENTRY_COUNT_EXCEEDS_LIMIT: "archive-entry-count-exceeds-limit",
ENTRY_EXTRACTED_SIZE_EXCEEDS_LIMIT: "archive-entry-extracted-size-exceeds-limit",
EXTRACTED_SIZE_EXCEEDS_LIMIT: "archive-extracted-size-exceeds-limit",
} as const;
export type ArchiveLimitErrorCode =
(typeof ARCHIVE_LIMIT_ERROR_CODE)[keyof typeof ARCHIVE_LIMIT_ERROR_CODE];
const ARCHIVE_LIMIT_ERROR_MESSAGE = {
[ARCHIVE_LIMIT_ERROR_CODE.ARCHIVE_SIZE_EXCEEDS_LIMIT]: "archive size exceeds limit",
[ARCHIVE_LIMIT_ERROR_CODE.ENTRY_COUNT_EXCEEDS_LIMIT]: "archive entry count exceeds limit",
[ARCHIVE_LIMIT_ERROR_CODE.ENTRY_EXTRACTED_SIZE_EXCEEDS_LIMIT]:
"archive entry extracted size exceeds limit",
[ARCHIVE_LIMIT_ERROR_CODE.EXTRACTED_SIZE_EXCEEDS_LIMIT]: "archive extracted size exceeds limit",
} as const satisfies Record<ArchiveLimitErrorCode, string>;
/**
 * Error raised when archive preflight or extraction exceeds a configured
 * limit. `code` identifies which limit tripped; the message is the stable
 * human-readable text mapped from that code.
 */
export class ArchiveLimitError extends Error {
  constructor(readonly code: ArchiveLimitErrorCode) {
    super(ARCHIVE_LIMIT_ERROR_MESSAGE[code]);
    this.name = "ArchiveLimitError";
  }
}
const ZIP_EOCD_SIGNATURE = 0x06054b50;
const ZIP64_EOCD_SIGNATURE = 0x06064b50;
const ZIP64_EOCD_LOCATOR_SIGNATURE = 0x07064b50;
const ZIP_EOCD_MIN_BYTES = 22;
const ZIP_EOCD_MAX_COMMENT_BYTES = 0xffff;
const ZIP64_ENTRY_COUNT_SENTINEL = 0xffff;
const ZIP64_UINT32_SENTINEL = 0xffffffff;
const ZIP_CENTRAL_FILE_HEADER_SIGNATURE = 0x02014b50;
const ZIP_CENTRAL_FILE_HEADER_MIN_BYTES = 46;
const ZIP_CENTRAL_FILE_HEADER_NAME_LENGTH_OFFSET = 28;
const ZIP_CENTRAL_FILE_HEADER_EXTRA_LENGTH_OFFSET = 30;
const ZIP_CENTRAL_FILE_HEADER_COMMENT_LENGTH_OFFSET = 32;
const ZIP_EOCD_TOTAL_ENTRIES_OFFSET = 10;
const ZIP_EOCD_CENTRAL_DIRECTORY_SIZE_OFFSET = 12;
const ZIP_EOCD_CENTRAL_DIRECTORY_OFFSET_OFFSET = 16;
const ZIP_EOCD_COMMENT_LENGTH_OFFSET = 20;
const ZIP64_EOCD_LOCATOR_BYTES = 20;
const ZIP64_EOCD_OFFSET_OFFSET = 8;
const ZIP64_EOCD_TOTAL_ENTRIES_OFFSET = 32;
const ZIP64_EOCD_CENTRAL_DIRECTORY_SIZE_OFFSET = 40;
const ZIP64_EOCD_CENTRAL_DIRECTORY_OFFSET_OFFSET = 48;
const SUPPORTS_NOFOLLOW = process.platform !== "win32" && "O_NOFOLLOW" in fsConstants;
const OPEN_WRITE_CREATE_FLAGS =
fsConstants.O_WRONLY |
@@ -188,10 +232,187 @@ function assertArchiveEntryCountWithinLimit(
limits: ResolvedArchiveExtractLimits,
) {
if (entryCount > limits.maxEntries) {
throw new Error(ERROR_ARCHIVE_ENTRY_COUNT_EXCEEDS_LIMIT);
throw new ArchiveLimitError(ARCHIVE_LIMIT_ERROR_CODE.ENTRY_COUNT_EXCEEDS_LIMIT);
}
}
/** Returns the input bytes as a Buffer without copying the underlying memory. */
function asBufferView(buffer: Buffer | Uint8Array): Buffer {
  return Buffer.isBuffer(buffer)
    ? buffer
    : Buffer.from(buffer.buffer, buffer.byteOffset, buffer.byteLength);
}
/**
 * Reads a little-endian unsigned 64-bit field, clamped to
 * Number.MAX_SAFE_INTEGER so downstream offset arithmetic stays exact.
 */
function readSafeUInt64LE(buffer: Buffer, offset: number): number {
  const raw = buffer.readBigUInt64LE(offset);
  const ceiling = BigInt(Number.MAX_SAFE_INTEGER);
  return raw > ceiling ? Number.MAX_SAFE_INTEGER : Number(raw);
}
/**
 * Scans backward from the end of the buffer for the classic EOCD record.
 * Only accepts a candidate whose comment-length field exactly spans the
 * remaining bytes, which rejects EOCD signatures that merely appear inside
 * comment data. Returns the EOCD offset, or -1 when none is found.
 */
function findZipEndOfCentralDirectory(buffer: Buffer): number {
  if (buffer.byteLength < ZIP_EOCD_MIN_BYTES) {
    return -1;
  }
  const lastCandidate = buffer.byteLength - ZIP_EOCD_MIN_BYTES;
  // The comment can be at most 0xffff bytes, bounding the backward scan.
  const firstCandidate = Math.max(0, lastCandidate - ZIP_EOCD_MAX_COMMENT_BYTES);
  for (let offset = lastCandidate; offset >= firstCandidate; offset -= 1) {
    if (buffer.readUInt32LE(offset) !== ZIP_EOCD_SIGNATURE) {
      continue;
    }
    const commentLength = buffer.readUInt16LE(offset + ZIP_EOCD_COMMENT_LENGTH_OFFSET);
    if (offset + ZIP_EOCD_MIN_BYTES + commentLength === buffer.byteLength) {
      return offset;
    }
  }
  return -1;
}
type ZipCentralDirectoryInfo = {
declaredEntryCount: number;
centralDirectoryOffset: number;
centralDirectorySize: number;
endOfCentralDirectoryOffset: number;
};
/**
 * Resolves ZIP64 central directory metadata via the ZIP64 EOCD locator
 * that sits immediately before the classic EOCD record. Returns null when
 * the locator or the ZIP64 EOCD record is absent, out of bounds, or has
 * the wrong signature, so the caller can fall back to the classic fields.
 */
function readZip64CentralDirectoryInfo(
  buffer: Buffer,
  eocdOffset: number,
): ZipCentralDirectoryInfo | null {
  const locatorOffset = eocdOffset - ZIP64_EOCD_LOCATOR_BYTES;
  if (locatorOffset < 0) {
    return null;
  }
  if (buffer.readUInt32LE(locatorOffset) !== ZIP64_EOCD_LOCATOR_SIGNATURE) {
    return null;
  }
  const zip64Offset = readSafeUInt64LE(buffer, locatorOffset + ZIP64_EOCD_OFFSET_OFFSET);
  // Bounds-check before reading the record so a bogus locator offset cannot
  // trigger an out-of-range read below.
  const requiredEnd = zip64Offset + ZIP64_EOCD_CENTRAL_DIRECTORY_OFFSET_OFFSET + 8;
  if (zip64Offset < 0 || requiredEnd > buffer.byteLength) {
    return null;
  }
  if (buffer.readUInt32LE(zip64Offset) !== ZIP64_EOCD_SIGNATURE) {
    return null;
  }
  return {
    declaredEntryCount: readSafeUInt64LE(buffer, zip64Offset + ZIP64_EOCD_TOTAL_ENTRIES_OFFSET),
    centralDirectorySize: readSafeUInt64LE(
      buffer,
      zip64Offset + ZIP64_EOCD_CENTRAL_DIRECTORY_SIZE_OFFSET,
    ),
    centralDirectoryOffset: readSafeUInt64LE(
      buffer,
      zip64Offset + ZIP64_EOCD_CENTRAL_DIRECTORY_OFFSET_OFFSET,
    ),
    endOfCentralDirectoryOffset: eocdOffset,
  };
}
/**
 * Reads central directory metadata from the classic EOCD record, upgrading
 * to the ZIP64 record's values when any classic field carries a ZIP64
 * sentinel. Returns null when no EOCD record can be located at all.
 */
function readZipCentralDirectoryInfo(buffer: Buffer): ZipCentralDirectoryInfo | null {
  const eocdOffset = findZipEndOfCentralDirectory(buffer);
  if (eocdOffset < 0) {
    return null;
  }
  const classic: ZipCentralDirectoryInfo = {
    declaredEntryCount: buffer.readUInt16LE(eocdOffset + ZIP_EOCD_TOTAL_ENTRIES_OFFSET),
    centralDirectorySize: buffer.readUInt32LE(eocdOffset + ZIP_EOCD_CENTRAL_DIRECTORY_SIZE_OFFSET),
    centralDirectoryOffset: buffer.readUInt32LE(
      eocdOffset + ZIP_EOCD_CENTRAL_DIRECTORY_OFFSET_OFFSET,
    ),
    endOfCentralDirectoryOffset: eocdOffset,
  };
  const hasZip64Sentinel =
    classic.declaredEntryCount === ZIP64_ENTRY_COUNT_SENTINEL ||
    classic.centralDirectorySize === ZIP64_UINT32_SENTINEL ||
    classic.centralDirectoryOffset === ZIP64_UINT32_SENTINEL;
  if (!hasZip64Sentinel) {
    return classic;
  }
  // If the ZIP64 record is missing or corrupt, fall back to the classic
  // (sentinel-bearing) values rather than failing outright.
  return readZip64CentralDirectoryInfo(buffer, eocdOffset) ?? classic;
}
/**
 * Walks the central directory file headers between the declared directory
 * start and the EOCD record, returning how many well-formed headers are
 * actually present. Returns null when the declared bounds are invalid or a
 * header's variable-length fields overrun the directory, so the caller
 * falls back to the EOCD-declared entry count instead.
 */
function countZipCentralDirectoryHeaders(
  buffer: Buffer,
  info: ZipCentralDirectoryInfo,
): number | null {
  const start = info.centralDirectoryOffset;
  const declaredEnd = start + info.centralDirectorySize;
  const scanEnd = info.endOfCentralDirectoryOffset;
  const boundsValid =
    Number.isSafeInteger(start) &&
    Number.isSafeInteger(declaredEnd) &&
    Number.isSafeInteger(scanEnd) &&
    start >= 0 &&
    declaredEnd >= start &&
    scanEnd >= start &&
    scanEnd <= buffer.byteLength;
  if (!boundsValid) {
    return null;
  }
  let count = 0;
  let cursor = start;
  while (cursor < scanEnd) {
    if (scanEnd - cursor < ZIP_CENTRAL_FILE_HEADER_MIN_BYTES) {
      break; // trailing bytes too short for another header; stop counting
    }
    if (buffer.readUInt32LE(cursor) !== ZIP_CENTRAL_FILE_HEADER_SIGNATURE) {
      break; // non-header bytes; stop counting
    }
    const nameLength = buffer.readUInt16LE(cursor + ZIP_CENTRAL_FILE_HEADER_NAME_LENGTH_OFFSET);
    const extraLength = buffer.readUInt16LE(cursor + ZIP_CENTRAL_FILE_HEADER_EXTRA_LENGTH_OFFSET);
    const commentLength = buffer.readUInt16LE(
      cursor + ZIP_CENTRAL_FILE_HEADER_COMMENT_LENGTH_OFFSET,
    );
    const next =
      cursor + ZIP_CENTRAL_FILE_HEADER_MIN_BYTES + nameLength + extraLength + commentLength;
    if (next <= cursor || next > scanEnd) {
      return null; // header claims bytes past the directory; scan unreliable
    }
    count += 1;
    cursor = next;
  }
  // A zero count is only trustworthy when the archive also declares zero
  // entries; otherwise report the scan as inconclusive.
  return count > 0 || info.declaredEntryCount === 0 ? count : null;
}
/**
 * Best-effort ZIP entry count used for preflight limit checks: returns the
 * larger of the EOCD-declared count and the number of headers actually
 * found, so neither an understated EOCD nor extra padded headers bypass
 * the entry limit. Returns null when no EOCD record is found.
 * @internal
 */
export function readZipCentralDirectoryEntryCount(buffer: Buffer | Uint8Array): number | null {
  const view = asBufferView(buffer);
  const info = readZipCentralDirectoryInfo(view);
  if (info === null) {
    return null;
  }
  const counted = countZipCentralDirectoryHeaders(view, info);
  if (counted === null) {
    return info.declaredEntryCount;
  }
  return Math.max(info.declaredEntryCount, counted);
}
/**
 * Loads a ZIP archive with JSZip after cheap preflight checks: rejects
 * archives over the byte budget and archives whose central directory
 * declares/contains more entries than allowed, before any JSZip parsing
 * work is spent on untrusted bytes.
 *
 * @throws ArchiveLimitError when a size or entry-count limit is exceeded.
 */
export async function loadZipArchiveWithPreflight(
  buffer: Buffer | Uint8Array,
  limits?: ArchiveExtractLimits,
): Promise<JSZip> {
  const resolvedLimits = resolveExtractLimits(limits);
  if (buffer.byteLength > resolvedLimits.maxArchiveBytes) {
    throw new ArchiveLimitError(ARCHIVE_LIMIT_ERROR_CODE.ARCHIVE_SIZE_EXCEEDS_LIMIT);
  }
  // Entry-count preflight is best-effort: skip it when no EOCD was found.
  const preflightEntryCount = readZipCentralDirectoryEntryCount(buffer);
  if (preflightEntryCount !== null) {
    assertArchiveEntryCountWithinLimit(preflightEntryCount, resolvedLimits);
  }
  return await JSZip.loadAsync(buffer);
}
function createByteBudgetTracker(limits: ResolvedArchiveExtractLimits): {
startEntry: () => void;
addBytes: (bytes: number) => void;
@@ -207,11 +428,11 @@ function createByteBudgetTracker(limits: ResolvedArchiveExtractLimits): {
}
entryBytes += b;
if (entryBytes > limits.maxEntryBytes) {
throw new Error(ERROR_ARCHIVE_ENTRY_EXTRACTED_SIZE_EXCEEDS_LIMIT);
throw new ArchiveLimitError(ARCHIVE_LIMIT_ERROR_CODE.ENTRY_EXTRACTED_SIZE_EXCEEDS_LIMIT);
}
extractedBytes += b;
if (extractedBytes > limits.maxExtractedBytes) {
throw new Error(ERROR_ARCHIVE_EXTRACTED_SIZE_EXCEEDS_LIMIT);
throw new ArchiveLimitError(ARCHIVE_LIMIT_ERROR_CODE.EXTRACTED_SIZE_EXCEEDS_LIMIT);
}
};
@@ -223,7 +444,7 @@ function createByteBudgetTracker(limits: ResolvedArchiveExtractLimits): {
addEntrySize(size: number) {
const s = Math.max(0, Math.floor(size));
if (s > limits.maxEntryBytes) {
throw new Error(ERROR_ARCHIVE_ENTRY_EXTRACTED_SIZE_EXCEEDS_LIMIT);
throw new ArchiveLimitError(ARCHIVE_LIMIT_ERROR_CODE.ENTRY_EXTRACTED_SIZE_EXCEEDS_LIMIT);
}
// Note: tar budgets are based on the header-declared size.
addBytes(s);
@@ -462,11 +683,11 @@ async function extractZip(params: {
const destinationRealDir = await prepareArchiveDestinationDir(params.destDir);
const stat = await fs.stat(params.archivePath);
if (stat.size > limits.maxArchiveBytes) {
throw new Error(ERROR_ARCHIVE_SIZE_EXCEEDS_LIMIT);
throw new ArchiveLimitError(ARCHIVE_LIMIT_ERROR_CODE.ARCHIVE_SIZE_EXCEEDS_LIMIT);
}
const buffer = await fs.readFile(params.archivePath);
const zip = await JSZip.loadAsync(buffer);
const zip = await loadZipArchiveWithPreflight(buffer, limits);
const entries = Object.values(zip.files) as ZipEntry[];
const strip = Math.max(0, Math.floor(params.stripComponents ?? 0));
@@ -594,7 +815,7 @@ export async function extractArchive(params: {
const limits = resolveExtractLimits(params.limits);
const stat = await fs.stat(params.archivePath);
if (stat.size > limits.maxArchiveBytes) {
throw new Error(ERROR_ARCHIVE_SIZE_EXCEEDS_LIMIT);
throw new ArchiveLimitError(ARCHIVE_LIMIT_ERROR_CODE.ARCHIVE_SIZE_EXCEEDS_LIMIT);
}
const destinationRealDir = await prepareArchiveDestinationDir(params.destDir);

View File

@@ -4,10 +4,12 @@ import { mediaKindFromMime } from "./constants.js";
import {
detectMime,
extensionForMime,
FILE_TYPE_SNIFF_MAX_BYTES,
imageMimeFromFormat,
isAudioFileName,
kindFromMime,
normalizeMimeType,
sliceMimeSniffBuffer,
} from "./mime.js";
async function makeOoxmlZip(opts: { mainMime: string; partPath: string }): Promise<Buffer> {
@@ -115,6 +117,14 @@ describe("mime detection", () => {
const mime = await detectMime({ buffer: Buffer.alloc(16), filePath: "voice.aac" });
expect(mime).toBe("audio/aac");
});
it("caps dependency sniffing to a bounded prefix", () => {
const small = Buffer.alloc(32);
const large = Buffer.alloc(FILE_TYPE_SNIFF_MAX_BYTES + 16);
expect(sliceMimeSniffBuffer(small)).toBe(small);
expect(sliceMimeSniffBuffer(large)).toHaveLength(FILE_TYPE_SNIFF_MAX_BYTES);
});
});
describe("extensionForMime", () => {

View File

@@ -2,6 +2,9 @@ import path from "node:path";
import { fileTypeFromBuffer } from "file-type";
import { type MediaKind, mediaKindFromMime } from "./constants.js";
/** @internal */
export const FILE_TYPE_SNIFF_MAX_BYTES = 1024 * 1024;
// Map common mimes to preferred file extensions.
const EXT_BY_MIME: Record<string, string> = {
"image/heic": ".heic",
@@ -71,12 +74,20 @@ export function normalizeMimeType(mime?: string | null): string | undefined {
return cleaned || undefined;
}
/**
 * Caps the bytes handed to `file-type` sniffing at FILE_TYPE_SNIFF_MAX_BYTES.
 * Returns the buffer unchanged when it is already within the cap, otherwise
 * a zero-copy prefix view.
 * @internal
 */
export function sliceMimeSniffBuffer(buffer: Buffer): Buffer {
  return buffer.byteLength > FILE_TYPE_SNIFF_MAX_BYTES
    ? buffer.subarray(0, FILE_TYPE_SNIFF_MAX_BYTES)
    : buffer;
}
async function sniffMime(buffer?: Buffer): Promise<string | undefined> {
if (!buffer) {
return undefined;
}
try {
const type = await fileTypeFromBuffer(buffer);
const type = await fileTypeFromBuffer(sliceMimeSniffBuffer(buffer));
return type?.mime ?? undefined;
} catch {
return undefined;

View File

@@ -97,6 +97,31 @@ function createLoggerSpies() {
};
}
/**
 * Builds a minimal ZIP byte sequence (central directory headers + EOCD
 * record) whose declared entry count / directory size can diverge from the
 * headers actually present, for exercising preflight limit checks.
 */
function createZipCentralDirectoryArchive(params: {
  actualEntryCount: number;
  declaredEntryCount?: number;
  declaredCentralDirectorySize?: number;
}): Buffer {
  const makeHeader = (index: number): Buffer => {
    const entryName = Buffer.from(`file-${index}.txt`);
    const record = Buffer.alloc(46 + entryName.byteLength);
    record.writeUInt32LE(0x02014b50, 0); // central file header signature
    record.writeUInt16LE(entryName.byteLength, 28); // file name length field
    entryName.copy(record, 46);
    return record;
  };
  const centralDirectory = Buffer.concat(
    Array.from({ length: params.actualEntryCount }, (_, index) => makeHeader(index)),
  );
  const declared = Math.min(params.declaredEntryCount ?? params.actualEntryCount, 0xffff);
  const eocd = Buffer.alloc(22);
  eocd.writeUInt32LE(0x06054b50, 0); // EOCD signature
  eocd.writeUInt16LE(declared, 8); // entries on this disk
  eocd.writeUInt16LE(declared, 10); // total entries
  eocd.writeUInt32LE(params.declaredCentralDirectorySize ?? centralDirectory.byteLength, 12);
  eocd.writeUInt32LE(0, 16); // central directory starts at offset 0
  return Buffer.concat([centralDirectory, eocd]);
}
function expectClawHubInstallFlow(params: {
baseUrl: string;
version: string;
@@ -1063,6 +1088,57 @@ describe("installPluginFromClawHub", () => {
expect(installPluginFromArchiveMock).not.toHaveBeenCalled();
});
it("rejects fallback verification when the actual ZIP central directory exceeds the entry limit", async () => {
const dir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-clawhub-archive-"));
tempDirs.push(dir);
const archivePath = path.join(dir, "archive.zip");
await fs.writeFile(
archivePath,
createZipCentralDirectoryArchive({
actualEntryCount: 50_001,
declaredEntryCount: 1,
declaredCentralDirectorySize: 0,
}),
);
const loadAsyncSpy = vi.spyOn(JSZip, "loadAsync");
fetchClawHubPackageVersionMock.mockResolvedValueOnce({
version: {
version: "2026.3.22",
createdAt: 0,
changelog: "",
files: [
{
path: "openclaw.plugin.json",
size: 13,
sha256: sha256Hex('{"id":"demo"}'),
},
],
compatibility: {
pluginApiRange: ">=2026.3.22",
minGatewayVersion: "2026.3.0",
},
},
});
downloadClawHubPackageArchiveMock.mockResolvedValueOnce({
archivePath,
integrity: "sha256-not-used-in-fallback",
cleanup: archiveCleanupMock,
});
const result = await installPluginFromClawHub({
spec: "clawhub:demo",
});
loadAsyncSpy.mockRestore();
expect(result).toMatchObject({
ok: false,
code: CLAWHUB_INSTALL_ERROR_CODE.ARCHIVE_INTEGRITY_MISMATCH,
error: "ClawHub archive fallback verification exceeded the archive entry limit.",
});
expect(loadAsyncSpy).not.toHaveBeenCalled();
expect(installPluginFromArchiveMock).not.toHaveBeenCalled();
});
it("rejects fallback verification when the downloaded archive exceeds the ZIP size limit", async () => {
const dir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-clawhub-archive-"));
tempDirs.push(dir);

View File

@@ -2,10 +2,13 @@ import { createHash } from "node:crypto";
import fs from "node:fs/promises";
import JSZip from "jszip";
import {
ARCHIVE_LIMIT_ERROR_CODE,
ArchiveLimitError,
DEFAULT_MAX_ARCHIVE_BYTES_ZIP,
DEFAULT_MAX_ENTRIES,
DEFAULT_MAX_EXTRACTED_BYTES,
DEFAULT_MAX_ENTRY_BYTES,
loadZipArchiveWithPreflight,
} from "../infra/archive.js";
import {
ClawHubRequestError,
@@ -135,7 +138,7 @@ function isClawHubInstallFailure(value: unknown): value is ClawHubInstallFailure
value &&
typeof value === "object" &&
"ok" in value &&
(value as { ok?: unknown }).ok === false &&
Object.is((value as { ok?: unknown }).ok, false) &&
"error" in value,
);
}
@@ -458,6 +461,27 @@ function validateClawHubArchiveMetaJson(params: {
return null;
}
/**
 * Translates an error thrown while reading the downloaded ClawHub archive
 * into a ClawHubInstallFailure. Archive limit violations from the ZIP
 * preflight get dedicated wording; every case maps to the
 * ARCHIVE_INTEGRITY_MISMATCH install error code.
 */
function mapClawHubArchiveReadFailure(error: unknown): ClawHubInstallFailure {
  let message =
    "ClawHub archive fallback verification failed while reading the downloaded archive.";
  if (error instanceof ArchiveLimitError) {
    if (error.code === ARCHIVE_LIMIT_ERROR_CODE.ENTRY_COUNT_EXCEEDS_LIMIT) {
      message = "ClawHub archive fallback verification exceeded the archive entry limit.";
    } else if (error.code === ARCHIVE_LIMIT_ERROR_CODE.ARCHIVE_SIZE_EXCEEDS_LIMIT) {
      message =
        "ClawHub archive fallback verification rejected the downloaded archive because it exceeds the ZIP archive size limit.";
    }
  }
  return buildClawHubInstallFailure(message, CLAWHUB_INSTALL_ERROR_CODE.ARCHIVE_INTEGRITY_MISMATCH);
}
async function verifyClawHubArchiveFiles(params: {
archivePath: string;
packageName: string;
@@ -473,7 +497,12 @@ async function verifyClawHubArchiveFiles(params: {
);
}
const archiveBytes = await fs.readFile(params.archivePath);
const zip = await JSZip.loadAsync(archiveBytes);
const zip = await loadZipArchiveWithPreflight(archiveBytes, {
maxArchiveBytes: DEFAULT_MAX_ARCHIVE_BYTES_ZIP,
maxEntries: DEFAULT_MAX_ENTRIES,
maxExtractedBytes: DEFAULT_MAX_EXTRACTED_BYTES,
maxEntryBytes: DEFAULT_MAX_ENTRY_BYTES,
});
const actualFiles = new Map<string, string>();
const validatedGeneratedPaths = new Set<string>();
let entryCount = 0;
@@ -555,11 +584,8 @@ async function verifyClawHubArchiveFiles(params: {
ok: true,
validatedGeneratedPaths: [...validatedGeneratedPaths].toSorted(),
};
} catch {
return buildClawHubInstallFailure(
"ClawHub archive fallback verification failed while reading the downloaded archive.",
CLAWHUB_INSTALL_ERROR_CODE.ARCHIVE_INTEGRITY_MISMATCH,
);
} catch (error) {
return mapClawHubArchiveReadFailure(error);
}
}

View File

@@ -43,3 +43,10 @@ export function getExternalizedBundledPluginLookupIds(
),
);
}
/**
 * Legacy installed-path suffix ("extensions/<dir>") for an externalized
 * bundled plugin, used to recognize plugin records and load paths that
 * still point at the old bundled extension directory. Falls back to the
 * plugin id when no explicit bundled directory name is set.
 */
export function getExternalizedBundledPluginLegacyPathSuffix(
  bridge: ExternalizedBundledPluginBridge,
): string {
  const dirName = bridge.bundledDirName ?? bridge.bundledPluginId;
  return `extensions/${dirName}`;
}

View File

@@ -12,6 +12,7 @@ import { resolveBundledPluginSources } from "./bundled-sources.js";
import { installPluginFromClawHub } from "./clawhub.js";
import { normalizePluginsConfig, resolveEffectiveEnableState } from "./config-state.js";
import {
getExternalizedBundledPluginLegacyPathSuffix,
getExternalizedBundledPluginLookupIds,
getExternalizedBundledPluginTargetId,
type ExternalizedBundledPluginBridge,
@@ -222,10 +223,6 @@ function pathEndsWithSegment(params: {
return Boolean(value && segment && (value === segment || value.endsWith(`/${segment}`)));
}
function bundledExtensionPathSegment(bundledDirName: string): string {
return ["extensions", bundledDirName].join("/");
}
function isBridgeBundledPathRecord(params: {
bridge: ExternalizedBundledPluginBridge;
bundledLocalPath?: string;
@@ -242,16 +239,16 @@ function isBridgeBundledPathRecord(params: {
) {
return true;
}
const bundledDirName = params.bridge.bundledDirName ?? params.bridge.bundledPluginId;
const bundledPathSuffix = getExternalizedBundledPluginLegacyPathSuffix(params.bridge);
return (
pathEndsWithSegment({
value: params.record.sourcePath,
segment: bundledExtensionPathSegment(bundledDirName),
segment: bundledPathSuffix,
env: params.env,
}) ||
pathEndsWithSegment({
value: params.record.installPath,
segment: bundledExtensionPathSegment(bundledDirName),
segment: bundledPathSuffix,
env: params.env,
})
);
@@ -262,11 +259,11 @@ function removeBridgeBundledLoadPaths(params: {
loadPaths: ReturnType<typeof buildLoadPathHelpers>;
env: NodeJS.ProcessEnv;
}) {
const bundledDirName = params.bridge.bundledDirName ?? params.bridge.bundledPluginId;
const bundledPathSuffix = getExternalizedBundledPluginLegacyPathSuffix(params.bridge);
params.loadPaths.removeMatching((entry) =>
pathEndsWithSegment({
value: entry,
segment: bundledExtensionPathSegment(bundledDirName),
segment: bundledPathSuffix,
env: params.env,
}),
);
@@ -298,7 +295,7 @@ function isBridgeChannelEnabledByConfig(params: {
if (!entry || typeof entry !== "object" || Array.isArray(entry)) {
continue;
}
if ((entry as Record<string, unknown>).enabled === true) {
if (Object.is((entry as Record<string, unknown>).enabled, true)) {
return true;
}
}
@@ -317,7 +314,8 @@ function isExternalizedBundledPluginEnabled(params: {
if (
pluginIds.some(
(pluginId) =>
normalized.deny.includes(pluginId) || normalized.entries[pluginId]?.enabled === false,
normalized.deny.includes(pluginId) ||
Object.is(normalized.entries[pluginId]?.enabled, false),
)
) {
return false;
@@ -908,7 +906,6 @@ export async function syncPluginsForUpdateChannel(params: {
existing &&
!isBridgeBundledPathRecord({
bridge,
bundledLocalPath: undefined,
record: existing.record,
env,
})