test: dedupe infra and plugin-sdk utility suites

This commit is contained in:
Peter Steinberger
2026-03-27 23:08:54 +00:00
parent 0558f2470d
commit 8a788e2c0c
33 changed files with 1337 additions and 1021 deletions

View File

@@ -8,6 +8,10 @@ import {
validateArchiveEntryPath,
} from "./archive-path.js";
function expectArchivePathError(run: () => void, message: string) {
expect(run).toThrow(message);
}
describe("archive path helpers", () => {
it.each([
{ value: "C:\\temp\\file.txt", expected: true },
@@ -51,11 +55,13 @@ describe("archive path helpers", () => {
message: "archive entry is absolute: \\\\server\\share.txt",
},
])("$name", ({ entryPath, message }) => {
expect(() =>
validateArchiveEntryPath(entryPath, {
escapeLabel: "targetDir",
}),
).toThrow(message);
expectArchivePathError(
() =>
validateArchiveEntryPath(entryPath, {
escapeLabel: "targetDir",
}),
message,
);
});
it.each([
@@ -71,11 +77,13 @@ describe("archive path helpers", () => {
it("preserves strip-induced traversal for follow-up validation", () => {
const stripped = stripArchivePath("a/../escape.txt", 1);
expect(stripped).toBe("../escape.txt");
expect(() =>
validateArchiveEntryPath(stripped ?? "", {
escapeLabel: "targetDir",
}),
).toThrow("archive entry escapes targetDir: ../escape.txt");
expectArchivePathError(
() =>
validateArchiveEntryPath(stripped ?? "", {
escapeLabel: "targetDir",
}),
"archive entry escapes targetDir: ../escape.txt",
);
});
it.each([
@@ -95,14 +103,16 @@ describe("archive path helpers", () => {
])("$name", ({ relPath, originalPath, escapeLabel, expected, message }) => {
const rootDir = path.join(path.sep, "tmp", "archive-root");
if (message) {
expect(() =>
resolveArchiveOutputPath({
rootDir,
relPath,
originalPath,
escapeLabel,
}),
).toThrow(message);
expectArchivePathError(
() =>
resolveArchiveOutputPath({
rootDir,
relPath,
originalPath,
escapeLabel,
}),
message,
);
return;
}

View File

@@ -8,51 +8,99 @@ async function makeTempDir(label: string): Promise<string> {
return fs.mkdtemp(path.join(os.tmpdir(), `openclaw-${label}-`));
}
async function expectGitRootResolution(params: {
label: string;
setup: (
temp: string,
) => Promise<{ startPath: string; expectedRoot: string | null; expectedHead: string | null }>;
}): Promise<void> {
const temp = await makeTempDir(params.label);
const { startPath, expectedRoot, expectedHead } = await params.setup(temp);
expect(findGitRoot(startPath)).toBe(expectedRoot);
expect(resolveGitHeadPath(startPath)).toBe(expectedHead);
}
describe("git-root", () => {
it("finds git root when starting at the repo root itself", async () => {
const temp = await makeTempDir("git-root-self");
const repoRoot = path.join(temp, "repo");
await fs.mkdir(path.join(repoRoot, ".git"), { recursive: true });
expect(findGitRoot(repoRoot)).toBe(repoRoot);
expect(resolveGitHeadPath(repoRoot)).toBe(path.join(repoRoot, ".git", "HEAD"));
});
it("finds git root and HEAD path when .git is a directory", async () => {
const temp = await makeTempDir("git-root-dir");
const repoRoot = path.join(temp, "repo");
const workspace = path.join(repoRoot, "nested", "workspace");
await fs.mkdir(path.join(repoRoot, ".git"), { recursive: true });
await fs.mkdir(workspace, { recursive: true });
expect(findGitRoot(workspace)).toBe(repoRoot);
expect(resolveGitHeadPath(workspace)).toBe(path.join(repoRoot, ".git", "HEAD"));
});
it("resolves HEAD path when .git is a gitdir pointer file", async () => {
const temp = await makeTempDir("git-root-file");
const repoRoot = path.join(temp, "repo");
const workspace = path.join(repoRoot, "nested", "workspace");
const gitDir = path.join(repoRoot, ".actual-git");
await fs.mkdir(workspace, { recursive: true });
await fs.mkdir(gitDir, { recursive: true });
await fs.writeFile(path.join(repoRoot, ".git"), "gitdir: .actual-git\n", "utf-8");
expect(findGitRoot(workspace)).toBe(repoRoot);
expect(resolveGitHeadPath(workspace)).toBe(path.join(gitDir, "HEAD"));
});
it("keeps root detection for .git file and skips invalid gitdir content for HEAD lookup", async () => {
const temp = await makeTempDir("git-root-invalid-file");
const parentRoot = path.join(temp, "repo");
const childRoot = path.join(parentRoot, "child");
const nested = path.join(childRoot, "nested");
await fs.mkdir(path.join(parentRoot, ".git"), { recursive: true });
await fs.mkdir(nested, { recursive: true });
await fs.writeFile(path.join(childRoot, ".git"), "not-a-gitdir-pointer\n", "utf-8");
expect(findGitRoot(nested)).toBe(childRoot);
expect(resolveGitHeadPath(nested)).toBe(path.join(parentRoot, ".git", "HEAD"));
it.each([
{
name: "starting at the repo root itself",
label: "git-root-self",
setup: async (temp: string) => {
const repoRoot = path.join(temp, "repo");
await fs.mkdir(path.join(repoRoot, ".git"), { recursive: true });
return {
startPath: repoRoot,
expectedRoot: repoRoot,
expectedHead: path.join(repoRoot, ".git", "HEAD"),
};
},
},
{
name: ".git is a directory",
label: "git-root-dir",
setup: async (temp: string) => {
const repoRoot = path.join(temp, "repo");
const workspace = path.join(repoRoot, "nested", "workspace");
await fs.mkdir(path.join(repoRoot, ".git"), { recursive: true });
await fs.mkdir(workspace, { recursive: true });
return {
startPath: workspace,
expectedRoot: repoRoot,
expectedHead: path.join(repoRoot, ".git", "HEAD"),
};
},
},
{
name: ".git is a gitdir pointer file",
label: "git-root-file",
setup: async (temp: string) => {
const repoRoot = path.join(temp, "repo");
const workspace = path.join(repoRoot, "nested", "workspace");
const gitDir = path.join(repoRoot, ".actual-git");
await fs.mkdir(workspace, { recursive: true });
await fs.mkdir(gitDir, { recursive: true });
await fs.writeFile(path.join(repoRoot, ".git"), "gitdir: .actual-git\n", "utf-8");
return {
startPath: workspace,
expectedRoot: repoRoot,
expectedHead: path.join(gitDir, "HEAD"),
};
},
},
{
name: "invalid gitdir content still keeps root detection",
label: "git-root-invalid-file",
setup: async (temp: string) => {
const parentRoot = path.join(temp, "repo");
const childRoot = path.join(parentRoot, "child");
const nested = path.join(childRoot, "nested");
await fs.mkdir(path.join(parentRoot, ".git"), { recursive: true });
await fs.mkdir(nested, { recursive: true });
await fs.writeFile(path.join(childRoot, ".git"), "not-a-gitdir-pointer\n", "utf-8");
return {
startPath: nested,
expectedRoot: childRoot,
expectedHead: path.join(parentRoot, ".git", "HEAD"),
};
},
},
{
name: "invalid gitdir content without a parent repo",
label: "git-root-invalid-only",
setup: async (temp: string) => {
const repoRoot = path.join(temp, "repo");
const nested = path.join(repoRoot, "nested");
await fs.mkdir(nested, { recursive: true });
await fs.writeFile(path.join(repoRoot, ".git"), "not-a-gitdir-pointer\n", "utf-8");
return {
startPath: nested,
expectedRoot: repoRoot,
expectedHead: null,
};
},
},
])("resolves git roots when $name", async ({ label, setup }) => {
await expectGitRootResolution({ label, setup });
});
it("respects maxDepth traversal limit", async () => {
@@ -65,15 +113,4 @@ describe("git-root", () => {
expect(findGitRoot(nested, { maxDepth: 2 })).toBeNull();
expect(resolveGitHeadPath(nested, { maxDepth: 2 })).toBeNull();
});
it("returns null for HEAD lookup when only an invalid .git file exists", async () => {
const temp = await makeTempDir("git-root-invalid-only");
const repoRoot = path.join(temp, "repo");
const nested = path.join(repoRoot, "nested");
await fs.mkdir(nested, { recursive: true });
await fs.writeFile(path.join(repoRoot, ".git"), "not-a-gitdir-pointer\n", "utf-8");
expect(findGitRoot(nested)).toBe(repoRoot);
expect(resolveGitHeadPath(nested)).toBeNull();
});
});

View File

@@ -82,28 +82,31 @@ describe("resolveEffectiveHomeDir", () => {
});
describe("resolveRequiredHomeDir", () => {
it("returns cwd when no home source is available", () => {
expect(
resolveRequiredHomeDir({} as NodeJS.ProcessEnv, () => {
it.each([
{
name: "returns cwd when no home source is available",
env: {} as NodeJS.ProcessEnv,
homedir: () => {
throw new Error("no home");
}),
).toBe(process.cwd());
});
it("returns a fully resolved path for OPENCLAW_HOME", () => {
const result = resolveRequiredHomeDir(
{ OPENCLAW_HOME: "/custom/home" } as NodeJS.ProcessEnv,
() => "/fallback",
);
expect(result).toBe(path.resolve("/custom/home"));
});
it("returns cwd when OPENCLAW_HOME is tilde-only and no fallback home exists", () => {
expect(
resolveRequiredHomeDir({ OPENCLAW_HOME: "~" } as NodeJS.ProcessEnv, () => {
},
expected: process.cwd(),
},
{
name: "returns a fully resolved path for OPENCLAW_HOME",
env: { OPENCLAW_HOME: "/custom/home" } as NodeJS.ProcessEnv,
homedir: () => "/fallback",
expected: path.resolve("/custom/home"),
},
{
name: "returns cwd when OPENCLAW_HOME is tilde-only and no fallback home exists",
env: { OPENCLAW_HOME: "~" } as NodeJS.ProcessEnv,
homedir: () => {
throw new Error("no home");
}),
).toBe(process.cwd());
},
expected: process.cwd(),
},
])("$name", ({ env, homedir, expected }) => {
expect(resolveRequiredHomeDir(env, homedir)).toBe(expected);
});
});
@@ -157,32 +160,43 @@ describe("expandHomePrefix", () => {
});
describe("resolveHomeRelativePath", () => {
it("returns blank input unchanged", () => {
expect(resolveHomeRelativePath(" ")).toBe("");
});
it("resolves trimmed relative and absolute paths", () => {
expect(resolveHomeRelativePath(" ./tmp/file.txt ")).toBe(path.resolve("./tmp/file.txt"));
expect(resolveHomeRelativePath(" /tmp/file.txt ")).toBe(path.resolve("/tmp/file.txt"));
});
it("expands tilde paths using the resolved home directory", () => {
expect(
resolveHomeRelativePath("~/docs", {
it.each([
{
name: "returns blank input unchanged",
input: " ",
expected: "",
},
{
name: "resolves trimmed relative paths",
input: " ./tmp/file.txt ",
expected: path.resolve("./tmp/file.txt"),
},
{
name: "resolves trimmed absolute paths",
input: " /tmp/file.txt ",
expected: path.resolve("/tmp/file.txt"),
},
{
name: "expands tilde paths using the resolved home directory",
input: "~/docs",
opts: {
env: { OPENCLAW_HOME: "/srv/openclaw-home" } as NodeJS.ProcessEnv,
}),
).toBe(path.resolve("/srv/openclaw-home/docs"));
});
it("falls back to cwd when tilde paths have no home source", () => {
expect(
resolveHomeRelativePath("~", {
},
expected: path.resolve("/srv/openclaw-home/docs"),
},
{
name: "falls back to cwd when tilde paths have no home source",
input: "~",
opts: {
env: {} as NodeJS.ProcessEnv,
homedir: () => {
throw new Error("no home");
},
}),
).toBe(path.resolve(process.cwd()));
},
expected: path.resolve(process.cwd()),
},
])("$name", ({ input, opts, expected }) => {
expect(resolveHomeRelativePath(input, opts)).toBe(expected);
});
});

View File

@@ -62,6 +62,20 @@ async function expectGuardPayloadTooLarge(params: {
return { req, res, guard };
}
async function readJsonBody(params: {
chunks?: string[];
maxBytes: number;
emptyObjectOnEmpty?: boolean;
}) {
const req = createMockRequest({ chunks: params.chunks });
return await readJsonBodyWithLimit(req, {
maxBytes: params.maxBytes,
...(params.emptyObjectOnEmpty === undefined
? {}
: { emptyObjectOnEmpty: params.emptyObjectOnEmpty }),
});
}
function createMockRequest(params: {
chunks?: string[];
headers?: Record<string, string>;
@@ -128,25 +142,38 @@ describe("http body limits", () => {
await expectReadPayloadTooLarge({ chunks, headers, maxBytes });
});
it("returns json parse error when body is invalid", async () => {
const req = createMockRequest({ chunks: ["{bad json"] });
const result = await readJsonBodyWithLimit(req, { maxBytes: 1024, emptyObjectOnEmpty: false });
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.code).toBe("INVALID_JSON");
}
});
it("returns empty object for an empty body by default", async () => {
const req = createMockRequest({ chunks: [" "] });
const result = await readJsonBodyWithLimit(req, { maxBytes: 1024 });
expect(result).toEqual({ ok: true, value: {} });
});
it("returns payload-too-large for json body", async () => {
const req = createMockRequest({ chunks: ["x".repeat(1024)] });
const result = await readJsonBodyWithLimit(req, { maxBytes: 10 });
expect(result).toEqual({ ok: false, code: "PAYLOAD_TOO_LARGE", error: "Payload too large" });
it.each([
{
name: "returns json parse error when body is invalid",
params: { chunks: ["{bad json"], maxBytes: 1024, emptyObjectOnEmpty: false },
assertResult: (result: Awaited<ReturnType<typeof readJsonBody>>) => {
expect(result.ok).toBe(false);
if (!result.ok) {
expect(result.code).toBe("INVALID_JSON");
}
},
},
{
name: "returns empty object for an empty body by default",
params: { chunks: [" "], maxBytes: 1024 },
assertResult: (result: Awaited<ReturnType<typeof readJsonBody>>) => {
expect(result).toEqual({ ok: true, value: {} });
},
},
{
name: "returns payload-too-large for json body",
params: { chunks: ["x".repeat(1024)], maxBytes: 10 },
assertResult: (result: Awaited<ReturnType<typeof readJsonBody>>) => {
expect(result).toEqual({
ok: false,
code: "PAYLOAD_TOO_LARGE",
error: "Payload too large",
});
},
},
])("$name", async ({ params, assertResult }) => {
const result = await readJsonBody(params);
assertResult(result);
});
it.each([

View File

@@ -5,21 +5,27 @@ import { withTempDir } from "../test-helpers/temp-dir.js";
import { loadJsonFile, saveJsonFile } from "./json-file.js";
describe("json-file helpers", () => {
it("returns undefined for missing and invalid JSON files", async () => {
it.each([
{
name: "missing files",
setup: () => {},
},
{
name: "invalid JSON files",
setup: (pathname: string) => {
fs.writeFileSync(pathname, "{", "utf8");
},
},
{
name: "directory targets",
setup: (pathname: string) => {
fs.mkdirSync(pathname);
},
},
])("returns undefined for $name", async ({ setup }) => {
await withTempDir({ prefix: "openclaw-json-file-" }, async (root) => {
const pathname = path.join(root, "config.json");
expect(loadJsonFile(pathname)).toBeUndefined();
fs.writeFileSync(pathname, "{", "utf8");
expect(loadJsonFile(pathname)).toBeUndefined();
});
});
it("returns undefined when the target path is a directory", async () => {
await withTempDir({ prefix: "openclaw-json-file-" }, async (root) => {
const pathname = path.join(root, "config-dir");
fs.mkdirSync(pathname);
setup(pathname);
expect(loadJsonFile(pathname)).toBeUndefined();
});
});
@@ -44,13 +50,22 @@ describe("json-file helpers", () => {
});
});
it("overwrites existing JSON files with the latest payload", async () => {
it.each([
{
name: "new files",
setup: () => {},
},
{
name: "existing JSON files",
setup: (pathname: string) => {
fs.writeFileSync(pathname, '{"enabled":false}\n', "utf8");
},
},
])("writes the latest payload for $name", async ({ setup }) => {
await withTempDir({ prefix: "openclaw-json-file-" }, async (root) => {
const pathname = path.join(root, "config.json");
fs.writeFileSync(pathname, '{"enabled":false}\n', "utf8");
setup(pathname);
saveJsonFile(pathname, { enabled: true, count: 2 });
expect(loadJsonFile(pathname)).toEqual({ enabled: true, count: 2 });
});
});

View File

@@ -33,11 +33,10 @@ describe("jsonUtf8Bytes", () => {
expect(jsonUtf8Bytes(circular)).toBe(Buffer.byteLength("[object Object]", "utf8"));
});
it("uses string conversion for BigInt serialization failures", () => {
expect(jsonUtf8Bytes(12n)).toBe(Buffer.byteLength("12", "utf8"));
});
it("uses string conversion for symbol serialization failures", () => {
expect(jsonUtf8Bytes(Symbol("token"))).toBe(Buffer.byteLength("Symbol(token)", "utf8"));
it.each([
{ name: "BigInt serialization failures", value: 12n, expected: "12" },
{ name: "symbol serialization failures", value: Symbol("token"), expected: "Symbol(token)" },
])("uses string conversion for $name", ({ value, expected }) => {
expect(jsonUtf8Bytes(value)).toBe(Buffer.byteLength(expected, "utf8"));
});
});

View File

@@ -34,21 +34,32 @@ afterEach(() => {
});
describe("getMachineDisplayName", () => {
it("uses the hostname fallback in test mode and strips a trimmed .local suffix", async () => {
const hostnameSpy = vi.spyOn(os, "hostname").mockReturnValue(" clawbox.LOCAL ");
const machineName = await importMachineName("test-fallback");
it.each([
{
name: "uses the hostname fallback in test mode and strips a trimmed .local suffix",
scope: "test-fallback",
hostname: " clawbox.LOCAL ",
expected: "clawbox",
expectedCalls: 1,
repeatLookup: true,
},
{
name: "falls back to the default product name when hostname is blank",
scope: "blank-hostname",
hostname: " ",
expected: "openclaw",
expectedCalls: 1,
repeatLookup: false,
},
])("$name", async ({ scope, hostname, expected, expectedCalls, repeatLookup }) => {
const hostnameSpy = vi.spyOn(os, "hostname").mockReturnValue(hostname);
const machineName = await importMachineName(scope);
await expect(machineName.getMachineDisplayName()).resolves.toBe("clawbox");
await expect(machineName.getMachineDisplayName()).resolves.toBe("clawbox");
expect(hostnameSpy).toHaveBeenCalledTimes(1);
expect(execFileMock).not.toHaveBeenCalled();
});
it("falls back to the default product name when hostname is blank", async () => {
vi.spyOn(os, "hostname").mockReturnValue(" ");
const machineName = await importMachineName("blank-hostname");
await expect(machineName.getMachineDisplayName()).resolves.toBe("openclaw");
await expect(machineName.getMachineDisplayName()).resolves.toBe(expected);
if (repeatLookup) {
await expect(machineName.getMachineDisplayName()).resolves.toBe(expected);
}
expect(hostnameSpy).toHaveBeenCalledTimes(expectedCalls);
expect(execFileMock).not.toHaveBeenCalled();
});
});

View File

@@ -2,38 +2,47 @@ import { describe, expect, it } from "vitest";
import { pruneMapToMaxSize } from "./map-size.js";
describe("pruneMapToMaxSize", () => {
it("keeps the newest entries after flooring fractional limits", () => {
const map = new Map([
["a", 1],
["b", 2],
["c", 3],
]);
pruneMapToMaxSize(map, 2.9);
expect([...map.entries()]).toEqual([
["b", 2],
["c", 3],
]);
});
it("clears maps for zero or negative limits and leaves undersized maps untouched", () => {
const cleared = new Map([
["a", 1],
["b", 2],
]);
pruneMapToMaxSize(cleared, 0);
expect([...cleared.entries()]).toEqual([]);
const alsoCleared = new Map([
["a", 1],
["b", 2],
]);
pruneMapToMaxSize(alsoCleared, -4);
expect([...alsoCleared.entries()]).toEqual([]);
const unchanged = new Map([["a", 1]]);
pruneMapToMaxSize(unchanged, 5);
expect([...unchanged.entries()]).toEqual([["a", 1]]);
it.each([
{
name: "keeps the newest entries after flooring fractional limits",
entries: [
["a", 1],
["b", 2],
["c", 3],
] as const,
maxSize: 2.9,
expected: [
["b", 2],
["c", 3],
],
},
{
name: "clears maps for zero limits",
entries: [
["a", 1],
["b", 2],
] as const,
maxSize: 0,
expected: [],
},
{
name: "clears maps for negative limits",
entries: [
["a", 1],
["b", 2],
] as const,
maxSize: -4,
expected: [],
},
{
name: "leaves undersized maps untouched",
entries: [["a", 1]] as const,
maxSize: 5,
expected: [["a", 1]],
},
])("$name", ({ entries, maxSize, expected }) => {
const map = new Map(entries);
pruneMapToMaxSize(map, maxSize);
expect([...map.entries()]).toEqual(expected);
});
});

View File

@@ -2,18 +2,14 @@ import { describe, expect, it } from "vitest";
import { normalizeHostname } from "./hostname.js";
describe("normalizeHostname", () => {
it("trims, lowercases, and strips a trailing dot", () => {
expect(normalizeHostname(" Example.COM. ")).toBe("example.com");
expect(normalizeHostname(" ")).toBe("");
});
it("unwraps bracketed ipv6 hosts after normalization", () => {
expect(normalizeHostname(" [FD7A:115C:A1E0::1] ")).toBe("fd7a:115c:a1e0::1");
expect(normalizeHostname(" [FD7A:115C:A1E0::1]. ")).toBe("fd7a:115c:a1e0::1");
});
it("leaves non-fully-bracketed values otherwise unchanged", () => {
expect(normalizeHostname("[fd7a:115c:a1e0::1")).toBe("[fd7a:115c:a1e0::1");
expect(normalizeHostname("fd7a:115c:a1e0::1]")).toBe("fd7a:115c:a1e0::1]");
it.each([
{ input: " Example.COM. ", expected: "example.com" },
{ input: " ", expected: "" },
{ input: " [FD7A:115C:A1E0::1] ", expected: "fd7a:115c:a1e0::1" },
{ input: " [FD7A:115C:A1E0::1]. ", expected: "fd7a:115c:a1e0::1" },
{ input: "[fd7a:115c:a1e0::1", expected: "[fd7a:115c:a1e0::1" },
{ input: "fd7a:115c:a1e0::1]", expected: "fd7a:115c:a1e0::1]" },
])("normalizes %j", ({ input, expected }) => {
expect(normalizeHostname(input)).toBe(expected);
});
});

View File

@@ -25,6 +25,10 @@ function setFile(p: string, content = "") {
state.entries.set(abs(p), { kind: "file", content });
}
function setPackageRoot(root: string, name = "openclaw") {
setFile(path.join(root, "package.json"), JSON.stringify({ name }));
}
vi.mock("node:fs", async (importOriginal) => {
const actual = await importOriginal<typeof import("node:fs")>();
const wrapped = {
@@ -99,93 +103,109 @@ describe("resolveOpenClawPackageRoot", () => {
state.realpathErrors.clear();
});
it("resolves package root from .bin argv1", async () => {
const project = fx("bin-scenario");
const argv1 = path.join(project, "node_modules", ".bin", "openclaw");
const pkgRoot = path.join(project, "node_modules", "openclaw");
setFile(path.join(pkgRoot, "package.json"), JSON.stringify({ name: "openclaw" }));
expect(resolveOpenClawPackageRootSync({ argv1 })).toBe(pkgRoot);
});
it("resolves package root via symlinked argv1", async () => {
const project = fx("symlink-scenario");
const bin = path.join(project, "bin", "openclaw");
const realPkg = path.join(project, "real-pkg");
state.realpaths.set(abs(bin), abs(path.join(realPkg, "openclaw.mjs")));
setFile(path.join(realPkg, "package.json"), JSON.stringify({ name: "openclaw" }));
expect(resolveOpenClawPackageRootSync({ argv1: bin })).toBe(realPkg);
});
it("falls back when argv1 realpath throws", async () => {
const project = fx("realpath-throw-scenario");
const argv1 = path.join(project, "node_modules", ".bin", "openclaw");
const pkgRoot = path.join(project, "node_modules", "openclaw");
state.realpathErrors.add(abs(argv1));
setFile(path.join(pkgRoot, "package.json"), JSON.stringify({ name: "openclaw" }));
expect(resolveOpenClawPackageRootSync({ argv1 })).toBe(pkgRoot);
});
it("prefers moduleUrl candidates", async () => {
const pkgRoot = fx("moduleurl");
setFile(path.join(pkgRoot, "package.json"), JSON.stringify({ name: "openclaw" }));
const moduleUrl = pathToFileURL(path.join(pkgRoot, "dist", "index.js")).toString();
expect(resolveOpenClawPackageRootSync({ moduleUrl })).toBe(pkgRoot);
});
it("falls through from a non-openclaw moduleUrl candidate to cwd", async () => {
const wrongPkgRoot = fx("moduleurl-fallthrough", "wrong");
const cwdPkgRoot = fx("moduleurl-fallthrough", "cwd");
setFile(path.join(wrongPkgRoot, "package.json"), JSON.stringify({ name: "not-openclaw" }));
setFile(path.join(cwdPkgRoot, "package.json"), JSON.stringify({ name: "openclaw" }));
const moduleUrl = pathToFileURL(path.join(wrongPkgRoot, "dist", "index.js")).toString();
expect(resolveOpenClawPackageRootSync({ moduleUrl, cwd: cwdPkgRoot })).toBe(cwdPkgRoot);
await expect(resolveOpenClawPackageRoot({ moduleUrl, cwd: cwdPkgRoot })).resolves.toBe(
cwdPkgRoot,
);
});
it("ignores invalid moduleUrl values and falls back to cwd", async () => {
const pkgRoot = fx("invalid-moduleurl");
setFile(path.join(pkgRoot, "package.json"), JSON.stringify({ name: "openclaw" }));
expect(resolveOpenClawPackageRootSync({ moduleUrl: "not-a-file-url", cwd: pkgRoot })).toBe(
pkgRoot,
);
await expect(
resolveOpenClawPackageRoot({ moduleUrl: "not-a-file-url", cwd: pkgRoot }),
).resolves.toBe(pkgRoot);
});
it("returns null for non-openclaw package roots", async () => {
const pkgRoot = fx("not-openclaw");
setFile(path.join(pkgRoot, "package.json"), JSON.stringify({ name: "not-openclaw" }));
expect(resolveOpenClawPackageRootSync({ cwd: pkgRoot })).toBeNull();
});
it("falls back from a symlinked argv1 to the node_modules package root", () => {
const project = fx("symlink-node-modules-fallback");
const argv1 = path.join(project, "node_modules", ".bin", "openclaw");
state.realpaths.set(abs(argv1), abs(path.join(project, "versions", "current", "openclaw.mjs")));
const pkgRoot = path.join(project, "node_modules", "openclaw");
setFile(path.join(pkgRoot, "package.json"), JSON.stringify({ name: "openclaw" }));
expect(resolveOpenClawPackageRootSync({ argv1 })).toBe(pkgRoot);
});
it("async resolver matches sync behavior", async () => {
const pkgRoot = fx("async");
setFile(path.join(pkgRoot, "package.json"), JSON.stringify({ name: "openclaw" }));
await expect(resolveOpenClawPackageRoot({ cwd: pkgRoot })).resolves.toBe(pkgRoot);
});
it("async resolver returns null when no package roots exist", async () => {
await expect(resolveOpenClawPackageRoot({ cwd: fx("missing") })).resolves.toBeNull();
it.each([
{
name: "resolves package root from .bin argv1",
setup: () => {
const project = fx("bin-scenario");
const argv1 = path.join(project, "node_modules", ".bin", "openclaw");
const pkgRoot = path.join(project, "node_modules", "openclaw");
setPackageRoot(pkgRoot);
return { opts: { argv1 }, expected: pkgRoot };
},
},
{
name: "resolves package root via symlinked argv1",
setup: () => {
const project = fx("symlink-scenario");
const bin = path.join(project, "bin", "openclaw");
const realPkg = path.join(project, "real-pkg");
state.realpaths.set(abs(bin), abs(path.join(realPkg, "openclaw.mjs")));
setPackageRoot(realPkg);
return { opts: { argv1: bin }, expected: realPkg };
},
},
{
name: "falls back when argv1 realpath throws",
setup: () => {
const project = fx("realpath-throw-scenario");
const argv1 = path.join(project, "node_modules", ".bin", "openclaw");
const pkgRoot = path.join(project, "node_modules", "openclaw");
state.realpathErrors.add(abs(argv1));
setPackageRoot(pkgRoot);
return { opts: { argv1 }, expected: pkgRoot };
},
},
{
name: "prefers moduleUrl candidates",
setup: () => {
const pkgRoot = fx("moduleurl");
setPackageRoot(pkgRoot);
return {
opts: { moduleUrl: pathToFileURL(path.join(pkgRoot, "dist", "index.js")).toString() },
expected: pkgRoot,
};
},
},
{
name: "falls through from a non-openclaw moduleUrl candidate to cwd",
setup: () => {
const wrongPkgRoot = fx("moduleurl-fallthrough", "wrong");
const cwdPkgRoot = fx("moduleurl-fallthrough", "cwd");
setPackageRoot(wrongPkgRoot, "not-openclaw");
setPackageRoot(cwdPkgRoot);
return {
opts: {
moduleUrl: pathToFileURL(path.join(wrongPkgRoot, "dist", "index.js")).toString(),
cwd: cwdPkgRoot,
},
expected: cwdPkgRoot,
};
},
},
{
name: "ignores invalid moduleUrl values and falls back to cwd",
setup: () => {
const pkgRoot = fx("invalid-moduleurl");
setPackageRoot(pkgRoot);
return {
opts: { moduleUrl: "not-a-file-url", cwd: pkgRoot },
expected: pkgRoot,
};
},
},
{
name: "returns null for non-openclaw package roots",
setup: () => {
const pkgRoot = fx("not-openclaw");
setPackageRoot(pkgRoot, "not-openclaw");
return { opts: { cwd: pkgRoot }, expected: null };
},
},
{
name: "falls back from a symlinked argv1 to the node_modules package root",
setup: () => {
const project = fx("symlink-node-modules-fallback");
const argv1 = path.join(project, "node_modules", ".bin", "openclaw");
state.realpaths.set(
abs(argv1),
abs(path.join(project, "versions", "current", "openclaw.mjs")),
);
const pkgRoot = path.join(project, "node_modules", "openclaw");
setPackageRoot(pkgRoot);
return { opts: { argv1 }, expected: pkgRoot };
},
},
{
name: "returns null when no package roots exist",
setup: () => ({
opts: { cwd: fx("missing") },
expected: null,
}),
},
])("$name", async ({ setup }) => {
const { opts, expected } = setup();
expect(resolveOpenClawPackageRootSync(opts)).toBe(expected);
await expect(resolveOpenClawPackageRoot(opts)).resolves.toBe(expected);
});
});

View File

@@ -9,56 +9,85 @@ vi.mock("node:child_process", () => ({
import { resolveOsSummary } from "./os-summary.js";
type OsSummaryCase = {
name: string;
platform: ReturnType<typeof os.platform>;
release: string;
arch: ReturnType<typeof os.arch>;
swVersStdout?: string;
expected: ReturnType<typeof resolveOsSummary>;
};
describe("resolveOsSummary", () => {
afterEach(() => {
vi.restoreAllMocks();
});
it("formats darwin labels from sw_vers output", () => {
vi.spyOn(os, "platform").mockReturnValue("darwin");
vi.spyOn(os, "release").mockReturnValue("24.0.0");
vi.spyOn(os, "arch").mockReturnValue("arm64");
spawnSyncMock.mockReturnValue({
stdout: " 15.4 \n",
stderr: "",
pid: 1,
output: [],
status: 0,
signal: null,
});
expect(resolveOsSummary()).toEqual({
platform: "darwin",
arch: "arm64",
it.each<OsSummaryCase>([
{
name: "formats darwin labels from sw_vers output",
platform: "darwin" as const,
release: "24.0.0",
label: "macos 15.4 (arm64)",
});
});
it("falls back to os.release when sw_vers output is blank", () => {
vi.spyOn(os, "platform").mockReturnValue("darwin");
vi.spyOn(os, "release").mockReturnValue("24.1.0");
vi.spyOn(os, "arch").mockReturnValue("x64");
spawnSyncMock.mockReturnValue({
stdout: " ",
stderr: "",
pid: 1,
output: [],
status: 0,
signal: null,
});
expect(resolveOsSummary().label).toBe("macos 24.1.0 (x64)");
});
it("formats windows and non-darwin labels from os metadata", () => {
vi.spyOn(os, "release").mockReturnValue("10.0.26100");
vi.spyOn(os, "arch").mockReturnValue("x64");
vi.spyOn(os, "platform").mockReturnValue("win32");
expect(resolveOsSummary().label).toBe("windows 10.0.26100 (x64)");
vi.spyOn(os, "platform").mockReturnValue("linux");
expect(resolveOsSummary().label).toBe("linux 10.0.26100 (x64)");
arch: "arm64",
swVersStdout: " 15.4 \n",
expected: {
platform: "darwin",
arch: "arm64",
release: "24.0.0",
label: "macos 15.4 (arm64)",
},
},
{
name: "falls back to os.release when sw_vers output is blank",
platform: "darwin" as const,
release: "24.1.0",
arch: "x64",
swVersStdout: " ",
expected: {
platform: "darwin",
arch: "x64",
release: "24.1.0",
label: "macos 24.1.0 (x64)",
},
},
{
name: "formats windows labels from os metadata",
platform: "win32" as const,
release: "10.0.26100",
arch: "x64",
expected: {
platform: "win32",
arch: "x64",
release: "10.0.26100",
label: "windows 10.0.26100 (x64)",
},
},
{
name: "formats non-darwin labels from os metadata",
platform: "linux" as const,
release: "10.0.26100",
arch: "x64",
expected: {
platform: "linux",
arch: "x64",
release: "10.0.26100",
label: "linux 10.0.26100 (x64)",
},
},
])("$name", ({ platform, release, arch, swVersStdout, expected }) => {
vi.spyOn(os, "platform").mockReturnValue(platform);
vi.spyOn(os, "release").mockReturnValue(release);
vi.spyOn(os, "arch").mockReturnValue(arch);
if (platform === "darwin") {
spawnSyncMock.mockReturnValue({
stdout: swVersStdout ?? "",
stderr: "",
pid: 1,
output: [],
status: 0,
signal: null,
});
}
expect(resolveOsSummary()).toEqual(expected);
});
});

View File

@@ -4,6 +4,15 @@ import { describe, expect, it } from "vitest";
import { withTempDir } from "../test-helpers/temp-dir.js";
import { readPackageName, readPackageVersion } from "./package-json.js";
async function expectPackageMeta(params: {
root: string;
expectedVersion: string | null;
expectedName: string | null;
}): Promise<void> {
await expect(readPackageVersion(params.root)).resolves.toBe(params.expectedVersion);
await expect(readPackageName(params.root)).resolves.toBe(params.expectedName);
}
describe("package-json helpers", () => {
it("reads package version and trims package name", async () => {
await withTempDir({ prefix: "openclaw-package-json-" }, async (root) => {
@@ -13,34 +22,64 @@ describe("package-json helpers", () => {
"utf8",
);
await expect(readPackageVersion(root)).resolves.toBe("1.2.3");
await expect(readPackageName(root)).resolves.toBe("@openclaw/demo");
await expectPackageMeta({
root,
expectedVersion: "1.2.3",
expectedName: "@openclaw/demo",
});
});
});
it("returns null for missing or invalid package.json data", async () => {
await withTempDir({ prefix: "openclaw-package-json-" }, async (root) => {
await expect(readPackageVersion(root)).resolves.toBeNull();
await expect(readPackageName(root)).resolves.toBeNull();
await fs.writeFile(path.join(root, "package.json"), "{", "utf8");
await expect(readPackageVersion(root)).resolves.toBeNull();
await expect(readPackageName(root)).resolves.toBeNull();
await fs.writeFile(
path.join(root, "package.json"),
JSON.stringify({ version: 123, name: " " }),
"utf8",
);
await expect(readPackageVersion(root)).resolves.toBeNull();
await expect(readPackageName(root)).resolves.toBeNull();
await fs.writeFile(
path.join(root, "package.json"),
JSON.stringify({ version: " ", name: "@openclaw/demo" }),
"utf8",
);
await expect(readPackageVersion(root)).resolves.toBeNull();
});
});
it.each([
{
name: "missing package.json",
writePackageJson: async (_root: string) => {},
expectedVersion: null,
expectedName: null,
},
{
name: "invalid JSON",
writePackageJson: async (root: string) => {
await fs.writeFile(path.join(root, "package.json"), "{", "utf8");
},
expectedVersion: null,
expectedName: null,
},
{
name: "invalid typed fields",
writePackageJson: async (root: string) => {
await fs.writeFile(
path.join(root, "package.json"),
JSON.stringify({ version: 123, name: " " }),
"utf8",
);
},
expectedVersion: null,
expectedName: null,
},
{
name: "blank version strings",
writePackageJson: async (root: string) => {
await fs.writeFile(
path.join(root, "package.json"),
JSON.stringify({ version: " ", name: "@openclaw/demo" }),
"utf8",
);
},
expectedVersion: null,
expectedName: "@openclaw/demo",
},
])(
"returns normalized nulls for $name",
async ({ writePackageJson, expectedVersion, expectedName }) => {
await withTempDir({ prefix: "openclaw-package-json-" }, async (root) => {
await writePackageJson(root);
await expectPackageMeta({
root,
expectedVersion,
expectedName,
});
});
},
);
});

View File

@@ -4,25 +4,18 @@ import { normalizePackageTagInput } from "./package-tag.js";
describe("normalizePackageTagInput", () => {
const packageNames = ["openclaw", "@openclaw/plugin"] as const;
it("returns null for blank inputs", () => {
expect(normalizePackageTagInput(undefined, packageNames)).toBeNull();
expect(normalizePackageTagInput(" ", packageNames)).toBeNull();
});
it("strips known package-name prefixes before returning the tag", () => {
expect(normalizePackageTagInput("openclaw@beta", packageNames)).toBe("beta");
expect(normalizePackageTagInput("@openclaw/plugin@2026.2.24", packageNames)).toBe("2026.2.24");
expect(normalizePackageTagInput("openclaw@ ", packageNames)).toBeNull();
});
it("treats exact known package names as an empty tag", () => {
expect(normalizePackageTagInput("openclaw", packageNames)).toBeNull();
expect(normalizePackageTagInput(" @openclaw/plugin ", packageNames)).toBeNull();
});
it("returns trimmed raw values when no package prefix matches", () => {
expect(normalizePackageTagInput(" latest ", packageNames)).toBe("latest");
expect(normalizePackageTagInput("@other/plugin@beta", packageNames)).toBe("@other/plugin@beta");
expect(normalizePackageTagInput("openclawer@beta", packageNames)).toBe("openclawer@beta");
it.each([
{ input: undefined, expected: null },
{ input: " ", expected: null },
{ input: "openclaw@beta", expected: "beta" },
{ input: "@openclaw/plugin@2026.2.24", expected: "2026.2.24" },
{ input: "openclaw@ ", expected: null },
{ input: "openclaw", expected: null },
{ input: " @openclaw/plugin ", expected: null },
{ input: " latest ", expected: "latest" },
{ input: "@other/plugin@beta", expected: "@other/plugin@beta" },
{ input: "openclawer@beta", expected: "openclawer@beta" },
])("normalizes %j", ({ input, expected }) => {
expect(normalizePackageTagInput(input, packageNames)).toBe(expected);
});
});

View File

@@ -97,10 +97,29 @@ describe("ensureOpenClawCliOnPath", () => {
return (process.env.PATH ?? "").split(path.delimiter);
}
function resetBootstrapEnv(pathValue = "/usr/bin") {
process.env.PATH = pathValue;
delete process.env.OPENCLAW_PATH_BOOTSTRAPPED;
delete process.env.OPENCLAW_ALLOW_PROJECT_LOCAL_BIN;
delete process.env.HOMEBREW_PREFIX;
delete process.env.HOMEBREW_BREW_FILE;
delete process.env.XDG_BIN_HOME;
}
function expectPathsAfter(parts: string[], anchor: string, expectedPaths: string[]) {
const anchorIndex = parts.indexOf(anchor);
expect(anchorIndex).toBeGreaterThanOrEqual(0);
for (const expectedPath of expectedPaths) {
expect(
parts.indexOf(expectedPath),
`${expectedPath} should come after ${anchor}`,
).toBeGreaterThan(anchorIndex);
}
}
it("prepends the bundled app bin dir when a sibling openclaw exists", () => {
const { tmp, appBinDir, appCli } = setupAppCliRoot("case-bundled");
process.env.PATH = "/usr/bin";
delete process.env.OPENCLAW_PATH_BOOTSTRAPPED;
resetBootstrapEnv();
const updated = bootstrapPath({
execPath: appCli,
@@ -131,8 +150,7 @@ describe("ensureOpenClawCliOnPath", () => {
setDir(shimsDir);
process.env.MISE_DATA_DIR = miseDataDir;
process.env.PATH = "/usr/bin";
delete process.env.OPENCLAW_PATH_BOOTSTRAPPED;
resetBootstrapEnv();
const updated = bootstrapPath({
execPath: appCli,
@@ -140,10 +158,7 @@ describe("ensureOpenClawCliOnPath", () => {
homeDir: tmp,
platform: "darwin",
});
const usrBinIndex = updated.indexOf("/usr/bin");
const shimsIndex = updated.indexOf(shimsDir);
expect(usrBinIndex).toBeGreaterThanOrEqual(0);
expect(shimsIndex).toBeGreaterThan(usrBinIndex);
expectPathsAfter(updated, "/usr/bin", [shimsDir]);
});
it.each([
@@ -167,9 +182,7 @@ describe("ensureOpenClawCliOnPath", () => {
setDir(localBinDir);
setExe(localCli);
process.env.PATH = "/usr/bin";
delete process.env.OPENCLAW_PATH_BOOTSTRAPPED;
delete process.env.OPENCLAW_ALLOW_PROJECT_LOCAL_BIN;
resetBootstrapEnv();
const withoutOptIn = bootstrapPath({
execPath: appCli,
@@ -179,8 +192,7 @@ describe("ensureOpenClawCliOnPath", () => {
});
expect(withoutOptIn.includes(localBinDir)).toBe(false);
process.env.PATH = "/usr/bin";
delete process.env.OPENCLAW_PATH_BOOTSTRAPPED;
resetBootstrapEnv();
if (envValue === undefined) {
delete process.env.OPENCLAW_ALLOW_PROJECT_LOCAL_BIN;
} else {
@@ -194,10 +206,7 @@ describe("ensureOpenClawCliOnPath", () => {
platform: "darwin",
...(allowProjectLocalBin === undefined ? {} : { allowProjectLocalBin }),
});
const usrBinIndex = withOptIn.indexOf("/usr/bin");
const localIndex = withOptIn.indexOf(localBinDir);
expect(usrBinIndex).toBeGreaterThanOrEqual(0);
expect(localIndex).toBeGreaterThan(usrBinIndex);
expectPathsAfter(withOptIn, "/usr/bin", [localBinDir]);
},
);
@@ -209,9 +218,8 @@ describe("ensureOpenClawCliOnPath", () => {
setDir(path.join(tmp, ".local"));
setDir(localBin);
process.env.PATH = "/usr/bin";
resetBootstrapEnv();
process.env.XDG_BIN_HOME = xdgBinHome;
delete process.env.OPENCLAW_PATH_BOOTSTRAPPED;
const updated = bootstrapPath({
execPath: appCli,
@@ -228,9 +236,7 @@ describe("ensureOpenClawCliOnPath", () => {
setDir(path.join(tmp, ".local"));
setDir(localBin);
process.env.PATH = "/usr/bin:/bin";
delete process.env.OPENCLAW_PATH_BOOTSTRAPPED;
delete process.env.XDG_BIN_HOME;
resetBootstrapEnv("/usr/bin:/bin");
const updated = bootstrapPath({
execPath: appCli,
@@ -238,11 +244,7 @@ describe("ensureOpenClawCliOnPath", () => {
homeDir: tmp,
platform: "linux",
});
const usrBinIndex = updated.indexOf("/usr/bin");
const localBinIndex = updated.indexOf(localBin);
expect(usrBinIndex).toBeGreaterThanOrEqual(0);
expect(localBinIndex).toBeGreaterThanOrEqual(0);
expect(localBinIndex).toBeGreaterThan(usrBinIndex);
expectPathsAfter(updated, "/usr/bin", [localBin]);
});
it("places all user-writable home dirs after system dirs", () => {
@@ -260,9 +262,7 @@ describe("ensureOpenClawCliOnPath", () => {
setDir(path.join(tmp, ".yarn"));
setDir(yarnBin);
process.env.PATH = "/usr/bin:/bin";
delete process.env.OPENCLAW_PATH_BOOTSTRAPPED;
delete process.env.XDG_BIN_HOME;
resetBootstrapEnv("/usr/bin:/bin");
const updated = bootstrapPath({
execPath: appCli,
@@ -270,64 +270,58 @@ describe("ensureOpenClawCliOnPath", () => {
homeDir: tmp,
platform: "linux",
});
const usrBinIndex = updated.indexOf("/usr/bin");
for (const userDir of [localBin, pnpmBin, bunBin, yarnBin]) {
const idx = updated.indexOf(userDir);
expect(idx, `${userDir} should come after /usr/bin`).toBeGreaterThan(usrBinIndex);
}
expectPathsAfter(updated, "/usr/bin", [localBin, pnpmBin, bunBin, yarnBin]);
});
it("appends Homebrew dirs after immutable OS dirs", () => {
const { tmp, appCli } = setupAppCliRoot("case-homebrew-after-system");
setDir("/opt/homebrew/bin");
setDir("/usr/local/bin");
process.env.PATH = "/usr/bin:/bin";
delete process.env.OPENCLAW_PATH_BOOTSTRAPPED;
delete process.env.HOMEBREW_PREFIX;
delete process.env.HOMEBREW_BREW_FILE;
delete process.env.XDG_BIN_HOME;
const updated = bootstrapPath({
execPath: appCli,
cwd: tmp,
homeDir: tmp,
platform: "darwin",
});
const usrBinIndex = updated.indexOf("/usr/bin");
expect(usrBinIndex).toBeGreaterThanOrEqual(0);
expect(updated.indexOf("/opt/homebrew/bin")).toBeGreaterThan(usrBinIndex);
expect(updated.indexOf("/usr/local/bin")).toBeGreaterThan(usrBinIndex);
});
it("appends Linuxbrew dirs after system dirs", () => {
const tmp = abs("/tmp/openclaw-path/case-linuxbrew");
const execDir = path.join(tmp, "exec");
setDir(tmp);
setDir(execDir);
const linuxbrewDir = path.join(tmp, ".linuxbrew");
const linuxbrewBin = path.join(linuxbrewDir, "bin");
const linuxbrewSbin = path.join(linuxbrewDir, "sbin");
setDir(linuxbrewDir);
setDir(linuxbrewBin);
setDir(linuxbrewSbin);
process.env.PATH = "/usr/bin";
delete process.env.OPENCLAW_PATH_BOOTSTRAPPED;
delete process.env.HOMEBREW_PREFIX;
delete process.env.HOMEBREW_BREW_FILE;
delete process.env.XDG_BIN_HOME;
const parts = bootstrapPath({
execPath: path.join(execDir, "node"),
cwd: tmp,
homeDir: tmp,
platform: "linux",
});
const usrBinIndex = parts.indexOf("/usr/bin");
expect(usrBinIndex).toBeGreaterThanOrEqual(0);
expect(parts.indexOf(linuxbrewBin)).toBeGreaterThan(usrBinIndex);
expect(parts.indexOf(linuxbrewSbin)).toBeGreaterThan(usrBinIndex);
it.each([
{
name: "appends Homebrew dirs after immutable OS dirs",
setup: () => {
const { tmp, appCli } = setupAppCliRoot("case-homebrew-after-system");
setDir("/opt/homebrew/bin");
setDir("/usr/local/bin");
resetBootstrapEnv("/usr/bin:/bin");
return {
params: {
execPath: appCli,
cwd: tmp,
homeDir: tmp,
platform: "darwin" as const,
},
expectedPaths: ["/opt/homebrew/bin", "/usr/local/bin"],
anchor: "/usr/bin",
};
},
},
{
name: "appends Linuxbrew dirs after system dirs",
setup: () => {
const tmp = abs("/tmp/openclaw-path/case-linuxbrew");
const execDir = path.join(tmp, "exec");
setDir(tmp);
setDir(execDir);
const linuxbrewDir = path.join(tmp, ".linuxbrew");
const linuxbrewBin = path.join(linuxbrewDir, "bin");
const linuxbrewSbin = path.join(linuxbrewDir, "sbin");
setDir(linuxbrewDir);
setDir(linuxbrewBin);
setDir(linuxbrewSbin);
resetBootstrapEnv();
return {
params: {
execPath: path.join(execDir, "node"),
cwd: tmp,
homeDir: tmp,
platform: "linux" as const,
},
expectedPaths: [linuxbrewBin, linuxbrewSbin],
anchor: "/usr/bin",
};
},
},
])("$name", ({ setup }) => {
const { params, expectedPaths, anchor } = setup();
const updated = bootstrapPath(params);
expectPathsAfter(updated, anchor, expectedPaths);
});
});

View File

@@ -7,13 +7,17 @@ import {
normalizePathPrepend,
} from "./path-prepend.js";
const env = (value: Record<string, string>) => value;
describe("path prepend helpers", () => {
it("finds the actual PATH key while preserving original casing", () => {
expect(findPathKey({ PATH: "/usr/bin" })).toBe("PATH");
expect(findPathKey({ Path: "/usr/bin" })).toBe("Path");
expect(findPathKey({ path: "/usr/bin" })).toBe("path");
expect(findPathKey({ PaTh: "/usr/bin" })).toBe("PaTh");
expect(findPathKey({ HOME: "/tmp" })).toBe("PATH");
it.each([
{ env: env({ PATH: "/usr/bin" }), expected: "PATH" },
{ env: env({ Path: "/usr/bin" }), expected: "Path" },
{ env: env({ path: "/usr/bin" }), expected: "path" },
{ env: env({ PaTh: "/usr/bin" }), expected: "PaTh" },
{ env: env({ HOME: "/tmp" }), expected: "PATH" },
])("finds the PATH key for %j", ({ env, expected }) => {
expect(findPathKey(env)).toBe(expected);
});
it("normalizes prepend lists by trimming, skipping blanks, and deduping", () => {
@@ -30,18 +34,29 @@ describe("path prepend helpers", () => {
expect(normalizePathPrepend()).toEqual([]);
});
it("merges prepended paths ahead of existing values without duplicates", () => {
expect(mergePathPrepend(`/usr/bin${path.delimiter}/opt/bin`, ["/custom/bin", "/usr/bin"])).toBe(
["/custom/bin", "/usr/bin", "/opt/bin"].join(path.delimiter),
);
expect(mergePathPrepend(undefined, ["/custom/bin"])).toBe("/custom/bin");
expect(mergePathPrepend("/usr/bin", [])).toBe("/usr/bin");
});
it("trims existing path entries while preserving order", () => {
expect(
mergePathPrepend(` /usr/bin ${path.delimiter} ${path.delimiter} /opt/bin `, ["/custom/bin"]),
).toBe(["/custom/bin", "/usr/bin", "/opt/bin"].join(path.delimiter));
it.each([
{
existingPath: `/usr/bin${path.delimiter}/opt/bin`,
prepend: ["/custom/bin", "/usr/bin"],
expected: ["/custom/bin", "/usr/bin", "/opt/bin"].join(path.delimiter),
},
{
existingPath: undefined,
prepend: ["/custom/bin"],
expected: "/custom/bin",
},
{
existingPath: "/usr/bin",
prepend: [],
expected: "/usr/bin",
},
{
existingPath: ` /usr/bin ${path.delimiter} ${path.delimiter} /opt/bin `,
prepend: ["/custom/bin"],
expected: ["/custom/bin", "/usr/bin", "/opt/bin"].join(path.delimiter),
},
])("merges prepended paths for %j", ({ existingPath, prepend, expected }) => {
expect(mergePathPrepend(existingPath, prepend)).toBe(expected);
});
it("applies prepends to the discovered PATH key and preserves existing casing", () => {
@@ -56,19 +71,30 @@ describe("path prepend helpers", () => {
});
});
it("respects requireExisting and ignores empty prepend lists", () => {
const envWithoutPath = { HOME: "/tmp/home" };
applyPathPrepend(envWithoutPath, ["/custom/bin"], { requireExisting: true });
expect(envWithoutPath).toEqual({ HOME: "/tmp/home" });
const envWithBlankPath = { path: "" };
applyPathPrepend(envWithBlankPath, ["/custom/bin"], { requireExisting: true });
expect(envWithBlankPath).toEqual({ path: "" });
const envWithPath = { PATH: "/usr/bin" };
applyPathPrepend(envWithPath, [], { requireExisting: true });
applyPathPrepend(envWithPath, undefined, { requireExisting: true });
expect(envWithPath).toEqual({ PATH: "/usr/bin" });
it.each([
{
env: env({ HOME: "/tmp/home" }),
prepend: ["/custom/bin"],
expected: env({ HOME: "/tmp/home" }),
},
{
env: env({ path: "" }),
prepend: ["/custom/bin"],
expected: env({ path: "" }),
},
{
env: env({ PATH: "/usr/bin" }),
prepend: [],
expected: env({ PATH: "/usr/bin" }),
},
{
env: env({ PATH: "/usr/bin" }),
prepend: undefined,
expected: env({ PATH: "/usr/bin" }),
},
])("respects requireExisting for %j", ({ env, prepend, expected }) => {
applyPathPrepend(env, prepend, { requireExisting: true });
expect(env).toEqual(expected);
});
it("creates PATH when prepends are provided and no path key exists", () => {

View File

@@ -2,12 +2,15 @@ import { describe, expect, it } from "vitest";
import { isPlainObject } from "./plain-object.js";
describe("isPlainObject", () => {
it.each([{}, { a: 1 }, Object.create(null), new (class X {})()])(
"accepts object-tag values: %j",
(value) => {
expect(isPlainObject(value)).toBe(true);
},
);
it.each([
{},
{ a: 1 },
Object.create(null),
new (class X {})(),
{ [Symbol.toStringTag]: "Object" },
])("accepts object-tag values: %j", (value) => {
expect(isPlainObject(value)).toBe(true);
});
it.each([
null,
@@ -22,8 +25,4 @@ describe("isPlainObject", () => {
])("rejects non-plain values: %j", (value) => {
expect(isPlainObject(value)).toBe(false);
});
it("accepts object-tag values with an explicit Object toStringTag", () => {
expect(isPlainObject({ [Symbol.toStringTag]: "Object" })).toBe(true);
});
});

View File

@@ -3,6 +3,42 @@ import { createTelegramRetryRunner } from "./retry-policy.js";
const ZERO_DELAY_RETRY = { attempts: 3, minDelayMs: 0, maxDelayMs: 0, jitter: 0 };
async function runRetryCase(params: {
runnerOptions: Parameters<typeof createTelegramRetryRunner>[0];
fnSteps: Array<{ type: "reject" | "resolve"; value: unknown }>;
expectedCalls: number;
expectedValue?: unknown;
expectedError?: string;
}): Promise<void> {
vi.useFakeTimers();
const runner = createTelegramRetryRunner(params.runnerOptions);
const fn = vi.fn();
const allRejects =
params.fnSteps.length > 0 && params.fnSteps.every((step) => step.type === "reject");
if (allRejects) {
fn.mockRejectedValue(params.fnSteps[0]?.value);
}
for (const [index, step] of params.fnSteps.entries()) {
if (allRejects && index > 0) {
break;
}
if (step.type === "reject") {
fn.mockRejectedValueOnce(step.value);
} else {
fn.mockResolvedValueOnce(step.value);
}
}
const promise = runner(fn, "test");
const assertion = params.expectedError
? expect(promise).rejects.toThrow(params.expectedError)
: expect(promise).resolves.toBe(params.expectedValue);
await vi.runAllTimersAsync();
await assertion;
expect(fn).toHaveBeenCalledTimes(params.expectedCalls);
}
describe("createTelegramRetryRunner", () => {
afterEach(() => {
vi.useRealTimers();
@@ -98,32 +134,13 @@ describe("createTelegramRetryRunner", () => {
expectedError: "connection timeout",
},
])("$name", async ({ runnerOptions, fnSteps, expectedCalls, expectedValue, expectedError }) => {
vi.useFakeTimers();
const runner = createTelegramRetryRunner(runnerOptions);
const fn = vi.fn();
const allRejects = fnSteps.length > 0 && fnSteps.every((step) => step.type === "reject");
if (allRejects) {
fn.mockRejectedValue(fnSteps[0]?.value);
}
for (const [index, step] of fnSteps.entries()) {
if (allRejects && index > 0) {
break;
}
if (step.type === "reject") {
fn.mockRejectedValueOnce(step.value);
} else {
fn.mockResolvedValueOnce(step.value);
}
}
const promise = runner(fn, "test");
const assertion = expectedError
? expect(promise).rejects.toThrow(expectedError)
: expect(promise).resolves.toBe(expectedValue);
await vi.runAllTimersAsync();
await assertion;
expect(fn).toHaveBeenCalledTimes(expectedCalls);
await runRetryCase({
runnerOptions,
fnSteps,
expectedCalls,
expectedValue,
expectedError,
});
});
});

View File

@@ -52,28 +52,63 @@ function mockFstatSync(stat: fs.Stats): SafeOpenSyncFstatSync {
return ((_: number) => stat) as unknown as SafeOpenSyncFstatSync;
}
describe("openVerifiedFileSync", () => {
it("returns a path error for missing files", async () => {
await withTempDir("openclaw-safe-open-", async (root) => {
const opened = openVerifiedFileSync({ filePath: path.join(root, "missing.txt") });
expect(opened.ok).toBe(false);
if (!opened.ok) {
expect(opened.reason).toBe("path");
}
});
async function expectOpenFailure(params: {
setup: (root: string) => Promise<Parameters<typeof openVerifiedFileSync>[0]>;
expectedReason: "path" | "validation" | "io";
}): Promise<void> {
await withTempDir("openclaw-safe-open-", async (root) => {
const opened = openVerifiedFileSync(await params.setup(root));
expect(opened.ok).toBe(false);
if (!opened.ok) {
expect(opened.reason).toBe(params.expectedReason);
}
});
}
it("rejects directories by default", async () => {
await withTempDir("openclaw-safe-open-", async (root) => {
const targetDir = path.join(root, "nested");
await fsp.mkdir(targetDir, { recursive: true });
const opened = openVerifiedFileSync({ filePath: targetDir });
expect(opened.ok).toBe(false);
if (!opened.ok) {
expect(opened.reason).toBe("validation");
}
});
describe("openVerifiedFileSync", () => {
it.each([
{
name: "missing files",
expectedReason: "path" as const,
setup: async (root: string) => ({ filePath: path.join(root, "missing.txt") }),
},
{
name: "directories by default",
expectedReason: "validation" as const,
setup: async (root: string) => {
const targetDir = path.join(root, "nested");
await fsp.mkdir(targetDir, { recursive: true });
return { filePath: targetDir };
},
},
{
name: "symlink paths when rejectPathSymlink is enabled",
expectedReason: "validation" as const,
setup: async (root: string) => {
const targetFile = path.join(root, "target.txt");
const linkFile = path.join(root, "link.txt");
await fsp.writeFile(targetFile, "hello");
await fsp.symlink(targetFile, linkFile);
return {
filePath: linkFile,
rejectPathSymlink: true,
};
},
},
{
name: "files larger than maxBytes",
expectedReason: "validation" as const,
setup: async (root: string) => {
const filePath = path.join(root, "payload.txt");
await fsp.writeFile(filePath, "hello");
return {
filePath,
maxBytes: 4,
};
},
},
])("fails for $name", async ({ setup, expectedReason }) => {
await expectOpenFailure({ setup, expectedReason });
});
it("accepts directories when allowedType is directory", async () => {
@@ -95,40 +130,6 @@ describe("openVerifiedFileSync", () => {
});
});
it("rejects symlink paths when rejectPathSymlink is enabled", async () => {
await withTempDir("openclaw-safe-open-", async (root) => {
const targetFile = path.join(root, "target.txt");
const linkFile = path.join(root, "link.txt");
await fsp.writeFile(targetFile, "hello");
await fsp.symlink(targetFile, linkFile);
const opened = openVerifiedFileSync({
filePath: linkFile,
rejectPathSymlink: true,
});
expect(opened.ok).toBe(false);
if (!opened.ok) {
expect(opened.reason).toBe("validation");
}
});
});
it("rejects files larger than maxBytes", async () => {
await withTempDir("openclaw-safe-open-", async (root) => {
const filePath = path.join(root, "payload.txt");
await fsp.writeFile(filePath, "hello");
const opened = openVerifiedFileSync({
filePath,
maxBytes: 4,
});
expect(opened.ok).toBe(false);
if (!opened.ok) {
expect(opened.reason).toBe("validation");
}
});
});
it("rejects post-open validation mismatches and closes the fd", () => {
const closeSync = (fd: number) => {
closed.push(fd);

View File

@@ -80,6 +80,18 @@ function expectFallsBackToOsTmpDir(params: { lstatSync: NonNullable<TmpDirOption
expect(tmpdir).toHaveBeenCalled();
}
function expectResolvesFallbackTmpDir(params: {
lstatSync: NonNullable<TmpDirOptions["lstatSync"]>;
accessSync?: NonNullable<TmpDirOptions["accessSync"]>;
}) {
const { resolved, tmpdir } = resolveWithMocks({
lstatSync: params.lstatSync,
...(params.accessSync ? { accessSync: params.accessSync } : {}),
});
expect(resolved).toBe(fallbackTmp());
expect(tmpdir).toHaveBeenCalled();
}
function missingThenSecureLstat(uid = 501) {
return vi
.fn<NonNullable<TmpDirOptions["lstatSync"]>>()
@@ -159,57 +171,49 @@ describe("resolvePreferredOpenClawTmpDir", () => {
expect(tmpdir).not.toHaveBeenCalled();
});
it("falls back to os.tmpdir()/openclaw when /tmp/openclaw is not a directory", () => {
const lstatSync = vi.fn(() => makeDirStat({ isDirectory: false, mode: 0o100644 }));
const { resolved, tmpdir } = resolveWithMocks({ lstatSync });
expect(resolved).toBe(fallbackTmp());
expect(tmpdir).toHaveBeenCalled();
});
it("falls back to os.tmpdir()/openclaw when /tmp is not writable", () => {
const accessSync = vi.fn((target: string) => {
if (target === "/tmp") {
throw new Error("read-only");
}
});
const lstatSync = vi.fn(() => {
throw nodeErrorWithCode("ENOENT");
});
const { resolved, tmpdir } = resolveWithMocks({
accessSync,
lstatSync,
});
expect(resolved).toBe(fallbackTmp());
expect(tmpdir).toHaveBeenCalled();
});
it("falls back when /tmp/openclaw exists but is not writable", () => {
const accessSync = vi.fn((target: string) => {
if (target === POSIX_OPENCLAW_TMP_DIR) {
throw new Error("not writable");
}
});
const { resolved, tmpdir } = resolveWithMocks({
accessSync,
it.each([
{
name: "falls back to os.tmpdir()/openclaw when /tmp/openclaw is not a directory",
lstatSync: vi.fn(() => makeDirStat({ isDirectory: false, mode: 0o100644 })),
},
{
name: "falls back to os.tmpdir()/openclaw when /tmp is not writable",
lstatSync: vi.fn(() => {
throw nodeErrorWithCode("ENOENT");
}),
accessSync: vi.fn((target: string) => {
if (target === "/tmp") {
throw new Error("read-only");
}
}),
},
{
name: "falls back when /tmp/openclaw exists but is not writable",
lstatSync: vi.fn(() => secureDirStat()),
});
expect(resolved).toBe(fallbackTmp());
expect(tmpdir).toHaveBeenCalled();
});
it("falls back when /tmp/openclaw is a symlink", () => {
expectFallsBackToOsTmpDir({ lstatSync: symlinkTmpDirLstat() });
});
it("falls back when /tmp/openclaw is not owned by the current user", () => {
expectFallsBackToOsTmpDir({ lstatSync: vi.fn(() => makeDirStat({ uid: 0 })) });
});
it("falls back when /tmp/openclaw is group/other writable", () => {
expectFallsBackToOsTmpDir({ lstatSync: vi.fn(() => makeDirStat({ mode: 0o40777 })) });
accessSync: vi.fn((target: string) => {
if (target === POSIX_OPENCLAW_TMP_DIR) {
throw new Error("not writable");
}
}),
},
{
name: "falls back when /tmp/openclaw is a symlink",
lstatSync: symlinkTmpDirLstat(),
},
{
name: "falls back when /tmp/openclaw is not owned by the current user",
lstatSync: vi.fn(() => makeDirStat({ uid: 0 })),
},
{
name: "falls back when /tmp/openclaw is group/other writable",
lstatSync: vi.fn(() => makeDirStat({ mode: 0o40777 })),
},
])("$name", ({ lstatSync, accessSync }) => {
if (accessSync) {
expectResolvesFallbackTmpDir({ lstatSync, accessSync });
return;
}
expectFallsBackToOsTmpDir({ lstatSync });
});
it("repairs existing /tmp/openclaw permissions when they are too broad", () => {

View File

@@ -22,6 +22,14 @@ function createFakeServer(): FakeServer {
return server;
}
async function expectTaskPending(task: Promise<unknown>) {
const early = await Promise.race([
task.then(() => "resolved"),
new Promise<"pending">((resolve) => setTimeout(() => resolve("pending"), 25)),
]);
expect(early).toBe("pending");
}
describe("plugin-sdk channel lifecycle helpers", () => {
it("binds account id onto status patches", () => {
const setStatus = vi.fn();
@@ -42,12 +50,7 @@ describe("plugin-sdk channel lifecycle helpers", () => {
it("resolves waitUntilAbort when signal aborts", async () => {
const abort = new AbortController();
const task = waitUntilAbort(abort.signal);
const early = await Promise.race([
task.then(() => "resolved"),
new Promise<"pending">((resolve) => setTimeout(() => resolve("pending"), 25)),
]);
expect(early).toBe("pending");
await expectTaskPending(task);
abort.abort();
await expect(task).resolves.toBeUndefined();
@@ -75,11 +78,7 @@ describe("plugin-sdk channel lifecycle helpers", () => {
},
});
const early = await Promise.race([
task.then(() => "resolved"),
new Promise<"pending">((resolve) => setTimeout(() => resolve("pending"), 25)),
]);
expect(early).toBe("pending");
await expectTaskPending(task);
expect(stop).not.toHaveBeenCalled();
abort.abort();
@@ -90,12 +89,7 @@ describe("plugin-sdk channel lifecycle helpers", () => {
it("keeps server task pending until close, then resolves", async () => {
const server = createFakeServer();
const task = keepHttpServerTaskAlive({ server });
const early = await Promise.race([
task.then(() => "resolved"),
new Promise<"pending">((resolve) => setTimeout(() => resolve("pending"), 25)),
]);
expect(early).toBe("pending");
await expectTaskPending(task);
server.close();
await expect(task).resolves.toBeUndefined();

View File

@@ -5,14 +5,21 @@ import {
createChannelPairingController,
} from "./channel-pairing.js";
function createReplyCollector() {
const replies: string[] = [];
return {
replies,
sendPairingReply: vi.fn(async (text: string) => {
replies.push(text);
}),
};
}
describe("createChannelPairingController", () => {
it("scopes store access and issues pairing challenges through the scoped store", async () => {
const readAllowFromStore = vi.fn(async () => ["alice"]);
const upsertPairingRequest = vi.fn(async () => ({ code: "123456", created: true }));
const replies: string[] = [];
const sendPairingReply = vi.fn(async (text: string) => {
replies.push(text);
});
const { replies, sendPairingReply } = createReplyCollector();
const runtime = {
channel: {
pairing: {
@@ -53,7 +60,7 @@ describe("createChannelPairingController", () => {
describe("createChannelPairingChallengeIssuer", () => {
it("binds a channel and scoped pairing store to challenge issuance", async () => {
const upsertPairingRequest = vi.fn(async () => ({ code: "654321", created: true }));
const replies: string[] = [];
const { replies, sendPairingReply } = createReplyCollector();
const issueChallenge = createChannelPairingChallengeIssuer({
channel: "signal",
upsertPairingRequest,
@@ -62,9 +69,7 @@ describe("createChannelPairingChallengeIssuer", () => {
await issueChallenge({
senderId: "user-2",
senderIdLine: "Your id: user-2",
sendPairingReply: async (text: string) => {
replies.push(text);
},
sendPairingReply,
});
expect(upsertPairingRequest).toHaveBeenCalledWith({

View File

@@ -6,45 +6,45 @@ const baseCfg = {
commands: { useAccessGroups: true },
} as unknown as OpenClawConfig;
describe("plugin-sdk/command-auth", () => {
it("authorizes group commands from explicit group allowlist", async () => {
const result = await resolveSenderCommandAuthorization({
cfg: baseCfg,
rawBody: "/status",
isGroup: true,
dmPolicy: "pairing",
configuredAllowFrom: ["dm-owner"],
configuredGroupAllowFrom: ["group-owner"],
senderId: "group-owner",
isSenderAllowed: (senderId, allowFrom) => allowFrom.includes(senderId),
readAllowFromStore: async () => ["paired-user"],
shouldComputeCommandAuthorized: () => true,
resolveCommandAuthorizedFromAuthorizers: ({ useAccessGroups, authorizers }) =>
useAccessGroups && authorizers.some((entry) => entry.configured && entry.allowed),
});
expect(result.commandAuthorized).toBe(true);
expect(result.senderAllowedForCommands).toBe(true);
expect(result.effectiveAllowFrom).toEqual(["dm-owner"]);
expect(result.effectiveGroupAllowFrom).toEqual(["group-owner"]);
async function resolveAuthorization(params: {
senderId: string;
configuredAllowFrom?: string[];
configuredGroupAllowFrom?: string[];
}) {
return resolveSenderCommandAuthorization({
cfg: baseCfg,
rawBody: "/status",
isGroup: true,
dmPolicy: "pairing",
configuredAllowFrom: params.configuredAllowFrom ?? ["dm-owner"],
configuredGroupAllowFrom: params.configuredGroupAllowFrom ?? ["group-owner"],
senderId: params.senderId,
isSenderAllowed: (senderId, allowFrom) => allowFrom.includes(senderId),
readAllowFromStore: async () => ["paired-user"],
shouldComputeCommandAuthorized: () => true,
resolveCommandAuthorizedFromAuthorizers: ({ useAccessGroups, authorizers }) =>
useAccessGroups && authorizers.some((entry) => entry.configured && entry.allowed),
});
}
it("keeps pairing-store identities DM-only for group command auth", async () => {
const result = await resolveSenderCommandAuthorization({
cfg: baseCfg,
rawBody: "/status",
isGroup: true,
dmPolicy: "pairing",
configuredAllowFrom: ["dm-owner"],
configuredGroupAllowFrom: ["group-owner"],
describe("plugin-sdk/command-auth", () => {
it.each([
{
name: "authorizes group commands from explicit group allowlist",
senderId: "group-owner",
expectedAuthorized: true,
expectedSenderAllowed: true,
},
{
name: "keeps pairing-store identities DM-only for group command auth",
senderId: "paired-user",
isSenderAllowed: (senderId, allowFrom) => allowFrom.includes(senderId),
readAllowFromStore: async () => ["paired-user"],
shouldComputeCommandAuthorized: () => true,
resolveCommandAuthorizedFromAuthorizers: ({ useAccessGroups, authorizers }) =>
useAccessGroups && authorizers.some((entry) => entry.configured && entry.allowed),
});
expect(result.commandAuthorized).toBe(false);
expect(result.senderAllowedForCommands).toBe(false);
expectedAuthorized: false,
expectedSenderAllowed: false,
},
])("$name", async ({ senderId, expectedAuthorized, expectedSenderAllowed }) => {
const result = await resolveAuthorization({ senderId });
expect(result.commandAuthorized).toBe(expectedAuthorized);
expect(result.senderAllowedForCommands).toBe(expectedSenderAllowed);
expect(result.effectiveAllowFrom).toEqual(["dm-owner"]);
expect(result.effectiveGroupAllowFrom).toEqual(["group-owner"]);
});

View File

@@ -11,6 +11,39 @@ const baseCfg = {
commands: { useAccessGroups: true },
} as unknown as OpenClawConfig;
function createDirectDmRuntime() {
const recordInboundSession = vi.fn(async () => {});
const dispatchReplyWithBufferedBlockDispatcher = vi.fn(async ({ dispatcherOptions }) => {
await dispatcherOptions.deliver({ text: "reply text" });
});
return {
recordInboundSession,
dispatchReplyWithBufferedBlockDispatcher,
runtime: {
channel: {
routing: {
resolveAgentRoute: vi.fn(({ accountId, peer }) => ({
agentId: "agent-main",
accountId,
sessionKey: `dm:${peer.id}`,
})),
},
session: {
resolveStorePath: vi.fn(() => "/tmp/direct-dm-session-store"),
readSessionUpdatedAt: vi.fn(() => 1234),
recordInboundSession,
},
reply: {
resolveEnvelopeFormatOptions: vi.fn(() => ({ mode: "agent" })),
formatAgentEnvelope: vi.fn(({ body }) => `env:${body}`),
finalizeInboundContext: vi.fn((ctx) => ctx),
dispatchReplyWithBufferedBlockDispatcher,
},
},
} as never,
};
}
describe("plugin-sdk/direct-dm", () => {
it("resolves inbound DM access and command auth through one helper", async () => {
const result = await resolveInboundDirectDmAccessWithRuntime({
@@ -62,17 +95,17 @@ describe("plugin-sdk/direct-dm", () => {
});
await expect(
authorizer({
senderId: "pair-me",
reply: async () => {},
}),
).resolves.toBe("pairing");
await expect(
authorizer({
senderId: "blocked",
reply: async () => {},
}),
).resolves.toBe("block");
Promise.all([
authorizer({
senderId: "pair-me",
reply: async () => {},
}),
authorizer({
senderId: "blocked",
reply: async () => {},
}),
]),
).resolves.toEqual(["pairing", "block"]);
expect(issuePairingChallenge).toHaveBeenCalledTimes(1);
expect(onBlocked).toHaveBeenCalledWith({
@@ -98,38 +131,15 @@ describe("plugin-sdk/direct-dm", () => {
});
it("dispatches direct DMs through the standard route/session/reply pipeline", async () => {
const recordInboundSession = vi.fn(async () => {});
const dispatchReplyWithBufferedBlockDispatcher = vi.fn(async ({ dispatcherOptions }) => {
await dispatcherOptions.deliver({ text: "reply text" });
});
const { recordInboundSession, dispatchReplyWithBufferedBlockDispatcher, runtime } =
createDirectDmRuntime();
const deliver = vi.fn(async () => {});
const result = await dispatchInboundDirectDmWithRuntime({
cfg: {
session: { store: { type: "jsonl" } },
} as never,
runtime: {
channel: {
routing: {
resolveAgentRoute: vi.fn(({ accountId, peer }) => ({
agentId: "agent-main",
accountId,
sessionKey: `dm:${peer.id}`,
})),
},
session: {
resolveStorePath: vi.fn(() => "/tmp/direct-dm-session-store"),
readSessionUpdatedAt: vi.fn(() => 1234),
recordInboundSession,
},
reply: {
resolveEnvelopeFormatOptions: vi.fn(() => ({ mode: "agent" })),
formatAgentEnvelope: vi.fn(({ body }) => `env:${body}`),
finalizeInboundContext: vi.fn((ctx) => ctx),
dispatchReplyWithBufferedBlockDispatcher,
},
},
} as never,
runtime,
channel: "nostr",
channelLabel: "Nostr",
accountId: "default",

View File

@@ -14,60 +14,81 @@ describe("fetchWithBearerAuthScopeFallback", () => {
).rejects.toThrow("URL must use HTTPS");
});
it("returns immediately when the first attempt succeeds", async () => {
const fetchFn = vi.fn(async () => new Response("ok", { status: 200 }));
const tokenProvider = { getAccessToken: vi.fn(async () => "unused") };
const response = await fetchWithBearerAuthScopeFallback({
it.each([
{
name: "returns immediately when the first attempt succeeds",
url: "https://example.com/file",
scopes: ["https://graph.microsoft.com"],
fetchFn: asFetch(fetchFn),
tokenProvider,
});
expect(response.status).toBe(200);
expect(fetchFn).toHaveBeenCalledTimes(1);
expect(tokenProvider.getAccessToken).not.toHaveBeenCalled();
});
it("retries with auth scopes after a 401 response", async () => {
const fetchFn = vi
.fn()
.mockResolvedValueOnce(new Response("unauthorized", { status: 401 }))
.mockResolvedValueOnce(new Response("ok", { status: 200 }));
const tokenProvider = { getAccessToken: vi.fn(async () => "token-1") };
const response = await fetchWithBearerAuthScopeFallback({
responses: [new Response("ok", { status: 200 })],
shouldAttachAuth: undefined,
expectedStatus: 200,
expectedFetchCalls: 1,
expectedTokenCalls: [] as string[],
expectedAuthHeader: null,
},
{
name: "retries with auth scopes after a 401 response",
url: "https://graph.microsoft.com/v1.0/me",
scopes: ["https://graph.microsoft.com", "https://api.botframework.com"],
fetchFn: asFetch(fetchFn),
tokenProvider,
});
expect(response.status).toBe(200);
expect(fetchFn).toHaveBeenCalledTimes(2);
expect(tokenProvider.getAccessToken).toHaveBeenCalledWith("https://graph.microsoft.com");
const secondCall = fetchFn.mock.calls[1] as [string, RequestInit | undefined];
const secondHeaders = new Headers(secondCall[1]?.headers);
expect(secondHeaders.get("authorization")).toBe("Bearer token-1");
});
it("does not attach auth when host predicate rejects url", async () => {
const fetchFn = vi.fn(async () => new Response("unauthorized", { status: 401 }));
const tokenProvider = { getAccessToken: vi.fn(async () => "token-1") };
const response = await fetchWithBearerAuthScopeFallback({
responses: [
new Response("unauthorized", { status: 401 }),
new Response("ok", { status: 200 }),
],
shouldAttachAuth: undefined,
expectedStatus: 200,
expectedFetchCalls: 2,
expectedTokenCalls: ["https://graph.microsoft.com"],
expectedAuthHeader: "Bearer token-1",
},
{
name: "does not attach auth when host predicate rejects url",
url: "https://example.com/file",
scopes: ["https://graph.microsoft.com"],
fetchFn: asFetch(fetchFn),
tokenProvider,
responses: [new Response("unauthorized", { status: 401 })],
shouldAttachAuth: () => false,
});
expectedStatus: 401,
expectedFetchCalls: 1,
expectedTokenCalls: [] as string[],
expectedAuthHeader: null,
},
])(
"$name",
async ({
url,
scopes,
responses,
shouldAttachAuth,
expectedStatus,
expectedFetchCalls,
expectedTokenCalls,
expectedAuthHeader,
}) => {
const fetchFn = vi.fn();
for (const response of responses) {
fetchFn.mockResolvedValueOnce(response);
}
const tokenProvider = { getAccessToken: vi.fn(async () => "token-1") };
expect(response.status).toBe(401);
expect(fetchFn).toHaveBeenCalledTimes(1);
expect(tokenProvider.getAccessToken).not.toHaveBeenCalled();
});
const response = await fetchWithBearerAuthScopeFallback({
url,
scopes,
fetchFn: asFetch(fetchFn),
tokenProvider,
shouldAttachAuth,
});
expect(response.status).toBe(expectedStatus);
expect(fetchFn).toHaveBeenCalledTimes(expectedFetchCalls);
const tokenCalls = tokenProvider.getAccessToken.mock.calls as unknown as Array<[string]>;
expect(tokenCalls.map(([scope]) => scope)).toEqual(expectedTokenCalls);
if (expectedAuthHeader === null) {
return;
}
const secondCallInit = fetchFn.mock.calls.at(1)?.[1] as RequestInit | undefined;
const secondHeaders = new Headers(secondCallInit?.headers);
expect(secondHeaders.get("authorization")).toBe(expectedAuthHeader);
},
);
it("continues across scopes when token retrieval fails", async () => {
const fetchFn = vi

View File

@@ -42,9 +42,13 @@ async function collectRuntimeExports(filePath: string, seen = new Set<string>())
return exportNames;
}
async function readIndexRuntimeExports() {
return await collectRuntimeExports(path.join(import.meta.dirname, "index.ts"));
}
describe("plugin-sdk exports", () => {
it("does not expose runtime modules", async () => {
const runtimeExports = await collectRuntimeExports(path.join(import.meta.dirname, "index.ts"));
const runtimeExports = await readIndexRuntimeExports();
const forbidden = [
"chunkMarkdownText",
"chunkText",
@@ -87,7 +91,7 @@ describe("plugin-sdk exports", () => {
});
it("keeps the root runtime surface intentionally small", async () => {
const runtimeExports = await collectRuntimeExports(path.join(import.meta.dirname, "index.ts"));
const runtimeExports = await readIndexRuntimeExports();
expect([...runtimeExports].toSorted()).toEqual([
"delegateCompactionToRuntime",
"emptyPluginConfigSchema",

View File

@@ -57,23 +57,25 @@ describe("enqueueKeyedTask", () => {
it("keeps queue alive after task failures", async () => {
const tails = new Map<string, Promise<void>>();
await expect(
enqueueKeyedTask({
tails,
key: "a",
task: async () => {
throw new Error("boom");
},
}),
).rejects.toThrow("boom");
const runs = [
() =>
enqueueKeyedTask({
tails,
key: "a",
task: async () => {
throw new Error("boom");
},
}),
() =>
enqueueKeyedTask({
tails,
key: "a",
task: async () => "ok",
}),
];
await expect(
enqueueKeyedTask({
tails,
key: "a",
task: async () => "ok",
}),
).resolves.toBe("ok");
await expect(runs[0]()).rejects.toThrow("boom");
await expect(runs[1]()).resolves.toBe("ok");
});
it("runs enqueue/settle hooks once per task", async () => {

View File

@@ -12,6 +12,15 @@ async function makeTmpRoot(): Promise<string> {
return root;
}
function createDedupe(root: string, overrides?: { ttlMs?: number }) {
return createPersistentDedupe({
ttlMs: overrides?.ttlMs ?? 24 * 60 * 60 * 1000,
memoryMaxSize: 100,
fileMaxEntries: 1000,
resolveFilePath: (namespace) => path.join(root, `${namespace}.json`),
});
}
afterEach(async () => {
await Promise.all(
tmpRoots.splice(0).map((root) => fs.rm(root, { recursive: true, force: true })),
@@ -21,35 +30,18 @@ afterEach(async () => {
describe("createPersistentDedupe", () => {
it("deduplicates keys and persists across instances", async () => {
const root = await makeTmpRoot();
const resolveFilePath = (namespace: string) => path.join(root, `${namespace}.json`);
const first = createPersistentDedupe({
ttlMs: 24 * 60 * 60 * 1000,
memoryMaxSize: 100,
fileMaxEntries: 1000,
resolveFilePath,
});
const first = createDedupe(root);
expect(await first.checkAndRecord("m1", { namespace: "a" })).toBe(true);
expect(await first.checkAndRecord("m1", { namespace: "a" })).toBe(false);
const second = createPersistentDedupe({
ttlMs: 24 * 60 * 60 * 1000,
memoryMaxSize: 100,
fileMaxEntries: 1000,
resolveFilePath,
});
const second = createDedupe(root);
expect(await second.checkAndRecord("m1", { namespace: "a" })).toBe(false);
expect(await second.checkAndRecord("m1", { namespace: "b" })).toBe(true);
});
it("guards concurrent calls for the same key", async () => {
const root = await makeTmpRoot();
const dedupe = createPersistentDedupe({
ttlMs: 10_000,
memoryMaxSize: 100,
fileMaxEntries: 1000,
resolveFilePath: (namespace) => path.join(root, `${namespace}.json`),
});
const dedupe = createDedupe(root, { ttlMs: 10_000 });
const [first, second] = await Promise.all([
dedupe.checkAndRecord("race-key", { namespace: "feishu" }),
@@ -73,23 +65,11 @@ describe("createPersistentDedupe", () => {
it("warmup loads persisted entries into memory", async () => {
const root = await makeTmpRoot();
const resolveFilePath = (namespace: string) => path.join(root, `${namespace}.json`);
const writer = createPersistentDedupe({
ttlMs: 24 * 60 * 60 * 1000,
memoryMaxSize: 100,
fileMaxEntries: 1000,
resolveFilePath,
});
const writer = createDedupe(root);
expect(await writer.checkAndRecord("msg-1", { namespace: "acct" })).toBe(true);
expect(await writer.checkAndRecord("msg-2", { namespace: "acct" })).toBe(true);
const reader = createPersistentDedupe({
ttlMs: 24 * 60 * 60 * 1000,
memoryMaxSize: 100,
fileMaxEntries: 1000,
resolveFilePath,
});
const reader = createDedupe(root);
const loaded = await reader.warmup("acct");
expect(loaded).toBe(2);
expect(await reader.checkAndRecord("msg-1", { namespace: "acct" })).toBe(false);
@@ -97,42 +77,37 @@ describe("createPersistentDedupe", () => {
expect(await reader.checkAndRecord("msg-3", { namespace: "acct" })).toBe(true);
});
it("warmup returns 0 when no disk file exists", async () => {
it.each([
{
name: "returns 0 when no disk file exists",
setup: async (root: string) => createDedupe(root, { ttlMs: 10_000 }),
namespace: "nonexistent",
expectedLoaded: 0,
verify: async () => undefined,
},
{
name: "skips expired entries",
setup: async (root: string) => {
const writer = createDedupe(root, { ttlMs: 1000 });
const oldNow = Date.now() - 2000;
expect(await writer.checkAndRecord("old-msg", { namespace: "acct", now: oldNow })).toBe(
true,
);
expect(await writer.checkAndRecord("new-msg", { namespace: "acct" })).toBe(true);
return createDedupe(root, { ttlMs: 1000 });
},
namespace: "acct",
expectedLoaded: 1,
verify: async (reader: ReturnType<typeof createDedupe>) => {
expect(await reader.checkAndRecord("old-msg", { namespace: "acct" })).toBe(true);
expect(await reader.checkAndRecord("new-msg", { namespace: "acct" })).toBe(false);
},
},
])("warmup $name", async ({ setup, namespace, expectedLoaded, verify }) => {
const root = await makeTmpRoot();
const dedupe = createPersistentDedupe({
ttlMs: 10_000,
memoryMaxSize: 100,
fileMaxEntries: 1000,
resolveFilePath: (ns) => path.join(root, `${ns}.json`),
});
const loaded = await dedupe.warmup("nonexistent");
expect(loaded).toBe(0);
});
it("warmup skips expired entries", async () => {
const root = await makeTmpRoot();
const resolveFilePath = (namespace: string) => path.join(root, `${namespace}.json`);
const ttlMs = 1000;
const writer = createPersistentDedupe({
ttlMs,
memoryMaxSize: 100,
fileMaxEntries: 1000,
resolveFilePath,
});
const oldNow = Date.now() - 2000;
expect(await writer.checkAndRecord("old-msg", { namespace: "acct", now: oldNow })).toBe(true);
expect(await writer.checkAndRecord("new-msg", { namespace: "acct" })).toBe(true);
const reader = createPersistentDedupe({
ttlMs,
memoryMaxSize: 100,
fileMaxEntries: 1000,
resolveFilePath,
});
const loaded = await reader.warmup("acct");
expect(loaded).toBe(1);
expect(await reader.checkAndRecord("old-msg", { namespace: "acct" })).toBe(true);
expect(await reader.checkAndRecord("new-msg", { namespace: "acct" })).toBe(false);
const reader = await setup(root);
const loaded = await reader.warmup(namespace);
expect(loaded).toBe(expectedLoaded);
await verify(reader);
});
});

View File

@@ -35,6 +35,16 @@ function createCatalogContext(
};
}
async function captureProviderEntry(params: {
entry: ReturnType<typeof defineSingleProviderPluginEntry>;
config?: ProviderCatalogContext["config"];
}) {
const captured = capturePluginRegistration(params.entry);
const provider = captured.providers[0];
const catalog = await provider?.catalog?.run(createCatalogContext(params.config));
return { captured, provider, catalog };
}
describe("defineSingleProviderPluginEntry", () => {
it("registers a single provider with default wizard metadata", async () => {
const entry = defineSingleProviderPluginEntry({
@@ -66,9 +76,8 @@ describe("defineSingleProviderPluginEntry", () => {
},
});
const captured = capturePluginRegistration(entry);
const { captured, provider, catalog } = await captureProviderEntry({ entry });
expect(captured.providers).toHaveLength(1);
const provider = captured.providers[0];
expect(provider).toMatchObject({
id: "demo",
label: "Demo",
@@ -90,7 +99,6 @@ describe("defineSingleProviderPluginEntry", () => {
methodId: "api-key",
});
const catalog = await provider?.catalog?.run(createCatalogContext());
expect(catalog).toEqual({
provider: {
api: "openai-completions",
@@ -159,11 +167,22 @@ describe("defineSingleProviderPluginEntry", () => {
},
});
const captured = capturePluginRegistration(entry);
const { captured, provider, catalog } = await captureProviderEntry({
entry,
config: {
models: {
providers: {
gateway: {
baseUrl: "https://override.test/v1",
models: [createModel("router", "Router")],
},
},
},
},
});
expect(captured.providers).toHaveLength(1);
expect(captured.webSearchProviders).toHaveLength(1);
const provider = captured.providers[0];
expect(provider).toMatchObject({
id: "gateway",
label: "Gateway",
@@ -180,18 +199,6 @@ describe("defineSingleProviderPluginEntry", () => {
groupHint: "Primary key",
});
const catalog = await provider?.catalog?.run(
createCatalogContext({
models: {
providers: {
gateway: {
baseUrl: "https://override.test/v1",
models: [createModel("router", "Router")],
},
},
},
}),
);
expect(catalog).toEqual({
provider: {
api: "openai-completions",

View File

@@ -2,16 +2,23 @@ import { describe, expect, it } from "vitest";
import { resolveRequestUrl } from "./request-url.js";
describe("resolveRequestUrl", () => {
it("resolves string input", () => {
expect(resolveRequestUrl("https://example.com/a")).toBe("https://example.com/a");
});
it("resolves URL input", () => {
expect(resolveRequestUrl(new URL("https://example.com/b"))).toBe("https://example.com/b");
});
it("resolves object input with url field", () => {
const requestLike = { url: "https://example.com/c" } as unknown as RequestInfo;
expect(resolveRequestUrl(requestLike)).toBe("https://example.com/c");
it.each([
{
name: "resolves string input",
input: "https://example.com/a",
expected: "https://example.com/a",
},
{
name: "resolves URL input",
input: new URL("https://example.com/b"),
expected: "https://example.com/b",
},
{
name: "resolves object input with url field",
input: { url: "https://example.com/c" } as unknown as RequestInfo,
expected: "https://example.com/c",
},
])("$name", ({ input, expected }) => {
expect(resolveRequestUrl(input)).toBe(expected);
});
});

View File

@@ -108,6 +108,14 @@ function loadRootAliasWithStubs(options?: {
};
}
function createPackageRoot() {
return path.dirname(path.dirname(rootAliasPath));
}
function createDistAliasPath() {
return path.join(createPackageRoot(), "dist", "plugin-sdk", "root-alias.cjs");
}
describe("plugin-sdk root alias", () => {
it("exposes the fast empty config schema helper", () => {
const factory = rootSdk.emptyPluginConfigSchema as (() => EmptySchema) | undefined;
@@ -149,7 +157,7 @@ describe("plugin-sdk root alias", () => {
it("loads legacy root exports on demand and preserves reflection", () => {
const lazyModule = loadRootAliasWithStubs({
monolithicExports: {
slowHelper: () => "loaded",
slowHelper: (): string => "loaded",
},
});
const lazyRootSdk = lazyModule.moduleExports;
@@ -164,40 +172,44 @@ describe("plugin-sdk root alias", () => {
expect(Object.getOwnPropertyDescriptor(lazyRootSdk, "slowHelper")).toBeDefined();
});
it("prefers native loading when compat resolves to dist", () => {
const lazyModule = loadRootAliasWithStubs({
distExists: true,
monolithicExports: {
slowHelper: () => "loaded",
it.each([
{
name: "prefers native loading when compat resolves to dist",
options: {
distExists: true,
monolithicExports: {
slowHelper: (): string => "loaded",
},
},
});
expectedTryNative: true,
},
{
name: "prefers source loading under vitest even when compat resolves to dist",
options: {
distExists: true,
env: { VITEST: "1" },
monolithicExports: {
slowHelper: (): string => "loaded",
},
},
expectedTryNative: false,
},
])("$name", ({ options, expectedTryNative }) => {
const lazyModule = loadRootAliasWithStubs(options);
expect((lazyModule.moduleExports.slowHelper as () => string)()).toBe("loaded");
expect(lazyModule.createJitiOptions.at(-1)?.tryNative).toBe(true);
});
it("prefers source loading under vitest even when compat resolves to dist", () => {
const lazyModule = loadRootAliasWithStubs({
distExists: true,
env: { VITEST: "1" },
monolithicExports: {
slowHelper: () => "loaded",
},
});
expect((lazyModule.moduleExports.slowHelper as () => string)()).toBe("loaded");
expect(lazyModule.createJitiOptions.at(-1)?.tryNative).toBe(false);
expect(lazyModule.createJitiOptions.at(-1)?.tryNative).toBe(expectedTryNative);
});
it("falls back to src files even when the alias itself is loaded from dist", () => {
const packageRoot = path.dirname(path.dirname(rootAliasPath));
const distAliasPath = path.join(packageRoot, "dist", "plugin-sdk", "root-alias.cjs");
const packageRoot = createPackageRoot();
const distAliasPath = createDistAliasPath();
const lazyModule = loadRootAliasWithStubs({
aliasPath: distAliasPath,
distExists: false,
monolithicExports: {
onDiagnosticEvent: () => () => undefined,
slowHelper: () => "loaded",
onDiagnosticEvent: (): (() => void) => () => undefined,
slowHelper: (): string => "loaded",
},
});
@@ -216,15 +228,15 @@ describe("plugin-sdk root alias", () => {
});
it("prefers hashed dist diagnostic events chunks before falling back to src", () => {
const packageRoot = path.dirname(path.dirname(rootAliasPath));
const distAliasPath = path.join(packageRoot, "dist", "plugin-sdk", "root-alias.cjs");
const packageRoot = createPackageRoot();
const distAliasPath = createDistAliasPath();
const lazyModule = loadRootAliasWithStubs({
aliasPath: distAliasPath,
distExists: false,
distEntries: ["diagnostic-events-W3Hz61fI.js"],
monolithicExports: {
r: () => () => undefined,
slowHelper: () => "loaded",
r: (): (() => void) => () => undefined,
slowHelper: (): string => "loaded",
},
});
@@ -241,36 +253,42 @@ describe("plugin-sdk root alias", () => {
);
});
it("forwards delegateCompactionToRuntime through the compat-backed root alias", () => {
const delegateCompactionToRuntime = () => "delegated";
it.each([
{
name: "forwards delegateCompactionToRuntime through the compat-backed root alias",
exportName: "delegateCompactionToRuntime",
exportValue: () => "delegated",
expectIdentity: true,
assertForwarded: (value: unknown) => {
expect(typeof value).toBe("function");
expect((value as () => string)()).toBe("delegated");
},
},
{
name: "forwards onDiagnosticEvent through the compat-backed root alias",
exportName: "onDiagnosticEvent",
exportValue: () => () => undefined,
expectIdentity: false,
assertForwarded: (value: unknown) => {
expect(typeof value).toBe("function");
expect(typeof (value as (listener: () => void) => () => void)(() => undefined)).toBe(
"function",
);
},
},
])("$name", ({ exportName, exportValue, expectIdentity, assertForwarded }) => {
const lazyModule = loadRootAliasWithStubs({
monolithicExports: {
delegateCompactionToRuntime,
[exportName]: exportValue,
},
});
const lazyRootSdk = lazyModule.moduleExports;
const forwarded = lazyModule.moduleExports[exportName];
expect(typeof lazyRootSdk.delegateCompactionToRuntime).toBe("function");
expect(lazyRootSdk.delegateCompactionToRuntime).toBe(delegateCompactionToRuntime);
expect("delegateCompactionToRuntime" in lazyRootSdk).toBe(true);
});
it("forwards onDiagnosticEvent through the compat-backed root alias", () => {
const onDiagnosticEvent = () => () => undefined;
const lazyModule = loadRootAliasWithStubs({
monolithicExports: {
onDiagnosticEvent,
},
});
const lazyRootSdk = lazyModule.moduleExports;
expect(typeof lazyRootSdk.onDiagnosticEvent).toBe("function");
expect(
typeof (lazyRootSdk.onDiagnosticEvent as (listener: () => void) => () => void)(
() => undefined,
),
).toBe("function");
expect("onDiagnosticEvent" in lazyRootSdk).toBe(true);
assertForwarded(forwarded);
if (expectIdentity) {
expect(forwarded).toBe(exportValue);
}
expect(exportName in lazyModule.moduleExports).toBe(true);
});
it("loads legacy root exports through the merged root wrapper", { timeout: 240_000 }, () => {

View File

@@ -6,19 +6,27 @@ import {
} from "./secret-input.js";
describe("plugin-sdk secret input helpers", () => {
it("accepts undefined for optional secret input", () => {
expect(buildOptionalSecretInputSchema().safeParse(undefined).success).toBe(true);
});
it("accepts arrays of secret inputs", () => {
const result = buildSecretInputArraySchema().safeParse([
"sk-plain",
{ source: "env", provider: "default", id: "OPENAI_API_KEY" },
]);
expect(result.success).toBe(true);
});
it("normalizes plaintext secret strings", () => {
expect(normalizeSecretInputString(" sk-test ")).toBe("sk-test");
it.each([
{
name: "accepts undefined for optional secret input",
run: () => buildOptionalSecretInputSchema().safeParse(undefined).success,
expected: true,
},
{
name: "accepts arrays of secret inputs",
run: () =>
buildSecretInputArraySchema().safeParse([
"sk-plain",
{ source: "env", provider: "default", id: "OPENAI_API_KEY" },
]).success,
expected: true,
},
{
name: "normalizes plaintext secret strings",
run: () => normalizeSecretInputString(" sk-test "),
expected: "sk-test",
},
])("$name", ({ run, expected }) => {
expect(run()).toEqual(expected);
});
});

View File

@@ -4,31 +4,49 @@ import { describe, expect, it } from "vitest";
import { resolvePreferredOpenClawTmpDir } from "../infra/tmp-openclaw-dir.js";
import { buildRandomTempFilePath, withTempDownloadPath } from "./temp-path.js";
describe("buildRandomTempFilePath", () => {
it("builds deterministic paths when now/uuid are provided", () => {
const result = buildRandomTempFilePath({
prefix: "line-media",
extension: ".jpg",
tmpDir: "/tmp",
now: 123,
uuid: "abc",
});
expect(result).toBe(path.join("/tmp", "line-media-123-abc.jpg"));
});
function expectPathInsideTmpRoot(resultPath: string) {
const tmpRoot = path.resolve(resolvePreferredOpenClawTmpDir());
const resolved = path.resolve(resultPath);
const rel = path.relative(tmpRoot, resolved);
expect(rel === ".." || rel.startsWith(`..${path.sep}`)).toBe(false);
expect(resultPath).not.toContain("..");
}
it("sanitizes prefix and extension to avoid path traversal segments", () => {
const tmpRoot = path.resolve(resolvePreferredOpenClawTmpDir());
const result = buildRandomTempFilePath({
prefix: "../../channels/../media",
extension: "/../.jpg",
now: 123,
uuid: "abc",
});
const resolved = path.resolve(result);
const rel = path.relative(tmpRoot, resolved);
expect(rel === ".." || rel.startsWith(`..${path.sep}`)).toBe(false);
expect(path.basename(result)).toBe("channels-media-123-abc.jpg");
expect(result).not.toContain("..");
describe("buildRandomTempFilePath", () => {
it.each([
{
name: "builds deterministic paths when now/uuid are provided",
input: {
prefix: "line-media",
extension: ".jpg",
tmpDir: "/tmp",
now: 123,
uuid: "abc",
},
expectedPath: path.join("/tmp", "line-media-123-abc.jpg"),
expectedBasename: "line-media-123-abc.jpg",
verifyInsideTmpRoot: false,
},
{
name: "sanitizes prefix and extension to avoid path traversal segments",
input: {
prefix: "../../channels/../media",
extension: "/../.jpg",
now: 123,
uuid: "abc",
},
expectedBasename: "channels-media-123-abc.jpg",
verifyInsideTmpRoot: true,
},
])("$name", ({ input, expectedPath, expectedBasename, verifyInsideTmpRoot }) => {
const result = buildRandomTempFilePath(input);
if (expectedPath) {
expect(result).toBe(expectedPath);
}
expect(path.basename(result)).toBe(expectedBasename);
if (verifyInsideTmpRoot) {
expectPathInsideTmpRoot(result);
}
});
});
@@ -50,7 +68,6 @@ describe("withTempDownloadPath", () => {
});
it("sanitizes prefix and fileName", async () => {
const tmpRoot = path.resolve(resolvePreferredOpenClawTmpDir());
let capturedPath = "";
await withTempDownloadPath(
{
@@ -62,10 +79,7 @@ describe("withTempDownloadPath", () => {
},
);
const resolved = path.resolve(capturedPath);
const rel = path.relative(tmpRoot, resolved);
expect(rel === ".." || rel.startsWith(`..${path.sep}`)).toBe(false);
expectPathInsideTmpRoot(capturedPath);
expect(path.basename(capturedPath)).toBe("evil.bin");
expect(capturedPath).not.toContain("..");
});
});

View File

@@ -2,15 +2,26 @@ import { describe, expect, it } from "vitest";
import { chunkTextForOutbound } from "./text-chunking.js";
describe("chunkTextForOutbound", () => {
it("returns empty for empty input", () => {
expect(chunkTextForOutbound("", 10)).toEqual([]);
});
it("splits on newline or whitespace boundaries", () => {
expect(chunkTextForOutbound("alpha\nbeta gamma", 8)).toEqual(["alpha", "beta", "gamma"]);
});
it("falls back to hard limit when no separator exists", () => {
expect(chunkTextForOutbound("abcdefghij", 4)).toEqual(["abcd", "efgh", "ij"]);
it.each([
{
name: "returns empty for empty input",
text: "",
maxLen: 10,
expected: [],
},
{
name: "splits on newline or whitespace boundaries",
text: "alpha\nbeta gamma",
maxLen: 8,
expected: ["alpha", "beta", "gamma"],
},
{
name: "falls back to hard limit when no separator exists",
text: "abcdefghij",
maxLen: 4,
expected: ["abcd", "efgh", "ij"],
},
])("$name", ({ text, maxLen, expected }) => {
expect(chunkTextForOutbound(text, maxLen)).toEqual(expected);
});
});