refactor: simplify e2e fixture helpers

This commit is contained in:
Peter Steinberger
2026-04-29 10:08:23 +01:00
parent c33968e10c
commit 422d139ba0
11 changed files with 509 additions and 388 deletions

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env node
import { execFileSync, spawnSync } from "node:child_process";
import { execFileSync, spawn as nodeSpawn } from "node:child_process";
import path from "node:path";
import { pathToFileURL } from "node:url";
import {
@@ -33,6 +33,15 @@ export function buildBlacksmithRunArgs({ commandArgs, testboxId }) {
return ["testbox", "run", "--id", testboxId, command];
}
/**
 * Resolve the Blacksmith Testbox sync stall timeout in milliseconds.
 *
 * Reads `OPENCLAW_TESTBOX_SYNC_TIMEOUT_MS` from `env`. Unset, blank, or
 * invalid values (non-numeric, negative, non-finite) fall back to the
 * 5-minute default. An explicit "0" is honored and disables the guard at
 * the call site.
 *
 * @param {NodeJS.ProcessEnv} [env=process.env] - Environment to read from.
 * @returns {number} Timeout in milliseconds (always >= 0).
 */
export function resolveTestboxSyncTimeoutMs(env = process.env) {
  const DEFAULT_TIMEOUT_MS = 5 * 60 * 1000;
  // Trim so a whitespace-only value is treated as unset instead of parsing
  // to 0 and silently disabling the sync stall guard.
  const raw = env.OPENCLAW_TESTBOX_SYNC_TIMEOUT_MS?.trim();
  if (raw === undefined || raw === "") {
    return DEFAULT_TIMEOUT_MS;
  }
  const parsed = Number(raw);
  return Number.isFinite(parsed) && parsed >= 0 ? parsed : DEFAULT_TIMEOUT_MS;
}
// True when the runner args request claiming a Testbox (reused or fresh).
function hasClaimFlag(runnerArgs) {
  const claimFlags = ["--claim", "--claim-fresh"];
  return claimFlags.some((flag) => runnerArgs.includes(flag));
}
@@ -41,11 +50,77 @@ function stripRunnerOnlyFlags(runnerArgs) {
return runnerArgs.filter((arg) => arg !== "--claim" && arg !== "--claim-fresh");
}
export function runBlacksmithTestboxRunner({
// Forward a child-process output chunk to the destination stream,
// silently ignoring empty or absent chunks.
function pipeChunk(stream, chunk) {
  if (!chunk) {
    return;
  }
  stream.write(chunk);
}
// Spawn `blacksmith <args>` and watch its output for a Testbox "Syncing..."
// phase that never produces post-sync activity. If the stall lasts longer
// than syncTimeoutMs, the child is SIGTERM'd and the promise resolves with
// 124 (conventional timeout exit code). The promise never rejects: spawn
// failures resolve with 1, otherwise the child's numeric exit code is used.
function runBlacksmithWithSyncGuard({ args, cwd, env, spawn, stderr, stdout, syncTimeoutMs }) {
  return new Promise((resolve) => {
    const child = spawn("blacksmith", args, {
      cwd,
      env,
      // stdin stays attached to the caller; stdout/stderr are piped so the
      // guard can inspect output while still forwarding it below.
      stdio: ["inherit", "pipe", "pipe"],
    });
    let settled = false;
    // Epoch ms of the first unanswered "Syncing..." marker; 0 = not syncing.
    let syncingSince = 0;
    let timedOut = false;
    let timer;
    // Resolve exactly once and stop the watchdog interval.
    const finish = (code) => {
      if (settled) {
        return;
      }
      settled = true;
      clearInterval(timer);
      // A guard-triggered kill reports 124 regardless of the child's code.
      resolve(timedOut ? 124 : typeof code === "number" ? code : 1);
    };
    const handleOutput = (stream, chunk) => {
      const text = String(chunk);
      pipeChunk(stream, chunk);
      if (text.includes("Syncing...")) {
        // ||= keeps the FIRST sync timestamp so the timeout measures the
        // whole stall, not the latest repeated "Syncing..." line.
        syncingSince ||= Date.now();
      } else if (syncingSince && /\b(running|executing|command|pnpm|npm|yarn|bun)\b/iu.test(text)) {
        // Output that looks like the actual command starting clears the stall.
        syncingSince = 0;
      }
    };
    child.stdout?.on("data", (chunk) => handleOutput(stdout, chunk));
    child.stderr?.on("data", (chunk) => handleOutput(stderr, chunk));
    child.on("error", (error) => {
      stderr.write(`Failed to start blacksmith: ${error.message}\n`);
      finish(1);
    });
    child.on("close", (code) => finish(code));
    // Watchdog: poll between 1ms and 1s (clamped from syncTimeoutMs) for a
    // stalled sync. syncTimeoutMs <= 0 disables the guard in the callback.
    timer = setInterval(
      () => {
        if (!syncingSince || syncTimeoutMs <= 0) {
          return;
        }
        if (Date.now() - syncingSince < syncTimeoutMs) {
          return;
        }
        stderr.write(
          `Blacksmith Testbox sync produced no post-sync output for ${syncTimeoutMs}ms; terminating local runner. ` +
            "Rerun with OPENCLAW_TESTBOX_SYNC_TIMEOUT_MS=0 to disable this guard.\n",
        );
        timedOut = true;
        syncingSince = 0;
        // Optional chaining guards test doubles that do not implement kill.
        child.kill?.("SIGTERM");
      },
      Math.min(Math.max(syncTimeoutMs, 1), 1000),
    );
  });
}
export async function runBlacksmithTestboxRunner({
argv = process.argv.slice(2),
cwd = process.cwd(),
env = process.env,
spawn = spawnSync,
spawn = nodeSpawn,
stderr = process.stderr,
stdout = process.stdout,
} = {}) {
@@ -103,20 +178,17 @@ export function runBlacksmithTestboxRunner({
return 0;
}
const result = spawn("blacksmith", blacksmithArgs, {
return await runBlacksmithWithSyncGuard({
args: blacksmithArgs,
cwd,
env,
stdio: "inherit",
spawn,
stderr,
stdout,
syncTimeoutMs: resolveTestboxSyncTimeoutMs(env),
});
if (typeof result.status === "number") {
return result.status;
}
if (result.error) {
stderr.write(`Failed to start blacksmith: ${result.error.message}\n`);
}
return 1;
}
if (import.meta.url === pathToFileURL(process.argv[1] ?? "").href) {
process.exitCode = runBlacksmithTestboxRunner();
process.exitCode = await runBlacksmithTestboxRunner();
}

View File

@@ -67,59 +67,7 @@ set -e
cat "$RUN_LOG"
node - "$STATS_LOG" "$MAX_MEMORY_MIB" "$MAX_CPU_PERCENT" <<'NODE'
const fs = require("node:fs");
const [statsFile, maxMemoryRaw, maxCpuRaw] = process.argv.slice(2);
const maxMemoryMiB = Number(maxMemoryRaw);
const maxCpuPercent = Number(maxCpuRaw);
const parseMemoryMiB = (raw) => {
const value = String(raw || "").split("/")[0]?.trim() || "";
const match = /^([0-9.]+)\s*([KMGT]?i?B)$/iu.exec(value);
if (!match) return 0;
const amount = Number(match[1]);
const unit = match[2].toLowerCase();
if (unit === "kb" || unit === "kib") return amount / 1024;
if (unit === "mb" || unit === "mib") return amount;
if (unit === "gb" || unit === "gib") return amount * 1024;
if (unit === "tb" || unit === "tib") return amount * 1024 * 1024;
return 0;
};
const lines = fs.existsSync(statsFile)
? fs.readFileSync(statsFile, "utf8").split(/\r?\n/u).filter(Boolean)
: [];
let maxObservedMemoryMiB = 0;
let maxObservedCpuPercent = 0;
for (const line of lines) {
let parsed;
try {
parsed = JSON.parse(line);
} catch {
continue;
}
maxObservedMemoryMiB = Math.max(maxObservedMemoryMiB, parseMemoryMiB(parsed.MemUsage));
maxObservedCpuPercent = Math.max(
maxObservedCpuPercent,
Number(String(parsed.CPUPerc || "0").replace(/%$/u, "")) || 0,
);
}
console.log(
`kitchen-sink resource peak: memory=${maxObservedMemoryMiB.toFixed(1)}MiB cpu=${maxObservedCpuPercent.toFixed(1)}% samples=${lines.length}`,
);
if (lines.length === 0) {
throw new Error("no docker stats samples captured for kitchen-sink plugin lane");
}
if (maxObservedMemoryMiB > maxMemoryMiB) {
throw new Error(
`kitchen-sink memory peak ${maxObservedMemoryMiB.toFixed(1)}MiB exceeded ${maxMemoryMiB}MiB`,
);
}
if (maxObservedCpuPercent > maxCpuPercent) {
throw new Error(
`kitchen-sink CPU peak ${maxObservedCpuPercent.toFixed(1)}% exceeded ${maxCpuPercent}%`,
);
}
NODE
node scripts/e2e/lib/docker-stats/assert-resource-ceiling.mjs "$STATS_LOG" "$MAX_MEMORY_MIB" "$MAX_CPU_PERCENT" kitchen-sink
rm -f "$RUN_LOG" "$STATS_LOG"
exit "$run_status"

View File

@@ -0,0 +1,68 @@
import fs from "node:fs";
const [statsFile, maxMemoryRaw, maxCpuRaw, label = "docker"] = process.argv.slice(2);
const maxMemoryMiB = Number(maxMemoryRaw);
const maxCpuPercent = Number(maxCpuRaw);
// Convert a docker-stats memory string (e.g. "512MiB / 2GiB") to MiB.
// Only the portion before "/" (current usage) is parsed; unrecognized
// formats and bare-byte values yield 0.
function parseMemoryMiB(raw) {
  const usage =
    String(raw || "")
      .split("/")[0]
      ?.trim() || "";
  const match = /^([0-9.]+)\s*([KMGT]?i?B)$/iu.exec(usage);
  if (!match) {
    return 0;
  }
  const [, amountText, unitText] = match;
  // Multiplier converting each accepted unit suffix to MiB; decimal and
  // binary prefixes are treated the same, matching docker stats output.
  const mibPerUnit = {
    kb: 1 / 1024,
    kib: 1 / 1024,
    mb: 1,
    mib: 1,
    gb: 1024,
    gib: 1024,
    tb: 1024 * 1024,
    tib: 1024 * 1024,
  }[unitText.toLowerCase()];
  if (mibPerUnit === undefined) {
    return 0;
  }
  return Number(amountText) * mibPerUnit;
}
// Collect the newline-delimited JSON samples captured by `docker stats`;
// a missing stats file yields zero samples (a hard failure below).
const lines = fs.existsSync(statsFile)
  ? fs.readFileSync(statsFile, "utf8").split(/\r?\n/u).filter(Boolean)
  : [];
let maxObservedMemoryMiB = 0;
let maxObservedCpuPercent = 0;
for (const line of lines) {
  let parsed;
  try {
    parsed = JSON.parse(line);
  } catch {
    // Skip partially-written or corrupt sample lines rather than failing.
    continue;
  }
  maxObservedMemoryMiB = Math.max(maxObservedMemoryMiB, parseMemoryMiB(parsed.MemUsage));
  maxObservedCpuPercent = Math.max(
    maxObservedCpuPercent,
    // CPUPerc looks like "12.3%"; strip the suffix, default to 0 when absent.
    Number(String(parsed.CPUPerc || "0").replace(/%$/u, "")) || 0,
  );
}
// Always log the observed peaks so CI output shows headroom even on success.
console.log(
  `${label} resource peak: memory=${maxObservedMemoryMiB.toFixed(1)}MiB cpu=${maxObservedCpuPercent.toFixed(1)}% samples=${lines.length}`,
);
// Zero samples means the stats collector never ran - treat as a failure.
if (lines.length === 0) {
  throw new Error(`no docker stats samples captured for ${label}`);
}
if (maxObservedMemoryMiB > maxMemoryMiB) {
  throw new Error(
    `${label} memory peak ${maxObservedMemoryMiB.toFixed(1)}MiB exceeded ${maxMemoryMiB}MiB`,
  );
}
if (maxObservedCpuPercent > maxCpuPercent) {
  throw new Error(
    `${label} CPU peak ${maxObservedCpuPercent.toFixed(1)}% exceeded ${maxCpuPercent}%`,
  );
}

View File

@@ -0,0 +1,122 @@
import fs from "node:fs";
import path from "node:path";
const command = process.argv[2];
const readJson = (file) => JSON.parse(fs.readFileSync(file, "utf8"));
// Verify the non-interactive onboard run created the expected state tree
// under <home>/.openclaw and persisted the OpenAI key as an env reference
// (never the raw test secret). Home dir is taken from argv[3].
function assertOnboardState() {
  const [, , , home] = process.argv;
  const stateDir = path.join(home, ".openclaw");
  const agentDir = path.join(stateDir, "agents", "main", "agent");
  const authPath = path.join(agentDir, "auth-profiles.json");
  // Required filesystem artifacts, checked in onboarding order.
  const requiredPaths = [
    [path.join(stateDir, "openclaw.json"), "onboard did not write openclaw.json"],
    [agentDir, "onboard did not create main agent dir"],
    [authPath, "onboard did not create auth-profiles.json"],
  ];
  for (const [target, failure] of requiredPaths) {
    if (!fs.existsSync(target)) {
      throw new Error(failure);
    }
  }
  const authRaw = fs.readFileSync(authPath, "utf8");
  if (!authRaw.includes("OPENAI_API_KEY")) {
    throw new Error("auth profile did not persist OPENAI_API_KEY env ref");
  }
  if (authRaw.includes("sk-openclaw-npm-onboard-e2e")) {
    throw new Error("auth profile persisted the raw OpenAI test key");
  }
}
// Rewrite ~/.openclaw/openclaw.json so the onboarded install talks to the
// local mock OpenAI server: registers a zero-cost "gpt-5.5" model on the
// openai provider at 127.0.0.1:<mockPort> (port from argv[3]), makes it the
// default agent model, and enables plugins. Existing config sections are
// spread-merged so unrelated settings survive.
function configureMockModel() {
  const mockPort = Number(process.argv[3]);
  const configPath = path.join(process.env.HOME, ".openclaw", "openclaw.json");
  const cfg = readJson(configPath);
  const modelRef = "openai/gpt-5.5";
  // Zero cost so e2e usage accounting never accumulates spend.
  const cost = { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 };
  cfg.models = {
    ...cfg.models,
    mode: "merge",
    providers: {
      ...cfg.models?.providers,
      openai: {
        ...cfg.models?.providers?.openai,
        baseUrl: `http://127.0.0.1:${mockPort}/v1`,
        // Key is referenced via env so the raw secret never lands on disk.
        apiKey: { source: "env", provider: "default", id: "OPENAI_API_KEY" },
        api: "openai-responses",
        // allowPrivateNetwork lets the client reach the loopback mock server.
        request: { ...cfg.models?.providers?.openai?.request, allowPrivateNetwork: true },
        models: [
          {
            id: "gpt-5.5",
            name: "gpt-5.5",
            api: "openai-responses",
            reasoning: false,
            input: ["text", "image"],
            cost,
            contextWindow: 128000,
            contextTokens: 96000,
            maxTokens: 4096,
          },
        ],
      },
    },
  };
  cfg.agents = {
    ...cfg.agents,
    defaults: {
      ...cfg.agents?.defaults,
      model: { primary: modelRef },
      models: {
        ...cfg.agents?.defaults?.models,
        // SSE transport without websocket warmup keeps the mock exchange simple.
        [modelRef]: { params: { transport: "sse", openaiWsWarmup: false } },
      },
    },
  };
  cfg.plugins = {
    ...cfg.plugins,
    enabled: true,
  };
  // Trailing newline keeps the file friendly to line-based tooling.
  fs.writeFileSync(configPath, `${JSON.stringify(cfg, null, 2)}\n`);
}
// Verify `openclaw channels add` enabled the channel (argv[3]) and persisted
// its token (argv[4]) somewhere inside the channel's config entry.
function assertChannelConfig() {
  const [, , , channel, token] = process.argv;
  const configPath = path.join(process.env.HOME, ".openclaw", "openclaw.json");
  const cfg = JSON.parse(fs.readFileSync(configPath, "utf8"));
  const entry = cfg.channels?.[channel];
  if (!entry || entry.enabled === false) {
    throw new Error(`${channel} was not enabled`);
  }
  // Token location varies per channel; a substring check over the serialized
  // entry covers all shapes.
  const serialized = JSON.stringify(entry);
  if (!serialized.includes(token)) {
    throw new Error(`${channel} token was not persisted`);
  }
}
// Verify the agent turn succeeded: the combined agent output at
// /tmp/openclaw-agent.combined contains the success marker (argv[3]) and the
// mock OpenAI request log (argv[4]) shows at least one model API call.
function assertAgentTurn() {
  const [, , , marker, logPath] = process.argv;
  const combined = fs.readFileSync("/tmp/openclaw-agent.combined", "utf8");
  if (!combined.includes(marker)) {
    throw new Error(`agent JSON did not contain success marker. Output: ${combined}`);
  }
  let requestLog = "";
  if (fs.existsSync(logPath)) {
    requestLog = fs.readFileSync(logPath, "utf8");
  }
  if (!/\/v1\/(responses|chat\/completions)/u.test(requestLog)) {
    throw new Error(`mock OpenAI server was not used. Requests: ${requestLog}`);
  }
}
// Dispatch table mapping CLI subcommand names to assertion routines;
// unknown commands fail loudly so typos in scripts surface immediately.
const commands = new Map([
  ["assert-onboard-state", assertOnboardState],
  ["configure-mock-model", configureMockModel],
  ["assert-channel-config", assertChannelConfig],
  ["assert-agent-turn", assertAgentTurn],
]);
const fn = commands.get(command);
if (fn === undefined) {
  throw new Error(`unknown npm onboard/channel/agent assertion command: ${command}`);
}
fn();

View File

@@ -0,0 +1,87 @@
import fs from "node:fs";
const command = process.argv[2];
// Load the built OpenAI extension and exercise
// patchOpenAINativeWebSearchPayload for both supported shapes: a payload
// without tools (the native web_search tool must be injected) and one that
// already carries it (must be detected, not duplicated). In both cases a
// "minimal" reasoning effort must be raised to "low".
async function assertPatchBehavior() {
  const { patchOpenAINativeWebSearchPayload } = await import(
    "../../../../dist/extensions/openai/native-web-search.js"
  );
  const injectedPayload = {
    reasoning: { effort: "minimal", summary: "auto" },
  };
  const injectedResult = patchOpenAINativeWebSearchPayload(injectedPayload);
  if (injectedResult !== "injected") {
    throw new Error(`expected native web_search injection, got ${injectedResult}`);
  }
  if (injectedPayload.reasoning.effort !== "low") {
    throw new Error(
      `expected injected native web_search to raise minimal reasoning to low, got ${JSON.stringify(injectedPayload.reasoning)}`,
    );
  }
  const hasNativeTool = injectedPayload.tools?.some((tool) => tool?.type === "web_search");
  if (!hasNativeTool) {
    throw new Error(`native web_search was not injected: ${JSON.stringify(injectedPayload)}`);
  }
  const existingNativePayload = {
    tools: [{ type: "web_search" }],
    reasoning: { effort: "minimal" },
  };
  const existingResult = patchOpenAINativeWebSearchPayload(existingNativePayload);
  if (existingResult !== "native_tool_already_present") {
    throw new Error(`expected existing native web_search, got ${existingResult}`);
  }
  if (existingNativePayload.reasoning.effort !== "low") {
    throw new Error(
      `expected existing native web_search to raise minimal reasoning to low, got ${JSON.stringify(existingNativePayload.reasoning)}`,
    );
  }
}
// Parse the mock OpenAI JSONL request log (path in argv[3]) and verify the
// successful /v1/responses call (identified by the OPENCLAW_SCHEMA_E2E_OK
// marker) carried a web_search tool and avoided "minimal" reasoning effort.
function assertSuccessRequest() {
  const logPath = process.argv[3];
  const rawLines = fs.readFileSync(logPath, "utf8").trim().split(/\n+/u);
  const entries = [];
  for (const line of rawLines) {
    if (line) {
      entries.push(JSON.parse(line));
    }
  }
  const responseEntries = entries.filter((entry) => entry.path === "/v1/responses");
  if (responseEntries.length === 0) {
    throw new Error(`mock OpenAI /v1/responses was not used. Requests: ${JSON.stringify(entries)}`);
  }
  const success = responseEntries.find((entry) =>
    JSON.stringify(entry.body).includes("OPENCLAW_SCHEMA_E2E_OK"),
  );
  if (!success) {
    throw new Error(`missing success request. Requests: ${JSON.stringify(responseEntries)}`);
  }
  // The tool may appear either natively or as a function-tool wrapper.
  const isWebSearchTool = (tool) =>
    tool?.type === "web_search" ||
    (tool?.type === "function" &&
      (tool?.name === "web_search" || tool?.function?.name === "web_search"));
  const tools = Array.isArray(success.body.tools) ? success.body.tools : [];
  if (!tools.some(isWebSearchTool)) {
    throw new Error(
      `success request did not include web_search. Body: ${JSON.stringify(success.body)}`,
    );
  }
  if (success.body.reasoning?.effort === "minimal") {
    throw new Error(
      `expected web_search request to avoid minimal reasoning, got ${JSON.stringify(success.body.reasoning)}`,
    );
  }
}
// Dispatch table mapping subcommand names to assertion routines; unknown
// commands fail loudly so typos in the calling scripts surface immediately.
const commands = new Map([
  ["assert-patch-behavior", assertPatchBehavior],
  ["assert-success-request", assertSuccessRequest],
]);
const fn = commands.get(command);
if (fn === undefined) {
  throw new Error(`unknown OpenAI web-search minimal assertion command: ${command}`);
}
await fn();

View File

@@ -54,39 +54,7 @@ trap 'status=$?; dump_debug_logs "$status"; exit "$status"' ERR
entry="$(openclaw_e2e_resolve_entrypoint)"
mkdir -p "$OPENCLAW_STATE_DIR"
node --input-type=module <<'NODE'
import { patchOpenAINativeWebSearchPayload } from "./dist/extensions/openai/native-web-search.js";
const injectedPayload = {
reasoning: { effort: "minimal", summary: "auto" },
};
const injectedResult = patchOpenAINativeWebSearchPayload(injectedPayload);
if (injectedResult !== "injected") {
throw new Error(`expected native web_search injection, got ${injectedResult}`);
}
if (injectedPayload.reasoning.effort !== "low") {
throw new Error(
`expected injected native web_search to raise minimal reasoning to low, got ${JSON.stringify(injectedPayload.reasoning)}`,
);
}
if (!injectedPayload.tools?.some((tool) => tool?.type === "web_search")) {
throw new Error(`native web_search was not injected: ${JSON.stringify(injectedPayload)}`);
}
const existingNativePayload = {
tools: [{ type: "web_search" }],
reasoning: { effort: "minimal" },
};
const existingResult = patchOpenAINativeWebSearchPayload(existingNativePayload);
if (existingResult !== "native_tool_already_present") {
throw new Error(`expected existing native web_search, got ${existingResult}`);
}
if (existingNativePayload.reasoning.effort !== "low") {
throw new Error(
`expected existing native web_search to raise minimal reasoning to low, got ${JSON.stringify(existingNativePayload.reasoning)}`,
);
}
NODE
node scripts/e2e/lib/openai-web-search-minimal/assertions.mjs assert-patch-behavior
cat >"$OPENCLAW_STATE_DIR/openclaw.json" <<JSON
{
@@ -189,27 +157,7 @@ node "$entry" gateway health \
PORT="$PORT" OPENCLAW_GATEWAY_TOKEN="$TOKEN" node scripts/e2e/lib/openai-web-search-minimal/client.mjs success >/tmp/openclaw-openai-web-search-minimal-client-success.log 2>&1
node - "$MOCK_REQUEST_LOG" <<'NODE'
const fs = require("node:fs");
const logPath = process.argv[2];
const entries = fs.readFileSync(logPath, "utf8").trim().split(/\n+/).filter(Boolean).map((line) => JSON.parse(line));
const responseEntries = entries.filter((entry) => entry.path === "/v1/responses");
if (responseEntries.length < 1) {
throw new Error(`mock OpenAI /v1/responses was not used. Requests: ${JSON.stringify(entries)}`);
}
const success = responseEntries.find((entry) => JSON.stringify(entry.body).includes("OPENCLAW_SCHEMA_E2E_OK"));
if (!success) {
throw new Error(`missing success request. Requests: ${JSON.stringify(responseEntries)}`);
}
const tools = Array.isArray(success.body.tools) ? success.body.tools : [];
const hasWebSearch = tools.some((tool) => tool?.type === "web_search" || (tool?.type === "function" && (tool?.name === "web_search" || tool?.function?.name === "web_search")));
if (!hasWebSearch) {
throw new Error(`success request did not include web_search. Body: ${JSON.stringify(success.body)}`);
}
if (success.body.reasoning?.effort === "minimal") {
throw new Error(`expected web_search request to avoid minimal reasoning, got ${JSON.stringify(success.body.reasoning)}`);
}
NODE
node scripts/e2e/lib/openai-web-search-minimal/assertions.mjs assert-success-request "$MOCK_REQUEST_LOG"
PORT="$PORT" OPENCLAW_GATEWAY_TOKEN="$TOKEN" node scripts/e2e/lib/openai-web-search-minimal/client.mjs reject >/tmp/openclaw-openai-web-search-minimal-client-reject.log 2>&1

View File

@@ -5,6 +5,26 @@ record_fixture_plugin_trust() {
node scripts/e2e/lib/plugins/assertions.mjs record-fixture-plugin-trust "$plugin_id" "$plugin_root" "$enabled"
}
# Write the canonical "demo-plugin" fixture into the directory given as $1:
# an index.js that registers a tool, a gateway method, a CLI command, and a
# service, plus the standard manifest (via write_fixture_manifest).
# The heredocs are quoted ('JS'), so their bodies are written verbatim.
write_demo_fixture_plugin() {
local dir="$1"
mkdir -p "$dir"
cat >"$dir/index.js" <<'JS'
module.exports = {
id: "demo-plugin",
name: "Demo Plugin",
description: "Docker E2E demo plugin",
register(api) {
api.registerTool(() => null, { name: "demo_tool" });
api.registerGatewayMethod("demo.ping", async () => ({ ok: true }));
api.registerCli(() => {}, { commands: ["demo"] });
api.registerService({ id: "demo-service", start: () => {} });
},
};
JS
write_fixture_manifest "$dir/openclaw.plugin.json" demo-plugin
}
write_fixture_plugin() {
local dir="$1"
local id="$2"
@@ -29,7 +49,14 @@ module.exports = {
},
};
JS
cat >"$dir/openclaw.plugin.json" <<'JSON'
write_fixture_manifest "$dir/openclaw.plugin.json" "$id"
}
write_fixture_manifest() {
local file="$1"
local id="$2"
cat >"$file" <<'JSON'
{
"id": "placeholder",
"configSchema": {
@@ -38,5 +65,38 @@ JS
}
}
JSON
node scripts/e2e/lib/plugins/assertions.mjs set-manifest-id "$dir/openclaw.plugin.json" "$id"
node scripts/e2e/lib/plugins/assertions.mjs set-manifest-id "$file" "$id"
}
# Stage a fixture plugin under <staging_dir>/package and compress it into a
# tarball, matching the layout `npm pack` produces.
#   $1 staging dir, $2 output .tgz path, $3 plugin id, $4 version,
#   $5 gateway method name, $6 display name.
pack_fixture_plugin() {
  local staging_dir="$1"
  local tarball="$2"
  local plugin_id="$3"
  local plugin_version="$4"
  local gateway_method="$5"
  local display_name="$6"
  mkdir -p "$staging_dir/package"
  write_fixture_plugin "$staging_dir/package" "$plugin_id" "$plugin_version" "$gateway_method" "$display_name"
  tar -czf "$tarball" -C "$staging_dir" package
}
# Write a minimal Claude plugin bundle under the directory given as $1:
# the .claude-plugin/plugin.json manifest plus one slash-command markdown
# file (commands/office-hours.md). Heredocs are quoted, so $ARGUMENTS is
# written literally for the runtime to substitute, not expanded by the shell.
write_claude_bundle_fixture() {
local bundle_root="$1"
mkdir -p "$bundle_root/.claude-plugin" "$bundle_root/commands"
cat >"$bundle_root/.claude-plugin/plugin.json" <<'JSON'
{
"name": "claude-bundle-e2e"
}
JSON
cat >"$bundle_root/commands/office-hours.md" <<'MD'
---
description: Help with architecture and rollout planning
---
Act as an engineering advisor.
Focus on:
$ARGUMENTS
MD
}

View File

@@ -18,30 +18,7 @@ source scripts/e2e/lib/plugins/marketplace.sh
source scripts/e2e/lib/plugins/clawhub.sh
demo_plugin_id="demo-plugin"
demo_plugin_root="$OPENCLAW_PLUGIN_HOME/$demo_plugin_id"
mkdir -p "$demo_plugin_root"
cat >"$demo_plugin_root/index.js" <<'JS'
module.exports = {
id: "demo-plugin",
name: "Demo Plugin",
description: "Docker E2E demo plugin",
register(api) {
api.registerTool(() => null, { name: "demo_tool" });
api.registerGatewayMethod("demo.ping", async () => ({ ok: true }));
api.registerCli(() => {}, { commands: ["demo"] });
api.registerService({ id: "demo-service", start: () => {} });
},
};
JS
cat >"$demo_plugin_root/openclaw.plugin.json" <<'JSON'
{
"id": "demo-plugin",
"configSchema": {
"type": "object",
"properties": {}
}
}
JSON
write_demo_fixture_plugin "$demo_plugin_root"
record_fixture_plugin_trust "$demo_plugin_id" "$demo_plugin_root" 1
node "$OPENCLAW_ENTRY" plugins list --json >/tmp/plugins.json
@@ -51,33 +28,7 @@ node scripts/e2e/lib/plugins/assertions.mjs demo-plugin
echo "Testing tgz install flow..."
pack_dir="$(mktemp -d "/tmp/openclaw-plugin-pack.XXXXXX")"
mkdir -p "$pack_dir/package"
cat >"$pack_dir/package/package.json" <<'JSON'
{
"name": "@openclaw/demo-plugin-tgz",
"version": "0.0.1",
"openclaw": { "extensions": ["./index.js"] }
}
JSON
cat >"$pack_dir/package/index.js" <<'JS'
module.exports = {
id: "demo-plugin-tgz",
name: "Demo Plugin TGZ",
register(api) {
api.registerGatewayMethod("demo.tgz", async () => ({ ok: true }));
},
};
JS
cat >"$pack_dir/package/openclaw.plugin.json" <<'JSON'
{
"id": "demo-plugin-tgz",
"configSchema": {
"type": "object",
"properties": {}
}
}
JSON
tar -czf /tmp/demo-plugin-tgz.tgz -C "$pack_dir" package
pack_fixture_plugin "$pack_dir" /tmp/demo-plugin-tgz.tgz demo-plugin-tgz 0.0.1 demo.tgz "Demo Plugin TGZ"
run_logged install-tgz node "$OPENCLAW_ENTRY" plugins install /tmp/demo-plugin-tgz.tgz
node "$OPENCLAW_ENTRY" plugins list --json >/tmp/plugins2.json
@@ -87,31 +38,7 @@ node scripts/e2e/lib/plugins/assertions.mjs plugin-tgz
echo "Testing install from local folder (plugins.load.paths)..."
dir_plugin="$(mktemp -d "/tmp/openclaw-plugin-dir.XXXXXX")"
cat >"$dir_plugin/package.json" <<'JSON'
{
"name": "@openclaw/demo-plugin-dir",
"version": "0.0.1",
"openclaw": { "extensions": ["./index.js"] }
}
JSON
cat >"$dir_plugin/index.js" <<'JS'
module.exports = {
id: "demo-plugin-dir",
name: "Demo Plugin DIR",
register(api) {
api.registerGatewayMethod("demo.dir", async () => ({ ok: true }));
},
};
JS
cat >"$dir_plugin/openclaw.plugin.json" <<'JSON'
{
"id": "demo-plugin-dir",
"configSchema": {
"type": "object",
"properties": {}
}
}
JSON
write_fixture_plugin "$dir_plugin" demo-plugin-dir 0.0.1 demo.dir "Demo Plugin DIR"
run_logged install-dir node "$OPENCLAW_ENTRY" plugins install "$dir_plugin"
node "$OPENCLAW_ENTRY" plugins list --json >/tmp/plugins3.json
@@ -121,32 +48,7 @@ node scripts/e2e/lib/plugins/assertions.mjs plugin-dir
echo "Testing install from npm spec (file:)..."
file_pack_dir="$(mktemp -d "/tmp/openclaw-plugin-filepack.XXXXXX")"
mkdir -p "$file_pack_dir/package"
cat >"$file_pack_dir/package/package.json" <<'JSON'
{
"name": "@openclaw/demo-plugin-file",
"version": "0.0.1",
"openclaw": { "extensions": ["./index.js"] }
}
JSON
cat >"$file_pack_dir/package/index.js" <<'JS'
module.exports = {
id: "demo-plugin-file",
name: "Demo Plugin FILE",
register(api) {
api.registerGatewayMethod("demo.file", async () => ({ ok: true }));
},
};
JS
cat >"$file_pack_dir/package/openclaw.plugin.json" <<'JSON'
{
"id": "demo-plugin-file",
"configSchema": {
"type": "object",
"properties": {}
}
}
JSON
write_fixture_plugin "$file_pack_dir/package" demo-plugin-file 0.0.1 demo.file "Demo Plugin FILE"
run_logged install-file node "$OPENCLAW_ENTRY" plugins install "file:$file_pack_dir/package"
node "$OPENCLAW_ENTRY" plugins list --json >/tmp/plugins4.json
@@ -157,21 +59,7 @@ node scripts/e2e/lib/plugins/assertions.mjs plugin-file
echo "Testing Claude bundle enable and inspect flow..."
bundle_plugin_id="claude-bundle-e2e"
bundle_root="$OPENCLAW_PLUGIN_HOME/$bundle_plugin_id"
mkdir -p "$bundle_root/.claude-plugin" "$bundle_root/commands"
cat >"$bundle_root/.claude-plugin/plugin.json" <<'JSON'
{
"name": "claude-bundle-e2e"
}
JSON
cat >"$bundle_root/commands/office-hours.md" <<'MD'
---
description: Help with architecture and rollout planning
---
Act as an engineering advisor.
Focus on:
$ARGUMENTS
MD
write_claude_bundle_fixture "$bundle_root"
record_fixture_plugin_trust "$bundle_plugin_id" "$bundle_root" 0
node "$OPENCLAW_ENTRY" plugins list --json >/tmp/plugins-bundle-disabled.json
@@ -183,31 +71,7 @@ node scripts/e2e/lib/plugins/assertions.mjs bundle-inspect
echo "Testing plugin install visible after explicit restart..."
slash_install_dir="$(mktemp -d "/tmp/openclaw-plugin-slash-install.XXXXXX")"
cat >"$slash_install_dir/package.json" <<'JSON'
{
"name": "@openclaw/slash-install-plugin",
"version": "0.0.1",
"openclaw": { "extensions": ["./index.js"] }
}
JSON
cat >"$slash_install_dir/index.js" <<'JS'
module.exports = {
id: "slash-install-plugin",
name: "Slash Install Plugin",
register(api) {
api.registerGatewayMethod("demo.slash.install", async () => ({ ok: true }));
},
};
JS
cat >"$slash_install_dir/openclaw.plugin.json" <<'JSON'
{
"id": "slash-install-plugin",
"configSchema": {
"type": "object",
"properties": {}
}
}
JSON
write_fixture_plugin "$slash_install_dir" slash-install-plugin 0.0.1 demo.slash.install "Slash Install Plugin"
run_logged install-slash-plugin node "$OPENCLAW_ENTRY" plugins install "$slash_install_dir"
node "$OPENCLAW_ENTRY" plugins inspect slash-install-plugin --json >/tmp/plugin-command-install-show.json

View File

@@ -14,11 +14,11 @@ PACKAGE_TGZ="${OPENCLAW_CURRENT_PACKAGE_TGZ:-}"
CHANNEL="${OPENCLAW_NPM_ONBOARD_CHANNEL:-telegram}"
case "$CHANNEL" in
telegram | discord) ;;
*)
echo "OPENCLAW_NPM_ONBOARD_CHANNEL must be telegram or discord, got: $CHANNEL" >&2
exit 1
;;
telegram | discord) ;;
*)
echo "OPENCLAW_NPM_ONBOARD_CHANNEL must be telegram or discord, got: $CHANNEL" >&2
exit 1
;;
esac
docker_e2e_build_or_reuse "$IMAGE_NAME" npm-onboard-channel-agent "$ROOT_DIR/scripts/e2e/Dockerfile" "$ROOT_DIR" "$DOCKER_TARGET"
@@ -47,7 +47,7 @@ if ! docker_e2e_run_with_harness \
-e OPENCLAW_NPM_ONBOARD_CHANNEL="$CHANNEL" \
-e "OPENCLAW_TEST_STATE_SCRIPT_B64=$OPENCLAW_TEST_STATE_SCRIPT_B64" \
"${DOCKER_E2E_PACKAGE_ARGS[@]}" \
-i "$IMAGE_NAME" bash -s >"$run_log" 2>&1 <<'EOF'
-i "$IMAGE_NAME" bash -s >"$run_log" 2>&1 <<'EOF'; then
set -euo pipefail
source scripts/lib/openclaw-e2e-instance.sh
@@ -123,108 +123,14 @@ openclaw onboard --non-interactive --accept-risk \
--skip-health \
--json >/tmp/openclaw-onboard.json
node - "$HOME" <<'NODE'
const fs = require("node:fs");
const path = require("node:path");
const home = process.argv[2];
const stateDir = path.join(home, ".openclaw");
const configPath = path.join(stateDir, "openclaw.json");
const agentDir = path.join(stateDir, "agents", "main", "agent");
const authPath = path.join(agentDir, "auth-profiles.json");
if (!fs.existsSync(configPath)) {
throw new Error("onboard did not write openclaw.json");
}
if (!fs.existsSync(agentDir)) {
throw new Error("onboard did not create main agent dir");
}
if (!fs.existsSync(authPath)) {
throw new Error("onboard did not create auth-profiles.json");
}
const authRaw = fs.readFileSync(authPath, "utf8");
if (!authRaw.includes("OPENAI_API_KEY")) {
throw new Error("auth profile did not persist OPENAI_API_KEY env ref");
}
if (authRaw.includes("sk-openclaw-npm-onboard-e2e")) {
throw new Error("auth profile persisted the raw OpenAI test key");
}
NODE
node - "$MOCK_PORT" <<'NODE'
const fs = require("node:fs");
const path = require("node:path");
const mockPort = Number(process.argv[2]);
const configPath = path.join(process.env.HOME, ".openclaw", "openclaw.json");
const cfg = JSON.parse(fs.readFileSync(configPath, "utf8"));
const modelRef = "openai/gpt-5.5";
const cost = { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 };
cfg.models = {
...(cfg.models || {}),
mode: "merge",
providers: {
...(cfg.models?.providers || {}),
openai: {
...(cfg.models?.providers?.openai || {}),
baseUrl: `http://127.0.0.1:${mockPort}/v1`,
apiKey: { source: "env", provider: "default", id: "OPENAI_API_KEY" },
api: "openai-responses",
request: { ...(cfg.models?.providers?.openai?.request || {}), allowPrivateNetwork: true },
models: [
{
id: "gpt-5.5",
name: "gpt-5.5",
api: "openai-responses",
reasoning: false,
input: ["text", "image"],
cost,
contextWindow: 128000,
contextTokens: 96000,
maxTokens: 4096,
},
],
},
},
};
cfg.agents = {
...(cfg.agents || {}),
defaults: {
...(cfg.agents?.defaults || {}),
model: { primary: modelRef },
models: {
...(cfg.agents?.defaults?.models || {}),
[modelRef]: { params: { transport: "sse", openaiWsWarmup: false } },
},
},
};
cfg.plugins = {
...(cfg.plugins || {}),
enabled: true,
};
fs.writeFileSync(configPath, `${JSON.stringify(cfg, null, 2)}\n`);
NODE
node scripts/e2e/lib/npm-onboard-channel-agent/assertions.mjs assert-onboard-state "$HOME"
node scripts/e2e/lib/npm-onboard-channel-agent/assertions.mjs configure-mock-model "$MOCK_PORT"
openclaw_e2e_assert_dep_absent "$DEP_SENTINEL" "$package_root" "$HOME/.openclaw"
echo "Configuring $CHANNEL..."
openclaw channels add --channel "$CHANNEL" --token "$CHANNEL_TOKEN" >/tmp/openclaw-channel-add.log 2>&1
node - "$CHANNEL" "$CHANNEL_TOKEN" <<'NODE'
const fs = require("node:fs");
const path = require("node:path");
const channel = process.argv[2];
const token = process.argv[3];
const cfg = JSON.parse(fs.readFileSync(path.join(process.env.HOME, ".openclaw", "openclaw.json"), "utf8"));
const entry = cfg.channels?.[channel];
if (!entry || entry.enabled === false) {
throw new Error(`${channel} was not enabled`);
}
const serialized = JSON.stringify(entry);
if (!serialized.includes(token)) {
throw new Error(`${channel} token was not persisted`);
}
NODE
node scripts/e2e/lib/npm-onboard-channel-agent/assertions.mjs assert-channel-config "$CHANNEL" "$CHANNEL_TOKEN"
echo "Running doctor after channel activation..."
openclaw doctor --repair --non-interactive >/tmp/openclaw-doctor.log 2>&1
@@ -238,23 +144,10 @@ openclaw agent --local \
--thinking off \
--json >/tmp/openclaw-agent.combined 2>&1
node - "$SUCCESS_MARKER" "$MOCK_REQUEST_LOG" <<'NODE'
const fs = require("node:fs");
const marker = process.argv[2];
const logPath = process.argv[3];
const output = fs.readFileSync("/tmp/openclaw-agent.combined", "utf8");
if (!output.includes(marker)) {
throw new Error(`agent JSON did not contain success marker. Output: ${output}`);
}
const requestLog = fs.existsSync(logPath) ? fs.readFileSync(logPath, "utf8") : "";
if (!/\/v1\/(responses|chat\/completions)/.test(requestLog)) {
throw new Error(`mock OpenAI server was not used. Requests: ${requestLog}`);
}
NODE
node scripts/e2e/lib/npm-onboard-channel-agent/assertions.mjs assert-agent-turn "$SUCCESS_MARKER" "$MOCK_REQUEST_LOG"
echo "npm tarball onboard/channel/agent Docker E2E passed for $CHANNEL"
EOF
then
docker_e2e_print_log "$run_log"
rm -f "$run_log"
exit 1