test: add npm tarball onboarding docker e2e

This commit is contained in:
Peter Steinberger
2026-04-23 05:52:37 +01:00
parent c2f0559829
commit 61dfbd78d5
3 changed files with 488 additions and 1 deletions

View File

@@ -76,6 +76,14 @@ These commands sit beside the main test suites when you need QA-lab realism:
`.artifacts/qa-e2e/...`.
- `pnpm qa:lab:up`
- Starts the Docker-backed QA site for operator-style QA work.
- `pnpm test:docker:npm-onboard-channel-agent`
- Builds an npm tarball from the current checkout, installs it globally in
Docker, runs non-interactive OpenAI API-key onboarding, configures Telegram
by default, verifies enabling the plugin installs runtime dependencies on
demand, runs doctor, and runs one local agent turn against a mocked OpenAI
endpoint.
- Use `OPENCLAW_NPM_ONBOARD_CHANNEL=discord` to run the same packaged-install
lane with Discord.
- `pnpm test:docker:bundled-channel-deps`
- Packs and installs the current OpenClaw build in Docker, starts the Gateway
with OpenAI configured, then enables bundled channel/plugins via config
@@ -915,6 +923,7 @@ The live-model Docker runners also bind-mount only the needed CLI auth homes (or
- Pi bundle MCP tools (real stdio MCP server + embedded Pi profile allow/deny smoke): `pnpm test:docker:pi-bundle-mcp-tools` (script: `scripts/e2e/pi-bundle-mcp-tools-docker.sh`)
- Cron/subagent MCP cleanup (real Gateway + stdio MCP child teardown after isolated cron and one-shot subagent runs): `pnpm test:docker:cron-mcp-cleanup` (script: `scripts/e2e/cron-mcp-cleanup-docker.sh`)
- Plugins (install smoke + `/plugin` alias + Claude-bundle restart semantics): `pnpm test:docker:plugins` (script: `scripts/e2e/plugins-docker.sh`)
- npm tarball onboarding/channel/agent smoke: `pnpm test:docker:npm-onboard-channel-agent` installs the packed OpenClaw tarball globally in Docker, configures OpenAI via env-ref onboarding plus Telegram by default, verifies enabling the plugin installs its runtime deps on demand, runs doctor, and runs one mocked OpenAI agent turn. Reuse a prebuilt tarball with `OPENCLAW_NPM_ONBOARD_PACKAGE_TGZ=/path/to/openclaw-*.tgz`, skip the host rebuild with `OPENCLAW_NPM_ONBOARD_HOST_BUILD=0`, or switch channel with `OPENCLAW_NPM_ONBOARD_CHANNEL=discord`.
- Bundled plugin runtime deps: `pnpm test:docker:bundled-channel-deps` builds a small Docker runner image by default, builds and packs OpenClaw once on the host, then mounts that tarball into each Linux install scenario. Reuse the image with `OPENCLAW_SKIP_DOCKER_BUILD=1`, skip the host rebuild after a fresh local build with `OPENCLAW_BUNDLED_CHANNEL_HOST_BUILD=0`, or point at an existing tarball with `OPENCLAW_BUNDLED_CHANNEL_PACKAGE_TGZ=/path/to/openclaw-*.tgz`.
- Narrow bundled plugin runtime deps while iterating by disabling unrelated scenarios, for example:
`OPENCLAW_BUNDLED_CHANNEL_SCENARIOS=0 OPENCLAW_BUNDLED_CHANNEL_UPDATE_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_ROOT_OWNED_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_SETUP_ENTRY_SCENARIO=0 pnpm test:docker:bundled-channel-deps`.

View File

@@ -1416,7 +1416,7 @@
"test:contracts:plugins": "node scripts/run-vitest.mjs run --config test/vitest/vitest.contracts-plugin.config.ts --maxWorkers=1",
"test:coverage": "node scripts/run-vitest.mjs run --config test/vitest/vitest.unit.config.ts --coverage",
"test:coverage:changed": "node scripts/run-vitest.mjs run --config test/vitest/vitest.unit.config.ts --coverage --changed origin/main",
"test:docker:all": "pnpm test:docker:live-build && OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:live-models && OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:live-gateway && pnpm test:docker:openwebui && pnpm test:docker:onboard && pnpm test:docker:gateway-network && pnpm test:docker:mcp-channels && pnpm test:docker:pi-bundle-mcp-tools && pnpm test:docker:cron-mcp-cleanup && pnpm test:docker:qr && pnpm test:docker:doctor-switch && pnpm test:docker:plugins && pnpm test:docker:bundled-channel-deps && pnpm test:docker:cleanup",
"test:docker:all": "pnpm test:docker:live-build && OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:live-models && OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:live-gateway && pnpm test:docker:openwebui && pnpm test:docker:onboard && pnpm test:docker:npm-onboard-channel-agent && pnpm test:docker:gateway-network && pnpm test:docker:mcp-channels && pnpm test:docker:pi-bundle-mcp-tools && pnpm test:docker:cron-mcp-cleanup && pnpm test:docker:qr && pnpm test:docker:doctor-switch && pnpm test:docker:plugins && pnpm test:docker:bundled-channel-deps && pnpm test:docker:cleanup",
"test:docker:bundled-channel-deps": "bash scripts/e2e/bundled-channel-runtime-deps-docker.sh",
"test:docker:bundled-channel-deps:fast": "OPENCLAW_BUNDLED_CHANNEL_SCENARIOS=0 OPENCLAW_BUNDLED_CHANNEL_UPDATE_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_ROOT_OWNED_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_SETUP_ENTRY_SCENARIO=1 OPENCLAW_BUNDLED_CHANNEL_LOAD_FAILURE_SCENARIO=1 bash scripts/e2e/bundled-channel-runtime-deps-docker.sh",
"test:docker:cleanup": "bash scripts/test-cleanup-docker.sh",
@@ -1443,6 +1443,7 @@
"test:docker:live-models:codex": "OPENCLAW_LIVE_PROVIDERS=codex-cli OPENCLAW_LIVE_MODELS=codex-cli/gpt-5.4 bash scripts/test-live-models-docker.sh",
"test:docker:live-models:gemini": "OPENCLAW_LIVE_PROVIDERS=google-gemini-cli OPENCLAW_LIVE_MODELS=google-gemini-cli/gemini-3.1-pro-preview bash scripts/test-live-models-docker.sh",
"test:docker:mcp-channels": "bash scripts/e2e/mcp-channels-docker.sh",
"test:docker:npm-onboard-channel-agent": "bash scripts/e2e/npm-onboard-channel-agent-docker.sh",
"test:docker:onboard": "bash scripts/e2e/onboard-docker.sh",
"test:docker:openwebui": "bash scripts/e2e/openwebui-docker.sh",
"test:docker:pi-bundle-mcp-tools": "bash scripts/e2e/pi-bundle-mcp-tools-docker.sh",

View File

@@ -0,0 +1,477 @@
#!/usr/bin/env bash
# Docker E2E lane: pack the current OpenClaw checkout as an npm tarball,
# install it globally inside a container, run non-interactive onboarding,
# configure a chat channel, and run one agent turn against a mocked OpenAI
# server. Behavior is tuned via the OPENCLAW_NPM_ONBOARD_* env vars.
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
# Shared log helpers — provides run_logged (presumably tees command output
# into the artifacts log dir; see scripts/lib/docker-e2e-logs.sh).
source "$ROOT_DIR/scripts/lib/docker-e2e-logs.sh"
IMAGE_NAME="${OPENCLAW_NPM_ONBOARD_E2E_IMAGE:-openclaw-npm-onboard-channel-agent-e2e}"
DOCKER_TARGET="${OPENCLAW_NPM_ONBOARD_DOCKER_TARGET:-e2e-runner}"
HOST_BUILD="${OPENCLAW_NPM_ONBOARD_HOST_BUILD:-1}"
PACKAGE_TGZ="${OPENCLAW_NPM_ONBOARD_PACKAGE_TGZ:-}"
CHANNEL="${OPENCLAW_NPM_ONBOARD_CHANNEL:-telegram}"
# Only the two channels this lane knows how to configure and verify.
case "$CHANNEL" in
telegram | discord) ;;
*)
echo "OPENCLAW_NPM_ONBOARD_CHANNEL must be telegram or discord, got: $CHANNEL" >&2
exit 1
;;
esac
# Allow reusing a previously built runner image to speed up local iteration.
if [ "${OPENCLAW_SKIP_DOCKER_BUILD:-0}" = "1" ]; then
echo "Reusing Docker image: $IMAGE_NAME (OPENCLAW_SKIP_DOCKER_BUILD=1)"
else
echo "Building Docker image target $DOCKER_TARGET..."
run_logged npm-onboard-channel-agent-build docker build --target "$DOCKER_TARGET" -t "$IMAGE_NAME" -f "$ROOT_DIR/scripts/e2e/Dockerfile" "$ROOT_DIR"
fi
# Resolve PACKAGE_TGZ to an absolute path of a packed OpenClaw tarball.
# Precedence: an explicit OPENCLAW_NPM_ONBOARD_PACKAGE_TGZ wins; otherwise
# optionally build on the host, write the package dist inventory, then
# `npm pack` once into a temp directory.
prepare_package_tgz() {
if [ -n "$PACKAGE_TGZ" ]; then
if [ ! -f "$PACKAGE_TGZ" ]; then
echo "OPENCLAW_NPM_ONBOARD_PACKAGE_TGZ does not exist: $PACKAGE_TGZ" >&2
exit 1
fi
# Normalize to an absolute path so the docker -v mount works from any cwd.
PACKAGE_TGZ="$(cd "$(dirname "$PACKAGE_TGZ")" && pwd)/$(basename "$PACKAGE_TGZ")"
return 0
fi
if [ "$HOST_BUILD" != "0" ]; then
echo "Building host package artifacts..."
run_logged npm-onboard-channel-agent-host-build pnpm build
else
echo "Skipping host build (OPENCLAW_NPM_ONBOARD_HOST_BUILD=0)"
fi
echo "Writing package inventory and packing once..."
run_logged npm-onboard-channel-agent-inventory node --import tsx --input-type=module -e 'const { writePackageDistInventory } = await import("./src/infra/package-dist-inventory.ts"); await writePackageDistInventory(process.cwd());'
local pack_dir
pack_dir="$(mktemp -d "${TMPDIR:-/tmp}/openclaw-npm-onboard-pack.XXXXXX")"
# --ignore-scripts keeps packing hermetic; the temp destination isolates
# the tarball so the find below sees exactly one candidate.
run_logged npm-onboard-channel-agent-pack npm pack --ignore-scripts --pack-destination "$pack_dir"
PACKAGE_TGZ="$(find "$pack_dir" -maxdepth 1 -name 'openclaw-*.tgz' -print -quit)"
if [ -z "$PACKAGE_TGZ" ]; then
echo "missing packed OpenClaw tarball" >&2
exit 1
fi
PACKAGE_TGZ="$(cd "$(dirname "$PACKAGE_TGZ")" && pwd)/$(basename "$PACKAGE_TGZ")"
}
prepare_package_tgz
# Path where the tarball is mounted read-only inside the container.
DOCKER_PACKAGE_TGZ="/tmp/openclaw-current.tgz"
run_log="$(mktemp "${TMPDIR:-/tmp}/openclaw-npm-onboard-channel-agent.XXXXXX.log")"
echo "Running npm tarball onboard/channel/agent Docker E2E ($CHANNEL)..."
# The container script is fed on stdin (quoted heredoc, no host expansion);
# its combined output is captured and only shown when the run fails.
if ! docker run --rm \
-e COREPACK_ENABLE_DOWNLOAD_PROMPT=0 \
-e OPENCLAW_NPM_ONBOARD_CHANNEL="$CHANNEL" \
-e OPENCLAW_CURRENT_PACKAGE_TGZ="$DOCKER_PACKAGE_TGZ" \
-v "$PACKAGE_TGZ:$DOCKER_PACKAGE_TGZ:ro" \
-i "$IMAGE_NAME" bash -s >"$run_log" 2>&1 <<'EOF'
set -euo pipefail
# Isolate all state under a throwaway HOME so the global npm install and
# the ~/.openclaw config never touch the image's defaults.
export HOME="$(mktemp -d "/tmp/openclaw-npm-onboard.XXXXXX")"
export NPM_CONFIG_PREFIX="$HOME/.npm-global"
export PATH="$NPM_CONFIG_PREFIX/bin:$PATH"
# Dummy secrets: the OpenAI key is only ever sent to the local mock server.
export OPENAI_API_KEY="sk-openclaw-npm-onboard-e2e"
export OPENCLAW_GATEWAY_TOKEN="npm-onboard-channel-agent-token"
CHANNEL="${OPENCLAW_NPM_ONBOARD_CHANNEL:?missing OPENCLAW_NPM_ONBOARD_CHANNEL}"
PORT="18789"
MOCK_PORT="44080"
SUCCESS_MARKER="OPENCLAW_AGENT_E2E_OK_ASSISTANT"
MOCK_REQUEST_LOG="/tmp/openclaw-mock-openai-requests.jsonl"
mock_pid=""
# Per-channel fixtures: a syntactically plausible token plus a package name
# (the "dep sentinel") whose appearance proves on-demand runtime dep install.
case "$CHANNEL" in
telegram)
CHANNEL_TOKEN="123456:openclaw-npm-onboard-token"
DEP_SENTINEL="grammy"
;;
discord)
CHANNEL_TOKEN="openclaw-npm-onboard-discord-token"
DEP_SENTINEL="discord-api-types"
;;
*)
echo "unsupported channel: $CHANNEL" >&2
exit 1
;;
esac
# Always reap the background mock server on exit.
cleanup() {
if [ -n "${mock_pid:-}" ] && kill -0 "$mock_pid" 2>/dev/null; then
kill "$mock_pid" 2>/dev/null || true
wait "$mock_pid" 2>/dev/null || true
fi
}
trap cleanup EXIT
# On any failed command (ERR trap below), dump the known log/artifact files
# so the single captured run log contains the full failure context.
dump_debug_logs() {
local status="$1"
echo "npm onboard/channel/agent scenario failed with exit code $status" >&2
for file in \
/tmp/openclaw-install.log \
/tmp/openclaw-onboard.json \
/tmp/openclaw-channel-add.log \
/tmp/openclaw-doctor.log \
/tmp/openclaw-agent.combined \
/tmp/openclaw-agent.err \
/tmp/openclaw-agent.json \
/tmp/openclaw-mock-openai.log \
"$MOCK_REQUEST_LOG"; do
if [ -f "$file" ]; then
echo "--- $file ---" >&2
# Cap each dump at 220 lines to keep the failure output readable.
sed -n '1,220p' "$file" >&2 || true
fi
done
}
trap 'status=$?; dump_debug_logs "$status"; exit "$status"' ERR
echo "Installing mounted OpenClaw package..."
package_tgz="${OPENCLAW_CURRENT_PACKAGE_TGZ:?missing OPENCLAW_CURRENT_PACKAGE_TGZ}"
npm install -g "$package_tgz" --no-fund --no-audit >/tmp/openclaw-install.log 2>&1
command -v openclaw >/dev/null
package_root="$(npm root -g)/openclaw"
# The packed tarball must ship both channel extensions' dist output.
test -d "$package_root/dist/extensions/telegram"
test -d "$package_root/dist/extensions/discord"
# Assert the sentinel runtime dep is NOT installed anywhere under the global
# package or ~/.openclaw — it must only appear after channel activation.
assert_dep_absent() {
local sentinel="$1"
if find "$package_root" "$HOME/.openclaw" -path "*/node_modules/$sentinel/package.json" -print -quit 2>/dev/null | grep -q .; then
echo "$sentinel should not be installed before channel activation repair" >&2
find "$package_root" "$HOME/.openclaw" -path "*/node_modules/$sentinel/package.json" -print 2>/dev/null >&2 || true
exit 1
fi
}
# Inverse check: the sentinel dep must exist once the channel is enabled.
assert_dep_present() {
local sentinel="$1"
if ! find "$package_root" "$HOME/.openclaw" -path "*/node_modules/$sentinel/package.json" -print -quit 2>/dev/null | grep -q .; then
echo "$sentinel was not installed on demand" >&2
find "$package_root" "$HOME/.openclaw" -maxdepth 6 -type d -name node_modules -print 2>/dev/null >&2 || true
exit 1
fi
}
# Write the mock OpenAI server used for the agent turn. The heredoc is
# quoted, so nothing below is expanded by bash; config arrives via env vars
# set when the server is launched.
cat >/tmp/openclaw-mock-openai.mjs <<'NODE'
import http from "node:http";
import fs from "node:fs";
// Injected by the launcher: listen port, marker text, request-log path.
const port = Number(process.env.MOCK_PORT);
const successMarker = process.env.SUCCESS_MARKER;
const requestLog = process.env.MOCK_REQUEST_LOG;
// Collects the full UTF-8 body of an incoming HTTP request stream.
// Resolves with the concatenated text, rejects on stream error.
function readBody(req) {
  return new Promise((resolve, reject) => {
    const chunks = [];
    req.setEncoding("utf8");
    req.on("data", (chunk) => chunks.push(chunk));
    req.on("end", () => resolve(chunks.join("")));
    req.on("error", reject);
  });
}
// Serializes `body` as JSON and ends the response with the given status.
function writeJson(res, status, body) {
  const payload = JSON.stringify(body);
  res.writeHead(status, { "content-type": "application/json" });
  res.end(payload);
}
// Builds the SSE event sequence for a mocked /v1/responses stream: an
// in-progress assistant message, its completed form carrying `text`, then
// the final response.completed event with fixed token usage.
function responseEvents(text) {
  const added = {
    type: "response.output_item.added",
    item: {
      type: "message",
      id: "msg_e2e_1",
      role: "assistant",
      content: [],
      status: "in_progress",
    },
  };
  const done = {
    type: "response.output_item.done",
    item: {
      type: "message",
      id: "msg_e2e_1",
      role: "assistant",
      status: "completed",
      content: [{ type: "output_text", text, annotations: [] }],
    },
  };
  const completed = {
    type: "response.completed",
    response: {
      status: "completed",
      usage: {
        input_tokens: 11,
        output_tokens: 7,
        total_tokens: 18,
        input_tokens_details: { cached_tokens: 0 },
      },
    },
  };
  return [added, done, completed];
}
// Streams the given events as server-sent `data:` frames, then closes the
// stream with the conventional [DONE] sentinel.
function writeSse(res, events) {
  const headers = {
    "content-type": "text/event-stream",
    "cache-control": "no-store",
    connection: "keep-alive",
  };
  res.writeHead(200, headers);
  const frames = events.map((event) => `data: ${JSON.stringify(event)}\n\n`);
  for (const frame of frames) {
    res.write(frame);
  }
  res.write("data: [DONE]\n\n");
  res.end();
}
// Answers a /v1/chat/completions request. Streaming requests get two SSE
// chunks (assistant content carrying the success marker, then a stop
// chunk); non-streaming requests get a single JSON completion object.
function writeChatCompletion(res, stream) {
  if (!stream) {
    writeJson(res, 200, {
      id: "chatcmpl_e2e",
      object: "chat.completion",
      choices: [{ index: 0, message: { role: "assistant", content: successMarker }, finish_reason: "stop" }],
      usage: { prompt_tokens: 11, completion_tokens: 7, total_tokens: 18 },
    });
    return;
  }
  const contentChunk = {
    id: "chatcmpl_e2e",
    object: "chat.completion.chunk",
    choices: [{ index: 0, delta: { role: "assistant", content: successMarker } }],
  };
  const stopChunk = {
    id: "chatcmpl_e2e",
    object: "chat.completion.chunk",
    choices: [{ index: 0, delta: {}, finish_reason: "stop" }],
  };
  writeSse(res, [contentChunk, stopChunk]);
}
// Route table: health probe, model listing, Responses API (stream + JSON),
// and Chat Completions. Every request past the GET routes has its body
// appended to the request log so the outer test can prove the agent
// actually called this mock.
const server = http.createServer(async (req, res) => {
const url = new URL(req.url ?? "/", "http://127.0.0.1");
if (req.method === "GET" && url.pathname === "/health") {
writeJson(res, 200, { ok: true });
return;
}
if (req.method === "GET" && url.pathname === "/v1/models") {
writeJson(res, 200, {
object: "list",
data: [{ id: "gpt-5.4", object: "model", owned_by: "openclaw-e2e" }],
});
return;
}
const bodyText = await readBody(req);
// One JSONL entry per request; consumed by the final verification step.
fs.appendFileSync(requestLog, JSON.stringify({ method: req.method, path: url.pathname, body: bodyText }) + "\n");
let body = {};
try {
body = bodyText ? JSON.parse(bodyText) : {};
} catch {
// Tolerate non-JSON bodies; routing below only inspects body.stream.
body = {};
}
if (req.method === "POST" && url.pathname === "/v1/responses") {
if (body.stream === false) {
writeJson(res, 200, {
id: "resp_e2e",
object: "response",
status: "completed",
output: [
{
type: "message",
id: "msg_e2e_1",
role: "assistant",
status: "completed",
content: [{ type: "output_text", text: successMarker, annotations: [] }],
},
],
usage: { input_tokens: 11, output_tokens: 7, total_tokens: 18 },
});
return;
}
// Default is SSE streaming, mirroring the OpenAI Responses API shape.
writeSse(res, responseEvents(successMarker));
return;
}
if (req.method === "POST" && url.pathname === "/v1/chat/completions") {
writeChatCompletion(res, body.stream !== false);
return;
}
// Any other route is a test bug; surface it loudly in the 404 payload.
writeJson(res, 404, { error: { message: `unhandled mock route: ${req.method} ${url.pathname}` } });
});
server.listen(port, "127.0.0.1", () => {
console.log(`mock-openai listening on ${port}`);
});
NODE
# Launch the mock server in the background and poll /health (up to ~8s).
MOCK_PORT="$MOCK_PORT" SUCCESS_MARKER="$SUCCESS_MARKER" MOCK_REQUEST_LOG="$MOCK_REQUEST_LOG" node /tmp/openclaw-mock-openai.mjs >/tmp/openclaw-mock-openai.log 2>&1 &
mock_pid="$!"
for _ in $(seq 1 80); do
if node -e "fetch('http://127.0.0.1:${MOCK_PORT}/health').then(r=>process.exit(r.ok?0:1)).catch(()=>process.exit(1))"; then
break
fi
sleep 0.1
done
# Final hard check: fail the lane (via set -e) if the mock never came up.
node -e "fetch('http://127.0.0.1:${MOCK_PORT}/health').then(r=>process.exit(r.ok?0:1)).catch(()=>process.exit(1))"
echo "Running non-interactive onboarding..."
# Env-ref secret mode: onboarding should record a reference to
# OPENAI_API_KEY rather than the raw key (asserted just below).
openclaw onboard --non-interactive --accept-risk \
--mode local \
--auth-choice openai-api-key \
--secret-input-mode ref \
--gateway-port "$PORT" \
--gateway-bind loopback \
--skip-daemon \
--skip-ui \
--skip-skills \
--skip-health \
--json >/tmp/openclaw-onboard.json
# Verify onboarding artifacts: config file, main agent dir, and an auth
# profile that stores the env ref but never the secret itself.
node - "$HOME" <<'NODE'
const fs = require("node:fs");
const path = require("node:path");
const home = process.argv[2];
const stateDir = path.join(home, ".openclaw");
const configPath = path.join(stateDir, "openclaw.json");
const agentDir = path.join(stateDir, "agents", "main", "agent");
const authPath = path.join(agentDir, "auth-profiles.json");
if (!fs.existsSync(configPath)) {
throw new Error("onboard did not write openclaw.json");
}
if (!fs.existsSync(agentDir)) {
throw new Error("onboard did not create main agent dir");
}
if (!fs.existsSync(authPath)) {
throw new Error("onboard did not create auth-profiles.json");
}
const authRaw = fs.readFileSync(authPath, "utf8");
if (!authRaw.includes("OPENAI_API_KEY")) {
throw new Error("auth profile did not persist OPENAI_API_KEY env ref");
}
if (authRaw.includes("sk-openclaw-npm-onboard-e2e")) {
throw new Error("auth profile persisted the raw OpenAI test key");
}
NODE
# Rewrite ~/.openclaw/openclaw.json: point the OpenAI provider at the mock
# server, pin a single gpt-5.4 model, and enable plugins — so the agent
# turn below never leaves the container.
node - "$MOCK_PORT" <<'NODE'
const fs = require("node:fs");
const path = require("node:path");
const mockPort = Number(process.argv[2]);
const configPath = path.join(process.env.HOME, ".openclaw", "openclaw.json");
const cfg = JSON.parse(fs.readFileSync(configPath, "utf8"));
const modelRef = "openai/gpt-5.4";
const cost = { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 };
cfg.models = {
...(cfg.models || {}),
mode: "merge",
providers: {
...(cfg.models?.providers || {}),
openai: {
...(cfg.models?.providers?.openai || {}),
baseUrl: `http://127.0.0.1:${mockPort}/v1`,
apiKey: { source: "env", provider: "default", id: "OPENAI_API_KEY" },
api: "openai-responses",
// allowPrivateNetwork: needed because the mock lives on loopback.
request: { ...(cfg.models?.providers?.openai?.request || {}), allowPrivateNetwork: true },
models: [
{
id: "gpt-5.4",
name: "gpt-5.4",
api: "openai-responses",
reasoning: false,
input: ["text", "image"],
cost,
contextWindow: 128000,
contextTokens: 96000,
maxTokens: 4096,
},
],
},
},
};
cfg.agents = {
...(cfg.agents || {}),
defaults: {
...(cfg.agents?.defaults || {}),
model: { primary: modelRef },
models: {
...(cfg.agents?.defaults?.models || {}),
// SSE transport; skip the websocket warmup against the mock server.
[modelRef]: { params: { transport: "sse", openaiWsWarmup: false } },
},
},
};
cfg.plugins = {
...(cfg.plugins || {}),
enabled: true,
};
fs.writeFileSync(configPath, `${JSON.stringify(cfg, null, 2)}\n`);
NODE
# The channel's runtime dep must not exist until the channel is enabled.
assert_dep_absent "$DEP_SENTINEL"
echo "Configuring $CHANNEL..."
openclaw channels add --channel "$CHANNEL" --token "$CHANNEL_TOKEN" >/tmp/openclaw-channel-add.log 2>&1
# Verify the channel is enabled in config and its token was persisted.
node - "$CHANNEL" "$CHANNEL_TOKEN" <<'NODE'
const fs = require("node:fs");
const path = require("node:path");
const channel = process.argv[2];
const token = process.argv[3];
const cfg = JSON.parse(fs.readFileSync(path.join(process.env.HOME, ".openclaw", "openclaw.json"), "utf8"));
const entry = cfg.channels?.[channel];
if (!entry || entry.enabled === false) {
throw new Error(`${channel} was not enabled`);
}
const serialized = JSON.stringify(entry);
if (!serialized.includes(token)) {
throw new Error(`${channel} token was not persisted`);
}
NODE
# Enabling the channel should have installed the sentinel dep on demand.
assert_dep_present "$DEP_SENTINEL"
echo "Running doctor after activated plugin dep install..."
openclaw doctor --non-interactive >/tmp/openclaw-doctor.log 2>&1
# Doctor must not have removed the on-demand dep.
assert_dep_present "$DEP_SENTINEL"
echo "Running local agent turn against mocked OpenAI..."
openclaw agent --local \
--agent main \
--session-id npm-onboard-channel-agent \
--message "Return the success marker from the test server." \
--thinking off \
--json >/tmp/openclaw-agent.combined 2>&1
# The turn passed only if the marker round-tripped AND the mock's request
# log shows a Responses or Chat Completions call was actually made.
node - "$SUCCESS_MARKER" "$MOCK_REQUEST_LOG" <<'NODE'
const fs = require("node:fs");
const marker = process.argv[2];
const logPath = process.argv[3];
const output = fs.readFileSync("/tmp/openclaw-agent.combined", "utf8");
if (!output.includes(marker)) {
throw new Error(`agent JSON did not contain success marker. Output: ${output}`);
}
const requestLog = fs.existsSync(logPath) ? fs.readFileSync(logPath, "utf8") : "";
if (!/\/v1\/(responses|chat\/completions)/.test(requestLog)) {
throw new Error(`mock OpenAI server was not used. Requests: ${requestLog}`);
}
NODE
echo "npm tarball onboard/channel/agent Docker E2E passed for $CHANNEL"
EOF
then
cat "$run_log"
rm -f "$run_log"
exit 1
fi
rm -f "$run_log"
echo "npm tarball onboard/channel/agent Docker E2E passed ($CHANNEL)"