mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-06 16:30:57 +00:00
refactor: simplify plugin dependency handling
Simplify plugin installation and runtime loading around package-manager-owned dependencies, with Jiti reserved for local/TS fallback paths. Also scans npm plugin install roots so hoisted transitive dependencies are covered by dependency denylist and node_modules symlink checks.
This commit is contained in:
committed by
GitHub
parent
2e8e9cd6ca
commit
ed8f50f240
@@ -46,7 +46,7 @@ const CONTROL_UI_I18N_SCOPE_RE =
|
||||
const NATIVE_ONLY_RE =
|
||||
/^(apps\/android\/|apps\/ios\/|apps\/macos\/|apps\/macos-mlx-tts\/|apps\/shared\/|Swabble\/|appcast\.xml$)/;
|
||||
const FAST_INSTALL_SMOKE_SCOPE_RE =
|
||||
/^(Dockerfile$|\.npmrc$|package\.json$|pnpm-lock\.yaml$|pnpm-workspace\.yaml$|scripts\/ci-changed-scope\.mjs$|scripts\/postinstall-bundled-plugins\.mjs$|scripts\/e2e\/(?:Dockerfile(?:\.qr-import)?|agents-delete-shared-workspace-docker\.sh|gateway-network-docker\.sh|bundled-channel-runtime-deps-docker\.sh)$|src\/plugins\/bundled-runtime-deps\.ts$|extensions\/[^/]+\/(?:package\.json|openclaw\.plugin\.json)$|\.github\/workflows\/install-smoke\.yml$|\.github\/actions\/setup-node-env\/action\.yml$)/;
|
||||
/^(Dockerfile$|\.npmrc$|package\.json$|pnpm-lock\.yaml$|pnpm-workspace\.yaml$|scripts\/ci-changed-scope\.mjs$|scripts\/postinstall-bundled-plugins\.mjs$|scripts\/e2e\/(?:Dockerfile(?:\.qr-import)?|agents-delete-shared-workspace-docker\.sh|gateway-network-docker\.sh)$|extensions\/[^/]+\/(?:package\.json|openclaw\.plugin\.json)$|\.github\/workflows\/install-smoke\.yml$|\.github\/actions\/setup-node-env\/action\.yml$)/;
|
||||
const FULL_INSTALL_SMOKE_SCOPE_RE =
|
||||
/^(Dockerfile$|\.npmrc$|package\.json$|pnpm-lock\.yaml$|pnpm-workspace\.yaml$|scripts\/ci-changed-scope\.mjs$|scripts\/install\.sh$|scripts\/test-install-sh-docker\.sh$|scripts\/docker\/|scripts\/e2e\/(?:Dockerfile(?:\.qr-import)?|qr-import-docker\.sh|bun-global-install-smoke\.sh)$|\.github\/workflows\/install-smoke\.yml$|\.github\/actions\/setup-node-env\/action\.yml$)/;
|
||||
const FAST_INSTALL_SMOKE_RUNTIME_SCOPE_RE = /^src\/(?:channels|gateway|plugin-sdk|plugins)\//;
|
||||
|
||||
@@ -192,14 +192,13 @@ The `Dockerfile` supports two optional build args:
|
||||
volumes:
|
||||
- ${OPENCLAW_CONFIG_DIR}:/home/node/.openclaw
|
||||
- ${OPENCLAW_WORKSPACE_DIR}:/home/node/.openclaw/workspace
|
||||
- openclaw-plugin-runtime-deps:/var/lib/openclaw/plugin-runtime-deps
|
||||
```
|
||||
|
||||
This means:
|
||||
|
||||
- `~/.openclaw/.env` is available inside the container at `/home/node/.openclaw/.env` — OpenClaw loads it automatically as the global env fallback
|
||||
- `~/.openclaw/openclaw.json` is available at `/home/node/.openclaw/openclaw.json` — the gateway watches it and hot-reloads most changes
|
||||
- Generated bundled plugin runtime deps and mirrors live in the `openclaw-plugin-runtime-deps` Docker volume at `/var/lib/openclaw/plugin-runtime-deps`, not in the host config bind mount
|
||||
- Downloadable plugin packages and install records live under the mounted OpenClaw home
|
||||
- No need to add API keys to `docker-compose.yml` or configure anything inside the container
|
||||
- Keys survive `clawdock-update`, `clawdock-rebuild`, and `clawdock-clean` because they live on the host
|
||||
|
||||
|
||||
@@ -363,8 +363,7 @@ export function copyBundledPluginMetadata(params = {}) {
|
||||
manifest,
|
||||
generatedChannelConfigsByPlugin.get(manifest.id),
|
||||
);
|
||||
// Generated skill assets live under a dedicated dist-owned directory. Runtime
|
||||
// dependency staging owns dist plugin node_modules; do not remove it here.
|
||||
// Generated skill assets live under a dedicated dist-owned directory.
|
||||
removePathIfExists(path.join(distPluginDir, GENERATED_BUNDLED_SKILLS_DIR));
|
||||
const copiedSkills = copyDeclaredPluginSkillPaths({
|
||||
manifest: manifestWithGeneratedChannelConfigs,
|
||||
|
||||
@@ -20,7 +20,6 @@ COPY packages ./packages
|
||||
COPY extensions ./extensions
|
||||
COPY patches ./patches
|
||||
COPY scripts/postinstall-bundled-plugins.mjs scripts/preinstall-package-manager-warning.mjs scripts/npm-runner.mjs scripts/windows-cmd-helpers.mjs ./scripts/
|
||||
COPY scripts/lib/bundled-runtime-deps-install.mjs ./scripts/lib/bundled-runtime-deps-install.mjs
|
||||
COPY scripts/lib/package-dist-imports.mjs ./scripts/lib/package-dist-imports.mjs
|
||||
RUN --mount=type=cache,id=openclaw-pnpm-store,target=/root/.local/share/pnpm/store,sharing=locked \
|
||||
corepack enable \
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# Runs bundled plugin runtime-dependency Docker scenarios from a mounted OpenClaw
|
||||
# npm tarball. The default image is a clean runner; each scenario installs the
|
||||
# tarball so package install behavior is what gets tested.
|
||||
set -euo pipefail
|
||||
|
||||
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
|
||||
source "$ROOT_DIR/scripts/lib/docker-e2e-image.sh"
|
||||
source "$ROOT_DIR/scripts/lib/docker-e2e-package.sh"
|
||||
source "$ROOT_DIR/scripts/e2e/lib/bundled-channel-runtime-deps-runner.sh"
|
||||
source "$ROOT_DIR/scripts/e2e/lib/bundled-channel/channel.sh"
|
||||
source "$ROOT_DIR/scripts/e2e/lib/bundled-channel/root-owned.sh"
|
||||
source "$ROOT_DIR/scripts/e2e/lib/bundled-channel/setup-entry.sh"
|
||||
source "$ROOT_DIR/scripts/e2e/lib/bundled-channel/disabled-config.sh"
|
||||
source "$ROOT_DIR/scripts/e2e/lib/bundled-channel/update.sh"
|
||||
source "$ROOT_DIR/scripts/e2e/lib/bundled-channel/load-failure.sh"
|
||||
|
||||
IMAGE_NAME="$(docker_e2e_resolve_image "openclaw-bundled-channel-deps-e2e" OPENCLAW_BUNDLED_CHANNEL_DEPS_E2E_IMAGE)"
|
||||
UPDATE_BASELINE_VERSION="${OPENCLAW_BUNDLED_CHANNEL_UPDATE_BASELINE_VERSION:-2026.4.20}"
|
||||
DOCKER_TARGET="${OPENCLAW_BUNDLED_CHANNEL_DOCKER_TARGET:-bare}"
|
||||
HOST_BUILD="${OPENCLAW_BUNDLED_CHANNEL_HOST_BUILD:-1}"
|
||||
PACKAGE_TGZ="${OPENCLAW_CURRENT_PACKAGE_TGZ:-}"
|
||||
RUN_CHANNEL_SCENARIOS="${OPENCLAW_BUNDLED_CHANNEL_SCENARIOS:-1}"
|
||||
RUN_UPDATE_SCENARIO="${OPENCLAW_BUNDLED_CHANNEL_UPDATE_SCENARIO:-1}"
|
||||
RUN_ROOT_OWNED_SCENARIO="${OPENCLAW_BUNDLED_CHANNEL_ROOT_OWNED_SCENARIO:-1}"
|
||||
RUN_SETUP_ENTRY_SCENARIO="${OPENCLAW_BUNDLED_CHANNEL_SETUP_ENTRY_SCENARIO:-1}"
|
||||
RUN_LOAD_FAILURE_SCENARIO="${OPENCLAW_BUNDLED_CHANNEL_LOAD_FAILURE_SCENARIO:-1}"
|
||||
RUN_DISABLED_CONFIG_SCENARIO="${OPENCLAW_BUNDLED_CHANNEL_DISABLED_CONFIG_SCENARIO:-1}"
|
||||
CHANNEL_ONLY="${OPENCLAW_BUNDLED_CHANNEL_ONLY:-}"
|
||||
DOCKER_RUN_TIMEOUT="${OPENCLAW_BUNDLED_CHANNEL_DOCKER_RUN_TIMEOUT:-900s}"
|
||||
DOCKER_UPDATE_RUN_TIMEOUT="${OPENCLAW_BUNDLED_CHANNEL_UPDATE_DOCKER_RUN_TIMEOUT:-${OPENCLAW_BUNDLED_CHANNEL_DOCKER_RUN_TIMEOUT:-2400s}}"
|
||||
|
||||
docker_e2e_build_or_reuse "$IMAGE_NAME" bundled-channel-deps "$ROOT_DIR/scripts/e2e/Dockerfile" "$ROOT_DIR" "$DOCKER_TARGET"
|
||||
|
||||
prepare_package_tgz() {
|
||||
if [ -n "$PACKAGE_TGZ" ]; then
|
||||
PACKAGE_TGZ="$(docker_e2e_prepare_package_tgz bundled-channel-deps "$PACKAGE_TGZ")"
|
||||
return 0
|
||||
fi
|
||||
if [ "$HOST_BUILD" = "0" ] && [ -z "${OPENCLAW_CURRENT_PACKAGE_TGZ:-}" ]; then
|
||||
echo "OPENCLAW_BUNDLED_CHANNEL_HOST_BUILD=0 requires OPENCLAW_CURRENT_PACKAGE_TGZ" >&2
|
||||
exit 1
|
||||
fi
|
||||
PACKAGE_TGZ="$(docker_e2e_prepare_package_tgz bundled-channel-deps)"
|
||||
}
|
||||
|
||||
prepare_package_tgz
|
||||
docker_e2e_package_mount_args "$PACKAGE_TGZ"
|
||||
docker_e2e_harness_mount_args
|
||||
|
||||
run_bundled_channel_runtime_dep_scenarios
|
||||
@@ -1,83 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Scenario selection for bundled plugin runtime-dependency Docker tests.
|
||||
# The large scenario bodies stay in the owning test script; this helper keeps
|
||||
# env flag parsing and dispatch in one small, reviewable place.
|
||||
|
||||
bundled_channel_state_script_b64() {
|
||||
docker_e2e_test_state_shell_b64 "$1" empty
|
||||
}
|
||||
|
||||
run_bundled_channel_container() {
|
||||
local label="$1"
|
||||
local timeout_value="$2"
|
||||
shift 2
|
||||
run_logged_print "$label" timeout "$timeout_value" docker run --rm \
|
||||
"${DOCKER_E2E_HARNESS_ARGS[@]}" \
|
||||
"$@"
|
||||
}
|
||||
|
||||
run_bundled_channel_container_with_state() {
|
||||
local label="$1"
|
||||
local timeout_value="$2"
|
||||
local state_label="$3"
|
||||
shift 3
|
||||
local state_script_b64
|
||||
state_script_b64="$(bundled_channel_state_script_b64 "$state_label")"
|
||||
run_bundled_channel_container "$label" "$timeout_value" \
|
||||
-e COREPACK_ENABLE_DOWNLOAD_PROMPT=0 \
|
||||
-e "OPENCLAW_TEST_STATE_SCRIPT_B64=$state_script_b64" \
|
||||
"$@"
|
||||
}
|
||||
|
||||
run_bundled_channel_container_with_state_heartbeat() {
|
||||
local label="$1"
|
||||
local heartbeat="$2"
|
||||
local timeout_value="$3"
|
||||
local state_label="$4"
|
||||
shift 4
|
||||
local state_script_b64
|
||||
state_script_b64="$(bundled_channel_state_script_b64 "$state_label")"
|
||||
run_logged_print_heartbeat "$label" "$heartbeat" timeout "$timeout_value" docker run --rm \
|
||||
"${DOCKER_E2E_HARNESS_ARGS[@]}" \
|
||||
-e COREPACK_ENABLE_DOWNLOAD_PROMPT=0 \
|
||||
-e "OPENCLAW_TEST_STATE_SCRIPT_B64=$state_script_b64" \
|
||||
"$@"
|
||||
}
|
||||
|
||||
run_bundled_channel_runtime_dep_scenarios() {
|
||||
if [ "$RUN_CHANNEL_SCENARIOS" != "0" ]; then
|
||||
IFS=',' read -r -a CHANNEL_SCENARIOS <<<"${OPENCLAW_BUNDLED_CHANNELS:-${CHANNEL_ONLY:-telegram,discord,slack,feishu,memory-lancedb}}"
|
||||
for channel_scenario in "${CHANNEL_SCENARIOS[@]}"; do
|
||||
channel_scenario="${channel_scenario//[[:space:]]/}"
|
||||
[ -n "$channel_scenario" ] || continue
|
||||
case "$channel_scenario" in
|
||||
telegram) run_channel_scenario telegram grammy ;;
|
||||
discord) run_channel_scenario discord discord-api-types ;;
|
||||
slack) run_channel_scenario slack @slack/web-api ;;
|
||||
feishu) run_channel_scenario feishu @larksuiteoapi/node-sdk ;;
|
||||
memory-lancedb) run_channel_scenario memory-lancedb @lancedb/lancedb ;;
|
||||
*)
|
||||
echo "Unsupported OPENCLAW_BUNDLED_CHANNELS entry: $channel_scenario" >&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
fi
|
||||
|
||||
if [ "$RUN_UPDATE_SCENARIO" != "0" ]; then
|
||||
run_update_scenario
|
||||
fi
|
||||
if [ "$RUN_ROOT_OWNED_SCENARIO" != "0" ]; then
|
||||
run_root_owned_global_scenario
|
||||
fi
|
||||
if [ "$RUN_SETUP_ENTRY_SCENARIO" != "0" ]; then
|
||||
run_setup_entry_scenario
|
||||
fi
|
||||
if [ "$RUN_DISABLED_CONFIG_SCENARIO" != "0" ]; then
|
||||
run_disabled_config_scenario
|
||||
fi
|
||||
if [ "$RUN_LOAD_FAILURE_SCENARIO" != "0" ]; then
|
||||
run_load_failure_scenario
|
||||
fi
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
import fs from "node:fs";
|
||||
|
||||
const raw = JSON.parse(fs.readFileSync(process.argv[2], "utf8"));
|
||||
const payload = raw.result ?? raw.data ?? raw;
|
||||
const channel = process.argv[3];
|
||||
const dump = () => JSON.stringify(raw, null, 2).slice(0, 4000);
|
||||
|
||||
const hasChannelMeta = Array.isArray(payload.channelMeta)
|
||||
? payload.channelMeta.some((entry) => entry?.id === channel)
|
||||
: Boolean(payload.channelMeta?.[channel]);
|
||||
if (!hasChannelMeta) {
|
||||
throw new Error(`missing channelMeta.${channel}\n${dump()}`);
|
||||
}
|
||||
if (!payload.channels || !payload.channels[channel]) {
|
||||
throw new Error(`missing channels.${channel}\n${dump()}`);
|
||||
}
|
||||
const accounts = payload.channelAccounts?.[channel];
|
||||
if (!Array.isArray(accounts) || accounts.length === 0) {
|
||||
throw new Error(`missing channelAccounts.${channel}\n${dump()}`);
|
||||
}
|
||||
|
||||
console.log(`${channel} channel plugin visible`);
|
||||
@@ -1,44 +0,0 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
|
||||
const stageDir = process.argv[2];
|
||||
const depName = process.argv[3];
|
||||
const manifestName = ".openclaw-runtime-deps.json";
|
||||
const matches = [];
|
||||
|
||||
function visit(dir) {
|
||||
let entries;
|
||||
try {
|
||||
entries = fs.readdirSync(dir, { withFileTypes: true });
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(dir, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
visit(fullPath);
|
||||
continue;
|
||||
}
|
||||
if (entry.name !== manifestName) {
|
||||
continue;
|
||||
}
|
||||
let parsed;
|
||||
try {
|
||||
parsed = JSON.parse(fs.readFileSync(fullPath, "utf8"));
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
const specs = Array.isArray(parsed.specs) ? parsed.specs : [];
|
||||
for (const spec of specs) {
|
||||
if (typeof spec === "string" && spec.startsWith(`${depName}@`)) {
|
||||
matches.push(`${fullPath}: ${spec}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
visit(stageDir);
|
||||
if (matches.length > 0) {
|
||||
process.stderr.write(`${matches.join("\n")}\n`);
|
||||
process.exit(1);
|
||||
}
|
||||
@@ -1,26 +0,0 @@
|
||||
import fs from "node:fs";
|
||||
|
||||
const payload = JSON.parse(fs.readFileSync(process.argv[2], "utf8"));
|
||||
const expectedBefore = process.argv[3];
|
||||
const expectedAfter = process.argv[4];
|
||||
if (payload.status !== "ok") {
|
||||
throw new Error(`expected update status ok, got ${JSON.stringify(payload.status)}`);
|
||||
}
|
||||
if (expectedBefore && (payload.before?.version ?? null) !== expectedBefore) {
|
||||
throw new Error(
|
||||
`expected before.version ${expectedBefore}, got ${JSON.stringify(payload.before?.version)}`,
|
||||
);
|
||||
}
|
||||
if ((payload.after?.version ?? null) !== expectedAfter) {
|
||||
throw new Error(
|
||||
`expected after.version ${expectedAfter}, got ${JSON.stringify(payload.after?.version)}`,
|
||||
);
|
||||
}
|
||||
const steps = Array.isArray(payload.steps) ? payload.steps : [];
|
||||
const doctor = steps.find((step) => step?.name === "openclaw doctor");
|
||||
if (!doctor) {
|
||||
throw new Error("missing openclaw doctor step");
|
||||
}
|
||||
if (Number(doctor.exitCode ?? 1) !== 0) {
|
||||
throw new Error(`openclaw doctor step failed: ${JSON.stringify(doctor)}`);
|
||||
}
|
||||
@@ -1,224 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Runs one bundled plugin channel runtime-dependency scenario.
|
||||
# Sourced by scripts/e2e/bundled-channel-runtime-deps-docker.sh.
|
||||
|
||||
run_channel_scenario() {
|
||||
local channel="$1"
|
||||
local dep_sentinel="$2"
|
||||
|
||||
echo "Running bundled $channel runtime deps Docker E2E..."
|
||||
run_bundled_channel_container_with_state \
|
||||
"bundled-channel-deps-$channel" \
|
||||
"$DOCKER_RUN_TIMEOUT" \
|
||||
"bundled-channel-deps-$channel" \
|
||||
-e OPENCLAW_CHANNEL_UNDER_TEST="$channel" \
|
||||
-e OPENCLAW_DEP_SENTINEL="$dep_sentinel" \
|
||||
"${DOCKER_E2E_PACKAGE_ARGS[@]}" \
|
||||
-i "$IMAGE_NAME" bash -s <<'EOF'
|
||||
set -euo pipefail
|
||||
|
||||
source scripts/lib/openclaw-e2e-instance.sh
|
||||
source scripts/e2e/lib/bundled-channel/common.sh
|
||||
openclaw_e2e_eval_test_state_from_b64 "${OPENCLAW_TEST_STATE_SCRIPT_B64:?missing OPENCLAW_TEST_STATE_SCRIPT_B64}"
|
||||
export NPM_CONFIG_PREFIX="$HOME/.npm-global"
|
||||
export PATH="$NPM_CONFIG_PREFIX/bin:$PATH"
|
||||
export OPENAI_API_KEY="sk-openclaw-bundled-channel-deps-e2e"
|
||||
export OPENCLAW_NO_ONBOARD=1
|
||||
|
||||
TOKEN="bundled-channel-deps-token"
|
||||
PORT="18789"
|
||||
CHANNEL="${OPENCLAW_CHANNEL_UNDER_TEST:?missing OPENCLAW_CHANNEL_UNDER_TEST}"
|
||||
DEP_SENTINEL="${OPENCLAW_DEP_SENTINEL:?missing OPENCLAW_DEP_SENTINEL}"
|
||||
gateway_pid=""
|
||||
|
||||
terminate_gateways() {
|
||||
openclaw_e2e_terminate_gateways "${gateway_pid:-}"
|
||||
}
|
||||
|
||||
cleanup() {
|
||||
terminate_gateways
|
||||
}
|
||||
trap cleanup EXIT
|
||||
|
||||
bundled_channel_install_package /tmp/openclaw-install.log
|
||||
|
||||
command -v openclaw >/dev/null
|
||||
package_root="$(openclaw_e2e_package_root)"
|
||||
openclaw_e2e_assert_package_extensions "$package_root" telegram discord slack feishu memory-lancedb
|
||||
|
||||
if [ -d "$package_root/dist/extensions/$CHANNEL/node_modules" ]; then
|
||||
echo "$CHANNEL runtime deps should not be preinstalled in package" >&2
|
||||
find "$package_root/dist/extensions/$CHANNEL/node_modules" -maxdepth 2 -type f | head -20 >&2 || true
|
||||
exit 1
|
||||
fi
|
||||
|
||||
start_gateway() {
|
||||
local log_file="$1"
|
||||
local skip_sidecars="${2:-0}"
|
||||
: >"$log_file"
|
||||
if [ "$skip_sidecars" = "1" ]; then
|
||||
OPENCLAW_SKIP_CHANNELS=1 OPENCLAW_SKIP_PROVIDERS=1 \
|
||||
openclaw gateway --port "$PORT" --bind loopback --allow-unconfigured >"$log_file" 2>&1 &
|
||||
else
|
||||
openclaw gateway --port "$PORT" --bind loopback --allow-unconfigured >"$log_file" 2>&1 &
|
||||
fi
|
||||
gateway_pid="$!"
|
||||
|
||||
# Cold bundled dependency staging can exceed 60s under 10-way Docker aggregate load.
|
||||
for _ in $(seq 1 1200); do
|
||||
if grep -Eq "listening on ws://|\\[gateway\\] http server listening|\\[gateway\\] ready( \\(|$)" "$log_file"; then
|
||||
return 0
|
||||
fi
|
||||
if ! kill -0 "$gateway_pid" 2>/dev/null; then
|
||||
echo "gateway exited unexpectedly" >&2
|
||||
cat "$log_file" >&2
|
||||
exit 1
|
||||
fi
|
||||
sleep 0.25
|
||||
done
|
||||
|
||||
echo "timed out waiting for gateway" >&2
|
||||
cat "$log_file" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
stop_gateway() {
|
||||
terminate_gateways
|
||||
gateway_pid=""
|
||||
}
|
||||
|
||||
wait_for_gateway_health() {
|
||||
local log_file="${1:-}"
|
||||
if [ -n "${gateway_pid:-}" ] && kill -0 "$gateway_pid" 2>/dev/null; then
|
||||
return 0
|
||||
fi
|
||||
echo "gateway process exited after ready marker" >&2
|
||||
if [ -n "$log_file" ]; then
|
||||
cat "$log_file" >&2
|
||||
fi
|
||||
return 1
|
||||
}
|
||||
|
||||
parse_channel_status_json() {
|
||||
local out="$1"
|
||||
local channel="$2"
|
||||
node scripts/e2e/lib/bundled-channel/assert-channel-status.mjs "$out" "$channel"
|
||||
}
|
||||
|
||||
assert_channel_status() {
|
||||
local channel="$1"
|
||||
if [ "$channel" = "memory-lancedb" ]; then
|
||||
echo "memory-lancedb plugin activation verified by dependency sentinel"
|
||||
return 0
|
||||
fi
|
||||
local out="/tmp/openclaw-channel-status-$channel.json"
|
||||
local err="/tmp/openclaw-channel-status-$channel.err"
|
||||
local parse_err="/tmp/openclaw-channel-status-$channel.parse.err"
|
||||
local parse_out="/tmp/openclaw-channel-status-$channel.parse.out"
|
||||
for _ in $(seq 1 30); do
|
||||
if openclaw gateway call channels.status \
|
||||
--url "ws://127.0.0.1:$PORT" \
|
||||
--token "$TOKEN" \
|
||||
--timeout 10000 \
|
||||
--json \
|
||||
--params '{"probe":false}' >"$out" 2>"$err"; then
|
||||
if parse_channel_status_json "$out" "$channel" >"$parse_out" 2>"$parse_err"; then
|
||||
cat "$parse_out"
|
||||
return 0
|
||||
fi
|
||||
fi
|
||||
if grep -Eq "\\[gateway\\] ready \\(.*\\b$channel\\b" /tmp/openclaw-"$channel"-*.log 2>/dev/null; then
|
||||
echo "$channel channel plugin visible in gateway ready log"
|
||||
return 0
|
||||
fi
|
||||
sleep 2
|
||||
done
|
||||
if [ ! -s "$out" ]; then
|
||||
cat "$err" >&2 || true
|
||||
else
|
||||
cat "$parse_err" >&2 || true
|
||||
cat "$out" >&2 || true
|
||||
fi
|
||||
cat /tmp/openclaw-"$channel"-*.log >&2 2>/dev/null || true
|
||||
return 1
|
||||
}
|
||||
|
||||
assert_installed_once() {
|
||||
local log_file="$1"
|
||||
local channel="$2"
|
||||
local dep_path="$3"
|
||||
local count
|
||||
count="$(grep -Ec "\\[plugins\\] $channel installed bundled runtime deps( in [0-9]+ms)?:" "$log_file" || true)"
|
||||
if [ "$count" -eq 1 ]; then
|
||||
return 0
|
||||
fi
|
||||
if [ "$count" -eq 0 ] && [ -n "$(bundled_channel_find_external_dep_package "$dep_path")" ]; then
|
||||
return 0
|
||||
fi
|
||||
echo "expected one runtime deps install log or staged dependency sentinel for $channel, got $count log lines" >&2
|
||||
cat "$log_file" >&2
|
||||
find "$(bundled_channel_stage_root)" -maxdepth 12 -type f | sort | head -120 >&2 || true
|
||||
exit 1
|
||||
}
|
||||
|
||||
assert_not_installed() {
|
||||
local log_file="$1"
|
||||
local channel="$2"
|
||||
if grep -Eq "\\[plugins\\] $channel installed bundled runtime deps( in [0-9]+ms)?:" "$log_file"; then
|
||||
echo "expected no runtime deps reinstall for $channel" >&2
|
||||
cat "$log_file" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
assert_dep_sentinel() {
|
||||
local channel="$1"
|
||||
local dep_path="$2"
|
||||
bundled_channel_assert_dep_available "$channel" "$dep_path" "$package_root"
|
||||
}
|
||||
|
||||
assert_no_dep_sentinel() {
|
||||
local channel="$1"
|
||||
local dep_path="$2"
|
||||
bundled_channel_assert_no_dep_available "$channel" "$dep_path" "$package_root"
|
||||
}
|
||||
|
||||
assert_no_install_stage() {
|
||||
local channel="$1"
|
||||
local stage="$package_root/dist/extensions/$channel/.openclaw-install-stage"
|
||||
if [ -e "$stage" ]; then
|
||||
echo "install stage should be cleaned after activation for $channel" >&2
|
||||
find "$stage" -maxdepth 4 -type f | sort | head -80 >&2 || true
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
echo "Starting baseline gateway with OpenAI configured..."
|
||||
bundled_channel_write_config baseline
|
||||
start_gateway "/tmp/openclaw-$CHANNEL-baseline.log" 1
|
||||
wait_for_gateway_health "/tmp/openclaw-$CHANNEL-baseline.log"
|
||||
stop_gateway
|
||||
assert_no_dep_sentinel "$CHANNEL" "$DEP_SENTINEL"
|
||||
|
||||
echo "Enabling $CHANNEL by config edit, then restarting gateway..."
|
||||
bundled_channel_write_config "$CHANNEL"
|
||||
start_gateway "/tmp/openclaw-$CHANNEL-first.log"
|
||||
wait_for_gateway_health "/tmp/openclaw-$CHANNEL-first.log"
|
||||
assert_installed_once "/tmp/openclaw-$CHANNEL-first.log" "$CHANNEL" "$DEP_SENTINEL"
|
||||
assert_dep_sentinel "$CHANNEL" "$DEP_SENTINEL"
|
||||
assert_no_install_stage "$CHANNEL"
|
||||
assert_channel_status "$CHANNEL"
|
||||
stop_gateway
|
||||
|
||||
echo "Restarting gateway again; $CHANNEL deps must stay installed..."
|
||||
start_gateway "/tmp/openclaw-$CHANNEL-second.log"
|
||||
wait_for_gateway_health "/tmp/openclaw-$CHANNEL-second.log"
|
||||
assert_not_installed "/tmp/openclaw-$CHANNEL-second.log" "$CHANNEL"
|
||||
assert_no_install_stage "$CHANNEL"
|
||||
assert_channel_status "$CHANNEL"
|
||||
stop_gateway
|
||||
|
||||
echo "bundled $CHANNEL runtime deps Docker E2E passed"
|
||||
EOF
|
||||
}
|
||||
@@ -1,137 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Container-side helpers shared by bundled channel Docker E2E scenarios.
|
||||
# These functions assume the OpenClaw package is installed globally inside the
|
||||
# test container and the scenario has exported HOME/OPENAI_API_KEY as needed.
|
||||
|
||||
bundled_channel_package_root() {
|
||||
printf "%s/openclaw" "$(npm root -g)"
|
||||
}
|
||||
|
||||
bundled_channel_stage_root() {
|
||||
printf "%s/.openclaw/plugin-runtime-deps" "$HOME"
|
||||
}
|
||||
|
||||
bundled_channel_stage_dir() {
|
||||
printf "%s" "${OPENCLAW_PLUGIN_STAGE_DIR:-$(bundled_channel_stage_root)}"
|
||||
}
|
||||
|
||||
bundled_channel_install_package() {
|
||||
openclaw_e2e_install_package "$@"
|
||||
}
|
||||
|
||||
bundled_channel_find_external_dep_package() {
|
||||
local dep_path="$1"
|
||||
find "$(bundled_channel_stage_root)" -maxdepth 12 -path "*/node_modules/$dep_path/package.json" -type f -print -quit 2>/dev/null || true
|
||||
}
|
||||
|
||||
bundled_channel_find_staged_dep_package() {
|
||||
local dep_path="$1"
|
||||
find "$(bundled_channel_stage_dir)" -maxdepth 12 -path "*/node_modules/$dep_path/package.json" -type f -print -quit 2>/dev/null || true
|
||||
}
|
||||
|
||||
bundled_channel_dump_stage_dir() {
|
||||
find "$(bundled_channel_stage_dir)" -maxdepth 12 -type f | sort | head -160 >&2 || true
|
||||
}
|
||||
|
||||
bundled_channel_assert_no_package_dep_available() {
|
||||
local channel="$1"
|
||||
local dep_path="$2"
|
||||
local root="${3:-$(bundled_channel_package_root)}"
|
||||
for candidate in \
|
||||
"$root/dist/extensions/$channel/node_modules/$dep_path/package.json" \
|
||||
"$root/dist/extensions/node_modules/$dep_path/package.json" \
|
||||
"$root/node_modules/$dep_path/package.json"; do
|
||||
if [ -f "$candidate" ]; then
|
||||
echo "packaged install should not mutate package tree for $channel: $candidate" >&2
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
if [ -f "$HOME/node_modules/$dep_path/package.json" ]; then
|
||||
echo "bundled runtime deps should not use HOME npm project for $channel: $HOME/node_modules/$dep_path/package.json" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
bundled_channel_assert_dep_available() {
|
||||
local channel="$1"
|
||||
local dep_path="$2"
|
||||
local root="${3:-$(bundled_channel_package_root)}"
|
||||
if [ -n "$(bundled_channel_find_external_dep_package "$dep_path")" ]; then
|
||||
bundled_channel_assert_no_package_dep_available "$channel" "$dep_path" "$root"
|
||||
return 0
|
||||
fi
|
||||
echo "missing dependency sentinel for $channel: $dep_path" >&2
|
||||
find "$root/dist/extensions/$channel" -maxdepth 3 -type f | sort | head -80 >&2 || true
|
||||
find "$root/node_modules" -maxdepth 3 -path "*/$dep_path/package.json" -type f -print >&2 || true
|
||||
find "$(bundled_channel_stage_root)" -maxdepth 12 -type f | sort | head -120 >&2 || true
|
||||
exit 1
|
||||
}
|
||||
|
||||
bundled_channel_assert_no_dep_available() {
|
||||
local channel="$1"
|
||||
local dep_path="$2"
|
||||
local root="${3:-$(bundled_channel_package_root)}"
|
||||
bundled_channel_assert_no_package_dep_available "$channel" "$dep_path" "$root"
|
||||
if [ -n "$(bundled_channel_find_external_dep_package "$dep_path")" ]; then
|
||||
echo "dependency sentinel should be absent before repair for $channel: $dep_path" >&2
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
bundled_channel_assert_no_staged_dep() {
|
||||
local channel="$1"
|
||||
local dep_path="$2"
|
||||
local message="${3:-$channel unexpectedly staged $dep_path}"
|
||||
if [ -n "$(bundled_channel_find_staged_dep_package "$dep_path")" ]; then
|
||||
echo "$message" >&2
|
||||
bundled_channel_dump_stage_dir
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
bundled_channel_assert_staged_dep() {
|
||||
local channel="$1"
|
||||
local dep_path="$2"
|
||||
local log_file="${3:-}"
|
||||
if [ -n "$(bundled_channel_find_staged_dep_package "$dep_path")" ]; then
|
||||
return 0
|
||||
fi
|
||||
echo "missing external staged dependency sentinel for $channel: $dep_path" >&2
|
||||
if [ -n "$log_file" ]; then
|
||||
cat "$log_file" >&2 || true
|
||||
fi
|
||||
bundled_channel_dump_stage_dir
|
||||
exit 1
|
||||
}
|
||||
|
||||
bundled_channel_assert_no_staged_manifest_spec() {
|
||||
local channel="$1"
|
||||
local dep_path="$2"
|
||||
local log_file="${3:-}"
|
||||
if ! node scripts/e2e/lib/bundled-channel/assert-no-staged-manifest-spec.mjs "$(bundled_channel_stage_dir)" "$dep_path"; then
|
||||
echo "$channel unexpectedly selected $dep_path for external runtime deps" >&2
|
||||
if [ -n "$log_file" ]; then
|
||||
cat "$log_file" >&2 || true
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
bundled_channel_remove_runtime_dep() {
|
||||
local channel="$1"
|
||||
local dep_path="$2"
|
||||
local root="${3:-$(bundled_channel_package_root)}"
|
||||
rm -rf "$root/dist/extensions/$channel/node_modules"
|
||||
rm -rf "$root/dist/extensions/node_modules/$dep_path"
|
||||
rm -rf "$root/node_modules/$dep_path"
|
||||
rm -rf "$(bundled_channel_stage_root)"
|
||||
}
|
||||
|
||||
bundled_channel_write_config() {
|
||||
local mode="$1"
|
||||
node scripts/e2e/lib/bundled-channel/write-config.mjs \
|
||||
"$mode" \
|
||||
"${TOKEN:-bundled-channel-config-token}" \
|
||||
"${PORT:-18789}"
|
||||
}
|
||||
@@ -1,63 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Runs disabled-config runtime-dependency isolation scenarios.
|
||||
# Sourced by scripts/e2e/bundled-channel-runtime-deps-docker.sh.
|
||||
|
||||
run_disabled_config_scenario() {
|
||||
echo "Running bundled channel disabled-config runtime deps Docker E2E..."
|
||||
run_bundled_channel_container_with_state \
|
||||
bundled-channel-disabled-config \
|
||||
"$DOCKER_RUN_TIMEOUT" \
|
||||
bundled-channel-disabled-config \
|
||||
"${DOCKER_E2E_PACKAGE_ARGS[@]}" \
|
||||
-i "$IMAGE_NAME" bash -s <<'EOF'
|
||||
set -euo pipefail
|
||||
|
||||
source scripts/lib/openclaw-e2e-instance.sh
|
||||
source scripts/e2e/lib/bundled-channel/common.sh
|
||||
openclaw_e2e_eval_test_state_from_b64 "${OPENCLAW_TEST_STATE_SCRIPT_B64:?missing OPENCLAW_TEST_STATE_SCRIPT_B64}"
|
||||
export NPM_CONFIG_PREFIX="$HOME/.npm-global"
|
||||
export PATH="$NPM_CONFIG_PREFIX/bin:$PATH"
|
||||
export OPENCLAW_NO_ONBOARD=1
|
||||
export OPENCLAW_PLUGIN_STAGE_DIR="$HOME/.openclaw/plugin-runtime-deps"
|
||||
mkdir -p "$OPENCLAW_PLUGIN_STAGE_DIR"
|
||||
|
||||
assert_dep_absent_everywhere() {
|
||||
local channel="$1"
|
||||
local dep_path="$2"
|
||||
local root="$3"
|
||||
bundled_channel_assert_no_package_dep_available "$channel" "$dep_path" "$root"
|
||||
bundled_channel_assert_no_staged_manifest_spec "$channel" "$dep_path" /tmp/openclaw-disabled-config-doctor.log
|
||||
}
|
||||
|
||||
bundled_channel_install_package /tmp/openclaw-disabled-config-install.log
|
||||
|
||||
root="$(bundled_channel_package_root)"
|
||||
test -d "$root/dist/extensions/telegram"
|
||||
test -d "$root/dist/extensions/discord"
|
||||
test -d "$root/dist/extensions/slack"
|
||||
rm -rf "$root/dist/extensions/telegram/node_modules"
|
||||
rm -rf "$root/dist/extensions/discord/node_modules"
|
||||
rm -rf "$root/dist/extensions/slack/node_modules"
|
||||
|
||||
bundled_channel_write_config disabled-config
|
||||
|
||||
if ! openclaw doctor --non-interactive >/tmp/openclaw-disabled-config-doctor.log 2>&1; then
|
||||
echo "doctor failed for disabled-config runtime deps smoke" >&2
|
||||
cat /tmp/openclaw-disabled-config-doctor.log >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
assert_dep_absent_everywhere telegram grammy "$root"
|
||||
assert_dep_absent_everywhere slack @slack/web-api "$root"
|
||||
assert_dep_absent_everywhere discord discord-api-types "$root"
|
||||
|
||||
if grep -Eq "(grammy|@slack/web-api|discord-api-types)" /tmp/openclaw-disabled-config-doctor.log; then
|
||||
echo "doctor installed runtime deps for an explicitly disabled channel/plugin" >&2
|
||||
cat /tmp/openclaw-disabled-config-doctor.log >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "bundled channel disabled-config runtime deps Docker E2E passed"
|
||||
EOF
|
||||
}
|
||||
@@ -1,85 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
import { readdir } from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { pathToFileURL } from "node:url";
|
||||
|
||||
const root = process.argv[2] || process.env.OPENCLAW_PACKAGE_ROOT;
|
||||
if (!root) {
|
||||
throw new Error("missing package root");
|
||||
}
|
||||
|
||||
const distDir = path.join(root, "dist");
|
||||
const onboardChannelFiles = (await readdir(distDir))
|
||||
.filter((entry) => /^onboard-channels-.*\.js$/.test(entry))
|
||||
.toSorted();
|
||||
let setupChannels;
|
||||
for (const entry of onboardChannelFiles) {
|
||||
const module = await import(pathToFileURL(path.join(distDir, entry)));
|
||||
if (typeof module.setupChannels === "function") {
|
||||
setupChannels = module.setupChannels;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!setupChannels) {
|
||||
throw new Error(
|
||||
`could not find packaged setupChannels export in ${JSON.stringify(onboardChannelFiles)}`,
|
||||
);
|
||||
}
|
||||
|
||||
let channelSelectCount = 0;
|
||||
const notes = [];
|
||||
const prompter = {
|
||||
intro: async () => {},
|
||||
outro: async () => {},
|
||||
note: async (body, title) => {
|
||||
notes.push({ title, body });
|
||||
},
|
||||
confirm: async ({ message, initialValue }) => {
|
||||
if (message === "Link WhatsApp now (QR)?") {
|
||||
return false;
|
||||
}
|
||||
return initialValue ?? true;
|
||||
},
|
||||
select: async ({ message, options }) => {
|
||||
if (message === "Select a channel") {
|
||||
channelSelectCount += 1;
|
||||
return channelSelectCount === 1 ? "whatsapp" : "__done__";
|
||||
}
|
||||
if (message === "Install WhatsApp plugin?") {
|
||||
if (!options?.some((option) => option.value === "local")) {
|
||||
throw new Error(`missing bundled local install option: ${JSON.stringify(options)}`);
|
||||
}
|
||||
return "local";
|
||||
}
|
||||
if (message === "WhatsApp phone setup") {
|
||||
return "separate";
|
||||
}
|
||||
if (message === "WhatsApp DM policy") {
|
||||
return "disabled";
|
||||
}
|
||||
throw new Error(`unexpected select prompt: ${message}`);
|
||||
},
|
||||
multiselect: async ({ message }) => {
|
||||
throw new Error(`unexpected multiselect prompt: ${message}`);
|
||||
},
|
||||
text: async ({ message }) => {
|
||||
throw new Error(`unexpected text prompt: ${message}`);
|
||||
},
|
||||
};
|
||||
const runtime = {
|
||||
log: (message) => console.log(message),
|
||||
error: (message) => console.error(message),
|
||||
};
|
||||
|
||||
const result = await setupChannels({ plugins: { enabled: true } }, runtime, prompter, {
|
||||
deferStatusUntilSelection: true,
|
||||
skipConfirm: true,
|
||||
skipStatusNote: true,
|
||||
skipDmPolicyPrompt: true,
|
||||
initialSelection: ["whatsapp"],
|
||||
});
|
||||
|
||||
if (!result.channels?.whatsapp) {
|
||||
throw new Error(`WhatsApp setup did not write channel config: ${JSON.stringify(result)}`);
|
||||
}
|
||||
console.log("packaged guided WhatsApp setup completed");
|
||||
@@ -1,34 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Runs load-failure isolation scenarios.
|
||||
# Sourced by scripts/e2e/bundled-channel-runtime-deps-docker.sh.
|
||||
|
||||
run_load_failure_scenario() {
|
||||
echo "Running bundled channel load-failure isolation Docker E2E..."
|
||||
run_bundled_channel_container_with_state \
|
||||
bundled-channel-load-failure \
|
||||
"$DOCKER_RUN_TIMEOUT" \
|
||||
bundled-channel-load-failure \
|
||||
"${DOCKER_E2E_PACKAGE_ARGS[@]}" \
|
||||
-i "$IMAGE_NAME" bash -s <<'EOF'
|
||||
set -euo pipefail
|
||||
|
||||
source scripts/lib/openclaw-e2e-instance.sh
|
||||
source scripts/e2e/lib/bundled-channel/common.sh
|
||||
openclaw_e2e_eval_test_state_from_b64 "${OPENCLAW_TEST_STATE_SCRIPT_B64:?missing OPENCLAW_TEST_STATE_SCRIPT_B64}"
|
||||
export NPM_CONFIG_PREFIX="$HOME/.npm-global"
|
||||
export PATH="$NPM_CONFIG_PREFIX/bin:$PATH"
|
||||
export OPENCLAW_NO_ONBOARD=1
|
||||
|
||||
bundled_channel_install_package /tmp/openclaw-load-failure-install.log
|
||||
|
||||
root="$(bundled_channel_package_root)"
|
||||
plugin_dir="$root/dist/extensions/load-failure-alpha"
|
||||
node scripts/e2e/lib/bundled-channel/write-load-failure-fixture.mjs "$plugin_dir"
|
||||
|
||||
echo "Loading synthetic failing bundled channel through packaged loader..."
|
||||
node scripts/e2e/lib/bundled-channel/loader-probe.mjs load-failure "$root" load-failure-alpha
|
||||
|
||||
echo "bundled channel load-failure isolation Docker E2E passed"
|
||||
EOF
|
||||
}
|
||||
@@ -1,121 +0,0 @@
|
||||
#!/usr/bin/env node
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { pathToFileURL } from "node:url";
|
||||
|
||||
function usage() {
|
||||
console.error("Usage: loader-probe.mjs <setup-entries|load-failure> <package-root> [channel...]");
|
||||
process.exit(2);
|
||||
}
|
||||
|
||||
function findBundledLoader(root) {
|
||||
const distDir = path.join(root, "dist");
|
||||
const bundledPath = fs
|
||||
.readdirSync(distDir)
|
||||
.filter((entry) => /^bundled-[A-Za-z0-9_-]+\.js$/.test(entry))
|
||||
.map((entry) => path.join(distDir, entry))
|
||||
.find((entry) => fs.readFileSync(entry, "utf8").includes("src/channels/plugins/bundled.ts"));
|
||||
if (!bundledPath) {
|
||||
throw new Error("missing packaged bundled channel loader artifact");
|
||||
}
|
||||
return bundledPath;
|
||||
}
|
||||
|
||||
function namedExport(module, name) {
|
||||
const fn = Object.values(module).find(
|
||||
(value) => typeof value === "function" && value.name === name,
|
||||
);
|
||||
if (typeof fn !== "function") {
|
||||
throw new Error(
|
||||
`missing packaged bundled loader export ${name}; exports=${Object.keys(module).join(",")}`,
|
||||
);
|
||||
}
|
||||
return fn;
|
||||
}
|
||||
|
||||
async function importBundled(root) {
|
||||
return import(pathToFileURL(findBundledLoader(root)));
|
||||
}
|
||||
|
||||
function loadCounts() {
|
||||
return {
|
||||
plugin: globalThis.__loadFailurePlugin,
|
||||
setup: globalThis.__loadFailureSetup,
|
||||
secrets: globalThis.__loadFailureSecrets,
|
||||
setupSecrets: globalThis.__loadFailureSetupSecrets,
|
||||
};
|
||||
}
|
||||
|
||||
function exerciseLoaders(loaders, id) {
|
||||
for (const [name, fn] of loaders) {
|
||||
try {
|
||||
fn(id);
|
||||
} catch (error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
if (message.includes("synthetic")) {
|
||||
throw new Error(`bundled export ${name} leaked synthetic load failure: ${message}`, {
|
||||
cause: error,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const [command, root, ...args] = process.argv.slice(2);
|
||||
if (!command || !root) {
|
||||
usage();
|
||||
}
|
||||
|
||||
if (command === "load-failure") {
|
||||
process.env.OPENCLAW_BUNDLED_PLUGINS_DIR = path.join(root, "dist/extensions");
|
||||
}
|
||||
|
||||
const bundled = await importBundled(root);
|
||||
|
||||
if (command === "setup-entries") {
|
||||
const channels = args.length > 0 ? args : ["feishu", "whatsapp"];
|
||||
const setupPluginLoader = namedExport(bundled, "getBundledChannelSetupPlugin");
|
||||
for (const channel of channels) {
|
||||
const plugin = setupPluginLoader(channel);
|
||||
if (!plugin) {
|
||||
throw new Error(`${channel} setup plugin did not load pre-config`);
|
||||
}
|
||||
if (plugin.id !== channel) {
|
||||
throw new Error(`${channel} setup plugin id mismatch: ${plugin.id}`);
|
||||
}
|
||||
console.log(`${channel} setup plugin loaded pre-config`);
|
||||
}
|
||||
} else if (command === "load-failure") {
|
||||
const id = args[0] || "load-failure-alpha";
|
||||
const loaderNames = [
|
||||
"getBundledChannelPlugin",
|
||||
"getBundledChannelSetupPlugin",
|
||||
"getBundledChannelSecrets",
|
||||
"getBundledChannelSetupSecrets",
|
||||
];
|
||||
const loaders = loaderNames.map((name) => [name, namedExport(bundled, name)]);
|
||||
|
||||
exerciseLoaders(loaders, id);
|
||||
const firstCounts = loadCounts();
|
||||
exerciseLoaders(loaders, id);
|
||||
const secondCounts = loadCounts();
|
||||
for (const key of ["plugin", "setup", "setupSecrets"]) {
|
||||
const first = firstCounts[key];
|
||||
if (!Number.isInteger(first) || first < 1) {
|
||||
throw new Error(`expected ${key} failure to be exercised at least once, got ${first}`);
|
||||
}
|
||||
if (secondCounts[key] !== first) {
|
||||
throw new Error(
|
||||
`expected ${key} failure to be cached after first pass, got ${first} then ${secondCounts[key]}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
if (firstCounts.secrets !== undefined && secondCounts.secrets !== firstCounts.secrets) {
|
||||
throw new Error(
|
||||
`expected secrets failure to be cached after first pass, got ${firstCounts.secrets} then ${secondCounts.secrets}`,
|
||||
);
|
||||
}
|
||||
console.log("synthetic bundled channel load failures were isolated and cached");
|
||||
} else {
|
||||
usage();
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
import { execFileSync } from "node:child_process";
|
||||
|
||||
const raw = execFileSync("tar", ["-xOf", process.argv[2], "package/package.json"], {
|
||||
encoding: "utf8",
|
||||
});
|
||||
process.stdout.write(String(JSON.parse(raw).version));
|
||||
@@ -1,124 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Runs the root-owned global install runtime-dependency scenario.
|
||||
# Sourced by scripts/e2e/bundled-channel-runtime-deps-docker.sh.
|
||||
|
||||
run_root_owned_global_scenario() {
|
||||
echo "Running bundled channel root-owned global install Docker E2E..."
|
||||
run_bundled_channel_container bundled-channel-root-owned "$DOCKER_RUN_TIMEOUT" \
|
||||
--user root \
|
||||
-e COREPACK_ENABLE_DOWNLOAD_PROMPT=0 \
|
||||
"${DOCKER_E2E_PACKAGE_ARGS[@]}" \
|
||||
-i "$IMAGE_NAME" bash -s <<'EOF'
|
||||
set -euo pipefail
|
||||
|
||||
source scripts/lib/openclaw-e2e-instance.sh
|
||||
source scripts/e2e/lib/bundled-channel/common.sh
|
||||
export HOME="/root"
|
||||
export OPENAI_API_KEY="sk-openclaw-bundled-channel-root-owned-e2e"
|
||||
export OPENCLAW_NO_ONBOARD=1
|
||||
export OPENCLAW_PLUGIN_STAGE_DIR="/var/lib/openclaw/plugin-runtime-deps"
|
||||
|
||||
TOKEN="bundled-channel-root-owned-token"
|
||||
PORT="18791"
|
||||
CHANNEL="slack"
|
||||
DEP_SENTINEL="@slack/web-api"
|
||||
gateway_pid=""
|
||||
|
||||
cleanup() {
|
||||
if [ -n "${gateway_pid:-}" ] && kill -0 "$gateway_pid" 2>/dev/null; then
|
||||
kill "$gateway_pid" 2>/dev/null || true
|
||||
wait "$gateway_pid" 2>/dev/null || true
|
||||
fi
|
||||
}
|
||||
trap cleanup EXIT
|
||||
|
||||
bundled_channel_install_package /tmp/openclaw-root-owned-install.log "mounted OpenClaw package into root-owned global npm"
|
||||
|
||||
root="$(bundled_channel_package_root)"
|
||||
test -d "$root/dist/extensions/$CHANNEL"
|
||||
rm -rf "$root/dist/extensions/$CHANNEL/node_modules"
|
||||
chmod -R a-w "$root"
|
||||
mkdir -p "$OPENCLAW_PLUGIN_STAGE_DIR" /home/appuser/.openclaw
|
||||
chown -R appuser:appuser /home/appuser/.openclaw /var/lib/openclaw
|
||||
|
||||
if runuser -u appuser -- test -w "$root"; then
|
||||
echo "expected package root to be unwritable for appuser" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
OPENCLAW_BUNDLED_CHANNEL_CONFIG_PATH=/home/appuser/.openclaw/openclaw.json \
|
||||
OPENCLAW_BUNDLED_CHANNEL_SLACK_BOT_TOKEN=xoxb-bundled-channel-root-owned-token \
|
||||
OPENCLAW_BUNDLED_CHANNEL_SLACK_APP_TOKEN=xapp-bundled-channel-root-owned-token \
|
||||
bundled_channel_write_config slack
|
||||
chown appuser:appuser /home/appuser/.openclaw/openclaw.json
|
||||
|
||||
start_gateway() {
|
||||
local log_file="$1"
|
||||
: >"$log_file"
|
||||
chown appuser:appuser "$log_file"
|
||||
runuser -u appuser -- env \
|
||||
HOME=/home/appuser \
|
||||
OPENAI_API_KEY="$OPENAI_API_KEY" \
|
||||
OPENCLAW_NO_ONBOARD=1 \
|
||||
OPENCLAW_PLUGIN_STAGE_DIR="$OPENCLAW_PLUGIN_STAGE_DIR" \
|
||||
npm_config_cache=/tmp/openclaw-root-owned-npm-cache \
|
||||
bash -c 'openclaw gateway --port "$1" --bind loopback --allow-unconfigured >"$2" 2>&1' \
|
||||
bash "$PORT" "$log_file" &
|
||||
gateway_pid="$!"
|
||||
|
||||
# Cold bundled dependency staging can exceed 60s under 10-way Docker aggregate load.
|
||||
for _ in $(seq 1 1200); do
|
||||
if grep -Eq "listening on ws://|\\[gateway\\] http server listening|\\[gateway\\] ready( \\(|$)" "$log_file"; then
|
||||
return 0
|
||||
fi
|
||||
if ! kill -0 "$gateway_pid" 2>/dev/null; then
|
||||
echo "gateway exited unexpectedly" >&2
|
||||
cat "$log_file" >&2
|
||||
exit 1
|
||||
fi
|
||||
sleep 0.25
|
||||
done
|
||||
|
||||
echo "timed out waiting for gateway" >&2
|
||||
cat "$log_file" >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
wait_for_slack_provider_start() {
|
||||
for _ in $(seq 1 180); do
|
||||
if grep -Eq "\\[slack\\] \\[default\\] starting provider|An API error occurred: invalid_auth|\\[plugins\\] slack installed bundled runtime deps|\\[gateway\\] ready \\(.*\\bslack\\b" /tmp/openclaw-root-owned-gateway.log; then
|
||||
return 0
|
||||
fi
|
||||
sleep 1
|
||||
done
|
||||
echo "timed out waiting for slack provider startup" >&2
|
||||
cat /tmp/openclaw-root-owned-gateway.log >&2
|
||||
exit 1
|
||||
}
|
||||
|
||||
start_gateway /tmp/openclaw-root-owned-gateway.log
|
||||
wait_for_slack_provider_start
|
||||
|
||||
bundled_channel_assert_no_package_dep_available "$CHANNEL" "$DEP_SENTINEL" "$root"
|
||||
bundled_channel_assert_staged_dep "$CHANNEL" "$DEP_SENTINEL" /tmp/openclaw-root-owned-gateway.log
|
||||
if [ -e "$root/dist/extensions/node_modules/openclaw/package.json" ]; then
|
||||
echo "root-owned package tree was mutated with SDK alias" >&2
|
||||
find "$root/dist/extensions/node_modules/openclaw" -maxdepth 4 -type f | sort | head -80 >&2 || true
|
||||
exit 1
|
||||
fi
|
||||
if ! find "$(bundled_channel_stage_dir)" -maxdepth 12 -path "*/dist/extensions/node_modules/openclaw/package.json" -type f | grep -q .; then
|
||||
echo "missing external staged openclaw/plugin-sdk alias" >&2
|
||||
bundled_channel_dump_stage_dir
|
||||
cat /tmp/openclaw-root-owned-gateway.log >&2
|
||||
exit 1
|
||||
fi
|
||||
if grep -Eq "failed to install bundled runtime deps|Cannot find package 'openclaw'|Cannot find module 'openclaw/plugin-sdk'" /tmp/openclaw-root-owned-gateway.log; then
|
||||
echo "root-owned gateway hit bundled runtime dependency errors" >&2
|
||||
cat /tmp/openclaw-root-owned-gateway.log >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "root-owned global install Docker E2E passed"
|
||||
EOF
|
||||
}
|
||||
@@ -1,67 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Runs setup-entry runtime-dependency installation scenarios.
|
||||
# Sourced by scripts/e2e/bundled-channel-runtime-deps-docker.sh.
|
||||
|
||||
run_setup_entry_scenario() {
|
||||
echo "Running bundled channel setup-entry runtime deps Docker E2E..."
|
||||
run_bundled_channel_container_with_state \
|
||||
bundled-channel-setup-entry \
|
||||
"$DOCKER_RUN_TIMEOUT" \
|
||||
bundled-channel-setup-entry \
|
||||
"${DOCKER_E2E_PACKAGE_ARGS[@]}" \
|
||||
-i "$IMAGE_NAME" bash -s <<'EOF'
|
||||
set -euo pipefail
|
||||
|
||||
source scripts/lib/openclaw-e2e-instance.sh
|
||||
source scripts/e2e/lib/bundled-channel/common.sh
|
||||
openclaw_e2e_eval_test_state_from_b64 "${OPENCLAW_TEST_STATE_SCRIPT_B64:?missing OPENCLAW_TEST_STATE_SCRIPT_B64}"
|
||||
export NPM_CONFIG_PREFIX="$HOME/.npm-global"
|
||||
export PATH="$NPM_CONFIG_PREFIX/bin:$PATH"
|
||||
export OPENCLAW_NO_ONBOARD=1
|
||||
export OPENCLAW_PLUGIN_STAGE_DIR="$HOME/.openclaw/plugin-runtime-deps"
|
||||
mkdir -p "$OPENCLAW_PLUGIN_STAGE_DIR"
|
||||
|
||||
declare -A SETUP_ENTRY_DEP_SENTINELS=(
|
||||
[feishu]="@larksuiteoapi/node-sdk"
|
||||
[whatsapp]="@whiskeysockets/baileys"
|
||||
)
|
||||
|
||||
bundled_channel_install_package /tmp/openclaw-setup-entry-install.log
|
||||
|
||||
root="$(bundled_channel_package_root)"
|
||||
for channel in "${!SETUP_ENTRY_DEP_SENTINELS[@]}"; do
|
||||
dep_sentinel="${SETUP_ENTRY_DEP_SENTINELS[$channel]}"
|
||||
test -d "$root/dist/extensions/$channel"
|
||||
bundled_channel_assert_no_package_dep_available "$channel" "$dep_sentinel" "$root"
|
||||
done
|
||||
|
||||
echo "Probing real bundled setup entries before channel configuration..."
|
||||
node scripts/e2e/lib/bundled-channel/loader-probe.mjs setup-entries "$root" feishu whatsapp
|
||||
|
||||
for channel in "${!SETUP_ENTRY_DEP_SENTINELS[@]}"; do
|
||||
dep_sentinel="${SETUP_ENTRY_DEP_SENTINELS[$channel]}"
|
||||
bundled_channel_assert_no_package_dep_available "$channel" "$dep_sentinel" "$root"
|
||||
bundled_channel_assert_no_staged_dep "$channel" "$dep_sentinel" "setup-entry discovery installed $channel external staged deps before channel configuration"
|
||||
done
|
||||
|
||||
echo "Running packaged guided WhatsApp setup; runtime deps should be staged before finalize..."
|
||||
node scripts/e2e/lib/bundled-channel/guided-whatsapp-setup.mjs "$root"
|
||||
|
||||
bundled_channel_assert_no_package_dep_available whatsapp @whiskeysockets/baileys "$root"
|
||||
bundled_channel_assert_staged_dep whatsapp @whiskeysockets/baileys
|
||||
|
||||
echo "Configuring setup-entry channels; doctor should now install bundled runtime deps externally..."
|
||||
bundled_channel_write_config setup-entry-channels
|
||||
|
||||
openclaw doctor --fix --non-interactive >/tmp/openclaw-setup-entry-doctor.log 2>&1
|
||||
|
||||
for channel in "${!SETUP_ENTRY_DEP_SENTINELS[@]}"; do
|
||||
dep_sentinel="${SETUP_ENTRY_DEP_SENTINELS[$channel]}"
|
||||
bundled_channel_assert_no_package_dep_available "$channel" "$dep_sentinel" "$root"
|
||||
bundled_channel_assert_staged_dep "$channel" "$dep_sentinel" /tmp/openclaw-setup-entry-doctor.log
|
||||
done
|
||||
|
||||
echo "bundled channel setup-entry runtime deps Docker E2E passed"
|
||||
EOF
|
||||
}
|
||||
@@ -1,178 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
#
|
||||
# Runs baseline-to-current bundled plugin update scenarios.
|
||||
# Sourced by scripts/e2e/bundled-channel-runtime-deps-docker.sh.
|
||||
|
||||
run_update_scenario() {
|
||||
echo "Running bundled channel runtime deps Docker update E2E..."
|
||||
run_bundled_channel_container_with_state_heartbeat \
|
||||
bundled-channel-update \
|
||||
30 \
|
||||
"$DOCKER_UPDATE_RUN_TIMEOUT" \
|
||||
bundled-channel-update \
|
||||
-e OPENCLAW_BUNDLED_CHANNEL_UPDATE_BASELINE_VERSION="$UPDATE_BASELINE_VERSION" \
|
||||
-e "OPENCLAW_BUNDLED_CHANNEL_UPDATE_TARGETS=${OPENCLAW_BUNDLED_CHANNEL_UPDATE_TARGETS:-telegram,discord,slack,feishu,memory-lancedb,acpx}" \
|
||||
"${DOCKER_E2E_PACKAGE_ARGS[@]}" \
|
||||
-i "$IMAGE_NAME" bash -s <<'EOF'
|
||||
set -euo pipefail
|
||||
|
||||
source scripts/lib/openclaw-e2e-instance.sh
|
||||
source scripts/e2e/lib/bundled-channel/common.sh
|
||||
openclaw_e2e_eval_test_state_from_b64 "${OPENCLAW_TEST_STATE_SCRIPT_B64:?missing OPENCLAW_TEST_STATE_SCRIPT_B64}"
|
||||
export NPM_CONFIG_PREFIX="$HOME/.npm-global"
|
||||
export PATH="$NPM_CONFIG_PREFIX/bin:$PATH"
|
||||
export OPENAI_API_KEY="sk-openclaw-bundled-channel-update-e2e"
|
||||
export OPENCLAW_NO_ONBOARD=1
|
||||
export OPENCLAW_UPDATE_PACKAGE_SPEC=""
|
||||
export OPENCLAW_BUNDLED_CHANNEL_MEMORY_DB_PATH="~/.openclaw/memory/lancedb-update-e2e"
|
||||
|
||||
TOKEN="bundled-channel-update-token"
|
||||
PORT="18790"
|
||||
UPDATE_TARGETS="${OPENCLAW_BUNDLED_CHANNEL_UPDATE_TARGETS:-telegram,discord,slack,feishu,memory-lancedb,acpx}"
|
||||
|
||||
poison_home_npm_project() {
|
||||
printf '{"name":"openclaw-home-prefix-poison","private":true}\n' >"$HOME/package.json"
|
||||
rm -rf "$HOME/node_modules"
|
||||
mkdir -p "$HOME/node_modules"
|
||||
chmod 500 "$HOME/node_modules"
|
||||
}
|
||||
|
||||
assert_no_unknown_stage_roots() {
|
||||
if find "$(bundled_channel_stage_root)" -maxdepth 1 -type d -name 'openclaw-unknown-*' -print -quit 2>/dev/null | grep -q .; then
|
||||
echo "runtime deps created second-generation unknown stage roots" >&2
|
||||
find "$(bundled_channel_stage_root)" -maxdepth 1 -type d -name 'openclaw-*' -print | sort >&2 || true
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
package_tgz="${OPENCLAW_CURRENT_PACKAGE_TGZ:?missing OPENCLAW_CURRENT_PACKAGE_TGZ}"
|
||||
update_target="file:$package_tgz"
|
||||
candidate_version="$(node scripts/e2e/lib/bundled-channel/package-version-from-tgz.mjs "$package_tgz")"
|
||||
|
||||
assert_update_ok() {
|
||||
local json_file="$1"
|
||||
local expected_before="$2"
|
||||
node scripts/e2e/lib/bundled-channel/assert-update-result.mjs "$json_file" "$expected_before" "$candidate_version"
|
||||
}
|
||||
|
||||
run_update_and_capture() {
|
||||
local label="$1"
|
||||
local out_file="$2"
|
||||
set +e
|
||||
openclaw update --tag "$update_target" --yes --json >"$out_file" 2>"/tmp/openclaw-$label-update.stderr"
|
||||
local status=$?
|
||||
set -e
|
||||
if [ "$status" -ne 0 ]; then
|
||||
echo "openclaw update failed for $label with exit code $status" >&2
|
||||
cat "$out_file" >&2 || true
|
||||
cat "/tmp/openclaw-$label-update.stderr" >&2 || true
|
||||
exit "$status"
|
||||
fi
|
||||
}
|
||||
|
||||
should_run_update_target() {
|
||||
local target="$1"
|
||||
case ",$UPDATE_TARGETS," in
|
||||
*",all,"* | *",$target,"*) return 0 ;;
|
||||
*) return 1 ;;
|
||||
esac
|
||||
}
|
||||
|
||||
echo "Update targets: $UPDATE_TARGETS"
|
||||
bundled_channel_install_package /tmp/openclaw-update-baseline-install.log "current candidate as update baseline"
|
||||
command -v openclaw >/dev/null
|
||||
poison_home_npm_project
|
||||
baseline_root="$(bundled_channel_package_root)"
|
||||
test -d "$baseline_root/dist/extensions/telegram"
|
||||
test -d "$baseline_root/dist/extensions/feishu"
|
||||
test -d "$baseline_root/dist/extensions/acpx"
|
||||
|
||||
if should_run_update_target telegram; then
|
||||
echo "Replicating configured Telegram missing-runtime state..."
|
||||
bundled_channel_write_config telegram
|
||||
bundled_channel_assert_no_dep_available telegram grammy
|
||||
set +e
|
||||
openclaw doctor --non-interactive >/tmp/openclaw-baseline-doctor.log 2>&1
|
||||
baseline_doctor_status=$?
|
||||
set -e
|
||||
echo "baseline doctor exited with $baseline_doctor_status"
|
||||
bundled_channel_remove_runtime_dep telegram grammy
|
||||
bundled_channel_assert_no_dep_available telegram grammy
|
||||
|
||||
echo "Updating from baseline to current candidate; candidate doctor must repair Telegram deps..."
|
||||
run_update_and_capture telegram /tmp/openclaw-update-telegram.json
|
||||
cat /tmp/openclaw-update-telegram.json
|
||||
assert_update_ok /tmp/openclaw-update-telegram.json "$candidate_version"
|
||||
bundled_channel_assert_dep_available telegram grammy
|
||||
assert_no_unknown_stage_roots
|
||||
|
||||
echo "Mutating installed package: remove Telegram deps, then update-mode doctor repairs them..."
|
||||
bundled_channel_remove_runtime_dep telegram grammy
|
||||
bundled_channel_assert_no_dep_available telegram grammy
|
||||
if ! OPENCLAW_UPDATE_IN_PROGRESS=1 openclaw doctor --non-interactive >/tmp/openclaw-update-mode-doctor.log 2>&1; then
|
||||
echo "update-mode doctor failed while repairing Telegram deps" >&2
|
||||
cat /tmp/openclaw-update-mode-doctor.log >&2
|
||||
exit 1
|
||||
fi
|
||||
bundled_channel_assert_dep_available telegram grammy
|
||||
assert_no_unknown_stage_roots
|
||||
fi
|
||||
|
||||
if should_run_update_target discord; then
|
||||
echo "Mutating config to Discord and rerunning same-version update path..."
|
||||
bundled_channel_write_config discord
|
||||
bundled_channel_remove_runtime_dep discord discord-api-types
|
||||
bundled_channel_assert_no_dep_available discord discord-api-types
|
||||
run_update_and_capture discord /tmp/openclaw-update-discord.json
|
||||
cat /tmp/openclaw-update-discord.json
|
||||
assert_update_ok /tmp/openclaw-update-discord.json "$candidate_version"
|
||||
bundled_channel_assert_dep_available discord discord-api-types
|
||||
fi
|
||||
|
||||
if should_run_update_target slack; then
|
||||
echo "Mutating config to Slack and rerunning same-version update path..."
|
||||
bundled_channel_write_config slack
|
||||
bundled_channel_remove_runtime_dep slack @slack/web-api
|
||||
bundled_channel_assert_no_dep_available slack @slack/web-api
|
||||
run_update_and_capture slack /tmp/openclaw-update-slack.json
|
||||
cat /tmp/openclaw-update-slack.json
|
||||
assert_update_ok /tmp/openclaw-update-slack.json "$candidate_version"
|
||||
bundled_channel_assert_dep_available slack @slack/web-api
|
||||
fi
|
||||
|
||||
if should_run_update_target feishu; then
|
||||
echo "Mutating config to Feishu and rerunning same-version update path..."
|
||||
bundled_channel_write_config feishu
|
||||
bundled_channel_remove_runtime_dep feishu @larksuiteoapi/node-sdk
|
||||
bundled_channel_assert_no_dep_available feishu @larksuiteoapi/node-sdk
|
||||
run_update_and_capture feishu /tmp/openclaw-update-feishu.json
|
||||
cat /tmp/openclaw-update-feishu.json
|
||||
assert_update_ok /tmp/openclaw-update-feishu.json "$candidate_version"
|
||||
bundled_channel_assert_dep_available feishu @larksuiteoapi/node-sdk
|
||||
fi
|
||||
|
||||
if should_run_update_target memory-lancedb; then
|
||||
echo "Mutating config to memory-lancedb and rerunning same-version update path..."
|
||||
bundled_channel_write_config memory-lancedb
|
||||
bundled_channel_remove_runtime_dep memory-lancedb @lancedb/lancedb
|
||||
bundled_channel_assert_no_dep_available memory-lancedb @lancedb/lancedb
|
||||
run_update_and_capture memory-lancedb /tmp/openclaw-update-memory-lancedb.json
|
||||
cat /tmp/openclaw-update-memory-lancedb.json
|
||||
assert_update_ok /tmp/openclaw-update-memory-lancedb.json "$candidate_version"
|
||||
bundled_channel_assert_dep_available memory-lancedb @lancedb/lancedb
|
||||
fi
|
||||
|
||||
if should_run_update_target acpx; then
|
||||
echo "Removing ACPX runtime package and rerunning same-version update path..."
|
||||
bundled_channel_write_config acpx
|
||||
bundled_channel_remove_runtime_dep acpx acpx
|
||||
bundled_channel_assert_no_dep_available acpx acpx
|
||||
run_update_and_capture acpx /tmp/openclaw-update-acpx.json
|
||||
cat /tmp/openclaw-update-acpx.json
|
||||
assert_update_ok /tmp/openclaw-update-acpx.json "$candidate_version"
|
||||
bundled_channel_assert_dep_available acpx acpx
|
||||
fi
|
||||
|
||||
echo "bundled channel runtime deps Docker update E2E passed"
|
||||
EOF
|
||||
}
|
||||
@@ -1,190 +0,0 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
|
||||
const mode = process.argv[2];
|
||||
const token = process.argv[3];
|
||||
const port = Number(process.argv[4]);
|
||||
const configPath =
|
||||
process.env.OPENCLAW_BUNDLED_CHANNEL_CONFIG_PATH ||
|
||||
path.join(process.env.HOME, ".openclaw", "openclaw.json");
|
||||
const config = fs.existsSync(configPath) ? JSON.parse(fs.readFileSync(configPath, "utf8")) : {};
|
||||
|
||||
if (mode === "disabled-config") {
|
||||
const stateDir = path.dirname(configPath);
|
||||
const disabledConfig = {
|
||||
gateway: {
|
||||
mode: "local",
|
||||
auth: {
|
||||
mode: "token",
|
||||
token: "disabled-config-runtime-deps-token",
|
||||
},
|
||||
},
|
||||
plugins: {
|
||||
enabled: true,
|
||||
entries: {
|
||||
discord: { enabled: false },
|
||||
},
|
||||
},
|
||||
channels: {
|
||||
telegram: {
|
||||
enabled: false,
|
||||
botToken: "123456:disabled-config-token",
|
||||
dmPolicy: "disabled",
|
||||
groupPolicy: "disabled",
|
||||
},
|
||||
slack: {
|
||||
enabled: false,
|
||||
botToken: "xoxb-disabled-config-token",
|
||||
appToken: "xapp-disabled-config-token",
|
||||
},
|
||||
discord: {
|
||||
enabled: true,
|
||||
token: "disabled-plugin-entry-token",
|
||||
dmPolicy: "disabled",
|
||||
groupPolicy: "disabled",
|
||||
},
|
||||
},
|
||||
};
|
||||
fs.mkdirSync(path.join(stateDir, "agents", "main", "sessions"), { recursive: true });
|
||||
fs.writeFileSync(configPath, `${JSON.stringify(disabledConfig, null, 2)}\n`, "utf8");
|
||||
fs.chmodSync(stateDir, 0o700);
|
||||
fs.chmodSync(configPath, 0o600);
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
config.gateway = {
|
||||
...config.gateway,
|
||||
port,
|
||||
auth: { mode: "token", token },
|
||||
controlUi: { enabled: false },
|
||||
};
|
||||
config.agents = {
|
||||
...config.agents,
|
||||
defaults: {
|
||||
...config.agents?.defaults,
|
||||
model: { primary: "openai/gpt-4.1-mini" },
|
||||
},
|
||||
};
|
||||
config.models = {
|
||||
...config.models,
|
||||
providers: {
|
||||
...config.models?.providers,
|
||||
openai: {
|
||||
...config.models?.providers?.openai,
|
||||
apiKey: process.env.OPENAI_API_KEY,
|
||||
baseUrl: "https://api.openai.com/v1",
|
||||
models: [],
|
||||
},
|
||||
},
|
||||
};
|
||||
config.plugins = {
|
||||
...config.plugins,
|
||||
enabled: true,
|
||||
};
|
||||
config.channels = {
|
||||
...config.channels,
|
||||
telegram: {
|
||||
...config.channels?.telegram,
|
||||
enabled: mode === "telegram",
|
||||
botToken:
|
||||
process.env.OPENCLAW_BUNDLED_CHANNEL_TELEGRAM_TOKEN || "123456:bundled-channel-update-token",
|
||||
dmPolicy: "disabled",
|
||||
groupPolicy: "disabled",
|
||||
},
|
||||
discord: {
|
||||
...config.channels?.discord,
|
||||
enabled: mode === "discord",
|
||||
dmPolicy: "disabled",
|
||||
groupPolicy: "disabled",
|
||||
},
|
||||
slack: {
|
||||
...config.channels?.slack,
|
||||
enabled: mode === "slack",
|
||||
botToken:
|
||||
process.env.OPENCLAW_BUNDLED_CHANNEL_SLACK_BOT_TOKEN || "xoxb-bundled-channel-update-token",
|
||||
appToken:
|
||||
process.env.OPENCLAW_BUNDLED_CHANNEL_SLACK_APP_TOKEN || "xapp-bundled-channel-update-token",
|
||||
},
|
||||
feishu: {
|
||||
...config.channels?.feishu,
|
||||
enabled: mode === "feishu",
|
||||
},
|
||||
};
|
||||
if (mode === "memory-lancedb") {
|
||||
config.plugins = {
|
||||
...config.plugins,
|
||||
enabled: true,
|
||||
allow: [...new Set([...(config.plugins?.allow || []), "memory-lancedb"])],
|
||||
slots: {
|
||||
...config.plugins?.slots,
|
||||
memory: "memory-lancedb",
|
||||
},
|
||||
entries: {
|
||||
...config.plugins?.entries,
|
||||
"memory-lancedb": {
|
||||
...config.plugins?.entries?.["memory-lancedb"],
|
||||
enabled: true,
|
||||
config: {
|
||||
...config.plugins?.entries?.["memory-lancedb"]?.config,
|
||||
embedding: {
|
||||
...config.plugins?.entries?.["memory-lancedb"]?.config?.embedding,
|
||||
apiKey: process.env.OPENAI_API_KEY,
|
||||
model: "text-embedding-3-small",
|
||||
},
|
||||
dbPath:
|
||||
process.env.OPENCLAW_BUNDLED_CHANNEL_MEMORY_DB_PATH || "~/.openclaw/memory/lancedb-e2e",
|
||||
autoCapture: false,
|
||||
autoRecall: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
if (mode === "acpx") {
|
||||
config.plugins = {
|
||||
...config.plugins,
|
||||
enabled: true,
|
||||
allow:
|
||||
Array.isArray(config.plugins?.allow) && config.plugins.allow.length > 0
|
||||
? [...new Set([...config.plugins.allow, "acpx"])]
|
||||
: config.plugins?.allow,
|
||||
entries: {
|
||||
...config.plugins?.entries,
|
||||
acpx: {
|
||||
...config.plugins?.entries?.acpx,
|
||||
enabled: true,
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
if (mode === "setup-entry-channels") {
|
||||
config.plugins = {
|
||||
...config.plugins,
|
||||
enabled: true,
|
||||
entries: {
|
||||
...config.plugins?.entries,
|
||||
feishu: {
|
||||
...config.plugins?.entries?.feishu,
|
||||
enabled: true,
|
||||
},
|
||||
whatsapp: {
|
||||
...config.plugins?.entries?.whatsapp,
|
||||
enabled: true,
|
||||
},
|
||||
},
|
||||
};
|
||||
config.channels = {
|
||||
...config.channels,
|
||||
feishu: {
|
||||
...config.channels?.feishu,
|
||||
enabled: true,
|
||||
},
|
||||
whatsapp: {
|
||||
...config.channels?.whatsapp,
|
||||
enabled: true,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
fs.mkdirSync(path.dirname(configPath), { recursive: true });
|
||||
fs.writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");
|
||||
@@ -1,42 +0,0 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
|
||||
const [pluginDir] = process.argv.slice(2);
|
||||
if (!pluginDir) {
|
||||
throw new Error("usage: write-load-failure-fixture.mjs <plugin-dir>");
|
||||
}
|
||||
|
||||
const writeJson = (filename, contents) =>
|
||||
fs.writeFileSync(path.join(pluginDir, filename), `${JSON.stringify(contents, null, 2)}\n`);
|
||||
|
||||
fs.mkdirSync(pluginDir, { recursive: true });
|
||||
writeJson("package.json", {
|
||||
name: "@openclaw/load-failure-alpha",
|
||||
version: "2026.4.21",
|
||||
private: true,
|
||||
type: "module",
|
||||
openclaw: { extensions: ["./index.js"], setupEntry: "./setup-entry.js" },
|
||||
});
|
||||
writeJson("openclaw.plugin.json", {
|
||||
id: "load-failure-alpha",
|
||||
channels: ["load-failure-alpha"],
|
||||
configSchema: { type: "object", additionalProperties: false, properties: {} },
|
||||
});
|
||||
fs.writeFileSync(
|
||||
path.join(pluginDir, "index.js"),
|
||||
`export default {
|
||||
kind: "bundled-channel-entry", id: "load-failure-alpha", name: "Load Failure Alpha", description: "Load Failure Alpha", register() {},
|
||||
loadChannelSecrets() { globalThis.__loadFailureSecrets = (globalThis.__loadFailureSecrets ?? 0) + 1; throw new Error("synthetic channel secrets failure"); },
|
||||
loadChannelPlugin() { globalThis.__loadFailurePlugin = (globalThis.__loadFailurePlugin ?? 0) + 1; throw new Error("synthetic channel plugin failure"); }
|
||||
};
|
||||
`,
|
||||
);
|
||||
fs.writeFileSync(
|
||||
path.join(pluginDir, "setup-entry.js"),
|
||||
`export default {
|
||||
kind: "bundled-channel-setup-entry",
|
||||
loadSetupSecrets() { globalThis.__loadFailureSetupSecrets = (globalThis.__loadFailureSetupSecrets ?? 0) + 1; throw new Error("synthetic setup secrets failure"); },
|
||||
loadSetupPlugin() { globalThis.__loadFailureSetup = (globalThis.__loadFailureSetup ?? 0) + 1; throw new Error("synthetic setup plugin failure"); }
|
||||
};
|
||||
`,
|
||||
);
|
||||
@@ -565,12 +565,7 @@ function findReadyLogIndex(logPath) {
|
||||
function assertNoPostReadyRuntimeDepsWork(logPath, readyIndex) {
|
||||
const log = fs.existsSync(logPath) ? fs.readFileSync(logPath, "utf8") : "";
|
||||
const postReady = log.slice(Math.max(0, readyIndex));
|
||||
const forbidden = [
|
||||
/\[plugins\].*installed bundled runtime deps/iu,
|
||||
/\[plugins\].*installing bundled runtime deps/iu,
|
||||
/\[plugins\].*staging bundled runtime deps/iu,
|
||||
/\b(?:npm|pnpm|yarn|corepack) install\b/iu,
|
||||
];
|
||||
const forbidden = [/\b(?:npm|pnpm|yarn|corepack) install\b/iu];
|
||||
const match = forbidden.find((pattern) => pattern.test(postReady));
|
||||
if (match) {
|
||||
throw new Error(`post-ready runtime dependency work matched ${match}: ${tailText(postReady)}`);
|
||||
@@ -578,14 +573,7 @@ function assertNoPostReadyRuntimeDepsWork(logPath, readyIndex) {
|
||||
}
|
||||
|
||||
function assertNoRuntimeDepsLocks() {
|
||||
const roots = [
|
||||
...(process.env.OPENCLAW_PLUGIN_STAGE_DIR ? [process.env.OPENCLAW_PLUGIN_STAGE_DIR] : []),
|
||||
path.join(
|
||||
process.env.OPENCLAW_STATE_DIR || path.join(process.env.HOME || os.homedir(), ".openclaw"),
|
||||
"plugin-runtime-deps",
|
||||
),
|
||||
path.join(process.cwd(), "dist", "extensions"),
|
||||
];
|
||||
const roots = [path.join(process.cwd(), "dist", "extensions")];
|
||||
for (const root of roots) {
|
||||
if (!fs.existsSync(root)) {
|
||||
continue;
|
||||
|
||||
@@ -15,8 +15,6 @@ fi
|
||||
export OPENCLAW_ENTRY
|
||||
|
||||
openclaw_e2e_eval_test_state_from_b64 "${OPENCLAW_TEST_STATE_SCRIPT_B64:?missing OPENCLAW_TEST_STATE_SCRIPT_B64}"
|
||||
OPENCLAW_PLUGIN_STAGE_BASE_DIR="${OPENCLAW_PLUGIN_STAGE_DIR:-$HOME/.openclaw/plugin-runtime-deps}"
|
||||
mkdir -p "$OPENCLAW_PLUGIN_STAGE_BASE_DIR"
|
||||
|
||||
probe="scripts/e2e/lib/bundled-plugin-install-uninstall/probe.mjs"
|
||||
runtime_smoke="scripts/e2e/lib/bundled-plugin-install-uninstall/runtime-smoke.mjs"
|
||||
@@ -33,8 +31,6 @@ echo "Selected ${#plugin_entries[@]} bundled plugins for shard ${OPENCLAW_BUNDLE
|
||||
plugin_index=0
|
||||
for plugin_entry in "${plugin_entries[@]}"; do
|
||||
IFS=$'\t' read -r plugin_id plugin_dir requires_config <<<"$plugin_entry"
|
||||
export OPENCLAW_PLUGIN_STAGE_DIR="$OPENCLAW_PLUGIN_STAGE_BASE_DIR/$plugin_index-$plugin_id"
|
||||
mkdir -p "$OPENCLAW_PLUGIN_STAGE_DIR"
|
||||
install_log="/tmp/openclaw-install-${plugin_index}.log"
|
||||
uninstall_log="/tmp/openclaw-uninstall-${plugin_index}.log"
|
||||
plugin_started_at="$(date +%s)"
|
||||
|
||||
@@ -8,30 +8,46 @@ if (!url || !token) {
|
||||
throw new Error("missing GW_URL/GW_TOKEN");
|
||||
}
|
||||
|
||||
const CONNECT_READY_TIMEOUT_MS = Number.parseInt(
|
||||
process.env.OPENCLAW_GATEWAY_NETWORK_CONNECT_READY_TIMEOUT_MS || "60000",
|
||||
const deadlineMs = Number.parseInt(
|
||||
process.env.OPENCLAW_GATEWAY_NETWORK_CLIENT_CONNECT_TIMEOUT_MS ??
|
||||
process.env.OPENCLAW_GATEWAY_NETWORK_CONNECT_READY_TIMEOUT_MS ??
|
||||
"80000",
|
||||
10,
|
||||
);
|
||||
if (!Number.isFinite(deadlineMs) || deadlineMs < 0) {
|
||||
throw new Error(`invalid gateway network client timeout: ${String(deadlineMs)}`);
|
||||
}
|
||||
const deadline = Date.now() + Math.max(1_000, deadlineMs);
|
||||
|
||||
async function openSocket() {
|
||||
function delay(ms) {
|
||||
return new Promise((resolve) => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
async function openSocket(timeoutMs = 10_000) {
|
||||
const ws = new WebSocket(url);
|
||||
await new Promise((resolve, reject) => {
|
||||
const timer = setTimeout(() => reject(new Error("ws open timeout")), 30_000);
|
||||
const timer = setTimeout(() => {
|
||||
ws.close();
|
||||
reject(new Error("ws open timeout"));
|
||||
}, timeoutMs);
|
||||
ws.once("open", () => {
|
||||
clearTimeout(timer);
|
||||
resolve();
|
||||
});
|
||||
ws.once("error", (error) => {
|
||||
clearTimeout(timer);
|
||||
reject(error);
|
||||
reject(error instanceof Error ? error : new Error(String(error)));
|
||||
});
|
||||
});
|
||||
return ws;
|
||||
}
|
||||
|
||||
function onceFrame(ws, filter, timeoutMs = 30_000) {
|
||||
function onceFrame(ws, filter, timeoutMs = 10_000) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const timer = setTimeout(() => reject(new Error("timeout")), timeoutMs);
|
||||
const timer = setTimeout(() => {
|
||||
ws.off("message", handler);
|
||||
reject(new Error("timeout"));
|
||||
}, timeoutMs);
|
||||
const handler = (data) => {
|
||||
const obj = JSON.parse(String(data));
|
||||
if (!filter(obj)) {
|
||||
@@ -45,52 +61,51 @@ function onceFrame(ws, filter, timeoutMs = 30_000) {
|
||||
});
|
||||
}
|
||||
|
||||
async function attemptConnect() {
|
||||
const ws = await openSocket();
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "req",
|
||||
id: "c1",
|
||||
method: "connect",
|
||||
params: {
|
||||
minProtocol: PROTOCOL_VERSION,
|
||||
maxProtocol: PROTOCOL_VERSION,
|
||||
client: {
|
||||
id: "test",
|
||||
displayName: "docker-net-e2e",
|
||||
version: "dev",
|
||||
platform: process.platform,
|
||||
mode: "test",
|
||||
},
|
||||
caps: [],
|
||||
auth: { token },
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
const connectRes = await onceFrame(ws, (frame) => frame?.type === "res" && frame?.id === "c1");
|
||||
if (connectRes.ok) {
|
||||
ws.close();
|
||||
return;
|
||||
}
|
||||
ws.close();
|
||||
throw new Error(`connect failed: ${connectRes.error?.message ?? "unknown"}`);
|
||||
}
|
||||
|
||||
const startedAt = Date.now();
|
||||
let lastError;
|
||||
while (Date.now() - startedAt < CONNECT_READY_TIMEOUT_MS) {
|
||||
while (Date.now() < deadline) {
|
||||
let ws;
|
||||
try {
|
||||
await attemptConnect();
|
||||
console.log("ok");
|
||||
process.exit(0);
|
||||
} catch (error) {
|
||||
lastError = error;
|
||||
if (!String(error).includes("gateway starting")) {
|
||||
throw error;
|
||||
ws = await openSocket();
|
||||
ws.send(
|
||||
JSON.stringify({
|
||||
type: "req",
|
||||
id: "c1",
|
||||
method: "connect",
|
||||
params: {
|
||||
minProtocol: PROTOCOL_VERSION,
|
||||
maxProtocol: PROTOCOL_VERSION,
|
||||
client: {
|
||||
id: "test",
|
||||
displayName: "docker-net-e2e",
|
||||
version: "dev",
|
||||
platform: process.platform,
|
||||
mode: "test",
|
||||
},
|
||||
caps: [],
|
||||
auth: { token },
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
const connectRes = await onceFrame(ws, (frame) => frame?.type === "res" && frame?.id === "c1");
|
||||
if (connectRes.ok) {
|
||||
ws.close();
|
||||
console.log("ok");
|
||||
process.exit(0);
|
||||
}
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
|
||||
const message = connectRes.error?.message ?? "unknown";
|
||||
lastError = new Error(`connect failed: ${message}`);
|
||||
if (!message.includes("gateway starting")) {
|
||||
throw lastError;
|
||||
}
|
||||
} catch (error) {
|
||||
lastError = error instanceof Error ? error : new Error(String(error));
|
||||
} finally {
|
||||
ws?.close();
|
||||
}
|
||||
|
||||
await delay(500);
|
||||
}
|
||||
|
||||
throw lastError ?? new Error("connect failed");
|
||||
throw lastError ?? new Error("connect failed: timeout");
|
||||
|
||||
@@ -144,7 +144,7 @@ const expectMissing = (listValue, expected, field) => {
|
||||
}
|
||||
};
|
||||
|
||||
const INVALID_PROBE_DIAGNOSTIC_SURFACE_MODES = new Set(["full", "adversarial"]);
|
||||
const INVALID_PROBE_DIAGNOSTIC_SURFACE_MODES = new Set(["full", "conformance", "adversarial"]);
|
||||
|
||||
function assertExpectedDiagnostics(surfaceMode, errorMessages) {
|
||||
const expectedErrorMessages = new Set([
|
||||
|
||||
@@ -253,10 +253,11 @@ function assertGitPlugin() {
|
||||
if (!installPath || !fs.existsSync(installPath)) {
|
||||
throw new Error(`git install path missing on disk: ${installPath}`);
|
||||
}
|
||||
const extensionsRoot = path.join(process.env.HOME, ".openclaw", "extensions");
|
||||
if (!installPath.startsWith(`${extensionsRoot}${path.sep}`)) {
|
||||
throw new Error(`git install path is outside managed extensions root: ${installPath}`);
|
||||
const gitRoot = path.join(process.env.HOME, ".openclaw", "git");
|
||||
if (!installPath.endsWith(`${path.sep}repo`)) {
|
||||
throw new Error(`git install path should point at cloned repo root: ${installPath}`);
|
||||
}
|
||||
assertRealPathInside(gitRoot, installPath, "git install path");
|
||||
}
|
||||
|
||||
function assertRealPathInside(parentPath, childPath, label) {
|
||||
|
||||
@@ -285,10 +285,19 @@ function assertStateSurvived() {
|
||||
fs.existsSync(path.join(stateDir, "agents", "main", "sessions", "legacy-session.json")),
|
||||
"legacy session file missing",
|
||||
);
|
||||
assert(
|
||||
fs.existsSync(path.join(stateDir, "plugin-runtime-deps", "discord")),
|
||||
"plugin runtime deps root missing",
|
||||
);
|
||||
const stage = process.env.OPENCLAW_UPGRADE_SURVIVOR_ASSERT_STAGE || "survival";
|
||||
const legacyRuntimeRoot = path.join(stateDir, "plugin-runtime-deps");
|
||||
if (stage === "baseline") {
|
||||
assert(
|
||||
fs.existsSync(path.join(legacyRuntimeRoot, "discord")),
|
||||
"legacy plugin runtime deps root missing before doctor cleanup",
|
||||
);
|
||||
} else {
|
||||
assert(
|
||||
!fs.existsSync(legacyRuntimeRoot),
|
||||
`legacy plugin runtime deps root survived update/doctor: ${legacyRuntimeRoot}`,
|
||||
);
|
||||
}
|
||||
if (scenario === "bootstrap-persona") {
|
||||
for (const [fileName, contents] of PERSONA_FILES) {
|
||||
const actual = fs.readFileSync(path.join(workspace, fileName), "utf8");
|
||||
@@ -296,7 +305,6 @@ function assertStateSurvived() {
|
||||
}
|
||||
}
|
||||
if (scenario === "versioned-runtime-deps") {
|
||||
const stage = process.env.OPENCLAW_UPGRADE_SURVIVOR_ASSERT_STAGE || "survival";
|
||||
if (stage === "baseline") {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -26,16 +26,31 @@ const baseUrl = option("--base-url");
|
||||
const probePath = option("--path");
|
||||
const expectKind = option("--expect");
|
||||
const out = option("--out");
|
||||
const url = new URL(probePath, baseUrl).toString();
|
||||
const timeoutMs = Number.parseInt(
|
||||
process.env.OPENCLAW_UPGRADE_SURVIVOR_PROBE_TIMEOUT_MS || "60000",
|
||||
option("--timeout-ms", process.env.OPENCLAW_UPGRADE_SURVIVOR_PROBE_TIMEOUT_MS || "60000"),
|
||||
10,
|
||||
);
|
||||
const url = new URL(probePath, baseUrl).toString();
|
||||
|
||||
if (!Number.isFinite(timeoutMs) || timeoutMs < 0) {
|
||||
throw new Error(`invalid --timeout-ms: ${String(timeoutMs)}`);
|
||||
}
|
||||
if (expectKind !== "live" && expectKind !== "ready") {
|
||||
throw new Error(`unknown probe expectation: ${expectKind}`);
|
||||
}
|
||||
|
||||
function matchesExpectation(body) {
|
||||
if (expectKind === "live") {
|
||||
return body?.ok === true && body?.status === "live";
|
||||
}
|
||||
return body?.ready === true;
|
||||
}
|
||||
|
||||
const startedAt = Date.now();
|
||||
let lastError;
|
||||
while (Date.now() - startedAt < timeoutMs) {
|
||||
const attemptStartedAt = Date.now();
|
||||
let lastResult;
|
||||
|
||||
while (Date.now() - startedAt <= timeoutMs) {
|
||||
try {
|
||||
const response = await fetch(url, { method: "GET" });
|
||||
const text = await response.text();
|
||||
@@ -45,34 +60,31 @@ while (Date.now() - startedAt < timeoutMs) {
|
||||
} catch (error) {
|
||||
throw new Error(`${url} returned non-JSON probe body: ${String(error)}`, { cause: error });
|
||||
}
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`${url} probe failed with HTTP ${response.status}: ${text}`);
|
||||
}
|
||||
if (expectKind === "live") {
|
||||
if (body?.ok !== true || body?.status !== "live") {
|
||||
throw new Error(`${url} did not report live status: ${text}`);
|
||||
}
|
||||
} else if (expectKind === "ready") {
|
||||
if (body?.ready !== true) {
|
||||
throw new Error(`${url} did not report ready status: ${text}`);
|
||||
}
|
||||
} else {
|
||||
throw new Error(`unknown probe expectation: ${expectKind}`);
|
||||
}
|
||||
|
||||
writeJson(out, {
|
||||
lastResult = {
|
||||
body,
|
||||
elapsedMs: Date.now() - startedAt,
|
||||
path: probePath,
|
||||
status: response.status,
|
||||
url,
|
||||
});
|
||||
process.exit(0);
|
||||
text,
|
||||
};
|
||||
if (response.ok && matchesExpectation(body)) {
|
||||
writeJson(out, {
|
||||
body,
|
||||
elapsedMs: Date.now() - startedAt,
|
||||
path: probePath,
|
||||
status: response.status,
|
||||
url,
|
||||
});
|
||||
process.exit(0);
|
||||
}
|
||||
lastError = response.ok
|
||||
? `${url} did not report ${expectKind} status: ${text}`
|
||||
: `${url} probe failed with HTTP ${response.status}: ${text}`;
|
||||
} catch (error) {
|
||||
lastError = error;
|
||||
const elapsedMs = Date.now() - attemptStartedAt;
|
||||
await new Promise((resolve) => setTimeout(resolve, Math.max(100, 500 - elapsedMs)));
|
||||
lastError = error instanceof Error ? error.message : String(error);
|
||||
}
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
}
|
||||
throw lastError ?? new Error(`${url} probe timed out`);
|
||||
|
||||
const suffix = lastResult ? ` (last HTTP ${lastResult.status}: ${lastResult.text})` : "";
|
||||
throw new Error(
|
||||
`${url} probe did not satisfy ${expectKind} within ${timeoutMs}ms: ${lastError ?? "no response"}${suffix}`,
|
||||
);
|
||||
|
||||
@@ -302,7 +302,7 @@ assert_legacy_runtime_deps_symlink_repaired() {
|
||||
local target_dir
|
||||
target_dir="$(legacy_runtime_deps_symlink_target "$plugin")"
|
||||
if [ -L "$target_dir" ]; then
|
||||
echo "legacy runtime deps symlink survived package update: $target_dir -> $(readlink "$target_dir")" >&2
|
||||
echo "legacy runtime deps symlink survived update/doctor: $target_dir -> $(readlink "$target_dir")" >&2
|
||||
return 1
|
||||
fi
|
||||
echo "Legacy runtime deps symlink repaired for $plugin."
|
||||
@@ -536,8 +536,8 @@ phase assert-baseline assert_baseline_state
|
||||
phase seed-legacy-runtime-deps-symlink seed_legacy_runtime_deps_symlink
|
||||
phase resolve-candidate resolve_candidate_version
|
||||
phase update-candidate update_candidate
|
||||
phase assert-legacy-runtime-deps-symlink-repaired assert_legacy_runtime_deps_symlink_repaired
|
||||
phase doctor run_doctor
|
||||
phase assert-legacy-runtime-deps-symlink-repaired assert_legacy_runtime_deps_symlink_repaired
|
||||
phase validate-post-doctor-config validate_post_doctor_config
|
||||
phase assert-survival assert_survival
|
||||
phase gateway-start start_gateway
|
||||
|
||||
@@ -134,7 +134,7 @@ node scripts/e2e/lib/npm-onboard-channel-agent/assertions.mjs assert-channel-con
|
||||
|
||||
echo "Running doctor after channel activation..."
|
||||
openclaw doctor --repair --non-interactive >/tmp/openclaw-doctor.log 2>&1
|
||||
openclaw_e2e_assert_dep_present "$DEP_SENTINEL" "$package_root" "$HOME/.openclaw"
|
||||
openclaw_e2e_assert_dep_absent "$DEP_SENTINEL" "$package_root" "$HOME/.openclaw"
|
||||
|
||||
echo "Running local agent turn against mocked OpenAI..."
|
||||
openclaw agent --local \
|
||||
|
||||
@@ -304,12 +304,6 @@ if [ "${OPENCLAW_NPM_TELEGRAM_SKIP_HOTPATH:-0}" != "1" ]; then
|
||||
openclaw channels add --channel telegram --token "123456:openclaw-npm-telegram-hotpath" >/tmp/openclaw-npm-telegram-channel-add.log 2>&1 </dev/null
|
||||
openclaw doctor --fix --non-interactive >/tmp/openclaw-npm-telegram-doctor-fix.log 2>&1 </dev/null
|
||||
openclaw doctor --non-interactive >/tmp/openclaw-npm-telegram-doctor-check.log 2>&1 </dev/null
|
||||
if grep -F -q "Bundled plugin runtime deps are missing." /tmp/openclaw-npm-telegram-doctor-check.log; then
|
||||
exit 1
|
||||
fi
|
||||
if grep -F -q "Failed to install bundled plugin runtime deps" /tmp/openclaw-npm-telegram-doctor-fix.log; then
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
export OPENCLAW_NPM_TELEGRAM_SUT_COMMAND="$(command -v openclaw)"
|
||||
|
||||
@@ -40,7 +40,7 @@ ${this.input.guestNode} ${this.input.guestOpenClawEntry} config set channels.dis
|
||||
${this.input.guestNode} ${this.input.guestOpenClawEntry} config set channels.discord.enabled true
|
||||
${this.input.guestNode} ${this.input.guestOpenClawEntry} config set channels.discord.groupPolicy allowlist
|
||||
${this.input.guestNode} ${this.input.guestOpenClawEntry} config set channels.discord.guilds ${shellQuote(guilds)} --strict-json
|
||||
${this.input.guestNode} ${this.input.guestOpenClawEntry} plugins deps --repair
|
||||
${this.input.guestNode} ${this.input.guestOpenClawEntry} doctor --fix --yes --non-interactive
|
||||
${this.input.guestNode} ${this.input.guestOpenClawEntry} gateway restart
|
||||
${this.input.guestNode} ${this.input.guestOpenClawEntry} channels status --probe --json`);
|
||||
}
|
||||
|
||||
@@ -324,7 +324,6 @@ class MacosSmoke {
|
||||
destination: this.tgzDir,
|
||||
packageSpec: this.options.targetPackageSpec,
|
||||
requireControlUi: true,
|
||||
stageRuntimeDeps: !this.options.targetPackageSpec,
|
||||
});
|
||||
if (this.options.targetPackageSpec) {
|
||||
this.targetExpectVersion =
|
||||
|
||||
@@ -285,7 +285,6 @@ class NpmUpdateSmoke {
|
||||
this.artifact = await packOpenClaw({
|
||||
destination: this.tgzDir,
|
||||
requireControlUi: true,
|
||||
stageRuntimeDeps: true,
|
||||
});
|
||||
this.server = await startHostServer({
|
||||
artifactPath: this.artifact.path,
|
||||
|
||||
@@ -86,7 +86,6 @@ export async function packOpenClaw(input: {
|
||||
destination: string;
|
||||
packageSpec?: string;
|
||||
requireControlUi?: boolean;
|
||||
stageRuntimeDeps?: boolean;
|
||||
}): Promise<PackageArtifact> {
|
||||
await mkdir(input.destination, { recursive: true });
|
||||
if (input.packageSpec) {
|
||||
@@ -126,9 +125,6 @@ export async function packOpenClaw(input: {
|
||||
"--eval",
|
||||
"import { writePackageDistInventory } from './src/infra/package-dist-inventory.ts'; await writePackageDistInventory(process.cwd());",
|
||||
]);
|
||||
if (input.stageRuntimeDeps) {
|
||||
run("node", ["scripts/stage-bundled-plugin-runtime-deps.mjs"]);
|
||||
}
|
||||
const shortHead = run("git", ["rev-parse", "--short", "HEAD"], { quiet: true }).stdout.trim();
|
||||
const output = run(
|
||||
"npm",
|
||||
|
||||
@@ -18,6 +18,3 @@ export function listBundledPluginBuildEntries(
|
||||
params?: BundledPluginBuildEntryParams,
|
||||
): Record<string, string>;
|
||||
export function listBundledPluginPackArtifacts(params?: BundledPluginBuildEntryParams): string[];
|
||||
export function listBundledPluginRuntimeDependencies(
|
||||
params?: BundledPluginBuildEntryParams,
|
||||
): string[];
|
||||
|
||||
@@ -47,10 +47,6 @@ function collectPluginSourceEntries(packageJson) {
|
||||
return packageEntries.length > 0 ? packageEntries : ["./index.ts"];
|
||||
}
|
||||
|
||||
function shouldStageBundledPluginRuntimeDependencies(packageJson) {
|
||||
return packageJson?.openclaw?.bundle?.stageRuntimeDependencies === true;
|
||||
}
|
||||
|
||||
function collectTopLevelPublicSurfaceEntries(pluginDir) {
|
||||
if (!fs.existsSync(pluginDir)) {
|
||||
return [];
|
||||
@@ -166,23 +162,3 @@ export function listBundledPluginPackArtifacts(params = {}) {
|
||||
|
||||
return [...artifacts].toSorted((left, right) => left.localeCompare(right));
|
||||
}
|
||||
|
||||
export function listBundledPluginRuntimeDependencies(params = {}) {
|
||||
const runtimeDependencies = new Set();
|
||||
|
||||
for (const { packageJson } of collectBundledPluginBuildEntries(params)) {
|
||||
if (!shouldStageBundledPluginRuntimeDependencies(packageJson)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const dependencyName of Object.keys(packageJson?.dependencies ?? {})) {
|
||||
runtimeDependencies.add(dependencyName);
|
||||
}
|
||||
|
||||
for (const dependencyName of Object.keys(packageJson?.optionalDependencies ?? {})) {
|
||||
runtimeDependencies.add(dependencyName);
|
||||
}
|
||||
}
|
||||
|
||||
return [...runtimeDependencies].toSorted((left, right) => left.localeCompare(right));
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
|
||||
const JS_EXTENSIONS = new Set([".cjs", ".js", ".mjs"]);
|
||||
export function collectRuntimeDependencySpecs(packageJson = {}) {
|
||||
return new Map(
|
||||
[
|
||||
@@ -28,39 +27,22 @@ export function packageNameFromSpecifier(specifier) {
|
||||
return first.startsWith("@") && second ? `${first}/${second}` : first;
|
||||
}
|
||||
|
||||
function readJson(filePath) {
|
||||
return JSON.parse(fs.readFileSync(filePath, "utf8"));
|
||||
}
|
||||
|
||||
function collectPackageJsonPaths(rootDir) {
|
||||
if (!fs.existsSync(rootDir)) {
|
||||
return [];
|
||||
}
|
||||
return fs
|
||||
.readdirSync(rootDir, { withFileTypes: true })
|
||||
.filter((entry) => entry.isDirectory())
|
||||
.map((entry) => path.join(rootDir, entry.name, "package.json"))
|
||||
.filter((packageJsonPath) => fs.existsSync(packageJsonPath))
|
||||
.toSorted((left, right) => left.localeCompare(right));
|
||||
}
|
||||
|
||||
function usesStagedRuntimeDependencies(packageJson) {
|
||||
return packageJson?.openclaw?.bundle?.stageRuntimeDependencies === true;
|
||||
}
|
||||
|
||||
function dependencySentinelPath(packageRoot, dependencyName) {
|
||||
return path.join(packageRoot, "node_modules", ...dependencyName.split("/"), "package.json");
|
||||
}
|
||||
|
||||
function pluginIdFromPackageJsonPath(packageJsonPath) {
|
||||
return path.basename(path.dirname(packageJsonPath));
|
||||
}
|
||||
|
||||
export function collectBundledPluginRuntimeDependencySpecs(bundledPluginsDir) {
|
||||
const specs = new Map();
|
||||
|
||||
for (const packageJsonPath of collectPackageJsonPaths(bundledPluginsDir)) {
|
||||
const packageJson = readJson(packageJsonPath);
|
||||
if (!fs.existsSync(bundledPluginsDir)) {
|
||||
return specs;
|
||||
}
|
||||
|
||||
const packageJsonPaths = fs
|
||||
.readdirSync(bundledPluginsDir, { withFileTypes: true })
|
||||
.filter((entry) => entry.isDirectory())
|
||||
.map((entry) => path.join(bundledPluginsDir, entry.name, "package.json"))
|
||||
.filter((packageJsonPath) => fs.existsSync(packageJsonPath))
|
||||
.toSorted((left, right) => left.localeCompare(right));
|
||||
|
||||
for (const packageJsonPath of packageJsonPaths) {
|
||||
const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, "utf8"));
|
||||
const pluginId = path.basename(path.dirname(packageJsonPath));
|
||||
for (const [name, spec] of collectRuntimeDependencySpecs(packageJson)) {
|
||||
const existing = specs.get(name);
|
||||
@@ -78,178 +60,3 @@ export function collectBundledPluginRuntimeDependencySpecs(bundledPluginsDir) {
|
||||
|
||||
return specs;
|
||||
}
|
||||
|
||||
export function collectBuiltBundledPluginStagedRuntimeDependencyErrors(params) {
|
||||
const errors = [];
|
||||
|
||||
for (const packageJsonPath of collectPackageJsonPaths(params.bundledPluginsDir)) {
|
||||
const packageJson = readJson(packageJsonPath);
|
||||
if (!usesStagedRuntimeDependencies(packageJson)) {
|
||||
continue;
|
||||
}
|
||||
const pluginId = pluginIdFromPackageJsonPath(packageJsonPath);
|
||||
const pluginRoot = path.dirname(packageJsonPath);
|
||||
|
||||
for (const [dependencyName, spec] of collectRuntimeDependencySpecs(packageJson)) {
|
||||
if (!fs.existsSync(dependencySentinelPath(pluginRoot, dependencyName))) {
|
||||
const specText = String(spec);
|
||||
errors.push(
|
||||
`built bundled plugin '${pluginId}' is missing staged runtime dependency '${dependencyName}: ${specText}' under dist/extensions/${pluginId}/node_modules.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return errors.toSorted((left, right) => left.localeCompare(right));
|
||||
}
|
||||
|
||||
function walkJavaScriptFiles(rootDir) {
|
||||
const files = [];
|
||||
if (!fs.existsSync(rootDir)) {
|
||||
return files;
|
||||
}
|
||||
const queue = [rootDir];
|
||||
while (queue.length > 0) {
|
||||
const current = queue.shift();
|
||||
for (const entry of fs.readdirSync(current, { withFileTypes: true })) {
|
||||
const fullPath = path.join(current, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
if (entry.name === "node_modules") {
|
||||
continue;
|
||||
}
|
||||
queue.push(fullPath);
|
||||
continue;
|
||||
}
|
||||
if (entry.isFile() && JS_EXTENSIONS.has(path.extname(entry.name))) {
|
||||
files.push(fullPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
return files.toSorted((left, right) => left.localeCompare(right));
|
||||
}
|
||||
|
||||
function extractModuleSpecifiers(source) {
|
||||
const specifiers = new Set();
|
||||
const patterns = [
|
||||
/\bfrom\s*["']([^"']+)["']/g,
|
||||
/\bimport\s*["']([^"']+)["']/g,
|
||||
/\bimport\s*\(\s*["']([^"']+)["']\s*\)/g,
|
||||
/\brequire\s*\(\s*["']([^"']+)["']\s*\)/g,
|
||||
];
|
||||
for (const pattern of patterns) {
|
||||
for (const match of source.matchAll(pattern)) {
|
||||
if (match[1]) {
|
||||
specifiers.add(match[1]);
|
||||
}
|
||||
}
|
||||
}
|
||||
return specifiers;
|
||||
}
|
||||
|
||||
function isPluginOwnedDistImporter(relativePath, source, pluginIds) {
|
||||
return pluginIds.some(
|
||||
(pluginId) =>
|
||||
relativePath.startsWith(`extensions/${pluginId}/`) ||
|
||||
source.includes(`//#region extensions/${pluginId}/`),
|
||||
);
|
||||
}
|
||||
|
||||
export function collectRootDistBundledRuntimeMirrors(params) {
|
||||
const distDir = params.distDir;
|
||||
const bundledSpecs = params.bundledRuntimeDependencySpecs;
|
||||
const mirrors = new Map();
|
||||
|
||||
for (const filePath of walkJavaScriptFiles(distDir)) {
|
||||
const source = fs.readFileSync(filePath, "utf8");
|
||||
const relativePath = path.relative(distDir, filePath).replaceAll(path.sep, "/");
|
||||
for (const specifier of extractModuleSpecifiers(source)) {
|
||||
const dependencyName = packageNameFromSpecifier(specifier);
|
||||
if (!dependencyName || !bundledSpecs.has(dependencyName)) {
|
||||
continue;
|
||||
}
|
||||
const bundledSpec = bundledSpecs.get(dependencyName);
|
||||
if (isPluginOwnedDistImporter(relativePath, source, bundledSpec.pluginIds)) {
|
||||
continue;
|
||||
}
|
||||
const existing = mirrors.get(dependencyName);
|
||||
if (existing) {
|
||||
existing.importers.add(relativePath);
|
||||
continue;
|
||||
}
|
||||
mirrors.set(dependencyName, {
|
||||
importers: new Set([relativePath]),
|
||||
pluginIds: bundledSpec.pluginIds,
|
||||
spec: bundledSpec.spec,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return mirrors;
|
||||
}
|
||||
|
||||
export function collectBundledPluginRootRuntimeMirrorErrors(params) {
|
||||
const errors = [];
|
||||
const declaredRootRuntimeDeps = collectRuntimeDependencySpecs(params.rootPackageJson);
|
||||
const declaredMirrorDeps =
|
||||
params.rootPackageJson?.openclaw?.bundle?.mirroredRootRuntimeDependencies ?? [];
|
||||
const declaredMirrorDepNames = new Set(
|
||||
Array.isArray(declaredMirrorDeps)
|
||||
? declaredMirrorDeps.filter((dependencyName) => typeof dependencyName === "string")
|
||||
: [],
|
||||
);
|
||||
|
||||
for (const [dependencyName, record] of params.bundledRuntimeDependencySpecs) {
|
||||
for (const conflict of record.conflicts) {
|
||||
errors.push(
|
||||
`bundled runtime dependency '${dependencyName}' has conflicting plugin specs: ${record.pluginIds.join(", ")} use '${record.spec}', ${conflict.pluginId} uses '${conflict.spec}'.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
for (const [dependencyName, record] of params.requiredRootMirrors) {
|
||||
if (declaredRootRuntimeDeps.has(dependencyName)) {
|
||||
if (!declaredMirrorDepNames.has(dependencyName)) {
|
||||
const importerList = Array.from(record.importers)
|
||||
.toSorted((left, right) => left.localeCompare(right))
|
||||
.join(", ");
|
||||
errors.push(
|
||||
`installed package root mirror '${dependencyName}' for dist importers: ${importerList} is missing from package.json openclaw.bundle.mirroredRootRuntimeDependencies. Add it there so packaged runtime installs the mirrored dependency, or keep imports under dist/extensions/${record.pluginIds[0]}/.`,
|
||||
);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
const importerList = Array.from(record.importers)
|
||||
.toSorted((left, right) => left.localeCompare(right))
|
||||
.join(", ");
|
||||
errors.push(
|
||||
`installed package root is missing mirrored bundled runtime dependency '${dependencyName}' for dist importers: ${importerList}. Add it to package.json dependencies/optionalDependencies or keep imports under dist/extensions/${record.pluginIds[0]}/.`,
|
||||
);
|
||||
}
|
||||
|
||||
return errors.toSorted((left, right) => left.localeCompare(right));
|
||||
}
|
||||
|
||||
export function collectDeclaredRootRuntimeDependencyMetadataErrors(rootPackageJson) {
|
||||
const declaredRootRuntimeDeps = collectRuntimeDependencySpecs(rootPackageJson);
|
||||
const declaredMirrorDeps =
|
||||
rootPackageJson?.openclaw?.bundle?.mirroredRootRuntimeDependencies ?? [];
|
||||
if (!Array.isArray(declaredMirrorDeps)) {
|
||||
return ["package.json openclaw.bundle.mirroredRootRuntimeDependencies must be an array."];
|
||||
}
|
||||
|
||||
const errors = [];
|
||||
for (const dependencyName of declaredMirrorDeps) {
|
||||
if (typeof dependencyName !== "string" || dependencyName.trim().length === 0) {
|
||||
errors.push(
|
||||
"package.json openclaw.bundle.mirroredRootRuntimeDependencies entries must be non-empty strings.",
|
||||
);
|
||||
continue;
|
||||
}
|
||||
if (!declaredRootRuntimeDeps.has(dependencyName)) {
|
||||
errors.push(
|
||||
`package.json openclaw.bundle.mirroredRootRuntimeDependencies declares '${dependencyName}' but package.json dependencies/optionalDependencies do not include it.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
return errors.toSorted((left, right) => left.localeCompare(right));
|
||||
}
|
||||
|
||||
@@ -1,81 +0,0 @@
|
||||
import { spawnSync } from "node:child_process";
|
||||
|
||||
const NPM_CONFIG_KEYS_TO_RESET = new Set([
|
||||
"npm_config_global",
|
||||
"npm_config_ignore_scripts",
|
||||
"npm_config_include_workspace_root",
|
||||
"npm_config_location",
|
||||
"npm_config_prefix",
|
||||
"npm_config_workspace",
|
||||
"npm_config_workspaces",
|
||||
]);
|
||||
|
||||
export function createNestedNpmInstallEnv(env = process.env) {
|
||||
const nextEnv = { ...env };
|
||||
for (const key of Object.keys(nextEnv)) {
|
||||
if (NPM_CONFIG_KEYS_TO_RESET.has(key.toLowerCase())) {
|
||||
delete nextEnv[key];
|
||||
}
|
||||
}
|
||||
return nextEnv;
|
||||
}
|
||||
|
||||
export function createBundledRuntimeDependencyInstallEnv(env = process.env, options = {}) {
|
||||
const nextEnv = {
|
||||
...createNestedNpmInstallEnv(env),
|
||||
npm_config_dry_run: "false",
|
||||
npm_config_fetch_retries: env.npm_config_fetch_retries ?? "5",
|
||||
npm_config_fetch_retry_maxtimeout: env.npm_config_fetch_retry_maxtimeout ?? "120000",
|
||||
npm_config_fetch_retry_mintimeout: env.npm_config_fetch_retry_mintimeout ?? "10000",
|
||||
npm_config_fetch_timeout: env.npm_config_fetch_timeout ?? "300000",
|
||||
npm_config_ignore_scripts: "true",
|
||||
npm_config_legacy_peer_deps: "true",
|
||||
npm_config_package_lock: "false",
|
||||
npm_config_save: "false",
|
||||
npm_config_workspaces: "false",
|
||||
};
|
||||
if (options.ci) {
|
||||
nextEnv.CI = "1";
|
||||
}
|
||||
if (options.quiet) {
|
||||
Object.assign(nextEnv, {
|
||||
npm_config_audit: "false",
|
||||
npm_config_fund: "false",
|
||||
npm_config_loglevel: "error",
|
||||
npm_config_progress: "false",
|
||||
npm_config_yes: "true",
|
||||
});
|
||||
}
|
||||
return nextEnv;
|
||||
}
|
||||
|
||||
export function createBundledRuntimeDependencyInstallArgs(specs = [], options = {}) {
|
||||
return [
|
||||
"install",
|
||||
...(options.noAudit ? ["--no-audit"] : []),
|
||||
...(options.noFund ? ["--no-fund"] : []),
|
||||
"--ignore-scripts",
|
||||
"--workspaces=false",
|
||||
...(options.silent ? ["--silent"] : []),
|
||||
...specs,
|
||||
];
|
||||
}
|
||||
|
||||
/**
 * Runs a synchronous npm install using the runner description in
 * `params.npmRunner`. Returns nothing on success; on a non-zero exit status
 * it throws an Error whose message is the combined trimmed stderr/stdout
 * (or a generic fallback when both streams are empty).
 *
 * @param {object} params - cwd, env, stdio, timeoutMs, npmRunner
 *   { command, args, env, shell, windowsVerbatimArguments }, and an optional
 *   spawnSyncImpl used in place of child_process.spawnSync (for tests).
 * @throws {Error} when the spawned process exits with a non-zero status
 */
export function runBundledRuntimeDependencyNpmInstall(params) {
  const spawn = params.spawnSyncImpl ?? spawnSync;
  const spawnOptions = {
    cwd: params.cwd,
    encoding: "utf8",
    env: params.env ?? params.npmRunner.env ?? process.env,
    shell: params.npmRunner.shell,
    stdio: params.stdio ?? "pipe",
    windowsHide: true,
    windowsVerbatimArguments: params.npmRunner.windowsVerbatimArguments,
  };
  if (params.timeoutMs) {
    spawnOptions.timeout = params.timeoutMs;
  }
  const result = spawn(params.npmRunner.command, params.npmRunner.args, spawnOptions);
  if (result.status === 0) {
    return;
  }
  const details = [result.stderr, result.stdout].filter(Boolean).join("\n").trim();
  throw new Error(details.length > 0 ? details : "npm install failed");
}
|
||||
@@ -1,212 +0,0 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import {
|
||||
collectInstalledRuntimeDependencyRoots,
|
||||
dependencyNodeModulesPath,
|
||||
findContainingRealRoot,
|
||||
resolveInstalledDirectDependencyNames,
|
||||
selectRuntimeDependencyRootsToCopy,
|
||||
} from "./bundled-runtime-deps-package-tree.mjs";
|
||||
import { pruneStagedRuntimeDependencyCargo } from "./bundled-runtime-deps-prune.mjs";
|
||||
import {
|
||||
assertPathIsNotSymlink,
|
||||
makePluginOwnedTempDir,
|
||||
removeLegacyBundledRuntimeDepsSymlink,
|
||||
removeOwnedTempPathBestEffort,
|
||||
removePathIfExists,
|
||||
replaceDirAtomically,
|
||||
writeJsonAtomically,
|
||||
} from "./bundled-runtime-deps-stage-state.mjs";
|
||||
|
||||
// Recursively copies a dependency tree from sourcePath to targetPath,
// materializing symlinks in place (the copy contains real files, never links).
// Symlink targets are only followed when they resolve inside one of
// `allowedRealRoots`; `activeRoots` tracks which roots are already being
// materialized on the current recursion path to break symlink cycles.
// Returns true on success, false when the tree cannot be safely materialized.
function copyMaterializedDependencyTree(params) {
  const { activeRoots, allowedRealRoots, sourcePath, targetPath } = params;
  const sourceStats = fs.lstatSync(sourcePath);

  if (sourceStats.isSymbolicLink()) {
    let resolvedPath;
    try {
      resolvedPath = fs.realpathSync(sourcePath);
    } catch {
      // Broken symlink: refuse to materialize this tree.
      return false;
    }
    const containingRoot = findContainingRealRoot(resolvedPath, allowedRealRoots);
    if (containingRoot === null) {
      // Link escapes the allowed dependency roots — refuse.
      return false;
    }
    if (activeRoots.has(containingRoot)) {
      // Cycle guard: this root is already being materialized higher up the
      // recursion; treat the link as handled instead of recursing forever.
      return true;
    }
    const nextActiveRoots = new Set(activeRoots);
    nextActiveRoots.add(containingRoot);
    // Copy the link's resolved target in place of the link itself.
    return copyMaterializedDependencyTree({
      activeRoots: nextActiveRoots,
      allowedRealRoots,
      sourcePath: resolvedPath,
      targetPath,
    });
  }

  if (sourceStats.isDirectory()) {
    fs.mkdirSync(targetPath, { recursive: true });
    // Sorted iteration keeps the copy order deterministic across platforms.
    for (const entry of fs
      .readdirSync(sourcePath, { withFileTypes: true })
      .toSorted((left, right) => left.name.localeCompare(right.name))) {
      if (
        !copyMaterializedDependencyTree({
          activeRoots,
          allowedRealRoots,
          sourcePath: path.join(sourcePath, entry.name),
          targetPath: path.join(targetPath, entry.name),
        })
      ) {
        return false;
      }
    }
    return true;
  }

  if (sourceStats.isFile()) {
    fs.mkdirSync(path.dirname(targetPath), { recursive: true });
    fs.copyFileSync(sourcePath, targetPath);
    // Preserve the executable bit and other mode flags of the original file.
    fs.chmodSync(targetPath, sourceStats.mode);
    return true;
  }

  // Other entry kinds (sockets, FIFOs, …) are silently skipped.
  return true;
}
|
||||
|
||||
/**
 * Lists the built bundled-plugin directories under `<repoRoot>/dist/extensions`
 * that contain a package.json. Returns an empty array when the extensions
 * root does not exist.
 *
 * @param {string} repoRoot - repository root directory
 * @returns {string[]} absolute plugin directory paths
 */
export function listBundledPluginRuntimeDirs(repoRoot) {
  const extensionsRoot = path.join(repoRoot, "dist", "extensions");
  if (!fs.existsSync(extensionsRoot)) {
    return [];
  }
  const pluginDirs = [];
  for (const dirent of fs.readdirSync(extensionsRoot, { withFileTypes: true })) {
    if (!dirent.isDirectory()) {
      continue;
    }
    const pluginDir = path.join(extensionsRoot, dirent.name);
    if (fs.existsSync(path.join(pluginDir, "package.json"))) {
      pluginDirs.push(pluginDir);
    }
  }
  return pluginDirs;
}
|
||||
|
||||
/**
 * Resolves the workspace plugin root that actually has dependencies
 * installed. Prefers `<repoRoot>/extensions/<pluginId>` when it already has a
 * node_modules directory; otherwise follows the realpath of the workspace's
 * root node_modules to find the installed workspace copy of the plugin.
 * Falls back to the in-repo plugin root when nothing better is found.
 *
 * @param {string} repoRoot
 * @param {string} pluginId
 * @returns {string} plugin root directory to use
 */
export function resolveInstalledWorkspacePluginRoot(repoRoot, pluginId) {
  const currentPluginRoot = path.join(repoRoot, "extensions", pluginId);
  if (fs.existsSync(path.join(currentPluginRoot, "node_modules"))) {
    return currentPluginRoot;
  }

  const nodeModulesDir = path.join(repoRoot, "node_modules");
  if (!fs.existsSync(nodeModulesDir)) {
    return currentPluginRoot;
  }

  // The workspace's node_modules may be a symlink into the real install
  // location; its parent is the installed workspace root.
  let installedWorkspaceRoot;
  try {
    installedWorkspaceRoot = path.dirname(fs.realpathSync(nodeModulesDir));
  } catch {
    return currentPluginRoot;
  }

  const installedPluginRoot = path.join(installedWorkspaceRoot, "extensions", pluginId);
  const hasPackageJson = fs.existsSync(path.join(installedPluginRoot, "package.json"));
  return hasPackageJson ? installedPluginRoot : currentPluginRoot;
}
|
||||
|
||||
// Stages a plugin's runtime dependencies by copying the already-installed
// package trees from the workspace's root node_modules into
// `<pluginDir>/node_modules`, atomically. Returns true when staging (or the
// empty-deps cleanup path) completed, false when the installed tree does not
// satisfy the plugin's declared dependency specs and staging must be skipped.
export function stageInstalledRootRuntimeDeps(params) {
  const {
    directDependencyPackageRoot = null,
    cheapFingerprint,
    fingerprint,
    packageJson,
    pluginDir,
    pruneConfig,
    repoRoot,
    stampPath,
  } = params;
  // Runtime closure is dependencies + optionalDependencies of the plugin.
  const dependencySpecs = {
    ...packageJson.dependencies,
    ...packageJson.optionalDependencies,
  };
  const optionalDependencyNames = new Set(Object.keys(packageJson.optionalDependencies ?? {}));
  const rootNodeModulesDir = path.join(repoRoot, "node_modules");
  if (Object.keys(dependencySpecs).length === 0 || !fs.existsSync(rootNodeModulesDir)) {
    return false;
  }

  // Verify every direct (non-optional) dependency is installed at a
  // spec-satisfying version before doing any work.
  const directDependencyNames = resolveInstalledDirectDependencyNames(
    rootNodeModulesDir,
    dependencySpecs,
    directDependencyPackageRoot,
    optionalDependencyNames,
  );
  if (directDependencyNames === null) {
    return false;
  }
  // Walk the full transitive closure of installed package roots.
  const resolution = collectInstalledRuntimeDependencyRoots(
    rootNodeModulesDir,
    dependencySpecs,
    directDependencyPackageRoot,
    optionalDependencyNames,
  );
  if (resolution === null) {
    return false;
  }
  const rootsToCopy = selectRuntimeDependencyRootsToCopy(resolution);
  const nodeModulesDir = path.join(pluginDir, "node_modules");
  if (rootsToCopy.length === 0) {
    // Nothing to stage: remove any stale runtime deps dir and stamp success.
    removeLegacyBundledRuntimeDepsSymlink(nodeModulesDir, repoRoot);
    assertPathIsNotSymlink(nodeModulesDir, "remove runtime deps");
    removePathIfExists(nodeModulesDir);
    writeJsonAtomically(stampPath, {
      cheapFingerprint,
      fingerprint,
      generatedAt: new Date().toISOString(),
    });
    return true;
  }
  const allowedRealRoots = rootsToCopy.map((record) => record.realRoot);

  // Copy into a plugin-owned temp dir first so the final swap is atomic.
  const stagedNodeModulesDir = path.join(
    makePluginOwnedTempDir(pluginDir, "stage"),
    "node_modules",
  );

  try {
    // Sorted copy order keeps staging deterministic.
    for (const record of rootsToCopy.toSorted((left, right) =>
      left.name.localeCompare(right.name),
    )) {
      const sourcePath = record.realRoot;
      const targetPath = dependencyNodeModulesPath(stagedNodeModulesDir, record.name);
      if (targetPath === null) {
        // Invalid dependency name — refuse to stage anything.
        return false;
      }
      fs.mkdirSync(path.dirname(targetPath), { recursive: true });
      const sourceRootReal = findContainingRealRoot(sourcePath, allowedRealRoots);
      if (
        sourceRootReal === null ||
        !copyMaterializedDependencyTree({
          activeRoots: new Set([sourceRootReal]),
          allowedRealRoots,
          sourcePath,
          targetPath,
        })
      ) {
        return false;
      }
    }
    // Strip test/type/doc cargo from the staged tree before publishing it.
    pruneStagedRuntimeDependencyCargo(stagedNodeModulesDir, pruneConfig);

    removeLegacyBundledRuntimeDepsSymlink(nodeModulesDir, repoRoot);
    // Swap the staged tree into place, then record the fingerprints so the
    // next run can skip staging when nothing changed.
    replaceDirAtomically(nodeModulesDir, stagedNodeModulesDir);
    writeJsonAtomically(stampPath, {
      cheapFingerprint,
      fingerprint,
      generatedAt: new Date().toISOString(),
    });
    return true;
  } finally {
    // Best-effort cleanup of the temp stage dir on every exit path.
    removeOwnedTempPathBestEffort(path.dirname(stagedNodeModulesDir));
  }
}
|
||||
@@ -1,272 +0,0 @@
|
||||
import { createHash } from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import semverSatisfies from "semver/functions/satisfies.js";
|
||||
|
||||
/** Reads a UTF-8 file and parses it as JSON. */
function readJson(filePath) {
  const raw = fs.readFileSync(filePath, "utf8");
  return JSON.parse(raw);
}
|
||||
|
||||
/**
 * Validates an npm dependency name and splits it into filesystem path
 * segments ("pkg" -> ["pkg"], "@scope/pkg" -> ["@scope", "pkg"]).
 * Returns null for anything that is not a safe, well-formed name.
 *
 * Fix: the character class [A-Za-z0-9._-] also matches "." and "..", so the
 * previous version accepted names like ".." or "@scope/.." — which, once
 * joined onto a node_modules directory, resolve OUTSIDE that directory
 * (path traversal). Dot-only segments are now rejected everywhere, matching
 * the existing "@."/"@.." scope guard.
 */
function dependencyPathSegments(depName) {
  if (typeof depName !== "string" || depName.length === 0) {
    return null;
  }
  // A safe segment contains only allowed characters and is not "." / "..".
  const isSafeSegment = (segment) =>
    /^[A-Za-z0-9._-]+$/.test(segment) && segment !== "." && segment !== "..";
  const segments = depName.split("/");
  if (depName.startsWith("@")) {
    if (segments.length !== 2) {
      return null;
    }
    const [scope, name] = segments;
    if (
      !/^@[A-Za-z0-9._-]+$/.test(scope) ||
      scope === "@." ||
      scope === "@.." ||
      !isSafeSegment(name)
    ) {
      return null;
    }
    return [scope, name];
  }
  if (segments.length !== 1 || !isSafeSegment(segments[0])) {
    return null;
  }
  return segments;
}
|
||||
|
||||
/**
 * Maps a dependency name to its location under `nodeModulesDir`, or null
 * when the name is not a valid npm package name.
 */
export function dependencyNodeModulesPath(nodeModulesDir, depName) {
  const segments = dependencyPathSegments(depName);
  if (segments === null) {
    return null;
  }
  return path.join(nodeModulesDir, ...segments);
}
|
||||
|
||||
/** True when `installedVersion` satisfies the semver range `spec` (prerelease versions excluded). */
function dependencyVersionSatisfied(spec, installedVersion) {
  const semverOptions = { includePrerelease: false };
  return semverSatisfies(installedVersion, spec, semverOptions);
}
|
||||
|
||||
/**
 * Reads the `version` string from `<depRoot>/package.json`.
 * Returns null when the manifest is missing or its version is not a string.
 */
export function readInstalledDependencyVersionFromRoot(depRoot) {
  const packageJsonPath = path.join(depRoot, "package.json");
  if (!fs.existsSync(packageJsonPath)) {
    return null;
  }
  const { version } = readJson(packageJsonPath);
  return typeof version === "string" ? version : null;
}
|
||||
|
||||
/**
 * Finds the installed root directory for a dependency, preferring a nested
 * install under `params.parentPackageRoot/node_modules` over the workspace's
 * root node_modules. A candidate is accepted when it has a readable version
 * and either `params.enforceSpec === false` or the version satisfies
 * `params.spec`. Returns null when no candidate matches.
 */
export function resolveInstalledDependencyRoot(params) {
  const candidates = [];
  if (params.parentPackageRoot) {
    const nestedNodeModules = path.join(params.parentPackageRoot, "node_modules");
    const nestedDepRoot = dependencyNodeModulesPath(nestedNodeModules, params.depName);
    if (nestedDepRoot !== null) {
      candidates.push(nestedDepRoot);
    }
  }
  const hoistedDepRoot = dependencyNodeModulesPath(params.rootNodeModulesDir, params.depName);
  if (hoistedDepRoot !== null) {
    candidates.push(hoistedDepRoot);
  }

  for (const candidateRoot of candidates) {
    const installedVersion = readInstalledDependencyVersionFromRoot(candidateRoot);
    if (installedVersion === null) {
      continue;
    }
    const accepted =
      params.enforceSpec === false || dependencyVersionSatisfied(params.spec, installedVersion);
    if (accepted) {
      return candidateRoot;
    }
  }

  return null;
}
|
||||
|
||||
// Breadth-first walk of the installed runtime dependency closure starting
// from `dependencySpecs`. Resolves each dependency against the root
// node_modules (preferring nested installs under the parent package), then
// enqueues that package's own dependencies/optionalDependencies.
// Returns { allRoots, directRoots } records ({ name, root, realRoot }), or
// null when a required (non-optional) dependency is missing.
export function collectInstalledRuntimeDependencyRoots(
  rootNodeModulesDir,
  dependencySpecs,
  directDependencyPackageRoot = null,
  optionalDependencyNames = new Set(),
) {
  // Caches parsed package.json per canonical root to avoid re-reading.
  const packageCache = new Map();
  const directRoots = [];
  const allRoots = [];
  // Seed the queue with the plugin's own (direct) dependency specs.
  const queue = Object.entries(dependencySpecs).map(([depName, spec]) => ({
    depName,
    optional: optionalDependencyNames.has(depName),
    spec,
    parentPackageRoot: directDependencyPackageRoot,
    direct: true,
  }));
  const seen = new Set();

  while (queue.length > 0) {
    const current = queue.shift();
    const depRoot = resolveInstalledDependencyRoot({
      depName: current.depName,
      spec: current.spec,
      // Version specs are only enforced for direct dependencies; transitive
      // resolution trusts whatever the package manager installed.
      enforceSpec: current.direct,
      parentPackageRoot: current.parentPackageRoot,
      rootNodeModulesDir,
    });
    if (depRoot === null) {
      if (current.optional) {
        // Optional deps may legitimately be absent (e.g. platform-specific).
        continue;
      }
      return null;
    }
    const canonicalDepRoot = fs.realpathSync(depRoot);

    // De-duplicate on (name, canonical path) so a package reached via
    // multiple parents is processed once per distinct install location.
    const seenKey = `${current.depName}\0${canonicalDepRoot}`;
    if (seen.has(seenKey)) {
      continue;
    }
    seen.add(seenKey);

    const record = { name: current.depName, root: depRoot, realRoot: canonicalDepRoot };
    allRoots.push(record);
    if (current.direct) {
      directRoots.push(record);
    }

    const packageJson =
      packageCache.get(canonicalDepRoot) ?? readJson(path.join(depRoot, "package.json"));
    packageCache.set(canonicalDepRoot, packageJson);
    // Enqueue the package's own runtime deps for the next BFS level.
    for (const [childName, childSpec] of Object.entries(packageJson.dependencies ?? {})) {
      queue.push({
        depName: childName,
        optional: false,
        spec: childSpec,
        parentPackageRoot: depRoot,
        direct: false,
      });
    }
    for (const [childName, childSpec] of Object.entries(packageJson.optionalDependencies ?? {})) {
      queue.push({
        depName: childName,
        optional: true,
        spec: childSpec,
        parentPackageRoot: depRoot,
        direct: false,
      });
    }
  }

  return { allRoots, directRoots };
}
|
||||
|
||||
/** True when candidateRoot equals copiedRoot or lies strictly inside it. */
function pathIsInsideCopiedRoot(candidateRoot, copiedRoot) {
  if (candidateRoot === copiedRoot) {
    return true;
  }
  return candidateRoot.startsWith(`${copiedRoot}${path.sep}`);
}
|
||||
|
||||
/**
 * Returns the first root in `allowedRealRoots` that contains `candidatePath`
 * (or equals it), or null when none does.
 */
export function findContainingRealRoot(candidatePath, allowedRealRoots) {
  for (const rootPath of allowedRealRoots) {
    if (pathIsInsideCopiedRoot(candidatePath, rootPath)) {
      return rootPath;
    }
  }
  return null;
}
|
||||
|
||||
/**
 * Chooses which resolved package roots must be copied: all direct roots
 * first, then any remaining root from the full closure that is not already
 * covered by (equal to or nested inside) a previously selected root.
 */
export function selectRuntimeDependencyRootsToCopy(resolution) {
  const selected = [...resolution.directRoots];

  for (const record of resolution.allRoots) {
    const alreadyCovered = selected.some((entry) =>
      pathIsInsideCopiedRoot(record.realRoot, entry.realRoot),
    );
    if (!alreadyCovered) {
      selected.push(record);
    }
  }

  return selected;
}
|
||||
|
||||
/**
 * Verifies each direct dependency in `dependencySpecs` is installed at a
 * version satisfying its spec, and returns the list of names that resolved.
 * Missing optional dependencies are skipped; any other missing or
 * spec-violating dependency makes the whole resolution fail (returns null).
 */
export function resolveInstalledDirectDependencyNames(
  rootNodeModulesDir,
  dependencySpecs,
  directDependencyPackageRoot = null,
  optionalDependencyNames = new Set(),
) {
  const resolvedNames = [];
  for (const [depName, spec] of Object.entries(dependencySpecs)) {
    const depRoot = resolveInstalledDependencyRoot({
      depName,
      spec,
      parentPackageRoot: directDependencyPackageRoot,
      rootNodeModulesDir,
    });
    if (depRoot === null) {
      if (optionalDependencyNames.has(depName)) {
        continue;
      }
      return null;
    }
    const installedVersion = readInstalledDependencyVersionFromRoot(depRoot);
    const satisfied =
      installedVersion !== null && dependencyVersionSatisfied(spec, installedVersion);
    if (!satisfied) {
      return null;
    }
    resolvedNames.push(depName);
  }
  return resolvedNames;
}
|
||||
|
||||
function appendDirectoryFingerprint(hash, rootDir, currentDir = rootDir) {
|
||||
const entries = fs
|
||||
.readdirSync(currentDir, { withFileTypes: true })
|
||||
.toSorted((left, right) => left.name.localeCompare(right.name));
|
||||
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(currentDir, entry.name);
|
||||
const relativePath = path.relative(rootDir, fullPath).replace(/\\/g, "/");
|
||||
const stats = fs.lstatSync(fullPath);
|
||||
if (stats.isSymbolicLink()) {
|
||||
hash.update(`symlink:${relativePath}->${fs.readlinkSync(fullPath).replace(/\\/g, "/")}\n`);
|
||||
continue;
|
||||
}
|
||||
if (stats.isDirectory()) {
|
||||
hash.update(`dir:${relativePath}\n`);
|
||||
appendDirectoryFingerprint(hash, rootDir, fullPath);
|
||||
continue;
|
||||
}
|
||||
if (!stats.isFile()) {
|
||||
continue;
|
||||
}
|
||||
const stat = fs.statSync(fullPath);
|
||||
hash.update(`file:${relativePath}:${stat.size}\n`);
|
||||
hash.update(fs.readFileSync(fullPath));
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Hashes the contents of every resolved package root into a single sha256
 * hex digest. Records are processed in a stable (name, realRoot) order so
 * the digest only depends on tree contents. Returns null when any root has
 * disappeared since resolution.
 */
function createInstalledRuntimeClosureFingerprint(records) {
  const orderedRecords = [...records].toSorted(
    (left, right) =>
      left.name.localeCompare(right.name) || left.realRoot.localeCompare(right.realRoot),
  );
  const hash = createHash("sha256");
  for (const record of orderedRecords) {
    if (!fs.existsSync(record.realRoot)) {
      return null;
    }
    hash.update(`package:${record.name}:${record.realRoot}\n`);
    appendDirectoryFingerprint(hash, record.realRoot);
  }
  return hash.digest("hex");
}
|
||||
|
||||
/**
 * Computes the fingerprint of a plugin's installed runtime dependency
 * closure, or null when there are no runtime dependencies, the root
 * node_modules is missing, or the closure cannot be fully resolved.
 */
export function resolveInstalledRuntimeClosureFingerprint(params) {
  const dependencySpecs = {
    ...params.packageJson.dependencies,
    ...params.packageJson.optionalDependencies,
  };
  const hasSpecs = Object.keys(dependencySpecs).length > 0;
  if (!hasSpecs || !fs.existsSync(params.rootNodeModulesDir)) {
    return null;
  }
  const optionalNames = new Set(Object.keys(params.packageJson.optionalDependencies ?? {}));
  const resolution = collectInstalledRuntimeDependencyRoots(
    params.rootNodeModulesDir,
    dependencySpecs,
    params.directDependencyPackageRoot,
    optionalNames,
  );
  if (resolution === null) {
    return null;
  }
  const rootsToCopy = selectRuntimeDependencyRootsToCopy(resolution);
  return createInstalledRuntimeClosureFingerprint(rootsToCopy);
}
|
||||
@@ -1,198 +0,0 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { dependencyNodeModulesPath } from "./bundled-runtime-deps-package-tree.mjs";
|
||||
import { removePathIfExists } from "./bundled-runtime-deps-stage-state.mjs";
|
||||
|
||||
// File suffixes removed from every staged runtime dependency (type
// declarations and source maps are not needed at runtime).
const defaultStagedRuntimeDepGlobalPruneSuffixes = [".d.ts", ".map"];
// Directory basenames removed from every staged dependency (test fixtures).
const defaultStagedRuntimeDepGlobalPruneDirectories = [
  "__snapshots__",
  "__tests__",
  "test",
  "tests",
];
// File patterns removed from every staged dependency (unit/spec test files).
const defaultStagedRuntimeDepGlobalPruneFilePatterns = [
  /(?:^|\/)[^/]+\.(?:test|spec)\.(?:[cm]?[jt]sx?)$/u,
];
// Per-package prune rules applied on top of the global rules:
//   paths           - package-relative paths to delete outright
//   suffixes        - extra file suffixes to delete within the package
//   keepDirectories - directories exempt from the global directory prune
const defaultStagedRuntimeDepPruneRules = new Map([
  ["@larksuiteoapi/node-sdk", { paths: ["types"] }],
  [
    "@matrix-org/matrix-sdk-crypto-nodejs",
    {
      paths: ["index.d.ts", "README.md", "CHANGELOG.md", "RELEASING.md", ".node-version"],
    },
  ],
  [
    "@matrix-org/matrix-sdk-crypto-wasm",
    {
      paths: [
        "index.d.ts",
        "pkg/matrix_sdk_crypto_wasm.d.ts",
        "pkg/matrix_sdk_crypto_wasm_bg.wasm.d.ts",
        "README.md",
      ],
    },
  ],
  [
    "matrix-js-sdk",
    {
      paths: ["src", "CHANGELOG.md", "CONTRIBUTING.rst", "README.md", "release.sh"],
      suffixes: [".d.ts"],
    },
  ],
  ["matrix-widget-api", { paths: ["src"], suffixes: [".d.ts"] }],
  ["oidc-client-ts", { paths: ["README.md"], suffixes: [".d.ts"] }],
  ["music-metadata", { paths: ["README.md"], suffixes: [".d.ts"] }],
  // "." removes the entire package — it is types-only, nothing runs at runtime.
  ["@cloudflare/workers-types", { paths: ["."] }],
  ["gifwrap", { paths: ["test"] }],
  ["playwright-core", { paths: ["types"], suffixes: [".d.ts"] }],
  ["@jimp/plugin-blit", { paths: ["src/__image_snapshots__"] }],
  ["@jimp/plugin-blur", { paths: ["src/__image_snapshots__"] }],
  ["@jimp/plugin-color", { paths: ["src/__image_snapshots__"] }],
  ["@jimp/plugin-print", { paths: ["src/__image_snapshots__"] }],
  ["@jimp/plugin-quantize", { paths: ["src/__image_snapshots__"] }],
  ["@jimp/plugin-threshold", { paths: ["src/__image_snapshots__"] }],
  // This package ships runtime code under a directory literally named
  // "tests", so it must be exempted from the global directory prune.
  ["tokenjuice", { keepDirectories: ["dist/rules/tests"] }],
]);
|
||||
|
||||
/**
 * Builds the prune configuration, letting callers override any of the
 * default global suffix/directory/pattern lists or the per-package rules.
 */
export function resolveRuntimeDepPruneConfig(params = {}) {
  const {
    stagedRuntimeDepGlobalPruneDirectories,
    stagedRuntimeDepGlobalPruneFilePatterns,
    stagedRuntimeDepGlobalPruneSuffixes,
    stagedRuntimeDepPruneRules,
  } = params;
  return {
    globalPruneDirectories:
      stagedRuntimeDepGlobalPruneDirectories ?? defaultStagedRuntimeDepGlobalPruneDirectories,
    globalPruneFilePatterns:
      stagedRuntimeDepGlobalPruneFilePatterns ?? defaultStagedRuntimeDepGlobalPruneFilePatterns,
    globalPruneSuffixes:
      stagedRuntimeDepGlobalPruneSuffixes ?? defaultStagedRuntimeDepGlobalPruneSuffixes,
    pruneRules: stagedRuntimeDepPruneRules ?? defaultStagedRuntimeDepPruneRules,
  };
}
|
||||
|
||||
function walkFiles(rootDir, visitFile) {
|
||||
if (!fs.existsSync(rootDir)) {
|
||||
return;
|
||||
}
|
||||
const queue = [rootDir];
|
||||
for (let index = 0; index < queue.length; index += 1) {
|
||||
const currentDir = queue[index];
|
||||
for (const entry of fs.readdirSync(currentDir, { withFileTypes: true })) {
|
||||
const fullPath = path.join(currentDir, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
queue.push(fullPath);
|
||||
continue;
|
||||
}
|
||||
if (entry.isFile()) {
|
||||
visitFile(fullPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Deletes every file under `depRoot` whose path ends with one of `suffixes`. */
function pruneDependencyFilesBySuffixes(depRoot, suffixes) {
  const hasWork = Boolean(suffixes) && suffixes.length > 0 && fs.existsSync(depRoot);
  if (!hasWork) {
    return;
  }
  walkFiles(depRoot, (fullPath) => {
    const matches = suffixes.some((suffix) => fullPath.endsWith(suffix));
    if (matches) {
      removePathIfExists(fullPath);
    }
  });
}
|
||||
|
||||
/** Splits the path of `fullPath` relative to `rootDir` into non-empty segments. */
function relativePathSegments(rootDir, fullPath) {
  const relative = path.relative(rootDir, fullPath);
  return relative.split(path.sep).filter((segment) => segment.length > 0);
}
|
||||
|
||||
/**
 * True when segments[index] is a package root directly under a node_modules
 * directory — either `node_modules/<pkg>` or `node_modules/@scope/<pkg>`.
 */
function isNodeModulesPackageRoot(segments, index) {
  const parent = segments[index - 1];
  if (parent === "node_modules") {
    return true;
  }
  const parentIsScope = typeof parent === "string" && parent.startsWith("@");
  return parentIsScope && segments[index - 2] === "node_modules";
}
|
||||
|
||||
/**
 * Removes every directory under `depRoot` whose basename is in `basenames`,
 * except (a) directories that are themselves node_modules package roots
 * (a package may legitimately be named e.g. "test"), and (b) directories
 * listed in `keepDirs`, which are kept and descended into instead.
 */
function pruneDependencyDirectoriesByBasename(depRoot, basenames, keepDirs = new Set()) {
  const hasWork = Boolean(basenames) && basenames.length > 0 && fs.existsSync(depRoot);
  if (!hasWork) {
    return;
  }
  const prunedBasenames = new Set(basenames);
  const pending = [depRoot];
  while (pending.length > 0) {
    const currentDir = pending.shift();
    for (const entry of fs.readdirSync(currentDir, { withFileTypes: true })) {
      if (!entry.isDirectory()) {
        continue;
      }
      const fullPath = path.join(currentDir, entry.name);
      const segments = relativePathSegments(depRoot, fullPath);
      const isPruneCandidate =
        prunedBasenames.has(entry.name) &&
        !isNodeModulesPackageRoot(segments, segments.length - 1);
      if (isPruneCandidate && !keepDirs.has(fullPath)) {
        removePathIfExists(fullPath);
      } else {
        pending.push(fullPath);
      }
    }
  }
}
|
||||
|
||||
/** Deletes every file under `depRoot` whose root-relative path (with "/" separators) matches one of `patterns`. */
function pruneDependencyFilesByPatterns(depRoot, patterns) {
  const hasWork = Boolean(patterns) && patterns.length > 0 && fs.existsSync(depRoot);
  if (!hasWork) {
    return;
  }
  walkFiles(depRoot, (fullPath) => {
    const relativePath = relativePathSegments(depRoot, fullPath).join("/");
    const matches = patterns.some((pattern) => pattern.test(relativePath));
    if (matches) {
      removePathIfExists(fullPath);
    }
  });
}
|
||||
|
||||
/**
 * Applies prune rules to a single staged dependency: first any per-package
 * explicit paths, then the global directory/pattern/suffix prunes, then any
 * per-package suffix prunes. `keepDirectories` from the package rule are
 * exempt from the global directory prune.
 */
function pruneStagedInstalledDependencyCargo(nodeModulesDir, depName, pruneConfig) {
  const depRoot = dependencyNodeModulesPath(nodeModulesDir, depName);
  if (depRoot === null) {
    return;
  }
  const pruneRule = pruneConfig.pruneRules.get(depName);
  for (const relativePath of pruneRule?.paths ?? []) {
    removePathIfExists(path.join(depRoot, relativePath));
  }
  const keepDirs = new Set(
    (pruneRule?.keepDirectories ?? []).map((relativePath) => path.resolve(depRoot, relativePath)),
  );
  pruneDependencyDirectoriesByBasename(depRoot, pruneConfig.globalPruneDirectories, keepDirs);
  pruneDependencyFilesByPatterns(depRoot, pruneConfig.globalPruneFilePatterns);
  pruneDependencyFilesBySuffixes(depRoot, pruneConfig.globalPruneSuffixes);
  pruneDependencyFilesBySuffixes(depRoot, pruneRule?.suffixes ?? []);
}
|
||||
|
||||
function listInstalledDependencyNames(nodeModulesDir) {
|
||||
if (!fs.existsSync(nodeModulesDir)) {
|
||||
return [];
|
||||
}
|
||||
const names = [];
|
||||
for (const entry of fs.readdirSync(nodeModulesDir, { withFileTypes: true })) {
|
||||
if (!entry.isDirectory()) {
|
||||
continue;
|
||||
}
|
||||
if (entry.name.startsWith("@")) {
|
||||
const scopeDir = path.join(nodeModulesDir, entry.name);
|
||||
for (const scopedEntry of fs.readdirSync(scopeDir, { withFileTypes: true })) {
|
||||
if (scopedEntry.isDirectory()) {
|
||||
names.push(`${entry.name}/${scopedEntry.name}`);
|
||||
}
|
||||
}
|
||||
continue;
|
||||
}
|
||||
names.push(entry.name);
|
||||
}
|
||||
return names;
|
||||
}
|
||||
|
||||
/** Applies prune rules to every package installed under `nodeModulesDir`. */
export function pruneStagedRuntimeDependencyCargo(nodeModulesDir, pruneConfig) {
  const installedNames = listInstalledDependencyNames(nodeModulesDir);
  for (const depName of installedNames) {
    pruneStagedInstalledDependencyCargo(nodeModulesDir, depName, pruneConfig);
  }
}
|
||||
@@ -1,235 +0,0 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
|
||||
// Error codes treated as transient when removing temp dirs (typically caused
// by concurrent access or slow releases of handles, e.g. on Windows).
const TRANSIENT_TEMP_REMOVE_ERROR_CODES = new Set(["EBUSY", "ENOTEMPTY", "EPERM"]);
// Backoff delays (ms) between retries of a transient removal failure.
const TEMP_REMOVE_RETRY_DELAYS_MS = [10, 25, 50];
// Marker file written into owned temp dirs recording the owning pid.
const TEMP_OWNER_FILE = "owner.json";
|
||||
|
||||
/** Parses the JSON document stored at `filePath` (UTF-8). */
function readJson(filePath) {
  const raw = fs.readFileSync(filePath, "utf8");
  return JSON.parse(raw);
}
|
||||
|
||||
/** Writes `value` as pretty-printed JSON (2-space indent, trailing newline). */
function writeJson(filePath, value) {
  const serialized = JSON.stringify(value, null, 2);
  fs.writeFileSync(filePath, `${serialized}\n`, "utf8");
}
|
||||
|
||||
/**
 * Removes a path recursively (force, so a missing path is fine). Transient
 * failures (EBUSY/ENOTEMPTY/EPERM) are retried with short backoffs when
 * `options.retryTransient` is set; once retries are exhausted, a transient
 * error is either swallowed (returns false, with `options.ignoreTransient`)
 * or rethrown. Non-transient errors always propagate.
 *
 * @returns {boolean} true when the path is gone, false on swallowed failure
 */
export function removePathIfExists(targetPath, options = {}) {
  const retryDelays = options.retryTransient ? TEMP_REMOVE_RETRY_DELAYS_MS : [];
  let attempt = 0;
  for (;;) {
    try {
      fs.rmSync(targetPath, { recursive: true, force: true });
      return true;
    } catch (error) {
      if (!isTransientTempRemoveError(error)) {
        throw error;
      }
      const delay = retryDelays[attempt];
      if (delay === undefined) {
        // Out of retries for this transient failure.
        if (options.ignoreTransient) {
          return false;
        }
        throw error;
      }
      sleepSync(delay);
      attempt += 1;
    }
  }
}
|
||||
|
||||
/** Best-effort removal of an owned temp path: retries transient errors, never throws on them. */
export function removeOwnedTempPathBestEffort(targetPath) {
  const bestEffort = { ignoreTransient: true, retryTransient: true };
  return removePathIfExists(targetPath, bestEffort);
}
|
||||
|
||||
/** True when `error` is an object with a transient removal error code. */
function isTransientTempRemoveError(error) {
  if (!error || typeof error !== "object") {
    return false;
  }
  const { code } = error;
  return typeof code === "string" && TRANSIENT_TEMP_REMOVE_ERROR_CODES.has(code);
}
|
||||
|
||||
/**
 * Blocks the current thread for `ms` milliseconds using Atomics.wait on a
 * throwaway shared buffer. Non-finite or non-positive values are a no-op.
 */
function sleepSync(ms) {
  if (!Number.isFinite(ms) || ms <= 0) {
    return;
  }
  const gate = new Int32Array(new SharedArrayBuffer(4));
  Atomics.wait(gate, 0, 0, ms);
}
|
||||
|
||||
/** Creates a uniquely-named temp directory under `parentDir` using `prefix`. */
function makeTempDir(parentDir, prefix) {
  const template = path.join(parentDir, prefix);
  return fs.mkdtempSync(template);
}
|
||||
|
||||
/** Writes the owner marker (current pid + creation time) into a temp dir. */
export function writeRuntimeDepsTempOwner(tempDir) {
  const ownerRecord = {
    pid: process.pid,
    createdAtMs: Date.now(),
  };
  writeJson(path.join(tempDir, TEMP_OWNER_FILE), ownerRecord);
}
|
||||
|
||||
/** Creates a temp dir and immediately marks it as owned by this process. */
function makeOwnedTempDir(parentDir, prefix) {
  const ownedDir = makeTempDir(parentDir, prefix);
  writeRuntimeDepsTempOwner(ownedDir);
  return ownedDir;
}
|
||||
|
||||
/**
 * Makes a string safe for use in a temp-dir prefix: disallowed character
 * runs become "-", dash runs collapse, and an empty result falls back to
 * "plugin".
 */
export function sanitizeTempPrefixSegment(value) {
  const collapsed = value.replace(/[^A-Za-z0-9._-]+/g, "-").replace(/-+/g, "-");
  return collapsed === "" ? "plugin" : collapsed;
}
|
||||
|
||||
/** Creates an owned temp dir inside `pluginDir` tagged with `label`. */
export function makePluginOwnedTempDir(pluginDir, label) {
  const prefix = `.openclaw-runtime-deps-${label}-`;
  return makeOwnedTempDir(pluginDir, prefix);
}
|
||||
|
||||
/**
 * Throws when `targetPath` exists and is a symlink (guards destructive
 * operations against symlink redirection). A missing path is fine; other
 * lstat failures propagate.
 */
export function assertPathIsNotSymlink(targetPath, label) {
  let stats;
  try {
    stats = fs.lstatSync(targetPath);
  } catch (error) {
    if (error?.code === "ENOENT") {
      return;
    }
    throw error;
  }
  if (stats.isSymbolicLink()) {
    throw new Error(`refusing to ${label} via symlinked path: ${targetPath}`);
  }
}
|
||||
|
||||
/** True when `childPath` is an immediate (single-level) child of `parentPath`. */
function isDirectChildPath(parentPath, childPath) {
  const relative = path.relative(parentPath, childPath);
  if (relative.length === 0) {
    return false;
  }
  if (relative.startsWith("..") || path.isAbsolute(relative)) {
    return false;
  }
  return !relative.includes(path.sep);
}
|
||||
|
||||
/**
 * True when a symlink at `targetPath` pointing to `linkedPath` resolves to a
 * "node_modules" directory directly inside the legacy
 * `<repoRoot>/.local/bundled-plugin-runtime-deps/<plugin>` layout.
 */
function isLegacyBundledRuntimeDepsNodeModulesPath(targetPath, repoRoot, linkedPath) {
  const legacyRuntimeDepsRoot = path.resolve(repoRoot, ".local", "bundled-plugin-runtime-deps");
  const resolvedLinkedPath = path.resolve(path.dirname(targetPath), linkedPath);
  if (path.basename(resolvedLinkedPath) !== "node_modules") {
    return false;
  }
  return isDirectChildPath(legacyRuntimeDepsRoot, path.dirname(resolvedLinkedPath));
}
|
||||
|
||||
/**
 * Removes `targetPath` only when it is a symlink left over from the legacy
 * bundled-runtime-deps layout. Returns true when such a symlink was removed,
 * false in every other case (missing path, not a symlink, unreadable link,
 * or a link pointing somewhere else).
 */
export function removeLegacyBundledRuntimeDepsSymlink(targetPath, repoRoot) {
  let stats;
  try {
    stats = fs.lstatSync(targetPath);
  } catch (error) {
    if (error?.code === "ENOENT") {
      return false;
    }
    throw error;
  }
  if (!stats.isSymbolicLink()) {
    return false;
  }

  let linkedPath;
  try {
    linkedPath = fs.readlinkSync(targetPath);
  } catch {
    return false;
  }
  const isLegacyLink = isLegacyBundledRuntimeDepsNodeModulesPath(targetPath, repoRoot, linkedPath);
  if (!isLegacyLink) {
    return false;
  }

  removePathIfExists(targetPath);
  return true;
}
|
||||
|
||||
// Replaces the directory at targetPath with sourcePath as atomically as
// rename() allows: the existing target is first moved aside to a fresh
// backup dir, then sourcePath is renamed into place; on failure the backup
// is restored. Refuses to operate through a symlinked target.
export function replaceDirAtomically(targetPath, sourcePath) {
  assertPathIsNotSymlink(targetPath, "replace runtime deps");
  const targetParentDir = path.dirname(targetPath);
  fs.mkdirSync(targetParentDir, { recursive: true });
  // mkdtemp reserves a unique backup name on the same filesystem so the
  // rename below cannot cross devices.
  const backupPath = makeTempDir(
    targetParentDir,
    `.openclaw-runtime-deps-backup-${sanitizeTempPrefixSegment(path.basename(targetPath))}-`,
  );
  // The reserved dir itself must not exist when renaming the target onto it.
  removePathIfExists(backupPath, { retryTransient: true });

  let movedExistingTarget = false;
  try {
    if (fs.existsSync(targetPath)) {
      fs.renameSync(targetPath, backupPath);
      // Mark the backup as owned so stale-temp cleanup can reclaim it.
      writeRuntimeDepsTempOwner(backupPath);
      movedExistingTarget = true;
    }
    fs.renameSync(sourcePath, targetPath);
    removeOwnedTempPathBestEffort(backupPath);
  } catch (error) {
    // Roll back: restore the original target from the backup if the swap
    // failed partway and left the target slot empty.
    if (movedExistingTarget && !fs.existsSync(targetPath) && fs.existsSync(backupPath)) {
      fs.renameSync(backupPath, targetPath);
      // The restored dir must not keep the owner marker written above.
      removePathIfExists(path.join(targetPath, TEMP_OWNER_FILE));
    }
    throw error;
  }
}
|
||||
|
||||
/**
 * Atomically writes `value` as pretty-printed JSON to `targetPath`: the
 * document is written to a file inside a fresh owned temp dir (exclusive
 * create) and renamed into place; the temp dir is removed best-effort.
 * Refuses to write through a symlinked target path.
 */
export function writeJsonAtomically(targetPath, value) {
  assertPathIsNotSymlink(targetPath, "write runtime deps stamp");
  const parentDir = path.dirname(targetPath);
  fs.mkdirSync(parentDir, { recursive: true });
  const baseName = path.basename(targetPath);
  const tempDir = makeOwnedTempDir(
    parentDir,
    `.openclaw-runtime-deps-stamp-${sanitizeTempPrefixSegment(baseName)}-`,
  );
  const tempPath = path.join(tempDir, baseName);
  try {
    const serialized = `${JSON.stringify(value, null, 2)}\n`;
    // "wx" fails if the temp file already exists, guarding against reuse.
    fs.writeFileSync(tempPath, serialized, { encoding: "utf8", flag: "wx" });
    fs.renameSync(tempPath, targetPath);
  } finally {
    removeOwnedTempPathBestEffort(tempDir);
  }
}
|
||||
|
||||
/** Reads the owner marker of a temp dir; null when missing, unparsable, or not an object. */
function readRuntimeDepsTempOwner(tempDir) {
  try {
    const owner = readJson(path.join(tempDir, TEMP_OWNER_FILE));
    if (owner && typeof owner === "object") {
      return owner;
    }
    return null;
  } catch {
    return null;
  }
}
|
||||
|
||||
/**
 * True when a process with `pid` appears to be alive. Uses signal 0 as an
 * existence probe; EPERM means the process exists but belongs to another
 * user, so it still counts as live.
 */
function isLiveProcess(pid) {
  if (!Number.isInteger(pid) || pid <= 0) {
    return false;
  }
  try {
    process.kill(pid, 0);
    return true;
  } catch (error) {
    return error?.code === "EPERM";
  }
}
|
||||
|
||||
/**
 * A temp dir is removable when it has no valid owner marker, or when its
 * recorded owning process is no longer alive.
 */
function shouldRemoveRuntimeDepsTempDir(tempDir) {
  const owner = readRuntimeDepsTempOwner(tempDir);
  const hasOwnerPid = Boolean(owner) && typeof owner.pid === "number";
  if (!hasOwnerPid) {
    return true;
  }
  return !isLiveProcess(owner.pid);
}
|
||||
|
||||
/**
 * Sweeps `.openclaw-runtime-deps-*` temp entries from a plugin dir,
 * removing (best-effort) those whose owning process is gone. Entries owned
 * by a live process are left alone.
 */
export function removeStaleRuntimeDepsTempDirs(pluginDir) {
  if (!fs.existsSync(pluginDir)) {
    return;
  }
  for (const entry of fs.readdirSync(pluginDir, { withFileTypes: true })) {
    if (!entry.name.startsWith(".openclaw-runtime-deps-")) {
      continue;
    }
    const targetPath = path.join(pluginDir, entry.name);
    if (shouldRemoveRuntimeDepsTempDir(targetPath)) {
      removeOwnedTempPathBestEffort(targetPath);
    }
  }
}
|
||||
@@ -1,76 +0,0 @@
|
||||
import { createHash } from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { sanitizeTempPrefixSegment } from "./bundled-runtime-deps-stage-state.mjs";
|
||||
|
||||
const runtimeDepsStagingVersion = 7;
|
||||
|
||||
function readJson(filePath) {
|
||||
return JSON.parse(fs.readFileSync(filePath, "utf8"));
|
||||
}
|
||||
|
||||
function readOptionalUtf8(filePath) {
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return null;
|
||||
}
|
||||
return fs.readFileSync(filePath, "utf8");
|
||||
}
|
||||
|
||||
export function resolveLegacyRuntimeDepsStampPath(pluginDir) {
|
||||
return path.join(pluginDir, ".openclaw-runtime-deps-stamp.json");
|
||||
}
|
||||
|
||||
export function resolveRuntimeDepsStampPath(repoRoot, pluginId) {
|
||||
return path.join(
|
||||
repoRoot,
|
||||
".artifacts",
|
||||
"bundled-runtime-deps-stamps",
|
||||
`${sanitizeTempPrefixSegment(pluginId)}.json`,
|
||||
);
|
||||
}
|
||||
|
||||
export function createRuntimeDepsFingerprint(packageJson, pruneConfig, params = {}) {
|
||||
return createHash("sha256")
|
||||
.update(
|
||||
JSON.stringify({
|
||||
cheapFingerprint: createRuntimeDepsCheapFingerprint(packageJson, pruneConfig, params),
|
||||
rootInstalledRuntimeFingerprint: params.rootInstalledRuntimeFingerprint ?? null,
|
||||
}),
|
||||
)
|
||||
.digest("hex");
|
||||
}
|
||||
|
||||
export function createRuntimeDepsCheapFingerprint(packageJson, pruneConfig, params = {}) {
|
||||
const repoRoot = params.repoRoot;
|
||||
const lockfilePath =
|
||||
typeof repoRoot === "string" && repoRoot.length > 0
|
||||
? path.join(repoRoot, "pnpm-lock.yaml")
|
||||
: null;
|
||||
const rootLockfile = lockfilePath ? readOptionalUtf8(lockfilePath) : null;
|
||||
return createHash("sha256")
|
||||
.update(
|
||||
JSON.stringify({
|
||||
globalPruneDirectories: pruneConfig.globalPruneDirectories,
|
||||
globalPruneFilePatterns: pruneConfig.globalPruneFilePatterns.map((pattern) =>
|
||||
pattern.toString(),
|
||||
),
|
||||
globalPruneSuffixes: pruneConfig.globalPruneSuffixes,
|
||||
packageJson,
|
||||
pruneRules: [...pruneConfig.pruneRules.entries()],
|
||||
rootLockfile,
|
||||
version: runtimeDepsStagingVersion,
|
||||
}),
|
||||
)
|
||||
.digest("hex");
|
||||
}
|
||||
|
||||
export function readRuntimeDepsStamp(stampPath) {
|
||||
if (!fs.existsSync(stampPath)) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
return readJson(stampPath);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -142,6 +142,11 @@
|
||||
"class": "default-runtime-initially",
|
||||
"risk": ["provider-sdk", "network"]
|
||||
},
|
||||
"playwright-core": {
|
||||
"owner": "core:browser",
|
||||
"class": "core-runtime",
|
||||
"risk": ["browser-automation", "cdp"]
|
||||
},
|
||||
"pdfjs-dist": {
|
||||
"owner": "plugin:document-extract",
|
||||
"class": "plugin-runtime",
|
||||
@@ -158,11 +163,6 @@
|
||||
"class": "default-runtime-initially",
|
||||
"risk": ["terminal-rendering", "png-encoding"]
|
||||
},
|
||||
"semver": {
|
||||
"owner": "core:package-versioning",
|
||||
"class": "core-runtime",
|
||||
"risk": ["version-parser"]
|
||||
},
|
||||
"sharp": {
|
||||
"owner": "plugin:media-understanding-core",
|
||||
"class": "plugin-runtime",
|
||||
|
||||
@@ -36,7 +36,6 @@ export function parseLaneSelection(raw) {
|
||||
return [];
|
||||
}
|
||||
const laneAliases = new Map([
|
||||
["bundled-channel-deps", ["bundled-channel-deps-compat"]],
|
||||
["install-e2e", ["install-e2e-openai", "install-e2e-anthropic"]],
|
||||
[
|
||||
"bundled-plugin-install-uninstall",
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
// Keep lane names, commands, image kind, timeout, resources, and release chunks
|
||||
// here. Planning and execution live in separate modules.
|
||||
|
||||
const BUNDLED_UPDATE_NO_OUTPUT_TIMEOUT_MS = 4 * 60 * 1000;
|
||||
const BUNDLED_UPDATE_TIMEOUT_MS = 6 * 60 * 1000;
|
||||
export const DEFAULT_LIVE_RETRIES = 1;
|
||||
const LIVE_ACP_TIMEOUT_MS = 20 * 60 * 1000;
|
||||
const LIVE_CLI_TIMEOUT_MS = 20 * 60 * 1000;
|
||||
@@ -21,9 +19,6 @@ export const LIVE_RETRY_PATTERNS = [
|
||||
/ECONNRESET|ETIMEDOUT|ENOTFOUND/i,
|
||||
];
|
||||
|
||||
const bundledChannelLaneCommand =
|
||||
"OPENCLAW_SKIP_DOCKER_BUILD=1 OPENCLAW_BUNDLED_CHANNEL_UPDATE_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_ROOT_OWNED_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_SETUP_ENTRY_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_LOAD_FAILURE_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_DISABLED_CONFIG_SCENARIO=0 pnpm test:docker:bundled-channel-deps";
|
||||
|
||||
function liveDockerScriptCommand(script, envPrefix = "") {
|
||||
const prefix = envPrefix ? `${envPrefix} ` : "";
|
||||
return `${prefix}OPENCLAW_SKIP_DOCKER_BUILD=1 bash -c 'harness="\${OPENCLAW_DOCKER_E2E_TRUSTED_HARNESS_DIR:-}"; if [ -z "$harness" ]; then if [ -d .release-harness/scripts ]; then harness=.release-harness; else harness=.; fi; fi; OPENCLAW_LIVE_DOCKER_REPO_ROOT="\${OPENCLAW_DOCKER_E2E_REPO_ROOT:-$PWD}" bash "$harness/scripts/${script}"'`;
|
||||
@@ -108,72 +103,6 @@ function serviceLane(name, command, options = {}) {
|
||||
});
|
||||
}
|
||||
|
||||
function bundledChannelScenarioLane(name, env, options = {}) {
|
||||
return npmLane(
|
||||
name,
|
||||
`${env} OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:bundled-channel-deps`,
|
||||
options,
|
||||
);
|
||||
}
|
||||
|
||||
const bundledChannelSmokeLanes = ["telegram", "discord", "slack", "feishu", "memory-lancedb"].map(
|
||||
(channel) =>
|
||||
npmLane(
|
||||
`bundled-channel-${channel}`,
|
||||
`OPENCLAW_BUNDLED_CHANNELS=${channel} ${bundledChannelLaneCommand}`,
|
||||
{ stateScenario: "empty" },
|
||||
),
|
||||
);
|
||||
|
||||
const bundledChannelUpdateLanes = [
|
||||
"telegram",
|
||||
"discord",
|
||||
"slack",
|
||||
"feishu",
|
||||
"memory-lancedb",
|
||||
"acpx",
|
||||
].map((target) =>
|
||||
bundledChannelScenarioLane(
|
||||
`bundled-channel-update-${target}`,
|
||||
`OPENCLAW_BUNDLED_CHANNEL_SCENARIOS=0 OPENCLAW_BUNDLED_CHANNEL_UPDATE_SCENARIO=1 OPENCLAW_BUNDLED_CHANNEL_UPDATE_TARGETS=${target} OPENCLAW_BUNDLED_CHANNEL_ROOT_OWNED_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_SETUP_ENTRY_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_LOAD_FAILURE_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_DISABLED_CONFIG_SCENARIO=0`,
|
||||
{
|
||||
noOutputTimeoutMs: BUNDLED_UPDATE_NO_OUTPUT_TIMEOUT_MS,
|
||||
retryPatterns: LIVE_RETRY_PATTERNS,
|
||||
retries: 1,
|
||||
stateScenario: "empty",
|
||||
timeoutMs: BUNDLED_UPDATE_TIMEOUT_MS,
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
const bundledChannelContractLanes = [
|
||||
bundledChannelScenarioLane(
|
||||
"bundled-channel-root-owned",
|
||||
"OPENCLAW_BUNDLED_CHANNEL_SCENARIOS=0 OPENCLAW_BUNDLED_CHANNEL_UPDATE_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_ROOT_OWNED_SCENARIO=1 OPENCLAW_BUNDLED_CHANNEL_SETUP_ENTRY_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_LOAD_FAILURE_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_DISABLED_CONFIG_SCENARIO=0",
|
||||
),
|
||||
bundledChannelScenarioLane(
|
||||
"bundled-channel-setup-entry",
|
||||
"OPENCLAW_BUNDLED_CHANNEL_SCENARIOS=0 OPENCLAW_BUNDLED_CHANNEL_UPDATE_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_ROOT_OWNED_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_SETUP_ENTRY_SCENARIO=1 OPENCLAW_BUNDLED_CHANNEL_LOAD_FAILURE_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_DISABLED_CONFIG_SCENARIO=0",
|
||||
{ stateScenario: "empty" },
|
||||
),
|
||||
bundledChannelScenarioLane(
|
||||
"bundled-channel-load-failure",
|
||||
"OPENCLAW_BUNDLED_CHANNEL_SCENARIOS=0 OPENCLAW_BUNDLED_CHANNEL_UPDATE_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_ROOT_OWNED_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_SETUP_ENTRY_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_LOAD_FAILURE_SCENARIO=1 OPENCLAW_BUNDLED_CHANNEL_DISABLED_CONFIG_SCENARIO=0",
|
||||
{ stateScenario: "empty" },
|
||||
),
|
||||
bundledChannelScenarioLane(
|
||||
"bundled-channel-disabled-config",
|
||||
"OPENCLAW_BUNDLED_CHANNEL_SCENARIOS=0 OPENCLAW_BUNDLED_CHANNEL_UPDATE_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_ROOT_OWNED_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_SETUP_ENTRY_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_LOAD_FAILURE_SCENARIO=0 OPENCLAW_BUNDLED_CHANNEL_DISABLED_CONFIG_SCENARIO=1",
|
||||
{ stateScenario: "empty" },
|
||||
),
|
||||
];
|
||||
|
||||
const bundledScenarioLanes = [
|
||||
...bundledChannelSmokeLanes,
|
||||
...bundledChannelUpdateLanes,
|
||||
...bundledChannelContractLanes,
|
||||
];
|
||||
|
||||
const bundledPluginInstallUninstallLanes = Array.from(
|
||||
{ length: BUNDLED_PLUGIN_INSTALL_UNINSTALL_SHARDS },
|
||||
(_, index) =>
|
||||
@@ -313,18 +242,12 @@ export const mainLanes = [
|
||||
weight: 6,
|
||||
},
|
||||
),
|
||||
npmLane(
|
||||
"bundled-channel-deps-compat",
|
||||
"OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:bundled-channel-deps:fast",
|
||||
{ resources: ["service"], stateScenario: "empty", weight: 3 },
|
||||
),
|
||||
npmLane("plugin-update", "OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:plugin-update", {
|
||||
stateScenario: "empty",
|
||||
}),
|
||||
serviceLane("config-reload", "OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:config-reload", {
|
||||
stateScenario: "empty",
|
||||
}),
|
||||
...bundledScenarioLanes,
|
||||
lane("openai-image-auth", "OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:openai-image-auth", {
|
||||
stateScenario: "empty",
|
||||
}),
|
||||
@@ -504,7 +427,6 @@ const releasePathBundledChannelLanes = [
|
||||
npmLane("plugin-update", "OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:plugin-update", {
|
||||
stateScenario: "empty",
|
||||
}),
|
||||
...bundledScenarioLanes,
|
||||
];
|
||||
|
||||
const releasePathPackageInstallOpenAiLanes = [
|
||||
@@ -606,15 +528,6 @@ const primaryReleasePathChunks = {
|
||||
"plugins-runtime-install-f": bundledPluginInstallUninstallLanes.slice(15, 18),
|
||||
"plugins-runtime-install-g": bundledPluginInstallUninstallLanes.slice(18, 21),
|
||||
"plugins-runtime-install-h": bundledPluginInstallUninstallLanes.slice(21),
|
||||
"bundled-channels-core": [releasePathBundledChannelLanes[0], ...bundledChannelSmokeLanes],
|
||||
"bundled-channels-update-a": [bundledChannelUpdateLanes[0], bundledChannelUpdateLanes[4]],
|
||||
"bundled-channels-update-discord": [bundledChannelUpdateLanes[1]],
|
||||
"bundled-channels-update-b": [
|
||||
bundledChannelUpdateLanes[2],
|
||||
bundledChannelUpdateLanes[3],
|
||||
bundledChannelUpdateLanes[5],
|
||||
],
|
||||
"bundled-channels-contracts": bundledChannelContractLanes,
|
||||
openwebui: [],
|
||||
};
|
||||
|
||||
@@ -628,11 +541,6 @@ const legacyReleasePathChunks = {
|
||||
"plugins-runtime": releasePathPluginRuntimeLanes,
|
||||
"plugins-integrations": [...releasePathPluginRuntimeLanes, ...releasePathBundledChannelLanes],
|
||||
"bundled-channels": releasePathBundledChannelLanes,
|
||||
"bundled-channels-update-a-legacy": [
|
||||
bundledChannelUpdateLanes[0],
|
||||
bundledChannelUpdateLanes[1],
|
||||
bundledChannelUpdateLanes[4],
|
||||
],
|
||||
};
|
||||
|
||||
function openWebUILane() {
|
||||
|
||||
@@ -4,6 +4,7 @@ export const OPTIONAL_BUNDLED_BUILD_ENV: "OPENCLAW_INCLUDE_OPTIONAL_BUNDLED";
|
||||
export function isOptionalBundledCluster(cluster: string): boolean;
|
||||
export function shouldIncludeOptionalBundledClusters(env?: NodeJS.ProcessEnv): boolean;
|
||||
export function hasReleasedBundledInstall(packageJson: unknown): boolean;
|
||||
export function isExplicitlyDownloadablePlugin(packageJson: unknown): boolean;
|
||||
export function shouldBuildBundledCluster(
|
||||
cluster: string,
|
||||
env?: NodeJS.ProcessEnv,
|
||||
|
||||
@@ -35,7 +35,14 @@ export function hasReleasedBundledInstall(packageJson) {
|
||||
);
|
||||
}
|
||||
|
||||
export function isExplicitlyDownloadablePlugin(packageJson) {
|
||||
return packageJson?.openclaw?.bundle?.includeInCore === false;
|
||||
}
|
||||
|
||||
export function shouldBuildBundledCluster(cluster, env = process.env, options = {}) {
|
||||
if (isExplicitlyDownloadablePlugin(options.packageJson)) {
|
||||
return false;
|
||||
}
|
||||
if (hasReleasedBundledInstall(options.packageJson)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ export const PLUGIN_PRERELEASE_REQUIRED_SURFACES = Object.freeze([
|
||||
"bundled-lifecycle",
|
||||
"external-plugins",
|
||||
"update-no-op",
|
||||
"channel-runtime-deps",
|
||||
"installed-plugin-deps",
|
||||
"doctor-fix",
|
||||
"config-round-trip",
|
||||
"gateway-bootstrap",
|
||||
@@ -29,11 +29,7 @@ const pluginPrereleaseDockerLanes = Object.freeze([
|
||||
},
|
||||
{
|
||||
lane: "update-channel-switch",
|
||||
surfaces: ["package-artifact", "channel-runtime-deps", "update-no-op"],
|
||||
},
|
||||
{
|
||||
lane: "bundled-channel-deps-compat",
|
||||
surfaces: ["package-artifact", "channel-runtime-deps", "gateway-bootstrap"],
|
||||
surfaces: ["package-artifact", "installed-plugin-deps", "update-no-op"],
|
||||
},
|
||||
{
|
||||
lane: "plugins-offline",
|
||||
|
||||
@@ -20,7 +20,7 @@ import { createConnection as createNetConnection, createServer as createNetServe
|
||||
import { tmpdir } from "node:os";
|
||||
import { dirname, join, resolve, win32 as pathWin32 } from "node:path";
|
||||
import { fileURLToPath, pathToFileURL } from "node:url";
|
||||
import { assertNoBundledRuntimeDepsStagingDebris } from "../src/infra/package-dist-inventory.ts";
|
||||
import { assertNoLegacyPluginDependencyStagingDebris } from "../src/infra/package-dist-inventory.ts";
|
||||
import { isLocalBuildMetadataDistPath } from "./lib/local-build-metadata-paths.mjs";
|
||||
|
||||
const SCRIPT_PATH = fileURLToPath(import.meta.url);
|
||||
@@ -521,7 +521,7 @@ function isPackagedDistPath(relativePath) {
|
||||
}
|
||||
|
||||
export async function writePackageDistInventoryForCandidate(params) {
|
||||
await assertNoBundledRuntimeDepsStagingDebris(params.sourceDir);
|
||||
await assertNoLegacyPluginDependencyStagingDebris(params.sourceDir);
|
||||
const dryRun = await runCommand(
|
||||
npmCommand(),
|
||||
["pack", "--dry-run", "--ignore-scripts", "--json"],
|
||||
@@ -585,11 +585,11 @@ async function runFreshLane(params) {
|
||||
env,
|
||||
tgzPath: params.build.candidateTgz,
|
||||
logPath: join(params.logsDir, "fresh-install.log"),
|
||||
restoreBundledPluginRuntimeDeps: false,
|
||||
restoreBundledPluginPostinstall: false,
|
||||
});
|
||||
const installed = readInstalledMetadata(lane.prefixDir);
|
||||
verifyInstalledCandidate(installed, params.build);
|
||||
logLanePhase(lane, "restore-bundled-plugin-runtime-deps");
|
||||
logLanePhase(lane, "run-bundled-plugin-postinstall");
|
||||
await runBundledPluginPostinstall({
|
||||
lane,
|
||||
env,
|
||||
@@ -691,10 +691,10 @@ async function runUpgradeLane(params) {
|
||||
env,
|
||||
tgzPath: params.baselineTgz,
|
||||
logPath: join(params.logsDir, "upgrade-install-baseline.log"),
|
||||
restoreBundledPluginRuntimeDeps: false,
|
||||
restoreBundledPluginPostinstall: false,
|
||||
});
|
||||
}
|
||||
logLanePhase(lane, "restore-baseline-bundled-plugin-runtime-deps");
|
||||
logLanePhase(lane, "run-baseline-bundled-plugin-postinstall");
|
||||
await runBundledPluginPostinstall({
|
||||
lane,
|
||||
env,
|
||||
@@ -736,7 +736,7 @@ async function runUpgradeLane(params) {
|
||||
logPath: join(params.logsDir, "upgrade-update-status.log"),
|
||||
timeoutMs: 2 * 60 * 1000,
|
||||
});
|
||||
logLanePhase(lane, "restore-bundled-plugin-runtime-deps");
|
||||
logLanePhase(lane, "run-bundled-plugin-postinstall");
|
||||
await runBundledPluginPostinstall({
|
||||
lane,
|
||||
env,
|
||||
@@ -1215,7 +1215,7 @@ export function shouldStopManagedGatewayBeforeManualFallback(platform = process.
|
||||
return shouldUseManagedGatewayService(platform);
|
||||
}
|
||||
|
||||
function shouldRestoreBundledPluginRuntimeDeps() {
|
||||
function shouldRunBundledPluginPostinstall() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -2235,8 +2235,8 @@ async function installTarballPackage(params) {
|
||||
timeoutMs: params.timeoutMs,
|
||||
});
|
||||
if (
|
||||
params.restoreBundledPluginRuntimeDeps !== false &&
|
||||
shouldRestoreBundledPluginRuntimeDeps({ lane: params.lane })
|
||||
params.restoreBundledPluginPostinstall !== false &&
|
||||
shouldRunBundledPluginPostinstall({ lane: params.lane })
|
||||
) {
|
||||
await runBundledPluginPostinstall({
|
||||
lane: params.lane,
|
||||
@@ -2689,7 +2689,7 @@ function buildReleaseAgentTurnArgs(sessionId) {
|
||||
|
||||
export function shouldRetryCrossOsAgentTurnError(error) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
return /failed to (?:install|stage) bundled runtime deps|failed to stage bundled runtime deps after|Agent output did not contain the expected OK marker|model idle timeout|did not produce a response before the model idle timeout|gateway request timeout for agent|Command timed out|timed out and could not be terminated cleanly/u.test(
|
||||
return /Agent output did not contain the expected OK marker|model idle timeout|did not produce a response before the model idle timeout|gateway request timeout for agent|Command timed out|timed out and could not be terminated cleanly/u.test(
|
||||
message,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -19,8 +19,6 @@ import { formatErrorMessage } from "../src/infra/errors.ts";
|
||||
import { BUNDLED_RUNTIME_SIDECAR_PATHS } from "../src/plugins/runtime-sidecar-paths.ts";
|
||||
import { listBundledPluginPackArtifacts } from "./lib/bundled-plugin-build-entries.mjs";
|
||||
import {
|
||||
collectBundledPluginRootRuntimeMirrorErrors,
|
||||
collectRootDistBundledRuntimeMirrors,
|
||||
collectRuntimeDependencySpecs,
|
||||
packageNameFromSpecifier,
|
||||
} from "./lib/bundled-plugin-root-runtime-mirrors.mjs";
|
||||
@@ -112,7 +110,6 @@ export function collectInstalledPackageErrors(params: {
|
||||
|
||||
errors.push(...collectInstalledContextEngineRuntimeErrors(params.packageRoot));
|
||||
errors.push(...collectInstalledRootDependencyManifestErrors(params.packageRoot));
|
||||
errors.push(...collectInstalledMirroredRootDependencyManifestErrors(params.packageRoot));
|
||||
|
||||
return errors;
|
||||
}
|
||||
@@ -440,52 +437,6 @@ function readBundledExtensionPackageJsons(packageRoot: string): {
|
||||
return { manifests, errors };
|
||||
}
|
||||
|
||||
export function collectInstalledMirroredRootDependencyManifestErrors(
|
||||
packageRoot: string,
|
||||
): string[] {
|
||||
const packageJsonPath = join(packageRoot, "package.json");
|
||||
if (!existsSync(packageJsonPath)) {
|
||||
return ["installed package is missing package.json."];
|
||||
}
|
||||
|
||||
const rootPackageJson = JSON.parse(readFileSync(packageJsonPath, "utf8")) as InstalledPackageJson;
|
||||
const { manifests, errors } = readBundledExtensionPackageJsons(packageRoot);
|
||||
const bundledRuntimeDependencySpecs = new Map<
|
||||
string,
|
||||
{ conflicts: Array<{ pluginId: string; spec: string }>; pluginIds: string[]; spec: string }
|
||||
>();
|
||||
|
||||
for (const { id, manifest: extensionPackageJson } of manifests) {
|
||||
const extensionRuntimeDeps = collectRuntimeDependencySpecs(extensionPackageJson);
|
||||
for (const [dependencyName, spec] of extensionRuntimeDeps) {
|
||||
const existing = bundledRuntimeDependencySpecs.get(dependencyName);
|
||||
if (existing) {
|
||||
if (existing.spec !== spec) {
|
||||
existing.conflicts.push({ pluginId: id, spec });
|
||||
} else if (!existing.pluginIds.includes(id)) {
|
||||
existing.pluginIds.push(id);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
bundledRuntimeDependencySpecs.set(dependencyName, { conflicts: [], pluginIds: [id], spec });
|
||||
}
|
||||
}
|
||||
|
||||
const requiredRootMirrors = collectRootDistBundledRuntimeMirrors({
|
||||
bundledRuntimeDependencySpecs,
|
||||
distDir: join(packageRoot, "dist"),
|
||||
});
|
||||
errors.push(
|
||||
...collectBundledPluginRootRuntimeMirrorErrors({
|
||||
bundledRuntimeDependencySpecs,
|
||||
requiredRootMirrors,
|
||||
rootPackageJson,
|
||||
}),
|
||||
);
|
||||
|
||||
return errors;
|
||||
}
|
||||
|
||||
function npmExec(args: string[], cwd: string): string {
|
||||
const invocation = resolveNpmCommandInvocation({
|
||||
npmExecPath: process.env.npm_execpath,
|
||||
|
||||
@@ -66,7 +66,6 @@ const MAX_CALVER_DISTANCE_DAYS = 2;
|
||||
const REQUIRED_PACKED_PATHS = [
|
||||
PACKAGE_DIST_INVENTORY_RELATIVE_PATH,
|
||||
"dist/control-ui/index.html",
|
||||
"scripts/lib/bundled-runtime-deps-install.mjs",
|
||||
...WORKSPACE_TEMPLATE_PACK_PATHS,
|
||||
];
|
||||
const CONTROL_UI_ASSET_PREFIX = "dist/control-ui/assets/";
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
#!/usr/bin/env node
|
||||
// Runs after install to keep packaged dist safe and compatible.
|
||||
// Bundled extension runtime dependencies are extension-owned. `openclaw doctor
|
||||
// --fix` and `openclaw plugins deps --repair` own the repair path for plugins
|
||||
// that are actually used.
|
||||
// Keep packaged dist safe and compatible. Plugin package dependencies are
|
||||
// installed only by explicit plugin install/update flows, never postinstall.
|
||||
import { randomUUID } from "node:crypto";
|
||||
import {
|
||||
chmodSync,
|
||||
@@ -24,7 +23,6 @@ import { basename, dirname, isAbsolute, join, posix, relative } from "node:path"
|
||||
import { fileURLToPath, pathToFileURL } from "node:url";
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
const DEFAULT_EXTENSIONS_DIR = join(__dirname, "..", "dist", "extensions");
|
||||
const DEFAULT_PACKAGE_ROOT = join(__dirname, "..");
|
||||
const DISABLE_POSTINSTALL_ENV = "OPENCLAW_DISABLE_BUNDLED_PLUGIN_POSTINSTALL";
|
||||
const DISABLE_PLUGIN_REGISTRY_MIGRATION_ENV = "OPENCLAW_DISABLE_PLUGIN_REGISTRY_MIGRATION";
|
||||
@@ -105,10 +103,6 @@ function hasEnvFlag(env, key) {
|
||||
return Boolean(value && value !== "0" && value !== "false" && value !== "no");
|
||||
}
|
||||
|
||||
function readJson(filePath) {
|
||||
return JSON.parse(readFileSync(filePath, "utf8"));
|
||||
}
|
||||
|
||||
function normalizeRelativePath(filePath) {
|
||||
return filePath.replace(/\\/g, "/");
|
||||
}
|
||||
@@ -453,71 +447,6 @@ export function pruneInstalledPackageDist(params = {}) {
|
||||
return removed;
|
||||
}
|
||||
|
||||
function dependencySentinelPath(depName) {
|
||||
return join("node_modules", ...depName.split("/"), "package.json");
|
||||
}
|
||||
|
||||
function collectRuntimeDeps(packageJson) {
|
||||
return {
|
||||
...packageJson.dependencies,
|
||||
...packageJson.optionalDependencies,
|
||||
};
|
||||
}
|
||||
|
||||
export function discoverBundledPluginRuntimeDeps(params = {}) {
|
||||
const extensionsDir = params.extensionsDir ?? DEFAULT_EXTENSIONS_DIR;
|
||||
const pathExists = params.existsSync ?? existsSync;
|
||||
const readDir = params.readdirSync ?? readdirSync;
|
||||
const readJsonFile = params.readJson ?? readJson;
|
||||
const deps = new Map();
|
||||
|
||||
if (!pathExists(extensionsDir)) {
|
||||
return [...deps.values()].toSorted((a, b) => a.name.localeCompare(b.name));
|
||||
}
|
||||
|
||||
for (const entry of readDir(extensionsDir, { withFileTypes: true })) {
|
||||
if (!entry.isDirectory()) {
|
||||
continue;
|
||||
}
|
||||
const pluginId = entry.name;
|
||||
const packageJsonPath = join(extensionsDir, pluginId, "package.json");
|
||||
if (!pathExists(packageJsonPath)) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
const packageJson = readJsonFile(packageJsonPath);
|
||||
for (const [name, version] of Object.entries(collectRuntimeDeps(packageJson))) {
|
||||
const existing = deps.get(name);
|
||||
if (existing) {
|
||||
if (existing.version !== version) {
|
||||
continue;
|
||||
}
|
||||
if (!existing.pluginIds.includes(pluginId)) {
|
||||
existing.pluginIds.push(pluginId);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
deps.set(name, {
|
||||
name,
|
||||
version,
|
||||
sentinelPath: dependencySentinelPath(name),
|
||||
pluginIds: [pluginId],
|
||||
});
|
||||
}
|
||||
} catch {
|
||||
// Ignore malformed plugin manifests; runtime will surface those separately.
|
||||
}
|
||||
}
|
||||
|
||||
return [...deps.values()]
|
||||
.map((dep) =>
|
||||
Object.assign({}, dep, {
|
||||
pluginIds: [...dep.pluginIds].toSorted((a, b) => a.localeCompare(b)),
|
||||
}),
|
||||
)
|
||||
.toSorted((a, b) => a.name.localeCompare(b.name));
|
||||
}
|
||||
|
||||
export function applyBaileysEncryptedStreamFinishHotfix(params = {}) {
|
||||
const packageRoot = params.packageRoot ?? DEFAULT_PACKAGE_ROOT;
|
||||
const pathExists = params.existsSync ?? existsSync;
|
||||
|
||||
@@ -6,7 +6,6 @@ import {
|
||||
lstatSync,
|
||||
mkdtempSync,
|
||||
mkdirSync,
|
||||
realpathSync,
|
||||
readdirSync,
|
||||
readFileSync,
|
||||
rmSync,
|
||||
@@ -17,15 +16,11 @@ import { dirname, join, resolve } from "node:path";
|
||||
import { pathToFileURL } from "node:url";
|
||||
import { COMPLETION_SKIP_PLUGIN_COMMANDS_ENV } from "../src/cli/completion-runtime.ts";
|
||||
import {
|
||||
isBundledRuntimeDepsInstallStagePath,
|
||||
isLegacyPluginDependencyInstallStagePath,
|
||||
LOCAL_BUILD_METADATA_DIST_PATHS,
|
||||
PACKAGE_DIST_INVENTORY_RELATIVE_PATH,
|
||||
writePackageDistInventory,
|
||||
} from "../src/infra/package-dist-inventory.ts";
|
||||
import {
|
||||
resolveBundledRuntimeDependencyInstallRoot,
|
||||
resolveBundledRuntimeDependencyPackageInstallRoot,
|
||||
} from "../src/plugins/bundled-runtime-deps-roots.ts";
|
||||
import { checkCliBootstrapExternalImports } from "./check-cli-bootstrap-imports.mjs";
|
||||
import {
|
||||
collectBundledExtensionManifestErrors,
|
||||
@@ -33,32 +28,19 @@ import {
|
||||
type ExtensionPackageJson as PackageJson,
|
||||
} from "./lib/bundled-extension-manifest.ts";
|
||||
import { listBundledPluginPackArtifacts } from "./lib/bundled-plugin-build-entries.mjs";
|
||||
import {
|
||||
collectBuiltBundledPluginStagedRuntimeDependencyErrors,
|
||||
collectBundledPluginRootRuntimeMirrorErrors,
|
||||
collectBundledPluginRuntimeDependencySpecs,
|
||||
collectDeclaredRootRuntimeDependencyMetadataErrors,
|
||||
collectRootDistBundledRuntimeMirrors,
|
||||
} from "./lib/bundled-plugin-root-runtime-mirrors.mjs";
|
||||
import { collectBundledPluginRuntimeDependencySpecs } from "./lib/bundled-plugin-root-runtime-mirrors.mjs";
|
||||
import { collectPackUnpackedSizeErrors as collectNpmPackUnpackedSizeErrors } from "./lib/npm-pack-budget.mjs";
|
||||
import { listPluginSdkDistArtifacts } from "./lib/plugin-sdk-entries.mjs";
|
||||
import {
|
||||
runInstalledWorkspaceBootstrapSmoke,
|
||||
WORKSPACE_TEMPLATE_PACK_PATHS,
|
||||
} from "./lib/workspace-bootstrap-smoke.mjs";
|
||||
import { discoverBundledPluginRuntimeDeps } from "./postinstall-bundled-plugins.mjs";
|
||||
import { listStaticExtensionAssetOutputs } from "./runtime-postbuild.mjs";
|
||||
import { sparkleBuildFloorsFromShortVersion, type SparkleBuildFloors } from "./sparkle-build.ts";
|
||||
import { buildCmdExeCommandLine } from "./windows-cmd-helpers.mjs";
|
||||
|
||||
export { collectBundledExtensionManifestErrors } from "./lib/bundled-extension-manifest.ts";
|
||||
export {
|
||||
collectBuiltBundledPluginStagedRuntimeDependencyErrors,
|
||||
collectBundledPluginRootRuntimeMirrorErrors,
|
||||
collectDeclaredRootRuntimeDependencyMetadataErrors,
|
||||
collectRootDistBundledRuntimeMirrors,
|
||||
packageNameFromSpecifier,
|
||||
} from "./lib/bundled-plugin-root-runtime-mirrors.mjs";
|
||||
export { packageNameFromSpecifier } from "./lib/bundled-plugin-root-runtime-mirrors.mjs";
|
||||
|
||||
type PackFile = { path: string };
|
||||
type PackResult = { files?: PackFile[]; filename?: string; unpackedSize?: number };
|
||||
@@ -73,7 +55,6 @@ const requiredPathGroups = [
|
||||
...WORKSPACE_TEMPLATE_PACK_PATHS,
|
||||
"scripts/npm-runner.mjs",
|
||||
"scripts/preinstall-package-manager-warning.mjs",
|
||||
"scripts/lib/bundled-runtime-deps-install.mjs",
|
||||
"scripts/lib/package-dist-imports.mjs",
|
||||
"scripts/postinstall-bundled-plugins.mjs",
|
||||
"dist/plugin-sdk/compat.js",
|
||||
@@ -166,32 +147,18 @@ function collectBundledExtensions(): BundledExtension[] {
|
||||
function checkBundledExtensionMetadata() {
|
||||
const extensions = collectBundledExtensions();
|
||||
const manifestErrors = collectBundledExtensionManifestErrors(extensions);
|
||||
const rootPackage = JSON.parse(readFileSync(resolve("package.json"), "utf8")) as {
|
||||
dependencies?: Record<string, string>;
|
||||
optionalDependencies?: Record<string, string>;
|
||||
};
|
||||
const bundledRuntimeDependencySpecs = collectBundledPluginRuntimeDependencySpecs(
|
||||
resolve("extensions"),
|
||||
);
|
||||
const requiredRootMirrors = collectRootDistBundledRuntimeMirrors({
|
||||
bundledRuntimeDependencySpecs,
|
||||
distDir: resolve("dist"),
|
||||
});
|
||||
const rootMirrorErrors = collectBundledPluginRootRuntimeMirrorErrors({
|
||||
bundledRuntimeDependencySpecs,
|
||||
requiredRootMirrors,
|
||||
rootPackageJson: rootPackage,
|
||||
});
|
||||
const rootMirrorMetadataErrors = collectDeclaredRootRuntimeDependencyMetadataErrors(rootPackage);
|
||||
const builtArtifactErrors = collectBuiltBundledPluginStagedRuntimeDependencyErrors({
|
||||
bundledPluginsDir: resolve("dist/extensions"),
|
||||
});
|
||||
const errors = [
|
||||
...manifestErrors,
|
||||
...rootMirrorErrors,
|
||||
...rootMirrorMetadataErrors,
|
||||
...builtArtifactErrors,
|
||||
];
|
||||
const dependencyConflictErrors = [...bundledRuntimeDependencySpecs.entries()]
|
||||
.flatMap(([dependencyName, record]) =>
|
||||
record.conflicts.map(
|
||||
(conflict) =>
|
||||
`bundled runtime dependency '${dependencyName}' has conflicting plugin specs: ${record.pluginIds.join(", ")} use '${record.spec}', ${conflict.pluginId} uses '${conflict.spec}'.`,
|
||||
),
|
||||
)
|
||||
.toSorted((left, right) => left.localeCompare(right));
|
||||
const errors = [...manifestErrors, ...dependencyConflictErrors];
|
||||
if (errors.length > 0) {
|
||||
console.error("release-check: bundled extension manifest validation failed:");
|
||||
for (const error of errors) {
|
||||
@@ -346,107 +313,6 @@ function runPackedBundledPluginPostinstall(packageRoot: string): void {
|
||||
});
|
||||
}
|
||||
|
||||
export function collectInstalledBundledPluginRuntimeDepErrors(packageRoot: string): string[] {
|
||||
const extensionsDir = join(packageRoot, "dist", "extensions");
|
||||
if (!existsSync(extensionsDir)) {
|
||||
return [];
|
||||
}
|
||||
const runtimeDeps = discoverBundledPluginRuntimeDeps({ extensionsDir });
|
||||
return runtimeDeps
|
||||
.filter((dep) => !existsSync(join(packageRoot, dep.sentinelPath)))
|
||||
.map((dep) => {
|
||||
const owners = dep.pluginIds.length > 0 ? dep.pluginIds.join(", ") : "unknown";
|
||||
return `bundled plugin runtime dependency '${dep.name}@${dep.version}' (owners: ${owners}) is missing at ${dep.sentinelPath}.`;
|
||||
})
|
||||
.toSorted((left, right) => left.localeCompare(right));
|
||||
}
|
||||
|
||||
function bundledRuntimeDependencySentinelPath(
|
||||
packageRoot: string,
|
||||
pluginId: string,
|
||||
dependencyName: string,
|
||||
): string {
|
||||
return join(
|
||||
packageRoot,
|
||||
"dist",
|
||||
"extensions",
|
||||
pluginId,
|
||||
"node_modules",
|
||||
...dependencyName.split("/"),
|
||||
"package.json",
|
||||
);
|
||||
}
|
||||
|
||||
export function bundledRuntimeDependencySentinelCandidates(
|
||||
packageRoot: string,
|
||||
pluginId: string,
|
||||
dependencyName: string,
|
||||
env: NodeJS.ProcessEnv = process.env,
|
||||
): string[] {
|
||||
const dependencyParts = dependencyName.split("/");
|
||||
const packageRoots = [
|
||||
packageRoot,
|
||||
(() => {
|
||||
try {
|
||||
return realpathSync(packageRoot);
|
||||
} catch {
|
||||
return packageRoot;
|
||||
}
|
||||
})(),
|
||||
];
|
||||
const runtimeRoots = packageRoots.flatMap((root) => [
|
||||
resolveBundledRuntimeDependencyPackageInstallRoot(root, { env }),
|
||||
resolveBundledRuntimeDependencyInstallRoot(join(root, "dist", "extensions", pluginId), {
|
||||
env,
|
||||
}),
|
||||
]);
|
||||
return [
|
||||
bundledRuntimeDependencySentinelPath(packageRoot, pluginId, dependencyName),
|
||||
join(packageRoot, "dist", "extensions", "node_modules", ...dependencyParts, "package.json"),
|
||||
join(packageRoot, "node_modules", ...dependencyParts, "package.json"),
|
||||
...runtimeRoots.map((root) => join(root, "node_modules", ...dependencyParts, "package.json")),
|
||||
].filter((candidate, index, candidates) => candidates.indexOf(candidate) === index);
|
||||
}
|
||||
|
||||
function assertBundledRuntimeDependencyAbsent(params: {
|
||||
packageRoot: string;
|
||||
pluginId: string;
|
||||
dependencyName: string;
|
||||
env?: NodeJS.ProcessEnv;
|
||||
}): void {
|
||||
const sentinelPath = bundledRuntimeDependencySentinelCandidates(
|
||||
params.packageRoot,
|
||||
params.pluginId,
|
||||
params.dependencyName,
|
||||
params.env,
|
||||
).find((candidate) => existsSync(candidate));
|
||||
if (sentinelPath) {
|
||||
throw new Error(
|
||||
`release-check: ${params.pluginId} runtime dependency ${params.dependencyName} was installed before plugin activation (${sentinelPath}).`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
function assertBundledRuntimeDependencyPresent(params: {
|
||||
packageRoot: string;
|
||||
pluginId: string;
|
||||
dependencyName: string;
|
||||
env?: NodeJS.ProcessEnv;
|
||||
}): void {
|
||||
const sentinelPath = bundledRuntimeDependencySentinelCandidates(
|
||||
params.packageRoot,
|
||||
params.pluginId,
|
||||
params.dependencyName,
|
||||
params.env,
|
||||
).find((candidate) => existsSync(candidate));
|
||||
if (sentinelPath) {
|
||||
return;
|
||||
}
|
||||
throw new Error(
|
||||
`release-check: ${params.pluginId} runtime dependency ${params.dependencyName} was not installed during plugin activation.`,
|
||||
);
|
||||
}
|
||||
|
||||
function writePackedBundledPluginActivationConfig(homeDir: string): void {
|
||||
const configPath = join(homeDir, ".openclaw", "openclaw.json");
|
||||
mkdirSync(join(homeDir, ".openclaw"), { recursive: true });
|
||||
@@ -490,20 +356,12 @@ function writePackedBundledPluginActivationConfig(homeDir: string): void {
|
||||
}
|
||||
|
||||
function runPackedBundledPluginActivationSmoke(packageRoot: string, tmpRoot: string): void {
|
||||
const lazyDeps = [
|
||||
{ pluginId: "browser", dependencyName: "playwright-core" },
|
||||
{ pluginId: "feishu", dependencyName: "@larksuiteoapi/node-sdk" },
|
||||
] as const;
|
||||
|
||||
const homeDir = join(tmpRoot, "activation-home");
|
||||
mkdirSync(homeDir, { recursive: true });
|
||||
const env = createPackedCliSmokeEnv(process.env, {
|
||||
HOME: homeDir,
|
||||
OPENAI_API_KEY: "sk-openclaw-release-check",
|
||||
});
|
||||
for (const dep of lazyDeps) {
|
||||
assertBundledRuntimeDependencyAbsent({ packageRoot, env, ...dep });
|
||||
}
|
||||
|
||||
writePackedBundledPluginActivationConfig(homeDir);
|
||||
execFileSync(process.execPath, [join(packageRoot, "openclaw.mjs"), "plugins", "doctor"], {
|
||||
@@ -511,10 +369,6 @@ function runPackedBundledPluginActivationSmoke(packageRoot: string, tmpRoot: str
|
||||
stdio: "inherit",
|
||||
env,
|
||||
});
|
||||
|
||||
for (const dep of lazyDeps) {
|
||||
assertBundledRuntimeDependencyPresent({ packageRoot, env, ...dep });
|
||||
}
|
||||
}
|
||||
|
||||
function runPackedTaskRegistryControlRuntimeSmoke(packageRoot: string): void {
|
||||
@@ -684,7 +538,7 @@ export function collectForbiddenPackPaths(paths: Iterable<string>): string[] {
|
||||
return [...paths]
|
||||
.filter(
|
||||
(path) =>
|
||||
isBundledRuntimeDepsInstallStagePath(path) ||
|
||||
isLegacyPluginDependencyInstallStagePath(path) ||
|
||||
forbiddenPrefixes.some((prefix) => path.startsWith(prefix)) ||
|
||||
/(^|\/)\.openclaw-runtime-deps-[^/]+(\/|$)/u.test(path) ||
|
||||
path.endsWith("/.openclaw-runtime-deps-stamp.json") ||
|
||||
|
||||
@@ -3,11 +3,7 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { pathToFileURL } from "node:url";
|
||||
import {
|
||||
collectBundledPluginRuntimeDependencySpecs,
|
||||
collectRootDistBundledRuntimeMirrors,
|
||||
packageNameFromSpecifier,
|
||||
} from "./lib/bundled-plugin-root-runtime-mirrors.mjs";
|
||||
import { packageNameFromSpecifier } from "./lib/bundled-plugin-root-runtime-mirrors.mjs";
|
||||
|
||||
const DEFAULT_SCAN_ROOTS = ["src", "extensions", "packages", "ui", "scripts", "test"];
|
||||
const SCANNED_EXTENSIONS = new Set([".cjs", ".cts", ".js", ".jsx", ".mjs", ".mts", ".ts", ".tsx"]);
|
||||
@@ -23,6 +19,12 @@ const DYNAMIC_CONSTANT_IMPORT_PATTERNS = [
|
||||
/\brequire\s*\(\s*([_$A-Za-z][\w$]*)\s*\)/g,
|
||||
/\b(?:require|[_$A-Za-z][\w$]*require[\w$]*)\.resolve\s*\(\s*([_$A-Za-z][\w$]*)\s*\)/gi,
|
||||
];
|
||||
const ROOT_OWNED_EXTENSION_RUNTIME_DEPENDENCIES = new Map([
|
||||
[
|
||||
"playwright-core",
|
||||
"keep at root; the internal browser runtime is shipped with core even though downloadable browser-adjacent plugins also declare it",
|
||||
],
|
||||
]);
|
||||
|
||||
function readJson(filePath) {
|
||||
return JSON.parse(fs.readFileSync(filePath, "utf8"));
|
||||
@@ -149,16 +151,6 @@ function sectionSetIsSubsetOf(sectionSet, allowed) {
|
||||
export function classifyRootDependencyOwnership(record) {
|
||||
const sections = new Set(record.sections);
|
||||
|
||||
if (record.rootMirrorImporters.length > 0) {
|
||||
if (!sectionSetContainsCore(sections)) {
|
||||
return {
|
||||
category: "extension_only_localizable",
|
||||
recommendation:
|
||||
"remove from root package.json and rely on owning extension manifests plus doctor --fix",
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if (sections.size === 0) {
|
||||
return {
|
||||
category: "unreferenced",
|
||||
@@ -187,6 +179,17 @@ export function classifyRootDependencyOwnership(record) {
|
||||
};
|
||||
}
|
||||
|
||||
const rootOwnedExtensionRuntime = ROOT_OWNED_EXTENSION_RUNTIME_DEPENDENCIES.get(record.depName);
|
||||
if (
|
||||
rootOwnedExtensionRuntime &&
|
||||
sectionSetIsSubsetOf(sections, new Set(["extensions", "test"]))
|
||||
) {
|
||||
return {
|
||||
category: "root_owned_extension_runtime",
|
||||
recommendation: rootOwnedExtensionRuntime,
|
||||
};
|
||||
}
|
||||
|
||||
if (sectionSetIsSubsetOf(sections, new Set(["extensions", "test"]))) {
|
||||
return {
|
||||
category: "extension_only_localizable",
|
||||
@@ -216,7 +219,6 @@ export function collectRootDependencyOwnershipAudit(params = {}) {
|
||||
sections: new Set(),
|
||||
files: new Set(),
|
||||
declaredInExtensions: [],
|
||||
rootMirrorImporters: [],
|
||||
spec: rootDependencies[depName],
|
||||
},
|
||||
]),
|
||||
@@ -247,26 +249,6 @@ export function collectRootDependencyOwnershipAudit(params = {}) {
|
||||
}
|
||||
}
|
||||
|
||||
const distDir = path.join(repoRoot, "dist");
|
||||
if (fs.existsSync(distDir)) {
|
||||
const bundledSpecs = collectBundledPluginRuntimeDependencySpecs(
|
||||
path.join(repoRoot, "extensions"),
|
||||
);
|
||||
const rootMirrors = collectRootDistBundledRuntimeMirrors({
|
||||
bundledRuntimeDependencySpecs: bundledSpecs,
|
||||
distDir,
|
||||
});
|
||||
for (const [depName, mirror] of rootMirrors) {
|
||||
const record = records.get(depName);
|
||||
if (!record) {
|
||||
continue;
|
||||
}
|
||||
record.rootMirrorImporters = [...mirror.importers].toSorted((left, right) =>
|
||||
left.localeCompare(right),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return [...records.values()]
|
||||
.map((record) => {
|
||||
const classification = classifyRootDependencyOwnership({
|
||||
@@ -280,7 +262,6 @@ export function collectRootDependencyOwnershipAudit(params = {}) {
|
||||
fileCount: record.files.size,
|
||||
sampleFiles: [...record.files].slice(0, 5),
|
||||
declaredInExtensions: record.declaredInExtensions,
|
||||
rootMirrorImporters: record.rootMirrorImporters,
|
||||
category: classification.category,
|
||||
recommendation: classification.recommendation,
|
||||
};
|
||||
@@ -320,9 +301,6 @@ function printTextReport(records) {
|
||||
if (record.declaredInExtensions.length > 0) {
|
||||
details.push(`extensions=${record.declaredInExtensions.join(",")}`);
|
||||
}
|
||||
if (record.rootMirrorImporters.length > 0) {
|
||||
details.push(`rootDist=${record.rootMirrorImporters.join(",")}`);
|
||||
}
|
||||
console.log(`- ${record.depName}@${record.spec} :: ${details.join(" | ")}`);
|
||||
console.log(` ${record.recommendation}`);
|
||||
}
|
||||
|
||||
@@ -33,7 +33,6 @@ const runtimePostBuildWatchedPaths = [
|
||||
"scripts/runtime-postbuild-stamp.mjs",
|
||||
"scripts/runtime-postbuild-shared.mjs",
|
||||
"scripts/runtime-postbuild.mjs",
|
||||
"scripts/stage-bundled-plugin-runtime-deps.mjs",
|
||||
"scripts/stage-bundled-plugin-runtime.mjs",
|
||||
"scripts/windows-cmd-helpers.mjs",
|
||||
"scripts/write-official-channel-catalog.mjs",
|
||||
|
||||
@@ -5,7 +5,6 @@ import { fileURLToPath, pathToFileURL } from "node:url";
|
||||
import { copyBundledPluginMetadata } from "./copy-bundled-plugin-metadata.mjs";
|
||||
import { copyPluginSdkRootAlias } from "./copy-plugin-sdk-root-alias.mjs";
|
||||
import { writeTextFileIfChanged } from "./runtime-postbuild-shared.mjs";
|
||||
import { stageBundledPluginRuntimeDeps } from "./stage-bundled-plugin-runtime-deps.mjs";
|
||||
import { stageBundledPluginRuntime } from "./stage-bundled-plugin-runtime.mjs";
|
||||
import { writeOfficialChannelCatalog } from "./write-official-channel-catalog.mjs";
|
||||
|
||||
@@ -124,7 +123,6 @@ export function runRuntimePostBuild(params = {}) {
|
||||
runPhase("plugin SDK root alias", () => copyPluginSdkRootAlias(params));
|
||||
runPhase("bundled plugin metadata", () => copyBundledPluginMetadata(params));
|
||||
runPhase("official channel catalog", () => writeOfficialChannelCatalog(params));
|
||||
runPhase("bundled plugin runtime deps", () => stageBundledPluginRuntimeDeps(params));
|
||||
runPhase("bundled plugin runtime overlay", () => stageBundledPluginRuntime(params));
|
||||
runPhase("stable root runtime aliases", () => writeStableRootRuntimeAliases(params));
|
||||
runPhase("legacy CLI exit compat chunks", () => writeLegacyCliExitCompatChunks(params));
|
||||
|
||||
@@ -1,461 +0,0 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { performance } from "node:perf_hooks";
|
||||
import { pathToFileURL } from "node:url";
|
||||
import {
|
||||
createBundledRuntimeDependencyInstallArgs,
|
||||
createBundledRuntimeDependencyInstallEnv,
|
||||
runBundledRuntimeDependencyNpmInstall,
|
||||
} from "./lib/bundled-runtime-deps-install.mjs";
|
||||
import {
|
||||
listBundledPluginRuntimeDirs,
|
||||
resolveInstalledWorkspacePluginRoot,
|
||||
stageInstalledRootRuntimeDeps,
|
||||
} from "./lib/bundled-runtime-deps-materialize.mjs";
|
||||
import {
|
||||
readInstalledDependencyVersionFromRoot,
|
||||
resolveInstalledDependencyRoot,
|
||||
resolveInstalledRuntimeClosureFingerprint,
|
||||
} from "./lib/bundled-runtime-deps-package-tree.mjs";
|
||||
import {
|
||||
pruneStagedRuntimeDependencyCargo,
|
||||
resolveRuntimeDepPruneConfig,
|
||||
} from "./lib/bundled-runtime-deps-prune.mjs";
|
||||
import {
|
||||
assertPathIsNotSymlink,
|
||||
makePluginOwnedTempDir,
|
||||
removeLegacyBundledRuntimeDepsSymlink,
|
||||
removeOwnedTempPathBestEffort,
|
||||
removePathIfExists,
|
||||
removeStaleRuntimeDepsTempDirs,
|
||||
replaceDirAtomically,
|
||||
sanitizeTempPrefixSegment,
|
||||
writeJsonAtomically,
|
||||
writeRuntimeDepsTempOwner,
|
||||
} from "./lib/bundled-runtime-deps-stage-state.mjs";
|
||||
import {
|
||||
createRuntimeDepsCheapFingerprint,
|
||||
createRuntimeDepsFingerprint,
|
||||
readRuntimeDepsStamp,
|
||||
resolveLegacyRuntimeDepsStampPath,
|
||||
resolveRuntimeDepsStampPath,
|
||||
} from "./lib/bundled-runtime-deps-stamp.mjs";
|
||||
import { resolveNpmRunner } from "./npm-runner.mjs";
|
||||
|
||||
const exactVersionSpecRe = /^\d+\.\d+\.\d+(?:-[0-9A-Za-z.-]+)?$/u;
|
||||
|
||||
function readJson(filePath) {
|
||||
return JSON.parse(fs.readFileSync(filePath, "utf8"));
|
||||
}
|
||||
|
||||
function writeJson(filePath, value) {
|
||||
fs.writeFileSync(filePath, `${JSON.stringify(value, null, 2)}\n`, "utf8");
|
||||
}
|
||||
|
||||
function hasRuntimeDeps(packageJson) {
|
||||
return (
|
||||
Object.keys(packageJson.dependencies ?? {}).length > 0 ||
|
||||
Object.keys(packageJson.optionalDependencies ?? {}).length > 0
|
||||
);
|
||||
}
|
||||
|
||||
function shouldStageRuntimeDeps(packageJson) {
|
||||
return packageJson.openclaw?.bundle?.stageRuntimeDependencies === true;
|
||||
}
|
||||
|
||||
function sanitizeBundledManifestForRuntimeInstall(pluginDir) {
|
||||
const manifestPath = path.join(pluginDir, "package.json");
|
||||
const packageJson = readJson(manifestPath);
|
||||
let changed = false;
|
||||
|
||||
if (packageJson.peerDependencies) {
|
||||
delete packageJson.peerDependencies;
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if (packageJson.peerDependenciesMeta) {
|
||||
delete packageJson.peerDependenciesMeta;
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if (packageJson.devDependencies) {
|
||||
delete packageJson.devDependencies;
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if (changed) {
|
||||
writeJson(manifestPath, packageJson);
|
||||
}
|
||||
|
||||
return packageJson;
|
||||
}
|
||||
|
||||
function isSafeRuntimeDependencySpec(spec) {
|
||||
if (typeof spec !== "string") {
|
||||
return false;
|
||||
}
|
||||
const normalized = spec.trim();
|
||||
if (normalized.length === 0) {
|
||||
return false;
|
||||
}
|
||||
const lower = normalized.toLowerCase();
|
||||
if (
|
||||
lower.startsWith("file:") ||
|
||||
lower.startsWith("link:") ||
|
||||
lower.startsWith("workspace:") ||
|
||||
lower.startsWith("git:") ||
|
||||
lower.startsWith("git+") ||
|
||||
lower.startsWith("ssh:") ||
|
||||
lower.startsWith("http:") ||
|
||||
lower.startsWith("https:")
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
if (normalized.includes("://")) {
|
||||
return false;
|
||||
}
|
||||
if (
|
||||
normalized.startsWith("/") ||
|
||||
normalized.startsWith("\\") ||
|
||||
normalized.startsWith("../") ||
|
||||
normalized.startsWith("..\\") ||
|
||||
normalized.includes("/../") ||
|
||||
normalized.includes("\\..\\")
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function assertSafeRuntimeDependencySpec(depName, spec) {
|
||||
if (!isSafeRuntimeDependencySpec(spec)) {
|
||||
throw new Error(`disallowed runtime dependency spec for ${depName}: ${spec}`);
|
||||
}
|
||||
}
|
||||
|
||||
function resolveInstalledPinnedDependencyVersion(params) {
|
||||
const depRoot = resolveInstalledDependencyRoot({
|
||||
depName: params.depName,
|
||||
enforceSpec: true,
|
||||
parentPackageRoot: params.parentPackageRoot,
|
||||
rootNodeModulesDir: params.rootNodeModulesDir,
|
||||
spec: params.spec,
|
||||
});
|
||||
if (depRoot === null) {
|
||||
return null;
|
||||
}
|
||||
return readInstalledDependencyVersionFromRoot(depRoot);
|
||||
}
|
||||
|
||||
function resolvePinnedRuntimeDependencyVersion(params) {
|
||||
assertSafeRuntimeDependencySpec(params.depName, params.spec);
|
||||
if (exactVersionSpecRe.test(params.spec)) {
|
||||
return params.spec;
|
||||
}
|
||||
const installedVersion = resolveInstalledPinnedDependencyVersion(params);
|
||||
if (typeof installedVersion === "string" && exactVersionSpecRe.test(installedVersion)) {
|
||||
return installedVersion;
|
||||
}
|
||||
throw new Error(
|
||||
`runtime dependency ${params.depName} must resolve to an exact installed version, got: ${params.spec}`,
|
||||
);
|
||||
}
|
||||
|
||||
function collectRuntimeDependencyGroups(packageJson) {
|
||||
const readRuntimeGroup = (group) =>
|
||||
Object.fromEntries(
|
||||
Object.entries(group ?? {}).filter(
|
||||
(entry) => typeof entry[0] === "string" && typeof entry[1] === "string",
|
||||
),
|
||||
);
|
||||
return {
|
||||
dependencies: readRuntimeGroup(packageJson.dependencies),
|
||||
optionalDependencies: readRuntimeGroup(packageJson.optionalDependencies),
|
||||
};
|
||||
}
|
||||
|
||||
function resolvePinnedRuntimeDependencyGroup(group, params = {}) {
|
||||
return Object.fromEntries(
|
||||
Object.entries(group).map(([name, version]) => {
|
||||
const pinnedVersion = resolvePinnedRuntimeDependencyVersion({
|
||||
depName: name,
|
||||
parentPackageRoot: params.directDependencyPackageRoot ?? null,
|
||||
rootNodeModulesDir: params.rootNodeModulesDir ?? path.join(process.cwd(), "node_modules"),
|
||||
spec: version,
|
||||
});
|
||||
return [name, pinnedVersion];
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
function resolvePinnedRuntimeDependencyGroups(packageJson, params = {}) {
|
||||
const runtimeGroups = collectRuntimeDependencyGroups(packageJson);
|
||||
return {
|
||||
dependencies: resolvePinnedRuntimeDependencyGroup(runtimeGroups.dependencies, params),
|
||||
optionalDependencies: resolvePinnedRuntimeDependencyGroup(
|
||||
runtimeGroups.optionalDependencies,
|
||||
params,
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
export function collectRuntimeDependencyInstallManifest(packageJson, params = {}) {
|
||||
const pinnedGroups = resolvePinnedRuntimeDependencyGroups(packageJson, params);
|
||||
return createRuntimeInstallManifest(params.pluginId ?? "runtime-deps", pinnedGroups);
|
||||
}
|
||||
|
||||
export function collectRuntimeDependencyInstallSpecs(packageJson, params = {}) {
|
||||
const manifest = collectRuntimeDependencyInstallManifest(packageJson, params);
|
||||
const buildSpecs = (group) =>
|
||||
Object.entries(group ?? {}).map(([name, version]) => `${name}@${String(version)}`);
|
||||
return {
|
||||
dependencies: buildSpecs(manifest.dependencies),
|
||||
optionalDependencies: buildSpecs(manifest.optionalDependencies),
|
||||
};
|
||||
}
|
||||
|
||||
function createRuntimeInstallManifest(pluginId, pinnedGroups) {
|
||||
const manifest = {
|
||||
name: `openclaw-runtime-deps-${sanitizeTempPrefixSegment(pluginId)}`,
|
||||
private: true,
|
||||
version: "0.0.0",
|
||||
};
|
||||
if (Object.keys(pinnedGroups.dependencies).length > 0) {
|
||||
manifest.dependencies = pinnedGroups.dependencies;
|
||||
}
|
||||
if (Object.keys(pinnedGroups.optionalDependencies).length > 0) {
|
||||
manifest.optionalDependencies = pinnedGroups.optionalDependencies;
|
||||
}
|
||||
return manifest;
|
||||
}
|
||||
|
||||
function runNpmInstall(params) {
|
||||
return runBundledRuntimeDependencyNpmInstall({
|
||||
cwd: params.cwd,
|
||||
npmRunner: params.npmRunner,
|
||||
env: createBundledRuntimeDependencyInstallEnv(params.npmRunner.env ?? process.env, {
|
||||
ci: true,
|
||||
quiet: true,
|
||||
}),
|
||||
spawnSyncImpl: params.spawnSyncImpl,
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
timeout: params.timeoutMs ?? 5 * 60 * 1000,
|
||||
});
|
||||
}
|
||||
|
||||
function installPluginRuntimeDepsWithRetries(params) {
|
||||
const { attempts = 3 } = params;
|
||||
let lastError;
|
||||
for (let attempt = 1; attempt <= attempts; attempt += 1) {
|
||||
try {
|
||||
params.install({ ...params.installParams, attempt });
|
||||
return;
|
||||
} catch (error) {
|
||||
lastError = error;
|
||||
if (attempt === attempts) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
throw lastError;
|
||||
}
|
||||
|
||||
function createRootRuntimeStagingError(params) {
|
||||
const runtimeDependencyNames = [
|
||||
...Object.keys(params.packageJson.dependencies ?? {}),
|
||||
...Object.keys(params.packageJson.optionalDependencies ?? {}),
|
||||
].toSorted((left, right) => left.localeCompare(right));
|
||||
const dependencyLabel =
|
||||
runtimeDependencyNames.length > 0 ? runtimeDependencyNames.join(", ") : "<none>";
|
||||
const causeMessage =
|
||||
params.cause instanceof Error && typeof params.cause.message === "string"
|
||||
? ` Cause: ${params.cause.message}`
|
||||
: "";
|
||||
return new Error(
|
||||
`failed to stage bundled runtime deps for ${params.pluginId}: ` +
|
||||
`runtime dependency closure must resolve from the installed root workspace graph. ` +
|
||||
`Could not materialize: ${dependencyLabel}. ` +
|
||||
"Run `pnpm install` and rebuild from a trusted workspace checkout, or provide a hardened fallback installer." +
|
||||
causeMessage,
|
||||
);
|
||||
}
|
||||
|
||||
function installPluginRuntimeDeps(params) {
|
||||
const {
|
||||
directDependencyPackageRoot = null,
|
||||
cheapFingerprint,
|
||||
fingerprint,
|
||||
packageJson,
|
||||
pluginDir,
|
||||
pluginId,
|
||||
pruneConfig,
|
||||
repoRoot,
|
||||
stampPath,
|
||||
} = params;
|
||||
const nodeModulesDir = path.join(pluginDir, "node_modules");
|
||||
const tempInstallDir = makePluginOwnedTempDir(pluginDir, "install");
|
||||
const pinnedGroups = resolvePinnedRuntimeDependencyGroups(packageJson, {
|
||||
directDependencyPackageRoot,
|
||||
rootNodeModulesDir: path.join(repoRoot, "node_modules"),
|
||||
});
|
||||
const requiredDependencyCount = Object.keys(pinnedGroups.dependencies).length;
|
||||
try {
|
||||
writeJson(
|
||||
path.join(tempInstallDir, "package.json"),
|
||||
createRuntimeInstallManifest(pluginId, pinnedGroups),
|
||||
);
|
||||
if (requiredDependencyCount > 0 || Object.keys(pinnedGroups.optionalDependencies).length > 0) {
|
||||
runNpmInstall({
|
||||
cwd: tempInstallDir,
|
||||
npmRunner: resolveNpmRunner({
|
||||
npmArgs: createBundledRuntimeDependencyInstallArgs([], {
|
||||
noAudit: true,
|
||||
noFund: true,
|
||||
silent: true,
|
||||
}),
|
||||
}),
|
||||
});
|
||||
}
|
||||
const stagedNodeModulesDir = path.join(tempInstallDir, "node_modules");
|
||||
if (requiredDependencyCount > 0 && !fs.existsSync(stagedNodeModulesDir)) {
|
||||
throw new Error(
|
||||
`failed to stage bundled runtime deps for ${pluginId}: explicit npm install produced no node_modules directory`,
|
||||
);
|
||||
}
|
||||
if (fs.existsSync(stagedNodeModulesDir)) {
|
||||
pruneStagedRuntimeDependencyCargo(stagedNodeModulesDir, pruneConfig);
|
||||
removeLegacyBundledRuntimeDepsSymlink(nodeModulesDir, repoRoot);
|
||||
replaceDirAtomically(nodeModulesDir, stagedNodeModulesDir);
|
||||
} else {
|
||||
removeLegacyBundledRuntimeDepsSymlink(nodeModulesDir, repoRoot);
|
||||
assertPathIsNotSymlink(nodeModulesDir, "remove runtime deps");
|
||||
removePathIfExists(nodeModulesDir);
|
||||
}
|
||||
writeJsonAtomically(stampPath, {
|
||||
cheapFingerprint,
|
||||
fingerprint,
|
||||
generatedAt: new Date().toISOString(),
|
||||
});
|
||||
} finally {
|
||||
removeOwnedTempPathBestEffort(tempInstallDir);
|
||||
}
|
||||
}
|
||||
|
||||
export function stageBundledPluginRuntimeDeps(params = {}) {
|
||||
const repoRoot = params.cwd ?? params.repoRoot ?? process.cwd();
|
||||
const installPluginRuntimeDepsImpl =
|
||||
params.installPluginRuntimeDepsImpl ?? installPluginRuntimeDeps;
|
||||
const installAttempts = params.installAttempts ?? 3;
|
||||
const pruneConfig = resolveRuntimeDepPruneConfig(params);
|
||||
const timingsEnabled =
|
||||
params.timings ?? process.env.OPENCLAW_RUNTIME_DEPS_STAGING_TIMINGS === "1";
|
||||
const runPluginPhase = (pluginId, label, action) => {
|
||||
const startedAt = performance.now();
|
||||
try {
|
||||
return action();
|
||||
} finally {
|
||||
if (timingsEnabled) {
|
||||
const durationMs = Math.round(performance.now() - startedAt);
|
||||
console.error(
|
||||
`stage-bundled-plugin-runtime-deps: ${pluginId} ${label} completed in ${durationMs}ms`,
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
for (const pluginDir of listBundledPluginRuntimeDirs(repoRoot)) {
|
||||
const pluginId = path.basename(pluginDir);
|
||||
const sourcePluginRoot = resolveInstalledWorkspacePluginRoot(repoRoot, pluginId);
|
||||
const directDependencyPackageRoot = fs.existsSync(path.join(sourcePluginRoot, "package.json"))
|
||||
? sourcePluginRoot
|
||||
: null;
|
||||
const packageJson = runPluginPhase(pluginId, "sanitize manifest", () =>
|
||||
sanitizeBundledManifestForRuntimeInstall(pluginDir),
|
||||
);
|
||||
const nodeModulesDir = path.join(pluginDir, "node_modules");
|
||||
const stampPath = resolveRuntimeDepsStampPath(repoRoot, pluginId);
|
||||
const legacyStampPath = resolveLegacyRuntimeDepsStampPath(pluginDir);
|
||||
runPluginPhase(pluginId, "cleanup stale runtime dirs", () => {
|
||||
removePathIfExists(legacyStampPath);
|
||||
removeStaleRuntimeDepsTempDirs(pluginDir);
|
||||
});
|
||||
if (!hasRuntimeDeps(packageJson) || !shouldStageRuntimeDeps(packageJson)) {
|
||||
runPluginPhase(pluginId, "remove unstaged runtime deps", () => {
|
||||
removePathIfExists(nodeModulesDir);
|
||||
removePathIfExists(stampPath);
|
||||
});
|
||||
continue;
|
||||
}
|
||||
const cheapFingerprint = runPluginPhase(pluginId, "cheap fingerprint", () =>
|
||||
createRuntimeDepsCheapFingerprint(packageJson, pruneConfig, {
|
||||
repoRoot,
|
||||
}),
|
||||
);
|
||||
const stamp = readRuntimeDepsStamp(stampPath);
|
||||
const rootInstalledRuntimeFingerprint = runPluginPhase(
|
||||
pluginId,
|
||||
"installed runtime fingerprint",
|
||||
() =>
|
||||
resolveInstalledRuntimeClosureFingerprint({
|
||||
directDependencyPackageRoot,
|
||||
packageJson,
|
||||
rootNodeModulesDir: path.join(repoRoot, "node_modules"),
|
||||
}),
|
||||
);
|
||||
const fingerprint = createRuntimeDepsFingerprint(packageJson, pruneConfig, {
|
||||
repoRoot,
|
||||
rootInstalledRuntimeFingerprint,
|
||||
});
|
||||
if (fs.existsSync(nodeModulesDir) && stamp?.fingerprint === fingerprint) {
|
||||
runPluginPhase(pluginId, "reuse staged runtime deps", () => {});
|
||||
continue;
|
||||
}
|
||||
if (
|
||||
runPluginPhase(pluginId, "stage installed root runtime deps", () =>
|
||||
stageInstalledRootRuntimeDeps({
|
||||
directDependencyPackageRoot,
|
||||
fingerprint,
|
||||
cheapFingerprint,
|
||||
packageJson,
|
||||
pluginDir,
|
||||
pruneConfig,
|
||||
repoRoot,
|
||||
stampPath,
|
||||
}),
|
||||
)
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
runPluginPhase(pluginId, "fallback install runtime deps", () =>
|
||||
installPluginRuntimeDepsWithRetries({
|
||||
attempts: installAttempts,
|
||||
install: installPluginRuntimeDepsImpl,
|
||||
installParams: {
|
||||
directDependencyPackageRoot,
|
||||
fingerprint,
|
||||
cheapFingerprint,
|
||||
packageJson,
|
||||
pluginDir,
|
||||
pluginId,
|
||||
pruneConfig,
|
||||
repoRoot,
|
||||
stampPath,
|
||||
},
|
||||
}),
|
||||
);
|
||||
} catch (error) {
|
||||
throw createRootRuntimeStagingError({ packageJson, pluginId, cause: error });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const __testing = {
|
||||
removeStaleRuntimeDepsTempDirs,
|
||||
replaceDirAtomically,
|
||||
runNpmInstall,
|
||||
writeRuntimeDepsTempOwner,
|
||||
};
|
||||
|
||||
if (import.meta.url === pathToFileURL(process.argv[1] ?? "").href) {
|
||||
stageBundledPluginRuntimeDeps();
|
||||
}
|
||||
@@ -3,10 +3,6 @@ import path from "node:path";
|
||||
import { pathToFileURL } from "node:url";
|
||||
import { removePathIfExists } from "./runtime-postbuild-shared.mjs";
|
||||
|
||||
function symlinkType() {
|
||||
return process.platform === "win32" ? "junction" : "dir";
|
||||
}
|
||||
|
||||
function relativeSymlinkTarget(sourcePath, targetPath) {
|
||||
const relativeTarget = path.relative(path.dirname(targetPath), sourcePath);
|
||||
return relativeTarget || ".";
|
||||
@@ -77,27 +73,6 @@ function writeJsonFile(targetPath, value) {
|
||||
fs.writeFileSync(targetPath, `${JSON.stringify(value, null, 2)}\n`, "utf8");
|
||||
}
|
||||
|
||||
function removeStaleOpenClawSelfReference(sourcePluginNodeModulesDir, repoRoot) {
|
||||
if (!fs.existsSync(sourcePluginNodeModulesDir)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const selfReferencePath = path.join(sourcePluginNodeModulesDir, "openclaw");
|
||||
try {
|
||||
const existing = fs.lstatSync(selfReferencePath);
|
||||
if (!existing.isSymbolicLink()) {
|
||||
return;
|
||||
}
|
||||
if (fs.realpathSync(selfReferencePath) === fs.realpathSync(repoRoot)) {
|
||||
removePathIfExists(selfReferencePath);
|
||||
}
|
||||
} catch (error) {
|
||||
if (error?.code !== "ENOENT") {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function ensureOpenClawExtensionAlias(params) {
|
||||
const pluginSdkDir = path.join(params.repoRoot, "dist", "plugin-sdk");
|
||||
if (!fs.existsSync(pluginSdkDir)) {
|
||||
@@ -231,21 +206,6 @@ function stagePluginRuntimeOverlay(sourceDir, targetDir, relativeDir = "") {
|
||||
}
|
||||
}
|
||||
|
||||
function linkPluginNodeModules(params) {
|
||||
const runtimeNodeModulesDir = path.join(params.runtimePluginDir, "node_modules");
|
||||
removePathIfExists(runtimeNodeModulesDir);
|
||||
if (!fs.existsSync(params.sourcePluginNodeModulesDir)) {
|
||||
return;
|
||||
}
|
||||
removeStaleOpenClawSelfReference(params.sourcePluginNodeModulesDir, params.repoRoot);
|
||||
ensureSymlink(
|
||||
params.sourcePluginNodeModulesDir,
|
||||
runtimeNodeModulesDir,
|
||||
symlinkType(),
|
||||
params.sourcePluginNodeModulesDir,
|
||||
);
|
||||
}
|
||||
|
||||
export function stageBundledPluginRuntime(params = {}) {
|
||||
const repoRoot = params.cwd ?? params.repoRoot ?? process.cwd();
|
||||
const distRoot = path.join(repoRoot, "dist");
|
||||
@@ -268,14 +228,8 @@ export function stageBundledPluginRuntime(params = {}) {
|
||||
}
|
||||
const distPluginDir = path.join(distExtensionsRoot, dirent.name);
|
||||
const runtimePluginDir = path.join(runtimeExtensionsRoot, dirent.name);
|
||||
const distPluginNodeModulesDir = path.join(distPluginDir, "node_modules");
|
||||
|
||||
stagePluginRuntimeOverlay(distPluginDir, runtimePluginDir);
|
||||
linkPluginNodeModules({
|
||||
repoRoot,
|
||||
runtimePluginDir,
|
||||
sourcePluginNodeModulesDir: distPluginNodeModulesDir,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,252 +0,0 @@
|
||||
import assert from "node:assert/strict";
|
||||
import { spawnSync } from "node:child_process";
|
||||
import fs from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { pathToFileURL } from "node:url";
|
||||
import {
|
||||
collectBuiltBundledPluginStagedRuntimeDependencyErrors,
|
||||
collectBundledPluginRootRuntimeMirrorErrors,
|
||||
collectBundledPluginRuntimeDependencySpecs,
|
||||
collectDeclaredRootRuntimeDependencyMetadataErrors,
|
||||
collectRootDistBundledRuntimeMirrors,
|
||||
} from "./lib/bundled-plugin-root-runtime-mirrors.mjs";
|
||||
import { parsePackageRootArg } from "./lib/package-root-args.mjs";
|
||||
|
||||
const { packageRoot } = parsePackageRootArg(
|
||||
process.argv.slice(2),
|
||||
"OPENCLAW_BUNDLED_RUNTIME_DEPS_ROOT",
|
||||
);
|
||||
const rootPackageJsonPath = path.join(packageRoot, "package.json");
|
||||
const builtPluginsDir = path.join(packageRoot, "dist", "extensions");
|
||||
|
||||
assert.ok(fs.existsSync(rootPackageJsonPath), `package.json missing from ${packageRoot}`);
|
||||
assert.ok(fs.existsSync(builtPluginsDir), `built bundled plugins missing from ${builtPluginsDir}`);
|
||||
|
||||
const rootPackageJson = JSON.parse(fs.readFileSync(rootPackageJsonPath, "utf8"));
|
||||
const bundledRuntimeDependencySpecs = collectBundledPluginRuntimeDependencySpecs(
|
||||
path.join(packageRoot, "extensions"),
|
||||
);
|
||||
const requiredRootMirrors = collectRootDistBundledRuntimeMirrors({
|
||||
bundledRuntimeDependencySpecs,
|
||||
distDir: path.join(packageRoot, "dist"),
|
||||
});
|
||||
const errors = [
|
||||
...collectBundledPluginRootRuntimeMirrorErrors({
|
||||
bundledRuntimeDependencySpecs,
|
||||
requiredRootMirrors,
|
||||
rootPackageJson,
|
||||
}),
|
||||
...collectDeclaredRootRuntimeDependencyMetadataErrors(rootPackageJson),
|
||||
...collectBuiltBundledPluginStagedRuntimeDependencyErrors({
|
||||
bundledPluginsDir: builtPluginsDir,
|
||||
}),
|
||||
];
|
||||
|
||||
assert.deepEqual(errors, [], errors.join("\n"));
|
||||
|
||||
/**
 * Resolve a package's directory inside a node_modules tree, expanding
 * scoped names such as "@scope/pkg" into nested path segments.
 */
function packageNodeModulesPath(nodeModulesDir, packageName) {
  const segments = packageName.split("/");
  return path.join(nodeModulesDir, ...segments);
}
|
||||
|
||||
/**
 * Write a minimal stub package for `packageName` into the staged
 * node_modules directory so the built browser plugin can resolve it without
 * a real install. "playwright-core" and "typebox" get hand-rolled entry
 * points mimicking the exports the plugin touches; any other package gets an
 * empty CommonJS module.
 */
function stageBrowserRuntimeDependencyStub(stageNodeModulesDir, packageName) {
  const packageDir = packageNodeModulesPath(stageNodeModulesDir, packageName);
  fs.mkdirSync(packageDir, { recursive: true });

  const manifest = { name: packageName, version: "0.0.0", main: "./index.cjs" };
  fs.writeFileSync(
    path.join(packageDir, "package.json"),
    `${JSON.stringify(manifest, null, 2)}\n`,
    "utf8",
  );

  // Default entry: an empty module. Special-cased packages override it below.
  let entrySource = "module.exports = {};\n";
  if (packageName === "playwright-core") {
    entrySource = [
      "module.exports = {",
      "  chromium: { marker: 'stub-chromium' },",
      "  devices: { 'Stub Device': { marker: 'stub-device' } },",
      "};",
      "",
    ].join("\n");
  } else if (packageName === "typebox") {
    // Proxy-based Type: any accessed builder method returns a schema stub.
    entrySource = [
      "const createSchema = (kind, value = {}) => ({ kind, ...value });",
      "const Type = new Proxy(function Type() {}, {",
      "  get(_target, prop) {",
      "    if (prop === Symbol.toStringTag) {",
      "      return 'Type';",
      "    }",
      "    return (...args) => createSchema(String(prop), { args });",
      "  },",
      "});",
      "module.exports = { Type };",
      "",
    ].join("\n");
  }
  fs.writeFileSync(path.join(packageDir, "index.cjs"), entrySource, "utf8");
}
|
||||
|
||||
/**
 * Locate the built pw-ai browser entry bundle under `distDir`, skipping the
 * pw-ai-state-* companion files. Throws an AssertionError when no candidate
 * exists; when several exist, the lexicographically first one wins.
 */
function findBuiltBrowserEntryPath(distDir) {
  const entryNameRe = /^pw-ai-(?!state-).*\.js$/u;
  const candidates = [];
  for (const entry of fs.readdirSync(distDir, { withFileTypes: true })) {
    if (entry.isFile() && entryNameRe.test(entry.name)) {
      candidates.push(path.join(distDir, entry.name));
    }
  }
  candidates.sort((left, right) => left.localeCompare(right));
  if (candidates.length === 0) {
    throw new assert.AssertionError({
      message: `missing built pw-ai entry under ${distDir}`,
    });
  }
  return candidates[0];
}
|
||||
|
||||
/**
 * Build a throwaway copy of the package under a temp root simulating a
 * runtime where playwright-core is absent from the root node_modules and any
 * missing browser runtime deps are provided as stubs under the staged
 * .openclaw/plugin-runtime-deps/browser/node_modules tree.
 * The caller owns cleanup of the returned tempRoot.
 */
function createBuiltBrowserImportSmokeFixture(packageRoot) {
  const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-built-browser-smoke-"));
  const tempDistDir = path.join(tempRoot, "dist");
  const tempNodeModulesDir = path.join(tempRoot, "node_modules");
  const stageNodeModulesDir = path.join(
    tempRoot,
    ".openclaw",
    "plugin-runtime-deps",
    "browser",
    "node_modules",
  );

  // Materialize dist/, package.json, and node_modules (dereferencing symlinks
  // so pnpm-style layouts survive the copy), then remove playwright-core from
  // the copied root tree so resolution must go through the staged deps.
  const copyOptions = { recursive: true, dereference: true };
  fs.cpSync(path.join(packageRoot, "dist"), tempDistDir, copyOptions);
  fs.copyFileSync(path.join(packageRoot, "package.json"), path.join(tempRoot, "package.json"));
  fs.cpSync(path.join(packageRoot, "node_modules"), tempNodeModulesDir, copyOptions);
  fs.rmSync(path.join(tempNodeModulesDir, "playwright-core"), {
    force: true,
    recursive: true,
  });

  assert.ok(!fs.existsSync(path.join(tempNodeModulesDir, "playwright-core")));
  fs.mkdirSync(stageNodeModulesDir, { recursive: true });
  assert.deepEqual(fs.readdirSync(stageNodeModulesDir), []);

  // Runtime deps = dependencies + optionalDependencies from the built browser
  // plugin manifest, keeping only entries with a non-empty string spec.
  const manifestPath = path.join(tempDistDir, "extensions", "browser", "package.json");
  const browserManifest = JSON.parse(fs.readFileSync(manifestPath, "utf8"));
  const runtimeDepEntries = [
    ...Object.entries(browserManifest.dependencies ?? {}),
    ...Object.entries(browserManifest.optionalDependencies ?? {}),
  ].filter(([, spec]) => typeof spec === "string" && spec.length > 0);
  const browserRuntimeDeps = new Map(runtimeDepEntries);

  // A dep is "missing" when neither the copied root node_modules nor the
  // staged directory contains its package.json sentinel.
  const missingDeps = [];
  for (const depName of browserRuntimeDeps.keys()) {
    const segments = depName.split("/");
    const rootSentinel = path.join(tempNodeModulesDir, ...segments, "package.json");
    const stagedSentinel = path.join(stageNodeModulesDir, ...segments, "package.json");
    if (!fs.existsSync(rootSentinel) && !fs.existsSync(stagedSentinel)) {
      missingDeps.push(depName);
    }
  }
  missingDeps.sort((left, right) => left.localeCompare(right));

  for (const depName of missingDeps) {
    stageBrowserRuntimeDependencyStub(stageNodeModulesDir, depName);
  }

  return {
    entryPath: findBuiltBrowserEntryPath(tempDistDir),
    stageNodeModulesDir,
    tempRoot,
  };
}
|
||||
|
||||
/**
 * Run an inline ES-module source string in a fresh Node process and return
 * the spawnSync result (status/signal plus stdout/stderr decoded as UTF-8).
 */
function runNodeEval(params) {
  const nodeArgs = ["--input-type=module", "--eval", params.source];
  const spawnOptions = { cwd: params.cwd, encoding: "utf8", env: params.env };
  return spawnSync(process.execPath, nodeArgs, spawnOptions);
}
|
||||
|
||||
/**
 * End-to-end smoke over a temp fixture: with playwright-core deleted from the
 * fixture's root node_modules, a bare ESM import of it must still fail even
 * with NODE_PATH pointing at the staged tree, while the built pw-ai entry
 * itself must import cleanly. The fixture is always removed, pass or fail.
 */
function runBuiltBrowserImportSmoke(packageRoot) {
  const fixture = createBuiltBrowserImportSmokeFixture(packageRoot);
  try {
    assert.ok(fs.existsSync(fixture.entryPath), `missing built pw-ai entry: ${fixture.entryPath}`);
    const rootPlaywrightDir = path.join(fixture.tempRoot, "node_modules", "playwright-core");
    assert.ok(
      !fs.existsSync(rootPlaywrightDir),
      "package-root playwright-core should be absent in the smoke fixture",
    );
    const stagedPlaywrightSentinel = path.join(
      fixture.stageNodeModulesDir,
      "playwright-core",
      "package.json",
    );
    assert.ok(
      fs.existsSync(stagedPlaywrightSentinel),
      "staged playwright-core should be present in the smoke fixture",
    );

    const evalEnv = { ...process.env, NODE_PATH: fixture.stageNodeModulesDir };

    // The child exits non-zero if the bare specifier unexpectedly resolves,
    // and rethrows any failure other than ERR_MODULE_NOT_FOUND.
    const rootEsmResult = runNodeEval({
      cwd: fixture.tempRoot,
      env: evalEnv,
      source:
        "await import('playwright-core')" +
        ".then(() => { process.exitCode = 1; })" +
        ".catch((error) => { if (error?.code !== 'ERR_MODULE_NOT_FOUND') throw error; });",
    });
    const rootEsmFailure = [
      "[build-smoke] native ESM unexpectedly resolved staged playwright-core",
      rootEsmResult.stdout.trim(),
      rootEsmResult.stderr.trim(),
    ]
      .filter(Boolean)
      .join("\n");
    assert.equal(rootEsmResult.status, 0, rootEsmFailure);

    // Importing the built entry bundle itself must succeed.
    const builtImportResult = runNodeEval({
      cwd: fixture.tempRoot,
      env: evalEnv,
      source: `await import(${JSON.stringify(pathToFileURL(fixture.entryPath).href)});`,
    });
    const builtImportFailure = [
      "[build-smoke] built browser pw-ai import failed",
      `status=${String(builtImportResult.status)}`,
      `signal=${String(builtImportResult.signal)}`,
      builtImportResult.stdout.trim(),
      builtImportResult.stderr.trim(),
    ]
      .filter(Boolean)
      .join("\n");
    assert.equal(builtImportResult.status, 0, builtImportFailure);
  } finally {
    fs.rmSync(fixture.tempRoot, { recursive: true, force: true });
  }
}
|
||||
|
||||
// Execute the built-browser import smoke; any assertion above throws and
// aborts the process with a non-zero exit code, so reaching the write below
// means the whole smoke passed.
runBuiltBrowserImportSmoke(packageRoot);

const successLine = `[build-smoke] bundled runtime dependency smoke passed packageRoot=${packageRoot}\n`;
process.stdout.write(successLine);
|
||||
@@ -4,7 +4,6 @@ import { spawn } from "node:child_process";
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { pathToFileURL } from "node:url";
|
||||
import { collectBundledPluginBuildEntries } from "./lib/bundled-plugin-build-entries.mjs";
|
||||
import { BUNDLED_PLUGIN_PATH_PREFIX } from "./lib/bundled-plugin-paths.mjs";
|
||||
import { resolvePnpmRunner } from "./pnpm-runner.mjs";
|
||||
import {
|
||||
@@ -22,7 +21,6 @@ const DEFAULT_CAPTURE_BYTES = 8 * 1024 * 1024;
|
||||
// Interval between progress heartbeats (ms) — presumably for the build runner's
// liveness output; usage is outside this chunk, confirm against the runner.
const DEFAULT_HEARTBEAT_MS = 30_000;
// Grace period (ms) before escalating child-process termination — usage is
// outside this chunk, confirm against the runner.
const TERMINATION_GRACE_MS = 5_000;
// Output roots that cleanTsdownOutputRoots wipes before a tsdown build.
const TSDOWN_OUTPUT_ROOTS = ["dist", "dist-runtime"];
// Name of the dist/ subdirectory holding built bundled plugins; entries under
// it are cleaned selectively so staged runtime deps survive.
const DIST_RUNTIME_DEPS_ROOT = "extensions";
|
||||
|
||||
function removeDistPluginNodeModulesSymlinks(rootDir) {
|
||||
const extensionsDir = path.join(rootDir, "extensions");
|
||||
@@ -56,17 +54,9 @@ function pruneStaleRuntimeSymlinks() {
|
||||
|
||||
export function cleanTsdownOutputRoots(params = {}) {
|
||||
const cwd = params.cwd ?? process.cwd();
|
||||
const stagedRuntimeDependencyPluginIds = collectStagedRuntimeDependencyPluginIds({
|
||||
cwd,
|
||||
env: params.env ?? process.env,
|
||||
});
|
||||
const fsImpl = params.fs ?? fs;
|
||||
for (const root of TSDOWN_OUTPUT_ROOTS) {
|
||||
const rootPath = path.join(cwd, root);
|
||||
if (root === "dist") {
|
||||
cleanDistOutputRoot(rootPath, stagedRuntimeDependencyPluginIds, fsImpl);
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
fsImpl.rmSync(rootPath, { force: true, recursive: true });
|
||||
} catch {
|
||||
@@ -75,86 +65,6 @@ export function cleanTsdownOutputRoots(params = {}) {
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Ids of bundled plugins whose package.json opts into staged runtime
 * dependencies. Best-effort: any failure while enumerating build entries
 * yields an empty set, so cleanup degrades to a full wipe instead of
 * aborting the build.
 */
function collectStagedRuntimeDependencyPluginIds(params) {
  try {
    const stagedIds = collectBundledPluginBuildEntries(params)
      .filter((entry) => shouldStageBundledPluginRuntimeDependencies(entry.packageJson))
      .map((entry) => entry.id);
    return new Set(stagedIds);
  } catch {
    return new Set();
  }
}
|
||||
|
||||
/**
 * Whether a plugin manifest opts into staging its runtime dependencies.
 * Only an explicit boolean `true` at openclaw.bundle.stageRuntimeDependencies
 * counts; missing manifests or truthy non-boolean values do not.
 */
function shouldStageBundledPluginRuntimeDependencies(packageJson) {
  const stageFlag = packageJson?.openclaw?.bundle?.stageRuntimeDependencies;
  return stageFlag === true;
}
|
||||
|
||||
/**
 * Remove everything directly under dist/ except the extensions subtree,
 * which is delegated to cleanDistExtensionsRoot so staged runtime deps can
 * survive. A missing dist/ and individual rm failures are tolerated.
 */
function cleanDistOutputRoot(distRoot, stagedRuntimeDependencyPluginIds, fsImpl) {
  let entries = [];
  try {
    entries = fsImpl.readdirSync(distRoot, { withFileTypes: true });
  } catch {
    return;
  }

  for (const entry of entries) {
    const entryPath = path.join(distRoot, entry.name);
    const isExtensionsDir = entry.isDirectory() && entry.name === DIST_RUNTIME_DEPS_ROOT;
    try {
      if (isExtensionsDir) {
        cleanDistExtensionsRoot(entryPath, stagedRuntimeDependencyPluginIds, fsImpl);
      } else {
        fsImpl.rmSync(entryPath, { force: true, recursive: true });
      }
    } catch {
      // Best-effort cleanup. tsdown will overwrite or recreate generated output.
    }
  }
}
|
||||
|
||||
/**
 * Within dist/extensions, fully delete plugins that do not stage runtime
 * dependencies; for plugins that do, clean selectively via
 * cleanDistPluginOutputRoot so their staged node_modules survive.
 * A missing directory and individual failures are tolerated.
 */
function cleanDistExtensionsRoot(extensionsDistRoot, stagedRuntimeDependencyPluginIds, fsImpl) {
  let entries = [];
  try {
    entries = fsImpl.readdirSync(extensionsDistRoot, { withFileTypes: true });
  } catch {
    return;
  }

  for (const entry of entries) {
    const pluginDistRoot = path.join(extensionsDistRoot, entry.name);
    const keepsStagedDeps =
      entry.isDirectory() && stagedRuntimeDependencyPluginIds.has(entry.name);
    try {
      if (keepsStagedDeps) {
        cleanDistPluginOutputRoot(pluginDistRoot, fsImpl);
      } else {
        fsImpl.rmSync(pluginDistRoot, { force: true, recursive: true });
      }
    } catch {
      // Best-effort cleanup. Runtime postbuild validates current plugin metadata next.
    }
  }
}
|
||||
|
||||
/**
 * Delete every entry inside a plugin's dist directory except its
 * node_modules directory (the staged runtime dependencies). A missing
 * directory and individual rm failures are tolerated.
 */
function cleanDistPluginOutputRoot(pluginDistRoot, fsImpl) {
  let entries = [];
  try {
    entries = fsImpl.readdirSync(pluginDistRoot, { withFileTypes: true });
  } catch {
    return;
  }

  const isStagedNodeModules = (entry) => entry.isDirectory() && entry.name === "node_modules";
  for (const entry of entries) {
    if (isStagedNodeModules(entry)) {
      continue;
    }
    try {
      fsImpl.rmSync(path.join(pluginDistRoot, entry.name), { force: true, recursive: true });
    } catch {
      // Best-effort cleanup. tsdown/runtime-postbuild will rewrite generated files.
    }
  }
}
|
||||
|
||||
export function pruneStaleRootChunkFiles(params = {}) {
|
||||
const cwd = params.cwd ?? process.cwd();
|
||||
const fsImpl = params.fs ?? fs;
|
||||
|
||||
Reference in New Issue
Block a user