test: harden plugin update validation

This commit is contained in:
Peter Steinberger
2026-05-01 23:19:30 +01:00
parent 34b40b007c
commit bcd6499abd
18 changed files with 610 additions and 22 deletions

View File

@@ -440,7 +440,9 @@ jobs:
artifact_name: ${{ needs.prepare_release_package.outputs.artifact_name }}
package_sha256: ${{ needs.prepare_release_package.outputs.package_sha256 }}
suite_profile: custom
docker_lanes: plugins-offline plugin-update
docker_lanes: doctor-switch update-channel-switch upgrade-survivor published-upgrade-survivor plugins-offline plugin-update
published_upgrade_survivor_baselines: release-history
published_upgrade_survivor_scenarios: reported-issues
telegram_mode: mock-openai
telegram_scenarios: telegram-help-command,telegram-commands-command,telegram-tools-compact-command,telegram-whoami-command,telegram-context-command,telegram-mention-gating
secrets:

View File

@@ -579,6 +579,14 @@
"source": "Testing",
"target": "测试"
},
{
"source": "Update and plugin tests",
"target": "更新和插件测试"
},
{
"source": "Testing updates and plugins",
"target": "更新和插件测试"
},
{
"source": "Async Exec Duplicate Completion Investigation",
"target": "Async Exec Duplicate Completion Investigation"

View File

@@ -188,7 +188,11 @@ Keep `workflow_ref` and `package_ref` separate. `workflow_ref` is the trusted wo
The `package` profile uses offline plugin coverage so published-package validation is not gated on live ClawHub availability. The optional Telegram lane reuses the `package-under-test` artifact in `NPM Telegram Beta E2E`, with the published npm spec path kept for standalone dispatches.
Release checks call Package Acceptance with `source=ref`, `package_ref=<release-ref>`, `workflow_ref=<release workflow ref>`, `suite_profile=custom`, `docker_lanes='plugins-offline plugin-update'`, and `telegram_mode=mock-openai`. Release-path Docker chunks cover the overlapping package/update/plugin lanes; Package Acceptance keeps offline plugin, update, and Telegram proof against the same resolved package tarball. Cross-OS release checks still cover OS-specific onboarding, installer, and platform behavior; package/update product validation should start with Package Acceptance. The `published-upgrade-survivor` Docker lane validates one published package baseline per run. In Package Acceptance, the resolved `package-under-test` tarball is always the candidate and `published_upgrade_survivor_baseline` selects the fallback published baseline, defaulting to `openclaw@latest`; failed-lane rerun commands preserve that baseline. Set `published_upgrade_survivor_baselines=release-history` to expand the lane across a deduped history matrix: the latest six stable releases, `2026.4.23`, and the latest stable release before `2026-03-15`. Set `published_upgrade_survivor_scenarios=reported-issues` to expand the same baselines across issue-shaped fixtures for Feishu config, preserved bootstrap/persona files, tilde log paths, and stale legacy plugin dependency roots. Local aggregate runs can pass exact package specs with `OPENCLAW_UPGRADE_SURVIVOR_BASELINE_SPECS`, keep a single lane with `OPENCLAW_UPGRADE_SURVIVOR_BASELINE_SPEC` such as `openclaw@2026.4.15`, or set `OPENCLAW_UPGRADE_SURVIVOR_SCENARIOS` for the scenario matrix. The published lane configures the baseline with a baked `openclaw config set` command recipe, records recipe steps in `summary.json`, and probes `/healthz`, `/readyz`, plus RPC status after Gateway start. 
The Windows packaged and installer fresh lanes also verify that an installed package can import a browser-control override from a raw absolute Windows path. The OpenAI cross-OS agent-turn smoke defaults to `OPENCLAW_CROSS_OS_OPENAI_MODEL` when set, otherwise `openai/gpt-5.5`, so the install and gateway proof stays on the preferred GPT-5 test model.
For the dedicated update and plugin testing policy, including local commands,
Docker lanes, Package Acceptance inputs, release defaults, and failure triage,
see [Testing updates and plugins](/help/testing-updates-plugins).
Release checks call Package Acceptance with `source=artifact`, the prepared release package artifact, `suite_profile=custom`, `docker_lanes='doctor-switch update-channel-switch upgrade-survivor published-upgrade-survivor plugins-offline plugin-update'`, `published_upgrade_survivor_baselines=release-history`, `published_upgrade_survivor_scenarios=reported-issues`, and `telegram_mode=mock-openai`. This keeps package migration, update, stale-plugin-dependency cleanup, offline plugin, plugin-update, and Telegram proof on the same resolved package tarball. Cross-OS release checks still cover OS-specific onboarding, installer, and platform behavior; package/update product validation should start with Package Acceptance. The `published-upgrade-survivor` Docker lane validates one published package baseline per run. In Package Acceptance, the resolved `package-under-test` tarball is always the candidate and `published_upgrade_survivor_baseline` selects the fallback published baseline, defaulting to `openclaw@latest`; failed-lane rerun commands preserve that baseline. Set `published_upgrade_survivor_baselines=release-history` to expand the lane across a deduped history matrix: the latest six stable releases, `2026.4.23`, and the latest stable release before `2026-03-15`. Set `published_upgrade_survivor_scenarios=reported-issues` to expand the same baselines across issue-shaped fixtures for Feishu config, preserved bootstrap/persona files, tilde log paths, and stale legacy plugin dependency roots. Local aggregate runs can pass exact package specs with `OPENCLAW_UPGRADE_SURVIVOR_BASELINE_SPECS`, keep a single lane with `OPENCLAW_UPGRADE_SURVIVOR_BASELINE_SPEC` such as `openclaw@2026.4.15`, or set `OPENCLAW_UPGRADE_SURVIVOR_SCENARIOS` for the scenario matrix. The published lane configures the baseline with a baked `openclaw config set` command recipe, records recipe steps in `summary.json`, and probes `/healthz`, `/readyz`, plus RPC status after Gateway start. 
The Windows packaged and installer fresh lanes also verify that an installed package can import a browser-control override from a raw absolute Windows path. The OpenAI cross-OS agent-turn smoke defaults to `OPENCLAW_CROSS_OS_OPENAI_MODEL` when set, otherwise `openai/gpt-5.5`, so the install and gateway proof stays on the preferred GPT-5 test model.
### Legacy compatibility windows

View File

@@ -1732,7 +1732,7 @@
},
{
"group": "Testing",
"pages": ["help/testing", "help/testing-live"]
"pages": ["help/testing", "help/testing-updates-plugins", "help/testing-live"]
},
{
"group": "Diagnostics",

View File

@@ -29,6 +29,7 @@ Quick "get unstuck" path for the most common problems:
## Testing
- [Testing](/help/testing) — test suites and Docker runners
- [Update and plugin tests](/help/testing-updates-plugins) — package update, migration, and plugin install validation
- [Live tests](/help/testing-live) — network-touching provider and CLI smokes
## Community and meta

View File

@@ -0,0 +1,228 @@
---
summary: "How OpenClaw validates update paths, package migrations, and plugin install/update behavior"
read_when:
- Changing OpenClaw update, doctor, package acceptance, or plugin install behavior
- Preparing or approving a release candidate
- Debugging package update, plugin dependency cleanup, or plugin install regressions
title: "Testing: updates and plugins"
sidebarTitle: "Update and plugin tests"
---
This is the dedicated checklist for update and plugin validation. The goal is
simple: prove the installable package can update real user state, repair stale
legacy state through `doctor`, and still install, load, update, and uninstall
plugins from the supported sources.
For the broader test runner map, see [Testing](/help/testing). For live provider
keys and network-touching suites, see [Testing live](/help/testing-live).
## What we protect
Update and plugin tests protect these contracts:
- A package tarball is complete, has a valid `dist/postinstall-inventory.json`,
and does not depend on unpacked repo files.
- A user can move from an older published package to the candidate package
without losing config, agents, sessions, workspaces, plugin allowlists, or
channel config.
- `openclaw doctor --fix --non-interactive` owns legacy cleanup and repair
paths. Startup should not grow hidden compatibility migrations for stale
plugin state.
- Plugin installs work from local directories, git repos, npm packages, and the
ClawHub registry path.
- Plugin npm dependencies are installed in the managed npm root, scanned before
trust, and removed through npm during uninstall so hoisted dependencies do not
linger.
- Plugin update is stable when nothing changed: install records, resolved source,
and enabled state stay intact.
## Local proof during development
Start narrow:
```bash
pnpm changed:lanes --json
pnpm check:changed
pnpm test:changed
```
For plugin install, uninstall, dependency, or package-inventory changes, also
run the focused tests that cover the edited seam:
```bash
pnpm test src/plugins/uninstall.test.ts src/infra/package-dist-inventory.test.ts test/scripts/package-acceptance-workflow.test.ts
```
Before any package Docker lane consumes a tarball, prove the package artifact:
```bash
pnpm release:check
```
`release:check` runs config/docs/API drift checks, writes the package dist
inventory, runs `npm pack --dry-run`, rejects forbidden packed files, installs
the tarball into a temp prefix, runs postinstall, and smokes bundled channel
entrypoints.
## Docker lanes
The Docker lanes are the product-level proof. They install or update a real
package inside Linux containers and assert behavior through CLI commands,
Gateway startup, HTTP probes, RPC status, and filesystem state.
Use focused lanes while iterating:
```bash
pnpm test:docker:plugins
pnpm test:docker:plugin-update
pnpm test:docker:upgrade-survivor
pnpm test:docker:published-upgrade-survivor
```
Important lanes:
- `test:docker:plugins` validates plugin install smoke, local folder installs,
local folders with preinstalled dependencies, git installs with package
dependencies, npm package dependency installs, local ClawHub fixture installs,
marketplace update behavior, and Claude-bundle enable/inspect. Set
`OPENCLAW_PLUGINS_E2E_CLAWHUB=0` to keep the ClawHub block hermetic/offline.
- `test:docker:plugin-update` validates that an unchanged installed plugin does
not reinstall or lose install metadata during `openclaw plugins update`.
- `test:docker:upgrade-survivor` installs the candidate tarball over a dirty
old-user fixture, runs package update plus non-interactive doctor, then starts
a loopback Gateway and checks state preservation.
- `test:docker:published-upgrade-survivor` first installs a published baseline,
configures it through a baked `openclaw config set` recipe, updates it to the
candidate tarball, runs doctor, checks legacy cleanup, starts the Gateway, and
probes `/healthz`, `/readyz`, and RPC status.
Useful published-upgrade survivor variants:
```bash
OPENCLAW_UPGRADE_SURVIVOR_BASELINE_SPEC=openclaw@2026.4.23 \
OPENCLAW_UPGRADE_SURVIVOR_SCENARIO=versioned-runtime-deps \
pnpm test:docker:published-upgrade-survivor
OPENCLAW_UPGRADE_SURVIVOR_BASELINE_SPEC=openclaw@latest \
OPENCLAW_UPGRADE_SURVIVOR_SCENARIO=bootstrap-persona \
pnpm test:docker:published-upgrade-survivor
```
Available scenarios are `base`, `feishu-channel`, `bootstrap-persona`,
`tilde-log-path`, and `versioned-runtime-deps`. In aggregate runs,
`OPENCLAW_UPGRADE_SURVIVOR_SCENARIOS=reported-issues` expands to all reported
issue-shaped scenarios.
## Package Acceptance
Package Acceptance is the GitHub-native package gate. It resolves one candidate
package into a `package-under-test` tarball, records version and SHA-256, then
runs reusable Docker E2E lanes against that exact tarball. The workflow harness
ref is separate from the package source ref, so current test logic can validate
older trusted releases.
Candidate sources:
- `source=npm`: validate `openclaw@beta`, `openclaw@latest`, or an exact
published version.
- `source=ref`: pack a trusted branch, tag, or commit with the selected current
harness.
- `source=url`: validate an HTTPS tarball with required `package_sha256`.
- `source=artifact`: reuse a tarball uploaded by another Actions run.
Release checks call Package Acceptance with the package/update/plugin set:
```text
doctor-switch update-channel-switch upgrade-survivor published-upgrade-survivor plugins-offline plugin-update
```
They also pass:
```text
published_upgrade_survivor_baselines=release-history
published_upgrade_survivor_scenarios=reported-issues
telegram_mode=mock-openai
```
This keeps package migration, update channel switching, stale plugin dependency
cleanup, offline plugin coverage, plugin update behavior, and Telegram package
QA on the same resolved artifact.
Run a package profile manually when validating a candidate before release:
```bash
gh workflow run package-acceptance.yml \
--ref main \
-f workflow_ref=main \
-f source=npm \
-f package_spec=openclaw@beta \
-f suite_profile=package \
-f published_upgrade_survivor_baselines=release-history \
-f published_upgrade_survivor_scenarios=reported-issues \
-f telegram_mode=mock-openai
```
Use `suite_profile=product` when the release question includes MCP channels,
cron/subagent cleanup, OpenAI web search, or OpenWebUI. Use `suite_profile=full`
only when you need full Docker release-path coverage.
## Release default
For release candidates, the default proof stack is:
1. `pnpm check:changed` and `pnpm test:changed` for source-level regressions.
2. `pnpm release:check` for package artifact integrity.
3. Package Acceptance `package` profile or the release-check custom package
lanes for install/update/plugin contracts.
4. Cross-OS release checks for OS-specific installer, onboarding, and platform
behavior.
5. Live suites only when the changed surface touches provider or hosted-service
behavior.
On maintainer machines, broad gates and Docker/package product proof should run
in Testbox unless explicitly doing local proof.
## Legacy compatibility
Compatibility leniency is narrow and time boxed:
- Packages through `2026.4.25`, including `2026.4.25-beta.*`, may tolerate
already-shipped package metadata gaps in Package Acceptance.
- The published `2026.4.26` package may warn for local build metadata stamp
files already shipped.
- Later packages must satisfy modern contracts. The same gaps fail instead of
warning or skipping.
Do not add new startup migrations for these old shapes. Add or extend a doctor
repair, then prove it with `upgrade-survivor` or `published-upgrade-survivor`.
## Adding coverage
When changing update or plugin behavior, add coverage at the lowest layer that
can fail for the right reason:
- Pure path or metadata logic: unit test beside the source.
- Package inventory or packed-file behavior: `package-dist-inventory` or tarball
checker test.
- CLI install/update behavior: Docker lane assertion or fixture.
- Published-release migration behavior: `published-upgrade-survivor` scenario.
- Registry/package source behavior: `test:docker:plugins` fixture or ClawHub
fixture server.
Keep new Docker fixtures hermetic by default. Use local fixture registries and
fake packages unless the point of the test is live registry behavior.
## Failure triage
Start with the artifact identity:
- Package Acceptance `resolve_package` summary: source, version, SHA-256, and
artifact name.
- Docker artifacts: `.artifacts/docker-tests/**/summary.json`,
`failures.json`, lane logs, and rerun commands.
- Upgrade survivor summary: `.artifacts/upgrade-survivor/summary.json`,
including baseline version, candidate version, scenario, phase timings, and
recipe steps.
Prefer rerunning the failed exact lane with the same package artifact over
rerunning the whole release umbrella.

View File

@@ -585,7 +585,9 @@ Use this decision table:
For the live model matrix, CLI backend smokes, ACP smokes, Codex app-server
harness, and all media-provider live tests (Deepgram, BytePlus, ComfyUI, image,
music, video, media harness) — plus credential handling for live runs — see
[Testing live suites](/help/testing-live).
[Testing live suites](/help/testing-live). For the dedicated update and
plugin validation checklist, see
[Testing updates and plugins](/help/testing-updates-plugins).
## Docker runners (optional "works in Linux" checks)
@@ -600,7 +602,7 @@ These Docker runners split into two buckets:
`OPENCLAW_LIVE_GATEWAY_MODEL_TIMEOUT_MS=90000`. Override those env vars when you
explicitly want the larger exhaustive scan.
- `test:docker:all` builds the live Docker image once via `test:docker:live-build`, packs OpenClaw once as an npm tarball through `scripts/package-openclaw-for-docker.mjs`, then builds/reuses two `scripts/e2e/Dockerfile` images. The bare image is only the Node/Git runner for install/update/plugin-dependency lanes; those lanes mount the prebuilt tarball. The functional image installs the same tarball into `/app` for built-app functionality lanes. Docker lane definitions live in `scripts/lib/docker-e2e-scenarios.mjs`; planner logic lives in `scripts/lib/docker-e2e-plan.mjs`; `scripts/test-docker-all.mjs` executes the selected plan. The aggregate uses a weighted local scheduler: `OPENCLAW_DOCKER_ALL_PARALLELISM` controls process slots, while resource caps keep heavy live, npm-install, and multi-service lanes from all starting at once. If a single lane is heavier than the active caps, the scheduler can still start it when the pool is empty and then keeps it running alone until capacity is available again. Defaults are 10 slots, `OPENCLAW_DOCKER_ALL_LIVE_LIMIT=9`, `OPENCLAW_DOCKER_ALL_NPM_LIMIT=10`, and `OPENCLAW_DOCKER_ALL_SERVICE_LIMIT=7`; tune `OPENCLAW_DOCKER_ALL_WEIGHT_LIMIT` or `OPENCLAW_DOCKER_ALL_DOCKER_LIMIT` only when the Docker host has more headroom. The runner performs a Docker preflight by default, removes stale OpenClaw E2E containers, prints status every 30 seconds, stores successful lane timings in `.artifacts/docker-tests/lane-timings.json`, and uses those timings to start longer lanes first on later runs. Use `OPENCLAW_DOCKER_ALL_DRY_RUN=1` to print the weighted lane manifest without building or running Docker, or `node scripts/test-docker-all.mjs --plan-json` to print the CI plan for selected lanes, package/image needs, and credentials.
- `Package Acceptance` is the GitHub-native package gate for "does this installable tarball work as a product?" It resolves one candidate package from `source=npm`, `source=ref`, `source=url`, or `source=artifact`, uploads it as `package-under-test`, then runs the reusable Docker E2E lanes against that exact tarball instead of repacking the selected ref. `workflow_ref` selects the trusted workflow/harness scripts, while `package_ref` selects the source commit/branch/tag to pack when `source=ref`; this lets current acceptance logic validate older trusted commits. Profiles are ordered by breadth: `smoke` is quick install/channel/agent plus gateway/config, `package` is the package/update/plugin contract plus the keyless upgrade-survivor fixture, the published-baseline upgrade survivor lane, and the default native replacement for most Parallels package/update coverage, `product` adds MCP channels, cron/subagent cleanup, OpenAI web search, and OpenWebUI, and `full` runs the release-path Docker chunks with OpenWebUI. For `published-upgrade-survivor`, Package Acceptance always uses `package-under-test` as the candidate and `published_upgrade_survivor_baseline` as the fallback published baseline, defaulting to `openclaw@latest`; set `published_upgrade_survivor_baselines=release-history` to shard the lane across a deduped matrix of the latest six stable releases, `2026.4.23`, and the latest stable release before `2026-03-15`. The published lane configures its baseline with a baked `openclaw config set` command recipe, then records recipe steps in the lane summary. Release validation runs a custom package delta (`plugins-offline plugin-update`) plus Telegram package QA because the release-path Docker chunks already cover the overlapping package/update/plugin lanes. 
Targeted GitHub Docker rerun commands generated from artifacts include prior package artifact, prepared image inputs, and the published upgrade-survivor baseline list when available, so failed lanes can avoid rebuilding the package and images.
- `Package Acceptance` is the GitHub-native package gate for "does this installable tarball work as a product?" It resolves one candidate package from `source=npm`, `source=ref`, `source=url`, or `source=artifact`, uploads it as `package-under-test`, then runs the reusable Docker E2E lanes against that exact tarball instead of repacking the selected ref. Profiles are ordered by breadth: `smoke`, `package`, `product`, and `full`. See [Testing updates and plugins](/help/testing-updates-plugins) for the package/update/plugin contract, published-upgrade survivor matrix, release defaults, and failure triage.
- Build and release checks run `scripts/check-cli-bootstrap-imports.mjs` after tsdown. The guard walks the static built graph from `dist/entry.js` and `dist/cli/run-main.js` and fails if pre-dispatch startup imports package dependencies such as Commander, prompt UI, undici, or logging before command dispatch; it also keeps the bundled gateway run chunk under budget and rejects static imports of known cold gateway paths. Packaged CLI smoke also covers root help, onboard help, doctor help, status, config schema, and a model-list command.
- Package Acceptance legacy compatibility is capped at `2026.4.25` (`2026.4.25-beta.*` included). Through that cutoff, the harness tolerates only shipped-package metadata gaps: omitted private QA inventory entries, missing `gateway install --wrapper`, missing patch files in the tarball-derived git fixture, missing persisted `update.channel`, legacy plugin install-record locations, missing marketplace install-record persistence, and config metadata migration during `plugins update`. For packages after `2026.4.25`, those paths are strict failures.
- Container smoke runners: `test:docker:openwebui`, `test:docker:onboard`, `test:docker:npm-onboard-channel-agent`, `test:docker:update-channel-switch`, `test:docker:upgrade-survivor`, `test:docker:published-upgrade-survivor`, `test:docker:session-runtime-context`, `test:docker:agents-delete-shared-workspace`, `test:docker:gateway-network`, `test:docker:browser-cdp-snapshot`, `test:docker:mcp-channels`, `test:docker:pi-bundle-mcp-tools`, `test:docker:cron-mcp-cleanup`, `test:docker:plugins`, `test:docker:plugin-update`, and `test:docker:config-reload` boot one or more real containers and verify higher-level integration paths.
@@ -814,4 +816,5 @@ When you fix a provider/model issue discovered in live:
## Related
- [Testing live](/help/testing-live)
- [Testing updates and plugins](/help/testing-updates-plugins)
- [CI](/ci)

View File

@@ -422,18 +422,24 @@ Supported candidate sources:
- `source=url`: download an HTTPS `.tgz` with required `package_sha256`
- `source=artifact`: reuse a `.tgz` uploaded by another GitHub Actions run
`OpenClaw Release Checks` runs Package Acceptance with `source=ref`,
`package_ref=<release-ref>`, `suite_profile=custom`,
`docker_lanes=plugins-offline plugin-update`, and `telegram_mode=mock-openai`.
The release-path Docker chunks cover the overlapping install, update, and
plugin-update lanes; Package Acceptance keeps offline plugin fixtures, plugin
update, and Telegram package QA against the same resolved tarball. It is the
GitHub-native
`OpenClaw Release Checks` runs Package Acceptance with `source=artifact`, the
prepared release package artifact, `suite_profile=custom`,
`docker_lanes=doctor-switch update-channel-switch upgrade-survivor published-upgrade-survivor plugins-offline plugin-update`,
`published_upgrade_survivor_baselines=release-history`,
`published_upgrade_survivor_scenarios=reported-issues`, and
`telegram_mode=mock-openai`. Package Acceptance keeps migration, update, stale
plugin dependency cleanup, offline plugin fixtures, plugin update, and Telegram
package QA against the same resolved tarball. It is the GitHub-native
replacement for most of the package/update coverage that previously required
Parallels. Cross-OS release checks still matter for OS-specific onboarding,
installer, and platform behavior, but package/update product validation should
prefer Package Acceptance.
The canonical checklist for update and plugin validation is
[Testing updates and plugins](/help/testing-updates-plugins). Use it when
deciding which local, Docker, Package Acceptance, or release-check lane proves a
plugin install/update, doctor cleanup, or published-package migration change.
Legacy package-acceptance leniency is intentionally time boxed. Packages through
`2026.4.25` may use the compatibility path for metadata gaps already published
to npm: private QA inventory entries missing from the tarball, missing

View File

@@ -6,6 +6,7 @@ title: "Tests"
---
- Full testing kit (suites, live, Docker): [Testing](/help/testing)
- Update and plugin package validation: [Testing updates and plugins](/help/testing-updates-plugins)
- `pnpm test:force`: Kills any lingering gateway process holding the default control port, then runs the full Vitest suite with an isolated gateway port so server tests don't collide with a running instance. Use this when a prior gateway run left port 18789 occupied.
- `pnpm test:coverage`: Runs the unit suite with V8 coverage (via `vitest.unit.config.ts`). This is a loaded-file unit coverage gate, not whole-repo all-file coverage. Thresholds are 70% lines/functions/statements and 55% branches. Because `coverage.all` is false, the gate measures files loaded by the unit coverage suite instead of treating every split-lane source file as uncovered.
@@ -142,3 +143,4 @@ pnpm test:docker:qr
- [Testing](/help/testing)
- [Testing live](/help/testing-live)
- [Testing updates and plugins](/help/testing-updates-plugins)

View File

@@ -5,6 +5,15 @@ function writePluginManifest(file, id) {
writeJson(file, { id, configSchema: { type: "object", properties: {} } });
}
// Creates a minimal stand-in for the "is-number" npm package inside `dir`,
// so plugin-dependency fixtures never need to reach the live registry.
function writeFakeIsNumberPackage(dir) {
  const manifest = {
    name: "is-number",
    version: "7.0.0",
    main: "index.js",
  };
  writeJson(path.join(dir, "package.json"), manifest);
  const implementation = "module.exports = (value) => typeof value === 'number';\n";
  write(path.join(dir, "index.js"), implementation);
}
function writePluginDemo([dir]) {
write(
path.join(requireArg(dir, "dir"), "index.js"),
@@ -35,6 +44,22 @@ function writePlugin([dir, id, version, method, name]) {
writePluginManifest(path.join(dir, "openclaw.plugin.json"), id);
}
// Writes a plugin fixture whose entrypoint requires "is-number" and that
// ships the dependency preinstalled under its own node_modules (vendored),
// so install lanes can prove dependencies are carried over without npm.
function writePluginWithVendoredDependency([dir, id, version, method, name]) {
  // Start from the plain plugin fixture, then layer the dependency on top.
  writePlugin([dir, id, version, method, name]);
  writeJson(path.join(dir, "package.json"), {
    name: `@openclaw/${id}`,
    version,
    dependencies: { "is-number": "7.0.0" },
    openclaw: { extensions: ["./index.js"] },
  });
  const entrySource =
    `const isNumber = require("is-number");\n` +
    `module.exports = { id: ${JSON.stringify(id)}, name: ${JSON.stringify(name)}, register(api) { api.registerGatewayMethod(${JSON.stringify(method)}, async () => ({ ok: isNumber(42) })); }, };\n`;
  write(path.join(dir, "index.js"), entrySource);
  writeFakeIsNumberPackage(path.join(dir, "node_modules", "is-number"));
}
function writePluginWithCli([dir, id, version, method, name, cliRoot, cliOutput]) {
for (const [value, label] of [
[dir, "dir"],
@@ -50,11 +75,13 @@ function writePluginWithCli([dir, id, version, method, name, cliRoot, cliOutput]
writeJson(path.join(dir, "package.json"), {
name: `@openclaw/${id}`,
version,
dependencies: { "is-number": "file:./deps/is-number" },
openclaw: { extensions: ["./index.js"] },
});
writeFakeIsNumberPackage(path.join(dir, "deps", "is-number"));
write(
path.join(dir, "index.js"),
`module.exports = { id: ${JSON.stringify(id)}, name: ${JSON.stringify(name)}, register(api) { api.registerGatewayMethod(${JSON.stringify(method)}, async () => ({ ok: true })); api.registerCli(({ program }) => { const root = program.command(${JSON.stringify(cliRoot)}).description(${JSON.stringify(`${name} fixture command`)}); root.command("ping").description("Print fixture ping output").action(() => { console.log(${JSON.stringify(cliOutput)}); }); }, { descriptors: [{ name: ${JSON.stringify(cliRoot)}, description: ${JSON.stringify(`${name} fixture command`)}, hasSubcommands: true }] }); }, };\n`,
`const isNumber = require("is-number");\nmodule.exports = { id: ${JSON.stringify(id)}, name: ${JSON.stringify(name)}, register(api) { api.registerGatewayMethod(${JSON.stringify(method)}, async () => ({ ok: isNumber(42) })); api.registerCli(({ program }) => { const root = program.command(${JSON.stringify(cliRoot)}).description(${JSON.stringify(`${name} fixture command`)}); root.command("ping").description("Print fixture ping output").action(() => { console.log(${JSON.stringify(cliOutput)}); }); }, { descriptors: [{ name: ${JSON.stringify(cliRoot)}, description: ${JSON.stringify(`${name} fixture command`)}, hasSubcommands: true }] }); }, };\n`,
);
writePluginManifest(path.join(dir, "openclaw.plugin.json"), id);
}
@@ -99,6 +126,7 @@ function writePluginMarketplace([root]) {
export const pluginCommands = {
"plugin-demo": writePluginDemo,
plugin: writePlugin,
"plugin-vendored-dep": writePluginWithVendoredDependency,
"plugin-cli": writePluginWithCli,
"plugin-manifest": ([file, id]) =>
writePluginManifest(requireArg(file, "file"), requireArg(id, "id")),

View File

@@ -258,6 +258,11 @@ function assertGitPlugin() {
throw new Error(`git install path should point at cloned repo root: ${installPath}`);
}
assertRealPathInside(gitRoot, installPath, "git install path");
const dependencyPackagePath = path.join(installPath, "node_modules", "is-number", "package.json");
if (!fs.existsSync(dependencyPackagePath)) {
throw new Error(`missing git plugin installed dependency: ${dependencyPackagePath}`);
}
assertRealPathInside(installPath, dependencyPackagePath, "git plugin installed dependency");
}
function assertRealPathInside(parentPath, childPath, label) {
@@ -292,6 +297,36 @@ function assertClawHubExternalInstallContract(installPath) {
assertRealPathInside(installPath, dependencyPackagePath, "ClawHub isolated dependency");
}
// Asserts the "install from local folder with preinstalled deps" lane:
// the install record must point back at the source directory, and the
// vendored is-number dependency must have been copied into the managed
// install path on disk.
function assertPluginDirDeps() {
  const expectedSourceDir = process.argv[3];
  assertSimplePlugin(
    "/tmp/plugins-dir-deps.json",
    "/tmp/plugins-dir-deps-inspect.json",
    "demo-plugin-dir-deps",
    "demo.dir.deps",
  );
  const installRecord = getInstallRecords()["demo-plugin-dir-deps"];
  if (!installRecord) {
    throw new Error("missing local dependency plugin install record");
  }
  if (installRecord.source !== "path") {
    throw new Error(`unexpected local dependency plugin source: ${installRecord.source}`);
  }
  if (installRecord.sourcePath !== expectedSourceDir) {
    throw new Error(`unexpected local dependency plugin source path: ${installRecord.sourcePath}`);
  }
  // Expand a leading "~" the same way the CLI records it before probing disk.
  const resolvedInstallPath = installRecord.installPath?.replace(/^~(?=$|\/)/u, process.env.HOME);
  if (!resolvedInstallPath || !fs.existsSync(resolvedInstallPath)) {
    throw new Error(`local dependency plugin install path missing on disk: ${resolvedInstallPath}`);
  }
  const dependencyManifest = path.join(resolvedInstallPath, "node_modules", "is-number", "package.json");
  if (!fs.existsSync(dependencyManifest)) {
    throw new Error(`missing copied local plugin dependency: ${dependencyManifest}`);
  }
  assertRealPathInside(resolvedInstallPath, dependencyManifest, "local plugin copied dependency");
}
function assertMarketplaceUpdated() {
const data = readJson("/tmp/plugins-marketplace-updated.json");
const inspect = readJson("/tmp/plugins-marketplace-updated-inspect.json");
@@ -459,6 +494,7 @@ const commands = {
"demo-plugin-dir",
"demo.dir",
),
"plugin-dir-deps": assertPluginDirDeps,
"plugin-file": () =>
assertSimplePlugin(
"/tmp/plugins4.json",

View File

@@ -32,6 +32,16 @@ write_fixture_plugin_with_cli() {
node scripts/e2e/lib/fixture.mjs plugin-cli "$dir" "$id" "$version" "$method" "$name" "$cli_root" "$cli_output"
}
write_fixture_plugin_with_vendored_dependency() {
  # Generate a fixture plugin whose dependency tree is already vendored under
  # node_modules, so install flows can be exercised without a registry.
  local dir="$1" id="$2" version="$3" method="$4" name="$5"
  node scripts/e2e/lib/fixture.mjs plugin-vendored-dep "$dir" "$id" "$version" "$method" "$name"
}
write_fixture_manifest() {
local file="$1"
local id="$2"

View File

@@ -46,6 +46,16 @@ node "$OPENCLAW_ENTRY" plugins inspect demo-plugin-dir --runtime --json >/tmp/pl
node scripts/e2e/lib/plugins/assertions.mjs plugin-dir
echo "Testing install from local folder with preinstalled dependencies..."
# Stage a fixture plugin that already vendors its dependency, then install it
# straight from the directory path.
dir_deps_plugin="$(mktemp -d "/tmp/openclaw-plugin-dir-deps.XXXXXX")"
write_fixture_plugin_with_vendored_dependency "$dir_deps_plugin" demo-plugin-dir-deps 0.0.1 demo.dir.deps "Demo Plugin DIR Deps"
run_logged install-dir-deps node "$OPENCLAW_ENTRY" plugins install "$dir_deps_plugin"
# Capture list/inspect JSON, then run the assertion script, which checks the
# install record and that the vendored dependency was copied into place.
node "$OPENCLAW_ENTRY" plugins list --json >/tmp/plugins-dir-deps.json
node "$OPENCLAW_ENTRY" plugins inspect demo-plugin-dir-deps --runtime --json >/tmp/plugins-dir-deps-inspect.json
node scripts/e2e/lib/plugins/assertions.mjs plugin-dir-deps "$dir_deps_plugin"
echo "Testing install from npm spec (file:)..."
file_pack_dir="$(mktemp -d "/tmp/openclaw-plugin-filepack.XXXXXX")"
write_fixture_plugin "$file_pack_dir/package" demo-plugin-file 0.0.1 demo.file "Demo Plugin FILE"

View File

@@ -144,6 +144,49 @@ describe("package dist inventory", () => {
});
});
it("omits packaged extension node_modules while keeping extension runtime files", async () => {
  await withTempDir(
    { prefix: "openclaw-dist-inventory-extension-node-modules-" },
    async (packageRoot) => {
      // One real runtime file that must survive, plus two node_modules trees
      // (root SDK alias and per-extension dependency) that must be omitted.
      const keptRuntimeFile = path.join(
        packageRoot,
        "dist",
        "extensions",
        "demo",
        "runtime-api.js",
      );
      const omittedPackageFiles = [
        path.join(
          packageRoot,
          "dist",
          "extensions",
          "node_modules",
          "openclaw",
          "package.json",
        ),
        path.join(
          packageRoot,
          "dist",
          "extensions",
          "demo",
          "node_modules",
          "left-pad",
          "package.json",
        ),
      ];
      for (const filePath of [keptRuntimeFile, ...omittedPackageFiles]) {
        await fs.mkdir(path.dirname(filePath), { recursive: true });
      }
      await fs.writeFile(keptRuntimeFile, "export {};\n", "utf8");
      for (const filePath of omittedPackageFiles) {
        await fs.writeFile(filePath, "{}", "utf8");
      }
      // Only the runtime file should appear in the inventory.
      await expect(writePackageDistInventory(packageRoot)).resolves.toEqual([
        "dist/extensions/demo/runtime-api.js",
      ]);
    },
  );
});
it("reports runtime-created install staging dirs during installed dist verification", async () => {
await withTempDir({ prefix: "openclaw-dist-inventory-stage-" }, async (packageRoot) => {
const realFile = path.join(packageRoot, "dist", "real-AbC123.js");

View File

@@ -32,6 +32,8 @@ const OMITTED_PRIVATE_QA_PLUGIN_SDK_FILES = new Set([
]);
const OMITTED_PRIVATE_QA_DIST_PREFIXES = ["dist/qa-runtime-"];
const OMITTED_DIST_SUBTREE_PATTERNS = [
/^dist\/extensions\/node_modules(?:\/|$)/u,
/^dist\/extensions\/[^/]+\/node_modules(?:\/|$)/u,
/^dist\/extensions\/qa-matrix(?:\/|$)/u,
new RegExp(`^dist/plugin-sdk/extensions/${LEGACY_QA_CHANNEL_DIR}(?:/|$)`, "u"),
new RegExp(`^dist/plugin-sdk/extensions/${LEGACY_QA_LAB_DIR}(?:/|$)`, "u"),

View File

@@ -17,6 +17,12 @@ import {
uninstallPlugin,
} from "./uninstall.js";
// Hoisted so the vi.mock factory below can reference it before module init.
const runCommandWithTimeoutMock = vi.hoisted(() => vi.fn());
// Replace the real process-exec helper so tests can stub npm invocations and
// inspect the exact command/env that uninstall passes to it.
vi.mock("../process/exec.js", () => ({
  runCommandWithTimeout: runCommandWithTimeoutMock,
}));
// Convenience aliases for the nested plugin config / install-record shapes.
type PluginConfig = NonNullable<OpenClawConfig["plugins"]>;
type PluginInstallRecord = NonNullable<PluginConfig["installs"]>[string];
@@ -668,6 +674,15 @@ describe("uninstallPlugin", () => {
// Tracks every temp dir created in this suite so it can be cleaned up later.
const tempDirs: string[] = [];
beforeEach(async () => {
  runCommandWithTimeoutMock.mockReset();
  // Default to a successful npm invocation; individual tests override with
  // mockResolvedValueOnce when they need a failing npm run.
  runCommandWithTimeoutMock.mockResolvedValue({
    code: 0,
    stdout: "",
    stderr: "",
    signal: null,
    killed: false,
    termination: "exit",
  });
  tempDir = await makeTrackedTempDirAsync("uninstall-test", tempDirs);
});
@@ -748,6 +763,112 @@ describe("uninstallPlugin", () => {
await expect(fs.access(pluginDir)).rejects.toThrow();
});
it("uninstalls npm-managed packages through npm before deleting the package directory", async () => {
  // Arrange an npm-managed install: the plugin package dir lives under the
  // managed npm root's node_modules, with a hoisted dependency beside it.
  const stateDir = path.join(tempDir, "state");
  const extensionsDir = path.join(stateDir, "extensions");
  const npmRoot = path.join(stateDir, "npm");
  const pluginDir = path.join(npmRoot, "node_modules", "@openclaw", "kitchen-sink");
  const hoistedDir = path.join(npmRoot, "node_modules", "is-number");
  await fs.mkdir(pluginDir, { recursive: true });
  await fs.mkdir(hoistedDir, { recursive: true });
  await fs.writeFile(path.join(pluginDir, "package.json"), "{}");
  await fs.writeFile(path.join(hoistedDir, "package.json"), "{}");
  const plan = planPluginUninstall({
    config: createPluginConfig({
      entries: createSinglePluginEntries("openclaw-kitchen-sink-fixture"),
      installs: {
        "openclaw-kitchen-sink-fixture": {
          source: "npm",
          spec: "@openclaw/kitchen-sink@1.0.0",
          installPath: pluginDir,
        },
      },
    }),
    pluginId: "openclaw-kitchen-sink-fixture",
    deleteFiles: true,
    extensionsDir,
  });
  expect(plan.ok).toBe(true);
  if (!plan.ok) {
    throw new Error(plan.error);
  }
  // The plan must request npm cleanup — with the scoped package name derived
  // from the install path — in addition to removing the directory itself.
  expect(plan.directoryRemoval).toEqual({
    target: pluginDir,
    cleanup: {
      kind: "npm",
      npmRoot,
      packageName: "@openclaw/kitchen-sink",
    },
  });
  const applied = await applyPluginUninstallDirectoryRemoval(plan.directoryRemoval);
  expect(applied).toEqual({ directoryRemoved: true, warnings: [] });
  // npm runs with scripts/audit/fund disabled and a hardened env before the
  // directory removal happens.
  expect(runCommandWithTimeoutMock).toHaveBeenCalledWith(
    [
      "npm",
      "uninstall",
      "--loglevel=error",
      "--ignore-scripts",
      "--no-audit",
      "--no-fund",
      "--prefix",
      npmRoot,
      "@openclaw/kitchen-sink",
    ],
    expect.objectContaining({
      timeoutMs: 300_000,
      env: expect.objectContaining({
        NPM_CONFIG_IGNORE_SCRIPTS: "true",
        npm_config_package_lock: "true",
      }),
    }),
  );
  // The package directory itself is gone afterwards.
  await expect(fs.access(pluginDir)).rejects.toThrow();
});
it("warns and still removes npm package dirs when npm prune cleanup fails", async () => {
  // First (and only relevant) npm invocation fails; uninstall should degrade
  // to a warning instead of aborting the directory removal.
  runCommandWithTimeoutMock.mockResolvedValueOnce({
    code: 1,
    stdout: "",
    stderr: "registry unavailable",
    signal: null,
    killed: false,
    termination: "exit",
  });
  const stateDir = path.join(tempDir, "state");
  const extensionsDir = path.join(stateDir, "extensions");
  const npmRoot = path.join(stateDir, "npm");
  const pluginDir = path.join(npmRoot, "node_modules", "demo-plugin");
  await fs.mkdir(pluginDir, { recursive: true });
  const result = await uninstallPlugin({
    config: createPluginConfig({
      entries: createSinglePluginEntries("demo-plugin"),
      installs: {
        "demo-plugin": {
          source: "npm",
          spec: "demo-plugin@1.0.0",
          installPath: pluginDir,
        },
      },
    }),
    pluginId: "demo-plugin",
    deleteFiles: true,
    extensionsDir,
  });
  const successfulResult = expectSuccessfulUninstallActions(result, {
    directory: true,
  });
  // npm's stderr surfaces as a warning while the directory is still removed.
  expect(successfulResult.warnings).toEqual([
    "Failed to prune npm dependencies for plugin package demo-plugin: registry unavailable",
  ]);
  await expect(fs.access(pluginDir)).rejects.toThrow();
});
it.each([
{
name: "preserves directory for linked plugins",

View File

@@ -4,6 +4,8 @@ import path from "node:path";
import type { OpenClawConfig } from "../config/types.openclaw.js";
import type { PluginInstallRecord } from "../config/types.plugins.js";
import { formatErrorMessage } from "../infra/errors.js";
import { createSafeNpmInstallEnv } from "../infra/safe-package-install.js";
import { runCommandWithTimeout } from "../process/exec.js";
import {
resolveDefaultPluginGitDir,
resolveDefaultPluginNpmDir,
@@ -92,6 +94,11 @@ export type UninstallPluginResult =
/**
 * Filesystem removal step produced by plugin-uninstall planning.
 *
 * `target` is the directory to delete. The optional `cleanup` step requests an
 * `npm uninstall` of `packageName` under `npmRoot` (via `--prefix`) before the
 * directory is removed, so npm's own tree state stays consistent.
 */
export type PluginUninstallDirectoryRemoval = {
  target: string;
  cleanup?: {
    kind: "npm";
    npmRoot: string;
    packageName: string;
  };
};
export type PluginUninstallPlanResult =
@@ -118,12 +125,12 @@ export function resolveUninstallDirectoryTarget(params: {
return null;
}
const npmManagedPath = resolveNpmManagedInstallPath({
const npmManagedInstall = resolveNpmManagedInstall({
installRecord: params.installRecord,
extensionsDir: params.extensionsDir,
});
if (npmManagedPath) {
return npmManagedPath;
if (npmManagedInstall) {
return npmManagedInstall.installPath;
}
const gitManagedPath = resolveGitManagedInstallPath({
installRecord: params.installRecord,
@@ -166,10 +173,10 @@ export function resolveUninstallDirectoryTarget(params: {
return defaultPath;
}
function resolveNpmManagedInstallPath(params: {
function resolveNpmManagedInstall(params: {
installRecord?: PluginInstallRecord;
extensionsDir?: string;
}): string | null {
}): { installPath: string; npmRoot: string; packageName: string } | null {
const installPath = params.installRecord?.installPath?.trim();
if (params.installRecord?.source !== "npm" || !installPath) {
return null;
@@ -187,12 +194,34 @@ function resolveNpmManagedInstallPath(params: {
isPathInsideOrEqual(nodeModulesRoot, installPath) &&
resolveComparablePath(nodeModulesRoot) !== resolveComparablePath(installPath)
) {
return installPath;
const packageName = resolveNpmPackageNameFromInstallPath({ installPath, nodeModulesRoot });
return packageName ? { installPath, npmRoot, packageName } : null;
}
}
return null;
}
/**
 * Derives the npm package name from an install directory's position under a
 * node_modules root.
 *
 * Returns the bare name for unscoped packages, `@scope/name` for scoped ones,
 * and null when the install path is not strictly inside the root (equal to it,
 * outside it, or a scope directory with no package segment).
 */
function resolveNpmPackageNameFromInstallPath(params: {
  installPath: string;
  nodeModulesRoot: string;
}): string | null {
  const rel = path.relative(
    path.resolve(params.nodeModulesRoot),
    path.resolve(params.installPath),
  );
  // Empty → same directory; ".." prefix or absolute → escapes the root.
  const isStrictlyInside = rel.length > 0 && !rel.startsWith("..") && !path.isAbsolute(rel);
  if (!isStrictlyInside) {
    return null;
  }
  const [first, second] = rel.split(path.sep).filter((segment) => segment.length > 0);
  if (!first) {
    return null;
  }
  if (first.startsWith("@")) {
    // Scoped packages occupy two path segments: @scope/name.
    return second ? `${first}/${second}` : null;
  }
  return first;
}
function resolveGitManagedInstallPath(params: {
installRecord?: PluginInstallRecord;
extensionsDir?: string;
@@ -481,6 +510,14 @@ export function planPluginUninstall(params: UninstallPluginParams): PluginUninst
directory: false,
};
const npmManagedInstall =
deleteFiles && !isLinked
? resolveNpmManagedInstall({
installRecord,
extensionsDir,
})
: null;
const deleteTarget =
deleteFiles && !isLinked
? resolveUninstallDirectoryTarget({
@@ -496,7 +533,20 @@ export function planPluginUninstall(params: UninstallPluginParams): PluginUninst
config: newConfig,
pluginId,
actions,
directoryRemoval: deleteTarget ? { target: deleteTarget } : null,
directoryRemoval: deleteTarget
? {
target: deleteTarget,
...(npmManagedInstall
? {
cleanup: {
kind: "npm",
npmRoot: npmManagedInstall.npmRoot,
packageName: npmManagedInstall.packageName,
},
}
: {}),
}
: null,
};
}
@@ -512,13 +562,43 @@ export async function applyPluginUninstallDirectoryRemoval(
.access(removal.target)
.then(() => true)
.catch(() => false)) ?? false;
const warnings: string[] = [];
if (removal.cleanup?.kind === "npm") {
const uninstall = await runCommandWithTimeout(
[
"npm",
"uninstall",
"--loglevel=error",
"--ignore-scripts",
"--no-audit",
"--no-fund",
"--prefix",
removal.cleanup.npmRoot,
removal.cleanup.packageName,
],
{
timeoutMs: 300_000,
env: createSafeNpmInstallEnv(process.env, { packageLock: true, quiet: true }),
},
);
if (uninstall.code !== 0) {
warnings.push(
`Failed to prune npm dependencies for plugin package ${removal.cleanup.packageName}: ${
uninstall.stderr.trim() ||
uninstall.stdout.trim() ||
`npm exited with code ${uninstall.code}`
}`,
);
}
}
try {
await fs.rm(removal.target, { recursive: true, force: true });
return { directoryRemoved: existed, warnings: [] };
return { directoryRemoved: existed, warnings };
} catch (error) {
return {
directoryRemoved: false,
warnings: [
...warnings,
`Failed to remove plugin directory ${removal.target}: ${formatErrorMessage(error)}`,
],
};

View File

@@ -375,7 +375,11 @@ describe("package artifact reuse", () => {
"package_sha256: ${{ needs.prepare_release_package.outputs.package_sha256 }}",
);
expect(workflow).toContain("suite_profile: custom");
expect(workflow).toContain("docker_lanes: plugins-offline plugin-update");
expect(workflow).toContain(
"docker_lanes: doctor-switch update-channel-switch upgrade-survivor published-upgrade-survivor plugins-offline plugin-update",
);
expect(workflow).toContain("published_upgrade_survivor_baselines: release-history");
expect(workflow).toContain("published_upgrade_survivor_scenarios: reported-issues");
expect(workflow).toContain("telegram_mode: mock-openai");
expect(workflow).toContain(
"telegram_scenarios: telegram-help-command,telegram-commands-command,telegram-tools-compact-command,telegram-whoami-command,telegram-context-command,telegram-mention-gating",