mirror of
https://github.com/openclaw/openclaw.git
synced 2026-04-22 14:41:34 +00:00
refactor: remove custom test planner runtime
This commit is contained in:
51
.github/workflows/ci.yml
vendored
51
.github/workflows/ci.yml
vendored
@@ -17,8 +17,8 @@ env:
|
||||
FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: "true"
|
||||
|
||||
jobs:
|
||||
# Preflight: establish routing truth and planner-owned matrices once, then let
|
||||
# real work fan out from a single source of truth.
|
||||
# Preflight: establish routing truth and job matrices once, then let real
|
||||
# work fan out from a single source of truth.
|
||||
preflight:
|
||||
if: github.event_name != 'pull_request' || !github.event.pull_request.draft
|
||||
runs-on: blacksmith-16vcpu-ubuntu-2404
|
||||
@@ -302,8 +302,6 @@ jobs:
|
||||
- name: Run ${{ matrix.task }} (${{ matrix.runtime }})
|
||||
env:
|
||||
TASK: ${{ matrix.task }}
|
||||
SHARD_COUNT: ${{ matrix.shard_count || '' }}
|
||||
SHARD_INDEX: ${{ matrix.shard_index || '' }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
@@ -312,10 +310,6 @@ jobs:
|
||||
pnpm test:bundled
|
||||
;;
|
||||
extensions)
|
||||
if [ -n "$SHARD_COUNT" ] && [ -n "$SHARD_INDEX" ]; then
|
||||
export OPENCLAW_TEST_SHARDS="$SHARD_COUNT"
|
||||
export OPENCLAW_TEST_SHARD_INDEX="$SHARD_INDEX"
|
||||
fi
|
||||
pnpm test:extensions
|
||||
;;
|
||||
contracts|contracts-protocol)
|
||||
@@ -363,21 +357,13 @@ jobs:
|
||||
if: (github.event_name != 'pull_request' || matrix.task != 'compat-node22') && matrix.runtime == 'node' && (matrix.task == 'test' || matrix.task == 'channels' || matrix.task == 'compat-node22')
|
||||
env:
|
||||
TASK: ${{ matrix.task }}
|
||||
SHARD_COUNT: ${{ matrix.shard_count || '' }}
|
||||
SHARD_INDEX: ${{ matrix.shard_index || '' }}
|
||||
run: |
|
||||
# `pnpm test:planner` runs `scripts/test-parallel.mjs`, which spawns multiple Node processes.
|
||||
# Default heap limits have been too low on Linux CI (V8 OOM near 4GB).
|
||||
echo "OPENCLAW_TEST_WORKERS=2" >> "$GITHUB_ENV"
|
||||
echo "OPENCLAW_TEST_MAX_OLD_SPACE_SIZE_MB=6144" >> "$GITHUB_ENV"
|
||||
echo "NODE_OPTIONS=--max-old-space-size=6144" >> "$GITHUB_ENV"
|
||||
echo "OPENCLAW_VITEST_MAX_WORKERS=2" >> "$GITHUB_ENV"
|
||||
if [ "$TASK" = "channels" ]; then
|
||||
echo "OPENCLAW_TEST_WORKERS=1" >> "$GITHUB_ENV"
|
||||
echo "OPENCLAW_VITEST_MAX_WORKERS=1" >> "$GITHUB_ENV"
|
||||
echo "OPENCLAW_TEST_ISOLATE=1" >> "$GITHUB_ENV"
|
||||
fi
|
||||
if [ -n "$SHARD_COUNT" ] && [ -n "$SHARD_INDEX" ]; then
|
||||
echo "OPENCLAW_TEST_SHARDS=$SHARD_COUNT" >> "$GITHUB_ENV"
|
||||
echo "OPENCLAW_TEST_SHARD_INDEX=$SHARD_INDEX" >> "$GITHUB_ENV"
|
||||
fi
|
||||
|
||||
- name: Download dist artifact
|
||||
if: matrix.task == 'test'
|
||||
@@ -402,7 +388,7 @@ jobs:
|
||||
set -euo pipefail
|
||||
case "$TASK" in
|
||||
test)
|
||||
pnpm test:planner
|
||||
pnpm test
|
||||
;;
|
||||
channels)
|
||||
pnpm test:channels
|
||||
@@ -738,8 +724,7 @@ jobs:
|
||||
env:
|
||||
NODE_OPTIONS: --max-old-space-size=6144
|
||||
# Keep total concurrency predictable on the 32 vCPU runner.
|
||||
# Windows shard 2 has shown intermittent instability at 2 workers.
|
||||
OPENCLAW_TEST_WORKERS: 1
|
||||
OPENCLAW_VITEST_MAX_WORKERS: 1
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
@@ -812,15 +797,6 @@ jobs:
|
||||
# caches can skip repeated rebuild/download work on later shards/runs.
|
||||
pnpm install --frozen-lockfile --prefer-offline --ignore-scripts=false --config.engine-strict=false --config.enable-pre-post-scripts=true --config.side-effects-cache=true || pnpm install --frozen-lockfile --prefer-offline --ignore-scripts=false --config.engine-strict=false --config.enable-pre-post-scripts=true --config.side-effects-cache=true
|
||||
|
||||
- name: Configure test shard (Windows)
|
||||
if: matrix.task == 'test'
|
||||
env:
|
||||
SHARD_COUNT: ${{ matrix.shard_count }}
|
||||
SHARD_INDEX: ${{ matrix.shard_index }}
|
||||
run: |
|
||||
echo "OPENCLAW_TEST_SHARDS=$SHARD_COUNT" >> "$GITHUB_ENV"
|
||||
echo "OPENCLAW_TEST_SHARD_INDEX=$SHARD_INDEX" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Download dist artifact
|
||||
if: matrix.task == 'test'
|
||||
uses: actions/download-artifact@v8
|
||||
@@ -843,7 +819,7 @@ jobs:
|
||||
set -euo pipefail
|
||||
case "$TASK" in
|
||||
test)
|
||||
pnpm test:planner
|
||||
pnpm test
|
||||
;;
|
||||
*)
|
||||
echo "Unsupported Windows checks task: $TASK" >&2
|
||||
@@ -884,24 +860,17 @@ jobs:
|
||||
name: canvas-a2ui-bundle
|
||||
path: src/canvas-host/a2ui/
|
||||
|
||||
- name: Configure test shard (macOS)
|
||||
env:
|
||||
SHARD_COUNT: ${{ matrix.shard_count }}
|
||||
SHARD_INDEX: ${{ matrix.shard_index }}
|
||||
run: |
|
||||
echo "OPENCLAW_TEST_SHARDS=$SHARD_COUNT" >> "$GITHUB_ENV"
|
||||
echo "OPENCLAW_TEST_SHARD_INDEX=$SHARD_INDEX" >> "$GITHUB_ENV"
|
||||
|
||||
- name: TS tests (macOS)
|
||||
env:
|
||||
NODE_OPTIONS: --max-old-space-size=4096
|
||||
OPENCLAW_VITEST_MAX_WORKERS: 2
|
||||
TASK: ${{ matrix.task }}
|
||||
shell: bash
|
||||
run: |
|
||||
set -euo pipefail
|
||||
case "$TASK" in
|
||||
test)
|
||||
pnpm test:planner
|
||||
pnpm test
|
||||
;;
|
||||
*)
|
||||
echo "Unsupported macOS node task: $TASK" >&2
|
||||
|
||||
25
package.json
25
package.json
@@ -1047,13 +1047,13 @@
|
||||
"test:all": "pnpm lint && pnpm build && pnpm test && pnpm test:e2e && pnpm test:live && pnpm test:docker:all",
|
||||
"test:auth:compat": "vitest run --config vitest.gateway.config.ts src/gateway/server.auth.compat-baseline.test.ts src/gateway/client.test.ts src/gateway/reconnect-gating.test.ts src/gateway/protocol/connect-error-details.test.ts",
|
||||
"test:build:singleton": "node scripts/test-built-plugin-singleton.mjs",
|
||||
"test:bundled": "node scripts/test-parallel.mjs --surface bundled",
|
||||
"test:bundled": "vitest run --config vitest.bundled.config.ts",
|
||||
"test:changed": "pnpm test -- --changed origin/main",
|
||||
"test:changed:max": "node scripts/test-parallel.mjs --profile max --changed origin/main",
|
||||
"test:channels": "node scripts/test-parallel.mjs --surface channels",
|
||||
"test:changed:max": "OPENCLAW_VITEST_MAX_WORKERS=8 node scripts/test-projects.mjs --changed origin/main",
|
||||
"test:channels": "vitest run --config vitest.channels.config.ts",
|
||||
"test:contracts": "pnpm test:contracts:channels && pnpm test:contracts:plugins",
|
||||
"test:contracts:channels": "OPENCLAW_TEST_PROFILE=serial pnpm exec vitest run --config vitest.contracts.config.ts src/channels/plugins/contracts",
|
||||
"test:contracts:plugins": "OPENCLAW_TEST_PROFILE=serial pnpm exec vitest run --config vitest.contracts.config.ts src/plugins/contracts",
|
||||
"test:contracts:channels": "pnpm exec vitest run --config vitest.contracts.config.ts --maxWorkers=1 src/channels/plugins/contracts",
|
||||
"test:contracts:plugins": "pnpm exec vitest run --config vitest.contracts.config.ts --maxWorkers=1 src/plugins/contracts",
|
||||
"test:coverage": "vitest run --config vitest.unit.config.ts --coverage",
|
||||
"test:coverage:changed": "vitest run --config vitest.unit.config.ts --coverage --changed origin/main",
|
||||
"test:docker:all": "pnpm test:docker:live-build && OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:live-models && OPENCLAW_SKIP_DOCKER_BUILD=1 pnpm test:docker:live-gateway && pnpm test:docker:openwebui && pnpm test:docker:onboard && pnpm test:docker:gateway-network && pnpm test:docker:mcp-channels && pnpm test:docker:qr && pnpm test:docker:doctor-switch && pnpm test:docker:plugins && pnpm test:docker:cleanup",
|
||||
@@ -1073,7 +1073,7 @@
|
||||
"test:e2e": "vitest run --config vitest.e2e.config.ts",
|
||||
"test:e2e:openshell": "OPENCLAW_E2E_OPENSHELL=1 vitest run --config vitest.e2e.config.ts test/openshell-sandbox.e2e.test.ts",
|
||||
"test:extension": "node scripts/test-extension.mjs",
|
||||
"test:extensions": "node scripts/test-parallel.mjs --surface extensions",
|
||||
"test:extensions": "vitest run --config vitest.extensions.config.ts",
|
||||
"test:extensions:memory": "node scripts/profile-extension-memory.mjs",
|
||||
"test:fast": "vitest run --config vitest.unit.config.ts",
|
||||
"test:force": "node --import tsx scripts/test-force.ts",
|
||||
@@ -1086,25 +1086,20 @@
|
||||
"test:live": "node scripts/test-live.mjs",
|
||||
"test:live:gateway-profiles": "node scripts/test-live.mjs -- src/gateway/gateway-models.profiles.live.test.ts",
|
||||
"test:live:models-profiles": "node scripts/test-live.mjs -- src/agents/models.profiles.live.test.ts",
|
||||
"test:max": "node scripts/test-parallel.mjs --profile max",
|
||||
"test:max": "OPENCLAW_VITEST_MAX_WORKERS=8 node scripts/test-projects.mjs",
|
||||
"test:parallels:linux": "bash scripts/e2e/parallels-linux-smoke.sh",
|
||||
"test:parallels:macos": "bash scripts/e2e/parallels-macos-smoke.sh",
|
||||
"test:parallels:npm-update": "bash scripts/e2e/parallels-npm-update-smoke.sh",
|
||||
"test:parallels:windows": "bash scripts/e2e/parallels-windows-smoke.sh",
|
||||
"test:perf:budget": "node scripts/test-perf-budget.mjs",
|
||||
"test:perf:hotspots": "node scripts/test-hotspots.mjs",
|
||||
"test:perf:imports": "OPENCLAW_VITEST_IMPORT_DURATIONS=1 OPENCLAW_VITEST_PRINT_IMPORT_BREAKDOWN=1 pnpm test",
|
||||
"test:perf:imports:changed": "OPENCLAW_VITEST_IMPORT_DURATIONS=1 OPENCLAW_VITEST_PRINT_IMPORT_BREAKDOWN=1 pnpm test -- --changed origin/main",
|
||||
"test:perf:imports": "OPENCLAW_VITEST_IMPORT_DURATIONS=1 OPENCLAW_VITEST_PRINT_IMPORT_BREAKDOWN=1 node scripts/test-projects.mjs",
|
||||
"test:perf:imports:changed": "OPENCLAW_VITEST_IMPORT_DURATIONS=1 OPENCLAW_VITEST_PRINT_IMPORT_BREAKDOWN=1 node scripts/test-projects.mjs --changed origin/main",
|
||||
"test:perf:profile:main": "node scripts/run-vitest-profile.mjs main",
|
||||
"test:perf:profile:runner": "node scripts/run-vitest-profile.mjs runner",
|
||||
"test:perf:update-memory-hotspots": "node scripts/test-update-memory-hotspots.mjs",
|
||||
"test:perf:update-memory-hotspots:extensions": "node scripts/test-update-memory-hotspots.mjs --config vitest.extensions.config.ts --out test/fixtures/test-memory-hotspots.extensions.json --lane extensions --lane-prefix extensions-batch- --min-delta-kb 1048576 --limit 20",
|
||||
"test:perf:update-timings": "node scripts/test-update-timings.mjs",
|
||||
"test:perf:update-timings:extensions": "node scripts/test-update-timings.mjs --config vitest.extensions.config.ts",
|
||||
"test:planner": "node scripts/test-parallel.mjs",
|
||||
"test:projects": "node scripts/test-projects.mjs",
|
||||
"test:sectriage": "pnpm exec vitest run --config vitest.gateway.config.ts && vitest run --config vitest.unit.config.ts --exclude src/daemon/launchd.integration.test.ts --exclude src/process/exec.test.ts",
|
||||
"test:serial": "node scripts/test-parallel.mjs --profile serial",
|
||||
"test:serial": "OPENCLAW_VITEST_MAX_WORKERS=1 node scripts/test-projects.mjs",
|
||||
"test:startup:bench": "node --import tsx scripts/bench-cli-startup.ts",
|
||||
"test:startup:bench:check": "node scripts/test-cli-startup-bench-budget.mjs",
|
||||
"test:startup:bench:save": "node --import tsx scripts/bench-cli-startup.ts --preset all --runs 5 --warmup 1 --output .artifacts/cli-startup-bench-all.json",
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { appendFileSync } from "node:fs";
|
||||
import { buildCIExecutionManifest } from "./test-planner/planner.mjs";
|
||||
import path from "node:path";
|
||||
import { pathToFileURL } from "node:url";
|
||||
|
||||
const WORKFLOWS = new Set(["ci", "install-smoke"]);
|
||||
|
||||
@@ -23,50 +24,159 @@ const parseArgs = (argv) => {
|
||||
return parsed;
|
||||
};
|
||||
|
||||
const outputPath = process.env.GITHUB_OUTPUT;
|
||||
|
||||
if (!outputPath) {
|
||||
throw new Error("GITHUB_OUTPUT is required");
|
||||
}
|
||||
|
||||
const { workflow } = parseArgs(process.argv.slice(2));
|
||||
const manifest = buildCIExecutionManifest(undefined, { env: process.env });
|
||||
|
||||
const writeOutput = (name, value) => {
|
||||
appendFileSync(outputPath, `${name}=${value}\n`, "utf8");
|
||||
/**
 * Parse a boolean-ish environment value.
 *
 * Accepts "true"/"1" as true and "false"/"0"/"" as false (case-insensitive,
 * surrounding whitespace ignored). Missing (undefined/null) or unrecognized
 * values fall back to `defaultValue`.
 */
const parseBooleanEnv = (value, defaultValue = false) => {
  // Treat null like undefined so a nulled-out env entry cannot crash `.trim()`.
  if (value == null) {
    return defaultValue;
  }
  const normalized = value.trim().toLowerCase();
  if (normalized === "true" || normalized === "1") {
    return true;
  }
  if (normalized === "false" || normalized === "0" || normalized === "") {
    return false;
  }
  return defaultValue;
};
|
||||
|
||||
if (workflow === "ci") {
|
||||
writeOutput("docs_only", String(manifest.scope.docsOnly));
|
||||
writeOutput("docs_changed", String(manifest.scope.docsChanged));
|
||||
writeOutput("run_node", String(manifest.scope.runNode));
|
||||
writeOutput("run_macos", String(manifest.scope.runMacos));
|
||||
writeOutput("run_android", String(manifest.scope.runAndroid));
|
||||
writeOutput("run_skills_python", String(manifest.scope.runSkillsPython));
|
||||
writeOutput("run_windows", String(manifest.scope.runWindows));
|
||||
writeOutput("has_changed_extensions", String(manifest.scope.hasChangedExtensions));
|
||||
writeOutput("changed_extensions_matrix", JSON.stringify(manifest.scope.changedExtensionsMatrix));
|
||||
writeOutput("run_build_artifacts", String(manifest.jobs.buildArtifacts.enabled));
|
||||
writeOutput("run_checks_fast", String(manifest.jobs.checksFast.enabled));
|
||||
writeOutput("checks_fast_matrix", JSON.stringify(manifest.jobs.checksFast.matrix));
|
||||
writeOutput("run_checks", String(manifest.jobs.checks.enabled));
|
||||
writeOutput("checks_matrix", JSON.stringify(manifest.jobs.checks.matrix));
|
||||
writeOutput("run_extension_fast", String(manifest.jobs.extensionFast.enabled));
|
||||
writeOutput("extension_fast_matrix", JSON.stringify(manifest.jobs.extensionFast.matrix));
|
||||
writeOutput("run_check", String(manifest.jobs.check.enabled));
|
||||
writeOutput("run_check_additional", String(manifest.jobs.checkAdditional.enabled));
|
||||
writeOutput("run_build_smoke", String(manifest.jobs.buildSmoke.enabled));
|
||||
writeOutput("run_check_docs", String(manifest.jobs.checkDocs.enabled));
|
||||
writeOutput("run_skills_python_job", String(manifest.jobs.skillsPython.enabled));
|
||||
writeOutput("run_checks_windows", String(manifest.jobs.checksWindows.enabled));
|
||||
writeOutput("checks_windows_matrix", JSON.stringify(manifest.jobs.checksWindows.matrix));
|
||||
writeOutput("run_macos_node", String(manifest.jobs.macosNode.enabled));
|
||||
writeOutput("macos_node_matrix", JSON.stringify(manifest.jobs.macosNode.matrix));
|
||||
writeOutput("run_macos_swift", String(manifest.jobs.macosSwift.enabled));
|
||||
writeOutput("run_android_job", String(manifest.jobs.android.enabled));
|
||||
writeOutput("android_matrix", JSON.stringify(manifest.jobs.android.matrix));
|
||||
writeOutput("required_check_names", JSON.stringify(manifest.requiredCheckNames));
|
||||
} else if (workflow === "install-smoke") {
|
||||
writeOutput("docs_only", String(manifest.scope.docsOnly));
|
||||
writeOutput("run_install_smoke", String(manifest.jobs.installSmoke.enabled));
|
||||
/**
 * Parse a JSON-valued environment variable, returning `fallback` when the
 * value is empty/missing or is not valid JSON.
 */
const parseJsonEnv = (value, fallback) => {
  if (!value) {
    return fallback;
  }
  try {
    return JSON.parse(value);
  } catch {
    // Malformed JSON in the environment is treated as "not provided".
    return fallback;
  }
};
|
||||
|
||||
// Wrap a list of matrix entries in the `{ include: [...] }` shape GitHub
// Actions job matrices expect.
const createMatrix = (include) => {
  return { include };
};
|
||||
|
||||
/**
 * Build the workflow routing manifest from preflight-provided environment
 * variables.
 *
 * @param {NodeJS.ProcessEnv} [env] - Source of `OPENCLAW_CI_*` routing flags
 *   and `GITHUB_EVENT_NAME`.
 * @param {string} [workflow] - Either "ci" or "install-smoke"; selects which
 *   output shape is returned.
 * @returns {object} Flat map of job-gating booleans and matrix objects,
 *   keyed exactly as the workflow files consume them.
 */
export function buildWorkflowManifest(env = process.env, workflow = "ci") {
  const eventName = env.GITHUB_EVENT_NAME ?? "pull_request";
  const isPush = eventName === "push";

  // Routing truth computed by the preflight job and passed down via env.
  const docsOnly = parseBooleanEnv(env.OPENCLAW_CI_DOCS_ONLY);
  const docsChanged = parseBooleanEnv(env.OPENCLAW_CI_DOCS_CHANGED);
  const runNode = parseBooleanEnv(env.OPENCLAW_CI_RUN_NODE);
  const runMacos = parseBooleanEnv(env.OPENCLAW_CI_RUN_MACOS);
  const runAndroid = parseBooleanEnv(env.OPENCLAW_CI_RUN_ANDROID);
  const runWindows = parseBooleanEnv(env.OPENCLAW_CI_RUN_WINDOWS);
  const runSkillsPython = parseBooleanEnv(env.OPENCLAW_CI_RUN_SKILLS_PYTHON);
  const hasChangedExtensions = parseBooleanEnv(env.OPENCLAW_CI_HAS_CHANGED_EXTENSIONS);
  const changedExtensionsMatrix = parseJsonEnv(env.OPENCLAW_CI_CHANGED_EXTENSIONS_MATRIX, {
    include: [],
  });
  const runChangedSmoke = parseBooleanEnv(env.OPENCLAW_CI_RUN_CHANGED_SMOKE);

  const checksFastMatrix = createMatrix(
    runNode
      ? [
          { check_name: "checks-fast-bundled", runtime: "node", task: "bundled" },
          { check_name: "checks-fast-extensions", runtime: "node", task: "extensions" },
          {
            check_name: "checks-fast-contracts-protocol",
            runtime: "node",
            task: "contracts-protocol",
          },
        ]
      : [],
  );

  // compat-node22 only runs on push events.
  const checksMatrixInclude = runNode
    ? [
        { check_name: "checks-node-test", runtime: "node", task: "test" },
        { check_name: "checks-node-channels", runtime: "node", task: "channels" },
        ...(isPush
          ? [
              {
                check_name: "checks-node-compat-node22",
                runtime: "node",
                task: "compat-node22",
                node_version: "22.x",
                cache_key_suffix: "node22",
              },
            ]
          : []),
      ]
    : [];

  const windowsMatrix = createMatrix(
    runWindows ? [{ check_name: "checks-windows-node-test", runtime: "node", task: "test" }] : [],
  );
  const macosNodeMatrix = createMatrix(
    runMacos ? [{ check_name: "macos-node", runtime: "node", task: "test" }] : [],
  );
  const androidMatrix = createMatrix(
    runAndroid
      ? [
          { check_name: "android-test-play", task: "test-play" },
          { check_name: "android-test-third-party", task: "test-third-party" },
          { check_name: "android-build-play", task: "build-play" },
          { check_name: "android-build-third-party", task: "build-third-party" },
        ]
      : [],
  );
  const extensionFastMatrix = createMatrix(
    hasChangedExtensions
      ? (changedExtensionsMatrix.include ?? []).map((entry) => ({
          check_name: `extension-fast-${entry.extension}`,
          extension: entry.extension,
        }))
      : [],
  );

  if (workflow === "install-smoke") {
    return {
      docs_only: docsOnly,
      run_install_smoke: !docsOnly && runChangedSmoke,
    };
  }

  // Default "ci" shape: every gate is suppressed for docs-only changes,
  // except run_check_docs, which keys off docsChanged alone.
  return {
    docs_only: docsOnly,
    docs_changed: docsChanged,
    run_node: !docsOnly && runNode,
    run_macos: !docsOnly && runMacos,
    run_android: !docsOnly && runAndroid,
    run_skills_python: !docsOnly && runSkillsPython,
    run_windows: !docsOnly && runWindows,
    has_changed_extensions: !docsOnly && hasChangedExtensions,
    changed_extensions_matrix: changedExtensionsMatrix,
    run_build_artifacts: !docsOnly && runNode,
    run_checks_fast: !docsOnly && runNode,
    checks_fast_matrix: checksFastMatrix,
    run_checks: !docsOnly && runNode,
    checks_matrix: createMatrix(checksMatrixInclude),
    run_extension_fast: !docsOnly && hasChangedExtensions,
    extension_fast_matrix: extensionFastMatrix,
    run_check: !docsOnly && runNode,
    run_check_additional: !docsOnly && runNode,
    run_build_smoke: !docsOnly && runNode,
    run_check_docs: docsChanged,
    run_skills_python_job: !docsOnly && runSkillsPython,
    run_checks_windows: !docsOnly && runWindows,
    checks_windows_matrix: windowsMatrix,
    run_macos_node: !docsOnly && runMacos,
    macos_node_matrix: macosNodeMatrix,
    run_macos_swift: !docsOnly && runMacos,
    run_android_job: !docsOnly && runAndroid,
    android_matrix: androidMatrix,
  };
}
|
||||
|
||||
// Resolve the invoked script path to a file URL so "run directly via node"
// can be distinguished from "imported as a module" (e.g. by tests).
const entryHref = process.argv[1] ? pathToFileURL(path.resolve(process.argv[1])).href : "";

if (import.meta.url === entryHref) {
  const outputPath = process.env.GITHUB_OUTPUT;
  if (!outputPath) {
    throw new Error("GITHUB_OUTPUT is required");
  }

  const { workflow } = parseArgs(process.argv.slice(2));
  const manifest = buildWorkflowManifest(process.env, workflow);

  // Append one `name=value` line per manifest entry to $GITHUB_OUTPUT;
  // non-string values are serialized as JSON.
  const writeOutput = (name, value) => {
    appendFileSync(outputPath, `${name}=${value}\n`, "utf8");
  };
  for (const [key, value] of Object.entries(manifest)) {
    writeOutput(key, typeof value === "string" ? value : JSON.stringify(value));
  }
}
|
||||
|
||||
@@ -5,7 +5,7 @@ run_prepare_push_retry_gates() {
|
||||
run_quiet_logged "pnpm build (lease-retry)" ".local/lease-retry-build.log" pnpm build
|
||||
run_quiet_logged "pnpm check (lease-retry)" ".local/lease-retry-check.log" pnpm check
|
||||
if [ "$docs_only" != "true" ]; then
|
||||
run_quiet_logged "pnpm test:planner (lease-retry)" ".local/lease-retry-test.log" pnpm test:planner
|
||||
run_quiet_logged "pnpm test (lease-retry)" ".local/lease-retry-test.log" pnpm test
|
||||
fi
|
||||
}
|
||||
|
||||
@@ -103,13 +103,11 @@ prepare_gates() {
|
||||
echo "Docs-only change detected with high confidence; skipping pnpm test."
|
||||
else
|
||||
gates_mode="full"
|
||||
local prepare_unit_fast_batch_target_ms
|
||||
prepare_unit_fast_batch_target_ms="${OPENCLAW_PREPARE_TEST_UNIT_FAST_BATCH_TARGET_MS:-5000}"
|
||||
echo "Running pnpm test:planner with OPENCLAW_TEST_UNIT_FAST_BATCH_TARGET_MS=$prepare_unit_fast_batch_target_ms for shorter-lived unit-fast workers."
|
||||
echo "Running pnpm test with OPENCLAW_VITEST_MAX_WORKERS=${OPENCLAW_VITEST_MAX_WORKERS:-4}."
|
||||
run_quiet_logged \
|
||||
"pnpm test:planner" \
|
||||
"pnpm test" \
|
||||
".local/gates-test.log" \
|
||||
env OPENCLAW_TEST_UNIT_FAST_BATCH_TARGET_MS="$prepare_unit_fast_batch_target_ms" pnpm test:planner
|
||||
env OPENCLAW_VITEST_MAX_WORKERS="${OPENCLAW_VITEST_MAX_WORKERS:-4}" pnpm test
|
||||
previous_full_gates_head="$current_head"
|
||||
fi
|
||||
fi
|
||||
|
||||
@@ -59,10 +59,10 @@ run_linux_ci_mirror() {
|
||||
run_step pnpm vitest run --config vitest.extensions.config.ts --maxWorkers=1
|
||||
run_step env CI=true pnpm exec vitest run --config vitest.unit.config.ts --maxWorkers=1
|
||||
|
||||
log_step "OPENCLAW_TEST_WORKERS=${OPENCLAW_TEST_WORKERS:-1} OPENCLAW_TEST_MAX_OLD_SPACE_SIZE_MB=${OPENCLAW_TEST_MAX_OLD_SPACE_SIZE_MB:-6144} pnpm test:planner"
|
||||
OPENCLAW_TEST_WORKERS="${OPENCLAW_TEST_WORKERS:-1}" \
|
||||
OPENCLAW_TEST_MAX_OLD_SPACE_SIZE_MB="${OPENCLAW_TEST_MAX_OLD_SPACE_SIZE_MB:-6144}" \
|
||||
pnpm test:planner
|
||||
log_step "OPENCLAW_VITEST_MAX_WORKERS=${OPENCLAW_VITEST_MAX_WORKERS:-1} NODE_OPTIONS=${NODE_OPTIONS:---max-old-space-size=6144} pnpm test"
|
||||
OPENCLAW_VITEST_MAX_WORKERS="${OPENCLAW_VITEST_MAX_WORKERS:-1}" \
|
||||
NODE_OPTIONS="${NODE_OPTIONS:---max-old-space-size=6144}" \
|
||||
pnpm test
|
||||
}
|
||||
|
||||
run_macos_ci_mirror() {
|
||||
|
||||
@@ -2,7 +2,8 @@ import { spawnSync } from "node:child_process";
|
||||
import fs from "node:fs";
|
||||
import { floatFlag, intFlag, parseFlagArgs, readEnvNumber, stringFlag } from "./lib/arg-utils.mjs";
|
||||
import { readJsonFile } from "./test-report-utils.mjs";
|
||||
import { cliStartupBenchManifestPath } from "./test-runner-manifest.mjs";
|
||||
|
||||
const CLI_STARTUP_BENCH_FIXTURE_PATH = "test/fixtures/cli-startup-bench.json";
|
||||
|
||||
function formatMs(value) {
|
||||
return `${value.toFixed(1)}ms`;
|
||||
@@ -42,7 +43,7 @@ if (process.argv.slice(2).includes("--help")) {
|
||||
const opts = parseFlagArgs(
|
||||
process.argv.slice(2),
|
||||
{
|
||||
baseline: cliStartupBenchManifestPath,
|
||||
baseline: CLI_STARTUP_BENCH_FIXTURE_PATH,
|
||||
report: "",
|
||||
entry: "openclaw.mjs",
|
||||
preset: "all",
|
||||
|
||||
@@ -9,13 +9,11 @@ import {
|
||||
BUNDLED_PLUGIN_PATH_PREFIX,
|
||||
BUNDLED_PLUGIN_ROOT_DIR,
|
||||
} from "./lib/bundled-plugin-paths.mjs";
|
||||
import { loadTestRunnerBehavior } from "./test-runner-manifest.mjs";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
const repoRoot = path.resolve(__dirname, "..");
|
||||
const pnpm = "pnpm";
|
||||
const testRunnerBehavior = loadTestRunnerBehavior();
|
||||
|
||||
function runGit(args, options = {}) {
|
||||
return execFileSync("git", args, {
|
||||
@@ -236,42 +234,15 @@ export function resolveExtensionTestPlan(params = {}) {
|
||||
const testFiles = roots
|
||||
.flatMap((root) => collectTestFiles(path.join(repoRoot, root)))
|
||||
.map((filePath) => normalizeRelative(path.relative(repoRoot, filePath)));
|
||||
const { isolatedTestFiles, sharedTestFiles } = partitionExtensionTestFiles({ config, testFiles });
|
||||
|
||||
return {
|
||||
config,
|
||||
extensionDir: relativeExtensionDir,
|
||||
extensionId,
|
||||
isolatedTestFiles,
|
||||
roots,
|
||||
sharedTestFiles,
|
||||
testFiles,
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Split test files into "shared" files (run in a normal vitest batch) and
 * "isolated" files (run separately with isolation enabled), based on the
 * per-config isolation lists from the test-runner behavior manifest.
 *
 * @param {{ config: string, testFiles: string[] }} params
 * @returns {{ isolatedTestFiles: string[], sharedTestFiles: string[] }}
 */
export function partitionExtensionTestFiles(params) {
  const testFiles = params.testFiles.map((filePath) => normalizeRelative(filePath));

  // Only the channels and extensions configs define isolation lists;
  // channels additionally supports prefix-based matching.
  let isolatedEntries = [];
  let isolatedPrefixes = [];
  if (params.config === "vitest.channels.config.ts") {
    isolatedEntries = testRunnerBehavior.channels.isolated;
    isolatedPrefixes = testRunnerBehavior.channels.isolatedPrefixes;
  } else if (params.config === "vitest.extensions.config.ts") {
    isolatedEntries = testRunnerBehavior.extensions.isolated;
  }

  const isolatedFileNames = new Set(isolatedEntries.map((entry) => entry.file));
  const isIsolated = (file) =>
    isolatedFileNames.has(file) || isolatedPrefixes.some((prefix) => file.startsWith(prefix));

  const isolatedTestFiles = testFiles.filter(isIsolated);
  const isolatedSet = new Set(isolatedTestFiles);
  const sharedTestFiles = testFiles.filter((file) => !isolatedSet.has(file));

  return { isolatedTestFiles, sharedTestFiles };
}
|
||||
|
||||
async function runVitestBatch(params) {
|
||||
return await new Promise((resolve, reject) => {
|
||||
const child = spawn(
|
||||
@@ -412,8 +383,6 @@ async function run() {
|
||||
console.log(`config: ${plan.config}`);
|
||||
console.log(`roots: ${plan.roots.join(", ")}`);
|
||||
console.log(`tests: ${plan.testFiles.length}`);
|
||||
console.log(`shared: ${plan.sharedTestFiles.length}`);
|
||||
console.log(`isolated: ${plan.isolatedTestFiles.length}`);
|
||||
}
|
||||
return;
|
||||
}
|
||||
@@ -425,36 +394,13 @@ async function run() {
|
||||
console.log(
|
||||
`[test-extension] Running ${plan.testFiles.length} test files for ${plan.extensionId} with ${plan.config}`,
|
||||
);
|
||||
|
||||
if (plan.sharedTestFiles.length > 0 && plan.isolatedTestFiles.length > 0) {
|
||||
console.log(
|
||||
`[test-extension] Split into ${plan.sharedTestFiles.length} shared and ${plan.isolatedTestFiles.length} isolated files`,
|
||||
);
|
||||
}
|
||||
|
||||
if (plan.sharedTestFiles.length > 0) {
|
||||
const sharedExitCode = await runVitestBatch({
|
||||
args: passthroughArgs,
|
||||
config: plan.config,
|
||||
env: process.env,
|
||||
files: plan.sharedTestFiles,
|
||||
});
|
||||
if (sharedExitCode !== 0) {
|
||||
process.exit(sharedExitCode);
|
||||
}
|
||||
}
|
||||
|
||||
if (plan.isolatedTestFiles.length > 0) {
|
||||
const isolatedExitCode = await runVitestBatch({
|
||||
args: passthroughArgs,
|
||||
config: plan.config,
|
||||
env: { ...process.env, OPENCLAW_TEST_ISOLATE: "1" },
|
||||
files: plan.isolatedTestFiles,
|
||||
});
|
||||
process.exit(isolatedExitCode);
|
||||
}
|
||||
|
||||
process.exit(0);
|
||||
const exitCode = await runVitestBatch({
|
||||
args: passthroughArgs,
|
||||
config: plan.config,
|
||||
env: process.env,
|
||||
files: plan.testFiles,
|
||||
});
|
||||
process.exit(exitCode);
|
||||
}
|
||||
|
||||
const entryHref = process.argv[1] ? pathToFileURL(path.resolve(process.argv[1])).href : "";
|
||||
|
||||
@@ -1,283 +0,0 @@
|
||||
import { spawnSync } from "node:child_process";
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
|
||||
// Control characters used when building the ANSI-stripping pattern.
const ESCAPE = String.fromCodePoint(27);
const BELL = String.fromCodePoint(7);
const ANSI_ESCAPE_PATTERN = new RegExp(
  // Strip CSI/OSC-style control sequences from Vitest output before parsing file lines.
  `${ESCAPE}(?:\\][^${BELL}]*(?:${BELL}|${ESCAPE}\\\\)|\\[[0-?]*[ -/]*[@-~]|[@-Z\\\\-_])`,
  "g",
);
// `gh run view` log lines: "<job>\t<step>\t<ISO timestamp> ..."
const GITHUB_CLI_LOG_PREFIX_PATTERN =
  /^[^\t\r\n]+\t[^\t\r\n]+\t\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?Z\s+/u;
// Raw Actions log lines: "<ISO timestamp> ..."
const GITHUB_ACTIONS_LOG_PREFIX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d+)?Z\s+/u;

// "src/foo.test.ts (N tests) 123ms" — a completed test-file summary line.
const COMPLETED_TEST_FILE_LINE_PATTERN =
  /(?<file>(?:src|extensions|test|ui)\/\S+?\.(?:live\.test|e2e\.test|test)\.ts)\s+\(.*\)\s+(?<duration>\d+(?:\.\d+)?)(?<unit>ms|s)\s*$/;
// "[test-parallel][mem] summary <lane> files=N peak=... totalDelta=... peakAt=... top=..."
const MEMORY_TRACE_SUMMARY_PATTERN =
  /^\[test-parallel\]\[mem\] summary (?<lane>\S+) files=(?<files>\d+) peak=(?<peak>[0-9]+(?:\.[0-9]+)?(?:GiB|MiB|KiB)) totalDelta=(?<totalDelta>[+-]?[0-9]+(?:\.[0-9]+)?(?:GiB|MiB|KiB)) peakAt=(?<peakAt>\S+) top=(?<top>.*)$/u;
// Single "file:+delta" entry within a summary's top= list.
const MEMORY_TRACE_TOP_ENTRY_PATTERN =
  /^(?<file>(?:src|extensions|test|ui)\/\S+?\.(?:live\.test|e2e\.test|test)\.ts):(?<delta>[+-]?[0-9]+(?:\.[0-9]+)?(?:GiB|MiB|KiB))$/u;

// Columns requested from `ps` when walking a process tree.
const PS_COLUMNS = ["pid=", "ppid=", "rss=", "comm="];
|
||||
|
||||
// Convert a parsed duration string plus its unit ("ms" or "s") to whole
// milliseconds; returns null for non-numeric input.
function parseDurationMs(rawValue, unit) {
  const value = Number.parseFloat(rawValue);
  if (!Number.isFinite(value)) {
    return null;
  }
  const milliseconds = unit === "s" ? value * 1000 : value;
  return Math.round(milliseconds);
}
|
||||
|
||||
/**
 * Parse a human-readable memory size ("1.5GiB", "-512KiB", "+20MiB") into a
 * signed integer number of KiB.
 *
 * @param {string} rawValue - Size string with optional sign and GiB/MiB/KiB unit.
 * @returns {number|null} Rounded KiB value, or null when the input does not match.
 */
export function parseMemoryValueKb(rawValue) {
  const match = rawValue.match(/^(?<sign>[+-]?)(?<value>\d+(?:\.\d+)?)(?<unit>GiB|MiB|KiB)$/u);
  if (!match?.groups) {
    return null;
  }
  const magnitude = Number.parseFloat(match.groups.value);
  if (!Number.isFinite(magnitude)) {
    return null;
  }
  // KiB per unit: GiB = 1024^2 KiB, MiB = 1024 KiB, KiB = 1.
  const kibPerUnit = { GiB: 1024 ** 2, MiB: 1024, KiB: 1 }[match.groups.unit];
  const kb = Math.round(magnitude * kibPerUnit);
  return match.groups.sign === "-" ? -kb : kb;
}
|
||||
|
||||
// Remove ANSI control sequences so log text can be matched as plain text.
function stripAnsi(text) {
  const cleaned = text.replaceAll(ANSI_ESCAPE_PATTERN, "");
  return cleaned;
}
|
||||
|
||||
// Drop the `gh` CLI log prefix (job/step/timestamp) and, failing that, the
// raw Actions timestamp prefix from a single log line.
function normalizeLogLine(line) {
  const withoutCliPrefix = line.replace(GITHUB_CLI_LOG_PREFIX_PATTERN, "");
  return withoutCliPrefix.replace(GITHUB_ACTIONS_LOG_PREFIX_PATTERN, "");
}
|
||||
|
||||
/**
 * Extract completed test-file entries from raw CI/Vitest log text.
 *
 * @param {string} text - Raw log output (ANSI codes and log prefixes allowed).
 * @returns {Array<{ file: string, durationMs: number|null }>} One entry per
 *   matching line; non-matching lines are dropped.
 */
export function parseCompletedTestFileLines(text) {
  const entries = [];
  for (const rawLine of stripAnsi(text).split(/\r?\n/u)) {
    const match = normalizeLogLine(rawLine).match(COMPLETED_TEST_FILE_LINE_PATTERN);
    if (!match?.groups) {
      continue;
    }
    entries.push({
      file: match.groups.file,
      durationMs: parseDurationMs(match.groups.duration, match.groups.unit),
    });
  }
  return entries;
}
|
||||
|
||||
/**
 * Parse `[test-parallel][mem] summary ...` lines from log text into
 * structured records.
 *
 * @param {string} text - Raw log output (ANSI codes and log prefixes allowed).
 * @returns {Array<{ lane: string, files: number, peakRssKb: number,
 *   totalDeltaKb: number, peakAt: string,
 *   top: Array<{ file: string, deltaKb: number }> }>} One record per valid
 *   summary line; lines with unparseable sizes or counts are dropped, as are
 *   malformed entries within a `top=` list.
 */
export function parseMemoryTraceSummaryLines(text) {
  const summaries = [];
  for (const rawLine of stripAnsi(text).split(/\r?\n/u)) {
    const match = normalizeLogLine(rawLine).match(MEMORY_TRACE_SUMMARY_PATTERN);
    if (!match?.groups) {
      continue;
    }
    const peakRssKb = parseMemoryValueKb(match.groups.peak);
    const totalDeltaKb = parseMemoryValueKb(match.groups.totalDelta);
    const fileCount = Number.parseInt(match.groups.files, 10);
    if (!Number.isInteger(fileCount) || peakRssKb === null || totalDeltaKb === null) {
      continue;
    }

    // "top=none" means the lane recorded no per-file deltas.
    let top = [];
    if (match.groups.top !== "none") {
      top = match.groups.top
        .split(/,\s+/u)
        .map((entry) => {
          const topMatch = entry.match(MEMORY_TRACE_TOP_ENTRY_PATTERN);
          if (!topMatch?.groups) {
            return null;
          }
          const deltaKb = parseMemoryValueKb(topMatch.groups.delta);
          if (deltaKb === null) {
            return null;
          }
          return { file: topMatch.groups.file, deltaKb };
        })
        .filter((entry) => entry !== null);
    }

    summaries.push({
      lane: match.groups.lane,
      files: fileCount,
      peakRssKb,
      totalDeltaKb,
      peakAt: match.groups.peakAt,
      top,
    });
  }
  return summaries;
}
|
||||
|
||||
/**
 * Snapshot the process tree rooted at `rootPid` using `ps`.
 *
 * Returns an array of `{ pid, parentPid, rssKb, command }` records for the
 * root and all of its descendants (breadth-first order), or null when the
 * tree cannot be sampled: invalid pid, Windows (no `ps`), `ps` failing, or
 * the root pid missing from the listing.
 */
export function getProcessTreeRecords(rootPid) {
  if (!Number.isInteger(rootPid) || rootPid <= 0 || process.platform === "win32") {
    return null;
  }

  const result = spawnSync("ps", ["-axo", PS_COLUMNS.join(",")], {
    encoding: "utf8",
  });
  if (result.status !== 0 || result.error) {
    return null;
  }

  // Index every ps row by pid and group child pids under their parent pid.
  const childPidsByParent = new Map();
  const recordsByPid = new Map();
  for (const line of result.stdout.split(/\r?\n/u)) {
    const trimmed = line.trim();
    if (!trimmed) {
      continue;
    }
    // NOTE(review): String.split with limit 4 keeps only the token after the
    // third separator — anything past the first space in the command column
    // is dropped. Fine if PS_COLUMNS ends with `comm` (no spaces); confirm
    // at the PS_COLUMNS definition.
    const [pidRaw, parentRaw, rssRaw, commandRaw] = trimmed.split(/\s+/u, 4);
    const pid = Number.parseInt(pidRaw ?? "", 10);
    const parentPid = Number.parseInt(parentRaw ?? "", 10);
    const rssKb = Number.parseInt(rssRaw ?? "", 10);
    // Non-numeric rows (e.g. the ps header line) are skipped here.
    if (!Number.isInteger(pid) || !Number.isInteger(parentPid) || !Number.isInteger(rssKb)) {
      continue;
    }
    const siblings = childPidsByParent.get(parentPid) ?? [];
    siblings.push(pid);
    childPidsByParent.set(parentPid, siblings);
    recordsByPid.set(pid, {
      pid,
      parentPid,
      rssKb,
      command: commandRaw ?? "",
    });
  }

  if (!recordsByPid.has(rootPid)) {
    return null;
  }

  // Breadth-first walk from the root; `visited` guards against pid cycles.
  const queue = [rootPid];
  const visited = new Set();
  const records = [];
  while (queue.length > 0) {
    const pid = queue.shift();
    if (pid === undefined || visited.has(pid)) {
      continue;
    }
    visited.add(pid);
    const record = recordsByPid.get(pid);
    if (record) {
      records.push(record);
    }
    for (const childPid of childPidsByParent.get(pid) ?? []) {
      if (!visited.has(childPid)) {
        queue.push(childPid);
      }
    }
  }

  return records;
}
|
||||
|
||||
/**
 * Sum resident-set size (KiB) across the process tree rooted at `rootPid`.
 *
 * Returns `{ rssKb, processCount }`, or null when the tree cannot be
 * sampled (see getProcessTreeRecords).
 */
export function sampleProcessTreeRssKb(rootPid) {
  const records = getProcessTreeRecords(rootPid);
  if (!records) {
    return null;
  }

  let totalRssKb = 0;
  for (const { rssKb } of records) {
    totalRssKb += rssKb;
  }

  return { rssKb: totalRssKb, processCount: records.length };
}
|
||||
|
||||
// Matches Node.js diagnostic report filenames, capturing the emitting pid
// and the per-process sequence number used to order a pid's reports.
const REPORT_FILE_PATTERN =
  /^report\.(?<date>\d+)\.(?<time>\d+)\.(?<pid>\d+)\.0\.(?<sequence>\d+)\.json$/u;
|
||||
|
||||
/**
 * Read one Node.js diagnostic report file and reduce it to KiB-scale
 * memory figures.
 *
 * Returns `{ rssKb, usedHeapKb, externalKb }` (bytes rounded to KiB), or
 * null when the file is unreadable, is not valid JSON, or is missing any
 * of the three numeric fields.
 */
function readDiagnosticReport(reportPath) {
  let report;
  try {
    report = JSON.parse(fs.readFileSync(reportPath, "utf8"));
  } catch {
    // Unreadable or malformed report files are simply ignored.
    return null;
  }
  const byteValues = [
    report?.resourceUsage?.rss,
    report?.javascriptHeap?.usedMemory,
    report?.javascriptHeap?.externalMemory,
  ];
  if (!byteValues.every((value) => Number.isFinite(value))) {
    return null;
  }
  const [rssKb, usedHeapKb, externalKb] = byteValues.map((bytes) => Math.round(bytes / 1024));
  return { rssKb, usedHeapKb, externalKb };
}
|
||||
|
||||
/**
 * Group diagnostic reports in `reportDir` by emitting pid and summarize
 * memory growth per process.
 *
 * For each pid, reports are ordered by sequence number and the last
 * snapshot is diffed against the first, yielding
 * `{ pid, first, last, rssDeltaKb, usedHeapDeltaKb, externalDeltaKb }`.
 * The result is sorted by RSS growth, largest first. Returns [] when the
 * directory path is blank, unreadable, or contains no parseable reports.
 */
export function summarizeDiagnosticReports(reportDir) {
  if (typeof reportDir !== "string" || reportDir.trim() === "") {
    return [];
  }
  let entries;
  try {
    entries = fs.readdirSync(reportDir, { withFileTypes: true });
  } catch {
    // A missing or unreadable directory is treated as "no reports".
    return [];
  }

  // Bucket parseable reports by pid; files with unexpected names or
  // unparseable contents are skipped silently.
  const reportsByPid = new Map();
  for (const entry of entries) {
    if (!entry.isFile()) {
      continue;
    }
    const match = entry.name.match(REPORT_FILE_PATTERN);
    if (!match?.groups) {
      continue;
    }
    const pid = Number.parseInt(match.groups.pid, 10);
    const sequence = Number.parseInt(match.groups.sequence, 10);
    if (!Number.isInteger(pid) || !Number.isInteger(sequence)) {
      continue;
    }
    const reportPath = path.join(reportDir, entry.name);
    const report = readDiagnosticReport(reportPath);
    if (!report) {
      continue;
    }
    const bucket = reportsByPid.get(pid) ?? [];
    bucket.push({ pid, sequence, ...report });
    reportsByPid.set(pid, bucket);
  }

  return [...reportsByPid.entries()]
    .map(([pid, reports]) => {
      // Order each pid's reports by sequence so first/last are meaningful.
      const ordered = reports.toSorted((left, right) => left.sequence - right.sequence);
      const first = ordered[0];
      const last = ordered.at(-1);
      if (!first || !last) {
        return null;
      }
      return {
        pid,
        first,
        last,
        rssDeltaKb: last.rssKb - first.rssKb,
        usedHeapDeltaKb: last.usedHeapKb - first.usedHeapKb,
        externalDeltaKb: last.externalKb - first.externalKb,
      };
    })
    .filter((entry) => entry !== null)
    .toSorted((left, right) => right.rssDeltaKb - left.rssDeltaKb);
}
|
||||
@@ -1,53 +0,0 @@
|
||||
// Cap (in characters) on how much child stdout/stderr is retained in memory.
const DEFAULT_OUTPUT_CAPTURE_LIMIT = 200_000;

// Output signatures indicating the child test run died fatally (V8 OOM and
// related crashes), even when the recorded exit status alone looks clean.
const fatalOutputPatterns = [
  /FATAL ERROR:/i,
  /JavaScript heap out of memory/i,
  /node::OOMErrorHandler/i,
  /ERR_WORKER_OUT_OF_MEMORY/i,
];
|
||||
|
||||
/**
 * Append a chunk to an accumulated output buffer, retaining at most the
 * trailing `limit` characters. Falsy chunks leave the buffer untouched.
 */
export function appendCapturedOutput(current, chunk, limit = DEFAULT_OUTPUT_CAPTURE_LIMIT) {
  if (!chunk) {
    return current;
  }
  const combined = current + chunk;
  return combined.length > limit ? combined.slice(-limit) : combined;
}
|
||||
|
||||
/** True when the captured output contains any known fatal-crash signature. */
export function hasFatalTestRunOutput(output) {
  for (const pattern of fatalOutputPatterns) {
    if (pattern.test(output)) {
      return true;
    }
  }
  return false;
}
|
||||
|
||||
/**
 * Produce a display-friendly tail of captured output: at most `maxLines`
 * trailing lines and at most `maxChars` trailing characters.
 * Blank (whitespace-only) output yields "".
 */
export function formatCapturedOutputTail(output, maxLines = 60, maxChars = 6000) {
  const content = output.trim();
  if (content === "") {
    return "";
  }
  const tail = content.split(/\r?\n/u).slice(-maxLines).join("\n");
  return tail.length > maxChars ? tail.slice(-maxChars) : tail;
}
|
||||
|
||||
/**
 * Derive the wrapper's exit code from a child test run.
 *
 * Precedence: an explicit non-zero exit code wins; then a spawn error or
 * kill signal forces 1; then a fatal-output signature forces 1; otherwise
 * the child's code is propagated (0 when null/undefined).
 */
export function resolveTestRunExitCode({ code, signal, output, fatalSeen = false, childError }) {
  const exitedNonZero = typeof code === "number" && code !== 0;
  if (exitedNonZero) {
    return code;
  }
  if (childError) {
    return 1;
  }
  if (signal) {
    return 1;
  }
  const sawFatalOutput = fatalSeen || hasFatalTestRunOutput(output);
  return sawFatalOutput ? 1 : (code ?? 0);
}
|
||||
@@ -1,207 +0,0 @@
|
||||
import {
|
||||
createExecutionArtifacts,
|
||||
executePlan,
|
||||
formatExplanation,
|
||||
formatPlanOutput,
|
||||
} from "./test-planner/executor.mjs";
|
||||
import {
|
||||
buildCIExecutionManifest,
|
||||
buildExecutionPlan,
|
||||
explainExecutionTarget,
|
||||
} from "./test-planner/planner.mjs";
|
||||
|
||||
/**
 * Parse wrapper CLI arguments for the test runner.
 *
 * Recognized wrapper flags are consumed; everything after `--` (and any
 * unrecognized argument) is forwarded to vitest via `passthroughArgs`.
 * The failure-policy flags are checked before passthrough handling on
 * purpose, so they are honored even when appended after `--`.
 *
 * @param {string[]} args - argv slice with node and script entries removed.
 * @returns {object} parsed wrapper options (see the `wrapper` literal).
 * @throws {Error} when a valued flag has a missing or malformed value.
 */
const parseCliArgs = (args) => {
  const wrapper = {
    ciManifest: false,
    failurePolicy: null,
    plan: false,
    explain: null,
    mode: null,
    profile: null,
    surfaces: [],
    files: [],
    passthroughArgs: [],
    showHelp: false,
  };
  // Read the value following a flag; reject missing values and anything
  // that looks like another flag.
  const takeValue = (flagName, index) => {
    const nextValue = args[index + 1] ?? "";
    if (!nextValue || nextValue === "--" || nextValue.startsWith("-")) {
      throw new Error(`Invalid ${flagName} value: ${String(nextValue || "<missing>")}`);
    }
    return nextValue;
  };
  let passthroughMode = false;
  for (let index = 0; index < args.length; index += 1) {
    const arg = args[index];
    // Failure-policy flags apply even inside the passthrough tail.
    if (arg === "--collect-failures") {
      wrapper.failurePolicy = "collect-all";
      continue;
    }
    if (arg === "--failure-policy") {
      const nextValue = args[index + 1] ?? "";
      if (nextValue === "fail-fast" || nextValue === "collect-all") {
        wrapper.failurePolicy = nextValue;
        index += 1;
        continue;
      }
      throw new Error(`Invalid --failure-policy value: ${String(nextValue || "<missing>")}`);
    }
    if (arg.startsWith("--failure-policy=")) {
      const value = arg.slice("--failure-policy=".length);
      if (value === "fail-fast" || value === "collect-all") {
        wrapper.failurePolicy = value;
        continue;
      }
      throw new Error(`Invalid --failure-policy value: ${String(value || "<missing>")}`);
    }
    if (passthroughMode) {
      wrapper.passthroughArgs.push(arg);
      continue;
    }
    if (arg === "--") {
      passthroughMode = true;
      continue;
    }
    if (arg === "--plan") {
      wrapper.plan = true;
      continue;
    }
    if (arg === "--ci-manifest") {
      wrapper.ciManifest = true;
      continue;
    }
    if (arg === "--help") {
      wrapper.showHelp = true;
      continue;
    }
    if (arg === "--mode") {
      // BUGFIX: an invalid --mode value previously fell through silently
      // into passthroughArgs; it now fails loudly like the other flags.
      const nextValue = args[index + 1] ?? "";
      if (nextValue === "ci" || nextValue === "local") {
        wrapper.mode = nextValue;
        index += 1;
        continue;
      }
      throw new Error(`Invalid --mode value: ${String(nextValue || "<missing>")}`);
    }
    if (arg === "--profile") {
      wrapper.profile = takeValue("--profile", index);
      index += 1;
      continue;
    }
    if (arg === "--surface") {
      wrapper.surfaces.push(takeValue("--surface", index));
      index += 1;
      continue;
    }
    if (arg === "--files") {
      wrapper.files.push(takeValue("--files", index));
      index += 1;
      continue;
    }
    if (arg === "--explain") {
      wrapper.explain = takeValue("--explain", index);
      index += 1;
      continue;
    }
    // Anything unrecognized is forwarded to vitest untouched.
    wrapper.passthroughArgs.push(arg);
  }
  return wrapper;
};
|
||||
|
||||
// Run the temp-artifact cleanup hook when present, then exit the process
// with the given code. Both the artifacts object and its hook are optional.
const exitWithCleanup = (artifacts, code) => {
  artifacts?.cleanupTempArtifacts?.();
  process.exit(code);
};
|
||||
|
||||
// ---- CLI entry flow ------------------------------------------------------
// Parse wrapper flags; malformed flag values abort with exit code 2.
let rawCli;
try {
  rawCli = parseCliArgs(process.argv.slice(2));
} catch (error) {
  console.error(`[test-parallel] ${error instanceof Error ? error.message : String(error)}`);
  process.exit(2);
}
// --help prints usage and exits before any planning work happens.
if (rawCli.showHelp) {
  console.log(
    [
      "Usage: node scripts/test-parallel.mjs [wrapper flags] [-- vitest args]",
      "",
      "Runs the planner-backed OpenClaw test wrapper.",
      "",
      "Wrapper flags:",
      "  --plan                   Print the resolved execution plan and exit",
      "  --ci-manifest            Print the planner-backed CI execution manifest as JSON and exit",
      "  --explain <file>         Explain how a file is classified and run, then exit",
      "  --surface <name>         Select a surface: unit, bundled, extensions, channels, contracts, gateway",
      "  --files <pattern>        Add targeted files or path patterns (repeatable)",
      "  --mode <ci|local>        Override runtime mode",
      "  --profile <name>         Override execution intent: normal, max, serial",
      "  --failure-policy <name>  Override execution failure policy: fail-fast, collect-all",
      "  --collect-failures       Shortcut for --failure-policy collect-all",
      "  --help                   Show this help text",
      "",
      "Examples:",
      "  node scripts/test-parallel.mjs",
      "  node scripts/test-parallel.mjs --plan --surface unit --surface extensions",
      "  node scripts/test-parallel.mjs --explain src/auto-reply/reply/followup-runner.test.ts",
      "  node scripts/test-parallel.mjs --collect-failures --surface unit",
      "  node scripts/test-parallel.mjs --files src/foo.test.ts -- --reporter=dot",
      "",
      "Environment:",
      "  OPENCLAW_TEST_LIST_LANES=1           Print the resolved plan before execution",
      "  OPENCLAW_TEST_SHOW_POOL_DECISION=1   Include thread/fork pool decisions in diagnostics",
    ].join("\n"),
  );
  process.exit(0);
}

// Planner request assembled from the parsed CLI flags.
const request = {
  failurePolicy: rawCli.failurePolicy,
  mode: rawCli.mode,
  profile: rawCli.profile,
  surfaces: rawCli.surfaces,
  fileFilters: rawCli.files,
  passthroughArgs: rawCli.passthroughArgs,
};

// --explain: classify the single requested file and report, without running.
if (rawCli.explain) {
  const explanation = explainExecutionTarget(
    { ...request, passthroughArgs: [], fileFilters: [rawCli.explain] },
    { env: process.env },
  );
  console.log(formatExplanation(explanation));
  process.exit(0);
}

// --ci-manifest: emit the planner-backed CI matrix as JSON and exit.
if (rawCli.ciManifest) {
  const manifest = buildCIExecutionManifest(undefined, { env: process.env });
  console.log(`${JSON.stringify(manifest, null, 2)}\n`);
  process.exit(0);
}

// Build the execution plan; `artifacts` owns any temp files the plan writes
// and must be cleaned up on every exit path from here on.
const artifacts = createExecutionArtifacts(process.env);
let plan;
try {
  plan = buildExecutionPlan(request, {
    env: process.env,
    writeTempJsonArtifact: artifacts.writeTempJsonArtifact,
  });
} catch (error) {
  console.error(`[test-parallel] ${error instanceof Error ? error.message : String(error)}`);
  exitWithCleanup(artifacts, 2);
}

// --plan (or OPENCLAW_TEST_LIST_LANES=1): print the plan instead of running.
if (process.env.OPENCLAW_TEST_LIST_LANES === "1" || rawCli.plan) {
  console.log(formatPlanOutput(plan));
  exitWithCleanup(artifacts, 0);
}

// Execute the plan and propagate the child exit code; executePlan may
// return either a bare number or an object carrying `exitCode`.
const result = await executePlan(plan, { env: process.env, artifacts });
process.exit(typeof result === "number" ? result : result.exitCode);
||||
@@ -1,225 +0,0 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { channelTestPrefixes, isChannelSurfaceTestFile } from "../../vitest.channel-paths.mjs";
|
||||
import {
|
||||
isBoundaryTestFile,
|
||||
isBundledPluginDependentUnitTestFile,
|
||||
isUnitConfigTestFile,
|
||||
} from "../../vitest.unit-paths.mjs";
|
||||
import {
|
||||
BUNDLED_PLUGIN_PATH_PREFIX,
|
||||
BUNDLED_PLUGIN_ROOT_DIR,
|
||||
} from "../lib/bundled-plugin-paths.mjs";
|
||||
import { dedupeFilesPreserveOrder, loadTestRunnerBehavior } from "../test-runner-manifest.mjs";
|
||||
|
||||
// Path prefixes whose tests run on the base config surface (see classifyTestFile).
const baseConfigPrefixes = ["src/agents/", "src/auto-reply/", "src/commands/", "test/", "ui/"];
// Path prefixes that mark contract-surface test files.
const contractTestPrefixes = ["src/channels/plugins/contracts/", "src/plugins/contracts/"];
||||
|
||||
/** Convert an OS-specific path into repo-style forward-slash form. */
export const normalizeRepoPath = (value) => value.split(path.sep).join("/");

/**
 * Resolve `value` to a repo-relative forward-slash path, or null when it
 * points outside the repository root (process.cwd()).
 */
const toRepoRelativePath = (value) => {
  const relative = normalizeRepoPath(path.relative(process.cwd(), path.resolve(value)));
  if (relative === ".." || relative.startsWith("../")) {
    return null;
  }
  return relative;
};

/**
 * Recursively collect test files (`*.test.ts`, `*.live.test.ts`,
 * `*.e2e.test.ts`) under `rootDir`, returned as forward-slash paths.
 * A missing root yields an empty array.
 */
const walkTestFiles = (rootDir) => {
  if (!fs.existsSync(rootDir)) {
    return [];
  }
  const collected = [];
  for (const entry of fs.readdirSync(rootDir, { withFileTypes: true })) {
    const entryPath = path.join(rootDir, entry.name);
    if (entry.isDirectory()) {
      collected.push(...walkTestFiles(entryPath));
    } else if (
      entry.isFile() &&
      (entryPath.endsWith(".test.ts") ||
        entryPath.endsWith(".live.test.ts") ||
        entryPath.endsWith(".e2e.test.ts"))
    ) {
      collected.push(normalizeRepoPath(entryPath));
    }
  }
  return collected;
};
|
||||
|
||||
/**
 * Build the test catalog: the full set of known test files plus the
 * classification and filter-resolution helpers the planner uses.
 *
 * Sources of truth: the on-disk test trees (walked below) and the runner
 * behavior manifest (isolation / thread-pinning overrides). Manifest
 * entries referring to files that no longer exist are dropped.
 */
export function loadTestCatalog() {
  const behaviorManifest = loadTestRunnerBehavior();
  // Keep only manifest entries whose files still exist on disk.
  const existingFiles = (entries) =>
    entries.map((entry) => entry.file).filter((file) => fs.existsSync(file));
  const existingUnitConfigFiles = (entries) => existingFiles(entries).filter(isUnitConfigTestFile);
  const baseThreadPinnedFiles = existingFiles(behaviorManifest.base?.threadPinned ?? []);
  const channelIsolatedManifestFiles = existingFiles(behaviorManifest.channels?.isolated ?? []);
  const channelIsolatedPrefixes = behaviorManifest.channels?.isolatedPrefixes ?? [];
  const extensionForkIsolatedFiles = existingFiles(behaviorManifest.extensions?.isolated ?? []);
  const unitForkIsolatedFiles = existingUnitConfigFiles(behaviorManifest.unit.isolated);
  const unitThreadPinnedFiles = existingUnitConfigFiles(behaviorManifest.unit.threadPinned);
  const unitBehaviorOverrideSet = new Set([...unitForkIsolatedFiles, ...unitThreadPinnedFiles]);
  // Union of every test file discovered under the known test roots.
  const allKnownTestFiles = [
    ...new Set([
      ...walkTestFiles("src"),
      ...walkTestFiles(BUNDLED_PLUGIN_ROOT_DIR),
      ...walkTestFiles("packages"),
      ...walkTestFiles("test"),
      ...walkTestFiles(path.join("ui", "src", "ui")),
    ]),
  ];
  // Channel isolation comes from explicit manifest files plus any known
  // file under a manifest-declared isolated prefix.
  const channelIsolatedFiles = dedupeFilesPreserveOrder([
    ...channelIsolatedManifestFiles,
    ...allKnownTestFiles.filter((file) =>
      channelIsolatedPrefixes.some((prefix) => file.startsWith(prefix)),
    ),
  ]);
  const channelIsolatedFileSet = new Set(channelIsolatedFiles);
  const extensionForkIsolatedFileSet = new Set(extensionForkIsolatedFiles);
  const baseThreadPinnedFileSet = new Set(baseThreadPinnedFiles);
  const unitThreadPinnedFileSet = new Set(unitThreadPinnedFiles);
  const unitForkIsolatedFileSet = new Set(unitForkIsolatedFiles);

  /**
   * Classify one test file: which surface it runs on, whether it must be
   * isolated, and the human-readable reasons for both decisions.
   * `options` may supply additional per-run isolation lists.
   */
  const classifyTestFile = (fileFilter, options = {}) => {
    const normalizedFile = normalizeRepoPath(fileFilter);
    const reasons = [];
    // Isolated if any per-run option list or any manifest/rule set names it.
    const isolated =
      options.unitMemoryIsolatedFiles?.includes(normalizedFile) ||
      options.extensionMemoryIsolatedFiles?.includes(normalizedFile) ||
      options.extensionTimedIsolatedFiles?.includes(normalizedFile) ||
      options.channelTimedIsolatedFiles?.includes(normalizedFile) ||
      unitForkIsolatedFileSet.has(normalizedFile) ||
      extensionForkIsolatedFileSet.has(normalizedFile) ||
      channelIsolatedFileSet.has(normalizedFile);
    if (options.unitMemoryIsolatedFiles?.includes(normalizedFile)) {
      reasons.push("unit-memory-isolated");
    }
    if (options.extensionMemoryIsolatedFiles?.includes(normalizedFile)) {
      reasons.push("extensions-memory-heavy");
    }
    if (options.extensionTimedIsolatedFiles?.includes(normalizedFile)) {
      reasons.push("extensions-timed-heavy");
    }
    if (options.channelTimedIsolatedFiles?.includes(normalizedFile)) {
      reasons.push("channels-timed-heavy");
    }
    if (unitForkIsolatedFileSet.has(normalizedFile)) {
      reasons.push("unit-isolated-manifest");
    }
    if (extensionForkIsolatedFileSet.has(normalizedFile)) {
      reasons.push("extensions-isolated-manifest");
    }
    if (channelIsolatedFileSet.has(normalizedFile)) {
      reasons.push("channels-isolated-rule");
    }

    // Surface routing: first matching rule wins; order matters here.
    let surface = "base";
    if (isBundledPluginDependentUnitTestFile(normalizedFile)) {
      surface = "bundled";
    } else if (isBoundaryTestFile(normalizedFile)) {
      surface = "unit";
    } else if (isUnitConfigTestFile(normalizedFile)) {
      surface = "unit";
    } else if (contractTestPrefixes.some((prefix) => normalizedFile.startsWith(prefix))) {
      surface = "contracts";
    } else if (normalizedFile.endsWith(".live.test.ts")) {
      surface = "live";
    } else if (normalizedFile.endsWith(".e2e.test.ts")) {
      surface = "e2e";
    } else if (isChannelSurfaceTestFile(normalizedFile)) {
      surface = "channels";
    } else if (normalizedFile.startsWith(BUNDLED_PLUGIN_PATH_PREFIX)) {
      surface = "extensions";
    } else if (normalizedFile.startsWith("src/gateway/")) {
      surface = "gateway";
    } else if (baseConfigPrefixes.some((prefix) => normalizedFile.startsWith(prefix))) {
      surface = "base";
    } else if (normalizedFile.startsWith("src/")) {
      surface = "unit";
    }
    if (surface === "unit") {
      reasons.push("unit-surface");
    } else if (surface !== "base") {
      reasons.push(`${surface}-surface`);
    } else {
      reasons.push("base-surface");
    }

    const legacyBasePinned = baseThreadPinnedFileSet.has(normalizedFile);
    if (legacyBasePinned) {
      reasons.push("base-pinned-manifest");
    }
    if (unitThreadPinnedFileSet.has(normalizedFile)) {
      reasons.push("unit-pinned-manifest");
    }

    return {
      file: normalizedFile,
      surface,
      isolated,
      legacyBasePinned,
      reasons,
    };
  };

  /**
   * Expand one CLI file filter into known test files.
   * Precedence: explicit existing file > existing directory (prefix match)
   * > glob pattern > plain substring match. Explicit paths that resolve to
   * nothing throw rather than silently matching nothing.
   */
  const resolveFilterMatches = (fileFilter) => {
    const normalizedFilter = normalizeRepoPath(fileFilter);
    const repoRelativeFilter = toRepoRelativePath(fileFilter);
    if (fs.existsSync(fileFilter)) {
      const stats = fs.statSync(fileFilter);
      if (stats.isFile()) {
        if (repoRelativeFilter && allKnownTestFiles.includes(repoRelativeFilter)) {
          return [repoRelativeFilter];
        }
        throw new Error(`Explicit path ${fileFilter} is not a known test file.`);
      }
      if (stats.isDirectory()) {
        if (!repoRelativeFilter) {
          throw new Error(`Explicit path ${fileFilter} is outside the repo test roots.`);
        }
        const prefix = repoRelativeFilter.endsWith("/")
          ? repoRelativeFilter
          : `${repoRelativeFilter}/`;
        const matches = allKnownTestFiles.filter((file) => file.startsWith(prefix));
        if (matches.length === 0) {
          throw new Error(`Explicit path ${fileFilter} does not contain known test files.`);
        }
        return matches;
      }
    }
    // Glob metacharacters route the filter through path.matchesGlob.
    if (/[*?[\]{}]/.test(normalizedFilter)) {
      return allKnownTestFiles.filter((file) => path.matchesGlob(file, normalizedFilter));
    }
    return allKnownTestFiles.filter((file) => file.includes(normalizedFilter));
  };

  return {
    allKnownTestFiles,
    allKnownUnitFiles: allKnownTestFiles.filter((file) => isUnitConfigTestFile(file)),
    baseThreadPinnedFiles,
    channelIsolatedFiles,
    channelIsolatedFileSet,
    channelTestPrefixes,
    extensionForkIsolatedFiles,
    extensionForkIsolatedFileSet,
    unitBehaviorOverrideSet,
    unitForkIsolatedFiles,
    unitThreadPinnedFiles,
    baseThreadPinnedFileSet,
    classifyTestFile,
    resolveFilterMatches,
  };
}
|
||||
|
||||
// Every surface name the catalog's classifier may assign to a test file.
export const testSurfaces = [
  "unit",
  "bundled",
  "extensions",
  "channels",
  "contracts",
  "gateway",
  "live",
  "e2e",
  "base",
];
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,382 +0,0 @@
|
||||
import os from "node:os";
|
||||
|
||||
// Supported execution-intent profiles; the legacy alias "low" is mapped to
// "serial" during parsing (see parseProfile).
export const TEST_PROFILES = new Set(["normal", "serial", "max"]);
|
||||
|
||||
/** Parse a strictly positive base-10 integer; null for anything else. */
export const parsePositiveInt = (value) => {
  const parsed = Number.parseInt(value ?? "", 10);
  if (!Number.isFinite(parsed) || parsed <= 0) {
    return null;
  }
  return parsed;
};
|
||||
|
||||
/**
 * Decide whether the run is "ci" or "local". An explicit valid mode wins;
 * otherwise CI-style environment variables decide.
 */
export const resolveVitestMode = (env = process.env, explicitMode = null) => {
  switch (explicitMode) {
    case "ci":
    case "local":
      return explicitMode;
    default:
      break;
  }
  const inCi = env.CI === "true" || env.GITHUB_ACTIONS === "true";
  return inCi ? "ci" : "local";
};
|
||||
|
||||
// Constrain `value` to [min, max]; the lower bound wins, as in the original.
const clamp = (value, min, max) => {
  const capped = Math.min(max, value);
  return Math.max(min, capped);
};
|
||||
|
||||
/**
 * Normalize a raw profile string into one of TEST_PROFILES.
 * Blank/absent input means "normal"; the legacy alias "low" maps to
 * "serial"; any other unsupported name throws.
 */
const parseProfile = (rawProfile) => {
  if (!rawProfile) {
    return "normal";
  }
  const normalized = rawProfile.trim().toLowerCase();
  if (normalized === "low") {
    return "serial";
  }
  if (TEST_PROFILES.has(normalized)) {
    return normalized;
  }
  throw new Error(
    `Unsupported test profile "${normalized}". Supported profiles: normal, serial, max.`,
  );
};
|
||||
|
||||
/**
 * Ratio of the 1-minute load average to the CPU count. Returns 0 when
 * load-awareness is disabled via OPENCLAW_TEST_LOAD_AWARE, on Windows,
 * with a non-positive CPU count, or when no samples are available.
 */
const resolveLoadRatio = (env, cpuCount, platform, loadAverage) => {
  const toggle = env.OPENCLAW_TEST_LOAD_AWARE?.trim().toLowerCase();
  const disabled = toggle === "0" || toggle === "false";
  if (disabled || platform === "win32" || cpuCount <= 0) {
    return 0;
  }
  const samples = Array.isArray(loadAverage) ? loadAverage : os.loadavg();
  if (samples.length === 0) {
    return 0;
  }
  return samples[0] / cpuCount;
};
|
||||
|
||||
// Map host memory (GiB) to a coarse budget band. Upper bounds are exclusive.
const resolveMemoryBand = (memoryGiB) => {
  const bands = [
    [24, "constrained"],
    [48, "moderate"],
    [96, "mid"],
  ];
  for (const [upperBound, band] of bands) {
    if (memoryGiB < upperBound) {
      return band;
    }
  }
  return "high";
};
|
||||
|
||||
/**
 * Classify the load ratio into a scheduling band. Non-load-aware hosts
 * always report "normal". Thresholds: <0.5 idle, <0.9 normal, <1.1 busy,
 * otherwise saturated.
 */
const resolveLoadBand = (isLoadAware, loadRatio) => {
  if (!isLoadAware) {
    return "normal";
  }
  if (loadRatio >= 1.1) {
    return "saturated";
  }
  if (loadRatio >= 0.9) {
    return "busy";
  }
  return loadRatio < 0.5 ? "idle" : "normal";
};
|
||||
|
||||
// Scale a worker count down under load (busy ×0.75, saturated ×0.5),
// flooring at 1. Nullish inputs pass through unchanged.
const scaleForLoad = (value, loadBand) => {
  if (value == null) {
    return value;
  }
  let scale = 1;
  if (loadBand === "busy") {
    scale = 0.75;
  } else if (loadBand === "saturated") {
    scale = 0.5;
  }
  return Math.max(1, Math.floor(value * scale));
};
|
||||
|
||||
// Scale a top-level concurrency limit down under load (busy ×0.8,
// saturated ×0.5), flooring at 1. Nullish inputs pass through unchanged.
const scaleConcurrencyForLoad = (value, loadBand) => {
  if (value == null) {
    return value;
  }
  let scale = 1;
  if (loadBand === "busy") {
    scale = 0.8;
  } else if (loadBand === "saturated") {
    scale = 0.5;
  }
  return Math.max(1, Math.floor(value * scale));
};
|
||||
|
||||
// Scale a batch-duration target (ms) down under load (busy ×0.75,
// saturated ×0.5), flooring at 5s. Nullish or non-positive targets
// (0 disables batching) pass through unchanged.
const scaleBatchTargetForLoad = (value, loadBand) => {
  if (value == null || value <= 0) {
    return value;
  }
  let scale = 1;
  if (loadBand === "busy") {
    scale = 0.75;
  } else if (loadBand === "saturated") {
    scale = 0.5;
  }
  return Math.max(5_000, Math.floor(value * scale));
};
|
||||
|
||||
// Local-run execution budgets keyed by memory band (see resolveMemoryBand).
// Per band: lane worker counts, top-level parallelism limits, heavy-file
// limits/lane counts, and per-lane batch duration targets in ms (0 disables
// batching for that lane).
const LOCAL_MEMORY_BUDGETS = {
  // < 24 GiB hosts.
  constrained: {
    vitestCap: 2,
    unitShared: 2,
    channelsShared: 2,
    unitIsolated: 1,
    unitHeavy: 1,
    extensions: 1,
    gateway: 1,
    topLevelNoIsolate: 4,
    topLevelIsolated: 2,
    deferred: 1,
    heavyFileLimit: 36,
    heavyLaneCount: 3,
    memoryHeavyFileLimit: 8,
    unitFastBatchTargetMs: 10_000,
    channelsBatchTargetMs: 0,
    extensionsBatchTargetMs: 60_000,
  },
  // 24–47 GiB hosts.
  moderate: {
    vitestCap: 3,
    unitShared: 3,
    channelsShared: 3,
    unitIsolated: 1,
    unitHeavy: 1,
    extensions: 2,
    gateway: 1,
    topLevelNoIsolate: 6,
    topLevelIsolated: 2,
    deferred: 1,
    heavyFileLimit: 48,
    heavyLaneCount: 4,
    memoryHeavyFileLimit: 12,
    unitFastBatchTargetMs: 15_000,
    channelsBatchTargetMs: 0,
    extensionsBatchTargetMs: 120_000,
  },
  // 48–95 GiB hosts.
  mid: {
    vitestCap: 4,
    unitShared: 4,
    channelsShared: 4,
    unitIsolated: 1,
    unitHeavy: 1,
    extensions: 3,
    gateway: 1,
    topLevelNoIsolate: 8,
    topLevelIsolated: 3,
    deferred: 2,
    heavyFileLimit: 60,
    heavyLaneCount: 4,
    memoryHeavyFileLimit: 16,
    unitFastBatchTargetMs: 0,
    channelsBatchTargetMs: 0,
    extensionsBatchTargetMs: 180_000,
  },
  // >= 96 GiB hosts.
  high: {
    vitestCap: 6,
    unitShared: 6,
    channelsShared: 5,
    unitIsolated: 2,
    unitHeavy: 2,
    extensions: 5,
    gateway: 3,
    topLevelNoIsolate: 14,
    topLevelIsolated: 4,
    deferred: 8,
    heavyFileLimit: 80,
    heavyLaneCount: 5,
    memoryHeavyFileLimit: 16,
    unitFastBatchTargetMs: 45_000,
    channelsBatchTargetMs: 30_000,
    extensionsBatchTargetMs: 300_000,
  },
};
|
||||
|
||||
// Overlay the user's execution intent on a memory-band budget:
// "serial" forces one worker everywhere with no top-level parallelism,
// "max" raises every limit toward the CPU count (within hard caps), and
// any other profile returns the budget unchanged.
const withIntentBudgetAdjustments = (budget, intentProfile, cpuCount) => {
  if (intentProfile === "serial") {
    return {
      ...budget,
      vitestMaxWorkers: 1,
      unitSharedWorkers: 1,
      channelSharedWorkers: 1,
      unitIsolatedWorkers: 1,
      unitHeavyWorkers: 1,
      extensionWorkers: 1,
      gatewayWorkers: 1,
      topLevelParallelEnabled: false,
      topLevelParallelLimit: 1,
      topLevelParallelLimitNoIsolate: 1,
      topLevelParallelLimitIsolated: 1,
      deferredRunConcurrency: 1,
    };
  }

  if (intentProfile === "max") {
    // At least 5 top-level lanes, capped at 8.
    const maxTopLevelParallelLimit = clamp(
      Math.max(budget.topLevelParallelLimitNoIsolate ?? budget.topLevelParallelLimit ?? 1, 5),
      1,
      8,
    );
    return {
      ...budget,
      vitestMaxWorkers: clamp(Math.max(budget.vitestMaxWorkers, Math.min(8, cpuCount)), 1, 16),
      unitSharedWorkers: clamp(Math.max(budget.unitSharedWorkers, Math.min(8, cpuCount)), 1, 16),
      channelSharedWorkers: clamp(
        Math.max(budget.channelSharedWorkers ?? budget.unitSharedWorkers, Math.min(6, cpuCount)),
        1,
        16,
      ),
      unitIsolatedWorkers: clamp(Math.max(budget.unitIsolatedWorkers, Math.min(4, cpuCount)), 1, 4),
      unitHeavyWorkers: clamp(Math.max(budget.unitHeavyWorkers, Math.min(4, cpuCount)), 1, 4),
      extensionWorkers: clamp(Math.max(budget.extensionWorkers, Math.min(6, cpuCount)), 1, 6),
      gatewayWorkers: clamp(Math.max(budget.gatewayWorkers, Math.min(2, cpuCount)), 1, 6),
      topLevelParallelEnabled: true,
      topLevelParallelLimit: maxTopLevelParallelLimit,
      topLevelParallelLimitNoIsolate: maxTopLevelParallelLimit,
      topLevelParallelLimitIsolated: clamp(
        Math.max(budget.topLevelParallelLimitIsolated ?? budget.topLevelParallelLimit ?? 1, 4),
        1,
        8,
      ),
      deferredRunConcurrency: Math.max(budget.deferredRunConcurrency ?? 1, 3),
    };
  }

  return budget;
};
|
||||
|
||||
/**
 * Probe the host and environment once and return the runtime facts the
 * budget calculations depend on: mode (ci/local), OS flags, CPU/memory
 * figures (env-overridable), Node major version, the requested intent
 * profile, and memory/load bands.
 *
 * `options` can override platform, cpuCount, totalMemoryBytes,
 * nodeVersion, profile, mode, and loadAverage (mainly for tests).
 */
export function resolveRuntimeCapabilities(env = process.env, options = {}) {
  const mode = resolveVitestMode(env, options.mode ?? null);
  const isCI = mode === "ci";
  const platform = options.platform ?? process.platform;
  // RUNNER_OS lets GitHub-hosted runners assert the OS even when the
  // process platform probe is overridden.
  const runnerOs = env.RUNNER_OS ?? "";
  const isMacOS = platform === "darwin" || runnerOs === "macOS";
  const isWindows = platform === "win32" || runnerOs === "Windows";
  const isWindowsCi = isCI && isWindows;
  // Env overrides win over option overrides, which win over os probes.
  const hostCpuCount =
    parsePositiveInt(env.OPENCLAW_TEST_HOST_CPU_COUNT) ?? options.cpuCount ?? os.cpus().length;
  const totalMemoryBytes = options.totalMemoryBytes ?? os.totalmem();
  const hostMemoryGiB =
    parsePositiveInt(env.OPENCLAW_TEST_HOST_MEMORY_GIB) ?? Math.floor(totalMemoryBytes / 1024 ** 3);
  const nodeMajor = Number.parseInt(
    (options.nodeVersion ?? process.versions.node).split(".")[0] ?? "",
    10,
  );
  const intentProfile = parseProfile(options.profile ?? env.OPENCLAW_TEST_PROFILE ?? "normal");
  // Load-aware scheduling only applies to local, non-Windows runs.
  const loadRatio = !isCI ? resolveLoadRatio(env, hostCpuCount, platform, options.loadAverage) : 0;
  const loadAware = !isCI && platform !== "win32";
  const memoryBand = resolveMemoryBand(hostMemoryGiB);
  const loadBand = resolveLoadBand(loadAware, loadRatio);
  const runtimeProfileName = isCI
    ? isWindows
      ? "ci-windows"
      : isMacOS
        ? "ci-macos"
        : "ci-linux"
    : isWindows
      ? "local-windows"
      : isMacOS
        ? "local-darwin"
        : "local-linux";

  return {
    mode,
    runtimeProfileName,
    isCI,
    isMacOS,
    isWindows,
    isWindowsCi,
    platform,
    hostCpuCount,
    hostMemoryGiB,
    nodeMajor,
    intentProfile,
    memoryBand,
    loadAware,
    loadRatio,
    loadBand,
  };
}
|
||||
|
||||
/**
 * Translate resolved runtime capabilities into a concrete execution budget:
 * per-lane worker counts, top-level parallelism limits, and batch duration
 * targets used by the test scheduler.
 *
 * CI gets a fixed, conservative table keyed only on OS. Local runs start from
 * the memory-band table, are scaled down by current system load, and are
 * finally adjusted for the requested intent profile.
 */
export function resolveExecutionBudget(runtimeCapabilities) {
  const runtime = runtimeCapabilities;
  // Worker math never assumes fewer than 1 or more than 16 usable cores.
  const cpuCount = clamp(runtime.hostCpuCount, 1, 16);

  if (runtime.isCI) {
    // macOS CI lanes are pinned to a single worker; `null` leaves the lane
    // cap unset on the other CI platforms.
    const macCiWorkers = runtime.isMacOS ? 1 : null;
    return {
      vitestMaxWorkers: runtime.isWindows ? 2 : runtime.isMacOS ? 1 : 3,
      unitSharedWorkers: macCiWorkers,
      channelSharedWorkers: macCiWorkers,
      unitIsolatedWorkers: macCiWorkers,
      unitHeavyWorkers: macCiWorkers,
      extensionWorkers: macCiWorkers,
      gatewayWorkers: macCiWorkers,
      // Windows CI never runs top-level tasks in parallel; elsewhere the
      // "serial" intent profile also disables it.
      topLevelParallelEnabled: runtime.intentProfile !== "serial" && !runtime.isWindows,
      topLevelParallelLimit: runtime.isWindows ? 2 : 4,
      topLevelParallelLimitNoIsolate: runtime.isWindows ? 2 : 4,
      topLevelParallelLimitIsolated: runtime.isWindows ? 2 : 4,
      deferredRunConcurrency: null,
      heavyUnitFileLimit: 64,
      heavyUnitLaneCount: 4,
      memoryHeavyUnitFileLimit: 64,
      unitFastLaneCount: runtime.isWindows ? 1 : 3,
      // A target of 0 disables duration-based batching (Windows CI).
      unitFastBatchTargetMs: runtime.isWindows ? 0 : 45_000,
      channelsBatchTargetMs: runtime.isWindows ? 0 : 30_000,
      extensionsBatchTargetMs: runtime.isWindows ? 0 : 30_000,
    };
  }

  // Local runs: start from the memory-band table, capping every lane by the
  // (clamped) core count.
  const bandBudget = LOCAL_MEMORY_BUDGETS[runtime.memoryBand];
  const baseBudget = {
    vitestMaxWorkers: Math.min(cpuCount, bandBudget.vitestCap),
    unitSharedWorkers: Math.min(cpuCount, bandBudget.unitShared),
    channelSharedWorkers: Math.min(cpuCount, bandBudget.channelsShared ?? bandBudget.unitShared),
    unitIsolatedWorkers: Math.min(cpuCount, bandBudget.unitIsolated),
    unitHeavyWorkers: Math.min(cpuCount, bandBudget.unitHeavy),
    extensionWorkers: Math.min(cpuCount, bandBudget.extensions),
    gatewayWorkers: Math.min(cpuCount, bandBudget.gateway),
    topLevelParallelEnabled: !runtime.isWindows,
    topLevelParallelLimit: Math.min(cpuCount, bandBudget.topLevelIsolated),
    topLevelParallelLimitNoIsolate: Math.min(cpuCount, bandBudget.topLevelNoIsolate),
    topLevelParallelLimitIsolated: Math.min(cpuCount, bandBudget.topLevelIsolated),
    deferredRunConcurrency: bandBudget.deferred,
    heavyUnitFileLimit: bandBudget.heavyFileLimit,
    heavyUnitLaneCount: bandBudget.heavyLaneCount,
    memoryHeavyUnitFileLimit: bandBudget.memoryHeavyFileLimit,
    unitFastLaneCount: 1,
    unitFastBatchTargetMs: bandBudget.unitFastBatchTargetMs,
    channelsBatchTargetMs: bandBudget.channelsBatchTargetMs ?? 0,
    extensionsBatchTargetMs: bandBudget.extensionsBatchTargetMs ?? 300_000,
  };

  // Scale the base budget down when the machine is already under load.
  const loadAdjustedBudget = {
    ...baseBudget,
    vitestMaxWorkers: scaleForLoad(baseBudget.vitestMaxWorkers, runtime.loadBand),
    unitSharedWorkers: scaleForLoad(baseBudget.unitSharedWorkers, runtime.loadBand),
    channelSharedWorkers: scaleForLoad(baseBudget.channelSharedWorkers, runtime.loadBand),
    unitIsolatedWorkers: scaleForLoad(baseBudget.unitIsolatedWorkers, runtime.loadBand),
    unitHeavyWorkers: scaleForLoad(baseBudget.unitHeavyWorkers, runtime.loadBand),
    extensionWorkers: scaleForLoad(baseBudget.extensionWorkers, runtime.loadBand),
    gatewayWorkers: scaleForLoad(baseBudget.gatewayWorkers, runtime.loadBand),
    topLevelParallelLimit: scaleConcurrencyForLoad(
      baseBudget.topLevelParallelLimit,
      runtime.loadBand,
    ),
    topLevelParallelLimitNoIsolate: scaleConcurrencyForLoad(
      baseBudget.topLevelParallelLimitNoIsolate,
      runtime.loadBand,
    ),
    topLevelParallelLimitIsolated: scaleConcurrencyForLoad(
      baseBudget.topLevelParallelLimitIsolated,
      runtime.loadBand,
    ),
    unitFastBatchTargetMs: scaleBatchTargetForLoad(
      baseBudget.unitFastBatchTargetMs,
      runtime.loadBand,
    ),
    // Deferred runs back off by one when "busy" and collapse to 1 when the
    // host is "saturated"; otherwise the band value passes through.
    deferredRunConcurrency:
      runtime.loadBand === "busy"
        ? Math.max(1, (baseBudget.deferredRunConcurrency ?? 1) - 1)
        : runtime.loadBand === "saturated"
          ? 1
          : baseBudget.deferredRunConcurrency,
  };

  // Finally apply the user's intent profile (as parsed by parseProfile).
  return withIntentBudgetAdjustments(loadAdjustedBudget, runtime.intentProfile, cpuCount);
}
|
||||
|
||||
export function resolveLocalVitestMaxWorkers(env = process.env, options = {}) {
  // An explicit env override short-circuits all capability probing.
  const envOverride = parsePositiveInt(env.OPENCLAW_VITEST_MAX_WORKERS);
  if (envOverride !== null) {
    return envOverride;
  }

  // Otherwise derive the worker count from a local-mode execution budget.
  const capabilities = resolveRuntimeCapabilities(env, {
    cpuCount: options.cpuCount,
    totalMemoryBytes: options.totalMemoryBytes,
    platform: options.platform,
    mode: "local",
    loadAverage: options.loadAverage,
    profile: options.profile,
  });
  const budget = resolveExecutionBudget(capabilities);
  return budget.vitestMaxWorkers;
}
|
||||
@@ -1,353 +0,0 @@
|
||||
import { normalizeTrackedRepoPath, tryReadJsonFile } from "./test-report-utils.mjs";
|
||||
|
||||
// Repo-relative locations of the generated test-planning fixtures. Shared
// between the scripts that write these manifests and the code that consumes
// them when planning test runs.
export const behaviorManifestPath = "test/fixtures/test-parallel.behavior.json";
export const cliStartupBenchManifestPath = "test/fixtures/cli-startup-bench.json";
export const unitTimingManifestPath = "test/fixtures/test-timings.unit.json";
export const channelTimingManifestPath = "test/fixtures/test-timings.channels.json";
export const extensionTimingManifestPath = "test/fixtures/test-timings.extensions.json";
export const unitMemoryHotspotManifestPath = "test/fixtures/test-memory-hotspots.unit.json";
export const extensionMemoryHotspotManifestPath =
  "test/fixtures/test-memory-hotspots.extensions.json";
|
||||
|
||||
// Fallback manifests used when a fixture file is missing or unreadable.
// `defaultDurationMs` is the assumed per-file runtime (ms) for files without
// a recorded timing; `defaultMinDeltaKb` is the minimum RSS delta (KiB) for a
// file to count as a memory hotspot.
const defaultTimingManifest = {
  config: "vitest.unit.config.ts",
  defaultDurationMs: 250,
  files: {},
};
// Channel tests are assumed much slower per file than unit tests.
const defaultChannelTimingManifest = {
  config: "vitest.channels.config.ts",
  defaultDurationMs: 3000,
  files: {},
};
const defaultExtensionTimingManifest = {
  config: "vitest.extensions.config.ts",
  defaultDurationMs: 1000,
  files: {},
};
// 256 MiB minimum delta for unit memory hotspots.
const defaultMemoryHotspotManifest = {
  config: "vitest.unit.config.ts",
  defaultMinDeltaKb: 256 * 1024,
  files: {},
};
// 1 GiB minimum delta for extension memory hotspots.
const defaultExtensionMemoryHotspotManifest = {
  config: "vitest.extensions.config.ts",
  defaultMinDeltaKb: 1024 * 1024,
  files: {},
};
|
||||
|
||||
// Normalize manifest entries into `{ file, reason }` objects. Entries may be
// plain path strings or objects; any entry whose normalized path is empty is
// discarded.
const normalizeManifestEntries = (entries) => {
  const normalized = [];
  for (const entry of entries) {
    if (typeof entry === "string") {
      normalized.push({ file: normalizeTrackedRepoPath(entry), reason: "" });
      continue;
    }
    normalized.push({
      file: normalizeTrackedRepoPath(String(entry?.file ?? "")),
      reason: typeof entry?.reason === "string" ? entry.reason : "",
    });
  }
  return normalized.filter((entry) => entry.file.length > 0);
};
|
||||
|
||||
// Collect normalized entries from several manifest-section keys, keeping only
// the first occurrence of each file path.
const mergeManifestEntries = (section, keys) => {
  const seenFiles = new Set();
  const merged = [];
  for (const key of keys) {
    for (const entry of normalizeManifestEntries(section?.[key] ?? [])) {
      if (!seenFiles.has(entry.file)) {
        seenFiles.add(entry.file);
        merged.push(entry);
      }
    }
  }
  return merged;
};
|
||||
|
||||
// Collect string values from several manifest-section keys, normalizing each
// as a repo path, dropping non-strings/empties, and de-duplicating while
// preserving first-seen order.
const mergeManifestStrings = (section, keys) => {
  const seen = new Set();
  const merged = [];
  for (const key of keys) {
    const rawValues = Array.isArray(section?.[key]) ? section[key] : [];
    for (const rawValue of rawValues) {
      if (typeof rawValue !== "string") {
        continue;
      }
      const normalized = normalizeTrackedRepoPath(rawValue);
      if (normalized.length > 0 && !seen.has(normalized)) {
        seen.add(normalized);
        merged.push(normalized);
      }
    }
  }
  return merged;
};
|
||||
|
||||
/**
 * Load the behavior manifest and shape it into the per-suite overrides the
 * runner understands (isolated files/prefixes and thread-pinned files).
 * Missing sections resolve to empty lists.
 */
export function loadTestRunnerBehavior() {
  const manifest = tryReadJsonFile(behaviorManifestPath, {});
  const sectionOf = (key) => manifest[key] ?? {};
  // "threadSingleton" is accepted as a legacy alias for "threadPinned".
  const pinnedKeys = ["threadPinned", "threadSingleton"];
  const base = sectionOf("base");
  const channels = sectionOf("channels");
  const extensions = sectionOf("extensions");
  const unit = sectionOf("unit");
  return {
    base: {
      threadPinned: mergeManifestEntries(base, pinnedKeys),
    },
    channels: {
      isolated: mergeManifestEntries(channels, ["isolated"]),
      isolatedPrefixes: mergeManifestStrings(channels, ["isolatedPrefixes"]),
    },
    extensions: {
      isolated: mergeManifestEntries(extensions, ["isolated"]),
    },
    unit: {
      isolated: mergeManifestEntries(unit, ["isolated"]),
      threadPinned: mergeManifestEntries(unit, pinnedKeys),
    },
  };
}
|
||||
|
||||
// Read a timing manifest from disk (falling back to `fallbackManifest` when
// the file is missing/unreadable) and normalize its entries.
// Returns { config, generatedAt, defaultDurationMs, files } where `files`
// maps normalized repo paths to { durationMs, testCount? }.
const loadTimingManifest = (manifestPath, fallbackManifest) => {
  const raw = tryReadJsonFile(manifestPath, fallbackManifest);
  // Only a finite, strictly positive default duration from the file is trusted.
  const defaultDurationMs =
    Number.isFinite(raw.defaultDurationMs) && raw.defaultDurationMs > 0
      ? raw.defaultDurationMs
      : fallbackManifest.defaultDurationMs;
  const files = Object.fromEntries(
    Object.entries(raw.files ?? {})
      .map(([file, value]) => {
        const normalizedFile = normalizeTrackedRepoPath(file);
        const durationMs =
          Number.isFinite(value?.durationMs) && value.durationMs >= 0 ? value.durationMs : null;
        const testCount =
          Number.isFinite(value?.testCount) && value.testCount >= 0 ? value.testCount : null;
        // NOTE(review): `!durationMs` also drops entries with durationMs === 0
        // even though the validation above accepts 0 — presumably intentional
        // (a zero timing carries no scheduling signal); confirm.
        if (!durationMs) {
          return [normalizedFile, null];
        }
        return [
          normalizedFile,
          {
            durationMs,
            // testCount is optional; omit the key entirely when absent.
            ...(testCount !== null ? { testCount } : {}),
          },
        ];
      })
      .filter(([, value]) => value !== null),
  );

  return {
    config: typeof raw.config === "string" && raw.config ? raw.config : fallbackManifest.config,
    generatedAt: typeof raw.generatedAt === "string" ? raw.generatedAt : "",
    defaultDurationMs,
    files,
  };
};
|
||||
|
||||
// Load the unit-test timing manifest, falling back to built-in defaults.
export function loadUnitTimingManifest() {
  return loadTimingManifest(unitTimingManifestPath, defaultTimingManifest);
}
|
||||
|
||||
// Load the channel-test timing manifest, falling back to built-in defaults.
export function loadChannelTimingManifest() {
  return loadTimingManifest(channelTimingManifestPath, defaultChannelTimingManifest);
}
|
||||
|
||||
// Load the extension-test timing manifest, falling back to built-in defaults.
export function loadExtensionTimingManifest() {
  return loadTimingManifest(extensionTimingManifestPath, defaultExtensionTimingManifest);
}
|
||||
|
||||
// Read a memory-hotspot manifest from disk (falling back to
// `fallbackManifest`) and normalize its entries. Returns
// { config, generatedAt, defaultMinDeltaKb, files } where `files` maps
// normalized repo paths to { deltaKb, sources? }.
const loadMemoryHotspotManifest = (manifestPath, fallbackManifest) => {
  const raw = tryReadJsonFile(manifestPath, fallbackManifest);
  // Only a finite, strictly positive threshold from the file is trusted.
  const defaultMinDeltaKb =
    Number.isFinite(raw.defaultMinDeltaKb) && raw.defaultMinDeltaKb > 0
      ? raw.defaultMinDeltaKb
      : fallbackManifest.defaultMinDeltaKb;
  const files = Object.fromEntries(
    Object.entries(raw.files ?? {})
      .map(([file, value]) => {
        const normalizedFile = normalizeTrackedRepoPath(file);
        // Deltas are rounded to whole KiB; non-positive/invalid ones drop the entry.
        const deltaKb =
          Number.isFinite(value?.deltaKb) && value.deltaKb > 0 ? Math.round(value.deltaKb) : null;
        // Source labels are advisory; keep only non-empty strings.
        const sources = Array.isArray(value?.sources)
          ? value.sources.filter((source) => typeof source === "string" && source.length > 0)
          : [];
        if (deltaKb === null) {
          return [normalizedFile, null];
        }
        return [
          normalizedFile,
          {
            deltaKb,
            // Omit the sources key entirely when there are none.
            ...(sources.length > 0 ? { sources } : {}),
          },
        ];
      })
      .filter(([, value]) => value !== null),
  );

  return {
    config: typeof raw.config === "string" && raw.config ? raw.config : fallbackManifest.config,
    generatedAt: typeof raw.generatedAt === "string" ? raw.generatedAt : "",
    defaultMinDeltaKb,
    files,
  };
};
|
||||
|
||||
// Load the unit-test memory-hotspot manifest, falling back to defaults.
export function loadUnitMemoryHotspotManifest() {
  return loadMemoryHotspotManifest(unitMemoryHotspotManifestPath, defaultMemoryHotspotManifest);
}
|
||||
|
||||
// Load the extension-test memory-hotspot manifest, falling back to defaults.
export function loadExtensionMemoryHotspotManifest() {
  return loadMemoryHotspotManifest(
    extensionMemoryHotspotManifestPath,
    defaultExtensionMemoryHotspotManifest,
  );
}
|
||||
|
||||
/**
 * Pick up to `limit` candidate files with a *recorded* timing of at least
 * `minDurationMs`, slowest first. Files in `exclude` and files without an
 * entry in `timings.files` never qualify.
 */
export function selectTimedHeavyFiles({
  candidates,
  limit,
  minDurationMs,
  exclude = new Set(),
  timings,
}) {
  const scored = [];
  for (const file of candidates) {
    if (exclude.has(file)) {
      continue;
    }
    const record = timings.files[file];
    if (!record) {
      // Unknown files are never considered heavy, whatever the default says.
      continue;
    }
    const durationMs = record.durationMs ?? timings.defaultDurationMs;
    if (durationMs >= minDurationMs) {
      scored.push({ file, durationMs });
    }
  }
  scored.sort((a, b) => b.durationMs - a.durationMs);
  return scored.slice(0, limit).map((entry) => entry.file);
}
|
||||
|
||||
/**
 * Pick up to `limit` candidate files with a *recorded* memory delta of at
 * least `minDeltaKb`, largest first. Files in `exclude` and files without an
 * entry in `hotspots.files` never qualify.
 */
export function selectMemoryHeavyFiles({
  candidates,
  limit,
  minDeltaKb,
  exclude = new Set(),
  hotspots,
}) {
  const scored = [];
  for (const file of candidates) {
    if (exclude.has(file)) {
      continue;
    }
    const record = hotspots.files[file];
    if (!record) {
      // Only files present in the hotspot manifest are considered.
      continue;
    }
    const deltaKb = record.deltaKb ?? 0;
    if (deltaKb >= minDeltaKb) {
      scored.push({ file, deltaKb });
    }
  }
  scored.sort((a, b) => b.deltaKb - a.deltaKb);
  return scored.slice(0, limit).map((entry) => entry.file);
}
|
||||
|
||||
/**
 * Split candidate unit-test files into two heavy groups: memory-heavy files
 * (claimed first) and timed-heavy files. Behavior-override files are excluded
 * from both; a non-positive limit disables the corresponding group.
 */
export function selectUnitHeavyFileGroups({
  candidates,
  behaviorOverrides = new Set(),
  timedLimit,
  timedMinDurationMs,
  memoryLimit,
  memoryMinDeltaKb,
  timings,
  hotspots,
}) {
  let memoryHeavyFiles = [];
  if (memoryLimit > 0) {
    memoryHeavyFiles = selectMemoryHeavyFiles({
      candidates,
      limit: memoryLimit,
      minDeltaKb: memoryMinDeltaKb,
      exclude: behaviorOverrides,
      hotspots,
    });
  }

  // Memory-heavy picks also exclude themselves from the timed selection so a
  // file is never scheduled into both groups.
  const schedulingOverrides = new Set([...behaviorOverrides, ...memoryHeavyFiles]);
  let timedHeavyFiles = [];
  if (timedLimit > 0) {
    timedHeavyFiles = selectTimedHeavyFiles({
      candidates,
      limit: timedLimit,
      minDurationMs: timedMinDurationMs,
      exclude: schedulingOverrides,
      timings,
    });
  }

  return { memoryHeavyFiles, timedHeavyFiles };
}
|
||||
|
||||
/**
 * Pack `files` into up to `bucketCount` duration-balanced groups, never
 * creating more buckets than files, and dropping any bucket that ends up
 * empty. Returns [] for a non-positive bucket count or no files.
 */
export function packFilesByDuration(files, bucketCount, estimateDurationMs) {
  const requestedBuckets = Math.max(0, Math.floor(bucketCount));
  if (requestedBuckets <= 0 || files.length === 0) {
    return [];
  }

  const effectiveBuckets = Math.min(requestedBuckets, files.length);
  const emptyBuckets = Array.from({ length: effectiveBuckets }, () => ({
    totalMs: 0,
    files: [],
  }));
  const packed = packFilesIntoDurationBuckets(files, emptyBuckets, estimateDurationMs);
  return packed.filter((bucket) => bucket.length > 0);
}
|
||||
|
||||
/**
 * Pack `files` into exactly `bucketCount` duration-balanced groups, seeding
 * each bucket with an optional pre-existing load (ms) from `baseLoadsMs`.
 * Invalid or negative base loads count as 0. Returns [] for a non-positive
 * bucket count.
 */
export function packFilesByDurationWithBaseLoads(
  files,
  bucketCount,
  estimateDurationMs,
  baseLoadsMs = [],
) {
  const requestedBuckets = Math.max(0, Math.floor(bucketCount));
  if (requestedBuckets <= 0) {
    return [];
  }

  const seededBuckets = Array.from({ length: requestedBuckets }, (_, index) => {
    const baseLoad = baseLoadsMs[index];
    const hasValidLoad = Number.isFinite(baseLoad) && baseLoad >= 0;
    return {
      totalMs: hasValidLoad ? Math.round(baseLoad) : 0,
      files: [],
    };
  });
  return packFilesIntoDurationBuckets(files, seededBuckets, estimateDurationMs);
}
|
||||
|
||||
// Greedy longest-processing-time packing: place files, longest first, into
// whichever bucket currently has the smallest total (first bucket wins ties).
// Mutates the passed buckets and returns their file lists in bucket order.
function packFilesIntoDurationBuckets(files, buckets, estimateDurationMs) {
  const byDescendingDuration = [...files];
  byDescendingDuration.sort((left, right) => estimateDurationMs(right) - estimateDurationMs(left));

  for (const file of byDescendingDuration) {
    // Find the currently lightest bucket (stable on ties).
    let target = buckets[0];
    for (const candidate of buckets) {
      if (candidate.totalMs < target.totalMs) {
        target = candidate;
      }
    }
    target.files.push(file);
    target.totalMs += estimateDurationMs(file);
  }

  return buckets.map((bucket) => bucket.files);
}
|
||||
|
||||
/**
 * Return `files` with duplicates and excluded entries removed, keeping the
 * first occurrence of each file in its original position.
 */
export function dedupeFilesPreserveOrder(files, exclude = new Set()) {
  const seen = new Set();
  return files.filter((file) => {
    if (exclude.has(file) || seen.has(file)) {
      return false;
    }
    seen.add(file);
    return true;
  });
}
|
||||
@@ -1,6 +1,7 @@
|
||||
import { spawnSync } from "node:child_process";
|
||||
import { parseFlagArgs, stringFlag, intFlag } from "./lib/arg-utils.mjs";
|
||||
import { cliStartupBenchManifestPath } from "./test-runner-manifest.mjs";
|
||||
|
||||
const CLI_STARTUP_BENCH_FIXTURE_PATH = "test/fixtures/cli-startup-bench.json";
|
||||
|
||||
if (process.argv.slice(2).includes("--help")) {
|
||||
console.log(
|
||||
@@ -28,7 +29,7 @@ if (process.argv.slice(2).includes("--help")) {
|
||||
const opts = parseFlagArgs(
|
||||
process.argv.slice(2),
|
||||
{
|
||||
out: cliStartupBenchManifestPath,
|
||||
out: CLI_STARTUP_BENCH_FIXTURE_PATH,
|
||||
entry: "openclaw.mjs",
|
||||
preset: "all",
|
||||
runs: 5,
|
||||
|
||||
@@ -1,89 +0,0 @@
|
||||
import { execFileSync } from "node:child_process";
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
|
||||
/**
 * Gather memory-trace log texts from three sources: local log files, explicit
 * GitHub Actions job ids, and GitHub Actions run ids (expanded to their jobs
 * via the `gh` CLI). Returns [{ sourceName, text }] entries.
 *
 * The fs/exec implementations are injectable for testing.
 * Throws (with `cause`) when a gh run's job list cannot be fetched or parsed.
 */
export function loadHotspotInputTexts({
  logPaths = [],
  ghJobs = [],
  ghRuns = [],
  ghRunJobMatches = [],
  readFileSyncImpl = fs.readFileSync,
  execFileSyncImpl = execFileSync,
}) {
  const inputs = [];
  // Local log files: source name is the file name without its extension.
  for (const logPath of logPaths) {
    inputs.push({
      sourceName: path.basename(logPath, path.extname(logPath)),
      text: readFileSyncImpl(logPath, "utf8"),
    });
  }
  // Job-name filters are matched case-insensitively as substrings.
  const normalizedRunJobMatches = ghRunJobMatches
    .map((match) => (typeof match === "string" ? match.trim() : String(match ?? "").trim()))
    .filter((match) => match.length > 0)
    .map((match) => match.toLowerCase());
  const shouldIncludeRunJob = (jobName) => {
    // No filters configured means every job of the run is included.
    if (normalizedRunJobMatches.length === 0) {
      return true;
    }
    if (typeof jobName !== "string") {
      return false;
    }
    const normalizedName = jobName.toLowerCase();
    return normalizedRunJobMatches.some((match) => normalizedName.includes(match));
  };
  // Deduplicate explicit and run-derived job ids so repeated inputs do not refetch the same log.
  const ghJobIds = new Set(
    ghJobs
      .map((jobId) => (typeof jobId === "string" ? jobId.trim() : String(jobId ?? "").trim()))
      .filter((jobId) => jobId.length > 0),
  );
  // Expand each run id into its (optionally name-filtered) job ids.
  for (const ghRunId of ghRuns) {
    const normalizedRunId =
      typeof ghRunId === "string" ? ghRunId.trim() : String(ghRunId ?? "").trim();
    if (normalizedRunId.length === 0) {
      continue;
    }
    let rawJobs;
    try {
      rawJobs = execFileSyncImpl("gh", ["run", "view", normalizedRunId, "--json", "jobs"], {
        encoding: "utf8",
        maxBuffer: 8 * 1024 * 1024,
      });
    } catch (error) {
      throw new Error(
        `[test-update-memory-hotspots] failed to fetch gh run ${normalizedRunId} jobs`,
        { cause: error },
      );
    }
    let jobs = [];
    try {
      const parsed = JSON.parse(rawJobs);
      jobs = Array.isArray(parsed?.jobs) ? parsed.jobs : [];
    } catch (error) {
      throw new Error(
        `[test-update-memory-hotspots] failed to parse gh run ${normalizedRunId} jobs json`,
        { cause: error },
      );
    }
    for (const job of jobs) {
      if (!shouldIncludeRunJob(job?.name)) {
        continue;
      }
      // Jobs without a numeric databaseId are silently skipped.
      const jobId = job?.databaseId;
      if (!Number.isFinite(jobId)) {
        continue;
      }
      ghJobIds.add(String(jobId));
    }
  }
  // Fetch the full log for each unique job id.
  for (const ghJobId of ghJobIds) {
    inputs.push({
      sourceName: `gh-job-${String(ghJobId)}`,
      text: execFileSyncImpl("gh", ["run", "view", "--job", String(ghJobId), "--log"], {
        encoding: "utf8",
        maxBuffer: 64 * 1024 * 1024,
      }),
    });
  }
  return inputs;
}
|
||||
@@ -1,6 +0,0 @@
|
||||
/**
 * Decide whether a memory-summary lane belongs to the target lane: either an
 * exact name match, or the lane starts with one of the non-empty configured
 * prefixes.
 */
export function matchesHotspotSummaryLane(lane, targetLane, lanePrefixes = []) {
  if (lane === targetLane) {
    return true;
  }
  for (const prefix of lanePrefixes) {
    if (prefix.length > 0 && lane.startsWith(prefix)) {
      return true;
    }
  }
  return false;
}
|
||||
@@ -1,169 +0,0 @@
|
||||
import { intFlag, parseFlagArgs, stringFlag, stringListFlag } from "./lib/arg-utils.mjs";
|
||||
import { parseMemoryTraceSummaryLines } from "./test-parallel-memory.mjs";
|
||||
import { normalizeTrackedRepoPath, tryReadJsonFile, writeJsonFile } from "./test-report-utils.mjs";
|
||||
import { unitMemoryHotspotManifestPath } from "./test-runner-manifest.mjs";
|
||||
import { loadHotspotInputTexts } from "./test-update-memory-hotspots-sources.mjs";
|
||||
import { matchesHotspotSummaryLane } from "./test-update-memory-hotspots-utils.mjs";
|
||||
|
||||
// Print usage and exit before any argument parsing when --help is requested.
if (process.argv.slice(2).includes("--help")) {
  console.log(
    [
      "Usage: node scripts/test-update-memory-hotspots.mjs [options]",
      "",
      "Generate or refresh the unit memory-hotspot manifest from test-parallel memory logs.",
      "",
      "Options:",
      " --config <path> Vitest config label stored in the output manifest",
      " --out <path> Output manifest path (default: test/fixtures/test-memory-hotspots.unit.json)",
      " --lane <name> Primary lane name to match (default: unit-fast)",
      " --lane-prefix <prefix> Additional lane prefixes to include (repeatable)",
      " --log <path> Memory trace log to ingest (repeatable, required)",
      " --gh-job <id> GitHub Actions job id to ingest via gh (repeatable)",
      " --gh-run <id> GitHub Actions run id to ingest via gh (repeatable)",
      " --gh-run-job-match <text> Filter gh-run jobs by name substring (repeatable)",
      " --min-delta-kb <kb> Minimum RSS delta to retain (default: 262144)",
      " --limit <count> Max hotspot entries to retain (default: 64)",
      " --help Show this help text",
      "",
      "Examples:",
      " node scripts/test-update-memory-hotspots.mjs --log /tmp/unit-fast.log",
      " node scripts/test-update-memory-hotspots.mjs --log a.log --log b.log --lane-prefix unit-fast-batch-",
      " node scripts/test-update-memory-hotspots.mjs --gh-job 69804189668 --gh-job 69804189672",
      " node scripts/test-update-memory-hotspots.mjs --gh-run 23933168654 --gh-run-job-match extensions",
    ].join("\n"),
  );
  process.exit(0);
}
|
||||
|
||||
// Parse the script's CLI flags into an options object. Defaults target the
// unit lane and the unit memory-hotspot manifest; list flags are repeatable.
function parseArgs(argv) {
  return parseFlagArgs(
    argv,
    {
      config: "vitest.unit.config.ts",
      out: unitMemoryHotspotManifestPath,
      lane: "unit-fast",
      lanePrefixes: [],
      logs: [],
      ghJobs: [],
      ghRuns: [],
      ghRunJobMatches: [],
      // 256 MiB minimum RSS delta by default.
      minDeltaKb: 256 * 1024,
      limit: 64,
    },
    [
      stringFlag("--config", "config"),
      stringFlag("--out", "out"),
      stringFlag("--lane", "lane"),
      stringListFlag("--lane-prefix", "lanePrefixes"),
      stringListFlag("--log", "logs"),
      stringListFlag("--gh-job", "ghJobs"),
      stringListFlag("--gh-run", "ghRuns"),
      stringListFlag("--gh-run-job-match", "ghRunJobMatches"),
      intFlag("--min-delta-kb", "minDeltaKb", { min: 1 }),
      intFlag("--limit", "limit", { min: 1 }),
    ],
  );
}
|
||||
|
||||
// Merge one hotspot record into the `aggregated` map (keyed by normalized
// repo path). Invalid or non-positive deltas are ignored; repeated files keep
// the largest delta and the union of their source labels.
function mergeHotspotEntry(aggregated, file, value) {
  const deltaKb = value?.deltaKb;
  if (!Number.isFinite(deltaKb) || deltaKb <= 0) {
    return;
  }

  const key = normalizeTrackedRepoPath(file);
  // Source labels look like "name:lane" (or just "name"); strip a trailing
  // ".log" from the name part so labels are stable across raw/derived inputs.
  const stripLogSuffix = (label) => (label.endsWith(".log") ? label.slice(0, -4) : label);
  const normalizeSourceLabel = (source) => {
    const separator = source.lastIndexOf(":");
    if (separator === -1) {
      return stripLogSuffix(source);
    }
    return `${stripLogSuffix(source.slice(0, separator))}:${source.slice(separator + 1)}`;
  };

  const rawSources = Array.isArray(value?.sources) ? value.sources : [];
  const incomingSources = rawSources
    .filter((source) => typeof source === "string" && source.length > 0)
    .map(normalizeSourceLabel);
  const roundedDelta = Math.round(deltaKb);

  const existing = aggregated.get(key);
  if (!existing) {
    aggregated.set(key, {
      deltaKb: roundedDelta,
      sources: [...new Set(incomingSources)],
    });
    return;
  }

  existing.deltaKb = Math.max(existing.deltaKb, roundedDelta);
  for (const source of incomingSources) {
    if (!existing.sources.includes(source)) {
      existing.sources.push(source);
    }
  }
}
|
||||
|
||||
const opts = parseArgs(process.argv.slice(2));

// At least one input source (local log, gh job, or gh run) is required.
if (opts.logs.length === 0 && opts.ghJobs.length === 0 && opts.ghRuns.length === 0) {
  console.error(
    "[test-update-memory-hotspots] pass at least one --log <path>, --gh-job <id>, or --gh-run <id>.",
  );
  process.exit(2);
}

// Seed the aggregate with the existing manifest (if any) so refreshes merge
// rather than replace.
const aggregated = new Map();
const existing = tryReadJsonFile(opts.out, null);
if (existing) {
  for (const [file, value] of Object.entries(existing.files ?? {})) {
    mergeHotspotEntry(aggregated, file, value);
  }
}
// Ingest every input text, keeping only summaries for the selected lane(s)
// and records at or above the delta threshold.
for (const input of loadHotspotInputTexts({
  logPaths: opts.logs,
  ghJobs: opts.ghJobs,
  ghRuns: opts.ghRuns,
  ghRunJobMatches: opts.ghRunJobMatches,
})) {
  const text = input.text;
  const summaries = parseMemoryTraceSummaryLines(text).filter((summary) =>
    matchesHotspotSummaryLane(summary.lane, opts.lane, opts.lanePrefixes),
  );
  for (const summary of summaries) {
    for (const record of summary.top) {
      if (record.deltaKb < opts.minDeltaKb) {
        continue;
      }
      mergeHotspotEntry(aggregated, record.file, {
        deltaKb: record.deltaKb,
        sources: [`${input.sourceName}:${summary.lane}`],
      });
    }
  }
}

// Keep the `limit` largest deltas, sorted descending, with sorted sources for
// stable manifest diffs.
const files = Object.fromEntries(
  [...aggregated.entries()]
    .toSorted((left, right) => right[1].deltaKb - left[1].deltaKb)
    .slice(0, opts.limit)
    .map(([file, value]) => [
      file,
      {
        deltaKb: value.deltaKb,
        sources: value.sources.toSorted(),
      },
    ]),
);

const output = {
  config: opts.config,
  generatedAt: new Date().toISOString(),
  defaultMinDeltaKb: opts.minDeltaKb,
  // Record which lane(s) produced this manifest; prefixes are shown with "*".
  lane:
    opts.lanePrefixes.length === 0
      ? opts.lane
      : [opts.lane, ...opts.lanePrefixes.map((prefix) => String(prefix).concat("*"))].join(", "),
  files,
};

writeJsonFile(opts.out, output);
console.log(
  `[test-update-memory-hotspots] wrote ${String(Object.keys(files).length)} hotspots to ${opts.out}`,
);
|
||||
@@ -1,105 +0,0 @@
|
||||
import { intFlag, parseFlagArgs, stringFlag } from "./lib/arg-utils.mjs";
|
||||
import { loadVitestReportFromArgs } from "./lib/vitest-report-cli-utils.mjs";
|
||||
import {
|
||||
collectVitestFileDurations,
|
||||
normalizeTrackedRepoPath,
|
||||
writeJsonFile,
|
||||
} from "./test-report-utils.mjs";
|
||||
import { extensionTimingManifestPath, unitTimingManifestPath } from "./test-runner-manifest.mjs";
|
||||
|
||||
// Map a vitest config name to the timing-manifest defaults for that suite:
// output path, fallback per-file duration, and a human-readable label.
// Anything other than the extensions config falls back to the unit defaults.
const resolveDefaultManifestSettings = (config) => {
  const isExtensionsConfig = config === "vitest.extensions.config.ts";
  if (isExtensionsConfig) {
    return {
      out: extensionTimingManifestPath,
      defaultDurationMs: 1000,
      description: "extension",
    };
  }
  return {
    out: unitTimingManifestPath,
    defaultDurationMs: 250,
    description: "unit",
  };
};
|
||||
|
||||
// Print usage and exit before any argument parsing when --help is requested.
if (process.argv.slice(2).includes("--help")) {
  console.log(
    [
      "Usage: node scripts/test-update-timings.mjs [options]",
      "",
      "Generate or refresh a test timing manifest from a Vitest JSON report.",
      "",
      "Options:",
      " --config <path> Vitest config to run when no report is supplied",
      " --report <path> Reuse an existing Vitest JSON report",
      " --out <path> Output manifest path (default follows --config)",
      " --limit <count> Max number of file timings to retain (default: 256)",
      " --default-duration-ms <ms> Fallback duration for unknown files (default follows --config)",
      " --help Show this help text",
      "",
      "Examples:",
      " node scripts/test-update-timings.mjs",
      " node scripts/test-update-timings.mjs --config vitest.unit.config.ts --limit 128",
      " node scripts/test-update-timings.mjs --config vitest.extensions.config.ts",
      " node scripts/test-update-timings.mjs --report /tmp/vitest-report.json --out /tmp/timings.json",
    ].join("\n"),
  );
  process.exit(0);
}
|
||||
|
||||
// Parse CLI flags, then fold in per-config defaults (output path, fallback
// duration, description) for any value the user did not supply.
function parseArgs(argv) {
  const parsed = parseFlagArgs(
    argv,
    {
      config: "vitest.unit.config.ts",
      limit: 256,
      reportPath: "",
      out: "",
      defaultDurationMs: 0,
    },
    [
      stringFlag("--config", "config"),
      intFlag("--limit", "limit", { min: 1 }),
      stringFlag("--report", "reportPath"),
      stringFlag("--out", "out"),
      intFlag("--default-duration-ms", "defaultDurationMs", { min: 1 }),
    ],
  );
  const defaults = resolveDefaultManifestSettings(parsed.config);
  return {
    ...parsed,
    // Empty string means "not provided": fall back to the config's default path.
    out: parsed.out || defaults.out,
    // 0 (the sentinel default) or invalid values fall back per config.
    defaultDurationMs:
      Number.isFinite(parsed.defaultDurationMs) && parsed.defaultDurationMs > 0
        ? parsed.defaultDurationMs
        : defaults.defaultDurationMs,
    description: defaults.description,
  };
}
|
||||
|
||||
const opts = parseArgs(process.argv.slice(2));
// Either reuse the supplied report or run Vitest to produce one.
const report = loadVitestReportFromArgs(opts, "openclaw-vitest-timings");
// Keep the `limit` slowest files, sorted descending by duration.
const files = Object.fromEntries(
  collectVitestFileDurations(report, normalizeTrackedRepoPath)
    .toSorted((a, b) => b.durationMs - a.durationMs)
    .slice(0, opts.limit)
    .map((entry) => [
      entry.file,
      {
        durationMs: entry.durationMs,
        testCount: entry.testCount,
      },
    ]),
);

const output = {
  config: opts.config,
  generatedAt: new Date().toISOString(),
  defaultDurationMs: opts.defaultDurationMs,
  files,
};

writeJsonFile(opts.out, output);
console.log(
  `[test-update-timings] wrote ${String(Object.keys(files).length)} ${opts.description} timings to ${opts.out}`,
);
|
||||
@@ -1,3 +1,4 @@
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
import { defineConfig } from "vitest/config";
|
||||
@@ -6,15 +7,37 @@ import {
|
||||
BUNDLED_PLUGIN_TEST_GLOB,
|
||||
} from "./scripts/lib/bundled-plugin-paths.mjs";
|
||||
import { pluginSdkSubpaths } from "./scripts/lib/plugin-sdk-entries.mjs";
|
||||
import { resolveLocalVitestMaxWorkers } from "./scripts/test-planner/runtime-profile.mjs";
|
||||
import {
|
||||
behaviorManifestPath,
|
||||
unitMemoryHotspotManifestPath,
|
||||
unitTimingManifestPath,
|
||||
} from "./scripts/test-runner-manifest.mjs";
|
||||
import { loadVitestExperimentalConfig } from "./vitest.performance-config.ts";
|
||||
|
||||
export { resolveLocalVitestMaxWorkers };
|
||||
// Restrict `value` to [min, max] via the same Math.min/Math.max composition
// (preserves the original's NaN propagation and inverted-range behavior).
const clamp = (value, min, max) => {
  const upperBounded = Math.min(max, value);
  return Math.max(min, upperBounded);
};
|
||||
/**
 * Parse `value` as a base-10 integer.
 *
 * @param {string | undefined | null} value - raw env-var text.
 * @returns {number | null} the integer when it is strictly positive,
 *   otherwise `null` (missing, non-numeric, zero, or negative input).
 */
function parsePositiveInt(value) {
  const candidate = Number.parseInt(value ?? "", 10);
  if (!Number.isFinite(candidate) || candidate <= 0) {
    return null;
  }
  return candidate;
}
||||
|
||||
/**
 * Decide how many Vitest workers to run locally.
 *
 * Resolution order:
 *   1. Explicit override: OPENCLAW_VITEST_MAX_WORKERS, then
 *      OPENCLAW_TEST_WORKERS — the first that parses as a positive integer
 *      wins. (Fix: previously the two were combined with `??` *before*
 *      parsing, so an empty or malformed primary variable silently discarded
 *      a valid fallback.)
 *   2. A CPU-count heuristic, further capped on low-memory hosts so the
 *      spawned Node processes stay clear of V8 heap limits.
 *
 * @param {NodeJS.ProcessEnv} [env] - environment to read overrides from;
 *   defaults to `process.env`.
 * @param {{ cpuCount: number, totalMemoryBytes: number }} [system] - host
 *   profile; defaults to the current machine via `os`.
 * @returns {number} worker count, always clamped to [1, 16].
 */
export function resolveLocalVitestMaxWorkers(
  env = process.env,
  system = {
    cpuCount: os.cpus().length || 1,
    totalMemoryBytes: os.totalmem(),
  },
) {
  // Parse each override candidate independently so an unset/invalid primary
  // falls through to the secondary instead of masking it.
  const override =
    parsePositiveInt(env.OPENCLAW_VITEST_MAX_WORKERS) ??
    parsePositiveInt(env.OPENCLAW_TEST_WORKERS);
  if (override !== null) {
    return clamp(override, 1, 16);
  }

  const cpuCount = Math.max(1, system.cpuCount || 1);
  const memoryGiB = (system.totalMemoryBytes || 0) / 1024 ** 3;

  // CPU tiers first, then a memory ceiling: hosts under 24 GiB cap at 2
  // workers, under 48 GiB at 3.
  let workers = cpuCount >= 16 ? 6 : cpuCount >= 10 ? 4 : cpuCount >= 6 ? 3 : 2;
  if (memoryGiB < 24) {
    workers = Math.min(workers, 2);
  } else if (memoryGiB < 48) {
    workers = Math.min(workers, 3);
  }
  return clamp(workers, 1, 16);
}
|
||||
|
||||
const repoRoot = path.dirname(fileURLToPath(import.meta.url));
|
||||
const isCI = process.env.CI === "true" || process.env.GITHUB_ACTIONS === "true";
|
||||
@@ -55,12 +78,9 @@ export default defineConfig({
|
||||
"test/setup.ts",
|
||||
"test/setup.shared.ts",
|
||||
"test/setup.extensions.ts",
|
||||
"scripts/test-parallel.mjs",
|
||||
"scripts/test-planner/catalog.mjs",
|
||||
"scripts/test-planner/executor.mjs",
|
||||
"scripts/test-planner/planner.mjs",
|
||||
"scripts/test-planner/runtime-profile.mjs",
|
||||
"scripts/test-runner-manifest.mjs",
|
||||
"scripts/test-projects.mjs",
|
||||
"scripts/test-projects-lib.mjs",
|
||||
"scripts/ci-write-manifest-outputs.mjs",
|
||||
"vitest.channel-paths.mjs",
|
||||
"vitest.channels.config.ts",
|
||||
"vitest.bundled.config.ts",
|
||||
@@ -71,12 +91,10 @@ export default defineConfig({
|
||||
"vitest.gateway.config.ts",
|
||||
"vitest.live.config.ts",
|
||||
"vitest.performance-config.ts",
|
||||
"vitest.projects.config.ts",
|
||||
"vitest.scoped-config.ts",
|
||||
"vitest.unit.config.ts",
|
||||
"vitest.unit-paths.mjs",
|
||||
behaviorManifestPath,
|
||||
unitTimingManifestPath,
|
||||
unitMemoryHotspotManifestPath,
|
||||
],
|
||||
include: [
|
||||
"src/**/*.test.ts",
|
||||
|
||||
Reference in New Issue
Block a user