fix(test): keep local Vitest checks serialized

This commit is contained in:
Vincent Koc
2026-04-25 03:05:28 -07:00
parent 5e0cca5e24
commit 814409a3b3
8 changed files with 144 additions and 72 deletions

View File

@@ -16,6 +16,9 @@ export const DEFAULT_LOCAL_FULL_SUITE_VITEST_WORKERS: number;
export const LARGE_LOCAL_FULL_SUITE_VITEST_WORKERS: number;
export function isCiLikeEnv(env?: Record<string, string | undefined>): boolean;
export function resolveLocalVitestEnv(
env?: Record<string, string | undefined>,
): Record<string, string | undefined>;
export function detectVitestHostInfo(): Required<VitestHostInfo>;
export function resolveLocalVitestMaxWorkers(
env?: Record<string, string | undefined>,

View File

@@ -24,6 +24,18 @@ export function isCiLikeEnv(env = process.env) {
return env.CI === "true" || env.GITHUB_ACTIONS === "true";
}
/**
 * Returns an env for local Vitest runs with OPENCLAW_LOCAL_CHECK defaulted to
 * "1" so local checks stay serialized.
 *
 * The input env is returned untouched when:
 *  - the environment looks CI-like (per isCiLikeEnv), or
 *  - the user explicitly opted out by setting OPENCLAW_LOCAL_CHECK to
 *    "0"/"false" (case-insensitive, surrounding whitespace ignored).
 *
 * Otherwise a shallow copy of env is returned with OPENCLAW_LOCAL_CHECK: "1".
 * The original env object is never mutated.
 *
 * Fix: the previous guard (`normalizedLocalCheck !== "0" && !== "false"`) was
 * inverted — an unset variable skipped the serializing default entirely, and
 * an explicit "0"/"false" opt-out was the only case that forced "1" on.
 */
export function resolveLocalVitestEnv(env = process.env) {
  const normalizedLocalCheck = env.OPENCLAW_LOCAL_CHECK?.trim().toLowerCase();
  const optedOut = normalizedLocalCheck === "0" || normalizedLocalCheck === "false";
  if (isCiLikeEnv(env) || optedOut) {
    return env;
  }
  return {
    ...env,
    OPENCLAW_LOCAL_CHECK: "1",
  };
}
export function detectVitestHostInfo() {
return {
cpuCount:

View File

@@ -1,6 +1,7 @@
import { spawn } from "node:child_process";
import { createRequire } from "node:module";
import path from "node:path";
import { resolveLocalVitestEnv } from "./lib/vitest-local-scheduling.mjs";
import { spawnPnpmRunner } from "./pnpm-runner.mjs";
import {
forwardSignalToVitestProcessGroup,
@@ -47,15 +48,16 @@ export function resolveVitestSpawnParams(env = process.env, platform = process.p
}
export function resolveVitestSpawnEnv(env = process.env) {
if (!shouldApplyNativeWorkerBudget(env)) {
return env;
const nextEnv = resolveLocalVitestEnv(env);
if (!shouldApplyNativeWorkerBudget(nextEnv)) {
return nextEnv;
}
const nativeWorkerCount = String(resolveNativeWorkerCount(env));
const nativeWorkerCount = String(resolveNativeWorkerCount(nextEnv));
return {
...env,
RAYON_NUM_THREADS: env.RAYON_NUM_THREADS?.trim() || nativeWorkerCount,
TOKIO_WORKER_THREADS: env.TOKIO_WORKER_THREADS?.trim() || nativeWorkerCount,
...nextEnv,
RAYON_NUM_THREADS: nextEnv.RAYON_NUM_THREADS?.trim() || nativeWorkerCount,
TOKIO_WORKER_THREADS: nextEnv.TOKIO_WORKER_THREADS?.trim() || nativeWorkerCount,
};
}

View File

@@ -2,7 +2,11 @@ import fs from "node:fs";
import path from "node:path";
import { performance } from "node:perf_hooks";
import { acquireLocalHeavyCheckLockSync } from "./lib/local-heavy-check-runtime.mjs";
import { isCiLikeEnv, resolveLocalFullSuiteProfile } from "./lib/vitest-local-scheduling.mjs";
import {
isCiLikeEnv,
resolveLocalFullSuiteProfile,
resolveLocalVitestEnv,
} from "./lib/vitest-local-scheduling.mjs";
import {
resolveVitestCliEntry,
resolveVitestNodeArgs,
@@ -341,6 +345,7 @@ async function runVitestSpecsParallel(specs, concurrency) {
async function main() {
const args = process.argv.slice(2);
const baseEnv = resolveLocalVitestEnv(process.env);
const { targetArgs } = parseTestProjectsArgs(args, process.cwd());
const changedTargetArgs =
targetArgs.length === 0 ? resolveChangedTargetArgs(args, process.cwd()) : null;
@@ -349,7 +354,7 @@ async function main() {
? buildFullSuiteVitestRunPlans(args, process.cwd()).map((plan) => ({
config: plan.config,
continueOnFailure: true,
env: process.env,
env: baseEnv,
includeFilePath: null,
includePatterns: null,
pnpmArgs: [
@@ -365,12 +370,12 @@ async function main() {
watchMode: plan.watchMode,
}))
: createVitestRunSpecs(args, {
baseEnv: process.env,
baseEnv,
cwd: process.cwd(),
});
const runSpecs = applyDefaultMultiSpecVitestCachePaths(
applyDefaultVitestNoOutputTimeout(rawRunSpecs, { env: process.env }),
{ cwd: process.cwd(), env: process.env },
applyDefaultVitestNoOutputTimeout(rawRunSpecs, { env: baseEnv }),
{ cwd: process.cwd(), env: baseEnv },
);
if (runSpecs.length === 0) {
@@ -378,10 +383,10 @@ async function main() {
return;
}
releaseLock = shouldAcquireLocalHeavyCheckLock(runSpecs, process.env)
releaseLock = shouldAcquireLocalHeavyCheckLock(runSpecs, baseEnv)
? acquireLocalHeavyCheckLockSync({
cwd: process.cwd(),
env: process.env,
env: baseEnv,
toolName: "test",
})
: () => {};
@@ -391,28 +396,28 @@ async function main() {
changedTargetArgs === null &&
!runSpecs.some((spec) => spec.watchMode);
const isExplicitParallelMultiConfigRun =
Boolean(process.env.OPENCLAW_TEST_PROJECTS_PARALLEL) &&
Boolean(baseEnv.OPENCLAW_TEST_PROJECTS_PARALLEL) &&
runSpecs.length > 1 &&
!runSpecs.some((spec) => spec.watchMode);
const isParallelShardRun =
isFullSuiteRun || isFullExtensionsProjectRun(runSpecs) || isExplicitParallelMultiConfigRun;
if (isParallelShardRun) {
const concurrency = resolveParallelFullSuiteConcurrency(runSpecs.length, process.env);
const concurrency = resolveParallelFullSuiteConcurrency(runSpecs.length, baseEnv);
if (concurrency > 1) {
const localFullSuiteProfile = resolveLocalFullSuiteProfile(process.env);
const shardTimings = readShardTimings(process.cwd(), process.env);
const localFullSuiteProfile = resolveLocalFullSuiteProfile(baseEnv);
const shardTimings = readShardTimings(process.cwd(), baseEnv);
const parallelSpecs = applyDefaultParallelVitestWorkerBudget(
applyParallelVitestCachePaths(orderFullSuiteSpecsForParallelRun(runSpecs, shardTimings), {
cwd: process.cwd(),
env: process.env,
env: baseEnv,
}),
process.env,
baseEnv,
);
if (
!isCiLikeEnv(process.env) &&
!process.env.OPENCLAW_TEST_PROJECTS_PARALLEL &&
!process.env.OPENCLAW_VITEST_MAX_WORKERS &&
!process.env.OPENCLAW_TEST_WORKERS &&
!isCiLikeEnv(baseEnv) &&
!baseEnv.OPENCLAW_TEST_PROJECTS_PARALLEL &&
!baseEnv.OPENCLAW_VITEST_MAX_WORKERS &&
!baseEnv.OPENCLAW_TEST_WORKERS &&
localFullSuiteProfile.shardParallelism === 10 &&
localFullSuiteProfile.vitestMaxWorkers === 2
) {
@@ -425,7 +430,7 @@ async function main() {
parallelSpecs,
concurrency,
);
writeShardTimings(timings, process.cwd(), process.env);
writeShardTimings(timings, process.cwd(), baseEnv);
console.error(
`[test] completed ${parallelSpecs.length} Vitest shards; Vitest summaries above are per-shard, not aggregate totals.`,
);
@@ -455,7 +460,7 @@ async function main() {
timings.push(result.timing);
}
}
writeShardTimings(timings, process.cwd(), process.env);
writeShardTimings(timings, process.cwd(), baseEnv);
releaseLockOnce();
if (exitCode !== 0) {