test: harden Parallels fresh install smoke

This commit is contained in:
Peter Steinberger
2026-04-21 05:28:06 +01:00
parent b485ee7e36
commit 6d409a6182
12 changed files with 205 additions and 53 deletions

View File

@@ -39,17 +39,16 @@ const resolveGatewayPort = vi.hoisted(() => vi.fn((_cfg?: unknown, _env?: unknow
const findVerifiedGatewayListenerPidsOnPortSync = vi.fn<(port: number) => number[]>(() => []);
const signalVerifiedGatewayPidSync = vi.fn<(pid: number, signal: "SIGTERM" | "SIGUSR1") => void>();
const formatGatewayPidList = vi.fn<(pids: number[]) => string>((pids) => pids.join(", "));
const probeGateway =
vi.fn<
(opts: {
url: string;
auth?: { token?: string; password?: string };
timeoutMs: number;
}) => Promise<{
ok: boolean;
configSnapshot: unknown;
}>
>();
const probeGateway = vi.fn<
(opts: {
url: string;
auth?: { token?: string; password?: string };
timeoutMs: number;
}) => Promise<{
ok: boolean;
configSnapshot: unknown;
}>
>();
const isRestartEnabled = vi.fn<(config?: { commands?: unknown }) => boolean>(() => true);
const loadConfig = vi.hoisted(() => vi.fn(() => ({})));
const recoverInstalledLaunchAgent = vi.hoisted(() => vi.fn());
@@ -290,6 +289,27 @@ describe("runDaemonRestart health checks", () => {
expect(renderRestartDiagnostics).toHaveBeenCalledTimes(1);
});
it("waits longer for Windows gateway restart health", async () => {
vi.spyOn(process, "platform", "get").mockReturnValue("win32");
waitForGatewayHealthyRestart.mockResolvedValue({
healthy: true,
staleGatewayPids: [],
runtime: { status: "running" },
portUsage: { port: 18789, status: "busy", listeners: [], hints: [] },
});
await runDaemonRestart({ json: true });
expect(waitForGatewayHealthyRestart).toHaveBeenCalledWith(
expect.objectContaining({
attempts: 360,
delayMs: 500,
includeUnknownListenersAsStale: true,
port: 18789,
}),
);
});
it("fails restart with a stopped-free message when the waiter exits early", async () => {
const { formatCliCommand } = await import("../command-format.js");
const unhealthy: RestartHealthSnapshot = {

View File

@@ -33,6 +33,13 @@ import type { DaemonLifecycleOptions } from "./types.js";
const POST_RESTART_HEALTH_ATTEMPTS = DEFAULT_RESTART_HEALTH_ATTEMPTS;
const POST_RESTART_HEALTH_DELAY_MS = DEFAULT_RESTART_HEALTH_DELAY_MS;
const WINDOWS_POST_RESTART_HEALTH_TIMEOUT_MS = 180_000;
/**
 * Number of health-poll attempts to make after a daemon restart.
 *
 * Windows gets a longer window: the attempt count is derived from the
 * Windows-specific timeout divided by the per-attempt delay. All other
 * platforms use the default attempt count.
 */
function postRestartHealthAttempts(): number {
  if (process.platform === "win32") {
    return Math.ceil(
      WINDOWS_POST_RESTART_HEALTH_TIMEOUT_MS / POST_RESTART_HEALTH_DELAY_MS,
    );
  }
  return POST_RESTART_HEALTH_ATTEMPTS;
}
function formatRestartFailure(params: {
health: GatewayRestartSnapshot;
@@ -183,7 +190,8 @@ export async function runDaemonRestart(opts: DaemonLifecycleOptions = {}): Promi
const restartPort = await resolveGatewayLifecyclePort(service).catch(() =>
resolveGatewayPortFallback(),
);
const restartWaitMs = POST_RESTART_HEALTH_ATTEMPTS * POST_RESTART_HEALTH_DELAY_MS;
const restartHealthAttempts = postRestartHealthAttempts();
const restartWaitMs = restartHealthAttempts * POST_RESTART_HEALTH_DELAY_MS;
const restartWaitSeconds = Math.round(restartWaitMs / 1000);
return await runServiceRestart({
@@ -204,7 +212,7 @@ export async function runDaemonRestart(opts: DaemonLifecycleOptions = {}): Promi
if (restartedWithoutServiceManager) {
const health = await waitForGatewayHealthyListener({
port: restartPort,
attempts: POST_RESTART_HEALTH_ATTEMPTS,
attempts: restartHealthAttempts,
delayMs: POST_RESTART_HEALTH_DELAY_MS,
});
if (health.healthy) {
@@ -233,7 +241,7 @@ export async function runDaemonRestart(opts: DaemonLifecycleOptions = {}): Promi
let health = await waitForGatewayHealthyRestart({
service,
port: restartPort,
attempts: POST_RESTART_HEALTH_ATTEMPTS,
attempts: restartHealthAttempts,
delayMs: POST_RESTART_HEALTH_DELAY_MS,
includeUnknownListenersAsStale: process.platform === "win32",
});
@@ -254,7 +262,7 @@ export async function runDaemonRestart(opts: DaemonLifecycleOptions = {}): Promi
health = await waitForGatewayHealthyRestart({
service,
port: restartPort,
attempts: POST_RESTART_HEALTH_ATTEMPTS,
attempts: restartHealthAttempts,
delayMs: POST_RESTART_HEALTH_DELAY_MS,
includeUnknownListenersAsStale: process.platform === "win32",
});

View File

@@ -89,6 +89,7 @@ async function inspectAmbiguousOwnershipWithProbe(
}
async function waitForStoppedFreeGatewayRestart() {
const attempts = process.platform === "win32" ? 360 : 120;
const service = makeGatewayService({ status: "stopped" });
inspectPortUsage.mockResolvedValue({
port: 18789,
@@ -101,7 +102,7 @@ async function waitForStoppedFreeGatewayRestart() {
return waitForGatewayHealthyRestart({
service,
port: 18789,
attempts: 120,
attempts,
delayMs: 500,
});
}
@@ -292,9 +293,9 @@ describe("inspectGatewayRestart", () => {
runtime: { status: "stopped" },
portUsage: { status: "free" },
waitOutcome: "stopped-free",
elapsedMs: 27_500,
elapsedMs: 92_500,
});
expect(sleep).toHaveBeenCalledTimes(55);
expect(sleep).toHaveBeenCalledTimes(185);
});
it("annotates timeout waits when the health loop exhausts all attempts", async () => {

View File

@@ -20,7 +20,7 @@ export const DEFAULT_RESTART_HEALTH_ATTEMPTS = Math.ceil(
DEFAULT_RESTART_HEALTH_TIMEOUT_MS / DEFAULT_RESTART_HEALTH_DELAY_MS,
);
const STOPPED_FREE_EARLY_EXIT_GRACE_MS = 10_000;
const WINDOWS_STOPPED_FREE_EARLY_EXIT_GRACE_MS = 25_000;
const WINDOWS_STOPPED_FREE_EARLY_EXIT_GRACE_MS = 90_000;
export type GatewayRestartWaitOutcome = "healthy" | "stale-pids" | "stopped-free" | "timeout";