Wizard: bound hatch TUI timeout (#76241)

* Wizard: bound hatch TUI timeout

* Scripts: avoid control regex in hatch stall repro

* Scripts: remove hatch stall repro harness

* Changelog: note hatch timeout fix

* Changelog: use PR reference for hatch timeout
This commit is contained in:
Josh Avant
2026-05-02 15:28:32 -05:00
committed by GitHub
parent 292bbcc292
commit 86cc29274e
5 changed files with 108 additions and 0 deletions

View File

@@ -19,6 +19,7 @@ Docs: https://docs.openclaw.ai
- Status: show the `openai-codex` OAuth profile for `openai/gpt-*` sessions running through the native Codex runtime instead of reporting auth as unknown. (#76197) Thanks @mbelinky.
- Gateway: avoid repeated plugin tool descriptor config hashing so large runtime configs do not block reply startup and trigger reconnect/timeouts. (#75944) Thanks @joshavant.
- Plugins/externalization: keep diagnostics ClawHub packages and persisted bundled-plugin relocation on npm-first install metadata for launch, and omit Discord from the core package now that its external package is published. Thanks @vincentkoc.
- Setup/TUI: bound the Terminal hatch bootstrap run so a stalled provider request times out instead of leaving first-run hatching stuck behind the watchdog. (#76241) Thanks @joshavant.
- Plugins/Codex: allow the official npm Codex plugin to install without the unsafe-install override, keep `/codex` command ownership, and cover the real npm Docker live path through managed `.openclaw/npm` dependencies plus uninstall failure proof.
- Gateway/status: add concrete service, config, listener-owner, and log collection next steps when gateway probes fail and Bonjour finds no local gateway, so frozen or port-conflict reports include the data needed for root-cause triage. Refs #49012. Thanks @vincentkoc.

View File

@@ -429,6 +429,35 @@ describe("EmbeddedTuiBackend", () => {
expect(capturedSignal?.aborted).toBe(true);
});
it("passes explicit chat timeouts to the agent command as seconds", async () => {
  const { EmbeddedTuiBackend } = await import("./embedded-backend.js");
  // Queue one successful agent reply for the single chat sent below.
  agentCommandFromIngressMock.mockResolvedValueOnce({
    meta: {},
    payloads: [{ text: "hello" }],
  });
  const tuiBackend = new EmbeddedTuiBackend();
  tuiBackend.start();
  try {
    await tuiBackend.sendChat({
      sessionKey: "agent:main:main",
      message: "Wake up, my friend!",
      runId: "run-explicit-timeout",
      timeoutMs: 300_000,
    });
    await flushMicrotasks();
    // The 300_000 ms chat timeout must reach the agent command as "300" seconds.
    expect(agentCommandFromIngressMock).toHaveBeenCalledTimes(1);
    const [ingressArg] = agentCommandFromIngressMock.mock.calls[0] ?? [];
    expect(ingressArg).toEqual(expect.objectContaining({ timeout: "300" }));
  } finally {
    tuiBackend.stop();
  }
});
it("restores embedded mode and runtime loggers on stop", async () => {
const { EmbeddedTuiBackend } = await import("./embedded-backend.js");

View File

@@ -102,6 +102,35 @@ describe("launchTuiCli", () => {
);
});
it("passes initial message and timeout through to the relaunched TUI", async () => {
  const relaunched = createChildProcess();
  // Simulate a clean exit of the relaunched process so launchTuiCli resolves.
  spawnMock.mockImplementation((_command: string, _argv: string[], _options: SpawnOptions) => {
    queueMicrotask(() => relaunched.emit("exit", 0, null));
    return relaunched;
  });
  await launchTuiCli({
    local: true,
    deliver: false,
    message: "Wake up, my friend!",
    timeoutMs: 300_000,
  });
  // Both the initial message and the millisecond timeout must be forwarded
  // verbatim as CLI flags to the relaunched TUI process.
  const expectedArgv = [
    "/repo/openclaw.mjs",
    "tui",
    "--local",
    "--message",
    "Wake up, my friend!",
    "--timeout-ms",
    "300000",
  ];
  expect(spawnMock).toHaveBeenCalledWith(
    process.execPath,
    expectedArgv,
    expect.objectContaining({ stdio: "inherit" }),
  );
});
it("launches compiled CLI shapes without repeating the current command", async () => {
process.argv[1] = "setup";
const child = createChildProcess();

View File

@@ -1,3 +1,4 @@
import fs from "node:fs/promises";
import { beforeEach, describe, expect, it, vi } from "vitest";
import { createWizardPrompter as buildWizardPrompter } from "../../test/helpers/wizard-prompter.js";
import type { OpenClawConfig } from "../config/config.js";
@@ -346,6 +347,52 @@ describe("finalizeSetupWizard", () => {
local: true,
deliver: false,
message: undefined,
timeoutMs: 300_000,
});
});
it("bounds the bootstrap hatch TUI run timeout", async () => {
  // Pretend the bootstrap file exists so the wizard offers the hatch prompt.
  vi.spyOn(fs, "access").mockResolvedValueOnce(undefined);
  // Answer "tui" only to the hatch question; defer everything else.
  const select = vi.fn(async (params: { message: string }) =>
    params.message === "How do you want to hatch your bot?" ? "tui" : "later",
  );
  const prompter = buildWizardPrompter({
    confirm: vi.fn(async () => false),
    select: select as never,
  });
  await finalizeSetupWizard({
    flow: "quickstart",
    opts: {
      acceptRisk: true,
      authChoice: "skip",
      installDaemon: false,
      skipHealth: true,
      skipUi: false,
    },
    baseConfig: {},
    nextConfig: {},
    workspaceDir: "/tmp",
    settings: {
      port: 18789,
      bind: "loopback",
      authMode: "token",
      gatewayToken: undefined,
      tailscaleMode: "off",
      tailscaleResetOnExit: false,
    },
    prompter,
    runtime: createRuntime(),
  });
  // The hatch TUI launch must carry the bounded 5-minute timeout so a stalled
  // provider request cannot hang first-run hatching.
  expect(launchTuiCli).toHaveBeenCalledWith({
    local: true,
    deliver: false,
    message: "Wake up, my friend!",
    timeoutMs: 300_000,
  });
});

View File

@@ -52,6 +52,7 @@ type FinalizeOnboardingOptions = {
type OnboardSearchModule = typeof import("../commands/onboard-search.js");
let onboardSearchModulePromise: Promise<OnboardSearchModule> | undefined;
// Upper bound (5 minutes) for the first-run hatch TUI session, so a stalled
// provider request times out instead of leaving hatching stuck indefinitely.
const HATCH_TUI_TIMEOUT_MS = 5 * 60 * 1000;
function loadOnboardSearchModule(): Promise<OnboardSearchModule> {
onboardSearchModulePromise ??= import("../commands/onboard-search.js");
@@ -458,6 +459,7 @@ export async function finalizeSetupWizard(
local: true,
deliver: false,
message: hasBootstrap ? "Wake up, my friend!" : undefined,
timeoutMs: HATCH_TUI_TIMEOUT_MS,
});
} finally {
restoreTerminalState("post-setup tui", { resumeStdinIfPaused: true });