TUI: simplify stale response notice (#77120)

This commit is contained in:
Dave Morin
2026-05-03 22:50:24 -07:00
committed by GitHub
parent a9d77b3eb0
commit 1df6226d90
3 changed files with 10 additions and 12 deletions

View File

@@ -385,6 +385,7 @@ Docs: https://docs.openclaw.ai
- Plugins/update: keep externalized bundled npm bridge updates on the normal plugin security scanner path instead of granting source-linked official trust without artifact provenance. (#76765) Thanks @Lucenx9.
- Agents/reply context: label replied-to messages as the current user message target in model-visible metadata, so short replies are grounded to their explicit reply target instead of nearby chat history. (#76817) Thanks @obviyus.
- Doctor/plugins: install configured missing official plugins such as Discord and Brave during doctor/update repair, auto-enable repaired provider plugins, preserve config when a download fails, and stop auto-enable from inventing plugin entries when no manifest declares a configured channel. Fixes #76872. Thanks @jack-stormentswe.
- TUI: replace the stale-response watchdog notice with plain user-facing copy so stalled replies no longer surface backend or streaming internals. (#77120)
## 2026.5.2

View File

@@ -894,6 +894,9 @@ describe("tui-event-handlers: handleAgentEvent", () => {
});
describe("tui-event-handlers: streaming watchdog", () => {
const expectedTimeoutMessage =
"This response is taking longer than expected. Send another message to continue.";
beforeEach(() => {
vi.useFakeTimers();
});
@@ -971,7 +974,7 @@ describe("tui-event-handlers: streaming watchdog", () => {
expect(setActivityStatus).toHaveBeenLastCalledWith("idle");
expect(state.activeChatRunId).toBeNull();
expect(chatLog.addSystem).toHaveBeenCalledWith(expect.stringContaining("streaming watchdog"));
expect(chatLog.addSystem).toHaveBeenCalledWith(expectedTimeoutMessage);
handlers.dispose?.();
});
@@ -1177,9 +1180,7 @@ describe("tui-event-handlers: streaming watchdog", () => {
expect(setActivityStatus).toHaveBeenLastCalledWith("idle");
expect(state.activeChatRunId).toBeNull();
expect(loadHistory).toHaveBeenCalledTimes(1);
expect(chatLog.addSystem).not.toHaveBeenCalledWith(
expect.stringContaining("streaming watchdog"),
);
expect(chatLog.addSystem).not.toHaveBeenCalledWith(expectedTimeoutMessage);
handlers.dispose?.();
});
@@ -1206,9 +1207,7 @@ describe("tui-event-handlers: streaming watchdog", () => {
const statusCalls = setActivityStatus.mock.calls.map((c) => c[0]);
expect(statusCalls.filter((s) => s === "idle").length).toBe(1);
expect(chatLog.addSystem).not.toHaveBeenCalledWith(
expect.stringContaining("streaming watchdog"),
);
expect(chatLog.addSystem).not.toHaveBeenCalledWith(expectedTimeoutMessage);
expect(state.activeChatRunId).toBeNull();
handlers.dispose?.();

View File

@@ -49,6 +49,8 @@ type EventHandlerContext = {
};
const DEFAULT_STREAMING_WATCHDOG_MS = 30_000;
const STREAMING_WATCHDOG_USER_MESSAGE =
"This response is taking longer than expected. Send another message to continue.";
export function createEventHandlers(context: EventHandlerContext) {
const {
@@ -129,11 +131,7 @@ export function createEventHandlers(context: EventHandlerContext) {
return;
}
flushPendingHistoryRefreshIfIdle();
chatLog.addSystem(
`streaming watchdog: no stream updates for ${Math.round(
streamingWatchdogMs / 1000,
)}s; resetting status. The backend may have dropped this run silently — send a new message to resync.`,
);
chatLog.addSystem(STREAMING_WATCHDOG_USER_MESSAGE);
tui.requestRender();
}, streamingWatchdogMs);
const maybeUnref = (streamingWatchdogTimer as { unref?: () => void }).unref;