fix(tui): preserve pending sends and busy-state visibility (#59800)

* fix(tui): preserve pending messages across refreshes

* fix(tui): keep fallback runs visibly active

* fix(tui): expose full verbose mode and reclaim width

* refactor(tui): drop stale optimistic-send state

* test(tui): drop unused state binding

* docs(changelog): add tui beta note

* fix(tui): bound fallback wait and dedupe pending restore

* fix(tui): preserve queued sends and busy-state visibility

* chore(changelog): align tui pending-send note

* chore(changelog): refine tui release note
This commit is contained in:
Vincent Koc
2026-04-03 20:39:55 +09:00
committed by GitHub
parent 79da4a46b4
commit 51d6d7013f
10 changed files with 274 additions and 6 deletions

View File

@@ -140,6 +140,7 @@ Docs: https://docs.openclaw.ai
- Telegram/exec approvals: fall back to the origin session key for async approval followups and keep resume-failure status delivery sanitized so Telegram followups still land without leaking raw exec metadata. (#59351) Thanks @seonang.
- Node-host/exec approvals: bind `pnpm dlx` invocations through the approval planner's mutable-script path so the effective runtime command is resolved for approval instead of being left unbound. (#58374)
- Exec/node hosts: stop forwarding the gateway workspace cwd to remote node exec when no workdir was explicitly requested, so cross-platform node approvals fall back to the node default cwd instead of failing with `SYSTEM_RUN_DENIED`. (#58977) Thanks @Starhappysh.
- TUI/chat: keep pending local sends visible and reconciled across history reloads, make busy/error recovery clearer through fallback and terminal-error paths, and reclaim transcript width for long links and paths. (#59800) Thanks @vincentkoc.
- Exec approvals/channels: decouple initiating-surface approval availability from native delivery enablement so Telegram, Slack, and Discord still expose approvals when approvers exist and native target routing is configured separately. (#59776) Thanks @joelnishanth.
- Plugins/runtime: reuse compatible active registries for `web_search` and `web_fetch` provider snapshot resolution so repeated runtime reads do not re-import the same bundled plugin set on each agent message. Related #48380.
- Plugins/OpenAI: enable reference-image edits for `gpt-image-1` by routing edit calls to `/images/edits` with multipart image uploads, and update image-generation capability/docs metadata accordingly.

View File

@@ -7,7 +7,7 @@ export class AssistantMessageComponent extends Container {
constructor(text: string) {
super();
this.body = new HyperlinkMarkdown(text, 1, 0, markdownTheme, {
this.body = new HyperlinkMarkdown(text, 0, 0, markdownTheme, {
// Keep assistant body text in terminal default foreground so contrast
// follows the user's terminal theme (dark or light).
color: (line) => theme.assistantText(line),

View File

@@ -89,4 +89,61 @@ describe("ChatLog", () => {
expect(rendered).not.toContain("BTW: what is 17 * 19?");
expect(chatLog.hasVisibleBtw()).toBe(false);
});
// Each case exercises the pending-user bookkeeping on a fresh ChatLog.
const makeLog = () => new ChatLog(40);

it("preserves pending user messages across history rebuilds", () => {
  const log = makeLog();
  log.addPendingUser("run-1", "queued hello");
  log.clearAll({ preservePendingUsers: true });
  log.addSystem("session agent:main:main");
  log.restorePendingUsers();
  const output = log.render(120).join("\n");
  expect(output).toContain("queued hello");
  expect(log.countPendingUsers()).toBe(1);
});

it("does not append the same pending component twice when it is already mounted", () => {
  const log = makeLog();
  log.addPendingUser("run-1", "queued hello");
  log.restorePendingUsers();
  expect(log.children.length).toBe(1);
  expect(log.render(120).join("\n")).toContain("queued hello");
});

it("stops counting a pending user message once the run is committed", () => {
  const log = makeLog();
  log.addPendingUser("run-1", "hello");
  expect(log.countPendingUsers()).toBe(1);
  expect(log.commitPendingUser("run-1")).toBe(true);
  expect(log.countPendingUsers()).toBe(0);
  expect(log.render(120).join("\n")).toContain("hello");
});

it("reconciles pending users against rebuilt history using timestamps", () => {
  const log = makeLog();
  log.addPendingUser("run-1", "queued hello", 2_000);
  const cleared = log.reconcilePendingUsers([
    { text: "queued hello", timestamp: 2_100 },
    { text: "older", timestamp: 1_000 },
  ]);
  expect(cleared).toEqual(["run-1"]);
  expect(log.countPendingUsers()).toBe(0);
});

it("does not hide a new repeated prompt when only older history matches", () => {
  const log = makeLog();
  log.addPendingUser("run-1", "continue", 5_000);
  const cleared = log.reconcilePendingUsers([{ text: "continue", timestamp: 4_000 }]);
  expect(cleared).toEqual([]);
  expect(log.countPendingUsers()).toBe(1);
});
});

View File

@@ -10,6 +10,14 @@ export class ChatLog extends Container {
private readonly maxComponents: number;
private toolById = new Map<string, ToolExecutionComponent>();
private streamingRuns = new Map<string, AssistantMessageComponent>();
// Locally-sent user messages not yet confirmed by reloaded history, keyed by
// runId. Each entry keeps the mounted component plus the original text and
// creation timestamp used by reconcilePendingUsers().
private pendingUsers = new Map<
string,
{
component: UserMessageComponent;
text: string;
createdAt: number;
}
>();
private btwMessage: BtwInlineMessage | null = null;
private toolsExpanded = false;
@@ -29,6 +37,11 @@ export class ChatLog extends Container {
this.streamingRuns.delete(runId);
}
}
for (const [runId, entry] of this.pendingUsers.entries()) {
if (entry.component === component) {
this.pendingUsers.delete(runId);
}
}
if (this.btwMessage === component) {
this.btwMessage = null;
}
@@ -50,11 +63,30 @@ export class ChatLog extends Container {
this.pruneOverflow();
}
clearAll() {
/**
 * Reset the transcript and all per-run bookkeeping. Pending user sends are
 * forgotten too unless the caller opts to preserve them (history reloads do,
 * via `preservePendingUsers`, and re-mount them with restorePendingUsers()).
 */
clearAll(opts?: { preservePendingUsers?: boolean }) {
  this.clear();
  this.toolById.clear();
  this.streamingRuns.clear();
  this.btwMessage = null;
  const keepPending = opts?.preservePendingUsers === true;
  if (!keepPending) {
    this.pendingUsers.clear();
  }
}
/** Re-mount any pending user components that clearAll() detached. */
restorePendingUsers() {
  for (const entry of this.pendingUsers.values()) {
    const alreadyMounted = this.children.includes(entry.component);
    if (!alreadyMounted) {
      this.append(entry.component);
    }
  }
}
/** Detach every queued user component, then forget the whole queue. */
clearPendingUsers() {
  for (const { component } of this.pendingUsers.values()) {
    this.removeChild(component);
  }
  this.pendingUsers.clear();
}
private createSystemMessage(text: string): Container {
@@ -72,6 +104,77 @@ export class ChatLog extends Container {
this.append(new UserMessageComponent(text));
}
/**
 * Track (or refresh) the locally-sent message for `runId` and return its
 * component. One component exists per run: a repeat call updates the text
 * and timestamp in place instead of appending a duplicate.
 *
 * NOTE(review): an updated entry is not re-appended if its component was
 * detached earlier — callers appear to rely on restorePendingUsers() for
 * remounting; confirm against call sites.
 */
addPendingUser(runId: string, text: string, createdAt = Date.now()) {
  const known = this.pendingUsers.get(runId);
  if (known) {
    known.text = text;
    known.createdAt = createdAt;
    known.component.setText(text);
    return known.component;
  }
  const component = new UserMessageComponent(text);
  this.pendingUsers.set(runId, { component, text, createdAt });
  this.append(component);
  return component;
}
/**
 * Stop tracking `runId` as pending while leaving its component mounted.
 * Returns true when the run was actually pending.
 */
commitPendingUser(runId: string) {
  const wasPending = this.pendingUsers.delete(runId);
  return wasPending;
}
/**
 * Remove both the tracked entry and its mounted component for `runId`.
 * Returns false when the run was never pending.
 */
dropPendingUser(runId: string) {
  const entry = this.pendingUsers.get(runId);
  if (entry === undefined) {
    return false;
  }
  this.removeChild(entry.component);
  this.pendingUsers.delete(runId);
  return true;
}
/** True while `runId` still has an unreconciled local send. */
hasPendingUser(runId: string) {
  return this.pendingUsers.get(runId) !== undefined;
}
/**
 * Match pending local sends against rebuilt history and drop the ones that
 * history now covers. A pending entry is cleared when history contains the
 * same trimmed text at or after the moment the send was queued; each history
 * row is consumed at most once, so a freshly repeated prompt is not hidden
 * by an older duplicate. Returns the runIds that were cleared.
 */
reconcilePendingUsers(
  historyUsers: Array<{
    text: string;
    timestamp?: number | null;
  }>,
) {
  // Keep only non-empty rows that carry a numeric timestamp.
  const candidates: Array<{ text: string; timestamp: number | null }> = [];
  for (const raw of historyUsers) {
    const text = raw.text.trim();
    const timestamp = typeof raw.timestamp === "number" ? raw.timestamp : null;
    if (text.length > 0 && timestamp !== null) {
      candidates.push({ text, timestamp });
    }
  }
  const clearedRunIds: string[] = [];
  for (const [runId, entry] of this.pendingUsers.entries()) {
    const pendingText = entry.text.trim();
    if (pendingText.length === 0) {
      continue;
    }
    const matchIndex = candidates.findIndex(
      (candidate) =>
        candidate.text === pendingText && (candidate.timestamp ?? 0) >= entry.createdAt,
    );
    if (matchIndex < 0) {
      continue;
    }
    if (this.children.includes(entry.component)) {
      this.removeChild(entry.component);
    }
    this.pendingUsers.delete(runId);
    clearedRunIds.push(runId);
    // Consume the matched history row so it cannot satisfy another pending send.
    candidates.splice(matchIndex, 1);
  }
  return clearedRunIds;
}
/** Number of local sends still awaiting reconciliation with history. */
countPendingUsers() {
  return this.pendingUsers.size;
}
// Runs without an explicit id all share the "default" bucket.
private resolveRunId(runId?: string) {
  return runId == null ? "default" : runId;
}

View File

@@ -0,0 +1,32 @@
import { TUI } from "@mariozechner/pi-tui";
import { afterEach, describe, expect, it, vi } from "vitest";
import { editorTheme } from "../theme/theme.js";
import { CustomEditor } from "./custom-editor.js";
describe("CustomEditor", () => {
  afterEach(() => {
    vi.restoreAllMocks();
  });

  // Build an editor against a stubbed TUI so key routing can be observed.
  const makeEditor = () => {
    const tui = { requestRender: vi.fn() } as unknown as TUI;
    return new CustomEditor(tui, editorTheme);
  };

  it("routes alt+enter to the follow-up handler", () => {
    const editor = makeEditor();
    const onAltEnter = vi.fn();
    editor.onAltEnter = onAltEnter;
    editor.handleInput("\u001b\r");
    expect(onAltEnter).toHaveBeenCalledTimes(1);
  });

  it("routes alt+up to the dequeue handler", () => {
    const editor = makeEditor();
    const onAltUp = vi.fn();
    editor.onAltUp = onAltUp;
    editor.handleInput("\u001bp");
    expect(onAltUp).toHaveBeenCalledTimes(1);
  });
});

View File

@@ -11,12 +11,17 @@ export class CustomEditor extends Editor {
onCtrlT?: () => void;
onShiftTab?: () => void;
onAltEnter?: () => void;
onAltUp?: () => void;
handleInput(data: string): void {
if (matchesKey(data, Key.alt("enter")) && this.onAltEnter) {
this.onAltEnter();
return;
}
if (matchesKey(data, Key.alt("up")) && this.onAltUp) {
this.onAltUp();
return;
}
if (matchesKey(data, Key.ctrl("l")) && this.onCtrlL) {
this.onCtrlL();
return;

View File

@@ -1,14 +1,15 @@
import { Container, Markdown, Spacer } from "@mariozechner/pi-tui";
import { Container, Spacer } from "@mariozechner/pi-tui";
import { markdownTheme } from "../theme/theme.js";
import { HyperlinkMarkdown } from "./hyperlink-markdown.js";
type MarkdownOptions = ConstructorParameters<typeof Markdown>[4];
type MarkdownOptions = ConstructorParameters<typeof HyperlinkMarkdown>[4];
export class MarkdownMessageComponent extends Container {
private body: Markdown;
private body: HyperlinkMarkdown;
constructor(text: string, y: number, options?: MarkdownOptions) {
super();
this.body = new Markdown(text, 1, y, markdownTheme, options);
this.body = new HyperlinkMarkdown(text, 0, y, markdownTheme, options);
this.addChild(new Spacer(1));
this.addChild(this.body);
}

View File

@@ -0,0 +1,25 @@
import { describe, expect, it } from "vitest";
import { PendingMessagesComponent } from "./pending-messages.js";
describe("PendingMessagesComponent", () => {
  it("renders queued steering and follow-up messages", () => {
    const pending = new PendingMessagesComponent();
    pending.setMessages([
      { runId: "run-1", text: "continue", mode: "steer" },
      { runId: "run-2", text: "after that, write tests", mode: "followUp" },
    ]);
    const output = pending.render(120).join("\n");
    expect(output).toContain("Steer: continue");
    expect(output).toContain("Follow-up: after that, write tests");
    expect(output).toContain("alt+up");
  });

  it("clears its output when no queued messages remain", () => {
    const pending = new PendingMessagesComponent();
    pending.setMessages([{ runId: "run-1", text: "continue", mode: "steer" }]);
    pending.clearMessages();
    expect(pending.render(120).join("\n")).toBe("");
  });
});

View File

@@ -0,0 +1,35 @@
import { Container, Spacer, Text } from "@mariozechner/pi-tui";
import { theme } from "../theme/theme.js";
import type { QueuedMessage } from "../tui-types.js";
/** Human-readable prefix for a queued message's delivery mode. */
function formatLabel(mode: QueuedMessage["mode"]) {
  if (mode === "followUp") {
    return "Follow-up";
  }
  return "Steer";
}
/**
 * Inline list of queued steer/follow-up messages shown beneath the chat,
 * with a hint that alt+up restores them into the editor.
 */
export class PendingMessagesComponent extends Container {
  // Snapshot of the queue currently displayed.
  private messages: QueuedMessage[] = [];

  /** Replace the displayed queue with a defensive copy of `messages`. */
  setMessages(messages: QueuedMessage[]) {
    this.messages = messages.slice();
    this.renderMessages();
  }

  /** Empty the queue and blank this component's output. */
  clearMessages() {
    this.messages = [];
    this.renderMessages();
  }

  // Rebuild the child widgets from scratch after every queue change.
  private renderMessages() {
    this.clear();
    if (!this.messages.length) {
      return;
    }
    this.addChild(new Spacer(1));
    for (const queued of this.messages) {
      this.addChild(new Text(theme.dim(`${formatLabel(queued.mode)}: ${queued.text}`), 1, 0));
    }
    this.addChild(new Text(theme.dim("↳ alt+up to restore queued messages"), 1, 0));
  }
}

View File

@@ -60,6 +60,14 @@ export type AgentSummary = {
name?: string;
};
// Delivery mode for a message queued while a run is busy.
// NOTE(review): presumably "steer" targets the active run and "followUp" is
// sent after it finishes — confirm against the send path.
export type QueuedMessageMode = "steer" | "followUp";
// A locally queued user message awaiting delivery, keyed to its run.
export type QueuedMessage = {
runId: string;
text: string;
mode: QueuedMessageMode;
};
export type GatewayStatusSummary = {
runtimeVersion?: string | null;
linkChannel?: {
@@ -109,6 +117,7 @@ export type TuiStateAccess = {
currentSessionId: string | null;
activeChatRunId: string | null;
pendingOptimisticUserMessage?: boolean;
queuedMessages?: QueuedMessage[];
historyLoaded: boolean;
sessionInfo: SessionInfo;
initialSessionApplied: boolean;