fix(control-ui): coalesce duplicate chat submits

This commit is contained in:
Peter Steinberger
2026-04-27 20:45:19 +01:00
parent 8cddb6ce7d
commit f6b2ba4a10
5 changed files with 141 additions and 29 deletions

View File

@@ -18,6 +18,7 @@ Docs: https://docs.openclaw.ai
- Sessions: ignore future-dated session activity timestamps during reset freshness checks and cap future `updatedAt` values at the merge boundary so clock-skewed messages cannot keep stale sessions alive forever. Fixes #72989. Thanks @martingarramon.
- Plugins/CLI: allow managed plugin installs when the active extensions root is a symlink to a real state directory, while keeping nested target symlinks blocked and suppressing misleading hook-pack fallback errors for install-boundary failures. Fixes #72946. Thanks @mayank6136.
- Gateway/startup: keep hot Gateway boot paths on leaf config imports and add max-RSS reporting to the gateway startup bench so low-memory startup regressions are visible before release. Thanks @vincentkoc.
- WebChat: read `chat.history` from active transcript branches, drop stale streamed assistant tails once final history catches up, and coalesce duplicate in-flight Control UI submits, so rewritten prompts, completed replies, and rapid send events no longer render or process twice. Fixes #72975, #72963, and #72974. Thanks @dmagdici, @lhtpluto, and @Benjamin5281999.
- WebChat/TTS: persist automatic final-mode TTS audio as a supplemental audio-only transcript update instead of adding a second assistant message with the same visible text. Fixes #72830. Thanks @lhtpluto.
- Agents/LSP: terminate bundled stdio LSP process trees during runtime disposal and Gateway shutdown, so nested children such as `tsserver` do not survive stop or restart. Fixes #72357. Thanks @ai-hpc and @bittoby.
- Diagnostics/OTEL: capture privacy-safe model-call request payload bytes, streamed response bytes, first-response latency, and total duration in diagnostic events, plugin hooks, stability snapshots, and OTEL model-call spans/metrics without logging raw model content. Fixes #33832. Thanks @wwh830.

View File

@@ -24,6 +24,8 @@ Status: the macOS/iOS SwiftUI chat UI talks directly to the Gateway WebSocket.
- The UI connects to the Gateway WebSocket and uses `chat.history`, `chat.send`, and `chat.inject`.
- `chat.history` is bounded for stability: Gateway may truncate long text fields, omit heavy metadata, and replace oversized entries with `[chat.history omitted: message too large]`.
- `chat.history` follows the active transcript branch for modern append-only session files, so abandoned rewrite branches and superseded prompt copies are not rendered in WebChat.
- Control UI coalesces duplicate in-flight submits for the same session, message, and attachments before generating a new `chat.send` run id; the Gateway still dedupes repeated requests that reuse the same idempotency key.
- `chat.history` is also display-normalized: runtime-only OpenClaw context,
inbound envelope wrappers, inline delivery directive tags
such as `[[reply_to_*]]` and `[[audio_as_voice]]`, plain-text tool-call XML

View File

@@ -41,10 +41,12 @@ function requestUrl(input: string | URL | Request): string {
}
function makeHost(overrides?: Partial<ChatHost>): ChatHost {
return {
const host = {
client: null,
chatMessages: [],
chatStream: null,
chatStreamSegments: [],
chatToolMessages: [],
connected: true,
chatLoading: false,
chatMessage: "",
@@ -71,9 +73,13 @@ function makeHost(overrides?: Partial<ChatHost>): ChatHost {
chatModelsLoading: false,
chatModelCatalog: [],
refreshSessionsAfterChat: new Set<string>(),
toolStreamById: new Map(),
toolStreamOrder: [],
toolStreamSyncTimer: null,
updateComplete: Promise.resolve(),
...overrides,
};
return host as ChatHost;
}
function createSessionsResult(sessions: GatewaySessionRow[]): SessionsListResult {
@@ -548,6 +554,32 @@ describe("handleSendChat", () => {
expect(host.chatMessage).toBe("queued while busy");
});
it("coalesces duplicate in-flight chat submits before the gateway acknowledges them", async () => {
const sent = createDeferred<unknown>();
const request = vi.fn((method: string) => {
if (method === "chat.send") {
return sent.promise;
}
throw new Error(`Unexpected request: ${method}`);
});
const host = makeHost({
client: { request } as unknown as ChatHost["client"],
});
const first = handleSendChat(host, "same prompt");
const second = handleSendChat(host, "same prompt");
expect(request).toHaveBeenCalledTimes(1);
expect(host.chatQueue).toEqual([]);
expect(host.chatMessages).toHaveLength(1);
sent.resolve({ runId: host.chatRunId, status: "started" });
await Promise.all([first, second]);
expect(request).toHaveBeenCalledTimes(1);
expect(host.chatMessages).toHaveLength(1);
});
it("restores the BTW draft when detached send fails", async () => {
const host = makeHost({
client: {

View File

@@ -61,6 +61,7 @@ export type ChatHost = ChatInputHistoryState & {
updateComplete?: Promise<unknown>;
refreshSessionsAfterChat: Set<string>;
pendingAbort?: { runId: string; sessionKey: string } | null;
chatSubmitGuards?: Map<string, Promise<void>>;
/** Callback for slash-command side effects that need app-level access. */
onSlashAction?: (action: string) => void;
};
@@ -225,6 +226,54 @@ async function sendChatMessageNow(
return ok;
}
/**
 * Builds a stable string signature for a single attachment so duplicate
 * submits can be compared cheaply. Only the data URL's length and its
 * first 64 characters are folded in, avoiding re-serializing large payloads.
 */
function attachmentSubmitSignature(attachment: ChatAttachment): string {
  const fields = [
    attachment.id,
    attachment.mimeType,
    attachment.fileName ?? "",
    attachment.dataUrl.length,
    attachment.dataUrl.slice(0, 64),
  ];
  return JSON.stringify(fields);
}
function chatSubmitKey(
host: ChatHost,
kind: "btw" | "message",
message: string,
attachments: ChatAttachment[],
): string {
return JSON.stringify([
kind,
host.sessionKey,
message.trim(),
attachments.map(attachmentSubmitSignature),
]);
}
/**
 * Runs `run` unless an identical submit (same key) is already in flight for
 * this host, in which case the duplicate resolves to `undefined` and `run`
 * is never invoked.
 *
 * The guard entry is inserted synchronously (before the first await), so a
 * second call made in the same tick is still coalesced. On completion the
 * entry is removed only if it is still our own marker, guarding against a
 * re-entrant replacement of the map slot.
 */
async function withChatSubmitGuard<T>(
  host: ChatHost,
  key: string,
  run: () => Promise<T>,
): Promise<T | undefined> {
  const inFlight = (host.chatSubmitGuards ??= new Map<string, Promise<void>>());
  if (inFlight.has(key)) {
    // An identical submit is pending: drop this one.
    return undefined;
  }
  let settle: (() => void) | undefined;
  const marker = new Promise<void>((resolve) => {
    settle = resolve;
  });
  inFlight.set(key, marker);
  try {
    return await run();
  } finally {
    settle?.();
    // Only clear the slot if it still holds our marker.
    if (inFlight.get(key) === marker) {
      inFlight.delete(key);
    }
  }
}
async function sendDetachedBtwMessage(
host: ChatHost,
message: string,
@@ -362,16 +411,19 @@ export async function handleSendChat(
}
if (isBtwCommand(message)) {
if (messageOverride == null) {
recordNonTranscriptInputHistory(host, message);
host.chatMessage = "";
host.chatAttachments = [];
resetChatInputHistoryNavigation(host);
}
await sendDetachedBtwMessage(host, message, {
previousDraft: messageOverride == null ? previousDraft : undefined,
attachments: hasAttachments ? attachmentsToSend : undefined,
previousAttachments: messageOverride == null ? attachments : undefined,
const submitKey = chatSubmitKey(host, "btw", message, attachmentsToSend);
await withChatSubmitGuard(host, submitKey, async () => {
if (messageOverride == null) {
recordNonTranscriptInputHistory(host, message);
host.chatMessage = "";
host.chatAttachments = [];
resetChatInputHistoryNavigation(host);
}
await sendDetachedBtwMessage(host, message, {
previousDraft: messageOverride == null ? previousDraft : undefined,
attachments: hasAttachments ? attachmentsToSend : undefined,
previousAttachments: messageOverride == null ? attachments : undefined,
});
});
return;
}
@@ -407,27 +459,30 @@ export async function handleSendChat(
}
const refreshSessions = isChatResetCommand(message);
if (messageOverride == null) {
host.chatMessage = "";
host.chatAttachments = [];
resetChatInputHistoryNavigation(host);
}
if (isChatBusy(host)) {
const submitKey = chatSubmitKey(host, "message", message, attachmentsToSend);
await withChatSubmitGuard(host, submitKey, async () => {
if (messageOverride == null) {
recordNonTranscriptInputHistory(host, message);
host.chatMessage = "";
host.chatAttachments = [];
resetChatInputHistoryNavigation(host);
}
enqueueChatMessage(host, message, attachmentsToSend, refreshSessions);
return;
}
await sendChatMessageNow(host, message, {
previousDraft: messageOverride == null ? previousDraft : undefined,
restoreDraft: Boolean(messageOverride && opts?.restoreDraft),
attachments: hasAttachments ? attachmentsToSend : undefined,
previousAttachments: messageOverride == null ? attachments : undefined,
restoreAttachments: Boolean(messageOverride && opts?.restoreDraft),
refreshSessions,
if (isChatBusy(host)) {
if (messageOverride == null) {
recordNonTranscriptInputHistory(host, message);
}
enqueueChatMessage(host, message, attachmentsToSend, refreshSessions);
return;
}
await sendChatMessageNow(host, message, {
previousDraft: messageOverride == null ? previousDraft : undefined,
restoreDraft: Boolean(messageOverride && opts?.restoreDraft),
attachments: hasAttachments ? attachmentsToSend : undefined,
previousAttachments: messageOverride == null ? attachments : undefined,
restoreAttachments: Boolean(messageOverride && opts?.restoreDraft),
refreshSessions,
});
});
}

View File

@@ -624,6 +624,28 @@ describe("loadChatHistory", () => {
});
describe("sendChatMessage", () => {
it("does not start a second chat.send while the first send is awaiting ack", async () => {
const sent = createDeferred<unknown>();
const request = vi.fn(() => sent.promise);
const state = createState({
connected: true,
client: { request } as unknown as ChatState["client"],
});
const first = sendChatMessage(state, "hello");
const activeRunId = state.chatRunId;
const second = sendChatMessage(state, "hello");
expect(request).toHaveBeenCalledTimes(1);
expect(state.chatMessages).toHaveLength(1);
await expect(second).resolves.toBe(activeRunId);
sent.resolve({ runId: activeRunId, status: "started" });
await expect(first).resolves.toBe(activeRunId);
expect(request).toHaveBeenCalledTimes(1);
expect(state.chatMessages).toHaveLength(1);
});
it("serializes non-image chat attachments as files", async () => {
const request = vi.fn().mockResolvedValue({ runId: "run-1", status: "started" });
const state = createState({