fix(control-ui): preserve optimistic chat tail

This commit is contained in:
Peter Steinberger
2026-04-25 06:15:47 +01:00
parent 86dc820560
commit 1afbfdf451
4 changed files with 144 additions and 1 deletions

View File

@@ -81,6 +81,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Control UI/WebChat: hide heartbeat prompts, `HEARTBEAT_OK` acknowledgments, and internal-only runtime context turns from visible chat history while leaving the underlying transcript intact. Fixes #71381. Thanks @gerald1950ggg-ai.
- Control UI/chat: keep optimistic user and assistant tail messages visible when a final history refresh briefly returns an older snapshot, preventing message cards from flash-disappearing until the next refresh. Fixes #71371. Thanks @WolvenRA.
- Talk/TTS: resolve configured extension speech providers from the active runtime registry before provider-list discovery, so Talk mode no longer rejects valid plugin speech providers as unsupported.
- Sessions/subagents: stop stale ended runs and old store-only child reverse links from reappearing in `childSessions`, while keeping live descendants and recently-ended children visible. Fixes #57920.
- Subagents: stop stale unended runs from counting as active or pending forever, while preserving restart-aborted recovery for recoverable child sessions. Fixes #71252. Thanks @hclsys.

View File

@@ -154,6 +154,10 @@ Cron jobs panel notes:
- `chat.history` responses are size-bounded for UI safety. When transcript entries are too large, Gateway may truncate long text fields, omit heavy metadata blocks, and replace oversized messages with a placeholder (`[chat.history omitted: message too large]`).
- Assistant/generated images are persisted as managed media references and served back through authenticated Gateway media URLs, so reloads do not depend on raw base64 image payloads staying in the chat history response.
- `chat.history` also strips display-only inline directive tags from visible assistant text (for example `[[reply_to_*]]` and `[[audio_as_voice]]`), plain-text tool-call XML payloads (including `<tool_call>...</tool_call>`, `<function_call>...</function_call>`, `<tool_calls>...</tool_calls>`, `<function_calls>...</function_calls>`, and truncated tool-call blocks), and leaked ASCII/full-width model control tokens, and omits assistant entries whose whole visible text is only the exact silent token `NO_REPLY` / `no_reply`.
- During an active send and the final history refresh, the chat view keeps local
optimistic user/assistant messages visible if `chat.history` briefly returns
an older snapshot; the canonical transcript replaces those local messages once
the Gateway history catches up.
- `chat.inject` appends an assistant note to the session transcript and broadcasts a `chat` event for UI-only updates (no agent run, no channel delivery).
- The chat header model and thinking pickers patch the active session immediately through `sessions.patch`; they are persistent session overrides, not one-turn-only send options.
- When fresh Gateway session usage reports show high context pressure, the chat

View File

@@ -786,6 +786,68 @@ describe("loadChatHistory", () => {
]);
});
it("keeps local optimistic tail messages when history reload returns a stale snapshot", async () => {
  // A message that the canonical transcript already knows about (carries __openclaw meta).
  const committedUser = {
    role: "user",
    content: [{ type: "text", text: "first" }],
    __openclaw: { seq: 1 },
  };
  // Locally appended messages with no transcript metadata yet — the optimistic tail.
  const pendingUser = {
    role: "user",
    content: [{ type: "text", text: "latest ask" }],
    timestamp: 10,
  };
  const pendingAssistant = {
    role: "assistant",
    content: [{ type: "text", text: "latest answer" }],
    timestamp: 11,
  };
  // Reload returns a stale snapshot that only contains the committed message.
  const request = vi.fn().mockResolvedValue({
    messages: [committedUser],
    thinkingLevel: "low",
  });
  const state = createState({
    connected: true,
    client: { request } as unknown as ChatState["client"],
    chatMessages: [committedUser, pendingUser, pendingAssistant],
  });
  await loadChatHistory(state);
  // The optimistic tail survives the stale refresh instead of flash-disappearing.
  expect(state.chatMessages).toEqual([committedUser, pendingUser, pendingAssistant]);
  expect(state.chatStream).toBeNull();
});
it("does not duplicate optimistic tail messages after history catches up", async () => {
  // Local optimistic copy of the user's latest message (no transcript meta).
  const pendingUser = {
    role: "user",
    content: [{ type: "text", text: "latest ask" }],
    timestamp: 10,
  };
  // Canonical transcript entries for the same exchange, now persisted.
  const canonicalUser = {
    role: "user",
    content: [{ type: "text", text: "latest ask" }],
    __openclaw: { seq: 1 },
  };
  const canonicalAssistant = {
    role: "assistant",
    content: [{ type: "text", text: "latest answer" }],
    __openclaw: { seq: 2 },
  };
  const request = vi.fn().mockResolvedValue({
    messages: [canonicalUser, canonicalAssistant],
  });
  const state = createState({
    connected: true,
    client: { request } as unknown as ChatState["client"],
    chatMessages: [pendingUser],
  });
  await loadChatHistory(state);
  // The canonical transcript fully replaces the optimistic copy — no duplicates.
  expect(state.chatMessages).toEqual([canonicalUser, canonicalAssistant]);
});
it("shows a targeted message when chat history is unauthorized", async () => {
const request = vi.fn().mockRejectedValue(
new GatewayRequestError({

View File

@@ -135,6 +135,80 @@ function shouldHideHistoryMessage(message: unknown): boolean {
);
}
/**
 * True when a history message carries Gateway transcript metadata — i.e. its
 * `__openclaw` field is a non-null object. Messages without this meta are
 * treated as local-only (e.g. optimistic UI inserts).
 */
function hasTranscriptMeta(message: unknown): boolean {
  if (!message || typeof message !== "object") {
    return false;
  }
  const meta = (message as { __openclaw?: unknown }).__openclaw;
  return Boolean(meta) && typeof meta === "object";
}
/**
 * True for chat messages that exist only in the local view: user/assistant
 * entries that have not yet been assigned Gateway transcript metadata.
 * Non-objects and transcript-backed messages are never optimistic.
 */
function isLocallyOptimisticHistoryMessage(message: unknown): boolean {
  if (!message || typeof message !== "object") {
    return false;
  }
  if (hasTranscriptMeta(message)) {
    return false;
  }
  const role = normalizeLowercaseStringOrEmpty((message as { role?: unknown }).role);
  return role === "user" || role === "assistant";
}
/**
 * Builds a display-identity key for a chat message so a local optimistic copy
 * can be matched against its canonical transcript entry. Prefers the trimmed
 * visible text; falls back to the JSON-serialized content payload. Returns
 * null when no stable signature can be derived (no role, unserializable
 * content, or not an object).
 */
function messageDisplaySignature(message: unknown): string | null {
  if (!message || typeof message !== "object") {
    return null;
  }
  const role = normalizeLowercaseStringOrEmpty((message as { role?: unknown }).role);
  if (!role) {
    return null;
  }
  const visibleText = extractText(message)?.trim();
  if (visibleText) {
    return `${role}:text:${visibleText}`;
  }
  try {
    const payload = (message as { content?: unknown }).content ?? null;
    return `${role}:content:${JSON.stringify(payload)}`;
  } catch {
    // Circular or otherwise unserializable content — no stable signature.
    return null;
  }
}
/**
 * Re-appends locally optimistic tail messages onto a freshly reloaded history
 * when the reload appears to be a stale snapshot (i.e. it is missing messages
 * the user just saw). The tail is kept only when every trailing message in the
 * previous view is a visible, locally optimistic user/assistant entry that the
 * reloaded history does not already contain; otherwise the reloaded history is
 * returned untouched, letting the canonical transcript win.
 */
function preserveOptimisticTailMessages(
  historyMessages: unknown[],
  previousMessages: unknown[],
): unknown[] {
  // Nothing to reconcile when either side is empty.
  if (!historyMessages.length || !previousMessages.length) {
    return historyMessages;
  }
  // Display signatures of everything the reloaded history already contains.
  const knownSignatures = new Set<string>();
  for (const message of historyMessages) {
    const signature = messageDisplaySignature(message);
    if (signature) {
      knownSignatures.add(signature);
    }
  }
  // Scan the previous view backwards for the newest message the reloaded
  // history also knows about; anything after it is a candidate optimistic tail.
  let anchorIndex = -1;
  for (let index = previousMessages.length - 1; index >= 0; index--) {
    const signature = messageDisplaySignature(previousMessages[index]);
    if (signature && knownSignatures.has(signature)) {
      anchorIndex = index;
      break;
    }
  }
  // No shared message at all — trust the reloaded history as-is.
  if (anchorIndex < 0) {
    return historyMessages;
  }
  const tail: unknown[] = [];
  for (const candidate of previousMessages.slice(anchorIndex + 1)) {
    // Bail out entirely if any trailing message is not a visible optimistic one.
    if (!isLocallyOptimisticHistoryMessage(candidate) || shouldHideHistoryMessage(candidate)) {
      return historyMessages;
    }
    const signature = messageDisplaySignature(candidate);
    // Unidentifiable or already-absorbed messages also disqualify the tail.
    if (!signature || knownSignatures.has(signature)) {
      return historyMessages;
    }
    tail.push(candidate);
  }
  return tail.length ? [...historyMessages, ...tail] : historyMessages;
}
function isRetryableStartupUnavailable(err: unknown, method: string): err is GatewayRequestError {
if (!(err instanceof GatewayRequestError)) {
return false;
@@ -203,6 +277,7 @@ export async function loadChatHistory(state: ChatState) {
const sessionKey = state.sessionKey;
const requestVersion = beginChatHistoryRequest(state);
const startedAt = Date.now();
const previousMessages = state.chatMessages;
state.chatLoading = true;
state.lastError = null;
try {
@@ -237,7 +312,8 @@ export async function loadChatHistory(state: ChatState) {
return;
}
const messages = Array.isArray(res.messages) ? res.messages : [];
state.chatMessages = messages.filter((message) => !shouldHideHistoryMessage(message));
const visibleMessages = messages.filter((message) => !shouldHideHistoryMessage(message));
state.chatMessages = preserveOptimisticTailMessages(visibleMessages, previousMessages);
state.chatThinkingLevel = res.thinkingLevel ?? null;
// Clear all streaming state — history includes tool results and text
// inline, so keeping streaming artifacts would cause duplicates.