fix(ui): repair control chat metadata handling

This commit is contained in:
Val Alexander
2026-05-07 23:32:05 -05:00
parent 22e83ab3c1
commit ba2f855fcb
3 changed files with 54 additions and 12 deletions

View File

@@ -65,6 +65,7 @@ Docs: https://docs.openclaw.ai
- Slack/performance: reduce message preparation, stream recipient lookup, and thread-context allocation overhead on Slack reply hot paths. Thanks @vincentkoc.
- Channels/streaming: cap progress-draft tool lines by default so edited progress boxes avoid jumpy reflow from long wrapped lines.
- Control UI/chat: add an agent-first filter to the chat session picker, keep chat controls/composer responsive across phone/tablet/desktop widths, keep desktop chat controls on one row, avoid duplicate avatar refreshes during initial chat load, and hide that row while scrolling down the transcript. Thanks @BunsDev.
- Control UI/chat: strip untrusted sender metadata from live streams and transcript display, preserve canvas preview anchors, and stop operator UI clients from injecting their internal client id as sender identity. Fixes #78739. Thanks @guguangxin-eng, @hclsys, and @BunsDev.
- Control UI/chat: collapse consecutive duplicate text messages into one bubble with a count so repeated text-only messages stay compact without hiding nearby context.
- Control UI/chat and Sessions: label inherited thinking defaults separately from explicit overrides while preserving provider-supplied option labels. Fixes #77581. Thanks @BunsDev and @Beandon13.
- Agents/runtime: add prepared runtime foundation contracts for carrying provider, model, tool, TTS, and outbound runtime facts through later reply-path migrations. Thanks @mcaxtr.

View File

@@ -154,7 +154,6 @@ describe("buildChatItems", () => {
expect(items).toEqual([]);
});
it("suppresses active sender metadata streams before rendering", () => {
const items = buildChatItems(
createProps({
@@ -197,7 +196,8 @@ describe("buildChatItems", () => {
});
expect(groups).toEqual([]);
});
it("renders only the last 100 history messages and shows a hidden-count notice", () => {
const items = buildChatItems(
createProps({
@@ -229,7 +229,6 @@ describe("buildChatItems", () => {
expect(groups.at(-1)?.messages[0].message).toMatchObject({
content: "message 104",
});
});
it("does not collapse duplicate text messages separated by another message", () => {
@@ -312,6 +311,45 @@ describe("buildChatItems", () => {
expect(firstMessageContent(groups[1]).some((block) => isCanvasBlock(block))).toBe(false);
});
it("preserves a metadata-only assistant anchor when lifting canvas previews", () => {
const groups = messageGroups({
messages: [
{
id: "assistant-metadata-anchor",
role: "assistant",
content: SENDER_METADATA_BLOCK,
timestamp: 1_000,
},
],
toolMessages: [
{
id: "tool-canvas-for-empty-anchor",
role: "tool",
toolCallId: "call-canvas-empty-anchor",
toolName: "canvas_render",
content: JSON.stringify({
kind: "canvas",
view: {
backend: "canvas",
id: "cv_empty_anchor",
url: "/__openclaw__/canvas/documents/cv_empty_anchor/index.html",
title: "Empty anchor demo",
preferred_height: 320,
},
presentation: {
target: "assistant_message",
},
}),
timestamp: 1_001,
},
],
});
expect(
groups.some((group) => firstMessageContent(group).some((block) => isCanvasBlock(block))),
).toBe(true);
});
it("does not lift generic view handles from non-canvas payloads", () => {
const groups = messageGroups({
messages: [

View File

@@ -260,11 +260,18 @@ function sanitizeStreamText(text: string): string {
}
export function buildChatItems(props: BuildChatItemsProps): Array<ChatItem | MessageGroup> {
const items: ChatItem[] = [];
let items: ChatItem[] = [];
const history = (Array.isArray(props.messages) ? props.messages : []).filter(
(message) => !isAssistantHeartbeatAckForDisplay(message),
);
const tools = Array.isArray(props.toolMessages) ? props.toolMessages : [];
const liftedCanvasSources = tools
.map((tool) => extractChatMessagePreview(tool))
.filter((entry) => Boolean(entry)) as Array<{
preview: Extract<NonNullable<ToolCard["preview"]>, { kind: "canvas" }>;
text: string | null;
timestamp: number | null;
}>;
const historyStart = Math.max(0, history.length - CHAT_HISTORY_RENDER_LIMIT);
if (historyStart > 0) {
items.push({
@@ -309,7 +316,7 @@ export function buildChatItems(props: BuildChatItemsProps): Array<ChatItem | Mes
if (props.searchOpen && searchQuery.trim() && !messageMatchesSearchQuery(msg, searchQuery)) {
continue;
}
if (!hasRenderableNormalizedMessage(msg)) {
if (!hasRenderableNormalizedMessage(msg) && normalized.role.toLowerCase() !== "assistant") {
continue;
}
@@ -319,13 +326,6 @@ export function buildChatItems(props: BuildChatItemsProps): Array<ChatItem | Mes
message: msg,
});
}
const liftedCanvasSources = tools
.map((tool) => extractChatMessagePreview(tool))
.filter((entry) => Boolean(entry)) as Array<{
preview: Extract<NonNullable<ToolCard["preview"]>, { kind: "canvas" }>;
text: string | null;
timestamp: number | null;
}>;
for (const liftedCanvasSource of liftedCanvasSources) {
const assistantIndex = findNearestAssistantMessageIndex(items, liftedCanvasSource.timestamp);
if (assistantIndex == null) {
@@ -344,6 +344,9 @@ export function buildChatItems(props: BuildChatItemsProps): Array<ChatItem | Mes
),
};
}
items = items.filter(
(item) => item.kind !== "message" || hasRenderableNormalizedMessage(item.message),
);
const segments = props.streamSegments ?? [];
const maxLen = Math.max(segments.length, tools.length);
for (let i = 0; i < maxLen; i++) {