fix(telegram): clean up tool-only previews

This commit is contained in:
Val Alexander
2026-05-04 08:25:59 -05:00
committed by Val Alexander
parent fc1f1f4fdf
commit 042d7b8823
3 changed files with 35 additions and 1 deletion

View File

@@ -43,6 +43,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Telegram: clean up tool-only draft previews after assistant message boundaries so transient `Surfacing...` tool-status bubbles do not linger when no matching final preview arrives. Thanks @BunsDev.
- TUI/escape abort: track the in-flight runId after `chat.send` resolves so pressing Esc during the gap before the first gateway event aborts the run instead of repeatedly printing `no active run`. Fixes #1296. Thanks @Lukavyi and @romneyda.
- TUI/render: stop the long-token sanitizer from injecting literal spaces inside inline code spans, fenced code blocks, table borders, and bare hyphenated/dotted identifiers, so copied package names, entity IDs, and shell line-continuations stay byte-for-byte intact while narrow-terminal protection still chunks unidentifiable long prose tokens. Fixes #48432, #39505. Thanks @DocOellerson, @xeusoc, @CCcassiusdjs, @akramcodez, @brokemac79, @romneyda.
- Gateway/status: label Linux managed gateway services as `systemd user`, making status output explicit about the user-service scope instead of implying a system-level unit. Thanks @vincentkoc.

View File

@@ -815,6 +815,36 @@ describe("dispatchTelegramMessage draft streaming", () => {
expect(draftStream.clear).not.toHaveBeenCalled();
});
// Regression test: a draft preview that only ever showed tool status (no
// assistant text) must be deleted once the assistant message boundary
// passes and no final preview arrives — the transient "Surfacing..." style
// bubble must not linger.
it("cleans up tool-only Telegram previews archived at assistant boundaries", async () => {
  // Draft stream mock seeded with preview message id 2001 — presumably the
  // id the preview bubble is posted under; verify against the helper.
  const draftStream = createSequencedDraftStream(2001);
  createTelegramDraftStream.mockReturnValue(draftStream);
  // Simulate a run that emits only tool activity (tool start + command
  // progress), then crosses the assistant-message boundary without ever
  // queuing a final preview (queuedFinal: false).
  dispatchReplyWithBufferedBlockDispatcher.mockImplementation(async ({ replyOptions }) => {
    await replyOptions?.onToolStart?.({ name: "exec", phase: "start" });
    await replyOptions?.onItemEvent?.({
      kind: "command",
      name: "exec",
      progressText: "exec git status",
    });
    await replyOptions?.onAssistantMessageStart?.();
    return { queuedFinal: false };
  });
  const bot = createBot();
  await dispatchWithContext({
    context: createContext(),
    streamMode: "partial",
    telegramCfg: { streaming: { mode: "partial" } },
    bot,
  });
  // The tool-status line was streamed into the draft preview...
  expect(draftStream.update).toHaveBeenCalledWith(
    expect.stringMatching(/`🛠️ Exec: exec git status`$/),
  );
  // ...the preview was materialized and a new message was forced at the
  // assistant boundary...
  expect(draftStream.materialize).toHaveBeenCalled();
  expect(draftStream.forceNewMessage).toHaveBeenCalled();
  // ...and the orphaned tool-only preview (message 2001) was deleted
  // rather than left behind. 123 is presumably the chat id supplied by
  // createContext() — confirm against the test helpers.
  expect(bot.api.deleteMessage).toHaveBeenCalledWith(123, 2001);
});
it("streams Telegram command progress text by default when preview streaming is active", async () => {
const draftStream = createDraftStream();
createTelegramDraftStream.mockReturnValue(draftStream);

View File

@@ -483,6 +483,7 @@ export const dispatchTelegramMessage = async ({
Boolean(answerLane.stream) && resolveChannelStreamingPreviewToolProgress(telegramCfg);
let previewToolProgressSuppressed = false;
let previewToolProgressLines: string[] = [];
let answerLaneHasAssistantContent = false;
const renderProgressDraft = async (options?: { flush?: boolean }) => {
if (!answerLane.stream || streamMode !== "progress") {
return;
@@ -605,13 +606,14 @@ export const dispatchTelegramMessage = async ({
messageId: previewMessageId,
textSnapshot: answerLane.lastPartialText,
visibleSinceMs: answerLane.stream?.visibleSinceMs?.(),
deleteIfUnused: false,
deleteIfUnused: !answerLaneHasAssistantContent,
});
}
answerLane.stream?.forceNewMessage();
didForceNewMessage = true;
}
resetDraftLaneState(answerLane);
answerLaneHasAssistantContent = false;
if (didForceNewMessage) {
activePreviewLifecycleByLane.answer = "transient";
retainPreviewOnCleanupByLane.answer = false;
@@ -630,6 +632,7 @@ export const dispatchTelegramMessage = async ({
if (streamMode === "progress") {
return;
}
answerLaneHasAssistantContent = true;
previewToolProgressSuppressed = true;
previewToolProgressLines = [];
}