diff --git a/CHANGELOG.md b/CHANGELOG.md index ebdce48999a..57b5f01fb51 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -69,7 +69,6 @@ Docs: https://docs.openclaw.ai ### Fixes -- Agents/embed: only mark `lastBlockReplyText` after a text_end block reply is actually emitted, so message_end keeps its safety delivery when directive parsing suppresses an earlier chunk (fixes dropped channel replies including Telegram forum topics where logs showed skipping message_end sends). Fixes #77833. - TUI/sessions: bound the session picker to recent rows and use exact lookup-style refreshes for the active session, so dusty stores no longer make TUI hydrate weeks-old transcripts before becoming responsive. Thanks @vincentkoc. - Doctor/gateway: report recent supervisor restart handoffs in `openclaw doctor --deep`, using the installed service environment when available so service-managed clean exits are visible in guided diagnostics. Thanks @shakkernerd. - Gateway/status: show recent supervisor restart handoffs in `openclaw gateway status --deep`, including JSON details, so clean service-managed restarts are reported as restart handoffs instead of opaque stopped-service diagnostics. Thanks @shakkernerd. @@ -328,6 +327,7 @@ Docs: https://docs.openclaw.ai - CLI/update: stop dev-channel source updates immediately when `git fetch` fails, so tag conflicts cannot keep preflight, rebase, or build steps running against stale refs while the Gateway is still on the old runtime. (#77845) Thanks @obviyus. - Config/recovery: chmod restored `openclaw.json` back to owner-only (`0600`) after suspicious-read backup recovery on POSIX hosts, so a previously world-readable config mode cannot persist into a freshly restored credential-bearing config. (#77488) Thanks @drobison00. - Memory/dreaming: persist last dreaming-ingestion calendar day per daily note in `daily-ingestion.json` so unchanged notes are still re-ingested once per dreaming day for promotion signals toward deep thresholds. Fixes #76225. 
(#76359) Thanks @neeravmakwana. +- Agents/embed: keep message_end safety delivery armed when a silent text_end chunk produces no block reply, fixing dropped Telegram/forum replies. Fixes #77833. (#77840) Thanks @neeravmakwana. ## 2026.5.3-1 diff --git a/src/agents/pi-embedded-subscribe.subscribe-embedded-pi-session.emits-block-replies-text-end-does-not.test.ts b/src/agents/pi-embedded-subscribe.subscribe-embedded-pi-session.emits-block-replies-text-end-does-not.test.ts index d1633993c0e..17471fd60c9 100644 --- a/src/agents/pi-embedded-subscribe.subscribe-embedded-pi-session.emits-block-replies-text-end-does-not.test.ts +++ b/src/agents/pi-embedded-subscribe.subscribe-embedded-pi-session.emits-block-replies-text-end-does-not.test.ts @@ -127,7 +127,7 @@ describe("subscribeEmbeddedPiSession", () => { it("message_end block-replies visible text when text_end streamed only silent NO_REPLY chunks", async () => { const onBlockReply = vi.fn(); - const { emit } = createTextEndBlockReplyHarness({ onBlockReply }); + const { emit, subscription } = createTextEndBlockReplyHarness({ onBlockReply }); emit({ type: "message_start", message: { role: "assistant" } }); emitAssistantTextEnd({ emit, content: "NO_REPLY" }); @@ -148,6 +148,7 @@ describe("subscribeEmbeddedPiSession", () => { expect(onBlockReply).toHaveBeenCalledTimes(1); }); expect(onBlockReply.mock.calls[0]?.[0]?.text).toBe("Final visible reply."); + expect(subscription.assistantTexts).toEqual(["Final visible reply."]); }); it("does not duplicate when message_end flushes and a late text_end arrives", async () => { diff --git a/src/agents/pi-embedded-subscribe.ts b/src/agents/pi-embedded-subscribe.ts index 7321c7a5d99..b4431a0b6ab 100644 --- a/src/agents/pi-embedded-subscribe.ts +++ b/src/agents/pi-embedded-subscribe.ts @@ -733,8 +733,8 @@ export function subscribeEmbeddedPiSession(params: SubscribeEmbeddedPiSessionPar return; } - pushAssistantText(chunk); if (!params.onBlockReply) { + pushAssistantText(chunk); return; } 
const splitResult = replyDirectiveAccumulator.consume(chunk); @@ -749,11 +749,11 @@ export function subscribeEmbeddedPiSession(params: SubscribeEmbeddedPiSessionPar replyToTag, replyToCurrent, } = splitResult; // Skip empty payloads, but always emit if audioAsVoice is set (to propagate the flag) if (!cleanedText && (!mediaUrls || mediaUrls.length === 0) && !audioAsVoice) { return; } - state.lastBlockReplyText = chunk; + pushAssistantText(chunk); emitBlockReply( { text: cleanedText, @@ -769,6 +768,7 @@ export function subscribeEmbeddedPiSession(params: SubscribeEmbeddedPiSessionPar options?.final === true || Boolean(mediaUrls?.length || audioAsVoice), }, ); + state.lastBlockReplyText = chunk; }; const consumeReplyDirectives = (text: string, options?: { final?: boolean }) =>