From d6a4ec6a3d94c58a3d5dd7370bdfb8c1bfae1884 Mon Sep 17 00:00:00 2001 From: Vincent Koc Date: Sun, 29 Mar 2026 00:59:15 -0700 Subject: [PATCH] fix(telegram): sanitize invalid stream-order errors (#55999) * fix(telegram): sanitize invalid stream order errors * docs(changelog): remove maintainer follow-up entry --- CHANGELOG.md | 1 + ...d-helpers.formatassistanterrortext.test.ts | 9 +++++++++ ...ded-helpers.sanitizeuserfacingtext.test.ts | 11 ++++++++++ src/agents/pi-embedded-helpers/errors.ts | 20 +++++++++++++++++++ 4 files changed, 41 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5128f0e1ebc..143f34d6ca5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -115,6 +115,7 @@ Docs: https://docs.openclaw.ai - Message tool/buttons: keep the shared `buttons` schema optional in merged tool definitions so plain `action=send` calls stop failing validation when no buttons are provided. (#54418) Thanks @adzendo. - Agents/openai-compatible tool calls: deduplicate repeated tool call ids across live assistant messages and replayed history so OpenAI-compatible backends no longer reject duplicate `tool_call_id` values with HTTP 400. (#40996) Thanks @xaeon2026. - Models/openai-completions: default non-native OpenAI-compatible providers to omit tool-definition `strict` fields unless users explicitly opt back in, so tool calling keeps working on providers that reject that option. (#45497) Thanks @sahancava. +- Telegram/Anthropic streaming: replace raw invalid stream-order provider errors with a safe retry message so internal `message_start/message_stop` failures do not leak into chats. (#55408) Thanks @imydal. - Plugins/context engines: retry strict legacy `assemble()` calls without the new `prompt` field when older engines reject it, preserving prompt-aware retrieval compatibility for pre-prompt plugins. (#50848) thanks @danhdoan. 
- CLI/update status: explicitly say `up to date` when the local version already matches npm latest, while keeping the availability logic unchanged. (#51409) Thanks @dongzhenye. - Daemon/Linux: stop flagging non-gateway systemd services as duplicate gateways just because their unit files mention OpenClaw, reducing false-positive doctor/log noise. (#45328) Thanks @gregretkowski. diff --git a/src/agents/pi-embedded-helpers.formatassistanterrortext.test.ts b/src/agents/pi-embedded-helpers.formatassistanterrortext.test.ts index e4a54cfb9a4..1a24c5307c5 100644 --- a/src/agents/pi-embedded-helpers.formatassistanterrortext.test.ts +++ b/src/agents/pi-embedded-helpers.formatassistanterrortext.test.ts @@ -204,6 +204,15 @@ describe("formatAssistantErrorText", () => { "LLM request failed: network connection was interrupted.", ); }); + + it("sanitizes invalid streaming event order errors", () => { + const msg = makeAssistantError( + 'Unexpected event order, got message_start before receiving "message_stop"', + ); + expect(formatAssistantErrorText(msg)).toBe( + "LLM request failed: provider returned an invalid streaming response. Please try again.", + ); + }); }); describe("formatRawAssistantErrorForUi", () => { diff --git a/src/agents/pi-embedded-helpers.sanitizeuserfacingtext.test.ts b/src/agents/pi-embedded-helpers.sanitizeuserfacingtext.test.ts index 850bed8d535..71ccde3f509 100644 --- a/src/agents/pi-embedded-helpers.sanitizeuserfacingtext.test.ts +++ b/src/agents/pi-embedded-helpers.sanitizeuserfacingtext.test.ts @@ -136,6 +136,17 @@ describe("sanitizeUserFacingText", () => { ).toBe("LLM request failed: connection refused by the provider endpoint."); }); + it("sanitizes invalid streaming event order errors", () => { + expect( + sanitizeUserFacingText( + 'Unexpected event order, got message_start before receiving "message_stop"', + { errorContext: true }, + ), + ).toBe( + "LLM request failed: provider returned an invalid streaming response. Please try again.", + ); + }); + it.each([ { input: "Hello there!\n\nHello there!", diff --git a/src/agents/pi-embedded-helpers/errors.ts b/src/agents/pi-embedded-helpers/errors.ts index 0bad1ed4c2a..8c5b02862f0 100644 --- a/src/agents/pi-embedded-helpers/errors.ts +++ b/src/agents/pi-embedded-helpers/errors.ts @@ -174,6 +174,18 @@ function isReasoningConstraintErrorMessage(raw: string): boolean { ); } +function isInvalidStreamingEventOrderError(raw: string): boolean { + if (!raw) { + return false; + } + const lower = raw.toLowerCase(); + return ( + lower.includes("unexpected event order") && + lower.includes("message_start") && + lower.includes("message_stop") + ); +} + function hasRateLimitTpmHint(raw: string): boolean { const lower = raw.toLowerCase(); return /\btpm\b/i.test(lower) || lower.includes("tokens per minute"); } @@ -685,6 +697,10 @@ export function formatAssistantErrorText( ); } + if (isInvalidStreamingEventOrderError(raw)) { + return "LLM request failed: provider returned an invalid streaming response. Please try again."; + } + // Catch role ordering errors - including JSON-wrapped and "400" prefix variants if ( /incorrect role information|roles must alternate|400.*role|"message".*role.*information/i.test( @@ -777,6 +793,10 @@ export function sanitizeUserFacingText(text: string, opts?: { errorContext?: boo return BILLING_ERROR_USER_MESSAGE; } + if (isInvalidStreamingEventOrderError(trimmed)) { + return "LLM request failed: provider returned an invalid streaming response. Please try again."; + } + if (isRawApiErrorPayload(trimmed) || isLikelyHttpErrorText(trimmed)) { return formatRawAssistantErrorForUi(trimmed); }