fix(telegram): sanitize invalid stream-order errors (#55999)

* fix(telegram): sanitize invalid stream order errors

* docs(changelog): remove maintainer follow-up entry
This commit is contained in:
Vincent Koc
2026-03-29 00:59:15 -07:00
committed by GitHub
parent aec58d4cde
commit d6a4ec6a3d
4 changed files with 41 additions and 0 deletions

View File

@@ -115,6 +115,7 @@ Docs: https://docs.openclaw.ai
- Message tool/buttons: keep the shared `buttons` schema optional in merged tool definitions so plain `action=send` calls stop failing validation when no buttons are provided. (#54418) Thanks @adzendo.
- Agents/openai-compatible tool calls: deduplicate repeated tool call ids across live assistant messages and replayed history so OpenAI-compatible backends no longer reject duplicate `tool_call_id` values with HTTP 400. (#40996) Thanks @xaeon2026.
- Models/openai-completions: default non-native OpenAI-compatible providers to omit tool-definition `strict` fields unless users explicitly opt back in, so tool calling keeps working on providers that reject that option. (#45497) Thanks @sahancava.
- Telegram/Anthropic streaming: replace raw invalid stream-order provider errors with a safe retry message so internal `message_start/message_stop` failures do not leak into chats. (#55408) Thanks @imydal.
- Plugins/context engines: retry strict legacy `assemble()` calls without the new `prompt` field when older engines reject it, preserving prompt-aware retrieval compatibility for pre-prompt plugins. (#50848) Thanks @danhdoan.
- CLI/update status: explicitly say `up to date` when the local version already matches npm latest, while keeping the availability logic unchanged. (#51409) Thanks @dongzhenye.
- Daemon/Linux: stop flagging non-gateway systemd services as duplicate gateways just because their unit files mention OpenClaw, reducing false-positive doctor/log noise. (#45328) Thanks @gregretkowski.

View File

@@ -204,6 +204,15 @@ describe("formatAssistantErrorText", () => {
"LLM request failed: network connection was interrupted.",
);
});
it("sanitizes invalid streaming event order errors", () => {
  // Raw provider stream-order failures must never reach chat output verbatim.
  const rawError =
    'Unexpected event order, got message_start before receiving "message_stop"';
  const message = makeAssistantError(rawError);
  const rendered = formatAssistantErrorText(message);
  expect(rendered).toBe(
    "LLM request failed: provider returned an invalid streaming response. Please try again.",
  );
});
});
describe("formatRawAssistantErrorForUi", () => {

View File

@@ -136,6 +136,17 @@ describe("sanitizeUserFacingText", () => {
).toBe("LLM request failed: connection refused by the provider endpoint.");
});
it("sanitizes invalid streaming event order errors", () => {
  // Sanitizer must swap the provider's internal stream-order error for the
  // generic retry message when running in error context.
  const rawError =
    'Unexpected event order, got message_start before receiving "message_stop"';
  const sanitized = sanitizeUserFacingText(rawError, { errorContext: true });
  expect(sanitized).toBe(
    "LLM request failed: provider returned an invalid streaming response. Please try again.",
  );
});
it.each([
{
input: "Hello there!\n\nHello there!",

View File

@@ -174,6 +174,18 @@ function isReasoningConstraintErrorMessage(raw: string): boolean {
);
}
/**
 * Detects Anthropic-style "Unexpected event order" streaming failures that
 * mention both `message_start` and `message_stop`, so callers can replace the
 * raw provider error with a safe, user-facing retry message.
 *
 * @param raw - The raw error text from the provider (case-insensitive match).
 * @returns true when all three marker substrings are present.
 */
function isInvalidStreamingEventOrderError(raw: string): boolean {
  if (!raw) {
    return false;
  }
  const needle = raw.toLowerCase();
  const markers = ["unexpected event order", "message_start", "message_stop"];
  return markers.every((marker) => needle.includes(marker));
}
function hasRateLimitTpmHint(raw: string): boolean {
const lower = raw.toLowerCase();
return /\btpm\b/i.test(lower) || lower.includes("tokens per minute");
@@ -685,6 +697,10 @@ export function formatAssistantErrorText(
);
}
if (isInvalidStreamingEventOrderError(raw)) {
return "LLM request failed: provider returned an invalid streaming response. Please try again.";
}
// Catch role ordering errors - including JSON-wrapped and "400" prefix variants
if (
/incorrect role information|roles must alternate|400.*role|"message".*role.*information/i.test(
@@ -777,6 +793,10 @@ export function sanitizeUserFacingText(text: string, opts?: { errorContext?: boo
return BILLING_ERROR_USER_MESSAGE;
}
if (isInvalidStreamingEventOrderError(trimmed)) {
return "LLM request failed: provider returned an invalid streaming response. Please try again.";
}
if (isRawApiErrorPayload(trimmed) || isLikelyHttpErrorText(trimmed)) {
return formatRawAssistantErrorForUi(trimmed);
}