From 638865b5d8c49f4f200e6ce10aa949bad8db75df Mon Sep 17 00:00:00 2001
From: jackybzhou
Date: Wed, 1 Apr 2026 23:34:31 +0800
Subject: [PATCH] fix: suppress raw JSON parse errors from leaking to Discord
 channels (#59076) [AI-assisted]

When streaming tool calls with long CJK text, the Anthropic SDK's SSE
parser can throw SyntaxError (e.g. 'Expected "," or "}" after property
value in JSON at position 334'). This error propagates through pi-ai's
catch block as an assistant error message and reaches
formatAssistantErrorText, which did not recognize the pattern and
forwarded the raw error text to the Discord channel.

- Add isStreamingJsonParseError() detector in pi-embedded-helpers/errors.ts
- Intercept in formatAssistantErrorText() to return a friendly message
- Also guard sanitizeUserFacingText() errorContext path for defense-in-depth
- Add 5 test cases covering various JSON parse error patterns and contexts

Verified: 131 related tests pass, pnpm build succeeds, zero regression.

AI-assisted: code fix and test generation verified by automated test runs
---
 ...d-helpers.formatassistanterrortext.test.ts | 54 +++++++++++++++++++
 src/agents/pi-embedded-helpers/errors.ts      |  5 ++
 .../sanitize-user-facing-text.ts              | 11 ++++
 3 files changed, 70 insertions(+)

diff --git a/src/agents/pi-embedded-helpers.formatassistanterrortext.test.ts b/src/agents/pi-embedded-helpers.formatassistanterrortext.test.ts
index fd26408718e..f856cd1d50a 100644
--- a/src/agents/pi-embedded-helpers.formatassistanterrortext.test.ts
+++ b/src/agents/pi-embedded-helpers.formatassistanterrortext.test.ts
@@ -7,6 +7,7 @@ import {
   getApiErrorPayloadFingerprint,
   formatRawAssistantErrorForUi,
   isRawApiErrorPayload,
+  sanitizeUserFacingText,
 } from "./pi-embedded-helpers.js";
 import { makeAssistantMessageFixture } from "./test-helpers/assistant-message-fixtures.js";
 
@@ -349,6 +350,40 @@
       "LLM request failed: provider returned an invalid streaming response. Please try again.",
     );
   });
+
+  it("sanitizes streaming JSON parse errors from Anthropic SDK (#59076)", () => {
+    const msg = makeAssistantError(
+      "Expected ',' or '}' after property value in JSON at position 334 (line 1 column 335)",
+    );
+    expect(formatAssistantErrorText(msg)).toBe(
+      "LLM streaming response contained a malformed fragment. Please try again.",
+    );
+  });
+
+  it("sanitizes 'Expected double-quoted property name' JSON parse errors (#59076)", () => {
+    const msg = makeAssistantError(
+      "Expected double-quoted property name in JSON at position 8912 (line 219 column 5)",
+    );
+    expect(formatAssistantErrorText(msg)).toBe(
+      "LLM streaming response contained a malformed fragment. Please try again.",
+    );
+  });
+
+  it("sanitizes 'Unexpected token' JSON parse errors (#59076)", () => {
+    const msg = makeAssistantError("Unexpected token < in JSON at position 0");
+    expect(formatAssistantErrorText(msg)).toBe(
+      "LLM streaming response contained a malformed fragment. Please try again.",
+    );
+  });
+
+  it("keeps provider request-validation JSON diagnostics actionable", () => {
+    const msg = makeAssistantError(
+      '{"type":"error","error":{"type":"invalid_request_error","message":"Expected value in JSON at position 12 for messages.0.content"}}',
+    );
+    expect(formatAssistantErrorText(msg)).toBe(
+      "LLM request rejected: Expected value in JSON at position 12 for messages.0.content",
+    );
+  });
 });
 
 describe("formatRawAssistantErrorForUi", () => {
@@ -424,3 +459,22 @@ describe("raw API error payload helpers", () => {
     );
   });
 });
+
+describe("sanitizeUserFacingText — streaming JSON parse error (#59076)", () => {
+  it("rewrites JSON parse error in error context", () => {
+    const result = sanitizeUserFacingText(
+      "Expected ',' or '}' after property value in JSON at position 334 (line 1 column 335)",
+      { errorContext: true },
+    );
+    expect(result).toBe("LLM streaming response contained a malformed fragment. Please try again.");
+  });
+
+  it("does not rewrite JSON parse error when not in error context", () => {
+    // When not in error context, the text could be legitimate assistant content
+    // mentioning JSON errors. Don't rewrite.
+    const text =
+      "Expected ',' or '}' after property value in JSON at position 334 (line 1 column 335)";
+    const result = sanitizeUserFacingText(text, { errorContext: false });
+    expect(result).toBe(text);
+  });
+});
diff --git a/src/agents/pi-embedded-helpers/errors.ts b/src/agents/pi-embedded-helpers/errors.ts
index 12c719818dd..c805a590b8c 100644
--- a/src/agents/pi-embedded-helpers/errors.ts
+++ b/src/agents/pi-embedded-helpers/errors.ts
@@ -46,6 +46,7 @@ import {
   isInvalidStreamingEventOrderError,
   isLikelyHttpErrorText,
   isRawApiErrorPayload,
+  isStreamingJsonParseError,
   sanitizeUserFacingText,
 } from "./sanitize-user-facing-text.js";
 import type { FailoverReason } from "./types.js";
@@ -1139,6 +1140,10 @@ export function formatAssistantErrorText(
     return formatRawAssistantErrorForUi(raw);
   }
 
+  if (isStreamingJsonParseError(raw)) {
+    return "LLM streaming response contained a malformed fragment. Please try again.";
+  }
+
   // Never return raw unhandled errors - log for debugging but return safe message
   if (raw.length > 600) {
     log.warn(`Long error truncated: ${raw.slice(0, 200)}`);
diff --git a/src/agents/pi-embedded-helpers/sanitize-user-facing-text.ts b/src/agents/pi-embedded-helpers/sanitize-user-facing-text.ts
index 7708e09a952..ad331f49631 100644
--- a/src/agents/pi-embedded-helpers/sanitize-user-facing-text.ts
+++ b/src/agents/pi-embedded-helpers/sanitize-user-facing-text.ts
@@ -209,6 +209,13 @@ export function isInvalidStreamingEventOrderError(raw: string): boolean {
   );
 }
 
+export function isStreamingJsonParseError(raw: string): boolean {
+  if (!raw) {
+    return false;
+  }
+  return /\b(?:expected|unexpected)\b.+\bin json\b.+\bposition\b/i.test(raw);
+}
+
 function hasRateLimitTpmHint(raw: string): boolean {
   const lower = normalizeLowercaseStringOrEmpty(raw);
   return /\btpm\b/i.test(lower) || lower.includes("tokens per minute");
@@ -419,6 +426,10 @@ export function sanitizeUserFacingText(text: unknown, opts?: { errorContext?: bo
     return formatRawAssistantErrorForUi(trimmed);
   }
 
+  if (isStreamingJsonParseError(trimmed)) {
+    return "LLM streaming response contained a malformed fragment. Please try again.";
+  }
+
   if (ERROR_PREFIX_RE.test(trimmed)) {
     const prefixedCopy = formatRateLimitOrOverloadedErrorCopy(trimmed);
     if (prefixedCopy) {