fix: suppress raw JSON parse errors from leaking to Discord channels (#59076) [AI-assisted]

When streaming tool calls with long CJK text, the Anthropic SDK's SSE
parser can throw SyntaxError (e.g. 'Expected "," or "}" after property
value in JSON at position 334'). This error propagates through pi-ai's
catch block as an assistant error message and reaches formatAssistantErrorText,
which did not recognize the pattern and forwarded the raw error text to
the Discord channel.

- Add isStreamingJsonParseError() detector in pi-embedded-helpers/errors.ts
- Intercept in formatAssistantErrorText() to return a friendly message
- Also guard sanitizeUserFacingText() errorContext path for defense-in-depth
- Add 5 test cases covering various JSON parse error patterns and contexts

Verified: 131 related tests pass, pnpm build succeeds, zero regressions.
AI-assisted: code fix and test generation verified by automated test runs
This commit is contained in:
jackybzhou
2026-04-01 23:34:31 +08:00
committed by Mason Huang
parent 03e17d19e9
commit 638865b5d8
3 changed files with 70 additions and 0 deletions

View File

@@ -7,6 +7,7 @@ import {
getApiErrorPayloadFingerprint,
formatRawAssistantErrorForUi,
isRawApiErrorPayload,
sanitizeUserFacingText,
} from "./pi-embedded-helpers.js";
import { makeAssistantMessageFixture } from "./test-helpers/assistant-message-fixtures.js";
@@ -349,6 +350,40 @@ describe("formatAssistantErrorText", () => {
"LLM request failed: provider returned an invalid streaming response. Please try again.",
);
});
// Friendly copy shown to users when the provider streams a malformed JSON fragment (#59076).
const MALFORMED_FRAGMENT_COPY =
  "LLM streaming response contained a malformed fragment. Please try again.";
it("sanitizes streaming JSON parse errors from Anthropic SDK (#59076)", () => {
  const message = makeAssistantError(
    "Expected ',' or '}' after property value in JSON at position 334 (line 1 column 335)",
  );
  expect(formatAssistantErrorText(message)).toBe(MALFORMED_FRAGMENT_COPY);
});
it("sanitizes 'Expected double-quoted property name' JSON parse errors (#59076)", () => {
  const message = makeAssistantError(
    "Expected double-quoted property name in JSON at position 8912 (line 219 column 5)",
  );
  expect(formatAssistantErrorText(message)).toBe(MALFORMED_FRAGMENT_COPY);
});
it("sanitizes 'Unexpected token' JSON parse errors (#59076)", () => {
  const message = makeAssistantError("Unexpected token < in JSON at position 0");
  expect(formatAssistantErrorText(message)).toBe(MALFORMED_FRAGMENT_COPY);
});
// Provider-side request-validation payloads carry actionable detail and must NOT
// be collapsed into the generic streaming-error copy.
it("keeps provider request-validation JSON diagnostics actionable", () => {
  const message = makeAssistantError(
    '{"type":"error","error":{"type":"invalid_request_error","message":"Expected value in JSON at position 12 for messages.0.content"}}',
  );
  expect(formatAssistantErrorText(message)).toBe(
    "LLM request rejected: Expected value in JSON at position 12 for messages.0.content",
  );
});
});
describe("formatRawAssistantErrorForUi", () => {
@@ -424,3 +459,22 @@ describe("raw API error payload helpers", () => {
);
});
});
describe("sanitizeUserFacingText — streaming JSON parse error (#59076)", () => {
  // Shared fixture: a V8-style JSON.parse failure message as emitted during streaming.
  const parseErrorText =
    "Expected ',' or '}' after property value in JSON at position 334 (line 1 column 335)";
  it("rewrites JSON parse error in error context", () => {
    expect(sanitizeUserFacingText(parseErrorText, { errorContext: true })).toBe(
      "LLM streaming response contained a malformed fragment. Please try again.",
    );
  });
  it("does not rewrite JSON parse error when not in error context", () => {
    // Outside error context the text could be legitimate assistant content
    // mentioning JSON errors, so it must pass through untouched.
    expect(sanitizeUserFacingText(parseErrorText, { errorContext: false })).toBe(parseErrorText);
  });
});

View File

@@ -46,6 +46,7 @@ import {
isInvalidStreamingEventOrderError,
isLikelyHttpErrorText,
isRawApiErrorPayload,
isStreamingJsonParseError,
sanitizeUserFacingText,
} from "./sanitize-user-facing-text.js";
import type { FailoverReason } from "./types.js";
@@ -1139,6 +1140,10 @@ export function formatAssistantErrorText(
return formatRawAssistantErrorForUi(raw);
}
if (isStreamingJsonParseError(raw)) {
return "LLM streaming response contained a malformed fragment. Please try again.";
}
// Never return raw unhandled errors - log for debugging but return safe message
if (raw.length > 600) {
log.warn(`Long error truncated: ${raw.slice(0, 200)}`);

View File

@@ -209,6 +209,13 @@ export function isInvalidStreamingEventOrderError(raw: string): boolean {
);
}
/**
 * Detects V8-style JSON.parse failure messages (e.g. "Expected ',' or '}'
 * after property value in JSON at position 334") that can surface when a
 * provider streams a malformed fragment. Callers replace matching text with
 * friendly copy instead of forwarding the raw parser error (#59076).
 *
 * @param raw - Candidate error text; empty/falsy input is never a match.
 * @returns true when the text looks like a raw JSON parse diagnostic.
 */
export function isStreamingJsonParseError(raw: string): boolean {
  const jsonParseErrorPattern = /\b(?:expected|unexpected)\b.+\bin json\b.+\bposition\b/i;
  return raw ? jsonParseErrorPattern.test(raw) : false;
}
function hasRateLimitTpmHint(raw: string): boolean {
const lower = normalizeLowercaseStringOrEmpty(raw);
return /\btpm\b/i.test(lower) || lower.includes("tokens per minute");
@@ -419,6 +426,10 @@ export function sanitizeUserFacingText(text: unknown, opts?: { errorContext?: bo
return formatRawAssistantErrorForUi(trimmed);
}
if (isStreamingJsonParseError(trimmed)) {
return "LLM streaming response contained a malformed fragment. Please try again.";
}
if (ERROR_PREFIX_RE.test(trimmed)) {
const prefixedCopy = formatRateLimitOrOverloadedErrorCopy(trimmed);
if (prefixedCopy) {