mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-06 06:50:43 +00:00
fix: suppress raw JSON parse errors from leaking to Discord channels (#59076) [AI-assisted] (#59118)
Merged via squash.
Prepared head SHA: b8b3686445
Co-authored-by: singleGanghood <156392444+singleGanghood@users.noreply.github.com>
Co-authored-by: hxy91819 <8814856+hxy91819@users.noreply.github.com>
Reviewed-by: @hxy91819
This commit is contained in:
@@ -20,6 +20,7 @@ Docs: https://docs.openclaw.ai
|
||||
|
||||
### Fixes
|
||||
|
||||
- Agents/errors: suppress malformed streaming tool-call JSON fragments before they reach chat surfaces while preserving provider request-validation diagnostics. Fixes #59076; keeps #59080 as duplicate coverage. (#59118) Thanks @singleGanghood.
|
||||
- CLI/models: restore provider-filtered `models list --all --provider <id>` rows for providers without manifest/static catalog coverage, including Anthropic and Amazon Bedrock, while keeping the compatibility fallback off expensive availability and resolver paths. Thanks @shakkernerd.
|
||||
- CLI/tools: keep the Gateway `tools.*` RPC namespace out of plugin command discovery and managed proxy startup, so stray commands like `openclaw tools effective` fail quickly instead of cold-loading plugin metadata. Refs #73477. Thanks @oromeis.
|
||||
- CLI/status: keep default text `openclaw status --usage` on metadata-only channel scans unless `--deep` or `--all` is set, and send stray `openclaw tools --help` through the precomputed root-help fast path so latency-triage commands avoid plugin/runtime cold loads before printing. Refs #73477 and #74220. Thanks @oromeis and @NianJiuZst.
|
||||
|
||||
@@ -41,6 +41,14 @@ function createStalledSseResponse(params: { onCancel: (reason: unknown) => void
|
||||
params.onCancel(reason);
|
||||
},
|
||||
});
|
||||
|
||||
return new Response(body, {
|
||||
status: 200,
|
||||
headers: { "content-type": "text/event-stream" },
|
||||
});
|
||||
}
|
||||
|
||||
function createRawSseResponse(body: string): Response {
|
||||
return new Response(body, {
|
||||
status: 200,
|
||||
headers: { "content-type": "text/event-stream" },
|
||||
@@ -339,6 +347,23 @@ describe("anthropic transport stream", () => {
|
||||
expect(guardedFetchMock).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("classifies malformed Anthropic SSE data as a stable transport error", async () => {
|
||||
guardedFetchMock.mockResolvedValueOnce(createRawSseResponse('data: {"type":\n\n'));
|
||||
|
||||
const result = await runTransportStream(
|
||||
makeAnthropicTransportModel(),
|
||||
{
|
||||
messages: [{ role: "user", content: "hello" }],
|
||||
} as AnthropicStreamContext,
|
||||
{
|
||||
apiKey: "sk-ant-api",
|
||||
} as AnthropicStreamOptions,
|
||||
);
|
||||
|
||||
expect(result.stopReason).toBe("error");
|
||||
expect(result.errorMessage).toBe("OpenClaw transport error: malformed_streaming_fragment");
|
||||
});
|
||||
|
||||
it("preserves Anthropic OAuth identity and tool-name remapping with transport overrides", async () => {
|
||||
guardedFetchMock.mockResolvedValueOnce(
|
||||
createSseResponse([
|
||||
|
||||
@@ -9,6 +9,7 @@ import {
|
||||
type SimpleStreamOptions,
|
||||
type ThinkingLevel,
|
||||
} from "@mariozechner/pi-ai";
|
||||
import { MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE } from "../shared/assistant-error-format.js";
|
||||
import { normalizeLowercaseStringOrEmpty } from "../shared/string-coerce.js";
|
||||
import {
|
||||
applyAnthropicPayloadPolicyToParams,
|
||||
@@ -534,6 +535,17 @@ function readAnthropicSseChunk(
|
||||
});
|
||||
}
|
||||
|
||||
function parseAnthropicSseEventData(data: string): Record<string, unknown> {
|
||||
try {
|
||||
return JSON.parse(data) as Record<string, unknown>;
|
||||
} catch (error) {
|
||||
if (error instanceof SyntaxError) {
|
||||
throw new Error(MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE, { cause: error });
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async function* parseAnthropicSseBody(
|
||||
body: ReadableStream<Uint8Array>,
|
||||
signal?: AbortSignal,
|
||||
@@ -558,7 +570,7 @@ async function* parseAnthropicSseBody(
|
||||
.map((line) => line.slice(5).trimStart())
|
||||
.join("\n");
|
||||
if (data && data !== "[DONE]") {
|
||||
yield JSON.parse(data) as Record<string, unknown>;
|
||||
yield parseAnthropicSseEventData(data);
|
||||
}
|
||||
frameEnd = buffer.indexOf("\n\n");
|
||||
}
|
||||
@@ -571,7 +583,7 @@ async function* parseAnthropicSseBody(
|
||||
.map((line) => line.slice(5).trimStart())
|
||||
.join("\n");
|
||||
if (data && data !== "[DONE]") {
|
||||
yield JSON.parse(data) as Record<string, unknown>;
|
||||
yield parseAnthropicSseEventData(data);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import type { AssistantMessage } from "@mariozechner/pi-ai";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE } from "../shared/assistant-error-format.js";
|
||||
import {
|
||||
BILLING_ERROR_USER_MESSAGE,
|
||||
formatBillingErrorMessage,
|
||||
@@ -7,6 +8,7 @@ import {
|
||||
getApiErrorPayloadFingerprint,
|
||||
formatRawAssistantErrorForUi,
|
||||
isRawApiErrorPayload,
|
||||
sanitizeUserFacingText,
|
||||
} from "./pi-embedded-helpers.js";
|
||||
import { makeAssistantMessageFixture } from "./test-helpers/assistant-message-fixtures.js";
|
||||
|
||||
@@ -349,6 +351,34 @@ describe("formatAssistantErrorText", () => {
|
||||
"LLM request failed: provider returned an invalid streaming response. Please try again.",
|
||||
);
|
||||
});
|
||||
|
||||
it("sanitizes transport-classified malformed streaming fragments (#59076)", () => {
|
||||
const msg = makeAssistantError(MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE);
|
||||
expect(formatAssistantErrorText(msg)).toBe(
|
||||
"LLM streaming response contained a malformed fragment. Please try again.",
|
||||
);
|
||||
});
|
||||
|
||||
it("does not broadly rewrite non-streaming 'Unexpected token' JSON parse errors", () => {
|
||||
const msg = makeAssistantError("Unexpected token < in JSON at position 0");
|
||||
expect(formatAssistantErrorText(msg)).toBe("Unexpected token < in JSON at position 0");
|
||||
});
|
||||
|
||||
it("does not rewrite non-streaming provider JSON request-validation diagnostics", () => {
|
||||
const msg = makeAssistantError("Expected value in JSON at position 12 for messages.0.content");
|
||||
expect(formatAssistantErrorText(msg)).toBe(
|
||||
"Expected value in JSON at position 12 for messages.0.content",
|
||||
);
|
||||
});
|
||||
|
||||
it("keeps provider request-validation JSON diagnostics actionable", () => {
|
||||
const msg = makeAssistantError(
|
||||
'{"type":"error","error":{"type":"invalid_request_error","message":"Expected value in JSON at position 12 for messages.0.content"}}',
|
||||
);
|
||||
expect(formatAssistantErrorText(msg)).toBe(
|
||||
"LLM request rejected: Expected value in JSON at position 12 for messages.0.content",
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("formatRawAssistantErrorForUi", () => {
|
||||
@@ -424,3 +454,21 @@ describe("raw API error payload helpers", () => {
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
// Regression coverage for #59076: the transport sentinel for malformed
// streaming fragments must be rewritten to friendly copy only when the text
// is known to be an error, never when it could be ordinary assistant output.
describe("sanitizeUserFacingText — streaming JSON parse error (#59076)", () => {
  it("rewrites transport-classified malformed streaming fragments in error context", () => {
    // The exact sentinel string, flagged as error context, maps to the
    // user-facing copy rather than the raw transport message.
    const result = sanitizeUserFacingText(MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE, {
      errorContext: true,
    });
    expect(result).toBe("LLM streaming response contained a malformed fragment. Please try again.");
  });

  it("does not rewrite JSON parse error when not in error context", () => {
    // When not in error context, the text could be legitimate assistant content
    // mentioning JSON errors. Don't rewrite.
    const text =
      "Expected ',' or '}' after property value in JSON at position 334 (line 1 column 335)";
    const result = sanitizeUserFacingText(text, { errorContext: false });
    expect(result).toBe(text);
  });
});
|
||||
|
||||
38
src/agents/pi-embedded-helpers/errors.test.ts
Normal file
38
src/agents/pi-embedded-helpers/errors.test.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import type { AssistantMessage } from "@mariozechner/pi-ai";
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE } from "../../shared/assistant-error-format.js";
|
||||
import { makeAssistantMessageFixture } from "../test-helpers/assistant-message-fixtures.js";
|
||||
import { formatAssistantErrorText } from "./errors.js";
|
||||
|
||||
describe("formatAssistantErrorText streaming JSON parse classification", () => {
|
||||
const makeAssistantError = (errorMessage: string): AssistantMessage =>
|
||||
makeAssistantMessageFixture({
|
||||
errorMessage,
|
||||
content: [{ type: "text", text: errorMessage }],
|
||||
});
|
||||
|
||||
it("suppresses transport-classified malformed streaming fragments", () => {
|
||||
const msg = makeAssistantError(MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE);
|
||||
expect(formatAssistantErrorText(msg)).toBe(
|
||||
"LLM streaming response contained a malformed fragment. Please try again.",
|
||||
);
|
||||
});
|
||||
|
||||
it("does not suppress unclassified JSON.parse text", () => {
|
||||
const msg = makeAssistantError(
|
||||
"Expected ',' or '}' after property value in JSON at position 334 (line 1 column 335)",
|
||||
);
|
||||
expect(formatAssistantErrorText(msg)).toBe(
|
||||
"Expected ',' or '}' after property value in JSON at position 334 (line 1 column 335)",
|
||||
);
|
||||
});
|
||||
|
||||
it("keeps non-streaming provider request-validation syntax diagnostics", () => {
|
||||
const msg = makeAssistantError(
|
||||
'{"type":"error","error":{"type":"invalid_request_error","message":"Expected value in JSON at position 12 for messages.0.content"}}',
|
||||
);
|
||||
expect(formatAssistantErrorText(msg)).toBe(
|
||||
"LLM request rejected: Expected value in JSON at position 12 for messages.0.content",
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -46,6 +46,7 @@ import {
|
||||
isInvalidStreamingEventOrderError,
|
||||
isLikelyHttpErrorText,
|
||||
isRawApiErrorPayload,
|
||||
isStreamingJsonParseError,
|
||||
sanitizeUserFacingText,
|
||||
} from "./sanitize-user-facing-text.js";
|
||||
import type { FailoverReason } from "./types.js";
|
||||
@@ -1139,6 +1140,10 @@ export function formatAssistantErrorText(
|
||||
return formatRawAssistantErrorForUi(raw);
|
||||
}
|
||||
|
||||
if (isStreamingJsonParseError(raw)) {
|
||||
return "LLM streaming response contained a malformed fragment. Please try again.";
|
||||
}
|
||||
|
||||
// Never return raw unhandled errors - log for debugging but return safe message
|
||||
if (raw.length > 600) {
|
||||
log.warn(`Long error truncated: ${raw.slice(0, 200)}`);
|
||||
|
||||
@@ -3,6 +3,7 @@ import {
|
||||
extractLeadingHttpStatus,
|
||||
formatRawAssistantErrorForUi,
|
||||
isCloudflareOrHtmlErrorPage,
|
||||
MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE,
|
||||
parseApiErrorInfo,
|
||||
parseApiErrorPayload,
|
||||
} from "../../shared/assistant-error-format.js";
|
||||
@@ -209,6 +210,17 @@ export function isInvalidStreamingEventOrderError(raw: string): boolean {
|
||||
);
|
||||
}
|
||||
|
||||
export function isStreamingJsonParseError(raw: string): boolean {
|
||||
if (!raw) {
|
||||
return false;
|
||||
}
|
||||
const trimmed = raw.trim();
|
||||
if (trimmed === MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function hasRateLimitTpmHint(raw: string): boolean {
|
||||
const lower = normalizeLowercaseStringOrEmpty(raw);
|
||||
return /\btpm\b/i.test(lower) || lower.includes("tokens per minute");
|
||||
@@ -419,6 +431,10 @@ export function sanitizeUserFacingText(text: unknown, opts?: { errorContext?: bo
|
||||
return formatRawAssistantErrorForUi(trimmed);
|
||||
}
|
||||
|
||||
if (isStreamingJsonParseError(trimmed)) {
|
||||
return "LLM streaming response contained a malformed fragment. Please try again.";
|
||||
}
|
||||
|
||||
if (ERROR_PREFIX_RE.test(trimmed)) {
|
||||
const prefixedCopy = formatRateLimitOrOverloadedErrorCopy(trimmed);
|
||||
if (prefixedCopy) {
|
||||
|
||||
@@ -20,7 +20,6 @@ type TransportOutputShape = {
|
||||
};
|
||||
|
||||
// Placeholder substituted when a tool invocation produced no output text.
export const EMPTY_TOOL_RESULT_TEXT = "(no output)";
|
||||
|
||||
export function sanitizeTransportPayloadText(text: string): string {
|
||||
return text.replace(
|
||||
/[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?<![\uD800-\uDBFF])[\uDC00-\uDFFF]/g,
|
||||
|
||||
@@ -15,6 +15,11 @@ const CLOUDFLARE_HTML_ERROR_CODES = new Set([521, 522, 523, 524, 525, 526, 530])
|
||||
const STANDALONE_HTML_ERROR_HINT_RE =
|
||||
/\bcloudflare\b|cdn-cgi\/challenge-platform|challenge-error-text|enable javascript and cookies to continue|access denied|forbidden|service unavailable|bad gateway|web server is down|captcha|attention required/i;
|
||||
|
||||
// Stable machine-readable sentinel the transport layer throws when a
// streaming JSON fragment fails to parse (#59076); matched exactly by the
// error classifiers, so its text must never change casually.
export const MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE =
  "OpenClaw transport error: malformed_streaming_fragment";
// Friendly copy surfaced to users whenever the sentinel above is detected.
export const MALFORMED_STREAMING_FRAGMENT_USER_MESSAGE =
  "LLM streaming response contained a malformed fragment. Please try again.";
|
||||
|
||||
type ErrorPayload = Record<string, unknown>;
|
||||
|
||||
export type ApiErrorInfo = {
|
||||
@@ -188,6 +193,10 @@ export function formatRawAssistantErrorForUi(raw?: string): string {
|
||||
return "LLM request failed with an unknown error.";
|
||||
}
|
||||
|
||||
if (trimmed === MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE) {
|
||||
return MALFORMED_STREAMING_FRAGMENT_USER_MESSAGE;
|
||||
}
|
||||
|
||||
const leadingStatus = extractLeadingHttpStatus(trimmed);
|
||||
const isHtmlChallenge = isCloudflareOrHtmlErrorPage(trimmed);
|
||||
if (leadingStatus && isHtmlChallenge) {
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE } from "../shared/assistant-error-format.js";
|
||||
import { createEventHandlers } from "./tui-event-handlers.js";
|
||||
import type { AgentEvent, BtwEvent, ChatEvent, TuiStateAccess } from "./tui-types.js";
|
||||
|
||||
@@ -753,6 +754,42 @@ describe("tui-event-handlers: handleAgentEvent", () => {
|
||||
expect(chatLog.dropAssistant).not.toHaveBeenCalledWith("run-error-envelope");
|
||||
});
|
||||
|
||||
it("renders malformed streaming fragment text when chat final only has event errorMessage", () => {
|
||||
const { state, chatLog, handleChatEvent } = createHandlersHarness({
|
||||
state: { activeChatRunId: null },
|
||||
});
|
||||
|
||||
handleChatEvent({
|
||||
runId: "run-malformed-final",
|
||||
sessionKey: state.currentSessionKey,
|
||||
state: "final",
|
||||
message: { content: [] },
|
||||
errorMessage: MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE,
|
||||
});
|
||||
|
||||
expect(chatLog.finalizeAssistant).toHaveBeenCalledWith(
|
||||
"LLM streaming response contained a malformed fragment. Please try again.",
|
||||
"run-malformed-final",
|
||||
);
|
||||
});
|
||||
|
||||
it("renders malformed streaming fragment text for chat error events", () => {
|
||||
const { state, chatLog, handleChatEvent } = createHandlersHarness({
|
||||
state: { activeChatRunId: null },
|
||||
});
|
||||
|
||||
handleChatEvent({
|
||||
runId: "run-malformed-error",
|
||||
sessionKey: state.currentSessionKey,
|
||||
state: "error",
|
||||
errorMessage: MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE,
|
||||
});
|
||||
|
||||
expect(chatLog.addSystem).toHaveBeenCalledWith(
|
||||
"run error: LLM streaming response contained a malformed fragment. Please try again.",
|
||||
);
|
||||
});
|
||||
|
||||
it("shows a concise /auth hint for local auth failures", () => {
|
||||
const { chatLog, handleChatEvent } = createHandlersHarness({
|
||||
localMode: true,
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { isAuthErrorMessage } from "../agents/pi-embedded-helpers.js";
|
||||
import { parseAgentSessionKey } from "../sessions/session-key-utils.js";
|
||||
import { formatRawAssistantErrorForUi } from "../shared/assistant-error-format.js";
|
||||
import { normalizeLowercaseStringOrEmpty } from "../shared/string-coerce.js";
|
||||
import { asString, extractTextFromMessage, isCommandMessage } from "./tui-formatters.js";
|
||||
import { TuiStreamAssembler } from "./tui-stream-assembler.js";
|
||||
@@ -450,7 +451,8 @@ export function createEventHandlers(context: EventHandlerContext) {
|
||||
forgetLocalBtwRunId?.(evt.runId);
|
||||
const wasActiveRun = state.activeChatRunId === evt.runId;
|
||||
const errorMessage = evt.errorMessage ?? "unknown";
|
||||
chatLog.addSystem(resolveAuthErrorHint(errorMessage) ?? `run error: ${errorMessage}`);
|
||||
const renderedError = formatRawAssistantErrorForUi(errorMessage);
|
||||
chatLog.addSystem(resolveAuthErrorHint(errorMessage) ?? `run error: ${renderedError}`);
|
||||
terminateRun({ runId: evt.runId, wasActiveRun, status: "error" });
|
||||
maybeRefreshHistoryForRun(evt.runId);
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE } from "../shared/assistant-error-format.js";
|
||||
import {
|
||||
extractContentFromMessage,
|
||||
extractTextFromMessage,
|
||||
@@ -42,6 +43,17 @@ describe("extractTextFromMessage", () => {
|
||||
expect(text).toContain("This request would exceed your account's rate limit.");
|
||||
});
|
||||
|
||||
it("renders malformed streaming fragment errors with friendly text", () => {
|
||||
const text = extractTextFromMessage({
|
||||
role: "assistant",
|
||||
content: [],
|
||||
stopReason: "error",
|
||||
errorMessage: MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE,
|
||||
});
|
||||
|
||||
expect(text).toBe("LLM streaming response contained a malformed fragment. Please try again.");
|
||||
});
|
||||
|
||||
it("falls back to a generic message when errorMessage is missing", () => {
|
||||
const text = extractTextFromMessage({
|
||||
role: "assistant",
|
||||
@@ -275,6 +287,16 @@ describe("extractContentFromMessage", () => {
|
||||
|
||||
expect(text).toContain("HTTP 429");
|
||||
});
|
||||
|
||||
it("formats malformed streaming fragment errors when content is not an array", () => {
|
||||
const text = extractContentFromMessage({
|
||||
role: "assistant",
|
||||
stopReason: "error",
|
||||
errorMessage: MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE,
|
||||
});
|
||||
|
||||
expect(text).toBe("LLM streaming response contained a malformed fragment. Please try again.");
|
||||
});
|
||||
});
|
||||
|
||||
describe("isCommandMessage", () => {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
import type { OpenClawConfig } from "../config/config.js";
|
||||
import { MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE } from "../shared/assistant-error-format.js";
|
||||
import { getSlashCommands, parseCommand } from "./commands.js";
|
||||
import {
|
||||
createBackspaceDeduper,
|
||||
@@ -40,6 +41,16 @@ describe("resolveFinalAssistantText", () => {
|
||||
}),
|
||||
).toContain("HTTP 401");
|
||||
});
|
||||
|
||||
it("formats malformed streaming fragment errors when final and streamed text are empty", () => {
|
||||
expect(
|
||||
resolveFinalAssistantText({
|
||||
finalText: "",
|
||||
streamedText: "",
|
||||
errorMessage: MALFORMED_STREAMING_FRAGMENT_ERROR_MESSAGE,
|
||||
}),
|
||||
).toBe("LLM streaming response contained a malformed fragment. Please try again.");
|
||||
});
|
||||
});
|
||||
|
||||
describe("tui slash commands", () => {
|
||||
|
||||
Reference in New Issue
Block a user