fix(discord): show reasoning text in progress drafts (#78050)

* fix(discord): show reasoning text in progress drafts

* fix(discord): handle reasoning progress snapshots

* test: isolate usage-format models fixture
This commit is contained in:
Peter Steinberger
2026-05-05 21:57:00 +01:00
committed by GitHub
parent 01dda73e9b
commit d94e7f5114
7 changed files with 177 additions and 6 deletions

View File

@@ -86,6 +86,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Discord/streaming: show live reasoning text in progress drafts instead of a bare `Reasoning` status line.
- Doctor/status: warn when `OPENCLAW_GATEWAY_TOKEN` would shadow a different active `gateway.auth.token` source for local CLI commands, while avoiding false positives when config points at the same env token. Fixes #74271. Thanks @yelog.
- Gateway/OpenAI-compatible: send the assistant role SSE chunk as soon as streaming chat-completion headers are accepted, so cold agent setup cannot leave `/v1/chat/completions` clients with a bodyless 200 response until their idle timeout fires.
- Agents/media: avoid direct generated-media completion fallback while the announce-agent run is still pending, so async video and music completions do not duplicate raw media messages. (#77754)

View File

@@ -82,6 +82,8 @@ export function createDiscordDraftPreviewController(params: {
});
let previewToolProgressSuppressed = false;
let previewToolProgressLines: string[] = [];
let reasoningProgressRawText = "";
let lastReasoningProgressLine: string | undefined;
const progressSeed = `${params.accountId}:${params.deliverChannelId}`;
const renderProgressDraft = async (options?: { flush?: boolean }) => {
@@ -116,6 +118,8 @@ export function createDiscordDraftPreviewController(params: {
draftChunker?.reset();
previewToolProgressSuppressed = false;
previewToolProgressLines = [];
reasoningProgressRawText = "";
lastReasoningProgressLine = undefined;
};
const forceNewMessageIfNeeded = () => {
@@ -163,8 +167,11 @@ export function createDiscordDraftPreviewController(params: {
return;
}
const normalized = line?.replace(/\s+/g, " ").trim();
if (!normalized) {
return;
}
if (discordStreamMode !== "progress") {
if (!previewToolProgressEnabled || previewToolProgressSuppressed || !normalized) {
if (!previewToolProgressEnabled || previewToolProgressSuppressed) {
return;
}
const previous = previewToolProgressLines.at(-1);
@@ -200,6 +207,36 @@ export function createDiscordDraftPreviewController(params: {
await renderProgressDraft();
}
},
// Fold a streamed reasoning chunk (delta or snapshot) into the progress
// draft so Discord shows the live reasoning text instead of a bare
// "Reasoning" status line.
async pushReasoningProgress(text?: string) {
// Only applies in progress-mode streaming with an active draft and a
// non-empty chunk.
if (!draftStream || discordStreamMode !== "progress" || !text) {
return;
}
// Merge the chunk into the raw accumulator (snapshots replace the buffer,
// deltas append), then flatten the result to a single display line.
reasoningProgressRawText = mergeReasoningProgressText(reasoningProgressRawText, text);
const normalized = normalizeReasoningProgressLine(reasoningProgressRawText);
if (!normalized) {
return;
}
if (previewToolProgressEnabled && !previewToolProgressSuppressed) {
// If the previously rendered reasoning line is still present, update it
// in place; otherwise append, capped at the configured max line count.
const priorIndex =
lastReasoningProgressLine === undefined
? -1
: previewToolProgressLines.lastIndexOf(lastReasoningProgressLine);
if (priorIndex >= 0) {
// Reassign a fresh array rather than mutating the existing one.
previewToolProgressLines = [...previewToolProgressLines];
previewToolProgressLines[priorIndex] = normalized;
} else {
previewToolProgressLines = [...previewToolProgressLines, normalized].slice(
-resolveChannelProgressDraftMaxLines(params.discordConfig),
);
}
lastReasoningProgressLine = normalized;
}
// Record draft work; only re-render when the gate was already open before
// this event (and is still open after noting the work).
const alreadyStarted = progressDraftGate.hasStarted;
await progressDraftGate.noteWork();
if (alreadyStarted && progressDraftGate.hasStarted) {
await renderProgressDraft();
}
},
resolvePreviewFinalText(text?: string) {
if (typeof text !== "string") {
return undefined;
@@ -329,3 +366,29 @@ export function createDiscordDraftPreviewController(params: {
},
};
}
function normalizeReasoningProgressLine(text: string): string {
  // Strip an optional leading blockquote marker plus the "Reasoning:" label
  // (case-insensitive), then collapse every whitespace run — including
  // newlines — into a single space so the text fits on one draft line.
  const withoutLabel = text.replace(/^\s*(?:>\s*)?Reasoning:\s*/i, "");
  return withoutLabel.replace(/\s+/g, " ").trim();
}
function mergeReasoningProgressText(current: string, incoming: string): string {
  // First chunk of the stream: nothing accumulated yet, take it verbatim.
  if (!current) {
    return incoming;
  }
  const currentNorm = normalizeReasoningProgressLine(current);
  const incomingNorm = normalizeReasoningProgressLine(incoming);
  // Chunks with no visible content, or exact repeats of what we already
  // have, leave the buffer untouched.
  if (!incomingNorm || incomingNorm === currentNorm) {
    return current;
  }
  // A labelled snapshot — or a chunk that already subsumes the accumulated
  // text — replaces the buffer instead of being appended, so snapshot-style
  // providers do not produce duplicated text.
  const replacesBuffer =
    isReasoningSnapshotText(incoming) || incomingNorm.startsWith(currentNorm);
  return replacesBuffer ? incoming : `${current}${incoming}`;
}
function isReasoningSnapshotText(text: string): boolean {
  // Snapshot chunks restate the full reasoning text and start with a
  // (possibly block-quoted) "Reasoning:" label; incremental deltas do not.
  const reasoningLabel = /^\s*(?:>\s*)?Reasoning:\s*/i;
  return reasoningLabel.test(text);
}

View File

@@ -96,7 +96,7 @@ type DispatchInboundParams = {
sendFinalReply: (payload: ReplyPayload) => boolean | Promise<boolean>;
};
replyOptions?: {
onReasoningStream?: () => Promise<void> | void;
onReasoningStream?: (payload?: { text?: string }) => Promise<void> | void;
onReasoningEnd?: () => Promise<void> | void;
onToolStart?: (payload: {
name?: string;
@@ -105,6 +105,7 @@ type DispatchInboundParams = {
detailMode?: "explain" | "raw";
}) => Promise<void> | void;
onItemEvent?: (payload: {
kind?: string;
progressText?: string;
summary?: string;
title?: string;
@@ -1616,6 +1617,72 @@ describe("processDiscordMessage draft streaming", () => {
expect(draftStream.update).toHaveBeenCalledWith("Shelling\n🛠 Exec\n• done");
});
// The progress draft should surface the streamed reasoning text as a live
// line and never render the bare "Reasoning" label by itself.
it("shows reasoning text instead of a bare Reasoning progress line", async () => {
const draftStream = createMockDraftStreamForTest();
dispatchInboundMessage.mockImplementationOnce(async (params?: DispatchInboundParams) => {
// Simulate a tool start, an empty "Reasoning" analysis item (no
// progressText), and two incremental reasoning text deltas.
await params?.replyOptions?.onToolStart?.({ name: "exec", phase: "start" });
await params?.replyOptions?.onItemEvent?.({
kind: "analysis",
title: "Reasoning",
});
await params?.replyOptions?.onReasoningStream?.({ text: "Reading " });
await params?.replyOptions?.onReasoningStream?.({ text: "the event projector" });
return createNoQueuedDispatchResult();
});
const ctx = await createAutomaticSourceDeliveryContext({
discordConfig: {
streaming: {
mode: "progress",
progress: {
label: "Clawing...",
},
},
},
});
await runProcessDiscordMessage(ctx);
// The two deltas are concatenated into a single progress line, and no
// draft update ever contains the word "Reasoning".
expect(draftStream.update).toHaveBeenCalledWith(
"Clawing...\n🛠 Exec\n• Reading the event projector",
);
expect(draftStream.update).not.toHaveBeenCalledWith(expect.stringContaining("Reasoning"));
});
// Snapshot-style chunks (prefixed with "Reasoning:") restate the whole
// reasoning text, so a newer snapshot must replace the older one in the
// draft rather than being appended after it.
it("replaces reasoning snapshots instead of appending duplicates", async () => {
const draftStream = createMockDraftStreamForTest();
dispatchInboundMessage.mockImplementationOnce(async (params?: DispatchInboundParams) => {
await params?.replyOptions?.onToolStart?.({ name: "exec", phase: "start" });
// Two successive snapshots where the second extends the first.
await params?.replyOptions?.onReasoningStream?.({ text: "Reasoning:\n_Checking files_" });
await params?.replyOptions?.onReasoningStream?.({
text: "Reasoning:\n_Checking files and tests_",
});
return createNoQueuedDispatchResult();
});
const ctx = await createAutomaticSourceDeliveryContext({
discordConfig: {
streaming: {
mode: "progress",
progress: {
label: "Clawing...",
},
},
},
});
await runProcessDiscordMessage(ctx);
// Only the latest snapshot appears; the draft never shows the first
// snapshot concatenated with the second.
expect(draftStream.update).toHaveBeenCalledWith(
"Clawing...\n🛠 Exec\n• _Checking files and tests_",
);
expect(draftStream.update).not.toHaveBeenCalledWith(
expect.stringContaining("_Checking files_Reasoning:"),
);
});
it("keeps Discord progress lines across assistant boundaries", async () => {
const draftStream = createMockDraftStreamForTest();

View File

@@ -660,8 +660,9 @@ export async function processDiscordMessage(
onModelSelected,
suppressDefaultToolProgressMessages:
draftPreview.suppressDefaultToolProgressMessages ? true : undefined,
onReasoningStream: async () => {
onReasoningStream: async (payload) => {
await statusReactions.setThinking();
await draftPreview.pushReasoningProgress(payload?.text);
},
onToolStart: async (payload) => {
if (isProcessAborted(abortSignal)) {

View File

@@ -337,6 +337,21 @@ describe("channel-streaming", () => {
},
),
).toBe("🛠️ Exec");
expect(
formatChannelProgressDraftLine({
event: "item",
itemKind: "analysis",
title: "Reasoning",
}),
).toBeUndefined();
expect(
formatChannelProgressDraftLine({
event: "item",
itemKind: "analysis",
title: "Reasoning",
progressText: "Reading the code path",
}),
).toBe("Reading the code path");
});
it("starts progress drafts after five seconds or a second work event", async () => {

View File

@@ -275,6 +275,17 @@ function isCommandProgressItem(input: Extract<ChannelProgressDraftLineInput, { e
return itemKind === "command" || isCommandToolName(input.name);
}
function isEmptyReasoningProgressItem(
  input: Extract<ChannelProgressDraftLineInput, { event: "item" }>,
  meta: string | undefined,
): boolean {
  // A "Reasoning" analysis item that carries no progress text would render
  // as a bare label line, so the caller suppresses it entirely.
  if (meta) {
    return false;
  }
  const kind = normalizeOptionalLowercaseString(input.itemKind);
  const title = normalizeOptionalLowercaseString(input.title);
  return kind === "analysis" && title === "reasoning";
}
function patchMetas(input: Extract<ChannelProgressDraftLineInput, { event: "patch" }>): string[] {
const fileMetas = [...(input.added ?? []), ...(input.modified ?? []), ...(input.deleted ?? [])];
return compactStrings([input.summary, ...fileMetas, input.title]);
@@ -346,6 +357,9 @@ export function buildChannelProgressDraftLine(
(options?.commandText === "status" && isCommandProgressItem(input)
? undefined
: input.progressText);
if (isEmptyReasoningProgressItem(input, meta)) {
return undefined;
}
if (name) {
return buildNamedProgressLine(input.event, name, [meta], options, {
status: input.status,

View File

@@ -18,11 +18,16 @@ import {
describe("usage-format", () => {
const originalAgentDir = process.env.OPENCLAW_AGENT_DIR;
const originalStateDir = process.env.OPENCLAW_STATE_DIR;
let stateDir: string;
let agentDir: string;
beforeEach(async () => {
agentDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-usage-format-"));
process.env.OPENCLAW_AGENT_DIR = agentDir;
stateDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-usage-format-"));
agentDir = path.join(stateDir, "agents", "main", "agent");
process.env.OPENCLAW_STATE_DIR = stateDir;
delete process.env.OPENCLAW_AGENT_DIR;
await fs.mkdir(agentDir, { recursive: true });
__resetUsageFormatCachesForTest();
__resetGatewayModelPricingCacheForTest();
});
@@ -33,9 +38,14 @@ describe("usage-format", () => {
} else {
process.env.OPENCLAW_AGENT_DIR = originalAgentDir;
}
if (originalStateDir === undefined) {
delete process.env.OPENCLAW_STATE_DIR;
} else {
process.env.OPENCLAW_STATE_DIR = originalStateDir;
}
__resetUsageFormatCachesForTest();
__resetGatewayModelPricingCacheForTest();
await fs.rm(agentDir, { recursive: true, force: true });
await fs.rm(stateDir, { recursive: true, force: true });
});
it("formats token counts", () => {