Reply: fix followup compaction notices

Route followup compaction start and completion notices through the same reply-threading path used by normal replies, add focused coverage for non-verbose followup notices, and add the required changelog entry.

Regeneration-Prompt: |
  Prepare PR #38805 for merge after review found that followup turns still used the old verbose-only compaction completion path. Preserve the earlier main-path fixes for TTS suppression and reply threading, then make the followup runner send a start notice and a completion notice for both verbose and non-verbose sessions. The notices need to carry isCompactionNotice metadata so replyToMode=first still threads the real assistant reply, and the change should stay narrowly scoped to followup delivery plus focused tests and the required changelog entry.
This commit is contained in:
Josh Lehman
2026-03-20 19:41:14 -07:00
parent b14f425061
commit 0f48c1bbf6
3 changed files with 151 additions and 37 deletions

View File

@@ -58,6 +58,7 @@ Docs: https://docs.openclaw.ai
- Telegram/topics: auto-rename DM forum topics on first message with LLM-generated labels, with per-account and per-DM `autoTopicLabel` overrides. (#51502) Thanks @Lukavyi.
- Docs/plugins: add the community wecom plugin listing to the docs catalog. (#29905) Thanks @sliverp.
- Models/GitHub Copilot: allow forward-compat dynamic model ids without code updates, while preserving configured provider and per-model overrides for those synthetic models. (#51325) Thanks @fuller-stack-dev.
- Agents/compaction: notify users when followup auto-compaction starts and finishes, keeping those notices out of TTS and preserving reply threading for the real assistant reply. (#38805) Thanks @zidongdesign.
### Fixes

View File

@@ -70,6 +70,10 @@ function mockCompactionRun(params: {
async (args: {
onAgentEvent?: (evt: { stream: string; data: Record<string, unknown> }) => void;
}) => {
args.onAgentEvent?.({
stream: "compaction",
data: { phase: "start" },
});
args.onAgentEvent?.({
stream: "compaction",
data: { phase: "end", willRetry: params.willRetry, completed: true },
@@ -84,7 +88,7 @@ function createAsyncReplySpy() {
}
describe("createFollowupRunner compaction", () => {
it("adds verbose auto-compaction notice and tracks count", async () => {
it("adds compaction notices and tracks count in verbose mode", async () => {
const storePath = path.join(
await fs.mkdtemp(path.join(tmpdir(), "openclaw-compaction-")),
"sessions.json",
@@ -122,9 +126,15 @@ describe("createFollowupRunner compaction", () => {
await runner(queued);
expect(onBlockReply).toHaveBeenCalled();
const firstCall = (onBlockReply.mock.calls as unknown as Array<Array<{ text?: string }>>)[0];
expect(firstCall?.[0]?.text).toContain("Auto-compaction complete");
expect(onBlockReply).toHaveBeenCalledTimes(3);
const calls = onBlockReply.mock.calls as unknown as Array<
Array<{ text?: string; isCompactionNotice?: boolean }>
>;
expect(calls[0]?.[0]?.text).toBe("🧹 Compacting context...");
expect(calls[0]?.[0]?.isCompactionNotice).toBe(true);
expect(calls[1]?.[0]?.text).toContain("Auto-compaction complete");
expect(calls[1]?.[0]?.isCompactionNotice).toBe(true);
expect(calls[2]?.[0]?.text).toBe("final");
expect(sessionStore.main.compactionCount).toBe(1);
});
@@ -171,12 +181,84 @@ describe("createFollowupRunner compaction", () => {
await runner(queued);
expect(onBlockReply).toHaveBeenCalled();
const firstCall = (onBlockReply.mock.calls as unknown as Array<Array<{ text?: string }>>)[0];
expect(firstCall?.[0]?.text).toContain("Auto-compaction complete");
expect(onBlockReply).toHaveBeenCalledTimes(2);
const calls = onBlockReply.mock.calls as unknown as Array<
Array<{ text?: string; isCompactionNotice?: boolean }>
>;
expect(calls[0]?.[0]?.text).toContain("Auto-compaction complete");
expect(calls[0]?.[0]?.isCompactionNotice).toBe(true);
expect(calls[1]?.[0]?.text).toBe("final");
expect(sessionStore.main.compactionCount).toBe(2);
});
it("threads followup compaction notices without consuming the first reply slot", async () => {
// Fresh temp session store so this case cannot observe state from other tests.
const tempDir = await fs.mkdtemp(path.join(tmpdir(), "openclaw-compaction-threading-"));
const storePath = path.join(tempDir, "sessions.json");
const sessionEntry: SessionEntry = {
sessionId: "session",
updatedAt: Date.now(),
};
const sessionStore: Record<string, SessionEntry> = {
main: sessionEntry,
};
const onBlockReply = vi.fn(async () => {});
// Compaction fires once (with a retry flag) and the agent then produces a
// single real assistant reply.
mockCompactionRun({
willRetry: true,
result: { payloads: [{ text: "final" }], meta: {} },
});
const runner = createFollowupRunner({
opts: { onBlockReply },
typing: createMockTypingController(),
typingMode: "instant",
sessionEntry,
sessionStore,
sessionKey: "main",
storePath,
defaultModel: "anthropic/claude-opus-4-5",
});
// Non-verbose Discord run with replyToMode=first: only one reply may claim
// the "first" threading slot, and it must be the real assistant reply.
const queued = createQueuedRun({
messageId: "msg-42",
run: {
messageProvider: "discord",
config: {
channels: {
discord: {
replyToMode: "first",
},
},
},
verboseLevel: "off",
},
});
await runner(queued);
expect(onBlockReply).toHaveBeenCalledTimes(3);
const replyArgs = onBlockReply.mock.calls as unknown as Array<
Array<{ text?: string; replyToId?: string; isCompactionNotice?: boolean }>
>;
const [startNotice, completionNotice, assistantReply] = replyArgs.map(
(call) => call?.[0],
);
// Both compaction notices thread to the triggering message and carry the
// notice marker so they do not consume the first-reply slot.
expect(startNotice).toMatchObject({
text: "🧹 Compacting context...",
replyToId: "msg-42",
isCompactionNotice: true,
});
expect(completionNotice).toMatchObject({
text: "✅ Context compacted (count 1).",
replyToId: "msg-42",
isCompactionNotice: true,
});
// The real assistant reply still threads, without the notice marker.
expect(assistantReply).toMatchObject({
text: "final",
replyToId: "msg-42",
});
expect(assistantReply?.isCompactionNotice).toBeUndefined();
});
it("does not count failed compaction end events in followup runs", async () => {
const storePath = path.join(
await fs.mkdtemp(path.join(tmpdir(), "openclaw-compaction-failed-")),

View File

@@ -148,6 +148,43 @@ export function createFollowupRunner(params: {
isControlUiVisible: shouldSurfaceToControlUi,
});
}
// Resolve the channel the followup reply should target.
// NOTE(review): resolveOriginMessageProvider's fallback semantics are defined
// elsewhere in this file — only its inputs (originating channel + run
// provider) are visible here; confirm the cast is safe against that helper.
const replyToChannel = resolveOriginMessageProvider({
originatingChannel: queued.originatingChannel,
provider: queued.run.messageProvider,
}) as OriginatingChannelType | undefined;
// Reply-to mode (e.g. "first") can be overridden per channel, account, and
// chat type in the run config.
const replyToMode = resolveReplyToMode(
queued.run.config,
replyToChannel,
queued.originatingAccountId,
queued.originatingChatType,
);
// Normalize an empty/whitespace message id to undefined so the threading
// helper treats "no originating message" uniformly.
const currentMessageId = queued.messageId?.trim() || undefined;
// Single shared threading pass, used for both compaction notices and the
// final assistant payloads, so notices are tagged consistently and do not
// consume the replyToMode=first slot meant for the real reply.
const applyFollowupReplyThreading = (payloads: ReplyPayload[]) =>
applyReplyThreading({
payloads,
replyToMode,
replyToChannel,
currentMessageId,
});
// Best-effort delivery of a compaction status notice. The payload is marked
// with isCompactionNotice so downstream consumers (threading, TTS) can
// special-case it; delivery failures are logged and swallowed because a lost
// notice must not abort the followup run itself.
const sendCompactionNotice = async (text: string) => {
const noticePayloads = applyFollowupReplyThreading([
{
text,
replyToCurrent: true,
isCompactionNotice: true,
},
]);
// Threading may filter everything out (e.g. channel suppresses notices).
if (noticePayloads.length === 0) {
return;
}
try {
await sendFollowupPayloads(noticePayloads, queued);
} catch (err) {
logVerbose(
`followup queue: compaction notice delivery failed (non-fatal): ${String(err)}`,
);
}
};
let autoCompactionCount = 0;
let runResult: Awaited<ReturnType<typeof runEmbeddedPiAgent>>;
let fallbackProvider = queued.run.provider;
@@ -229,6 +266,9 @@ export function createFollowupRunner(params: {
return;
}
const phase = typeof evt.data.phase === "string" ? evt.data.phase : "";
if (phase === "start") {
void sendCompactionNotice("🧹 Compacting context...");
}
const completed = evt.data?.completed === true;
if (phase === "end" && completed) {
attemptCompactionCount += 1;
@@ -284,9 +324,6 @@ export function createFollowupRunner(params: {
}
const payloadArray = runResult.payloads ?? [];
if (payloadArray.length === 0) {
return;
}
const sanitizedPayloads = payloadArray.flatMap((payload) => {
const text = payload.text;
if (!text || !text.includes("HEARTBEAT_OK")) {
@@ -299,22 +336,7 @@ export function createFollowupRunner(params: {
}
return [{ ...payload, text: stripped.text }];
});
const replyToChannel = resolveOriginMessageProvider({
originatingChannel: queued.originatingChannel,
provider: queued.run.messageProvider,
}) as OriginatingChannelType | undefined;
const replyToMode = resolveReplyToMode(
queued.run.config,
replyToChannel,
queued.originatingAccountId,
queued.originatingChatType,
);
const replyTaggedPayloads: ReplyPayload[] = applyReplyThreading({
payloads: sanitizedPayloads,
replyToMode,
replyToChannel,
});
const replyTaggedPayloads = applyFollowupReplyThreading(sanitizedPayloads);
const dedupedPayloads = filterMessagingToolDuplicates({
payloads: replyTaggedPayloads,
@@ -338,11 +360,7 @@ export function createFollowupRunner(params: {
accountId: queued.run.agentAccountId,
}),
});
const finalPayloads = suppressMessagingToolReplies ? [] : mediaFilteredPayloads;
if (finalPayloads.length === 0) {
return;
}
let finalPayloads = suppressMessagingToolReplies ? [] : mediaFilteredPayloads;
if (autoCompactionCount > 0) {
const count = await incrementRunCompactionCount({
@@ -354,12 +372,25 @@ export function createFollowupRunner(params: {
lastCallUsage: runResult.meta?.agentMeta?.lastCallUsage,
contextTokensUsed,
});
if (queued.run.verboseLevel && queued.run.verboseLevel !== "off") {
const suffix = typeof count === "number" ? ` (count ${count})` : "";
finalPayloads.unshift({
text: `🧹 Auto-compaction complete${suffix}.`,
});
}
const suffix = typeof count === "number" ? ` (count ${count})` : "";
const completionText =
queued.run.verboseLevel && queued.run.verboseLevel !== "off"
? `🧹 Auto-compaction complete${suffix}.`
: `✅ Context compacted${suffix}.`;
finalPayloads = [
...applyFollowupReplyThreading([
{
text: completionText,
replyToCurrent: true,
isCompactionNotice: true,
},
]),
...finalPayloads,
];
}
if (finalPayloads.length === 0) {
return;
}
await sendFollowupPayloads(finalPayloads, queued);