fix: tighten delivery mirror dedupe (#67185) (thanks @andyylin)

This commit is contained in:
Peter Steinberger
2026-04-16 16:49:47 +01:00
parent e95efa4373
commit d842ec4179
3 changed files with 73 additions and 4 deletions

View File

@@ -46,6 +46,7 @@ Docs: https://docs.openclaw.ai
- OpenAI Codex/CLI: keep resumed `codex exec resume` runs on the safe non-interactive path without reintroducing the removed dangerous bypass flag by passing the supported `--skip-git-repo-check` resume arg that real Codex CLI requires outside trusted git directories. (#67666) Thanks @plgonzalezrx8.
- Codex/app-server: parse Desktop-originated app-server user agents such as `Codex Desktop/0.118.0`, keeping the version gate working when the Codex CLI inherits a multi-word originator. (#64666) Thanks @cyrusaf.
- Cron/announce delivery: keep isolated announce `NO_REPLY` stripping case-insensitive across direct and text delivery, preserve structured media-only sends when a caption strips silent, and derive main-session awareness from the cleaned payloads so silent captions no longer leak stale `NO_REPLY` text. (#65016) Thanks @BKF-Gitty.
- Sessions/Codex: skip redundant `delivery-mirror` transcript appends only when the latest assistant message has the same visible text, preventing duplicate visible replies on Codex-backed turns without suppressing repeated answers across turns. (#67185) Thanks @andyylin.
## 2026.4.15-beta.1

View File

@@ -164,6 +164,76 @@ describe("appendAssistantMessageToSessionTranscript", () => {
}
});
// Regression test for #67185: the delivery-mirror dedupe must compare only
// against the LATEST assistant message in the transcript, not any older
// matching one. Sequence: append "Repeated answer", then "Different latest
// answer", then mirror "Repeated answer" again — the mirror append must
// create a fresh transcript entry instead of being dropped as a duplicate
// of the older (no-longer-latest) message.
it("does not reuse an older matching assistant message across turns", async () => {
writeTranscriptStore();
// Turn 1: an exact assistant message whose visible text is "Repeated answer".
const olderResult = await appendExactAssistantMessageToSessionTranscript({
sessionKey,
storePath: fixture.storePath(),
message: {
role: "assistant",
content: [{ type: "text", text: "Repeated answer" }],
api: "openai-responses",
provider: "codex",
model: "gpt-5.4",
// Zeroed usage/cost — accounting values are irrelevant to dedupe behavior.
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
},
stopReason: "stop",
timestamp: Date.now(),
},
});
// Turn 2: a different latest message, so "Repeated answer" is no longer
// the most recent assistant entry in the transcript.
const latestResult = await appendExactAssistantMessageToSessionTranscript({
sessionKey,
storePath: fixture.storePath(),
message: {
role: "assistant",
content: [{ type: "text", text: "Different latest answer" }],
api: "openai-responses",
provider: "codex",
model: "gpt-5.4",
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
},
stopReason: "stop",
timestamp: Date.now(),
},
});
// Delivery-mirror append of "Repeated answer": its text matches the OLDER
// message but not the latest one, so it must not be treated as redundant.
const mirrorResult = await appendAssistantMessageToSessionTranscript({
sessionKey,
text: "Repeated answer",
storePath: fixture.storePath(),
});
// All three appends must report success.
expect(olderResult.ok).toBe(true);
expect(latestResult.ok).toBe(true);
expect(mirrorResult.ok).toBe(true);
if (olderResult.ok && latestResult.ok && mirrorResult.ok) {
// The mirrored message is a fresh entry — it must not reuse either
// earlier message id (reuse of olderResult.messageId was the #67185 bug).
expect(mirrorResult.messageId).not.toBe(olderResult.messageId);
expect(mirrorResult.messageId).not.toBe(latestResult.messageId);
// Transcript is JSONL; after three appends it holds 4 lines and the last
// line is the mirrored message. (NOTE(review): presumably the first line
// is a session/store header written by writeTranscriptStore — confirm.)
const lines = fs.readFileSync(mirrorResult.sessionFile, "utf-8").trim().split("\n");
expect(lines.length).toBe(4);
const messageLine = JSON.parse(lines[3]);
// Mirror appends are tagged with the synthetic "openclaw"/"delivery-mirror"
// provider/model pair and carry the mirrored visible text.
expect(messageLine.message.provider).toBe("openclaw");
expect(messageLine.message.model).toBe("delivery-mirror");
expect(messageLine.message.content[0].text).toBe("Repeated answer");
}
});
it("finds session entry using normalized (lowercased) key", async () => {
const storeKey = "agent:main:bluebubbles:direct:+15551234567";
const store = {

View File

@@ -265,9 +265,6 @@ function isRedundantDeliveryMirror(message: SessionTranscriptAssistantMessage):
}
function extractAssistantMessageText(message: SessionTranscriptAssistantMessage): string | null {
if (typeof message.text === "string" && message.text.trim()) {
return message.text.trim();
}
if (!Array.isArray(message.content)) {
return null;
}
@@ -314,11 +311,12 @@ async function findLatestEquivalentAssistantMessageId(
}
const candidateText = extractAssistantMessageText(candidate);
if (candidateText !== expectedText) {
continue;
return undefined;
}
if (typeof parsed.id === "string" && parsed.id) {
return parsed.id;
}
return undefined;
} catch {
continue;
}