fix(telegram): preserve streamed generated media

This commit is contained in:
Peter Steinberger
2026-04-28 06:02:33 +01:00
parent 8c8dfa768a
commit bb97f19396
3 changed files with 79 additions and 2 deletions

View File

@@ -57,6 +57,7 @@ Docs: https://docs.openclaw.ai
- Gateway/models: merge explicit `models.providers.*.models` rows into the Gateway model catalog with normalized provider/model dedupe, and use normalized image-capability lookup so custom vision models keep native image attachments even when Pi discovery omits them or model ID casing differs. Fixes #64213 and #65165. Thanks @billonese and @202233a.
- Gateway/reload: publish canonical post-write source config to in-process reloaders so simple config saves no longer create phantom plugin diffs or trigger unnecessary Gateway restarts. (#73267) Thanks @szsip239.
- CLI/tasks: ship the task-registry control runtime in npm packages so `openclaw tasks cancel` can load ACP/subagent cancellation helpers from published builds. Fixes #68997. Thanks @1OAKDesign.
- Channels/Telegram: preserve unsent generated media after partial reply streaming has already delivered the text, so `image_generate` outputs still reach Telegram as photos instead of being dropped from the final payload. Fixes #73253. Thanks @mlaihk.
- Export/session: keep inline export HTML scripts and vendor libraries injected after template formatting so generated session exports open with the app code, markdown renderer, and syntax highlighter present. Fixes #41862 and #49957; carries forward #41861 and #68947. Thanks @briannewman, @martenzi, and @armanddp.
- Agents/ACPX: stage the patched Claude ACP adapter as an ACPX runtime dependency and route known Codex/Claude ACP commands through local wrappers, so Gateway runtime no longer depends on live `npx` adapter resolution. Fixes #73202. Thanks @joerod26.
- Memory/compaction: let pre-compaction memory flush use an exact `agents.defaults.compaction.memoryFlush.model` override such as `ollama/qwen3:8b` without inheriting the active session fallback chain, so local housekeeping can avoid paid conversation models. Fixes #53772. Thanks @limen96.

View File

@@ -296,6 +296,54 @@ describe("buildReplyPayloads media filter integration", () => {
expect(replyPayloads).toHaveLength(0);
});
it("keeps unsent final media after block pipeline streamed the text", async () => {
const pipeline: Parameters<typeof buildReplyPayloads>[0]["blockReplyPipeline"] = {
didStream: () => true,
isAborted: () => false,
hasSentPayload: (payload) => payload.text === "response" && !payload.mediaUrl,
enqueue: () => {},
flush: async () => {},
stop: () => {},
hasBuffered: () => false,
getSentMediaUrls: () => [],
};
const { replyPayloads } = await buildReplyPayloads({
...baseParams,
blockStreamingEnabled: true,
blockReplyPipeline: pipeline,
payloads: [{ text: "response", mediaUrl: "/tmp/generated.png" }],
});
expect(replyPayloads).toHaveLength(1);
expect(replyPayloads[0]).toMatchObject({
mediaUrl: "/tmp/generated.png",
text: undefined,
});
});
it("drops already-sent final media after block pipeline streamed successfully", async () => {
const pipeline: Parameters<typeof buildReplyPayloads>[0]["blockReplyPipeline"] = {
didStream: () => true,
isAborted: () => false,
hasSentPayload: (payload) => payload.text === "response" && !payload.mediaUrl,
enqueue: () => {},
flush: async () => {},
stop: () => {},
hasBuffered: () => false,
getSentMediaUrls: () => ["/tmp/generated.png"],
};
const { replyPayloads } = await buildReplyPayloads({
...baseParams,
blockStreamingEnabled: true,
blockReplyPipeline: pipeline,
payloads: [{ text: "response", mediaUrl: "/tmp/generated.png" }],
});
expect(replyPayloads).toHaveLength(0);
});
it("preserves post-stream error payloads when block pipeline streamed successfully", async () => {
const pipeline: Parameters<typeof buildReplyPayloads>[0]["blockReplyPipeline"] = {
didStream: () => true,

View File

@@ -224,8 +224,34 @@ export async function buildReplyPayloads(params: {
: mediaFilteredPayloads;
const isDirectlySentBlockPayload = (payload: ReplyPayload) =>
Boolean(params.directlySentBlockKeys?.has(createBlockReplyContentKey(payload)));
// Decides what remains of a final payload once block streaming has already
// run. Returns the payload (possibly reduced to media-only) when something
// still needs delivery, or null when the payload is fully covered by the
// stream and should be dropped (the caller flatMaps nulls away).
const preserveUnsentMediaAfterBlockStream = (payload: ReplyPayload): ReplyPayload | null => {
// Error payloads always pass through untouched so failures stay visible.
if (payload.isError) {
return payload;
}
const reply = resolveSendableOutboundReplyParts(payload);
// No media attached: the text-only payload is assumed to have been covered
// by the block stream, so drop it. NOTE(review): relies on this helper only
// being invoked on the post-stream path — confirm at the call site.
if (!reply.hasMedia) {
return null;
}
// Media without meaningful text: nothing was streamable, keep it whole.
if (!reply.trimmedText) {
return payload;
}
// Build the text-only shape of this payload to ask the pipeline whether the
// textual part was already delivered during streaming.
const textOnlyPayload = {
...payload,
mediaUrl: undefined,
mediaUrls: undefined,
audioAsVoice: undefined,
};
// Text was never streamed: send the full payload (text + media) as-is.
if (!params.blockReplyPipeline?.hasSentPayload(textOnlyPayload)) {
return payload;
}
// Text already streamed: strip it and forward only the media, normalizing
// a falsy audioAsVoice to undefined.
return {
...payload,
text: undefined,
audioAsVoice: payload.audioAsVoice || undefined,
};
};
const contentSuppressedPayloads = shouldDropFinalPayloads
? dedupedPayloads.filter((payload) => payload.isError)
? dedupedPayloads.flatMap((payload) => preserveUnsentMediaAfterBlockStream(payload) ?? [])
: params.blockStreamingEnabled
? dedupedPayloads.filter(
(payload) =>
@@ -252,7 +278,9 @@ export async function buildReplyPayloads(params: {
sentMediaUrls: blockSentMediaUrls,
})
: contentSuppressedPayloads;
const replyPayloads = suppressMessagingToolReplies ? [] : filteredPayloads;
const replyPayloads = suppressMessagingToolReplies
? []
: filteredPayloads.filter(isRenderablePayload);
return {
replyPayloads,