test(reply): add block-stream whitespace trim regression

This commit is contained in:
Sebastian
2026-02-14 22:43:20 -05:00
parent 8aeee98593
commit ec4225c28e
2 changed files with 51 additions and 0 deletions

View File

@@ -25,6 +25,7 @@ Docs: https://docs.openclaw.ai
- Media: accept `MEDIA:`-prefixed paths (lenient whitespace) when loading outbound media to prevent `ENOENT` for tool-returned local media paths. (#13107) Thanks @mcaxtr.
- Media understanding: treat binary `application/vnd.*`/zip/octet-stream attachments as non-text (while keeping vendor `+json`/`+xml` text-eligible) so Office/ZIP files are not inlined into prompt body text. (#16513) Thanks @rmramsey32.
- Agents: deliver tool result media (screenshots, images, audio) to channels regardless of verbose level. (#11735) Thanks @strelov1.
- Auto-reply/Block streaming: strip leading whitespace from streamed block replies so messages starting with blank lines no longer deliver visible leading empty lines. (#16422) Thanks @mcinteerj.
- Agents/Image tool: allow workspace-local image paths by including the active workspace directory in local media allowlists, and trust sandbox-validated paths in image loaders to prevent false "not under an allowed directory" rejections. (#15541)
- Agents/Image tool: propagate the effective workspace root into tool wiring so workspace-local image paths are accepted by default when running without an explicit `workspaceDir`. (#16722)
- BlueBubbles: include sender identity in group chat envelopes and pass clean message text to the agent prompt, aligning with iMessage/Signal formatting. (#16210) Thanks @zerone0x.

View File

@@ -211,4 +211,54 @@ describe("block streaming", () => {
expect(onBlockReplyStreamMode).not.toHaveBeenCalled();
});
});
// Regression test for #16422: block-streamed replies must have leading
// whitespace stripped so delivered messages do not start with visible
// blank lines.
it("trims leading whitespace in block-streamed replies", async () => {
  await withTempHome(async (home) => {
    // Collect every text payload the user-facing callback receives so we
    // can assert on the exact delivered content, not just call counts.
    const seen: string[] = [];
    const onBlockReply = vi.fn(async (payload) => {
      seen.push(payload.text ?? "");
    });
    piEmbeddedMock.runEmbeddedPiAgent.mockImplementation(
      async (params: RunEmbeddedPiAgentParams) => {
        // Await the stream callback (rather than fire-and-forget with
        // `void`) so delivery completes before the agent run resolves;
        // otherwise the assertions below can race an async onBlockReply
        // pipeline and the test becomes flaky.
        await params.onBlockReply?.({ text: "\n\n Hello from stream" });
        return {
          payloads: [{ text: "\n\n Hello from stream" }],
          meta: {
            durationMs: 5,
            agentMeta: { sessionId: "s", provider: "p", model: "m" },
          },
        };
      },
    );
    const res = await getReplyFromConfig(
      {
        Body: "ping",
        From: "+1004",
        To: "+2000",
        MessageSid: "msg-128",
        Provider: "telegram",
      },
      {
        onBlockReply,
        disableBlockStreaming: false,
      },
      {
        agents: {
          defaults: {
            model: "anthropic/claude-opus-4-5",
            workspace: path.join(home, "openclaw"),
          },
        },
        channels: { telegram: { allowFrom: ["*"] } },
        session: { store: path.join(home, "sessions.json") },
      },
    );
    // Block streaming delivers via the callback; no aggregate reply is
    // returned from getReplyFromConfig itself.
    expect(res).toBeUndefined();
    expect(onBlockReply).toHaveBeenCalledTimes(1);
    // The leading "\n\n " must be stripped before delivery.
    expect(seen).toEqual(["Hello from stream"]);
  });
});
});