test(agents): share lifecycle warn assertion

This commit is contained in:
Vincent Koc
2026-04-12 11:03:43 +01:00
parent 0450f98157
commit 269a5b0cfc

View File

@@ -47,6 +47,15 @@ function createContext(
} as unknown as EmbeddedPiSubscribeContext;
}
/**
 * Drives `handleAgentEnd` on the given context, asserts that exactly one
 * "embedded run agent end" warning was emitted on `ctx.log.warn`, and
 * returns that warning's metadata payload (second argument of the call).
 *
 * NOTE(review): the `toHaveBeenCalledTimes(1)` assertion means callers must
 * not have already invoked `handleAgentEnd` (or otherwise warned) on this
 * context before calling this helper — confirm call sites only call it once.
 */
async function handleAgentEndAndReadWarnMeta(ctx: EmbeddedPiSubscribeContext) {
  await handleAgentEnd(ctx);
  const warnMock = vi.mocked(ctx.log.warn);
  expect(warnMock).toHaveBeenCalledTimes(1);
  const [message, meta] = warnMock.mock.calls[0] ?? [];
  expect(message).toBe("embedded run agent end");
  return meta;
}
describe("handleAgentEnd", () => {
it("logs the resolved error message when run ends with assistant error", async () => {
const onAgentEvent = vi.fn();
@@ -61,12 +70,8 @@ describe("handleAgentEnd", () => {
);
ctx.state.livenessState = "working";
await handleAgentEnd(ctx);
const warn = vi.mocked(ctx.log.warn);
expect(warn).toHaveBeenCalledTimes(1);
expect(warn.mock.calls[0]?.[0]).toBe("embedded run agent end");
expect(warn.mock.calls[0]?.[1]).toMatchObject({
const warnMeta = await handleAgentEndAndReadWarnMeta(ctx);
expect(warnMeta).toMatchObject({
event: "embedded_run_agent_end",
runId: "run-1",
error: "LLM request failed: connection refused by the provider endpoint.",
@@ -95,12 +100,8 @@ describe("handleAgentEnd", () => {
content: [{ type: "text", text: "" }],
});
await handleAgentEnd(ctx);
const warn = vi.mocked(ctx.log.warn);
expect(warn).toHaveBeenCalledTimes(1);
expect(warn.mock.calls[0]?.[0]).toBe("embedded run agent end");
expect(warn.mock.calls[0]?.[1]).toMatchObject({
const warnMeta = await handleAgentEndAndReadWarnMeta(ctx);
expect(warnMeta).toMatchObject({
event: "embedded_run_agent_end",
runId: "run-1",
error: "The AI service is temporarily overloaded. Please try again in a moment.",