From 060467ef832113f025cd996e0ea82fcc994c3c8e Mon Sep 17 00:00:00 2001 From: Peter Steinberger Date: Thu, 23 Apr 2026 18:19:51 +0100 Subject: [PATCH] test: dedupe OpenAI stream wrapper assertions --- .../openai-stream-wrappers.test.ts | 50 +++---------------- 1 file changed, 8 insertions(+), 42 deletions(-) diff --git a/src/agents/pi-embedded-runner/openai-stream-wrappers.test.ts b/src/agents/pi-embedded-runner/openai-stream-wrappers.test.ts index f80dbc16d12..a29001c5d11 100644 --- a/src/agents/pi-embedded-runner/openai-stream-wrappers.test.ts +++ b/src/agents/pi-embedded-runner/openai-stream-wrappers.test.ts @@ -88,27 +88,10 @@ describe("createOpenAIThinkingLevelWrapper", () => { }); it("overrides existing reasoning.effort from upstream wrappers", () => { - const baseStreamFn: StreamFn = (model, _context, options) => { - const payload: Record<string, unknown> = { - model: model.id, - reasoning: { effort: "none" }, - }; - options?.onPayload?.(payload, model); - return createAssistantMessageEventStream(); - }; - - const payloads: Array<Record<string, unknown>> = []; - const capture: StreamFn = (model, context, options) => { - return baseStreamFn(model, context, { - ...options, - onPayload: (payload, m) => { - options?.onPayload?.(payload, m); - payloads.push(structuredClone(payload as Record<string, unknown>)); - }, - }); - }; - - const wrapped = createOpenAIThinkingLevelWrapper(capture, "medium"); + const { baseStreamFn, payloads } = createPayloadCapture({ + initialReasoning: { effort: "none" }, + }); + const wrapped = createOpenAIThinkingLevelWrapper(baseStreamFn, "medium"); void wrapped(codexModel, { messages: [] }, {}); expect(payloads[0]?.reasoning).toEqual({ effort: "medium" }); @@ -121,27 +104,10 @@ describe("createOpenAIThinkingLevelWrapper", () => { }); it("preserves other reasoning properties when overriding effort", () => { - const baseStreamFn: StreamFn = (model, _context, options) => { - const payload: Record<string, unknown> = { - model: model.id, - reasoning: { effort: "none", summary: "auto" }, - }; - 
options?.onPayload?.(payload, model); - return createAssistantMessageEventStream(); - }; - - const payloads: Array<Record<string, unknown>> = []; - const capture: StreamFn = (model, context, options) => { - return baseStreamFn(model, context, { - ...options, - onPayload: (payload, m) => { - options?.onPayload?.(payload, m); - payloads.push(structuredClone(payload as Record<string, unknown>)); - }, - }); - }; - - const wrapped = createOpenAIThinkingLevelWrapper(capture, "high"); + const { baseStreamFn, payloads } = createPayloadCapture({ + initialReasoning: { effort: "none", summary: "auto" }, + }); + const wrapped = createOpenAIThinkingLevelWrapper(baseStreamFn, "high"); void wrapped(codexModel, { messages: [] }, {}); expect(payloads[0]?.reasoning).toEqual({ effort: "high", summary: "auto" });