From 728e29a89876e72202610aa9f8ec8e715f5b2e9e Mon Sep 17 00:00:00 2001
From: Peter Steinberger
Date: Thu, 23 Apr 2026 03:52:26 +0100
Subject: [PATCH] test(agents): cover openai-completions tool-call arg repair

---
 .../attempt.tool-call-argument-repair.test.ts | 107 +++++++++++++++++-
 1 file changed, 106 insertions(+), 1 deletion(-)

diff --git a/src/agents/pi-embedded-runner/run/attempt.tool-call-argument-repair.test.ts b/src/agents/pi-embedded-runner/run/attempt.tool-call-argument-repair.test.ts
index 5392c90d7d3..6f96b01f2a1 100644
--- a/src/agents/pi-embedded-runner/run/attempt.tool-call-argument-repair.test.ts
+++ b/src/agents/pi-embedded-runner/run/attempt.tool-call-argument-repair.test.ts
@@ -1,5 +1,51 @@
 import { describe, expect, it } from "vitest";
-import { shouldRepairMalformedToolCallArguments } from "./attempt.tool-call-argument-repair.js";
+import {
+  shouldRepairMalformedToolCallArguments,
+  wrapStreamFnRepairMalformedToolCallArguments,
+} from "./attempt.tool-call-argument-repair.js";
+
+type FakeWrappedStream = {
+  result: () => Promise<unknown>;
+  [Symbol.asyncIterator]: () => AsyncIterator<unknown>;
+};
+
+type FakeStreamFn = (
+  model: never,
+  context: never,
+  options: never,
+) => FakeWrappedStream | Promise<FakeWrappedStream>;
+
+function createFakeStream(params: {
+  events: unknown[];
+  resultMessage: unknown;
+}): FakeWrappedStream {
+  return {
+    async result() {
+      return params.resultMessage;
+    },
+    [Symbol.asyncIterator]() {
+      return (async function* () {
+        for (const event of params.events) {
+          yield event;
+        }
+      })();
+    },
+  };
+}
+
+async function invokeProviderStream(params: {
+  provider: string;
+  modelApi: string;
+  baseFn: FakeStreamFn;
+}): Promise<FakeWrappedStream> {
+  const streamFn = shouldRepairMalformedToolCallArguments({
+    provider: params.provider,
+    modelApi: params.modelApi,
+  })
+    ? (wrapStreamFnRepairMalformedToolCallArguments(params.baseFn as never) as FakeStreamFn)
+    : params.baseFn;
+  return await Promise.resolve(streamFn({} as never, {} as never, {} as never));
+}
 
 describe("shouldRepairMalformedToolCallArguments", () => {
   it("keeps the repair enabled for kimi providers on anthropic-messages", () => {
@@ -38,3 +84,62 @@
     ).toBe(false);
   });
 });
+
+describe("openai-completions malformed tool-call argument repair", () => {
+  it("repairs fragmented OpenAI-compatible function-call args before tool execution", async () => {
+    const partialToolCall = { type: "functionCall", name: "read", arguments: {} };
+    const streamedToolCall = { type: "functionCall", name: "read", arguments: {} };
+    const endMessageToolCall = { type: "functionCall", name: "read", arguments: {} };
+    const finalToolCall = { type: "functionCall", name: "read", arguments: {} };
+    const partialMessage = { role: "assistant", content: [partialToolCall] };
+    const endMessage = { role: "assistant", content: [endMessageToolCall] };
+    const finalMessage = { role: "assistant", content: [finalToolCall] };
+
+    const stream = await invokeProviderStream({
+      provider: "sglang",
+      modelApi: "openai-completions",
+      baseFn: () =>
+        createFakeStream({
+          events: [
+            {
+              type: "toolcall_delta",
+              contentIndex: 0,
+              delta: ".functions.read:0 ",
+              partial: partialMessage,
+            },
+            {
+              type: "toolcall_delta",
+              contentIndex: 0,
+              delta: '{"path":"/tmp/report.txt"',
+              partial: partialMessage,
+            },
+            {
+              type: "toolcall_delta",
+              contentIndex: 0,
+              delta: "}x",
+              partial: partialMessage,
+            },
+            {
+              type: "toolcall_end",
+              contentIndex: 0,
+              toolCall: streamedToolCall,
+              partial: partialMessage,
+              message: endMessage,
+            },
+          ],
+          resultMessage: finalMessage,
+        }),
+    });
+
+    for await (const _item of stream) {
+      // drain
+    }
+    const result = await stream.result();
+
+    expect(partialToolCall.arguments).toEqual({ path: "/tmp/report.txt" });
+    expect(streamedToolCall.arguments).toEqual({ path: "/tmp/report.txt" });
+    expect(endMessageToolCall.arguments).toEqual({ path: "/tmp/report.txt" });
+    expect(finalToolCall.arguments).toEqual({ path: "/tmp/report.txt" });
+    expect(result).toBe(finalMessage);
+  });
+});