test(agents): cover openai-completions tool-call arg repair

This commit is contained in:
Peter Steinberger
2026-04-23 03:52:26 +01:00
parent 6b765f58d3
commit 728e29a898

View File

@@ -1,5 +1,51 @@
import { describe, expect, it } from "vitest";
import { shouldRepairMalformedToolCallArguments } from "./attempt.tool-call-argument-repair.js";
import {
shouldRepairMalformedToolCallArguments,
wrapStreamFnRepairMalformedToolCallArguments,
} from "./attempt.tool-call-argument-repair.js";
// Minimal shape of the wrapped stream object the provider stream functions
// return: an async-iterable sequence of events plus a `result()` promise
// that resolves to the final assistant message.
type FakeWrappedStream = {
result: () => Promise<unknown>;
[Symbol.asyncIterator]: () => AsyncIterator<unknown>;
};
// Signature compatible with the real provider stream function; all parameter
// types are collapsed to `never` because the fakes in this file ignore them.
type FakeStreamFn = (
model: never,
context: never,
options: never,
) => FakeWrappedStream | Promise<FakeWrappedStream>;
// Builds a minimal stand-in for a provider stream: iterating it yields the
// supplied events in order, and `result()` resolves with the given final
// message. A fresh generator is produced on every iteration request.
function createFakeStream(params: {
  events: unknown[];
  resultMessage: unknown;
}): FakeWrappedStream {
  async function* emit(): AsyncGenerator<unknown> {
    yield* params.events;
  }
  return {
    result: () => Promise.resolve(params.resultMessage),
    [Symbol.asyncIterator]: () => emit(),
  };
}
// Invokes `baseFn` either directly or through the malformed-argument repair
// wrapper, depending on whether the provider/modelApi pair opts into repair.
// Returns the (possibly wrapped) stream, awaiting it if the fn is async.
async function invokeProviderStream(params: {
  provider: string;
  modelApi: string;
  baseFn: FakeStreamFn;
}): Promise<FakeWrappedStream> {
  const repairEnabled = shouldRepairMalformedToolCallArguments({
    provider: params.provider,
    modelApi: params.modelApi,
  });
  const streamFn = repairEnabled
    ? (wrapStreamFnRepairMalformedToolCallArguments(params.baseFn as never) as FakeStreamFn)
    : params.baseFn;
  // `async` unwraps a Promise return and wraps a plain value, matching the
  // original's `await Promise.resolve(...)` exactly.
  return streamFn({} as never, {} as never, {} as never);
}
describe("shouldRepairMalformedToolCallArguments", () => {
it("keeps the repair enabled for kimi providers on anthropic-messages", () => {
@@ -38,3 +84,62 @@ describe("shouldRepairMalformedToolCallArguments", () => {
).toBe(false);
});
});
describe("openai-completions malformed tool-call argument repair", () => {
  it("repairs fragmented OpenAI-compatible function-call args before tool execution", async () => {
    // The repair wrapper mutates `arguments` in place, so each surface the
    // repaired call can appear on must hold its own distinct object.
    const makeToolCall = () => ({ type: "functionCall", name: "read", arguments: {} });
    const deltaToolCall = makeToolCall();
    const streamToolCall = makeToolCall();
    const endToolCall = makeToolCall();
    const resultToolCall = makeToolCall();
    const deltaMessage = { role: "assistant", content: [deltaToolCall] };
    const endMessage = { role: "assistant", content: [endToolCall] };
    const resultMessage = { role: "assistant", content: [resultToolCall] };
    // Shorthand for a streamed argument fragment at content index 0.
    const deltaEvent = (delta: string) => ({
      type: "toolcall_delta",
      contentIndex: 0,
      delta,
      partial: deltaMessage,
    });
    const stream = await invokeProviderStream({
      provider: "sglang",
      modelApi: "openai-completions",
      baseFn: () =>
        createFakeStream({
          events: [
            // Fragments arrive malformed: a stray function-call header,
            // an unterminated JSON object, then a close brace plus junk.
            deltaEvent(".functions.read:0 "),
            deltaEvent('{"path":"/tmp/report.txt"'),
            deltaEvent("}x"),
            {
              type: "toolcall_end",
              contentIndex: 0,
              toolCall: streamToolCall,
              partial: deltaMessage,
              message: endMessage,
            },
          ],
          resultMessage,
        }),
    });
    for await (const _event of stream) {
      // Consume every event so the wrapper observes the full stream.
    }
    const finalResult = await stream.result();
    // Every copy of the tool call must carry the repaired arguments.
    const expectedArgs = { path: "/tmp/report.txt" };
    expect(deltaToolCall.arguments).toEqual(expectedArgs);
    expect(streamToolCall.arguments).toEqual(expectedArgs);
    expect(endToolCall.arguments).toEqual(expectedArgs);
    expect(resultToolCall.arguments).toEqual(expectedArgs);
    // The wrapper must hand back the provider's result message unchanged.
    expect(finalResult).toBe(resultMessage);
  });
});