perf(test): reduce hot-suite import and setup overhead

This commit is contained in:
Peter Steinberger
2026-02-13 20:26:26 +00:00
parent 1655df7ac0
commit 2086cdfb9b
11 changed files with 312 additions and 472 deletions

View File

@@ -18,198 +18,169 @@ function buildModel(): Model<"openai-responses"> {
};
}
/**
 * Swaps `globalThis.fetch` for a stub that records the JSON-decoded request
 * body and then always rejects, so a test can inspect the outgoing payload
 * without performing any real network I/O.
 *
 * Returns two accessors:
 * - `getLastBody()` — the most recently captured payload (or `undefined`
 *   when the request had no decodable body);
 * - `restore()` — reinstalls the original `fetch`.
 */
function installFailingFetchCapture() {
  const realFetch = globalThis.fetch;
  let capturedBody: unknown;

  const stub: typeof fetch = async (_input, init) => {
    const raw = init?.body;
    // Decode the handful of body shapes the tests actually send; anything
    // else maps to null and is treated the same as "no body" below.
    let text: string | null;
    if (!raw) {
      text = "";
    } else if (typeof raw === "string") {
      text = raw;
    } else if (raw instanceof Uint8Array) {
      text = Buffer.from(raw).toString("utf8");
    } else if (raw instanceof ArrayBuffer) {
      text = Buffer.from(new Uint8Array(raw)).toString("utf8");
    } else {
      text = null;
    }
    capturedBody = text ? (JSON.parse(text) as unknown) : undefined;
    // Abort the request deterministically once the payload is captured.
    throw new Error("intentional fetch abort (test)");
  };

  globalThis.fetch = stub;
  return {
    getLastBody: () => capturedBody as Record<string, unknown> | undefined,
    restore: () => {
      globalThis.fetch = realFetch;
    },
  };
}
// NOTE(review): everything from here down appears to be a unified diff with
// the +/- markers stripped: the OLD version of this describe block (fetch
// capture via installFailingFetchCapture + try/finally + cap.getLastBody())
// and the NEW version (AbortController + onPayload capture) are interleaved.
// Evidence: `const model = buildModel();` appears twice in a row, there are
// two conflicting `assistantToolOnly` initializers, two `toolResult`
// declarations, and two `streamOpenAIResponses(...)` call sites per test.
// This text is NOT compilable as-is — recover the intended version from the
// original commit before making code changes.
describe("openai-responses reasoning replay", () => {
it("replays reasoning for tool-call-only turns (OpenAI requires it)", async () => {
const cap = installFailingFetchCapture();
try {
// NOTE(review): duplicated line — presumably one copy per diff side.
const model = buildModel();
const model = buildModel();
const controller = new AbortController();
controller.abort();
let payload: Record<string, unknown> | undefined;
const assistantToolOnly: AssistantMessage = {
role: "assistant",
api: "openai-responses",
provider: "openai",
model: "gpt-5.2",
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
// NOTE(review): second, conflicting initializer for the same const —
// the other diff side's copy of `assistantToolOnly`.
const assistantToolOnly: AssistantMessage = {
role: "assistant",
api: "openai-responses",
provider: "openai",
model: "gpt-5.2",
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
},
stopReason: "toolUse",
timestamp: Date.now(),
content: [
{
type: "thinking",
thinking: "internal",
thinkingSignature: JSON.stringify({
type: "reasoning",
id: "rs_test",
summary: [],
}),
},
stopReason: "toolUse",
timestamp: Date.now(),
content: [
{
type: "toolCall",
id: "call_123|fc_123",
name: "noop",
arguments: {},
},
],
};
const toolResult: ToolResultMessage = {
role: "toolResult",
toolCallId: "call_123|fc_123",
toolName: "noop",
content: [{ type: "text", text: "ok" }],
isError: false,
timestamp: Date.now(),
};
const stream = streamOpenAIResponses(
model,
{
systemPrompt: "system",
messages: [
{
type: "thinking",
thinking: "internal",
thinkingSignature: JSON.stringify({
type: "reasoning",
id: "rs_test",
summary: [],
}),
role: "user",
content: "Call noop.",
timestamp: Date.now(),
},
assistantToolOnly,
toolResult,
{
type: "toolCall",
id: "call_123|fc_123",
name: "noop",
arguments: {},
role: "user",
content: "Now reply with ok.",
timestamp: Date.now(),
},
],
};
const toolResult: ToolResultMessage = {
role: "toolResult",
toolCallId: "call_123|fc_123",
toolName: "noop",
content: [{ type: "text", text: "ok" }],
isError: false,
timestamp: Date.now(),
};
const stream = streamOpenAIResponses(
model,
{
systemPrompt: "system",
messages: [
{
role: "user",
content: "Call noop.",
timestamp: Date.now(),
},
assistantToolOnly,
toolResult,
{
role: "user",
content: "Now reply with ok.",
timestamp: Date.now(),
},
],
tools: [
{
name: "noop",
description: "no-op",
parameters: Type.Object({}, { additionalProperties: false }),
},
],
tools: [
{
name: "noop",
description: "no-op",
parameters: Type.Object({}, { additionalProperties: false }),
},
],
},
{
apiKey: "test",
signal: controller.signal,
onPayload: (nextPayload) => {
payload = nextPayload as Record<string, unknown>;
},
{ apiKey: "test" },
);
},
);
await stream.result();
await stream.result();
const body = cap.getLastBody();
const input = Array.isArray(body?.input) ? body?.input : [];
const types = input
.map((item) =>
item && typeof item === "object" ? (item as Record<string, unknown>).type : undefined,
)
.filter((t): t is string => typeof t === "string");
const input = Array.isArray(payload?.input) ? payload?.input : [];
const types = input
.map((item) =>
item && typeof item === "object" ? (item as Record<string, unknown>).type : undefined,
)
.filter((t): t is string => typeof t === "string");
expect(types).toContain("reasoning");
expect(types).toContain("function_call");
expect(types.indexOf("reasoning")).toBeLessThan(types.indexOf("function_call"));
} finally {
cap.restore();
}
expect(types).toContain("reasoning");
expect(types).toContain("function_call");
expect(types.indexOf("reasoning")).toBeLessThan(types.indexOf("function_call"));
});
it("still replays reasoning when paired with an assistant message", async () => {
const cap = installFailingFetchCapture();
try {
// NOTE(review): same interleave pattern as the first test — duplicated
// `model`, two `assistantWithText` initializers, and two
// `streamOpenAIResponses(...)` calls below.
const model = buildModel();
const model = buildModel();
const controller = new AbortController();
controller.abort();
let payload: Record<string, unknown> | undefined;
const assistantWithText: AssistantMessage = {
role: "assistant",
api: "openai-responses",
provider: "openai",
model: "gpt-5.2",
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
},
stopReason: "stop",
timestamp: Date.now(),
content: [
{
type: "thinking",
thinking: "internal",
thinkingSignature: JSON.stringify({
type: "reasoning",
id: "rs_test",
summary: [],
}),
},
{ type: "text", text: "hello", textSignature: "msg_test" },
],
};
const stream = streamOpenAIResponses(
model,
const assistantWithText: AssistantMessage = {
role: "assistant",
api: "openai-responses",
provider: "openai",
model: "gpt-5.2",
usage: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
totalTokens: 0,
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0, total: 0 },
},
stopReason: "stop",
timestamp: Date.now(),
content: [
{
systemPrompt: "system",
messages: [
{ role: "user", content: "Hi", timestamp: Date.now() },
assistantWithText,
{ role: "user", content: "Ok", timestamp: Date.now() },
],
type: "thinking",
thinking: "internal",
thinkingSignature: JSON.stringify({
type: "reasoning",
id: "rs_test",
summary: [],
}),
},
{ apiKey: "test" },
);
{ type: "text", text: "hello", textSignature: "msg_test" },
],
};
await stream.result();
const stream = streamOpenAIResponses(
model,
{
systemPrompt: "system",
messages: [
{ role: "user", content: "Hi", timestamp: Date.now() },
assistantWithText,
{ role: "user", content: "Ok", timestamp: Date.now() },
],
},
{
apiKey: "test",
signal: controller.signal,
onPayload: (nextPayload) => {
payload = nextPayload as Record<string, unknown>;
},
},
);
const body = cap.getLastBody();
const input = Array.isArray(body?.input) ? body?.input : [];
const types = input
.map((item) =>
item && typeof item === "object" ? (item as Record<string, unknown>).type : undefined,
)
.filter((t): t is string => typeof t === "string");
await stream.result();
expect(types).toContain("reasoning");
expect(types).toContain("message");
} finally {
cap.restore();
}
const input = Array.isArray(payload?.input) ? payload?.input : [];
const types = input
.map((item) =>
item && typeof item === "object" ? (item as Record<string, unknown>).type : undefined,
)
.filter((t): t is string => typeof t === "string");
expect(types).toContain("reasoning");
expect(types).toContain("message");
});
});