diff --git a/src/agents/pi-embedded-runner/proxy-stream-wrappers.test.ts b/src/agents/pi-embedded-runner/proxy-stream-wrappers.test.ts
index 48dcb2b9041..7b3757d0cf8 100644
--- a/src/agents/pi-embedded-runner/proxy-stream-wrappers.test.ts
+++ b/src/agents/pi-embedded-runner/proxy-stream-wrappers.test.ts
@@ -7,6 +7,30 @@ import {
   createOpenRouterWrapper,
 } from "./proxy-stream-wrappers.js";
 
+function runSystemCacheWrapper(model: Partial<Model<"openai-completions">>) {
+  const payload = {
+    messages: [{ role: "system", content: "system prompt" }],
+  };
+  const baseStreamFn: StreamFn = (resolvedModel, _context, options) => {
+    options?.onPayload?.(payload, resolvedModel);
+    return createAssistantMessageEventStream();
+  };
+
+  const wrapped = createOpenRouterSystemCacheWrapper(baseStreamFn);
+  void wrapped(
+    {
+      api: "openai-completions",
+      provider: "openrouter",
+      id: "anthropic/claude-sonnet-4.6",
+      ...model,
+    } as Model<"openai-completions">,
+    { messages: [] },
+    {},
+  );
+
+  return payload;
+}
+
 describe("proxy stream wrappers", () => {
   it("adds OpenRouter attribution headers to stream options", () => {
     const calls: Array<{ headers?: Record<string, string> }> = [];
@@ -40,24 +64,7 @@ describe("proxy stream wrappers", () => {
   });
 
   it("injects cache_control markers for declared OpenRouter Anthropic models on the default route", () => {
-    const payload = {
-      messages: [{ role: "system", content: "system prompt" }],
-    };
-    const baseStreamFn: StreamFn = (model, _context, options) => {
-      options?.onPayload?.(payload, model);
-      return createAssistantMessageEventStream();
-    };
-
-    const wrapped = createOpenRouterSystemCacheWrapper(baseStreamFn);
-    void wrapped(
-      {
-        api: "openai-completions",
-        provider: "openrouter",
-        id: "anthropic/claude-sonnet-4.6",
-      } as Model<"openai-completions">,
-      { messages: [] },
-      {},
-    );
+    const payload = runSystemCacheWrapper({});
 
     expect(payload.messages[0]?.content).toEqual([
       { type: "text", text: "system prompt", cache_control: { type: "ephemeral" } },
@@ -65,49 +72,18 @@ describe("proxy stream wrappers", () => {
   });
 
   it("does not inject cache_control markers for declared OpenRouter providers on custom proxy URLs", () => {
-    const payload = {
-      messages: [{ role: "system", content: "system prompt" }],
-    };
-    const baseStreamFn: StreamFn = (model, _context, options) => {
-      options?.onPayload?.(payload, model);
-      return createAssistantMessageEventStream();
-    };
-
-    const wrapped = createOpenRouterSystemCacheWrapper(baseStreamFn);
-    void wrapped(
-      {
-        api: "openai-completions",
-        provider: "openrouter",
-        id: "anthropic/claude-sonnet-4.6",
-        baseUrl: "https://proxy.example.com/v1",
-      } as Model<"openai-completions">,
-      { messages: [] },
-      {},
-    );
+    const payload = runSystemCacheWrapper({
+      baseUrl: "https://proxy.example.com/v1",
+    });
 
     expect(payload.messages[0]?.content).toBe("system prompt");
   });
 
   it("injects cache_control markers for native OpenRouter hosts behind custom provider ids", () => {
-    const payload = {
-      messages: [{ role: "system", content: "system prompt" }],
-    };
-    const baseStreamFn: StreamFn = (model, _context, options) => {
-      options?.onPayload?.(payload, model);
-      return createAssistantMessageEventStream();
-    };
-
-    const wrapped = createOpenRouterSystemCacheWrapper(baseStreamFn);
-    void wrapped(
-      {
-        api: "openai-completions",
-        provider: "custom-openrouter",
-        id: "anthropic/claude-sonnet-4.6",
-        baseUrl: "https://openrouter.ai/api/v1",
-      } as Model<"openai-completions">,
-      { messages: [] },
-      {},
-    );
+    const payload = runSystemCacheWrapper({
+      provider: "custom-openrouter",
+      baseUrl: "https://openrouter.ai/api/v1",
+    });
 
     expect(payload.messages[0]?.content).toEqual([
       { type: "text", text: "system prompt", cache_control: { type: "ephemeral" } },