test(agents): share proxy stream wrapper fixture

This commit is contained in:
Vincent Koc
2026-04-12 10:53:47 +01:00
parent f80a8e7b6c
commit f2c7cec8de

View File

@@ -7,6 +7,30 @@ import {
createOpenRouterWrapper,
} from "./proxy-stream-wrappers.js";
// Shared test fixture: drives createOpenRouterSystemCacheWrapper with a
// stub stream function and returns the payload the wrapper forwarded, so
// individual tests can assert on cache_control injection for a given
// partial model override.
function runSystemCacheWrapper(model: Partial<Model<"openai-completions">>) {
  const capturedPayload = {
    messages: [{ role: "system", content: "system prompt" }],
  };
  // Stub StreamFn: immediately echoes the fixture payload back through
  // onPayload and hands out an empty assistant-message event stream.
  const delegate: StreamFn = (resolvedModel, _context, streamOptions) => {
    streamOptions?.onPayload?.(capturedPayload, resolvedModel);
    return createAssistantMessageEventStream();
  };
  const wrappedStream = createOpenRouterSystemCacheWrapper(delegate);
  // Defaults describe a declared OpenRouter Anthropic model; callers
  // override fields (provider, baseUrl, ...) via the spread to exercise
  // other routes. Result of the stream call is intentionally discarded.
  const resolvedModel = {
    api: "openai-completions",
    provider: "openrouter",
    id: "anthropic/claude-sonnet-4.6",
    ...model,
  } as Model<"openai-completions">;
  void wrappedStream(resolvedModel, { messages: [] }, {});
  return capturedPayload;
}
describe("proxy stream wrappers", () => {
it("adds OpenRouter attribution headers to stream options", () => {
const calls: Array<{ headers?: Record<string, string> }> = [];
@@ -40,24 +64,7 @@ describe("proxy stream wrappers", () => {
});
it("injects cache_control markers for declared OpenRouter Anthropic models on the default route", () => {
const payload = {
messages: [{ role: "system", content: "system prompt" }],
};
const baseStreamFn: StreamFn = (model, _context, options) => {
options?.onPayload?.(payload, model);
return createAssistantMessageEventStream();
};
const wrapped = createOpenRouterSystemCacheWrapper(baseStreamFn);
void wrapped(
{
api: "openai-completions",
provider: "openrouter",
id: "anthropic/claude-sonnet-4.6",
} as Model<"openai-completions">,
{ messages: [] },
{},
);
const payload = runSystemCacheWrapper({});
expect(payload.messages[0]?.content).toEqual([
{ type: "text", text: "system prompt", cache_control: { type: "ephemeral" } },
@@ -65,49 +72,18 @@ describe("proxy stream wrappers", () => {
});
it("does not inject cache_control markers for declared OpenRouter providers on custom proxy URLs", () => {
const payload = {
messages: [{ role: "system", content: "system prompt" }],
};
const baseStreamFn: StreamFn = (model, _context, options) => {
options?.onPayload?.(payload, model);
return createAssistantMessageEventStream();
};
const wrapped = createOpenRouterSystemCacheWrapper(baseStreamFn);
void wrapped(
{
api: "openai-completions",
provider: "openrouter",
id: "anthropic/claude-sonnet-4.6",
baseUrl: "https://proxy.example.com/v1",
} as Model<"openai-completions">,
{ messages: [] },
{},
);
const payload = runSystemCacheWrapper({
baseUrl: "https://proxy.example.com/v1",
});
expect(payload.messages[0]?.content).toBe("system prompt");
});
it("injects cache_control markers for native OpenRouter hosts behind custom provider ids", () => {
const payload = {
messages: [{ role: "system", content: "system prompt" }],
};
const baseStreamFn: StreamFn = (model, _context, options) => {
options?.onPayload?.(payload, model);
return createAssistantMessageEventStream();
};
const wrapped = createOpenRouterSystemCacheWrapper(baseStreamFn);
void wrapped(
{
api: "openai-completions",
provider: "custom-openrouter",
id: "anthropic/claude-sonnet-4.6",
baseUrl: "https://openrouter.ai/api/v1",
} as Model<"openai-completions">,
{ messages: [] },
{},
);
const payload = runSystemCacheWrapper({
provider: "custom-openrouter",
baseUrl: "https://openrouter.ai/api/v1",
});
expect(payload.messages[0]?.content).toEqual([
{ type: "text", text: "system prompt", cache_control: { type: "ephemeral" } },