fix(providers): centralize stream request headers (#59542)

* fix(providers): centralize stream request headers

* Update src/agents/provider-request-config.ts

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

---------

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
This commit is contained in:
Vincent Koc
2026-04-02 17:21:46 +09:00
committed by GitHub
parent 08962b6812
commit 331e835dab
5 changed files with 119 additions and 37 deletions

View File

@@ -15,7 +15,7 @@
import { EventEmitter } from "node:events";
import WebSocket, { type ClientOptions } from "ws";
import { resolveProviderRequestAttributionHeaders } from "./provider-attribution.js";
import { resolveProviderRequestHeaders } from "./provider-request-config.js";
// ─────────────────────────────────────────────────────────────────────────────
// WebSocket Event Types (Server → Client)
@@ -403,17 +403,18 @@ export class OpenAIWebSocketManager extends EventEmitter<InternalEvents> {
}
const socket = this.socketFactory(this.wsUrl, {
headers: {
Authorization: `Bearer ${this.apiKey}`,
"OpenAI-Beta": "responses-websocket=v1",
...resolveProviderRequestAttributionHeaders({
provider: "openai",
api: "openai-responses",
baseUrl: this.wsUrl,
capability: "llm",
transport: "websocket",
}),
},
headers: resolveProviderRequestHeaders({
provider: "openai",
api: "openai-responses",
baseUrl: this.wsUrl,
capability: "llm",
transport: "websocket",
defaultHeaders: {
Authorization: `Bearer ${this.apiKey}`,
"OpenAI-Beta": "responses-websocket=v1",
},
precedence: "defaults-win",
}),
});
this.ws = socket;

View File

@@ -6,10 +6,8 @@ import {
patchCodexNativeWebSearchPayload,
resolveCodexNativeSearchActivation,
} from "../codex-native-web-search.js";
import {
resolveProviderRequestAttributionHeaders,
resolveProviderRequestPolicy,
} from "../provider-attribution.js";
import { resolveProviderRequestPolicy } from "../provider-attribution.js";
import { resolveProviderRequestHeaders } from "../provider-request-config.js";
import { log } from "./logger.js";
import { streamWithPayloadPatch } from "./stream-payload-utils.js";
@@ -540,16 +538,15 @@ export function createOpenAIAttributionHeadersWrapper(
}
return underlying(model, context, {
...options,
headers: {
...options?.headers,
...resolveProviderRequestAttributionHeaders({
provider: attributionProvider,
api: typeof model.api === "string" ? model.api : undefined,
baseUrl: typeof model.baseUrl === "string" ? model.baseUrl : undefined,
capability: "llm",
transport: "stream",
}),
},
headers: resolveProviderRequestHeaders({
provider: attributionProvider,
api: typeof model.api === "string" ? model.api : undefined,
baseUrl: typeof model.baseUrl === "string" ? model.baseUrl : undefined,
capability: "llm",
transport: "stream",
callerHeaders: options?.headers,
precedence: "defaults-win",
}),
});
};
}

View File

@@ -1,7 +1,7 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
import { streamSimple } from "@mariozechner/pi-ai";
import type { ThinkLevel } from "../../auto-reply/thinking.js";
import { resolveProviderRequestAttributionHeaders } from "../provider-attribution.js";
import { resolveProviderRequestHeaders } from "../provider-request-config.js";
import { streamWithPayloadPatch } from "./stream-payload-utils.js";
const KILOCODE_FEATURE_HEADER = "X-KILOCODE-FEATURE";
const KILOCODE_FEATURE_DEFAULT = "openclaw";
@@ -111,12 +111,14 @@ export function createOpenRouterWrapper(
): StreamFn {
const underlying = baseStreamFn ?? streamSimple;
return (model, context, options) => {
const attributionHeaders = resolveProviderRequestAttributionHeaders({
const headers = resolveProviderRequestHeaders({
provider: typeof model.provider === "string" ? model.provider : "openrouter",
api: typeof model.api === "string" ? model.api : undefined,
baseUrl: typeof model.baseUrl === "string" ? model.baseUrl : undefined,
capability: "llm",
transport: "stream",
callerHeaders: options?.headers,
precedence: "caller-wins",
});
return streamWithPayloadPatch(
underlying,
@@ -124,10 +126,7 @@ export function createOpenRouterWrapper(
context,
{
...options,
headers: {
...attributionHeaders,
...options?.headers,
},
headers,
},
(payload) => {
normalizeProxyReasoningPayload(payload, thinkingLevel);
@@ -146,16 +145,23 @@ export function createKilocodeWrapper(
): StreamFn {
const underlying = baseStreamFn ?? streamSimple;
return (model, context, options) => {
const headers = resolveProviderRequestHeaders({
provider: typeof model.provider === "string" ? model.provider : "kilocode",
api: typeof model.api === "string" ? model.api : undefined,
baseUrl: typeof model.baseUrl === "string" ? model.baseUrl : undefined,
capability: "llm",
transport: "stream",
callerHeaders: options?.headers,
defaultHeaders: resolveKilocodeAppHeaders(),
precedence: "defaults-win",
});
return streamWithPayloadPatch(
underlying,
model,
context,
{
...options,
headers: {
...options?.headers,
...resolveKilocodeAppHeaders(),
},
headers,
},
(payload) => {
normalizeProxyReasoningPayload(payload, thinkingLevel);

View File

@@ -1,5 +1,8 @@
import { describe, expect, it } from "vitest";
import { resolveProviderRequestConfig } from "./provider-request-config.js";
import {
resolveProviderRequestConfig,
resolveProviderRequestHeaders,
} from "./provider-request-config.js";
describe("provider request config", () => {
it("merges discovered, provider, and model headers in precedence order", () => {
@@ -62,4 +65,48 @@ describe("provider request config", () => {
expect(resolved.policy.endpointClass).toBe("openrouter");
expect(resolved.policy.attributionProvider).toBe("openrouter");
});
it("lets defaults override caller headers when requested", () => {
  // Caller tries to spoof attribution headers; defaults-win must ignore that.
  const spoofedCallerHeaders = {
    originator: "spoofed",
    "User-Agent": "spoofed/0.0.0",
    "X-Custom": "1",
  };
  const headers = resolveProviderRequestHeaders({
    provider: "openai",
    api: "openai-responses",
    baseUrl: "https://api.openai.com/v1",
    capability: "llm",
    transport: "stream",
    callerHeaders: spoofedCallerHeaders,
    precedence: "defaults-win",
  });
  // Attribution values win over the spoofed ones; unrelated caller headers survive.
  expect(headers).toMatchObject({
    originator: "openclaw",
    version: expect.any(String),
    "User-Agent": expect.stringMatching(/^openclaw\//),
    "X-Custom": "1",
  });
});
it("lets caller headers override defaults when requested", () => {
  // With caller-wins precedence the caller's HTTP-Referer replaces the default.
  const callerHeaders = {
    "HTTP-Referer": "https://example.com",
    "X-Custom": "1",
  };
  const headers = resolveProviderRequestHeaders({
    provider: "openrouter",
    api: "openai-completions",
    capability: "llm",
    transport: "stream",
    callerHeaders,
    precedence: "caller-wins",
  });
  expect(headers).toEqual({
    "HTTP-Referer": "https://example.com",
    "X-OpenRouter-Title": "OpenClaw",
    "X-OpenRouter-Categories": "cli-agent",
    "X-Custom": "1",
  });
});
});

View File

@@ -32,6 +32,8 @@ export type ResolvedProviderRequestConfig = {
policy: ProviderRequestPolicyResolution;
};
export type ProviderRequestHeaderPrecedence = "caller-wins" | "defaults-win";
export function mergeProviderRequestHeaders(
...headerSets: Array<Record<string, string> | undefined>
): Record<string, string> | undefined {
@@ -86,3 +88,32 @@ export function resolveProviderRequestConfig(params: {
policy,
};
}
/**
 * Resolve the final outbound header set for a single provider request.
 *
 * Builds the default header set by combining the resolved request config
 * headers (which already fold in `defaultHeaders` via `providerHeaders`)
 * with the policy's attribution headers, then merges the caller-supplied
 * headers according to the requested precedence.
 *
 * @param params.callerHeaders - Headers supplied by the call site.
 * @param params.defaultHeaders - Provider defaults fed into config resolution.
 * @param params.precedence - "caller-wins" lets callers override defaults;
 *   anything else (including omitted) lets defaults win.
 * @returns The merged header map, or `undefined` when nothing merges.
 */
export function resolveProviderRequestHeaders(params: {
  provider: string;
  api?: RequestApi;
  baseUrl?: string;
  capability?: ProviderRequestCapability;
  transport?: ProviderRequestTransport;
  callerHeaders?: Record<string, string>;
  defaultHeaders?: Record<string, string>;
  precedence?: ProviderRequestHeaderPrecedence;
}): Record<string, string> | undefined {
  const { provider, api, baseUrl, capability, transport } = params;
  const requestConfig = resolveProviderRequestConfig({
    provider,
    api,
    baseUrl,
    capability,
    transport,
    providerHeaders: params.defaultHeaders,
  });
  const defaults = mergeProviderRequestHeaders(
    requestConfig.headers,
    requestConfig.policy.attributionHeaders,
  );
  if (params.precedence === "caller-wins") {
    return mergeProviderRequestHeaders(defaults, params.callerHeaders);
  }
  // When precedence is omitted, defaults-win is the conservative choice:
  // attribution/default headers cannot be silently overridden by callers.
  return mergeProviderRequestHeaders(params.callerHeaders, defaults);
}