fix: send OpenClaw attribution to OpenAI

This commit is contained in:
Peter Steinberger
2026-04-29 13:53:56 +01:00
parent 9881a808f2
commit d1b2d81752
27 changed files with 597 additions and 68 deletions

View File

@@ -26,6 +26,7 @@ Docs: https://docs.openclaw.ai
- Channels/Discord: treat bare numeric outbound targets that match the effective Discord DM allowlist as user DMs while preserving account-specific legacy `dm.allowFrom` precedence over inherited root `allowFrom`. (#74303) Thanks @Squirbie.
- Control UI: make the chat sidebar split divider focusable, keyboard-resizable, ARIA-described, and pointer-event based so sidebar resizing works without a mouse. Thanks @BunsDev.
- Agents/usage: keep PI embedded-run telemetry attributed to the resolved model provider instead of the PI harness label, so OpenRouter and other provider-backed turns report the right provider in session usage and traces. Thanks @vincentkoc.
- Agents/attribution: send OpenClaw attribution headers on native OpenAI and Codex traffic, including SDK transports, realtime voice and TTS, device-code auth, WHAM usage, and remote embeddings, so PI-origin defaults no longer leak into provider requests. Thanks @vincentkoc.
- Agents/auth: keep OAuth auth profiles inherited from the main agent read-through instead of copying refresh tokens into secondary agents, and refresh Codex app-server tokens against the owning store so multi-agent swarms avoid reused refresh-token failures. Fixes #74055. Thanks @ClarityInvest.
- Channels/Telegram: honor `ALL_PROXY` / `all_proxy` and service-level `OPENCLAW_PROXY_URL` when constructing the HTTP/1-only Telegram Bot API transport, so Windows and service installs that rely on those proxy settings no longer fall back to direct egress. Fixes #74014; refs #74086. Thanks @SymbolStar.
- Channels/Telegram: continue polling when `deleteWebhook` hits a transient network failure but `getWebhookInfo` confirms no webhook is configured, so startup does not retry cleanup forever after the webhook was already removed. Refs #74086; carries forward #47384. Thanks @clovericbot.

View File

@@ -1,2 +1,2 @@
c14ed336d7add0044299560f2fb2fa9272f23aae335799313f32c63521edc24e plugin-sdk-api-baseline.json
e096b25bd16bf1b0562a783609e9f7d945b6e29560ef8ad3fb433145fe084a5d plugin-sdk-api-baseline.jsonl
597577966dfee329740d7b0a331263afc26db518fe778f0fad95e2a01da88d83 plugin-sdk-api-baseline.json
65fb1cad5e5ec1764e3ccfcfd3fbb2e5cfb938ad34b45e6416bba0c00a1d735a plugin-sdk-api-baseline.jsonl

View File

@@ -20,6 +20,7 @@ function createJsonResponse(body: unknown, init?: { status?: number }) {
describe("loginOpenAICodexDeviceCode", () => {
it("requests a device code, polls for authorization, and exchanges OAuth tokens", async () => {
vi.useFakeTimers();
vi.stubEnv("OPENCLAW_VERSION", "2026.3.22");
try {
const fetchMock = vi
.fn()
@@ -78,6 +79,38 @@ describe("loginOpenAICodexDeviceCode", () => {
"https://auth.openai.com/api/accounts/deviceauth/usercode",
expect.objectContaining({
method: "POST",
headers: {
"Content-Type": "application/json",
originator: "openclaw",
version: "2026.3.22",
"User-Agent": "openclaw/2026.3.22",
},
}),
);
expect(fetchMock).toHaveBeenNthCalledWith(
2,
"https://auth.openai.com/api/accounts/deviceauth/token",
expect.objectContaining({
method: "POST",
headers: {
"Content-Type": "application/json",
originator: "openclaw",
version: "2026.3.22",
"User-Agent": "openclaw/2026.3.22",
},
}),
);
expect(fetchMock).toHaveBeenNthCalledWith(
4,
"https://auth.openai.com/oauth/token",
expect.objectContaining({
method: "POST",
headers: {
"Content-Type": "application/x-www-form-urlencoded",
originator: "openclaw",
version: "2026.3.22",
"User-Agent": "openclaw/2026.3.22",
},
}),
);
expect(onVerification).toHaveBeenCalledWith({
@@ -96,6 +129,7 @@ describe("loginOpenAICodexDeviceCode", () => {
expect(credentials.expires).toBeGreaterThan(Date.now());
} finally {
vi.useRealTimers();
vi.unstubAllEnvs();
}
});

View File

@@ -8,6 +8,16 @@ const OPENAI_CODEX_DEVICE_CODE_DEFAULT_INTERVAL_MS = 5_000;
const OPENAI_CODEX_DEVICE_CODE_MIN_INTERVAL_MS = 1_000;
const OPENAI_CODEX_DEVICE_CALLBACK_URL = `${OPENAI_AUTH_BASE_URL}/deviceauth/callback`;
/**
 * Builds the header set sent on OpenAI Codex device-code auth requests,
 * attributing the traffic to OpenClaw.
 *
 * @param contentType - Value for the Content-Type header.
 * @returns Headers with originator/version/User-Agent attribution; the
 *   version header is omitted when OPENCLAW_VERSION is unset or blank.
 */
function resolveOpenAICodexDeviceCodeHeaders(contentType: string): Record<string, string> {
  const headers: Record<string, string> = {
    "Content-Type": contentType,
    originator: "openclaw",
  };
  const version = process.env.OPENCLAW_VERSION?.trim();
  if (version) {
    headers.version = version;
    headers["User-Agent"] = `openclaw/${version}`;
  } else {
    headers["User-Agent"] = "openclaw";
  }
  return headers;
}
type OpenAICodexDeviceCodePrompt = {
verificationUrl: string;
userCode: string;
@@ -129,9 +139,7 @@ function formatDeviceCodeError(params: {
async function requestOpenAICodexDeviceCode(fetchFn: typeof fetch): Promise<RequestedDeviceCode> {
const response = await fetchFn(`${OPENAI_AUTH_BASE_URL}/api/accounts/deviceauth/usercode`, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
headers: resolveOpenAICodexDeviceCodeHeaders("application/json"),
body: JSON.stringify({
client_id: OPENAI_CODEX_CLIENT_ID,
}),
@@ -180,9 +188,7 @@ async function pollOpenAICodexDeviceCode(params: {
while (Date.now() < deadline) {
const response = await params.fetchFn(`${OPENAI_AUTH_BASE_URL}/api/accounts/deviceauth/token`, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
headers: resolveOpenAICodexDeviceCodeHeaders("application/json"),
body: JSON.stringify({
device_auth_id: params.deviceAuthId,
user_code: params.userCode,
@@ -229,9 +235,7 @@ async function exchangeOpenAICodexDeviceCode(params: {
}): Promise<OpenAICodexDeviceCodeCredentials> {
const response = await params.fetchFn(`${OPENAI_AUTH_BASE_URL}/oauth/token`, {
method: "POST",
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
headers: resolveOpenAICodexDeviceCodeHeaders("application/x-www-form-urlencoded"),
body: new URLSearchParams({
grant_type: "authorization_code",
code: params.authorizationCode,

View File

@@ -1,3 +1,4 @@
import { resolveProviderRequestHeaders } from "openclaw/plugin-sdk/provider-http";
import {
createRealtimeTranscriptionWebSocketSession,
type RealtimeTranscriptionProviderConfig,
@@ -107,7 +108,16 @@ function createOpenAIRealtimeTranscriptionSession(
providerId: "openai",
callbacks: config,
url: OPENAI_REALTIME_TRANSCRIPTION_URL,
headers: {
headers: resolveProviderRequestHeaders({
provider: "openai",
baseUrl: OPENAI_REALTIME_TRANSCRIPTION_URL,
capability: "audio",
transport: "websocket",
defaultHeaders: {
Authorization: `Bearer ${config.apiKey}`,
"OpenAI-Beta": "realtime=v1",
},
}) ?? {
Authorization: `Bearer ${config.apiKey}`,
"OpenAI-Beta": "realtime=v1",
},

View File

@@ -1,8 +1,8 @@
import { REALTIME_VOICE_AUDIO_FORMAT_PCM16_24KHZ } from "openclaw/plugin-sdk/realtime-voice";
import { beforeEach, describe, expect, it, vi } from "vitest";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { buildOpenAIRealtimeVoiceProvider } from "./realtime-voice-provider.js";
const { FakeWebSocket } = vi.hoisted(() => {
const { FakeWebSocket, fetchWithSsrFGuardMock } = vi.hoisted(() => {
type Listener = (...args: unknown[]) => void;
class MockWebSocket {
@@ -15,8 +15,10 @@ const { FakeWebSocket } = vi.hoisted(() => {
sent: string[] = [];
closed = false;
terminated = false;
args: unknown[];
constructor() {
constructor(...args: unknown[]) {
this.args = args;
MockWebSocket.instances.push(this);
}
@@ -49,13 +51,17 @@ const { FakeWebSocket } = vi.hoisted(() => {
}
}
return { FakeWebSocket: MockWebSocket };
return { FakeWebSocket: MockWebSocket, fetchWithSsrFGuardMock: vi.fn() };
});
vi.mock("ws", () => ({
default: FakeWebSocket,
}));
vi.mock("openclaw/plugin-sdk/ssrf-runtime", () => ({
fetchWithSsrFGuard: fetchWithSsrFGuardMock,
}));
type FakeWebSocketInstance = InstanceType<typeof FakeWebSocket>;
type SentRealtimeEvent = {
type: string;
@@ -70,9 +76,93 @@ function parseSent(socket: FakeWebSocketInstance): SentRealtimeEvent[] {
return socket.sent.map((payload: string) => JSON.parse(payload) as SentRealtimeEvent);
}
/** Wraps a value as a JSON HTTP Response (status defaults to 200) for fetch mocks. */
function createJsonResponse(body: unknown, init?: { status?: number }): Response {
  const status = init?.status ?? 200;
  const payload = JSON.stringify(body);
  return new Response(payload, {
    status,
    headers: { "Content-Type": "application/json" },
  });
}
describe("buildOpenAIRealtimeVoiceProvider", () => {
beforeEach(() => {
FakeWebSocket.instances = [];
fetchWithSsrFGuardMock.mockReset();
});
afterEach(() => {
vi.unstubAllEnvs();
});
it("adds OpenClaw attribution headers to native realtime websocket requests", () => {
vi.stubEnv("OPENCLAW_VERSION", "2026.3.22");
const provider = buildOpenAIRealtimeVoiceProvider();
const bridge = provider.createBridge({
providerConfig: { apiKey: "sk-test" }, // pragma: allowlist secret
onAudio: vi.fn(),
onClearAudio: vi.fn(),
});
void bridge.connect();
bridge.close();
const socket = FakeWebSocket.instances[0];
const options = socket?.args[1] as { headers?: Record<string, string> } | undefined;
expect(options?.headers).toMatchObject({
originator: "openclaw",
version: "2026.3.22",
"User-Agent": "openclaw/2026.3.22",
});
});
it("returns browser-safe OpenClaw attribution headers for native WebRTC offers", async () => {
vi.stubEnv("OPENCLAW_VERSION", "2026.3.22");
fetchWithSsrFGuardMock.mockResolvedValueOnce({
response: createJsonResponse({
client_secret: { value: "client-secret-123" },
expires_at: 1_765_000_000,
}),
release: vi.fn(async () => undefined),
});
const provider = buildOpenAIRealtimeVoiceProvider();
if (!provider.createBrowserSession) {
throw new Error("expected OpenAI realtime provider to support browser sessions");
}
const session = await provider.createBrowserSession({
providerConfig: { apiKey: "sk-test" }, // pragma: allowlist secret
instructions: "Be concise.",
});
expect(fetchWithSsrFGuardMock).toHaveBeenCalledWith(
expect.objectContaining({
url: "https://api.openai.com/v1/realtime/client_secrets",
init: expect.objectContaining({
method: "POST",
headers: expect.objectContaining({
Authorization: "Bearer sk-test", // pragma: allowlist secret
"Content-Type": "application/json",
originator: "openclaw",
version: "2026.3.22",
"User-Agent": "openclaw/2026.3.22",
}),
}),
}),
);
expect(session).toMatchObject({
provider: "openai",
transport: "webrtc-sdp",
clientSecret: "client-secret-123",
offerUrl: "https://api.openai.com/v1/realtime/calls",
offerHeaders: {
originator: "openclaw",
version: "2026.3.22",
},
});
expect((session as { offerHeaders?: Record<string, string> }).offerHeaders).not.toHaveProperty(
"User-Agent",
);
});
it("normalizes provider-owned voice settings from raw provider config", () => {

View File

@@ -1,5 +1,8 @@
import { randomUUID } from "node:crypto";
import { createProviderHttpError } from "openclaw/plugin-sdk/provider-http";
import {
createProviderHttpError,
resolveProviderRequestHeaders,
} from "openclaw/plugin-sdk/provider-http";
import {
captureWsEvent,
createDebugProxyWebSocketAgent,
@@ -350,11 +353,18 @@ class OpenAIRealtimeVoiceBridge implements RealtimeVoiceBridge {
.replace(/\/$/, "")
.replace(/^http(s?):/, (_, secure: string) => `ws${secure}:`);
const apiVersion = cfg.azureApiVersion ?? "2024-10-01-preview";
const url = `${base}/openai/realtime?api-version=${apiVersion}&deployment=${encodeURIComponent(
cfg.azureDeployment,
)}`;
return {
url: `${base}/openai/realtime?api-version=${apiVersion}&deployment=${encodeURIComponent(
cfg.azureDeployment,
)}`,
headers: { "api-key": cfg.apiKey },
url,
headers: resolveProviderRequestHeaders({
provider: "openai",
baseUrl: url,
capability: "audio",
transport: "websocket",
defaultHeaders: { "api-key": cfg.apiKey },
}) ?? { "api-key": cfg.apiKey },
};
}
@@ -362,19 +372,36 @@ class OpenAIRealtimeVoiceBridge implements RealtimeVoiceBridge {
const base = cfg.azureEndpoint
.replace(/\/$/, "")
.replace(/^http(s?):/, (_, secure: string) => `ws${secure}:`);
const url = `${base}/v1/realtime?model=${encodeURIComponent(
cfg.model ?? OpenAIRealtimeVoiceBridge.DEFAULT_MODEL,
)}`;
return {
url: `${base}/v1/realtime?model=${encodeURIComponent(
cfg.model ?? OpenAIRealtimeVoiceBridge.DEFAULT_MODEL,
)}`,
headers: { Authorization: `Bearer ${cfg.apiKey}` },
url,
headers: resolveProviderRequestHeaders({
provider: "openai",
baseUrl: url,
capability: "audio",
transport: "websocket",
defaultHeaders: { Authorization: `Bearer ${cfg.apiKey}` },
}) ?? { Authorization: `Bearer ${cfg.apiKey}` },
};
}
const url = `wss://api.openai.com/v1/realtime?model=${encodeURIComponent(
cfg.model ?? OpenAIRealtimeVoiceBridge.DEFAULT_MODEL,
)}`;
return {
url: `wss://api.openai.com/v1/realtime?model=${encodeURIComponent(
cfg.model ?? OpenAIRealtimeVoiceBridge.DEFAULT_MODEL,
)}`,
headers: {
url,
headers: resolveProviderRequestHeaders({
provider: "openai",
baseUrl: url,
capability: "audio",
transport: "websocket",
defaultHeaders: {
Authorization: `Bearer ${cfg.apiKey}`,
"OpenAI-Beta": "realtime=v1",
},
}) ?? {
Authorization: `Bearer ${cfg.apiKey}`,
"OpenAI-Beta": "realtime=v1",
},
@@ -602,6 +629,20 @@ function readStringField(value: unknown, key: string): string | undefined {
return typeof raw === "string" && raw.trim() ? raw.trim() : undefined;
}
function resolveOpenAIRealtimeBrowserOfferHeaders(): Record<string, string> | undefined {
const headers = resolveProviderRequestHeaders({
provider: "openai",
baseUrl: "https://api.openai.com/v1/realtime/calls",
capability: "audio",
transport: "http",
defaultHeaders: {},
});
const browserHeaders = Object.fromEntries(
Object.entries(headers ?? {}).filter(([key]) => key.toLowerCase() !== "user-agent"),
);
return Object.keys(browserHeaders).length > 0 ? browserHeaders : undefined;
}
async function createOpenAIRealtimeBrowserSession(
req: RealtimeVoiceBrowserSessionCreateRequest,
): Promise<RealtimeVoiceBrowserSession> {
@@ -633,7 +674,16 @@ async function createOpenAIRealtimeBrowserSession(
url: "https://api.openai.com/v1/realtime/client_secrets",
init: {
method: "POST",
headers: {
headers: resolveProviderRequestHeaders({
provider: "openai",
baseUrl: "https://api.openai.com/v1/realtime/client_secrets",
capability: "audio",
transport: "http",
defaultHeaders: {
Authorization: `Bearer ${apiKey}`,
"Content-Type": "application/json",
},
}) ?? {
Authorization: `Bearer ${apiKey}`,
"Content-Type": "application/json",
},
@@ -663,11 +713,13 @@ async function createOpenAIRealtimeBrowserSession(
payload && typeof payload === "object"
? (payload as Record<string, unknown>).expires_at
: undefined;
const offerHeaders = resolveOpenAIRealtimeBrowserOfferHeaders();
return {
provider: "openai",
transport: "webrtc-sdp",
clientSecret,
offerUrl: "https://api.openai.com/v1/realtime/calls",
...(offerHeaders ? { offerHeaders } : {}),
model,
voice,
...(typeof expiresAt === "number" ? { expiresAt } : {}),

View File

@@ -111,6 +111,36 @@ describe("openai tts", () => {
});
describe("openaiTTS diagnostics", () => {
it("adds OpenClaw attribution headers to native OpenAI speech requests", async () => {
vi.stubEnv("OPENCLAW_VERSION", "2026.3.22");
const fetchMock = vi.fn(
async (_url: string | URL, _init?: RequestInit) =>
new Response(Buffer.from("audio-bytes"), { status: 200 }),
);
globalThis.fetch = fetchMock as unknown as typeof fetch;
await openaiTTS({
text: "hello",
apiKey: "test-key",
baseUrl: "https://api.openai.com/v1",
model: "gpt-4o-mini-tts",
voice: "alloy",
responseFormat: "mp3",
timeoutMs: 5_000,
});
expect(fetchMock).toHaveBeenCalledWith(
"https://api.openai.com/v1/audio/speech",
expect.objectContaining({
headers: expect.objectContaining({
originator: "openclaw",
version: "2026.3.22",
"User-Agent": "openclaw/2026.3.22",
}),
}),
);
});
it("sends instructions to custom OpenAI-compatible endpoints", async () => {
const fetchMock = vi.fn(
async (_url: string | URL, _init?: RequestInit) =>

View File

@@ -1,4 +1,7 @@
import { assertOkOrThrowProviderError } from "openclaw/plugin-sdk/provider-http";
import {
assertOkOrThrowProviderError,
resolveProviderRequestHeaders,
} from "openclaw/plugin-sdk/provider-http";
import {
captureHttpExchange,
isDebugProxyGlobalFetchPatchInstalled,
@@ -97,7 +100,16 @@ export async function openaiTTS(params: {
throw new Error(`Invalid voice: ${voice}`);
}
const requestHeaders = {
const requestHeaders = resolveProviderRequestHeaders({
provider: "openai",
baseUrl,
capability: "audio",
transport: "http",
defaultHeaders: {
Authorization: `Bearer ${apiKey}`,
"Content-Type": "application/json",
},
}) ?? {
Authorization: `Bearer ${apiKey}`,
"Content-Type": "application/json",
};

View File

@@ -0,0 +1,29 @@
import { describe, expect, it, vi } from "vitest";
import { resolveRemoteEmbeddingBearerClient } from "./embeddings-remote-client.js";
// Verifies that embedding traffic aimed at OpenAI's own host is re-attributed
// to OpenClaw: caller-supplied "pi" attribution headers must be overwritten.
describe("resolveRemoteEmbeddingBearerClient", () => {
  it("adds OpenClaw attribution to native OpenAI embedding requests", async () => {
    // Pin the version so the expected header values are deterministic.
    // NOTE(review): no vi.unstubAllEnvs() afterwards — confirm the project's
    // convention tolerates the stub outliving this test within the file.
    vi.stubEnv("OPENCLAW_VERSION", "2026.3.22");
    const client = await resolveRemoteEmbeddingBearerClient({
      provider: "openai",
      defaultBaseUrl: "https://api.openai.com/v1",
      options: {
        config: { models: {} } as never,
        model: "text-embedding-3-large",
        remote: {
          apiKey: "sk-test",
          // Deliberately hostile input: upstream "pi" attribution headers
          // that the resolver must clobber for api.openai.com routes.
          headers: {
            originator: "pi",
            "User-Agent": "pi",
          },
        },
      },
    });
    expect(client.headers).toMatchObject({
      originator: "openclaw",
      version: "2026.3.22",
      "User-Agent": "openclaw/2026.3.22",
    });
  });
});

View File

@@ -7,6 +7,26 @@ import { normalizeOptionalString } from "./string-utils.js";
export type RemoteEmbeddingProviderId = string;
/**
 * OpenClaw attribution headers attached to native OpenAI embedding requests.
 * The typeof-process guard keeps the helper safe in runtimes without a
 * Node-style global; a blank OPENCLAW_VERSION is treated as unset.
 */
function resolveOpenClawAttributionHeaders(): Record<string, string> {
  let version: string | undefined;
  if (typeof process !== "undefined") {
    version = process.env.OPENCLAW_VERSION?.trim();
  }
  const headers: Record<string, string> = { originator: "openclaw" };
  if (version) {
    headers.version = version;
  }
  headers["User-Agent"] = version ? `openclaw/${version}` : "openclaw";
  return headers;
}
/**
 * True when an embedding request targets OpenAI's own API host — i.e. when
 * OpenClaw attribution headers should be attached.
 *
 * Hostname comparison is case-insensitive and strips trailing dots so
 * "API.OPENAI.COM." still matches; a base URL that fails to parse is
 * treated as non-native.
 */
function isNativeOpenAIEmbeddingRoute(provider: string, baseUrl: string): boolean {
  if (provider !== "openai") {
    return false;
  }
  let hostname: string;
  try {
    hostname = new URL(baseUrl).hostname;
  } catch {
    return false;
  }
  return hostname.toLowerCase().replace(/\.+$/, "") === "api.openai.com";
}
export async function resolveRemoteEmbeddingBearerClient(params: {
provider: RemoteEmbeddingProviderId;
options: EmbeddingProviderOptions;
@@ -37,5 +57,8 @@ export async function resolveRemoteEmbeddingBearerClient(params: {
Authorization: `Bearer ${apiKey}`,
...headerOverrides,
};
if (isNativeOpenAIEmbeddingRoute(params.provider, baseUrl)) {
Object.assign(headers, resolveOpenClawAttributionHeaders());
}
return { baseUrl, headers, ssrfPolicy: buildRemoteBaseUrlPolicy(baseUrl) };
}

View File

@@ -895,6 +895,8 @@ describe("markAuthProfileFailure — WHAM-aware Codex cooldowns", () => {
headers: expect.objectContaining({
Authorization: "Bearer codex-access-token",
"ChatGPT-Account-Id": "acct_test_123",
originator: "openclaw",
"User-Agent": expect.stringMatching(/^openclaw\//),
}),
}),
);

View File

@@ -1,5 +1,6 @@
import type { OpenClawConfig } from "../../config/types.openclaw.js";
import { normalizeProviderId } from "../provider-id.js";
import { resolveProviderRequestHeaders } from "../provider-request-config.js";
import { logAuthProfileFailureStateChange } from "./state-observation.js";
import { saveAuthProfileStore, updateAuthProfileStoreWithLock } from "./store.js";
import type { AuthProfileFailureReason, AuthProfileStore, ProfileUsageStats } from "./types.js";
@@ -161,14 +162,21 @@ export async function probeWhamForCooldown(
const controller = new AbortController();
const timeout = setTimeout(() => controller.abort(), WHAM_TIMEOUT_MS);
try {
const headers: Record<string, string> = {
const defaultHeaders: Record<string, string> = {
Authorization: `Bearer ${profile.access}`,
Accept: "application/json",
"User-Agent": "CodexBar",
};
if (profile.accountId) {
headers["ChatGPT-Account-Id"] = profile.accountId;
defaultHeaders["ChatGPT-Account-Id"] = profile.accountId;
}
const headers =
resolveProviderRequestHeaders({
provider: "openai-codex",
baseUrl: WHAM_USAGE_URL,
capability: "other",
transport: "http",
defaultHeaders,
}) ?? defaultHeaders;
const res = await fetch(WHAM_USAGE_URL, {
method: "GET",

View File

@@ -1,6 +1,6 @@
import { createServer } from "node:http";
import type { Model } from "@mariozechner/pi-ai";
import { describe, expect, it } from "vitest";
import { describe, expect, it, vi } from "vitest";
import {
buildOpenAIResponsesParams,
buildOpenAICompletionsParams,
@@ -21,6 +21,72 @@ import {
import { SYSTEM_PROMPT_CACHE_BOUNDARY } from "./system-prompt-cache-boundary.js";
describe("openai transport stream", () => {
it("adds OpenClaw attribution to native OpenAI transport headers and protects it from pi", () => {
vi.stubEnv("OPENCLAW_VERSION", "2026.3.22");
const headers = __testing.buildOpenAIClientHeaders(
{
id: "gpt-5.4",
name: "GPT-5.4",
api: "openai-responses",
provider: "openai",
baseUrl: "https://api.openai.com/v1",
headers: {
originator: "pi",
"User-Agent": "pi",
"X-Provider": "model",
},
reasoning: true,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 200000,
maxTokens: 8192,
} satisfies Model<"openai-responses">,
{ systemPrompt: "", messages: [] } as never,
{
originator: "pi",
"User-Agent": "pi",
"X-Caller": "request",
},
);
expect(headers).toMatchObject({
originator: "openclaw",
version: "2026.3.22",
"User-Agent": "openclaw/2026.3.22",
"X-Provider": "model",
"X-Caller": "request",
});
});
it("adds OpenClaw attribution to native OpenAI Codex transport headers", () => {
vi.stubEnv("OPENCLAW_VERSION", "2026.3.22");
const headers = __testing.buildOpenAIClientHeaders(
{
id: "gpt-5.4-codex",
name: "GPT-5.4 Codex",
api: "openai-codex-responses",
provider: "openai-codex",
baseUrl: "https://chatgpt.com/backend-api",
headers: {
originator: "pi",
"User-Agent": "pi",
},
reasoning: true,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 200000,
maxTokens: 8192,
} satisfies Model<"openai-codex-responses">,
{ systemPrompt: "", messages: [] } as never,
);
expect(headers).toMatchObject({
originator: "openclaw",
version: "2026.3.22",
"User-Agent": "openclaw/2026.3.22",
});
});
it("moves Azure OpenAI completions api-version headers into default query params", () => {
const config = __testing.buildOpenAICompletionsClientConfig(
{

View File

@@ -43,6 +43,7 @@ import {
resolveOpenAIStrictToolFlagForInventory,
resolveOpenAIStrictToolSetting,
} from "./openai-tool-schema.js";
import { resolveProviderRequestPolicyConfig } from "./provider-request-config.js";
import {
buildGuardedModelFetch,
resolveModelRequestTimeoutMs,
@@ -630,23 +631,28 @@ function buildOpenAIClientHeaders(
optionHeaders?: Record<string, string>,
turnHeaders?: Record<string, string>,
): Record<string, string> {
const headers = { ...model.headers };
const providerHeaders = { ...model.headers };
if (model.provider === "github-copilot") {
Object.assign(
headers,
providerHeaders,
buildCopilotDynamicHeaders({
messages: context.messages,
hasImages: hasCopilotVisionInput(context.messages),
}),
);
}
if (optionHeaders) {
Object.assign(headers, optionHeaders);
}
if (turnHeaders) {
Object.assign(headers, turnHeaders);
}
return headers;
const callerHeaders = { ...optionHeaders, ...turnHeaders };
const headers = resolveProviderRequestPolicyConfig({
provider: model.provider,
api: model.api,
baseUrl: model.baseUrl,
capability: "llm",
transport: "stream",
providerHeaders,
callerHeaders: Object.keys(callerHeaders).length > 0 ? callerHeaders : undefined,
precedence: "caller-wins",
}).headers;
return headers ?? {};
}
function resolveProviderTransportTurnState(
@@ -1869,6 +1875,7 @@ function mapStopReason(reason: string | null) {
}
export const __testing = {
buildOpenAIClientHeaders,
buildOpenAISdkClientOptions,
buildOpenAISdkRequestOptions,
createAzureOpenAIClient,

View File

@@ -2,7 +2,10 @@ import type { StreamFn } from "@mariozechner/pi-agent-core";
import type { Model } from "@mariozechner/pi-ai";
import { createAssistantMessageEventStream } from "@mariozechner/pi-ai";
import { describe, expect, it } from "vitest";
import { createOpenAIThinkingLevelWrapper } from "./openai-stream-wrappers.js";
import {
createOpenAIAttributionHeadersWrapper,
createOpenAIThinkingLevelWrapper,
} from "./openai-stream-wrappers.js";
function createPayloadCapture(opts?: { initialReasoning?: unknown }) {
const payloads: Array<Record<string, unknown>> = [];
@@ -206,3 +209,44 @@ describe("createOpenAIThinkingLevelWrapper", () => {
expect(payloads[0]?.reasoning).toEqual({ effort: "xhigh" });
});
});
// Verifies the attribution wrapper diverts native Codex traffic to the
// OpenClaw-owned transport and rewrites pi-origin attribution headers.
describe("createOpenAIAttributionHeadersWrapper", () => {
  it("routes native Codex traffic through the OpenClaw transport instead of pi upstream", () => {
    let upstreamCalls = 0;
    let codexCalls = 0;
    let capturedHeaders: Record<string, string> | undefined;
    // The pi upstream must never run for a native Codex model.
    const upstream: StreamFn = () => {
      upstreamCalls += 1;
      return createAssistantMessageEventStream();
    };
    // Injected Codex transport records the headers it is handed.
    const codexTransport: StreamFn = (_model, _context, options) => {
      codexCalls += 1;
      capturedHeaders = options?.headers;
      return createAssistantMessageEventStream();
    };
    const wrapped = createOpenAIAttributionHeadersWrapper(upstream, {
      codexNativeTransportStreamFn: codexTransport,
    });
    // chatgpt.com/backend-api is the native Codex host, so the wrapper should
    // take the Codex branch rather than calling the upstream StreamFn.
    void wrapped(
      {
        ...codexModel,
        baseUrl: "https://chatgpt.com/backend-api",
      } as Model<"openai-codex-responses">,
      { messages: [] },
      {
        // pi-origin attribution that must be replaced, not forwarded.
        headers: {
          originator: "pi",
          "User-Agent": "pi",
        },
      },
    );
    expect(upstreamCalls).toBe(0);
    expect(codexCalls).toBe(1);
    expect(capturedHeaders).toMatchObject({
      originator: "openclaw",
      "User-Agent": expect.stringMatching(/^openclaw\//),
    });
  });
});

View File

@@ -15,6 +15,7 @@ import {
resolveOpenAIResponsesPayloadPolicy,
} from "../openai-responses-payload-policy.js";
import { resolveOpenAITextVerbosity, type OpenAITextVerbosity } from "../openai-text-verbosity.js";
import { createOpenAIResponsesTransportStreamFn } from "../openai-transport-stream.js";
import { resolveProviderRequestPolicyConfig } from "../provider-request-config.js";
import { log } from "./logger.js";
import { mapThinkingLevelToReasoningEffort } from "./reasoning-effort-utils.js";
@@ -489,6 +490,7 @@ export function createOpenAIDefaultTransportWrapper(baseStreamFn: StreamFn | und
export function createOpenAIAttributionHeadersWrapper(
baseStreamFn: StreamFn | undefined,
opts?: { codexNativeTransportStreamFn?: StreamFn },
): StreamFn {
const underlying = baseStreamFn ?? streamSimple;
return (model, context, options) => {
@@ -496,7 +498,11 @@ export function createOpenAIAttributionHeadersWrapper(
if (!attributionProvider) {
return underlying(model, context, options);
}
return underlying(model, context, {
const streamFn =
attributionProvider === "openai-codex"
? (opts?.codexNativeTransportStreamFn ?? createOpenAIResponsesTransportStreamFn())
: underlying;
return streamFn(model, context, {
...options,
headers: resolveProviderRequestPolicyConfig({
provider: attributionProvider,

View File

@@ -606,7 +606,7 @@ describe("provider attribution", () => {
});
});
it("requires the dedicated OpenAI audio transcription API for audio attribution", () => {
it("applies OpenAI attribution to every verified native capability", () => {
expect(
resolveProviderRequestPolicy({
provider: "openai",
@@ -636,14 +636,25 @@ describe("provider attribution", () => {
expect(
resolveProviderRequestPolicy({
provider: "openai",
api: "not-openai-audio",
baseUrl: "https://api.openai.com/v1",
transport: "media-understanding",
transport: "http",
capability: "image",
}),
).toMatchObject({
attributionProvider: "openai",
allowsHiddenAttribution: true,
});
expect(
resolveProviderRequestPolicy({
provider: "openai",
baseUrl: "https://api.openai.com/v1",
transport: "websocket",
capability: "audio",
}),
).toMatchObject({
attributionProvider: undefined,
allowsHiddenAttribution: false,
attributionProvider: "openai",
allowsHiddenAttribution: true,
});
});

View File

@@ -601,7 +601,6 @@ export function resolveProviderRequestPolicy(
const policy = resolveProviderAttributionPolicy(provider, env);
const endpointResolution = resolveProviderEndpoint(input.baseUrl);
const endpointClass = endpointResolution.endpointClass;
const api = normalizeOptionalLowercaseString(input.api);
const usesConfiguredBaseUrl = endpointClass !== "default";
const usesKnownNativeOpenAIEndpoint =
endpointClass === "openai-public" ||
@@ -614,19 +613,9 @@ export function resolveProviderRequestPolicy(
const usesExplicitProxyLikeEndpoint = usesConfiguredBaseUrl && !usesKnownNativeOpenAIEndpoint;
let attributionProvider: string | undefined;
if (
provider === "openai" &&
(api === "openai-completions" ||
api === "openai-responses" ||
(input.capability === "audio" && api === "openai-audio-transcriptions")) &&
usesOpenAIPublicAttributionHost
) {
if (provider === "openai" && usesOpenAIPublicAttributionHost) {
attributionProvider = "openai";
} else if (
provider === "openai-codex" &&
(api === "openai-codex-responses" || api === "openai-responses") &&
usesOpenAICodexAttributionHost
) {
} else if (provider === "openai-codex" && usesOpenAICodexAttributionHost) {
attributionProvider = "openai-codex";
} else if (provider === "openrouter" && policy?.enabledByDefault) {
// OpenRouter attribution is documented, but only apply it to known

View File

@@ -102,6 +102,7 @@ const BrowserRealtimeWebRtcSdpSessionSchema = Type.Object(
transport: Type.Optional(Type.Literal("webrtc-sdp")),
clientSecret: NonEmptyString,
offerUrl: Type.Optional(Type.String()),
offerHeaders: Type.Optional(Type.Record(Type.String(), Type.String())),
model: Type.Optional(Type.String()),
voice: Type.Optional(Type.String()),
expiresAt: Type.Optional(Type.Number()),

View File

@@ -27,6 +27,8 @@ describe("fetchCodexUsage", () => {
const mockFetch = createProviderUsageFetch(async (_url, init) => {
const headers = (init?.headers as Record<string, string> | undefined) ?? {};
expect(headers["ChatGPT-Account-Id"]).toBe("acct-1");
expect(headers.originator).toBe("openclaw");
expect(headers["User-Agent"]).toMatch(/^openclaw\//);
return makeResponse(200, {
rate_limit: {
primary_window: {

View File

@@ -1,3 +1,4 @@
import { resolveProviderRequestHeaders } from "../agents/provider-request-config.js";
import { buildUsageHttpErrorSnapshot, fetchJson } from "./provider-usage.fetch.shared.js";
import { clampPercent, PROVIDER_LABELS } from "./provider-usage.shared.js";
import type { ProviderUsageSnapshot, UsageWindow } from "./provider-usage.types.js";
@@ -53,14 +54,21 @@ export async function fetchCodexUsage(
timeoutMs: number,
fetchFn: typeof fetch,
): Promise<ProviderUsageSnapshot> {
const headers: Record<string, string> = {
const defaultHeaders: Record<string, string> = {
Authorization: `Bearer ${token}`,
"User-Agent": "CodexBar",
Accept: "application/json",
};
if (accountId) {
headers["ChatGPT-Account-Id"] = accountId;
defaultHeaders["ChatGPT-Account-Id"] = accountId;
}
const headers =
resolveProviderRequestHeaders({
provider: "openai-codex",
baseUrl: "https://chatgpt.com/backend-api/wham/usage",
capability: "other",
transport: "http",
defaultHeaders,
}) ?? defaultHeaders;
const res = await fetchJson(
"https://chatgpt.com/backend-api/wham/usage",

View File

@@ -48,6 +48,7 @@ export type {
ProviderRequestTlsOverride,
ProviderRequestTransportOverrides,
} from "../agents/provider-request-config.js";
export { resolveProviderRequestHeaders } from "../agents/provider-request-config.js";
export {
resolveProviderEndpoint,
resolveProviderRequestCapabilities,

View File

@@ -102,6 +102,7 @@ export type RealtimeVoiceBrowserWebRtcSdpSession = {
transport?: "webrtc-sdp";
clientSecret: string;
offerUrl?: string;
offerHeaders?: Record<string, string>;
model?: string;
voice?: string;
expiresAt?: number;

View File

@@ -24,6 +24,7 @@ export type RealtimeTalkWebRtcSdpSessionResult = {
transport?: "webrtc-sdp";
clientSecret: string;
offerUrl?: string;
offerHeaders?: Record<string, string>;
model?: string;
voice?: string;
expiresAt?: number;

View File

@@ -72,6 +72,7 @@ export class WebRtcSdpRealtimeTalkTransport implements RealtimeTalkTransport {
method: "POST",
body: offer.sdp,
headers: {
...this.session.offerHeaders,
Authorization: `Bearer ${this.session.clientSecret}`,
"Content-Type": "application/sdp",
},

View File

@@ -0,0 +1,96 @@
// @vitest-environment jsdom
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { WebRtcSdpRealtimeTalkTransport } from "./chat/realtime-talk-webrtc.ts";
// Minimal RTCDataChannel stand-in: spies on send() and flips readyState when
// closed, so the transport under test can be driven without real WebRTC.
class FakeDataChannel extends EventTarget {
  readyState: RTCDataChannelState = "open";
  // Spy so tests can assert on outgoing data-channel payloads.
  send = vi.fn();
  close = vi.fn(() => {
    // Mirror real channel behavior: closing transitions readyState.
    this.readyState = "closed";
  });
}
// Fake RTCPeerConnection that captures the SDP descriptions it is given and
// exposes one pre-built data channel, letting tests observe the negotiation
// steps without any browser WebRTC stack.
class FakePeerConnection extends EventTarget {
  connectionState: RTCPeerConnectionState = "new";
  // Single shared channel so tests can reach the instance created below.
  readonly channel = new FakeDataChannel();
  readonly addTrack = vi.fn();
  // Captured by set{Local,Remote}Description for later assertions.
  localDescription: RTCSessionDescriptionInit | null = null;
  remoteDescription: RTCSessionDescriptionInit | null = null;
  createDataChannel(): RTCDataChannel {
    return this.channel as unknown as RTCDataChannel;
  }
  // Fixed offer SDP keeps the outgoing request body deterministic.
  async createOffer(): Promise<RTCSessionDescriptionInit> {
    return { type: "offer", sdp: "offer-sdp" };
  }
  async setLocalDescription(description: RTCSessionDescriptionInit): Promise<void> {
    this.localDescription = description;
  }
  async setRemoteDescription(description: RTCSessionDescriptionInit): Promise<void> {
    this.remoteDescription = description;
  }
  close(): void {
    this.connectionState = "closed";
  }
}
// End-to-end check that the WebRTC SDP transport forwards provider-supplied
// offer headers on the SDP POST, merged under its own required headers.
describe("WebRtcSdpRealtimeTalkTransport", () => {
  afterEach(() => {
    // Drop the fetch/RTCPeerConnection stubs installed in beforeEach.
    // NOTE(review): the navigator.mediaDevices defineProperty below is not
    // undone by unstubAllGlobals — confirm cross-test leakage is acceptable.
    vi.unstubAllGlobals();
  });
  beforeEach(() => {
    // Fake microphone: one stoppable audio track behind getUserMedia, since
    // jsdom provides no media devices.
    const track = { stop: vi.fn() } as unknown as MediaStreamTrack;
    const stream = {
      getAudioTracks: () => [track],
      getTracks: () => [track],
    } as unknown as MediaStream;
    Object.defineProperty(globalThis.navigator, "mediaDevices", {
      configurable: true,
      value: {
        getUserMedia: vi.fn(async () => stream),
      },
    });
    vi.stubGlobal("RTCPeerConnection", FakePeerConnection as unknown as typeof RTCPeerConnection);
  });
  it("sends provider offer headers with the WebRTC SDP request", async () => {
    const fetchMock = vi.fn(async () => new Response("answer-sdp"));
    vi.stubGlobal("fetch", fetchMock as unknown as typeof fetch);
    const transport = new WebRtcSdpRealtimeTalkTransport(
      {
        provider: "openai",
        transport: "webrtc-sdp",
        clientSecret: "client-secret-123",
        offerUrl: "https://api.openai.com/v1/realtime/calls",
        // Attribution headers that must be merged into the SDP POST.
        offerHeaders: {
          originator: "openclaw",
          version: "2026.3.22",
        },
      },
      {
        client: {} as never,
        sessionKey: "main",
        callbacks: {},
      },
    );
    await transport.start();
    // Exact-match assertion: offer headers are present alongside (and cannot
    // clobber) the transport's own Authorization and Content-Type headers.
    expect(fetchMock).toHaveBeenCalledWith("https://api.openai.com/v1/realtime/calls", {
      method: "POST",
      body: "offer-sdp",
      headers: {
        originator: "openclaw",
        version: "2026.3.22",
        Authorization: "Bearer client-secret-123",
        "Content-Type": "application/sdp",
      },
    });
    transport.stop();
  });
});