refactor(providers): centralize request attribution policy (#59433)

* refactor(providers): centralize request attribution policy

* style(providers): normalize request policy formatting

* style(providers): normalize request policy formatting

* style(providers): normalize request policy formatting

* docs(changelog): note provider request policy fix

* fix(providers): tighten request policy gates
This commit is contained in:
Vincent Koc
2026-04-02 14:10:53 +09:00
committed by GitHub
parent 4309dc6d5e
commit 1a037ff6cd
11 changed files with 581 additions and 83 deletions

View File

@@ -17,6 +17,7 @@ Docs: https://docs.openclaw.ai
- Gateway/exec loopback: restore legacy-role fallback for empty paired-device token maps and allow silent local role upgrades so local exec and node clients stop failing with pairing-required errors after `2026.3.31`. (#59092) Thanks @openperf.
- WhatsApp/media: add HTML, XML, and CSS to the MIME map and fall back gracefully for unknown media types instead of dropping the attachment. (#51562) Thanks @bobbyt74.
- WhatsApp/presence: send `unavailable` presence on connect in self-chat mode so personal-phone users stop losing all push notifications while the gateway is running. Thanks @mcaxtr.
- Providers/OpenAI-compatible routing: centralize native-vs-proxy request policy so hidden attribution and related OpenAI-family defaults only apply on verified native endpoints across stream, websocket, and shared audio HTTP paths. Thanks @vincentkoc.
## 2026.4.1-beta.1

View File

@@ -12,6 +12,7 @@ export const DEFAULT_OPENAI_AUDIO_BASE_URL = "https://api.openai.com/v1";
export async function transcribeOpenAiAudio(params: AudioTranscriptionRequest) {
return await transcribeOpenAiCompatibleAudio({
...params,
provider: "openai",
defaultBaseUrl: DEFAULT_OPENAI_AUDIO_BASE_URL,
defaultModel: OPENAI_DEFAULT_AUDIO_TRANSCRIPTION_MODEL,
});

View File

@@ -251,6 +251,28 @@ describe("OpenAIWebSocketManager", () => {
await connectPromise;
});
it("does not add hidden attribution headers on custom websocket endpoints", async () => {
const manager = buildManager({
url: "wss://proxy.example.com/v1/responses",
});
const connectPromise = manager.connect("sk-test-key");
const sock = lastSocket();
expect(sock.options).toMatchObject({
headers: expect.objectContaining({
Authorization: "Bearer sk-test-key",
"OpenAI-Beta": "responses-websocket=v1",
}),
});
const headers = sock.options?.headers as Record<string, string>;
expect(headers.originator).toBeUndefined();
expect(headers.version).toBeUndefined();
expect(headers["User-Agent"]).toBeUndefined();
sock.simulateOpen();
await connectPromise;
});
it("resolves when the connection opens", async () => {
const manager = buildManager();
const connectPromise = manager.connect("sk-test");

View File

@@ -15,7 +15,7 @@
import { EventEmitter } from "node:events";
import WebSocket, { type ClientOptions } from "ws";
import { resolveProviderAttributionHeaders } from "./provider-attribution.js";
import { resolveProviderRequestAttributionHeaders } from "./provider-attribution.js";
// ─────────────────────────────────────────────────────────────────────────────
// WebSocket Event Types (Server → Client)
@@ -259,14 +259,6 @@ const MAX_RETRIES = 5;
/** Backoff delays in ms: 1s, 2s, 4s, 8s, 16s */
const BACKOFF_DELAYS_MS = [1000, 2000, 4000, 8000, 16000] as const;
function isOpenAIPublicWebSocketUrl(url: string): boolean {
try {
return new URL(url).hostname.toLowerCase() === "api.openai.com";
} catch {
return url.toLowerCase().includes("api.openai.com");
}
}
export interface OpenAIWebSocketManagerOptions {
/** Override the default WebSocket URL (useful for testing) */
url?: string;
@@ -414,9 +406,13 @@ export class OpenAIWebSocketManager extends EventEmitter<InternalEvents> {
headers: {
Authorization: `Bearer ${this.apiKey}`,
"OpenAI-Beta": "responses-websocket=v1",
...(isOpenAIPublicWebSocketUrl(this.wsUrl)
? resolveProviderAttributionHeaders("openai")
: undefined),
...resolveProviderRequestAttributionHeaders({
provider: "openai",
api: "openai-responses",
baseUrl: this.wsUrl,
capability: "llm",
transport: "websocket",
}),
},
});

View File

@@ -6,7 +6,10 @@ import {
patchCodexNativeWebSearchPayload,
resolveCodexNativeSearchActivation,
} from "../codex-native-web-search.js";
import { resolveProviderAttributionHeaders } from "../provider-attribution.js";
import {
resolveProviderRequestAttributionHeaders,
resolveProviderRequestPolicy,
} from "../provider-attribution.js";
import { log } from "./logger.js";
import { streamWithPayloadPatch } from "./stream-payload-utils.js";
@@ -22,48 +25,18 @@ const OPENAI_REASONING_COMPAT_PROVIDERS = new Set([
"azure-openai-responses",
]);
function isDirectOpenAIBaseUrl(baseUrl: unknown): boolean {
if (typeof baseUrl !== "string" || !baseUrl.trim()) {
return false;
}
try {
const host = new URL(baseUrl).hostname.toLowerCase();
return (
host === "api.openai.com" || host === "chatgpt.com" || host.endsWith(".openai.azure.com")
);
} catch {
const normalized = baseUrl.toLowerCase();
return (
normalized.includes("api.openai.com") ||
normalized.includes("chatgpt.com") ||
normalized.includes(".openai.azure.com")
);
}
}
function isOpenAIPublicApiBaseUrl(baseUrl: unknown): boolean {
if (typeof baseUrl !== "string" || !baseUrl.trim()) {
return false;
}
try {
return new URL(baseUrl).hostname.toLowerCase() === "api.openai.com";
} catch {
return baseUrl.toLowerCase().includes("api.openai.com");
}
}
function isOpenAICodexBaseUrl(baseUrl: unknown): boolean {
if (typeof baseUrl !== "string" || !baseUrl.trim()) {
return false;
}
try {
return new URL(baseUrl).hostname.toLowerCase() === "chatgpt.com";
} catch {
return baseUrl.toLowerCase().includes("chatgpt.com");
}
/**
 * Normalizes a model config's loosely-typed fields and resolves the shared
 * provider request policy for the streaming LLM path.
 */
function resolveOpenAIRequestPolicy(model: {
  api?: unknown;
  provider?: unknown;
  baseUrl?: unknown;
}) {
  // Only string-valued fields are forwarded; anything else is treated as unset.
  const asString = (value: unknown) => (typeof value === "string" ? value : undefined);
  return resolveProviderRequestPolicy({
    provider: asString(model.provider),
    api: asString(model.api),
    baseUrl: asString(model.baseUrl),
    capability: "llm",
    transport: "stream",
  });
}
function shouldApplyOpenAIAttributionHeaders(model: {
@@ -71,21 +44,10 @@ function shouldApplyOpenAIAttributionHeaders(model: {
provider?: unknown;
baseUrl?: unknown;
}): "openai" | "openai-codex" | undefined {
if (
model.provider === "openai" &&
(model.api === "openai-completions" || model.api === "openai-responses") &&
isOpenAIPublicApiBaseUrl(model.baseUrl)
) {
return "openai";
}
if (
model.provider === "openai-codex" &&
(model.api === "openai-codex-responses" || model.api === "openai-responses") &&
isOpenAICodexBaseUrl(model.baseUrl)
) {
return "openai-codex";
}
return undefined;
const attributionProvider = resolveOpenAIRequestPolicy(model).attributionProvider;
return attributionProvider === "openai" || attributionProvider === "openai-codex"
? attributionProvider
: undefined;
}
function shouldApplyOpenAIServiceTier(model: {
@@ -93,17 +55,18 @@ function shouldApplyOpenAIServiceTier(model: {
provider?: unknown;
baseUrl?: unknown;
}): boolean {
const policy = resolveOpenAIRequestPolicy(model);
if (
model.provider === "openai" &&
model.api === "openai-responses" &&
isOpenAIPublicApiBaseUrl(model.baseUrl)
policy.endpointClass === "openai-public"
) {
return true;
}
if (
model.provider === "openai-codex" &&
(model.api === "openai-codex-responses" || model.api === "openai-responses") &&
isOpenAICodexBaseUrl(model.baseUrl)
policy.endpointClass === "openai-codex"
) {
return true;
}
@@ -128,7 +91,7 @@ function shouldForceResponsesStore(model: {
if (!OPENAI_RESPONSES_PROVIDERS.has(model.provider)) {
return false;
}
return isDirectOpenAIBaseUrl(model.baseUrl);
return resolveOpenAIRequestPolicy(model).usesKnownNativeOpenAIEndpoint;
}
function parsePositiveInteger(value: unknown): number | undefined {
@@ -193,10 +156,12 @@ function shouldStripResponsesPromptCache(model: { api?: unknown; baseUrl?: unkno
}
// Missing baseUrl means pi-ai will use the default OpenAI endpoint, so keep
// prompt cache fields for that direct path.
if (typeof model.baseUrl !== "string" || !model.baseUrl.trim()) {
return false;
}
return !isDirectOpenAIBaseUrl(model.baseUrl);
return resolveProviderRequestPolicy({
baseUrl: typeof model.baseUrl === "string" ? model.baseUrl : undefined,
api: typeof model.api === "string" ? model.api : undefined,
transport: "stream",
capability: "llm",
}).usesExplicitProxyLikeEndpoint;
}
function shouldApplyOpenAIReasoningCompatibility(model: {
@@ -577,7 +542,13 @@ export function createOpenAIAttributionHeadersWrapper(
...options,
headers: {
...options?.headers,
...resolveProviderAttributionHeaders(attributionProvider),
...resolveProviderRequestAttributionHeaders({
provider: attributionProvider,
api: typeof model.api === "string" ? model.api : undefined,
baseUrl: typeof model.baseUrl === "string" ? model.baseUrl : undefined,
capability: "llm",
transport: "stream",
}),
},
});
};

View File

@@ -1,7 +1,7 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
import { streamSimple } from "@mariozechner/pi-ai";
import type { ThinkLevel } from "../../auto-reply/thinking.js";
import { resolveProviderAttributionHeaders } from "../provider-attribution.js";
import { resolveProviderRequestAttributionHeaders } from "../provider-attribution.js";
import { streamWithPayloadPatch } from "./stream-payload-utils.js";
const KILOCODE_FEATURE_HEADER = "X-KILOCODE-FEATURE";
const KILOCODE_FEATURE_DEFAULT = "openclaw";
@@ -111,7 +111,13 @@ export function createOpenRouterWrapper(
): StreamFn {
const underlying = baseStreamFn ?? streamSimple;
return (model, context, options) => {
const attributionHeaders = resolveProviderAttributionHeaders("openrouter");
const attributionHeaders = resolveProviderRequestAttributionHeaders({
provider: typeof model.provider === "string" ? model.provider : "openrouter",
api: typeof model.api === "string" ? model.api : undefined,
baseUrl: typeof model.baseUrl === "string" ? model.baseUrl : undefined,
capability: "llm",
transport: "stream",
});
return streamWithPayloadPatch(
underlying,
model,

View File

@@ -4,6 +4,8 @@ import {
resolveProviderAttributionHeaders,
resolveProviderAttributionIdentity,
resolveProviderAttributionPolicy,
resolveProviderRequestAttributionHeaders,
resolveProviderRequestPolicy,
} from "./provider-attribution.js";
describe("provider attribution", () => {
@@ -114,4 +116,204 @@ describe("provider attribution", () => {
["together", false, "vendor-sdk-hook-only", "default-headers"],
]);
});
it("authorizes hidden OpenAI attribution only on verified native hosts", () => {
expect(
resolveProviderRequestPolicy(
{
provider: "openai",
api: "openai-responses",
baseUrl: "https://api.openai.com/v1",
transport: "stream",
capability: "llm",
},
{ OPENCLAW_VERSION: "2026.3.22" },
),
).toMatchObject({
endpointClass: "openai-public",
attributionProvider: "openai",
allowsHiddenAttribution: true,
usesKnownNativeOpenAIEndpoint: true,
usesVerifiedOpenAIAttributionHost: true,
usesExplicitProxyLikeEndpoint: false,
});
expect(
resolveProviderRequestPolicy(
{
provider: "openai",
api: "openai-responses",
baseUrl: "https://proxy.example.com/v1",
transport: "stream",
capability: "llm",
},
{ OPENCLAW_VERSION: "2026.3.22" },
),
).toMatchObject({
endpointClass: "custom",
attributionProvider: undefined,
allowsHiddenAttribution: false,
usesKnownNativeOpenAIEndpoint: false,
usesVerifiedOpenAIAttributionHost: false,
usesExplicitProxyLikeEndpoint: true,
});
});
it("classifies OpenAI-family default, codex, and Azure routes distinctly", () => {
expect(
resolveProviderRequestPolicy({
provider: "openai",
api: "openai-responses",
transport: "stream",
capability: "llm",
}),
).toMatchObject({
endpointClass: "default",
attributionProvider: undefined,
usesKnownNativeOpenAIRoute: true,
usesExplicitProxyLikeEndpoint: false,
});
expect(
resolveProviderRequestPolicy({
provider: "openai-codex",
api: "openai-responses",
baseUrl: "https://chatgpt.com/backend-api",
transport: "stream",
capability: "llm",
}),
).toMatchObject({
endpointClass: "openai-codex",
attributionProvider: "openai-codex",
allowsHiddenAttribution: true,
});
expect(
resolveProviderRequestPolicy({
provider: "azure-openai",
api: "azure-openai-responses",
baseUrl: "https://tenant.openai.azure.com/openai/v1",
transport: "stream",
capability: "llm",
}),
).toMatchObject({
endpointClass: "azure-openai",
attributionProvider: undefined,
allowsHiddenAttribution: false,
usesKnownNativeOpenAIEndpoint: true,
});
});
it("treats OpenRouter-hosted Responses routes as explicit proxy-like endpoints", () => {
expect(
resolveProviderRequestPolicy({
provider: "openrouter",
api: "openai-responses",
baseUrl: "https://openrouter.ai/api/v1",
transport: "stream",
capability: "llm",
}),
).toMatchObject({
endpointClass: "openrouter",
usesExplicitProxyLikeEndpoint: true,
attributionProvider: "openrouter",
});
});
it("keeps documented OpenRouter attribution centralized while leaving host-gating deferred", () => {
expect(
resolveProviderRequestPolicy({
provider: "openrouter",
api: "openai-responses",
baseUrl: "https://openrouter.ai/api/v1",
transport: "stream",
capability: "llm",
}),
).toMatchObject({
endpointClass: "openrouter",
attributionProvider: "openrouter",
allowsHiddenAttribution: false,
});
expect(
resolveProviderRequestAttributionHeaders({
provider: "openrouter",
baseUrl: "https://proxy.example.com/v1",
transport: "stream",
capability: "llm",
}),
).toEqual({
"HTTP-Referer": "https://openclaw.ai",
"X-OpenRouter-Title": "OpenClaw",
"X-OpenRouter-Categories": "cli-agent",
});
});
it("models other provider families without enabling hidden attribution", () => {
expect(
resolveProviderRequestPolicy({
provider: "google",
baseUrl: "https://generativelanguage.googleapis.com",
transport: "http",
capability: "image",
}),
).toMatchObject({
knownProviderFamily: "google",
attributionProvider: undefined,
allowsHiddenAttribution: false,
});
expect(
resolveProviderRequestPolicy({
provider: "github-copilot",
transport: "http",
capability: "llm",
}),
).toMatchObject({
knownProviderFamily: "github-copilot",
attributionProvider: undefined,
allowsHiddenAttribution: false,
});
});
it("requires the dedicated OpenAI audio transcription API for audio attribution", () => {
expect(
resolveProviderRequestPolicy({
provider: "openai",
api: "openai-audio-transcriptions",
baseUrl: "https://api.openai.com/v1",
transport: "media-understanding",
capability: "audio",
}),
).toMatchObject({
attributionProvider: "openai",
allowsHiddenAttribution: true,
});
expect(
resolveProviderRequestPolicy({
provider: "openai",
api: "openai-responses",
baseUrl: "https://api.openai.com/v1",
transport: "media-understanding",
capability: "audio",
}),
).toMatchObject({
attributionProvider: "openai",
allowsHiddenAttribution: true,
});
expect(
resolveProviderRequestPolicy({
provider: "openai",
api: "not-openai-audio",
baseUrl: "https://api.openai.com/v1",
transport: "media-understanding",
capability: "audio",
}),
).toMatchObject({
attributionProvider: undefined,
allowsHiddenAttribution: false,
});
});
});

View File

@@ -28,9 +28,148 @@ export type ProviderAttributionPolicy = {
export type ProviderAttributionIdentity = Pick<ProviderAttributionPolicy, "product" | "version">;
export type ProviderRequestTransport = "stream" | "websocket" | "http" | "media-understanding";
export type ProviderRequestCapability = "llm" | "audio" | "image" | "video" | "other";
export type ProviderEndpointClass =
| "default"
| "openai-public"
| "openai-codex"
| "azure-openai"
| "openrouter"
| "local"
| "custom"
| "invalid";
export type ProviderRequestPolicyInput = {
provider?: string | null;
api?: string | null;
baseUrl?: string | null;
transport?: ProviderRequestTransport;
capability?: ProviderRequestCapability;
};
export type ProviderRequestPolicyResolution = {
provider?: string;
policy?: ProviderAttributionPolicy;
endpointClass: ProviderEndpointClass;
usesConfiguredBaseUrl: boolean;
knownProviderFamily: string;
attributionProvider?: string;
attributionHeaders?: Record<string, string>;
allowsHiddenAttribution: boolean;
usesKnownNativeOpenAIEndpoint: boolean;
usesKnownNativeOpenAIRoute: boolean;
usesVerifiedOpenAIAttributionHost: boolean;
usesExplicitProxyLikeEndpoint: boolean;
};
const OPENCLAW_ATTRIBUTION_PRODUCT = "OpenClaw";
const OPENCLAW_ATTRIBUTION_ORIGINATOR = "openclaw";
const LOCAL_ENDPOINT_HOSTS = new Set(["localhost", "127.0.0.1", "::1", "[::1]"]);
/** Builds the `openclaw/<version>` User-Agent value used for attribution. */
function formatOpenClawUserAgent(version: string): string {
  return `${OPENCLAW_ATTRIBUTION_ORIGINATOR}/${version}`;
}
/**
 * Extracts a lowercase hostname from a base-URL-like value.
 *
 * When the value does not parse as a URL, falls back to substring matching
 * against the small set of hosts the request policy cares about, so slightly
 * malformed configuration (e.g. a missing scheme) still classifies onto the
 * right endpoint bucket. Returns `undefined` for non-strings, blank strings,
 * and unparseable values matching none of the known hosts.
 */
function resolveUrlHostname(value: unknown): string | undefined {
  if (typeof value !== "string" || !value.trim()) {
    return undefined;
  }
  try {
    return new URL(value).hostname.toLowerCase();
  } catch {
    // Not a parseable URL — fall through to the heuristic matching below.
  }
  const normalized = value.trim().toLowerCase();
  if (normalized.includes("api.openai.com")) {
    return "api.openai.com";
  }
  if (normalized.includes("chatgpt.com")) {
    return "chatgpt.com";
  }
  if (normalized.includes(".openai.azure.com")) {
    // Reconstruct `<tenant>.openai.azure.com`, dropping any scheme prefix.
    const suffixStart = normalized.indexOf(".openai.azure.com");
    const prefix = normalized.slice(0, suffixStart).replace(/^https?:\/\//, "");
    return `${prefix}.openai.azure.com`;
  }
  if (normalized.includes("openrouter.ai")) {
    return "openrouter.ai";
  }
  const looksLocal =
    normalized.includes("localhost") ||
    normalized.includes("127.0.0.1") ||
    normalized.includes("[::1]") ||
    normalized.includes("://::1");
  return looksLocal ? "localhost" : undefined;
}
/** True for loopback hosts and conventional private DNS suffixes. */
function isLocalEndpointHost(host: string): boolean {
  if (LOCAL_ENDPOINT_HOSTS.has(host)) {
    return true;
  }
  return [".localhost", ".local", ".internal"].some((suffix) => host.endsWith(suffix));
}
/**
 * Buckets a configured base URL into the endpoint classes the request policy
 * gates on: "default" when nothing is configured, "invalid" when no hostname
 * can be resolved, a known vendor bucket for recognized hosts, "local" for
 * loopback/private hosts, and "custom" for everything else.
 */
function classifyProviderEndpoint(baseUrl: string | null | undefined): ProviderEndpointClass {
  if (typeof baseUrl !== "string" || !baseUrl.trim()) {
    return "default";
  }
  const host = resolveUrlHostname(baseUrl);
  if (!host) {
    return "invalid";
  }
  // Exact vendor hosts first, then suffix-matched families.
  const exactHosts: Record<string, ProviderEndpointClass> = {
    "api.openai.com": "openai-public",
    "chatgpt.com": "openai-codex",
    "openrouter.ai": "openrouter",
  };
  const exact = exactHosts[host];
  if (exact) {
    return exact;
  }
  if (host.endsWith(".openrouter.ai")) {
    return "openrouter";
  }
  if (host.endsWith(".openai.azure.com")) {
    return "azure-openai";
  }
  return isLocalEndpointHost(host) ? "local" : "custom";
}
/**
 * Maps a provider id onto its coarse provider family. OpenAI-family ids
 * collapse into one bucket; other known providers map to themselves; unknown
 * ids pass through unchanged, and a missing/empty id becomes "unknown".
 */
function resolveKnownProviderFamily(provider: string | undefined): string {
  const families: Record<string, string> = {
    openai: "openai-family",
    "openai-codex": "openai-family",
    "azure-openai": "openai-family",
    "azure-openai-responses": "openai-family",
    openrouter: "openrouter",
    anthropic: "anthropic",
    google: "google",
    "github-copilot": "github-copilot",
    groq: "groq",
    mistral: "mistral",
    together: "together",
  };
  return (provider && families[provider]) || provider || "unknown";
}
export function resolveProviderAttributionIdentity(
env: RuntimeVersionEnv = process.env as RuntimeVersionEnv,
): ProviderAttributionIdentity {
@@ -75,7 +214,7 @@ function buildOpenAIAttributionPolicy(
headers: {
originator: OPENCLAW_ATTRIBUTION_ORIGINATOR,
version: identity.version,
"User-Agent": `${OPENCLAW_ATTRIBUTION_ORIGINATOR}/${identity.version}`,
"User-Agent": formatOpenClawUserAgent(identity.version),
},
};
}
@@ -95,7 +234,7 @@ function buildOpenAICodexAttributionPolicy(
headers: {
originator: OPENCLAW_ATTRIBUTION_ORIGINATOR,
version: identity.version,
"User-Agent": `${OPENCLAW_ATTRIBUTION_ORIGINATOR}/${identity.version}`,
"User-Agent": formatOpenClawUserAgent(identity.version),
},
};
}
@@ -174,3 +313,75 @@ export function resolveProviderAttributionHeaders(
}
return policy.headers;
}
/**
 * Central gate for per-request provider policy.
 *
 * Combines the provider's attribution policy with an endpoint classification
 * of the configured base URL and the requested API surface, then derives the
 * flags callers use to decide whether hidden attribution headers, native-only
 * defaults, or proxy-safe behavior apply to this request.
 */
export function resolveProviderRequestPolicy(
  input: ProviderRequestPolicyInput,
  env: RuntimeVersionEnv = process.env as RuntimeVersionEnv,
): ProviderRequestPolicyResolution {
  const provider = normalizeProviderId(input.provider ?? "");
  const policy = resolveProviderAttributionPolicy(provider, env);
  const endpointClass = classifyProviderEndpoint(input.baseUrl);
  const api = input.api?.trim().toLowerCase();

  const onOpenAIPublicHost = endpointClass === "openai-public";
  const onOpenAICodexHost = endpointClass === "openai-codex";
  const usesVerifiedOpenAIAttributionHost = onOpenAIPublicHost || onOpenAICodexHost;
  const usesKnownNativeOpenAIEndpoint =
    usesVerifiedOpenAIAttributionHost || endpointClass === "azure-openai";
  const usesConfiguredBaseUrl = endpointClass !== "default";
  const usesExplicitProxyLikeEndpoint = usesConfiguredBaseUrl && !usesKnownNativeOpenAIEndpoint;

  // Hidden attribution is only authorized for exact provider/API/host triples.
  const openAiApiQualifies =
    api === "openai-completions" ||
    api === "openai-responses" ||
    (input.capability === "audio" && api === "openai-audio-transcriptions");
  const codexApiQualifies = api === "openai-codex-responses" || api === "openai-responses";

  let attributionProvider: string | undefined;
  if (provider === "openai" && openAiApiQualifies && onOpenAIPublicHost) {
    attributionProvider = "openai";
  } else if (provider === "openai-codex" && codexApiQualifies && onOpenAICodexHost) {
    attributionProvider = "openai-codex";
  } else if (provider === "openrouter" && policy?.enabledByDefault) {
    // OpenRouter attribution is documented and intentionally remains
    // provider-key-gated for this pass, including custom base URLs configured
    // under the openrouter provider. The endpoint class is still surfaced so a
    // later host-gating decision can reuse the same classifier without changing
    // callers again.
    attributionProvider = "openrouter";
  }

  const attributionHeaders =
    attributionProvider === undefined
      ? undefined
      : resolveProviderAttributionHeaders(attributionProvider, env);

  return {
    provider: provider || undefined,
    policy,
    endpointClass,
    usesConfiguredBaseUrl,
    knownProviderFamily: resolveKnownProviderFamily(provider || undefined),
    attributionProvider,
    attributionHeaders,
    allowsHiddenAttribution:
      attributionProvider !== undefined && policy?.verification === "vendor-hidden-api-spec",
    usesKnownNativeOpenAIEndpoint,
    // With no configured base URL, only the openai provider rides a native route.
    usesKnownNativeOpenAIRoute:
      endpointClass === "default" ? provider === "openai" : usesKnownNativeOpenAIEndpoint,
    usesVerifiedOpenAIAttributionHost,
    usesExplicitProxyLikeEndpoint,
  };
}
/**
 * Convenience accessor over `resolveProviderRequestPolicy`: returns only the
 * attribution headers the policy authorizes for this request, or `undefined`
 * when no attribution applies.
 */
export function resolveProviderRequestAttributionHeaders(
  input: ProviderRequestPolicyInput,
  env: RuntimeVersionEnv = process.env as RuntimeVersionEnv,
): Record<string, string> | undefined {
  const { attributionHeaders } = resolveProviderRequestPolicy(input, env);
  return attributionHeaders;
}

View File

@@ -0,0 +1,51 @@
import { describe, expect, it } from "vitest";
import {
createRequestCaptureJsonFetch,
installPinnedHostnameTestHooks,
} from "./audio.test-helpers.js";
import { transcribeOpenAiCompatibleAudio } from "./openai-compatible-audio.js";
installPinnedHostnameTestHooks();
describe("transcribeOpenAiCompatibleAudio", () => {
it("adds hidden attribution headers on the native OpenAI audio host", async () => {
const { fetchFn, getRequest } = createRequestCaptureJsonFetch({ text: "ok" });
await transcribeOpenAiCompatibleAudio({
buffer: Buffer.from("audio"),
fileName: "note.mp3",
apiKey: "test-key",
timeoutMs: 1000,
fetchFn,
provider: "openai",
defaultBaseUrl: "https://api.openai.com/v1",
defaultModel: "gpt-4o-mini-transcribe",
});
const headers = new Headers(getRequest().init?.headers);
expect(headers.get("originator")).toBe("openclaw");
expect(headers.get("version")).toBeTruthy();
expect(headers.get("user-agent")).toMatch(/^openclaw\//);
});
it("does not add hidden attribution headers on custom OpenAI-compatible hosts", async () => {
const { fetchFn, getRequest } = createRequestCaptureJsonFetch({ text: "ok" });
await transcribeOpenAiCompatibleAudio({
buffer: Buffer.from("audio"),
fileName: "note.mp3",
apiKey: "test-key",
timeoutMs: 1000,
fetchFn,
provider: "openai",
baseUrl: "https://proxy.example.com/v1",
defaultBaseUrl: "https://api.openai.com/v1",
defaultModel: "gpt-4o-mini-transcribe",
});
const headers = new Headers(getRequest().init?.headers);
expect(headers.get("originator")).toBeNull();
expect(headers.get("version")).toBeNull();
expect(headers.get("user-agent")).toBeNull();
});
});

View File

@@ -1,5 +1,6 @@
import path from "node:path";
import {
applyProviderRequestHeaders,
assertOkOrThrowHttpError,
normalizeBaseUrl,
postTranscriptionRequest,
@@ -10,6 +11,7 @@ import type { AudioTranscriptionRequest, AudioTranscriptionResult } from "./type
type OpenAiCompatibleAudioParams = AudioTranscriptionRequest & {
defaultBaseUrl: string;
defaultModel: string;
provider?: string;
};
function resolveModel(model: string | undefined, fallback: string): string {
@@ -41,7 +43,14 @@ export async function transcribeOpenAiCompatibleAudio(
form.append("prompt", params.prompt.trim());
}
const headers = new Headers(params.headers);
const headers = applyProviderRequestHeaders({
headers: params.headers,
provider: params.provider,
api: "openai-audio-transcriptions",
baseUrl,
capability: "audio",
transport: "media-understanding",
});
if (!headers.has("authorization")) {
headers.set("authorization", `Bearer ${params.apiKey}`);
}

View File

@@ -1,6 +1,7 @@
import type { GuardedFetchResult } from "../infra/net/fetch-guard.js";
import { fetchWithSsrFGuard } from "../infra/net/fetch-guard.js";
import type { LookupFn, SsrFPolicy } from "../infra/net/ssrf.js";
import { resolveProviderRequestAttributionHeaders } from "../agents/provider-attribution.js";
export { fetchWithTimeout } from "../utils/fetch-timeout.js";
const MAX_ERROR_CHARS = 300;
@@ -10,6 +11,33 @@ export function normalizeBaseUrl(baseUrl: string | undefined, fallback: string):
return raw.replace(/\/+$/, "");
}
/**
 * Merges provider attribution headers into a request's header set.
 *
 * Caller-supplied headers always win: attribution entries are only added for
 * header names the caller has not already set. When the request policy yields
 * no attribution (proxy/custom hosts, unknown providers), the caller headers
 * are returned unchanged.
 */
export function applyProviderRequestHeaders(params: {
  headers?: HeadersInit;
  provider?: string;
  api?: string;
  baseUrl?: string;
  capability?: "audio" | "image" | "video" | "other";
  transport?: "http" | "media-understanding";
}): Headers {
  const merged = new Headers(params.headers);
  const attribution = resolveProviderRequestAttributionHeaders({
    provider: params.provider,
    api: params.api,
    baseUrl: params.baseUrl,
    capability: params.capability ?? "other",
    transport: params.transport ?? "http",
  });
  if (attribution) {
    for (const [name, value] of Object.entries(attribution)) {
      if (!merged.has(name)) {
        merged.set(name, value);
      }
    }
  }
  return merged;
}
export async function fetchWithTimeoutGuarded(
url: string,
init: RequestInit,