fix: preserve Azure OpenAI completions api version

This commit is contained in:
Peter Steinberger
2026-04-22 22:35:25 +01:00
parent cb16d22780
commit 6ab3751287
2 changed files with 129 additions and 2 deletions

View File

@@ -19,6 +19,74 @@ import {
import { SYSTEM_PROMPT_CACHE_BOUNDARY } from "./system-prompt-cache-boundary.js";
describe("openai transport stream", () => {
it("moves Azure OpenAI completions api-version headers into default query params", () => {
const config = __testing.buildOpenAICompletionsClientConfig(
{
id: "gpt-4o-mini",
name: "GPT-4o Mini",
api: "openai-completions",
provider: "azure-custom",
baseUrl: "https://example.openai.azure.com/openai/deployments/gpt-4o-mini?existing=1",
headers: {
"api-key": "azure-key",
"api-version": "2024-10-21",
"X-Tenant": "acme",
},
reasoning: false,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 128000,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
{ systemPrompt: "", messages: [] } as never,
);
expect(config).toEqual({
baseURL: "https://example.openai.azure.com/openai/deployments/gpt-4o-mini",
defaultHeaders: {
"api-key": "azure-key",
"X-Tenant": "acme",
},
defaultQuery: {
existing: "1",
"api-version": "2024-10-21",
},
});
});
it("preserves configured base URL query params without moving non-Azure headers", () => {
const config = __testing.buildOpenAICompletionsClientConfig(
{
id: "proxy-model",
name: "Proxy Model",
api: "openai-completions",
provider: "custom-proxy",
baseUrl: "https://proxy.example.com/v1?tenant=acme",
headers: {
"api-version": "proxy-header",
"X-Tenant": "acme",
},
reasoning: false,
input: ["text"],
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 128000,
maxTokens: 4096,
} satisfies Model<"openai-completions">,
{ systemPrompt: "", messages: [] } as never,
);
expect(config).toEqual({
baseURL: "https://proxy.example.com/v1",
defaultHeaders: {
"api-version": "proxy-header",
"X-Tenant": "acme",
},
defaultQuery: {
tenant: "acme",
},
});
});
it("reports the supported transport-aware APIs", () => {
expect(isTransportAwareApiSupported("openai-responses")).toBe(true);
expect(isTransportAwareApiSupported("openai-codex-responses")).toBe(true);

View File

@@ -963,15 +963,73 @@ function createOpenAICompletionsClient(
apiKey: string,
optionHeaders?: Record<string, string>,
) {
const clientConfig = buildOpenAICompletionsClientConfig(model, context, optionHeaders);
return new OpenAI({
apiKey,
baseURL: model.baseUrl,
baseURL: clientConfig.baseURL,
dangerouslyAllowBrowser: true,
defaultHeaders: buildOpenAIClientHeaders(model, context, optionHeaders),
defaultHeaders: clientConfig.defaultHeaders,
defaultQuery: clientConfig.defaultQuery,
fetch: buildGuardedModelFetch(model),
});
}
/**
 * Reports whether a hostname belongs to one of the Azure domains that serve
 * OpenAI-compatible endpoints. Callers are expected to pass a lowercased host.
 */
function isAzureOpenAICompatibleHost(hostname: string): boolean {
  const azureSuffixes = [
    ".openai.azure.com",
    ".services.ai.azure.com",
    ".cognitiveservices.azure.com",
  ];
  return azureSuffixes.some((suffix) => hostname.endsWith(suffix));
}
function buildOpenAICompletionsClientConfig(
model: Model<Api>,
context: Context,
optionHeaders?: Record<string, string>,
): {
baseURL: string;
defaultHeaders: Record<string, string>;
defaultQuery?: Record<string, string>;
} {
const headers = buildOpenAIClientHeaders(model, context, optionHeaders);
const defaultQuery: Record<string, string> = {};
let baseURL = model.baseUrl;
let isAzureHost = false;
try {
const parsed = new URL(model.baseUrl);
isAzureHost = isAzureOpenAICompatibleHost(parsed.hostname.toLowerCase());
parsed.searchParams.forEach((value, key) => {
if (value) {
defaultQuery[key] = value;
}
});
parsed.search = "";
baseURL = parsed.toString().replace(/\/$/, "");
} catch {
// Keep the configured base URL unchanged; the OpenAI SDK will surface invalid URLs.
}
if (isAzureHost) {
const apiVersionHeader = Object.keys(headers).find(
(key) => key.toLowerCase() === "api-version",
);
if (apiVersionHeader) {
const apiVersion = headers[apiVersionHeader]?.trim();
delete headers[apiVersionHeader];
if (apiVersion && !defaultQuery["api-version"]) {
defaultQuery["api-version"] = apiVersion;
}
}
}
return {
baseURL,
defaultHeaders: headers,
defaultQuery: Object.keys(defaultQuery).length > 0 ? defaultQuery : undefined,
};
}
export function createOpenAICompletionsTransportStreamFn(): StreamFn {
return (model, context, options) => {
const eventStream = createAssistantMessageEventStream();
@@ -1577,5 +1635,6 @@ function mapStopReason(reason: string | null) {
}
// Internal-only export so unit tests can exercise private helpers without
// widening the module's public API surface.
export const __testing = {
buildOpenAICompletionsClientConfig,
processOpenAICompletionsStream,
};