From dd42154e45227955c30e2d4e92f9d548d645ef87 Mon Sep 17 00:00:00 2001
From: Vincent Koc
Date: Sat, 4 Apr 2026 02:14:01 +0900
Subject: [PATCH] fix(providers): stop forcing reasoning effort on proxy
 completions

---
 src/agents/openai-completions-compat.ts    | 6 ++++++
 src/agents/openai-transport-stream.test.ts | 6 +++++-
 src/agents/openai-transport-stream.ts      | 3 +--
 3 files changed, 12 insertions(+), 3 deletions(-)

diff --git a/src/agents/openai-completions-compat.ts b/src/agents/openai-completions-compat.ts
index 903dba6d70d..4500ad3eba1 100644
--- a/src/agents/openai-completions-compat.ts
+++ b/src/agents/openai-completions-compat.ts
@@ -10,6 +10,7 @@ type OpenAICompletionsCompatDefaultsInput = {
 export type OpenAICompletionsCompatDefaults = {
   supportsStore: boolean;
   supportsDeveloperRole: boolean;
+  supportsReasoningEffort: boolean;
   supportsUsageInStreaming: boolean;
   supportsStrictMode: boolean;
 };
@@ -48,6 +49,11 @@ export function resolveOpenAICompletionsCompatDefaults(
     supportsStore:
       !isNonStandard && knownProviderFamily !== "mistral" && !usesExplicitProxyLikeEndpoint,
     supportsDeveloperRole: !isNonStandard && !isMoonshotLike && !usesConfiguredNonOpenAIEndpoint,
+    supportsReasoningEffort:
+      !isZai &&
+      knownProviderFamily !== "mistral" &&
+      endpointClass !== "xai-native" &&
+      !usesExplicitProxyLikeEndpoint,
     supportsUsageInStreaming: !isNonStandard && !usesConfiguredNonOpenAIEndpoint,
     supportsStrictMode: !isZai && !usesConfiguredNonOpenAIEndpoint,
   };
diff --git a/src/agents/openai-transport-stream.test.ts b/src/agents/openai-transport-stream.test.ts
index de089a4c98c..a556752e772 100644
--- a/src/agents/openai-transport-stream.test.ts
+++ b/src/agents/openai-transport-stream.test.ts
@@ -406,15 +406,19 @@ describe("openai transport stream", () => {
         },
       ],
     } as never,
-    undefined,
+    {
+      reasoningEffort: "high",
+    } as never,
   ) as {
     messages?: Array<{ role?: string }>;
+    reasoning_effort?: unknown;
     stream_options?: unknown;
     store?: unknown;
     tools?: Array<{ function?: { strict?: boolean } }>;
   };

   expect(params.messages?.[0]).toMatchObject({ role: "system" });
+  expect(params).not.toHaveProperty("reasoning_effort");
   expect(params).not.toHaveProperty("stream_options");
   expect(params).not.toHaveProperty("store");
   expect(params.tools?.[0]?.function).not.toHaveProperty("strict");
diff --git a/src/agents/openai-transport-stream.ts b/src/agents/openai-transport-stream.ts
index b8fe5cc5a1d..666bbf8f827 100644
--- a/src/agents/openai-transport-stream.ts
+++ b/src/agents/openai-transport-stream.ts
@@ -1351,7 +1351,6 @@ function detectCompat(model: OpenAIModeModel) {
   const isZai = endpointClass === "zai-native" || (isDefaultRoute && provider === "zai");
   const useMaxTokens =
     endpointClass === "chutes-native" || (isDefaultRoute && provider === "chutes") || isMistral;
-  const isGrok = endpointClass === "xai-native" || (isDefaultRoute && provider === "xai");
   const isGroq = endpointClass === "groq-native" || (isDefaultRoute && provider === "groq");
   const reasoningEffortMap: Record =
     isGroq && model.id === "qwen/qwen3-32b" ?
@@ -1366,7 +1365,7 @@ function detectCompat(model: OpenAIModeModel) {
   return {
     supportsStore: compatDefaults.supportsStore,
     supportsDeveloperRole: compatDefaults.supportsDeveloperRole,
-    supportsReasoningEffort: !isGrok && !isMistral && !isZai,
+    supportsReasoningEffort: compatDefaults.supportsReasoningEffort,
     reasoningEffortMap,
     supportsUsageInStreaming: compatDefaults.supportsUsageInStreaming,
     maxTokensField: useMaxTokens ? "max_tokens" : "max_completion_tokens",