fix: omit xAI reasoning efforts

This commit is contained in:
Peter Steinberger
2026-05-06 05:13:04 +01:00
parent d221d7b6a9
commit ea26a9dba0
5 changed files with 74 additions and 3 deletions

View File

@@ -102,6 +102,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Providers/xAI: stop sending OpenAI-style reasoning effort controls to native Grok Responses models, so `xai/grok-4.3` no longer fails live Docker/Gateway runs with `Invalid reasoning effort`.
- Discord/gateway: measure heartbeat ACK timeouts from the actual heartbeat send, preventing late initial heartbeats from triggering false reconnect loops while the channel is still awaiting readiness. Fixes #77668. (#78087) Thanks @bryce-d-greybeard and @NikolaFC.
- Discord/guilds: route plain text control commands such as `/steer` through the normal authorization and mention gate instead of silently dropping them before an agent session can see them. Fixes #78080. Thanks @ramitrkar-hash.
- Control UI/Sessions: make the compaction count a compact `N Checkpoint(s)` disclosure and show expanded session-level details with modern checkpoint history cards across responsive table layouts. Thanks @BunsDev.

View File

@@ -221,7 +221,14 @@ describe("xai provider plugin", () => {
model: createProviderModel({ id: "grok-4-1-fast" }),
} as never),
).toMatchObject({
thinkingLevelMap: { off: null },
thinkingLevelMap: {
off: null,
minimal: null,
low: null,
medium: null,
high: null,
xhigh: null,
},
compat: {
toolSchemaProfile: "xai",
nativeWebSearchTool: true,

View File

@@ -7,13 +7,22 @@ type XaiRuntimeModelCompat = {
>;
};
// xAI's native Grok Responses API rejects OpenAI-style reasoning-effort
// controls, so every thinking level is mapped to null ("do not send").
// NOTE(review): this key set presumably mirrors pi-ai's full thinking-level
// vocabulary — confirm it stays in sync if pi-ai adds new effort names.
const XAI_UNSUPPORTED_REASONING_EFFORTS = {
  off: null,
  minimal: null,
  low: null,
  medium: null,
  high: null,
  xhigh: null,
} satisfies NonNullable<XaiRuntimeModelCompat["thinkingLevelMap"]>;
/**
 * Layers runtime compat on top of the static xAI model compat.
 *
 * Delegates to applyXaiModelCompat, then marks every known reasoning-effort
 * level as unsupported (null) so pi-ai never sends OpenAI-style reasoning
 * controls to native Grok Responses models.
 *
 * @param model - model definition to adjust; returned object is a shallow copy.
 * @returns the model with an overridden thinkingLevelMap.
 */
export function applyXaiRuntimeModelCompat<T extends XaiRuntimeModelCompat>(model: T): T {
  const withCompat = applyXaiModelCompat(model);
  return {
    ...withCompat,
    thinkingLevelMap: {
      // Keep any levels the static compat already defined, then force all
      // known efforts to null. (The spread already contains `off: null`, so
      // no separate `off` entry is needed here.)
      ...withCompat.thinkingLevelMap,
      ...XAI_UNSUPPORTED_REASONING_EFFORTS,
    },
  };
}

View File

@@ -1,6 +1,8 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
import type { Api, Context, Model } from "@mariozechner/pi-ai";
import { streamSimpleOpenAIResponses } from "@mariozechner/pi-ai/openai-responses";
import { describe, expect, it } from "vitest";
import { applyXaiRuntimeModelCompat } from "./runtime-model-compat.js";
import {
createXaiFastModeWrapper,
createXaiToolPayloadCompatibilityWrapper,
@@ -65,6 +67,44 @@ function runXaiToolPayloadWrapper(params: {
);
}
/**
 * Drives a real pi-ai openai-responses stream against a compat-adjusted xAI
 * Grok model and captures the first request payload pi-ai builds.
 *
 * The stream is aborted immediately after capture by throwing from the
 * onPayload callback, so no network round trip is ever completed.
 *
 * @returns a deep clone of the captured request payload.
 * @throws if pi-ai never invokes the payload callback within 1 second.
 */
async function captureXaiResponsesPayloadWithThinking(): Promise<Record<string, unknown>> {
  // Apply the runtime compat layer so reasoning efforts are marked unsupported
  // before pi-ai constructs the request.
  const model = applyXaiRuntimeModelCompat({
    api: "openai-responses",
    provider: "xai",
    id: "grok-4.3",
    baseUrl: "https://api.x.ai/v1",
    reasoning: true,
    input: ["text", "image"],
    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
    contextWindow: 1_000_000,
    maxTokens: 64_000,
  } as Model<"openai-responses">);
  const payloadPromise = new Promise<Record<string, unknown>>((resolve, reject) => {
    // Fail fast if the payload callback is never invoked (e.g. pi-ai errors
    // out before building a request).
    const timeout = setTimeout(
      () => reject(new Error("provider payload callback was not invoked")),
      1_000,
    );
    const stream = streamSimpleOpenAIResponses(
      model,
      { messages: [{ role: "user", content: "hello", timestamp: 0 }] },
      {
        apiKey: "test-api-key",
        cacheRetention: "none",
        // Request a concrete effort; the compat layer should strip it.
        reasoning: "low",
        onPayload: (payload) => {
          clearTimeout(timeout);
          // Resolve with a clone BEFORE throwing, so the abort below cannot
          // interfere with the captured snapshot.
          resolve(structuredClone(payload as Record<string, unknown>));
          // Throwing here aborts the stream; the payload is already captured.
          throw new Error("stop after payload capture");
        },
      },
    );
    // Intentionally discard the stream result — the thrown abort error above
    // is expected and must not become an unhandled rejection in the test.
    void stream.result();
  });
  return await payloadPromise;
}
describe("xai stream wrappers", () => {
it("rewrites supported Grok models to fast variants when fast mode is enabled", () => {
expect(captureWrappedModelId({ modelId: "grok-3", fastMode: true })).toBe("grok-3-fast");
@@ -139,6 +179,13 @@ describe("xai stream wrappers", () => {
expect(payload).not.toHaveProperty("reasoning_effort");
});
// Regression test for `Invalid reasoning effort` failures on grok-4.3:
// with every effort level mapped to null, pi-ai must omit both the
// `reasoning` block and its `include` companion from the request payload.
it("marks native xAI Responses thinking efforts unsupported before pi-ai builds payloads", async () => {
  const payload = await captureXaiResponsesPayloadWithThinking();
  expect(payload).not.toHaveProperty("reasoning");
  expect(payload).not.toHaveProperty("include");
});
it("moves image-bearing tool results out of function_call_output payloads", () => {
const payload: Record<string, unknown> = {
input: [

View File

@@ -585,7 +585,14 @@ describe("xai provider models", () => {
api: "openai-responses",
baseUrl: "https://api.x.ai/v1",
reasoning: true,
thinkingLevelMap: { off: null },
thinkingLevelMap: {
off: null,
minimal: null,
low: null,
medium: null,
high: null,
xhigh: null,
},
input: ["text", "image"],
contextWindow: 1_000_000,
maxTokens: 64_000,