test(release): tolerate xAI billing drift in live checks

This commit is contained in:
Peter Steinberger
2026-05-02 02:04:00 +01:00
parent a78df4a1a3
commit dd1b9c6481
7 changed files with 355 additions and 276 deletions

View File

@@ -1,3 +1,4 @@
import { isBillingErrorMessage } from "openclaw/plugin-sdk/test-env";
import { describe, expect, it } from "vitest";
import { createXSearchTool } from "./x-search.js";
@@ -42,6 +43,12 @@ describeLive("xai x_search live", () => {
message?: string;
};
const errorMessage = [details.error, details.message].filter(Boolean).join(" ");
if (isBillingErrorMessage(errorMessage)) {
console.warn(`[xai:x-search:live] skip: billing drift: ${errorMessage}`);
return;
}
expect(details.error, details.message).toBeUndefined();
expect(details.provider).toBe("xai");
expect(details.content?.trim().length ?? 0).toBeGreaterThan(0);

View File

@@ -12,6 +12,7 @@ import {
runRealtimeSttLiveTest,
} from "openclaw/plugin-sdk/provider-test-contracts";
import { getRuntimeConfig } from "openclaw/plugin-sdk/runtime-config-snapshot";
import { isBillingErrorMessage } from "openclaw/plugin-sdk/test-env";
import { describe, expect, it } from "vitest";
import plugin from "./index.js";
import { XAI_DEFAULT_STT_MODEL } from "./stt.js";
@@ -71,211 +72,234 @@ const registerXaiPlugin = () =>
name: "xAI Provider",
});
/**
 * Executes a live xAI test case, downgrading billing-related failures
 * (exhausted credits, spending-limit drift) to a console warning so the
 * suite skips instead of failing on environmental account state.
 *
 * @param label - short case identifier included in the skip warning
 * @param run - the live test body to execute
 * @throws re-throws any error not classified as a billing error
 */
async function runXaiLiveCase(label: string, run: () => Promise<void>): Promise<void> {
  try {
    await run();
    return;
  } catch (caught) {
    const text = caught instanceof Error ? caught.message : String(caught);
    // Anything that is not billing drift is a real failure — surface it.
    if (!isBillingErrorMessage(text)) {
      throw caught;
    }
    console.warn(`[xai:live] skip ${label}: billing drift: ${text}`);
  }
}
describeLive("xai plugin live", () => {
it("synthesizes TTS through the registered speech provider", async () => {
const { speechProviders } = await registerXaiPlugin();
const speechProvider = requireRegisteredProvider(speechProviders, "xai");
const cfg = createLiveConfig();
await runXaiLiveCase("tts", async () => {
const { speechProviders } = await registerXaiPlugin();
const speechProvider = requireRegisteredProvider(speechProviders, "xai");
const cfg = createLiveConfig();
const voices = await speechProvider.listVoices?.({});
expect(voices).toEqual(expect.arrayContaining([expect.objectContaining({ id: "eve" })]));
const voices = await speechProvider.listVoices?.({});
expect(voices).toEqual(expect.arrayContaining([expect.objectContaining({ id: "eve" })]));
const audioFile = await speechProvider.synthesize({
text: "OpenClaw xAI text to speech integration test OK.",
cfg,
providerConfig: {
apiKey: XAI_API_KEY,
baseUrl: "https://api.x.ai/v1",
voiceId: "eve",
},
target: "audio-file",
timeoutMs: 90_000,
const audioFile = await speechProvider.synthesize({
text: "OpenClaw xAI text to speech integration test OK.",
cfg,
providerConfig: {
apiKey: XAI_API_KEY,
baseUrl: "https://api.x.ai/v1",
voiceId: "eve",
},
target: "audio-file",
timeoutMs: 90_000,
});
expect(audioFile.outputFormat).toBe("mp3");
expect(audioFile.fileExtension).toBe(".mp3");
expect(audioFile.voiceCompatible).toBe(false);
expect(audioFile.audioBuffer.byteLength).toBeGreaterThan(512);
const telephony = await speechProvider.synthesizeTelephony?.({
text: "OpenClaw xAI telephony check OK.",
cfg,
providerConfig: {
apiKey: XAI_API_KEY,
baseUrl: "https://api.x.ai/v1",
voiceId: "eve",
},
timeoutMs: 90_000,
});
if (!telephony) {
throw new Error("xAI telephony synthesis did not return audio");
}
expect(telephony.outputFormat).toBe("pcm");
expect(telephony.sampleRate).toBe(24_000);
expect(telephony?.audioBuffer.byteLength).toBeGreaterThan(512);
});
expect(audioFile.outputFormat).toBe("mp3");
expect(audioFile.fileExtension).toBe(".mp3");
expect(audioFile.voiceCompatible).toBe(false);
expect(audioFile.audioBuffer.byteLength).toBeGreaterThan(512);
const telephony = await speechProvider.synthesizeTelephony?.({
text: "OpenClaw xAI telephony check OK.",
cfg,
providerConfig: {
apiKey: XAI_API_KEY,
baseUrl: "https://api.x.ai/v1",
voiceId: "eve",
},
timeoutMs: 90_000,
});
if (!telephony) {
throw new Error("xAI telephony synthesis did not return audio");
}
expect(telephony.outputFormat).toBe("pcm");
expect(telephony.sampleRate).toBe(24_000);
expect(telephony?.audioBuffer.byteLength).toBeGreaterThan(512);
}, 120_000);
it("transcribes audio through the registered media provider", async () => {
const { mediaProviders, speechProviders } = await registerXaiPlugin();
const mediaProvider = requireRegisteredProvider(mediaProviders, "xai");
const speechProvider = requireRegisteredProvider(speechProviders, "xai");
const cfg = createLiveConfig();
const phrase = "OpenClaw xAI speech to text integration test OK.";
await runXaiLiveCase("stt", async () => {
const { mediaProviders, speechProviders } = await registerXaiPlugin();
const mediaProvider = requireRegisteredProvider(mediaProviders, "xai");
const speechProvider = requireRegisteredProvider(speechProviders, "xai");
const cfg = createLiveConfig();
const phrase = "OpenClaw xAI speech to text integration test OK.";
const audioFile = await speechProvider.synthesize({
text: phrase,
cfg,
providerConfig: {
const audioFile = await speechProvider.synthesize({
text: phrase,
cfg,
providerConfig: {
apiKey: XAI_API_KEY,
baseUrl: "https://api.x.ai/v1",
voiceId: "eve",
},
target: "audio-file",
timeoutMs: 90_000,
});
const transcript = await mediaProvider.transcribeAudio?.({
buffer: audioFile.audioBuffer,
fileName: "xai-stt-live.mp3",
mime: "audio/mpeg",
apiKey: XAI_API_KEY,
baseUrl: "https://api.x.ai/v1",
voiceId: "eve",
},
target: "audio-file",
timeoutMs: 90_000,
});
model: XAI_DEFAULT_STT_MODEL,
timeoutMs: 90_000,
});
const transcript = await mediaProvider.transcribeAudio?.({
buffer: audioFile.audioBuffer,
fileName: "xai-stt-live.mp3",
mime: "audio/mpeg",
apiKey: XAI_API_KEY,
baseUrl: "https://api.x.ai/v1",
model: XAI_DEFAULT_STT_MODEL,
timeoutMs: 90_000,
const normalized = transcript?.text.toLowerCase() ?? "";
expect(transcript?.model).toBe(XAI_DEFAULT_STT_MODEL);
expectOpenClawLiveTranscriptMarker(normalized);
expect(normalized).toContain("speech");
expect(normalized).toContain("text");
expect(normalized).toContain("integration");
});
const normalized = transcript?.text.toLowerCase() ?? "";
expect(transcript?.model).toBe(XAI_DEFAULT_STT_MODEL);
expectOpenClawLiveTranscriptMarker(normalized);
expect(normalized).toContain("speech");
expect(normalized).toContain("text");
expect(normalized).toContain("integration");
}, 180_000);
it("opens xAI realtime STT before sending audio", async () => {
const { realtimeTranscriptionProviders } = await registerXaiPlugin();
const realtimeProvider = requireRegisteredProvider(realtimeTranscriptionProviders, "xai");
const errors: Error[] = [];
const session = realtimeProvider.createSession({
providerConfig: {
apiKey: XAI_API_KEY,
baseUrl: "https://api.x.ai/v1",
sampleRate: 16_000,
encoding: "pcm",
interimResults: true,
endpointingMs: 800,
language: "en",
},
onError: (error) => errors.push(error),
});
await runXaiLiveCase("realtime-open", async () => {
const { realtimeTranscriptionProviders } = await registerXaiPlugin();
const realtimeProvider = requireRegisteredProvider(realtimeTranscriptionProviders, "xai");
const errors: Error[] = [];
const session = realtimeProvider.createSession({
providerConfig: {
apiKey: XAI_API_KEY,
baseUrl: "https://api.x.ai/v1",
sampleRate: 16_000,
encoding: "pcm",
interimResults: true,
endpointingMs: 800,
language: "en",
},
onError: (error) => errors.push(error),
});
try {
await session.connect();
expect(errors).toEqual([]);
expect(session.isConnected()).toBe(true);
} finally {
session.close();
}
try {
await session.connect();
expect(errors).toEqual([]);
expect(session.isConnected()).toBe(true);
} finally {
session.close();
}
});
}, 30_000);
it("streams realtime STT through the registered transcription provider", async () => {
const { realtimeTranscriptionProviders, speechProviders } = await registerXaiPlugin();
const realtimeProvider = requireRegisteredProvider(realtimeTranscriptionProviders, "xai");
const speechProvider = requireRegisteredProvider(speechProviders, "xai");
const cfg = createLiveConfig();
const phrase = "OpenClaw xAI realtime transcription integration test OK.";
await runXaiLiveCase("realtime-stream", async () => {
const { realtimeTranscriptionProviders, speechProviders } = await registerXaiPlugin();
const realtimeProvider = requireRegisteredProvider(realtimeTranscriptionProviders, "xai");
const speechProvider = requireRegisteredProvider(speechProviders, "xai");
const cfg = createLiveConfig();
const phrase = "OpenClaw xAI realtime transcription integration test OK.";
const telephony = await speechProvider.synthesizeTelephony?.({
text: phrase,
cfg,
providerConfig: {
apiKey: XAI_API_KEY,
baseUrl: "https://api.x.ai/v1",
voiceId: "eve",
},
timeoutMs: 90_000,
const telephony = await speechProvider.synthesizeTelephony?.({
text: phrase,
cfg,
providerConfig: {
apiKey: XAI_API_KEY,
baseUrl: "https://api.x.ai/v1",
voiceId: "eve",
},
timeoutMs: 90_000,
});
if (!telephony) {
throw new Error("xAI telephony synthesis did not return audio");
}
expect(telephony.outputFormat).toBe("pcm");
expect(telephony.sampleRate).toBe(24_000);
const chunkSize = Math.max(1, Math.floor(telephony.sampleRate * 2 * 0.1));
const { transcripts, partials } = await runRealtimeSttLiveTest({
provider: realtimeProvider,
providerConfig: {
apiKey: XAI_API_KEY,
baseUrl: "https://api.x.ai/v1",
sampleRate: telephony.sampleRate,
encoding: "pcm",
interimResults: true,
endpointingMs: 500,
language: "en",
},
audio: telephony.audioBuffer,
chunkSize,
delayMs: 20,
closeBeforeWait: true,
});
const normalized = transcripts.join(" ").toLowerCase();
expectOpenClawLiveTranscriptMarker(normalized);
expect(normalized).toContain("transcription");
expect(partials.length + transcripts.length).toBeGreaterThan(0);
});
if (!telephony) {
throw new Error("xAI telephony synthesis did not return audio");
}
expect(telephony.outputFormat).toBe("pcm");
expect(telephony.sampleRate).toBe(24_000);
const chunkSize = Math.max(1, Math.floor(telephony.sampleRate * 2 * 0.1));
const { transcripts, partials } = await runRealtimeSttLiveTest({
provider: realtimeProvider,
providerConfig: {
apiKey: XAI_API_KEY,
baseUrl: "https://api.x.ai/v1",
sampleRate: telephony.sampleRate,
encoding: "pcm",
interimResults: true,
endpointingMs: 500,
language: "en",
},
audio: telephony.audioBuffer,
chunkSize,
delayMs: 20,
closeBeforeWait: true,
});
const normalized = transcripts.join(" ").toLowerCase();
expectOpenClawLiveTranscriptMarker(normalized);
expect(normalized).toContain("transcription");
expect(partials.length + transcripts.length).toBeGreaterThan(0);
}, 180_000);
it("generates and edits images through the registered image provider", async () => {
const { imageProviders } = await registerXaiPlugin();
const imageProvider = requireRegisteredProvider(imageProviders, "xai");
const cfg = createLiveConfig();
const agentDir = await createTempAgentDir();
await runXaiLiveCase("image", async () => {
const { imageProviders } = await registerXaiPlugin();
const imageProvider = requireRegisteredProvider(imageProviders, "xai");
const cfg = createLiveConfig();
const agentDir = await createTempAgentDir();
try {
const generated = await imageProvider.generateImage({
provider: "xai",
model: LIVE_IMAGE_MODEL,
prompt: "Create a minimal flat orange square centered on a white background.",
cfg,
agentDir,
authStore: EMPTY_AUTH_STORE,
timeoutMs: 180_000,
count: 1,
aspectRatio: "1:1",
resolution: "1K",
});
try {
const generated = await imageProvider.generateImage({
provider: "xai",
model: LIVE_IMAGE_MODEL,
prompt: "Create a minimal flat orange square centered on a white background.",
cfg,
agentDir,
authStore: EMPTY_AUTH_STORE,
timeoutMs: 180_000,
count: 1,
aspectRatio: "1:1",
resolution: "1K",
});
expect(generated.model).toBe(LIVE_IMAGE_MODEL);
expect(generated.images.length).toBeGreaterThan(0);
expect(generated.images[0]?.mimeType.startsWith("image/")).toBe(true);
expect(generated.images[0]?.buffer.byteLength).toBeGreaterThan(1_000);
expect(generated.model).toBe(LIVE_IMAGE_MODEL);
expect(generated.images.length).toBeGreaterThan(0);
expect(generated.images[0]?.mimeType.startsWith("image/")).toBe(true);
expect(generated.images[0]?.buffer.byteLength).toBeGreaterThan(1_000);
const edited = await imageProvider.generateImage({
provider: "xai",
model: LIVE_IMAGE_MODEL,
prompt:
"Render this image as a pencil sketch with detailed shading. Keep the same framing.",
cfg,
agentDir,
authStore: EMPTY_AUTH_STORE,
timeoutMs: 180_000,
count: 1,
resolution: "1K",
inputImages: [
{
buffer: createReferencePng(),
mimeType: "image/png",
fileName: "reference.png",
},
],
});
const edited = await imageProvider.generateImage({
provider: "xai",
model: LIVE_IMAGE_MODEL,
prompt:
"Render this image as a pencil sketch with detailed shading. Keep the same framing.",
cfg,
agentDir,
authStore: EMPTY_AUTH_STORE,
timeoutMs: 180_000,
count: 1,
resolution: "1K",
inputImages: [
{
buffer: createReferencePng(),
mimeType: "image/png",
fileName: "reference.png",
},
],
});
expect(edited.model).toBe(LIVE_IMAGE_MODEL);
expect(edited.images.length).toBeGreaterThan(0);
expect(edited.images[0]?.mimeType.startsWith("image/")).toBe(true);
expect(edited.images[0]?.buffer.byteLength).toBeGreaterThan(1_000);
} finally {
await fs.rm(agentDir, { recursive: true, force: true });
}
expect(edited.model).toBe(LIVE_IMAGE_MODEL);
expect(edited.images.length).toBeGreaterThan(0);
expect(edited.images[0]?.mimeType.startsWith("image/")).toBe(true);
expect(edited.images[0]?.buffer.byteLength).toBeGreaterThan(1_000);
} finally {
await fs.rm(agentDir, { recursive: true, force: true });
}
});
}, 300_000);
});

View File

@@ -214,6 +214,12 @@ describe("isBillingErrorMessage", () => {
expect(isBillingErrorMessage(msg)).toBe(true);
expect(classifyFailoverReason(msg)).toBe("billing");
});
it("matches provider spending-limit exhaustion messages", () => {
const msg =
"Your team has either used all available credits or reached its monthly spending limit.";
expect(isBillingErrorMessage(msg)).toBe(true);
expect(classifyFailoverReason(msg)).toBe("billing");
});
it("classifies flat JSON billing payloads with string error code (#74079)", () => {
const raw =
'{"error":"insufficient_balance","message":"Insufficient MBT balance. Top up or upgrade your subscription to continue.","upgradeUrl":"/settings/billing"}';

View File

@@ -179,6 +179,8 @@ const ERROR_PATTERNS = {
/["']?(?:status|code)["']?\s*[:=]\s*402\b|\bhttp\s*402\b|\berror(?:\s+code)?\s*[:=]?\s*402\b|\b(?:got|returned|received)\s+(?:a\s+)?402\b|^\s*402\s+payment/i,
"payment required",
"insufficient credits",
/used\s+all\s+available\s+credits/i,
/(?:monthly\s+)?spend(?:ing)?\s+limit/i,
/insufficient[_ ]quota/i,
"credit balance",
"plans & billing",

View File

@@ -6,6 +6,7 @@ import {
extractNonEmptyAssistantText,
isLiveTestEnabled,
} from "./live-test-helpers.js";
import { isBillingErrorMessage } from "./pi-embedded-helpers/failover-matches.js";
import { applyExtraParamsToAgent } from "./pi-embedded-runner.js";
import { createWebSearchTool } from "./tools/web-search.js";
@@ -30,6 +31,19 @@ function resolveLiveXaiModel() {
return getModel("xai", "grok-4-1-fast-reasoning" as never) ?? getModel("xai", "grok-4");
}
/**
 * Wraps a live xAI test case so billing-drift errors (credits used up or
 * monthly spend limit reached) log a warning and skip rather than fail.
 *
 * @param label - short case identifier included in the skip warning
 * @param run - the live test body to execute
 * @throws re-throws any error not classified as a billing error
 */
async function runXaiLiveCase(label: string, run: () => Promise<void>): Promise<void> {
  try {
    await run();
    return;
  } catch (caught) {
    const text = caught instanceof Error ? caught.message : String(caught);
    // Only billing errors are tolerated; everything else still fails the test.
    if (!isBillingErrorMessage(text)) {
      throw caught;
    }
    console.warn(`[xai:live] skip ${label}: billing drift: ${text}`);
  }
}
async function collectDoneMessage(
stream: AsyncIterable<{ type: string; message?: AssistantLikeMessage }>,
): Promise<AssistantLikeMessage> {
@@ -50,124 +64,136 @@ function extractFirstToolCallId(message: AssistantLikeMessage): string | undefin
describeLive("xai live", () => {
it("returns assistant text for Grok 4.1 Fast Reasoning", async () => {
const model = resolveLiveXaiModel();
expect(model).toBeDefined();
const res = await completeSimple(
model,
{
messages: createSingleUserPromptMessage(),
},
{
apiKey: XAI_KEY,
maxTokens: 64,
reasoning: "medium",
},
);
expect(extractNonEmptyAssistantText(res.content).length).toBeGreaterThan(0);
}, 30_000);
it("applies xAI tool wrappers on live tool calls", async () => {
const model = resolveLiveXaiModel();
expect(model).toBeDefined();
const agent = { streamFn: streamSimple };
applyExtraParamsToAgent(agent, undefined, "xai", model.id);
const noopTool = {
name: "noop",
description: "Return ok.",
parameters: Type.Object({}, { additionalProperties: false }),
};
const prompts = [
"Call the tool `noop` with {}. Do not write any other text.",
"IMPORTANT: Call the tool `noop` with {} and respond only with the tool call.",
"Return only a tool call for `noop` with {}.",
];
let doneMessage: AssistantLikeMessage | undefined;
let capturedPayload: Record<string, unknown> | undefined;
for (const prompt of prompts) {
capturedPayload = undefined;
const stream = agent.streamFn(
await runXaiLiveCase("complete", async () => {
const model = resolveLiveXaiModel();
expect(model).toBeDefined();
const res = await completeSimple(
model,
{
messages: createSingleUserPromptMessage(prompt),
tools: [noopTool],
messages: createSingleUserPromptMessage(),
},
{
apiKey: XAI_KEY,
maxTokens: 128,
maxTokens: 64,
reasoning: "medium",
onPayload: (payload) => {
capturedPayload = payload as Record<string, unknown>;
},
},
);
doneMessage = await collectDoneMessage(
stream as AsyncIterable<{ type: string; message?: AssistantLikeMessage }>,
);
if (extractFirstToolCallId(doneMessage)) {
break;
expect(extractNonEmptyAssistantText(res.content).length).toBeGreaterThan(0);
});
}, 30_000);
it("applies xAI tool wrappers on live tool calls", async () => {
await runXaiLiveCase("tool-call", async () => {
const model = resolveLiveXaiModel();
expect(model).toBeDefined();
const agent = { streamFn: streamSimple };
applyExtraParamsToAgent(agent, undefined, "xai", model.id);
const noopTool = {
name: "noop",
description: "Return ok.",
parameters: Type.Object({}, { additionalProperties: false }),
};
const prompts = [
"Call the tool `noop` with {}. Do not write any other text.",
"IMPORTANT: Call the tool `noop` with {} and respond only with the tool call.",
"Return only a tool call for `noop` with {}.",
];
let doneMessage: AssistantLikeMessage | undefined;
let capturedPayload: Record<string, unknown> | undefined;
for (const prompt of prompts) {
capturedPayload = undefined;
const stream = agent.streamFn(
model,
{
messages: createSingleUserPromptMessage(prompt),
tools: [noopTool],
},
{
apiKey: XAI_KEY,
maxTokens: 128,
reasoning: "medium",
onPayload: (payload) => {
capturedPayload = payload as Record<string, unknown>;
},
},
);
doneMessage = await collectDoneMessage(
stream as AsyncIterable<{ type: string; message?: AssistantLikeMessage }>,
);
if (extractFirstToolCallId(doneMessage)) {
break;
}
}
}
expect(doneMessage).toBeDefined();
expect(extractFirstToolCallId(doneMessage!)).toBeDefined();
if (capturedPayload && Object.hasOwn(capturedPayload, "tool_stream")) {
expect(capturedPayload.tool_stream).toBe(true);
}
expect(doneMessage).toBeDefined();
expect(extractFirstToolCallId(doneMessage!)).toBeDefined();
if (capturedPayload && Object.hasOwn(capturedPayload, "tool_stream")) {
expect(capturedPayload.tool_stream).toBe(true);
}
const payloadTools = Array.isArray(capturedPayload?.tools)
? (capturedPayload.tools as Array<Record<string, unknown>>)
: [];
const firstFunction = payloadTools[0]?.function;
if (firstFunction && typeof firstFunction === "object") {
expect([undefined, false]).toContain((firstFunction as Record<string, unknown>).strict);
}
const payloadTools = Array.isArray(capturedPayload?.tools)
? (capturedPayload.tools as Array<Record<string, unknown>>)
: [];
const firstFunction = payloadTools[0]?.function;
if (firstFunction && typeof firstFunction === "object") {
expect([undefined, false]).toContain((firstFunction as Record<string, unknown>).strict);
}
});
}, 45_000);
it("runs Grok web_search live", async () => {
const tool = createWebSearchTool({
config: {
tools: {
web: {
search: {
provider: "grok",
timeoutSeconds: XAI_WEB_SEARCH_LIVE_TIMEOUT_SECONDS,
grok: {
model: "grok-4-1-fast",
await runXaiLiveCase("web-search", async () => {
const tool = createWebSearchTool({
config: {
tools: {
web: {
search: {
provider: "grok",
timeoutSeconds: XAI_WEB_SEARCH_LIVE_TIMEOUT_SECONDS,
grok: {
model: "grok-4-1-fast",
},
},
},
},
},
},
});
expect(tool).toBeTruthy();
const result = await tool!.execute("web-search:grok-live", {
query: "OpenClaw GitHub",
count: 3,
});
const details = (result.details ?? {}) as {
provider?: string;
content?: string;
citations?: string[];
inlineCitations?: Array<unknown>;
error?: string;
message?: string;
};
const errorMessage = [details.error, details.message].filter(Boolean).join(" ");
if (isBillingErrorMessage(errorMessage)) {
console.warn(`[xai:live] skip web-search: billing drift: ${errorMessage}`);
return;
}
expect(details.error, details.message).toBeUndefined();
expect(details.provider).toBe("grok");
expect(details.content?.trim().length ?? 0).toBeGreaterThan(0);
const citationCount =
(Array.isArray(details.citations) ? details.citations.length : 0) +
(Array.isArray(details.inlineCitations) ? details.inlineCitations.length : 0);
expect(citationCount).toBeGreaterThan(0);
});
expect(tool).toBeTruthy();
const result = await tool!.execute("web-search:grok-live", {
query: "OpenClaw GitHub",
count: 3,
});
const details = (result.details ?? {}) as {
provider?: string;
content?: string;
citations?: string[];
inlineCitations?: Array<unknown>;
error?: string;
message?: string;
};
expect(details.error, details.message).toBeUndefined();
expect(details.provider).toBe("grok");
expect(details.content?.trim().length ?? 0).toBeGreaterThan(0);
const citationCount =
(Array.isArray(details.citations) ? details.citations.length : 0) +
(Array.isArray(details.inlineCitations) ? details.inlineCitations.length : 0);
expect(citationCount).toBeGreaterThan(0);
}, 90_000);
});

View File

@@ -30,6 +30,7 @@ import { normalizeProviderId } from "../agents/model-selection.js";
import { shouldSuppressBuiltInModel } from "../agents/model-suppression.js";
import { ensureOpenClawModelsJson } from "../agents/models-config.js";
import { isRateLimitErrorMessage } from "../agents/pi-embedded-helpers/errors.js";
import { isBillingErrorMessage } from "../agents/pi-embedded-helpers/failover-matches.js";
import { discoverAuthStorage, discoverModels } from "../agents/pi-model-discovery.js";
import { STREAM_ERROR_FALLBACK_TEXT } from "../agents/stream-message-shared.js";
import { clearRuntimeConfigSnapshot, getRuntimeConfig } from "../config/io.js";
@@ -1965,6 +1966,11 @@ async function runGatewayModelSuite(params: GatewayModelSuiteParams) {
logProgress(`${progressLabel}: skip (google rate limit)`);
break;
}
if (isBillingErrorMessage(message)) {
skippedCount += 1;
logProgress(`${progressLabel}: skip (billing drift)`);
break;
}
if (
(model.provider === "minimax" ||
model.provider === "opencode" ||

View File

@@ -7,6 +7,7 @@ import { resolveOpenClawAgentDir } from "../src/agents/agent-paths.js";
import { collectProviderApiKeys } from "../src/agents/live-auth-keys.js";
import { isLiveProfileKeyModeEnabled, isLiveTestEnabled } from "../src/agents/live-test-helpers.js";
import { resolveApiKeyForProvider } from "../src/agents/model-auth.js";
import { isBillingErrorMessage } from "../src/agents/pi-embedded-helpers/failover-matches.js";
import { loadConfig, type OpenClawConfig } from "../src/config/config.js";
import {
DEFAULT_LIVE_IMAGE_MODELS,
@@ -266,6 +267,13 @@ describeLive("image generation live (provider sweep)", () => {
);
} catch (error) {
const message = error instanceof Error ? error.message : String(error);
if (isBillingErrorMessage(message)) {
skipped.push(`${testCase.id} (${authLabel}): billing drift`);
console.warn(
`[live:image-generation] skip ${testCase.id} ms=${Date.now() - startedAt} reason=billing drift error=${message}`,
);
continue;
}
failures.push(`${testCase.id} (${authLabel}): ${message}`);
console.error(
`[live:image-generation] failed ${testCase.id} ms=${Date.now() - startedAt} error=${message}`,