fix: defer model pricing refresh

This commit is contained in:
Peter Steinberger
2026-04-23 08:15:36 +01:00
parent 8714badc0c
commit 966e814c5e
3 changed files with 47 additions and 5 deletions

View File

@@ -39,6 +39,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Gateway/model pricing: fetch OpenRouter and LiteLLM pricing asynchronously at startup and extend catalog fetch timeouts to 30 seconds, reducing noisy timeout warnings during slow upstream responses.
- Status: show `Fast` in `/status` when fast mode is enabled, including config/default-derived fast mode, and omit it when disabled.
- Models/auth: merge provider-owned default-model additions from `openclaw models auth login` instead of replacing `agents.defaults.models`, so re-authenticating an OAuth provider such as OpenAI Codex no longer wipes other providers' aliases and per-model params. Migrations that must rename keys (Anthropic -> Claude CLI) opt in with `replaceDefaultModels`. Fixes #69414. (#70435) Thanks @neeravmakwana.
- Media understanding/audio: prefer configured or key-backed STT providers before auto-detected local Whisper CLIs, so installed local transcription tools no longer shadow API providers such as Groq/OpenAI in `tools.media.audio` auto mode. Fixes #68727.

View File

@@ -19,6 +19,7 @@ import {
collectConfiguredModelPricingRefs,
getCachedGatewayModelPricing,
refreshGatewayModelPricingCache,
startGatewayModelPricingRefresh,
} from "./model-pricing-cache.js";
describe("model-pricing-cache", () => {
@@ -519,6 +520,39 @@ describe("model-pricing-cache", () => {
});
});
it("defers bootstrap refresh work until after the starter returns", async () => {
  // Minimal config carrying a single configured primary model.
  const config = {
    agents: {
      defaults: {
        model: { primary: "anthropic/claude-opus-4-6" },
      },
    },
  } as unknown as OpenClawConfig;
  const fetchImpl = withFetchPreconnect(
    vi.fn(async (input: RequestInfo | URL) => {
      // Normalize the fetch target to a plain URL string.
      let url: string;
      if (typeof input === "string") {
        url = input;
      } else if (input instanceof URL) {
        url = input.href;
      } else {
        url = input.url;
      }
      // The OpenRouter catalog endpoint gets an empty model list;
      // every other endpoint gets an empty JSON object.
      const body = url.includes("openrouter.ai")
        ? JSON.stringify({ data: [] })
        : JSON.stringify({});
      return new Response(body, {
        status: 200,
        headers: { "Content-Type": "application/json" },
      });
    }),
  );
  const stop = startGatewayModelPricingRefresh({ config, fetchImpl });
  // The starter must not perform any network work synchronously.
  expect(fetchImpl).not.toHaveBeenCalled();
  await vi.dynamicImportSettled();
  // Once dynamic imports/microtasks settle, the deferred refresh has fired.
  expect(fetchImpl).toHaveBeenCalled();
  stop();
});
it("logs configured timeout seconds when pricing fetches time out", async () => {
const warnings: string[] = [];
loggingState.rawConsole = {
@@ -549,10 +583,10 @@ describe("model-pricing-cache", () => {
expect(warnings).toEqual(
expect.arrayContaining([
expect.stringContaining(
"OpenRouter pricing fetch failed (timeout 15s): TimeoutError: The operation was aborted due to timeout",
"OpenRouter pricing fetch failed (timeout 30s): TimeoutError: The operation was aborted due to timeout",
),
expect.stringContaining(
"LiteLLM pricing fetch failed (timeout 15s): TimeoutError: The operation was aborted due to timeout",
"LiteLLM pricing fetch failed (timeout 30s): TimeoutError: The operation was aborted due to timeout",
),
]),
);

View File

@@ -40,7 +40,7 @@ const OPENROUTER_MODELS_URL = "https://openrouter.ai/api/v1/models";
const LITELLM_PRICING_URL =
"https://raw.githubusercontent.com/BerriAI/litellm/main/model_prices_and_context_window.json";
const CACHE_TTL_MS = 24 * 60 * 60_000;
const FETCH_TIMEOUT_MS = 15_000;
const FETCH_TIMEOUT_MS = 30_000;
const MAX_PRICING_CATALOG_BYTES = 5 * 1024 * 1024;
const PROVIDER_ALIAS_TO_OPENROUTER: Record<string, string> = {
"google-gemini-cli": "google",
@@ -655,10 +655,17 @@ export function startGatewayModelPricingRefresh(params: {
config: OpenClawConfig;
fetchImpl?: typeof fetch;
}): () => void {
void refreshGatewayModelPricingCache(params).catch((error: unknown) => {
log.warn(`pricing bootstrap failed: ${String(error)}`);
let stopped = false;
queueMicrotask(() => {
if (stopped) {
return;
}
void refreshGatewayModelPricingCache(params).catch((error: unknown) => {
log.warn(`pricing bootstrap failed: ${String(error)}`);
});
});
return () => {
stopped = true;
clearRefreshTimer();
};
}