fix: use static provider catalogs for model listing

This commit is contained in:
Shakker
2026-04-22 02:36:18 +01:00
committed by Shakker
parent 651d5e0022
commit 04ecf284fc
16 changed files with 219 additions and 25 deletions

View File

@@ -13,7 +13,7 @@ import {
applyChutesApiKeyConfig,
applyChutesProviderConfig,
} from "./onboard.js";
import { buildChutesProvider } from "./provider-catalog.js";
import { buildChutesProvider, buildStaticChutesProvider } from "./provider-catalog.js";
const PROVIDER_ID = "chutes";
@@ -180,6 +180,12 @@ export default definePluginEntry({
};
},
},
staticCatalog: {
order: "profile",
run: async () => ({
provider: buildStaticChutesProvider(),
}),
},
});
},
});

View File

@@ -6,6 +6,14 @@ import {
discoverChutesModels,
} from "./models.js";
/**
 * Build an offline Chutes provider from the bundled static model catalog.
 * No network discovery is performed; every model definition comes from
 * `CHUTES_MODEL_CATALOG`.
 */
export function buildStaticChutesProvider(): ModelProviderConfig {
  const models = CHUTES_MODEL_CATALOG.map((entry) => buildChutesModelDefinition(entry));
  return { baseUrl: CHUTES_BASE_URL, api: "openai-completions", models };
}
/**
* Build the Chutes provider with dynamic model discovery.
* Falls back to the static catalog on failure.

View File

@@ -3,7 +3,7 @@ import { defineSingleProviderPluginEntry } from "openclaw/plugin-sdk/provider-en
import { PASSTHROUGH_GEMINI_REPLAY_HOOKS } from "openclaw/plugin-sdk/provider-model-shared";
import { KILOCODE_THINKING_STREAM_HOOKS } from "openclaw/plugin-sdk/provider-stream-family";
import { applyKilocodeConfig, KILOCODE_DEFAULT_MODEL_REF } from "./onboard.js";
import { buildKilocodeProviderWithDiscovery } from "./provider-catalog.js";
import { buildKilocodeProvider, buildKilocodeProviderWithDiscovery } from "./provider-catalog.js";
const PROVIDER_ID = "kilocode";
@@ -29,6 +29,7 @@ export default defineSingleProviderPluginEntry({
],
catalog: {
buildProvider: buildKilocodeProviderWithDiscovery,
buildStaticProvider: buildKilocodeProvider,
},
augmentModelCatalog: ({ config }) =>
readConfiguredProviderCatalogEntries({

View File

@@ -52,6 +52,7 @@ export default defineSingleProviderPluginEntry({
],
catalog: {
buildProvider: buildMoonshotProvider,
buildStaticProvider: buildMoonshotProvider,
allowExplicitBaseUrl: true,
},
applyNativeStreamingUsageCompat: ({ providerConfig }) =>

View File

@@ -2,8 +2,15 @@ import { describe, expect, it, vi } from "vitest";
import { registerSingleProviderPlugin } from "../../test/helpers/plugins/plugin-registration.js";
import { expectPassthroughReplayPolicy } from "../../test/helpers/provider-replay-policy.ts";
import openrouterPlugin from "./index.js";
import { buildOpenrouterProvider } from "./provider-catalog.js";
describe("openrouter provider hooks", () => {
it("includes Kimi K2.6 in the bundled catalog", () => {
expect(buildOpenrouterProvider().models?.map((model) => model.id)).toContain(
"moonshotai/kimi-k2.6",
);
});
it("owns passthrough-gemini replay policy for Gemini-backed models", async () => {
await expectPassthroughReplayPolicy({
plugin: openrouterPlugin,

View File

@@ -110,6 +110,12 @@ export default definePluginEntry({
};
},
},
staticCatalog: {
order: "simple",
run: async () => ({
provider: buildOpenrouterProvider(),
}),
},
resolveDynamicModel: (ctx) => buildDynamicOpenRouterModel(ctx),
prepareDynamicModel: async (ctx) => {
await loadOpenRouterModelCapabilities(ctx.modelId);

View File

@@ -1,6 +1,9 @@
import { defineSingleProviderPluginEntry } from "openclaw/plugin-sdk/provider-entry";
import { applyVercelAiGatewayConfig, VERCEL_AI_GATEWAY_DEFAULT_MODEL_REF } from "./onboard.js";
import { buildVercelAiGatewayProvider } from "./provider-catalog.js";
import {
buildStaticVercelAiGatewayProvider,
buildVercelAiGatewayProvider,
} from "./provider-catalog.js";
const PROVIDER_ID = "vercel-ai-gateway";
@@ -30,6 +33,7 @@ export default defineSingleProviderPluginEntry({
],
catalog: {
buildProvider: buildVercelAiGatewayProvider,
buildStaticProvider: buildStaticVercelAiGatewayProvider,
},
},
});

View File

@@ -1,6 +1,9 @@
import { describe, expect, it } from "vitest";
import { getStaticVercelAiGatewayModelCatalog, VERCEL_AI_GATEWAY_BASE_URL } from "./api.js";
import { buildVercelAiGatewayProvider } from "./provider-catalog.js";
import {
buildStaticVercelAiGatewayProvider,
buildVercelAiGatewayProvider,
} from "./provider-catalog.js";
describe("vercel ai gateway provider catalog", () => {
it("builds the bundled Vercel AI Gateway defaults", async () => {
@@ -9,13 +12,29 @@ describe("vercel ai gateway provider catalog", () => {
expect(provider.baseUrl).toBe(VERCEL_AI_GATEWAY_BASE_URL);
expect(provider.api).toBe("anthropic-messages");
expect(provider.models?.map((model) => model.id)).toEqual(
expect.arrayContaining(["anthropic/claude-opus-4.6", "openai/gpt-5.4", "openai/gpt-5.4-pro"]),
expect.arrayContaining([
"anthropic/claude-opus-4.6",
"openai/gpt-5.4",
"openai/gpt-5.4-pro",
"moonshotai/kimi-k2.6",
]),
);
});
it("exposes the static fallback model catalog", () => {
expect(getStaticVercelAiGatewayModelCatalog().map((model) => model.id)).toEqual(
expect.arrayContaining(["anthropic/claude-opus-4.6", "openai/gpt-5.4", "openai/gpt-5.4-pro"]),
expect.arrayContaining([
"anthropic/claude-opus-4.6",
"openai/gpt-5.4",
"openai/gpt-5.4-pro",
"moonshotai/kimi-k2.6",
]),
);
});
it("builds an offline static provider catalog", () => {
expect(buildStaticVercelAiGatewayProvider().models?.map((model) => model.id)).toEqual(
expect.arrayContaining(["moonshotai/kimi-k2.6"]),
);
});
});

View File

@@ -1,5 +1,17 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { discoverVercelAiGatewayModels, VERCEL_AI_GATEWAY_BASE_URL } from "./models.js";
import {
discoverVercelAiGatewayModels,
getStaticVercelAiGatewayModelCatalog,
VERCEL_AI_GATEWAY_BASE_URL,
} from "./models.js";
/**
 * Build an offline Vercel AI Gateway provider from the bundled static
 * model catalog — the synchronous counterpart to the discovery-based
 * builder, used when no network lookup is possible.
 */
export function buildStaticVercelAiGatewayProvider(): ModelProviderConfig {
  const provider: ModelProviderConfig = {
    baseUrl: VERCEL_AI_GATEWAY_BASE_URL,
    api: "anthropic-messages",
    models: getStaticVercelAiGatewayModelCatalog(),
  };
  return provider;
}
export async function buildVercelAiGatewayProvider(): Promise<ModelProviderConfig> {
return {