feat(plugins): move anthropic and openai vendors to plugins

This commit is contained in:
Peter Steinberger
2026-03-15 17:07:28 -07:00
parent e42d86afa9
commit ee7ecb2dd4
17 changed files with 530 additions and 10 deletions

8
.github/labeler.yml vendored
View File

@@ -242,6 +242,10 @@
- changed-files:
- any-glob-to-any-file:
- "extensions/byteplus/**"
"extensions: anthropic":
- changed-files:
- any-glob-to-any-file:
- "extensions/anthropic/**"
"extensions: cloudflare-ai-gateway":
- changed-files:
- any-glob-to-any-file:
@@ -258,6 +262,10 @@
- changed-files:
- any-glob-to-any-file:
- "extensions/kilocode/**"
"extensions: openai":
- changed-files:
- any-glob-to-any-file:
- "extensions/openai/**"
"extensions: kimi-coding":
- changed-files:
- any-glob-to-any-file:

View File

@@ -51,10 +51,14 @@ Typical split:
Current bundled examples:
- `anthropic`: Claude 4.6 forward-compat fallback, usage endpoint fetching,
and cache-TTL/provider-family metadata
- `openrouter`: pass-through model ids, request wrappers, provider capability
hints, and cache-TTL policy
- `github-copilot`: forward-compat model fallback, Claude-thinking transcript
hints, runtime token exchange, and usage endpoint fetching
- `openai`: GPT-5.4 forward-compat fallback, direct OpenAI transport
normalization, and provider-family metadata
- `openai-codex`: forward-compat model fallback, transport normalization, and
default transport params plus usage endpoint fetching
- `google-gemini-cli`: Gemini 3.1 forward-compat fallback plus usage-token

View File

@@ -164,6 +164,7 @@ Important trust note:
- [Nostr](/channels/nostr) — `@openclaw/nostr`
- [Zalo](/channels/zalo) — `@openclaw/zalo`
- [Microsoft Teams](/channels/msteams) — `@openclaw/msteams`
- Anthropic provider runtime — bundled as `anthropic` (enabled by default)
- BytePlus provider catalog — bundled as `byteplus` (enabled by default)
- Cloudflare AI Gateway provider catalog — bundled as `cloudflare-ai-gateway` (enabled by default)
- Google Antigravity OAuth (provider auth) — bundled as `google-antigravity-auth` (disabled by default)
@@ -178,6 +179,7 @@ Important trust note:
- Model Studio provider catalog — bundled as `modelstudio` (enabled by default)
- Moonshot provider runtime — bundled as `moonshot` (enabled by default)
- NVIDIA provider catalog — bundled as `nvidia` (enabled by default)
- OpenAI provider runtime — bundled as `openai` (enabled by default)
- OpenAI Codex provider runtime — bundled as `openai-codex` (enabled by default)
- OpenCode Go provider capabilities — bundled as `opencode-go` (enabled by default)
- OpenCode Zen provider capabilities — bundled as `opencode` (enabled by default)
@@ -348,6 +350,13 @@ api.registerProvider({
### Built-in examples
- Anthropic uses `resolveDynamicModel`, `capabilities`, `resolveUsageAuth`,
`fetchUsageSnapshot`, and `isCacheTtlEligible` because it owns Claude 4.6
forward-compat, provider-family hints, usage endpoint integration, and
prompt-cache eligibility.
- OpenAI uses `resolveDynamicModel`, `normalizeResolvedModel`, and
`capabilities` because it owns GPT-5.4 forward-compat plus the direct OpenAI
`openai-completions` -> `openai-responses` normalization.
- OpenRouter uses `catalog` plus `resolveDynamicModel` and
`prepareDynamicModel` because the provider is pass-through and may expose new
model ids before OpenClaw's static catalog updates.

View File

@@ -0,0 +1,102 @@
import { describe, expect, it } from "vitest";
import type { ProviderPlugin } from "../../src/plugins/types.js";
import {
createProviderUsageFetch,
makeResponse,
} from "../../src/test-utils/provider-usage-fetch.js";
import anthropicPlugin from "./index.js";
/**
 * Invokes the plugin's register() against a stub plugin API and returns the
 * provider definition it registered. Throws if registration never happened.
 */
function registerProvider(): ProviderPlugin {
  let captured: ProviderPlugin | undefined;
  const stubApi = {
    registerProvider(next: ProviderPlugin) {
      captured = next;
    },
  };
  anthropicPlugin.register(stubApi as never);
  if (captured === undefined) {
    throw new Error("provider registration missing");
  }
  return captured;
}
// Behavioral coverage for the hooks the bundled anthropic plugin registers.
describe("anthropic plugin", () => {
  it("owns anthropic 4.6 forward-compat resolution", () => {
    const provider = registerProvider();
    // The stub registry only knows the 4.5 snapshot id; the plugin is expected
    // to clone that template under the requested (unknown) 4.6 id.
    const model = provider.resolveDynamicModel?.({
      provider: "anthropic",
      modelId: "claude-sonnet-4.6-20260219",
      modelRegistry: {
        find: (_provider: string, id: string) =>
          id === "claude-sonnet-4.5-20260219"
            ? {
                id,
                name: id,
                api: "anthropic-messages",
                provider: "anthropic",
                baseUrl: "https://api.anthropic.com",
                reasoning: true,
                input: ["text"],
                cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
                contextWindow: 200_000,
                maxTokens: 8_192,
              }
            : null,
      } as never,
    });
    // The clone keeps the template's transport and endpoint but carries the
    // requested 4.6 id.
    expect(model).toMatchObject({
      id: "claude-sonnet-4.6-20260219",
      provider: "anthropic",
      api: "anthropic-messages",
      baseUrl: "https://api.anthropic.com",
    });
  });
  it("owns usage auth resolution", async () => {
    const provider = registerProvider();
    // No API key is resolvable here, so the hook should fall through to OAuth.
    await expect(
      provider.resolveUsageAuth?.({
        config: {} as never,
        env: {} as NodeJS.ProcessEnv,
        provider: "anthropic",
        resolveApiKeyFromConfigAndStore: () => undefined,
        resolveOAuthToken: async () => ({
          token: "anthropic-oauth-token",
        }),
      }),
    ).resolves.toEqual({
      token: "anthropic-oauth-token",
    });
  });
  it("owns usage snapshot fetching", async () => {
    const provider = registerProvider();
    // Fake the Claude OAuth usage endpoint; any other URL 404s.
    const mockFetch = createProviderUsageFetch(async (url) => {
      if (url.includes("api.anthropic.com/api/oauth/usage")) {
        return makeResponse(200, {
          five_hour: { utilization: 20, resets_at: "2026-01-07T01:00:00Z" },
          seven_day: { utilization: 35, resets_at: "2026-01-09T01:00:00Z" },
        });
      }
      return makeResponse(404, "not found");
    });
    const snapshot = await provider.fetchUsageSnapshot?.({
      config: {} as never,
      env: {} as NodeJS.ProcessEnv,
      provider: "anthropic",
      token: "anthropic-oauth-token",
      timeoutMs: 5_000,
      fetchFn: mockFetch as unknown as typeof fetch,
    });
    // Raw utilization windows are mapped to labelled percent windows with
    // epoch-millisecond reset times.
    expect(snapshot).toEqual({
      provider: "anthropic",
      displayName: "Claude",
      windows: [
        { label: "5h", usedPercent: 20, resetAt: Date.parse("2026-01-07T01:00:00Z") },
        { label: "Week", usedPercent: 35, resetAt: Date.parse("2026-01-09T01:00:00Z") },
      ],
    });
  });
});

View File

@@ -0,0 +1,124 @@
import {
emptyPluginConfigSchema,
type OpenClawPluginApi,
type ProviderResolveDynamicModelContext,
type ProviderRuntimeModel,
} from "openclaw/plugin-sdk/core";
import { normalizeModelCompat } from "../../src/agents/model-compat.js";
import { fetchClaudeUsage } from "../../src/infra/provider-usage.fetch.js";
// Provider id this plugin registers under.
const PROVIDER_ID = "anthropic";
// Claude 4.6 model ids in both dash and dot spellings, plus the 4.5 ids used
// as catalog templates when cloning a forward-compat entry.
const ANTHROPIC_OPUS_46_MODEL_ID = "claude-opus-4-6";
const ANTHROPIC_OPUS_46_DOT_MODEL_ID = "claude-opus-4.6";
const ANTHROPIC_OPUS_TEMPLATE_MODEL_IDS = ["claude-opus-4-5", "claude-opus-4.5"] as const;
const ANTHROPIC_SONNET_46_MODEL_ID = "claude-sonnet-4-6";
const ANTHROPIC_SONNET_46_DOT_MODEL_ID = "claude-sonnet-4.6";
const ANTHROPIC_SONNET_TEMPLATE_MODEL_IDS = ["claude-sonnet-4-5", "claude-sonnet-4.5"] as const;
/**
 * Finds the first template id present in the model registry and returns a
 * compat-normalized copy of that entry re-labelled with the requested model id.
 * Returns undefined when none of the templates exist in the registry.
 */
function cloneFirstTemplateModel(params: {
  modelId: string;
  templateIds: readonly string[];
  ctx: ProviderResolveDynamicModelContext;
}): ProviderRuntimeModel | undefined {
  const requestedId = params.modelId.trim();
  // Dedupe and drop empty ids while preserving preference order.
  const candidateIds = [...new Set(params.templateIds)].filter(Boolean);
  for (const candidateId of candidateIds) {
    const entry = params.ctx.modelRegistry.find(
      PROVIDER_ID,
      candidateId,
    ) as ProviderRuntimeModel | null;
    if (!entry) {
      continue;
    }
    return normalizeModelCompat({
      ...entry,
      id: requestedId,
      name: requestedId,
    } as ProviderRuntimeModel);
  }
  return undefined;
}
/**
 * Resolves a Claude 4.6 id (dash or dot spelling, with or without a date
 * suffix) by cloning the matching 4.5 template. Template preference: the
 * id-rewritten 4.5 variant first, then the static fallbacks. Returns
 * undefined when the requested id is not a 4.6 id at all.
 */
function resolveAnthropic46ForwardCompatModel(params: {
  ctx: ProviderResolveDynamicModelContext;
  dashModelId: string;
  dotModelId: string;
  dashTemplateId: string;
  dotTemplateId: string;
  fallbackTemplateIds: readonly string[];
}): ProviderRuntimeModel | undefined {
  const requestedId = params.ctx.modelId.trim();
  const normalized = requestedId.toLowerCase();
  // A 4.6 id is either the bare id or the id followed by a "-" suffix.
  const matches46 = (prefix: string) =>
    normalized === prefix || normalized.startsWith(`${prefix}-`);
  if (!matches46(params.dashModelId) && !matches46(params.dotModelId)) {
    return undefined;
  }
  const templateIds: string[] = [];
  if (normalized.startsWith(params.dashModelId)) {
    templateIds.push(normalized.replace(params.dashModelId, params.dashTemplateId));
  }
  if (normalized.startsWith(params.dotModelId)) {
    templateIds.push(normalized.replace(params.dotModelId, params.dotTemplateId));
  }
  templateIds.push(...params.fallbackTemplateIds);
  return cloneFirstTemplateModel({
    modelId: requestedId,
    templateIds,
    ctx: params.ctx,
  });
}
/**
 * Tries 4.6 forward-compat resolution for each Claude family in turn
 * (opus first, then sonnet) and returns the first match.
 */
function resolveAnthropicForwardCompatModel(
  ctx: ProviderResolveDynamicModelContext,
): ProviderRuntimeModel | undefined {
  const families = [
    {
      dashModelId: ANTHROPIC_OPUS_46_MODEL_ID,
      dotModelId: ANTHROPIC_OPUS_46_DOT_MODEL_ID,
      dashTemplateId: "claude-opus-4-5",
      dotTemplateId: "claude-opus-4.5",
      fallbackTemplateIds: ANTHROPIC_OPUS_TEMPLATE_MODEL_IDS,
    },
    {
      dashModelId: ANTHROPIC_SONNET_46_MODEL_ID,
      dotModelId: ANTHROPIC_SONNET_46_DOT_MODEL_ID,
      dashTemplateId: "claude-sonnet-4-5",
      dotTemplateId: "claude-sonnet-4.5",
      fallbackTemplateIds: ANTHROPIC_SONNET_TEMPLATE_MODEL_IDS,
    },
  ];
  for (const family of families) {
    const resolved = resolveAnthropic46ForwardCompatModel({ ctx, ...family });
    if (resolved) {
      return resolved;
    }
  }
  return undefined;
}
// Bundled plugin definition: registers the anthropic provider runtime with
// forward-compat model resolution, capability hints, and usage reporting.
const anthropicPlugin = {
  id: PROVIDER_ID,
  name: "Anthropic Provider",
  description: "Bundled Anthropic provider plugin",
  // No plugin-specific configuration options.
  configSchema: emptyPluginConfigSchema(),
  register(api: OpenClawPluginApi) {
    api.registerProvider({
      id: PROVIDER_ID,
      label: "Anthropic",
      docsPath: "/providers/models",
      // Credential environment variables, OAuth token first.
      envVars: ["ANTHROPIC_OAUTH_TOKEN", "ANTHROPIC_API_KEY"],
      auth: [],
      // Claude 4.6 ids not yet in the catalog resolve by cloning 4.5 templates.
      resolveDynamicModel: (ctx) => resolveAnthropicForwardCompatModel(ctx),
      capabilities: {
        providerFamily: "anthropic",
        // Model-name hints for the drop-thinking-block capability
        // (applies to claude-family models).
        dropThinkingBlockModelHints: ["claude"],
      },
      // Usage reporting authenticates via OAuth only (no API-key path here).
      resolveUsageAuth: async (ctx) => await ctx.resolveOAuthToken(),
      fetchUsageSnapshot: async (ctx) =>
        await fetchClaudeUsage(ctx.token, ctx.timeoutMs, ctx.fetchFn),
      // Anthropic models are unconditionally eligible for cache-TTL handling.
      isCacheTtlEligible: () => true,
    });
  },
};
export default anthropicPlugin;

View File

@@ -0,0 +1,9 @@
{
"id": "anthropic",
"providers": ["anthropic"],
"configSchema": {
"type": "object",
"additionalProperties": false,
"properties": {}
}
}

View File

@@ -0,0 +1,12 @@
{
"name": "@openclaw/anthropic-provider",
"version": "2026.3.14",
"private": true,
"description": "OpenClaw Anthropic provider plugin",
"type": "module",
"openclaw": {
"extensions": [
"./index.ts"
]
}
}

View File

@@ -0,0 +1,76 @@
import { describe, expect, it } from "vitest";
import type { ProviderPlugin } from "../../src/plugins/types.js";
import openAIPlugin from "./index.js";
/**
 * Runs the plugin's register() against a stub plugin API and returns the
 * provider definition it registered. Throws if registration never happened.
 */
function registerProvider(): ProviderPlugin {
  let captured: ProviderPlugin | undefined;
  openAIPlugin.register({
    registerProvider: (registered: ProviderPlugin) => {
      captured = registered;
    },
  } as never);
  if (captured === undefined) {
    throw new Error("provider registration missing");
  }
  return captured;
}
// Behavioral coverage for the hooks the bundled openai plugin registers.
describe("openai plugin", () => {
  it("owns openai gpt-5.4 forward-compat resolution", () => {
    const provider = registerProvider();
    // The stub registry only knows gpt-5.2-pro; the plugin should clone it
    // under the unknown gpt-5.4-pro id and widen the context/output limits.
    const model = provider.resolveDynamicModel?.({
      provider: "openai",
      modelId: "gpt-5.4-pro",
      modelRegistry: {
        find: (_provider: string, id: string) =>
          id === "gpt-5.2-pro"
            ? {
                id,
                name: id,
                api: "openai-responses",
                provider: "openai",
                baseUrl: "https://api.openai.com/v1",
                reasoning: true,
                input: ["text", "image"],
                cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
                contextWindow: 200_000,
                maxTokens: 8_192,
              }
            : null,
      } as never,
    });
    expect(model).toMatchObject({
      id: "gpt-5.4-pro",
      provider: "openai",
      api: "openai-responses",
      baseUrl: "https://api.openai.com/v1",
      // GPT-5.4 limits override the template's smaller values.
      contextWindow: 1_050_000,
      maxTokens: 128_000,
    });
  });
  it("owns direct openai transport normalization", () => {
    const provider = registerProvider();
    // A direct-OpenAI model on the chat-completions transport should be
    // moved onto the Responses API transport.
    expect(
      provider.normalizeResolvedModel?.({
        provider: "openai",
        modelId: "gpt-5.4",
        model: {
          id: "gpt-5.4",
          name: "gpt-5.4",
          api: "openai-completions",
          provider: "openai",
          baseUrl: "https://api.openai.com/v1",
          reasoning: true,
          input: ["text", "image"],
          cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
          contextWindow: 1_050_000,
          maxTokens: 128_000,
        },
      }),
    ).toMatchObject({
      api: "openai-responses",
    });
  });
});

137
extensions/openai/index.ts Normal file
View File

@@ -0,0 +1,137 @@
import {
emptyPluginConfigSchema,
type OpenClawPluginApi,
type ProviderResolveDynamicModelContext,
type ProviderRuntimeModel,
} from "openclaw/plugin-sdk/core";
import { DEFAULT_CONTEXT_TOKENS } from "../../src/agents/defaults.js";
import { normalizeModelCompat } from "../../src/agents/model-compat.js";
import { normalizeProviderId } from "../../src/agents/model-selection.js";
// Provider id this plugin registers under.
const PROVIDER_ID = "openai";
// Official direct-OpenAI API endpoint.
const OPENAI_BASE_URL = "https://api.openai.com/v1";
// GPT-5.4 ids the static catalog may not know yet; resolved by cloning
// the 5.2 template entries below.
const OPENAI_GPT_54_MODEL_ID = "gpt-5.4";
const OPENAI_GPT_54_PRO_MODEL_ID = "gpt-5.4-pro";
// Context/output limits applied to forward-compat GPT-5.4 models.
const OPENAI_GPT_54_CONTEXT_TOKENS = 1_050_000;
const OPENAI_GPT_54_MAX_TOKENS = 128_000;
// Template preference order; the pro model falls back to the base 5.2 entry.
const OPENAI_GPT_54_TEMPLATE_MODEL_IDS = ["gpt-5.2"] as const;
const OPENAI_GPT_54_PRO_TEMPLATE_MODEL_IDS = ["gpt-5.2-pro", "gpt-5.2"] as const;
/**
 * True when the base URL points at the official OpenAI API host, matched
 * case-insensitively, with an optional "/v1" suffix and/or a single trailing
 * slash. Missing or blank URLs are not considered OpenAI endpoints.
 */
function isOpenAIApiBaseUrl(baseUrl?: string): boolean {
  let candidate = baseUrl?.trim().toLowerCase();
  if (!candidate) {
    return false;
  }
  // Tolerate exactly one trailing slash.
  if (candidate.endsWith("/")) {
    candidate = candidate.slice(0, -1);
  }
  return (
    candidate === "http://api.openai.com" ||
    candidate === "https://api.openai.com" ||
    candidate === "http://api.openai.com/v1" ||
    candidate === "https://api.openai.com/v1"
  );
}
/**
 * Rewrites direct-OpenAI chat-completions models onto the Responses API
 * transport. Models on other transports, or pointing at a non-OpenAI base
 * URL, are returned unchanged. A missing base URL is treated as direct OpenAI.
 */
function normalizeOpenAITransport(model: ProviderRuntimeModel): ProviderRuntimeModel {
  if (model.api !== "openai-completions") {
    return model;
  }
  const targetsOpenAI = !model.baseUrl || isOpenAIApiBaseUrl(model.baseUrl);
  if (!targetsOpenAI) {
    return model;
  }
  return { ...model, api: "openai-responses" };
}
/**
 * Finds the first template id present in the model registry, re-labels a copy
 * with the requested model id, applies the optional field patch on top, and
 * returns the compat-normalized result. Undefined when no template exists.
 */
function cloneFirstTemplateModel(params: {
  modelId: string;
  templateIds: readonly string[];
  ctx: ProviderResolveDynamicModelContext;
  patch?: Partial<ProviderRuntimeModel>;
}): ProviderRuntimeModel | undefined {
  const requestedId = params.modelId.trim();
  // Dedupe and drop empty ids while preserving preference order.
  const candidateIds = [...new Set(params.templateIds)].filter(Boolean);
  for (const candidateId of candidateIds) {
    const entry = params.ctx.modelRegistry.find(
      PROVIDER_ID,
      candidateId,
    ) as ProviderRuntimeModel | null;
    if (!entry) {
      continue;
    }
    // Patch fields win over both the template and the re-labelled id/name.
    return normalizeModelCompat({
      ...entry,
      id: requestedId,
      name: requestedId,
      ...params.patch,
    } as ProviderRuntimeModel);
  }
  return undefined;
}
/**
 * Resolves the known GPT-5.4 ids by cloning a 5.2 template from the registry
 * and forcing the GPT-5.4 transport/limit fields on top. When no template is
 * registered at all, synthesizes a zero-cost entry from scratch so the model
 * is still usable. Non-5.4 ids return undefined.
 */
function resolveOpenAIGpt54ForwardCompatModel(
  ctx: ProviderResolveDynamicModelContext,
): ProviderRuntimeModel | undefined {
  const requestedId = ctx.modelId.trim();
  const templatesById: Record<string, readonly string[]> = {
    [OPENAI_GPT_54_MODEL_ID]: OPENAI_GPT_54_TEMPLATE_MODEL_IDS,
    [OPENAI_GPT_54_PRO_MODEL_ID]: OPENAI_GPT_54_PRO_TEMPLATE_MODEL_IDS,
  };
  const templateIds = templatesById[requestedId.toLowerCase()];
  if (!templateIds) {
    return undefined;
  }
  // Fields every forward-compat GPT-5.4 entry must carry regardless of the
  // template it was cloned from.
  const gpt54Fields: Partial<ProviderRuntimeModel> = {
    api: "openai-responses",
    provider: PROVIDER_ID,
    baseUrl: OPENAI_BASE_URL,
    reasoning: true,
    input: ["text", "image"],
    contextWindow: OPENAI_GPT_54_CONTEXT_TOKENS,
    maxTokens: OPENAI_GPT_54_MAX_TOKENS,
  };
  const cloned = cloneFirstTemplateModel({
    modelId: requestedId,
    templateIds,
    ctx,
    patch: gpt54Fields,
  });
  if (cloned) {
    return cloned;
  }
  // No template in the registry: synthesize a zero-cost entry from scratch.
  return normalizeModelCompat({
    id: requestedId,
    name: requestedId,
    ...gpt54Fields,
    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
  } as ProviderRuntimeModel);
}
// Bundled plugin definition: registers the openai provider runtime with
// GPT-5.4 forward-compat resolution and Responses-transport normalization.
const openAIPlugin = {
  id: PROVIDER_ID,
  name: "OpenAI Provider",
  description: "Bundled OpenAI provider plugin",
  // No plugin-specific configuration options.
  configSchema: emptyPluginConfigSchema(),
  register(api: OpenClawPluginApi) {
    api.registerProvider({
      id: PROVIDER_ID,
      label: "OpenAI",
      docsPath: "/providers/models",
      envVars: ["OPENAI_API_KEY"],
      auth: [],
      // GPT-5.4 ids resolve by cloning a 5.2 template (or synthesizing one).
      resolveDynamicModel: (ctx) => resolveOpenAIGpt54ForwardCompatModel(ctx),
      normalizeResolvedModel: (ctx) => {
        // Only normalize models that actually belong to this provider;
        // returning undefined leaves other providers' models untouched.
        if (normalizeProviderId(ctx.provider) !== PROVIDER_ID) {
          return undefined;
        }
        return normalizeOpenAITransport(ctx.model);
      },
      capabilities: {
        providerFamily: "openai",
      },
    });
  },
};
export default openAIPlugin;

View File

@@ -0,0 +1,9 @@
{
"id": "openai",
"providers": ["openai"],
"configSchema": {
"type": "object",
"additionalProperties": false,
"properties": {}
}
}

View File

@@ -0,0 +1,12 @@
{
"name": "@openclaw/openai-provider",
"version": "2026.3.14",
"private": true,
"description": "OpenClaw OpenAI provider plugin",
"type": "module",
"openclaw": {
"extensions": [
"./index.ts"
]
}
}

View File

@@ -10,7 +10,7 @@ export type CacheTtlEntryData = {
modelId?: string;
};
const CACHE_TTL_NATIVE_PROVIDERS = new Set(["anthropic", "moonshot", "zai"]);
const CACHE_TTL_NATIVE_PROVIDERS = new Set(["moonshot", "zai"]);
export function isCacheTtlEligibleProvider(provider: string, modelId: string): boolean {
const normalizedProvider = provider.toLowerCase();
@@ -28,6 +28,11 @@ export function isCacheTtlEligibleProvider(provider: string, modelId: string): b
if (normalizedProvider === "kilocode" && normalizedModelId.startsWith("anthropic/")) {
return true;
}
// Legacy fallback for tests / plugin-disabled contexts. The Anthropic plugin
// owns this policy in normal runtime.
if (normalizedProvider === "anthropic") {
return true;
}
if (CACHE_TTL_NATIVE_PROVIDERS.has(normalizedProvider)) {
return true;
}

View File

@@ -34,7 +34,7 @@ type InlineProviderConfig = {
headers?: unknown;
};
const PLUGIN_FIRST_DYNAMIC_PROVIDERS = new Set(["google-gemini-cli", "zai"]);
const PLUGIN_FIRST_DYNAMIC_PROVIDERS = new Set(["anthropic", "google-gemini-cli", "openai", "zai"]);
function sanitizeModelHeaders(
headers: unknown,

View File

@@ -2,6 +2,15 @@ import { describe, expect, it, vi } from "vitest";
const resolveProviderCapabilitiesWithPluginMock = vi.fn((params: { provider: string }) => {
switch (params.provider) {
case "anthropic":
return {
providerFamily: "anthropic",
dropThinkingBlockModelHints: ["claude"],
};
case "openai":
return {
providerFamily: "openai",
};
case "openrouter":
return {
openAiCompatTurnValidation: false,
@@ -47,7 +56,7 @@ import {
} from "./provider-capabilities.js";
describe("resolveProviderCapabilities", () => {
it("returns native anthropic defaults for ordinary providers", () => {
it("returns provider-owned anthropic defaults for ordinary providers", () => {
expect(resolveProviderCapabilities("anthropic")).toEqual({
anthropicToolSchemaMode: "native",
anthropicToolChoiceMode: "native",

View File

@@ -28,20 +28,17 @@ const DEFAULT_PROVIDER_CAPABILITIES: ProviderCapabilities = {
};
const CORE_PROVIDER_CAPABILITIES: Record<string, Partial<ProviderCapabilities>> = {
anthropic: {
providerFamily: "anthropic",
dropThinkingBlockModelHints: ["claude"],
},
"amazon-bedrock": {
providerFamily: "anthropic",
dropThinkingBlockModelHints: ["claude"],
},
openai: {
providerFamily: "openai",
},
};
const PLUGIN_CAPABILITIES_FALLBACKS: Record<string, Partial<ProviderCapabilities>> = {
anthropic: {
providerFamily: "anthropic",
dropThinkingBlockModelHints: ["claude"],
},
mistral: {
transcriptToolCallIdMode: "strict9",
transcriptToolCallIdModelHints: [
@@ -64,6 +61,9 @@ const PLUGIN_CAPABILITIES_FALLBACKS: Record<string, Partial<ProviderCapabilities
geminiThoughtSignatureSanitization: true,
geminiThoughtSignatureModelHints: ["gemini"],
},
openai: {
providerFamily: "openai",
},
};
export function resolveProviderCapabilities(provider?: string | null): ProviderCapabilities {

View File

@@ -24,6 +24,7 @@ export type NormalizedPluginsConfig = {
};
export const BUNDLED_ENABLED_BY_DEFAULT = new Set<string>([
"anthropic",
"byteplus",
"cloudflare-ai-gateway",
"device-pair",
@@ -38,6 +39,7 @@ export const BUNDLED_ENABLED_BY_DEFAULT = new Set<string>([
"moonshot",
"nvidia",
"ollama",
"openai",
"openai-codex",
"opencode",
"opencode-go",

View File

@@ -5,6 +5,7 @@ import type { ProviderPlugin } from "./types.js";
const log = createSubsystemLogger("plugins");
const BUNDLED_PROVIDER_ALLOWLIST_COMPAT_PLUGIN_IDS = [
"anthropic",
"byteplus",
"cloudflare-ai-gateway",
"copilot-proxy",
@@ -20,6 +21,7 @@ const BUNDLED_PROVIDER_ALLOWLIST_COMPAT_PLUGIN_IDS = [
"moonshot",
"nvidia",
"ollama",
"openai",
"openai-codex",
"opencode",
"opencode-go",