feat: add Arcee AI provider plugin

Add a bundled Arcee AI provider plugin with ARCEEAI_API_KEY onboarding,
Trinity model catalog (mini, large-preview, large-thinking), and
OpenAI-compatible API support.

- Trinity Large Thinking: 256K context, reasoning enabled
- Trinity Large Preview: 128K context, general-purpose
- Trinity Mini 26B: 128K context, fast and cost-efficient
This commit is contained in:
arthurbr11
2026-04-05 11:01:59 -07:00
committed by Peter Steinberger
parent 8f421f0e78
commit 5ac2f58c57
16 changed files with 329 additions and 1 deletion

4
.github/labeler.yml vendored
View File

@@ -257,6 +257,10 @@
- changed-files:
- any-glob-to-any-file:
- "extensions/acpx/**"
"extensions: arcee":
- changed-files:
- any-glob-to-any-file:
- "extensions/arcee/**"
"extensions: byteplus":
- changed-files:
- any-glob-to-any-file:

View File

@@ -72,6 +72,7 @@ Docs: https://docs.openclaw.ai
- Agents/video generation: add the built-in `video_generate` tool so agents can create videos through configured providers and return the generated media directly in the reply.
- Agents/music generation: ignore unsupported optional hints such as `durationSeconds` with a warning instead of hard-failing requests on providers like Google Lyria.
- Providers/Arcee AI: add a bundled Arcee AI provider plugin with `ARCEEAI_API_KEY` onboarding, Trinity model catalog (mini, large-preview, large-thinking), and OpenAI-compatible API support.
- Providers/ComfyUI: add a bundled `comfy` workflow media plugin for local ComfyUI and Comfy Cloud workflows, including shared `image_generate`, `video_generate`, and workflow-backed `music_generate` support, with prompt injection, optional reference-image upload, live tests, and output download.
- Tools/music generation: add the built-in `music_generate` tool with bundled Google (Lyria) and MiniMax providers plus workflow-backed Comfy support, including async task tracking and follow-up delivery of finished audio.
- Providers: add bundled Qwen, Fireworks AI, and StepFun providers, plus MiniMax TTS, Ollama Web Search, and MiniMax Search integrations for chat, speech, and search workflows. (#60032, #55921, #59318, #54648)

View File

@@ -1231,6 +1231,7 @@
"pages": [
"providers/alibaba",
"providers/anthropic",
"providers/arcee",
"providers/bedrock",
"providers/bedrock-mantle",
"providers/chutes",

74
docs/providers/arcee.md Normal file
View File

@@ -0,0 +1,74 @@
---
title: "Arcee AI"
summary: "Arcee AI setup (auth + model selection)"
read_when:
- You want to use Arcee AI with OpenClaw
- You need the API key env var or CLI auth choice
---
# Arcee AI
[Arcee AI](https://arcee.ai) provides access to the Trinity family of mixture-of-experts models through an OpenAI-compatible API. All Trinity models are Apache 2.0 licensed.
- Provider: `arcee`
- Auth: `ARCEEAI_API_KEY`
- API: OpenAI-compatible
- Base URL: `https://api.arcee.ai/api/v1`
## Quick start
1. Get an API key from the [Arcee platform dashboard](https://app.arcee.ai).
2. Set the API key (recommended: store it for the Gateway):
```bash
openclaw onboard --auth-choice arceeai-api-key
```
3. Set a default model:
```json5
{
agents: {
defaults: {
model: { primary: "arcee/trinity-large-thinking" },
},
},
}
```
## Non-interactive example
```bash
openclaw onboard --non-interactive \
--mode local \
--auth-choice arceeai-api-key \
--arceeai-api-key "$ARCEEAI_API_KEY"
```
This will set `arcee/trinity-large-thinking` as the default model.
## Environment note
If the Gateway runs as a daemon (launchd/systemd), make sure `ARCEEAI_API_KEY`
is available to that process (for example, in `~/.openclaw/.env` or via
`env.shellEnv`).
## Built-in catalog
OpenClaw currently ships this bundled Arcee catalog:
| Model ref | Name | Input | Context | Cost (in/out per 1M) | Notes |
| ------------------------------ | ---------------------- | ----- | ------- | -------------------- | ----------------------------------------- |
| `arcee/trinity-large-thinking` | Trinity Large Thinking | text | 256K | $0.25 / $0.90 | Default model; reasoning enabled |
| `arcee/trinity-large-preview` | Trinity Large Preview | text | 128K | $0.25 / $1.00 | General-purpose; 400B params, 13B active |
| `arcee/trinity-mini` | Trinity Mini 26B | text | 128K | $0.045 / $0.15 | Fast and cost-efficient; function calling |
The onboarding preset sets `arcee/trinity-large-thinking` as the default model.
## Supported features
- Streaming
- Tool use / function calling
- Structured output (JSON mode and JSON schema)
- Extended thinking (Trinity Large Thinking)

View File

@@ -29,6 +29,7 @@ Looking for chat channel docs (WhatsApp/Telegram/Discord/Slack/Mattermost (plugi
- [Alibaba Model Studio](/providers/alibaba)
- [Amazon Bedrock](/providers/bedrock)
- [Anthropic (API + Claude CLI)](/providers/anthropic)
- [Arcee AI (Trinity models)](/providers/arcee)
- [BytePlus (International)](/concepts/model-providers#byteplus-international)
- [Chutes](/providers/chutes)
- [ComfyUI](/providers/comfy)

3
extensions/arcee/api.ts Normal file
View File

@@ -0,0 +1,3 @@
// Public API surface of the Arcee provider plugin: model catalog + base URL,
// static provider-config builder, and onboarding config appliers.
// NOTE(review): sibling modules should import from their concrete source files
// (./models.js etc.) rather than this barrel, to avoid import cycles.
export { buildArceeModelDefinition, ARCEE_BASE_URL, ARCEE_MODEL_CATALOG } from "./models.js";
export { buildArceeProvider } from "./provider-catalog.js";
export { applyArceeConfig, ARCEE_DEFAULT_MODEL_REF } from "./onboard.js";

View File

@@ -0,0 +1,58 @@
import { describe, expect, it } from "vitest";
import { resolveProviderPluginChoice } from "../../src/plugins/provider-auth-choice.runtime.js";
import { registerSingleProviderPlugin } from "../../test/helpers/plugins/plugin-registration.js";
import arceePlugin from "./index.js";

describe("arcee provider plugin", () => {
  it("registers Arcee AI with api-key auth wizard metadata", async () => {
    const registered = await registerSingleProviderPlugin(arceePlugin);

    // Provider identity and auth metadata exposed by the plugin entry.
    expect(registered.id).toBe("arcee");
    expect(registered.label).toBe("Arcee AI");
    expect(registered.envVars).toEqual(["ARCEEAI_API_KEY"]);
    expect(registered.auth).toHaveLength(1);

    // The onboarding wizard choice id must resolve back to this provider.
    const match = resolveProviderPluginChoice({
      providers: [registered],
      choice: "arceeai-api-key",
    });
    expect(match).not.toBeNull();
    expect(match?.provider.id).toBe("arcee");
    expect(match?.method.id).toBe("api-key");
  });

  it("builds the static Arcee AI model catalog", async () => {
    const registered = await registerSingleProviderPlugin(arceePlugin);
    expect(registered.catalog).toBeDefined();

    const result = await registered.catalog!.run({
      config: {},
      env: {},
      resolveProviderApiKey: () => ({ apiKey: "test-key" }),
      resolveProviderAuth: () => ({
        apiKey: "test-key",
        mode: "api_key",
        source: "env",
      }),
    } as never);

    // The Arcee plugin returns a single-provider catalog shape.
    expect(result && "provider" in result).toBe(true);
    if (!result || !("provider" in result)) {
      throw new Error("expected single-provider catalog");
    }

    const { provider: catalogProvider } = result;
    expect(catalogProvider.api).toBe("openai-completions");
    expect(catalogProvider.baseUrl).toBe("https://api.arcee.ai/api/v1");

    const modelIds = catalogProvider.models?.map((entry) => entry.id);
    expect(modelIds).toEqual([
      "trinity-mini",
      "trinity-large-preview",
      "trinity-large-thinking",
    ]);

    const thinking = catalogProvider.models?.find(
      (entry) => entry.id === "trinity-large-thinking",
    );
    expect(thinking?.reasoning).toBe(true);
    expect(thinking?.contextWindow).toBe(262144);
  });
});

39
extensions/arcee/index.ts Normal file
View File

@@ -0,0 +1,39 @@
import { defineSingleProviderPluginEntry } from "openclaw/plugin-sdk/provider-entry";
import { applyArceeConfig, ARCEE_DEFAULT_MODEL_REF } from "./onboard.js";
import { buildArceeProvider } from "./provider-catalog.js";
// Provider id: used as the config key, the model-ref prefix ("arcee/<model>"),
// and the auth wizard group id below.
const PROVIDER_ID = "arcee";
/**
 * Plugin entry for the bundled Arcee AI provider.
 *
 * Declares a single API-key auth method (ARCEEAI_API_KEY env var /
 * `--arceeai-api-key` CLI flag) and a static model catalog built by
 * {@link buildArceeProvider}.
 */
export default defineSingleProviderPluginEntry({
  id: PROVIDER_ID,
  name: "Arcee AI Provider",
  description: "Bundled Arcee AI provider plugin",
  provider: {
    label: "Arcee AI",
    docsPath: "/providers/arcee",
    auth: [
      {
        // Single auth method: plain API key entered during onboarding.
        methodId: "api-key",
        label: "Arcee AI API key",
        hint: "API key",
        optionKey: "arceeaiApiKey",
        flagName: "--arceeai-api-key",
        envVar: "ARCEEAI_API_KEY",
        promptMessage: "Enter Arcee AI API key",
        // Onboarding preset pins trinity-large-thinking as the default model.
        defaultModel: ARCEE_DEFAULT_MODEL_REF,
        applyConfig: (cfg) => applyArceeConfig(cfg),
        wizard: {
          choiceId: "arceeai-api-key",
          choiceLabel: "Arcee AI API key",
          groupId: "arcee",
          groupLabel: "Arcee AI",
          groupHint: "API key",
        },
      },
    ],
    catalog: {
      buildProvider: buildArceeProvider,
      // Users may override the base URL in config (e.g. gateway/proxy setups).
      allowExplicitBaseUrl: true,
    },
  },
});

View File

@@ -0,0 +1,67 @@
import type { ModelDefinitionConfig } from "openclaw/plugin-sdk/provider-model-shared";
/** Base endpoint of Arcee's OpenAI-compatible API. */
export const ARCEE_BASE_URL = "https://api.arcee.ai/api/v1";

/**
 * Bundled Trinity model catalog.
 *
 * Costs are USD per 1M tokens; contextWindow/maxTokens are token counts.
 * Order matters: consumers (catalog build, docs, tests) list models in this
 * sequence.
 */
export const ARCEE_MODEL_CATALOG: ModelDefinitionConfig[] = [
  {
    // Fast, cost-efficient tier.
    id: "trinity-mini",
    name: "Trinity Mini 26B",
    input: ["text"],
    reasoning: false,
    contextWindow: 131072,
    maxTokens: 8192,
    cost: { input: 0.045, output: 0.15, cacheRead: 0.045, cacheWrite: 0.045 },
  },
  {
    // General-purpose large model.
    id: "trinity-large-preview",
    name: "Trinity Large Preview",
    input: ["text"],
    reasoning: false,
    contextWindow: 131072,
    maxTokens: 8192,
    cost: { input: 0.25, output: 1.0, cacheRead: 0.25, cacheWrite: 0.25 },
  },
  {
    // Reasoning-enabled variant with the largest context window (256K).
    id: "trinity-large-thinking",
    name: "Trinity Large Thinking",
    input: ["text"],
    reasoning: true,
    contextWindow: 262144,
    maxTokens: 16384,
    cost: { input: 0.25, output: 0.9, cacheRead: 0.25, cacheWrite: 0.25 },
    // NOTE(review): presumably the endpoint rejects an explicit reasoning
    // effort parameter — confirm against the provider API.
    compat: { supportsReasoningEffort: false },
  },
];

/**
 * Converts a catalog entry into a full model definition by stamping the
 * OpenAI-completions API id onto it. The `compat` key is attached only when
 * the entry carries one, matching the original omit-when-absent shape.
 */
export function buildArceeModelDefinition(
  entry: (typeof ARCEE_MODEL_CATALOG)[number],
): ModelDefinitionConfig {
  const definition: ModelDefinitionConfig = {
    id: entry.id,
    name: entry.name,
    api: "openai-completions",
    reasoning: entry.reasoning,
    input: entry.input,
    cost: entry.cost,
    contextWindow: entry.contextWindow,
    maxTokens: entry.maxTokens,
  };
  if (entry.compat) {
    definition.compat = entry.compat;
  }
  return definition;
}

View File

@@ -0,0 +1,26 @@
import {
createModelCatalogPresetAppliers,
type OpenClawConfig,
} from "openclaw/plugin-sdk/provider-onboard";
import { buildArceeModelDefinition, ARCEE_BASE_URL, ARCEE_MODEL_CATALOG } from "./api.js";
export const ARCEE_DEFAULT_MODEL_REF = "arcee/trinity-large-thinking";
const arceePresetAppliers = createModelCatalogPresetAppliers({
primaryModelRef: ARCEE_DEFAULT_MODEL_REF,
resolveParams: (_cfg: OpenClawConfig) => ({
providerId: "arcee",
api: "openai-completions",
baseUrl: ARCEE_BASE_URL,
catalogModels: ARCEE_MODEL_CATALOG.map(buildArceeModelDefinition),
aliases: [{ modelRef: ARCEE_DEFAULT_MODEL_REF, alias: "Arcee AI" }],
}),
});
export function applyArceeProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
return arceePresetAppliers.applyProviderConfig(cfg);
}
export function applyArceeConfig(cfg: OpenClawConfig): OpenClawConfig {
return arceePresetAppliers.applyConfig(cfg);
}

View File

@@ -0,0 +1,28 @@
{
"id": "arcee",
"enabledByDefault": true,
"providers": ["arcee"],
"providerAuthEnvVars": {
"arcee": ["ARCEEAI_API_KEY"]
},
"providerAuthChoices": [
{
"provider": "arcee",
"method": "api-key",
"choiceId": "arceeai-api-key",
"choiceLabel": "Arcee AI API key",
"groupId": "arcee",
"groupLabel": "Arcee AI",
"groupHint": "API key",
"optionKey": "arceeaiApiKey",
"cliFlag": "--arceeai-api-key",
"cliOption": "--arceeai-api-key <key>",
"cliDescription": "Arcee AI API key"
}
],
"configSchema": {
"type": "object",
"additionalProperties": false,
"properties": {}
}
}

View File

@@ -0,0 +1,12 @@
{
"name": "@openclaw/arcee-provider",
"version": "2026.4.4",
"private": true,
"description": "OpenClaw Arcee provider plugin",
"type": "module",
"openclaw": {
"extensions": [
"./index.ts"
]
}
}

View File

@@ -0,0 +1,10 @@
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
import { buildArceeModelDefinition, ARCEE_BASE_URL, ARCEE_MODEL_CATALOG } from "./api.js";
export function buildArceeProvider(): ModelProviderConfig {
return {
baseUrl: ARCEE_BASE_URL,
api: "openai-completions",
models: ARCEE_MODEL_CATALOG.map(buildArceeModelDefinition),
};
}

View File

@@ -44,7 +44,7 @@ const defaultImportPiSdk = () => import("./pi-model-discovery-runtime.js");
let importPiSdk = defaultImportPiSdk;
let modelSuppressionPromise: Promise<typeof import("./model-suppression.runtime.js")> | undefined;
const NON_PI_NATIVE_MODEL_PROVIDERS = new Set(["deepseek", "kilocode", "ollama"]);
const NON_PI_NATIVE_MODEL_PROVIDERS = new Set(["arcee", "deepseek", "kilocode", "ollama"]);
function shouldLogModelCatalogTiming(): boolean {
return process.env.OPENCLAW_DEBUG_INGRESS_TIMING === "1";

View File

@@ -12,6 +12,7 @@ export type BuiltInAuthChoice =
| "oauth"
| "setup-token"
| "token"
| "arceeai-api-key"
| "chutes"
| "deepseek-api-key"
| "openai-codex"
@@ -65,6 +66,7 @@ export type AuthChoice = BuiltInAuthChoice | (string & {});
export type BuiltInAuthChoiceGroupId =
| "openai"
| "anthropic"
| "arcee"
| "chutes"
| "deepseek"
| "google"
@@ -131,6 +133,7 @@ export type OnboardOptions = OnboardDynamicProviderOptions & {
tokenExpiresIn?: string;
/** API key persistence mode for setup flows (default: plaintext). */
secretInputMode?: SecretInputMode;
arceeaiApiKey?: string;
cloudflareAiGatewayAccountId?: string;
cloudflareAiGatewayGatewayId?: string;
customBaseUrl?: string;

View File

@@ -73,6 +73,7 @@ export { MissingEnvVarError } from "./env-substitution.js";
const SHELL_ENV_EXPECTED_KEYS = [
"OPENAI_API_KEY",
"ANTHROPIC_API_KEY",
"ARCEEAI_API_KEY",
"DEEPSEEK_API_KEY",
"ANTHROPIC_OAUTH_TOKEN",
"GEMINI_API_KEY",