refactor(anthropic-vertex): move SDK runtime to plugin (#71174)

* refactor(anthropic-vertex): move sdk runtime to plugin

* fix(anthropic-vertex): stage provider runtime deps

* fix(anthropic-vertex): reuse stream factory wrapper
This commit is contained in:
Vincent Koc
2026-04-24 11:52:35 -07:00
committed by GitHub
parent 07f33b2909
commit d795000377
10 changed files with 350 additions and 216 deletions

View File

@@ -0,0 +1,77 @@
import type { Model } from "@mariozechner/pi-ai";
import { beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
// Mocks are created via vi.hoisted so the vi.mock factories below may
// reference them before module evaluation.
const hoisted = vi.hoisted(() => ({
  streamAnthropicMock: vi.fn(() => Symbol("anthropic-vertex-stream")),
  anthropicVertexCtorMock: vi.fn(),
}));
// Replace pi-ai's streamAnthropic with the hoisted mock while keeping every
// other export of the real module intact.
vi.mock("@mariozechner/pi-ai", async () => {
const original =
await vi.importActual<typeof import("@mariozechner/pi-ai")>("@mariozechner/pi-ai");
return {
...original,
streamAnthropic: hoisted.streamAnthropicMock,
};
});
// Stub the Vertex SDK client: record the constructor options for assertions
// and return a dummy object in place of a real GCP-authenticated client.
vi.mock("@anthropic-ai/vertex-sdk", () => ({
AnthropicVertex: vi.fn(function MockAnthropicVertex(options: unknown) {
hoisted.anthropicVertexCtorMock(options);
return { options };
}),
}));
// Bound in beforeAll via dynamic import so the vi.mock factories above are
// registered before ./api.js (and its transitive imports) are evaluated.
let createAnthropicVertexStreamFn: typeof import("./api.js").createAnthropicVertexStreamFn;
let createAnthropicVertexStreamFnForModel: typeof import("./api.js").createAnthropicVertexStreamFnForModel;
/** Builds a minimal anthropic-messages model stub for the factory tests. */
function makeModel(): Model<"anthropic-messages"> {
  const stub = {
    id: "claude-sonnet-4-6",
    api: "anthropic-messages",
    provider: "anthropic-vertex",
    maxTokens: 128000,
  };
  return stub as Model<"anthropic-messages">;
}
// Verifies the api.ts wrappers construct the runtime stream factory (and thus
// the Vertex client) exactly once and reuse it across stream invocations.
describe("Anthropic Vertex API stream factories", () => {
beforeAll(async () => {
// Import after mock registration so the module under test sees the mocks.
({ createAnthropicVertexStreamFn, createAnthropicVertexStreamFnForModel } =
await import("./api.js"));
});
beforeEach(() => {
// Reset call counts so each test asserts on its own invocations only.
hoisted.streamAnthropicMock.mockClear();
hoisted.anthropicVertexCtorMock.mockClear();
});
it("reuses the runtime stream factory across direct stream calls", async () => {
const streamFn = createAnthropicVertexStreamFn("vertex-project", "us-east5");
const model = makeModel();
await streamFn(model, { messages: [] }, {});
await streamFn(model, { messages: [] }, {});
// One client construction, two transport calls → the factory was cached.
expect(hoisted.anthropicVertexCtorMock).toHaveBeenCalledTimes(1);
expect(hoisted.streamAnthropicMock).toHaveBeenCalledTimes(2);
});
it("reuses the runtime stream factory across model-derived stream calls", async () => {
const streamFn = createAnthropicVertexStreamFnForModel(makeModel(), {
ANTHROPIC_VERTEX_PROJECT_ID: "vertex-project",
GOOGLE_CLOUD_LOCATION: "us-east5",
} as NodeJS.ProcessEnv);
const model = makeModel();
await streamFn(model, { messages: [] }, {});
await streamFn(model, { messages: [] }, {});
expect(hoisted.anthropicVertexCtorMock).toHaveBeenCalledTimes(1);
expect(hoisted.streamAnthropicMock).toHaveBeenCalledTimes(2);
});
});

View File

@@ -1,3 +1,5 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
export {
ANTHROPIC_VERTEX_DEFAULT_MODEL_ID,
buildAnthropicVertexProvider,
@@ -40,3 +42,30 @@ export function resolveImplicitAnthropicVertexProvider(params?: { env?: NodeJS.P
return buildAnthropicVertexProvider({ env });
}
/**
 * Wraps the SDK-backed stream runtime behind a stable StreamFn facade.
 *
 * The runtime module is imported eagerly so it starts loading as soon as the
 * factory is called, but it is only awaited when a stream is actually opened.
 */
export function createAnthropicVertexStreamFn(
  projectId: string | undefined,
  region: string,
  baseURL?: string,
): StreamFn {
  const streamFnPromise = import("./stream-runtime.js").then((runtime) =>
    runtime.createAnthropicVertexStreamFn(projectId, region, baseURL),
  );
  // Attach a no-op handler so a failed import cannot crash the process with an
  // unhandled rejection when the returned StreamFn is never invoked; the same
  // error still surfaces through the `await` below on first use.
  streamFnPromise.catch(() => {});
  return async (model, context, options) => {
    const streamFn = await streamFnPromise;
    return streamFn(model, context, options);
  };
}
/**
 * Model-derived variant of the lazy runtime wrapper: project/region/baseURL
 * are resolved by the runtime from the model's baseUrl and the environment.
 */
export function createAnthropicVertexStreamFnForModel(
  model: { baseUrl?: string },
  env: NodeJS.ProcessEnv = process.env,
): StreamFn {
  const streamFnPromise = import("./stream-runtime.js").then((runtime) =>
    runtime.createAnthropicVertexStreamFnForModel(model, env),
  );
  // Prevent an unhandled rejection if the import fails before (or without)
  // the returned StreamFn ever being called; callers still see the error
  // when they invoke the stream function.
  streamFnPromise.catch(() => {});
  return async (...args) => {
    const streamFn = await streamFnPromise;
    return streamFn(...args);
  };
}

View File

@@ -4,10 +4,18 @@
"private": true,
"description": "OpenClaw Anthropic Vertex provider plugin",
"type": "module",
"dependencies": {
"@anthropic-ai/vertex-sdk": "^0.16.0",
"@mariozechner/pi-agent-core": "0.70.2",
"@mariozechner/pi-ai": "0.70.2"
},
"devDependencies": {
"@openclaw/plugin-sdk": "workspace:*"
},
"openclaw": {
"bundle": {
"stageRuntimeDependencies": true
},
"extensions": [
"./index.ts"
]

View File

@@ -0,0 +1,315 @@
import type { Model } from "@mariozechner/pi-ai";
import { beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
// Marker the runtime splits system prompts on when shaping cache boundaries.
const SYSTEM_PROMPT_CACHE_BOUNDARY = "\n<!-- OPENCLAW_CACHE_BOUNDARY -->\n";
// Hoisted so the vi.mock factories below can reference these mocks.
const hoisted = vi.hoisted(() => ({
  streamAnthropicMock: vi.fn<(model: unknown, context: unknown, options: unknown) => symbol>(
    () => Symbol("anthropic-vertex-stream"),
  ),
  anthropicVertexCtorMock: vi.fn(),
}));
// Replace streamAnthropic with a pass-through into the hoisted mock; all other
// pi-ai exports stay real.
vi.mock("@mariozechner/pi-ai", async () => {
const original =
await vi.importActual<typeof import("@mariozechner/pi-ai")>("@mariozechner/pi-ai");
return {
...original,
streamAnthropic: (model: unknown, context: unknown, options: unknown) =>
hoisted.streamAnthropicMock(model, context, options),
};
});
// Stub the Vertex SDK client: capture constructor options, return a dummy.
vi.mock("@anthropic-ai/vertex-sdk", () => ({
AnthropicVertex: vi.fn(function MockAnthropicVertex(options: unknown) {
hoisted.anthropicVertexCtorMock(options);
return { options };
}),
}));
// Bound in beforeAll via dynamic import so the vi.mock factories above are
// registered before ./stream-runtime.js is evaluated.
let createAnthropicVertexStreamFn: typeof import("./stream-runtime.js").createAnthropicVertexStreamFn;
let createAnthropicVertexStreamFnForModel: typeof import("./stream-runtime.js").createAnthropicVertexStreamFnForModel;
/** Builds a minimal anthropic-messages model stub; maxTokens is included only when given. */
function makeModel(params: { id: string; maxTokens?: number }): Model<"anthropic-messages"> {
  const base = {
    id: params.id,
    api: "anthropic-messages",
    provider: "anthropic-vertex",
  };
  const withLimit =
    params.maxTokens === undefined ? base : { ...base, maxTokens: params.maxTokens };
  return withLimit as Model<"anthropic-messages">;
}
// System prompt with the boundary marker between stable and dynamic parts.
const CACHE_BOUNDARY_PROMPT = `Stable prefix${SYSTEM_PROMPT_CACHE_BOUNDARY}Dynamic suffix`;
// Shape of the onPayload hook forwarded to the transport.
type PayloadHook = (payload: unknown, payloadModel: unknown) => Promise<unknown>;
// Drives a stream call with a cache-boundary prompt and captures the payload
// hook the runtime handed to the mocked streamAnthropic transport.
function captureCacheBoundaryPayloadHook(onPayload: PayloadHook) {
  const model = makeModel({ id: "claude-sonnet-4-6", maxTokens: 64000 });
  const streamFn = createAnthropicVertexStreamFn("vertex-project", "us-east5");
  const context = {
    systemPrompt: CACHE_BOUNDARY_PROMPT,
    messages: [{ role: "user", content: "Hello" }],
  } as never;
  const options = {
    cacheRetention: "short",
    onPayload,
  } as never;
  void streamFn(model, context, options);
  // Third positional argument of the transport call carries the options bag.
  const transportOptions = hoisted.streamAnthropicMock.mock.calls[0]?.[2] as {
    onPayload?: PayloadHook;
  };
  return { model, onPayload: transportOptions.onPayload };
}
// Expected Anthropic payload after cache-boundary shaping: the stable system
// prefix and the user text block gain ephemeral cache_control markers while
// the dynamic suffix stays unmarked.
function buildExpectedCacheBoundaryPayload(messageText: string) {
  const ephemeral = () => ({ cache_control: { type: "ephemeral" } });
  return {
    system: [
      { type: "text", text: "Stable prefix", ...ephemeral() },
      { type: "text", text: "Dynamic suffix" },
    ],
    messages: [
      {
        role: "user",
        content: [{ type: "text", text: messageText, ...ephemeral() }],
      },
    ],
  };
}
// Exercises the runtime stream factory directly against the mocked SDK client
// and mocked streamAnthropic transport.
describe("createAnthropicVertexStreamFn", () => {
beforeAll(async () => {
// Import after mock registration so the runtime binds the mocked modules.
({ createAnthropicVertexStreamFn, createAnthropicVertexStreamFnForModel } =
await import("./stream-runtime.js"));
});
beforeEach(() => {
hoisted.streamAnthropicMock.mockClear();
hoisted.anthropicVertexCtorMock.mockClear();
});
it("omits projectId when ADC credentials are used without an explicit project", () => {
const streamFn = createAnthropicVertexStreamFn(undefined, "global");
void streamFn(makeModel({ id: "claude-sonnet-4-6", maxTokens: 128000 }), { messages: [] }, {});
// The client options object must not contain a projectId key at all.
expect(hoisted.anthropicVertexCtorMock).toHaveBeenCalledWith({
region: "global",
});
});
it("passes an explicit baseURL through to the Vertex client", () => {
const streamFn = createAnthropicVertexStreamFn(
"vertex-project",
"us-east5",
"https://proxy.example.test/vertex/v1",
);
void streamFn(makeModel({ id: "claude-sonnet-4-6", maxTokens: 128000 }), { messages: [] }, {});
expect(hoisted.anthropicVertexCtorMock).toHaveBeenCalledWith({
projectId: "vertex-project",
region: "us-east5",
baseURL: "https://proxy.example.test/vertex/v1",
});
});
it("defaults maxTokens to the model limit instead of the old 32000 cap", () => {
const streamFn = createAnthropicVertexStreamFn("vertex-project", "us-east5");
const model = makeModel({ id: "claude-opus-4-6", maxTokens: 128000 });
void streamFn(model, { messages: [] }, {});
expect(hoisted.streamAnthropicMock).toHaveBeenCalledWith(
model,
{ messages: [] },
expect.objectContaining({
maxTokens: 128000,
}),
);
});
it("clamps explicit maxTokens to the selected model limit", () => {
const streamFn = createAnthropicVertexStreamFn("vertex-project", "us-east5");
const model = makeModel({ id: "claude-sonnet-4-6", maxTokens: 128000 });
void streamFn(model, { messages: [] }, { maxTokens: 999999 });
expect(hoisted.streamAnthropicMock).toHaveBeenCalledWith(
model,
{ messages: [] },
expect.objectContaining({
maxTokens: 128000,
}),
);
});
it("maps xhigh reasoning to max effort for adaptive Opus models", () => {
const streamFn = createAnthropicVertexStreamFn("vertex-project", "us-east5");
const model = makeModel({ id: "claude-opus-4-6", maxTokens: 64000 });
void streamFn(model, { messages: [] }, { reasoning: "xhigh" });
expect(hoisted.streamAnthropicMock).toHaveBeenCalledWith(
model,
{ messages: [] },
expect.objectContaining({
thinkingEnabled: true,
effort: "max",
}),
);
});
it("maps xhigh reasoning to xhigh effort for Opus 4.7", () => {
const streamFn = createAnthropicVertexStreamFn("vertex-project", "us-east5");
const model = makeModel({ id: "claude-opus-4-7", maxTokens: 64000 });
void streamFn(model, { messages: [] }, { reasoning: "xhigh" });
expect(hoisted.streamAnthropicMock).toHaveBeenCalledWith(
model,
{ messages: [] },
expect.objectContaining({
thinkingEnabled: true,
effort: "xhigh",
}),
);
});
it("applies Anthropic cache-boundary shaping before forwarding payload hooks", async () => {
const onPayload = vi.fn(async (payload: unknown) => payload);
const { model, onPayload: transportPayloadHook } = captureCacheBoundaryPayloadHook(onPayload);
const payload = {
system: [
{
type: "text",
text: CACHE_BOUNDARY_PROMPT,
cache_control: { type: "ephemeral" },
},
],
messages: [{ role: "user", content: "Hello" }],
};
const nextPayload = await transportPayloadHook?.(payload, model);
const expectedPayload = buildExpectedCacheBoundaryPayload("Hello");
// The user hook must observe the already-shaped payload, and the shaped
// payload is what ultimately reaches the transport.
expect(onPayload).toHaveBeenCalledWith(expectedPayload, model);
expect(nextPayload).toEqual(expectedPayload);
});
it("reapplies Anthropic cache-boundary shaping when payload hooks return a fresh payload", async () => {
const onPayload = vi.fn(async () => ({
system: [
{
type: "text",
text: CACHE_BOUNDARY_PROMPT,
},
],
messages: [{ role: "user", content: "Hello again" }],
}));
const { model, onPayload: transportPayloadHook } = captureCacheBoundaryPayloadHook(onPayload);
const nextPayload = await transportPayloadHook?.(
{
system: [
{
type: "text",
text: CACHE_BOUNDARY_PROMPT,
},
],
messages: [{ role: "user", content: "Hello" }],
},
model,
);
// A replacement payload returned by the hook is run through shaping again.
expect(nextPayload).toEqual(buildExpectedCacheBoundaryPayload("Hello again"));
});
it("omits maxTokens when neither the model nor request provide a finite limit", () => {
const streamFn = createAnthropicVertexStreamFn("vertex-project", "us-east5");
const model = makeModel({ id: "claude-sonnet-4-6" });
void streamFn(model, { messages: [] }, { maxTokens: Number.NaN });
expect(hoisted.streamAnthropicMock).toHaveBeenCalledWith(
model,
{ messages: [] },
expect.not.objectContaining({
maxTokens: expect.anything(),
}),
);
});
});
// Covers project/region/baseURL derivation when building a StreamFn from a
// configured model plus environment variables.
describe("createAnthropicVertexStreamFnForModel", () => {
beforeEach(() => {
hoisted.anthropicVertexCtorMock.mockClear();
});
it("derives project and region from the model and env", () => {
const streamFn = createAnthropicVertexStreamFnForModel(
{ baseUrl: "https://europe-west4-aiplatform.googleapis.com" },
{ GOOGLE_CLOUD_PROJECT_ID: "vertex-project" } as NodeJS.ProcessEnv,
);
void streamFn(makeModel({ id: "claude-sonnet-4-6", maxTokens: 64000 }), { messages: [] }, {});
// Region comes from the *-aiplatform host; /v1 is appended to the base URL.
expect(hoisted.anthropicVertexCtorMock).toHaveBeenCalledWith({
projectId: "vertex-project",
region: "europe-west4",
baseURL: "https://europe-west4-aiplatform.googleapis.com/v1",
});
});
it("preserves explicit custom provider base URLs", () => {
const streamFn = createAnthropicVertexStreamFnForModel(
{ baseUrl: "https://proxy.example.test/custom-root/v1" },
{ GOOGLE_CLOUD_PROJECT_ID: "vertex-project" } as NodeJS.ProcessEnv,
);
void streamFn(makeModel({ id: "claude-sonnet-4-6", maxTokens: 64000 }), { messages: [] }, {});
// URLs already ending in /v1 pass through unchanged.
expect(hoisted.anthropicVertexCtorMock).toHaveBeenCalledWith({
projectId: "vertex-project",
region: "global",
baseURL: "https://proxy.example.test/custom-root/v1",
});
});
it("adds /v1 for path-prefixed custom provider base URLs", () => {
const streamFn = createAnthropicVertexStreamFnForModel(
{ baseUrl: "https://proxy.example.test/custom-root" },
{ GOOGLE_CLOUD_PROJECT_ID: "vertex-project" } as NodeJS.ProcessEnv,
);
void streamFn(makeModel({ id: "claude-sonnet-4-6", maxTokens: 64000 }), { messages: [] }, {});
expect(hoisted.anthropicVertexCtorMock).toHaveBeenCalledWith({
projectId: "vertex-project",
region: "global",
baseURL: "https://proxy.example.test/custom-root/v1",
});
});
});

View File

@@ -0,0 +1,199 @@
import { AnthropicVertex } from "@anthropic-ai/vertex-sdk";
import type { StreamFn } from "@mariozechner/pi-agent-core";
import { streamAnthropic, type AnthropicOptions, type Model } from "@mariozechner/pi-ai";
import {
applyAnthropicPayloadPolicyToParams,
resolveAnthropicPayloadPolicy,
} from "openclaw/plugin-sdk/provider-stream-shared";
import { resolveAnthropicVertexClientRegion, resolveAnthropicVertexProjectId } from "./region.js";
// Effort levels accepted by pi-ai's AnthropicOptions.
type AnthropicVertexEffort = NonNullable<AnthropicOptions["effort"]>;
// Superset including "xhigh", which the mapping below only emits for models
// that accept it natively (see mapAnthropicAdaptiveEffort).
type AnthropicVertexAdaptiveEffort = AnthropicVertexEffort | "xhigh";
/** True for Claude Opus 4.7 model ids (dash- or dot-separated version). */
function isClaudeOpus47Model(modelId: string): boolean {
  return ["opus-4-7", "opus-4.7"].some((tag) => modelId.includes(tag));
}
/** True for Claude Opus 4.6 model ids (dash- or dot-separated version). */
function isClaudeOpus46Model(modelId: string): boolean {
  return ["opus-4-6", "opus-4.6"].some((tag) => modelId.includes(tag));
}
/** Models that take an effort level (adaptive thinking) instead of a token budget. */
function supportsAdaptiveThinking(modelId: string): boolean {
  if (isClaudeOpus47Model(modelId) || isClaudeOpus46Model(modelId)) {
    return true;
  }
  return modelId.includes("sonnet-4-6") || modelId.includes("sonnet-4.6");
}
/**
 * Maps a generic reasoning level onto an Anthropic effort value. "xhigh" is
 * model-dependent: Opus 4.7 takes it verbatim, Opus 4.6 takes "max", and
 * everything else is capped at "high". Unknown levels fall back to "high".
 */
function mapAnthropicAdaptiveEffort(
  reasoning: string,
  modelId: string,
): AnthropicVertexAdaptiveEffort {
  switch (reasoning) {
    case "minimal":
    case "low":
      return "low";
    case "medium":
      return "medium";
    case "high":
      return "high";
    case "xhigh":
      if (isClaudeOpus47Model(modelId)) {
        return "xhigh";
      }
      return isClaudeOpus46Model(modelId) ? "max" : "high";
    default:
      return "high";
  }
}
/**
 * Resolves the max-tokens value to send: each candidate must be a finite
 * positive number (floored); when both the request and the model provide one,
 * the smaller wins; otherwise whichever exists; otherwise undefined.
 */
function resolveAnthropicVertexMaxTokens(params: {
  modelMaxTokens: number | undefined;
  requestedMaxTokens: number | undefined;
}): number | undefined {
  const sanitize = (value: number | undefined): number | undefined =>
    typeof value === "number" && Number.isFinite(value) && value > 0
      ? Math.floor(value)
      : undefined;
  const modelMax = sanitize(params.modelMaxTokens);
  const requested = sanitize(params.requestedMaxTokens);
  if (requested === undefined) {
    return modelMax;
  }
  return modelMax === undefined ? requested : Math.min(requested, modelMax);
}
/**
 * Builds the transport onPayload hook: shapes the outgoing payload with the
 * resolved Anthropic payload policy, forwards it to the caller's hook, and
 * re-shapes any replacement payload the hook returns.
 */
function createAnthropicVertexOnPayload(params: {
  model: { api: string; baseUrl?: string; provider: string };
  cacheRetention: AnthropicOptions["cacheRetention"] | undefined;
  onPayload: AnthropicOptions["onPayload"] | undefined;
}): NonNullable<AnthropicOptions["onPayload"]> {
  const policy = resolveAnthropicPayloadPolicy({
    provider: params.model.provider,
    api: params.model.api,
    baseUrl: params.model.baseUrl,
    cacheRetention: params.cacheRetention,
    enableCacheControl: true,
  });
  // Mutates plain-object payloads in place per the policy; anything else
  // passes through untouched.
  const applyPolicy = (payload: unknown): unknown => {
    const isPlainObject =
      payload !== null && typeof payload === "object" && !Array.isArray(payload);
    if (isPlainObject) {
      applyAnthropicPayloadPolicyToParams(payload as Record<string, unknown>, policy);
    }
    return payload;
  };
  return async (payload, model) => {
    const shaped = applyPolicy(payload);
    const replacement = await params.onPayload?.(shaped, model);
    const keptOriginal = replacement === undefined || replacement === shaped;
    return keptOriginal ? shaped : applyPolicy(replacement);
  };
}
/**
 * Create a StreamFn that routes through pi-ai's `streamAnthropic` with an
 * injected `AnthropicVertex` client. All streaming, message conversion, and
 * event handling is handled by pi-ai — we only supply the GCP-authenticated
 * client and map SimpleStreamOptions → AnthropicOptions.
 */
export function createAnthropicVertexStreamFn(
projectId: string | undefined,
region: string,
baseURL?: string,
): StreamFn {
// One client per factory call; baseURL/projectId keys are omitted entirely
// (not set to undefined) so the SDK can apply its own defaults — e.g. no
// explicit project when ADC credentials carry one.
const client = new AnthropicVertex({
region,
...(baseURL ? { baseURL } : {}),
...(projectId ? { projectId } : {}),
});
return (model, context, options) => {
const transportModel = model as Model<"anthropic-messages"> & {
api: string;
baseUrl?: string;
provider: string;
};
// Clamp the requested token budget to the model's own limit; undefined
// means "send no max_tokens at all".
const maxTokens = resolveAnthropicVertexMaxTokens({
modelMaxTokens: transportModel.maxTokens,
requestedMaxTokens: options?.maxTokens,
});
const opts: AnthropicOptions = {
client: client as unknown as AnthropicOptions["client"],
temperature: options?.temperature,
// Conditional spread so the key is absent (not undefined) when unresolved.
...(maxTokens !== undefined ? { maxTokens } : {}),
signal: options?.signal,
cacheRetention: options?.cacheRetention,
sessionId: options?.sessionId,
headers: options?.headers,
onPayload: createAnthropicVertexOnPayload({
model: transportModel,
cacheRetention: options?.cacheRetention,
onPayload: options?.onPayload,
}),
maxRetryDelayMs: options?.maxRetryDelayMs,
metadata: options?.metadata,
};
if (options?.reasoning) {
if (supportsAdaptiveThinking(model.id)) {
// Adaptive models take an effort level instead of a token budget.
opts.thinkingEnabled = true;
opts.effort = mapAnthropicAdaptiveEffort(
options.reasoning,
model.id,
) as AnthropicVertexEffort;
} else {
// Non-adaptive models need an explicit thinking budget; fall back to
// 10000 tokens when the caller's budget table has no entry.
opts.thinkingEnabled = true;
const budgets = options.thinkingBudgets;
opts.thinkingBudgetTokens =
(budgets && options.reasoning in budgets
? budgets[options.reasoning as keyof typeof budgets]
: undefined) ?? 10000;
}
} else {
opts.thinkingEnabled = false;
}
return streamAnthropic(transportModel, context, opts);
};
}
/**
 * Normalizes a custom provider base URL for the AnthropicVertex SDK, which
 * expects its baseURL to be `/v1`-terminated.
 *
 * - empty/blank input → undefined (let the SDK pick its default endpoint)
 * - no path, or a path not ending in `/v1` → `/v1` appended
 * - already `/v1`-terminated → returned unchanged
 * - unparseable input → returned unchanged (the SDK reports the problem)
 */
function resolveAnthropicVertexSdkBaseUrl(baseUrl?: string): string | undefined {
  const trimmed = baseUrl?.trim();
  if (!trimmed) {
    return undefined;
  }
  try {
    const url = new URL(trimmed);
    // Strip trailing slashes so "/root/" and "/root" normalize identically.
    const normalizedPath = url.pathname.replace(/\/+$/, "");
    if (normalizedPath.endsWith("/v1")) {
      return trimmed;
    }
    url.pathname = normalizedPath ? `${normalizedPath}/v1` : "/v1";
    return url.toString().replace(/\/$/, "");
  } catch {
    // Not an absolute URL; pass through untouched.
    return trimmed;
  }
}
/**
 * Builds a Vertex StreamFn for a configured model: the project id is resolved
 * from the environment, the client region from the model's base URL plus env,
 * and the base URL is normalized to the SDK's `/v1` form.
 */
export function createAnthropicVertexStreamFnForModel(
  model: { baseUrl?: string },
  env: NodeJS.ProcessEnv = process.env,
): StreamFn {
  const projectId = resolveAnthropicVertexProjectId(env);
  const region = resolveAnthropicVertexClientRegion({ baseUrl: model.baseUrl, env });
  const sdkBaseUrl = resolveAnthropicVertexSdkBaseUrl(model.baseUrl);
  return createAnthropicVertexStreamFn(projectId, region, sdkBaseUrl);
}