refactor(google): move Gemini transport into plugin

This commit is contained in:
Peter Steinberger
2026-04-18 21:28:19 +01:00
parent 3a20606c04
commit 85826c83e4
15 changed files with 227 additions and 60 deletions

View File

@@ -287,6 +287,7 @@ Current bundled provider examples:
| `plugin-sdk/provider-tools` | Provider tool/schema compat helpers | `ProviderToolCompatFamily`, `buildProviderToolCompatFamilyHooks`, Gemini schema cleanup + diagnostics, and xAI compat helpers such as `resolveXaiModelCompatPatch` / `applyXaiModelCompat` |
| `plugin-sdk/provider-usage` | Provider usage helpers | `fetchClaudeUsage`, `fetchGeminiUsage`, `fetchGithubCopilotUsage`, and other provider usage helpers |
| `plugin-sdk/provider-stream` | Provider stream wrapper helpers | `ProviderStreamFamily`, `buildProviderStreamFamilyHooks`, `composeProviderStreamWrappers`, stream wrapper types, and shared Anthropic/Bedrock/Google/Kilocode/Moonshot/OpenAI/OpenRouter/Z.A.I/MiniMax/Copilot wrapper helpers |
| `plugin-sdk/provider-transport-runtime` | Provider transport helpers | Native provider transport helpers such as guarded fetch, transport message transforms, and writable transport event streams |
| `plugin-sdk/keyed-async-queue` | Ordered async queue | `KeyedAsyncQueue` |
| `plugin-sdk/media-runtime` | Shared media helpers | Media fetch/transform/store helpers plus media payload builders |
| `plugin-sdk/media-generation-runtime` | Shared media-generation helpers | Shared failover helpers, candidate selection, and missing-model messaging for image/video/music generation |

View File

@@ -146,6 +146,7 @@ explicitly promotes one as public.
| `plugin-sdk/provider-tools` | `ProviderToolCompatFamily`, `buildProviderToolCompatFamilyHooks`, Gemini schema cleanup + diagnostics, and xAI compat helpers such as `resolveXaiModelCompatPatch` / `applyXaiModelCompat` |
| `plugin-sdk/provider-usage` | `fetchClaudeUsage` and similar |
| `plugin-sdk/provider-stream` | `ProviderStreamFamily`, `buildProviderStreamFamilyHooks`, `composeProviderStreamWrappers`, stream wrapper types, and shared Anthropic/Bedrock/Google/Kilocode/Moonshot/OpenAI/OpenRouter/Z.A.I/MiniMax/Copilot wrapper helpers |
| `plugin-sdk/provider-transport-runtime` | Native provider transport helpers such as guarded fetch, transport message transforms, and writable transport event streams |
| `plugin-sdk/provider-onboard` | Onboarding config patch helpers |
| `plugin-sdk/global-singleton` | Process-local singleton/map/cache helpers |
</Accordion>

View File

@@ -2,7 +2,8 @@ import {
resolveProviderHttpRequestConfig,
type ProviderRequestTransportOverrides,
} from "openclaw/plugin-sdk/provider-http";
import { parseGoogleOauthApiKey } from "./oauth-token-shared.js";
import { parseGeminiAuth } from "./gemini-auth.js";
export { parseGeminiAuth };
export { applyGoogleGeminiModelDefault, GOOGLE_GEMINI_DEFAULT_MODEL } from "./onboard.js";
import {
DEFAULT_GOOGLE_API_BASE_URL,
@@ -23,6 +24,10 @@ export {
type GoogleThinkingInputLevel,
type GoogleThinkingLevel,
} from "./thinking-api.js";
export {
buildGoogleGenerativeAiParams,
createGoogleGenerativeAiTransportStreamFn,
} from "./transport-stream.js";
export {
DEFAULT_GOOGLE_API_BASE_URL,
isGoogleGenerativeAiApi,
@@ -37,25 +42,6 @@ export {
export { buildGoogleGeminiCliProvider } from "./gemini-cli-provider.js";
export { buildGoogleProvider } from "./provider-registration.js";
// Builds HTTP auth headers for Google Gemini requests from a configured API key.
// A "{"-prefixed key is treated as a stored JSON OAuth payload and parsed via
// parseGoogleOauthApiKey; if a token is present, Bearer auth is used. Any other
// key is sent verbatim as a plain `x-goog-api-key` header.
// NOTE(review): assumes parseGoogleOauthApiKey returns `{ token?: string }`-shaped
// data — confirm against oauth-token-shared.ts.
export function parseGeminiAuth(apiKey: string): { headers: Record<string, string> } {
const parsed = apiKey.startsWith("{") ? parseGoogleOauthApiKey(apiKey) : null;
if (parsed?.token) {
// OAuth path: authenticate with a Bearer token.
return {
headers: {
Authorization: `Bearer ${parsed.token}`,
"Content-Type": "application/json",
},
};
}
// API-key path: raw Gemini API key header.
return {
headers: {
"x-goog-api-key": apiKey,
"Content-Type": "application/json",
},
};
}
function resolveTrustedGoogleGenerativeAiBaseUrl(baseUrl?: string): string {
const normalized =
normalizeGoogleGenerativeAiBaseUrl(baseUrl ?? DEFAULT_GOOGLE_API_BASE_URL) ??

View File

@@ -0,0 +1,20 @@
import { parseGoogleOauthApiKey } from "./oauth-token-shared.js";
/**
 * Builds HTTP auth headers for Google Gemini requests from a configured API key.
 *
 * A "{"-prefixed key is treated as a stored JSON OAuth payload (parsed via
 * parseGoogleOauthApiKey) and, when a token can be extracted, sent as a Bearer
 * Authorization header. Any other key is forwarded verbatim in the plain
 * `x-goog-api-key` header. Both paths set a JSON content type.
 */
export function parseGeminiAuth(apiKey: string): { headers: Record<string, string> } {
  const oauthPayload = apiKey.startsWith("{") ? parseGoogleOauthApiKey(apiKey) : null;
  const token = oauthPayload?.token;
  const headers: Record<string, string> = { "Content-Type": "application/json" };
  if (token) {
    headers.Authorization = `Bearer ${token}`;
  } else {
    headers["x-goog-api-key"] = apiKey;
  }
  return { headers };
}

View File

@@ -9,6 +9,7 @@ import {
normalizeGoogleProviderConfig,
resolveGoogleGenerativeAiTransport,
} from "./provider-policy.js";
import { createGoogleGenerativeAiTransportStreamFn } from "./transport-stream.js";
export function buildGoogleProvider(): ProviderPlugin {
return {
@@ -48,6 +49,10 @@ export function buildGoogleProvider(): ProviderPlugin {
providerId: ctx.provider,
ctx,
}),
createStreamFn: ({ model }) =>
model.api === "google-generative-ai"
? createGoogleGenerativeAiTransportStreamFn()
: undefined,
...GOOGLE_GEMINI_PROVIDER_HOOKS,
isModernModelRef: ({ modelId }) => isModernGoogleModel(modelId),
};

View File

@@ -9,6 +9,8 @@ export {
normalizeGoogleApiBaseUrl,
normalizeGoogleModelId,
parseGeminiAuth,
buildGoogleGenerativeAiParams,
createGoogleGenerativeAiTransportStreamFn,
resolveGoogleGemini3ThinkingLevel,
resolveGoogleGenerativeAiHttpRequestConfig,
sanitizeGoogleThinkingPayload,

View File

@@ -1,18 +1,32 @@
import type { Model } from "@mariozechner/pi-ai";
import { beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
import { attachModelProviderRequestTransport } from "./provider-request-config.js";
const { buildGuardedModelFetchMock, guardedFetchMock } = vi.hoisted(() => ({
buildGuardedModelFetchMock: vi.fn(),
guardedFetchMock: vi.fn(),
}));
vi.mock("./provider-transport-fetch.js", () => ({
vi.mock("openclaw/plugin-sdk/provider-transport-runtime", async (importOriginal) => ({
...(await importOriginal()),
buildGuardedModelFetch: buildGuardedModelFetchMock,
}));
let buildGoogleGenerativeAiParams: typeof import("./google-transport-stream.js").buildGoogleGenerativeAiParams;
let createGoogleGenerativeAiTransportStreamFn: typeof import("./google-transport-stream.js").createGoogleGenerativeAiTransportStreamFn;
let buildGoogleGenerativeAiParams: typeof import("./transport-stream.js").buildGoogleGenerativeAiParams;
let createGoogleGenerativeAiTransportStreamFn: typeof import("./transport-stream.js").createGoogleGenerativeAiTransportStreamFn;
const MODEL_PROVIDER_REQUEST_TRANSPORT_SYMBOL = Symbol.for(
"openclaw.modelProviderRequestTransport",
);
function attachModelProviderRequestTransport<TModel extends object>(
model: TModel,
request: unknown,
): TModel {
return {
...model,
[MODEL_PROVIDER_REQUEST_TRANSPORT_SYMBOL]: request,
};
}
function buildGeminiModel(
overrides: Partial<Model<"google-generative-ai">> = {},
@@ -50,7 +64,7 @@ function buildSseResponse(events: unknown[]): Response {
describe("google transport stream", () => {
beforeAll(async () => {
({ buildGoogleGenerativeAiParams, createGoogleGenerativeAiTransportStreamFn } =
await import("./google-transport-stream.js"));
await import("./transport-stream.js"));
});
beforeEach(() => {
@@ -70,7 +84,10 @@ describe("google transport stream", () => {
parts: [
{ thought: true, text: "draft", thoughtSignature: "sig_1" },
{ text: "answer" },
{ functionCall: { name: "lookup", args: { q: "hello" } } },
{
thoughtSignature: "call_sig_1",
functionCall: { name: "lookup", args: { q: "hello" } },
},
],
},
finishReason: "STOP",
@@ -182,7 +199,12 @@ describe("google transport stream", () => {
content: [
{ type: "thinking", thinking: "draft", thinkingSignature: "sig_1" },
{ type: "text", text: "answer" },
{ type: "toolCall", name: "lookup", arguments: { q: "hello" } },
{
type: "toolCall",
name: "lookup",
arguments: { q: "hello" },
thoughtSignature: "call_sig_1",
},
],
});
});
@@ -263,6 +285,45 @@ describe("google transport stream", () => {
});
});
// Regression test: when assistant history comes from the SAME Gemini model,
// a tool call's `thoughtSignature` must be replayed back into the outgoing
// `functionCall` part so the API can continue the prior reasoning chain.
it("replays Gemini tool call thought signatures for same-model history", () => {
const model = buildGeminiModel({
id: "gemini-3-flash-preview",
name: "Gemini 3 Flash Preview",
});
// History message carries a toolCall block with a stored thought signature.
const params = buildGoogleGenerativeAiParams(model, {
messages: [
{
role: "assistant",
provider: "google",
api: "google-generative-ai",
model: "gemini-3-flash-preview",
stopReason: "toolUse",
timestamp: 0,
content: [
{
type: "toolCall",
id: "call_1",
name: "lookup",
arguments: { q: "hello" },
thoughtSignature: "call_sig_1",
},
],
},
],
} as never);
// The signature must surface on the serialized functionCall part verbatim.
expect(params.contents[0]).toMatchObject({
role: "model",
parts: [
{
thoughtSignature: "call_sig_1",
functionCall: { name: "lookup", args: { q: "hello" } },
},
],
});
});
it("builds direct Gemini payloads without negative fallback thinking budgets", () => {
const model = {
id: "custom-gemini-model",

View File

@@ -7,21 +7,8 @@ import {
type SimpleStreamOptions,
type ThinkingLevel,
} from "@mariozechner/pi-ai";
import { parseGeminiAuth } from "../infra/gemini-auth.js";
import { normalizeGoogleApiBaseUrl } from "../infra/google-api-base-url.js";
import { normalizeLowercaseStringOrEmpty } from "../shared/string-coerce.js";
import {
isGoogleGemini3FlashModel,
isGoogleGemini3ProModel,
resolveGoogleGemini3ThinkingLevel,
stripInvalidGoogleThinkingBudget,
type GoogleThinkingInputLevel,
type GoogleThinkingLevel,
} from "./google-thinking-compat.js";
import { buildGuardedModelFetch } from "./provider-transport-fetch.js";
import { stripSystemPromptCacheBoundary } from "./system-prompt-cache-boundary.js";
import { transformTransportMessages } from "./transport-message-transform.js";
import {
buildGuardedModelFetch,
coerceTransportToolCallArguments,
createEmptyTransportUsage,
createWritableTransportEventStream,
@@ -29,8 +16,21 @@ import {
finalizeTransportStream,
mergeTransportHeaders,
sanitizeTransportPayloadText,
stripSystemPromptCacheBoundary,
transformTransportMessages,
type WritableTransportStream,
} from "./transport-stream-shared.js";
} from "openclaw/plugin-sdk/provider-transport-runtime";
import { normalizeLowercaseStringOrEmpty } from "openclaw/plugin-sdk/text-runtime";
import { parseGeminiAuth } from "./gemini-auth.js";
import { normalizeGoogleApiBaseUrl } from "./provider-policy.js";
import {
isGoogleGemini3FlashModel,
isGoogleGemini3ProModel,
resolveGoogleGemini3ThinkingLevel,
stripInvalidGoogleThinkingBudget,
type GoogleThinkingInputLevel,
type GoogleThinkingLevel,
} from "./thinking-api.js";
type GoogleTransportModel = Model<"google-generative-ai"> & {
headers?: Record<string, string>;
@@ -69,7 +69,13 @@ type GoogleGenerateContentRequest = {
type GoogleTransportContentBlock =
| { type: "text"; text: string; textSignature?: string }
| { type: "thinking"; thinking: string; thinkingSignature?: string }
| { type: "toolCall"; id: string; name: string; arguments: Record<string, unknown> };
| {
type: "toolCall";
id: string;
name: string;
arguments: Record<string, unknown>;
thoughtSignature?: string;
};
type MutableAssistantOutput = {
role: "assistant";
@@ -711,6 +717,7 @@ export function createGoogleGenerativeAiTransportStreamFn(): StreamFn {
id: toolCallId,
name: part.functionCall.name || "",
arguments: part.functionCall.args ?? {},
thoughtSignature: part.thoughtSignature,
};
output.content.push(toolCall);
const blockIndex = output.content.length - 1;

View File

@@ -1057,6 +1057,10 @@
"types": "./dist/plugin-sdk/provider-stream-shared.d.ts",
"default": "./dist/plugin-sdk/provider-stream-shared.js"
},
"./plugin-sdk/provider-transport-runtime": {
"types": "./dist/plugin-sdk/provider-transport-runtime.d.ts",
"default": "./dist/plugin-sdk/provider-transport-runtime.js"
},
"./plugin-sdk/provider-stream": {
"types": "./dist/plugin-sdk/provider-stream.d.ts",
"default": "./dist/plugin-sdk/provider-stream.js"

View File

@@ -250,6 +250,7 @@
"provider-onboard",
"provider-stream-family",
"provider-stream-shared",
"provider-transport-runtime",
"provider-stream",
"provider-tools",
"provider-usage",

View File

@@ -26,7 +26,13 @@ export function registerProviderStreamForModel<TApi extends Api>(params: {
modelId: params.model.id,
model: params.model,
},
}) ?? createTransportAwareStreamFnForModel(params.model);
}) ??
createTransportAwareStreamFnForModel(params.model, {
cfg: params.cfg,
agentDir: params.agentDir,
workspaceDir: params.workspaceDir,
env: params.env,
});
if (!streamFn) {
return undefined;
}

View File

@@ -1,7 +1,8 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
import type { Api, Model } from "@mariozechner/pi-ai";
import type { OpenClawConfig } from "../config/types.openclaw.js";
import { resolveProviderStreamFn } from "../plugins/provider-runtime.js";
import { createAnthropicMessagesTransportStreamFn } from "./anthropic-transport-stream.js";
import { createGoogleGenerativeAiTransportStreamFn } from "./google-transport-stream.js";
import {
createAzureOpenAIResponsesTransportStreamFn,
createOpenAICompletionsTransportStreamFn,
@@ -27,8 +28,55 @@ const SIMPLE_TRANSPORT_API_ALIAS: Record<string, Api> = {
"google-generative-ai": "openclaw-google-generative-ai-transport",
};
function createSupportedTransportStreamFn(api: Api): StreamFn | undefined {
switch (api) {
type ProviderTransportStreamContext = {
cfg?: OpenClawConfig;
agentDir?: string;
workspaceDir?: string;
env?: NodeJS.ProcessEnv;
};
// Resolves a plugin-owned Google transport StreamFn: first asks the model's
// own provider, then falls back to the bundled "google" provider plugin.
// Returns undefined when neither plugin supplies a stream fn.
function createProviderOwnedGoogleTransportStreamFn(
  model: Model<Api>,
  ctx?: ProviderTransportStreamContext,
): StreamFn | undefined {
  const resolveFor = (provider: string): StreamFn | undefined =>
    resolveProviderStreamFn({
      provider,
      config: ctx?.cfg,
      workspaceDir: ctx?.workspaceDir,
      env: ctx?.env,
      context: {
        config: ctx?.cfg,
        agentDir: ctx?.agentDir,
        workspaceDir: ctx?.workspaceDir,
        // The context always reports the model's real provider, even on the
        // "google" fallback lookup (matches the prior inline behavior).
        provider: model.provider,
        modelId: model.id,
        model,
      },
    }) ?? undefined;
  return resolveFor(model.provider) ?? resolveFor("google");
}
function createSupportedTransportStreamFn(
model: Model<Api>,
ctx?: ProviderTransportStreamContext,
): StreamFn | undefined {
switch (model.api) {
case "openai-responses":
case "openai-codex-responses":
return createOpenAIResponsesTransportStreamFn();
@@ -39,7 +87,7 @@ function createSupportedTransportStreamFn(api: Api): StreamFn | undefined {
case "anthropic-messages":
return createAnthropicMessagesTransportStreamFn();
case "google-generative-ai":
return createGoogleGenerativeAiTransportStreamFn();
return createProviderOwnedGoogleTransportStreamFn(model, ctx);
default:
return undefined;
}
@@ -58,7 +106,10 @@ export function resolveTransportAwareSimpleApi(api: Api): Api | undefined {
return SIMPLE_TRANSPORT_API_ALIAS[api];
}
export function createTransportAwareStreamFnForModel(model: Model<Api>): StreamFn | undefined {
export function createTransportAwareStreamFnForModel(
model: Model<Api>,
ctx?: ProviderTransportStreamContext,
): StreamFn | undefined {
if (!hasTransportOverrides(model)) {
return undefined;
}
@@ -67,18 +118,24 @@ export function createTransportAwareStreamFnForModel(model: Model<Api>): StreamF
`Model-provider request.proxy/request.tls is not yet supported for api "${model.api}"`,
);
}
return createSupportedTransportStreamFn(model.api);
return createSupportedTransportStreamFn(model, ctx);
}
export function createBoundaryAwareStreamFnForModel(model: Model<Api>): StreamFn | undefined {
export function createBoundaryAwareStreamFnForModel(
model: Model<Api>,
ctx?: ProviderTransportStreamContext,
): StreamFn | undefined {
if (!isTransportAwareApiSupported(model.api)) {
return undefined;
}
return createSupportedTransportStreamFn(model.api);
return createSupportedTransportStreamFn(model, ctx);
}
export function prepareTransportAwareSimpleModel<TApi extends Api>(model: Model<TApi>): Model<Api> {
const streamFn = createTransportAwareStreamFnForModel(model as Model<Api>);
export function prepareTransportAwareSimpleModel<TApi extends Api>(
model: Model<TApi>,
ctx?: ProviderTransportStreamContext,
): Model<Api> {
const streamFn = createTransportAwareStreamFnForModel(model as Model<Api>, ctx);
const alias = resolveTransportAwareSimpleApi(model.api);
if (!streamFn || !alias) {
return model;
@@ -89,6 +146,9 @@ export function prepareTransportAwareSimpleModel<TApi extends Api>(model: Model<
};
}
export function buildTransportAwareSimpleStreamFn(model: Model<Api>): StreamFn | undefined {
return createTransportAwareStreamFnForModel(model);
export function buildTransportAwareSimpleStreamFn(
model: Model<Api>,
ctx?: ProviderTransportStreamContext,
): StreamFn | undefined {
return createTransportAwareStreamFnForModel(model, ctx);
}

View File

@@ -147,8 +147,8 @@ describe("prepareModelForSimpleCompletion", () => {
const result = prepareModelForSimpleCompletion({ model });
expect(prepareTransportAwareSimpleModel).toHaveBeenCalledWith(model);
expect(buildTransportAwareSimpleStreamFn).toHaveBeenCalledWith(model);
expect(prepareTransportAwareSimpleModel).toHaveBeenCalledWith(model, { cfg: undefined });
expect(buildTransportAwareSimpleStreamFn).toHaveBeenCalledWith(model, { cfg: undefined });
expect(ensureCustomApiRegistered).toHaveBeenCalledWith(
"openclaw-openai-responses-transport",
"transport-stream",

View File

@@ -23,9 +23,9 @@ export function prepareModelForSimpleCompletion<TApi extends Api>(params: {
return model;
}
const transportAwareModel = prepareTransportAwareSimpleModel(model);
const transportAwareModel = prepareTransportAwareSimpleModel(model, { cfg });
if (transportAwareModel !== model) {
const streamFn = buildTransportAwareSimpleStreamFn(model);
const streamFn = buildTransportAwareSimpleStreamFn(model, { cfg });
if (streamFn) {
ensureCustomApiRegistered(transportAwareModel.api, streamFn);
return transportAwareModel;

View File

@@ -0,0 +1,13 @@
// Public plugin-sdk barrel for native provider transport helpers.
// Re-exports the internal agents/ transport utilities (guarded fetch,
// system-prompt cache-boundary stripping, message transforms, and the shared
// writable transport event-stream helpers) under the stable
// `openclaw/plugin-sdk/provider-transport-runtime` entry point.
export { buildGuardedModelFetch } from "../agents/provider-transport-fetch.js";
export { stripSystemPromptCacheBoundary } from "../agents/system-prompt-cache-boundary.js";
export { transformTransportMessages } from "../agents/transport-message-transform.js";
export {
coerceTransportToolCallArguments,
createEmptyTransportUsage,
createWritableTransportEventStream,
failTransportStream,
finalizeTransportStream,
mergeTransportHeaders,
sanitizeTransportPayloadText,
type WritableTransportStream,
} from "../agents/transport-stream-shared.js";