feat: add Google Meet paired-node Chrome transport

This commit is contained in:
Peter Steinberger
2026-04-24 05:31:20 +01:00
parent 098557623f
commit 569290c36d
19 changed files with 1226 additions and 34 deletions

View File

@@ -17,6 +17,7 @@ Docs: https://docs.openclaw.ai
- Codex harness/context-engine: run context-engine bootstrap, assembly, post-turn maintenance, and engine-owned compaction in Codex app-server sessions while keeping native Codex thread state and compaction auditable. (#70809) Thanks @jalehman.
- Plugins/Google Meet: add a bundled participant plugin with personal Google auth, explicit meeting URL joins, Chrome and Twilio transports, and realtime voice support. (#70765) Thanks @steipete.
- Plugins/Google Meet: default Chrome realtime sessions to OpenAI plus SoX `rec`/`play` audio bridge commands, so the usual setup only needs the plugin enabled and `OPENAI_API_KEY`.
- Plugins/Google Meet: add a `chrome-node` transport so a paired macOS node, such as a Parallels VM, can own Chrome, BlackHole, and SoX while the Gateway machine keeps the agent and model key.
- Providers/OpenAI: add image generation and reference-image editing through Codex OAuth, so `openai/gpt-image-2` works without an `OPENAI_API_KEY`. Fixes #70703.
- Providers/OpenRouter: add image generation and reference-image editing through `image_generate`, so OpenRouter image models work with `OPENROUTER_API_KEY`. Fixes #55066 via #67668. Thanks @notamicrodose.
- Image generation: let agents request provider-supported quality and output format hints, and pass OpenAI-specific background, moderation, compression, and user hints through the `image_generate` tool. (#70503) Thanks @ottodeng.

View File

@@ -1,2 +1,2 @@
5949119eccfa6ccc1bca232b9cf6bb1df0bd4b5eb53f8314db59c95bd8fcb2b0 plugin-sdk-api-baseline.json
f2827b8c1078eef3ba84b12cafab560c42516bfc8af20c8a5bdd4b6fcee5158a plugin-sdk-api-baseline.jsonl
b9c997ae9dba2c534942c1c79e8285f773ab7481c282e8a981e362e8132f944f plugin-sdk-api-baseline.json
c2f8370ae879d4404a9ac7f7aa7f43859e990f04f4872cbd8bc48da05d4bc671 plugin-sdk-api-baseline.jsonl

View File

@@ -2,7 +2,7 @@
summary: "Google Meet plugin: join explicit Meet URLs through Chrome or Twilio with realtime voice defaults"
read_when:
- You want an OpenClaw agent to join a Google Meet call
- You are configuring Chrome or Twilio as a Google Meet transport
- You are configuring Chrome, Chrome node, or Twilio as a Google Meet transport
title: "Google Meet plugin"
---
@@ -19,6 +19,7 @@ The plugin is explicit by design:
- Auth starts as personal Google OAuth or an already signed-in Chrome profile.
- There is no automatic consent announcement.
- The default Chrome audio backend is `BlackHole 2ch`.
- Chrome can run locally or on a paired node host.
- Twilio accepts a dial-in number plus optional PIN or DTMF sequence.
- The CLI command is `googlemeet`; `meet` is reserved for broader agent
teleconference workflows.
@@ -88,6 +89,111 @@ the microphone/speaker path used by OpenClaw. For clean duplex audio, use
separate virtual devices or a Loopback-style graph; a single BlackHole device is
enough for a first smoke test but can echo.
### Local Gateway + Parallels Chrome
You do **not** need a full OpenClaw Gateway or model API key inside a macOS VM
just to make the VM own Chrome. Run the Gateway and agent locally, then run a
node host in the VM. Enable the bundled plugin on the VM once so the node
advertises the Chrome command.
What runs where:
- Gateway host: OpenClaw Gateway, agent workspace, model/API keys, realtime
provider, and the Google Meet plugin config.
- Parallels macOS VM: OpenClaw CLI/node host, Google Chrome, SoX, BlackHole 2ch,
and a Chrome profile signed in to Google.
- Not needed in the VM: Gateway service, agent config, OpenAI/GPT key, or model
provider setup.
Install the VM dependencies:
```bash
brew install blackhole-2ch sox
```
Reboot the VM after installing BlackHole so macOS exposes `BlackHole 2ch`:
```bash
sudo reboot
```
After reboot, verify the VM can see the audio device and SoX commands:
```bash
system_profiler SPAudioDataType | grep -i BlackHole
command -v rec play
```
Install or update OpenClaw in the VM, then enable the bundled plugin there:
```bash
openclaw plugins enable google-meet
```
Start the node host in the VM:
```bash
openclaw node run --host <gateway-host> --port 18789 --display-name parallels-macos
```
Approve the node from the Gateway host:
```bash
openclaw devices list
openclaw devices approve <requestId>
```
Confirm the Gateway sees the node and that it advertises `googlemeet.chrome`:
```bash
openclaw nodes status
```
Route Meet through that node on the Gateway host:
```json5
{
plugins: {
entries: {
"google-meet": {
enabled: true,
config: {
defaultTransport: "chrome-node",
chromeNode: {
node: "parallels-macos",
},
},
},
},
},
}
```
Now join normally from the Gateway host:
```bash
openclaw googlemeet join https://meet.google.com/abc-defg-hij
```
or ask the agent to use the `google_meet` tool with `transport: "chrome-node"`.
If `chromeNode.node` is omitted, OpenClaw auto-selects only when exactly one
connected node advertises `googlemeet.chrome`. If several capable nodes are
connected, set `chromeNode.node` to the node id, display name, or remote IP.
Common failure checks:
- `No connected Google Meet-capable node`: start `openclaw node run` in the VM,
approve pairing, and make sure `openclaw plugins enable google-meet` was run
in the VM.
- `BlackHole 2ch audio device not found on the node`: install `blackhole-2ch`
in the VM and reboot the VM.
- Chrome opens but cannot join: sign in to Chrome inside the VM and confirm that
profile can join the Meet URL manually.
- No audio: in Meet, route microphone/speaker through the virtual audio device
path used by OpenClaw; use separate virtual devices or Loopback-style routing
for clean duplex audio.
## Install notes
The Chrome realtime default uses two external tools:
@@ -110,10 +216,13 @@ upstream licensing terms or get a separate license from Existential Audio.
Chrome transport opens the Meet URL in Google Chrome and joins as the signed-in
Chrome profile. On macOS, the plugin checks for `BlackHole 2ch` before launch.
If configured, it also runs an audio bridge health command and startup command
before opening Chrome.
before opening Chrome. Use `chrome` when Chrome/audio live on the Gateway host;
use `chrome-node` when Chrome/audio live on a paired node such as a Parallels
macOS VM.
```bash
openclaw googlemeet join https://meet.google.com/abc-defg-hij --transport chrome
openclaw googlemeet join https://meet.google.com/abc-defg-hij --transport chrome-node
```
Route Chrome microphone and speaker audio through the local OpenClaw audio
@@ -210,6 +319,7 @@ Defaults:
- `defaultTransport: "chrome"`
- `defaultMode: "realtime"`
- `chromeNode.node`: optional node id/name/IP for `chrome-node`
- `chrome.audioBackend: "blackhole-2ch"`
- `chrome.audioInputCommand`: SoX `rec` command writing 8 kHz G.711 mu-law
audio to stdout
@@ -230,6 +340,9 @@ Optional overrides:
chrome: {
browserProfile: "Default",
},
chromeNode: {
node: "parallels-macos",
},
realtime: {
toolPolicy: "owner",
},
@@ -259,11 +372,16 @@ Agents can use the `google_meet` tool:
{
"action": "join",
"url": "https://meet.google.com/abc-defg-hij",
"transport": "chrome",
"transport": "chrome-node",
"mode": "realtime"
}
```
Use `transport: "chrome"` when Chrome runs on the Gateway host. Use
`transport: "chrome-node"` when Chrome runs on a paired node such as a Parallels
VM. In both cases the realtime model and `openclaw_agent_consult` run on the
Gateway host, so model credentials stay there.
Use `action: "status"` to list active sessions or inspect a session ID. Use
`action: "leave"` to mark a session ended.

View File

@@ -3,7 +3,7 @@ summary: "api.runtime -- the injected runtime helpers available to plugins"
title: "Plugin runtime helpers"
sidebarTitle: "Runtime Helpers"
read_when:
- You need to call core helpers from a plugin (TTS, STT, image gen, web search, subagent)
- You need to call core helpers from a plugin (TTS, STT, image gen, web search, subagent, nodes)
- You want to understand what api.runtime exposes
- You are accessing config, agent, or media helpers from plugin code
---
@@ -119,6 +119,27 @@ await api.runtime.subagent.deleteSession({
Untrusted plugins can still run subagents, but override requests are rejected.
</Warning>
### `api.runtime.nodes`
List connected nodes and invoke a node-host command from Gateway-loaded plugin
code. Use this when a plugin owns local work on a paired device, for example a
browser or audio bridge on another Mac.
```typescript
const { nodes } = await api.runtime.nodes.list({ connected: true });
const result = await api.runtime.nodes.invoke({
nodeId: "mac-studio",
command: "my-plugin.command",
params: { action: "start" },
timeoutMs: 30000,
});
```
This runtime is only available inside the Gateway. Node commands still go
through normal Gateway node pairing, command allowlists, and node-local command
handling.
### `api.runtime.taskFlow`
Bind a Task Flow runtime to an existing OpenClaw session key or trusted tool

View File

@@ -16,6 +16,7 @@ import {
refreshGoogleMeetAccessToken,
resolveGoogleMeetAccessToken,
} from "./src/oauth.js";
import { startNodeRealtimeAudioBridge } from "./src/realtime-node.js";
import { startCommandRealtimeAudioBridge } from "./src/realtime.js";
import { normalizeMeetUrl } from "./src/runtime.js";
import { buildMeetDtmfSequence, normalizeDialInNumber } from "./src/transports/twilio.js";
@@ -65,10 +66,51 @@ type TestBridgeProcess = {
on: EventEmitter["on"];
};
function setup(config: Record<string, unknown> = {}) {
type NodeListResult = {
nodes: Array<{
nodeId: string;
displayName?: string;
connected?: boolean;
commands?: string[];
remoteIp?: string;
}>;
};
function setup(
config: Record<string, unknown> = {},
options: {
nodesListResult?: NodeListResult;
nodesInvokeResult?: unknown;
nodesInvokeHandler?: (params: {
nodeId: string;
command: string;
params?: unknown;
timeoutMs?: number;
}) => Promise<unknown>;
} = {},
) {
const methods = new Map<string, unknown>();
const tools: unknown[] = [];
const cliRegistrations: unknown[] = [];
const nodeHostCommands: unknown[] = [];
const nodesList = vi.fn(
async () =>
options.nodesListResult ?? {
nodes: [
{
nodeId: "node-1",
displayName: "parallels-macos",
connected: true,
commands: ["googlemeet.chrome"],
},
],
},
);
const nodesInvoke = vi.fn(async (params) =>
options.nodesInvokeHandler
? options.nodesInvokeHandler(params)
: (options.nodesInvokeResult ?? { launched: true }),
);
const runCommandWithTimeout = vi.fn(async (argv: string[]) => {
if (argv[0] === "system_profiler") {
return { code: 0, stdout: "BlackHole 2ch", stderr: "" };
@@ -87,14 +129,27 @@ function setup(config: Record<string, unknown> = {}) {
runCommandWithTimeout,
formatNativeDependencyHint: vi.fn(() => "Install with brew install blackhole-2ch."),
},
nodes: {
list: nodesList,
invoke: nodesInvoke,
},
} as unknown as OpenClawPluginApi["runtime"],
logger: noopLogger,
registerGatewayMethod: (method: string, handler: unknown) => methods.set(method, handler),
registerTool: (tool: unknown) => tools.push(tool),
registerCli: (_registrar: unknown, opts: unknown) => cliRegistrations.push(opts),
registerNodeHostCommand: (command: unknown) => nodeHostCommands.push(command),
});
plugin.register(api);
return { cliRegistrations, methods, tools, runCommandWithTimeout };
return {
cliRegistrations,
methods,
tools,
runCommandWithTimeout,
nodesList,
nodesInvoke,
nodeHostCommands,
};
}
describe("google-meet plugin", () => {
@@ -207,6 +262,18 @@ describe("google-meet plugin", () => {
});
});
it("registers the node-host command used by chrome-node transport", () => {
const { nodeHostCommands } = setup();
expect(nodeHostCommands).toContainEqual(
expect.objectContaining({
command: "googlemeet.chrome",
cap: "google-meet",
handle: expect.any(Function),
}),
);
});
it("uses a provider-safe flat tool parameter schema", () => {
const { tools } = setup();
const tool = tools[0] as { parameters: unknown };
@@ -219,7 +286,7 @@ describe("google-meet plugin", () => {
type: "string",
enum: ["join", "status", "setup_status", "resolve_space", "preflight", "leave"],
},
transport: { type: "string", enum: ["chrome", "twilio"] },
transport: { type: "string", enum: ["chrome", "chrome-node", "twilio"] },
mode: { type: "string", enum: ["realtime", "transcribe"] },
},
});
@@ -452,6 +519,110 @@ describe("google-meet plugin", () => {
}
});
it("joins Chrome on a paired node without local Chrome or BlackHole", async () => {
const { methods, nodesList, nodesInvoke } = setup({
defaultTransport: "chrome-node",
defaultMode: "transcribe",
chromeNode: { node: "parallels-macos" },
});
const handler = methods.get("googlemeet.join") as
| ((ctx: {
params: Record<string, unknown>;
respond: ReturnType<typeof vi.fn>;
}) => Promise<void>)
| undefined;
const respond = vi.fn();
await handler?.({
params: { url: "https://meet.google.com/abc-defg-hij" },
respond,
});
expect(respond.mock.calls[0]?.[0]).toBe(true);
expect(nodesList).toHaveBeenCalledWith({ connected: true });
expect(nodesInvoke).toHaveBeenCalledWith(
expect.objectContaining({
nodeId: "node-1",
command: "googlemeet.chrome",
params: expect.objectContaining({
action: "start",
url: "https://meet.google.com/abc-defg-hij",
mode: "transcribe",
}),
}),
);
expect(respond.mock.calls[0]?.[1]).toMatchObject({
session: {
transport: "chrome-node",
chrome: {
nodeId: "node-1",
launched: true,
},
},
});
});
it("explains when chrome-node has no capable paired node", async () => {
const { tools } = setup(
{
defaultTransport: "chrome-node",
defaultMode: "transcribe",
},
{
nodesListResult: { nodes: [] },
},
);
const tool = tools[0] as {
execute: (id: string, params: unknown) => Promise<{ details: { error?: string } }>;
};
const result = await tool.execute("id", {
action: "join",
url: "https://meet.google.com/abc-defg-hij",
});
expect(result.details.error).toContain("No connected Google Meet-capable node");
expect(result.details.error).toContain("openclaw node run");
});
it("requires chromeNode.node when multiple capable nodes are connected", async () => {
const { tools } = setup(
{
defaultTransport: "chrome-node",
defaultMode: "transcribe",
},
{
nodesListResult: {
nodes: [
{
nodeId: "node-1",
displayName: "parallels-macos",
connected: true,
commands: ["googlemeet.chrome"],
},
{
nodeId: "node-2",
displayName: "mac-studio-vm",
connected: true,
commands: ["googlemeet.chrome"],
},
],
},
},
);
const tool = tools[0] as {
execute: (id: string, params: unknown) => Promise<{ details: { error?: string } }>;
};
const result = await tool.execute("id", {
action: "join",
url: "https://meet.google.com/abc-defg-hij",
});
expect(result.details.error).toContain("Multiple Google Meet-capable nodes connected");
expect(result.details.error).toContain("chromeNode.node");
});
it("runs configured Chrome audio bridge commands before launch", async () => {
const originalPlatform = process.platform;
Object.defineProperty(process, "platform", { value: "darwin" });
@@ -626,4 +797,140 @@ describe("google-meet plugin", () => {
expect(inputProcess.kill).toHaveBeenCalledWith("SIGTERM");
expect(outputProcess.kill).toHaveBeenCalledWith("SIGTERM");
});
it("pipes paired-node command-pair audio through the realtime provider", async () => {
let callbacks:
| {
onAudio: (audio: Buffer) => void;
onToolCall?: (event: {
itemId: string;
callId: string;
name: string;
args: unknown;
}) => void;
tools?: unknown[];
}
| undefined;
const sendAudio = vi.fn();
const bridge = {
connect: vi.fn(async () => {}),
sendAudio,
setMediaTimestamp: vi.fn(),
submitToolResult: vi.fn(),
acknowledgeMark: vi.fn(),
close: vi.fn(),
isConnected: vi.fn(() => true),
};
const provider: RealtimeVoiceProviderPlugin = {
id: "openai",
label: "OpenAI",
autoSelectOrder: 1,
resolveConfig: ({ rawConfig }) => rawConfig,
isConfigured: () => true,
createBridge: (req) => {
callbacks = req;
return bridge;
},
};
let pullCount = 0;
const runtime = {
nodes: {
invoke: vi.fn(async ({ params }: { params?: { action?: string; base64?: string } }) => {
if (params?.action === "pullAudio") {
pullCount += 1;
if (pullCount === 1) {
return { bridgeId: "bridge-1", base64: Buffer.from([9, 8, 7]).toString("base64") };
}
await new Promise((resolve) => setTimeout(resolve, 1_000));
return { bridgeId: "bridge-1" };
}
return { ok: true };
}),
},
agent: {
resolveAgentDir: vi.fn(() => "/tmp/agent"),
resolveAgentWorkspaceDir: vi.fn(() => "/tmp/workspace"),
ensureAgentWorkspace: vi.fn(async () => {}),
session: {
resolveStorePath: vi.fn(() => "/tmp/sessions.json"),
loadSessionStore: vi.fn(() => ({})),
saveSessionStore: vi.fn(async () => {}),
resolveSessionFilePath: vi.fn(() => "/tmp/session.json"),
},
runEmbeddedPiAgent: vi.fn(async () => ({
payloads: [{ text: "Use the launch update." }],
meta: {},
})),
resolveAgentTimeoutMs: vi.fn(() => 1000),
},
};
const handle = await startNodeRealtimeAudioBridge({
config: resolveGoogleMeetConfig({
realtime: { provider: "openai", model: "gpt-realtime" },
}),
fullConfig: {} as never,
runtime: runtime as never,
meetingSessionId: "meet-1",
nodeId: "node-1",
bridgeId: "bridge-1",
logger: noopLogger,
providers: [provider],
});
callbacks?.onAudio(Buffer.from([1, 2, 3]));
callbacks?.onToolCall?.({
itemId: "item-1",
callId: "tool-call-1",
name: "openclaw_agent_consult",
args: { question: "What should I say?" },
});
await vi.waitFor(() => {
expect(sendAudio).toHaveBeenCalledWith(Buffer.from([9, 8, 7]));
});
await vi.waitFor(() => {
expect(runtime.nodes.invoke).toHaveBeenCalledWith(
expect.objectContaining({
nodeId: "node-1",
command: "googlemeet.chrome",
params: expect.objectContaining({
action: "pushAudio",
bridgeId: "bridge-1",
base64: Buffer.from([1, 2, 3]).toString("base64"),
}),
}),
);
});
await vi.waitFor(() => {
expect(bridge.submitToolResult).toHaveBeenCalledWith("tool-call-1", {
text: "Use the launch update.",
});
});
expect(callbacks).toMatchObject({
tools: [
expect.objectContaining({
name: "openclaw_agent_consult",
}),
],
});
expect(handle).toMatchObject({
type: "node-command-pair",
providerId: "openai",
nodeId: "node-1",
bridgeId: "bridge-1",
});
await handle.stop();
expect(bridge.close).toHaveBeenCalled();
expect(runtime.nodes.invoke).toHaveBeenCalledWith(
expect.objectContaining({
nodeId: "node-1",
command: "googlemeet.chrome",
params: { action: "stop", bridgeId: "bridge-1" },
timeoutMs: 5_000,
}),
);
});
});

View File

@@ -11,6 +11,7 @@ import {
type GoogleMeetTransport,
} from "./src/config.js";
import { buildGoogleMeetPreflightReport, fetchGoogleMeetSpace } from "./src/meet.js";
import { handleGoogleMeetNodeHostCommand } from "./src/node-host.js";
import { resolveGoogleMeetAccessToken } from "./src/oauth.js";
import { GoogleMeetRuntime } from "./src/runtime.js";
@@ -30,7 +31,7 @@ const googleMeetConfigSchema = {
},
defaultTransport: {
label: "Default Transport",
help: "Chrome uses a signed-in browser profile. Twilio uses Meet dial-in numbers.",
help: "Chrome uses a signed-in browser profile. Chrome-node runs Chrome on a paired node. Twilio uses Meet dial-in numbers.",
},
defaultMode: {
label: "Default Mode",
@@ -57,6 +58,11 @@ const googleMeetConfigSchema = {
label: "Audio Bridge Health Command",
advanced: true,
},
"chromeNode.node": {
label: "Chrome Node",
help: "Node id/name/IP that owns Chrome, BlackHole, and SoX for chrome-node transport.",
advanced: true,
},
"twilio.defaultDialInNumber": {
label: "Default Dial-In Number",
placeholder: "+15551234567",
@@ -110,7 +116,7 @@ const GoogleMeetToolSchema = Type.Object({
}),
url: Type.Optional(Type.String({ description: "Explicit https://meet.google.com/... URL" })),
transport: Type.Optional(
Type.String({ enum: ["chrome", "twilio"], description: "Join transport" }),
Type.String({ enum: ["chrome", "chrome-node", "twilio"], description: "Join transport" }),
),
mode: Type.Optional(Type.String({ enum: ["realtime", "transcribe"], description: "Join mode" })),
dialInNumber: Type.Optional(Type.String({ description: "Meet dial-in number for Twilio" })),
@@ -139,7 +145,7 @@ function json(payload: unknown) {
}
function normalizeTransport(value: unknown): GoogleMeetTransport | undefined {
return value === "chrome" || value === "twilio" ? value : undefined;
return value === "chrome" || value === "chrome-node" || value === "twilio" ? value : undefined;
}
function normalizeMode(value: unknown): GoogleMeetMode | undefined {
@@ -321,6 +327,12 @@ export default definePluginEntry({
},
});
api.registerNodeHostCommand({
command: "googlemeet.chrome",
cap: "google-meet",
handle: handleGoogleMeetNodeHostCommand,
});
api.registerCli(
({ program }) =>
registerGoogleMeetCli({

View File

@@ -20,7 +20,7 @@
},
"defaultTransport": {
"label": "Default Transport",
"help": "Chrome uses a signed-in browser profile. Twilio uses Meet dial-in numbers."
"help": "Chrome uses a signed-in browser profile. Chrome-node runs Chrome on a paired node. Twilio uses Meet dial-in numbers."
},
"defaultMode": {
"label": "Default Mode",
@@ -55,6 +55,11 @@
"label": "Audio Bridge Health Command",
"advanced": true
},
"chromeNode.node": {
"label": "Chrome Node",
"help": "Node id/name/IP that owns Chrome, BlackHole, and SoX for chrome-node transport.",
"advanced": true
},
"twilio.defaultDialInNumber": {
"label": "Default Dial-In Number",
"placeholder": "+15551234567"
@@ -157,7 +162,7 @@
},
"defaultTransport": {
"type": "string",
"enum": ["chrome", "twilio"],
"enum": ["chrome", "chrome-node", "twilio"],
"default": "chrome"
},
"defaultMode": {
@@ -241,6 +246,15 @@
}
}
},
"chromeNode": {
"type": "object",
"additionalProperties": false,
"properties": {
"node": {
"type": "string"
}
}
},
"twilio": {
"type": "object",
"additionalProperties": false,

View File

@@ -175,7 +175,7 @@ export function registerGoogleMeetCli(params: {
root
.command("join")
.argument("[url]", "Explicit https://meet.google.com/... URL")
.option("--transport <transport>", "Transport: chrome or twilio")
.option("--transport <transport>", "Transport: chrome, chrome-node, or twilio")
.option("--mode <mode>", "Mode: realtime or transcribe")
.option("--dial-in-number <phone>", "Meet dial-in number for Twilio transport")
.option("--pin <pin>", "Meet phone PIN; # is appended if omitted")

View File

@@ -4,7 +4,7 @@ import {
normalizeOptionalString,
} from "openclaw/plugin-sdk/text-runtime";
export type GoogleMeetTransport = "chrome" | "twilio";
export type GoogleMeetTransport = "chrome" | "chrome-node" | "twilio";
export type GoogleMeetMode = "realtime" | "transcribe";
export type GoogleMeetToolPolicy = "safe-read-only" | "owner" | "none";
@@ -28,6 +28,9 @@ export type GoogleMeetConfig = {
audioBridgeCommand?: string[];
audioBridgeHealthCommand?: string[];
};
chromeNode: {
node?: string;
};
twilio: {
defaultDialInNumber?: string;
defaultPin?: string;
@@ -112,6 +115,7 @@ export const DEFAULT_GOOGLE_MEET_CONFIG: GoogleMeetConfig = {
audioInputCommand: [...DEFAULT_GOOGLE_MEET_AUDIO_INPUT_COMMAND],
audioOutputCommand: [...DEFAULT_GOOGLE_MEET_AUDIO_OUTPUT_COMMAND],
},
chromeNode: {},
twilio: {},
voiceCall: {
enabled: true,
@@ -234,7 +238,9 @@ function resolveProvidersConfig(value: unknown): Record<string, Record<string, u
function resolveTransport(value: unknown, fallback: GoogleMeetTransport): GoogleMeetTransport {
const normalized = normalizeOptionalLowercaseString(value);
return normalized === "chrome" || normalized === "twilio" ? normalized : fallback;
return normalized === "chrome" || normalized === "chrome-node" || normalized === "twilio"
? normalized
: fallback;
}
function resolveMode(value: unknown, fallback: GoogleMeetMode): GoogleMeetMode {
@@ -261,6 +267,7 @@ export function resolveGoogleMeetConfigWithEnv(
const defaults = asRecord(raw.defaults);
const preview = asRecord(raw.preview);
const chrome = asRecord(raw.chrome);
const chromeNode = asRecord(raw.chromeNode);
const twilio = asRecord(raw.twilio);
const voiceCall = asRecord(raw.voiceCall);
const realtime = asRecord(raw.realtime);
@@ -303,6 +310,9 @@ export function resolveGoogleMeetConfigWithEnv(
audioBridgeCommand: resolveStringArray(chrome.audioBridgeCommand),
audioBridgeHealthCommand: resolveStringArray(chrome.audioBridgeHealthCommand),
},
chromeNode: {
node: normalizeOptionalString(chromeNode.node),
},
twilio: {
defaultDialInNumber: normalizeOptionalString(twilio.defaultDialInNumber),
defaultPin: normalizeOptionalString(twilio.defaultPin),

View File

@@ -0,0 +1,279 @@
import { spawn, spawnSync, type ChildProcess } from "node:child_process";
import { randomUUID } from "node:crypto";
import { setTimeout as sleep } from "node:timers/promises";
import { formatErrorMessage } from "openclaw/plugin-sdk/error-runtime";
import {
DEFAULT_GOOGLE_MEET_AUDIO_INPUT_COMMAND,
DEFAULT_GOOGLE_MEET_AUDIO_OUTPUT_COMMAND,
} from "./config.js";
import { outputMentionsBlackHole2ch } from "./transports/chrome.js";
// One node-local audio bridge: a capture process whose stdout fills `chunks`
// and a playback process fed by pushAudio, shared across node-host command
// calls via the module-level `sessions` registry below.
type NodeBridgeSession = {
  id: string;
  // Capture process (e.g. SoX `rec`); its stdout is buffered into `chunks`.
  input?: ChildProcess;
  // Playback process (e.g. SoX `play`); pushAudio writes to its stdin.
  output?: ChildProcess;
  // Captured audio chunks awaiting a pullAudio call; bounded to the newest 200.
  chunks: Buffer[];
  // Resolvers for pullAudio callers blocked waiting for the next chunk.
  waiters: Array<() => void>;
  closed: boolean;
};

// Live bridge sessions keyed by bridge id, module-scoped so successive
// node-host command invocations (start/pullAudio/pushAudio/stop) can address
// the same bridge.
const sessions = new Map<string, NodeBridgeSession>();
/** Coerce an arbitrary value to a plain object record, falling back to `{}`. */
function asRecord(value: unknown): Record<string, unknown> {
  const isPlainObject =
    typeof value === "object" && value !== null && !Array.isArray(value);
  if (!isPlainObject) {
    return {};
  }
  return value as Record<string, unknown>;
}
/** Return the trimmed string when non-blank; otherwise `undefined`. */
function readString(value: unknown): string | undefined {
  if (typeof value !== "string") {
    return undefined;
  }
  const trimmed = value.trim();
  return trimmed.length > 0 ? trimmed : undefined;
}
/**
 * Keep only the non-empty string entries of an array parameter.
 * Returns `undefined` for non-arrays or when nothing usable remains.
 */
function readStringArray(value: unknown): string[] | undefined {
  if (!Array.isArray(value)) {
    return undefined;
  }
  const strings: string[] = [];
  for (const entry of value) {
    if (typeof entry === "string" && entry.length > 0) {
      strings.push(entry);
    }
  }
  return strings.length > 0 ? strings : undefined;
}
/** Accept only finite positive numbers; anything else yields `fallback`. */
function readNumber(value: unknown, fallback: number): number {
  if (typeof value !== "number" || !Number.isFinite(value) || value <= 0) {
    return fallback;
  }
  return value;
}
/**
 * Run `argv` synchronously with a timeout and normalize the result.
 * A spawn failure (or kill-by-timeout) without an exit status maps to code 1,
 * and its error message is surfaced via stderr when the process produced none.
 */
function runCommandWithTimeout(argv: string[], timeoutMs: number) {
  const [command, ...args] = argv;
  if (!command) {
    throw new Error("command must not be empty");
  }
  const result = spawnSync(command, args, { encoding: "utf8", timeout: timeoutMs });
  let code = 0;
  if (typeof result.status === "number") {
    code = result.status;
  } else if (result.error) {
    code = 1;
  }
  return {
    code,
    stdout: result.stdout ?? "",
    stderr: result.stderr ?? (result.error ? formatErrorMessage(result.error) : ""),
  };
}
/**
 * Throw unless this node is macOS and `system_profiler` reports a
 * `BlackHole 2ch` audio device in its combined output.
 */
function assertBlackHoleAvailable(timeoutMs: number) {
  if (process.platform !== "darwin") {
    throw new Error("Chrome Meet transport with blackhole-2ch audio is currently macOS-only");
  }
  const probe = runCommandWithTimeout(["system_profiler", "SPAudioDataType"], timeoutMs);
  const combined = [probe.stdout, probe.stderr].join("\n");
  if (probe.code !== 0 || !outputMentionsBlackHole2ch(combined)) {
    throw new Error("BlackHole 2ch audio device not found on the node.");
  }
}
/** Split an argv array into executable and arguments, rejecting empty input. */
function splitCommand(argv: string[]): { command: string; args: string[] } {
  if (argv.length === 0 || !argv[0]) {
    throw new Error("audio command must not be empty");
  }
  return { command: argv[0], args: argv.slice(1) };
}
/**
 * Resolve every pullAudio waiter currently blocked on this session.
 * Snapshots the waiter list first so resolvers registered during the
 * callbacks are left for the next wake.
 */
function wake(session: NodeBridgeSession) {
  const pending = session.waiters;
  session.waiters = [];
  for (const notify of pending) {
    notify();
  }
}
/**
 * Idempotently close a bridge session: terminate both audio processes and
 * release any pullAudio callers still waiting on data.
 */
function stopSession(session: NodeBridgeSession) {
  if (session.closed) {
    return;
  }
  session.closed = true;
  for (const proc of [session.input, session.output]) {
    proc?.kill("SIGTERM");
  }
  wake(session);
}
// Spawn the capture/playback command pair for one bridge session and register
// it in `sessions`. Either process exiting or erroring tears the pair down.
function startCommandPair(params: {
  inputCommand: string[];
  outputCommand: string[];
}): NodeBridgeSession {
  const input = splitCommand(params.inputCommand);
  const output = splitCommand(params.outputCommand);
  const session: NodeBridgeSession = {
    id: `meet_node_${randomUUID()}`,
    chunks: [],
    waiters: [],
    closed: false,
  };
  // Playback: we write audio into stdin; its stdout is ignored.
  const outputProcess = spawn(output.command, output.args, {
    stdio: ["pipe", "ignore", "pipe"],
  });
  // Capture: we read audio from stdout; it takes no stdin.
  const inputProcess = spawn(input.command, input.args, {
    stdio: ["ignore", "pipe", "pipe"],
  });
  session.input = inputProcess;
  session.output = outputProcess;
  inputProcess.stdout?.on("data", (chunk) => {
    session.chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
    // Cap the backlog at 200 chunks so a stalled consumer cannot grow memory
    // without bound; the oldest audio is dropped first.
    if (session.chunks.length > 200) {
      session.chunks.splice(0, session.chunks.length - 200);
    }
    // Unblock any pullAudio call currently waiting for data.
    wake(session);
  });
  // If either half of the pair dies, close the whole session.
  inputProcess.on("exit", () => stopSession(session));
  outputProcess.on("exit", () => stopSession(session));
  inputProcess.on("error", () => stopSession(session));
  outputProcess.on("error", () => stopSession(session));
  sessions.set(session.id, session);
  return session;
}
// Node-host `pullAudio` action: return the next buffered capture chunk for a
// bridge, long-polling up to timeoutMs (capped at 2s) when none is queued.
// `base64` is undefined when the wait timed out without new audio.
async function pullAudio(params: Record<string, unknown>) {
  const bridgeId = readString(params.bridgeId);
  if (!bridgeId) {
    throw new Error("bridgeId required");
  }
  const session = sessions.get(bridgeId);
  if (!session) {
    throw new Error(`unknown bridgeId: ${bridgeId}`);
  }
  // Cap the poll window so a node-host command never blocks for long.
  const timeoutMs = Math.min(readNumber(params.timeoutMs, 250), 2_000);
  if (session.chunks.length === 0 && !session.closed) {
    // Wait for whichever comes first: the timeout, or a wake() triggered by
    // fresh capture data or the session closing.
    await Promise.race([
      sleep(timeoutMs),
      new Promise<void>((resolve) => {
        session.waiters.push(resolve);
      }),
    ]);
  }
  const chunk = session.chunks.shift();
  return {
    bridgeId,
    closed: session.closed,
    base64: chunk ? chunk.toString("base64") : undefined,
  };
}
/**
 * Node-host `pushAudio` action: decode a base64 audio chunk and write it to
 * the session's playback process stdin. Throws when the bridge is missing,
 * already closed, or the required parameters are absent.
 */
function pushAudio(params: Record<string, unknown>) {
  const bridgeId = readString(params.bridgeId);
  const base64 = readString(params.base64);
  if (bridgeId === undefined || base64 === undefined) {
    throw new Error("bridgeId and base64 required");
  }
  const session = sessions.get(bridgeId);
  const open = session !== undefined && !session.closed;
  if (!open) {
    throw new Error(`bridge is not open: ${bridgeId}`);
  }
  session?.output?.stdin?.write(Buffer.from(base64, "base64"));
  return { bridgeId, ok: true };
}
// Node-host `start` action: verify audio prerequisites, optionally start an
// audio bridge, then launch Google Chrome at the Meet URL via `open -a`.
// Returns the bridge id (default realtime path only) for later
// pullAudio/pushAudio/stop calls.
function startChrome(params: Record<string, unknown>) {
  const url = readString(params.url);
  if (!url) {
    throw new Error("url required");
  }
  const timeoutMs = readNumber(params.joinTimeoutMs, 30_000);
  // Fail fast if the node lacks the BlackHole 2ch device (macOS-only check).
  assertBlackHoleAvailable(Math.min(timeoutMs, 10_000));
  const healthCommand = readStringArray(params.audioBridgeHealthCommand);
  if (healthCommand) {
    const health = runCommandWithTimeout(healthCommand, timeoutMs);
    if (health.code !== 0) {
      throw new Error(
        `Chrome audio bridge health check failed: ${health.stderr || health.stdout || health.code}`,
      );
    }
  }
  let bridgeId: string | undefined;
  let audioBridge: { type: "external-command" | "node-command-pair" } | undefined;
  const bridgeCommand = readStringArray(params.audioBridgeCommand);
  if (bridgeCommand) {
    // Caller supplied its own bridge command: run it synchronously; no
    // bridgeId is exposed because this module does not manage that bridge.
    const bridge = runCommandWithTimeout(bridgeCommand, timeoutMs);
    if (bridge.code !== 0) {
      throw new Error(
        `failed to start Chrome audio bridge: ${bridge.stderr || bridge.stdout || bridge.code}`,
      );
    }
    audioBridge = { type: "external-command" };
  } else if (params.mode === "realtime") {
    // Realtime mode without an explicit bridge command: spawn the default
    // SoX capture/playback pair locally and expose it via bridgeId.
    const session = startCommandPair({
      inputCommand: readStringArray(params.audioInputCommand) ?? [
        ...DEFAULT_GOOGLE_MEET_AUDIO_INPUT_COMMAND,
      ],
      outputCommand: readStringArray(params.audioOutputCommand) ?? [
        ...DEFAULT_GOOGLE_MEET_AUDIO_OUTPUT_COMMAND,
      ],
    });
    bridgeId = session.id;
    audioBridge = { type: "node-command-pair" };
  }
  // `launch: false` lets callers prepare the bridge without opening Chrome.
  if (params.launch !== false) {
    const argv = ["open", "-a", "Google Chrome"];
    const browserProfile = readString(params.browserProfile);
    if (browserProfile) {
      argv.push("--args", `--profile-directory=${browserProfile}`);
    }
    argv.push(url);
    const result = runCommandWithTimeout(argv, timeoutMs);
    if (result.code !== 0) {
      // Launch failed: tear down the bridge we just started so it doesn't leak.
      if (bridgeId) {
        const session = sessions.get(bridgeId);
        if (session) {
          stopSession(session);
        }
      }
      throw new Error(
        `failed to launch Chrome for Meet: ${result.stderr || result.stdout || result.code}`,
      );
    }
  }
  return { launched: params.launch !== false, bridgeId, audioBridge };
}
/**
 * Node-host `stop` action: tear down and forget the bridge session for
 * `bridgeId`. Missing or unknown ids are treated as already stopped.
 */
function stopChrome(params: Record<string, unknown>) {
  const bridgeId = readString(params.bridgeId);
  const session = bridgeId ? sessions.get(bridgeId) : undefined;
  if (!bridgeId || !session) {
    return { ok: true, stopped: false };
  }
  stopSession(session);
  sessions.delete(bridgeId);
  return { ok: true, stopped: true };
}
/**
 * Entry point for the `googlemeet.chrome` node-host command.
 * Parses the JSON parameter payload, dispatches on `action`
 * (setup / start / pullAudio / pushAudio / stop), and returns the
 * action result serialized as JSON.
 */
export async function handleGoogleMeetNodeHostCommand(paramsJSON?: string | null): Promise<string> {
  const params = asRecord(paramsJSON ? JSON.parse(paramsJSON) : {});
  const action = readString(params.action);
  let result: unknown;
  if (action === "setup") {
    // Setup only verifies the audio device; it starts nothing.
    assertBlackHoleAvailable(10_000);
    result = { ok: true };
  } else if (action === "start") {
    result = startChrome(params);
  } else if (action === "pullAudio") {
    result = await pullAudio(params);
  } else if (action === "pushAudio") {
    result = pushAudio(params);
  } else if (action === "stop") {
    result = stopChrome(params);
  } else {
    throw new Error("unsupported googlemeet.chrome action");
  }
  return JSON.stringify(result);
}

View File

@@ -0,0 +1,193 @@
import type { OpenClawConfig } from "openclaw/plugin-sdk/config-runtime";
import { formatErrorMessage } from "openclaw/plugin-sdk/error-runtime";
import type { PluginRuntime, RuntimeLogger } from "openclaw/plugin-sdk/plugin-runtime";
import {
createRealtimeVoiceBridgeSession,
type RealtimeVoiceBridgeSession,
type RealtimeVoiceProviderPlugin,
} from "openclaw/plugin-sdk/realtime-voice";
import {
consultOpenClawAgentForGoogleMeet,
GOOGLE_MEET_AGENT_CONSULT_TOOL_NAME,
resolveGoogleMeetRealtimeTools,
} from "./agent-consult.js";
import type { GoogleMeetConfig } from "./config.js";
import { resolveGoogleMeetRealtimeProvider } from "./realtime.js";
/**
 * Handle returned by {@link startNodeRealtimeAudioBridge} describing a live
 * realtime audio bridge whose audio I/O runs on a paired node.
 */
export type ChromeNodeRealtimeAudioBridgeHandle = {
  /** Discriminator: audio is pumped through a command pair on the node. */
  type: "node-command-pair";
  /** Id of the realtime voice provider resolved for this session. */
  providerId: string;
  /** Paired node that owns Chrome and the audio commands. */
  nodeId: string;
  /** Bridge session id issued by the node host's "start" action. */
  bridgeId: string;
  /** Idempotent teardown: closes the provider bridge and stops the node session. */
  stop: () => Promise<void>;
};
/** Coerce an unknown value to a plain object record; anything else becomes `{}`. */
function asRecord(value: unknown): Record<string, unknown> {
  if (value === null || typeof value !== "object" || Array.isArray(value)) {
    return {};
  }
  return value as Record<string, unknown>;
}
/** Return `value` only if it is a non-blank string (original, untrimmed). */
function readString(value: unknown): string | undefined {
  if (typeof value !== "string") {
    return undefined;
  }
  return value.trim().length > 0 ? value : undefined;
}
/**
 * Connect the Gateway-side realtime voice provider to an audio bridge that
 * physically runs on a paired node.
 *
 * The node host has already spawned its local audio command pair (the
 * `googlemeet.chrome` "start" action returned `bridgeId`). This function:
 *  - resolves and connects the realtime voice provider,
 *  - pushes provider audio to the node via the "pushAudio" action,
 *  - polls the node for captured audio via the "pullAudio" action,
 *  - routes the agent-consult tool call to the OpenClaw agent.
 *
 * Fix over the previous version: a failed `bridge.connect()` now tears down
 * the node-side audio session instead of leaking it.
 *
 * @returns A handle whose `stop` is idempotent: it closes the provider
 *   bridge and (best effort) asks the node to stop its audio session.
 */
export async function startNodeRealtimeAudioBridge(params: {
  config: GoogleMeetConfig;
  fullConfig: OpenClawConfig;
  runtime: PluginRuntime;
  meetingSessionId: string;
  nodeId: string;
  bridgeId: string;
  logger: RuntimeLogger;
  providers?: RealtimeVoiceProviderPlugin[];
}): Promise<ChromeNodeRealtimeAudioBridgeHandle> {
  let stopped = false;
  let bridge: RealtimeVoiceBridgeSession | null = null;
  const resolved = resolveGoogleMeetRealtimeProvider({
    config: params.config,
    fullConfig: params.fullConfig,
    providers: params.providers,
  });
  // Rolling window of final transcript lines, forwarded to agent consults.
  const transcript: Array<{ role: "user" | "assistant"; text: string }> = [];
  const stop = async () => {
    if (stopped) {
      return;
    }
    stopped = true;
    try {
      bridge?.close();
    } catch (error) {
      params.logger.debug?.(
        `[google-meet] node realtime bridge close ignored: ${formatErrorMessage(error)}`,
      );
    }
    // Best effort: ask the node to tear down its audio command pair.
    try {
      await params.runtime.nodes.invoke({
        nodeId: params.nodeId,
        command: "googlemeet.chrome",
        params: { action: "stop", bridgeId: params.bridgeId },
        timeoutMs: 5_000,
      });
    } catch (error) {
      params.logger.debug?.(
        `[google-meet] node audio bridge stop ignored: ${formatErrorMessage(error)}`,
      );
    }
  };
  bridge = createRealtimeVoiceBridgeSession({
    provider: resolved.provider,
    providerConfig: resolved.providerConfig,
    instructions: params.config.realtime.instructions,
    markStrategy: "ack-immediately",
    tools: resolveGoogleMeetRealtimeTools(params.config.realtime.toolPolicy),
    audioSink: {
      isOpen: () => !stopped,
      // Provider audio (μ-law per the parameter name) is pushed to the node
      // as base64. A failed push tears the whole bridge down.
      sendAudio: (muLaw) => {
        void params.runtime.nodes
          .invoke({
            nodeId: params.nodeId,
            command: "googlemeet.chrome",
            params: {
              action: "pushAudio",
              bridgeId: params.bridgeId,
              base64: Buffer.from(muLaw).toString("base64"),
            },
            timeoutMs: 5_000,
          })
          .catch((error) => {
            params.logger.warn(
              `[google-meet] node audio output failed: ${formatErrorMessage(error)}`,
            );
            void stop();
          });
      },
    },
    onTranscript: (role, text, isFinal) => {
      if (isFinal) {
        transcript.push({ role, text });
        // Cap the transcript window at the 40 most recent final entries.
        if (transcript.length > 40) {
          transcript.splice(0, transcript.length - 40);
        }
        params.logger.debug?.(`[google-meet] ${role}: ${text}`);
      }
    },
    onToolCall: (event, session) => {
      // Only the agent-consult tool is supported; reject anything else.
      if (event.name !== GOOGLE_MEET_AGENT_CONSULT_TOOL_NAME) {
        session.submitToolResult(event.callId || event.itemId, {
          error: `Tool "${event.name}" not available`,
        });
        return;
      }
      void consultOpenClawAgentForGoogleMeet({
        config: params.config,
        fullConfig: params.fullConfig,
        runtime: params.runtime,
        logger: params.logger,
        meetingSessionId: params.meetingSessionId,
        args: event.args,
        transcript,
      })
        .then((result) => {
          session.submitToolResult(event.callId || event.itemId, result);
        })
        .catch((error: Error) => {
          session.submitToolResult(event.callId || event.itemId, {
            error: formatErrorMessage(error),
          });
        });
    },
    onError: (error) => {
      params.logger.warn(
        `[google-meet] node realtime voice bridge failed: ${formatErrorMessage(error)}`,
      );
      void stop();
    },
    onClose: (reason) => {
      if (reason === "error") {
        void stop();
      }
    },
  });
  // Bug fix: if the provider connection fails, stop the node-side audio
  // session that is already running instead of leaking it.
  try {
    await bridge.connect();
  } catch (error) {
    await stop();
    throw error;
  }
  // Input pump: poll the node for captured audio until stopped or closed.
  void (async () => {
    for (;;) {
      if (stopped) {
        break;
      }
      try {
        const raw = await params.runtime.nodes.invoke({
          nodeId: params.nodeId,
          command: "googlemeet.chrome",
          params: { action: "pullAudio", bridgeId: params.bridgeId, timeoutMs: 250 },
          timeoutMs: 2_000,
        });
        const result = asRecord(raw);
        const base64 = readString(result.base64);
        if (base64) {
          bridge?.sendAudio(Buffer.from(base64, "base64"));
        }
        if (result.closed === true) {
          await stop();
        }
      } catch (error) {
        if (!stopped) {
          params.logger.warn(`[google-meet] node audio input failed: ${formatErrorMessage(error)}`);
          await stop();
        }
      }
    }
  })();
  return {
    type: "node-command-pair",
    providerId: resolved.provider.id,
    nodeId: params.nodeId,
    bridgeId: params.bridgeId,
    stop,
  };
}

View File

@@ -5,7 +5,7 @@ import type { PluginRuntime, RuntimeLogger } from "openclaw/plugin-sdk/plugin-ru
import { normalizeOptionalString } from "openclaw/plugin-sdk/text-runtime";
import type { GoogleMeetConfig, GoogleMeetMode, GoogleMeetTransport } from "./config.js";
import { getGoogleMeetSetupStatus } from "./setup.js";
import { launchChromeMeet } from "./transports/chrome.js";
import { launchChromeMeet, launchChromeMeetOnNode } from "./transports/chrome.js";
import { buildMeetDtmfSequence, normalizeDialInNumber } from "./transports/twilio.js";
import type {
GoogleMeetJoinRequest,
@@ -94,7 +94,11 @@ export class GoogleMeetRuntime {
createdAt,
updatedAt: createdAt,
participantIdentity:
transport === "chrome" ? "signed-in Google Chrome profile" : "Twilio phone participant",
transport === "twilio"
? "Twilio phone participant"
: transport === "chrome-node"
? "signed-in Google Chrome profile on a paired node"
: "signed-in Google Chrome profile",
realtime: {
enabled: mode === "realtime",
provider: this.params.config.realtime.provider,
@@ -105,36 +109,54 @@ export class GoogleMeetRuntime {
};
try {
if (transport === "chrome") {
const result = await launchChromeMeet({
runtime: this.params.runtime,
config: this.params.config,
fullConfig: this.params.fullConfig,
meetingSessionId: session.id,
mode,
url,
logger: this.params.logger,
});
if (transport === "chrome" || transport === "chrome-node") {
const result =
transport === "chrome-node"
? await launchChromeMeetOnNode({
runtime: this.params.runtime,
config: this.params.config,
fullConfig: this.params.fullConfig,
meetingSessionId: session.id,
mode,
url,
logger: this.params.logger,
})
: await launchChromeMeet({
runtime: this.params.runtime,
config: this.params.config,
fullConfig: this.params.fullConfig,
meetingSessionId: session.id,
mode,
url,
logger: this.params.logger,
});
session.chrome = {
audioBackend: this.params.config.chrome.audioBackend,
launched: result.launched,
nodeId: "nodeId" in result ? result.nodeId : undefined,
browserProfile: this.params.config.chrome.browserProfile,
audioBridge: result.audioBridge
? {
type: result.audioBridge.type,
provider:
result.audioBridge.type === "command-pair"
result.audioBridge.type === "command-pair" ||
result.audioBridge.type === "node-command-pair"
? result.audioBridge.providerId
: undefined,
}
: undefined,
};
if (result.audioBridge?.type === "command-pair") {
if (
result.audioBridge?.type === "command-pair" ||
result.audioBridge?.type === "node-command-pair"
) {
this.#sessionStops.set(session.id, result.audioBridge.stop);
}
session.notes.push(
result.audioBridge
? "Chrome transport joins as the signed-in Google profile and routes realtime audio through the configured bridge."
? transport === "chrome-node"
? "Chrome node transport joins as the signed-in Google profile on the selected node and routes realtime audio through the node bridge."
: "Chrome transport joins as the signed-in Google profile and routes realtime audio through the configured bridge."
: "Chrome transport joins as the signed-in Google profile and expects BlackHole 2ch audio routing.",
);
} else {

View File

@@ -2,6 +2,10 @@ import type { OpenClawConfig } from "openclaw/plugin-sdk/config-runtime";
import type { PluginRuntime } from "openclaw/plugin-sdk/plugin-runtime";
import type { RuntimeLogger } from "openclaw/plugin-sdk/plugin-runtime";
import type { GoogleMeetConfig } from "../config.js";
import {
startNodeRealtimeAudioBridge,
type ChromeNodeRealtimeAudioBridgeHandle,
} from "../realtime-node.js";
import {
startCommandRealtimeAudioBridge,
type ChromeRealtimeAudioBridgeHandle,
@@ -146,3 +150,127 @@ export async function launchChromeMeet(params: {
throw error;
}
}
/** A usable Meet node is connected and advertises the `googlemeet.chrome` command. */
function isGoogleMeetNode(node: {
  commands?: string[];
  connected?: boolean;
  nodeId?: string;
  displayName?: string;
  remoteIp?: string;
}) {
  if (node.connected !== true) {
    return false;
  }
  const { commands } = node;
  return Array.isArray(commands) && commands.includes("googlemeet.chrome");
}
/**
 * Pick the paired node that should host Chrome.
 *
 * With an explicit `requestedNode`, it must match exactly one connected
 * Meet-capable node by id, display name, or remote IP. Without one, a single
 * connected Meet-capable node is auto-selected; zero or several is an error.
 */
async function resolveChromeNode(params: {
  runtime: PluginRuntime;
  requestedNode?: string;
}): Promise<string> {
  const listed = await params.runtime.nodes.list({ connected: true });
  const candidates = listed.nodes.filter(isGoogleMeetNode);
  if (candidates.length === 0) {
    throw new Error(
      "No connected Google Meet-capable node. Run `openclaw node run` on the Chrome host and approve pairing.",
    );
  }
  const requested = params.requestedNode?.trim();
  if (!requested) {
    if (candidates.length === 1) {
      return candidates[0].nodeId;
    }
    throw new Error(
      "Multiple Google Meet-capable nodes connected. Set plugins.entries.google-meet.config.chromeNode.node.",
    );
  }
  const matches = candidates.filter((node) =>
    [node.nodeId, node.displayName, node.remoteIp].some((value) => value === requested),
  );
  if (matches.length !== 1) {
    throw new Error(`Google Meet node not found or ambiguous: ${requested}`);
  }
  return matches[0].nodeId;
}
/**
 * Validate that a node "start" reply is object-shaped and narrow its type.
 * Field values are still optional; callers check them individually.
 */
function parseNodeStartResult(raw: unknown): {
  launched?: boolean;
  bridgeId?: string;
  audioBridge?: { type?: string };
} {
  const isObjectLike = Boolean(raw) && typeof raw === "object";
  if (!isObjectLike) {
    throw new Error("Google Meet node returned an invalid start result.");
  }
  return raw as {
    launched?: boolean;
    bridgeId?: string;
    audioBridge?: { type?: string };
  };
}
/**
 * Launch the Google Meet Chrome transport on a paired node.
 *
 * Resolves the target node, invokes the node-side "start" action with the
 * configured Chrome/audio settings, and — when the node reports a
 * `node-command-pair` bridge — connects the Gateway-side realtime voice
 * bridge to it.
 */
export async function launchChromeMeetOnNode(params: {
  runtime: PluginRuntime;
  config: GoogleMeetConfig;
  fullConfig: OpenClawConfig;
  meetingSessionId: string;
  mode: "realtime" | "transcribe";
  url: string;
  logger: RuntimeLogger;
}): Promise<{
  nodeId: string;
  launched: boolean;
  audioBridge?:
    | { type: "external-command" }
    | ({ type: "node-command-pair" } & ChromeNodeRealtimeAudioBridgeHandle);
}> {
  const nodeId = await resolveChromeNode({
    runtime: params.runtime,
    requestedNode: params.config.chromeNode.node,
  });
  const chrome = params.config.chrome;
  // The invoke timeout leaves 5s of headroom beyond the node's join timeout.
  const raw = await params.runtime.nodes.invoke({
    nodeId,
    command: "googlemeet.chrome",
    params: {
      action: "start",
      url: params.url,
      mode: params.mode,
      launch: chrome.launch,
      browserProfile: chrome.browserProfile,
      joinTimeoutMs: chrome.joinTimeoutMs,
      audioInputCommand: chrome.audioInputCommand,
      audioOutputCommand: chrome.audioOutputCommand,
      audioBridgeCommand: chrome.audioBridgeCommand,
      audioBridgeHealthCommand: chrome.audioBridgeHealthCommand,
    },
    timeoutMs: chrome.joinTimeoutMs + 5_000,
  });
  const result = parseNodeStartResult(raw);
  const launched = result.launched === true;
  const bridgeType = result.audioBridge?.type;
  if (bridgeType === "node-command-pair") {
    const bridgeId = result.bridgeId;
    if (!bridgeId) {
      throw new Error("Google Meet node did not return an audio bridge id.");
    }
    const audioBridge = await startNodeRealtimeAudioBridge({
      config: params.config,
      fullConfig: params.fullConfig,
      runtime: params.runtime,
      meetingSessionId: params.meetingSessionId,
      nodeId,
      bridgeId,
      logger: params.logger,
    });
    return { nodeId, launched, audioBridge };
  }
  if (bridgeType === "external-command") {
    return { nodeId, launched, audioBridge: { type: "external-command" } };
  }
  return { nodeId, launched };
}

View File

@@ -29,9 +29,10 @@ export type GoogleMeetSession = {
chrome?: {
audioBackend: "blackhole-2ch";
launched: boolean;
nodeId?: string;
browserProfile?: string;
audioBridge?: {
type: "command-pair" | "external-command";
type: "command-pair" | "node-command-pair" | "external-command";
provider?: string;
};
};

View File

@@ -3,9 +3,10 @@ import { applyPluginAutoEnable } from "../config/plugin-auto-enable.js";
import type { OpenClawConfig } from "../config/types.openclaw.js";
import type { PluginRegistry } from "../plugins/registry.js";
import { pinActivePluginChannelRegistry } from "../plugins/runtime.js";
import { setGatewaySubagentRuntime } from "../plugins/runtime/index.js";
import { setGatewayNodesRuntime, setGatewaySubagentRuntime } from "../plugins/runtime/index.js";
import type { GatewayRequestHandler } from "./server-methods/types.js";
import {
createGatewayNodesRuntime,
createGatewaySubagentRuntime,
loadGatewayPlugins,
setPluginSubagentOverridePolicies,
@@ -35,6 +36,7 @@ type GatewayPluginBootstrapParams = {
function installGatewayPluginRuntimeEnvironment(cfg: OpenClawConfig) {
setPluginSubagentOverridePolicies(cfg);
setGatewaySubagentRuntime(createGatewaySubagentRuntime());
setGatewayNodesRuntime(createGatewayNodesRuntime());
}
function logGatewayPluginDiagnostics(params: {

View File

@@ -384,6 +384,28 @@ export function createGatewaySubagentRuntime(): PluginRuntime["subagent"] {
};
}
/**
 * Gateway-side implementation of the plugin `nodes` runtime, backed by the
 * `node.list` and `node.invoke` gateway methods.
 */
export function createGatewayNodesRuntime(): PluginRuntime["nodes"] {
  const list: PluginRuntime["nodes"]["list"] = async (params) => {
    // Only forward the filter when the caller explicitly asked for it.
    const filter = params?.connected === true ? { connected: true } : {};
    const payload = await dispatchGatewayMethod<{ nodes?: unknown[] }>("node.list", filter);
    const nodes = Array.isArray(payload?.nodes) ? payload.nodes : [];
    return { nodes: nodes as Awaited<ReturnType<PluginRuntime["nodes"]["list"]>>["nodes"] };
  };
  const invoke: PluginRuntime["nodes"]["invoke"] = async (params) => {
    const request: Record<string, unknown> = {
      nodeId: params.nodeId,
      command: params.command,
      timeoutMs: params.timeoutMs,
      // Generate a key when the caller did not supply one.
      idempotencyKey: params.idempotencyKey || randomUUID(),
    };
    if (params.params !== undefined) {
      request.params = params.params;
    }
    return dispatchGatewayMethod<unknown>("node.invoke", request);
  };
  return { list, invoke };
}
// ── Plugin loading ──────────────────────────────────────────────────
function createGatewayPluginRegistrationLogger(params?: {

View File

@@ -146,12 +146,14 @@ const GATEWAY_SUBAGENT_SYMBOL: unique symbol = Symbol.for(
// Process-global slots for gateway-installed plugin runtimes; both stay
// undefined outside the Gateway.
type GatewaySubagentState = {
  subagent: PluginRuntime["subagent"] | undefined;
  nodes: PluginRuntime["nodes"] | undefined;
};
// Keyed on a global symbol so the same state survives module reloads.
const gatewaySubagentState = resolveGlobalSingleton<GatewaySubagentState>(
  GATEWAY_SUBAGENT_SYMBOL,
  () => ({
    subagent: undefined,
    nodes: undefined,
  }),
);
@@ -165,12 +167,17 @@ export function setGatewaySubagentRuntime(subagent: PluginRuntime["subagent"]):
gatewaySubagentState.subagent = subagent;
}
/** Install the gateway-backed nodes runtime into the process-global state. */
export function setGatewayNodesRuntime(nodes: PluginRuntime["nodes"]): void {
  gatewaySubagentState.nodes = nodes;
}
/**
 * Reset the process-global gateway runtime state (both the subagent and
 * nodes runtimes). Used by tests to avoid leaking gateway state across
 * module reloads.
 */
export function clearGatewaySubagentRuntime(): void {
  gatewaySubagentState.subagent = undefined;
  gatewaySubagentState.nodes = undefined;
}
/**
@@ -200,6 +207,29 @@ function createLateBindingSubagent(
});
}
/** Nodes runtime stub whose every method throws: valid only inside the Gateway. */
function createUnavailableNodesRuntime(): PluginRuntime["nodes"] {
  const fail = () => {
    throw new Error("Plugin node runtime is only available inside the Gateway.");
  };
  return { list: fail, invoke: fail };
}
/**
 * Build the nodes runtime handed to plugins. Outside the Gateway it is the
 * always-throwing stub; inside, a proxy resolves the gateway-installed
 * runtime on every property access, so plugins created before installation
 * still reach it once it is set.
 */
function createLateBindingNodes(allowGatewayBinding = false): PluginRuntime["nodes"] {
  const fallback = createUnavailableNodesRuntime();
  if (!allowGatewayBinding) {
    return fallback;
  }
  return new Proxy(fallback, {
    get(_target, prop) {
      const bound = gatewaySubagentState.nodes ?? fallback;
      return Reflect.get(bound, prop, bound);
    },
  });
}
export function createPluginRuntime(_options: CreatePluginRuntimeOptions = {}): PluginRuntime {
const mediaUnderstanding = createRuntimeMediaUnderstandingFacade();
const taskFlow = createRuntimeTaskFlow();
@@ -216,6 +246,7 @@ export function createPluginRuntime(_options: CreatePluginRuntimeOptions = {}):
_options.subagent,
_options.allowGatewaySubagentBinding === true,
),
nodes: createLateBindingNodes(_options.allowGatewaySubagentBinding === true),
system: createRuntimeSystem(),
media: createRuntimeMedia(),
webSearch: {

View File

@@ -50,6 +50,29 @@ export type SubagentDeleteSessionParams = {
deleteTranscript?: boolean;
};
/** Filter options for the plugin runtime's node listing. */
export type RuntimeNodeListParams = {
  /** When true, only currently-connected nodes are returned. */
  connected?: boolean;
};
/** Result of a node listing: one descriptor per known node. */
export type RuntimeNodeListResult = {
  nodes: Array<{
    nodeId: string;
    /** Human-friendly name, when the node advertised one. */
    displayName?: string;
    remoteIp?: string;
    connected?: boolean;
    /** Presumably capability identifiers — confirm against the node.list payload. */
    caps?: string[];
    /** Command names the node can execute via invoke(). */
    commands?: string[];
  }>;
};
/** Parameters for invoking a single command on a paired node. */
export type RuntimeNodeInvokeParams = {
  nodeId: string;
  command: string;
  /** Command-specific payload; forwarded opaquely to the node. */
  params?: unknown;
  timeoutMs?: number;
  /** Key for deduplicating retried invocations — TODO confirm gateway semantics. */
  idempotencyKey?: string;
};
/** Trusted in-process runtime surface injected into native plugins. */
export type PluginRuntime = PluginRuntimeCore & {
subagent: {
@@ -62,6 +85,10 @@ export type PluginRuntime = PluginRuntimeCore & {
getSession: (params: SubagentGetSessionParams) => Promise<SubagentGetSessionResult>;
deleteSession: (params: SubagentDeleteSessionParams) => Promise<void>;
};
nodes: {
list: (params?: RuntimeNodeListParams) => Promise<RuntimeNodeListResult>;
invoke: (params: RuntimeNodeInvokeParams) => Promise<unknown>;
};
channel: PluginRuntimeChannel;
};

View File

@@ -441,6 +441,10 @@ export function createPluginRuntimeMock(overrides: DeepPartial<PluginRuntime> =
getSession: vi.fn(),
deleteSession: vi.fn(),
},
nodes: {
list: vi.fn(async () => ({ nodes: [] })),
invoke: vi.fn(),
},
};
return mergeDeep(base, overrides);