refactor: reuse shared dedupe helpers

This commit is contained in:
Peter Steinberger
2026-04-29 12:14:36 +01:00
parent 81ad827380
commit 34ec184dcb
10 changed files with 74 additions and 1523 deletions

View File

@@ -1,142 +1,9 @@
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import { movePathToTrash as movePathToTrashWithAllowedRoots } from "openclaw/plugin-sdk/browser-config";
import { resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
const TRASH_DESTINATION_COLLISION_CODES = new Set(["EEXIST", "ENOTEMPTY", "ERR_FS_CP_EEXIST"]);
const TRASH_DESTINATION_RETRY_LIMIT = 4;
/**
 * Best-effort extraction of a Node errno code (e.g. "ENOENT") from a thrown
 * value. Returns undefined when the value is not an object or its `code`
 * property is not a string.
 */
function getFsErrorCode(error: unknown): string | undefined {
  if (error && typeof error === "object" && "code" in error) {
    const { code } = error as NodeJS.ErrnoException;
    if (typeof code === "string") {
      return code;
    }
  }
  return undefined;
}
/** True when the error indicates the chosen trash destination already exists. */
function isTrashDestinationCollision(error: unknown): boolean {
  const code = getFsErrorCode(error);
  if (!code) {
    return false;
  }
  return TRASH_DESTINATION_COLLISION_CODES.has(code);
}
/** True when `candidate` equals `parent` or lives somewhere beneath it. */
function isSameOrChildPath(candidate: string, parent: string): boolean {
  if (candidate === parent) {
    return true;
  }
  return candidate.startsWith(parent + path.sep);
}
/**
 * Builds the deduplicated list of directories we may trash from: the user's
 * home directory and the preferred OpenClaw temp directory. Each root is
 * resolved through realpath when possible so symlinked roots compare equal.
 */
function resolveAllowedTrashRoots(): string[] {
  const resolved: string[] = [];
  for (const root of [os.homedir(), resolvePreferredOpenClawTmpDir()]) {
    let canonical: string;
    try {
      canonical = path.resolve(fs.realpathSync.native(root));
    } catch {
      // Fall back to a plain resolve when the root cannot be realpath'd.
      canonical = path.resolve(root);
    }
    if (!resolved.includes(canonical)) {
      resolved.push(canonical);
    }
  }
  return resolved;
}
/**
 * Throws unless `targetPath` resolves to a strict child of one of the allowed
 * trash roots. The roots themselves are rejected so a root is never trashed.
 */
function assertAllowedTrashTarget(targetPath: string): void {
  let resolved: string;
  try {
    resolved = path.resolve(fs.realpathSync.native(targetPath));
  } catch {
    // Missing or inaccessible targets surface later, during the move itself.
    resolved = path.resolve(targetPath);
  }
  for (const root of resolveAllowedTrashRoots()) {
    if (resolved !== root && isSameOrChildPath(resolved, root)) {
      return;
    }
  }
  throw new Error(`Refusing to trash path outside allowed roots: ${targetPath}`);
}
/**
 * Ensures `~/.Trash` exists with owner-only permissions (0700) and returns
 * its canonical (realpath'd) location.
 * Refuses a trash dir that is not a real directory, and one whose resolved
 * path is not strictly inside the resolved home directory.
 */
function resolveTrashDir(): string {
  const homeDir = os.homedir();
  const trashDir = path.join(homeDir, ".Trash");
  fs.mkdirSync(trashDir, { recursive: true, mode: 0o700 });
  // lstat (not stat) so a symlink planted at ~/.Trash is detected, not followed.
  const trashDirStat = fs.lstatSync(trashDir);
  if (!trashDirStat.isDirectory() || trashDirStat.isSymbolicLink()) {
    throw new Error(`Refusing to use non-directory/symlink trash directory: ${trashDir}`);
  }
  const realHome = path.resolve(fs.realpathSync.native(homeDir));
  const resolvedTrashDir = path.resolve(fs.realpathSync.native(trashDir));
  if (resolvedTrashDir === realHome || !isSameOrChildPath(resolvedTrashDir, realHome)) {
    throw new Error(`Trash directory escaped home directory: ${trashDir}`);
  }
  return resolvedTrashDir;
}
/**
 * Derives a safe basename for the trashed entry. Rejects filesystem roots and
 * names that collapse to nothing once path separators are stripped.
 */
function trashBaseName(targetPath: string): string {
  const resolved = path.resolve(targetPath);
  const { root } = path.parse(resolved);
  if (resolved === root) {
    throw new Error(`Refusing to trash root path: ${targetPath}`);
  }
  const safeBase = path.basename(resolved).replace(/[\\/]+/g, "");
  if (safeBase.length === 0) {
    throw new Error(`Unable to derive safe trash basename for: ${targetPath}`);
  }
  return safeBase;
}
/**
 * Resolves `leaf` against `root` and verifies the result is a strict child of
 * `root`, guarding against `..`/absolute-leaf escapes.
 */
function resolveContainedPath(root: string, leaf: string): string {
  const base = path.resolve(root);
  const candidate = path.resolve(base, leaf);
  const escaped = candidate === base || !isSameOrChildPath(candidate, base);
  if (escaped) {
    throw new Error(`Trash destination escaped trash directory: ${candidate}`);
  }
  return candidate;
}
/**
 * Creates a unique container directory inside the trash dir (via mkdtemp) and
 * returns the destination path `<container>/<base>` for the trashed entry.
 * Re-validates that the created container did not escape the trash dir,
 * as defense in depth against a hostile or racing trash directory.
 */
function reserveTrashDestination(trashDir: string, base: string, timestamp: number): string {
  const containerPrefix = resolveContainedPath(trashDir, `${base}-${timestamp}-`);
  const container = fs.mkdtempSync(containerPrefix);
  const resolvedContainer = path.resolve(container);
  const resolvedTrashDir = path.resolve(trashDir);
  if (
    resolvedContainer === resolvedTrashDir ||
    !isSameOrChildPath(resolvedContainer, resolvedTrashDir)
  ) {
    throw new Error(`Trash destination escaped trash directory: ${container}`);
  }
  return resolveContainedPath(container, base);
}
/**
 * Moves `targetPath` to `dest`, preferring an atomic rename and falling back
 * to copy+delete when the move crosses filesystems (EXDEV).
 *
 * @returns true on success; false when the destination already exists
 *   (collision — the caller should retry with a fresh destination).
 * @throws any other filesystem error.
 */
function movePathToDestination(targetPath: string, dest: string): boolean {
  try {
    fs.renameSync(targetPath, dest);
    return true;
  } catch (error) {
    if (getFsErrorCode(error) !== "EXDEV") {
      if (isTrashDestinationCollision(error)) {
        return false;
      }
      throw error;
    }
    // EXDEV: fall through to the cross-device copy+remove path below.
  }
  try {
    // force:false + errorOnExist:true preserve the "never overwrite" contract.
    fs.cpSync(targetPath, dest, { recursive: true, force: false, errorOnExist: true });
    fs.rmSync(targetPath, { recursive: true, force: false });
    return true;
  } catch (error) {
    if (isTrashDestinationCollision(error)) {
      return false;
    }
    throw error;
  }
}
/**
 * Moves `targetPath` into the user's trash, restricted to the allowed roots
 * (the home directory and the preferred OpenClaw temp dir).
 *
 * Delegates to the shared SDK helper so trash semantics live in one place;
 * this also avoids resolving external trash binaries through the service
 * PATH during cleanup.
 *
 * Fix: the legacy in-file implementation had been left above the new SDK
 * delegation, so the delegation was unreachable dead code after the loop's
 * return/throw. Only the delegation remains.
 *
 * @returns the destination path inside the trash directory.
 */
export async function movePathToTrash(targetPath: string): Promise<string> {
  return await movePathToTrashWithAllowedRoots(targetPath, {
    allowedRoots: [os.homedir(), resolvePreferredOpenClawTmpDir()],
  });
}

View File

@@ -1,169 +1,6 @@
import path from "node:path";
import { loadJsonFile, saveJsonFile } from "openclaw/plugin-sdk/json-store";
import { resolveProviderEndpoint } from "openclaw/plugin-sdk/provider-model-shared";
import { resolveStateDir } from "openclaw/plugin-sdk/state-paths";
import { normalizeLowercaseStringOrEmpty } from "openclaw/plugin-sdk/string-coerce-runtime";
const COPILOT_TOKEN_URL = "https://api.github.com/copilot_internal/v2/token";
const COPILOT_EDITOR_VERSION = "vscode/1.96.2";
const COPILOT_USER_AGENT = "GitHubCopilotChat/0.26.7";
const COPILOT_EDITOR_PLUGIN_VERSION = "copilot-chat/0.35.0";
const COPILOT_GITHUB_API_VERSION = "2025-04-01";
export const DEFAULT_COPILOT_API_BASE_URL = "https://api.individual.githubcopilot.com";
export type CachedCopilotToken = {
token: string;
expiresAt: number;
updatedAt: number;
};
/**
 * Builds the IDE-identification headers GitHub Copilot endpoints expect.
 * Optionally includes the GitHub API version header for REST calls.
 */
function buildCopilotIdeHeaders(
  params: {
    includeApiVersion?: boolean;
  } = {},
): Record<string, string> {
  const headers: Record<string, string> = {
    "Editor-Version": COPILOT_EDITOR_VERSION,
    "Editor-Plugin-Version": COPILOT_EDITOR_PLUGIN_VERSION,
    "User-Agent": COPILOT_USER_AGENT,
  };
  if (params.includeApiVersion) {
    headers["X-Github-Api-Version"] = COPILOT_GITHUB_API_VERSION;
  }
  return headers;
}
/** Location of the cached Copilot API token under the OpenClaw state dir. */
function resolveCopilotTokenCachePath(env: NodeJS.ProcessEnv = process.env) {
  const stateDir = resolveStateDir(env);
  return path.join(stateDir, "credentials", "github-copilot.token.json");
}
/** A cached token is usable while more than five minutes remain before expiry. */
function isTokenUsable(cache: CachedCopilotToken, now = Date.now()): boolean {
  const fiveMinutesMs = 5 * 60 * 1000;
  const remainingMs = cache.expiresAt - now;
  return remainingMs > fiveMinutesMs;
}
/**
 * Validates the Copilot token-exchange response and normalizes `expires_at`
 * to epoch milliseconds (values that look like epoch seconds are scaled up).
 * @throws Error when the payload is not an object or lacks token/expiry.
 */
function parseCopilotTokenResponse(value: unknown): {
  token: string;
  expiresAt: number;
} {
  if (!value || typeof value !== "object") {
    throw new Error("Unexpected response from GitHub Copilot token endpoint");
  }
  const record = value as Record<string, unknown>;
  const rawToken = record.token;
  if (typeof rawToken !== "string" || rawToken.trim().length === 0) {
    throw new Error("Copilot token response missing token");
  }
  const rawExpiry = record.expires_at;
  let expiry: number;
  if (typeof rawExpiry === "number" && Number.isFinite(rawExpiry)) {
    expiry = rawExpiry;
  } else if (typeof rawExpiry === "string" && rawExpiry.trim().length > 0) {
    expiry = Number.parseInt(rawExpiry, 10);
    if (!Number.isFinite(expiry)) {
      throw new Error("Copilot token response has invalid expires_at");
    }
  } else {
    throw new Error("Copilot token response missing expires_at");
  }
  // Heuristic: epoch seconds are far below 1e11; scale them to milliseconds.
  const expiresAtMs = expiry < 100_000_000_000 ? expiry * 1000 : expiry;
  return { token: rawToken, expiresAt: expiresAtMs };
}
/**
 * Extracts the lowercase hostname from a `proxy-ep` value, tolerating bare
 * hosts by assuming https. Returns null for blanks or non-http(s) schemes.
 */
function resolveCopilotProxyHost(proxyEp: string): string | null {
  const candidate = proxyEp.trim();
  if (candidate.length === 0) {
    return null;
  }
  const withScheme = /^https?:\/\//i.test(candidate) ? candidate : `https://${candidate}`;
  let parsed: URL;
  try {
    parsed = new URL(withScheme);
  } catch {
    return null;
  }
  const isHttp = parsed.protocol === "http:" || parsed.protocol === "https:";
  return isHttp ? normalizeLowercaseStringOrEmpty(parsed.hostname) : null;
}
/**
 * Derives the Copilot API base URL embedded in a token's `proxy-ep` field,
 * mapping a `proxy.<host>` prefix to `api.<host>`. Returns null when the
 * token carries no usable proxy endpoint or the derived endpoint is invalid.
 */
export function deriveCopilotApiBaseUrlFromToken(token: string): string | null {
  const trimmedToken = token.trim();
  if (!trimmedToken) {
    return null;
  }
  const proxyMatch = trimmedToken.match(/(?:^|;)\s*proxy-ep=([^;\s]+)/i);
  const proxyEp = proxyMatch?.[1]?.trim();
  if (!proxyEp) {
    return null;
  }
  const proxyHost = resolveCopilotProxyHost(proxyEp);
  if (!proxyHost) {
    return null;
  }
  const apiHost = proxyHost.replace(/^proxy\./i, "api.");
  const baseUrl = `https://${apiHost}`;
  if (resolveProviderEndpoint(baseUrl).endpointClass === "invalid") {
    return null;
  }
  return baseUrl;
}
/**
 * Resolves a short-lived Copilot API token for the given GitHub token.
 *
 * Checks the on-disk cache first (usable while >5 minutes of validity remain);
 * otherwise exchanges the GitHub token at the Copilot token endpoint, caches
 * the result, and returns it. The API base URL is derived from the token's
 * `proxy-ep` hint when present, falling back to the individual default.
 *
 * All I/O (fetch, cache load/save) is injectable for testing.
 * @throws Error when the token exchange responds non-2xx or with a bad payload.
 */
export async function resolveCopilotApiToken(params: {
  githubToken: string;
  env?: NodeJS.ProcessEnv;
  fetchImpl?: typeof fetch;
  cachePath?: string;
  loadJsonFileImpl?: (path: string) => unknown;
  saveJsonFileImpl?: (path: string, value: CachedCopilotToken) => void;
}): Promise<{
  token: string;
  expiresAt: number;
  source: string;
  baseUrl: string;
}> {
  const env = params.env ?? process.env;
  const cachePath = params.cachePath?.trim() || resolveCopilotTokenCachePath(env);
  const loadJsonFileFn = params.loadJsonFileImpl ?? loadJsonFile;
  const saveJsonFileFn = params.saveJsonFileImpl ?? saveJsonFile;
  // NOTE(review): cached contents get only a shallow shape check before use.
  const cached = loadJsonFileFn(cachePath) as CachedCopilotToken | undefined;
  if (cached && typeof cached.token === "string" && typeof cached.expiresAt === "number") {
    if (isTokenUsable(cached)) {
      return {
        token: cached.token,
        expiresAt: cached.expiresAt,
        source: `cache:${cachePath}`,
        baseUrl: deriveCopilotApiBaseUrlFromToken(cached.token) ?? DEFAULT_COPILOT_API_BASE_URL,
      };
    }
  }
  const fetchImpl = params.fetchImpl ?? fetch;
  const res = await fetchImpl(COPILOT_TOKEN_URL, {
    method: "GET",
    headers: {
      Accept: "application/json",
      Authorization: `Bearer ${params.githubToken}`,
      ...buildCopilotIdeHeaders({ includeApiVersion: true }),
    },
  });
  if (!res.ok) {
    throw new Error(`Copilot token exchange failed: HTTP ${res.status}`);
  }
  const json = parseCopilotTokenResponse(await res.json());
  const payload: CachedCopilotToken = {
    token: json.token,
    expiresAt: json.expiresAt,
    updatedAt: Date.now(),
  };
  saveJsonFileFn(cachePath, payload);
  return {
    token: payload.token,
    expiresAt: payload.expiresAt,
    source: `fetched:${COPILOT_TOKEN_URL}`,
    baseUrl: deriveCopilotApiBaseUrlFromToken(payload.token) ?? DEFAULT_COPILOT_API_BASE_URL,
  };
}
export {
DEFAULT_COPILOT_API_BASE_URL,
deriveCopilotApiBaseUrlFromToken,
resolveCopilotApiToken,
type CachedCopilotToken,
} from "openclaw/plugin-sdk/provider-auth";

View File

@@ -1,165 +1,22 @@
import { randomUUID } from "node:crypto";
import { parseStandalonePlainTextToolCallBlocks } from "openclaw/plugin-sdk/tool-payload";
export type LmstudioPlainTextToolCallBlock = {
arguments: Record<string, unknown>;
name: string;
};
const END_TOOL_REQUEST = "[END_TOOL_REQUEST]";
const MAX_PAYLOAD_CHARS = 256_000;
/** True for characters allowed in a tool name: letters, digits, `_`, `-`. */
function isToolNameChar(char: string | undefined): boolean {
  if (!char) {
    return false;
  }
  return /[A-Za-z0-9_-]/.test(char);
}
/** Returns the first index at or after `start` that is not a space or tab. */
function skipHorizontalWhitespace(text: string, start: number): number {
  let cursor = start;
  for (; cursor < text.length; cursor += 1) {
    const ch = text[cursor];
    if (ch !== " " && ch !== "\t") {
      break;
    }
  }
  return cursor;
}
/** Returns the first index at or after `start` that is not any whitespace. */
function skipWhitespace(text: string, start: number): number {
  const ws = /\s/;
  let cursor = start;
  for (; cursor < text.length; cursor += 1) {
    if (!ws.test(text[cursor] ?? "")) {
      break;
    }
  }
  return cursor;
}
/**
 * Consumes a single line break (`\n`, `\r`, or `\r\n`) at `start`.
 * Returns the index just past it, or null when no line break is present.
 */
function consumeLineBreak(text: string, start: number): number | null {
  const first = text[start];
  if (first === "\n") {
    return start + 1;
  }
  if (first !== "\r") {
    return null;
  }
  return text[start + 1] === "\n" ? start + 2 : start + 1;
}
/**
 * Parses an opening tag of the form `[tool_name]` followed by optional
 * horizontal whitespace and a mandatory line break. Returns the tool name and
 * the index just past the line break, or null when the text does not match.
 */
function parseOpening(text: string, start: number): { end: number; name: string } | null {
  if (text[start] !== "[") {
    return null;
  }
  const nameStart = start + 1;
  let cursor = nameStart;
  while (isToolNameChar(text[cursor])) {
    cursor += 1;
  }
  const name = text.slice(nameStart, cursor);
  if (name.length === 0 || text[cursor] !== "]") {
    return null;
  }
  const afterWs = skipHorizontalWhitespace(text, cursor + 1);
  const end = consumeLineBreak(text, afterWs);
  return end === null ? null : { end, name };
}
/**
 * Scans a balanced JSON object starting at the first `{` at/after `start`,
 * tracking string literals and escapes so braces inside strings don't count.
 * Parses the spanned text and returns the object plus the index just past the
 * closing `}`. Returns null when the text is not a plain object, exceeds the
 * payload cap, is unterminated, or fails to parse.
 */
function consumeJsonObject(
  text: string,
  start: number,
): { end: number; value: Record<string, unknown> } | null {
  const cursor = skipWhitespace(text, start);
  if (text[cursor] !== "{") {
    return null;
  }
  let depth = 0;
  let inString = false;
  let escaped = false;
  for (let index = cursor; index < text.length; index += 1) {
    // Bail out once the candidate payload grows past the size cap.
    if (index + 1 - cursor > MAX_PAYLOAD_CHARS) {
      return null;
    }
    const char = text[index];
    if (inString) {
      // Inside a string literal: a backslash escapes the next character;
      // only an unescaped quote ends the string.
      if (escaped) {
        escaped = false;
      } else if (char === "\\") {
        escaped = true;
      } else if (char === '"') {
        inString = false;
      }
      continue;
    }
    if (char === '"') {
      inString = true;
      continue;
    }
    if (char === "{") {
      depth += 1;
    } else if (char === "}") {
      depth -= 1;
      if (depth === 0) {
        // Braces balanced: parse the exact span and require a plain
        // (non-null, non-array) object.
        try {
          const parsed = JSON.parse(text.slice(cursor, index + 1)) as unknown;
          if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) {
            return null;
          }
          return { end: index + 1, value: parsed as Record<string, unknown> };
        } catch {
          return null;
        }
      }
    }
  }
  return null;
}
/**
 * Parses the closing marker after a payload: either the generic
 * `[END_TOOL_REQUEST]` sentinel or a named `[/name]` tag. Returns the index
 * just past the closing marker, or null when neither form is present.
 */
function parseClosing(text: string, start: number, name: string): number | null {
  const cursor = skipWhitespace(text, start);
  for (const closing of [END_TOOL_REQUEST, `[/${name}]`]) {
    if (text.startsWith(closing, cursor)) {
      return cursor + closing.length;
    }
  }
  return null;
}
/**
 * Parses one complete tool-call block (`[name]` opening + JSON payload +
 * closing marker) at `start`. Returns null when the opening tag is absent,
 * the tool name is not in `allowedToolNames`, the payload is not a valid
 * JSON object, or the closing marker is missing.
 */
function parseBlockAt(
  text: string,
  start: number,
  allowedToolNames: Set<string>,
): { block: LmstudioPlainTextToolCallBlock; end: number } | null {
  const opening = parseOpening(text, start);
  if (!opening || !allowedToolNames.has(opening.name)) {
    return null;
  }
  const payload = consumeJsonObject(text, opening.end);
  if (!payload) {
    return null;
  }
  const end = parseClosing(text, payload.end, opening.name);
  if (end === null) {
    return null;
  }
  return {
    block: { arguments: payload.value, name: opening.name },
    end,
  };
}
/**
 * Parses LM Studio plain-text tool-call blocks from `text`.
 *
 * Delegates to the shared standalone parser, constrained to
 * `allowedToolNames` and the local payload-size cap. Returns the parsed
 * blocks, or null when the text is not made up of tool-call blocks.
 *
 * Fix: the legacy loop body had been left above the new delegation, so
 * `blocks` was declared twice in the same scope (a compile error) and the
 * delegation was dead code. Only the delegation remains.
 */
export function parseLmstudioPlainTextToolCalls(
  text: string,
  allowedToolNames: Set<string>,
): LmstudioPlainTextToolCallBlock[] | null {
  const blocks = parseStandalonePlainTextToolCallBlocks(text, {
    allowedToolNames,
    maxPayloadBytes: MAX_PAYLOAD_CHARS,
  });
  return blocks?.map((block) => ({ arguments: block.arguments, name: block.name })) ?? null;
}
export function createLmstudioSyntheticToolCallId(): string {

View File

@@ -1,19 +1,15 @@
import {
createMigrationItem,
markMigrationItemConflict,
markMigrationItemError,
markMigrationItemSkipped,
applyMigrationConfigPatchItem,
applyMigrationManualItem,
createMigrationConfigPatchItem,
createMigrationManualItem,
hasMigrationConfigPatchConflict,
MIGRATION_REASON_TARGET_EXISTS,
} from "openclaw/plugin-sdk/migration";
import type { MigrationItem, MigrationProviderContext } from "openclaw/plugin-sdk/plugin-entry";
import { childRecord, isRecord, readJsonObject, sanitizeName } from "./helpers.js";
import type { ClaudeSource } from "./source.js";
type ConfigPatchDetails = {
path: string[];
value: unknown;
};
type MappedMcpSource = {
sourceId: string;
sourceLabel: string;
@@ -21,110 +17,6 @@ type MappedMcpSource = {
servers: Record<string, unknown>;
};
const CONFIG_RUNTIME_UNAVAILABLE = "config runtime unavailable";
const MISSING_CONFIG_PATCH = "missing config patch";
/** Thrown inside a config mutation to abort it and report a conflict reason. */
class ConfigPatchConflictError extends Error {
  readonly reason: string;

  constructor(reason: string) {
    super(reason);
    this.reason = reason;
    this.name = "ConfigPatchConflictError";
  }
}
/** Walks `path` through nested records; undefined once a segment is missing. */
function readPath(root: Record<string, unknown>, path: readonly string[]): unknown {
  return path.reduce<unknown>(
    (current, segment) => (isRecord(current) ? current[segment] : undefined),
    root,
  );
}
/**
 * Deep-merges `right` into `left` when both are plain records; otherwise the
 * result is a structured clone of `right` (right wins).
 */
function mergeValue(left: unknown, right: unknown): unknown {
  if (isRecord(left) && isRecord(right)) {
    const merged: Record<string, unknown> = { ...left };
    for (const key of Object.keys(right)) {
      merged[key] = mergeValue(merged[key], right[key]);
    }
    return merged;
  }
  return structuredClone(right);
}
/**
 * Writes `value` at `path` inside `root`, creating intermediate records as
 * needed and deep-merging with any existing leaf value (mutates `root`).
 * An empty path is a no-op.
 */
function writePath(root: Record<string, unknown>, path: readonly string[], value: unknown): void {
  let current = root;
  for (const segment of path.slice(0, -1)) {
    const existing = current[segment];
    // Replace non-record intermediates so the walk can continue.
    if (!isRecord(existing)) {
      current[segment] = {};
    }
    current = current[segment] as Record<string, unknown>;
  }
  const leaf = path.at(-1);
  if (!leaf) {
    return;
  }
  current[leaf] = mergeValue(current[leaf], value);
}
/**
 * Detects whether applying `value` at `path` would clash with existing config.
 * Non-record values conflict when anything already exists at `path`; record
 * values conflict only when one of their top-level keys is already set.
 */
function hasPatchConflict(
  config: MigrationProviderContext["config"],
  path: readonly string[],
  value: unknown,
): boolean {
  const existing = readPath(config as Record<string, unknown>, path);
  if (!isRecord(value)) {
    return existing !== undefined;
  }
  if (!isRecord(existing)) {
    return false;
  }
  return Object.keys(value).some((key) => existing[key] !== undefined);
}
/**
 * Builds a "config merge" migration item. When `conflict` is set the item is
 * created in conflict status with the supplied (or default) reason.
 */
function createConfigPatchItem(params: {
  id: string;
  target: string;
  path: string[];
  value: unknown;
  message: string;
  conflict?: boolean;
  reason?: string;
  source?: string;
  details?: Record<string, unknown>;
}): MigrationItem {
  const conflicted = Boolean(params.conflict);
  return createMigrationItem({
    id: params.id,
    kind: "config",
    action: "merge",
    source: params.source,
    target: params.target,
    status: conflicted ? "conflict" : "planned",
    reason: conflicted ? (params.reason ?? MIGRATION_REASON_TARGET_EXISTS) : undefined,
    message: params.message,
    details: { ...params.details, path: params.path, value: params.value },
  });
}
/** Builds a skipped "manual" migration item with a follow-up recommendation. */
function createManualItem(params: {
  id: string;
  source: string;
  message: string;
  recommendation: string;
}): MigrationItem {
  const { id, source, message, recommendation } = params;
  return createMigrationItem({
    id,
    kind: "manual",
    action: "manual",
    source,
    status: "skipped",
    message,
    reason: recommendation,
  });
}
function mapMcpServers(raw: unknown): Record<string, unknown> | undefined {
if (!isRecord(raw)) {
return undefined;
@@ -226,9 +118,10 @@ export async function buildConfigItems(params: {
const duplicate = (counts.get(name) ?? 0) > 1;
const conflict =
duplicate ||
(!params.ctx.overwrite && hasPatchConflict(params.ctx.config, ["mcp", "servers"], patch));
(!params.ctx.overwrite &&
hasMigrationConfigPatchConflict(params.ctx.config, ["mcp", "servers"], patch));
items.push(
createConfigPatchItem({
createMigrationConfigPatchItem({
id: `config:mcp-server:${sanitizeName(mcpSource.sourceId)}:${sanitizeName(name)}`,
source: mcpSource.sourcePath,
target: `mcp.servers.${name}`,
@@ -254,7 +147,7 @@ export async function buildConfigItems(params: {
const settings = await readJsonObject(settingsPath);
if (settingsPath && settings.hooks !== undefined) {
items.push(
createManualItem({
createMigrationManualItem({
id: `manual:hooks:${sanitizeName(settingsPath)}`,
source: settingsPath,
message: "Claude hooks were found but are not enabled automatically.",
@@ -264,7 +157,7 @@ export async function buildConfigItems(params: {
}
if (settingsPath && settings.permissions !== undefined) {
items.push(
createManualItem({
createMigrationManualItem({
id: `manual:permissions:${sanitizeName(settingsPath)}`,
source: settingsPath,
message: "Claude permission settings were found but are not translated automatically.",
@@ -275,7 +168,7 @@ export async function buildConfigItems(params: {
}
if (settingsPath && settings.env !== undefined) {
items.push(
createManualItem({
createMigrationManualItem({
id: `manual:env:${sanitizeName(settingsPath)}`,
source: settingsPath,
message: "Claude environment defaults were found but are not copied automatically.",
@@ -289,56 +182,13 @@ export async function buildConfigItems(params: {
return items;
}
/**
 * Extracts the { path, value } config patch from an item's details, returning
 * undefined unless `details.path` is an array of strings.
 */
function readConfigPatchDetails(item: MigrationItem): ConfigPatchDetails | undefined {
  const candidate = item.details?.path;
  if (!Array.isArray(candidate)) {
    return undefined;
  }
  const allStrings = candidate.every(
    (segment): segment is string => typeof segment === "string",
  );
  if (!allStrings) {
    return undefined;
  }
  return { path: candidate, value: item.details?.value };
}
/**
 * Applies a planned config-patch migration item by delegating to the shared
 * SDK helper, which handles status filtering, conflict detection, the config
 * mutation itself, and error/conflict status mapping.
 *
 * Fix: the legacy try/catch implementation had been left above the new SDK
 * delegation, so the delegating return was unreachable dead code. Only the
 * delegation remains.
 */
export async function applyConfigItem(
  ctx: MigrationProviderContext,
  item: MigrationItem,
): Promise<MigrationItem> {
  return applyMigrationConfigPatchItem(ctx, item);
}
/**
 * Marks a manual migration item as skipped via the shared SDK helper.
 *
 * Fix: the legacy `markMigrationItemSkipped` return had been left above the
 * new delegation, making the delegation unreachable. Only the delegation
 * remains.
 */
export function applyManualItem(item: MigrationItem): MigrationItem {
  return applyMigrationManualItem(item);
}

View File

@@ -1,9 +1,9 @@
import {
createMigrationItem,
markMigrationItemConflict,
markMigrationItemError,
markMigrationItemSkipped,
MIGRATION_REASON_TARGET_EXISTS,
applyMigrationConfigPatchItem,
applyMigrationManualItem,
createMigrationConfigPatchItem,
createMigrationManualItem,
hasMigrationConfigPatchConflict,
} from "openclaw/plugin-sdk/migration";
import type { MigrationItem, MigrationProviderContext } from "openclaw/plugin-sdk/plugin-entry";
import { childRecord, isRecord, readString, readStringArray } from "./helpers.js";
@@ -15,21 +15,6 @@ type HermesProviderConfig = {
models: string[];
};
type ConfigPatchDetails = {
path: string[];
value: unknown;
};
const CONFIG_RUNTIME_UNAVAILABLE = "config runtime unavailable";
const MISSING_CONFIG_PATCH = "missing config patch";
/** Thrown inside a config mutation to abort it and report a conflict reason. */
class ConfigPatchConflictError extends Error {
  readonly reason: string;

  constructor(reason: string) {
    super(reason);
    this.reason = reason;
    this.name = "ConfigPatchConflictError";
  }
}
/**
 * Maps a provider id to its conventional API-key env var name,
 * e.g. `open-ai` -> `OPEN_AI_API_KEY`.
 */
function envKeyForProvider(providerId: string): string {
  const normalized = providerId.toUpperCase().replaceAll(/[^A-Z0-9]/gu, "_");
  return `${normalized}_API_KEY`;
}
@@ -175,96 +160,6 @@ function mapSkillEntries(config: Record<string, unknown>): Record<string, unknow
return Object.keys(entries).length > 0 ? entries : undefined;
}
/** Walks `path` through nested records; undefined once a segment is missing. */
function readPath(root: Record<string, unknown>, path: readonly string[]): unknown {
  return path.reduce<unknown>(
    (current, segment) => (isRecord(current) ? current[segment] : undefined),
    root,
  );
}
/**
 * Deep-merges `right` into `left` when both are plain records; otherwise the
 * result is a structured clone of `right` (right wins).
 */
function mergeValue(left: unknown, right: unknown): unknown {
  if (isRecord(left) && isRecord(right)) {
    const merged: Record<string, unknown> = { ...left };
    for (const key of Object.keys(right)) {
      merged[key] = mergeValue(merged[key], right[key]);
    }
    return merged;
  }
  return structuredClone(right);
}
/**
 * Writes `value` at `path` inside `root`, creating intermediate records as
 * needed and deep-merging with any existing leaf value (mutates `root`).
 * An empty path is a no-op.
 */
function writePath(root: Record<string, unknown>, path: readonly string[], value: unknown): void {
  let current = root;
  for (const segment of path.slice(0, -1)) {
    const existing = current[segment];
    // Replace non-record intermediates so the walk can continue.
    if (!isRecord(existing)) {
      current[segment] = {};
    }
    current = current[segment] as Record<string, unknown>;
  }
  const leaf = path.at(-1);
  if (!leaf) {
    return;
  }
  current[leaf] = mergeValue(current[leaf], value);
}
/**
 * Detects whether applying `value` at `path` would clash with existing config.
 * Non-record values conflict when anything already exists at `path`; record
 * values conflict only when one of their top-level keys is already set.
 */
function hasPatchConflict(
  config: MigrationProviderContext["config"],
  path: readonly string[],
  value: unknown,
): boolean {
  const existing = readPath(config as Record<string, unknown>, path);
  if (!isRecord(value)) {
    return existing !== undefined;
  }
  if (!isRecord(existing)) {
    return false;
  }
  return Object.keys(value).some((key) => existing[key] !== undefined);
}
/**
 * Builds a "config merge" migration item; conflicted items are created in
 * conflict status with the standard target-exists reason.
 */
function createConfigPatchItem(params: {
  id: string;
  target: string;
  path: string[];
  value: unknown;
  message: string;
  conflict?: boolean;
}): MigrationItem {
  const conflicted = Boolean(params.conflict);
  return createMigrationItem({
    id: params.id,
    kind: "config",
    action: "merge",
    target: params.target,
    status: conflicted ? "conflict" : "planned",
    reason: conflicted ? MIGRATION_REASON_TARGET_EXISTS : undefined,
    message: params.message,
    details: { path: params.path, value: params.value },
  });
}
/** Builds a skipped "manual" migration item with a follow-up recommendation. */
function createManualItem(params: {
  id: string;
  source: string;
  message: string;
  recommendation: string;
}): MigrationItem {
  const { id, source, message, recommendation } = params;
  return createMigrationItem({
    id,
    kind: "manual",
    action: "manual",
    source,
    status: "skipped",
    message,
    reason: recommendation,
  });
}
export function buildConfigItems(params: {
ctx: MigrationProviderContext;
config: Record<string, unknown>;
@@ -277,7 +172,7 @@ export function buildConfigItems(params: {
if (params.hasMemoryFiles || memoryProvider) {
items.push(
createConfigPatchItem({
createMigrationConfigPatchItem({
id: "config:memory",
target: "memory",
path: ["memory"],
@@ -285,11 +180,11 @@ export function buildConfigItems(params: {
message: "Use OpenClaw built-in file memory for imported Hermes memory files.",
conflict:
!params.ctx.overwrite &&
hasPatchConflict(params.ctx.config, ["memory"], { backend: true }),
hasMigrationConfigPatchConflict(params.ctx.config, ["memory"], { backend: true }),
}),
);
items.push(
createConfigPatchItem({
createMigrationConfigPatchItem({
id: "config:memory-plugin-slot",
target: "plugins.slots",
path: ["plugins", "slots"],
@@ -297,7 +192,9 @@ export function buildConfigItems(params: {
message: "Select the default OpenClaw memory plugin for imported file memory.",
conflict:
!params.ctx.overwrite &&
hasPatchConflict(params.ctx.config, ["plugins", "slots"], { memory: true }),
hasMigrationConfigPatchConflict(params.ctx.config, ["plugins", "slots"], {
memory: true,
}),
}),
);
}
@@ -310,7 +207,7 @@ export function buildConfigItems(params: {
},
};
items.push(
createConfigPatchItem({
createMigrationConfigPatchItem({
id: "config:memory-plugin:honcho",
target: "plugins.entries.honcho",
path: ["plugins", "entries"],
@@ -318,11 +215,11 @@ export function buildConfigItems(params: {
message: "Preserve Hermes Honcho memory settings as a plugin entry for manual activation.",
conflict:
!params.ctx.overwrite &&
hasPatchConflict(params.ctx.config, ["plugins", "entries"], value),
hasMigrationConfigPatchConflict(params.ctx.config, ["plugins", "entries"], value),
}),
);
items.push(
createManualItem({
createMigrationManualItem({
id: "manual:memory-provider:honcho",
source: "config.yaml:memory.provider",
message:
@@ -333,7 +230,7 @@ export function buildConfigItems(params: {
);
} else if (memoryProvider && !["builtin", "file", "files"].includes(memoryProvider)) {
items.push(
createManualItem({
createMigrationManualItem({
id: `manual:memory-provider:${memoryProvider}`,
source: "config.yaml:memory.provider",
message: `Hermes memory provider "${memoryProvider}" does not have a known OpenClaw mapping.`,
@@ -346,7 +243,7 @@ export function buildConfigItems(params: {
if (providers.length > 0) {
const value = Object.fromEntries(providers.map((entry) => [entry.id, providerConfig(entry)]));
items.push(
createConfigPatchItem({
createMigrationConfigPatchItem({
id: "config:model-providers",
target: "models.providers",
path: ["models", "providers"],
@@ -354,7 +251,7 @@ export function buildConfigItems(params: {
message: "Import Hermes provider and custom endpoint config.",
conflict:
!params.ctx.overwrite &&
hasPatchConflict(params.ctx.config, ["models", "providers"], value),
hasMigrationConfigPatchConflict(params.ctx.config, ["models", "providers"], value),
}),
);
}
@@ -366,7 +263,7 @@ export function buildConfigItems(params: {
const mcpServers = mapMcpServers(rawMcpServers);
if (mcpServers) {
items.push(
createConfigPatchItem({
createMigrationConfigPatchItem({
id: "config:mcp-servers",
target: "mcp.servers",
path: ["mcp", "servers"],
@@ -374,7 +271,7 @@ export function buildConfigItems(params: {
message: "Import Hermes MCP server definitions.",
conflict:
!params.ctx.overwrite &&
hasPatchConflict(params.ctx.config, ["mcp", "servers"], mcpServers),
hasMigrationConfigPatchConflict(params.ctx.config, ["mcp", "servers"], mcpServers),
}),
);
}
@@ -382,7 +279,7 @@ export function buildConfigItems(params: {
const skillEntries = mapSkillEntries(params.config);
if (skillEntries) {
items.push(
createConfigPatchItem({
createMigrationConfigPatchItem({
id: "config:skill-entries",
target: "skills.entries",
path: ["skills", "entries"],
@@ -390,7 +287,7 @@ export function buildConfigItems(params: {
message: "Import Hermes skill config values.",
conflict:
!params.ctx.overwrite &&
hasPatchConflict(params.ctx.config, ["skills", "entries"], skillEntries),
hasMigrationConfigPatchConflict(params.ctx.config, ["skills", "entries"], skillEntries),
}),
);
}
@@ -398,56 +295,13 @@ export function buildConfigItems(params: {
return items;
}
/**
 * Extracts the { path, value } config patch from an item's details, returning
 * undefined unless `details.path` is an array of strings.
 */
function readConfigPatchDetails(item: MigrationItem): ConfigPatchDetails | undefined {
  const candidate = item.details?.path;
  if (!Array.isArray(candidate)) {
    return undefined;
  }
  const allStrings = candidate.every(
    (segment): segment is string => typeof segment === "string",
  );
  if (!allStrings) {
    return undefined;
  }
  return { path: candidate, value: item.details?.value };
}
/**
 * Applies a planned config-patch migration item by delegating to the shared
 * SDK helper, which handles status filtering, conflict detection, the config
 * mutation itself, and error/conflict status mapping.
 *
 * Fix: the legacy try/catch implementation had been left above the new SDK
 * delegation, so the delegating return was unreachable dead code. Only the
 * delegation remains.
 */
export async function applyConfigItem(
  ctx: MigrationProviderContext,
  item: MigrationItem,
): Promise<MigrationItem> {
  return applyMigrationConfigPatchItem(ctx, item);
}
/**
 * Marks a manual migration item as skipped via the shared SDK helper.
 *
 * Fix: the legacy `markMigrationItemSkipped` return had been left above the
 * new delegation, making the delegation unreachable. Only the delegation
 * remains.
 */
export function applyManualItem(item: MigrationItem): MigrationItem {
  return applyMigrationManualItem(item);
}

View File

@@ -1,173 +1 @@
/** Whether a conversation is a one-on-one DM or a shared channel. */
export type QaBusConversationKind = "direct" | "channel";
/** A conversation on the QA bus, identified by id; `title` is optional. */
export type QaBusConversation = {
  id: string;
  kind: QaBusConversationKind;
  title?: string;
};
/**
 * A media or file attachment on a message. Content may be referenced by
 * `url` or embedded via `contentBase64`; the dimension/duration/alt-text
 * fields are optional media metadata.
 */
export type QaBusAttachment = {
  id: string;
  kind: "image" | "video" | "audio" | "file";
  mimeType: string;
  fileName?: string;
  inline?: boolean;
  url?: string;
  contentBase64?: string;
  width?: number;
  height?: number;
  durationMs?: number;
  altText?: string;
  transcript?: string;
};
/**
 * A message on the QA bus. `direction` distinguishes messages received by an
 * account from messages it sent. Threading is modeled via `threadId` /
 * `threadTitle`, replies via `replyToId`; `reactions` is always present
 * (possibly empty). Timestamps are numeric — presumably epoch millis; confirm
 * against the producer.
 */
export type QaBusMessage = {
  id: string;
  accountId: string;
  direction: "inbound" | "outbound";
  conversation: QaBusConversation;
  senderId: string;
  senderName?: string;
  text: string;
  timestamp: number;
  threadId?: string;
  threadTitle?: string;
  replyToId?: string;
  deleted?: boolean;
  editedAt?: number;
  attachments?: QaBusAttachment[];
  reactions: Array<{
    emoji: string;
    senderId: string;
    timestamp: number;
  }>;
};
/** A thread created inside a conversation. */
export type QaBusThread = {
  id: string;
  accountId: string;
  conversationId: string;
  title: string;
  createdAt: number;
  createdBy: string;
};
/**
 * Events emitted by the QA bus. Each variant carries a numeric `cursor` for
 * incremental polling plus the account it belongs to.
 */
export type QaBusEvent =
  | { cursor: number; kind: "inbound-message"; accountId: string; message: QaBusMessage }
  | { cursor: number; kind: "outbound-message"; accountId: string; message: QaBusMessage }
  | { cursor: number; kind: "thread-created"; accountId: string; thread: QaBusThread }
  | { cursor: number; kind: "message-edited"; accountId: string; message: QaBusMessage }
  | { cursor: number; kind: "message-deleted"; accountId: string; message: QaBusMessage }
  | {
      cursor: number;
      kind: "reaction-added";
      accountId: string;
      message: QaBusMessage;
      emoji: string;
      senderId: string;
    };
/** Input for injecting an inbound message; omitted fields get defaults. */
export type QaBusInboundMessageInput = {
  accountId?: string;
  conversation: QaBusConversation;
  senderId: string;
  senderName?: string;
  text: string;
  timestamp?: number;
  threadId?: string;
  threadTitle?: string;
  replyToId?: string;
  attachments?: QaBusAttachment[];
};
/** Input for sending an outbound message; `to` addresses the recipient. */
export type QaBusOutboundMessageInput = {
  accountId?: string;
  to: string;
  senderId?: string;
  senderName?: string;
  text: string;
  timestamp?: number;
  threadId?: string;
  replyToId?: string;
  attachments?: QaBusAttachment[];
};
/** Input for creating a thread inside a conversation. */
export type QaBusCreateThreadInput = {
  accountId?: string;
  conversationId: string;
  title: string;
  createdBy?: string;
  timestamp?: number;
};
/** Input for adding an emoji reaction to a message. */
export type QaBusReactToMessageInput = {
  accountId?: string;
  messageId: string;
  emoji: string;
  senderId?: string;
  timestamp?: number;
};
/** Input for editing a message's text. */
export type QaBusEditMessageInput = {
  accountId?: string;
  messageId: string;
  text: string;
  timestamp?: number;
};
/** Input for deleting a message. */
export type QaBusDeleteMessageInput = {
  accountId?: string;
  messageId: string;
  timestamp?: number;
};
/** Filters for searching messages; all criteria are optional. */
export type QaBusSearchMessagesInput = {
  accountId?: string;
  query?: string;
  conversationId?: string;
  threadId?: string;
  limit?: number;
};
/** Input for fetching a single message by id. */
export type QaBusReadMessageInput = {
  accountId?: string;
  messageId: string;
};
export type QaBusPollInput = {
accountId?: string;
cursor?: number;
timeoutMs?: number;
limit?: number;
};
export type QaBusPollResult = {
cursor: number;
events: QaBusEvent[];
};
export type QaBusStateSnapshot = {
cursor: number;
conversations: QaBusConversation[];
threads: QaBusThread[];
messages: QaBusMessage[];
events: QaBusEvent[];
};
export type QaBusWaitForInput =
| {
timeoutMs?: number;
kind: "event-kind";
eventKind: QaBusEvent["kind"];
}
| {
timeoutMs?: number;
kind: "message-text";
textIncludes: string;
direction?: QaBusMessage["direction"];
}
| {
timeoutMs?: number;
kind: "thread-id";
threadId: string;
};
export type * from "openclaw/plugin-sdk/qa-channel-protocol";

View File

@@ -1,173 +1 @@
// NOTE(review): this section is a byte-identical duplicate of the QaBus
// protocol declarations earlier in this rendered diff (same file shown twice).
/** Distinguishes 1:1 direct chats from multi-member channels. */
export type QaBusConversationKind = "direct" | "channel";

/** A conversation on the QA bus; `title` may be absent (e.g. for direct chats). */
export type QaBusConversation = {
  id: string;
  kind: QaBusConversationKind;
  title?: string;
};

/** A message attachment; content is either inline base64 or referenced by URL. */
export type QaBusAttachment = {
  id: string;
  kind: "image" | "video" | "audio" | "file";
  mimeType: string;
  fileName?: string;
  inline?: boolean;
  url?: string;
  contentBase64?: string;
  // Pixel dimensions — presumably for image/video kinds; confirm with producers.
  width?: number;
  height?: number;
  // Playback length — presumably for audio/video kinds.
  durationMs?: number;
  altText?: string;
  transcript?: string;
};

/** A message on the QA bus, including threading, edit/delete, and reaction state. */
export type QaBusMessage = {
  id: string;
  accountId: string;
  direction: "inbound" | "outbound";
  conversation: QaBusConversation;
  senderId: string;
  senderName?: string;
  text: string;
  // Epoch timestamp — presumably milliseconds; confirm against event producers.
  timestamp: number;
  threadId?: string;
  threadTitle?: string;
  replyToId?: string;
  deleted?: boolean;
  editedAt?: number;
  attachments?: QaBusAttachment[];
  // One entry per reaction-add event; required (empty array when unreacted).
  reactions: Array<{
    emoji: string;
    senderId: string;
    timestamp: number;
  }>;
};

/** A named thread created inside a conversation. */
export type QaBusThread = {
  id: string;
  accountId: string;
  conversationId: string;
  title: string;
  createdAt: number;
  createdBy: string;
};

/**
 * Discriminated union (on `kind`) of everything observable on the bus.
 * `cursor` is the event's position, used for incremental polling.
 */
export type QaBusEvent =
  | { cursor: number; kind: "inbound-message"; accountId: string; message: QaBusMessage }
  | { cursor: number; kind: "outbound-message"; accountId: string; message: QaBusMessage }
  | { cursor: number; kind: "thread-created"; accountId: string; thread: QaBusThread }
  | { cursor: number; kind: "message-edited"; accountId: string; message: QaBusMessage }
  | { cursor: number; kind: "message-deleted"; accountId: string; message: QaBusMessage }
  | {
      cursor: number;
      kind: "reaction-added";
      accountId: string;
      message: QaBusMessage;
      emoji: string;
      senderId: string;
    };

/** Input for injecting an inbound message; omitted fields use implementation defaults. */
export type QaBusInboundMessageInput = {
  accountId?: string;
  conversation: QaBusConversation;
  senderId: string;
  senderName?: string;
  text: string;
  timestamp?: number;
  threadId?: string;
  threadTitle?: string;
  replyToId?: string;
  attachments?: QaBusAttachment[];
};

/** Input for sending an outbound message; `to` names the target conversation. */
export type QaBusOutboundMessageInput = {
  accountId?: string;
  to: string;
  senderId?: string;
  senderName?: string;
  text: string;
  timestamp?: number;
  threadId?: string;
  replyToId?: string;
  attachments?: QaBusAttachment[];
};

/** Input for creating a thread in an existing conversation. */
export type QaBusCreateThreadInput = {
  accountId?: string;
  conversationId: string;
  title: string;
  createdBy?: string;
  timestamp?: number;
};

/** Input for adding an emoji reaction to an existing message. */
export type QaBusReactToMessageInput = {
  accountId?: string;
  messageId: string;
  emoji: string;
  senderId?: string;
  timestamp?: number;
};

/** Input for replacing an existing message's text. */
export type QaBusEditMessageInput = {
  accountId?: string;
  messageId: string;
  text: string;
  timestamp?: number;
};

/** Input for deleting an existing message. */
export type QaBusDeleteMessageInput = {
  accountId?: string;
  messageId: string;
  timestamp?: number;
};

/** Filters for searching stored messages; every criterion is optional. */
export type QaBusSearchMessagesInput = {
  accountId?: string;
  query?: string;
  conversationId?: string;
  threadId?: string;
  limit?: number;
};

/** Input for reading a single message by id. */
export type QaBusReadMessageInput = {
  accountId?: string;
  messageId: string;
};

/** Long-poll parameters: resume after `cursor`, bounded by timeout and limit. */
export type QaBusPollInput = {
  accountId?: string;
  cursor?: number;
  timeoutMs?: number;
  limit?: number;
};

/** Result of a poll: the new cursor plus the events observed since the old one. */
export type QaBusPollResult = {
  cursor: number;
  events: QaBusEvent[];
};

/** Full snapshot of bus state, for debugging or initial sync. */
export type QaBusStateSnapshot = {
  cursor: number;
  conversations: QaBusConversation[];
  threads: QaBusThread[];
  messages: QaBusMessage[];
  events: QaBusEvent[];
};

/** Conditions a caller can block on: an event kind, a message substring, or a thread id. */
export type QaBusWaitForInput =
  | {
      timeoutMs?: number;
      kind: "event-kind";
      eventKind: QaBusEvent["kind"];
    }
  | {
      timeoutMs?: number;
      kind: "message-text";
      textIncludes: string;
      direction?: QaBusMessage["direction"];
    }
  | {
      timeoutMs?: number;
      kind: "thread-id";
      threadId: string;
    };

// NOTE(review): this star re-export declares the same protocol surface as the
// local declarations above — per the hunk header (`@@ -1,173 +1 @@`) the
// refactor's intent appears to be keeping only this line; the local copies are
// merged-diff residue.
export type * from "openclaw/plugin-sdk/qa-channel-protocol";

View File

@@ -1,9 +1,13 @@
import type { Context } from "@mariozechner/pi-ai";
import { buildCopilotIdeHeaders } from "../plugin-sdk/provider-auth.js";
// IDE identity advertised to Copilot endpoints.
export const COPILOT_EDITOR_VERSION = "vscode/1.96.2";
export const COPILOT_USER_AGENT = "GitHubCopilotChat/0.26.7";
export const COPILOT_EDITOR_PLUGIN_VERSION = "copilot-chat/0.35.0";
// Value for the X-Github-Api-Version header.
export const COPILOT_GITHUB_API_VERSION = "2025-04-01";
// NOTE(review): this re-export declares the same names as the local consts
// above (and `buildCopilotIdeHeaders` below) — merged-diff residue; only one
// of the two declarations should remain in the real file.
export {
  buildCopilotIdeHeaders,
  COPILOT_EDITOR_PLUGIN_VERSION,
  COPILOT_EDITOR_VERSION,
  COPILOT_GITHUB_API_VERSION,
  COPILOT_USER_AGENT,
} from "../plugin-sdk/provider-auth.js";
function inferCopilotInitiator(messages: Context["messages"]): "agent" | "user" {
const last = messages[messages.length - 1];
@@ -39,19 +43,6 @@ export function hasCopilotVisionInput(messages: Context["messages"]): boolean {
});
}
/**
 * Builds the static IDE-identification headers Copilot endpoints expect.
 * When `includeApiVersion` is set, the GitHub API version header is added.
 */
export function buildCopilotIdeHeaders(
  params: {
    includeApiVersion?: boolean;
  } = {},
): Record<string, string> {
  const headers: Record<string, string> = {
    "Editor-Version": COPILOT_EDITOR_VERSION,
    "Editor-Plugin-Version": COPILOT_EDITOR_PLUGIN_VERSION,
    "User-Agent": COPILOT_USER_AGENT,
  };
  if (params.includeApiVersion) {
    headers["X-Github-Api-Version"] = COPILOT_GITHUB_API_VERSION;
  }
  return headers;
}
export function buildCopilotDynamicHeaders(params: {
messages: Context["messages"];
hasImages: boolean;

View File

@@ -1,162 +1,6 @@
import path from "node:path";
import { resolveStateDir } from "../config/paths.js";
import { loadJsonFile, saveJsonFile } from "../infra/json-file.js";
import { normalizeLowercaseStringOrEmpty } from "../shared/string-coerce.js";
import { buildCopilotIdeHeaders } from "./copilot-dynamic-headers.js";
import { resolveProviderEndpoint } from "./provider-attribution.js";
// GitHub endpoint that exchanges an OAuth token for a short-lived Copilot API token.
const COPILOT_TOKEN_URL = "https://api.github.com/copilot_internal/v2/token";
/** Disk-cached Copilot API token plus its validity window. */
export type CachedCopilotToken = {
  token: string;
  /** milliseconds since epoch */
  expiresAt: number;
  /** milliseconds since epoch */
  updatedAt: number;
};
/** Path of the on-disk Copilot token cache inside the state directory. */
function resolveCopilotTokenCachePath(env: NodeJS.ProcessEnv = process.env) {
  const credentialsDir = path.join(resolveStateDir(env), "credentials");
  return path.join(credentialsDir, "github-copilot.token.json");
}
/**
 * True while the cached token has more than five minutes of validity left,
 * so callers never hand out a token that is about to expire.
 */
function isTokenUsable(cache: CachedCopilotToken, now = Date.now()): boolean {
  const safetyMarginMs = 5 * 60 * 1000;
  const remainingMs = cache.expiresAt - now;
  return remainingMs > safetyMarginMs;
}
/**
 * Validates and normalizes the JSON body returned by the Copilot token
 * endpoint, producing the token plus its expiry in milliseconds since epoch.
 * Throws when the payload is not an object or lacks a usable token/expiry.
 */
function parseCopilotTokenResponse(value: unknown): {
  token: string;
  expiresAt: number;
} {
  if (!value || typeof value !== "object") {
    throw new Error("Unexpected response from GitHub Copilot token endpoint");
  }
  const { token, expires_at: expiresAt } = value as Record<string, unknown>;
  if (typeof token !== "string" || token.trim().length === 0) {
    throw new Error("Copilot token response missing token");
  }
  // GitHub returns a unix timestamp (seconds), but we defensively accept ms too.
  // Values below 1e11 are treated as seconds so large seconds-epoch values are
  // not misread as milliseconds.
  const toMs = (raw: number): number => (raw < 100_000_000_000 ? raw * 1000 : raw);
  if (typeof expiresAt === "number" && Number.isFinite(expiresAt)) {
    return { token, expiresAt: toMs(expiresAt) };
  }
  if (typeof expiresAt === "string" && expiresAt.trim().length > 0) {
    const parsed = Number.parseInt(expiresAt, 10);
    if (!Number.isFinite(parsed)) {
      throw new Error("Copilot token response has invalid expires_at");
    }
    return { token, expiresAt: toMs(parsed) };
  }
  throw new Error("Copilot token response missing expires_at");
}
export const DEFAULT_COPILOT_API_BASE_URL = "https://api.individual.githubcopilot.com";
/**
 * Extracts the normalized lowercase hostname from a `proxy-ep` value,
 * tolerating a missing scheme. Returns null for blank input, non-HTTP(S)
 * protocols, or unparsable URLs.
 */
function resolveCopilotProxyHost(proxyEp: string): string | null {
  const candidate = proxyEp.trim();
  if (!candidate) {
    return null;
  }
  // Default to https:// when the endpoint omits a scheme.
  const withScheme = /^https?:\/\//i.test(candidate) ? candidate : `https://${candidate}`;
  try {
    const parsed = new URL(withScheme);
    const isHttp = parsed.protocol === "http:" || parsed.protocol === "https:";
    return isHttp ? normalizeLowercaseStringOrEmpty(parsed.hostname) : null;
  } catch {
    return null;
  }
}
/**
 * Derives the Copilot API base URL embedded in a Copilot token. The token is
 * a semicolon-delimited key/value list containing a `proxy-ep=...` entry;
 * following the upstream pi-ai convention (getGitHubCopilotBaseUrl), the
 * proxy.* host maps to the api.* host. Returns null when the token carries no
 * usable proxy endpoint or the derived URL fails endpoint validation.
 */
export function deriveCopilotApiBaseUrlFromToken(token: string): string | null {
  const trimmedToken = token.trim();
  if (!trimmedToken) {
    return null;
  }
  const proxyEp = trimmedToken.match(/(?:^|;)\s*proxy-ep=([^;\s]+)/i)?.[1]?.trim();
  if (!proxyEp) {
    return null;
  }
  const proxyHost = resolveCopilotProxyHost(proxyEp);
  if (!proxyHost) {
    return null;
  }
  const baseUrl = `https://${proxyHost.replace(/^proxy\./i, "api.")}`;
  const { endpointClass } = resolveProviderEndpoint(baseUrl);
  return endpointClass === "invalid" ? null : baseUrl;
}
export async function resolveCopilotApiToken(params: {
githubToken: string;
env?: NodeJS.ProcessEnv;
fetchImpl?: typeof fetch;
cachePath?: string;
loadJsonFileImpl?: (path: string) => unknown;
saveJsonFileImpl?: (path: string, value: CachedCopilotToken) => void;
}): Promise<{
token: string;
expiresAt: number;
source: string;
baseUrl: string;
}> {
const env = params.env ?? process.env;
const cachePath = params.cachePath?.trim() || resolveCopilotTokenCachePath(env);
const loadJsonFileFn = params.loadJsonFileImpl ?? loadJsonFile;
const saveJsonFileFn = params.saveJsonFileImpl ?? saveJsonFile;
const cached = loadJsonFileFn(cachePath) as CachedCopilotToken | undefined;
if (cached && typeof cached.token === "string" && typeof cached.expiresAt === "number") {
if (isTokenUsable(cached)) {
return {
token: cached.token,
expiresAt: cached.expiresAt,
source: `cache:${cachePath}`,
baseUrl: deriveCopilotApiBaseUrlFromToken(cached.token) ?? DEFAULT_COPILOT_API_BASE_URL,
};
}
}
const fetchImpl = params.fetchImpl ?? fetch;
const res = await fetchImpl(COPILOT_TOKEN_URL, {
method: "GET",
headers: {
Accept: "application/json",
Authorization: `Bearer ${params.githubToken}`,
...buildCopilotIdeHeaders({ includeApiVersion: true }),
},
});
if (!res.ok) {
throw new Error(`Copilot token exchange failed: HTTP ${res.status}`);
}
const json = parseCopilotTokenResponse(await res.json());
const payload: CachedCopilotToken = {
token: json.token,
expiresAt: json.expiresAt,
updatedAt: Date.now(),
};
saveJsonFileFn(cachePath, payload);
return {
token: payload.token,
expiresAt: payload.expiresAt,
source: `fetched:${COPILOT_TOKEN_URL}`,
baseUrl: deriveCopilotApiBaseUrlFromToken(payload.token) ?? DEFAULT_COPILOT_API_BASE_URL,
};
}
// NOTE(review): this re-export declares the same public names as the local
// implementations above — merged-diff residue; exactly one of the two
// declarations should remain in the real file.
export {
  DEFAULT_COPILOT_API_BASE_URL,
  deriveCopilotApiBaseUrlFromToken,
  resolveCopilotApiToken,
  type CachedCopilotToken,
} from "../plugin-sdk/provider-auth.js";

View File

@@ -1,211 +1,6 @@
/** A tool-call block parsed out of plain model text, with its span in the input. */
export type PlainTextToolCallBlock = {
  // Parsed JSON payload of the call.
  arguments: Record<string, unknown>;
  // Exclusive end offset of the block in the source text.
  end: number;
  name: string;
  // Exact source slice the block was parsed from.
  raw: string;
  // Inclusive start offset of the block in the source text.
  start: number;
};
/** Parsing limits: an allow-list of tool names and a payload size cap. */
type ParseOptions = {
  allowedToolNames?: Iterable<string>;
  maxPayloadBytes?: number;
};
// Default payload cap. NOTE(review): the scanner counts UTF-16 code units, not
// encoded bytes — see consumeJsonObject.
const DEFAULT_MAX_PAYLOAD_BYTES = 256_000;
// Generic closing sentinel accepted in place of the named [/tool] closer.
const END_TOOL_REQUEST = "[END_TOOL_REQUEST]";
/** True when `char` is a character allowed in a tool name (alphanumeric, `_`, `-`). */
function isToolNameChar(char: string | undefined): boolean {
  if (!char) {
    return false;
  }
  return /[A-Za-z0-9_-]/.test(char);
}
/** Returns the first index at or after `start` that is not a space or tab. */
function skipHorizontalWhitespace(text: string, start: number): number {
  let cursor = start;
  while (cursor < text.length) {
    const ch = text[cursor];
    if (ch !== " " && ch !== "\t") {
      break;
    }
    cursor += 1;
  }
  return cursor;
}
/** Returns the first index at or after `start` that is not whitespace (per /\s/). */
function skipWhitespace(text: string, start: number): number {
  const ws = /\s/;
  let cursor = start;
  for (; cursor < text.length; cursor += 1) {
    if (!ws.test(text[cursor] ?? "")) {
      break;
    }
  }
  return cursor;
}
/**
 * Consumes a single line break (\n, \r, or \r\n) at `start`. Returns the index
 * just past it, or null when `start` is not a line break.
 */
function consumeLineBreak(text: string, start: number): number | null {
  switch (text[start]) {
    case "\n":
      return start + 1;
    case "\r":
      // Treat \r\n as one break; a lone \r also counts.
      return start + (text[start + 1] === "\n" ? 2 : 1);
    default:
      return null;
  }
}
/**
 * Parses an opening tag `[toolName]` at `start`, followed by optional spaces
 * or tabs and a mandatory line break. Returns the tool name and the offset
 * just past the line break, or null when the text does not match.
 */
function parseOpening(text: string, start: number): { end: number; name: string } | null {
  if (text[start] !== "[") {
    return null;
  }
  const nameStart = start + 1;
  let nameEnd = nameStart;
  while (isToolNameChar(text[nameEnd])) {
    nameEnd += 1;
  }
  // Reject an empty name or a missing closing bracket.
  if (nameEnd === nameStart || text[nameEnd] !== "]") {
    return null;
  }
  const afterWs = skipHorizontalWhitespace(text, nameEnd + 1);
  const end = consumeLineBreak(text, afterWs);
  if (end === null) {
    return null;
  }
  return { end, name: text.slice(nameStart, nameEnd) };
}
/**
 * Scans a balanced JSON object starting at/after `start` (leading whitespace
 * skipped) and parses it. Returns the parsed object plus the exclusive end
 * offset, or null when the text is not a JSON object, the braces never
 * balance, the payload exceeds the cap, or JSON.parse rejects the slice.
 *
 * NOTE(review): the cap counts UTF-16 code units of the scanned text, not
 * encoded bytes, despite the parameter name.
 */
function consumeJsonObject(
  text: string,
  start: number,
  maxPayloadBytes: number,
): { end: number; value: Record<string, unknown> } | null {
  let cursor = skipWhitespace(text, start);
  if (text[cursor] !== "{") {
    return null;
  }
  // Brace-depth scan that ignores braces inside JSON string literals.
  let depth = 0;
  let inString = false;
  let escaped = false;
  for (let index = cursor; index < text.length; index += 1) {
    const char = text[index];
    // Abort once the candidate payload grows past the cap.
    if (index + 1 - cursor > maxPayloadBytes) {
      return null;
    }
    if (inString) {
      if (escaped) {
        escaped = false;
      } else if (char === "\\") {
        escaped = true;
      } else if (char === '"') {
        inString = false;
      }
      continue;
    }
    if (char === '"') {
      inString = true;
      continue;
    }
    if (char === "{") {
      depth += 1;
    } else if (char === "}") {
      depth -= 1;
      if (depth === 0) {
        // Braces balanced: hand the exact slice to JSON.parse for validation.
        const rawJson = text.slice(cursor, index + 1);
        try {
          const parsed = JSON.parse(rawJson) as unknown;
          // Only plain objects qualify; arrays and primitives are rejected.
          if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) {
            return null;
          }
          return { end: index + 1, value: parsed as Record<string, unknown> };
        } catch {
          return null;
        }
      }
    }
  }
  // Input ended before the braces balanced.
  return null;
}
/**
 * Parses the closing marker after a payload: either the generic
 * [END_TOOL_REQUEST] sentinel or the named [/toolName] form. Returns the
 * offset just past the marker, or null when neither is present.
 */
function parseClosing(text: string, start: number, name: string): number | null {
  const cursor = skipWhitespace(text, start);
  for (const marker of [END_TOOL_REQUEST, `[/${name}]`]) {
    if (text.startsWith(marker, cursor)) {
      return cursor + marker.length;
    }
  }
  return null;
}
/**
 * Parses one full tool-call block (opening tag, JSON payload, closing marker)
 * at `start`. Returns null when any stage fails or the tool name is not in
 * the optional allow-list.
 */
function parseBlockAt(
  text: string,
  start: number,
  options?: ParseOptions,
): PlainTextToolCallBlock | null {
  const opening = parseOpening(text, start);
  if (!opening) {
    return null;
  }
  if (options?.allowedToolNames) {
    const allowed = new Set(options.allowedToolNames);
    if (!allowed.has(opening.name)) {
      return null;
    }
  }
  const maxPayloadBytes = options?.maxPayloadBytes ?? DEFAULT_MAX_PAYLOAD_BYTES;
  const payload = consumeJsonObject(text, opening.end, maxPayloadBytes);
  if (!payload) {
    return null;
  }
  const end = parseClosing(text, payload.end, opening.name);
  if (end === null) {
    return null;
  }
  return {
    arguments: payload.value,
    end,
    name: opening.name,
    raw: text.slice(start, end),
    start,
  };
}
/**
 * Parses `text` as a whitespace-separated sequence of tool-call blocks and
 * nothing else. Returns the blocks in order, or null when the text contains
 * anything that is not a valid block (or contains no blocks at all).
 */
export function parseStandalonePlainTextToolCallBlocks(
  text: string,
  options?: ParseOptions,
): PlainTextToolCallBlock[] | null {
  const parsed: PlainTextToolCallBlock[] = [];
  for (let cursor = skipWhitespace(text, 0); cursor < text.length; ) {
    const block = parseBlockAt(text, cursor, options);
    if (!block) {
      // Any non-block content makes the whole text non-standalone.
      return null;
    }
    parsed.push(block);
    cursor = skipWhitespace(text, block.end);
  }
  return parsed.length > 0 ? parsed : null;
}
/**
 * Removes every tool-call block that starts at the beginning of a line (after
 * optional spaces/tabs) from `text`; the block and the indentation preceding
 * it on that line are dropped, all other content is kept verbatim. Returns the
 * input unchanged when it cannot possibly contain a block.
 */
export function stripPlainTextToolCallBlocks(text: string): string {
  // Cheap pre-check: no [name] pattern means nothing to strip.
  if (!text || !/\[[A-Za-z0-9_-]+\]/.test(text)) {
    return text;
  }
  let result = ""; // Accumulates the kept spans.
  let cursor = 0; // Start of the next un-copied span of the input.
  let index = 0; // Scan position.
  while (index < text.length) {
    // Blocks are only recognized at the start of a line.
    const lineStart = index === 0 || text[index - 1] === "\n";
    if (!lineStart) {
      index += 1;
      continue;
    }
    // Allow horizontal indentation before the opening tag.
    const blockStart = skipHorizontalWhitespace(text, index);
    const block = parseBlockAt(text, blockStart);
    if (!block) {
      index += 1;
      continue;
    }
    // Emit everything before this line, then jump past the block; the
    // indentation between `index` and `blockStart` is dropped with it.
    result += text.slice(cursor, index);
    cursor = block.end;
    index = block.end;
  }
  result += text.slice(cursor);
  return result;
}
// NOTE(review): re-exports the shared SDK implementations under the same
// public names as the local ones above — merged-diff residue. Also note the
// SDK exports `PlainTextToolCallParseOptions` where the local alias is the
// private `ParseOptions`.
export {
  parseStandalonePlainTextToolCallBlocks,
  stripPlainTextToolCallBlocks,
  type PlainTextToolCallBlock,
  type PlainTextToolCallParseOptions,
} from "../../plugin-sdk/tool-payload.js";