mirror of
https://github.com/openclaw/openclaw.git
synced 2026-03-12 07:20:45 +00:00
* docs: add ACP thread-bound agents plan doc * docs: expand ACP implementation specification * feat(acp): route ACP sessions through core dispatch and lifecycle cleanup * feat(acp): add /acp commands and Discord spawn gate * ACP: add acpx runtime plugin backend * fix(subagents): defer transient lifecycle errors before announce * Agents: harden ACP sessions_spawn and tighten spawn guidance * Agents: require explicit ACP target for runtime spawns * docs: expand ACP control-plane implementation plan * ACP: harden metadata seeding and spawn guidance * ACP: centralize runtime control-plane manager and fail-closed dispatch * ACP: harden runtime manager and unify spawn helpers * Commands: route ACP sessions through ACP runtime in agent command * ACP: require persisted metadata for runtime spawns * Sessions: preserve ACP metadata when updating entries * Plugins: harden ACP backend registry across loaders * ACPX: make availability probe compatible with adapters * E2E: add manual Discord ACP plain-language smoke script * ACPX: preserve streamed spacing across Discord delivery * Docs: add ACP Discord streaming strategy * ACP: harden Discord stream buffering for thread replies * ACP: reuse shared block reply pipeline for projector * ACP: unify streaming config and adopt coalesceIdleMs * Docs: add temporary ACP production hardening plan * Docs: trim temporary ACP hardening plan goals * Docs: gate ACP thread controls by backend capabilities * ACP: add capability-gated runtime controls and /acp operator commands * Docs: remove temporary ACP hardening plan * ACP: fix spawn target validation and close cache cleanup * ACP: harden runtime dispatch and recovery paths * ACP: split ACP command/runtime internals and centralize policy * ACP: harden runtime lifecycle, validation, and observability * ACP: surface runtime and backend session IDs in thread bindings * docs: add temp plan for binding-service migration * ACP: migrate thread binding flows to SessionBindingService * ACP: address 
review feedback and preserve prompt wording * ACPX plugin: pin runtime dependency and prefer bundled CLI * Discord: complete binding-service migration cleanup and restore ACP plan * Docs: add standalone ACP agents guide * ACP: route harness intents to thread-bound ACP sessions * ACP: fix spawn thread routing and queue-owner stall * ACP: harden startup reconciliation and command bypass handling * ACP: fix dispatch bypass type narrowing * ACP: align runtime metadata to agentSessionId * ACP: normalize session identifier handling and labels * ACP: mark thread banner session ids provisional until first reply * ACP: stabilize session identity mapping and startup reconciliation * ACP: add resolved session-id notices and cwd in thread intros * Discord: prefix thread meta notices consistently * Discord: unify ACP/thread meta notices with gear prefix * Discord: split thread persona naming from meta formatting * Extensions: bump acpx plugin dependency to 0.1.9 * Agents: gate ACP prompt guidance behind acp.enabled * Docs: remove temp experiment plan docs * Docs: scope streaming plan to holy grail refactor * Docs: refactor ACP agents guide for human-first flow * Docs/Skill: add ACP feature-flag guidance and direct acpx telephone-game flow * Docs/Skill: add OpenCode and Pi to ACP harness lists * Docs/Skill: align ACP harness list with current acpx registry * Dev/Test: move ACP plain-language smoke script and mark as keep * Docs/Skill: reorder ACP harness lists with Pi first * ACP: split control-plane manager into core/types/utils modules * Docs: refresh ACP thread-bound agents plan * ACP: extract dispatch lane and split manager domains * ACP: centralize binding context and remove reverse deps * Infra: unify system message formatting * ACP: centralize error boundaries and session id rendering * ACP: enforce init concurrency cap and strict meta clear * Tests: fix ACP dispatch binding mock typing * Tests: fix Discord thread-binding mock drift and ACP request id * ACP: gate slash 
bypass and persist cleared overrides * ACPX: await pre-abort cancel before runTurn return * Extension: pin acpx runtime dependency to 0.1.11 * Docs: add pinned acpx install strategy for ACP extension * Extensions/acpx: enforce strict local pinned startup * Extensions/acpx: tighten acp-router install guidance * ACPX: retry runtime test temp-dir cleanup * Extensions/acpx: require proactive ACPX repair for thread spawns * Extensions/acpx: require restart offer after acpx reinstall * extensions/acpx: remove workspace protocol devDependency * extensions/acpx: bump pinned acpx to 0.1.13 * extensions/acpx: sync lockfile after dependency bump * ACPX: make runtime spawn Windows-safe * fix: align doctor-config-flow repair tests with default-account migration (#23580) (thanks @osolmaz)
406 lines
12 KiB
JavaScript
406 lines
12 KiB
JavaScript
#!/usr/bin/env node
|
|
|
|
import { promises as fs } from "node:fs";
|
|
import path from "node:path";
|
|
import { fileURLToPath } from "node:url";
|
|
import ts from "typescript";
|
|
|
|
// Absolute path to the repository root; this script lives one directory below it.
const repoRoot = path.resolve(path.dirname(fileURLToPath(import.meta.url)), "..");

// Channel-agnostic ACP core sources: these files/directories must never
// reference concrete channel modules, ids, or config paths.
const acpCoreProtectedSources = [
  path.join(repoRoot, "src", "acp"),
  path.join(repoRoot, "src", "agents", "acp-spawn.ts"),
  path.join(repoRoot, "src", "auto-reply", "reply", "commands-acp"),
  path.join(repoRoot, "src", "infra", "outbound", "conversation-id.ts"),
];

// Channel-core files that must not import (reverse-depend) on concrete
// channel adapter modules.
const channelCoreProtectedSources = [
  path.join(repoRoot, "src", "channels", "thread-bindings-policy.ts"),
  path.join(repoRoot, "src", "channels", "thread-bindings-messages.ts"),
];

// ACP sources whose user-facing strings must not mention channel names.
const acpUserFacingTextSources = [
  path.join(repoRoot, "src", "auto-reply", "reply", "commands-acp"),
];

// Sources that must obtain the system mark via the shared formatter rather
// than hardcoding the literal glyph.
const systemMarkLiteralGuardSources = [
  path.join(repoRoot, "src", "auto-reply", "reply", "commands-acp"),
  path.join(repoRoot, "src", "auto-reply", "reply", "dispatch-acp.ts"),
  path.join(repoRoot, "src", "auto-reply", "reply", "directive-handling.shared.ts"),
  path.join(repoRoot, "src", "channels", "thread-bindings-messages.ts"),
];

// Known channel identifiers; used both for literal comparisons and for
// building the module-path segment matcher below.
const channelIds = [
  "bluebubbles",
  "discord",
  "googlechat",
  "imessage",
  "irc",
  "line",
  "matrix",
  "msteams",
  "signal",
  "slack",
  "telegram",
  "web",
  "whatsapp",
  "zalo",
  "zalouser",
];

const channelIdSet = new Set(channelIds);
// Matches a channel id appearing as a whole segment in a module specifier,
// delimited by ".", "_", "/", "-", or the start/end of the string.
const channelSegmentRe = new RegExp(`(^|[._/-])(?:${channelIds.join("|")})([._/-]|$)`);
// Equality operators (strict and loose) that trigger the channel-literal
// comparison check.
const comparisonOperators = new Set([
  ts.SyntaxKind.EqualsEqualsEqualsToken,
  ts.SyntaxKind.ExclamationEqualsEqualsToken,
  ts.SyntaxKind.EqualsEqualsToken,
  ts.SyntaxKind.ExclamationEqualsToken,
]);

// Justified allowlist entries, keyed either "ruleId:relativePath" or a bare
// relative path (which suppresses every rule for that file).
const allowedViolations = new Set([]);
|
|
|
|
// True when the path looks like a test or test-support file, which the
// boundary scan deliberately skips.
function isTestLikeFile(filePath) {
  const testLikeSuffixes = [
    ".test.ts",
    ".test-utils.ts",
    ".test-harness.ts",
    ".e2e-harness.ts",
  ];
  return testLikeSuffixes.some((suffix) => filePath.endsWith(suffix));
}
|
|
|
|
// Recursively gathers non-test .ts files under targetPath. A file target is
// returned as a one-element list when it qualifies; directories are walked
// depth-first in readdir order.
async function collectTypeScriptFiles(targetPath) {
  const stat = await fs.stat(targetPath);
  if (stat.isFile()) {
    const qualifies = targetPath.endsWith(".ts") && !isTestLikeFile(targetPath);
    return qualifies ? [targetPath] : [];
  }

  const collected = [];
  const entries = await fs.readdir(targetPath, { withFileTypes: true });
  for (const entry of entries) {
    const entryPath = path.join(targetPath, entry.name);
    if (entry.isDirectory()) {
      collected.push(...(await collectTypeScriptFiles(entryPath)));
    } else if (entry.isFile() && entryPath.endsWith(".ts") && !isTestLikeFile(entryPath)) {
      collected.push(entryPath);
    }
  }
  return collected;
}
|
|
|
|
// Converts a node's start position into a 1-based line number for reporting.
function toLine(sourceFile, node) {
  const startPos = node.getStart(sourceFile);
  const { line } = sourceFile.getLineAndCharacterOfPosition(startPos);
  return line + 1;
}
|
|
|
|
// True when the expression reads a property literally named "channels",
// either as dotted access (x.channels) or string indexing (x["channels"]).
function isChannelsPropertyAccess(node) {
  if (ts.isPropertyAccessExpression(node)) {
    return node.name.text === "channels";
  }
  const isStringIndexed =
    ts.isElementAccessExpression(node) && ts.isStringLiteral(node.argumentExpression);
  return isStringIndexed && node.argumentExpression.text === "channels";
}
|
|
|
|
// Extracts the text of a plain string literal or a template literal with no
// substitutions; returns null for every other node kind.
function readStringLiteral(node) {
  const isPlainString =
    ts.isStringLiteral(node) || ts.isNoSubstitutionTemplateLiteral(node);
  return isPlainString ? node.text : null;
}
|
|
|
|
// True when the node is a string/template literal whose text is a known
// channel id. Uses an explicit null check rather than truthiness so an empty
// string literal is handled by the Set lookup, not a falsy short-circuit.
function isChannelLiteralNode(node) {
  const text = readStringLiteral(node);
  return text !== null && channelIdSet.has(text);
}
|
|
|
|
// True when a module specifier contains a channel id as a path/name segment.
// Backslashes are normalized to forward slashes so Windows-style paths match.
function matchesChannelModuleSpecifier(specifier) {
  const normalized = specifier.replaceAll("\\", "/");
  return channelSegmentRe.test(normalized);
}
|
|
|
|
// Returns the textual name of a simple property name node (identifier,
// string, or numeric literal); computed and private names yield null.
function getPropertyNameText(name) {
  const isSimpleName =
    ts.isIdentifier(name) || ts.isStringLiteral(name) || ts.isNumericLiteral(name);
  return isSimpleName ? name.text : null;
}
|
|
|
|
// Channel names as they may appear in user-facing prose (case-insensitive,
// whole-word; "google chat" tolerates optional whitespace).
const userFacingChannelNameRe =
  /\b(?:discord|telegram|slack|signal|imessage|whatsapp|google\s*chat|irc|line|zalo|matrix|msteams|bluebubbles)\b/i;
// Gear glyph reserved for system messages; guarded sources must obtain it
// from the shared formatter instead of hardcoding this literal.
const systemMarkLiteral = "⚙️";
|
|
|
|
// True when a string node is used as a module specifier (static import/export
// or the first argument of a dynamic import()), so text-content rules can
// skip it.
function isModuleSpecifierStringNode(node) {
  const { parent } = node;
  if (ts.isImportDeclaration(parent) || ts.isExportDeclaration(parent)) {
    return true;
  }
  const isDynamicImportArgument =
    ts.isCallExpression(parent) &&
    parent.expression.kind === ts.SyntaxKind.ImportKeyword &&
    parent.arguments[0] === node;
  return isDynamicImportArgument;
}
|
|
|
|
/**
 * Scans TypeScript source text for channel-specific references that violate
 * the channel-agnostic boundary. Each enabled check walks the full AST:
 *  - module specifiers (static import, re-export, dynamic import()) that
 *    name a channel module;
 *  - config-path accesses like `channels.discord` or `channels["discord"]`;
 *  - equality comparisons against channel-id string literals;
 *  - `channel: "<id>"` property assignments.
 *
 * @param content  Source text to scan (parsed, never executed).
 * @param fileName Name used for the synthetic source file in diagnostics.
 * @param options  Per-check toggles (all default on) and an optional
 *                 `moduleSpecifierMatcher` override.
 * @returns Array of `{ line, reason }` violation records (1-based lines).
 */
export function findChannelAgnosticBoundaryViolations(
  content,
  fileName = "source.ts",
  options = {},
) {
  const checkModuleSpecifiers = options.checkModuleSpecifiers ?? true;
  const checkConfigPaths = options.checkConfigPaths ?? true;
  const checkChannelComparisons = options.checkChannelComparisons ?? true;
  const checkChannelAssignments = options.checkChannelAssignments ?? true;
  const moduleSpecifierMatcher = options.moduleSpecifierMatcher ?? matchesChannelModuleSpecifier;

  // setParentNodes=true so string nodes can be classified by their parent.
  const sourceFile = ts.createSourceFile(fileName, content, ts.ScriptTarget.Latest, true);
  const violations = [];

  const visit = (node) => {
    // Static import: `import ... from "<specifier>"`.
    if (
      checkModuleSpecifiers &&
      ts.isImportDeclaration(node) &&
      ts.isStringLiteral(node.moduleSpecifier)
    ) {
      const specifier = node.moduleSpecifier.text;
      if (moduleSpecifierMatcher(specifier)) {
        violations.push({
          line: toLine(sourceFile, node.moduleSpecifier),
          reason: `imports channel module "${specifier}"`,
        });
      }
    }

    // Re-export: `export ... from "<specifier>"` (moduleSpecifier is optional
    // on export declarations, hence the extra guard).
    if (
      checkModuleSpecifiers &&
      ts.isExportDeclaration(node) &&
      node.moduleSpecifier &&
      ts.isStringLiteral(node.moduleSpecifier)
    ) {
      const specifier = node.moduleSpecifier.text;
      if (moduleSpecifierMatcher(specifier)) {
        violations.push({
          line: toLine(sourceFile, node.moduleSpecifier),
          reason: `re-exports channel module "${specifier}"`,
        });
      }
    }

    // Dynamic import: `import("<specifier>")` — only a literal first argument
    // can be checked statically.
    if (
      checkModuleSpecifiers &&
      ts.isCallExpression(node) &&
      node.expression.kind === ts.SyntaxKind.ImportKeyword &&
      node.arguments.length > 0 &&
      ts.isStringLiteral(node.arguments[0])
    ) {
      const specifier = node.arguments[0].text;
      if (moduleSpecifierMatcher(specifier)) {
        violations.push({
          line: toLine(sourceFile, node.arguments[0]),
          reason: `dynamically imports channel module "${specifier}"`,
        });
      }
    }

    // Config path via dotted access: `<expr>.channels.<channelId>`.
    if (
      checkConfigPaths &&
      ts.isPropertyAccessExpression(node) &&
      channelIdSet.has(node.name.text)
    ) {
      if (isChannelsPropertyAccess(node.expression)) {
        violations.push({
          line: toLine(sourceFile, node.name),
          reason: `references config path "channels.${node.name.text}"`,
        });
      }
    }

    // Config path via element access: `<expr>.channels["<channelId>"]`.
    if (
      checkConfigPaths &&
      ts.isElementAccessExpression(node) &&
      ts.isStringLiteral(node.argumentExpression) &&
      channelIdSet.has(node.argumentExpression.text)
    ) {
      if (isChannelsPropertyAccess(node.expression)) {
        violations.push({
          line: toLine(sourceFile, node.argumentExpression),
          reason: `references config path "channels[${JSON.stringify(node.argumentExpression.text)}]"`,
        });
      }
    }

    // Equality comparison against a channel-id literal on either side.
    if (
      checkChannelComparisons &&
      ts.isBinaryExpression(node) &&
      comparisonOperators.has(node.operatorToken.kind)
    ) {
      if (isChannelLiteralNode(node.left) || isChannelLiteralNode(node.right)) {
        const leftText = node.left.getText(sourceFile);
        const rightText = node.right.getText(sourceFile);
        violations.push({
          line: toLine(sourceFile, node.operatorToken),
          reason: `compares with channel id literal (${leftText} ${node.operatorToken.getText(sourceFile)} ${rightText})`,
        });
      }
    }

    // Object-literal assignment of a channel-id literal to a "channel" key.
    if (checkChannelAssignments && ts.isPropertyAssignment(node)) {
      const propName = getPropertyNameText(node.name);
      if (propName === "channel" && isChannelLiteralNode(node.initializer)) {
        violations.push({
          line: toLine(sourceFile, node.initializer),
          reason: `assigns channel id literal to "channel" (${node.initializer.getText(sourceFile)})`,
        });
      }
    }

    ts.forEachChild(node, visit);
  };

  visit(sourceFile);
  return violations;
}
|
|
|
|
// Reverse-dependency variant of the boundary scan: only module specifiers are
// checked, so channel-core files may mention channel ids in logic/config but
// must never import concrete channel modules.
export function findChannelCoreReverseDependencyViolations(content, fileName = "source.ts") {
  const moduleSpecifiersOnly = {
    checkModuleSpecifiers: true,
    checkConfigPaths: false,
    checkChannelComparisons: false,
    checkChannelAssignments: false,
    moduleSpecifierMatcher: matchesChannelModuleSpecifier,
  };
  return findChannelAgnosticBoundaryViolations(content, fileName, moduleSpecifiersOnly);
}
|
|
|
|
// Flags string literals that mention a channel name in user-facing text.
// Module specifiers are exempt since imports legitimately name channels.
export function findAcpUserFacingChannelNameViolations(content, fileName = "source.ts") {
  const sourceFile = ts.createSourceFile(fileName, content, ts.ScriptTarget.Latest, true);
  const violations = [];

  const walk = (node) => {
    const literalText = readStringLiteral(node);
    const mentionsChannel =
      literalText !== null &&
      literalText.length > 0 &&
      userFacingChannelNameRe.test(literalText) &&
      !isModuleSpecifierStringNode(node);
    if (mentionsChannel) {
      violations.push({
        line: toLine(sourceFile, node),
        reason: `user-facing text references channel name (${JSON.stringify(literalText)})`,
      });
    }
    ts.forEachChild(node, walk);
  };

  walk(sourceFile);
  return violations;
}
|
|
|
|
// Flags string literals that embed the reserved system-mark glyph directly;
// guarded sources must use the shared formatter instead. Module specifiers
// are exempt.
export function findSystemMarkLiteralViolations(content, fileName = "source.ts") {
  const sourceFile = ts.createSourceFile(fileName, content, ts.ScriptTarget.Latest, true);
  const violations = [];

  const walk = (node) => {
    const literalText = readStringLiteral(node);
    const hasHardcodedMark =
      literalText !== null &&
      literalText.length > 0 &&
      literalText.includes(systemMarkLiteral) &&
      !isModuleSpecifierStringNode(node);
    if (hasHardcodedMark) {
      violations.push({
        line: toLine(sourceFile, node),
        reason: `hardcoded system mark literal (${JSON.stringify(literalText)})`,
      });
    }
    ts.forEachChild(node, walk);
  };

  walk(sourceFile);
  return violations;
}
|
|
|
|
// Rule table: each entry pairs a rule id with the protected source roots it
// guards and the scanner applied to every file under them.
const boundaryRuleSets = [
  {
    // Full boundary scan (imports, config paths, comparisons, assignments).
    id: "acp-core",
    sources: acpCoreProtectedSources,
    scan: (content, fileName) => findChannelAgnosticBoundaryViolations(content, fileName),
  },
  {
    // Import-only scan: channel-core must not depend on channel adapters.
    id: "channel-core-reverse-deps",
    sources: channelCoreProtectedSources,
    scan: (content, fileName) => findChannelCoreReverseDependencyViolations(content, fileName),
  },
  {
    // User-facing strings must stay channel-neutral.
    id: "acp-user-facing-text",
    sources: acpUserFacingTextSources,
    scan: (content, fileName) => findAcpUserFacingChannelNameViolations(content, fileName),
  },
  {
    // The system-mark glyph must come from the shared formatter.
    id: "system-mark-literal-usage",
    sources: systemMarkLiteralGuardSources,
    scan: (content, fileName) => findSystemMarkLiteralViolations(content, fileName),
  },
];
|
|
|
|
// Entry point: runs every rule set over its protected sources, prints any
// violations to stderr, and exits non-zero when the boundary is broken.
// Missing source paths are tolerated (the tree may not contain them all).
export async function main() {
  const findings = [];

  for (const ruleSet of boundaryRuleSets) {
    // Collect files for all roots of this rule set in parallel.
    const fileLists = await Promise.all(
      ruleSet.sources.map(async (sourcePath) => {
        try {
          return await collectTypeScriptFiles(sourcePath);
        } catch (error) {
          const pathIsMissing =
            error && typeof error === "object" && "code" in error && error.code === "ENOENT";
          if (pathIsMissing) {
            return [];
          }
          throw error;
        }
      }),
    );

    for (const filePath of fileLists.flat()) {
      const relativeFile = path.relative(repoRoot, filePath);
      // Allowlist keys: rule-scoped ("<ruleId>:<path>") or file-wide ("<path>").
      const isAllowlisted =
        allowedViolations.has(`${ruleSet.id}:${relativeFile}`) ||
        allowedViolations.has(relativeFile);
      if (isAllowlisted) {
        continue;
      }
      const content = await fs.readFile(filePath, "utf8");
      for (const { line, reason } of ruleSet.scan(content, relativeFile)) {
        findings.push(`${ruleSet.id} ${relativeFile}:${line}: ${reason}`);
      }
    }
  }

  if (findings.length === 0) {
    return;
  }

  console.error("Found channel-specific references in channel-agnostic sources:");
  for (const finding of findings) {
    console.error(`- ${finding}`);
  }
  console.error(
    "Move channel-specific logic to channel adapters or add a justified allowlist entry.",
  );
  process.exit(1);
}
|
|
|
|
// Run main() only when this file is executed directly (node script.mjs),
// not when it is imported for its exported scanners.
const isDirectExecution = (() => {
  const entryScript = process.argv[1];
  return entryScript ? path.resolve(entryScript) === fileURLToPath(import.meta.url) : false;
})();

if (isDirectExecution) {
  main().catch((error) => {
    console.error(error);
    process.exit(1);
  });
}
|