mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-06 06:50:43 +00:00
fix(memory-wiki): route bridge CLI through gateway
Route Memory Wiki bridge-mode status, doctor, and bridge import CLI paths through Gateway RPC when bridge artifact reads are active, while preserving local/offline fallbacks. Harden Gateway CLI rendering and imported-source writes: validate RPC response shapes, bound response strings before rendering/JSON serialization, sanitize/escape terminal-controlled output, avoid redundant JSON forwarding, and replace imported source pages through a temp-file rename path with symlink and hardlink regressions. Fixes #65722 Fixes #65976 Fixes #66082 Fixes #67979 Fixes #68371 Fixes #68828 Fixes #69019 Fixes #70181 Fixes #70242 Fixes #70842 Thanks @moorsecopers99, @vincentkoc, and @prasad-yashdeep.
This commit is contained in:
@@ -448,6 +448,7 @@ Docs: https://docs.openclaw.ai
|
||||
- TTS/SecretRef: resolve `messages.tts.providers.*.apiKey` from the active runtime snapshot so SecretRef-backed MiniMax and other TTS provider keys work in runtime reply/audio paths. Fixes #68690. Thanks @joshavant.
|
||||
- Gateway/install: surface systemd user-bus recovery hints during Linux service activation and retry via the target user scope when `systemctl --user` reports no-medium bus failures, without letting stale `SUDO_USER` override `sudo -u` installs. Fixes #39673; refs #44417 and #63561. Thanks @Arbor4, @myrsu, @mssteuer, and @boyuaner.
|
||||
- CLI/nodes: make unfiltered `openclaw nodes list` prefer the effective paired-node view used by `nodes status` while preserving pending rows, pairing-scope fallback, terminal-safe table rendering, and paired JSON metadata. Fixes #46871; carries forward #65772 through the ProjectClownfish #72619 repair. Thanks @skainguyen1412.
|
||||
- Memory Wiki/CLI: route active bridge-mode status, doctor, and bridge imports through Gateway RPC so CLI checks use the runtime memory plugin context while disabled bridge imports stay local/offline. Carries forward #67208 and #71479; related #70185. Thanks @moorsecopers99, @vincentkoc, and @prasad-yashdeep.
|
||||
- CLI/startup: read generated startup metadata from the bundled `dist` layout before falling back to live help rendering, so root/browser help and channel-option bootstrap stay on the fast path. Thanks @vincentkoc.
|
||||
- Feishu/Lark: stop treating broadcast-only `@all`/`@_all` messages as bot mentions while preserving direct bot mentions, including messages that also include `@all`. Fixes #37706. Thanks @JosepLee.
|
||||
- CLI/help: treat positional `help` invocations like `openclaw channels help` as help paths for startup gating, avoiding model/auth warmup while preserving positional arguments such as `openclaw docs help`. Thanks @gumadeiras.
|
||||
|
||||
@@ -68,10 +68,18 @@ Inspect current vault mode, health, and Obsidian CLI availability.
|
||||
Use this first when you are unsure whether the vault is initialized, bridge mode
|
||||
is healthy, or Obsidian integration is available.
|
||||
|
||||
When bridge mode is active and configured to read memory artifacts, this command
|
||||
queries the running Gateway so it sees the same active memory plugin context as
|
||||
agent/runtime memory.
|
||||
|
||||
### `wiki doctor`
|
||||
|
||||
Run wiki health checks and surface configuration or vault problems.
|
||||
|
||||
When bridge mode is active and configured to read memory artifacts, this command
|
||||
queries the running Gateway before building the report. Disabled bridge imports
|
||||
and bridge configs that do not read memory artifacts remain local/offline.
|
||||
|
||||
Typical issues include:
|
||||
|
||||
- bridge mode enabled without public memory artifacts
|
||||
@@ -168,6 +176,11 @@ source pages.
|
||||
Use this in `bridge` mode when you want the latest exported memory artifacts
|
||||
pulled into the wiki vault.
|
||||
|
||||
For active bridge artifact reads, the CLI routes the import through Gateway RPC
|
||||
so the import uses the runtime memory plugin context. If bridge imports are
|
||||
disabled or artifact reads are turned off, the command keeps the local/offline
|
||||
zero-import behavior.
|
||||
|
||||
### `wiki unsafe-local import`
|
||||
|
||||
Import from explicitly configured local paths in `unsafe-local` mode.
|
||||
|
||||
@@ -65,6 +65,12 @@ If bridge mode reports zero exported artifacts, the active memory plugin is not
|
||||
currently exposing public bridge inputs yet. Run `openclaw wiki doctor` first,
|
||||
then confirm the active memory plugin supports public artifacts.
|
||||
|
||||
When bridge mode is active and `bridge.readMemoryArtifacts` is enabled,
|
||||
`openclaw wiki status`, `openclaw wiki doctor`, and `openclaw wiki bridge
|
||||
import` read through the running Gateway. That keeps CLI bridge checks aligned
|
||||
with the runtime memory plugin context. If bridge is disabled or artifact reads
|
||||
are turned off, those commands keep their local/offline behavior.
|
||||
|
||||
## Vault modes
|
||||
|
||||
`memory-wiki` supports three vault modes:
|
||||
|
||||
@@ -300,6 +300,97 @@ describe("syncMemoryWikiBridgeSources", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("refuses to overwrite bridge source pages through vault symlinks", async () => {
|
||||
const workspaceDir = await createBridgeWorkspace("symlink-workspace");
|
||||
const { rootDir: vaultDir, config } = await createVault({
|
||||
rootDir: nextCaseRoot("symlink-vault"),
|
||||
config: {
|
||||
vaultMode: "bridge",
|
||||
bridge: {
|
||||
enabled: true,
|
||||
readMemoryArtifacts: true,
|
||||
indexMemoryRoot: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
const memoryPath = path.join(workspaceDir, "MEMORY.md");
|
||||
await fs.writeFile(memoryPath, "# Durable Memory\n", "utf8");
|
||||
registerBridgeArtifacts([
|
||||
{
|
||||
kind: "memory-root",
|
||||
workspaceDir,
|
||||
relativePath: "MEMORY.md",
|
||||
absolutePath: memoryPath,
|
||||
agentIds: ["main"],
|
||||
contentType: "markdown",
|
||||
},
|
||||
]);
|
||||
const appConfig: OpenClawConfig = {
|
||||
agents: {
|
||||
list: [{ id: "main", default: true, workspace: workspaceDir }],
|
||||
},
|
||||
};
|
||||
const first = await syncMemoryWikiBridgeSources({ config, appConfig });
|
||||
const pagePath = first.pagePaths[0] ?? "";
|
||||
const pageAbsPath = path.join(vaultDir, pagePath);
|
||||
const externalTarget = path.join(workspaceDir, "outside.md");
|
||||
await fs.writeFile(externalTarget, "external target\n", "utf8");
|
||||
await fs.rm(pageAbsPath);
|
||||
await fs.symlink(externalTarget, pageAbsPath);
|
||||
await fs.writeFile(memoryPath, "# Updated Durable Memory\n", "utf8");
|
||||
|
||||
await expect(syncMemoryWikiBridgeSources({ config, appConfig })).rejects.toThrow(
|
||||
"Refusing to write imported source page through symlink",
|
||||
);
|
||||
await expect(fs.readFile(externalTarget, "utf8")).resolves.toBe("external target\n");
|
||||
});
|
||||
|
||||
it("replaces bridge source page hardlinks without clobbering their target", async () => {
|
||||
const workspaceDir = await createBridgeWorkspace("hardlink-workspace");
|
||||
const { rootDir: vaultDir, config } = await createVault({
|
||||
rootDir: nextCaseRoot("hardlink-vault"),
|
||||
config: {
|
||||
vaultMode: "bridge",
|
||||
bridge: {
|
||||
enabled: true,
|
||||
readMemoryArtifacts: true,
|
||||
indexMemoryRoot: true,
|
||||
},
|
||||
},
|
||||
});
|
||||
const memoryPath = path.join(workspaceDir, "MEMORY.md");
|
||||
await fs.writeFile(memoryPath, "# Durable Memory\n", "utf8");
|
||||
registerBridgeArtifacts([
|
||||
{
|
||||
kind: "memory-root",
|
||||
workspaceDir,
|
||||
relativePath: "MEMORY.md",
|
||||
absolutePath: memoryPath,
|
||||
agentIds: ["main"],
|
||||
contentType: "markdown",
|
||||
},
|
||||
]);
|
||||
const appConfig: OpenClawConfig = {
|
||||
agents: {
|
||||
list: [{ id: "main", default: true, workspace: workspaceDir }],
|
||||
},
|
||||
};
|
||||
const first = await syncMemoryWikiBridgeSources({ config, appConfig });
|
||||
const pagePath = first.pagePaths[0] ?? "";
|
||||
const pageAbsPath = path.join(vaultDir, pagePath);
|
||||
const externalTarget = path.join(workspaceDir, "outside-hardlink.md");
|
||||
await fs.writeFile(externalTarget, "external target\n", "utf8");
|
||||
await fs.rm(pageAbsPath);
|
||||
await fs.link(externalTarget, pageAbsPath);
|
||||
await fs.writeFile(memoryPath, "# Updated Durable Memory\n", "utf8");
|
||||
|
||||
const second = await syncMemoryWikiBridgeSources({ config, appConfig });
|
||||
|
||||
expect(second.updatedCount).toBe(1);
|
||||
await expect(fs.readFile(externalTarget, "utf8")).resolves.toBe("external target\n");
|
||||
await expect(fs.readFile(pageAbsPath, "utf8")).resolves.toContain("# Updated Durable Memory");
|
||||
});
|
||||
|
||||
it("caps composed bridge source filenames to the filesystem component limit", async () => {
|
||||
const workspaceDir = await createBridgeWorkspace(`${"漢".repeat(50)}-workspace`);
|
||||
const { rootDir: vaultDir, config } = await createVault({
|
||||
|
||||
@@ -3,11 +3,25 @@ import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { Command } from "commander";
|
||||
import { afterAll, afterEach, beforeAll, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { registerWikiCli, runWikiChatGptImport, runWikiChatGptRollback } from "./cli.js";
|
||||
import {
|
||||
registerWikiCli,
|
||||
runWikiBridgeImport,
|
||||
runWikiChatGptImport,
|
||||
runWikiChatGptRollback,
|
||||
runWikiDoctor,
|
||||
runWikiStatus,
|
||||
} from "./cli.js";
|
||||
import type { MemoryWikiPluginConfig } from "./config.js";
|
||||
import { parseWikiMarkdown, renderWikiMarkdown } from "./markdown.js";
|
||||
import type { MemoryWikiDoctorReport, MemoryWikiStatus } from "./status.js";
|
||||
import { createMemoryWikiTestHarness } from "./test-helpers.js";
|
||||
|
||||
const callGatewayFromCliMock = vi.hoisted(() => vi.fn());
|
||||
|
||||
vi.mock("openclaw/plugin-sdk/gateway-runtime", () => ({
|
||||
callGatewayFromCli: callGatewayFromCliMock,
|
||||
}));
|
||||
|
||||
const { createVault } = createMemoryWikiTestHarness();
|
||||
let suiteRoot = "";
|
||||
let caseIndex = 0;
|
||||
@@ -24,6 +38,7 @@ describe("memory-wiki cli", () => {
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
callGatewayFromCliMock.mockReset();
|
||||
vi.spyOn(process.stdout, "write").mockImplementation(
|
||||
(() => true) as typeof process.stdout.write,
|
||||
);
|
||||
@@ -88,6 +103,45 @@ describe("memory-wiki cli", () => {
|
||||
return exportDir;
|
||||
}
|
||||
|
||||
function createGatewayStatus(config: {
|
||||
vault: { path: string };
|
||||
bridge: MemoryWikiStatus["bridge"];
|
||||
}): MemoryWikiStatus {
|
||||
return {
|
||||
vaultMode: "bridge",
|
||||
renderMode: "native",
|
||||
vaultPath: config.vault.path,
|
||||
vaultExists: true,
|
||||
bridge: config.bridge,
|
||||
bridgePublicArtifactCount: 2,
|
||||
obsidianCli: {
|
||||
enabled: false,
|
||||
requested: false,
|
||||
available: false,
|
||||
command: null,
|
||||
},
|
||||
unsafeLocal: {
|
||||
allowPrivateMemoryCoreAccess: false,
|
||||
pathCount: 0,
|
||||
},
|
||||
pageCounts: {
|
||||
source: 0,
|
||||
entity: 0,
|
||||
concept: 0,
|
||||
synthesis: 0,
|
||||
report: 0,
|
||||
},
|
||||
sourceCounts: {
|
||||
native: 0,
|
||||
bridge: 0,
|
||||
bridgeEvents: 0,
|
||||
unsafeLocal: 0,
|
||||
other: 0,
|
||||
},
|
||||
warnings: [],
|
||||
};
|
||||
}
|
||||
|
||||
it("registers apply synthesis and writes a synthesis page", async () => {
|
||||
const { rootDir, config } = await createCliVault();
|
||||
const program = new Command();
|
||||
@@ -193,6 +247,226 @@ cli note
|
||||
await program.parseAsync(["wiki", "doctor", "--json"], { from: "user" });
|
||||
|
||||
expect(process.exitCode).toBe(1);
|
||||
expect(callGatewayFromCliMock).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("routes active bridge status and doctor through the gateway", async () => {
|
||||
const { config } = await createCliVault({
|
||||
config: {
|
||||
vaultMode: "bridge",
|
||||
bridge: { enabled: true, readMemoryArtifacts: true },
|
||||
},
|
||||
initialize: true,
|
||||
});
|
||||
const status = createGatewayStatus(config);
|
||||
const report: MemoryWikiDoctorReport = {
|
||||
healthy: false,
|
||||
warningCount: 1,
|
||||
status: {
|
||||
...status,
|
||||
warnings: [
|
||||
{
|
||||
code: "bridge-artifacts-missing",
|
||||
message: "No exported artifacts.",
|
||||
},
|
||||
],
|
||||
},
|
||||
fixes: [
|
||||
{
|
||||
code: "bridge-artifacts-missing",
|
||||
message: "Create memory artifacts.",
|
||||
},
|
||||
],
|
||||
};
|
||||
callGatewayFromCliMock.mockResolvedValueOnce(status).mockResolvedValueOnce(report);
|
||||
|
||||
await expect(runWikiStatus({ config, json: true })).resolves.toBe(status);
|
||||
await expect(runWikiDoctor({ config, json: true })).resolves.toBe(report);
|
||||
|
||||
expect(process.exitCode).toBe(1);
|
||||
expect(callGatewayFromCliMock).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
"wiki.status",
|
||||
{ timeout: "30000" },
|
||||
undefined,
|
||||
{ progress: false },
|
||||
);
|
||||
expect(callGatewayFromCliMock).toHaveBeenNthCalledWith(
|
||||
2,
|
||||
"wiki.doctor",
|
||||
{ timeout: "30000" },
|
||||
undefined,
|
||||
{ progress: false },
|
||||
);
|
||||
});
|
||||
|
||||
it("sanitizes gateway status text output without changing JSON output", async () => {
|
||||
const { config } = await createCliVault({
|
||||
config: {
|
||||
vaultMode: "bridge",
|
||||
bridge: { enabled: true, readMemoryArtifacts: true },
|
||||
},
|
||||
initialize: true,
|
||||
});
|
||||
const unsafeStatus = createGatewayStatus({
|
||||
...config,
|
||||
vault: { path: "\u001B[2J/tmp/wiki\nforged prompt\u202E" },
|
||||
});
|
||||
unsafeStatus.warnings = [
|
||||
{
|
||||
code: "bridge-artifacts-missing",
|
||||
message: "missing artifacts\r\nfake success\u001B[31m\u202E",
|
||||
},
|
||||
];
|
||||
const textOutput: string[] = [];
|
||||
callGatewayFromCliMock.mockResolvedValueOnce(unsafeStatus);
|
||||
|
||||
await runWikiStatus({
|
||||
config,
|
||||
stdout: {
|
||||
write: ((chunk: string) => textOutput.push(chunk) > 0) as NodeJS.WriteStream["write"],
|
||||
},
|
||||
});
|
||||
|
||||
const renderedText = textOutput.join("");
|
||||
expect(renderedText).not.toContain("\u001B");
|
||||
expect(renderedText).not.toContain("\u202E");
|
||||
expect(renderedText).toContain("(/tmp/wiki forged prompt)");
|
||||
expect(renderedText).toContain("- missing artifacts fake success");
|
||||
|
||||
const jsonOutput: string[] = [];
|
||||
callGatewayFromCliMock.mockResolvedValueOnce(unsafeStatus);
|
||||
|
||||
await runWikiStatus({
|
||||
config,
|
||||
json: true,
|
||||
stdout: {
|
||||
write: ((chunk: string) => jsonOutput.push(chunk) > 0) as NodeJS.WriteStream["write"],
|
||||
},
|
||||
});
|
||||
|
||||
const renderedJson = jsonOutput.join("");
|
||||
expect(renderedJson).not.toContain("\u001B");
|
||||
expect(renderedJson).not.toContain("\u202E");
|
||||
expect(renderedJson).not.toContain("\r");
|
||||
expect(renderedJson).toContain("\\u001b[2J/tmp/wiki\\nforged prompt\\u202e");
|
||||
expect(renderedJson).toContain("missing artifacts\\r\\nfake success\\u001b[31m\\u202e");
|
||||
|
||||
const parsed = JSON.parse(renderedJson) as MemoryWikiStatus;
|
||||
expect(parsed.vaultPath).toBe("\u001B[2J/tmp/wiki\nforged prompt\u202E");
|
||||
expect(parsed.warnings[0]?.message).toBe("missing artifacts\r\nfake success\u001B[31m\u202E");
|
||||
});
|
||||
|
||||
it("rejects malformed gateway responses before rendering", async () => {
|
||||
const { config } = await createCliVault({
|
||||
config: {
|
||||
vaultMode: "bridge",
|
||||
bridge: { enabled: true, readMemoryArtifacts: true },
|
||||
},
|
||||
initialize: true,
|
||||
});
|
||||
callGatewayFromCliMock.mockResolvedValueOnce({ vaultMode: "bridge" });
|
||||
|
||||
await expect(runWikiStatus({ config })).rejects.toThrow(
|
||||
"Invalid Gateway response for wiki.status.",
|
||||
);
|
||||
});
|
||||
|
||||
it("rejects oversized gateway strings before rendering", async () => {
|
||||
const { config } = await createCliVault({
|
||||
config: {
|
||||
vaultMode: "bridge",
|
||||
bridge: { enabled: true, readMemoryArtifacts: true },
|
||||
},
|
||||
initialize: true,
|
||||
});
|
||||
const status = createGatewayStatus(config);
|
||||
status.warnings = [
|
||||
{
|
||||
code: "bridge-artifacts-missing",
|
||||
message: "x".repeat(10_001),
|
||||
},
|
||||
];
|
||||
callGatewayFromCliMock.mockResolvedValueOnce(status);
|
||||
|
||||
await expect(runWikiStatus({ config })).rejects.toThrow(
|
||||
"Invalid Gateway response for wiki.status.",
|
||||
);
|
||||
});
|
||||
|
||||
it("truncates gateway status text output after rendering", async () => {
|
||||
const { config } = await createCliVault({
|
||||
config: {
|
||||
vaultMode: "bridge",
|
||||
bridge: { enabled: true, readMemoryArtifacts: true },
|
||||
},
|
||||
initialize: true,
|
||||
});
|
||||
const status = createGatewayStatus(config);
|
||||
status.warnings = [
|
||||
{
|
||||
code: "bridge-artifacts-missing",
|
||||
message: `${"warning ".repeat(500)}tail`,
|
||||
},
|
||||
];
|
||||
const textOutput: string[] = [];
|
||||
callGatewayFromCliMock.mockResolvedValueOnce(status);
|
||||
|
||||
await runWikiStatus({
|
||||
config,
|
||||
stdout: {
|
||||
write: ((chunk: string) => textOutput.push(chunk) > 0) as NodeJS.WriteStream["write"],
|
||||
},
|
||||
});
|
||||
|
||||
const renderedText = textOutput.join("");
|
||||
expect(renderedText).toContain("... [truncated]");
|
||||
expect(renderedText).not.toContain("tail");
|
||||
});
|
||||
|
||||
it("routes active bridge imports through the gateway and keeps disabled bridge imports local", async () => {
|
||||
const active = await createCliVault({
|
||||
config: {
|
||||
vaultMode: "bridge",
|
||||
bridge: { enabled: true, readMemoryArtifacts: true },
|
||||
},
|
||||
initialize: true,
|
||||
});
|
||||
callGatewayFromCliMock.mockResolvedValueOnce({
|
||||
importedCount: 1,
|
||||
updatedCount: 0,
|
||||
skippedCount: 0,
|
||||
removedCount: 0,
|
||||
artifactCount: 1,
|
||||
workspaces: 1,
|
||||
pagePaths: ["sources/bridge-alpha.md"],
|
||||
indexesRefreshed: true,
|
||||
indexUpdatedFiles: ["index.md"],
|
||||
indexRefreshReason: "import-changed",
|
||||
});
|
||||
|
||||
const activeResult = await runWikiBridgeImport({ config: active.config, json: true });
|
||||
|
||||
expect(activeResult.importedCount).toBe(1);
|
||||
expect(callGatewayFromCliMock).toHaveBeenCalledWith(
|
||||
"wiki.bridge.import",
|
||||
{ timeout: "30000" },
|
||||
undefined,
|
||||
{ progress: false },
|
||||
);
|
||||
|
||||
callGatewayFromCliMock.mockClear();
|
||||
const disabled = await createCliVault({
|
||||
config: {
|
||||
vaultMode: "bridge",
|
||||
bridge: { enabled: false },
|
||||
},
|
||||
});
|
||||
|
||||
const disabledResult = await runWikiBridgeImport({ config: disabled.config, json: true });
|
||||
|
||||
expect(disabledResult.artifactCount).toBe(0);
|
||||
expect(callGatewayFromCliMock).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("imports ChatGPT exports with dry-run, apply, and rollback", async () => {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import fs from "node:fs/promises";
|
||||
import type { Command } from "commander";
|
||||
import { callGatewayFromCli } from "openclaw/plugin-sdk/gateway-runtime";
|
||||
import type { OpenClawConfig } from "../api.js";
|
||||
import { applyMemoryWikiMutation } from "./apply.js";
|
||||
import {
|
||||
@@ -27,14 +28,29 @@ import {
|
||||
} from "./obsidian.js";
|
||||
import { getMemoryWikiPage, searchMemoryWiki } from "./query.js";
|
||||
import { syncMemoryWikiImportedSources } from "./source-sync.js";
|
||||
import type { MemoryWikiImportedSourceSyncResult } from "./source-sync.js";
|
||||
import {
|
||||
buildMemoryWikiDoctorReport,
|
||||
renderMemoryWikiDoctor,
|
||||
renderMemoryWikiStatus,
|
||||
type MemoryWikiDoctorReport,
|
||||
type MemoryWikiStatus,
|
||||
resolveMemoryWikiStatus,
|
||||
} from "./status.js";
|
||||
import { initializeMemoryWikiVault } from "./vault.js";
|
||||
|
||||
const WIKI_GATEWAY_TIMEOUT_MS = "30000";
|
||||
const GATEWAY_TERMINAL_STRING_MAX_CHARS = 2_000;
|
||||
const GATEWAY_RESPONSE_MAX_ARRAY_ITEMS = 10_000;
|
||||
const GATEWAY_RESPONSE_MAX_STRING_CHARS = 10_000;
|
||||
const GATEWAY_RESPONSE_MAX_CODE_CHARS = 256;
|
||||
const ANSI_ESCAPE_SEQUENCE_PATTERN = new RegExp(
|
||||
String.raw`(?:\x1B\[[0-?]*[ -/]*[@-~]|\x1B[@-Z\\-_]|\x9B[0-?]*[ -/]*[@-~])`,
|
||||
"g",
|
||||
);
|
||||
const TERMINAL_CONTROL_CHARACTER_PATTERN = new RegExp(String.raw`[\x00-\x1F\x7F-\x9F]+`, "g");
|
||||
const UNICODE_FORMAT_CONTROL_PATTERN = /[\u061C\u200B-\u200F\u202A-\u202E\u2060-\u206F\uFEFF]/g;
|
||||
|
||||
type WikiStatusCommandOptions = {
|
||||
json?: boolean;
|
||||
};
|
||||
@@ -143,10 +159,173 @@ function isResolvedMemoryWikiConfig(
|
||||
);
|
||||
}
|
||||
|
||||
function sanitizeGatewayStringForTerminal(value: string): string {
|
||||
const truncated =
|
||||
value.length > GATEWAY_TERMINAL_STRING_MAX_CHARS
|
||||
? value.slice(0, GATEWAY_TERMINAL_STRING_MAX_CHARS)
|
||||
: value;
|
||||
const sanitized = truncated
|
||||
.replace(ANSI_ESCAPE_SEQUENCE_PATTERN, "")
|
||||
.replace(TERMINAL_CONTROL_CHARACTER_PATTERN, " ")
|
||||
.replace(UNICODE_FORMAT_CONTROL_PATTERN, "");
|
||||
return value.length > GATEWAY_TERMINAL_STRING_MAX_CHARS
|
||||
? `${sanitized}... [truncated]`
|
||||
: sanitized;
|
||||
}
|
||||
|
||||
function escapeGatewayJsonForTerminal(json: string): string {
|
||||
return json.replace(UNICODE_FORMAT_CONTROL_PATTERN, (char) => {
|
||||
const codePoint = char.codePointAt(0);
|
||||
return typeof codePoint === "number" ? `\\u${codePoint.toString(16).padStart(4, "0")}` : "";
|
||||
});
|
||||
}
|
||||
|
||||
function writeOutput(output: string, writer: Pick<NodeJS.WriteStream, "write"> = process.stdout) {
|
||||
writer.write(output.endsWith("\n") ? output : `${output}\n`);
|
||||
}
|
||||
|
||||
function shouldRouteBridgeRuntimeThroughGateway(config: ResolvedMemoryWikiConfig): boolean {
|
||||
return (
|
||||
config.vaultMode === "bridge" && config.bridge.enabled && config.bridge.readMemoryArtifacts
|
||||
);
|
||||
}
|
||||
|
||||
function isRecord(value: unknown): value is Record<string, unknown> {
|
||||
return Boolean(value && typeof value === "object" && !Array.isArray(value));
|
||||
}
|
||||
|
||||
function isBoundedGatewayString(
|
||||
value: unknown,
|
||||
maxChars = GATEWAY_RESPONSE_MAX_STRING_CHARS,
|
||||
): value is string {
|
||||
return typeof value === "string" && value.length <= maxChars;
|
||||
}
|
||||
|
||||
function isStringArray(
|
||||
value: unknown,
|
||||
maxChars = GATEWAY_RESPONSE_MAX_STRING_CHARS,
|
||||
): value is string[] {
|
||||
return (
|
||||
Array.isArray(value) &&
|
||||
value.length <= GATEWAY_RESPONSE_MAX_ARRAY_ITEMS &&
|
||||
value.every((item) => isBoundedGatewayString(item, maxChars))
|
||||
);
|
||||
}
|
||||
|
||||
function hasNumberFields(value: Record<string, unknown>, keys: readonly string[]): boolean {
|
||||
return keys.every((key) => typeof value[key] === "number");
|
||||
}
|
||||
|
||||
function isWarningList(value: unknown): boolean {
|
||||
return (
|
||||
Array.isArray(value) &&
|
||||
value.length <= GATEWAY_RESPONSE_MAX_ARRAY_ITEMS &&
|
||||
value.every(
|
||||
(item) =>
|
||||
isRecord(item) &&
|
||||
isBoundedGatewayString(item.code, GATEWAY_RESPONSE_MAX_CODE_CHARS) &&
|
||||
isBoundedGatewayString(item.message),
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
function isMemoryWikiStatus(value: unknown): value is MemoryWikiStatus {
|
||||
if (!isRecord(value)) {
|
||||
return false;
|
||||
}
|
||||
const bridge = value.bridge;
|
||||
const obsidianCli = value.obsidianCli;
|
||||
const unsafeLocal = value.unsafeLocal;
|
||||
const pageCounts = value.pageCounts;
|
||||
const sourceCounts = value.sourceCounts;
|
||||
return (
|
||||
isBoundedGatewayString(value.vaultMode, GATEWAY_RESPONSE_MAX_CODE_CHARS) &&
|
||||
isBoundedGatewayString(value.renderMode, GATEWAY_RESPONSE_MAX_CODE_CHARS) &&
|
||||
isBoundedGatewayString(value.vaultPath) &&
|
||||
typeof value.vaultExists === "boolean" &&
|
||||
(typeof value.bridgePublicArtifactCount === "number" ||
|
||||
value.bridgePublicArtifactCount === null) &&
|
||||
isRecord(bridge) &&
|
||||
typeof bridge.enabled === "boolean" &&
|
||||
isRecord(obsidianCli) &&
|
||||
typeof obsidianCli.enabled === "boolean" &&
|
||||
typeof obsidianCli.requested === "boolean" &&
|
||||
typeof obsidianCli.available === "boolean" &&
|
||||
(isBoundedGatewayString(obsidianCli.command) || obsidianCli.command === null) &&
|
||||
isRecord(unsafeLocal) &&
|
||||
typeof unsafeLocal.allowPrivateMemoryCoreAccess === "boolean" &&
|
||||
typeof unsafeLocal.pathCount === "number" &&
|
||||
isRecord(pageCounts) &&
|
||||
hasNumberFields(pageCounts, ["source", "entity", "concept", "synthesis", "report"]) &&
|
||||
isRecord(sourceCounts) &&
|
||||
hasNumberFields(sourceCounts, ["native", "bridge", "bridgeEvents", "unsafeLocal", "other"]) &&
|
||||
isWarningList(value.warnings)
|
||||
);
|
||||
}
|
||||
|
||||
function isMemoryWikiDoctorReport(value: unknown): value is MemoryWikiDoctorReport {
|
||||
return (
|
||||
isRecord(value) &&
|
||||
typeof value.healthy === "boolean" &&
|
||||
typeof value.warningCount === "number" &&
|
||||
isMemoryWikiStatus(value.status) &&
|
||||
Array.isArray(value.fixes) &&
|
||||
value.fixes.length <= GATEWAY_RESPONSE_MAX_ARRAY_ITEMS &&
|
||||
value.fixes.every(
|
||||
(item) =>
|
||||
isRecord(item) &&
|
||||
isBoundedGatewayString(item.code, GATEWAY_RESPONSE_MAX_CODE_CHARS) &&
|
||||
isBoundedGatewayString(item.message),
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
function isMemoryWikiImportResult(value: unknown): value is MemoryWikiImportedSourceSyncResult {
|
||||
return (
|
||||
isRecord(value) &&
|
||||
hasNumberFields(value, [
|
||||
"importedCount",
|
||||
"updatedCount",
|
||||
"skippedCount",
|
||||
"removedCount",
|
||||
"artifactCount",
|
||||
"workspaces",
|
||||
]) &&
|
||||
isStringArray(value.pagePaths) &&
|
||||
typeof value.indexesRefreshed === "boolean" &&
|
||||
isStringArray(value.indexUpdatedFiles) &&
|
||||
isBoundedGatewayString(value.indexRefreshReason, GATEWAY_RESPONSE_MAX_CODE_CHARS)
|
||||
);
|
||||
}
|
||||
|
||||
function validateWikiGatewayResult(
|
||||
method: "wiki.status" | "wiki.doctor" | "wiki.bridge.import",
|
||||
value: unknown,
|
||||
): MemoryWikiStatus | MemoryWikiDoctorReport | MemoryWikiImportedSourceSyncResult {
|
||||
if (method === "wiki.status" && isMemoryWikiStatus(value)) {
|
||||
return value;
|
||||
}
|
||||
if (method === "wiki.doctor" && isMemoryWikiDoctorReport(value)) {
|
||||
return value;
|
||||
}
|
||||
if (method === "wiki.bridge.import" && isMemoryWikiImportResult(value)) {
|
||||
return value;
|
||||
}
|
||||
throw new Error(`Invalid Gateway response for ${method}.`);
|
||||
}
|
||||
|
||||
async function callWikiGateway(method: "wiki.status"): Promise<MemoryWikiStatus>;
|
||||
async function callWikiGateway(method: "wiki.doctor"): Promise<MemoryWikiDoctorReport>;
|
||||
async function callWikiGateway(
|
||||
method: "wiki.bridge.import",
|
||||
): Promise<MemoryWikiImportedSourceSyncResult>;
|
||||
async function callWikiGateway(method: "wiki.status" | "wiki.doctor" | "wiki.bridge.import") {
|
||||
const result = await callGatewayFromCli(method, { timeout: WIKI_GATEWAY_TIMEOUT_MS }, undefined, {
|
||||
progress: false,
|
||||
});
|
||||
return validateWikiGatewayResult(method, result);
|
||||
}
|
||||
|
||||
function normalizeCliStringList(values?: string[]): string[] | undefined {
|
||||
if (!values) {
|
||||
return undefined;
|
||||
@@ -201,6 +380,16 @@ function formatJsonOrText<T>(
|
||||
return json ? JSON.stringify(result, null, 2) : render(result);
|
||||
}
|
||||
|
||||
function formatGatewayJsonOrText<T>(
|
||||
result: T,
|
||||
json: boolean | undefined,
|
||||
render: (result: T) => string,
|
||||
): string {
|
||||
return json
|
||||
? escapeGatewayJsonForTerminal(JSON.stringify(result, null, 2))
|
||||
: sanitizeGatewayStringForTerminal(render(result));
|
||||
}
|
||||
|
||||
async function runWikiCommandWithSummary<T>(params: {
|
||||
json?: boolean;
|
||||
stdout?: Pick<NodeJS.WriteStream, "write">;
|
||||
@@ -255,12 +444,19 @@ export async function runWikiStatus(params: {
|
||||
json?: boolean;
|
||||
stdout?: Pick<NodeJS.WriteStream, "write">;
|
||||
}) {
|
||||
await syncMemoryWikiImportedSources({ config: params.config, appConfig: params.appConfig });
|
||||
const status = await resolveMemoryWikiStatus(params.config, {
|
||||
appConfig: params.appConfig,
|
||||
});
|
||||
const routeThroughGateway = shouldRouteBridgeRuntimeThroughGateway(params.config);
|
||||
const status = routeThroughGateway
|
||||
? await callWikiGateway("wiki.status")
|
||||
: await (async () => {
|
||||
await syncMemoryWikiImportedSources({ config: params.config, appConfig: params.appConfig });
|
||||
return await resolveMemoryWikiStatus(params.config, {
|
||||
appConfig: params.appConfig,
|
||||
});
|
||||
})();
|
||||
writeOutput(
|
||||
params.json ? JSON.stringify(status, null, 2) : renderMemoryWikiStatus(status),
|
||||
routeThroughGateway
|
||||
? formatGatewayJsonOrText(status, params.json, renderMemoryWikiStatus)
|
||||
: formatJsonOrText(status, params.json, renderMemoryWikiStatus),
|
||||
params.stdout,
|
||||
);
|
||||
return status;
|
||||
@@ -272,17 +468,24 @@ export async function runWikiDoctor(params: {
|
||||
json?: boolean;
|
||||
stdout?: Pick<NodeJS.WriteStream, "write">;
|
||||
}) {
|
||||
await syncMemoryWikiImportedSources({ config: params.config, appConfig: params.appConfig });
|
||||
const report = buildMemoryWikiDoctorReport(
|
||||
await resolveMemoryWikiStatus(params.config, {
|
||||
appConfig: params.appConfig,
|
||||
}),
|
||||
);
|
||||
const routeThroughGateway = shouldRouteBridgeRuntimeThroughGateway(params.config);
|
||||
const report = routeThroughGateway
|
||||
? await callWikiGateway("wiki.doctor")
|
||||
: await (async () => {
|
||||
await syncMemoryWikiImportedSources({ config: params.config, appConfig: params.appConfig });
|
||||
return buildMemoryWikiDoctorReport(
|
||||
await resolveMemoryWikiStatus(params.config, {
|
||||
appConfig: params.appConfig,
|
||||
}),
|
||||
);
|
||||
})();
|
||||
if (!report.healthy) {
|
||||
process.exitCode = 1;
|
||||
}
|
||||
writeOutput(
|
||||
params.json ? JSON.stringify(report, null, 2) : renderMemoryWikiDoctor(report),
|
||||
routeThroughGateway
|
||||
? formatGatewayJsonOrText(report, params.json, renderMemoryWikiDoctor)
|
||||
: formatJsonOrText(report, params.json, renderMemoryWikiDoctor),
|
||||
params.stdout,
|
||||
);
|
||||
return report;
|
||||
@@ -505,6 +708,13 @@ export async function runWikiBridgeImport(params: {
|
||||
json?: boolean;
|
||||
stdout?: Pick<NodeJS.WriteStream, "write">;
|
||||
}) {
|
||||
const render = (value: MemoryWikiImportedSourceSyncResult) =>
|
||||
`Bridge import synced ${value.artifactCount} artifacts across ${value.workspaces} workspaces (${value.importedCount} new, ${value.updatedCount} updated, ${value.skippedCount} unchanged, ${value.removedCount} removed). Indexes ${value.indexesRefreshed ? `refreshed (${value.indexUpdatedFiles.length} files)` : `not refreshed (${value.indexRefreshReason})`}.`;
|
||||
if (shouldRouteBridgeRuntimeThroughGateway(params.config)) {
|
||||
const result = await callWikiGateway("wiki.bridge.import");
|
||||
writeOutput(formatGatewayJsonOrText(result, params.json, render), params.stdout);
|
||||
return result;
|
||||
}
|
||||
return runWikiCommandWithSummary({
|
||||
json: params.json,
|
||||
stdout: params.stdout,
|
||||
@@ -513,8 +723,7 @@ export async function runWikiBridgeImport(params: {
|
||||
config: params.config,
|
||||
appConfig: params.appConfig,
|
||||
}),
|
||||
render: (value) =>
|
||||
`Bridge import synced ${value.artifactCount} artifacts across ${value.workspaces} workspaces (${value.importedCount} new, ${value.updatedCount} updated, ${value.skippedCount} unchanged, ${value.removedCount} removed). Indexes ${value.indexesRefreshed ? `refreshed (${value.indexUpdatedFiles.length} files)` : `not refreshed (${value.indexRefreshReason})`}.`,
|
||||
render,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { constants as fsConstants } from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { pathExists } from "./source-path-shared.js";
|
||||
import {
|
||||
setImportedSourceEntry,
|
||||
shouldSkipImportedSourceWrite,
|
||||
@@ -8,6 +9,123 @@ import {
|
||||
} from "./source-sync-state.js";
|
||||
|
||||
type ImportedSourceState = Parameters<typeof shouldSkipImportedSourceWrite>[0]["state"];
|
||||
type FileStats = Awaited<ReturnType<typeof fs.lstat>>;
|
||||
|
||||
function isPathInside(parent: string, child: string): boolean {
|
||||
const relative = path.relative(parent, child);
|
||||
return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative));
|
||||
}
|
||||
|
||||
async function resolveWritableVaultPagePath(params: {
|
||||
vaultRoot: string;
|
||||
pagePath: string;
|
||||
}): Promise<{
|
||||
pageAbsPath: string;
|
||||
pageDir: string;
|
||||
pageDirRealPath: string;
|
||||
vaultRealPath: string;
|
||||
existing: FileStats | null;
|
||||
}> {
|
||||
const vaultAbsPath = path.resolve(params.vaultRoot);
|
||||
const pageAbsPath = path.resolve(vaultAbsPath, params.pagePath);
|
||||
if (!isPathInside(vaultAbsPath, pageAbsPath)) {
|
||||
throw new Error(`Refusing to write imported source page outside vault: ${params.pagePath}`);
|
||||
}
|
||||
|
||||
const vaultRealPath = await fs.realpath(vaultAbsPath);
|
||||
const pageDir = path.dirname(pageAbsPath);
|
||||
await fs.mkdir(pageDir, { recursive: true });
|
||||
const pageDirRealPath = await fs.realpath(pageDir);
|
||||
if (!isPathInside(vaultRealPath, pageDirRealPath)) {
|
||||
throw new Error(`Refusing to write imported source page outside vault: ${params.pagePath}`);
|
||||
}
|
||||
|
||||
const existing = await fs.lstat(pageAbsPath).catch((err: unknown) => {
|
||||
if ((err as NodeJS.ErrnoException)?.code === "ENOENT") {
|
||||
return null;
|
||||
}
|
||||
throw err;
|
||||
});
|
||||
if (existing?.isSymbolicLink()) {
|
||||
throw new Error(`Refusing to write imported source page through symlink: ${params.pagePath}`);
|
||||
}
|
||||
if (existing && !existing.isFile()) {
|
||||
throw new Error(`Refusing to write imported source page over non-file: ${params.pagePath}`);
|
||||
}
|
||||
return { pageAbsPath, pageDir, pageDirRealPath, vaultRealPath, existing };
|
||||
}
|
||||
|
||||
async function assertWritablePageDir(params: {
|
||||
pageDir: string;
|
||||
pageDirRealPath: string;
|
||||
vaultRealPath: string;
|
||||
pagePath: string;
|
||||
}): Promise<void> {
|
||||
const currentPageDirRealPath = await fs.realpath(params.pageDir);
|
||||
if (
|
||||
currentPageDirRealPath !== params.pageDirRealPath ||
|
||||
!isPathInside(params.vaultRealPath, currentPageDirRealPath)
|
||||
) {
|
||||
throw new Error(`Refusing to write imported source page outside vault: ${params.pagePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function validateDestinationForReplace(filePath: string, pagePath: string): Promise<void> {
|
||||
const existing = await fs.lstat(filePath).catch((err: unknown) => {
|
||||
if ((err as NodeJS.ErrnoException)?.code === "ENOENT") {
|
||||
return null;
|
||||
}
|
||||
throw err;
|
||||
});
|
||||
if (existing?.isSymbolicLink()) {
|
||||
throw new Error(`Refusing to write imported source page through symlink: ${pagePath}`);
|
||||
}
|
||||
if (existing && !existing.isFile()) {
|
||||
throw new Error(`Refusing to write imported source page over non-file: ${pagePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Atomically replaces `filePath` inside the vault with `content`.
 *
 * Strategy: create a fresh temp file in the destination directory (so the
 * final rename stays on one filesystem), write the content, re-validate the
 * directory and destination, then rename over the target. The containment
 * checks are repeated around each filesystem step to shrink the window in
 * which the directory could be swapped for a symlink pointing outside the
 * vault (TOCTOU). Statement order here is the security property — do not
 * reorder.
 */
async function writeFileAtomicInVault(params: {
  filePath: string;
  pageDir: string;
  pageDirRealPath: string;
  vaultRealPath: string;
  pagePath: string;
  content: string;
}): Promise<void> {
  // O_NOFOLLOW is not defined on every platform (e.g. Windows); fall back to 0.
  const noFollow = fsConstants.O_NOFOLLOW ?? 0;
  await assertWritablePageDir(params);

  // Unpredictable temp name in the destination directory; pid + UUID avoids
  // collisions and makes pre-planting the path impractical.
  const tempPath = path.join(params.pageDir, `.openclaw-wiki-${process.pid}-${randomUUID()}.tmp`);
  let shouldRemoveTemp = true;
  try {
    // O_EXCL guarantees we created this file ourselves (open fails if the
    // path already exists, including a pre-planted symlink).
    const handle = await fs.open(
      tempPath,
      fsConstants.O_WRONLY | fsConstants.O_CREAT | fsConstants.O_EXCL | noFollow,
      0o600,
    );
    try {
      // nlink !== 1 means the temp file has been hard-linked elsewhere while
      // we held it — renaming would then affect an attacker-chosen path.
      const tempStat = await handle.stat();
      if (!tempStat.isFile() || tempStat.nlink !== 1) {
        throw new Error(
          `Refusing to write imported source page through unsafe temp file: ${params.pagePath}`,
        );
      }
      await handle.writeFile(params.content, "utf8");
    } finally {
      await handle.close();
    }
    // Re-check directory and destination immediately before the rename.
    await assertWritablePageDir(params);
    await validateDestinationForReplace(params.filePath, params.pagePath);
    await fs.rename(tempPath, params.filePath);
    shouldRemoveTemp = false;
    // Final check: surface a directory swap that raced the rename.
    await assertWritablePageDir(params);
  } finally {
    if (shouldRemoveTemp) {
      // Best-effort cleanup; `force` ignores ENOENT when open() itself failed.
      await fs.rm(tempPath, { force: true });
    }
  }
}
|
||||
|
||||
export async function writeImportedSourcePage(params: {
|
||||
vaultRoot: string;
|
||||
@@ -21,8 +139,17 @@ export async function writeImportedSourcePage(params: {
|
||||
state: ImportedSourceState;
|
||||
buildRendered: (raw: string, updatedAt: string) => string;
|
||||
}): Promise<{ pagePath: string; changed: boolean; created: boolean }> {
|
||||
const pageAbsPath = path.join(params.vaultRoot, params.pagePath);
|
||||
const created = !(await pathExists(pageAbsPath));
|
||||
const {
|
||||
pageAbsPath,
|
||||
pageDir,
|
||||
pageDirRealPath,
|
||||
vaultRealPath,
|
||||
existing: pageStat,
|
||||
} = await resolveWritableVaultPagePath({
|
||||
vaultRoot: params.vaultRoot,
|
||||
pagePath: params.pagePath,
|
||||
});
|
||||
const created = !pageStat;
|
||||
const updatedAt = new Date(params.sourceUpdatedAtMs).toISOString();
|
||||
const shouldSkip = await shouldSkipImportedSourceWrite({
|
||||
vaultRoot: params.vaultRoot,
|
||||
@@ -40,9 +167,16 @@ export async function writeImportedSourcePage(params: {
|
||||
|
||||
const raw = await fs.readFile(params.sourcePath, "utf8");
|
||||
const rendered = params.buildRendered(raw, updatedAt);
|
||||
const existing = await fs.readFile(pageAbsPath, "utf8").catch(() => "");
|
||||
const existing = pageStat ? await fs.readFile(pageAbsPath, "utf8").catch(() => "") : "";
|
||||
if (existing !== rendered) {
|
||||
await fs.writeFile(pageAbsPath, rendered, "utf8");
|
||||
await writeFileAtomicInVault({
|
||||
filePath: pageAbsPath,
|
||||
pageDir,
|
||||
pageDirRealPath,
|
||||
vaultRealPath,
|
||||
pagePath: params.pagePath,
|
||||
content: rendered,
|
||||
});
|
||||
}
|
||||
|
||||
setImportedSourceEntry({
|
||||
|
||||
Reference in New Issue
Block a user