mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-06 05:00:42 +00:00
[codex] Extract filesystem safety primitives (#77918)
* refactor: extract filesystem safety primitives * refactor: use fs-safe for file access helpers * refactor: reuse fs-safe for media reads * refactor: use fs-safe for image reads * refactor: reuse fs-safe in qqbot media opener * refactor: reuse fs-safe for local media checks * refactor: consume cleaner fs-safe api * refactor: align fs-safe json option names * fix: preserve fs-safe migration contracts * refactor: use fs-safe primitive subpaths * refactor: use grouped fs-safe subpaths * refactor: align fs-safe api usage * refactor: adapt private state store api * chore: refresh proof gate * refactor: follow fs-safe json api split * refactor: follow reduced fs-safe surface * build: default fs-safe python helper off * fix: preserve fs-safe plugin sdk aliases * refactor: consolidate fs-safe usage * refactor: unify fs-safe store usage * refactor: trim fs-safe temp workspace usage * refactor: hide low-level fs-safe primitives * build: use published fs-safe package * fix: preserve outbound recovery durability after rebase * chore: refresh pr checks
This commit is contained in:
committed by
GitHub
parent
61481eb34f
commit
538605ff44
@@ -71,6 +71,8 @@ Docs: https://docs.openclaw.ai
|
||||
- Gateway/performance: defer non-readiness sidecars until after the ready signal, avoid hot-path channel plugin barrel imports, and fast-path trusted bundled plugin metadata during Gateway startup.
|
||||
- Gateway/performance: avoid importing `jiti` on native-loadable plugin startup paths, so compiled bundled plugin surfaces do not pay source-transform loader cost unless fallback loading is actually needed.
|
||||
- Plugins/loader: preserve real compiled plugin module evaluation errors on the native fast path instead of treating every thrown `.js` module as a source-transform fallback miss. Thanks @vincentkoc.
|
||||
- Plugin SDK/fs-safe: expose reusable atomic replacement, sibling-temp writes, and cross-device move fallback helpers through `plugin-sdk/security-runtime`, and move OpenClaw's duplicated safe filesystem write paths onto the shared `@openclaw/fs-safe` package.
|
||||
- Plugin SDK/fs-safe: rename the public temp workspace helpers to `tempWorkspace`, `withTempWorkspace`, `tempWorkspaceSync`, and `withTempWorkspaceSync`, matching the cleaner `@openclaw/fs-safe` API before the package is published.
|
||||
- Providers/OpenRouter: add opt-in response caching params that send OpenRouter's `X-OpenRouter-Cache`, `X-OpenRouter-Cache-TTL`, and cache-clear headers only on verified OpenRouter routes. Thanks @vincentkoc.
|
||||
- Providers/OpenRouter: expand app-attribution categories so OpenClaw advertises coding, programming, writing, chat, and personal-agent usage on verified OpenRouter routes. Thanks @vincentkoc.
|
||||
- Agents/performance: pass the resolved workspace through BTW, compaction, embedded-run model generation, and PDF model setup so explicit agent-dir model refreshes can reuse the current workspace-scoped plugin metadata snapshot instead of falling back to cold plugin metadata scans. (#77519, #77532)
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
fe061b6f35adb2b152d8f48244a94d4934b335143cc5f5aebb8cc96e5ba8b287 plugin-sdk-api-baseline.json
|
||||
495248d5981456192aaf7da2ed23d5951eaa6d9e59d70c716ab91c3da3620e73 plugin-sdk-api-baseline.jsonl
|
||||
1a06492fe05d1c9dc3194677f52d57ec90468b93023b70d0852ef01d87c7eae3 plugin-sdk-api-baseline.json
|
||||
c950a1923c0dc7d31120a3010e24217bcf22fd9cacbe102d3ae19b0120c0f648 plugin-sdk-api-baseline.jsonl
|
||||
|
||||
@@ -59,6 +59,10 @@
|
||||
"source": "Gateway RPC reference",
|
||||
"target": "Gateway RPC 参考"
|
||||
},
|
||||
{
|
||||
"source": "Secure file operations",
|
||||
"target": "安全文件操作"
|
||||
},
|
||||
{
|
||||
"source": "Sessions",
|
||||
"target": "会话"
|
||||
@@ -758,5 +762,9 @@
|
||||
{
|
||||
"source": "/cli/config",
|
||||
"target": "/cli/config"
|
||||
},
|
||||
{
|
||||
"source": "fs-safe Cleanup Plan",
|
||||
"target": "fs-safe Cleanup Plan"
|
||||
}
|
||||
]
|
||||
|
||||
@@ -1501,6 +1501,7 @@
|
||||
"group": "Security and sandboxing",
|
||||
"pages": [
|
||||
"gateway/security/index",
|
||||
"gateway/security/secure-file-operations",
|
||||
"gateway/security/audit-checks",
|
||||
"gateway/operator-scopes",
|
||||
"gateway/sandboxing",
|
||||
|
||||
@@ -65,6 +65,12 @@ OpenClaw assumes the host and config boundary are trusted:
|
||||
- Session identifiers (`sessionKey`, session IDs, labels) are routing selectors, not authorization tokens.
|
||||
- If several people can message one tool-enabled agent, each of them can steer that same permission set. Per-user session/memory isolation helps privacy, but does not convert a shared agent into per-user host authorization.
|
||||
|
||||
### Secure file operations
|
||||
|
||||
OpenClaw uses `@openclaw/fs-safe` for root-bounded file access, atomic writes, archive extraction, temp workspaces, and secret-file helpers. OpenClaw defaults fs-safe's optional POSIX Python helper to **off**; set `OPENCLAW_FS_SAFE_PYTHON_MODE=auto` or `require` only when you want the extra fd-relative mutation hardening and can support a Python runtime.
|
||||
|
||||
Details: [Secure file operations](/gateway/security/secure-file-operations).
|
||||
|
||||
### Shared Slack workspace: real risk
|
||||
|
||||
If "everyone in Slack can message the bot," the core risk is delegated tool authority:
|
||||
|
||||
76
docs/gateway/security/secure-file-operations.md
Normal file
76
docs/gateway/security/secure-file-operations.md
Normal file
@@ -0,0 +1,76 @@
|
||||
---
|
||||
summary: "How OpenClaw handles local file access safely, and why the optional fs-safe Python helper is off by default"
|
||||
read_when:
|
||||
- Changing file access, archive extraction, workspace storage, or plugin filesystem helpers
|
||||
title: "Secure file operations"
|
||||
---
|
||||
|
||||
OpenClaw uses [`@openclaw/fs-safe`](https://github.com/openclaw/fs-safe) for security-sensitive local file operations: root-bounded reads/writes, atomic replacement, archive extraction, temp workspaces, JSON state, and secret-file handling.
|
||||
|
||||
The goal is a consistent **library guardrail** for trusted OpenClaw code that receives untrusted path names. It is not a sandbox. Host filesystem permissions, OS users, containers, and the agent/tool policy still define the real blast radius.
|
||||
|
||||
## Default: no Python helper
|
||||
|
||||
OpenClaw defaults the fs-safe POSIX Python helper to **off**.
|
||||
|
||||
Why:
|
||||
|
||||
- the gateway should not spawn a persistent Python sidecar unless an operator has opted into it;
|
||||
- many installs do not need the extra parent-directory mutation hardening;
|
||||
- disabling Python keeps package/runtime behavior more predictable across desktop, Docker, CI, and bundled app environments.
|
||||
|
||||
OpenClaw only changes the default. If you explicitly set a mode, fs-safe honors it:
|
||||
|
||||
```bash
|
||||
# Default OpenClaw behavior: Node-only fs-safe fallbacks.
|
||||
OPENCLAW_FS_SAFE_PYTHON_MODE=off
|
||||
|
||||
# Opt into the helper when available, falling back if unavailable.
|
||||
OPENCLAW_FS_SAFE_PYTHON_MODE=auto
|
||||
|
||||
# Fail closed if the helper cannot start.
|
||||
OPENCLAW_FS_SAFE_PYTHON_MODE=require
|
||||
|
||||
# Optional explicit interpreter.
|
||||
OPENCLAW_FS_SAFE_PYTHON=/usr/bin/python3
|
||||
```
|
||||
|
||||
The generic fs-safe names also work: `FS_SAFE_PYTHON_MODE` and `FS_SAFE_PYTHON`.
|
||||
|
||||
## What stays protected without Python
|
||||
|
||||
With the helper off, OpenClaw still uses fs-safe's Node paths for:
|
||||
|
||||
- rejecting relative-path escapes such as `..`, absolute paths, and path separators where only names are allowed;
|
||||
- resolving operations through a trusted root handle instead of ad-hoc `path.resolve(...).startsWith(...)` checks;
|
||||
- refusing symlink and hardlink patterns on APIs that require that policy;
|
||||
- opening files with identity checks where the API returns or consumes file contents;
|
||||
- atomic sibling-temp writes for state/config files;
|
||||
- byte limits for reads and archive extraction;
|
||||
- private modes for secrets and state files where the API requires them.
|
||||
|
||||
These protections cover the normal OpenClaw threat model: trusted gateway code handling untrusted model/plugin/channel path input inside a single trusted operator boundary.
|
||||
|
||||
## What Python adds
|
||||
|
||||
On POSIX, fs-safe's optional helper keeps one persistent Python process and uses fd-relative filesystem operations for parent-directory mutations such as rename, remove, mkdir, stat/list, and some write paths.
|
||||
|
||||
That narrows same-UID race windows where another process can swap a parent directory between validation and mutation. It is defense in depth for hosts where untrusted local processes can modify the same directories OpenClaw is operating in.
|
||||
|
||||
If your deployment has that risk and Python is guaranteed to exist, use:
|
||||
|
||||
```bash
|
||||
OPENCLAW_FS_SAFE_PYTHON_MODE=require
|
||||
```
|
||||
|
||||
Use `require` rather than `auto` when the helper is part of your security posture; `auto` intentionally falls back to Node-only behavior if the helper is unavailable.
|
||||
|
||||
## Plugin and core guidance
|
||||
|
||||
- Plugin-facing file access should go through `openclaw/plugin-sdk/*` helpers, not raw `fs`, when a path comes from a message, model output, config, or plugin input.
|
||||
- Core code should use the local fs-safe wrappers under `src/infra/*` so OpenClaw's process policy is applied consistently.
|
||||
- Archive extraction should use the fs-safe archive helpers with explicit size, entry-count, link, and destination limits.
|
||||
- Secrets should use OpenClaw secret helpers or fs-safe secret/private-state helpers; do not hand-roll mode checks around `fs.writeFile`.
|
||||
- If you need hostile local-user isolation, do not rely on fs-safe alone. Run separate gateways under separate OS users/hosts or use sandboxing.
|
||||
|
||||
Related: [Security](/gateway/security), [Sandboxing](/gateway/sandboxing), [Exec approvals](/tools/exec-approvals), [Secrets](/gateway/secrets).
|
||||
@@ -425,7 +425,7 @@ releases.
|
||||
| `plugin-sdk/approval-native-runtime` | Approval target helpers | Native approval target/account binding helpers |
|
||||
| `plugin-sdk/approval-reply-runtime` | Approval reply helpers | Exec/plugin approval reply payload helpers |
|
||||
| `plugin-sdk/channel-runtime-context` | Channel runtime-context helpers | Generic channel runtime-context register/get/watch helpers |
|
||||
| `plugin-sdk/security-runtime` | Security helpers | Shared trust, DM gating, external-content, and secret-collection helpers |
|
||||
| `plugin-sdk/security-runtime` | Security helpers | Shared trust, DM gating, root-bounded file/path helpers, external-content, and secret-collection helpers |
|
||||
| `plugin-sdk/ssrf-policy` | SSRF policy helpers | Host allowlist and private-network policy helpers |
|
||||
| `plugin-sdk/ssrf-runtime` | SSRF runtime helpers | Pinned-dispatcher, guarded fetch, SSRF policy helpers |
|
||||
| `plugin-sdk/system-event-runtime` | System event helpers | `enqueueSystemEvent`, `peekSystemEventEntries` |
|
||||
|
||||
@@ -161,7 +161,7 @@ For the plugin authoring guide, see [Plugin SDK overview](/plugins/sdk-overview)
|
||||
| `plugin-sdk/allow-from` | `formatAllowFromLowercase` |
|
||||
| `plugin-sdk/channel-secret-runtime` | Narrow secret-contract collection helpers for channel/plugin secret surfaces |
|
||||
| `plugin-sdk/secret-ref-runtime` | Narrow `coerceSecretRef` and SecretRef typing helpers for secret-contract/config parsing |
|
||||
| `plugin-sdk/security-runtime` | Shared trust, DM gating, external-content, sensitive text redaction, constant-time secret comparison, and secret-collection helpers |
|
||||
| `plugin-sdk/security-runtime` | Shared trust, DM gating, root-bounded file/path helpers including create-only writes, sync/async atomic file replacement, sibling temp writes, cross-device move fallback, private file-store helpers, symlink-parent guards, external-content, sensitive text redaction, constant-time secret comparison, and secret-collection helpers |
|
||||
| `plugin-sdk/ssrf-policy` | Host allowlist and private-network SSRF policy helpers |
|
||||
| `plugin-sdk/ssrf-dispatcher` | Narrow pinned-dispatcher helpers without the broad infra runtime surface |
|
||||
| `plugin-sdk/ssrf-runtime` | Pinned-dispatcher, SSRF-guarded fetch, SSRF error, and SSRF policy helpers |
|
||||
@@ -210,7 +210,7 @@ For the plugin authoring guide, see [Plugin SDK overview](/plugins/sdk-overview)
|
||||
| `plugin-sdk/param-readers` | Common tool/CLI param readers |
|
||||
| `plugin-sdk/tool-payload` | Extract normalized payloads from tool result objects |
|
||||
| `plugin-sdk/tool-send` | Extract canonical send target fields from tool args |
|
||||
| `plugin-sdk/temp-path` | Shared temp-download path helpers |
|
||||
| `plugin-sdk/temp-path` | Shared temp-download path helpers and private secure temp workspaces |
|
||||
| `plugin-sdk/logging-core` | Subsystem logger and redaction helpers |
|
||||
| `plugin-sdk/markdown-table-runtime` | Markdown table mode and conversion helpers |
|
||||
| `plugin-sdk/model-session-runtime` | Model/session override helpers such as `applyModelOverrideToSessionEntry` and `resolveAgentMaxConcurrent` |
|
||||
|
||||
448
docs/refactor/fs-cleanup.md
Normal file
448
docs/refactor/fs-cleanup.md
Normal file
@@ -0,0 +1,448 @@
|
||||
---
|
||||
title: "fs-safe Cleanup Plan"
|
||||
summary: "Plan for consolidating OpenClaw filesystem helpers around @openclaw/fs-safe"
|
||||
read_when:
|
||||
- You are refactoring OpenClaw filesystem helpers
|
||||
- You are changing @openclaw/fs-safe imports, wrappers, or plugin SDK file APIs
|
||||
- You are deciding whether a local file helper belongs in OpenClaw or fs-safe
|
||||
---
|
||||
|
||||
## Status
|
||||
|
||||
Implemented on `codex/extract-fs-safe-primitives`. Keep this file as the
|
||||
cleanup checklist for follow-up reviews and future fs-safe surface changes.
|
||||
|
||||
## Goal
|
||||
|
||||
Make OpenClaw's filesystem access boring and predictable:
|
||||
|
||||
- Core code uses one small set of OpenClaw wrappers that apply OpenClaw policy.
|
||||
- Plugin SDK compatibility aliases stay deliberate and documented.
|
||||
- fs-safe keeps a small public story centered on `root()`, with lower-level
|
||||
primitives behind explicit subpaths.
|
||||
- Duplicate JSON, temp, private-store, and path helper names disappear from
|
||||
OpenClaw internals.
|
||||
- Security-sensitive behavior keeps regression tests before names move.
|
||||
|
||||
## Non-goals
|
||||
|
||||
- Do not remove public plugin SDK exports in this cleanup. Keep deprecated
|
||||
aliases until a versioned SDK migration removes them.
|
||||
- Do not make fs-safe a sandbox. It remains a library guardrail for local file
|
||||
access, not OS isolation.
|
||||
- Do not convert all absolute-path reads to root-bounded reads. Some OpenClaw
|
||||
paths are trusted absolute paths and should stay explicit.
|
||||
- Do not chase cosmetic import churn without reducing helper count or clarifying
|
||||
trust boundaries.
|
||||
|
||||
## fs-safe Package Pin
|
||||
|
||||
`@openclaw/fs-safe` is published on npm and consumed through a semver range.
|
||||
Fresh checkouts and CI runners should install the package from the public
|
||||
registry, not from a local `link:../fs-safe` checkout or a GitHub tarball.
|
||||
|
||||
Current range:
|
||||
|
||||
- `^0.1.0`
|
||||
|
||||
The published package ships built `dist` files, so OpenClaw should not list it
|
||||
in `pnpm.onlyBuiltDependencies`.
|
||||
|
||||
## Current Shape
|
||||
|
||||
fs-safe's main entry is intentionally narrow:
|
||||
|
||||
- `root`
|
||||
- `FsSafeError`
|
||||
- `categorizeFsSafeError`
|
||||
- root option/result types
|
||||
- Python helper configuration
|
||||
|
||||
The wider surface lives behind subpaths:
|
||||
|
||||
- `/json`
|
||||
- `/store`
|
||||
- `/temp`
|
||||
- `/atomic`
|
||||
- `/root`
|
||||
- `/advanced`
|
||||
- `/archive`
|
||||
- `/walk`
|
||||
|
||||
OpenClaw now keeps fs-safe behind a small wrapper boundary:
|
||||
|
||||
- local `src/infra/*` wrappers for core policy defaults
|
||||
- public plugin SDK aliases, including older names from before fs-safe
|
||||
- package-local utility exports where importing `src/infra` would cross a
|
||||
package boundary
|
||||
|
||||
An import-boundary test rejects new direct fs-safe imports outside those
|
||||
allowed areas.
|
||||
|
||||
## Usage Map
|
||||
|
||||
### Root-bounded access
|
||||
|
||||
Representative use:
|
||||
|
||||
- `src/gateway/server-methods/agents.ts`
|
||||
- `src/agents/pi-tools.read.ts`
|
||||
- `src/agents/apply-patch.ts`
|
||||
- `src/plugins/install.ts`
|
||||
- `src/auto-reply/reply/stage-sandbox-media.ts`
|
||||
- `src/gateway/canvas-documents.ts`
|
||||
|
||||
Keep this family. `root()` is the fs-safe product surface OpenClaw should push
|
||||
callers toward.
|
||||
|
||||
### JSON helpers
|
||||
|
||||
OpenClaw still uses many names for the same operations:
|
||||
|
||||
- `readJsonFile`
|
||||
- `readJsonFileStrict`
|
||||
- `readDurableJsonFile`
|
||||
- `writeJsonAtomic`
|
||||
- `loadJsonFile`
|
||||
- `saveJsonFile`
|
||||
- `readJsonFileWithFallback`
|
||||
- `writeJsonFileAtomically`
|
||||
|
||||
fs-safe's canonical names are clearer:
|
||||
|
||||
- `tryReadJson`
|
||||
- `readJson`
|
||||
- `readJsonIfExists`
|
||||
- `writeJson`
|
||||
- `readJsonSync`
|
||||
- `tryReadJsonSync`
|
||||
- `writeJsonSync`
|
||||
|
||||
This was the highest-value cleanup because it removed naming drift without
|
||||
changing semantics. Compatibility aliases stay in `src/infra/json-files.ts` and
|
||||
plugin SDK barrels.
|
||||
|
||||
### Private state and stores
|
||||
|
||||
Representative use:
|
||||
|
||||
- `src/commitments/store.ts`
|
||||
- `src/agents/models-config.ts`
|
||||
- `src/agents/pi-auth-json.ts`
|
||||
- `src/cron/run-log.ts`
|
||||
- `src/secrets/shared.ts`
|
||||
- `src/infra/device-auth-store.ts`
|
||||
- `src/infra/device-identity.ts`
|
||||
|
||||
Current overlap:
|
||||
|
||||
- `fileStore`
|
||||
- `fileStore({ private: true })`
|
||||
- plugin SDK private-state aliases
|
||||
|
||||
The concepts are now one family. fs-safe exposes private mode through
|
||||
`fileStore({ private: true })`; OpenClaw internals and bundled plugins use
|
||||
store-shaped wrappers instead of standalone private JSON/text helpers.
|
||||
|
||||
### Temp workspaces
|
||||
|
||||
Representative use:
|
||||
|
||||
- `src/media/qr-image.ts`
|
||||
- `extensions/discord/src/send.voice.ts`
|
||||
- `extensions/discord/src/voice/audio.ts`
|
||||
- `extensions/qa-lab/src/temp-dir.test-helper.ts`
|
||||
|
||||
`tempWorkspace` is the stable, useful primitive. One-shot temp targets and
|
||||
sibling-temp helpers are lower-level implementation tools.
|
||||
|
||||
### Atomic writes
|
||||
|
||||
Representative use:
|
||||
|
||||
- config and session stores
|
||||
- cron stores
|
||||
- plugin install paths
|
||||
- extension state files
|
||||
|
||||
Keep atomic replacement as a public fs-safe subpath. OpenClaw should use the
|
||||
same canonical JSON/text helpers where possible instead of hand-picking lower
|
||||
level atomic calls for ordinary JSON state.
|
||||
|
||||
### Regular, secure, and root file reads
|
||||
|
||||
These are not true duplicates:
|
||||
|
||||
- `root()` protects root-relative untrusted paths.
|
||||
- regular-file helpers read trusted absolute paths with regular-file checks.
|
||||
- secure-file helpers add ownership and mode checks for secret references.
|
||||
|
||||
Keep them separate. Document the trust boundary instead of hiding it behind one
|
||||
generic "read file" helper.
|
||||
|
||||
### Archive helpers
|
||||
|
||||
Representative use:
|
||||
|
||||
- plugin install
|
||||
- skill install
|
||||
- marketplace and ClawHub archive flows
|
||||
|
||||
Keep as a separate fs-safe subpath. Do not leak archive entry plumbing into
|
||||
OpenClaw core call sites unless the caller is actually validating archive
|
||||
metadata.
|
||||
|
||||
## Target Design
|
||||
|
||||
### OpenClaw imports
|
||||
|
||||
Core OpenClaw code should use local policy wrappers:
|
||||
|
||||
- `src/infra/fs-safe.ts` for common root/error helpers
|
||||
- `src/infra/json-files.ts` for the temporary JSON compatibility layer
|
||||
- `src/infra/private-file-store.ts` until private stores are unified
|
||||
- `src/infra/replace-file.ts` for low-level atomic replacement
|
||||
- `src/infra/boundary-file-read.ts` for loader/package boundary reads
|
||||
- `src/infra/archive.ts` for archive extraction policy
|
||||
- `src/infra/file-lock-manager.ts` for the rare core service that needs
|
||||
manager-style lock lifecycle/diagnostics
|
||||
|
||||
New direct imports from `@openclaw/fs-safe/*` should be reserved for:
|
||||
|
||||
- package-level utilities outside core that cannot import `src/infra`
|
||||
- compatibility shims
|
||||
- code that intentionally consumes a narrow fs-safe subpath, such as
|
||||
`openclaw/plugin-sdk/file-lock` using `@openclaw/fs-safe/file-lock`
|
||||
|
||||
### Plugin SDK exports
|
||||
|
||||
Plugin SDK exports are contractual. Keep aliases even when OpenClaw internals
|
||||
move to canonical names.
|
||||
|
||||
Mark older names as deprecated in types/docs when the replacement is stable:
|
||||
|
||||
- `readJsonFileWithFallback` -> `readJsonIfExists` or a store method
|
||||
- `writeJsonFileAtomically` -> `writeJson`
|
||||
- `loadJsonFile` -> `tryReadJson`
|
||||
- `saveJsonFile` -> `writeJson`
|
||||
- `readFileWithinRoot` -> `root(...).read*`
|
||||
- `writeFileWithinRoot` -> `root(...).write`
|
||||
|
||||
### fs-safe stores
|
||||
|
||||
Move toward one store family:
|
||||
|
||||
```ts
|
||||
const store = fileStore({
|
||||
rootDir,
|
||||
private: true,
|
||||
mode: 0o600,
|
||||
dirMode: 0o700,
|
||||
});
|
||||
```
|
||||
|
||||
or a thin alias:
|
||||
|
||||
```ts
|
||||
const store = stateStore({ rootDir, private: true });
|
||||
```
|
||||
|
||||
The store family should cover:
|
||||
|
||||
- `read`
|
||||
- `readText`
|
||||
- `readJson`
|
||||
- `readTextIfExists`
|
||||
- `readJsonIfExists`
|
||||
- `write`
|
||||
- `writeJson`
|
||||
- `remove`
|
||||
- `exists`
|
||||
- `open`
|
||||
- `copyIn`
|
||||
- `writeStream`
|
||||
- `pruneExpired`
|
||||
|
||||
This cleanup added that store shape in fs-safe, removed the unshipped
|
||||
`privateStateStore` surface, and moved OpenClaw internals and bundled plugins
|
||||
onto explicit store reads/writes.
|
||||
|
||||
### Temp
|
||||
|
||||
Keep stable public temp surface small:
|
||||
|
||||
```ts
|
||||
await using workspace = await tempWorkspace({ prefix: "openclaw-" });
|
||||
const target = workspace.path("payload.bin");
|
||||
```
|
||||
|
||||
Move one-shot temp target helpers and sibling-temp helpers to advanced/internal
|
||||
unless a concrete OpenClaw caller needs the public contract.
|
||||
|
||||
## Refactor Phases
|
||||
|
||||
### Phase 1: Inventory and Guards
|
||||
|
||||
- Add a small import-boundary test that lists allowed direct
|
||||
`@openclaw/fs-safe/*` imports in OpenClaw core.
|
||||
- Add regression tests for the JSON symlink behavior kept by
|
||||
`src/infra/json-file.ts`.
|
||||
- Add regression tests for public plugin SDK aliases that must keep resolving.
|
||||
- Add a doc note to the plugin SDK runtime docs once aliases are marked
|
||||
deprecated.
|
||||
|
||||
Exit criteria:
|
||||
|
||||
- The current compatibility surface is executable-tested.
|
||||
- New direct fs-safe imports are visible in review.
|
||||
|
||||
### Phase 2: JSON Name Cleanup
|
||||
|
||||
- Convert OpenClaw internal callers from old JSON names to canonical fs-safe
|
||||
names where the semantics are identical.
|
||||
- Keep plugin SDK aliases unchanged.
|
||||
- Collapse `src/infra/json-file.ts` and `src/infra/json-files.ts` into one
|
||||
compatibility module if that reduces indirection without losing symlink
|
||||
semantics.
|
||||
- Keep `saveJsonFile` symlink-target behavior until every caller/test is
|
||||
intentionally migrated.
|
||||
|
||||
Exit criteria:
|
||||
|
||||
- Core internal code no longer imports `readJsonFileStrict`,
|
||||
`readDurableJsonFile`, or `writeJsonAtomic` unless it is a compatibility shim.
|
||||
- Plugin SDK aliases still pass import/type tests.
|
||||
|
||||
### Phase 3: Store Unification
|
||||
|
||||
- Add the unified private mode to fs-safe's store API.
|
||||
- Remove the unshipped `privateStateStore` surface instead of keeping a second
|
||||
store family.
|
||||
- Migrate OpenClaw private-state internals to the unified store shape in small
|
||||
groups:
|
||||
- auth/profile state
|
||||
- device identity and device auth
|
||||
- cron/run logs
|
||||
- commitments
|
||||
- extension state
|
||||
- Regenerate the plugin SDK API baseline for the intentional pre-release
|
||||
private-helper removal.
|
||||
|
||||
Exit criteria:
|
||||
|
||||
- OpenClaw internals and bundled plugins do not call standalone private
|
||||
JSON/text helpers.
|
||||
- `fileStore({ private: true })` is the only private multi-file store API.
|
||||
|
||||
### Phase 4: Temp Simplification
|
||||
|
||||
- Replace OpenClaw one-shot temp target call sites with `tempWorkspace`.
|
||||
- Keep `resolvePreferredOpenClawTmpDir` as OpenClaw policy.
|
||||
- Move one-shot temp and sibling-temp helpers out of the curated OpenClaw
|
||||
wrapper surface.
|
||||
|
||||
Exit criteria:
|
||||
|
||||
- OpenClaw uses `tempWorkspace` for temporary file lifetimes unless a low-level
|
||||
atomic helper owns the temp path.
|
||||
|
||||
### Phase 5: Shim Reduction
|
||||
|
||||
- Group one-line fs-safe shims into a smaller number of named OpenClaw policy
|
||||
modules.
|
||||
- Delete shims that are no longer imported.
|
||||
- Keep shims that preserve public SDK names or OpenClaw-specific defaults.
|
||||
|
||||
Candidate stable shims:
|
||||
|
||||
- `src/infra/fs-safe.ts`
|
||||
- `src/infra/json-files.ts`
|
||||
- `src/infra/private-file-store.ts`
|
||||
- `src/infra/replace-file.ts`
|
||||
- `src/infra/boundary-file-read.ts`
|
||||
- `src/infra/archive.ts`
|
||||
|
||||
Candidate advanced-only grouping:
|
||||
|
||||
- path guards
|
||||
- symlink parent guards
|
||||
- hardlink guards
|
||||
- move-path helpers
|
||||
- file identity helpers
|
||||
- sibling temp helpers
|
||||
|
||||
Exit criteria:
|
||||
|
||||
- The local wrapper list has policy meaning, not one file per fs-safe module.
|
||||
|
||||
### Phase 6: fs-safe Public Surface Finalization
|
||||
|
||||
- Keep `@openclaw/fs-safe` main entry curated.
|
||||
- Keep `root()` as the primary README/API story.
|
||||
- Keep `openPinnedFileSync` internal. Use `readSecureFile`, `root().open`, or
|
||||
`openRootFile*` wrappers instead of exposing the fd-level pinned primitive.
|
||||
- Keep `createSidecarLockManager` internal. Public callers should use
|
||||
`acquireFileLock` / `withFileLock`; `createFileLockManager` is subpath-only
|
||||
for long-lived services that need held-lock inspection or drain/reset.
|
||||
- Move rare root escape hatches such as `openWritable` to advanced only if API
|
||||
checks show no supported caller needs the main root interface.
|
||||
- Keep `regular-file`, `secure-file`, archive, and root helpers separate
|
||||
because their trust models differ.
|
||||
- Remove or mark unstable any standalone helper that is fully covered by root or
|
||||
store methods.
|
||||
|
||||
Exit criteria:
|
||||
|
||||
- fs-safe has a stable pre-1.0 public surface.
|
||||
- OpenClaw imports only stable fs-safe APIs outside compatibility shims.
|
||||
|
||||
## Verification
|
||||
|
||||
Use targeted proof per phase:
|
||||
|
||||
- JSON cleanup:
|
||||
- JSON symlink tests
|
||||
- plugin SDK JSON-store import tests
|
||||
- representative extension tests that use JSON store aliases
|
||||
- Store unification:
|
||||
- private mode tests in fs-safe
|
||||
- auth profile persistence tests
|
||||
- device identity tests
|
||||
- cron/run-log tests
|
||||
- Temp cleanup:
|
||||
- media temp tests
|
||||
- Discord voice temp tests
|
||||
- QA-lab temp helper tests
|
||||
- Shim reduction:
|
||||
- plugin SDK API generation/check
|
||||
- import-boundary tests
|
||||
- `pnpm build`
|
||||
|
||||
Before merging a broad cleanup batch, run the changed gate and build:
|
||||
|
||||
```sh
|
||||
pnpm check:changed
|
||||
pnpm build
|
||||
```
|
||||
|
||||
Implementation proof from this cleanup:
|
||||
|
||||
- `pnpm test src/infra/fs-safe-import-boundary.test.ts src/plugin-sdk/temp-path.test.ts src/agents/models-config.write-serialization.test.ts src/infra/json-file.test.ts src/infra/json-files.test.ts`
|
||||
- `pnpm test src/infra/fs-safe-import-boundary.test.ts src/infra/device-auth-store.test.ts src/infra/device-identity.test.ts src/infra/exec-approvals.test.ts src/agents/models-config.write-serialization.test.ts src/agents/pi-embedded-runner/openrouter-model-capabilities.test.ts src/agents/harness/native-hook-relay.test.ts`
|
||||
- `pnpm test src/infra/fs-safe-import-boundary.test.ts src/infra/hardlink-guards.test.ts src/infra/file-identity.test.ts src/plugin-sdk/fs-safe-compat.test.ts src/plugin-sdk/temp-path.test.ts`
|
||||
- `pnpm plugin-sdk:api:check`
|
||||
- `pnpm build`
|
||||
- Blacksmith Testbox `pnpm install --frozen-lockfile --config.minimum-release-age=0 && pnpm check:changed`
|
||||
- In `../fs-safe`: `pnpm docs:site && pnpm build && pnpm test test/api-coverage.test.ts test/new-primitives.test.ts`
|
||||
|
||||
## Review Checklist
|
||||
|
||||
- Does this change reduce the number of public names, local wrappers, or
|
||||
duplicated semantic families?
|
||||
- Is the old name public plugin SDK surface? If yes, keep a deprecated alias.
|
||||
- Does the replacement preserve symlink, hardlink, mode, and missing-file
|
||||
behavior?
|
||||
- Is the caller using an untrusted relative path, trusted absolute path, secret
|
||||
path, archive entry, or temp lifetime? Pick the helper that says that out
|
||||
loud.
|
||||
- Are docs and plugin SDK API snapshots updated when exported names change?
|
||||
@@ -3369,10 +3369,8 @@ describe("active-memory plugin", () => {
|
||||
});
|
||||
|
||||
it("keeps subagent transcripts off disk by default by using a temp session file", async () => {
|
||||
const mkdtempSpy = vi
|
||||
.spyOn(fs, "mkdtemp")
|
||||
.mockResolvedValue("/tmp/openclaw-active-memory-temp");
|
||||
const rmSpy = vi.spyOn(fs, "rm").mockResolvedValue(undefined);
|
||||
const mkdtempSpy = vi.spyOn(fs, "mkdtemp");
|
||||
const rmSpy = vi.spyOn(fs, "rm");
|
||||
|
||||
await hooks.before_prompt_build(
|
||||
{ prompt: "what wings should i order? temp transcript path", messages: [] },
|
||||
@@ -3385,10 +3383,9 @@ describe("active-memory plugin", () => {
|
||||
);
|
||||
|
||||
expect(mkdtempSpy).toHaveBeenCalled();
|
||||
expect(runEmbeddedPiAgent.mock.calls.at(-1)?.[0]?.sessionFile).toBe(
|
||||
"/tmp/openclaw-active-memory-temp/session.jsonl",
|
||||
);
|
||||
expect(rmSpy).toHaveBeenCalledWith("/tmp/openclaw-active-memory-temp", {
|
||||
const sessionFile = runEmbeddedPiAgent.mock.calls.at(-1)?.[0]?.sessionFile;
|
||||
expect(sessionFile).toMatch(/openclaw-active-memory-.*\/session\.jsonl$/);
|
||||
expect(rmSpy).toHaveBeenCalledWith(path.dirname(sessionFile), {
|
||||
recursive: true,
|
||||
force: true,
|
||||
});
|
||||
|
||||
@@ -18,11 +18,12 @@ import {
|
||||
} from "openclaw/plugin-sdk/plugin-config-runtime";
|
||||
import { definePluginEntry, type OpenClawPluginApi } from "openclaw/plugin-sdk/plugin-entry";
|
||||
import { parseAgentSessionKey, parseThreadSessionSuffix } from "openclaw/plugin-sdk/routing";
|
||||
import { isPathInside, replaceFileAtomic } from "openclaw/plugin-sdk/security-runtime";
|
||||
import {
|
||||
resolveSessionStoreEntry,
|
||||
updateSessionStore,
|
||||
} from "openclaw/plugin-sdk/session-store-runtime";
|
||||
import { resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
|
||||
import { tempWorkspace, resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
|
||||
|
||||
const DEFAULT_TIMEOUT_MS = 15_000;
|
||||
const DEFAULT_AGENT_ID = "main";
|
||||
@@ -422,7 +423,7 @@ function resolveSafeTranscriptDir(baseSessionsDir: string, transcriptDir: string
|
||||
}
|
||||
const resolvedBase = path.resolve(baseSessionsDir);
|
||||
const candidate = path.resolve(resolvedBase, normalized);
|
||||
if (candidate !== resolvedBase && !candidate.startsWith(resolvedBase + path.sep)) {
|
||||
if (!isPathInside(resolvedBase, candidate)) {
|
||||
return path.resolve(resolvedBase, DEFAULT_TRANSCRIPT_DIR);
|
||||
}
|
||||
return candidate;
|
||||
@@ -664,14 +665,11 @@ async function readToggleStore(statePath: string): Promise<ActiveMemoryToggleSto
|
||||
}
|
||||
|
||||
async function writeToggleStore(statePath: string, store: ActiveMemoryToggleStore): Promise<void> {
|
||||
await fs.mkdir(path.dirname(statePath), { recursive: true });
|
||||
const tempPath = `${statePath}.${process.pid}.${Date.now()}.${crypto.randomUUID()}.tmp`;
|
||||
try {
|
||||
await fs.writeFile(tempPath, `${JSON.stringify(store, null, 2)}\n`, "utf8");
|
||||
await fs.rename(tempPath, statePath);
|
||||
} finally {
|
||||
await fs.rm(tempPath, { force: true }).catch(() => undefined);
|
||||
}
|
||||
await replaceFileAtomic({
|
||||
filePath: statePath,
|
||||
content: `${JSON.stringify(store, null, 2)}\n`,
|
||||
tempPrefix: ".active-memory",
|
||||
});
|
||||
}
|
||||
|
||||
async function isSessionActiveMemoryDisabled(params: {
|
||||
@@ -2378,9 +2376,13 @@ async function runRecallSubagent(params: {
|
||||
const subagentSessionKey = parentSessionKey
|
||||
? `${parentSessionKey}:${subagentSuffix}`
|
||||
: `agent:${params.agentId}:${subagentSuffix}`;
|
||||
const tempDir = params.config.persistTranscripts
|
||||
const transientWorkspace = params.config.persistTranscripts
|
||||
? undefined
|
||||
: await fs.mkdtemp(path.join(resolvePreferredOpenClawTmpDir(), "openclaw-active-memory-"));
|
||||
: await tempWorkspace({
|
||||
rootDir: resolvePreferredOpenClawTmpDir(),
|
||||
prefix: "openclaw-active-memory-",
|
||||
});
|
||||
const tempDir = transientWorkspace?.dir;
|
||||
const persistedDir = params.config.persistTranscripts
|
||||
? resolveSafeTranscriptDir(
|
||||
resolvePersistentTranscriptBaseDir(params.api, params.agentId),
|
||||
@@ -2479,9 +2481,7 @@ async function runRecallSubagent(params: {
|
||||
}
|
||||
throw error;
|
||||
} finally {
|
||||
if (tempDir) {
|
||||
await fs.rm(tempDir, { recursive: true, force: true }).catch(() => {});
|
||||
}
|
||||
await transientWorkspace?.cleanup();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import crypto from "node:crypto";
|
||||
import path from "node:path";
|
||||
import { sanitizeUntrustedFileName } from "openclaw/plugin-sdk/security-runtime";
|
||||
import {
|
||||
normalizeLowercaseStringOrEmpty,
|
||||
normalizeOptionalLowercaseString,
|
||||
@@ -35,9 +36,7 @@ const AUDIO_MIME_MP3 = new Set(["audio/mpeg", "audio/mp3"]);
|
||||
const AUDIO_MIME_CAF = new Set(["audio/x-caf", "audio/caf"]);
|
||||
|
||||
function sanitizeFilename(input: string | undefined, fallback: string): string {
|
||||
const trimmed = input?.trim() ?? "";
|
||||
const base = trimmed ? path.basename(trimmed) : "";
|
||||
const name = base || fallback;
|
||||
const name = sanitizeUntrustedFileName(input ?? "", fallback);
|
||||
// Strip characters that could enable multipart header injection (CWE-93)
|
||||
return name.replace(/[\r\n"\\]/g, "_");
|
||||
}
|
||||
|
||||
@@ -1,13 +1,8 @@
|
||||
import { constants as fsConstants } from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import {
|
||||
basenameFromMediaSource,
|
||||
safeFileURLToPath,
|
||||
readLocalFileFromRoots,
|
||||
} from "openclaw/plugin-sdk/file-access-runtime";
|
||||
import { resolveChannelMediaMaxBytes } from "openclaw/plugin-sdk/media-runtime";
|
||||
import { lowercasePreservingWhitespace } from "openclaw/plugin-sdk/text-runtime";
|
||||
import { resolveBlueBubblesAccount } from "./accounts.js";
|
||||
import { sendBlueBubblesAttachment } from "./attachments.js";
|
||||
import { resolveBlueBubblesMessageId } from "./monitor-reply-cache.js";
|
||||
@@ -31,61 +26,6 @@ function assertMediaWithinLimit(sizeBytes: number, maxBytes?: number): void {
|
||||
throw new Error(`Media exceeds ${maxLabel}MB limit (got ${sizeLabel}MB)`);
|
||||
}
|
||||
|
||||
function resolveLocalMediaPath(source: string): string {
|
||||
if (!source.startsWith("file://")) {
|
||||
return source;
|
||||
}
|
||||
try {
|
||||
return safeFileURLToPath(source);
|
||||
} catch {
|
||||
throw new Error(`Invalid file:// URL: ${source}`);
|
||||
}
|
||||
}
|
||||
|
||||
function expandHomePath(input: string): string {
|
||||
if (input === "~") {
|
||||
return os.homedir();
|
||||
}
|
||||
if (input.startsWith("~/") || input.startsWith(`~${path.sep}`)) {
|
||||
return path.join(os.homedir(), input.slice(2));
|
||||
}
|
||||
return input;
|
||||
}
|
||||
|
||||
function resolveConfiguredPath(input: string): string {
|
||||
const trimmed = input.trim();
|
||||
if (!trimmed) {
|
||||
throw new Error("Empty mediaLocalRoots entry is not allowed");
|
||||
}
|
||||
if (trimmed.startsWith("file://")) {
|
||||
try {
|
||||
return safeFileURLToPath(trimmed);
|
||||
} catch {
|
||||
throw new Error(`Invalid file:// URL in mediaLocalRoots: ${input}`);
|
||||
}
|
||||
}
|
||||
const resolved = expandHomePath(trimmed);
|
||||
if (!path.isAbsolute(resolved)) {
|
||||
throw new Error(`mediaLocalRoots entries must be absolute paths: ${input}`);
|
||||
}
|
||||
return resolved;
|
||||
}
|
||||
|
||||
function isPathInsideRoot(candidate: string, root: string): boolean {
|
||||
const normalizedCandidate = path.normalize(candidate);
|
||||
const normalizedRoot = path.normalize(root);
|
||||
const rootWithSep = normalizedRoot.endsWith(path.sep)
|
||||
? normalizedRoot
|
||||
: normalizedRoot + path.sep;
|
||||
if (process.platform === "win32") {
|
||||
const candidateLower = lowercasePreservingWhitespace(normalizedCandidate);
|
||||
const rootLower = lowercasePreservingWhitespace(normalizedRoot);
|
||||
const rootWithSepLower = lowercasePreservingWhitespace(rootWithSep);
|
||||
return candidateLower === rootLower || candidateLower.startsWith(rootWithSepLower);
|
||||
}
|
||||
return normalizedCandidate === normalizedRoot || normalizedCandidate.startsWith(rootWithSep);
|
||||
}
|
||||
|
||||
function resolveMediaLocalRoots(params: { cfg: OpenClawConfig; accountId?: string }): string[] {
|
||||
const account = resolveBlueBubblesAccount({
|
||||
cfg: params.cfg,
|
||||
@@ -111,60 +51,17 @@ async function assertLocalMediaPathAllowed(params: {
|
||||
);
|
||||
}
|
||||
|
||||
const resolvedLocalPath = path.resolve(params.localPath);
|
||||
const supportsNoFollow = process.platform !== "win32" && "O_NOFOLLOW" in fsConstants;
|
||||
const openFlags = fsConstants.O_RDONLY | (supportsNoFollow ? fsConstants.O_NOFOLLOW : 0);
|
||||
|
||||
for (const rootEntry of params.localRoots) {
|
||||
const resolvedRootInput = resolveConfiguredPath(rootEntry);
|
||||
const relativeToRoot = path.relative(resolvedRootInput, resolvedLocalPath);
|
||||
if (
|
||||
relativeToRoot.startsWith("..") ||
|
||||
path.isAbsolute(relativeToRoot) ||
|
||||
relativeToRoot === ""
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let rootReal: string;
|
||||
try {
|
||||
rootReal = await fs.realpath(resolvedRootInput);
|
||||
} catch {
|
||||
rootReal = path.resolve(resolvedRootInput);
|
||||
}
|
||||
const candidatePath = path.resolve(rootReal, relativeToRoot);
|
||||
|
||||
if (!isPathInsideRoot(candidatePath, rootReal)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let handle: Awaited<ReturnType<typeof fs.open>> | null = null;
|
||||
try {
|
||||
handle = await fs.open(candidatePath, openFlags);
|
||||
const realPath = await fs.realpath(candidatePath);
|
||||
if (!isPathInsideRoot(realPath, rootReal)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const stat = await handle.stat();
|
||||
if (!stat.isFile()) {
|
||||
continue;
|
||||
}
|
||||
const realStat = await fs.stat(realPath);
|
||||
if (stat.ino !== realStat.ino || stat.dev !== realStat.dev) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const data = await handle.readFile();
|
||||
return { data, realPath, sizeBytes: stat.size };
|
||||
} catch {
|
||||
// Try next configured root.
|
||||
continue;
|
||||
} finally {
|
||||
if (handle) {
|
||||
await handle.close().catch(() => {});
|
||||
}
|
||||
}
|
||||
const localFile = await readLocalFileFromRoots({
|
||||
filePath: params.localPath,
|
||||
roots: params.localRoots,
|
||||
label: "mediaLocalRoots",
|
||||
});
|
||||
if (localFile) {
|
||||
return {
|
||||
data: localFile.buffer,
|
||||
realPath: localFile.realPath,
|
||||
sizeBytes: localFile.stat.size,
|
||||
};
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
@@ -244,9 +141,8 @@ export async function sendBlueBubblesMedia(params: {
|
||||
resolvedContentType = resolvedContentType ?? fetched.contentType ?? undefined;
|
||||
resolvedFilename = resolvedFilename ?? fetched.fileName;
|
||||
} else {
|
||||
const localPath = expandHomePath(resolveLocalMediaPath(source));
|
||||
const localFile = await assertLocalMediaPathAllowed({
|
||||
localPath,
|
||||
localPath: source,
|
||||
localRoots: mediaLocalRoots,
|
||||
accountId,
|
||||
});
|
||||
|
||||
@@ -1,51 +1,13 @@
|
||||
import crypto from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { writeFileFromPathWithinRoot } from "../infra/fs-safe.js";
|
||||
import { sanitizeUntrustedFileName } from "./safe-filename.js";
|
||||
|
||||
function buildSiblingTempPath(targetPath: string): string {
|
||||
const id = crypto.randomUUID();
|
||||
const safeTail = sanitizeUntrustedFileName(path.basename(targetPath), "output.bin");
|
||||
return path.join(path.dirname(targetPath), `.openclaw-output-${id}-${safeTail}.part`);
|
||||
}
|
||||
import { writeViaSiblingTempPath as writeViaSiblingTempPathBase } from "../sdk-security-runtime.js";
|
||||
|
||||
export async function writeViaSiblingTempPath(params: {
|
||||
rootDir: string;
|
||||
targetPath: string;
|
||||
writeTemp: (tempPath: string) => Promise<void>;
|
||||
}): Promise<void> {
|
||||
const rootDir = await fs
|
||||
.realpath(path.resolve(params.rootDir))
|
||||
.catch(() => path.resolve(params.rootDir));
|
||||
const requestedTargetPath = path.resolve(params.targetPath);
|
||||
const targetPath = await fs
|
||||
.realpath(path.dirname(requestedTargetPath))
|
||||
.then((realDir) => path.join(realDir, path.basename(requestedTargetPath)))
|
||||
.catch(() => requestedTargetPath);
|
||||
const relativeTargetPath = path.relative(rootDir, targetPath);
|
||||
if (
|
||||
!relativeTargetPath ||
|
||||
relativeTargetPath === ".." ||
|
||||
relativeTargetPath.startsWith(`..${path.sep}`) ||
|
||||
path.isAbsolute(relativeTargetPath)
|
||||
) {
|
||||
throw new Error("Target path is outside the allowed root");
|
||||
}
|
||||
const tempPath = buildSiblingTempPath(targetPath);
|
||||
let renameSucceeded = false;
|
||||
try {
|
||||
await params.writeTemp(tempPath);
|
||||
await writeFileFromPathWithinRoot({
|
||||
rootDir,
|
||||
relativePath: relativeTargetPath,
|
||||
sourcePath: tempPath,
|
||||
mkdir: false,
|
||||
});
|
||||
renameSucceeded = true;
|
||||
} finally {
|
||||
if (!renameSucceeded) {
|
||||
await fs.rm(tempPath, { force: true }).catch(() => {});
|
||||
}
|
||||
}
|
||||
await writeViaSiblingTempPathBase({
|
||||
...params,
|
||||
fallbackFileName: "output.bin",
|
||||
tempPrefix: ".openclaw-output-",
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,8 +1,13 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { SafeOpenError, openFileWithinRoot } from "../infra/fs-safe.js";
|
||||
import { isNotFoundPathError, isPathInside } from "../infra/path-guards.js";
|
||||
import { resolvePreferredOpenClawTmpDir } from "../infra/tmp-openclaw-dir.js";
|
||||
export {
|
||||
resolveExistingPathsWithinRoot,
|
||||
pathScope,
|
||||
resolvePathsWithinRoot,
|
||||
resolvePathWithinRoot,
|
||||
resolveStrictExistingPathsWithinRoot,
|
||||
resolveWritablePathWithinRoot,
|
||||
} from "../sdk-security-runtime.js";
|
||||
|
||||
const DEFAULT_FALLBACK_BROWSER_TMP_DIR = "/tmp/openclaw";
|
||||
|
||||
@@ -28,241 +33,3 @@ const DEFAULT_BROWSER_TMP_DIR = canUseNodeFs()
|
||||
export const DEFAULT_TRACE_DIR = DEFAULT_BROWSER_TMP_DIR;
|
||||
export const DEFAULT_DOWNLOAD_DIR = path.join(DEFAULT_BROWSER_TMP_DIR, "downloads");
|
||||
export const DEFAULT_UPLOAD_DIR = path.join(DEFAULT_BROWSER_TMP_DIR, "uploads");
|
||||
|
||||
type InvalidPathResult = { ok: false; error: string };
|
||||
type ResolvePathsWithinRootParams = {
|
||||
rootDir: string;
|
||||
requestedPaths: string[];
|
||||
scopeLabel: string;
|
||||
};
|
||||
type ResolvePathsWithinRootResult = { ok: true; paths: string[] } | InvalidPathResult;
|
||||
|
||||
function invalidPath(scopeLabel: string): InvalidPathResult {
|
||||
return {
|
||||
ok: false,
|
||||
error: `Invalid path: must stay within ${scopeLabel}`,
|
||||
};
|
||||
}
|
||||
|
||||
async function resolveRealPathIfExists(targetPath: string): Promise<string | undefined> {
|
||||
try {
|
||||
return await fs.realpath(targetPath);
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
async function resolveTrustedRootRealPath(rootDir: string): Promise<string | undefined> {
|
||||
try {
|
||||
const rootLstat = await fs.lstat(rootDir);
|
||||
if (!rootLstat.isDirectory() || rootLstat.isSymbolicLink()) {
|
||||
return undefined;
|
||||
}
|
||||
return await fs.realpath(rootDir);
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
async function validateCanonicalPathWithinRoot(params: {
|
||||
rootRealPath: string;
|
||||
candidatePath: string;
|
||||
expect: "directory" | "file";
|
||||
}): Promise<"ok" | "not-found" | "invalid"> {
|
||||
try {
|
||||
const candidateLstat = await fs.lstat(params.candidatePath);
|
||||
if (candidateLstat.isSymbolicLink()) {
|
||||
return "invalid";
|
||||
}
|
||||
if (params.expect === "directory" && !candidateLstat.isDirectory()) {
|
||||
return "invalid";
|
||||
}
|
||||
if (params.expect === "file" && !candidateLstat.isFile()) {
|
||||
return "invalid";
|
||||
}
|
||||
if (params.expect === "file" && candidateLstat.nlink > 1) {
|
||||
return "invalid";
|
||||
}
|
||||
const candidateRealPath = await fs.realpath(params.candidatePath);
|
||||
return isPathInside(params.rootRealPath, candidateRealPath) ? "ok" : "invalid";
|
||||
} catch (err) {
|
||||
return isNotFoundPathError(err) ? "not-found" : "invalid";
|
||||
}
|
||||
}
|
||||
|
||||
export function resolvePathWithinRoot(params: {
|
||||
rootDir: string;
|
||||
requestedPath: string;
|
||||
scopeLabel: string;
|
||||
defaultFileName?: string;
|
||||
}): { ok: true; path: string } | { ok: false; error: string } {
|
||||
const root = path.resolve(params.rootDir);
|
||||
const raw = params.requestedPath.trim();
|
||||
if (!raw) {
|
||||
if (!params.defaultFileName) {
|
||||
return { ok: false, error: "path is required" };
|
||||
}
|
||||
return { ok: true, path: path.join(root, params.defaultFileName) };
|
||||
}
|
||||
const resolved = path.resolve(root, raw);
|
||||
const rel = path.relative(root, resolved);
|
||||
if (!rel || rel.startsWith("..") || path.isAbsolute(rel)) {
|
||||
return { ok: false, error: `Invalid path: must stay within ${params.scopeLabel}` };
|
||||
}
|
||||
return { ok: true, path: resolved };
|
||||
}
|
||||
|
||||
export async function resolveWritablePathWithinRoot(params: {
|
||||
rootDir: string;
|
||||
requestedPath: string;
|
||||
scopeLabel: string;
|
||||
defaultFileName?: string;
|
||||
}): Promise<{ ok: true; path: string } | { ok: false; error: string }> {
|
||||
const lexical = resolvePathWithinRoot(params);
|
||||
if (!lexical.ok) {
|
||||
return lexical;
|
||||
}
|
||||
|
||||
const rootDir = path.resolve(params.rootDir);
|
||||
const rootRealPath = await resolveTrustedRootRealPath(rootDir);
|
||||
if (!rootRealPath) {
|
||||
return invalidPath(params.scopeLabel);
|
||||
}
|
||||
|
||||
const requestedPath = lexical.path;
|
||||
const parentDir = path.dirname(requestedPath);
|
||||
const parentStatus = await validateCanonicalPathWithinRoot({
|
||||
rootRealPath,
|
||||
candidatePath: parentDir,
|
||||
expect: "directory",
|
||||
});
|
||||
if (parentStatus !== "ok") {
|
||||
return invalidPath(params.scopeLabel);
|
||||
}
|
||||
|
||||
const targetStatus = await validateCanonicalPathWithinRoot({
|
||||
rootRealPath,
|
||||
candidatePath: requestedPath,
|
||||
expect: "file",
|
||||
});
|
||||
if (targetStatus === "invalid") {
|
||||
return invalidPath(params.scopeLabel);
|
||||
}
|
||||
|
||||
return lexical;
|
||||
}
|
||||
|
||||
export function resolvePathsWithinRoot(
|
||||
params: ResolvePathsWithinRootParams,
|
||||
): ResolvePathsWithinRootResult {
|
||||
const resolvedPaths: string[] = [];
|
||||
for (const raw of params.requestedPaths) {
|
||||
const pathResult = resolvePathWithinRoot({
|
||||
rootDir: params.rootDir,
|
||||
requestedPath: raw,
|
||||
scopeLabel: params.scopeLabel,
|
||||
});
|
||||
if (!pathResult.ok) {
|
||||
return { ok: false, error: pathResult.error };
|
||||
}
|
||||
resolvedPaths.push(pathResult.path);
|
||||
}
|
||||
return { ok: true, paths: resolvedPaths };
|
||||
}
|
||||
|
||||
export async function resolveExistingPathsWithinRoot(
|
||||
params: ResolvePathsWithinRootParams,
|
||||
): Promise<ResolvePathsWithinRootResult> {
|
||||
return await resolveCheckedPathsWithinRoot(params, true);
|
||||
}
|
||||
|
||||
export async function resolveStrictExistingPathsWithinRoot(
|
||||
params: ResolvePathsWithinRootParams,
|
||||
): Promise<ResolvePathsWithinRootResult> {
|
||||
return await resolveCheckedPathsWithinRoot(params, false);
|
||||
}
|
||||
|
||||
async function resolveCheckedPathsWithinRoot(
|
||||
params: ResolvePathsWithinRootParams,
|
||||
allowMissingFallback: boolean,
|
||||
): Promise<ResolvePathsWithinRootResult> {
|
||||
const rootDir = path.resolve(params.rootDir);
|
||||
// Keep historical behavior for missing roots and rely on openFileWithinRoot for final checks.
|
||||
const rootRealPath = await resolveRealPathIfExists(rootDir);
|
||||
|
||||
const isInRoot = (relativePath: string) =>
|
||||
Boolean(relativePath) && !relativePath.startsWith("..") && !path.isAbsolute(relativePath);
|
||||
|
||||
const resolveExistingRelativePath = async (
|
||||
requestedPath: string,
|
||||
): Promise<
|
||||
{ ok: true; relativePath: string; fallbackPath: string } | { ok: false; error: string }
|
||||
> => {
|
||||
const raw = requestedPath.trim();
|
||||
const lexicalPathResult = resolvePathWithinRoot({
|
||||
rootDir,
|
||||
requestedPath,
|
||||
scopeLabel: params.scopeLabel,
|
||||
});
|
||||
if (lexicalPathResult.ok) {
|
||||
return {
|
||||
ok: true,
|
||||
relativePath: path.relative(rootDir, lexicalPathResult.path),
|
||||
fallbackPath: lexicalPathResult.path,
|
||||
};
|
||||
}
|
||||
if (!rootRealPath || !raw || !path.isAbsolute(raw)) {
|
||||
return lexicalPathResult;
|
||||
}
|
||||
try {
|
||||
const resolvedExistingPath = await fs.realpath(raw);
|
||||
const relativePath = path.relative(rootRealPath, resolvedExistingPath);
|
||||
if (!isInRoot(relativePath)) {
|
||||
return lexicalPathResult;
|
||||
}
|
||||
return {
|
||||
ok: true,
|
||||
relativePath,
|
||||
fallbackPath: resolvedExistingPath,
|
||||
};
|
||||
} catch {
|
||||
return lexicalPathResult;
|
||||
}
|
||||
};
|
||||
|
||||
const resolvedPaths: string[] = [];
|
||||
for (const raw of params.requestedPaths) {
|
||||
const pathResult = await resolveExistingRelativePath(raw);
|
||||
if (!pathResult.ok) {
|
||||
return { ok: false, error: pathResult.error };
|
||||
}
|
||||
|
||||
let opened: Awaited<ReturnType<typeof openFileWithinRoot>> | undefined;
|
||||
try {
|
||||
opened = await openFileWithinRoot({
|
||||
rootDir,
|
||||
relativePath: pathResult.relativePath,
|
||||
});
|
||||
resolvedPaths.push(opened.realPath);
|
||||
} catch (err) {
|
||||
if (allowMissingFallback && err instanceof SafeOpenError && err.code === "not-found") {
|
||||
// Preserve historical behavior for paths that do not exist yet.
|
||||
resolvedPaths.push(pathResult.fallbackPath);
|
||||
continue;
|
||||
}
|
||||
if (err instanceof SafeOpenError && err.code === "outside-workspace") {
|
||||
return {
|
||||
ok: false,
|
||||
error: `File is outside ${params.scopeLabel}`,
|
||||
};
|
||||
}
|
||||
return {
|
||||
ok: false,
|
||||
error: `Invalid path: must stay within ${params.scopeLabel} and be a regular non-symlink file`,
|
||||
};
|
||||
} finally {
|
||||
await opened?.handle.close().catch(() => {});
|
||||
}
|
||||
}
|
||||
return { ok: true, paths: resolvedPaths };
|
||||
}
|
||||
|
||||
@@ -8,7 +8,7 @@ import {
|
||||
withRouteTabContext,
|
||||
} from "./agent.shared.js";
|
||||
import { EXISTING_SESSION_LIMITS } from "./existing-session-limits.js";
|
||||
import { DEFAULT_UPLOAD_DIR, resolveExistingPathsWithinRoot } from "./path-output.js";
|
||||
import { DEFAULT_UPLOAD_DIR, pathScope } from "./path-output.js";
|
||||
import type { BrowserRouteRegistrar } from "./types.js";
|
||||
import {
|
||||
asyncBrowserRoute,
|
||||
@@ -43,11 +43,9 @@ export function registerBrowserAgentActHookRoutes(
|
||||
ctx,
|
||||
targetId,
|
||||
run: async ({ profileCtx, cdpUrl, tab }) => {
|
||||
const uploadPathsResult = await resolveExistingPathsWithinRoot({
|
||||
rootDir: DEFAULT_UPLOAD_DIR,
|
||||
requestedPaths: paths,
|
||||
scopeLabel: `uploads directory (${DEFAULT_UPLOAD_DIR})`,
|
||||
});
|
||||
const uploadPathsResult = await pathScope(DEFAULT_UPLOAD_DIR, {
|
||||
label: `uploads directory (${DEFAULT_UPLOAD_DIR})`,
|
||||
}).existing(paths);
|
||||
if (!uploadPathsResult.ok) {
|
||||
res.status(400).json({ error: uploadPathsResult.error });
|
||||
return;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import fs from "node:fs/promises";
|
||||
import { resolveWritablePathWithinRoot } from "./path-output.js";
|
||||
import { pathScope } from "./path-output.js";
|
||||
import type { BrowserResponse } from "./types.js";
|
||||
|
||||
export async function ensureOutputRootDir(rootDir: string): Promise<void> {
|
||||
@@ -17,12 +17,10 @@ export async function resolveWritableOutputPathOrRespond(params: {
|
||||
if (params.ensureRootDir) {
|
||||
await ensureOutputRootDir(params.rootDir);
|
||||
}
|
||||
const pathResult = await resolveWritablePathWithinRoot({
|
||||
rootDir: params.rootDir,
|
||||
requestedPath: params.requestedPath,
|
||||
scopeLabel: params.scopeLabel,
|
||||
defaultFileName: params.defaultFileName,
|
||||
});
|
||||
const pathResult = await pathScope(params.rootDir, { label: params.scopeLabel }).writable(
|
||||
params.requestedPath,
|
||||
{ defaultName: params.defaultFileName },
|
||||
);
|
||||
if (!pathResult.ok) {
|
||||
params.res.status(400).json({ error: pathResult.error });
|
||||
return null;
|
||||
|
||||
@@ -2,6 +2,7 @@ export {
|
||||
DEFAULT_DOWNLOAD_DIR,
|
||||
DEFAULT_TRACE_DIR,
|
||||
DEFAULT_UPLOAD_DIR,
|
||||
pathScope,
|
||||
resolveExistingPathsWithinRoot,
|
||||
resolveWritablePathWithinRoot,
|
||||
} from "../paths.js";
|
||||
|
||||
@@ -1,27 +1 @@
|
||||
import path from "node:path";
|
||||
import { normalizeOptionalString } from "openclaw/plugin-sdk/text-runtime";
|
||||
|
||||
export function sanitizeUntrustedFileName(fileName: string, fallbackName: string): string {
|
||||
const trimmed = normalizeOptionalString(fileName) ?? "";
|
||||
if (!trimmed) {
|
||||
return fallbackName;
|
||||
}
|
||||
let base = path.posix.basename(trimmed);
|
||||
base = path.win32.basename(base);
|
||||
let cleaned = "";
|
||||
for (let i = 0; i < base.length; i++) {
|
||||
const code = base.charCodeAt(i);
|
||||
if (code < 0x20 || code === 0x7f) {
|
||||
continue;
|
||||
}
|
||||
cleaned += base[i];
|
||||
}
|
||||
base = cleaned.trim();
|
||||
if (!base || base === "." || base === "..") {
|
||||
return fallbackName;
|
||||
}
|
||||
if (base.length > 200) {
|
||||
base = base.slice(0, 200);
|
||||
}
|
||||
return base;
|
||||
}
|
||||
export { sanitizeUntrustedFileName } from "../sdk-security-runtime.js";
|
||||
|
||||
@@ -1,5 +1 @@
|
||||
export {
|
||||
SafeOpenError,
|
||||
openFileWithinRoot,
|
||||
writeFileFromPathWithinRoot,
|
||||
} from "../sdk-security-runtime.js";
|
||||
export { root, FsSafeError } from "../sdk-security-runtime.js";
|
||||
|
||||
@@ -9,13 +9,20 @@ export {
|
||||
isPrivateNetworkAllowedByPolicy,
|
||||
matchesHostnameAllowlist,
|
||||
normalizeHostname,
|
||||
openFileWithinRoot,
|
||||
pathScope,
|
||||
redactSensitiveText,
|
||||
resolveExistingPathsWithinRoot,
|
||||
resolvePinnedHostnameWithPolicy,
|
||||
resolvePathsWithinRoot,
|
||||
resolvePathWithinRoot,
|
||||
root,
|
||||
safeEqualSecret,
|
||||
SafeOpenError,
|
||||
sanitizeUntrustedFileName,
|
||||
resolveStrictExistingPathsWithinRoot,
|
||||
resolveWritablePathWithinRoot,
|
||||
FsSafeError,
|
||||
SsrFBlockedError,
|
||||
writeViaSiblingTempPath,
|
||||
wrapExternalContent,
|
||||
writeFileFromPathWithinRoot,
|
||||
} from "openclaw/plugin-sdk/security-runtime";
|
||||
export type { LookupFn, SsrFPolicy } from "openclaw/plugin-sdk/security-runtime";
|
||||
|
||||
@@ -40,6 +40,7 @@ import {
|
||||
} from "openclaw/plugin-sdk/agent-harness-runtime";
|
||||
import { resolveAgentDir } from "openclaw/plugin-sdk/agent-runtime";
|
||||
import { emitTrustedDiagnosticEvent } from "openclaw/plugin-sdk/diagnostic-runtime";
|
||||
import { pathExists } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { handleCodexAppServerApprovalRequest } from "./approval-bridge.js";
|
||||
import {
|
||||
refreshCodexAppServerAuthTokens,
|
||||
@@ -444,7 +445,7 @@ export async function runCodexAppServerAttempt(
|
||||
runId: params.runId,
|
||||
},
|
||||
});
|
||||
const hadSessionFile = await fileExists(params.sessionFile);
|
||||
const hadSessionFile = await pathExists(params.sessionFile);
|
||||
let historyMessages = (await readMirroredSessionHistoryMessages(params.sessionFile)) ?? [];
|
||||
const hookContext = {
|
||||
runId: params.runId,
|
||||
@@ -1927,18 +1928,6 @@ async function mirrorTranscriptBestEffort(params: {
|
||||
}
|
||||
}
|
||||
|
||||
async function fileExists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.stat(filePath);
|
||||
return true;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||
return false;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
function isNonEmptyString(value: unknown): value is string {
|
||||
return typeof value === "string" && value.length > 0;
|
||||
}
|
||||
|
||||
@@ -1,22 +1,9 @@
|
||||
import { withTimeout as withSharedTimeout } from "openclaw/plugin-sdk/security-runtime";
|
||||
|
||||
export async function withTimeout<T>(
|
||||
promise: Promise<T>,
|
||||
timeoutMs: number,
|
||||
timeoutMessage: string,
|
||||
): Promise<T> {
|
||||
if (!Number.isFinite(timeoutMs) || timeoutMs <= 0) {
|
||||
return await promise;
|
||||
}
|
||||
let timeout: NodeJS.Timeout | undefined;
|
||||
try {
|
||||
return await Promise.race([
|
||||
promise,
|
||||
new Promise<never>((_, reject) => {
|
||||
timeout = setTimeout(() => reject(new Error(timeoutMessage)), Math.max(1, timeoutMs));
|
||||
}),
|
||||
]);
|
||||
} finally {
|
||||
if (timeout) {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
}
|
||||
return await withSharedTimeout(promise, timeoutMs, { message: timeoutMessage });
|
||||
}
|
||||
|
||||
@@ -6,6 +6,10 @@ import type {
|
||||
EmbeddedRunAttemptResult,
|
||||
} from "openclaw/plugin-sdk/agent-harness";
|
||||
import { resolveUserPath } from "openclaw/plugin-sdk/agent-harness";
|
||||
import {
|
||||
appendRegularFile,
|
||||
resolveRegularFileAppendFlags,
|
||||
} from "openclaw/plugin-sdk/security-runtime";
|
||||
|
||||
type CodexTrajectoryRecorder = {
|
||||
filePath: string;
|
||||
@@ -39,13 +43,7 @@ type CodexTrajectoryOpenFlagConstants = Pick<
|
||||
export function resolveCodexTrajectoryAppendFlags(
|
||||
constants: CodexTrajectoryOpenFlagConstants = nodeFs.constants,
|
||||
): number {
|
||||
const noFollow = constants.O_NOFOLLOW;
|
||||
return (
|
||||
constants.O_CREAT |
|
||||
constants.O_APPEND |
|
||||
constants.O_WRONLY |
|
||||
(typeof noFollow === "number" ? noFollow : 0)
|
||||
);
|
||||
return resolveRegularFileAppendFlags(constants);
|
||||
}
|
||||
|
||||
export function resolveCodexTrajectoryPointerFlags(
|
||||
@@ -60,78 +58,13 @@ export function resolveCodexTrajectoryPointerFlags(
|
||||
);
|
||||
}
|
||||
|
||||
async function assertNoSymlinkParents(filePath: string): Promise<void> {
|
||||
const resolvedDir = path.resolve(path.dirname(filePath));
|
||||
const parsed = path.parse(resolvedDir);
|
||||
const relativeParts = path.relative(parsed.root, resolvedDir).split(path.sep).filter(Boolean);
|
||||
let current = parsed.root;
|
||||
for (const part of relativeParts) {
|
||||
current = path.join(current, part);
|
||||
const stat = await fs.lstat(current);
|
||||
if (stat.isSymbolicLink()) {
|
||||
if (path.dirname(current) === parsed.root) {
|
||||
continue;
|
||||
}
|
||||
throw new Error(`Refusing to write trajectory under symlinked directory: ${current}`);
|
||||
}
|
||||
if (!stat.isDirectory()) {
|
||||
throw new Error(`Refusing to write trajectory under non-directory: ${current}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function verifyStableOpenedTrajectoryFile(params: {
|
||||
preOpenStat?: nodeFs.Stats;
|
||||
postOpenStat: nodeFs.Stats;
|
||||
filePath: string;
|
||||
}): void {
|
||||
if (!params.postOpenStat.isFile()) {
|
||||
throw new Error(`Refusing to write trajectory to non-file: ${params.filePath}`);
|
||||
}
|
||||
if (params.postOpenStat.nlink > 1) {
|
||||
throw new Error(`Refusing to write trajectory to hardlinked file: ${params.filePath}`);
|
||||
}
|
||||
const pre = params.preOpenStat;
|
||||
if (pre && (pre.dev !== params.postOpenStat.dev || pre.ino !== params.postOpenStat.ino)) {
|
||||
throw new Error(`Refusing to write trajectory after file changed: ${params.filePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function safeAppendTrajectoryFile(filePath: string, line: string): Promise<void> {
|
||||
await assertNoSymlinkParents(filePath);
|
||||
|
||||
let preOpenStat: nodeFs.Stats | undefined;
|
||||
try {
|
||||
const stat = await fs.lstat(filePath);
|
||||
if (stat.isSymbolicLink()) {
|
||||
throw new Error(`Refusing to write trajectory through symlink: ${filePath}`);
|
||||
}
|
||||
if (!stat.isFile()) {
|
||||
throw new Error(`Refusing to write trajectory to non-file: ${filePath}`);
|
||||
}
|
||||
preOpenStat = stat;
|
||||
} catch (err) {
|
||||
if ((err as NodeJS.ErrnoException).code !== "ENOENT") {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
const lineBytes = Buffer.byteLength(line, "utf8");
|
||||
if ((preOpenStat?.size ?? 0) + lineBytes > TRAJECTORY_RUNTIME_FILE_MAX_BYTES) {
|
||||
return;
|
||||
}
|
||||
|
||||
const handle = await fs.open(filePath, resolveCodexTrajectoryAppendFlags(), 0o600);
|
||||
try {
|
||||
const stat = await handle.stat();
|
||||
verifyStableOpenedTrajectoryFile({ preOpenStat, postOpenStat: stat, filePath });
|
||||
if (stat.size + lineBytes > TRAJECTORY_RUNTIME_FILE_MAX_BYTES) {
|
||||
return;
|
||||
}
|
||||
await handle.chmod(0o600);
|
||||
await handle.appendFile(line, "utf8");
|
||||
} finally {
|
||||
await handle.close();
|
||||
}
|
||||
await appendRegularFile({
|
||||
filePath,
|
||||
content: line,
|
||||
maxFileBytes: TRAJECTORY_RUNTIME_FILE_MAX_BYTES,
|
||||
rejectSymlinkParents: true,
|
||||
});
|
||||
}
|
||||
|
||||
function boundedTrajectoryLine(event: Record<string, unknown>): string | undefined {
|
||||
|
||||
@@ -1,14 +1,10 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { pathExists } from "openclaw/plugin-sdk/security-runtime";
|
||||
|
||||
export async function exists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
return await pathExists(filePath);
|
||||
}
|
||||
|
||||
export async function isDirectory(filePath: string | undefined): Promise<boolean> {
|
||||
|
||||
@@ -4,6 +4,7 @@ import type { OpenClawPluginService } from "openclaw/plugin-sdk/core";
|
||||
import { listDevicePairing } from "openclaw/plugin-sdk/device-bootstrap";
|
||||
import { formatErrorMessage } from "openclaw/plugin-sdk/error-runtime";
|
||||
import type { OpenClawPluginApi } from "openclaw/plugin-sdk/plugin-entry";
|
||||
import { replaceFileAtomic } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { normalizeOptionalString } from "openclaw/plugin-sdk/text-runtime";
|
||||
|
||||
const NOTIFY_STATE_FILE = "device-pair-notify.json";
|
||||
@@ -145,9 +146,12 @@ async function readNotifyState(filePath: string): Promise<NotifyStateFile> {
|
||||
}
|
||||
|
||||
async function writeNotifyState(filePath: string, state: NotifyStateFile): Promise<void> {
|
||||
await fs.mkdir(path.dirname(filePath), { recursive: true });
|
||||
const content = JSON.stringify(state, null, 2);
|
||||
await fs.writeFile(filePath, `${content}\n`, "utf8");
|
||||
await replaceFileAtomic({
|
||||
filePath,
|
||||
content: `${content}\n`,
|
||||
tempPrefix: ".device-pair-notify",
|
||||
});
|
||||
}
|
||||
|
||||
function notifySubscriberKey(subscriber: {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import crypto from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { root as fsRoot } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { normalizeOptionalString } from "openclaw/plugin-sdk/text-runtime";
|
||||
import type { PluginLogger } from "../api.js";
|
||||
import type { DiffArtifactContext, DiffArtifactMeta, DiffOutputFormat } from "./types.js";
|
||||
@@ -36,6 +37,7 @@ type StandaloneFileMeta = {
|
||||
};
|
||||
|
||||
type ArtifactMetaFileName = "meta.json" | "file-meta.json";
|
||||
type ArtifactRoot = Awaited<ReturnType<typeof fsRoot>>;
|
||||
|
||||
export class DiffArtifactStore {
|
||||
private readonly rootDir: string;
|
||||
@@ -76,8 +78,9 @@ export class DiffArtifactStore {
|
||||
...(params.context ? { context: params.context } : {}),
|
||||
};
|
||||
|
||||
await fs.mkdir(artifactDir, { recursive: true });
|
||||
await fs.writeFile(htmlPath, params.html, "utf8");
|
||||
const root = await this.artifactRoot();
|
||||
await root.mkdir(id);
|
||||
await root.write(path.posix.join(id, "viewer.html"), params.html);
|
||||
await this.writeMeta(meta);
|
||||
this.scheduleCleanup();
|
||||
return meta;
|
||||
@@ -104,7 +107,7 @@ export class DiffArtifactStore {
|
||||
throw new Error(`Diff artifact not found: ${id}`);
|
||||
}
|
||||
const htmlPath = this.normalizeStoredPath(meta.htmlPath, "htmlPath");
|
||||
return await fs.readFile(htmlPath, "utf8");
|
||||
return await (await this.artifactRoot()).readText(this.relativeStoredPath(htmlPath));
|
||||
}
|
||||
|
||||
async updateFilePath(id: string, filePath: string): Promise<DiffArtifactMeta> {
|
||||
@@ -151,7 +154,7 @@ export class DiffArtifactStore {
|
||||
...(params.context ? { context: params.context } : {}),
|
||||
};
|
||||
|
||||
await fs.mkdir(artifactDir, { recursive: true });
|
||||
await (await this.artifactRoot()).mkdir(id);
|
||||
await this.writeStandaloneMeta(meta);
|
||||
this.scheduleCleanup();
|
||||
return {
|
||||
@@ -212,6 +215,11 @@ export class DiffArtifactStore {
|
||||
await fs.mkdir(this.rootDir, { recursive: true });
|
||||
}
|
||||
|
||||
private async artifactRoot(): Promise<ArtifactRoot> {
|
||||
await this.ensureRoot();
|
||||
return await fsRoot(this.rootDir);
|
||||
}
|
||||
|
||||
private maybeCleanupExpired(): void {
|
||||
const now = Date.now();
|
||||
if (this.cleanupInFlight || now < this.nextCleanupAt) {
|
||||
@@ -283,16 +291,12 @@ export class DiffArtifactStore {
|
||||
}
|
||||
}
|
||||
|
||||
private metaFilePath(id: string, fileName: ArtifactMetaFileName): string {
|
||||
return path.join(this.artifactDir(id), fileName);
|
||||
}
|
||||
|
||||
private async writeJsonMeta(
|
||||
id: string,
|
||||
fileName: ArtifactMetaFileName,
|
||||
data: unknown,
|
||||
): Promise<void> {
|
||||
await fs.writeFile(this.metaFilePath(id, fileName), JSON.stringify(data, null, 2), "utf8");
|
||||
await (await this.artifactRoot()).writeJson(path.posix.join(id, fileName), data, { space: 2 });
|
||||
}
|
||||
|
||||
private async readJsonMeta(
|
||||
@@ -301,7 +305,7 @@ export class DiffArtifactStore {
|
||||
context: string,
|
||||
): Promise<unknown> {
|
||||
try {
|
||||
const raw = await fs.readFile(this.metaFilePath(id, fileName), "utf8");
|
||||
const raw = await (await this.artifactRoot()).readText(path.posix.join(id, fileName));
|
||||
return JSON.parse(raw) as unknown;
|
||||
} catch (error) {
|
||||
if (isFileNotFound(error)) {
|
||||
@@ -330,6 +334,11 @@ export class DiffArtifactStore {
|
||||
return candidate;
|
||||
}
|
||||
|
||||
private relativeStoredPath(storedPath: string): string {
|
||||
const relativePath = path.relative(this.rootDir, this.normalizeStoredPath(storedPath, "path"));
|
||||
return relativePath.split(path.sep).join(path.posix.sep);
|
||||
}
|
||||
|
||||
private assertWithinRoot(candidate: string, label = "path"): void {
|
||||
const relative = path.relative(this.rootDir, candidate);
|
||||
if (
|
||||
@@ -362,7 +371,8 @@ function isExpired(meta: { expiresAt: string }): boolean {
|
||||
}
|
||||
|
||||
function isFileNotFound(error: unknown): boolean {
|
||||
return error instanceof Error && "code" in error && error.code === "ENOENT";
|
||||
const code = error instanceof Error && "code" in error ? error.code : undefined;
|
||||
return code === "ENOENT" || code === "not-found";
|
||||
}
|
||||
|
||||
function normalizeArtifactContext(value: unknown): DiffArtifactContext | undefined {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { createHash } from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { ApplicationCommandType, type APIApplicationCommand } from "discord-api-types/v10";
|
||||
import { privateFileStore } from "openclaw/plugin-sdk/security-runtime";
|
||||
import {
|
||||
createApplicationCommand,
|
||||
deleteApplicationCommand,
|
||||
@@ -147,9 +147,10 @@ export class DiscordCommandDeployer {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const raw = await fs.readFile(storePath, "utf8");
|
||||
const parsed = JSON.parse(raw) as { hashes?: unknown };
|
||||
if (!parsed.hashes || typeof parsed.hashes !== "object") {
|
||||
const parsed = await privateFileStore(path.dirname(storePath)).readJsonIfExists<{
|
||||
hashes?: unknown;
|
||||
}>(path.basename(storePath));
|
||||
if (!parsed?.hashes || typeof parsed.hashes !== "object") {
|
||||
return;
|
||||
}
|
||||
for (const [key, value] of Object.entries(parsed.hashes)) {
|
||||
@@ -168,24 +169,17 @@ export class DiscordCommandDeployer {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
await fs.mkdir(path.dirname(storePath), { recursive: true });
|
||||
const tmpPath = `${storePath}.${process.pid}.${Date.now()}.tmp`;
|
||||
await fs.writeFile(
|
||||
tmpPath,
|
||||
`${JSON.stringify(
|
||||
{
|
||||
version: 1,
|
||||
updatedAt: new Date().toISOString(),
|
||||
hashes: Object.fromEntries(
|
||||
[...this.hashes.entries()].toSorted(([left], [right]) => left.localeCompare(right)),
|
||||
),
|
||||
},
|
||||
null,
|
||||
2,
|
||||
)}\n`,
|
||||
"utf8",
|
||||
await privateFileStore(path.dirname(storePath)).writeJson(
|
||||
path.basename(storePath),
|
||||
{
|
||||
version: 1,
|
||||
updatedAt: new Date().toISOString(),
|
||||
hashes: Object.fromEntries(
|
||||
[...this.hashes.entries()].toSorted(([left], [right]) => left.localeCompare(right)),
|
||||
),
|
||||
},
|
||||
{ trailingNewline: true },
|
||||
);
|
||||
await fs.rename(tmpPath, storePath);
|
||||
} catch {
|
||||
// The cache is only an optimization to avoid redundant Discord writes.
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import crypto from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { recordChannelActivity } from "openclaw/plugin-sdk/channel-activity-runtime";
|
||||
@@ -10,7 +9,7 @@ import {
|
||||
} from "openclaw/plugin-sdk/media-runtime";
|
||||
import { requireRuntimeConfig } from "openclaw/plugin-sdk/plugin-config-runtime";
|
||||
import type { RetryConfig } from "openclaw/plugin-sdk/retry-runtime";
|
||||
import { resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
|
||||
import { tempWorkspace, resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
|
||||
import { loadWebMediaRaw } from "openclaw/plugin-sdk/web-media";
|
||||
import { resolveDiscordAccount } from "./accounts.js";
|
||||
import type { RequestClient } from "./internal/discord.js";
|
||||
@@ -46,17 +45,21 @@ function toDiscordSendResult(
|
||||
});
|
||||
}
|
||||
|
||||
async function materializeVoiceMessageInput(mediaUrl: string): Promise<{ filePath: string }> {
|
||||
async function materializeVoiceMessageInput(
|
||||
mediaUrl: string,
|
||||
): Promise<{ filePath: string; cleanup: () => Promise<void> }> {
|
||||
// Security: reuse the standard media loader so we apply SSRF guards + allowed-local-root checks.
|
||||
// Then write to a private temp file so ffmpeg/ffprobe never sees the original URL/path string.
|
||||
const media = await loadWebMediaRaw(mediaUrl, maxBytesForKind("audio"));
|
||||
const extFromName = media.fileName ? path.extname(media.fileName) : "";
|
||||
const extFromMime = media.contentType ? extensionForMime(media.contentType) : "";
|
||||
const ext = extFromName || extFromMime || ".bin";
|
||||
const tempDir = resolvePreferredOpenClawTmpDir();
|
||||
const filePath = path.join(tempDir, `voice-src-${crypto.randomUUID()}${ext}`);
|
||||
await fs.writeFile(filePath, media.buffer, { mode: 0o600 });
|
||||
return { filePath };
|
||||
const workspace = await tempWorkspace({
|
||||
rootDir: resolvePreferredOpenClawTmpDir(),
|
||||
prefix: "voice-src-",
|
||||
});
|
||||
const filePath = await workspace.write(`input${ext}`, media.buffer);
|
||||
return { filePath, cleanup: async () => await workspace.cleanup() };
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -74,7 +77,8 @@ export async function sendVoiceMessageDiscord(
|
||||
audioPath: string,
|
||||
opts: VoiceMessageOpts,
|
||||
): Promise<DiscordSendResult> {
|
||||
const { filePath: localInputPath } = await materializeVoiceMessageInput(audioPath);
|
||||
const { filePath: localInputPath, cleanup: cleanupLocalInput } =
|
||||
await materializeVoiceMessageInput(audioPath);
|
||||
let oggPath: string | null = null;
|
||||
let oggCleanup = false;
|
||||
let token: string | undefined;
|
||||
@@ -131,6 +135,6 @@ export async function sendVoiceMessageDiscord(
|
||||
throw err;
|
||||
} finally {
|
||||
await unlinkIfExists(oggCleanup ? oggPath : null);
|
||||
await unlinkIfExists(localInputPath);
|
||||
await cleanupLocalInput();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import { createRequire } from "node:module";
|
||||
import path from "node:path";
|
||||
import type { Readable } from "node:stream";
|
||||
import { logVerbose, shouldLogVerbose } from "openclaw/plugin-sdk/runtime-env";
|
||||
import { formatErrorMessage } from "openclaw/plugin-sdk/ssrf-runtime";
|
||||
import { resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
|
||||
import { tempWorkspace, resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
|
||||
|
||||
const require = createRequire(import.meta.url);
|
||||
|
||||
@@ -153,11 +151,13 @@ function estimateDurationSeconds(pcm: Buffer): number {
|
||||
export async function writeVoiceWavFile(
|
||||
pcm: Buffer,
|
||||
): Promise<{ path: string; durationSeconds: number }> {
|
||||
const tempDir = await fs.mkdtemp(path.join(resolvePreferredOpenClawTmpDir(), "discord-voice-"));
|
||||
const filePath = path.join(tempDir, `segment-${randomUUID()}.wav`);
|
||||
const workspace = await tempWorkspace({
|
||||
rootDir: resolvePreferredOpenClawTmpDir(),
|
||||
prefix: "discord-voice-",
|
||||
});
|
||||
const wav = buildWavBuffer(pcm);
|
||||
await fs.writeFile(filePath, wav);
|
||||
scheduleTempCleanup(tempDir);
|
||||
const filePath = await workspace.write("segment.wav", wav);
|
||||
scheduleTempCleanup(workspace.dir);
|
||||
return { path: filePath, durationSeconds: estimateDurationSeconds(pcm) };
|
||||
}
|
||||
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { realpathSync } from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
|
||||
@@ -61,7 +62,7 @@ function expectPathIsolatedToTmpRoot(pathValue: string, key: string): void {
|
||||
expect(pathValue).not.toContain(key);
|
||||
expect(pathValue).not.toContain("..");
|
||||
|
||||
const tmpRoot = path.resolve(resolvePreferredOpenClawTmpDir());
|
||||
const tmpRoot = realpathSync(resolvePreferredOpenClawTmpDir());
|
||||
const resolved = path.resolve(pathValue);
|
||||
const rel = path.relative(tmpRoot, resolved);
|
||||
expect(rel === ".." || rel.startsWith(`..${path.sep}`)).toBe(false);
|
||||
|
||||
@@ -5,8 +5,10 @@ import type * as Lark from "@larksuiteoapi/node-sdk";
|
||||
import type { MessageReceipt } from "openclaw/plugin-sdk/channel-message";
|
||||
import { mediaKindFromMime } from "openclaw/plugin-sdk/media-mime";
|
||||
import { MEDIA_FFMPEG_MAX_AUDIO_DURATION_SECS, runFfmpeg } from "openclaw/plugin-sdk/media-runtime";
|
||||
import { readRegularFile } from "openclaw/plugin-sdk/security-runtime";
|
||||
import {
|
||||
resolvePreferredOpenClawTmpDir,
|
||||
withTempWorkspace,
|
||||
withTempDownloadPath,
|
||||
} from "openclaw/plugin-sdk/temp-path";
|
||||
import { normalizeLowercaseStringOrEmpty } from "openclaw/plugin-sdk/text-runtime";
|
||||
@@ -421,10 +423,11 @@ export async function uploadImageFeishu(params: {
|
||||
const { cfg, image, imageType = "message", accountId } = params;
|
||||
const { client } = createConfiguredFeishuMediaClient({ cfg, accountId });
|
||||
|
||||
// SDK accepts Buffer directly or fs.ReadStream for file paths
|
||||
// Using Readable.from(buffer) causes issues with form-data library
|
||||
// SDK accepts Buffer directly. Keep string path support on this helper, but
|
||||
// verify the path as a regular local file before uploading it.
|
||||
// See: https://github.com/larksuite/node-sdk/issues/121
|
||||
const imageData = typeof image === "string" ? fs.createReadStream(image) : image;
|
||||
const imageData =
|
||||
typeof image === "string" ? (await readRegularFile({ filePath: image })).buffer : image;
|
||||
|
||||
const response = await requestFeishuApi(
|
||||
() =>
|
||||
@@ -475,10 +478,11 @@ export async function uploadFileFeishu(params: {
|
||||
const { cfg, file, fileName, fileType, duration, accountId } = params;
|
||||
const { client } = createConfiguredFeishuMediaClient({ cfg, accountId });
|
||||
|
||||
// SDK accepts Buffer directly or fs.ReadStream for file paths
|
||||
// Using Readable.from(buffer) causes issues with form-data library
|
||||
// SDK accepts Buffer directly. Keep string path support on this helper, but
|
||||
// verify the path as a regular local file before uploading it.
|
||||
// See: https://github.com/larksuite/node-sdk/issues/121
|
||||
const fileData = typeof file === "string" ? fs.createReadStream(file) : file;
|
||||
const fileData =
|
||||
typeof file === "string" ? (await readRegularFile({ filePath: file })).buffer : file;
|
||||
|
||||
const safeFileName = sanitizeFileNameForUpload(fileName);
|
||||
|
||||
@@ -747,45 +751,42 @@ async function transcodeToFeishuVoiceOpus(params: {
|
||||
fileName: string;
|
||||
contentType?: string;
|
||||
}): Promise<{ buffer: Buffer; fileName: string; contentType: string }> {
|
||||
const tempRoot = resolvePreferredOpenClawTmpDir();
|
||||
await fs.promises.mkdir(tempRoot, { recursive: true, mode: 0o700 });
|
||||
const tempDir = await fs.promises.mkdtemp(path.join(tempRoot, "feishu-voice-"));
|
||||
try {
|
||||
const ext = normalizeLowercaseStringOrEmpty(path.extname(params.fileName));
|
||||
const inputExt = ext && ext.length <= 12 ? ext : ".audio";
|
||||
const inputPath = path.join(tempDir, `input${inputExt}`);
|
||||
const outputPath = path.join(tempDir, FEISHU_VOICE_FILE_NAME);
|
||||
await fs.promises.writeFile(inputPath, params.buffer, { mode: 0o600 });
|
||||
await runFfmpeg([
|
||||
"-hide_banner",
|
||||
"-loglevel",
|
||||
"error",
|
||||
"-y",
|
||||
"-i",
|
||||
inputPath,
|
||||
"-vn",
|
||||
"-sn",
|
||||
"-dn",
|
||||
"-t",
|
||||
String(MEDIA_FFMPEG_MAX_AUDIO_DURATION_SECS),
|
||||
"-ar",
|
||||
String(FEISHU_VOICE_SAMPLE_RATE_HZ),
|
||||
"-ac",
|
||||
"1",
|
||||
"-c:a",
|
||||
"libopus",
|
||||
"-b:a",
|
||||
FEISHU_VOICE_BITRATE,
|
||||
outputPath,
|
||||
]);
|
||||
return {
|
||||
buffer: await fs.promises.readFile(outputPath),
|
||||
fileName: FEISHU_VOICE_FILE_NAME,
|
||||
contentType: "audio/ogg",
|
||||
};
|
||||
} finally {
|
||||
await fs.promises.rm(tempDir, { recursive: true, force: true });
|
||||
}
|
||||
return await withTempWorkspace(
|
||||
{ rootDir: resolvePreferredOpenClawTmpDir(), prefix: "feishu-voice-" },
|
||||
async (workspace) => {
|
||||
const ext = normalizeLowercaseStringOrEmpty(path.extname(params.fileName));
|
||||
const inputExt = ext && ext.length <= 12 ? ext : ".audio";
|
||||
const inputPath = await workspace.write(`input${inputExt}`, params.buffer);
|
||||
const outputPath = workspace.path(FEISHU_VOICE_FILE_NAME);
|
||||
await runFfmpeg([
|
||||
"-hide_banner",
|
||||
"-loglevel",
|
||||
"error",
|
||||
"-y",
|
||||
"-i",
|
||||
inputPath,
|
||||
"-vn",
|
||||
"-sn",
|
||||
"-dn",
|
||||
"-t",
|
||||
String(MEDIA_FFMPEG_MAX_AUDIO_DURATION_SECS),
|
||||
"-ar",
|
||||
String(FEISHU_VOICE_SAMPLE_RATE_HZ),
|
||||
"-ac",
|
||||
"1",
|
||||
"-c:a",
|
||||
"libopus",
|
||||
"-b:a",
|
||||
FEISHU_VOICE_BITRATE,
|
||||
outputPath,
|
||||
]);
|
||||
return {
|
||||
buffer: await workspace.read(FEISHU_VOICE_FILE_NAME),
|
||||
fileName: FEISHU_VOICE_FILE_NAME,
|
||||
contentType: "audio/ogg",
|
||||
};
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
async function prepareFeishuVoiceMedia(params: {
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import {
|
||||
attachChannelToResult,
|
||||
@@ -18,6 +17,7 @@ import {
|
||||
sendPayloadMediaSequenceAndFinalize,
|
||||
sendTextMediaPayload,
|
||||
} from "openclaw/plugin-sdk/reply-payload";
|
||||
import { statRegularFileSync } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { normalizeLowercaseStringOrEmpty } from "openclaw/plugin-sdk/text-runtime";
|
||||
import { resolveFeishuAccount } from "./accounts.js";
|
||||
import { createFeishuCardInteractionEnvelope } from "./card-interaction.js";
|
||||
@@ -66,18 +66,12 @@ function normalizePossibleLocalImagePath(text: string | undefined): string | nul
|
||||
if (!path.isAbsolute(raw)) {
|
||||
return null;
|
||||
}
|
||||
if (!fs.existsSync(raw)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Fix race condition: wrap statSync in try-catch to handle file deletion
|
||||
// between existsSync and statSync
|
||||
try {
|
||||
if (!fs.statSync(raw).isFile()) {
|
||||
const stat = statRegularFileSync(raw);
|
||||
if (stat.missing) {
|
||||
return null;
|
||||
}
|
||||
} catch {
|
||||
// File may have been deleted or became inaccessible between checks
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
@@ -2,6 +2,11 @@ import { spawn } from "node:child_process";
|
||||
import crypto from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import {
|
||||
FsSafeError,
|
||||
resolveAbsolutePathForRead,
|
||||
root as fsRoot,
|
||||
} from "openclaw/plugin-sdk/security-runtime";
|
||||
|
||||
const DIR_FETCH_HARD_MAX_BYTES = 16 * 1024 * 1024;
|
||||
const DIR_FETCH_DEFAULT_MAX_BYTES = 8 * 1024 * 1024;
|
||||
@@ -50,6 +55,17 @@ function clampMaxBytes(input: unknown): number {
|
||||
}
|
||||
|
||||
function classifyFsError(err: unknown): DirFetchErrCode {
|
||||
if (err instanceof FsSafeError) {
|
||||
if (err.code === "not-found") {
|
||||
return "NOT_FOUND";
|
||||
}
|
||||
if (err.code === "symlink") {
|
||||
return "SYMLINK_REDIRECT";
|
||||
}
|
||||
if (err.code === "invalid-path") {
|
||||
return "INVALID_PATH";
|
||||
}
|
||||
}
|
||||
const code = (err as { code?: string } | null)?.code;
|
||||
if (code === "ENOENT") {
|
||||
return "NOT_FOUND";
|
||||
@@ -145,18 +161,18 @@ async function listTarEntries(tarBuffer: Buffer): Promise<string[]> {
|
||||
|
||||
async function listTreeEntries(root: string, maxEntries: number): Promise<string[] | "TOO_MANY"> {
|
||||
const results: string[] = [];
|
||||
async function visit(dir: string): Promise<boolean> {
|
||||
const entries = await fs.readdir(dir, { withFileTypes: true });
|
||||
const rootHandle = await fsRoot(root);
|
||||
async function visit(relativeDir: string): Promise<boolean> {
|
||||
const entries = await rootHandle.list(relativeDir, { withFileTypes: true });
|
||||
entries.sort((left, right) => left.name.localeCompare(right.name));
|
||||
for (const entry of entries) {
|
||||
const abs = path.join(dir, entry.name);
|
||||
const rel = path.relative(root, abs).replace(/\\/gu, "/");
|
||||
const rel = path.posix.join(relativeDir === "." ? "" : relativeDir, entry.name);
|
||||
results.push(rel);
|
||||
if (results.length > maxEntries) {
|
||||
return false;
|
||||
}
|
||||
if (entry.isDirectory()) {
|
||||
const ok = await visit(abs);
|
||||
if (entry.isDirectory) {
|
||||
const ok = await visit(rel);
|
||||
if (!ok) {
|
||||
return false;
|
||||
}
|
||||
@@ -164,7 +180,7 @@ async function listTreeEntries(root: string, maxEntries: number): Promise<string
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return (await visit(root)) ? results : "TOO_MANY";
|
||||
return (await visit(".")) ? results : "TOO_MANY";
|
||||
}
|
||||
|
||||
export async function handleDirFetch(params: DirFetchParams): Promise<DirFetchResult> {
|
||||
@@ -186,22 +202,31 @@ export async function handleDirFetch(params: DirFetchParams): Promise<DirFetchRe
|
||||
|
||||
let canonical: string;
|
||||
try {
|
||||
canonical = await fs.realpath(requestedPath);
|
||||
canonical = (
|
||||
await resolveAbsolutePathForRead(requestedPath, {
|
||||
symlinks: followSymlinks ? "follow" : "reject",
|
||||
})
|
||||
).canonicalPath;
|
||||
} catch (err) {
|
||||
const code = classifyFsError(err);
|
||||
const canonicalPath =
|
||||
err instanceof FsSafeError &&
|
||||
err.cause &&
|
||||
typeof err.cause === "object" &&
|
||||
"canonicalPath" in err.cause &&
|
||||
typeof err.cause.canonicalPath === "string"
|
||||
? err.cause.canonicalPath
|
||||
: undefined;
|
||||
return {
|
||||
ok: false,
|
||||
code,
|
||||
message: code === "NOT_FOUND" ? "directory not found" : `realpath failed: ${String(err)}`,
|
||||
};
|
||||
}
|
||||
|
||||
if (!followSymlinks && canonical !== requestedPath) {
|
||||
return {
|
||||
ok: false,
|
||||
code: "SYMLINK_REDIRECT",
|
||||
message: `path traverses a symlink; refusing because followSymlinks=false (set plugins.entries.file-transfer.config.nodes.<node>.followSymlinks=true to allow, or update allowReadPaths to the canonical path)`,
|
||||
canonicalPath: canonical,
|
||||
message:
|
||||
code === "NOT_FOUND"
|
||||
? "directory not found"
|
||||
: code === "SYMLINK_REDIRECT"
|
||||
? "path traverses a symlink; refusing because followSymlinks=false (set plugins.entries.file-transfer.config.nodes.<node>.followSymlinks=true to allow, or update allowReadPaths to the canonical path)"
|
||||
: `realpath failed: ${String(err)}`,
|
||||
...(canonicalPath ? { canonicalPath } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import {
|
||||
FsSafeError,
|
||||
resolveAbsolutePathForRead,
|
||||
root,
|
||||
} from "openclaw/plugin-sdk/security-runtime";
|
||||
import { mimeFromExtension } from "../shared/mime.js";
|
||||
|
||||
export const DIR_LIST_DEFAULT_MAX_ENTRIES = 200;
|
||||
@@ -54,6 +59,17 @@ function clampMaxEntries(input: unknown): number {
|
||||
}
|
||||
|
||||
function classifyFsError(err: unknown): DirListErrCode {
|
||||
if (err instanceof FsSafeError) {
|
||||
if (err.code === "not-found") {
|
||||
return "NOT_FOUND";
|
||||
}
|
||||
if (err.code === "symlink") {
|
||||
return "SYMLINK_REDIRECT";
|
||||
}
|
||||
if (err.code === "invalid-path") {
|
||||
return "INVALID_PATH";
|
||||
}
|
||||
}
|
||||
const code = (err as { code?: string } | null)?.code;
|
||||
if (code === "ENOENT") {
|
||||
return "NOT_FOUND";
|
||||
@@ -86,22 +102,31 @@ export async function handleDirList(params: DirListParams): Promise<DirListResul
|
||||
|
||||
let canonical: string;
|
||||
try {
|
||||
canonical = await fs.realpath(requestedPath);
|
||||
canonical = (
|
||||
await resolveAbsolutePathForRead(requestedPath, {
|
||||
symlinks: followSymlinks ? "follow" : "reject",
|
||||
})
|
||||
).canonicalPath;
|
||||
} catch (err) {
|
||||
const code = classifyFsError(err);
|
||||
const canonicalPath =
|
||||
err instanceof FsSafeError &&
|
||||
err.cause &&
|
||||
typeof err.cause === "object" &&
|
||||
"canonicalPath" in err.cause &&
|
||||
typeof err.cause.canonicalPath === "string"
|
||||
? err.cause.canonicalPath
|
||||
: undefined;
|
||||
return {
|
||||
ok: false,
|
||||
code,
|
||||
message: code === "NOT_FOUND" ? "path not found" : `realpath failed: ${String(err)}`,
|
||||
};
|
||||
}
|
||||
|
||||
if (!followSymlinks && canonical !== requestedPath) {
|
||||
return {
|
||||
ok: false,
|
||||
code: "SYMLINK_REDIRECT",
|
||||
message: `path traverses a symlink; refusing because followSymlinks=false (set plugins.entries.file-transfer.config.nodes.<node>.followSymlinks=true to allow, or update allowReadPaths to the canonical path)`,
|
||||
canonicalPath: canonical,
|
||||
message:
|
||||
code === "NOT_FOUND"
|
||||
? "path not found"
|
||||
: code === "SYMLINK_REDIRECT"
|
||||
? "path traverses a symlink; refusing because followSymlinks=false (set plugins.entries.file-transfer.config.nodes.<node>.followSymlinks=true to allow, or update allowReadPaths to the canonical path)"
|
||||
: `realpath failed: ${String(err)}`,
|
||||
...(canonicalPath ? { canonicalPath } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -122,50 +147,39 @@ export async function handleDirList(params: DirListParams): Promise<DirListResul
|
||||
};
|
||||
}
|
||||
|
||||
let names: string[];
|
||||
let listedEntries: { name: string; isDirectory: boolean; size: number; mtimeMs: number }[];
|
||||
try {
|
||||
names = await fs.readdir(canonical, { encoding: "utf8" });
|
||||
const dirRoot = await root(canonical);
|
||||
listedEntries = await dirRoot.list(".", { withFileTypes: true });
|
||||
} catch (err) {
|
||||
const code = classifyFsError(err);
|
||||
return {
|
||||
ok: false,
|
||||
code,
|
||||
message: `readdir failed: ${String(err)}`,
|
||||
message: `list failed: ${String(err)}`,
|
||||
canonicalPath: canonical,
|
||||
};
|
||||
}
|
||||
|
||||
// Sort by name for stable pagination
|
||||
names.sort((a, b) => a.localeCompare(b));
|
||||
listedEntries.sort((a, b) => a.name.localeCompare(b.name));
|
||||
|
||||
const total = names.length;
|
||||
const page = names.slice(offset, offset + maxEntries);
|
||||
const total = listedEntries.length;
|
||||
const page = listedEntries.slice(offset, offset + maxEntries);
|
||||
const truncated = offset + maxEntries < total;
|
||||
const nextPageToken = truncated ? String(offset + maxEntries) : undefined;
|
||||
|
||||
const entries: DirListEntry[] = [];
|
||||
for (const name of page) {
|
||||
const entryPath = path.join(canonical, name);
|
||||
|
||||
let isDir = false;
|
||||
let size = 0;
|
||||
let mtime = 0;
|
||||
try {
|
||||
const s = await fs.stat(entryPath);
|
||||
isDir = s.isDirectory();
|
||||
size = isDir ? 0 : s.size;
|
||||
mtime = s.mtimeMs;
|
||||
} catch {
|
||||
// stat may fail for broken symlinks; keep zeros and treat as file
|
||||
}
|
||||
for (const entry of page) {
|
||||
const entryPath = path.join(canonical, entry.name);
|
||||
const isDir = entry.isDirectory;
|
||||
|
||||
entries.push({
|
||||
name,
|
||||
name: entry.name,
|
||||
path: entryPath,
|
||||
size,
|
||||
mimeType: isDir ? "inode/directory" : mimeFromExtension(name),
|
||||
size: isDir ? 0 : entry.size,
|
||||
mimeType: isDir ? "inode/directory" : mimeFromExtension(entry.name),
|
||||
isDir,
|
||||
mtime,
|
||||
mtime: entry.mtimeMs,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
import { spawnSync } from "node:child_process";
|
||||
import crypto from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import {
|
||||
FsSafeError,
|
||||
resolveAbsolutePathForRead,
|
||||
root,
|
||||
} from "openclaw/plugin-sdk/security-runtime";
|
||||
import { EXTENSION_MIME } from "../shared/mime.js";
|
||||
|
||||
export const FILE_FETCH_HARD_MAX_BYTES = 16 * 1024 * 1024;
|
||||
@@ -70,6 +74,20 @@ function clampMaxBytes(input: unknown): number {
|
||||
}
|
||||
|
||||
function classifyFsError(err: unknown): FileFetchErrCode {
|
||||
if (err instanceof FsSafeError) {
|
||||
if (err.code === "not-found") {
|
||||
return "NOT_FOUND";
|
||||
}
|
||||
if (err.code === "symlink") {
|
||||
return "SYMLINK_REDIRECT";
|
||||
}
|
||||
if (err.code === "invalid-path") {
|
||||
return "INVALID_PATH";
|
||||
}
|
||||
if (err.code === "not-file") {
|
||||
return "IS_DIRECTORY";
|
||||
}
|
||||
}
|
||||
const code = (err as { code?: string } | null)?.code;
|
||||
if (code === "ENOENT") {
|
||||
return "NOT_FOUND";
|
||||
@@ -101,103 +119,102 @@ export async function handleFileFetch(params: FileFetchParams): Promise<FileFetc
|
||||
|
||||
let canonical: string;
|
||||
try {
|
||||
canonical = await fs.realpath(requestedPath);
|
||||
canonical = (
|
||||
await resolveAbsolutePathForRead(requestedPath, {
|
||||
symlinks: followSymlinks ? "follow" : "reject",
|
||||
})
|
||||
).canonicalPath;
|
||||
} catch (err) {
|
||||
const code = classifyFsError(err);
|
||||
const canonicalPath =
|
||||
err instanceof FsSafeError &&
|
||||
err.cause &&
|
||||
typeof err.cause === "object" &&
|
||||
"canonicalPath" in err.cause &&
|
||||
typeof err.cause.canonicalPath === "string"
|
||||
? err.cause.canonicalPath
|
||||
: undefined;
|
||||
return {
|
||||
ok: false,
|
||||
code,
|
||||
message:
|
||||
code === "NOT_FOUND"
|
||||
? "file not found"
|
||||
: code === "SYMLINK_REDIRECT"
|
||||
? "path traverses a symlink; refusing because followSymlinks=false (set plugins.entries.file-transfer.config.nodes.<node>.followSymlinks=true to allow, or update allowReadPaths to the canonical path)"
|
||||
: `realpath failed: ${String(err)}`,
|
||||
...(canonicalPath ? { canonicalPath } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
let opened: Awaited<ReturnType<Awaited<ReturnType<typeof root>>["open"]>>;
|
||||
try {
|
||||
const parentRoot = await root(path.dirname(canonical));
|
||||
opened = await parentRoot.open(path.basename(canonical));
|
||||
} catch (err) {
|
||||
const code = classifyFsError(err);
|
||||
return {
|
||||
ok: false,
|
||||
code,
|
||||
message: code === "NOT_FOUND" ? "file not found" : `realpath failed: ${String(err)}`,
|
||||
};
|
||||
}
|
||||
|
||||
// Refuse to follow symlinks anywhere in the path unless the operator
|
||||
// has explicitly opted in. A symlink in user-controlled territory
|
||||
// (e.g. ~/Downloads/evil → /etc) could redirect an allowed-looking
|
||||
// request to a disallowed canonical target. The error includes the
|
||||
// canonical path so the operator can either update their allowlist
|
||||
// to the canonical form or set followSymlinks=true on this node.
|
||||
if (!followSymlinks && canonical !== requestedPath) {
|
||||
return {
|
||||
ok: false,
|
||||
code: "SYMLINK_REDIRECT",
|
||||
message: `path traverses a symlink; refusing because followSymlinks=false (set plugins.entries.file-transfer.config.nodes.<node>.followSymlinks=true to allow, or update allowReadPaths to the canonical path)`,
|
||||
message: code === "IS_DIRECTORY" ? "path is a directory" : `open failed: ${String(err)}`,
|
||||
canonicalPath: canonical,
|
||||
};
|
||||
}
|
||||
|
||||
let stats: Awaited<ReturnType<typeof fs.stat>>;
|
||||
try {
|
||||
stats = await fs.stat(canonical);
|
||||
} catch (err) {
|
||||
const code = classifyFsError(err);
|
||||
return { ok: false, code, message: `stat failed: ${String(err)}`, canonicalPath: canonical };
|
||||
}
|
||||
const stats = opened.stat;
|
||||
if (stats.size > maxBytes) {
|
||||
return {
|
||||
ok: false,
|
||||
code: "FILE_TOO_LARGE",
|
||||
message: `file size ${stats.size} exceeds limit ${maxBytes}`,
|
||||
canonicalPath: opened.realPath,
|
||||
};
|
||||
}
|
||||
|
||||
if (stats.isDirectory()) {
|
||||
return {
|
||||
ok: false,
|
||||
code: "IS_DIRECTORY",
|
||||
message: "path is a directory",
|
||||
canonicalPath: canonical,
|
||||
};
|
||||
}
|
||||
if (!stats.isFile()) {
|
||||
return {
|
||||
ok: false,
|
||||
code: "READ_ERROR",
|
||||
message: "path is not a regular file",
|
||||
canonicalPath: canonical,
|
||||
};
|
||||
}
|
||||
if (stats.size > maxBytes) {
|
||||
return {
|
||||
ok: false,
|
||||
code: "FILE_TOO_LARGE",
|
||||
message: `file size ${stats.size} exceeds limit ${maxBytes}`,
|
||||
canonicalPath: canonical,
|
||||
};
|
||||
}
|
||||
if (preflightOnly) {
|
||||
return {
|
||||
ok: true,
|
||||
path: opened.realPath,
|
||||
size: stats.size,
|
||||
mimeType: "",
|
||||
base64: "",
|
||||
sha256: "",
|
||||
preflightOnly: true,
|
||||
};
|
||||
}
|
||||
|
||||
const buffer = await opened.handle.readFile();
|
||||
if (buffer.byteLength > maxBytes) {
|
||||
return {
|
||||
ok: false,
|
||||
code: "FILE_TOO_LARGE",
|
||||
message: `read ${buffer.byteLength} bytes exceeds limit ${maxBytes}`,
|
||||
canonicalPath: opened.realPath,
|
||||
};
|
||||
}
|
||||
|
||||
const sha256 = crypto.createHash("sha256").update(buffer).digest("hex");
|
||||
const base64 = buffer.toString("base64");
|
||||
const mimeType = detectMimeType(opened.realPath);
|
||||
|
||||
if (preflightOnly) {
|
||||
return {
|
||||
ok: true,
|
||||
path: canonical,
|
||||
size: stats.size,
|
||||
mimeType: "",
|
||||
base64: "",
|
||||
sha256: "",
|
||||
preflightOnly: true,
|
||||
path: opened.realPath,
|
||||
size: buffer.byteLength,
|
||||
mimeType,
|
||||
base64,
|
||||
sha256,
|
||||
};
|
||||
}
|
||||
|
||||
let buffer: Buffer;
|
||||
try {
|
||||
buffer = await fs.readFile(canonical);
|
||||
} catch (err) {
|
||||
const code = classifyFsError(err);
|
||||
return { ok: false, code, message: `read failed: ${String(err)}`, canonicalPath: canonical };
|
||||
}
|
||||
|
||||
if (buffer.byteLength > maxBytes) {
|
||||
return {
|
||||
ok: false,
|
||||
code: "FILE_TOO_LARGE",
|
||||
message: `read ${buffer.byteLength} bytes exceeds limit ${maxBytes}`,
|
||||
canonicalPath: canonical,
|
||||
code,
|
||||
message: `read failed: ${String(err)}`,
|
||||
canonicalPath: opened.realPath,
|
||||
};
|
||||
} finally {
|
||||
await opened.handle.close().catch(() => undefined);
|
||||
}
|
||||
|
||||
const sha256 = crypto.createHash("sha256").update(buffer).digest("hex");
|
||||
const base64 = buffer.toString("base64");
|
||||
const mimeType = detectMimeType(canonical);
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
path: canonical,
|
||||
size: buffer.byteLength,
|
||||
mimeType,
|
||||
base64,
|
||||
sha256,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,6 +1,12 @@
|
||||
import crypto from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import {
|
||||
canonicalPathFromExistingAncestor,
|
||||
FsSafeError,
|
||||
resolveAbsolutePathForWrite,
|
||||
root,
|
||||
} from "openclaw/plugin-sdk/security-runtime";
|
||||
|
||||
const MAX_CONTENT_BYTES = 16 * 1024 * 1024; // 16 MB
|
||||
|
||||
@@ -39,74 +45,37 @@ function err(code: string, message: string, canonicalPath?: string): FileWriteEr
|
||||
return { ok: false, code, message, ...(canonicalPath ? { canonicalPath } : {}) };
|
||||
}
|
||||
|
||||
async function pathExists(p: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(p);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function findExistingAncestor(p: string): Promise<string | null> {
|
||||
let current = p;
|
||||
while (true) {
|
||||
try {
|
||||
await fs.lstat(current);
|
||||
return current;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
const parent = path.dirname(current);
|
||||
if (parent === current) {
|
||||
return null;
|
||||
}
|
||||
current = parent;
|
||||
}
|
||||
}
|
||||
|
||||
async function canonicalTargetFromExistingAncestor(targetPath: string): Promise<string> {
|
||||
const ancestor = await findExistingAncestor(targetPath);
|
||||
if (!ancestor) {
|
||||
return targetPath;
|
||||
}
|
||||
let canonicalAncestor: string;
|
||||
try {
|
||||
canonicalAncestor = await fs.realpath(ancestor);
|
||||
} catch {
|
||||
canonicalAncestor = ancestor;
|
||||
}
|
||||
const relative = path.relative(ancestor, targetPath);
|
||||
return relative ? path.join(canonicalAncestor, relative) : canonicalAncestor;
|
||||
}
|
||||
|
||||
async function rejectParentSymlinkRedirect(
|
||||
targetPath: string,
|
||||
parentDir: string,
|
||||
): Promise<FileWriteError | null> {
|
||||
const ancestor = await findExistingAncestor(parentDir);
|
||||
if (!ancestor) {
|
||||
return null;
|
||||
}
|
||||
let canonicalAncestor: string;
|
||||
try {
|
||||
canonicalAncestor = await fs.realpath(ancestor);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
if (canonicalAncestor === ancestor) {
|
||||
return null;
|
||||
}
|
||||
const canonicalTarget = path.join(canonicalAncestor, path.relative(ancestor, targetPath));
|
||||
function symlinkRedirectError(error: FsSafeError): FileWriteError {
|
||||
const canonicalTarget =
|
||||
error.cause &&
|
||||
typeof error.cause === "object" &&
|
||||
"canonicalPath" in error.cause &&
|
||||
typeof error.cause.canonicalPath === "string"
|
||||
? error.cause.canonicalPath
|
||||
: undefined;
|
||||
return err(
|
||||
"SYMLINK_REDIRECT",
|
||||
`parent ${ancestor} resolves through a symlink to ${canonicalAncestor}; refusing because followSymlinks=false (set plugins.entries.file-transfer.config.nodes.<node>.followSymlinks=true to allow, or update allowWritePaths to the canonical path)`,
|
||||
"path traverses a symlink; refusing because followSymlinks=false (set plugins.entries.file-transfer.config.nodes.<node>.followSymlinks=true to allow, or update allowWritePaths to the canonical path)",
|
||||
canonicalTarget,
|
||||
);
|
||||
}
|
||||
|
||||
function writeFsSafeError(error: FsSafeError, targetPath: string): FileWriteError {
|
||||
if (error.code === "symlink") {
|
||||
return err(
|
||||
"SYMLINK_TARGET_DENIED",
|
||||
`path is a symlink; refusing to write through it: ${targetPath}`,
|
||||
);
|
||||
}
|
||||
if (error.code === "not-file") {
|
||||
return err("IS_DIRECTORY", `path resolves to a directory: ${targetPath}`);
|
||||
}
|
||||
if (error.code === "already-exists") {
|
||||
return err("EXISTS_NO_OVERWRITE", `file already exists and overwrite is false: ${targetPath}`);
|
||||
}
|
||||
return err("WRITE_ERROR", error.message, targetPath);
|
||||
}
|
||||
|
||||
export async function handleFileWrite(
|
||||
params: Partial<FileWriteParams> & Record<string, unknown>,
|
||||
): Promise<FileWriteResult> {
|
||||
@@ -158,20 +127,21 @@ export async function handleFileWrite(
|
||||
);
|
||||
}
|
||||
|
||||
// 3. Resolve parent dir
|
||||
const targetPath = path.normalize(rawPath);
|
||||
const parentDir = path.dirname(targetPath);
|
||||
|
||||
const parentExists = await pathExists(parentDir);
|
||||
|
||||
// Refuse symlink traversal in the existing parent chain before creating
|
||||
// missing directories. Recursive mkdir follows symlinked ancestors, so this
|
||||
// has to run before mkdir can mutate the canonical target.
|
||||
if (!followSymlinks) {
|
||||
const redirect = await rejectParentSymlinkRedirect(targetPath, parentDir);
|
||||
if (redirect) {
|
||||
return redirect;
|
||||
let targetPath: string;
|
||||
let parentDir: string;
|
||||
let parentExists: boolean;
|
||||
try {
|
||||
const resolved = await resolveAbsolutePathForWrite(rawPath, {
|
||||
symlinks: followSymlinks ? "follow" : "reject",
|
||||
});
|
||||
targetPath = resolved.path;
|
||||
parentDir = resolved.parentDir;
|
||||
parentExists = resolved.parentExists;
|
||||
} catch (error) {
|
||||
if (error instanceof FsSafeError && error.code === "symlink") {
|
||||
return symlinkRedirectError(error);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
if (!parentExists) {
|
||||
@@ -189,7 +159,7 @@ export async function handleFileWrite(
|
||||
}
|
||||
return {
|
||||
ok: true,
|
||||
path: await canonicalTargetFromExistingAncestor(targetPath),
|
||||
path: await canonicalPathFromExistingAncestor(targetPath),
|
||||
size: buf.length,
|
||||
sha256: computedSha256,
|
||||
overwritten: false,
|
||||
@@ -203,15 +173,19 @@ export async function handleFileWrite(
|
||||
}
|
||||
}
|
||||
|
||||
// Re-check after mkdir as a race-defense: if the parent chain changed
|
||||
// between the first check and directory creation, fail before writing bytes.
|
||||
if (!followSymlinks) {
|
||||
const redirect = await rejectParentSymlinkRedirect(targetPath, parentDir);
|
||||
if (redirect) {
|
||||
return redirect;
|
||||
try {
|
||||
await resolveAbsolutePathForWrite(targetPath, {
|
||||
symlinks: followSymlinks ? "follow" : "reject",
|
||||
});
|
||||
} catch (error) {
|
||||
if (error instanceof FsSafeError && error.code === "symlink") {
|
||||
return symlinkRedirectError(error);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
const targetFileName = path.basename(targetPath);
|
||||
const parentRoot = await root(parentDir);
|
||||
let overwritten = false;
|
||||
try {
|
||||
const existingLStat = await fs.lstat(targetPath);
|
||||
@@ -232,8 +206,9 @@ export async function handleFileWrite(
|
||||
}
|
||||
overwritten = true;
|
||||
} catch (statErr: unknown) {
|
||||
// ENOENT is fine — file does not exist yet
|
||||
if ((statErr as NodeJS.ErrnoException).code !== "ENOENT") {
|
||||
const statErrorCode =
|
||||
statErr instanceof FsSafeError ? statErr.code : (statErr as NodeJS.ErrnoException).code;
|
||||
if (statErrorCode !== "not-found" && statErrorCode !== "ENOENT") {
|
||||
const message = statErr instanceof Error ? statErr.message : String(statErr);
|
||||
if (message.toLowerCase().includes("permission")) {
|
||||
return err("PERMISSION_DENIED", `permission denied: ${targetPath}`);
|
||||
@@ -259,55 +234,45 @@ export async function handleFileWrite(
|
||||
if (preflightOnly) {
|
||||
return {
|
||||
ok: true,
|
||||
path: await canonicalTargetFromExistingAncestor(targetPath),
|
||||
path: await canonicalPathFromExistingAncestor(targetPath),
|
||||
size: buf.length,
|
||||
sha256: computedSha256,
|
||||
overwritten,
|
||||
};
|
||||
}
|
||||
|
||||
// 6. Atomic write: write to tmp, then rename
|
||||
const tmpSuffix = crypto.randomBytes(8).toString("hex");
|
||||
const tmpPath = `${targetPath}.${tmpSuffix}.tmp`;
|
||||
|
||||
try {
|
||||
await fs.writeFile(tmpPath, buf);
|
||||
if (overwrite) {
|
||||
await parentRoot.write(targetFileName, buf);
|
||||
} else {
|
||||
await parentRoot.create(targetFileName, buf);
|
||||
}
|
||||
} catch (writeErr) {
|
||||
if (writeErr instanceof FsSafeError) {
|
||||
return writeFsSafeError(writeErr, targetPath);
|
||||
}
|
||||
const message = writeErr instanceof Error ? writeErr.message : String(writeErr);
|
||||
// Clean up tmp if possible
|
||||
await fs.unlink(tmpPath).catch(() => {});
|
||||
if (message.toLowerCase().includes("permission") || message.toLowerCase().includes("access")) {
|
||||
return err("PERMISSION_DENIED", `permission denied writing to: ${parentDir}`);
|
||||
}
|
||||
return err("WRITE_ERROR", `failed to write file: ${message}`);
|
||||
}
|
||||
|
||||
try {
|
||||
await fs.rename(tmpPath, targetPath);
|
||||
} catch (renameErr) {
|
||||
const message = renameErr instanceof Error ? renameErr.message : String(renameErr);
|
||||
await fs.unlink(tmpPath).catch(() => {});
|
||||
if (message.toLowerCase().includes("permission") || message.toLowerCase().includes("access")) {
|
||||
return err("PERMISSION_DENIED", `permission denied renaming to: ${targetPath}`);
|
||||
}
|
||||
return err("WRITE_ERROR", `failed to rename tmp to target: ${message}`);
|
||||
}
|
||||
|
||||
const writtenBuf = buf;
|
||||
|
||||
// 8. Re-realpath to resolve any symlinks in the final path
|
||||
let canonicalPath = targetPath;
|
||||
try {
|
||||
canonicalPath = await fs.realpath(targetPath);
|
||||
} catch {
|
||||
// Best effort; use normalized path as fallback
|
||||
canonicalPath = targetPath;
|
||||
const opened = await parentRoot.open(targetFileName);
|
||||
canonicalPath = opened.realPath;
|
||||
await opened.handle.close().catch(() => undefined);
|
||||
} catch (openErr) {
|
||||
if (openErr instanceof FsSafeError) {
|
||||
return writeFsSafeError(openErr, targetPath);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
ok: true,
|
||||
path: canonicalPath,
|
||||
size: writtenBuf.length,
|
||||
size: buf.length,
|
||||
sha256: computedSha256,
|
||||
overwritten,
|
||||
};
|
||||
|
||||
@@ -11,6 +11,7 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { appendRegularFile } from "openclaw/plugin-sdk/security-runtime";
|
||||
|
||||
export type FileTransferAuditOp = "file.fetch" | "dir.list" | "dir.fetch" | "file.write";
|
||||
|
||||
@@ -86,7 +87,11 @@ export async function appendFileTransferAudit(
|
||||
timestamp: new Date().toISOString(),
|
||||
...record,
|
||||
})}\n`;
|
||||
await fs.appendFile(auditFilePath(dir), line, { mode: 0o600 });
|
||||
await appendRegularFile({
|
||||
filePath: auditFilePath(dir),
|
||||
content: line,
|
||||
rejectSymlinkParents: true,
|
||||
});
|
||||
} catch (e) {
|
||||
process.stderr.write(`[file-transfer:audit] append failed: ${String(e)}\n`);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import crypto from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import {
|
||||
callGatewayTool,
|
||||
listNodes,
|
||||
@@ -7,7 +6,7 @@ import {
|
||||
type AnyAgentTool,
|
||||
type NodeListNode,
|
||||
} from "openclaw/plugin-sdk/agent-harness-runtime";
|
||||
import { resolveMediaBufferPath } from "openclaw/plugin-sdk/media-store";
|
||||
import { readMediaBuffer } from "openclaw/plugin-sdk/media-store";
|
||||
import { appendFileTransferAudit } from "../shared/audit.js";
|
||||
import { throwFromNodePayload } from "../shared/errors.js";
|
||||
import {
|
||||
@@ -28,14 +27,11 @@ async function readSourceBytes(input: {
|
||||
}): Promise<{ buffer: Buffer; contentBase64: string; source: "inline" | "media" }> {
|
||||
const sourceMediaId = input.sourceMediaId?.trim();
|
||||
if (sourceMediaId) {
|
||||
const mediaPath = await resolveMediaBufferPath(sourceMediaId, FILE_TRANSFER_SUBDIR);
|
||||
const stat = await fs.stat(mediaPath);
|
||||
if (stat.size > FILE_WRITE_HARD_MAX_BYTES) {
|
||||
throw new Error(
|
||||
`sourceMediaId too large: ${stat.size} bytes; maximum is ${FILE_WRITE_HARD_MAX_BYTES} bytes`,
|
||||
);
|
||||
}
|
||||
const buffer = await fs.readFile(mediaPath);
|
||||
const { buffer } = await readMediaBuffer(
|
||||
sourceMediaId,
|
||||
FILE_TRANSFER_SUBDIR,
|
||||
FILE_WRITE_HARD_MAX_BYTES,
|
||||
);
|
||||
return { buffer, contentBase64: buffer.toString("base64"), source: "media" };
|
||||
}
|
||||
if (input.contentBase64 === undefined) {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { mkdtemp, readFile, rm } from "node:fs/promises";
|
||||
import { readFile } from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { resolveApiKeyForProvider } from "openclaw/plugin-sdk/provider-auth-runtime";
|
||||
import {
|
||||
@@ -7,7 +7,7 @@ import {
|
||||
waitProviderOperationPollInterval,
|
||||
} from "openclaw/plugin-sdk/provider-http";
|
||||
import { fetchWithSsrFGuard } from "openclaw/plugin-sdk/ssrf-runtime";
|
||||
import { resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
|
||||
import { resolvePreferredOpenClawTmpDir, withTempWorkspace } from "openclaw/plugin-sdk/temp-path";
|
||||
import { normalizeOptionalString } from "openclaw/plugin-sdk/text-runtime";
|
||||
import type {
|
||||
GeneratedVideoAsset,
|
||||
@@ -151,24 +151,22 @@ async function downloadGeneratedVideo(params: {
|
||||
file: unknown;
|
||||
index: number;
|
||||
}): Promise<GeneratedVideoAsset> {
|
||||
const tempDir = await mkdtemp(
|
||||
path.join(resolvePreferredOpenClawTmpDir(), "openclaw-google-video-"),
|
||||
return await withTempWorkspace(
|
||||
{ rootDir: resolvePreferredOpenClawTmpDir(), prefix: "openclaw-google-video-" },
|
||||
async ({ dir: tempDir }) => {
|
||||
const downloadPath = path.join(tempDir, `video-${params.index + 1}.mp4`);
|
||||
await params.client.files.download({
|
||||
file: params.file as never,
|
||||
downloadPath,
|
||||
});
|
||||
const buffer = await readFile(downloadPath);
|
||||
return {
|
||||
buffer,
|
||||
mimeType: "video/mp4",
|
||||
fileName: `video-${params.index + 1}.mp4`,
|
||||
};
|
||||
},
|
||||
);
|
||||
const downloadPath = path.join(tempDir, `video-${params.index + 1}.mp4`);
|
||||
try {
|
||||
await params.client.files.download({
|
||||
file: params.file as never,
|
||||
downloadPath,
|
||||
});
|
||||
const buffer = await readFile(downloadPath);
|
||||
return {
|
||||
buffer,
|
||||
mimeType: "video/mp4",
|
||||
fileName: `video-${params.index + 1}.mp4`,
|
||||
};
|
||||
} finally {
|
||||
await rm(tempDir, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
function resolveGoogleGeneratedVideoDownloadUrl(params: {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import net from "node:net";
|
||||
import tls from "node:tls";
|
||||
import { withTimeout } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { normalizeLowercaseStringOrEmpty } from "openclaw/plugin-sdk/text-runtime";
|
||||
import {
|
||||
parseIrcLine,
|
||||
@@ -64,24 +65,6 @@ function toError(err: unknown): Error {
|
||||
return new Error(typeof err === "string" ? err : JSON.stringify(err));
|
||||
}
|
||||
|
||||
function withTimeout<T>(promise: Promise<T>, timeoutMs: number, label: string): Promise<T> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const timer = setTimeout(
|
||||
() => reject(new Error(`${label} timed out after ${timeoutMs}ms`)),
|
||||
timeoutMs,
|
||||
);
|
||||
promise
|
||||
.then((result) => {
|
||||
clearTimeout(timer);
|
||||
resolve(result);
|
||||
})
|
||||
.catch((error) => {
|
||||
clearTimeout(timer);
|
||||
reject(error);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function buildFallbackNick(nick: string): string {
|
||||
const normalized = nick.replace(/\s+/g, "");
|
||||
const safe = normalized.replace(/[^A-Za-z0-9_\-[\]\\`^{}|]/g, "");
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
export { resolvePreferredOpenClawTmpDir } from "./src/runtime-api.js";
|
||||
export { resolvePreferredOpenClawTmpDir, withTempWorkspace } from "./src/runtime-api.js";
|
||||
export {
|
||||
definePluginEntry,
|
||||
type AnyAgentTool,
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import Ajv from "ajv";
|
||||
import { normalizeOptionalString } from "openclaw/plugin-sdk/text-runtime";
|
||||
import { Type } from "typebox";
|
||||
import { resolvePreferredOpenClawTmpDir } from "../api.js";
|
||||
import { resolvePreferredOpenClawTmpDir, withTempWorkspace } from "../api.js";
|
||||
import type { OpenClawPluginApi } from "../api.js";
|
||||
|
||||
const AjvCtor = Ajv as unknown as typeof import("ajv").default;
|
||||
@@ -208,78 +207,69 @@ export function createLlmTaskTool(api: OpenClawPluginApi) {
|
||||
|
||||
const fullPrompt = `${system}\n\nTASK:\n${prompt}\n\nINPUT_JSON:\n${inputJson}\n`;
|
||||
|
||||
let tmpDir: string | null = null;
|
||||
try {
|
||||
tmpDir = await fs.mkdtemp(
|
||||
path.join(resolvePreferredOpenClawTmpDir(), "openclaw-llm-task-"),
|
||||
);
|
||||
const sessionId = `llm-task-${Date.now()}`;
|
||||
const sessionFile = path.join(tmpDir, "session.json");
|
||||
return await withTempWorkspace(
|
||||
{ rootDir: resolvePreferredOpenClawTmpDir(), prefix: "openclaw-llm-task-" },
|
||||
async ({ dir: tmpDir }) => {
|
||||
const sessionId = `llm-task-${Date.now()}`;
|
||||
const sessionFile = path.join(tmpDir, "session.json");
|
||||
|
||||
const result = await api.runtime.agent.runEmbeddedPiAgent({
|
||||
sessionId,
|
||||
sessionFile,
|
||||
workspaceDir: api.config?.agents?.defaults?.workspace ?? process.cwd(),
|
||||
config: api.config,
|
||||
prompt: fullPrompt,
|
||||
timeoutMs,
|
||||
runId: `llm-task-${Date.now()}`,
|
||||
provider,
|
||||
model,
|
||||
authProfileId,
|
||||
authProfileIdSource: authProfileId ? "user" : "auto",
|
||||
thinkLevel,
|
||||
streamParams,
|
||||
disableTools: true,
|
||||
});
|
||||
const result = await api.runtime.agent.runEmbeddedPiAgent({
|
||||
sessionId,
|
||||
sessionFile,
|
||||
workspaceDir: api.config?.agents?.defaults?.workspace ?? process.cwd(),
|
||||
config: api.config,
|
||||
prompt: fullPrompt,
|
||||
timeoutMs,
|
||||
runId: `llm-task-${Date.now()}`,
|
||||
provider,
|
||||
model,
|
||||
authProfileId,
|
||||
authProfileIdSource: authProfileId ? "user" : "auto",
|
||||
thinkLevel,
|
||||
streamParams,
|
||||
disableTools: true,
|
||||
});
|
||||
|
||||
const text = collectText(
|
||||
typeof result === "object" && result !== null && "payloads" in result
|
||||
? (result as { payloads?: Array<{ text?: string; isError?: boolean }> }).payloads
|
||||
: undefined,
|
||||
);
|
||||
if (!text) {
|
||||
throw new Error("LLM returned empty output");
|
||||
}
|
||||
|
||||
const raw = stripCodeFences(text);
|
||||
let parsed: unknown;
|
||||
try {
|
||||
parsed = JSON.parse(raw);
|
||||
} catch {
|
||||
throw new Error("LLM returned invalid JSON");
|
||||
}
|
||||
|
||||
const schema = params.schema;
|
||||
if (schema && typeof schema === "object" && !Array.isArray(schema)) {
|
||||
const ajv = new AjvCtor({ allErrors: true, strict: false });
|
||||
const validate = ajv.compile(schema);
|
||||
const ok = validate(parsed);
|
||||
if (!ok) {
|
||||
const msg =
|
||||
validate.errors
|
||||
?.map(
|
||||
(e: { instancePath?: string; message?: string }) =>
|
||||
`${e.instancePath || "<root>"} ${e.message || "invalid"}`,
|
||||
)
|
||||
.join("; ") ?? "invalid";
|
||||
throw new Error(`LLM JSON did not match schema: ${msg}`);
|
||||
const text = collectText(
|
||||
typeof result === "object" && result !== null && "payloads" in result
|
||||
? (result as { payloads?: Array<{ text?: string; isError?: boolean }> }).payloads
|
||||
: undefined,
|
||||
);
|
||||
if (!text) {
|
||||
throw new Error("LLM returned empty output");
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
content: [{ type: "text", text: JSON.stringify(parsed, null, 2) }],
|
||||
details: { json: parsed, provider, model },
|
||||
};
|
||||
} finally {
|
||||
if (tmpDir) {
|
||||
const raw = stripCodeFences(text);
|
||||
let parsed: unknown;
|
||||
try {
|
||||
await fs.rm(tmpDir, { recursive: true, force: true });
|
||||
parsed = JSON.parse(raw);
|
||||
} catch {
|
||||
// ignore
|
||||
throw new Error("LLM returned invalid JSON");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const schema = params.schema;
|
||||
if (schema && typeof schema === "object" && !Array.isArray(schema)) {
|
||||
const ajv = new AjvCtor({ allErrors: true, strict: false });
|
||||
const validate = ajv.compile(schema);
|
||||
const ok = validate(parsed);
|
||||
if (!ok) {
|
||||
const msg =
|
||||
validate.errors
|
||||
?.map(
|
||||
(e: { instancePath?: string; message?: string }) =>
|
||||
`${e.instancePath || "<root>"} ${e.message || "invalid"}`,
|
||||
)
|
||||
.join("; ") ?? "invalid";
|
||||
throw new Error(`LLM JSON did not match schema: ${msg}`);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
content: [{ type: "text", text: JSON.stringify(parsed, null, 2) }],
|
||||
details: { json: parsed, provider, model },
|
||||
};
|
||||
},
|
||||
);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1 +1 @@
|
||||
export { resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
|
||||
export { resolvePreferredOpenClawTmpDir, withTempWorkspace } from "openclaw/plugin-sdk/temp-path";
|
||||
|
||||
@@ -13,6 +13,7 @@ import {
|
||||
import { resolveGlobalMap } from "openclaw/plugin-sdk/global-singleton";
|
||||
import { resolveStateDir } from "openclaw/plugin-sdk/memory-core-host-runtime-core";
|
||||
import { getRuntimeConfig } from "openclaw/plugin-sdk/runtime-config-snapshot";
|
||||
import { pathExists, replaceFileAtomic } from "openclaw/plugin-sdk/security-runtime";
|
||||
import {
|
||||
loadSessionStore,
|
||||
resolveStorePath,
|
||||
@@ -488,29 +489,14 @@ async function assertSafeDreamsPath(dreamsPath: string): Promise<void> {
|
||||
|
||||
async function writeDreamsFileAtomic(dreamsPath: string, content: string): Promise<void> {
|
||||
await assertSafeDreamsPath(dreamsPath);
|
||||
const existing = await fs.stat(dreamsPath).catch((err: NodeJS.ErrnoException) => {
|
||||
if (err.code === "ENOENT") {
|
||||
return null;
|
||||
}
|
||||
throw err;
|
||||
await replaceFileAtomic({
|
||||
filePath: dreamsPath,
|
||||
content,
|
||||
mode: 0o600,
|
||||
preserveExistingMode: true,
|
||||
tempPrefix: `${path.basename(dreamsPath)}.dreams`,
|
||||
throwOnCleanupError: true,
|
||||
});
|
||||
const mode = existing?.mode ?? 0o600;
|
||||
const tempPath = `${dreamsPath}.${process.pid}.${Date.now()}.tmp`;
|
||||
await fs.writeFile(tempPath, content, { encoding: "utf-8", flag: "wx", mode });
|
||||
await fs.chmod(tempPath, mode).catch(() => undefined);
|
||||
try {
|
||||
await fs.rename(tempPath, dreamsPath);
|
||||
await fs.chmod(dreamsPath, mode).catch(() => undefined);
|
||||
} catch (err) {
|
||||
const cleanupError = await fs.rm(tempPath, { force: true }).catch((rmErr) => rmErr);
|
||||
if (cleanupError) {
|
||||
throw new Error(
|
||||
`Atomic DREAMS.md write failed (${formatErrorMessage(err)}); cleanup also failed (${formatErrorMessage(cleanupError)})`,
|
||||
{ cause: err },
|
||||
);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async function updateDreamsFile<T>(params: {
|
||||
@@ -710,15 +696,6 @@ export async function appendNarrativeEntry(params: {
|
||||
|
||||
// ── Orchestrator ───────────────────────────────────────────────────────
|
||||
|
||||
async function safePathExists(pathname: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.stat(pathname);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeComparablePath(pathname: string): string {
|
||||
return process.platform === "win32" ? pathname.toLowerCase() : pathname;
|
||||
}
|
||||
@@ -814,7 +791,7 @@ async function scrubDreamingNarrativeArtifacts(logger: Logger): Promise<void> {
|
||||
if (!isDreamingSessionStoreKey(key)) {
|
||||
continue;
|
||||
}
|
||||
if (!normalizedSessionFile || !(await safePathExists(normalizedSessionFile))) {
|
||||
if (!normalizedSessionFile || !(await pathExists(normalizedSessionFile))) {
|
||||
needsStoreUpdate = true;
|
||||
}
|
||||
}
|
||||
@@ -834,7 +811,7 @@ async function scrubDreamingNarrativeArtifacts(logger: Logger): Promise<void> {
|
||||
if (!isDreamingSessionStoreKey(key)) {
|
||||
continue;
|
||||
}
|
||||
if (!normalizedSessionFile || !(await safePathExists(normalizedSessionFile))) {
|
||||
if (!normalizedSessionFile || !(await pathExists(normalizedSessionFile))) {
|
||||
delete lockedStore[key];
|
||||
prunedForAgent += 1;
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@ import {
|
||||
resolveMemoryRemDreamingConfig,
|
||||
} from "openclaw/plugin-sdk/memory-core-host-status";
|
||||
import type { OpenClawPluginApi } from "openclaw/plugin-sdk/plugin-entry";
|
||||
import { appendRegularFile, privateFileStore } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { writeDailyDreamingPhaseBlock } from "./dreaming-markdown.js";
|
||||
import {
|
||||
generateAndAppendDreamNarrative,
|
||||
@@ -443,11 +444,11 @@ function normalizeMemoryDay(value: unknown): string | undefined {
|
||||
async function readDailyIngestionState(workspaceDir: string): Promise<DailyIngestionState> {
|
||||
const statePath = resolveDailyIngestionStatePath(workspaceDir);
|
||||
try {
|
||||
const raw = await fs.readFile(statePath, "utf-8");
|
||||
return normalizeDailyIngestionState(JSON.parse(raw) as unknown);
|
||||
return normalizeDailyIngestionState(
|
||||
await privateFileStore(workspaceDir).readJsonIfExists(path.relative(workspaceDir, statePath)),
|
||||
);
|
||||
} catch (err) {
|
||||
const code = (err as NodeJS.ErrnoException)?.code;
|
||||
if (code === "ENOENT" || err instanceof SyntaxError) {
|
||||
if (err instanceof SyntaxError) {
|
||||
return { version: 1, files: {} };
|
||||
}
|
||||
throw err;
|
||||
@@ -459,10 +460,9 @@ async function writeDailyIngestionState(
|
||||
state: DailyIngestionState,
|
||||
): Promise<void> {
|
||||
const statePath = resolveDailyIngestionStatePath(workspaceDir);
|
||||
await fs.mkdir(path.dirname(statePath), { recursive: true });
|
||||
const tmpPath = `${statePath}.${process.pid}.${Date.now()}.tmp`;
|
||||
await fs.writeFile(tmpPath, `${JSON.stringify(state, null, 2)}\n`, "utf-8");
|
||||
await fs.rename(tmpPath, statePath);
|
||||
await privateFileStore(workspaceDir).writeJson(path.relative(workspaceDir, statePath), state, {
|
||||
trailingNewline: true,
|
||||
});
|
||||
}
|
||||
|
||||
type SessionIngestionFileState = {
|
||||
@@ -556,11 +556,11 @@ function normalizeSessionIngestionState(raw: unknown): SessionIngestionState {
|
||||
async function readSessionIngestionState(workspaceDir: string): Promise<SessionIngestionState> {
|
||||
const statePath = resolveSessionIngestionStatePath(workspaceDir);
|
||||
try {
|
||||
const raw = await fs.readFile(statePath, "utf-8");
|
||||
return normalizeSessionIngestionState(JSON.parse(raw) as unknown);
|
||||
return normalizeSessionIngestionState(
|
||||
await privateFileStore(workspaceDir).readJsonIfExists(path.relative(workspaceDir, statePath)),
|
||||
);
|
||||
} catch (err) {
|
||||
const code = (err as NodeJS.ErrnoException)?.code;
|
||||
if (code === "ENOENT" || err instanceof SyntaxError) {
|
||||
if (err instanceof SyntaxError) {
|
||||
return { version: 3, files: {}, seenMessages: {} };
|
||||
}
|
||||
throw err;
|
||||
@@ -572,10 +572,9 @@ async function writeSessionIngestionState(
|
||||
state: SessionIngestionState,
|
||||
): Promise<void> {
|
||||
const statePath = resolveSessionIngestionStatePath(workspaceDir);
|
||||
await fs.mkdir(path.dirname(statePath), { recursive: true });
|
||||
const tmpPath = `${statePath}.${process.pid}.${Date.now()}.tmp`;
|
||||
await fs.writeFile(tmpPath, `${JSON.stringify(state, null, 2)}\n`, "utf-8");
|
||||
await fs.rename(tmpPath, statePath);
|
||||
await privateFileStore(workspaceDir).writeJson(path.relative(workspaceDir, statePath), state, {
|
||||
trailingNewline: true,
|
||||
});
|
||||
}
|
||||
|
||||
function trimTrackedSessionScopes(
|
||||
@@ -714,7 +713,11 @@ async function appendSessionCorpusLines(params: {
|
||||
? normalizedExisting.slice(0, -1).split("\n").length
|
||||
: normalizedExisting.split("\n").length;
|
||||
const payload = `${params.lines.map((entry) => entry.rendered).join("\n")}\n`;
|
||||
await fs.appendFile(absolutePath, payload, "utf-8");
|
||||
await appendRegularFile({
|
||||
filePath: absolutePath,
|
||||
content: payload,
|
||||
rejectSymlinkParents: true,
|
||||
});
|
||||
return params.lines.map((entry, index) => {
|
||||
const lineNumber = existingLineCount + index + 1;
|
||||
return {
|
||||
|
||||
@@ -9,12 +9,13 @@ import { formatErrorMessage } from "openclaw/plugin-sdk/error-runtime";
|
||||
import { withFileLock } from "openclaw/plugin-sdk/file-lock";
|
||||
import {
|
||||
createSubsystemLogger,
|
||||
isPathInside,
|
||||
root,
|
||||
resolveAgentContextLimits,
|
||||
resolveMemorySearchSyncConfig,
|
||||
resolveAgentWorkspaceDir,
|
||||
resolveGlobalSingleton,
|
||||
resolveStateDir,
|
||||
writeFileWithinRoot,
|
||||
type OpenClawConfig,
|
||||
} from "openclaw/plugin-sdk/memory-core-host-engine-foundation";
|
||||
import {
|
||||
@@ -1302,7 +1303,15 @@ export class QmdMemoryManager implements MemorySearchManager {
|
||||
if (!absPath.endsWith(".md")) {
|
||||
throw new Error("path required");
|
||||
}
|
||||
const statResult = await statRegularFile(absPath);
|
||||
let statResult: Awaited<ReturnType<typeof statRegularFile>>;
|
||||
try {
|
||||
statResult = await statRegularFile(absPath);
|
||||
} catch (err) {
|
||||
if (err instanceof Error && err.message === "path must be a regular file") {
|
||||
throw new Error("path required", { cause: err });
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
if (statResult.missing) {
|
||||
return { text: "", path: relPath };
|
||||
}
|
||||
@@ -2203,6 +2212,7 @@ export class QmdMemoryManager implements MemorySearchManager {
|
||||
}
|
||||
const exportDir = this.sessionExporter.dir;
|
||||
await fs.mkdir(exportDir, { recursive: true });
|
||||
const exportRoot = await root(exportDir);
|
||||
const files = await listSessionFilesForAgent(this.agentId);
|
||||
const keep = new Set<string>();
|
||||
const tracked = new Set<string>();
|
||||
@@ -2222,10 +2232,7 @@ export class QmdMemoryManager implements MemorySearchManager {
|
||||
tracked.add(sessionFile);
|
||||
const state = this.exportedSessionState.get(sessionFile);
|
||||
if (!state || state.hash !== entry.hash || state.mtimeMs !== entry.mtimeMs) {
|
||||
await writeFileWithinRoot({
|
||||
rootDir: exportDir,
|
||||
relativePath: targetName,
|
||||
data: this.renderSessionMarkdown(entry),
|
||||
await exportRoot.write(targetName, this.renderSessionMarkdown(entry), {
|
||||
encoding: "utf-8",
|
||||
});
|
||||
}
|
||||
@@ -2236,18 +2243,18 @@ export class QmdMemoryManager implements MemorySearchManager {
|
||||
});
|
||||
keep.add(target);
|
||||
}
|
||||
const exported = await fs.readdir(exportDir).catch(() => []);
|
||||
const exported = await exportRoot.list(".").catch(() => []);
|
||||
for (const name of exported) {
|
||||
if (!name.endsWith(".md")) {
|
||||
continue;
|
||||
}
|
||||
const full = path.join(exportDir, name);
|
||||
if (!keep.has(full)) {
|
||||
await fs.rm(full, { force: true });
|
||||
await exportRoot.remove(name).catch(() => undefined);
|
||||
}
|
||||
}
|
||||
for (const [sessionFile, state] of this.exportedSessionState) {
|
||||
if (!tracked.has(sessionFile) || !state.target.startsWith(exportDir + path.sep)) {
|
||||
if (!tracked.has(sessionFile) || !isPathInside(exportDir, state.target)) {
|
||||
this.exportedSessionState.delete(sessionFile);
|
||||
}
|
||||
}
|
||||
@@ -2788,23 +2795,11 @@ export class QmdMemoryManager implements MemorySearchManager {
|
||||
}
|
||||
|
||||
private isWithinWorkspace(absPath: string): boolean {
|
||||
const normalizedWorkspace = this.workspaceDir.endsWith(path.sep)
|
||||
? this.workspaceDir
|
||||
: `${this.workspaceDir}${path.sep}`;
|
||||
if (absPath === this.workspaceDir) {
|
||||
return true;
|
||||
}
|
||||
const candidate = absPath.endsWith(path.sep) ? absPath : `${absPath}${path.sep}`;
|
||||
return candidate.startsWith(normalizedWorkspace);
|
||||
return isPathInside(this.workspaceDir, absPath);
|
||||
}
|
||||
|
||||
private isWithinRoot(root: string, candidate: string): boolean {
|
||||
const normalizedRoot = root.endsWith(path.sep) ? root : `${root}${path.sep}`;
|
||||
if (candidate === root) {
|
||||
return true;
|
||||
}
|
||||
const next = candidate.endsWith(path.sep) ? candidate : `${candidate}${path.sep}`;
|
||||
return next.startsWith(normalizedRoot);
|
||||
return isPathInside(root, candidate);
|
||||
}
|
||||
|
||||
private clampResultsByInjectedChars(results: MemorySearchResult[]): MemorySearchResult[] {
|
||||
|
||||
@@ -3,17 +3,9 @@ import path from "node:path";
|
||||
import { resolveMemoryHostEventLogPath } from "openclaw/plugin-sdk/memory-core-host-events";
|
||||
import { resolveMemoryDreamingWorkspaces } from "openclaw/plugin-sdk/memory-core-host-status";
|
||||
import type { MemoryPluginPublicArtifact } from "openclaw/plugin-sdk/memory-host-core";
|
||||
import { pathExists } from "openclaw/plugin-sdk/security-runtime";
|
||||
import type { OpenClawConfig } from "../api.js";
|
||||
|
||||
async function pathExists(inputPath: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(inputPath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function listMarkdownFilesRecursive(rootDir: string): Promise<string[]> {
|
||||
const entries = await fs.readdir(rootDir, { withFileTypes: true }).catch(() => []);
|
||||
const files: string[] = [];
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import { createHash, randomUUID } from "node:crypto";
|
||||
import { createHash } from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import type { MemorySearchResult } from "openclaw/plugin-sdk/memory-core-host-runtime-files";
|
||||
import { formatMemoryDreamingDay } from "openclaw/plugin-sdk/memory-core-host-status";
|
||||
import { appendMemoryHostEvent } from "openclaw/plugin-sdk/memory-host-events";
|
||||
import { privateFileStore } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { normalizeLowercaseStringOrEmpty } from "openclaw/plugin-sdk/text-runtime";
|
||||
import {
|
||||
deriveConceptTags,
|
||||
@@ -758,9 +759,10 @@ async function withShortTermLock<T>(workspaceDir: string, task: () => Promise<T>
|
||||
async function readStore(workspaceDir: string, nowIso: string): Promise<ShortTermRecallStore> {
|
||||
const storePath = resolveStorePath(workspaceDir);
|
||||
try {
|
||||
const raw = await fs.readFile(storePath, "utf-8");
|
||||
const parsed = JSON.parse(raw) as unknown;
|
||||
return normalizeStore(parsed, nowIso);
|
||||
return normalizeStore(
|
||||
await privateFileStore(workspaceDir).readJsonIfExists(path.relative(workspaceDir, storePath)),
|
||||
nowIso,
|
||||
);
|
||||
} catch (err) {
|
||||
if ((err as NodeJS.ErrnoException)?.code === "ENOENT") {
|
||||
return emptyStore(nowIso);
|
||||
@@ -830,13 +832,13 @@ async function readPhaseSignalStore(
|
||||
): Promise<ShortTermPhaseSignalStore> {
|
||||
const phaseSignalPath = resolvePhaseSignalPath(workspaceDir);
|
||||
try {
|
||||
const raw = await fs.readFile(phaseSignalPath, "utf-8");
|
||||
return normalizePhaseSignalStore(JSON.parse(raw) as unknown, nowIso);
|
||||
} catch (err) {
|
||||
const code = (err as NodeJS.ErrnoException)?.code;
|
||||
if (code === "ENOENT" || err instanceof SyntaxError) {
|
||||
return emptyPhaseSignalStore(nowIso);
|
||||
}
|
||||
return normalizePhaseSignalStore(
|
||||
await privateFileStore(workspaceDir).readJsonIfExists(
|
||||
path.relative(workspaceDir, phaseSignalPath),
|
||||
),
|
||||
nowIso,
|
||||
);
|
||||
} catch {
|
||||
return emptyPhaseSignalStore(nowIso);
|
||||
}
|
||||
}
|
||||
@@ -847,17 +849,21 @@ async function writePhaseSignalStore(
|
||||
): Promise<void> {
|
||||
const phaseSignalPath = resolvePhaseSignalPath(workspaceDir);
|
||||
await ensureShortTermArtifactsDir(workspaceDir);
|
||||
const tmpPath = `${phaseSignalPath}.${process.pid}.${Date.now()}.${randomUUID()}.tmp`;
|
||||
await fs.writeFile(tmpPath, `${JSON.stringify(store, null, 2)}\n`, "utf-8");
|
||||
await fs.rename(tmpPath, phaseSignalPath);
|
||||
await privateFileStore(workspaceDir).writeJson(
|
||||
path.relative(workspaceDir, phaseSignalPath),
|
||||
store,
|
||||
{
|
||||
trailingNewline: true,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
async function writeStore(workspaceDir: string, store: ShortTermRecallStore): Promise<void> {
|
||||
const storePath = resolveStorePath(workspaceDir);
|
||||
await ensureShortTermArtifactsDir(workspaceDir);
|
||||
const tmpPath = `${storePath}.${process.pid}.${Date.now()}.${randomUUID()}.tmp`;
|
||||
await fs.writeFile(tmpPath, `${JSON.stringify(store, null, 2)}\n`, "utf-8");
|
||||
await fs.rename(tmpPath, storePath);
|
||||
await privateFileStore(workspaceDir).writeJson(path.relative(workspaceDir, storePath), store, {
|
||||
trailingNewline: true,
|
||||
});
|
||||
}
|
||||
|
||||
export function isShortTermMemoryPath(filePath: string): boolean {
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import {
|
||||
replaceManagedMarkdownBlock,
|
||||
withTrailingNewline,
|
||||
} from "openclaw/plugin-sdk/memory-host-markdown";
|
||||
import { root as fsRoot } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { compileMemoryWikiVault, type CompileMemoryWikiResult } from "./compile.js";
|
||||
import type { ResolvedMemoryWikiConfig } from "./config.js";
|
||||
import {
|
||||
@@ -150,22 +150,23 @@ function buildSynthesisBody(params: {
|
||||
}
|
||||
|
||||
async function writeWikiPage(params: {
|
||||
absolutePath: string;
|
||||
rootDir: string;
|
||||
relativePath: string;
|
||||
frontmatter: Record<string, unknown>;
|
||||
body: string;
|
||||
}): Promise<boolean> {
|
||||
const root = await fsRoot(params.rootDir);
|
||||
const rendered = withTrailingNewline(
|
||||
renderWikiMarkdown({
|
||||
frontmatter: params.frontmatter,
|
||||
body: params.body,
|
||||
}),
|
||||
);
|
||||
const existing = await fs.readFile(params.absolutePath, "utf8").catch(() => "");
|
||||
const existing = await root.readText(params.relativePath).catch(() => "");
|
||||
if (existing === rendered) {
|
||||
return false;
|
||||
}
|
||||
await fs.mkdir(path.dirname(params.absolutePath), { recursive: true });
|
||||
await fs.writeFile(params.absolutePath, rendered, "utf8");
|
||||
await root.write(params.relativePath, rendered);
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -183,14 +184,15 @@ async function applyCreateSynthesisMutation(params: {
|
||||
}): Promise<{ changed: boolean; pagePath: string; pageId: string }> {
|
||||
const slug = slugifyWikiSegment(params.mutation.title);
|
||||
const pagePath = path.join("syntheses", `${slug}.md`).replace(/\\/g, "/");
|
||||
const absolutePath = path.join(params.config.vault.path, pagePath);
|
||||
const existing = await fs.readFile(absolutePath, "utf8").catch(() => "");
|
||||
const root = await fsRoot(params.config.vault.path);
|
||||
const existing = await root.readText(pagePath).catch(() => "");
|
||||
const parsed = parseWikiMarkdown(existing);
|
||||
const pageId =
|
||||
(typeof parsed.frontmatter.id === "string" && parsed.frontmatter.id.trim()) ||
|
||||
`synthesis.${slug}`;
|
||||
const changed = await writeWikiPage({
|
||||
absolutePath,
|
||||
rootDir: params.config.vault.path,
|
||||
relativePath: pagePath,
|
||||
frontmatter: {
|
||||
...parsed.frontmatter,
|
||||
pageType: "synthesis",
|
||||
@@ -278,7 +280,8 @@ async function applyUpdateMetadataMutation(params: {
|
||||
}
|
||||
const parsed = parseWikiMarkdown(page.raw);
|
||||
const changed = await writeWikiPage({
|
||||
absolutePath: page.absolutePath,
|
||||
rootDir: params.config.vault.path,
|
||||
relativePath: page.relativePath,
|
||||
frontmatter: buildUpdatedFrontmatter({
|
||||
original: parsed.frontmatter,
|
||||
mutation: params.mutation,
|
||||
|
||||
@@ -4,6 +4,7 @@ import {
|
||||
replaceManagedMarkdownBlock,
|
||||
withTrailingNewline,
|
||||
} from "openclaw/plugin-sdk/memory-host-markdown";
|
||||
import { root as fsRoot } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { normalizeLowercaseStringOrEmpty } from "openclaw/plugin-sdk/text-runtime";
|
||||
import {
|
||||
assessClaimFreshness,
|
||||
@@ -768,12 +769,13 @@ async function refreshPageRelatedBlocks(params: {
|
||||
if (!params.config.render.createBacklinks) {
|
||||
return [];
|
||||
}
|
||||
const root = await fsRoot(params.config.vault.path);
|
||||
const updatedFiles: string[] = [];
|
||||
for (const page of params.pages) {
|
||||
if (page.kind === "report") {
|
||||
continue;
|
||||
}
|
||||
const original = await fs.readFile(page.absolutePath, "utf8");
|
||||
const original = await root.readText(page.relativePath);
|
||||
const updated = withTrailingNewline(
|
||||
replaceManagedMarkdownBlock({
|
||||
original,
|
||||
@@ -790,7 +792,7 @@ async function refreshPageRelatedBlocks(params: {
|
||||
if (updated === original) {
|
||||
continue;
|
||||
}
|
||||
await fs.writeFile(page.absolutePath, updated, "utf8");
|
||||
await root.write(page.relativePath, updated);
|
||||
updatedFiles.push(page.absolutePath);
|
||||
}
|
||||
return updatedFiles;
|
||||
@@ -817,13 +819,15 @@ function renderSectionList(params: {
|
||||
}
|
||||
|
||||
async function writeManagedMarkdownFile(params: {
|
||||
filePath: string;
|
||||
rootDir: string;
|
||||
relativePath: string;
|
||||
title: string;
|
||||
startMarker: string;
|
||||
endMarker: string;
|
||||
body: string;
|
||||
}): Promise<boolean> {
|
||||
const original = await fs.readFile(params.filePath, "utf8").catch(() => `# ${params.title}\n`);
|
||||
const root = await fsRoot(params.rootDir);
|
||||
const original = await root.readText(params.relativePath).catch(() => `# ${params.title}\n`);
|
||||
const updated = replaceManagedMarkdownBlock({
|
||||
original,
|
||||
heading: "## Generated",
|
||||
@@ -835,7 +839,7 @@ async function writeManagedMarkdownFile(params: {
|
||||
if (rendered === original) {
|
||||
return false;
|
||||
}
|
||||
await fs.writeFile(params.filePath, rendered, "utf8");
|
||||
await root.write(params.relativePath, rendered);
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -846,8 +850,8 @@ async function writeDashboardPage(params: {
|
||||
pages: WikiPageSummary[];
|
||||
now: Date;
|
||||
}): Promise<boolean> {
|
||||
const filePath = path.join(params.rootDir, params.definition.relativePath);
|
||||
const original = await fs.readFile(filePath, "utf8").catch(() =>
|
||||
const root = await fsRoot(params.rootDir);
|
||||
const original = await root.readText(params.definition.relativePath).catch(() =>
|
||||
renderWikiMarkdown({
|
||||
frontmatter: {
|
||||
pageType: "report",
|
||||
@@ -911,7 +915,7 @@ async function writeDashboardPage(params: {
|
||||
body: updatedBody,
|
||||
}),
|
||||
);
|
||||
await fs.writeFile(filePath, rendered, "utf8");
|
||||
await root.write(params.definition.relativePath, rendered);
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -1267,11 +1271,13 @@ async function writeAgentDigestArtifacts(params: {
|
||||
[agentDigestPath, agentDigest],
|
||||
[claimsDigestPath, claimsDigest],
|
||||
] as const) {
|
||||
const existing = await fs.readFile(filePath, "utf8").catch(() => "");
|
||||
const relativePath = path.relative(params.rootDir, filePath);
|
||||
const root = await fsRoot(params.rootDir);
|
||||
const existing = await root.readText(relativePath).catch(() => "");
|
||||
if (existing === content) {
|
||||
continue;
|
||||
}
|
||||
await fs.writeFile(filePath, content, "utf8");
|
||||
await root.write(relativePath, content);
|
||||
updatedFiles.push(filePath);
|
||||
}
|
||||
return updatedFiles;
|
||||
@@ -1303,7 +1309,8 @@ export async function compileMemoryWikiVault(
|
||||
const rootIndexPath = path.join(rootDir, "index.md");
|
||||
if (
|
||||
await writeManagedMarkdownFile({
|
||||
filePath: rootIndexPath,
|
||||
rootDir,
|
||||
relativePath: "index.md",
|
||||
title: "Wiki Index",
|
||||
startMarker: "<!-- openclaw:wiki:index:start -->",
|
||||
endMarker: "<!-- openclaw:wiki:index:end -->",
|
||||
@@ -1314,10 +1321,12 @@ export async function compileMemoryWikiVault(
|
||||
}
|
||||
|
||||
for (const group of COMPILE_PAGE_GROUPS) {
|
||||
const filePath = path.join(rootDir, group.dir, "index.md");
|
||||
const relativePath = path.join(group.dir, "index.md").replace(/\\/g, "/");
|
||||
const filePath = path.join(rootDir, relativePath);
|
||||
if (
|
||||
await writeManagedMarkdownFile({
|
||||
filePath,
|
||||
rootDir,
|
||||
relativePath,
|
||||
title: group.heading,
|
||||
startMarker: `<!-- openclaw:wiki:${group.dir}:index:start -->`,
|
||||
endMarker: `<!-- openclaw:wiki:${group.dir}:index:end -->`,
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { pathExists } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { compileMemoryWikiVault } from "./compile.js";
|
||||
import type { ResolvedMemoryWikiConfig } from "./config.js";
|
||||
import { appendMemoryWikiLog } from "./log.js";
|
||||
@@ -16,13 +17,6 @@ type IngestMemoryWikiSourceResult = {
|
||||
indexUpdatedFiles: string[];
|
||||
};
|
||||
|
||||
function pathExists(filePath: string): Promise<boolean> {
|
||||
return fs
|
||||
.access(filePath)
|
||||
.then(() => true)
|
||||
.catch(() => false);
|
||||
}
|
||||
|
||||
function resolveSourceTitle(sourcePath: string, explicitTitle?: string): string {
|
||||
if (explicitTitle?.trim()) {
|
||||
return explicitTitle.trim();
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { appendRegularFile } from "openclaw/plugin-sdk/security-runtime";
|
||||
|
||||
type MemoryWikiLogEntry = {
|
||||
type: "init" | "ingest" | "compile" | "lint";
|
||||
@@ -13,5 +14,9 @@ export async function appendMemoryWikiLog(
|
||||
): Promise<void> {
|
||||
const logPath = path.join(vaultRoot, ".openclaw-wiki", "log.jsonl");
|
||||
await fs.mkdir(path.dirname(logPath), { recursive: true });
|
||||
await fs.appendFile(logPath, `${JSON.stringify(entry)}\n`, "utf8");
|
||||
await appendRegularFile({
|
||||
filePath: logPath,
|
||||
content: `${JSON.stringify(entry)}\n`,
|
||||
rejectSymlinkParents: true,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { constants as fsConstants } from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { FsSafeError, root as fsRoot } from "openclaw/plugin-sdk/security-runtime";
|
||||
import {
|
||||
setImportedSourceEntry,
|
||||
shouldSkipImportedSourceWrite,
|
||||
@@ -9,123 +7,6 @@ import {
|
||||
} from "./source-sync-state.js";
|
||||
|
||||
type ImportedSourceState = Parameters<typeof shouldSkipImportedSourceWrite>[0]["state"];
|
||||
type FileStats = Awaited<ReturnType<typeof fs.lstat>>;
|
||||
|
||||
function isPathInside(parent: string, child: string): boolean {
|
||||
const relative = path.relative(parent, child);
|
||||
return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative));
|
||||
}
|
||||
|
||||
async function resolveWritableVaultPagePath(params: {
|
||||
vaultRoot: string;
|
||||
pagePath: string;
|
||||
}): Promise<{
|
||||
pageAbsPath: string;
|
||||
pageDir: string;
|
||||
pageDirRealPath: string;
|
||||
vaultRealPath: string;
|
||||
existing: FileStats | null;
|
||||
}> {
|
||||
const vaultAbsPath = path.resolve(params.vaultRoot);
|
||||
const pageAbsPath = path.resolve(vaultAbsPath, params.pagePath);
|
||||
if (!isPathInside(vaultAbsPath, pageAbsPath)) {
|
||||
throw new Error(`Refusing to write imported source page outside vault: ${params.pagePath}`);
|
||||
}
|
||||
|
||||
const vaultRealPath = await fs.realpath(vaultAbsPath);
|
||||
const pageDir = path.dirname(pageAbsPath);
|
||||
await fs.mkdir(pageDir, { recursive: true });
|
||||
const pageDirRealPath = await fs.realpath(pageDir);
|
||||
if (!isPathInside(vaultRealPath, pageDirRealPath)) {
|
||||
throw new Error(`Refusing to write imported source page outside vault: ${params.pagePath}`);
|
||||
}
|
||||
|
||||
const existing = await fs.lstat(pageAbsPath).catch((err: unknown) => {
|
||||
if ((err as NodeJS.ErrnoException)?.code === "ENOENT") {
|
||||
return null;
|
||||
}
|
||||
throw err;
|
||||
});
|
||||
if (existing?.isSymbolicLink()) {
|
||||
throw new Error(`Refusing to write imported source page through symlink: ${params.pagePath}`);
|
||||
}
|
||||
if (existing && !existing.isFile()) {
|
||||
throw new Error(`Refusing to write imported source page over non-file: ${params.pagePath}`);
|
||||
}
|
||||
return { pageAbsPath, pageDir, pageDirRealPath, vaultRealPath, existing };
|
||||
}
|
||||
|
||||
async function assertWritablePageDir(params: {
|
||||
pageDir: string;
|
||||
pageDirRealPath: string;
|
||||
vaultRealPath: string;
|
||||
pagePath: string;
|
||||
}): Promise<void> {
|
||||
const currentPageDirRealPath = await fs.realpath(params.pageDir);
|
||||
if (
|
||||
currentPageDirRealPath !== params.pageDirRealPath ||
|
||||
!isPathInside(params.vaultRealPath, currentPageDirRealPath)
|
||||
) {
|
||||
throw new Error(`Refusing to write imported source page outside vault: ${params.pagePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function validateDestinationForReplace(filePath: string, pagePath: string): Promise<void> {
|
||||
const existing = await fs.lstat(filePath).catch((err: unknown) => {
|
||||
if ((err as NodeJS.ErrnoException)?.code === "ENOENT") {
|
||||
return null;
|
||||
}
|
||||
throw err;
|
||||
});
|
||||
if (existing?.isSymbolicLink()) {
|
||||
throw new Error(`Refusing to write imported source page through symlink: ${pagePath}`);
|
||||
}
|
||||
if (existing && !existing.isFile()) {
|
||||
throw new Error(`Refusing to write imported source page over non-file: ${pagePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function writeFileAtomicInVault(params: {
|
||||
filePath: string;
|
||||
pageDir: string;
|
||||
pageDirRealPath: string;
|
||||
vaultRealPath: string;
|
||||
pagePath: string;
|
||||
content: string;
|
||||
}): Promise<void> {
|
||||
const noFollow = fsConstants.O_NOFOLLOW ?? 0;
|
||||
await assertWritablePageDir(params);
|
||||
|
||||
const tempPath = path.join(params.pageDir, `.openclaw-wiki-${process.pid}-${randomUUID()}.tmp`);
|
||||
let shouldRemoveTemp = true;
|
||||
try {
|
||||
const handle = await fs.open(
|
||||
tempPath,
|
||||
fsConstants.O_WRONLY | fsConstants.O_CREAT | fsConstants.O_EXCL | noFollow,
|
||||
0o600,
|
||||
);
|
||||
try {
|
||||
const tempStat = await handle.stat();
|
||||
if (!tempStat.isFile() || tempStat.nlink !== 1) {
|
||||
throw new Error(
|
||||
`Refusing to write imported source page through unsafe temp file: ${params.pagePath}`,
|
||||
);
|
||||
}
|
||||
await handle.writeFile(params.content, "utf8");
|
||||
} finally {
|
||||
await handle.close();
|
||||
}
|
||||
await assertWritablePageDir(params);
|
||||
await validateDestinationForReplace(params.filePath, params.pagePath);
|
||||
await fs.rename(tempPath, params.filePath);
|
||||
shouldRemoveTemp = false;
|
||||
await assertWritablePageDir(params);
|
||||
} finally {
|
||||
if (shouldRemoveTemp) {
|
||||
await fs.rm(tempPath, { force: true });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function writeImportedSourcePage(params: {
|
||||
vaultRoot: string;
|
||||
@@ -139,15 +20,15 @@ export async function writeImportedSourcePage(params: {
|
||||
state: ImportedSourceState;
|
||||
buildRendered: (raw: string, updatedAt: string) => string;
|
||||
}): Promise<{ pagePath: string; changed: boolean; created: boolean }> {
|
||||
const {
|
||||
pageAbsPath,
|
||||
pageDir,
|
||||
pageDirRealPath,
|
||||
vaultRealPath,
|
||||
existing: pageStat,
|
||||
} = await resolveWritableVaultPagePath({
|
||||
vaultRoot: params.vaultRoot,
|
||||
pagePath: params.pagePath,
|
||||
const vault = await fsRoot(params.vaultRoot);
|
||||
const pageStat = await vault.stat(params.pagePath).catch((error: unknown) => {
|
||||
if (
|
||||
error instanceof FsSafeError &&
|
||||
(error.code === "not-found" || error.code === "path-alias")
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
const created = !pageStat;
|
||||
const updatedAt = new Date(params.sourceUpdatedAtMs).toISOString();
|
||||
@@ -167,16 +48,22 @@ export async function writeImportedSourcePage(params: {
|
||||
|
||||
const raw = await fs.readFile(params.sourcePath, "utf8");
|
||||
const rendered = params.buildRendered(raw, updatedAt);
|
||||
const existing = pageStat ? await fs.readFile(pageAbsPath, "utf8").catch(() => "") : "";
|
||||
const existing = pageStat ? await vault.readText(params.pagePath).catch(() => "") : "";
|
||||
if (existing !== rendered) {
|
||||
await writeFileAtomicInVault({
|
||||
filePath: pageAbsPath,
|
||||
pageDir,
|
||||
pageDirRealPath,
|
||||
vaultRealPath,
|
||||
pagePath: params.pagePath,
|
||||
content: rendered,
|
||||
});
|
||||
try {
|
||||
if (pageStat && pageStat.nlink > 1) {
|
||||
await vault.remove(params.pagePath);
|
||||
}
|
||||
await vault.write(params.pagePath, rendered);
|
||||
} catch (error) {
|
||||
if (error instanceof FsSafeError) {
|
||||
throw new Error(
|
||||
`Refusing to write imported source page through symlink: ${params.pagePath}`,
|
||||
{ cause: error },
|
||||
);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
setImportedSourceEntry({
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { listActiveMemoryPublicArtifacts } from "openclaw/plugin-sdk/memory-host-core";
|
||||
import { pathExists } from "openclaw/plugin-sdk/security-runtime";
|
||||
import type { OpenClawConfig } from "../api.js";
|
||||
import type { ResolvedMemoryWikiConfig } from "./config.js";
|
||||
import { inferWikiPageKind, toWikiPageSummary, type WikiPageKind } from "./markdown.js";
|
||||
@@ -65,15 +66,6 @@ type ResolveMemoryWikiStatusDeps = {
|
||||
resolveCommand?: (command: string) => Promise<string | null>;
|
||||
};
|
||||
|
||||
async function pathExists(inputPath: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(inputPath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function collectVaultCounts(vaultPath: string): Promise<{
|
||||
pageCounts: Record<WikiPageKind, number>;
|
||||
sourceCounts: MemoryWikiStatus["sourceCounts"];
|
||||
|
||||
@@ -4,6 +4,7 @@ import {
|
||||
replaceManagedMarkdownBlock,
|
||||
withTrailingNewline,
|
||||
} from "openclaw/plugin-sdk/memory-host-markdown";
|
||||
import { FsSafeError, pathExists, root as fsRoot } from "openclaw/plugin-sdk/security-runtime";
|
||||
import type { ResolvedMemoryWikiConfig } from "./config.js";
|
||||
import { appendMemoryWikiLog } from "./log.js";
|
||||
|
||||
@@ -72,25 +73,22 @@ This vault is maintained by the OpenClaw memory-wiki plugin.
|
||||
`);
|
||||
}
|
||||
|
||||
async function pathExists(inputPath: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(inputPath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function writeFileIfMissing(
|
||||
filePath: string,
|
||||
rootDir: string,
|
||||
relativePath: string,
|
||||
content: string,
|
||||
createdFiles: string[],
|
||||
): Promise<void> {
|
||||
if (await pathExists(filePath)) {
|
||||
return;
|
||||
const root = await fsRoot(rootDir);
|
||||
try {
|
||||
await root.create(relativePath, content);
|
||||
} catch (err) {
|
||||
if (err instanceof FsSafeError && err.code === "already-exists") {
|
||||
return;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
await fs.writeFile(filePath, content, "utf8");
|
||||
createdFiles.push(filePath);
|
||||
createdFiles.push(path.join(rootDir, relativePath));
|
||||
}
|
||||
|
||||
export async function initializeMemoryWikiVault(
|
||||
@@ -114,20 +112,18 @@ export async function initializeMemoryWikiVault(
|
||||
await fs.mkdir(fullPath, { recursive: true });
|
||||
}
|
||||
|
||||
await writeFileIfMissing(path.join(rootDir, "AGENTS.md"), buildAgentsMarkdown(), createdFiles);
|
||||
await writeFileIfMissing(rootDir, "AGENTS.md", buildAgentsMarkdown(), createdFiles);
|
||||
await writeFileIfMissing(rootDir, "WIKI.md", buildWikiOverviewMarkdown(config), createdFiles);
|
||||
await writeFileIfMissing(rootDir, "index.md", buildIndexMarkdown(), createdFiles);
|
||||
await writeFileIfMissing(
|
||||
path.join(rootDir, "WIKI.md"),
|
||||
buildWikiOverviewMarkdown(config),
|
||||
createdFiles,
|
||||
);
|
||||
await writeFileIfMissing(path.join(rootDir, "index.md"), buildIndexMarkdown(), createdFiles);
|
||||
await writeFileIfMissing(
|
||||
path.join(rootDir, "inbox.md"),
|
||||
rootDir,
|
||||
"inbox.md",
|
||||
withTrailingNewline("# Inbox\n\nDrop raw ideas, questions, and source links here.\n"),
|
||||
createdFiles,
|
||||
);
|
||||
await writeFileIfMissing(
|
||||
path.join(rootDir, ".openclaw-wiki", "state.json"),
|
||||
rootDir,
|
||||
".openclaw-wiki/state.json",
|
||||
withTrailingNewline(
|
||||
JSON.stringify(
|
||||
{
|
||||
@@ -141,7 +137,7 @@ export async function initializeMemoryWikiVault(
|
||||
),
|
||||
createdFiles,
|
||||
);
|
||||
await writeFileIfMissing(path.join(rootDir, ".openclaw-wiki", "log.jsonl"), "", createdFiles);
|
||||
await writeFileIfMissing(rootDir, ".openclaw-wiki/log.jsonl", "", createdFiles);
|
||||
|
||||
if (createdDirectories.length > 0 || createdFiles.length > 0) {
|
||||
await appendMemoryWikiLog(rootDir, {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { mkdirSync, mkdtempSync, readFileSync, rmSync } from "node:fs";
|
||||
import { readFileSync } from "node:fs";
|
||||
import path from "node:path";
|
||||
import {
|
||||
CHROMIUM_FULL_VERSION,
|
||||
@@ -21,7 +21,7 @@ import {
|
||||
fetchWithSsrFGuard,
|
||||
ssrfPolicyFromHttpBaseUrlAllowedHostname,
|
||||
} from "openclaw/plugin-sdk/ssrf-runtime";
|
||||
import { resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
|
||||
import { tempWorkspace, resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
|
||||
import { edgeTTS, inferEdgeExtension } from "./tts.js";
|
||||
|
||||
const DEFAULT_EDGE_VOICE = "en-US-MichelleNeural";
|
||||
@@ -236,9 +236,11 @@ export function buildMicrosoftSpeechProvider(): SpeechProviderPlugin {
|
||||
isConfigured: ({ providerConfig }) => readMicrosoftProviderConfig(providerConfig).enabled,
|
||||
synthesize: async (req) => {
|
||||
const config = readMicrosoftProviderConfig(req.providerConfig);
|
||||
const tempRoot = resolvePreferredOpenClawTmpDir();
|
||||
mkdirSync(tempRoot, { recursive: true, mode: 0o700 });
|
||||
const tempDir = mkdtempSync(path.join(tempRoot, "tts-microsoft-"));
|
||||
const temp = await tempWorkspace({
|
||||
rootDir: resolvePreferredOpenClawTmpDir(),
|
||||
prefix: "tts-microsoft-",
|
||||
});
|
||||
const tempDir = temp.dir;
|
||||
const overrideVoice = trimToUndefined(req.providerOverrides?.voice);
|
||||
let voice = overrideVoice ?? config.voice;
|
||||
let lang = config.lang;
|
||||
@@ -286,7 +288,7 @@ export function buildMicrosoftSpeechProvider(): SpeechProviderPlugin {
|
||||
return await runEdge(outputFormat);
|
||||
}
|
||||
} finally {
|
||||
rmSync(tempDir, { recursive: true, force: true });
|
||||
await temp.cleanup();
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
@@ -6,6 +6,7 @@ import {
|
||||
MIGRATION_REASON_MISSING_SOURCE_OR_TARGET,
|
||||
} from "openclaw/plugin-sdk/migration";
|
||||
import type { MigrationItem } from "openclaw/plugin-sdk/plugin-entry";
|
||||
import { appendRegularFile, pathExists } from "openclaw/plugin-sdk/security-runtime";
|
||||
|
||||
export function resolveHomePath(input: string): string {
|
||||
if (input === "~") {
|
||||
@@ -18,12 +19,7 @@ export function resolveHomePath(input: string): string {
|
||||
}
|
||||
|
||||
export async function exists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
return await pathExists(filePath);
|
||||
}
|
||||
|
||||
export async function isDirectory(dirPath: string): Promise<boolean> {
|
||||
@@ -92,7 +88,11 @@ export async function appendItem(item: MigrationItem): Promise<MigrationItem> {
|
||||
: path.basename(item.source);
|
||||
const header = `\n\n<!-- Imported from Claude: ${label} -->\n\n`;
|
||||
await fs.mkdir(path.dirname(item.target), { recursive: true });
|
||||
await fs.appendFile(item.target, `${header}${content.trimEnd()}\n`, "utf8");
|
||||
await appendRegularFile({
|
||||
filePath: item.target,
|
||||
content: `${header}${content.trimEnd()}\n`,
|
||||
rejectSymlinkParents: true,
|
||||
});
|
||||
return { ...item, status: "migrated" };
|
||||
} catch (err) {
|
||||
return markMigrationItemError(item, err instanceof Error ? err.message : String(err));
|
||||
|
||||
@@ -6,6 +6,7 @@ import {
|
||||
MIGRATION_REASON_MISSING_SOURCE_OR_TARGET,
|
||||
} from "openclaw/plugin-sdk/migration";
|
||||
import type { MigrationItem } from "openclaw/plugin-sdk/plugin-entry";
|
||||
import { appendRegularFile, pathExists } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { parse as parseYaml } from "yaml";
|
||||
|
||||
export function resolveHomePath(input: string): string {
|
||||
@@ -19,12 +20,7 @@ export function resolveHomePath(input: string): string {
|
||||
}
|
||||
|
||||
export async function exists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
return await pathExists(filePath);
|
||||
}
|
||||
|
||||
export async function isDirectory(dirPath: string): Promise<boolean> {
|
||||
@@ -126,7 +122,11 @@ export async function appendItem(item: MigrationItem): Promise<MigrationItem> {
|
||||
const content = await fs.readFile(item.source, "utf8");
|
||||
const header = `\n\n<!-- Imported from Hermes: ${path.basename(item.source)} -->\n\n`;
|
||||
await fs.mkdir(path.dirname(item.target), { recursive: true });
|
||||
await fs.appendFile(item.target, `${header}${content.trimEnd()}\n`, "utf8");
|
||||
await appendRegularFile({
|
||||
filePath: item.target,
|
||||
content: `${header}${content.trimEnd()}\n`,
|
||||
rejectSymlinkParents: true,
|
||||
});
|
||||
return { ...item, status: "migrated" };
|
||||
} catch (err) {
|
||||
return markMigrationItemError(item, err instanceof Error ? err.message : String(err));
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { replaceFileAtomic } from "openclaw/plugin-sdk/security-runtime";
|
||||
|
||||
/** Default cooldown between reflections per session (5 minutes). */
|
||||
export const DEFAULT_COOLDOWN_MS = 300_000;
|
||||
@@ -93,8 +93,11 @@ export async function storeSessionLearning(params: {
|
||||
learnings = learnings.slice(-10);
|
||||
}
|
||||
|
||||
await fs.mkdir(path.dirname(learningsFile), { recursive: true });
|
||||
await fs.writeFile(learningsFile, JSON.stringify(learnings, null, 2), "utf-8");
|
||||
await replaceFileAtomic({
|
||||
filePath: learningsFile,
|
||||
content: JSON.stringify(learnings, null, 2),
|
||||
tempPrefix: ".msteams-learnings",
|
||||
});
|
||||
if (!exists && legacyLearningsFile !== learningsFile) {
|
||||
await fs.rm(legacyLearningsFile, { force: true }).catch(() => undefined);
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { resolveThreadSessionKeys } from "openclaw/plugin-sdk/routing";
|
||||
import { appendRegularFile } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { normalizeOptionalLowercaseString } from "openclaw/plugin-sdk/text-runtime";
|
||||
import { formatUnknownError } from "./errors.js";
|
||||
import { buildFeedbackEvent, runFeedbackReflection } from "./feedback-reflection.js";
|
||||
@@ -256,7 +256,11 @@ async function handleFeedbackInvoke(
|
||||
});
|
||||
const safeKey = route.sessionKey.replace(/[^a-zA-Z0-9_-]/g, "_");
|
||||
const transcriptFile = path.join(storePath, `${safeKey}.jsonl`);
|
||||
await fs.appendFile(transcriptFile, JSON.stringify(feedbackEvent) + "\n", "utf-8").catch(() => {
|
||||
await appendRegularFile({
|
||||
filePath: transcriptFile,
|
||||
content: `${JSON.stringify(feedbackEvent)}\n`,
|
||||
rejectSymlinkParents: true,
|
||||
}).catch(() => {
|
||||
// Best effort — transcript dir may not exist yet
|
||||
});
|
||||
} catch {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import fs from "node:fs";
|
||||
import { withFileLock as withPathLock } from "openclaw/plugin-sdk/file-lock";
|
||||
import { readJsonFileWithFallback, writeJsonFileAtomically } from "openclaw/plugin-sdk/json-store";
|
||||
import { pathExists } from "openclaw/plugin-sdk/security-runtime";
|
||||
|
||||
const STORE_LOCK_OPTIONS = {
|
||||
retries: {
|
||||
@@ -25,9 +25,7 @@ export async function writeJsonFile(filePath: string, value: unknown): Promise<v
|
||||
}
|
||||
|
||||
async function ensureJsonFile(filePath: string, fallback: unknown) {
|
||||
try {
|
||||
await fs.promises.access(filePath);
|
||||
} catch {
|
||||
if (!(await pathExists(filePath))) {
|
||||
await writeJsonFile(filePath, fallback);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { readFileSync, writeFileSync, mkdirSync } from "node:fs";
|
||||
import { dirname } from "node:path";
|
||||
import { readFileSync } from "node:fs";
|
||||
import { basename, dirname } from "node:path";
|
||||
import { privateFileStoreSync } from "openclaw/plugin-sdk/security-runtime";
|
||||
import type { MSTeamsConfig } from "../runtime-api.js";
|
||||
import type { MSTeamsDelegatedTokens } from "./oauth.shared.js";
|
||||
import { refreshMSTeamsDelegatedTokens } from "./oauth.token.js";
|
||||
@@ -158,9 +159,7 @@ export function loadDelegatedTokens(): MSTeamsDelegatedTokens | undefined {
|
||||
|
||||
export function saveDelegatedTokens(tokens: MSTeamsDelegatedTokens): void {
|
||||
const tokenPath = resolveDelegatedTokenPath();
|
||||
const dir = dirname(tokenPath);
|
||||
mkdirSync(dir, { recursive: true });
|
||||
writeFileSync(tokenPath, JSON.stringify(tokens, null, 2), "utf8");
|
||||
privateFileStoreSync(dirname(tokenPath)).writeJson(basename(tokenPath), tokens);
|
||||
}
|
||||
|
||||
export async function resolveDelegatedAccessToken(params: {
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import crypto from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { safeParseJsonWithSchema } from "openclaw/plugin-sdk/extension-shared";
|
||||
import { privateFileStore } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { z } from "zod";
|
||||
import { getNostrRuntime } from "./runtime.js";
|
||||
|
||||
@@ -114,13 +113,14 @@ export async function readNostrBusState(params: {
|
||||
}): Promise<NostrBusState | null> {
|
||||
const filePath = resolveNostrStatePath(params.accountId, params.env);
|
||||
try {
|
||||
const raw = await fs.readFile(filePath, "utf-8");
|
||||
return safeParseState(raw);
|
||||
} catch (err) {
|
||||
const code = (err as { code?: string }).code;
|
||||
if (code === "ENOENT") {
|
||||
const raw = await privateFileStore(path.dirname(filePath)).readTextIfExists(
|
||||
path.basename(filePath),
|
||||
);
|
||||
if (raw === null) {
|
||||
return null;
|
||||
}
|
||||
return safeParseState(raw);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -133,20 +133,15 @@ export async function writeNostrBusState(params: {
|
||||
env?: NodeJS.ProcessEnv;
|
||||
}): Promise<void> {
|
||||
const filePath = resolveNostrStatePath(params.accountId, params.env);
|
||||
const dir = path.dirname(filePath);
|
||||
await fs.mkdir(dir, { recursive: true, mode: 0o700 });
|
||||
const tmp = path.join(dir, `${path.basename(filePath)}.${crypto.randomUUID()}.tmp`);
|
||||
const payload: NostrBusState = {
|
||||
version: STORE_VERSION,
|
||||
lastProcessedAt: params.lastProcessedAt,
|
||||
gatewayStartedAt: params.gatewayStartedAt,
|
||||
recentEventIds: (params.recentEventIds ?? []).filter((x): x is string => typeof x === "string"),
|
||||
};
|
||||
await fs.writeFile(tmp, `${JSON.stringify(payload, null, 2)}\n`, {
|
||||
encoding: "utf-8",
|
||||
await privateFileStore(path.dirname(filePath)).writeJson(path.basename(filePath), payload, {
|
||||
trailingNewline: true,
|
||||
});
|
||||
await fs.chmod(tmp, 0o600);
|
||||
await fs.rename(tmp, filePath);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -187,13 +182,14 @@ export async function readNostrProfileState(params: {
|
||||
}): Promise<NostrProfileState | null> {
|
||||
const filePath = resolveNostrProfileStatePath(params.accountId, params.env);
|
||||
try {
|
||||
const raw = await fs.readFile(filePath, "utf-8");
|
||||
return safeParseProfileState(raw);
|
||||
} catch (err) {
|
||||
const code = (err as { code?: string }).code;
|
||||
if (code === "ENOENT") {
|
||||
const raw = await privateFileStore(path.dirname(filePath)).readTextIfExists(
|
||||
path.basename(filePath),
|
||||
);
|
||||
if (raw === null) {
|
||||
return null;
|
||||
}
|
||||
return safeParseProfileState(raw);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -206,18 +202,13 @@ export async function writeNostrProfileState(params: {
|
||||
env?: NodeJS.ProcessEnv;
|
||||
}): Promise<void> {
|
||||
const filePath = resolveNostrProfileStatePath(params.accountId, params.env);
|
||||
const dir = path.dirname(filePath);
|
||||
await fs.mkdir(dir, { recursive: true, mode: 0o700 });
|
||||
const tmp = path.join(dir, `${path.basename(filePath)}.${crypto.randomUUID()}.tmp`);
|
||||
const payload: NostrProfileState = {
|
||||
version: PROFILE_STATE_VERSION,
|
||||
lastPublishedAt: params.lastPublishedAt,
|
||||
lastPublishedEventId: params.lastPublishedEventId,
|
||||
lastPublishResults: params.lastPublishResults,
|
||||
};
|
||||
await fs.writeFile(tmp, `${JSON.stringify(payload, null, 2)}\n`, {
|
||||
encoding: "utf-8",
|
||||
await privateFileStore(path.dirname(filePath)).writeJson(path.basename(filePath), payload, {
|
||||
trailingNewline: true,
|
||||
});
|
||||
await fs.chmod(tmp, 0o600);
|
||||
await fs.rename(tmp, filePath);
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@ import {
|
||||
resolvePreferredOpenClawTmpDir,
|
||||
runSshSandboxCommand,
|
||||
sanitizeEnvVars,
|
||||
withTempWorkspace,
|
||||
} from "openclaw/plugin-sdk/sandbox";
|
||||
import { normalizeLowercaseStringOrEmpty } from "openclaw/plugin-sdk/text-runtime";
|
||||
import type { OpenShellSandboxBackend } from "./backend.types.js";
|
||||
@@ -411,65 +412,61 @@ class OpenShellSandboxBackendImpl {
|
||||
}
|
||||
|
||||
private async syncWorkspaceFromRemote(): Promise<void> {
|
||||
const tmpDir = await fs.mkdtemp(
|
||||
path.join(resolveOpenShellTmpRoot(), "openclaw-openshell-sync-"),
|
||||
await withTempWorkspace(
|
||||
{ rootDir: resolveOpenShellTmpRoot(), prefix: "openclaw-openshell-sync-" },
|
||||
async ({ dir: tmpDir }) => {
|
||||
const result = await runOpenShellCli({
|
||||
context: this.params.execContext,
|
||||
args: [
|
||||
"sandbox",
|
||||
"download",
|
||||
this.params.execContext.sandboxName,
|
||||
this.params.remoteWorkspaceDir,
|
||||
tmpDir,
|
||||
],
|
||||
cwd: this.params.createParams.workspaceDir,
|
||||
});
|
||||
if (result.code !== 0) {
|
||||
throw new Error(result.stderr.trim() || "openshell sandbox download failed");
|
||||
}
|
||||
await replaceDirectoryContents({
|
||||
sourceDir: tmpDir,
|
||||
targetDir: this.params.createParams.workspaceDir,
|
||||
// Never sync trusted host hook directories or repository metadata from
|
||||
// the remote sandbox.
|
||||
excludeDirs: DEFAULT_OPEN_SHELL_MIRROR_EXCLUDE_DIRS,
|
||||
});
|
||||
},
|
||||
);
|
||||
try {
|
||||
const result = await runOpenShellCli({
|
||||
context: this.params.execContext,
|
||||
args: [
|
||||
"sandbox",
|
||||
"download",
|
||||
this.params.execContext.sandboxName,
|
||||
this.params.remoteWorkspaceDir,
|
||||
tmpDir,
|
||||
],
|
||||
cwd: this.params.createParams.workspaceDir,
|
||||
});
|
||||
if (result.code !== 0) {
|
||||
throw new Error(result.stderr.trim() || "openshell sandbox download failed");
|
||||
}
|
||||
await replaceDirectoryContents({
|
||||
sourceDir: tmpDir,
|
||||
targetDir: this.params.createParams.workspaceDir,
|
||||
// Never sync trusted host hook directories or repository metadata from
|
||||
// the remote sandbox.
|
||||
excludeDirs: DEFAULT_OPEN_SHELL_MIRROR_EXCLUDE_DIRS,
|
||||
});
|
||||
} finally {
|
||||
await fs.rm(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
private async uploadPathToRemote(localPath: string, remotePath: string): Promise<void> {
|
||||
const tmpDir = await fs.mkdtemp(
|
||||
path.join(resolveOpenShellTmpRoot(), "openclaw-openshell-upload-"),
|
||||
await withTempWorkspace(
|
||||
{ rootDir: resolveOpenShellTmpRoot(), prefix: "openclaw-openshell-upload-" },
|
||||
async ({ dir: tmpDir }) => {
|
||||
// Stage a symlink-free snapshot so upload never dereferences host paths
|
||||
// outside the mirrored workspace tree.
|
||||
await stageDirectoryContents({
|
||||
sourceDir: localPath,
|
||||
targetDir: tmpDir,
|
||||
});
|
||||
const result = await runOpenShellCli({
|
||||
context: this.params.execContext,
|
||||
args: [
|
||||
"sandbox",
|
||||
"upload",
|
||||
"--no-git-ignore",
|
||||
this.params.execContext.sandboxName,
|
||||
tmpDir,
|
||||
remotePath,
|
||||
],
|
||||
cwd: this.params.createParams.workspaceDir,
|
||||
});
|
||||
if (result.code !== 0) {
|
||||
throw new Error(result.stderr.trim() || "openshell sandbox upload failed");
|
||||
}
|
||||
},
|
||||
);
|
||||
try {
|
||||
// Stage a symlink-free snapshot so upload never dereferences host paths
|
||||
// outside the mirrored workspace tree.
|
||||
await stageDirectoryContents({
|
||||
sourceDir: localPath,
|
||||
targetDir: tmpDir,
|
||||
});
|
||||
const result = await runOpenShellCli({
|
||||
context: this.params.execContext,
|
||||
args: [
|
||||
"sandbox",
|
||||
"upload",
|
||||
"--no-git-ignore",
|
||||
this.params.execContext.sandboxName,
|
||||
tmpDir,
|
||||
remotePath,
|
||||
],
|
||||
cwd: this.params.createParams.workspaceDir,
|
||||
});
|
||||
if (result.code !== 0) {
|
||||
throw new Error(result.stderr.trim() || "openshell sandbox upload failed");
|
||||
}
|
||||
} finally {
|
||||
await fs.rm(tmpDir, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
private async maybeSeedRemoteWorkspace(): Promise<void> {
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
import fs from "node:fs";
|
||||
import fsPromises from "node:fs/promises";
|
||||
import type { FileHandle } from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { writeFileWithinRoot } from "openclaw/plugin-sdk/file-access-runtime";
|
||||
import { root as fsRoot } from "openclaw/plugin-sdk/file-access-runtime";
|
||||
import type {
|
||||
SandboxFsBridge,
|
||||
SandboxFsStat,
|
||||
SandboxResolvedPath,
|
||||
} from "openclaw/plugin-sdk/sandbox";
|
||||
import { createWritableRenameTargetResolver } from "openclaw/plugin-sdk/sandbox";
|
||||
import { isPathInside } from "openclaw/plugin-sdk/security-runtime";
|
||||
import type { OpenShellFsBridgeContext, OpenShellSandboxBackend } from "./backend.types.js";
|
||||
import { movePathWithCopyFallback } from "./mirror.js";
|
||||
|
||||
@@ -52,15 +51,28 @@ class OpenShellFsBridge implements SandboxFsBridge {
|
||||
}): Promise<Buffer> {
|
||||
const target = this.resolveTarget(params);
|
||||
const hostPath = this.requireHostPath(target);
|
||||
const handle = await openPinnedReadableFile({
|
||||
absolutePath: hostPath,
|
||||
rootPath: target.mountHostRoot,
|
||||
containerPath: target.containerPath,
|
||||
});
|
||||
let opened: Awaited<ReturnType<Awaited<ReturnType<typeof fsRoot>>["open"]>>;
|
||||
try {
|
||||
return (await handle.readFile()) as Buffer;
|
||||
} finally {
|
||||
await handle.close();
|
||||
await assertLocalPathSafety({
|
||||
target,
|
||||
root: target.mountHostRoot,
|
||||
allowMissingLeaf: false,
|
||||
allowFinalSymlinkForUnlink: false,
|
||||
});
|
||||
const root = await fsRoot(target.mountHostRoot);
|
||||
opened = await root.open(path.relative(target.mountHostRoot, hostPath), {
|
||||
hardlinks: "reject",
|
||||
});
|
||||
try {
|
||||
return (await opened.handle.readFile()) as Buffer;
|
||||
} finally {
|
||||
await opened.handle.close();
|
||||
}
|
||||
} catch (err) {
|
||||
throw new Error(
|
||||
`Sandbox boundary checks failed; cannot read files: ${target.containerPath}`,
|
||||
{ cause: err },
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -84,10 +96,8 @@ class OpenShellFsBridge implements SandboxFsBridge {
|
||||
const buffer = Buffer.isBuffer(params.data)
|
||||
? params.data
|
||||
: Buffer.from(params.data, params.encoding ?? "utf8");
|
||||
await writeFileWithinRoot({
|
||||
rootDir: target.mountHostRoot,
|
||||
relativePath: path.relative(target.mountHostRoot, hostPath),
|
||||
data: buffer,
|
||||
const root = await fsRoot(target.mountHostRoot);
|
||||
await root.write(path.relative(target.mountHostRoot, hostPath), buffer, {
|
||||
mkdir: params.mkdir,
|
||||
});
|
||||
await this.backend.syncLocalPathToRemote(hostPath, target.containerPath);
|
||||
@@ -291,11 +301,6 @@ class OpenShellFsBridge implements SandboxFsBridge {
|
||||
}
|
||||
}
|
||||
|
||||
function isPathInside(root: string, target: string): boolean {
|
||||
const relative = path.relative(root, target);
|
||||
return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative));
|
||||
}
|
||||
|
||||
async function assertLocalPathSafety(params: {
|
||||
target: ResolvedMountPath;
|
||||
root: string;
|
||||
@@ -358,199 +363,8 @@ async function resolveCanonicalCandidate(targetPath: string): Promise<string> {
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Opens `params.absolutePath` for reading with the fd pinned before any
 * boundary verification runs, so every later check inspects the exact inode
 * that will be read (no check-then-open TOCTOU window).
 *
 * Verification layers, in order:
 *  1. literal string-prefix containment of the caller path under the root;
 *  2. `O_NOFOLLOW` open (where supported) so the leaf cannot be a symlink;
 *  3. fstat: single-linked regular file only;
 *  4. kernel fd-path readlink containment (Linux), or — where fd-path
 *     readback is unavailable — a symlink-free ancestor walk plus a single
 *     stat identity cross-check against the pinned fd.
 *
 * Returns the open FileHandle on success; closes it and rethrows on any
 * failed check.
 */
async function openPinnedReadableFile(params: {
  absolutePath: string;
  rootPath: string;
  containerPath: string;
}): Promise<FileHandle> {
  // The literal root is what `resolveTarget` joins caller-provided relative
  // paths against, so pre-open containment must be checked in literal form.
  // The canonical root is derived separately and used for the post-open
  // path checks (fd-path readlink and realpath cross-check), so a workspace
  // that is itself configured as a symlink still works.
  const literalRoot = path.resolve(params.rootPath);
  const canonicalRoot = await fsPromises.realpath(literalRoot).catch(() => literalRoot);
  const literalPath = path.resolve(params.absolutePath);
  // Cheap string-prefix check on the caller-provided absolute path; no
  // filesystem state is read here, so there is no TOCTOU window. Deeper
  // checks run after the fd is pinned.
  if (!isPathInside(literalRoot, literalPath)) {
    throw new Error(`Sandbox path escapes allowed mounts; cannot access: ${params.containerPath}`);
  }
  const { flags: openReadFlags, supportsNoFollow } = resolveOpenReadFlags();
  // Open first so every later check runs against an fd that is already pinned
  // to one specific inode. `O_NOFOLLOW` prevents the final path component from
  // being a symlink; the ancestor walk below handles parent-directory symlink
  // swaps on platforms where fd-path readlink is not available.
  const handle = await fsPromises.open(literalPath, openReadFlags);
  try {
    const openedStat = await handle.stat();
    // Only single-linked regular files may be read through the bridge.
    if (!openedStat.isFile()) {
      throw new Error(`Sandbox boundary checks failed; cannot read files: ${params.containerPath}`);
    }
    if (openedStat.nlink > 1) {
      throw new Error(`Sandbox boundary checks failed; cannot read files: ${params.containerPath}`);
    }
    const resolvedPath = await resolveOpenedReadablePath(handle.fd);
    if (resolvedPath !== null) {
      // Primary guarantee on Linux: the fd's resolved path is derived from the
      // kernel, so a parent-directory swap cannot make this return a stale path.
      if (!isPathInside(canonicalRoot, resolvedPath)) {
        throw new Error(
          `Sandbox boundary checks failed; cannot read files: ${params.containerPath}`,
        );
      }
      return handle;
    }
    // Fallback for platforms where fd-path readlink is unavailable. On macOS,
    // `/dev/fd/N` is a character device so readlink returns EINVAL; on Windows
    // there is no `/proc` equivalent. With no kernel-backed path readback we
    // must prove the pinned fd is in-root without trusting a separate
    // `realpath` + `lstat` pair that would race between the two awaits. Walk
    // every ancestor between `literalRoot` and `literalPath` — the actual
    // on-disk chain — and reject if any ancestor is a symlink, then use a
    // single `stat` call to confirm that the path still resolves to the
    // same file the fd has pinned. `fs.promises.stat` resolves the path and
    // returns the final file's identity in one syscall, so there is no
    // between-await window for an attacker to race.
    await assertAncestorChainHasNoSymlinks(literalRoot, literalPath, params.containerPath, {
      // On platforms where `O_NOFOLLOW` is unavailable (Windows), the open
      // call would have transparently followed a final-component symlink, so
      // the ancestor walk has to lstat the leaf as well.
      includeLeaf: !supportsNoFollow,
    });
    const currentResolvedStat = await fsPromises.stat(literalPath);
    if (!sameFileIdentity(currentResolvedStat, openedStat)) {
      throw new Error(`Sandbox boundary checks failed; cannot read files: ${params.containerPath}`);
    }
    // Belt-and-suspenders: re-fstat the pinned fd after the identity check and
    // confirm the file type and link count are still trustworthy. A hardlink
    // that appeared between the initial fstat and here is not exploitable for
    // the read (the fd is already pinned to the original inode), but failing
    // closed here keeps the guarantee simple: the bytes we return always come
    // from a file that was a single-linked regular file at verification time.
    const postCheckStat = await handle.stat();
    if (!postCheckStat.isFile() || postCheckStat.nlink > 1) {
      throw new Error(`Sandbox boundary checks failed; cannot read files: ${params.containerPath}`);
    }
    return handle;
  } catch (error) {
    // Never leak the fd on a failed check; close before rethrowing.
    await handle.close();
    throw error;
  }
}
|
||||
|
||||
// Walks each directory between canonicalRoot (exclusive) and
|
||||
// targetAbsolutePath, `lstat`'ing each segment. Rejects if any intermediate
|
||||
// segment is a symlink or a non-directory. By default the final component is
|
||||
// not walked because `O_NOFOLLOW` already protects it on the open call. Pass
|
||||
// `includeLeaf: true` on platforms where `O_NOFOLLOW` is unavailable
|
||||
// (Windows) so a symlinked leaf cannot be followed silently by `open`.
|
||||
async function assertAncestorChainHasNoSymlinks(
|
||||
canonicalRoot: string,
|
||||
targetAbsolutePath: string,
|
||||
containerPath: string,
|
||||
options: { includeLeaf?: boolean } = {},
|
||||
): Promise<void> {
|
||||
const relative = path.relative(canonicalRoot, targetAbsolutePath);
|
||||
if (relative === "" || relative.startsWith("..") || path.isAbsolute(relative)) {
|
||||
return;
|
||||
}
|
||||
const segments = relative.split(path.sep).filter((segment) => segment.length > 0);
|
||||
const lastIndex = options.includeLeaf ? segments.length : segments.length - 1;
|
||||
let cursor = canonicalRoot;
|
||||
for (let i = 0; i < lastIndex; i += 1) {
|
||||
cursor = path.join(cursor, segments[i]);
|
||||
const stat = await fsPromises.lstat(cursor).catch(() => null);
|
||||
if (!stat) {
|
||||
throw new Error(`Sandbox boundary checks failed; cannot read files: ${containerPath}`);
|
||||
}
|
||||
const isLeaf = i === segments.length - 1;
|
||||
if (stat.isSymbolicLink()) {
|
||||
throw new Error(`Sandbox boundary checks failed; cannot read files: ${containerPath}`);
|
||||
}
|
||||
if (!isLeaf && !stat.isDirectory()) {
|
||||
throw new Error(`Sandbox boundary checks failed; cannot read files: ${containerPath}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type ReadOpenFlagsResolution = { flags: number; supportsNoFollow: boolean };
|
||||
|
||||
let readOpenFlagsResolverForTest: (() => ReadOpenFlagsResolution) | undefined;
|
||||
|
||||
function resolveOpenReadFlags(): ReadOpenFlagsResolution {
|
||||
if (readOpenFlagsResolverForTest) {
|
||||
return readOpenFlagsResolverForTest();
|
||||
}
|
||||
const closeOnExec = (fs.constants as Record<string, number>).O_CLOEXEC ?? 0;
|
||||
const supportsNoFollow = typeof fs.constants.O_NOFOLLOW === "number";
|
||||
const noFollow = supportsNoFollow ? fs.constants.O_NOFOLLOW : 0;
|
||||
return {
|
||||
flags: fs.constants.O_RDONLY | noFollow | closeOnExec,
|
||||
supportsNoFollow,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Test-only seam for forcing the open-flag/`O_NOFOLLOW` resolution. Used to
|
||||
* exercise the Windows-style fallback (no `O_NOFOLLOW`, ancestor walk
|
||||
* includes the leaf) on platforms where `fs.constants.O_NOFOLLOW` is a
|
||||
* non-configurable native data property and cannot be patched directly.
|
||||
*
|
||||
* @internal
|
||||
*/
|
||||
export function setReadOpenFlagsResolverForTest(
|
||||
resolver: (() => ReadOpenFlagsResolution) | undefined,
|
||||
_resolver: (() => { flags: number; supportsNoFollow: boolean }) | undefined,
|
||||
): void {
|
||||
readOpenFlagsResolverForTest = resolver;
|
||||
}
|
||||
|
||||
// Resolves the absolute path associated with an open fd via the kernel-backed
|
||||
// `/proc/self/fd/<fd>` (Linux) or `/dev/fd/<fd>` (some BSDs). Returns null
|
||||
// when no fd-path endpoint is available. Note: on macOS `/dev/fd/N` is a
|
||||
// character device rather than a symlink, so `readlink` fails with EINVAL
|
||||
// there and the caller must use the ancestor-walk fallback instead.
|
||||
async function resolveOpenedReadablePath(fd: number): Promise<string | null> {
|
||||
for (const fdPath of [`/proc/self/fd/${fd}`, `/dev/fd/${fd}`]) {
|
||||
try {
|
||||
const openedPath = await fsPromises.readlink(fdPath);
|
||||
return normalizeOpenedReadablePath(openedPath);
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function normalizeOpenedReadablePath(openedPath: string): string {
|
||||
const deletedSuffix = " (deleted)";
|
||||
const withoutDeletedSuffix = openedPath.endsWith(deletedSuffix)
|
||||
? openedPath.slice(0, -deletedSuffix.length)
|
||||
: openedPath;
|
||||
return path.resolve(withoutDeletedSuffix);
|
||||
}
|
||||
|
||||
// File identity comparison with win32-aware `dev=0` handling, matching the
|
||||
// shared `src/infra/file-identity.ts` contract. Kept local because extension
|
||||
// production code is not allowed to reach into core `src/**` by relative
|
||||
// import, and this helper is not yet part of the `openclaw/plugin-sdk/*`
|
||||
// public surface. Stats here come from `FileHandle.stat()` / `fs.promises.stat()`
|
||||
// with no `{ bigint: true }` option, so all fields are numbers.
|
||||
function sameFileIdentity(
|
||||
left: { dev: number; ino: number },
|
||||
right: { dev: number; ino: number },
|
||||
platform: NodeJS.Platform = process.platform,
|
||||
): boolean {
|
||||
if (left.ino !== right.ino) {
|
||||
return false;
|
||||
}
|
||||
if (left.dev === right.dev) {
|
||||
return true;
|
||||
}
|
||||
// On Windows, path-based stat can report `dev=0` while fd-based stat reports
|
||||
// a real volume serial. Treat either side `dev=0` as "unknown device"
|
||||
// rather than a mismatch so legitimate Windows fallback reads are not
|
||||
// rejected.
|
||||
return platform === "win32" && (left.dev === 0 || right.dev === 0);
|
||||
// Retained for older OpenShell tests; pinned reads now delegate to fs-safe.
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { movePathWithCopyFallback } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { normalizeLowercaseStringOrEmpty } from "openclaw/plugin-sdk/text-runtime";
|
||||
|
||||
export const DEFAULT_OPEN_SHELL_MIRROR_EXCLUDE_DIRS = ["hooks", "git-hooks", ".git"] as const;
|
||||
@@ -137,23 +138,4 @@ export async function stageDirectoryContents(params: {
|
||||
}
|
||||
}
|
||||
|
||||
export async function movePathWithCopyFallback(params: {
|
||||
from: string;
|
||||
to: string;
|
||||
}): Promise<void> {
|
||||
try {
|
||||
await fs.rename(params.from, params.to);
|
||||
return;
|
||||
} catch (error) {
|
||||
const code = (error as NodeJS.ErrnoException | null)?.code;
|
||||
if (code !== "EXDEV") {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
await fs.cp(params.from, params.to, {
|
||||
recursive: true,
|
||||
force: true,
|
||||
dereference: false,
|
||||
});
|
||||
await fs.rm(params.from, { recursive: true, force: true });
|
||||
}
|
||||
export { movePathWithCopyFallback };
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import nodeFs from "node:fs";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
@@ -201,22 +200,6 @@ afterEach(async () => {
|
||||
await Promise.all(tempDirs.splice(0).map((dir) => fs.rm(dir, { recursive: true, force: true })));
|
||||
});
|
||||
|
||||
function cloneStatWithDev<T extends nodeFs.Stats | nodeFs.BigIntStats>(
|
||||
stat: T,
|
||||
dev: number | bigint,
|
||||
): T {
|
||||
return Object.defineProperty(
|
||||
Object.create(Object.getPrototypeOf(stat), Object.getOwnPropertyDescriptors(stat)),
|
||||
"dev",
|
||||
{
|
||||
value: dev,
|
||||
configurable: true,
|
||||
enumerable: true,
|
||||
writable: true,
|
||||
},
|
||||
) as T;
|
||||
}
|
||||
|
||||
function createMirrorBackendMock(): OpenShellSandboxBackend {
|
||||
return {
|
||||
id: "openshell",
|
||||
@@ -324,12 +307,11 @@ describe("openshell fs bridges", () => {
|
||||
expect(backend.syncLocalPathToRemote).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("rejects a parent symlink swap that lands outside the sandbox root", async () => {
|
||||
it("rejects a parent symlink that lands outside the sandbox root", async () => {
|
||||
const workspaceDir = await makeTempDir("openclaw-openshell-fs-");
|
||||
const outsideDir = await makeTempDir("openclaw-openshell-outside-");
|
||||
await fs.mkdir(path.join(workspaceDir, "subdir"), { recursive: true });
|
||||
await fs.writeFile(path.join(workspaceDir, "subdir", "secret.txt"), "inside", "utf8");
|
||||
await fs.writeFile(path.join(outsideDir, "secret.txt"), "outside", "utf8");
|
||||
await fs.symlink(outsideDir, path.join(workspaceDir, "subdir"));
|
||||
const backend = createMirrorBackendMock();
|
||||
const sandbox = createSandboxTestContext({
|
||||
overrides: {
|
||||
@@ -342,30 +324,13 @@ describe("openshell fs bridges", () => {
|
||||
|
||||
const { createOpenShellFsBridge } = await import("./fs-bridge.js");
|
||||
const bridge = createOpenShellFsBridge({ sandbox, backend });
|
||||
const originalOpen = fs.open.bind(fs);
|
||||
const targetPath = path.join(workspaceDir, "subdir", "secret.txt");
|
||||
let swapped = false;
|
||||
const openSpy = vi.spyOn(fs, "open").mockImplementation((async (...args: unknown[]) => {
|
||||
const filePath = args[0];
|
||||
if (!swapped && filePath === targetPath) {
|
||||
swapped = true;
|
||||
nodeFs.rmSync(path.join(workspaceDir, "subdir"), { recursive: true, force: true });
|
||||
nodeFs.symlinkSync(outsideDir, path.join(workspaceDir, "subdir"));
|
||||
}
|
||||
return await (originalOpen as (...delegated: unknown[]) => Promise<unknown>)(...args);
|
||||
}) as unknown as typeof fs.open);
|
||||
|
||||
try {
|
||||
await expect(bridge.readFile({ filePath: "subdir/secret.txt" })).rejects.toThrow(
|
||||
"Sandbox boundary checks failed",
|
||||
);
|
||||
expect(openSpy).toHaveBeenCalled();
|
||||
} finally {
|
||||
openSpy.mockRestore();
|
||||
}
|
||||
await expect(bridge.readFile({ filePath: "subdir/secret.txt" })).rejects.toThrow(
|
||||
"Sandbox boundary checks failed",
|
||||
);
|
||||
});
|
||||
|
||||
it("falls back to inode checks when fd path resolution is unavailable", async () => {
|
||||
it("reads regular files through the shared safe fs root", async () => {
|
||||
const workspaceDir = await makeTempDir("openclaw-openshell-fs-");
|
||||
await fs.mkdir(path.join(workspaceDir, "subdir"), { recursive: true });
|
||||
await fs.writeFile(path.join(workspaceDir, "subdir", "secret.txt"), "inside", "utf8");
|
||||
@@ -382,127 +347,17 @@ describe("openshell fs bridges", () => {
|
||||
|
||||
const { createOpenShellFsBridge } = await import("./fs-bridge.js");
|
||||
const bridge = createOpenShellFsBridge({ sandbox, backend });
|
||||
const readlinkSpy = vi
|
||||
.spyOn(fs, "readlink")
|
||||
.mockRejectedValue(new Error("fd path unavailable"));
|
||||
|
||||
try {
|
||||
await expect(bridge.readFile({ filePath: "subdir/secret.txt" })).resolves.toEqual(
|
||||
Buffer.from("inside"),
|
||||
);
|
||||
expect(readlinkSpy).toHaveBeenCalled();
|
||||
} finally {
|
||||
readlinkSpy.mockRestore();
|
||||
}
|
||||
await expect(bridge.readFile({ filePath: "subdir/secret.txt" })).resolves.toEqual(
|
||||
Buffer.from("inside"),
|
||||
);
|
||||
});
|
||||
|
||||
// The shared `sameFileIdentity` contract intentionally treats either-side
|
||||
// `dev=0` as "unknown device" on win32 (path-based stat can legitimately
|
||||
// report `dev=0` there) and only fails closed on other platforms. Skip the
|
||||
// Linux/macOS rejection expectation on Windows runners.
|
||||
it.skipIf(process.platform === "win32")(
|
||||
"rejects fallback reads when path stats report an unknown device id",
|
||||
async () => {
|
||||
const workspaceDir = await makeTempDir("openclaw-openshell-fs-");
|
||||
const targetPath = path.join(workspaceDir, "subdir", "secret.txt");
|
||||
await fs.mkdir(path.join(workspaceDir, "subdir"), { recursive: true });
|
||||
await fs.writeFile(targetPath, "inside", "utf8");
|
||||
|
||||
const backend = createMirrorBackendMock();
|
||||
const sandbox = createSandboxTestContext({
|
||||
overrides: {
|
||||
backendId: "openshell",
|
||||
workspaceDir,
|
||||
agentWorkspaceDir: workspaceDir,
|
||||
containerWorkdir: "/sandbox",
|
||||
},
|
||||
});
|
||||
|
||||
const { createOpenShellFsBridge } = await import("./fs-bridge.js");
|
||||
const bridge = createOpenShellFsBridge({ sandbox, backend });
|
||||
const readlinkSpy = vi
|
||||
.spyOn(fs, "readlink")
|
||||
.mockRejectedValue(new Error("fd path unavailable"));
|
||||
const originalStat = fs.stat.bind(fs);
|
||||
const statSpy = vi.spyOn(fs, "stat").mockImplementation(async (...args) => {
|
||||
const stat = await originalStat(...args);
|
||||
if (args[0] === targetPath) {
|
||||
return cloneStatWithDev(stat, 0);
|
||||
}
|
||||
return stat;
|
||||
});
|
||||
|
||||
try {
|
||||
await expect(bridge.readFile({ filePath: "subdir/secret.txt" })).rejects.toThrow(
|
||||
"Sandbox boundary checks failed",
|
||||
);
|
||||
expect(readlinkSpy).toHaveBeenCalled();
|
||||
expect(statSpy).toHaveBeenCalledWith(targetPath);
|
||||
} finally {
|
||||
statSpy.mockRestore();
|
||||
readlinkSpy.mockRestore();
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
it("rejects fallback reads when an ancestor directory is swapped to a symlink", async () => {
|
||||
const workspaceDir = await makeTempDir("openclaw-openshell-fs-");
|
||||
const outsideDir = await makeTempDir("openclaw-openshell-outside-");
|
||||
await fs.mkdir(path.join(workspaceDir, "subdir"), { recursive: true });
|
||||
await fs.writeFile(path.join(workspaceDir, "subdir", "secret.txt"), "inside", "utf8");
|
||||
await fs.writeFile(path.join(outsideDir, "secret.txt"), "outside", "utf8");
|
||||
|
||||
const backend = createMirrorBackendMock();
|
||||
const sandbox = createSandboxTestContext({
|
||||
overrides: {
|
||||
backendId: "openshell",
|
||||
workspaceDir,
|
||||
agentWorkspaceDir: workspaceDir,
|
||||
containerWorkdir: "/sandbox",
|
||||
},
|
||||
});
|
||||
|
||||
const { createOpenShellFsBridge } = await import("./fs-bridge.js");
|
||||
const bridge = createOpenShellFsBridge({ sandbox, backend });
|
||||
const originalOpen = fs.open.bind(fs);
|
||||
const targetPath = path.join(workspaceDir, "subdir", "secret.txt");
|
||||
let swapped = false;
|
||||
const openSpy = vi.spyOn(fs, "open").mockImplementation((async (...args: unknown[]) => {
|
||||
const filePath = args[0];
|
||||
if (!swapped && filePath === targetPath) {
|
||||
swapped = true;
|
||||
nodeFs.rmSync(path.join(workspaceDir, "subdir"), { recursive: true, force: true });
|
||||
nodeFs.symlinkSync(outsideDir, path.join(workspaceDir, "subdir"));
|
||||
}
|
||||
return await (originalOpen as (...delegated: unknown[]) => Promise<unknown>)(...args);
|
||||
}) as unknown as typeof fs.open);
|
||||
// Force the fallback verification path even on Linux so the ancestor-walk
|
||||
// guard is exercised directly.
|
||||
const readlinkSpy = vi
|
||||
.spyOn(fs, "readlink")
|
||||
.mockRejectedValue(new Error("fd path unavailable"));
|
||||
|
||||
try {
|
||||
await expect(bridge.readFile({ filePath: "subdir/secret.txt" })).rejects.toThrow(
|
||||
"Sandbox boundary checks failed",
|
||||
);
|
||||
expect(openSpy).toHaveBeenCalled();
|
||||
expect(readlinkSpy).toHaveBeenCalled();
|
||||
} finally {
|
||||
readlinkSpy.mockRestore();
|
||||
openSpy.mockRestore();
|
||||
}
|
||||
});
|
||||
|
||||
it("rejects fallback reads of a symlinked leaf when O_NOFOLLOW is unavailable", async () => {
|
||||
it("rejects reads of a symlinked leaf", async () => {
|
||||
const workspaceDir = await makeTempDir("openclaw-openshell-fs-");
|
||||
const outsideDir = await makeTempDir("openclaw-openshell-outside-");
|
||||
await fs.mkdir(path.join(workspaceDir, "subdir"), { recursive: true });
|
||||
await fs.writeFile(path.join(outsideDir, "secret.txt"), "outside", "utf8");
|
||||
// The workspace contains a symlink as the FINAL path component pointing
|
||||
// out-of-root. On Windows `O_NOFOLLOW` is `undefined`, so `open` would
|
||||
// silently traverse the symlink to the outside file; the ancestor walk
|
||||
// must lstat the leaf in that case to fail closed.
|
||||
await fs.symlink(
|
||||
path.join(outsideDir, "secret.txt"),
|
||||
path.join(workspaceDir, "subdir", "secret.txt"),
|
||||
@@ -518,30 +373,12 @@ describe("openshell fs bridges", () => {
|
||||
},
|
||||
});
|
||||
|
||||
const { createOpenShellFsBridge, setReadOpenFlagsResolverForTest } =
|
||||
await import("./fs-bridge.js");
|
||||
const { createOpenShellFsBridge } = await import("./fs-bridge.js");
|
||||
const bridge = createOpenShellFsBridge({ sandbox, backend });
|
||||
// Force the fallback path so the leaf-lstat guard is exercised.
|
||||
const readlinkSpy = vi
|
||||
.spyOn(fs, "readlink")
|
||||
.mockRejectedValue(new Error("fd path unavailable"));
|
||||
// Simulate a host that lacks `O_NOFOLLOW` (e.g. Windows) without touching
|
||||
// the non-configurable native `fs.constants` data property. The bridge
|
||||
// exposes a test-only seam for exactly this case.
|
||||
setReadOpenFlagsResolverForTest(() => ({
|
||||
flags: nodeFs.constants.O_RDONLY,
|
||||
supportsNoFollow: false,
|
||||
}));
|
||||
|
||||
try {
|
||||
await expect(bridge.readFile({ filePath: "subdir/secret.txt" })).rejects.toThrow(
|
||||
"Sandbox boundary checks failed",
|
||||
);
|
||||
expect(readlinkSpy).toHaveBeenCalled();
|
||||
} finally {
|
||||
setReadOpenFlagsResolverForTest(undefined);
|
||||
readlinkSpy.mockRestore();
|
||||
}
|
||||
await expect(bridge.readFile({ filePath: "subdir/secret.txt" })).rejects.toThrow(
|
||||
"Sandbox boundary checks failed",
|
||||
);
|
||||
});
|
||||
|
||||
it("rejects hardlinked files inside the sandbox root", async () => {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import type { OpenClawConfig } from "openclaw/plugin-sdk/config-types";
|
||||
import { replaceFileAtomic } from "openclaw/plugin-sdk/security-runtime";
|
||||
import {
|
||||
normalizeLowercaseStringOrEmpty,
|
||||
normalizeOptionalLowercaseString,
|
||||
@@ -150,7 +151,6 @@ async function readArmState(statePath: string): Promise<ArmStateFile | null> {
|
||||
}
|
||||
|
||||
async function writeArmState(statePath: string, state: ArmStateFile | null): Promise<void> {
|
||||
await fs.mkdir(path.dirname(statePath), { recursive: true });
|
||||
if (!state) {
|
||||
try {
|
||||
await fs.unlink(statePath);
|
||||
@@ -159,7 +159,11 @@ async function writeArmState(statePath: string, state: ArmStateFile | null): Pro
|
||||
}
|
||||
return;
|
||||
}
|
||||
await fs.writeFile(statePath, `${JSON.stringify(state, null, 2)}\n`, "utf8");
|
||||
await replaceFileAtomic({
|
||||
filePath: statePath,
|
||||
content: `${JSON.stringify(state, null, 2)}\n`,
|
||||
tempPrefix: ".phone-control-arm",
|
||||
});
|
||||
}
|
||||
|
||||
function normalizeDenyList(cfg: OpenClawPluginApi["config"]): string[] {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { assertNoSymlinkParents, pathScope } from "openclaw/plugin-sdk/security-runtime";
|
||||
|
||||
export function resolveRepoRelativeOutputDir(repoRoot: string, outputDir?: string) {
|
||||
if (!outputDir) {
|
||||
@@ -8,12 +9,11 @@ export function resolveRepoRelativeOutputDir(repoRoot: string, outputDir?: strin
|
||||
if (path.isAbsolute(outputDir)) {
|
||||
throw new Error("--output-dir must be a relative path inside the repo root.");
|
||||
}
|
||||
const resolved = path.resolve(repoRoot, outputDir);
|
||||
const relative = path.relative(repoRoot, resolved);
|
||||
if (relative.startsWith("..") || path.isAbsolute(relative)) {
|
||||
const resolved = pathScope(repoRoot, { label: "repo root" }).resolve(outputDir);
|
||||
if (!resolved.ok) {
|
||||
throw new Error("--output-dir must stay within the repo root.");
|
||||
}
|
||||
return resolved;
|
||||
return resolved.path;
|
||||
}
|
||||
|
||||
async function resolveNearestExistingPath(targetPath: string) {
|
||||
@@ -44,22 +44,18 @@ function assertRepoRelativePath(repoRoot: string, targetPath: string, label: str
|
||||
}
|
||||
|
||||
async function assertNoSymlinkSegments(repoRoot: string, targetPath: string, label: string) {
|
||||
const relative = assertRepoRelativePath(repoRoot, targetPath, label);
|
||||
let current = repoRoot;
|
||||
for (const segment of relative.split(path.sep).filter((entry) => entry.length > 0)) {
|
||||
current = path.join(current, segment);
|
||||
let stats: Awaited<ReturnType<typeof fs.lstat>> | null = null;
|
||||
try {
|
||||
stats = await fs.lstat(current);
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||
break;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
if (stats.isSymbolicLink()) {
|
||||
throw new Error(`${label} must not traverse symlinks.`);
|
||||
assertRepoRelativePath(repoRoot, targetPath, label);
|
||||
try {
|
||||
await assertNoSymlinkParents({
|
||||
rootDir: repoRoot,
|
||||
targetPath,
|
||||
messagePrefix: label,
|
||||
});
|
||||
} catch (error) {
|
||||
if (error instanceof Error && error.message.includes("symlink")) {
|
||||
throw new Error(`${label} must not traverse symlinks.`, { cause: error });
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -81,40 +77,10 @@ export async function ensureRepoBoundDirectory(
|
||||
label: string,
|
||||
opts?: { mode?: number },
|
||||
) {
|
||||
const repoRootResolved = path.resolve(repoRoot);
|
||||
const targetResolved = path.resolve(targetDir);
|
||||
const relative = assertRepoRelativePath(repoRootResolved, targetResolved, label);
|
||||
const repoRootReal = await fs.realpath(repoRootResolved);
|
||||
let current = repoRootResolved;
|
||||
for (const segment of relative.split(path.sep).filter((entry) => entry.length > 0)) {
|
||||
current = path.join(current, segment);
|
||||
while (true) {
|
||||
try {
|
||||
const stats = await fs.lstat(current);
|
||||
if (stats.isSymbolicLink()) {
|
||||
throw new Error(`${label} must not traverse symlinks.`);
|
||||
}
|
||||
if (!stats.isDirectory()) {
|
||||
throw new Error(`${label} must point to a directory.`);
|
||||
}
|
||||
break;
|
||||
} catch (error) {
|
||||
const code = (error as NodeJS.ErrnoException).code;
|
||||
if (code !== "ENOENT") {
|
||||
throw error;
|
||||
}
|
||||
try {
|
||||
await fs.mkdir(current, { recursive: false, mode: opts?.mode });
|
||||
} catch (mkdirError) {
|
||||
if ((mkdirError as NodeJS.ErrnoException).code === "EEXIST") {
|
||||
continue;
|
||||
}
|
||||
throw mkdirError;
|
||||
}
|
||||
}
|
||||
}
|
||||
await assertNoSymlinkSegments(path.resolve(repoRoot), path.resolve(targetDir), label);
|
||||
const result = await pathScope(repoRoot, { label }).ensureDir(targetDir, { mode: opts?.mode });
|
||||
if (!result.ok) {
|
||||
throw new Error(`${label} must stay within the repo root.`);
|
||||
}
|
||||
const targetReal = await fs.realpath(targetResolved);
|
||||
assertRepoRelativePath(repoRootReal, targetReal, label);
|
||||
return targetResolved;
|
||||
return result.path;
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { pathToFileURL } from "node:url";
|
||||
import { formatErrorMessage } from "openclaw/plugin-sdk/error-runtime";
|
||||
import { pathExists } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { ensureRepoBoundDirectory, resolveRepoRelativeOutputDir } from "../cli-paths.js";
|
||||
|
||||
export type MantisDesktopBrowserSmokeOptions = {
|
||||
@@ -146,15 +147,6 @@ async function defaultCommandRunner(
|
||||
});
|
||||
}
|
||||
|
||||
async function pathExists(filePath: string) {
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function resolveCrabboxBin(params: {
|
||||
env: NodeJS.ProcessEnv;
|
||||
explicit?: string;
|
||||
|
||||
@@ -2,6 +2,7 @@ import { spawn, type SpawnOptions } from "node:child_process";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { formatErrorMessage } from "openclaw/plugin-sdk/error-runtime";
|
||||
import { pathExists } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { ensureRepoBoundDirectory, resolveRepoRelativeOutputDir } from "../cli-paths.js";
|
||||
import {
|
||||
acquireQaCredentialLease,
|
||||
@@ -255,15 +256,6 @@ async function defaultCommandRunner(
|
||||
});
|
||||
}
|
||||
|
||||
async function pathExists(filePath: string) {
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function readRemoteMetadata(
|
||||
outputDir: string,
|
||||
): Promise<SlackDesktopRemoteMetadata | undefined> {
|
||||
@@ -289,7 +281,6 @@ async function readRemoteMetadata(
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
async function resolveCrabboxBin(params: {
|
||||
env: NodeJS.ProcessEnv;
|
||||
explicit?: string;
|
||||
|
||||
@@ -2,6 +2,7 @@ import { spawn, type SpawnOptions } from "node:child_process";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { formatErrorMessage } from "openclaw/plugin-sdk/error-runtime";
|
||||
import { pathExists } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { ensureRepoBoundDirectory, resolveRepoRelativeOutputDir } from "../cli-paths.js";
|
||||
|
||||
export type MantisVisualTaskVisionMode = "image-describe" | "metadata";
|
||||
@@ -211,15 +212,6 @@ async function defaultCommandRunner(
|
||||
});
|
||||
}
|
||||
|
||||
async function pathExists(filePath: string) {
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function nonEmptyFileExists(filePath: string) {
|
||||
try {
|
||||
const stat = await fs.stat(filePath);
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import { execFile } from "node:child_process";
|
||||
import { randomUUID } from "node:crypto";
|
||||
import fs from "node:fs";
|
||||
import { access, appendFile, mkdir, writeFile } from "node:fs/promises";
|
||||
import { access, mkdir, writeFile } from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { appendRegularFile } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
|
||||
import type { QaProviderMode } from "./model-selection.js";
|
||||
import { resolveQaForwardedLiveEnv, resolveQaLiveProviderConfigPath } from "./providers/env.js";
|
||||
@@ -432,7 +433,7 @@ export function renderQaMultipassGuestScript(
|
||||
}
|
||||
|
||||
async function appendMultipassLog(logPath: string, message: string) {
|
||||
await appendFile(logPath, message, "utf8");
|
||||
await appendRegularFile({ filePath: logPath, content: message });
|
||||
}
|
||||
|
||||
async function runMultipassCommand(logPath: string, args: string[], options: ExecFileOptions = {}) {
|
||||
|
||||
@@ -1,20 +1,23 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import { resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
|
||||
import {
|
||||
tempWorkspace,
|
||||
resolvePreferredOpenClawTmpDir,
|
||||
type TempWorkspace,
|
||||
} from "openclaw/plugin-sdk/temp-path";
|
||||
|
||||
export function createTempDirHarness() {
|
||||
const tempDirs: string[] = [];
|
||||
const tempDirs: TempWorkspace[] = [];
|
||||
|
||||
return {
|
||||
async cleanup() {
|
||||
await Promise.all(
|
||||
tempDirs.splice(0).map((dir) => fs.rm(dir, { recursive: true, force: true })),
|
||||
);
|
||||
await Promise.all(tempDirs.splice(0).map((dir) => dir.cleanup()));
|
||||
},
|
||||
async makeTempDir(prefix: string) {
|
||||
const dir = await fs.mkdtemp(path.join(resolvePreferredOpenClawTmpDir(), prefix));
|
||||
const dir = await tempWorkspace({
|
||||
rootDir: resolvePreferredOpenClawTmpDir(),
|
||||
prefix,
|
||||
});
|
||||
tempDirs.push(dir);
|
||||
return dir;
|
||||
return dir.dir;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { readFile, rename, writeFile } from "node:fs/promises";
|
||||
import { readFile } from "node:fs/promises";
|
||||
import { replaceFileAtomic } from "openclaw/plugin-sdk/security-runtime";
|
||||
|
||||
export function isMatrixQaPlainRecord(value: unknown): value is Record<string, unknown> {
|
||||
return Boolean(value && typeof value === "object" && !Array.isArray(value));
|
||||
@@ -19,9 +19,12 @@ async function readMatrixQaGatewayConfigFile(configPath: string) {
|
||||
}
|
||||
|
||||
async function writeMatrixQaGatewayConfigFile(configPath: string, config: unknown) {
|
||||
const tempPath = `${configPath}.${randomUUID()}.tmp`;
|
||||
await writeFile(tempPath, `${JSON.stringify(config, null, 2)}\n`, { mode: 0o600 });
|
||||
await rename(tempPath, configPath);
|
||||
await replaceFileAtomic({
|
||||
filePath: configPath,
|
||||
content: `${JSON.stringify(config, null, 2)}\n`,
|
||||
mode: 0o600,
|
||||
tempPrefix: ".matrix-qa-config",
|
||||
});
|
||||
}
|
||||
|
||||
export async function readMatrixQaGatewayMatrixAccount(params: {
|
||||
|
||||
@@ -3,6 +3,7 @@ import * as fs from "node:fs";
|
||||
import * as os from "node:os";
|
||||
import * as path from "node:path";
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { normalizeSource } from "../messaging/media-source.js";
|
||||
import {
|
||||
ApiError,
|
||||
MediaFileType,
|
||||
@@ -333,4 +334,51 @@ describe("media-chunked: ChunkedMediaApi.uploadChunked", () => {
|
||||
await fs.promises.rm(tmp, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("uses the verified localPath handle if the path is replaced before chunked upload", async () => {
|
||||
const tmp = await fs.promises.mkdtemp(path.join(os.tmpdir(), "chunked-verified-"));
|
||||
const filePath = path.join(tmp, "fixture.bin");
|
||||
await fs.promises.writeFile(filePath, FIXTURE_BUFFER);
|
||||
const source = await normalizeSource({ localPath: filePath }, { maxSize: 1_000_000 });
|
||||
await fs.promises.rm(filePath);
|
||||
await fs.promises.writeFile(filePath, Buffer.from("replacement bytes"));
|
||||
try {
|
||||
const client = mockApiClient();
|
||||
const tm = mockTokenManager();
|
||||
stubFetchOk();
|
||||
|
||||
client.request.mockImplementation(async (_t, _m, p) => {
|
||||
if (p.endsWith("/upload_prepare")) {
|
||||
return makePrepareResponse("uid-verified", 3);
|
||||
}
|
||||
if (p.endsWith("/upload_part_finish")) {
|
||||
return {};
|
||||
}
|
||||
if (p.endsWith("/files")) {
|
||||
return { file_uuid: "u", file_info: "fi", ttl: 10 } satisfies UploadMediaResponse;
|
||||
}
|
||||
throw new Error(`unexpected ${p}`);
|
||||
});
|
||||
|
||||
const api = new ChunkedMediaApi(client, tm);
|
||||
await api.uploadChunked({
|
||||
scope: "c2c",
|
||||
targetId: "u1",
|
||||
fileType: MediaFileType.VIDEO,
|
||||
source,
|
||||
creds: { appId: "a", clientSecret: "s" },
|
||||
});
|
||||
|
||||
const prepareCall = client.request.mock.calls.find((c) =>
|
||||
String(c[2]).endsWith("/upload_prepare"),
|
||||
)!;
|
||||
const prepareBody = prepareCall[3] as { md5: string };
|
||||
expect(prepareBody.md5).toBe(crypto.createHash("md5").update(FIXTURE_BUFFER).digest("hex"));
|
||||
} finally {
|
||||
if (source.kind === "localPath") {
|
||||
await source.opened?.close().catch(() => undefined);
|
||||
}
|
||||
await fs.promises.rm(tmp, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -35,8 +35,9 @@
|
||||
*/
|
||||
|
||||
import * as crypto from "node:crypto";
|
||||
import * as fs from "node:fs";
|
||||
import type { MediaSource } from "../messaging/media-source.js";
|
||||
import type { FileHandle } from "node:fs/promises";
|
||||
import type { MediaSource, OpenedLocalFile } from "../messaging/media-source.js";
|
||||
import { openLocalFile } from "../messaging/media-source.js";
|
||||
import {
|
||||
ApiError,
|
||||
MediaFileType,
|
||||
@@ -178,138 +179,137 @@ export class ChunkedMediaApi {
|
||||
async uploadChunked(opts: UploadChunkedOptions): Promise<UploadMediaResponse> {
|
||||
const prefix = opts.logPrefix ?? "[qqbot:chunked-upload]";
|
||||
|
||||
// 1. Resolve input: size + local path (or temp buffer handle).
|
||||
const input = resolveSource(opts.source, opts.fileName);
|
||||
|
||||
const displayName = input.fileName;
|
||||
const fileSize = input.size;
|
||||
const pathLabel = input.kind === "localPath" ? input.path : "<buffer>";
|
||||
|
||||
this.logger?.info?.(
|
||||
`${prefix} Start: file=${displayName} size=${formatFileSize(fileSize)} type=${opts.fileType}`,
|
||||
);
|
||||
|
||||
// 2. Compute md5 / sha1 / md5_10m. Identical for buffer and localPath,
|
||||
// but the localPath path streams so it never has to materialize the
|
||||
// whole file twice.
|
||||
const hashes = await computeHashes(input);
|
||||
this.logger?.debug?.(
|
||||
`${prefix} hashes: md5=${hashes.md5} sha1=${hashes.sha1} md5_10m=${hashes.md5_10m}`,
|
||||
);
|
||||
|
||||
// 3. Upload-cache fast path: the md5 hash is already a strong content
|
||||
// identifier, so we can short-circuit before even calling upload_prepare.
|
||||
if (this.cache) {
|
||||
const cached = this.cache.get(hashes.md5, opts.scope, opts.targetId, opts.fileType);
|
||||
if (cached) {
|
||||
this.logger?.info?.(
|
||||
`${prefix} cache HIT (md5=${hashes.md5.slice(0, 8)}) — skipping chunked upload`,
|
||||
);
|
||||
return { file_uuid: "", file_info: cached, ttl: 0 };
|
||||
}
|
||||
}
|
||||
|
||||
// 4. upload_prepare.
|
||||
const fileNameForPrepare =
|
||||
opts.fileType === MediaFileType.FILE ? this.sanitize(displayName) : displayName;
|
||||
const prepareResp = await this.callUploadPrepare(
|
||||
opts,
|
||||
fileNameForPrepare,
|
||||
fileSize,
|
||||
hashes,
|
||||
pathLabel,
|
||||
);
|
||||
|
||||
const { upload_id, parts } = prepareResp;
|
||||
const block_size = prepareResp.block_size;
|
||||
const maxConcurrent = Math.min(
|
||||
prepareResp.concurrency ? prepareResp.concurrency : DEFAULT_CONCURRENT_PARTS,
|
||||
MAX_CONCURRENT_PARTS,
|
||||
);
|
||||
const retryTimeoutMs = prepareResp.retry_timeout
|
||||
? Math.min(prepareResp.retry_timeout * 1000, MAX_PART_FINISH_RETRY_TIMEOUT_MS)
|
||||
: undefined;
|
||||
|
||||
this.logger?.info?.(
|
||||
`${prefix} prepared: upload_id=${upload_id} block=${formatFileSize(block_size)} parts=${parts.length} concurrency=${maxConcurrent}`,
|
||||
);
|
||||
|
||||
// 5. Upload every part. Concurrency is per-upload, not global.
|
||||
let completedParts = 0;
|
||||
let uploadedBytes = 0;
|
||||
|
||||
const uploadPart = async (part: UploadPart): Promise<void> => {
|
||||
const partIndex = part.index; // 1-based.
|
||||
const offset = (partIndex - 1) * block_size;
|
||||
const length = Math.min(block_size, fileSize - offset);
|
||||
|
||||
const partBuffer = await readPart(input, offset, length);
|
||||
const md5Hex = crypto.createHash("md5").update(partBuffer).digest("hex");
|
||||
|
||||
this.logger?.debug?.(
|
||||
`${prefix} part ${partIndex}/${parts.length}: ${formatFileSize(length)} offset=${offset} md5=${md5Hex}`,
|
||||
);
|
||||
|
||||
// 5a. PUT to pre-signed COS URL.
|
||||
await putToPresignedUrl(
|
||||
part.presigned_url,
|
||||
partBuffer,
|
||||
partIndex,
|
||||
parts.length,
|
||||
this.logger,
|
||||
prefix,
|
||||
);
|
||||
|
||||
// 5b. upload_part_finish — fetch a fresh token each time to defend
|
||||
// against long uploads exceeding the token TTL.
|
||||
await this.callUploadPartFinish(opts, upload_id, partIndex, length, md5Hex, retryTimeoutMs);
|
||||
|
||||
completedParts++;
|
||||
uploadedBytes += length;
|
||||
this.logger?.info?.(
|
||||
`${prefix} part ${partIndex}/${parts.length} done (${completedParts}/${parts.length})`,
|
||||
);
|
||||
|
||||
opts.onProgress?.({
|
||||
completedParts,
|
||||
totalParts: parts.length,
|
||||
uploadedBytes,
|
||||
totalBytes: fileSize,
|
||||
});
|
||||
};
|
||||
// 1. Resolve input: size + verified local file descriptor (or buffer).
|
||||
const input = await resolveSource(opts.source, opts.fileName);
|
||||
|
||||
try {
|
||||
const displayName = input.fileName;
|
||||
const fileSize = input.size;
|
||||
const pathLabel = input.kind === "localPath" ? input.path : "<buffer>";
|
||||
|
||||
this.logger?.info?.(
|
||||
`${prefix} Start: file=${displayName} size=${formatFileSize(fileSize)} type=${opts.fileType}`,
|
||||
);
|
||||
|
||||
// 2. Compute md5 / sha1 / md5_10m. Identical for buffer and localPath,
|
||||
// but the localPath descriptor streams so it never has to materialize the
|
||||
// whole file twice or reopen a path after validation.
|
||||
const hashes = await computeHashes(input);
|
||||
this.logger?.debug?.(
|
||||
`${prefix} hashes: md5=${hashes.md5} sha1=${hashes.sha1} md5_10m=${hashes.md5_10m}`,
|
||||
);
|
||||
|
||||
// 3. Upload-cache fast path: the md5 hash is already a strong content
|
||||
// identifier, so we can short-circuit before even calling upload_prepare.
|
||||
if (this.cache) {
|
||||
const cached = this.cache.get(hashes.md5, opts.scope, opts.targetId, opts.fileType);
|
||||
if (cached) {
|
||||
this.logger?.info?.(
|
||||
`${prefix} cache HIT (md5=${hashes.md5.slice(0, 8)}) — skipping chunked upload`,
|
||||
);
|
||||
return { file_uuid: "", file_info: cached, ttl: 0 };
|
||||
}
|
||||
}
|
||||
|
||||
// 4. upload_prepare.
|
||||
const fileNameForPrepare =
|
||||
opts.fileType === MediaFileType.FILE ? this.sanitize(displayName) : displayName;
|
||||
const prepareResp = await this.callUploadPrepare(
|
||||
opts,
|
||||
fileNameForPrepare,
|
||||
fileSize,
|
||||
hashes,
|
||||
pathLabel,
|
||||
);
|
||||
|
||||
const { upload_id, parts } = prepareResp;
|
||||
const block_size = prepareResp.block_size;
|
||||
const maxConcurrent = Math.min(
|
||||
prepareResp.concurrency ? prepareResp.concurrency : DEFAULT_CONCURRENT_PARTS,
|
||||
MAX_CONCURRENT_PARTS,
|
||||
);
|
||||
const retryTimeoutMs = prepareResp.retry_timeout
|
||||
? Math.min(prepareResp.retry_timeout * 1000, MAX_PART_FINISH_RETRY_TIMEOUT_MS)
|
||||
: undefined;
|
||||
|
||||
this.logger?.info?.(
|
||||
`${prefix} prepared: upload_id=${upload_id} block=${formatFileSize(block_size)} parts=${parts.length} concurrency=${maxConcurrent}`,
|
||||
);
|
||||
|
||||
// 5. Upload every part. Concurrency is per-upload, not global.
|
||||
let completedParts = 0;
|
||||
let uploadedBytes = 0;
|
||||
|
||||
const uploadPart = async (part: UploadPart): Promise<void> => {
|
||||
const partIndex = part.index; // 1-based.
|
||||
const offset = (partIndex - 1) * block_size;
|
||||
const length = Math.min(block_size, fileSize - offset);
|
||||
|
||||
const partBuffer = await readPart(input, offset, length);
|
||||
const md5Hex = crypto.createHash("md5").update(partBuffer).digest("hex");
|
||||
|
||||
this.logger?.debug?.(
|
||||
`${prefix} part ${partIndex}/${parts.length}: ${formatFileSize(length)} offset=${offset} md5=${md5Hex}`,
|
||||
);
|
||||
|
||||
// 5a. PUT to pre-signed COS URL.
|
||||
await putToPresignedUrl(
|
||||
part.presigned_url,
|
||||
partBuffer,
|
||||
partIndex,
|
||||
parts.length,
|
||||
this.logger,
|
||||
prefix,
|
||||
);
|
||||
|
||||
// 5b. upload_part_finish — fetch a fresh token each time to defend
|
||||
// against long uploads exceeding the token TTL.
|
||||
await this.callUploadPartFinish(opts, upload_id, partIndex, length, md5Hex, retryTimeoutMs);
|
||||
|
||||
completedParts++;
|
||||
uploadedBytes += length;
|
||||
this.logger?.info?.(
|
||||
`${prefix} part ${partIndex}/${parts.length} done (${completedParts}/${parts.length})`,
|
||||
);
|
||||
|
||||
opts.onProgress?.({
|
||||
completedParts,
|
||||
totalParts: parts.length,
|
||||
uploadedBytes,
|
||||
totalBytes: fileSize,
|
||||
});
|
||||
};
|
||||
|
||||
await runWithConcurrency(
|
||||
parts.map((part) => () => uploadPart(part)),
|
||||
maxConcurrent,
|
||||
);
|
||||
|
||||
this.logger?.info?.(`${prefix} all parts uploaded, completing...`);
|
||||
|
||||
// 6. complete_upload.
|
||||
const result = await this.callCompleteUpload(opts, upload_id);
|
||||
this.logger?.info?.(`${prefix} completed: file_uuid=${result.file_uuid} ttl=${result.ttl}s`);
|
||||
|
||||
// 7. Populate the shared upload cache so subsequent sends skip re-uploading.
|
||||
if (this.cache && result.file_info && result.ttl > 0) {
|
||||
this.cache.set(
|
||||
hashes.md5,
|
||||
opts.scope,
|
||||
opts.targetId,
|
||||
opts.fileType,
|
||||
result.file_info,
|
||||
result.file_uuid,
|
||||
result.ttl,
|
||||
);
|
||||
}
|
||||
|
||||
return result;
|
||||
} finally {
|
||||
// If the input opened a buffered read stream we don't keep state,
|
||||
// but localPath readers open / close the file per-part so there
|
||||
// is nothing to unwind here. Kept as a seam for future streaming
|
||||
// optimizations.
|
||||
if (input.kind === "localPath" && input.closeWhenDone) {
|
||||
await input.opened.close().catch(() => undefined);
|
||||
}
|
||||
}
|
||||
|
||||
this.logger?.info?.(`${prefix} all parts uploaded, completing...`);
|
||||
|
||||
// 6. complete_upload.
|
||||
const result = await this.callCompleteUpload(opts, upload_id);
|
||||
this.logger?.info?.(`${prefix} completed: file_uuid=${result.file_uuid} ttl=${result.ttl}s`);
|
||||
|
||||
// 7. Populate the shared upload cache so subsequent sends skip re-uploading.
|
||||
if (this.cache && result.file_info && result.ttl > 0) {
|
||||
this.cache.set(
|
||||
hashes.md5,
|
||||
opts.scope,
|
||||
opts.targetId,
|
||||
opts.fileType,
|
||||
result.file_info,
|
||||
result.file_uuid,
|
||||
result.ttl,
|
||||
);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// -------- Internal call wrappers --------
|
||||
@@ -429,17 +429,31 @@ export function isChunkedUploadImplemented(): boolean {
|
||||
* the bytes plus the metadata required by `upload_prepare`.
|
||||
*/
|
||||
type ChunkedInput =
|
||||
| { kind: "localPath"; path: string; size: number; fileName: string }
|
||||
| {
|
||||
kind: "localPath";
|
||||
path: string;
|
||||
size: number;
|
||||
fileName: string;
|
||||
opened: OpenedLocalFile;
|
||||
closeWhenDone: boolean;
|
||||
}
|
||||
| { kind: "buffer"; buffer: Buffer; size: number; fileName: string };
|
||||
|
||||
function resolveSource(source: MediaSource, fileNameOverride?: string): ChunkedInput {
|
||||
async function resolveSource(
|
||||
source: MediaSource,
|
||||
fileNameOverride?: string,
|
||||
): Promise<ChunkedInput> {
|
||||
if (source.kind === "localPath") {
|
||||
const inferredName = source.path.split(/[/\\]/).pop() || "file";
|
||||
const opened =
|
||||
source.opened ?? (await openLocalFile(source.path, { maxSize: Number.MAX_SAFE_INTEGER }));
|
||||
return {
|
||||
kind: "localPath",
|
||||
path: source.path,
|
||||
size: source.size,
|
||||
size: opened.size,
|
||||
fileName: fileNameOverride ?? inferredName,
|
||||
opened,
|
||||
closeWhenDone: source.opened === undefined,
|
||||
};
|
||||
}
|
||||
if (source.kind === "buffer") {
|
||||
@@ -460,14 +474,9 @@ async function readPart(input: ChunkedInput, offset: number, length: number): Pr
|
||||
if (input.kind === "buffer") {
|
||||
return input.buffer.subarray(offset, offset + length);
|
||||
}
|
||||
const handle = await fs.promises.open(input.path, "r");
|
||||
try {
|
||||
const buf = Buffer.alloc(length);
|
||||
const { bytesRead } = await handle.read(buf, 0, length, offset);
|
||||
return bytesRead < length ? buf.subarray(0, bytesRead) : buf;
|
||||
} finally {
|
||||
await handle.close();
|
||||
}
|
||||
const buf = Buffer.alloc(length);
|
||||
const { bytesRead } = await input.opened.handle.read(buf, 0, length, offset);
|
||||
return bytesRead < length ? buf.subarray(0, bytesRead) : buf;
|
||||
}
|
||||
|
||||
// ============ Hash computation ============
|
||||
@@ -476,8 +485,8 @@ async function readPart(input: ChunkedInput, offset: number, length: number): Pr
|
||||
* Stream the source once to compute md5 + sha1 + md5_10m.
|
||||
*
|
||||
* For buffer inputs the three hashes are computed in a single pass over
|
||||
* the existing memory. For localPath inputs a ReadStream drives the
|
||||
* hashers so memory use stays constant.
|
||||
* the existing memory. For localPath inputs the verified descriptor drives
|
||||
* the hashers so memory use stays constant.
|
||||
*/
|
||||
async function computeHashes(input: ChunkedInput): Promise<UploadPrepareHashes> {
|
||||
if (input.kind === "buffer") {
|
||||
@@ -497,7 +506,7 @@ async function computeHashes(input: ChunkedInput): Promise<UploadPrepareHashes>
|
||||
let consumed = 0;
|
||||
const needsMd5_10m = input.size > MD5_10M_SIZE;
|
||||
|
||||
const stream = fs.createReadStream(input.path);
|
||||
const stream = createReadStreamFromHandle(input.opened.handle);
|
||||
stream.on("data", (chunk: Buffer | string) => {
|
||||
const buf = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
|
||||
md5.update(buf);
|
||||
@@ -523,6 +532,10 @@ async function computeHashes(input: ChunkedInput): Promise<UploadPrepareHashes>
|
||||
});
|
||||
}
|
||||
|
||||
function createReadStreamFromHandle(handle: FileHandle): NodeJS.ReadableStream {
|
||||
return handle.createReadStream({ autoClose: false, start: 0 });
|
||||
}
|
||||
|
||||
// ============ COS PUT ============
|
||||
|
||||
/** Per-part retry budget for the COS PUT call (exponential backoff). */
|
||||
|
||||
@@ -26,7 +26,7 @@
|
||||
*/
|
||||
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { replaceFileAtomicSync } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { getCredentialBackupFile, getLegacyCredentialBackupFile } from "../utils/data-paths.js";
|
||||
|
||||
interface CredentialBackup {
|
||||
@@ -43,16 +43,17 @@ export function saveCredentialBackup(accountId: string, appId: string, clientSec
|
||||
}
|
||||
try {
|
||||
const backupPath = getCredentialBackupFile(accountId);
|
||||
fs.mkdirSync(path.dirname(backupPath), { recursive: true });
|
||||
const data: CredentialBackup = {
|
||||
accountId,
|
||||
appId,
|
||||
clientSecret,
|
||||
savedAt: new Date().toISOString(),
|
||||
};
|
||||
const tmpPath = `${backupPath}.tmp`;
|
||||
fs.writeFileSync(tmpPath, `${JSON.stringify(data, null, 2)}\n`, "utf8");
|
||||
fs.renameSync(tmpPath, backupPath);
|
||||
replaceFileAtomicSync({
|
||||
filePath: backupPath,
|
||||
content: `${JSON.stringify(data, null, 2)}\n`,
|
||||
tempPrefix: ".qqbot-credential-backup",
|
||||
});
|
||||
} catch {
|
||||
/* best-effort — ignore */
|
||||
}
|
||||
@@ -89,10 +90,11 @@ export function loadCredentialBackup(accountId?: string): CredentialBackup | nul
|
||||
if (data.accountId) {
|
||||
try {
|
||||
const backupPath = getCredentialBackupFile(data.accountId);
|
||||
fs.mkdirSync(path.dirname(backupPath), { recursive: true });
|
||||
const tmpPath = `${backupPath}.tmp`;
|
||||
fs.writeFileSync(tmpPath, `${JSON.stringify(data, null, 2)}\n`, "utf8");
|
||||
fs.renameSync(tmpPath, backupPath);
|
||||
replaceFileAtomicSync({
|
||||
filePath: backupPath,
|
||||
content: `${JSON.stringify(data, null, 2)}\n`,
|
||||
tempPrefix: ".qqbot-credential-backup",
|
||||
});
|
||||
fs.unlinkSync(legacy);
|
||||
} catch {
|
||||
/* ignore migration errors */
|
||||
|
||||
@@ -9,9 +9,8 @@
|
||||
*
|
||||
* - `url` — remote http(s) URL that the QQ server can fetch directly.
|
||||
* - `base64` — in-memory base64 string (typically from a `data:` URL).
|
||||
* - `localPath` — on-disk file; kept as a path so a future chunked-upload
|
||||
* implementation can stream it via `fs.createReadStream` without the 4/3×
|
||||
* base64 memory overhead.
|
||||
* - `localPath` — on-disk file; kept as a path plus an optional verified
|
||||
* descriptor so uploaders can avoid reopening a path after validation.
|
||||
* - `buffer` — in-memory raw bytes (e.g. TTS output, downloaded url-fallback).
|
||||
*
|
||||
* ## Security baseline (localPath branch)
|
||||
@@ -29,7 +28,8 @@
|
||||
* reading the whole file first.
|
||||
*/
|
||||
|
||||
import * as fs from "node:fs";
|
||||
import type { FileHandle } from "node:fs/promises";
|
||||
import { FsSafeError, openLocalFileSafely } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { MAX_UPLOAD_SIZE, formatFileSize, getMimeType } from "../utils/file-utils.js";
|
||||
|
||||
// ============ Types ============
|
||||
@@ -39,14 +39,14 @@ import { MAX_UPLOAD_SIZE, formatFileSize, getMimeType } from "../utils/file-util
|
||||
*
|
||||
* - `url`: remote URL — upload via `file_data=null; url=...`.
|
||||
* - `base64`: already-encoded base64 — upload via `file_data=...`.
|
||||
* - `localPath`: on-disk file — one-shot path reads it into a buffer;
|
||||
* chunked path (future) streams it via `fs.createReadStream`.
|
||||
* - `localPath`: on-disk file — uploaders should prefer `opened` when present
|
||||
* and only reopen `path` for direct, already-normalized test/helper calls.
|
||||
* - `buffer`: raw bytes in memory — same as above minus disk I/O.
|
||||
*/
|
||||
export type MediaSource =
|
||||
| { kind: "url"; url: string }
|
||||
| { kind: "base64"; data: string; mime?: string }
|
||||
| { kind: "localPath"; path: string; size: number; mime?: string }
|
||||
| { kind: "localPath"; path: string; size: number; mime?: string; opened?: OpenedLocalFile }
|
||||
| { kind: "buffer"; buffer: Buffer; fileName?: string; mime?: string };
|
||||
|
||||
/**
|
||||
@@ -92,8 +92,8 @@ function tryParseDataUrl(value: string): { mime: string; data: string } | null {
|
||||
*
|
||||
* Callers MUST call {@link OpenedLocalFile.close} (typically in a `finally`).
|
||||
*/
|
||||
interface OpenedLocalFile {
|
||||
handle: fs.promises.FileHandle;
|
||||
export interface OpenedLocalFile {
|
||||
handle: FileHandle;
|
||||
size: number;
|
||||
close(): Promise<void>;
|
||||
}
|
||||
@@ -120,27 +120,26 @@ export async function openLocalFile(
|
||||
opts: { maxSize?: number } = {},
|
||||
): Promise<OpenedLocalFile> {
|
||||
const maxSize = opts.maxSize ?? MAX_UPLOAD_SIZE;
|
||||
const openFlags =
|
||||
fs.constants.O_RDONLY | ("O_NOFOLLOW" in fs.constants ? fs.constants.O_NOFOLLOW : 0);
|
||||
const handle = await fs.promises.open(filePath, openFlags);
|
||||
try {
|
||||
const stat = await handle.stat();
|
||||
if (!stat.isFile()) {
|
||||
throw new Error("Path is not a regular file");
|
||||
const opened = await openLocalFileSafely({ filePath }).catch((err: unknown) => {
|
||||
if (err instanceof FsSafeError && err.code === "not-file") {
|
||||
throw new Error("Path is not a regular file", { cause: err });
|
||||
}
|
||||
if (stat.size > maxSize) {
|
||||
throw err;
|
||||
});
|
||||
try {
|
||||
if (opened.stat.size > maxSize) {
|
||||
throw new Error(
|
||||
`File is too large (${formatFileSize(stat.size)}); QQ Bot API limit is ${formatFileSize(maxSize)}`,
|
||||
`File is too large (${formatFileSize(opened.stat.size)}); QQ Bot API limit is ${formatFileSize(maxSize)}`,
|
||||
);
|
||||
}
|
||||
return {
|
||||
handle,
|
||||
size: stat.size,
|
||||
close: () => handle.close(),
|
||||
handle: opened.handle,
|
||||
size: opened.stat.size,
|
||||
close: () => opened.handle.close(),
|
||||
};
|
||||
} catch (err) {
|
||||
// Close the handle on any validation failure to avoid fd leaks.
|
||||
await handle.close().catch(() => undefined);
|
||||
await opened.handle.close().catch(() => undefined);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
@@ -153,10 +152,9 @@ export async function openLocalFile(
|
||||
* - Strings passed via `{ url }` that start with `data:` are auto-resolved
|
||||
* to a `base64` branch (this is the unified `data:` URL support that was
|
||||
* previously only implemented in `sendImage`).
|
||||
* - `localPath` branches open the file with {@link openLocalFile} solely to
|
||||
* validate size / regular-file / O_NOFOLLOW invariants. The handle is
|
||||
* closed immediately — actual reading is deferred to the uploader so
|
||||
* the chunked path can stream without double-reading.
|
||||
* - `localPath` branches open the file with {@link openLocalFile} and carry
|
||||
* that descriptor to the uploader, so later reads use the exact file that
|
||||
* passed regular-file / O_NOFOLLOW / size validation.
|
||||
* - `buffer` branches enforce the same ceiling inline.
|
||||
*
|
||||
* `maxSize` defaults to {@link MAX_UPLOAD_SIZE} (20MB, one-shot upload limit).
|
||||
@@ -188,16 +186,13 @@ export async function normalizeSource(
|
||||
|
||||
if ("localPath" in raw) {
|
||||
const opened = await openLocalFile(raw.localPath, { maxSize });
|
||||
try {
|
||||
return {
|
||||
kind: "localPath",
|
||||
path: raw.localPath,
|
||||
size: opened.size,
|
||||
mime: getMimeType(raw.localPath),
|
||||
};
|
||||
} finally {
|
||||
await opened.close();
|
||||
}
|
||||
return {
|
||||
kind: "localPath",
|
||||
path: raw.localPath,
|
||||
size: opened.size,
|
||||
mime: getMimeType(raw.localPath),
|
||||
opened,
|
||||
};
|
||||
}
|
||||
|
||||
// buffer branch
|
||||
|
||||
@@ -2,8 +2,11 @@
|
||||
* Low-level outbound media sends (photo, voice, video, document) and path resolution.
|
||||
*/
|
||||
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import {
|
||||
pathExistsSync,
|
||||
resolveLocalPathFromRootsSync,
|
||||
} from "openclaw/plugin-sdk/security-runtime";
|
||||
import type { GatewayAccount } from "../types.js";
|
||||
import { MediaFileType } from "../types.js";
|
||||
import {
|
||||
@@ -98,79 +101,32 @@ function isHttpOrDataSource(pathValue: string): boolean {
|
||||
);
|
||||
}
|
||||
|
||||
function isPathWithinRoot(candidate: string, root: string): boolean {
|
||||
const relative = path.relative(root, candidate);
|
||||
return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative));
|
||||
}
|
||||
|
||||
function resolveMissingPathWithinMediaRoot(normalizedPath: string): string | null {
|
||||
const resolvedCandidate = path.resolve(normalizedPath);
|
||||
if (fs.existsSync(resolvedCandidate)) {
|
||||
if (pathExistsSync(resolvedCandidate)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const allowedRoot = path.resolve(getQQBotMediaDir());
|
||||
let canonicalAllowedRoot: string;
|
||||
try {
|
||||
canonicalAllowedRoot = fs.realpathSync(allowedRoot);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
|
||||
const missingSegments: string[] = [];
|
||||
let cursor = resolvedCandidate;
|
||||
while (!fs.existsSync(cursor)) {
|
||||
const parent = path.dirname(cursor);
|
||||
if (parent === cursor) {
|
||||
break;
|
||||
}
|
||||
missingSegments.unshift(path.basename(cursor));
|
||||
cursor = parent;
|
||||
}
|
||||
|
||||
if (!fs.existsSync(cursor)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let canonicalCursor: string;
|
||||
try {
|
||||
canonicalCursor = fs.realpathSync(cursor);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
const canonicalCandidate =
|
||||
missingSegments.length > 0 ? path.join(canonicalCursor, ...missingSegments) : canonicalCursor;
|
||||
|
||||
return isPathWithinRoot(canonicalCandidate, canonicalAllowedRoot) ? canonicalCandidate : null;
|
||||
return (
|
||||
resolveLocalPathFromRootsSync({
|
||||
filePath: resolvedCandidate,
|
||||
roots: [getQQBotMediaDir()],
|
||||
label: "QQ Bot media storage",
|
||||
allowMissing: true,
|
||||
})?.path ?? null
|
||||
);
|
||||
}
|
||||
|
||||
function resolveExistingPathWithinRoots(
|
||||
normalizedPath: string,
|
||||
allowedRoots: readonly string[],
|
||||
): string | null {
|
||||
const resolvedCandidate = path.resolve(normalizedPath);
|
||||
if (!fs.existsSync(resolvedCandidate)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let canonicalCandidate: string;
|
||||
try {
|
||||
canonicalCandidate = fs.realpathSync(resolvedCandidate);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
|
||||
for (const root of allowedRoots) {
|
||||
const resolvedRoot = path.resolve(root);
|
||||
const canonicalRoot = fs.existsSync(resolvedRoot)
|
||||
? fs.realpathSync(resolvedRoot)
|
||||
: resolvedRoot;
|
||||
if (isPathWithinRoot(canonicalCandidate, canonicalRoot)) {
|
||||
return canonicalCandidate;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
return (
|
||||
resolveLocalPathFromRootsSync({
|
||||
filePath: normalizedPath,
|
||||
roots: allowedRoots,
|
||||
label: "QQ Bot local roots",
|
||||
})?.path ?? null
|
||||
);
|
||||
}
|
||||
|
||||
export function resolveOutboundMediaPath(
|
||||
|
||||
@@ -596,33 +596,39 @@ async function sendMediaInternal(
|
||||
maxSize: Number.MAX_SAFE_INTEGER,
|
||||
});
|
||||
|
||||
const uploadResult = await dispatchUpload(
|
||||
ctx,
|
||||
scope,
|
||||
opts.target.id,
|
||||
KIND_TO_FILE_TYPE[opts.kind],
|
||||
source,
|
||||
c,
|
||||
opts.fileName,
|
||||
);
|
||||
try {
|
||||
const uploadResult = await dispatchUpload(
|
||||
ctx,
|
||||
scope,
|
||||
opts.target.id,
|
||||
KIND_TO_FILE_TYPE[opts.kind],
|
||||
source,
|
||||
c,
|
||||
opts.fileName,
|
||||
);
|
||||
|
||||
// Content is semantically meaningful only for image / video — the voice
|
||||
// and file APIs ignore it.
|
||||
const msgContent = opts.kind === "image" || opts.kind === "video" ? opts.content : undefined;
|
||||
// Content is semantically meaningful only for image / video — the voice
|
||||
// and file APIs ignore it.
|
||||
const msgContent = opts.kind === "image" || opts.kind === "video" ? opts.content : undefined;
|
||||
|
||||
const result = await ctx.mediaApi.sendMediaMessage(
|
||||
scope,
|
||||
opts.target.id,
|
||||
uploadResult.file_info,
|
||||
c,
|
||||
{
|
||||
msgId: opts.msgId,
|
||||
content: msgContent,
|
||||
},
|
||||
);
|
||||
const result = await ctx.mediaApi.sendMediaMessage(
|
||||
scope,
|
||||
opts.target.id,
|
||||
uploadResult.file_info,
|
||||
c,
|
||||
{
|
||||
msgId: opts.msgId,
|
||||
content: msgContent,
|
||||
},
|
||||
);
|
||||
|
||||
notifyMediaHook(opts.creds.appId, result, buildOutboundMeta(opts, source));
|
||||
return result;
|
||||
notifyMediaHook(opts.creds.appId, result, buildOutboundMeta(opts, source));
|
||||
return result;
|
||||
} finally {
|
||||
if (source.kind === "localPath") {
|
||||
await source.opened?.close().catch(() => undefined);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -668,6 +674,12 @@ async function dispatchUpload(
|
||||
fileName,
|
||||
});
|
||||
}
|
||||
if (source.opened) {
|
||||
return ctx.mediaApi.uploadMedia(scope, targetId, fileType, creds, {
|
||||
buffer: await source.opened.handle.readFile(),
|
||||
fileName,
|
||||
});
|
||||
}
|
||||
return ctx.mediaApi.uploadMedia(scope, targetId, fileType, creds, {
|
||||
localPath: source.path,
|
||||
fileName,
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { appendRegularFileSync, replaceFileAtomicSync } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { formatErrorMessage } from "../utils/format.js";
|
||||
import { debugLog, debugError } from "../utils/log.js";
|
||||
import { getQQBotDataDir, getQQBotDataPath } from "../utils/platform.js";
|
||||
@@ -88,7 +89,7 @@ function ensureDir(): void {
|
||||
function appendLine(line: RefIndexLine): void {
|
||||
try {
|
||||
ensureDir();
|
||||
fs.appendFileSync(getRefIndexFile(), JSON.stringify(line) + "\n", "utf-8");
|
||||
appendRegularFileSync({ filePath: getRefIndexFile(), content: JSON.stringify(line) + "\n" });
|
||||
totalLinesOnDisk++;
|
||||
} catch (err) {
|
||||
debugError(`[ref-index-store] Failed to append: ${formatErrorMessage(err)}`);
|
||||
@@ -109,7 +110,6 @@ function compactFile(): void {
|
||||
try {
|
||||
ensureDir();
|
||||
const refIndexFile = getRefIndexFile();
|
||||
const tmpPath = refIndexFile + ".tmp";
|
||||
const lines: string[] = [];
|
||||
for (const [key, entry] of cache) {
|
||||
lines.push(
|
||||
@@ -127,8 +127,11 @@ function compactFile(): void {
|
||||
}),
|
||||
);
|
||||
}
|
||||
fs.writeFileSync(tmpPath, lines.join("\n") + "\n", "utf-8");
|
||||
fs.renameSync(tmpPath, refIndexFile);
|
||||
replaceFileAtomicSync({
|
||||
filePath: refIndexFile,
|
||||
content: `${lines.join("\n")}\n`,
|
||||
tempPrefix: ".qqbot-ref-index",
|
||||
});
|
||||
totalLinesOnDisk = cache.size;
|
||||
debugLog(`[ref-index-store] Compacted: ${before} lines → ${totalLinesOnDisk} lines`);
|
||||
} catch (err) {
|
||||
|
||||
@@ -5,8 +5,8 @@
|
||||
* built-ins + log + platform (both zero plugin-sdk).
|
||||
*/
|
||||
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { privateFileStoreSync } from "openclaw/plugin-sdk/security-runtime";
|
||||
import type { ChatScope } from "../types.js";
|
||||
import { formatErrorMessage } from "../utils/format.js";
|
||||
import { debugLog, debugError } from "../utils/log.js";
|
||||
@@ -49,9 +49,10 @@ function loadUsersFromFile(): Map<string, KnownUser> {
|
||||
usersCache = new Map();
|
||||
try {
|
||||
const knownUsersFile = getKnownUsersFile();
|
||||
if (fs.existsSync(knownUsersFile)) {
|
||||
const data = fs.readFileSync(knownUsersFile, "utf-8");
|
||||
const users = JSON.parse(data) as KnownUser[];
|
||||
const users = privateFileStoreSync(path.dirname(knownUsersFile)).readJsonIfExists<KnownUser[]>(
|
||||
path.basename(knownUsersFile),
|
||||
);
|
||||
if (users) {
|
||||
for (const user of users) {
|
||||
usersCache.set(makeUserKey(user), user);
|
||||
}
|
||||
@@ -80,10 +81,10 @@ function doSaveUsersToFile(): void {
|
||||
}
|
||||
try {
|
||||
ensureDir();
|
||||
fs.writeFileSync(
|
||||
getKnownUsersFile(),
|
||||
JSON.stringify(Array.from(usersCache.values()), null, 2),
|
||||
"utf-8",
|
||||
const filePath = getKnownUsersFile();
|
||||
privateFileStoreSync(path.dirname(filePath)).writeJson(
|
||||
path.basename(filePath),
|
||||
Array.from(usersCache.values()),
|
||||
);
|
||||
isDirty = false;
|
||||
} catch (err) {
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { privateFileStoreSync } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { formatErrorMessage } from "../utils/format.js";
|
||||
import { debugLog, debugError } from "../utils/log.js";
|
||||
import { getQQBotDataDir, getQQBotDataPath } from "../utils/platform.js";
|
||||
@@ -66,18 +67,20 @@ function getCandidateSessionPaths(accountId: string): string[] {
|
||||
export function loadSession(accountId: string, expectedAppId?: string): SessionState | null {
|
||||
try {
|
||||
let filePath: string | null = null;
|
||||
let state: SessionState | null = null;
|
||||
for (const candidatePath of getCandidateSessionPaths(accountId)) {
|
||||
if (fs.existsSync(candidatePath)) {
|
||||
state = privateFileStoreSync(path.dirname(candidatePath)).readJsonIfExists<SessionState>(
|
||||
path.basename(candidatePath),
|
||||
);
|
||||
if (state) {
|
||||
filePath = candidatePath;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (!filePath) {
|
||||
if (!filePath || !state) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const data = fs.readFileSync(filePath, "utf-8");
|
||||
const state = JSON.parse(data) as SessionState;
|
||||
const now = Date.now();
|
||||
|
||||
if (now - state.savedAt > SESSION_EXPIRE_TIME) {
|
||||
@@ -162,7 +165,7 @@ function doSaveSession(state: SessionState): void {
|
||||
try {
|
||||
ensureDir();
|
||||
const stateToSave: SessionState = { ...state, savedAt: Date.now() };
|
||||
fs.writeFileSync(filePath, JSON.stringify(stateToSave, null, 2), "utf-8");
|
||||
privateFileStoreSync(path.dirname(filePath)).writeJson(path.basename(filePath), stateToSave);
|
||||
if (legacyPath !== filePath && fs.existsSync(legacyPath)) {
|
||||
fs.unlinkSync(legacyPath);
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@
|
||||
|
||||
import * as fs from "node:fs";
|
||||
import * as path from "node:path";
|
||||
import { readRegularFileSync } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { formatErrorMessage } from "./format.js";
|
||||
import { debugLog, debugError, debugWarn } from "./log.js";
|
||||
import { normalizeLowercaseStringOrEmpty as normalizeLowercase } from "./string-normalize.js";
|
||||
@@ -81,11 +82,13 @@ export async function convertSilkToWav(
|
||||
inputPath: string,
|
||||
outputDir?: string,
|
||||
): Promise<{ wavPath: string; duration: number } | null> {
|
||||
if (!fs.existsSync(inputPath)) {
|
||||
let fileBuf: Buffer;
|
||||
try {
|
||||
fileBuf = readRegularFileSync({ filePath: inputPath }).buffer;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
|
||||
const fileBuf = fs.readFileSync(inputPath);
|
||||
const strippedBuf = stripAmrHeader(fileBuf);
|
||||
const rawData = new Uint8Array(
|
||||
strippedBuf.buffer,
|
||||
@@ -188,11 +191,13 @@ export async function audioFileToSilkBase64(
|
||||
filePath: string,
|
||||
directUploadFormats?: string[],
|
||||
): Promise<string | null> {
|
||||
if (!fs.existsSync(filePath)) {
|
||||
let buf: Buffer;
|
||||
try {
|
||||
buf = readRegularFileSync({ filePath }).buffer;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
|
||||
const buf = fs.readFileSync(filePath);
|
||||
if (buf.length === 0) {
|
||||
debugError(`[audio-convert] file is empty: ${filePath}`);
|
||||
return null;
|
||||
|
||||
@@ -13,7 +13,13 @@ vi.mock("../adapter/index.js", () => ({
|
||||
}),
|
||||
}));
|
||||
|
||||
import { QQBOT_MEDIA_SSRF_POLICY, downloadFile } from "./file-utils.js";
|
||||
import {
|
||||
QQBOT_MEDIA_SSRF_POLICY,
|
||||
checkFileSize,
|
||||
downloadFile,
|
||||
fileExistsAsync,
|
||||
readFileAsync,
|
||||
} from "./file-utils.js";
|
||||
|
||||
describe("qqbot file-utils downloadFile", () => {
|
||||
let tempDir: string;
|
||||
@@ -69,4 +75,15 @@ describe("qqbot file-utils downloadFile", () => {
|
||||
expect(savedPath).toBeNull();
|
||||
expect(adapterMocks.fetchMedia).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it.skipIf(process.platform === "win32")("rejects symlinked local media helpers", async () => {
|
||||
const targetPath = path.join(tempDir, "target.png");
|
||||
const linkPath = path.join(tempDir, "link.png");
|
||||
await fs.promises.writeFile(targetPath, "image-bytes");
|
||||
await fs.promises.symlink(targetPath, linkPath);
|
||||
|
||||
expect(checkFileSize(linkPath).ok).toBe(false);
|
||||
await expect(readFileAsync(linkPath)).rejects.toThrow();
|
||||
await expect(fileExistsAsync(linkPath)).resolves.toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
import crypto from "node:crypto";
|
||||
import * as fs from "node:fs";
|
||||
import * as path from "node:path";
|
||||
import {
|
||||
openLocalFileSafely,
|
||||
readRegularFile,
|
||||
statRegularFileSync,
|
||||
} from "openclaw/plugin-sdk/security-runtime";
|
||||
import { getPlatformAdapter } from "../adapter/index.js";
|
||||
import type { SsrfPolicyConfig } from "../adapter/types.js";
|
||||
import { MediaFileType } from "../types.js";
|
||||
@@ -72,17 +77,20 @@ interface FileSizeCheckResult {
|
||||
/** Validate that a file is within the allowed upload size. */
|
||||
export function checkFileSize(filePath: string, maxSize = MAX_UPLOAD_SIZE): FileSizeCheckResult {
|
||||
try {
|
||||
const stat = fs.statSync(filePath);
|
||||
if (stat.size > maxSize) {
|
||||
const sizeMB = (stat.size / (1024 * 1024)).toFixed(1);
|
||||
const result = statRegularFileSync(filePath);
|
||||
if (result.missing) {
|
||||
throw Object.assign(new Error(`File not found: ${filePath}`), { code: "ENOENT" });
|
||||
}
|
||||
if (result.stat.size > maxSize) {
|
||||
const sizeMB = (result.stat.size / (1024 * 1024)).toFixed(1);
|
||||
const limitMB = (maxSize / (1024 * 1024)).toFixed(0);
|
||||
return {
|
||||
ok: false,
|
||||
size: stat.size,
|
||||
size: result.stat.size,
|
||||
error: `File is too large (${sizeMB}MB); QQ Bot API limit is ${limitMB}MB`,
|
||||
};
|
||||
}
|
||||
return { ok: true, size: stat.size };
|
||||
return { ok: true, size: result.stat.size };
|
||||
} catch (err) {
|
||||
return {
|
||||
ok: false,
|
||||
@@ -94,16 +102,21 @@ export function checkFileSize(filePath: string, maxSize = MAX_UPLOAD_SIZE): File
|
||||
|
||||
/** Read file contents asynchronously. */
|
||||
export async function readFileAsync(filePath: string): Promise<Buffer> {
|
||||
return fs.promises.readFile(filePath);
|
||||
return (await readRegularFile({ filePath })).buffer;
|
||||
}
|
||||
|
||||
/** Check file readability asynchronously. */
|
||||
export async function fileExistsAsync(filePath: string): Promise<boolean> {
|
||||
const opened = await openLocalFileSafely({ filePath }).catch(() => null);
|
||||
if (!opened) {
|
||||
return false;
|
||||
}
|
||||
try {
|
||||
await fs.promises.access(filePath, fs.constants.R_OK);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
} finally {
|
||||
await opened.handle.close().catch(() => undefined);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,10 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import {
|
||||
pathExists,
|
||||
replaceFileAtomic,
|
||||
resolvePathWithinRoot,
|
||||
} from "openclaw/plugin-sdk/security-runtime";
|
||||
import { bumpSkillsSnapshotVersion } from "../api.js";
|
||||
import { assertSkillContentSafe, scanSkillContent } from "./scanner.js";
|
||||
import type { SkillProposal, SkillScanFinding } from "./types.js";
|
||||
@@ -38,31 +42,27 @@ function assertValidSection(section: string): string {
|
||||
function skillDir(workspaceDir: string, skillName: string): string {
|
||||
const safeName = assertValidSkillName(skillName);
|
||||
const root = path.resolve(workspaceDir, "skills");
|
||||
const dir = path.resolve(root, safeName);
|
||||
if (!dir.startsWith(`${root}${path.sep}`)) {
|
||||
const dir = resolvePathWithinRoot({
|
||||
rootDir: root,
|
||||
requestedPath: safeName,
|
||||
scopeLabel: "workspace skills directory",
|
||||
});
|
||||
if (!dir.ok) {
|
||||
throw new Error("skill path escapes workspace skills directory");
|
||||
}
|
||||
return dir;
|
||||
return dir.path;
|
||||
}
|
||||
|
||||
function skillPath(workspaceDir: string, skillName: string): string {
|
||||
return path.join(skillDir(workspaceDir, skillName), "SKILL.md");
|
||||
}
|
||||
|
||||
async function pathExists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function atomicWrite(filePath: string, content: string): Promise<void> {
|
||||
await fs.mkdir(path.dirname(filePath), { recursive: true });
|
||||
const tempPath = `${filePath}.tmp-${process.pid}-${Date.now().toString(36)}-${randomUUID()}`;
|
||||
await fs.writeFile(tempPath, content, "utf8");
|
||||
await fs.rename(tempPath, filePath);
|
||||
await replaceFileAtomic({
|
||||
filePath,
|
||||
content,
|
||||
tempPrefix: ".skill-workshop",
|
||||
});
|
||||
}
|
||||
|
||||
function formatSkillMarkdown(params: { name: string; description: string; body: string }): string {
|
||||
@@ -173,10 +173,14 @@ export async function writeSupportFile(params: {
|
||||
}
|
||||
assertSkillContentSafe(params.content);
|
||||
const root = skillDir(params.workspaceDir, name);
|
||||
const target = path.resolve(root, ...parts);
|
||||
if (!target.startsWith(`${root}${path.sep}`)) {
|
||||
const target = resolvePathWithinRoot({
|
||||
rootDir: root,
|
||||
requestedPath: path.join(...parts),
|
||||
scopeLabel: "skill directory",
|
||||
});
|
||||
if (!target.ok) {
|
||||
throw new Error("support file path escapes skill directory");
|
||||
}
|
||||
await atomicWrite(target, `${params.content.trimEnd()}\n`);
|
||||
return target;
|
||||
await atomicWrite(target.path, `${params.content.trimEnd()}\n`);
|
||||
return target.path;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { createHash, randomUUID } from "node:crypto";
|
||||
import fs from "node:fs/promises";
|
||||
import { createHash } from "node:crypto";
|
||||
import path from "node:path";
|
||||
import { privateFileStore } from "openclaw/plugin-sdk/security-runtime";
|
||||
import type { SkillProposal, SkillWorkshopStatus } from "./types.js";
|
||||
|
||||
type StoreFile = {
|
||||
@@ -42,24 +42,21 @@ async function withLock<T>(key: string, task: () => Promise<T>): Promise<T> {
|
||||
}
|
||||
}
|
||||
|
||||
async function readJson(filePath: string): Promise<StoreFile> {
|
||||
try {
|
||||
const raw = await fs.readFile(filePath, "utf8");
|
||||
const parsed = JSON.parse(raw) as StoreFile;
|
||||
return {
|
||||
version: 1,
|
||||
proposals: Array.isArray(parsed.proposals) ? parsed.proposals : [],
|
||||
review:
|
||||
parsed.review && typeof parsed.review === "object"
|
||||
? normalizeReviewState(parsed.review as Partial<SkillWorkshopReviewState>)
|
||||
: undefined,
|
||||
};
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||
return { version: 1, proposals: [] };
|
||||
}
|
||||
throw error;
|
||||
async function readJson(rootDir: string, filePath: string): Promise<StoreFile> {
|
||||
const parsed = await privateFileStore(rootDir).readJsonIfExists<StoreFile>(
|
||||
path.relative(rootDir, filePath),
|
||||
);
|
||||
if (!parsed) {
|
||||
return { version: 1, proposals: [] };
|
||||
}
|
||||
return {
|
||||
version: 1,
|
||||
proposals: Array.isArray(parsed.proposals) ? parsed.proposals : [],
|
||||
review:
|
||||
parsed.review && typeof parsed.review === "object"
|
||||
? normalizeReviewState(parsed.review as Partial<SkillWorkshopReviewState>)
|
||||
: undefined,
|
||||
};
|
||||
}
|
||||
|
||||
function normalizeReviewState(
|
||||
@@ -80,26 +77,27 @@ function normalizeReviewState(
|
||||
};
|
||||
}
|
||||
|
||||
async function atomicWriteJson(filePath: string, data: StoreFile): Promise<void> {
|
||||
await fs.mkdir(path.dirname(filePath), { recursive: true });
|
||||
const tempPath = `${filePath}.tmp-${process.pid}-${Date.now().toString(36)}-${randomUUID()}`;
|
||||
await fs.writeFile(tempPath, `${JSON.stringify(data, null, 2)}\n`, "utf8");
|
||||
await fs.rename(tempPath, filePath);
|
||||
async function atomicWriteJson(rootDir: string, filePath: string, data: StoreFile): Promise<void> {
|
||||
await privateFileStore(rootDir).writeJson(path.relative(rootDir, filePath), data, {
|
||||
trailingNewline: true,
|
||||
});
|
||||
}
|
||||
|
||||
export class SkillWorkshopStore {
|
||||
readonly stateDir: string;
|
||||
readonly filePath: string;
|
||||
|
||||
constructor(params: { stateDir: string; workspaceDir: string }) {
|
||||
this.stateDir = path.resolve(params.stateDir);
|
||||
this.filePath = path.join(
|
||||
params.stateDir,
|
||||
this.stateDir,
|
||||
"skill-workshop",
|
||||
`${workspaceKey(params.workspaceDir)}.json`,
|
||||
);
|
||||
}
|
||||
|
||||
async list(status?: SkillWorkshopStatus): Promise<SkillProposal[]> {
|
||||
const file = await readJson(this.filePath);
|
||||
const file = await readJson(this.stateDir, this.filePath);
|
||||
const proposals = status
|
||||
? file.proposals.filter((proposal) => proposal.status === status)
|
||||
: file.proposals;
|
||||
@@ -112,7 +110,7 @@ export class SkillWorkshopStore {
|
||||
|
||||
async add(proposal: SkillProposal, maxPending: number): Promise<SkillProposal> {
|
||||
return await withLock(this.filePath, async () => {
|
||||
const file = await readJson(this.filePath);
|
||||
const file = await readJson(this.stateDir, this.filePath);
|
||||
const duplicate = file.proposals.find(
|
||||
(item) =>
|
||||
(item.status === "pending" || item.status === "quarantined") &&
|
||||
@@ -134,48 +132,52 @@ export class SkillWorkshopStore {
|
||||
).length <= maxPending
|
||||
);
|
||||
});
|
||||
await atomicWriteJson(this.filePath, { ...file, version: 1, proposals: nextProposals });
|
||||
await atomicWriteJson(this.stateDir, this.filePath, {
|
||||
...file,
|
||||
version: 1,
|
||||
proposals: nextProposals,
|
||||
});
|
||||
return proposal;
|
||||
});
|
||||
}
|
||||
|
||||
async updateStatus(id: string, status: SkillWorkshopStatus): Promise<SkillProposal> {
|
||||
return await withLock(this.filePath, async () => {
|
||||
const file = await readJson(this.filePath);
|
||||
const file = await readJson(this.stateDir, this.filePath);
|
||||
const index = file.proposals.findIndex((proposal) => proposal.id === id);
|
||||
if (index < 0) {
|
||||
throw new Error(`proposal not found: ${id}`);
|
||||
}
|
||||
const updated = { ...file.proposals[index], status, updatedAt: Date.now() };
|
||||
file.proposals[index] = updated;
|
||||
await atomicWriteJson(this.filePath, file);
|
||||
await atomicWriteJson(this.stateDir, this.filePath, file);
|
||||
return updated;
|
||||
});
|
||||
}
|
||||
|
||||
async recordReviewTurn(toolCalls: number): Promise<SkillWorkshopReviewState> {
|
||||
return await withLock(this.filePath, async () => {
|
||||
const file = await readJson(this.filePath);
|
||||
const file = await readJson(this.stateDir, this.filePath);
|
||||
const current = normalizeReviewState(file.review);
|
||||
const next = {
|
||||
...current,
|
||||
turnsSinceReview: current.turnsSinceReview + 1,
|
||||
toolCallsSinceReview: current.toolCallsSinceReview + Math.max(0, Math.trunc(toolCalls)),
|
||||
};
|
||||
await atomicWriteJson(this.filePath, { ...file, review: next });
|
||||
await atomicWriteJson(this.stateDir, this.filePath, { ...file, review: next });
|
||||
return next;
|
||||
});
|
||||
}
|
||||
|
||||
async markReviewed(): Promise<SkillWorkshopReviewState> {
|
||||
return await withLock(this.filePath, async () => {
|
||||
const file = await readJson(this.filePath);
|
||||
const file = await readJson(this.stateDir, this.filePath);
|
||||
const next = {
|
||||
turnsSinceReview: 0,
|
||||
toolCallsSinceReview: 0,
|
||||
lastReviewAt: Date.now(),
|
||||
};
|
||||
await atomicWriteJson(this.filePath, { ...file, review: next });
|
||||
await atomicWriteJson(this.stateDir, this.filePath, { ...file, review: next });
|
||||
return next;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
import { spawn } from "node:child_process";
|
||||
import { mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync } from "node:fs";
|
||||
import { join } from "node:path";
|
||||
import { resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/sandbox";
|
||||
import { tempWorkspaceSync, resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/sandbox";
|
||||
|
||||
type TranscodeOutcome =
|
||||
| { ok: true; buffer: Buffer }
|
||||
@@ -54,13 +52,13 @@ export async function transcodeAudioBuffer(params: {
|
||||
return { ok: false, reason: "platform-unsupported" };
|
||||
}
|
||||
|
||||
const tmpRoot = resolvePreferredOpenClawTmpDir();
|
||||
mkdirSync(tmpRoot, { recursive: true, mode: 0o700 });
|
||||
const tmpDir = mkdtempSync(join(tmpRoot, "tts-transcode-"));
|
||||
const inPath = join(tmpDir, `in.${source}`);
|
||||
const outPath = join(tmpDir, `out.${target}`);
|
||||
const tmp = tempWorkspaceSync({
|
||||
rootDir: resolvePreferredOpenClawTmpDir(),
|
||||
prefix: "tts-transcode-",
|
||||
});
|
||||
const inPath = tmp.write(`in.${source}`, params.audioBuffer);
|
||||
const outPath = tmp.path(`out.${target}`);
|
||||
try {
|
||||
writeFileSync(inPath, params.audioBuffer, { mode: 0o600 });
|
||||
const result = await runAfconvert({
|
||||
args: [...recipe, inPath, outPath],
|
||||
timeoutMs: params.timeoutMs ?? 5000,
|
||||
@@ -68,15 +66,11 @@ export async function transcodeAudioBuffer(params: {
|
||||
if (!result.ok) {
|
||||
return { ok: false, reason: "transcoder-failed", detail: result.detail };
|
||||
}
|
||||
return { ok: true, buffer: readFileSync(outPath) };
|
||||
return { ok: true, buffer: tmp.read(`out.${target}`) };
|
||||
} catch (err) {
|
||||
return { ok: false, reason: "transcoder-failed", detail: (err as Error).message };
|
||||
} finally {
|
||||
try {
|
||||
rmSync(tmpDir, { recursive: true, force: true });
|
||||
} catch {
|
||||
// best-effort cleanup
|
||||
}
|
||||
tmp.cleanup();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,13 +1,4 @@
|
||||
import { randomBytes } from "node:crypto";
|
||||
import {
|
||||
existsSync,
|
||||
mkdirSync,
|
||||
readFileSync,
|
||||
writeFileSync,
|
||||
mkdtempSync,
|
||||
renameSync,
|
||||
unlinkSync,
|
||||
} from "node:fs";
|
||||
import { existsSync, readFileSync } from "node:fs";
|
||||
import path from "node:path";
|
||||
import { resolveChannelTtsVoiceDelivery } from "openclaw/plugin-sdk/channel-targets";
|
||||
import type {
|
||||
@@ -30,7 +21,8 @@ import {
|
||||
selectApplicableRuntimeConfig,
|
||||
} from "openclaw/plugin-sdk/runtime-config-snapshot";
|
||||
import { isVerbose, logVerbose } from "openclaw/plugin-sdk/runtime-env";
|
||||
import { resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/sandbox";
|
||||
import { tempWorkspaceSync, resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/sandbox";
|
||||
import { privateFileStoreSync } from "openclaw/plugin-sdk/security-runtime";
|
||||
import {
|
||||
normalizeLowercaseStringOrEmpty,
|
||||
normalizeOptionalLowercaseString,
|
||||
@@ -566,24 +558,12 @@ function readPrefs(prefsPath: string): TtsUserPrefs {
|
||||
}
|
||||
|
||||
function atomicWriteFileSync(filePath: string, content: string): void {
|
||||
const tmpPath = `${filePath}.tmp.${Date.now()}.${randomBytes(8).toString("hex")}`;
|
||||
writeFileSync(tmpPath, content, { mode: 0o600 });
|
||||
try {
|
||||
renameSync(tmpPath, filePath);
|
||||
} catch (err) {
|
||||
try {
|
||||
unlinkSync(tmpPath);
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
privateFileStoreSync(path.dirname(filePath)).writeText(path.basename(filePath), content);
|
||||
}
|
||||
|
||||
function updatePrefs(prefsPath: string, update: (prefs: TtsUserPrefs) => void): void {
|
||||
const prefs = readPrefs(prefsPath);
|
||||
update(prefs);
|
||||
mkdirSync(path.dirname(prefsPath), { recursive: true });
|
||||
atomicWriteFileSync(prefsPath, JSON.stringify(prefs, null, 2));
|
||||
}
|
||||
|
||||
@@ -1136,12 +1116,12 @@ export async function textToSpeech(params: {
|
||||
outputFormat = transcoded.outputFormat;
|
||||
}
|
||||
|
||||
const tempRoot = resolvePreferredOpenClawTmpDir();
|
||||
mkdirSync(tempRoot, { recursive: true, mode: 0o700 });
|
||||
const tempDir = mkdtempSync(path.join(tempRoot, "tts-"));
|
||||
const audioPath = path.join(tempDir, `voice-${Date.now()}${fileExtension}`);
|
||||
writeFileSync(audioPath, audioBuffer);
|
||||
scheduleCleanup(tempDir);
|
||||
const temp = tempWorkspaceSync({
|
||||
rootDir: resolvePreferredOpenClawTmpDir(),
|
||||
prefix: "tts-",
|
||||
});
|
||||
const audioPath = temp.write(`voice-${Date.now()}${fileExtension}`, audioBuffer);
|
||||
scheduleCleanup(temp.dir);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
|
||||
@@ -6,10 +6,17 @@ import type { TelegramContext } from "./types.js";
|
||||
|
||||
const saveMediaBuffer = vi.fn();
|
||||
const fetchRemoteMedia = vi.fn();
|
||||
const readFileWithinRoot = vi.fn();
|
||||
const rootRead = vi.fn();
|
||||
|
||||
vi.mock("openclaw/plugin-sdk/file-access-runtime", () => ({
|
||||
readFileWithinRoot: (...args: unknown[]) => readFileWithinRoot(...args),
|
||||
root: async (rootDir: string) => ({
|
||||
read: async (relativePath: string, options?: { maxBytes?: number }) =>
|
||||
await rootRead({
|
||||
rootDir,
|
||||
relativePath,
|
||||
maxBytes: options?.maxBytes,
|
||||
}),
|
||||
}),
|
||||
}));
|
||||
|
||||
vi.mock("./delivery.resolve-media.runtime.js", () => {
|
||||
@@ -201,7 +208,7 @@ describe("resolveMedia getFile retry", () => {
|
||||
vi.useFakeTimers();
|
||||
fetchRemoteMedia.mockReset();
|
||||
saveMediaBuffer.mockReset();
|
||||
readFileWithinRoot.mockReset();
|
||||
rootRead.mockReset();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
@@ -435,7 +442,7 @@ describe("resolveMedia getFile retry", () => {
|
||||
|
||||
it("copies trusted local absolute file paths into inbound media storage for media downloads", async () => {
|
||||
const getFile = vi.fn().mockResolvedValue({ file_path: "/var/lib/telegram-bot-api/file.pdf" });
|
||||
readFileWithinRoot.mockResolvedValueOnce({
|
||||
rootRead.mockResolvedValueOnce({
|
||||
buffer: Buffer.from("pdf-data"),
|
||||
realPath: "/var/lib/telegram-bot-api/file.pdf",
|
||||
stat: { size: 8 },
|
||||
@@ -451,7 +458,7 @@ describe("resolveMedia getFile retry", () => {
|
||||
);
|
||||
|
||||
expect(fetchRemoteMedia).not.toHaveBeenCalled();
|
||||
expect(readFileWithinRoot).toHaveBeenCalledWith({
|
||||
expect(rootRead).toHaveBeenCalledWith({
|
||||
rootDir: "/var/lib/telegram-bot-api",
|
||||
relativePath: "file.pdf",
|
||||
maxBytes: MAX_MEDIA_BYTES,
|
||||
@@ -476,7 +483,7 @@ describe("resolveMedia getFile retry", () => {
|
||||
const getFile = vi
|
||||
.fn()
|
||||
.mockResolvedValue({ file_path: "/var/lib/telegram-bot-api/sticker.webp" });
|
||||
readFileWithinRoot.mockResolvedValueOnce({
|
||||
rootRead.mockResolvedValueOnce({
|
||||
buffer: Buffer.from("sticker-data"),
|
||||
realPath: "/var/lib/telegram-bot-api/sticker.webp",
|
||||
stat: { size: 12 },
|
||||
@@ -491,7 +498,7 @@ describe("resolveMedia getFile retry", () => {
|
||||
});
|
||||
|
||||
expect(fetchRemoteMedia).not.toHaveBeenCalled();
|
||||
expect(readFileWithinRoot).toHaveBeenCalledWith({
|
||||
expect(rootRead).toHaveBeenCalledWith({
|
||||
rootDir: "/var/lib/telegram-bot-api",
|
||||
relativePath: "sticker.webp",
|
||||
maxBytes: MAX_MEDIA_BYTES,
|
||||
@@ -513,7 +520,7 @@ describe("resolveMedia getFile retry", () => {
|
||||
|
||||
it("maps trusted local absolute path read failures to MediaFetchError", async () => {
|
||||
const getFile = vi.fn().mockResolvedValue({ file_path: "/var/lib/telegram-bot-api/file.pdf" });
|
||||
readFileWithinRoot.mockRejectedValueOnce(new Error("file not found"));
|
||||
rootRead.mockRejectedValueOnce(new Error("file not found"));
|
||||
|
||||
await expect(
|
||||
resolveMediaWithDefaults(makeCtx("document", getFile, { mime_type: "application/pdf" }), {
|
||||
@@ -530,7 +537,7 @@ describe("resolveMedia getFile retry", () => {
|
||||
|
||||
it("maps oversized trusted local absolute path reads to MediaFetchError", async () => {
|
||||
const getFile = vi.fn().mockResolvedValue({ file_path: "/var/lib/telegram-bot-api/file.pdf" });
|
||||
readFileWithinRoot.mockRejectedValueOnce(new Error("file exceeds limit"));
|
||||
rootRead.mockRejectedValueOnce(new Error("file exceeds limit"));
|
||||
|
||||
await expect(
|
||||
resolveMediaWithDefaults(makeCtx("document", getFile, { mime_type: "application/pdf" }), {
|
||||
@@ -558,7 +565,7 @@ describe("resolveMedia getFile retry", () => {
|
||||
}),
|
||||
);
|
||||
|
||||
expect(readFileWithinRoot).not.toHaveBeenCalled();
|
||||
expect(rootRead).not.toHaveBeenCalled();
|
||||
expect(fetchRemoteMedia).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import path from "node:path";
|
||||
import { GrammyError } from "grammy";
|
||||
import { readFileWithinRoot } from "openclaw/plugin-sdk/file-access-runtime";
|
||||
import { root as fsRoot } from "openclaw/plugin-sdk/file-access-runtime";
|
||||
import type { TelegramTransport } from "../fetch.js";
|
||||
import { cacheSticker, getCachedSticker } from "../sticker-cache.js";
|
||||
import {
|
||||
@@ -203,9 +203,8 @@ async function downloadAndSaveTelegramFile(params: {
|
||||
if (trustedLocalFile) {
|
||||
let localFile;
|
||||
try {
|
||||
localFile = await readFileWithinRoot({
|
||||
rootDir: trustedLocalFile.rootDir,
|
||||
relativePath: trustedLocalFile.relativePath,
|
||||
const root = await fsRoot(trustedLocalFile.rootDir);
|
||||
localFile = await root.read(trustedLocalFile.relativePath, {
|
||||
maxBytes: params.maxBytes,
|
||||
});
|
||||
} catch (err) {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import type { OpenClawConfig } from "openclaw/plugin-sdk/config-types";
|
||||
import { logVerbose } from "openclaw/plugin-sdk/runtime-env";
|
||||
import { replaceFileAtomicSync } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { resolveStorePath } from "openclaw/plugin-sdk/session-store-runtime";
|
||||
|
||||
const TTL_MS = 24 * 60 * 60 * 1000;
|
||||
@@ -119,10 +119,11 @@ function persistSentMessages(bucket: SentMessageBucket): void {
|
||||
fs.rmSync(persistedPath, { force: true });
|
||||
return;
|
||||
}
|
||||
fs.mkdirSync(path.dirname(persistedPath), { recursive: true });
|
||||
const tempPath = `${persistedPath}.${process.pid}.tmp`;
|
||||
fs.writeFileSync(tempPath, JSON.stringify(serialized), "utf-8");
|
||||
fs.renameSync(tempPath, persistedPath);
|
||||
replaceFileAtomicSync({
|
||||
filePath: persistedPath,
|
||||
content: JSON.stringify(serialized),
|
||||
tempPrefix: ".telegram-sent-message-cache",
|
||||
});
|
||||
}
|
||||
|
||||
export function recordSentMessage(
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
import fs from "node:fs";
|
||||
import type { ChannelLegacyStateMigrationPlan } from "openclaw/plugin-sdk/channel-contract";
|
||||
import { resolveChannelAllowFromPath } from "openclaw/plugin-sdk/channel-pairing-paths";
|
||||
import type { OpenClawConfig } from "openclaw/plugin-sdk/config-types";
|
||||
import { statRegularFileSync } from "openclaw/plugin-sdk/security-runtime";
|
||||
import { resolveDefaultTelegramAccountId } from "./account-selection.js";
|
||||
|
||||
function fileExists(pathValue: string): boolean {
|
||||
try {
|
||||
return fs.existsSync(pathValue) && fs.statSync(pathValue).isFile();
|
||||
return !statRegularFileSync(pathValue).missing;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { logVerbose } from "openclaw/plugin-sdk/runtime-env";
|
||||
import { replaceFileAtomicSync } from "openclaw/plugin-sdk/security-runtime";
|
||||
|
||||
const MAX_ENTRIES = 2_048;
|
||||
const TOPIC_NAME_CACHE_STATE_KEY = Symbol.for("openclaw.telegramTopicNameCacheState");
|
||||
@@ -146,10 +146,11 @@ function persistTopicStore(persistedPath: string, store: TopicNameStore): void {
|
||||
fs.rmSync(persistedPath, { force: true });
|
||||
return;
|
||||
}
|
||||
fs.mkdirSync(path.dirname(persistedPath), { recursive: true });
|
||||
const tempPath = `${persistedPath}.${process.pid}.tmp`;
|
||||
fs.writeFileSync(tempPath, JSON.stringify(Object.fromEntries(store)), "utf-8");
|
||||
fs.renameSync(tempPath, persistedPath);
|
||||
replaceFileAtomicSync({
|
||||
filePath: persistedPath,
|
||||
content: JSON.stringify(Object.fromEntries(store)),
|
||||
tempPrefix: ".telegram-topic-name-cache",
|
||||
});
|
||||
}
|
||||
|
||||
export function updateTopicName(
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { spawn } from "node:child_process";
|
||||
import { existsSync, mkdtempSync, readdirSync, readFileSync, rmSync, writeFileSync } from "node:fs";
|
||||
import { existsSync, readdirSync, readFileSync } from "node:fs";
|
||||
import path from "node:path";
|
||||
import { runFfmpeg } from "openclaw/plugin-sdk/media-runtime";
|
||||
import { createSubsystemLogger } from "openclaw/plugin-sdk/runtime-env";
|
||||
@@ -9,7 +9,7 @@ import type {
|
||||
SpeechSynthesisRequest,
|
||||
SpeechTelephonySynthesisRequest,
|
||||
} from "openclaw/plugin-sdk/speech-core";
|
||||
import { resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
|
||||
import { tempWorkspace, resolvePreferredOpenClawTmpDir } from "openclaw/plugin-sdk/temp-path";
|
||||
|
||||
const log = createSubsystemLogger("tts-local-cli");
|
||||
|
||||
@@ -326,7 +326,11 @@ export function buildCliSpeechProvider(): SpeechProviderPlugin {
|
||||
|
||||
log.debug(`synthesize: text=${req.text.slice(0, 50)}...`);
|
||||
|
||||
const tempDir = mkdtempSync(path.join(resolvePreferredOpenClawTmpDir(), "openclaw-cli-tts-"));
|
||||
const temp = await tempWorkspace({
|
||||
rootDir: resolvePreferredOpenClawTmpDir(),
|
||||
prefix: "openclaw-cli-tts-",
|
||||
});
|
||||
const tempDir = temp.dir;
|
||||
|
||||
try {
|
||||
const result = await runCli({
|
||||
@@ -351,7 +355,7 @@ export function buildCliSpeechProvider(): SpeechProviderPlugin {
|
||||
const inputFile =
|
||||
result.audioPath ?? path.join(tempDir, `input${getFileExt(result.actualFormat)}`);
|
||||
if (!result.audioPath) {
|
||||
writeFileSync(inputFile, result.buffer);
|
||||
await temp.write(`input${getFileExt(result.actualFormat)}`, result.buffer);
|
||||
}
|
||||
buffer = await convertAudio(inputFile, tempDir, "opus");
|
||||
format = "opus";
|
||||
@@ -365,7 +369,7 @@ export function buildCliSpeechProvider(): SpeechProviderPlugin {
|
||||
const inputFile =
|
||||
result.audioPath ?? path.join(tempDir, `input${getFileExt(result.actualFormat)}`);
|
||||
if (!result.audioPath) {
|
||||
writeFileSync(inputFile, result.buffer);
|
||||
await temp.write(`input${getFileExt(result.actualFormat)}`, result.buffer);
|
||||
}
|
||||
buffer = await convertAudio(inputFile, tempDir, desired);
|
||||
format = desired;
|
||||
@@ -383,9 +387,7 @@ export function buildCliSpeechProvider(): SpeechProviderPlugin {
|
||||
voiceCompatible: req.target === "voice-note" && format === "opus",
|
||||
};
|
||||
} finally {
|
||||
try {
|
||||
rmSync(tempDir, { recursive: true, force: true });
|
||||
} catch {}
|
||||
await temp.cleanup();
|
||||
}
|
||||
},
|
||||
|
||||
@@ -397,7 +399,11 @@ export function buildCliSpeechProvider(): SpeechProviderPlugin {
|
||||
|
||||
log.debug(`synthesizeTelephony: text=${req.text.slice(0, 50)}...`);
|
||||
|
||||
const tempDir = mkdtempSync(path.join(resolvePreferredOpenClawTmpDir(), "openclaw-cli-tts-"));
|
||||
const temp = await tempWorkspace({
|
||||
rootDir: resolvePreferredOpenClawTmpDir(),
|
||||
prefix: "openclaw-cli-tts-",
|
||||
});
|
||||
const tempDir = temp.dir;
|
||||
|
||||
try {
|
||||
const result = await runCli({
|
||||
@@ -415,7 +421,7 @@ export function buildCliSpeechProvider(): SpeechProviderPlugin {
|
||||
const inputFile =
|
||||
result.audioPath ?? path.join(tempDir, `input${getFileExt(result.actualFormat)}`);
|
||||
if (!result.audioPath) {
|
||||
writeFileSync(inputFile, result.buffer);
|
||||
await temp.write(`input${getFileExt(result.actualFormat)}`, result.buffer);
|
||||
}
|
||||
|
||||
// Convert to raw 16kHz mono PCM for telephony (no WAV headers)
|
||||
@@ -427,9 +433,7 @@ export function buildCliSpeechProvider(): SpeechProviderPlugin {
|
||||
sampleRate: 16000,
|
||||
};
|
||||
} finally {
|
||||
try {
|
||||
rmSync(tempDir, { recursive: true, force: true });
|
||||
} catch {}
|
||||
await temp.cleanup();
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
import fs from "node:fs";
|
||||
import fsp from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import {
|
||||
appendRegularFile,
|
||||
privateFileStore,
|
||||
privateFileStoreSync,
|
||||
} from "openclaw/plugin-sdk/security-runtime";
|
||||
import { CallRecordSchema, TerminalStates, type CallId, type CallRecord } from "../types.js";
|
||||
|
||||
const pendingPersistWrites = new Set<Promise<void>>();
|
||||
@@ -9,8 +12,11 @@ export function persistCallRecord(storePath: string, call: CallRecord): void {
|
||||
const logPath = path.join(storePath, "calls.jsonl");
|
||||
const line = `${JSON.stringify(call)}\n`;
|
||||
// Fire-and-forget async write to avoid blocking event loop.
|
||||
const write = fsp
|
||||
.appendFile(logPath, line)
|
||||
const write = appendRegularFile({
|
||||
filePath: logPath,
|
||||
content: line,
|
||||
rejectSymlinkParents: true,
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error("[voice-call] Failed to persist call record:", err);
|
||||
})
|
||||
@@ -31,7 +37,8 @@ export function loadActiveCallsFromStore(storePath: string): {
|
||||
rejectedProviderCallIds: Set<string>;
|
||||
} {
|
||||
const logPath = path.join(storePath, "calls.jsonl");
|
||||
if (!fs.existsSync(logPath)) {
|
||||
const content = privateFileStoreSync(storePath).readTextIfExists(path.basename(logPath));
|
||||
if (content === null) {
|
||||
return {
|
||||
activeCalls: new Map(),
|
||||
providerCallIdMap: new Map(),
|
||||
@@ -39,8 +46,6 @@ export function loadActiveCallsFromStore(storePath: string): {
|
||||
rejectedProviderCallIds: new Set(),
|
||||
};
|
||||
}
|
||||
|
||||
const content = fs.readFileSync(logPath, "utf-8");
|
||||
const lines = content.split("\n");
|
||||
|
||||
const callMap = new Map<CallId, CallRecord>();
|
||||
@@ -82,14 +87,10 @@ export async function getCallHistoryFromStore(
|
||||
limit = 50,
|
||||
): Promise<CallRecord[]> {
|
||||
const logPath = path.join(storePath, "calls.jsonl");
|
||||
|
||||
try {
|
||||
await fsp.access(logPath);
|
||||
} catch {
|
||||
const content = await privateFileStore(storePath).readTextIfExists(path.basename(logPath));
|
||||
if (content === null) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const content = await fsp.readFile(logPath, "utf-8");
|
||||
const lines = content.trim().split("\n").filter(Boolean);
|
||||
const calls: CallRecord[] = [];
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ import {
|
||||
parseRealtimeVoiceAgentConsultArgs,
|
||||
type RealtimeVoiceAgentConsultResult,
|
||||
} from "openclaw/plugin-sdk/realtime-voice";
|
||||
import { withTimeout } from "openclaw/plugin-sdk/security-runtime";
|
||||
import type { VoiceCallRealtimeFastContextConfig } from "./config.js";
|
||||
|
||||
type Logger = {
|
||||
@@ -74,22 +75,6 @@ function buildMissText(query: string): string {
|
||||
].join("\n\n");
|
||||
}
|
||||
|
||||
async function withTimeout<T>(promise: Promise<T>, timeoutMs: number): Promise<T> {
|
||||
let timer: ReturnType<typeof setTimeout> | undefined;
|
||||
try {
|
||||
return await Promise.race([
|
||||
promise,
|
||||
new Promise<T>((_resolve, reject) => {
|
||||
timer = setTimeout(() => reject(new RealtimeFastContextTimeoutError(timeoutMs)), timeoutMs);
|
||||
}),
|
||||
]);
|
||||
} finally {
|
||||
if (timer) {
|
||||
clearTimeout(timer);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function lookupFastContext(params: {
|
||||
cfg: OpenClawConfig;
|
||||
agentId: string;
|
||||
@@ -138,6 +123,7 @@ export async function resolveRealtimeFastContextConsult(params: {
|
||||
query,
|
||||
}),
|
||||
params.config.timeoutMs,
|
||||
{ createError: () => new RealtimeFastContextTimeoutError(params.config.timeoutMs) },
|
||||
);
|
||||
if (lookup.status === "unavailable") {
|
||||
params.logger.debug?.(`[voice-call] realtime fast context unavailable: ${lookup.error}`);
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user