docs: restore onboard docs references

This commit is contained in:
Peter Steinberger
2026-03-16 05:50:48 +00:00
parent 2acbea0da7
commit f9e185887f
64 changed files with 328 additions and 219 deletions

View File

@@ -55,10 +55,6 @@
"source": "CLI Setup Reference",
"target": "CLI 设置参考"
},
{
"source": "Setup Overview",
"target": "设置概览"
},
{
"source": "Setup Wizard (CLI)",
"target": "设置向导CLI"

View File

@@ -74,7 +74,7 @@ openclaw hooks info session-memory
### Onboarding
During onboarding (`openclaw setup --wizard`), you'll be prompted to enable recommended hooks. The wizard automatically discovers eligible hooks and presents them for selection.
During onboarding (`openclaw onboard`), you'll be prompted to enable recommended hooks. The wizard automatically discovers eligible hooks and presents them for selection.
## Hook Discovery

View File

@@ -26,7 +26,7 @@ Status: bundled plugin that talks to the BlueBubbles macOS server over HTTP. **R
1. Install the BlueBubbles server on your Mac (follow the instructions at [bluebubbles.app/install](https://bluebubbles.app/install)).
2. In the BlueBubbles config, enable the web API and set a password.
3. Run `openclaw setup --wizard` and select BlueBubbles, or configure manually:
3. Run `openclaw onboard` and select BlueBubbles, or configure manually:
```json5
{
@@ -129,7 +129,7 @@ launchctl load ~/Library/LaunchAgents/com.user.poke-messages.plist
BlueBubbles is available in the interactive setup wizard:
```
openclaw setup --wizard
openclaw onboard
```
The wizard prompts for:

View File

@@ -35,7 +35,7 @@ There are two ways to add the Feishu channel:
If you just installed OpenClaw, run the setup wizard:
```bash
openclaw setup --wizard
openclaw onboard
```
The wizard guides you through:

View File

@@ -16,7 +16,7 @@ Nostr is a decentralized protocol for social networking. This channel enables Op
### Onboarding (recommended)
- The setup wizard (`openclaw setup --wizard`) and `openclaw channels add` list optional channel plugins.
- The setup wizard (`openclaw onboard`) and `openclaw channels add` list optional channel plugins.
- Selecting Nostr prompts you to install the plugin on demand.
Install defaults:

View File

@@ -27,7 +27,7 @@ Details: [Plugins](/tools/plugin)
## Quick setup
1. Install and enable the Synology Chat plugin.
- `openclaw setup --wizard` now shows Synology Chat in the same channel setup list as `openclaw channels add`.
- `openclaw onboard` now shows Synology Chat in the same channel setup list as `openclaw channels add`.
- Non-interactive setup: `openclaw channels add --channel synology-chat --token <token> --url <incoming-webhook-url>`
2. In Synology Chat integrations:
- Create an incoming webhook and copy its URL.
@@ -36,7 +36,7 @@ Details: [Plugins](/tools/plugin)
- `https://gateway-host/webhook/synology` by default.
- Or your custom `channels.synology-chat.webhookPath`.
4. Finish setup in OpenClaw.
- Guided: `openclaw setup --wizard`
- Guided: `openclaw onboard`
- Direct: `openclaw channels add --channel synology-chat --token <token> --url <incoming-webhook-url>`
5. Restart gateway and send a DM to the Synology Chat bot.

View File

@@ -13,7 +13,7 @@ This page describes the current CLI behavior. If commands change, update this do
## Command pages
- [`setup`](/cli/setup)
- [`onboard`](/cli/onboard) (legacy alias for `setup --wizard`)
- [`onboard`](/cli/onboard)
- [`configure`](/cli/configure)
- [`config`](/cli/config)
- [`completion`](/cli/completion)

View File

@@ -1,30 +1,157 @@
---
summary: "Legacy CLI alias for `openclaw setup --wizard`"
summary: "CLI reference for `openclaw onboard` (interactive setup wizard)"
read_when:
- You encountered `openclaw onboard` in older docs or scripts
- You want guided setup for gateway, workspace, auth, channels, and skills
title: "onboard"
---
# `openclaw onboard`
Legacy alias for `openclaw setup --wizard`.
Prefer:
```bash
openclaw setup --wizard
```
`openclaw onboard` still accepts the same flags and behavior for compatibility.
Interactive setup wizard (local or remote Gateway setup).
## Related guides
- Primary command docs: [`openclaw setup`](/cli/setup)
- Setup wizard guide: [Setup Wizard (CLI)](/start/wizard)
- Setup overview: [Setup Overview](/start/onboarding-overview)
- Setup wizard reference: [CLI Setup Reference](/start/wizard-cli-reference)
- CLI onboarding hub: [Setup Wizard (CLI)](/start/wizard)
- Onboarding overview: [Onboarding Overview](/start/onboarding-overview)
- CLI onboarding reference: [CLI Setup Reference](/start/wizard-cli-reference)
- CLI automation: [CLI Automation](/start/wizard-cli-automation)
- macOS onboarding: [Onboarding (macOS App)](/start/onboarding)
For examples, flags, and non-interactive behavior, use the primary docs at
[`openclaw setup`](/cli/setup) and [CLI Setup Reference](/start/wizard-cli-reference).
## Examples
```bash
openclaw onboard
openclaw onboard --flow quickstart
openclaw onboard --flow manual
openclaw onboard --mode remote --remote-url wss://gateway-host:18789
```
For plaintext private-network `ws://` targets (trusted networks only), set
`OPENCLAW_ALLOW_INSECURE_PRIVATE_WS=1` in the onboarding process environment.
Non-interactive custom provider:
```bash
openclaw onboard --non-interactive \
--auth-choice custom-api-key \
--custom-base-url "https://llm.example.com/v1" \
--custom-model-id "foo-large" \
--custom-api-key "$CUSTOM_API_KEY" \
--secret-input-mode plaintext \
--custom-compatibility openai
```
`--custom-api-key` is optional in non-interactive mode. If omitted, onboarding checks `CUSTOM_API_KEY`.
Non-interactive Ollama:
```bash
openclaw onboard --non-interactive \
--auth-choice ollama \
--custom-base-url "http://ollama-host:11434" \
--custom-model-id "qwen3.5:27b" \
--accept-risk
```
`--custom-base-url` defaults to `http://127.0.0.1:11434`. `--custom-model-id` is optional; if omitted, onboarding uses Ollama's suggested defaults. Cloud model IDs such as `kimi-k2.5:cloud` also work here.
Store provider keys as refs instead of plaintext:
```bash
openclaw onboard --non-interactive \
--auth-choice openai-api-key \
--secret-input-mode ref \
--accept-risk
```
With `--secret-input-mode ref`, onboarding writes env-backed refs instead of plaintext key values.
For auth-profile backed providers this writes `keyRef` entries; for custom providers this writes `models.providers.<id>.apiKey` as an env ref (for example `{ source: "env", provider: "default", id: "CUSTOM_API_KEY" }`).
Non-interactive `ref` mode contract:
- Set the provider env var in the onboarding process environment (for example `OPENAI_API_KEY`).
- Do not pass inline key flags (for example `--openai-api-key`) unless that env var is also set.
- If an inline key flag is passed without the required env var, onboarding fails fast with guidance.
Gateway token options in non-interactive mode:
- `--gateway-auth token --gateway-token <token>` stores a plaintext token.
- `--gateway-auth token --gateway-token-ref-env <name>` stores `gateway.auth.token` as an env SecretRef.
- `--gateway-token` and `--gateway-token-ref-env` are mutually exclusive.
- `--gateway-token-ref-env` requires a non-empty env var in the onboarding process environment.
- With `--install-daemon`, when token auth requires a token, SecretRef-managed gateway tokens are validated but not persisted as resolved plaintext in supervisor service environment metadata.
- With `--install-daemon`, if token mode requires a token and the configured token SecretRef is unresolved, onboarding fails closed with remediation guidance.
- With `--install-daemon`, if both `gateway.auth.token` and `gateway.auth.password` are configured and `gateway.auth.mode` is unset, onboarding blocks install until mode is set explicitly.
Example:
```bash
export OPENCLAW_GATEWAY_TOKEN="your-token"
openclaw onboard --non-interactive \
--mode local \
--auth-choice skip \
--gateway-auth token \
--gateway-token-ref-env OPENCLAW_GATEWAY_TOKEN \
--accept-risk
```
Non-interactive local gateway health:
- Unless you pass `--skip-health`, onboarding waits for a reachable local gateway before it exits successfully.
- `--install-daemon` starts the managed gateway install path first. Without it, you must already have a local gateway running, for example `openclaw gateway run`.
- If you only want config/workspace/bootstrap writes in automation, use `--skip-health`.
- On native Windows, `--install-daemon` tries Scheduled Tasks first and falls back to a per-user Startup-folder login item if task creation is denied.
Interactive onboarding behavior with reference mode:
- Choose **Use secret reference** when prompted.
- Then choose either:
- Environment variable
- Configured secret provider (`file` or `exec`)
- Onboarding performs a fast preflight validation before saving the ref.
- If validation fails, onboarding shows the error and lets you retry.
Non-interactive Z.AI endpoint choices:
Note: `--auth-choice zai-api-key` now auto-detects the best Z.AI endpoint for your key (prefers the general API with `zai/glm-5`).
If you specifically want the GLM Coding Plan endpoints, pick `zai-coding-global` or `zai-coding-cn`.
```bash
# Promptless endpoint selection
openclaw onboard --non-interactive \
--auth-choice zai-coding-global \
--zai-api-key "$ZAI_API_KEY"
# Other Z.AI endpoint choices:
# --auth-choice zai-coding-cn
# --auth-choice zai-global
# --auth-choice zai-cn
```
Non-interactive Mistral example:
```bash
openclaw onboard --non-interactive \
--auth-choice mistral-api-key \
--mistral-api-key "$MISTRAL_API_KEY"
```
Flow notes:
- `quickstart`: minimal prompts, auto-generates a gateway token.
- `manual`: full prompts for port/bind/auth (alias of `advanced`).
- Local onboarding DM scope behavior: [CLI Setup Reference](/start/wizard-cli-reference#outputs-and-internals).
- Fastest first chat: `openclaw dashboard` (Control UI, no channel setup).
- Custom Provider: connect any OpenAI or Anthropic compatible endpoint,
including hosted providers not listed. Use Unknown to auto-detect.
## Common follow-up commands
```bash
openclaw configure
openclaw agents add <name>
```
<Note>
`--json` does not imply non-interactive mode. Use `--non-interactive` for scripts.
</Note>

View File

@@ -1,43 +1,29 @@
---
summary: "CLI reference for `openclaw setup` (initialize config/workspace or run the setup wizard)"
summary: "CLI reference for `openclaw setup` (initialize config + workspace)"
read_when:
- You want first-run setup without the guided wizard
- You want the guided setup wizard via `openclaw setup --wizard`
- You're doing first-run setup without the full setup wizard
- You want to set the default workspace path
title: "setup"
---
# `openclaw setup`
Initialize `~/.openclaw/openclaw.json` and the agent workspace, or run the guided setup wizard.
Initialize `~/.openclaw/openclaw.json` and the agent workspace.
Related:
- Getting started: [Getting started](/start/getting-started)
- Setup wizard: [Setup Wizard (CLI)](/start/wizard)
- macOS app onboarding: [Onboarding](/start/onboarding)
- Wizard: [Onboarding](/start/onboarding)
## Examples
```bash
openclaw setup
openclaw setup --workspace ~/.openclaw/workspace
openclaw setup --wizard
openclaw setup --wizard --install-daemon
```
Without flags, `openclaw setup` only ensures config + workspace defaults.
Use `--wizard` for the full guided flow.
To run the wizard via setup:
## Modes
- `openclaw setup`: initialize config/workspace defaults only
- `openclaw setup --wizard`: guided setup for auth, gateway, channels, and skills
- `openclaw setup --wizard --non-interactive`: scripted setup flow
## Related guides
- Setup wizard guide: [Setup Wizard (CLI)](/start/wizard)
- Setup wizard reference: [CLI Setup Reference](/start/wizard-cli-reference)
- Setup wizard automation: [CLI Automation](/start/wizard-cli-automation)
- Legacy alias: [`openclaw onboard`](/cli/onboard)
```bash
openclaw setup --wizard
```

View File

@@ -36,7 +36,7 @@ inside a sandbox workspace under `~/.openclaw/sandboxes`, not your host workspac
}
```
`openclaw setup --wizard`, `openclaw configure`, or `openclaw setup` will create the
`openclaw onboard`, `openclaw configure`, or `openclaw setup` will create the
workspace and seed the bootstrap files if they are missing.
Sandbox seed copies only accept regular in-workspace files; symlink/hardlink
aliases that resolve outside the source workspace are ignored.

View File

@@ -15,7 +15,7 @@ For model selection rules, see [/concepts/models](/concepts/models).
- Model refs use `provider/model` (example: `opencode/claude-opus-4-6`).
- If you set `agents.defaults.models`, it becomes the allowlist.
- CLI helpers: `openclaw setup --wizard`, `openclaw models list`, `openclaw models set <provider/model>`.
- CLI helpers: `openclaw onboard`, `openclaw models list`, `openclaw models set <provider/model>`.
- Provider plugins can inject model catalogs via `registerProvider({ catalog })`;
OpenClaw merges that output into `models.providers` before writing
`models.json`.
@@ -139,7 +139,7 @@ OpenClaw ships with the piai catalog. These providers require **no**
- Auth: `OPENAI_API_KEY`
- Optional rotation: `OPENAI_API_KEYS`, `OPENAI_API_KEY_1`, `OPENAI_API_KEY_2`, plus `OPENCLAW_LIVE_OPENAI_KEY` (single override)
- Example models: `openai/gpt-5.4`, `openai/gpt-5.4-pro`
- CLI: `openclaw setup --wizard --auth-choice openai-api-key`
- CLI: `openclaw onboard --auth-choice openai-api-key`
- Default transport is `auto` (WebSocket-first, SSE fallback)
- Override per model via `agents.defaults.models["openai/<model>"].params.transport` (`"sse"`, `"websocket"`, or `"auto"`)
- OpenAI Responses WebSocket warm-up defaults to enabled via `params.openaiWsWarmup` (`true`/`false`)
@@ -159,7 +159,7 @@ OpenClaw ships with the piai catalog. These providers require **no**
- Auth: `ANTHROPIC_API_KEY` or `claude setup-token`
- Optional rotation: `ANTHROPIC_API_KEYS`, `ANTHROPIC_API_KEY_1`, `ANTHROPIC_API_KEY_2`, plus `OPENCLAW_LIVE_ANTHROPIC_KEY` (single override)
- Example model: `anthropic/claude-opus-4-6`
- CLI: `openclaw setup --wizard --auth-choice token` (paste setup-token) or `openclaw models auth paste-token --provider anthropic`
- CLI: `openclaw onboard --auth-choice token` (paste setup-token) or `openclaw models auth paste-token --provider anthropic`
- Direct API-key models support the shared `/fast` toggle and `params.fastMode`; OpenClaw maps that to Anthropic `service_tier` (`auto` vs `standard_only`)
- Policy note: setup-token support is technical compatibility; Anthropic has blocked some subscription usage outside Claude Code in the past. Verify current Anthropic terms and decide based on your risk tolerance.
- Recommendation: Anthropic API key auth is the safer, recommended path over subscription setup-token auth.
@@ -175,7 +175,7 @@ OpenClaw ships with the piai catalog. These providers require **no**
- Provider: `openai-codex`
- Auth: OAuth (ChatGPT)
- Example model: `openai-codex/gpt-5.4`
- CLI: `openclaw setup --wizard --auth-choice openai-codex` or `openclaw models auth login --provider openai-codex`
- CLI: `openclaw onboard --auth-choice openai-codex` or `openclaw models auth login --provider openai-codex`
- Default transport is `auto` (WebSocket-first, SSE fallback)
- Override per model via `agents.defaults.models["openai-codex/<model>"].params.transport` (`"sse"`, `"websocket"`, or `"auto"`)
- Shares the same `/fast` toggle and `params.fastMode` config as direct `openai/*`
@@ -194,7 +194,7 @@ OpenClaw ships with the piai catalog. These providers require **no**
- Zen runtime provider: `opencode`
- Go runtime provider: `opencode-go`
- Example models: `opencode/claude-opus-4-6`, `opencode-go/kimi-k2.5`
- CLI: `openclaw setup --wizard --auth-choice opencode-zen` or `openclaw setup --wizard --auth-choice opencode-go`
- CLI: `openclaw onboard --auth-choice opencode-zen` or `openclaw onboard --auth-choice opencode-go`
```json5
{
@@ -209,7 +209,7 @@ OpenClaw ships with the piai catalog. These providers require **no**
- Optional rotation: `GEMINI_API_KEYS`, `GEMINI_API_KEY_1`, `GEMINI_API_KEY_2`, `GOOGLE_API_KEY` fallback, and `OPENCLAW_LIVE_GEMINI_KEY` (single override)
- Example models: `google/gemini-3.1-pro-preview`, `google/gemini-3-flash-preview`
- Compatibility: legacy OpenClaw config using `google/gemini-3.1-flash-preview` is normalized to `google/gemini-3-flash-preview`
- CLI: `openclaw setup --wizard --auth-choice gemini-api-key`
- CLI: `openclaw onboard --auth-choice gemini-api-key`
### Google Vertex and Gemini CLI
@@ -227,7 +227,7 @@ OpenClaw ships with the piai catalog. These providers require **no**
- Provider: `zai`
- Auth: `ZAI_API_KEY`
- Example model: `zai/glm-5`
- CLI: `openclaw setup --wizard --auth-choice zai-api-key`
- CLI: `openclaw onboard --auth-choice zai-api-key`
- Aliases: `z.ai/*` and `z-ai/*` normalize to `zai/*`
### Vercel AI Gateway
@@ -235,14 +235,14 @@ OpenClaw ships with the piai catalog. These providers require **no**
- Provider: `vercel-ai-gateway`
- Auth: `AI_GATEWAY_API_KEY`
- Example model: `vercel-ai-gateway/anthropic/claude-opus-4.6`
- CLI: `openclaw setup --wizard --auth-choice ai-gateway-api-key`
- CLI: `openclaw onboard --auth-choice ai-gateway-api-key`
### Kilo Gateway
- Provider: `kilocode`
- Auth: `KILOCODE_API_KEY`
- Example model: `kilocode/anthropic/claude-opus-4.6`
- CLI: `openclaw setup --wizard --kilocode-api-key <key>`
- CLI: `openclaw onboard --kilocode-api-key <key>`
- Base URL: `https://api.kilo.ai/api/gateway/`
- Expanded built-in catalog includes GLM-5 Free, MiniMax M2.5 Free, GPT-5.2, Gemini 3 Pro Preview, Gemini 3 Flash Preview, Grok Code Fast 1, and Kimi K2.5.
@@ -271,13 +271,13 @@ See [/providers/kilocode](/providers/kilocode) for setup details.
- xAI: `xai` (`XAI_API_KEY`)
- Mistral: `mistral` (`MISTRAL_API_KEY`)
- Example model: `mistral/mistral-large-latest`
- CLI: `openclaw setup --wizard --auth-choice mistral-api-key`
- CLI: `openclaw onboard --auth-choice mistral-api-key`
- Groq: `groq` (`GROQ_API_KEY`)
- Cerebras: `cerebras` (`CEREBRAS_API_KEY`)
- GLM models on Cerebras use ids `zai-glm-4.7` and `zai-glm-4.6`.
- OpenAI-compatible base URL: `https://api.cerebras.ai/v1`.
- GitHub Copilot: `github-copilot` (`COPILOT_GITHUB_TOKEN` / `GH_TOKEN` / `GITHUB_TOKEN`)
- Hugging Face Inference example model: `huggingface/deepseek-ai/DeepSeek-R1`; CLI: `openclaw setup --wizard --auth-choice huggingface-api-key`. See [Hugging Face (Inference)](/providers/huggingface).
- Hugging Face Inference example model: `huggingface/deepseek-ai/DeepSeek-R1`; CLI: `openclaw onboard --auth-choice huggingface-api-key`. See [Hugging Face (Inference)](/providers/huggingface).
## Providers via `models.providers` (custom/base URL)
@@ -367,7 +367,7 @@ Volcano Engine (火山引擎) provides access to Doubao and other models in Chin
- Provider: `volcengine` (coding: `volcengine-plan`)
- Auth: `VOLCANO_ENGINE_API_KEY`
- Example model: `volcengine/doubao-seed-1-8-251228`
- CLI: `openclaw setup --wizard --auth-choice volcengine-api-key`
- CLI: `openclaw onboard --auth-choice volcengine-api-key`
```json5
{
@@ -400,7 +400,7 @@ BytePlus ARK provides access to the same models as Volcano Engine for internatio
- Provider: `byteplus` (coding: `byteplus-plan`)
- Auth: `BYTEPLUS_API_KEY`
- Example model: `byteplus/seed-1-8-251228`
- CLI: `openclaw setup --wizard --auth-choice byteplus-api-key`
- CLI: `openclaw onboard --auth-choice byteplus-api-key`
```json5
{
@@ -431,7 +431,7 @@ Synthetic provides Anthropic-compatible models behind the `synthetic` provider:
- Provider: `synthetic`
- Auth: `SYNTHETIC_API_KEY`
- Example model: `synthetic/hf:MiniMaxAI/MiniMax-M2.5`
- CLI: `openclaw setup --wizard --auth-choice synthetic-api-key`
- CLI: `openclaw onboard --auth-choice synthetic-api-key`
```json5
{
@@ -485,7 +485,7 @@ ollama pull llama3.3
Ollama is detected locally at `http://127.0.0.1:11434` when you opt in with
`OLLAMA_API_KEY`, and the bundled provider plugin adds Ollama directly to
`openclaw setup --wizard` and the model picker. See [/providers/ollama](/providers/ollama)
`openclaw onboard` and the model picker. See [/providers/ollama](/providers/ollama)
for onboarding, cloud/local mode, and custom configuration.
### vLLM
@@ -595,7 +595,7 @@ Notes:
## CLI examples
```bash
openclaw setup --wizard --auth-choice opencode-zen
openclaw onboard --auth-choice opencode-zen
openclaw models set opencode/claude-opus-4-6
openclaw models list
```

View File

@@ -39,7 +39,7 @@ Related:
If you don't want to hand-edit config, run the setup wizard:
```bash
openclaw setup --wizard
openclaw onboard
```
It can set up model + auth for common providers, including **OpenAI Code (Codex)

View File

@@ -92,7 +92,7 @@ Flow shape:
2. paste the token into OpenClaw
3. store as a token auth profile (no refresh)
The wizard path is `openclaw setup --wizard` → auth choice `setup-token` (Anthropic).
The wizard path is `openclaw onboard` → auth choice `setup-token` (Anthropic).
### OpenAI Codex (ChatGPT OAuth)
@@ -107,7 +107,7 @@ Flow shape (PKCE):
5. exchange at `https://auth.openai.com/oauth/token`
6. extract `accountId` from the access token and store `{ access, refresh, expires, accountId }`
Wizard path is `openclaw setup --wizard` → auth choice `openai-codex`.
Wizard path is `openclaw onboard` → auth choice `openai-codex`.
## Refresh + expiry

View File

@@ -50,7 +50,7 @@ openclaw doctor
```
If you'd rather not manage env vars yourself, the setup wizard can store
API keys for daemon use: `openclaw setup --wizard`.
API keys for daemon use: `openclaw onboard`.
See [Help](/help) for details on env inheritance (`env.shellEnv`,
`~/.openclaw/.env`, systemd/launchd).

View File

@@ -2182,7 +2182,7 @@ Use `cerebras/zai-glm-4.7` for Cerebras; `zai/glm-4.7` for Z.AI direct.
}
```
Set `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`). Use `opencode/...` refs for the Zen catalog or `opencode-go/...` refs for the Go catalog. Shortcut: `openclaw setup --wizard --auth-choice opencode-zen` or `openclaw setup --wizard --auth-choice opencode-go`.
Set `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`). Use `opencode/...` refs for the Zen catalog or `opencode-go/...` refs for the Go catalog. Shortcut: `openclaw onboard --auth-choice opencode-zen` or `openclaw onboard --auth-choice opencode-go`.
</Accordion>
@@ -2199,7 +2199,7 @@ Set `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`). Use `opencode/...` refs for
}
```
Set `ZAI_API_KEY`. `z.ai/*` and `z-ai/*` are accepted aliases. Shortcut: `openclaw setup --wizard --auth-choice zai-api-key`.
Set `ZAI_API_KEY`. `z.ai/*` and `z-ai/*` are accepted aliases. Shortcut: `openclaw onboard --auth-choice zai-api-key`.
- General endpoint: `https://api.z.ai/api/paas/v4`
- Coding endpoint (default): `https://api.z.ai/api/coding/paas/v4`
@@ -2242,7 +2242,7 @@ Set `ZAI_API_KEY`. `z.ai/*` and `z-ai/*` are accepted aliases. Shortcut: `opencl
}
```
For the China endpoint: `baseUrl: "https://api.moonshot.cn/v1"` or `openclaw setup --wizard --auth-choice moonshot-api-key-cn`.
For the China endpoint: `baseUrl: "https://api.moonshot.cn/v1"` or `openclaw onboard --auth-choice moonshot-api-key-cn`.
</Accordion>
@@ -2260,7 +2260,7 @@ For the China endpoint: `baseUrl: "https://api.moonshot.cn/v1"` or `openclaw set
}
```
Anthropic-compatible, built-in provider. Shortcut: `openclaw setup --wizard --auth-choice kimi-code-api-key`.
Anthropic-compatible, built-in provider. Shortcut: `openclaw onboard --auth-choice kimi-code-api-key`.
</Accordion>
@@ -2299,7 +2299,7 @@ Anthropic-compatible, built-in provider. Shortcut: `openclaw setup --wizard --au
}
```
Base URL should omit `/v1` (Anthropic client appends it). Shortcut: `openclaw setup --wizard --auth-choice synthetic-api-key`.
Base URL should omit `/v1` (Anthropic client appends it). Shortcut: `openclaw onboard --auth-choice synthetic-api-key`.
</Accordion>
@@ -2339,7 +2339,7 @@ Base URL should omit `/v1` (Anthropic client appends it). Shortcut: `openclaw se
}
```
Set `MINIMAX_API_KEY`. Shortcut: `openclaw setup --wizard --auth-choice minimax-api`.
Set `MINIMAX_API_KEY`. Shortcut: `openclaw onboard --auth-choice minimax-api`.
</Accordion>

View File

@@ -20,7 +20,7 @@ If the file is missing, OpenClaw uses safe defaults. Common reasons to add a con
See the [full reference](/gateway/configuration-reference) for every available field.
<Tip>
**New to configuration?** Start with `openclaw setup --wizard` for interactive setup, or check out the [Configuration Examples](/gateway/configuration-examples) guide for complete copy-paste configs.
**New to configuration?** Start with `openclaw onboard` for interactive setup, or check out the [Configuration Examples](/gateway/configuration-examples) guide for complete copy-paste configs.
</Tip>
## Minimal config
@@ -38,7 +38,7 @@ See the [full reference](/gateway/configuration-reference) for every available f
<Tabs>
<Tab title="Interactive wizard">
```bash
openclaw setup --wizard # full setup wizard
openclaw onboard # full setup wizard
openclaw configure # config wizard
```
</Tab>

View File

@@ -11,7 +11,7 @@ title: "Local Models"
Local is doable, but OpenClaw expects large context + strong defenses against prompt injection. Small cards truncate context and leak safety. Aim high: **≥2 maxed-out Mac Studios or equivalent GPU rig (~$30k+)**. A single **24 GB** GPU works only for lighter prompts with higher latency. Use the **largest / full-size model variant you can run**; aggressively quantized or “small” checkpoints raise prompt-injection risk (see [Security](/gateway/security)).
If you want the lowest-friction local setup, start with [Ollama](/providers/ollama) and `openclaw setup --wizard`. This page is the opinionated guide for higher-end local stacks and custom OpenAI-compatible local servers.
If you want the lowest-friction local setup, start with [Ollama](/providers/ollama) and `openclaw onboard`. This page is the opinionated guide for higher-end local stacks and custom OpenAI-compatible local servers.
## Recommended: LM Studio + MiniMax M2.5 (Responses API, full-size)

View File

@@ -59,7 +59,7 @@ Port spacing: leave at least 20 ports between base ports so the derived browser/
```bash
# Main bot (existing or fresh, without --profile param)
# Runs on port 18789 + Chrome CDC/Canvas/... Ports
openclaw setup --wizard
openclaw onboard
openclaw gateway install
# Rescue bot (isolated profile + ports)

View File

@@ -321,7 +321,7 @@ The repo recommends running from source and using the setup wizard:
```bash
curl -fsSL https://openclaw.ai/install.sh | bash
openclaw setup --wizard --install-daemon
openclaw onboard --install-daemon
```
The wizard can also build UI assets automatically. After onboarding, you typically run the Gateway on port **18789**.
@@ -334,10 +334,10 @@ cd openclaw
pnpm install
pnpm build
pnpm ui:build # auto-installs UI deps on first run
openclaw setup --wizard
openclaw onboard
```
If you don't have a global install yet, run it via `pnpm openclaw setup --wizard`.
If you don't have a global install yet, run it via `pnpm openclaw onboard`.
### How do I open the dashboard after onboarding
@@ -687,7 +687,7 @@ Docs: [Update](/cli/update), [Updating](/install/updating).
### What does the setup wizard actually do
`openclaw setup --wizard` is the recommended setup path. In **local mode** it walks you through:
`openclaw onboard` is the recommended setup path. In **local mode** it walks you through:
- **Model/auth setup** (provider OAuth/setup-token flows and API keys supported, plus local model options such as LM Studio)
- **Workspace** location + bootstrap files
@@ -1904,7 +1904,7 @@ openclaw reset --scope full --yes --non-interactive
Then re-run setup:
```bash
openclaw setup --wizard --install-daemon
openclaw onboard --install-daemon
```
Notes:
@@ -2092,7 +2092,7 @@ Quickest setup:
1. Install Ollama from `https://ollama.com/download`
2. Pull a local model such as `ollama pull glm-4.7-flash`
3. If you want Ollama Cloud too, run `ollama signin`
4. Run `openclaw setup --wizard` and choose `Ollama`
4. Run `openclaw onboard` and choose `Ollama`
5. Pick `Local` or `Cloud + Local`
Notes:

View File

@@ -34,7 +34,7 @@ title: "OpenClaw"
Install OpenClaw and bring up the Gateway in minutes.
</Card>
<Card title="Run the Wizard" href="/start/wizard" icon="sparkles">
Guided setup with `openclaw setup --wizard` and pairing flows.
Guided setup with `openclaw onboard` and pairing flows.
</Card>
<Card title="Open the Control UI" href="/web/control-ui" icon="layout-dashboard">
Launch the browser dashboard for chat, config, and sessions.
@@ -103,7 +103,7 @@ The Gateway is the single source of truth for sessions, routing, and channel con
</Step>
<Step title="Onboard and install the service">
```bash
openclaw setup --wizard --install-daemon
openclaw onboard --install-daemon
```
</Step>
<Step title="Pair WhatsApp and start the Gateway">

View File

@@ -31,7 +31,7 @@ Shelley, [exe.dev](https://exe.dev)'s agent, can install OpenClaw instantly with
prompt. The prompt used is as below:
```
Set up OpenClaw (https://docs.openclaw.ai/install) on this VM. Use the non-interactive and accept-risk flags for openclaw setup --wizarding. Add the supplied auth or token as needed. Configure nginx to forward from the default port 18789 to the root location on the default enabled site config, making sure to enable Websocket support. Pairing is done by "openclaw devices list" and "openclaw devices approve <request id>". Make sure the dashboard shows that OpenClaw's health is OK. exe.dev handles forwarding from port 8000 to port 80/443 and HTTPS for us, so the final "reachable" should be <vm-name>.exe.xyz, without port specification.
Set up OpenClaw (https://docs.openclaw.ai/install) on this VM. Use the non-interactive and accept-risk flags for openclaw onboarding. Add the supplied auth or token as needed. Configure nginx to forward from the default port 18789 to the root location on the default enabled site config, making sure to enable Websocket support. Pairing is done by "openclaw devices list" and "openclaw devices approve <request id>". Make sure the dashboard shows that OpenClaw's health is OK. exe.dev handles forwarding from port 8000 to port 80/443 and HTTPS for us, so the final "reachable" should be <vm-name>.exe.xyz, without port specification.
```
## Manual installation

View File

@@ -76,7 +76,7 @@ For VPS/cloud hosts, avoid third-party "1-click" marketplace images when possibl
<Tab title="npm">
```bash
npm install -g openclaw@latest
openclaw setup --wizard --install-daemon
openclaw onboard --install-daemon
```
<Accordion title="sharp build errors?">
@@ -93,7 +93,7 @@ For VPS/cloud hosts, avoid third-party "1-click" marketplace images when possibl
```bash
pnpm add -g openclaw@latest
pnpm approve-builds -g # approve openclaw, node-llama-cpp, sharp, etc.
openclaw setup --wizard --install-daemon
openclaw onboard --install-daemon
```
<Note>
@@ -140,7 +140,7 @@ For VPS/cloud hosts, avoid third-party "1-click" marketplace images when possibl
</Step>
<Step title="Run onboarding">
```bash
openclaw setup --wizard --install-daemon
openclaw onboard --install-daemon
```
</Step>
</Steps>

View File

@@ -224,7 +224,7 @@ Designed for environments where you want everything under a local prefix (defaul
| `--version <ver>` | OpenClaw version or dist-tag (default: `latest`) |
| `--node-version <ver>` | Node version (default: `22.22.0`) |
| `--json` | Emit NDJSON events |
| `--onboard` | Run `openclaw setup --wizard` after install |
| `--onboard` | Run `openclaw onboard` after install |
| `--no-onboard` | Skip onboarding (default) |
| `--set-npm-prefix` | On Linux, force npm prefix to `~/.npm-global` if current prefix is not writable |
| `--help` | Show usage (`-h`) |

View File

@@ -138,7 +138,7 @@ Inside the VM:
```bash
npm install -g openclaw@latest
openclaw setup --wizard --install-daemon
openclaw onboard --install-daemon
```
Follow the onboarding prompts to set up your model provider (Anthropic, OpenAI, etc.).

View File

@@ -80,7 +80,7 @@ openclaw --version
## 4) Run Onboarding
```bash
openclaw setup --wizard --install-daemon
openclaw onboard --install-daemon
```
The wizard will walk you through:

View File

@@ -42,7 +42,7 @@ Native companion apps for Windows are also planned; the Gateway is recommended v
Use one of these (all supported):
- Wizard (recommended): `openclaw setup --wizard --install-daemon`
- Wizard (recommended): `openclaw onboard --install-daemon`
- Direct: `openclaw gateway install`
- Configure flow: `openclaw configure` → select **Gateway service**
- Repair/migrate: `openclaw doctor` (offers to install or fix the service)

View File

@@ -17,7 +17,7 @@ Native Linux companion apps are planned. Contributions are welcome if you want t
1. Install Node 24 (recommended; Node 22 LTS, currently `22.16+`, still works for compatibility)
2. `npm i -g openclaw@latest`
3. `openclaw setup --wizard --install-daemon`
3. `openclaw onboard --install-daemon`
4. From your laptop: `ssh -N -L 18789:127.0.0.1:18789 <user>@<host>`
5. Open `http://127.0.0.1:18789/` and paste your token
@@ -39,7 +39,7 @@ Step-by-step VPS guide: [exe.dev](/install/exe-dev)
Use one of these:
```
openclaw setup --wizard --install-daemon
openclaw onboard --install-daemon
```
Or:

View File

@@ -130,7 +130,7 @@ The hackable install gives you direct access to logs and code — useful for deb
## 7) Run Onboarding
```bash
openclaw setup --wizard --install-daemon
openclaw onboard --install-daemon
```
Follow the wizard:

View File

@@ -38,8 +38,8 @@ openclaw agent --local --agent main --thinking low -m "Reply with exactly WINDOW
Current caveats:
- `openclaw setup --wizard --non-interactive` still expects a reachable local gateway unless you pass `--skip-health`
- `openclaw setup --wizard --non-interactive --install-daemon` and `openclaw gateway install` try Windows Scheduled Tasks first
- `openclaw onboard --non-interactive` still expects a reachable local gateway unless you pass `--skip-health`
- `openclaw onboard --non-interactive --install-daemon` and `openclaw gateway install` try Windows Scheduled Tasks first
- if Scheduled Task creation is denied, OpenClaw falls back to a per-user Startup-folder login item and starts the gateway immediately
- if `schtasks` itself wedges or stops responding, OpenClaw now aborts that path quickly and falls back instead of hanging forever
- Scheduled Tasks are still preferred when available because they provide better supervisor status
@@ -47,7 +47,7 @@ Current caveats:
If you want the native CLI only, without gateway service install, use one of these:
```powershell
openclaw setup --wizard --non-interactive --skip-health
openclaw onboard --non-interactive --skip-health
openclaw gateway run
```
@@ -70,7 +70,7 @@ If Scheduled Task creation is blocked, the fallback service mode still auto-star
Inside WSL2:
```
openclaw setup --wizard --install-daemon
openclaw onboard --install-daemon
```
Or:
@@ -230,7 +230,7 @@ cd openclaw
pnpm install
pnpm ui:build # auto-installs UI deps on first run
pnpm build
openclaw setup --wizard
openclaw onboard
```
Full guide: [Getting Started](/start/getting-started)

View File

@@ -19,11 +19,11 @@ Create your API key in the Anthropic Console.
### CLI setup
```bash
openclaw setup --wizard
openclaw onboard
# choose: Anthropic API key
# or non-interactive
openclaw setup --wizard --anthropic-api-key "$ANTHROPIC_API_KEY"
openclaw onboard --anthropic-api-key "$ANTHROPIC_API_KEY"
```
### Config snippet
@@ -214,7 +214,7 @@ openclaw models auth paste-token --provider anthropic
```bash
# Paste a setup-token during setup
openclaw setup --wizard --auth-choice setup-token
openclaw onboard --auth-choice setup-token
```
### Config snippet (setup-token)

View File

@@ -22,7 +22,7 @@ For Anthropic models, use your Anthropic API key.
1. Set the provider API key and Gateway details:
```bash
openclaw setup --wizard --auth-choice cloudflare-ai-gateway-api-key
openclaw onboard --auth-choice cloudflare-ai-gateway-api-key
```
2. Set a default model:
@@ -40,7 +40,7 @@ openclaw setup --wizard --auth-choice cloudflare-ai-gateway-api-key
## Non-interactive example
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice cloudflare-ai-gateway-api-key \
--cloudflare-ai-gateway-account-id "your-account-id" \

View File

@@ -15,16 +15,16 @@ models are accessed via the `zai` provider and model IDs like `zai/glm-5`.
```bash
# Coding Plan Global, recommended for Coding Plan users
openclaw setup --wizard --auth-choice zai-coding-global
openclaw onboard --auth-choice zai-coding-global
# Coding Plan CN (China region), recommended for Coding Plan users
openclaw setup --wizard --auth-choice zai-coding-cn
openclaw onboard --auth-choice zai-coding-cn
# General API
openclaw setup --wizard --auth-choice zai-global
openclaw onboard --auth-choice zai-global
# General API CN (China region)
openclaw setup --wizard --auth-choice zai-cn
openclaw onboard --auth-choice zai-cn
```
## Config snippet

View File

@@ -21,7 +21,7 @@ title: "Hugging Face (Inference)"
2. Run onboarding and choose **Hugging Face** in the provider dropdown, then enter your API key when prompted:
```bash
openclaw setup --wizard --auth-choice huggingface-api-key
openclaw onboard --auth-choice huggingface-api-key
```
3. In the **Default Hugging Face model** dropdown, pick the model you want (the list is loaded from the Inference API when you have a valid token; otherwise a built-in list is shown). Your choice is saved as the default model.
@@ -40,7 +40,7 @@ openclaw setup --wizard --auth-choice huggingface-api-key
## Non-interactive example
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice huggingface-api-key \
--huggingface-api-key "$HF_TOKEN"

View File

@@ -15,7 +15,7 @@ Looking for chat channel docs (WhatsApp/Telegram/Discord/Slack/Mattermost (plugi
## Quick start
1. Authenticate with the provider (usually via `openclaw setup --wizard`).
1. Authenticate with the provider (usually via `openclaw onboard`).
2. Set the default model:
```json5

View File

@@ -19,7 +19,7 @@ endpoint and API key. It is OpenAI-compatible, so most OpenAI SDKs work by switc
## CLI setup
```bash
openclaw setup --wizard --kilocode-api-key <key>
openclaw onboard --kilocode-api-key <key>
```
Or set the environment variable:

View File

@@ -22,7 +22,7 @@ read_when:
### Via onboarding
```bash
openclaw setup --wizard --auth-choice litellm-api-key
openclaw onboard --auth-choice litellm-api-key
```
### Manual setup

View File

@@ -44,7 +44,7 @@ Enable the bundled OAuth plugin and authenticate:
```bash
openclaw plugins enable minimax # skip if already loaded.
openclaw gateway restart # restart if gateway is already running
openclaw setup --wizard --auth-choice minimax-portal
openclaw onboard --auth-choice minimax-portal
```
You will be prompted to select an endpoint:

View File

@@ -15,9 +15,9 @@ Mistral can also be used for memory embeddings (`memorySearch.provider = "mistra
## CLI setup
```bash
openclaw setup --wizard --auth-choice mistral-api-key
openclaw onboard --auth-choice mistral-api-key
# or non-interactive
openclaw setup --wizard --mistral-api-key "$MISTRAL_API_KEY"
openclaw onboard --mistral-api-key "$MISTRAL_API_KEY"
```
## Config snippet (LLM provider)

View File

@@ -13,7 +13,7 @@ model as `provider/model`.
## Quick start (two steps)
1. Authenticate with the provider (usually via `openclaw setup --wizard`).
1. Authenticate with the provider (usually via `openclaw onboard`).
2. Set the default model:
```json5

View File

@@ -26,13 +26,13 @@ Current Kimi K2 model IDs:
[//]: # "moonshot-kimi-k2-ids:end"
```bash
openclaw setup --wizard --auth-choice moonshot-api-key
openclaw onboard --auth-choice moonshot-api-key
```
Kimi Coding:
```bash
openclaw setup --wizard --auth-choice kimi-code-api-key
openclaw onboard --auth-choice kimi-code-api-key
```
Note: Moonshot and Kimi Coding are separate providers. Keys are not interchangeable, endpoints differ, and model refs differ (Moonshot uses `moonshot/...`, Kimi Coding uses `kimi-coding/...`).

View File

@@ -16,7 +16,7 @@ Export the key once, then run onboarding and set an NVIDIA model:
```bash
export NVIDIA_API_KEY="nvapi-..."
openclaw setup --wizard --auth-choice skip
openclaw onboard --auth-choice skip
openclaw models set nvidia/nvidia/llama-3.1-nemotron-70b-instruct
```

View File

@@ -21,7 +21,7 @@ Ollama is a local LLM runtime that makes it easy to run open-source models on yo
The fastest way to set up Ollama is through the setup wizard:
```bash
openclaw setup --wizard
openclaw onboard
```
Select **Ollama** from the provider list. The wizard will:
@@ -35,7 +35,7 @@ Select **Ollama** from the provider list. The wizard will:
Non-interactive mode is also supported:
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--auth-choice ollama \
--accept-risk
```
@@ -43,7 +43,7 @@ openclaw setup --wizard --non-interactive \
Optionally specify a custom base URL or model:
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--auth-choice ollama \
--custom-base-url "http://ollama-host:11434" \
--custom-model-id "qwen3.5:27b" \
@@ -73,7 +73,7 @@ ollama signin
4. Run onboarding and choose `Ollama`:
```bash
openclaw setup --wizard
openclaw onboard
```
- `Local`: local models only

View File

@@ -20,9 +20,9 @@ Get your API key from the OpenAI dashboard.
### CLI setup
```bash
openclaw setup --wizard --auth-choice openai-api-key
openclaw onboard --auth-choice openai-api-key
# or non-interactive
openclaw setup --wizard --openai-api-key "$OPENAI_API_KEY"
openclaw onboard --openai-api-key "$OPENAI_API_KEY"
```
### Config snippet
@@ -52,7 +52,7 @@ Codex cloud requires ChatGPT sign-in, while the Codex CLI supports ChatGPT or AP
```bash
# Run Codex OAuth in the wizard
openclaw setup --wizard --auth-choice openai-codex
openclaw onboard --auth-choice openai-codex
# Or run OAuth directly
openclaw models auth login --provider openai-codex

View File

@@ -21,9 +21,9 @@ provider id `opencode-go` so upstream per-model routing stays correct.
## CLI setup
```bash
openclaw setup --wizard --auth-choice opencode-go
openclaw onboard --auth-choice opencode-go
# or non-interactive
openclaw setup --wizard --opencode-go-api-key "$OPENCODE_API_KEY"
openclaw onboard --opencode-go-api-key "$OPENCODE_API_KEY"
```
## Config snippet

View File

@@ -22,15 +22,15 @@ as one OpenCode setup.
### Zen catalog
```bash
openclaw setup --wizard --auth-choice opencode-zen
openclaw setup --wizard --opencode-zen-api-key "$OPENCODE_API_KEY"
openclaw onboard --auth-choice opencode-zen
openclaw onboard --opencode-zen-api-key "$OPENCODE_API_KEY"
```
### Go catalog
```bash
openclaw setup --wizard --auth-choice opencode-go
openclaw setup --wizard --opencode-go-api-key "$OPENCODE_API_KEY"
openclaw onboard --auth-choice opencode-go
openclaw onboard --opencode-go-api-key "$OPENCODE_API_KEY"
```
## Config snippet

View File

@@ -14,7 +14,7 @@ endpoint and API key. It is OpenAI-compatible, so most OpenAI SDKs work by switc
## CLI setup
```bash
openclaw setup --wizard --auth-choice apiKey --token-provider openrouter --token "$OPENROUTER_API_KEY"
openclaw onboard --auth-choice apiKey --token-provider openrouter --token "$OPENROUTER_API_KEY"
```
## Config snippet

View File

@@ -27,7 +27,7 @@ endpoint and API key. It is OpenAI-compatible, so most OpenAI SDKs work by switc
## CLI setup
```bash
openclaw setup --wizard --auth-choice qianfan-api-key
openclaw onboard --auth-choice qianfan-api-key
```
## Related Documentation

View File

@@ -33,7 +33,7 @@ export SGLANG_API_KEY="sglang-local"
3. Run onboarding and choose `SGLang`, or set a model directly:
```bash
openclaw setup --wizard
openclaw onboard
```
```json5

View File

@@ -17,7 +17,7 @@ Synthetic exposes Anthropic-compatible endpoints. OpenClaw registers it as the
2. Run onboarding:
```bash
openclaw setup --wizard --auth-choice synthetic-api-key
openclaw onboard --auth-choice synthetic-api-key
```
The default model is set to:

View File

@@ -18,7 +18,7 @@ The [Together AI](https://together.ai) provides access to leading open-source mo
1. Set the API key (recommended: store it for the Gateway):
```bash
openclaw setup --wizard --auth-choice together-api-key
openclaw onboard --auth-choice together-api-key
```
2. Set a default model:
@@ -36,7 +36,7 @@ openclaw setup --wizard --auth-choice together-api-key
## Non-interactive example
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice together-api-key \
--together-api-key "$TOGETHER_API_KEY"

View File

@@ -58,7 +58,7 @@ export VENICE_API_KEY="vapi_xxxxxxxxxxxx"
**Option B: Interactive Setup (Recommended)**
```bash
openclaw setup --wizard --auth-choice venice-api-key
openclaw onboard --auth-choice venice-api-key
```
This will:
@@ -71,7 +71,7 @@ This will:
**Option C: Non-interactive**
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--auth-choice venice-api-key \
--venice-api-key "vapi_xxxxxxxxxxxx"
```

View File

@@ -21,7 +21,7 @@ The [Vercel AI Gateway](https://vercel.com/ai-gateway) provides a unified API to
1. Set the API key (recommended: store it for the Gateway):
```bash
openclaw setup --wizard --auth-choice ai-gateway-api-key
openclaw onboard --auth-choice ai-gateway-api-key
```
2. Set a default model:
@@ -39,7 +39,7 @@ openclaw setup --wizard --auth-choice ai-gateway-api-key
## Non-interactive example
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice ai-gateway-api-key \
--ai-gateway-api-key "$AI_GATEWAY_API_KEY"

View File

@@ -22,9 +22,9 @@ the `xiaomi` provider with a Xiaomi MiMo API key.
## CLI setup
```bash
openclaw setup --wizard --auth-choice xiaomi-api-key
openclaw onboard --auth-choice xiaomi-api-key
# or non-interactive
openclaw setup --wizard --auth-choice xiaomi-api-key --xiaomi-api-key "$XIAOMI_API_KEY"
openclaw onboard --auth-choice xiaomi-api-key --xiaomi-api-key "$XIAOMI_API_KEY"
```
## Config snippet

View File

@@ -16,16 +16,16 @@ with a Z.AI API key.
```bash
# Coding Plan Global, recommended for Coding Plan users
openclaw setup --wizard --auth-choice zai-coding-global
openclaw onboard --auth-choice zai-coding-global
# Coding Plan CN (China region), recommended for Coding Plan users
openclaw setup --wizard --auth-choice zai-coding-cn
openclaw onboard --auth-choice zai-coding-cn
# General API
openclaw setup --wizard --auth-choice zai-global
openclaw onboard --auth-choice zai-global
# General API CN (China region)
openclaw setup --wizard --auth-choice zai-cn
openclaw onboard --auth-choice zai-cn
```
## Config snippet

View File

@@ -2,7 +2,7 @@
summary: "Full reference for the CLI setup wizard: every step, flag, and config field"
read_when:
- Looking up a specific wizard step or flag
- Automating setup with non-interactive mode
- Automating onboarding with non-interactive mode
- Debugging wizard behavior
title: "Setup Wizard Reference"
sidebarTitle: "Wizard Reference"
@@ -10,7 +10,7 @@ sidebarTitle: "Wizard Reference"
# Setup Wizard Reference
This is the full reference for the `openclaw setup --wizard` CLI wizard.
This is the full reference for the `openclaw onboard` CLI wizard.
For a high-level overview, see [Setup Wizard](/start/wizard).
## Flow details (local mode)
@@ -76,11 +76,11 @@ For a high-level overview, see [Setup Wizard](/start/wizard).
- In token mode, interactive setup offers:
- **Generate/store plaintext token** (default)
- **Use SecretRef** (opt-in)
- Quickstart reuses existing `gateway.auth.token` SecretRefs across `env`, `file`, and `exec` providers for setup probe/dashboard bootstrap.
- If that SecretRef is configured but cannot be resolved, setup fails early with a clear fix message instead of silently degrading runtime auth.
- Quickstart reuses existing `gateway.auth.token` SecretRefs across `env`, `file`, and `exec` providers for onboarding probe/dashboard bootstrap.
- If that SecretRef is configured but cannot be resolved, onboarding fails early with a clear fix message instead of silently degrading runtime auth.
- In password mode, interactive setup also supports plaintext or SecretRef storage.
- Non-interactive token SecretRef path: `--gateway-token-ref-env <ENV_VAR>`.
- Requires a non-empty env var in the setup process environment.
- Requires a non-empty env var in the onboarding process environment.
- Cannot be combined with `--gateway-token`.
- Disable auth only if you fully trust every local process.
- Nonloopback binds still require auth.
@@ -137,7 +137,7 @@ If the Control UI assets are missing, the wizard attempts to build them; fallbac
Use `--non-interactive` to automate or script onboarding:
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice apiKey \
--anthropic-api-key "$ANTHROPIC_API_KEY" \
@@ -154,7 +154,7 @@ Gateway token SecretRef in non-interactive mode:
```bash
export OPENCLAW_GATEWAY_TOKEN="your-token"
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice skip \
--gateway-auth token \

View File

@@ -54,7 +54,7 @@ Check your Node version with `node --version` if you are unsure.
</Step>
<Step title="Run the setup wizard">
```bash
openclaw setup --wizard --install-daemon
openclaw onboard --install-daemon
```
The wizard configures auth, gateway settings, and optional channels.

View File

@@ -1,18 +1,18 @@
---
summary: "Overview of OpenClaw setup options and flows"
summary: "Overview of OpenClaw onboarding options and flows"
read_when:
- Choosing a setup path
- Choosing an onboarding path
- Setting up a new environment
title: "Setup Overview"
sidebarTitle: "Setup Overview"
title: "Onboarding Overview"
sidebarTitle: "Onboarding Overview"
---
# Setup Overview
# Onboarding Overview
OpenClaw supports multiple setup paths depending on where the Gateway runs
OpenClaw supports multiple onboarding paths depending on where the Gateway runs
and how you prefer to configure providers.
## Choose your setup path
## Choose your onboarding path
- **CLI wizard** for macOS, Linux, and Windows (via WSL2).
- **macOS app** for a guided first run on Apple silicon or Intel Macs.
@@ -22,14 +22,14 @@ and how you prefer to configure providers.
Run the wizard in a terminal:
```bash
openclaw setup --wizard
openclaw onboard
```
Use the CLI wizard when you want full control of the Gateway, workspace,
channels, and skills. Docs:
- [Setup Wizard (CLI)](/start/wizard)
- [`openclaw setup --wizard` command](/cli/setup)
- [`openclaw onboard` command](/cli/onboard)
## macOS app onboarding
@@ -48,4 +48,4 @@ CLI wizard. You will be asked to:
- Provide a model ID and optional alias.
- Choose an Endpoint ID so multiple custom endpoints can coexist.
For detailed steps, follow the CLI setup docs above.
For detailed steps, follow the CLI onboarding docs above.

View File

@@ -1,7 +1,7 @@
---
summary: "Scripted setup wizard and agent setup for the OpenClaw CLI"
summary: "Scripted onboarding and agent setup for the OpenClaw CLI"
read_when:
- You are automating setup in scripts or CI
- You are automating onboarding in scripts or CI
- You need non-interactive examples for specific providers
title: "CLI Automation"
sidebarTitle: "CLI automation"
@@ -9,7 +9,7 @@ sidebarTitle: "CLI automation"
# CLI Automation
Use `--non-interactive` to automate `openclaw setup --wizard`.
Use `--non-interactive` to automate `openclaw onboard`.
<Note>
`--json` does not imply non-interactive mode. Use `--non-interactive` (and `--workspace`) for scripts.
@@ -18,7 +18,7 @@ Use `--non-interactive` to automate `openclaw setup --wizard`.
## Baseline non-interactive example
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice apiKey \
--anthropic-api-key "$ANTHROPIC_API_KEY" \
@@ -41,7 +41,7 @@ Passing inline key flags without the matching env var now fails fast.
Example:
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice openai-api-key \
--secret-input-mode ref \
@@ -53,7 +53,7 @@ openclaw setup --wizard --non-interactive \
<AccordionGroup>
<Accordion title="Gemini example">
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice gemini-api-key \
--gemini-api-key "$GEMINI_API_KEY" \
@@ -63,7 +63,7 @@ openclaw setup --wizard --non-interactive \
</Accordion>
<Accordion title="Z.AI example">
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice zai-api-key \
--zai-api-key "$ZAI_API_KEY" \
@@ -73,7 +73,7 @@ openclaw setup --wizard --non-interactive \
</Accordion>
<Accordion title="Vercel AI Gateway example">
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice ai-gateway-api-key \
--ai-gateway-api-key "$AI_GATEWAY_API_KEY" \
@@ -83,7 +83,7 @@ openclaw setup --wizard --non-interactive \
</Accordion>
<Accordion title="Cloudflare AI Gateway example">
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice cloudflare-ai-gateway-api-key \
--cloudflare-ai-gateway-account-id "your-account-id" \
@@ -95,7 +95,7 @@ openclaw setup --wizard --non-interactive \
</Accordion>
<Accordion title="Moonshot example">
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice moonshot-api-key \
--moonshot-api-key "$MOONSHOT_API_KEY" \
@@ -105,7 +105,7 @@ openclaw setup --wizard --non-interactive \
</Accordion>
<Accordion title="Mistral example">
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice mistral-api-key \
--mistral-api-key "$MISTRAL_API_KEY" \
@@ -115,7 +115,7 @@ openclaw setup --wizard --non-interactive \
</Accordion>
<Accordion title="Synthetic example">
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice synthetic-api-key \
--synthetic-api-key "$SYNTHETIC_API_KEY" \
@@ -125,7 +125,7 @@ openclaw setup --wizard --non-interactive \
</Accordion>
<Accordion title="OpenCode example">
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice opencode-zen \
--opencode-zen-api-key "$OPENCODE_API_KEY" \
@@ -136,7 +136,7 @@ openclaw setup --wizard --non-interactive \
</Accordion>
<Accordion title="Ollama example">
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice ollama \
--custom-model-id "qwen3.5:27b" \
@@ -147,7 +147,7 @@ openclaw setup --wizard --non-interactive \
</Accordion>
<Accordion title="Custom provider example">
```bash
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice custom-api-key \
--custom-base-url "https://llm.example.com/v1" \
@@ -165,7 +165,7 @@ openclaw setup --wizard --non-interactive \
```bash
export CUSTOM_API_KEY="your-key"
openclaw setup --wizard --non-interactive \
openclaw onboard --non-interactive \
--mode local \
--auth-choice custom-api-key \
--custom-base-url "https://llm.example.com/v1" \
@@ -212,4 +212,4 @@ Notes:
- Onboarding hub: [Setup Wizard (CLI)](/start/wizard)
- Full reference: [CLI Setup Reference](/start/wizard-cli-reference)
- Command reference: [`openclaw setup --wizard`](/cli/setup)
- Command reference: [`openclaw onboard`](/cli/onboard)

View File

@@ -1,15 +1,15 @@
---
summary: "Complete reference for CLI setup flow, auth/model setup, outputs, and internals"
read_when:
- You need detailed behavior for `openclaw setup --wizard`
- You are debugging setup results or integrating setup clients
- You need detailed behavior for `openclaw onboard`
- You are debugging onboarding results or integrating onboarding clients
title: "CLI Setup Reference"
sidebarTitle: "CLI reference"
---
# CLI Setup Reference
This page is the full reference for `openclaw setup --wizard`.
This page is the full reference for `openclaw onboard`.
For the short guide, see [Setup Wizard (CLI)](/start/wizard).
## What the wizard does
@@ -56,7 +56,7 @@ It does not install or modify anything on the remote host.
- **Use SecretRef** (opt-in)
- In password mode, interactive setup also supports plaintext or SecretRef storage.
- Non-interactive token SecretRef path: `--gateway-token-ref-env <ENV_VAR>`.
- Requires a non-empty env var in the setup process environment.
- Requires a non-empty env var in the onboarding process environment.
- Cannot be combined with `--gateway-token`.
- Disable auth only if you fully trust every local process.
- Non-loopback binds still require auth.
@@ -220,20 +220,20 @@ Credential and profile paths:
Credential storage mode:
- Default setup behavior persists API keys as plaintext values in auth profiles.
- Default onboarding behavior persists API keys as plaintext values in auth profiles.
- `--secret-input-mode ref` enables reference mode instead of plaintext key storage.
In interactive setup, you can choose either:
- environment variable ref (for example `keyRef: { source: "env", provider: "default", id: "OPENAI_API_KEY" }`)
- configured provider ref (`file` or `exec`) with provider alias + id
- Interactive reference mode runs a fast preflight validation before saving.
- Env refs: validates variable name + non-empty value in the current setup environment.
- Env refs: validates variable name + non-empty value in the current onboarding environment.
- Provider refs: validates provider config and resolves the requested id.
- If preflight fails, setup shows the error and lets you retry.
- If preflight fails, onboarding shows the error and lets you retry.
- In non-interactive mode, `--secret-input-mode ref` is env-backed only.
- Set the provider env var in the setup process environment.
- Inline key flags (for example `--openai-api-key`) require that env var to be set; otherwise setup fails fast.
- Set the provider env var in the onboarding process environment.
- Inline key flags (for example `--openai-api-key`) require that env var to be set; otherwise onboarding fails fast.
- For custom providers, non-interactive `ref` mode stores `models.providers.<id>.apiKey` as `{ source: "env", provider: "default", id: "CUSTOM_API_KEY" }`.
- In that custom-provider case, `--custom-api-key` requires `CUSTOM_API_KEY` to be set; otherwise setup fails fast.
- In that custom-provider case, `--custom-api-key` requires `CUSTOM_API_KEY` to be set; otherwise onboarding fails fast.
- Gateway auth credentials support plaintext and SecretRef choices in interactive setup:
- Token mode: **Generate/store plaintext token** (default) or **Use SecretRef**.
- Password mode: plaintext or SecretRef.
@@ -252,9 +252,9 @@ Typical fields in `~/.openclaw/openclaw.json`:
- `agents.defaults.workspace`
- `agents.defaults.model` / `models.providers` (if Minimax chosen)
- `tools.profile` (local setup defaults to `"coding"` when unset; existing explicit values are preserved)
- `tools.profile` (local onboarding defaults to `"coding"` when unset; existing explicit values are preserved)
- `gateway.*` (mode, bind, auth, tailscale)
- `session.dmScope` (local setup defaults this to `per-channel-peer` when unset; existing explicit values are preserved)
- `session.dmScope` (local onboarding defaults this to `per-channel-peer` when unset; existing explicit values are preserved)
- `channels.telegram.botToken`, `channels.discord.token`, `channels.signal.*`, `channels.imessage.*`
- Channel allowlists (Slack, Discord, Matrix, Microsoft Teams) when you opt in during prompts (names resolve to IDs when possible)
- `skills.install.nodeManager`
@@ -296,4 +296,4 @@ Signal setup behavior:
- Onboarding hub: [Setup Wizard (CLI)](/start/wizard)
- Automation and scripts: [CLI Automation](/start/wizard-cli-automation)
- Command reference: [`openclaw setup --wizard`](/cli/setup)
- Command reference: [`openclaw onboard`](/cli/onboard)

View File

@@ -4,7 +4,7 @@ read_when:
- Running or configuring the setup wizard
- Setting up a new machine
title: "Setup Wizard (CLI)"
sidebarTitle: "Setup: CLI"
sidebarTitle: "Onboarding: CLI"
---
# Setup Wizard (CLI)
@@ -15,7 +15,7 @@ It configures a local Gateway or a remote Gateway connection, plus channels, ski
and workspace defaults in one guided flow.
```bash
openclaw setup --wizard
openclaw onboard
```
<Info>
@@ -52,7 +52,7 @@ The wizard starts with **QuickStart** (defaults) vs **Advanced** (full control).
- Gateway port **18789**
- Gateway auth **Token** (autogenerated, even on loopback)
- Tool policy default for new local setups: `tools.profile: "coding"` (existing explicit profile is preserved)
- DM isolation default: local setup writes `session.dmScope: "per-channel-peer"` when unset. Details: [CLI Setup Reference](/start/wizard-cli-reference#outputs-and-internals)
- DM isolation default: local onboarding writes `session.dmScope: "per-channel-peer"` when unset. Details: [CLI Setup Reference](/start/wizard-cli-reference#outputs-and-internals)
- Tailscale exposure **Off**
- Telegram + WhatsApp DMs default to **allowlist** (you'll be prompted for your phone number)
</Tab>
@@ -119,7 +119,7 @@ For the deeper technical reference, including RPC details, see
## Related docs
- CLI command reference: [`openclaw setup`](/cli/setup)
- Setup overview: [Setup Overview](/start/onboarding-overview)
- CLI command reference: [`openclaw onboard`](/cli/onboard)
- Onboarding overview: [Onboarding Overview](/start/onboarding-overview)
- macOS app onboarding: [Onboarding](/start/onboarding)
- Agent first-run ritual: [Agent Bootstrapping](/start/bootstrapping)

View File

@@ -1195,11 +1195,11 @@ A provider plugin can participate in five distinct phases:
`auth[].run(ctx)` performs OAuth, API-key capture, device code, or custom
setup and returns auth profiles plus optional config patches.
2. **Non-interactive setup**
`auth[].runNonInteractive(ctx)` handles `openclaw setup --wizard --non-interactive`
`auth[].runNonInteractive(ctx)` handles `openclaw onboard --non-interactive`
without prompts. Use this when the provider needs custom headless setup
beyond the built-in simple API-key paths.
3. **Wizard integration**
`wizard.setup` adds an entry to `openclaw setup --wizard`.
`wizard.setup` adds an entry to `openclaw onboard`.
`wizard.modelPicker` adds a setup entry to the model picker.
4. **Implicit discovery**
`discovery.run(ctx)` can contribute provider config automatically during
@@ -1360,7 +1360,7 @@ or more auth methods (OAuth, API key, device code, etc.). Those methods can
power:
- `openclaw models auth login --provider <id> [--method <id>]`
- `openclaw setup --wizard`
- `openclaw onboard`
- model-picker “custom provider” setup entries
- implicit provider discovery during model resolution/listing
@@ -1435,7 +1435,7 @@ Notes:
for headless onboarding.
- Return `configPatch` when you need to add default models or provider config.
- Return `defaultModel` so `--set-default` can update agent defaults.
- `wizard.setup` adds a provider choice to `openclaw setup --wizard`.
- `wizard.setup` adds a provider choice to `openclaw onboard`.
- `wizard.modelPicker` adds a “setup this provider” entry to the model picker.
- `discovery.run` returns either `{ provider }` for the plugin's own provider id
or `{ providers }` for multi-provider discovery.

View File

@@ -10,7 +10,7 @@ import { hasExplicitOptions } from "../command-options.js";
export function registerSetupCommand(program: Command) {
program
.command("setup")
.description("Initialize config/workspace or run the setup wizard")
.description("Initialize ~/.openclaw/openclaw.json and the agent workspace")
.addHelpText(
"after",
() =>
@@ -20,8 +20,8 @@ export function registerSetupCommand(program: Command) {
"--workspace <dir>",
"Agent workspace directory (default: ~/.openclaw/workspace; stored as agents.defaults.workspace)",
)
.option("--wizard", "Run the guided setup wizard", false)
.option("--non-interactive", "Run the setup wizard without prompts", false)
.option("--wizard", "Run the interactive onboarding wizard", false)
.option("--non-interactive", "Run the wizard without prompts", false)
.option("--mode <mode>", "Wizard mode: local|remote")
.option("--remote-url <url>", "Remote Gateway WebSocket URL")
.option("--remote-token <token>", "Remote Gateway token (optional)")

View File

@@ -61,7 +61,7 @@ export async function setupWizardCommand(
[
"Non-interactive setup requires explicit risk acknowledgement.",
"Read: https://docs.openclaw.ai/security",
`Re-run with: ${formatCliCommand("openclaw setup --wizard --non-interactive --accept-risk ...")}`,
`Re-run with: ${formatCliCommand("openclaw onboard --non-interactive --accept-risk ...")}`,
].join("\n"),
);
runtime.exit(1);

View File

@@ -134,7 +134,7 @@ export async function resetCommand(runtime: RuntimeEnv, opts: ResetOptions) {
for (const dir of sessionDirs) {
await removePath(dir, runtime, { dryRun, label: dir });
}
runtime.log(`Next: ${formatCliCommand("openclaw setup --wizard --install-daemon")}`);
runtime.log(`Next: ${formatCliCommand("openclaw onboard --install-daemon")}`);
return;
}
@@ -145,7 +145,7 @@ export async function resetCommand(runtime: RuntimeEnv, opts: ResetOptions) {
{ dryRun },
);
await removeWorkspaceDirs(workspaceDirs, runtime, { dryRun });
runtime.log(`Next: ${formatCliCommand("openclaw setup --wizard --install-daemon")}`);
runtime.log(`Next: ${formatCliCommand("openclaw onboard --install-daemon")}`);
return;
}
}