diff --git a/docs/.i18n/glossary.zh-CN.json b/docs/.i18n/glossary.zh-CN.json index bc1892d1e9a..36e44b6d909 100644 --- a/docs/.i18n/glossary.zh-CN.json +++ b/docs/.i18n/glossary.zh-CN.json @@ -55,10 +55,6 @@ "source": "CLI Setup Reference", "target": "CLI 设置参考" }, - { - "source": "Setup Overview", - "target": "设置概览" - }, { "source": "Setup Wizard (CLI)", "target": "设置向导(CLI)" diff --git a/docs/automation/hooks.md b/docs/automation/hooks.md index 84c7a234e11..deda79d3db5 100644 --- a/docs/automation/hooks.md +++ b/docs/automation/hooks.md @@ -74,7 +74,7 @@ openclaw hooks info session-memory ### Onboarding -During onboarding (`openclaw setup --wizard`), you'll be prompted to enable recommended hooks. The wizard automatically discovers eligible hooks and presents them for selection. +During onboarding (`openclaw onboard`), you'll be prompted to enable recommended hooks. The wizard automatically discovers eligible hooks and presents them for selection. ## Hook Discovery diff --git a/docs/channels/bluebubbles.md b/docs/channels/bluebubbles.md index c51c7967b00..9c2f0eb6de4 100644 --- a/docs/channels/bluebubbles.md +++ b/docs/channels/bluebubbles.md @@ -26,7 +26,7 @@ Status: bundled plugin that talks to the BlueBubbles macOS server over HTTP. **R 1. Install the BlueBubbles server on your Mac (follow the instructions at [bluebubbles.app/install](https://bluebubbles.app/install)). 2. In the BlueBubbles config, enable the web API and set a password. -3. Run `openclaw setup --wizard` and select BlueBubbles, or configure manually: +3. 
Run `openclaw onboard` and select BlueBubbles, or configure manually: ```json5 { @@ -129,7 +129,7 @@ launchctl load ~/Library/LaunchAgents/com.user.poke-messages.plist BlueBubbles is available in the interactive setup wizard: ``` -openclaw setup --wizard +openclaw onboard ``` The wizard prompts for: diff --git a/docs/channels/feishu.md b/docs/channels/feishu.md index 7e13a3077df..3768906d940 100644 --- a/docs/channels/feishu.md +++ b/docs/channels/feishu.md @@ -35,7 +35,7 @@ There are two ways to add the Feishu channel: If you just installed OpenClaw, run the setup wizard: ```bash -openclaw setup --wizard +openclaw onboard ``` The wizard guides you through: diff --git a/docs/channels/nostr.md b/docs/channels/nostr.md index ce410dd879a..46888da0352 100644 --- a/docs/channels/nostr.md +++ b/docs/channels/nostr.md @@ -16,7 +16,7 @@ Nostr is a decentralized protocol for social networking. This channel enables Op ### Onboarding (recommended) -- The setup wizard (`openclaw setup --wizard`) and `openclaw channels add` list optional channel plugins. +- The setup wizard (`openclaw onboard`) and `openclaw channels add` list optional channel plugins. - Selecting Nostr prompts you to install the plugin on demand. Install defaults: diff --git a/docs/channels/synology-chat.md b/docs/channels/synology-chat.md index cc3b2f2ed73..aae655f27b7 100644 --- a/docs/channels/synology-chat.md +++ b/docs/channels/synology-chat.md @@ -27,7 +27,7 @@ Details: [Plugins](/tools/plugin) ## Quick setup 1. Install and enable the Synology Chat plugin. - - `openclaw setup --wizard` now shows Synology Chat in the same channel setup list as `openclaw channels add`. + - `openclaw onboard` now shows Synology Chat in the same channel setup list as `openclaw channels add`. - Non-interactive setup: `openclaw channels add --channel synology-chat --token --url ` 2. In Synology Chat integrations: - Create an incoming webhook and copy its URL. 
@@ -36,7 +36,7 @@ Details: [Plugins](/tools/plugin) - `https://gateway-host/webhook/synology` by default. - Or your custom `channels.synology-chat.webhookPath`. 4. Finish setup in OpenClaw. - - Guided: `openclaw setup --wizard` + - Guided: `openclaw onboard` - Direct: `openclaw channels add --channel synology-chat --token --url ` 5. Restart gateway and send a DM to the Synology Chat bot. diff --git a/docs/cli/index.md b/docs/cli/index.md index 80e6efdadd5..ded970cde9d 100644 --- a/docs/cli/index.md +++ b/docs/cli/index.md @@ -13,7 +13,7 @@ This page describes the current CLI behavior. If commands change, update this do ## Command pages - [`setup`](/cli/setup) -- [`onboard`](/cli/onboard) (legacy alias for `setup --wizard`) +- [`onboard`](/cli/onboard) - [`configure`](/cli/configure) - [`config`](/cli/config) - [`completion`](/cli/completion) diff --git a/docs/cli/onboard.md b/docs/cli/onboard.md index 16aa8413135..899ccd82713 100644 --- a/docs/cli/onboard.md +++ b/docs/cli/onboard.md @@ -1,30 +1,157 @@ --- -summary: "Legacy CLI alias for `openclaw setup --wizard`" +summary: "CLI reference for `openclaw onboard` (interactive setup wizard)" read_when: - - You encountered `openclaw onboard` in older docs or scripts + - You want guided setup for gateway, workspace, auth, channels, and skills title: "onboard" --- # `openclaw onboard` -Legacy alias for `openclaw setup --wizard`. - -Prefer: - -```bash -openclaw setup --wizard -``` - -`openclaw onboard` still accepts the same flags and behavior for compatibility. +Interactive setup wizard (local or remote Gateway setup). 
## Related guides -- Primary command docs: [`openclaw setup`](/cli/setup) -- Setup wizard guide: [Setup Wizard (CLI)](/start/wizard) -- Setup overview: [Setup Overview](/start/onboarding-overview) -- Setup wizard reference: [CLI Setup Reference](/start/wizard-cli-reference) +- CLI onboarding hub: [Setup Wizard (CLI)](/start/wizard) +- Onboarding overview: [Onboarding Overview](/start/onboarding-overview) +- CLI onboarding reference: [CLI Setup Reference](/start/wizard-cli-reference) - CLI automation: [CLI Automation](/start/wizard-cli-automation) - macOS onboarding: [Onboarding (macOS App)](/start/onboarding) -For examples, flags, and non-interactive behavior, use the primary docs at -[`openclaw setup`](/cli/setup) and [CLI Setup Reference](/start/wizard-cli-reference). +## Examples + +```bash +openclaw onboard +openclaw onboard --flow quickstart +openclaw onboard --flow manual +openclaw onboard --mode remote --remote-url wss://gateway-host:18789 +``` + +For plaintext private-network `ws://` targets (trusted networks only), set +`OPENCLAW_ALLOW_INSECURE_PRIVATE_WS=1` in the onboarding process environment. + +Non-interactive custom provider: + +```bash +openclaw onboard --non-interactive \ + --auth-choice custom-api-key \ + --custom-base-url "https://llm.example.com/v1" \ + --custom-model-id "foo-large" \ + --custom-api-key "$CUSTOM_API_KEY" \ + --secret-input-mode plaintext \ + --custom-compatibility openai +``` + +`--custom-api-key` is optional in non-interactive mode. If omitted, onboarding checks `CUSTOM_API_KEY`. + +Non-interactive Ollama: + +```bash +openclaw onboard --non-interactive \ + --auth-choice ollama \ + --custom-base-url "http://ollama-host:11434" \ + --custom-model-id "qwen3.5:27b" \ + --accept-risk +``` + +`--custom-base-url` defaults to `http://127.0.0.1:11434`. `--custom-model-id` is optional; if omitted, onboarding uses Ollama's suggested defaults. Cloud model IDs such as `kimi-k2.5:cloud` also work here. 
+ +Store provider keys as refs instead of plaintext: + +```bash +openclaw onboard --non-interactive \ + --auth-choice openai-api-key \ + --secret-input-mode ref \ + --accept-risk +``` + +With `--secret-input-mode ref`, onboarding writes env-backed refs instead of plaintext key values. +For auth-profile backed providers this writes `keyRef` entries; for custom providers this writes `models.providers..apiKey` as an env ref (for example `{ source: "env", provider: "default", id: "CUSTOM_API_KEY" }`). + +Non-interactive `ref` mode contract: + +- Set the provider env var in the onboarding process environment (for example `OPENAI_API_KEY`). +- Do not pass inline key flags (for example `--openai-api-key`) unless that env var is also set. +- If an inline key flag is passed without the required env var, onboarding fails fast with guidance. + +Gateway token options in non-interactive mode: + +- `--gateway-auth token --gateway-token ` stores a plaintext token. +- `--gateway-auth token --gateway-token-ref-env ` stores `gateway.auth.token` as an env SecretRef. +- `--gateway-token` and `--gateway-token-ref-env` are mutually exclusive. +- `--gateway-token-ref-env` requires a non-empty env var in the onboarding process environment. +- With `--install-daemon`, when token auth requires a token, SecretRef-managed gateway tokens are validated but not persisted as resolved plaintext in supervisor service environment metadata. +- With `--install-daemon`, if token mode requires a token and the configured token SecretRef is unresolved, onboarding fails closed with remediation guidance. +- With `--install-daemon`, if both `gateway.auth.token` and `gateway.auth.password` are configured and `gateway.auth.mode` is unset, onboarding blocks install until mode is set explicitly. 
+ +Example: + +```bash +export OPENCLAW_GATEWAY_TOKEN="your-token" +openclaw onboard --non-interactive \ + --mode local \ + --auth-choice skip \ + --gateway-auth token \ + --gateway-token-ref-env OPENCLAW_GATEWAY_TOKEN \ + --accept-risk +``` + +Non-interactive local gateway health: + +- Unless you pass `--skip-health`, onboarding waits for a reachable local gateway before it exits successfully. +- `--install-daemon` starts the managed gateway install path first. Without it, you must already have a local gateway running, for example `openclaw gateway run`. +- If you only want config/workspace/bootstrap writes in automation, use `--skip-health`. +- On native Windows, `--install-daemon` tries Scheduled Tasks first and falls back to a per-user Startup-folder login item if task creation is denied. + +Interactive onboarding behavior with reference mode: + +- Choose **Use secret reference** when prompted. +- Then choose either: + - Environment variable + - Configured secret provider (`file` or `exec`) +- Onboarding performs a fast preflight validation before saving the ref. + - If validation fails, onboarding shows the error and lets you retry. + +Non-interactive Z.AI endpoint choices: + +Note: `--auth-choice zai-api-key` now auto-detects the best Z.AI endpoint for your key (prefers the general API with `zai/glm-5`). +If you specifically want the GLM Coding Plan endpoints, pick `zai-coding-global` or `zai-coding-cn`. + +```bash +# Promptless endpoint selection +openclaw onboard --non-interactive \ + --auth-choice zai-coding-global \ + --zai-api-key "$ZAI_API_KEY" + +# Other Z.AI endpoint choices: +# --auth-choice zai-coding-cn +# --auth-choice zai-global +# --auth-choice zai-cn +``` + +Non-interactive Mistral example: + +```bash +openclaw onboard --non-interactive \ + --auth-choice mistral-api-key \ + --mistral-api-key "$MISTRAL_API_KEY" +``` + +Flow notes: + +- `quickstart`: minimal prompts, auto-generates a gateway token. 
+- `manual`: full prompts for port/bind/auth (alias of `advanced`). +- Local onboarding DM scope behavior: [CLI Setup Reference](/start/wizard-cli-reference#outputs-and-internals). +- Fastest first chat: `openclaw dashboard` (Control UI, no channel setup). +- Custom Provider: connect any OpenAI or Anthropic compatible endpoint, + including hosted providers not listed. Use Unknown to auto-detect. + +## Common follow-up commands + +```bash +openclaw configure +openclaw agents add +``` + + +`--json` does not imply non-interactive mode. Use `--non-interactive` for scripts. + diff --git a/docs/cli/setup.md b/docs/cli/setup.md index d8b5f686ef9..d8992ba8a43 100644 --- a/docs/cli/setup.md +++ b/docs/cli/setup.md @@ -1,43 +1,29 @@ --- -summary: "CLI reference for `openclaw setup` (initialize config/workspace or run the setup wizard)" +summary: "CLI reference for `openclaw setup` (initialize config + workspace)" read_when: - - You want first-run setup without the guided wizard - - You want the guided setup wizard via `openclaw setup --wizard` + - You’re doing first-run setup without the full setup wizard - You want to set the default workspace path title: "setup" --- # `openclaw setup` -Initialize `~/.openclaw/openclaw.json` and the agent workspace, or run the guided setup wizard. +Initialize `~/.openclaw/openclaw.json` and the agent workspace. Related: - Getting started: [Getting started](/start/getting-started) -- Setup wizard: [Setup Wizard (CLI)](/start/wizard) -- macOS app onboarding: [Onboarding](/start/onboarding) +- Wizard: [Onboarding](/start/onboarding) ## Examples ```bash openclaw setup openclaw setup --workspace ~/.openclaw/workspace -openclaw setup --wizard -openclaw setup --wizard --install-daemon ``` -Without flags, `openclaw setup` only ensures config + workspace defaults. -Use `--wizard` for the full guided flow. 
+To run the wizard via setup: -## Modes - -- `openclaw setup`: initialize config/workspace defaults only -- `openclaw setup --wizard`: guided setup for auth, gateway, channels, and skills -- `openclaw setup --wizard --non-interactive`: scripted setup flow - -## Related guides - -- Setup wizard guide: [Setup Wizard (CLI)](/start/wizard) -- Setup wizard reference: [CLI Setup Reference](/start/wizard-cli-reference) -- Setup wizard automation: [CLI Automation](/start/wizard-cli-automation) -- Legacy alias: [`openclaw onboard`](/cli/onboard) +```bash +openclaw setup --wizard +``` diff --git a/docs/concepts/agent-workspace.md b/docs/concepts/agent-workspace.md index 7fc114818cb..ff55f241bcd 100644 --- a/docs/concepts/agent-workspace.md +++ b/docs/concepts/agent-workspace.md @@ -36,7 +36,7 @@ inside a sandbox workspace under `~/.openclaw/sandboxes`, not your host workspac } ``` -`openclaw setup --wizard`, `openclaw configure`, or `openclaw setup` will create the +`openclaw onboard`, `openclaw configure`, or `openclaw setup` will create the workspace and seed the bootstrap files if they are missing. Sandbox seed copies only accept regular in-workspace files; symlink/hardlink aliases that resolve outside the source workspace are ignored. diff --git a/docs/concepts/model-providers.md b/docs/concepts/model-providers.md index 8e8f17f4a67..fc0656c0dd4 100644 --- a/docs/concepts/model-providers.md +++ b/docs/concepts/model-providers.md @@ -15,7 +15,7 @@ For model selection rules, see [/concepts/models](/concepts/models). - Model refs use `provider/model` (example: `opencode/claude-opus-4-6`). - If you set `agents.defaults.models`, it becomes the allowlist. -- CLI helpers: `openclaw setup --wizard`, `openclaw models list`, `openclaw models set `. +- CLI helpers: `openclaw onboard`, `openclaw models list`, `openclaw models set `. 
- Provider plugins can inject model catalogs via `registerProvider({ catalog })`; OpenClaw merges that output into `models.providers` before writing `models.json`. @@ -139,7 +139,7 @@ OpenClaw ships with the pi‑ai catalog. These providers require **no** - Auth: `OPENAI_API_KEY` - Optional rotation: `OPENAI_API_KEYS`, `OPENAI_API_KEY_1`, `OPENAI_API_KEY_2`, plus `OPENCLAW_LIVE_OPENAI_KEY` (single override) - Example models: `openai/gpt-5.4`, `openai/gpt-5.4-pro` -- CLI: `openclaw setup --wizard --auth-choice openai-api-key` +- CLI: `openclaw onboard --auth-choice openai-api-key` - Default transport is `auto` (WebSocket-first, SSE fallback) - Override per model via `agents.defaults.models["openai/"].params.transport` (`"sse"`, `"websocket"`, or `"auto"`) - OpenAI Responses WebSocket warm-up defaults to enabled via `params.openaiWsWarmup` (`true`/`false`) @@ -159,7 +159,7 @@ OpenClaw ships with the pi‑ai catalog. These providers require **no** - Auth: `ANTHROPIC_API_KEY` or `claude setup-token` - Optional rotation: `ANTHROPIC_API_KEYS`, `ANTHROPIC_API_KEY_1`, `ANTHROPIC_API_KEY_2`, plus `OPENCLAW_LIVE_ANTHROPIC_KEY` (single override) - Example model: `anthropic/claude-opus-4-6` -- CLI: `openclaw setup --wizard --auth-choice token` (paste setup-token) or `openclaw models auth paste-token --provider anthropic` +- CLI: `openclaw onboard --auth-choice token` (paste setup-token) or `openclaw models auth paste-token --provider anthropic` - Direct API-key models support the shared `/fast` toggle and `params.fastMode`; OpenClaw maps that to Anthropic `service_tier` (`auto` vs `standard_only`) - Policy note: setup-token support is technical compatibility; Anthropic has blocked some subscription usage outside Claude Code in the past. Verify current Anthropic terms and decide based on your risk tolerance. - Recommendation: Anthropic API key auth is the safer, recommended path over subscription setup-token auth. @@ -175,7 +175,7 @@ OpenClaw ships with the pi‑ai catalog. 
These providers require **no** - Provider: `openai-codex` - Auth: OAuth (ChatGPT) - Example model: `openai-codex/gpt-5.4` -- CLI: `openclaw setup --wizard --auth-choice openai-codex` or `openclaw models auth login --provider openai-codex` +- CLI: `openclaw onboard --auth-choice openai-codex` or `openclaw models auth login --provider openai-codex` - Default transport is `auto` (WebSocket-first, SSE fallback) - Override per model via `agents.defaults.models["openai-codex/"].params.transport` (`"sse"`, `"websocket"`, or `"auto"`) - Shares the same `/fast` toggle and `params.fastMode` config as direct `openai/*` @@ -194,7 +194,7 @@ OpenClaw ships with the pi‑ai catalog. These providers require **no** - Zen runtime provider: `opencode` - Go runtime provider: `opencode-go` - Example models: `opencode/claude-opus-4-6`, `opencode-go/kimi-k2.5` -- CLI: `openclaw setup --wizard --auth-choice opencode-zen` or `openclaw setup --wizard --auth-choice opencode-go` +- CLI: `openclaw onboard --auth-choice opencode-zen` or `openclaw onboard --auth-choice opencode-go` ```json5 { @@ -209,7 +209,7 @@ OpenClaw ships with the pi‑ai catalog. These providers require **no** - Optional rotation: `GEMINI_API_KEYS`, `GEMINI_API_KEY_1`, `GEMINI_API_KEY_2`, `GOOGLE_API_KEY` fallback, and `OPENCLAW_LIVE_GEMINI_KEY` (single override) - Example models: `google/gemini-3.1-pro-preview`, `google/gemini-3-flash-preview` - Compatibility: legacy OpenClaw config using `google/gemini-3.1-flash-preview` is normalized to `google/gemini-3-flash-preview` -- CLI: `openclaw setup --wizard --auth-choice gemini-api-key` +- CLI: `openclaw onboard --auth-choice gemini-api-key` ### Google Vertex and Gemini CLI @@ -227,7 +227,7 @@ OpenClaw ships with the pi‑ai catalog. 
These providers require **no** - Provider: `zai` - Auth: `ZAI_API_KEY` - Example model: `zai/glm-5` -- CLI: `openclaw setup --wizard --auth-choice zai-api-key` +- CLI: `openclaw onboard --auth-choice zai-api-key` - Aliases: `z.ai/*` and `z-ai/*` normalize to `zai/*` ### Vercel AI Gateway @@ -235,14 +235,14 @@ OpenClaw ships with the pi‑ai catalog. These providers require **no** - Provider: `vercel-ai-gateway` - Auth: `AI_GATEWAY_API_KEY` - Example model: `vercel-ai-gateway/anthropic/claude-opus-4.6` -- CLI: `openclaw setup --wizard --auth-choice ai-gateway-api-key` +- CLI: `openclaw onboard --auth-choice ai-gateway-api-key` ### Kilo Gateway - Provider: `kilocode` - Auth: `KILOCODE_API_KEY` - Example model: `kilocode/anthropic/claude-opus-4.6` -- CLI: `openclaw setup --wizard --kilocode-api-key ` +- CLI: `openclaw onboard --kilocode-api-key ` - Base URL: `https://api.kilo.ai/api/gateway/` - Expanded built-in catalog includes GLM-5 Free, MiniMax M2.5 Free, GPT-5.2, Gemini 3 Pro Preview, Gemini 3 Flash Preview, Grok Code Fast 1, and Kimi K2.5. @@ -271,13 +271,13 @@ See [/providers/kilocode](/providers/kilocode) for setup details. - xAI: `xai` (`XAI_API_KEY`) - Mistral: `mistral` (`MISTRAL_API_KEY`) - Example model: `mistral/mistral-large-latest` -- CLI: `openclaw setup --wizard --auth-choice mistral-api-key` +- CLI: `openclaw onboard --auth-choice mistral-api-key` - Groq: `groq` (`GROQ_API_KEY`) - Cerebras: `cerebras` (`CEREBRAS_API_KEY`) - GLM models on Cerebras use ids `zai-glm-4.7` and `zai-glm-4.6`. - OpenAI-compatible base URL: `https://api.cerebras.ai/v1`. - GitHub Copilot: `github-copilot` (`COPILOT_GITHUB_TOKEN` / `GH_TOKEN` / `GITHUB_TOKEN`) -- Hugging Face Inference example model: `huggingface/deepseek-ai/DeepSeek-R1`; CLI: `openclaw setup --wizard --auth-choice huggingface-api-key`. See [Hugging Face (Inference)](/providers/huggingface). 
+- Hugging Face Inference example model: `huggingface/deepseek-ai/DeepSeek-R1`; CLI: `openclaw onboard --auth-choice huggingface-api-key`. See [Hugging Face (Inference)](/providers/huggingface). ## Providers via `models.providers` (custom/base URL) @@ -367,7 +367,7 @@ Volcano Engine (火山引擎) provides access to Doubao and other models in Chin - Provider: `volcengine` (coding: `volcengine-plan`) - Auth: `VOLCANO_ENGINE_API_KEY` - Example model: `volcengine/doubao-seed-1-8-251228` -- CLI: `openclaw setup --wizard --auth-choice volcengine-api-key` +- CLI: `openclaw onboard --auth-choice volcengine-api-key` ```json5 { @@ -400,7 +400,7 @@ BytePlus ARK provides access to the same models as Volcano Engine for internatio - Provider: `byteplus` (coding: `byteplus-plan`) - Auth: `BYTEPLUS_API_KEY` - Example model: `byteplus/seed-1-8-251228` -- CLI: `openclaw setup --wizard --auth-choice byteplus-api-key` +- CLI: `openclaw onboard --auth-choice byteplus-api-key` ```json5 { @@ -431,7 +431,7 @@ Synthetic provides Anthropic-compatible models behind the `synthetic` provider: - Provider: `synthetic` - Auth: `SYNTHETIC_API_KEY` - Example model: `synthetic/hf:MiniMaxAI/MiniMax-M2.5` -- CLI: `openclaw setup --wizard --auth-choice synthetic-api-key` +- CLI: `openclaw onboard --auth-choice synthetic-api-key` ```json5 { @@ -485,7 +485,7 @@ ollama pull llama3.3 Ollama is detected locally at `http://127.0.0.1:11434` when you opt in with `OLLAMA_API_KEY`, and the bundled provider plugin adds Ollama directly to -`openclaw setup --wizard` and the model picker. See [/providers/ollama](/providers/ollama) +`openclaw onboard` and the model picker. See [/providers/ollama](/providers/ollama) for onboarding, cloud/local mode, and custom configuration. 
### vLLM @@ -595,7 +595,7 @@ Notes: ## CLI examples ```bash -openclaw setup --wizard --auth-choice opencode-zen +openclaw onboard --auth-choice opencode-zen openclaw models set opencode/claude-opus-4-6 openclaw models list ``` diff --git a/docs/concepts/models.md b/docs/concepts/models.md index f190630ac36..e85e605456f 100644 --- a/docs/concepts/models.md +++ b/docs/concepts/models.md @@ -39,7 +39,7 @@ Related: If you don’t want to hand-edit config, run the setup wizard: ```bash -openclaw setup --wizard +openclaw onboard ``` It can set up model + auth for common providers, including **OpenAI Code (Codex) diff --git a/docs/concepts/oauth.md b/docs/concepts/oauth.md index 4b8b2739c22..4766687ad51 100644 --- a/docs/concepts/oauth.md +++ b/docs/concepts/oauth.md @@ -92,7 +92,7 @@ Flow shape: 2. paste the token into OpenClaw 3. store as a token auth profile (no refresh) -The wizard path is `openclaw setup --wizard` → auth choice `setup-token` (Anthropic). +The wizard path is `openclaw onboard` → auth choice `setup-token` (Anthropic). ### OpenAI Codex (ChatGPT OAuth) @@ -107,7 +107,7 @@ Flow shape (PKCE): 5. exchange at `https://auth.openai.com/oauth/token` 6. extract `accountId` from the access token and store `{ access, refresh, expires, accountId }` -Wizard path is `openclaw setup --wizard` → auth choice `openai-codex`. +Wizard path is `openclaw onboard` → auth choice `openai-codex`. ## Refresh + expiry diff --git a/docs/gateway/authentication.md b/docs/gateway/authentication.md index fe8e5b760d3..c25501e6cdd 100644 --- a/docs/gateway/authentication.md +++ b/docs/gateway/authentication.md @@ -50,7 +50,7 @@ openclaw doctor ``` If you’d rather not manage env vars yourself, the setup wizard can store -API keys for daemon use: `openclaw setup --wizard`. +API keys for daemon use: `openclaw onboard`. See [Help](/help) for details on env inheritance (`env.shellEnv`, `~/.openclaw/.env`, systemd/launchd). 
diff --git a/docs/gateway/configuration-reference.md b/docs/gateway/configuration-reference.md index b28cde9c260..0653fd3834f 100644 --- a/docs/gateway/configuration-reference.md +++ b/docs/gateway/configuration-reference.md @@ -2182,7 +2182,7 @@ Use `cerebras/zai-glm-4.7` for Cerebras; `zai/glm-4.7` for Z.AI direct. } ``` -Set `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`). Use `opencode/...` refs for the Zen catalog or `opencode-go/...` refs for the Go catalog. Shortcut: `openclaw setup --wizard --auth-choice opencode-zen` or `openclaw setup --wizard --auth-choice opencode-go`. +Set `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`). Use `opencode/...` refs for the Zen catalog or `opencode-go/...` refs for the Go catalog. Shortcut: `openclaw onboard --auth-choice opencode-zen` or `openclaw onboard --auth-choice opencode-go`. @@ -2199,7 +2199,7 @@ Set `OPENCODE_API_KEY` (or `OPENCODE_ZEN_API_KEY`). Use `opencode/...` refs for } ``` -Set `ZAI_API_KEY`. `z.ai/*` and `z-ai/*` are accepted aliases. Shortcut: `openclaw setup --wizard --auth-choice zai-api-key`. +Set `ZAI_API_KEY`. `z.ai/*` and `z-ai/*` are accepted aliases. Shortcut: `openclaw onboard --auth-choice zai-api-key`. - General endpoint: `https://api.z.ai/api/paas/v4` - Coding endpoint (default): `https://api.z.ai/api/coding/paas/v4` @@ -2242,7 +2242,7 @@ Set `ZAI_API_KEY`. `z.ai/*` and `z-ai/*` are accepted aliases. Shortcut: `opencl } ``` -For the China endpoint: `baseUrl: "https://api.moonshot.cn/v1"` or `openclaw setup --wizard --auth-choice moonshot-api-key-cn`. +For the China endpoint: `baseUrl: "https://api.moonshot.cn/v1"` or `openclaw onboard --auth-choice moonshot-api-key-cn`. @@ -2260,7 +2260,7 @@ For the China endpoint: `baseUrl: "https://api.moonshot.cn/v1"` or `openclaw set } ``` -Anthropic-compatible, built-in provider. Shortcut: `openclaw setup --wizard --auth-choice kimi-code-api-key`. +Anthropic-compatible, built-in provider. Shortcut: `openclaw onboard --auth-choice kimi-code-api-key`. 
@@ -2299,7 +2299,7 @@ Anthropic-compatible, built-in provider. Shortcut: `openclaw setup --wizard --au } ``` -Base URL should omit `/v1` (Anthropic client appends it). Shortcut: `openclaw setup --wizard --auth-choice synthetic-api-key`. +Base URL should omit `/v1` (Anthropic client appends it). Shortcut: `openclaw onboard --auth-choice synthetic-api-key`. @@ -2339,7 +2339,7 @@ Base URL should omit `/v1` (Anthropic client appends it). Shortcut: `openclaw se } ``` -Set `MINIMAX_API_KEY`. Shortcut: `openclaw setup --wizard --auth-choice minimax-api`. +Set `MINIMAX_API_KEY`. Shortcut: `openclaw onboard --auth-choice minimax-api`. diff --git a/docs/gateway/configuration.md b/docs/gateway/configuration.md index db4bb167417..a699e74652f 100644 --- a/docs/gateway/configuration.md +++ b/docs/gateway/configuration.md @@ -20,7 +20,7 @@ If the file is missing, OpenClaw uses safe defaults. Common reasons to add a con See the [full reference](/gateway/configuration-reference) for every available field. -**New to configuration?** Start with `openclaw setup --wizard` for interactive setup, or check out the [Configuration Examples](/gateway/configuration-examples) guide for complete copy-paste configs. +**New to configuration?** Start with `openclaw onboard` for interactive setup, or check out the [Configuration Examples](/gateway/configuration-examples) guide for complete copy-paste configs. ## Minimal config @@ -38,7 +38,7 @@ See the [full reference](/gateway/configuration-reference) for every available f ```bash - openclaw setup --wizard # full setup wizard + openclaw onboard # full setup wizard openclaw configure # config wizard ``` diff --git a/docs/gateway/local-models.md b/docs/gateway/local-models.md index 93a63c38170..4059f988776 100644 --- a/docs/gateway/local-models.md +++ b/docs/gateway/local-models.md @@ -11,7 +11,7 @@ title: "Local Models" Local is doable, but OpenClaw expects large context + strong defenses against prompt injection. 
Small cards truncate context and leak safety. Aim high: **≥2 maxed-out Mac Studios or equivalent GPU rig (~$30k+)**. A single **24 GB** GPU works only for lighter prompts with higher latency. Use the **largest / full-size model variant you can run**; aggressively quantized or “small” checkpoints raise prompt-injection risk (see [Security](/gateway/security)). -If you want the lowest-friction local setup, start with [Ollama](/providers/ollama) and `openclaw setup --wizard`. This page is the opinionated guide for higher-end local stacks and custom OpenAI-compatible local servers. +If you want the lowest-friction local setup, start with [Ollama](/providers/ollama) and `openclaw onboard`. This page is the opinionated guide for higher-end local stacks and custom OpenAI-compatible local servers. ## Recommended: LM Studio + MiniMax M2.5 (Responses API, full-size) diff --git a/docs/gateway/multiple-gateways.md b/docs/gateway/multiple-gateways.md index b2d5257f0ff..6d1cf423b98 100644 --- a/docs/gateway/multiple-gateways.md +++ b/docs/gateway/multiple-gateways.md @@ -59,7 +59,7 @@ Port spacing: leave at least 20 ports between base ports so the derived browser/ ```bash # Main bot (existing or fresh, without --profile param) # Runs on port 18789 + Chrome CDC/Canvas/... Ports -openclaw setup --wizard +openclaw onboard openclaw gateway install # Rescue bot (isolated profile + ports) diff --git a/docs/help/faq.md b/docs/help/faq.md index 670ea170c19..8fdf39ab5c1 100644 --- a/docs/help/faq.md +++ b/docs/help/faq.md @@ -321,7 +321,7 @@ The repo recommends running from source and using the setup wizard: ```bash curl -fsSL https://openclaw.ai/install.sh | bash -openclaw setup --wizard --install-daemon +openclaw onboard --install-daemon ``` The wizard can also build UI assets automatically. After onboarding, you typically run the Gateway on port **18789**. 
@@ -334,10 +334,10 @@ cd openclaw pnpm install pnpm build pnpm ui:build # auto-installs UI deps on first run -openclaw setup --wizard +openclaw onboard ``` -If you don't have a global install yet, run it via `pnpm openclaw setup --wizard`. +If you don't have a global install yet, run it via `pnpm openclaw onboard`. ### How do I open the dashboard after onboarding @@ -687,7 +687,7 @@ Docs: [Update](/cli/update), [Updating](/install/updating). ### What does the setup wizard actually do -`openclaw setup --wizard` is the recommended setup path. In **local mode** it walks you through: +`openclaw onboard` is the recommended setup path. In **local mode** it walks you through: - **Model/auth setup** (provider OAuth/setup-token flows and API keys supported, plus local model options such as LM Studio) - **Workspace** location + bootstrap files @@ -1904,7 +1904,7 @@ openclaw reset --scope full --yes --non-interactive Then re-run setup: ```bash -openclaw setup --wizard --install-daemon +openclaw onboard --install-daemon ``` Notes: @@ -2092,7 +2092,7 @@ Quickest setup: 1. Install Ollama from `https://ollama.com/download` 2. Pull a local model such as `ollama pull glm-4.7-flash` 3. If you want Ollama Cloud too, run `ollama signin` -4. Run `openclaw setup --wizard` and choose `Ollama` +4. Run `openclaw onboard` and choose `Ollama` 5. Pick `Local` or `Cloud + Local` Notes: diff --git a/docs/index.md b/docs/index.md index e8c2210caff..7c69600f55d 100644 --- a/docs/index.md +++ b/docs/index.md @@ -34,7 +34,7 @@ title: "OpenClaw" Install OpenClaw and bring up the Gateway in minutes. - Guided setup with `openclaw setup --wizard` and pairing flows. + Guided setup with `openclaw onboard` and pairing flows. Launch the browser dashboard for chat, config, and sessions. 
@@ -103,7 +103,7 @@ The Gateway is the single source of truth for sessions, routing, and channel con ```bash - openclaw setup --wizard --install-daemon + openclaw onboard --install-daemon ``` diff --git a/docs/install/exe-dev.md b/docs/install/exe-dev.md index b66865593da..c49dab4e426 100644 --- a/docs/install/exe-dev.md +++ b/docs/install/exe-dev.md @@ -31,7 +31,7 @@ Shelley, [exe.dev](https://exe.dev)'s agent, can install OpenClaw instantly with prompt. The prompt used is as below: ``` -Set up OpenClaw (https://docs.openclaw.ai/install) on this VM. Use the non-interactive and accept-risk flags for openclaw setup --wizarding. Add the supplied auth or token as needed. Configure nginx to forward from the default port 18789 to the root location on the default enabled site config, making sure to enable Websocket support. Pairing is done by "openclaw devices list" and "openclaw devices approve ". Make sure the dashboard shows that OpenClaw's health is OK. exe.dev handles forwarding from port 8000 to port 80/443 and HTTPS for us, so the final "reachable" should be .exe.xyz, without port specification. +Set up OpenClaw (https://docs.openclaw.ai/install) on this VM. Use the non-interactive and accept-risk flags for openclaw onboarding. Add the supplied auth or token as needed. Configure nginx to forward from the default port 18789 to the root location on the default enabled site config, making sure to enable Websocket support. Pairing is done by "openclaw devices list" and "openclaw devices approve ". Make sure the dashboard shows that OpenClaw's health is OK. exe.dev handles forwarding from port 8000 to port 80/443 and HTTPS for us, so the final "reachable" should be .exe.xyz, without port specification. 
``` ## Manual installation diff --git a/docs/install/index.md b/docs/install/index.md index 59396c49b5f..21adfdaa592 100644 --- a/docs/install/index.md +++ b/docs/install/index.md @@ -76,7 +76,7 @@ For VPS/cloud hosts, avoid third-party "1-click" marketplace images when possibl ```bash npm install -g openclaw@latest - openclaw setup --wizard --install-daemon + openclaw onboard --install-daemon ``` @@ -93,7 +93,7 @@ For VPS/cloud hosts, avoid third-party "1-click" marketplace images when possibl ```bash pnpm add -g openclaw@latest pnpm approve-builds -g # approve openclaw, node-llama-cpp, sharp, etc. - openclaw setup --wizard --install-daemon + openclaw onboard --install-daemon ``` @@ -140,7 +140,7 @@ For VPS/cloud hosts, avoid third-party "1-click" marketplace images when possibl ```bash - openclaw setup --wizard --install-daemon + openclaw onboard --install-daemon ``` diff --git a/docs/install/installer.md b/docs/install/installer.md index 813fa7b31b4..5859c22fd0d 100644 --- a/docs/install/installer.md +++ b/docs/install/installer.md @@ -224,7 +224,7 @@ Designed for environments where you want everything under a local prefix (defaul | `--version ` | OpenClaw version or dist-tag (default: `latest`) | | `--node-version ` | Node version (default: `22.22.0`) | | `--json` | Emit NDJSON events | -| `--onboard` | Run `openclaw setup --wizard` after install | +| `--onboard` | Run `openclaw onboard` after install | | `--no-onboard` | Skip onboarding (default) | | `--set-npm-prefix` | On Linux, force npm prefix to `~/.npm-global` if current prefix is not writable | | `--help` | Show usage (`-h`) | diff --git a/docs/install/macos-vm.md b/docs/install/macos-vm.md index 3e036c6ee0d..f2eadfda113 100644 --- a/docs/install/macos-vm.md +++ b/docs/install/macos-vm.md @@ -138,7 +138,7 @@ Inside the VM: ```bash npm install -g openclaw@latest -openclaw setup --wizard --install-daemon +openclaw onboard --install-daemon ``` Follow the onboarding prompts to set up your model provider 
(Anthropic, OpenAI, etc.). diff --git a/docs/platforms/digitalocean.md b/docs/platforms/digitalocean.md index aaea2644ca6..cd05587ae76 100644 --- a/docs/platforms/digitalocean.md +++ b/docs/platforms/digitalocean.md @@ -80,7 +80,7 @@ openclaw --version ## 4) Run Onboarding ```bash -openclaw setup --wizard --install-daemon +openclaw onboard --install-daemon ``` The wizard will walk you through: diff --git a/docs/platforms/index.md b/docs/platforms/index.md index 3c7ecca0f48..ec2663aefe4 100644 --- a/docs/platforms/index.md +++ b/docs/platforms/index.md @@ -42,7 +42,7 @@ Native companion apps for Windows are also planned; the Gateway is recommended v Use one of these (all supported): -- Wizard (recommended): `openclaw setup --wizard --install-daemon` +- Wizard (recommended): `openclaw onboard --install-daemon` - Direct: `openclaw gateway install` - Configure flow: `openclaw configure` → select **Gateway service** - Repair/migrate: `openclaw doctor` (offers to install or fix the service) diff --git a/docs/platforms/linux.md b/docs/platforms/linux.md index 29de3dd47ea..c03dba6f795 100644 --- a/docs/platforms/linux.md +++ b/docs/platforms/linux.md @@ -17,7 +17,7 @@ Native Linux companion apps are planned. Contributions are welcome if you want t 1. Install Node 24 (recommended; Node 22 LTS, currently `22.16+`, still works for compatibility) 2. `npm i -g openclaw@latest` -3. `openclaw setup --wizard --install-daemon` +3. `openclaw onboard --install-daemon` 4. From your laptop: `ssh -N -L 18789:127.0.0.1:18789 @` 5. 
Open `http://127.0.0.1:18789/` and paste your token @@ -39,7 +39,7 @@ Step-by-step VPS guide: [exe.dev](/install/exe-dev) Use one of these: ``` -openclaw setup --wizard --install-daemon +openclaw onboard --install-daemon ``` Or: diff --git a/docs/platforms/raspberry-pi.md b/docs/platforms/raspberry-pi.md index 4a3bf7b8204..2050b6395b4 100644 --- a/docs/platforms/raspberry-pi.md +++ b/docs/platforms/raspberry-pi.md @@ -130,7 +130,7 @@ The hackable install gives you direct access to logs and code — useful for deb ## 7) Run Onboarding ```bash -openclaw setup --wizard --install-daemon +openclaw onboard --install-daemon ``` Follow the wizard: diff --git a/docs/platforms/windows.md b/docs/platforms/windows.md index c8047271e65..e40d798604d 100644 --- a/docs/platforms/windows.md +++ b/docs/platforms/windows.md @@ -38,8 +38,8 @@ openclaw agent --local --agent main --thinking low -m "Reply with exactly WINDOW Current caveats: -- `openclaw setup --wizard --non-interactive` still expects a reachable local gateway unless you pass `--skip-health` -- `openclaw setup --wizard --non-interactive --install-daemon` and `openclaw gateway install` try Windows Scheduled Tasks first +- `openclaw onboard --non-interactive` still expects a reachable local gateway unless you pass `--skip-health` +- `openclaw onboard --non-interactive --install-daemon` and `openclaw gateway install` try Windows Scheduled Tasks first - if Scheduled Task creation is denied, OpenClaw falls back to a per-user Startup-folder login item and starts the gateway immediately - if `schtasks` itself wedges or stops responding, OpenClaw now aborts that path quickly and falls back instead of hanging forever - Scheduled Tasks are still preferred when available because they provide better supervisor status @@ -47,7 +47,7 @@ Current caveats: If you want the native CLI only, without gateway service install, use one of these: ```powershell -openclaw setup --wizard --non-interactive --skip-health +openclaw onboard 
--non-interactive --skip-health openclaw gateway run ``` @@ -70,7 +70,7 @@ If Scheduled Task creation is blocked, the fallback service mode still auto-star Inside WSL2: ``` -openclaw setup --wizard --install-daemon +openclaw onboard --install-daemon ``` Or: @@ -230,7 +230,7 @@ cd openclaw pnpm install pnpm ui:build # auto-installs UI deps on first run pnpm build -openclaw setup --wizard +openclaw onboard ``` Full guide: [Getting Started](/start/getting-started) diff --git a/docs/providers/anthropic.md b/docs/providers/anthropic.md index 5611eec7ba4..d16d76f6315 100644 --- a/docs/providers/anthropic.md +++ b/docs/providers/anthropic.md @@ -19,11 +19,11 @@ Create your API key in the Anthropic Console. ### CLI setup ```bash -openclaw setup --wizard +openclaw onboard # choose: Anthropic API key # or non-interactive -openclaw setup --wizard --anthropic-api-key "$ANTHROPIC_API_KEY" +openclaw onboard --anthropic-api-key "$ANTHROPIC_API_KEY" ``` ### Config snippet @@ -214,7 +214,7 @@ openclaw models auth paste-token --provider anthropic ```bash # Paste a setup-token during setup -openclaw setup --wizard --auth-choice setup-token +openclaw onboard --auth-choice setup-token ``` ### Config snippet (setup-token) diff --git a/docs/providers/cloudflare-ai-gateway.md b/docs/providers/cloudflare-ai-gateway.md index 63f471413e8..392a611e705 100644 --- a/docs/providers/cloudflare-ai-gateway.md +++ b/docs/providers/cloudflare-ai-gateway.md @@ -22,7 +22,7 @@ For Anthropic models, use your Anthropic API key. 1. Set the provider API key and Gateway details: ```bash -openclaw setup --wizard --auth-choice cloudflare-ai-gateway-api-key +openclaw onboard --auth-choice cloudflare-ai-gateway-api-key ``` 2. 
Set a default model: @@ -40,7 +40,7 @@ openclaw setup --wizard --auth-choice cloudflare-ai-gateway-api-key ## Non-interactive example ```bash -openclaw setup --wizard --non-interactive \ +openclaw onboard --non-interactive \ --mode local \ --auth-choice cloudflare-ai-gateway-api-key \ --cloudflare-ai-gateway-account-id "your-account-id" \ diff --git a/docs/providers/glm.md b/docs/providers/glm.md index bd096212cd0..64fe39a42df 100644 --- a/docs/providers/glm.md +++ b/docs/providers/glm.md @@ -15,16 +15,16 @@ models are accessed via the `zai` provider and model IDs like `zai/glm-5`. ```bash # Coding Plan Global, recommended for Coding Plan users -openclaw setup --wizard --auth-choice zai-coding-global +openclaw onboard --auth-choice zai-coding-global # Coding Plan CN (China region), recommended for Coding Plan users -openclaw setup --wizard --auth-choice zai-coding-cn +openclaw onboard --auth-choice zai-coding-cn # General API -openclaw setup --wizard --auth-choice zai-global +openclaw onboard --auth-choice zai-global # General API CN (China region) -openclaw setup --wizard --auth-choice zai-cn +openclaw onboard --auth-choice zai-cn ``` ## Config snippet diff --git a/docs/providers/huggingface.md b/docs/providers/huggingface.md index 416037dca49..7b33955f524 100644 --- a/docs/providers/huggingface.md +++ b/docs/providers/huggingface.md @@ -21,7 +21,7 @@ title: "Hugging Face (Inference)" 2. Run onboarding and choose **Hugging Face** in the provider dropdown, then enter your API key when prompted: ```bash -openclaw setup --wizard --auth-choice huggingface-api-key +openclaw onboard --auth-choice huggingface-api-key ``` 3. In the **Default Hugging Face model** dropdown, pick the model you want (the list is loaded from the Inference API when you have a valid token; otherwise a built-in list is shown). Your choice is saved as the default model. 
@@ -40,7 +40,7 @@ openclaw setup --wizard --auth-choice huggingface-api-key ## Non-interactive example ```bash -openclaw setup --wizard --non-interactive \ +openclaw onboard --non-interactive \ --mode local \ --auth-choice huggingface-api-key \ --huggingface-api-key "$HF_TOKEN" diff --git a/docs/providers/index.md b/docs/providers/index.md index 0e5c181f56b..f68cd0e0b53 100644 --- a/docs/providers/index.md +++ b/docs/providers/index.md @@ -15,7 +15,7 @@ Looking for chat channel docs (WhatsApp/Telegram/Discord/Slack/Mattermost (plugi ## Quick start -1. Authenticate with the provider (usually via `openclaw setup --wizard`). +1. Authenticate with the provider (usually via `openclaw onboard`). 2. Set the default model: ```json5 diff --git a/docs/providers/kilocode.md b/docs/providers/kilocode.md index b3d75e64bcf..15f8e4c2b7c 100644 --- a/docs/providers/kilocode.md +++ b/docs/providers/kilocode.md @@ -19,7 +19,7 @@ endpoint and API key. It is OpenAI-compatible, so most OpenAI SDKs work by switc ## CLI setup ```bash -openclaw setup --wizard --kilocode-api-key +openclaw onboard --kilocode-api-key ``` Or set the environment variable: diff --git a/docs/providers/litellm.md b/docs/providers/litellm.md index d96e1bb795c..51ad0d599f8 100644 --- a/docs/providers/litellm.md +++ b/docs/providers/litellm.md @@ -22,7 +22,7 @@ read_when: ### Via onboarding ```bash -openclaw setup --wizard --auth-choice litellm-api-key +openclaw onboard --auth-choice litellm-api-key ``` ### Manual setup diff --git a/docs/providers/minimax.md b/docs/providers/minimax.md index 7a39111f6c2..0d3635352cc 100644 --- a/docs/providers/minimax.md +++ b/docs/providers/minimax.md @@ -44,7 +44,7 @@ Enable the bundled OAuth plugin and authenticate: ```bash openclaw plugins enable minimax # skip if already loaded. 
openclaw gateway restart # restart if gateway is already running -openclaw setup --wizard --auth-choice minimax-portal +openclaw onboard --auth-choice minimax-portal ``` You will be prompted to select an endpoint: diff --git a/docs/providers/mistral.md b/docs/providers/mistral.md index 60a9e82853d..44e594abf21 100644 --- a/docs/providers/mistral.md +++ b/docs/providers/mistral.md @@ -15,9 +15,9 @@ Mistral can also be used for memory embeddings (`memorySearch.provider = "mistra ## CLI setup ```bash -openclaw setup --wizard --auth-choice mistral-api-key +openclaw onboard --auth-choice mistral-api-key # or non-interactive -openclaw setup --wizard --mistral-api-key "$MISTRAL_API_KEY" +openclaw onboard --mistral-api-key "$MISTRAL_API_KEY" ``` ## Config snippet (LLM provider) diff --git a/docs/providers/models.md b/docs/providers/models.md index 0bbff47c51e..a117d286051 100644 --- a/docs/providers/models.md +++ b/docs/providers/models.md @@ -13,7 +13,7 @@ model as `provider/model`. ## Quick start (two steps) -1. Authenticate with the provider (usually via `openclaw setup --wizard`). +1. Authenticate with the provider (usually via `openclaw onboard`). 2. Set the default model: ```json5 diff --git a/docs/providers/moonshot.md b/docs/providers/moonshot.md index de21a6ffb0a..daf9c881de5 100644 --- a/docs/providers/moonshot.md +++ b/docs/providers/moonshot.md @@ -26,13 +26,13 @@ Current Kimi K2 model IDs: [//]: # "moonshot-kimi-k2-ids:end" ```bash -openclaw setup --wizard --auth-choice moonshot-api-key +openclaw onboard --auth-choice moonshot-api-key ``` Kimi Coding: ```bash -openclaw setup --wizard --auth-choice kimi-code-api-key +openclaw onboard --auth-choice kimi-code-api-key ``` Note: Moonshot and Kimi Coding are separate providers. Keys are not interchangeable, endpoints differ, and model refs differ (Moonshot uses `moonshot/...`, Kimi Coding uses `kimi-coding/...`). 
diff --git a/docs/providers/nvidia.md b/docs/providers/nvidia.md index 2708d88db96..693a51db9b3 100644 --- a/docs/providers/nvidia.md +++ b/docs/providers/nvidia.md @@ -16,7 +16,7 @@ Export the key once, then run onboarding and set an NVIDIA model: ```bash export NVIDIA_API_KEY="nvapi-..." -openclaw setup --wizard --auth-choice skip +openclaw onboard --auth-choice skip openclaw models set nvidia/nvidia/llama-3.1-nemotron-70b-instruct ``` diff --git a/docs/providers/ollama.md b/docs/providers/ollama.md index db36f90a2da..5a1eb2bd27e 100644 --- a/docs/providers/ollama.md +++ b/docs/providers/ollama.md @@ -21,7 +21,7 @@ Ollama is a local LLM runtime that makes it easy to run open-source models on yo The fastest way to set up Ollama is through the setup wizard: ```bash -openclaw setup --wizard +openclaw onboard ``` Select **Ollama** from the provider list. The wizard will: @@ -35,7 +35,7 @@ Select **Ollama** from the provider list. The wizard will: Non-interactive mode is also supported: ```bash -openclaw setup --wizard --non-interactive \ +openclaw onboard --non-interactive \ --auth-choice ollama \ --accept-risk ``` @@ -43,7 +43,7 @@ openclaw setup --wizard --non-interactive \ Optionally specify a custom base URL or model: ```bash -openclaw setup --wizard --non-interactive \ +openclaw onboard --non-interactive \ --auth-choice ollama \ --custom-base-url "http://ollama-host:11434" \ --custom-model-id "qwen3.5:27b" \ @@ -73,7 +73,7 @@ ollama signin 4. Run onboarding and choose `Ollama`: ```bash -openclaw setup --wizard +openclaw onboard ``` - `Local`: local models only diff --git a/docs/providers/openai.md b/docs/providers/openai.md index 4f90d092838..a6a60f8f2ea 100644 --- a/docs/providers/openai.md +++ b/docs/providers/openai.md @@ -20,9 +20,9 @@ Get your API key from the OpenAI dashboard. 
### CLI setup ```bash -openclaw setup --wizard --auth-choice openai-api-key +openclaw onboard --auth-choice openai-api-key # or non-interactive -openclaw setup --wizard --openai-api-key "$OPENAI_API_KEY" +openclaw onboard --openai-api-key "$OPENAI_API_KEY" ``` ### Config snippet @@ -52,7 +52,7 @@ Codex cloud requires ChatGPT sign-in, while the Codex CLI supports ChatGPT or AP ```bash # Run Codex OAuth in the wizard -openclaw setup --wizard --auth-choice openai-codex +openclaw onboard --auth-choice openai-codex # Or run OAuth directly openclaw models auth login --provider openai-codex diff --git a/docs/providers/opencode-go.md b/docs/providers/opencode-go.md index 2d826712977..4552e916beb 100644 --- a/docs/providers/opencode-go.md +++ b/docs/providers/opencode-go.md @@ -21,9 +21,9 @@ provider id `opencode-go` so upstream per-model routing stays correct. ## CLI setup ```bash -openclaw setup --wizard --auth-choice opencode-go +openclaw onboard --auth-choice opencode-go # or non-interactive -openclaw setup --wizard --opencode-go-api-key "$OPENCODE_API_KEY" +openclaw onboard --opencode-go-api-key "$OPENCODE_API_KEY" ``` ## Config snippet diff --git a/docs/providers/opencode.md b/docs/providers/opencode.md index 98eb2cfcbe0..da44e5154c0 100644 --- a/docs/providers/opencode.md +++ b/docs/providers/opencode.md @@ -22,15 +22,15 @@ as one OpenCode setup. 
### Zen catalog ```bash -openclaw setup --wizard --auth-choice opencode-zen -openclaw setup --wizard --opencode-zen-api-key "$OPENCODE_API_KEY" +openclaw onboard --auth-choice opencode-zen +openclaw onboard --opencode-zen-api-key "$OPENCODE_API_KEY" ``` ### Go catalog ```bash -openclaw setup --wizard --auth-choice opencode-go -openclaw setup --wizard --opencode-go-api-key "$OPENCODE_API_KEY" +openclaw onboard --auth-choice opencode-go +openclaw onboard --opencode-go-api-key "$OPENCODE_API_KEY" ``` ## Config snippet diff --git a/docs/providers/openrouter.md b/docs/providers/openrouter.md index 4da33dbb1bc..5a9023481be 100644 --- a/docs/providers/openrouter.md +++ b/docs/providers/openrouter.md @@ -14,7 +14,7 @@ endpoint and API key. It is OpenAI-compatible, so most OpenAI SDKs work by switc ## CLI setup ```bash -openclaw setup --wizard --auth-choice apiKey --token-provider openrouter --token "$OPENROUTER_API_KEY" +openclaw onboard --auth-choice apiKey --token-provider openrouter --token "$OPENROUTER_API_KEY" ``` ## Config snippet diff --git a/docs/providers/qianfan.md b/docs/providers/qianfan.md index 9784dcc64dd..1e80dafb26b 100644 --- a/docs/providers/qianfan.md +++ b/docs/providers/qianfan.md @@ -27,7 +27,7 @@ endpoint and API key. It is OpenAI-compatible, so most OpenAI SDKs work by switc ## CLI setup ```bash -openclaw setup --wizard --auth-choice qianfan-api-key +openclaw onboard --auth-choice qianfan-api-key ``` ## Related Documentation diff --git a/docs/providers/sglang.md b/docs/providers/sglang.md index 96d33d5e767..ce66950c0c3 100644 --- a/docs/providers/sglang.md +++ b/docs/providers/sglang.md @@ -33,7 +33,7 @@ export SGLANG_API_KEY="sglang-local" 3. 
Run onboarding and choose `SGLang`, or set a model directly: ```bash -openclaw setup --wizard +openclaw onboard ``` ```json5 diff --git a/docs/providers/synthetic.md b/docs/providers/synthetic.md index 0e662320984..ae406a0e390 100644 --- a/docs/providers/synthetic.md +++ b/docs/providers/synthetic.md @@ -17,7 +17,7 @@ Synthetic exposes Anthropic-compatible endpoints. OpenClaw registers it as the 2. Run onboarding: ```bash -openclaw setup --wizard --auth-choice synthetic-api-key +openclaw onboard --auth-choice synthetic-api-key ``` The default model is set to: diff --git a/docs/providers/together.md b/docs/providers/together.md index e93224e5da3..62bab43a204 100644 --- a/docs/providers/together.md +++ b/docs/providers/together.md @@ -18,7 +18,7 @@ The [Together AI](https://together.ai) provides access to leading open-source mo 1. Set the API key (recommended: store it for the Gateway): ```bash -openclaw setup --wizard --auth-choice together-api-key +openclaw onboard --auth-choice together-api-key ``` 2. 
Set a default model: @@ -36,7 +36,7 @@ openclaw setup --wizard --auth-choice together-api-key ## Non-interactive example ```bash -openclaw setup --wizard --non-interactive \ +openclaw onboard --non-interactive \ --mode local \ --auth-choice together-api-key \ --together-api-key "$TOGETHER_API_KEY" diff --git a/docs/providers/venice.md b/docs/providers/venice.md index a793239eb6f..520cf22d82b 100644 --- a/docs/providers/venice.md +++ b/docs/providers/venice.md @@ -58,7 +58,7 @@ export VENICE_API_KEY="vapi_xxxxxxxxxxxx" **Option B: Interactive Setup (Recommended)** ```bash -openclaw setup --wizard --auth-choice venice-api-key +openclaw onboard --auth-choice venice-api-key ``` This will: @@ -71,7 +71,7 @@ This will: **Option C: Non-interactive** ```bash -openclaw setup --wizard --non-interactive \ +openclaw onboard --non-interactive \ --auth-choice venice-api-key \ --venice-api-key "vapi_xxxxxxxxxxxx" ``` diff --git a/docs/providers/vercel-ai-gateway.md b/docs/providers/vercel-ai-gateway.md index 55acf7f2ba7..f76e2b51bb5 100644 --- a/docs/providers/vercel-ai-gateway.md +++ b/docs/providers/vercel-ai-gateway.md @@ -21,7 +21,7 @@ The [Vercel AI Gateway](https://vercel.com/ai-gateway) provides a unified API to 1. Set the API key (recommended: store it for the Gateway): ```bash -openclaw setup --wizard --auth-choice ai-gateway-api-key +openclaw onboard --auth-choice ai-gateway-api-key ``` 2. Set a default model: @@ -39,7 +39,7 @@ openclaw setup --wizard --auth-choice ai-gateway-api-key ## Non-interactive example ```bash -openclaw setup --wizard --non-interactive \ +openclaw onboard --non-interactive \ --mode local \ --auth-choice ai-gateway-api-key \ --ai-gateway-api-key "$AI_GATEWAY_API_KEY" diff --git a/docs/providers/xiaomi.md b/docs/providers/xiaomi.md index ec6ec043125..da1cf7fe38a 100644 --- a/docs/providers/xiaomi.md +++ b/docs/providers/xiaomi.md @@ -22,9 +22,9 @@ the `xiaomi` provider with a Xiaomi MiMo API key. 
## CLI setup ```bash -openclaw setup --wizard --auth-choice xiaomi-api-key +openclaw onboard --auth-choice xiaomi-api-key # or non-interactive -openclaw setup --wizard --auth-choice xiaomi-api-key --xiaomi-api-key "$XIAOMI_API_KEY" +openclaw onboard --auth-choice xiaomi-api-key --xiaomi-api-key "$XIAOMI_API_KEY" ``` ## Config snippet diff --git a/docs/providers/zai.md b/docs/providers/zai.md index 86a0b3c6878..6f3aea27020 100644 --- a/docs/providers/zai.md +++ b/docs/providers/zai.md @@ -16,16 +16,16 @@ with a Z.AI API key. ```bash # Coding Plan Global, recommended for Coding Plan users -openclaw setup --wizard --auth-choice zai-coding-global +openclaw onboard --auth-choice zai-coding-global # Coding Plan CN (China region), recommended for Coding Plan users -openclaw setup --wizard --auth-choice zai-coding-cn +openclaw onboard --auth-choice zai-coding-cn # General API -openclaw setup --wizard --auth-choice zai-global +openclaw onboard --auth-choice zai-global # General API CN (China region) -openclaw setup --wizard --auth-choice zai-cn +openclaw onboard --auth-choice zai-cn ``` ## Config snippet diff --git a/docs/reference/wizard.md b/docs/reference/wizard.md index b52aa74086d..5bfa3da7f9f 100644 --- a/docs/reference/wizard.md +++ b/docs/reference/wizard.md @@ -2,7 +2,7 @@ summary: "Full reference for the CLI setup wizard: every step, flag, and config field" read_when: - Looking up a specific wizard step or flag - - Automating setup with non-interactive mode + - Automating onboarding with non-interactive mode - Debugging wizard behavior title: "Setup Wizard Reference" sidebarTitle: "Wizard Reference" @@ -10,7 +10,7 @@ sidebarTitle: "Wizard Reference" # Setup Wizard Reference -This is the full reference for the `openclaw setup --wizard` CLI wizard. +This is the full reference for the `openclaw onboard` CLI wizard. For a high-level overview, see [Setup Wizard](/start/wizard). 
## Flow details (local mode) @@ -76,11 +76,11 @@ For a high-level overview, see [Setup Wizard](/start/wizard). - In token mode, interactive setup offers: - **Generate/store plaintext token** (default) - **Use SecretRef** (opt-in) - - Quickstart reuses existing `gateway.auth.token` SecretRefs across `env`, `file`, and `exec` providers for setup probe/dashboard bootstrap. - - If that SecretRef is configured but cannot be resolved, setup fails early with a clear fix message instead of silently degrading runtime auth. + - Quickstart reuses existing `gateway.auth.token` SecretRefs across `env`, `file`, and `exec` providers for onboarding probe/dashboard bootstrap. + - If that SecretRef is configured but cannot be resolved, onboarding fails early with a clear fix message instead of silently degrading runtime auth. - In password mode, interactive setup also supports plaintext or SecretRef storage. - Non-interactive token SecretRef path: `--gateway-token-ref-env `. - - Requires a non-empty env var in the setup process environment. + - Requires a non-empty env var in the onboarding process environment. - Cannot be combined with `--gateway-token`. - Disable auth only if you fully trust every local process. - Non‑loopback binds still require auth. 
@@ -137,7 +137,7 @@ If the Control UI assets are missing, the wizard attempts to build them; fallbac Use `--non-interactive` to automate or script onboarding: ```bash -openclaw setup --wizard --non-interactive \ +openclaw onboard --non-interactive \ --mode local \ --auth-choice apiKey \ --anthropic-api-key "$ANTHROPIC_API_KEY" \ @@ -154,7 +154,7 @@ Gateway token SecretRef in non-interactive mode: ```bash export OPENCLAW_GATEWAY_TOKEN="your-token" -openclaw setup --wizard --non-interactive \ +openclaw onboard --non-interactive \ --mode local \ --auth-choice skip \ --gateway-auth token \ diff --git a/docs/start/getting-started.md b/docs/start/getting-started.md index af779afbe42..3fc64e5087d 100644 --- a/docs/start/getting-started.md +++ b/docs/start/getting-started.md @@ -54,7 +54,7 @@ Check your Node version with `node --version` if you are unsure. ```bash - openclaw setup --wizard --install-daemon + openclaw onboard --install-daemon ``` The wizard configures auth, gateway settings, and optional channels. diff --git a/docs/start/onboarding-overview.md b/docs/start/onboarding-overview.md index c2147252d2b..1e94a4db64a 100644 --- a/docs/start/onboarding-overview.md +++ b/docs/start/onboarding-overview.md @@ -1,18 +1,18 @@ --- -summary: "Overview of OpenClaw setup options and flows" +summary: "Overview of OpenClaw onboarding options and flows" read_when: - - Choosing a setup path + - Choosing an onboarding path - Setting up a new environment -title: "Setup Overview" -sidebarTitle: "Setup Overview" +title: "Onboarding Overview" +sidebarTitle: "Onboarding Overview" --- -# Setup Overview +# Onboarding Overview -OpenClaw supports multiple setup paths depending on where the Gateway runs +OpenClaw supports multiple onboarding paths depending on where the Gateway runs and how you prefer to configure providers. -## Choose your setup path +## Choose your onboarding path - **CLI wizard** for macOS, Linux, and Windows (via WSL2). 
- **macOS app** for a guided first run on Apple silicon or Intel Macs. @@ -22,14 +22,14 @@ and how you prefer to configure providers. Run the wizard in a terminal: ```bash -openclaw setup --wizard +openclaw onboard ``` Use the CLI wizard when you want full control of the Gateway, workspace, channels, and skills. Docs: - [Setup Wizard (CLI)](/start/wizard) -- [`openclaw setup --wizard` command](/cli/setup) +- [`openclaw onboard` command](/cli/onboard) ## macOS app onboarding @@ -48,4 +48,4 @@ CLI wizard. You will be asked to: - Provide a model ID and optional alias. - Choose an Endpoint ID so multiple custom endpoints can coexist. -For detailed steps, follow the CLI setup docs above. +For detailed steps, follow the CLI onboarding docs above. diff --git a/docs/start/wizard-cli-automation.md b/docs/start/wizard-cli-automation.md index 17803cefe48..884d49e143b 100644 --- a/docs/start/wizard-cli-automation.md +++ b/docs/start/wizard-cli-automation.md @@ -1,7 +1,7 @@ --- -summary: "Scripted setup wizard and agent setup for the OpenClaw CLI" +summary: "Scripted onboarding and agent setup for the OpenClaw CLI" read_when: - - You are automating setup in scripts or CI + - You are automating onboarding in scripts or CI - You need non-interactive examples for specific providers title: "CLI Automation" sidebarTitle: "CLI automation" @@ -9,7 +9,7 @@ sidebarTitle: "CLI automation" # CLI Automation -Use `--non-interactive` to automate `openclaw setup --wizard`. +Use `--non-interactive` to automate `openclaw onboard`. `--json` does not imply non-interactive mode. Use `--non-interactive` (and `--workspace`) for scripts. @@ -18,7 +18,7 @@ Use `--non-interactive` to automate `openclaw setup --wizard`. 
## Baseline non-interactive example ```bash -openclaw setup --wizard --non-interactive \ +openclaw onboard --non-interactive \ --mode local \ --auth-choice apiKey \ --anthropic-api-key "$ANTHROPIC_API_KEY" \ @@ -41,7 +41,7 @@ Passing inline key flags without the matching env var now fails fast. Example: ```bash -openclaw setup --wizard --non-interactive \ +openclaw onboard --non-interactive \ --mode local \ --auth-choice openai-api-key \ --secret-input-mode ref \ @@ -53,7 +53,7 @@ openclaw setup --wizard --non-interactive \ ```bash - openclaw setup --wizard --non-interactive \ + openclaw onboard --non-interactive \ --mode local \ --auth-choice gemini-api-key \ --gemini-api-key "$GEMINI_API_KEY" \ @@ -63,7 +63,7 @@ openclaw setup --wizard --non-interactive \ ```bash - openclaw setup --wizard --non-interactive \ + openclaw onboard --non-interactive \ --mode local \ --auth-choice zai-api-key \ --zai-api-key "$ZAI_API_KEY" \ @@ -73,7 +73,7 @@ openclaw setup --wizard --non-interactive \ ```bash - openclaw setup --wizard --non-interactive \ + openclaw onboard --non-interactive \ --mode local \ --auth-choice ai-gateway-api-key \ --ai-gateway-api-key "$AI_GATEWAY_API_KEY" \ @@ -83,7 +83,7 @@ openclaw setup --wizard --non-interactive \ ```bash - openclaw setup --wizard --non-interactive \ + openclaw onboard --non-interactive \ --mode local \ --auth-choice cloudflare-ai-gateway-api-key \ --cloudflare-ai-gateway-account-id "your-account-id" \ @@ -95,7 +95,7 @@ openclaw setup --wizard --non-interactive \ ```bash - openclaw setup --wizard --non-interactive \ + openclaw onboard --non-interactive \ --mode local \ --auth-choice moonshot-api-key \ --moonshot-api-key "$MOONSHOT_API_KEY" \ @@ -105,7 +105,7 @@ openclaw setup --wizard --non-interactive \ ```bash - openclaw setup --wizard --non-interactive \ + openclaw onboard --non-interactive \ --mode local \ --auth-choice mistral-api-key \ --mistral-api-key "$MISTRAL_API_KEY" \ @@ -115,7 +115,7 @@ openclaw setup --wizard 
+ - You need detailed behavior for `openclaw onboard`
You are debugging onboarding results or integrating onboarding clients title: "CLI Setup Reference" sidebarTitle: "CLI reference" --- # CLI Setup Reference -This page is the full reference for `openclaw setup --wizard`. +This page is the full reference for `openclaw onboard`. For the short guide, see [Setup Wizard (CLI)](/start/wizard). ## What the wizard does @@ -56,7 +56,7 @@ It does not install or modify anything on the remote host. - **Use SecretRef** (opt-in) - In password mode, interactive setup also supports plaintext or SecretRef storage. - Non-interactive token SecretRef path: `--gateway-token-ref-env `. - - Requires a non-empty env var in the setup process environment. + - Requires a non-empty env var in the onboarding process environment. - Cannot be combined with `--gateway-token`. - Disable auth only if you fully trust every local process. - Non-loopback binds still require auth. @@ -220,20 +220,20 @@ Credential and profile paths: Credential storage mode: -- Default setup behavior persists API keys as plaintext values in auth profiles. +- Default onboarding behavior persists API keys as plaintext values in auth profiles. - `--secret-input-mode ref` enables reference mode instead of plaintext key storage. In interactive setup, you can choose either: - environment variable ref (for example `keyRef: { source: "env", provider: "default", id: "OPENAI_API_KEY" }`) - configured provider ref (`file` or `exec`) with provider alias + id - Interactive reference mode runs a fast preflight validation before saving. - - Env refs: validates variable name + non-empty value in the current setup environment. + - Env refs: validates variable name + non-empty value in the current onboarding environment. - Provider refs: validates provider config and resolves the requested id. - - If preflight fails, setup shows the error and lets you retry. + - If preflight fails, onboarding shows the error and lets you retry. 
- In non-interactive mode, `--secret-input-mode ref` is env-backed only. - - Set the provider env var in the setup process environment. - - Inline key flags (for example `--openai-api-key`) require that env var to be set; otherwise setup fails fast. + - Set the provider env var in the onboarding process environment. + - Inline key flags (for example `--openai-api-key`) require that env var to be set; otherwise onboarding fails fast. - For custom providers, non-interactive `ref` mode stores `models.providers..apiKey` as `{ source: "env", provider: "default", id: "CUSTOM_API_KEY" }`. - - In that custom-provider case, `--custom-api-key` requires `CUSTOM_API_KEY` to be set; otherwise setup fails fast. + - In that custom-provider case, `--custom-api-key` requires `CUSTOM_API_KEY` to be set; otherwise onboarding fails fast. - Gateway auth credentials support plaintext and SecretRef choices in interactive setup: - Token mode: **Generate/store plaintext token** (default) or **Use SecretRef**. - Password mode: plaintext or SecretRef. 
@@ -252,9 +252,9 @@ Typical fields in `~/.openclaw/openclaw.json`: - `agents.defaults.workspace` - `agents.defaults.model` / `models.providers` (if Minimax chosen) -- `tools.profile` (local setup defaults to `"coding"` when unset; existing explicit values are preserved) +- `tools.profile` (local onboarding defaults to `"coding"` when unset; existing explicit values are preserved) - `gateway.*` (mode, bind, auth, tailscale) -- `session.dmScope` (local setup defaults this to `per-channel-peer` when unset; existing explicit values are preserved) +- `session.dmScope` (local onboarding defaults this to `per-channel-peer` when unset; existing explicit values are preserved) - `channels.telegram.botToken`, `channels.discord.token`, `channels.signal.*`, `channels.imessage.*` - Channel allowlists (Slack, Discord, Matrix, Microsoft Teams) when you opt in during prompts (names resolve to IDs when possible) - `skills.install.nodeManager` @@ -296,4 +296,4 @@ Signal setup behavior: - Onboarding hub: [Setup Wizard (CLI)](/start/wizard) - Automation and scripts: [CLI Automation](/start/wizard-cli-automation) -- Command reference: [`openclaw setup --wizard`](/cli/setup) +- Command reference: [`openclaw onboard`](/cli/onboard) diff --git a/docs/start/wizard.md b/docs/start/wizard.md index fe887ea9a4f..7bbe9df64cf 100644 --- a/docs/start/wizard.md +++ b/docs/start/wizard.md @@ -4,7 +4,7 @@ read_when: - Running or configuring the setup wizard - Setting up a new machine title: "Setup Wizard (CLI)" -sidebarTitle: "Setup: CLI" +sidebarTitle: "Onboarding: CLI" --- # Setup Wizard (CLI) @@ -15,7 +15,7 @@ It configures a local Gateway or a remote Gateway connection, plus channels, ski and workspace defaults in one guided flow. ```bash -openclaw setup --wizard +openclaw onboard ``` @@ -52,7 +52,7 @@ The wizard starts with **QuickStart** (defaults) vs **Advanced** (full control). 
- Gateway port **18789** - Gateway auth **Token** (auto‑generated, even on loopback) - Tool policy default for new local setups: `tools.profile: "coding"` (existing explicit profile is preserved) - - DM isolation default: local setup writes `session.dmScope: "per-channel-peer"` when unset. Details: [CLI Setup Reference](/start/wizard-cli-reference#outputs-and-internals) + - DM isolation default: local onboarding writes `session.dmScope: "per-channel-peer"` when unset. Details: [CLI Setup Reference](/start/wizard-cli-reference#outputs-and-internals) - Tailscale exposure **Off** - Telegram + WhatsApp DMs default to **allowlist** (you'll be prompted for your phone number) @@ -119,7 +119,7 @@ For the deeper technical reference, including RPC details, see ## Related docs -- CLI command reference: [`openclaw setup`](/cli/setup) -- Setup overview: [Setup Overview](/start/onboarding-overview) +- CLI command reference: [`openclaw onboard`](/cli/onboard) +- Onboarding overview: [Onboarding Overview](/start/onboarding-overview) - macOS app onboarding: [Onboarding](/start/onboarding) - Agent first-run ritual: [Agent Bootstrapping](/start/bootstrapping) diff --git a/docs/tools/plugin.md b/docs/tools/plugin.md index 560d25930d5..c14f3c39f56 100644 --- a/docs/tools/plugin.md +++ b/docs/tools/plugin.md @@ -1195,11 +1195,11 @@ A provider plugin can participate in five distinct phases: `auth[].run(ctx)` performs OAuth, API-key capture, device code, or custom setup and returns auth profiles plus optional config patches. 2. **Non-interactive setup** - `auth[].runNonInteractive(ctx)` handles `openclaw setup --wizard --non-interactive` + `auth[].runNonInteractive(ctx)` handles `openclaw onboard --non-interactive` without prompts. Use this when the provider needs custom headless setup beyond the built-in simple API-key paths. 3. **Wizard integration** - `wizard.setup` adds an entry to `openclaw setup --wizard`. + `wizard.setup` adds an entry to `openclaw onboard`. 
`wizard.modelPicker` adds a setup entry to the model picker. 4. **Implicit discovery** `discovery.run(ctx)` can contribute provider config automatically during @@ -1360,7 +1360,7 @@ or more auth methods (OAuth, API key, device code, etc.). Those methods can power: - `openclaw models auth login --provider [--method ]` -- `openclaw setup --wizard` +- `openclaw onboard` - model-picker “custom provider” setup entries - implicit provider discovery during model resolution/listing @@ -1435,7 +1435,7 @@ Notes: for headless onboarding. - Return `configPatch` when you need to add default models or provider config. - Return `defaultModel` so `--set-default` can update agent defaults. -- `wizard.setup` adds a provider choice to `openclaw setup --wizard`. +- `wizard.setup` adds a provider choice to `openclaw onboard`. - `wizard.modelPicker` adds a “setup this provider” entry to the model picker. - `discovery.run` returns either `{ provider }` for the plugin’s own provider id or `{ providers }` for multi-provider discovery. 
diff --git a/src/cli/program/register.setup.ts b/src/cli/program/register.setup.ts index 95888cb236a..33893d945bb 100644 --- a/src/cli/program/register.setup.ts +++ b/src/cli/program/register.setup.ts @@ -10,7 +10,7 @@ import { hasExplicitOptions } from "../command-options.js"; export function registerSetupCommand(program: Command) { program .command("setup") - .description("Initialize config/workspace or run the setup wizard") + .description("Initialize ~/.openclaw/openclaw.json and the agent workspace") .addHelpText( "after", () => @@ -20,8 +20,8 @@ export function registerSetupCommand(program: Command) { "--workspace ", "Agent workspace directory (default: ~/.openclaw/workspace; stored as agents.defaults.workspace)", ) - .option("--wizard", "Run the guided setup wizard", false) - .option("--non-interactive", "Run the setup wizard without prompts", false) + .option("--wizard", "Run the interactive onboarding wizard", false) + .option("--non-interactive", "Run the wizard without prompts", false) .option("--mode ", "Wizard mode: local|remote") .option("--remote-url ", "Remote Gateway WebSocket URL") .option("--remote-token ", "Remote Gateway token (optional)") diff --git a/src/commands/onboard.ts b/src/commands/onboard.ts index 8eb16fb2c07..c9af3fbf937 100644 --- a/src/commands/onboard.ts +++ b/src/commands/onboard.ts @@ -61,7 +61,7 @@ export async function setupWizardCommand( [ "Non-interactive setup requires explicit risk acknowledgement.", "Read: https://docs.openclaw.ai/security", - `Re-run with: ${formatCliCommand("openclaw setup --wizard --non-interactive --accept-risk ...")}`, + `Re-run with: ${formatCliCommand("openclaw onboard --non-interactive --accept-risk ...")}`, ].join("\n"), ); runtime.exit(1); diff --git a/src/commands/reset.ts b/src/commands/reset.ts index eca1d78e7c1..596d80a139a 100644 --- a/src/commands/reset.ts +++ b/src/commands/reset.ts @@ -134,7 +134,7 @@ export async function resetCommand(runtime: RuntimeEnv, opts: ResetOptions) { for 
(const dir of sessionDirs) { await removePath(dir, runtime, { dryRun, label: dir }); } - runtime.log(`Next: ${formatCliCommand("openclaw setup --wizard --install-daemon")}`); + runtime.log(`Next: ${formatCliCommand("openclaw onboard --install-daemon")}`); return; } @@ -145,7 +145,7 @@ export async function resetCommand(runtime: RuntimeEnv, opts: ResetOptions) { { dryRun }, ); await removeWorkspaceDirs(workspaceDirs, runtime, { dryRun }); - runtime.log(`Next: ${formatCliCommand("openclaw setup --wizard --install-daemon")}`); + runtime.log(`Next: ${formatCliCommand("openclaw onboard --install-daemon")}`); return; } }