fix: show fast mode in status

This commit is contained in:
Peter Steinberger
2026-04-23 08:03:34 +01:00
parent 2d7a4edba3
commit 8714badc0c
8 changed files with 76 additions and 2 deletions

View File

@@ -39,6 +39,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Status: show `Fast` in `/status` when fast mode is enabled, including config/default-derived fast mode, and omit it when disabled.
- Models/auth: merge provider-owned default-model additions from `openclaw models auth login` instead of replacing `agents.defaults.models`, so re-authenticating an OAuth provider such as OpenAI Codex no longer wipes other providers' aliases and per-model params. Migrations that must rename keys (Anthropic -> Claude CLI) opt in with `replaceDefaultModels`. Fixes #69414. (#70435) Thanks @neeravmakwana.
- Media understanding/audio: prefer configured or key-backed STT providers before auto-detected local Whisper CLIs, so installed local transcription tools no longer shadow API providers such as Groq/OpenAI in `tools.media.audio` auto mode. Fixes #68727.
- Providers/OpenAI: lock the auth picker wording for OpenAI API key, Codex browser login, and Codex device pairing so the setup choices no longer imply a mixed Codex/API-key auth path. (#67848) Thanks @tmlxrd.

View File

@@ -68,6 +68,7 @@ title: "Thinking Levels"
- For direct public `anthropic/*` requests, including OAuth-authenticated traffic sent to `api.anthropic.com`, fast mode maps to Anthropic service tiers: `/fast on` sets `service_tier=auto`, `/fast off` sets `service_tier=standard_only`.
- For `minimax/*` on the Anthropic-compatible path, `/fast on` (or `params.fastMode: true`) rewrites `MiniMax-M2.7` to `MiniMax-M2.7-highspeed`.
- Explicit Anthropic `serviceTier` / `service_tier` model params override the fast-mode default when both are set. OpenClaw still skips Anthropic service-tier injection for non-Anthropic proxy base URLs.
- `/status` shows `Fast` only when fast mode is enabled.
## Verbose directives (/verbose or /v)

View File

@@ -268,6 +268,23 @@ describe("info command handlers", () => {
);
});
it("forwards resolved fast mode to /status", async () => {
const params = buildInfoParams("/status", {
commands: { text: true },
channels: { whatsapp: { allowFrom: ["*"] } },
} as OpenClawConfig);
params.resolvedFastMode = true;
const statusResult = await handleStatusCommand(params, true);
expect(statusResult?.shouldContinue).toBe(false);
expect(vi.mocked(buildStatusReply)).toHaveBeenCalledWith(
expect.objectContaining({
resolvedFastMode: true,
}),
);
});
it("uses the canonical target session agent when listing /commands", async () => {
const { handleCommandsListCommand } = await import("./commands-info.js");
const params = buildInfoParams("/commands", {

View File

@@ -204,6 +204,7 @@ export const handleStatusCommand: CommandHandler = async (params, allowTextComma
model: params.model,
contextTokens: params.contextTokens,
resolvedThinkLevel: params.resolvedThinkLevel,
resolvedFastMode: params.resolvedFastMode,
resolvedVerboseLevel: params.resolvedVerboseLevel,
resolvedReasoningLevel: params.resolvedReasoningLevel,
resolvedElevatedLevel: params.resolvedElevatedLevel,

View File

@@ -53,6 +53,7 @@ export type HandleCommandsParams = {
opts?: GetReplyOptions;
defaultGroupActivation: () => "always" | "mention";
resolvedThinkLevel?: ThinkLevel;
resolvedFastMode?: boolean;
resolvedVerboseLevel: VerboseLevel;
resolvedReasoningLevel: ReasoningLevel;
resolvedElevatedLevel?: ElevatedLevel;

View File

@@ -276,7 +276,24 @@ describe("buildStatusMessage", () => {
queue: { mode: "collect", depth: 0 },
});
expect(normalizeTestText(text)).toContain("Fast: on");
expect(normalizeTestText(text)).toContain("Fast");
});
it("hides fast mode when disabled", () => {
  // With fastMode explicitly false, the rendered status text must not
  // mention the Fast label at all.
  const statusText = buildStatusMessage({
    agent: { model: "anthropic/claude-opus-4-6" },
    sessionEntry: { sessionId: "fast-off", updatedAt: 0, fastMode: false },
    sessionKey: "agent:main:main",
    queue: { mode: "collect", depth: 0 },
  });
  expect(normalizeTestText(statusText)).not.toContain("Fast");
});
it("shows configured text verbosity for the active model", () => {

View File

@@ -0,0 +1,29 @@
import { describe, expect, it } from "vitest";
import { normalizeTestText } from "../../test/helpers/normalize-text.js";
import { buildStatusMessage } from "./status-message.js";

// Render a status message for the given model with fast mode toggled,
// normalized so substring assertions are stable across formatting.
const renderFastStatus = (model: string, fastMode: boolean): string => {
  const message = buildStatusMessage({
    modelAuth: "api-key",
    activeModelAuth: "api-key",
    agent: { model },
    sessionEntry: { sessionId: "fast-status", updatedAt: 0, fastMode },
    sessionKey: "agent:main:main",
    queue: { mode: "collect", depth: 0 },
  });
  return normalizeTestText(message);
};

describe("buildStatusMessage fast mode labels", () => {
  it("shows fast mode when enabled", () => {
    expect(renderFastStatus("openai/gpt-5.4", true)).toContain("Fast");
  });

  it("hides fast mode when disabled", () => {
    expect(renderFastStatus("anthropic/claude-opus-4-6", false)).not.toContain(
      "Fast",
    );
  });
});

View File

@@ -237,6 +237,13 @@ const formatQueueDetails = (queue?: QueueStatus) => {
return detailParts.length ? ` (${detailParts.join(" · ")})` : "";
};
// Status-line label for fast mode: "Fast" when enabled, otherwise null so
// the caller's option list filters it out entirely.
const formatFastModeLabel = (enabled: boolean): string | null =>
  enabled ? "Fast" : null;
const readUsageFromSessionLog = (
sessionId?: string,
sessionEntry?: SessionEntry,
@@ -705,7 +712,7 @@ export function buildStatusMessage(args: StatusArgs): string {
const optionParts = [
`Runtime: ${runtime.label}`,
`Think: ${thinkLevel}`,
fastMode ? "Fast: on" : null,
formatFastModeLabel(fastMode),
textVerbosity ? `Text: ${textVerbosity}` : null,
verboseLabel,
traceLabel,