fix(auto-reply): pass model catalog to think menus

Peter Steinberger
2026-04-28 19:37:02 +01:00
parent f351961173
commit 065284deab
5 changed files with 100 additions and 7 deletions

View File

@@ -30,6 +30,7 @@ Docs: https://docs.openclaw.ai
- Memory-core/dreaming: retry managed dreaming cron registration after startup when the cron service is not reachable yet, so the scheduled Memory Dreaming Promotion sweep recovers without waiting for heartbeat traffic. Fixes #72841. Thanks @amknight.
- Acpx/runtime: validate the runtime session mode at the `AcpxRuntime.ensureSession` wrapper boundary so callers that pass anything other than `persistent` or `oneshot` get a clear `ACP_INVALID_RUNTIME_OPTION` error instead of silently round-tripping through the encoded handle as a default `persistent` mode and later throwing `SessionResumeRequiredError`. Investigation context: #73071. (#73548) Thanks @amknight.
- CLI/infer: keep web-search fallback on missing provider API keys, preserve structured validation errors from the selected provider, and let per-request image describe prompts override configured media-entry prompts. (#63263) Thanks @Spolen23.
- Chat commands: include configured model-catalog reasoning metadata when building `/think` argument menus so Ollama Cloud and other provider-owned reasoning models show supported levels instead of only `off`; see the config sketch after this list. Fixes #73515; supersedes #73568. Thanks @danielzinhu99 and @neeravmakwana.
- CLI/model probes: add repeatable image `--file` inputs to `infer model run` for local and gateway multimodal model smokes, so vision models such as Ollama Qwen VL and Gemini can be tested through the raw model-probe surface. Fixes #63700. Thanks @cedricjanssens.
- CLI/image describe: pass `--prompt` and `--timeout-ms` through `infer image describe` and `describe-many`, so custom vision instructions and slow local model budgets reach media-understanding providers such as Ollama, OpenAI, Google, and OpenRouter. Refs #63700. Thanks @cedricjanssens.
- WhatsApp/Web: pass explicit Baileys socket timings into every WhatsApp Web socket and expose `web.whatsapp.*` keepalive, connect, and query timeout settings so unstable networks can avoid repeated 408 disconnect and opening-handshake timeout loops. Fixes #56365. (#73580) Thanks @velvet-shark.
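For the `/think` catalog entry above, a minimal config sketch, assuming the user-facing configuration mirrors the `models.providers` shape exercised by the new test further down in this commit (the exact config file format is not part of this diff):

```ts
// Assumed shape, copied from the test fixture below. The behaviour-relevant bit is
// `reasoning: true`: it is what makes /think offer off/low/medium/high/max instead of
// only "off" for a provider-owned model such as Ollama Cloud's GLM 5.1.
const cfg = {
  models: {
    providers: {
      ollama: {
        models: [{ id: "glm-5.1:cloud", name: "GLM 5.1 Cloud", reasoning: true }],
      },
    },
  },
};
```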

View File

@@ -714,7 +714,7 @@ export function buildBuiltinChatCommands(): ChatCommandDefinition[] {
name: "level",
description: "Thinking level",
type: "string",
choices: ({ provider, model }) => listThinkingLevels(provider, model),
choices: ({ provider, model, catalog }) => listThinkingLevels(provider, model, catalog),
},
],
argsMenu: "auto",

View File

@@ -61,6 +61,27 @@ function installSlackNativeCommandOverrides() {
  });
}

function installOllamaThinkingProvider() {
  const registry = createTestRegistry();
  registry.providers.push({
    pluginId: "ollama",
    source: "test",
    provider: {
      id: "ollama",
      label: "Ollama",
      auth: [],
      resolveThinkingProfile: ({ reasoning }: { reasoning?: boolean }) => ({
        levels:
          reasoning === true
            ? [{ id: "off" }, { id: "low" }, { id: "medium" }, { id: "high" }, { id: "max" }]
            : [{ id: "off" }],
        defaultLevel: "off",
      }),
    } as never,
  });
  setActivePluginRegistry(registry);
}

beforeEach(() => {
  vi.doUnmock("../channels/plugins/index.js");
  setActivePluginRegistry(createTestRegistry([]));
@@ -455,6 +476,7 @@ describe("commands registry args", () => {
    let seen: {
      provider?: string;
      model?: string;
      catalogLength?: number;
      commandKey: string;
      argName: string;
    } | null = null;
@@ -471,8 +493,14 @@ describe("commands registry args", () => {
name: "level",
description: "level",
type: "string",
choices: ({ provider, model, command, arg }) => {
seen = { provider, model, commandKey: command.key, argName: arg.name };
choices: ({ provider, model, catalog, command, arg }) => {
seen = {
provider,
model,
catalogLength: catalog?.length,
commandKey: command.key,
argName: arg.name,
};
return ["low", "high"];
},
},
@@ -491,6 +519,7 @@ describe("commands registry args", () => {
    const seenChoice = seen as {
      provider?: string;
      model?: string;
      catalogLength?: number;
      commandKey: string;
      argName: string;
    } | null;
@@ -498,6 +527,44 @@ describe("commands registry args", () => {
    expect(seenChoice?.argName).toBe("level");
    expect(seenChoice?.provider).toBeTruthy();
    expect(seenChoice?.model).toBeTruthy();
    expect(seenChoice?.catalogLength).toBe(0);
  });

  it("uses configured model catalog reasoning for /think arg menus", () => {
    installOllamaThinkingProvider();
    const command = findCommandByNativeName("think");
    expect(command).toBeTruthy();
    if (!command) {
      return;
    }
    const menu = resolveCommandArgMenu({
      command,
      args: undefined,
      cfg: {
        models: {
          providers: {
            ollama: {
              models: [{ id: "glm-5.1:cloud", name: "GLM 5.1 Cloud", reasoning: true }],
            },
          },
        },
      } as never,
      provider: "ollama",
      model: "glm-5.1:cloud",
    });
    expect(menu?.arg.name).toBe("level");
    expect(menu?.choices.map((choice) => choice.value)).toEqual([
      "off",
      "low",
      "medium",
      "high",
      "max",
    ]);
    expect(formatCommandArgMenuTitle({ command, menu: menu! })).toBe(
      "Choose level for /think.\nOptions: off, low, medium, high, max.",
    );
  });

it("does not show menus when args were provided as raw text only", () => {

View File

@@ -1,5 +1,8 @@
import { DEFAULT_MODEL, DEFAULT_PROVIDER } from "../agents/defaults.js";
import { resolveConfiguredModelRef } from "../agents/model-selection.js";
import {
  buildConfiguredModelCatalog,
  resolveConfiguredModelRef,
} from "../agents/model-selection.js";
import type { SkillCommandSpec } from "../agents/skills.js";
import { getChannelPlugin, getLoadedChannelPlugin } from "../channels/plugins/index.js";
import type { OpenClawConfig } from "../config/types.js";
@@ -26,6 +29,7 @@ import type {
  NativeCommandSpec,
  ShouldHandleTextCommandsParams,
} from "./commands-registry.types.js";
import type { ThinkingCatalogEntry } from "./thinking.shared.js";
export {
  isCommandEnabled,
@@ -257,6 +261,7 @@ export function resolveCommandArgChoices(params: {
  cfg?: OpenClawConfig;
  provider?: string;
  model?: string;
  catalog?: ThinkingCatalogEntry[];
}): ResolvedCommandArgChoice[] {
  const { command, arg, cfg } = params;
  if (!arg.choices) {
@@ -271,6 +276,7 @@ export function resolveCommandArgChoices(params: {
    cfg,
    provider: params.provider ?? defaults.provider,
    model: params.model ?? defaults.model,
    catalog: params.catalog ?? (cfg ? buildConfiguredModelCatalog({ cfg }) : undefined),
    command,
    arg,
  };
@@ -287,19 +293,29 @@ export function resolveCommandArgMenu(params: {
  cfg?: OpenClawConfig;
  provider?: string;
  model?: string;
  catalog?: ThinkingCatalogEntry[];
}): { arg: CommandArgDefinition; choices: ResolvedCommandArgChoice[]; title?: string } | null {
  const { command, args, cfg, provider, model } = params;
  const { command, args, cfg, provider, model, catalog } = params;
  if (!command.args || !command.argsMenu) {
    return null;
  }
  if (command.argsParsing === "none") {
    return null;
  }
  const resolvedCatalog = catalog ?? (cfg ? buildConfiguredModelCatalog({ cfg }) : undefined);
  const argSpec = command.argsMenu;
  const argName =
    argSpec === "auto"
      ? command.args.find(
          (arg) => resolveCommandArgChoices({ command, arg, cfg, provider, model }).length > 0,
          (arg) =>
            resolveCommandArgChoices({
              command,
              arg,
              cfg,
              provider,
              model,
              catalog: resolvedCatalog,
            }).length > 0,
        )?.name
      : argSpec.arg;
  if (!argName) {
@@ -315,7 +331,14 @@ export function resolveCommandArgMenu(params: {
  if (!arg) {
    return null;
  }
  const choices = resolveCommandArgChoices({ command, arg, cfg, provider, model });
  const choices = resolveCommandArgChoices({
    command,
    arg,
    cfg,
    provider,
    model,
    catalog: resolvedCatalog,
  });
  if (choices.length === 0) {
    return null;
  }
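Because `resolveCommandArgMenu` now falls back to `buildConfiguredModelCatalog({ cfg })` when no `catalog` is supplied, existing callers become catalog-aware without any changes on their side. A hedged usage sketch; the call site, the `thinkCommand` and `cfg` values, and the import locations not visible in this diff are assumptions:

```ts
import type { OpenClawConfig } from "../config/types.js";
// Assumed export locations; this diff does not show file names.
import type { ChatCommandDefinition } from "./commands-registry.types.js";
import { resolveCommandArgMenu } from "./commands-registry.js";

declare const cfg: OpenClawConfig; // a loaded config with reasoning-capable models
declare const thinkCommand: ChatCommandDefinition; // the built-in /think definition

// No `catalog` passed: it is derived internally via buildConfiguredModelCatalog({ cfg }).
const menu = resolveCommandArgMenu({
  command: thinkCommand,
  args: undefined,
  cfg,
  provider: "ollama",
  model: "glm-5.1:cloud",
});
// menu?.choices now reflects the provider's thinking profile, e.g. off/low/medium/high/max.
```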

View File

@@ -1,5 +1,6 @@
import type { OpenClawConfig } from "../config/types.js";
import type { CommandArgValues } from "./commands-args.types.js";
import type { ThinkingCatalogEntry } from "./thinking.shared.js";
export type { CommandArgValue, CommandArgValues, CommandArgs } from "./commands-args.types.js";
@@ -28,6 +29,7 @@ export type CommandArgChoiceContext = {
  cfg?: OpenClawConfig;
  provider?: string;
  model?: string;
  catalog?: ThinkingCatalogEntry[];
  command: ChatCommandDefinition;
  arg: CommandArgDefinition;
};
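With `catalog` on `CommandArgChoiceContext`, plugin or custom commands can make their argument menus depend on the configured model catalog as well. A minimal sketch of such an arg definition, assuming `CommandArgChoiceContext` is imported from this types module; gating purely on whether any catalog entries exist mirrors the `catalogLength` assertions in the tests above rather than any real command:

```ts
import type { CommandArgChoiceContext } from "./commands-registry.types.js";

// Hypothetical arg definition for a custom command; only the context fields shown in this
// diff (provider, model, catalog, command, arg) are real, the gating logic is illustrative.
const levelArg = {
  name: "level",
  description: "Thinking level",
  type: "string" as const,
  choices: ({ catalog }: CommandArgChoiceContext): string[] => {
    // With no configured catalog entries, keep the menu minimal.
    if (!catalog || catalog.length === 0) {
      return ["off"];
    }
    return ["off", "low", "medium", "high", "max"];
  },
};
```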