Default reasoning to on when model has reasoning: true (fix #22456) (#22513)

* Default reasoning to on when model has reasoning: true (fix #22456)

What: When a model is configured with reasoning: true in openclaw.json (e.g. OpenRouter x-ai/grok-4.1-fast), the session now defaults reasoningLevel to on if the user has not set it via /reasoning or session store.

Why: Users expected setting reasoning: true on the model to enable reasoning; previously only session/directive reasoningLevel was used and it always defaulted to off, so Think stayed off despite the model config.

* Chore: sync formatted files from main for CI

* Changelog: note zwffff/main OpenRouter fix

* Changelog: fix OpenRouter entry text

* Update msteams.md

* Update msteams.md

* Update msteams.md

---------

Co-authored-by: 曾文锋0668000834 <zeng.wenfeng@xydigit.com>
Co-authored-by: Vincent Koc <vincentkoc@ieee.org>
This commit is contained in:
zwffff
2026-02-23 01:19:36 +08:00
committed by GitHub
parent 9ae08ce205
commit c543994e90
5 changed files with 74 additions and 1 deletion

View File

@@ -27,6 +27,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Providers/OpenRouter: default reasoning to enabled when the selected model advertises `reasoning: true` and no session/directive override is set. (#22513) Thanks @zwffff.
- Providers/OpenRouter: map `/think` levels to `reasoning.effort` in embedded runs while preserving explicit `reasoning.max_tokens` payloads. (#17236) Thanks @robbyczgw-cla.
- Gateway/OpenRouter: preserve stored session provider when model IDs are vendor-prefixed (for example, `anthropic/...`) so follow-up turns do not incorrectly route to direct provider APIs. (#22753) Thanks @dndodson.
- Providers/OpenRouter: preserve the required `openrouter/` prefix for OpenRouter-native model IDs during model-ref normalization. (#12942) Thanks @omair445.

View File

@@ -529,6 +529,21 @@ export function resolveThinkingDefault(params: {
return "off";
}
/** Default reasoning level when session/directive do not set it: "on" if model supports reasoning, else "off". */
/**
 * Default reasoning level when neither session nor directive set one:
 * "on" when the catalog entry for the selected model advertises
 * `reasoning: true`, otherwise "off".
 */
export function resolveReasoningDefault(params: {
  provider: string;
  model: string;
  catalog?: ModelCatalogEntry[];
}): "on" | "off" {
  const combinedKey = modelKey(params.provider, params.model);
  const matchesModel = (entry: ModelCatalogEntry): boolean => {
    if (entry.id !== params.model) {
      return false;
    }
    // NOTE(review): the second comparison matches entries whose provider field
    // holds the combined provider/model key — confirm catalog entries can
    // actually be keyed that way.
    return entry.provider === params.provider || entry.provider === combinedKey;
  };
  const match = params.catalog?.find(matchesModel);
  return match?.reasoning === true ? "on" : "off";
}
/**
* Resolve the model configured for Gmail hook processing.
* Returns null if hooks.gmail.model is not set.

View File

@@ -345,7 +345,7 @@ export async function resolveReplyDirectives(params: {
directives.verboseLevel ??
(sessionEntry?.verboseLevel as VerboseLevel | undefined) ??
(agentCfg?.verboseDefault as VerboseLevel | undefined);
const resolvedReasoningLevel: ReasoningLevel =
let resolvedReasoningLevel: ReasoningLevel =
directives.reasoningLevel ??
(sessionEntry?.reasoningLevel as ReasoningLevel | undefined) ??
"off";
@@ -389,6 +389,14 @@ export async function resolveReplyDirectives(params: {
provider = modelState.provider;
model = modelState.model;
// When neither directive nor session set reasoning, default to model capability (e.g. OpenRouter with reasoning: true).
const reasoningExplicitlySet =
directives.reasoningLevel !== undefined ||
(sessionEntry?.reasoningLevel !== undefined && sessionEntry?.reasoningLevel !== null);
if (!reasoningExplicitlySet && resolvedReasoningLevel === "off") {
resolvedReasoningLevel = await modelState.resolveDefaultReasoningLevel();
}
let contextTokens = resolveContextTokens({
agentCfg,
model,

View File

@@ -264,3 +264,35 @@ describe("createModelSelectionState respects session model override", () => {
expect(state.model).toBe("deepseek-v3-4bit-mlx");
});
});
describe("createModelSelectionState resolveDefaultReasoningLevel", () => {
  // Builds a selection state for the given provider/model with no directive
  // and an empty config, mirroring the shape used by the other suites here.
  const buildState = (provider: string, model: string) =>
    createModelSelectionState({
      cfg: {} as OpenClawConfig,
      agentCfg: undefined,
      defaultProvider: provider,
      defaultModel: model,
      provider,
      model,
      hasModelDirective: false,
    });

  it("returns on when catalog model has reasoning true", async () => {
    const { loadModelCatalog } = await import("../../agents/model-catalog.js");
    // Catalog advertises reasoning support for this model.
    vi.mocked(loadModelCatalog).mockResolvedValueOnce([
      { provider: "openrouter", id: "x-ai/grok-4.1-fast", name: "Grok", reasoning: true },
    ]);
    const state = await buildState("openrouter", "x-ai/grok-4.1-fast");
    await expect(state.resolveDefaultReasoningLevel()).resolves.toBe("on");
  });

  it("returns off when catalog model has no reasoning", async () => {
    const state = await buildState("openai", "gpt-4o-mini");
    await expect(state.resolveDefaultReasoningLevel()).resolves.toBe("off");
  });
});

View File

@@ -8,6 +8,7 @@ import {
modelKey,
normalizeProviderId,
resolveModelRefFromString,
resolveReasoningDefault,
resolveThinkingDefault,
} from "../../agents/model-selection.js";
import type { OpenClawConfig } from "../../config/config.js";
@@ -32,6 +33,8 @@ type ModelSelectionState = {
allowedModelCatalog: ModelCatalog;
resetModelOverride: boolean;
resolveDefaultThinkingLevel: () => Promise<ThinkLevel>;
/** Default reasoning level from model capability: "on" if model has reasoning, else "off". */
resolveDefaultReasoningLevel: () => Promise<"on" | "off">;
needsModelCatalog: boolean;
};
@@ -397,6 +400,19 @@ export async function createModelSelectionState(params: {
return defaultThinkingLevel;
};
// Default reasoning level derived from model capability. Lazily loads the
// model catalog (caching it in `modelCatalog` for later lookups) when no
// catalog is available yet, then defers to resolveReasoningDefault.
const resolveDefaultReasoningLevel = async (): Promise<"on" | "off"> => {
  let catalog = modelCatalog ?? allowedModelCatalog;
  if (!catalog || catalog.length === 0) {
    // Cache the freshly loaded catalog so subsequent calls reuse it.
    modelCatalog = await loadModelCatalog({ config: cfg });
    catalog = modelCatalog;
  }
  return resolveReasoningDefault({ provider, model, catalog });
};
return {
provider,
model,
@@ -404,6 +420,7 @@ export async function createModelSelectionState(params: {
allowedModelCatalog,
resetModelOverride,
resolveDefaultThinkingLevel,
resolveDefaultReasoningLevel,
needsModelCatalog,
};
}