fix: expose dynamic thinking options to UI

This commit is contained in:
Peter Steinberger
2026-04-25 00:33:11 +01:00
parent 5dab0dae56
commit b3db7c6987
15 changed files with 480 additions and 78 deletions

View File

@@ -11,6 +11,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Discord/replies: run `message_sending` plugin hooks for Discord reply delivery, including DM targets, so plugins can transform or cancel outbound Discord replies consistently with other channels. Fixes #59350. (#71094) Thanks @wei840222.
- Control UI/commands: carry provider-owned thinking option ids/labels in session rows and defaults so fresh sessions show and accept dynamic modes such as `adaptive`, `xhigh`, and `max`. Fixes #71269. Thanks @Young-Khalil.
- Providers/GitHub Copilot: keep the plugin stream wrapper from claiming transport selection before OpenClaw picks a boundary-aware stream path, avoiding Pi's stale fallback Copilot headers on normal model turns. Thanks @steipete.
- Discord/subagents: pass runtime config into thread-bound native subagent binding and require it at the helper boundary so Discord channel resolution keeps account-aware config. Fixes #71054. (#70945) Thanks @jai.
- Slack/Assistant: accept Slack Assistant DM `message_changed` events when their metadata identifies the human sender, while continuing to drop self-authored bot edits. Fixes #55445. Thanks @AlfredPros.

View File

@@ -112,7 +112,7 @@ title: "Thinking levels"
- The web chat thinking selector mirrors the session's stored level from the inbound session store/config when the page loads.
- Picking another level writes the session override immediately via `sessions.patch`; it does not wait for the next send and it is not a one-shot `thinkingOnce` override.
- The first option is always `Default (<resolved level>)`, where the resolved default comes from the active session model's provider thinking profile plus the same fallback logic that `/status` and `session_status` use.
- The picker uses `thinkingOptions` returned by the gateway session row. The browser UI does not keep its own provider regex list; plugins own model-specific level sets.
- The picker uses `thinkingLevels` returned by the gateway session row/defaults, with `thinkingOptions` kept as a legacy label list. The browser UI does not keep its own provider regex list; plugins own model-specific level sets.
- `/think:<level>` still works and updates the same stored session level, so chat directives and the picker stay in sync.
## Provider profiles
@@ -120,4 +120,4 @@ title: "Thinking levels"
- Provider plugins can expose `resolveThinkingProfile(ctx)` to define the model's supported levels and default.
- Each profile level has a stored canonical `id` (`off`, `minimal`, `low`, `medium`, `high`, `xhigh`, `adaptive`, or `max`) and may include a display `label`. Binary providers use `{ id: "low", label: "on" }`.
- Published legacy hooks (`supportsXHighThinking`, `isBinaryThinking`, and `resolveDefaultThinkingLevel`) remain as compatibility adapters, but new custom level sets should use `resolveThinkingProfile`.
- Gateway rows expose `thinkingOptions` and `thinkingDefault` so ACP/chat clients render the same profile that runtime validation uses.
- Gateway rows/defaults expose `thinkingLevels`, `thinkingOptions`, and `thinkingDefault` so ACP/chat clients render the same profile ids and labels that runtime validation uses.

View File

@@ -8,6 +8,7 @@ const providerRuntimeMocks = vi.hoisted(() => ({
}));
let listThinkingLevelLabels: typeof import("./thinking.js").listThinkingLevelLabels;
let listThinkingLevelOptions: typeof import("./thinking.js").listThinkingLevelOptions;
let listThinkingLevels: typeof import("./thinking.js").listThinkingLevels;
let normalizeReasoningLevel: typeof import("./thinking.js").normalizeReasoningLevel;
let normalizeThinkLevel: typeof import("./thinking.js").normalizeThinkLevel;
@@ -37,6 +38,7 @@ beforeEach(async () => {
({
listThinkingLevelLabels,
listThinkingLevelOptions,
listThinkingLevels,
normalizeReasoningLevel,
normalizeThinkLevel,
@@ -144,6 +146,19 @@ describe("listThinkingLevels", () => {
expect(listThinkingLevels("anthropic", "claude-opus-4-7")).toContain("max");
});
it("preserves provider profile ids and labels", () => {
providerRuntimeMocks.resolveProviderThinkingProfile.mockReturnValue({
levels: [{ id: "off" }, { id: "adaptive", label: "auto" }, { id: "max", label: "maximum" }],
defaultLevel: "adaptive",
});
expect(listThinkingLevelOptions("demo", "demo-model")).toEqual([
{ id: "off", label: "off" },
{ id: "adaptive", label: "auto" },
{ id: "max", label: "maximum" },
]);
});
it("uses provider thinking profiles ahead of legacy hooks", () => {
providerRuntimeMocks.resolveProviderThinkingProfile.mockReturnValue({
levels: [{ id: "off" }, { id: "low", label: "on" }],

View File

@@ -42,14 +42,17 @@ import {
normalizeOptionalString,
} from "../shared/string-coerce.js";
type ThinkingLevelOption = {
export type ThinkingLevelOption = {
id: ThinkLevel;
label: string;
};
type RankedThinkingLevelOption = ThinkingLevelOption & {
rank: number;
};
type ResolvedThinkingProfile = {
levels: ThinkingLevelOption[];
levels: RankedThinkingLevelOption[];
defaultLevel?: ThinkLevel | null;
};
@@ -70,7 +73,7 @@ function resolveThinkingPolicyContext(params: {
function normalizeProfileLevel(
level: ProviderThinkingProfile["levels"][number],
): ThinkingLevelOption | undefined {
): RankedThinkingLevelOption | undefined {
const normalized = normalizeThinkLevel(level.id);
if (!normalized) {
return undefined;
@@ -83,7 +86,7 @@ function normalizeProfileLevel(
}
function normalizeThinkingProfile(profile: ProviderThinkingProfile): ResolvedThinkingProfile {
const byId = new Map<ThinkLevel, ThinkingLevelOption>();
const byId = new Map<ThinkLevel, RankedThinkingLevelOption>();
for (const raw of profile.levels) {
const level = normalizeProfileLevel(raw);
if (level) {
@@ -204,9 +207,16 @@ export function listThinkingLevels(provider?: string | null, model?: string | nu
return profile.levels.map((level) => level.id);
}
export function listThinkingLevelLabels(provider?: string | null, model?: string | null): string[] {
export function listThinkingLevelOptions(
provider?: string | null,
model?: string | null,
): ThinkingLevelOption[] {
const profile = resolveThinkingProfile({ provider, model });
return profile.levels.map((level) => level.label);
return profile.levels.map(({ id, label }) => ({ id, label }));
}
/**
 * List only the display labels for a model's thinking levels.
 * Thin projection over `listThinkingLevelOptions`, kept for legacy callers
 * that consume a flat label list instead of id/label pairs.
 */
export function listThinkingLevelLabels(provider?: string | null, model?: string | null): string[] {
  const options = listThinkingLevelOptions(provider, model);
  return options.map(({ label }) => label);
}
export function formatThinkingLevels(

View File

@@ -5,11 +5,14 @@ import { afterEach, describe, expect, test } from "vitest";
import { resetConfigRuntimeState, setRuntimeConfigSnapshot } from "../config/config.js";
import type { OpenClawConfig } from "../config/config.js";
import type { SessionEntry } from "../config/sessions.js";
import { createEmptyPluginRegistry } from "../plugins/registry-empty.js";
import { resetPluginRuntimeStateForTest, setActivePluginRegistry } from "../plugins/runtime.js";
import { withStateDirEnv } from "../test-helpers/state-dir-env.js";
import {
capArrayByJsonBytes,
classifySessionKey,
deriveSessionTitle,
getSessionDefaults,
listAgentsForGateway,
listSessionsFromStore,
loadSessionEntry,
@@ -67,6 +70,7 @@ function createModelDefaultsConfig(params: {
describe("gateway session utils", () => {
afterEach(() => {
resetConfigRuntimeState();
resetPluginRuntimeStateForTest();
});
test("capArrayByJsonBytes trims from the front", () => {
@@ -88,6 +92,53 @@ describe("gateway session utils", () => {
expect(parseGroupKey("foo:bar")).toBeNull();
});
test("session defaults include provider-owned thinking options", () => {
const registry = createEmptyPluginRegistry();
registry.providers.push({
pluginId: "test",
source: "test",
provider: {
id: "openai-codex",
label: "OpenAI Codex",
auth: [],
resolveThinkingProfile: ({ modelId }) => ({
levels: [
{ id: "off" },
{ id: "minimal" },
{ id: "low" },
{ id: "medium" },
{ id: "adaptive" },
{ id: "high" },
...(modelId === "gpt-5.5" ? [{ id: "xhigh" as const }] : []),
{ id: "max", label: "maximum" },
],
defaultLevel: "adaptive",
}),
},
});
setActivePluginRegistry(registry);
const defaults = getSessionDefaults(
createModelDefaultsConfig({ primary: "openai-codex/gpt-5.5" }),
);
expect(defaults).toMatchObject({
modelProvider: "openai-codex",
model: "gpt-5.5",
thinkingDefault: "adaptive",
});
expect(defaults.thinkingLevels).toEqual(
expect.arrayContaining([
{ id: "adaptive", label: "adaptive" },
{ id: "xhigh", label: "xhigh" },
{ id: "max", label: "maximum" },
]),
);
expect(defaults.thinkingOptions).toEqual(
expect.arrayContaining(["adaptive", "xhigh", "maximum"]),
);
});
test("classifySessionKey respects chat type + prefixes", () => {
expect(classifySessionKey("global")).toBe("global");
expect(classifySessionKey("unknown")).toBe("unknown");

View File

@@ -25,7 +25,10 @@ import {
listSubagentRunsForController,
resolveSubagentSessionStatus,
} from "../agents/subagent-registry-read.js";
import { listThinkingLevelLabels, resolveThinkingDefaultForModel } from "../auto-reply/thinking.js";
import {
listThinkingLevelOptions,
resolveThinkingDefaultForModel,
} from "../auto-reply/thinking.js";
import { loadConfig } from "../config/config.js";
import { resolveAgentModelFallbackValues } from "../config/model-input.js";
import { resolveStateDir } from "../config/paths.js";
@@ -1072,10 +1075,17 @@ export function getSessionDefaults(cfg: OpenClawConfig): GatewaySessionsDefaults
cfg.agents?.defaults?.contextTokens ??
lookupContextTokens(resolved.model, { allowAsyncLoad: false }) ??
DEFAULT_CONTEXT_TOKENS;
const thinkingLevels = listThinkingLevelOptions(resolved.provider, resolved.model);
return {
modelProvider: resolved.provider ?? null,
model: resolved.model ?? null,
contextTokens: contextTokens ?? null,
thinkingLevels,
thinkingOptions: thinkingLevels.map((level) => level.label),
thinkingDefault: resolveThinkingDefaultForModel({
provider: resolved.provider,
model: resolved.model,
}),
};
}
@@ -1377,6 +1387,7 @@ export function buildGatewaySessionRow(params: {
const rowModel = selectedModel?.model ?? model;
const thinkingProvider = rowModelProvider ?? DEFAULT_PROVIDER;
const thinkingModel = rowModel ?? DEFAULT_MODEL;
const thinkingLevels = listThinkingLevelOptions(thinkingProvider, thinkingModel);
return {
key,
@@ -1402,7 +1413,8 @@ export function buildGatewaySessionRow(params: {
systemSent: entry?.systemSent,
abortedLastRun: entry?.abortedLastRun,
thinkingLevel: entry?.thinkingLevel,
thinkingOptions: listThinkingLevelLabels(thinkingProvider, thinkingModel),
thinkingLevels,
thinkingOptions: thinkingLevels.map((level) => level.label),
thinkingDefault: resolveThinkingDefaultForModel({
provider: thinkingProvider,
model: thinkingModel,

View File

@@ -11,6 +11,14 @@ export type GatewaySessionsDefaults = {
modelProvider: string | null;
model: string | null;
contextTokens: number | null;
thinkingLevels?: GatewayThinkingLevelOption[];
thinkingOptions?: string[];
thinkingDefault?: string;
};
/**
 * One thinking level option as exposed on gateway session rows/defaults:
 * `id` is the stored canonical level value and `label` is the display text
 * (e.g. binary providers surface `{ id: "low", label: "on" }`).
 */
export type GatewayThinkingLevelOption = {
  id: string;
  label: string;
};
export type SessionRunStatus = "running" | "done" | "failed" | "killed" | "timeout";
@@ -39,6 +47,7 @@ export type GatewaySessionRow = {
systemSent?: boolean;
abortedLastRun?: boolean;
thinkingLevel?: string;
thinkingLevels?: GatewayThinkingLevelOption[];
thinkingOptions?: string[];
thinkingDefault?: string;
fastMode?: boolean;

View File

@@ -60,6 +60,9 @@ export function createSessionsListResult(
modelProvider?: string | null;
defaultsModel?: string | null;
defaultsProvider?: string | null;
defaultsThinkingLevels?: SessionsListResult["defaults"]["thinkingLevels"];
defaultsThinkingOptions?: string[];
defaultsThinkingDefault?: string;
omitSessionFromList?: boolean;
} = {},
): SessionsListResult {
@@ -68,6 +71,9 @@ export function createSessionsListResult(
modelProvider = model ? "openai" : null,
defaultsModel = "gpt-5",
defaultsProvider = defaultsModel ? "openai" : null,
defaultsThinkingLevels,
defaultsThinkingOptions,
defaultsThinkingDefault,
omitSessionFromList = false,
} = params;
@@ -79,6 +85,9 @@ export function createSessionsListResult(
modelProvider: defaultsProvider,
model: defaultsModel,
contextTokens: null,
...(defaultsThinkingLevels ? { thinkingLevels: defaultsThinkingLevels } : {}),
...(defaultsThinkingOptions ? { thinkingOptions: defaultsThinkingOptions } : {}),
...(defaultsThinkingDefault ? { thinkingDefault: defaultsThinkingDefault } : {}),
},
sessions: omitSessionFromList
? []

View File

@@ -273,4 +273,35 @@ describe("chat session controls", () => {
expect(rerendered?.value).toBe("openai/gpt-5-mini");
vi.unstubAllGlobals();
});
it("uses default thinking options when the active session is absent", () => {
const { state } = createChatHeaderState({ omitSessionFromList: true });
state.sessionsResult = createSessionsListResult({
defaultsModel: "gpt-5.5",
defaultsProvider: "openai-codex",
defaultsThinkingLevels: [
{ id: "off", label: "off" },
{ id: "adaptive", label: "adaptive" },
{ id: "xhigh", label: "xhigh" },
{ id: "max", label: "maximum" },
],
omitSessionFromList: true,
});
const container = document.createElement("div");
render(renderChatSessionSelect(state), container);
const thinkingSelect = container.querySelector<HTMLSelectElement>(
'select[data-chat-thinking-select="true"]',
);
const options = [...(thinkingSelect?.options ?? [])].map((option) => option.value);
expect(options).toContain("adaptive");
expect(options).toContain("xhigh");
expect(options).toContain("max");
expect(
[...(thinkingSelect?.options ?? [])]
.find((option) => option.value === "max")
?.textContent?.trim(),
).toBe("maximum");
});
});

View File

@@ -16,7 +16,7 @@ import {
normalizeThinkLevel,
resolveThinkingDefaultForModel,
} from "../thinking.ts";
import type { SessionsListResult } from "../types.ts";
import type { GatewayThinkingLevelOption, SessionsListResult } from "../types.ts";
type ChatSessionSwitchHandler = (state: AppViewState, nextSessionKey: string) => void;
@@ -139,7 +139,7 @@ function resolveThinkingTargetModel(state: AppViewState): {
}
function buildThinkingOptions(
labels: readonly string[],
levels: readonly GatewayThinkingLevelOption[],
currentOverride: string,
): ChatThinkingSelectOption[] {
const seen = new Set<string>();
@@ -159,9 +159,9 @@ function buildThinkingOptions(
);
};
for (const label of labels) {
const normalized = normalizeThinkLevel(label) ?? normalizeLowercaseStringOrEmpty(label);
addOption(normalized, label);
for (const level of levels) {
const normalized = normalizeThinkLevel(level.id) ?? normalizeLowercaseStringOrEmpty(level.id);
addOption(normalized, level.label);
}
if (currentOverride) {
addOption(currentOverride);
@@ -169,6 +169,28 @@ function buildThinkingOptions(
return options;
}
/**
 * Resolve the id/label pairs the chat thinking picker should render.
 * Precedence: structured `thinkingLevels` from the active session row, then
 * from the gateway defaults, then legacy `thinkingOptions` label lists (row
 * before defaults), then a local label lookup keyed by provider/model.
 */
function resolveThinkingLevelOptions(
  activeRow: SessionsListResult["sessions"][number] | undefined,
  defaults: SessionsListResult["defaults"] | undefined,
  provider: string | null,
  model: string | null,
): GatewayThinkingLevelOption[] {
  for (const structured of [activeRow?.thinkingLevels, defaults?.thinkingLevels]) {
    if (structured?.length) {
      return structured;
    }
  }
  // Legacy path: only label strings exist, so derive an id per label by
  // normalizing it (falling back to the lowercased label when no canonical
  // thinking level matches).
  let labels = activeRow?.thinkingOptions ?? defaults?.thinkingOptions;
  if (labels == null) {
    labels =
      provider && model ? listThinkingLevelLabels(provider, model) : listThinkingLevelLabels();
  }
  return labels.map((label) => ({
    id: normalizeThinkLevel(label) ?? normalizeLowercaseStringOrEmpty(label),
    label,
  }));
}
function resolveChatThinkingSelectState(state: AppViewState): ChatThinkingSelectState {
const activeRow = state.sessionsResult?.sessions?.find((row) => row.key === state.sessionKey);
const persisted = activeRow?.thinkingLevel;
@@ -177,11 +199,15 @@ function resolveChatThinkingSelectState(state: AppViewState): ChatThinkingSelect
? (normalizeThinkLevel(persisted) ?? persisted.trim())
: "";
const { provider, model } = resolveThinkingTargetModel(state);
const labels =
activeRow?.thinkingOptions ??
(provider && model ? listThinkingLevelLabels(provider, model) : listThinkingLevelLabels());
const levels = resolveThinkingLevelOptions(
activeRow,
state.sessionsResult?.defaults,
provider,
model,
);
const defaultLevel =
activeRow?.thinkingDefault ??
state.sessionsResult?.defaults?.thinkingDefault ??
(provider && model
? resolveThinkingDefaultForModel({
provider,
@@ -192,7 +218,7 @@ function resolveChatThinkingSelectState(state: AppViewState): ChatThinkingSelect
return {
currentOverride,
defaultLabel: `Default (${defaultLevel})`,
options: buildThinkingOptions(labels, currentOverride),
options: buildThinkingOptions(levels, currentOverride),
};
}

View File

@@ -571,6 +571,101 @@ describe("executeSlashCommand directives", () => {
});
});
it("uses default thinking options when the active session is absent", async () => {
const request = vi.fn(async (method: string, payload?: unknown) => {
if (method === "sessions.list") {
return {
defaults: {
modelProvider: "openai-codex",
model: "gpt-5.5",
thinkingLevels: [
{ id: "off", label: "off" },
{ id: "minimal", label: "minimal" },
{ id: "low", label: "low" },
{ id: "medium", label: "medium" },
{ id: "adaptive", label: "adaptive" },
{ id: "high", label: "high" },
{ id: "xhigh", label: "xhigh" },
{ id: "max", label: "maximum" },
],
thinkingOptions: [
"off",
"minimal",
"low",
"medium",
"adaptive",
"high",
"xhigh",
"maximum",
],
thinkingDefault: "adaptive",
},
sessions: [],
};
}
if (method === "models.list") {
return {
models: [{ id: "gpt-5.5", provider: "openai-codex", reasoning: true }],
};
}
if (method === "sessions.patch") {
return { ok: true, ...((payload ?? {}) as object) };
}
throw new Error(`unexpected method: ${method}`);
});
const status = await executeSlashCommand(
{ request } as unknown as GatewayBrowserClient,
"agent:main:main",
"think",
"",
);
const setXhigh = await executeSlashCommand(
{ request } as unknown as GatewayBrowserClient,
"agent:main:main",
"think",
"xhigh",
);
const setMax = await executeSlashCommand(
{ request } as unknown as GatewayBrowserClient,
"agent:main:main",
"think",
"max",
);
const setMaximum = await executeSlashCommand(
{ request } as unknown as GatewayBrowserClient,
"agent:main:main",
"think",
"maximum",
);
const setAdaptive = await executeSlashCommand(
{ request } as unknown as GatewayBrowserClient,
"agent:main:main",
"think",
"auto",
);
expect(status.content).toBe(
"Current thinking level: adaptive.\nOptions: off, minimal, low, medium, adaptive, high, xhigh, maximum.",
);
expect(setXhigh.content).toBe("Thinking level set to **xhigh**.");
expect(setMax.content).toBe("Thinking level set to **max**.");
expect(setMaximum.content).toBe("Thinking level set to **max**.");
expect(setAdaptive.content).toBe("Thinking level set to **adaptive**.");
expect(request).toHaveBeenCalledWith("sessions.patch", {
key: "agent:main:main",
thinkingLevel: "xhigh",
});
expect(request).toHaveBeenCalledWith("sessions.patch", {
key: "agent:main:main",
thinkingLevel: "max",
});
expect(request).toHaveBeenCalledWith("sessions.patch", {
key: "agent:main:main",
thinkingLevel: "adaptive",
});
});
it("reports the current verbose level for bare /verbose", async () => {
const request = vi.fn(async (method: string, _payload?: unknown) => {
if (method === "sessions.list") {

View File

@@ -27,6 +27,7 @@ import type {
AgentsListResult,
ChatModelOverride,
GatewaySessionRow,
GatewayThinkingLevelOption,
ModelCatalogEntry,
SessionsListResult,
SessionsPatchResult,
@@ -254,11 +255,11 @@ async function executeThink(
if (!rawLevel) {
try {
const { session, models } = await loadThinkingCommandState(client, sessionKey);
const { session, defaults, models } = await loadThinkingCommandState(client, sessionKey);
return {
content: formatDirectiveOptions(
`Current thinking level: ${resolveCurrentThinkingLevel(session, models)}.`,
formatThinkingOptionsForSession(session),
`Current thinking level: ${resolveCurrentThinkingLevel(session, defaults, models)}.`,
formatThinkingOptionsForSession(session, defaults),
),
};
} catch (err) {
@@ -266,23 +267,17 @@ async function executeThink(
}
}
const level = normalizeThinkLevel(rawLevel);
if (!level) {
try {
const session = await loadCurrentSession(client, sessionKey);
return {
content: `Unrecognized thinking level "${rawLevel}". Valid levels: ${formatThinkingOptionsForSession(session)}.`,
};
} catch (err) {
return { content: `Failed to validate thinking level: ${String(err)}` };
}
}
try {
const session = await loadCurrentSession(client, sessionKey);
if (!isThinkingLevelOptionForSession(session, level)) {
const { session, defaults } = await loadCurrentSessionState(client, sessionKey);
const level = resolveThinkingLevelInput(rawLevel, session, defaults);
if (!level) {
return {
content: `Unsupported thinking level "${rawLevel}" for this model. Valid levels: ${formatThinkingOptionsForSession(session)}.`,
content: `Unrecognized thinking level "${rawLevel}". Valid levels: ${formatThinkingOptionsForSession(session, defaults)}.`,
};
}
if (!isThinkingLevelOptionForSession(session, defaults, level)) {
return {
content: `Unsupported thinking level "${rawLevel}" for this model. Valid levels: ${formatThinkingOptionsForSession(session, defaults)}.`,
};
}
await client.request("sessions.patch", { key: sessionKey, thinkingLevel: level });
@@ -602,30 +597,85 @@ function formatDirectiveOptions(text: string, options: string): string {
function formatThinkingOptionsForSession(
session: GatewaySessionRow | undefined,
defaults?: SessionsListResult["defaults"],
separator = ", ",
): string {
if (session?.thinkingOptions?.length) {
return session.thinkingOptions.join(separator);
return resolveThinkingLevelOptionsForSession(session, defaults)
.map((level) => level.label)
.join(separator);
}
function resolveThinkingLevelInput(
rawLevel: string,
session: GatewaySessionRow | undefined,
defaults: SessionsListResult["defaults"] | undefined,
): string | undefined {
const normalized = normalizeThinkLevel(rawLevel);
if (normalized) {
return normalized;
}
return formatThinkingLevels(session?.modelProvider, session?.model);
const rawKey = normalizeLowercaseStringOrEmpty(rawLevel);
return resolveThinkingLevelOptionsForSession(session, defaults)
.map((option) => ({
id: normalizeThinkLevel(option.id) ?? normalizeLowercaseStringOrEmpty(option.id),
label: normalizeLowercaseStringOrEmpty(option.label),
}))
.find((option) => option.id === rawKey || option.label === rawKey)?.id;
}
function isThinkingLevelOptionForSession(
session: GatewaySessionRow | undefined,
defaults: SessionsListResult["defaults"] | undefined,
level: string,
): boolean {
const labels = session?.thinkingOptions?.length
? session.thinkingOptions
: formatThinkingOptionsForSession(session).split(/\s*,\s*/);
return labels.some((label) => normalizeThinkLevel(label) === level);
return resolveThinkingLevelOptionsForSession(session, defaults).some((option) => {
const id = normalizeThinkLevel(option.id) ?? normalizeLowercaseStringOrEmpty(option.id);
return id === level || normalizeThinkLevel(option.label) === level;
});
}
/**
 * Resolve the id/label thinking options for a session in the command UI.
 * Prefers structured `thinkingLevels` (session row, then defaults); otherwise
 * falls back to legacy label lists, and finally to the comma-separated output
 * of `formatThinkingLevels` for the resolved provider/model pair.
 */
function resolveThinkingLevelOptionsForSession(
  session: GatewaySessionRow | undefined,
  defaults: SessionsListResult["defaults"] | undefined,
): GatewayThinkingLevelOption[] {
  const sessionLevels = session?.thinkingLevels;
  if (sessionLevels?.length) {
    return sessionLevels;
  }
  const defaultLevels = defaults?.thinkingLevels;
  if (defaultLevels?.length) {
    return defaultLevels;
  }
  // NOTE: the presence check looks at either list's length, but selection
  // still prefers the session list via `??` — mirrors the legacy precedence.
  const hasLegacyLabels = Boolean(
    session?.thinkingOptions?.length || defaults?.thinkingOptions?.length,
  );
  const labels = hasLegacyLabels
    ? (session?.thinkingOptions ?? defaults?.thinkingOptions ?? [])
    : formatThinkingLevels(
        session?.modelProvider ?? defaults?.modelProvider,
        session?.model ?? defaults?.model,
      ).split(/\s*,\s*/);
  const toOption = (label: string): GatewayThinkingLevelOption => ({
    id: normalizeThinkLevel(label) ?? normalizeLowercaseStringOrEmpty(label),
    label,
  });
  return labels.filter(Boolean).map(toOption);
}
/**
 * Fetch only the current session row for `sessionKey`, discarding the
 * gateway defaults that `loadCurrentSessionState` also returns.
 */
async function loadCurrentSession(
  client: GatewayBrowserClient,
  sessionKey: string,
): Promise<GatewaySessionRow | undefined> {
  const { session } = await loadCurrentSessionState(client, sessionKey);
  return session;
}
async function loadCurrentSessionState(
client: GatewayBrowserClient,
sessionKey: string,
): Promise<{
session: GatewaySessionRow | undefined;
defaults: SessionsListResult["defaults"] | undefined;
}> {
const sessions = await client.request<SessionsListResult>("sessions.list", {});
return resolveCurrentSession(sessions, sessionKey);
return {
session: resolveCurrentSession(sessions, sessionKey),
defaults: sessions?.defaults,
};
}
function resolveCurrentSession(
@@ -652,6 +702,7 @@ async function loadThinkingCommandState(client: GatewayBrowserClient, sessionKey
]);
return {
session: resolveCurrentSession(sessions, sessionKey),
defaults: sessions?.defaults,
models,
};
}
@@ -673,24 +724,31 @@ async function loadModelCatalog(
function resolveCurrentThinkingLevel(
session: GatewaySessionRow | undefined,
defaults: SessionsListResult["defaults"] | undefined,
models: ModelCatalogEntry[],
): string {
const persisted = normalizeThinkLevel(session?.thinkingLevel);
if (persisted) {
return (
session?.thinkingOptions?.find((label) => normalizeThinkLevel(label) === persisted) ??
persisted
resolveThinkingLevelOptionsForSession(session, defaults).find(
(level) => normalizeThinkLevel(level.id) === persisted,
)?.label ?? persisted
);
}
if (session?.thinkingDefault) {
return session.thinkingDefault;
}
if (!session?.modelProvider || !session.model) {
if (defaults?.thinkingDefault) {
return defaults.thinkingDefault;
}
const provider = session?.modelProvider ?? defaults?.modelProvider;
const model = session?.model ?? defaults?.model;
if (!provider || !model) {
return "off";
}
return resolveThinkingDefaultForModel({
provider: session.modelProvider,
model: session.model,
provider,
model,
catalog: models,
});
}

View File

@@ -319,6 +319,14 @@ export type GatewaySessionsDefaults = {
modelProvider: string | null;
model: string | null;
contextTokens: number | null;
thinkingLevels?: GatewayThinkingLevelOption[];
thinkingOptions?: string[];
thinkingDefault?: string;
};
/**
 * One thinking level option carried on gateway session rows/defaults:
 * `id` is the stored canonical level value and `label` is the display text
 * shown by pickers (e.g. `{ id: "max", label: "maximum" }`).
 */
export type GatewayThinkingLevelOption = {
  id: string;
  label: string;
};
export type ChatModelOverride = import("./chat-model-ref.types.ts").ChatModelOverride;
@@ -411,6 +419,7 @@ export type GatewaySessionRow = {
systemSent?: boolean;
abortedLastRun?: boolean;
thinkingLevel?: string;
thinkingLevels?: GatewayThinkingLevelOption[];
thinkingOptions?: string[];
thinkingDefault?: string;
fastMode?: boolean;

View File

@@ -65,6 +65,84 @@ function buildProps(result: SessionsListResult): SessionsProps {
}
describe("sessions view", () => {
it("renders and patches provider-owned thinking ids", async () => {
const container = document.createElement("div");
const onPatch = vi.fn();
render(
renderSessions({
...buildProps(
buildResult({
key: "agent:main:main",
kind: "direct",
updatedAt: Date.now(),
thinkingLevel: "adaptive",
thinkingLevels: [
{ id: "off", label: "off" },
{ id: "adaptive", label: "adaptive" },
{ id: "max", label: "maximum" },
],
}),
),
onPatch,
}),
container,
);
await Promise.resolve();
const thinking = container.querySelector("tbody select") as HTMLSelectElement | null;
expect(thinking?.value).toBe("adaptive");
expect(Array.from(thinking?.options ?? []).map((option) => option.value)).toEqual([
"",
"off",
"adaptive",
"max",
]);
expect(
Array.from(thinking?.options ?? [])
.find((option) => option.value === "max")
?.textContent?.trim(),
).toBe("maximum");
thinking!.value = "max";
thinking!.dispatchEvent(new Event("change", { bubbles: true }));
expect(onPatch).toHaveBeenCalledWith("agent:main:main", { thinkingLevel: "max" });
});
it("keeps legacy binary thinking labels patching canonical ids", async () => {
const container = document.createElement("div");
const onPatch = vi.fn();
render(
renderSessions({
...buildProps(
buildResult({
key: "agent:main:main",
kind: "direct",
updatedAt: Date.now(),
thinkingLevel: "low",
thinkingOptions: ["off", "on"],
}),
),
onPatch,
}),
container,
);
await Promise.resolve();
const thinking = container.querySelector("tbody select") as HTMLSelectElement | null;
expect(thinking?.value).toBe("low");
expect(
Array.from(thinking?.options ?? [])
.find((option) => option.value === "low")
?.textContent?.trim(),
).toBe("on");
thinking!.value = "low";
thinking!.dispatchEvent(new Event("change", { bubbles: true }));
expect(onPatch).toHaveBeenCalledWith("agent:main:main", { thinkingLevel: "low" });
});
it("keeps session selects stable and deselects only the current page", async () => {
const container = document.createElement("div");
render(

View File

@@ -5,8 +5,10 @@ import { icons } from "../icons.ts";
import { pathForTab } from "../navigation.ts";
import { formatSessionTokens } from "../presenter.ts";
import { normalizeLowercaseStringOrEmpty, normalizeOptionalString } from "../string-coerce.ts";
import { normalizeThinkLevel } from "../thinking.ts";
import type {
GatewaySessionRow,
GatewayThinkingLevelOption,
SessionCompactionCheckpoint,
SessionsListResult,
} from "../types.ts";
@@ -78,13 +80,26 @@ const FAST_LEVELS = [
const REASONING_LEVELS = ["", "off", "on", "stream"] as const;
const PAGE_SIZES = [10, 25, 50, 100] as const;
function resolveThinkLevelOptions(row: GatewaySessionRow): readonly string[] {
const options = row.thinkingOptions?.length ? row.thinkingOptions : DEFAULT_THINK_LEVELS;
return ["", ...options];
/**
 * Normalize a raw thinking option value to its canonical level id, falling
 * back to the lowercased input when no canonical level matches.
 */
function normalizeThinkingOptionValue(raw: string): string {
  const canonical = normalizeThinkLevel(raw);
  return canonical ?? normalizeLowercaseStringOrEmpty(raw);
}
function isBinaryThinkingRow(row: GatewaySessionRow): boolean {
return row.thinkingOptions?.includes("on") === true;
/**
 * Build the value/label pairs for a session row's thinking `<select>`.
 * Uses structured `thinkingLevels` when present; otherwise wraps legacy
 * label lists (or the built-in defaults) into id/label pairs. Always
 * prepends the empty "inherit" option.
 */
function resolveThinkLevelOptions(
  row: GatewaySessionRow,
): readonly { value: string; label: string }[] {
  let options: readonly GatewayThinkingLevelOption[];
  if (row.thinkingLevels?.length) {
    options = row.thinkingLevels;
  } else {
    const labels = row.thinkingOptions?.length ? row.thinkingOptions : DEFAULT_THINK_LEVELS;
    options = labels.map((label) => ({ id: normalizeThinkingOptionValue(label), label }));
  }
  const entries = options.map((option) => ({
    value: normalizeThinkingOptionValue(option.id),
    label: option.label,
  }));
  return [{ value: "", label: "inherit" }, ...entries];
}
function withCurrentOption(options: readonly string[], current: string): string[] {
@@ -110,26 +125,10 @@ function withCurrentLabeledOption(
return [...options, { value: current, label: `${current} (custom)` }];
}
function resolveThinkLevelDisplay(value: string, isBinary: boolean): string {
if (!isBinary) {
return value;
}
if (!value || value === "off") {
return value;
}
return "on";
}
function resolveThinkLevelPatchValue(value: string, isBinary: boolean): string | null {
function resolveThinkLevelPatchValue(value: string): string | null {
if (!value) {
return null;
}
if (!isBinary) {
return value;
}
if (value === "on") {
return "low";
}
return value;
}
@@ -442,9 +441,8 @@ export function renderSessions(props: SessionsProps) {
function renderRows(row: GatewaySessionRow, props: SessionsProps) {
const updated = row.updatedAt ? formatRelativeTimestamp(row.updatedAt) : t("common.na");
const rawThinking = row.thinkingLevel ?? "";
const isBinaryThinking = isBinaryThinkingRow(row);
const thinking = resolveThinkLevelDisplay(rawThinking, isBinaryThinking);
const thinkLevels = withCurrentOption(resolveThinkLevelOptions(row), thinking);
const thinking = rawThinking ? normalizeThinkingOptionValue(rawThinking) : "";
const thinkLevels = withCurrentLabeledOption(resolveThinkLevelOptions(row), thinking);
const fastMode = row.fastMode === true ? "on" : row.fastMode === false ? "off" : "";
const fastLevels = withCurrentLabeledOption(FAST_LEVELS, fastMode);
const verbose = row.verboseLevel ?? "";
@@ -562,14 +560,14 @@ function renderRows(row: GatewaySessionRow, props: SessionsProps) {
@change=${(e: Event) => {
const value = (e.target as HTMLSelectElement).value;
props.onPatch(row.key, {
thinkingLevel: resolveThinkLevelPatchValue(value, isBinaryThinking),
thinkingLevel: resolveThinkLevelPatchValue(value),
});
}}
>
${thinkLevels.map(
(level) =>
html`<option value=${level} ?selected=${thinking === level}>
${level || "inherit"}
html`<option value=${level.value} ?selected=${thinking === level.value}>
${level.label}
</option>`,
)}
</select>