fix(ui): centralize chat model ref handling

This commit is contained in:
Peter Steinberger
2026-03-16 04:13:32 +00:00
parent 093e51f2b3
commit 7e8f5ca71b
13 changed files with 310 additions and 49 deletions

View File

@@ -83,7 +83,14 @@ describe("handleSendChat", () => {
);
const request = vi.fn(async (method: string, _params?: unknown) => {
if (method === "sessions.patch") {
return { ok: true, key: "main" };
return {
ok: true,
key: "main",
resolved: {
modelProvider: "openai",
model: "gpt-5-mini",
},
};
}
if (method === "chat.history") {
return { messages: [], thinkingLevel: null };
@@ -93,7 +100,7 @@ describe("handleSendChat", () => {
ts: 0,
path: "",
count: 0,
defaults: { model: "gpt-5", contextTokens: null },
defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
sessions: [],
};
}
@@ -116,6 +123,9 @@ describe("handleSendChat", () => {
key: "main",
model: "gpt-5-mini",
});
expect(host.chatModelOverrides.main).toBe("gpt-5-mini");
expect(host.chatModelOverrides.main).toEqual({
kind: "qualified",
value: "openai/gpt-5-mini",
});
});
});

View File

@@ -10,7 +10,7 @@ import { loadModels } from "./controllers/models.ts";
import { loadSessions } from "./controllers/sessions.ts";
import type { GatewayBrowserClient, GatewayHelloOk } from "./gateway.ts";
import { normalizeBasePath } from "./navigation.ts";
import type { ModelCatalogEntry } from "./types.ts";
import type { ChatModelOverride, ModelCatalogEntry } from "./types.ts";
import type { ChatAttachment, ChatQueueItem } from "./ui-types.ts";
import { generateUUID } from "./uuid.ts";
@@ -29,7 +29,7 @@ export type ChatHost = {
basePath: string;
hello: GatewayHelloOk | null;
chatAvatarUrl: string | null;
chatModelOverrides: Record<string, string | null>;
chatModelOverrides: Record<string, ChatModelOverride | null>;
chatModelsLoading: boolean;
chatModelCatalog: ModelCatalogEntry[];
updateComplete?: Promise<unknown>;
@@ -308,10 +308,10 @@ async function dispatchSlashCommand(
injectCommandResult(host, result.content);
}
if (result.sessionPatch && "model" in result.sessionPatch) {
if (result.sessionPatch && "modelOverride" in result.sessionPatch) {
host.chatModelOverrides = {
...host.chatModelOverrides,
[targetSessionKey]: result.sessionPatch.model ?? null,
[targetSessionKey]: result.sessionPatch.modelOverride ?? null,
};
}

View File

@@ -6,6 +6,13 @@ import { refreshChat } from "./app-chat.ts";
import { syncUrlWithSessionKey } from "./app-settings.ts";
import type { AppViewState } from "./app-view-state.ts";
import { OpenClawApp } from "./app.ts";
import {
buildChatModelOption,
createChatModelOverride,
formatChatModelDisplay,
normalizeChatModelOverrideValue,
resolveServerChatModelValue,
} from "./chat-model-ref.ts";
import { ChatState, loadChatHistory } from "./controllers/chat.ts";
import { loadSessions } from "./controllers/sessions.ts";
import { icons } from "./icons.ts";
@@ -521,8 +528,8 @@ function resolveActiveSessionRow(state: AppViewState) {
function resolveModelOverrideValue(state: AppViewState): string {
// Prefer the local cache — it reflects in-flight patches before sessionsResult refreshes.
const cached = state.chatModelOverrides[state.sessionKey];
if (typeof cached === "string") {
return cached.trim();
if (cached) {
return normalizeChatModelOverrideValue(cached, state.chatModelCatalog ?? []);
}
// cached === null means explicitly cleared to default.
if (cached === null) {
@@ -532,21 +539,14 @@ function resolveModelOverrideValue(state: AppViewState): string {
// Include provider prefix so the value matches option keys (provider/model).
const activeRow = resolveActiveSessionRow(state);
if (activeRow && typeof activeRow.model === "string" && activeRow.model.trim()) {
const provider = activeRow.modelProvider?.trim();
const model = activeRow.model.trim();
return provider ? `${provider}/${model}` : model;
return resolveServerChatModelValue(activeRow.model, activeRow.modelProvider);
}
return "";
}
function resolveDefaultModelValue(state: AppViewState): string {
const defaults = state.sessionsResult?.defaults;
const model = defaults?.model;
if (typeof model !== "string" || !model.trim()) {
return "";
}
const provider = defaults?.modelProvider?.trim();
return provider ? `${provider}/${model.trim()}` : model.trim();
return resolveServerChatModelValue(defaults?.model, defaults?.modelProvider);
}
function buildChatModelOptions(
@@ -570,9 +570,8 @@ function buildChatModelOptions(
};
for (const entry of catalog) {
const provider = entry.provider?.trim();
const value = provider ? `${provider}/${entry.id}` : entry.id;
addOption(value, provider ? `${entry.id} · ${provider}` : entry.id);
const option = buildChatModelOption(entry);
addOption(option.value, option.label);
}
if (currentOverride) {
@@ -592,9 +591,7 @@ function renderChatModelSelect(state: AppViewState) {
currentOverride,
defaultModel,
);
const defaultDisplay = defaultModel.includes("/")
? `${defaultModel.slice(defaultModel.indexOf("/") + 1)} · ${defaultModel.slice(0, defaultModel.indexOf("/"))}`
: defaultModel;
const defaultDisplay = formatChatModelDisplay(defaultModel);
const defaultLabel = defaultModel ? `Default (${defaultDisplay})` : "Default model";
const busy =
state.chatLoading || state.chatSending || Boolean(state.chatRunId) || state.chatStream !== null;
@@ -639,7 +636,7 @@ async function switchChatModel(state: AppViewState, nextModel: string) {
// Write the override cache immediately so the picker stays in sync during the RPC round-trip.
state.chatModelOverrides = {
...state.chatModelOverrides,
[targetSessionKey]: nextModel || null,
[targetSessionKey]: createChatModelOverride(nextModel),
};
try {
await state.client.request("sessions.patch", {

View File

@@ -21,6 +21,7 @@ import type {
HealthSummary,
LogEntry,
LogLevel,
ChatModelOverride,
ModelCatalogEntry,
NostrProfile,
PresenceEntry,
@@ -71,7 +72,7 @@ export type AppViewState = {
fallbackStatus: FallbackStatus | null;
chatAvatarUrl: string | null;
chatThinkingLevel: string | null;
chatModelOverrides: Record<string, string | null>;
chatModelOverrides: Record<string, ChatModelOverride | null>;
chatModelsLoading: boolean;
chatModelCatalog: ModelCatalogEntry[];
chatQueue: ChatQueueItem[];

View File

@@ -69,6 +69,7 @@ import type {
AgentIdentityResult,
ConfigSnapshot,
ConfigUiHints,
ChatModelOverride,
CronJob,
CronRunLogEntry,
CronStatus,
@@ -158,7 +159,7 @@ export class OpenClawApp extends LitElement {
@state() fallbackStatus: FallbackStatus | null = null;
@state() chatAvatarUrl: string | null = null;
@state() chatThinkingLevel: string | null = null;
@state() chatModelOverrides: Record<string, string | null> = {};
@state() chatModelOverrides: Record<string, ChatModelOverride | null> = {};
@state() chatModelsLoading = false;
@state() chatModelCatalog: ModelCatalogEntry[] = [];
@state() chatQueue: ChatQueueItem[] = [];

View File

@@ -0,0 +1,50 @@
import { describe, expect, it } from "vitest";
import {
buildChatModelOption,
createChatModelOverride,
formatChatModelDisplay,
normalizeChatModelOverrideValue,
resolveServerChatModelValue,
} from "./chat-model-ref.ts";
import type { ModelCatalogEntry } from "./types.ts";
// Shared fixture: two catalog entries from distinct providers, so a bare
// model id resolves to exactly one provider.
const catalog: ModelCatalogEntry[] = [
  { id: "gpt-5-mini", name: "GPT-5 Mini", provider: "openai" },
  { id: "claude-sonnet-4-5", name: "Claude Sonnet 4.5", provider: "anthropic" },
];

describe("chat-model-ref helpers", () => {
  it("builds provider-qualified option values and labels", () => {
    // The option value doubles as the <select> key; the label is "model · provider".
    expect(buildChatModelOption(catalog[0])).toEqual({
      value: "openai/gpt-5-mini",
      label: "gpt-5-mini · openai",
    });
  });

  it("normalizes raw overrides when the catalog match is unique", () => {
    // A bare model id is upgraded to its single matching catalog provider.
    expect(normalizeChatModelOverrideValue(createChatModelOverride("gpt-5-mini"), catalog)).toBe(
      "openai/gpt-5-mini",
    );
  });

  it("keeps ambiguous raw overrides unchanged", () => {
    // The same id offered by two providers — normalization must not guess one.
    const ambiguousCatalog: ModelCatalogEntry[] = [
      { id: "gpt-5-mini", name: "GPT-5 Mini", provider: "openai" },
      { id: "gpt-5-mini", name: "GPT-5 Mini", provider: "openrouter" },
    ];
    expect(
      normalizeChatModelOverrideValue(createChatModelOverride("gpt-5-mini"), ambiguousCatalog),
    ).toBe("gpt-5-mini");
  });

  it("formats qualified model refs consistently for default labels", () => {
    expect(formatChatModelDisplay("openai/gpt-5-mini")).toBe("gpt-5-mini · openai");
    // Values without a provider prefix are shown verbatim.
    expect(formatChatModelDisplay("alias-only")).toBe("alias-only");
  });

  it("resolves server session data to qualified option values", () => {
    expect(resolveServerChatModelValue("gpt-5-mini", "openai")).toBe("openai/gpt-5-mini");
    // A missing provider leaves the model id unqualified.
    expect(resolveServerChatModelValue("alias-only", null)).toBe("alias-only");
  });
});

View File

@@ -0,0 +1,93 @@
import type { ModelCatalogEntry } from "./types.ts";
/**
 * A per-session chat model override cached on the client.
 *
 * - "qualified": the value already carries a provider prefix
 *   ("provider/model") and can be used directly as a select-option key.
 * - "raw": the value is a bare model id (e.g. typed via a slash command) and
 *   may need a catalog lookup to resolve its provider.
 */
export type ChatModelOverride =
  | {
      kind: "qualified";
      value: string;
    }
  | {
      kind: "raw";
      value: string;
    };
/**
 * Join a model id with an optional provider into a "provider/model" key.
 *
 * Returns "" when the model id is blank; the prefix is omitted when the
 * provider is absent or blank. Both parts are trimmed.
 */
export function buildQualifiedChatModelValue(model: string, provider?: string | null): string {
  const modelId = model.trim();
  if (!modelId) {
    return "";
  }
  const providerId = (provider ?? "").trim();
  if (!providerId) {
    return modelId;
  }
  return `${providerId}/${modelId}`;
}
/**
 * Wrap a user-entered model value as a ChatModelOverride.
 *
 * Values containing "/" are treated as already provider-qualified; anything
 * else is kept as a raw model id. Blank input yields null, meaning the
 * override is cleared.
 */
export function createChatModelOverride(value: string): ChatModelOverride | null {
  const normalized = value.trim();
  if (normalized.length === 0) {
    return null;
  }
  return normalized.includes("/")
    ? { kind: "qualified", value: normalized }
    : { kind: "raw", value: normalized };
}
/**
 * Resolve a cached override to the string value used by the model select.
 *
 * Qualified overrides are returned as-is (trimmed). Raw overrides are matched
 * against the catalog by model id (case-insensitive): a unique match is
 * upgraded to its "provider/model" key, while an ambiguous id — offered by
 * multiple distinct providers — is returned unchanged so we never guess a
 * provider.
 *
 * @param override cached per-session override, or null/undefined when unset.
 * @param catalog model catalog used to resolve raw ids to providers.
 * @returns the select-option value, or "" for a missing/blank override.
 */
export function normalizeChatModelOverrideValue(
  override: ChatModelOverride | null | undefined,
  catalog: ModelCatalogEntry[],
): string {
  if (!override) {
    return "";
  }
  // The guard above already narrowed `override`, so no optional chaining here.
  const trimmed = override.value.trim();
  if (!trimmed) {
    return "";
  }
  if (override.kind === "qualified") {
    return trimmed;
  }
  let matchedValue = "";
  for (const entry of catalog) {
    if (entry.id.trim().toLowerCase() !== trimmed.toLowerCase()) {
      continue;
    }
    const candidate = buildQualifiedChatModelValue(entry.id, entry.provider);
    if (!matchedValue) {
      matchedValue = candidate;
      continue;
    }
    if (matchedValue.toLowerCase() !== candidate.toLowerCase()) {
      // Two different providers offer this id — keep the raw value untouched.
      return trimmed;
    }
  }
  return matchedValue || trimmed;
}
/**
 * Map server-side session/defaults fields into a select-option value,
 * qualifying the model with its provider when one is present.
 * Non-string model input yields "".
 */
export function resolveServerChatModelValue(
  model?: string | null,
  provider?: string | null,
): string {
  return typeof model === "string" ? buildQualifiedChatModelValue(model, provider) : "";
}
/**
 * Render a model value for display: "provider/model" becomes
 * "model · provider", while bare ids (or values whose first character is "/")
 * are shown verbatim. Blank input yields "".
 */
export function formatChatModelDisplay(value: string): string {
  const ref = value.trim();
  if (!ref) {
    return "";
  }
  const slash = ref.indexOf("/");
  if (slash > 0) {
    const provider = ref.slice(0, slash);
    const model = ref.slice(slash + 1);
    return `${model} · ${provider}`;
  }
  return ref;
}
/**
 * Build the option value/label pair for one catalog entry. The value is the
 * provider-qualified select key; the label appends the provider after a
 * middle dot when one is present.
 */
export function buildChatModelOption(entry: ModelCatalogEntry): { value: string; label: string } {
  const provider = entry.provider?.trim();
  const value = buildQualifiedChatModelValue(entry.id, provider);
  const label = provider ? `${entry.id} · ${provider}` : entry.id;
  return { value, label };
}

View File

@@ -235,7 +235,7 @@ describe("executeSlashCommand directives", () => {
const request = vi.fn(async (method: string, _payload?: unknown) => {
if (method === "sessions.list") {
return {
defaults: { model: "default-model" },
defaults: { modelProvider: "openai", model: "default-model" },
sessions: [
row("agent:main:main", {
model: "gpt-4.1-mini",
@@ -265,6 +265,38 @@ describe("executeSlashCommand directives", () => {
expect(request).toHaveBeenNthCalledWith(2, "models.list", {});
});
it("mirrors resolved provider-qualified model refs after /model changes", async () => {
const request = vi.fn(async (method: string, _payload?: unknown) => {
if (method === "sessions.patch") {
return {
ok: true,
key: "main",
resolved: {
modelProvider: "openai",
model: "gpt-5-mini",
},
};
}
throw new Error(`unexpected method: ${method}`);
});
const result = await executeSlashCommand(
{ request } as unknown as GatewayBrowserClient,
"main",
"model",
"gpt-5-mini",
);
expect(request).toHaveBeenCalledWith("sessions.patch", {
key: "main",
model: "gpt-5-mini",
});
expect(result.sessionPatch?.modelOverride).toEqual({
kind: "qualified",
value: "openai/gpt-5-mini",
});
});
it("resolves the legacy main alias for /usage", async () => {
const request = vi.fn(async (method: string, _payload?: unknown) => {
if (method === "sessions.list") {

View File

@@ -16,8 +16,15 @@ import {
isSubagentSessionKey,
parseAgentSessionKey,
} from "../../../../src/routing/session-key.js";
import { createChatModelOverride, resolveServerChatModelValue } from "../chat-model-ref.ts";
import type { GatewayBrowserClient } from "../gateway.ts";
import type { AgentsListResult, GatewaySessionRow, SessionsListResult } from "../types.ts";
import type {
AgentsListResult,
ChatModelOverride,
GatewaySessionRow,
SessionsListResult,
SessionsPatchResult,
} from "../types.ts";
import { SLASH_COMMANDS } from "./slash-commands.ts";
export type SlashCommandResult = {
@@ -35,7 +42,7 @@ export type SlashCommandResult = {
| "navigate-usage";
/** Optional session-level directive changes that the caller should mirror locally. */
sessionPatch?: {
model?: string | null;
modelOverride?: ChatModelOverride | null;
};
};
@@ -144,11 +151,18 @@ async function executeModel(
}
try {
await client.request("sessions.patch", { key: sessionKey, model: args.trim() });
const patched = await client.request<SessionsPatchResult>("sessions.patch", {
key: sessionKey,
model: args.trim(),
});
const resolvedValue = resolveServerChatModelValue(
patched.resolved?.model ?? args.trim(),
patched.resolved?.modelProvider,
);
return {
content: `Model set to \`${args.trim()}\`.`,
action: "refresh",
sessionPatch: { model: args.trim() },
sessionPatch: { modelOverride: createChatModelOverride(resolvedValue) },
};
} catch (err) {
return { content: `Failed to set model: ${String(err)}` };

View File

@@ -321,6 +321,8 @@ export type GatewaySessionsDefaults = {
contextTokens: number | null;
};
export type ChatModelOverride = import("./chat-model-ref.ts").ChatModelOverride;
export type GatewayAgentRow = SharedGatewayAgentRow;
export type AgentsListResult = {
@@ -402,7 +404,12 @@ export type SessionsPatchResult = SessionsPatchResultBase<{
verboseLevel?: string;
reasoningLevel?: string;
elevatedLevel?: string;
}>;
}> & {
resolved?: {
modelProvider?: string;
model?: string;
};
};
export type {
CostUsageDailyEntry,

View File

@@ -31,7 +31,7 @@ function createProps(overrides: Partial<ChatProps> = {}): ChatProps {
ts: 0,
path: "",
count: 1,
defaults: { model: "gpt-5", contextTokens: null },
defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
sessions: [
{
key: "main",

View File

@@ -15,7 +15,7 @@ function createSessions(): SessionsListResult {
ts: 0,
path: "",
count: 0,
defaults: { model: null, contextTokens: null },
defaults: { modelProvider: null, model: null, contextTokens: null },
sessions: [],
};
}
@@ -28,6 +28,7 @@ function createChatHeaderState(
} = {},
): { state: AppViewState; request: ReturnType<typeof vi.fn> } {
let currentModel = overrides.model ?? null;
let currentModelProvider = currentModel ? "openai" : undefined;
const omitSessionFromList = overrides.omitSessionFromList ?? false;
const catalog = overrides.models ?? [
{ id: "gpt-5", name: "GPT-5", provider: "openai" },
@@ -35,7 +36,26 @@ function createChatHeaderState(
];
const request = vi.fn(async (method: string, params: Record<string, unknown>) => {
if (method === "sessions.patch") {
currentModel = (params.model as string | null | undefined) ?? null;
const nextModel = (params.model as string | null | undefined) ?? null;
if (!nextModel) {
currentModel = null;
currentModelProvider = undefined;
} else {
const normalized = nextModel.trim();
const slashIndex = normalized.indexOf("/");
if (slashIndex > 0) {
currentModelProvider = normalized.slice(0, slashIndex);
currentModel = normalized.slice(slashIndex + 1);
} else {
currentModel = normalized;
const matchingProviders = catalog
.filter((entry) => entry.id === normalized)
.map((entry) => entry.provider)
.filter(Boolean);
currentModelProvider =
matchingProviders.length === 1 ? matchingProviders[0] : currentModelProvider;
}
}
return { ok: true, key: "main" };
}
if (method === "chat.history") {
@@ -46,10 +66,18 @@ function createChatHeaderState(
ts: 0,
path: "",
count: omitSessionFromList ? 0 : 1,
defaults: { model: "gpt-5", contextTokens: null },
defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
sessions: omitSessionFromList
? []
: [{ key: "main", kind: "direct", updatedAt: null, model: currentModel }],
: [
{
key: "main",
kind: "direct",
updatedAt: null,
modelProvider: currentModelProvider,
model: currentModel,
},
],
};
}
if (method === "models.list") {
@@ -65,10 +93,18 @@ function createChatHeaderState(
ts: 0,
path: "",
count: omitSessionFromList ? 0 : 1,
defaults: { model: "gpt-5", contextTokens: null },
defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
sessions: omitSessionFromList
? []
: [{ key: "main", kind: "direct", updatedAt: null, model: currentModel }],
: [
{
key: "main",
kind: "direct",
updatedAt: null,
modelProvider: currentModelProvider,
model: currentModel,
},
],
},
chatModelOverrides: {},
chatModelCatalog: catalog,
@@ -566,13 +602,13 @@ describe("chat view", () => {
expect(modelSelect).not.toBeNull();
expect(modelSelect?.value).toBe("");
modelSelect!.value = "gpt-5-mini";
modelSelect!.value = "openai/gpt-5-mini";
modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
await flushTasks();
expect(request).toHaveBeenCalledWith("sessions.patch", {
key: "main",
model: "gpt-5-mini",
model: "openai/gpt-5-mini",
});
expect(request).not.toHaveBeenCalledWith("chat.history", expect.anything());
expect(state.sessionsResult?.sessions[0]?.model).toBe("gpt-5-mini");
@@ -594,7 +630,7 @@ describe("chat view", () => {
'select[data-chat-model-select="true"]',
);
expect(modelSelect).not.toBeNull();
expect(modelSelect?.value).toBe("gpt-5-mini");
expect(modelSelect?.value).toBe("openai/gpt-5-mini");
modelSelect!.value = "";
modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
@@ -638,7 +674,7 @@ describe("chat view", () => {
);
expect(modelSelect).not.toBeNull();
modelSelect!.value = "gpt-5-mini";
modelSelect!.value = "openai/gpt-5-mini";
modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
await flushTasks();
render(renderChatSessionSelect(state), container);
@@ -646,10 +682,30 @@ describe("chat view", () => {
const rerendered = container.querySelector<HTMLSelectElement>(
'select[data-chat-model-select="true"]',
);
expect(rerendered?.value).toBe("gpt-5-mini");
expect(rerendered?.value).toBe("openai/gpt-5-mini");
vi.unstubAllGlobals();
});
it("normalizes cached bare /model overrides to the matching catalog option", () => {
const { state } = createChatHeaderState();
state.chatModelOverrides = { main: { kind: "raw", value: "gpt-5-mini" } };
const container = document.createElement("div");
render(renderChatSessionSelect(state), container);
const modelSelect = container.querySelector<HTMLSelectElement>(
'select[data-chat-model-select="true"]',
);
expect(modelSelect).not.toBeNull();
expect(modelSelect?.value).toBe("openai/gpt-5-mini");
const optionValues = Array.from(modelSelect?.querySelectorAll("option") ?? []).map(
(option) => option.value,
);
expect(optionValues).toContain("openai/gpt-5-mini");
expect(optionValues).not.toContain("gpt-5-mini");
});
it("prefers the session label over displayName in the grouped chat session selector", () => {
const { state } = createChatHeaderState({ omitSessionFromList: true });
state.sessionKey = "agent:main:subagent:4f2146de-887b-4176-9abe-91140082959b";
@@ -658,7 +714,7 @@ describe("chat view", () => {
ts: 0,
path: "",
count: 1,
defaults: { model: "gpt-5", contextTokens: null },
defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
sessions: [
{
key: state.sessionKey,
@@ -708,7 +764,7 @@ describe("chat view", () => {
ts: 0,
path: "",
count: 1,
defaults: { model: "gpt-5", contextTokens: null },
defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
sessions: [
{
key: state.sessionKey,
@@ -737,7 +793,7 @@ describe("chat view", () => {
ts: 0,
path: "",
count: 2,
defaults: { model: "gpt-5", contextTokens: null },
defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
sessions: [
{
key: "agent:main:subagent:4f2146de-887b-4176-9abe-91140082959b",

View File

@@ -8,7 +8,7 @@ function buildResult(session: SessionsListResult["sessions"][number]): SessionsL
ts: Date.now(),
path: "(multiple)",
count: 1,
defaults: { model: null, contextTokens: null },
defaults: { modelProvider: null, model: null, contextTokens: null },
sessions: [session],
};
}