test: split chat session control tests
ui/src/ui/chat/session-controls.test.ts (new file, 276 lines added)
@@ -0,0 +1,276 @@
/* @vitest-environment jsdom */

import { render } from "lit";
import { describe, expect, it, vi } from "vitest";
import type { AppViewState } from "../app-view-state.ts";
import {
  createModelCatalog,
  createSessionsListResult,
  DEFAULT_CHAT_MODEL_CATALOG,
} from "../chat-model.test-helpers.ts";
import type { GatewayBrowserClient } from "../gateway.ts";
import type { ModelCatalogEntry } from "../types.ts";
import { renderChatSessionSelect } from "./session-controls.ts";
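
// Builds a minimal AppViewState around a mocked gateway `request` handler that
// tracks the session's model/provider across `sessions.patch` calls, so the
// header-picker tests below can assert on both the RPC payload and the
// refreshed sessions.list snapshot.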
function createChatHeaderState(
  overrides: {
    model?: string | null;
    modelProvider?: string | null;
    models?: ModelCatalogEntry[];
    omitSessionFromList?: boolean;
  } = {},
): { state: AppViewState; request: ReturnType<typeof vi.fn> } {
  let currentModel = overrides.model ?? null;
  let currentModelProvider = overrides.modelProvider ?? (currentModel ? "openai" : null);
  const omitSessionFromList = overrides.omitSessionFromList ?? false;
  const catalog = overrides.models ?? createModelCatalog(...DEFAULT_CHAT_MODEL_CATALOG);
  const request = vi.fn(async (method: string, params: Record<string, unknown>) => {
    if (method === "sessions.patch") {
      const nextModel = (params.model as string | null | undefined) ?? null;
      if (!nextModel) {
        currentModel = null;
        currentModelProvider = null;
      } else {
        const normalized = nextModel.trim();
        const slashIndex = normalized.indexOf("/");
        if (slashIndex > 0) {
          currentModelProvider = normalized.slice(0, slashIndex);
          currentModel = normalized.slice(slashIndex + 1);
        } else {
          currentModel = normalized;
          const matchingProviders = catalog
            .filter((entry) => entry.id === normalized)
            .map((entry) => entry.provider)
            .filter(Boolean);
          currentModelProvider =
            matchingProviders.length === 1 ? matchingProviders[0] : currentModelProvider;
        }
      }
      return { ok: true, key: "main" };
    }
    if (method === "chat.history") {
      return { messages: [], thinkingLevel: null };
    }
    if (method === "sessions.list") {
      return createSessionsListResult({
        model: currentModel,
        modelProvider: currentModelProvider,
        omitSessionFromList,
      });
    }
    if (method === "models.list") {
      return { models: catalog };
    }
    if (method === "tools.effective") {
      return {
        agentId: "main",
        profile: "coding",
        groups: [],
      };
    }
    throw new Error(`Unexpected request: ${method}`);
  });
  const state = {
    sessionKey: "main",
    connected: true,
    sessionsHideCron: true,
    sessionsResult: createSessionsListResult({
      model: currentModel,
      modelProvider: currentModelProvider,
      omitSessionFromList,
    }),
    chatModelOverrides: {},
    chatModelCatalog: catalog,
    chatModelsLoading: false,
    client: { request } as unknown as GatewayBrowserClient,
    settings: {
      gatewayUrl: "",
      token: "",
      locale: "en",
      sessionKey: "main",
      lastActiveSessionKey: "main",
      theme: "claw",
      themeMode: "dark",
      splitRatio: 0.6,
      navCollapsed: false,
      navGroupsCollapsed: {},
      borderRadius: 50,
      chatFocusMode: false,
      chatShowThinking: false,
    },
    chatMessage: "",
    chatStream: null,
    chatStreamStartedAt: null,
    chatRunId: null,
    chatQueue: [],
    chatMessages: [],
    chatLoading: false,
    chatThinkingLevel: null,
    lastError: null,
    chatAvatarUrl: null,
    basePath: "",
    hello: null,
    agentsList: null,
    agentsPanel: "overview",
    agentsSelectedId: null,
    toolsEffectiveLoading: false,
    toolsEffectiveLoadingKey: null,
    toolsEffectiveResultKey: null,
    toolsEffectiveError: null,
    toolsEffectiveResult: null,
    applySettings(next: AppViewState["settings"]) {
      state.settings = next;
    },
    loadAssistantIdentity: vi.fn(),
    resetToolStream: vi.fn(),
    resetChatScroll: vi.fn(),
  } as unknown as AppViewState & {
    client: GatewayBrowserClient;
    settings: AppViewState["settings"];
  };
  return { state, request };
}
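
// Flushes one microtask tick so the picker's async change handler settles
// before assertions run.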
function flushTasks() {
  return new Promise<void>((resolve) => queueMicrotask(resolve));
}

describe("chat session controls", () => {
|
||||
it("patches the current session model from the chat header picker", async () => {
|
||||
vi.stubGlobal(
|
||||
"fetch",
|
||||
vi.fn().mockResolvedValue({
|
||||
ok: false,
|
||||
} satisfies Partial<Response>),
|
||||
);
|
||||
const { state, request } = createChatHeaderState();
|
||||
const container = document.createElement("div");
|
||||
render(renderChatSessionSelect(state), container);
|
||||
|
||||
const modelSelect = container.querySelector<HTMLSelectElement>(
|
||||
'select[data-chat-model-select="true"]',
|
||||
);
|
||||
expect(modelSelect).not.toBeNull();
|
||||
expect(modelSelect?.value).toBe("");
|
||||
|
||||
modelSelect!.value = "openai/gpt-5-mini";
|
||||
modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
|
||||
await flushTasks();
|
||||
|
||||
expect(request).toHaveBeenCalledWith("sessions.patch", {
|
||||
key: "main",
|
||||
model: "openai/gpt-5-mini",
|
||||
});
|
||||
expect(request).not.toHaveBeenCalledWith("chat.history", expect.anything());
|
||||
expect(state.sessionsResult?.sessions[0]?.model).toBe("gpt-5-mini");
|
||||
expect(state.sessionsResult?.sessions[0]?.modelProvider).toBe("openai");
|
||||
vi.unstubAllGlobals();
|
||||
});
|
||||
|
||||
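
  // The effective-tools cache key encodes agent, session, and any model
  // override ("agentId:sessionKey[:model=<provider/model>]" in these
  // assertions), so a header model switch must produce a new key and
  // trigger a refetch.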
it("reloads effective tools after a chat-header model switch for the active tools panel", async () => {
|
||||
vi.stubGlobal(
|
||||
"fetch",
|
||||
vi.fn().mockResolvedValue({
|
||||
ok: false,
|
||||
} satisfies Partial<Response>),
|
||||
);
|
||||
const { state, request } = createChatHeaderState();
|
||||
state.agentsPanel = "tools";
|
||||
state.agentsSelectedId = "main";
|
||||
state.toolsEffectiveResultKey = "main:main";
|
||||
state.toolsEffectiveResult = {
|
||||
agentId: "main",
|
||||
profile: "coding",
|
||||
groups: [],
|
||||
};
|
||||
const container = document.createElement("div");
|
||||
render(renderChatSessionSelect(state), container);
|
||||
|
||||
const modelSelect = container.querySelector<HTMLSelectElement>(
|
||||
'select[data-chat-model-select="true"]',
|
||||
);
|
||||
expect(modelSelect).not.toBeNull();
|
||||
|
||||
modelSelect!.value = "openai/gpt-5-mini";
|
||||
modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
|
||||
await flushTasks();
|
||||
|
||||
expect(request).toHaveBeenCalledWith("tools.effective", {
|
||||
agentId: "main",
|
||||
sessionKey: "main",
|
||||
});
|
||||
expect(state.toolsEffectiveResultKey).toBe("main:main:model=openai/gpt-5-mini");
|
||||
vi.unstubAllGlobals();
|
||||
});
|
||||
|
||||
it("clears the session model override back to the default model", async () => {
|
||||
vi.stubGlobal(
|
||||
"fetch",
|
||||
vi.fn().mockResolvedValue({
|
||||
ok: false,
|
||||
} satisfies Partial<Response>),
|
||||
);
|
||||
const { state, request } = createChatHeaderState({ model: "gpt-5-mini" });
|
||||
const container = document.createElement("div");
|
||||
render(renderChatSessionSelect(state), container);
|
||||
|
||||
const modelSelect = container.querySelector<HTMLSelectElement>(
|
||||
'select[data-chat-model-select="true"]',
|
||||
);
|
||||
expect(modelSelect).not.toBeNull();
|
||||
expect(modelSelect?.value).toBe("openai/gpt-5-mini");
|
||||
|
||||
modelSelect!.value = "";
|
||||
modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
|
||||
await flushTasks();
|
||||
|
||||
expect(request).toHaveBeenCalledWith("sessions.patch", {
|
||||
key: "main",
|
||||
model: null,
|
||||
});
|
||||
expect(state.sessionsResult?.sessions[0]?.model).toBeUndefined();
|
||||
vi.unstubAllGlobals();
|
||||
});
|
||||
|
||||
it("disables the chat header model picker while a run is active", () => {
|
||||
const { state } = createChatHeaderState();
|
||||
state.chatRunId = "run-123";
|
||||
state.chatStream = "Working";
|
||||
const container = document.createElement("div");
|
||||
render(renderChatSessionSelect(state), container);
|
||||
|
||||
const modelSelect = container.querySelector<HTMLSelectElement>(
|
||||
'select[data-chat-model-select="true"]',
|
||||
);
|
||||
expect(modelSelect).not.toBeNull();
|
||||
expect(modelSelect?.disabled).toBe(true);
|
||||
});
|
||||
|
||||
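
  // With omitSessionFromList, sessions.list omits the active session entirely;
  // the rendered picker must then keep showing the last chosen value instead
  // of resetting to empty.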
it("keeps the selected model visible when the active session is absent from sessions.list", async () => {
|
||||
vi.stubGlobal(
|
||||
"fetch",
|
||||
vi.fn().mockResolvedValue({
|
||||
ok: false,
|
||||
} satisfies Partial<Response>),
|
||||
);
|
||||
const { state } = createChatHeaderState({ omitSessionFromList: true });
|
||||
const container = document.createElement("div");
|
||||
render(renderChatSessionSelect(state), container);
|
||||
|
||||
const modelSelect = container.querySelector<HTMLSelectElement>(
|
||||
'select[data-chat-model-select="true"]',
|
||||
);
|
||||
expect(modelSelect).not.toBeNull();
|
||||
|
||||
modelSelect!.value = "openai/gpt-5-mini";
|
||||
modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
|
||||
await flushTasks();
|
||||
render(renderChatSessionSelect(state), container);
|
||||
|
||||
const rerendered = container.querySelector<HTMLSelectElement>(
|
||||
'select[data-chat-model-select="true"]',
|
||||
);
|
||||
expect(rerendered?.value).toBe("openai/gpt-5-mini");
|
||||
vi.unstubAllGlobals();
|
||||
});
|
||||
});
|
||||
ui/src/ui/chat.test.ts

@@ -3,17 +3,8 @@
import { render } from "lit";
import { describe, expect, it, vi } from "vitest";
import { getSafeLocalStorage } from "../../local-storage.ts";
import type { AppViewState } from "../app-view-state.ts";
import {
  createModelCatalog,
  createSessionsListResult,
  DEFAULT_CHAT_MODEL_CATALOG,
} from "../chat-model.test-helpers.ts";
import { resetAssistantAttachmentAvailabilityCacheForTest } from "../chat/grouped-render.ts";
import { normalizeMessage } from "../chat/message-normalizer.ts";
import { renderChatSessionSelect } from "../chat/session-controls.ts";
import type { GatewayBrowserClient } from "../gateway.ts";
import type { ModelCatalogEntry } from "../types.ts";
import type { SessionsListResult } from "../types.ts";
import { renderChat, type ChatProps } from "./chat.ts";

@@ -27,125 +18,6 @@ function createSessions(): SessionsListResult {
  };
}

function createChatHeaderState(
  overrides: {
    model?: string | null;
    modelProvider?: string | null;
    models?: ModelCatalogEntry[];
    omitSessionFromList?: boolean;
  } = {},
): { state: AppViewState; request: ReturnType<typeof vi.fn> } {
  let currentModel = overrides.model ?? null;
  let currentModelProvider = overrides.modelProvider ?? (currentModel ? "openai" : null);
  const omitSessionFromList = overrides.omitSessionFromList ?? false;
  const catalog = overrides.models ?? createModelCatalog(...DEFAULT_CHAT_MODEL_CATALOG);
  const request = vi.fn(async (method: string, params: Record<string, unknown>) => {
    if (method === "sessions.patch") {
      const nextModel = (params.model as string | null | undefined) ?? null;
      if (!nextModel) {
        currentModel = null;
        currentModelProvider = null;
      } else {
        const normalized = nextModel.trim();
        const slashIndex = normalized.indexOf("/");
        if (slashIndex > 0) {
          currentModelProvider = normalized.slice(0, slashIndex);
          currentModel = normalized.slice(slashIndex + 1);
        } else {
          currentModel = normalized;
          const matchingProviders = catalog
            .filter((entry) => entry.id === normalized)
            .map((entry) => entry.provider)
            .filter(Boolean);
          currentModelProvider =
            matchingProviders.length === 1 ? matchingProviders[0] : currentModelProvider;
        }
      }
      return { ok: true, key: "main" };
    }
    if (method === "chat.history") {
      return { messages: [], thinkingLevel: null };
    }
    if (method === "sessions.list") {
      return createSessionsListResult({
        model: currentModel,
        modelProvider: currentModelProvider,
        omitSessionFromList,
      });
    }
    if (method === "models.list") {
      return { models: catalog };
    }
    if (method === "tools.effective") {
      return {
        agentId: "main",
        profile: "coding",
        groups: [],
      };
    }
    throw new Error(`Unexpected request: ${method}`);
  });
  const state = {
    sessionKey: "main",
    connected: true,
    sessionsHideCron: true,
    sessionsResult: createSessionsListResult({
      model: currentModel,
      modelProvider: currentModelProvider,
      omitSessionFromList,
    }),
    chatModelOverrides: {},
    chatModelCatalog: catalog,
    chatModelsLoading: false,
    client: { request } as unknown as GatewayBrowserClient,
    settings: {
      gatewayUrl: "",
      token: "",
      locale: "en",
      sessionKey: "main",
      lastActiveSessionKey: "main",
      theme: "claw",
      themeMode: "dark",
      splitRatio: 0.6,
      navCollapsed: false,
      navGroupsCollapsed: {},
      borderRadius: 50,
      chatFocusMode: false,
      chatShowThinking: false,
    },
    chatMessage: "",
    chatStream: null,
    chatStreamStartedAt: null,
    chatRunId: null,
    chatQueue: [],
    chatMessages: [],
    chatLoading: false,
    chatThinkingLevel: null,
    lastError: null,
    chatAvatarUrl: null,
    basePath: "",
    hello: null,
    agentsList: null,
    agentsPanel: "overview",
    agentsSelectedId: null,
    toolsEffectiveLoading: false,
    toolsEffectiveLoadingKey: null,
    toolsEffectiveResultKey: null,
    toolsEffectiveError: null,
    toolsEffectiveResult: null,
    applySettings(next: AppViewState["settings"]) {
      state.settings = next;
    },
    loadAssistantIdentity: vi.fn(),
    resetToolStream: vi.fn(),
    resetChatScroll: vi.fn(),
  } as unknown as AppViewState & {
    client: GatewayBrowserClient;
    settings: AppViewState["settings"];
  };
  return { state, request };
}

function flushTasks() {
  return new Promise<void>((resolve) => queueMicrotask(resolve));
}

@@ -808,144 +680,6 @@ describe("chat view", () => {
    expect(confirm?.classList.contains("chat-delete-confirm--right")).toBe(true);
  });

  it("patches the current session model from the chat header picker", async () => {
    vi.stubGlobal(
      "fetch",
      vi.fn().mockResolvedValue({
        ok: false,
      } satisfies Partial<Response>),
    );
    const { state, request } = createChatHeaderState();
    const container = document.createElement("div");
    render(renderChatSessionSelect(state), container);

    const modelSelect = container.querySelector<HTMLSelectElement>(
      'select[data-chat-model-select="true"]',
    );
    expect(modelSelect).not.toBeNull();
    expect(modelSelect?.value).toBe("");

    modelSelect!.value = "openai/gpt-5-mini";
    modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
    await flushTasks();

    expect(request).toHaveBeenCalledWith("sessions.patch", {
      key: "main",
      model: "openai/gpt-5-mini",
    });
    expect(request).not.toHaveBeenCalledWith("chat.history", expect.anything());
    expect(state.sessionsResult?.sessions[0]?.model).toBe("gpt-5-mini");
    expect(state.sessionsResult?.sessions[0]?.modelProvider).toBe("openai");
    vi.unstubAllGlobals();
  });

  it("reloads effective tools after a chat-header model switch for the active tools panel", async () => {
    vi.stubGlobal(
      "fetch",
      vi.fn().mockResolvedValue({
        ok: false,
      } satisfies Partial<Response>),
    );
    const { state, request } = createChatHeaderState();
    state.agentsPanel = "tools";
    state.agentsSelectedId = "main";
    state.toolsEffectiveResultKey = "main:main";
    state.toolsEffectiveResult = {
      agentId: "main",
      profile: "coding",
      groups: [],
    };
    const container = document.createElement("div");
    render(renderChatSessionSelect(state), container);

    const modelSelect = container.querySelector<HTMLSelectElement>(
      'select[data-chat-model-select="true"]',
    );
    expect(modelSelect).not.toBeNull();

    modelSelect!.value = "openai/gpt-5-mini";
    modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
    await flushTasks();

    expect(request).toHaveBeenCalledWith("tools.effective", {
      agentId: "main",
      sessionKey: "main",
    });
    expect(state.toolsEffectiveResultKey).toBe("main:main:model=openai/gpt-5-mini");
    vi.unstubAllGlobals();
  });

  it("clears the session model override back to the default model", async () => {
    vi.stubGlobal(
      "fetch",
      vi.fn().mockResolvedValue({
        ok: false,
      } satisfies Partial<Response>),
    );
    const { state, request } = createChatHeaderState({ model: "gpt-5-mini" });
    const container = document.createElement("div");
    render(renderChatSessionSelect(state), container);

    const modelSelect = container.querySelector<HTMLSelectElement>(
      'select[data-chat-model-select="true"]',
    );
    expect(modelSelect).not.toBeNull();
    expect(modelSelect?.value).toBe("openai/gpt-5-mini");

    modelSelect!.value = "";
    modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
    await flushTasks();

    expect(request).toHaveBeenCalledWith("sessions.patch", {
      key: "main",
      model: null,
    });
    expect(state.sessionsResult?.sessions[0]?.model).toBeUndefined();
    vi.unstubAllGlobals();
  });

  it("disables the chat header model picker while a run is active", () => {
    const { state } = createChatHeaderState();
    state.chatRunId = "run-123";
    state.chatStream = "Working";
    const container = document.createElement("div");
    render(renderChatSessionSelect(state), container);

    const modelSelect = container.querySelector<HTMLSelectElement>(
      'select[data-chat-model-select="true"]',
    );
    expect(modelSelect).not.toBeNull();
    expect(modelSelect?.disabled).toBe(true);
  });

  it("keeps the selected model visible when the active session is absent from sessions.list", async () => {
    vi.stubGlobal(
      "fetch",
      vi.fn().mockResolvedValue({
        ok: false,
      } satisfies Partial<Response>),
    );
    const { state } = createChatHeaderState({ omitSessionFromList: true });
    const container = document.createElement("div");
    render(renderChatSessionSelect(state), container);

    const modelSelect = container.querySelector<HTMLSelectElement>(
      'select[data-chat-model-select="true"]',
    );
    expect(modelSelect).not.toBeNull();

    modelSelect!.value = "openai/gpt-5-mini";
    modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
    await flushTasks();
    render(renderChatSessionSelect(state), container);

    const rerendered = container.querySelector<HTMLSelectElement>(
      'select[data-chat-model-select="true"]',
    );
    expect(rerendered?.value).toBe("openai/gpt-5-mini");
    vi.unstubAllGlobals();
  });

  it("keeps tool cards collapsed by default and expands them inline on demand", async () => {
    const container = document.createElement("div");
    const props = createProps({