mirror of
https://github.com/openclaw/openclaw.git
synced 2026-03-12 07:20:45 +00:00
fix(tui): prevent stale model indicator after /model
This commit is contained in:
@@ -30,6 +30,7 @@ Docs: https://docs.openclaw.ai
|
||||
### Fixes

- Gateway/chat streaming tool-boundary text retention: merge assistant delta segments into per-run chat buffers so pre-tool text is preserved in live chat deltas/finals when providers emit post-tool assistant segments as non-prefix snapshots. (#36957) Thanks @Datyedyeguy.
- TUI/model indicator freshness: prevent stale session snapshots from overwriting freshly patched model selection (and reset per-session freshness when switching session keys) so `/model` updates reflect immediately instead of lagging by one or more commands. (#21255) Thanks @kowza.
- OpenAI Codex OAuth/login hardening: fail OAuth completion early when the returned token is missing `api.responses.write`, and allow `openclaw models auth login --provider openai-codex` to use the built-in OAuth path even when no provider plugins are installed. (#36660) Thanks @driesvints.
- Gateway/remote WS break-glass hostname support: honor `OPENCLAW_ALLOW_INSECURE_PRIVATE_WS=1` for `ws://` hostname URLs (not only private IP literals) across onboarding validation and runtime gateway connection checks, while still rejecting public IP literals and non-unicast IPv6 endpoints. (#36930) Thanks @manju-rn.
- Routing/binding lookup scalability: pre-index route bindings by channel/account and avoid full binding-list rescans on channel-account cache rollover, preventing multi-second `resolveAgentRoute` stalls in large binding configurations. (#36915) Thanks @songchenghao.
|
||||
@@ -111,4 +111,162 @@ describe("tui session actions", () => {
|
||||
expect(updateFooter).toHaveBeenCalledTimes(2);
|
||||
expect(requestRender).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it("keeps patched model selection when a refresh returns an older snapshot", async () => {
|
||||
const listSessions = vi.fn().mockResolvedValue({
|
||||
ts: Date.now(),
|
||||
path: "/tmp/sessions.json",
|
||||
count: 1,
|
||||
defaults: {},
|
||||
sessions: [
|
||||
{
|
||||
key: "agent:main:main",
|
||||
model: "old-model",
|
||||
modelProvider: "ollama",
|
||||
updatedAt: 100,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const state: TuiStateAccess = {
|
||||
agentDefaultId: "main",
|
||||
sessionMainKey: "agent:main:main",
|
||||
sessionScope: "global",
|
||||
agents: [],
|
||||
currentAgentId: "main",
|
||||
currentSessionKey: "agent:main:main",
|
||||
currentSessionId: null,
|
||||
activeChatRunId: null,
|
||||
historyLoaded: false,
|
||||
sessionInfo: {
|
||||
model: "old-model",
|
||||
modelProvider: "ollama",
|
||||
updatedAt: 100,
|
||||
},
|
||||
initialSessionApplied: true,
|
||||
isConnected: true,
|
||||
autoMessageSent: false,
|
||||
toolsExpanded: false,
|
||||
showThinking: false,
|
||||
connectionStatus: "connected",
|
||||
activityStatus: "idle",
|
||||
statusTimeout: null,
|
||||
lastCtrlCAt: 0,
|
||||
};
|
||||
|
||||
const { applySessionInfoFromPatch, refreshSessionInfo } = createSessionActions({
|
||||
client: { listSessions } as unknown as GatewayChatClient,
|
||||
chatLog: { addSystem: vi.fn() } as unknown as import("./components/chat-log.js").ChatLog,
|
||||
tui: { requestRender: vi.fn() } as unknown as import("@mariozechner/pi-tui").TUI,
|
||||
opts: {},
|
||||
state,
|
||||
agentNames: new Map(),
|
||||
initialSessionInput: "",
|
||||
initialSessionAgentId: null,
|
||||
resolveSessionKey: vi.fn(),
|
||||
updateHeader: vi.fn(),
|
||||
updateFooter: vi.fn(),
|
||||
updateAutocompleteProvider: vi.fn(),
|
||||
setActivityStatus: vi.fn(),
|
||||
});
|
||||
|
||||
applySessionInfoFromPatch({
|
||||
key: "agent:main:main",
|
||||
entry: {
|
||||
sessionId: "session-1",
|
||||
model: "new-model",
|
||||
modelProvider: "openai",
|
||||
updatedAt: 200,
|
||||
},
|
||||
});
|
||||
|
||||
expect(state.sessionInfo.model).toBe("new-model");
|
||||
expect(state.sessionInfo.modelProvider).toBe("openai");
|
||||
|
||||
await refreshSessionInfo();
|
||||
|
||||
expect(state.sessionInfo.model).toBe("new-model");
|
||||
expect(state.sessionInfo.modelProvider).toBe("openai");
|
||||
expect(state.sessionInfo.updatedAt).toBe(200);
|
||||
});
|
||||
|
||||
it("accepts older session snapshots after switching session keys", async () => {
|
||||
const listSessions = vi.fn().mockResolvedValue({
|
||||
ts: Date.now(),
|
||||
path: "/tmp/sessions.json",
|
||||
count: 1,
|
||||
defaults: {},
|
||||
sessions: [
|
||||
{
|
||||
key: "agent:main:other",
|
||||
model: "session-model",
|
||||
modelProvider: "openai",
|
||||
updatedAt: 50,
|
||||
},
|
||||
],
|
||||
});
|
||||
const loadHistory = vi.fn().mockResolvedValue({
|
||||
sessionId: "session-2",
|
||||
messages: [],
|
||||
});
|
||||
|
||||
const state: TuiStateAccess = {
|
||||
agentDefaultId: "main",
|
||||
sessionMainKey: "agent:main:main",
|
||||
sessionScope: "global",
|
||||
agents: [],
|
||||
currentAgentId: "main",
|
||||
currentSessionKey: "agent:main:main",
|
||||
currentSessionId: null,
|
||||
activeChatRunId: null,
|
||||
historyLoaded: true,
|
||||
sessionInfo: {
|
||||
model: "previous-model",
|
||||
modelProvider: "anthropic",
|
||||
updatedAt: 500,
|
||||
},
|
||||
initialSessionApplied: true,
|
||||
isConnected: true,
|
||||
autoMessageSent: false,
|
||||
toolsExpanded: false,
|
||||
showThinking: false,
|
||||
connectionStatus: "connected",
|
||||
activityStatus: "idle",
|
||||
statusTimeout: null,
|
||||
lastCtrlCAt: 0,
|
||||
};
|
||||
|
||||
const { setSession } = createSessionActions({
|
||||
client: {
|
||||
listSessions,
|
||||
loadHistory,
|
||||
} as unknown as GatewayChatClient,
|
||||
chatLog: {
|
||||
addSystem: vi.fn(),
|
||||
clearAll: vi.fn(),
|
||||
} as unknown as import("./components/chat-log.js").ChatLog,
|
||||
tui: { requestRender: vi.fn() } as unknown as import("@mariozechner/pi-tui").TUI,
|
||||
opts: {},
|
||||
state,
|
||||
agentNames: new Map(),
|
||||
initialSessionInput: "",
|
||||
initialSessionAgentId: null,
|
||||
resolveSessionKey: vi.fn((raw?: string) => raw ?? "agent:main:main"),
|
||||
updateHeader: vi.fn(),
|
||||
updateFooter: vi.fn(),
|
||||
updateAutocompleteProvider: vi.fn(),
|
||||
setActivityStatus: vi.fn(),
|
||||
});
|
||||
|
||||
await setSession("agent:main:other");
|
||||
|
||||
expect(loadHistory).toHaveBeenCalledWith({
|
||||
sessionKey: "agent:main:other",
|
||||
limit: 200,
|
||||
});
|
||||
expect(state.currentSessionKey).toBe("agent:main:other");
|
||||
expect(state.sessionInfo.model).toBe("session-model");
|
||||
expect(state.sessionInfo.modelProvider).toBe("openai");
|
||||
expect(state.sessionInfo.updatedAt).toBe(50);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -151,17 +151,12 @@ export function createSessionActions(context: SessionActionContext) {
|
||||
|
||||
const entryUpdatedAt = entry?.updatedAt ?? null;
|
||||
const currentUpdatedAt = state.sessionInfo.updatedAt ?? null;
|
||||
const modelChanged =
|
||||
(entry?.modelProvider !== undefined &&
|
||||
entry.modelProvider !== state.sessionInfo.modelProvider) ||
|
||||
(entry?.model !== undefined && entry.model !== state.sessionInfo.model);
|
||||
if (
|
||||
!params.force &&
|
||||
entryUpdatedAt !== null &&
|
||||
currentUpdatedAt !== null &&
|
||||
entryUpdatedAt < currentUpdatedAt &&
|
||||
!defaultsChanged &&
|
||||
!modelChanged
) {
|
||||
return;
|
||||
}
|
||||
@@ -362,6 +357,9 @@ export function createSessionActions(context: SessionActionContext) {
|
||||
state.currentSessionKey = nextKey;
|
||||
state.activeChatRunId = null;
|
||||
state.currentSessionId = null;
|
||||
// Session keys can move backwards in updatedAt ordering; drop previous session freshness
|
||||
// so refresh data for the newly selected session isn't rejected as stale.
|
||||
state.sessionInfo.updatedAt = null;
|
||||
state.historyLoaded = false;
|
||||
clearLocalRunIds?.();
|
||||
updateHeader();
|
||||
|
||||
Reference in New Issue
Block a user