fix(chat): decode native thinking metadata

Decode gateway-provided thinking metadata for native iOS/macOS chat picker options, preserving extended and legacy thinking levels without leaking default-model options across sessions.

Verification:
- swift test --package-path apps/shared/OpenClawKit --filter ChatViewModelTests --no-parallel
- swift test --package-path apps/macos --filter WebChatSwiftUISmokeTests --no-parallel
- pnpm lint:swift
- pnpm check:changed

Follow-up maintainer fix for #40878 review feedback.
This commit is contained in:
Val Alexander
2026-05-07 02:39:01 -05:00
committed by GitHub
parent 62ccd8b644
commit 9ffe290a17
6 changed files with 525 additions and 13 deletions

View File

@@ -140,6 +140,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Native chat: decode gateway-provided thinking metadata for the iOS/macOS picker so provider-specific levels such as `adaptive`, `xhigh`, and `max` appear without leaking unsupported default-model options. Thanks @BunsDev.
- Agents/tools: fail `exec host=node` before `system.run` when the selected node is known to be disconnected, with an actionable reconnect message instead of a raw node invoke failure. Thanks @BunsDev.
- Agents/models: accept legacy `anthropic-cli/*` model refs as Claude CLI runtime refs instead of failing model resolution with `Unknown model`. Thanks @BunsDev.
- Agents/tools: keep restrictive-profile tool-section warnings scoped to the configured sections whose tools are still missing from `alsoAllow`, so already re-allowed filesystem tools do not make exec-only fixes look broader than they are. Thanks @BunsDev.

View File

@@ -63,8 +63,12 @@ struct MacGatewayChatTransport: OpenClawChatTransport {
let mainSessionKey = await GatewayConnection.shared.cachedMainSessionKey()
let defaults = decoded.defaults.map {
OpenClawChatSessionsDefaults(
modelProvider: $0.modelProvider,
model: $0.model,
contextTokens: $0.contextTokens,
thinkingLevels: $0.thinkingLevels,
thinkingOptions: $0.thinkingOptions,
thinkingDefault: $0.thinkingDefault,
mainSessionKey: mainSessionKey)
} ?? OpenClawChatSessionsDefaults(
model: nil,

View File

@@ -9,8 +9,6 @@ import UniformTypeIdentifiers
@MainActor
struct OpenClawChatComposer: View {
private static let menuThinkingLevels = ["off", "low", "medium", "high"]
@Bindable var viewModel: OpenClawChatViewModel
let style: OpenClawChatView.Style
let showsSessionSwitcher: Bool
@@ -95,12 +93,8 @@ struct OpenClawChatComposer: View {
get: { self.viewModel.thinkingLevel },
set: { next in self.viewModel.selectThinkingLevel(next) }))
{
Text("Off").tag("off")
Text("Low").tag("low")
Text("Medium").tag("medium")
Text("High").tag("high")
if !Self.menuThinkingLevels.contains(self.viewModel.thinkingLevel) {
Text(self.viewModel.thinkingLevel.capitalized).tag(self.viewModel.thinkingLevel)
ForEach(self.viewModel.thinkingLevelOptions) { option in
Text(option.label).tag(option.id)
}
}
.labelsHidden()

View File

@@ -1,5 +1,15 @@
import Foundation
/// A single thinking-effort level the chat picker can offer, as provided by
/// the gateway (e.g. "off", "adaptive", "xhigh", "max").
public struct OpenClawChatThinkingLevelOption: Codable, Identifiable, Sendable, Hashable {
/// Canonical level identifier; doubles as the picker selection tag.
public let id: String
/// Human-readable label shown in the menu (may differ from `id`, e.g. "maximum").
public let label: String
public init(id: String, label: String) {
self.id = id
self.label = label
}
}
public struct OpenClawChatModelChoice: Identifiable, Codable, Sendable, Hashable {
public var id: String {
self.selectionID
@@ -34,13 +44,29 @@ public struct OpenClawChatModelChoice: Identifiable, Codable, Sendable, Hashable
}
/// Gateway-reported defaults for new chat sessions: the default model identity,
/// its context window, and the thinking levels it supports.
///
/// Note: the stale pre-refactor initializer line left over from the diff
/// (`init(model:contextTokens:mainSessionKey:)`) has been removed; the new
/// initializer below is backward-compatible with those call sites because
/// every added parameter defaults to `nil`.
public struct OpenClawChatSessionsDefaults: Codable, Sendable {
public let modelProvider: String?
public let model: String?
public let contextTokens: Int?
/// Rich thinking-level metadata (id + display label) from the gateway.
public let thinkingLevels: [OpenClawChatThinkingLevelOption]?
/// Legacy string-only thinking options; used when `thinkingLevels` is absent.
public let thinkingOptions: [String]?
/// The level the gateway applies when the user has not chosen one.
public let thinkingDefault: String?
public let mainSessionKey: String?
public init(
modelProvider: String? = nil,
model: String?,
contextTokens: Int?,
thinkingLevels: [OpenClawChatThinkingLevelOption]? = nil,
thinkingOptions: [String]? = nil,
thinkingDefault: String? = nil,
mainSessionKey: String? = nil)
{
self.modelProvider = modelProvider
self.model = model
self.contextTokens = contextTokens
self.thinkingLevels = thinkingLevels
self.thinkingOptions = thinkingOptions
self.thinkingDefault = thinkingDefault
self.mainSessionKey = mainSessionKey
}
}
@@ -72,6 +98,57 @@ public struct OpenClawChatSessionEntry: Codable, Identifiable, Sendable, Hashabl
public let modelProvider: String?
public let model: String?
public let contextTokens: Int?
public let thinkingLevels: [OpenClawChatThinkingLevelOption]?
public let thinkingOptions: [String]?
public let thinkingDefault: String?
/// Memberwise initializer; the trailing thinking-metadata parameters default to
/// `nil` so pre-existing call sites that omit them keep compiling.
public init(
key: String,
kind: String?,
displayName: String?,
surface: String?,
subject: String?,
room: String?,
space: String?,
updatedAt: Double?,
sessionId: String?,
systemSent: Bool?,
abortedLastRun: Bool?,
thinkingLevel: String?,
verboseLevel: String?,
inputTokens: Int?,
outputTokens: Int?,
totalTokens: Int?,
modelProvider: String?,
model: String?,
contextTokens: Int?,
thinkingLevels: [OpenClawChatThinkingLevelOption]? = nil,
thinkingOptions: [String]? = nil,
thinkingDefault: String? = nil)
{
self.key = key
self.kind = kind
self.displayName = displayName
self.surface = surface
self.subject = subject
self.room = room
self.space = space
self.updatedAt = updatedAt
self.sessionId = sessionId
self.systemSent = systemSent
self.abortedLastRun = abortedLastRun
self.thinkingLevel = thinkingLevel
self.verboseLevel = verboseLevel
self.inputTokens = inputTokens
self.outputTokens = outputTokens
self.totalTokens = totalTokens
self.modelProvider = modelProvider
self.model = model
self.contextTokens = contextTokens
self.thinkingLevels = thinkingLevels
self.thinkingOptions = thinkingOptions
self.thinkingDefault = thinkingDefault
}
}
public struct OpenClawChatSessionsListResponse: Codable, Sendable {

View File

@@ -21,6 +21,7 @@ public final class OpenClawChatViewModel {
public private(set) var messages: [OpenClawChatMessage] = []
public var input: String = ""
public private(set) var thinkingLevel: String
public private(set) var thinkingLevelOptions: [OpenClawChatThinkingLevelOption]
public private(set) var modelSelectionID: String = "__default__"
public private(set) var modelChoices: [OpenClawChatModelChoice] = []
public private(set) var isLoading = false
@@ -83,7 +84,11 @@ public final class OpenClawChatViewModel {
self.sessionKey = sessionKey
self.transport = transport
let normalizedThinkingLevel = Self.normalizedThinkingLevel(initialThinkingLevel)
self.thinkingLevel = normalizedThinkingLevel ?? "off"
let initialResolvedThinkingLevel = normalizedThinkingLevel ?? "off"
self.thinkingLevel = initialResolvedThinkingLevel
self.thinkingLevelOptions = Self.withCurrentThinkingOption(
Self.baseThinkingLevelOptions,
current: initialResolvedThinkingLevel)
self.prefersExplicitThinkingLevel = normalizedThinkingLevel != nil
self.onThinkingLevelChanged = onThinkingLevelChanged
@@ -198,6 +203,14 @@ public final class OpenClawChatViewModel {
return "Default: \(self.modelLabel(for: defaultModelID))"
}
/// Fallback picker options used when neither the session nor the defaults
/// carry gateway thinking metadata; each level's label is its own id.
private static let baseThinkingLevelOptions: [OpenClawChatThinkingLevelOption] =
["off", "minimal", "low", "medium", "high"].map {
OpenClawChatThinkingLevelOption(id: $0, label: $0)
}
/// Kicks off asynchronous loading of attachments; returns immediately.
public func addAttachments(urls: [URL]) {
Task { await self.loadAttachments(urls: urls) }
}
@@ -243,6 +256,7 @@ public final class OpenClawChatViewModel {
{
self.thinkingLevel = level
}
self.syncThinkingLevelOptions()
await self.pollHealthIfNeeded(force: true)
await self.fetchSessions(limit: 50)
await self.fetchModels()
@@ -594,6 +608,7 @@ public final class OpenClawChatViewModel {
self.sessions = res.sessions
self.sessionDefaults = res.defaults
self.syncSelectedModel()
self.syncThinkingLevelOptions()
} catch {
// Best-effort.
}
@@ -675,6 +690,8 @@ public final class OpenClawChatViewModel {
let sessionKey = self.sessionKey
self.thinkingLevel = next
self.syncThinkingLevelOptions()
self.updateCurrentSessionThinkingLevel(next, sessionKey: sessionKey)
self.onThinkingLevelChanged?(next)
self.nextThinkingSelectionRequestID &+= 1
let requestID = self.nextThinkingSelectionRequestID
@@ -770,6 +787,99 @@ public final class OpenClawChatViewModel {
}
}
/// Recomputes the picker options from the active session row (falling back to
/// defaults), then keeps the currently selected level visible even if the
/// resolved set does not contain it.
private func syncThinkingLevelOptions() {
let activeSession = self.sessions.first { $0.key == self.sessionKey }
var resolved = self.resolvedThinkingLevelOptions(for: activeSession)
if let normalizedCurrent = Self.normalizedThinkingLevel(self.thinkingLevel) {
resolved = Self.withCurrentThinkingOption(resolved, current: normalizedCurrent)
}
self.thinkingLevelOptions = resolved
}
/// Resolves the thinking-level options to show, in priority order:
/// 1. rich metadata on the session row,
/// 2. rich metadata from the defaults (only when the session runs the default model),
/// 3. legacy string options on the session row,
/// 4. legacy string options from the defaults (same model-match guard),
/// 5. the static fallback list.
private func resolvedThinkingLevelOptions(
for currentSession: OpenClawChatSessionEntry?) -> [OpenClawChatThinkingLevelOption]
{
// No session row means we are on the default model, so defaults apply.
let defaultsMatch: Bool
if let session = currentSession {
defaultsMatch = Self.sessionModelMatchesDefaults(session, defaults: self.sessionDefaults)
} else {
defaultsMatch = true
}
if let sessionLevels = Self.normalizedThinkingLevelOptions(currentSession?.thinkingLevels),
!sessionLevels.isEmpty
{
return sessionLevels
}
if defaultsMatch,
let defaultLevels = Self.normalizedThinkingLevelOptions(self.sessionDefaults?.thinkingLevels),
!defaultLevels.isEmpty
{
return defaultLevels
}
if let legacySessionOptions = Self.thinkingOptions(from: currentSession?.thinkingOptions),
!legacySessionOptions.isEmpty
{
return legacySessionOptions
}
if defaultsMatch,
let legacyDefaultOptions = Self.thinkingOptions(from: self.sessionDefaults?.thinkingOptions),
!legacyDefaultOptions.isEmpty
{
return legacyDefaultOptions
}
return Self.baseThinkingLevelOptions
}
/// True when the session row effectively runs the default model.
/// A `nil` session field means "inherits the default" and always matches.
private static func sessionModelMatchesDefaults(
_ session: OpenClawChatSessionEntry,
defaults: OpenClawChatSessionsDefaults?) -> Bool
{
let providerDiffers = session.modelProvider.map { $0 != defaults?.modelProvider } ?? false
let modelDiffers = session.model.map { $0 != defaults?.model } ?? false
return !providerDiffers && !modelDiffers
}
/// Canonicalizes gateway-provided level options: normalizes each id, trims the
/// label (falling back to the id when blank), drops unparseable entries, and
/// dedupes by id while preserving order. Returns nil only for nil input.
private static func normalizedThinkingLevelOptions(
_ levels: [OpenClawChatThinkingLevelOption]?) -> [OpenClawChatThinkingLevelOption]?
{
guard let levels else { return nil }
var normalized: [OpenClawChatThinkingLevelOption] = []
for level in levels {
guard let canonicalID = Self.normalizedThinkingLevel(level.id) else { continue }
let trimmedLabel = level.label.trimmingCharacters(in: .whitespacesAndNewlines)
normalized.append(OpenClawChatThinkingLevelOption(
id: canonicalID,
label: trimmedLabel.isEmpty ? canonicalID : trimmedLabel))
}
return Self.dedupedThinkingOptions(normalized)
}
/// Converts legacy string-only thinking options into picker options: the
/// normalized string becomes the id, the trimmed original string becomes the
/// label (falling back to the id when blank). Deduped by id, order preserved.
private static func thinkingOptions(from labels: [String]?) -> [OpenClawChatThinkingLevelOption]? {
guard let labels else { return nil }
var converted: [OpenClawChatThinkingLevelOption] = []
for rawLabel in labels {
guard let canonicalID = Self.normalizedThinkingLevel(rawLabel) else { continue }
let displayLabel = rawLabel.trimmingCharacters(in: .whitespacesAndNewlines)
converted.append(OpenClawChatThinkingLevelOption(
id: canonicalID,
label: displayLabel.isEmpty ? canonicalID : displayLabel))
}
return Self.dedupedThinkingOptions(converted)
}
/// Ensures the active selection stays visible in the picker: if `current` is
/// not among `options`, appends it as a self-labeled option at the end.
private static func withCurrentThinkingOption(
_ options: [OpenClawChatThinkingLevelOption],
current: String) -> [OpenClawChatThinkingLevelOption]
{
if options.contains(where: { $0.id == current }) {
return options
}
var extended = options
extended.append(OpenClawChatThinkingLevelOption(id: current, label: current))
return extended
}
/// Drops options with empty ids and keeps only the first occurrence of each
/// id, preserving the original order.
private static func dedupedThinkingOptions(
_ options: [OpenClawChatThinkingLevelOption]) -> [OpenClawChatThinkingLevelOption]
{
var seenIDs = Set<String>()
// `insert(_:).inserted` is false for repeats, filtering duplicates in one pass.
return options.filter { option in
!option.id.isEmpty && seenIDs.insert(option.id).inserted
}
}
private func placeholderSession(key: String) -> OpenClawChatSessionEntry {
OpenClawChatSessionEntry(
key: key,
@@ -858,6 +968,9 @@ public final class OpenClawChatViewModel {
modelProvider: resolved.modelProvider,
sessionKey: sessionKey,
syncSelection: syncSelection)
if sessionKey == self.sessionKey {
self.syncThinkingLevelOptions()
}
}
private func resolvedSessionModelIdentity(forSelectionID selectionID: String)
@@ -885,6 +998,34 @@ public final class OpenClawChatViewModel {
return "\(provider)/\(modelID)"
}
/// Rewrites the cached session row for `sessionKey` with a new thinking level,
/// preserving every other field. No-op when the session is not in the cache.
private func updateCurrentSessionThinkingLevel(_ thinkingLevel: String?, sessionKey: String) {
guard let index = self.sessions.firstIndex(where: { $0.key == sessionKey }) else { return }
let current = self.sessions[index]
// The entry's stored properties are immutable, so rebuild it wholesale.
self.sessions[index] = OpenClawChatSessionEntry(
key: current.key,
kind: current.kind,
displayName: current.displayName,
surface: current.surface,
subject: current.subject,
room: current.room,
space: current.space,
updatedAt: current.updatedAt,
sessionId: current.sessionId,
systemSent: current.systemSent,
abortedLastRun: current.abortedLastRun,
thinkingLevel: thinkingLevel,
verboseLevel: current.verboseLevel,
inputTokens: current.inputTokens,
outputTokens: current.outputTokens,
totalTokens: current.totalTokens,
modelProvider: current.modelProvider,
model: current.model,
contextTokens: current.contextTokens,
thinkingLevels: current.thinkingLevels,
thinkingOptions: current.thinkingOptions,
thinkingDefault: current.thinkingDefault)
}
private func updateCurrentSessionModel(
modelID: String?,
modelProvider: String?,
@@ -1084,6 +1225,7 @@ public final class OpenClawChatViewModel {
let level = Self.normalizedThinkingLevel(payload.thinkingLevel)
{
self.thinkingLevel = level
self.syncThinkingLevelOptions()
}
} catch {
chatUILogger.error("refresh history failed \(error.localizedDescription, privacy: .public)")
@@ -1195,9 +1337,33 @@ public final class OpenClawChatViewModel {
/// Canonicalizes a user- or gateway-supplied thinking level into the picker's
/// level IDs. Trims and lowercases, collapses whitespace/underscores/hyphens
/// (so "extra-high", "extra_high", and "extra high" compare equal), then maps
/// known aliases; unknown non-empty levels pass through unchanged so
/// provider-specific IDs survive. Returns nil for nil or blank input.
///
/// Fix: removed the stale pre-refactor lines left over from the diff (the old
/// allow-list `guard` and the trailing unreachable `return trimmed`), which
/// made the function syntactically unbalanced as written.
private static func normalizedThinkingLevel(_ level: String?) -> String? {
guard let level else { return nil }
let trimmed = level.trimmingCharacters(in: .whitespacesAndNewlines).lowercased()
guard !trimmed.isEmpty else { return nil }
let collapsed = trimmed.replacingOccurrences(
of: "[\\s_-]+",
with: "",
options: .regularExpression)
switch collapsed {
case "adaptive", "auto":
return "adaptive"
case "max":
return "max"
case "xhigh", "extrahigh":
return "xhigh"
case "off", "none":
return "off"
case "on", "enable", "enabled":
return "low"
case "min", "minimal", "think":
return "minimal"
case "low", "thinkhard":
return "low"
case "mid", "med", "medium", "thinkharder", "harder":
return "medium"
case "high", "ultra", "ultrathink", "thinkhardest", "highest":
return "high"
default:
// Note: returns the separator-preserving `trimmed`, not `collapsed`,
// matching the new implementation's behavior for unknown levels.
return trimmed
}
}
}

View File

@@ -46,6 +46,10 @@ private func sessionEntry(key: String, updatedAt: Double) -> OpenClawChatSession
contextTokens: nil)
}
/// Test fixture helper: builds a thinking-level option whose label defaults to its id.
private func thinkingOption(_ id: String, label: String? = nil) -> OpenClawChatThinkingLevelOption {
let resolvedLabel = label ?? id
return OpenClawChatThinkingLevelOption(id: id, label: resolvedLabel)
}
private func sessionEntry(
key: String,
updatedAt: Double,
@@ -1632,6 +1636,272 @@ extension TestChatTransportState {
}
}
// Verifies the wire format: `thinkingLevels`, `thinkingOptions`, and
// `thinkingDefault` decode from a sessions-list JSON payload for both the
// defaults object and individual session rows.
@Test func decodesGatewayThinkingMetadataFromSessionList() throws {
let json = """
{
"defaults": {
"modelProvider": "anthropic",
"model": "claude-opus-4-7",
"thinkingLevels": [
{ "id": "off", "label": "off" },
{ "id": "adaptive", "label": "adaptive" },
{ "id": "max", "label": "maximum" }
],
"thinkingOptions": ["off", "adaptive", "maximum"],
"thinkingDefault": "adaptive"
},
"sessions": [
{
"key": "main",
"modelProvider": "openrouter",
"model": "deepseek/deepseek-v4",
"thinkingLevel": "max",
"thinkingLevels": [
{ "id": "off", "label": "off" },
{ "id": "xhigh", "label": "xhigh" },
{ "id": "max", "label": "max" }
],
"thinkingOptions": ["off", "xhigh", "max"],
"thinkingDefault": "max"
}
]
}
"""
let decoded = try JSONDecoder().decode(
OpenClawChatSessionsListResponse.self,
from: Data(json.utf8))
#expect(decoded.defaults?.modelProvider == "anthropic")
#expect(decoded.defaults?.thinkingLevels?.map(\.id) == ["off", "adaptive", "max"])
// Label may differ from id ("maximum" vs "max") and must survive decoding.
#expect(decoded.defaults?.thinkingLevels?.last?.label == "maximum")
#expect(decoded.defaults?.thinkingDefault == "adaptive")
#expect(decoded.sessions.first?.thinkingLevels?.map(\.id) == ["off", "xhigh", "max"])
#expect(decoded.sessions.first?.thinkingDefault == "max")
}
// Verifies that a session row's own `thinkingLevels` win over the defaults'
// levels: the session runs a different model (anthropic) than the defaults
// (openai-codex), so the picker must show the session's off/adaptive/max set,
// including the non-identity "maximum" label.
@Test func sessionThinkingLevelsDrivePickerOptions() async throws {
let history = OpenClawChatHistoryPayload(
sessionKey: "main",
sessionId: "sess-main",
messages: [],
thinkingLevel: "adaptive")
let sessions = OpenClawChatSessionsListResponse(
ts: 1,
path: nil,
count: 1,
defaults: OpenClawChatSessionsDefaults(
modelProvider: "openai-codex",
model: "gpt-5.5",
contextTokens: nil,
thinkingLevels: [
thinkingOption("off"),
thinkingOption("low"),
thinkingOption("xhigh"),
thinkingOption("max", label: "maximum"),
],
thinkingOptions: ["off", "low", "xhigh", "maximum"],
thinkingDefault: "xhigh"),
sessions: [
OpenClawChatSessionEntry(
key: "main",
kind: nil,
displayName: nil,
surface: nil,
subject: nil,
room: nil,
space: nil,
updatedAt: 1,
sessionId: "sess-main",
systemSent: nil,
abortedLastRun: nil,
thinkingLevel: "adaptive",
verboseLevel: nil,
inputTokens: nil,
outputTokens: nil,
totalTokens: nil,
modelProvider: "anthropic",
model: "claude-opus-4-7",
contextTokens: nil,
thinkingLevels: [
thinkingOption("off"),
thinkingOption("adaptive"),
thinkingOption("max", label: "maximum"),
],
thinkingOptions: ["off", "adaptive", "maximum"],
thinkingDefault: "adaptive"),
])
let (_, vm) = await makeViewModel(
historyResponses: [history],
sessionsResponses: [sessions])
try await loadAndWaitBootstrap(vm: vm, sessionId: "sess-main")
#expect(await MainActor.run { vm.thinkingLevel } == "adaptive")
#expect(await MainActor.run { vm.thinkingLevelOptions.map(\.id) } == ["off", "adaptive", "max"])
#expect(await MainActor.run { vm.thinkingLevelOptions.map(\.label) } == ["off", "adaptive", "maximum"])
}
// Verifies two fallbacks at once: with no `thinkingLevels` and no defaults,
// the legacy string `thinkingOptions` drive the picker, and the current
// "xhigh" selection (absent from that set) is appended so it stays visible.
@Test func thinkingOptionsFallbackAndCurrentUnsupportedLevelStayVisible() async throws {
let history = OpenClawChatHistoryPayload(
sessionKey: "main",
sessionId: "sess-main",
messages: [],
thinkingLevel: "xhigh")
let sessions = OpenClawChatSessionsListResponse(
ts: 1,
path: nil,
count: 1,
defaults: nil,
sessions: [
OpenClawChatSessionEntry(
key: "main",
kind: nil,
displayName: nil,
surface: nil,
subject: nil,
room: nil,
space: nil,
updatedAt: 1,
sessionId: "sess-main",
systemSent: nil,
abortedLastRun: nil,
thinkingLevel: "xhigh",
verboseLevel: nil,
inputTokens: nil,
outputTokens: nil,
totalTokens: nil,
modelProvider: "openrouter",
model: "deepseek/deepseek-v4",
contextTokens: nil,
thinkingLevels: nil,
thinkingOptions: ["off", "max"],
thinkingDefault: "max"),
])
let (_, vm) = await makeViewModel(
historyResponses: [history],
sessionsResponses: [sessions])
try await loadAndWaitBootstrap(vm: vm, sessionId: "sess-main")
#expect(await MainActor.run { vm.thinkingLevel } == "xhigh")
// "xhigh" is appended after the session's own options.
#expect(await MainActor.run { vm.thinkingLevelOptions.map(\.id) } == ["off", "max", "xhigh"])
#expect(await MainActor.run { vm.thinkingLevelOptions.map(\.label) } == ["off", "max", "xhigh"])
}
// Verifies resolution priority: when the session runs the same model as the
// defaults, the defaults' rich `thinkingLevels` outrank the session row's
// legacy string `thinkingOptions` (["off"] here is ignored).
@Test func matchingDefaultThinkingLevelsBeatLegacyRowThinkingOptions() async throws {
let history = OpenClawChatHistoryPayload(
sessionKey: "main",
sessionId: "sess-main",
messages: [],
thinkingLevel: "adaptive")
let sessions = OpenClawChatSessionsListResponse(
ts: 1,
path: nil,
count: 1,
defaults: OpenClawChatSessionsDefaults(
modelProvider: "anthropic",
model: "claude-opus-4-7",
contextTokens: nil,
thinkingLevels: [
thinkingOption("off"),
thinkingOption("adaptive"),
thinkingOption("max"),
],
thinkingOptions: ["off", "adaptive", "max"],
thinkingDefault: "adaptive"),
sessions: [
OpenClawChatSessionEntry(
key: "main",
kind: nil,
displayName: nil,
surface: nil,
subject: nil,
room: nil,
space: nil,
updatedAt: 1,
sessionId: "sess-main",
systemSent: nil,
abortedLastRun: nil,
thinkingLevel: "adaptive",
verboseLevel: nil,
inputTokens: nil,
outputTokens: nil,
totalTokens: nil,
modelProvider: "anthropic",
model: "claude-opus-4-7",
contextTokens: nil,
thinkingLevels: nil,
thinkingOptions: ["off"],
thinkingDefault: "off"),
])
let (_, vm) = await makeViewModel(
historyResponses: [history],
sessionsResponses: [sessions])
try await loadAndWaitBootstrap(vm: vm, sessionId: "sess-main")
#expect(await MainActor.run { vm.thinkingLevelOptions.map(\.id) } == ["off", "adaptive", "max"])
}
// Verifies the no-leak guarantee: the session runs a different model
// (openai/gpt-5.4) than the defaults (anthropic), so the defaults' levels
// must NOT apply. The picker falls back to the base list, plus the current
// "max" selection appended to stay visible.
@Test func defaultThinkingLevelsDoNotLeakToDifferentSessionModel() async throws {
let history = OpenClawChatHistoryPayload(
sessionKey: "main",
sessionId: "sess-main",
messages: [],
thinkingLevel: "max")
let sessions = OpenClawChatSessionsListResponse(
ts: 1,
path: nil,
count: 1,
defaults: OpenClawChatSessionsDefaults(
modelProvider: "anthropic",
model: "claude-opus-4-7",
contextTokens: nil,
thinkingLevels: [
thinkingOption("off"),
thinkingOption("adaptive"),
thinkingOption("max"),
],
thinkingOptions: ["off", "adaptive", "max"],
thinkingDefault: "adaptive"),
sessions: [
OpenClawChatSessionEntry(
key: "main",
kind: nil,
displayName: nil,
surface: nil,
subject: nil,
room: nil,
space: nil,
updatedAt: 1,
sessionId: "sess-main",
systemSent: nil,
abortedLastRun: nil,
thinkingLevel: "max",
verboseLevel: nil,
inputTokens: nil,
outputTokens: nil,
totalTokens: nil,
modelProvider: "openai",
model: "gpt-5.4",
contextTokens: nil),
])
let (_, vm) = await makeViewModel(
historyResponses: [history],
sessionsResponses: [sessions])
try await loadAndWaitBootstrap(vm: vm, sessionId: "sess-main")
#expect(await MainActor.run { vm.thinkingLevel } == "max")
#expect(await MainActor.run { vm.thinkingLevelOptions.map(\.id) } ==
["off", "minimal", "low", "medium", "high", "max"])
}
@Test func staleThinkingPatchCompletionReappliesLatestSelection() async throws {
let history = OpenClawChatHistoryPayload(
sessionKey: "main",