fix: prefer transcript model in sessions list (#55628) (thanks @MonkeyLeeT)

* gateway: prefer transcript model in sessions list

* gateway: keep live subagent model in session rows

* gateway: prefer selected model until runtime refresh

* gateway: simplify session model identity selection

* gateway: avoid transcript model fallback on cost-only reads
This commit is contained in:
Ted Li
2026-03-29 00:50:55 -07:00
committed by GitHub
parent 08b5206b19
commit ebb919e311
3 changed files with 253 additions and 12 deletions

View File

@@ -427,7 +427,7 @@ describe("gateway server sessions", () => {
ws.close();
});
test("sessions.list surfaces transcript usage fallbacks and parent child relationships", async () => {
test("sessions.list surfaces transcript usage and model fallbacks from the transcript", async () => {
const { dir } = await createSessionStoreDir();
testState.agentConfig = {
models: {
@@ -477,7 +477,7 @@ describe("gateway server sessions", () => {
sessionId: "sess-child",
updatedAt: Date.now() - 1_000,
modelProvider: "anthropic",
model: "claude-sonnet-4-6",
model: "claude-sonnet-4-5",
parentSessionKey: "agent:main:main",
totalTokens: 0,
totalTokensFresh: false,
@@ -499,6 +499,8 @@ describe("gateway server sessions", () => {
totalTokensFresh?: boolean;
contextTokens?: number;
estimatedCostUsd?: number;
modelProvider?: string;
model?: string;
}>;
}>(ws, "sessions.list", {});
@@ -513,6 +515,8 @@ describe("gateway server sessions", () => {
expect(child?.totalTokensFresh).toBe(true);
expect(child?.contextTokens).toBe(1_048_576);
expect(child?.estimatedCostUsd).toBe(0.0042);
expect(child?.modelProvider).toBe("anthropic");
expect(child?.model).toBe("claude-sonnet-4-6");
ws.close();
});

View File

@@ -1364,6 +1364,218 @@ describe("listSessionsFromStore search", () => {
fs.rmSync(tmpDir, { recursive: true, force: true });
}
});
test("keeps a running subagent model when transcript fallback still reflects an older run", () => {
  // Scratch directory holding the session store path plus one transcript file.
  const workDir = fs.mkdtempSync(
    path.join(os.tmpdir(), "openclaw-session-utils-subagent-stale-model-"),
  );
  const sessionsPath = path.join(workDir, "sessions.json");
  const timestamp = Date.now();
  const config = {
    session: { mainKey: "main" },
    agents: {
      list: [{ id: "main", default: true }],
      defaults: {
        models: {
          "anthropic/claude-sonnet-4-6": { params: { context1m: true } },
        },
      },
    },
  } as unknown as OpenClawConfig;
  // Transcript reflects an OLDER run (claude-sonnet-4-6) with usage totals.
  const transcriptLines = [
    JSON.stringify({ type: "session", version: 1, id: "sess-child-stale" }),
    JSON.stringify({
      message: {
        role: "assistant",
        provider: "anthropic",
        model: "claude-sonnet-4-6",
        usage: {
          input: 2_000,
          output: 500,
          cacheRead: 1_200,
          cost: { total: 0.007725 },
        },
      },
    }),
  ];
  fs.writeFileSync(
    path.join(workDir, "sess-child-stale.jsonl"),
    transcriptLines.join("\n"),
    "utf-8",
  );
  // Register a live subagent run that is currently on a NEWER model.
  addSubagentRunForTests({
    runId: "run-child-live-new-model",
    childSessionKey: "agent:main:subagent:child-live-stale-transcript",
    controllerSessionKey: "agent:main:main",
    requesterSessionKey: "agent:main:main",
    requesterDisplayKey: "main",
    task: "child task",
    cleanup: "keep",
    createdAt: timestamp - 5_000,
    startedAt: timestamp - 4_000,
    model: "openai/gpt-5.4",
  });
  try {
    const listing = listSessionsFromStore({
      cfg: config,
      storePath: sessionsPath,
      store: {
        "agent:main:subagent:child-live-stale-transcript": {
          sessionId: "sess-child-stale",
          updatedAt: timestamp,
          spawnedBy: "agent:main:main",
          totalTokens: 0,
          totalTokensFresh: false,
        } as SessionEntry,
      },
      opts: {},
    });
    // The live run's model identity wins over the stale transcript model,
    // while the usage totals are still backfilled from the transcript.
    expect(listing.sessions[0]).toMatchObject({
      key: "agent:main:subagent:child-live-stale-transcript",
      status: "running",
      modelProvider: "openai",
      model: "gpt-5.4",
      totalTokens: 3_200,
      totalTokensFresh: true,
    });
  } finally {
    fs.rmSync(workDir, { recursive: true, force: true });
  }
});
test("keeps the selected override model when runtime identity was intentionally cleared", () => {
  // Temp workspace: session store path plus the override session's transcript.
  const workDir = fs.mkdtempSync(
    path.join(os.tmpdir(), "openclaw-session-utils-cleared-runtime-model-"),
  );
  const sessionsPath = path.join(workDir, "sessions.json");
  const timestamp = Date.now();
  const config = {
    session: { mainKey: "main" },
    agents: {
      list: [{ id: "main", default: true }],
      defaults: {
        models: {
          "anthropic/claude-sonnet-4-6": { params: { context1m: true } },
        },
      },
    },
  } as unknown as OpenClawConfig;
  // Transcript carries an anthropic model that must NOT override the
  // user-selected provider/model overrides on the store entry.
  const transcriptLines = [
    JSON.stringify({ type: "session", version: 1, id: "sess-override" }),
    JSON.stringify({
      message: {
        role: "assistant",
        provider: "anthropic",
        model: "claude-sonnet-4-6",
        usage: {
          input: 2_000,
          output: 500,
          cacheRead: 1_200,
          cost: { total: 0.007725 },
        },
      },
    }),
  ];
  fs.writeFileSync(
    path.join(workDir, "sess-override.jsonl"),
    transcriptLines.join("\n"),
    "utf-8",
  );
  try {
    const listing = listSessionsFromStore({
      cfg: config,
      storePath: sessionsPath,
      store: {
        "agent:main:main": {
          sessionId: "sess-override",
          updatedAt: timestamp,
          providerOverride: "openai",
          modelOverride: "gpt-5.4",
          totalTokens: 0,
          totalTokensFresh: false,
        } as SessionEntry,
      },
      opts: {},
    });
    // Override identity is preserved; usage still backfills from transcript.
    expect(listing.sessions[0]).toMatchObject({
      key: "agent:main:main",
      modelProvider: "openai",
      model: "gpt-5.4",
      totalTokens: 3_200,
      totalTokensFresh: true,
    });
  } finally {
    fs.rmSync(workDir, { recursive: true, force: true });
  }
});
test("does not replace the current runtime model when transcript fallback is only for missing pricing", () => {
  // Temp workspace for the store path and the session's transcript file.
  const workDir = fs.mkdtempSync(path.join(os.tmpdir(), "openclaw-session-utils-pricing-"));
  const sessionsPath = path.join(workDir, "sessions.json");
  const timestamp = Date.now();
  const config = {
    session: { mainKey: "main" },
    agents: {
      list: [{ id: "main", default: true }],
    },
  } as unknown as OpenClawConfig;
  // Transcript names a different model; here the fallback should be consulted
  // only for pricing, never for the session's model identity.
  const transcriptLines = [
    JSON.stringify({ type: "session", version: 1, id: "sess-pricing" }),
    JSON.stringify({
      message: {
        role: "assistant",
        provider: "anthropic",
        model: "claude-sonnet-4-6",
        usage: {
          input: 2_000,
          output: 500,
          cacheRead: 1_200,
          cost: { total: 0.007725 },
        },
      },
    }),
  ];
  fs.writeFileSync(
    path.join(workDir, "sess-pricing.jsonl"),
    transcriptLines.join("\n"),
    "utf-8",
  );
  try {
    const listing = listSessionsFromStore({
      cfg: config,
      storePath: sessionsPath,
      store: {
        // Entry already has fresh runtime usage AND a runtime model identity.
        "agent:main:main": {
          sessionId: "sess-pricing",
          updatedAt: timestamp,
          modelProvider: "openai",
          model: "gpt-5.4",
          contextTokens: 200_000,
          totalTokens: 3_200,
          totalTokensFresh: true,
          inputTokens: 2_000,
          outputTokens: 500,
          cacheRead: 1_200,
        } as SessionEntry,
      },
      opts: {},
    });
    // Runtime identity and runtime usage are reported unchanged.
    expect(listing.sessions[0]).toMatchObject({
      key: "agent:main:main",
      modelProvider: "openai",
      model: "gpt-5.4",
      totalTokens: 3_200,
      totalTokensFresh: true,
      contextTokens: 200_000,
    });
  } finally {
    fs.rmSync(workDir, { recursive: true, force: true });
  }
});
});
describe("listSessionsFromStore subagent metadata", () => {

View File

@@ -312,6 +312,8 @@ function resolveTranscriptUsageFallback(params: {
totalTokens?: number;
totalTokensFresh?: boolean;
contextTokens?: number;
modelProvider?: string;
model?: string;
} | null {
const entry = params.entry;
if (!entry?.sessionId) {
@@ -352,6 +354,8 @@ function resolveTranscriptUsageFallback(params: {
},
});
return {
modelProvider,
model,
totalTokens: resolvePositiveNumber(snapshot.totalTokens),
totalTokensFresh: snapshot.totalTokensFresh === true,
contextTokens: resolvePositiveNumber(contextTokens),
@@ -1170,26 +1174,47 @@ export function buildGatewaySessionRow(params: {
sessionAgentId,
subagentRun?.model,
);
const modelProvider = resolvedModel.provider;
const model = resolvedModel.model ?? DEFAULT_MODEL;
const transcriptUsage =
resolvePositiveNumber(resolveFreshSessionTotalTokens(entry)) === undefined ||
resolvePositiveNumber(entry?.contextTokens) === undefined ||
const runtimeModelPresent =
Boolean(entry?.model?.trim()) || Boolean(entry?.modelProvider?.trim());
const needsTranscriptTotalTokens =
resolvePositiveNumber(resolveFreshSessionTotalTokens(entry)) === undefined;
const needsTranscriptContextTokens =
resolvePositiveNumber(entry?.contextTokens) === undefined;
const needsTranscriptEstimatedCostUsd =
resolveEstimatedSessionCostUsd({
cfg,
provider: modelProvider,
model,
provider: resolvedModel.provider,
model: resolvedModel.model ?? DEFAULT_MODEL,
entry,
}) === undefined
}) === undefined;
const transcriptUsage =
needsTranscriptTotalTokens || needsTranscriptContextTokens || needsTranscriptEstimatedCostUsd
? resolveTranscriptUsageFallback({
cfg,
key,
entry,
storePath,
fallbackProvider: modelProvider,
fallbackModel: model,
fallbackProvider: resolvedModel.provider,
fallbackModel: resolvedModel.model ?? DEFAULT_MODEL,
})
: null;
const preferLiveSubagentModelIdentity =
Boolean(subagentRun?.model?.trim()) && subagentStatus === "running";
const shouldUseTranscriptModelIdentity =
runtimeModelPresent &&
!preferLiveSubagentModelIdentity &&
(needsTranscriptTotalTokens || needsTranscriptContextTokens);
const resolvedModelIdentity = {
provider: resolvedModel.provider,
model: resolvedModel.model ?? DEFAULT_MODEL,
};
const modelIdentity = shouldUseTranscriptModelIdentity
? {
provider: transcriptUsage?.modelProvider ?? resolvedModelIdentity.provider,
model: transcriptUsage?.model ?? resolvedModelIdentity.model,
}
: resolvedModelIdentity;
const { provider: modelProvider, model } = modelIdentity;
const totalTokens =
resolvePositiveNumber(resolveFreshSessionTotalTokens(entry)) ??
resolvePositiveNumber(transcriptUsage?.totalTokens);