feat: surface cached token counts in /status output (openclaw#21248) thanks @vishaltandale00

Verified:
- pnpm build
- pnpm check
- pnpm test:macmini

Co-authored-by: vishaltandale00 <9222298+vishaltandale00@users.noreply.github.com>
Co-authored-by: Tak Hoffman <781889+Takhoffman@users.noreply.github.com>
This commit is contained in:
Vishal
2026-02-19 22:06:13 -05:00
committed by GitHub
parent db8ffb13f4
commit f1e1cc4ee3
18 changed files with 766 additions and 9 deletions

View File

@@ -12,6 +12,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Auto-reply/WebChat: avoid defaulting inbound runtime channel labels to unrelated providers (for example `whatsapp`) for webchat sessions so channel-specific formatting guidance stays accurate. (#21534) Thanks @lbo728.
- Status: include persisted `cacheRead`/`cacheWrite` in session summaries so compact `/status` output consistently shows cache hit percentages from real session data.
- Heartbeat/Cron: restore interval heartbeat behavior so missing `HEARTBEAT.md` no longer suppresses runs (only effectively empty files skip), preserving prompt-driven and tagged-cron execution paths.
- WhatsApp/Cron/Heartbeat: enforce allowlisted routing for implicit scheduled/system delivery by merging pairing-store + configured `allowFrom` recipients, selecting authorized recipients when last-route context points to a non-allowlisted chat, and preventing heartbeat fan-out to recent unauthorized chats.
- Heartbeat/Active hours: constrain active-hours `24` sentinel parsing to `24:00` in time validation so invalid values like `24:30` are rejected early. (#21410) Thanks @adhitShet.

View File

@@ -0,0 +1,154 @@
// Tests for applyExtraParamsToAgent's cacheRetention handling: the implicit
// "short" default for Anthropic, explicit per-model overrides, and the legacy
// cacheControlTtl spelling.
//
// NOTE(review): every test here only asserts that agent.streamFn was assigned;
// the wrapped function is never invoked, so the concrete retention value that
// was applied ("short" vs "long" vs "none") is not directly observable.
import type { StreamFn } from "@mariozechner/pi-agent-core";
import { describe, expect, it, vi } from "vitest";
import { applyExtraParamsToAgent } from "../pi-embedded-runner.js";

// Mock the logger to avoid noise in tests
// NOTE(review): the subject is imported from "../pi-embedded-runner.js", but
// this mock targets "./logger.js" (a sibling of the test file) — confirm the
// logger module actually resolves to this specifier, otherwise the mock is a
// no-op and real logging still runs.
vi.mock("./logger.js", () => ({
  log: {
    debug: vi.fn(),
    warn: vi.fn(),
  },
}));

describe("cacheRetention default behavior", () => {
  // No config at all: Anthropic models should still get cache retention.
  it("returns 'short' for Anthropic when not configured", () => {
    const agent: { streamFn?: StreamFn } = {};
    const cfg = undefined;
    const provider = "anthropic";
    const modelId = "claude-3-sonnet";
    applyExtraParamsToAgent(agent, cfg, provider, modelId);
    // Verify streamFn was set (indicating cache retention was applied)
    expect(agent.streamFn).toBeDefined();
    // The fact that agent.streamFn was modified indicates that cacheRetention
    // default "short" was applied. We don't need to call the actual function
    // since that would require API provider setup.
  });

  it("respects explicit 'none' config", () => {
    const agent: { streamFn?: StreamFn } = {};
    const cfg = {
      agents: {
        defaults: {
          models: {
            "anthropic/claude-3-sonnet": {
              params: {
                cacheRetention: "none" as const,
              },
            },
          },
        },
      },
    };
    const provider = "anthropic";
    const modelId = "claude-3-sonnet";
    applyExtraParamsToAgent(agent, cfg, provider, modelId);
    // Verify streamFn was set (config was applied)
    expect(agent.streamFn).toBeDefined();
  });

  it("respects explicit 'long' config", () => {
    const agent: { streamFn?: StreamFn } = {};
    const cfg = {
      agents: {
        defaults: {
          models: {
            "anthropic/claude-3-opus": {
              params: {
                cacheRetention: "long" as const,
              },
            },
          },
        },
      },
    };
    const provider = "anthropic";
    const modelId = "claude-3-opus";
    applyExtraParamsToAgent(agent, cfg, provider, modelId);
    // Verify streamFn was set (config was applied)
    expect(agent.streamFn).toBeDefined();
  });

  // Legacy knob: cacheControlTtl "1h" maps onto long retention.
  it("respects legacy cacheControlTtl config", () => {
    const agent: { streamFn?: StreamFn } = {};
    const cfg = {
      agents: {
        defaults: {
          models: {
            "anthropic/claude-3-haiku": {
              params: {
                cacheControlTtl: "1h",
              },
            },
          },
        },
      },
    };
    const provider = "anthropic";
    const modelId = "claude-3-haiku";
    applyExtraParamsToAgent(agent, cfg, provider, modelId);
    // Verify streamFn was set (legacy config was applied)
    expect(agent.streamFn).toBeDefined();
  });

  // NOTE(review): this test contains no expect() calls, so it can never fail.
  // Consider asserting on the absence of cacheRetention-specific wrapping.
  it("returns undefined for non-Anthropic providers", () => {
    const agent: { streamFn?: StreamFn } = {};
    const cfg = undefined;
    const provider = "openai";
    const modelId = "gpt-4";
    applyExtraParamsToAgent(agent, cfg, provider, modelId);
    // For OpenAI, the streamFn might be wrapped for other reasons (like OpenAI responses store)
    // but cacheRetention should not be applied
    // This is implicitly tested by the lack of cacheRetention-specific wrapping
  });

  it("prefers explicit cacheRetention over default", () => {
    const agent: { streamFn?: StreamFn } = {};
    const cfg = {
      agents: {
        defaults: {
          models: {
            "anthropic/claude-3-sonnet": {
              params: {
                cacheRetention: "long" as const,
                temperature: 0.7,
              },
            },
          },
        },
      },
    };
    const provider = "anthropic";
    const modelId = "claude-3-sonnet";
    applyExtraParamsToAgent(agent, cfg, provider, modelId);
    // Verify streamFn was set with explicit config
    expect(agent.streamFn).toBeDefined();
  });

  // Per-call override should take effect even with no config object at all.
  it("works with extraParamsOverride", () => {
    const agent: { streamFn?: StreamFn } = {};
    const cfg = undefined;
    const provider = "anthropic";
    const modelId = "claude-3-sonnet";
    const extraParamsOverride = {
      cacheRetention: "none" as const,
    };
    applyExtraParamsToAgent(agent, cfg, provider, modelId, extraParamsOverride);
    // Verify streamFn was set (override was applied)
    expect(agent.streamFn).toBeDefined();
  });
});

View File

@@ -44,6 +44,8 @@ type CacheRetentionStreamOptions = Partial<SimpleStreamOptions> & {
*
* Only applies to Anthropic provider (OpenRouter uses openai-completions API
* with hardcoded cache_control, not the cacheRetention stream option).
*
* Defaults to "short" for Anthropic provider when not explicitly configured.
*/
function resolveCacheRetention(
extraParams: Record<string, unknown> | undefined,
@@ -67,7 +69,9 @@ function resolveCacheRetention(
if (legacy === "1h") {
return "long";
}
return undefined;
// Default to "short" for Anthropic when not explicitly configured
return "short";
}
function createStreamFnWithExtraParams(

View File

@@ -0,0 +1,155 @@
// Verifies that loadWorkspaceBootstrapFiles is deterministic: identical inputs
// must produce identical outputs (content, ordering, missing markers) across
// repeated and concurrent loads. Stability matters because these files feed
// the system prompt, and any churn there defeats provider prompt caching.
import { describe, expect, it, beforeEach } from "vitest";
import { makeTempWorkspace, writeWorkspaceFile } from "../test-helpers/workspace.js";
import {
  loadWorkspaceBootstrapFiles,
  DEFAULT_AGENTS_FILENAME,
  DEFAULT_TOOLS_FILENAME,
  DEFAULT_SOUL_FILENAME,
} from "./workspace.js";

describe("system prompt stability for cache hits", () => {
  let workspaceDir: string;

  beforeEach(async () => {
    // Fresh temp workspace per test. No explicit cleanup here — presumably
    // makeTempWorkspace/the OS temp dir handles it; verify if disk usage grows.
    workspaceDir = await makeTempWorkspace("openclaw-system-prompt-stability-");
  });

  it("returns identical results for same inputs across multiple calls", async () => {
    const agentsContent = "# AGENTS.md - Your Workspace\n\nTest agents file.";
    const toolsContent = "# TOOLS.md - Local Notes\n\nTest tools file.";
    const soulContent = "# SOUL.md - Who You Are\n\nTest soul file.";
    // Write workspace files
    await writeWorkspaceFile({
      dir: workspaceDir,
      name: DEFAULT_AGENTS_FILENAME,
      content: agentsContent,
    });
    await writeWorkspaceFile({
      dir: workspaceDir,
      name: DEFAULT_TOOLS_FILENAME,
      content: toolsContent,
    });
    await writeWorkspaceFile({
      dir: workspaceDir,
      name: DEFAULT_SOUL_FILENAME,
      content: soulContent,
    });
    // Load the same workspace multiple times
    const results = await Promise.all([
      loadWorkspaceBootstrapFiles(workspaceDir),
      loadWorkspaceBootstrapFiles(workspaceDir),
      loadWorkspaceBootstrapFiles(workspaceDir),
      loadWorkspaceBootstrapFiles(workspaceDir),
      loadWorkspaceBootstrapFiles(workspaceDir),
    ]);
    // All results should be structurally identical
    for (let i = 1; i < results.length; i++) {
      expect(results[i]).toEqual(results[0]);
    }
    // Verify specific content consistency
    const agentsFiles = results.map((result) =>
      result.find((f) => f.name === DEFAULT_AGENTS_FILENAME),
    );
    const toolsFiles = results.map((result) =>
      result.find((f) => f.name === DEFAULT_TOOLS_FILENAME),
    );
    const soulFiles = results.map((result) => result.find((f) => f.name === DEFAULT_SOUL_FILENAME));
    // All instances should have identical content
    for (let i = 1; i < agentsFiles.length; i++) {
      expect(agentsFiles[i]?.content).toBe(agentsFiles[0]?.content);
      expect(toolsFiles[i]?.content).toBe(toolsFiles[0]?.content);
      expect(soulFiles[i]?.content).toBe(soulFiles[0]?.content);
    }
    // Verify the actual content matches what we wrote
    expect(agentsFiles[0]?.content).toBe(agentsContent);
    expect(toolsFiles[0]?.content).toBe(toolsContent);
    expect(soulFiles[0]?.content).toBe(soulContent);
  });

  it("returns consistent ordering across calls", async () => {
    const testFiles = [
      { name: DEFAULT_AGENTS_FILENAME, content: "# Agents content" },
      { name: DEFAULT_TOOLS_FILENAME, content: "# Tools content" },
      { name: DEFAULT_SOUL_FILENAME, content: "# Soul content" },
    ];
    // Write all test files
    for (const file of testFiles) {
      await writeWorkspaceFile({ dir: workspaceDir, name: file.name, content: file.content });
    }
    // Load multiple times
    const results = await Promise.all([
      loadWorkspaceBootstrapFiles(workspaceDir),
      loadWorkspaceBootstrapFiles(workspaceDir),
      loadWorkspaceBootstrapFiles(workspaceDir),
    ]);
    // All results should have the same file order
    // (order stability is what keeps the assembled prompt byte-stable)
    for (let i = 1; i < results.length; i++) {
      const names1 = results[0].map((f) => f.name);
      const namesI = results[i].map((f) => f.name);
      expect(namesI).toEqual(names1);
    }
  });

  it("maintains consistency even with missing files", async () => {
    // Only create some files, leave others missing
    await writeWorkspaceFile({
      dir: workspaceDir,
      name: DEFAULT_AGENTS_FILENAME,
      content: "# Agents only",
    });
    // Load multiple times
    const results = await Promise.all([
      loadWorkspaceBootstrapFiles(workspaceDir),
      loadWorkspaceBootstrapFiles(workspaceDir),
      loadWorkspaceBootstrapFiles(workspaceDir),
    ]);
    // All results should be identical
    for (let i = 1; i < results.length; i++) {
      expect(results[i]).toEqual(results[0]);
    }
    // Verify missing files are consistently marked as missing
    for (const result of results) {
      const agentsFile = result.find((f) => f.name === DEFAULT_AGENTS_FILENAME);
      const toolsFile = result.find((f) => f.name === DEFAULT_TOOLS_FILENAME);
      expect(agentsFile?.missing).toBe(false);
      expect(agentsFile?.content).toBe("# Agents only");
      expect(toolsFile?.missing).toBe(true);
      expect(toolsFile?.content).toBeUndefined();
    }
  });

  it("maintains consistency across concurrent loads", async () => {
    const content = "# Concurrent load test";
    await writeWorkspaceFile({ dir: workspaceDir, name: DEFAULT_AGENTS_FILENAME, content });
    // Start multiple concurrent loads
    const promises = Array.from({ length: 20 }, () => loadWorkspaceBootstrapFiles(workspaceDir));
    const results = await Promise.all(promises);
    // All concurrent results should be identical
    for (let i = 1; i < results.length; i++) {
      expect(results[i]).toEqual(results[0]);
    }
    // Verify content consistency
    for (const result of results) {
      const agentsFile = result.find((f) => f.name === DEFAULT_AGENTS_FILENAME);
      expect(agentsFile?.content).toBe(content);
      expect(agentsFile?.missing).toBe(false);
    }
  });
});

144
src/agents/usage.test.ts Normal file
View File

@@ -0,0 +1,144 @@
import { describe, expect, it } from "vitest";
import {
  normalizeUsage,
  hasNonzeroUsage,
  derivePromptTokens,
  deriveSessionTotalTokens,
} from "./usage.js";

// Unit coverage for usage normalization and the derived token totals that feed
// /status output: cache fields must survive normalization under every naming
// variant and must count toward prompt/session totals.
describe("normalizeUsage", () => {
  it("normalizes cache fields from provider response", () => {
    // Canonical camelCase field names pass straight through.
    expect(
      normalizeUsage({ input: 1000, output: 500, cacheRead: 2000, cacheWrite: 300 }),
    ).toEqual({ input: 1000, output: 500, cacheRead: 2000, cacheWrite: 300, total: undefined });
  });

  it("normalizes cache fields from alternate naming", () => {
    // Anthropic-style snake_case names map onto the canonical fields.
    expect(
      normalizeUsage({
        input_tokens: 1000,
        output_tokens: 500,
        cache_read_input_tokens: 2000,
        cache_creation_input_tokens: 300,
      }),
    ).toEqual({ input: 1000, output: 500, cacheRead: 2000, cacheWrite: 300, total: undefined });
  });

  it("handles cache_read and cache_write naming variants", () => {
    expect(normalizeUsage({ input: 1000, cache_read: 1500, cache_write: 200 })).toEqual({
      input: 1000,
      output: undefined,
      cacheRead: 1500,
      cacheWrite: 200,
      total: undefined,
    });
  });

  it("returns undefined when no valid fields are provided", () => {
    expect(normalizeUsage(null)).toBeUndefined();
  });

  it("handles undefined input", () => {
    expect(normalizeUsage(undefined)).toBeUndefined();
  });
});

describe("hasNonzeroUsage", () => {
  it("returns true when cache read is nonzero", () => {
    expect(hasNonzeroUsage({ cacheRead: 100 })).toBe(true);
  });

  it("returns true when cache write is nonzero", () => {
    expect(hasNonzeroUsage({ cacheWrite: 50 })).toBe(true);
  });

  it("returns true when both cache fields are nonzero", () => {
    expect(hasNonzeroUsage({ cacheRead: 100, cacheWrite: 50 })).toBe(true);
  });

  it("returns false when cache fields are zero", () => {
    expect(hasNonzeroUsage({ cacheRead: 0, cacheWrite: 0 })).toBe(false);
  });

  it("returns false for undefined usage", () => {
    expect(hasNonzeroUsage(undefined)).toBe(false);
  });
});

describe("derivePromptTokens", () => {
  it("includes cache tokens in prompt total", () => {
    // Prompt size = input + cacheRead + cacheWrite = 1000 + 500 + 200.
    expect(derivePromptTokens({ input: 1000, cacheRead: 500, cacheWrite: 200 })).toBe(1700);
  });

  it("handles missing cache fields", () => {
    expect(derivePromptTokens({ input: 1000 })).toBe(1000);
  });

  it("returns undefined for empty usage", () => {
    expect(derivePromptTokens({})).toBeUndefined();
  });
});

describe("deriveSessionTotalTokens", () => {
  it("includes cache tokens in total calculation", () => {
    const total = deriveSessionTotalTokens({
      usage: { input: 1000, cacheRead: 500, cacheWrite: 200 },
      contextTokens: 4000,
    });
    expect(total).toBe(1700); // 1000 + 500 + 200
  });

  it("prefers promptTokens override over derived total", () => {
    const total = deriveSessionTotalTokens({
      usage: { input: 1000, cacheRead: 500, cacheWrite: 200 },
      contextTokens: 4000,
      promptTokens: 2500, // explicit override wins over the derived sum
    });
    expect(total).toBe(2500);
  });
});

View File

@@ -0,0 +1,130 @@
// Verifies the mtime-based file cache behind loadWorkspaceBootstrapFiles:
// cached reads, invalidation on modification, deletion handling, concurrent
// access, and per-path cache independence.
import fs from "node:fs/promises";
import path from "node:path";
import { describe, expect, it, beforeEach } from "vitest";
import { makeTempWorkspace, writeWorkspaceFile } from "../test-helpers/workspace.js";
import { loadWorkspaceBootstrapFiles, DEFAULT_AGENTS_FILENAME } from "./workspace.js";

describe("workspace bootstrap file caching", () => {
  let workspaceDir: string;

  beforeEach(async () => {
    workspaceDir = await makeTempWorkspace("openclaw-bootstrap-cache-test-");
  });

  it("returns cached content when mtime unchanged", async () => {
    const content1 = "# Initial content";
    await writeWorkspaceFile({
      dir: workspaceDir,
      name: DEFAULT_AGENTS_FILENAME,
      content: content1,
    });
    // First load
    const result1 = await loadWorkspaceBootstrapFiles(workspaceDir);
    const agentsFile1 = result1.find((f) => f.name === DEFAULT_AGENTS_FILENAME);
    expect(agentsFile1?.content).toBe(content1);
    expect(agentsFile1?.missing).toBe(false);
    // Second load should use cached content (same mtime)
    const result2 = await loadWorkspaceBootstrapFiles(workspaceDir);
    const agentsFile2 = result2.find((f) => f.name === DEFAULT_AGENTS_FILENAME);
    expect(agentsFile2?.content).toBe(content1);
    expect(agentsFile2?.missing).toBe(false);
    // Verify both calls returned the same content without re-reading
    // NOTE(review): this only checks content equality; it cannot actually
    // observe whether the second call skipped the disk read.
    expect(agentsFile1?.content).toBe(agentsFile2?.content);
  });

  it("invalidates cache when mtime changes", async () => {
    const content1 = "# Initial content";
    const content2 = "# Updated content";
    await writeWorkspaceFile({
      dir: workspaceDir,
      name: DEFAULT_AGENTS_FILENAME,
      content: content1,
    });
    // First load
    const result1 = await loadWorkspaceBootstrapFiles(workspaceDir);
    const agentsFile1 = result1.find((f) => f.name === DEFAULT_AGENTS_FILENAME);
    expect(agentsFile1?.content).toBe(content1);
    // Wait a bit to ensure mtime will be different
    // NOTE(review): 10 ms may be below the mtime granularity of some
    // filesystems, so this test could intermittently exercise the stale-cache
    // path instead — confirm against the cache's freshness signals.
    await new Promise((resolve) => setTimeout(resolve, 10));
    // Modify the file
    await writeWorkspaceFile({
      dir: workspaceDir,
      name: DEFAULT_AGENTS_FILENAME,
      content: content2,
    });
    // Second load should detect the change and return new content
    const result2 = await loadWorkspaceBootstrapFiles(workspaceDir);
    const agentsFile2 = result2.find((f) => f.name === DEFAULT_AGENTS_FILENAME);
    expect(agentsFile2?.content).toBe(content2);
    expect(agentsFile2?.missing).toBe(false);
  });

  it("handles file deletion gracefully", async () => {
    const content = "# Some content";
    const filePath = path.join(workspaceDir, DEFAULT_AGENTS_FILENAME);
    await writeWorkspaceFile({ dir: workspaceDir, name: DEFAULT_AGENTS_FILENAME, content });
    // First load
    const result1 = await loadWorkspaceBootstrapFiles(workspaceDir);
    const agentsFile1 = result1.find((f) => f.name === DEFAULT_AGENTS_FILENAME);
    expect(agentsFile1?.content).toBe(content);
    expect(agentsFile1?.missing).toBe(false);
    // Delete the file
    await fs.unlink(filePath);
    // Second load should handle deletion gracefully
    // (a previously cached entry must not resurrect deleted content)
    const result2 = await loadWorkspaceBootstrapFiles(workspaceDir);
    const agentsFile2 = result2.find((f) => f.name === DEFAULT_AGENTS_FILENAME);
    expect(agentsFile2?.missing).toBe(true);
    expect(agentsFile2?.content).toBeUndefined();
  });

  it("handles concurrent access", async () => {
    const content = "# Concurrent test content";
    await writeWorkspaceFile({ dir: workspaceDir, name: DEFAULT_AGENTS_FILENAME, content });
    // Multiple concurrent loads should all succeed
    const promises = Array.from({ length: 10 }, () => loadWorkspaceBootstrapFiles(workspaceDir));
    const results = await Promise.all(promises);
    // All results should be identical
    for (const result of results) {
      const agentsFile = result.find((f) => f.name === DEFAULT_AGENTS_FILENAME);
      expect(agentsFile?.content).toBe(content);
      expect(agentsFile?.missing).toBe(false);
    }
  });

  it("caches files independently by path", async () => {
    const content1 = "# File 1 content";
    const content2 = "# File 2 content";
    // Create two different workspace directories
    const workspace1 = await makeTempWorkspace("openclaw-cache-test1-");
    const workspace2 = await makeTempWorkspace("openclaw-cache-test2-");
    await writeWorkspaceFile({ dir: workspace1, name: DEFAULT_AGENTS_FILENAME, content: content1 });
    await writeWorkspaceFile({ dir: workspace2, name: DEFAULT_AGENTS_FILENAME, content: content2 });
    // Load from both workspaces
    const result1 = await loadWorkspaceBootstrapFiles(workspace1);
    const result2 = await loadWorkspaceBootstrapFiles(workspace2);
    const agentsFile1 = result1.find((f) => f.name === DEFAULT_AGENTS_FILENAME);
    const agentsFile2 = result2.find((f) => f.name === DEFAULT_AGENTS_FILENAME);
    expect(agentsFile1?.content).toBe(content1);
    expect(agentsFile2?.content).toBe(content2);
  });
});

View File

@@ -36,6 +36,35 @@ const WORKSPACE_STATE_VERSION = 1;
const workspaceTemplateCache = new Map<string, Promise<string>>();
let gitAvailabilityPromise: Promise<boolean> | null = null;
// File content cache keyed by absolute path to avoid redundant disk reads.
const workspaceFileCache = new Map<string, { content: string; mtimeMs: number; size: number }>();

/**
 * Read a file with caching. Returns cached content when the file appears
 * unchanged, otherwise reads from disk and updates the cache.
 *
 * An entry is considered fresh only when BOTH mtimeMs and size match the
 * current stat. Checking size in addition to mtime guards against
 * mtime-granularity collisions: on filesystems with coarse timestamps, two
 * writes within the same tick would otherwise serve stale content whenever
 * the content length changed. (Same-size rewrites within one tick can still
 * slip through — an inherent limit of stat-based invalidation.)
 *
 * On any stat/read error the cache entry is evicted and the error is
 * rethrown, so callers observe the same failures as a plain fs.readFile.
 */
async function readFileWithCache(filePath: string): Promise<string> {
  try {
    const stats = await fs.stat(filePath);
    const mtimeMs = stats.mtimeMs;
    const size = stats.size;
    const cached = workspaceFileCache.get(filePath);
    // Serve from cache only when both freshness signals match.
    if (cached && cached.mtimeMs === mtimeMs && cached.size === size) {
      return cached.content;
    }
    // Read from disk and refresh the cache entry.
    const content = await fs.readFile(filePath, "utf-8");
    workspaceFileCache.set(filePath, { content, mtimeMs, size });
    return content;
  } catch (error) {
    // Drop any stale entry if the file is missing or unreadable.
    workspaceFileCache.delete(filePath);
    throw error;
  }
}
function stripFrontMatter(content: string): string {
if (!content.startsWith("---")) {
return content;
@@ -451,7 +480,7 @@ export async function loadWorkspaceBootstrapFiles(dir: string): Promise<Workspac
const result: WorkspaceBootstrapFile[] = [];
for (const entry of entries) {
try {
const content = await fs.readFile(entry.filePath, "utf-8");
const content = await readFileWithCache(entry.filePath);
result.push({
name: entry.name,
path: entry.filePath,
@@ -531,7 +560,7 @@ export async function loadExtraBootstrapFiles(
if (!VALID_BOOTSTRAP_NAMES.has(baseName)) {
continue;
}
const content = await fs.readFile(realFilePath, "utf-8");
const content = await readFileWithCache(realFilePath);
result.push({
name: baseName as WorkspaceBootstrapFileName,
path: filePath,

View File

@@ -269,6 +269,8 @@ export async function incrementCompactionCount(params: {
// Clear input/output breakdown since we only have the total estimate after compaction
updates.inputTokens = undefined;
updates.outputTokens = undefined;
updates.cacheRead = undefined;
updates.cacheWrite = undefined;
}
sessionStore[sessionKey] = {
...entry,

View File

@@ -86,6 +86,8 @@ export async function persistSessionUsageUpdate(params: {
const patch: Partial<SessionEntry> = {
inputTokens: input,
outputTokens: output,
cacheRead: params.usage?.cacheRead ?? 0,
cacheWrite: params.usage?.cacheWrite ?? 0,
// Missing a last-call snapshot means context utilization is stale/unknown.
totalTokens,
totalTokensFresh: typeof totalTokens === "number",

View File

@@ -264,6 +264,36 @@ const formatUsagePair = (input?: number | null, output?: number | null) => {
return `🧮 Tokens: ${inputLabel} in / ${outputLabel} out`;
};
/**
 * Build the cache summary line for /status output, e.g.
 * "🗄️ Cache: 42% hit · 1.2k cached, 300 new".
 *
 * Returns null when neither cacheRead nor cacheWrite is a positive number so
 * callers can omit the line entirely when there was no cache activity.
 *
 * Hit rate is cacheRead / (cacheRead + cacheWrite + input), rounded;
 * non-numeric values count as 0 in the denominator.
 */
const formatCacheLine = (
  input?: number | null,
  cacheRead?: number | null,
  cacheWrite?: number | null,
) => {
  // Single guard: no positive cache activity -> no line. (A preceding
  // truthiness check on cacheRead/cacheWrite was redundant — this condition
  // already returns null for every null/undefined/zero combination.)
  if (
    (typeof cacheRead !== "number" || cacheRead <= 0) &&
    (typeof cacheWrite !== "number" || cacheWrite <= 0)
  ) {
    return null;
  }
  const cachedLabel = typeof cacheRead === "number" ? formatTokenCount(cacheRead) : "0";
  const newLabel = typeof cacheWrite === "number" ? formatTokenCount(cacheWrite) : "0";
  // Denominator mirrors the prompt-token derivation: input + cacheRead + cacheWrite.
  const totalInput =
    (typeof cacheRead === "number" ? cacheRead : 0) +
    (typeof cacheWrite === "number" ? cacheWrite : 0) +
    (typeof input === "number" ? input : 0);
  const hitRate =
    totalInput > 0 && typeof cacheRead === "number"
      ? Math.round((cacheRead / totalInput) * 100)
      : 0;
  return `🗄️ Cache: ${hitRate}% hit · ${cachedLabel} cached, ${newLabel} new`;
};
const formatMediaUnderstandingLine = (decisions?: ReadonlyArray<MediaUnderstandingDecision>) => {
if (!decisions || decisions.length === 0) {
return null;
@@ -359,6 +389,8 @@ export function buildStatusMessage(args: StatusArgs): string {
let inputTokens = entry?.inputTokens;
let outputTokens = entry?.outputTokens;
let cacheRead = entry?.cacheRead;
let cacheWrite = entry?.cacheWrite;
let totalTokens = entry?.totalTokens ?? (entry?.inputTokens ?? 0) + (entry?.outputTokens ?? 0);
// Prefer prompt-size tokens from the session transcript when it looks larger
@@ -509,6 +541,7 @@ export function buildStatusMessage(args: StatusArgs): string {
const commit = resolveCommitHash();
const versionLine = `🦞 OpenClaw ${VERSION}${commit ? ` (${commit})` : ""}`;
const usagePair = formatUsagePair(inputTokens, outputTokens);
const cacheLine = formatCacheLine(inputTokens, cacheRead, cacheWrite);
const costLine = costLabel ? `💵 Cost: ${costLabel}` : null;
const usageCostLine =
usagePair && costLine ? `${usagePair} · ${costLine}` : (usagePair ?? costLine);
@@ -521,6 +554,7 @@ export function buildStatusMessage(args: StatusArgs): string {
modelLine,
fallbackLine,
usageCostLine,
cacheLine,
`📚 ${contextLine}`,
mediaLine,
args.usageLine,

View File

@@ -76,6 +76,8 @@ export async function updateSessionStoreAfterAgentRun(params: {
next.outputTokens = output;
next.totalTokens = totalTokens;
next.totalTokensFresh = true;
next.cacheRead = usage.cacheRead ?? 0;
next.cacheWrite = usage.cacheWrite ?? 0;
}
if (compactionsThisRun > 0) {
next.compactionCount = (entry.compactionCount ?? 0) + compactionsThisRun;

View File

@@ -20,6 +20,8 @@ function createDefaultSessionStoreEntry() {
thinkingLevel: "low",
inputTokens: 2_000,
outputTokens: 3_000,
cacheRead: 2_000,
cacheWrite: 1_000,
totalTokens: 5_000,
contextTokens: 10_000,
model: "pi:opus",
@@ -340,6 +342,8 @@ describe("statusCommand", () => {
expect(payload.sessions.defaults.model).toBeTruthy();
expect(payload.sessions.defaults.contextTokens).toBeGreaterThan(0);
expect(payload.sessions.recent[0].percentUsed).toBe(50);
expect(payload.sessions.recent[0].cacheRead).toBe(2_000);
expect(payload.sessions.recent[0].cacheWrite).toBe(1_000);
expect(payload.sessions.recent[0].totalTokensFresh).toBe(true);
expect(payload.sessions.recent[0].remainingTokens).toBe(5000);
expect(payload.sessions.recent[0].flags).toContain("verbose:on");
@@ -387,6 +391,7 @@ describe("statusCommand", () => {
expect(logs.some((l: string) => l.includes("Sessions"))).toBe(true);
expect(logs.some((l: string) => l.includes("+1000"))).toBe(true);
expect(logs.some((l: string) => l.includes("50%"))).toBe(true);
expect(logs.some((l: string) => l.includes("40% cached"))).toBe(true);
expect(logs.some((l: string) => l.includes("LaunchAgent"))).toBe(true);
expect(logs.some((l: string) => l.includes("FAQ:"))).toBe(true);
expect(logs.some((l: string) => l.includes("Troubleshooting:"))).toBe(true);

View File

@@ -21,18 +21,37 @@ export const shortenText = (value: string, maxLen: number) => {
};
export const formatTokensCompact = (
sess: Pick<SessionStatus, "totalTokens" | "contextTokens" | "percentUsed">,
sess: Pick<
SessionStatus,
"totalTokens" | "contextTokens" | "percentUsed" | "cacheRead" | "cacheWrite"
>,
) => {
const used = sess.totalTokens;
const ctx = sess.contextTokens;
const cacheRead = sess.cacheRead;
const cacheWrite = sess.cacheWrite;
let result = "";
if (used == null) {
return ctx ? `unknown/${formatKTokens(ctx)} (?%)` : "unknown used";
result = ctx ? `unknown/${formatKTokens(ctx)} (?%)` : "unknown used";
} else if (!ctx) {
result = `${formatKTokens(used)} used`;
} else {
const pctLabel = sess.percentUsed != null ? `${sess.percentUsed}%` : "?%";
result = `${formatKTokens(used)}/${formatKTokens(ctx)} (${pctLabel})`;
}
if (!ctx) {
return `${formatKTokens(used)} used`;
// Add cache hit rate if there are cached reads
if (typeof cacheRead === "number" && cacheRead > 0) {
const total =
typeof used === "number"
? used
: cacheRead + (typeof cacheWrite === "number" ? cacheWrite : 0);
const hitRate = Math.round((cacheRead / total) * 100);
result += ` · 🗄️ ${hitRate}% cached`;
}
const pctLabel = sess.percentUsed != null ? `${sess.percentUsed}%` : "?%";
return `${formatKTokens(used)}/${formatKTokens(ctx)} (${pctLabel})`;
return result;
};
export const formatDaemonRuntimeShort = (runtime?: {

View File

@@ -160,6 +160,8 @@ export async function getStatusSummary(
abortedLastRun: entry?.abortedLastRun,
inputTokens: entry?.inputTokens,
outputTokens: entry?.outputTokens,
cacheRead: entry?.cacheRead,
cacheWrite: entry?.cacheWrite,
totalTokens: total ?? null,
totalTokensFresh,
remainingTokens: remaining,

View File

@@ -17,6 +17,8 @@ export type SessionStatus = {
outputTokens?: number;
totalTokens: number | null;
totalTokensFresh: boolean;
cacheRead?: number;
cacheWrite?: number;
remainingTokens: number | null;
percentUsed: number | null;
model: string | null;

View File

@@ -0,0 +1,68 @@
import { describe, expect, it } from "vitest";
import type { SessionEntry } from "./types.js";
import { mergeSessionEntry } from "./types.js";

// Coverage for the optional cacheRead/cacheWrite counters on SessionEntry and
// how mergeSessionEntry treats them (patch wins, explicit undefined clears).
describe("SessionEntry cache fields", () => {
  it("supports cacheRead and cacheWrite fields", () => {
    const session: SessionEntry = {
      sessionId: "test-session",
      updatedAt: Date.now(),
      cacheRead: 1500,
      cacheWrite: 300,
    };
    expect(session.cacheRead).toBe(1500);
    expect(session.cacheWrite).toBe(300);
  });

  it("merges cache fields properly", () => {
    const base: SessionEntry = {
      sessionId: "test-session",
      updatedAt: Date.now(),
      cacheRead: 1000,
      cacheWrite: 200,
      totalTokens: 5000,
    };
    const delta: Partial<SessionEntry> = {
      cacheRead: 1500,
      cacheWrite: 300,
    };
    const result = mergeSessionEntry(base, delta);
    // Patched fields win; untouched fields carry over from the base entry.
    expect(result.cacheRead).toBe(1500);
    expect(result.cacheWrite).toBe(300);
    expect(result.totalTokens).toBe(5000);
  });

  it("handles undefined cache fields", () => {
    // Both counters are optional: an entry without them is valid.
    const session: SessionEntry = {
      sessionId: "test-session",
      updatedAt: Date.now(),
      totalTokens: 5000,
    };
    expect(session.cacheRead).toBeUndefined();
    expect(session.cacheWrite).toBeUndefined();
  });

  it("allows cache fields to be cleared with undefined", () => {
    const base: SessionEntry = {
      sessionId: "test-session",
      updatedAt: Date.now(),
      cacheRead: 1000,
      cacheWrite: 200,
    };
    // Explicit undefined in the patch clears the previously stored values.
    const cleared = mergeSessionEntry(base, {
      cacheRead: undefined,
      cacheWrite: undefined,
    });
    expect(cleared.cacheRead).toBeUndefined();
    expect(cleared.cacheWrite).toBeUndefined();
  });
});

View File

@@ -78,6 +78,8 @@ export type SessionEntry = {
* totalTokens as stale/unknown for context-utilization displays.
*/
totalTokensFresh?: boolean;
cacheRead?: number;
cacheWrite?: number;
modelProvider?: string;
model?: string;
/**

View File

@@ -538,6 +538,8 @@ export async function runCronIsolatedAgentTurn(params: {
cronSession.sessionEntry.outputTokens = output;
cronSession.sessionEntry.totalTokens = totalTokens;
cronSession.sessionEntry.totalTokensFresh = true;
cronSession.sessionEntry.cacheRead = usage.cacheRead ?? 0;
cronSession.sessionEntry.cacheWrite = usage.cacheWrite ?? 0;
telemetry = {
model: modelUsed,