fix(logging): expose trace fields in file logs

This commit is contained in:
Vincent Koc
2026-04-26 12:23:42 -07:00
parent edf40ab6c9
commit df542f75a9
5 changed files with 83 additions and 23 deletions

View File

@@ -6,6 +6,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Logging: write validated diagnostic trace context as top-level `traceId`, `spanId`, `parentSpanId`, and `traceFlags` fields in file-log JSONL records so traced requests and model calls are easier to correlate in log processors. Refs #40353. Thanks @liangruochong44-ui.
- Logging/sessions: apply configured redaction patterns to persisted session transcript text and accept escaped character classes in safe custom redaction regexes, so transcript JSONL no longer keeps matching sensitive text in the clear. Fixes #42982. Thanks @panpan0000.
- Auto-reply: poison inbound message dedupe after replay-unsafe provider/runtime failures so retries stay safe before visible progress but cannot duplicate messages after block output, tool side effects, or session progress. Fixes #69303; keeps #58549 and #64606 as duplicate validation. Thanks @martingarramon, @NikolaFC, and @zeroth-blip.
- Agents/model fallback: jump directly to a known later live-session model redirect instead of walking unrelated fallback candidates, while preserving the already-landed live-session/fallback loop guard. Fixes #57471; related loop family already closed via #58496. Thanks @yuxiaoyang2007-prog.

View File

@@ -150,6 +150,10 @@ When any subkey is enabled, model and tool spans get bounded, redacted
- **Logs:** OTLP logs respect `logging.level` (file log level). They use the
diagnostic log-record redaction path, not console formatting. High-volume
installs should prefer OTLP collector sampling/filtering over local sampling.
- **File-log correlation:** JSONL file logs include top-level `traceId`,
`spanId`, `parentSpanId`, and `traceFlags` when the log call carries a valid
diagnostic trace context, which lets log processors join local log lines with
exported spans.
## Exported metrics

View File

@@ -157,6 +157,13 @@ You can override both via the **`OPENCLAW_LOG_LEVEL`** environment variable (e.g
`--verbose` only affects console output and WS log verbosity; it does not change
file log levels.
### Trace correlation
File logs are JSONL. When a log call carries a valid diagnostic trace context,
OpenClaw writes the trace fields as top-level JSON keys (`traceId`, `spanId`,
`parentSpanId`, `traceFlags`) so external log processors can correlate the line
with OTEL spans and provider `traceparent` propagation.
### Console styles
`logging.consoleStyle`:

View File

@@ -1,9 +1,11 @@
import fs from "node:fs";
import { afterAll, afterEach, beforeAll, describe, expect, it } from "vitest";
import { getLogger, resetLogger, setLoggerOverride } from "../logging.js";
import { getChildLogger, getLogger, resetLogger, setLoggerOverride } from "../logging.js";
import { createSuiteLogPathTracker } from "./log-test-helpers.js";
const secret = "sk-testsecret1234567890abcd";
const TRACE_ID = "4bf92f3577b34da6a3ce929d0e0e4736";
const SPAN_ID = "00f067aa0ba902b7";
const logPathTracker = createSuiteLogPathTracker("openclaw-log-redaction-");
const originalConfigPath = process.env.OPENCLAW_CONFIG_PATH;
const originalTestFileLog = process.env.OPENCLAW_TEST_FILE_LOG;
@@ -75,4 +77,24 @@ describe("file log redaction", () => {
const content = fs.readFileSync(logPath, "utf8");
expect(content).toContain("configured log path works");
});
it("writes trace context as top-level JSONL fields", () => {
  const logPath = logPathTracker.nextPath();
  setLoggerOverride({ level: "info", file: logPath });
  const logger = getChildLogger({
    subsystem: "gateway",
    trace: { traceId: TRACE_ID, spanId: SPAN_ID },
  });
  logger.info({ route: "/api/health" }, "request completed");
  const [line] = fs.readFileSync(logPath, "utf8").trim().split("\n");
  const record = JSON.parse(line ?? "{}") as Record<string, unknown>;
  // Trace fields must land as top-level JSON keys, not nested under "trace".
  expect(record.traceId).toBe(TRACE_ID);
  expect(record.spanId).toBe(SPAN_ID);
  // Optional trace fields that were not supplied must be omitted entirely,
  // not serialized as null/empty values.
  expect(record.parentSpanId).toBeUndefined();
  expect(record.traceFlags).toBeUndefined();
});
});

View File

@@ -202,6 +202,37 @@ function extractTraceContext(value: unknown): DiagnosticTraceContext | undefined
return normalizeTraceContext((value as { trace?: unknown }).trace);
}
/**
 * Collects the positional log arguments that ts-log stores under numeric
 * string keys ("0", "1", …) and returns their values in numeric key order.
 */
function getSortedNumericLogArgs(logObj: TsLogRecord): unknown[] {
  const numericEntries: Array<[string, unknown]> = [];
  for (const [key, value] of Object.entries(logObj)) {
    if (/^\d+$/.test(key)) {
      numericEntries.push([key, value]);
    }
  }
  // Sort numerically, not lexicographically, so "10" comes after "2".
  numericEntries.sort((left, right) => Number(left[0]) - Number(right[0]));
  return numericEntries.map((entry) => entry[1]);
}
/**
 * Splits a possible JSON-bindings prefix off the positional log arguments.
 *
 * When the first argument is a small string that looks like a JSON object,
 * it is parsed and returned as `bindings` with the remaining arguments in
 * `args`; otherwise all arguments are returned untouched in `args`.
 */
function extractLogBindingPrefix(numericArgs: unknown[]): {
  bindings?: Record<string, unknown>;
  args: unknown[];
} {
  const head = numericArgs[0];
  // Guard clauses: only a bounded-length string starting with "{" is a
  // candidate bindings prefix.
  if (
    typeof head !== "string" ||
    head.length > MAX_DIAGNOSTIC_LOG_BINDINGS_JSON_CHARS ||
    !head.trim().startsWith("{")
  ) {
    return { args: numericArgs };
  }
  try {
    const parsed: unknown = JSON.parse(head);
    // Only a plain object counts as bindings; arrays/null stay positional.
    if (parsed !== null && typeof parsed === "object" && !Array.isArray(parsed)) {
      return {
        bindings: parsed as Record<string, unknown>,
        args: numericArgs.slice(1),
      };
    }
  } catch {
    // ignore malformed json bindings
  }
  return { args: numericArgs };
}
function findLogTraceContext(
bindings: Record<string, unknown> | undefined,
numericArgs: readonly unknown[],
@@ -219,6 +250,20 @@ function findLogTraceContext(
return undefined;
}
/**
 * Derives the top-level trace correlation fields for a file-log JSONL record.
 *
 * Returns `undefined` when the log call carries no valid trace context;
 * otherwise returns `traceId` plus whichever of `spanId`, `parentSpanId`,
 * and `traceFlags` are present (absent fields are omitted, never emitted
 * as empty values).
 */
function buildTraceFileLogFields(logObj: TsLogRecord): Record<string, string> | undefined {
  const prefix = extractLogBindingPrefix(getSortedNumericLogArgs(logObj));
  const trace = findLogTraceContext(prefix.bindings, prefix.args);
  if (!trace) {
    return undefined;
  }
  const fields: Record<string, string> = { traceId: trace.traceId };
  if (trace.spanId) {
    fields.spanId = trace.spanId;
  }
  if (trace.parentSpanId) {
    fields.parentSpanId = trace.parentSpanId;
  }
  if (trace.traceFlags) {
    fields.traceFlags = trace.traceFlags;
  }
  return fields;
}
function buildDiagnosticLogRecord(logObj: TsLogRecord) {
const meta = logObj._meta as
| {
@@ -235,27 +280,7 @@ function buildDiagnosticLogRecord(logObj: TsLogRecord) {
};
}
| undefined;
const numericArgs = Object.entries(logObj)
.filter(([key]) => /^\d+$/.test(key))
.toSorted((a, b) => Number(a[0]) - Number(b[0]))
.map(([, value]) => value);
let bindings: Record<string, unknown> | undefined;
if (
typeof numericArgs[0] === "string" &&
numericArgs[0].length <= MAX_DIAGNOSTIC_LOG_BINDINGS_JSON_CHARS &&
numericArgs[0].trim().startsWith("{")
) {
try {
const parsed = JSON.parse(numericArgs[0]);
if (parsed && typeof parsed === "object" && !Array.isArray(parsed)) {
bindings = parsed as Record<string, unknown>;
numericArgs.shift();
}
} catch {
// ignore malformed json bindings
}
}
const { bindings, args: numericArgs } = extractLogBindingPrefix(getSortedNumericLogArgs(logObj));
const trace = findLogTraceContext(bindings, numericArgs);
const structuredArg = numericArgs[0];
@@ -420,7 +445,8 @@ function buildLogger(settings: ResolvedSettings): TsLogger<LogObj> {
currentFileBytes = getCurrentLogFileBytes(activeFile);
}
const time = formatTimestamp(logObj.date ?? new Date(), { style: "long" });
const line = redactSensitiveText(JSON.stringify({ ...logObj, time }));
const traceFields = buildTraceFileLogFields(logObj as TsLogRecord);
const line = redactSensitiveText(JSON.stringify({ ...logObj, time, ...traceFields }));
const payload = `${line}\n`;
const payloadBytes = Buffer.byteLength(payload, "utf8");
const nextBytes = currentFileBytes + payloadBytes;