diff --git a/src/agents/pi-embedded-runner/replay-history.ts b/src/agents/pi-embedded-runner/replay-history.ts index 20f943231c3..25b156998dd 100644 --- a/src/agents/pi-embedded-runner/replay-history.ts +++ b/src/agents/pi-embedded-runner/replay-history.ts @@ -227,6 +227,26 @@ function stripStaleAssistantUsageBeforeLatestCompaction(messages: AgentMessage[] return touched ? out : messages; } +export function normalizeAssistantReplayContent(messages: AgentMessage[]): AgentMessage[] { + let touched = false; + const out = [...messages]; + for (let i = 0; i < out.length; i += 1) { + const message = out[i] as (AgentMessage & { role?: unknown; content?: unknown }) | undefined; + if (!message || message.role !== "assistant" || Array.isArray(message.content)) { + continue; + } + out[i] = { + ...(message as unknown as Record<string, unknown>), + content: + typeof message.content === "string" + ? [{ type: "text", text: message.content }] + : [{ type: "text", text: "" }], + } as AgentMessage; + touched = true; + } + return touched ? out : messages; +} + function normalizeAssistantUsageSnapshot(usage: unknown) { const normalized = normalizeUsage((usage ?? 
undefined) as UsageLike | undefined); if (!normalized) { @@ -443,8 +463,9 @@ export async function sanitizeSessionHistory(params: { params.modelApi === "openai-responses" || params.modelApi === "openai-codex-responses" || params.modelApi === "azure-openai-responses"; + const normalizedAssistantReplay = normalizeAssistantReplayContent(withInterSessionMarkers); const sanitizedImages = await sanitizeSessionMessagesImages( - withInterSessionMarkers, + normalizedAssistantReplay, "session:history", { sanitizeMode: policy.sanitizeMode, diff --git a/src/agents/pi-embedded-runner/run/attempt.ts b/src/agents/pi-embedded-runner/run/attempt.ts index 0e4bc64c375..c462873b94c 100644 --- a/src/agents/pi-embedded-runner/run/attempt.ts +++ b/src/agents/pi-embedded-runner/run/attempt.ts @@ -138,7 +138,11 @@ import { type PromptCacheChange, } from "../prompt-cache-observability.js"; import { resolveCacheRetention } from "../prompt-cache-retention.js"; -import { sanitizeSessionHistory, validateReplayTurns } from "../replay-history.js"; +import { + normalizeAssistantReplayContent, + sanitizeSessionHistory, + validateReplayTurns, +} from "../replay-history.js"; import { observeReplayMetadata, replayMetadataFromState } from "../replay-state.js"; import { clearActiveEmbeddedRun, @@ -1140,6 +1144,9 @@ export async function runEmbeddedAttempt( throw new Error("Embedded agent session missing"); } const activeSession = session; + const baseConvertToLlm = activeSession.agent.convertToLlm.bind(activeSession.agent); + activeSession.agent.convertToLlm = async (messages) => + await baseConvertToLlm(normalizeAssistantReplayContent(messages)); let prePromptMessageCount = activeSession.messages.length; abortSessionForYield = () => { yieldAbortSettled = Promise.resolve(activeSession.abort()); @@ -2179,6 +2186,12 @@ export async function runEmbeddedAttempt( } if (!skipPromptSubmission) { + const normalizedReplayMessages = normalizeAssistantReplayContent( + activeSession.messages, + ); + if 
(normalizedReplayMessages !== activeSession.messages) { + activeSession.agent.state.messages = normalizedReplayMessages; + } finalPromptText = effectivePrompt; const btwSnapshotMessages = activeSession.messages.slice(-MAX_BTW_SNAPSHOT_MESSAGES); updateActiveEmbeddedRunSnapshot(params.sessionId, { diff --git a/src/agents/pi-embedded-runner/sanitize-session-history.tool-result-details.test.ts b/src/agents/pi-embedded-runner/sanitize-session-history.tool-result-details.test.ts index 54d1afb027a..a951176e2a0 100644 --- a/src/agents/pi-embedded-runner/sanitize-session-history.tool-result-details.test.ts +++ b/src/agents/pi-embedded-runner/sanitize-session-history.tool-result-details.test.ts @@ -60,4 +60,25 @@ describe("sanitizeSessionHistory toolResult details stripping", () => { const serialized = JSON.stringify(sanitized); expect(serialized).not.toContain("Ignore previous instructions"); }); + + it("normalizes malformed assistant string content before replay sanitization", async () => { + const sm = SessionManager.inMemory(); + + const sanitized = await sanitizeSessionHistory({ + messages: [ + { role: "assistant", content: "plain reply", timestamp: 1 } as unknown as AgentMessage, + { role: "user", content: "continue", timestamp: 2 } satisfies UserMessage, + ], + modelApi: "openai-responses", + provider: "github-copilot", + modelId: "gpt-5-mini", + sessionManager: sm, + sessionId: "test", + }); + + expect(sanitized[0]).toMatchObject({ + role: "assistant", + content: [{ type: "text", text: "plain reply" }], + }); + }); });