fix(replay): preserve synthetic tool repair aliases

* fix(replay): preserve synthetic tool repair aliases

* test(replay): cover Bedrock repair ownership
This commit is contained in:
Vincent Koc
2026-04-23 19:33:05 -07:00
committed by GitHub
parent a958b6e723
commit d42069b11e
3 changed files with 102 additions and 6 deletions

View File

@@ -22,6 +22,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Agents/replay: stop OpenAI/Codex transcript replay from synthesizing missing tool results while still preserving synthetic repair on Anthropic, Gemini, and Bedrock transport-owned sessions. (#61556) Thanks @VictorJeon and @vincentkoc.
- Agents/WebChat: surface non-retryable provider failures such as billing, auth, and rate-limit errors from the embedded runner instead of logging `surface_error` and leaving webchat with no rendered error. Fixes #70124. (#70848) Thanks @truffle-dev.
- Memory/CLI: declare the built-in `local` embedding provider in the memory-core manifest, so standalone `openclaw memory status`, `index`, and `search` can resolve local embeddings just like the gateway runtime. Fixes #70836. (#70873) Thanks @mattznojassist.
- Gateway/WebChat: preserve image attachments for text-only primary models by offloading them as media refs instead of dropping them, so configured image tools can still inspect the original file. Fixes #68513, #44276, #51656, #70212.

View File

@@ -0,0 +1,82 @@
import type { Api, Context, Model } from "@mariozechner/pi-ai";
import { describe, expect, it } from "vitest";
import { transformTransportMessages } from "./transport-message-transform.js";
/**
 * Builds a minimal Model stub for the transform under test.
 * Only the fields the transform reads are populated; the double cast is
 * deliberate because a fully-typed Model is not needed by these tests.
 */
function makeModel(api: Api, provider: string, id: string): Model<Api> {
  const stub = { api, provider, id, input: [], output: [] };
  return stub as unknown as Model<Api>;
}
/**
 * Builds an assistant transcript message that ends in a single pending tool
 * call, i.e. an OpenAI Responses turn still awaiting its tool result.
 */
function assistantToolCall(
  id: string,
  name = "read",
): Extract<Context["messages"][number], { role: "assistant" }> {
  const toolCall = { type: "toolCall", id, name, arguments: {} };
  const message = {
    role: "assistant",
    provider: "openai",
    api: "openai-responses",
    model: "gpt-5.4",
    stopReason: "toolUse",
    timestamp: Date.now(),
    content: [toolCall],
  };
  return message as unknown as Extract<
    Context["messages"][number],
    { role: "assistant" }
  >;
}
describe("transformTransportMessages synthetic tool-result policy", () => {
  // A conversation whose assistant turn has a dangling tool call followed by
  // a bare user turn — the shape replay repair has to deal with.
  const pendingConversation = (callId: string): Context["messages"] => [
    assistantToolCall(callId),
    { role: "user", content: "continue", timestamp: Date.now() },
  ];
  const roles = (msgs: Context["messages"]) => msgs.map((msg) => msg.role);

  it("does not synthesize missing tool results for OpenAI-compatible transports", () => {
    const output = transformTransportMessages(
      pendingConversation("call_openai_1"),
      makeModel("openai-responses", "openai", "gpt-5.4"),
    );
    expect(roles(output)).toEqual(["assistant", "user"]);
  });

  it("still synthesizes missing tool results for Anthropic transports", () => {
    const output = transformTransportMessages(
      pendingConversation("call_anthropic_1"),
      makeModel("anthropic-messages", "anthropic", "claude-opus-4-6"),
    );
    expect(roles(output)).toEqual(["assistant", "toolResult", "user"]);
    // The synthetic result must carry the original call id and be flagged
    // as an error so the model knows the call never actually ran.
    expect(output[1]).toMatchObject({
      role: "toolResult",
      toolCallId: "call_anthropic_1",
      isError: true,
    });
  });

  it("still synthesizes missing tool results for transport alias apis that own replay repair", () => {
    const conversation = pendingConversation("call_transport_1");
    const repairedRoles = ["assistant", "toolResult", "user"];
    const anthropicAlias = transformTransportMessages(
      conversation,
      makeModel("openclaw-anthropic-messages-transport" as Api, "anthropic", "claude-opus-4-6"),
    );
    expect(roles(anthropicAlias)).toEqual(repairedRoles);
    const googleAlias = transformTransportMessages(
      conversation,
      makeModel("openclaw-google-generative-ai-transport" as Api, "google", "gemini-2.5-pro"),
    );
    expect(roles(googleAlias)).toEqual(repairedRoles);
    const bedrockCanonical = transformTransportMessages(
      conversation,
      makeModel("bedrock-converse-stream" as Api, "bedrock", "anthropic.claude-opus-4-6"),
    );
    expect(roles(bedrockCanonical)).toEqual(repairedRoles);
  });
});

View File

@@ -1,7 +1,19 @@
import type { Api, Context, Model } from "@mariozechner/pi-ai";
// APIs whose transports own transcript-replay repair: for these we synthesize
// a missing tool result so a replayed conversation stays well-formed. OpenAI
// family APIs are deliberately absent — replay must not invent results there.
const REPAIR_OWNING_APIS = [
  "anthropic-messages",
  "openclaw-anthropic-messages-transport",
  "bedrock-converse-stream",
  "google-generative-ai",
  "openclaw-google-generative-ai-transport",
] as const;
const SYNTHETIC_TOOL_RESULT_APIS = new Set<string>(REPAIR_OWNING_APIS);

// A tool call emitted by an assistant turn with no matching tool result yet.
type PendingToolCall = { id: string; name: string };

// Default policy: allow synthetic tool results only for repair-owning APIs.
function defaultAllowSyntheticToolResults(modelApi: Api): boolean {
  return SYNTHETIC_TOOL_RESULT_APIS.has(modelApi);
}
function appendMissingToolResults(
result: Context["messages"],
pendingToolCalls: PendingToolCall[],
@@ -30,6 +42,7 @@ export function transformTransportMessages(
source: { provider: string; api: Api; model: string },
) => string,
): Context["messages"] {
const allowSyntheticToolResults = defaultAllowSyntheticToolResults(model.api);
const toolCallIdMap = new Map<string, string>();
const transformed = messages.map((msg) => {
if (msg.role === "user") {
@@ -95,11 +108,11 @@ export function transformTransportMessages(
let existingToolResultIds = new Set<string>();
for (const msg of transformed) {
if (msg.role === "assistant") {
if (pendingToolCalls.length > 0) {
if (allowSyntheticToolResults && pendingToolCalls.length > 0) {
appendMissingToolResults(result, pendingToolCalls, existingToolResultIds);
pendingToolCalls = [];
existingToolResultIds = new Set();
}
pendingToolCalls = [];
existingToolResultIds = new Set();
if (msg.stopReason === "error" || msg.stopReason === "aborted") {
continue;
}
@@ -119,11 +132,11 @@ export function transformTransportMessages(
result.push(msg);
continue;
}
if (pendingToolCalls.length > 0) {
if (allowSyntheticToolResults && pendingToolCalls.length > 0) {
appendMissingToolResults(result, pendingToolCalls, existingToolResultIds);
pendingToolCalls = [];
existingToolResultIds = new Set();
}
pendingToolCalls = [];
existingToolResultIds = new Set();
result.push(msg);
}
return result;