diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9358cfe79a3..f48245320ec 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -28,6 +28,7 @@ Docs: https://docs.openclaw.ai
- CLI/directory: report unsupported directory operations for installed channel plugins instead of prompting to reinstall the plugin when it lacks a directory adapter. Fixes #75770. Thanks @lawong888.
- Web search: keep public provider requests on the strict SSRF guard and reserve private-network access for explicit self-hosted SearXNG/Firecrawl endpoints. Fixes #74357 and supersedes #74360. Thanks @fede-kamel.
- Web search/Firecrawl: allow self-hosted private/internal Firecrawl `baseUrl` endpoints, including HTTP for private targets, while keeping hosted Firecrawl on the strict official endpoint. Fixes #63877 and supersedes #59666, #63941, and #74013. Thanks @jhthompson12, @jzakirov, @Mlightsnow, and @shad0wca7.
+- Providers/OpenRouter: strip trailing assistant prefill turns from verified OpenRouter Anthropic model requests when reasoning is enabled, so Claude 4.6 routes no longer fail with Anthropic's prefill rejection through the OpenAI-compatible adapter. Fixes #75395. Thanks @sbmilburn.
- Feishu: preserve Feishu/Lark HTTP error bodies for message sends, media sends, and chat member lookups, so HTTP 400 failures include vendor code, message, log id, and troubleshooter details. Fixes #73860. Thanks @desksk.
- Agents/transcripts: avoid reopening large Pi transcript files through the synchronous session manager for maintenance rewrites, persisted tool-result truncation, manual compaction boundary hardening, and queued compaction rotation. Thanks @mariozechner.
- Web search/Exa: accept `plugins.entries.exa.config.webSearch.baseUrl`, normalize it to the Exa `/search` endpoint, and partition cached results by endpoint. Fixes #54928 and supersedes #54939. Thanks @mrpl327 and @lyfuci.
diff --git a/docs/providers/openrouter.md b/docs/providers/openrouter.md
index 9ef2957aad4..d8cc51bda28 100644
--- a/docs/providers/openrouter.md
+++ b/docs/providers/openrouter.md
@@ -159,6 +159,13 @@ does **not** inject those OpenRouter-specific headers or Anthropic cache markers
better prompt-cache reuse on system/developer prompt blocks.
+
+ On verified OpenRouter routes, Anthropic model refs with reasoning enabled
+ drop trailing assistant prefill turns before the request reaches OpenRouter,
+ matching Anthropic's requirement that reasoning conversations end with a user
+ turn.
+
+
On supported non-`auto` routes, OpenClaw maps the selected thinking level to
OpenRouter proxy reasoning payloads. Unsupported model hints and
diff --git a/docs/reference/transcript-hygiene.md b/docs/reference/transcript-hygiene.md
index c53ee188cdb..fa79ca86123 100644
--- a/docs/reference/transcript-hygiene.md
+++ b/docs/reference/transcript-hygiene.md
@@ -177,6 +177,12 @@ inter-session user turns that only have provenance metadata.
- Thought signature cleanup: strip non-base64 `thought_signature` values (keep base64).
+**OpenRouter Anthropic**
+
+- Trailing assistant prefill turns are stripped from verified OpenRouter
+ OpenAI-compatible Anthropic model payloads when reasoning is enabled, matching
+ direct Anthropic and Cloudflare Anthropic replay behavior.
+
**Everything else**
- Image sanitization only.
diff --git a/extensions/openrouter/index.test.ts b/extensions/openrouter/index.test.ts
index 66d77857d1e..2bea1091f5f 100644
--- a/extensions/openrouter/index.test.ts
+++ b/extensions/openrouter/index.test.ts
@@ -218,4 +218,111 @@ describe("openrouter provider hooks", () => {
expect(capturedPayload).toEqual({});
expect(baseStreamFn).toHaveBeenCalledOnce();
});
+
+ it("strips OpenRouter-routed Anthropic assistant prefill when reasoning is enabled", async () => {
+ const provider = await registerSingleProviderPlugin(openrouterPlugin);
+    let capturedPayload: Record<string, unknown> | undefined;
+ const baseStreamFn = vi.fn(
+ (
+        ...args: Parameters<StreamFn>
+      ): ReturnType<StreamFn> => {
+ const payload = {
+ messages: [
+ { role: "user", content: "Return JSON." },
+ { role: "assistant", content: "{" },
+ ],
+ };
+ void args[2]?.onPayload?.(payload, args[0]);
+ capturedPayload = payload;
+ return { async *[Symbol.asyncIterator]() {} } as never;
+ },
+ );
+
+ const wrapped = provider.wrapStreamFn?.({
+ provider: "openrouter",
+ modelId: "anthropic/claude-opus-4.6",
+ streamFn: baseStreamFn,
+ thinkingLevel: "high",
+ } as never);
+
+ void wrapped?.(
+ {
+ provider: "openrouter",
+ api: "openai-completions",
+ id: "anthropic/claude-opus-4.6",
+ baseUrl: "https://openrouter.ai/api/v1",
+ compat: {},
+ } as never,
+ { messages: [] } as never,
+ {},
+ );
+
+ expect(capturedPayload).toMatchObject({
+ messages: [{ role: "user", content: "Return JSON." }],
+ reasoning: { effort: "high" },
+ });
+ expect(baseStreamFn).toHaveBeenCalledOnce();
+ });
+
+ it("keeps OpenRouter Anthropic prefill when reasoning is disabled or the route is custom", async () => {
+ const provider = await registerSingleProviderPlugin(openrouterPlugin);
+    const payloads: Array<Record<string, unknown>> = [];
+ const baseStreamFn = vi.fn(
+ (
+        ...args: Parameters<StreamFn>
+      ): ReturnType<StreamFn> => {
+ const payload = {
+ messages: [
+ { role: "user", content: "Return JSON." },
+ { role: "assistant", content: "{" },
+ ],
+ };
+ void args[2]?.onPayload?.(payload, args[0]);
+ payloads.push(payload);
+ return { async *[Symbol.asyncIterator]() {} } as never;
+ },
+ );
+
+ const disabled = provider.wrapStreamFn?.({
+ provider: "openrouter",
+ modelId: "anthropic/claude-opus-4.6",
+ streamFn: baseStreamFn,
+ thinkingLevel: "off",
+ } as never);
+ void disabled?.(
+ {
+ provider: "openrouter",
+ api: "openai-completions",
+ id: "anthropic/claude-opus-4.6",
+ baseUrl: "https://openrouter.ai/api/v1",
+ compat: {},
+ } as never,
+ { messages: [] } as never,
+ {},
+ );
+
+ const customRoute = provider.wrapStreamFn?.({
+ provider: "openrouter",
+ modelId: "anthropic/claude-opus-4.6",
+ streamFn: baseStreamFn,
+ thinkingLevel: "high",
+ } as never);
+ void customRoute?.(
+ {
+ provider: "openrouter",
+ api: "openai-completions",
+ id: "anthropic/claude-opus-4.6",
+ baseUrl: "https://proxy.example.com/v1",
+ compat: {},
+ } as never,
+ { messages: [] } as never,
+ {},
+ );
+
+ expect(payloads).toHaveLength(2);
+ expect(payloads[0]?.messages).toHaveLength(2);
+ expect(payloads[0]).not.toHaveProperty("reasoning");
+ expect(payloads[1]?.messages).toHaveLength(2);
+ expect(payloads[1]).toMatchObject({ reasoning: { effort: "high" } });
+ });
});
diff --git a/extensions/openrouter/stream.ts b/extensions/openrouter/stream.ts
index 37573c455cb..48fa6d3c379 100644
--- a/extensions/openrouter/stream.ts
+++ b/extensions/openrouter/stream.ts
@@ -1,7 +1,65 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
import type { ProviderWrapStreamFnContext } from "openclaw/plugin-sdk/plugin-entry";
import { OPENROUTER_THINKING_STREAM_HOOKS } from "openclaw/plugin-sdk/provider-stream-family";
-import { isOpenRouterProxyReasoningUnsupportedModel } from "./provider-catalog.js";
+import {
+ createPayloadPatchStreamWrapper,
+ stripTrailingAssistantPrefillMessages,
+} from "openclaw/plugin-sdk/provider-stream-shared";
+import { createSubsystemLogger } from "openclaw/plugin-sdk/runtime-env";
+import {
+ isOpenRouterProxyReasoningUnsupportedModel,
+ normalizeOpenRouterBaseUrl,
+ OPENROUTER_BASE_URL,
+} from "./provider-catalog.js";
+
+const log = createSubsystemLogger("openrouter-stream");
+
+function readString(value: unknown): string | undefined {
+ return typeof value === "string" ? value.trim() : undefined;
+}
+
+function isOpenRouterAnthropicModelId(modelId: unknown): boolean {
+ const normalized = readString(modelId)?.toLowerCase();
+ return (
+ normalized?.startsWith("anthropic/") === true ||
+ normalized?.startsWith("openrouter/anthropic/") === true
+ );
+}
+
+function isVerifiedOpenRouterRoute(model: Parameters<StreamFn>[0]): boolean {
+ const provider = readString(model.provider)?.toLowerCase();
+ const baseUrl = readString(model.baseUrl);
+ if (baseUrl) {
+ return normalizeOpenRouterBaseUrl(baseUrl) === OPENROUTER_BASE_URL;
+ }
+ return provider === "openrouter";
+}
+
+function shouldPatchAnthropicOpenRouterPayload(model: Parameters<StreamFn>[0]): boolean {
+ const api = readString(model.api);
+ return (
+ (api === undefined || api === "openai-completions") &&
+ isOpenRouterAnthropicModelId(model.id) &&
+ isVerifiedOpenRouterRoute(model)
+ );
+}
+
+function isEnabledReasoningValue(value: unknown): boolean {
+ if (value === undefined || value === null || value === false) {
+ return false;
+ }
+ if (typeof value === "string") {
+ const normalized = value.trim().toLowerCase();
+ return normalized !== "" && normalized !== "off" && normalized !== "none";
+ }
+ return true;
+}
+
+function isOpenRouterReasoningPayloadEnabled(payload: Record<string, unknown>): boolean {
+ return (
+ isEnabledReasoningValue(payload.reasoning) || isEnabledReasoningValue(payload.reasoning_effort)
+ );
+}
function injectOpenRouterRouting(
baseStreamFn: StreamFn | undefined,
@@ -28,6 +86,26 @@ function injectOpenRouterRouting(
);
}
+function createOpenRouterAnthropicPrefillWrapper(baseStreamFn: StreamFn | undefined): StreamFn {
+ return createPayloadPatchStreamWrapper(
+ baseStreamFn,
+ ({ payload }) => {
+ if (!isOpenRouterReasoningPayloadEnabled(payload)) {
+ return;
+ }
+ const stripped = stripTrailingAssistantPrefillMessages(payload);
+ if (stripped > 0) {
+ log.warn(
+ `removed ${stripped} trailing assistant prefill message${stripped === 1 ? "" : "s"} because OpenRouter-routed Anthropic reasoning requires conversations to end with a user turn`,
+ );
+ }
+ },
+ {
+ shouldPatch: ({ model }) => shouldPatchAnthropicOpenRouterPayload(model),
+ },
+ );
+}
+
export function wrapOpenRouterProviderStream(
ctx: ProviderWrapStreamFnContext,
): StreamFn | null | undefined {
@@ -40,15 +118,22 @@ export function wrapOpenRouterProviderStream(
: ctx.streamFn;
const wrapStreamFn = OPENROUTER_THINKING_STREAM_HOOKS.wrapStreamFn ?? undefined;
if (!wrapStreamFn) {
- return routedStreamFn;
+ return createOpenRouterAnthropicPrefillWrapper(routedStreamFn);
}
- return (
+ const wrappedStreamFn =
wrapStreamFn({
...ctx,
streamFn: routedStreamFn,
thinkingLevel: isOpenRouterProxyReasoningUnsupportedModel(ctx.modelId)
? undefined
: ctx.thinkingLevel,
- }) ?? undefined
- );
+ }) ?? undefined;
+ return createOpenRouterAnthropicPrefillWrapper(wrappedStreamFn);
}
+
+export const __testing = {
+ isOpenRouterAnthropicModelId,
+ isOpenRouterReasoningPayloadEnabled,
+ isVerifiedOpenRouterRoute,
+ shouldPatchAnthropicOpenRouterPayload,
+};
diff --git a/src/plugin-sdk/provider-stream-shared.test.ts b/src/plugin-sdk/provider-stream-shared.test.ts
index 97f035e82c8..9a68ce9d6e3 100644
--- a/src/plugin-sdk/provider-stream-shared.test.ts
+++ b/src/plugin-sdk/provider-stream-shared.test.ts
@@ -10,6 +10,7 @@ import {
decodeHtmlEntitiesInObject,
hasCopilotVisionInput,
isOpenAICompatibleThinkingEnabled,
+ stripTrailingAssistantPrefillMessages,
stripTrailingAnthropicAssistantPrefillWhenThinking,
} from "./provider-stream-shared.js";
@@ -301,6 +302,18 @@ describe("createPayloadPatchStreamWrapper", () => {
});
describe("stripTrailingAnthropicAssistantPrefillWhenThinking", () => {
+ it("exposes unconditional assistant prefill stripping for proxy reasoning wrappers", () => {
+ const payload = {
+ messages: [
+ { role: "user", content: "Return JSON." },
+ { role: "assistant", content: "{" },
+ ],
+ };
+
+ expect(stripTrailingAssistantPrefillMessages(payload)).toBe(1);
+ expect(payload.messages).toEqual([{ role: "user", content: "Return JSON." }]);
+ });
+
it("removes trailing assistant text turns when Anthropic thinking is enabled", () => {
const payload = {
thinking: { type: "enabled", budget_tokens: 1024 },
diff --git a/src/plugin-sdk/provider-stream-shared.ts b/src/plugin-sdk/provider-stream-shared.ts
index 5250c7cc747..a1ee1428bb7 100644
--- a/src/plugin-sdk/provider-stream-shared.ts
+++ b/src/plugin-sdk/provider-stream-shared.ts
@@ -179,10 +179,8 @@ function assistantMessageHasAnthropicToolUse(message: Record<string, unknown>):
);
}
-export function stripTrailingAnthropicAssistantPrefillWhenThinking(
-  payload: Record<string, unknown>,
-): number {
- if (!isAnthropicThinkingEnabled(payload) || !Array.isArray(payload.messages)) {
+export function stripTrailingAssistantPrefillMessages(payload: Record<string, unknown>): number {
+ if (!Array.isArray(payload.messages)) {
return 0;
}
@@ -204,6 +202,15 @@ export function stripTrailingAnthropicAssistantPrefillWhenThinking(
return stripped;
}
+export function stripTrailingAnthropicAssistantPrefillWhenThinking(
+  payload: Record<string, unknown>,
+): number {
+ if (!isAnthropicThinkingEnabled(payload)) {
+ return 0;
+ }
+ return stripTrailingAssistantPrefillMessages(payload);
+}
+
export function createAnthropicThinkingPrefillPayloadWrapper(
baseStreamFn: StreamFn | undefined,
onStripped?: (stripped: number) => void,