fix: keep copilot on boundary-aware stream path

This commit is contained in:
Peter Steinberger
2026-04-25 00:06:26 +01:00
parent 1787ae0f5d
commit beefcda68f
5 changed files with 77 additions and 28 deletions

View File

@@ -10,10 +10,12 @@ Docs: https://docs.openclaw.ai
### Fixes
- Providers/GitHub Copilot: keep the plugin stream wrapper from claiming transport selection before OpenClaw picks a boundary-aware stream path, avoiding Pi's stale fallback Copilot headers on normal model turns. Thanks @steipete.
- Discord/subagents: pass runtime config into thread-bound native subagent binding and require it at the helper boundary so Discord channel resolution keeps account-aware config. Fixes #71054. (#70945) Thanks @jai.
- Slack/Assistant: accept Slack Assistant DM `message_changed` events when their metadata identifies the human sender, while continuing to drop self-authored bot edits. Fixes #55445. Thanks @AlfredPros.
- Agents/failover: stop body-less HTTP 400/422 proxy failures from defaulting to `"format"` classification, so embedded retries surface the opaque provider failure instead of falling into a compaction loop. Fixes #66462. (#67024) Thanks @altaywtf and @HongzhuLiu.
- Plugins/loader: use cached discovery-mode snapshot loads for read-only plugin capability lookups, keep snapshot caches isolated from active Gateway registries, and make same-plugin channel/HTTP route re-registration idempotent so repeated snapshot or hot-reload paths no longer rerun full plugin side effects or accumulate duplicate surfaces. Fixes #51781, #52031, #54181, and #57514. Thanks @livingghost, @okuyam2y, @ShionEria, and @bbshih.
## 2026.4.24
### Breaking

View File

@@ -145,7 +145,11 @@ describeLive("github-copilot connection-bound Responses IDs live", () => {
};
let capturedPayload: Record<string, unknown> | undefined;
const stream = wrapCopilotOpenAIResponsesStream(streamOpenAIResponses as never)(
const wrappedStream = wrapCopilotOpenAIResponsesStream(streamOpenAIResponses as never);
if (!wrappedStream) {
throw new Error("expected Copilot Responses stream wrapper");
}
const stream = wrappedStream(
model as never,
context as never,
{

View File

@@ -6,6 +6,14 @@ import {
wrapCopilotProviderStream,
} from "./stream.js";
// Narrows a possibly-undefined wrapped stream function for tests: records a
// vitest assertion that it is a function, then hard-fails (for type narrowing)
// when it is absent so callers get a non-optional StreamFn back.
function requireStreamFn(streamFn: ReturnType<typeof wrapCopilotProviderStream>) {
  expect(streamFn).toBeTypeOf("function");
  if (streamFn) {
    return streamFn;
  }
  throw new Error("expected stream fn");
}
describe("wrapCopilotAnthropicStream", () => {
it("adds Copilot headers and Anthropic cache markers for Claude payloads", async () => {
const payloads: Array<{
@@ -28,7 +36,7 @@ describe("wrapCopilotAnthropicStream", () => {
} as never;
});
const wrapped = wrapCopilotAnthropicStream(baseStreamFn);
const wrapped = requireStreamFn(wrapCopilotAnthropicStream(baseStreamFn));
const messages = [
{
role: "user",
@@ -77,7 +85,7 @@ describe("wrapCopilotAnthropicStream", () => {
it("leaves non-Anthropic Copilot models untouched", () => {
const baseStreamFn = vi.fn(() => ({ async *[Symbol.asyncIterator]() {} }) as never);
const wrapped = wrapCopilotAnthropicStream(baseStreamFn);
const wrapped = requireStreamFn(wrapCopilotAnthropicStream(baseStreamFn));
const options = { headers: { Existing: "1" } };
void wrapped(
@@ -105,7 +113,7 @@ describe("wrapCopilotAnthropicStream", () => {
} as never;
});
const wrapped = wrapCopilotOpenAIResponsesStream(baseStreamFn);
const wrapped = requireStreamFn(wrapCopilotOpenAIResponsesStream(baseStreamFn));
const messages = [
{
role: "toolResult",
@@ -149,7 +157,7 @@ describe("wrapCopilotAnthropicStream", () => {
} as never;
});
const wrapped = wrapCopilotOpenAIResponsesStream(baseStreamFn);
const wrapped = requireStreamFn(wrapCopilotOpenAIResponsesStream(baseStreamFn));
await wrapped(
{
@@ -171,9 +179,11 @@ describe("wrapCopilotAnthropicStream", () => {
it("adapts provider stream context without changing wrapper behavior", () => {
const baseStreamFn = vi.fn(() => ({ async *[Symbol.asyncIterator]() {} }) as never);
const wrapped = wrapCopilotProviderStream({
streamFn: baseStreamFn,
} as never);
const wrapped = requireStreamFn(
wrapCopilotProviderStream({
streamFn: baseStreamFn,
} as never),
);
void wrapped(
{
@@ -187,4 +197,12 @@ describe("wrapCopilotAnthropicStream", () => {
expect(baseStreamFn).toHaveBeenCalledOnce();
});
// Regression guard for the boundary-aware transport fix: when the plugin
// context carries no base streamFn, the wrapper must return undefined rather
// than fabricate its own stream path, leaving transport selection to OpenClaw.
it("does not claim provider transport before OpenClaw chooses one", () => {
expect(
// `as never` bypasses the full ProviderWrapStreamFnContext shape; only
// streamFn is relevant to this behavior.
wrapCopilotProviderStream({
streamFn: undefined,
} as never),
).toBeUndefined();
});
});

View File

@@ -1,5 +1,4 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
import { streamSimple } from "@mariozechner/pi-ai";
import type { ProviderWrapStreamFnContext } from "openclaw/plugin-sdk/plugin-entry";
import {
applyAnthropicEphemeralCacheControlMarkers,
@@ -23,8 +22,26 @@ function patchOnPayloadResult(result: unknown): unknown {
return result;
}
export function wrapCopilotAnthropicStream(baseStreamFn: StreamFn | undefined): StreamFn {
const underlying = baseStreamFn ?? streamSimple;
// Merges Copilot's per-request dynamic headers with any caller-supplied
// headers. Caller headers are spread last so they take precedence over the
// dynamically derived ones.
function buildCopilotRequestHeaders(
  context: Parameters<StreamFn>[1],
  headers: Record<string, string> | undefined,
): Record<string, string> {
  const dynamicHeaders = buildCopilotDynamicHeaders({
    messages: context.messages,
    hasImages: hasCopilotVisionInput(context.messages),
  });
  return { ...dynamicHeaders, ...headers };
}
export function wrapCopilotAnthropicStream(
baseStreamFn: StreamFn | undefined,
): StreamFn | undefined {
if (!baseStreamFn) {
return undefined;
}
const underlying = baseStreamFn;
return (model, context, options) => {
if (model.provider !== "github-copilot" || model.api !== "anthropic-messages") {
return underlying(model, context, options);
@@ -36,21 +53,20 @@ export function wrapCopilotAnthropicStream(baseStreamFn: StreamFn | undefined):
context,
{
...options,
headers: {
...buildCopilotDynamicHeaders({
messages: context.messages,
hasImages: hasCopilotVisionInput(context.messages),
}),
...options?.headers,
},
headers: buildCopilotRequestHeaders(context, options?.headers),
},
applyAnthropicEphemeralCacheControlMarkers,
);
};
}
export function wrapCopilotOpenAIResponsesStream(baseStreamFn: StreamFn | undefined): StreamFn {
const underlying = baseStreamFn ?? streamSimple;
export function wrapCopilotOpenAIResponsesStream(
baseStreamFn: StreamFn | undefined,
): StreamFn | undefined {
if (!baseStreamFn) {
return undefined;
}
const underlying = baseStreamFn;
return (model, context, options) => {
if (model.provider !== "github-copilot" || model.api !== "openai-responses") {
return underlying(model, context, options);
@@ -59,13 +75,7 @@ export function wrapCopilotOpenAIResponsesStream(baseStreamFn: StreamFn | undefi
const originalOnPayload = options?.onPayload;
const wrappedOptions: StreamOptions = {
...options,
headers: {
...buildCopilotDynamicHeaders({
messages: context.messages,
hasImages: hasCopilotVisionInput(context.messages),
}),
...options?.headers,
},
headers: buildCopilotRequestHeaders(context, options?.headers),
onPayload: (payload, payloadModel) => {
rewriteCopilotResponsePayloadConnectionBoundIds(payload);
return patchOnPayloadResult(originalOnPayload?.(payload, payloadModel));
@@ -75,6 +85,6 @@ export function wrapCopilotOpenAIResponsesStream(baseStreamFn: StreamFn | undefi
};
}
export function wrapCopilotProviderStream(ctx: ProviderWrapStreamFnContext): StreamFn {
// Composes both Copilot stream wrappers (Anthropic, then OpenAI Responses)
// over the context's base streamFn. Each wrapper passes undefined through, so
// a missing base streamFn yields undefined and OpenClaw retains transport
// selection.
export function wrapCopilotProviderStream(ctx: ProviderWrapStreamFnContext): StreamFn | undefined {
  const anthropicWrapped = wrapCopilotAnthropicStream(ctx.streamFn);
  return wrapCopilotOpenAIResponsesStream(anthropicWrapped);
}

View File

@@ -111,6 +111,21 @@ describe("resolveEmbeddedAgentStreamFn", () => {
expect(streamFn).not.toBe(streamSimple);
});
// Verifies that resolving a stream fn for a github-copilot model with no
// current stream fn does NOT fall back to pi's streamSimple — the resolver
// must pick a boundary-aware transport instead (the bug this commit fixes).
it("routes GitHub Copilot fallbacks through boundary-aware transports", () => {
const streamFn = resolveEmbeddedAgentStreamFn({
currentStreamFn: undefined,
shouldUseWebSocketTransport: false,
sessionId: "session-1",
// Minimal model fixture; `as never` skips the full model type's fields.
model: {
api: "openai-responses",
provider: "github-copilot",
id: "gpt-5.4",
} as never,
});
expect(streamFn).not.toBe(streamSimple);
});
it("injects the resolved run api key into provider-owned stream functions", async () => {
const providerStreamFn = vi.fn(async (_model, _context, options) => options);
const authStorage = {