mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-06 04:50:44 +00:00
fix(openai): default direct responses to sse
This commit is contained in:
@@ -52,6 +52,7 @@ Docs: https://docs.openclaw.ai
 ### Fixes
 
+- Agents/OpenAI: default direct OpenAI Responses models to the SSE transport instead of WebSocket auto-selection, preventing pi runtime chat turns from hanging on servers where the WebSocket path stalls while the OpenAI HTTP stream works. Thanks @vincentkoc.
 - CLI/update: disable and skip plugins that fail package-update plugin sync, so a broken npm/ClawHub/git/marketplace plugin cannot turn a successful OpenClaw package update into a failed update result. Thanks @vincentkoc.
 - CLI/update: use an absolute POSIX npm script shell during package-manager updates, so restricted PATH environments can still run dependency lifecycle scripts while updating from `--tag main`. Fixes #77530. Thanks @PeterTremonti.
 - Diagnostics: grant the internal diagnostics event bus to official installed diagnostics exporter plugins, so npm-installed `@openclaw/diagnostics-prometheus` can emit metrics without broadening the capability to arbitrary global plugins. Fixes #76628. Thanks @RayWoo.
@@ -508,9 +508,9 @@ describe("buildOpenAIProvider", () => {
   });
 
   expect(extraParams).toMatchObject({
-    transport: "auto",
-    openaiWsWarmup: true,
+    transport: "sse",
   });
+  expect(extraParams?.openaiWsWarmup).toBeUndefined();
   expect(result.payload.store).toBe(true);
   expect(result.payload.context_management).toEqual([
     { type: "compaction", compact_threshold: 140_000 },
@@ -227,7 +227,7 @@ export function buildOpenAIProvider(): ProviderPlugin {
     shouldUseOpenAIResponsesTransport({ provider, api, baseUrl })
       ? { api: "openai-responses", baseUrl }
       : undefined,
-    ...buildOpenAIResponsesProviderHooks({ openaiWsWarmup: true }),
+    ...buildOpenAIResponsesProviderHooks({ transport: "sse" }),
     matchesContextOverflowError: ({ errorMessage }) =>
       /content_filter.*(?:prompt|input).*(?:too long|exceed)/i.test(errorMessage),
     resolveReasoningOutputMode: () => "native",
@@ -50,10 +50,11 @@ function hasSupportedOpenAIResponsesTransport(
 
 function defaultOpenAIResponsesExtraParams(
   extraParams: Record<string, unknown> | undefined,
-  options?: { openaiWsWarmup?: boolean },
+  options?: { openaiWsWarmup?: boolean; transport?: "auto" | "sse" | "websocket" },
 ): Record<string, unknown> | undefined {
   const hasSupportedTransport = hasSupportedOpenAIResponsesTransport(extraParams?.transport);
   const hasExplicitWarmup = typeof extraParams?.openaiWsWarmup === "boolean";
+  const defaultTransport = options?.transport ?? "auto";
   const shouldDefaultWarmup = options?.openaiWsWarmup === true;
   if (hasSupportedTransport && (!shouldDefaultWarmup || hasExplicitWarmup)) {
     return extraParams;
@@ -61,7 +62,7 @@ function defaultOpenAIResponsesExtraParams(
 
   return {
     ...extraParams,
-    ...(hasSupportedTransport ? {} : { transport: "auto" }),
+    ...(hasSupportedTransport ? {} : { transport: defaultTransport }),
     ...(shouldDefaultWarmup && !hasExplicitWarmup ? { openaiWsWarmup: true } : {}),
   };
 }
@@ -93,6 +94,7 @@ const wrapOpenAIResponsesProviderStreamFn: NonNullable<
 
 export function buildOpenAIResponsesProviderHooks(options?: {
   openaiWsWarmup?: boolean;
+  transport?: "auto" | "sse" | "websocket";
 }): OpenAIResponsesProviderHooks {
   return {
     buildReplayPolicy: buildOpenAIReplayPolicy,
Reference in New Issue
Block a user