mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-06 10:40:43 +00:00
fix: prevent embedded runs from lowering undici timeouts
This commit is contained in:
@@ -15,6 +15,7 @@ Docs: https://docs.openclaw.ai
 ### Fixes

+- Agents/transport: stop embedded runs from lowering the process-wide undici stream timeouts, so slow Gemini image generation and other long-running provider requests no longer inherit short run-attempt headers timeouts. Fixes #70423. Thanks @giangthb.
 - Memory/QMD: recreate stale managed QMD collections when startup repair finds the collection name already exists, so root memory narrows back to `MEMORY.md` instead of staying on broad workspace markdown indexing.
 - Agents/OpenAI: surface selected-model capacity failures from PI, Codex, and auto-reply harness paths with a model-switch hint instead of the generic empty-response error. Thanks @vincentkoc.
 - Providers/OpenAI: route `openai/gpt-image-2` through configured Codex OAuth directly when an `openai-codex` profile is active, instead of probing `OPENAI_API_KEY` first.

@@ -1,4 +1,5 @@
 import {
+  DEFAULT_UNDICI_STREAM_TIMEOUT_MS,
   ensureGlobalUndiciEnvProxyDispatcher,
   ensureGlobalUndiciStreamTimeouts,
 } from "../../../infra/net/undici-global-dispatcher.js";
@@ -7,5 +8,7 @@ export function configureEmbeddedAttemptHttpRuntime(params: { timeoutMs: number
   // Proxy bootstrap must happen before timeout tuning so the timeouts wrap the
   // active EnvHttpProxyAgent instead of being replaced by a bare proxy dispatcher.
   ensureGlobalUndiciEnvProxyDispatcher();
-  ensureGlobalUndiciStreamTimeouts({ timeoutMs: params.timeoutMs });
+  ensureGlobalUndiciStreamTimeouts({
+    timeoutMs: Math.max(params.timeoutMs, DEFAULT_UNDICI_STREAM_TIMEOUT_MS),
+  });
 }

@@ -1,11 +1,13 @@
 import { beforeEach, describe, expect, it, vi } from "vitest";

 const mocks = vi.hoisted(() => ({
+  DEFAULT_UNDICI_STREAM_TIMEOUT_MS: 30 * 60 * 1000,
   ensureGlobalUndiciEnvProxyDispatcher: vi.fn(),
   ensureGlobalUndiciStreamTimeouts: vi.fn(),
 }));

 vi.mock("../../../infra/net/undici-global-dispatcher.js", () => ({
+  DEFAULT_UNDICI_STREAM_TIMEOUT_MS: mocks.DEFAULT_UNDICI_STREAM_TIMEOUT_MS,
   ensureGlobalUndiciEnvProxyDispatcher: mocks.ensureGlobalUndiciEnvProxyDispatcher,
   ensureGlobalUndiciStreamTimeouts: mocks.ensureGlobalUndiciStreamTimeouts,
 }));
@@ -18,12 +20,23 @@ describe("runEmbeddedAttempt undici timeout wiring", () => {
     mocks.ensureGlobalUndiciStreamTimeouts.mockReset();
   });

-  it("forwards the configured run timeout into global undici stream tuning", () => {
+  it("does not lower global undici stream tuning below the shared default", () => {
     configureEmbeddedAttemptHttpRuntime({ timeoutMs: 123_456 });

     expect(mocks.ensureGlobalUndiciEnvProxyDispatcher).toHaveBeenCalledOnce();
     expect(mocks.ensureGlobalUndiciStreamTimeouts).toHaveBeenCalledWith({
-      timeoutMs: 123_456,
+      timeoutMs: mocks.DEFAULT_UNDICI_STREAM_TIMEOUT_MS,
     });
   });
+
+  it("preserves run timeouts above the shared default", () => {
+    const timeoutMs = mocks.DEFAULT_UNDICI_STREAM_TIMEOUT_MS + 1_000;
+
+    configureEmbeddedAttemptHttpRuntime({ timeoutMs });
+
+    expect(mocks.ensureGlobalUndiciEnvProxyDispatcher).toHaveBeenCalledOnce();
+    expect(mocks.ensureGlobalUndiciStreamTimeouts).toHaveBeenCalledWith({
+      timeoutMs,
+    });
+  });
 });

@@ -142,6 +142,26 @@ describe("ensureGlobalUndiciStreamTimeouts", () => {
     expect(setGlobalDispatcher).toHaveBeenCalledTimes(1);
   });

+  it("does not lower global stream timeouts below the default floor", () => {
+    ensureGlobalUndiciStreamTimeouts({ timeoutMs: 15_000 });
+
+    expect(setGlobalDispatcher).toHaveBeenCalledTimes(1);
+    const next = getCurrentDispatcher() as { options?: Record<string, unknown> };
+    expect(next.options?.bodyTimeout).toBe(DEFAULT_UNDICI_STREAM_TIMEOUT_MS);
+    expect(next.options?.headersTimeout).toBe(DEFAULT_UNDICI_STREAM_TIMEOUT_MS);
+  });
+
+  it("honors explicit global stream timeouts above the default floor", () => {
+    const timeoutMs = DEFAULT_UNDICI_STREAM_TIMEOUT_MS + 1_000;
+
+    ensureGlobalUndiciStreamTimeouts({ timeoutMs });
+
+    expect(setGlobalDispatcher).toHaveBeenCalledTimes(1);
+    const next = getCurrentDispatcher() as { options?: Record<string, unknown> };
+    expect(next.options?.bodyTimeout).toBe(timeoutMs);
+    expect(next.options?.headersTimeout).toBe(timeoutMs);
+  });
+
   it("re-applies when autoSelectFamily decision changes", () => {
     getDefaultAutoSelectFamily.mockReturnValue(true);
     ensureGlobalUndiciStreamTimeouts();

@@ -110,10 +110,10 @@ export function ensureGlobalUndiciEnvProxyDispatcher(): void {

 export function ensureGlobalUndiciStreamTimeouts(opts?: { timeoutMs?: number }): void {
   const timeoutMsRaw = opts?.timeoutMs ?? DEFAULT_UNDICI_STREAM_TIMEOUT_MS;
-  const timeoutMs = Math.max(1, Math.floor(timeoutMsRaw));
+  if (!Number.isFinite(timeoutMsRaw)) {
+    return;
+  }
+  const timeoutMs = Math.max(DEFAULT_UNDICI_STREAM_TIMEOUT_MS, Math.floor(timeoutMsRaw));
   const kind = resolveCurrentDispatcherKind();
   if (kind === null) {
     return;
Reference in New Issue
Block a user