feat(agents): default codex transport to websocket-first

This commit is contained in:
Peter Steinberger
2026-02-26 16:21:34 +01:00
parent 63c6080d50
commit 03d7641b0e
5 changed files with 204 additions and 0 deletions

View File

@@ -7,6 +7,7 @@ Docs: https://docs.openclaw.ai
### Changes
- Highlight: External Secrets Management introduces a full `openclaw secrets` workflow (`audit`, `configure`, `apply`, `reload`) with runtime snapshot activation, strict `secrets apply` target-path validation, safer migration scrubbing, ref-only auth-profile support, and dedicated docs. (#26155) Thanks @joshavant.
- Codex/WebSocket transport: make `openai-codex` WebSocket-first by default (`transport: "auto"` with SSE fallback), keep explicit per-model/runtime transport overrides, and add regression coverage + docs for transport selection.
- Agents/Routing CLI: add `openclaw agents bindings`, `openclaw agents bind`, and `openclaw agents unbind` for account-scoped route management, including channel-only to account-scoped binding upgrades, role-aware binding identity handling, plugin-resolved binding account IDs, and optional account-binding prompts in `openclaw channels add`. (#27195) Thanks @gumadeiras.
- ACP/Thread-bound agents: make ACP agents first-class runtimes for thread sessions with `acp` spawn/send dispatch integration, acpx backend bridging, lifecycle controls, startup reconciliation, runtime cleanup, and coalesced thread replies. (#23580) Thanks @osolmaz.
- Onboarding/Plugins: let channel plugins own interactive onboarding flows with optional `configureInteractive` and `configureWhenConfigured` hooks while preserving the generic fallback path. (#27191) Thanks @gumadeiras.

View File

@@ -70,6 +70,8 @@ OpenClaw ships with the piai catalog. These providers require **no**
- Auth: OAuth (ChatGPT)
- Example model: `openai-codex/gpt-5.3-codex`
- CLI: `openclaw onboard --auth-choice openai-codex` or `openclaw models auth login --provider openai-codex`
- Default transport is `auto` (WebSocket-first, SSE fallback)
- Override per model via `agents.defaults.models["openai-codex/<model>"].params.transport` (`"sse"`, `"websocket"`, or `"auto"`)
```json5
{

View File

@@ -56,6 +56,33 @@ openclaw models auth login --provider openai-codex
}
```
### Codex transport default
OpenClaw uses `pi-ai` for model streaming. For `openai-codex/*` models you can set
`agents.defaults.models["<provider>/<model>"].params.transport` to select the transport:
- Default is `"auto"` (WebSocket-first, then SSE fallback).
- `"sse"`: force SSE
- `"websocket"`: force WebSocket
- `"auto"`: try WebSocket, then fall back to SSE
```json5
{
agents: {
defaults: {
model: { primary: "openai-codex/gpt-5.3-codex" },
models: {
"openai-codex/gpt-5.3-codex": {
params: {
transport: "auto",
},
},
},
},
},
}
```
## Notes
- Model refs always use `provider/model` (see [/concepts/models](/concepts/models)).

View File

@@ -490,6 +490,160 @@ describe("applyExtraParamsToAgent", () => {
});
});
it("passes configured websocket transport through stream options", () => {
  const harness = createOptionsCaptureAgent();
  // Per-model config forces the websocket transport for this Codex model.
  const config = {
    agents: {
      defaults: {
        models: {
          "openai-codex/gpt-5.3-codex": { params: { transport: "websocket" } },
        },
      },
    },
  };
  applyExtraParamsToAgent(harness.agent, config, "openai-codex", "gpt-5.3-codex");
  const codexModel = {
    api: "openai-codex-responses",
    provider: "openai-codex",
    id: "gpt-5.3-codex",
  } as Model<"openai-codex-responses">;
  const ctx: Context = { messages: [] };
  void harness.agent.streamFn?.(codexModel, ctx, {});
  expect(harness.calls).toHaveLength(1);
  expect(harness.calls[0]?.transport).toBe("websocket");
});
it("defaults Codex transport to auto (WebSocket-first)", () => {
  const harness = createOptionsCaptureAgent();
  // No config at all: the provider-level Codex default should apply.
  applyExtraParamsToAgent(harness.agent, undefined, "openai-codex", "gpt-5.3-codex");
  const codexModel = {
    api: "openai-codex-responses",
    provider: "openai-codex",
    id: "gpt-5.3-codex",
  } as Model<"openai-codex-responses">;
  const ctx: Context = { messages: [] };
  void harness.agent.streamFn?.(codexModel, ctx, {});
  expect(harness.calls).toHaveLength(1);
  expect(harness.calls[0]?.transport).toBe("auto");
});
it("does not set transport defaults for non-Codex providers", () => {
  const harness = createOptionsCaptureAgent();
  // A plain openai provider should see no injected transport value.
  applyExtraParamsToAgent(harness.agent, undefined, "openai", "gpt-5");
  const plainModel = {
    api: "openai-responses",
    provider: "openai",
    id: "gpt-5",
  } as Model<"openai-responses">;
  const ctx: Context = { messages: [] };
  void harness.agent.streamFn?.(plainModel, ctx, {});
  expect(harness.calls).toHaveLength(1);
  expect(harness.calls[0]?.transport).toBeUndefined();
});
it("allows forcing Codex transport to SSE", () => {
  const harness = createOptionsCaptureAgent();
  // Explicit "sse" in config must win over the Codex "auto" default.
  const config = {
    agents: {
      defaults: {
        models: {
          "openai-codex/gpt-5.3-codex": { params: { transport: "sse" } },
        },
      },
    },
  };
  applyExtraParamsToAgent(harness.agent, config, "openai-codex", "gpt-5.3-codex");
  const codexModel = {
    api: "openai-codex-responses",
    provider: "openai-codex",
    id: "gpt-5.3-codex",
  } as Model<"openai-codex-responses">;
  const ctx: Context = { messages: [] };
  void harness.agent.streamFn?.(codexModel, ctx, {});
  expect(harness.calls).toHaveLength(1);
  expect(harness.calls[0]?.transport).toBe("sse");
});
it("lets runtime options override configured transport", () => {
  const harness = createOptionsCaptureAgent();
  // Config asks for websocket, but the per-call option should take priority.
  const config = {
    agents: {
      defaults: {
        models: {
          "openai-codex/gpt-5.3-codex": { params: { transport: "websocket" } },
        },
      },
    },
  };
  applyExtraParamsToAgent(harness.agent, config, "openai-codex", "gpt-5.3-codex");
  const codexModel = {
    api: "openai-codex-responses",
    provider: "openai-codex",
    id: "gpt-5.3-codex",
  } as Model<"openai-codex-responses">;
  const ctx: Context = { messages: [] };
  void harness.agent.streamFn?.(codexModel, ctx, { transport: "sse" });
  expect(harness.calls).toHaveLength(1);
  expect(harness.calls[0]?.transport).toBe("sse");
});
it("falls back to Codex default transport when configured value is invalid", () => {
  const harness = createOptionsCaptureAgent();
  // "udp" is not a recognized transport; the Codex "auto" default should win.
  const config = {
    agents: {
      defaults: {
        models: {
          "openai-codex/gpt-5.3-codex": { params: { transport: "udp" } },
        },
      },
    },
  };
  applyExtraParamsToAgent(harness.agent, config, "openai-codex", "gpt-5.3-codex");
  const codexModel = {
    api: "openai-codex-responses",
    provider: "openai-codex",
    id: "gpt-5.3-codex",
  } as Model<"openai-codex-responses">;
  const ctx: Context = { messages: [] };
  void harness.agent.streamFn?.(codexModel, ctx, {});
  expect(harness.calls).toHaveLength(1);
  expect(harness.calls[0]?.transport).toBe("auto");
});
it("disables prompt caching for non-Anthropic Bedrock models", () => {
const { calls, agent } = createOptionsCaptureAgent();

View File

@@ -117,6 +117,13 @@ function createStreamFnWithExtraParams(
if (typeof extraParams.maxTokens === "number") {
streamParams.maxTokens = extraParams.maxTokens;
}
const transport = extraParams.transport;
if (transport === "sse" || transport === "websocket" || transport === "auto") {
streamParams.transport = transport;
} else if (transport != null) {
const transportSummary = typeof transport === "string" ? transport : typeof transport;
log.warn(`ignoring invalid transport param: ${transportSummary}`);
}
const cacheRetention = resolveCacheRetention(extraParams, provider);
if (cacheRetention) {
streamParams.cacheRetention = cacheRetention;
@@ -234,6 +241,15 @@ function createOpenAIResponsesStoreWrapper(baseStreamFn: StreamFn | undefined):
};
}
/**
 * Wraps a stream function so that calls default to the "auto" transport
 * (WebSocket-first with SSE fallback) when the caller did not pick one
 * via options.transport. Falls back to streamSimple when no base stream
 * function is supplied.
 */
function createCodexDefaultTransportWrapper(baseStreamFn: StreamFn | undefined): StreamFn {
  const delegate = baseStreamFn ?? streamSimple;
  return (model, context, options) => {
    // Only fill in "auto" when nothing else specified a transport.
    const transport = options?.transport ?? "auto";
    return delegate(model, context, { ...options, transport });
  };
}
function isAnthropic1MModel(modelId: string): boolean {
const normalized = modelId.trim().toLowerCase();
return ANTHROPIC_1M_MODEL_PREFIXES.some((prefix) => normalized.startsWith(prefix));
@@ -652,6 +668,10 @@ export function applyExtraParamsToAgent(
modelId,
agentId,
});
if (provider === "openai-codex") {
// Default Codex to WebSocket-first when nothing else specifies transport.
agent.streamFn = createCodexDefaultTransportWrapper(agent.streamFn);
}
const override =
extraParamsOverride && Object.keys(extraParamsOverride).length > 0
? Object.fromEntries(