mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-06 17:10:49 +00:00
fix(agents): avoid empty Codex Responses input
This commit is contained in:
committed by
GitHub
parent
4b4e0c82e4
commit
06b1d4e0f7
@@ -24,6 +24,7 @@ Docs: https://docs.openclaw.ai
|
|||||||
### Fixes
|
||||||
|
|
||||||
- Auto-reply: honor explicit `silentReply.direct: "allow"` for clean empty or reasoning-only direct chat turns while keeping the default direct-chat empty-response guard conservative. Fixes #74409. Thanks @jesuskannolis.
|
||||||
|
- OpenAI Codex: send a non-empty Responses input item when a Codex turn only has systemPrompt-backed instructions, avoiding ChatGPT backend 400s from `input: []`. Fixes #73820. Thanks @woodhouse-bot.
|
||||||
- Ollama: normalize provider-prefixed tool-call names at the native stream boundary so Kimi/Ollama calls such as `functions.exec` dispatch as `exec` instead of missing configured tools. Fixes #74487. Thanks @afurm and @carreipeia.
|
||||||
- Security/audit: resolve configured model aliases before model-tier and small-parameter checks, so alias-based GPT-5/Codex configs no longer report false weak-model warnings. Fixes #74455. Thanks @blaspat.
|
||||||
- CLI/agent: isolate Gateway-timeout embedded fallback runs under explicit `gateway-fallback-*` sessions so accepted Gateway runs cannot race transcript locks or replace the routed conversation session. Fixes #62981. Thanks @HemantSudarshan.
|
||||||
|
|||||||
@@ -1031,6 +1031,40 @@ describe("openai transport stream", () => {
|
|||||||
expect(params.service_tier).toBe("auto");
|
expect(params.service_tier).toBe("auto");
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it("adds minimal user input for Codex responses when only the system prompt is present", () => {
|
||||||
|
const params = buildOpenAIResponsesParams(
|
||||||
|
{
|
||||||
|
id: "gpt-5.4",
|
||||||
|
name: "GPT-5.4",
|
||||||
|
api: "openai-codex-responses",
|
||||||
|
provider: "openai-codex",
|
||||||
|
baseUrl: "https://chatgpt.com/backend-api",
|
||||||
|
reasoning: true,
|
||||||
|
input: ["text"],
|
||||||
|
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
|
||||||
|
contextWindow: 200000,
|
||||||
|
maxTokens: 8192,
|
||||||
|
} satisfies Model<"openai-codex-responses">,
|
||||||
|
{
|
||||||
|
systemPrompt: `Stable prefix${SYSTEM_PROMPT_CACHE_BOUNDARY}Dynamic suffix`,
|
||||||
|
messages: [],
|
||||||
|
tools: [],
|
||||||
|
} as never,
|
||||||
|
undefined,
|
||||||
|
) as {
|
||||||
|
input?: Array<{ role?: string; content?: Array<{ type?: string; text?: string }> }>;
|
||||||
|
instructions?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
expect(params.instructions).toBe("Stable prefix\nDynamic suffix");
|
||||||
|
expect(params.input).toEqual([
|
||||||
|
{
|
||||||
|
role: "user",
|
||||||
|
content: [{ type: "input_text", text: " " }],
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
it("does not infer high reasoning when Pi passes thinking off", () => {
|
it("does not infer high reasoning when Pi passes thinking off", () => {
|
||||||
const params = buildOpenAIResponsesParams(
|
const params = buildOpenAIResponsesParams(
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -53,6 +53,7 @@ import { transformTransportMessages } from "./transport-message-transform.js";
|
|||||||
import { mergeTransportMetadata, sanitizeTransportPayloadText } from "./transport-stream-shared.js";
|
import { mergeTransportMetadata, sanitizeTransportPayloadText } from "./transport-stream-shared.js";
|
||||||
|
|
||||||
const DEFAULT_AZURE_OPENAI_API_VERSION = "2024-12-01-preview";
|
const DEFAULT_AZURE_OPENAI_API_VERSION = "2024-12-01-preview";
|
||||||
|
const OPENAI_CODEX_RESPONSES_EMPTY_INPUT_TEXT = " ";
|
||||||
const log = createSubsystemLogger("openai-transport");
|
const log = createSubsystemLogger("openai-transport");
|
||||||
|
|
||||||
type BaseStreamOptions = {
|
type BaseStreamOptions = {
|
||||||
@@ -876,6 +877,22 @@ function buildOpenAICodexResponsesInstructions(context: Context): string | undef
|
|||||||
return sanitizeTransportPayloadText(stripSystemPromptCacheBoundary(context.systemPrompt));
|
return sanitizeTransportPayloadText(stripSystemPromptCacheBoundary(context.systemPrompt));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function ensureOpenAICodexResponsesInput(messages: ResponseInput, context: Context): void {
|
||||||
|
if (messages.length > 0 || !context.systemPrompt) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const text = buildOpenAICodexResponsesInstructions(context);
|
||||||
|
if (!text) {
|
||||||
|
throw new Error(
|
||||||
|
"OpenAI Codex Responses requires non-empty input when only systemPrompt is provided.",
|
||||||
|
);
|
||||||
|
}
|
||||||
|
messages.push({
|
||||||
|
role: "user",
|
||||||
|
content: [{ type: "input_text", text: OPENAI_CODEX_RESPONSES_EMPTY_INPUT_TEXT }],
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
export function buildOpenAIResponsesParams(
|
export function buildOpenAIResponsesParams(
|
||||||
model: Model<Api>,
|
model: Model<Api>,
|
||||||
context: Context,
|
context: Context,
|
||||||
@@ -892,6 +909,9 @@ export function buildOpenAIResponsesParams(
|
|||||||
new Set(["openai", "openai-codex", "opencode", "azure-openai-responses"]),
|
new Set(["openai", "openai-codex", "opencode", "azure-openai-responses"]),
|
||||||
{ includeSystemPrompt: !isCodexResponses, supportsDeveloperRole },
|
{ includeSystemPrompt: !isCodexResponses, supportsDeveloperRole },
|
||||||
);
|
);
|
||||||
|
if (isCodexResponses) {
|
||||||
|
ensureOpenAICodexResponsesInput(messages, context);
|
||||||
|
}
|
||||||
const cacheRetention = resolveCacheRetention(options?.cacheRetention);
|
const cacheRetention = resolveCacheRetention(options?.cacheRetention);
|
||||||
const payloadPolicy = resolveOpenAIResponsesPayloadPolicy(model, {
|
const payloadPolicy = resolveOpenAIResponsesPayloadPolicy(model, {
|
||||||
storeMode: "disable",
|
storeMode: "disable",
|
||||||
|
|||||||
Reference in New Issue
Block a user