fix: keep codex resume runs non-interactive (#67666) (thanks @plgonzalezrx8)

This commit is contained in:
Peter Steinberger
2026-04-16 16:36:05 +01:00
parent 4c66978591
commit 461d0050d9
6 changed files with 23 additions and 17 deletions

View File

@@ -43,6 +43,7 @@ Docs: https://docs.openclaw.ai
- Extensions/lmstudio: add exponential backoff to the inference-preload wrapper so an LM Studio model-load failure (for example the built-in memory guardrail rejecting a load because the swap is saturated) no longer produces a WARN line every ~2s for every chat request. The wrapper now records consecutive preload failures per `(baseUrl, modelKey, contextLength)` tuple with a 5s → 10s → 20s → … → 5min cooldown and skips the preload step entirely while a cooldown is active, letting chat requests proceed directly to the stream (the model is often already loaded via the LM Studio UI). The combined `preload failed` log line now reports consecutive-failure count and remaining cooldown so operators can act on the real issue instead of drowning in repeated warnings. (#67401) Thanks @xantorres.
- Agents/replay: re-run tool/result pairing after strict replay tool-call ID sanitization on outbound requests so Anthropic-compatible providers like MiniMax no longer receive malformed orphan tool-result IDs such as `...toolresult1` during compaction and retry flows. (#67620) Thanks @stainlu.
- Gateway/startup: fix spurious SIGUSR1 restart loop on Linux/systemd when plugin auto-enable is the only startup config write; the config hash guard was not captured for that write path, causing chokidar to treat each boot write as an external change and trigger a reload → restart cycle that corrupts manifest.db after repeated cycles. Fixes #67436. (#67557) Thanks @openperf.
- OpenAI Codex/CLI: keep resumed `codex exec resume` runs on the safe non-interactive path without reintroducing the removed dangerous bypass flag by passing the supported `--skip-git-repo-check` resume arg that real Codex CLI requires outside trusted git directories. (#67666) Thanks @plgonzalezrx8.
## 2026.4.15-beta.1

View File

@@ -221,7 +221,7 @@ The bundled OpenAI plugin also registers a default for `codex-cli`:
- `command: "codex"`
- `args: ["exec","--json","--color","never","--sandbox","workspace-write","--skip-git-repo-check"]`
- `resumeArgs: ["exec","resume","{sessionId}","--color","never","--sandbox","workspace-write","--skip-git-repo-check"]`
- `resumeArgs: ["exec","resume","{sessionId}","--skip-git-repo-check"]`
- `output: "jsonl"`
- `resumeOutput: "text"`
- `modelArg: "--model"`

View File

@@ -31,7 +31,7 @@ export function buildOpenAICodexCliBackend(): CliBackendPlugin {
"workspace-write",
"--skip-git-repo-check",
],
resumeArgs: ["exec", "resume", "{sessionId}"],
resumeArgs: ["exec", "resume", "{sessionId}", "--skip-git-repo-check"],
output: "jsonl",
resumeOutput: "text",
input: "arg",

View File

@@ -244,7 +244,7 @@ beforeEach(() => {
"workspace-write",
"--skip-git-repo-check",
],
resumeArgs: ["exec", "resume", "{sessionId}"],
resumeArgs: ["exec", "resume", "{sessionId}", "--skip-git-repo-check"],
systemPromptFileConfigArg: "-c",
systemPromptFileConfigKey: "model_instructions_file",
systemPromptWhen: "first",
@@ -309,7 +309,7 @@ beforeEach(() => {
});
describe("resolveCliBackendConfig reliability merge", () => {
it("defaults codex-cli to workspace-write for fresh and resume runs", () => {
it("defaults codex-cli fresh sandboxing and resume trust bypass for non-git runs", () => {
const resolved = resolveCliBackendConfig("codex-cli");
expect(resolved).not.toBeNull();
@@ -322,7 +322,12 @@ describe("resolveCliBackendConfig reliability merge", () => {
"workspace-write",
"--skip-git-repo-check",
]);
expect(resolved?.config.resumeArgs).toEqual(["exec", "resume", "{sessionId}"]);
expect(resolved?.config.resumeArgs).toEqual([
"exec",
"resume",
"{sessionId}",
"--skip-git-repo-check",
]);
});
it("deep-merges reliability watchdog overrides for codex", () => {

View File

@@ -53,7 +53,7 @@ function buildPreparedCliRunContext(params: {
: {
command: "codex",
args: ["exec", "--json"],
resumeArgs: ["exec", "resume", "{sessionId}", "--json"],
resumeArgs: ["exec", "resume", "{sessionId}", "--skip-git-repo-check"],
output: "text" as const,
input: "arg" as const,
modelArg: "--model",
@@ -435,7 +435,16 @@ describe("runCliAgent spawn path", () => {
scopeKey?: string;
};
expect(input.mode).toBe("child");
expect(input.argv?.[0]).toBe("codex");
expect(input.argv).toEqual([
"codex",
"exec",
"resume",
"thread-123",
"--skip-git-repo-check",
"--model",
"gpt-5.4",
"hi",
]);
expect(input.timeoutMs).toBe(1_000);
expect(input.noOutputTimeoutMs).toBeGreaterThanOrEqual(1_000);
expect(input.replaceExistingScope).toBe(true);

View File

@@ -120,16 +120,7 @@ function buildOpenAICodexCliBackendFixture(): CliBackendPlugin {
"workspace-write",
"--skip-git-repo-check",
],
resumeArgs: [
"exec",
"resume",
"{sessionId}",
"--color",
"never",
"--sandbox",
"workspace-write",
"--skip-git-repo-check",
],
resumeArgs: ["exec", "resume", "{sessionId}", "--skip-git-repo-check"],
output: "jsonl",
resumeOutput: "text",
input: "arg",