diff --git a/extensions/openai/cli-backend.ts b/extensions/openai/cli-backend.ts
index 65eb145b608..9876d89852e 100644
--- a/extensions/openai/cli-backend.ts
+++ b/extensions/openai/cli-backend.ts
@@ -14,7 +14,7 @@ export function buildOpenAICodexCliBackend(): CliBackendPlugin {
     defaultImageProbe: true,
     defaultMcpProbe: true,
     docker: {
-      npmPackage: "@openai/codex@0.124.0",
+      npmPackage: "@openai/codex@0.128.0",
       binaryName: "codex",
     },
   },
diff --git a/scripts/test-live-cli-backend-docker.sh b/scripts/test-live-cli-backend-docker.sh
index ba84c5cc013..7fc1bbbea04 100644
--- a/scripts/test-live-cli-backend-docker.sh
+++ b/scripts/test-live-cli-backend-docker.sh
@@ -399,6 +399,39 @@
 cd "$tmp_dir"
 if [ "${OPENCLAW_LIVE_CLI_BACKEND_USE_CI_SAFE_CODEX_CONFIG:-0}" = "1" ]; then
   node --import tsx "$trusted_scripts_dir/prepare-codex-ci-config.ts" "$HOME/.codex/config.toml" "$tmp_dir"
 fi
+if [ "$provider" = "codex-cli" ] && [ "${OPENCLAW_LIVE_CLI_BACKEND_AUTH:-auto}" = "api-key" ]; then
+  codex_probe_model="${OPENCLAW_LIVE_CLI_BACKEND_MODEL#*/}"
+  codex_probe_token="OPENCLAW-CODEX-DIRECT-PROBE"
+  codex_probe_stdout="$tmp_dir/codex-direct-probe.stdout"
+  codex_probe_stderr="$tmp_dir/codex-direct-probe.stderr"
+  if ! timeout --foreground --kill-after=10s 180s \
+    "${OPENCLAW_LIVE_CLI_BACKEND_COMMAND:-codex}" \
+    exec \
+    --json \
+    --color \
+    never \
+    --sandbox \
+    danger-full-access \
+    -c \
+    'service_tier="fast"' \
+    --skip-git-repo-check \
+    --model \
+    "$codex_probe_model" \
+    "Reply exactly: $codex_probe_token" \
+    >"$codex_probe_stdout" 2>"$codex_probe_stderr"; then
+    echo "ERROR: direct Codex CLI probe failed before OpenClaw gateway smoke." >&2
+    sed -n '1,120p' "$codex_probe_stdout" >&2 || true
+    sed -n '1,120p' "$codex_probe_stderr" >&2 || true
+    exit 1
+  fi
+  if ! grep -q "$codex_probe_token" "$codex_probe_stdout"; then
+    echo "ERROR: direct Codex CLI probe did not return expected token." >&2
+    sed -n '1,120p' "$codex_probe_stdout" >&2 || true
+    sed -n '1,120p' "$codex_probe_stderr" >&2 || true
+    exit 1
+  fi
+  echo "==> Direct Codex CLI probe ok"
+fi
 pnpm test:live src/gateway/gateway-cli-backend.live.test.ts
 EOF
diff --git a/src/agents/cli-backends.test.ts b/src/agents/cli-backends.test.ts
index b488f3f6c66..ae385fd130f 100644
--- a/src/agents/cli-backends.test.ts
+++ b/src/agents/cli-backends.test.ts
@@ -64,7 +64,7 @@ function createBackendEntry(params: {
       params.id === "claude-cli"
         ? "@anthropic-ai/claude-code"
         : params.id === "codex-cli"
-          ? "@openai/codex@0.125.0"
+          ? "@openai/codex@0.128.0"
           : params.id === "google-gemini-cli"
             ? "@google/gemini-cli"
             : undefined,
@@ -490,7 +490,7 @@ describe("resolveCliBackendLiveTest", () => {
       defaultModelRef: "codex-cli/gpt-5.5",
       defaultImageProbe: true,
       defaultMcpProbe: true,
-      dockerNpmPackage: "@openai/codex@0.125.0",
+      dockerNpmPackage: "@openai/codex@0.128.0",
       dockerBinaryName: "codex",
     });
   });
diff --git a/test/scripts/docker-build-helper.test.ts b/test/scripts/docker-build-helper.test.ts
index 80baa639224..3238dedb895 100644
--- a/test/scripts/docker-build-helper.test.ts
+++ b/test/scripts/docker-build-helper.test.ts
@@ -90,6 +90,8 @@ describe("docker build helper", () => {
     expect(liveCliBackend).toContain(
       'OPENCLAW_LIVE_DOCKER_REPO_ROOT="$ROOT_DIR" "$TRUSTED_HARNESS_DIR/scripts/test-live-build-docker.sh"',
     );
+    expect(liveCliBackend).toContain("direct Codex CLI probe failed before OpenClaw gateway smoke");
+    expect(liveCliBackend).toContain("==> Direct Codex CLI probe ok");
     expect(liveCliBackend).not.toContain(
       'echo "==> Reuse live-test image: $LIVE_IMAGE_NAME (OPENCLAW_SKIP_DOCKER_BUILD=1)"',
     );