diff --git a/CHANGELOG.md b/CHANGELOG.md index 813cb1a4706..402da720a79 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -27,7 +27,7 @@ Docs: https://docs.openclaw.ai - Providers/Codex: pass agent and workspace directories into provider stream wrappers so Codex native `web_search` activation can evaluate the correct auth context, and smoke-test the built status-message runtime by resolving the emitted bundle name. Carries forward #67843; refs #65909. Thanks @neilofneils404. - Cron/models: keep `payload.model` as a per-job primary that can use configured fallbacks, while still letting `payload.fallbacks: []` make cron runs strict and avoid hidden agent-primary retries. Refs #73023. Thanks @pavelyortho-cyber. - Models/fallbacks: treat user-selected session models as exact choices, so `/model ollama/...` and model-picker switches fail visibly when the selected provider is unreachable instead of answering from an unrelated configured fallback. Fixes #73023. Thanks @pavelyortho-cyber. -- Codex harness: expose `appServer.clearEnv` in the plugin config schema so deployments can keep Gateway-level `OPENAI_API_KEY` for embeddings and direct OpenAI models while removing it from the spawned native Codex app-server process. Fixes #73057. Thanks @holgergruenhagen. +- Codex harness: automatically clear inherited `OPENAI_API_KEY` from spawned Codex app-server processes when the harness is using ChatGPT subscription auth, while keeping explicit Codex API-key profiles and the manual `appServer.clearEnv` escape hatch available. Fixes #73057. Thanks @holgergruenhagen. - CLI/model probes: fail local `infer model run` probes when the provider returns no text output, so unreachable local providers and empty completions no longer look like successful smoke tests. Refs #73023. Thanks @pavelyortho-cyber. 
- CLI/Ollama: run local `infer model run` through the lean provider completion path and skip global model discovery for one-shot local probes, so Ollama smoke tests no longer pay full chat-agent/tool startup cost or hang before the native `/api/chat` request. Fixes #72851. Thanks @TotalRes2020. - Doctor/gateway services: ignore launchd/systemd companion services that only reference the gateway as a dependency, suppress inactive Linux extra-service warnings, and avoid rewriting a running systemd gateway command/entrypoint during doctor repair. Carries forward #39118. Thanks @therk. diff --git a/docs/plugins/codex-harness.md b/docs/plugins/codex-harness.md index ecfe5bdb3a0..37164afe451 100644 --- a/docs/plugins/codex-harness.md +++ b/docs/plugins/codex-harness.md @@ -509,9 +509,15 @@ For an already-running app-server, use WebSocket transport: ``` Stdio app-server launches inherit OpenClaw's process environment by default. -When the Gateway needs `OPENAI_API_KEY` for embeddings or direct OpenAI models -but Codex should use the local ChatGPT login, clear that variable only for the -Codex child: +When OpenClaw sees that the Codex harness is using a ChatGPT subscription-style +auth profile, including the local Codex CLI login imported as +`openai-codex:default`, it automatically removes `OPENAI_API_KEY` from the +spawned Codex child process. That keeps Gateway-level API keys available for +embeddings or direct OpenAI models without making native Codex app-server turns +bill through the API by accident. + +Explicit Codex API-key profiles are left alone. 
If a deployment needs additional +environment isolation, add those variables to `appServer.clearEnv`: ```json5 { @@ -534,21 +540,21 @@ Codex child: Supported `appServer` fields: -| Field | Default | Meaning | -| ------------------- | ---------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------- | -| `transport` | `"stdio"` | `"stdio"` spawns Codex; `"websocket"` connects to `url`. | -| `command` | managed Codex binary | Executable for stdio transport. Leave unset to use the managed binary; set it only for an explicit override. | -| `args` | `["app-server", "--listen", "stdio://"]` | Arguments for stdio transport. | -| `url` | unset | WebSocket app-server URL. | -| `authToken` | unset | Bearer token for WebSocket transport. | -| `headers` | `{}` | Extra WebSocket headers. | -| `clearEnv` | `[]` | Environment variable names removed from the spawned stdio app-server process after OpenClaw builds its inherited environment. | -| `requestTimeoutMs` | `60000` | Timeout for app-server control-plane calls. | -| `mode` | `"yolo"` | Preset for YOLO or guardian-reviewed execution. | -| `approvalPolicy` | `"never"` | Native Codex approval policy sent to thread start/resume/turn. | -| `sandbox` | `"danger-full-access"` | Native Codex sandbox mode sent to thread start/resume. | -| `approvalsReviewer` | `"user"` | Use `"auto_review"` to let Codex review native approval prompts. `guardian_subagent` remains a legacy alias. | -| `serviceTier` | unset | Optional Codex app-server service tier: `"fast"`, `"flex"`, or `null`. Invalid legacy values are ignored. | +| Field | Default | Meaning | +| ------------------- | ---------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- | +| `transport` | `"stdio"` | `"stdio"` spawns Codex; `"websocket"` connects to `url`. 
| +| `command` | managed Codex binary | Executable for stdio transport. Leave unset to use the managed binary; set it only for an explicit override. | +| `args` | `["app-server", "--listen", "stdio://"]` | Arguments for stdio transport. | +| `url` | unset | WebSocket app-server URL. | +| `authToken` | unset | Bearer token for WebSocket transport. | +| `headers` | `{}` | Extra WebSocket headers. | +| `clearEnv` | `[]` | Extra environment variable names removed from the spawned stdio app-server process after OpenClaw builds its inherited environment. | +| `requestTimeoutMs` | `60000` | Timeout for app-server control-plane calls. | +| `mode` | `"yolo"` | Preset for YOLO or guardian-reviewed execution. | +| `approvalPolicy` | `"never"` | Native Codex approval policy sent to thread start/resume/turn. | +| `sandbox` | `"danger-full-access"` | Native Codex sandbox mode sent to thread start/resume. | +| `approvalsReviewer` | `"user"` | Use `"auto_review"` to let Codex review native approval prompts. `guardian_subagent` remains a legacy alias. | +| `serviceTier` | unset | Optional Codex app-server service tier: `"fast"`, `"flex"`, or `null`. Invalid legacy values are ignored. 
| Environment overrides remain available for local testing: diff --git a/extensions/codex/src/app-server/auth-bridge.test.ts b/extensions/codex/src/app-server/auth-bridge.test.ts index 33906ef8d1e..4b7d9c48d2a 100644 --- a/extensions/codex/src/app-server/auth-bridge.test.ts +++ b/extensions/codex/src/app-server/auth-bridge.test.ts @@ -8,6 +8,7 @@ import { bridgeCodexAppServerStartOptions, refreshCodexAppServerAuthTokens, } from "./auth-bridge.js"; +import type { CodexAppServerStartOptions } from "./config.js"; const oauthMocks = vi.hoisted(() => ({ refreshOpenAICodexToken: vi.fn(), @@ -96,25 +97,54 @@ afterEach(() => { providerRuntimeMocks.refreshProviderOAuthCredentialWithPlugin.mockClear(); }); +function createStartOptions( + overrides: Partial<CodexAppServerStartOptions> = {}, +): CodexAppServerStartOptions { + return { + transport: "stdio", + command: "codex", + args: ["app-server"], + headers: { authorization: "Bearer dev-token" }, + ...overrides, + }; +} + +async function writeCodexCliAuthFile(codexHome: string): Promise<void> { + await fs.mkdir(codexHome, { recursive: true }); + await fs.writeFile( + path.join(codexHome, "auth.json"), + JSON.stringify({ + tokens: { + access_token: "cli-access-token", + refresh_token: "cli-refresh-token", + account_id: "cli-account-123", + }, + }), + ); +} + describe("bridgeCodexAppServerStartOptions", () => { - it("leaves Codex app-server start options unchanged", async () => { + it("clears an inherited OpenAI API key when local Codex CLI OAuth is available", async () => { const agentDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-codex-app-server-")); - const startOptions = { - transport: "stdio" as const, - command: "codex", - args: ["app-server"], - headers: { authorization: "Bearer dev-token" }, + const codexHome = path.join(agentDir, "codex-home"); + const startOptions = createStartOptions({ env: { CODEX_HOME: "/tmp/source-codex-home", EXISTING: "1" }, clearEnv: ["FOO"], - }; + }); + vi.stubEnv("CODEX_HOME", codexHome); try { + await 
writeCodexCliAuthFile(codexHome); + await expect( bridgeCodexAppServerStartOptions({ startOptions, agentDir, - authProfileId: "openai-codex:default", }), - ).resolves.toBe(startOptions); + ).resolves.toEqual({ + ...startOptions, + clearEnv: ["FOO", "OPENAI_API_KEY"], + }); + expect(startOptions.clearEnv).toEqual(["FOO"]); await expect(fs.access(path.join(agentDir, "harness-auth"))).rejects.toMatchObject({ code: "ENOENT", }); @@ -123,6 +153,126 @@ describe("bridgeCodexAppServerStartOptions", () => { } }); + it("clears an inherited OpenAI API key for an explicit Codex OAuth profile", async () => { + const agentDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-codex-app-server-")); + const startOptions = createStartOptions({ clearEnv: ["FOO"] }); + try { + upsertAuthProfile({ + agentDir, + profileId: "openai-codex:work", + credential: { + type: "oauth", + provider: "openai-codex", + access: "access-token", + refresh: "refresh-token", + expires: Date.now() + 24 * 60 * 60_000, + accountId: "account-123", + }, + }); + + await expect( + bridgeCodexAppServerStartOptions({ + startOptions, + agentDir, + authProfileId: "openai-codex:work", + }), + ).resolves.toEqual({ + ...startOptions, + clearEnv: ["FOO", "OPENAI_API_KEY"], + }); + } finally { + await fs.rm(agentDir, { recursive: true, force: true }); + } + }); + + it("clears an inherited OpenAI API key for an explicit Codex token profile", async () => { + const agentDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-codex-app-server-")); + const startOptions = createStartOptions({ clearEnv: ["FOO"] }); + try { + upsertAuthProfile({ + agentDir, + profileId: "openai-codex:work", + credential: { + type: "token", + provider: "openai-codex", + token: "access-token", + }, + }); + + await expect( + bridgeCodexAppServerStartOptions({ + startOptions, + agentDir, + authProfileId: "openai-codex:work", + }), + ).resolves.toEqual({ + ...startOptions, + clearEnv: ["FOO", "OPENAI_API_KEY"], + }); + } finally { + await 
fs.rm(agentDir, { recursive: true, force: true }); + } + }); + + it("keeps an inherited OpenAI API key for an explicit Codex api-key profile", async () => { + const agentDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-codex-app-server-")); + const startOptions = createStartOptions({ clearEnv: ["FOO"] }); + try { + upsertAuthProfile({ + agentDir, + profileId: "openai-codex:work", + credential: { + type: "api_key", + provider: "openai-codex", + key: "explicit-api-key", + }, + }); + + await expect( + bridgeCodexAppServerStartOptions({ + startOptions, + agentDir, + authProfileId: "openai-codex:work", + }), + ).resolves.toBe(startOptions); + } finally { + await fs.rm(agentDir, { recursive: true, force: true }); + } + }); + + it("does not clear process environment for websocket app-server connections", async () => { + const agentDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-codex-app-server-")); + const startOptions = createStartOptions({ + transport: "websocket", + url: "ws://127.0.0.1:1455", + clearEnv: ["FOO"], + }); + try { + upsertAuthProfile({ + agentDir, + profileId: "openai-codex:work", + credential: { + type: "oauth", + provider: "openai-codex", + access: "access-token", + refresh: "refresh-token", + expires: Date.now() + 24 * 60 * 60_000, + accountId: "account-123", + }, + }); + + await expect( + bridgeCodexAppServerStartOptions({ + startOptions, + agentDir, + authProfileId: "openai-codex:work", + }), + ).resolves.toBe(startOptions); + } finally { + await fs.rm(agentDir, { recursive: true, force: true }); + } + }); + it("applies an OpenAI Codex OAuth profile through app-server login", async () => { const agentDir = await fs.mkdtemp(path.join(os.tmpdir(), "openclaw-codex-app-server-")); const request = vi.fn(async () => ({ type: "chatgptAuthTokens" })); diff --git a/extensions/codex/src/app-server/auth-bridge.ts b/extensions/codex/src/app-server/auth-bridge.ts index 241fb3b2684..135fc7fffa3 100644 --- 
a/extensions/codex/src/app-server/auth-bridge.ts +++ b/extensions/codex/src/app-server/auth-bridge.ts @@ -13,15 +13,25 @@ import type { ChatgptAuthTokensRefreshResponse } from "./protocol-generated/type import type { LoginAccountParams } from "./protocol-generated/typescript/v2/LoginAccountParams.js"; const CODEX_APP_SERVER_AUTH_PROVIDER = "openai-codex"; +const OPENAI_CODEX_DEFAULT_PROFILE_ID = "openai-codex:default"; +const OPENAI_API_KEY_ENV_VAR = "OPENAI_API_KEY"; export async function bridgeCodexAppServerStartOptions(params: { startOptions: CodexAppServerStartOptions; agentDir: string; authProfileId?: string; }): Promise<CodexAppServerStartOptions> { - void params.agentDir; - void params.authProfileId; - return params.startOptions; + if (params.startOptions.transport !== "stdio") { + return params.startOptions; + } + const store = ensureAuthProfileStore(params.agentDir, { allowKeychainPrompt: false }); + const shouldClearInheritedOpenAiApiKey = shouldClearOpenAiApiKeyForCodexAuthProfile({ + store, + authProfileId: params.authProfileId, + }); + return shouldClearInheritedOpenAiApiKey + ? withClearedEnvironmentVariable(params.startOptions, OPENAI_API_KEY_ENV_VAR) + : params.startOptions; } export async function applyCodexAppServerAuthProfile(params: { @@ -161,6 +171,38 @@ function isCodexAppServerAuthProvider(provider: string): boolean { return resolveProviderIdForAuth(provider) === CODEX_APP_SERVER_AUTH_PROVIDER; } +function shouldClearOpenAiApiKeyForCodexAuthProfile(params: { + store: ReturnType<typeof ensureAuthProfileStore>; + authProfileId?: string; +}): boolean { + const profileId = params.authProfileId?.trim(); + const credential = profileId + ? 
params.store.profiles[profileId] + : params.store.profiles[OPENAI_CODEX_DEFAULT_PROFILE_ID]; + return isCodexSubscriptionCredential(credential); +} + +function isCodexSubscriptionCredential(credential: AuthProfileCredential | undefined): boolean { + if (!credential || !isCodexAppServerAuthProvider(credential.provider)) { + return false; + } + return credential.type === "oauth" || credential.type === "token"; +} + +function withClearedEnvironmentVariable( + startOptions: CodexAppServerStartOptions, + envVar: string, +): CodexAppServerStartOptions { + const clearEnv = startOptions.clearEnv ?? []; + if (clearEnv.includes(envVar)) { + return startOptions; + } + return { + ...startOptions, + clearEnv: [...clearEnv, envVar], + }; +} + function buildChatgptAuthTokensParams( profileId: string, credential: AuthProfileCredential,