diff --git a/extensions/openai/default-models.ts b/extensions/openai/default-models.ts
index 7034c2053c0..e4157a3caf3 100644
--- a/extensions/openai/default-models.ts
+++ b/extensions/openai/default-models.ts
@@ -5,7 +5,7 @@ import {
 } from "openclaw/plugin-sdk/provider-onboard";
 
 export const OPENAI_DEFAULT_MODEL = "openai/gpt-5.5";
-export const OPENAI_CODEX_DEFAULT_MODEL = "openai-codex/gpt-5.5";
+export const OPENAI_CODEX_DEFAULT_MODEL = "openai/gpt-5.5";
 export const OPENAI_DEFAULT_IMAGE_MODEL = "gpt-image-2";
 export const OPENAI_DEFAULT_TTS_MODEL = "gpt-4o-mini-tts";
 export const OPENAI_DEFAULT_TTS_VOICE = "alloy";
diff --git a/extensions/openai/openai-provider.ts b/extensions/openai/openai-provider.ts
index b52a87525aa..67adb2f7c91 100644
--- a/extensions/openai/openai-provider.ts
+++ b/extensions/openai/openai-provider.ts
@@ -275,7 +275,7 @@ export function buildOpenAIProvider(): ProviderPlugin {
       if (ctx.provider !== PROVIDER_ID || ctx.listProfileIds("openai-codex").length === 0) {
         return undefined;
       }
-      return 'No API key found for provider "openai". You are authenticated with OpenAI Codex OAuth. Use openai-codex/gpt-5.5 (OAuth) or set OPENAI_API_KEY to use openai/gpt-5.5.';
+      return 'No API key found for provider "openai". You are authenticated with OpenAI Codex OAuth. Use openai/gpt-5.5 with the Codex OAuth profile, or set OPENAI_API_KEY for direct OpenAI API access.';
     },
     suppressBuiltInModel: (ctx) => {
       if (
diff --git a/src/auto-reply/reply/agent-runner-execution.ts b/src/auto-reply/reply/agent-runner-execution.ts
index 5278ad1a7d4..c9c2c0ba230 100644
--- a/src/auto-reply/reply/agent-runner-execution.ts
+++ b/src/auto-reply/reply/agent-runner-execution.ts
@@ -356,7 +356,7 @@ function buildMissingApiKeyFailureText(message: string): string | null {
     return null;
   }
   if (provider === "openai" && normalizedMessage.includes("OpenAI Codex OAuth")) {
-    return "⚠️ Missing API key for OpenAI on the gateway. Use `openai-codex/gpt-5.5` for OAuth, or set `OPENAI_API_KEY`, then try again.";
+    return "⚠️ Missing API key for OpenAI on the gateway. Use `openai/gpt-5.5` with the Codex OAuth profile, or set `OPENAI_API_KEY`, then try again.";
   }
   if (SAFE_MISSING_API_KEY_PROVIDERS.has(provider)) {
     return `⚠️ Missing API key for provider "${provider}". Configure the gateway auth for that provider, then try again.`;
diff --git a/src/auto-reply/reply/response-prefix-template.ts b/src/auto-reply/reply/response-prefix-template.ts
index 9c8d745ec9f..be0d593ddbe 100644
--- a/src/auto-reply/reply/response-prefix-template.ts
+++ b/src/auto-reply/reply/response-prefix-template.ts
@@ -10,7 +10,7 @@ import { normalizeLowercaseStringOrEmpty } from "../../shared/string-coerce.js";
 export type ResponsePrefixContext = {
   /** Short model name (e.g., "gpt-5.4", "claude-opus-4-6") */
   model?: string;
-  /** Full model ID including provider (e.g., "openai-codex/gpt-5.4") */
+  /** Full model ID including provider (e.g., "openai/gpt-5.5") */
   modelFull?: string;
   /** Provider name (e.g., "openai-codex", "anthropic") */
   provider?: string;
@@ -77,7 +77,7 @@ export function resolveResponsePrefixTemplate(
  * - Common version suffixes (e.g., "-latest")
  *
  * @example
- * extractShortModelName("openai-codex/gpt-5.4") // "gpt-5.4"
+ * extractShortModelName("openai/gpt-5.5") // "gpt-5.5"
  * extractShortModelName("claude-opus-4-6-20260205") // "claude-opus-4-6"
  * extractShortModelName("gpt-5.4-latest") // "gpt-5.4"
  */
diff --git a/src/gateway/gateway-codex-harness.live-helpers.ts b/src/gateway/gateway-codex-harness.live-helpers.ts
index 7413937da0c..b4aed1dd656 100644
--- a/src/gateway/gateway-codex-harness.live-helpers.ts
+++ b/src/gateway/gateway-codex-harness.live-helpers.ts
@@ -7,7 +7,9 @@ export const EXPECTED_CODEX_MODELS_COMMAND_TEXT = [
   "Available agent IDs in this session:",
   "opened an interactive trust prompt",
   "opened an interactive model-selection prompt",
+  "running as Codex on `openai/",
   "running as Codex on `codex/",
+  "currently running on `openai/",
   "currently running on `codex/",
   "stdin is not a terminal",
   "The local `codex models` entrypoint is interactive in this environment",
@@ -30,21 +32,33 @@ export const EXPECTED_CODEX_MODELS_COMMAND_TEXT = [
   "Available models in this Codex install",
   "Available agent models:",
   "Visible options in this session:",
+  "Current: `openai/",
   "Current: `codex/",
   "Current model:",
+  "Current model: `openai/",
   "Current model: `codex/",
+  "Current model is `openai/",
   "Current model is `codex/",
+  "Current session model: `openai/",
   "Current session model: `codex/",
+  "Current session model is `openai/",
   "Current session model is `codex/",
+  "The current session is using `openai/",
   "The current session is using `codex/",
+  "current session is using `openai/",
   "current session is using `codex/",
   "Configured model from `~/.codex/config.toml`:",
   "Configured models in this session:",
   "Default model:",
+  "This harness is configured with a single Codex model: `openai/",
   "This harness is configured with a single Codex model: `codex/",
+  "Primary model: `openai/",
   "Primary model: `codex/",
+  "Registered models: `openai/",
   "Registered models: `codex/",
+  "Active model: `openai/",
   "Active model: `codex/",
+  "Current active model is `openai/",
   "Current active model is `codex/",
   "Current OpenClaw session status reports the active model as:",
 ] as const;
@@ -98,7 +112,7 @@ export function isExpectedCodexModelsCommandText(text: string): boolean {
     normalized.includes("live openclaw config shows") ||
     normalized.includes("current gateway config");
   const isSessionConfigFallback =
-    text.includes("`codex/") &&
+    (text.includes("`openai/") || text.includes("`codex/")) &&
     ((mentionsConfiguredModels && mentionsSessionModel) ||
       (mentionsConfigSummary &&
        (mentionsConfiguredModels || mentionsSessionModel)));
@@ -112,6 +126,8 @@ export function isExpectedCodexModelsCommandText(text: string): boolean {
     normalized.includes("available here:") ||
     normalized.includes("available agent ids in this session:");
   const mentionsCurrentActiveModel =
+    normalized.includes("current active model is `openai/") ||
+    normalized.includes("current active model is openai/") ||
     normalized.includes("current active model is `codex/") ||
     normalized.includes("current active model is codex/");
   const mentionsCurrentSelectedModel =
@@ -123,11 +139,12 @@ export function isExpectedCodexModelsCommandText(text: string): boolean {
     mentionsVisibleOptions &&
     mentionsCurrentActiveModel;
   const isAgentIdModelSummary =
-    normalized.includes("available agent ids in this session:") && text.includes("`codex/");
+    normalized.includes("available agent ids in this session:") &&
+    (text.includes("`openai/") || text.includes("`codex/"));
   const isAvailableHereModelSummary =
     normalized.includes("available here:") &&
     normalized.includes("current session model") &&
-    text.includes("`codex/");
+    (text.includes("`openai/") || text.includes("`codex/"));
   const isInteractiveTuiSummary =
     mentionsCodexModelsCommand &&
     mentionsInteractiveSelection &&
diff --git a/src/plugins/provider-model-defaults.ts b/src/plugins/provider-model-defaults.ts
index 8cecb0b4ea5..744b12f61c0 100644
--- a/src/plugins/provider-model-defaults.ts
+++ b/src/plugins/provider-model-defaults.ts
@@ -7,7 +7,7 @@ import { ensureModelAllowlistEntry } from "./provider-model-allowlist.js";
 import { applyAgentDefaultPrimaryModel } from "./provider-model-primary.js";
 
 export const OPENAI_DEFAULT_MODEL = "openai/gpt-5.5";
-export const OPENAI_CODEX_DEFAULT_MODEL = "openai-codex/gpt-5.5";
+export const OPENAI_CODEX_DEFAULT_MODEL = "openai/gpt-5.5";
 export const OPENAI_DEFAULT_IMAGE_MODEL = "gpt-image-2";
 export const OPENAI_DEFAULT_TTS_MODEL = "gpt-4o-mini-tts";
 export const OPENAI_DEFAULT_TTS_VOICE = "alloy";