mirror of
https://github.com/openclaw/openclaw.git
synced 2026-05-14 23:50:43 +00:00
Stop preserving stale whole-agent Codex runtime pins
This removes stale whole-agent Codex runtime pins from the remaining doctor/Crestodian paths and teaches doctor cron repair to normalize old openai-codex payload model refs to canonical openai refs. Runtime policy stays provider/model-scoped and cron execution stays strict.
This commit is contained in:
@@ -5,10 +5,59 @@ import {
|
||||
|
||||
type UnknownRecord = Record<string, unknown>;
|
||||
|
||||
function toCanonicalOpenAIModelRef(value: unknown): string | undefined {
|
||||
const raw = readString(value);
|
||||
if (typeof raw !== "string") {
|
||||
return undefined;
|
||||
}
|
||||
const trimmed = raw.trim();
|
||||
const slash = trimmed.indexOf("/");
|
||||
if (slash <= 0) {
|
||||
return undefined;
|
||||
}
|
||||
const provider = trimmed.slice(0, slash).trim().toLowerCase();
|
||||
if (provider !== "openai-codex") {
|
||||
return undefined;
|
||||
}
|
||||
const model = trimmed.slice(slash + 1).trim();
|
||||
return model ? `openai/${model}` : undefined;
|
||||
}
|
||||
|
||||
function normalizeChannel(value: string): string {
|
||||
return normalizeOptionalLowercaseString(value) ?? "";
|
||||
}
|
||||
|
||||
export function hasLegacyOpenAICodexCronModelRef(payload: UnknownRecord): boolean {
|
||||
if (toCanonicalOpenAIModelRef(payload.model)) {
|
||||
return true;
|
||||
}
|
||||
const fallbacks = payload.fallbacks;
|
||||
return (
|
||||
Array.isArray(fallbacks) && fallbacks.some((fallback) => toCanonicalOpenAIModelRef(fallback))
|
||||
);
|
||||
}
|
||||
|
||||
function migrateLegacyOpenAICodexModelRefs(payload: UnknownRecord): boolean {
|
||||
let mutated = false;
|
||||
|
||||
const model = toCanonicalOpenAIModelRef(payload.model);
|
||||
if (model && payload.model !== model) {
|
||||
payload.model = model;
|
||||
mutated = true;
|
||||
}
|
||||
|
||||
const fallbacks = payload.fallbacks;
|
||||
if (Array.isArray(fallbacks)) {
|
||||
const next = fallbacks.map((fallback) => toCanonicalOpenAIModelRef(fallback) ?? fallback);
|
||||
if (next.some((fallback, index) => fallback !== fallbacks[index])) {
|
||||
payload.fallbacks = next;
|
||||
mutated = true;
|
||||
}
|
||||
}
|
||||
|
||||
return mutated;
|
||||
}
|
||||
|
||||
export function migrateLegacyCronPayload(payload: UnknownRecord): boolean {
|
||||
let mutated = false;
|
||||
|
||||
@@ -34,5 +83,9 @@ export function migrateLegacyCronPayload(payload: UnknownRecord): boolean {
|
||||
mutated = true;
|
||||
}
|
||||
|
||||
if (migrateLegacyOpenAICodexModelRefs(payload)) {
|
||||
mutated = true;
|
||||
}
|
||||
|
||||
return mutated;
|
||||
}
|
||||
|
||||
@@ -106,6 +106,30 @@ describe("normalizeStoredCronJobs", () => {
|
||||
});
|
||||
});
|
||||
|
||||
it("rewrites legacy OpenAI Codex model refs in cron payloads", () => {
  // Legacy payload under migration: a whitespace-padded `openai-codex/*`
  // primary model plus a mixed fallback list (one already-canonical
  // anthropic ref, one legacy Codex ref).
  const { job, result } = normalizeOneJob(
    makeLegacyJob({
      id: "legacy-codex-cron-model",
      schedule: { kind: "every", everyMs: 60_000 },
      payload: {
        kind: "agentTurn",
        message: "ping",
        model: " openai-codex/gpt-5.5 ",
        fallbacks: ["anthropic/claude-opus-4.6", "openai-codex/gpt-5.4-mini"],
      },
    }),
  );

  // Normalization must report the mutation, count it once under the
  // dedicated issue key, and rewrite only the Codex refs — the
  // non-Codex fallback and the other payload fields stay intact.
  expect(result.mutated).toBe(true);
  expect(result.issues.legacyPayloadCodexModel).toBe(1);
  expect(job.payload).toMatchObject({
    kind: "agentTurn",
    message: "ping",
    model: "openai/gpt-5.5",
    fallbacks: ["anthropic/claude-opus-4.6", "openai/gpt-5.4-mini"],
  });
});
|
||||
|
||||
it("does not report legacyPayloadKind for already-normalized payload kinds", () => {
|
||||
const jobs = [
|
||||
{
|
||||
|
||||
@@ -10,7 +10,10 @@ import {
|
||||
normalizeOptionalStringifiedId,
|
||||
} from "../shared/string-coerce.js";
|
||||
import { normalizeLegacyDeliveryInput } from "./doctor-cron-legacy-delivery.js";
|
||||
import { migrateLegacyCronPayload } from "./doctor-cron-payload-migration.js";
|
||||
import {
|
||||
hasLegacyOpenAICodexCronModelRef,
|
||||
migrateLegacyCronPayload,
|
||||
} from "./doctor-cron-payload-migration.js";
|
||||
|
||||
type CronStoreIssueKey =
|
||||
| "jobId"
|
||||
@@ -19,6 +22,7 @@ type CronStoreIssueKey =
|
||||
| "legacyScheduleString"
|
||||
| "legacyScheduleCron"
|
||||
| "legacyPayloadKind"
|
||||
| "legacyPayloadCodexModel"
|
||||
| "legacyPayloadProvider"
|
||||
| "legacyTopLevelPayloadFields"
|
||||
| "legacyTopLevelDeliveryFields"
|
||||
@@ -380,8 +384,12 @@ export function normalizeStoredCronJobs(
|
||||
|
||||
if (payloadRecord) {
|
||||
const hadLegacyPayloadProvider = Boolean(normalizeOptionalString(payloadRecord.provider));
|
||||
const hadLegacyPayloadCodexModel = hasLegacyOpenAICodexCronModelRef(payloadRecord);
|
||||
if (migrateLegacyCronPayload(payloadRecord)) {
|
||||
mutated = true;
|
||||
if (hadLegacyPayloadCodexModel) {
|
||||
trackIssue("legacyPayloadCodexModel");
|
||||
}
|
||||
if (hadLegacyPayloadProvider) {
|
||||
trackIssue("legacyPayloadProvider");
|
||||
}
|
||||
|
||||
@@ -54,6 +54,11 @@ function formatLegacyIssuePreview(issues: Partial<Record<string, number>>): stri
|
||||
if (issues.legacyPayloadKind) {
|
||||
lines.push(`- ${pluralize(issues.legacyPayloadKind, "job")} needs payload kind normalization`);
|
||||
}
|
||||
if (issues.legacyPayloadCodexModel) {
|
||||
lines.push(
|
||||
`- ${pluralize(issues.legacyPayloadCodexModel, "job")} still uses legacy \`openai-codex/*\` cron model refs`,
|
||||
);
|
||||
}
|
||||
if (issues.legacyPayloadProvider) {
|
||||
lines.push(
|
||||
`- ${pluralize(issues.legacyPayloadProvider, "job")} still uses payload \`provider\` as a delivery alias`,
|
||||
|
||||
@@ -124,6 +124,7 @@ describe("collectCodexRouteWarnings", () => {
|
||||
cfg: {
|
||||
agents: {
|
||||
defaults: {
|
||||
agentRuntime: { id: "codex" },
|
||||
model: {
|
||||
primary: "openai-codex/gpt-5.5",
|
||||
fallbacks: ["openai-codex/gpt-5.4", "anthropic/claude-sonnet-4-6"],
|
||||
@@ -191,7 +192,11 @@ describe("collectCodexRouteWarnings", () => {
|
||||
});
|
||||
|
||||
expect(result.warnings).toStrictEqual([]);
|
||||
expect(result.changes).toEqual([expect.stringContaining("Repaired Codex model routes")]);
|
||||
expect(result.changes).toEqual([
|
||||
expect.stringContaining("Repaired Codex model routes"),
|
||||
"Removed agents.defaults.agentRuntime; runtime is now provider/model scoped.",
|
||||
"Removed agents.list.worker.agentRuntime; runtime is now provider/model scoped.",
|
||||
]);
|
||||
expect(result.cfg.agents?.defaults?.model).toEqual({
|
||||
primary: "openai/gpt-5.5",
|
||||
fallbacks: ["openai/gpt-5.4", "anthropic/claude-sonnet-4-6"],
|
||||
@@ -210,8 +215,8 @@ describe("collectCodexRouteWarnings", () => {
|
||||
expect(result.cfg.agents?.list?.[0]).toMatchObject({
|
||||
id: "worker",
|
||||
model: "openai/gpt-5.4",
|
||||
agentRuntime: { id: "codex" },
|
||||
});
|
||||
expect(result.cfg.agents?.list?.[0]?.agentRuntime).toBeUndefined();
|
||||
expect(result.cfg.channels?.modelByChannel?.telegram?.default).toBe("openai/gpt-5.4");
|
||||
expect(result.cfg.hooks?.mappings?.[0]?.model).toBe("openai/gpt-5.4-mini");
|
||||
expect(result.cfg.hooks?.gmail?.model).toBe("openai/gpt-5.4");
|
||||
|
||||
@@ -18,6 +18,11 @@ type SessionRouteRepairResult = {
|
||||
changed: boolean;
|
||||
sessionKeys: string[];
|
||||
};
|
||||
type ConfigRouteRepairResult = {
|
||||
cfg: OpenClawConfig;
|
||||
changes: CodexRouteHit[];
|
||||
runtimePinChanges: string[];
|
||||
};
|
||||
type CodexSessionRouteRepairSummary = {
|
||||
scannedStores: number;
|
||||
repairedStores: number;
|
||||
@@ -442,10 +447,38 @@ function rewriteAgentModelRefs(params: {
|
||||
}
|
||||
}
|
||||
|
||||
function rewriteConfigModelRefs(params: { cfg: OpenClawConfig; env?: NodeJS.ProcessEnv }): {
|
||||
function clearLegacyAgentRuntimePolicy(
|
||||
container: MutableRecord | undefined,
|
||||
pathLabel: string,
|
||||
changes: string[],
|
||||
): void {
|
||||
if (!container) {
|
||||
return;
|
||||
}
|
||||
if (asMutableRecord(container.embeddedHarness)) {
|
||||
delete container.embeddedHarness;
|
||||
changes.push(`Removed ${pathLabel}.embeddedHarness; runtime is now provider/model scoped.`);
|
||||
}
|
||||
if (asMutableRecord(container.agentRuntime)) {
|
||||
delete container.agentRuntime;
|
||||
changes.push(`Removed ${pathLabel}.agentRuntime; runtime is now provider/model scoped.`);
|
||||
}
|
||||
}
|
||||
|
||||
function clearConfigLegacyAgentRuntimePolicies(cfg: OpenClawConfig): string[] {
|
||||
const changes: string[] = [];
|
||||
clearLegacyAgentRuntimePolicy(asMutableRecord(cfg.agents?.defaults), "agents.defaults", changes);
|
||||
for (const [index, agent] of (cfg.agents?.list ?? []).entries()) {
|
||||
const id = typeof agent.id === "string" && agent.id.trim() ? agent.id.trim() : String(index);
|
||||
clearLegacyAgentRuntimePolicy(agent as MutableRecord, `agents.list.${id}`, changes);
|
||||
}
|
||||
return changes;
|
||||
}
|
||||
|
||||
function rewriteConfigModelRefs(params: {
|
||||
cfg: OpenClawConfig;
|
||||
changes: CodexRouteHit[];
|
||||
} {
|
||||
env?: NodeJS.ProcessEnv;
|
||||
}): ConfigRouteRepairResult {
|
||||
const nextConfig = structuredClone(params.cfg);
|
||||
const hits: CodexRouteHit[] = [];
|
||||
const defaultsRuntime = nextConfig.agents?.defaults?.agentRuntime;
|
||||
@@ -518,9 +551,12 @@ function rewriteConfigModelRefs(params: { cfg: OpenClawConfig; env?: NodeJS.Proc
|
||||
key: "model",
|
||||
path: "channels.discord.voice.model",
|
||||
});
|
||||
const runtimePinChanges =
|
||||
hits.length > 0 ? clearConfigLegacyAgentRuntimePolicies(nextConfig) : [];
|
||||
return {
|
||||
cfg: hits.length > 0 ? nextConfig : params.cfg,
|
||||
cfg: hits.length > 0 || runtimePinChanges.length > 0 ? nextConfig : params.cfg,
|
||||
changes: hits,
|
||||
runtimePinChanges,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -545,7 +581,7 @@ export function collectCodexRouteWarnings(params: {
|
||||
hit.runtime ? `; current runtime is "${hit.runtime}"` : ""
|
||||
}.`,
|
||||
),
|
||||
"- Run `openclaw doctor --fix`: it rewrites configured model refs and stale sessions to `openai/*` without changing explicit runtime policy.",
|
||||
"- Run `openclaw doctor --fix`: it rewrites configured model refs and stale sessions to `openai/*`, clears old whole-agent runtime pins, and keeps provider/model runtime policy.",
|
||||
].join("\n"),
|
||||
];
|
||||
}
|
||||
@@ -578,6 +614,7 @@ export function maybeRepairCodexRoutes(params: {
|
||||
`Repaired Codex model routes:\n${repaired.changes
|
||||
.map((hit) => `- ${formatCodexRouteChange(hit)}`)
|
||||
.join("\n")}`,
|
||||
...repaired.runtimePinChanges,
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
@@ -71,7 +71,6 @@ function buildCodexAppServerPlannerConfig(workspaceDir: string): OpenClawConfig
|
||||
agents: {
|
||||
defaults: {
|
||||
workspace: workspaceDir,
|
||||
agentRuntime: { id: "codex" },
|
||||
model: { primary: `openai/${CRESTODIAN_CODEX_MODEL}` },
|
||||
},
|
||||
},
|
||||
|
||||
@@ -159,7 +159,6 @@ describe("Crestodian assistant", () => {
|
||||
agents: {
|
||||
defaults: {
|
||||
workspace: "/tmp/workspace",
|
||||
agentRuntime: { id: "codex" },
|
||||
model: { primary: "openai/gpt-5.5" },
|
||||
},
|
||||
},
|
||||
@@ -220,7 +219,6 @@ describe("Crestodian assistant", () => {
|
||||
expect(firstEmbeddedCall.config).toMatchObject({
|
||||
agents: {
|
||||
defaults: {
|
||||
agentRuntime: { id: "codex" },
|
||||
model: { primary: "openai/gpt-5.5" },
|
||||
},
|
||||
},
|
||||
|
||||
Reference in New Issue
Block a user