mirror of
https://github.com/openclaw/openclaw.git
synced 2026-04-01 12:21:25 +00:00
Models: preserve source markers and serialize models writes
This commit is contained in:
@@ -20,6 +20,7 @@ import {
|
||||
type ModelsConfig = NonNullable<OpenClawConfig["models"]>;
|
||||
|
||||
const DEFAULT_MODE: NonNullable<ModelsConfig["mode"]> = "merge";
|
||||
const MODELS_JSON_WRITE_LOCKS = new Map<string, Promise<void>>();
|
||||
|
||||
function resolvePreferredTokenLimit(explicitValue: number, implicitValue: number): number {
|
||||
// Keep catalog refresh behavior for stale low values while preserving
|
||||
@@ -233,52 +234,73 @@ function resolveModelsConfigInput(config?: OpenClawConfig): OpenClawConfig {
|
||||
return config;
|
||||
}
|
||||
|
||||
async function withModelsJsonWriteLock<T>(targetPath: string, run: () => Promise<T>): Promise<T> {
|
||||
const prior = MODELS_JSON_WRITE_LOCKS.get(targetPath) ?? Promise.resolve();
|
||||
let release: () => void = () => {};
|
||||
const gate = new Promise<void>((resolve) => {
|
||||
release = resolve;
|
||||
});
|
||||
const pending = prior.then(() => gate);
|
||||
MODELS_JSON_WRITE_LOCKS.set(targetPath, pending);
|
||||
await prior;
|
||||
try {
|
||||
return await run();
|
||||
} finally {
|
||||
release();
|
||||
if (MODELS_JSON_WRITE_LOCKS.get(targetPath) === pending) {
|
||||
MODELS_JSON_WRITE_LOCKS.delete(targetPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export async function ensureOpenClawModelsJson(
|
||||
config?: OpenClawConfig,
|
||||
agentDirOverride?: string,
|
||||
): Promise<{ agentDir: string; wrote: boolean }> {
|
||||
const cfg = resolveModelsConfigInput(config);
|
||||
const agentDir = agentDirOverride?.trim() ? agentDirOverride.trim() : resolveOpenClawAgentDir();
|
||||
|
||||
// Ensure config env vars (e.g. AWS_PROFILE, AWS_ACCESS_KEY_ID) are
|
||||
// available in process.env before implicit provider discovery. Some
|
||||
// callers (agent runner, tools) pass config objects that haven't gone
|
||||
// through the full loadConfig() pipeline which applies these.
|
||||
applyConfigEnvVars(cfg);
|
||||
|
||||
const providers = await resolveProvidersForModelsJson({ cfg, agentDir });
|
||||
|
||||
if (Object.keys(providers).length === 0) {
|
||||
return { agentDir, wrote: false };
|
||||
}
|
||||
|
||||
const mode = cfg.models?.mode ?? DEFAULT_MODE;
|
||||
const targetPath = path.join(agentDir, "models.json");
|
||||
const secretRefManagedProviders = new Set<string>();
|
||||
|
||||
const normalizedProviders =
|
||||
normalizeProviders({
|
||||
providers,
|
||||
agentDir,
|
||||
secretDefaults: cfg.secrets?.defaults,
|
||||
return await withModelsJsonWriteLock(targetPath, async () => {
|
||||
// Ensure config env vars (e.g. AWS_PROFILE, AWS_ACCESS_KEY_ID) are
|
||||
// available in process.env before implicit provider discovery. Some
|
||||
// callers (agent runner, tools) pass config objects that haven't gone
|
||||
// through the full loadConfig() pipeline which applies these.
|
||||
applyConfigEnvVars(cfg);
|
||||
|
||||
const providers = await resolveProvidersForModelsJson({ cfg, agentDir });
|
||||
|
||||
if (Object.keys(providers).length === 0) {
|
||||
return { agentDir, wrote: false };
|
||||
}
|
||||
|
||||
const mode = cfg.models?.mode ?? DEFAULT_MODE;
|
||||
const secretRefManagedProviders = new Set<string>();
|
||||
|
||||
const normalizedProviders =
|
||||
normalizeProviders({
|
||||
providers,
|
||||
agentDir,
|
||||
secretDefaults: cfg.secrets?.defaults,
|
||||
secretRefManagedProviders,
|
||||
}) ?? providers;
|
||||
const mergedProviders = await resolveProvidersForMode({
|
||||
mode,
|
||||
targetPath,
|
||||
providers: normalizedProviders,
|
||||
secretRefManagedProviders,
|
||||
}) ?? providers;
|
||||
const mergedProviders = await resolveProvidersForMode({
|
||||
mode,
|
||||
targetPath,
|
||||
providers: normalizedProviders,
|
||||
secretRefManagedProviders,
|
||||
});
|
||||
const next = `${JSON.stringify({ providers: mergedProviders }, null, 2)}\n`;
|
||||
const existingRaw = await readRawFile(targetPath);
|
||||
});
|
||||
const next = `${JSON.stringify({ providers: mergedProviders }, null, 2)}\n`;
|
||||
const existingRaw = await readRawFile(targetPath);
|
||||
|
||||
if (existingRaw === next) {
|
||||
if (existingRaw === next) {
|
||||
await ensureModelsFileMode(targetPath);
|
||||
return { agentDir, wrote: false };
|
||||
}
|
||||
|
||||
await fs.mkdir(agentDir, { recursive: true, mode: 0o700 });
|
||||
await fs.writeFile(targetPath, next, { mode: 0o600 });
|
||||
await ensureModelsFileMode(targetPath);
|
||||
return { agentDir, wrote: false };
|
||||
}
|
||||
|
||||
await fs.mkdir(agentDir, { recursive: true, mode: 0o700 });
|
||||
await fs.writeFile(targetPath, next, { mode: 0o600 });
|
||||
await ensureModelsFileMode(targetPath);
|
||||
return { agentDir, wrote: true };
|
||||
return { agentDir, wrote: true };
|
||||
});
|
||||
}
|
||||
|
||||
New file: src/agents/models-config.write-serialization.test.ts (55 lines)
@@ -0,0 +1,55 @@
|
||||
import fs from "node:fs/promises";
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
import {
|
||||
CUSTOM_PROXY_MODELS_CONFIG,
|
||||
installModelsConfigTestHooks,
|
||||
withModelsTempHome,
|
||||
} from "./models-config.e2e-harness.js";
|
||||
import { ensureOpenClawModelsJson } from "./models-config.js";
|
||||
import { readGeneratedModelsJson } from "./models-config.test-utils.js";
|
||||
|
||||
installModelsConfigTestHooks();
|
||||
|
||||
describe("models-config write serialization", () => {
|
||||
it("serializes concurrent models.json writes to avoid overlap", async () => {
|
||||
await withModelsTempHome(async () => {
|
||||
const first = structuredClone(CUSTOM_PROXY_MODELS_CONFIG);
|
||||
const second = structuredClone(CUSTOM_PROXY_MODELS_CONFIG);
|
||||
const firstModel = first.models?.providers?.["custom-proxy"]?.models?.[0];
|
||||
const secondModel = second.models?.providers?.["custom-proxy"]?.models?.[0];
|
||||
if (!firstModel || !secondModel) {
|
||||
throw new Error("custom-proxy fixture missing expected model entries");
|
||||
}
|
||||
firstModel.name = "Proxy A";
|
||||
secondModel.name = "Proxy B with longer name";
|
||||
|
||||
const originalWriteFile = fs.writeFile.bind(fs);
|
||||
let inFlightWrites = 0;
|
||||
let maxInFlightWrites = 0;
|
||||
const writeSpy = vi.spyOn(fs, "writeFile").mockImplementation(async (...args) => {
|
||||
inFlightWrites += 1;
|
||||
if (inFlightWrites > maxInFlightWrites) {
|
||||
maxInFlightWrites = inFlightWrites;
|
||||
}
|
||||
await new Promise((resolve) => setTimeout(resolve, 20));
|
||||
try {
|
||||
return await originalWriteFile(...args);
|
||||
} finally {
|
||||
inFlightWrites -= 1;
|
||||
}
|
||||
});
|
||||
|
||||
try {
|
||||
await Promise.all([ensureOpenClawModelsJson(first), ensureOpenClawModelsJson(second)]);
|
||||
} finally {
|
||||
writeSpy.mockRestore();
|
||||
}
|
||||
|
||||
expect(maxInFlightWrites).toBe(1);
|
||||
const parsed = await readGeneratedModelsJson<{
|
||||
providers: { "custom-proxy"?: { models?: Array<{ name?: string }> } };
|
||||
}>();
|
||||
expect(parsed.providers["custom-proxy"]?.models?.[0]?.name).toBe("Proxy B with longer name");
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -5,6 +5,11 @@ let loadModelRegistry: typeof import("./models/list.registry.js").loadModelRegis
|
||||
let toModelRow: typeof import("./models/list.registry.js").toModelRow;
|
||||
|
||||
const loadConfig = vi.fn();
|
||||
const readConfigFileSnapshotForWrite = vi.fn().mockResolvedValue({
|
||||
snapshot: { valid: false, resolved: {} },
|
||||
writeOptions: {},
|
||||
});
|
||||
const setRuntimeConfigSnapshot = vi.fn();
|
||||
const ensureOpenClawModelsJson = vi.fn().mockResolvedValue(undefined);
|
||||
const resolveOpenClawAgentDir = vi.fn().mockReturnValue("/tmp/openclaw-agent");
|
||||
const ensureAuthProfileStore = vi.fn().mockReturnValue({ version: 1, profiles: {} });
|
||||
@@ -29,6 +34,8 @@ vi.mock("../config/config.js", () => ({
|
||||
CONFIG_PATH: "/tmp/openclaw.json",
|
||||
STATE_DIR: "/tmp/openclaw-state",
|
||||
loadConfig,
|
||||
readConfigFileSnapshotForWrite,
|
||||
setRuntimeConfigSnapshot,
|
||||
}));
|
||||
|
||||
vi.mock("../agents/models-config.js", () => ({
|
||||
@@ -84,8 +91,16 @@ vi.mock("../agents/pi-model-discovery.js", () => {
|
||||
});
|
||||
|
||||
vi.mock("../agents/pi-embedded-runner/model.js", () => ({
|
||||
resolveModel: () => {
|
||||
throw new Error("resolveModel should not be called from models.list tests");
|
||||
resolveModelWithRegistry: ({
|
||||
provider,
|
||||
modelId,
|
||||
modelRegistry,
|
||||
}: {
|
||||
provider: string;
|
||||
modelId: string;
|
||||
modelRegistry: { find: (provider: string, id: string) => unknown };
|
||||
}) => {
|
||||
return modelRegistry.find(provider, modelId);
|
||||
},
|
||||
}));
|
||||
|
||||
@@ -114,6 +129,13 @@ beforeEach(() => {
|
||||
modelRegistryState.getAllError = undefined;
|
||||
modelRegistryState.getAvailableError = undefined;
|
||||
listProfilesForProvider.mockReturnValue([]);
|
||||
ensureOpenClawModelsJson.mockClear();
|
||||
readConfigFileSnapshotForWrite.mockClear();
|
||||
readConfigFileSnapshotForWrite.mockResolvedValue({
|
||||
snapshot: { valid: false, resolved: {} },
|
||||
writeOptions: {},
|
||||
});
|
||||
setRuntimeConfigSnapshot.mockClear();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
@@ -302,6 +324,35 @@ describe("models list/status", () => {
|
||||
await expect(loadModelRegistry({})).rejects.toThrow("model discovery unavailable");
|
||||
});
|
||||
|
||||
it("loadModelRegistry persists using source config snapshot when provided", async () => {
|
||||
modelRegistryState.models = [OPENAI_MODEL];
|
||||
modelRegistryState.available = [OPENAI_MODEL];
|
||||
const sourceConfig = {
|
||||
models: { providers: { openai: { apiKey: "$OPENAI_API_KEY" } } },
|
||||
};
|
||||
const resolvedConfig = {
|
||||
models: { providers: { openai: { apiKey: "sk-resolved-runtime-value" } } },
|
||||
};
|
||||
|
||||
await loadModelRegistry(resolvedConfig as never, { sourceConfig: sourceConfig as never });
|
||||
|
||||
expect(ensureOpenClawModelsJson).toHaveBeenCalledTimes(1);
|
||||
expect(ensureOpenClawModelsJson).toHaveBeenCalledWith(sourceConfig);
|
||||
});
|
||||
|
||||
it("loadModelRegistry uses resolved config when no source snapshot is provided", async () => {
|
||||
modelRegistryState.models = [OPENAI_MODEL];
|
||||
modelRegistryState.available = [OPENAI_MODEL];
|
||||
const resolvedConfig = {
|
||||
models: { providers: { openai: { apiKey: "sk-resolved-runtime-value" } } },
|
||||
};
|
||||
|
||||
await loadModelRegistry(resolvedConfig as never);
|
||||
|
||||
expect(ensureOpenClawModelsJson).toHaveBeenCalledTimes(1);
|
||||
expect(ensureOpenClawModelsJson).toHaveBeenCalledWith(resolvedConfig);
|
||||
});
|
||||
|
||||
it("toModelRow does not crash without cfg/authStore when availability is undefined", async () => {
|
||||
const row = toModelRow({
|
||||
model: makeGoogleAntigravityTemplate(
|
||||
|
||||
@@ -2,11 +2,26 @@ import { describe, expect, it, vi } from "vitest";
|
||||
|
||||
const mocks = vi.hoisted(() => {
|
||||
const printModelTable = vi.fn();
|
||||
const sourceConfig = {
|
||||
agents: { defaults: { model: { primary: "openai-codex/gpt-5.4" } } },
|
||||
models: { providers: { openai: { apiKey: "$OPENAI_API_KEY" } } },
|
||||
};
|
||||
const resolvedConfig = {
|
||||
agents: { defaults: { model: { primary: "openai-codex/gpt-5.4" } } },
|
||||
models: { providers: { openai: { apiKey: "sk-resolved-runtime-value" } } },
|
||||
};
|
||||
return {
|
||||
loadConfig: vi.fn().mockReturnValue({
|
||||
agents: { defaults: { model: { primary: "openai-codex/gpt-5.4" } } },
|
||||
models: { providers: {} },
|
||||
}),
|
||||
sourceConfig,
|
||||
resolvedConfig,
|
||||
loadModelsConfigWithSource: vi.fn().mockResolvedValue({
|
||||
sourceConfig,
|
||||
resolvedConfig,
|
||||
diagnostics: [],
|
||||
}),
|
||||
ensureAuthProfileStore: vi.fn().mockReturnValue({ version: 1, profiles: {}, order: {} }),
|
||||
loadModelRegistry: vi
|
||||
.fn()
|
||||
@@ -58,6 +73,10 @@ vi.mock("./list.registry.js", async (importOriginal) => {
|
||||
};
|
||||
});
|
||||
|
||||
vi.mock("./load-config.js", () => ({
|
||||
loadModelsConfigWithSource: mocks.loadModelsConfigWithSource,
|
||||
}));
|
||||
|
||||
vi.mock("./list.configured.js", () => ({
|
||||
resolveConfiguredEntries: mocks.resolveConfiguredEntries,
|
||||
}));
|
||||
@@ -95,6 +114,16 @@ describe("modelsListCommand forward-compat", () => {
|
||||
expect(codex?.tags).not.toContain("missing");
|
||||
});
|
||||
|
||||
it("passes source config to model registry loading for persistence safety", async () => {
|
||||
const runtime = { log: vi.fn(), error: vi.fn() };
|
||||
|
||||
await modelsListCommand({ json: true }, runtime as never);
|
||||
|
||||
expect(mocks.loadModelRegistry).toHaveBeenCalledWith(mocks.resolvedConfig, {
|
||||
sourceConfig: mocks.sourceConfig,
|
||||
});
|
||||
});
|
||||
|
||||
it("keeps configured local openai gpt-5.4 entries visible in --local output", async () => {
|
||||
mocks.resolveConfiguredEntries.mockReturnValueOnce({
|
||||
entries: [
|
||||
|
||||
@@ -8,7 +8,7 @@ import { formatErrorWithStack } from "./list.errors.js";
|
||||
import { loadModelRegistry, toModelRow } from "./list.registry.js";
|
||||
import { printModelTable } from "./list.table.js";
|
||||
import type { ModelRow } from "./list.types.js";
|
||||
import { loadModelsConfig } from "./load-config.js";
|
||||
import { loadModelsConfigWithSource } from "./load-config.js";
|
||||
import { DEFAULT_PROVIDER, ensureFlagCompatibility, isLocalBaseUrl, modelKey } from "./shared.js";
|
||||
|
||||
export async function modelsListCommand(
|
||||
@@ -23,7 +23,10 @@ export async function modelsListCommand(
|
||||
) {
|
||||
ensureFlagCompatibility(opts);
|
||||
const { ensureAuthProfileStore } = await import("../../agents/auth-profiles.js");
|
||||
const cfg = await loadModelsConfig({ commandName: "models list", runtime });
|
||||
const { sourceConfig, resolvedConfig: cfg } = await loadModelsConfigWithSource({
|
||||
commandName: "models list",
|
||||
runtime,
|
||||
});
|
||||
const authStore = ensureAuthProfileStore();
|
||||
const providerFilter = (() => {
|
||||
const raw = opts.provider?.trim();
|
||||
@@ -39,7 +42,7 @@ export async function modelsListCommand(
|
||||
let availableKeys: Set<string> | undefined;
|
||||
let availabilityErrorMessage: string | undefined;
|
||||
try {
|
||||
const loaded = await loadModelRegistry(cfg);
|
||||
const loaded = await loadModelRegistry(cfg, { sourceConfig });
|
||||
modelRegistry = loaded.registry;
|
||||
models = loaded.models;
|
||||
availableKeys = loaded.availableKeys;
|
||||
|
||||
@@ -94,8 +94,13 @@ function loadAvailableModels(registry: ModelRegistry): Model<Api>[] {
|
||||
}
|
||||
}
|
||||
|
||||
export async function loadModelRegistry(cfg: OpenClawConfig) {
|
||||
await ensureOpenClawModelsJson(cfg);
|
||||
export async function loadModelRegistry(
|
||||
cfg: OpenClawConfig,
|
||||
opts?: { sourceConfig?: OpenClawConfig },
|
||||
) {
|
||||
// Persistence must be based on source config (pre-resolution) so SecretRef-managed
|
||||
// credentials remain markers in models.json for command paths too.
|
||||
await ensureOpenClawModelsJson(opts?.sourceConfig ?? cfg);
|
||||
const agentDir = resolveOpenClawAgentDir();
|
||||
const authStorage = discoverAuthStorage(agentDir);
|
||||
const registry = discoverModels(authStorage, agentDir);
|
||||
|
||||
New file: src/commands/models/load-config.test.ts (95 lines)
@@ -0,0 +1,95 @@
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
|
||||
const mocks = vi.hoisted(() => ({
|
||||
loadConfig: vi.fn(),
|
||||
readConfigFileSnapshotForWrite: vi.fn(),
|
||||
setRuntimeConfigSnapshot: vi.fn(),
|
||||
resolveCommandSecretRefsViaGateway: vi.fn(),
|
||||
getModelsCommandSecretTargetIds: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("../../config/config.js", () => ({
|
||||
loadConfig: mocks.loadConfig,
|
||||
readConfigFileSnapshotForWrite: mocks.readConfigFileSnapshotForWrite,
|
||||
setRuntimeConfigSnapshot: mocks.setRuntimeConfigSnapshot,
|
||||
}));
|
||||
|
||||
vi.mock("../../cli/command-secret-gateway.js", () => ({
|
||||
resolveCommandSecretRefsViaGateway: mocks.resolveCommandSecretRefsViaGateway,
|
||||
}));
|
||||
|
||||
vi.mock("../../cli/command-secret-targets.js", () => ({
|
||||
getModelsCommandSecretTargetIds: mocks.getModelsCommandSecretTargetIds,
|
||||
}));
|
||||
|
||||
import { loadModelsConfig, loadModelsConfigWithSource } from "./load-config.js";
|
||||
|
||||
describe("models load-config", () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it("returns source+resolved configs and sets runtime snapshot", async () => {
|
||||
const sourceConfig = {
|
||||
models: {
|
||||
providers: {
|
||||
openai: {
|
||||
apiKey: { source: "env", provider: "default", id: "OPENAI_API_KEY" },
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
const runtimeConfig = { models: { providers: { openai: { apiKey: "sk-runtime" } } } };
|
||||
const resolvedConfig = { models: { providers: { openai: { apiKey: "sk-resolved" } } } };
|
||||
const targetIds = new Set(["models.providers.*.apiKey"]);
|
||||
const runtime = { log: vi.fn(), error: vi.fn(), exit: vi.fn() };
|
||||
|
||||
mocks.loadConfig.mockReturnValue(runtimeConfig);
|
||||
mocks.readConfigFileSnapshotForWrite.mockResolvedValue({
|
||||
snapshot: { valid: true, resolved: sourceConfig },
|
||||
writeOptions: {},
|
||||
});
|
||||
mocks.getModelsCommandSecretTargetIds.mockReturnValue(targetIds);
|
||||
mocks.resolveCommandSecretRefsViaGateway.mockResolvedValue({
|
||||
resolvedConfig,
|
||||
diagnostics: ["diag-one", "diag-two"],
|
||||
});
|
||||
|
||||
const result = await loadModelsConfigWithSource({ commandName: "models list", runtime });
|
||||
|
||||
expect(mocks.resolveCommandSecretRefsViaGateway).toHaveBeenCalledWith({
|
||||
config: runtimeConfig,
|
||||
commandName: "models list",
|
||||
targetIds,
|
||||
});
|
||||
expect(mocks.setRuntimeConfigSnapshot).toHaveBeenCalledWith(resolvedConfig, sourceConfig);
|
||||
expect(runtime.log).toHaveBeenNthCalledWith(1, "[secrets] diag-one");
|
||||
expect(runtime.log).toHaveBeenNthCalledWith(2, "[secrets] diag-two");
|
||||
expect(result).toEqual({
|
||||
sourceConfig,
|
||||
resolvedConfig,
|
||||
diagnostics: ["diag-one", "diag-two"],
|
||||
});
|
||||
});
|
||||
|
||||
it("loadModelsConfig returns resolved config while preserving runtime snapshot behavior", async () => {
|
||||
const sourceConfig = { models: { providers: {} } };
|
||||
const runtimeConfig = { models: { providers: { openai: { apiKey: "sk-runtime" } } } };
|
||||
const resolvedConfig = { models: { providers: { openai: { apiKey: "sk-resolved" } } } };
|
||||
const targetIds = new Set(["models.providers.*.apiKey"]);
|
||||
|
||||
mocks.loadConfig.mockReturnValue(runtimeConfig);
|
||||
mocks.readConfigFileSnapshotForWrite.mockResolvedValue({
|
||||
snapshot: { valid: true, resolved: sourceConfig },
|
||||
writeOptions: {},
|
||||
});
|
||||
mocks.getModelsCommandSecretTargetIds.mockReturnValue(targetIds);
|
||||
mocks.resolveCommandSecretRefsViaGateway.mockResolvedValue({
|
||||
resolvedConfig,
|
||||
diagnostics: [],
|
||||
});
|
||||
|
||||
await expect(loadModelsConfig({ commandName: "models list" })).resolves.toBe(resolvedConfig);
|
||||
expect(mocks.setRuntimeConfigSnapshot).toHaveBeenCalledWith(resolvedConfig, sourceConfig);
|
||||
});
|
||||
});
|
||||
@@ -1,15 +1,39 @@
|
||||
import { resolveCommandSecretRefsViaGateway } from "../../cli/command-secret-gateway.js";
|
||||
import { getModelsCommandSecretTargetIds } from "../../cli/command-secret-targets.js";
|
||||
import { loadConfig, type OpenClawConfig } from "../../config/config.js";
|
||||
import {
|
||||
loadConfig,
|
||||
readConfigFileSnapshotForWrite,
|
||||
setRuntimeConfigSnapshot,
|
||||
type OpenClawConfig,
|
||||
} from "../../config/config.js";
|
||||
import type { RuntimeEnv } from "../../runtime.js";
|
||||
|
||||
export async function loadModelsConfig(params: {
|
||||
export type LoadedModelsConfig = {
|
||||
sourceConfig: OpenClawConfig;
|
||||
resolvedConfig: OpenClawConfig;
|
||||
diagnostics: string[];
|
||||
};
|
||||
|
||||
async function loadSourceConfigSnapshot(fallback: OpenClawConfig): Promise<OpenClawConfig> {
|
||||
try {
|
||||
const { snapshot } = await readConfigFileSnapshotForWrite();
|
||||
if (snapshot.valid) {
|
||||
return snapshot.resolved;
|
||||
}
|
||||
} catch {
|
||||
// Fall back to runtime-loaded config if source snapshot cannot be read.
|
||||
}
|
||||
return fallback;
|
||||
}
|
||||
|
||||
export async function loadModelsConfigWithSource(params: {
|
||||
commandName: string;
|
||||
runtime?: RuntimeEnv;
|
||||
}): Promise<OpenClawConfig> {
|
||||
const loadedRaw = loadConfig();
|
||||
}): Promise<LoadedModelsConfig> {
|
||||
const runtimeConfig = loadConfig();
|
||||
const sourceConfig = await loadSourceConfigSnapshot(runtimeConfig);
|
||||
const { resolvedConfig, diagnostics } = await resolveCommandSecretRefsViaGateway({
|
||||
config: loadedRaw,
|
||||
config: runtimeConfig,
|
||||
commandName: params.commandName,
|
||||
targetIds: getModelsCommandSecretTargetIds(),
|
||||
});
|
||||
@@ -18,5 +42,17 @@ export async function loadModelsConfig(params: {
|
||||
params.runtime.log(`[secrets] ${entry}`);
|
||||
}
|
||||
}
|
||||
return resolvedConfig;
|
||||
setRuntimeConfigSnapshot(resolvedConfig, sourceConfig);
|
||||
return {
|
||||
sourceConfig,
|
||||
resolvedConfig,
|
||||
diagnostics,
|
||||
};
|
||||
}
|
||||
|
||||
export async function loadModelsConfig(params: {
|
||||
commandName: string;
|
||||
runtime?: RuntimeEnv;
|
||||
}): Promise<OpenClawConfig> {
|
||||
return (await loadModelsConfigWithSource(params)).resolvedConfig;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user