refactor: dedupe tooling helpers

This commit is contained in:
Peter Steinberger
2026-04-23 18:06:49 +01:00
parent f98f93c29a
commit 2045c0977e
11 changed files with 244 additions and 347 deletions

View File

@@ -2,31 +2,10 @@ import fs from "node:fs/promises";
import { createRequire } from "node:module";
import os from "node:os";
import path from "node:path";
import {
applyProviderConfigWithDefaultModelPreset,
type ModelDefinitionConfig,
type OpenClawConfig,
} from "../../src/plugin-sdk/provider-onboard.ts";
import { applyDockerOpenAiProviderConfig, type OpenClawConfig } from "./docker-openai-seed.ts";
const require = createRequire(import.meta.url);
const DOCKER_OPENAI_MODEL_REF = "openai/gpt-5.4";
const DOCKER_OPENAI_MODEL: ModelDefinitionConfig = {
id: "gpt-5.4",
name: "gpt-5.4",
api: "openai-responses",
reasoning: true,
input: ["text", "image"],
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 1_050_000,
maxTokens: 128_000,
};
async function writeProbeServer(params: {
serverPath: string;
pidPath: string;
@@ -88,7 +67,7 @@ async function main() {
await fs.rm(exitPath, { force: true });
await writeProbeServer({ serverPath, pidPath, pidsPath, exitPath });
const seededConfig = applyProviderConfigWithDefaultModelPreset(
const seededConfig = applyDockerOpenAiProviderConfig(
{
gateway: {
controlUi: {
@@ -123,21 +102,8 @@ async function main() {
},
},
} satisfies OpenClawConfig,
{
providerId: "openai",
api: "openai-responses",
baseUrl: "http://127.0.0.1:9/v1",
defaultModel: DOCKER_OPENAI_MODEL,
defaultModelId: DOCKER_OPENAI_MODEL.id,
aliases: [{ modelRef: DOCKER_OPENAI_MODEL_REF, alias: "GPT" }],
primaryModelRef: DOCKER_OPENAI_MODEL_REF,
},
"sk-docker-cron-mcp-cleanup-test",
);
const openAiProvider = seededConfig.models?.providers?.openai;
if (!openAiProvider) {
throw new Error("failed to seed OpenAI provider config");
}
openAiProvider.apiKey = "sk-docker-cron-mcp-cleanup-test";
await fs.writeFile(configPath, `${JSON.stringify(seededConfig, null, 2)}\n`, "utf-8");

View File

@@ -0,0 +1,45 @@
import {
applyProviderConfigWithDefaultModelPreset,
type ModelDefinitionConfig,
type OpenClawConfig,
} from "../../src/plugin-sdk/provider-onboard.ts";
export type { OpenClawConfig };
const DOCKER_OPENAI_MODEL_REF = "openai/gpt-5.4";
const DOCKER_OPENAI_MODEL: ModelDefinitionConfig = {
id: "gpt-5.4",
name: "gpt-5.4",
api: "openai-responses",
reasoning: true,
input: ["text", "image"],
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 1_050_000,
maxTokens: 128_000,
};
/**
 * Seeds `config` with the shared Docker test OpenAI provider preset
 * (gpt-5.4 pointed at a loopback base URL) and stamps `apiKey` onto the
 * resulting provider entry.
 *
 * @param config - Base config to extend with the OpenAI provider preset.
 * @param apiKey - API key written onto the seeded `openai` provider.
 * @returns The seeded config, ready to be serialized by the caller.
 * @throws Error when the preset helper yields no `openai` provider entry.
 */
export function applyDockerOpenAiProviderConfig(
  config: OpenClawConfig,
  apiKey: string,
): OpenClawConfig {
  const seeded = applyProviderConfigWithDefaultModelPreset(config, {
    providerId: "openai",
    api: "openai-responses",
    baseUrl: "http://127.0.0.1:9/v1",
    defaultModel: DOCKER_OPENAI_MODEL,
    defaultModelId: DOCKER_OPENAI_MODEL.id,
    aliases: [{ modelRef: DOCKER_OPENAI_MODEL_REF, alias: "GPT" }],
    primaryModelRef: DOCKER_OPENAI_MODEL_REF,
  });
  const provider = seeded.models?.providers?.openai;
  if (!provider) {
    throw new Error("failed to seed OpenAI provider config");
  }
  provider.apiKey = apiKey;
  return seeded;
}

View File

@@ -1,28 +1,7 @@
import fs from "node:fs/promises";
import os from "node:os";
import path from "node:path";
import {
applyProviderConfigWithDefaultModelPreset,
type ModelDefinitionConfig,
type OpenClawConfig,
} from "../../src/plugin-sdk/provider-onboard.ts";
const DOCKER_OPENAI_MODEL_REF = "openai/gpt-5.4";
const DOCKER_OPENAI_MODEL: ModelDefinitionConfig = {
id: "gpt-5.4",
name: "gpt-5.4",
api: "openai-responses",
reasoning: true,
input: ["text", "image"],
cost: {
input: 0,
output: 0,
cacheRead: 0,
cacheWrite: 0,
},
contextWindow: 1_050_000,
maxTokens: 128_000,
};
import { applyDockerOpenAiProviderConfig, type OpenClawConfig } from "./docker-openai-seed.ts";
async function main() {
const stateDir = process.env.OPENCLAW_STATE_DIR?.trim() || path.join(os.homedir(), ".openclaw");
@@ -36,7 +15,7 @@ async function main() {
await fs.mkdir(sessionsDir, { recursive: true });
await fs.mkdir(path.dirname(configPath), { recursive: true });
const seededConfig = applyProviderConfigWithDefaultModelPreset(
const seededConfig = applyDockerOpenAiProviderConfig(
{
gateway: {
controlUi: {
@@ -45,21 +24,8 @@ async function main() {
},
},
} satisfies OpenClawConfig,
{
providerId: "openai",
api: "openai-responses",
baseUrl: "http://127.0.0.1:9/v1",
defaultModel: DOCKER_OPENAI_MODEL,
defaultModelId: DOCKER_OPENAI_MODEL.id,
aliases: [{ modelRef: DOCKER_OPENAI_MODEL_REF, alias: "GPT" }],
primaryModelRef: DOCKER_OPENAI_MODEL_REF,
},
"sk-docker-smoke-test",
);
const openAiProvider = seededConfig.models?.providers?.openai;
if (!openAiProvider) {
throw new Error("failed to seed OpenAI provider config");
}
openAiProvider.apiKey = "sk-docker-smoke-test";
await fs.writeFile(configPath, JSON.stringify(seededConfig, null, 2), "utf-8");