Onboarding: enforce custom model context minimum

This commit is contained in:
Vignesh Natarajan
2026-02-28 13:37:21 -08:00
parent e90429794a
commit a623c9c8d2
3 changed files with 104 additions and 2 deletions

View File

@@ -1,4 +1,5 @@
import { afterEach, describe, expect, it, vi } from "vitest";
import { CONTEXT_WINDOW_HARD_MIN_TOKENS } from "../agents/context-window-guard.js";
import { defaultRuntime } from "../runtime.js";
import {
applyCustomApiConfig,
@@ -326,6 +327,91 @@ describe("promptCustomApiConfig", () => {
});
describe("applyCustomApiConfig", () => {
// A brand-new custom model added to an empty config must be seeded with the
// guard's hard-minimum context window, never a smaller default.
it("uses hard-min context window for newly added custom models", () => {
  const outcome = applyCustomApiConfig({
    config: {},
    baseUrl: "https://llm.example.com/v1",
    modelId: "foo-large",
    compatibility: "openai",
    providerId: "custom",
  });
  const providerModels = outcome.config.models?.providers?.custom?.models ?? [];
  const added = providerModels.find((m) => m.id === "foo-large");
  expect(added?.contextWindow).toBe(CONTEXT_WINDOW_HARD_MIN_TOKENS);
});
// An already-configured model whose contextWindow sits below the hard minimum
// must be bumped up to the minimum when the custom config is re-applied.
it("upgrades existing custom model context window when below hard minimum", () => {
  const outcome = applyCustomApiConfig({
    config: {
      models: {
        providers: {
          custom: {
            api: "openai-completions",
            baseUrl: "https://llm.example.com/v1",
            models: [
              {
                id: "foo-large",
                name: "foo-large",
                contextWindow: 4096, // below the enforced hard minimum
                maxTokens: 1024,
                input: ["text"],
                cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
                reasoning: false,
              },
            ],
          },
        },
      },
    },
    baseUrl: "https://llm.example.com/v1",
    modelId: "foo-large",
    compatibility: "openai",
    providerId: "custom",
  });
  const providerModels = outcome.config.models?.providers?.custom?.models ?? [];
  const upgraded = providerModels.find((m) => m.id === "foo-large");
  expect(upgraded?.contextWindow).toBe(CONTEXT_WINDOW_HARD_MIN_TOKENS);
});
// A model already configured above the hard minimum must keep its larger
// context window untouched — the clamp only raises, never lowers.
it("preserves existing custom model context window when already above minimum", () => {
  const outcome = applyCustomApiConfig({
    config: {
      models: {
        providers: {
          custom: {
            api: "openai-completions",
            baseUrl: "https://llm.example.com/v1",
            models: [
              {
                id: "foo-large",
                name: "foo-large",
                contextWindow: 131072, // already above the hard minimum
                maxTokens: 4096,
                input: ["text"],
                cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
                reasoning: false,
              },
            ],
          },
        },
      },
    },
    baseUrl: "https://llm.example.com/v1",
    modelId: "foo-large",
    compatibility: "openai",
    providerId: "custom",
  });
  const providerModels = outcome.config.models?.providers?.custom?.models ?? [];
  const kept = providerModels.find((m) => m.id === "foo-large");
  expect(kept?.contextWindow).toBe(131072);
});
it.each([
{
name: "invalid compatibility values at runtime",

View File

@@ -1,3 +1,4 @@
import { CONTEXT_WINDOW_HARD_MIN_TOKENS } from "../agents/context-window-guard.js";
import { DEFAULT_PROVIDER } from "../agents/defaults.js";
import { buildModelAliasIndex, modelKey } from "../agents/model-selection.js";
import type { OpenClawConfig } from "../config/config.js";
@@ -16,10 +17,15 @@ import { normalizeAlias } from "./models/shared.js";
import type { SecretInputMode } from "./onboard-types.js";
// Default base URL for a locally running Ollama endpoint.
const DEFAULT_OLLAMA_BASE_URL = "http://127.0.0.1:11434/v1";
// NOTE(review): the next two declarations are the removed/added pair from the
// diff rendering — the hard-coded 4096 default was replaced by the shared
// context-window-guard minimum so onboarding defaults cannot fall below it.
const DEFAULT_CONTEXT_WINDOW = 4096;
const DEFAULT_CONTEXT_WINDOW = CONTEXT_WINDOW_HARD_MIN_TOKENS;
// Default max output tokens for newly added custom models.
const DEFAULT_MAX_TOKENS = 4096;
// 30-second timeout — presumably bounds the endpoint verification step; confirm against usage.
const VERIFY_TIMEOUT_MS = 30_000;
/**
 * Clamps a user-supplied context-window value to the hard minimum.
 *
 * Non-numeric or non-finite inputs resolve to CONTEXT_WINDOW_HARD_MIN_TOKENS;
 * finite numbers are floored to integers and then raised to the minimum if
 * they fall below it. Values at or above the minimum pass through unchanged.
 */
function normalizeContextWindowForCustomModel(value: unknown): number {
  if (typeof value !== "number" || !Number.isFinite(value)) {
    return CONTEXT_WINDOW_HARD_MIN_TOKENS;
  }
  return Math.max(Math.floor(value), CONTEXT_WINDOW_HARD_MIN_TOKENS);
}
/**
* Detects if a URL is from Azure AI Foundry or Azure OpenAI.
* Matches both:
@@ -600,7 +606,16 @@ export function applyCustomApiConfig(params: ApplyCustomApiConfigParams): Custom
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
reasoning: false,
};
const mergedModels = hasModel ? existingModels : [...existingModels, nextModel];
const mergedModels = hasModel
? existingModels.map((model) =>
model.id === modelId
? {
...model,
contextWindow: normalizeContextWindowForCustomModel(model.contextWindow),
}
: model,
)
: [...existingModels, nextModel];
const { apiKey: existingApiKey, ...existingProviderRest } = existingProvider ?? {};
const normalizedApiKey =
normalizeOptionalProviderApiKey(params.apiKey) ??