refactor: share native streaming compat helpers
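
Extract the withStreamingUsageCompat helper and native base-URL detection that
were duplicated across the ModelStudio and Moonshot catalogs into
openclaw/plugin-sdk/provider-catalog-shared as supportsNativeStreamingUsageCompat
and applyProviderNativeStreamingUsageCompat, and fold the capability lookup plus
compat-defaults resolution into a single detectOpenAICompletionsCompat helper
reused by detectCompat and normalizeModelCompat. Covered by a new
provider-catalog-shared test plus normalizeModelCompat cases for DashScope- and
Moonshot-native endpoints.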

Peter Steinberger
2026-04-04 12:18:15 +01:00
parent aaa173a4a7
commit 3207c5326a
8 changed files with 188 additions and 91 deletions

View File

@@ -1,4 +1,7 @@
-import { resolveProviderRequestCapabilities } from "openclaw/plugin-sdk/provider-http";
+import {
+  applyProviderNativeStreamingUsageCompat,
+  supportsNativeStreamingUsageCompat,
+} from "openclaw/plugin-sdk/provider-catalog-shared";
 import type {
   ModelDefinitionConfig,
   ModelProviderConfig,
@@ -105,44 +108,19 @@ export const MODELSTUDIO_MODEL_CATALOG: ReadonlyArray<ModelDefinitionConfig> = [
 ];
 
 export function isNativeModelStudioBaseUrl(baseUrl: string | undefined): boolean {
-  return resolveProviderRequestCapabilities({
-    provider: "modelstudio",
-    api: "openai-completions",
+  return supportsNativeStreamingUsageCompat({
+    providerId: "modelstudio",
     baseUrl,
-    capability: "llm",
-    transport: "stream",
-  }).supportsNativeStreamingUsageCompat;
+  });
 }
 
-function withStreamingUsageCompat(provider: ModelProviderConfig): ModelProviderConfig {
-  if (!Array.isArray(provider.models) || provider.models.length === 0) {
-    return provider;
-  }
-  let changed = false;
-  const models = provider.models.map((model) => {
-    if (model.compat?.supportsUsageInStreaming !== undefined) {
-      return model;
-    }
-    changed = true;
-    return {
-      ...model,
-      compat: {
-        ...model.compat,
-        supportsUsageInStreaming: true,
-      },
-    };
-  });
-  return changed ? { ...provider, models } : provider;
-}
-
 export function applyModelStudioNativeStreamingUsageCompat(
   provider: ModelProviderConfig,
 ): ModelProviderConfig {
-  return isNativeModelStudioBaseUrl(provider.baseUrl)
-    ? withStreamingUsageCompat(provider)
-    : provider;
+  return applyProviderNativeStreamingUsageCompat({
+    providerId: "modelstudio",
+    providerConfig: provider,
+  });
 }
 
 export function buildModelStudioModelDefinition(params: {

View File

@@ -1,4 +1,7 @@
-import { resolveProviderRequestCapabilities } from "openclaw/plugin-sdk/provider-http";
+import {
+  applyProviderNativeStreamingUsageCompat,
+  supportsNativeStreamingUsageCompat,
+} from "openclaw/plugin-sdk/provider-catalog-shared";
 import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";
 
 export const MOONSHOT_BASE_URL = "https://api.moonshot.ai/v1";
@@ -53,42 +56,19 @@ const MOONSHOT_MODEL_CATALOG = [
 ] as const;
 
 export function isNativeMoonshotBaseUrl(baseUrl: string | undefined): boolean {
-  return resolveProviderRequestCapabilities({
-    provider: "moonshot",
-    api: "openai-completions",
+  return supportsNativeStreamingUsageCompat({
+    providerId: "moonshot",
     baseUrl,
-    capability: "llm",
-    transport: "stream",
-  }).supportsNativeStreamingUsageCompat;
+  });
 }
 
-function withStreamingUsageCompat(provider: ModelProviderConfig): ModelProviderConfig {
-  if (!Array.isArray(provider.models) || provider.models.length === 0) {
-    return provider;
-  }
-  let changed = false;
-  const models = provider.models.map((model) => {
-    if (model.compat?.supportsUsageInStreaming !== undefined) {
-      return model;
-    }
-    changed = true;
-    return {
-      ...model,
-      compat: {
-        ...model.compat,
-        supportsUsageInStreaming: true,
-      },
-    };
-  });
-  return changed ? { ...provider, models } : provider;
-}
-
 export function applyMoonshotNativeStreamingUsageCompat(
   provider: ModelProviderConfig,
 ): ModelProviderConfig {
-  return isNativeMoonshotBaseUrl(provider.baseUrl) ? withStreamingUsageCompat(provider) : provider;
+  return applyProviderNativeStreamingUsageCompat({
+    providerId: "moonshot",
+    providerConfig: provider,
+  });
 }
 
 export function buildMoonshotProvider(): ModelProviderConfig {

View File

@@ -170,6 +170,32 @@ describe("normalizeModelCompat", () => {
     expect(supportsStrictMode(normalized)).toBe(false);
   });
 
+  it("keeps supportsUsageInStreaming on for DashScope-compatible endpoints regardless of provider id", () => {
+    const model = {
+      ...baseModel(),
+      provider: "custom-qwen",
+      baseUrl: "https://dashscope-intl.aliyuncs.com/compatible-mode/v1",
+    };
+    delete (model as { compat?: unknown }).compat;
+    const normalized = normalizeModelCompat(model);
+    expect(supportsDeveloperRole(normalized)).toBe(false);
+    expect(supportsUsageInStreaming(normalized)).toBe(true);
+    expect(supportsStrictMode(normalized)).toBe(false);
+  });
+
+  it("keeps supportsUsageInStreaming on for Moonshot-native endpoints regardless of provider id", () => {
+    const model = {
+      ...baseModel(),
+      provider: "custom-kimi",
+      baseUrl: "https://api.moonshot.ai/v1",
+    };
+    delete (model as { compat?: unknown }).compat;
+    const normalized = normalizeModelCompat(model);
+    expect(supportsDeveloperRole(normalized)).toBe(false);
+    expect(supportsUsageInStreaming(normalized)).toBe(true);
+    expect(supportsStrictMode(normalized)).toBe(false);
+  });
+
   it("leaves native api.openai.com model untouched", () => {
     const model = {
       ...baseModel(),

View File

@@ -1,4 +1,6 @@
+import type { Model } from "@mariozechner/pi-ai";
 import type { ProviderEndpointClass, ProviderRequestCapabilities } from "./provider-attribution.js";
+import { resolveProviderRequestCapabilities } from "./provider-attribution.js";
 
 type OpenAICompletionsCompatDefaultsInput = {
   provider?: string;
@@ -18,6 +20,11 @@ export type OpenAICompletionsCompatDefaults = {
   supportsStrictMode: boolean;
 };
 
+export type DetectedOpenAICompletionsCompat = {
+  capabilities: ProviderRequestCapabilities;
+  defaults: OpenAICompletionsCompatDefaults;
+};
+
 function isDefaultRouteProvider(provider: string | undefined, ...ids: string[]) {
   return provider !== undefined && ids.includes(provider);
 }
@@ -89,3 +96,27 @@ export function resolveOpenAICompletionsCompatDefaultsFromCapabilities(
 ): OpenAICompletionsCompatDefaults {
   return resolveOpenAICompletionsCompatDefaults(input);
 }
+
+export function detectOpenAICompletionsCompat(
+  model: Pick<Model<"openai-completions">, "provider" | "baseUrl" | "id" | "compat">,
+): DetectedOpenAICompletionsCompat {
+  const capabilities = resolveProviderRequestCapabilities({
+    provider: model.provider,
+    api: "openai-completions",
+    baseUrl: model.baseUrl,
+    capability: "llm",
+    transport: "stream",
+    modelId: model.id,
+    compat:
+      model.compat && typeof model.compat === "object"
+        ? (model.compat as { supportsStore?: boolean })
+        : undefined,
+  });
+  return {
+    capabilities,
+    defaults: resolveOpenAICompletionsCompatDefaultsFromCapabilities({
+      provider: model.provider,
+      ...capabilities,
+    }),
+  };
+}
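
The fused helper keeps the capability lookup and the compat-defaults derivation
in lockstep for every caller. A minimal sketch of the new call shape; the model
literal below is hypothetical and not part of this diff:

import { detectOpenAICompletionsCompat } from "./openai-completions-compat.js";

// Hypothetical model literal, shaped to satisfy the Pick<> above.
const { capabilities, defaults } = detectOpenAICompletionsCompat({
  provider: "groq",
  baseUrl: "https://api.groq.com/openai/v1",
  id: "qwen/qwen3-32b",
  compat: undefined,
});

// capabilities.endpointClass drives route-specific branching (see detectCompat
// below); defaults carries flags such as supportsStrictMode.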

View File

@@ -22,7 +22,7 @@ import type {
 import { resolveProviderTransportTurnStateWithPlugin } from "../plugins/provider-runtime.js";
 import type { ProviderRuntimeModel } from "../plugins/types.js";
 import { buildCopilotDynamicHeaders, hasCopilotVisionInput } from "./copilot-dynamic-headers.js";
-import { resolveOpenAICompletionsCompatDefaultsFromCapabilities } from "./openai-completions-compat.js";
+import { detectOpenAICompletionsCompat } from "./openai-completions-compat.js";
 import {
   applyOpenAIResponsesPayloadPolicy,
   resolveOpenAIResponsesPayloadPolicy,
@@ -1113,24 +1113,9 @@ async function processOpenAICompletionsStream(
 
 function detectCompat(model: OpenAIModeModel) {
   const provider = model.provider;
-  const capabilities = resolveProviderRequestCapabilities({
-    provider,
-    api: model.api,
-    baseUrl: model.baseUrl,
-    capability: "llm",
-    transport: "stream",
-    modelId: model.id,
-    compat:
-      model.compat && typeof model.compat === "object"
-        ? (model.compat as { supportsStore?: boolean })
-        : undefined,
-  });
+  const { capabilities, defaults: compatDefaults } = detectOpenAICompletionsCompat(model);
   const endpointClass = capabilities.endpointClass;
   const isDefaultRoute = endpointClass === "default";
-  const compatDefaults = resolveOpenAICompletionsCompatDefaultsFromCapabilities({
-    provider,
-    ...capabilities,
-  });
   const isGroq = endpointClass === "groq-native" || (isDefaultRoute && provider === "groq");
   const reasoningEffortMap: Record<string, string> =
     isGroq && model.id === "qwen/qwen3-32b"

View File

@@ -0,0 +1,56 @@
+import { describe, expect, it } from "vitest";
+
+import {
+  applyProviderNativeStreamingUsageCompat,
+  supportsNativeStreamingUsageCompat,
+} from "./provider-catalog-shared.js";
+import type { ModelDefinitionConfig } from "./provider-model-shared.js";
+
+function buildModel(id: string, supportsUsageInStreaming?: boolean): ModelDefinitionConfig {
+  return {
+    id,
+    name: id,
+    reasoning: false,
+    input: ["text"],
+    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
+    contextWindow: 1024,
+    maxTokens: 1024,
+    ...(supportsUsageInStreaming === undefined ? {} : { compat: { supportsUsageInStreaming } }),
+  };
+}
+
+describe("provider-catalog-shared native streaming usage compat", () => {
+  it("detects native streaming usage compat from the endpoint capabilities", () => {
+    expect(
+      supportsNativeStreamingUsageCompat({
+        providerId: "custom-qwen",
+        baseUrl: "https://dashscope.aliyuncs.com/compatible-mode/v1",
+      }),
+    ).toBe(true);
+    expect(
+      supportsNativeStreamingUsageCompat({
+        providerId: "custom-kimi",
+        baseUrl: "https://api.moonshot.ai/v1",
+      }),
+    ).toBe(true);
+    expect(
+      supportsNativeStreamingUsageCompat({
+        providerId: "custom-proxy",
+        baseUrl: "https://proxy.example.com/v1",
+      }),
+    ).toBe(false);
+  });
+
+  it("opts models into streaming usage for native endpoints while preserving explicit overrides", () => {
+    const provider = applyProviderNativeStreamingUsageCompat({
+      providerId: "custom-qwen",
+      providerConfig: {
+        api: "openai-completions",
+        baseUrl: "https://dashscope.aliyuncs.com/compatible-mode/v1",
+        models: [buildModel("qwen-plus"), buildModel("qwen-max", false)],
+      },
+    });
+    expect(provider.models?.[0]?.compat?.supportsUsageInStreaming).toBe(true);
+    expect(provider.models?.[1]?.compat?.supportsUsageInStreaming).toBe(false);
+  });
+});

View File

@@ -3,6 +3,9 @@
 
 // Keep provider-owned exports out of this subpath so plugin loaders can import it
 // without recursing through provider-specific facades.
+import { resolveProviderRequestCapabilities } from "./provider-http.js";
+import type { ModelProviderConfig } from "./provider-model-shared.js";
+
 export type { ProviderCatalogContext, ProviderCatalogResult } from "../plugins/types.js";
 
 export {
@@ -10,3 +13,51 @@ export {
   buildSingleProviderApiKeyCatalog,
   findCatalogTemplate,
 } from "../plugins/provider-catalog.js";
+
+function withStreamingUsageCompat(provider: ModelProviderConfig): ModelProviderConfig {
+  if (!Array.isArray(provider.models) || provider.models.length === 0) {
+    return provider;
+  }
+  let changed = false;
+  const models = provider.models.map((model) => {
+    if (model.compat?.supportsUsageInStreaming !== undefined) {
+      return model;
+    }
+    changed = true;
+    return {
+      ...model,
+      compat: {
+        ...model.compat,
+        supportsUsageInStreaming: true,
+      },
+    };
+  });
+  return changed ? { ...provider, models } : provider;
+}
+
+export function supportsNativeStreamingUsageCompat(params: {
+  providerId: string;
+  baseUrl: string | undefined;
+}): boolean {
+  return resolveProviderRequestCapabilities({
+    provider: params.providerId,
+    api: "openai-completions",
+    baseUrl: params.baseUrl,
+    capability: "llm",
+    transport: "stream",
+  }).supportsNativeStreamingUsageCompat;
+}
+
+export function applyProviderNativeStreamingUsageCompat(params: {
+  providerId: string;
+  providerConfig: ModelProviderConfig;
+}): ModelProviderConfig {
+  return supportsNativeStreamingUsageCompat({
+    providerId: params.providerId,
+    baseUrl: params.providerConfig.baseUrl,
+  })
+    ? withStreamingUsageCompat(params.providerConfig)
+    : params.providerConfig;
+}
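
Consumer-side, the two new exports compose as in this sketch; the provider id,
base URL, and model literal are illustrative, mirroring the fixtures in the new
test file rather than a real catalog:

import {
  applyProviderNativeStreamingUsageCompat,
  supportsNativeStreamingUsageCompat,
} from "openclaw/plugin-sdk/provider-catalog-shared";
import type { ModelProviderConfig } from "openclaw/plugin-sdk/provider-model-shared";

// Illustrative catalog entry; field values mirror the test fixtures.
const provider: ModelProviderConfig = {
  api: "openai-completions",
  baseUrl: "https://dashscope.aliyuncs.com/compatible-mode/v1",
  models: [
    {
      id: "qwen-plus",
      name: "qwen-plus",
      reasoning: false,
      input: ["text"],
      cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
      contextWindow: 1024,
      maxTokens: 1024,
    },
  ],
};

// Boolean probe for native-endpoint detection, usable on its own.
const isNative = supportsNativeStreamingUsageCompat({
  providerId: "custom-qwen",
  baseUrl: provider.baseUrl,
});

// Opts models into streaming usage on native endpoints; models that already set
// compat.supportsUsageInStreaming keep their explicit value.
const normalized = applyProviderNativeStreamingUsageCompat({
  providerId: "custom-qwen",
  providerConfig: provider,
});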

View File

@@ -1,6 +1,5 @@
 import type { Api, Model } from "@mariozechner/pi-ai";
-import { resolveOpenAICompletionsCompatDefaultsFromCapabilities } from "../agents/openai-completions-compat.js";
-import { resolveProviderRequestCapabilities } from "../agents/provider-attribution.js";
+import { detectOpenAICompletionsCompat } from "../agents/openai-completions-compat.js";
 import type { ModelCompatConfig } from "../config/types.models.js";
 
 function extractModelCompat(
@@ -94,16 +93,7 @@ export function normalizeModelCompat(model: Model<Api>): Model<Api> {
   const compat = model.compat ?? undefined;
   const detectedCompatDefaults = baseUrl
-    ? resolveOpenAICompletionsCompatDefaultsFromCapabilities({
-        provider: typeof model.provider === "string" ? model.provider : undefined,
-        ...resolveProviderRequestCapabilities({
-          provider: typeof model.provider === "string" ? model.provider : undefined,
-          api: model.api,
-          baseUrl,
-          capability: "llm",
-          transport: "stream",
-        }),
-      })
+    ? detectOpenAICompletionsCompat(model).defaults
     : undefined;
 
   const needsForce = Boolean(
     detectedCompatDefaults &&