fix(providers): respect responses developer-role compat (#60385)

This commit is contained in:
Vincent Koc
2026-04-04 00:49:16 +09:00
committed by GitHub
parent 62b736a8c2
commit bd4f745833
2 changed files with 56 additions and 3 deletions

View File

@@ -1,6 +1,7 @@
import type { Model } from "@mariozechner/pi-ai";
import { describe, expect, it } from "vitest";
import {
buildOpenAIResponsesParams,
buildTransportAwareSimpleStreamFn,
isTransportAwareApiSupported,
parseTransportChunkUsage,
@@ -253,4 +254,54 @@ describe("openai transport stream", () => {
},
});
});
it("uses system role instead of developer for responses providers that disable developer role", () => {
  // An xAI reasoning model served over the Responses API: reasoning is on,
  // but the provider compat layer should veto the "developer" role.
  const xaiModel = {
    id: "grok-4.1-fast",
    name: "Grok 4.1 Fast",
    api: "openai-responses",
    provider: "xai",
    baseUrl: "https://api.x.ai/v1",
    reasoning: true,
    input: ["text"],
    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
    contextWindow: 200000,
    maxTokens: 8192,
  } satisfies Model<"openai-responses">;
  const minimalContext = {
    systemPrompt: "system",
    messages: [],
    tools: [],
  } as never;
  const params = buildOpenAIResponsesParams(xaiModel, minimalContext, undefined) as {
    input?: Array<{ role?: string }>;
  };
  // The system prompt must be emitted with role "system", not "developer".
  expect(params.input?.[0]).toMatchObject({ role: "system" });
});
it("keeps developer role for native OpenAI reasoning responses models", () => {
  // A first-party OpenAI reasoning model: no compat override applies,
  // so the system prompt should keep the "developer" role.
  const openaiModel = {
    id: "gpt-5",
    name: "GPT-5",
    api: "openai-responses",
    provider: "openai",
    baseUrl: "https://api.openai.com/v1",
    reasoning: true,
    input: ["text"],
    cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
    contextWindow: 200000,
    maxTokens: 8192,
  } satisfies Model<"openai-responses">;
  const minimalContext = {
    systemPrompt: "system",
    messages: [],
    tools: [],
  } as never;
  const params = buildOpenAIResponsesParams(openaiModel, minimalContext, undefined) as {
    input?: Array<{ role?: string }>;
  };
  // Native OpenAI reasoning models address the system prompt to "developer".
  expect(params.input?.[0]).toMatchObject({ role: "developer" });
});
});

View File

@@ -345,7 +345,7 @@ function convertResponsesMessages(
model: Model<Api>,
context: Context,
allowedToolCallProviders: Set<string>,
options?: { includeSystemPrompt?: boolean },
options?: { includeSystemPrompt?: boolean; supportsDeveloperRole?: boolean },
) {
const messages: unknown[] = [];
const normalizeIdPart = (part: string) => {
@@ -383,7 +383,7 @@ function convertResponsesMessages(
const includeSystemPrompt = options?.includeSystemPrompt ?? true;
if (includeSystemPrompt && context.systemPrompt) {
messages.push({
role: model.reasoning ? "developer" : "system",
role: model.reasoning && options?.supportsDeveloperRole !== false ? "developer" : "system",
content: sanitizeTransportPayloadText(context.systemPrompt),
});
}
@@ -952,15 +952,17 @@ function getPromptCacheRetention(
return baseUrl?.includes("api.openai.com") ? "24h" : undefined;
}
function buildOpenAIResponsesParams(
export function buildOpenAIResponsesParams(
model: Model<Api>,
context: Context,
options: OpenAIResponsesOptions | undefined,
) {
const compat = getCompat(model as OpenAIModeModel);
const messages = convertResponsesMessages(
model,
context,
new Set(["openai", "openai-codex", "opencode", "azure-openai-responses"]),
{ supportsDeveloperRole: compat.supportsDeveloperRole },
);
const cacheRetention = resolveCacheRetention(options?.cacheRetention);
const params: Record<string, unknown> = {