fix(agents): include azure-openai in Responses API store override (#42934)

Merged via squash.

Prepared head SHA: d3285fef41
Co-authored-by: ademczuk <5212682+ademczuk@users.noreply.github.com>
Co-authored-by: frankekn <4488090+frankekn@users.noreply.github.com>
Reviewed-by: @frankekn
This commit is contained in:
ademczuk
2026-03-11 09:16:10 +01:00
committed by GitHub
parent a2e30824e6
commit dc4441322f
3 changed files with 16 additions and 1 deletion

View File

@@ -92,6 +92,7 @@ Docs: https://docs.openclaw.ai
- Security/system.run: fail closed for approval-backed interpreter/runtime commands when OpenClaw cannot bind exactly one concrete local file operand, while extending best-effort direct-file binding to additional runtime forms. Thanks @tdjackey for reporting.
- Gateway/session reset auth: split conversation `/new` and `/reset` handling away from the admin-only `sessions.reset` control-plane RPC so write-scoped gateway callers can no longer reach the privileged reset path through `agent`. Thanks @tdjackey for reporting.
- Telegram/final preview delivery followup: keep ambiguous missing-`message_id` finals only when a preview was already visible, while first-preview/no-id cases still fall back so Telegram users do not lose the final reply. (#41932) Thanks @hougangdev.
- Agents/Azure OpenAI Responses: include the `azure-openai` provider in the Responses API store override so Azure OpenAI multi-turn cron jobs and embedded agent runs no longer fail with HTTP 400 "store is set to false". (#42934, fixes #42800) Thanks @ademczuk.
## 2026.3.8

View File

@@ -1449,6 +1449,20 @@ describe("applyExtraParamsToAgent", () => {
expect(payload.store).toBe(true);
});
// Regression test for #42800: Azure OpenAI models routed through the
// Responses API must have `store` forced to true, exactly like plain OpenAI.
it("forces store=true for azure-openai provider with openai-responses API (#42800)", () => {
  // Minimal Azure model stub; cast because the test only needs these fields.
  const azureModel = {
    api: "openai-responses",
    provider: "azure-openai",
    id: "gpt-5-mini",
    baseUrl: "https://myresource.openai.azure.com/openai/v1",
  } as unknown as Model<"openai-responses">;
  const payload = runResponsesPayloadMutationCase({
    applyProvider: "azure-openai",
    applyModelId: "gpt-5-mini",
    model: azureModel,
  });
  expect(payload.store).toBe(true);
});
it("injects configured OpenAI service_tier into Responses payloads", () => {
const payload = runResponsesPayloadMutationCase({
applyProvider: "openai",

View File

@@ -6,7 +6,7 @@ import { log } from "./logger.js";
// Service tiers accepted by the OpenAI Responses API `service_tier` field.
type OpenAIServiceTier = "auto" | "default" | "flex" | "priority";
// API identifiers that route through the OpenAI Responses payload mutation path.
const OPENAI_RESPONSES_APIS = new Set(["openai-responses"]);
// Providers whose Responses payloads get the store override applied.
// `azure-openai` is included so Azure multi-turn runs do not fail with
// HTTP 400 "store is set to false" (#42800).
const OPENAI_RESPONSES_PROVIDERS = new Set(["openai", "azure-openai", "azure-openai-responses"]);
function isDirectOpenAIBaseUrl(baseUrl: unknown): boolean {
if (typeof baseUrl !== "string" || !baseUrl.trim()) {