fix(github-copilot): preserve reasoning IDs for Copilot Codex models (#71684)

* fix(github-copilot): preserve all reasoning IDs and add gpt-5.3-codex support

The existing guard (8fd15ed0e5) only skipped rewriting reasoning item IDs
when encrypted_content was a non-null string. When gpt-5.3-codex is used
via GitHub Copilot, the model falls through to the forward-compat catch-all
with reasoning:false, so encrypted_content is never requested and arrives
as null — which bypasses the guard and causes the ID to be rewritten. Copilot
validates reasoning item IDs server-side regardless of whether the client
includes encrypted_content, so the rewritten ID triggers the 400 error.

Two changes:

1. connection-bound-ids.ts: skip ALL reasoning items unconditionally.
   Reasoning items always reference server-side state bound to their
   original ID; rewriting any of them breaks Copilot's lookup.

2. models.ts + index.ts: extend the forward-compat cloning logic to
   cover gpt-5.3-codex (adds it to the template-target set and to
   CODEX_TEMPLATE_MODEL_IDS so it can also serve as a template source
   for gpt-5.4). Adds gpt-5.3-codex to COPILOT_XHIGH_MODEL_IDS for
   the thinking profile.

Thanks @InvalidPandaa.

* docs(github-copilot): clarify gpt-5.3-codex is a no-op template for itself

https://claude.ai/code/session_01EAFmq4WyKkiUkVAqRXp4Bm

* fix(github-copilot): remove dead reasoning prefix branch in deriveReplacementId

https://claude.ai/code/session_01EAFmq4WyKkiUkVAqRXp4Bm

* fix(github-copilot): align reasoning id replay tests

* test(plugin-sdk): use cjs sidecar for require fast path

---------

Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: Peter Steinberger <steipete@gmail.com>
This commit is contained in:
InvalidPanda ツ
2026-04-25 21:52:07 +02:00
committed by GitHub
parent 1d49b8cdaa
commit b64bfc5d9a
8 changed files with 101 additions and 36 deletions

View File

@@ -135,6 +135,50 @@ describe("resolveCopilotForwardCompatModel", () => {
expect((result as unknown as Record<string, unknown>).reasoning).toBe(true);
});
// gpt-5.3-codex is a template-target (and a no-op template source for
// itself), so when it is absent from the registry, resolution must clone
// the gpt-5.2-codex template while re-stamping id and name. The original
// title claimed a "gpt-5.3-codex template" was cloned, but the only
// template present in the registry here is gpt-5.2-codex.
it("clones gpt-5.2-codex template for gpt-5.3-codex when not in registry", () => {
const template = {
id: "gpt-5.2-codex",
name: "gpt-5.2-codex",
provider: "github-copilot",
api: "openai-responses",
reasoning: true,
contextWindow: 200_000,
};
const ctx = createMockCtx("gpt-5.3-codex", {
"github-copilot/gpt-5.2-codex": template,
});
const result = requireResolvedModel(ctx);
// id/name must be re-stamped to the requested model, not the template's.
expect(result.id).toBe("gpt-5.3-codex");
expect(result.name).toBe("gpt-5.3-codex");
// reasoning is inherited from the cloned template.
expect((result as unknown as Record<string, unknown>).reasoning).toBe(true);
});
it("prefers gpt-5.3-codex as template source over gpt-5.2-codex for gpt-5.4", () => {
// Both candidate templates share every field except contextWindow, so
// the final assertion can tell which one resolution actually cloned.
const makeTemplate = (modelId: string, contextWindow: number) => ({
id: modelId,
name: modelId,
provider: "github-copilot",
api: "openai-responses",
reasoning: true,
contextWindow,
});
const ctx = createMockCtx("gpt-5.4", {
"github-copilot/gpt-5.3-codex": makeTemplate("gpt-5.3-codex", 300_000),
"github-copilot/gpt-5.2-codex": makeTemplate("gpt-5.2-codex", 200_000),
});
const result = requireResolvedModel(ctx);
expect(result.id).toBe("gpt-5.4");
// 300_000 proves the newer gpt-5.3-codex template won over gpt-5.2-codex.
expect((result as unknown as Record<string, unknown>).contextWindow).toBe(300_000);
});
it("falls through to synthetic catch-all when codex template is missing", () => {
const ctx = createMockCtx("gpt-5.4");
const result = requireResolvedModel(ctx);
@@ -158,11 +202,20 @@ describe("resolveCopilotForwardCompatModel", () => {
}
});
it("infers reasoning=true for Codex model IDs", () => {
// Every "-codex" variant, including the newly covered gpt-5.3-codex,
// should resolve with the reasoning flag enabled.
const codexIds = ["gpt-5.4-codex", "gpt-5.5-codex", "gpt-5.4-codex-mini", "gpt-5.3-codex"];
codexIds.forEach((id) => {
const resolved = requireResolvedModel(createMockCtx(id));
expect((resolved as unknown as Record<string, unknown>).reasoning).toBe(true);
});
});
it("sets reasoning=false for non-reasoning model IDs including mid-string o1/o3", () => {
for (const id of [
"gpt-5.4-mini",
"claude-sonnet-4.6",
"gpt-4o",
"mycodexmodel",
"audio-o1-hd",
"turbo-o3-voice",
]) {