Files
openclaw/extensions/github-copilot/stream.test.ts
InvalidPanda ツ b64bfc5d9a fix(github-copilot): preserve reasoning IDs for Copilot Codex models (#71684)
* fix(github-copilot): preserve all reasoning IDs and add gpt-5.3-codex support

The existing guard (8fd15ed0e5) only skipped rewriting reasoning item IDs
when encrypted_content was a non-null string. When gpt-5.3-codex is used
via GitHub Copilot, the model falls through to the forward-compat catch-all
with reasoning:false, so encrypted_content is never requested and arrives
as null — bypassing the guard and causing a rewrite. Copilot validates
reasoning item IDs server-side regardless of whether the client includes
encrypted_content, so the rewritten id triggers the 400 error.

Two changes:

1. connection-bound-ids.ts: skip ALL reasoning items unconditionally.
   Reasoning items always reference server-side state bound to their
   original ID; rewriting any of them breaks Copilot's lookup.

2. models.ts + index.ts: extend the forward-compat cloning logic to
   cover gpt-5.3-codex (adds it to the template-target set and to
   CODEX_TEMPLATE_MODEL_IDS so it can also serve as a template source
   for gpt-5.4). Adds gpt-5.3-codex to COPILOT_XHIGH_MODEL_IDS for
   the thinking profile.

Thanks @InvalidPandaa.

* docs(github-copilot): clarify gpt-5.3-codex is a no-op template for itself

https://claude.ai/code/session_01EAFmq4WyKkiUkVAqRXp4Bm

* fix(github-copilot): remove dead reasoning prefix branch in deriveReplacementId

https://claude.ai/code/session_01EAFmq4WyKkiUkVAqRXp4Bm

* fix(github-copilot): align reasoning id replay tests

* test(plugin-sdk): use cjs sidecar for require fast path

---------

Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: Peter Steinberger <steipete@gmail.com>
2026-04-25 20:52:07 +01:00

216 lines
6.4 KiB
TypeScript

import { buildCopilotDynamicHeaders } from "openclaw/plugin-sdk/provider-stream-shared";
import { describe, expect, it, vi } from "vitest";
import {
wrapCopilotAnthropicStream,
wrapCopilotOpenAIResponsesStream,
wrapCopilotProviderStream,
} from "./stream.js";
/**
 * Narrows the possibly-undefined result of a wrapper to a callable stream fn.
 * Fails the test via `expect` first, then throws so TypeScript (and the
 * runtime) can rely on the returned value being defined.
 */
function requireStreamFn(streamFn: ReturnType<typeof wrapCopilotProviderStream>) {
  expect(streamFn).toBeTypeOf("function");
  if (streamFn) {
    return streamFn;
  }
  throw new Error("expected stream fn");
}
describe("wrapCopilotAnthropicStream", () => {
  /** Produces a stream result whose async iterator yields nothing. */
  const emptyStream = () => ({ async *[Symbol.asyncIterator]() {} }) as never;

  it("adds Copilot headers and Anthropic cache markers for Claude payloads", async () => {
    const capturedPayloads: Array<{
      messages: Array<Record<string, unknown>>;
    }> = [];
    const upstream = vi.fn((model, _context, options) => {
      const payload = {
        messages: [
          { role: "system", content: "system prompt" },
          {
            role: "assistant",
            content: [{ type: "thinking", text: "draft", cache_control: { type: "ephemeral" } }],
          },
        ],
      };
      options?.onPayload?.(payload, model);
      capturedPayloads.push(payload);
      return emptyStream();
    });
    const streamFn = requireStreamFn(wrapCopilotAnthropicStream(upstream));
    const messages = [
      {
        role: "user",
        content: [
          { type: "text", text: "look" },
          { type: "image", image: "data:image/png;base64,abc" },
        ],
      },
    ] as Parameters<typeof buildCopilotDynamicHeaders>[0]["messages"];
    const expectedCopilotHeaders = buildCopilotDynamicHeaders({ messages, hasImages: true });
    void streamFn(
      { provider: "github-copilot", api: "anthropic-messages", id: "claude-sonnet-4.6" } as never,
      { messages } as never,
      { headers: { "X-Test": "1" } },
    );
    expect(upstream).toHaveBeenCalledOnce();
    // Caller-supplied headers must survive alongside the injected Copilot set.
    expect(upstream.mock.calls[0]?.[2]).toMatchObject({
      headers: { ...expectedCopilotHeaders, "X-Test": "1" },
    });
    // System text gains a cache marker; the thinking block sheds its own.
    expect(capturedPayloads[0]?.messages).toEqual([
      {
        role: "system",
        content: [{ type: "text", text: "system prompt", cache_control: { type: "ephemeral" } }],
      },
      { role: "assistant", content: [{ type: "thinking", text: "draft" }] },
    ]);
  });

  it("leaves non-Anthropic Copilot models untouched", () => {
    const upstream = vi.fn(() => emptyStream());
    const streamFn = requireStreamFn(wrapCopilotAnthropicStream(upstream));
    const options = { headers: { Existing: "1" } };
    void streamFn(
      { provider: "github-copilot", api: "openai-responses", id: "gpt-4.1" } as never,
      { messages: [{ role: "user", content: "hi" }] } as never,
      options as never,
    );
    // The very same options object must be forwarded, by reference.
    expect(upstream).toHaveBeenCalledWith(expect.anything(), expect.anything(), options);
  });

  it("adds Copilot headers, preserves reasoning IDs, and rewrites message IDs before payload send", () => {
    const reasoningId = Buffer.from(`reasoning-${"x".repeat(24)}`).toString("base64");
    const messageId = Buffer.from(`message-${"y".repeat(24)}`).toString("base64");
    const capturedPayloads: Array<{ input: Array<Record<string, unknown>> }> = [];
    const upstream = vi.fn((_model, _context, options) => {
      const payload = {
        input: [
          { id: reasoningId, type: "reasoning" },
          { id: messageId, type: "message" },
        ],
      };
      options?.onPayload?.(payload, _model);
      capturedPayloads.push(payload);
      return emptyStream();
    });
    const streamFn = requireStreamFn(wrapCopilotOpenAIResponsesStream(upstream));
    const messages = [
      {
        role: "toolResult",
        content: [
          { type: "text", text: "look" },
          { type: "image", image: "data:image/png;base64,abc" },
        ],
      },
    ] as Parameters<typeof buildCopilotDynamicHeaders>[0]["messages"];
    const expectedCopilotHeaders = buildCopilotDynamicHeaders({ messages, hasImages: true });
    void streamFn(
      { provider: "github-copilot", api: "openai-responses", id: "gpt-5.4" } as never,
      { messages } as never,
      { headers: { "X-Test": "1" } },
    );
    expect(upstream.mock.calls[0]?.[2]).toMatchObject({
      headers: { ...expectedCopilotHeaders, "X-Test": "1" },
    });
    // Reasoning items keep their connection-bound ID; message items are rewritten.
    expect(capturedPayloads[0]?.input[0]?.id).toBe(reasoningId);
    expect(capturedPayloads[0]?.input[1]?.id).toMatch(/^msg_[a-f0-9]{16}$/);
  });

  it("rewrites Copilot Responses IDs returned by an existing payload hook", async () => {
    const connectionBoundId = Buffer.from(`message-${"y".repeat(24)}`).toString("base64");
    let returnedPayload: unknown;
    const upstream = vi.fn(async (_model, _context, options) => {
      // Capture whatever the composed onPayload chain hands back.
      returnedPayload = await options?.onPayload?.({ input: [] }, _model);
      return emptyStream();
    });
    const streamFn = requireStreamFn(wrapCopilotOpenAIResponsesStream(upstream));
    await streamFn(
      { provider: "github-copilot", api: "openai-responses", id: "gpt-5.4" } as never,
      { messages: [{ role: "user", content: "hi" }] } as never,
      {
        onPayload: () => ({ input: [{ id: connectionBoundId, type: "message" }] }),
      } as never,
    );
    const hookResult = returnedPayload as { input: Array<Record<string, unknown>> };
    expect(hookResult.input[0]?.id).toMatch(/^msg_[a-f0-9]{16}$/);
  });

  it("adapts provider stream context without changing wrapper behavior", () => {
    const upstream = vi.fn(() => emptyStream());
    const streamFn = requireStreamFn(
      wrapCopilotProviderStream({ streamFn: upstream } as never),
    );
    void streamFn(
      { provider: "github-copilot", api: "openai-responses", id: "gpt-4.1" } as never,
      { messages: [{ role: "user", content: "hi" }] } as never,
      {},
    );
    expect(upstream).toHaveBeenCalledOnce();
  });

  it("does not claim provider transport before OpenClaw chooses one", () => {
    // No underlying streamFn means the wrapper must opt out entirely.
    expect(wrapCopilotProviderStream({ streamFn: undefined } as never)).toBeUndefined();
  });
});