fix(github-copilot): preserve encrypted reasoning ids with encrypted_content (#71448)

Preserve encrypted Copilot Responses reasoning item IDs during replay and harden the live Copilot replay probe.

Thanks @a410979729-sys.
This commit is contained in:
a410979729-sys
2026-04-25 15:57:47 +08:00
committed by GitHub
parent 10ed007fb4
commit 8fd15ed0e5
4 changed files with 89 additions and 17 deletions

View File

@@ -15,6 +15,9 @@ Docs: https://docs.openclaw.ai
- OpenAI/Codex image generation: canonicalize legacy `openai-codex.baseUrl` values such as `https://chatgpt.com/backend-api` to the Codex Responses backend before calling `gpt-image-2`, matching the chat transport. Fixes #71460.
- Control UI: make `/usage` use the fresh context snapshot for context percentage, and include cache-write tokens in the Usage overview cache-hit denominator. Fixes #47885. Thanks @imwyvern and @Ante042.
- GitHub Copilot: preserve encrypted Responses reasoning item IDs during replay
so Copilot can validate encrypted reasoning payloads across requests. (#71448)
Thanks @a410979729-sys.
- Telegram/webhook: acknowledge validated webhook updates before running bot middleware, keeping slow agent turns from tripping Telegram delivery retries while preserving per-chat processing lanes. Fixes #71392.
- MCP: retire one-shot embedded bundled MCP runtimes at run end, skip bundle-MCP startup when a runtime tool allowlist cannot reach bundle-MCP tools, and add `mcp.sessionIdleTtlMs` idle eviction for leaked session runtimes. Fixes #71106, #71110, #70389, and #70808.
- MCP/config reload: hot-apply `mcp.*` changes by disposing cached session MCP runtimes, and dispose bundled MCP runtimes during gateway shutdown so removed `mcp.servers` entries reap child processes promptly. Fixes #60656.

View File

@@ -1,6 +1,7 @@
import { streamOpenAIResponses, type AssistantMessage, type Model } from "@mariozechner/pi-ai";
import { buildCopilotDynamicHeaders } from "openclaw/plugin-sdk/provider-stream-shared";
import { describe, expect, it } from "vitest";
import { resolveFirstGithubToken } from "./auth.js";
import { wrapCopilotOpenAIResponsesStream } from "./stream.js";
import { resolveCopilotApiToken } from "./token.js";
@@ -8,14 +9,21 @@ const LIVE =
process.env.OPENCLAW_LIVE_TEST === "1" ||
process.env.LIVE === "1" ||
process.env.GITHUB_COPILOT_LIVE_TEST === "1";
const GITHUB_TOKEN =
const ENV_GITHUB_TOKEN =
process.env.OPENCLAW_LIVE_GITHUB_COPILOT_TOKEN ??
process.env.COPILOT_GITHUB_TOKEN ??
process.env.GH_TOKEN ??
process.env.GITHUB_TOKEN ??
"";
const LIVE_MODEL_ID = process.env.OPENCLAW_LIVE_GITHUB_COPILOT_MODEL?.trim() || "gpt-5.4";
const describeLive = LIVE && GITHUB_TOKEN.trim().length > 0 ? describe : describe.skip;
const describeLive = LIVE ? describe : describe.skip;
// Result of exchanging a GitHub token for a Copilot API token.
// NOTE(review): mirrors the shape returned by resolveCopilotApiToken — confirm against token.js.
type CopilotApiToken = {
token: string;
expiresAt: number;
source: string;
baseUrl: string;
};
const ZERO_USAGE = {
input: 0,
@@ -99,6 +107,27 @@ function buildReplayAssistantMessage(connectionBoundId: string): AssistantMessag
};
}
/**
 * Collect GitHub tokens to try for the Copilot token exchange, in priority
 * order: the env-provided token first, then the auth-profile token.
 * Duplicate tokens are collapsed so the same credential is not retried.
 */
async function resolveGithubTokenCandidates(): Promise<Array<{ source: string; token: string }>> {
  const found: Array<{ source: string; token: string }> = [];

  const fromEnv = ENV_GITHUB_TOKEN.trim();
  if (fromEnv.length > 0) {
    found.push({ source: "env", token: fromEnv });
  }

  // Blank out the token env vars so the profile lookup cannot simply echo
  // the environment token back at us.
  const scrubbedEnv = {
    ...process.env,
    COPILOT_GITHUB_TOKEN: "",
    GH_TOKEN: "",
    GITHUB_TOKEN: "",
  };
  const profile = await resolveFirstGithubToken({ env: scrubbedEnv });
  const fromProfile = profile.githubToken.trim();
  const alreadyKnown = found.some((entry) => entry.token === fromProfile);
  if (fromProfile && !alreadyKnown) {
    found.push({ source: "auth-profile", token: fromProfile });
  }

  return found;
}
function extractText(response: unknown): string {
const content = (response as { content?: Array<{ type?: string; text?: string }> }).content;
if (!Array.isArray(content)) {
@@ -114,22 +143,37 @@ function extractText(response: unknown): string {
describeLive("github-copilot connection-bound Responses IDs live", () => {
it("rewrites replayed connection-bound item IDs before sending to Copilot", async () => {
logProgress("start");
let token;
try {
logProgress("exchanging GitHub token for Copilot token");
token = await withTimeout(
"Copilot token exchange",
resolveCopilotApiToken({
githubToken: GITHUB_TOKEN,
fetchImpl: fetchWithTimeout,
}),
15_000,
);
} catch (error) {
logProgress(`skip (${error instanceof Error ? error.message : String(error)})`);
return;
const candidates = await resolveGithubTokenCandidates();
if (candidates.length === 0) {
throw new Error("No GitHub Copilot token found in env or auth profile");
}
let token: CopilotApiToken | undefined;
const failures: string[] = [];
for (const candidate of candidates) {
try {
logProgress(`exchanging ${candidate.source} GitHub token for Copilot token`);
token = await withTimeout(
"Copilot token exchange",
resolveCopilotApiToken({
githubToken: candidate.token,
fetchImpl: fetchWithTimeout,
}),
15_000,
);
logProgress(
`token ok via ${candidate.source} (${token.source.startsWith("cache:") ? "cache" : "fetched"})`,
);
break;
} catch (error) {
const message = error instanceof Error ? error.message : String(error);
failures.push(`${candidate.source}: ${message}`);
logProgress(`token exchange failed via ${candidate.source} (${message})`);
}
}
if (!token) {
throw new Error(`Copilot token exchange failed for all candidates: ${failures.join("; ")}`);
}
logProgress(`token ok (${token.source.startsWith("cache:") ? "cache" : "fetched"})`);
const model = buildModel(token.baseUrl);
const staleId = Buffer.from(`copilot-${"x".repeat(24)}`).toString("base64");

View File

@@ -35,6 +35,28 @@ describe("github-copilot connection-bound response IDs", () => {
expect(input[4]?.id).toMatch(/^msg_[a-f0-9]{16}$/);
});
it("preserves reasoning IDs when encrypted_content is present", () => {
const originalId = Buffer.from(`reasoning-${"e".repeat(24)}`).toString("base64");
const input = [
{
id: originalId,
type: "reasoning",
encrypted_content: "opaque-encrypted-payload",
},
];
expect(rewriteCopilotConnectionBoundResponseIds(input)).toBe(false);
expect(input[0]?.id).toBe(originalId);
});
it("still rewrites reasoning IDs when encrypted_content is absent", () => {
const originalId = Buffer.from(`reasoning-${"n".repeat(24)}`).toString("base64");
const input = [{ id: originalId, type: "reasoning" }];
expect(rewriteCopilotConnectionBoundResponseIds(input)).toBe(true);
expect(input[0]?.id).toMatch(/^rs_[a-f0-9]{16}$/);
});
it("patches response payload input arrays only", () => {
const messageId = Buffer.from(`message-${"m".repeat(24)}`).toString("base64");
const payload = { input: [{ id: messageId, type: "message" }] };

View File

@@ -35,6 +35,9 @@ export function rewriteCopilotConnectionBoundResponseIds(input: unknown): boolea
if (typeof id !== "string" || id.length === 0) {
continue;
}
if (item.type === "reasoning" && typeof item.encrypted_content === "string") {
continue;
}
if (looksLikeConnectionBoundId(id)) {
item.id = deriveReplacementId(typeof item.type === "string" ? item.type : undefined, id);
rewrote = true;