@playwo/opencode-cursor-oauth 0.0.0-dev.c80ebcb27754 → 0.0.0-dev.e3644b4a140d

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/proxy.js CHANGED
@@ -16,8 +16,11 @@ import { create, fromBinary, fromJson, toBinary, toJson } from "@bufbuild/protob
16
16
  import { ValueSchema } from "@bufbuild/protobuf/wkt";
17
17
  import { AgentClientMessageSchema, AgentRunRequestSchema, AgentServerMessageSchema, BidiRequestIdSchema, ClientHeartbeatSchema, ConversationActionSchema, ConversationStateStructureSchema, ConversationStepSchema, AgentConversationTurnStructureSchema, ConversationTurnStructureSchema, AssistantMessageSchema, BackgroundShellSpawnResultSchema, DeleteResultSchema, DeleteRejectedSchema, DiagnosticsResultSchema, ExecClientMessageSchema, FetchErrorSchema, FetchResultSchema, GetBlobResultSchema, GrepErrorSchema, GrepResultSchema, KvClientMessageSchema, LsRejectedSchema, LsResultSchema, McpErrorSchema, McpResultSchema, McpSuccessSchema, McpTextContentSchema, McpToolDefinitionSchema, McpToolResultContentItemSchema, ModelDetailsSchema, ReadRejectedSchema, ReadResultSchema, RequestContextResultSchema, RequestContextSchema, RequestContextSuccessSchema, SetBlobResultSchema, ShellRejectedSchema, ShellResultSchema, UserMessageActionSchema, UserMessageSchema, WriteRejectedSchema, WriteResultSchema, WriteShellStdinErrorSchema, WriteShellStdinResultSchema, } from "./proto/agent_pb";
18
18
  import { createHash } from "node:crypto";
19
+ import { connect as connectHttp2 } from "node:http2";
20
+ import { errorDetails, logPluginError, logPluginWarn } from "./logger";
19
21
  const CURSOR_API_URL = process.env.CURSOR_API_URL ?? "https://api2.cursor.sh";
20
22
  const CURSOR_CLIENT_VERSION = "cli-2026.01.09-231024f";
23
+ const CURSOR_CONNECT_PROTOCOL_VERSION = "1";
21
24
  const CONNECT_END_STREAM_FLAG = 0b00000010;
22
25
  const SSE_HEADERS = {
23
26
  "Content-Type": "text/event-stream",
@@ -38,6 +41,31 @@ function evictStaleConversations() {
38
41
  }
39
42
  }
40
43
  }
44
/** Normalize an agent key: trim whitespace and fall back to "default" when empty/missing. */
function normalizeAgentKey(agentKey) {
    const key = agentKey?.trim() ?? "";
    return key === "" ? "default" : key;
}
48
/** SHA-256 hex digest of a string. */
function hashString(value) {
    const hasher = createHash("sha256");
    hasher.update(value);
    return hasher.digest("hex");
}
51
+ function createStoredConversation() {
52
+ return {
53
+ conversationId: crypto.randomUUID(),
54
+ checkpoint: null,
55
+ blobStore: new Map(),
56
+ lastAccessMs: Date.now(),
57
+ systemPromptHash: "",
58
+ completedTurnsFingerprint: "",
59
+ };
60
+ }
61
+ function resetStoredConversation(stored) {
62
+ stored.conversationId = crypto.randomUUID();
63
+ stored.checkpoint = null;
64
+ stored.blobStore = new Map();
65
+ stored.lastAccessMs = Date.now();
66
+ stored.systemPromptHash = "";
67
+ stored.completedTurnsFingerprint = "";
68
+ }
41
69
  /** Connect protocol frame: [1-byte flags][4-byte BE length][payload] */
42
70
  function frameConnectMessage(data, flags = 0) {
43
71
  const frame = Buffer.alloc(5 + data.length);
@@ -47,18 +75,19 @@ function frameConnectMessage(data, flags = 0) {
47
75
  return frame;
48
76
  }
49
77
/** Build a Headers instance carrying the standard Cursor request headers. */
function buildCursorHeaders(options, contentType, extra = {}) {
    return new Headers(buildCursorHeaderValues(options, contentType, extra));
}
/** Plain-object form of the standard Cursor headers; `extra` entries override the defaults. */
function buildCursorHeaderValues(options, contentType, extra = {}) {
    const base = {
        authorization: `Bearer ${options.accessToken}`,
        "content-type": contentType,
        "x-ghost-mode": "true",
        "x-cursor-client-version": CURSOR_CLIENT_VERSION,
        "x-cursor-client-type": "cli",
        "x-request-id": crypto.randomUUID(),
    };
    return { ...base, ...extra };
}
63
92
  function encodeVarint(value) {
64
93
  if (!Number.isSafeInteger(value) || value < 0) {
@@ -130,6 +159,11 @@ async function createCursorSession(options) {
130
159
  });
131
160
  if (!response.ok || !response.body) {
132
161
  const errorBody = await response.text().catch(() => "");
162
+ logPluginError("Cursor RunSSE request failed", {
163
+ requestId: options.requestId,
164
+ status: response.status,
165
+ responseBody: errorBody,
166
+ });
133
167
  throw new Error(`RunSSE failed: ${response.status}${errorBody ? ` ${errorBody}` : ""}`);
134
168
  }
135
169
  const cbs = {
@@ -160,6 +194,12 @@ async function createCursorSession(options) {
160
194
  });
161
195
  if (!appendResponse.ok) {
162
196
  const errorBody = await appendResponse.text().catch(() => "");
197
+ logPluginError("Cursor BidiAppend request failed", {
198
+ requestId: options.requestId,
199
+ appendSeqno: appendSeqno - 1,
200
+ status: appendResponse.status,
201
+ responseBody: errorBody,
202
+ });
163
203
  throw new Error(`BidiAppend failed: ${appendResponse.status}${errorBody ? ` ${errorBody}` : ""}`);
164
204
  }
165
205
  await appendResponse.arrayBuffer().catch(() => undefined);
@@ -183,7 +223,11 @@ async function createCursorSession(options) {
183
223
  }
184
224
  }
185
225
  }
186
- catch {
226
+ catch (error) {
227
+ logPluginWarn("Cursor stream reader closed with error", {
228
+ requestId: options.requestId,
229
+ ...errorDetails(error),
230
+ });
187
231
  finish(alive ? 1 : closeCode);
188
232
  }
189
233
  })();
@@ -196,7 +240,11 @@ async function createCursorSession(options) {
196
240
  return;
197
241
  writeChain = writeChain
198
242
  .then(() => append(data))
199
- .catch(() => {
243
+ .catch((error) => {
244
+ logPluginError("Cursor stream append failed", {
245
+ requestId: options.requestId,
246
+ ...errorDetails(error),
247
+ });
200
248
  try {
201
249
  abortController.abort();
202
250
  }
@@ -236,6 +284,17 @@ async function createCursorSession(options) {
236
284
  };
237
285
  }
238
286
/**
 * Perform a unary Connect RPC against the Cursor API.
 * Prefers the HTTP/2 transport for https targets (or when forced via
 * options.transport), falling back to fetch only when the HTTP/2 attempt
 * failed generically (exitCode 1) without timing out in "auto" mode.
 */
export async function callCursorUnaryRpc(options) {
    const target = new URL(options.rpcPath, options.url ?? CURSOR_API_URL);
    const transport = options.transport ?? "auto";
    const preferHttp2 = transport === "http2" || (transport === "auto" && target.protocol === "https:");
    if (preferHttp2) {
        const http2Result = await callCursorUnaryRpcOverHttp2(options, target);
        const shouldFallBack = transport === "auto" && !http2Result.timedOut && http2Result.exitCode === 1;
        if (!shouldFallBack) {
            return http2Result;
        }
    }
    return callCursorUnaryRpcOverFetch(options, target);
}
297
+ async function callCursorUnaryRpcOverFetch(options, target) {
239
298
  let timedOut = false;
240
299
  const timeoutMs = options.timeoutMs ?? 5_000;
241
300
  const controller = new AbortController();
@@ -246,9 +305,13 @@ export async function callCursorUnaryRpc(options) {
246
305
  }, timeoutMs)
247
306
  : undefined;
248
307
  try {
249
- const response = await fetch(new URL(options.rpcPath, options.url ?? CURSOR_API_URL), {
308
+ const response = await fetch(target, {
250
309
  method: "POST",
251
- headers: buildCursorHeaders(options, "application/proto"),
310
+ headers: buildCursorHeaders(options, "application/proto", {
311
+ accept: "application/proto, application/json",
312
+ "connect-protocol-version": CURSOR_CONNECT_PROTOCOL_VERSION,
313
+ "connect-timeout-ms": String(timeoutMs),
314
+ }),
252
315
  body: toFetchBody(options.requestBody),
253
316
  signal: controller.signal,
254
317
  });
@@ -260,6 +323,12 @@ export async function callCursorUnaryRpc(options) {
260
323
  };
261
324
  }
262
325
  catch {
326
+ logPluginError("Cursor unary fetch transport failed", {
327
+ rpcPath: options.rpcPath,
328
+ url: target.toString(),
329
+ timeoutMs,
330
+ timedOut,
331
+ });
263
332
  return {
264
333
  body: new Uint8Array(),
265
334
  exitCode: timedOut ? 124 : 1,
@@ -271,6 +340,121 @@ export async function callCursorUnaryRpc(options) {
271
340
  clearTimeout(timeout);
272
341
  }
273
342
  }
343
/**
 * Perform a unary Connect RPC over a raw node:http2 session.
 *
 * Resolves (never rejects) with `{ body, exitCode, timedOut }`:
 *   - exitCode 0 on a 2xx response, 124 on timeout, otherwise the HTTP
 *     status (or 1 for transport-level failures).
 * All session/stream/timeout cleanup is funneled through `finish`, which is
 * guarded by `settled` so only the first outcome wins.
 */
async function callCursorUnaryRpcOverHttp2(options, target) {
    const timeoutMs = options.timeoutMs ?? 5_000;
    const authority = `${target.protocol}//${target.host}`;
    return new Promise((resolve) => {
        let settled = false;
        let timedOut = false;
        let session;
        let stream;
        // Single exit path: close stream + session (best-effort) and resolve once.
        const finish = (result) => {
            if (settled)
                return;
            settled = true;
            if (timeout)
                clearTimeout(timeout);
            try {
                stream?.close();
            }
            catch { }
            try {
                session?.close();
            }
            catch { }
            resolve(result);
        };
        // timeoutMs <= 0 disables the client-side deadline entirely.
        const timeout = timeoutMs > 0
            ? setTimeout(() => {
                timedOut = true;
                finish({
                    body: new Uint8Array(),
                    exitCode: 124,
                    timedOut: true,
                });
            }, timeoutMs)
            : undefined;
        try {
            session = connectHttp2(authority);
            // Session-level errors (TLS/connection) — report and settle as failure.
            session.once("error", (error) => {
                logPluginError("Cursor unary HTTP/2 session failed", {
                    rpcPath: options.rpcPath,
                    url: target.toString(),
                    timedOut,
                    ...errorDetails(error),
                });
                finish({
                    body: new Uint8Array(),
                    exitCode: timedOut ? 124 : 1,
                    timedOut,
                });
            });
            // HTTP/2 pseudo-headers plus the standard Cursor/Connect headers.
            const headers = {
                ":method": "POST",
                ":path": `${target.pathname}${target.search}`,
                ...buildCursorHeaderValues(options, "application/proto", {
                    accept: "application/proto, application/json",
                    "connect-protocol-version": CURSOR_CONNECT_PROTOCOL_VERSION,
                    "connect-timeout-ms": String(timeoutMs),
                }),
            };
            stream = session.request(headers);
            let statusCode = 0;
            const chunks = [];
            // :status may arrive as number or string depending on runtime.
            stream.once("response", (responseHeaders) => {
                const statusHeader = responseHeaders[":status"];
                statusCode = typeof statusHeader === "number"
                    ? statusHeader
                    : Number(statusHeader ?? 0);
            });
            stream.on("data", (chunk) => {
                chunks.push(Buffer.from(chunk));
            });
            // Normal completion: concatenate body, map status to exit code.
            stream.once("end", () => {
                const body = new Uint8Array(Buffer.concat(chunks));
                finish({
                    body,
                    exitCode: statusCode >= 200 && statusCode < 300 ? 0 : (statusCode || 1),
                    timedOut,
                });
            });
            stream.once("error", (error) => {
                logPluginError("Cursor unary HTTP/2 stream failed", {
                    rpcPath: options.rpcPath,
                    url: target.toString(),
                    timedOut,
                    ...errorDetails(error),
                });
                finish({
                    body: new Uint8Array(),
                    exitCode: timedOut ? 124 : 1,
                    timedOut,
                });
            });
            // Bun's node:http2 client currently breaks on end(Buffer.alloc(0)) against
            // Cursor's HTTPS endpoint, but a header-only end() succeeds for empty unary bodies.
            if (options.requestBody.length > 0) {
                stream.end(Buffer.from(options.requestBody));
            }
            else {
                stream.end();
            }
        }
        catch (error) {
            // Synchronous setup failure (e.g. invalid authority).
            logPluginError("Cursor unary HTTP/2 setup failed", {
                rpcPath: options.rpcPath,
                url: target.toString(),
                timedOut,
                ...errorDetails(error),
            });
            finish({
                body: new Uint8Array(),
                exitCode: timedOut ? 124 : 1,
                timedOut,
            });
        }
    });
}
274
458
  let proxyServer;
275
459
  let proxyPort;
276
460
  let proxyAccessTokenProvider;
@@ -312,10 +496,19 @@ export async function startProxy(getAccessToken, models = []) {
312
496
  throw new Error("Cursor proxy access token provider not configured");
313
497
  }
314
498
  const accessToken = await proxyAccessTokenProvider();
315
- return handleChatCompletion(body, accessToken);
499
+ const sessionId = req.headers.get("x-opencode-session-id")
500
+ ?? req.headers.get("x-session-id")
501
+ ?? undefined;
502
+ const agentKey = req.headers.get("x-opencode-agent") ?? undefined;
503
+ return handleChatCompletion(body, accessToken, { sessionId, agentKey });
316
504
  }
317
505
  catch (err) {
318
506
  const message = err instanceof Error ? err.message : String(err);
507
+ logPluginError("Cursor proxy request failed", {
508
+ path: url.pathname,
509
+ method: req.method,
510
+ ...errorDetails(err),
511
+ });
319
512
  return new Response(JSON.stringify({
320
513
  error: { message, type: "server_error", code: "internal_error" },
321
514
  }), { status: 500, headers: { "Content-Type": "application/json" } });
@@ -345,10 +538,11 @@ export function stopProxy() {
345
538
  activeBridges.clear();
346
539
  conversationStates.clear();
347
540
  }
348
- function handleChatCompletion(body, accessToken) {
349
- const { systemPrompt, userText, turns, toolResults } = parseMessages(body.messages);
541
+ function handleChatCompletion(body, accessToken, context = {}) {
542
+ const parsed = parseMessages(body.messages);
543
+ const { systemPrompt, userText, turns, toolResults, pendingAssistantSummary, completedTurnsFingerprint, } = parsed;
350
544
  const modelId = body.model;
351
- const tools = body.tools ?? [];
545
+ const tools = selectToolsForChoice(body.tools ?? [], body.tool_choice);
352
546
  if (!userText && toolResults.length === 0) {
353
547
  return new Response(JSON.stringify({
354
548
  error: {
@@ -357,16 +551,24 @@ function handleChatCompletion(body, accessToken) {
357
551
  },
358
552
  }), { status: 400, headers: { "Content-Type": "application/json" } });
359
553
  }
360
- // bridgeKey: model-specific, for active tool-call bridges
554
+ // bridgeKey: session/agent-scoped, for active tool-call bridges
361
555
  // convKey: model-independent, for conversation state that survives model switches
362
- const bridgeKey = deriveBridgeKey(modelId, body.messages);
363
- const convKey = deriveConversationKey(body.messages);
556
+ const bridgeKey = deriveBridgeKey(modelId, body.messages, context.sessionId, context.agentKey);
557
+ const convKey = deriveConversationKey(body.messages, context.sessionId, context.agentKey);
364
558
  const activeBridge = activeBridges.get(bridgeKey);
365
559
  if (activeBridge && toolResults.length > 0) {
366
560
  activeBridges.delete(bridgeKey);
367
561
  if (activeBridge.bridge.alive) {
562
+ if (activeBridge.modelId !== modelId) {
563
+ logPluginWarn("Resuming pending Cursor tool call on original model after model switch", {
564
+ requestedModelId: modelId,
565
+ resumedModelId: activeBridge.modelId,
566
+ convKey,
567
+ bridgeKey,
568
+ });
569
+ }
368
570
  // Resume the live bridge with tool results
369
- return handleToolResultResume(activeBridge, toolResults, modelId, bridgeKey, convKey);
571
+ return handleToolResultResume(activeBridge, toolResults, bridgeKey, convKey);
370
572
  }
371
573
  // Bridge died (timeout, server disconnect, etc.).
372
574
  // Clean up and fall through to start a fresh bridge.
@@ -381,28 +583,47 @@ function handleChatCompletion(body, accessToken) {
381
583
  }
382
584
  let stored = conversationStates.get(convKey);
383
585
  if (!stored) {
384
- stored = {
385
- conversationId: deterministicConversationId(convKey),
386
- checkpoint: null,
387
- blobStore: new Map(),
388
- lastAccessMs: Date.now(),
389
- };
586
+ stored = createStoredConversation();
390
587
  conversationStates.set(convKey, stored);
391
588
  }
589
+ const systemPromptHash = hashString(systemPrompt);
590
+ if (stored.checkpoint
591
+ && (stored.systemPromptHash !== systemPromptHash
592
+ || (turns.length > 0 && stored.completedTurnsFingerprint !== completedTurnsFingerprint))) {
593
+ resetStoredConversation(stored);
594
+ }
595
+ stored.systemPromptHash = systemPromptHash;
596
+ stored.completedTurnsFingerprint = completedTurnsFingerprint;
392
597
  stored.lastAccessMs = Date.now();
393
598
  evictStaleConversations();
394
599
  // Build the request. When tool results are present but the bridge died,
395
600
  // we must still include the last user text so Cursor has context.
396
601
  const mcpTools = buildMcpToolDefinitions(tools);
397
- const effectiveUserText = userText || (toolResults.length > 0
398
- ? toolResults.map((r) => r.content).join("\n")
399
- : "");
400
- const payload = buildCursorRequest(modelId, systemPrompt, effectiveUserText, turns, stored.conversationId, stored.checkpoint, stored.blobStore);
602
+ const needsInitialHandoff = !stored.checkpoint && (turns.length > 0 || pendingAssistantSummary || toolResults.length > 0);
603
+ const replayTurns = needsInitialHandoff ? [] : turns;
604
+ const effectiveUserText = needsInitialHandoff
605
+ ? buildInitialHandoffPrompt(userText, turns, pendingAssistantSummary, toolResults)
606
+ : toolResults.length > 0
607
+ ? buildToolResumePrompt(userText, pendingAssistantSummary, toolResults)
608
+ : userText;
609
+ const payload = buildCursorRequest(modelId, systemPrompt, effectiveUserText, replayTurns, stored.conversationId, stored.checkpoint, stored.blobStore);
401
610
  payload.mcpTools = mcpTools;
402
611
  if (body.stream === false) {
403
- return handleNonStreamingResponse(payload, accessToken, modelId, convKey);
612
+ return handleNonStreamingResponse(payload, accessToken, modelId, convKey, {
613
+ systemPrompt,
614
+ systemPromptHash,
615
+ completedTurnsFingerprint,
616
+ turns,
617
+ userText,
618
+ });
404
619
  }
405
- return handleStreamingResponse(payload, accessToken, modelId, bridgeKey, convKey);
620
+ return handleStreamingResponse(payload, accessToken, modelId, bridgeKey, convKey, {
621
+ systemPrompt,
622
+ systemPromptHash,
623
+ completedTurnsFingerprint,
624
+ turns,
625
+ userText,
626
+ });
406
627
  }
407
628
  /** Normalize OpenAI message content to a plain string. */
408
629
  function textContent(content) {
@@ -417,8 +638,6 @@ function textContent(content) {
417
638
  }
418
639
  function parseMessages(messages) {
419
640
  let systemPrompt = "You are a helpful assistant.";
420
- const pairs = [];
421
- const toolResults = [];
422
641
  // Collect system messages
423
642
  const systemParts = messages
424
643
  .filter((m) => m.role === "system")
@@ -426,40 +645,181 @@ function parseMessages(messages) {
426
645
  if (systemParts.length > 0) {
427
646
  systemPrompt = systemParts.join("\n");
428
647
  }
429
- // Separate tool results from conversation turns
430
648
  const nonSystem = messages.filter((m) => m.role !== "system");
431
- let pendingUser = "";
649
+ const parsedTurns = [];
650
+ let currentTurn;
432
651
  for (const msg of nonSystem) {
433
- if (msg.role === "tool") {
434
- toolResults.push({
435
- toolCallId: msg.tool_call_id ?? "",
436
- content: textContent(msg.content),
437
- });
652
+ if (msg.role === "user") {
653
+ if (currentTurn)
654
+ parsedTurns.push(currentTurn);
655
+ currentTurn = {
656
+ userText: textContent(msg.content),
657
+ segments: [],
658
+ };
659
+ continue;
438
660
  }
439
- else if (msg.role === "user") {
440
- if (pendingUser) {
441
- pairs.push({ userText: pendingUser, assistantText: "" });
442
- }
443
- pendingUser = textContent(msg.content);
661
+ if (!currentTurn) {
662
+ currentTurn = { userText: "", segments: [] };
444
663
  }
445
- else if (msg.role === "assistant") {
446
- // Skip assistant messages that are just tool_calls with no text
664
+ if (msg.role === "assistant") {
447
665
  const text = textContent(msg.content);
448
- if (pendingUser) {
449
- pairs.push({ userText: pendingUser, assistantText: text });
450
- pendingUser = "";
666
+ if (text) {
667
+ currentTurn.segments.push({ kind: "assistantText", text });
668
+ }
669
+ if (msg.tool_calls?.length) {
670
+ currentTurn.segments.push({
671
+ kind: "assistantToolCalls",
672
+ toolCalls: msg.tool_calls,
673
+ });
451
674
  }
675
+ continue;
676
+ }
677
+ if (msg.role === "tool") {
678
+ currentTurn.segments.push({
679
+ kind: "toolResult",
680
+ result: {
681
+ toolCallId: msg.tool_call_id ?? "",
682
+ content: textContent(msg.content),
683
+ },
684
+ });
685
+ }
686
+ }
687
+ if (currentTurn)
688
+ parsedTurns.push(currentTurn);
689
+ let userText = "";
690
+ let toolResults = [];
691
+ let pendingAssistantSummary = "";
692
+ let completedTurnStates = parsedTurns;
693
+ const lastTurn = parsedTurns.at(-1);
694
+ if (lastTurn) {
695
+ const trailingSegments = splitTrailingToolResults(lastTurn.segments);
696
+ const hasAssistantSummary = trailingSegments.base.length > 0;
697
+ if (trailingSegments.trailing.length > 0 && hasAssistantSummary) {
698
+ completedTurnStates = parsedTurns.slice(0, -1);
699
+ userText = lastTurn.userText;
700
+ toolResults = trailingSegments.trailing.map((segment) => segment.result);
701
+ pendingAssistantSummary = summarizeTurnSegments(trailingSegments.base);
702
+ }
703
+ else if (lastTurn.userText && lastTurn.segments.length === 0) {
704
+ completedTurnStates = parsedTurns.slice(0, -1);
705
+ userText = lastTurn.userText;
452
706
  }
453
707
  }
454
- let lastUserText = "";
455
- if (pendingUser) {
456
- lastUserText = pendingUser;
708
+ const turns = completedTurnStates
709
+ .map((turn) => ({
710
+ userText: turn.userText,
711
+ assistantText: summarizeTurnSegments(turn.segments),
712
+ }))
713
+ .filter((turn) => turn.userText || turn.assistantText);
714
+ return {
715
+ systemPrompt,
716
+ userText,
717
+ turns,
718
+ toolResults,
719
+ pendingAssistantSummary,
720
+ completedTurnsFingerprint: buildCompletedTurnsFingerprint(systemPrompt, turns),
721
+ };
722
+ }
723
/** Split a segment list into its leading portion and the trailing run of tool results. */
function splitTrailingToolResults(segments) {
    let cut = segments.length;
    for (; cut > 0; cut -= 1) {
        if (segments[cut - 1]?.kind !== "toolResult")
            break;
    }
    const base = segments.slice(0, cut);
    const trailing = segments.slice(cut).filter((s) => s.kind === "toolResult");
    return { base, trailing };
}
733
/** Flatten a turn's segments (text, tool calls, tool results) into one summary string. */
function summarizeTurnSegments(segments) {
    const pieces = [];
    for (const segment of segments) {
        switch (segment.kind) {
            case "assistantText": {
                const text = segment.text.trim();
                if (text)
                    pieces.push(text);
                break;
            }
            case "assistantToolCalls": {
                const summary = segment.toolCalls.map(formatToolCallSummary).join("\n\n");
                if (summary)
                    pieces.push(summary);
                break;
            }
            default:
                // "toolResult" segments.
                pieces.push(formatToolResultSummary(segment.result));
        }
    }
    return pieces.join("\n\n").trim();
}
752
/** Human-readable one-block summary of an assistant tool call (name, id, optional args). */
function formatToolCallSummary(call) {
    const header = `[assistant requested tool ${call.function.name} id=${call.id}]`;
    const args = call.function.arguments?.trim();
    if (!args)
        return header;
    return `${header}\n${args}`;
}
758
/** Human-readable summary of a tool result, labeled with its call id when known. */
function formatToolResultSummary(result) {
    let label = "[tool result]";
    if (result.toolCallId) {
        label = `[tool result id=${result.toolCallId}]`;
    }
    const content = result.content.trim();
    if (!content)
        return label;
    return `${label}\n${content}`;
}
765
/** Stable hash over (systemPrompt, completed turns), used to detect history divergence. */
function buildCompletedTurnsFingerprint(systemPrompt, turns) {
    const payload = JSON.stringify({ systemPrompt, turns });
    return hashString(payload);
}
768
/** Prompt for resuming after tool calls: user text, prior assistant activity, then the results. */
function buildToolResumePrompt(userText, pendingAssistantSummary, toolResults) {
    const sections = [userText.trim()];
    const summary = pendingAssistantSummary.trim();
    if (summary) {
        sections.push(`[previous assistant tool activity]\n${summary}`);
    }
    if (toolResults.length > 0) {
        const resultsText = toolResults.map(formatToolResultSummary).join("\n\n");
        sections.push(resultsText);
    }
    return sections.filter(Boolean).join("\n\n");
}
778
/**
 * Build the first prompt sent to Cursor when taking over a pre-existing
 * session (history produced by another provider/model). Replays completed
 * turns and any in-progress tool activity as a transcript block, then
 * appends the latest user message.
 *
 * Fix: the previous `.filter(Boolean)` on the final line array stripped the
 * deliberate "" spacer entries, collapsing the intended blank lines out of
 * the rendered prompt. Only an empty latest user message should be dropped.
 */
function buildInitialHandoffPrompt(userText, turns, pendingAssistantSummary, toolResults) {
    const transcript = turns.map((turn, index) => {
        const sections = [`Turn ${index + 1}`];
        if (turn.userText.trim())
            sections.push(`User: ${turn.userText.trim()}`);
        if (turn.assistantText.trim())
            sections.push(`Assistant: ${turn.assistantText.trim()}`);
        return sections.join("\n");
    });
    const inProgress = buildToolResumePrompt("", pendingAssistantSummary, toolResults).trim();
    const history = [
        ...transcript,
        ...(inProgress ? [`In-progress turn\n${inProgress}`] : []),
    ].join("\n\n").trim();
    // Nothing to hand off — send the user message through unchanged.
    if (!history)
        return userText;
    const lines = [
        "[OpenCode session handoff]",
        "You are continuing an existing session that previously ran on another provider/model.",
        "Treat the transcript below as prior conversation history before answering the latest user message.",
        "",
        "<previous-session-transcript>",
        history,
        "</previous-session-transcript>",
        "",
        "Latest user message:",
    ];
    // Append only a non-empty latest message; the blank separator lines above are kept.
    const latest = userText.trim();
    if (latest)
        lines.push(latest);
    return lines.join("\n");
}
807
/** Apply an OpenAI `tool_choice` value to the offered tool list. */
function selectToolsForChoice(tools, toolChoice) {
    if (tools.length === 0)
        return [];
    // null/undefined, "auto", and "required" all mean: offer everything.
    if (toolChoice == null || toolChoice === "auto" || toolChoice === "required")
        return tools;
    if (toolChoice === "none")
        return [];
    if (typeof toolChoice === "object") {
        const { type, function: fn } = toolChoice;
        if (type === "function" && typeof fn?.name === "string") {
            return tools.filter((tool) => tool.function.name === fn.name);
        }
    }
    // Unrecognized choice values fall back to the full list.
    return tools;
}
464
824
  /** Convert OpenAI tool definitions to Cursor's MCP tool protobuf format. */
465
825
  function buildMcpToolDefinitions(tools) {
@@ -568,6 +928,7 @@ function buildCursorRequest(modelId, systemPrompt, userText, turns, conversation
568
928
  action,
569
929
  modelDetails,
570
930
  conversationId,
931
+ customSystemPrompt: systemPrompt,
571
932
  });
572
933
  const clientMessage = create(AgentClientMessageSchema, {
573
934
  message: { case: "runRequest", value: runRequest },
@@ -602,6 +963,12 @@ function makeHeartbeatBytes() {
602
963
  });
603
964
  return toBinary(AgentClientMessageSchema, heartbeat);
604
965
  }
966
/** Defer ending the bridge to a microtask; liveness is re-checked at flush time. */
function scheduleBridgeEnd(bridge) {
    queueMicrotask(() => {
        if (!bridge.alive)
            return;
        bridge.end();
    });
}
605
972
  /**
606
973
  * Create a stateful parser for Connect protocol frames.
607
974
  * Handles buffering partial data across chunks.
@@ -744,6 +1111,12 @@ function handleKvMessage(kvMsg, blobStore, sendFrame) {
744
1111
  const blobId = kvMsg.message.value.blobId;
745
1112
  const blobIdKey = Buffer.from(blobId).toString("hex");
746
1113
  const blobData = blobStore.get(blobIdKey);
1114
+ if (!blobData) {
1115
+ logPluginWarn("Cursor requested missing blob", {
1116
+ blobId: blobIdKey,
1117
+ knownBlobCount: blobStore.size,
1118
+ });
1119
+ }
747
1120
  sendKvResponse(kvMsg, "getBlobResult", create(GetBlobResultSchema, blobData ? { blobData } : {}), sendFrame);
748
1121
  }
749
1122
  else if (kvCase === "setBlobArgs") {
@@ -908,42 +1281,56 @@ function sendExecResult(execMsg, messageCase, value, sendFrame) {
908
1281
  });
909
1282
  sendFrame(toBinary(AgentClientMessageSchema, clientMessage));
910
1283
  }
911
/** Derive a key for active bridge lookup (tool-call continuations). */
function deriveBridgeKey(modelId, messages, sessionId, agentKey) {
    const agent = normalizeAgentKey(agentKey);
    let seed;
    if (sessionId) {
        // Session-scoped key: stable across model switches within one session/agent.
        seed = `bridge:${sessionId}:${agent}`;
    }
    else {
        // Fallback: key off the first user message preview plus model.
        const firstUser = messages.find((m) => m.role === "user");
        const preview = (firstUser ? textContent(firstUser.content) : "").slice(0, 200);
        seed = `bridge:${agent}:${modelId}:${preview}`;
    }
    return createHash("sha256").update(seed).digest("hex").slice(0, 16);
}
920
1301
/** Derive a key for conversation state. Model-independent so context survives model switches. */
function deriveConversationKey(messages, sessionId, agentKey) {
    const agent = normalizeAgentKey(agentKey);
    const seed = sessionId
        ? `session:${sessionId}:${agent}`
        : `${agent}:${buildConversationFingerprint(messages)}`;
    return createHash("sha256").update(seed).digest("hex").slice(0, 16);
}
929
/** Deterministic fingerprint over all messages (role, text, tool-call ids). */
function buildConversationFingerprint(messages) {
    const parts = [];
    for (const message of messages) {
        const toolCallIDs = (message.tool_calls ?? []).map((call) => call.id).join(",");
        parts.push(`${message.role}:${textContent(message.content)}:${message.tool_call_id ?? ""}:${toolCallIDs}`);
    }
    return parts.join("\n---\n");
}
1321
/** After a completed turn, refresh the stored fingerprints so the next request is recognized as a continuation. */
function updateStoredConversationAfterCompletion(convKey, metadata, assistantText) {
    const stored = conversationStates.get(convKey);
    if (!stored)
        return;
    let nextTurns = metadata.turns;
    if (metadata.userText) {
        // Fold the just-finished exchange into the completed-turn history.
        nextTurns = [...metadata.turns, { userText: metadata.userText, assistantText: assistantText.trim() }];
    }
    stored.systemPromptHash = metadata.systemPromptHash;
    stored.completedTurnsFingerprint = buildCompletedTurnsFingerprint(metadata.systemPrompt, nextTurns);
    stored.lastAccessMs = Date.now();
}
1332
  /** Create an SSE streaming Response that reads from a live bridge. */
946
- function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools, modelId, bridgeKey, convKey) {
1333
+ function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools, modelId, bridgeKey, convKey, metadata) {
947
1334
  const completionId = `chatcmpl-${crypto.randomUUID().replace(/-/g, "").slice(0, 28)}`;
948
1335
  const created = Math.floor(Date.now() / 1000);
949
1336
  const stream = new ReadableStream({
@@ -991,7 +1378,9 @@ function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools,
991
1378
  totalTokens: 0,
992
1379
  };
993
1380
  const tagFilter = createThinkingTagFilter();
1381
+ let assistantText = metadata.assistantSeedText ?? "";
994
1382
  let mcpExecReceived = false;
1383
+ let endStreamError = null;
995
1384
  const processChunk = createConnectFrameParser((messageBytes) => {
996
1385
  try {
997
1386
  const serverMessage = fromBinary(AgentServerMessageSchema, messageBytes);
@@ -1003,8 +1392,10 @@ function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools,
1003
1392
  const { content, reasoning } = tagFilter.process(text);
1004
1393
  if (reasoning)
1005
1394
  sendSSE(makeChunk({ reasoning_content: reasoning }));
1006
- if (content)
1395
+ if (content) {
1396
+ assistantText += content;
1007
1397
  sendSSE(makeChunk({ content }));
1398
+ }
1008
1399
  }
1009
1400
  },
1010
1401
  // onMcpExec — the model wants to execute a tool.
@@ -1014,8 +1405,21 @@ function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools,
1014
1405
  const flushed = tagFilter.flush();
1015
1406
  if (flushed.reasoning)
1016
1407
  sendSSE(makeChunk({ reasoning_content: flushed.reasoning }));
1017
- if (flushed.content)
1408
+ if (flushed.content) {
1409
+ assistantText += flushed.content;
1018
1410
  sendSSE(makeChunk({ content: flushed.content }));
1411
+ }
1412
+ const assistantSeedText = [
1413
+ assistantText.trim(),
1414
+ formatToolCallSummary({
1415
+ id: exec.toolCallId,
1416
+ type: "function",
1417
+ function: {
1418
+ name: exec.toolName,
1419
+ arguments: exec.decodedArgs,
1420
+ },
1421
+ }),
1422
+ ].filter(Boolean).join("\n\n");
1019
1423
  const toolCallIndex = state.toolCallIndex++;
1020
1424
  sendSSE(makeChunk({
1021
1425
  tool_calls: [{
@@ -1035,6 +1439,11 @@ function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools,
1035
1439
  blobStore,
1036
1440
  mcpTools,
1037
1441
  pendingExecs: state.pendingExecs,
1442
+ modelId,
1443
+ metadata: {
1444
+ ...metadata,
1445
+ assistantSeedText,
1446
+ },
1038
1447
  });
1039
1448
  sendSSE(makeChunk({}, "tool_calls"));
1040
1449
  sendDone();
@@ -1051,10 +1460,16 @@ function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools,
1051
1460
  // Skip unparseable messages
1052
1461
  }
1053
1462
  }, (endStreamBytes) => {
1054
- const endError = parseConnectEndStream(endStreamBytes);
1055
- if (endError) {
1056
- sendSSE(makeChunk({ content: `\n[Error: ${endError.message}]` }));
1463
+ endStreamError = parseConnectEndStream(endStreamBytes);
1464
+ if (endStreamError) {
1465
+ logPluginError("Cursor stream returned Connect end-stream error", {
1466
+ modelId,
1467
+ bridgeKey,
1468
+ convKey,
1469
+ ...errorDetails(endStreamError),
1470
+ });
1057
1471
  }
1472
+ scheduleBridgeEnd(bridge);
1058
1473
  });
1059
1474
  bridge.onData(processChunk);
1060
1475
  bridge.onClose((code) => {
@@ -1065,27 +1480,39 @@ function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools,
1065
1480
  stored.blobStore.set(k, v);
1066
1481
  stored.lastAccessMs = Date.now();
1067
1482
  }
1483
+ if (endStreamError) {
1484
+ activeBridges.delete(bridgeKey);
1485
+ if (!closed) {
1486
+ closed = true;
1487
+ controller.error(endStreamError);
1488
+ }
1489
+ return;
1490
+ }
1068
1491
  if (!mcpExecReceived) {
1069
1492
  const flushed = tagFilter.flush();
1070
1493
  if (flushed.reasoning)
1071
1494
  sendSSE(makeChunk({ reasoning_content: flushed.reasoning }));
1072
- if (flushed.content)
1495
+ if (flushed.content) {
1496
+ assistantText += flushed.content;
1073
1497
  sendSSE(makeChunk({ content: flushed.content }));
1498
+ }
1499
+ updateStoredConversationAfterCompletion(convKey, metadata, assistantText);
1074
1500
  sendSSE(makeChunk({}, "stop"));
1075
1501
  sendSSE(makeUsageChunk());
1076
1502
  sendDone();
1077
1503
  closeController();
1078
1504
  }
1079
- else if (code !== 0) {
1080
- // Bridge died while tool calls are pending (timeout, crash, etc.).
1081
- // Close the SSE stream so the client doesn't hang forever.
1082
- sendSSE(makeChunk({ content: "\n[Error: bridge connection lost]" }));
1083
- sendSSE(makeChunk({}, "stop"));
1084
- sendSSE(makeUsageChunk());
1085
- sendDone();
1086
- closeController();
1087
- // Remove stale entry so the next request doesn't try to resume it.
1505
+ else {
1088
1506
  activeBridges.delete(bridgeKey);
1507
+ if (code !== 0 && !closed) {
1508
+ // Bridge died while tool calls are pending (timeout, crash, etc.).
1509
+ // Close the SSE stream so the client doesn't hang forever.
1510
+ sendSSE(makeChunk({ content: "\n[Error: bridge connection lost]" }));
1511
+ sendSSE(makeChunk({}, "stop"));
1512
+ sendSSE(makeUsageChunk());
1513
+ sendDone();
1514
+ closeController();
1515
+ }
1089
1516
  }
1090
1517
  });
1091
1518
  },
@@ -1103,13 +1530,20 @@ async function startBridge(accessToken, requestBytes) {
1103
1530
  const heartbeatTimer = setInterval(() => bridge.write(makeHeartbeatBytes()), 5_000);
1104
1531
  return { bridge, heartbeatTimer };
1105
1532
  }
1106
/**
 * Handle a streaming chat-completions request: open a fresh Cursor bridge,
 * send the encoded agent request, and wrap the live bridge in an SSE Response.
 *
 * @param payload     Prepared request ({ requestBytes, blobStore, mcpTools }).
 * @param accessToken Cursor OAuth access token used to authenticate the bridge.
 * @param modelId     Model identifier echoed into SSE chunks and logs.
 * @param bridgeKey   Key under which a paused bridge is parked for tool-call resume.
 * @param convKey     Key of the cached conversation state to update on completion.
 * @param metadata    Conversation metadata threaded through for fingerprinting/resume.
 * @returns A Response whose body is an OpenAI-style SSE stream fed by the bridge.
 */
async function handleStreamingResponse(payload, accessToken, modelId, bridgeKey, convKey, metadata) {
    // Destructure up front so the call below stays readable.
    const { requestBytes, blobStore, mcpTools } = payload;
    const { bridge, heartbeatTimer } = await startBridge(accessToken, requestBytes);
    return createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools, modelId, bridgeKey, convKey, metadata);
}
1110
1537
  /** Resume a paused bridge by sending MCP results and continuing to stream. */
1111
- function handleToolResultResume(active, toolResults, modelId, bridgeKey, convKey) {
1112
- const { bridge, heartbeatTimer, blobStore, mcpTools, pendingExecs } = active;
1538
+ function handleToolResultResume(active, toolResults, bridgeKey, convKey) {
1539
+ const { bridge, heartbeatTimer, blobStore, mcpTools, pendingExecs, modelId, metadata } = active;
1540
+ const resumeMetadata = {
1541
+ ...metadata,
1542
+ assistantSeedText: [
1543
+ metadata.assistantSeedText?.trim() ?? "",
1544
+ toolResults.map(formatToolResultSummary).join("\n\n"),
1545
+ ].filter(Boolean).join("\n\n"),
1546
+ };
1113
1547
  // Send mcpResult for each pending exec that has a matching tool result
1114
1548
  for (const exec of pendingExecs) {
1115
1549
  const result = toolResults.find((r) => r.toolCallId === exec.toolCallId);
@@ -1149,12 +1583,15 @@ function handleToolResultResume(active, toolResults, modelId, bridgeKey, convKey
1149
1583
  });
1150
1584
  bridge.write(toBinary(AgentClientMessageSchema, clientMessage));
1151
1585
  }
1152
- return createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools, modelId, bridgeKey, convKey);
1586
+ return createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools, modelId, bridgeKey, convKey, resumeMetadata);
1153
1587
  }
1154
- async function handleNonStreamingResponse(payload, accessToken, modelId, convKey) {
1588
+ async function handleNonStreamingResponse(payload, accessToken, modelId, convKey, metadata) {
1155
1589
  const completionId = `chatcmpl-${crypto.randomUUID().replace(/-/g, "").slice(0, 28)}`;
1156
1590
  const created = Math.floor(Date.now() / 1000);
1157
- const { text, usage } = await collectFullResponse(payload, accessToken, convKey);
1591
+ const { text, usage, finishReason, toolCalls } = await collectFullResponse(payload, accessToken, modelId, convKey, metadata);
1592
+ const message = finishReason === "tool_calls"
1593
+ ? { role: "assistant", content: null, tool_calls: toolCalls }
1594
+ : { role: "assistant", content: text };
1158
1595
  return new Response(JSON.stringify({
1159
1596
  id: completionId,
1160
1597
  object: "chat.completion",
@@ -1163,16 +1600,18 @@ async function handleNonStreamingResponse(payload, accessToken, modelId, convKey
1163
1600
  choices: [
1164
1601
  {
1165
1602
  index: 0,
1166
- message: { role: "assistant", content: text },
1167
- finish_reason: "stop",
1603
+ message,
1604
+ finish_reason: finishReason,
1168
1605
  },
1169
1606
  ],
1170
1607
  usage,
1171
1608
  }), { headers: { "Content-Type": "application/json" } });
1172
1609
  }
1173
- async function collectFullResponse(payload, accessToken, convKey) {
1174
- const { promise, resolve } = Promise.withResolvers();
1610
+ async function collectFullResponse(payload, accessToken, modelId, convKey, metadata) {
1611
+ const { promise, resolve, reject } = Promise.withResolvers();
1175
1612
  let fullText = "";
1613
+ let endStreamError = null;
1614
+ const pendingToolCalls = [];
1176
1615
  const { bridge, heartbeatTimer } = await startBridge(accessToken, payload.requestBytes);
1177
1616
  const state = {
1178
1617
  toolCallIndex: 0,
@@ -1189,7 +1628,17 @@ async function collectFullResponse(payload, accessToken, convKey) {
1189
1628
  return;
1190
1629
  const { content } = tagFilter.process(text);
1191
1630
  fullText += content;
1192
- }, () => { }, (checkpointBytes) => {
1631
+ }, (exec) => {
1632
+ pendingToolCalls.push({
1633
+ id: exec.toolCallId,
1634
+ type: "function",
1635
+ function: {
1636
+ name: exec.toolName,
1637
+ arguments: exec.decodedArgs,
1638
+ },
1639
+ });
1640
+ scheduleBridgeEnd(bridge);
1641
+ }, (checkpointBytes) => {
1193
1642
  const stored = conversationStates.get(convKey);
1194
1643
  if (stored) {
1195
1644
  stored.checkpoint = checkpointBytes;
@@ -1200,7 +1649,17 @@ async function collectFullResponse(payload, accessToken, convKey) {
1200
1649
  catch {
1201
1650
  // Skip
1202
1651
  }
1203
- }, () => { }));
1652
+ }, (endStreamBytes) => {
1653
+ endStreamError = parseConnectEndStream(endStreamBytes);
1654
+ if (endStreamError) {
1655
+ logPluginError("Cursor non-streaming response returned Connect end-stream error", {
1656
+ modelId,
1657
+ convKey,
1658
+ ...errorDetails(endStreamError),
1659
+ });
1660
+ }
1661
+ scheduleBridgeEnd(bridge);
1662
+ }));
1204
1663
  bridge.onClose(() => {
1205
1664
  clearInterval(heartbeatTimer);
1206
1665
  const stored = conversationStates.get(convKey);
@@ -1211,10 +1670,19 @@ async function collectFullResponse(payload, accessToken, convKey) {
1211
1670
  }
1212
1671
  const flushed = tagFilter.flush();
1213
1672
  fullText += flushed.content;
1673
+ if (endStreamError) {
1674
+ reject(endStreamError);
1675
+ return;
1676
+ }
1677
+ if (pendingToolCalls.length === 0) {
1678
+ updateStoredConversationAfterCompletion(convKey, metadata, fullText);
1679
+ }
1214
1680
  const usage = computeUsage(state);
1215
1681
  resolve({
1216
1682
  text: fullText,
1217
1683
  usage,
1684
+ finishReason: pendingToolCalls.length > 0 ? "tool_calls" : "stop",
1685
+ toolCalls: pendingToolCalls,
1218
1686
  });
1219
1687
  });
1220
1688
  return promise;