@playwo/opencode-cursor-oauth 0.0.0-dev.de8f891a2e99 → 0.0.0-dev.e1637ce79fd6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. package/README.md +19 -91
  2. package/dist/auth.js +1 -2
  3. package/dist/constants.d.ts +2 -0
  4. package/dist/constants.js +2 -0
  5. package/dist/cursor/bidi-session.d.ts +12 -0
  6. package/dist/cursor/bidi-session.js +164 -0
  7. package/dist/cursor/config.d.ts +4 -0
  8. package/dist/cursor/config.js +4 -0
  9. package/dist/cursor/connect-framing.d.ts +10 -0
  10. package/dist/cursor/connect-framing.js +80 -0
  11. package/dist/cursor/headers.d.ts +6 -0
  12. package/dist/cursor/headers.js +16 -0
  13. package/dist/cursor/index.d.ts +5 -0
  14. package/dist/cursor/index.js +5 -0
  15. package/dist/cursor/unary-rpc.d.ts +12 -0
  16. package/dist/cursor/unary-rpc.js +124 -0
  17. package/dist/index.d.ts +2 -14
  18. package/dist/index.js +2 -297
  19. package/dist/logger.d.ts +1 -0
  20. package/dist/logger.js +10 -2
  21. package/dist/models.js +1 -23
  22. package/dist/openai/index.d.ts +3 -0
  23. package/dist/openai/index.js +3 -0
  24. package/dist/openai/messages.d.ts +39 -0
  25. package/dist/openai/messages.js +228 -0
  26. package/dist/openai/tools.d.ts +7 -0
  27. package/dist/openai/tools.js +58 -0
  28. package/dist/openai/types.d.ts +41 -0
  29. package/dist/openai/types.js +1 -0
  30. package/dist/plugin/cursor-auth-plugin.d.ts +3 -0
  31. package/dist/plugin/cursor-auth-plugin.js +139 -0
  32. package/dist/proto/agent_pb.js +637 -319
  33. package/dist/provider/index.d.ts +2 -0
  34. package/dist/provider/index.js +2 -0
  35. package/dist/provider/model-cost.d.ts +9 -0
  36. package/dist/provider/model-cost.js +206 -0
  37. package/dist/provider/models.d.ts +8 -0
  38. package/dist/provider/models.js +86 -0
  39. package/dist/proxy/bridge-non-streaming.d.ts +3 -0
  40. package/dist/proxy/bridge-non-streaming.js +117 -0
  41. package/dist/proxy/bridge-session.d.ts +5 -0
  42. package/dist/proxy/bridge-session.js +11 -0
  43. package/dist/proxy/bridge-streaming.d.ts +5 -0
  44. package/dist/proxy/bridge-streaming.js +400 -0
  45. package/dist/proxy/bridge.d.ts +3 -0
  46. package/dist/proxy/bridge.js +3 -0
  47. package/dist/proxy/chat-completion.d.ts +2 -0
  48. package/dist/proxy/chat-completion.js +153 -0
  49. package/dist/proxy/conversation-meta.d.ts +12 -0
  50. package/dist/proxy/conversation-meta.js +1 -0
  51. package/dist/proxy/conversation-state.d.ts +35 -0
  52. package/dist/proxy/conversation-state.js +95 -0
  53. package/dist/proxy/cursor-request.d.ts +6 -0
  54. package/dist/proxy/cursor-request.js +104 -0
  55. package/dist/proxy/index.d.ts +12 -0
  56. package/dist/proxy/index.js +12 -0
  57. package/dist/proxy/server.d.ts +6 -0
  58. package/dist/proxy/server.js +107 -0
  59. package/dist/proxy/sse.d.ts +5 -0
  60. package/dist/proxy/sse.js +5 -0
  61. package/dist/proxy/state-sync.d.ts +2 -0
  62. package/dist/proxy/state-sync.js +17 -0
  63. package/dist/proxy/stream-dispatch.d.ts +42 -0
  64. package/dist/proxy/stream-dispatch.js +633 -0
  65. package/dist/proxy/stream-state.d.ts +7 -0
  66. package/dist/proxy/stream-state.js +1 -0
  67. package/dist/proxy/title.d.ts +1 -0
  68. package/dist/proxy/title.js +103 -0
  69. package/dist/proxy/types.d.ts +30 -0
  70. package/dist/proxy/types.js +1 -0
  71. package/dist/proxy.d.ts +2 -20
  72. package/dist/proxy.js +2 -1385
  73. package/package.json +1 -2
package/dist/proxy.js CHANGED
@@ -1,1385 +1,2 @@
1
- /**
2
- * Local OpenAI-compatible proxy that translates requests to Cursor's gRPC protocol.
3
- *
4
- * Accepts POST /v1/chat/completions in OpenAI format, translates to Cursor's
5
- * protobuf/Connect protocol, and streams back OpenAI-format SSE.
6
- *
7
- * Tool calling uses Cursor's native MCP tool protocol:
8
- * - OpenAI tool defs → McpToolDefinition in RequestContext
9
- * - Cursor toolCallStarted/Delta/Completed → OpenAI tool_calls SSE chunks
10
- * - mcpArgs exec → pause stream, return tool_calls to caller
11
- * - Follow-up request with tool results → resume bridge with mcpResult
12
- *
13
- * Cursor agent streaming runs via RunSSE + BidiAppend, avoiding any Node sidecar.
14
- */
15
- import { create, fromBinary, fromJson, toBinary, toJson } from "@bufbuild/protobuf";
16
- import { ValueSchema } from "@bufbuild/protobuf/wkt";
17
- import { AgentClientMessageSchema, AgentRunRequestSchema, AgentServerMessageSchema, BidiRequestIdSchema, ClientHeartbeatSchema, ConversationActionSchema, ConversationStateStructureSchema, ConversationStepSchema, AgentConversationTurnStructureSchema, ConversationTurnStructureSchema, AssistantMessageSchema, BackgroundShellSpawnResultSchema, DeleteResultSchema, DeleteRejectedSchema, DiagnosticsResultSchema, ExecClientMessageSchema, FetchErrorSchema, FetchResultSchema, GetBlobResultSchema, GrepErrorSchema, GrepResultSchema, KvClientMessageSchema, LsRejectedSchema, LsResultSchema, McpErrorSchema, McpResultSchema, McpSuccessSchema, McpTextContentSchema, McpToolDefinitionSchema, McpToolResultContentItemSchema, ModelDetailsSchema, ReadRejectedSchema, ReadResultSchema, RequestContextResultSchema, RequestContextSchema, RequestContextSuccessSchema, SetBlobResultSchema, ShellRejectedSchema, ShellResultSchema, UserMessageActionSchema, UserMessageSchema, WriteRejectedSchema, WriteResultSchema, WriteShellStdinErrorSchema, WriteShellStdinResultSchema, } from "./proto/agent_pb";
18
- import { createHash } from "node:crypto";
19
- import { connect as connectHttp2 } from "node:http2";
20
- import { errorDetails, logPluginError, logPluginWarn } from "./logger";
21
- const CURSOR_API_URL = process.env.CURSOR_API_URL ?? "https://api2.cursor.sh";
22
- const CURSOR_CLIENT_VERSION = "cli-2026.01.09-231024f";
23
- const CURSOR_CONNECT_PROTOCOL_VERSION = "1";
24
- const CONNECT_END_STREAM_FLAG = 0b00000010;
25
- const SSE_HEADERS = {
26
- "Content-Type": "text/event-stream",
27
- "Cache-Control": "no-cache",
28
- Connection: "keep-alive",
29
- };
30
- // Active bridges keyed by a session token (derived from conversation state).
31
- // When tool_calls are returned, the bridge stays alive. The next request
32
- // with tool results looks up the bridge and sends mcpResult messages.
33
- const activeBridges = new Map();
34
- const conversationStates = new Map();
35
- const CONVERSATION_TTL_MS = 30 * 60 * 1000; // 30 minutes
36
- function evictStaleConversations() {
37
- const now = Date.now();
38
- for (const [key, stored] of conversationStates) {
39
- if (now - stored.lastAccessMs > CONVERSATION_TTL_MS) {
40
- conversationStates.delete(key);
41
- }
42
- }
43
- }
44
- /** Connect protocol frame: [1-byte flags][4-byte BE length][payload] */
45
- function frameConnectMessage(data, flags = 0) {
46
- const frame = Buffer.alloc(5 + data.length);
47
- frame[0] = flags;
48
- frame.writeUInt32BE(data.length, 1);
49
- frame.set(data, 5);
50
- return frame;
51
- }
52
- function buildCursorHeaders(options, contentType, extra = {}) {
53
- const headers = new Headers(buildCursorHeaderValues(options, contentType, extra));
54
- return headers;
55
- }
56
- function buildCursorHeaderValues(options, contentType, extra = {}) {
57
- return {
58
- authorization: `Bearer ${options.accessToken}`,
59
- "content-type": contentType,
60
- "x-ghost-mode": "true",
61
- "x-cursor-client-version": CURSOR_CLIENT_VERSION,
62
- "x-cursor-client-type": "cli",
63
- "x-request-id": crypto.randomUUID(),
64
- ...extra,
65
- };
66
- }
67
- function encodeVarint(value) {
68
- if (!Number.isSafeInteger(value) || value < 0) {
69
- throw new Error(`Unsupported varint value: ${value}`);
70
- }
71
- const bytes = [];
72
- let current = value;
73
- while (current >= 0x80) {
74
- bytes.push((current & 0x7f) | 0x80);
75
- current = Math.floor(current / 128);
76
- }
77
- bytes.push(current);
78
- return Uint8Array.from(bytes);
79
- }
80
- function encodeProtoField(tag, wireType, value) {
81
- const key = encodeVarint((tag << 3) | wireType);
82
- const out = new Uint8Array(key.length + value.length);
83
- out.set(key, 0);
84
- out.set(value, key.length);
85
- return out;
86
- }
87
- function encodeProtoStringField(tag, value) {
88
- const bytes = new TextEncoder().encode(value);
89
- const len = encodeVarint(bytes.length);
90
- const payload = new Uint8Array(len.length + bytes.length);
91
- payload.set(len, 0);
92
- payload.set(bytes, len.length);
93
- return encodeProtoField(tag, 2, payload);
94
- }
95
- function encodeProtoMessageField(tag, value) {
96
- const len = encodeVarint(value.length);
97
- const payload = new Uint8Array(len.length + value.length);
98
- payload.set(len, 0);
99
- payload.set(value, len.length);
100
- return encodeProtoField(tag, 2, payload);
101
- }
102
- function encodeProtoVarintField(tag, value) {
103
- return encodeProtoField(tag, 0, encodeVarint(value));
104
- }
105
- function concatBytes(parts) {
106
- const total = parts.reduce((sum, part) => sum + part.length, 0);
107
- const out = new Uint8Array(total);
108
- let offset = 0;
109
- for (const part of parts) {
110
- out.set(part, offset);
111
- offset += part.length;
112
- }
113
- return out;
114
- }
115
- function toFetchBody(data) {
116
- return data.buffer.slice(data.byteOffset, data.byteOffset + data.byteLength);
117
- }
118
- function encodeBidiAppendRequest(dataHex, requestId, appendSeqno) {
119
- const requestIdBytes = toBinary(BidiRequestIdSchema, create(BidiRequestIdSchema, { requestId }));
120
- return concatBytes([
121
- encodeProtoStringField(1, dataHex),
122
- encodeProtoMessageField(2, requestIdBytes),
123
- encodeProtoVarintField(3, appendSeqno),
124
- ]);
125
- }
126
- async function createCursorSession(options) {
127
- const response = await fetch(new URL("/agent.v1.AgentService/RunSSE", options.url ?? CURSOR_API_URL), {
128
- method: "POST",
129
- headers: buildCursorHeaders(options, "application/connect+proto", {
130
- accept: "text/event-stream",
131
- "connect-protocol-version": "1",
132
- }),
133
- body: toFetchBody(frameConnectMessage(toBinary(BidiRequestIdSchema, create(BidiRequestIdSchema, { requestId: options.requestId })))),
134
- });
135
- if (!response.ok || !response.body) {
136
- const errorBody = await response.text().catch(() => "");
137
- logPluginError("Cursor RunSSE request failed", {
138
- requestId: options.requestId,
139
- status: response.status,
140
- responseBody: errorBody,
141
- });
142
- throw new Error(`RunSSE failed: ${response.status}${errorBody ? ` ${errorBody}` : ""}`);
143
- }
144
- const cbs = {
145
- data: null,
146
- close: null,
147
- };
148
- const abortController = new AbortController();
149
- const reader = response.body.getReader();
150
- let appendSeqno = 0;
151
- let alive = true;
152
- let closeCode = 0;
153
- let writeChain = Promise.resolve();
154
- const pendingChunks = [];
155
- const finish = (code) => {
156
- if (!alive)
157
- return;
158
- alive = false;
159
- closeCode = code;
160
- cbs.close?.(code);
161
- };
162
- const append = async (data) => {
163
- const requestBody = encodeBidiAppendRequest(Buffer.from(data).toString("hex"), options.requestId, appendSeqno++);
164
- const appendResponse = await fetch(new URL("/aiserver.v1.BidiService/BidiAppend", options.url ?? CURSOR_API_URL), {
165
- method: "POST",
166
- headers: buildCursorHeaders(options, "application/proto"),
167
- body: toFetchBody(requestBody),
168
- signal: abortController.signal,
169
- });
170
- if (!appendResponse.ok) {
171
- const errorBody = await appendResponse.text().catch(() => "");
172
- logPluginError("Cursor BidiAppend request failed", {
173
- requestId: options.requestId,
174
- appendSeqno: appendSeqno - 1,
175
- status: appendResponse.status,
176
- responseBody: errorBody,
177
- });
178
- throw new Error(`BidiAppend failed: ${appendResponse.status}${errorBody ? ` ${errorBody}` : ""}`);
179
- }
180
- await appendResponse.arrayBuffer().catch(() => undefined);
181
- };
182
- (async () => {
183
- try {
184
- while (true) {
185
- const { done, value } = await reader.read();
186
- if (done) {
187
- finish(0);
188
- break;
189
- }
190
- if (value && value.length > 0) {
191
- const chunk = Buffer.from(value);
192
- if (cbs.data) {
193
- cbs.data(chunk);
194
- }
195
- else {
196
- pendingChunks.push(chunk);
197
- }
198
- }
199
- }
200
- }
201
- catch (error) {
202
- logPluginWarn("Cursor stream reader closed with error", {
203
- requestId: options.requestId,
204
- ...errorDetails(error),
205
- });
206
- finish(alive ? 1 : closeCode);
207
- }
208
- })();
209
- return {
210
- get alive() {
211
- return alive;
212
- },
213
- write(data) {
214
- if (!alive)
215
- return;
216
- writeChain = writeChain
217
- .then(() => append(data))
218
- .catch((error) => {
219
- logPluginError("Cursor stream append failed", {
220
- requestId: options.requestId,
221
- ...errorDetails(error),
222
- });
223
- try {
224
- abortController.abort();
225
- }
226
- catch { }
227
- try {
228
- reader.cancel();
229
- }
230
- catch { }
231
- finish(1);
232
- });
233
- },
234
- end() {
235
- try {
236
- abortController.abort();
237
- }
238
- catch { }
239
- try {
240
- reader.cancel();
241
- }
242
- catch { }
243
- finish(0);
244
- },
245
- onData(cb) {
246
- cbs.data = cb;
247
- while (pendingChunks.length > 0) {
248
- cb(pendingChunks.shift());
249
- }
250
- },
251
- onClose(cb) {
252
- if (!alive) {
253
- queueMicrotask(() => cb(closeCode));
254
- }
255
- else {
256
- cbs.close = cb;
257
- }
258
- },
259
- };
260
- }
261
- export async function callCursorUnaryRpc(options) {
262
- const target = new URL(options.rpcPath, options.url ?? CURSOR_API_URL);
263
- const transport = options.transport ?? "auto";
264
- if (transport === "http2" || (transport === "auto" && target.protocol === "https:")) {
265
- const http2Result = await callCursorUnaryRpcOverHttp2(options, target);
266
- if (transport === "http2" || http2Result.timedOut || http2Result.exitCode !== 1) {
267
- return http2Result;
268
- }
269
- }
270
- return callCursorUnaryRpcOverFetch(options, target);
271
- }
272
- async function callCursorUnaryRpcOverFetch(options, target) {
273
- let timedOut = false;
274
- const timeoutMs = options.timeoutMs ?? 5_000;
275
- const controller = new AbortController();
276
- const timeout = timeoutMs > 0
277
- ? setTimeout(() => {
278
- timedOut = true;
279
- controller.abort();
280
- }, timeoutMs)
281
- : undefined;
282
- try {
283
- const response = await fetch(target, {
284
- method: "POST",
285
- headers: buildCursorHeaders(options, "application/proto", {
286
- accept: "application/proto, application/json",
287
- "connect-protocol-version": CURSOR_CONNECT_PROTOCOL_VERSION,
288
- "connect-timeout-ms": String(timeoutMs),
289
- }),
290
- body: toFetchBody(options.requestBody),
291
- signal: controller.signal,
292
- });
293
- const body = new Uint8Array(await response.arrayBuffer());
294
- return {
295
- body,
296
- exitCode: response.ok ? 0 : response.status,
297
- timedOut,
298
- };
299
- }
300
- catch {
301
- logPluginError("Cursor unary fetch transport failed", {
302
- rpcPath: options.rpcPath,
303
- url: target.toString(),
304
- timeoutMs,
305
- timedOut,
306
- });
307
- return {
308
- body: new Uint8Array(),
309
- exitCode: timedOut ? 124 : 1,
310
- timedOut,
311
- };
312
- }
313
- finally {
314
- if (timeout)
315
- clearTimeout(timeout);
316
- }
317
- }
318
- async function callCursorUnaryRpcOverHttp2(options, target) {
319
- const timeoutMs = options.timeoutMs ?? 5_000;
320
- const authority = `${target.protocol}//${target.host}`;
321
- return new Promise((resolve) => {
322
- let settled = false;
323
- let timedOut = false;
324
- let session;
325
- let stream;
326
- const finish = (result) => {
327
- if (settled)
328
- return;
329
- settled = true;
330
- if (timeout)
331
- clearTimeout(timeout);
332
- try {
333
- stream?.close();
334
- }
335
- catch { }
336
- try {
337
- session?.close();
338
- }
339
- catch { }
340
- resolve(result);
341
- };
342
- const timeout = timeoutMs > 0
343
- ? setTimeout(() => {
344
- timedOut = true;
345
- finish({
346
- body: new Uint8Array(),
347
- exitCode: 124,
348
- timedOut: true,
349
- });
350
- }, timeoutMs)
351
- : undefined;
352
- try {
353
- session = connectHttp2(authority);
354
- session.once("error", (error) => {
355
- logPluginError("Cursor unary HTTP/2 session failed", {
356
- rpcPath: options.rpcPath,
357
- url: target.toString(),
358
- timedOut,
359
- ...errorDetails(error),
360
- });
361
- finish({
362
- body: new Uint8Array(),
363
- exitCode: timedOut ? 124 : 1,
364
- timedOut,
365
- });
366
- });
367
- const headers = {
368
- ":method": "POST",
369
- ":path": `${target.pathname}${target.search}`,
370
- ...buildCursorHeaderValues(options, "application/proto", {
371
- accept: "application/proto, application/json",
372
- "connect-protocol-version": CURSOR_CONNECT_PROTOCOL_VERSION,
373
- "connect-timeout-ms": String(timeoutMs),
374
- }),
375
- };
376
- stream = session.request(headers);
377
- let statusCode = 0;
378
- const chunks = [];
379
- stream.once("response", (responseHeaders) => {
380
- const statusHeader = responseHeaders[":status"];
381
- statusCode = typeof statusHeader === "number"
382
- ? statusHeader
383
- : Number(statusHeader ?? 0);
384
- });
385
- stream.on("data", (chunk) => {
386
- chunks.push(Buffer.from(chunk));
387
- });
388
- stream.once("end", () => {
389
- const body = new Uint8Array(Buffer.concat(chunks));
390
- finish({
391
- body,
392
- exitCode: statusCode >= 200 && statusCode < 300 ? 0 : (statusCode || 1),
393
- timedOut,
394
- });
395
- });
396
- stream.once("error", (error) => {
397
- logPluginError("Cursor unary HTTP/2 stream failed", {
398
- rpcPath: options.rpcPath,
399
- url: target.toString(),
400
- timedOut,
401
- ...errorDetails(error),
402
- });
403
- finish({
404
- body: new Uint8Array(),
405
- exitCode: timedOut ? 124 : 1,
406
- timedOut,
407
- });
408
- });
409
- // Bun's node:http2 client currently breaks on end(Buffer.alloc(0)) against
410
- // Cursor's HTTPS endpoint, but a header-only end() succeeds for empty unary bodies.
411
- if (options.requestBody.length > 0) {
412
- stream.end(Buffer.from(options.requestBody));
413
- }
414
- else {
415
- stream.end();
416
- }
417
- }
418
- catch (error) {
419
- logPluginError("Cursor unary HTTP/2 setup failed", {
420
- rpcPath: options.rpcPath,
421
- url: target.toString(),
422
- timedOut,
423
- ...errorDetails(error),
424
- });
425
- finish({
426
- body: new Uint8Array(),
427
- exitCode: timedOut ? 124 : 1,
428
- timedOut,
429
- });
430
- }
431
- });
432
- }
433
- let proxyServer;
434
- let proxyPort;
435
- let proxyAccessTokenProvider;
436
- let proxyModels = [];
437
- function buildOpenAIModelList(models) {
438
- return models.map((model) => ({
439
- id: model.id,
440
- object: "model",
441
- created: 0,
442
- owned_by: "cursor",
443
- }));
444
- }
445
- export function getProxyPort() {
446
- return proxyPort;
447
- }
448
- export async function startProxy(getAccessToken, models = []) {
449
- proxyAccessTokenProvider = getAccessToken;
450
- proxyModels = models.map((model) => ({
451
- id: model.id,
452
- name: model.name,
453
- }));
454
- if (proxyServer && proxyPort)
455
- return proxyPort;
456
- proxyServer = Bun.serve({
457
- port: 0,
458
- idleTimeout: 255, // max — Cursor responses can take 30s+
459
- async fetch(req) {
460
- const url = new URL(req.url);
461
- if (req.method === "GET" && url.pathname === "/v1/models") {
462
- return new Response(JSON.stringify({
463
- object: "list",
464
- data: buildOpenAIModelList(proxyModels),
465
- }), { headers: { "Content-Type": "application/json" } });
466
- }
467
- if (req.method === "POST" && url.pathname === "/v1/chat/completions") {
468
- try {
469
- const body = (await req.json());
470
- if (!proxyAccessTokenProvider) {
471
- throw new Error("Cursor proxy access token provider not configured");
472
- }
473
- const accessToken = await proxyAccessTokenProvider();
474
- return handleChatCompletion(body, accessToken);
475
- }
476
- catch (err) {
477
- const message = err instanceof Error ? err.message : String(err);
478
- logPluginError("Cursor proxy request failed", {
479
- path: url.pathname,
480
- method: req.method,
481
- ...errorDetails(err),
482
- });
483
- return new Response(JSON.stringify({
484
- error: { message, type: "server_error", code: "internal_error" },
485
- }), { status: 500, headers: { "Content-Type": "application/json" } });
486
- }
487
- }
488
- return new Response("Not Found", { status: 404 });
489
- },
490
- });
491
- proxyPort = proxyServer.port;
492
- if (!proxyPort)
493
- throw new Error("Failed to bind proxy to a port");
494
- return proxyPort;
495
- }
496
- export function stopProxy() {
497
- if (proxyServer) {
498
- proxyServer.stop();
499
- proxyServer = undefined;
500
- proxyPort = undefined;
501
- proxyAccessTokenProvider = undefined;
502
- proxyModels = [];
503
- }
504
- // Clean up any lingering bridges
505
- for (const active of activeBridges.values()) {
506
- clearInterval(active.heartbeatTimer);
507
- active.bridge.end();
508
- }
509
- activeBridges.clear();
510
- conversationStates.clear();
511
- }
512
- function handleChatCompletion(body, accessToken) {
513
- const { systemPrompt, userText, turns, toolResults } = parseMessages(body.messages);
514
- const modelId = body.model;
515
- const tools = body.tools ?? [];
516
- if (!userText && toolResults.length === 0) {
517
- return new Response(JSON.stringify({
518
- error: {
519
- message: "No user message found",
520
- type: "invalid_request_error",
521
- },
522
- }), { status: 400, headers: { "Content-Type": "application/json" } });
523
- }
524
- // bridgeKey: model-specific, for active tool-call bridges
525
- // convKey: model-independent, for conversation state that survives model switches
526
- const bridgeKey = deriveBridgeKey(modelId, body.messages);
527
- const convKey = deriveConversationKey(body.messages);
528
- const activeBridge = activeBridges.get(bridgeKey);
529
- if (activeBridge && toolResults.length > 0) {
530
- activeBridges.delete(bridgeKey);
531
- if (activeBridge.bridge.alive) {
532
- // Resume the live bridge with tool results
533
- return handleToolResultResume(activeBridge, toolResults, modelId, bridgeKey, convKey);
534
- }
535
- // Bridge died (timeout, server disconnect, etc.).
536
- // Clean up and fall through to start a fresh bridge.
537
- clearInterval(activeBridge.heartbeatTimer);
538
- activeBridge.bridge.end();
539
- }
540
- // Clean up stale bridge if present
541
- if (activeBridge && activeBridges.has(bridgeKey)) {
542
- clearInterval(activeBridge.heartbeatTimer);
543
- activeBridge.bridge.end();
544
- activeBridges.delete(bridgeKey);
545
- }
546
- let stored = conversationStates.get(convKey);
547
- if (!stored) {
548
- stored = {
549
- conversationId: deterministicConversationId(convKey),
550
- checkpoint: null,
551
- blobStore: new Map(),
552
- lastAccessMs: Date.now(),
553
- };
554
- conversationStates.set(convKey, stored);
555
- }
556
- stored.lastAccessMs = Date.now();
557
- evictStaleConversations();
558
- // Build the request. When tool results are present but the bridge died,
559
- // we must still include the last user text so Cursor has context.
560
- const mcpTools = buildMcpToolDefinitions(tools);
561
- const effectiveUserText = userText || (toolResults.length > 0
562
- ? toolResults.map((r) => r.content).join("\n")
563
- : "");
564
- const payload = buildCursorRequest(modelId, systemPrompt, effectiveUserText, turns, stored.conversationId, stored.checkpoint, stored.blobStore);
565
- payload.mcpTools = mcpTools;
566
- if (body.stream === false) {
567
- return handleNonStreamingResponse(payload, accessToken, modelId, convKey);
568
- }
569
- return handleStreamingResponse(payload, accessToken, modelId, bridgeKey, convKey);
570
- }
571
- /** Normalize OpenAI message content to a plain string. */
572
- function textContent(content) {
573
- if (content == null)
574
- return "";
575
- if (typeof content === "string")
576
- return content;
577
- return content
578
- .filter((p) => p.type === "text" && p.text)
579
- .map((p) => p.text)
580
- .join("\n");
581
- }
582
- function parseMessages(messages) {
583
- let systemPrompt = "You are a helpful assistant.";
584
- const pairs = [];
585
- const toolResults = [];
586
- // Collect system messages
587
- const systemParts = messages
588
- .filter((m) => m.role === "system")
589
- .map((m) => textContent(m.content));
590
- if (systemParts.length > 0) {
591
- systemPrompt = systemParts.join("\n");
592
- }
593
- // Separate tool results from conversation turns
594
- const nonSystem = messages.filter((m) => m.role !== "system");
595
- let pendingUser = "";
596
- for (const msg of nonSystem) {
597
- if (msg.role === "tool") {
598
- toolResults.push({
599
- toolCallId: msg.tool_call_id ?? "",
600
- content: textContent(msg.content),
601
- });
602
- }
603
- else if (msg.role === "user") {
604
- if (pendingUser) {
605
- pairs.push({ userText: pendingUser, assistantText: "" });
606
- }
607
- pendingUser = textContent(msg.content);
608
- }
609
- else if (msg.role === "assistant") {
610
- // Skip assistant messages that are just tool_calls with no text
611
- const text = textContent(msg.content);
612
- if (pendingUser) {
613
- pairs.push({ userText: pendingUser, assistantText: text });
614
- pendingUser = "";
615
- }
616
- }
617
- }
618
- let lastUserText = "";
619
- if (pendingUser) {
620
- lastUserText = pendingUser;
621
- }
622
- else if (pairs.length > 0 && toolResults.length === 0) {
623
- const last = pairs.pop();
624
- lastUserText = last.userText;
625
- }
626
- return { systemPrompt, userText: lastUserText, turns: pairs, toolResults };
627
- }
628
- /** Convert OpenAI tool definitions to Cursor's MCP tool protobuf format. */
629
- function buildMcpToolDefinitions(tools) {
630
- return tools.map((t) => {
631
- const fn = t.function;
632
- const jsonSchema = fn.parameters && typeof fn.parameters === "object"
633
- ? fn.parameters
634
- : { type: "object", properties: {}, required: [] };
635
- const inputSchema = toBinary(ValueSchema, fromJson(ValueSchema, jsonSchema));
636
- return create(McpToolDefinitionSchema, {
637
- name: fn.name,
638
- description: fn.description || "",
639
- providerIdentifier: "opencode",
640
- toolName: fn.name,
641
- inputSchema,
642
- });
643
- });
644
- }
645
- /** Decode a Cursor MCP arg value (protobuf Value bytes) to a JS value. */
646
- function decodeMcpArgValue(value) {
647
- try {
648
- const parsed = fromBinary(ValueSchema, value);
649
- return toJson(ValueSchema, parsed);
650
- }
651
- catch { }
652
- return new TextDecoder().decode(value);
653
- }
654
- /** Decode a map of MCP arg values. */
655
- function decodeMcpArgsMap(args) {
656
- const decoded = {};
657
- for (const [key, value] of Object.entries(args)) {
658
- decoded[key] = decodeMcpArgValue(value);
659
- }
660
- return decoded;
661
- }
662
- function buildCursorRequest(modelId, systemPrompt, userText, turns, conversationId, checkpoint, existingBlobStore) {
663
- const blobStore = new Map(existingBlobStore ?? []);
664
- // System prompt → blob store (Cursor requests it back via KV handshake)
665
- const systemJson = JSON.stringify({ role: "system", content: systemPrompt });
666
- const systemBytes = new TextEncoder().encode(systemJson);
667
- const systemBlobId = new Uint8Array(createHash("sha256").update(systemBytes).digest());
668
- blobStore.set(Buffer.from(systemBlobId).toString("hex"), systemBytes);
669
- let conversationState;
670
- if (checkpoint) {
671
- conversationState = fromBinary(ConversationStateStructureSchema, checkpoint);
672
- }
673
- else {
674
- const turnBytes = [];
675
- for (const turn of turns) {
676
- const userMsg = create(UserMessageSchema, {
677
- text: turn.userText,
678
- messageId: crypto.randomUUID(),
679
- });
680
- const userMsgBytes = toBinary(UserMessageSchema, userMsg);
681
- const stepBytes = [];
682
- if (turn.assistantText) {
683
- const step = create(ConversationStepSchema, {
684
- message: {
685
- case: "assistantMessage",
686
- value: create(AssistantMessageSchema, { text: turn.assistantText }),
687
- },
688
- });
689
- stepBytes.push(toBinary(ConversationStepSchema, step));
690
- }
691
- const agentTurn = create(AgentConversationTurnStructureSchema, {
692
- userMessage: userMsgBytes,
693
- steps: stepBytes,
694
- });
695
- const turnStructure = create(ConversationTurnStructureSchema, {
696
- turn: { case: "agentConversationTurn", value: agentTurn },
697
- });
698
- turnBytes.push(toBinary(ConversationTurnStructureSchema, turnStructure));
699
- }
700
- conversationState = create(ConversationStateStructureSchema, {
701
- rootPromptMessagesJson: [systemBlobId],
702
- turns: turnBytes,
703
- todos: [],
704
- pendingToolCalls: [],
705
- previousWorkspaceUris: [],
706
- fileStates: {},
707
- fileStatesV2: {},
708
- summaryArchives: [],
709
- turnTimings: [],
710
- subagentStates: {},
711
- selfSummaryCount: 0,
712
- readPaths: [],
713
- });
714
- }
715
- const userMessage = create(UserMessageSchema, {
716
- text: userText,
717
- messageId: crypto.randomUUID(),
718
- });
719
- const action = create(ConversationActionSchema, {
720
- action: {
721
- case: "userMessageAction",
722
- value: create(UserMessageActionSchema, { userMessage }),
723
- },
724
- });
725
- const modelDetails = create(ModelDetailsSchema, {
726
- modelId,
727
- displayModelId: modelId,
728
- displayName: modelId,
729
- });
730
- const runRequest = create(AgentRunRequestSchema, {
731
- conversationState,
732
- action,
733
- modelDetails,
734
- conversationId,
735
- });
736
- const clientMessage = create(AgentClientMessageSchema, {
737
- message: { case: "runRequest", value: runRequest },
738
- });
739
- return {
740
- requestBytes: toBinary(AgentClientMessageSchema, clientMessage),
741
- blobStore,
742
- mcpTools: [],
743
- };
744
- }
745
- function parseConnectEndStream(data) {
746
- try {
747
- const payload = JSON.parse(new TextDecoder().decode(data));
748
- const error = payload?.error;
749
- if (error) {
750
- const code = error.code ?? "unknown";
751
- const message = error.message ?? "Unknown error";
752
- return new Error(`Connect error ${code}: ${message}`);
753
- }
754
- return null;
755
- }
756
- catch {
757
- return new Error("Failed to parse Connect end stream");
758
- }
759
- }
760
- function makeHeartbeatBytes() {
761
- const heartbeat = create(AgentClientMessageSchema, {
762
- message: {
763
- case: "clientHeartbeat",
764
- value: create(ClientHeartbeatSchema, {}),
765
- },
766
- });
767
- return toBinary(AgentClientMessageSchema, heartbeat);
768
- }
769
- /**
770
- * Create a stateful parser for Connect protocol frames.
771
- * Handles buffering partial data across chunks.
772
- */
773
- function createConnectFrameParser(onMessage, onEndStream) {
774
- let pending = Buffer.alloc(0);
775
- return (incoming) => {
776
- pending = Buffer.concat([pending, incoming]);
777
- while (pending.length >= 5) {
778
- const flags = pending[0];
779
- const msgLen = pending.readUInt32BE(1);
780
- if (pending.length < 5 + msgLen)
781
- break;
782
- const messageBytes = pending.subarray(5, 5 + msgLen);
783
- pending = pending.subarray(5 + msgLen);
784
- if (flags & CONNECT_END_STREAM_FLAG) {
785
- onEndStream(messageBytes);
786
- }
787
- else {
788
- onMessage(messageBytes);
789
- }
790
- }
791
- };
792
- }
793
- const THINKING_TAG_NAMES = ['think', 'thinking', 'reasoning', 'thought', 'think_intent'];
794
- const MAX_THINKING_TAG_LEN = 16; // </think_intent> is 15 chars
795
/**
 * Strip thinking tags from streamed text, routing tagged content to reasoning.
 * Buffers partial tags across chunk boundaries.
 *
 * Returned object:
 * - process(text): splits `text` into { content, reasoning } according to
 *   <think>-style tags, carrying open/closed tag state across calls.
 * - flush(): returns whatever partial-tag text is still buffered, attributed
 *   to reasoning or content based on the current tag state.
 */
function createThinkingTagFilter() {
    // Holds a trailing fragment that might be the start of a tag ("<thin").
    let buffer = '';
    // True while we are between an opening and closing thinking tag.
    let inThinking = false;
    return {
        process(text) {
            // Prepend any fragment buffered from the previous chunk.
            const input = buffer + text;
            buffer = '';
            let content = '';
            let reasoning = '';
            let lastIdx = 0;
            // Matches opening or closing forms of any known thinking tag.
            const re = new RegExp(`<(/?)(?:${THINKING_TAG_NAMES.join('|')})\\s*>`, 'gi');
            let match;
            while ((match = re.exec(input)) !== null) {
                // Text before the tag belongs to whichever side we were on.
                const before = input.slice(lastIdx, match.index);
                if (inThinking)
                    reasoning += before;
                else
                    content += before;
                // Opening tag (no "/") enters thinking; closing tag leaves it.
                inThinking = match[1] !== '/';
                lastIdx = re.lastIndex;
            }
            const rest = input.slice(lastIdx);
            // Buffer a trailing '<' that could be the start of a thinking tag.
            const ltPos = rest.lastIndexOf('<');
            if (ltPos >= 0 && rest.length - ltPos < MAX_THINKING_TAG_LEN && /^<\/?[a-z_]*$/i.test(rest.slice(ltPos))) {
                // Looks like a possible partial tag: hold it until more text arrives.
                buffer = rest.slice(ltPos);
                const before = rest.slice(0, ltPos);
                if (inThinking)
                    reasoning += before;
                else
                    content += before;
            }
            else {
                // No partial tag at the end; emit everything.
                if (inThinking)
                    reasoning += rest;
                else
                    content += rest;
            }
            return { content, reasoning };
        },
        flush() {
            // Release any held fragment; it was not a tag after all.
            const b = buffer;
            buffer = '';
            if (!b)
                return { content: '', reasoning: '' };
            return inThinking ? { content: '', reasoning: b } : { content: b, reasoning: '' };
        },
    };
}
848
/**
 * Map accumulated stream counters onto OpenAI-style usage numbers.
 * Prompt tokens are inferred as total minus completion, clamped at zero;
 * when no total was reported, the completion count stands in for the total.
 */
function computeUsage(state) {
    const completion = state.outputTokens;
    const total = state.totalTokens || completion;
    return {
        prompt_tokens: Math.max(0, total - completion),
        completion_tokens: completion,
        total_tokens: total,
    };
}
854
/**
 * Route a decoded AgentServerMessage to the matching handler.
 *
 * @param msg          Decoded server message.
 * @param blobStore    Map backing the KV blob get/set protocol.
 * @param mcpTools     Tools advertised in exec request-context replies.
 * @param sendFrame    Writes an encoded client frame back to Cursor.
 * @param state        Mutable stream state (token counters, pending execs).
 * @param onText       Receives (text, isThinking) deltas.
 * @param onMcpExec    Receives pending MCP tool executions.
 * @param onCheckpoint Optional; receives serialized conversation checkpoints.
 */
function processServerMessage(msg, blobStore, mcpTools, sendFrame, state, onText, onMcpExec, onCheckpoint) {
    switch (msg.message.case) {
        case "interactionUpdate":
            handleInteractionUpdate(msg.message.value, state, onText);
            break;
        case "kvServerMessage":
            handleKvMessage(msg.message.value, blobStore, sendFrame);
            break;
        case "execServerMessage":
            handleExecMessage(msg.message.value, mcpTools, sendFrame, onMcpExec);
            break;
        case "conversationCheckpointUpdate": {
            const checkpoint = msg.message.value;
            // Checkpoints carry the authoritative token count for the conversation.
            if (checkpoint.tokenDetails) {
                state.totalTokens = checkpoint.tokenDetails.usedTokens;
            }
            if (onCheckpoint) {
                onCheckpoint(toBinary(ConversationStateStructureSchema, checkpoint));
            }
            break;
        }
    }
}
875
/**
 * Apply a single interaction update: forward text/thinking deltas to onText
 * and accumulate output-token deltas onto state.
 */
function handleInteractionUpdate(update, state, onText) {
    switch (update.message?.case) {
        case "textDelta": {
            const text = update.message.value.text || "";
            if (text)
                onText(text, false);
            break;
        }
        case "thinkingDelta": {
            const text = update.message.value.text || "";
            if (text)
                onText(text, true);
            break;
        }
        case "tokenDelta":
            state.outputTokens += update.message.value.tokens ?? 0;
            break;
        default:
            // toolCallStarted, partialToolCall, toolCallDelta, toolCallCompleted
            // are intentionally ignored. MCP tool calls flow through the exec
            // message path (mcpArgs → mcpResult), not interaction updates.
            break;
    }
}
894
/** Send a KV client response back to Cursor. */
function sendKvResponse(kvMsg, messageCase, value, sendFrame) {
    const kvReply = create(KvClientMessageSchema, {
        id: kvMsg.id,
        message: { case: messageCase, value: value },
    });
    const envelope = create(AgentClientMessageSchema, {
        message: { case: "kvClientMessage", value: kvReply },
    });
    sendFrame(toBinary(AgentClientMessageSchema, envelope));
}
905
/**
 * Serve Cursor's blob get/set requests from the in-memory blob store.
 * Blob ids are byte arrays; they are hex-encoded to form stable map keys.
 */
function handleKvMessage(kvMsg, blobStore, sendFrame) {
    switch (kvMsg.message.case) {
        case "getBlobArgs": {
            const key = Buffer.from(kvMsg.message.value.blobId).toString("hex");
            const blobData = blobStore.get(key);
            // Reply with an empty result when the blob is unknown.
            sendKvResponse(kvMsg, "getBlobResult", create(GetBlobResultSchema, blobData ? { blobData } : {}), sendFrame);
            break;
        }
        case "setBlobArgs": {
            const { blobId, blobData } = kvMsg.message.value;
            blobStore.set(Buffer.from(blobId).toString("hex"), blobData);
            sendKvResponse(kvMsg, "setBlobResult", create(SetBlobResultSchema, {}), sendFrame);
            break;
        }
    }
}
919
/**
 * Dispatch an exec request from the Cursor server.
 *
 * - requestContextArgs is answered with our MCP tool registry so the model
 *   learns which tools are available.
 * - mcpArgs is surfaced to the caller via onMcpExec (turned into an OpenAI
 *   tool call upstream); the mcpResult is sent later on resume.
 * - Every native Cursor tool (read/ls/grep/write/delete/shell/fetch/…) is
 *   rejected so the model falls back to the MCP tools instead.
 *
 * @param execMsg   Decoded ExecServerMessage.
 * @param mcpTools  Tools to advertise in the request-context reply.
 * @param sendFrame Writes an encoded AgentClientMessage frame to Cursor.
 * @param onMcpExec Invoked with a pending MCP tool execution descriptor.
 */
function handleExecMessage(execMsg, mcpTools, sendFrame, onMcpExec) {
    const execCase = execMsg.message.case;
    if (execCase === "requestContextArgs") {
        // Advertise only our MCP tools; all other context fields stay empty.
        const requestContext = create(RequestContextSchema, {
            rules: [],
            repositoryInfo: [],
            tools: mcpTools,
            gitRepos: [],
            projectLayouts: [],
            mcpInstructions: [],
            fileContents: {},
            customSubagents: [],
        });
        const result = create(RequestContextResultSchema, {
            result: {
                case: "success",
                value: create(RequestContextSuccessSchema, { requestContext }),
            },
        });
        sendExecResult(execMsg, "requestContextResult", result, sendFrame);
        return;
    }
    if (execCase === "mcpArgs") {
        // The model wants to run one of our MCP tools — hand the decoded call
        // to the caller; the result is delivered asynchronously on resume.
        const mcpArgs = execMsg.message.value;
        const decoded = decodeMcpArgsMap(mcpArgs.args ?? {});
        onMcpExec({
            execId: execMsg.execId,
            execMsgId: execMsg.id,
            // Fall back to a fresh UUID when the server omits a call id.
            toolCallId: mcpArgs.toolCallId || crypto.randomUUID(),
            toolName: mcpArgs.toolName || mcpArgs.name,
            decodedArgs: JSON.stringify(decoded),
        });
        return;
    }
    // --- Reject native Cursor tools ---
    // The model tries these first. We must respond with rejection/error
    // so it falls back to our MCP tools (registered via RequestContext).
    const REJECT_REASON = "Tool not available in this environment. Use the MCP tools provided instead.";
    if (execCase === "readArgs") {
        const args = execMsg.message.value;
        const result = create(ReadResultSchema, {
            result: { case: "rejected", value: create(ReadRejectedSchema, { path: args.path, reason: REJECT_REASON }) },
        });
        sendExecResult(execMsg, "readResult", result, sendFrame);
        return;
    }
    if (execCase === "lsArgs") {
        const args = execMsg.message.value;
        const result = create(LsResultSchema, {
            result: { case: "rejected", value: create(LsRejectedSchema, { path: args.path, reason: REJECT_REASON }) },
        });
        sendExecResult(execMsg, "lsResult", result, sendFrame);
        return;
    }
    if (execCase === "grepArgs") {
        // Grep has no "rejected" case in its result schema; report an error.
        const result = create(GrepResultSchema, {
            result: { case: "error", value: create(GrepErrorSchema, { error: REJECT_REASON }) },
        });
        sendExecResult(execMsg, "grepResult", result, sendFrame);
        return;
    }
    if (execCase === "writeArgs") {
        const args = execMsg.message.value;
        const result = create(WriteResultSchema, {
            result: { case: "rejected", value: create(WriteRejectedSchema, { path: args.path, reason: REJECT_REASON }) },
        });
        sendExecResult(execMsg, "writeResult", result, sendFrame);
        return;
    }
    if (execCase === "deleteArgs") {
        const args = execMsg.message.value;
        const result = create(DeleteResultSchema, {
            result: { case: "rejected", value: create(DeleteRejectedSchema, { path: args.path, reason: REJECT_REASON }) },
        });
        sendExecResult(execMsg, "deleteResult", result, sendFrame);
        return;
    }
    if (execCase === "shellArgs" || execCase === "shellStreamArgs") {
        const args = execMsg.message.value;
        const result = create(ShellResultSchema, {
            result: {
                case: "rejected",
                value: create(ShellRejectedSchema, {
                    command: args.command ?? "",
                    workingDirectory: args.workingDirectory ?? "",
                    reason: REJECT_REASON,
                    isReadonly: false,
                }),
            },
        });
        sendExecResult(execMsg, "shellResult", result, sendFrame);
        return;
    }
    if (execCase === "backgroundShellSpawnArgs") {
        const args = execMsg.message.value;
        const result = create(BackgroundShellSpawnResultSchema, {
            result: {
                case: "rejected",
                value: create(ShellRejectedSchema, {
                    command: args.command ?? "",
                    workingDirectory: args.workingDirectory ?? "",
                    reason: REJECT_REASON,
                    isReadonly: false,
                }),
            },
        });
        sendExecResult(execMsg, "backgroundShellSpawnResult", result, sendFrame);
        return;
    }
    if (execCase === "writeShellStdinArgs") {
        const result = create(WriteShellStdinResultSchema, {
            result: { case: "error", value: create(WriteShellStdinErrorSchema, { error: REJECT_REASON }) },
        });
        sendExecResult(execMsg, "writeShellStdinResult", result, sendFrame);
        return;
    }
    if (execCase === "fetchArgs") {
        const args = execMsg.message.value;
        const result = create(FetchResultSchema, {
            result: { case: "error", value: create(FetchErrorSchema, { url: args.url ?? "", error: REJECT_REASON }) },
        });
        sendExecResult(execMsg, "fetchResult", result, sendFrame);
        return;
    }
    if (execCase === "diagnosticsArgs") {
        // Report no diagnostics rather than rejecting.
        const result = create(DiagnosticsResultSchema, {});
        sendExecResult(execMsg, "diagnosticsResult", result, sendFrame);
        return;
    }
    // MCP resource/screen/computer exec types
    // All four are answered with an empty McpResult under the matching case.
    const miscCaseMap = {
        listMcpResourcesExecArgs: "listMcpResourcesExecResult",
        readMcpResourceExecArgs: "readMcpResourceExecResult",
        recordScreenArgs: "recordScreenResult",
        computerUseArgs: "computerUseResult",
    };
    const resultCase = miscCaseMap[execCase];
    if (resultCase) {
        sendExecResult(execMsg, resultCase, create(McpResultSchema, {}), sendFrame);
        return;
    }
    // Unknown exec type — log and ignore
    console.error(`[proxy] unhandled exec: ${execCase}`);
}
1063
/** Send an exec client message back to Cursor. */
function sendExecResult(execMsg, messageCase, value, sendFrame) {
    const execReply = create(ExecClientMessageSchema, {
        id: execMsg.id,
        execId: execMsg.execId,
        message: { case: messageCase, value: value },
    });
    const envelope = create(AgentClientMessageSchema, {
        message: { case: "execClientMessage", value: execReply },
    });
    sendFrame(toBinary(AgentClientMessageSchema, envelope));
}
1075
/** Derive a key for active bridge lookup (tool-call continuations). Model-specific. */
function deriveBridgeKey(modelId, messages) {
    const firstUser = messages.find((m) => m.role === "user");
    const firstUserText = firstUser ? textContent(firstUser.content) : "";
    const seed = `bridge:${modelId}:${firstUserText.slice(0, 200)}`;
    return createHash("sha256").update(seed).digest("hex").slice(0, 16);
}
1084
/** Derive a key for conversation state. Model-independent so context survives model switches. */
function deriveConversationKey(messages) {
    const firstUser = messages.find((m) => m.role === "user");
    const firstUserText = firstUser ? textContent(firstUser.content) : "";
    const seed = `conv:${firstUserText.slice(0, 200)}`;
    return createHash("sha256").update(seed).digest("hex").slice(0, 16);
}
1093
/** Deterministic UUID derived from convKey so Cursor's server-side conversation
 * persists across proxy restarts. Formats 16 bytes of SHA-256 as a v4-shaped UUID. */
function deterministicConversationId(convKey) {
    const hex = createHash("sha256")
        .update(`cursor-conv-id:${convKey}`)
        .digest("hex")
        .slice(0, 32);
    // Force the version nibble to 4 and the variant nibble to 10xx so the
    // result is shaped like a v4 UUID: xxxxxxxx-xxxx-4xxx-Nxxx-xxxxxxxxxxxx.
    const variantNibble = (0x8 | (parseInt(hex[16], 16) & 0x3)).toString(16);
    return `${hex.slice(0, 8)}-${hex.slice(8, 12)}-4${hex.slice(13, 16)}-${variantNibble}${hex.slice(17, 20)}-${hex.slice(20, 32)}`;
}
1109
/** Create an SSE streaming Response that reads from a live bridge.
 *
 * Translates Cursor's Connect-framed server messages into OpenAI-style
 * `chat.completion.chunk` SSE events. When the model requests an MCP tool,
 * the bridge is parked in `activeBridges` under bridgeKey and the SSE stream
 * is finished with finish_reason "tool_calls" so the client can run the tool
 * and resume. Conversation checkpoints and blobs are persisted under convKey.
 */
function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools, modelId, bridgeKey, convKey) {
    const completionId = `chatcmpl-${crypto.randomUUID().replace(/-/g, "").slice(0, 28)}`;
    const created = Math.floor(Date.now() / 1000);
    const stream = new ReadableStream({
        start(controller) {
            const encoder = new TextEncoder();
            // Guards every emit/close so nothing is enqueued after close().
            let closed = false;
            const sendSSE = (data) => {
                if (closed)
                    return;
                controller.enqueue(encoder.encode(`data: ${JSON.stringify(data)}\n\n`));
            };
            const sendDone = () => {
                if (closed)
                    return;
                controller.enqueue(encoder.encode("data: [DONE]\n\n"));
            };
            const closeController = () => {
                if (closed)
                    return;
                closed = true;
                controller.close();
            };
            // One delta chunk in OpenAI streaming format.
            const makeChunk = (delta, finishReason = null) => ({
                id: completionId,
                object: "chat.completion.chunk",
                created,
                model: modelId,
                choices: [{ index: 0, delta, finish_reason: finishReason }],
            });
            // Trailing usage-only chunk (empty choices), sent before [DONE].
            const makeUsageChunk = () => {
                const { prompt_tokens, completion_tokens, total_tokens } = computeUsage(state);
                return {
                    id: completionId,
                    object: "chat.completion.chunk",
                    created,
                    model: modelId,
                    choices: [],
                    usage: { prompt_tokens, completion_tokens, total_tokens },
                };
            };
            // Mutable per-stream state shared with processServerMessage.
            const state = {
                toolCallIndex: 0,
                pendingExecs: [],
                outputTokens: 0,
                totalTokens: 0,
            };
            const tagFilter = createThinkingTagFilter();
            let mcpExecReceived = false;
            const processChunk = createConnectFrameParser((messageBytes) => {
                try {
                    const serverMessage = fromBinary(AgentServerMessageSchema, messageBytes);
                    processServerMessage(serverMessage, blobStore, mcpTools, (data) => bridge.write(data), state, (text, isThinking) => {
                        if (isThinking) {
                            // Native thinking deltas bypass the tag filter.
                            sendSSE(makeChunk({ reasoning_content: text }));
                        }
                        else {
                            // Regular text may contain inline <think> tags.
                            const { content, reasoning } = tagFilter.process(text);
                            if (reasoning)
                                sendSSE(makeChunk({ reasoning_content: reasoning }));
                            if (content)
                                sendSSE(makeChunk({ content }));
                        }
                    },
                    // onMcpExec — the model wants to execute a tool.
                    (exec) => {
                        state.pendingExecs.push(exec);
                        mcpExecReceived = true;
                        // Drain any partial tag buffer before emitting the call.
                        const flushed = tagFilter.flush();
                        if (flushed.reasoning)
                            sendSSE(makeChunk({ reasoning_content: flushed.reasoning }));
                        if (flushed.content)
                            sendSSE(makeChunk({ content: flushed.content }));
                        const toolCallIndex = state.toolCallIndex++;
                        sendSSE(makeChunk({
                            tool_calls: [{
                                    index: toolCallIndex,
                                    id: exec.toolCallId,
                                    type: "function",
                                    function: {
                                        name: exec.toolName,
                                        arguments: exec.decodedArgs,
                                    },
                                }],
                        }));
                        // Keep the bridge alive for tool result continuation.
                        activeBridges.set(bridgeKey, {
                            bridge,
                            heartbeatTimer,
                            blobStore,
                            mcpTools,
                            pendingExecs: state.pendingExecs,
                        });
                        sendSSE(makeChunk({}, "tool_calls"));
                        sendDone();
                        closeController();
                    }, (checkpointBytes) => {
                        // Persist the latest conversation checkpoint for reuse.
                        const stored = conversationStates.get(convKey);
                        if (stored) {
                            stored.checkpoint = checkpointBytes;
                            stored.lastAccessMs = Date.now();
                        }
                    });
                }
                catch {
                    // Skip unparseable messages
                }
            }, (endStreamBytes) => {
                // Surface a server-reported stream error as visible content.
                const endError = parseConnectEndStream(endStreamBytes);
                if (endError) {
                    sendSSE(makeChunk({ content: `\n[Error: ${endError.message}]` }));
                }
            });
            bridge.onData(processChunk);
            bridge.onClose((code) => {
                clearInterval(heartbeatTimer);
                // Fold this request's blobs back into the stored conversation.
                const stored = conversationStates.get(convKey);
                if (stored) {
                    for (const [k, v] of blobStore)
                        stored.blobStore.set(k, v);
                    stored.lastAccessMs = Date.now();
                }
                if (!mcpExecReceived) {
                    // Normal completion: flush remaining text, finish with "stop".
                    const flushed = tagFilter.flush();
                    if (flushed.reasoning)
                        sendSSE(makeChunk({ reasoning_content: flushed.reasoning }));
                    if (flushed.content)
                        sendSSE(makeChunk({ content: flushed.content }));
                    sendSSE(makeChunk({}, "stop"));
                    sendSSE(makeUsageChunk());
                    sendDone();
                    closeController();
                }
                else if (code !== 0) {
                    // Bridge died while tool calls are pending (timeout, crash, etc.).
                    // Close the SSE stream so the client doesn't hang forever.
                    sendSSE(makeChunk({ content: "\n[Error: bridge connection lost]" }));
                    sendSSE(makeChunk({}, "stop"));
                    sendSSE(makeUsageChunk());
                    sendDone();
                    closeController();
                    // Remove stale entry so the next request doesn't try to resume it.
                    activeBridges.delete(bridgeKey);
                }
            });
        },
    });
    return new Response(stream, { headers: SSE_HEADERS });
}
1259
/** Start a Cursor RunSSE session, send the initial request, and start heartbeats. */
async function startBridge(accessToken, requestBytes) {
    const bridge = await createCursorSession({
        accessToken,
        requestId: crypto.randomUUID(),
    });
    bridge.write(requestBytes);
    // Keep the session alive while waiting on the model.
    const heartbeatTimer = setInterval(() => bridge.write(makeHeartbeatBytes()), 5_000);
    return { bridge, heartbeatTimer };
}
1270
/** Open a bridge for this request and stream its output back as SSE. */
async function handleStreamingResponse(payload, accessToken, modelId, bridgeKey, convKey) {
    const { blobStore, mcpTools } = payload;
    const { bridge, heartbeatTimer } = await startBridge(accessToken, payload.requestBytes);
    return createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools, modelId, bridgeKey, convKey);
}
1274
/** Resume a paused bridge by sending MCP results and continuing to stream.
 *
 * For each exec parked when the model requested tool calls, sends an
 * mcpResult frame carrying either the matching tool output (success) or a
 * "not provided" error, then re-attaches an SSE stream to the bridge.
 */
function handleToolResultResume(active, toolResults, modelId, bridgeKey, convKey) {
    const { bridge, heartbeatTimer, blobStore, mcpTools, pendingExecs } = active;
    // Send mcpResult for each pending exec that has a matching tool result
    for (const exec of pendingExecs) {
        const result = toolResults.find((r) => r.toolCallId === exec.toolCallId);
        const mcpResult = result
            ? create(McpResultSchema, {
                result: {
                    case: "success",
                    value: create(McpSuccessSchema, {
                        content: [
                            create(McpToolResultContentItemSchema, {
                                content: {
                                    case: "text",
                                    value: create(McpTextContentSchema, { text: result.content }),
                                },
                            }),
                        ],
                        isError: false,
                    }),
                },
            })
            // No matching result from the client: report an error so the
            // model is not left waiting on this exec.
            : create(McpResultSchema, {
                result: {
                    case: "error",
                    value: create(McpErrorSchema, { error: "Tool result not provided" }),
                },
            });
        // Echo back the ids captured when the exec was parked so Cursor can
        // correlate the result with its original request.
        const execClientMessage = create(ExecClientMessageSchema, {
            id: exec.execMsgId,
            execId: exec.execId,
            message: {
                case: "mcpResult",
                value: mcpResult,
            },
        });
        const clientMessage = create(AgentClientMessageSchema, {
            message: { case: "execClientMessage", value: execClientMessage },
        });
        bridge.write(toBinary(AgentClientMessageSchema, clientMessage));
    }
    return createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools, modelId, bridgeKey, convKey);
}
1318
/** Run the exchange to completion and return a single JSON chat completion. */
async function handleNonStreamingResponse(payload, accessToken, modelId, convKey) {
    const completionId = `chatcmpl-${crypto.randomUUID().replace(/-/g, "").slice(0, 28)}`;
    const created = Math.floor(Date.now() / 1000);
    const { text, usage } = await collectFullResponse(payload, accessToken, convKey);
    const body = {
        id: completionId,
        object: "chat.completion",
        created,
        model: modelId,
        choices: [
            {
                index: 0,
                message: { role: "assistant", content: text },
                finish_reason: "stop",
            },
        ],
        usage,
    };
    return new Response(JSON.stringify(body), { headers: { "Content-Type": "application/json" } });
}
1337
/** Drive a full bridge exchange to completion, collecting plain text.
 *
 * Non-streaming counterpart of createBridgeStreamResponse: thinking deltas
 * are dropped, tool calls are ignored (onMcpExec is a no-op), and the
 * promise resolves on bridge close with { text, usage }.
 */
async function collectFullResponse(payload, accessToken, convKey) {
    const { promise, resolve } = Promise.withResolvers();
    let fullText = "";
    const { bridge, heartbeatTimer } = await startBridge(accessToken, payload.requestBytes);
    // Same mutable state shape processServerMessage expects in streaming mode.
    const state = {
        toolCallIndex: 0,
        pendingExecs: [],
        outputTokens: 0,
        totalTokens: 0,
    };
    const tagFilter = createThinkingTagFilter();
    bridge.onData(createConnectFrameParser((messageBytes) => {
        try {
            const serverMessage = fromBinary(AgentServerMessageSchema, messageBytes);
            processServerMessage(serverMessage, payload.blobStore, payload.mcpTools, (data) => bridge.write(data), state, (text, isThinking) => {
                // Thinking output is discarded in non-streaming mode.
                if (isThinking)
                    return;
                const { content } = tagFilter.process(text);
                fullText += content;
            }, () => { }, (checkpointBytes) => {
                // Persist the latest conversation checkpoint for reuse.
                const stored = conversationStates.get(convKey);
                if (stored) {
                    stored.checkpoint = checkpointBytes;
                    stored.lastAccessMs = Date.now();
                }
            });
        }
        catch {
            // Skip
        }
    }, () => { }));
    bridge.onClose(() => {
        clearInterval(heartbeatTimer);
        // Fold this request's blobs back into the stored conversation.
        const stored = conversationStates.get(convKey);
        if (stored) {
            for (const [k, v] of payload.blobStore)
                stored.blobStore.set(k, v);
            stored.lastAccessMs = Date.now();
        }
        const flushed = tagFilter.flush();
        fullText += flushed.content;
        const usage = computeUsage(state);
        resolve({
            text: fullText,
            usage,
        });
    });
    return promise;
}
1
// Public API surface: proxy lifecycle helpers and the raw Cursor unary RPC caller.
export { getProxyPort, startProxy, stopProxy } from "./proxy/index";
export { callCursorUnaryRpc } from "./cursor";