@playwo/opencode-cursor-oauth 0.0.0-dev.e795e5ffd849 → 0.0.0-dev.e95256212849

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. package/dist/auth.js +1 -2
  2. package/dist/constants.d.ts +2 -0
  3. package/dist/constants.js +2 -0
  4. package/dist/cursor/bidi-session.d.ts +12 -0
  5. package/dist/cursor/bidi-session.js +164 -0
  6. package/dist/cursor/config.d.ts +4 -0
  7. package/dist/cursor/config.js +4 -0
  8. package/dist/cursor/connect-framing.d.ts +10 -0
  9. package/dist/cursor/connect-framing.js +80 -0
  10. package/dist/cursor/headers.d.ts +6 -0
  11. package/dist/cursor/headers.js +16 -0
  12. package/dist/cursor/index.d.ts +5 -0
  13. package/dist/cursor/index.js +5 -0
  14. package/dist/cursor/unary-rpc.d.ts +12 -0
  15. package/dist/cursor/unary-rpc.js +124 -0
  16. package/dist/index.d.ts +2 -14
  17. package/dist/index.js +2 -297
  18. package/dist/logger.js +7 -2
  19. package/dist/models.js +1 -23
  20. package/dist/openai/index.d.ts +3 -0
  21. package/dist/openai/index.js +3 -0
  22. package/dist/openai/messages.d.ts +39 -0
  23. package/dist/openai/messages.js +228 -0
  24. package/dist/openai/tools.d.ts +7 -0
  25. package/dist/openai/tools.js +58 -0
  26. package/dist/openai/types.d.ts +41 -0
  27. package/dist/openai/types.js +1 -0
  28. package/dist/plugin/cursor-auth-plugin.d.ts +3 -0
  29. package/dist/plugin/cursor-auth-plugin.js +139 -0
  30. package/dist/proto/agent_pb.js +637 -319
  31. package/dist/provider/index.d.ts +2 -0
  32. package/dist/provider/index.js +2 -0
  33. package/dist/provider/model-cost.d.ts +9 -0
  34. package/dist/provider/model-cost.js +206 -0
  35. package/dist/provider/models.d.ts +8 -0
  36. package/dist/provider/models.js +86 -0
  37. package/dist/proxy/bridge-non-streaming.d.ts +3 -0
  38. package/dist/proxy/bridge-non-streaming.js +119 -0
  39. package/dist/proxy/bridge-session.d.ts +5 -0
  40. package/dist/proxy/bridge-session.js +11 -0
  41. package/dist/proxy/bridge-streaming.d.ts +5 -0
  42. package/dist/proxy/bridge-streaming.js +342 -0
  43. package/dist/proxy/bridge.d.ts +3 -0
  44. package/dist/proxy/bridge.js +3 -0
  45. package/dist/proxy/chat-completion.d.ts +2 -0
  46. package/dist/proxy/chat-completion.js +114 -0
  47. package/dist/proxy/conversation-meta.d.ts +12 -0
  48. package/dist/proxy/conversation-meta.js +1 -0
  49. package/dist/proxy/conversation-state.d.ts +35 -0
  50. package/dist/proxy/conversation-state.js +95 -0
  51. package/dist/proxy/cursor-request.d.ts +6 -0
  52. package/dist/proxy/cursor-request.js +104 -0
  53. package/dist/proxy/index.d.ts +12 -0
  54. package/dist/proxy/index.js +12 -0
  55. package/dist/proxy/server.d.ts +6 -0
  56. package/dist/proxy/server.js +107 -0
  57. package/dist/proxy/sse.d.ts +5 -0
  58. package/dist/proxy/sse.js +5 -0
  59. package/dist/proxy/state-sync.d.ts +2 -0
  60. package/dist/proxy/state-sync.js +17 -0
  61. package/dist/proxy/stream-dispatch.d.ts +42 -0
  62. package/dist/proxy/stream-dispatch.js +614 -0
  63. package/dist/proxy/stream-state.d.ts +9 -0
  64. package/dist/proxy/stream-state.js +1 -0
  65. package/dist/proxy/title.d.ts +1 -0
  66. package/dist/proxy/title.js +103 -0
  67. package/dist/proxy/types.d.ts +27 -0
  68. package/dist/proxy/types.js +1 -0
  69. package/dist/proxy.d.ts +2 -20
  70. package/dist/proxy.js +2 -1416
  71. package/package.json +1 -1
@@ -0,0 +1,342 @@
1
+ import { create, fromBinary, toBinary } from "@bufbuild/protobuf";
2
+ import { AgentClientMessageSchema, AgentServerMessageSchema, ExecClientMessageSchema, McpErrorSchema, McpResultSchema, McpSuccessSchema, McpTextContentSchema, McpToolResultContentItemSchema, } from "../proto/agent_pb";
3
+ import { errorDetails, logPluginError, logPluginWarn } from "../logger";
4
+ import { formatToolCallSummary, formatToolResultSummary, } from "../openai/messages";
5
+ import { activeBridges, updateStoredConversationAfterCompletion, } from "./conversation-state";
6
+ import { startBridge } from "./bridge-session";
7
+ import { updateConversationCheckpoint, syncStoredBlobStore, } from "./state-sync";
8
+ import { SSE_HEADERS } from "./sse";
9
+ import { computeUsage, createConnectFrameParser, createThinkingTagFilter, parseConnectEndStream, processServerMessage, scheduleBridgeEnd, } from "./stream-dispatch";
10
// Interval between SSE keep-alive comment frames (": keep-alive") written to
// the client while the Cursor bridge is quiet, so proxies don't drop the
// connection.
const SSE_KEEPALIVE_INTERVAL_MS = 15_000;
11
/**
 * Wraps a live Cursor bridge connection in a `Response` whose body is an
 * OpenAI-style SSE stream of `chat.completion.chunk` objects.
 *
 * Lifecycle (as visible in this function):
 *  - Text/thinking deltas from the bridge become `content` /
 *    `reasoning_content` chunk deltas.
 *  - When Cursor requests a tool execution (MCP exec), a `tool_calls` delta is
 *    emitted, the bridge is parked in `activeBridges` under `bridgeKey`, and
 *    the stream is finished with `finish_reason: "tool_calls"` — a follow-up
 *    request with tool results resumes the parked bridge.
 *  - When the bridge closes without a pending exec, the stream is finished
 *    with `finish_reason: "stop"` plus a usage chunk.
 *
 * @param bridge         Cursor bridge (provides write/onData/onClose/end).
 * @param heartbeatTimer Timer owned by the bridge session; cleared on close/cancel.
 * @param blobStore      Conversation blob store, synced back on close/cancel.
 * @param mcpTools       Tool definitions forwarded to processServerMessage.
 * @param modelId        Model id echoed into every chunk.
 * @param bridgeKey      Key for parking the bridge in `activeBridges`.
 * @param convKey        Key for stored conversation state updates.
 * @param metadata       ConversationRequestMetadata for this request.
 * @returns A `Response` with SSE headers streaming completion chunks.
 */
function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools, modelId, bridgeKey, convKey, metadata) {
    // OpenAI-shaped completion id, e.g. "chatcmpl-<28 hex-ish chars>".
    const completionId = `chatcmpl-${crypto.randomUUID().replace(/-/g, "").slice(0, 28)}`;
    const created = Math.floor(Date.now() / 1000);
    let keepaliveTimer;
    // Idempotent: safe to call from both onClose and cancel paths.
    const stopKeepalive = () => {
        if (!keepaliveTimer)
            return;
        clearInterval(keepaliveTimer);
        keepaliveTimer = undefined;
    };
    const stream = new ReadableStream({
        start(controller) {
            const encoder = new TextEncoder();
            // Guards every enqueue; flipped once by closeController().
            let closed = false;
            // Mutable per-stream state consumed by processServerMessage and
            // computeUsage (shapes defined in stream-dispatch).
            const state = {
                toolCallIndex: 0,
                pendingExecs: [],
                outputTokens: 0,
                totalTokens: 0,
                interactionToolArgsText: new Map(),
                emittedToolCallIds: new Set(),
            };
            // Splits inline <think>-style tags out of plain text deltas
            // (exact tag semantics live in createThinkingTagFilter).
            const tagFilter = createThinkingTagFilter();
            // Accumulated assistant text; seeded when resuming after a tool call.
            let assistantText = metadata.assistantSeedText ?? "";
            let mcpExecReceived = false;
            let endStreamError = null;
            // --- SSE emit helpers (all no-op once the stream is closed) ---
            const sendSSE = (data) => {
                if (closed)
                    return;
                controller.enqueue(encoder.encode(`data: ${JSON.stringify(data)}\n\n`));
            };
            const sendKeepalive = () => {
                if (closed)
                    return;
                controller.enqueue(encoder.encode(": keep-alive\n\n"));
            };
            const sendDone = () => {
                if (closed)
                    return;
                controller.enqueue(encoder.encode("data: [DONE]\n\n"));
            };
            // Emits an OpenAI-style error event, then [DONE], then closes.
            const failStream = (message, code) => {
                if (closed)
                    return;
                sendSSE({
                    error: {
                        message,
                        type: "server_error",
                        ...(code ? { code } : {}),
                    },
                });
                sendDone();
                closeController();
            };
            const closeController = () => {
                if (closed)
                    return;
                closed = true;
                stopKeepalive();
                controller.close();
            };
            // One chat.completion.chunk with a single choice delta.
            const makeChunk = (delta, finishReason = null) => ({
                id: completionId,
                object: "chat.completion.chunk",
                created,
                model: modelId,
                choices: [{ index: 0, delta, finish_reason: finishReason }],
            });
            // Trailing usage chunk (empty choices), per OpenAI streaming usage shape.
            const makeUsageChunk = () => {
                const { prompt_tokens, completion_tokens, total_tokens } = computeUsage(state);
                return {
                    id: completionId,
                    object: "chat.completion.chunk",
                    created,
                    model: modelId,
                    choices: [],
                    usage: { prompt_tokens, completion_tokens, total_tokens },
                };
            };
            // Connect-protocol frame parser: first callback handles message
            // frames, second handles the end-stream frame.
            const processChunk = createConnectFrameParser((messageBytes) => {
                try {
                    const serverMessage = fromBinary(AgentServerMessageSchema, messageBytes);
                    // Callback order here must match processServerMessage's
                    // signature (defined in stream-dispatch).
                    processServerMessage(serverMessage, blobStore, mcpTools, (data) => bridge.write(data), state, (text, isThinking) => {
                        // Text delta: thinking goes straight to reasoning_content,
                        // plain text is first run through the tag filter.
                        if (isThinking) {
                            sendSSE(makeChunk({ reasoning_content: text }));
                            return;
                        }
                        const { content, reasoning } = tagFilter.process(text);
                        if (reasoning)
                            sendSSE(makeChunk({ reasoning_content: reasoning }));
                        if (content) {
                            assistantText += content;
                            sendSSE(makeChunk({ content }));
                        }
                    }, (exec) => {
                        // Tool-exec request from Cursor. Upsert by toolCallId so a
                        // later message with resolved exec ids replaces the stub.
                        const existingIndex = state.pendingExecs.findIndex((candidate) => candidate.toolCallId === exec.toolCallId);
                        if (existingIndex >= 0) {
                            state.pendingExecs[existingIndex] = exec;
                        }
                        else {
                            state.pendingExecs.push(exec);
                        }
                        mcpExecReceived = true;
                        // Flush any buffered tag-filter text before ending the turn.
                        const flushed = tagFilter.flush();
                        if (flushed.reasoning)
                            sendSSE(makeChunk({ reasoning_content: flushed.reasoning }));
                        if (flushed.content) {
                            assistantText += flushed.content;
                            sendSSE(makeChunk({ content: flushed.content }));
                        }
                        // Seed text for the resumed stream: assistant text so far
                        // plus a summary of the tool call being made.
                        const assistantSeedText = [
                            assistantText.trim(),
                            formatToolCallSummary({
                                id: exec.toolCallId,
                                type: "function",
                                function: {
                                    name: exec.toolName,
                                    arguments: exec.decodedArgs,
                                },
                            }),
                        ]
                            .filter(Boolean)
                            .join("\n\n");
                        sendSSE(makeChunk({
                            tool_calls: [
                                {
                                    index: state.toolCallIndex++,
                                    id: exec.toolCallId,
                                    type: "function",
                                    function: {
                                        name: exec.toolName,
                                        arguments: exec.decodedArgs,
                                    },
                                },
                            ],
                        }));
                        // Park the live bridge so the follow-up request with tool
                        // results can resume it (see handleToolResultResume).
                        activeBridges.set(bridgeKey, {
                            bridge,
                            heartbeatTimer,
                            blobStore,
                            mcpTools,
                            pendingExecs: state.pendingExecs,
                            modelId,
                            metadata: {
                                ...metadata,
                                assistantSeedText,
                            },
                        });
                        sendSSE(makeChunk({}, "tool_calls"));
                        sendDone();
                        closeController();
                    }, (checkpointBytes) => updateConversationCheckpoint(convKey, checkpointBytes), () => scheduleBridgeEnd(bridge), (info) => {
                        // Unsupported server message: record error, log, end bridge.
                        endStreamError = new Error(`Cursor returned unsupported ${info.category}: ${info.caseName}${info.detail ? ` (${info.detail})` : ""}`);
                        logPluginError("Closing Cursor bridge after unsupported message", {
                            modelId,
                            bridgeKey,
                            convKey,
                            category: info.category,
                            caseName: info.caseName,
                            detail: info.detail,
                        });
                        scheduleBridgeEnd(bridge);
                    }, (info) => {
                        // Unsupported exec type: same treatment.
                        endStreamError = new Error(`Cursor requested unsupported exec type: ${info.execCase}`);
                        logPluginError("Closing Cursor bridge after unsupported exec", {
                            modelId,
                            bridgeKey,
                            convKey,
                            execCase: info.execCase,
                            execId: info.execId,
                            execMsgId: info.execMsgId,
                        });
                        scheduleBridgeEnd(bridge);
                    });
                }
                catch {
                    // Skip unparseable messages.
                }
            }, (endStreamBytes) => {
                // Connect end-stream frame; non-null result means server error.
                endStreamError = parseConnectEndStream(endStreamBytes);
                if (endStreamError) {
                    logPluginError("Cursor stream returned Connect end-stream error", {
                        modelId,
                        bridgeKey,
                        convKey,
                        ...errorDetails(endStreamError),
                    });
                }
                scheduleBridgeEnd(bridge);
            });
            keepaliveTimer = setInterval(() => {
                try {
                    sendKeepalive();
                }
                catch (error) {
                    // enqueue can throw if the consumer went away; stop trying.
                    logPluginWarn("Failed to write SSE keepalive", {
                        modelId,
                        bridgeKey,
                        convKey,
                        ...errorDetails(error),
                    });
                    stopKeepalive();
                }
            }, SSE_KEEPALIVE_INTERVAL_MS);
            bridge.onData(processChunk);
            bridge.onClose((code) => {
                clearInterval(heartbeatTimer);
                stopKeepalive();
                syncStoredBlobStore(convKey, blobStore);
                // Server-reported error wins over any other close handling.
                if (endStreamError) {
                    activeBridges.delete(bridgeKey);
                    failStream(endStreamError.message, "cursor_bridge_closed");
                    return;
                }
                // Normal completion (no tool call pending): flush, persist, stop.
                if (!mcpExecReceived) {
                    const flushed = tagFilter.flush();
                    if (flushed.reasoning)
                        sendSSE(makeChunk({ reasoning_content: flushed.reasoning }));
                    if (flushed.content) {
                        assistantText += flushed.content;
                        sendSSE(makeChunk({ content: flushed.content }));
                    }
                    updateStoredConversationAfterCompletion(convKey, metadata, assistantText);
                    sendSSE(makeChunk({}, "stop"));
                    sendSSE(makeUsageChunk());
                    sendDone();
                    closeController();
                    return;
                }
                // A tool call was emitted and the stream already ended cleanly;
                // a non-zero close before that counts as a lost connection.
                activeBridges.delete(bridgeKey);
                if (code !== 0 && !closed) {
                    failStream("Cursor bridge connection lost", "cursor_bridge_closed");
                }
            });
        },
        // Client disconnected from the SSE stream: tear everything down.
        cancel(reason) {
            stopKeepalive();
            clearInterval(heartbeatTimer);
            syncStoredBlobStore(convKey, blobStore);
            const active = activeBridges.get(bridgeKey);
            if (active?.bridge === bridge) {
                activeBridges.delete(bridgeKey);
            }
            logPluginWarn("OpenCode client disconnected from Cursor SSE stream", {
                modelId,
                bridgeKey,
                convKey,
                reason: reason instanceof Error ? reason.message : String(reason ?? ""),
            });
            bridge.end();
        },
    });
    return new Response(stream, { headers: SSE_HEADERS });
}
265
/**
 * Opens a Cursor bridge for the prepared request payload and returns an SSE
 * `Response` that streams the bridge output as OpenAI chat-completion chunks.
 *
 * @param payload  Cursor request payload (requestBytes, blobStore, mcpTools).
 * @param accessToken Cursor access token used to start the bridge.
 */
export async function handleStreamingResponse(payload, accessToken, modelId, bridgeKey, convKey, metadata) {
    const session = await startBridge(accessToken, payload.requestBytes);
    return createBridgeStreamResponse(session.bridge, session.heartbeatTimer, payload.blobStore, payload.mcpTools, modelId, bridgeKey, convKey, metadata);
}
269
/**
 * Polls until every pending exec referenced by the incoming tool results has
 * received its Cursor exec metadata (`execMsgId !== 0`), or `timeoutMs`
 * elapses. On timeout with still-unresolved execs, logs a warning and returns
 * anyway (best effort — callers proceed either way).
 *
 * Fix: the unresolved-exec filter was duplicated verbatim inside and after
 * the polling loop; it is now a single local helper.
 *
 * @param active      Parked bridge entry (reads `pendingExecs`, `modelId`).
 * @param toolResults Tool results the caller wants to send back.
 * @param timeoutMs   Maximum total wait (default 2s, polled every 25ms).
 */
async function waitForResolvablePendingExecs(active, toolResults, timeoutMs = 2_000) {
    const pendingToolCallIds = new Set(toolResults.map((result) => result.toolCallId));
    // Execs the caller wants to answer whose exec metadata has not arrived yet.
    const findUnresolved = () => active.pendingExecs.filter((exec) => pendingToolCallIds.has(exec.toolCallId) && exec.execMsgId === 0);
    const deadline = Date.now() + timeoutMs;
    while (Date.now() < deadline) {
        if (findUnresolved().length === 0) {
            return;
        }
        await new Promise((resolve) => setTimeout(resolve, 25));
    }
    const unresolved = findUnresolved();
    if (unresolved.length > 0) {
        logPluginWarn("Cursor exec metadata did not arrive before tool-result resume", {
            bridgeToolCallIds: unresolved.map((exec) => exec.toolCallId),
            modelId: active.modelId,
        });
    }
}
287
/**
 * Resumes a parked Cursor bridge with tool results from the client.
 *
 * For each pending exec, sends an ExecClientMessage carrying either an MCP
 * success (the matching tool result's text content) or an MCP error when no
 * matching result was provided, then re-attaches an SSE stream to the bridge.
 *
 * @param active      Parked bridge entry from `activeBridges`.
 * @param toolResults Results keyed by toolCallId (each has `content` text).
 * @param bridgeKey   Key under which the bridge may be re-parked.
 * @param convKey     Stored-conversation key for checkpoint/blob sync.
 * @returns SSE `Response` streaming the continuation of the turn.
 */
export async function handleToolResultResume(active, toolResults, bridgeKey, convKey) {
    const { bridge, heartbeatTimer, blobStore, mcpTools, pendingExecs, modelId, metadata, } = active;
    // Carry forward the assistant seed text plus summaries of the tool
    // results, so a later resume still has the full turn context.
    const resumeMetadata = {
        ...metadata,
        assistantSeedText: [
            metadata.assistantSeedText?.trim() ?? "",
            toolResults.map(formatToolResultSummary).join("\n\n"),
        ]
            .filter(Boolean)
            .join("\n\n"),
    };
    // Give Cursor a short grace period to deliver exec ids before replying.
    await waitForResolvablePendingExecs(active, toolResults);
    for (const exec of pendingExecs) {
        const result = toolResults.find((toolResult) => toolResult.toolCallId === exec.toolCallId);
        // Success with a single text content item, or an error result when the
        // client never supplied a result for this exec.
        const mcpResult = result
            ? create(McpResultSchema, {
                result: {
                    case: "success",
                    value: create(McpSuccessSchema, {
                        content: [
                            create(McpToolResultContentItemSchema, {
                                content: {
                                    case: "text",
                                    value: create(McpTextContentSchema, {
                                        text: result.content,
                                    }),
                                },
                            }),
                        ],
                        isError: false,
                    }),
                },
            })
            : create(McpResultSchema, {
                result: {
                    case: "error",
                    value: create(McpErrorSchema, {
                        error: "Tool result not provided",
                    }),
                },
            });
        // NOTE(review): if exec.execMsgId is still 0 after the wait above, this
        // sends a zero id — presumably Cursor rejects/ignores it; confirm.
        const execClientMessage = create(ExecClientMessageSchema, {
            id: exec.execMsgId,
            execId: exec.execId,
            message: {
                case: "mcpResult",
                value: mcpResult,
            },
        });
        const clientMessage = create(AgentClientMessageSchema, {
            message: { case: "execClientMessage", value: execClientMessage },
        });
        bridge.write(toBinary(AgentClientMessageSchema, clientMessage));
    }
    return createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools, modelId, bridgeKey, convKey, resumeMetadata);
}
@@ -0,0 +1,3 @@
1
+ export { startBridge } from "./bridge-session";
2
+ export { handleStreamingResponse, handleToolResultResume, } from "./bridge-streaming";
3
+ export { handleNonStreamingResponse } from "./bridge-non-streaming";
@@ -0,0 +1,3 @@
1
+ export { startBridge } from "./bridge-session";
2
+ export { handleStreamingResponse, handleToolResultResume, } from "./bridge-streaming";
3
+ export { handleNonStreamingResponse } from "./bridge-non-streaming";
@@ -0,0 +1,2 @@
1
import type { ChatCompletionRequest, ChatRequestContext } from "../openai/types";
/**
 * Entry point for an OpenAI-style chat completion request against Cursor.
 * Returns a `Response` synchronously for validation errors and tool-result
 * resumes, or a Promise when a new bridge must be started.
 */
export declare function handleChatCompletion(body: ChatCompletionRequest, accessToken: string, context?: ChatRequestContext): Response | Promise<Response>;
@@ -0,0 +1,114 @@
1
+ import { logPluginWarn } from "../logger";
2
+ import { buildInitialHandoffPrompt, buildTitleSourceText, buildToolResumePrompt, detectTitleRequest, parseMessages, } from "../openai/messages";
3
+ import { buildMcpToolDefinitions, selectToolsForChoice } from "../openai/tools";
4
+ import { activeBridges, conversationStates, createStoredConversation, deriveBridgeKey, deriveConversationKey, evictStaleConversations, hashString, normalizeAgentKey, resetStoredConversation, } from "./conversation-state";
5
+ import { buildCursorRequest } from "./cursor-request";
6
+ import { handleNonStreamingResponse, handleStreamingResponse, handleToolResultResume, } from "./bridge";
7
+ import { handleTitleGenerationRequest } from "./title";
8
/** Builds a 400 `invalid_request_error` JSON response with the given message. */
function invalidRequestResponse(message) {
    return new Response(JSON.stringify({
        error: {
            message,
            type: "invalid_request_error",
        },
    }), { status: 400, headers: { "Content-Type": "application/json" } });
}
/**
 * Routes an OpenAI-style chat completion request to the right Cursor handler:
 * title generation, tool-result resume of a parked bridge, or a fresh
 * streaming / non-streaming request.
 *
 * Fixes: the 400 invalid_request_error response body was built twice inline
 * (now the `invalidRequestResponse` helper); `effectiveUserText` was declared
 * `let` but never reassigned (now `const`).
 *
 * @param body        OpenAI chat completion request body.
 * @param accessToken Cursor access token.
 * @param context     Optional session/agent context used for key derivation.
 * @returns A `Response` (or promise of one) — SSE stream, JSON, or 400 error.
 */
export function handleChatCompletion(body, accessToken, context = {}) {
    const parsed = parseMessages(body.messages);
    const { systemPrompt, userText, turns, toolResults, pendingAssistantSummary, completedTurnsFingerprint, } = parsed;
    const modelId = body.model;
    const normalizedAgentKey = normalizeAgentKey(context.agentKey);
    const titleDetection = detectTitleRequest(body);
    const isTitleAgent = titleDetection.matched;
    // Title-generation requests are handled by a dedicated lightweight path.
    if (isTitleAgent) {
        const titleSourceText = buildTitleSourceText(userText, turns, pendingAssistantSummary, toolResults);
        if (!titleSourceText) {
            return invalidRequestResponse("No title source text found");
        }
        return handleTitleGenerationRequest(titleSourceText, accessToken, modelId, body.stream !== false);
    }
    const tools = selectToolsForChoice(body.tools ?? [], body.tool_choice);
    if (!userText && toolResults.length === 0) {
        return invalidRequestResponse("No user message found");
    }
    // bridgeKey: session/agent-scoped, for active tool-call bridges
    // convKey: model-independent, for conversation state that survives model switches
    const bridgeKey = deriveBridgeKey(modelId, body.messages, context.sessionId, context.agentKey);
    const convKey = deriveConversationKey(body.messages, context.sessionId, context.agentKey);
    const activeBridge = activeBridges.get(bridgeKey);
    if (activeBridge && toolResults.length > 0) {
        activeBridges.delete(bridgeKey);
        if (activeBridge.bridge.alive) {
            if (activeBridge.modelId !== modelId) {
                logPluginWarn("Resuming pending Cursor tool call on original model after model switch", {
                    requestedModelId: modelId,
                    resumedModelId: activeBridge.modelId,
                    convKey,
                    bridgeKey,
                });
            }
            // Resume the live bridge with tool results
            return handleToolResultResume(activeBridge, toolResults, bridgeKey, convKey);
        }
        // Bridge died (timeout, server disconnect, etc.).
        // Clean up and fall through to start a fresh bridge.
        clearInterval(activeBridge.heartbeatTimer);
        activeBridge.bridge.end();
    }
    // Clean up stale bridge if present
    if (activeBridge && activeBridges.has(bridgeKey)) {
        clearInterval(activeBridge.heartbeatTimer);
        activeBridge.bridge.end();
        activeBridges.delete(bridgeKey);
    }
    let stored = conversationStates.get(convKey);
    if (!stored) {
        stored = createStoredConversation();
        conversationStates.set(convKey, stored);
    }
    const systemPromptHash = hashString(systemPrompt);
    // A checkpoint from a diverged conversation (different system prompt or
    // different completed-turn history) cannot be reused — start fresh.
    if (stored.checkpoint &&
        (stored.systemPromptHash !== systemPromptHash ||
            (turns.length > 0 &&
                stored.completedTurnsFingerprint !== completedTurnsFingerprint))) {
        resetStoredConversation(stored);
    }
    stored.systemPromptHash = systemPromptHash;
    stored.completedTurnsFingerprint = completedTurnsFingerprint;
    stored.lastAccessMs = Date.now();
    evictStaleConversations();
    // Build the request. When tool results are present but the bridge died,
    // we must still include the last user text so Cursor has context.
    const mcpTools = buildMcpToolDefinitions(tools);
    const hasPendingAssistantSummary = pendingAssistantSummary.trim().length > 0;
    const needsInitialHandoff = !stored.checkpoint &&
        (turns.length > 0 || hasPendingAssistantSummary || toolResults.length > 0);
    const replayTurns = needsInitialHandoff ? [] : turns;
    const effectiveUserText = needsInitialHandoff
        ? buildInitialHandoffPrompt(userText, turns, pendingAssistantSummary, toolResults)
        : toolResults.length > 0 || hasPendingAssistantSummary
            ? buildToolResumePrompt(userText, pendingAssistantSummary, toolResults)
            : userText;
    const payload = buildCursorRequest(modelId, systemPrompt, effectiveUserText, replayTurns, stored.conversationId, stored.checkpoint, stored.blobStore);
    payload.mcpTools = mcpTools;
    if (body.stream === false) {
        return handleNonStreamingResponse(payload, accessToken, modelId, convKey, {
            systemPrompt,
            systemPromptHash,
            completedTurnsFingerprint,
            turns,
            userText,
            agentKey: normalizedAgentKey,
        });
    }
    return handleStreamingResponse(payload, accessToken, modelId, bridgeKey, convKey, {
        systemPrompt,
        systemPromptHash,
        completedTurnsFingerprint,
        turns,
        userText,
        agentKey: normalizedAgentKey,
    });
}
@@ -0,0 +1,12 @@
1
/**
 * Per-request conversation metadata passed from the chat-completion handler
 * into the streaming / non-streaming bridge handlers, so completed turns can
 * be written back to stored conversation state once the response finishes.
 */
export interface ConversationRequestMetadata {
    /** System prompt supplied with the request. */
    systemPrompt: string;
    /** Hash of `systemPrompt` (produced by `hashString`). */
    systemPromptHash: string;
    /** Fingerprint of the completed turns preceding this request. */
    completedTurnsFingerprint: string;
    /** Completed user/assistant turn pairs preceding the current request. */
    turns: Array<{
        userText: string;
        assistantText: string;
    }>;
    /** Latest user message text for the in-flight turn. */
    userText: string;
    /** Assistant text accumulated before a tool-call pause; seeds a resumed stream. */
    assistantSeedText?: string;
    /** Agent identifier, normalized via `normalizeAgentKey` ("default" fallback). */
    agentKey?: string;
}
@@ -0,0 +1 @@
1
+ export {};
@@ -0,0 +1,35 @@
1
+ import type { OpenAIMessage } from "../openai/types";
2
+ import type { ActiveBridge } from "./types";
3
/** Bridges parked mid tool-call, keyed by bridge key. */
export declare const activeBridges: Map<string, ActiveBridge>;
/** Persisted per-conversation state that survives individual requests. */
export interface StoredConversation {
    /** Cursor conversation id (a UUID). */
    conversationId: string;
    /** Latest Cursor checkpoint bytes, or null before the first completion. */
    checkpoint: Uint8Array | null;
    /** Conversation blob store. */
    blobStore: Map<string, Uint8Array>;
    /** Last access timestamp (ms since epoch), used for TTL eviction. */
    lastAccessMs: number;
    /** Hash of the system prompt the checkpoint was built with. */
    systemPromptHash: string;
    /** Fingerprint of completed turns the checkpoint was built with. */
    completedTurnsFingerprint: string;
}
/** Stored conversations keyed by the model-independent conversation key. */
export declare const conversationStates: Map<string, StoredConversation>;
/** Drops stored conversations idle longer than the TTL. */
export declare function evictStaleConversations(): void;
/** Trims an agent key, falling back to "default" when empty/absent. */
export declare function normalizeAgentKey(agentKey?: string): string;
/** sha-256 hex digest of `value`. */
export declare function hashString(value: string): string;
/** Creates a fresh StoredConversation with a new conversation id. */
export declare function createStoredConversation(): StoredConversation;
/** Resets `stored` in place to a fresh conversation (new id, empty state). */
export declare function resetStoredConversation(stored: StoredConversation): void;
/** Derives the session/agent-scoped key used to park active bridges. */
export declare function deriveBridgeKey(modelId: string, messages: OpenAIMessage[], sessionId?: string, agentKey?: string): string;
/** Derive a key for conversation state. Model-independent so context survives model switches. */
export declare function deriveConversationKey(messages: OpenAIMessage[], sessionId?: string, agentKey?: string): string;
/** Builds a deterministic fingerprint over the full message list. */
export declare function buildConversationFingerprint(messages: OpenAIMessage[]): string;
// NOTE(review): duplicates ConversationRequestMetadata in conversation-meta.d.ts
// (generated output) — consider sharing one declaration at the source level.
interface ConversationRequestMetadata {
    systemPrompt: string;
    systemPromptHash: string;
    completedTurnsFingerprint: string;
    turns: Array<{
        userText: string;
        assistantText: string;
    }>;
    userText: string;
    assistantSeedText?: string;
    agentKey?: string;
}
/** Writes the finished turn's fingerprints back into stored conversation state. */
export declare function updateStoredConversationAfterCompletion(convKey: string, metadata: ConversationRequestMetadata, assistantText: string): void;
export {};
@@ -0,0 +1,95 @@
1
+ import { createHash } from "node:crypto";
2
+ import { buildCompletedTurnsFingerprint, textContent, } from "../openai/messages";
3
// Active bridges keyed by a session token (derived from conversation state).
// When tool_calls are returned, the bridge stays alive. The next request
// with tool results looks up the bridge and sends mcpResult messages.
export const activeBridges = new Map();
// Stored conversation state (checkpoint, blob store, fingerprints) keyed by
// the model-independent conversation key.
export const conversationStates = new Map();
// Idle conversations older than this are evicted (see evictStaleConversations).
const CONVERSATION_TTL_MS = 30 * 60 * 1000; // 30 minutes
9
/**
 * Removes stored conversations that have not been accessed within the TTL
 * window (CONVERSATION_TTL_MS).
 */
export function evictStaleConversations() {
    const cutoff = Date.now() - CONVERSATION_TTL_MS;
    const staleKeys = [];
    for (const [key, stored] of conversationStates) {
        if (stored.lastAccessMs < cutoff) {
            staleKeys.push(key);
        }
    }
    for (const key of staleKeys) {
        conversationStates.delete(key);
    }
}
17
/**
 * Normalizes an agent key: trims whitespace and falls back to "default"
 * when the key is missing or blank.
 */
export function normalizeAgentKey(agentKey) {
    const normalized = (agentKey ?? "").trim();
    return normalized.length > 0 ? normalized : "default";
}
21
/** Returns the sha-256 hex digest of `value`. */
export function hashString(value) {
    const digest = createHash("sha256");
    digest.update(value);
    return digest.digest("hex");
}
24
+ export function createStoredConversation() {
25
+ return {
26
+ conversationId: crypto.randomUUID(),
27
+ checkpoint: null,
28
+ blobStore: new Map(),
29
+ lastAccessMs: Date.now(),
30
+ systemPromptHash: "",
31
+ completedTurnsFingerprint: "",
32
+ };
33
+ }
34
+ export function resetStoredConversation(stored) {
35
+ stored.conversationId = crypto.randomUUID();
36
+ stored.checkpoint = null;
37
+ stored.blobStore = new Map();
38
+ stored.lastAccessMs = Date.now();
39
+ stored.systemPromptHash = "";
40
+ stored.completedTurnsFingerprint = "";
41
+ }
42
/**
 * Derives the 16-hex-char key used to park/look up active bridges.
 * Prefers the explicit session id; otherwise falls back to agent + model +
 * a prefix of the first user message.
 */
export function deriveBridgeKey(modelId, messages, sessionId, agentKey) {
    const normalizedAgent = normalizeAgentKey(agentKey);
    let seed;
    if (sessionId) {
        seed = `bridge:${sessionId}:${normalizedAgent}`;
    }
    else {
        const firstUserMsg = messages.find((m) => m.role === "user");
        const firstUserText = firstUserMsg ? textContent(firstUserMsg.content) : "";
        seed = `bridge:${normalizedAgent}:${modelId}:${firstUserText.slice(0, 200)}`;
    }
    return createHash("sha256").update(seed).digest("hex").slice(0, 16);
}
58
/** Derive a key for conversation state. Model-independent so context survives model switches. */
export function deriveConversationKey(messages, sessionId, agentKey) {
    const normalizedAgent = normalizeAgentKey(agentKey);
    const seed = sessionId
        ? `session:${sessionId}:${normalizedAgent}`
        : `${normalizedAgent}:${buildConversationFingerprint(messages)}`;
    return createHash("sha256").update(seed).digest("hex").slice(0, 16);
}
72
/**
 * Builds a deterministic fingerprint over the whole message list: one line
 * per message (role, text content, tool_call_id, tool-call ids), joined with
 * a "---" separator.
 */
export function buildConversationFingerprint(messages) {
    const parts = [];
    for (const message of messages) {
        const toolCallIDs = (message.tool_calls ?? [])
            .map((call) => call.id)
            .join(",");
        parts.push(`${message.role}:${textContent(message.content)}:${message.tool_call_id ?? ""}:${toolCallIDs}`);
    }
    return parts.join("\n---\n");
}
82
/**
 * After a completion finishes, records the just-completed turn (when user
 * text is present) into the stored conversation's fingerprints and refreshes
 * its access time. No-op when the conversation is no longer stored.
 */
export function updateStoredConversationAfterCompletion(convKey, metadata, assistantText) {
    const stored = conversationStates.get(convKey);
    if (!stored)
        return;
    let nextTurns = metadata.turns;
    if (metadata.userText) {
        nextTurns = [
            ...metadata.turns,
            { userText: metadata.userText, assistantText: assistantText.trim() },
        ];
    }
    stored.systemPromptHash = metadata.systemPromptHash;
    stored.completedTurnsFingerprint = buildCompletedTurnsFingerprint(metadata.systemPrompt, nextTurns);
    stored.lastAccessMs = Date.now();
}
@@ -0,0 +1,6 @@
1
import type { CursorRequestPayload } from "./types";
/**
 * Builds the Cursor agent request payload for a conversation, replaying
 * completed turns and reusing the checkpoint / blob store when available.
 * NOTE(review): implementation not visible here — confirm parameter semantics
 * against cursor-request.js.
 */
export declare function buildCursorRequest(modelId: string, systemPrompt: string, userText: string, turns: Array<{
    userText: string;
    assistantText: string;
}>, conversationId: string, checkpoint: Uint8Array | null, existingBlobStore?: Map<string, Uint8Array>): CursorRequestPayload;
/**
 * Builds a resume payload from an existing (required) checkpoint without new
 * user text. NOTE(review): implementation not visible here — confirm.
 */
export declare function buildCursorResumeRequest(modelId: string, systemPrompt: string, conversationId: string, checkpoint: Uint8Array, existingBlobStore?: Map<string, Uint8Array>): CursorRequestPayload;