@playwo/opencode-cursor-oauth 0.0.0-dev.c80ebcb27754 → 0.0.0-dev.da5538092563

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (76)
  1. package/README.md +32 -83
  2. package/dist/auth.js +27 -3
  3. package/dist/constants.d.ts +2 -0
  4. package/dist/constants.js +2 -0
  5. package/dist/cursor/bidi-session.d.ts +12 -0
  6. package/dist/cursor/bidi-session.js +164 -0
  7. package/dist/cursor/config.d.ts +4 -0
  8. package/dist/cursor/config.js +4 -0
  9. package/dist/cursor/connect-framing.d.ts +10 -0
  10. package/dist/cursor/connect-framing.js +80 -0
  11. package/dist/cursor/headers.d.ts +6 -0
  12. package/dist/cursor/headers.js +16 -0
  13. package/dist/cursor/index.d.ts +5 -0
  14. package/dist/cursor/index.js +5 -0
  15. package/dist/cursor/unary-rpc.d.ts +12 -0
  16. package/dist/cursor/unary-rpc.js +124 -0
  17. package/dist/index.d.ts +2 -14
  18. package/dist/index.js +2 -229
  19. package/dist/logger.d.ts +7 -0
  20. package/dist/logger.js +150 -0
  21. package/dist/models.d.ts +3 -0
  22. package/dist/models.js +80 -54
  23. package/dist/openai/index.d.ts +3 -0
  24. package/dist/openai/index.js +3 -0
  25. package/dist/openai/messages.d.ts +39 -0
  26. package/dist/openai/messages.js +228 -0
  27. package/dist/openai/tools.d.ts +7 -0
  28. package/dist/openai/tools.js +58 -0
  29. package/dist/openai/types.d.ts +41 -0
  30. package/dist/openai/types.js +1 -0
  31. package/dist/plugin/cursor-auth-plugin.d.ts +3 -0
  32. package/dist/plugin/cursor-auth-plugin.js +139 -0
  33. package/dist/proto/agent_pb.js +637 -319
  34. package/dist/provider/index.d.ts +2 -0
  35. package/dist/provider/index.js +2 -0
  36. package/dist/provider/model-cost.d.ts +9 -0
  37. package/dist/provider/model-cost.js +206 -0
  38. package/dist/provider/models.d.ts +8 -0
  39. package/dist/provider/models.js +86 -0
  40. package/dist/proxy/bridge-close-controller.d.ts +6 -0
  41. package/dist/proxy/bridge-close-controller.js +37 -0
  42. package/dist/proxy/bridge-non-streaming.d.ts +3 -0
  43. package/dist/proxy/bridge-non-streaming.js +123 -0
  44. package/dist/proxy/bridge-session.d.ts +5 -0
  45. package/dist/proxy/bridge-session.js +11 -0
  46. package/dist/proxy/bridge-streaming.d.ts +5 -0
  47. package/dist/proxy/bridge-streaming.js +409 -0
  48. package/dist/proxy/bridge.d.ts +3 -0
  49. package/dist/proxy/bridge.js +3 -0
  50. package/dist/proxy/chat-completion.d.ts +2 -0
  51. package/dist/proxy/chat-completion.js +153 -0
  52. package/dist/proxy/conversation-meta.d.ts +12 -0
  53. package/dist/proxy/conversation-meta.js +1 -0
  54. package/dist/proxy/conversation-state.d.ts +35 -0
  55. package/dist/proxy/conversation-state.js +95 -0
  56. package/dist/proxy/cursor-request.d.ts +6 -0
  57. package/dist/proxy/cursor-request.js +101 -0
  58. package/dist/proxy/index.d.ts +12 -0
  59. package/dist/proxy/index.js +12 -0
  60. package/dist/proxy/server.d.ts +6 -0
  61. package/dist/proxy/server.js +107 -0
  62. package/dist/proxy/sse.d.ts +5 -0
  63. package/dist/proxy/sse.js +5 -0
  64. package/dist/proxy/state-sync.d.ts +2 -0
  65. package/dist/proxy/state-sync.js +17 -0
  66. package/dist/proxy/stream-dispatch.d.ts +42 -0
  67. package/dist/proxy/stream-dispatch.js +641 -0
  68. package/dist/proxy/stream-state.d.ts +7 -0
  69. package/dist/proxy/stream-state.js +1 -0
  70. package/dist/proxy/title.d.ts +1 -0
  71. package/dist/proxy/title.js +103 -0
  72. package/dist/proxy/types.d.ts +32 -0
  73. package/dist/proxy/types.js +1 -0
  74. package/dist/proxy.d.ts +2 -19
  75. package/dist/proxy.js +2 -1221
  76. package/package.json +1 -2
@@ -0,0 +1,2 @@
1
+ export * from "./model-cost";
2
+ export * from "./models";
@@ -0,0 +1,2 @@
1
+ export * from "./model-cost";
2
+ export * from "./models";
@@ -0,0 +1,9 @@
1
/**
 * Per-million-token pricing for one model.
 * NOTE(review): values look like USD per million tokens, matching the
 * table in model-cost.js — confirm the unit against the pricing source.
 */
export interface ModelCost {
    /** Price for uncached input (prompt) tokens. */
    input: number;
    /** Price for output (completion) tokens. */
    output: number;
    /** Prompt-cache pricing. */
    cache: {
        /** Price for cache-read tokens. */
        read: number;
        /** Price for cache-write tokens (0 where the provider has no write charge). */
        write: number;
    };
}
/**
 * Estimate pricing for a Cursor model ID. Unknown IDs fall back to
 * suffix stripping, pattern matching, and finally a default cost
 * (see model-cost.js).
 */
export declare function estimateModelCost(modelId: string): ModelCost;
@@ -0,0 +1,206 @@
1
// Per-million-token pricing for known Cursor model IDs, keyed by the
// exact (lowercased) model ID. Unknown IDs fall through to
// MODEL_COST_PATTERNS and finally DEFAULT_COST.
const MODEL_COST_TABLE = {
    "claude-4-sonnet": {
        input: 3,
        output: 15,
        cache: { read: 0.3, write: 3.75 },
    },
    "claude-4-sonnet-1m": {
        input: 6,
        output: 22.5,
        cache: { read: 0.6, write: 7.5 },
    },
    "claude-4.5-haiku": {
        input: 1,
        output: 5,
        cache: { read: 0.1, write: 1.25 },
    },
    "claude-4.5-opus": {
        input: 5,
        output: 25,
        cache: { read: 0.5, write: 6.25 },
    },
    "claude-4.5-sonnet": {
        input: 3,
        output: 15,
        cache: { read: 0.3, write: 3.75 },
    },
    "claude-4.6-opus": {
        input: 5,
        output: 25,
        cache: { read: 0.5, write: 6.25 },
    },
    "claude-4.6-opus-fast": {
        input: 30,
        output: 150,
        cache: { read: 3, write: 37.5 },
    },
    "claude-4.6-sonnet": {
        input: 3,
        output: 15,
        cache: { read: 0.3, write: 3.75 },
    },
    "composer-1": { input: 1.25, output: 10, cache: { read: 0.125, write: 0 } },
    "composer-1.5": { input: 3.5, output: 17.5, cache: { read: 0.35, write: 0 } },
    "composer-2": { input: 0.5, output: 2.5, cache: { read: 0.2, write: 0 } },
    "composer-2-fast": {
        input: 1.5,
        output: 7.5,
        cache: { read: 0.2, write: 0 },
    },
    "gemini-2.5-flash": {
        input: 0.3,
        output: 2.5,
        cache: { read: 0.03, write: 0 },
    },
    "gemini-3-flash": { input: 0.5, output: 3, cache: { read: 0.05, write: 0 } },
    "gemini-3-pro": { input: 2, output: 12, cache: { read: 0.2, write: 0 } },
    "gemini-3-pro-image": {
        input: 2,
        output: 12,
        cache: { read: 0.2, write: 0 },
    },
    "gemini-3.1-pro": { input: 2, output: 12, cache: { read: 0.2, write: 0 } },
    "gpt-5": { input: 1.25, output: 10, cache: { read: 0.125, write: 0 } },
    "gpt-5-fast": { input: 2.5, output: 20, cache: { read: 0.25, write: 0 } },
    "gpt-5-mini": { input: 0.25, output: 2, cache: { read: 0.025, write: 0 } },
    "gpt-5-codex": { input: 1.25, output: 10, cache: { read: 0.125, write: 0 } },
    "gpt-5.1-codex": {
        input: 1.25,
        output: 10,
        cache: { read: 0.125, write: 0 },
    },
    "gpt-5.1-codex-max": {
        input: 1.25,
        output: 10,
        cache: { read: 0.125, write: 0 },
    },
    "gpt-5.1-codex-mini": {
        input: 0.25,
        output: 2,
        cache: { read: 0.025, write: 0 },
    },
    "gpt-5.2": { input: 1.75, output: 14, cache: { read: 0.175, write: 0 } },
    "gpt-5.2-codex": {
        input: 1.75,
        output: 14,
        cache: { read: 0.175, write: 0 },
    },
    "gpt-5.3-codex": {
        input: 1.75,
        output: 14,
        cache: { read: 0.175, write: 0 },
    },
    "gpt-5.4": { input: 2.5, output: 15, cache: { read: 0.25, write: 0 } },
    "gpt-5.4-mini": {
        input: 0.75,
        output: 4.5,
        cache: { read: 0.075, write: 0 },
    },
    "gpt-5.4-nano": { input: 0.2, output: 1.25, cache: { read: 0.02, write: 0 } },
    "grok-4.20": { input: 2, output: 6, cache: { read: 0.2, write: 0 } },
    "kimi-k2.5": { input: 0.6, output: 3, cache: { read: 0.1, write: 0 } },
};
// Ordered fallback matchers for IDs with no exact table entry.
// Order matters: more specific patterns must precede generic ones
// (e.g. "claude.*opus.*fast" before "claude.*opus").
const MODEL_COST_PATTERNS = [
    {
        match: (id) => /claude.*opus.*fast/i.test(id),
        cost: MODEL_COST_TABLE["claude-4.6-opus-fast"],
    },
    {
        match: (id) => /claude.*opus/i.test(id),
        cost: MODEL_COST_TABLE["claude-4.6-opus"],
    },
    {
        match: (id) => /claude.*haiku/i.test(id),
        cost: MODEL_COST_TABLE["claude-4.5-haiku"],
    },
    {
        match: (id) => /claude.*sonnet/i.test(id),
        cost: MODEL_COST_TABLE["claude-4.6-sonnet"],
    },
    {
        match: (id) => /claude/i.test(id),
        cost: MODEL_COST_TABLE["claude-4.6-sonnet"],
    },
    {
        match: (id) => /composer-?2/i.test(id),
        cost: MODEL_COST_TABLE["composer-2"],
    },
    {
        match: (id) => /composer-?1\.5/i.test(id),
        cost: MODEL_COST_TABLE["composer-1.5"],
    },
    {
        match: (id) => /composer/i.test(id),
        cost: MODEL_COST_TABLE["composer-1"],
    },
    {
        match: (id) => /gpt-5\.4.*nano/i.test(id),
        cost: MODEL_COST_TABLE["gpt-5.4-nano"],
    },
    {
        match: (id) => /gpt-5\.4.*mini/i.test(id),
        cost: MODEL_COST_TABLE["gpt-5.4-mini"],
    },
    { match: (id) => /gpt-5\.4/i.test(id), cost: MODEL_COST_TABLE["gpt-5.4"] },
    {
        match: (id) => /gpt-5\.3/i.test(id),
        cost: MODEL_COST_TABLE["gpt-5.3-codex"],
    },
    { match: (id) => /gpt-5\.2/i.test(id), cost: MODEL_COST_TABLE["gpt-5.2"] },
    {
        match: (id) => /gpt-5\.1.*mini/i.test(id),
        cost: MODEL_COST_TABLE["gpt-5.1-codex-mini"],
    },
    {
        match: (id) => /gpt-5\.1/i.test(id),
        cost: MODEL_COST_TABLE["gpt-5.1-codex"],
    },
    {
        match: (id) => /gpt-5.*mini/i.test(id),
        cost: MODEL_COST_TABLE["gpt-5-mini"],
    },
    {
        match: (id) => /gpt-5.*fast/i.test(id),
        cost: MODEL_COST_TABLE["gpt-5-fast"],
    },
    { match: (id) => /gpt-5/i.test(id), cost: MODEL_COST_TABLE["gpt-5"] },
    {
        match: (id) => /gemini.*3\.1/i.test(id),
        cost: MODEL_COST_TABLE["gemini-3.1-pro"],
    },
    {
        match: (id) => /gemini.*3.*flash/i.test(id),
        cost: MODEL_COST_TABLE["gemini-3-flash"],
    },
    {
        match: (id) => /gemini.*3/i.test(id),
        cost: MODEL_COST_TABLE["gemini-3-pro"],
    },
    {
        match: (id) => /gemini.*flash/i.test(id),
        cost: MODEL_COST_TABLE["gemini-2.5-flash"],
    },
    {
        match: (id) => /gemini/i.test(id),
        cost: MODEL_COST_TABLE["gemini-3.1-pro"],
    },
    { match: (id) => /grok/i.test(id), cost: MODEL_COST_TABLE["grok-4.20"] },
    { match: (id) => /kimi/i.test(id), cost: MODEL_COST_TABLE["kimi-k2.5"] },
];
// Fallback cost for model IDs nothing else recognizes.
const DEFAULT_COST = {
    input: 3,
    output: 15,
    cache: { read: 0.3, write: 0 },
};
// Variant suffixes that do not affect pricing (reasoning effort / preview tags).
const VARIANT_SUFFIX_RE = /-(high|medium|low|preview|thinking|spark-preview)$/;
// Prototype-safe table lookup: a raw MODEL_COST_TABLE[id] would match
// inherited keys such as "constructor" and return a non-cost value.
function lookupCost(id) {
    return Object.prototype.hasOwnProperty.call(MODEL_COST_TABLE, id)
        ? MODEL_COST_TABLE[id]
        : undefined;
}
/**
 * Estimate the per-million-token cost for a Cursor model ID.
 *
 * Resolution order: exact table match → table match after stripping
 * variant suffixes → first matching regex pattern → DEFAULT_COST.
 *
 * @param {string} modelId Model identifier (any casing).
 * @returns {{input:number, output:number, cache:{read:number, write:number}}}
 */
export function estimateModelCost(modelId) {
    const normalized = modelId.toLowerCase();
    const exact = lookupCost(normalized);
    if (exact)
        return exact;
    // Strip chained suffixes ("-thinking-high") to a fixed point; the
    // previous single-pass replace anchored at $ removed only the last one.
    let stripped = normalized;
    while (VARIANT_SUFFIX_RE.test(stripped)) {
        stripped = stripped.replace(VARIANT_SUFFIX_RE, "");
    }
    const strippedMatch = lookupCost(stripped);
    if (strippedMatch)
        return strippedMatch;
    return (MODEL_COST_PATTERNS.find((pattern) => pattern.match(normalized))?.cost ??
        DEFAULT_COST);
}
@@ -0,0 +1,8 @@
1
import type { CursorModel } from "../models";
/** Minimal shape of an OpenCode provider object that carries a models map. */
export interface ProviderWithModels {
    /** Model ID → provider-model descriptor (see buildCursorProviderModels). */
    models?: Record<string, unknown>;
}
/** Assign `models` onto `provider`; a no-op when `provider` is not an object. */
export declare function setProviderModels(provider: unknown, models: Record<string, unknown>): void;
/**
 * Build the provider `models` map exposing each Cursor model as an
 * OpenAI-compatible model served by the local proxy on `port`.
 */
export declare function buildCursorProviderModels(models: CursorModel[], port: number): Record<string, unknown>;
/**
 * Build a stand-in provider config whose fetch always answers with a
 * 503 error carrying `message` (used when model discovery failed).
 */
export declare function buildDisabledProviderConfig(message: string): Record<string, unknown>;
/** Remove any Authorization header from `init`, returning the same object. */
export declare function stripAuthorizationHeader(init?: RequestInit): RequestInit | undefined;
@@ -0,0 +1,86 @@
1
+ import { CURSOR_PROVIDER_ID } from "../constants";
2
+ import { estimateModelCost } from "./model-cost";
3
+ export function setProviderModels(provider, models) {
4
+ if (!provider || typeof provider !== "object")
5
+ return;
6
+ provider.models = models;
7
+ }
8
/**
 * Build the OpenCode provider `models` map for the Cursor provider.
 * Every Cursor model is exposed as a text-only, tool-capable,
 * OpenAI-compatible model routed through the local proxy on `port`.
 */
export function buildCursorProviderModels(models, port) {
    const result = {};
    for (const model of models) {
        result[model.id] = describeProviderModel(model, port);
    }
    return result;
}
// Descriptor for a single provider model entry, matching the shape
// OpenCode expects for provider.models values.
function describeProviderModel(model, port) {
    return {
        id: model.id,
        providerID: CURSOR_PROVIDER_ID,
        api: {
            id: model.id,
            url: `http://localhost:${port}/v1`,
            npm: "@ai-sdk/openai-compatible",
        },
        name: model.name,
        capabilities: {
            temperature: true,
            reasoning: model.reasoning,
            attachment: false,
            toolcall: true,
            // Text in, text out; no media support through the proxy.
            input: {
                text: true,
                audio: false,
                image: false,
                video: false,
                pdf: false,
            },
            output: {
                text: true,
                audio: false,
                image: false,
                video: false,
                pdf: false,
            },
            interleaved: false,
        },
        cost: estimateModelCost(model.id),
        limit: {
            context: model.contextWindow,
            output: model.maxTokens,
        },
        status: "active",
        options: {},
        headers: {},
        release_date: "",
        variants: {},
    };
}
54
+ export function buildDisabledProviderConfig(message) {
55
+ return {
56
+ baseURL: "http://127.0.0.1/cursor-disabled/v1",
57
+ apiKey: "cursor-disabled",
58
+ async fetch() {
59
+ return new Response(JSON.stringify({
60
+ error: {
61
+ message,
62
+ type: "server_error",
63
+ code: "cursor_model_discovery_failed",
64
+ },
65
+ }), {
66
+ status: 503,
67
+ headers: { "Content-Type": "application/json" },
68
+ });
69
+ },
70
+ };
71
+ }
72
+ export function stripAuthorizationHeader(init) {
73
+ if (!init?.headers)
74
+ return init;
75
+ if (init.headers instanceof Headers) {
76
+ init.headers.delete("authorization");
77
+ return init;
78
+ }
79
+ if (Array.isArray(init.headers)) {
80
+ init.headers = init.headers.filter(([key]) => key.toLowerCase() !== "authorization");
81
+ return init;
82
+ }
83
+ delete init.headers["authorization"];
84
+ delete init.headers["Authorization"];
85
+ return init;
86
+ }
@@ -0,0 +1,6 @@
1
import type { CursorSession } from "../cursor/bidi-session";
/**
 * Create a controller deciding when to close a Cursor bridge session.
 * See bridge-close-controller.js: the bridge is ended once both turn-end
 * and checkpoint signals arrive, or after a grace period when only the
 * turn-end was seen.
 */
export declare function createBridgeCloseController(bridge: CursorSession): {
    /** Signal that the assistant turn has ended. */
    noteTurnEnded: () => void;
    /** Signal that a conversation checkpoint was received. */
    noteCheckpoint: () => void;
    /** Cancel any pending close timer (call on bridge close). */
    dispose: () => void;
};
@@ -0,0 +1,37 @@
1
+ import { scheduleBridgeEnd } from "./stream-dispatch";
2
// Grace period before closing when a turn ends without a checkpoint yet.
const TURN_END_GRACE_MS = 750;
/**
 * Coordinates closing a Cursor bridge session.
 *
 * The bridge is ended as soon as BOTH signals have been seen (turn-end
 * and checkpoint, in either order). If only the turn-end arrives, a
 * grace timer closes the bridge after TURN_END_GRACE_MS so a late
 * checkpoint still has a chance to land first.
 */
export function createBridgeCloseController(bridge) {
    let sawTurnEnd = false;
    let sawCheckpoint = false;
    let graceTimer;
    const cancelGraceTimer = () => {
        if (graceTimer === undefined)
            return;
        clearTimeout(graceTimer);
        graceTimer = undefined;
    };
    const endBridge = () => {
        cancelGraceTimer();
        scheduleBridgeEnd(bridge);
    };
    return {
        noteTurnEnded() {
            sawTurnEnd = true;
            if (sawCheckpoint) {
                // Checkpoint already arrived — close right away.
                endBridge();
            }
            else {
                // Restart the grace window on every turn-end signal.
                cancelGraceTimer();
                graceTimer = setTimeout(endBridge, TURN_END_GRACE_MS);
            }
        },
        noteCheckpoint() {
            sawCheckpoint = true;
            if (sawTurnEnd) {
                endBridge();
            }
        },
        dispose() {
            cancelGraceTimer();
        },
    };
}
@@ -0,0 +1,3 @@
1
import type { ConversationRequestMetadata } from "./conversation-meta";
import type { CursorRequestPayload } from "./types";
/**
 * Run a Cursor request to completion and return a single (non-streaming)
 * OpenAI chat-completion JSON Response. See bridge-non-streaming.js.
 */
export declare function handleNonStreamingResponse(payload: CursorRequestPayload, accessToken: string, modelId: string, convKey: string, metadata: ConversationRequestMetadata): Promise<Response>;
@@ -0,0 +1,123 @@
1
+ import { fromBinary } from "@bufbuild/protobuf";
2
+ import { AgentServerMessageSchema } from "../proto/agent_pb";
3
+ import { errorDetails, logPluginError } from "../logger";
4
+ import { updateStoredConversationAfterCompletion } from "./conversation-state";
5
+ import { startBridge } from "./bridge-session";
6
+ import { updateConversationCheckpoint, syncStoredBlobStore, } from "./state-sync";
7
+ import { computeUsage, createConnectFrameParser, createThinkingTagFilter, parseConnectEndStream, processServerMessage, scheduleBridgeEnd, } from "./stream-dispatch";
8
+ import { createBridgeCloseController } from "./bridge-close-controller";
9
/**
 * Run a Cursor request to completion and wrap the collected result as a
 * single (non-streaming) OpenAI chat-completion JSON response.
 *
 * @param payload Prepared Cursor request (bytes, blob store, tools, …).
 * @param accessToken Cursor access token for the bridge session.
 * @param modelId Model ID echoed back in the completion body.
 * @param convKey Conversation key for state persistence.
 * @param metadata Per-request conversation metadata.
 * @returns HTTP Response with a chat.completion JSON body.
 */
export async function handleNonStreamingResponse(payload, accessToken, modelId, convKey, metadata) {
    // OpenAI-style completion ID: 28 hex-ish chars from a UUID.
    const completionId = `chatcmpl-${crypto.randomUUID().replace(/-/g, "").slice(0, 28)}`;
    const created = Math.floor(Date.now() / 1000);
    const result = await collectFullResponse(payload, accessToken, modelId, convKey, metadata);
    // Tool-call turns carry the calls instead of text content.
    const message = result.finishReason === "tool_calls"
        ? { role: "assistant", content: null, tool_calls: result.toolCalls }
        : { role: "assistant", content: result.text };
    const completion = {
        id: completionId,
        object: "chat.completion",
        created,
        model: modelId,
        choices: [
            {
                index: 0,
                message,
                finish_reason: result.finishReason,
            },
        ],
        usage: result.usage,
    };
    return new Response(JSON.stringify(completion), { headers: { "Content-Type": "application/json" } });
}
31
// Drive one full Cursor bridge exchange and resolve with the aggregated
// result: { text, usage, finishReason, toolCalls }. Rejects with any
// Connect end-stream / unsupported-message error captured during the
// exchange. Resolution happens in the bridge's onClose handler, so the
// returned promise settles only after the bridge actually closes.
async function collectFullResponse(payload, accessToken, modelId, convKey, metadata) {
    const { promise, resolve, reject } = Promise.withResolvers();
    let fullText = "";
    // Last error seen before close; checked in onClose to decide reject vs resolve.
    let endStreamError = null;
    const pendingToolCalls = [];
    const { bridge, heartbeatTimer } = await startBridge(accessToken, payload.requestBytes);
    const bridgeCloseController = createBridgeCloseController(bridge);
    // Mutable per-exchange state shared with processServerMessage.
    const state = {
        toolCallIndex: 0,
        pendingExecs: [],
        outputTokens: 0,
        totalTokens: 0,
    };
    // Filters thinking-tag markup out of the visible text stream.
    const tagFilter = createThinkingTagFilter();
    // NOTE(review): processServerMessage takes its handlers positionally —
    // the argument order below (text, tool-exec, checkpoint, turn-end,
    // unsupported-message, unsupported-exec) must match its signature.
    bridge.onData(createConnectFrameParser((messageBytes) => {
        try {
            const serverMessage = fromBinary(AgentServerMessageSchema, messageBytes);
            processServerMessage(serverMessage, payload.blobStore, payload.cloudRule, payload.mcpTools, (data) => bridge.write(data), state, (text, isThinking) => {
                // Accumulate only non-thinking text, after tag filtering.
                if (isThinking)
                    return;
                const { content } = tagFilter.process(text);
                fullText += content;
            }, (exec) => {
                // Tool call requested: record it and end the bridge; the
                // client is expected to resume with the tool result.
                pendingToolCalls.push({
                    id: exec.toolCallId,
                    type: "function",
                    function: {
                        name: exec.toolName,
                        arguments: exec.decodedArgs,
                    },
                });
                scheduleBridgeEnd(bridge);
            }, (checkpointBytes) => {
                // Persist the checkpoint and let the close controller know.
                updateConversationCheckpoint(convKey, checkpointBytes);
                bridgeCloseController.noteCheckpoint();
            }, () => bridgeCloseController.noteTurnEnded(), (info) => {
                // Unsupported server message: record the error and close.
                endStreamError = new Error(`Cursor returned unsupported ${info.category}: ${info.caseName}${info.detail ? ` (${info.detail})` : ""}`);
                logPluginError("Closing non-streaming Cursor bridge after unsupported message", {
                    modelId,
                    convKey,
                    category: info.category,
                    caseName: info.caseName,
                    detail: info.detail,
                });
                scheduleBridgeEnd(bridge);
            }, (info) => {
                // Unsupported exec request: record the error and close.
                endStreamError = new Error(`Cursor requested unsupported exec type: ${info.execCase}`);
                logPluginError("Closing non-streaming Cursor bridge after unsupported exec", {
                    modelId,
                    convKey,
                    execCase: info.execCase,
                    execId: info.execId,
                    execMsgId: info.execMsgId,
                });
                scheduleBridgeEnd(bridge);
            });
        }
        catch {
            // Skip unparseable messages.
        }
    }, (endStreamBytes) => {
        // Connect end-stream frame: may carry a protocol-level error.
        endStreamError = parseConnectEndStream(endStreamBytes);
        if (endStreamError) {
            logPluginError("Cursor non-streaming response returned Connect end-stream error", {
                modelId,
                convKey,
                ...errorDetails(endStreamError),
            });
        }
        scheduleBridgeEnd(bridge);
    }));
    bridge.onClose(() => {
        // Teardown: stop timers/heartbeat and persist blob-store state.
        bridgeCloseController.dispose();
        clearInterval(heartbeatTimer);
        syncStoredBlobStore(convKey, payload.blobStore);
        // Flush any text the tag filter was still buffering.
        const flushed = tagFilter.flush();
        fullText += flushed.content;
        if (endStreamError) {
            reject(endStreamError);
            return;
        }
        // Only a fully-completed (non-tool-call) turn updates the stored
        // conversation; tool-call turns will be resumed by the caller.
        if (pendingToolCalls.length === 0) {
            updateStoredConversationAfterCompletion(convKey, metadata, fullText);
        }
        resolve({
            text: fullText,
            usage: computeUsage(state),
            finishReason: pendingToolCalls.length > 0 ? "tool_calls" : "stop",
            toolCalls: pendingToolCalls,
        });
    });
    return promise;
}
@@ -0,0 +1,5 @@
1
import type { CursorSession } from "../cursor/bidi-session";
/**
 * Open a bidirectional Cursor session for `requestBytes` and start a
 * heartbeat interval on it. Callers own `heartbeatTimer` and must
 * clearInterval it when the bridge closes (see bridge-session.js).
 */
export declare function startBridge(accessToken: string, requestBytes: Uint8Array): Promise<{
    bridge: CursorSession;
    heartbeatTimer: NodeJS.Timeout;
}>;
@@ -0,0 +1,11 @@
1
+ import { createCursorSession } from "../cursor/bidi-session";
2
+ import { makeHeartbeatBytes } from "./stream-dispatch";
3
// Interval between heartbeat frames written to keep the session alive.
const HEARTBEAT_INTERVAL_MS = 5_000;
/**
 * Open a bidirectional Cursor session for the given request bytes and
 * start a heartbeat timer on it. The caller owns the returned timer and
 * must clearInterval it once the bridge closes.
 */
export async function startBridge(accessToken, requestBytes) {
    const session = await createCursorSession({
        accessToken,
        initialRequestBytes: requestBytes,
    });
    const sendHeartbeat = () => session.write(makeHeartbeatBytes());
    const heartbeatTimer = setInterval(sendHeartbeat, HEARTBEAT_INTERVAL_MS);
    return { bridge: session, heartbeatTimer };
}
@@ -0,0 +1,5 @@
1
import { type ToolResultInfo } from "../openai/messages";
import type { ConversationRequestMetadata } from "./conversation-meta";
import type { ActiveBridge, CursorRequestPayload } from "./types";
/**
 * Run a Cursor request and return a streaming (SSE) chat-completion
 * Response. Implementation lives in bridge-streaming.js (not shown here).
 */
export declare function handleStreamingResponse(payload: CursorRequestPayload, accessToken: string, modelId: string, bridgeKey: string, convKey: string, metadata: ConversationRequestMetadata): Promise<Response>;
/**
 * Resume an active bridge with tool results from the client.
 * NOTE(review): semantics inferred from the name/params only — the
 * implementing .js file is outside this view; verify there.
 */
export declare function handleToolResultResume(active: ActiveBridge, toolResults: ToolResultInfo[], bridgeKey: string, convKey: string): Promise<Response>;