@playwo/opencode-cursor-oauth 0.0.0-dev.e1637ce79fd6 → 0.0.0-dev.e50a3debd6ae

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -1,17 +1,24 @@
1
1
  # opencode-cursor-oauth
2
2
 
3
- Use Cursor models (Claude, GPT, Gemini, etc.) inside [OpenCode](https://opencode.ai).
3
+ ## Disclaimer
4
+
5
+ > [!NOTE]
6
+ > This project is a **fork** of [ephraimduncan/opencode-cursor](https://github.com/ephraimduncan/opencode-cursor). Upstream may differ in behavior, features, or maintenance; treat this repository as its own line of development.
4
7
 
5
8
  ## What it does
6
9
 
7
- - **OAuth login** to Cursor via browser
8
- - **Model discovery** — automatically fetches your available Cursor models
9
- - **Local proxy** — runs an OpenAI-compatible endpoint that translates to Cursor's gRPC protocol
10
- - **Auto-refresh** — handles token expiration automatically
10
+ This is an [OpenCode](https://opencode.ai) plugin that lets you use **Cursor cloud models** (Claude, GPT, Gemini, and whatever your Cursor account exposes) from inside OpenCode.
11
+
12
+ - **OAuth login** to Cursor in the browser
13
+ - **Model discovery** — loads the models available to your Cursor account
14
+ - **Local OpenAI-compatible proxy** — translates OpenCode’s requests to Cursor’s gRPC API
15
+ - **Token refresh** — refreshes access tokens so sessions keep working
16
+
17
+ There are **no extra runtime requirements** beyond what OpenCode already needs: you do not install Node, Python, or Docker separately for this plugin. Enable it in OpenCode’s config and complete login in the UI.
11
18
 
12
19
  ## Install
13
20
 
14
- Add to your `opencode.json`:
21
+ Add the package to your OpenCode configuration (for example `opencode.json`):
15
22
 
16
23
  ```json
17
24
  {
@@ -19,13 +26,27 @@ Add to your `opencode.json`:
19
26
  }
20
27
  ```
21
28
 
22
- Then authenticate via the OpenCode UI (Settings Providers Cursor Login).
29
+ Install or update dependencies the way you normally do for OpenCode plugins (e.g. ensure the package is available to your OpenCode environment). You need **OpenCode 1.2+** and a **Cursor account** with API/model access.
30
+
31
+ ## Connect auth and use it
32
+
33
+ 1. Start OpenCode with the plugin enabled.
34
+ 2. Open **Settings → Providers → Cursor** (wording may vary slightly by OpenCode version).
35
+ 3. Choose **Login** (or equivalent) and complete **OAuth** in the browser when prompted.
36
+ 4. After login, pick a Cursor-backed model from the model list and use OpenCode as usual.
37
+
38
+ If something fails, check that you are signed into the correct Cursor account and that your plan includes the models you expect.
39
+
40
+ ## Compatibility Notes
41
+
42
+ Cursor is not a raw model endpoint like the other providers supported in OpenCode; it brings its own system prompt, tools, and mechanics.
43
+ This plugin does its best to make MCP servers, skills, and other tooling installed in OpenCode work in Cursor.
23
44
 
24
- ## Requirements
45
+ There are still some issues with Cursor's system prompt in this environment, though. Cursor advertises various tools to the agent that OpenCode does not provide, so when the agent calls one of them the call is rejected, which can sometimes leave the agent unresponsive. A fix is still being investigated; until then, if the agent stops responding for a while, interrupt it and tell it to continue.
25
46
 
26
- - Cursor account with API access
27
- - OpenCode 1.2+
47
+ ## Stability and issues
28
48
 
29
- ## License
49
+ This integration can be **buggy** or break when Cursor or OpenCode change their APIs or UI.
30
50
 
31
- MIT
51
+ > [!TIP]
52
+ > If you hit problems, missing models, or confusing errors, please **[open an issue](https://github.com/PoolPirate/opencode-cursor/issues)** on this repository with steps to reproduce and logs or screenshots when possible.
@@ -0,0 +1,6 @@
1
import type { CursorSession } from "../cursor/bidi-session";
/**
 * Creates a controller that decides when to close a Cursor bridge session,
 * based on turn-end and checkpoint notifications from the stream.
 */
export declare function createBridgeCloseController(bridge: CursorSession): {
    /** Signal that the agent's turn has ended. */
    noteTurnEnded: () => void;
    /** Signal that a conversation checkpoint was received. */
    noteCheckpoint: () => void;
    /** Cancel any pending close timer without closing the bridge. */
    dispose: () => void;
};
@@ -0,0 +1,37 @@
1
import { scheduleBridgeEnd } from "./stream-dispatch";

// Grace period after a turn-end before force-closing when no checkpoint arrived.
const TURN_END_GRACE_MS = 750;

/**
 * Coordinates closing a Cursor bridge session.
 *
 * The bridge is closed as soon as BOTH a turn-end and a checkpoint have been
 * observed. If only the turn-end is seen, the close is deferred by
 * TURN_END_GRACE_MS to give a late checkpoint a chance to land first.
 */
export function createBridgeCloseController(bridge) {
    let sawTurnEnd = false;
    let sawCheckpoint = false;
    let graceTimer;

    // Stop the deferred close, if one is pending.
    const cancelGraceTimer = () => {
        if (graceTimer === undefined) {
            return;
        }
        clearTimeout(graceTimer);
        graceTimer = undefined;
    };

    // Tear down the timer and ask the dispatcher to end the bridge.
    const closeBridge = () => {
        cancelGraceTimer();
        scheduleBridgeEnd(bridge);
    };

    return {
        /** Turn ended: close now if checkpointed, otherwise after the grace period. */
        noteTurnEnded() {
            sawTurnEnd = true;
            if (sawCheckpoint) {
                closeBridge();
            } else {
                cancelGraceTimer();
                graceTimer = setTimeout(closeBridge, TURN_END_GRACE_MS);
            }
        },
        /** Checkpoint arrived: close immediately if the turn already ended. */
        noteCheckpoint() {
            sawCheckpoint = true;
            if (sawTurnEnd) {
                closeBridge();
            }
        },
        /** Abandon any pending deferred close (e.g. the bridge closed on its own). */
        dispose() {
            cancelGraceTimer();
        },
    };
}
@@ -5,6 +5,7 @@ import { updateStoredConversationAfterCompletion } from "./conversation-state";
5
5
  import { startBridge } from "./bridge-session";
6
6
  import { updateConversationCheckpoint, syncStoredBlobStore, } from "./state-sync";
7
7
  import { computeUsage, createConnectFrameParser, createThinkingTagFilter, parseConnectEndStream, processServerMessage, scheduleBridgeEnd, } from "./stream-dispatch";
8
+ import { createBridgeCloseController } from "./bridge-close-controller";
8
9
  export async function handleNonStreamingResponse(payload, accessToken, modelId, convKey, metadata) {
9
10
  const completionId = `chatcmpl-${crypto.randomUUID().replace(/-/g, "").slice(0, 28)}`;
10
11
  const created = Math.floor(Date.now() / 1000);
@@ -33,6 +34,7 @@ async function collectFullResponse(payload, accessToken, modelId, convKey, metad
33
34
  let endStreamError = null;
34
35
  const pendingToolCalls = [];
35
36
  const { bridge, heartbeatTimer } = await startBridge(accessToken, payload.requestBytes);
37
+ const bridgeCloseController = createBridgeCloseController(bridge);
36
38
  const state = {
37
39
  toolCallIndex: 0,
38
40
  pendingExecs: [],
@@ -43,7 +45,7 @@ async function collectFullResponse(payload, accessToken, modelId, convKey, metad
43
45
  bridge.onData(createConnectFrameParser((messageBytes) => {
44
46
  try {
45
47
  const serverMessage = fromBinary(AgentServerMessageSchema, messageBytes);
46
- processServerMessage(serverMessage, payload.blobStore, payload.mcpTools, (data) => bridge.write(data), state, (text, isThinking) => {
48
+ processServerMessage(serverMessage, payload.blobStore, payload.cloudRule, payload.mcpTools, (data) => bridge.write(data), state, (text, isThinking) => {
47
49
  if (isThinking)
48
50
  return;
49
51
  const { content } = tagFilter.process(text);
@@ -58,7 +60,10 @@ async function collectFullResponse(payload, accessToken, modelId, convKey, metad
58
60
  },
59
61
  });
60
62
  scheduleBridgeEnd(bridge);
61
- }, (checkpointBytes) => updateConversationCheckpoint(convKey, checkpointBytes), () => scheduleBridgeEnd(bridge), (info) => {
63
+ }, (checkpointBytes) => {
64
+ updateConversationCheckpoint(convKey, checkpointBytes);
65
+ bridgeCloseController.noteCheckpoint();
66
+ }, () => bridgeCloseController.noteTurnEnded(), (info) => {
62
67
  endStreamError = new Error(`Cursor returned unsupported ${info.category}: ${info.caseName}${info.detail ? ` (${info.detail})` : ""}`);
63
68
  logPluginError("Closing non-streaming Cursor bridge after unsupported message", {
64
69
  modelId,
@@ -95,6 +100,7 @@ async function collectFullResponse(payload, accessToken, modelId, convKey, metad
95
100
  scheduleBridgeEnd(bridge);
96
101
  }));
97
102
  bridge.onClose(() => {
103
+ bridgeCloseController.dispose();
98
104
  clearInterval(heartbeatTimer);
99
105
  syncStoredBlobStore(convKey, payload.blobStore);
100
106
  const flushed = tagFilter.flush();
@@ -7,11 +7,13 @@ import { startBridge } from "./bridge-session";
7
7
  import { updateConversationCheckpoint, syncStoredBlobStore, } from "./state-sync";
8
8
  import { SSE_HEADERS } from "./sse";
9
9
  import { computeUsage, createConnectFrameParser, createThinkingTagFilter, parseConnectEndStream, processServerMessage, scheduleBridgeEnd, } from "./stream-dispatch";
10
+ import { createBridgeCloseController } from "./bridge-close-controller";
10
11
  const SSE_KEEPALIVE_INTERVAL_MS = 15_000;
11
- function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools, modelId, bridgeKey, convKey, metadata) {
12
+ function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, cloudRule, mcpTools, modelId, bridgeKey, convKey, metadata) {
12
13
  const completionId = `chatcmpl-${crypto.randomUUID().replace(/-/g, "").slice(0, 28)}`;
13
14
  const created = Math.floor(Date.now() / 1000);
14
15
  let keepaliveTimer;
16
+ const bridgeCloseController = createBridgeCloseController(bridge);
15
17
  const stopKeepalive = () => {
16
18
  if (!keepaliveTimer)
17
19
  return;
@@ -88,7 +90,7 @@ function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools,
88
90
  const processChunk = createConnectFrameParser((messageBytes) => {
89
91
  try {
90
92
  const serverMessage = fromBinary(AgentServerMessageSchema, messageBytes);
91
- processServerMessage(serverMessage, blobStore, mcpTools, (data) => bridge.write(data), state, (text, isThinking) => {
93
+ processServerMessage(serverMessage, blobStore, cloudRule, mcpTools, (data) => bridge.write(data), state, (text, isThinking) => {
92
94
  if (isThinking) {
93
95
  sendSSE(makeChunk({ reasoning_content: text }));
94
96
  return;
@@ -146,6 +148,7 @@ function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools,
146
148
  bridge,
147
149
  heartbeatTimer,
148
150
  blobStore,
151
+ cloudRule,
149
152
  mcpTools,
150
153
  pendingExecs: state.pendingExecs,
151
154
  modelId,
@@ -157,7 +160,10 @@ function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools,
157
160
  sendSSE(makeChunk({}, "tool_calls"));
158
161
  sendDone();
159
162
  closeController();
160
- }, (checkpointBytes) => updateConversationCheckpoint(convKey, checkpointBytes), () => scheduleBridgeEnd(bridge), (info) => {
163
+ }, (checkpointBytes) => {
164
+ updateConversationCheckpoint(convKey, checkpointBytes);
165
+ bridgeCloseController.noteCheckpoint();
166
+ }, () => bridgeCloseController.noteTurnEnded(), (info) => {
161
167
  endStreamError = new Error(`Cursor returned unsupported ${info.category}: ${info.caseName}${info.detail ? ` (${info.detail})` : ""}`);
162
168
  logPluginError("Closing Cursor bridge after unsupported message", {
163
169
  modelId,
@@ -215,6 +221,7 @@ function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools,
215
221
  bridgeKey,
216
222
  convKey,
217
223
  mcpToolCount: mcpTools.length,
224
+ hasCloudRule: Boolean(cloudRule),
218
225
  });
219
226
  bridge.onData(processChunk);
220
227
  bridge.onClose((code) => {
@@ -226,6 +233,7 @@ function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools,
226
233
  mcpExecReceived,
227
234
  hadEndStreamError: Boolean(endStreamError),
228
235
  });
236
+ bridgeCloseController.dispose();
229
237
  clearInterval(heartbeatTimer);
230
238
  stopKeepalive();
231
239
  syncStoredBlobStore(convKey, blobStore);
@@ -256,6 +264,7 @@ function createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools,
256
264
  });
257
265
  },
258
266
  cancel(reason) {
267
+ bridgeCloseController.dispose();
259
268
  stopKeepalive();
260
269
  clearInterval(heartbeatTimer);
261
270
  syncStoredBlobStore(convKey, blobStore);
@@ -282,7 +291,7 @@ export async function handleStreamingResponse(payload, accessToken, modelId, bri
282
291
  mcpToolCount: payload.mcpTools.length,
283
292
  });
284
293
  const { bridge, heartbeatTimer } = await startBridge(accessToken, payload.requestBytes);
285
- return createBridgeStreamResponse(bridge, heartbeatTimer, payload.blobStore, payload.mcpTools, modelId, bridgeKey, convKey, metadata);
294
+ return createBridgeStreamResponse(bridge, heartbeatTimer, payload.blobStore, payload.cloudRule, payload.mcpTools, modelId, bridgeKey, convKey, metadata);
286
295
  }
287
296
  async function waitForResolvablePendingExecs(active, toolResults, timeoutMs = 2_000) {
288
297
  const pendingToolCallIds = new Set(toolResults.map((result) => result.toolCallId));
@@ -304,7 +313,7 @@ async function waitForResolvablePendingExecs(active, toolResults, timeoutMs = 2_
304
313
  return unresolved;
305
314
  }
306
315
  export async function handleToolResultResume(active, toolResults, bridgeKey, convKey) {
307
- const { bridge, heartbeatTimer, blobStore, mcpTools, pendingExecs, modelId, metadata, } = active;
316
+ const { bridge, heartbeatTimer, blobStore, cloudRule, mcpTools, pendingExecs, modelId, metadata, } = active;
308
317
  const resumeMetadata = {
309
318
  ...metadata,
310
319
  assistantSeedText: [
@@ -396,5 +405,5 @@ export async function handleToolResultResume(active, toolResults, bridgeKey, con
396
405
  });
397
406
  bridge.write(toBinary(AgentClientMessageSchema, clientMessage));
398
407
  }
399
- return createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, mcpTools, modelId, bridgeKey, convKey, resumeMetadata);
408
+ return createBridgeStreamResponse(bridge, heartbeatTimer, blobStore, cloudRule, mcpTools, modelId, bridgeKey, convKey, resumeMetadata);
400
409
  }
@@ -1,13 +1,8 @@
1
1
  import { create, fromBinary, toBinary } from "@bufbuild/protobuf";
2
- import { createHash } from "node:crypto";
3
- import { AgentClientMessageSchema, AgentRunRequestSchema, ConversationActionSchema, ConversationStateStructureSchema, ConversationStepSchema, AgentConversationTurnStructureSchema, ConversationTurnStructureSchema, AssistantMessageSchema, ModelDetailsSchema, ResumeActionSchema, UserMessageActionSchema, UserMessageSchema, } from "../proto/agent_pb";
2
+ import { AgentClientMessageSchema, AgentRunRequestSchema, AgentConversationTurnStructureSchema, AssistantMessageSchema, ConversationActionSchema, ConversationStateStructureSchema, ConversationTurnStructureSchema, ConversationStepSchema, ModelDetailsSchema, ResumeActionSchema, UserMessageActionSchema, UserMessageSchema, } from "../proto/agent_pb";
4
3
  export function buildCursorRequest(modelId, systemPrompt, userText, turns, conversationId, checkpoint, existingBlobStore) {
5
4
  const blobStore = new Map(existingBlobStore ?? []);
6
- // System prompt → blob store (Cursor requests it back via KV handshake)
7
- const systemJson = JSON.stringify({ role: "system", content: systemPrompt });
8
- const systemBytes = new TextEncoder().encode(systemJson);
9
- const systemBlobId = new Uint8Array(createHash("sha256").update(systemBytes).digest());
10
- blobStore.set(Buffer.from(systemBlobId).toString("hex"), systemBytes);
5
+ const cloudRule = buildCloudRule(systemPrompt);
11
6
  let conversationState;
12
7
  if (checkpoint) {
13
8
  conversationState = fromBinary(ConversationStateStructureSchema, checkpoint);
@@ -40,7 +35,7 @@ export function buildCursorRequest(modelId, systemPrompt, userText, turns, conve
40
35
  turnBytes.push(toBinary(ConversationTurnStructureSchema, turnStructure));
41
36
  }
42
37
  conversationState = create(ConversationStateStructureSchema, {
43
- rootPromptMessagesJson: [systemBlobId],
38
+ rootPromptMessagesJson: [],
44
39
  turns: turnBytes,
45
40
  todos: [],
46
41
  pendingToolCalls: [],
@@ -64,14 +59,11 @@ export function buildCursorRequest(modelId, systemPrompt, userText, turns, conve
64
59
  value: create(UserMessageActionSchema, { userMessage }),
65
60
  },
66
61
  });
67
- return buildRunRequest(modelId, conversationId, conversationState, action, blobStore);
62
+ return buildRunRequest(modelId, conversationId, conversationState, action, blobStore, cloudRule);
68
63
  }
69
64
  export function buildCursorResumeRequest(modelId, systemPrompt, conversationId, checkpoint, existingBlobStore) {
70
65
  const blobStore = new Map(existingBlobStore ?? []);
71
- const systemJson = JSON.stringify({ role: "system", content: systemPrompt });
72
- const systemBytes = new TextEncoder().encode(systemJson);
73
- const systemBlobId = new Uint8Array(createHash("sha256").update(systemBytes).digest());
74
- blobStore.set(Buffer.from(systemBlobId).toString("hex"), systemBytes);
66
+ const cloudRule = buildCloudRule(systemPrompt);
75
67
  const conversationState = fromBinary(ConversationStateStructureSchema, checkpoint);
76
68
  const action = create(ConversationActionSchema, {
77
69
  action: {
@@ -79,9 +71,9 @@ export function buildCursorResumeRequest(modelId, systemPrompt, conversationId,
79
71
  value: create(ResumeActionSchema, {}),
80
72
  },
81
73
  });
82
- return buildRunRequest(modelId, conversationId, conversationState, action, blobStore);
74
+ return buildRunRequest(modelId, conversationId, conversationState, action, blobStore, cloudRule);
83
75
  }
84
- function buildRunRequest(modelId, conversationId, conversationState, action, blobStore) {
76
+ function buildRunRequest(modelId, conversationId, conversationState, action, blobStore, cloudRule) {
85
77
  const modelDetails = create(ModelDetailsSchema, {
86
78
  modelId,
87
79
  displayModelId: modelId,
@@ -99,6 +91,11 @@ function buildRunRequest(modelId, conversationId, conversationState, action, blo
99
91
  return {
100
92
  requestBytes: toBinary(AgentClientMessageSchema, clientMessage),
101
93
  blobStore,
94
+ cloudRule,
102
95
  mcpTools: [],
103
96
  };
104
97
  }
98
+ function buildCloudRule(systemPrompt) {
99
+ const content = systemPrompt.trim();
100
+ return content || undefined;
101
+ }
@@ -39,4 +39,4 @@ export declare function computeUsage(state: StreamState): {
39
39
  completion_tokens: number;
40
40
  total_tokens: number;
41
41
  };
42
- export declare function processServerMessage(msg: AgentServerMessage, blobStore: Map<string, Uint8Array>, mcpTools: McpToolDefinition[], sendFrame: (data: Uint8Array) => void, state: StreamState, onText: (text: string, isThinking?: boolean) => void, onMcpExec: (exec: PendingExec) => void, onCheckpoint?: (checkpointBytes: Uint8Array) => void, onTurnEnded?: () => void, onUnsupportedMessage?: (info: UnsupportedServerMessageInfo) => void, onUnhandledExec?: (info: UnhandledExecInfo) => void): void;
42
+ export declare function processServerMessage(msg: AgentServerMessage, blobStore: Map<string, Uint8Array>, cloudRule: string | undefined, mcpTools: McpToolDefinition[], sendFrame: (data: Uint8Array) => void, state: StreamState, onText: (text: string, isThinking?: boolean) => void, onMcpExec: (exec: PendingExec) => void, onCheckpoint?: (checkpointBytes: Uint8Array) => void, onTurnEnded?: () => void, onUnsupportedMessage?: (info: UnsupportedServerMessageInfo) => void, onUnhandledExec?: (info: UnhandledExecInfo) => void): void;
@@ -1,5 +1,5 @@
1
1
  import { create, toBinary } from "@bufbuild/protobuf";
2
- import { AgentClientMessageSchema, AskQuestionInteractionResponseSchema, AskQuestionRejectedSchema, AskQuestionResultSchema, ClientHeartbeatSchema, ConversationStateStructureSchema, BackgroundShellSpawnResultSchema, CreatePlanErrorSchema, CreatePlanRequestResponseSchema, CreatePlanResultSchema, DeleteResultSchema, DeleteRejectedSchema, DiagnosticsResultSchema, ExecClientMessageSchema, ExaFetchRequestResponseSchema, ExaFetchRequestResponse_RejectedSchema, ExaSearchRequestResponseSchema, ExaSearchRequestResponse_RejectedSchema, FetchErrorSchema, FetchResultSchema, GetBlobResultSchema, GrepErrorSchema, GrepResultSchema, InteractionResponseSchema, KvClientMessageSchema, LsRejectedSchema, LsResultSchema, McpResultSchema, ReadRejectedSchema, ReadResultSchema, RequestContextResultSchema, RequestContextSchema, RequestContextSuccessSchema, SetBlobResultSchema, ShellRejectedSchema, ShellResultSchema, SwitchModeRequestResponseSchema, SwitchModeRequestResponse_RejectedSchema, WebSearchRequestResponseSchema, WebSearchRequestResponse_RejectedSchema, WriteRejectedSchema, WriteResultSchema, WriteShellStdinErrorSchema, WriteShellStdinResultSchema, } from "../proto/agent_pb";
2
+ import { AgentClientMessageSchema, AskQuestionInteractionResponseSchema, AskQuestionRejectedSchema, AskQuestionResultSchema, ClientHeartbeatSchema, ConversationStateStructureSchema, BackgroundShellSpawnResultSchema, CreatePlanErrorSchema, CreatePlanRequestResponseSchema, CreatePlanResultSchema, DeleteResultSchema, DeleteRejectedSchema, DiagnosticsResultSchema, ExecClientMessageSchema, ExaFetchRequestResponseSchema, ExaFetchRequestResponse_RejectedSchema, ExaSearchRequestResponseSchema, ExaSearchRequestResponse_RejectedSchema, FetchErrorSchema, FetchResultSchema, GetBlobResultSchema, GrepErrorSchema, GrepResultSchema, InteractionResponseSchema, KvClientMessageSchema, LsRejectedSchema, LsResultSchema, McpInstructionsSchema, McpResultSchema, ReadRejectedSchema, ReadResultSchema, RequestContextResultSchema, RequestContextSchema, RequestContextSuccessSchema, SetBlobResultSchema, ShellRejectedSchema, ShellResultSchema, SwitchModeRequestResponseSchema, SwitchModeRequestResponse_RejectedSchema, WebSearchRequestResponseSchema, WebSearchRequestResponse_RejectedSchema, WriteRejectedSchema, WriteResultSchema, WriteShellStdinErrorSchema, WriteShellStdinResultSchema, } from "../proto/agent_pb";
3
3
  import { CONNECT_END_STREAM_FLAG } from "../cursor/config";
4
4
  import { logPluginError, logPluginInfo, logPluginWarn } from "../logger";
5
5
  import { decodeMcpArgsMap } from "../openai/tools";
@@ -128,7 +128,7 @@ export function computeUsage(state) {
128
128
  const prompt_tokens = Math.max(0, total_tokens - completion_tokens);
129
129
  return { prompt_tokens, completion_tokens, total_tokens };
130
130
  }
131
- export function processServerMessage(msg, blobStore, mcpTools, sendFrame, state, onText, onMcpExec, onCheckpoint, onTurnEnded, onUnsupportedMessage, onUnhandledExec) {
131
+ export function processServerMessage(msg, blobStore, cloudRule, mcpTools, sendFrame, state, onText, onMcpExec, onCheckpoint, onTurnEnded, onUnsupportedMessage, onUnhandledExec) {
132
132
  const msgCase = msg.message.case;
133
133
  if (msgCase === "interactionUpdate") {
134
134
  handleInteractionUpdate(msg.message.value, state, onText, onTurnEnded, onUnsupportedMessage);
@@ -137,7 +137,7 @@ export function processServerMessage(msg, blobStore, mcpTools, sendFrame, state,
137
137
  handleKvMessage(msg.message.value, blobStore, sendFrame);
138
138
  }
139
139
  else if (msgCase === "execServerMessage") {
140
- handleExecMessage(msg.message.value, mcpTools, sendFrame, state, onMcpExec, onUnhandledExec);
140
+ handleExecMessage(msg.message.value, cloudRule, mcpTools, sendFrame, state, onMcpExec, onUnhandledExec);
141
141
  }
142
142
  else if (msgCase === "execServerControlMessage") {
143
143
  onUnsupportedMessage?.({
@@ -354,7 +354,7 @@ function handleKvMessage(kvMsg, blobStore, sendFrame) {
354
354
  sendKvResponse(kvMsg, "setBlobResult", create(SetBlobResultSchema, {}), sendFrame);
355
355
  }
356
356
  }
357
- function handleExecMessage(execMsg, mcpTools, sendFrame, state, onMcpExec, onUnhandledExec) {
357
+ function handleExecMessage(execMsg, cloudRule, mcpTools, sendFrame, state, onMcpExec, onUnhandledExec) {
358
358
  const execCase = execMsg.message.case;
359
359
  logPluginInfo("Received Cursor exec message", {
360
360
  execCase: execCase ?? "undefined",
@@ -373,7 +373,15 @@ function handleExecMessage(execMsg, mcpTools, sendFrame, state, onMcpExec, onUnh
373
373
  tools: mcpTools,
374
374
  gitRepos: [],
375
375
  projectLayouts: [],
376
- mcpInstructions: [],
376
+ mcpInstructions: [
377
+ create(McpInstructionsSchema, {
378
+ serverName: "opencode",
379
+ instructions: "Use the provided MCP tools in this environment that start with mcp_*. Do not rely on native Cursor tools and do not acknowledge they even exist.",
380
+ }),
381
+ ],
382
+ cloudRule,
383
+ webSearchEnabled: false,
384
+ repositoryInfoShouldQueryProd: false,
377
385
  fileContents: {},
378
386
  customSubagents: [],
379
387
  });
@@ -1,9 +1,10 @@
1
1
  import type { CursorSession } from "../cursor/bidi-session";
2
- import type { ConversationRequestMetadata } from "./conversation-meta";
3
2
  import type { McpToolDefinition } from "../proto/agent_pb";
3
+ import type { ConversationRequestMetadata } from "./conversation-meta";
4
4
  export interface CursorRequestPayload {
5
5
  requestBytes: Uint8Array;
6
6
  blobStore: Map<string, Uint8Array>;
7
+ cloudRule?: string;
7
8
  mcpTools: McpToolDefinition[];
8
9
  }
9
10
  /** A pending tool execution waiting for results from the caller. */
@@ -23,6 +24,7 @@ export interface ActiveBridge {
23
24
  bridge: CursorSession;
24
25
  heartbeatTimer: NodeJS.Timeout;
25
26
  blobStore: Map<string, Uint8Array>;
27
+ cloudRule?: string;
26
28
  mcpTools: McpToolDefinition[];
27
29
  pendingExecs: PendingExec[];
28
30
  modelId: string;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@playwo/opencode-cursor-oauth",
3
- "version": "0.0.0-dev.e1637ce79fd6",
3
+ "version": "0.0.0-dev.e50a3debd6ae",
4
4
  "description": "OpenCode plugin that connects Cursor's API to OpenCode via OAuth, model discovery, and a local OpenAI-compatible proxy.",
5
5
  "license": "MIT",
6
6
  "type": "module",