cursor-api-proxy 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80) hide show
  1. package/README.md +219 -0
  2. package/dist/cli.d.ts +5 -0
  3. package/dist/cli.js +53 -0
  4. package/dist/cli.js.map +1 -0
  5. package/dist/client.d.ts +95 -0
  6. package/dist/client.js +316 -0
  7. package/dist/client.js.map +1 -0
  8. package/dist/lib/agent-cmd-args.d.ts +5 -0
  9. package/dist/lib/agent-cmd-args.js +24 -0
  10. package/dist/lib/agent-cmd-args.js.map +1 -0
  11. package/dist/lib/agent-runner.d.ts +12 -0
  12. package/dist/lib/agent-runner.js +38 -0
  13. package/dist/lib/agent-runner.js.map +1 -0
  14. package/dist/lib/anthropic.d.ts +26 -0
  15. package/dist/lib/anthropic.js +59 -0
  16. package/dist/lib/anthropic.js.map +1 -0
  17. package/dist/lib/cli-stream-parser.d.ts +8 -0
  18. package/dist/lib/cli-stream-parser.js +46 -0
  19. package/dist/lib/cli-stream-parser.js.map +1 -0
  20. package/dist/lib/config.d.ts +28 -0
  21. package/dist/lib/config.js +24 -0
  22. package/dist/lib/config.js.map +1 -0
  23. package/dist/lib/cursor-cli.d.ts +9 -0
  24. package/dist/lib/cursor-cli.js +30 -0
  25. package/dist/lib/cursor-cli.js.map +1 -0
  26. package/dist/lib/cursorCli.d.ts +9 -0
  27. package/dist/lib/cursorCli.js +30 -0
  28. package/dist/lib/cursorCli.js.map +1 -0
  29. package/dist/lib/env.d.ts +41 -0
  30. package/dist/lib/env.js +138 -0
  31. package/dist/lib/env.js.map +1 -0
  32. package/dist/lib/handlers/anthropic-messages.d.ts +9 -0
  33. package/dist/lib/handlers/anthropic-messages.js +124 -0
  34. package/dist/lib/handlers/anthropic-messages.js.map +1 -0
  35. package/dist/lib/handlers/chat-completions.d.ts +9 -0
  36. package/dist/lib/handlers/chat-completions.js +98 -0
  37. package/dist/lib/handlers/chat-completions.js.map +1 -0
  38. package/dist/lib/handlers/health.d.ts +7 -0
  39. package/dist/lib/handlers/health.js +15 -0
  40. package/dist/lib/handlers/health.js.map +1 -0
  41. package/dist/lib/handlers/models.d.ts +14 -0
  42. package/dist/lib/handlers/models.js +34 -0
  43. package/dist/lib/handlers/models.js.map +1 -0
  44. package/dist/lib/http.d.ts +5 -0
  45. package/dist/lib/http.js +32 -0
  46. package/dist/lib/http.js.map +1 -0
  47. package/dist/lib/max-mode-preflight.d.ts +5 -0
  48. package/dist/lib/max-mode-preflight.js +56 -0
  49. package/dist/lib/max-mode-preflight.js.map +1 -0
  50. package/dist/lib/model-map.d.ts +17 -0
  51. package/dist/lib/model-map.js +62 -0
  52. package/dist/lib/model-map.js.map +1 -0
  53. package/dist/lib/modelMap.d.ts +17 -0
  54. package/dist/lib/modelMap.js +62 -0
  55. package/dist/lib/modelMap.js.map +1 -0
  56. package/dist/lib/openai.d.ts +7 -0
  57. package/dist/lib/openai.js +59 -0
  58. package/dist/lib/openai.js.map +1 -0
  59. package/dist/lib/process.d.ts +19 -0
  60. package/dist/lib/process.js +92 -0
  61. package/dist/lib/process.js.map +1 -0
  62. package/dist/lib/request-listener.d.ts +7 -0
  63. package/dist/lib/request-listener.js +70 -0
  64. package/dist/lib/request-listener.js.map +1 -0
  65. package/dist/lib/request-log.d.ts +13 -0
  66. package/dist/lib/request-log.js +110 -0
  67. package/dist/lib/request-log.js.map +1 -0
  68. package/dist/lib/requestLog.d.ts +2 -0
  69. package/dist/lib/requestLog.js +19 -0
  70. package/dist/lib/requestLog.js.map +1 -0
  71. package/dist/lib/resolve-model.d.ts +8 -0
  72. package/dist/lib/resolve-model.js +18 -0
  73. package/dist/lib/resolve-model.js.map +1 -0
  74. package/dist/lib/server.d.ts +8 -0
  75. package/dist/lib/server.js +35 -0
  76. package/dist/lib/server.js.map +1 -0
  77. package/dist/lib/workspace.d.ts +6 -0
  78. package/dist/lib/workspace.js +15 -0
  79. package/dist/lib/workspace.js.map +1 -0
  80. package/package.json +50 -0
package/README.md ADDED
@@ -0,0 +1,219 @@
1
+ # cursor-api-proxy
2
+
3
+ OpenAI-compatible proxy for Cursor CLI. Expose Cursor models on localhost so any LLM client (OpenAI SDK, LiteLLM, LangChain, etc.) can call them as a standard chat API.
4
+
5
+ This package works as **one npm dependency**: use it as an **SDK** in your app to call the proxy API, and/or run the **CLI** to start the proxy server. Core behavior is unchanged.
6
+
7
+ ## Prerequisites (required for the proxy to work)
8
+
9
+ - **Node.js** 18+
10
+ - **Cursor agent CLI** (`agent`). This package does **not** install or bundle the CLI. You must install and set it up separately. This project is developed and tested with `agent` version **2026.02.27-e7d2ef6**.
11
+
12
+ ```bash
13
+ curl https://cursor.com/install -fsS | bash
14
+ agent login
15
+ agent --list-models
16
+ ```
17
+
18
+ For automation, set `CURSOR_API_KEY` instead of using `agent login`.
19
+
20
+ ## Install
21
+
22
+ **From npm (use as SDK in another project):**
23
+
24
+ ```bash
25
+ npm install cursor-api-proxy
26
+ ```
27
+
28
+ **From source (develop or run CLI locally):**
29
+
30
+ ```bash
31
+ git clone <this-repo>
32
+ cd cursor-api-proxy
33
+ npm install
34
+ npm run build
35
+ ```
36
+
37
+ ## Run the proxy (CLI)
38
+
39
+ Start the server so the API is available (e.g. for the SDK or any HTTP client):
40
+
41
+ ```bash
42
+ npx cursor-api-proxy
43
+ # or from repo: npm start / node dist/cli.js
44
+ ```
45
+
46
+ To expose on your network (e.g. Tailscale):
47
+
48
+ ```bash
49
+ npx cursor-api-proxy --tailscale
50
+ ```
51
+
52
+ By default the server listens on **http://127.0.0.1:8765**. Optionally set `CURSOR_BRIDGE_API_KEY` to require `Authorization: Bearer <key>` on requests.
53
+
54
+ ### HTTPS with Tailscale (MagicDNS)
55
+
56
+ To serve over HTTPS so browsers and clients trust the connection (e.g. `https://macbook.tail4048eb.ts.net:8765`):
57
+
58
+ 1. **Generate Tailscale certificates** on this machine (run from the project directory or where you want the cert files):
59
+
60
+ ```bash
61
+ sudo tailscale cert macbook.tail4048eb.ts.net
62
+ ```
63
+
64
+ This creates `macbook.tail4048eb.ts.net.crt` and `macbook.tail4048eb.ts.net.key` in the current directory.
65
+
66
+ 2. **Run the proxy with TLS** and optional Tailscale bind:
67
+
68
+ ```bash
69
+ export CURSOR_BRIDGE_API_KEY=your-secret
70
+ export CURSOR_BRIDGE_TLS_CERT=/path/to/macbook.tail4048eb.ts.net.crt
71
+ export CURSOR_BRIDGE_TLS_KEY=/path/to/macbook.tail4048eb.ts.net.key
72
+ # Bind to Tailscale IP so the service is only on the tailnet (optional):
73
+ export CURSOR_BRIDGE_HOST=100.123.47.103
74
+ npm start
75
+ ```
76
+
77
+ Or bind to all interfaces and use HTTPS:
78
+
79
+ ```bash
80
+ CURSOR_BRIDGE_TLS_CERT=./macbook.tail4048eb.ts.net.crt \
81
+ CURSOR_BRIDGE_TLS_KEY=./macbook.tail4048eb.ts.net.key \
82
+ CURSOR_BRIDGE_API_KEY=your-secret \
83
+ npm start -- --tailscale
84
+ ```
85
+
86
+ 3. **Access the API** from any device on your tailnet:
87
+
88
+ - Base URL: `https://macbook.tail4048eb.ts.net:8765/v1` (use your MagicDNS name and port)
89
+ - Browsers will show a padlock; no certificate warnings when using Tailscale-issued certs.
90
+
91
+ ## Use as SDK in another project
92
+
93
+ Install the package and ensure the **Cursor agent CLI is installed and set up** (see Prerequisites). When you use the SDK with the default URL, **the proxy starts in the background automatically** if it is not already running. You can still start it yourself with `npx cursor-api-proxy` or set `CURSOR_PROXY_URL` to point at an existing proxy (then the SDK will not start another).
94
+
95
+ - **Base URL**: `http://127.0.0.1:8765/v1` (override with `CURSOR_PROXY_URL` or options).
96
+ - **API key**: Use any value (e.g. `unused`), or set `CURSOR_BRIDGE_API_KEY` and pass it in options or env.
97
+ - **Disable auto-start**: Pass `startProxy: false` (or use a custom `baseUrl`) if you run the proxy yourself and don’t want the SDK to start it.
98
+ - **Shutdown behavior**: When the SDK starts the proxy, it also stops it automatically when the Node.js process exits or receives normal termination signals. `stopManagedProxy()` is still available if you want to shut it down earlier. `SIGKILL` cannot be intercepted.
99
+
100
+ ### Option A: OpenAI SDK + helper (recommended)
101
+
102
+ This is an optional consumer-side example. `openai` is not a dependency of `cursor-api-proxy`; install it only in the app where you want to use this example.
103
+
104
+ ```js
105
+ import OpenAI from "openai";
106
+ import { getOpenAIOptionsAsync } from "cursor-api-proxy";
107
+
108
+ const opts = await getOpenAIOptionsAsync(); // starts proxy if needed
109
+ const client = new OpenAI(opts);
110
+
111
+ const completion = await client.chat.completions.create({
112
+ model: "gpt-5.2",
113
+ messages: [{ role: "user", content: "Hello" }],
114
+ });
115
+ console.log(completion.choices[0].message.content);
116
+ ```
117
+
118
+ For a sync config without auto-start, use `getOpenAIOptions()` and ensure the proxy is already running.
119
+
120
+ ### Option B: Minimal client (no OpenAI SDK)
121
+
122
+ ```js
123
+ import { createCursorProxyClient } from "cursor-api-proxy";
124
+
125
+ const proxy = createCursorProxyClient(); // proxy starts on first request if needed
126
+ const data = await proxy.chatCompletionsCreate({
127
+ model: "auto",
128
+ messages: [{ role: "user", content: "Hello" }],
129
+ });
130
+ console.log(data.choices?.[0]?.message?.content);
131
+ ```
132
+
133
+ ### Option C: Raw OpenAI client (no SDK import from this package)
134
+
135
+ ```js
136
+ import OpenAI from "openai";
137
+
138
+ const client = new OpenAI({
139
+ baseURL: "http://127.0.0.1:8765/v1",
140
+ apiKey: process.env.CURSOR_BRIDGE_API_KEY || "unused",
141
+ });
142
+ // Start the proxy yourself (npx cursor-api-proxy) or use Option A/B for auto-start.
143
+ ```
144
+
145
+ ### Endpoints
146
+
147
+ | Method | Path | Description |
148
+ |--------|------|-------------|
149
+ | GET | `/health` | Server and config info |
150
+ | GET | `/v1/models` | List Cursor models (from `agent --list-models`) |
151
+ | POST | `/v1/chat/completions` | Chat completion (OpenAI shape; supports `stream: true`) |
152
+ | POST | `/v1/messages` | Anthropic Messages API (used by Claude Code; supports `stream: true`) |
153
+
154
+ ## Environment variables
155
+
156
+ Environment handling is centralized in one module. Aliases, defaults, path resolution, platform fallbacks, and `--tailscale` host behavior are resolved consistently before the server starts.
157
+
158
+ | Variable | Default | Description |
159
+ |----------|---------|-------------|
160
+ | `CURSOR_BRIDGE_HOST` | `127.0.0.1` | Bind address |
161
+ | `CURSOR_BRIDGE_PORT` | `8765` | Port |
162
+ | `CURSOR_BRIDGE_API_KEY` | — | If set, require `Authorization: Bearer <key>` on requests |
163
+ | `CURSOR_BRIDGE_WORKSPACE` | process cwd | Workspace directory for Cursor CLI |
164
+ | `CURSOR_BRIDGE_MODE` | — | Ignored; proxy always runs in **ask** (chat-only) mode so the CLI never creates or edits files. |
165
+ | `CURSOR_BRIDGE_DEFAULT_MODEL` | `auto` | Default model when request omits one |
166
+ | `CURSOR_BRIDGE_STRICT_MODEL` | `true` | Use last requested model when none specified |
167
+ | `CURSOR_BRIDGE_FORCE` | `false` | Pass `--force` to Cursor CLI |
168
+ | `CURSOR_BRIDGE_APPROVE_MCPS` | `false` | Pass `--approve-mcps` to Cursor CLI |
169
+ | `CURSOR_BRIDGE_TIMEOUT_MS` | `300000` | Timeout per completion (ms) |
170
+ | `CURSOR_BRIDGE_TLS_CERT` | — | Path to TLS certificate file (e.g. Tailscale cert). Use with `CURSOR_BRIDGE_TLS_KEY` for HTTPS. |
171
+ | `CURSOR_BRIDGE_TLS_KEY` | — | Path to TLS private key file. Use with `CURSOR_BRIDGE_TLS_CERT` for HTTPS. |
172
+ | `CURSOR_BRIDGE_SESSIONS_LOG` | `~/.cursor-api-proxy/sessions.log` | Path to log file; each request is appended as a line (timestamp, method, path, IP, status). |
173
+ | `CURSOR_BRIDGE_CHAT_ONLY_WORKSPACE` | `true` | When `true` (default), the CLI runs in an empty temp dir so it **cannot read or write your project**; pure chat only. Set to `false` to pass the real workspace (e.g. for `X-Cursor-Workspace`). |
174
+ | `CURSOR_BRIDGE_VERBOSE` | `false` | When `true`, print full request messages and response content to stdout for every completion (both stream and sync). |
175
+ | `CURSOR_BRIDGE_MAX_MODE` | `false` | When `true`, enable Cursor **Max Mode** for all requests (larger context window, higher tool-call limits). The proxy writes `maxMode: true` to `cli-config.json` before each run. Works when using `CURSOR_AGENT_NODE`/`CURSOR_AGENT_SCRIPT` or the default Windows `.cmd` layout (node.exe + index.js next to agent.cmd). |
176
+ | `CURSOR_AGENT_BIN` | `agent` | Path to Cursor CLI binary. Alias precedence: `CURSOR_AGENT_BIN`, then `CURSOR_CLI_BIN`, then `CURSOR_CLI_PATH`. |
177
+ | `CURSOR_AGENT_NODE` | — | **(Windows)** Path to Node.js executable. When set together with `CURSOR_AGENT_SCRIPT`, spawns Node directly instead of going through cmd.exe, bypassing the ~8191 character command line limit. |
178
+ | `CURSOR_AGENT_SCRIPT` | — | **(Windows)** Path to the agent script (e.g. `agent.cmd` or the underlying `.js`). Use with `CURSOR_AGENT_NODE` to bypass cmd.exe for long prompts. |
179
+
180
+ Notes:
181
+ - `--tailscale` changes the default host to `0.0.0.0` only when `CURSOR_BRIDGE_HOST` is not already set.
182
+ - Relative paths such as `CURSOR_BRIDGE_WORKSPACE`, `CURSOR_BRIDGE_SESSIONS_LOG`, `CURSOR_BRIDGE_TLS_CERT`, and `CURSOR_BRIDGE_TLS_KEY` are resolved from the current working directory.
183
+
184
+ #### Windows command line limit bypass
185
+
186
+ On Windows, cmd.exe has a ~8191 character limit on the command line. Long prompts passed as arguments can exceed this and cause the agent to fail. To avoid that, set both `CURSOR_AGENT_NODE` (path to `node.exe`) and `CURSOR_AGENT_SCRIPT` (path to the agent script). The proxy will then spawn Node directly with the script and args instead of using cmd.exe, avoiding the limit.
187
+
188
+ Example (adjust paths to your install):
189
+
190
+ ```bat
191
+ set CURSOR_AGENT_NODE=C:\Program Files\nodejs\node.exe
192
+ set CURSOR_AGENT_SCRIPT=C:\path\to\Cursor\resources\agent\agent.cmd
193
+ # or wherever your agent script lives
194
+ ```
195
+
196
+ CLI flags:
197
+
198
+ | Flag | Description |
199
+ |------|-------------|
200
+ | `--tailscale` | Bind to `0.0.0.0` for access from tailnet/LAN (unless `CURSOR_BRIDGE_HOST` is already set) |
201
+ | `-h`, `--help` | Show CLI usage |
202
+
203
+ Optional per-request override: send header `X-Cursor-Workspace: <path>` to use a different workspace for that request.
204
+
205
+ ## Streaming
206
+
207
+ The proxy supports `stream: true` on `POST /v1/chat/completions` and `POST /v1/messages`. It returns Server-Sent Events (SSE) in OpenAI’s streaming format. Cursor CLI emits incremental deltas plus a final full message; the proxy deduplicates output so clients receive each chunk only once.
208
+
209
+ **Test streaming:** from repo root, with the proxy running:
210
+
211
+ ```bash
212
+ node examples/test-stream.mjs
213
+ ```
214
+
215
+ See [examples/README.md](examples/README.md) for details.
216
+
217
+ ## License
218
+
219
+ MIT
package/dist/cli.d.ts ADDED
@@ -0,0 +1,5 @@
1
#!/usr/bin/env node
/**
 * Parses CLI arguments (process.argv already sliced past the node binary and
 * script path). Recognizes `--tailscale` and `-h`/`--help`; the implementation
 * throws an Error on any other argument.
 */
export declare function parseArgs(argv: string[]): {
    tailscale: boolean;
    help: boolean;
};
package/dist/cli.js ADDED
@@ -0,0 +1,53 @@
1
+ #!/usr/bin/env node
2
+ import fs from "node:fs";
3
+ import { fileURLToPath } from "node:url";
4
+ import pkg from "../package.json" with { type: "json" };
5
+ import { loadBridgeConfig } from "./lib/config.js";
6
+ import { startBridgeServer } from "./lib/server.js";
7
// Symlink-resolved absolute path of this module file.
const __filename = fileURLToPath(import.meta.url);
// Symlink-resolved path of the script Node was invoked with; empty string when
// process.argv[1] is absent (e.g. REPL or programmatic import). realpathSync is
// used so npm bin shims (symlinks) compare equal to the real file.
// NOTE(review): fs.realpathSync throws if the argv[1] path no longer exists —
// presumably acceptable for a CLI entry point; confirm for unusual loaders.
const realArgv1 = process.argv[1]
    ? fs.realpathSync(process.argv[1])
    : "";
// True only when this file is the entry script, so importing the module
// never starts the server as a side effect (see the guard at the bottom).
const isMainModule = realArgv1 === fs.realpathSync(__filename);
12
/**
 * Parse CLI arguments (argv already sliced past node and the script path).
 *
 * @param {string[]} argv raw CLI arguments
 * @returns {{ tailscale: boolean, help: boolean }} recognized flags
 * @throws {Error} on any argument other than --tailscale, --help, or -h
 */
export function parseArgs(argv) {
    const flags = { tailscale: false, help: false };
    for (const token of argv) {
        switch (token) {
            case "--tailscale":
                flags.tailscale = true;
                break;
            case "--help":
            case "-h":
                flags.help = true;
                break;
            default:
                throw new Error(`Unknown argument: ${token}`);
        }
    }
    return flags;
}
28
/**
 * Print CLI usage information to stdout, one line per console.log call.
 * NOTE(review): inner whitespace of the option lines may have been collapsed
 * by the package-diff extraction; confirm column alignment against the source.
 */
function printHelp() {
    const usage = [
        "cursor-api-proxy",
        "",
        "Usage:",
        " cursor-api-proxy [--tailscale]",
        "",
        "Options:",
        " --tailscale Bind to 0.0.0.0 for tailnet/LAN access",
        " -h, --help Show this help message",
    ];
    for (const line of usage) {
        console.log(line);
    }
}
38
/**
 * CLI entry point: parse flags, show help when requested, otherwise build
 * the bridge config and start the server.
 */
async function main() {
    const { help, tailscale } = parseArgs(process.argv.slice(2));
    if (help) {
        printHelp();
        return;
    }
    const config = loadBridgeConfig({ tailscale });
    startBridgeServer({ version: pkg.version, config });
}
47
// When executed directly from the command line (not imported as a module),
// run the CLI and exit non-zero on any unhandled failure.
if (isMainModule) {
    const onFatal = (error) => {
        console.error(error);
        process.exit(1);
    };
    main().catch(onFatal);
}
53
+ //# sourceMappingURL=cli.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"cli.js","sourceRoot":"","sources":["../src/cli.ts"],"names":[],"mappings":";AAEA,OAAO,EAAE,MAAM,SAAS,CAAC;AACzB,OAAO,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AAEzC,OAAO,GAAG,MAAM,iBAAiB,CAAC,OAAO,IAAI,EAAE,MAAM,EAAE,CAAC;AACxD,OAAO,EAAE,gBAAgB,EAAE,MAAM,iBAAiB,CAAC;AACnD,OAAO,EAAE,iBAAiB,EAAE,MAAM,iBAAiB,CAAC;AAEpD,MAAM,UAAU,GAAG,aAAa,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;AAClD,MAAM,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC;IAC/B,CAAC,CAAC,EAAE,CAAC,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClC,CAAC,CAAC,EAAE,CAAC;AACP,MAAM,YAAY,GAAG,SAAS,KAAK,EAAE,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC;AAE/D,MAAM,UAAU,SAAS,CAAC,IAAc;IACtC,IAAI,SAAS,GAAG,KAAK,CAAC;IACtB,IAAI,IAAI,GAAG,KAAK,CAAC;IAEjB,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE,CAAC;QACvB,IAAI,GAAG,KAAK,aAAa,EAAE,CAAC;YAC1B,SAAS,GAAG,IAAI,CAAC;YACjB,SAAS;QACX,CAAC;QACD,IAAI,GAAG,KAAK,QAAQ,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YACrC,IAAI,GAAG,IAAI,CAAC;YACZ,SAAS;QACX,CAAC;QACD,MAAM,IAAI,KAAK,CAAC,qBAAqB,GAAG,EAAE,CAAC,CAAC;IAC9C,CAAC;IAED,OAAO,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC;AAC7B,CAAC;AAED,SAAS,SAAS;IAChB,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC;IAChC,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;IAChB,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;IACtB,OAAO,CAAC,GAAG,CAAC,kCAAkC,CAAC,CAAC;IAChD,OAAO,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC;IAChB,OAAO,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC;IACxB,OAAO,CAAC,GAAG,CAAC,uDAAuD,CAAC,CAAC;IACrE,OAAO,CAAC,GAAG,CAAC,uCAAuC,CAAC,CAAC;AACvD,CAAC;AAED,KAAK,UAAU,IAAI;IACjB,MAAM,IAAI,GAAG,SAAS,CAAC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC;IAE9C,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC;QACd,SAAS,EAAE,CAAC;QACZ,OAAO;IACT,CAAC;IAED,MAAM,MAAM,GAAG,gBAAgB,CAAC,EAAE,SAAS,EAAE,IAAI,CAAC,SAAS,EAAE,CAAC,CAAC;IAC/D,iBAAiB,CAAC,EAAE,OAAO,EAAE,GAAG,CAAC,OAAO,EAAE,MAAM,EAAE,CAAC,CAAC;AACtD,CAAC;AAED,IAAI,YAAY,EAAE,CAAC;IACjB,IAAI,EAAE,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE;QACnB,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QACnB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC,CAAC,CAAC;AACL,CAAC"}
@@ -0,0 +1,95 @@
1
/**
 * SDK for calling cursor-api-proxy from another project.
 *
 * When startProxy is true (default), the SDK will start the proxy in the
 * background if it is not already reachable. Prerequisites: Cursor agent CLI
 * must be installed and set up separately (see README).
 */
export type CursorProxyClientOptions = {
    /** Proxy base URL (e.g. http://127.0.0.1:8765). Default: env CURSOR_PROXY_URL or http://127.0.0.1:8765 */
    baseUrl?: string;
    /** Optional API key; if the proxy is started with CURSOR_BRIDGE_API_KEY, pass it here. */
    apiKey?: string;
    /**
     * When true (default), start the proxy in the background if it is not reachable.
     * Only applies when using the default base URL. Set to false if you run the proxy yourself.
     */
    startProxy?: boolean;
};
/**
 * Ensures the proxy is running at the given base URL. If the URL is the default
 * and the proxy is not reachable, starts it in the background (Node.js only).
 * Resolves when /health returns 200 or rejects on timeout.
 */
export declare function ensureProxyRunning(options?: {
    baseUrl?: string;
    timeoutMs?: number;
}): Promise<string>;
/**
 * Stops the proxy instance this SDK started in the background, if any.
 * NOTE(review): the resolved boolean presumably indicates whether a managed
 * proxy was actually stopped — confirm against the implementation.
 */
export declare function stopManagedProxy(options?: {
    timeoutMs?: number;
}): Promise<boolean>;
/**
 * Options suitable for the OpenAI SDK constructor.
 * Use: new OpenAI(getOpenAIOptions())
 * For auto-starting the proxy first, use getOpenAIOptionsAsync() and await it.
 */
export declare function getOpenAIOptions(options?: CursorProxyClientOptions): {
    baseURL: string;
    apiKey: string;
};
/**
 * Like getOpenAIOptions but ensures the proxy is running first (starts it in the background if needed).
 * Use: new OpenAI(await getOpenAIOptionsAsync())
 */
export declare function getOpenAIOptionsAsync(options?: CursorProxyClientOptions & {
    timeoutMs?: number;
}): Promise<{
    baseURL: string;
    apiKey: string;
}>;
/**
 * Minimal client to call the proxy HTTP API.
 * When startProxy is true (default), the proxy is started in the background on first request if not reachable.
 */
export declare function createCursorProxyClient(options?: CursorProxyClientOptions): {
    /** Base URL of the proxy (no /v1 suffix) */
    baseUrl: string;
    /** Headers to send (Content-Type and optional Authorization) */
    headers: Record<string, string>;
    /** Get options for the OpenAI SDK constructor */
    getOpenAIOptions: () => {
        baseURL: string;
        apiKey: string;
    };
    /**
     * POST to a path (e.g. /v1/chat/completions). Body is JSON-serialized.
     */
    request<T = unknown>(path: string, body: unknown): Promise<{
        data: T;
        ok: boolean;
        status: number;
    }>;
    /** OpenAI-style chat completions (non-streaming). */
    chatCompletionsCreate(params: {
        model?: string;
        messages: Array<{
            role: string;
            content: string;
        }>;
        stream?: false;
    }): Promise<{
        choices?: Array<{
            message?: {
                content?: string;
            };
        }>;
        error?: {
            message?: string;
        };
    }>;
    /**
     * Use for streaming: returns a fetch Response so you can read the body stream.
     * Ensures the proxy is running first when startProxy is true.
     */
    fetch(path: string, init?: RequestInit): Promise<Response>;
};