@spacelr/mcp 0.0.6 → 0.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -5,21 +5,89 @@ import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
5
5
  import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
6
6
 
7
7
  // libs/mcp-server/src/config.ts
8
+ import * as crypto from "crypto";
8
9
  import * as fs from "fs";
9
10
  import * as os from "os";
10
11
  import * as path from "path";
11
12
  var REFRESH_TIMEOUT_MS = 1e4;
12
- function getCredentialsFile() {
13
- const home = os.homedir();
13
+ var SPACELR_CONFIG_FILENAME = "spacelr.json";
14
+ function getGlobalCredentialsFile() {
15
+ const home = process.env["HOME"] ?? process.env["USERPROFILE"] ?? os.homedir();
14
16
  return path.join(home, ".spacelr", "credentials.json");
15
17
  }
16
- function readStoredCredentials() {
18
+ function findSpacelrConfigPath(startDir) {
19
+ let dir = startDir ?? process.cwd();
20
+ while (true) {
21
+ const candidate = path.join(dir, SPACELR_CONFIG_FILENAME);
22
+ if (fs.existsSync(candidate)) return candidate;
23
+ const parent = path.dirname(dir);
24
+ if (parent === dir) return null;
25
+ dir = parent;
26
+ }
27
+ }
28
+ function resolveCredentialsFile() {
29
+ const configPath = findSpacelrConfigPath();
30
+ if (configPath) {
31
+ const projectDir = path.dirname(configPath);
32
+ return {
33
+ path: path.join(projectDir, ".spacelr", "credentials.json"),
34
+ scope: "project",
35
+ projectDir
36
+ };
37
+ }
38
+ return { path: getGlobalCredentialsFile(), scope: "global" };
39
+ }
40
+ function ensureGitignored(projectDir) {
41
+ const gitignorePath = path.join(projectDir, ".gitignore");
42
+ const entry = ".spacelr/";
43
+ const matchesExisting = (line) => {
44
+ const trimmed = line.trim();
45
+ if (!trimmed || trimmed.startsWith("#")) return false;
46
+ return trimmed === ".spacelr" || trimmed === ".spacelr/" || trimmed === ".spacelr/*" || trimmed === ".spacelr/**" || trimmed === ".spacelr/credentials.json" || // Leading-slash forms anchor the pattern to the repo root
47
+ trimmed === "/.spacelr" || trimmed === "/.spacelr/" || trimmed === "/.spacelr/*" || trimmed === "/.spacelr/**" || trimmed === "/.spacelr/credentials.json";
48
+ };
49
+ const hasNegation = (line) => {
50
+ const trimmed = line.trim();
51
+ if (!trimmed || trimmed.startsWith("#")) return false;
52
+ return trimmed.startsWith("!") && trimmed.includes(".spacelr");
53
+ };
54
+ let content = "";
55
+ let existed = false;
17
56
  try {
18
- const file = getCredentialsFile();
19
- if (!fs.existsSync(file)) return null;
20
- const content = fs.readFileSync(file, "utf-8");
57
+ content = fs.readFileSync(gitignorePath, "utf-8");
58
+ existed = true;
59
+ } catch (err) {
60
+ if (err.code !== "ENOENT") return { safe: true };
61
+ }
62
+ const lines = content.split(/\r?\n/);
63
+ if (lines.some(hasNegation)) {
64
+ const alreadyHasMatch = lines.some(matchesExisting);
65
+ console.error(
66
+ alreadyHasMatch ? ".gitignore already contains .spacelr/ but ALSO contains a negation pattern that un-ignores credentials. Remove the negation or store credentials globally." : ".gitignore contains a negation pattern for .spacelr/ \u2014 refusing to write credentials safety entry. Remove the negation or store credentials globally."
67
+ );
68
+ return { safe: false };
69
+ }
70
+ if (lines.some(matchesExisting)) return { safe: true };
71
+ const needsLeadingNewline = existed && content.length > 0 && !content.endsWith("\n");
72
+ const newContent = (existed ? content : "") + (needsLeadingNewline ? "\n" : "") + entry + "\n";
73
+ try {
74
+ writeFileAtomic(gitignorePath, newContent, 420);
75
+ console.error(`Added ${entry} to .gitignore (credentials must never be committed)`);
76
+ return { safe: true };
77
+ } catch {
78
+ return { safe: true };
79
+ }
80
+ }
81
+ function readCredentialsFromDisk(filePath) {
82
+ try {
83
+ const content = fs.readFileSync(filePath, "utf-8");
21
84
  const parsed = JSON.parse(content);
22
- if (typeof parsed !== "object" || parsed === null || typeof parsed.accessToken !== "string" || !Number.isFinite(parsed.expiresAt) || typeof parsed.apiUrl !== "string") {
85
+ if (typeof parsed !== "object" || parsed === null) return null;
86
+ const obj = parsed;
87
+ if (typeof obj["accessToken"] !== "string" || obj["accessToken"].length === 0 || !Number.isFinite(obj["expiresAt"]) || typeof obj["apiUrl"] !== "string" || obj["apiUrl"].length === 0) {
88
+ return null;
89
+ }
90
+ if ("refreshToken" in obj && typeof obj["refreshToken"] !== "string") {
23
91
  return null;
24
92
  }
25
93
  return parsed;
@@ -27,15 +95,72 @@ function readStoredCredentials() {
27
95
  return null;
28
96
  }
29
97
  }
30
- function storeCredentials(credentials) {
31
- const file = getCredentialsFile();
32
- fs.mkdirSync(path.dirname(file), { recursive: true, mode: 448 });
33
- fs.writeFileSync(file, JSON.stringify(credentials, null, 2), {
34
- mode: 384
35
- });
98
+ function readStoredCredentials() {
99
+ return readStoredCredentialsWithLocation()?.credentials ?? null;
36
100
  }
37
- async function refreshToken(credentials) {
38
- if (!credentials.refreshToken || !credentials.apiUrl) return null;
101
+ function readStoredCredentialsWithLocation() {
102
+ const primary = resolveCredentialsFile();
103
+ const fromPrimary = readCredentialsFromDisk(primary.path);
104
+ if (fromPrimary) return { credentials: fromPrimary, location: primary };
105
+ if (primary.scope === "project") {
106
+ const globalPath = getGlobalCredentialsFile();
107
+ const fromGlobal = readCredentialsFromDisk(globalPath);
108
+ if (fromGlobal) {
109
+ return {
110
+ credentials: fromGlobal,
111
+ location: { path: globalPath, scope: "global" }
112
+ };
113
+ }
114
+ }
115
+ return null;
116
+ }
117
+ function writeFileAtomic(filePath, data, mode) {
118
+ const tmpPath = `${filePath}.tmp.${process.pid}.${crypto.randomBytes(6).toString("hex")}`;
119
+ try {
120
+ fs.writeFileSync(tmpPath, data, { mode });
121
+ fs.renameSync(tmpPath, filePath);
122
+ } catch (err) {
123
+ try {
124
+ fs.unlinkSync(tmpPath);
125
+ } catch {
126
+ }
127
+ throw err;
128
+ }
129
+ }
130
+ function storeCredentials(credentials, location) {
131
+ if (location.scope === "project" && location.projectDir) {
132
+ const { safe } = ensureGitignored(location.projectDir);
133
+ if (!safe) {
134
+ throw new Error(
135
+ "Refusing to write project-local credentials: .gitignore contains a negation pattern that would un-ignore .spacelr/."
136
+ );
137
+ }
138
+ }
139
+ const dir = path.dirname(location.path);
140
+ fs.mkdirSync(dir, { recursive: true, mode: 448 });
141
+ try {
142
+ fs.chmodSync(dir, 448);
143
+ } catch {
144
+ }
145
+ writeFileAtomic(location.path, JSON.stringify(credentials, null, 2), 384);
146
+ }
147
+ function isTokenStillValid(credentials) {
148
+ return Date.now() < credentials.expiresAt - 6e4;
149
+ }
150
+ async function refreshToken(credentials, sourceLocation) {
151
+ if (!credentials.refreshToken) return null;
152
+ try {
153
+ const parsed = new URL(credentials.apiUrl);
154
+ if (parsed.protocol !== "http:" && parsed.protocol !== "https:") {
155
+ console.error(
156
+ `Refusing to refresh against non-http(s) URL: ${parsed.protocol}`
157
+ );
158
+ return null;
159
+ }
160
+ } catch {
161
+ console.error("Refusing to refresh: stored apiUrl is not a valid URL");
162
+ return null;
163
+ }
39
164
  const apiUrl = credentials.apiUrl.replace(/\/+$/, "");
40
165
  const controller = new AbortController();
41
166
  const timer = setTimeout(() => controller.abort(), REFRESH_TIMEOUT_MS);
@@ -49,6 +174,13 @@ async function refreshToken(credentials) {
49
174
  });
50
175
  if (!response.ok) {
51
176
  console.error(`Token refresh failed (HTTP ${response.status})`);
177
+ if (response.status === 401) {
178
+ const fresh = readCredentialsFromDisk(sourceLocation.path);
179
+ if (fresh && (fresh.accessToken !== credentials.accessToken || fresh.refreshToken !== credentials.refreshToken) && isTokenStillValid(fresh)) {
180
+ console.error("Using credentials refreshed by concurrent process");
181
+ return fresh.accessToken;
182
+ }
183
+ }
52
184
  return null;
53
185
  }
54
186
  const data = await response.json();
@@ -56,13 +188,15 @@ async function refreshToken(credentials) {
56
188
  console.error("Token refresh returned unexpected response shape");
57
189
  return null;
58
190
  }
191
+ const expiresInRaw = data.expires_in;
192
+ const expiresIn = typeof expiresInRaw === "number" && Number.isFinite(expiresInRaw) && expiresInRaw > 0 ? expiresInRaw : 3600;
59
193
  const updated = {
60
194
  accessToken: data.access_token,
61
195
  refreshToken: data.refresh_token ?? credentials.refreshToken,
62
- expiresAt: Date.now() + (data.expires_in ?? 3600) * 1e3,
196
+ expiresAt: Date.now() + expiresIn * 1e3,
63
197
  apiUrl: credentials.apiUrl
64
198
  };
65
- storeCredentials(updated);
199
+ storeCredentials(updated, sourceLocation);
66
200
  console.error("Token refreshed successfully");
67
201
  return data.access_token;
68
202
  } catch (err) {
@@ -79,14 +213,19 @@ async function refreshToken(credentials) {
79
213
  async function resolveAuthToken(forceRefresh = false) {
80
214
  const envToken = process.env["SPACELR_AUTH_TOKEN"];
81
215
  if (envToken) return envToken;
82
- const credentials = readStoredCredentials();
83
- if (!credentials) return null;
84
- const isExpired = Date.now() >= credentials.expiresAt - 6e4;
85
- if (!forceRefresh && !isExpired) return credentials.accessToken;
86
- return refreshToken(credentials);
216
+ const loaded = readStoredCredentialsWithLocation();
217
+ if (!loaded) return null;
218
+ return resolveAuthTokenFromLoaded(loaded, forceRefresh);
219
+ }
220
+ async function resolveAuthTokenFromLoaded(loaded, forceRefresh) {
221
+ const { credentials, location } = loaded;
222
+ if (!forceRefresh && isTokenStillValid(credentials)) {
223
+ return credentials.accessToken;
224
+ }
225
+ return refreshToken(credentials, location);
87
226
  }
88
227
  function warnIfInsecureUrl(url) {
89
- if (url.startsWith("http://") && !url.includes("localhost") && !/^http:\/\/(127\.|10\.|192\.168\.|172\.(1[6-9]|2\d|3[01])\.|0\.0\.0\.0|(\[::1\]))(\/|:|$)/.test(url)) {
228
+ if (url.startsWith("http://") && !url.includes("localhost") && !/^http:\/\/(127\.|10\.|192\.168\.|172\.(1[6-9]|2\d|3[01])\.|(\[::1\])|::1)(\/|:|$)/.test(url)) {
90
229
  console.error(`Warning: API URL uses plain HTTP (${url}). Credentials will be transmitted unencrypted.`);
91
230
  }
92
231
  }
@@ -123,20 +262,24 @@ function resolveApiBaseUrl() {
123
262
  try {
124
263
  const credentials = readStoredCredentials();
125
264
  return resolveApiUrl(credentials);
126
- } catch {
265
+ } catch (err) {
266
+ console.error(
267
+ `Failed to resolve admin API URL: ${err instanceof Error ? err.message : String(err)}`
268
+ );
127
269
  return null;
128
270
  }
129
271
  }
130
272
  async function loadConfig() {
131
- const credentials = readStoredCredentials();
132
- const authToken = await resolveAuthToken();
273
+ const envToken = process.env["SPACELR_AUTH_TOKEN"];
274
+ const loaded = readStoredCredentialsWithLocation();
275
+ const authToken = envToken ?? (loaded ? await resolveAuthTokenFromLoaded(loaded, false) : null);
133
276
  if (!authToken) {
134
277
  throw new Error(
135
278
  'No auth token found. Either set SPACELR_AUTH_TOKEN or run "spacelr login" first.'
136
279
  );
137
280
  }
138
281
  return {
139
- apiBaseUrl: resolveApiUrl(credentials),
282
+ apiBaseUrl: resolveApiUrl(loaded?.credentials ?? null),
140
283
  authToken,
141
284
  clientId: process.env["SPACELR_CLIENT_ID"],
142
285
  projectId: process.env["SPACELR_PROJECT_ID"]
@@ -145,6 +288,7 @@ async function loadConfig() {
145
288
 
146
289
  // libs/mcp-server/src/api-client.ts
147
290
  var REQUEST_TIMEOUT_MS = 3e4;
291
+ var UPLOAD_TIMEOUT_MS = 12e4;
148
292
  var ApiError = class extends Error {
149
293
  constructor(status, statusText, responseBody) {
150
294
  const truncated = responseBody.length > 500 ? responseBody.slice(0, 500) + "..." : responseBody;
@@ -183,6 +327,77 @@ var ApiClient = class {
183
327
  async delete(path2, opts) {
184
328
  return this.requestWithRetry("DELETE", path2, opts);
185
329
  }
330
+ /**
331
+ * Upload a file as multipart/form-data.
332
+ * Includes the same 401-retry logic as regular requests.
333
+ *
334
+ * @param path - API path (e.g. `/projects/:id/functions/:id/deploy`)
335
+ * @param buffer - Raw file content
336
+ * @param filename - Name sent in the Content-Disposition header (e.g. "bundle.zip")
337
+ */
338
+ async uploadFile(path2, buffer, filename) {
339
+ try {
340
+ return await this.sendMultipart(path2, buffer, filename);
341
+ } catch (error) {
342
+ if (error instanceof ApiError && error.status === 401) {
343
+ const currentToken = this.headers["Authorization"];
344
+ let newToken = await this.refreshAuthToken();
345
+ const newBaseUrl = resolveApiBaseUrl();
346
+ if (newBaseUrl && newBaseUrl !== this.baseUrl) {
347
+ this.baseUrl = newBaseUrl;
348
+ }
349
+ if (newToken && `Bearer ${newToken}` === currentToken) {
350
+ newToken = await resolveAuthToken(true);
351
+ }
352
+ if (newToken && `Bearer ${newToken}` !== currentToken) {
353
+ this.headers["Authorization"] = `Bearer ${newToken}`;
354
+ return this.sendMultipart(path2, buffer, filename);
355
+ }
356
+ }
357
+ throw error;
358
+ }
359
+ }
360
+ async sendMultipart(path2, buffer, filename) {
361
+ const boundary = `----MCP${Date.now()}${Math.random().toString(36).slice(2)}`;
362
+ const header = Buffer.from(
363
+ `--${boundary}\r
364
Content-Disposition: form-data; name="file"; filename="${filename}"\r
365
+ Content-Type: application/zip\r
366
+ \r
367
+ `
368
+ );
369
+ const footer = Buffer.from(`\r
370
+ --${boundary}--\r
371
+ `);
372
+ const body = Buffer.concat([header, buffer, footer]);
373
+ const url = `${this.baseUrl}${path2}`;
374
+ const controller = new AbortController();
375
+ const timer = setTimeout(() => controller.abort(), UPLOAD_TIMEOUT_MS);
376
+ try {
377
+ const response = await fetch(url, {
378
+ method: "POST",
379
+ headers: {
380
+ ...this.headers,
381
+ "Content-Type": `multipart/form-data; boundary=${boundary}`,
382
+ "Content-Length": String(body.length)
383
+ },
384
+ body,
385
+ signal: controller.signal
386
+ });
387
+ const text = await response.text();
388
+ if (!response.ok) {
389
+ throw new ApiError(response.status, response.statusText, text);
390
+ }
391
+ if (!text) return void 0;
392
+ try {
393
+ return JSON.parse(text);
394
+ } catch {
395
+ return text;
396
+ }
397
+ } finally {
398
+ clearTimeout(timer);
399
+ }
400
+ }
186
401
  async refreshAuthToken(force = false) {
187
402
  if (!this.refreshPromise) {
188
403
  this.refreshPromise = resolveAuthToken(force).finally(() => {
@@ -1344,6 +1559,104 @@ function registerDatabaseTools(server, api) {
1344
1559
 
1345
1560
  // libs/mcp-server/src/tools/hosting.ts
1346
1561
  import { z as z5 } from "zod";
1562
+
1563
+ // libs/mcp-server/src/zip.ts
1564
+ import { deflateRawSync } from "zlib";
1565
+ var ZipBuilder = class {
1566
+ constructor() {
1567
+ this.files = [];
1568
+ }
1569
+ /**
1570
+ * Add a text file to the archive.
1571
+ *
1572
+ * @param name - File path inside the ZIP (e.g. "index.js" or "src/utils.ts")
1573
+ * @param content - UTF-8 text content of the file
1574
+ */
1575
+ addFile(name, content) {
1576
+ const nameBuffer = Buffer.from(name, "utf-8");
1577
+ const contentBuffer = Buffer.from(content, "utf-8");
1578
+ const compressed = deflateRawSync(contentBuffer, { level: 9 });
1579
+ const crc32 = computeCrc32(contentBuffer);
1580
+ this.files.push({
1581
+ name: nameBuffer,
1582
+ content: contentBuffer,
1583
+ compressed,
1584
+ crc32
1585
+ });
1586
+ }
1587
+ /** Generate the complete ZIP archive as a Buffer. */
1588
+ toBuffer() {
1589
+ const localHeaders = [];
1590
+ const centralHeaders = [];
1591
+ let offset = 0;
1592
+ for (const file of this.files) {
1593
+ const local = Buffer.alloc(30);
1594
+ local.writeUInt32LE(67324752, 0);
1595
+ local.writeUInt16LE(20, 4);
1596
+ local.writeUInt16LE(0, 6);
1597
+ local.writeUInt16LE(8, 8);
1598
+ local.writeUInt16LE(0, 10);
1599
+ local.writeUInt16LE(0, 12);
1600
+ local.writeUInt32LE(file.crc32, 14);
1601
+ local.writeUInt32LE(file.compressed.length, 18);
1602
+ local.writeUInt32LE(file.content.length, 22);
1603
+ local.writeUInt16LE(file.name.length, 26);
1604
+ local.writeUInt16LE(0, 28);
1605
+ localHeaders.push(local, file.name, file.compressed);
1606
+ const central = Buffer.alloc(46);
1607
+ central.writeUInt32LE(33639248, 0);
1608
+ central.writeUInt16LE(20, 4);
1609
+ central.writeUInt16LE(20, 6);
1610
+ central.writeUInt16LE(0, 8);
1611
+ central.writeUInt16LE(8, 10);
1612
+ central.writeUInt16LE(0, 12);
1613
+ central.writeUInt16LE(0, 14);
1614
+ central.writeUInt32LE(file.crc32, 16);
1615
+ central.writeUInt32LE(file.compressed.length, 20);
1616
+ central.writeUInt32LE(file.content.length, 24);
1617
+ central.writeUInt16LE(file.name.length, 28);
1618
+ central.writeUInt16LE(0, 30);
1619
+ central.writeUInt16LE(0, 32);
1620
+ central.writeUInt16LE(0, 34);
1621
+ central.writeUInt16LE(0, 36);
1622
+ central.writeUInt32LE(0, 38);
1623
+ central.writeUInt32LE(offset, 42);
1624
+ centralHeaders.push(central, file.name);
1625
+ offset += 30 + file.name.length + file.compressed.length;
1626
+ }
1627
+ const centralDirSize = centralHeaders.reduce((sum, b) => sum + b.length, 0);
1628
+ const eocd = Buffer.alloc(22);
1629
+ eocd.writeUInt32LE(101010256, 0);
1630
+ eocd.writeUInt16LE(0, 4);
1631
+ eocd.writeUInt16LE(0, 6);
1632
+ eocd.writeUInt16LE(this.files.length, 8);
1633
+ eocd.writeUInt16LE(this.files.length, 10);
1634
+ eocd.writeUInt32LE(centralDirSize, 12);
1635
+ eocd.writeUInt32LE(offset, 16);
1636
+ eocd.writeUInt16LE(0, 20);
1637
+ return Buffer.concat([...localHeaders, ...centralHeaders, eocd]);
1638
+ }
1639
+ };
1640
+ var crcTable = (() => {
1641
+ const table = new Uint32Array(256);
1642
+ for (let n = 0; n < 256; n++) {
1643
+ let c = n;
1644
+ for (let k = 0; k < 8; k++) {
1645
+ c = c & 1 ? 3988292384 ^ c >>> 1 : c >>> 1;
1646
+ }
1647
+ table[n] = c;
1648
+ }
1649
+ return table;
1650
+ })();
1651
+ function computeCrc32(data) {
1652
+ let crc = 4294967295;
1653
+ for (let i = 0; i < data.length; i++) {
1654
+ crc = crcTable[(crc ^ data[i]) & 255] ^ crc >>> 8;
1655
+ }
1656
+ return (crc ^ 4294967295) >>> 0;
1657
+ }
1658
+
1659
+ // libs/mcp-server/src/tools/hosting.ts
1347
1660
  function registerHostingTools(server, api) {
1348
1661
  server.registerTool(
1349
1662
  "hosting_deployments_list",
@@ -1424,6 +1737,57 @@ function registerHostingTools(server, api) {
1424
1737
  }
1425
1738
  }
1426
1739
  );
1740
+ server.registerTool(
1741
+ "hosting_deployments_upload",
1742
+ {
1743
+ description: 'Upload files to an existing hosting deployment. Accepts a record of filename \u2192 content, bundles them into a ZIP archive, and uploads it.\n\nTypical workflow:\n 1. hosting_deployments_create \u2192 get deploymentId\n 2. hosting_deployments_upload \u2192 upload site files\n 3. hosting_deployments_activate \u2192 make the deployment live\n\nExample:\n files: { "index.html": "<!DOCTYPE html>...", "style.css": "body { ... }", "app.js": "..." }',
1744
+ inputSchema: {
1745
+ projectId: z5.string().describe("Project ID"),
1746
+ deploymentId: z5.string().describe("Deployment ID (from hosting_deployments_create)"),
1747
+ files: z5.record(z5.string(), z5.string()).describe(
1748
+ 'Map of filename \u2192 file content. Keys are file paths inside the archive (e.g. "index.html", "assets/style.css"). Values are the UTF-8 file contents.'
1749
+ )
1750
+ }
1751
+ },
1752
+ async ({ projectId, deploymentId, files }) => {
1753
+ try {
1754
+ const fileEntries = Object.entries(files);
1755
+ if (fileEntries.length === 0) {
1756
+ return {
1757
+ content: [{ type: "text", text: "files must contain at least one entry" }],
1758
+ isError: true
1759
+ };
1760
+ }
1761
+ const zip = new ZipBuilder();
1762
+ for (const [name, content] of fileEntries) {
1763
+ zip.addFile(name, content);
1764
+ }
1765
+ const zipBuffer = zip.toBuffer();
1766
+ const result = await api.uploadFile(
1767
+ `/hosting/projects/${encodeURIComponent(projectId)}/deployments/${encodeURIComponent(deploymentId)}/upload`,
1768
+ zipBuffer,
1769
+ "site.zip"
1770
+ );
1771
+ const response = {
1772
+ success: true,
1773
+ filesUploaded: fileEntries.map(([name]) => name),
1774
+ bundleSizeBytes: zipBuffer.length
1775
+ };
1776
+ if (result && typeof result === "object") {
1777
+ Object.assign(response, result);
1778
+ }
1779
+ return {
1780
+ content: [{ type: "text", text: JSON.stringify(response, null, 2) }]
1781
+ };
1782
+ } catch (error) {
1783
+ const message = error instanceof Error ? error.message : String(error);
1784
+ return {
1785
+ content: [{ type: "text", text: `Failed to upload deployment: ${message}` }],
1786
+ isError: true
1787
+ };
1788
+ }
1789
+ }
1790
+ );
1427
1791
  server.registerTool(
1428
1792
  "hosting_deployments_activate",
1429
1793
  {
@@ -2138,6 +2502,70 @@ function registerFunctionTools(server, api) {
2138
2502
  }
2139
2503
  }
2140
2504
  );
2505
+ server.registerTool(
2506
+ "functions_deploy",
2507
+ {
2508
+ description: `Deploy code to a serverless function. Accepts one or more files as a record of filename \u2192 content. The files are bundled into a ZIP archive and uploaded. Use this after creating a function with functions_create.
2509
+
2510
+ Example \u2013 single file:
2511
+ files: { "index.js": "export default async function handler(ctx) { ... }" }
2512
+
2513
+ Example \u2013 multiple files:
2514
+ files: { "index.js": "import {hello} from './lib.js'; ...", "lib.js": "export const hello = () => ..." }`,
2515
+ inputSchema: {
2516
+ projectId: z7.string().describe("Project ID"),
2517
+ functionId: z7.string().describe("Function ID (from functions_create or functions_list)"),
2518
+ files: z7.record(z7.string(), z7.string()).describe(
2519
+ 'Map of filename \u2192 file content. Keys are file paths inside the bundle (e.g. "index.js", "src/helper.ts"). Values are the UTF-8 source code.'
2520
+ ),
2521
+ entryPoint: z7.string().max(255).optional().describe('Entry point file inside the bundle (default: "index.js"). Must match a key in files.')
2522
+ }
2523
+ },
2524
+ async ({ projectId, functionId, files, entryPoint }) => {
2525
+ try {
2526
+ const fileEntries = Object.entries(files);
2527
+ if (fileEntries.length === 0) {
2528
+ return {
2529
+ content: [{ type: "text", text: "files must contain at least one entry" }],
2530
+ isError: true
2531
+ };
2532
+ }
2533
+ if (entryPoint) {
2534
+ await api.patch(
2535
+ `/projects/${encodeURIComponent(projectId)}/functions/${encodeURIComponent(functionId)}`,
2536
+ { body: { entryPoint } }
2537
+ );
2538
+ }
2539
+ const zip = new ZipBuilder();
2540
+ for (const [name, content] of fileEntries) {
2541
+ zip.addFile(name, content);
2542
+ }
2543
+ const zipBuffer = zip.toBuffer();
2544
+ const result = await api.uploadFile(
2545
+ `/projects/${encodeURIComponent(projectId)}/functions/${encodeURIComponent(functionId)}/deploy`,
2546
+ zipBuffer,
2547
+ "bundle.zip"
2548
+ );
2549
+ const response = {
2550
+ success: true,
2551
+ filesDeployed: fileEntries.map(([name]) => name),
2552
+ bundleSizeBytes: zipBuffer.length
2553
+ };
2554
+ if (result && typeof result === "object") {
2555
+ Object.assign(response, result);
2556
+ }
2557
+ return {
2558
+ content: [{ type: "text", text: JSON.stringify(response, null, 2) }]
2559
+ };
2560
+ } catch (error) {
2561
+ const message = error instanceof Error ? error.message : String(error);
2562
+ return {
2563
+ content: [{ type: "text", text: `Failed to deploy function: ${message}` }],
2564
+ isError: true
2565
+ };
2566
+ }
2567
+ }
2568
+ );
2141
2569
  server.registerTool(
2142
2570
  "functions_deployments_list",
2143
2571
  {
@@ -3679,7 +4107,10 @@ function registerFunctionPrompts(server) {
3679
4107
  " Optional: cronExpression for scheduled execution",
3680
4108
  "",
3681
4109
  "2. WRITE the code:",
3682
- " Create an index.js file. Available APIs inside the sandbox:",
4110
+ " Create an index.js file with plain JavaScript (ES2022).",
4111
+ " The runtime is an isolated V8 sandbox \u2014 NOT Node.js.",
4112
+ " No require/import, no Node.js built-ins (fs, path, crypto, Buffer, process).",
4113
+ " Top-level await is supported. Available APIs inside the sandbox:",
3683
4114
  "",
3684
4115
  " - console.log/warn/error/info() \u2014 captured to execution logs",
3685
4116
  " - await fetch(url, options) \u2014 HTTP client (10s timeout, 5MB limit)",
@@ -3692,6 +4123,10 @@ function registerFunctionPrompts(server) {
3692
4123
  " - await spacelr.email.send/sendRaw() \u2014 send template or raw emails",
3693
4124
  " - await spacelr.notifications.send/sendMany() \u2014 push notifications",
3694
4125
  "",
4126
+ " Trigger context globals (depending on trigger type):",
4127
+ " - event: { type, data, timestamp } \u2014 for event-triggered executions",
4128
+ " - payload: { ... } \u2014 for webhook-triggered executions (the POST body)",
4129
+ "",
3695
4130
  " IMPORTANT: Top-level await is supported. No imports/require available.",
3696
4131
  "",
3697
4132
  "3. IF USING spacelr.db:",
@@ -3700,9 +4135,11 @@ function registerFunctionPrompts(server) {
3700
4135
  ' Without rules, inserts will fail with "Rule denied".',
3701
4136
  "",
3702
4137
  "4. DEPLOY the code:",
3703
- " Zip the file(s) and upload via the CLI:",
3704
- " spacelr functions deploy ./my-function --name " + name,
3705
- " Or use the admin API: POST /projects/{projectId}/functions/{id}/deploy",
4138
+ " Use the functions_deploy tool to upload code directly:",
4139
+ ' functions_deploy({ projectId, functionId, files: { "index.js": "..." } })',
4140
+ " This bundles the files into a ZIP and uploads them automatically.",
4141
+ ' You can deploy multiple files: { "index.js": "...", "lib.js": "..." }',
4142
+ " Optionally set entryPoint if your main file is not index.js.",
3706
4143
  "",
3707
4144
  "5. TRIGGER:",
3708
4145
  " Functions support multiple trigger types:",
@@ -3745,6 +4182,31 @@ function registerFunctionPrompts(server) {
3745
4182
  text: [
3746
4183
  "Show me the complete API reference for the Spacelr function sandbox.",
3747
4184
  "",
4185
+ "\u2550\u2550\u2550 RUNTIME ENVIRONMENT \u2550\u2550\u2550",
4186
+ "Functions run in an isolated V8 sandbox (via isolated-vm), NOT in Node.js.",
4187
+ "This means:",
4188
+ " - No require() or import statements \u2014 all APIs are pre-injected globals",
4189
+ " - No Node.js built-ins (fs, path, crypto, Buffer, process, etc.)",
4190
+ " - No setTimeout, setInterval, or Promise.race",
4191
+ " - No Function constructor, eval(), WebAssembly, or Proxy",
4192
+ " - Top-level await IS supported (code runs in an async IIFE)",
4193
+ " - Standard JS built-ins work: JSON, Math, Date, Array, Map, Set, RegExp, etc.",
4194
+ " - Use the built-in fetch() for HTTP calls (not node-fetch or axios)",
4195
+ " - Use env.get() for secrets (not process.env)",
4196
+ " - Use kv.get/set() for persistence across executions (not file system)",
4197
+ "",
4198
+ "Write plain JavaScript (ES2022). The code structure is simple:",
4199
+ " // Top-level code runs immediately",
4200
+ ' const data = await fetch("https://api.example.com/data");',
4201
+ " const json = JSON.parse(data.body);",
4202
+ ' console.log("Fetched", json.length, "items");',
4203
+ "",
4204
+ "\u2550\u2550\u2550 TRIGGER CONTEXT \u2550\u2550\u2550",
4205
+ "Depending on how the function was triggered, these globals may be available:",
4206
+ " - event: { type, data, timestamp } \u2014 for event-triggered executions",
4207
+ " - payload: { ... } \u2014 for webhook-triggered executions (the POST body)",
4208
+ "For manual and cron triggers, neither is set.",
4209
+ "",
3748
4210
  "\u2550\u2550\u2550 CONSOLE \u2550\u2550\u2550",
3749
4211
  "console.log(...args) \u2014 log level",
3750
4212
  "console.info(...args) \u2014 info level",
@@ -3777,11 +4239,26 @@ function registerFunctionPrompts(server) {
3777
4239
  "",
3778
4240
  "\u2550\u2550\u2550 DATABASE \u2550\u2550\u2550",
3779
4241
  "const docs = await spacelr.db.collection(name).find(filter?, options?)",
3780
- " options: { sort, limit, offset }",
4242
+ ' filter: MongoDB-style query (e.g. { status: "active", age: { $gt: 18 } })',
4243
+ " options: { sort: { createdAt: -1 }, limit: 10, offset: 0 }",
4244
+ " returns: array of documents",
4245
+ "",
3781
4246
  "await spacelr.db.collection(name).insertOne(doc)",
4247
+ ' doc: plain object (e.g. { name: "Max", email: "max@example.com" })',
4248
+ " returns: the inserted document with _id",
4249
+ "",
3782
4250
  "await spacelr.db.collection(name).insertMany(docs)",
4251
+ " docs: array of objects",
4252
+ " returns: array of inserted documents",
4253
+ "",
4254
+ "Collection names: alphanumeric + underscore only, max 128 chars.",
3783
4255
  "Scoped to the function's project. Requires database rules to be set first.",
3784
4256
  "",
4257
+ "Example \u2014 read and write:",
4258
+ ' const users = await spacelr.db.collection("users").find({ status: "active" }, { limit: 5 });',
4259
+ ' console.log("Found", users.length, "active users");',
4260
+ ' await spacelr.db.collection("logs").insertOne({ action: "check", count: users.length, at: new Date().toISOString() });',
4261
+ "",
3785
4262
  "\u2550\u2550\u2550 STORAGE \u2550\u2550\u2550",
3786
4263
  "await spacelr.storage.list(options?) \u2192 FileInfo[]",
3787
4264
  "await spacelr.storage.getInfo(fileId) \u2192 FileInfo",