run402 1.34.2 → 1.35.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/cli.mjs +11 -1
  2. package/lib/blob.mjs +449 -0
  3. package/package.json +1 -1
package/cli.mjs CHANGED
@@ -28,7 +28,8 @@ Commands:
28
28
  deploy Deploy a full-stack app or static site (requires active tier)
29
29
  functions Manage serverless functions (deploy, invoke, logs, list, delete)
30
30
  secrets Manage project secrets (set, list, delete)
31
- storage Manage file storage (upload, download, list, delete)
31
+ blob Direct-to-S3 blob storage (put, get, ls, rm, sign) — up to 5 TiB
32
+ storage Legacy file storage (deprecated — sunset 2026-06-01, use 'blob')
32
33
  sites Deploy static sites
33
34
  subdomains Manage custom subdomains (claim, list, delete)
34
35
  domains Manage custom domains (add, list, status, delete)
@@ -115,10 +116,19 @@ switch (cmd) {
115
116
  break;
116
117
  }
117
118
  case "storage": {
119
+ process.stderr.write(
120
+ "run402 storage is deprecated — sunset 2026-06-01. Use `run402 blob` instead.\n" +
121
+ "See https://run402.com/docs/blob#migration\n\n",
122
+ );
118
123
  const { run } = await import("./lib/storage.mjs");
119
124
  await run(sub, rest);
120
125
  break;
121
126
  }
127
+ case "blob": {
128
+ const { run } = await import("./lib/blob.mjs");
129
+ await run(sub, rest);
130
+ break;
131
+ }
122
132
  case "sites": {
123
133
  const { run } = await import("./lib/sites.mjs");
124
134
  await run(sub, rest);
package/lib/blob.mjs ADDED
@@ -0,0 +1,449 @@
1
+ /**
2
+ * run402 blob — direct-to-S3 storage CLI.
3
+ *
4
+ * Usage:
5
+ * run402 blob put <file> [files...] [--project <id>] [--key <dest>] [--private] [--immutable] [--concurrency N] [--no-resume]
6
+ * run402 blob get <key> --output <file> [--project <id>]
7
+ * run402 blob ls [--project <id>] [--prefix <p>] [--limit <n>]
8
+ * run402 blob rm <key> [--project <id>]
9
+ * run402 blob sign <key> [--project <id>] [--ttl <seconds>]
10
+ *
11
+ * For any file ≤ 5 GiB a single presigned PUT is used. Larger files use S3
12
+ * multipart uploads with 16 MiB parts (640 parts at 10 GiB; up to 10 000
13
+ * parts at 5 TiB). The gateway never carries upload bytes — PUTs go straight
14
+ * to S3 from the client.
15
+ *
16
+ * Resumable uploads are enabled by default. The CLI persists per-upload
17
+ * state to ~/.run402/uploads/<upload_id>.json so a Ctrl-C'd upload can be
18
+ * resumed by re-running the same command.
19
+ */
20
+
21
+ import {
22
+ createReadStream,
23
+ statSync,
24
+ readFileSync,
25
+ writeFileSync,
26
+ mkdirSync,
27
+ existsSync,
28
+ unlinkSync,
29
+ readdirSync,
30
+ createWriteStream,
31
+ } from "node:fs";
32
+ import { createHash } from "node:crypto";
33
+ import { basename, dirname, join, resolve as resolvePath } from "node:path";
34
+ import { homedir } from "node:os";
35
+ import { pipeline } from "node:stream/promises";
36
+
37
+ import { findProject, API } from "./config.mjs";
38
+
39
+ const HELP = `run402 blob — Direct-to-S3 blob storage
40
+
41
+ Usage:
42
+ run402 blob put <file> [files...] [options]
43
+ run402 blob get <key> --output <file> [options]
44
+ run402 blob ls [options]
45
+ run402 blob rm <key> [options]
46
+ run402 blob sign <key> [options]
47
+
48
+ Options:
49
+ --project <id> Project ID (defaults to active project from 'run402 projects use')
50
+ --key <dest> Destination key (put only; defaults to file basename)
51
+ --private Upload as private (not served by CDN; apikey required to read)
52
+ --immutable Adds a content-hash suffix to the URL so overwrites produce distinct URLs.
53
+ Requires computing SHA-256 over the file (CLI does this automatically).
54
+ --concurrency N Concurrent part PUTs (default 4)
55
+ --no-resume Start fresh; ignore any cached state
56
+ --json NDJSON progress events (for agent consumption)
57
+ --prefix <p> Prefix filter (ls only)
58
+ --limit <n> Max results (ls only; default 100, max 1000)
59
+ --ttl <seconds> Signed-URL TTL (sign only; default 3600, max 604800)
60
+
61
+ Examples:
62
+ run402 blob put ./artifact.tgz --project abc123
63
+ run402 blob put ./dist/**/*.png --project abc123 --key assets/
64
+ run402 blob put huge.bin --project abc123 --immutable
65
+ run402 blob get images/logo.png --output /tmp/logo.png --project abc123
66
+ run402 blob ls --project abc123 --prefix images/
67
+ run402 blob rm images/logo.png --project abc123
68
+ run402 blob sign images/logo.png --project abc123 --ttl 600
69
+ `;
70
+
71
+ const UPLOAD_STATE_DIR = join(homedir(), ".run402", "uploads");
72
+
73
+ function die(msg, code = 1) {
74
+ console.error(JSON.stringify({ status: "error", message: msg }));
75
+ process.exit(code);
76
+ }
77
+
78
+ function parseArgs(args) {
79
+ const out = { positional: [], project: null, key: null, private: false, immutable: false,
80
+ concurrency: 4, resume: true, json: false, prefix: null, limit: null,
81
+ output: null, ttl: null };
82
+ for (let i = 0; i < args.length; i++) {
83
+ const a = args[i];
84
+ if (a === "--project") out.project = args[++i];
85
+ else if (a === "--key") out.key = args[++i];
86
+ else if (a === "--private") out.private = true;
87
+ else if (a === "--immutable") out.immutable = true;
88
+ else if (a === "--concurrency") out.concurrency = parseInt(args[++i], 10);
89
+ else if (a === "--no-resume") out.resume = false;
90
+ else if (a === "--json") out.json = true;
91
+ else if (a === "--prefix") out.prefix = args[++i];
92
+ else if (a === "--limit") out.limit = parseInt(args[++i], 10);
93
+ else if (a === "--output" || a === "-o") out.output = args[++i];
94
+ else if (a === "--ttl") out.ttl = parseInt(args[++i], 10);
95
+ else if (!a.startsWith("--")) out.positional.push(a);
96
+ }
97
+ return out;
98
+ }
99
+
100
+ function resolveProject(projectId) {
101
+ if (!projectId) die("--project is required (or run 'run402 projects use <id>' to set default)");
102
+ const p = findProject(projectId);
103
+ if (!p) die(`Project not found: ${projectId}`);
104
+ return p;
105
+ }
106
+
107
+ async function sha256File(filePath) {
108
+ const h = createHash("sha256");
109
+ const stream = createReadStream(filePath);
110
+ for await (const chunk of stream) h.update(chunk);
111
+ return h.digest("hex");
112
+ }
113
+
114
+ function loadState(uploadId) {
115
+ const path = join(UPLOAD_STATE_DIR, `${uploadId}.json`);
116
+ if (!existsSync(path)) return null;
117
+ try { return JSON.parse(readFileSync(path, "utf8")); }
118
+ catch { return null; }
119
+ }
120
+
121
+ function saveState(state) {
122
+ mkdirSync(UPLOAD_STATE_DIR, { recursive: true });
123
+ writeFileSync(join(UPLOAD_STATE_DIR, `${state.upload_id}.json`), JSON.stringify(state, null, 2));
124
+ }
125
+
126
+ function removeState(uploadId) {
127
+ const path = join(UPLOAD_STATE_DIR, `${uploadId}.json`);
128
+ if (existsSync(path)) unlinkSync(path);
129
+ }
130
+
131
+ function findResumableStateForFile(projectId, localPath, key) {
132
+ if (!existsSync(UPLOAD_STATE_DIR)) return null;
133
+ for (const f of readdirSync(UPLOAD_STATE_DIR)) {
134
+ if (!f.endsWith(".json")) continue;
135
+ try {
136
+ const s = JSON.parse(readFileSync(join(UPLOAD_STATE_DIR, f), "utf8"));
137
+ if (s.project_id === projectId && s.local_path === localPath && s.key === key) return s;
138
+ } catch { /* ignore */ }
139
+ }
140
+ return null;
141
+ }
142
+
143
+ // ---------------------------------------------------------------------------
144
+ // put
145
+ // ---------------------------------------------------------------------------
146
+
147
+ async function putOne(project, filePath, opts) {
148
+ const stat = statSync(filePath);
149
+ const size = stat.size;
150
+ const destKey = computeDestKey(filePath, opts.key);
151
+ const absLocal = resolvePath(filePath);
152
+
153
+ // Compute sha256 for immutable uploads up front; otherwise lazy.
154
+ const needSha = opts.immutable;
155
+ const sha256 = needSha ? await sha256File(filePath) : undefined;
156
+
157
+ // Attempt to resume
158
+ let state = opts.resume
159
+ ? findResumableStateForFile(project.id, absLocal, destKey)
160
+ : null;
161
+ let initRes;
162
+ if (state) {
163
+ // Re-poll the session; if it's still active, resume. Otherwise start fresh.
164
+ const poll = await apiFetch(`${API}/storage/v1/uploads/${state.upload_id}`, "GET", project, null);
165
+ if (poll.status === 200 && poll.body.status === "active") {
166
+ log(opts, { event: "resume", upload_id: state.upload_id, key: destKey });
167
+ initRes = { upload_id: state.upload_id, mode: state.mode, parts: state.parts, part_count: state.part_count, part_size_bytes: state.part_size_bytes };
168
+ } else {
169
+ removeState(state.upload_id);
170
+ state = null;
171
+ }
172
+ }
173
+
174
+ if (!state) {
175
+ const init = await apiFetch(`${API}/storage/v1/uploads`, "POST", project, {
176
+ key: destKey,
177
+ size_bytes: size,
178
+ content_type: guessContentType(destKey),
179
+ visibility: opts.private ? "private" : "public",
180
+ immutable: opts.immutable,
181
+ sha256,
182
+ });
183
+ if (init.status !== 201) die(`Init failed: HTTP ${init.status}: ${JSON.stringify(init.body)}`);
184
+ initRes = init.body;
185
+ saveState({
186
+ upload_id: initRes.upload_id,
187
+ project_id: project.id,
188
+ local_path: absLocal,
189
+ key: destKey,
190
+ mode: initRes.mode,
191
+ part_size_bytes: initRes.part_size_bytes,
192
+ part_count: initRes.part_count,
193
+ parts: initRes.parts,
194
+ parts_done: {},
195
+ sha256,
196
+ started_at: new Date().toISOString(),
197
+ });
198
+ state = loadState(initRes.upload_id);
199
+ }
200
+
201
+ // Upload parts with concurrency limit. For single-PUT mode part_count=1 and
202
+ // this loop runs once.
203
+ const etags = Array(initRes.part_count);
204
+ for (const pn of Object.keys(state.parts_done || {})) {
205
+ const pd = state.parts_done[pn];
206
+ // Legacy resume state stored just the etag string; new code stores
207
+ // { etag, sha256 }. Normalize on load.
208
+ etags[parseInt(pn, 10) - 1] = typeof pd === "string" ? { etag: pd, sha256: undefined } : pd;
209
+ }
210
+
211
+ // Presigned URLs are signed WITHOUT ChecksumAlgorithm (see gateway
212
+ // s3-presign.ts). The client-asserted sha256 declared at init is the
213
+ // integrity attestation — no x-amz-checksum-sha256 header on PUTs, and
214
+ // the gateway trusts the declared value at complete when S3 has none.
215
+ const todo = initRes.parts.filter((p) => !(state.parts_done || {})[String(p.part_number)]);
216
+ await withConcurrency(todo, opts.concurrency, async (part) => {
217
+ const { etag } = await putPart(filePath, part);
218
+ etags[part.part_number - 1] = { etag };
219
+ state.parts_done[String(part.part_number)] = { etag };
220
+ saveState(state);
221
+ log(opts, { event: "part", upload_id: state.upload_id, part_number: part.part_number, etag });
222
+ });
223
+
224
+ // Complete
225
+ const body = initRes.mode === "multipart"
226
+ ? { parts: etags.map((e, i) => ({ part_number: i + 1, etag: e.etag })) }
227
+ : {};
228
+ const complete = await apiFetch(`${API}/storage/v1/uploads/${state.upload_id}/complete`, "POST", project, body);
229
+ if (complete.status !== 200) die(`Complete failed: HTTP ${complete.status}: ${JSON.stringify(complete.body)}`);
230
+
231
+ removeState(state.upload_id);
232
+ log(opts, { event: "done", ...complete.body });
233
+ return complete.body;
234
+ }
235
+
236
+ function computeDestKey(filePath, keyOpt) {
237
+ if (!keyOpt) return basename(filePath);
238
+ if (keyOpt.endsWith("/")) return keyOpt + basename(filePath);
239
+ return keyOpt;
240
+ }
241
+
242
+ async function putPart(filePath, part) {
243
+ const start = part.byte_start ?? 0;
244
+ const end = part.byte_end ?? (statSync(filePath).size - 1);
245
+ const stream = createReadStream(filePath, { start, end });
246
+ const chunks = [];
247
+ for await (const c of stream) chunks.push(c);
248
+ const body = Buffer.concat(chunks);
249
+
250
+ const res = await fetch(part.url, { method: "PUT", body });
251
+ if (!res.ok) {
252
+ const errBody = await res.text().catch(() => "");
253
+ throw new Error(`Part ${part.part_number} PUT failed: ${res.status} ${res.statusText}${errBody ? " — " + errBody.slice(0, 200) : ""}`);
254
+ }
255
+ const etag = res.headers.get("etag") ?? "";
256
+ return { etag };
257
+ }
258
+
259
+ async function withConcurrency(items, limit, worker) {
260
+ const running = [];
261
+ for (const item of items) {
262
+ const p = Promise.resolve().then(() => worker(item));
263
+ running.push(p);
264
+ if (running.length >= limit) {
265
+ await Promise.race(running.map((r) => r.catch(() => {})));
266
+ for (let i = running.length - 1; i >= 0; i--) {
267
+ if (isSettled(running[i])) running.splice(i, 1);
268
+ }
269
+ }
270
+ }
271
+ await Promise.all(running);
272
+ }
273
+
274
+ function isSettled(p) {
275
+ const marker = {};
276
+ return Promise.race([p, marker]).then(
277
+ (v) => v !== marker,
278
+ () => true,
279
+ );
280
+ }
281
+
282
+ async function put(projectId, argv) {
283
+ const opts = parseArgs(argv);
284
+ opts.project = opts.project || projectId;
285
+ const project = resolveProject(opts.project);
286
+
287
+ if (opts.positional.length === 0) die("At least one file path is required");
288
+ if (opts.immutable && opts.positional.length > 1 && opts.key && !opts.key.endsWith("/")) {
289
+ die("--key with --immutable across multiple files requires a directory prefix (ending with /)");
290
+ }
291
+
292
+ const results = [];
293
+ for (const filePath of opts.positional) {
294
+ if (!existsSync(filePath)) die(`File not found: ${filePath}`);
295
+ const r = await putOne(project, filePath, opts);
296
+ results.push({ file: filePath, ...r });
297
+ }
298
+ if (!opts.json) console.log(JSON.stringify(results, null, 2));
299
+ }
300
+
301
+ // ---------------------------------------------------------------------------
302
+ // get
303
+ // ---------------------------------------------------------------------------
304
+
305
+ async function get(projectId, argv) {
306
+ const opts = parseArgs(argv);
307
+ opts.project = opts.project || projectId;
308
+ const project = resolveProject(opts.project);
309
+ if (opts.positional.length === 0) die("Key required");
310
+ if (!opts.output) die("--output <file> required");
311
+ const key = opts.positional[0];
312
+
313
+ const res = await fetch(`${API}/storage/v1/blob/${encodeKey(key)}`, {
314
+ headers: { apikey: project.anon_key, Authorization: `Bearer ${project.anon_key}` },
315
+ });
316
+ if (!res.ok) die(`GET failed: HTTP ${res.status}`);
317
+ if (!res.body) die("Empty response body");
318
+
319
+ mkdirSync(dirname(resolvePath(opts.output)), { recursive: true });
320
+ await pipeline(res.body, createWriteStream(opts.output));
321
+ console.log(JSON.stringify({ status: "ok", key, output: opts.output }));
322
+ }
323
+
324
+ // ---------------------------------------------------------------------------
325
+ // ls
326
+ // ---------------------------------------------------------------------------
327
+
328
+ async function ls(projectId, argv) {
329
+ const opts = parseArgs(argv);
330
+ opts.project = opts.project || projectId;
331
+ const project = resolveProject(opts.project);
332
+
333
+ const qs = new URLSearchParams();
334
+ if (opts.prefix) qs.set("prefix", opts.prefix);
335
+ if (opts.limit) qs.set("limit", String(opts.limit));
336
+ const url = `${API}/storage/v1/blobs${qs.toString() ? "?" + qs.toString() : ""}`;
337
+
338
+ const res = await fetch(url, {
339
+ headers: { apikey: project.anon_key, Authorization: `Bearer ${project.anon_key}` },
340
+ });
341
+ const data = await res.json();
342
+ if (!res.ok) { console.error(JSON.stringify({ status: "error", http: res.status, ...data })); process.exit(1); }
343
+ console.log(JSON.stringify(data, null, 2));
344
+ }
345
+
346
+ // ---------------------------------------------------------------------------
347
+ // rm
348
+ // ---------------------------------------------------------------------------
349
+
350
+ async function rm(projectId, argv) {
351
+ const opts = parseArgs(argv);
352
+ opts.project = opts.project || projectId;
353
+ const project = resolveProject(opts.project);
354
+ if (opts.positional.length === 0) die("Key required");
355
+ const key = opts.positional[0];
356
+
357
+ const res = await fetch(`${API}/storage/v1/blob/${encodeKey(key)}`, {
358
+ method: "DELETE",
359
+ headers: { apikey: project.anon_key, Authorization: `Bearer ${project.anon_key}` },
360
+ });
361
+ const data = await res.json().catch(() => ({}));
362
+ if (!res.ok) { console.error(JSON.stringify({ status: "error", http: res.status, ...data })); process.exit(1); }
363
+ console.log(JSON.stringify(data, null, 2));
364
+ }
365
+
366
+ // ---------------------------------------------------------------------------
367
+ // sign
368
+ // ---------------------------------------------------------------------------
369
+
370
+ async function sign(projectId, argv) {
371
+ const opts = parseArgs(argv);
372
+ opts.project = opts.project || projectId;
373
+ const project = resolveProject(opts.project);
374
+ if (opts.positional.length === 0) die("Key required");
375
+ const key = opts.positional[0];
376
+
377
+ const res = await fetch(`${API}/storage/v1/blob/${encodeKey(key)}/sign`, {
378
+ method: "POST",
379
+ headers: { "content-type": "application/json", apikey: project.anon_key, Authorization: `Bearer ${project.anon_key}` },
380
+ body: JSON.stringify(opts.ttl ? { ttl_seconds: opts.ttl } : {}),
381
+ });
382
+ const data = await res.json();
383
+ if (!res.ok) { console.error(JSON.stringify({ status: "error", http: res.status, ...data })); process.exit(1); }
384
+ console.log(JSON.stringify(data, null, 2));
385
+ }
386
+
387
+ // ---------------------------------------------------------------------------
388
+ // Shared helpers
389
+ // ---------------------------------------------------------------------------
390
+
391
+ function encodeKey(key) {
392
+ // Encode each path segment; preserve `/` as separator.
393
+ return key.split("/").map(encodeURIComponent).join("/");
394
+ }
395
+
396
+ function guessContentType(key) {
397
+ const ext = key.slice(key.lastIndexOf(".") + 1).toLowerCase();
398
+ const map = {
399
+ png: "image/png", jpg: "image/jpeg", jpeg: "image/jpeg", gif: "image/gif",
400
+ svg: "image/svg+xml", webp: "image/webp",
401
+ html: "text/html", css: "text/css", js: "text/javascript", json: "application/json",
402
+ txt: "text/plain", md: "text/markdown", pdf: "application/pdf",
403
+ mp4: "video/mp4", webm: "video/webm", mov: "video/quicktime",
404
+ zip: "application/zip", tgz: "application/gzip", gz: "application/gzip",
405
+ };
406
+ return map[ext] ?? "application/octet-stream";
407
+ }
408
+
409
+ async function apiFetch(url, method, project, body) {
410
+ const res = await fetch(url, {
411
+ method,
412
+ headers: {
413
+ "content-type": "application/json",
414
+ apikey: project.anon_key,
415
+ Authorization: `Bearer ${project.anon_key}`,
416
+ },
417
+ body: body === null ? undefined : JSON.stringify(body ?? {}),
418
+ });
419
+ const txt = await res.text();
420
+ let parsed; try { parsed = txt ? JSON.parse(txt) : null; } catch { parsed = txt; }
421
+ return { status: res.status, body: parsed };
422
+ }
423
+
424
+ function log(opts, event) {
425
+ if (opts.json) console.log(JSON.stringify(event));
426
+ }
427
+
428
+ // ---------------------------------------------------------------------------
429
+ // Dispatch
430
+ // ---------------------------------------------------------------------------
431
+
432
+ export async function run(sub, args) {
433
+ if (!sub || sub === "--help" || sub === "-h") {
434
+ console.log(HELP);
435
+ process.exit(0);
436
+ }
437
+ const defaultProject = process.env.RUN402_PROJECT ?? null;
438
+ switch (sub) {
439
+ case "put": await put(defaultProject, args); break;
440
+ case "get": await get(defaultProject, args); break;
441
+ case "ls": await ls(defaultProject, args); break;
442
+ case "rm": await rm(defaultProject, args); break;
443
+ case "sign": await sign(defaultProject, args); break;
444
+ default:
445
+ console.error(`Unknown subcommand: ${sub}`);
446
+ console.log(HELP);
447
+ process.exit(1);
448
+ }
449
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "run402",
3
- "version": "1.34.2",
3
+ "version": "1.35.0",
4
4
  "description": "CLI for Run402 — provision Postgres databases, deploy static sites, generate images, and manage wallets via x402 and MPP micropayments.",
5
5
  "type": "module",
6
6
  "bin": {