run402 1.34.2 → 1.35.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/cli.mjs CHANGED
@@ -28,7 +28,8 @@ Commands:
28
28
  deploy Deploy a full-stack app or static site (requires active tier)
29
29
  functions Manage serverless functions (deploy, invoke, logs, list, delete)
30
30
  secrets Manage project secrets (set, list, delete)
31
- storage Manage file storage (upload, download, list, delete)
31
+ blob Direct-to-S3 blob storage (put, get, ls, rm, sign) — up to 5 TiB
32
+ storage Legacy file storage (deprecated — sunset 2026-06-01, use 'blob')
32
33
  sites Deploy static sites
33
34
  subdomains Manage custom subdomains (claim, list, delete)
34
35
  domains Manage custom domains (add, list, status, delete)
@@ -115,10 +116,19 @@ switch (cmd) {
115
116
  break;
116
117
  }
117
118
  case "storage": {
119
+ process.stderr.write(
120
+ "run402 storage is deprecated — sunset 2026-06-01. Use `run402 blob` instead.\n" +
121
+ "See https://run402.com/docs/blob#migration\n\n",
122
+ );
118
123
  const { run } = await import("./lib/storage.mjs");
119
124
  await run(sub, rest);
120
125
  break;
121
126
  }
127
+ case "blob": {
128
+ const { run } = await import("./lib/blob.mjs");
129
+ await run(sub, rest);
130
+ break;
131
+ }
122
132
  case "sites": {
123
133
  const { run } = await import("./lib/sites.mjs");
124
134
  await run(sub, rest);
package/lib/blob.mjs ADDED
@@ -0,0 +1,449 @@
1
+ /**
2
+ * run402 blob — direct-to-S3 storage CLI.
3
+ *
4
+ * Usage:
5
+ * run402 blob put <file> [files...] [--project <id>] [--key <dest>] [--private] [--immutable] [--concurrency N] [--no-resume]
6
+ * run402 blob get <key> --output <file> [--project <id>]
7
+ * run402 blob ls [--project <id>] [--prefix <p>] [--limit <n>]
8
+ * run402 blob rm <key> [--project <id>]
9
+ * run402 blob sign <key> [--project <id>] [--ttl <seconds>]
10
+ *
11
+ * For any file ≤ 5 GiB a single presigned PUT is used. Larger files use S3
12
+ * multipart uploads. The gateway chooses the part size per upload (returned
13
+ * as part_size_bytes at init): 16 MiB by default (640 parts at 10 GiB),
14
+ * scaling up for very large files so the count stays within S3's 10 000-part
+ * limit up to 5 TiB. The gateway never carries upload bytes — PUTs go
+ * straight to S3 from the client.
15
+ *
16
+ * Resumable uploads are enabled by default. The CLI persists per-upload
17
+ * state to ~/.run402/uploads/<upload_id>.json so a Ctrl-C'd upload can be
18
+ * resumed by re-running the same command.
19
+ */
20
+
21
+ import {
22
+ createReadStream,
23
+ statSync,
24
+ readFileSync,
25
+ writeFileSync,
26
+ mkdirSync,
27
+ existsSync,
28
+ unlinkSync,
29
+ readdirSync,
30
+ createWriteStream,
31
+ } from "node:fs";
32
+ import { createHash } from "node:crypto";
33
+ import { basename, dirname, join, resolve as resolvePath } from "node:path";
34
+ import { homedir } from "node:os";
35
+ import { pipeline } from "node:stream/promises";
36
+
37
+ import { findProject, API } from "./config.mjs";
38
+
39
+ const HELP = `run402 blob — Direct-to-S3 blob storage
40
+
41
+ Usage:
42
+ run402 blob put <file> [files...] [options]
43
+ run402 blob get <key> --output <file> [options]
44
+ run402 blob ls [options]
45
+ run402 blob rm <key> [options]
46
+ run402 blob sign <key> [options]
47
+
48
+ Options:
49
+ --project <id> Project ID (defaults to active project from 'run402 projects use')
50
+ --key <dest> Destination key (put only; defaults to file basename)
51
+ --private Upload as private (not served by CDN; apikey required to read)
52
+ --immutable Adds a content-hash suffix to the URL so overwrites produce distinct URLs.
53
+ Requires computing SHA-256 over the file (CLI does this automatically).
54
+ --concurrency N Concurrent part PUTs (default 4)
55
+ --no-resume Start fresh; ignore any cached state
56
+ --json NDJSON progress events (for agent consumption)
57
+ --prefix <p> Prefix filter (ls only)
58
+ --limit <n> Max results (ls only; default 100, max 1000)
59
+ --ttl <seconds> Signed-URL TTL (sign only; default 3600, max 604800)
60
+
61
+ Examples:
62
+ run402 blob put ./artifact.tgz --project abc123
63
+ run402 blob put ./dist/**/*.png --project abc123 --key assets/
64
+ run402 blob put huge.bin --project abc123 --immutable
65
+ run402 blob get images/logo.png --output /tmp/logo.png --project abc123
66
+ run402 blob ls --project abc123 --prefix images/
67
+ run402 blob rm images/logo.png --project abc123
68
+ run402 blob sign images/logo.png --project abc123 --ttl 600
69
+ `;
70
+
71
+ const UPLOAD_STATE_DIR = join(homedir(), ".run402", "uploads");
72
+
73
/**
 * Print a structured error to stderr and terminate the process.
 * @param {string} msg - Human-readable error message.
 * @param {number} [code=1] - Process exit code.
 */
function die(msg, code = 1) {
  const payload = { status: "error", message: msg };
  console.error(JSON.stringify(payload));
  process.exit(code);
}
77
+
78
/**
 * Parse blob subcommand arguments into an options object.
 *
 * Fix: integer-valued flags are now validated. Previously `parseInt` could
 * yield NaN (e.g. `--concurrency abc` or a flag with no value), and a NaN
 * concurrency silently disabled the upload concurrency limit downstream.
 * Invalid values now fail fast via die().
 *
 * Unknown `--flags` are still silently ignored (original behavior); non-flag
 * tokens are collected into `positional`.
 *
 * @param {string[]} args - Raw argv slice after the subcommand.
 * @returns {object} Parsed options (positional, project, key, private,
 *   immutable, concurrency, resume, json, prefix, limit, output, ttl).
 */
function parseArgs(args) {
  const out = { positional: [], project: null, key: null, private: false, immutable: false,
    concurrency: 4, resume: true, json: false, prefix: null, limit: null,
    output: null, ttl: null };
  // Validate integer flags: reject NaN, non-integers, and non-positive values.
  const intFlag = (flag, raw) => {
    const n = Number.parseInt(raw, 10);
    if (!Number.isInteger(n) || n <= 0) die(`${flag} requires a positive integer, got: ${raw}`);
    return n;
  };
  for (let i = 0; i < args.length; i++) {
    const a = args[i];
    if (a === "--project") out.project = args[++i];
    else if (a === "--key") out.key = args[++i];
    else if (a === "--private") out.private = true;
    else if (a === "--immutable") out.immutable = true;
    else if (a === "--concurrency") out.concurrency = intFlag(a, args[++i]);
    else if (a === "--no-resume") out.resume = false;
    else if (a === "--json") out.json = true;
    else if (a === "--prefix") out.prefix = args[++i];
    else if (a === "--limit") out.limit = intFlag(a, args[++i]);
    else if (a === "--output" || a === "-o") out.output = args[++i];
    else if (a === "--ttl") out.ttl = intFlag(a, args[++i]);
    else if (!a.startsWith("--")) out.positional.push(a);
  }
  return out;
}
99
+
100
/**
 * Resolve a project ID to its keystore entry, exiting with a structured
 * error when the ID is missing or unknown.
 * @param {string|null} projectId
 * @returns {object} The project entry from the local keystore.
 */
function resolveProject(projectId) {
  if (!projectId) {
    die("--project is required (or run 'run402 projects use <id>' to set default)");
  }
  const project = findProject(projectId);
  if (!project) die(`Project not found: ${projectId}`);
  return project;
}
106
+
107
/**
 * Stream a file through SHA-256 without loading it into memory at once.
 * @param {string} filePath
 * @returns {Promise<string>} Lowercase hex digest (64 chars).
 */
async function sha256File(filePath) {
  const digest = createHash("sha256");
  const input = createReadStream(filePath);
  for await (const chunk of input) {
    digest.update(chunk);
  }
  return digest.digest("hex");
}
113
+
114
/**
 * Load the persisted checkpoint for an upload ID.
 * @param {string} uploadId
 * @returns {object|null} Parsed state, or null when the file is missing or
 *   unparseable (corrupt/partial writes are treated as "no state").
 */
function loadState(uploadId) {
  const statePath = join(UPLOAD_STATE_DIR, `${uploadId}.json`);
  if (!existsSync(statePath)) return null;
  try {
    return JSON.parse(readFileSync(statePath, "utf8"));
  } catch {
    return null;
  }
}
120
+
121
/**
 * Persist an upload checkpoint to ~/.run402/uploads/<upload_id>.json so an
 * interrupted upload can be resumed by re-running the same command.
 * @param {object} state - Must carry `upload_id`; stored pretty-printed.
 */
function saveState(state) {
  mkdirSync(UPLOAD_STATE_DIR, { recursive: true });
  const statePath = join(UPLOAD_STATE_DIR, `${state.upload_id}.json`);
  writeFileSync(statePath, JSON.stringify(state, null, 2));
}
125
+
126
/** Delete the persisted checkpoint for an upload, if one exists. */
function removeState(uploadId) {
  const statePath = join(UPLOAD_STATE_DIR, `${uploadId}.json`);
  if (existsSync(statePath)) unlinkSync(statePath);
}
130
+
131
/**
 * Scan the upload-state directory for a cached session matching this exact
 * project + absolute local path + destination key combination.
 * Unreadable/corrupt state files are skipped silently.
 * @returns {object|null} The saved state, or null when none matches.
 */
function findResumableStateForFile(projectId, localPath, key) {
  if (!existsSync(UPLOAD_STATE_DIR)) return null;
  const candidates = readdirSync(UPLOAD_STATE_DIR).filter((f) => f.endsWith(".json"));
  for (const name of candidates) {
    let saved;
    try {
      saved = JSON.parse(readFileSync(join(UPLOAD_STATE_DIR, name), "utf8"));
    } catch {
      continue; // corrupt or partially-written checkpoint — ignore it
    }
    if (saved.project_id === projectId && saved.local_path === localPath && saved.key === key) {
      return saved;
    }
  }
  return null;
}
142
+
143
+ // ---------------------------------------------------------------------------
144
+ // put
145
+ // ---------------------------------------------------------------------------
146
+
147
/**
 * Upload a single file to blob storage, resuming a cached session when one
 * exists for the same project + absolute path + destination key.
 *
 * Flow:
 *   1. Hash the file up front only when --immutable was passed.
 *   2. Unless --no-resume, look for a matching checkpoint; re-poll the
 *      session and resume only while the gateway reports it "active".
 *   3. Otherwise POST /storage/v1/uploads to open a session and checkpoint
 *      its state to disk.
 *   4. PUT outstanding parts to their presigned URLs with bounded
 *      concurrency, checkpointing each completed part's ETag.
 *   5. POST .../complete, delete the checkpoint, return the response body.
 *
 * NOTE(review): on resume, `initRes.parts` (including presigned URLs) comes
 * from the checkpoint saved at init time. Presigned URLs expire — confirm
 * the gateway re-signs them on the poll, or long-paused resumes will fail.
 *
 * @param {object} project - Keystore project entry (id, anon_key, ...).
 * @param {string} filePath - Local file to upload.
 * @param {object} opts - Parsed options from parseArgs().
 * @returns {Promise<object>} The gateway's `complete` response body.
 */
async function putOne(project, filePath, opts) {
  const stat = statSync(filePath);
  const size = stat.size;
  const destKey = computeDestKey(filePath, opts.key);
  const absLocal = resolvePath(filePath);

  // Compute sha256 for immutable uploads up front; otherwise lazy.
  const needSha = opts.immutable;
  const sha256 = needSha ? await sha256File(filePath) : undefined;

  // Attempt to resume
  let state = opts.resume
    ? findResumableStateForFile(project.id, absLocal, destKey)
    : null;
  let initRes;
  if (state) {
    // Re-poll the session; if it's still active, resume. Otherwise start fresh.
    const poll = await apiFetch(`${API}/storage/v1/uploads/${state.upload_id}`, "GET", project, null);
    if (poll.status === 200 && poll.body.status === "active") {
      log(opts, { event: "resume", upload_id: state.upload_id, key: destKey });
      initRes = { upload_id: state.upload_id, mode: state.mode, parts: state.parts, part_count: state.part_count, part_size_bytes: state.part_size_bytes };
    } else {
      // Stale, aborted, or completed session — drop the checkpoint, re-init.
      removeState(state.upload_id);
      state = null;
    }
  }

  if (!state) {
    const init = await apiFetch(`${API}/storage/v1/uploads`, "POST", project, {
      key: destKey,
      size_bytes: size,
      content_type: guessContentType(destKey),
      visibility: opts.private ? "private" : "public",
      immutable: opts.immutable,
      sha256,
    });
    if (init.status !== 201) die(`Init failed: HTTP ${init.status}: ${JSON.stringify(init.body)}`);
    initRes = init.body;
    // Checkpoint immediately so a crash before the first part completes
    // still leaves a resumable state file on disk.
    saveState({
      upload_id: initRes.upload_id,
      project_id: project.id,
      local_path: absLocal,
      key: destKey,
      mode: initRes.mode,
      part_size_bytes: initRes.part_size_bytes,
      part_count: initRes.part_count,
      parts: initRes.parts,
      parts_done: {},
      sha256,
      started_at: new Date().toISOString(),
    });
    // Re-read what was just written so `state` has the canonical on-disk shape.
    state = loadState(initRes.upload_id);
  }

  // Upload parts with concurrency limit. For single-PUT mode part_count=1 and
  // this loop runs once.
  const etags = Array(initRes.part_count);
  for (const pn of Object.keys(state.parts_done || {})) {
    const pd = state.parts_done[pn];
    // Legacy resume state stored just the etag string; new code stores
    // { etag, sha256 }. Normalize on load.
    etags[parseInt(pn, 10) - 1] = typeof pd === "string" ? { etag: pd, sha256: undefined } : pd;
  }

  // Presigned URLs are signed WITHOUT ChecksumAlgorithm (see gateway
  // s3-presign.ts). The client-asserted sha256 declared at init is the
  // integrity attestation — no x-amz-checksum-sha256 header on PUTs, and
  // the gateway trusts the declared value at complete when S3 has none.
  const todo = initRes.parts.filter((p) => !(state.parts_done || {})[String(p.part_number)]);
  await withConcurrency(todo, opts.concurrency, async (part) => {
    const { etag } = await putPart(filePath, part);
    etags[part.part_number - 1] = { etag };
    // Checkpoint after every part so Ctrl-C loses at most the in-flight parts.
    state.parts_done[String(part.part_number)] = { etag };
    saveState(state);
    log(opts, { event: "part", upload_id: state.upload_id, part_number: part.part_number, etag });
  });

  // Complete
  const body = initRes.mode === "multipart"
    ? { parts: etags.map((e, i) => ({ part_number: i + 1, etag: e.etag })) }
    : {};
  const complete = await apiFetch(`${API}/storage/v1/uploads/${state.upload_id}/complete`, "POST", project, body);
  if (complete.status !== 200) die(`Complete failed: HTTP ${complete.status}: ${JSON.stringify(complete.body)}`);

  // Success — the checkpoint is no longer needed.
  removeState(state.upload_id);
  log(opts, { event: "done", ...complete.body });
  return complete.body;
}
235
+
236
/**
 * Derive the destination key for an upload.
 * No --key: the file's basename. --key ending in "/": treated as a directory
 * prefix with the basename appended. Anything else: used verbatim.
 */
function computeDestKey(filePath, keyOpt) {
  const name = basename(filePath);
  if (!keyOpt) return name;
  return keyOpt.endsWith("/") ? keyOpt + name : keyOpt;
}
241
+
242
/**
 * PUT one part (or, in single-PUT mode, the whole object) to its presigned
 * S3 URL and return the ETag from the response headers.
 *
 * Parts with no byte offsets are single-PUT mode: the whole file is sent.
 *
 * NOTE(review): the byte range is buffered fully in memory before the PUT.
 * At the default 16 MiB part size that is fine, but single-PUT mode buffers
 * the entire file (up to 5 GiB) — confirm that is acceptable for target
 * environments.
 *
 * @throws {Error} when the PUT is not 2xx (message includes status and up to
 *   200 chars of the error body).
 */
async function putPart(filePath, part) {
  const start = part.byte_start ?? 0;
  const end = part.byte_end ?? (statSync(filePath).size - 1);

  const buffers = [];
  for await (const chunk of createReadStream(filePath, { start, end })) {
    buffers.push(chunk);
  }

  const res = await fetch(part.url, { method: "PUT", body: Buffer.concat(buffers) });
  if (!res.ok) {
    const errBody = await res.text().catch(() => "");
    throw new Error(`Part ${part.part_number} PUT failed: ${res.status} ${res.statusText}${errBody ? " — " + errBody.slice(0, 200) : ""}`);
  }
  return { etag: res.headers.get("etag") ?? "" };
}
258
+
259
/**
 * Run `worker(item)` over `items` with at most `limit` workers in flight.
 *
 * Bug fix: the previous implementation pruned its in-flight list with
 * `if (isSettled(running[i]))`, but isSettled returns a Promise — always
 * truthy — so EVERY tracked promise (settled or pending) was dropped. The
 * trailing `Promise.all` then only covered the final batch: the function
 * could resolve before all work finished, and rejections from dropped
 * promises became unhandled. This version uses a shared work queue drained
 * by `limit` worker loops, so completion and errors are always observed.
 *
 * @param {Array} items - Work items, picked up in order.
 * @param {number} limit - Max concurrent workers; values < 1 behave as 1.
 * @param {(item: any) => Promise<any>} worker - Async task per item.
 * @returns {Promise<void>} Resolves when every item has been processed;
 *   rejects with the first worker error.
 */
async function withConcurrency(items, limit, worker) {
  const queue = [...items];
  if (queue.length === 0) return;
  const width = Math.min(Math.max(1, Number(limit) || 1), queue.length);
  const runners = Array.from({ length: width }, async () => {
    while (queue.length > 0) {
      await worker(queue.shift());
    }
  });
  await Promise.all(runners);
}
273
+
274
/**
 * Asynchronously probe whether a promise has already settled.
 *
 * IMPORTANT: this returns a Promise<boolean>, NOT a boolean — it must be
 * awaited. Using the return value directly in an `if` is always truthy.
 *
 * Works by racing `p` against a unique sentinel value: if the race yields
 * the sentinel, `p` was still pending; any other fulfillment (or a
 * rejection) means `p` had already settled.
 */
function isSettled(p) {
  const sentinel = Symbol("pending");
  return Promise.race([p, sentinel]).then(
    (value) => value !== sentinel,
    () => true,
  );
}
281
+
282
/**
 * `blob put` — upload one or more files to blob storage.
 *
 * Fix: the key-collision guard previously fired only with --immutable, so a
 * plain `blob put a.txt b.txt --key dest` silently uploaded both files to
 * the SAME key, the second overwriting the first. Multiple files with a
 * fixed (non-directory) --key now always fail fast; use a trailing `/` to
 * upload into a prefix.
 *
 * @param {string|null} projectId - Default project (from RUN402_PROJECT).
 * @param {string[]} argv - Raw subcommand arguments.
 */
async function put(projectId, argv) {
  const opts = parseArgs(argv);
  opts.project = opts.project || projectId;
  const project = resolveProject(opts.project);

  if (opts.positional.length === 0) die("At least one file path is required");
  if (opts.positional.length > 1 && opts.key && !opts.key.endsWith("/")) {
    die("--key with multiple files requires a directory prefix (ending with /)");
  }

  const results = [];
  for (const filePath of opts.positional) {
    if (!existsSync(filePath)) die(`File not found: ${filePath}`);
    const r = await putOne(project, filePath, opts);
    results.push({ file: filePath, ...r });
  }
  // In --json mode, putOne already emitted NDJSON progress; otherwise print
  // a single pretty summary array.
  if (!opts.json) console.log(JSON.stringify(results, null, 2));
}
300
+
301
+ // ---------------------------------------------------------------------------
302
+ // get
303
+ // ---------------------------------------------------------------------------
304
+
305
/**
 * `blob get` — download a blob to a local file. The HTTP body is streamed
 * straight to disk; the output's parent directories are created as needed.
 */
async function get(projectId, argv) {
  const opts = parseArgs(argv);
  opts.project = opts.project || projectId;
  const project = resolveProject(opts.project);
  if (opts.positional.length === 0) die("Key required");
  if (!opts.output) die("--output <file> required");
  const [key] = opts.positional;

  const headers = { apikey: project.anon_key, Authorization: `Bearer ${project.anon_key}` };
  const res = await fetch(`${API}/storage/v1/blob/${encodeKey(key)}`, { headers });
  if (!res.ok) die(`GET failed: HTTP ${res.status}`);
  if (!res.body) die("Empty response body");

  mkdirSync(dirname(resolvePath(opts.output)), { recursive: true });
  await pipeline(res.body, createWriteStream(opts.output));
  console.log(JSON.stringify({ status: "ok", key, output: opts.output }));
}
323
+
324
+ // ---------------------------------------------------------------------------
325
+ // ls
326
+ // ---------------------------------------------------------------------------
327
+
328
/**
 * `blob ls` — list blobs, optionally filtered by --prefix and capped by
 * --limit.
 *
 * Fix: the response body is now parsed tolerantly (matching `rm`). The old
 * bare `res.json()` threw an unhandled SyntaxError when an error response
 * carried a non-JSON body (e.g. an HTML 502 from a proxy), masking the real
 * HTTP status in the output.
 */
async function ls(projectId, argv) {
  const opts = parseArgs(argv);
  opts.project = opts.project || projectId;
  const project = resolveProject(opts.project);

  const qs = new URLSearchParams();
  if (opts.prefix) qs.set("prefix", opts.prefix);
  if (opts.limit) qs.set("limit", String(opts.limit));
  const url = `${API}/storage/v1/blobs${qs.toString() ? "?" + qs.toString() : ""}`;

  const res = await fetch(url, {
    headers: { apikey: project.anon_key, Authorization: `Bearer ${project.anon_key}` },
  });
  // Tolerant parse: fall back to {} so a non-JSON error body still reports
  // the HTTP status instead of crashing.
  const data = await res.json().catch(() => ({}));
  if (!res.ok) { console.error(JSON.stringify({ status: "error", http: res.status, ...data })); process.exit(1); }
  console.log(JSON.stringify(data, null, 2));
}
345
+
346
+ // ---------------------------------------------------------------------------
347
+ // rm
348
+ // ---------------------------------------------------------------------------
349
+
350
/**
 * `blob rm` — delete a blob by key. Error responses are parsed tolerantly
 * (non-JSON bodies fall back to an empty object) and reported with the HTTP
 * status on stderr; success bodies are pretty-printed on stdout.
 */
async function rm(projectId, argv) {
  const opts = parseArgs(argv);
  opts.project = opts.project || projectId;
  const project = resolveProject(opts.project);
  if (opts.positional.length === 0) die("Key required");
  const [key] = opts.positional;

  const headers = { apikey: project.anon_key, Authorization: `Bearer ${project.anon_key}` };
  const res = await fetch(`${API}/storage/v1/blob/${encodeKey(key)}`, { method: "DELETE", headers });
  const data = await res.json().catch(() => ({}));
  if (!res.ok) {
    console.error(JSON.stringify({ status: "error", http: res.status, ...data }));
    process.exit(1);
  }
  console.log(JSON.stringify(data, null, 2));
}
365
+
366
+ // ---------------------------------------------------------------------------
367
+ // sign
368
+ // ---------------------------------------------------------------------------
369
+
370
/**
 * `blob sign` — mint a time-limited signed URL for a (typically private)
 * blob. --ttl sets the lifetime in seconds; omitted means server default.
 *
 * Fix: the response body is now parsed tolerantly (matching `rm`). The old
 * bare `res.json()` threw an unhandled SyntaxError when an error response
 * carried a non-JSON body (e.g. an HTML 502 from a proxy), masking the real
 * HTTP status in the output.
 */
async function sign(projectId, argv) {
  const opts = parseArgs(argv);
  opts.project = opts.project || projectId;
  const project = resolveProject(opts.project);
  if (opts.positional.length === 0) die("Key required");
  const key = opts.positional[0];

  const res = await fetch(`${API}/storage/v1/blob/${encodeKey(key)}/sign`, {
    method: "POST",
    headers: { "content-type": "application/json", apikey: project.anon_key, Authorization: `Bearer ${project.anon_key}` },
    body: JSON.stringify(opts.ttl ? { ttl_seconds: opts.ttl } : {}),
  });
  // Tolerant parse: fall back to {} so a non-JSON error body still reports
  // the HTTP status instead of crashing.
  const data = await res.json().catch(() => ({}));
  if (!res.ok) { console.error(JSON.stringify({ status: "error", http: res.status, ...data })); process.exit(1); }
  console.log(JSON.stringify(data, null, 2));
}
386
+
387
+ // ---------------------------------------------------------------------------
388
+ // Shared helpers
389
+ // ---------------------------------------------------------------------------
390
+
391
/**
 * Percent-encode a blob key for use in a URL path. Each path segment is
 * encoded individually so the `/` separators survive intact.
 */
function encodeKey(key) {
  const segments = key.split("/");
  return segments.map((s) => encodeURIComponent(s)).join("/");
}
395
+
396
/**
 * Best-effort MIME type from a key's file extension (case-insensitive).
 * Unknown or missing extensions fall back to application/octet-stream.
 */
function guessContentType(key) {
  const TYPES = {
    png: "image/png", jpg: "image/jpeg", jpeg: "image/jpeg", gif: "image/gif",
    svg: "image/svg+xml", webp: "image/webp",
    html: "text/html", css: "text/css", js: "text/javascript", json: "application/json",
    txt: "text/plain", md: "text/markdown", pdf: "application/pdf",
    mp4: "video/mp4", webm: "video/webm", mov: "video/quicktime",
    zip: "application/zip", tgz: "application/gzip", gz: "application/gzip",
  };
  // lastIndexOf(".") === -1 makes slice(0) return the whole key, which will
  // not match any entry and falls through to octet-stream — intended.
  const dot = key.lastIndexOf(".");
  const ext = key.slice(dot + 1).toLowerCase();
  return TYPES[ext] ?? "application/octet-stream";
}
408
+
409
/**
 * JSON fetch wrapper for gateway API calls. Sends the project's credentials,
 * JSON-encodes `body` (passing `null` means "no request body" — used for
 * GETs), and returns `{ status, body }` where `body` is parsed JSON when
 * possible, the raw text otherwise, or null for an empty response.
 */
async function apiFetch(url, method, project, body) {
  const headers = {
    "content-type": "application/json",
    apikey: project.anon_key,
    Authorization: `Bearer ${project.anon_key}`,
  };
  const res = await fetch(url, {
    method,
    headers,
    body: body === null ? undefined : JSON.stringify(body ?? {}),
  });
  const text = await res.text();
  let parsed;
  try {
    parsed = text ? JSON.parse(text) : null;
  } catch {
    parsed = text;
  }
  return { status: res.status, body: parsed };
}
423
+
424
/** Emit an NDJSON progress event on stdout, but only when --json was given. */
function log(opts, event) {
  if (!opts.json) return;
  console.log(JSON.stringify(event));
}
427
+
428
+ // ---------------------------------------------------------------------------
429
+ // Dispatch
430
+ // ---------------------------------------------------------------------------
431
+
432
/**
 * Entry point for `run402 blob <sub> [args...]`.
 *
 * Missing sub or --help/-h prints HELP and exits 0; unknown subcommands
 * print an error plus HELP and exit 1. RUN402_PROJECT supplies the default
 * project when --project is omitted.
 */
export async function run(sub, args) {
  if (!sub || sub === "--help" || sub === "-h") {
    console.log(HELP);
    process.exit(0);
  }
  const defaultProject = process.env.RUN402_PROJECT ?? null;
  const handlers = { put, get, ls, rm, sign };
  // Object.hasOwn guards against prototype keys (e.g. sub === "toString")
  // being treated as valid subcommands.
  const handler = Object.hasOwn(handlers, sub) ? handlers[sub] : null;
  if (!handler) {
    console.error(`Unknown subcommand: ${sub}`);
    console.log(HELP);
    process.exit(1);
  }
  await handler(defaultProject, args);
}
package/lib/init.mjs CHANGED
@@ -12,6 +12,8 @@ const HELP = `run402 init — Set up allowance, funding, and check tier status
12
12
  Usage:
13
13
  run402 init Set up with x402 (Base Sepolia) — default
14
14
  run402 init mpp Set up with MPP (Tempo Moderato)
15
+ run402 init --json Same as init, but emit a JSON summary on stdout
16
+ (human lines go to stderr — for agent automation)
15
17
 
16
18
  Steps (idempotent — safe to re-run):
17
19
  1. Creates config directory (~/.config/run402)
@@ -25,12 +27,28 @@ Run this once to get started, or again to check your setup.
25
27
  `;
26
28
 
27
29
  function short(addr) { return addr.slice(0, 6) + "..." + addr.slice(-4); }
28
- function line(label, value) { console.log(` ${label.padEnd(10)} ${value}`); }
29
30
 
30
31
  export async function run(args = []) {
31
32
  if (args.includes("--help") || args.includes("-h")) { console.log(HELP); process.exit(0); }
33
+ const jsonMode = args.includes("--json");
32
34
  const isMpp = args[0] === "mpp";
33
- console.log();
35
+
36
+ // In --json mode, human-readable lines go to stderr so stdout stays clean for
37
+ // agents. We also collect structured data for the final JSON emit.
38
+ const write = jsonMode ? (s) => console.error(s) : (s) => console.log(s);
39
+ const line = (label, value) => write(` ${label.padEnd(10)} ${value}`);
40
+ const summary = {
41
+ config_dir: CONFIG_DIR,
42
+ allowance: null,
43
+ rail: null,
44
+ network: null,
45
+ balance: null,
46
+ tier: null,
47
+ projects_saved: 0,
48
+ next_step: null,
49
+ };
50
+
51
+ write("");
34
52
 
35
53
  // 1. Config directory
36
54
  mkdirSync(CONFIG_DIR, { recursive: true });
@@ -58,6 +76,10 @@ export async function run(args = []) {
58
76
  line("Allowance", short(allowance.address));
59
77
  }
60
78
 
79
+ summary.allowance = { address: allowance.address, funded: allowance.funded || false };
80
+ summary.network = isMpp ? "tempo-moderato" : "base-sepolia";
81
+ summary.rail = isMpp ? "mpp" : "x402";
82
+
61
83
  line("Network", isMpp ? "Tempo Moderato (testnet)" : "Base Sepolia (testnet)");
62
84
  line("Rail", isMpp ? "mpp" : "x402");
63
85
 
@@ -110,6 +132,7 @@ export async function run(args = []) {
110
132
  } else {
111
133
  line("Balance", `${(balance / 1e6).toFixed(2)} pathUSD`);
112
134
  }
135
+ summary.balance = { symbol: "pathUSD", usd_micros: balance };
113
136
  } else {
114
137
  // Base Sepolia: read USDC balance (existing behavior)
115
138
  const { createPublicClient, http } = await import("viem");
@@ -152,6 +175,7 @@ export async function run(args = []) {
152
175
  } else {
153
176
  line("Balance", `${(balance / 1e6).toFixed(2)} USDC`);
154
177
  }
178
+ summary.balance = { symbol: "USDC", usd_micros: balance };
155
179
  }
156
180
 
157
181
  // Show note if switching rails
@@ -176,19 +200,32 @@ export async function run(args = []) {
176
200
  if (tierInfo && tierInfo.tier && tierInfo.active) {
177
201
  const expiry = tierInfo.lease_expires_at ? tierInfo.lease_expires_at.split("T")[0] : "unknown";
178
202
  line("Tier", `${tierInfo.tier} (expires ${expiry})`);
203
+ summary.tier = { name: tierInfo.tier, expires: tierInfo.lease_expires_at || null };
179
204
  } else {
180
205
  line("Tier", "(none)");
206
+ summary.tier = null;
181
207
  }
182
208
 
183
- // 5. Projects
184
- line("Projects", `${Object.keys(store.projects).length} active`);
209
+ // 5. Projects — count locally saved project entries. Note: "saved" (not
210
+ // "active") — these are all projects in the keystore, regardless of whether
211
+ // the server considers them active.
212
+ summary.projects_saved = Object.keys(store.projects).length;
213
+ line("Projects", `${summary.projects_saved} saved`);
185
214
 
186
215
  // 6. Next step
187
- console.log();
216
+ write("");
217
+ const nextStep = (!tierInfo || !tierInfo.tier || !tierInfo.active)
218
+ ? "run402 tier set prototype"
219
+ : "run402 deploy --manifest app.json";
188
220
  if (!tierInfo || !tierInfo.tier || !tierInfo.active) {
189
- console.log(" Next: run402 tier set prototype");
221
+ write(" Next: run402 tier set prototype");
190
222
  } else {
191
- console.log(" Ready to deploy. Run: run402 deploy --manifest app.json");
223
+ write(" Ready to deploy. Run: run402 deploy --manifest app.json");
224
+ }
225
+ write("");
226
+ summary.next_step = nextStep;
227
+
228
+ if (jsonMode) {
229
+ console.log(JSON.stringify(summary, null, 2));
192
230
  }
193
- console.log();
194
231
  }
package/lib/projects.mjs CHANGED
@@ -141,13 +141,17 @@ async function sqlCmd(projectId, args = []) {
141
141
  const headers = { "Authorization": `Bearer ${p.service_key}`, "Content-Type": useParams ? "application/json" : "text/plain" };
142
142
  const body = useParams ? JSON.stringify({ sql, params }) : sql;
143
143
  const res = await fetch(`${API}/projects/v1/admin/${projectId}/sql`, { method: "POST", headers, body });
144
- console.log(JSON.stringify(await res.json(), null, 2));
144
+ const data = await res.json();
145
+ if (!res.ok) { console.error(JSON.stringify({ status: "error", http: res.status, ...data })); process.exit(1); }
146
+ console.log(JSON.stringify(data, null, 2));
145
147
  }
146
148
 
147
149
  async function rest(projectId, table, queryParams) {
148
150
  const p = findProject(projectId);
149
151
  const res = await fetch(`${API}/rest/v1/${table}${queryParams ? '?' + queryParams : ''}`, { headers: { "apikey": p.anon_key } });
150
- console.log(JSON.stringify(await res.json(), null, 2));
152
+ const data = await res.json();
153
+ if (!res.ok) { console.error(JSON.stringify({ status: "error", http: res.status, ...data })); process.exit(1); }
154
+ console.log(JSON.stringify(data, null, 2));
151
155
  }
152
156
 
153
157
  async function usage(projectId) {
package/lib/status.mjs CHANGED
@@ -8,6 +8,7 @@ Usage:
8
8
 
9
9
  Displays:
10
10
  - Allowance address and funding status
11
+ - Wallet on-chain USDC/pathUSD balance (wallet_balance_usd_micros)
11
12
  - Billing balance (available + held)
12
13
  - Tier subscription (name, status, expiry)
13
14
  - Projects (from server, with fallback to local keystore)
@@ -16,6 +17,63 @@ Displays:
16
17
  Output is JSON. Requires an existing allowance (run 'run402 init' first).
17
18
  `;
18
19
 
20
+ // USDC / pathUSD constants (match allowance.mjs)
21
+ const USDC_ABI = [{ name: "balanceOf", type: "function", stateMutability: "view", inputs: [{ name: "account", type: "address" }], outputs: [{ name: "", type: "uint256" }] }];
22
+ const USDC_MAINNET = "0x833589fCD6eDb6E08f4c7C32D4f71b54bdA02913";
23
+ const USDC_SEPOLIA = "0x036CbD53842c5426634e7929541eC2318f3dCF7e";
24
+ const PATH_USD = "0x20c0000000000000000000000000000000000000";
25
+ const TEMPO_RPC = "https://rpc.moderato.tempo.xyz/";
26
+
27
+ /**
28
+ * Read the on-chain wallet balance in USD micros for the current rail.
29
+ * For x402: read Base mainnet + Base Sepolia USDC and sum funded networks.
30
+ * For mpp: read pathUSD on Tempo Moderato.
31
+ * Returns null if every read fails (e.g. offline or RPC down).
32
+ */
33
/**
 * Read the on-chain wallet balance in USD micros for the current rail.
 * mpp: pathUSD balanceOf on Tempo Moderato. x402: USDC balanceOf on Base
 * mainnet and Base Sepolia in parallel, summing whichever reads succeed.
 * Returns null when every read fails (offline, RPC down, viem unavailable).
 *
 * NOTE(review): Number(raw) loses precision above 2^53 micros (~$9e9) —
 * presumably acceptable for wallet balances; confirm.
 */
async function readWalletBalanceUsdMicros(rail, address) {
  try {
    const { createPublicClient, http, defineChain } = await import("viem");
    const balanceOf = (client, token) =>
      client.readContract({ address: token, abi: USDC_ABI, functionName: "balanceOf", args: [address] });

    if (rail === "mpp") {
      const tempoModerato = defineChain({
        id: 42431,
        name: "Tempo Moderato",
        nativeCurrency: { name: "pathUSD", symbol: "pathUSD", decimals: 6 },
        rpcUrls: { default: { http: [TEMPO_RPC] } },
      });
      const client = createPublicClient({ chain: tempoModerato, transport: http() });
      try {
        return Number(await balanceOf(client, PATH_USD));
      } catch {
        return null;
      }
    }

    // x402 rail — query both Base networks concurrently; sum the successes.
    const { base, baseSepolia } = await import("viem/chains");
    const reads = [
      balanceOf(createPublicClient({ chain: base, transport: http() }), USDC_MAINNET),
      balanceOf(createPublicClient({ chain: baseSepolia, transport: http() }), USDC_SEPOLIA),
    ].map((p) => p.then(Number).catch(() => null));
    const [mainnet, sepolia] = await Promise.all(reads);
    if (mainnet === null && sepolia === null) return null;
    return (mainnet || 0) + (sepolia || 0);
  } catch {
    return null;
  }
}
63
+
64
+ /**
65
+ * Normalize a project entry to the agreed-on shape: always expose `project_id`
66
+ * (matching `projects list`). The remote /wallets/v1/:wallet/projects endpoint
67
+ * returns entries keyed as `id`, so we map them here and drop the raw `id`
68
+ * field to avoid having two aliases for the same identity.
69
+ */
70
/**
 * Normalize a project entry so it always exposes `project_id` (matching the
 * `projects list` shape). The remote /wallets/v1/:wallet/projects endpoint
 * keys the identifier as `id`; map it over and drop the raw `id` so exactly
 * one name exists for the identity. Non-object inputs pass through as-is.
 */
function normalizeProject(raw) {
  if (!raw || typeof raw !== "object") return raw;
  const { id, project_id, ...rest } = raw;
  return { project_id: project_id || id, ...rest };
}
76
+
19
77
  export async function run(args = []) {
20
78
  if (args.includes("--help") || args.includes("-h")) { console.log(HELP); process.exit(0); }
21
79
  const allowance = readAllowance();
@@ -26,14 +84,16 @@ export async function run(args = []) {
26
84
 
27
85
  const wallet = allowance.address.toLowerCase();
28
86
  const authHeaders = getAllowanceAuthHeaders("/tiers/v1/status");
87
+ const rail = allowance.rail || "x402";
29
88
 
30
- // Parallel API calls: tier + billing balance + server-side projects
31
- const [tierRes, balanceRes, projectsRes] = await Promise.all([
89
+ // Parallel API calls: tier + billing balance + server-side projects + on-chain wallet balance
90
+ const [tierRes, balanceRes, projectsRes, walletBalance] = await Promise.all([
32
91
  authHeaders
33
92
  ? fetch(`${API}/tiers/v1/status`, { headers: { ...authHeaders } }).catch(() => null)
34
93
  : null,
35
94
  fetch(`${API}/billing/v1/accounts/${wallet}`).catch(() => null),
36
95
  fetch(`${API}/wallets/v1/${wallet}/projects`).catch(() => null),
96
+ readWalletBalanceUsdMicros(rail, allowance.address),
37
97
  ]);
38
98
 
39
99
  const tier = tierRes?.ok ? await tierRes.json() : null;
@@ -44,18 +104,29 @@ export async function run(args = []) {
44
104
  const store = loadKeyStore();
45
105
  const activeId = getActiveProjectId();
46
106
 
107
+ const projects = remote?.projects
108
+ ? remote.projects.map(normalizeProject)
109
+ : Object.keys(store.projects).map(id => ({ project_id: id }));
110
+
47
111
  const result = {
48
112
  allowance: {
49
113
  address: allowance.address,
50
114
  funded: allowance.funded || false,
51
115
  },
116
+ rail,
52
117
  tier: tier && tier.tier
53
118
  ? { name: tier.tier, status: tier.status, expires: tier.lease_expires_at }
54
119
  : null,
55
- balance: billing && billing.exists
120
+ // GH-32: `balance` used to mean the billing-account balance, which
121
+ // confused people who expected their on-chain wallet balance. Split into
122
+ // two unambiguous fields:
123
+ // - billing: credits held by Run402 (available + held), null if no account
124
+ // - wallet_balance_usd_micros: on-chain USDC/pathUSD, null if RPC fails
125
+ billing: billing && billing.exists
56
126
  ? { available_usd_micros: billing.available_usd_micros, held_usd_micros: billing.held_usd_micros }
57
127
  : null,
58
- projects: remote?.projects || Object.keys(store.projects).map(id => ({ id })),
128
+ wallet_balance_usd_micros: walletBalance,
129
+ projects,
59
130
  active_project: activeId || null,
60
131
  };
61
132
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "run402",
3
- "version": "1.34.2",
3
+ "version": "1.35.1",
4
4
  "description": "CLI for Run402 — provision Postgres databases, deploy static sites, generate images, and manage wallets via x402 and MPP micropayments.",
5
5
  "type": "module",
6
6
  "bin": {