bulletin-deploy 0.7.14 → 0.7.15-rc.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/README.md +27 -0
  2. package/bin/bulletin-deploy +5 -0
  3. package/dist/bug-report.js +4 -4
  4. package/dist/{chunk-6YJ46BN2.js → chunk-4KWVGBB2.js} +1 -1
  5. package/dist/{chunk-4AP5ZFNV.js → chunk-545X4HLT.js} +1 -1
  6. package/dist/chunk-5MRZ3V4A.js +171 -0
  7. package/dist/{chunk-LM5U3XDV.js → chunk-5SODMVIF.js} +1 -1
  8. package/dist/chunk-C2TS5MER.js +64 -0
  9. package/dist/chunk-KOSF5FDO.js +49 -0
  10. package/dist/chunk-MJTQOXBC.js +140 -0
  11. package/dist/chunk-NSYGVVJV.js +177 -0
  12. package/dist/{chunk-N7YE5ZN3.js → chunk-NZUB2BVL.js} +2 -2
  13. package/dist/{chunk-3RNYFSU7.js → chunk-QEXYZ5SQ.js} +2 -2
  14. package/dist/chunk-S7EM5VMW.js +108 -0
  15. package/dist/{chunk-3ILZFC4E.js → chunk-TBTEN6MI.js} +805 -31
  16. package/dist/chunk-probe.d.ts +36 -0
  17. package/dist/chunk-probe.js +19 -0
  18. package/dist/chunker.d.ts +8 -0
  19. package/dist/chunker.js +11 -0
  20. package/dist/deploy.d.ts +73 -2
  21. package/dist/deploy.js +24 -7
  22. package/dist/dotns.js +3 -3
  23. package/dist/incremental-stats.d.ts +65 -0
  24. package/dist/incremental-stats.js +11 -0
  25. package/dist/index.d.ts +6 -1
  26. package/dist/index.js +51 -12
  27. package/dist/manifest-embed.d.ts +18 -0
  28. package/dist/manifest-embed.js +10 -0
  29. package/dist/manifest-fetch.d.ts +26 -0
  30. package/dist/manifest-fetch.js +14 -0
  31. package/dist/manifest-roundtrip.d.ts +15 -0
  32. package/dist/manifest-roundtrip.js +56 -0
  33. package/dist/manifest.d.ts +44 -0
  34. package/dist/manifest.js +21 -0
  35. package/dist/memory-report.js +2 -2
  36. package/dist/merkle.d.ts +38 -1
  37. package/dist/merkle.js +28 -3
  38. package/dist/run-state.js +1 -1
  39. package/dist/telemetry.js +2 -2
  40. package/dist/version-check.js +3 -3
  41. package/package.json +2 -2
  42. package/dist/chunk-B7GUYYAN.js +0 -94
package/README.md CHANGED
@@ -115,6 +115,33 @@ The runtime uses a 24-hour cache at `${XDG_CACHE_HOME:-~/.cache}/bulletin-deploy
115
115
  - `pool accounts`: derived Bulletin uploader accounts used to spread nonce and authorization load.
116
116
  - `PoP`: Proof of Personhood, which some `.dot` names require before registration.
117
117
 
118
+ ## Incremental Upload
119
+
120
+ After the first deploy of a `.dot` domain, every subsequent deploy automatically reuses chunks already stored on Bulletin instead of re-uploading them. There is no flag to enable; it just runs.
121
+
122
+ How it works:
123
+
124
+ 1. The previous deploy embeds a manifest at `.bulletin-deploy/manifest.json` inside the deployed content (file classification, block ordering, chunk metadata).
125
+ 2. The new deploy fetches the previous contenthash from DotNS, then fetches that manifest via the Bulletin IPFS gateway.
126
+ 3. The new build's CAR is sliced into chunks; each chunk's CID is HEAD-probed against the gateway. Chunks already present are skipped.
127
+ 4. Only the chunks that actually changed (typically: the manifest itself and any modified content) are uploaded.
128
+
129
+ The summary line at the end of a deploy shows the savings:
130
+
131
+ ```
132
+ Cache:
133
+ Manifest: embedded (1 attempt)
134
+ Probed: 18 chunks → 15 cached, 2 to upload, 1 probe-failed
135
+ Recycled: 3 CIDs found on-chain that weren't in the previous manifest
136
+ Saved: ~52s and 14.3 MB upload
137
+ ```
138
+
139
+ CI runners benefit identically — no `actions/cache` wiring required, because the manifest travels with the deployed content rather than living on the runner's disk.
140
+
141
+ **Encrypted deploys** (`--password`) bypass the incremental path. Encryption produces non-deterministic CAR bytes per run, so chunk-level dedup can't apply.
142
+
143
+ **Force a full re-upload** by deleting `.bulletin-deploy/` from your build output (or changing one byte in any non-volatile file) before deploying. The classifier treats `.bulletin-deploy/` paths as volatile, so removing the manifest forces the next deploy to fall back to the heuristic classifier.
144
+
118
145
  ## Domain Rules
119
146
 
120
147
  DotNS classifies labels on-chain and may require a specific Proof of Personhood level before registration.
@@ -31,6 +31,9 @@ for (let i = 0; i < args.length; i++) {
31
31
  else if (args[i] === "--name") { flags.name = args[++i]; }
32
32
  else if (args[i] === "--description") { flags.description = args[++i]; }
33
33
  else if (args[i] === "--gh-pages-mirror") { flags.ghPagesMirror = true; }
34
+ else if (args[i] === "--allow-large-deploy") { flags.allowLargeDeploy = true; }
35
+ else if (args[i] === "--reproducible") { flags.reproducibleSource = "commit"; }
36
+ else if (args[i].startsWith("--reproducible=")) { flags.reproducibleSource = args[i].slice("--reproducible=".length); }
34
37
  else if (args[i] === "--version" || args[i] === "-V") { flags.version = true; }
35
38
  else if (args[i] === "--help" || args[i] === "-h") { flags.help = true; }
36
39
  else { positional.push(args[i]); }
@@ -255,6 +258,8 @@ try {
255
258
  ghPagesMirror: flags.ghPagesMirror,
256
259
  name: flags.name,
257
260
  description: flags.description,
261
+ allowLargeDeploy: flags.allowLargeDeploy,
262
+ reproducibleSource: flags.reproducibleSource,
258
263
  });
259
264
 
260
265
  const output = process.env.GITHUB_OUTPUT;
@@ -9,10 +9,10 @@ import {
9
9
  offerBugReport,
10
10
  scrubSecrets,
11
11
  setDeployContext
12
- } from "./chunk-3RNYFSU7.js";
13
- import "./chunk-6YJ46BN2.js";
14
- import "./chunk-4AP5ZFNV.js";
15
- import "./chunk-N7YE5ZN3.js";
12
+ } from "./chunk-QEXYZ5SQ.js";
13
+ import "./chunk-4KWVGBB2.js";
14
+ import "./chunk-545X4HLT.js";
15
+ import "./chunk-NZUB2BVL.js";
16
16
  import "./chunk-QGM4M3NI.js";
17
17
  export {
18
18
  buildCliFlagsSummary,
@@ -1,6 +1,6 @@
1
1
  import {
2
2
  VERSION
3
- } from "./chunk-4AP5ZFNV.js";
3
+ } from "./chunk-545X4HLT.js";
4
4
 
5
5
  // src/version-check.ts
6
6
  import { execSync, execFileSync } from "child_process";
@@ -1,7 +1,7 @@
1
1
  import {
2
2
  package_default,
3
3
  writeRunState
4
- } from "./chunk-N7YE5ZN3.js";
4
+ } from "./chunk-NZUB2BVL.js";
5
5
 
6
6
  // src/memory-report.ts
7
7
  import * as fs2 from "fs";
@@ -0,0 +1,171 @@
1
+ import {
2
+ MANIFEST_DIR,
3
+ MANIFEST_FILENAME,
4
+ parseManifest
5
+ } from "./chunk-S7EM5VMW.js";
6
+
7
+ // src/manifest-fetch.ts
8
+ import { CarReader } from "@ipld/car/reader";
9
+ import * as dagPB from "@ipld/dag-pb";
10
+ import { CID } from "multiformats/cid";
11
+ var DEFAULT_GATEWAY = "https://paseo-ipfs.polkadot.io";
12
+ var DEFAULT_TIMEOUT_MS = 5e3;
13
+ var RANGE_TIERS = [
14
+ "bytes=0-4095",
15
+ "bytes=0-65535",
16
+ "bytes=0-1048575",
17
+ void 0
18
+ // full body
19
+ ];
20
+ async function fetchAcrossTiers(url, budget, start) {
21
+ let lastReason = "unknown";
22
+ let attempts = 0;
23
+ let bytesDownloaded = 0;
24
+ for (let tier = 0; tier < RANGE_TIERS.length; tier++) {
25
+ if (Date.now() - start > budget) {
26
+ return { outcome: "retryable", reason: `budget exceeded: ${lastReason}`, attempts, bytesDownloaded };
27
+ }
28
+ attempts++;
29
+ const headers = {};
30
+ if (RANGE_TIERS[tier] !== void 0) headers.Range = RANGE_TIERS[tier];
31
+ let res;
32
+ try {
33
+ const ctrl = new AbortController();
34
+ const remaining = budget - (Date.now() - start);
35
+ const timer = setTimeout(() => ctrl.abort(), Math.max(100, remaining));
36
+ try {
37
+ res = await fetch(url, { headers, signal: ctrl.signal });
38
+ } finally {
39
+ clearTimeout(timer);
40
+ }
41
+ } catch (e) {
42
+ lastReason = `network error: ${e?.message ?? e}`;
43
+ continue;
44
+ }
45
+ if (res.status === 404) {
46
+ return { outcome: "404", attempts, bytesDownloaded };
47
+ }
48
+ if (res.status !== 200 && res.status !== 206) {
49
+ lastReason = `gateway HTTP ${res.status}`;
50
+ continue;
51
+ }
52
+ const isFullBody = res.status === 200;
53
+ let carBytes;
54
+ try {
55
+ const buf = await res.arrayBuffer();
56
+ carBytes = new Uint8Array(buf);
57
+ bytesDownloaded = carBytes.length;
58
+ } catch (e) {
59
+ if (isFullBody) {
60
+ return { outcome: "parse_error", reason: `body read error: ${e?.message ?? e}`, attempts, bytesDownloaded };
61
+ }
62
+ lastReason = `body read error: ${e?.message ?? e}`;
63
+ continue;
64
+ }
65
+ let manifestBytes;
66
+ try {
67
+ manifestBytes = await extractManifestFromCar(carBytes);
68
+ } catch (e) {
69
+ const msg = String(e?.message ?? e);
70
+ if (isFullBody) {
71
+ return { outcome: "parse_error", reason: `CAR parse error: ${msg}`, attempts, bytesDownloaded };
72
+ }
73
+ lastReason = `truncated at tier ${tier}: ${msg}`;
74
+ continue;
75
+ }
76
+ if (!manifestBytes) {
77
+ if (isFullBody) {
78
+ return { outcome: "parse_error", reason: "no .bulletin-deploy/manifest.json in deployed DAG", attempts, bytesDownloaded };
79
+ }
80
+ lastReason = `manifest not in slice tier ${tier}`;
81
+ continue;
82
+ }
83
+ const text = new TextDecoder().decode(manifestBytes);
84
+ const parsed = parseManifest(text);
85
+ if (parsed.ok) {
86
+ return { outcome: "success", manifest: parsed.manifest, attempts, bytesDownloaded };
87
+ }
88
+ return { outcome: "parse_error", reason: parsed.error, attempts, bytesDownloaded };
89
+ }
90
+ return { outcome: "retryable", reason: `tiers exhausted: ${lastReason}`, attempts, bytesDownloaded };
91
+ }
92
+ async function fetchPreviousManifest(prevContenthash, options = {}) {
93
+ if (prevContenthash === null) return { source: "none" };
94
+ const gatewayList = (options.gateways ?? (options.gateway ? [options.gateway] : [DEFAULT_GATEWAY])).map((g) => g.replace(/\/$/, ""));
95
+ const budget = options.timeoutMs ?? DEFAULT_TIMEOUT_MS;
96
+ const start = Date.now();
97
+ let lastReason = "unknown";
98
+ let totalAttempts = 0;
99
+ let bytesDownloaded = 0;
100
+ for (const gateway of gatewayList) {
101
+ const url = `${gateway}/ipfs/${prevContenthash}`;
102
+ const tierResult = await fetchAcrossTiers(url, budget, start);
103
+ if (tierResult.outcome === "success") {
104
+ return { source: "embedded", manifest: tierResult.manifest, attempts: totalAttempts + tierResult.attempts, bytesDownloaded: bytesDownloaded + tierResult.bytesDownloaded };
105
+ }
106
+ if (tierResult.outcome === "404" || tierResult.outcome === "parse_error") {
107
+ return {
108
+ source: "heuristic_fallback",
109
+ reason: tierResult.outcome === "404" ? "gateway 404" : tierResult.reason,
110
+ attempts: totalAttempts + tierResult.attempts,
111
+ bytesDownloaded: bytesDownloaded + tierResult.bytesDownloaded
112
+ };
113
+ }
114
+ lastReason = tierResult.reason;
115
+ totalAttempts += tierResult.attempts;
116
+ bytesDownloaded += tierResult.bytesDownloaded;
117
+ }
118
+ return { source: "heuristic_fallback", reason: `all gateways exhausted: ${lastReason}`, attempts: totalAttempts, bytesDownloaded };
119
+ }
120
+ async function extractManifestFromCar(carBytes) {
121
+ const reader = await CarReader.fromBytes(carBytes);
122
+ const roots = await reader.getRoots();
123
+ if (roots.length === 0) return null;
124
+ const blocks = /* @__PURE__ */ new Map();
125
+ for await (const { cid, bytes } of reader.blocks()) {
126
+ blocks.set(cid.toString(), bytes);
127
+ }
128
+ const rootBytes = blocks.get(roots[0].toString());
129
+ if (!rootBytes) return null;
130
+ const rootNode = dagPB.decode(rootBytes);
131
+ const bdLink = (rootNode.Links ?? []).find((l) => l.Name === MANIFEST_DIR);
132
+ if (!bdLink) return null;
133
+ const bdBytes = blocks.get(bdLink.Hash.toString());
134
+ if (!bdBytes) return null;
135
+ const bdNode = dagPB.decode(bdBytes);
136
+ const manLink = (bdNode.Links ?? []).find((l) => l.Name === MANIFEST_FILENAME);
137
+ if (!manLink) return null;
138
+ const manCidStr = manLink.Hash.toString();
139
+ const manCid = CID.parse(manCidStr);
140
+ const manBytes = blocks.get(manCidStr);
141
+ if (!manBytes) return null;
142
+ if (manCid.code === 85) {
143
+ return manBytes;
144
+ }
145
+ if (manCid.code === 112) {
146
+ const node = dagPB.decode(manBytes);
147
+ const parts = [];
148
+ let total = 0;
149
+ for (const link of node.Links ?? []) {
150
+ const leafBytes = blocks.get(link.Hash.toString());
151
+ if (!leafBytes) return null;
152
+ parts.push(leafBytes);
153
+ total += leafBytes.length;
154
+ }
155
+ const out = new Uint8Array(total);
156
+ let pos = 0;
157
+ for (const part of parts) {
158
+ out.set(part, pos);
159
+ pos += part.length;
160
+ }
161
+ return out;
162
+ }
163
+ return null;
164
+ }
165
+
166
+ export {
167
+ DEFAULT_GATEWAY,
168
+ DEFAULT_TIMEOUT_MS,
169
+ fetchPreviousManifest,
170
+ extractManifestFromCar
171
+ };
@@ -5,7 +5,7 @@ import {
5
5
  captureWarning,
6
6
  setDeployAttribute,
7
7
  withSpan
8
- } from "./chunk-4AP5ZFNV.js";
8
+ } from "./chunk-545X4HLT.js";
9
9
 
10
10
  // src/dotns.ts
11
11
  import { spawn } from "child_process";
@@ -0,0 +1,64 @@
1
+ // src/chunker.ts
2
+ var CHUNK_SIZE_TARGET = 1024 * 1024;
3
+ var CHUNK_SIZE_MAX = 2 * 1024 * 1024 - 1024;
4
+ function concat(parts) {
5
+ let total = 0;
6
+ for (const p of parts) total += p.length;
7
+ const out = new Uint8Array(total);
8
+ let off = 0;
9
+ for (const p of parts) {
10
+ out.set(p, off);
11
+ off += p.length;
12
+ }
13
+ return out;
14
+ }
15
+ function packSection(files) {
16
+ const chunks = [];
17
+ let buffer = [];
18
+ let bufferLen = 0;
19
+ const flush = () => {
20
+ if (bufferLen === 0) return;
21
+ chunks.push(concat(buffer));
22
+ buffer = [];
23
+ bufferLen = 0;
24
+ };
25
+ for (const file of files) {
26
+ const fileBytes = file.blocks.reduce((s, b) => s + b.length, 0);
27
+ if (fileBytes === 0) continue;
28
+ if (fileBytes > CHUNK_SIZE_TARGET) {
29
+ flush();
30
+ if (fileBytes > CHUNK_SIZE_MAX) {
31
+ for (const block of file.blocks) {
32
+ if (bufferLen + block.length > CHUNK_SIZE_MAX) {
33
+ flush();
34
+ }
35
+ buffer.push(block);
36
+ bufferLen += block.length;
37
+ }
38
+ flush();
39
+ } else {
40
+ for (const block of file.blocks) {
41
+ buffer.push(block);
42
+ bufferLen += block.length;
43
+ flush();
44
+ }
45
+ }
46
+ } else {
47
+ if (bufferLen + fileBytes > CHUNK_SIZE_TARGET) {
48
+ flush();
49
+ }
50
+ for (const block of file.blocks) {
51
+ buffer.push(block);
52
+ bufferLen += block.length;
53
+ }
54
+ }
55
+ }
56
+ flush();
57
+ return chunks;
58
+ }
59
+
60
+ export {
61
+ CHUNK_SIZE_TARGET,
62
+ CHUNK_SIZE_MAX,
63
+ packSection
64
+ };
@@ -0,0 +1,49 @@
1
+ import {
2
+ MANIFEST_DIR,
3
+ MANIFEST_PATH
4
+ } from "./chunk-S7EM5VMW.js";
5
+
6
+ // src/manifest-embed.ts
7
+ import * as fs from "fs";
8
+ import * as path from "path";
9
+ function writeAtomic(filePath, body) {
10
+ const tmp = `${filePath}.tmp`;
11
+ fs.writeFileSync(tmp, body);
12
+ fs.renameSync(tmp, filePath);
13
+ }
14
+ function ensureDir(dirPath) {
15
+ fs.mkdirSync(dirPath, { recursive: true });
16
+ }
17
+ function writeEmbeddedManifestPlaceholder(buildDir, data) {
18
+ ensureDir(path.join(buildDir, MANIFEST_DIR));
19
+ const payload = {
20
+ version: data.version,
21
+ previous_contenthash: data.previousContenthash,
22
+ deployed_at: data.deployedAt,
23
+ framework: data.framework,
24
+ files: {},
25
+ stableBlockOrder: [],
26
+ blocks: [],
27
+ chunks: {}
28
+ };
29
+ writeAtomic(path.join(buildDir, MANIFEST_PATH), JSON.stringify(payload, null, 2));
30
+ }
31
+ function finaliseEmbeddedManifest(buildDir, data) {
32
+ ensureDir(path.join(buildDir, MANIFEST_DIR));
33
+ const payload = {
34
+ version: data.version,
35
+ previous_contenthash: data.previousContenthash,
36
+ deployed_at: data.deployedAt,
37
+ framework: data.framework,
38
+ files: data.files,
39
+ stableBlockOrder: data.stableBlockOrder,
40
+ blocks: data.blocks,
41
+ chunks: data.chunks
42
+ };
43
+ writeAtomic(path.join(buildDir, MANIFEST_PATH), JSON.stringify(payload, null, 2));
44
+ }
45
+
46
+ export {
47
+ writeEmbeddedManifestPlaceholder,
48
+ finaliseEmbeddedManifest
49
+ };
@@ -0,0 +1,140 @@
1
+ // src/incremental-stats.ts
2
+ var SECONDS_PER_PROBE_SKIP = 3.5;
3
+ function countByReason(probe, reason) {
4
+ return probe.filter((r) => r.present === null && r.failureReason === reason).length;
5
+ }
6
+ function computeStats(input) {
7
+ const present = input.probeResults.filter((r) => r.present === true);
8
+ const failed = input.probeResults.filter((r) => r.present === null);
9
+ const presentInPrev = present.filter((r) => input.prevChunks[r.cid] != null);
10
+ const recycled = present.filter((r) => input.prevChunks[r.cid] == null);
11
+ return {
12
+ manifestSource: input.manifestSource,
13
+ manifestFetchAttempts: input.manifestFetchAttempts,
14
+ manifestBytes: input.manifestBytes ?? 0,
15
+ framework: input.framework,
16
+ filesTotal: input.filesTotal,
17
+ filesStable: input.filesStable,
18
+ filesVolatile: input.filesVolatile,
19
+ probedTotal: input.probeResults.length,
20
+ probePresent: present.length,
21
+ probeAbsent: input.probeResults.filter((r) => r.present === false).length,
22
+ probeFailed: failed.length,
23
+ probeFailedRpc: countByReason(input.probeResults, "rpc_error"),
24
+ probeFailedDecode: countByReason(input.probeResults, "decode_error"),
25
+ probeFailedMetadata: countByReason(input.probeResults, "metadata_error"),
26
+ recycledCids: recycled.length,
27
+ retentionPeriodBlocks: input.retentionPeriodBlocks,
28
+ bytesSkipped: input.bytesSkipped,
29
+ bytesUploaded: input.bytesUploaded,
30
+ chunksTotal: input.chunksTotal,
31
+ chunksUploaded: input.chunksUploaded,
32
+ chunksSkipped: input.chunksSkipped,
33
+ carBytes: input.carBytes,
34
+ section0Bytes: input.sectionSizes.section0,
35
+ section1Bytes: input.sectionSizes.section1,
36
+ section2Bytes: input.sectionSizes.section2,
37
+ estimatedSecondsSaved: Math.round(SECONDS_PER_PROBE_SKIP * presentInPrev.length),
38
+ tier2VerifiedCount: input.tier2VerifiedCount,
39
+ tier2InconclusiveCount: input.tier2InconclusiveCount,
40
+ tier2FallbackCount: input.tier2FallbackCount
41
+ };
42
+ }
43
+ function telemetryAttributes(s) {
44
+ const hitRate = s.filesTotal === 0 ? 0 : s.filesStable / s.filesTotal;
45
+ return {
46
+ "deploy.cache.manifest_source": s.manifestSource,
47
+ "deploy.cache.manifest_fetch_attempts": String(s.manifestFetchAttempts),
48
+ "deploy.cache.manifest_bytes": String(s.manifestBytes),
49
+ "deploy.cache.framework": s.framework ?? "",
50
+ "deploy.cache.hit_rate": String(Math.round(hitRate * 1e3) / 1e3),
51
+ "deploy.cache.files_total": String(s.filesTotal),
52
+ "deploy.cache.files_stable": String(s.filesStable),
53
+ "deploy.cache.files_volatile": String(s.filesVolatile),
54
+ "deploy.cache.probed_total": String(s.probedTotal),
55
+ "deploy.cache.probe_present": String(s.probePresent),
56
+ "deploy.cache.probe_absent": String(s.probeAbsent),
57
+ "deploy.cache.probe_failed": String(s.probeFailed),
58
+ "deploy.cache.probe_failed_rpc": String(s.probeFailedRpc),
59
+ "deploy.cache.probe_failed_decode": String(s.probeFailedDecode),
60
+ "deploy.cache.probe_failed_metadata": String(s.probeFailedMetadata),
61
+ "deploy.cache.recycled_cids": String(s.recycledCids),
62
+ "deploy.cache.retention_period_blocks": String(s.retentionPeriodBlocks),
63
+ "deploy.cache.chunks_total": String(s.chunksTotal),
64
+ "deploy.cache.chunks_uploaded": String(s.chunksUploaded),
65
+ "deploy.cache.chunks_skipped": String(s.chunksSkipped),
66
+ "deploy.cache.bytes_skipped": String(s.bytesSkipped),
67
+ "deploy.cache.bytes_uploaded": String(s.bytesUploaded),
68
+ "deploy.cache.car_bytes": String(s.carBytes),
69
+ "deploy.cache.section0_bytes": String(s.section0Bytes),
70
+ "deploy.cache.section1_bytes": String(s.section1Bytes),
71
+ "deploy.cache.section2_bytes": String(s.section2Bytes),
72
+ "deploy.cache.estimated_seconds_saved": String(s.estimatedSecondsSaved),
73
+ "deploy.cache.tier2_fallback": String(s.tier2FallbackCount),
74
+ "deploy.cache.tier2_verified": String(s.tier2VerifiedCount),
75
+ "deploy.cache.tier2_inconclusive": String(s.tier2InconclusiveCount)
76
+ };
77
+ }
78
+ function fmtMb(bytes) {
79
+ return (bytes / 1e6).toFixed(1);
80
+ }
81
+ function fmtKb(bytes) {
82
+ return (bytes / 1e3).toFixed(1);
83
+ }
84
+ function renderSummary(s) {
85
+ const lines = [];
86
+ if (s.manifestSource === "heuristic_fallback") {
87
+ lines.push(` \u26A0 Previous manifest fetch failed after ${s.manifestFetchAttempts} attempts (gateway timeout).`);
88
+ lines.push(` Using heuristic classification \u2014 hit rate may be lower this run.`);
89
+ lines.push(` Subsequent deploys recover automatically.`);
90
+ lines.push("");
91
+ }
92
+ lines.push(`Cache:`);
93
+ if (s.manifestSource === "none") {
94
+ lines.push(` Manifest: first deploy (no previous manifest)`);
95
+ } else if (s.manifestSource === "embedded") {
96
+ const attemptsStr = `${s.manifestFetchAttempts} attempt${s.manifestFetchAttempts === 1 ? "" : "s"}`;
97
+ const sizeStr = s.manifestBytes > 0 ? `, ${fmtKb(s.manifestBytes)} KB Range hit` : "";
98
+ lines.push(` Manifest: embedded (${attemptsStr}${sizeStr})`);
99
+ } else {
100
+ lines.push(` Manifest: heuristic_fallback (${s.manifestFetchAttempts} attempts)`);
101
+ }
102
+ if (s.filesTotal > 0 && s.manifestSource !== "none") {
103
+ const pct = s.filesTotal === 0 ? 0 : Math.round(s.filesStable / s.filesTotal * 100);
104
+ lines.push(` Files: ${s.filesStable} unchanged, ${s.filesVolatile} changed (${pct} % stable)`);
105
+ }
106
+ if (s.probedTotal > 0) {
107
+ let probeFailedStr = "";
108
+ if (s.probeFailed > 0) {
109
+ const reasons = [];
110
+ if (s.probeFailedRpc > 0) reasons.push("rpc_error");
111
+ if (s.probeFailedDecode > 0) reasons.push("decode_error");
112
+ if (s.probeFailedMetadata > 0) reasons.push("metadata_error");
113
+ probeFailedStr = `, ${s.probeFailed} probe-failed (${reasons.join(", ")})`;
114
+ }
115
+ lines.push(` Probed: ${s.probedTotal} chunks \u2192 ${s.probePresent} cached, ${s.probeAbsent} to upload${probeFailedStr}`);
116
+ }
117
+ if (s.recycledCids > 0) {
118
+ lines.push(` Recycled: ${s.recycledCids} CIDs found on-chain that weren't in the previous manifest`);
119
+ }
120
+ if (s.tier2FallbackCount > 0) {
121
+ const inconclusiveStr = s.tier2InconclusiveCount > 0 ? `, ${s.tier2InconclusiveCount} inconclusive` : "";
122
+ lines.push(` Verify: ${s.tier2VerifiedCount}/${s.tier2FallbackCount} via-fallback chunks confirmed on chain${inconclusiveStr}`);
123
+ }
124
+ lines.push(` CAR sections: manifest ${fmtKb(s.section0Bytes)} KB \xB7 stable ${fmtMb(s.section1Bytes)} MB \xB7 volatile ${fmtMb(s.section2Bytes)} MB`);
125
+ if (s.chunksUploaded > 0 && s.bytesSkipped > 0) {
126
+ lines.push(` Upload: ${fmtMb(s.bytesUploaded)} MB across ${s.chunksUploaded} chunks (vs ${fmtMb(s.carBytes)} MB if full deploy)`);
127
+ } else if (s.chunksUploaded > 0) {
128
+ lines.push(` Upload: ${fmtMb(s.bytesUploaded)} MB across ${s.chunksUploaded} chunks`);
129
+ }
130
+ if (s.estimatedSecondsSaved > 0 || s.bytesSkipped > 0) {
131
+ lines.push(` Saved: ~${s.estimatedSecondsSaved} s and ${fmtMb(s.bytesSkipped)} MB`);
132
+ }
133
+ return lines.join("\n");
134
+ }
135
+
136
+ export {
137
+ computeStats,
138
+ telemetryAttributes,
139
+ renderSummary
140
+ };