bulletin-deploy 0.6.16 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -47,9 +47,6 @@ bulletin-deploy ./dist my-app00.dot
47
47
 
48
48
  # Custom RPC endpoint
49
49
  bulletin-deploy --rpc wss://custom-bulletin.example.com ./dist my-app00.dot
50
-
51
- # Deploy and publish to the Playground remix registry
52
- bulletin-deploy --playground ./dist my-app00.dot
53
50
  ```
54
51
 
55
52
  ### All options
@@ -63,7 +60,6 @@ Options:
63
60
  Bulletin direct signer and the DotNS registration signer then
64
61
  run as that derived account. Useful when running parallel
65
62
  deploys against the same root mnemonic without nonce contention.
66
- --playground Publish to the Playground remix registry
67
63
  --js-merkle Use pure-JS merkleization (no IPFS Kubo binary required)
68
64
  --pool-size N Number of pool accounts (default: 10)
69
65
  --tag "..." Free-form label attached to the deploy span as deploy.tag
@@ -111,18 +107,6 @@ Limitations / follow-ups:
111
107
  - **Mirror failures are non-fatal.** The source of truth is Bulletin + DotNS; the mirror is a cache. Failures log and let the deploy succeed.
112
108
  - **GitHub Pages build latency.** The CAR lands on `gh-pages` immediately; Pages serves it after the build completes (~1–2 min in practice). Hosts should fall back to Bulletin while the 404 window lasts.
113
109
 
114
- ### Playground registry
115
-
116
- By default, deploys only upload to Bulletin storage and register the DotNS domain. The **Playground remix registry** is an on-chain app directory that makes your deploy visible in [Polkadot Playground](https://playground.polkadot.cloud).
117
-
118
- To publish to it, pass `--playground`:
119
-
120
- ```bash
121
- bulletin-deploy --playground ./dist my-app.dot
122
- ```
123
-
124
- This requires `cdm.json` in your project root (shipped with bulletin-deploy) and a git remote origin.
125
-
126
110
  ## GitHub Actions
127
111
 
128
112
  1. Copy `workflows/deploy-on-pr.yml` to your repo's `.github/workflows/` directory
@@ -331,20 +315,21 @@ Runs `test/test.js` + `test/pool.test.js` + `test/helpers/e2e-helpers.test.js` v
331
315
 
332
316
  ### Live-testnet E2E
333
317
 
334
- Three scenarios land on Paseo Bulletin:
318
+ Four scenarios land on Paseo Bulletin:
335
319
 
336
320
  - **S1** — happy path on a stable label (`e2epool.dot` / `e2edirect.dot`)
337
321
  - **S2** — fresh registration via commit-reveal (nightly only)
338
322
  - **S3** — deploy to `e2eowned.dot` (owned by a different account), expects `EXIT_CODE_NO_RETRY` (78) and the "owned by a different account" error message
323
+ - **S4** — deploy with `--gh-pages-mirror`, waits for GitHub Pages to serve the just-pushed manifest (CID-freshness check), then byte-compares the CAR on Pages against a pre-upload dump to confirm the mirror is an exact copy of what went to Bulletin
339
324
 
340
- **Prerequisites** (one-time per testnet lifetime): see [`docs/e2e-bootstrap.md`](docs/e2e-bootstrap.md). Grants Alice PoP Full, funds+maps Bob, pre-registers `e2eowned.dot` to Bob via `dotns-cli`.
325
+ **Prerequisites** (one-time per testnet lifetime): see [`docs/e2e-bootstrap.md`](docs/e2e-bootstrap.md). Grants Alice PoP Full, funds+maps Bob, pre-registers `e2eowned.dot` to Bob via `dotns-cli`. S4 additionally needs GitHub Pages enabled on the repo with `gh-pages` as the source branch and a token with `contents: write` (the workflow's `GITHUB_TOKEN` provides this; locally your git credentials must be able to push).
341
326
 
342
327
  **Local launchers:**
343
328
 
344
329
  ```bash
345
330
  npm run test:e2e:smoke # 1 scenario (S1 pool/js) ~5 min
346
- npm run test:e2e:pr # 3 scenarios (matches per-PR CI) ~15 min
347
- npm run test:e2e:nightly # 7 scenarios (matches nightly CI) ~30–45 min
331
+ npm run test:e2e:pr # 4 scenarios (matches per-PR CI) ~20 min
332
+ npm run test:e2e:nightly # 8 scenarios (matches nightly CI) ~30–45 min
348
333
  ```
349
334
 
350
335
  All scenarios run through to completion even if one fails; a colored summary prints at the end with per-scenario pass/fail, timing, JUnit report paths, and a pre-filtered Sentry trace link.
@@ -18,7 +18,6 @@ for (let i = 0; i < args.length; i++) {
18
18
  else if (args[i] === "--derivation-path") { flags.derivationPath = args[++i]; }
19
19
  else if (args[i] === "--rpc") { flags.rpc = args[++i]; }
20
20
  else if (args[i] === "--password") { flags.password = args[++i]; }
21
- else if (args[i] === "--playground") { flags.playground = true; }
22
21
  else if (args[i] === "--js-merkle") { flags.jsMerkle = true; }
23
22
  else if (args[i] === "--tag") { flags.tag = args[++i]; }
24
23
  else if (args[i] === "--gh-pages-mirror") { flags.ghPagesMirror = true; }
@@ -45,7 +44,6 @@ Options:
45
44
  --rpc wss://... Bulletin RPC (or set BULLETIN_RPC env var)
46
45
  --pool-size N Number of pool accounts (default: 10)
47
46
  --password "..." Encrypt SPA content (users will be prompted to decrypt)
48
- --playground Publish to the playground remix registry
49
47
  --js-merkle Use pure-JS merkleization (no IPFS Kubo binary required)
50
48
  --tag "..." Label deploy in telemetry (or set DEPLOY_TAG env var); see Telemetry in README
51
49
  --gh-pages-mirror After deploy, push the CAR to the current repo's gh-pages branch
@@ -78,7 +76,6 @@ try {
78
76
  });
79
77
 
80
78
  const result = await deploy(buildDir, domain, {
81
- playground: flags.playground,
82
79
  mnemonic: flags.mnemonic,
83
80
  derivationPath: flags.derivationPath,
84
81
  rpc: flags.rpc,
@@ -2,10 +2,10 @@ import {
2
2
  classifyErrorArea,
3
3
  isInteractive,
4
4
  promptYesNo
5
- } from "./chunk-BGLOVKHX.js";
5
+ } from "./chunk-PA3AS7QR.js";
6
6
  import {
7
7
  VERSION
8
- } from "./chunk-LF3XAUCI.js";
8
+ } from "./chunk-Q42TQHNL.js";
9
9
  import "./chunk-QGM4M3NI.js";
10
10
 
11
11
  // src/bug-report.ts
@@ -12,6 +12,35 @@ var MirrorSkipped = class extends Error {
12
12
  this.name = "MirrorSkipped";
13
13
  }
14
14
  };
15
+ async function pollMirrorFreshness(mirrorUrl2, expectedCid, opts = {}) {
16
+ const timeoutMs = opts.timeoutMs ?? 5 * 60 * 1e3;
17
+ const intervalMs = opts.intervalMs ?? 1e4;
18
+ const fetchFn = opts.fetchFn ?? fetch;
19
+ const manifestUrl = mirrorUrl2.replace(/\.car$/, ".json");
20
+ const started = Date.now();
21
+ const deadline = started + timeoutMs;
22
+ let attempts = 0;
23
+ let lastCid = null;
24
+ let lastStatus = 0;
25
+ while (Date.now() < deadline) {
26
+ attempts++;
27
+ try {
28
+ const res = await fetchFn(manifestUrl, { redirect: "follow", cache: "no-store" });
29
+ lastStatus = res.status;
30
+ if (res.status === 200) {
31
+ const m = await res.json();
32
+ if (m.cid === expectedCid) {
33
+ return { verified: true, attempts, durationMs: Date.now() - started, lastCid: m.cid, lastStatus };
34
+ }
35
+ lastCid = m.cid ?? null;
36
+ }
37
+ } catch {
38
+ }
39
+ if (Date.now() + intervalMs >= deadline) break;
40
+ await new Promise((r) => setTimeout(r, intervalMs));
41
+ }
42
+ return { verified: false, attempts, durationMs: Date.now() - started, lastCid, lastStatus };
43
+ }
15
44
  function parseGitRemoteUrl(url) {
16
45
  const trimmed = url.trim();
17
46
  const ssh = trimmed.match(/^git@[^:]+:([^/]+)\/(.+?)(?:\.git)?$/);
@@ -91,7 +120,7 @@ async function mirrorToGitHubPages(input) {
91
120
  }
92
121
  if (input.carBytes.length > GH_PAGES_MIRROR_MAX_BYTES) {
93
122
  const mb = (input.carBytes.length / 1024 / 1024).toFixed(1);
94
- throw new MirrorSkipped(`CAR is ${mb} MB; GitHub limits single files to 100 MB. Mirror skipped.`);
123
+ throw new MirrorSkipped(`CAR is ${mb} MB, exceeds GitHub's 100 MB single-file soft limit. Pages can't host this CAR \u2014 the on-chain deploy still succeeds and hosts will fall back to Bulletin.`);
95
124
  }
96
125
  const domainFilename = normalizeDomainFilename(input.domain);
97
126
  const { owner, repo } = ownerRepo;
@@ -112,8 +141,8 @@ async function mirrorToGitHubPages(input) {
112
141
  branchExists = false;
113
142
  }
114
143
  if (branchExists) {
115
- runGit(["fetch", "origin", `${GH_PAGES_MIRROR_BRANCH}:${GH_PAGES_MIRROR_BRANCH}`, "--depth=1"], repoPath);
116
- runGit(["worktree", "add", workTree, GH_PAGES_MIRROR_BRANCH], repoPath);
144
+ runGit(["fetch", "origin", GH_PAGES_MIRROR_BRANCH, "--depth=1"], repoPath);
145
+ runGit(["worktree", "add", "--detach", workTree, `origin/${GH_PAGES_MIRROR_BRANCH}`], repoPath);
117
146
  } else {
118
147
  runGit(["worktree", "add", "--detach", workTree, "HEAD"], repoPath);
119
148
  runGit(["checkout", "--orphan", GH_PAGES_MIRROR_BRANCH], workTree);
@@ -173,6 +202,7 @@ export {
173
202
  GH_PAGES_MIRROR_DIR,
174
203
  GH_PAGES_MIRROR_BRANCH,
175
204
  MirrorSkipped,
205
+ pollMirrorFreshness,
176
206
  parseGitRemoteUrl,
177
207
  resolveOwnerRepo,
178
208
  resolveSourceCommit,
@@ -12,8 +12,11 @@ var CidPreservingBlockstore = class {
12
12
  *all() {
13
13
  yield* this.data.values();
14
14
  }
15
+ clear() {
16
+ this.data.clear();
17
+ }
15
18
  };
16
- function walkDirectory(dirPath, prefix = "") {
19
+ function* walkDirectoryLazy(dirPath, prefix = "") {
17
20
  let dirents;
18
21
  try {
19
22
  dirents = fs.readdirSync(dirPath, { withFileTypes: true });
@@ -23,42 +26,46 @@ function walkDirectory(dirPath, prefix = "") {
23
26
  if (code === "ENOTDIR") throw new Error(`Not a directory: ${dirPath}`);
24
27
  throw err;
25
28
  }
26
- const entries = [];
27
29
  for (const entry of dirents) {
28
30
  const fullPath = path.join(dirPath, entry.name);
29
31
  const relativePath = prefix ? `${prefix}/${entry.name}` : entry.name;
30
32
  if (entry.isDirectory()) {
31
- entries.push(...walkDirectory(fullPath, relativePath));
33
+ yield* walkDirectoryLazy(fullPath, relativePath);
32
34
  } else if (entry.isFile()) {
33
- entries.push({ path: relativePath, content: fs.readFileSync(fullPath) });
35
+ yield { path: relativePath, absolutePath: fullPath };
34
36
  }
35
37
  }
36
- return entries;
37
38
  }
38
39
  async function collectBytes(iter) {
39
40
  const parts = [];
41
+ let totalLength = 0;
40
42
  for await (const chunk of iter) {
41
43
  parts.push(chunk);
44
+ totalLength += chunk.length;
42
45
  }
43
- const totalLength = parts.reduce((sum, p) => sum + p.length, 0);
44
46
  const result = new Uint8Array(totalLength);
45
47
  let offset = 0;
46
- for (const part of parts) {
48
+ for (let i = 0; i < parts.length; i++) {
49
+ const part = parts[i];
47
50
  result.set(part, offset);
48
51
  offset += part.length;
52
+ parts[i] = void 0;
49
53
  }
50
54
  return result;
51
55
  }
52
56
  async function merkleizeJS(directoryPath) {
53
57
  console.log(` Merkleizing (JS): ${directoryPath}`);
54
- const files = walkDirectory(directoryPath);
55
58
  const blockstore = new CidPreservingBlockstore();
56
- const source = files.map((file) => ({
57
- path: file.path,
58
- content: (async function* () {
59
- yield file.content;
60
- })()
61
- }));
59
+ const source = (function* () {
60
+ for (const file of walkDirectoryLazy(directoryPath)) {
61
+ yield {
62
+ path: file.path,
63
+ content: (async function* () {
64
+ yield fs.readFileSync(file.absolutePath);
65
+ })()
66
+ };
67
+ }
68
+ })();
62
69
  let rootCid;
63
70
  for await (const entry of importer(source, blockstore, {
64
71
  cidVersion: 1,
@@ -77,6 +84,7 @@ async function merkleizeJS(directoryPath) {
77
84
  }
78
85
  await writer.close();
79
86
  const carBytes = await collectPromise;
87
+ blockstore.clear();
80
88
  console.log(` CAR (JS): ${(carBytes.length / 1024 / 1024).toFixed(2)} MB`);
81
89
  return { carBytes, cid: rootCid.toString() };
82
90
  }