@joclaim/browser-extension-sdk 0.2.3 → 0.2.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,15 @@
1
+ <!doctype html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8" />
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0" />
6
+ <title>Reclaim Offscreen Document</title>
7
+ <script defer src="./offscreen.bundle.js"></script>
8
+ </head>
9
+ <body>
10
+ <div id="offscreen-container">
11
+ <!-- This document is used for proof generation operations that require DOM access -->
12
+ </div>
13
+ <!-- Let webpack inject the script automatically -->
14
+ </body>
15
+ </html>
@@ -0,0 +1 @@
1
+ const fs=require("fs"),path=require("path"),https=require("https"),REPO="xj894976677/zk-symmetric-crypto",TARGET_DIR=path.join(process.cwd(),"public","browser-rpc","resources"),TEMP_DIR=path.join(process.cwd(),"zk-resources");function getLatestCommitHash(){return new Promise((e,t)=>{const o=`https://api.github.com/repos/${REPO}/commits/test`,s=https.get(o,{headers:{"User-Agent":"Node.js"}},o=>{let s="";o.on("data",e=>s+=e),o.on("end",()=>{try{const o=JSON.parse(s);o.sha?e(o.sha):t(new Error("No SHA found in GitHub API response"))}catch(e){t(new Error(`Failed to parse GitHub API response: ${e.message}`))}})});s.on("error",t),s.setTimeout(1e4,()=>{s.destroy(),t(new Error("Timeout fetching latest commit hash"))})})}function getFileSize(e,t,o=null){return o||(o=`https://github.com/${REPO}/raw/${t}/resources/${e}`),new Promise((s,r)=>{const n=https.request(o,{method:"HEAD"},o=>{if([301,302].includes(o.statusCode)&&o.headers.location)return s(getFileSize(e,t,o.headers.location));const r=parseInt(o.headers["content-length"],10);s(isNaN(r)?0:r)});n.on("error",r),n.end()})}async function downloadFile(e,t,o,s,r=null,n=3){r||(r=`https://github.com/${REPO}/raw/${s}/resources/${e}`);const a=path.dirname(t);await fs.promises.mkdir(a,{recursive:!0});for(let a=1;a<=n;a++)try{return await new Promise((n,a)=>{const i=fs.createWriteStream(t),c=https.get(r,r=>{if([301,302].includes(r.statusCode)&&r.headers.location)return downloadFile(e,t,o,s,r.headers.location).then(n).catch(a);r.on("data",e=>{o(e.length)}),r.pipe(i),i.on("finish",()=>{i.close(n)})});c.on("error",e=>{fs.unlink(t,()=>a(e))}),c.setTimeout(3e4,()=>{c.destroy(),a(new Error(`Timeout downloading ${e}`))})})}catch(t){if(a===n)throw new Error(`Failed to download ${e} after ${n} attempts: ${t.message}`);console.log(`\nRetry ${a}/${n} for ${e}: ${t.message}`),await new Promise(e=>setTimeout(e,1e3*Math.pow(2,a)))}}function formatBytes(e){const t=["B","KB","MB","GB"];let o=0;for(;e>=1024&&o<t.length-1;)e/=1024,o++;return`${e.toFixed(1)} ${t[o]}`}function renderProgress(e,t,o,s,r,n){const a=Math.min(s>0?o/s:0,1),i=Math.round(25*a),c=Math.max(0,25-i),l="█".repeat(i)+"░".repeat(c),d=r>0?Math.max(0,s-o)/r:0,m=(100*a).toFixed(1).padStart(5),p=`${formatBytes(r)}/s`,h=isFinite(d)?`${Math.round(d)}s`.padStart(4):"--";return`[${e.toString().padStart(2)}/${t}] ${m}% [${l}] Speed: ${p} Time Left: ${h} | ${n}`}async function main(){try{console.log("🔄 Fetching latest commit hash...");const e=await getLatestCommitHash();console.log(`📝 Using commit: ${e}`),await fs.promises.mkdir(TARGET_DIR,{recursive:!0});const t=["circuit_final.zkey","circuit.wasm","circuit.r1cs"],o=[...["chacha20","aes-256-ctr","aes-128-ctr"].flatMap(e=>t.map(t=>`snarkjs/${e}/${t}`))];if(await Promise.all(o.map(async e=>{try{return await fs.promises.access(path.join(TARGET_DIR,e)),!0}catch{return!1}})).then(e=>e.every(e=>e)))return console.log("ZK files already exist in target directory, skipping download."),void process.exit(0);await fs.promises.rm(TEMP_DIR,{recursive:!0,force:!0}),await fs.promises.mkdir(TEMP_DIR,{recursive:!0}),console.log("📦 Downloading ZK files ");const s=o.length;console.log("🔍 Fetching file information... please wait");let r={},n=0;for(const t of o)try{const o=await getFileSize(t,e);r[t]=o,n+=o}catch(e){r[t]=0}let a=0,i=0;const c=Date.now();process.stdout.write("\x1B[?25l");for(const t of o){const o=path.join(TEMP_DIR,t);let r=0;await downloadFile(t,o,e=>{r+=e,i+=e;const o=(Date.now()-c)/1e3,l=renderProgress(a,s,i,n,o>0?i/o:0,path.basename(t));process.stdout.write("\r"+l)},e),a++;const l=(Date.now()-c)/1e3,d=renderProgress(a,s,i,n,l>0?i/l:0,path.basename(t));process.stdout.write("\r"+d)}process.stdout.write("\x1B[?25h\n"),console.log("\nMoving files to final location..."),await fs.promises.rm(TARGET_DIR,{recursive:!0,force:!0}),await fs.promises.rename(TEMP_DIR,TARGET_DIR),console.log("Download completed successfully!"),process.exit(0)}catch(e){process.stdout.write("\x1B[?25h\n"),console.error("\nFatal error during download:",e),await fs.promises.rm(TEMP_DIR,{recursive:!0,force:!0}),process.exit(1)}}process.on("unhandledRejection",e=>{console.error("Unhandled promise rejection:",e),process.exit(1)}),main();
@@ -0,0 +1,2 @@
1
+ #!/usr/bin/env node
2
+ const fs=require("fs"),path=require("path"),cp=require("child_process");function parseArgs(){const o=process.argv.slice(2),e={};for(let n=0;n<o.length;n++){const[i,c]=o[n].split("=");i.startsWith("--")&&(e[i.replace(/^--/,"")]=c??!0)}return e}function ensureDir(o){fs.mkdirSync(o,{recursive:!0})}function copyFile(o,e){ensureDir(path.dirname(e)),fs.copyFileSync(o,e)}function copyDir(o,e){if(fs.existsSync(o)){ensureDir(e);for(const n of fs.readdirSync(o)){const i=path.join(o,n),c=path.join(e,n);fs.statSync(i).isDirectory()?copyDir(i,c):copyFile(i,c)}}}async function main(){const o=parseArgs(),e=process.cwd(),n=path.resolve(__dirname,"../.."),i=path.join(n,"build"),c=path.resolve(e,o["public-dir"]||"public");console.log("[joclaim] downloading circuits...");const s=path.join(__dirname,"download-circuits.js");try{cp.execFileSync(process.execPath,[s],{stdio:"inherit",cwd:e})}catch(o){console.error("[joclaim] circuits download failed",o.message),process.exit(1)}console.log("[joclaim] copying assets...");const t=path.join(c,"joclaim-browser-extension-sdk");copyFile(path.join(i,"content","content.bundle.js"),path.join(t,"content","content.bundle.js")),copyDir(path.join(i,"content","components"),path.join(t,"content","components")),copyDir(path.join(i,"interceptor"),path.join(t,"interceptor")),copyDir(path.join(i,"offscreen"),path.join(t,"offscreen"));const r=path.join(i,"343.bundle.js");fs.existsSync(r)&&copyFile(r,path.join(c,"343.bundle.js")),copyFile(path.join(i,"JoclaimExtensionSDK.bundle.js"),path.join(c,"joclaim-browser-extension-sdk","JoclaimExtensionSDK.bundle.js")),console.log("[joclaim] setup complete")}main().catch(o=>{console.error("[joclaim] setup failed",o),process.exit(1)});
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@joclaim/browser-extension-sdk",
3
- "version": "0.2.3",
3
+ "version": "0.2.5",
4
4
  "module": "build/JoclaimExtensionSDK.bundle.js",
5
5
  "exports": {
6
6
  ".": {
@@ -18,6 +18,7 @@
18
18
  "files": [
19
19
  "build/",
20
20
  "src/types/",
21
+ "src/scripts/",
21
22
  "index.d.ts"
22
23
  ],
23
24
  "scripts": {
@@ -59,7 +60,8 @@
59
60
  "license": "MIT",
60
61
  "dependencies": {
61
62
  "@extism/extism": "^1.0.3",
62
- "@joclaim/attestor-core": "0.2.3",
63
+ "@joclaim/attestor-core": "0.2.4",
64
+ "@joclaim/snarkjs": "0.1.0",
63
65
  "@joclaim/tls": "0.2.0",
64
66
  "browserify-zlib": "^0.2.0",
65
67
  "buffer": "^6.0.3",
@@ -79,7 +81,6 @@
79
81
  "redux": "^4.2.1",
80
82
  "redux-logger": "^3.0.6",
81
83
  "redux-thunk": "^2.4.2",
82
- "@joclaim/snarkjs": "0.1.0",
83
84
  "stream-browserify": "^3.0.0",
84
85
  "tailwindcss": "^3.3.3",
85
86
  "web-vitals": "^2.1.4",
@@ -0,0 +1,305 @@
1
+ const fs = require("fs");
2
+ const path = require("path");
3
+ const https = require("https");
4
+
5
+ const REPO = "xj894976677/zk-symmetric-crypto";
6
+ const TARGET_DIR = path.join(process.cwd(), "public", "browser-rpc", "resources");
7
+ const TEMP_DIR = path.join(process.cwd(), "zk-resources");
8
+
9
+ /**
10
+ * Fetch the latest commit hash from the GitHub repository.
11
+ */
12
+ function getLatestCommitHash() {
13
+ return new Promise((resolve, reject) => {
14
+ const url = `https://api.github.com/repos/${REPO}/commits/test`;
15
+ const req = https.get(
16
+ url,
17
+ {
18
+ headers: {
19
+ "User-Agent": "Node.js",
20
+ },
21
+ },
22
+ (res) => {
23
+ let data = "";
24
+ res.on("data", (chunk) => (data += chunk));
25
+ res.on("end", () => {
26
+ try {
27
+ const json = JSON.parse(data);
28
+ if (json.sha) {
29
+ resolve(json.sha);
30
+ } else {
31
+ reject(new Error("No SHA found in GitHub API response"));
32
+ }
33
+ } catch (error) {
34
+ reject(new Error(`Failed to parse GitHub API response: ${error.message}`));
35
+ }
36
+ });
37
+ },
38
+ );
39
+ req.on("error", reject);
40
+ req.setTimeout(10000, () => {
41
+ req.destroy();
42
+ reject(new Error("Timeout fetching latest commit hash"));
43
+ });
44
+ });
45
+ }
46
+
47
+ /**
48
+ * Perform a HEAD request to fetch the file's content-length.
49
+ * Follows redirects if necessary.
50
+ */
51
+ function getFileSize(filePath, commitHash, url = null) {
52
+ if (!url) {
53
+ url = `https://github.com/${REPO}/raw/${commitHash}/resources/${filePath}`;
54
+ }
55
+ return new Promise((resolve, reject) => {
56
+ const req = https.request(url, { method: "HEAD" }, (res) => {
57
+ if ([301, 302].includes(res.statusCode) && res.headers.location) {
58
+ return resolve(getFileSize(filePath, commitHash, res.headers.location));
59
+ }
60
+ const size = parseInt(res.headers["content-length"], 10);
61
+ resolve(isNaN(size) ? 0 : size);
62
+ });
63
+ req.on("error", reject);
64
+ req.end();
65
+ });
66
+ }
67
+
68
+ /**
69
+ * Download a file from GitHub with retries. On every received chunk,
70
+ * call progressCallback(delta) where delta is the number of bytes just received.
71
+ * Follows redirects if necessary.
72
+ */
73
+ async function downloadFile(
74
+ filePath,
75
+ targetPath,
76
+ progressCallback,
77
+ commitHash,
78
+ url = null,
79
+ retries = 3,
80
+ ) {
81
+ if (!url) {
82
+ url = `https://github.com/${REPO}/raw/${commitHash}/resources/${filePath}`;
83
+ }
84
+ const dir = path.dirname(targetPath);
85
+ await fs.promises.mkdir(dir, { recursive: true });
86
+
87
+ for (let attempt = 1; attempt <= retries; attempt++) {
88
+ try {
89
+ return await new Promise((resolve, reject) => {
90
+ const file = fs.createWriteStream(targetPath);
91
+ const request = https.get(url, (response) => {
92
+ if ([301, 302].includes(response.statusCode) && response.headers.location) {
93
+ // Follow redirect
94
+ return downloadFile(
95
+ filePath,
96
+ targetPath,
97
+ progressCallback,
98
+ commitHash,
99
+ response.headers.location,
100
+ )
101
+ .then(resolve)
102
+ .catch(reject);
103
+ }
104
+ response.on("data", (chunk) => {
105
+ progressCallback(chunk.length);
106
+ });
107
+ response.pipe(file);
108
+ file.on("finish", () => {
109
+ file.close(resolve);
110
+ });
111
+ });
112
+
113
+ request.on("error", (err) => {
114
+ fs.unlink(targetPath, () => reject(err));
115
+ });
116
+
117
+ request.setTimeout(30000, () => {
118
+ request.destroy();
119
+ reject(new Error(`Timeout downloading ${filePath}`));
120
+ });
121
+ });
122
+ } catch (error) {
123
+ if (attempt === retries) {
124
+ throw new Error(
125
+ `Failed to download ${filePath} after ${retries} attempts: ${error.message}`,
126
+ );
127
+ }
128
+ console.log(`\nRetry ${attempt}/${retries} for ${filePath}: ${error.message}`);
129
+ // Exponential backoff: wait 2^attempt seconds before retrying
130
+ await new Promise((resolve) => setTimeout(resolve, Math.pow(2, attempt) * 1000));
131
+ }
132
+ }
133
+ }
134
+
135
+ /**
136
+ * Format bytes into a human–readable string.
137
+ */
138
+ function formatBytes(n) {
139
+ const units = ["B", "KB", "MB", "GB"];
140
+ let i = 0;
141
+ while (n >= 1024 && i < units.length - 1) {
142
+ n /= 1024;
143
+ i++;
144
+ }
145
+ return `${n.toFixed(1)} ${units[i]}`;
146
+ }
147
+
148
+ /**
149
+ * Render the progress string. We compute:
150
+ * - overall progress = globalDownloaded / globalTotal
151
+ * - speed = globalDownloaded / elapsed time
152
+ * - ETA = remaining bytes / speed
153
+ * Also show the number of files finished.
154
+ */
155
+ function renderProgress(
156
+ completedFiles,
157
+ totalFiles,
158
+ globalDownloaded,
159
+ globalTotal,
160
+ speed,
161
+ currentFileName,
162
+ ) {
163
+ const progress = Math.min(globalTotal > 0 ? globalDownloaded / globalTotal : 0, 1);
164
+ const barLength = 25;
165
+ const filled = Math.round(progress * barLength);
166
+ const unfilled = Math.max(0, barLength - filled);
167
+ const bar = "█".repeat(filled) + "░".repeat(unfilled);
168
+ const remaining = speed > 0 ? Math.max(0, globalTotal - globalDownloaded) / speed : 0;
169
+
170
+ const percentage = (progress * 100).toFixed(1).padStart(5);
171
+ const spdStr = `${formatBytes(speed)}/s`;
172
+ const etaStr = isFinite(remaining) ? `${Math.round(remaining)}s`.padStart(4) : "--";
173
+
174
+ return `[${completedFiles.toString().padStart(2)}/${totalFiles}] ${percentage}% [${bar}] Speed: ${spdStr} Time Left: ${etaStr} | ${currentFileName}`;
175
+ }
176
+
177
+ async function main() {
178
+ try {
179
+ // Fetch the latest commit hash
180
+ console.log("🔄 Fetching latest commit hash...");
181
+ const COMMIT_HASH = await getLatestCommitHash();
182
+ console.log(`📝 Using commit: ${COMMIT_HASH}`);
183
+
184
+ // Create public/browser-rpc directory if it doesn't exist
185
+ await fs.promises.mkdir(TARGET_DIR, { recursive: true });
186
+
187
+ // Check if files already exist in the target directory
188
+ const ciphers = ["chacha20", "aes-256-ctr", "aes-128-ctr"];
189
+ const files = ["circuit_final.zkey", "circuit.wasm", "circuit.r1cs"];
190
+
191
+ const allFiles = [...ciphers.flatMap((c) => files.map((f) => `snarkjs/${c}/${f}`))];
192
+
193
+ // Check if all files exist
194
+ const allFilesExist = await Promise.all(
195
+ allFiles.map(async (file) => {
196
+ try {
197
+ await fs.promises.access(path.join(TARGET_DIR, file));
198
+ return true;
199
+ } catch {
200
+ return false;
201
+ }
202
+ }),
203
+ ).then((results) => results.every((exists) => exists));
204
+
205
+ if (allFilesExist) {
206
+ console.log("ZK files already exist in target directory, skipping download.");
207
+ process.exit(0);
208
+ return;
209
+ }
210
+
211
+ // Create and clean temp directory
212
+ await fs.promises.rm(TEMP_DIR, { recursive: true, force: true });
213
+ await fs.promises.mkdir(TEMP_DIR, { recursive: true });
214
+
215
+ console.log("📦 Downloading ZK files ");
216
+
217
+ const totalFiles = allFiles.length;
218
+
219
+ // Pre-fetch each file's size so that we can compute overall progress
220
+ console.log("🔍 Fetching file information... please wait");
221
+ let fileSizes = {};
222
+ let globalTotalBytes = 0;
223
+ for (const filePath of allFiles) {
224
+ try {
225
+ const size = await getFileSize(filePath, COMMIT_HASH);
226
+ fileSizes[filePath] = size;
227
+ globalTotalBytes += size;
228
+ } catch (err) {
229
+ fileSizes[filePath] = 0;
230
+ }
231
+ }
232
+
233
+ let completedFiles = 0;
234
+ let globalDownloadedBytes = 0;
235
+ const startTime = Date.now();
236
+
237
+ process.stdout.write("\x1B[?25l"); // Hide cursor
238
+
239
+ // Download files sequentially to temp directory
240
+ for (const filePath of allFiles) {
241
+ const targetPath = path.join(TEMP_DIR, filePath);
242
+ let currentFileDownloaded = 0;
243
+
244
+ await downloadFile(
245
+ filePath,
246
+ targetPath,
247
+ (delta) => {
248
+ currentFileDownloaded += delta;
249
+ globalDownloadedBytes += delta;
250
+
251
+ const elapsed = (Date.now() - startTime) / 1000;
252
+ const speed = elapsed > 0 ? globalDownloadedBytes / elapsed : 0;
253
+
254
+ const progressStr = renderProgress(
255
+ completedFiles,
256
+ totalFiles,
257
+ globalDownloadedBytes,
258
+ globalTotalBytes,
259
+ speed,
260
+ path.basename(filePath),
261
+ );
262
+ process.stdout.write("\r\x1B[K" + progressStr);
263
+ },
264
+ COMMIT_HASH,
265
+ );
266
+ completedFiles++;
267
+
268
+ // After finishing the file, update the progress one last time.
269
+ const elapsed = (Date.now() - startTime) / 1000;
270
+ const speed = elapsed > 0 ? globalDownloadedBytes / elapsed : 0;
271
+ const progressStr = renderProgress(
272
+ completedFiles,
273
+ totalFiles,
274
+ globalDownloadedBytes,
275
+ globalTotalBytes,
276
+ speed,
277
+ path.basename(filePath),
278
+ );
279
+ process.stdout.write("\r\x1B[K" + progressStr);
280
+ }
281
+
282
+ process.stdout.write("\x1B[?25h\n"); // Show cursor
283
+ console.log("\nMoving files to final location...");
284
+
285
+ // Move files from temp to final location
286
+ await fs.promises.rm(TARGET_DIR, { recursive: true, force: true });
287
+ await fs.promises.rename(TEMP_DIR, TARGET_DIR);
288
+
289
+ console.log("Download completed successfully!");
290
+ process.exit(0);
291
+ } catch (error) {
292
+ process.stdout.write("\x1B[?25h\n"); // Ensure the cursor is restored
293
+ console.error("\nFatal error during download:", error);
294
+ // Clean up temp directory on error
295
+ await fs.promises.rm(TEMP_DIR, { recursive: true, force: true });
296
+ process.exit(1);
297
+ }
298
+ }
299
+
300
+ process.on("unhandledRejection", (error) => {
301
+ console.error("Unhandled promise rejection:", error);
302
+ process.exit(1);
303
+ });
304
+
305
+ main();
@@ -0,0 +1,94 @@
1
+ #!/usr/bin/env node
2
+ const fs = require("fs");
3
+ const path = require("path");
4
+ const cp = require("child_process");
5
+
6
+ function parseArgs() {
7
+ const args = process.argv.slice(2);
8
+ const out = {};
9
+ for (let i = 0; i < args.length; i++) {
10
+ const [k, v] = args[i].split("=");
11
+ if (k.startsWith("--")) out[k.replace(/^--/, "")] = v ?? true;
12
+ }
13
+ return out;
14
+ }
15
+
16
+ function ensureDir(p) {
17
+ fs.mkdirSync(p, { recursive: true });
18
+ }
19
+
20
+ function copyFile(src, dest) {
21
+ ensureDir(path.dirname(dest));
22
+ fs.copyFileSync(src, dest);
23
+ }
24
+
25
+ function copyDir(srcDir, destDir) {
26
+ if (!fs.existsSync(srcDir)) return;
27
+ ensureDir(destDir);
28
+ for (const entry of fs.readdirSync(srcDir)) {
29
+ const s = path.join(srcDir, entry);
30
+ const d = path.join(destDir, entry);
31
+ const stat = fs.statSync(s);
32
+ if (stat.isDirectory()) copyDir(s, d);
33
+ else copyFile(s, d);
34
+ }
35
+ }
36
+
37
+ async function main() {
38
+ const args = parseArgs();
39
+ const projectRoot = process.cwd();
40
+
41
+ const sdkRoot = path.resolve(__dirname, "../..");
42
+ const sdkBuild = path.join(sdkRoot, "build");
43
+
44
+ const publicDir = path.resolve(projectRoot, args["public-dir"] || "public");
45
+
46
+ // 1) Download circuits into public/browser-rpc/resources
47
+ console.log("[joclaim] downloading circuits...");
48
+ const dlScript = path.join(__dirname, "download-circuits.js");
49
+ try {
50
+ cp.execFileSync(process.execPath, [dlScript], { stdio: "inherit", cwd: projectRoot });
51
+ } catch (e) {
52
+ console.error("[joclaim] circuits download failed", e.message);
53
+ process.exit(1);
54
+ }
55
+
56
+ // 2) Copy SDK assets into public/joclaim-browser-extension-sdk
57
+ console.log("[joclaim] copying assets...");
58
+ const targetBase = path.join(publicDir, "joclaim-browser-extension-sdk");
59
+
60
+ // content
61
+ copyFile(
62
+ path.join(sdkBuild, "content", "content.bundle.js"),
63
+ path.join(targetBase, "content", "content.bundle.js"),
64
+ );
65
+ copyDir(
66
+ path.join(sdkBuild, "content", "components"),
67
+ path.join(targetBase, "content", "components"),
68
+ );
69
+
70
+ // interceptor
71
+ copyDir(path.join(sdkBuild, "interceptor"), path.join(targetBase, "interceptor"));
72
+
73
+ // offscreen
74
+ copyDir(path.join(sdkBuild, "offscreen"), path.join(targetBase, "offscreen"));
75
+
76
+ // optional bundle
77
+ const b343 = path.join(sdkBuild, "343.bundle.js");
78
+ if (fs.existsSync(b343)) copyFile(b343, path.join(publicDir, "343.bundle.js"));
79
+
80
+ // 3) Copy SDK bundle into public/joclaim-browser-extension-sdk
81
+ copyFile(
82
+ path.join(sdkBuild, "JoclaimExtensionSDK.bundle.js"),
83
+ path.join(publicDir, "joclaim-browser-extension-sdk", "JoclaimExtensionSDK.bundle.js"),
84
+ );
85
+ // optionally also MV2
86
+ // copyFile(path.join(sdkBuild, "ReclaimExtensionSDK-mv2.bundle.js"), path.join(publicDir, "reclaim-browser-extension-sdk", "ReclaimExtensionSDK-mv2.bundle.js"));
87
+
88
+ console.log("[joclaim] setup complete");
89
+ }
90
+
91
+ main().catch((e) => {
92
+ console.error("[joclaim] setup failed", e);
93
+ process.exit(1);
94
+ });