@metamask-previews/foundryup 0.0.0-preview-c1fef6e5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. package/CHANGELOG.md +10 -0
  2. package/LICENSE +20 -0
  3. package/README.md +47 -0
  4. package/dist/cli.cjs +10 -0
  5. package/dist/cli.cjs.map +1 -0
  6. package/dist/cli.d.cts +3 -0
  7. package/dist/cli.d.cts.map +1 -0
  8. package/dist/cli.d.mts +3 -0
  9. package/dist/cli.d.mts.map +1 -0
  10. package/dist/cli.mjs +8 -0
  11. package/dist/cli.mjs.map +1 -0
  12. package/dist/download.cjs +68 -0
  13. package/dist/download.cjs.map +1 -0
  14. package/dist/download.d.cts +18 -0
  15. package/dist/download.d.cts.map +1 -0
  16. package/dist/download.d.mts +18 -0
  17. package/dist/download.d.mts.map +1 -0
  18. package/dist/download.mjs +64 -0
  19. package/dist/download.mjs.map +1 -0
  20. package/dist/extract.cjs +184 -0
  21. package/dist/extract.cjs.map +1 -0
  22. package/dist/extract.d.cts +15 -0
  23. package/dist/extract.d.cts.map +1 -0
  24. package/dist/extract.d.mts +15 -0
  25. package/dist/extract.d.mts.map +1 -0
  26. package/dist/extract.mjs +180 -0
  27. package/dist/extract.mjs.map +1 -0
  28. package/dist/index.cjs +116 -0
  29. package/dist/index.cjs.map +1 -0
  30. package/dist/index.d.cts +11 -0
  31. package/dist/index.d.cts.map +1 -0
  32. package/dist/index.d.mts +11 -0
  33. package/dist/index.d.mts.map +1 -0
  34. package/dist/index.mjs +109 -0
  35. package/dist/index.mjs.map +1 -0
  36. package/dist/options.cjs +135 -0
  37. package/dist/options.cjs.map +1 -0
  38. package/dist/options.d.cts +52 -0
  39. package/dist/options.d.cts.map +1 -0
  40. package/dist/options.d.mts +52 -0
  41. package/dist/options.d.mts.map +1 -0
  42. package/dist/options.mjs +127 -0
  43. package/dist/options.mjs.map +1 -0
  44. package/dist/types.cjs +30 -0
  45. package/dist/types.cjs.map +1 -0
  46. package/dist/types.d.cts +72 -0
  47. package/dist/types.d.cts.map +1 -0
  48. package/dist/types.d.mts +72 -0
  49. package/dist/types.d.mts.map +1 -0
  50. package/dist/types.mjs +27 -0
  51. package/dist/types.mjs.map +1 -0
  52. package/dist/utils.cjs +104 -0
  53. package/dist/utils.cjs.map +1 -0
  54. package/dist/utils.d.cts +44 -0
  55. package/dist/utils.d.cts.map +1 -0
  56. package/dist/utils.d.mts +44 -0
  57. package/dist/utils.d.mts.map +1 -0
  58. package/dist/utils.mjs +95 -0
  59. package/dist/utils.mjs.map +1 -0
  60. package/package.json +66 -0
@@ -0,0 +1,180 @@
+ import { ok } from "node:assert/strict";
+ import { createHash } from "node:crypto";
+ import { createWriteStream } from "node:fs";
+ import { rename, mkdir, rm } from "node:fs/promises";
+ import { Agent as HttpAgent } from "node:http";
+ import { Agent as HttpsAgent } from "node:https";
+ import { join, basename, extname, relative } from "node:path";
+ import { pipeline } from "node:stream/promises";
+ import { Minipass } from "minipass";
+ import { extract as extractTar } from "tar";
+ import { Open } from "unzipper";
+ import { say } from "./utils.mjs";
+ import { startDownload } from "./download.mjs";
+ import { Extension, Binary } from "./types.mjs";
+ /**
+ * Extracts the binaries from the given URL and writes them to the destination.
+ *
+ * @param url - The URL of the archive to extract the binaries from
+ * @param binaries - The list of binaries to extract
+ * @param dir - The destination directory
+ * @param checksums - The checksums to verify the binaries against
+ * @returns The list of binaries extracted
+ */
+ export async function extractFrom(url, binaries, dir, checksums) {
+ const extract = url.pathname.toLowerCase().endsWith(Extension.Tar)
+ ? extractFromTar
+ : extractFromZip;
+ // write all files to a temporary directory first, then rename to the final
+ // destination to avoid accidental partial extraction. We don't use
+ // `os.tmpdir` for this because `rename` will fail if the directories are on
+ // different file systems.
+ const tempDir = `${dir}.downloading`;
+ const rmOpts = { recursive: true, maxRetries: 3, force: true };
+ try {
+ // clean up any previous in-progress downloads
+ await rm(tempDir, rmOpts);
+ // make the temporary directory to extract the binaries to
+ await mkdir(tempDir, { recursive: true });
+ const downloads = await extract(url, binaries, tempDir, checksums?.algorithm);
+ ok(downloads.length === binaries.length, 'Failed to extract all binaries');
+ const paths = [];
+ for (const { path, binary, checksum } of downloads) {
+ if (checksums) {
+ say(`verifying checksum for ${binary}`);
+ const expected = checksums.binaries[binary];
+ if (checksum === expected) {
+ say(`checksum verified for ${binary}`);
+ }
+ else {
+ throw new Error(`checksum mismatch for ${binary}, expected ${expected}, got ${checksum}`);
+ }
+ }
+ // add the *final* path to the list of binaries
+ paths.push(join(dir, relative(tempDir, path)));
+ }
+ // this directory shouldn't exist, but if two simultaneous `yarn foundryup`
+ // processes are running, it might. Last process wins, so we remove other
+ // `dir`s just in case.
+ await rm(dir, rmOpts);
+ // everything has been extracted; move the files to their final destination
+ await rename(tempDir, dir);
+ // return the list of extracted binaries
+ return paths;
+ }
+ catch (error) {
+ // if things fail for any reason try to clean up a bit. it is very important
+ // to not leave `dir` behind, as its existence is a signal that the binaries
+ // are installed.
+ const rmErrors = (await Promise.allSettled([rm(tempDir, rmOpts), rm(dir, rmOpts)]))
+ .filter((r) => r.status === 'rejected')
+ .map((r) => r.reason);
+ // if we failed to clean up, create an aggregate error message
+ if (rmErrors.length) {
+ throw new AggregateError([error, ...rmErrors], 'This is a bug; you should report it.');
+ }
+ throw error;
+ }
+ }
+ /**
+ * Extracts the binaries from a tar archive.
+ *
+ * @param url - The URL of the archive to extract the binaries from
+ * @param binaries - The list of binaries to extract
+ * @param dir - The destination directory
+ * @param checksumAlgorithm - The checksum algorithm to use
+ * @returns The list of binaries extracted
+ */
+ async function extractFromTar(url, binaries, dir, checksumAlgorithm) {
+ const downloads = [];
+ await pipeline(startDownload(url), extractTar({
+ cwd: dir,
+ transform: (entry) => {
+ const absolutePath = entry.absolute;
+ if (!absolutePath) {
+ throw new Error('Missing absolute path for entry');
+ }
+ if (checksumAlgorithm) {
+ const hash = createHash(checksumAlgorithm);
+ const passThrough = new Minipass({ async: true });
+ passThrough.pipe(hash);
+ passThrough.on('end', () => {
+ downloads.push({
+ path: absolutePath,
+ binary: entry.path,
+ checksum: hash.digest('hex'),
+ });
+ });
+ return passThrough;
+ }
+ // When no checksum is needed, record the entry and return undefined
+ // to use the original stream without transformation
+ downloads.push({
+ path: absolutePath,
+ binary: entry.path,
+ });
+ return undefined;
+ },
+ }, binaries));
+ return downloads;
+ }
+ /**
+ * Extracts the binaries from a zip archive.
+ *
+ * @param url - The URL of the archive to extract the binaries from
+ * @param binaries - The list of binaries to extract
+ * @param dir - The destination directory
+ * @param checksumAlgorithm - The checksum algorithm to use
+ * @returns The list of binaries extracted
+ */
+ async function extractFromZip(url, binaries, dir, checksumAlgorithm) {
+ const agent = new (url.protocol === 'http:' ? HttpAgent : HttpsAgent)({
+ keepAlive: true,
+ });
+ const source = {
+ async size() {
+ const download = startDownload(url, { agent, method: 'HEAD' });
+ const response = await download.response();
+ const contentLength = response.headers['content-length'];
+ return contentLength ? parseInt(contentLength, 10) : 0;
+ },
+ stream(offset, bytes) {
+ const options = {
+ agent,
+ headers: {
+ range: `bytes=${offset}-${bytes ? offset + bytes : ''}`,
+ },
+ };
+ return startDownload(url, options);
+ },
+ };
+ const { files } = await Open.custom(source, {});
+ const filtered = files.filter(({ path }) => binaries.includes(basename(path, extname(path))));
+ return await Promise.all(filtered.map(async ({ path, stream }) => {
+ const dest = join(dir, path);
+ const entry = stream();
+ const destStream = createWriteStream(dest);
+ const binary = basename(path, extname(path));
+ if (checksumAlgorithm) {
+ const hash = createHash(checksumAlgorithm);
+ const hashStream = async function* (entryStream) {
+ for await (const chunk of entryStream) {
+ hash.update(chunk);
+ yield chunk;
+ }
+ };
+ await pipeline(entry, hashStream, destStream);
+ return {
+ path: dest,
+ binary,
+ checksum: hash.digest('hex'),
+ };
+ }
+ await pipeline(entry, destStream);
+ return {
+ path: dest,
+ binary,
+ };
+ }));
+ }
+ //# sourceMappingURL=extract.mjs.map
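A minimal TypeScript sketch of how `extractFrom` above might be driven, based only on its signature and the call site in `index.mjs` later in this diff. The archive URL, the `'anvil'` binary name, and the cache path are placeholder assumptions, not values taken from this package.

```ts
// A minimal sketch (not part of the package): URL, binary name, and
// destination directory are placeholders; real values are resolved by
// options.mjs and types.mjs.
import { extractFrom } from './extract.mjs';

async function main(): Promise<void> {
  // URL shape mirrors getBinaryArchiveUrl in index.mjs further down in this
  // diff; the exact archive extension comes from the Extension enum.
  const url = new URL(
    'https://github.com/foundry-rs/foundry/releases/download/nightly/foundry_nightly_linux_amd64.tar.gz',
  );
  // Passing null skips checksum verification. To verify, pass
  // { algorithm: 'sha256', binaries: { anvil: '<expected hex digest>' } }.
  const paths = await extractFrom(url, ['anvil'], '.metamask/cache/example', null);
  console.log('extracted:', paths);
}

main().catch(console.error);
```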
@@ -0,0 +1 @@
+ {"version":3,"file":"extract.mjs","sourceRoot":"","sources":["../src/extract.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,EAAE,EAAE,2BAA2B;AACxC,OAAO,EAAE,UAAU,EAAE,oBAAoB;AACzC,OAAO,EAAE,iBAAiB,EAAE,gBAAgB;AAC5C,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,EAAE,EAAE,yBAAyB;AACrD,OAAO,EAAE,KAAK,IAAI,SAAS,EAAE,kBAAkB;AAC/C,OAAO,EAAE,KAAK,IAAI,UAAU,EAAE,mBAAmB;AACjD,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,QAAQ,EAAE,kBAAkB;AAC9D,OAAO,EAAE,QAAQ,EAAE,6BAA6B;AAChD,OAAO,EAAE,QAAQ,EAAE,iBAAiB;AACpC,OAAO,EAAE,OAAO,IAAI,UAAU,EAAE,YAAY;AAC5C,OAAO,EAAE,IAAI,EAA2B,iBAAiB;AACzD,OAAO,EAAE,GAAG,EAAE,oBAAgB;AAC9B,OAAO,EAAE,aAAa,EAAE,uBAAmB;AAC3C,OAAO,EAAE,SAAS,EAAE,MAAM,EAAE,oBAAgB;AAE5C;;;;;;;;GAQG;AAEH,MAAM,CAAC,KAAK,UAAU,WAAW,CAC/B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,SAAyE;IAEzE,MAAM,OAAO,GAAG,GAAG,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,GAAG,CAAC;QAChE,CAAC,CAAC,cAAc;QAChB,CAAC,CAAC,cAAc,CAAC;IACnB,2EAA2E;IAC3E,mEAAmE;IACnE,4EAA4E;IAC5E,0BAA0B;IAC1B,MAAM,OAAO,GAAG,GAAG,GAAG,cAAc,CAAC;IACrC,MAAM,MAAM,GAAG,EAAE,SAAS,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC;IAC/D,IAAI;QACF,8CAA8C;QAC9C,MAAM,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;QAC1B,0DAA0D;QAC1D,MAAM,KAAK,CAAC,OAAO,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAC1C,MAAM,SAAS,GAAG,MAAM,OAAO,CAC7B,GAAG,EACH,QAAQ,EACR,OAAO,EACP,SAAS,EAAE,SAAS,CACrB,CAAC;QACF,EAAE,CAAC,SAAS,CAAC,MAAM,KAAK,QAAQ,CAAC,MAAM,EAAE,gCAAgC,CAAC,CAAC;QAE3E,MAAM,KAAK,GAAa,EAAE,CAAC;QAC3B,KAAK,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,IAAI,SAAS,EAAE;YAClD,IAAI,SAAS,EAAE;gBACb,GAAG,CAAC,0BAA0B,MAAM,EAAE,CAAC,CAAC;gBACxC,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;gBAC5C,IAAI,QAAQ,KAAK,QAAQ,EAAE;oBACzB,GAAG,CAAC,yBAAyB,MAAM,EAAE,CAAC,CAAC;iBACxC;qBAAM;oBACL,MAAM,IAAI,KAAK,CACb,yBAAyB,MAAM,cAAc,QAAQ,SAAS,QAAQ,EAAE,CACzE,CAAC;iBACH;aACF;YACD,+CAA+C;YAC/C,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,QAAQ,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC,CAAC,CAAC;SAChD;QAED,2EAA2E;QAC3E,yEAAyE;QACzE,uBAAuB;QACvB,MAAM,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,CAAC;QACtB,2EAA2E;QAC3E,MAAM,MAAM,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;QAC3B,wCAAwC;QACxC,OAAO,KAAK,CAAC;KACd;IAAC,OAAO,KAAK,EAAE;QACd,4EAA4E;QAC5E,4EAA4E;QAC5E,iBAAiB;QACjB,MAAM,QAAQ,GAAG,CACf,MAAM,OAAO,CAAC,UAAU,CAAC,CAAC,EAAE,CAAC,OAAO,EAAE,MAAM,CAAC,EAAE,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,CAAC,CAAC,CACjE;aACE,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,MAAM,KAAK,UAAU,CAAC;aACtC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAE,CAA2B,CAAC,MAAM,CAAC,CAAC;QAEnD,8DAA8D;QAC9D,IAAI,QAAQ,CAAC,MAAM,EAAE;YACnB,MAAM,IAAI,cAAc,CACtB,CAAC,KAAK,EAAE,GAAG,QAAQ,CAAC,EACpB,sCAAsC,CACvC,CAAC;SACH;QACD,MAAM,KAAK,CAAC;KACb;AACH,CAAC;AACD;;;;;;;;GAQG;AAEH,KAAK,UAAU,cAAc,CAC3B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,iBAA0B;IAE1B,MAAM,SAAS,GAIT,EAAE,CAAC;IACT,MAAM,QAAQ,CACZ,aAAa,CAAC,GAAG,CAAC,EAClB,UAAU,CACR;QACE,GAAG,EAAE,GAAG;QACR,SAAS,EAAE,CAAC,KAAK,EAAE,EAAE;YACnB,MAAM,YAAY,GAAG,KAAK,CAAC,QAAQ,CAAC;YACpC,IAAI,CAAC,YAAY,EAAE;gBACjB,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;aACpD;YAED,IAAI,iBAAiB,EAAE;gBACrB,MAAM,IAAI,GAAG,UAAU,CAAC,iBAAiB,CAAC,CAAC;gBAC3C,MAAM,WAAW,GAAG,IAAI,QAAQ,CAAC,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;gBAClD,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;gBACvB,WAAW,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE;oBACzB,SAAS,CAAC,IAAI,CAAC;wBACb,IAAI,EAAE,YAAY;wBAClB,MAAM,EAAE,KAAK,CAAC,IAAc;wBAC5B,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;qBAC7B,CAAC,CAAC;gBACL,CAAC,CAAC,CAAC;gBACH,OAAO,WAAW,CAAC;aACpB;YAED,oEAAoE;YACpE,oDAAoD;YACpD,SAAS,CAAC,IAAI,CAAC;gBACb,IAAI,EAAE,YAAY;gBAClB,MAAM,EAAE,KAAK,CAAC,IAAc;aAC7B,CAAC,CAAC;YACH,OAAO,SAAS,CAAC;QACnB,CAAC;KACF,EACD,QAAQ,CACT,CACF,CAAC;IACF,OAAO,SAAS,CAAC;AACnB,CAAC;AACD;;;;;;;;GAQG;
AAEH,KAAK,UAAU,cAAc,CAC3B,GAAQ,EACR,QAAkB,EAClB,GAAW,EACX,iBAA0B;IAE1B,MAAM,KAAK,GAAG,IAAI,CAAC,GAAG,CAAC,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,UAAU,CAAC,CAAC;QACpE,SAAS,EAAE,IAAI;KAChB,CAAC,CAAC;IACH,MAAM,MAAM,GAAW;QACrB,KAAK,CAAC,IAAI;YACR,MAAM,QAAQ,GAAG,aAAa,CAAC,GAAG,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,CAAC,CAAC;YAC/D,MAAM,QAAQ,GAAG,MAAM,QAAQ,CAAC,QAAQ,EAAE,CAAC;YAC3C,MAAM,aAAa,GAAG,QAAQ,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAC;YACzD,OAAO,aAAa,CAAC,CAAC,CAAC,QAAQ,CAAC,aAAa,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;QACzD,CAAC;QACD,MAAM,CAAC,MAAc,EAAE,KAAa;YAClC,MAAM,OAAO,GAAG;gBACd,KAAK;gBACL,OAAO,EAAE;oBACP,KAAK,EAAE,SAAS,MAAM,IAAI,KAAK,CAAC,CAAC,CAAC,MAAM,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;iBACxD;aACF,CAAC;YACF,OAAO,aAAa,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;QACrC,CAAC;KACF,CAAC;IAEF,MAAM,EAAE,KAAK,EAAE,GAAG,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,EAAE,CAAC,CAAC;IAChD,MAAM,QAAQ,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,EAAE,CACzC,QAAQ,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAW,CAAC,CAC3D,CAAC;IACF,OAAO,MAAM,OAAO,CAAC,GAAG,CACtB,QAAQ,CAAC,GAAG,CAAC,KAAK,EAAE,EAAE,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE;QACtC,MAAM,IAAI,GAAG,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;QAC7B,MAAM,KAAK,GAAG,MAAM,EAAE,CAAC;QACvB,MAAM,UAAU,GAAG,iBAAiB,CAAC,IAAI,CAAC,CAAC;QAC3C,MAAM,MAAM,GAAG,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAW,CAAC;QACvD,IAAI,iBAAiB,EAAE;YACrB,MAAM,IAAI,GAAG,UAAU,CAAC,iBAAiB,CAAC,CAAC;YAC3C,MAAM,UAAU,GAAG,KAAK,SAAS,CAAC,EAAE,WAAkB;gBACpD,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,WAAW,EAAE;oBACrC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;oBACnB,MAAM,KAAK,CAAC;iBACb;YACH,CAAC,CAAC;YACF,MAAM,QAAQ,CAAC,KAAK,EAAE,UAAU,EAAE,UAAU,CAAC,CAAC;YAC9C,OAAO;gBACL,IAAI,EAAE,IAAI;gBACV,MAAM;gBACN,QAAQ,EAAE,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC;aAC7B,CAAC;SACH;QACD,MAAM,QAAQ,CAAC,KAAK,EAAE,UAAU,CAAC,CAAC;QAClC,OAAO;YACL,IAAI,EAAE,IAAI;YACV,MAAM;SACP,CAAC;IACJ,CAAC,CAAC,CACH,CAAC;AACJ,CAAC","sourcesContent":["import { ok } from 'node:assert/strict';\nimport { createHash } from 'node:crypto';\nimport { createWriteStream } from 'node:fs';\nimport { rename, mkdir, rm } from 'node:fs/promises';\nimport { Agent as HttpAgent } from 'node:http';\nimport { Agent as HttpsAgent } from 'node:https';\nimport { join, basename, extname, relative } from 'node:path';\nimport { pipeline } from 'node:stream/promises';\nimport { Minipass } from 'minipass';\nimport { extract as extractTar } from 'tar';\nimport { Open, type Source, type Entry } from 'unzipper';\nimport { say } from './utils';\nimport { startDownload } from './download';\nimport { Extension, Binary } from './types';\n\n/**\n * Extracts the binaries from the given URL and writes them to the destination.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksums - The checksums to verify the binaries against\n * @returns The list of binaries extracted\n */\n\nexport async function extractFrom(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksums: { algorithm: string; binaries: Record<Binary, string> } | null,\n) {\n const extract = url.pathname.toLowerCase().endsWith(Extension.Tar)\n ? extractFromTar\n : extractFromZip;\n // write all files to a temporary directory first, then rename to the final\n // destination to avoid accidental partial extraction. 
We don't use\n // `os.tmpdir` for this because `rename` will fail if the directories are on\n // different file systems.\n const tempDir = `${dir}.downloading`;\n const rmOpts = { recursive: true, maxRetries: 3, force: true };\n try {\n // clean up any previous in-progress downloads\n await rm(tempDir, rmOpts);\n // make the temporary directory to extract the binaries to\n await mkdir(tempDir, { recursive: true });\n const downloads = await extract(\n url,\n binaries,\n tempDir,\n checksums?.algorithm,\n );\n ok(downloads.length === binaries.length, 'Failed to extract all binaries');\n\n const paths: string[] = [];\n for (const { path, binary, checksum } of downloads) {\n if (checksums) {\n say(`verifying checksum for ${binary}`);\n const expected = checksums.binaries[binary];\n if (checksum === expected) {\n say(`checksum verified for ${binary}`);\n } else {\n throw new Error(\n `checksum mismatch for ${binary}, expected ${expected}, got ${checksum}`,\n );\n }\n }\n // add the *final* path to the list of binaries\n paths.push(join(dir, relative(tempDir, path)));\n }\n\n // this directory shouldn't exist, but if two simultaneous `yarn foundryup`\n // processes are running, it might. Last process wins, so we remove other\n // `dir`s just in case.\n await rm(dir, rmOpts);\n // everything has been extracted; move the files to their final destination\n await rename(tempDir, dir);\n // return the list of extracted binaries\n return paths;\n } catch (error) {\n // if things fail for any reason try to clean up a bit. it is very important\n // to not leave `dir` behind, as its existence is a signal that the binaries\n // are installed.\n const rmErrors = (\n await Promise.allSettled([rm(tempDir, rmOpts), rm(dir, rmOpts)])\n )\n .filter((r) => r.status === 'rejected')\n .map((r) => (r as PromiseRejectedResult).reason);\n\n // if we failed to clean up, create an aggregate error message\n if (rmErrors.length) {\n throw new AggregateError(\n [error, ...rmErrors],\n 'This is a bug; you should report it.',\n );\n }\n throw error;\n }\n}\n/**\n * Extracts the binaries from a tar archive.\n *\n * @param url - The URL of the archive to extract the binaries from\n * @param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksumAlgorithm - The checksum algorithm to use\n * @returns The list of binaries extracted\n */\n\nasync function extractFromTar(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksumAlgorithm?: string,\n) {\n const downloads: {\n path: string;\n binary: Binary;\n checksum?: string;\n }[] = [];\n await pipeline(\n startDownload(url),\n extractTar(\n {\n cwd: dir,\n transform: (entry) => {\n const absolutePath = entry.absolute;\n if (!absolutePath) {\n throw new Error('Missing absolute path for entry');\n }\n\n if (checksumAlgorithm) {\n const hash = createHash(checksumAlgorithm);\n const passThrough = new Minipass({ async: true });\n passThrough.pipe(hash);\n passThrough.on('end', () => {\n downloads.push({\n path: absolutePath,\n binary: entry.path as Binary,\n checksum: hash.digest('hex'),\n });\n });\n return passThrough;\n }\n\n // When no checksum is needed, record the entry and return undefined\n // to use the original stream without transformation\n downloads.push({\n path: absolutePath,\n binary: entry.path as Binary,\n });\n return undefined;\n },\n },\n binaries,\n ),\n );\n return downloads;\n}\n/**\n * Extracts the binaries from a zip archive.\n *\n * @param url - The URL of the archive to extract the binaries from\n * 
@param binaries - The list of binaries to extract\n * @param dir - The destination directory\n * @param checksumAlgorithm - The checksum algorithm to use\n * @returns The list of binaries extracted\n */\n\nasync function extractFromZip(\n url: URL,\n binaries: Binary[],\n dir: string,\n checksumAlgorithm?: string,\n) {\n const agent = new (url.protocol === 'http:' ? HttpAgent : HttpsAgent)({\n keepAlive: true,\n });\n const source: Source = {\n async size() {\n const download = startDownload(url, { agent, method: 'HEAD' });\n const response = await download.response();\n const contentLength = response.headers['content-length'];\n return contentLength ? parseInt(contentLength, 10) : 0;\n },\n stream(offset: number, bytes: number) {\n const options = {\n agent,\n headers: {\n range: `bytes=${offset}-${bytes ? offset + bytes : ''}`,\n },\n };\n return startDownload(url, options);\n },\n };\n\n const { files } = await Open.custom(source, {});\n const filtered = files.filter(({ path }) =>\n binaries.includes(basename(path, extname(path)) as Binary),\n );\n return await Promise.all(\n filtered.map(async ({ path, stream }) => {\n const dest = join(dir, path);\n const entry = stream();\n const destStream = createWriteStream(dest);\n const binary = basename(path, extname(path)) as Binary;\n if (checksumAlgorithm) {\n const hash = createHash(checksumAlgorithm);\n const hashStream = async function* (entryStream: Entry) {\n for await (const chunk of entryStream) {\n hash.update(chunk);\n yield chunk;\n }\n };\n await pipeline(entry, hashStream, destStream);\n return {\n path: dest,\n binary,\n checksum: hash.digest('hex'),\n };\n }\n await pipeline(entry, destStream);\n return {\n path: dest,\n binary,\n };\n }),\n );\n}\n"]}
package/dist/index.cjs ADDED
@@ -0,0 +1,116 @@
+ #!/usr/bin/env -S node --require "./node_modules/tsx/dist/preflight.cjs" --import "./node_modules/tsx/dist/loader.mjs"
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.downloadAndInstallFoundryBinaries = exports.installBinaries = exports.checkAndDownloadBinaries = exports.getBinaryArchiveUrl = exports.getCacheDirectory = void 0;
+ const node_path_1 = require("node:path");
+ const node_os_1 = require("node:os");
+ const node_fs_1 = require("node:fs");
+ const promises_1 = require("node:fs/promises");
+ const node_crypto_1 = require("node:crypto");
+ const node_process_1 = require("node:process");
+ const yaml_1 = require("yaml");
+ const utils_1 = require("./utils.cjs");
+ const extract_1 = require("./extract.cjs");
+ const options_1 = require("./options.cjs");
+ const types_1 = require("./types.cjs");
+ function getCacheDirectory() {
+ let enableGlobalCache = false;
+ try {
+ const configFileContent = (0, node_fs_1.readFileSync)('.yarnrc.yml', 'utf8');
+ const parsedConfig = (0, yaml_1.parse)(configFileContent);
+ enableGlobalCache = parsedConfig?.enableGlobalCache ?? false;
+ }
+ catch (error) {
+ // If file doesn't exist or can't be read, default to local cache
+ if (error.code === 'ENOENT') {
+ return (0, node_path_1.join)((0, node_process_1.cwd)(), '.metamask', 'cache');
+ }
+ // For other errors, log but continue with default
+ console.warn('Warning: Error reading .yarnrc.yml, using local cache:', error);
+ }
+ return enableGlobalCache
+ ? (0, node_path_1.join)((0, node_os_1.homedir)(), '.cache', 'metamask')
+ : (0, node_path_1.join)((0, node_process_1.cwd)(), '.metamask', 'cache');
+ }
+ exports.getCacheDirectory = getCacheDirectory;
+ function getBinaryArchiveUrl(repo, tag, version, platform, arch) {
+ const ext = platform === types_1.Platform.Windows ? types_1.Extension.Zip : types_1.Extension.Tar;
+ return `https://github.com/${repo}/releases/download/${tag}/foundry_${version}_${platform}_${arch}.${ext}`;
+ }
+ exports.getBinaryArchiveUrl = getBinaryArchiveUrl;
+ async function checkAndDownloadBinaries(url, binaries, cachePath, platform, arch, checksums) {
+ let downloadedBinaries;
+ try {
+ (0, utils_1.say)(`checking cache`);
+ downloadedBinaries = await (0, promises_1.opendir)(cachePath);
+ (0, utils_1.say)(`found binaries in cache`);
+ }
+ catch (e) {
+ (0, utils_1.say)(`binaries not in cache`);
+ if (e.code === 'ENOENT') {
+ (0, utils_1.say)(`installing from ${url.toString()}`);
+ // directory doesn't exist, download and extract
+ const platformChecksums = (0, utils_1.transformChecksums)(checksums, platform, arch);
+ await (0, extract_1.extractFrom)(url, binaries, cachePath, platformChecksums);
+ downloadedBinaries = await (0, promises_1.opendir)(cachePath);
+ }
+ else {
+ throw e;
+ }
+ }
+ return downloadedBinaries;
+ }
+ exports.checkAndDownloadBinaries = checkAndDownloadBinaries;
+ async function installBinaries(downloadedBinaries, BIN_DIR, cachePath) {
+ for await (const file of downloadedBinaries) {
+ if (!file.isFile()) {
+ continue;
+ }
+ const target = (0, node_path_1.join)(file.parentPath, file.name);
+ const path = (0, node_path_1.join)(BIN_DIR, (0, node_path_1.relative)(cachePath, target));
+ // create the BIN_DIR paths if they don't exists already
+ await (0, promises_1.mkdir)(BIN_DIR, { recursive: true });
+ // clean up any existing files or symlinks
+ await (0, promises_1.unlink)(path).catch(utils_1.noop);
+ try {
+ // create new symlink
+ await (0, promises_1.symlink)(target, path);
+ }
+ catch (e) {
+ if (!((0, utils_1.isCodedError)(e) && ['EPERM', 'EXDEV'].includes(e.code))) {
+ throw e;
+ }
+ // symlinking can fail if it's a cross-device/filesystem link, or for
+ // permissions reasons, so we'll just copy the file instead
+ await (0, promises_1.copyFile)(target, path);
+ }
+ // check that it works by logging the version
+ (0, utils_1.say)(`installed - ${(0, utils_1.getVersion)(path)}`);
+ }
+ }
+ exports.installBinaries = installBinaries;
+ async function downloadAndInstallFoundryBinaries() {
+ const parsedArgs = (0, options_1.parseArgs)();
+ const CACHE_DIR = getCacheDirectory();
+ if (parsedArgs.command === 'cache clean') {
+ await (0, promises_1.rm)(CACHE_DIR, { recursive: true, force: true });
+ (0, utils_1.say)('done!');
+ (0, node_process_1.exit)(0);
+ }
+ const { repo, version: { version, tag }, arch, platform, binaries, checksums, } = parsedArgs.options;
+ (0, options_1.printBanner)();
+ const bins = binaries.join(', ');
+ (0, utils_1.say)(`fetching ${bins} ${version} for ${platform} ${arch}`);
+ const BIN_ARCHIVE_URL = getBinaryArchiveUrl(repo, tag, version, platform, arch);
+ const BIN_DIR = (0, node_path_1.join)((0, node_process_1.cwd)(), 'node_modules', '.bin');
+ const url = new URL(BIN_ARCHIVE_URL);
+ const cacheKey = (0, node_crypto_1.createHash)('sha256')
+ .update(`${BIN_ARCHIVE_URL}-${bins}`)
+ .digest('hex');
+ const cachePath = (0, node_path_1.join)(CACHE_DIR, cacheKey);
+ const downloadedBinaries = await checkAndDownloadBinaries(url, binaries, cachePath, platform, arch, checksums);
+ await installBinaries(downloadedBinaries, BIN_DIR, cachePath);
+ (0, utils_1.say)('done!');
+ }
+ exports.downloadAndInstallFoundryBinaries = downloadAndInstallFoundryBinaries;
+ //# sourceMappingURL=index.cjs.map
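A short sketch of the cache addressing used above: the cache directory is keyed by a sha256 of the archive URL plus the requested binaries, following the same recipe as `downloadAndInstallFoundryBinaries`. The repo, tag, platform, and arch strings here are illustrative; real calls pass values from the `Platform`/`Architecture` enums in `types.cjs`, which are not shown in this section.

```ts
// A sketch, assuming illustrative inputs; not part of the package.
import { createHash } from 'node:crypto';
import { join } from 'node:path';
import { getBinaryArchiveUrl, getCacheDirectory } from './index.cjs';

const archiveUrl = getBinaryArchiveUrl(
  'foundry-rs/foundry', // repo
  'nightly',            // tag
  'nightly',            // version
  'linux',              // platform (illustrative stand-in for a Platform value)
  'amd64',              // arch (illustrative stand-in for an Architecture value)
);

// Same recipe as downloadAndInstallFoundryBinaries: sha256 over "<url>-<bins>".
const bins = ['anvil', 'forge'].join(', ');
const cacheKey = createHash('sha256').update(`${archiveUrl}-${bins}`).digest('hex');

console.log(join(getCacheDirectory(), cacheKey));
```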
@@ -0,0 +1 @@
+ {"version":3,"file":"index.cjs","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;AAEA,yCAA2C;AAC3C,qCAAkC;AAClC,qCAA4C;AAC5C,+CAAiF;AACjF,6CAAyC;AACzC,+CAAyC;AACzC,+BAA0C;AAC1C,uCAMiB;AACjB,2CAAwC;AACxC,2CAAmD;AAEnD,uCAAoE;AAEpE,SAAgB,iBAAiB;IAC/B,IAAI,iBAAiB,GAAG,KAAK,CAAC;IAC9B,IAAI;QACF,MAAM,iBAAiB,GAAG,IAAA,sBAAY,EAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAC9D,MAAM,YAAY,GAAG,IAAA,YAAS,EAAC,iBAAiB,CAAC,CAAC;QAClD,iBAAiB,GAAG,YAAY,EAAE,iBAAiB,IAAI,KAAK,CAAC;KAC9D;IAAC,OAAO,KAAK,EAAE;QACd,iEAAiE;QACjE,IAAK,KAA+B,CAAC,IAAI,KAAK,QAAQ,EAAE;YACtD,OAAO,IAAA,gBAAI,EAAC,IAAA,kBAAG,GAAE,EAAE,WAAW,EAAE,OAAO,CAAC,CAAC;SAC1C;QACD,kDAAkD;QAClD,OAAO,CAAC,IAAI,CAAC,wDAAwD,EAAE,KAAK,CAAC,CAAC;KAC/E;IACD,OAAO,iBAAiB;QACtB,CAAC,CAAC,IAAA,gBAAI,EAAC,IAAA,iBAAO,GAAE,EAAE,QAAQ,EAAE,UAAU,CAAC;QACvC,CAAC,CAAC,IAAA,gBAAI,EAAC,IAAA,kBAAG,GAAE,EAAE,WAAW,EAAE,OAAO,CAAC,CAAC;AACxC,CAAC;AAjBD,8CAiBC;AAED,SAAgB,mBAAmB,CACjC,IAAY,EACZ,GAAW,EACX,OAAe,EACf,QAAkB,EAClB,IAAY;IAEZ,MAAM,GAAG,GAAG,QAAQ,KAAK,gBAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,iBAAS,CAAC,GAAG,CAAC,CAAC,CAAC,iBAAS,CAAC,GAAG,CAAC;IAC1E,OAAO,sBAAsB,IAAI,sBAAsB,GAAG,YAAY,OAAO,IAAI,QAAQ,IAAI,IAAI,IAAI,GAAG,EAAE,CAAC;AAC7G,CAAC;AATD,kDASC;AAEM,KAAK,UAAU,wBAAwB,CAC5C,GAAQ,EACR,QAAkB,EAClB,SAAiB,EACjB,QAAkB,EAClB,IAAkB,EAClB,SAAqB;IAErB,IAAI,kBAAuB,CAAC;IAC5B,IAAI;QACF,IAAA,WAAG,EAAC,gBAAgB,CAAC,CAAC;QACtB,kBAAkB,GAAG,MAAM,IAAA,kBAAO,EAAC,SAAS,CAAC,CAAC;QAC9C,IAAA,WAAG,EAAC,yBAAyB,CAAC,CAAC;KAChC;IAAC,OAAO,CAAU,EAAE;QACnB,IAAA,WAAG,EAAC,uBAAuB,CAAC,CAAC;QAC7B,IAAK,CAA2B,CAAC,IAAI,KAAK,QAAQ,EAAE;YAClD,IAAA,WAAG,EAAC,mBAAmB,GAAG,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC;YACzC,gDAAgD;YAChD,MAAM,iBAAiB,GAAG,IAAA,0BAAkB,EAAC,SAAS,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;YACxE,MAAM,IAAA,qBAAW,EAAC,GAAG,EAAE,QAAQ,EAAE,SAAS,EAAE,iBAAiB,CAAC,CAAC;YAC/D,kBAAkB,GAAG,MAAM,IAAA,kBAAO,EAAC,SAAS,CAAC,CAAC;SAC/C;aAAM;YACL,MAAM,CAAC,CAAC;SACT;KACF;IACD,OAAO,kBAAkB,CAAC;AAC5B,CAAC;AA1BD,4DA0BC;AAEM,KAAK,UAAU,eAAe,CACnC,kBAAuB,EACvB,OAAe,EACf,SAAiB;IAEjB,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,kBAAkB,EAAE;QAC3C,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;YAClB,SAAS;SACV;QACD,MAAM,MAAM,GAAG,IAAA,gBAAI,EAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;QAChD,MAAM,IAAI,GAAG,IAAA,gBAAI,EAAC,OAAO,EAAE,IAAA,oBAAQ,EAAC,SAAS,EAAE,MAAM,CAAC,CAAC,CAAC;QAExD,wDAAwD;QACxD,MAAM,IAAA,gBAAK,EAAC,OAAO,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAE1C,0CAA0C;QAC1C,MAAM,IAAA,iBAAM,EAAC,IAAI,CAAC,CAAC,KAAK,CAAC,YAAI,CAAC,CAAC;QAC/B,IAAI;YACF,qBAAqB;YACrB,MAAM,IAAA,kBAAO,EAAC,MAAM,EAAE,IAAI,CAAC,CAAC;SAC7B;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,CAAC,IAAA,oBAAY,EAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE;gBAC7D,MAAM,CAAC,CAAC;aACT;YACD,qEAAqE;YACrE,2DAA2D;YAC3D,MAAM,IAAA,mBAAQ,EAAC,MAAM,EAAE,IAAI,CAAC,CAAC;SAC9B;QACD,6CAA6C;QAC7C,IAAA,WAAG,EAAC,eAAe,IAAA,kBAAU,EAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KACxC;AACH,CAAC;AA/BD,0CA+BC;AAEM,KAAK,UAAU,iCAAiC;IACrD,MAAM,UAAU,GAAG,IAAA,mBAAS,GAAE,CAAC;IAE/B,MAAM,SAAS,GAAG,iBAAiB,EAAE,CAAC;IAEtC,IAAI,UAAU,CAAC,OAAO,KAAK,aAAa,EAAE;QACxC,MAAM,IAAA,aAAE,EAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;QACtD,IAAA,WAAG,EAAC,OAAO,CAAC,CAAC;QACb,IAAA,mBAAI,EAAC,CAAC,CAAC,CAAC;KACT;IAED,MAAM,EACJ,IAAI,EACJ,OAAO,EAAE,EAAE,OAAO,EAAE,GAAG,EAAE,EACzB,IAAI,EACJ,QAAQ,EACR,QAAQ,EACR,SAAS,GACV,GAAG,UAAU,CAAC,OAAO,CAAC;IAEvB,IAAA,qBAAW,GAAE,CAAC;IACd,MAAM,IAAI,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACjC,IAAA,WAAG,EAAC,YAAY,IAAI,IAAI,OAAO,QAAQ,QAAQ,IAAI,IAAI,EAAE,CAAC,CAAC;IAE3D,MAAM,eAAe,GAAG,mBAAmB,CACzC,IAAI,EACJ,GAAG,EACH,OAAO,EACP,QAAQ,EACR,IAAI,CACL,CAAC;IACF,MAAM
,OAAO,GAAG,IAAA,gBAAI,EAAC,IAAA,kBAAG,GAAE,EAAE,cAAc,EAAE,MAAM,CAAC,CAAC;IAEpD,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,eAAe,CAAC,CAAC;IACrC,MAAM,QAAQ,GAAG,IAAA,wBAAU,EAAC,QAAQ,CAAC;SAClC,MAAM,CAAC,GAAG,eAAe,IAAI,IAAI,EAAE,CAAC;SACpC,MAAM,CAAC,KAAK,CAAC,CAAC;IACjB,MAAM,SAAS,GAAG,IAAA,gBAAI,EAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;IAE5C,MAAM,kBAAkB,GAAG,MAAM,wBAAwB,CACvD,GAAG,EACH,QAAQ,EACR,SAAS,EACT,QAAQ,EACR,IAAI,EACJ,SAAS,CACV,CAAC;IAEF,MAAM,eAAe,CAAC,kBAAkB,EAAE,OAAO,EAAE,SAAS,CAAC,CAAC;IAE9D,IAAA,WAAG,EAAC,OAAO,CAAC,CAAC;AACf,CAAC;AAnDD,8EAmDC","sourcesContent":["#!/usr/bin/env -S node --require \"./node_modules/tsx/dist/preflight.cjs\" --import \"./node_modules/tsx/dist/loader.mjs\"\n\nimport { join, relative } from 'node:path';\nimport { homedir } from 'node:os';\nimport { Dir, readFileSync } from 'node:fs';\nimport { copyFile, mkdir, opendir, rm, symlink, unlink } from 'node:fs/promises';\nimport { createHash } from 'node:crypto';\nimport { cwd, exit } from 'node:process';\nimport { parse as parseYaml } from 'yaml';\nimport {\n getVersion,\n isCodedError,\n noop,\n say,\n transformChecksums,\n} from './utils';\nimport { extractFrom } from './extract';\nimport { parseArgs, printBanner } from './options';\nimport type { Checksums } from './types';\nimport { Architecture, Binary, Extension, Platform } from './types';\n\nexport function getCacheDirectory(): string {\n let enableGlobalCache = false;\n try {\n const configFileContent = readFileSync('.yarnrc.yml', 'utf8');\n const parsedConfig = parseYaml(configFileContent);\n enableGlobalCache = parsedConfig?.enableGlobalCache ?? false;\n } catch (error) {\n // If file doesn't exist or can't be read, default to local cache\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n return join(cwd(), '.metamask', 'cache');\n }\n // For other errors, log but continue with default\n console.warn('Warning: Error reading .yarnrc.yml, using local cache:', error);\n }\n return enableGlobalCache\n ? join(homedir(), '.cache', 'metamask')\n : join(cwd(), '.metamask', 'cache');\n}\n\nexport function getBinaryArchiveUrl(\n repo: string,\n tag: string,\n version: string,\n platform: Platform,\n arch: string,\n): string {\n const ext = platform === Platform.Windows ? 
Extension.Zip : Extension.Tar;\n return `https://github.com/${repo}/releases/download/${tag}/foundry_${version}_${platform}_${arch}.${ext}`;\n}\n\nexport async function checkAndDownloadBinaries(\n url: URL,\n binaries: Binary[],\n cachePath: string,\n platform: Platform,\n arch: Architecture,\n checksums?: Checksums,\n): Promise<Dir> {\n let downloadedBinaries: Dir;\n try {\n say(`checking cache`);\n downloadedBinaries = await opendir(cachePath);\n say(`found binaries in cache`);\n } catch (e: unknown) {\n say(`binaries not in cache`);\n if ((e as NodeJS.ErrnoException).code === 'ENOENT') {\n say(`installing from ${url.toString()}`);\n // directory doesn't exist, download and extract\n const platformChecksums = transformChecksums(checksums, platform, arch);\n await extractFrom(url, binaries, cachePath, platformChecksums);\n downloadedBinaries = await opendir(cachePath);\n } else {\n throw e;\n }\n }\n return downloadedBinaries;\n}\n\nexport async function installBinaries(\n downloadedBinaries: Dir,\n BIN_DIR: string,\n cachePath: string,\n): Promise<void> {\n for await (const file of downloadedBinaries) {\n if (!file.isFile()) {\n continue;\n }\n const target = join(file.parentPath, file.name);\n const path = join(BIN_DIR, relative(cachePath, target));\n\n // create the BIN_DIR paths if they don't exists already\n await mkdir(BIN_DIR, { recursive: true });\n\n // clean up any existing files or symlinks\n await unlink(path).catch(noop);\n try {\n // create new symlink\n await symlink(target, path);\n } catch (e) {\n if (!(isCodedError(e) && ['EPERM', 'EXDEV'].includes(e.code))) {\n throw e;\n }\n // symlinking can fail if it's a cross-device/filesystem link, or for\n // permissions reasons, so we'll just copy the file instead\n await copyFile(target, path);\n }\n // check that it works by logging the version\n say(`installed - ${getVersion(path)}`);\n }\n}\n\nexport async function downloadAndInstallFoundryBinaries(): Promise<void> {\n const parsedArgs = parseArgs();\n\n const CACHE_DIR = getCacheDirectory();\n\n if (parsedArgs.command === 'cache clean') {\n await rm(CACHE_DIR, { recursive: true, force: true });\n say('done!');\n exit(0);\n }\n\n const {\n repo,\n version: { version, tag },\n arch,\n platform,\n binaries,\n checksums,\n } = parsedArgs.options;\n\n printBanner();\n const bins = binaries.join(', ');\n say(`fetching ${bins} ${version} for ${platform} ${arch}`);\n\n const BIN_ARCHIVE_URL = getBinaryArchiveUrl(\n repo,\n tag,\n version,\n platform,\n arch,\n );\n const BIN_DIR = join(cwd(), 'node_modules', '.bin');\n\n const url = new URL(BIN_ARCHIVE_URL);\n const cacheKey = createHash('sha256')\n .update(`${BIN_ARCHIVE_URL}-${bins}`)\n .digest('hex');\n const cachePath = join(CACHE_DIR, cacheKey);\n\n const downloadedBinaries = await checkAndDownloadBinaries(\n url,\n binaries,\n cachePath,\n platform,\n arch,\n checksums,\n );\n\n await installBinaries(downloadedBinaries, BIN_DIR, cachePath);\n\n say('done!');\n}\n"]}
@@ -0,0 +1,11 @@
+ #!/usr/bin/env -S node --require "./node_modules/tsx/dist/preflight.cjs" --import "./node_modules/tsx/dist/loader.mjs"
+ /// <reference types="node" />
+ import { Dir } from "node:fs";
+ import type { Checksums } from "./types.cjs";
+ import { Architecture, Binary, Platform } from "./types.cjs";
+ export declare function getCacheDirectory(): string;
+ export declare function getBinaryArchiveUrl(repo: string, tag: string, version: string, platform: Platform, arch: string): string;
+ export declare function checkAndDownloadBinaries(url: URL, binaries: Binary[], cachePath: string, platform: Platform, arch: Architecture, checksums?: Checksums): Promise<Dir>;
+ export declare function installBinaries(downloadedBinaries: Dir, BIN_DIR: string, cachePath: string): Promise<void>;
+ export declare function downloadAndInstallFoundryBinaries(): Promise<void>;
+ //# sourceMappingURL=index.d.cts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.cts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAIA,OAAO,EAAE,GAAG,EAAgB,gBAAgB;AAc5C,OAAO,KAAK,EAAE,SAAS,EAAE,oBAAgB;AACzC,OAAO,EAAE,YAAY,EAAE,MAAM,EAAa,QAAQ,EAAE,oBAAgB;AAEpE,wBAAgB,iBAAiB,IAAI,MAAM,CAiB1C;AAED,wBAAgB,mBAAmB,CACjC,IAAI,EAAE,MAAM,EACZ,GAAG,EAAE,MAAM,EACX,OAAO,EAAE,MAAM,EACf,QAAQ,EAAE,QAAQ,EAClB,IAAI,EAAE,MAAM,GACX,MAAM,CAGR;AAED,wBAAsB,wBAAwB,CAC5C,GAAG,EAAE,GAAG,EACR,QAAQ,EAAE,MAAM,EAAE,EAClB,SAAS,EAAE,MAAM,EACjB,QAAQ,EAAE,QAAQ,EAClB,IAAI,EAAE,YAAY,EAClB,SAAS,CAAC,EAAE,SAAS,GACpB,OAAO,CAAC,GAAG,CAAC,CAmBd;AAED,wBAAsB,eAAe,CACnC,kBAAkB,EAAE,GAAG,EACvB,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,GAChB,OAAO,CAAC,IAAI,CAAC,CA2Bf;AAED,wBAAsB,iCAAiC,IAAI,OAAO,CAAC,IAAI,CAAC,CAmDvE"}
@@ -0,0 +1,11 @@
+ #!/usr/bin/env -S node --require "./node_modules/tsx/dist/preflight.cjs" --import "./node_modules/tsx/dist/loader.mjs"
+ /// <reference types="node" />
+ import { Dir } from "node:fs";
+ import type { Checksums } from "./types.mjs";
+ import { Architecture, Binary, Platform } from "./types.mjs";
+ export declare function getCacheDirectory(): string;
+ export declare function getBinaryArchiveUrl(repo: string, tag: string, version: string, platform: Platform, arch: string): string;
+ export declare function checkAndDownloadBinaries(url: URL, binaries: Binary[], cachePath: string, platform: Platform, arch: Architecture, checksums?: Checksums): Promise<Dir>;
+ export declare function installBinaries(downloadedBinaries: Dir, BIN_DIR: string, cachePath: string): Promise<void>;
+ export declare function downloadAndInstallFoundryBinaries(): Promise<void>;
+ //# sourceMappingURL=index.d.mts.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.mts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAIA,OAAO,EAAE,GAAG,EAAgB,gBAAgB;AAc5C,OAAO,KAAK,EAAE,SAAS,EAAE,oBAAgB;AACzC,OAAO,EAAE,YAAY,EAAE,MAAM,EAAa,QAAQ,EAAE,oBAAgB;AAEpE,wBAAgB,iBAAiB,IAAI,MAAM,CAiB1C;AAED,wBAAgB,mBAAmB,CACjC,IAAI,EAAE,MAAM,EACZ,GAAG,EAAE,MAAM,EACX,OAAO,EAAE,MAAM,EACf,QAAQ,EAAE,QAAQ,EAClB,IAAI,EAAE,MAAM,GACX,MAAM,CAGR;AAED,wBAAsB,wBAAwB,CAC5C,GAAG,EAAE,GAAG,EACR,QAAQ,EAAE,MAAM,EAAE,EAClB,SAAS,EAAE,MAAM,EACjB,QAAQ,EAAE,QAAQ,EAClB,IAAI,EAAE,YAAY,EAClB,SAAS,CAAC,EAAE,SAAS,GACpB,OAAO,CAAC,GAAG,CAAC,CAmBd;AAED,wBAAsB,eAAe,CACnC,kBAAkB,EAAE,GAAG,EACvB,OAAO,EAAE,MAAM,EACf,SAAS,EAAE,MAAM,GAChB,OAAO,CAAC,IAAI,CAAC,CA2Bf;AAED,wBAAsB,iCAAiC,IAAI,OAAO,CAAC,IAAI,CAAC,CAmDvE"}
package/dist/index.mjs ADDED
@@ -0,0 +1,109 @@
+ #!/usr/bin/env -S node --require "./node_modules/tsx/dist/preflight.cjs" --import "./node_modules/tsx/dist/loader.mjs"
+ import { join, relative } from "node:path";
+ import { homedir } from "node:os";
+ import { Dir, readFileSync } from "node:fs";
+ import { copyFile, mkdir, opendir, rm, symlink, unlink } from "node:fs/promises";
+ import { createHash } from "node:crypto";
+ import { cwd, exit } from "node:process";
+ import $yaml from "yaml";
+ const { parse: parseYaml } = $yaml;
+ import { getVersion, isCodedError, noop, say, transformChecksums } from "./utils.mjs";
+ import { extractFrom } from "./extract.mjs";
+ import { parseArgs, printBanner } from "./options.mjs";
+ import { Architecture, Binary, Extension, Platform } from "./types.mjs";
+ export function getCacheDirectory() {
+ let enableGlobalCache = false;
+ try {
+ const configFileContent = readFileSync('.yarnrc.yml', 'utf8');
+ const parsedConfig = parseYaml(configFileContent);
+ enableGlobalCache = parsedConfig?.enableGlobalCache ?? false;
+ }
+ catch (error) {
+ // If file doesn't exist or can't be read, default to local cache
+ if (error.code === 'ENOENT') {
+ return join(cwd(), '.metamask', 'cache');
+ }
+ // For other errors, log but continue with default
+ console.warn('Warning: Error reading .yarnrc.yml, using local cache:', error);
+ }
+ return enableGlobalCache
+ ? join(homedir(), '.cache', 'metamask')
+ : join(cwd(), '.metamask', 'cache');
+ }
+ export function getBinaryArchiveUrl(repo, tag, version, platform, arch) {
+ const ext = platform === Platform.Windows ? Extension.Zip : Extension.Tar;
+ return `https://github.com/${repo}/releases/download/${tag}/foundry_${version}_${platform}_${arch}.${ext}`;
+ }
+ export async function checkAndDownloadBinaries(url, binaries, cachePath, platform, arch, checksums) {
+ let downloadedBinaries;
+ try {
+ say(`checking cache`);
+ downloadedBinaries = await opendir(cachePath);
+ say(`found binaries in cache`);
+ }
+ catch (e) {
+ say(`binaries not in cache`);
+ if (e.code === 'ENOENT') {
+ say(`installing from ${url.toString()}`);
+ // directory doesn't exist, download and extract
+ const platformChecksums = transformChecksums(checksums, platform, arch);
+ await extractFrom(url, binaries, cachePath, platformChecksums);
+ downloadedBinaries = await opendir(cachePath);
+ }
+ else {
+ throw e;
+ }
+ }
+ return downloadedBinaries;
+ }
+ export async function installBinaries(downloadedBinaries, BIN_DIR, cachePath) {
+ for await (const file of downloadedBinaries) {
+ if (!file.isFile()) {
+ continue;
+ }
+ const target = join(file.parentPath, file.name);
+ const path = join(BIN_DIR, relative(cachePath, target));
+ // create the BIN_DIR paths if they don't exists already
+ await mkdir(BIN_DIR, { recursive: true });
+ // clean up any existing files or symlinks
+ await unlink(path).catch(noop);
+ try {
+ // create new symlink
+ await symlink(target, path);
+ }
+ catch (e) {
+ if (!(isCodedError(e) && ['EPERM', 'EXDEV'].includes(e.code))) {
+ throw e;
+ }
+ // symlinking can fail if it's a cross-device/filesystem link, or for
+ // permissions reasons, so we'll just copy the file instead
+ await copyFile(target, path);
+ }
+ // check that it works by logging the version
+ say(`installed - ${getVersion(path)}`);
+ }
+ }
+ export async function downloadAndInstallFoundryBinaries() {
+ const parsedArgs = parseArgs();
+ const CACHE_DIR = getCacheDirectory();
+ if (parsedArgs.command === 'cache clean') {
+ await rm(CACHE_DIR, { recursive: true, force: true });
+ say('done!');
+ exit(0);
+ }
+ const { repo, version: { version, tag }, arch, platform, binaries, checksums, } = parsedArgs.options;
+ printBanner();
+ const bins = binaries.join(', ');
+ say(`fetching ${bins} ${version} for ${platform} ${arch}`);
+ const BIN_ARCHIVE_URL = getBinaryArchiveUrl(repo, tag, version, platform, arch);
+ const BIN_DIR = join(cwd(), 'node_modules', '.bin');
+ const url = new URL(BIN_ARCHIVE_URL);
+ const cacheKey = createHash('sha256')
+ .update(`${BIN_ARCHIVE_URL}-${bins}`)
+ .digest('hex');
+ const cachePath = join(CACHE_DIR, cacheKey);
+ const downloadedBinaries = await checkAndDownloadBinaries(url, binaries, cachePath, platform, arch, checksums);
+ await installBinaries(downloadedBinaries, BIN_DIR, cachePath);
+ say('done!');
+ }
+ //# sourceMappingURL=index.mjs.map
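Given the exports above, a hypothetical consumer of the ESM build could be as small as the following sketch. This is not the package's own `cli.mjs`, whose contents are not shown in this section; argument parsing, `FOUNDRYUP_`-prefixed environment variables, and the `cache clean` subcommand are all handled inside the call via `parseArgs`.

```ts
// Hypothetical consumer script; assumes it runs next to the dist files.
import { downloadAndInstallFoundryBinaries } from './index.mjs';

downloadAndInstallFoundryBinaries().catch((error) => {
  console.error(error);
  process.exitCode = 1;
});
```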
@@ -0,0 +1 @@
+ {"version":3,"file":"index.mjs","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAEA,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,kBAAkB;AAC3C,OAAO,EAAE,OAAO,EAAE,gBAAgB;AAClC,OAAO,EAAE,GAAG,EAAE,YAAY,EAAE,gBAAgB;AAC5C,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE,EAAE,EAAE,OAAO,EAAE,MAAM,EAAE,yBAAyB;AACjF,OAAO,EAAE,UAAU,EAAE,oBAAoB;AACzC,OAAO,EAAE,GAAG,EAAE,IAAI,EAAE,qBAAqB;;;AAEzC,OAAO,EACL,UAAU,EACV,YAAY,EACZ,IAAI,EACJ,GAAG,EACH,kBAAkB,EACnB,oBAAgB;AACjB,OAAO,EAAE,WAAW,EAAE,sBAAkB;AACxC,OAAO,EAAE,SAAS,EAAE,WAAW,EAAE,sBAAkB;AAEnD,OAAO,EAAE,YAAY,EAAE,MAAM,EAAE,SAAS,EAAE,QAAQ,EAAE,oBAAgB;AAEpE,MAAM,UAAU,iBAAiB;IAC/B,IAAI,iBAAiB,GAAG,KAAK,CAAC;IAC9B,IAAI;QACF,MAAM,iBAAiB,GAAG,YAAY,CAAC,aAAa,EAAE,MAAM,CAAC,CAAC;QAC9D,MAAM,YAAY,GAAG,SAAS,CAAC,iBAAiB,CAAC,CAAC;QAClD,iBAAiB,GAAG,YAAY,EAAE,iBAAiB,IAAI,KAAK,CAAC;KAC9D;IAAC,OAAO,KAAK,EAAE;QACd,iEAAiE;QACjE,IAAK,KAA+B,CAAC,IAAI,KAAK,QAAQ,EAAE;YACtD,OAAO,IAAI,CAAC,GAAG,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,CAAC;SAC1C;QACD,kDAAkD;QAClD,OAAO,CAAC,IAAI,CAAC,wDAAwD,EAAE,KAAK,CAAC,CAAC;KAC/E;IACD,OAAO,iBAAiB;QACtB,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,EAAE,QAAQ,EAAE,UAAU,CAAC;QACvC,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,EAAE,WAAW,EAAE,OAAO,CAAC,CAAC;AACxC,CAAC;AAED,MAAM,UAAU,mBAAmB,CACjC,IAAY,EACZ,GAAW,EACX,OAAe,EACf,QAAkB,EAClB,IAAY;IAEZ,MAAM,GAAG,GAAG,QAAQ,KAAK,QAAQ,CAAC,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC,GAAG,CAAC;IAC1E,OAAO,sBAAsB,IAAI,sBAAsB,GAAG,YAAY,OAAO,IAAI,QAAQ,IAAI,IAAI,IAAI,GAAG,EAAE,CAAC;AAC7G,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,wBAAwB,CAC5C,GAAQ,EACR,QAAkB,EAClB,SAAiB,EACjB,QAAkB,EAClB,IAAkB,EAClB,SAAqB;IAErB,IAAI,kBAAuB,CAAC;IAC5B,IAAI;QACF,GAAG,CAAC,gBAAgB,CAAC,CAAC;QACtB,kBAAkB,GAAG,MAAM,OAAO,CAAC,SAAS,CAAC,CAAC;QAC9C,GAAG,CAAC,yBAAyB,CAAC,CAAC;KAChC;IAAC,OAAO,CAAU,EAAE;QACnB,GAAG,CAAC,uBAAuB,CAAC,CAAC;QAC7B,IAAK,CAA2B,CAAC,IAAI,KAAK,QAAQ,EAAE;YAClD,GAAG,CAAC,mBAAmB,GAAG,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC;YACzC,gDAAgD;YAChD,MAAM,iBAAiB,GAAG,kBAAkB,CAAC,SAAS,EAAE,QAAQ,EAAE,IAAI,CAAC,CAAC;YACxE,MAAM,WAAW,CAAC,GAAG,EAAE,QAAQ,EAAE,SAAS,EAAE,iBAAiB,CAAC,CAAC;YAC/D,kBAAkB,GAAG,MAAM,OAAO,CAAC,SAAS,CAAC,CAAC;SAC/C;aAAM;YACL,MAAM,CAAC,CAAC;SACT;KACF;IACD,OAAO,kBAAkB,CAAC;AAC5B,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,eAAe,CACnC,kBAAuB,EACvB,OAAe,EACf,SAAiB;IAEjB,IAAI,KAAK,EAAE,MAAM,IAAI,IAAI,kBAAkB,EAAE;QAC3C,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,EAAE;YAClB,SAAS;SACV;QACD,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC;QAChD,MAAM,IAAI,GAAG,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC,CAAC;QAExD,wDAAwD;QACxD,MAAM,KAAK,CAAC,OAAO,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;QAE1C,0CAA0C;QAC1C,MAAM,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;QAC/B,IAAI;YACF,qBAAqB;YACrB,MAAM,OAAO,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;SAC7B;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,CAAC,YAAY,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,EAAE;gBAC7D,MAAM,CAAC,CAAC;aACT;YACD,qEAAqE;YACrE,2DAA2D;YAC3D,MAAM,QAAQ,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC;SAC9B;QACD,6CAA6C;QAC7C,GAAG,CAAC,eAAe,UAAU,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;KACxC;AACH,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,iCAAiC;IACrD,MAAM,UAAU,GAAG,SAAS,EAAE,CAAC;IAE/B,MAAM,SAAS,GAAG,iBAAiB,EAAE,CAAC;IAEtC,IAAI,UAAU,CAAC,OAAO,KAAK,aAAa,EAAE;QACxC,MAAM,EAAE,CAAC,SAAS,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC;QACtD,GAAG,CAAC,OAAO,CAAC,CAAC;QACb,IAAI,CAAC,CAAC,CAAC,CAAC;KACT;IAED,MAAM,EACJ,IAAI,EACJ,OAAO,EAAE,EAAE,OAAO,EAAE,GAAG,EAAE,EACzB,IAAI,EACJ,QAAQ,EACR,QAAQ,EACR,SAAS,GACV,GAAG,UAAU,CAAC,OAAO,CAAC;IAEvB,WAAW,EAAE,CAAC;IACd,MAAM,IAAI,GAAG,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACjC,GAAG,CAAC,Y
AAY,IAAI,IAAI,OAAO,QAAQ,QAAQ,IAAI,IAAI,EAAE,CAAC,CAAC;IAE3D,MAAM,eAAe,GAAG,mBAAmB,CACzC,IAAI,EACJ,GAAG,EACH,OAAO,EACP,QAAQ,EACR,IAAI,CACL,CAAC;IACF,MAAM,OAAO,GAAG,IAAI,CAAC,GAAG,EAAE,EAAE,cAAc,EAAE,MAAM,CAAC,CAAC;IAEpD,MAAM,GAAG,GAAG,IAAI,GAAG,CAAC,eAAe,CAAC,CAAC;IACrC,MAAM,QAAQ,GAAG,UAAU,CAAC,QAAQ,CAAC;SAClC,MAAM,CAAC,GAAG,eAAe,IAAI,IAAI,EAAE,CAAC;SACpC,MAAM,CAAC,KAAK,CAAC,CAAC;IACjB,MAAM,SAAS,GAAG,IAAI,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;IAE5C,MAAM,kBAAkB,GAAG,MAAM,wBAAwB,CACvD,GAAG,EACH,QAAQ,EACR,SAAS,EACT,QAAQ,EACR,IAAI,EACJ,SAAS,CACV,CAAC;IAEF,MAAM,eAAe,CAAC,kBAAkB,EAAE,OAAO,EAAE,SAAS,CAAC,CAAC;IAE9D,GAAG,CAAC,OAAO,CAAC,CAAC;AACf,CAAC","sourcesContent":["#!/usr/bin/env -S node --require \"./node_modules/tsx/dist/preflight.cjs\" --import \"./node_modules/tsx/dist/loader.mjs\"\n\nimport { join, relative } from 'node:path';\nimport { homedir } from 'node:os';\nimport { Dir, readFileSync } from 'node:fs';\nimport { copyFile, mkdir, opendir, rm, symlink, unlink } from 'node:fs/promises';\nimport { createHash } from 'node:crypto';\nimport { cwd, exit } from 'node:process';\nimport { parse as parseYaml } from 'yaml';\nimport {\n getVersion,\n isCodedError,\n noop,\n say,\n transformChecksums,\n} from './utils';\nimport { extractFrom } from './extract';\nimport { parseArgs, printBanner } from './options';\nimport type { Checksums } from './types';\nimport { Architecture, Binary, Extension, Platform } from './types';\n\nexport function getCacheDirectory(): string {\n let enableGlobalCache = false;\n try {\n const configFileContent = readFileSync('.yarnrc.yml', 'utf8');\n const parsedConfig = parseYaml(configFileContent);\n enableGlobalCache = parsedConfig?.enableGlobalCache ?? false;\n } catch (error) {\n // If file doesn't exist or can't be read, default to local cache\n if ((error as NodeJS.ErrnoException).code === 'ENOENT') {\n return join(cwd(), '.metamask', 'cache');\n }\n // For other errors, log but continue with default\n console.warn('Warning: Error reading .yarnrc.yml, using local cache:', error);\n }\n return enableGlobalCache\n ? join(homedir(), '.cache', 'metamask')\n : join(cwd(), '.metamask', 'cache');\n}\n\nexport function getBinaryArchiveUrl(\n repo: string,\n tag: string,\n version: string,\n platform: Platform,\n arch: string,\n): string {\n const ext = platform === Platform.Windows ? 
Extension.Zip : Extension.Tar;\n return `https://github.com/${repo}/releases/download/${tag}/foundry_${version}_${platform}_${arch}.${ext}`;\n}\n\nexport async function checkAndDownloadBinaries(\n url: URL,\n binaries: Binary[],\n cachePath: string,\n platform: Platform,\n arch: Architecture,\n checksums?: Checksums,\n): Promise<Dir> {\n let downloadedBinaries: Dir;\n try {\n say(`checking cache`);\n downloadedBinaries = await opendir(cachePath);\n say(`found binaries in cache`);\n } catch (e: unknown) {\n say(`binaries not in cache`);\n if ((e as NodeJS.ErrnoException).code === 'ENOENT') {\n say(`installing from ${url.toString()}`);\n // directory doesn't exist, download and extract\n const platformChecksums = transformChecksums(checksums, platform, arch);\n await extractFrom(url, binaries, cachePath, platformChecksums);\n downloadedBinaries = await opendir(cachePath);\n } else {\n throw e;\n }\n }\n return downloadedBinaries;\n}\n\nexport async function installBinaries(\n downloadedBinaries: Dir,\n BIN_DIR: string,\n cachePath: string,\n): Promise<void> {\n for await (const file of downloadedBinaries) {\n if (!file.isFile()) {\n continue;\n }\n const target = join(file.parentPath, file.name);\n const path = join(BIN_DIR, relative(cachePath, target));\n\n // create the BIN_DIR paths if they don't exists already\n await mkdir(BIN_DIR, { recursive: true });\n\n // clean up any existing files or symlinks\n await unlink(path).catch(noop);\n try {\n // create new symlink\n await symlink(target, path);\n } catch (e) {\n if (!(isCodedError(e) && ['EPERM', 'EXDEV'].includes(e.code))) {\n throw e;\n }\n // symlinking can fail if it's a cross-device/filesystem link, or for\n // permissions reasons, so we'll just copy the file instead\n await copyFile(target, path);\n }\n // check that it works by logging the version\n say(`installed - ${getVersion(path)}`);\n }\n}\n\nexport async function downloadAndInstallFoundryBinaries(): Promise<void> {\n const parsedArgs = parseArgs();\n\n const CACHE_DIR = getCacheDirectory();\n\n if (parsedArgs.command === 'cache clean') {\n await rm(CACHE_DIR, { recursive: true, force: true });\n say('done!');\n exit(0);\n }\n\n const {\n repo,\n version: { version, tag },\n arch,\n platform,\n binaries,\n checksums,\n } = parsedArgs.options;\n\n printBanner();\n const bins = binaries.join(', ');\n say(`fetching ${bins} ${version} for ${platform} ${arch}`);\n\n const BIN_ARCHIVE_URL = getBinaryArchiveUrl(\n repo,\n tag,\n version,\n platform,\n arch,\n );\n const BIN_DIR = join(cwd(), 'node_modules', '.bin');\n\n const url = new URL(BIN_ARCHIVE_URL);\n const cacheKey = createHash('sha256')\n .update(`${BIN_ARCHIVE_URL}-${bins}`)\n .digest('hex');\n const cachePath = join(CACHE_DIR, cacheKey);\n\n const downloadedBinaries = await checkAndDownloadBinaries(\n url,\n binaries,\n cachePath,\n platform,\n arch,\n checksums,\n );\n\n await installBinaries(downloadedBinaries, BIN_DIR, cachePath);\n\n say('done!');\n}\n"]}
@@ -0,0 +1,135 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.parseArgs = exports.printBanner = void 0;
+ const node_os_1 = require("node:os");
+ const node_process_1 = require("node:process");
+ const yargs_1 = __importDefault(require("yargs/yargs"));
+ const utils_1 = require("./utils.cjs");
+ const types_1 = require("./types.cjs");
+ function isVersionString(value) {
+ return /^v\d/u.test(value);
+ }
+ function printBanner() {
+ console.log(`
+ .xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx
+
+ ╔═╗ ╔═╗ ╦ ╦ ╔╗╔ ╔╦╗ ╦═╗ ╦ ╦ Portable and modular toolkit
+ ╠╣ ║ ║ ║ ║ ║║║ ║║ ╠╦╝ ╚╦╝ for Ethereum Application Development
+ ╚ ╚═╝ ╚═╝ ╝╚╝ ═╩╝ ╩╚═ ╩ written in Rust.
+
+ .xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx
+
+ Repo : https://github.com/foundry-rs/
+ Book : https://book.getfoundry.sh/
+ Chat : https://t.me/foundry_rs/
+ Support : https://t.me/foundry_support/
+ Contribute : https://github.com/orgs/foundry-rs/projects/2/
+
+ .xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx.xOx
+ `);
+ }
+ exports.printBanner = printBanner;
+ function parseArgs(args = node_process_1.argv.slice(2)) {
+ const { $0, _, ...parsed } = (0, yargs_1.default)()
+ // Ensure unrecognized commands/options are reported as errors.
+ .strict()
+ // disable yargs's version, as it doesn't make sense here
+ .version(false)
+ // use the scriptName in `--help` output
+ .scriptName('yarn foundryup')
+ // wrap output at a maximum of 120 characters or `stdout.columns`
+ .wrap(Math.min(120, node_process_1.stdout.columns))
+ .parserConfiguration({
+ 'strip-aliased': true,
+ 'strip-dashed': true,
+ })
+ // enable ENV parsing, which allows the user to specify foundryup options
+ // via environment variables prefixed with `FOUNDRYUP_`
+ .env('FOUNDRYUP')
+ .command(['$0', 'install'], 'Install foundry binaries', (builder) => {
+ builder.options(getOptions()).pkgConf('foundryup');
+ })
+ .command('cache', '', (builder) => {
+ builder.command('clean', 'Remove the shared cache files').demandCommand();
+ })
+ .parseSync(args);
+ const command = _.join(' ');
+ if (command === 'cache clean') {
+ return {
+ command,
+ };
+ }
+ // if we get here `command` is always 'install' or '' (yargs checks it)
+ return {
+ command: 'install',
+ options: parsed,
+ };
+ }
+ exports.parseArgs = parseArgs;
+ const Binaries = Object.values(types_1.Binary);
+ function getOptions(defaultPlatform = (0, node_os_1.platform)(), defaultArch = (0, utils_1.normalizeSystemArchitecture)()) {
+ return {
+ binaries: {
+ alias: 'b',
+ type: 'array',
+ multiple: true,
+ description: 'Specify the binaries to install',
+ default: Binaries,
+ choices: Binaries,
+ coerce: (values) => [...new Set(values)], // Remove duplicates
+ },
+ checksums: {
+ alias: 'c',
+ description: 'JSON object containing checksums for the binaries.',
+ coerce: (rawChecksums) => {
+ try {
+ return typeof rawChecksums === 'string'
+ ? JSON.parse(rawChecksums)
+ : rawChecksums;
+ }
+ catch {
+ throw new Error('Invalid checksums');
+ }
+ },
+ optional: true,
+ },
+ repo: {
+ alias: 'r',
+ description: 'Specify the repository',
+ default: 'foundry-rs/foundry',
+ },
+ version: {
+ alias: 'v',
+ description: 'Specify the version (see: https://github.com/foundry-rs/foundry/tags)',
+ default: 'nightly',
+ coerce: (rawVersion) => {
+ if (/^nightly/u.test(rawVersion)) {
+ return { version: 'nightly', tag: rawVersion };
+ // we don't validate the version much, we just trust the user
+ }
+ else if (isVersionString(rawVersion)) {
+ return { version: rawVersion, tag: rawVersion };
+ }
+ throw new Error('Invalid version');
+ },
+ },
+ arch: {
+ alias: 'a',
+ description: 'Specify the architecture',
+ // if `defaultArch` is not a supported Architecture yargs will throw an error
+ default: defaultArch,
+ choices: Object.values(types_1.Architecture),
+ },
+ platform: {
+ alias: 'p',
+ description: 'Specify the platform',
+ // if `defaultPlatform` is not a supported Platform yargs will throw an error
+ default: defaultPlatform,
+ choices: Object.values(types_1.Platform),
+ },
+ };
+ }
+ //# sourceMappingURL=options.cjs.map
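To see how the yargs configuration above resolves arguments, here is a small sketch that calls the exported `parseArgs` with an explicit argv array. By default it reads `process.argv.slice(2)` and, via `.env('FOUNDRYUP')`, also honors `FOUNDRYUP_`-prefixed environment variables. The values `'anvil'` and `'forge'` are assumed to be members of the `Binary` enum in `types.cjs`, which is not shown in this section.

```ts
// A sketch of driving parseArgs directly; not part of the package.
import { parseArgs } from './options.cjs';

// Roughly equivalent to: yarn foundryup --version nightly --binaries anvil forge
const parsed = parseArgs(['--version', 'nightly', '--binaries', 'anvil', 'forge']);

if (parsed.command === 'install') {
  // `version` is coerced to { version: 'nightly', tag: 'nightly' } and the
  // binaries list is de-duplicated by the coerce handler above.
  console.log(parsed.options.version, parsed.options.binaries);
}

// The cache subcommand is reported as a distinct command string:
console.log(parseArgs(['cache', 'clean']).command); // "cache clean"
```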