ic-mops 0.37.0 → 0.37.2
This diff shows the content of publicly available package versions that have been released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/dist/package.json +2 -1
- package/package.json +2 -1
- package/dist/cli-local.d.ts +0 -2
- package/dist/cli-local.js +0 -2
- package/dist/commands/toolchain/mocv.d.ts +0 -1
- package/dist/commands/toolchain/mocv.js +0 -271
- package/dist/moc-wrapper.d.ts +0 -2
- package/dist/moc-wrapper.js +0 -8
- package/dist/out/cli.d.ts +0 -2
- package/dist/out/cli.js +0 -115242
- package/dist/pic-js/examples/clock/tests/clock/index.d.ts +0 -1
- package/dist/pic-js/examples/clock/tests/clock/index.js +0 -5
- package/dist/pic-js/examples/clock/tests/jest.config.d.ts +0 -3
- package/dist/pic-js/examples/clock/tests/jest.config.js +0 -8
- package/dist/pic-js/examples/clock/tests/src/clock.spec.d.ts +0 -1
- package/dist/pic-js/examples/clock/tests/src/clock.spec.js +0 -48
- package/dist/pic-js/examples/counter/tests/counter/index.d.ts +0 -1
- package/dist/pic-js/examples/counter/tests/counter/index.js +0 -5
- package/dist/pic-js/examples/counter/tests/jest.config.d.ts +0 -3
- package/dist/pic-js/examples/counter/tests/jest.config.js +0 -8
- package/dist/pic-js/examples/counter/tests/src/counter.spec.d.ts +0 -1
- package/dist/pic-js/examples/counter/tests/src/counter.spec.js +0 -80
- package/dist/pic-js/examples/todo/tests/jest.config.d.ts +0 -3
- package/dist/pic-js/examples/todo/tests/jest.config.js +0 -8
- package/dist/pic-js/examples/todo/tests/src/todo.spec.d.ts +0 -1
- package/dist/pic-js/examples/todo/tests/src/todo.spec.js +0 -211
- package/dist/pic-js/examples/todo/tests/todo/index.d.ts +0 -1
- package/dist/pic-js/examples/todo/tests/todo/index.js +0 -5
- package/dist/pic-js/packages/pic/src/error.d.ts +0 -12
- package/dist/pic-js/packages/pic/src/error.js +0 -36
- package/dist/pic-js/packages/pic/src/http-client.d.ts +0 -15
- package/dist/pic-js/packages/pic/src/http-client.js +0 -37
- package/dist/pic-js/packages/pic/src/identity.d.ts +0 -66
- package/dist/pic-js/packages/pic/src/identity.js +0 -86
- package/dist/pic-js/packages/pic/src/index.d.ts +0 -4
- package/dist/pic-js/packages/pic/src/index.js +0 -8
- package/dist/pic-js/packages/pic/src/management-canister.d.ts +0 -30
- package/dist/pic-js/packages/pic/src/management-canister.js +0 -43
- package/dist/pic-js/packages/pic/src/pocket-ic-actor.d.ts +0 -83
- package/dist/pic-js/packages/pic/src/pocket-ic-actor.js +0 -58
- package/dist/pic-js/packages/pic/src/pocket-ic-client-types.d.ts +0 -61
- package/dist/pic-js/packages/pic/src/pocket-ic-client-types.js +0 -2
- package/dist/pic-js/packages/pic/src/pocket-ic-client.d.ts +0 -24
- package/dist/pic-js/packages/pic/src/pocket-ic-client.js +0 -123
- package/dist/pic-js/packages/pic/src/pocket-ic-server.d.ts +0 -10
- package/dist/pic-js/packages/pic/src/pocket-ic-server.js +0 -55
- package/dist/pic-js/packages/pic/src/pocket-ic-types.d.ts +0 -40
- package/dist/pic-js/packages/pic/src/pocket-ic-types.js +0 -2
- package/dist/pic-js/packages/pic/src/pocket-ic.d.ts +0 -447
- package/dist/pic-js/packages/pic/src/pocket-ic.js +0 -551
- package/dist/pic-js/packages/pic/src/util/candid.d.ts +0 -1
- package/dist/pic-js/packages/pic/src/util/candid.js +0 -7
- package/dist/pic-js/packages/pic/src/util/encoding.d.ts +0 -5
- package/dist/pic-js/packages/pic/src/util/encoding.js +0 -19
- package/dist/pic-js/packages/pic/src/util/fs.d.ts +0 -4
- package/dist/pic-js/packages/pic/src/util/fs.js +0 -29
- package/dist/pic-js/packages/pic/src/util/index.d.ts +0 -5
- package/dist/pic-js/packages/pic/src/util/index.js +0 -21
- package/dist/pic-js/packages/pic/src/util/os.d.ts +0 -4
- package/dist/pic-js/packages/pic/src/util/os.js +0 -19
- package/dist/pic-js/packages/pic/src/util/poll.d.ts +0 -5
- package/dist/pic-js/packages/pic/src/util/poll.js +0 -28
- package/dist/templates/cli.d.ts +0 -2
- package/dist/templates/cli.js +0 -3660
package/dist/templates/cli.js
DELETED
|
@@ -1,3660 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env node
|
|
2
|
-
// dist/cli.js
|
|
3
|
-
import fs14 from "node:fs";
|
|
4
|
-
import { program, Argument, Option } from "commander";
|
|
5
|
-
import chalk29 from "chalk";
|
|
6
|
-
import { Principal as Principal2 } from "@dfinity/principal";
|
|
7
|
-
// dist/commands/init.js
|
|
8
|
-
import { execSync } from "node:child_process";
|
|
9
|
-
import path8 from "node:path";
|
|
10
|
-
import { existsSync as existsSync2, readFileSync as readFileSync2, writeFileSync as writeFileSync2 } from "node:fs";
|
|
11
|
-
import chalk7 from "chalk";
|
|
12
|
-
import prompts3 from "prompts";
|
|
13
|
-
// dist/mops.js
|
|
14
|
-
import path from "node:path";
|
|
15
|
-
import fs2 from "node:fs";
|
|
16
|
-
import TOML from "@iarna/toml";
|
|
17
|
-
import chalk from "chalk";
|
|
18
|
-
import prompts from "prompts";
|
|
19
|
-
import ncp from "ncp";
|
|
20
|
-
import fetch2 from "node-fetch";
|
|
21
|
-
// dist/pem.js
|
|
22
|
-
import fs from "node:fs";
|
|
23
|
-
import { Ed25519KeyIdentity } from "@dfinity/identity";
|
|
24
|
-
import { Secp256k1KeyIdentity } from "@dfinity/identity-secp256k1";
|
|
25
|
-
import pemfile from "pem-file";
|
|
26
|
-
import crypto from "crypto";
|
|
27
|
-
function decodeFile(file, password) {
|
|
28
|
-
let rawKey = fs.readFileSync(file);
|
|
29
|
-
if (password) {
|
|
30
|
-
return decode(decrypt(rawKey, password));
|
|
31
|
-
}
|
|
32
|
-
return decode(rawKey);
|
|
33
|
-
}
|
|
34
|
-
function decode(rawKey) {
|
|
35
|
-
let buf = pemfile.decode(rawKey);
|
|
36
|
-
if (rawKey.includes("EC PRIVATE KEY")) {
|
|
37
|
-
if (buf.length != 118) {
|
|
38
|
-
throw "expecting byte length 118 but got " + buf.length;
|
|
39
|
-
}
|
|
40
|
-
return Secp256k1KeyIdentity.fromSecretKey(buf.subarray(7, 39));
|
|
41
|
-
}
|
|
42
|
-
if (buf.length != 85) {
|
|
43
|
-
throw "expecting byte length 85 but got " + buf.length;
|
|
44
|
-
}
|
|
45
|
-
let secretKey = Buffer.concat([buf.subarray(16, 48), buf.subarray(53, 85)]);
|
|
46
|
-
return Ed25519KeyIdentity.fromSecretKey(secretKey);
|
|
47
|
-
}
|
|
48
|
-
var algorithm = "aes-256-ctr";
|
|
49
|
-
function encrypt(buffer, password) {
|
|
50
|
-
let key = crypto.createHash("sha256").update(password).digest("base64").slice(0, 32);
|
|
51
|
-
let iv = crypto.randomBytes(16);
|
|
52
|
-
let cipher = crypto.createCipheriv(algorithm, key, iv);
|
|
53
|
-
let result = Buffer.concat([iv, cipher.update(buffer), cipher.final()]);
|
|
54
|
-
return result;
|
|
55
|
-
}
|
|
56
|
-
function decrypt(encrypted, password) {
|
|
57
|
-
let key = crypto.createHash("sha256").update(password).digest("base64").slice(0, 32);
|
|
58
|
-
let iv = encrypted.subarray(0, 16);
|
|
59
|
-
encrypted = encrypted.subarray(16);
|
|
60
|
-
let decipher = crypto.createDecipheriv(algorithm, key, iv);
|
|
61
|
-
let result = Buffer.concat([decipher.update(encrypted), decipher.final()]);
|
|
62
|
-
return result;
|
|
63
|
-
}
|
|
64
|
-
// dist/api/actors.js
|
|
65
|
-
import { Actor as Actor3, HttpAgent as HttpAgent3 } from "@dfinity/agent";
|
|
66
|
-
// dist/declarations/main/index.js
|
|
67
|
-
import { Actor, HttpAgent } from "@dfinity/agent";
|
|
68
|
-
// dist/declarations/main/main.did.js
|
|
69
|
-
var idlFactory = ({ IDL }) => {
|
|
70
|
-
const TestsChanges = IDL.Record({
|
|
71
|
-
"addedNames": IDL.Vec(IDL.Text),
|
|
72
|
-
"removedNames": IDL.Vec(IDL.Text)
|
|
73
|
-
});
|
|
74
|
-
const DepChange = IDL.Record({
|
|
75
|
-
"oldVersion": IDL.Text,
|
|
76
|
-
"name": IDL.Text,
|
|
77
|
-
"newVersion": IDL.Text
|
|
78
|
-
});
|
|
79
|
-
const PackageChanges__1 = IDL.Record({
|
|
80
|
-
"tests": TestsChanges,
|
|
81
|
-
"deps": IDL.Vec(DepChange),
|
|
82
|
-
"notes": IDL.Text,
|
|
83
|
-
"devDeps": IDL.Vec(DepChange)
|
|
84
|
-
});
|
|
85
|
-
const PublishingId = IDL.Text;
|
|
86
|
-
const Err = IDL.Text;
|
|
87
|
-
const Result = IDL.Variant({ "ok": IDL.Null, "err": Err });
|
|
88
|
-
const Text = IDL.Text;
|
|
89
|
-
const PackageName = IDL.Text;
|
|
90
|
-
const PackageVersion = IDL.Text;
|
|
91
|
-
const PackageId = IDL.Text;
|
|
92
|
-
const Time = IDL.Int;
|
|
93
|
-
const DownloadsSnapshot__1 = IDL.Record({
|
|
94
|
-
"startTime": Time,
|
|
95
|
-
"endTime": Time,
|
|
96
|
-
"downloads": IDL.Nat
|
|
97
|
-
});
|
|
98
|
-
const FileId = IDL.Text;
|
|
99
|
-
const Result_8 = IDL.Variant({
|
|
100
|
-
"ok": IDL.Vec(IDL.Tuple(FileId, IDL.Vec(IDL.Nat8))),
|
|
101
|
-
"err": Err
|
|
102
|
-
});
|
|
103
|
-
const Result_7 = IDL.Variant({ "ok": IDL.Vec(FileId), "err": Err });
|
|
104
|
-
const SemverPart = IDL.Variant({
|
|
105
|
-
"major": IDL.Null,
|
|
106
|
-
"minor": IDL.Null,
|
|
107
|
-
"patch": IDL.Null
|
|
108
|
-
});
|
|
109
|
-
const Result_6 = IDL.Variant({
|
|
110
|
-
"ok": IDL.Vec(IDL.Tuple(PackageName, PackageVersion)),
|
|
111
|
-
"err": Err
|
|
112
|
-
});
|
|
113
|
-
const Result_5 = IDL.Variant({ "ok": PackageVersion, "err": Err });
|
|
114
|
-
const User = IDL.Record({
|
|
115
|
-
"id": IDL.Principal,
|
|
116
|
-
"emailVerified": IDL.Bool,
|
|
117
|
-
"twitter": IDL.Text,
|
|
118
|
-
"displayName": IDL.Text,
|
|
119
|
-
"name": IDL.Text,
|
|
120
|
-
"site": IDL.Text,
|
|
121
|
-
"email": IDL.Text,
|
|
122
|
-
"twitterVerified": IDL.Bool,
|
|
123
|
-
"githubVerified": IDL.Bool,
|
|
124
|
-
"github": IDL.Text
|
|
125
|
-
});
|
|
126
|
-
const DepsStatus = IDL.Variant({
|
|
127
|
-
"allLatest": IDL.Null,
|
|
128
|
-
"tooOld": IDL.Null,
|
|
129
|
-
"updatesAvailable": IDL.Null
|
|
130
|
-
});
|
|
131
|
-
const PackageQuality = IDL.Record({
|
|
132
|
-
"depsStatus": DepsStatus,
|
|
133
|
-
"hasDescription": IDL.Bool,
|
|
134
|
-
"hasKeywords": IDL.Bool,
|
|
135
|
-
"hasLicense": IDL.Bool,
|
|
136
|
-
"hasDocumentation": IDL.Bool,
|
|
137
|
-
"hasTests": IDL.Bool,
|
|
138
|
-
"hasRepository": IDL.Bool,
|
|
139
|
-
"hasReleaseNotes": IDL.Bool
|
|
140
|
-
});
|
|
141
|
-
const Script = IDL.Record({ "value": IDL.Text, "name": IDL.Text });
|
|
142
|
-
const PackageName__1 = IDL.Text;
|
|
143
|
-
const DependencyV2 = IDL.Record({
|
|
144
|
-
"name": PackageName__1,
|
|
145
|
-
"repo": IDL.Text,
|
|
146
|
-
"version": IDL.Text
|
|
147
|
-
});
|
|
148
|
-
const PackageConfigV2__1 = IDL.Record({
|
|
149
|
-
"dfx": IDL.Text,
|
|
150
|
-
"moc": IDL.Text,
|
|
151
|
-
"scripts": IDL.Vec(Script),
|
|
152
|
-
"baseDir": IDL.Text,
|
|
153
|
-
"documentation": IDL.Text,
|
|
154
|
-
"name": PackageName__1,
|
|
155
|
-
"homepage": IDL.Text,
|
|
156
|
-
"description": IDL.Text,
|
|
157
|
-
"version": IDL.Text,
|
|
158
|
-
"keywords": IDL.Vec(IDL.Text),
|
|
159
|
-
"donation": IDL.Text,
|
|
160
|
-
"devDependencies": IDL.Vec(DependencyV2),
|
|
161
|
-
"repository": IDL.Text,
|
|
162
|
-
"dependencies": IDL.Vec(DependencyV2),
|
|
163
|
-
"license": IDL.Text,
|
|
164
|
-
"readme": IDL.Text
|
|
165
|
-
});
|
|
166
|
-
const PackagePublication = IDL.Record({
|
|
167
|
-
"storage": IDL.Principal,
|
|
168
|
-
"time": Time,
|
|
169
|
-
"user": IDL.Principal
|
|
170
|
-
});
|
|
171
|
-
const PackageSummary = IDL.Record({
|
|
172
|
-
"ownerInfo": User,
|
|
173
|
-
"owner": IDL.Principal,
|
|
174
|
-
"quality": PackageQuality,
|
|
175
|
-
"downloadsTotal": IDL.Nat,
|
|
176
|
-
"downloadsInLast30Days": IDL.Nat,
|
|
177
|
-
"downloadsInLast7Days": IDL.Nat,
|
|
178
|
-
"config": PackageConfigV2__1,
|
|
179
|
-
"publication": PackagePublication
|
|
180
|
-
});
|
|
181
|
-
const PackageSummary__1 = IDL.Record({
|
|
182
|
-
"ownerInfo": User,
|
|
183
|
-
"owner": IDL.Principal,
|
|
184
|
-
"quality": PackageQuality,
|
|
185
|
-
"downloadsTotal": IDL.Nat,
|
|
186
|
-
"downloadsInLast30Days": IDL.Nat,
|
|
187
|
-
"downloadsInLast7Days": IDL.Nat,
|
|
188
|
-
"config": PackageConfigV2__1,
|
|
189
|
-
"publication": PackagePublication
|
|
190
|
-
});
|
|
191
|
-
const TestStats__1 = IDL.Record({
|
|
192
|
-
"passedNames": IDL.Vec(IDL.Text),
|
|
193
|
-
"passed": IDL.Nat
|
|
194
|
-
});
|
|
195
|
-
const DownloadsSnapshot = IDL.Record({
|
|
196
|
-
"startTime": Time,
|
|
197
|
-
"endTime": Time,
|
|
198
|
-
"downloads": IDL.Nat
|
|
199
|
-
});
|
|
200
|
-
const PackageFileStatsPublic = IDL.Record({
|
|
201
|
-
"sourceFiles": IDL.Nat,
|
|
202
|
-
"sourceSize": IDL.Nat
|
|
203
|
-
});
|
|
204
|
-
const PackageChanges = IDL.Record({
|
|
205
|
-
"tests": TestsChanges,
|
|
206
|
-
"deps": IDL.Vec(DepChange),
|
|
207
|
-
"notes": IDL.Text,
|
|
208
|
-
"devDeps": IDL.Vec(DepChange)
|
|
209
|
-
});
|
|
210
|
-
const PackageSummaryWithChanges__1 = IDL.Record({
|
|
211
|
-
"ownerInfo": User,
|
|
212
|
-
"owner": IDL.Principal,
|
|
213
|
-
"quality": PackageQuality,
|
|
214
|
-
"downloadsTotal": IDL.Nat,
|
|
215
|
-
"downloadsInLast30Days": IDL.Nat,
|
|
216
|
-
"downloadsInLast7Days": IDL.Nat,
|
|
217
|
-
"config": PackageConfigV2__1,
|
|
218
|
-
"changes": PackageChanges,
|
|
219
|
-
"publication": PackagePublication
|
|
220
|
-
});
|
|
221
|
-
const PackageDetails = IDL.Record({
|
|
222
|
-
"ownerInfo": User,
|
|
223
|
-
"owner": IDL.Principal,
|
|
224
|
-
"deps": IDL.Vec(PackageSummary__1),
|
|
225
|
-
"quality": PackageQuality,
|
|
226
|
-
"testStats": TestStats__1,
|
|
227
|
-
"downloadsTotal": IDL.Nat,
|
|
228
|
-
"downloadsInLast30Days": IDL.Nat,
|
|
229
|
-
"downloadTrend": IDL.Vec(DownloadsSnapshot),
|
|
230
|
-
"fileStats": PackageFileStatsPublic,
|
|
231
|
-
"versionHistory": IDL.Vec(PackageSummaryWithChanges__1),
|
|
232
|
-
"dependents": IDL.Vec(PackageSummary__1),
|
|
233
|
-
"devDeps": IDL.Vec(PackageSummary__1),
|
|
234
|
-
"downloadsInLast7Days": IDL.Nat,
|
|
235
|
-
"config": PackageConfigV2__1,
|
|
236
|
-
"changes": PackageChanges,
|
|
237
|
-
"publication": PackagePublication
|
|
238
|
-
});
|
|
239
|
-
const Result_4 = IDL.Variant({ "ok": PackageDetails, "err": Err });
|
|
240
|
-
const PackageSummaryWithChanges = IDL.Record({
|
|
241
|
-
"ownerInfo": User,
|
|
242
|
-
"owner": IDL.Principal,
|
|
243
|
-
"quality": PackageQuality,
|
|
244
|
-
"downloadsTotal": IDL.Nat,
|
|
245
|
-
"downloadsInLast30Days": IDL.Nat,
|
|
246
|
-
"downloadsInLast7Days": IDL.Nat,
|
|
247
|
-
"config": PackageConfigV2__1,
|
|
248
|
-
"changes": PackageChanges,
|
|
249
|
-
"publication": PackagePublication
|
|
250
|
-
});
|
|
251
|
-
const StorageId = IDL.Principal;
|
|
252
|
-
const StorageStats = IDL.Record({
|
|
253
|
-
"fileCount": IDL.Nat,
|
|
254
|
-
"cyclesBalance": IDL.Nat,
|
|
255
|
-
"memorySize": IDL.Nat
|
|
256
|
-
});
|
|
257
|
-
const User__1 = IDL.Record({
|
|
258
|
-
"id": IDL.Principal,
|
|
259
|
-
"emailVerified": IDL.Bool,
|
|
260
|
-
"twitter": IDL.Text,
|
|
261
|
-
"displayName": IDL.Text,
|
|
262
|
-
"name": IDL.Text,
|
|
263
|
-
"site": IDL.Text,
|
|
264
|
-
"email": IDL.Text,
|
|
265
|
-
"twitterVerified": IDL.Bool,
|
|
266
|
-
"githubVerified": IDL.Bool,
|
|
267
|
-
"github": IDL.Text
|
|
268
|
-
});
|
|
269
|
-
const Header = IDL.Tuple(IDL.Text, IDL.Text);
|
|
270
|
-
const Request = IDL.Record({
|
|
271
|
-
"url": IDL.Text,
|
|
272
|
-
"method": IDL.Text,
|
|
273
|
-
"body": IDL.Vec(IDL.Nat8),
|
|
274
|
-
"headers": IDL.Vec(Header),
|
|
275
|
-
"certificate_version": IDL.Opt(IDL.Nat16)
|
|
276
|
-
});
|
|
277
|
-
const StreamingToken = IDL.Vec(IDL.Nat8);
|
|
278
|
-
const StreamingCallbackResponse = IDL.Record({
|
|
279
|
-
"token": IDL.Opt(StreamingToken),
|
|
280
|
-
"body": IDL.Vec(IDL.Nat8)
|
|
281
|
-
});
|
|
282
|
-
const StreamingCallback = IDL.Func([StreamingToken], [IDL.Opt(StreamingCallbackResponse)], ["query"]);
|
|
283
|
-
const StreamingStrategy = IDL.Variant({
|
|
284
|
-
"Callback": IDL.Record({
|
|
285
|
-
"token": StreamingToken,
|
|
286
|
-
"callback": StreamingCallback
|
|
287
|
-
})
|
|
288
|
-
});
|
|
289
|
-
const Response = IDL.Record({
|
|
290
|
-
"body": IDL.Vec(IDL.Nat8),
|
|
291
|
-
"headers": IDL.Vec(Header),
|
|
292
|
-
"upgrade": IDL.Opt(IDL.Bool),
|
|
293
|
-
"streaming_strategy": IDL.Opt(StreamingStrategy),
|
|
294
|
-
"status_code": IDL.Nat16
|
|
295
|
-
});
|
|
296
|
-
const PageCount = IDL.Nat;
|
|
297
|
-
const Result_1 = IDL.Variant({ "ok": IDL.Null, "err": IDL.Text });
|
|
298
|
-
const Result_3 = IDL.Variant({ "ok": FileId, "err": Err });
|
|
299
|
-
const PackageConfigV2 = IDL.Record({
|
|
300
|
-
"dfx": IDL.Text,
|
|
301
|
-
"moc": IDL.Text,
|
|
302
|
-
"scripts": IDL.Vec(Script),
|
|
303
|
-
"baseDir": IDL.Text,
|
|
304
|
-
"documentation": IDL.Text,
|
|
305
|
-
"name": PackageName__1,
|
|
306
|
-
"homepage": IDL.Text,
|
|
307
|
-
"description": IDL.Text,
|
|
308
|
-
"version": IDL.Text,
|
|
309
|
-
"keywords": IDL.Vec(IDL.Text),
|
|
310
|
-
"donation": IDL.Text,
|
|
311
|
-
"devDependencies": IDL.Vec(DependencyV2),
|
|
312
|
-
"repository": IDL.Text,
|
|
313
|
-
"dependencies": IDL.Vec(DependencyV2),
|
|
314
|
-
"license": IDL.Text,
|
|
315
|
-
"readme": IDL.Text
|
|
316
|
-
});
|
|
317
|
-
const PublishingErr = IDL.Text;
|
|
318
|
-
const Result_2 = IDL.Variant({ "ok": PublishingId, "err": PublishingErr });
|
|
319
|
-
const TestStats = IDL.Record({
|
|
320
|
-
"passedNames": IDL.Vec(IDL.Text),
|
|
321
|
-
"passed": IDL.Nat
|
|
322
|
-
});
|
|
323
|
-
return IDL.Service({
|
|
324
|
-
"backup": IDL.Func([], [], []),
|
|
325
|
-
"claimAirdrop": IDL.Func([IDL.Principal], [IDL.Text], []),
|
|
326
|
-
"computeHashesForExistingFiles": IDL.Func([], [], []),
|
|
327
|
-
"diff": IDL.Func([IDL.Text, IDL.Text], [PackageChanges__1], ["query"]),
|
|
328
|
-
"finishPublish": IDL.Func([PublishingId], [Result], []),
|
|
329
|
-
"getAirdropAmount": IDL.Func([], [IDL.Nat], ["query"]),
|
|
330
|
-
"getAirdropAmountAll": IDL.Func([], [IDL.Nat], ["query"]),
|
|
331
|
-
"getApiVersion": IDL.Func([], [Text], ["query"]),
|
|
332
|
-
"getBackupCanisterId": IDL.Func([], [IDL.Principal], ["query"]),
|
|
333
|
-
"getDefaultPackages": IDL.Func([IDL.Text], [IDL.Vec(IDL.Tuple(PackageName, PackageVersion))], ["query"]),
|
|
334
|
-
"getDownloadTrendByPackageId": IDL.Func([PackageId], [IDL.Vec(DownloadsSnapshot__1)], ["query"]),
|
|
335
|
-
"getDownloadTrendByPackageName": IDL.Func([PackageName], [IDL.Vec(DownloadsSnapshot__1)], ["query"]),
|
|
336
|
-
"getFileHashes": IDL.Func([PackageName, PackageVersion], [Result_8], []),
|
|
337
|
-
"getFileHashesByPackageIds": IDL.Func([IDL.Vec(PackageId)], [
|
|
338
|
-
IDL.Vec(IDL.Tuple(PackageId, IDL.Vec(IDL.Tuple(FileId, IDL.Vec(IDL.Nat8)))))
|
|
339
|
-
], []),
|
|
340
|
-
"getFileIds": IDL.Func([PackageName, PackageVersion], [Result_7], ["query"]),
|
|
341
|
-
"getHighestSemverBatch": IDL.Func([IDL.Vec(IDL.Tuple(PackageName, PackageVersion, SemverPart))], [Result_6], ["query"]),
|
|
342
|
-
"getHighestVersion": IDL.Func([PackageName], [Result_5], ["query"]),
|
|
343
|
-
"getMostDownloadedPackages": IDL.Func([], [IDL.Vec(PackageSummary)], ["query"]),
|
|
344
|
-
"getMostDownloadedPackagesIn7Days": IDL.Func([], [IDL.Vec(PackageSummary)], ["query"]),
|
|
345
|
-
"getNewPackages": IDL.Func([], [IDL.Vec(PackageSummary)], ["query"]),
|
|
346
|
-
"getPackageDetails": IDL.Func([PackageName, PackageVersion], [Result_4], ["query"]),
|
|
347
|
-
"getPackagesByCategory": IDL.Func([], [IDL.Vec(IDL.Tuple(IDL.Text, IDL.Vec(PackageSummary)))], ["query"]),
|
|
348
|
-
"getRecentlyUpdatedPackages": IDL.Func([], [IDL.Vec(PackageSummaryWithChanges)], ["query"]),
|
|
349
|
-
"getStoragesStats": IDL.Func([], [IDL.Vec(IDL.Tuple(StorageId, StorageStats))], ["query"]),
|
|
350
|
-
"getTotalDownloads": IDL.Func([], [IDL.Nat], ["query"]),
|
|
351
|
-
"getTotalPackages": IDL.Func([], [IDL.Nat], ["query"]),
|
|
352
|
-
"getUser": IDL.Func([IDL.Principal], [IDL.Opt(User__1)], ["query"]),
|
|
353
|
-
"http_request": IDL.Func([Request], [Response], ["query"]),
|
|
354
|
-
"notifyInstall": IDL.Func([PackageName, PackageVersion], [], ["oneway"]),
|
|
355
|
-
"notifyInstalls": IDL.Func([IDL.Vec(IDL.Tuple(PackageName, PackageVersion))], [], ["oneway"]),
|
|
356
|
-
"restore": IDL.Func([IDL.Nat, IDL.Nat], [], []),
|
|
357
|
-
"search": IDL.Func([Text, IDL.Opt(IDL.Nat), IDL.Opt(IDL.Nat)], [IDL.Vec(PackageSummary), PageCount], ["query"]),
|
|
358
|
-
"setUserProp": IDL.Func([IDL.Text, IDL.Text], [Result_1], []),
|
|
359
|
-
"startFileUpload": IDL.Func([PublishingId, Text, IDL.Nat, IDL.Vec(IDL.Nat8)], [Result_3], []),
|
|
360
|
-
"startPublish": IDL.Func([PackageConfigV2], [Result_2], []),
|
|
361
|
-
"takeAirdropSnapshot": IDL.Func([], [], ["oneway"]),
|
|
362
|
-
"transferOwnership": IDL.Func([PackageName, IDL.Principal], [Result_1], []),
|
|
363
|
-
"uploadFileChunk": IDL.Func([PublishingId, FileId, IDL.Nat, IDL.Vec(IDL.Nat8)], [Result], []),
|
|
364
|
-
"uploadNotes": IDL.Func([PublishingId, IDL.Text], [Result], []),
|
|
365
|
-
"uploadTestStats": IDL.Func([PublishingId, TestStats], [Result], [])
|
|
366
|
-
});
|
|
367
|
-
};
|
|
368
|
-
// dist/declarations/main/index.js
|
|
369
|
-
var canisterId = process.env.CANISTER_ID_MAIN || process.env.MAIN_CANISTER_ID;
|
|
370
|
-
// dist/declarations/storage/index.js
|
|
371
|
-
import { Actor as Actor2, HttpAgent as HttpAgent2 } from "@dfinity/agent";
|
|
372
|
-
// dist/declarations/storage/storage.did.js
|
|
373
|
-
var idlFactory2 = ({ IDL }) => {
|
|
374
|
-
const FileId = IDL.Text;
|
|
375
|
-
const Chunk = IDL.Vec(IDL.Nat8);
|
|
376
|
-
const Err = IDL.Text;
|
|
377
|
-
const Result_2 = IDL.Variant({ "ok": Chunk, "err": Err });
|
|
378
|
-
const FileId__1 = IDL.Text;
|
|
379
|
-
const FileMeta = IDL.Record({
|
|
380
|
-
"id": FileId__1,
|
|
381
|
-
"owners": IDL.Vec(IDL.Principal),
|
|
382
|
-
"path": IDL.Text,
|
|
383
|
-
"chunkCount": IDL.Nat
|
|
384
|
-
});
|
|
385
|
-
const Result_1 = IDL.Variant({ "ok": FileMeta, "err": Err });
|
|
386
|
-
const StorageStats = IDL.Record({
|
|
387
|
-
"fileCount": IDL.Nat,
|
|
388
|
-
"cyclesBalance": IDL.Nat,
|
|
389
|
-
"memorySize": IDL.Nat
|
|
390
|
-
});
|
|
391
|
-
const Result = IDL.Variant({ "ok": IDL.Null, "err": Err });
|
|
392
|
-
const Storage = IDL.Service({
|
|
393
|
-
"acceptCycles": IDL.Func([], [], []),
|
|
394
|
-
"deleteFile": IDL.Func([FileId], [], []),
|
|
395
|
-
"downloadChunk": IDL.Func([FileId, IDL.Nat], [Result_2], ["query"]),
|
|
396
|
-
"finishUpload": IDL.Func([FileId], [], []),
|
|
397
|
-
"getFileIdsRange": IDL.Func([IDL.Nat, IDL.Nat], [IDL.Vec(FileId)], ["query"]),
|
|
398
|
-
"getFileMeta": IDL.Func([FileId], [Result_1], ["query"]),
|
|
399
|
-
"getStats": IDL.Func([], [StorageStats], ["query"]),
|
|
400
|
-
"startUpload": IDL.Func([FileMeta], [Result], []),
|
|
401
|
-
"updateFileOwners": IDL.Func([FileId, IDL.Vec(IDL.Principal)], [], []),
|
|
402
|
-
"uploadChunk": IDL.Func([FileId, IDL.Nat, Chunk], [], [])
|
|
403
|
-
});
|
|
404
|
-
return Storage;
|
|
405
|
-
};
|
|
406
|
-
// dist/api/network.js
|
|
407
|
-
function getNetwork() {
|
|
408
|
-
return globalThis.MOPS_NETWORK || "ic";
|
|
409
|
-
}
|
|
410
|
-
function getEndpoint(network) {
|
|
411
|
-
if (network === "staging") {
|
|
412
|
-
return {
|
|
413
|
-
host: "https://icp-api.io",
|
|
414
|
-
canisterId: "2d2zu-vaaaa-aaaak-qb6pq-cai"
|
|
415
|
-
};
|
|
416
|
-
}
|
|
417
|
-
else if (network === "ic") {
|
|
418
|
-
return {
|
|
419
|
-
host: "https://icp-api.io",
|
|
420
|
-
canisterId: "oknww-riaaa-aaaam-qaf6a-cai"
|
|
421
|
-
};
|
|
422
|
-
}
|
|
423
|
-
else {
|
|
424
|
-
return {
|
|
425
|
-
host: "http://127.0.0.1:4943",
|
|
426
|
-
canisterId: "2d2zu-vaaaa-aaaak-qb6pq-cai"
|
|
427
|
-
};
|
|
428
|
-
}
|
|
429
|
-
}
|
|
430
|
-
// dist/api/actors.js
|
|
431
|
-
var mainActor = async (identity) => {
|
|
432
|
-
let network = getNetwork();
|
|
433
|
-
let host = getEndpoint(network).host;
|
|
434
|
-
let canisterId3 = getEndpoint(network).canisterId;
|
|
435
|
-
let agent = new HttpAgent3({ host, identity });
|
|
436
|
-
if (network === "local") {
|
|
437
|
-
await agent.fetchRootKey();
|
|
438
|
-
}
|
|
439
|
-
return Actor3.createActor(idlFactory, {
|
|
440
|
-
agent,
|
|
441
|
-
canisterId: canisterId3
|
|
442
|
-
});
|
|
443
|
-
};
|
|
444
|
-
var storageActor = async (storageId, identity) => {
|
|
445
|
-
let network = getNetwork();
|
|
446
|
-
let host = getEndpoint(network).host;
|
|
447
|
-
let agent = new HttpAgent3({ host, identity });
|
|
448
|
-
if (network === "local") {
|
|
449
|
-
await agent.fetchRootKey();
|
|
450
|
-
}
|
|
451
|
-
return Actor3.createActor(idlFactory2, {
|
|
452
|
-
agent,
|
|
453
|
-
canisterId: storageId
|
|
454
|
-
});
|
|
455
|
-
};
|
|
456
|
-
// dist/api/getHighestVersion.js
|
|
457
|
-
async function getHighestVersion(pkgName) {
|
|
458
|
-
let actor = await mainActor();
|
|
459
|
-
return actor.getHighestVersion(pkgName);
|
|
460
|
-
}
|
|
461
|
-
// dist/mops.js
|
|
462
|
-
if (!global.fetch) {
|
|
463
|
-
global.fetch = fetch2;
|
|
464
|
-
}
|
|
465
|
-
var apiVersion = "1.2";
|
|
466
|
-
var globalConfigDir = "";
|
|
467
|
-
var globalCacheDir = "";
|
|
468
|
-
if (process.platform == "win32") {
|
|
469
|
-
globalConfigDir = path.join(process.env.LOCALAPPDATA || "", "mops/config");
|
|
470
|
-
globalCacheDir = path.join(process.env.LOCALAPPDATA || "", "mops/cache");
|
|
471
|
-
}
|
|
472
|
-
else if (process.platform == "darwin") {
|
|
473
|
-
globalConfigDir = path.join(process.env.HOME || "", "Library/Application Support/mops");
|
|
474
|
-
globalCacheDir = path.join(process.env.HOME || "", "Library/Caches/mops");
|
|
475
|
-
}
|
|
476
|
-
else {
|
|
477
|
-
globalConfigDir = path.join(process.env.HOME || "", ".config/mops");
|
|
478
|
-
globalCacheDir = path.join(process.env.HOME || "", ".cache/mops");
|
|
479
|
-
}
|
|
480
|
-
if (process.env.XDG_CONFIG_HOME) {
|
|
481
|
-
globalConfigDir = path.join(process.env.XDG_CONFIG_HOME, "mops");
|
|
482
|
-
}
|
|
483
|
-
if (process.env.XDG_CACHE_HOME) {
|
|
484
|
-
globalCacheDir = path.join(process.env.XDG_CACHE_HOME, "mops");
|
|
485
|
-
}
|
|
486
|
-
var oldGlobalConfigDir = path.resolve(process.env.HOME || process.env.APPDATA || "/", "mops");
|
|
487
|
-
if (fs2.existsSync(oldGlobalConfigDir) && !fs2.existsSync(globalConfigDir)) {
|
|
488
|
-
fs2.mkdirSync(globalConfigDir, { recursive: true });
|
|
489
|
-
if (fs2.existsSync(path.join(oldGlobalConfigDir, "identity.pem"))) {
|
|
490
|
-
fs2.copyFileSync(path.join(oldGlobalConfigDir, "identity.pem"), path.join(globalConfigDir, "identity.pem"));
|
|
491
|
-
}
|
|
492
|
-
if (fs2.existsSync(path.join(oldGlobalConfigDir, "identity.pem.encrypted"))) {
|
|
493
|
-
fs2.copyFileSync(path.join(oldGlobalConfigDir, "identity.pem.encrypted"), path.join(globalConfigDir, "identity.pem.encrypted"));
|
|
494
|
-
}
|
|
495
|
-
console.log("Moved config to " + chalk.green(globalConfigDir));
|
|
496
|
-
}
|
|
497
|
-
if (fs2.existsSync(oldGlobalConfigDir) && !fs2.existsSync(globalCacheDir)) {
|
|
498
|
-
fs2.mkdirSync(globalCacheDir, { recursive: true });
|
|
499
|
-
ncp.ncp(path.join(oldGlobalConfigDir, "packages"), path.join(globalCacheDir, "packages"), {
|
|
500
|
-
stopOnErr: true,
|
|
501
|
-
clobber: false
|
|
502
|
-
}, (err) => {
|
|
503
|
-
if (err) {
|
|
504
|
-
console.log("Error moving config: ", err);
|
|
505
|
-
fs2.rmSync(globalCacheDir, { recursive: true, force: true });
|
|
506
|
-
}
|
|
507
|
-
});
|
|
508
|
-
console.log("Moved cache to " + chalk.green(globalCacheDir));
|
|
509
|
-
}
|
|
510
|
-
function getNetworkFile() {
|
|
511
|
-
let networkFile2 = "";
|
|
512
|
-
try {
|
|
513
|
-
networkFile2 = new URL("./network.txt", import.meta.url);
|
|
514
|
-
}
|
|
515
|
-
catch {
|
|
516
|
-
networkFile2 = path.join(__dirname, "network.txt");
|
|
517
|
-
}
|
|
518
|
-
return networkFile2;
|
|
519
|
-
}
|
|
520
|
-
function setNetwork(network) {
|
|
521
|
-
fs2.writeFileSync(getNetworkFile(), network);
|
|
522
|
-
}
|
|
523
|
-
var getIdentity = async () => {
|
|
524
|
-
let identityPem = path.resolve(globalConfigDir, "identity.pem");
|
|
525
|
-
let identityPemEncrypted = path.resolve(globalConfigDir, "identity.pem.encrypted");
|
|
526
|
-
if (fs2.existsSync(identityPemEncrypted)) {
|
|
527
|
-
let res = await prompts({
|
|
528
|
-
type: "invisible",
|
|
529
|
-
name: "value",
|
|
530
|
-
message: "Enter password:"
|
|
531
|
-
});
|
|
532
|
-
return await decodeFile(identityPemEncrypted, res.value);
|
|
533
|
-
}
|
|
534
|
-
if (fs2.existsSync(identityPem)) {
|
|
535
|
-
return decodeFile(identityPem);
|
|
536
|
-
}
|
|
537
|
-
return void 0;
|
|
538
|
-
};
|
|
539
|
-
function getClosestConfigFile(dir = process.cwd()) {
|
|
540
|
-
if (!path.basename(dir)) {
|
|
541
|
-
throw "";
|
|
542
|
-
}
|
|
543
|
-
let configFile = path.join(dir, "mops.toml");
|
|
544
|
-
if (fs2.existsSync(configFile)) {
|
|
545
|
-
return configFile;
|
|
546
|
-
}
|
|
547
|
-
return getClosestConfigFile(path.resolve(dir, ".."));
|
|
548
|
-
}
|
|
549
|
-
function getRootDir() {
|
|
550
|
-
let configFile = getClosestConfigFile();
|
|
551
|
-
if (!configFile) {
|
|
552
|
-
return "";
|
|
553
|
-
}
|
|
554
|
-
return path.dirname(configFile);
|
|
555
|
-
}
|
|
556
|
-
function checkConfigFile() {
|
|
557
|
-
let configFile = getClosestConfigFile();
|
|
558
|
-
if (!configFile) {
|
|
559
|
-
console.log(chalk.red("Error: ") + `Config file 'mops.toml' not found. Please run ${chalk.green("mops init")} first`);
|
|
560
|
-
return false;
|
|
561
|
-
}
|
|
562
|
-
return true;
|
|
563
|
-
}
|
|
564
|
-
function progressBar(step, total) {
|
|
565
|
-
let done = Math.round(step / total * 10);
|
|
566
|
-
return `[${":".repeat(done)}${" ".repeat(Math.max(0, 10 - done))}]`;
|
|
567
|
-
}
|
|
568
|
-
function parseGithubURL(href) {
|
|
569
|
-
var _a;
|
|
570
|
-
const url = new URL(href);
|
|
571
|
-
let branchAndSha = (_a = url.hash) == null ? void 0 : _a.substring(1).split("@");
|
|
572
|
-
let branch = branchAndSha[0] || "master";
|
|
573
|
-
let commitHash = branchAndSha[1] || "";
|
|
574
|
-
let [org, gitName] = url.pathname.split("/").filter((path18) => !!path18);
|
|
575
|
-
org = org || "";
|
|
576
|
-
gitName = gitName || "";
|
|
577
|
-
if (gitName == null ? void 0 : gitName.endsWith(".git")) {
|
|
578
|
-
gitName = gitName.substring(0, gitName.length - 4);
|
|
579
|
-
}
|
|
580
|
-
return { org, gitName, branch, commitHash };
|
|
581
|
-
}
|
|
582
|
-
async function getGithubCommit(repo, ref) {
|
|
583
|
-
let res = await fetch2(`https://api.github.com/repos/${repo}/commits/${ref}`);
|
|
584
|
-
let json = await res.json();
|
|
585
|
-
if (json.message && ref === "master") {
|
|
586
|
-
res = await fetch2(`https://api.github.com/repos/${repo}/commits/main`);
|
|
587
|
-
json = await res.json();
|
|
588
|
-
}
|
|
589
|
-
return json;
|
|
590
|
-
}
|
|
591
|
-
function getDependencyType(version) {
|
|
592
|
-
if (!version || typeof version !== "string") {
|
|
593
|
-
throw Error(`Invalid dependency value "${version}"`);
|
|
594
|
-
}
|
|
595
|
-
if (version.startsWith("https://github.com/")) {
|
|
596
|
-
return "github";
|
|
597
|
-
}
|
|
598
|
-
else if (version.match(/^(\.?\.)?\//)) {
|
|
599
|
-
return "local";
|
|
600
|
-
}
|
|
601
|
-
else {
|
|
602
|
-
return "mops";
|
|
603
|
-
}
|
|
604
|
-
}
|
|
605
|
-
function readConfig(configFile = getClosestConfigFile()) {
|
|
606
|
-
let text = fs2.readFileSync(configFile).toString();
|
|
607
|
-
let toml = TOML.parse(text);
|
|
608
|
-
let processDeps = (deps) => {
|
|
609
|
-
Object.entries(deps).forEach(([name, data]) => {
|
|
610
|
-
if (!data || typeof data !== "string") {
|
|
611
|
-
throw Error(`Invalid dependency value ${name} = "${data}"`);
|
|
612
|
-
}
|
|
613
|
-
let depType = getDependencyType(data);
|
|
614
|
-
if (depType === "github") {
|
|
615
|
-
deps[name] = { name, repo: data, version: "" };
|
|
616
|
-
}
|
|
617
|
-
else if (depType === "local") {
|
|
618
|
-
deps[name] = { name, repo: "", path: data, version: "" };
|
|
619
|
-
}
|
|
620
|
-
else {
|
|
621
|
-
deps[name] = { name, repo: "", version: data };
|
|
622
|
-
}
|
|
623
|
-
});
|
|
624
|
-
};
|
|
625
|
-
processDeps(toml.dependencies || {});
|
|
626
|
-
processDeps(toml["dev-dependencies"] || {});
|
|
627
|
-
return toml;
|
|
628
|
-
}
|
|
629
|
-
function writeConfig(config, configFile = getClosestConfigFile()) {
|
|
630
|
-
let resConfig = { ...config };
|
|
631
|
-
let deps = resConfig.dependencies || {};
|
|
632
|
-
Object.entries(config.dependencies || {}).forEach(([name, { repo, path: path18, version }]) => {
|
|
633
|
-
deps[name] = repo || path18 || version;
|
|
634
|
-
});
|
|
635
|
-
let devDeps = resConfig["dev-dependencies"] || {};
|
|
636
|
-
Object.entries(config["dev-dependencies"] || {}).forEach(([name, { repo, path: path18, version }]) => {
|
|
637
|
-
devDeps[name] = repo || path18 || version;
|
|
638
|
-
});
|
|
639
|
-
let text = TOML.stringify(resConfig).trim();
|
|
640
|
-
if (fs2.existsSync(configFile) && fs2.readFileSync(configFile).toString().endsWith("\n")) {
|
|
641
|
-
text += "\n";
|
|
642
|
-
}
|
|
643
|
-
fs2.writeFileSync(configFile, text);
|
|
644
|
-
}
|
|
645
|
-
function formatDir(name, version) {
|
|
646
|
-
return path.join(getRootDir(), ".mops", `${name}@${version}`);
|
|
647
|
-
}
|
|
648
|
-
function formatGithubDir(name, repo) {
|
|
649
|
-
const { branch, commitHash } = parseGithubURL(repo);
|
|
650
|
-
return path.join(getRootDir(), ".mops/_github", `${name}#${branch}` + (commitHash ? `@${commitHash}` : ""));
|
|
651
|
-
}
|
|
652
|
-
async function checkApiCompatibility() {
|
|
653
|
-
let actor = await mainActor();
|
|
654
|
-
let backendApiVer = await actor.getApiVersion();
|
|
655
|
-
if (backendApiVer.split(".")[0] !== apiVersion.split(".")[0]) {
|
|
656
|
-
console.log(chalk.red("ERR: ") + `CLI incompatible with backend. CLI v${apiVersion}, Backend v${backendApiVer}`);
|
|
657
|
-
console.log("Run " + chalk.greenBright("npm i -g ic-mops") + " to upgrade cli.");
|
|
658
|
-
return false;
|
|
659
|
-
}
|
|
660
|
-
else if (backendApiVer.split(".")[1] !== apiVersion.split(".")[1]) {
|
|
661
|
-
console.log("-".repeat(50));
|
|
662
|
-
console.log(chalk.yellow("WARN: ") + `CLI probably incompatible with backend. CLI v${apiVersion}, Backend v${backendApiVer}`);
|
|
663
|
-
console.log("Recommended to run " + chalk.greenBright("npm i -g ic-mops") + " to upgrade cli.");
|
|
664
|
-
console.log("-".repeat(50));
|
|
665
|
-
}
|
|
666
|
-
return true;
|
|
667
|
-
}
|
|
668
|
-
// dist/commands/install-all.js
|
|
669
|
-
import chalk5 from "chalk";
|
|
670
|
-
import logUpdate3 from "log-update";
|
|
671
|
-
// dist/commands/install.js
|
|
672
|
-
import path4 from "node:path";
|
|
673
|
-
import fs4 from "node:fs";
|
|
674
|
-
import logUpdate2 from "log-update";
|
|
675
|
-
import chalk3 from "chalk";
|
|
676
|
-
// dist/parallel.js
|
|
677
|
-
async function parallel(threads, items, fn) {
|
|
678
|
-
return new Promise((resolve) => {
|
|
679
|
-
let busyThreads = 0;
|
|
680
|
-
items = items.slice();
|
|
681
|
-
let loop = () => {
|
|
682
|
-
if (!items.length) {
|
|
683
|
-
if (busyThreads === 0) {
|
|
684
|
-
resolve();
|
|
685
|
-
}
|
|
686
|
-
return;
|
|
687
|
-
}
|
|
688
|
-
if (busyThreads >= threads) {
|
|
689
|
-
return;
|
|
690
|
-
}
|
|
691
|
-
busyThreads++;
|
|
692
|
-
fn(items.shift()).then(() => {
|
|
693
|
-
busyThreads--;
|
|
694
|
-
loop();
|
|
695
|
-
});
|
|
696
|
-
loop();
|
|
697
|
-
};
|
|
698
|
-
loop();
|
|
699
|
-
});
|
|
700
|
-
}
|
|
701
|
-
// dist/vessel.js
|
|
702
|
-
import { existsSync, mkdirSync, createWriteStream, readFileSync, writeFileSync } from "node:fs";
|
|
703
|
-
import path3 from "node:path";
|
|
704
|
-
import { deleteSync } from "del";
|
|
705
|
-
import { execaCommand } from "execa";
|
|
706
|
-
import chalk2 from "chalk";
|
|
707
|
-
import logUpdate from "log-update";
|
|
708
|
-
import got from "got";
|
|
709
|
-
import decompress from "decompress";
|
|
710
|
-
import { pipeline } from "stream";
|
|
711
|
-
// dist/cache.js
|
|
712
|
-
import fs3 from "node:fs";
|
|
713
|
-
import path2 from "node:path";
|
|
714
|
-
import ncp2 from "ncp";
|
|
715
|
-
import getFolderSize from "get-folder-size";
|
|
716
|
-
var isCached = (pkgId) => {
|
|
717
|
-
let dir = path2.join(globalCacheDir, "packages", pkgId);
|
|
718
|
-
return fs3.existsSync(dir);
|
|
719
|
-
};
|
|
720
|
-
var addCache = (pkgId, source) => {
|
|
721
|
-
let dest = path2.join(globalCacheDir, "packages", pkgId);
|
|
722
|
-
fs3.mkdirSync(dest, { recursive: true });
|
|
723
|
-
return new Promise((resolve, reject) => {
|
|
724
|
-
ncp2.ncp(source, dest, { stopOnErr: true }, (err) => {
|
|
725
|
-
if (err) {
|
|
726
|
-
reject(err);
|
|
727
|
-
}
|
|
728
|
-
resolve();
|
|
729
|
-
});
|
|
730
|
-
});
|
|
731
|
-
};
|
|
732
|
-
var copyCache = (pkgId, dest) => {
|
|
733
|
-
let source = path2.join(globalCacheDir, "packages", pkgId);
|
|
734
|
-
fs3.mkdirSync(dest, { recursive: true });
|
|
735
|
-
return new Promise((resolve, reject) => {
|
|
736
|
-
ncp2.ncp(source, dest, { stopOnErr: true }, (err) => {
|
|
737
|
-
if (err) {
|
|
738
|
-
reject(err);
|
|
739
|
-
}
|
|
740
|
-
resolve();
|
|
741
|
-
});
|
|
742
|
-
});
|
|
743
|
-
};
|
|
744
|
-
var cacheSize = async () => {
|
|
745
|
-
let dir = path2.join(globalCacheDir, "packages");
|
|
746
|
-
fs3.mkdirSync(dir, { recursive: true });
|
|
747
|
-
let size = await getFolderSize.strict(dir);
|
|
748
|
-
if (size < 1024 * 1024) {
|
|
749
|
-
return (size / 1024).toFixed(2) + " KB";
|
|
750
|
-
}
|
|
751
|
-
return (size / 1024 / 1024).toFixed(2) + " MB";
|
|
752
|
-
};
|
|
753
|
-
var cleanCache = async () => {
|
|
754
|
-
let dir = path2.join(globalCacheDir, "packages");
|
|
755
|
-
fs3.rmSync(dir, { recursive: true, force: true });
|
|
756
|
-
};
|
|
757
|
-
// dist/vessel.js
|
|
758
|
-
var dhallFileToJson = async (filePath, silent) => {
|
|
759
|
-
var _a;
|
|
760
|
-
if (existsSync(filePath)) {
|
|
761
|
-
let cwd = new URL(path3.dirname(import.meta.url)).pathname;
|
|
762
|
-
let res;
|
|
763
|
-
try {
|
|
764
|
-
res = await execaCommand(`dhall-to-json --file ${filePath}`, { preferLocal: true, cwd });
|
|
765
|
-
}
|
|
766
|
-
catch (err) {
|
|
767
|
-
silent || console.error("dhall-to-json error:", (_a = err.message) == null ? void 0 : _a.split("Message:")[0]);
|
|
768
|
-
return null;
|
|
769
|
-
}
|
|
770
|
-
if (res.exitCode === 0) {
|
|
771
|
-
return JSON.parse(res.stdout);
|
|
772
|
-
}
|
|
773
|
-
else {
|
|
774
|
-
return res;
|
|
775
|
-
}
|
|
776
|
-
}
|
|
777
|
-
return null;
|
|
778
|
-
};
|
|
779
|
-
var readVesselConfig = async (dir, { cache = true, silent = false } = {}) => {
|
|
780
|
-
const cachedFile = (dir || process.cwd()) + "/vessel.json";
|
|
781
|
-
if (existsSync(cachedFile)) {
|
|
782
|
-
let cachedConfig = readFileSync(cachedFile).toString();
|
|
783
|
-
return JSON.parse(cachedConfig);
|
|
784
|
-
}
|
|
785
|
-
const [vessel, packageSetArray] = await Promise.all([
|
|
786
|
-
dhallFileToJson((dir || process.cwd()) + "/vessel.dhall", silent),
|
|
787
|
-
dhallFileToJson((dir || process.cwd()) + "/package-set.dhall", silent)
|
|
788
|
-
]);
|
|
789
|
-
if (!vessel || !packageSetArray) {
|
|
790
|
-
return null;
|
|
791
|
-
}
|
|
792
|
-
let repos = {};
|
|
793
|
-
for (const { name, repo, version } of packageSetArray) {
|
|
794
|
-
const { org, gitName } = parseGithubURL(repo);
|
|
795
|
-
repos[name] = `https://github.com/${org}/${gitName}#${version}`;
|
|
796
|
-
}
|
|
797
|
-
let config = {
|
|
798
|
-
dependencies: vessel.dependencies.map((name) => {
|
|
799
|
-
return { name, repo: repos[name], version: "" };
|
|
800
|
-
}),
|
|
801
|
-
"dev-dependencies": []
|
|
802
|
-
};
|
|
803
|
-
if (cache === true) {
|
|
804
|
-
writeFileSync(cachedFile, JSON.stringify(config), "utf-8");
|
|
805
|
-
}
|
|
806
|
-
return config;
|
|
807
|
-
};
|
|
808
|
-
var downloadFromGithub = async (repo, dest, onProgress) => {
|
|
809
|
-
const { branch, org, gitName, commitHash } = parseGithubURL(repo);
|
|
810
|
-
const zipFile = `https://github.com/${org}/${gitName}/archive/${commitHash || branch}.zip`;
|
|
811
|
-
const readStream = got.stream(zipFile);
|
|
812
|
-
const promise = new Promise((resolve, reject) => {
|
|
813
|
-
readStream.on("error", (err) => {
|
|
814
|
-
console.error(chalk2.red(`Error: failed to download from GitHub: ${zipFile}`));
|
|
815
|
-
console.error(err.message);
|
|
816
|
-
reject(err);
|
|
817
|
-
});
|
|
818
|
-
readStream.on("downloadProgress", ({ transferred, total }) => {
|
|
819
|
-
onProgress == null ? void 0 : onProgress(transferred, total || 2 * 1024 ** 2);
|
|
820
|
-
});
|
|
821
|
-
readStream.on("response", (response) => {
|
|
822
|
-
if (response.headers.age > 3600) {
|
|
823
|
-
console.error(chalk2.red("Error: ") + "Failure - response too old");
|
|
824
|
-
readStream.destroy();
|
|
825
|
-
reject();
|
|
826
|
-
return;
|
|
827
|
-
}
|
|
828
|
-
readStream.off("error", reject);
|
|
829
|
-
const tmpDir = path3.resolve(process.cwd(), ".mops/_tmp/");
|
|
830
|
-
const tmpFile = path3.resolve(tmpDir, `${gitName}@${commitHash || branch}.zip`);
|
|
831
|
-
try {
|
|
832
|
-
mkdirSync(tmpDir, { recursive: true });
|
|
833
|
-
pipeline(readStream, createWriteStream(tmpFile), (err) => {
|
|
834
|
-
if (err) {
|
|
835
|
-
deleteSync([tmpDir]);
|
|
836
|
-
reject(err);
|
|
837
|
-
}
|
|
838
|
-
else {
|
|
839
|
-
let options = {
|
|
840
|
-
extract: true,
|
|
841
|
-
strip: 1,
|
|
842
|
-
headers: {
|
|
843
|
-
accept: "application/zip"
|
|
844
|
-
}
|
|
845
|
-
};
|
|
846
|
-
decompress(tmpFile, dest, options).then((unzippedFiles) => {
|
|
847
|
-
deleteSync([tmpDir]);
|
|
848
|
-
resolve(unzippedFiles);
|
|
849
|
-
}).catch((err2) => {
|
|
850
|
-
deleteSync([tmpDir]);
|
|
851
|
-
reject(err2);
|
|
852
|
-
});
|
|
853
|
-
}
|
|
854
|
-
});
|
|
855
|
-
}
|
|
856
|
-
catch (err) {
|
|
857
|
-
deleteSync([tmpDir]);
|
|
858
|
-
reject(err);
|
|
859
|
-
}
|
|
860
|
-
});
|
|
861
|
-
});
|
|
862
|
-
return promise;
|
|
863
|
-
};
|
|
864
|
-
var installFromGithub = async (name, repo, { verbose = false, dep = false, silent = false } = {}) => {
|
|
865
|
-
let { branch, commitHash } = parseGithubURL(repo);
|
|
866
|
-
let dir = formatGithubDir(name, repo);
|
|
867
|
-
let cacheName = `_github/${name}#${branch}` + (commitHash ? `@${commitHash}` : "");
|
|
868
|
-
if (existsSync(dir)) {
|
|
869
|
-
silent || logUpdate(`${dep ? "Dependency" : "Installing"} ${repo} (local cache)`);
|
|
870
|
-
}
|
|
871
|
-
else if (isCached(cacheName)) {
|
|
872
|
-
await copyCache(cacheName, dir);
|
|
873
|
-
silent || logUpdate(`${dep ? "Dependency" : "Installing"} ${repo} (global cache)`);
|
|
874
|
-
}
|
|
875
|
-
else {
|
|
876
|
-
mkdirSync(dir, { recursive: true });
|
|
877
|
-
let progress = (step, total) => {
|
|
878
|
-
silent || logUpdate(`${dep ? "Dependency" : "Installing"} ${repo} ${progressBar(step, total)}`);
|
|
879
|
-
};
|
|
880
|
-
progress(0, 2 * 1024 ** 2);
|
|
881
|
-
try {
|
|
882
|
-
await downloadFromGithub(repo, dir, progress);
|
|
883
|
-
}
|
|
884
|
-
catch (err) {
|
|
885
|
-
deleteSync([dir]);
|
|
886
|
-
process.exit(1);
|
|
887
|
-
}
|
|
888
|
-
await addCache(cacheName, dir);
|
|
889
|
-
}
|
|
890
|
-
if (verbose) {
|
|
891
|
-
silent || logUpdate.done();
|
|
892
|
-
}
|
|
893
|
-
const config = await readVesselConfig(dir, { silent });
|
|
894
|
-
if (config) {
|
|
895
|
-
for (const { name: name2, repo: repo2 } of config.dependencies) {
|
|
896
|
-
if (repo2) {
|
|
897
|
-
await installFromGithub(name2, repo2, { verbose, silent, dep: true });
|
|
898
|
-
}
|
|
899
|
-
}
|
|
900
|
-
}
|
|
901
|
-
};
|
|
902
|
-
// dist/api/downloadPackageFiles.js
|
|
903
|
-
async function getPackageFilesInfo(pkg, version) {
|
|
904
|
-
let actor = await mainActor();
|
|
905
|
-
let [packageDetailsRes, fileIds] = await Promise.all([
|
|
906
|
-
actor.getPackageDetails(pkg, version),
|
|
907
|
-
getFileIds(pkg, version)
|
|
908
|
-
]);
|
|
909
|
-
if ("err" in packageDetailsRes) {
|
|
910
|
-
throw packageDetailsRes.err;
|
|
911
|
-
}
|
|
912
|
-
let packageDetails = packageDetailsRes.ok;
|
|
913
|
-
return {
|
|
914
|
-
storageId: packageDetails.publication.storage,
|
|
915
|
-
fileIds
|
|
916
|
-
};
|
|
917
|
-
}
|
|
918
|
-
async function getFileIds(pkg, version) {
|
|
919
|
-
let actor = await mainActor();
|
|
920
|
-
let fileIdsRes = await actor.getFileIds(pkg, version);
|
|
921
|
-
if ("err" in fileIdsRes) {
|
|
922
|
-
throw fileIdsRes.err;
|
|
923
|
-
}
|
|
924
|
-
let filesIds = fileIdsRes.ok;
|
|
925
|
-
return filesIds;
|
|
926
|
-
}
|
|
927
|
-
async function downloadFile(storage, fileId) {
|
|
928
|
-
let fileMetaRes = await storage.getFileMeta(fileId);
|
|
929
|
-
if ("err" in fileMetaRes) {
|
|
930
|
-
throw fileMetaRes.err;
|
|
931
|
-
}
|
|
932
|
-
let fileMeta = fileMetaRes.ok;
|
|
933
|
-
let data = [];
|
|
934
|
-
for (let i = 0n; i < fileMeta.chunkCount; i++) {
|
|
935
|
-
let chunkRes = await storage.downloadChunk(fileId, i);
|
|
936
|
-
if ("err" in chunkRes) {
|
|
937
|
-
throw chunkRes.err;
|
|
938
|
-
}
|
|
939
|
-
let chunk = chunkRes.ok;
|
|
940
|
-
data = [...data, ...chunk];
|
|
941
|
-
}
|
|
942
|
-
return {
|
|
943
|
-
path: fileMeta.path,
|
|
944
|
-
data
|
|
945
|
-
};
|
|
946
|
-
}
|
|
947
|
-
// dist/commands/install.js
|
|
948
|
-
async function install(pkg, version = "", { verbose = false, silent = false, dep = false } = {}) {
|
|
949
|
-
if (!checkConfigFile()) {
|
|
950
|
-
return false;
|
|
951
|
-
}
|
|
952
|
-
let total = Infinity;
|
|
953
|
-
let step = 0;
|
|
954
|
-
let progress = () => {
|
|
955
|
-
step++;
|
|
956
|
-
silent || logUpdate2(`${dep ? "Dependency" : "Installing"} ${pkg}@${version} ${progressBar(step, total)}`);
|
|
957
|
-
};
|
|
958
|
-
progress();
|
|
959
|
-
if (!version) {
|
|
960
|
-
let versionRes = await getHighestVersion(pkg);
|
|
961
|
-
if ("err" in versionRes) {
|
|
962
|
-
console.log(chalk3.red("Error: ") + versionRes.err);
|
|
963
|
-
return false;
|
|
964
|
-
}
|
|
965
|
-
version = versionRes.ok;
|
|
966
|
-
}
|
|
967
|
-
let dir = formatDir(pkg, version);
|
|
968
|
-
let alreadyInstalled = false;
|
|
969
|
-
if (fs4.existsSync(dir)) {
|
|
970
|
-
silent || logUpdate2(`${dep ? "Dependency" : "Installing"} ${pkg}@${version} (local cache)`);
|
|
971
|
-
alreadyInstalled = true;
|
|
972
|
-
}
|
|
973
|
-
else if (isCached(`${pkg}@${version}`)) {
|
|
974
|
-
await copyCache(`${pkg}@${version}`, dir);
|
|
975
|
-
silent || logUpdate2(`${dep ? "Dependency" : "Installing"} ${pkg}@${version} (global cache)`);
|
|
976
|
-
}
|
|
977
|
-
else {
|
|
978
|
-
let threads = 16;
|
|
979
|
-
if (process.env.GITHUB_ENV) {
|
|
980
|
-
threads = 4;
|
|
981
|
-
}
|
|
982
|
-
try {
|
|
983
|
-
let { storageId, fileIds } = await getPackageFilesInfo(pkg, version);
|
|
984
|
-
total = fileIds.length + 2;
|
|
985
|
-
let filesData = /* @__PURE__ */ new Map();
|
|
986
|
-
let storage = await storageActor(storageId);
|
|
987
|
-
await parallel(threads, fileIds, async (fileId) => {
|
|
988
|
-
let { path: path18, data } = await downloadFile(storage, fileId);
|
|
989
|
-
filesData.set(path18, data);
|
|
990
|
-
progress();
|
|
991
|
-
});
|
|
992
|
-
for (let [filePath, data] of filesData.entries()) {
|
|
993
|
-
fs4.mkdirSync(path4.join(dir, path4.dirname(filePath)), { recursive: true });
|
|
994
|
-
fs4.writeFileSync(path4.join(dir, filePath), Buffer.from(data));
|
|
995
|
-
}
|
|
996
|
-
}
|
|
997
|
-
catch (err) {
|
|
998
|
-
console.error(chalk3.red("Error: ") + err);
|
|
999
|
-
return false;
|
|
1000
|
-
}
|
|
1001
|
-
await addCache(`${pkg}@${version}`, dir);
|
|
1002
|
-
progress();
|
|
1003
|
-
}
|
|
1004
|
-
if (verbose) {
|
|
1005
|
-
silent || logUpdate2.done();
|
|
1006
|
-
}
|
|
1007
|
-
let ok = true;
|
|
1008
|
-
let config = readConfig(path4.join(dir, "mops.toml"));
|
|
1009
|
-
let deps = Object.values(config.dependencies || {});
|
|
1010
|
-
let installedDeps = {};
|
|
1011
|
-
for (const { name, repo, version: version2 } of deps) {
|
|
1012
|
-
if (repo) {
|
|
1013
|
-
await installFromGithub(name, repo, { silent, verbose });
|
|
1014
|
-
}
|
|
1015
|
-
else {
|
|
1016
|
-
let res = await install(name, version2, { silent, verbose });
|
|
1017
|
-
if (res) {
|
|
1018
|
-
installedDeps = { ...installedDeps, ...res };
|
|
1019
|
-
}
|
|
1020
|
-
else {
|
|
1021
|
-
ok = false;
|
|
1022
|
-
}
|
|
1023
|
-
}
|
|
1024
|
-
}
|
|
1025
|
-
if (!alreadyInstalled) {
|
|
1026
|
-
installedDeps = { ...installedDeps, [pkg]: version };
|
|
1027
|
-
}
|
|
1028
|
-
if (!ok) {
|
|
1029
|
-
return false;
|
|
1030
|
-
}
|
|
1031
|
-
return installedDeps;
|
|
1032
|
-
}
|
|
1033
|
-
// dist/resolve-packages.js
|
|
1034
|
-
import path5 from "node:path";
|
|
1035
|
-
import chalk4 from "chalk";
|
|
1036
|
-
async function resolvePackages({ verbose = false } = {}) {
|
|
1037
|
-
if (!checkConfigFile()) {
|
|
1038
|
-
return {};
|
|
1039
|
-
}
|
|
1040
|
-
let packages = {};
|
|
1041
|
-
let versions = {};
|
|
1042
|
-
let compareVersions = (a = "0.0.0", b = "0.0.0") => {
|
|
1043
|
-
let ap = a.split(".").map((x) => parseInt(x));
|
|
1044
|
-
let bp = b.split(".").map((x) => parseInt(x));
|
|
1045
|
-
if (ap[0] - bp[0]) {
|
|
1046
|
-
return Math.sign(ap[0] - bp[0]);
|
|
1047
|
-
}
|
|
1048
|
-
if (ap[0] === bp[0] && ap[1] - bp[1]) {
|
|
1049
|
-
return Math.sign(ap[1] - bp[1]);
|
|
1050
|
-
}
|
|
1051
|
-
if (ap[0] === bp[0] && ap[1] === bp[1] && ap[2] - bp[2]) {
|
|
1052
|
-
return Math.sign(ap[2] - bp[2]);
|
|
1053
|
-
}
|
|
1054
|
-
return 0;
|
|
1055
|
-
};
|
|
1056
|
-
const gitVerRegex = new RegExp(/v(\d{1,2}\.\d{1,2}\.\d{1,2})(-.*)?$/);
|
|
1057
|
-
const compareGitVersions = (repoA, repoB) => {
|
|
1058
|
-
const { branch: a } = parseGithubURL(repoA);
|
|
1059
|
-
const { branch: b } = parseGithubURL(repoB);
|
|
1060
|
-
if (gitVerRegex.test(a) && gitVerRegex.test(b)) {
|
|
1061
|
-
return compareVersions(a.substring(1), b.substring(1));
|
|
1062
|
-
}
|
|
1063
|
-
else if (!gitVerRegex.test(a)) {
|
|
1064
|
-
return -1;
|
|
1065
|
-
}
|
|
1066
|
-
else {
|
|
1067
|
-
return 1;
|
|
1068
|
-
}
|
|
1069
|
-
};
|
|
1070
|
-
let collectDeps = async (config2, isRoot = false) => {
|
|
1071
|
-
var _a, _b, _c, _d, _e, _f;
|
|
1072
|
-
let allDeps = [...Object.values(config2.dependencies || {})];
|
|
1073
|
-
if (isRoot) {
|
|
1074
|
-
allDeps = [...allDeps, ...Object.values(config2["dev-dependencies"] || {})];
|
|
1075
|
-
}
|
|
1076
|
-
for (const pkgDetails of allDeps) {
|
|
1077
|
-
const { name, repo, version } = pkgDetails;
|
|
1078
|
-
if (isRoot || !packages[name] || !((_a = packages[name]) == null ? void 0 : _a.isRoot) && (repo && ((_b = packages[name]) == null ? void 0 : _b.repo) && compareGitVersions(((_c = packages[name]) == null ? void 0 : _c.repo) || "", repo) === -1 || compareVersions((_d = packages[name]) == null ? void 0 : _d.version, version) === -1)) {
|
|
1079
|
-
packages[name] = {
|
|
1080
|
-
...pkgDetails,
|
|
1081
|
-
isRoot
|
|
1082
|
-
};
|
|
1083
|
-
}
|
|
1084
|
-
let nestedConfig;
|
|
1085
|
-
if (repo) {
|
|
1086
|
-
const dir = formatGithubDir(name, repo);
|
|
1087
|
-
nestedConfig = await readVesselConfig(dir, { silent: true }) || {};
|
|
1088
|
-
}
|
|
1089
|
-
else if (!pkgDetails.path && version) {
|
|
1090
|
-
const file = formatDir(name, version) + "/mops.toml";
|
|
1091
|
-
nestedConfig = readConfig(file);
|
|
1092
|
-
}
|
|
1093
|
-
if (nestedConfig && !pkgDetails.path) {
|
|
1094
|
-
await collectDeps(nestedConfig);
|
|
1095
|
-
}
|
|
1096
|
-
if (!versions[name]) {
|
|
1097
|
-
versions[name] = [];
|
|
1098
|
-
}
|
|
1099
|
-
if (repo) {
|
|
1100
|
-
const { branch } = parseGithubURL(repo);
|
|
1101
|
-
(_e = versions[name]) == null ? void 0 : _e.push(branch);
|
|
1102
|
-
}
|
|
1103
|
-
else if (version) {
|
|
1104
|
-
(_f = versions[name]) == null ? void 0 : _f.push(version);
|
|
1105
|
-
}
|
|
1106
|
-
}
|
|
1107
|
-
};
|
|
1108
|
-
let config = readConfig();
|
|
1109
|
-
await collectDeps(config, true);
|
|
1110
|
-
if (verbose) {
|
|
1111
|
-
for (let [dep, vers] of Object.entries(versions)) {
|
|
1112
|
-
if (vers.length > 1) {
|
|
1113
|
-
console.log(chalk4.yellow("WARN:"), `Conflicting package versions "${dep}" - ${vers.join(", ")}`);
|
|
1114
|
-
}
|
|
1115
|
-
}
|
|
1116
|
-
}
|
|
1117
|
-
let rootDir = getRootDir();
|
|
1118
|
-
return Object.fromEntries(Object.entries(packages).map(([name, pkg]) => {
|
|
1119
|
-
let version;
|
|
1120
|
-
if (pkg.path) {
|
|
1121
|
-
version = path5.resolve(rootDir, pkg.path).replaceAll("{MOPS_ENV}", process.env.MOPS_ENV || "local");
|
|
1122
|
-
}
|
|
1123
|
-
else if (pkg.repo) {
|
|
1124
|
-
version = pkg.repo;
|
|
1125
|
-
}
|
|
1126
|
-
else if (pkg.version) {
|
|
1127
|
-
version = pkg.version;
|
|
1128
|
-
}
|
|
1129
|
-
else {
|
|
1130
|
-
return [name, ""];
|
|
1131
|
-
}
|
|
1132
|
-
return [name, version];
|
|
1133
|
-
}).filter(([, version]) => version !== ""));
|
|
1134
|
-
}
|
|
1135
|
-
// dist/notify-installs.js
|
|
1136
|
-
async function notifyInstalls(names) {
|
|
1137
|
-
let resolvedPackages = await resolvePackages();
|
|
1138
|
-
let packages = names.map((name) => [name, resolvedPackages[name]]);
|
|
1139
|
-
if (packages.length) {
|
|
1140
|
-
let actor = await mainActor();
|
|
1141
|
-
await actor.notifyInstalls(packages.filter(([_, version]) => getDependencyType(version) === "mops"));
|
|
1142
|
-
}
|
|
1143
|
-
}
|
|
1144
|
-
// dist/integrity.js
|
|
1145
|
-
import fs5 from "node:fs";
|
|
1146
|
-
import path6 from "node:path";
|
|
1147
|
-
import { sha256 } from "@noble/hashes/sha256";
|
|
1148
|
-
import { bytesToHex } from "@noble/hashes/utils";
|
|
1149
|
-
async function checkIntegrity(lock) {
|
|
1150
1150 - let force = !!lock;
1151 - if (!lock && !process.env["CI"] && fs5.existsSync(path6.join(getRootDir(), "mops.lock"))) {
1152 - lock = "save";
1153 - }
1154 - if (!lock) {
1155 - lock = process.env["CI"] ? "check" : "ignore";
1156 - }
1157 - if (lock === "save") {
1158 - await saveLockFile();
1159 - await checkLockFile(force);
1160 - }
1161 - else if (lock === "check") {
1162 - await checkLockFile(force);
1163 - }
1164 - }
1165 - async function getFileHashesFromRegistry() {
1166 - let packageIds = await getResolvedMopsPackageIds();
1167 - let actor = await mainActor();
1168 - let fileHashesByPackageIds = await actor.getFileHashesByPackageIds(packageIds);
1169 - return fileHashesByPackageIds;
1170 - }
1171 - async function getResolvedMopsPackageIds() {
1172 - let resolvedPackages = await resolvePackages();
1173 - let packageIds = Object.entries(resolvedPackages).filter(([_, version]) => getDependencyType(version) === "mops").map(([name, version]) => `${name}@${version}`);
1174 - return packageIds;
1175 - }
1176 - function getLocalFileHash(fileId) {
1177 - let rootDir = getRootDir();
1178 - let file = path6.join(rootDir, ".mops", fileId);
1179 - if (!fs5.existsSync(file)) {
1180 - console.error(`Missing file ${fileId} in .mops dir`);
1181 - process.exit(1);
1182 - }
1183 - let fileData = fs5.readFileSync(file);
1184 - return bytesToHex(sha256(fileData));
1185 - }
1186 - function getMopsTomlHash() {
1187 - return bytesToHex(sha256(fs5.readFileSync(getRootDir() + "/mops.toml")));
1188 - }
1189 - async function saveLockFile() {
1190 - let rootDir = getRootDir();
1191 - let lockFile = path6.join(rootDir, "mops.lock");
1192 - if (fs5.existsSync(lockFile)) {
1193 - let lockFileJson2 = JSON.parse(fs5.readFileSync(lockFile).toString());
1194 - let mopsTomlHash = getMopsTomlHash();
1195 - if (mopsTomlHash === lockFileJson2.mopsTomlHash) {
1196 - return;
1197 - }
1198 - }
1199 - let fileHashes = await getFileHashesFromRegistry();
1200 - let lockFileJson = {
1201 - version: 1,
1202 - mopsTomlHash: getMopsTomlHash(),
1203 - hashes: fileHashes.reduce((acc, [packageId, fileHashes2]) => {
1204 - acc[packageId] = fileHashes2.reduce((acc2, [fileId, hash]) => {
1205 - acc2[fileId] = bytesToHex(new Uint8Array(hash));
1206 - return acc2;
1207 - }, {});
1208 - return acc;
1209 - }, {})
1210 - };
1211 - fs5.writeFileSync(lockFile, JSON.stringify(lockFileJson, null, 2));
1212 - }
1213 - async function checkLockFile(force = false) {
1214 - let rootDir = getRootDir();
1215 - let lockFile = path6.join(rootDir, "mops.lock");
1216 - if (!fs5.existsSync(lockFile)) {
1217 - if (force) {
1218 - console.error("Missing lock file. Run `mops install` to generate it.");
1219 - process.exit(1);
1220 - }
1221 - return;
1222 - }
1223 - let lockFileJson = JSON.parse(fs5.readFileSync(lockFile).toString());
1224 - let packageIds = await getResolvedMopsPackageIds();
1225 - if (lockFileJson.version !== 1) {
1226 - console.error("Integrity check failed");
1227 - console.error(`Invalid lock file version: ${lockFileJson.version}. Supported versions: 1`);
1228 - process.exit(1);
1229 - }
1230 - if (lockFileJson.mopsTomlHash !== getMopsTomlHash()) {
1231 - console.error("Integrity check failed");
1232 - console.error("Mismatched mops.toml hash");
1233 - console.error(`Locked hash: ${lockFileJson.mopsTomlHash}`);
1234 - console.error(`Actual hash: ${getMopsTomlHash()}`);
1235 - process.exit(1);
1236 - }
1237 - if (Object.keys(lockFileJson.hashes).length !== packageIds.length) {
1238 - console.error("Integrity check failed");
1239 - console.error(`Mismatched number of resolved packages: ${JSON.stringify(Object.keys(lockFileJson.hashes).length)} vs ${JSON.stringify(packageIds.length)}`);
1240 - process.exit(1);
1241 - }
1242 - for (let packageId of packageIds) {
1243 - if (!(packageId in lockFileJson.hashes)) {
1244 - console.error("Integrity check failed");
1245 - console.error(`Missing package ${packageId} in lock file`);
1246 - process.exit(1);
1247 - }
1248 - }
1249 - for (let [packageId, hashes] of Object.entries(lockFileJson.hashes)) {
1250 - if (!packageIds.includes(packageId)) {
1251 - console.error("Integrity check failed");
1252 - console.error(`Package ${packageId} in lock file but not in resolved packages`);
1253 - process.exit(1);
1254 - }
1255 - for (let [fileId, lockedHash] of Object.entries(hashes)) {
1256 - if (!fileId.startsWith(packageId)) {
1257 - console.error("Integrity check failed");
1258 - console.error(`File ${fileId} in lock file does not belong to package ${packageId}`);
1259 - process.exit(1);
1260 - }
1261 - let localHash = getLocalFileHash(fileId);
1262 - if (lockedHash !== localHash) {
1263 - console.error("Integrity check failed");
1264 - console.error(`Mismatched hash for ${fileId}`);
1265 - console.error(`Locked hash: ${lockedHash}`);
1266 - console.error(`Actual hash: ${localHash}`);
1267 - process.exit(1);
1268 - }
1269 - }
1270 - }
1271 - }
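For reference, the lock file written by saveLockFile above has the following shape; the package id, file id, and hash values here are illustrative placeholders only:

```js
// Illustrative mops.lock, following the structure built by saveLockFile above
// (package/file ids and hashes are made-up placeholders)
const exampleLock = {
  version: 1,
  // hex sha256 of mops.toml, as returned by getMopsTomlHash()
  mopsTomlHash: "3fa1c9...",
  hashes: {
    // keyed by "<name>@<version>" from getResolvedMopsPackageIds()
    "base@0.11.1": {
      // keyed by file id; values are hex sha256 of the files under .mops/
      "base@0.11.1/src/Array.mo": "9c1e2d...",
    },
  },
};
```

checkLockFile compares each locked hash against getLocalFileHash(fileId), which hashes the copy of that file under the local .mops directory.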
1272 - // dist/commands/install-all.js
1273 - async function installAll({ verbose = false, silent = false, lockfile } = {}) {
1274 - if (!checkConfigFile()) {
1275 - return;
1276 - }
1277 - let config = readConfig();
1278 - let deps = Object.values(config.dependencies || {});
1279 - let devDeps = Object.values(config["dev-dependencies"] || {});
1280 - let allDeps = [...deps, ...devDeps];
1281 - let installedPackages = {};
1282 - for (let { name, repo, path: path18, version } of allDeps) {
1283 - if (repo) {
1284 - await installFromGithub(name, repo, { verbose, silent });
1285 - }
1286 - else if (!path18) {
1287 - let res = await install(name, version, { verbose, silent });
1288 - if (res === false) {
1289 - return;
1290 - }
1291 - installedPackages = { ...installedPackages, ...res };
1292 - }
1293 - }
1294 - if (!silent && lockfile !== "ignore") {
1295 - logUpdate3("Checking integrity...");
1296 - }
1297 - await Promise.all([
1298 - notifyInstalls(Object.keys(installedPackages)),
1299 - checkIntegrity(lockfile)
1300 - ]);
1301 - if (!silent) {
1302 - logUpdate3.clear();
1303 - console.log(chalk5.green("All packages installed"));
1304 - }
1305 - }
|
|
1306
|
-
// dist/commands/template.js
|
|
1307
|
-
import fs6 from "node:fs";
|
|
1308
|
-
import path7 from "node:path";
|
|
1309
|
-
import chalk6 from "chalk";
|
|
1310
|
-
import prompts2 from "prompts";
|
|
1311
|
-
import camelCase from "camelcase";
|
|
1312
|
-
async function template(templateName, options = {}) {
|
|
1313
|
-
var _a;
|
|
1314
|
-
if (!templateName) {
|
|
1315
|
-
let res = await prompts2({
|
|
1316
|
-
type: "select",
|
|
1317
|
-
name: "value",
|
|
1318
|
-
message: "Select template:",
|
|
1319
|
-
choices: [
|
|
1320
|
-
{ title: "README.md", value: "readme" },
|
|
1321
|
-
{ title: "src/lib.mo", value: "lib.mo" },
|
|
1322
|
-
{ title: "test/lib.test.mo", value: "lib.test.mo" },
|
|
1323
|
-
{ title: "License MIT", value: "license:MIT" },
|
|
1324
|
-
{ title: "License Apache-2.0", value: "license:Apache-2.0" },
|
|
1325
|
-
{ title: "GitHub Workflow to run 'mops test'", value: "github-workflow:mops-test" },
|
|
1326
|
-
{ title: "\xD7 Cancel", value: "" }
|
|
1327
|
-
],
|
|
1328
|
-
initial: 0
|
|
1329
|
-
});
|
|
1330
|
-
templateName = res.value;
|
|
1331
|
-
}
|
|
1332
|
-
if (templateName === "github-workflow:mops-test") {
|
|
1333
|
-
let dest = path7.resolve(getRootDir(), ".github/workflows/mops-test.yml");
|
|
1334
|
-
if (fs6.existsSync(dest)) {
|
|
1335
|
-
console.log(chalk6.yellow("Workflow already exists:"), path7.relative(getRootDir(), dest));
|
|
1336
|
-
return;
|
|
1337
|
-
}
|
|
1338
|
-
let mopsTestYml = new URL("../templates/mops-test.yml", import.meta.url);
|
|
1339
|
-
fs6.mkdirSync(path7.resolve(getRootDir(), ".github/workflows"), { recursive: true });
|
|
1340
|
-
fs6.copyFileSync(mopsTestYml, dest);
|
|
1341
|
-
console.log(chalk6.green("Created"), path7.relative(getRootDir(), dest));
|
|
1342
|
-
}
|
|
1343
|
-
else if (templateName == null ? void 0 : templateName.startsWith("license:")) {
|
|
1344
|
-
let dest = path7.resolve(getRootDir(), "LICENSE");
|
|
1345
|
-
if (fs6.existsSync(dest)) {
|
|
1346
|
-
console.log(chalk6.yellow("LICENSE already exists"));
|
|
1347
|
-
return;
|
|
1348
|
-
}
|
|
1349
|
-
let setYearAndOwner = (file) => {
|
|
1350
|
-
let license = fs6.readFileSync(file).toString();
|
|
1351
|
-
license = license.replace(/<year>/g, ( /* @__PURE__ */new Date()).getFullYear().toString());
|
|
1352
|
-
if (options.copyrightOwner) {
|
|
1353
|
-
license = license.replace(/<copyright-owner>/g, options.copyrightOwner);
|
|
1354
|
-
}
|
|
1355
|
-
fs6.writeFileSync(file, license);
|
|
1356
|
-
};
|
|
1357
|
-
if (templateName === "license:MIT") {
|
|
1358
|
-
fs6.copyFileSync(new URL("../templates/licenses/MIT", import.meta.url), path7.resolve(getRootDir(), "LICENSE"));
|
|
1359
|
-
setYearAndOwner(path7.resolve(getRootDir(), "LICENSE"));
|
|
1360
|
-
console.log(chalk6.green("Created"), path7.relative(getRootDir(), "LICENSE"));
|
|
1361
|
-
}
|
|
1362
|
-
else if (templateName === "license:Apache-2.0") {
|
|
1363
|
-
fs6.copyFileSync(new URL("../templates/licenses/Apache-2.0", import.meta.url), path7.resolve(getRootDir(), "LICENSE"));
|
|
1364
|
-
fs6.copyFileSync(new URL("../templates/licenses/Apache-2.0-NOTICE", import.meta.url), path7.resolve(getRootDir(), "NOTICE"));
|
|
1365
|
-
setYearAndOwner(path7.resolve(getRootDir(), "NOTICE"));
|
|
1366
|
-
console.log(chalk6.green("Created"), path7.relative(getRootDir(), "LICENSE"));
|
|
1367
|
-
console.log(chalk6.green("Created"), path7.relative(getRootDir(), "NOTICE"));
|
|
1368
|
-
}
|
|
1369
|
-
}
|
|
1370
|
-
else if (templateName === "lib.mo") {
|
|
1371
|
-
fs6.mkdirSync(path7.join(getRootDir(), "src"), { recursive: true });
|
|
1372
|
-
fs6.copyFileSync(new URL("../templates/src/lib.mo", import.meta.url), path7.resolve(getRootDir(), "src/lib.mo"));
|
|
1373
|
-
console.log(chalk6.green("Created"), path7.relative(getRootDir(), "src/lib.mo"));
|
|
1374
|
-
}
|
|
1375
|
-
else if (templateName === "lib.test.mo") {
|
|
1376
|
-
fs6.mkdirSync(path7.join(getRootDir(), "test"), { recursive: true });
|
|
1377
|
-
fs6.copyFileSync(new URL("../templates/test/lib.test.mo", import.meta.url), path7.resolve(getRootDir(), "test/lib.test.mo"));
|
|
1378
|
-
console.log(chalk6.green("Created"), path7.relative(getRootDir(), "test/lib.test.mo"));
|
|
1379
|
-
}
|
|
1380
|
-
else if (templateName === "readme") {
|
|
1381
|
-
let dest = path7.resolve(getRootDir(), "README.md");
|
|
1382
|
-
if (fs6.existsSync(dest)) {
|
|
1383
|
-
console.log(chalk6.yellow("README.md already exists"));
|
|
1384
|
-
return;
|
|
1385
|
-
}
|
|
1386
|
-
fs6.copyFileSync(new URL("../templates/README.md", import.meta.url), dest);
|
|
1387
|
-
let config = readConfig();
|
|
1388
|
-
let data = fs6.readFileSync(dest).toString();
|
|
1389
|
-
data = data.replace(/<year>/g, ( /* @__PURE__ */new Date()).getFullYear().toString());
|
|
1390
|
-
if ((_a = config.package) == null ? void 0 : _a.name) {
|
|
1391
|
-
data = data.replace(/<name>/g, config.package.name);
|
|
1392
|
-
data = data.replace(/<import-name>/g, camelCase(config.package.name, { pascalCase: true }));
|
|
1393
|
-
}
|
|
1394
|
-
fs6.writeFileSync(dest, data);
|
|
1395
|
-
console.log(chalk6.green("Created"), path7.relative(getRootDir(), "README.md"));
|
|
1396
|
-
}
|
|
1397
|
-
}
|
|
1398
|
-
// dist/commands/init.js
|
|
1399
|
-
async function init({ yes = false } = {}) {
|
|
1400
|
-
let configFile = path8.join(process.cwd(), "mops.toml");
|
|
1401
|
-
let exists = existsSync2(configFile);
|
|
1402
|
-
if (exists) {
|
|
1403
|
-
console.log(chalk7.yellow("mops.toml already exists"));
|
|
1404
|
-
return;
|
|
1405
|
-
}
|
|
1406
|
-
console.log("Initializing...");
|
|
1407
|
-
let config = {};
|
|
1408
|
-
if (yes) {
|
|
1409
|
-
await applyInit({
|
|
1410
|
-
type: "project",
|
|
1411
|
-
config,
|
|
1412
|
-
setupWorkflow: true,
|
|
1413
|
-
addTest: false,
|
|
1414
|
-
copyrightOwner: ""
|
|
1415
|
-
});
|
|
1416
|
-
return;
|
|
1417
|
-
}
|
|
1418
|
-
let vesselFile = path8.join(process.cwd(), "vessel.dhall");
|
|
1419
|
-
let vesselConfig = { dependencies: [], "dev-dependencies": [] };
|
|
1420
|
-
if (existsSync2(vesselFile)) {
|
|
1421
|
-
console.log("Reading vessel.dhall file");
|
|
1422
|
-
let res = await readVesselConfig(process.cwd(), { cache: false });
|
|
1423
|
-
if (res) {
|
|
1424
|
-
vesselConfig = { ...res };
|
|
1425
|
-
}
|
|
1426
|
-
}
|
|
1427
|
-
if (vesselConfig.dependencies) {
|
|
1428
|
-
let deps = {};
|
|
1429
|
-
deps = {};
|
|
1430
|
-
for (const dep of vesselConfig.dependencies || []) {
|
|
1431
|
-
deps[dep.name] = dep;
|
|
1432
|
-
}
|
|
1433
|
-
if (Object.keys(deps).length) {
|
|
1434
|
-
config.dependencies = deps;
|
|
1435
|
-
}
|
|
1436
|
-
}
|
|
1437
|
-
let promptsConfig = {
|
|
1438
|
-
onCancel() {
|
|
1439
|
-
console.log("aborted");
|
|
1440
|
-
process.exit(0);
|
|
1441
|
-
}
|
|
1442
|
-
};
|
|
1443
|
-
let { type } = await prompts3({
|
|
1444
|
-
type: "select",
|
|
1445
|
-
name: "type",
|
|
1446
|
-
message: "Select type:",
|
|
1447
|
-
choices: [
|
|
1448
|
-
{ title: `Project ${chalk7.dim("(I just want to use mops packages in my project)")}`, value: "project" },
|
|
1449
|
-
{ title: `Package ${chalk7.dim("(I plan to publish this package on mops)")}`, value: "package" }
|
|
1450
|
-
]
|
|
1451
|
-
}, promptsConfig);
|
|
1452
|
-
let addTest = false;
|
|
1453
|
-
let copyrightOwner = "";
|
|
1454
|
-
if (type === "package") {
|
|
1455
|
-
let res = await prompts3([
|
|
1456
|
-
{
|
|
1457
|
-
type: "text",
|
|
1458
|
-
name: "name",
|
|
1459
|
-
message: "Enter package name:",
|
|
1460
|
-
initial: ""
|
|
1461
|
-
},
|
|
1462
|
-
{
|
|
1463
|
-
type: "text",
|
|
1464
|
-
name: "description",
|
|
1465
|
-
message: "Enter package description:",
|
|
1466
|
-
initial: ""
|
|
1467
|
-
},
|
|
1468
|
-
{
|
|
1469
|
-
type: "text",
|
|
1470
|
-
name: "repository",
|
|
1471
|
-
message: "Enter package repository url:",
|
|
1472
|
-
initial: ""
|
|
1473
|
-
},
|
|
1474
|
-
{
|
|
1475
|
-
type: "text",
|
|
1476
|
-
name: "keywords",
|
|
1477
|
-
message: "Enter keywords separated by spaces:",
|
|
1478
|
-
initial: ""
|
|
1479
|
-
},
|
|
1480
|
-
{
|
|
1481
|
-
type: "select",
|
|
1482
|
-
name: "license",
|
|
1483
|
-
message: "Choose a license:",
|
|
1484
|
-
choices: [
|
|
1485
|
-
{ title: "MIT", value: "MIT" },
|
|
1486
|
-
{ title: "Apache-2.0", value: "Apache-2.0" }
|
|
1487
|
-
],
|
|
1488
|
-
initial: 0
|
|
1489
|
-
},
|
|
1490
|
-
{
|
|
1491
|
-
type: "text",
|
|
1492
|
-
name: "copyrightOwner",
|
|
1493
|
-
message: "Enter license copyright owner:",
|
|
1494
|
-
initial: ""
|
|
1495
|
-
},
|
|
1496
|
-
{
|
|
1497
|
-
type: "confirm",
|
|
1498
|
-
name: "addTest",
|
|
1499
|
-
message: `Add example test file? ${chalk7.dim("(test/lib.test.mo)")}`,
|
|
1500
|
-
initial: true
|
|
1501
|
-
}
|
|
1502
|
-
], promptsConfig);
|
|
1503
|
-
config.package = {
|
|
1504
|
-
name: (res.name || "").trim(),
|
|
1505
|
-
version: "1.0.0",
|
|
1506
|
-
description: (res.description || "").trim(),
|
|
1507
|
-
repository: (res.repository || "").trim(),
|
|
1508
|
-
keywords: [...new Set(res.keywords.split(" ").filter(Boolean))],
|
|
1509
|
-
license: (res.license || "").trim()
|
|
1510
|
-
};
|
|
1511
|
-
addTest = res.addTest;
|
|
1512
|
-
copyrightOwner = res.copyrightOwner;
|
|
1513
|
-
}
|
|
1514
|
-
let { setupWorkflow } = await prompts3({
|
|
1515
|
-
type: "confirm",
|
|
1516
|
-
name: "setupWorkflow",
|
|
1517
|
-
message: `Setup GitHub workflow? ${chalk7.dim("(run `mops test` on push)")}`,
|
|
1518
|
-
initial: true
|
|
1519
|
-
}, promptsConfig);
|
|
1520
|
-
await applyInit({
|
|
1521
|
-
type,
|
|
1522
|
-
config,
|
|
1523
|
-
setupWorkflow,
|
|
1524
|
-
addTest,
|
|
1525
|
-
copyrightOwner
|
|
1526
|
-
});
|
|
1527
|
-
}
|
|
1528
|
-
async function applyInit({ type, config, setupWorkflow, addTest, copyrightOwner }) {
|
|
1529
|
-
var _a, _b;
|
|
1530
|
-
let dfxJson2 = path8.resolve(process.cwd(), "dfx.json");
|
|
1531
|
-
let dfxJsonData;
|
|
1532
|
-
if (existsSync2(dfxJson2)) {
|
|
1533
|
-
let dfxJsonText = readFileSync2(dfxJson2).toString();
|
|
1534
|
-
try {
|
|
1535
|
-
dfxJsonData = JSON.parse(dfxJsonText);
|
|
1536
|
-
}
|
|
1537
|
-
catch (err) {
|
|
1538
|
-
console.log(chalk7.yellow("Failed to parse dfx.json"));
|
|
1539
|
-
}
|
|
1540
|
-
if (dfxJsonData) {
|
|
1541
|
-
console.log("Setting packtool in dfx.json...");
|
|
1542
|
-
dfxJsonData.defaults = dfxJsonData.defaults || {};
|
|
1543
|
-
dfxJsonData.defaults.build = dfxJsonData.defaults.build || {};
|
|
1544
|
-
if (dfxJsonData.defaults.build.packtool !== "mops sources") {
|
|
1545
|
-
dfxJsonData.defaults.build.packtool = "mops sources";
|
|
1546
|
-
let indent = ((_a = dfxJsonText.match(/([ \t]+)"/)) == null ? void 0 : _a[1]) || " ";
|
|
1547
|
-
writeFileSync2(path8.join(process.cwd(), "dfx.json"), JSON.stringify(dfxJsonData, null, indent));
|
|
1548
|
-
console.log(chalk7.green('packtool set to "mops sources"'));
|
|
1549
|
-
}
|
|
1550
|
-
}
|
|
1551
|
-
}
|
|
1552
|
-
if (type === "project") {
|
|
1553
|
-
let compatible = await checkApiCompatibility();
|
|
1554
|
-
if (!compatible) {
|
|
1555
|
-
return;
|
|
1556
|
-
}
|
|
1557
|
-
let dfxVersion = (dfxJsonData == null ? void 0 : dfxJsonData.dfx) || "";
|
|
1558
|
-
if (!dfxVersion) {
|
|
1559
|
-
try {
|
|
1560
|
-
let res = execSync("dfx --version").toString();
|
|
1561
|
-
let match = res.match(/\d+\.\d+\.\d+/);
|
|
1562
|
-
if (match) {
|
|
1563
|
-
dfxVersion = match[0];
|
|
1564
|
-
}
|
|
1565
|
-
}
|
|
1566
|
-
catch {
|
|
1567
|
-
}
|
|
1568
|
-
}
|
|
1569
|
-
console.log(`Fetching default packages for dfx ${dfxVersion}...`);
|
|
1570
|
-
let actor = await mainActor();
|
|
1571
|
-
let defaultPackages = await actor.getDefaultPackages(dfxVersion);
|
|
1572
|
-
if (!config.dependencies) {
|
|
1573
|
-
config.dependencies = {};
|
|
1574
|
-
}
|
|
1575
|
-
for (let [name, version] of defaultPackages) {
|
|
1576
|
-
config.dependencies[name] = { name, version };
|
|
1577
|
-
}
|
|
1578
|
-
}
|
|
1579
|
-
let configFile = path8.join(process.cwd(), "mops.toml");
|
|
1580
|
-
writeConfig(config, configFile);
|
|
1581
|
-
console.log(chalk7.green("Created"), "mops.toml");
|
|
1582
|
-
if (type === "package" && !existsSync2(path8.join(process.cwd(), "src"))) {
|
|
1583
|
-
await template("lib.mo");
|
|
1584
|
-
}
|
|
1585
|
-
if (addTest && !existsSync2(path8.join(process.cwd(), "test"))) {
|
|
1586
|
-
await template("lib.test.mo");
|
|
1587
|
-
}
|
|
1588
|
-
if ((_b = config.package) == null ? void 0 : _b.license) {
|
|
1589
|
-
await template(`license:${config.package.license}`, { copyrightOwner });
|
|
1590
|
-
}
|
|
1591
|
-
if (type === "package") {
|
|
1592
|
-
await template("readme");
|
|
1593
|
-
}
|
|
1594
|
-
if (setupWorkflow) {
|
|
1595
|
-
await template("github-workflow:mops-test");
|
|
1596
|
-
}
|
|
1597
|
-
{
|
|
1598
|
-
let gitignore = path8.join(process.cwd(), ".gitignore");
|
|
1599
|
-
let gitignoreData = existsSync2(gitignore) ? readFileSync2(gitignore).toString() : "";
|
|
1600
|
-
let lf = gitignoreData.endsWith("\n") ? "\n" : "";
|
|
1601
|
-
if (!gitignoreData.includes(".mops")) {
|
|
1602
|
-
writeFileSync2(gitignore, `${gitignoreData}
|
|
1603
|
-
.mops${lf}`.trimStart());
|
|
1604
|
-
console.log(chalk7.green("Added"), ".mops to .gitignore");
|
|
1605
|
-
}
|
|
1606
|
-
}
|
|
1607
|
-
if (Object.keys(config.dependencies || {}).length) {
|
|
1608
|
-
console.log("Installing dependencies...");
|
|
1609
|
-
await installAll({ verbose: true });
|
|
1610
|
-
}
|
|
1611
|
-
console.log(chalk7.green("Done!"));
|
|
1612
|
-
}
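applyInit above also patches dfx.json so that dfx resolves Motoko packages through mops. The part it writes reduces to the following (shown as the JavaScript value the code builds; all other dfx.json fields are preserved as-is):

```js
// The defaults section applyInit writes into dfx.json (other fields untouched)
const dfxDefaults = {
  defaults: {
    build: {
      packtool: "mops sources",
    },
  },
};
```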
1613 - // dist/commands/publish.js
1614 - import fs10 from "node:fs";
1615 - import path13 from "node:path";
1616 - import chalk15 from "chalk";
1617 - import logUpdate5 from "log-update";
1618 - import { globbySync } from "globby";
1619 - import { minimatch } from "minimatch";
1620 - import prompts4 from "prompts";
1621 - import { fromMarkdown } from "mdast-util-from-markdown";
1622 - import { toMarkdown } from "mdast-util-to-markdown";
1623 - // dist/commands/docs.js
1624 - import { spawn, execSync as execSync2 } from "node:child_process";
1625 - import fs7 from "node:fs";
1626 - import path9 from "node:path";
1627 - import chalk8 from "chalk";
1628 - import { globSync } from "glob";
1629 - import { deleteSync as deleteSync2 } from "del";
1630 - import tar from "tar";
1631 - import streamToPromise from "stream-to-promise";
1632 - var moDoc;
1633 - async function docs({ silent = false } = {}) {
1634 - let rootDir = getRootDir();
1635 - let docsDir = path9.join(rootDir, ".mops/.docs");
1636 - let docsDirRelative = path9.relative(process.cwd(), docsDir);
1637 - deleteSync2([docsDir], { force: true });
1638 - if (process.env.DFX_MOC_PATH && process.env.DFX_MOC_PATH.includes("mocv/versions")) {
1639 - moDoc = process.env.DFX_MOC_PATH.replace(/\/moc$/, "/mo-doc");
1640 - }
1641 - else {
1642 - moDoc = execSync2("dfx cache show").toString().trim() + "/mo-doc";
1643 - }
1644 - await new Promise((resolve) => {
1645 - let proc = spawn(moDoc, [`--source=${path9.join(rootDir, "src")}`, `--output=${docsDirRelative}`, "--format=adoc"]);
1646 - proc.stdout.on("data", (data) => {
1647 - let text = data.toString().trim();
1648 - let failedText = "Failed to extract documentation";
1649 - if (text.includes(failedText)) {
1650 - console.log(text.replaceAll(failedText, chalk8.yellow("Warning: ") + failedText));
1651 - }
1652 - silent || console.log("stdout", text);
1653 - });
1654 - let stderr = "";
1655 - proc.stderr.on("data", (data) => {
1656 - let text = data.toString().trim();
1657 - if (text.includes("syntax error")) {
1658 - console.log(chalk8.red("Error:"), text);
1659 - process.exit(1);
1660 - }
1661 - if (text.includes("No such file or directory") || text.includes("Couldn't find a module expression")) {
1662 - silent || console.log(text);
1663 - return;
1664 - }
1665 - stderr += text;
1666 - });
1667 - proc.on("exit", (code) => {
1668 - if (code === 2 && !stderr) {
1669 - resolve();
1670 - return;
1671 - }
1672 - if (code !== 0) {
1673 - console.log(chalk8.red("Error:"), code, stderr);
1674 - process.exit(1);
1675 - }
1676 - resolve();
1677 - });
1678 - });
1679 - let ignore4 = [
1680 - `${docsDir}/**/*.test.adoc`,
1681 - `${docsDir}/test/**/*`
1682 - ];
1683 - let files = globSync(`${docsDir}/**/*.adoc`, { ignore: ignore4 }).map((f) => path9.relative(docsDir, f));
1684 - files.sort();
1685 - if (files.length) {
1686 - let stream = tar.create({
1687 - cwd: docsDir,
1688 - gzip: true,
1689 - portable: true
1690 - }, files).pipe(fs7.createWriteStream(path9.join(docsDir, "docs.tgz")));
1691 - await streamToPromise(stream);
1692 - }
1693 - silent || console.log(`${chalk8.green("Documentation generated")} at ${docsDirRelative}`);
1694 - }
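docs() above packs the generated .adoc files into .mops/.docs/docs.tgz using the tar package it imports. A minimal sketch of unpacking that archive with the same package (the output directory name is an arbitrary choice, and the sketch assumes docs() has already run):

```js
// Sketch: extract the docs archive produced by docs() above (assumes it exists)
import fs from "node:fs";
import tar from "tar";

fs.mkdirSync("docs-out", { recursive: true });
await tar.x({ file: ".mops/.docs/docs.tgz", cwd: "docs-out" });
```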
1695 - // dist/commands/test/test.js
1696 - import { spawn as spawn2, execSync as execSync3 } from "node:child_process";
1697 - import path12 from "node:path";
1698 - import fs9 from "node:fs";
1699 - import os from "node:os";
1700 - import chalk14 from "chalk";
1701 - import { globSync as globSync2 } from "glob";
1702 - import chokidar from "chokidar";
1703 - import debounce from "debounce";
1704 - // dist/commands/sources.js
1705 - import path10 from "node:path";
1706 - import fs8 from "node:fs";
1707 - async function sources({ verbose = false, cwd = process.cwd() } = {}) {
1708 - if (!checkConfigFile()) {
1709 - return [];
1710 - }
1711 - let resolvedPackages = await resolvePackages({ verbose });
1712 - return Object.entries(resolvedPackages).map(([name, version]) => {
1713 - var _a;
1714 - let depType = getDependencyType(version);
1715 - let pkgDir;
1716 - if (depType === "local") {
1717 - pkgDir = path10.relative(cwd, version);
1718 - }
1719 - else if (depType === "github") {
1720 - pkgDir = path10.relative(cwd, formatGithubDir(name, version));
1721 - }
1722 - else if (depType === "mops") {
1723 - pkgDir = path10.relative(cwd, formatDir(name, version));
1724 - }
1725 - else {
1726 - return;
1727 - }
1728 - let pkgBaseDir;
1729 - if (fs8.existsSync(path10.join(pkgDir, "mops.toml"))) {
1730 - let config = readConfig(path10.join(pkgDir, "mops.toml"));
1731 - pkgBaseDir = path10.join(pkgDir, ((_a = config.package) == null ? void 0 : _a.baseDir) || "src");
1732 - }
1733 - else {
1734 - pkgBaseDir = path10.join(pkgDir, "src");
1735 - }
1736 - if (depType === "local" && !fs8.existsSync(pkgBaseDir)) {
1737 - pkgBaseDir = pkgDir;
1738 - }
1739 - return `--package ${name} ${pkgBaseDir}`;
1740 - });
1741 - }
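Each entry sources() returns is a `--package <name> <dir>` flag for the Motoko compiler; dfx passes them automatically once packtool is set to "mops sources", and the test runner further below splits them into moc arguments the same way. A sketch of that hand-off (package names, versions, and paths are illustrative):

```js
// Sketch: turning sources() output into moc arguments (illustrative values)
import { spawnSync } from "node:child_process";

const sourcesArr = [
  "--package base .mops/base@0.11.1/src",
  "--package test .mops/test@2.0.0/src",
];
// Same splitting as the test runner below: one flat argv list for moc
const args = [...sourcesArr.join(" ").split(" "), "src/main.mo"];
spawnSync("moc", args, { stdio: "inherit" });
```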
|
|
1742
|
-
// dist/commands/test/mmf1.js
|
|
1743
|
-
import chalk9 from "chalk";
|
|
1744
|
-
var MMF1 = class {
|
|
1745
|
-
constructor(srategy, file) {
|
|
1746
|
-
this.stack = [];
|
|
1747
|
-
this.currSuite = "";
|
|
1748
|
-
this.failed = 0;
|
|
1749
|
-
this.passed = 0;
|
|
1750
|
-
this.skipped = 0;
|
|
1751
|
-
this.output = [];
|
|
1752
|
-
this.nestingSymbol = " \u203A ";
|
|
1753
|
-
this.passedNamesFlat = [];
|
|
1754
|
-
this.srategy = srategy;
|
|
1755
|
-
this.file = file;
|
|
1756
|
-
}
|
|
1757
|
-
_log(type, ...args) {
|
|
1758
|
-
if (this.srategy === "store") {
|
|
1759
|
-
this.output.push({
|
|
1760
|
-
type,
|
|
1761
|
-
message: args.join(" ")
|
|
1762
|
-
});
|
|
1763
|
-
}
|
|
1764
|
-
else if (this.srategy === "print") {
|
|
1765
|
-
console.log(...args);
|
|
1766
|
-
}
|
|
1767
|
-
}
|
|
1768
|
-
flush(messageType) {
|
|
1769
|
-
for (let out of this.output) {
|
|
1770
|
-
if (!messageType || out.type === messageType) {
|
|
1771
|
-
console.log(out.message);
|
|
1772
|
-
}
|
|
1773
|
-
}
|
|
1774
|
-
this.output = [];
|
|
1775
|
-
}
|
|
1776
|
-
parseLine(line) {
|
|
1777
|
-
if (line.startsWith("mops:1:start ")) {
|
|
1778
|
-
this._testStart(line.split("mops:1:start ")[1] || "");
|
|
1779
|
-
}
|
|
1780
|
-
else if (line.startsWith("mops:1:end ")) {
|
|
1781
|
-
this._testEnd(line.split("mops:1:end ")[1] || "");
|
|
1782
|
-
}
|
|
1783
|
-
else if (line.startsWith("mops:1:skip ")) {
|
|
1784
|
-
this._testSkip(line.split("mops:1:skip ")[1] || "");
|
|
1785
|
-
}
|
|
1786
|
-
else if (line.startsWith("mops:")) {
|
|
1787
|
-
}
|
|
1788
|
-
else {
|
|
1789
|
-
this._log("stdout", " ".repeat(this.stack.length * 2), chalk9.gray("stdout"), line);
|
|
1790
|
-
}
|
|
1791
|
-
}
|
|
1792
|
-
_testStart(name) {
|
|
1793
|
-
let suite = this.stack[this.stack.length - 1];
|
|
1794
|
-
if (suite) {
|
|
1795
|
-
if (this.currSuite !== suite) {
|
|
1796
|
-
this.currSuite = suite;
|
|
1797
|
-
this._log("suite", " ".repeat((this.stack.length - 1) * 2), chalk9.gray("\u2022") + "", suite);
|
|
1798
|
-
}
|
|
1799
|
-
}
|
|
1800
|
-
this.stack.push(name);
|
|
1801
|
-
}
|
|
1802
|
-
_testEnd(name) {
|
|
1803
|
-
if (name !== this.stack.pop()) {
|
|
1804
|
-
throw "mmf1._testEnd: start and end test mismatch";
|
|
1805
|
-
}
|
|
1806
|
-
this._status(name, "pass");
|
|
1807
|
-
}
|
|
1808
|
-
_testSkip(name) {
|
|
1809
|
-
this._status(name, "skip");
|
|
1810
|
-
}
|
|
1811
|
-
_status(name, status) {
|
|
1812
|
-
if (status === "pass") {
|
|
1813
|
-
if (name === this.currSuite) {
|
|
1814
|
-
return;
|
|
1815
|
-
}
|
|
1816
|
-
this.passed++;
|
|
1817
|
-
this._log(status, " ".repeat(this.stack.length * 2), chalk9.green("\u2713"), name);
|
|
1818
|
-
this.passedNamesFlat.push([this.file, ...this.stack, name].join(this.nestingSymbol));
|
|
1819
|
-
}
|
|
1820
|
-
else if (status === "fail") {
|
|
1821
|
-
this.failed++;
|
|
1822
|
-
this._log(status, " ".repeat(this.stack.length * 2), chalk9.red("\u2716"), name);
|
|
1823
|
-
}
|
|
1824
|
-
else if (status === "skip") {
|
|
1825
|
-
this.skipped++;
|
|
1826
|
-
this._log(status, " ".repeat(this.stack.length * 2), chalk9.yellow("\u2212"), name);
|
|
1827
|
-
}
|
|
1828
|
-
}
|
|
1829
|
-
fail(stderr) {
|
|
1830
|
-
let name = this.stack.pop() || "";
|
|
1831
|
-
this._status(name, "fail");
|
|
1832
|
-
this._log("fail", " ".repeat(this.stack.length * 2), chalk9.red("FAIL"), stderr);
|
|
1833
|
-
}
|
|
1834
|
-
pass() {
|
|
1835
|
-
let name = this.stack.pop();
|
|
1836
|
-
if (name) {
|
|
1837
|
-
this._status(name, "pass");
|
|
1838
|
-
}
|
|
1839
|
-
this._log("pass", " ".repeat(this.stack.length * 2), chalk9.green("PASS"));
|
|
1840
|
-
}
|
|
1841
|
-
};
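MMF1 above parses a small line protocol emitted by the test runner ("mops:1:start", "mops:1:end", "mops:1:skip"); any other stdout line is stored as plain output. A sketch of feeding such lines through the parser (the suite and test names are made up, and MMF1 is internal to this bundle, shown only to illustrate the protocol):

```js
// Sketch: how MMF1 consumes the mops:1 protocol lines (names are made up)
const mmf = new MMF1("store", "test/lib.test.mo");
[
  "mops:1:start my suite",
  "mops:1:start adds numbers",
  "mops:1:end adds numbers",
  "mops:1:end my suite",
].forEach((line) => mmf.parseLine(line));
mmf.flush(); // prints the stored suite marker and the passing test line
console.log(mmf.passed); // 1
```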
|
|
1842
|
-
// dist/commands/test/utils.js
|
|
1843
|
-
import path11 from "node:path";
|
|
1844
|
-
function absToRel(p) {
|
|
1845
|
-
let rootDir = getRootDir();
|
|
1846
|
-
return path11.relative(rootDir, path11.resolve(p));
|
|
1847
|
-
}
|
|
1848
|
-
// dist/commands/test/reporters/verbose-reporter.js
|
|
1849
|
-
import chalk10 from "chalk";
|
|
1850
|
-
var __classPrivateFieldGet = function (receiver, state, kind, f) {
|
|
1851
|
-
if (kind === "a" && !f)
|
|
1852
|
-
throw new TypeError("Private accessor was defined without a getter");
|
|
1853
|
-
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
|
|
1854
|
-
throw new TypeError("Cannot read private member from an object whose class did not declare it");
|
|
1855
|
-
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
|
|
1856
|
-
};
|
|
1857
|
-
var __classPrivateFieldSet = function (receiver, state, value, kind, f) {
|
|
1858
|
-
if (kind === "m")
|
|
1859
|
-
throw new TypeError("Private method is not writable");
|
|
1860
|
-
if (kind === "a" && !f)
|
|
1861
|
-
throw new TypeError("Private accessor was defined without a setter");
|
|
1862
|
-
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
|
|
1863
|
-
throw new TypeError("Cannot write private member to an object whose class did not declare it");
|
|
1864
|
-
return kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value), value;
|
|
1865
|
-
};
|
|
1866
|
-
var _VerboseReporter_startTime;
|
|
1867
|
-
var _VerboseReporter_curFileIndex;
|
|
1868
|
-
var VerboseReporter = class {
|
|
1869
|
-
constructor() {
|
|
1870
|
-
this.passed = 0;
|
|
1871
|
-
this.failed = 0;
|
|
1872
|
-
this.skipped = 0;
|
|
1873
|
-
_VerboseReporter_startTime.set(this, Date.now());
|
|
1874
|
-
_VerboseReporter_curFileIndex.set(this, 0);
|
|
1875
|
-
}
|
|
1876
|
-
addFiles(files) {
|
|
1877
|
-
console.log("Test files:");
|
|
1878
|
-
for (let file of files) {
|
|
1879
|
-
console.log(chalk10.gray(`\u2022 ${absToRel(file)}`));
|
|
1880
|
-
}
|
|
1881
|
-
console.log("=".repeat(50));
|
|
1882
|
-
}
|
|
1883
|
-
addRun(file, mmf, state, wasiMode) {
|
|
1884
|
-
state.then(() => {
|
|
1885
|
-
var _a, _b;
|
|
1886
|
-
this.passed += mmf.passed;
|
|
1887
|
-
this.failed += mmf.failed;
|
|
1888
|
-
this.skipped += mmf.skipped;
|
|
1889
|
-
if (mmf.passed === 0 && mmf.failed === 0) {
|
|
1890
|
-
this.passed++;
|
|
1891
|
-
}
|
|
1892
|
-
(__classPrivateFieldSet(this, _VerboseReporter_curFileIndex, (_b = __classPrivateFieldGet(this, _VerboseReporter_curFileIndex, "f"), _a = _b++, _b), "f"), _a) && console.log("-".repeat(50));
|
|
1893
|
-
console.log(`Running ${chalk10.gray(absToRel(file))} ${wasiMode ? chalk10.gray("(wasi)") : ""}`);
|
|
1894
|
-
mmf.flush();
|
|
1895
|
-
});
|
|
1896
|
-
}
|
|
1897
|
-
done() {
|
|
1898
|
-
console.log("=".repeat(50));
|
|
1899
|
-
if (this.failed) {
|
|
1900
|
-
console.log(chalk10.redBright("Tests failed"));
|
|
1901
|
-
}
|
|
1902
|
-
else {
|
|
1903
|
-
console.log(chalk10.greenBright("Tests passed"));
|
|
1904
|
-
}
|
|
1905
|
-
console.log(`Done in ${chalk10.gray(((Date.now() - __classPrivateFieldGet(this, _VerboseReporter_startTime, "f")) / 1e3).toFixed(2) + "s")}, passed ${chalk10.greenBright(this.passed)}` + (this.skipped ? `, skipped ${chalk10[this.skipped ? "yellowBright" : "gray"](this.skipped)}` : "") + (this.failed ? `, failed ${chalk10[this.failed ? "redBright" : "gray"](this.failed)}` : ""));
|
|
1906
|
-
return this.failed === 0;
|
|
1907
|
-
}
|
|
1908
|
-
};
|
|
1909
|
-
_VerboseReporter_startTime = /* @__PURE__ */ new WeakMap(), _VerboseReporter_curFileIndex = /* @__PURE__ */ new WeakMap();
|
|
1910
|
-
// dist/commands/test/reporters/files-reporter.js
|
|
1911
|
-
import chalk11 from "chalk";
|
|
1912
|
-
var __classPrivateFieldGet2 = function (receiver, state, kind, f) {
|
|
1913
|
-
if (kind === "a" && !f)
|
|
1914
|
-
throw new TypeError("Private accessor was defined without a getter");
|
|
1915
|
-
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
|
|
1916
|
-
throw new TypeError("Cannot read private member from an object whose class did not declare it");
|
|
1917
|
-
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
|
|
1918
|
-
};
|
|
1919
|
-
var _FilesReporter_startTime;
|
|
1920
|
-
var FilesReporter = class {
|
|
1921
|
-
constructor() {
|
|
1922
|
-
this.passed = 0;
|
|
1923
|
-
this.failed = 0;
|
|
1924
|
-
this.skipped = 0;
|
|
1925
|
-
_FilesReporter_startTime.set(this, Date.now());
|
|
1926
|
-
}
|
|
1927
|
-
addFiles(files) {
|
|
1928
|
-
console.log(`Test files: ${files.length}`);
|
|
1929
|
-
console.log("=".repeat(50));
|
|
1930
|
-
}
|
|
1931
|
-
addRun(file, mmf, state, wasiMode) {
|
|
1932
|
-
state.then(() => {
|
|
1933
|
-
this.passed += Number(mmf.failed === 0);
|
|
1934
|
-
this.failed += Number(mmf.failed !== 0);
|
|
1935
|
-
this.skipped += mmf.skipped;
|
|
1936
|
-
if (mmf.failed) {
|
|
1937
|
-
console.log(chalk11.red("\u2716"), absToRel(file));
|
|
1938
|
-
mmf.flush("fail");
|
|
1939
|
-
console.log("-".repeat(50));
|
|
1940
|
-
}
|
|
1941
|
-
else {
|
|
1942
|
-
console.log(`${chalk11.green("\u2713")} ${absToRel(file)} ${wasiMode ? chalk11.gray("(wasi)") : ""}`);
|
|
1943
|
-
}
|
|
1944
|
-
});
|
|
1945
|
-
}
|
|
1946
|
-
done() {
|
|
1947
|
-
console.log("=".repeat(50));
|
|
1948
|
-
if (this.failed) {
|
|
1949
|
-
console.log(chalk11.redBright("Tests failed"));
|
|
1950
|
-
}
|
|
1951
|
-
else {
|
|
1952
|
-
console.log(chalk11.greenBright("Tests passed"));
|
|
1953
|
-
}
|
|
1954
|
-
console.log(`Done in ${chalk11.gray(((Date.now() - __classPrivateFieldGet2(this, _FilesReporter_startTime, "f")) / 1e3).toFixed(2) + "s")}, passed ${chalk11.greenBright(this.passed)} files` + (this.skipped ? `, skipped ${chalk11[this.skipped ? "yellowBright" : "gray"](this.skipped)} cases` : "") + (this.failed ? `, failed ${chalk11[this.failed ? "redBright" : "gray"](this.failed)} files` : ""));
|
|
1955
|
-
return this.failed === 0;
|
|
1956
|
-
}
|
|
1957
|
-
};
|
|
1958
|
-
_FilesReporter_startTime = /* @__PURE__ */ new WeakMap();
|
|
1959
|
-
// dist/commands/test/reporters/compact-reporter.js
|
|
1960
|
-
import chalk12 from "chalk";
|
|
1961
|
-
import logUpdate4 from "log-update";
|
|
1962
|
-
var __classPrivateFieldSet2 = function (receiver, state, value, kind, f) {
|
|
1963
|
-
if (kind === "m")
|
|
1964
|
-
throw new TypeError("Private method is not writable");
|
|
1965
|
-
if (kind === "a" && !f)
|
|
1966
|
-
throw new TypeError("Private accessor was defined without a setter");
|
|
1967
|
-
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
|
|
1968
|
-
throw new TypeError("Cannot write private member to an object whose class did not declare it");
|
|
1969
|
-
return kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value), value;
|
|
1970
|
-
};
|
|
1971
|
-
var __classPrivateFieldGet3 = function (receiver, state, kind, f) {
|
|
1972
|
-
if (kind === "a" && !f)
|
|
1973
|
-
throw new TypeError("Private accessor was defined without a getter");
|
|
1974
|
-
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver))
|
|
1975
|
-
throw new TypeError("Cannot read private member from an object whose class did not declare it");
|
|
1976
|
-
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
|
|
1977
|
-
};
|
|
1978
|
-
var _CompactReporter_instances;
|
|
1979
|
-
var _CompactReporter_allFiles;
|
|
1980
|
-
var _CompactReporter_runningFiles;
|
|
1981
|
-
var _CompactReporter_failedFiles;
|
|
1982
|
-
var _CompactReporter_finishedFiles;
|
|
1983
|
-
var _CompactReporter_startTime;
|
|
1984
|
-
var _CompactReporter_timerId;
|
|
1985
|
-
var _CompactReporter_startTimer;
|
|
1986
|
-
var _CompactReporter_clearTimer;
|
|
1987
|
-
var _CompactReporter_log;
|
|
1988
|
-
var CompactReporter = class {
|
|
1989
|
-
constructor() {
|
|
1990
|
-
_CompactReporter_instances.add(this);
|
|
1991
|
-
this.passed = 0;
|
|
1992
|
-
this.failed = 0;
|
|
1993
|
-
this.skipped = 0;
|
|
1994
|
-
this.passedFiles = 0;
|
|
1995
|
-
this.failedFiles = 0;
|
|
1996
|
-
_CompactReporter_allFiles.set(this, /* @__PURE__ */ new Set());
|
|
1997
|
-
_CompactReporter_runningFiles.set(this, /* @__PURE__ */ new Set());
|
|
1998
|
-
_CompactReporter_failedFiles.set(this, /* @__PURE__ */ new Set());
|
|
1999
|
-
_CompactReporter_finishedFiles.set(this, /* @__PURE__ */ new Set());
|
|
2000
|
-
_CompactReporter_startTime.set(this, Date.now());
|
|
2001
|
-
_CompactReporter_timerId.set(this, null);
|
|
2002
|
-
}
|
|
2003
|
-
addFiles(files) {
|
|
2004
|
-
__classPrivateFieldSet2(this, _CompactReporter_allFiles, new Set(files), "f");
|
|
2005
|
-
__classPrivateFieldGet3(this, _CompactReporter_instances, "m", _CompactReporter_log).call(this);
|
|
2006
|
-
__classPrivateFieldGet3(this, _CompactReporter_instances, "m", _CompactReporter_startTimer).call(this);
|
|
2007
|
-
}
|
|
2008
|
-
addRun(file, mmf, state, _wasiMode) {
|
|
2009
|
-
__classPrivateFieldGet3(this, _CompactReporter_runningFiles, "f").add(file);
|
|
2010
|
-
__classPrivateFieldGet3(this, _CompactReporter_instances, "m", _CompactReporter_log).call(this);
|
|
2011
|
-
state.then(() => {
|
|
2012
|
-
this.passed += mmf.passed;
|
|
2013
|
-
this.failed += mmf.failed;
|
|
2014
|
-
this.skipped += mmf.skipped;
|
|
2015
|
-
if (mmf.passed === 0 && mmf.failed === 0) {
|
|
2016
|
-
this.passed++;
|
|
2017
|
-
}
|
|
2018
|
-
this.passedFiles += Number(mmf.failed === 0);
|
|
2019
|
-
this.failedFiles += Number(mmf.failed !== 0);
|
|
2020
|
-
if (mmf.failed) {
|
|
2021
|
-
__classPrivateFieldGet3(this, _CompactReporter_failedFiles, "f").add(file);
|
|
2022
|
-
logUpdate4.clear();
|
|
2023
|
-
console.log(chalk12.red("\u2716"), absToRel(file));
|
|
2024
|
-
mmf.flush("fail");
|
|
2025
|
-
console.log("-".repeat(50));
|
|
2026
|
-
}
|
|
2027
|
-
__classPrivateFieldGet3(this, _CompactReporter_runningFiles, "f").delete(file);
|
|
2028
|
-
__classPrivateFieldGet3(this, _CompactReporter_finishedFiles, "f").add(file);
|
|
2029
|
-
__classPrivateFieldGet3(this, _CompactReporter_instances, "m", _CompactReporter_log).call(this);
|
|
2030
|
-
});
|
|
2031
|
-
}
|
|
2032
|
-
done() {
|
|
2033
|
-
__classPrivateFieldGet3(this, _CompactReporter_instances, "m", _CompactReporter_log).call(this);
|
|
2034
|
-
logUpdate4.done();
|
|
2035
|
-
__classPrivateFieldGet3(this, _CompactReporter_instances, "m", _CompactReporter_clearTimer).call(this);
|
|
2036
|
-
return this.failed === 0;
|
|
2037
|
-
}
|
|
2038
|
-
};
|
|
2039
|
-
_CompactReporter_allFiles = /* @__PURE__ */ new WeakMap(), _CompactReporter_runningFiles = /* @__PURE__ */ new WeakMap(), _CompactReporter_failedFiles = /* @__PURE__ */ new WeakMap(), _CompactReporter_finishedFiles = /* @__PURE__ */ new WeakMap(), _CompactReporter_startTime = /* @__PURE__ */ new WeakMap(), _CompactReporter_timerId = /* @__PURE__ */ new WeakMap(), _CompactReporter_instances = /* @__PURE__ */ new WeakSet(), _CompactReporter_startTimer = function _CompactReporter_startTimer2() {
|
|
2040
|
-
__classPrivateFieldSet2(this, _CompactReporter_timerId, setInterval(() => __classPrivateFieldGet3(this, _CompactReporter_instances, "m", _CompactReporter_log).call(this), 55), "f");
|
|
2041
|
-
}, _CompactReporter_clearTimer = function _CompactReporter_clearTimer2() {
|
|
2042
|
-
if (__classPrivateFieldGet3(this, _CompactReporter_timerId, "f")) {
|
|
2043
|
-
clearInterval(__classPrivateFieldGet3(this, _CompactReporter_timerId, "f"));
|
|
2044
|
-
}
|
|
2045
|
-
}, _CompactReporter_log = function _CompactReporter_log2() {
|
|
2046
|
-
let res = [];
|
|
2047
|
-
let i = 0;
|
|
2048
|
-
for (let file of __classPrivateFieldGet3(this, _CompactReporter_allFiles, "f")) {
|
|
2049
|
-
if (__classPrivateFieldGet3(this, _CompactReporter_runningFiles, "f").has(file)) {
|
|
2050
|
-
res[Number(i)] = ".";
|
|
2051
|
-
}
|
|
2052
|
-
else if (__classPrivateFieldGet3(this, _CompactReporter_finishedFiles, "f").has(file)) {
|
|
2053
|
-
res[Number(i)] = __classPrivateFieldGet3(this, _CompactReporter_failedFiles, "f").has(file) ? chalk12.red(":") : ":";
|
|
2054
|
-
}
|
|
2055
|
-
else {
|
|
2056
|
-
res[Number(i)] = " ";
|
|
2057
|
-
}
|
|
2058
|
-
i++;
|
|
2059
|
-
}
|
|
2060
|
-
let output = `[${res.join("")}]
|
|
2061
|
-
${chalk12.gray(((Date.now() - __classPrivateFieldGet3(this, _CompactReporter_startTime, "f")) / 1e3).toFixed(2) + "s")}, total ${__classPrivateFieldGet3(this, _CompactReporter_allFiles, "f").size} files, passed ${chalk12.greenBright(this.passedFiles)} files` + (this.skipped ? `, skipped ${chalk12[this.skipped ? "yellowBright" : "gray"](this.skipped)} cases` : "") + (this.failed ? `, failed ${chalk12[this.failed ? "redBright" : "gray"](this.failed)} cases` : "");
|
|
2062
|
-
logUpdate4(output);
|
|
2063
|
-
};
|
|
2064
|
-
// dist/commands/test/reporters/silent-reporter.js
|
|
2065
|
-
import chalk13 from "chalk";
|
|
2066
|
-
var SilentReporter = class {
|
|
2067
|
-
constructor() {
|
|
2068
|
-
this.passed = 0;
|
|
2069
|
-
this.failed = 0;
|
|
2070
|
-
this.skipped = 0;
|
|
2071
|
-
this.passedFiles = 0;
|
|
2072
|
-
this.failedFiles = 0;
|
|
2073
|
-
this.passedNamesFlat = [];
|
|
2074
|
-
}
|
|
2075
|
-
addFiles(_files) {
|
|
2076
|
-
}
|
|
2077
|
-
addRun(file, mmf, state, _wasiMode) {
|
|
2078
|
-
state.then(() => {
|
|
2079
|
-
this.passed += mmf.passed;
|
|
2080
|
-
this.failed += mmf.failed;
|
|
2081
|
-
this.skipped += mmf.skipped;
|
|
2082
|
-
this.passedNamesFlat = [...this.passedNamesFlat, ...mmf.passedNamesFlat];
|
|
2083
|
-
if (mmf.passed === 0 && mmf.failed === 0) {
|
|
2084
|
-
this.passed++;
|
|
2085
|
-
this.passedNamesFlat.push(absToRel(file));
|
|
2086
|
-
}
|
|
2087
|
-
this.passedFiles += Number(mmf.failed === 0);
|
|
2088
|
-
this.failedFiles += Number(mmf.failed !== 0);
|
|
2089
|
-
if (mmf.failed) {
|
|
2090
|
-
console.log(chalk13.red("\u2716"), absToRel(file));
|
|
2091
|
-
mmf.flush("fail");
|
|
2092
|
-
console.log("-".repeat(50));
|
|
2093
|
-
}
|
|
2094
|
-
});
|
|
2095
|
-
}
|
|
2096
|
-
done() {
|
|
2097
|
-
return this.failed === 0;
|
|
2098
|
-
}
|
|
2099
|
-
};
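The four reporters above (verbose, files, compact, silent) share the same implicit interface: addFiles(files), addRun(file, mmf, state, wasiMode) where state is a promise that resolves when that file's run finishes, and done() returning whether everything passed. A minimal custom reporter against that interface (not part of the package; shown only to make the contract explicit):

```js
// Minimal reporter following the implicit interface used by the reporters above
class CountReporter {
  passed = 0;
  failed = 0;
  addFiles(files) {
    console.log(`running ${files.length} test file(s)`);
  }
  addRun(file, mmf, state, _wasiMode) {
    // mmf counters are only final once the per-file promise resolves
    state.then(() => {
      this.passed += mmf.passed;
      this.failed += mmf.failed;
    });
  }
  done() {
    console.log(`passed ${this.passed}, failed ${this.failed}`);
    return this.failed === 0;
  }
}
```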
|
|
2100
|
-
// dist/commands/test/test.js
|
|
2101
|
-
var ignore = [
|
|
2102
|
-
"**/node_modules/**",
|
|
2103
|
-
"**/.mops/**",
|
|
2104
|
-
"**/.vessel/**",
|
|
2105
|
-
"**/.git/**"
|
|
2106
|
-
];
|
|
2107
|
-
var globConfig = {
|
|
2108
|
-
nocase: true,
|
|
2109
|
-
ignore
|
|
2110
|
-
};
|
|
2111
|
-
async function test(filter = "", { watch = false, reporter = "verbose", mode = "interpreter" } = {}) {
|
|
2112
|
-
let rootDir = getRootDir();
|
|
2113
|
-
if (watch) {
|
|
2114
|
-
let run = debounce(async () => {
|
|
2115
|
-
console.clear();
|
|
2116
|
-
process.stdout.write("\x1Bc");
|
|
2117
|
-
await runAll(reporter, filter, mode);
|
|
2118
|
-
console.log("-".repeat(50));
|
|
2119
|
-
console.log("Waiting for file changes...");
|
|
2120
|
-
console.log(chalk14.gray(`Press ${chalk14.gray("Ctrl+C")} to exit.`));
|
|
2121
|
-
}, 200);
|
|
2122
|
-
let watcher = chokidar.watch([
|
|
2123
|
-
path12.join(rootDir, "**/*.mo"),
|
|
2124
|
-
path12.join(rootDir, "mops.toml")
|
|
2125
|
-
], {
|
|
2126
|
-
ignored: ignore,
|
|
2127
|
-
ignoreInitial: true
|
|
2128
|
-
});
|
|
2129
|
-
watcher.on("all", () => {
|
|
2130
|
-
run();
|
|
2131
|
-
});
|
|
2132
|
-
run();
|
|
2133
|
-
}
|
|
2134
|
-
else {
|
|
2135
|
-
let passed = await runAll(reporter, filter, mode);
|
|
2136
|
-
if (!passed) {
|
|
2137
|
-
process.exit(1);
|
|
2138
|
-
}
|
|
2139
|
-
}
|
|
2140
|
-
}
|
|
2141
|
-
var mocPath = process.env.DFX_MOC_PATH;
|
|
2142
|
-
async function runAll(reporterName = "verbose", filter = "", mode = "interpreter") {
|
|
2143
|
-
let reporter;
|
|
2144
|
-
if (reporterName == "compact") {
|
|
2145
|
-
reporter = new CompactReporter();
|
|
2146
|
-
}
|
|
2147
|
-
else if (reporterName == "files") {
|
|
2148
|
-
reporter = new FilesReporter();
|
|
2149
|
-
}
|
|
2150
|
-
else if (reporterName == "silent") {
|
|
2151
|
-
reporter = new SilentReporter();
|
|
2152
|
-
}
|
|
2153
|
-
else {
|
|
2154
|
-
reporter = new VerboseReporter();
|
|
2155
|
-
}
|
|
2156
|
-
let done = await testWithReporter(reporter, filter, mode);
|
|
2157
|
-
return done;
|
|
2158
|
-
}
|
|
2159
|
-
async function testWithReporter(reporter, filter = "", mode = "interpreter") {
|
|
2160
|
-
let rootDir = getRootDir();
|
|
2161
|
-
let files = [];
|
|
2162
|
-
let libFiles = globSync2("**/test?(s)/lib.mo", globConfig);
|
|
2163
|
-
if (libFiles[0]) {
|
|
2164
|
-
files = [libFiles[0]];
|
|
2165
|
-
}
|
|
2166
|
-
else {
|
|
2167
|
-
let globStr = "**/test?(s)/**/*.test.mo";
|
|
2168
|
-
if (filter) {
|
|
2169
|
-
globStr = `**/test?(s)/**/*${filter}*.mo`;
|
|
2170
|
-
}
|
|
2171
|
-
files = globSync2(path12.join(rootDir, globStr), globConfig);
|
|
2172
|
-
}
|
|
2173
|
-
if (!files.length) {
|
|
2174
|
-
if (filter) {
|
|
2175
|
-
console.log(`No test files found for filter '${filter}'`);
|
|
2176
|
-
return false;
|
|
2177
|
-
}
|
|
2178
|
-
console.log("No test files found");
|
|
2179
|
-
console.log("Put your tests in 'test' directory in *.test.mo files");
|
|
2180
|
-
return false;
|
|
2181
|
-
}
|
|
2182
|
-
reporter.addFiles(files);
|
|
2183
|
-
let sourcesArr = await sources();
|
|
2184
|
-
if (!mocPath) {
|
|
2185
|
-
mocPath = execSync3("dfx cache show").toString().trim() + "/moc";
|
|
2186
|
-
}
|
|
2187
|
-
let wasmDir = `${getRootDir()}/.mops/.test/`;
|
|
2188
|
-
fs9.mkdirSync(wasmDir, { recursive: true });
|
|
2189
|
-
await parallel(os.cpus().length, files, async (file) => {
|
|
2190
|
-
let mmf = new MMF1("store", absToRel(file));
|
|
2191
|
-
let wasiMode = mode === "wasi" || fs9.readFileSync(file, "utf8").startsWith("// @testmode wasi");
|
|
2192
|
-
let promise = new Promise((resolve) => {
|
|
2193
|
-
if (!mocPath) {
|
|
2194
|
-
mocPath = "moc";
|
|
2195
|
-
}
|
|
2196
|
-
let mocArgs = ["--hide-warnings", "--error-detail=2", ...sourcesArr.join(" ").split(" "), file].filter((x) => x);
|
|
2197
|
-
if (wasiMode) {
|
|
2198
|
-
let wasmFile = `${path12.join(wasmDir, path12.parse(file).name)}.wasm`;
|
|
2199
|
-
let buildProc = spawn2(mocPath, [`-o=${wasmFile}`, "-wasi-system-api", ...mocArgs]);
|
|
2200
|
-
pipeMMF(buildProc, mmf).then(async () => {
|
|
2201
|
-
if (mmf.failed > 0) {
|
|
2202
|
-
return;
|
|
2203
|
-
}
|
|
2204
|
-
let proc = spawn2("wasmtime", [
|
|
2205
|
-
"--max-wasm-stack=2000000",
|
|
2206
|
-
"--enable-cranelift-nan-canonicalization",
|
|
2207
|
-
"--wasm-features",
|
|
2208
|
-
"multi-memory,bulk-memory",
|
|
2209
|
-
wasmFile
|
|
2210
|
-
], {
|
|
2211
|
-
env: {
|
|
2212
|
-
...process.env,
|
|
2213
|
-
WASMTIME_NEW_CLI: "0"
|
|
2214
|
-
}
|
|
2215
|
-
});
|
|
2216
|
-
await pipeMMF(proc, mmf);
|
|
2217
|
-
}).finally(() => {
|
|
2218
|
-
fs9.rmSync(wasmFile, { force: true });
|
|
2219
|
-
}).then(resolve);
|
|
2220
|
-
}
|
|
2221
|
-
else {
|
|
2222
|
-
let proc = spawn2(mocPath, ["-r", "-ref-system-api", ...mocArgs]);
|
|
2223
|
-
pipeMMF(proc, mmf).then(resolve);
|
|
2224
|
-
}
|
|
2225
|
-
});
|
|
2226
|
-
reporter.addRun(file, mmf, promise, wasiMode);
|
|
2227
|
-
await promise;
|
|
2228
|
-
});
|
|
2229
|
-
fs9.rmSync(wasmDir, { recursive: true, force: true });
|
|
2230
|
-
return reporter.done();
|
|
2231
|
-
}
|
|
2232
|
-
function pipeMMF(proc, mmf) {
|
|
2233
|
-
return new Promise((resolve) => {
|
|
2234
|
-
proc.stdout.on("data", (data) => {
|
|
2235
|
-
for (let line of data.toString().split("\n")) {
|
|
2236
|
-
line = line.trim();
|
|
2237
|
-
if (line) {
|
|
2238
|
-
mmf.parseLine(line);
|
|
2239
|
-
}
|
|
2240
|
-
}
|
|
2241
|
-
});
|
|
2242
|
-
proc.stderr.on("data", (data) => {
|
|
2243
|
-
let text = data.toString().trim();
|
|
2244
|
-
let failedLine = "";
|
|
2245
|
-
text = text.replace(/([\w+._/-]+):(\d+).(\d+)(-\d+.\d+)/g, (_m0, m1, m2, m3) => {
|
|
2246
|
-
var _a, _b;
|
|
2247
|
-
let res = `${absToRel(m1)}:${m2}:${m3}`;
|
|
2248
|
-
if (!fs9.existsSync(m1)) {
|
|
2249
|
-
return res;
|
|
2250
|
-
}
|
|
2251
|
-
let content = fs9.readFileSync(m1);
|
|
2252
|
-
let lines = content.toString().split("\n") || [];
|
|
2253
|
-
failedLine += chalk14.dim("\n ...");
|
|
2254
|
-
let lineBefore = lines[+m2 - 2];
|
|
2255
|
-
if (lineBefore) {
|
|
2256
|
-
failedLine += chalk14.dim(`
|
|
2257
|
-
${+m2 - 1} | ${lineBefore.replaceAll(" ", " ")}`);
|
|
2258
|
-
}
|
|
2259
|
-
failedLine += `
|
|
2260
|
-
${chalk14.redBright `->`} ${m2} | ${(_a = lines[+m2 - 1]) == null ? void 0 : _a.replaceAll(" ", " ")}`;
|
|
2261
|
-
if (lines.length > +m2) {
|
|
2262
|
-
failedLine += chalk14.dim(`
|
|
2263
|
-
${+m2 + 1} | ${(_b = lines[+m2]) == null ? void 0 : _b.replaceAll(" ", " ")}`);
|
|
2264
|
-
}
|
|
2265
|
-
failedLine += chalk14.dim("\n ...");
|
|
2266
|
-
return res;
|
|
2267
|
-
});
|
|
2268
|
-
if (failedLine) {
|
|
2269
|
-
text += failedLine;
|
|
2270
|
-
}
|
|
2271
|
-
mmf.fail(text);
|
|
2272
|
-
});
|
|
2273
|
-
proc.on("close", (code) => {
|
|
2274
|
-
if (code === 0) {
|
|
2275
|
-
mmf.pass();
|
|
2276
|
-
}
|
|
2277
|
-
else if (code !== 1) {
|
|
2278
|
-
mmf.fail(`unknown exit code: ${code}`);
|
|
2279
|
-
}
|
|
2280
|
-
resolve();
|
|
2281
|
-
});
|
|
2282
|
-
});
|
|
2283
|
-
}
|
|
2284
|
-
// dist/commands/publish.js
|
|
2285
|
-
async function publish(options = {}) {
|
|
2286
|
-
var _a, _b;
|
|
2287
|
-
if (!checkConfigFile()) {
|
|
2288
|
-
return;
|
|
2289
|
-
}
|
|
2290
|
-
let rootDir = getRootDir();
|
|
2291
|
-
let config = readConfig();
|
|
2292
|
-
console.log(`Publishing ${(_a = config.package) == null ? void 0 : _a.name}@${(_b = config.package) == null ? void 0 : _b.version}`);
|
|
2293
|
-
for (let key of Object.keys(config)) {
|
|
2294
|
-
if (!["package", "dependencies", "dev-dependencies", "scripts"].includes(key)) {
|
|
2295
|
-
console.log(chalk15.red("Error: ") + `Unknown config section [${key}]`);
|
|
2296
|
-
process.exit(1);
|
|
2297
|
-
}
|
|
2298
|
-
}
|
|
2299
|
-
if (!config.package) {
|
|
2300
|
-
console.log(chalk15.red("Error: ") + "Please specify [package] section in your mops.toml");
|
|
2301
|
-
process.exit(1);
|
|
2302
|
-
}
|
|
2303
|
-
for (let key of ["name", "version"]) {
|
|
2304
|
-
if (!config.package[key]) {
|
|
2305
|
-
console.log(chalk15.red("Error: ") + `Please specify "${key}" in [config] section in your mops.toml`);
|
|
2306
|
-
process.exit(1);
|
|
2307
|
-
}
|
|
2308
|
-
}
|
|
2309
|
-
for (let key of ["description", "repository"]) {
|
|
2310
|
-
if (!config.package[key]) {
|
|
2311
|
-
let res2 = await prompts4({
|
|
2312
|
-
type: "confirm",
|
|
2313
|
-
name: "ok",
|
|
2314
|
-
message: `Missing recommended config key "${key}", publish anyway?`
|
|
2315
|
-
});
|
|
2316
|
-
if (!res2.ok) {
|
|
2317
|
-
return;
|
|
2318
|
-
}
|
|
2319
|
-
}
|
|
2320
|
-
}
|
|
2321
|
-
let packageKeys = [
|
|
2322
|
-
"name",
|
|
2323
|
-
"version",
|
|
2324
|
-
"keywords",
|
|
2325
|
-
"description",
|
|
2326
|
-
"repository",
|
|
2327
|
-
"documentation",
|
|
2328
|
-
"homepage",
|
|
2329
|
-
"baseDir",
|
|
2330
|
-
"readme",
|
|
2331
|
-
"license",
|
|
2332
|
-
"files",
|
|
2333
|
-
"dfx",
|
|
2334
|
-
"moc",
|
|
2335
|
-
"donation"
|
|
2336
|
-
];
|
|
2337
|
-
for (let key of Object.keys(config.package)) {
|
|
2338
|
-
if (!packageKeys.includes(key)) {
|
|
2339
|
-
console.log(chalk15.red("Error: ") + `Unknown config key 'package.${key}'`);
|
|
2340
|
-
process.exit(1);
|
|
2341
|
-
}
|
|
2342
|
-
}
|
|
2343
|
-
let keysMax = {
|
|
2344
|
-
name: 50,
|
|
2345
|
-
version: 20,
|
|
2346
|
-
keywords: 10,
|
|
2347
|
-
description: 200,
|
|
2348
|
-
repository: 300,
|
|
2349
|
-
documentation: 300,
|
|
2350
|
-
homepage: 300,
|
|
2351
|
-
readme: 100,
|
|
2352
|
-
license: 30,
|
|
2353
|
-
files: 20,
|
|
2354
|
-
dfx: 10,
|
|
2355
|
-
moc: 10,
|
|
2356
|
-
donation: 64,
|
|
2357
|
-
root: 50
|
|
2358
|
-
};
|
|
2359
|
-
for (let [key, max] of Object.entries(keysMax)) {
|
|
2360
|
-
if (config.package[key] && config.package[key].length > max) {
|
|
2361
|
-
console.log(chalk15.red("Error: ") + `package.${key} value max length is ${max}`);
|
|
2362
|
-
process.exit(1);
|
|
2363
|
-
}
|
|
2364
|
-
}
|
|
2365
|
-
if (config.dependencies) {
|
|
2366
|
-
if (Object.keys(config.dependencies).length > 100) {
|
|
2367
|
-
console.log(chalk15.red("Error: ") + "max dependencies is 100");
|
|
2368
|
-
process.exit(1);
|
|
2369
|
-
}
|
|
2370
|
-
for (let dep of Object.values(config.dependencies)) {
|
|
2371
|
-
if (dep.path) {
|
|
2372
|
-
console.log(chalk15.red("Error: ") + "you can't publish packages with local dependencies");
|
|
2373
|
-
process.exit(1);
|
|
2374
|
-
}
|
|
2375
|
-
delete dep.path;
|
|
2376
|
-
}
|
|
2377
|
-
}
|
|
2378
|
-
if (config["dev-dependencies"]) {
|
|
2379
|
-
if (Object.keys(config["dev-dependencies"]).length > 100) {
|
|
2380
|
-
console.log(chalk15.red("Error: ") + "max dev-dependencies is 100");
|
|
2381
|
-
process.exit(1);
|
|
2382
|
-
}
|
|
2383
|
-
for (let dep of Object.values(config["dev-dependencies"])) {
|
|
2384
|
-
if (dep.path) {
|
|
2385
|
-
console.log(chalk15.red("Error: ") + "you can't publish packages with local dev-dependencies");
|
|
2386
|
-
process.exit(1);
|
|
2387
|
-
}
|
|
2388
|
-
delete dep.path;
|
|
2389
|
-
}
|
|
2390
|
-
}
|
|
2391
|
-
if (config.package.keywords) {
|
|
2392
|
-
for (let keyword of config.package.keywords) {
|
|
2393
|
-
if (keyword.length > 20) {
|
|
2394
|
-
console.log(chalk15.red("Error: ") + "max keyword length is 20");
|
|
2395
|
-
return;
|
|
2396
|
-
}
|
|
2397
|
-
}
|
|
2398
|
-
}
|
|
2399
|
-
if (config.package.files) {
|
|
2400
|
-
for (let file of config.package.files) {
|
|
2401
|
-
if (file.startsWith("/") || file.startsWith("../")) {
|
|
2402
|
-
console.log(chalk15.red("Error: ") + "file path cannot start with '/' or '../'");
|
|
2403
|
-
return;
|
|
2404
|
-
}
|
|
2405
|
-
}
|
|
2406
|
-
}
|
|
2407
|
-
let toBackendDep = (dep) => {
|
|
2408
|
-
return {
|
|
2409
|
-
...dep,
|
|
2410
|
-
version: dep.version || "",
|
|
2411
|
-
repo: dep.repo || ""
|
|
2412
|
-
};
|
|
2413
|
-
};
|
|
2414
|
-
let backendPkgConfig = {
|
|
2415
|
-
name: config.package.name,
|
|
2416
|
-
version: config.package.version,
|
|
2417
|
-
keywords: config.package.keywords || [],
|
|
2418
|
-
description: config.package.description || "",
|
|
2419
|
-
repository: config.package.repository || "",
|
|
2420
|
-
homepage: config.package.homepage || "",
|
|
2421
|
-
documentation: config.package.documentation || "",
|
|
2422
|
-
baseDir: "src",
|
|
2423
|
-
readme: "README.md",
|
|
2424
|
-
license: config.package.license || "",
|
|
2425
|
-
dfx: config.package.dfx || "",
|
|
2426
|
-
moc: config.package.moc || "",
|
|
2427
|
-
donation: config.package.donation || "",
|
|
2428
|
-
dependencies: Object.values(config.dependencies || {}).map(toBackendDep),
|
|
2429
|
-
2429 - devDependencies: Object.values(config["dev-dependencies"] || {}).map(toBackendDep),
2430 - scripts: []
2431 - };
2432 - let defaultFiles = [
2433 - "mops.toml",
2434 - "README.md",
2435 - "LICENSE",
2436 - "NOTICE",
2437 - "!.mops/**",
2438 - "!test/**",
2439 - "!tests/**",
2440 - "!**/*.test.mo",
2441 - "!**/*.Test.mo",
2442 - "!bench/**",
2443 - "!benchmark/**",
2444 - "!**/*.bench.mo",
2445 - "!**/*.Bench.mo"
2446 - ];
2447 - let files = config.package.files || ["**/*.mo"];
2448 - files = [...files, ...defaultFiles];
2449 - files = globbySync([...files, ...defaultFiles]);
2450 - let docsFile = path13.join(rootDir, ".mops/.docs/docs.tgz");
2451 - if (options.docs) {
2452 - console.log("Generating documentation...");
2453 - await docs({ silent: true });
2454 - if (fs10.existsSync(docsFile)) {
2455 - files.unshift(docsFile);
2456 - }
2457 - }
2458 - if (!files.includes("mops.toml")) {
2459 - console.log(chalk15.red("Error: ") + " please add mops.toml file");
2460 - process.exit(1);
2461 - }
2462 - if (!files.includes("README.md")) {
2463 - console.log(chalk15.red("Error: ") + " please add README.md file");
2464 - process.exit(1);
2465 - }
2466 - for (let file of files) {
2467 - if (!minimatch(file, "**/*.{mo,did,md,toml}") && !file.toLowerCase().endsWith("license") && !file.toLowerCase().endsWith("notice") && file !== docsFile) {
2468 - console.log(chalk15.red("Error: ") + `file ${file} has unsupported extension. Allowed: .mo, .did, .md, .toml`);
2469 - process.exit(1);
2470 - }
2471 - }
2472 - console.log("Parsing CHANGELOG.md...");
2473 - let changelog = parseChangelog(config.package.version);
2474 - if (!changelog && config.package.repository) {
2475 - console.log("Fetching release notes from GitHub...");
2476 - changelog = await fetchGitHubReleaseNotes(config.package.repository, config.package.version);
2477 - }
2478 - if (changelog) {
2479 - console.log("Changelog:");
2480 - console.log(chalk15.gray(changelog));
2481 - }
2482 - let reporter = new SilentReporter();
2483 - if (options.test) {
2484 - console.log("Running tests...");
2485 - await testWithReporter(reporter);
2486 - if (reporter.failed > 0) {
2487 - console.log(chalk15.red("Error: ") + "tests failed");
2488 - process.exit(1);
2489 - }
2490 - }
2491 - let total = files.length + 2;
2492 - let step = 0;
2493 - function progress() {
2494 - step++;
2495 - logUpdate5(`Uploading files ${progressBar(step, total)}`);
2496 - }
2497 - let identity = await getIdentity();
2498 - let actor = await mainActor(identity);
2499 - progress();
2500 - let publishing = await actor.startPublish(backendPkgConfig);
2501 - if ("err" in publishing) {
2502 - console.log(chalk15.red("Error: ") + publishing.err);
2503 - return;
2504 - }
2505 - let puiblishingId = publishing.ok;
2506 - if (options.test) {
2507 - await actor.uploadTestStats(puiblishingId, {
2508 - passed: BigInt(reporter.passed),
2509 - passedNames: reporter.passedNamesFlat
2510 - });
2511 - }
2512 - if (changelog) {
2513 - await actor.uploadNotes(puiblishingId, changelog);
2514 - }
2515 - await parallel(8, files, async (file) => {
2516 - progress();
2517 - let chunkSize = 1024 * 1024 + 512 * 1024;
2518 - let content = fs10.readFileSync(file);
2519 - let chunkCount = Math.ceil(content.length / chunkSize);
2520 - let firstChunk = Array.from(content.slice(0, chunkSize));
2521 - if (file === docsFile) {
2522 - file = path13.basename(file);
2523 - }
2524 - let res2 = await actor.startFileUpload(puiblishingId, file, BigInt(chunkCount), firstChunk);
2525 - if ("err" in res2) {
2526 - console.log(chalk15.red("Error: ") + res2.err);
2527 - return;
2528 - }
2529 - let fileId = res2.ok;
2530 - for (let i = 1; i < chunkCount; i++) {
2531 - let start = i * chunkSize;
2532 - let chunk = Array.from(content.slice(start, start + chunkSize));
2533 - let res3 = await actor.uploadFileChunk(puiblishingId, fileId, BigInt(i), chunk);
2534 - if ("err" in res3) {
2535 - console.log(chalk15.red("Error: ") + res3.err);
2536 - return;
2537 - }
2538 - }
2539 - });
2540 - fs10.rmSync(path13.join(rootDir, ".mops/.docs"), { force: true, recursive: true });
2541 - progress();
2542 - let res = await actor.finishPublish(puiblishingId);
2543 - if ("err" in res) {
2544 - console.log(chalk15.red("Error: ") + res.err);
2545 - return;
2546 - }
2547 - console.log(chalk15.green("Published ") + `${config.package.name}@${config.package.version}`);
2548 - }
2549 - function parseChangelog(version) {
2550 - let rootDir = getRootDir();
2551 - let changelogFile = "";
2552 - let files = ["CHANGELOG.md", "Changelog.md", "changelog.md"];
2553 - for (let file of files) {
2554 - if (fs10.existsSync(path13.join(rootDir, file))) {
2555 - changelogFile = path13.join(rootDir, file);
2556 - break;
2557 - }
2558 - }
2559 - if (!changelogFile) {
2560 - console.log(chalk15.yellow("CHANGELOG.md not found"));
2561 - return "";
2562 - }
2563 - let str = fs10.readFileSync(changelogFile, "utf-8");
2564 - let changelog = findChangelogEntry(str, version);
2565 - if (!changelog) {
2566 - console.log(chalk15.yellow("No changelog entry found"));
2567 - }
2568 - return changelog || "";
2569 - }
2570 - async function fetchGitHubReleaseNotes(repo, version) {
2571 - let repoPath = new URL(repo).pathname;
2572 - let res = await fetch(`https://api.github.com/repos${repoPath}/releases/tags/${version}`);
2573 - let release = await res.json();
2574 - if (release.message === "Not Found") {
2575 - res = await fetch(`https://api.github.com/repos${repoPath}/releases/tags/v${version}`);
2576 - release = await res.json();
2577 - if (release.message === "Not Found") {
2578 - console.log(chalk15.yellow(`No GitHub release found with name ${version} or v${version}`));
2579 - return "";
2580 - }
2581 - }
2582 - return release.body;
2583 - }
2584 - function findChangelogEntry(changelog, version) {
2585 - let tree = fromMarkdown(changelog);
2586 - let found = false;
2587 - let nodes = [];
2588 - for (let node of tree.children) {
2589 - if (found) {
2590 - if (node.type === "heading") {
2591 - break;
2592 - }
2593 - else {
2594 - nodes.push(node);
2595 - }
2596 - }
2597 - else if (node.type === "heading" && toMarkdown(node).match(new RegExp(`\\b${version}\\b`))) {
2598 - found = true;
2599 - }
2600 - }
2601 - return toMarkdown({
2602 - type: "root",
2603 - children: nodes
2604 - });
2605 - }
2606 - // dist/commands/import-identity.js
2607 - import fs11 from "node:fs";
2608 - import path14 from "node:path";
2609 - import chalk16 from "chalk";
2610 - import prompts5 from "prompts";
2611 - import { deleteSync as deleteSync3 } from "del";
2612 - async function importPem(data, options = { encrypt: true }) {
2613 - try {
2614 - if (!fs11.existsSync(globalConfigDir)) {
2615 - fs11.mkdirSync(globalConfigDir);
2616 - }
2617 - let password = "";
2618 - if (options.encrypt) {
2619 - let res = await prompts5({
2620 - type: "invisible",
2621 - name: "password",
2622 - message: "Enter password to encrypt identity.pem"
2623 - });
2624 - password = res.password;
2625 - if (!password) {
2626 - let res2 = await prompts5({
2627 - type: "confirm",
2628 - name: "ok",
2629 - message: "Are you sure you don't want to protect your identity.pem with a password?"
2630 - });
2631 - if (!res2.ok) {
2632 - console.log("aborted");
2633 - return;
2634 - }
2635 - }
2636 - }
2637 - let identityPem = path14.resolve(globalConfigDir, "identity.pem");
2638 - let identityPemEncrypted = path14.resolve(globalConfigDir, "identity.pem.encrypted");
2639 - deleteSync3([identityPem, identityPemEncrypted], { force: true });
2640 - if (password) {
2641 - let encrypted = await encrypt(Buffer.from(data), password);
2642 - fs11.writeFileSync(identityPemEncrypted, encrypted);
2643 - }
2644 - else {
2645 - fs11.writeFileSync(identityPem, data);
2646 - }
2647 - console.log(chalk16.green("Success"));
2648 - }
2649 - catch (err) {
2650 - console.log(chalk16.red("Error: ") + err);
2651 - }
2652 - }
2653 - // dist/commands/whoami.js
2654 - import chalk17 from "chalk";
2655 - async function whoami() {
2656 - let identity = await getIdentity();
2657 - if (identity) {
2658 - console.log(identity.getPrincipal().toText());
2659 - }
2660 - else {
2661 - console.log(chalk17.red("Error: ") + "identity not found. Run " + chalk17.greenBright("mops import-identity") + " command.");
2662 - }
2663 - }
2664 - // dist/commands/search.js
2665 - import asTable from "as-table";
2666 - import chalk18 from "chalk";
2667 - async function search(text) {
2668 - let actor = await mainActor();
2669 - let [packages, _pageCount] = await actor.search(text, [], []);
2670 - if (!packages.length) {
2671 - console.log("Packages not found");
2672 - return;
2673 - }
2674 - let ellipsis = (text2, max) => {
2675 - if (text2.length <= max) {
2676 - return text2;
2677 - }
2678 - else {
2679 - return text2.slice(0, max) + "\u2026";
2680 - }
2681 - };
2682 - let maxNameLength = Math.max(...packages.map((a) => a.config.name.length));
2683 - let table = packages.map((item) => {
2684 - return {
2685 - NAME: chalk18.bold(item.config.name),
2686 - DESCRIPTION: ellipsis(item.config.description, process.stdout.columns - 40 - maxNameLength),
2687 - VERSION: item.config.version,
2688 - UPDATED: new Date(Number(item.publication.time / 1000000n)).toISOString().split("T")[0]
2689 - };
2690 - });
2691 - console.log("");
2692 - console.log(asTable.configure({
2693 - delimiter: chalk18.gray(" | "),
2694 - dash: chalk18.gray("\u2500"),
2695 - title: (t) => chalk18.gray.bold(t)
2696 - })(table));
2697 - console.log("");
2698 - }
2699 - // dist/commands/add.js
2700 - import path15 from "node:path";
2701 - import chalk19 from "chalk";
2702 - import logUpdate6 from "log-update";
2703 - async function add(name, { verbose = false, dev = false, lockfile } = {}, asName) {
2704 - if (!checkConfigFile()) {
2705 - return;
2706 - }
2707 - let config = readConfig();
2708 - if (dev) {
2709 - if (!config["dev-dependencies"]) {
2710 - config["dev-dependencies"] = {};
2711 - }
2712 - }
2713 - else {
2714 - if (!config.dependencies) {
2715 - config.dependencies = {};
2716 - }
2717 - }
2718 - let pkgDetails;
2719 - if (name.startsWith("./") || name.startsWith("../") || name.startsWith("/")) {
2720 - pkgDetails = {
2721 - name: path15.parse(name).name === "." ? "_" : path15.parse(name).name,
2722 - path: name,
2723 - repo: "",
2724 - version: ""
2725 - };
2726 - }
2727 - else if (name.startsWith("https://github.com") || name.split("/").length > 1) {
2728 - let { org, gitName, branch, commitHash } = parseGithubURL(name);
2729 - if (!commitHash) {
2730 - let commit = await getGithubCommit(`${org}/${gitName}`, branch);
2731 - if (!commit.sha) {
2732 - throw Error(`Could not find commit hash for ${name}`);
2733 - }
2734 - commitHash = commit.sha;
2735 - }
2736 - pkgDetails = {
2737 - name: asName || parseGithubURL(name).gitName,
2738 - repo: `https://github.com/${org}/${gitName}#${branch}@${commitHash}`,
2739 - version: ""
2740 - };
2741 - }
2742 - else {
2743 - let ver;
2744 - if (name.includes("@")) {
2745 - [name, ver] = name.split("@");
2746 - }
2747 - else {
2748 - let versionRes = await getHighestVersion(name);
2749 - if ("err" in versionRes) {
2750 - console.log(chalk19.red("Error: ") + versionRes.err);
2751 - return;
2752 - }
2753 - ver = versionRes.ok;
2754 - }
2755 - pkgDetails = {
2756 - name,
2757 - repo: "",
2758 - version: ver
2759 - };
2760 - }
2761 - let installedPackages = {};
2762 - if (pkgDetails.repo) {
2763 - await installFromGithub(pkgDetails.name, pkgDetails.repo, { verbose });
2764 - }
2765 - else if (!pkgDetails.path) {
2766 - let res = await install(pkgDetails.name, pkgDetails.version, { verbose });
2767 - if (res === false) {
2768 - return;
2769 - }
2770 - installedPackages = { ...installedPackages, ...res };
2771 - }
2772 - const depsProp = dev ? "dev-dependencies" : "dependencies";
2773 - let deps = config[depsProp];
2774 - if (deps) {
2775 - deps[pkgDetails.name] = pkgDetails;
2776 - }
2777 - else {
2778 - throw Error(`Invalid config file: [${depsProp}] not found`);
2779 - }
2780 - writeConfig(config);
2781 - await Promise.all([
2782 - notifyInstalls(Object.keys(installedPackages))
2783 - // checkIntegrity(lockfile),
2784 - ]);
2785 - logUpdate6.clear();
2786 - console.log(chalk19.green("Package installed ") + `${pkgDetails.name} = "${pkgDetails.repo || pkgDetails.path || pkgDetails.version}"`);
2787 - }
2788 - // dist/commands/self-update.js
2789 - import child_process from "node:child_process";
2790 - function selfUpdate({ detached = false } = {}) {
2791 - console.log("Updating mops CLI...");
2792 - child_process.spawn("npm", ["install", "ic-mops", "-g"], { stdio: "inherit", detached });
2793 - }
2794 - // dist/commands/remove.js
2795 - import fs12 from "node:fs";
2796 - import { deleteSync as deleteSync4 } from "del";
2797 - import chalk20 from "chalk";
2798 - async function remove(name, { dev = false, verbose = false, dryRun = false, lockfile } = {}) {
2799 - if (!checkConfigFile()) {
2800 - return;
2801 - }
2802 - function getTransitiveDependencies(config2, exceptPkgId) {
2803 - let deps2 = Object.values(config2.dependencies || {});
2804 - let devDeps = Object.values(config2["dev-dependencies"] || {});
2805 - return [...deps2, ...devDeps].filter((dep) => {
2806 - let depId = dep.name + "@" + dep.version;
2807 - return depId !== exceptPkgId;
2808 - }).map((dep) => {
2809 - return [dep, ...getTransitiveDependenciesOf(dep.name, dep.version, dep.repo)];
2810 - }).flat();
2811 - }
2812 - function getTransitiveDependenciesOf(name2, version2, repo) {
2813 - let pkgDir = "";
2814 - if (repo) {
2815 - pkgDir = formatGithubDir(name2, repo);
2816 - }
2817 - else if (version2) {
2818 - pkgDir = formatDir(name2, version2);
2819 - }
2820 - let configFile = pkgDir + "/mops.toml";
2821 - if (!fs12.existsSync(configFile)) {
2822 - verbose && console.log("no config", configFile);
2823 - return [];
2824 - }
2825 - let config2 = readConfig(configFile);
2826 - let deps2 = Object.values(config2.dependencies || {}).map((dep) => {
2827 - return [dep, ...getTransitiveDependenciesOf(dep.name, dep.version)];
2828 - }).flat();
2829 - return deps2;
2830 - }
2831 - let config = readConfig();
2832 - let deps = dev ? config["dev-dependencies"] : config.dependencies;
2833 - deps = deps || {};
2834 - let pkgDetails = deps[name];
2835 - if (!pkgDetails) {
2836 - return console.log(chalk20.red("Error: ") + `No ${dev ? "dev " : ""}dependency to remove "${name}"`);
2837 - }
2838 - let version = pkgDetails.version;
2839 - let packageId = `${name}@${version}`;
2840 - let transitiveDeps = getTransitiveDependencies(config, packageId);
2841 - let transitiveDepIds = new Set(transitiveDeps.map((dep) => {
2842 - return dep.name + "@" + dep.version;
2843 - }));
2844 - let transitiveDepsOfPackage = [pkgDetails, ...getTransitiveDependenciesOf(name, version)];
2845 - for (let dep of transitiveDepsOfPackage) {
2846 - let depId = dep.name + "@" + dep.version;
2847 - if (transitiveDepIds.has(depId)) {
2848 - verbose && console.log(`Ignored transitive dependency ${depId} (other deps depend on it)`);
2849 - continue;
2850 - }
2851 - let pkgDir;
2852 - if (dep.repo) {
2853 - pkgDir = formatGithubDir(dep.name, dep.repo);
2854 - }
2855 - else if (dep.version) {
2856 - pkgDir = formatDir(dep.name, dep.version);
2857 - }
2858 - if (pkgDir && fs12.existsSync(pkgDir)) {
2859 - dryRun || deleteSync4([`${pkgDir}`], { force: true });
2860 - verbose && console.log(`Removed local cache ${pkgDir}`);
2861 - }
2862 - }
2863 - if (!dev && config.dependencies) {
2864 - delete config.dependencies[name];
2865 - }
2866 - if (dev && config["dev-dependencies"]) {
2867 - delete config["dev-dependencies"][name];
2868 - }
2869 - dryRun || writeConfig(config);
2870 - console.log(chalk20.green("Package removed ") + `${name} = "${version}"`);
2871 - }
2872 - // dist/commands/user.js
2873 - import chalk21 from "chalk";
2874 - async function getUserProp(prop) {
2875 - var _a;
2876 - let actor = await mainActor();
2877 - let identity = await getIdentity();
2878 - if (!identity) {
2879 - console.log(chalk21.red("Error: ") + "No identity found");
2880 - process.exit(1);
2881 - }
2882 - let res = await actor.getUser(identity.getPrincipal());
2883 - console.log(((_a = res[0]) == null ? void 0 : _a[prop]) || "");
2884 - }
2885 - async function setUserProp(prop, value) {
2886 - let identity = await getIdentity();
2887 - let actor = await mainActor(identity);
2888 - let res = await actor.setUserProp(prop, value);
2889 - if ("ok" in res) {
2890 - console.log(chalk21.green("Success!"));
2891 - }
2892 - else {
2893 - console.log(chalk21.red("Error: ") + res.err);
2894 - }
2895 - }
2896 - // dist/commands/bump.js
2897 - import prompts6 from "prompts";
2898 - import chalk22 from "chalk";
2899 - async function bump(part) {
2900 - if (!checkConfigFile()) {
2901 - return;
2902 - }
2903 - if (part && !["major", "minor", "patch"].includes(part)) {
2904 - console.log(chalk22.red("Unknown version part. Available parts: major, minor, patch"));
2905 - process.exit(1);
2906 - }
2907 - let config = readConfig();
2908 - if (!config.package) {
2909 - console.log(chalk22.red("No [package] section found in mops.toml."));
2910 - process.exit(1);
2911 - }
2912 - console.log(`Current version: ${chalk22.yellow.bold(config.package.version)}`);
2913 - if (!part) {
2914 - let res = await prompts6({
2915 - type: "select",
2916 - name: "part",
2917 - message: "Select new version:",
2918 - choices: [
2919 - { title: `${updateVersion(config.package.version, "major")} ${chalk22.dim("(major, breaking changes)")}`, value: "major" },
2920 - { title: `${updateVersion(config.package.version, "minor")} ${chalk22.dim("(minor, new features)")}`, value: "minor" },
2921 - { title: `${updateVersion(config.package.version, "patch")} ${chalk22.dim("(patch, bug fixes)")}`, value: "patch" }
2922 - ],
2923 - initial: 2
2924 - });
2925 - if (!res.part) {
2926 - return;
2927 - }
2928 - part = res.part;
2929 - }
2930 - config.package.version = updateVersion(config.package.version, part);
2931 - writeConfig(config);
2932 - console.log(`Updated version: ${chalk22.green.bold(config.package.version)}`);
2933 - }
2934 - function updateVersion(version, part) {
2935 - let parts = version.split(".");
2936 - let idx = ["major", "minor", "patch"].indexOf(part);
2937 - if (!parts[idx]) {
2938 - throw new Error(`Invalid version part: ${part}`);
2939 - }
2940 - parts[idx] = String(parseInt(parts[idx] || "0") + 1);
2941 - for (let i = idx + 1; i < parts.length; i++) {
2942 - parts[i] = "0";
2943 - }
2944 - return parts.join(".");
2945 - }
2946 - // dist/commands/sync.js
2947 - import path16 from "node:path";
2948 - import { execSync as execSync4 } from "node:child_process";
2949 - import { globSync as globSync3 } from "glob";
2950 - import chalk23 from "chalk";
2951 - async function sync({ lockfile } = {}) {
2952 - if (!checkConfigFile()) {
2953 - return;
2954 - }
2955 - let missing = await getMissingPackages();
2956 - let unused = await getUnusedPackages();
2957 - missing.length && console.log(`${chalk23.yellow("Missing packages:")} ${missing.join(", ")}`);
2958 - unused.length && console.log(`${chalk23.yellow("Unused packages:")} ${unused.join(", ")}`);
2959 - let config = readConfig();
2960 - let deps = new Set(Object.keys(config.dependencies || {}));
2961 - let devDeps = new Set(Object.keys(config["dev-dependencies"] || {}));
2962 - for (let pkg of missing) {
2963 - await add(pkg, { lockfile: "ignore" });
2964 - }
2965 - for (let pkg of unused) {
2966 - let dev = devDeps.has(pkg) && !deps.has(pkg);
2967 - await remove(pkg, { dev, lockfile: "ignore" });
2968 - }
2969 - await checkIntegrity(lockfile);
2970 - }
2971 - var ignore2 = [
2972 - "**/node_modules/**",
2973 - "**/.vessel/**",
2974 - "**/.git/**",
2975 - "**/.mops/**"
2976 - ];
2977 - var mocPath2 = "";
2978 - function getMocPath() {
2979 - if (!mocPath2) {
2980 - mocPath2 = process.env.DFX_MOC_PATH || "";
2981 - }
2982 - if (!mocPath2) {
2983 - try {
2984 - mocPath2 = execSync4("dfx cache show").toString().trim() + "/moc";
2985 - }
2986 - catch {
2987 - }
2988 - }
2989 - if (!mocPath2) {
2990 - mocPath2 = "moc";
2991 - }
2992 - return mocPath2;
2993 - }
2994 - async function getUsedPackages() {
2995 - let rootDir = getRootDir();
2996 - let files = globSync3("**/*.mo", {
2997 - cwd: rootDir,
2998 - nocase: true,
2999 - ignore: ignore2
3000 - });
3001 - let packages = /* @__PURE__ */ new Set();
3002 - for (let file of files) {
3003 - let deps = execSync4(`${getMocPath()} --print-deps ${path16.join(rootDir, file)}`).toString().trim().split("\n");
3004 - for (let dep of deps) {
3005 - if (dep.startsWith("mo:") && !dep.startsWith("mo:prim") && !dep.startsWith("mo:\u26D4")) {
3006 - packages.add(dep.replace(/^mo:([^/]+).*$/, "$1"));
3007 - }
3008 - }
3009 - }
3010 - return [...packages];
3011 - }
3012 - async function getMissingPackages() {
3013 - let config = readConfig();
3014 - let allDeps = [...Object.keys(config.dependencies || {}), ...Object.keys(config["dev-dependencies"] || {})];
3015 - let missing = new Set(await getUsedPackages());
3016 - for (let pkg of allDeps) {
3017 - missing.delete(pkg);
3018 - }
3019 - return [...missing];
3020 - }
3021 - async function getUnusedPackages() {
3022 - let config = readConfig();
3023 - let allDeps = /* @__PURE__ */ new Set([...Object.keys(config.dependencies || {}), ...Object.keys(config["dev-dependencies"] || {})]);
3024 - let used = await getUsedPackages();
3025 - for (let pkg of used) {
3026 - allDeps.delete(pkg);
3027 - }
3028 - return [...allDeps];
3029 - }
3030 - // dist/commands/outdated.js
3031 - import chalk25 from "chalk";
3032 - // dist/commands/available-updates.js
3033 - import chalk24 from "chalk";
3034 - async function getAvailableUpdates(config, pkg) {
3035 - let deps = Object.values(config.dependencies || {});
3036 - let devDeps = Object.values(config["dev-dependencies"] || {});
3037 - let allDeps = [...deps, ...devDeps].filter((dep) => dep.version);
3038 - let depsToUpdate = pkg ? allDeps.filter((dep) => dep.name === pkg) : allDeps;
3039 - let getCurrentVersion = (pkg2) => {
3040 - for (let dep of allDeps) {
3041 - if (dep.name === pkg2 && dep.version) {
3042 - return dep.version;
3043 - }
3044 - }
3045 - return "";
3046 - };
3047 - let actor = await mainActor();
3048 - let res = await actor.getHighestSemverBatch(depsToUpdate.map((dep) => [dep.name, dep.version || "", { major: null }]));
3049 - if ("err" in res) {
3050 - console.log(chalk24.red("Error:"), res.err);
3051 - process.exit(1);
3052 - }
3053 - return res.ok.filter((dep) => dep[1] !== getCurrentVersion(dep[0])).map((dep) => [dep[0], getCurrentVersion(dep[0]), dep[1]]);
3054 - }
3055 - // dist/commands/outdated.js
3056 - async function outdated() {
3057 - if (!checkConfigFile()) {
3058 - return;
3059 - }
3060 - let config = readConfig();
3061 - let available = await getAvailableUpdates(config);
3062 - if (available.length === 0) {
3063 - console.log(chalk25.green("All dependencies are up to date!"));
3064 - }
3065 - else {
3066 - console.log("Available updates:");
3067 - for (let dep of available) {
3068 - console.log(`${dep[0]} ${chalk25.yellow(dep[1])} -> ${chalk25.green(dep[2])}`);
3069 - }
3070 - }
3071 - }
3072 - // dist/commands/update.js
3073 - import chalk26 from "chalk";
3074 - async function update(pkg, { lockfile } = {}) {
3075 - var _a, _b, _c, _d;
3076 - if (!checkConfigFile()) {
3077 - return;
3078 - }
3079 - let config = readConfig();
3080 - if (pkg && !((_a = config.dependencies) == null ? void 0 : _a[pkg]) && !((_b = config["dev-dependencies"]) == null ? void 0 : _b[pkg])) {
3081 - console.log(chalk26.red(`Package "${pkg}" is not installed!`));
3082 - return;
3083 - }
3084 - let deps = Object.values(config.dependencies || {});
3085 - let devDeps = Object.values(config["dev-dependencies"] || {});
3086 - let githubDeps = [...deps, ...devDeps].filter((dep) => dep.repo);
3087 - if (pkg) {
3088 - githubDeps = githubDeps.filter((dep) => dep.name === pkg);
3089 - }
3090 - for (let dep of githubDeps) {
3091 - let { org, gitName, branch, commitHash } = parseGithubURL(dep.repo || "");
3092 - let dev = !!((_c = config["dev-dependencies"]) == null ? void 0 : _c[dep.name]);
3093 - let commit = await getGithubCommit(`${org}/${gitName}`, branch);
3094 - if (commit.sha !== commitHash) {
3095 - await add(`https://github.com/${org}/${gitName}#${branch}@${commit.sha}`, { dev }, dep.name);
3096 - }
3097 - }
3098 - let available = await getAvailableUpdates(config, pkg);
3099 - if (available.length === 0) {
3100 - if (pkg) {
3101 - console.log(chalk26.green(`Package "${pkg}" is up to date!`));
3102 - }
3103 - else {
3104 - console.log(chalk26.green("All dependencies are up to date!"));
3105 - }
3106 - }
3107 - else {
3108 - for (let dep of available) {
3109 - let dev = !!((_d = config["dev-dependencies"]) == null ? void 0 : _d[dep[0]]);
3110 - await add(`${dep[0]}@${dep[2]}`, { dev });
3111 - }
3112 - }
3113 - await checkIntegrity(lockfile);
3114 - }
3115 - // dist/commands/bench.js
3116 - import { execSync as execSync8 } from "node:child_process";
3117 - import path17 from "node:path";
3118 - import fs13 from "node:fs";
3119 - import os2 from "node:os";
3120 - import chalk27 from "chalk";
3121 - import { globSync as globSync4 } from "glob";
3122 - import { markdownTable } from "markdown-table";
3123 - import logUpdate7 from "log-update";
3124 - // dist/declarations/bench/index.js
3125 - import { Actor as Actor4, HttpAgent as HttpAgent4 } from "@dfinity/agent";
3126 - // dist/declarations/bench/bench.did.js
3127 - var idlFactory3 = ({ IDL }) => {
3128 - const BenchSchema = IDL.Record({
3129 - "cols": IDL.Vec(IDL.Text),
3130 - "name": IDL.Text,
3131 - "rows": IDL.Vec(IDL.Text),
3132 - "description": IDL.Text
3133 - });
3134 - const BenchResult = IDL.Record({
3135 - "instructions": IDL.Int,
3136 - "rts_memory_size": IDL.Int,
3137 - "rts_total_allocation": IDL.Int,
3138 - "rts_collector_instructions": IDL.Int,
3139 - "rts_mutator_instructions": IDL.Int,
3140 - "rts_heap_size": IDL.Int
3141 - });
3142 - const anon_class_10_1 = IDL.Service({
3143 - "getSchema": IDL.Func([], [BenchSchema], ["query"]),
3144 - "getStats": IDL.Func([], [BenchResult], ["query"]),
3145 - "init": IDL.Func([], [BenchSchema], []),
3146 - "runCellQuery": IDL.Func([IDL.Nat, IDL.Nat], [BenchResult], ["query"]),
3147 - "runCellUpdate": IDL.Func([IDL.Nat, IDL.Nat], [BenchResult], []),
3148 - "runCellUpdateAwait": IDL.Func([IDL.Nat, IDL.Nat], [BenchResult], [])
3149 - });
3150 - return anon_class_10_1;
3151 - };
3152 - // dist/declarations/bench/index.js
3153 - var canisterId2 = process.env.CANISTER_ID_BENCH || process.env.BENCH_CANISTER_ID;
3154 - var createActor = (canisterId3, options = {}) => {
3155 - const agent = options.agent || new HttpAgent4({ ...options.agentOptions });
3156 - if (options.agent && options.agentOptions) {
3157 - console.warn("Detected both agent and agentOptions passed to createActor. Ignoring agentOptions and proceeding with the provided agent.");
3158 - }
3159 - if (process.env.DFX_NETWORK !== "ic") {
3160 - agent.fetchRootKey().catch((err) => {
3161 - console.warn("Unable to fetch root key. Check to ensure that your local replica is running");
3162 - console.error(err);
3163 - });
3164 - }
3165 - return Actor4.createActor(idlFactory3, {
3166 - agent,
3167 - canisterId: canisterId3,
3168 - ...options.actorOptions
3169 - });
3170 - };
3171 - // dist/helpers/get-moc-version.js
3172 - import { execSync as execSync6 } from "node:child_process";
3173 - // dist/helpers/get-moc-path.js
3174 - import { execSync as execSync5 } from "node:child_process";
3175 - function getMocPath2() {
3176 - let mocPath3 = process.env.DFX_MOC_PATH;
3177 - if (!mocPath3) {
3178 - mocPath3 = execSync5("dfx cache show").toString().trim() + "/moc";
3179 - }
3180 - if (!mocPath3) {
3181 - mocPath3 = "moc";
3182 - }
3183 - return mocPath3;
3184 - }
3185 - // dist/helpers/get-moc-version.js
3186 - function getMocVersion() {
3187 - let mocPath3 = getMocPath2();
3188 - let match = execSync6(mocPath3).toString().trim().match(/Motoko compiler ([^\s]+) .*/);
3189 - return (match == null ? void 0 : match[1]) || "";
3190 - }
3191 - // dist/helpers/get-dfx-version.js
3192 - import { execSync as execSync7 } from "node:child_process";
3193 - function getDfxVersion() {
3194 - try {
3195 - let res = execSync7("dfx --version").toString();
3196 - return res.trim().split("dfx ")[1] || "";
3197 - }
3198 - catch {
3199 - }
3200 - return "";
3201 - }
3202 - // dist/commands/bench.js
3203 - import { execaCommand as execaCommand2 } from "execa";
3204 - var ignore3 = [
3205 - "**/node_modules/**",
3206 - "**/.mops/**",
3207 - "**/.vessel/**",
3208 - "**/.git/**"
3209 - ];
3210 - var globConfig2 = {
3211 - nocase: true,
3212 - ignore: ignore3
3213 - };
3214 - async function bench(filter = "", options = {}) {
3215 - let defaultOptions = {
3216 - moc: getMocVersion(),
3217 - dfx: getDfxVersion(),
3218 - gc: "incremental",
3219 - forceGc: true,
3220 - save: false,
3221 - compare: false,
3222 - verbose: false
3223 - };
3224 - options = { ...defaultOptions, ...options };
3225 - options.verbose && console.log(options);
3226 - let rootDir = getRootDir();
3227 - let globStr = "**/bench?(mark)/**/*.bench.mo";
3228 - if (filter) {
3229 - globStr = `**/bench?(mark)/**/*${filter}*.mo`;
3230 - }
3231 - let files = globSync4(path17.join(rootDir, globStr), globConfig2);
3232 - if (!files.length) {
3233 - if (filter) {
3234 - console.log(`No benchmark files found for filter '${filter}'`);
3235 - return false;
3236 - }
3237 - console.log("No *.bench.mo files found");
3238 - console.log("Put your benchmark code in 'bench' directory in *.bench.mo files");
3239 - return false;
3240 - }
3241 - files.sort();
3242 - let benchDir = `${getRootDir()}/.mops/.bench/`;
3243 - fs13.rmSync(benchDir, { recursive: true, force: true });
3244 - fs13.mkdirSync(benchDir, { recursive: true });
3245 - console.log("Benchmark files:");
3246 - for (let file of files) {
3247 - console.log(chalk27.gray(`\u2022 ${absToRel(file)}`));
3248 - }
3249 - console.log("");
3250 - console.log("=".repeat(50));
3251 - console.log("");
3252 - console.log("Starting dfx replica...");
3253 - startDfx(options.verbose);
3254 - console.log("Deploying canisters...");
3255 - await parallel(os2.cpus().length, files, async (file) => {
3256 - try {
3257 - await deployBenchFile(file, options);
3258 - }
3259 - catch (err) {
3260 - console.error("Unexpected error. Stopping dfx replica...");
3261 - stopDfx(options.verbose);
3262 - throw err;
3263 - }
3264 - });
3265 - await parallel(1, files, async (file) => {
3266 - console.log("\n" + "\u2014".repeat(50));
3267 - console.log(`
3268 - Running ${chalk27.gray(absToRel(file))}...`);
3269 - console.log("");
3270 - try {
3271 - await runBenchFile(file, options);
3272 - }
3273 - catch (err) {
3274 - console.error("Unexpected error. Stopping dfx replica...");
3275 - stopDfx(options.verbose);
3276 - throw err;
3277 - }
3278 - });
3279 - console.log("Stopping dfx replica...");
3280 - stopDfx(options.verbose);
3281 - fs13.rmSync(benchDir, { recursive: true, force: true });
3282 - return true;
3283 - }
3284 - function getMocArgs(options) {
3285 - let args = "";
3286 - if (options.forceGc) {
3287 - args += " --force-gc";
3288 - }
3289 - if (options.gc) {
3290 - args += ` --${options.gc}-gc`;
3291 - }
3292 - return args;
3293 - }
3294 - function dfxJson(canisterName, options = {}) {
3295 - options || console.log(options);
3296 - let canisters = {};
3297 - if (canisterName) {
3298 - canisters[canisterName] = {
3299 - type: "custom",
3300 - wasm: "canister.wasm",
3301 - candid: "canister.did"
3302 - };
3303 - }
3304 - return {
3305 - version: 1,
3306 - canisters,
3307 - defaults: {
3308 - build: {
3309 - packtool: "mops sources"
3310 - }
3311 - },
3312 - networks: {
3313 - local: {
3314 - type: "ephemeral",
3315 - bind: "127.0.0.1:4944"
3316 - }
3317 - }
3318 - };
3319 - }
3320 - function startDfx(verbose = false) {
3321 - stopDfx(verbose);
3322 - let dir = path17.join(getRootDir(), ".mops/.bench");
3323 - fs13.writeFileSync(path17.join(dir, "dfx.json"), JSON.stringify(dfxJson(""), null, 2));
3324 - execSync8("dfx start --background --clean --artificial-delay 0" + (verbose ? "" : " -qqqq"), { cwd: dir, stdio: ["inherit", verbose ? "inherit" : "ignore", "inherit"] });
3325 - }
3326 - function stopDfx(verbose = false) {
3327 - let dir = path17.join(getRootDir(), ".mops/.bench");
3328 - execSync8("dfx stop" + (verbose ? "" : " -qqqq"), { cwd: dir, stdio: ["pipe", verbose ? "inherit" : "ignore", "pipe"] });
3329 - }
3330 - async function deployBenchFile(file, options = {}) {
3331 - let rootDir = getRootDir();
3332 - let tempDir = path17.join(rootDir, ".mops/.bench/", path17.parse(file).name);
3333 - let canisterName = path17.parse(file).name;
3334 - fs13.mkdirSync(tempDir, { recursive: true });
3335 - fs13.writeFileSync(path17.join(tempDir, "dfx.json"), JSON.stringify(dfxJson(canisterName, options), null, 2));
3336 - let benchCanisterData = fs13.readFileSync(new URL("./bench/bench-canister.mo", import.meta.url), "utf8");
3337 - benchCanisterData = benchCanisterData.replace("./user-bench", path17.relative(tempDir, file).replace(/.mo$/g, ""));
3338 - fs13.writeFileSync(path17.join(tempDir, "canister.mo"), benchCanisterData);
3339 - let mocPath3 = getMocPath2();
3340 - let mocArgs = getMocArgs(options);
3341 - options.verbose && console.time(`build ${canisterName}`);
3342 - await execaCommand2(`${mocPath3} -c --idl canister.mo ${mocArgs} ${(await sources({ cwd: tempDir })).join(" ")}`, { cwd: tempDir, stdio: options.verbose ? "pipe" : ["pipe", "ignore", "pipe"] });
3343 - options.verbose && console.timeEnd(`build ${canisterName}`);
3344 - options.verbose && console.time(`deploy ${canisterName}`);
3345 - await execaCommand2(`dfx deploy ${canisterName} --mode reinstall --yes --identity anonymous`, { cwd: tempDir, stdio: options.verbose ? "pipe" : ["pipe", "ignore", "pipe"] });
3346 - options.verbose && console.timeEnd(`deploy ${canisterName}`);
3347 - options.verbose && console.time(`init ${canisterName}`);
3348 - let canisterId3 = execSync8(`dfx canister id ${canisterName}`, { cwd: tempDir }).toString().trim();
3349 - let actor = await createActor(canisterId3, {
3350 - agentOptions: {
3351 - host: "http://127.0.0.1:4944"
3352 - }
3353 - });
3354 - await actor.init();
3355 - options.verbose && console.timeEnd(`init ${canisterName}`);
3356 - }
3357 - async function runBenchFile(file, options = {}) {
3358 - let rootDir = getRootDir();
3359 - let tempDir = path17.join(rootDir, ".mops/.bench/", path17.parse(file).name);
3360 - let canisterName = path17.parse(file).name;
3361 - let canisterId3 = execSync8(`dfx canister id ${canisterName}`, { cwd: tempDir }).toString().trim();
3362 - let actor = await createActor(canisterId3, {
3363 - agentOptions: {
3364 - host: "http://127.0.0.1:4944"
3365 - }
3366 - });
3367 - let schema = await actor.getSchema();
3368 - let prevResults;
3369 - let resultsJsonFile = path17.join(rootDir, ".bench", `${path17.parse(file).name}.json`);
3370 - if (options.compare) {
3371 - if (fs13.existsSync(resultsJsonFile)) {
3372 - let prevResultsJson = JSON.parse(fs13.readFileSync(resultsJsonFile).toString());
3373 - prevResults = new Map(prevResultsJson.results);
3374 - }
3375 - else {
3376 - console.log(chalk27.yellow(`No previous results found "${resultsJsonFile}"`));
3377 - }
3378 - }
3379 - let results = /* @__PURE__ */ new Map();
3380 - let formatNumber = (n) => {
3381 - return n.toLocaleString("en-US").replaceAll(",", "_");
3382 - };
3383 - let getTable = (prop) => {
3384 - let resArr = [["", ...schema.cols]];
3385 - for (let [_rowIndex, row] of schema.rows.entries()) {
3386 - let curRow = [row];
3387 - for (let [_colIndex, col] of schema.cols.entries()) {
3388 - let res = results.get(`${row}:${col}`);
3389 - if (res) {
3390 - let diff = "";
3391 - if (options.compare && prevResults) {
3392 - let prevRes = prevResults.get(`${row}:${col}`);
3393 - if (prevRes) {
3394 - let percent = (Number(res[prop]) - Number(prevRes[prop])) / Number(prevRes[prop]) * 100;
3395 - let sign = percent > 0 ? "+" : "";
3396 - let percentText = percent == 0 ? "0%" : sign + percent.toFixed(2) + "%";
3397 - diff = " (" + percentText + ")";
3398 - }
3399 - else {
3400 - diff = chalk27.yellow(" (no previous results)");
3401 - }
3402 - }
3403 - curRow.push(formatNumber(res[prop]) + diff);
3404 - }
3405 - else {
3406 - curRow.push("");
3407 - }
3408 - }
3409 - resArr.push(curRow);
3410 - }
3411 - return markdownTable(resArr, { align: ["l", ..."r".repeat(schema.cols.length)] });
3412 - };
3413 - let printResults = () => {
3414 - logUpdate7(`
3415 -
3416 - ${chalk27.bold(schema.name)}
3417 - ${schema.description ? "\n" + chalk27.gray(schema.description) : ""}
3418 -
3419 -
3420 - ${chalk27.blue("Instructions")}
3421 -
3422 - ${getTable("instructions")}
3423 -
3424 -
3425 - ${chalk27.blue("Heap")}
3426 -
3427 - ${getTable("rts_heap_size")}
3428 - `);
3429 - };
3430 - printResults();
3431 - for (let [rowIndex, row] of schema.rows.entries()) {
3432 - for (let [colIndex, col] of schema.cols.entries()) {
3433 - let res = await actor.runCellUpdateAwait(BigInt(rowIndex), BigInt(colIndex));
3434 - results.set(`${row}:${col}`, res);
3435 - printResults();
3436 - }
3437 - }
3438 - logUpdate7.done();
3439 - if (options.save) {
3440 - console.log(`Saving results to ${chalk27.gray(absToRel(resultsJsonFile))}`);
3441 - let json = {
3442 - version: 1,
3443 - moc: options.moc,
3444 - dfx: options.dfx,
3445 - gc: options.gc,
3446 - forceGc: options.forceGc,
3447 - results: Array.from(results.entries())
3448 - };
3449 - fs13.mkdirSync(path17.dirname(resultsJsonFile), { recursive: true });
3450 - fs13.writeFileSync(resultsJsonFile, JSON.stringify(json, (_, val) => {
3451 - if (typeof val === "bigint") {
3452 - return Number(val);
3453 - }
3454 - else {
3455 - return val;
3456 - }
3457 - }, 2));
3458 - }
3459 - return { schema, results };
3460 - }
3461 - // dist/commands/transfer-ownership.js
3462 - import chalk28 from "chalk";
3463 - import { Principal } from "@dfinity/principal";
3464 - import prompts7 from "prompts";
3465 - async function transferOwnership(toPrincipal) {
3466 - var _a, _b;
3467 - if (!checkConfigFile()) {
3468 - return;
3469 - }
3470 - let config = readConfig();
3471 - let principal = Principal.fromText(toPrincipal);
3472 - let promptsConfig = {
3473 - onCancel() {
3474 - console.log("aborted");
3475 - process.exit(0);
3476 - }
3477 - };
3478 - console.log(chalk28.red("Warning: ") + "This action cannot be undone!");
3479 - let { confirm } = await prompts7({
3480 - type: "confirm",
3481 - name: "confirm",
3482 - message: `Are you sure you want to transfer ownership of ${chalk28.yellow((_a = config.package) == null ? void 0 : _a.name)} to ${chalk28.yellow(toPrincipal)}?`,
3483 - initial: false
3484 - }, promptsConfig);
3485 - if (!confirm) {
3486 - return;
3487 - }
3488 - let identity = await getIdentity();
3489 - let actor = await mainActor(identity);
3490 - let res = await actor.transferOwnership(((_b = config.package) == null ? void 0 : _b.name) || "", principal);
3491 - if ("ok" in res) {
3492 - console.log(chalk28.green("Success!"));
3493 - }
3494 - else {
3495 - console.log(chalk28.red("Error: ") + res.err);
3496 - }
3497 - }
3498 - // dist/cli.js
3499 - var networkFile = getNetworkFile();
3500 - if (fs14.existsSync(networkFile)) {
3501 - globalThis.MOPS_NETWORK = fs14.readFileSync(networkFile).toString() || "ic";
3502 - }
3503 - program.name("mops");
3504 - var packageJson = JSON.parse(fs14.readFileSync(new URL("package.json", import.meta.url)).toString());
3505 - program.version(`CLI ${packageJson.version}
3506 - API ${apiVersion}`, "-v --version");
3507 - program.command("init").description("Initialize a new project or package in the current directory").option("-y, --yes", "Accept all defaults").action(async (options) => {
3508 - await init(options);
3509 - });
3510 - program.command("add <pkg>").description("Install the package and save it to mops.toml").option("--dev").option("--verbose").addOption(new Option("--lockfile <lockfile>", "Lockfile action").choices(["save", "ignore"])).action(async (pkg, options) => {
3511 - if (!checkConfigFile()) {
3512 - process.exit(1);
3513 - }
3514 - await add(pkg, options);
3515 - });
3516 - program.command("remove <pkg>").alias("rm").description("Remove package and update mops.toml").option("--dev", "Remove from dev-dependencies instead of dependencies").option("--verbose", "Show more information").option("--dry-run", "Do not actually remove anything").addOption(new Option("--lockfile <lockfile>", "Lockfile action").choices(["save", "ignore"])).action(async (pkg, options) => {
3517 - if (!checkConfigFile()) {
3518 - process.exit(1);
3519 - }
3520 - await remove(pkg, options);
3521 - });
3522 - program.command("install [pkg]").alias("i").description("Install all dependencies specified in mops.toml").option("--verbose").addOption(new Option("--lockfile <lockfile>", "Lockfile action").choices(["save", "check", "ignore"])).action(async (pkg, options) => {
3523 - if (!checkConfigFile()) {
3524 - process.exit(1);
3525 - }
3526 - let compatible = await checkApiCompatibility();
3527 - if (!compatible) {
3528 - return;
3529 - }
3530 - if (pkg) {
3531 - console.log(chalk29.yellow("Consider using the 'mops add' command to install a specific package."));
3532 - await add(pkg, options);
3533 - }
3534 - else {
3535 - await installAll(options);
3536 - }
3537 - });
3538 - program.command("publish").description("Publish package to the mops registry").option("--no-docs", "Do not generate docs").option("--no-test", "Do not run tests").action(async (options) => {
3539 - if (!checkConfigFile()) {
3540 - process.exit(1);
3541 - }
3542 - let compatible = await checkApiCompatibility();
3543 - if (compatible) {
3544 - await publish(options);
3545 - }
3546 - });
3547 - program.command("set-network <network>").alias("sn").description("Set network local|staging|ic").action(async (network) => {
3548 - await setNetwork(network);
3549 - console.log(`Selected '${network}' network`);
3550 - });
3551 - program.command("get-network").alias("gn").description("Get network").action(async () => {
3552 - console.log(getNetwork());
3553 - });
3554 - program.command("import-identity <data>").description("Import .pem file data to use as identity").addOption(new Option("--no-encrypt", "Do not ask for a password to encrypt identity")).action(async (data, options) => {
3555 - await importPem(data, options);
3556 - await whoami();
3557 - });
3558 - program.command("sources").description("for dfx packtool").option("--verbose").action(async (options) => {
3559 - if (!checkConfigFile()) {
3560 - process.exit(1);
3561 - }
3562 - await installAll({ silent: true, lockfile: "ignore" });
3563 - let sourcesArr = await sources(options);
3564 - console.log(sourcesArr.join("\n"));
3565 - });
3566 - program.command("whoami").description("Print your principal").action(async () => {
3567 - await whoami();
3568 - });
3569 - program.command("search <text>").description("Search for packages").action(async (text) => {
3570 - await search(text);
3571 - });
3572 - program.command("cache").description("Manage cache").addArgument(new Argument("<sub>").choices(["size", "clean"])).action(async (sub) => {
3573 - if (sub == "clean") {
3574 - await cleanCache();
3575 - console.log("Cache cleaned");
3576 - }
3577 - else if (sub == "size") {
3578 - let size = await cacheSize();
3579 - console.log("Cache size is " + size);
3580 - }
3581 - });
3582 - program.command("test [filter]").description("Run tests").addOption(new Option("-r, --reporter <reporter>", "Test reporter").choices(["verbose", "compact", "files", "silent"]).default("verbose")).addOption(new Option("--mode <mode>", "Test mode").choices(["interpreter", "wasi"]).default("interpreter")).option("-w, --watch", "Enable watch mode").action(async (filter, options) => {
3583 - await installAll({ silent: true, lockfile: "ignore" });
3584 - await test(filter, options);
3585 - });
3586 - program.command("bench [filter]").description("Run benchmarks").addOption(new Option("--save", "Save benchmark results to .bench/<filename>.json")).addOption(new Option("--compare", "Run benchmark and compare results with .bench/<filename>.json")).addOption(new Option("--gc <gc>", "Garbage collector").choices(["copying", "compacting", "generational", "incremental"]).default("incremental")).addOption(new Option("--verbose", "Show more information")).action(async (filter, options) => {
3587 - await installAll({ silent: true, lockfile: "ignore" });
3588 - await bench(filter, options);
3589 - });
3590 - program.command("template").description("Apply template").action(async () => {
3591 - if (!checkConfigFile()) {
3592 - process.exit(1);
3593 - }
3594 - await template();
3595 - });
3596 - program.command("self-update").description("Update mops CLI to the latest version").option("--detached").option("--force").action(async (options) => {
3597 - if (options.force) {
3598 - selfUpdate(options);
3599 - }
3600 - else {
3601 - console.log("Please run 'npm i -g ic-mops'");
3602 - }
3603 - });
3604 - program.command("user").addArgument(new Argument("<sub>").choices(["set", "get"])).addArgument(new Argument("<prop>").choices(["name", "site", "email", "github", "twitter"])).addArgument(new Argument("[value]")).description("User settings").action(async (sub, prop, value) => {
3605 - if (sub == "get") {
3606 - await getUserProp(prop);
3607 - }
3608 - else if (sub == "set") {
3609 - if (!value) {
3610 - console.log('error: missing required argument "value"');
3611 - return;
3612 - }
3613 - await setUserProp(prop, value);
3614 - }
3615 - });
3616 - program.command("airdrop <check|claim> [canister]").action(async (sub, canister) => {
3617 - let identity = await getIdentity();
3618 - let main = await mainActor(identity);
3619 - if (sub === "check") {
3620 - let amount = await main.getAirdropAmount();
3621 - if (amount === 0n) {
3622 - console.log("No airdrop available");
3623 - return;
3624 - }
3625 - console.log(`You can claim ${Number(amount) / 1e12} TCycles`);
3626 - }
3627 - else if (sub === "claim") {
3628 - let principal;
3629 - try {
3630 - principal = Principal2.fromText(canister);
3631 - }
3632 - catch (err) {
3633 - console.log("Invalid canister id");
3634 - console.log(err);
3635 - return;
3636 - }
3637 - console.log("Sending cycles to the canister " + canister);
3638 - let res = await main.claimAirdrop(principal);
3639 - console.log(res);
3640 - }
3641 - else {
3642 - console.log("Unknown sub command. Available sub commands: check, claim");
3643 - }
3644 - });
3645 - program.command("bump [major|minor|patch]").description("Bump current package version").action(async (part) => {
3646 - await bump(part);
3647 - });
3648 - program.command("sync").description("Add missing packages and remove unused packages").addOption(new Option("--lockfile <lockfile>", "Lockfile action").choices(["save", "ignore"])).action(async (options) => {
3649 - await sync(options);
3650 - });
3651 - program.command("outdated").description("Print outdated dependencies specified in mops.toml").action(async () => {
3652 - await outdated();
3653 - });
3654 - program.command("update [pkg]").description("Update dependencies specified in mops.toml").addOption(new Option("--lockfile <lockfile>", "Lockfile action").choices(["save", "ignore"])).action(async (pkg, options) => {
3655 - await update(pkg, options);
3656 - });
3657 - program.command("transfer-ownership [to-principal]").description("Transfer ownership of the current package to another principal").action(async (toPrincipal) => {
3658 - await transferOwnership(toPrincipal);
3659 - });
3660 - program.parse();