ic-mops 1.0.0-pre.0 → 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,21 +1,28 @@
  # Mops CLI Changelog
 
- ## unreleased
+ ## 1.0.1
+ - Fixed `mops user *` commands
+
+ ## 1.0.0
  - `mops cache clean` now cleans local cache too (`.mops` folder)
  - Conflicting dependencies are now reported on `mops add/install/sources`
  - New `--conflicts <action>` option in `mops sources` command ([docs](https://docs.mops.one/cli/mops-sources#--conflicts))
  - New "Stable Memory" and "Garbage Collection" metrics are now reported in the `mops bench` command
  - `mops test` command now supports `replica` mode for running actor tests ([docs](https://docs.mops.one/cli/mops-test#--mode))
  - New `--replica` option in `mops test` command
+ - Updated npm dependencies
+ - Fixed bug with GitHub dependency with branch name containing `/`
 
  **Breaking changes**:
- - Default replica in `mops bench` commands now is `pocket-ic` if `pocket-ic` is specified in `mops.toml` in `[toolchain]` section
+ - Default replica in `mops bench` and `mops test` commands now is `pocket-ic` if `pocket-ic` is specified in `mops.toml` in `[toolchain]` section and `dfx` otherwise
  - The only supported version of `pocket-ic` is `4.0.0`
- - Removed the ability to install a specific package with `mops install <pkg>` command. Use `mops add <pkg>` instead.
- - Default reporter in `mops test` command is now `verbose` if there is only one file to test and `files` otherwise.
- - Removed legacy folders migration code. If you are using Mops CLI `<= 0.21.0`, you need first to run `npm i -g ic-mops@0.45.3` to migrate your legacy folders. After that, you can run `mops self update` to update your Mops CLI to the latest version.
+ - Dropped support for `wasmtime` version `< 14.0.0`
+ - Default reporter in `mops test` command is now `files` if test file count is > 1 and `verbose` otherwise.
  - Renamed `mops import-identity` command to `mops user import`
  - Renamed `mops whoami` command to `mops user get-principal`
+ - Removed the ability to install a specific package with `mops install <pkg>` command. Use `mops add <pkg>` instead.
+ - Removed legacy folders migration code. If you are using Mops CLI `<= 0.21.0`, you need first to run `npm i -g ic-mops@0.45.3` to migrate your legacy folders. After that, you can run `mops self update` to update your Mops CLI to the latest version.
+ - Removed `--verbose` flag from `mops sources` command
 
  ## 0.45.3
  - Fixed bug with missing `tar` package
package/bundle/cli.tgz ADDED
Binary file
package/cache.ts CHANGED
@@ -35,7 +35,7 @@ export function getMopsDepCacheName(name : string, version : string) {
 
  export function getGithubDepCacheName(name : string, repo : string) {
  const {branch, commitHash} = parseGithubURL(repo);
- return `_github/${name}#${branch}` + (commitHash ? `@${commitHash}` : '');
+ return `_github/${name}#${branch.replaceAll('/', '___')}` + (commitHash ? `@${commitHash}` : '');
  }
 
  export let addCache = (cacheName : string, source : string) => {
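
The hunk above escapes `/` in GitHub branch names so that a branch such as `feature/foo` no longer produces an unintended nested directory inside the `_github` cache folder. A minimal standalone sketch of the idea (the helper name below is hypothetical; the real code builds the name inline from `parseGithubURL`):

```ts
// Hypothetical helper illustrating the sanitization shown in the hunk above.
function githubCacheName(name : string, branch : string, commitHash ?: string) : string {
	// Replace every '/' so the whole name stays a single path segment.
	let safeBranch = branch.replaceAll('/', '___');
	return `_github/${name}#${safeBranch}` + (commitHash ? `@${commitHash}` : '');
}

// githubCacheName('my-pkg', 'feature/foo', 'abc123')
// → '_github/my-pkg#feature___foo@abc123'
```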
package/cli.ts CHANGED
@@ -25,7 +25,6 @@ import {toolchain} from './commands/toolchain/index.js';
  import {Tool} from './types.js';
  import * as self from './commands/self.js';
  import {resolvePackages} from './resolve-packages.js';
- // import {docs} from './commands/docs.js';
 
  declare global {
  // eslint-disable-next-line no-var
@@ -152,23 +151,12 @@ program
  console.log(getNetwork());
  });
 
- // user import
- program
- .command('mops user import <data>')
- .description('Import .pem file data to use as identity')
- .addOption(new Option('--no-encrypt', 'Do not ask for a password to encrypt identity'))
- .action(async (data, options) => {
- await importPem(data, options);
- await getPrincipal();
- });
-
  // sources
  program
  .command('sources')
  .description('for dfx packtool')
  .option('--no-install', 'Do not install dependencies before running sources')
  .addOption(new Option('--conflicts <action>', 'What to do with dependency version conflicts').choices(['ignore', 'warning', 'error']).default('warning'))
- .option('--verbose', 'Show more information') // for backcompat
  .action(async (options) => {
  if (!checkConfigFile()) {
  process.exit(1);
@@ -181,14 +169,6 @@ program
  console.log(sourcesArr.join('\n'));
  });
 
- // get-principal
- program
- .command('get-principal')
- .description('Print your principal')
- .action(async () => {
- await getPrincipal();
- });
-
  // search
  program
  .command('search <text>')
@@ -257,37 +237,48 @@ program
  await template();
  });
 
- // docs
- // program
- // .command('docs')
- // .description('Generate documentation (experimental)')
- // .action(async () => {
- // if (!checkConfigFile()) {
- // process.exit(1);
- // }
- // await docs();
- // });
-
- // user
- program
- .command('user')
- .addArgument(new Argument('<sub>').choices(['set', 'get']))
+ // mops user *
+ const userCommand = new Command('user').description('User management');
+
+ // user get-principal
+ userCommand
+ .command('get-principal')
+ .description('Print your principal')
+ .action(async () => {
+ await getPrincipal();
+ });
+
+ // user import
+ userCommand
+ .command('import <data>')
+ .description('Import .pem file data to use as identity')
+ .addOption(new Option('--no-encrypt', 'Do not ask for a password to encrypt identity'))
+ .action(async (data, options) => {
+ await importPem(data, options);
+ await getPrincipal();
+ });
+
+ // user set <prop> <value>
+ userCommand
+ .command('set')
  .addArgument(new Argument('<prop>').choices(['name', 'site', 'email', 'github', 'twitter']))
- .addArgument(new Argument('[value]'))
- .description('User settings')
- .action(async (sub, prop, value) => {
- if (sub == 'get') {
- await getUserProp(prop);
- }
- else if (sub == 'set') {
- if (!value) {
- console.log('error: missing required argument "value"');
- return;
- }
- await setUserProp(prop, value);
- }
+ .addArgument(new Argument('<value>'))
+ .description('Set user property')
+ .action(async (prop, value) => {
+ await setUserProp(prop, value);
  });
 
+ // user get <prop>
+ userCommand
+ .command('get')
+ .addArgument(new Argument('<prop>').choices(['name', 'site', 'email', 'github', 'twitter']))
+ .description('Get user property')
+ .action(async (prop) => {
+ await getUserProp(prop);
+ });
+
+ program.addCommand(userCommand);
+
  // bump
  program
  .command('bump [major|minor|patch]')
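
The fragments above replace the old `user <sub> <prop> [value]` command with proper Commander.js subcommands, which is what makes `mops user *` work again. A self-contained sketch of that wiring, with stubbed handlers (the real CLI calls `getPrincipal`, `importPem`, `getUserProp` and `setUserProp`):

```ts
// Minimal standalone sketch of the Commander.js subcommand pattern used above.
import {Command, Argument} from 'commander';

const program = new Command('mops');
const userCommand = new Command('user').description('User management');

userCommand
	.command('get-principal')
	.description('Print your principal')
	.action(() => console.log('principal...'));

userCommand
	.command('set')
	.addArgument(new Argument('<prop>').choices(['name', 'site', 'email', 'github', 'twitter']))
	.addArgument(new Argument('<value>'))
	.description('Set user property')
	.action((prop, value) => console.log(`set ${prop} = ${value}`));

program.addCommand(userCommand);
program.parse(); // e.g. `mops user set name alice`
```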
package/commands/bench.ts CHANGED
@@ -98,7 +98,7 @@ export async function bench(filter = '', optionsArg : Partial<BenchOptions> = {}
  files.sort();
 
  let benchDir = `${getRootDir()}/.mops/.bench/`;
- // fs.rmSync(benchDir, {recursive: true, force: true});
+ fs.rmSync(benchDir, {recursive: true, force: true});
  fs.mkdirSync(benchDir, {recursive: true});
 
  if (!options.silent) {
@@ -147,7 +147,7 @@ export async function bench(filter = '', optionsArg : Partial<BenchOptions> = {}
  options.silent || console.log('Stopping replica...');
  await replica.stop();
 
- // fs.rmSync(benchDir, {recursive: true, force: true});
+ fs.rmSync(benchDir, {recursive: true, force: true});
 
  return benchResults;
  }
@@ -95,18 +95,20 @@ export class Replica {
  curData = curData + data.toString();
 
  if (curData.includes('\n')) {
- let m = curData.match(/\[Canister ([a-z0-9-]+)\] (.*)/);
- if (!m) {
- return;
+ let chunk = curData.split('\n').slice(0, -1).join('\n');
+ let matches = [...chunk.matchAll(/\[Canister ([a-z0-9-]+)\] (.*)/g)];
+
+ for (let match of matches) {
+ let [, canisterId, msg] = match;
+ let stream = this.getCanisterStream(canisterId || '');
+ if (stream) {
+ stream.write(msg);
+ }
  }
- let [, canisterId, msg] = m;
 
- let stream = this.getCanisterStream(canisterId || '');
- if (stream) {
- stream.write(msg);
+ if (matches.length) {
+ curData = curData.split('\n').slice(-1).join('\n');
  }
-
- curData = '';
  }
  });
  }
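
The rewritten handler above stops assuming one canister log line per stderr chunk: it buffers incoming data, parses every complete `[Canister <id>] <msg>` line, and carries the trailing partial line over to the next `data` event. A standalone sketch of that buffering technique (simplified: here processed lines are always discarded, whereas the code above only trims the buffer when at least one match was found, and it routes each message to a per-canister stream):

```ts
// Standalone sketch of the line-buffering approach used above.
// Stream chunks may end mid-line, so only complete lines are parsed and
// the trailing partial line is kept in the buffer for the next chunk.
let curData = '';

function onData(data : string) : void {
	curData += data;
	if (!curData.includes('\n')) {
		return;
	}
	// Everything before the last newline is complete and safe to parse.
	let chunk = curData.split('\n').slice(0, -1).join('\n');
	let matches = [...chunk.matchAll(/\[Canister ([a-z0-9-]+)\] (.*)/g)];
	for (let [, canisterId, msg] of matches) {
		console.log(`canister ${canisterId}: ${msg}`);
	}
	// Keep the (possibly empty) partial last line for the next chunk.
	curData = curData.split('\n').slice(-1).join('\n');
}
```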
@@ -83,7 +83,11 @@ export class MMF1 {
  }
 
  _testEnd(name : string) {
- if (name !== this.stack.pop()) {
+ let last = this.stack.pop();
+ if (name !== last) {
+ console.error(`Expected test name "${last}" but got "${name}"`);
+ console.error(`Stack: ${this.stack.join(' › ')}`);
+ console.error(`File: ${this.file}`);
  throw 'mmf1._testEnd: start and end test mismatch';
  }
  this._status(name, 'pass');
@@ -247,15 +247,9 @@ export async function testWithReporter(reporterName : ReporterName | Reporter |
  wasmFile,
  ];
  }
- // backcompat
  else {
- wasmtimeArgs = [
- '--max-wasm-stack=4000000',
- '--enable-cranelift-nan-canonicalization',
- '--wasm-features',
- 'multi-memory,bulk-memory',
- wasmFile,
- ];
+ console.error(chalk.red('Minimum wasmtime version is 14.0.0. Please update wasmtime to the latest version'));
+ process.exit(1);
  }
 
  let proc = spawn(wasmtimePath, wasmtimeArgs);
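
The old fallback flags for pre-14 `wasmtime` are gone; older versions now abort with an error instead. The version check itself sits outside this hunk, so the following is only a hedged sketch of how such a gate can be expressed with the `semver` dependency already in `package.json` (the helper names are illustrative, not the actual ones in `test.ts`):

```ts
// Hedged sketch: gate on the installed wasmtime version using semver.
// `getWasmtimeVersion` and `assertWasmtimeSupported` are hypothetical helpers;
// the real check lives elsewhere in test.ts and is not part of this diff.
import {execSync} from 'node:child_process';
import semver from 'semver';

function getWasmtimeVersion(wasmtimePath : string) : string {
	// `wasmtime --version` prints something like "wasmtime-cli 14.0.4".
	let out = execSync(`${wasmtimePath} --version`).toString();
	return semver.coerce(out)?.version ?? '0.0.0';
}

function assertWasmtimeSupported(wasmtimePath : string) : void {
	if (semver.lt(getWasmtimeVersion(wasmtimePath), '14.0.0')) {
		console.error('Minimum wasmtime version is 14.0.0. Please update wasmtime to the latest version');
		process.exit(1);
	}
}
```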
@@ -1,4 +1,4 @@
- type _anon_class_10_1 =
+ type _anon_class_13_1 =
  service {
  getSchema: () -> (BenchSchema) query;
  getStats: () -> (BenchResult) query;
@@ -7,24 +7,24 @@ type _anon_class_10_1 =
  runCellUpdate: (nat, nat) -> (BenchResult);
  runCellUpdateAwait: (nat, nat) -> (BenchResult);
  };
- type BenchSchema =
+ type BenchSchema =
  record {
  cols: vec text;
  description: text;
  name: text;
  rows: vec text;
  };
- type BenchResult =
+ type BenchResult =
  record {
  instructions: int;
- rts_heap_size: int;
- stable_memory_size: int;
- rts_stable_memory_size: int;
  rts_collector_instructions: int;
+ rts_heap_size: int;
+ rts_logical_stable_memory_size: int;
  rts_memory_size: int;
  rts_mutator_instructions: int;
- rts_total_allocation: int;
- rts_logical_stable_memory_size: int;
  rts_reclaimed: int;
+ rts_stable_memory_size: int;
+ rts_total_allocation: int;
+ stable_memory_size: int;
  };
- service : () -> _anon_class_10_1
+ service : () -> _anon_class_13_1
@@ -3,14 +3,14 @@ import type { ActorMethod } from '@dfinity/agent';
  import type { IDL } from '@dfinity/candid';
 
  export interface BenchResult {
+ 'rts_stable_memory_size' : bigint,
+ 'stable_memory_size' : bigint,
  'instructions' : bigint,
  'rts_memory_size' : bigint,
- 'stable_memory_size' : bigint,
- 'rts_stable_memory_size' : bigint,
- 'rts_logical_stable_memory_size' : bigint,
  'rts_total_allocation' : bigint,
  'rts_collector_instructions' : bigint,
  'rts_mutator_instructions' : bigint,
+ 'rts_logical_stable_memory_size' : bigint,
  'rts_heap_size' : bigint,
  'rts_reclaimed' : bigint,
  }
@@ -20,7 +20,7 @@ export interface BenchSchema {
  'rows' : Array<string>,
  'description' : string,
  }
- export interface _anon_class_10_1 {
+ export interface _anon_class_13_1 {
  'getSchema' : ActorMethod<[], BenchSchema>,
  'getStats' : ActorMethod<[], BenchResult>,
  'init' : ActorMethod<[], BenchSchema>,
@@ -28,6 +28,6 @@ export interface _anon_class_10_1 {
  'runCellUpdate' : ActorMethod<[bigint, bigint], BenchResult>,
  'runCellUpdateAwait' : ActorMethod<[bigint, bigint], BenchResult>,
  }
- export interface _SERVICE extends _anon_class_10_1 {}
+ export interface _SERVICE extends _anon_class_13_1 {}
  export declare const idlFactory: IDL.InterfaceFactory;
  export declare const init: (args: { IDL: typeof IDL }) => IDL.Type[];
@@ -6,18 +6,18 @@ export const idlFactory = ({ IDL }) => {
  'description' : IDL.Text,
  });
  const BenchResult = IDL.Record({
- 'instructions' : IDL.Int,
- 'stable_memory_size' : IDL.Int,
  'rts_stable_memory_size' : IDL.Int,
- 'rts_logical_stable_memory_size' : IDL.Int,
+ 'stable_memory_size' : IDL.Int,
+ 'instructions' : IDL.Int,
  'rts_memory_size' : IDL.Int,
  'rts_total_allocation' : IDL.Int,
  'rts_collector_instructions' : IDL.Int,
  'rts_mutator_instructions' : IDL.Int,
+ 'rts_logical_stable_memory_size' : IDL.Int,
  'rts_heap_size' : IDL.Int,
  'rts_reclaimed' : IDL.Int,
  });
- const _anon_class_10_1 = IDL.Service({
+ const _anon_class_13_1 = IDL.Service({
  'getSchema' : IDL.Func([], [BenchSchema], ['query']),
  'getStats' : IDL.Func([], [BenchResult], ['query']),
  'init' : IDL.Func([], [BenchSchema], []),
@@ -25,6 +25,6 @@ export const idlFactory = ({ IDL }) => {
  'runCellUpdate' : IDL.Func([IDL.Nat, IDL.Nat], [BenchResult], []),
  'runCellUpdateAwait' : IDL.Func([IDL.Nat, IDL.Nat], [BenchResult], []),
  });
- return _anon_class_10_1;
+ return _anon_class_13_1;
  };
  export const init = ({ IDL }) => { return []; };
package/dist/cache.js CHANGED
@@ -27,7 +27,7 @@ export function getMopsDepCacheName(name, version) {
  }
  export function getGithubDepCacheName(name, repo) {
  const { branch, commitHash } = parseGithubURL(repo);
- return `_github/${name}#${branch}` + (commitHash ? `@${commitHash}` : '');
+ return `_github/${name}#${branch.replaceAll('/', '___')}` + (commitHash ? `@${commitHash}` : '');
  }
  export let addCache = (cacheName, source) => {
  let dest = path.join(getGlobalCacheDir(), 'packages', cacheName);
package/dist/cli.js CHANGED
@@ -125,22 +125,12 @@ program
  .action(async () => {
  console.log(getNetwork());
  });
- // user import
- program
- .command('mops user import <data>')
- .description('Import .pem file data to use as identity')
- .addOption(new Option('--no-encrypt', 'Do not ask for a password to encrypt identity'))
- .action(async (data, options) => {
- await importPem(data, options);
- await getPrincipal();
- });
  // sources
  program
  .command('sources')
  .description('for dfx packtool')
  .option('--no-install', 'Do not install dependencies before running sources')
  .addOption(new Option('--conflicts <action>', 'What to do with dependency version conflicts').choices(['ignore', 'warning', 'error']).default('warning'))
- .option('--verbose', 'Show more information') // for backcompat
  .action(async (options) => {
  if (!checkConfigFile()) {
  process.exit(1);
@@ -152,13 +142,6 @@ program
  let sourcesArr = await sources(options);
  console.log(sourcesArr.join('\n'));
  });
- // get-principal
- program
- .command('get-principal')
- .description('Print your principal')
- .action(async () => {
- await getPrincipal();
- });
  // search
  program
  .command('search <text>')
@@ -222,35 +205,42 @@ program
  }
  await template();
  });
- // docs
- // program
- // .command('docs')
- // .description('Generate documentation (experimental)')
- // .action(async () => {
- // if (!checkConfigFile()) {
- // process.exit(1);
- // }
- // await docs();
- // });
- // user
- program
- .command('user')
- .addArgument(new Argument('<sub>').choices(['set', 'get']))
+ // mops user *
+ const userCommand = new Command('user').description('User management');
+ // user get-principal
+ userCommand
+ .command('get-principal')
+ .description('Print your principal')
+ .action(async () => {
+ await getPrincipal();
+ });
+ // user import
+ userCommand
+ .command('import <data>')
+ .description('Import .pem file data to use as identity')
+ .addOption(new Option('--no-encrypt', 'Do not ask for a password to encrypt identity'))
+ .action(async (data, options) => {
+ await importPem(data, options);
+ await getPrincipal();
+ });
+ // user set <prop> <value>
+ userCommand
+ .command('set')
  .addArgument(new Argument('<prop>').choices(['name', 'site', 'email', 'github', 'twitter']))
- .addArgument(new Argument('[value]'))
- .description('User settings')
- .action(async (sub, prop, value) => {
- if (sub == 'get') {
- await getUserProp(prop);
- }
- else if (sub == 'set') {
- if (!value) {
- console.log('error: missing required argument "value"');
- return;
- }
- await setUserProp(prop, value);
- }
+ .addArgument(new Argument('<value>'))
+ .description('Set user property')
+ .action(async (prop, value) => {
+ await setUserProp(prop, value);
+ });
+ // user get <prop>
+ userCommand
+ .command('get')
+ .addArgument(new Argument('<prop>').choices(['name', 'site', 'email', 'github', 'twitter']))
+ .description('Get user property')
+ .action(async (prop) => {
+ await getUserProp(prop);
  });
+ program.addCommand(userCommand);
  // bump
  program
  .command('bump [major|minor|patch]')
@@ -69,7 +69,7 @@ export async function bench(filter = '', optionsArg = {}) {
  }
  files.sort();
  let benchDir = `${getRootDir()}/.mops/.bench/`;
- // fs.rmSync(benchDir, {recursive: true, force: true});
+ fs.rmSync(benchDir, { recursive: true, force: true });
  fs.mkdirSync(benchDir, { recursive: true });
  if (!options.silent) {
  console.log('Benchmark files:');
@@ -111,7 +111,7 @@ export async function bench(filter = '', optionsArg = {}) {
  });
  options.silent || console.log('Stopping replica...');
  await replica.stop();
- // fs.rmSync(benchDir, {recursive: true, force: true});
+ fs.rmSync(benchDir, { recursive: true, force: true });
  return benchResults;
  }
  function getMocArgs(options) {
@@ -70,16 +70,18 @@ export class Replica {
  proc.stderr.on('data', (data) => {
  curData = curData + data.toString();
  if (curData.includes('\n')) {
- let m = curData.match(/\[Canister ([a-z0-9-]+)\] (.*)/);
- if (!m) {
- return;
+ let chunk = curData.split('\n').slice(0, -1).join('\n');
+ let matches = [...chunk.matchAll(/\[Canister ([a-z0-9-]+)\] (.*)/g)];
+ for (let match of matches) {
+ let [, canisterId, msg] = match;
+ let stream = this.getCanisterStream(canisterId || '');
+ if (stream) {
+ stream.write(msg);
+ }
  }
- let [, canisterId, msg] = m;
- let stream = this.getCanisterStream(canisterId || '');
- if (stream) {
- stream.write(msg);
+ if (matches.length) {
+ curData = curData.split('\n').slice(-1).join('\n');
  }
- curData = '';
  }
  });
  }
@@ -67,7 +67,11 @@ export class MMF1 {
  this.stack.push(name);
  }
  _testEnd(name) {
- if (name !== this.stack.pop()) {
+ let last = this.stack.pop();
+ if (name !== last) {
+ console.error(`Expected test name "${last}" but got "${name}"`);
+ console.error(`Stack: ${this.stack.join(' › ')}`);
+ console.error(`File: ${this.file}`);
  throw 'mmf1._testEnd: start and end test mismatch';
  }
  this._status(name, 'pass');
@@ -200,15 +200,9 @@ export async function testWithReporter(reporterName, filter = '', defaultMode =
  wasmFile,
  ];
  }
- // backcompat
  else {
- wasmtimeArgs = [
- '--max-wasm-stack=4000000',
- '--enable-cranelift-nan-canonicalization',
- '--wasm-features',
- 'multi-memory,bulk-memory',
- wasmFile,
- ];
+ console.error(chalk.red('Minimum wasmtime version is 14.0.0. Please update wasmtime to the latest version'));
+ process.exit(1);
  }
  let proc = spawn(wasmtimePath, wasmtimeArgs);
  await pipeMMF(proc, mmf);
@@ -1,4 +1,4 @@
- type _anon_class_10_1 =
+ type _anon_class_13_1 =
  service {
  getSchema: () -> (BenchSchema) query;
  getStats: () -> (BenchResult) query;
@@ -7,24 +7,24 @@ type _anon_class_10_1 =
  runCellUpdate: (nat, nat) -> (BenchResult);
  runCellUpdateAwait: (nat, nat) -> (BenchResult);
  };
- type BenchSchema =
+ type BenchSchema =
  record {
  cols: vec text;
  description: text;
  name: text;
  rows: vec text;
  };
- type BenchResult =
+ type BenchResult =
  record {
  instructions: int;
- rts_heap_size: int;
- stable_memory_size: int;
- rts_stable_memory_size: int;
  rts_collector_instructions: int;
+ rts_heap_size: int;
+ rts_logical_stable_memory_size: int;
  rts_memory_size: int;
  rts_mutator_instructions: int;
- rts_total_allocation: int;
- rts_logical_stable_memory_size: int;
  rts_reclaimed: int;
+ rts_stable_memory_size: int;
+ rts_total_allocation: int;
+ stable_memory_size: int;
  };
- service : () -> _anon_class_10_1
+ service : () -> _anon_class_13_1
@@ -3,14 +3,14 @@ import type { ActorMethod } from '@dfinity/agent';
  import type { IDL } from '@dfinity/candid';
 
  export interface BenchResult {
+ 'rts_stable_memory_size' : bigint,
+ 'stable_memory_size' : bigint,
  'instructions' : bigint,
  'rts_memory_size' : bigint,
- 'stable_memory_size' : bigint,
- 'rts_stable_memory_size' : bigint,
- 'rts_logical_stable_memory_size' : bigint,
  'rts_total_allocation' : bigint,
  'rts_collector_instructions' : bigint,
  'rts_mutator_instructions' : bigint,
+ 'rts_logical_stable_memory_size' : bigint,
  'rts_heap_size' : bigint,
  'rts_reclaimed' : bigint,
  }
@@ -20,7 +20,7 @@ export interface BenchSchema {
  'rows' : Array<string>,
  'description' : string,
  }
- export interface _anon_class_10_1 {
+ export interface _anon_class_13_1 {
  'getSchema' : ActorMethod<[], BenchSchema>,
  'getStats' : ActorMethod<[], BenchResult>,
  'init' : ActorMethod<[], BenchSchema>,
@@ -28,6 +28,6 @@ export interface _anon_class_10_1 {
  'runCellUpdate' : ActorMethod<[bigint, bigint], BenchResult>,
  'runCellUpdateAwait' : ActorMethod<[bigint, bigint], BenchResult>,
  }
- export interface _SERVICE extends _anon_class_10_1 {}
+ export interface _SERVICE extends _anon_class_13_1 {}
  export declare const idlFactory: IDL.InterfaceFactory;
  export declare const init: (args: { IDL: typeof IDL }) => IDL.Type[];
@@ -6,18 +6,18 @@ export const idlFactory = ({ IDL }) => {
  'description' : IDL.Text,
  });
  const BenchResult = IDL.Record({
- 'instructions' : IDL.Int,
- 'stable_memory_size' : IDL.Int,
  'rts_stable_memory_size' : IDL.Int,
- 'rts_logical_stable_memory_size' : IDL.Int,
+ 'stable_memory_size' : IDL.Int,
+ 'instructions' : IDL.Int,
  'rts_memory_size' : IDL.Int,
  'rts_total_allocation' : IDL.Int,
  'rts_collector_instructions' : IDL.Int,
  'rts_mutator_instructions' : IDL.Int,
+ 'rts_logical_stable_memory_size' : IDL.Int,
  'rts_heap_size' : IDL.Int,
  'rts_reclaimed' : IDL.Int,
  });
- const _anon_class_10_1 = IDL.Service({
+ const _anon_class_13_1 = IDL.Service({
  'getSchema' : IDL.Func([], [BenchSchema], ['query']),
  'getStats' : IDL.Func([], [BenchResult], ['query']),
  'init' : IDL.Func([], [BenchSchema], []),
@@ -25,6 +25,6 @@ export const idlFactory = ({ IDL }) => {
  'runCellUpdate' : IDL.Func([IDL.Nat, IDL.Nat], [BenchResult], []),
  'runCellUpdateAwait' : IDL.Func([IDL.Nat, IDL.Nat], [BenchResult], []),
  });
- return _anon_class_10_1;
+ return _anon_class_13_1;
  };
  export const init = ({ IDL }) => { return []; };
package/dist/mops.js CHANGED
@@ -187,7 +187,7 @@ export function formatDir(name, version) {
  }
  export function formatGithubDir(name, repo) {
  const { branch, commitHash } = parseGithubURL(repo);
- return path.join(getRootDir(), '.mops/_github', `${name}#${branch}` + (commitHash ? `@${commitHash}` : ''));
+ return path.join(getRootDir(), '.mops/_github', `${name}#${branch.replaceAll('/', '___')}` + (commitHash ? `@${commitHash}` : ''));
  }
  export function readDfxJson() {
  let dir = process.cwd();
package/dist/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "ic-mops",
- "version": "1.0.0-pre.0",
+ "version": "1.0.1",
  "type": "module",
  "bin": {
  "mops": "bin/mops.js",
@@ -10,6 +10,10 @@
  "files": [
  "*",
  "!network.txt",
+ "!Dockerfile",
+ "!verify.sh",
+ "!build.sh",
+ "!DEVELOPMENT.md",
  "!.mops",
  "/templates"
  ],
@@ -24,13 +28,13 @@
  "node": ">=18.0.0"
  },
  "dependencies": {
- "@dfinity/agent": "2.0.0",
- "@dfinity/candid": "2.0.0",
- "@dfinity/identity": "2.0.0",
- "@dfinity/identity-secp256k1": "2.0.0",
- "@dfinity/principal": "2.0.0",
+ "@dfinity/agent": "2.1.0",
+ "@dfinity/candid": "2.1.0",
+ "@dfinity/identity": "2.1.0",
+ "@dfinity/identity-secp256k1": "2.1.0",
+ "@dfinity/principal": "2.1.0",
  "@iarna/toml": "2.2.5",
- "@noble/hashes": "1.4.0",
+ "@noble/hashes": "1.5.0",
  "as-table": "1.0.55",
  "buffer": "6.0.3",
  "cacheable-request": "12.0.1",
@@ -38,13 +42,13 @@
  "chalk": "5.3.0",
  "chokidar": "3.6.0",
  "commander": "12.1.0",
- "debounce": "2.1.0",
+ "debounce": "2.1.1",
  "decomp-tarxz": "0.1.1",
  "decompress": "4.2.1",
  "del": "7.1.0",
  "dhall-to-json-cli": "1.7.6",
- "execa": "9.3.0",
- "filesize": "10.1.4",
+ "execa": "9.3.1",
+ "filesize": "10.1.6",
  "fs-extra": "11.2.0",
  "get-folder-size": "5.0.0",
  "glob": "11.0.0",
@@ -59,7 +63,7 @@
  "node-fetch": "3.3.2",
  "octokit": "3.1.2",
  "pem-file": "1.0.1",
- "pic-ic": "0.5.1",
+ "pic-ic": "0.5.3",
  "prompts": "2.4.2",
  "semver": "7.6.3",
  "stream-to-promise": "3.0.0",
@@ -73,21 +77,15 @@
  "@types/fs-extra": "11.0.4",
  "@types/glob": "8.1.0",
  "@types/ncp": "2.0.8",
- "@types/node": "22.0.2",
+ "@types/node": "22.5.4",
  "@types/prompts": "2.4.9",
  "@types/semver": "7.5.8",
  "@types/stream-to-promise": "2.2.4",
  "@types/tar": "6.1.13",
- "bun": "1.0.35",
- "esbuild": "0.23.0",
+ "bun": "1.1.27",
+ "esbuild": "0.23.1",
  "eslint": "8.57.0",
- "tsx": "4.16.5",
- "typescript": "5.5.4"
- },
- "overrides": {
- "@dfinity/agent": "2.0.0",
- "@dfinity/identity": "2.0.0",
- "@dfinity/principal": "2.0.0",
- "@dfinity/candid": "2.0.0"
+ "tsx": "4.19.1",
+ "typescript": "5.6.2"
  }
  }
@@ -8,8 +8,9 @@ import { sha256 } from '@noble/hashes/sha256';
  import { bytesToHex } from '@noble/hashes/utils';
  import { findChangelogEntry } from './helpers/find-changelog-entry.js';
  let __dirname = new URL('.', import.meta.url).pathname;
- execSync('npm run prepare', { stdio: 'inherit', cwd: __dirname });
- execSync('npm run bundle', { stdio: 'inherit', cwd: __dirname });
+ // build using Docker
+ execSync('./build.sh', { stdio: 'inherit', cwd: __dirname });
+ let commitHash = process.env.COMMIT_HASH || execSync('git rev-parse HEAD').toString().trim();
  let version = JSON.parse(fs.readFileSync(path.resolve(__dirname, 'package.json'), 'utf8')).version;
  let major = semver.parse(version)?.major;
  let tag = semver.parse(version)?.prerelease[0] || 'latest';
@@ -17,6 +18,7 @@ let releaseNotes = findChangelogEntry(fs.readFileSync(path.resolve(__dirname, 'C
  let data = fs.readFileSync(path.resolve(__dirname, 'bundle/cli.tgz'));
  let hash = bytesToHex(sha256(data));
  let size = data.byteLength;
+ console.log(`Commit hash of release: ${commitHash}`);
  fs.cpSync(path.resolve(__dirname, 'bundle/cli.tgz'), path.resolve(__dirname, `../cli-releases/versions/${version}.tgz`), { force: false, errorOnExist: true });
  fs.cpSync(path.resolve(__dirname, `../cli-releases/versions/${version}.tgz`), path.resolve(__dirname, `../cli-releases/versions/${tag}.tgz`), { force: true, errorOnExist: false, recursive: true });
  fs.cpSync(path.resolve(__dirname, `../cli-releases/versions/${version}.tgz`), path.resolve(__dirname, `../cli-releases/versions/${major}.tgz`), { force: true, errorOnExist: false, recursive: true });
@@ -31,6 +33,7 @@ releases.versions[version] = {
  time: new Date().getTime(),
  size,
  relseaseNotes: releaseNotes,
+ commitHash: commitHash,
  url: `https://cli.mops.one/versions/${version}.tgz`,
  hash,
  };
package/dist/vessel.js CHANGED
@@ -82,7 +82,7 @@ export const downloadFromGithub = async (repo, dest, onProgress) => {
  // Prevent `onError` being called twice.
  readStream.off('error', reject);
  const tmpDir = path.resolve(process.cwd(), '.mops/_tmp/');
- const tmpFile = path.resolve(tmpDir, `${gitName}@${commitHash || branch}.zip`);
+ const tmpFile = path.resolve(tmpDir, `${gitName}@${(commitHash || branch).replaceAll('/', '___')}.zip`);
  try {
  mkdirSync(tmpDir, { recursive: true });
  pipeline(readStream, createWriteStream(tmpFile), (err) => {
package/mops.ts CHANGED
@@ -219,7 +219,7 @@ export function formatDir(name : string, version : string) {
 
  export function formatGithubDir(name : string, repo : string) {
  const {branch, commitHash} = parseGithubURL(repo);
- return path.join(getRootDir(), '.mops/_github', `${name}#${branch}` + (commitHash ? `@${commitHash}` : ''));
+ return path.join(getRootDir(), '.mops/_github', `${name}#${branch.replaceAll('/', '___')}` + (commitHash ? `@${commitHash}` : ''));
  }
 
  export function readDfxJson() : any {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "ic-mops",
- "version": "1.0.0-pre.0",
+ "version": "1.0.1",
  "type": "module",
  "bin": {
  "mops": "dist/bin/mops.js",
@@ -10,6 +10,10 @@
  "files": [
  "*",
  "!network.txt",
+ "!Dockerfile",
+ "!verify.sh",
+ "!build.sh",
+ "!DEVELOPMENT.md",
  "!.mops",
  "/templates"
  ],
@@ -24,14 +28,15 @@
  "node": ">=18.0.0"
  },
  "scripts": {
- "build": "tsc",
+ "build": "npm run prepare && npm run bundle",
+ "dist": "tsc",
  "bundle": "rm -rf ./bundle && bun build ./cli.ts --outdir ./bundle --target node --minify --external @napi-rs/lzma --external fsevents --format esm --define '__dirname=import.meta.dirname' && npm run bundle:fix && npm run bundle:copy && npm run bundle:package-json && npm run bundle:tar",
  "bundle:fix": "npx -y rexreplace 'new URL\\(\"\\.\\./templates' 'new URL(\"./templates' bundle/cli.js",
  "bundle:copy": "cp -r commands/bench bundle && cp -r bin declarations templates package.json bundle",
  "bundle:package-json": "tsx bundle-package-json.ts",
- "bundle:tar": "touch bundle/cli.tgz && tar --exclude bundle/cli.tgz -czvf bundle/cli.tgz bundle",
+ "bundle:tar": "rm -f bundle/cli.tgz && touch -t 200101010101 bundle/cli.tgz && find bundle -exec touch -d '1970-01-01 00:00:00' {} + && tar --sort name --exclude bundle/cli.tgz -cvf - bundle | gzip -n > bundle/cli.tgz",
  "copy": "cp -r commands/bench dist/commands && cp -r declarations templates package.json bin dist | true",
- "prepare": "npm run build && npm run copy && npm run fix-dist",
+ "prepare": "rm -rf dist && npm run dist && npm run copy && npm run fix-dist",
  "fix-dist": "tsx ./fix-dist.ts",
  "release": "tsx release-cli.ts",
  "check": "tsc --project tsconfig.json --noEmit",
@@ -39,13 +44,13 @@
  "esbuild": "esbuild"
  },
  "dependencies": {
- "@dfinity/agent": "2.0.0",
- "@dfinity/candid": "2.0.0",
- "@dfinity/identity": "2.0.0",
- "@dfinity/identity-secp256k1": "2.0.0",
- "@dfinity/principal": "2.0.0",
+ "@dfinity/agent": "2.1.0",
+ "@dfinity/candid": "2.1.0",
+ "@dfinity/identity": "2.1.0",
+ "@dfinity/identity-secp256k1": "2.1.0",
+ "@dfinity/principal": "2.1.0",
  "@iarna/toml": "2.2.5",
- "@noble/hashes": "1.4.0",
+ "@noble/hashes": "1.5.0",
  "as-table": "1.0.55",
  "buffer": "6.0.3",
  "cacheable-request": "12.0.1",
@@ -53,13 +58,13 @@
  "chalk": "5.3.0",
  "chokidar": "3.6.0",
  "commander": "12.1.0",
- "debounce": "2.1.0",
+ "debounce": "2.1.1",
  "decomp-tarxz": "0.1.1",
  "decompress": "4.2.1",
  "del": "7.1.0",
  "dhall-to-json-cli": "1.7.6",
- "execa": "9.3.0",
- "filesize": "10.1.4",
+ "execa": "9.3.1",
+ "filesize": "10.1.6",
  "fs-extra": "11.2.0",
  "get-folder-size": "5.0.0",
  "glob": "11.0.0",
@@ -74,7 +79,7 @@
  "node-fetch": "3.3.2",
  "octokit": "3.1.2",
  "pem-file": "1.0.1",
- "pic-ic": "0.5.1",
+ "pic-ic": "0.5.3",
  "prompts": "2.4.2",
  "semver": "7.6.3",
  "stream-to-promise": "3.0.0",
@@ -88,21 +93,15 @@
  "@types/fs-extra": "11.0.4",
  "@types/glob": "8.1.0",
  "@types/ncp": "2.0.8",
- "@types/node": "22.0.2",
+ "@types/node": "22.5.4",
  "@types/prompts": "2.4.9",
  "@types/semver": "7.5.8",
  "@types/stream-to-promise": "2.2.4",
  "@types/tar": "6.1.13",
- "bun": "1.0.35",
- "esbuild": "0.23.0",
+ "bun": "1.1.27",
+ "esbuild": "0.23.1",
  "eslint": "8.57.0",
- "tsx": "4.16.5",
- "typescript": "5.5.4"
- },
- "overrides": {
- "@dfinity/agent": "2.0.0",
- "@dfinity/identity": "2.0.0",
- "@dfinity/principal": "2.0.0",
- "@dfinity/candid": "2.0.0"
+ "tsx": "4.19.1",
+ "typescript": "5.6.2"
  }
  }
package/release-cli.ts CHANGED
@@ -11,9 +11,10 @@ import {findChangelogEntry} from './helpers/find-changelog-entry.js';
 
  let __dirname = new URL('.', import.meta.url).pathname;
 
- execSync('npm run prepare', {stdio: 'inherit', cwd: __dirname});
- execSync('npm run bundle', {stdio: 'inherit', cwd: __dirname});
+ // build using Docker
+ execSync('./build.sh', {stdio: 'inherit', cwd: __dirname});
 
+ let commitHash = process.env.COMMIT_HASH || execSync('git rev-parse HEAD').toString().trim();
  let version = JSON.parse(fs.readFileSync(path.resolve(__dirname, 'package.json'), 'utf8')).version;
  let major = semver.parse(version)?.major;
  let tag = semver.parse(version)?.prerelease[0] || 'latest';
@@ -22,6 +23,8 @@ let data = fs.readFileSync(path.resolve(__dirname, 'bundle/cli.tgz'));
  let hash = bytesToHex(sha256(data));
  let size = data.byteLength;
 
+ console.log(`Commit hash of release: ${commitHash}`);
+
  fs.cpSync(path.resolve(__dirname, 'bundle/cli.tgz'), path.resolve(__dirname, `../cli-releases/versions/${version}.tgz`), {force: false, errorOnExist: true});
 
  fs.cpSync(path.resolve(__dirname, `../cli-releases/versions/${version}.tgz`), path.resolve(__dirname, `../cli-releases/versions/${tag}.tgz`), {force: true, errorOnExist: false, recursive: true});
@@ -39,6 +42,7 @@ type Releases = {
  time : number;
  size : number;
  hash : string;
+ commitHash ?: string;
  url : string;
  relseaseNotes : string;
  }>;
@@ -55,6 +59,7 @@ releases.versions[version] = {
  time: new Date().getTime(),
  size,
  relseaseNotes: releaseNotes,
+ commitHash: commitHash,
  url: `https://cli.mops.one/versions/${version}.tgz`,
  hash,
  };
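
`release-cli.ts` now builds the bundle through `./build.sh` (Docker) and records the release commit hash alongside the existing sha256 of `bundle/cli.tgz`. Combined with the deterministic `bundle:tar` script in `package.json`, this makes the published hash reproducible. A hedged sketch of an external check that reuses the same `@noble/hashes` calls shown above (the release-entry shape here is assumed from the `Releases` type in this diff):

```ts
// Hedged sketch: recompute the tarball hash the same way release-cli.ts does
// and compare it against a published release entry (assumed shape).
import fs from 'node:fs';
import {sha256} from '@noble/hashes/sha256';
import {bytesToHex} from '@noble/hashes/utils';

type ReleaseEntry = {hash : string; commitHash ?: string; url : string};

function verifyTarball(tgzPath : string, release : ReleaseEntry) : boolean {
	let data = fs.readFileSync(tgzPath);
	let hash = bytesToHex(sha256(data));
	console.log(`local ${hash} vs published ${release.hash} (commit ${release.commitHash ?? 'unknown'})`);
	return hash === release.hash;
}
```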
package/vessel.ts CHANGED
@@ -111,7 +111,7 @@ export const downloadFromGithub = async (repo : string, dest : string, onProgres
  // Prevent `onError` being called twice.
  readStream.off('error', reject);
  const tmpDir = path.resolve(process.cwd(), '.mops/_tmp/');
- const tmpFile = path.resolve(tmpDir, `${gitName}@${commitHash || branch}.zip`);
+ const tmpFile = path.resolve(tmpDir, `${gitName}@${(commitHash || branch).replaceAll('/', '___')}.zip`);
 
  try {
  mkdirSync(tmpDir, {recursive: true});
package/DEVELOPMENT.md DELETED
@@ -1,25 +0,0 @@
- # Mops CLI
-
- 1. Update the version in `package.json` using `npm version` command.
-
- 2. Update changelog in `CHANGELOG.md` file.
-
- 3. Publish.
-
- ## Publish to npm
- ```
- npm publish
- ```
-
- ## Publish on chain
-
- 1. Prepeare release
- ```
- npm run release
- ```
-
- 2. Deploy canister
- (from root of the project)
- ```
- dfx deploy --network ic --no-wallet cli
- ```
@@ -1,5 +0,0 @@
- type ImportIdentityOptions = {
- encrypt: boolean;
- };
- export declare function importPem(data: string, options?: ImportIdentityOptions): Promise<void>;
- export {};
@@ -1,51 +0,0 @@
- import fs from 'node:fs';
- import path from 'node:path';
- import { Buffer } from 'node:buffer';
- import chalk from 'chalk';
- import prompts from 'prompts';
- import { deleteSync } from 'del';
- import { globalConfigDir } from '../mops.js';
- import { encrypt } from '../pem.js';
- export async function importPem(data, options = { encrypt: true }) {
- try {
- if (!fs.existsSync(globalConfigDir)) {
- fs.mkdirSync(globalConfigDir);
- }
- let password = '';
- if (options.encrypt) {
- let res = await prompts({
- type: 'invisible',
- name: 'password',
- message: 'Enter password to encrypt identity.pem',
- });
- password = res.password;
- if (!password) {
- let res = await prompts({
- type: 'confirm',
- name: 'ok',
- message: 'Are you sure you don\'t want to protect your identity.pem with a password?',
- });
- if (!res.ok) {
- console.log('aborted');
- return;
- }
- }
- }
- let identityPem = path.resolve(globalConfigDir, 'identity.pem');
- let identityPemEncrypted = path.resolve(globalConfigDir, 'identity.pem.encrypted');
- deleteSync([identityPem, identityPemEncrypted], { force: true });
- // encrypted
- if (password) {
- let encrypted = await encrypt(Buffer.from(data), password);
- fs.writeFileSync(identityPemEncrypted, encrypted);
- }
- // unencrypted
- else {
- fs.writeFileSync(identityPem, data);
- }
- console.log(chalk.green('Success'));
- }
- catch (err) {
- console.log(chalk.red('Error: ') + err);
- }
- }
@@ -1 +0,0 @@
- export declare function whoami(): Promise<void>;
@@ -1,11 +0,0 @@
- import chalk from 'chalk';
- import { getIdentity } from '../mops.js';
- export async function whoami() {
- let identity = await getIdentity();
- if (identity) {
- console.log(identity.getPrincipal().toText());
- }
- else {
- console.log(chalk.red('Error: ') + 'identity not found. Run ' + chalk.greenBright('mops import-identity') + ' command.');
- }
- }