@intelligentgraphics/ig.gfx.packager 3.0.9 → 3.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/build/bin.mjs +6 -0
  2. package/build/bin.mjs.map +1 -0
  3. package/build/cli-381989cc.mjs +1389 -0
  4. package/build/cli-381989cc.mjs.map +1 -0
  5. package/build/dependencies-1f665204.mjs +129 -0
  6. package/build/dependencies-1f665204.mjs.map +1 -0
  7. package/build/generateIndex-074f4aa1.mjs +257 -0
  8. package/build/generateIndex-074f4aa1.mjs.map +1 -0
  9. package/build/generateParameterType-4c9e95a5.mjs +75 -0
  10. package/build/generateParameterType-4c9e95a5.mjs.map +1 -0
  11. package/build/index-06ac2c4c.mjs +495 -0
  12. package/build/index-06ac2c4c.mjs.map +1 -0
  13. package/build/index-cc42a478.mjs +312 -0
  14. package/build/index-cc42a478.mjs.map +1 -0
  15. package/build/postinstall-c38d9b55.mjs +67 -0
  16. package/build/postinstall-c38d9b55.mjs.map +1 -0
  17. package/build/publishNpm-8ec1b871.mjs +134 -0
  18. package/build/publishNpm-8ec1b871.mjs.map +1 -0
  19. package/build/versionFile-aa8b6b7a.mjs +384 -0
  20. package/build/versionFile-aa8b6b7a.mjs.map +1 -0
  21. package/lib/lib.mjs +1476 -0
  22. package/package.json +13 -9
  23. package/readme.md +86 -2
  24. package/build/cli-17d957b0.js +0 -2531
  25. package/build/cli-17d957b0.js.map +0 -1
  26. package/build/dependencies-51916db0.js +0 -149
  27. package/build/dependencies-51916db0.js.map +0 -1
  28. package/build/generateIndex-59993f0f.js +0 -266
  29. package/build/generateIndex-59993f0f.js.map +0 -1
  30. package/build/generateParameterType-d3ab08fd.js +0 -74
  31. package/build/generateParameterType-d3ab08fd.js.map +0 -1
  32. package/build/index-a48c5d0a.js +0 -480
  33. package/build/index-a48c5d0a.js.map +0 -1
  34. package/build/index-ac2cd050.js +0 -308
  35. package/build/index-ac2cd050.js.map +0 -1
  36. package/build/index.mjs +0 -6
  37. package/build/index.mjs.map +0 -1
  38. package/build/postinstall-9990fb31.js +0 -64
  39. package/build/postinstall-9990fb31.js.map +0 -1
  40. package/build/publishNpm-74a96626.js +0 -133
  41. package/build/publishNpm-74a96626.js.map +0 -1
  42. package/build/versionFile-68e35b54.js +0 -370
  43. package/build/versionFile-68e35b54.js.map +0 -1
package/build/cli-381989cc.mjs
@@ -0,0 +1,1389 @@
1
+ import updateNotifier from 'update-notifier';
2
+ import * as fs$1 from 'fs';
3
+ import fs__default from 'fs';
4
+ import * as path$1 from 'path';
5
+ import path__default from 'path';
6
+ import yargs from 'yargs/yargs';
7
+ import { fileURLToPath } from 'url';
8
+ import glob from 'glob';
9
+ import 'resolve';
10
+ import { writePackageSync } from 'write-pkg';
11
+ import 'node:path';
12
+ import 'node:fs';
13
+ import require$$0 from 'assert';
14
+ import require$$2 from 'events';
15
+ import 'core-js/modules/es.typed-array.set.js';
16
+ import require$$6 from 'util';
17
+ import axios from 'axios';
18
+ import inquirer from 'inquirer';
19
+
20
+ const stripUtf8Bom = (text)=>{
21
+ // Catches EFBBBF (UTF-8 BOM) because the buffer-to-string
22
+ // conversion translates it to FEFF (UTF-16 BOM).
23
+ if (text.charCodeAt(0) === 0xfeff) {
24
+ return text.slice(1);
25
+ }
26
+ return text;
27
+ };
28
+
29
+ const readNpmManifest = (directory)=>{
30
+ const packageJsonPath = path$1.join(directory, "package.json");
31
+ const packageJson = stripUtf8Bom(fs$1.readFileSync(packageJsonPath, {
32
+ encoding: "utf8"
33
+ }));
34
+ return JSON.parse(packageJson);
35
+ };
36
+ const writeNpmManifest = (directory, packageJson)=>{
37
+ const packageJsonPath = path$1.join(directory, "package.json");
38
+ writePackageSync(packageJsonPath, packageJson);
39
+ };
40
+
41
+ const getNodeErrorCode = (error)=>{
42
+ if (error !== null && typeof error === "object" && error.code !== undefined) {
43
+ return error.code;
44
+ }
45
+ };
46
+ /**
47
+ * No such file or directory: Commonly raised by fs operations to indicate that a component of the specified pathname does not exist. No entity (file or directory) could be found by the given path.
48
+ *
49
+ * @param {unknown} error
50
+ */ const isErrorENOENT = (error)=>getNodeErrorCode(error) === "ENOENT";
51
+
52
+ // Functionality related to working with a single package.
53
+ const PACKAGE_FILE = "_Package.json";
54
+ const INDEX_FILE = "_Index.json";
55
+ const ANIMATION_FILE_SUFFIX = ".animation.json";
56
+ const getCreatorIndexParameterPrimaryJSType = (type)=>{
57
+ switch(type){
58
+ case "LengthM":
59
+ case "ArcDEG":
60
+ case "Integer":
61
+ case "Int":
62
+ case "Float":
63
+ return "number";
64
+ case "Boolean":
65
+ case "Bool":
66
+ return "boolean";
67
+ case "String":
68
+ case "Material":
69
+ case "Geometry":
70
+ case "Animation":
71
+ case "Interactor":
72
+ case "Evaluator":
73
+ default:
74
+ return "string";
75
+ }
76
+ };
77
+ const parseCreatorPackageName = (manifest)=>{
78
+ const [domain, ...subdomainParts] = manifest.Package.split(".");
79
+ return {
80
+ domain,
81
+ subdomain: subdomainParts.join(".")
82
+ };
83
+ };
84
+ /**
85
+ * Detects the package at the given directory.
86
+ *
87
+ * @param {string} directory
88
+ * @returns {PackageLocation}
89
+ */ const detectPackage = (workspace, directory)=>{
90
+ directory = path$1.resolve(workspace.path, directory);
91
+ const scriptsPath = path$1.join(directory, "Scripts");
92
+ const tsPath = path$1.join(directory, "ts");
93
+ let location;
94
+ if (fs$1.existsSync(scriptsPath)) {
95
+ location = {
96
+ _kind: "PackageLocation",
97
+ path: directory,
98
+ scriptsDir: scriptsPath,
99
+ manifestDir: scriptsPath
100
+ };
101
+ } else if (fs$1.existsSync(tsPath)) {
102
+ location = {
103
+ _kind: "PackageLocation",
104
+ path: directory,
105
+ scriptsDir: tsPath,
106
+ manifestDir: directory
107
+ };
108
+ } else {
109
+ location = {
110
+ _kind: "PackageLocation",
111
+ path: directory,
112
+ scriptsDir: directory,
113
+ manifestDir: directory
114
+ };
115
+ }
116
+ try {
117
+ readPackageCreatorManifest(location);
118
+ } catch (err) {
119
+ if (isErrorENOENT(err)) {
120
+ throw new Error(`No _Package.json found in ${location.manifestDir}`);
121
+ }
122
+ throw err;
123
+ }
124
+ return location;
125
+ };
126
+ const readPackageCreatorManifest = (location)=>{
127
+ const packageJsonPath = path$1.join(location.manifestDir, PACKAGE_FILE);
128
+ const packageJson = stripUtf8Bom(fs$1.readFileSync(packageJsonPath, {
129
+ encoding: "utf8"
130
+ }));
131
+ return JSON.parse(packageJson);
132
+ };
133
+ const writePackageCreatorManifest = (location, creatorPackage)=>{
134
+ const packageJsonPath = path$1.join(location.manifestDir, PACKAGE_FILE);
135
+ fs$1.writeFileSync(packageJsonPath, JSON.stringify(creatorPackage, null, "\t") + "\n");
136
+ };
137
+ const getPackageCreatorIndexPath = (location)=>path$1.join(location.manifestDir, INDEX_FILE);
138
+ const readPackageCreatorIndex = (location)=>{
139
+ try {
140
+ const indexPath = getPackageCreatorIndexPath(location);
141
+ const index = stripUtf8Bom(fs$1.readFileSync(indexPath, {
142
+ encoding: "utf8"
143
+ }));
144
+ return JSON.parse(index);
145
+ } catch (err) {
146
+ if (isErrorENOENT(err)) {
147
+ return undefined;
148
+ }
149
+ throw err;
150
+ }
151
+ };
152
+ const writePackageCreatorIndex = (location, index)=>{
153
+ const indexPath = getPackageCreatorIndexPath(location);
154
+ fs$1.writeFileSync(indexPath, JSON.stringify(index, null, "\t") + "\n");
155
+ };
156
+ const readPackageNpmManifest = (location)=>{
157
+ try {
158
+ return readNpmManifest(location.manifestDir);
159
+ } catch (err) {
160
+ if (isErrorENOENT(err)) {
161
+ return undefined;
162
+ }
163
+ throw err;
164
+ }
165
+ };
166
+ const writePackageNpmManifest = (location, packageJson)=>{
167
+ writeNpmManifest(location.manifestDir, packageJson);
168
+ };
169
+ const readPackageAnimationList = (location)=>{
170
+ const directoryContent = fs$1.readdirSync(location.manifestDir);
171
+ const animationPathList = [];
172
+ for (const entry of directoryContent){
173
+ if (entry.endsWith(ANIMATION_FILE_SUFFIX)) {
174
+ const animationPath = path$1.join(location.manifestDir, entry);
175
+ animationPathList.push(animationPath);
176
+ }
177
+ }
178
+ return animationPathList;
179
+ };
180
+ const getPackageReleasesDirectory = (location)=>path$1.join(location.path, "Releases");
181
+
182
+ // Functionality related to working with a workspace consisting of multiple packages.
183
+ const detectWorkspace = (directory)=>{
184
+ directory = path$1.resolve(process.cwd(), directory);
185
+ return {
186
+ _kind: "WorkspaceLocation",
187
+ path: directory
188
+ };
189
+ };
190
+ const readWorkspaceNpmManifest = (workspace)=>{
191
+ try {
192
+ return readNpmManifest(workspace.path);
193
+ } catch (err) {
194
+ if (isErrorENOENT(err)) {
195
+ throw new Error(`Expected a package.json file to exist in ${workspace.path}. See packager readme for instructions on how to create the package.json.`);
196
+ }
197
+ throw err;
198
+ }
199
+ };
200
+ const writeWorkspaceNpmManifest = (workspace, packageJson)=>writeNpmManifest(workspace.path, packageJson);
201
+ const getWorkspaceOutputPath = (workspace)=>path$1.join(workspace.path, "bin");
202
+ const getWorkspaceLibPath = (workspace)=>path$1.join(workspace.path, "lib");
203
+ function* iterateWorkspacePackages(workspace) {
204
+ const entries = fs$1.readdirSync(workspace.path, {
205
+ withFileTypes: true
206
+ });
207
+ for (const entry of entries){
208
+ if (!entry.isDirectory()) {
209
+ continue;
210
+ }
211
+ try {
212
+ yield detectPackage(workspace, entry.name);
213
+ } catch {}
214
+ }
215
+ }
216
+
217
+ const getPackageTypescriptFiles = (location)=>glob.sync("**/*.ts", {
218
+ absolute: true,
219
+ cwd: location.scriptsDir,
220
+ ignore: "node_modules/**/*"
221
+ });
222
+
223
+ var writeFileAtomicExports = {};
224
+ var writeFileAtomic = {
225
+ get exports () {
226
+ return writeFileAtomicExports;
227
+ },
228
+ set exports (v){
229
+ writeFileAtomicExports = v;
230
+ }
231
+ };
232
+
233
+ var imurmurhashExports = {};
234
+ var imurmurhash = {
235
+ get exports () {
236
+ return imurmurhashExports;
237
+ },
238
+ set exports (v){
239
+ imurmurhashExports = v;
240
+ }
241
+ };
242
+
243
+ (function(module) {
244
+ (function() {
245
+ var cache;
246
+ // Call this function without `new` to use the cached object (good for
247
+ // single-threaded environments), or with `new` to create a new object.
248
+ //
249
+ // @param {string} key A UTF-16 or ASCII string
250
+ // @param {number} seed An optional positive integer
251
+ // @return {object} A MurmurHash3 object for incremental hashing
252
+ function MurmurHash3(key, seed) {
253
+ var m = this instanceof MurmurHash3 ? this : cache;
254
+ m.reset(seed);
255
+ if (typeof key === 'string' && key.length > 0) {
256
+ m.hash(key);
257
+ }
258
+ if (m !== this) {
259
+ return m;
260
+ }
261
+ }
262
+ // Incrementally add a string to this hash
263
+ //
264
+ // @param {string} key A UTF-16 or ASCII string
265
+ // @return {object} this
266
+ MurmurHash3.prototype.hash = function(key) {
267
+ var h1, k1, i, top, len;
268
+ len = key.length;
269
+ this.len += len;
270
+ k1 = this.k1;
271
+ i = 0;
272
+ switch(this.rem){
273
+ case 0:
274
+ k1 ^= len > i ? key.charCodeAt(i++) & 0xffff : 0;
275
+ case 1:
276
+ k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) << 8 : 0;
277
+ case 2:
278
+ k1 ^= len > i ? (key.charCodeAt(i++) & 0xffff) << 16 : 0;
279
+ case 3:
280
+ k1 ^= len > i ? (key.charCodeAt(i) & 0xff) << 24 : 0;
281
+ k1 ^= len > i ? (key.charCodeAt(i++) & 0xff00) >> 8 : 0;
282
+ }
283
+ this.rem = len + this.rem & 3; // & 3 is same as % 4
284
+ len -= this.rem;
285
+ if (len > 0) {
286
+ h1 = this.h1;
287
+ while(1){
288
+ k1 = k1 * 0x2d51 + (k1 & 0xffff) * 0xcc9e0000 & 0xffffffff;
289
+ k1 = k1 << 15 | k1 >>> 17;
290
+ k1 = k1 * 0x3593 + (k1 & 0xffff) * 0x1b870000 & 0xffffffff;
291
+ h1 ^= k1;
292
+ h1 = h1 << 13 | h1 >>> 19;
293
+ h1 = h1 * 5 + 0xe6546b64 & 0xffffffff;
294
+ if (i >= len) {
295
+ break;
296
+ }
297
+ k1 = key.charCodeAt(i++) & 0xffff ^ (key.charCodeAt(i++) & 0xffff) << 8 ^ (key.charCodeAt(i++) & 0xffff) << 16;
298
+ top = key.charCodeAt(i++);
299
+ k1 ^= (top & 0xff) << 24 ^ (top & 0xff00) >> 8;
300
+ }
301
+ k1 = 0;
302
+ switch(this.rem){
303
+ case 3:
304
+ k1 ^= (key.charCodeAt(i + 2) & 0xffff) << 16;
305
+ case 2:
306
+ k1 ^= (key.charCodeAt(i + 1) & 0xffff) << 8;
307
+ case 1:
308
+ k1 ^= key.charCodeAt(i) & 0xffff;
309
+ }
310
+ this.h1 = h1;
311
+ }
312
+ this.k1 = k1;
313
+ return this;
314
+ };
315
+ // Get the result of this hash
316
+ //
317
+ // @return {number} The 32-bit hash
318
+ MurmurHash3.prototype.result = function() {
319
+ var k1, h1;
320
+ k1 = this.k1;
321
+ h1 = this.h1;
322
+ if (k1 > 0) {
323
+ k1 = k1 * 0x2d51 + (k1 & 0xffff) * 0xcc9e0000 & 0xffffffff;
324
+ k1 = k1 << 15 | k1 >>> 17;
325
+ k1 = k1 * 0x3593 + (k1 & 0xffff) * 0x1b870000 & 0xffffffff;
326
+ h1 ^= k1;
327
+ }
328
+ h1 ^= this.len;
329
+ h1 ^= h1 >>> 16;
330
+ h1 = h1 * 0xca6b + (h1 & 0xffff) * 0x85eb0000 & 0xffffffff;
331
+ h1 ^= h1 >>> 13;
332
+ h1 = h1 * 0xae35 + (h1 & 0xffff) * 0xc2b20000 & 0xffffffff;
333
+ h1 ^= h1 >>> 16;
334
+ return h1 >>> 0;
335
+ };
336
+ // Reset the hash object for reuse
337
+ //
338
+ // @param {number} seed An optional positive integer
339
+ MurmurHash3.prototype.reset = function(seed) {
340
+ this.h1 = typeof seed === 'number' ? seed : 0;
341
+ this.rem = this.k1 = this.len = 0;
342
+ return this;
343
+ };
344
+ // A cached object to use. This can be safely used if you're in a single-
345
+ // threaded environment, otherwise you need to create new hashes to use.
346
+ cache = new MurmurHash3();
347
+ {
348
+ module.exports = MurmurHash3;
349
+ }
350
+ })();
351
+ })(imurmurhash);
352
+
353
+ var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
354
+
355
+ var signalExitExports = {};
356
+ var signalExit = {
357
+ get exports () {
358
+ return signalExitExports;
359
+ },
360
+ set exports (v){
361
+ signalExitExports = v;
362
+ }
363
+ };
364
+
365
+ var signalsExports = {};
366
+ var signals$1 = {
367
+ get exports () {
368
+ return signalsExports;
369
+ },
370
+ set exports (v){
371
+ signalsExports = v;
372
+ }
373
+ };
374
+
375
+ var hasRequiredSignals;
376
+ function requireSignals() {
377
+ if (hasRequiredSignals) return signalsExports;
378
+ hasRequiredSignals = 1;
379
+ (function(module) {
380
+ // This is not the set of all possible signals.
381
+ //
382
+ // It IS, however, the set of all signals that trigger
383
+ // an exit on either Linux or BSD systems. Linux is a
384
+ // superset of the signal names supported on BSD, and
385
+ // the unknown signals just fail to register, so we can
386
+ // catch that easily enough.
387
+ //
388
+ // Don't bother with SIGKILL. It's uncatchable, which
389
+ // means that we can't fire any callbacks anyway.
390
+ //
391
+ // If a user does happen to register a handler on a non-
392
+ // fatal signal like SIGWINCH or something, and then
393
+ // exit, it'll end up firing `process.emit('exit')`, so
394
+ // the handler will be fired anyway.
395
+ //
396
+ // SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised
397
+ // artificially, inherently leave the process in a
398
+ // state from which it is not safe to try and enter JS
399
+ // listeners.
400
+ module.exports = [
401
+ 'SIGABRT',
402
+ 'SIGALRM',
403
+ 'SIGHUP',
404
+ 'SIGINT',
405
+ 'SIGTERM'
406
+ ];
407
+ if (process.platform !== 'win32') {
408
+ module.exports.push('SIGVTALRM', 'SIGXCPU', 'SIGXFSZ', 'SIGUSR2', 'SIGTRAP', 'SIGSYS', 'SIGQUIT', 'SIGIOT');
409
+ }
410
+ if (process.platform === 'linux') {
411
+ module.exports.push('SIGIO', 'SIGPOLL', 'SIGPWR', 'SIGSTKFLT', 'SIGUNUSED');
412
+ }
413
+ })(signals$1);
414
+ return signalsExports;
415
+ }
416
+
417
+ // Note: since nyc uses this module to output coverage, any lines
418
+ // that are in the direct sync flow of nyc's outputCoverage are
419
+ // ignored, since we can never get coverage for them.
420
+ // grab a reference to node's real process object right away
421
+ var process$1 = commonjsGlobal.process;
422
+ const processOk = function(process) {
423
+ return process && typeof process === 'object' && typeof process.removeListener === 'function' && typeof process.emit === 'function' && typeof process.reallyExit === 'function' && typeof process.listeners === 'function' && typeof process.kill === 'function' && typeof process.pid === 'number' && typeof process.on === 'function';
424
+ };
425
+ // some kind of non-node environment, just no-op
426
+ /* istanbul ignore if */ if (!processOk(process$1)) {
427
+ signalExit.exports = function() {
428
+ return function() {};
429
+ };
430
+ } else {
431
+ var assert = require$$0;
432
+ var signals = requireSignals();
433
+ var isWin = /^win/i.test(process$1.platform);
434
+ var EE = require$$2;
435
+ /* istanbul ignore if */ if (typeof EE !== 'function') {
436
+ EE = EE.EventEmitter;
437
+ }
438
+ var emitter;
439
+ if (process$1.__signal_exit_emitter__) {
440
+ emitter = process$1.__signal_exit_emitter__;
441
+ } else {
442
+ emitter = process$1.__signal_exit_emitter__ = new EE();
443
+ emitter.count = 0;
444
+ emitter.emitted = {};
445
+ }
446
+ // Because this emitter is a global, we have to check to see if a
447
+ // previous version of this library failed to enable infinite listeners.
448
+ // I know what you're about to say. But literally everything about
449
+ // signal-exit is a compromise with evil. Get used to it.
450
+ if (!emitter.infinite) {
451
+ emitter.setMaxListeners(Infinity);
452
+ emitter.infinite = true;
453
+ }
454
+ signalExit.exports = function(cb, opts) {
455
+ /* istanbul ignore if */ if (!processOk(commonjsGlobal.process)) {
456
+ return function() {};
457
+ }
458
+ assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler');
459
+ if (loaded === false) {
460
+ load();
461
+ }
462
+ var ev = 'exit';
463
+ if (opts && opts.alwaysLast) {
464
+ ev = 'afterexit';
465
+ }
466
+ var remove = function() {
467
+ emitter.removeListener(ev, cb);
468
+ if (emitter.listeners('exit').length === 0 && emitter.listeners('afterexit').length === 0) {
469
+ unload();
470
+ }
471
+ };
472
+ emitter.on(ev, cb);
473
+ return remove;
474
+ };
475
+ var unload = function unload() {
476
+ if (!loaded || !processOk(commonjsGlobal.process)) {
477
+ return;
478
+ }
479
+ loaded = false;
480
+ signals.forEach(function(sig) {
481
+ try {
482
+ process$1.removeListener(sig, sigListeners[sig]);
483
+ } catch (er) {}
484
+ });
485
+ process$1.emit = originalProcessEmit;
486
+ process$1.reallyExit = originalProcessReallyExit;
487
+ emitter.count -= 1;
488
+ };
489
+ signalExitExports.unload = unload;
490
+ var emit = function emit(event, code, signal) {
491
+ /* istanbul ignore if */ if (emitter.emitted[event]) {
492
+ return;
493
+ }
494
+ emitter.emitted[event] = true;
495
+ emitter.emit(event, code, signal);
496
+ };
497
+ // { <signal>: <listener fn>, ... }
498
+ var sigListeners = {};
499
+ signals.forEach(function(sig) {
500
+ sigListeners[sig] = function listener() {
501
+ /* istanbul ignore if */ if (!processOk(commonjsGlobal.process)) {
502
+ return;
503
+ }
504
+ // If there are no other listeners, an exit is coming!
505
+ // Simplest way: remove us and then re-send the signal.
506
+ // We know that this will kill the process, so we can
507
+ // safely emit now.
508
+ var listeners = process$1.listeners(sig);
509
+ if (listeners.length === emitter.count) {
510
+ unload();
511
+ emit('exit', null, sig);
512
+ /* istanbul ignore next */ emit('afterexit', null, sig);
513
+ /* istanbul ignore next */ if (isWin && sig === 'SIGHUP') {
514
+ // "SIGHUP" throws an `ENOSYS` error on Windows,
515
+ // so use a supported signal instead
516
+ sig = 'SIGINT';
517
+ }
518
+ /* istanbul ignore next */ process$1.kill(process$1.pid, sig);
519
+ }
520
+ };
521
+ });
522
+ signalExitExports.signals = function() {
523
+ return signals;
524
+ };
525
+ var loaded = false;
526
+ var load = function load() {
527
+ if (loaded || !processOk(commonjsGlobal.process)) {
528
+ return;
529
+ }
530
+ loaded = true;
531
+ // This is the number of onSignalExit's that are in play.
532
+ // It's important so that we can count the correct number of
533
+ // listeners on signals, and don't wait for the other one to
534
+ // handle it instead of us.
535
+ emitter.count += 1;
536
+ signals = signals.filter(function(sig) {
537
+ try {
538
+ process$1.on(sig, sigListeners[sig]);
539
+ return true;
540
+ } catch (er) {
541
+ return false;
542
+ }
543
+ });
544
+ process$1.emit = processEmit;
545
+ process$1.reallyExit = processReallyExit;
546
+ };
547
+ signalExitExports.load = load;
548
+ var originalProcessReallyExit = process$1.reallyExit;
549
+ var processReallyExit = function processReallyExit(code) {
550
+ /* istanbul ignore if */ if (!processOk(commonjsGlobal.process)) {
551
+ return;
552
+ }
553
+ process$1.exitCode = code || /* istanbul ignore next */ 0;
554
+ emit('exit', process$1.exitCode, null);
555
+ /* istanbul ignore next */ emit('afterexit', process$1.exitCode, null);
556
+ /* istanbul ignore next */ originalProcessReallyExit.call(process$1, process$1.exitCode);
557
+ };
558
+ var originalProcessEmit = process$1.emit;
559
+ var processEmit = function processEmit(ev, arg) {
560
+ if (ev === 'exit' && processOk(commonjsGlobal.process)) {
561
+ /* istanbul ignore else */ if (arg !== undefined) {
562
+ process$1.exitCode = arg;
563
+ }
564
+ var ret = originalProcessEmit.apply(this, arguments);
565
+ /* istanbul ignore next */ emit('exit', process$1.exitCode, null);
566
+ /* istanbul ignore next */ emit('afterexit', process$1.exitCode, null);
567
+ /* istanbul ignore next */ return ret;
568
+ } else {
569
+ return originalProcessEmit.apply(this, arguments);
570
+ }
571
+ };
572
+ }
573
+
574
+ var isTypedarray = isTypedArray$2;
575
+ isTypedArray$2.strict = isStrictTypedArray;
576
+ isTypedArray$2.loose = isLooseTypedArray;
577
+ var toString = Object.prototype.toString;
578
+ var names = {
579
+ '[object Int8Array]': true,
580
+ '[object Int16Array]': true,
581
+ '[object Int32Array]': true,
582
+ '[object Uint8Array]': true,
583
+ '[object Uint8ClampedArray]': true,
584
+ '[object Uint16Array]': true,
585
+ '[object Uint32Array]': true,
586
+ '[object Float32Array]': true,
587
+ '[object Float64Array]': true
588
+ };
589
+ function isTypedArray$2(arr) {
590
+ return isStrictTypedArray(arr) || isLooseTypedArray(arr);
591
+ }
592
+ function isStrictTypedArray(arr) {
593
+ return arr instanceof Int8Array || arr instanceof Int16Array || arr instanceof Int32Array || arr instanceof Uint8Array || arr instanceof Uint8ClampedArray || arr instanceof Uint16Array || arr instanceof Uint32Array || arr instanceof Float32Array || arr instanceof Float64Array;
594
+ }
595
+ function isLooseTypedArray(arr) {
596
+ return names[toString.call(arr)];
597
+ }
598
+
599
+ var isTypedArray$1 = isTypedarray.strict;
600
+ var typedarrayToBuffer = function typedarrayToBuffer(arr) {
601
+ if (isTypedArray$1(arr)) {
602
+ // To avoid a copy, use the typed array's underlying ArrayBuffer to back new Buffer
603
+ var buf = Buffer.from(arr.buffer);
604
+ if (arr.byteLength !== arr.buffer.byteLength) {
605
+ // Respect the "view", i.e. byteOffset and byteLength, without doing a copy
606
+ buf = buf.slice(arr.byteOffset, arr.byteOffset + arr.byteLength);
607
+ }
608
+ return buf;
609
+ } else {
610
+ // Pass through all other types to `Buffer.from`
611
+ return Buffer.from(arr);
612
+ }
613
+ };
614
+
615
+ writeFileAtomic.exports = writeFile;
616
+ writeFileAtomicExports.sync = writeFileSync;
617
+ writeFileAtomicExports._getTmpname = getTmpname // for testing
618
+ ;
619
+ writeFileAtomicExports._cleanupOnExit = cleanupOnExit;
620
+ const fs = fs__default;
621
+ const MurmurHash3 = imurmurhashExports;
622
+ const onExit = signalExitExports;
623
+ const path = path__default;
624
+ const isTypedArray = isTypedarray;
625
+ const typedArrayToBuffer = typedarrayToBuffer;
626
+ const { promisify } = require$$6;
627
+ const activeFiles = {};
628
+ // if we run inside of a worker_thread, `process.pid` is not unique
629
+ /* istanbul ignore next */ const threadId = function getId() {
630
+ try {
631
+ const workerThreads = require('worker_threads');
632
+ /// if we are in main thread, this is set to `0`
633
+ return workerThreads.threadId;
634
+ } catch (e) {
635
+ // worker_threads are not available, fallback to 0
636
+ return 0;
637
+ }
638
+ }();
639
+ let invocations = 0;
640
+ function getTmpname(filename) {
641
+ return filename + '.' + MurmurHash3(__filename).hash(String(process.pid)).hash(String(threadId)).hash(String(++invocations)).result();
642
+ }
643
+ function cleanupOnExit(tmpfile) {
644
+ return ()=>{
645
+ try {
646
+ fs.unlinkSync(typeof tmpfile === 'function' ? tmpfile() : tmpfile);
647
+ } catch (_) {}
648
+ };
649
+ }
650
+ function serializeActiveFile(absoluteName) {
651
+ return new Promise((resolve)=>{
652
+ // make a queue if it doesn't already exist
653
+ if (!activeFiles[absoluteName]) activeFiles[absoluteName] = [];
654
+ activeFiles[absoluteName].push(resolve) // add this job to the queue
655
+ ;
656
+ if (activeFiles[absoluteName].length === 1) resolve() // kick off the first one
657
+ ;
658
+ });
659
+ }
660
+ // https://github.com/isaacs/node-graceful-fs/blob/master/polyfills.js#L315-L342
661
+ function isChownErrOk(err) {
662
+ if (err.code === 'ENOSYS') {
663
+ return true;
664
+ }
665
+ const nonroot = !process.getuid || process.getuid() !== 0;
666
+ if (nonroot) {
667
+ if (err.code === 'EINVAL' || err.code === 'EPERM') {
668
+ return true;
669
+ }
670
+ }
671
+ return false;
672
+ }
673
+ async function writeFileAsync(filename, data, options = {}) {
674
+ if (typeof options === 'string') {
675
+ options = {
676
+ encoding: options
677
+ };
678
+ }
679
+ let fd;
680
+ let tmpfile;
681
+ /* istanbul ignore next -- The closure only gets called when onExit triggers */ const removeOnExitHandler = onExit(cleanupOnExit(()=>tmpfile));
682
+ const absoluteName = path.resolve(filename);
683
+ try {
684
+ await serializeActiveFile(absoluteName);
685
+ const truename = await promisify(fs.realpath)(filename).catch(()=>filename);
686
+ tmpfile = getTmpname(truename);
687
+ if (!options.mode || !options.chown) {
688
+ // Either mode or chown is not explicitly set
689
+ // Default behavior is to copy it from original file
690
+ const stats = await promisify(fs.stat)(truename).catch(()=>{});
691
+ if (stats) {
692
+ if (options.mode == null) {
693
+ options.mode = stats.mode;
694
+ }
695
+ if (options.chown == null && process.getuid) {
696
+ options.chown = {
697
+ uid: stats.uid,
698
+ gid: stats.gid
699
+ };
700
+ }
701
+ }
702
+ }
703
+ fd = await promisify(fs.open)(tmpfile, 'w', options.mode);
704
+ if (options.tmpfileCreated) {
705
+ await options.tmpfileCreated(tmpfile);
706
+ }
707
+ if (isTypedArray(data)) {
708
+ data = typedArrayToBuffer(data);
709
+ }
710
+ if (Buffer.isBuffer(data)) {
711
+ await promisify(fs.write)(fd, data, 0, data.length, 0);
712
+ } else if (data != null) {
713
+ await promisify(fs.write)(fd, String(data), 0, String(options.encoding || 'utf8'));
714
+ }
715
+ if (options.fsync !== false) {
716
+ await promisify(fs.fsync)(fd);
717
+ }
718
+ await promisify(fs.close)(fd);
719
+ fd = null;
720
+ if (options.chown) {
721
+ await promisify(fs.chown)(tmpfile, options.chown.uid, options.chown.gid).catch((err)=>{
722
+ if (!isChownErrOk(err)) {
723
+ throw err;
724
+ }
725
+ });
726
+ }
727
+ if (options.mode) {
728
+ await promisify(fs.chmod)(tmpfile, options.mode).catch((err)=>{
729
+ if (!isChownErrOk(err)) {
730
+ throw err;
731
+ }
732
+ });
733
+ }
734
+ await promisify(fs.rename)(tmpfile, truename);
735
+ } finally{
736
+ if (fd) {
737
+ await promisify(fs.close)(fd).catch(/* istanbul ignore next */ ()=>{});
738
+ }
739
+ removeOnExitHandler();
740
+ await promisify(fs.unlink)(tmpfile).catch(()=>{});
741
+ activeFiles[absoluteName].shift() // remove the element added by serializeSameFile
742
+ ;
743
+ if (activeFiles[absoluteName].length > 0) {
744
+ activeFiles[absoluteName][0]() // start next job if one is pending
745
+ ;
746
+ } else delete activeFiles[absoluteName];
747
+ }
748
+ }
749
+ function writeFile(filename, data, options, callback) {
750
+ if (options instanceof Function) {
751
+ callback = options;
752
+ options = {};
753
+ }
754
+ const promise = writeFileAsync(filename, data, options);
755
+ if (callback) {
756
+ promise.then(callback, callback);
757
+ }
758
+ return promise;
759
+ }
760
+ function writeFileSync(filename, data, options) {
761
+ if (typeof options === 'string') options = {
762
+ encoding: options
763
+ };
764
+ else if (!options) options = {};
765
+ try {
766
+ filename = fs.realpathSync(filename);
767
+ } catch (ex) {
768
+ // it's ok, it'll happen on a not yet existing file
769
+ }
770
+ const tmpfile = getTmpname(filename);
771
+ if (!options.mode || !options.chown) {
772
+ // Either mode or chown is not explicitly set
773
+ // Default behavior is to copy it from original file
774
+ try {
775
+ const stats = fs.statSync(filename);
776
+ options = Object.assign({}, options);
777
+ if (!options.mode) {
778
+ options.mode = stats.mode;
779
+ }
780
+ if (!options.chown && process.getuid) {
781
+ options.chown = {
782
+ uid: stats.uid,
783
+ gid: stats.gid
784
+ };
785
+ }
786
+ } catch (ex) {
787
+ // ignore stat errors
788
+ }
789
+ }
790
+ let fd;
791
+ const cleanup = cleanupOnExit(tmpfile);
792
+ const removeOnExitHandler = onExit(cleanup);
793
+ let threw = true;
794
+ try {
795
+ fd = fs.openSync(tmpfile, 'w', options.mode || 0o666);
796
+ if (options.tmpfileCreated) {
797
+ options.tmpfileCreated(tmpfile);
798
+ }
799
+ if (isTypedArray(data)) {
800
+ data = typedArrayToBuffer(data);
801
+ }
802
+ if (Buffer.isBuffer(data)) {
803
+ fs.writeSync(fd, data, 0, data.length, 0);
804
+ } else if (data != null) {
805
+ fs.writeSync(fd, String(data), 0, String(options.encoding || 'utf8'));
806
+ }
807
+ if (options.fsync !== false) {
808
+ fs.fsyncSync(fd);
809
+ }
810
+ fs.closeSync(fd);
811
+ fd = null;
812
+ if (options.chown) {
813
+ try {
814
+ fs.chownSync(tmpfile, options.chown.uid, options.chown.gid);
815
+ } catch (err) {
816
+ if (!isChownErrOk(err)) {
817
+ throw err;
818
+ }
819
+ }
820
+ }
821
+ if (options.mode) {
822
+ try {
823
+ fs.chmodSync(tmpfile, options.mode);
824
+ } catch (err) {
825
+ if (!isChownErrOk(err)) {
826
+ throw err;
827
+ }
828
+ }
829
+ }
830
+ fs.renameSync(tmpfile, filename);
831
+ threw = false;
832
+ } finally{
833
+ if (fd) {
834
+ try {
835
+ fs.closeSync(fd);
836
+ } catch (ex) {
837
+ // ignore close errors at this stage, error may have closed fd already.
838
+ }
839
+ }
840
+ removeOnExitHandler();
841
+ if (threw) {
842
+ cleanup();
843
+ }
844
+ }
845
+ }
846
+
847
+ var CreatorWorkspaceGeometryFileType;
848
+ (function(CreatorWorkspaceGeometryFileType) {
849
+ CreatorWorkspaceGeometryFileType["StandardObj"] = "standard.obj";
850
+ CreatorWorkspaceGeometryFileType["StandardCtm"] = "standard.ctm";
851
+ CreatorWorkspaceGeometryFileType["StandardNormals"] = "normals_std.png";
852
+ CreatorWorkspaceGeometryFileType["DeformationGlb"] = "deformation.glb";
853
+ })(CreatorWorkspaceGeometryFileType || (CreatorWorkspaceGeometryFileType = {}));
854
+
855
+ const PLUGIN_ID = "0feba3a0-b6d1-11e6-9598-0800200c9a66";
856
+ /**
857
+ * Starts an IG.Asset.Server session and returns the sessionId
858
+ *
859
+ * @param {SessionStartParams} params
860
+ * @returns
861
+ */ const startSession = async ({ url , authentication , ...params })=>{
862
+ const payload = {
863
+ ...params,
864
+ user: undefined,
865
+ password: undefined,
866
+ license: undefined,
867
+ plugin: PLUGIN_ID
868
+ };
869
+ if (authentication.type === "credentials") {
870
+ payload.user = authentication.username;
871
+ payload.password = authentication.password;
872
+ } else if (authentication.type === "license") {
873
+ payload.license = authentication.license;
874
+ }
875
+ const { data: { session: sessionId , state , response } } = await axios.post(`Session/Start2`, JSON.stringify(payload), {
876
+ baseURL: url
877
+ });
878
+ if (state !== "SUCCESS") {
879
+ let message = `Could not start session. IG.Asset.Server responded with ${state}`;
880
+ if (response) {
881
+ message += `: ${response}`;
882
+ }
883
+ throw new Error(message);
884
+ }
885
+ return {
886
+ _kind: "AssetService",
887
+ url,
888
+ sessionId,
889
+ domain: params.domain,
890
+ subDomain: params.subDomain
891
+ };
892
+ };
893
+ const closeSession = async (session)=>{
894
+ await axios.get(`Session/Close/${session.sessionId}`, {
895
+ baseURL: session.url
896
+ });
897
+ };
898
+ const uploadPackage = async (session, { name , version }, zipFilePath)=>{
899
+ try {
900
+ await uploadPackageToUrl(session.url, `UploadPackage/${session.sessionId}/${name}_${version}`, zipFilePath);
901
+ } catch (err) {
902
+ await uploadPackageToUrl(session.url, `UploadPackage/${session.sessionId}/${name}_${version}/`, zipFilePath);
903
+ }
904
+ };
905
+ const uploadPackageToUrl = async (url, path, zipFilePath)=>{
906
+ const { data , status } = await axios.post(path, fs$1.createReadStream(zipFilePath), {
907
+ baseURL: url
908
+ });
909
+ let objectBody;
910
+ if (typeof data === "string") {
911
+ try {
912
+ objectBody = JSON.parse(data);
913
+ } catch (err) {}
914
+ } else if (typeof data === "object") {
915
+ objectBody = data;
916
+ }
917
+ if (objectBody !== undefined) {
918
+ if ("state" in objectBody && objectBody.state !== "SUCCESS") {
919
+ throw new Error(objectBody.response ?? objectBody.state);
920
+ }
921
+ }
922
+ if (status >= 400) {
923
+ if (objectBody !== undefined) {
924
+ let text_1 = "";
925
+ for(const key in objectBody){
926
+ text_1 += key + ": \n";
927
+ if (typeof objectBody[key] === "object") {
928
+ text_1 += JSON.stringify(objectBody[key], undefined, 2);
929
+ } else {
930
+ text_1 += objectBody[key];
931
+ }
932
+ text_1 += "\n\n";
933
+ }
934
+ throw new Error(text_1);
935
+ }
936
+ throw new Error(data);
937
+ }
938
+ return data;
939
+ };
940
+ const getExistingPackages = async (session)=>{
941
+ const { data } = await axios.get(`Script/GetInformation/${session.sessionId}`, {
942
+ baseURL: session.url,
943
+ validateStatus: (status)=>status === 404 || status === 200
944
+ }).catch((err)=>{
945
+ throw new Error(`Failed to get existing packages: ${err.message}`);
946
+ });
947
+ return data;
948
+ };
949
+
950
+ const createDefaultPrompter = ()=>{
951
+ return {
952
+ confirm: async (message)=>{
953
+ const { confirm } = await inquirer.prompt([
954
+ {
955
+ type: "confirm",
956
+ message,
957
+ name: "confirm"
958
+ }
959
+ ]);
960
+ return confirm;
961
+ },
962
+ ask: async (question)=>{
963
+ const { answer } = await inquirer.prompt([
964
+ {
965
+ type: "list",
966
+ message: question.message,
967
+ name: "answer",
968
+ choices: question.options,
969
+ default: question.default
970
+ }
971
+ ]);
972
+ return answer;
973
+ }
974
+ };
975
+ };
976
+
977
+ const __filename$1 = fileURLToPath(import.meta.url);
978
+ const __dirname = path$1.dirname(__filename$1);
979
+ const pjson = JSON.parse(fs$1.readFileSync(path$1.join(__dirname, "..", "package.json"), "utf8"));
980
+ const captureError = (err)=>{
981
+ console.log("");
982
+ if (process.env.NODE_ENV !== "production") {
983
+ console.error(err);
984
+ } else {
985
+ console.error("Stopped execution because of the following error: " + err.message);
986
+ }
987
+ process.exit(1);
988
+ };
989
+ const buildOptions = {
990
+ outDir: {
991
+ description: "Output directory",
992
+ type: "string",
993
+ default: "bin",
994
+ coerce: (input)=>input === undefined || input === null ? undefined : path$1.resolve(process.cwd(), input)
995
+ },
996
+ minimize: {
997
+ description: "Minify output",
998
+ type: "boolean",
999
+ default: true
1000
+ },
1001
+ cwd: {
1002
+ description: "Working directory",
1003
+ type: "string",
1004
+ default: process.cwd()
1005
+ },
1006
+ clean: {
1007
+ description: "Empty output dir before compiling",
1008
+ type: "boolean",
1009
+ default: false
1010
+ },
1011
+ docs: {
1012
+ type: "boolean",
1013
+ default: false
1014
+ }
1015
+ };
1016
+ const preCommandCheck = async (workspaceLocation)=>{
1017
+ var _repositoryPackage_dependencies, _repositoryPackage_devDependencies;
1018
+ const executedLocalPackager = __filename$1.startsWith(workspaceLocation.path);
1019
+ const repositoryPackage = readWorkspaceNpmManifest(workspaceLocation);
1020
+ if ((repositoryPackage == null ? void 0 : (_repositoryPackage_dependencies = repositoryPackage.dependencies) == null ? void 0 : _repositoryPackage_dependencies["@intelligentgraphics/ig.gfx.packager"]) || (repositoryPackage == null ? void 0 : (_repositoryPackage_devDependencies = repositoryPackage.devDependencies) == null ? void 0 : _repositoryPackage_devDependencies["@intelligentgraphics/ig.gfx.packager"])) {
1021
+ const parts = [
1022
+ "Detected locally installed ig.gfx.packager."
1023
+ ];
1024
+ if (executedLocalPackager) {
1025
+ parts.push('Run "npm install -g @intelligentgraphics/ig.gfx.packager@latest" to install the global version, if it is not yet installed.');
1026
+ }
1027
+ parts.push('Run "npm uninstall @intelligentgraphics/ig.gfx.packager" to remove the local version.');
1028
+ console.error(parts.join("\n"));
1029
+ process.exit(1);
1030
+ }
1031
+ if (executedLocalPackager) {
1032
+ console.error(`Detected locally installed ig.gfx.packager.
1033
+ Run "npm install -g @intelligentgraphics/ig.gfx.packager@latest" to install the global version, if it is not yet installed.
1034
+ Run "npm install" to get rid of the local packager version.`);
1035
+ process.exit(1);
1036
+ }
1037
+ const notifier = updateNotifier({
1038
+ pkg: pjson,
1039
+ shouldNotifyInNpmScript: true,
1040
+ updateCheckInterval: 1000 * 60
1041
+ });
1042
+ notifier.notify({
1043
+ isGlobal: true,
1044
+ defer: true
1045
+ });
1046
+ if (repositoryPackage === undefined) {
1047
+ throw new Error("Could not load package.json file in current directory");
1048
+ }
1049
+ repositoryPackage.scripts ??= {};
1050
+ repositoryPackage.scripts.postinstall = "packager postinstall";
1051
+ writeWorkspaceNpmManifest(workspaceLocation, repositoryPackage);
1052
+ };
1053
+ const yargsInstance = yargs(process.argv.slice(2));
1054
+ const resolvePackagesWithTypescriptFromMaybePatterns = (args = [], workspace)=>{
1055
+ const folders = new Map();
1056
+ for (const arg of args){
1057
+ glob.sync(arg, {
1058
+ cwd: workspace.path,
1059
+ absolute: true
1060
+ }).forEach((folder)=>{
1061
+ try {
1062
+ const location = detectPackage(workspace, folder);
1063
+ if (getPackageTypescriptFiles(location).length === 0) {
1064
+ return;
1065
+ }
1066
+ folders.set(folder, location);
1067
+ } catch (err) {}
1068
+ });
1069
+ }
1070
+ return Array.from(folders.values());
1071
+ };
1072
+ yargsInstance.command("build [directories...]", "Builds the specified directories", (argv)=>argv.options(buildOptions), async ({ directories =[] , ...options })=>{
1073
+ const workspace = detectWorkspace(options.cwd);
1074
+ const folders = resolvePackagesWithTypescriptFromMaybePatterns(directories, workspace);
1075
+ await preCommandCheck(workspace);
1076
+ if (folders.length === 0) {
1077
+ return console.log("No build targets found. Please check wether a folder with the provided name exists and wether it has _Package.json.");
1078
+ }
1079
+ const { buildFolders } = await import('./index-cc42a478.mjs').then(function (n) { return n.i; });
1080
+ await buildFolders({
1081
+ ...options,
1082
+ packages: folders,
1083
+ workspace
1084
+ }).catch(captureError);
1085
+ });
1086
+ yargsInstance.command("publish [directory]", "Publishes the specified directory", (argv)=>argv.options({
1087
+ ...buildOptions,
1088
+ noUpload: {
1089
+ type: "boolean",
1090
+ default: false,
1091
+ description: "Only zip built files and do not upload them"
1092
+ },
1093
+ domain: {
1094
+ type: "string",
1095
+ description: "Overwrite the publish domain. Defaults to the one in the _Package.json"
1096
+ },
1097
+ subdomain: {
1098
+ type: "string",
1099
+ description: "Overwrite the publish subdomain. Defaults to the one in the _Package.json"
1100
+ },
1101
+ newVersion: {
1102
+ type: "string",
1103
+ description: "The name of the new version",
1104
+ default: process.env.VERSION,
1105
+ required: true
1106
+ },
1107
+ address: {
1108
+ type: "string",
1109
+ description: "Address",
1110
+ default: "localhost"
1111
+ },
1112
+ service: {
1113
+ type: "string",
1114
+ description: "IG.Asset.Server url",
1115
+ default: process.env.IG_GFX_ASSET_SERVICE,
1116
+ required: true
1117
+ },
1118
+ user: {
1119
+ type: "string",
1120
+ description: "User",
1121
+ default: process.env.IG_GFX_USER
1122
+ },
1123
+ password: {
1124
+ type: "string",
1125
+ description: "Password",
1126
+ default: process.env.IG_GFX_PWD
1127
+ },
1128
+ docs: {
1129
+ type: "boolean",
1130
+ default: false,
1131
+ description: "Generate typedoc documentation"
1132
+ },
1133
+ pushOnly: {
1134
+ type: "boolean",
1135
+ default: false,
1136
+ description: "Try to upload an existing zip file without building and validating the version number"
1137
+ },
1138
+ license: {
1139
+ type: "string",
1140
+ description: "Path to a license file",
1141
+ default: process.env.IG_GFX_LICENSE
1142
+ },
1143
+ skipDependencies: {
1144
+ type: "boolean",
1145
+ default: false,
1146
+ description: "Skip dependency checks"
1147
+ }
1148
+ }), async ({ directory , user , password , service , license , ...options })=>{
1149
+ const workspace = detectWorkspace(options.cwd);
1150
+ const folder = detectPackage(workspace, directory);
1151
+ await preCommandCheck(workspace);
1152
+ if (!options.noUpload) {
1153
+ if (!service) {
1154
+ captureError(new Error('The IG.Asset.Server url has to either be provided using the option --service or through the "IG_GFX_ASSET_SERVICE" environment variable'));
1155
+ return;
1156
+ }
1157
+ if (!license && (!user || !password)) {
1158
+ captureError(new Error(`Expected authentication to be provided through either of the following methods:
1159
+ - as a path to a license file using the --license option or the IG_GFX_LICENSE environment variable
1160
+ - as a username and password using the --user and --password options, or the IG_GFX_USER and IG_GFX_PWD environment variables`));
1161
+ return;
1162
+ }
1163
+ if (license && !license.endsWith(".iglic")) {
1164
+ captureError(new Error(`Expected the license path to end with the extension .iglic. Received the path "${license}". You may need to reload your environment variables by restarting the program you're using to execute the packager.`));
1165
+ return;
1166
+ }
1167
+ }
1168
+ let authentication;
1169
+ if (license) {
1170
+ const fullLicensePath = path$1.resolve(process.cwd(), license);
1171
+ try {
1172
+ const content = fs$1.readFileSync(fullLicensePath);
1173
+ authentication = {
1174
+ type: "license",
1175
+ license: content.toString("base64")
1176
+ };
1177
+ } catch (err) {
1178
+ if ((err == null ? void 0 : err.code) === "ENOENT") {
1179
+ captureError(new Error(`Expected to find a license file at path: ${fullLicensePath}`));
1180
+ return;
1181
+ }
1182
+ captureError(new Error(`Failed to read license file at path: ${fullLicensePath}`));
1183
+ return;
1184
+ }
1185
+ } else if (user && password) {
1186
+ console.log(`Detected usage of username and password authentication. Please migrate to the new license file based authentication.`);
1187
+ authentication = {
1188
+ type: "credentials",
1189
+ username: user,
1190
+ password
1191
+ };
1192
+ }
1193
+ const { releaseFolder } = await import('./index-06ac2c4c.mjs');
1194
+ const prompter = createDefaultPrompter();
1195
+ const fullOptions = {
1196
+ ...options,
1197
+ authentication,
1198
+ service: service,
1199
+ directory: folder,
1200
+ banner: true,
1201
+ prompter,
1202
+ newVersion: options.newVersion,
1203
+ workspace
1204
+ };
1205
+ await releaseFolder(fullOptions).catch(captureError);
1206
+ });
1207
+ yargsInstance.command("testConnection [directory]", "Tests connection to asset service", (argv)=>argv.options({
1208
+ domain: {
1209
+ type: "string",
1210
+ description: "Overwrite the publish domain. Defaults to the one in the _Package.json"
1211
+ },
1212
+ subdomain: {
1213
+ type: "string",
1214
+ description: "Overwrite the publish subdomain. Defaults to the one in the _Package.json"
1215
+ },
1216
+ address: {
1217
+ type: "string",
1218
+ description: "Address",
1219
+ default: "localhost"
1220
+ },
1221
+ service: {
1222
+ type: "string",
1223
+ description: "IG.Asset.Server url",
1224
+ default: process.env.IG_GFX_ASSET_SERVICE,
1225
+ required: true
1226
+ },
1227
+ user: {
1228
+ type: "string",
1229
+ description: "User",
1230
+ default: process.env.IG_GFX_USER
1231
+ },
1232
+ password: {
1233
+ type: "string",
1234
+ description: "Password",
1235
+ default: process.env.IG_GFX_PWD
1236
+ },
1237
+ license: {
1238
+ type: "string",
1239
+ description: "Path to a license file",
1240
+ default: process.env.IG_GFX_LICENSE
1241
+ }
1242
+ }), async ({ user , password , service , license , subdomain , domain , address , directory })=>{
1243
+ if (!service) {
1244
+ captureError(new Error('The IG.Asset.Server url has to either be provided using the option --service or through the "IG_GFX_ASSET_SERVICE" environment variable'));
1245
+ return;
1246
+ }
1247
+ if (!license && (!user || !password)) {
1248
+ captureError(new Error(`Expected authentication to be provided through either of the following methods:
1249
+ - as a path to a license file using the --license option or the IG_GFX_LICENSE environment variable
1250
+ - as a username and password using the --user and --password options, or the IG_GFX_USER and IG_GFX_PWD environment variables`));
1251
+ return;
1252
+ }
1253
+ if (license && !license.endsWith(".iglic")) {
1254
+ captureError(new Error(`Expected the license path to end with the extension .iglic. Received the path "${license}". You may need to reload your environment variables by restarting the program you're using to execute the packager.`));
1255
+ return;
1256
+ }
1257
+ let authentication;
1258
+ if (license) {
1259
+ const fullLicensePath = path$1.resolve(process.cwd(), license);
1260
+ try {
1261
+ const content = fs$1.readFileSync(fullLicensePath);
1262
+ authentication = {
1263
+ type: "license",
1264
+ license: content.toString("base64")
1265
+ };
1266
+ } catch (err) {
1267
+ if ((err == null ? void 0 : err.code) === "ENOENT") {
1268
+ captureError(new Error(`Expected to find a license file at path: ${fullLicensePath}`));
1269
+ return;
1270
+ }
1271
+ captureError(new Error(`Failed to read license file at path: ${fullLicensePath}`));
1272
+ return;
1273
+ }
1274
+ } else if (user && password) {
1275
+ console.log(`Detected usage of username and password authentication. Please migrate to the new license file based authentication.`);
1276
+ authentication = {
1277
+ type: "credentials",
1278
+ username: user,
1279
+ password
1280
+ };
1281
+ }
1282
+ if (authentication === undefined) {
1283
+ throw new Error(`Expected authentication to be available`);
1284
+ }
1285
+ if (typeof directory === "string") {
1286
+ const workspace = detectWorkspace(process.cwd());
1287
+ const folder = detectPackage(workspace, directory);
1288
+ const manifest = readPackageCreatorManifest(folder);
1289
+ const parsedName = parseCreatorPackageName(manifest);
1290
+ if (domain === undefined) {
1291
+ domain = parsedName.domain;
1292
+ }
1293
+ if (subdomain === undefined) {
1294
+ subdomain = parsedName.subdomain;
1295
+ }
1296
+ }
1297
+ if (domain === undefined || subdomain === undefined) {
1298
+ throw new Error(`Expected either domain and subdomain to be provided through options or to be executed for a specific package directory`);
1299
+ }
1300
+ const session = await startSession({
1301
+ url: service,
1302
+ address,
1303
+ domain,
1304
+ subDomain: subdomain,
1305
+ authentication
1306
+ });
1307
+ await closeSession(session);
1308
+ console.log(`Asset service session successfully started and closed`);
1309
+ });
1310
+ yargsInstance.command({
1311
+ command: "generateIndex [directory]",
1312
+ builder: (argv)=>argv.option("ignore", {
1313
+ type: "array",
1314
+ default: [],
1315
+ description: "Files to ignore while generating index"
1316
+ }).option("strictOptional", {
1317
+ type: "boolean",
1318
+ default: false,
1319
+ description: "Marks non optional parameter object properties as required"
1320
+ }),
1321
+ handler: async ({ directory , ignore , strictOptional })=>{
1322
+ const workspace = detectWorkspace(process.cwd());
1323
+ await preCommandCheck(workspace);
1324
+ const { generateIndex } = await import('./generateIndex-074f4aa1.mjs');
1325
+ const location = detectPackage(workspace, directory);
1326
+ generateIndex({
1327
+ location,
1328
+ ignore,
1329
+ strictOptional
1330
+ });
1331
+ },
1332
+ describe: "Generates an index file for a package based on typescript types"
1333
+ });
1334
+ yargsInstance.command({
1335
+ command: "generateParameterType [directory] [name]",
1336
+ handler: async ({ directory , name })=>{
1337
+ const workspace = detectWorkspace(process.cwd());
1338
+ await preCommandCheck(workspace);
1339
+ const { generateParameterType } = await import('./generateParameterType-4c9e95a5.mjs');
1340
+ const location = detectPackage(workspace, directory);
1341
+ generateParameterType({
1342
+ location,
1343
+ name
1344
+ });
1345
+ },
1346
+ describe: "Generates a parameter type for an interactor or evaluator"
1347
+ });
1348
+ yargsInstance.command({
1349
+ command: "postinstall",
1350
+ builder: (argv)=>argv,
1351
+ handler: async ()=>{
1352
+ const { executePostInstall } = await import('./postinstall-c38d9b55.mjs');
1353
+ executePostInstall(detectWorkspace(process.cwd()));
1354
+ },
1355
+ describe: "Runs postinstall tasks"
1356
+ });
1357
+ yargsInstance.command({
1358
+ command: "publishNpm [directory]",
1359
+ builder: (argv)=>argv.options({
1360
+ newVersion: {
1361
+ type: "string",
1362
+ description: "Name of the new version",
1363
+ default: process.env.VERSION,
1364
+ required: true
1365
+ },
1366
+ dryRun: {
1367
+ type: "boolean"
1368
+ }
1369
+ }),
1370
+ handler: async ({ directory , newVersion , dryRun })=>{
1371
+ const workspace = detectWorkspace(process.cwd());
1372
+ const { publishToNpm } = await import('./publishNpm-8ec1b871.mjs');
1373
+ await publishToNpm({
1374
+ workspace,
1375
+ location: detectPackage(workspace, directory),
1376
+ version: newVersion,
1377
+ dryRun
1378
+ }).catch(captureError);
1379
+ },
1380
+ describe: "Publishes the package to npm"
1381
+ });
1382
+ yargsInstance.demandCommand().pkgConf("packager").showHelpOnFail(false).version(pjson.version).argv;
1383
+
1384
+ var cli = /*#__PURE__*/Object.freeze({
1385
+ __proto__: null
1386
+ });
1387
+
1388
+ export { INDEX_FILE as I, PACKAGE_FILE as P, readPackageAnimationList as a, readPackageCreatorIndex as b, readWorkspaceNpmManifest as c, getPackageReleasesDirectory as d, getExistingPackages as e, closeSession as f, getWorkspaceOutputPath as g, getPackageTypescriptFiles as h, isErrorENOENT as i, writePackageCreatorIndex as j, getCreatorIndexParameterPrimaryJSType as k, getWorkspaceLibPath as l, readNpmManifest as m, stripUtf8Bom as n, readPackageNpmManifest as o, parseCreatorPackageName as p, writePackageNpmManifest as q, readPackageCreatorManifest as r, startSession as s, iterateWorkspacePackages as t, uploadPackage as u, cli as v, writePackageCreatorManifest as w };
1389
+ //# sourceMappingURL=cli-381989cc.mjs.map