@storm-software/cloudflare-tools 0.55.44 → 0.55.45
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/README.md +1 -1
- package/dist/{chunk-75BWJSFW.mjs → chunk-2P5TGL2R.mjs} +10 -2
- package/dist/{chunk-CAC2PNUJ.js → chunk-4P4IIN6G.js} +17 -17
- package/dist/{chunk-LY3NUGLI.mjs → chunk-BDADKXEW.mjs} +11 -11
- package/dist/{chunk-ISO7UOVA.js → chunk-DZPVDJXE.js} +143 -143
- package/dist/{chunk-TDNPRL4Y.js → chunk-P7YORNEH.js} +1 -1
- package/dist/{chunk-TTDB2VCO.mjs → chunk-PHH7TIFK.mjs} +1 -1
- package/dist/{chunk-STUGKRS5.mjs → chunk-QGY5FTZR.mjs} +6 -6
- package/dist/{chunk-HWJBDRIG.js → chunk-QQQD2SIN.js} +9 -1
- package/dist/{chunk-AXF5C7QR.js → chunk-S5NUDAJR.js} +3 -3
- package/dist/{chunk-MIKH2FI5.mjs → chunk-WGEGIRE6.mjs} +1 -1
- package/dist/{chunk-R2SQ6ZOK.mjs → chunk-YBYGEO5L.mjs} +9 -1
- package/dist/{chunk-R32N7B24.js → chunk-Z5XL2UL5.js} +50 -42
- package/dist/{chunk-JUJW77CM.js → chunk-ZGZLK5XZ.js} +12 -12
- package/dist/{chunk-FVMS7RMT.mjs → chunk-ZTEXBI4T.mjs} +5 -5
- package/dist/executors.js +5 -5
- package/dist/executors.mjs +5 -5
- package/dist/generators.js +5 -5
- package/dist/generators.mjs +4 -4
- package/dist/index.js +12 -12
- package/dist/index.mjs +16 -16
- package/dist/src/executors/cloudflare-publish/executor.js +3 -3
- package/dist/src/executors/cloudflare-publish/executor.mjs +3 -3
- package/dist/src/executors/r2-upload-publish/executor.js +5 -5
- package/dist/src/executors/r2-upload-publish/executor.mjs +4 -4
- package/dist/src/executors/serve/executor.d.mts +20 -2
- package/dist/src/executors/serve/executor.d.ts +20 -2
- package/dist/src/executors/serve/executor.js +4 -4
- package/dist/src/executors/serve/executor.mjs +3 -3
- package/dist/src/generators/init/generator.js +2 -2
- package/dist/src/generators/init/generator.mjs +1 -1
- package/dist/src/generators/worker/generator.js +5 -5
- package/dist/src/generators/worker/generator.mjs +4 -4
- package/dist/src/utils/index.js +3 -3
- package/dist/src/utils/index.mjs +2 -2
- package/dist/src/utils/r2-bucket-helpers.js +3 -3
- package/dist/src/utils/r2-bucket-helpers.mjs +2 -2
- package/package.json +1 -1
```diff
@@ -4,7 +4,7 @@
 var _chunkVTHBMY4Bjs = require('./chunk-VTHBMY4B.js');
-var
+var _chunkZ5XL2UL5js = require('./chunk-Z5XL2UL5.js');
@@ -20,7 +20,7 @@ var _chunkR32N7B24js = require('./chunk-R32N7B24.js');
-var
+var _chunkQQQD2SINjs = require('./chunk-QQQD2SIN.js');
@@ -53,7 +53,7 @@ var applyWorkspaceBaseTokens = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void
 }
 }
 if (result.includes("{workspaceRoot}")) {
-result = result.replaceAll("{workspaceRoot}", _nullishCoalesce(_nullishCoalesce(tokenParams.workspaceRoot, () => ( _optionalChain([tokenParams, 'access', _3 => _3.config, 'optionalAccess', _4 => _4.workspaceRoot]))), () => (
+result = result.replaceAll("{workspaceRoot}", _nullishCoalesce(_nullishCoalesce(tokenParams.workspaceRoot, () => ( _optionalChain([tokenParams, 'access', _3 => _3.config, 'optionalAccess', _4 => _4.workspaceRoot]))), () => ( _chunkQQQD2SINjs.findWorkspaceRoot.call(void 0, ))));
 }
 return result;
 }, "applyWorkspaceBaseTokens");
@@ -101,35 +101,35 @@ var run = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, (config, command,
 // ../workspace-tools/src/base/base-executor.ts
 var _defu = require('defu'); var _defu2 = _interopRequireDefault(_defu);
 var withRunExecutor = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, (name, executorFn, executorOptions = {}) => async (_options, context2) => {
-const stopwatch =
+const stopwatch = _chunkQQQD2SINjs.getStopwatch.call(void 0, name);
 let options = _options;
 let config = {};
 try {
 if (!_optionalChain([context2, 'access', _5 => _5.projectsConfigurations, 'optionalAccess', _6 => _6.projects]) || !context2.projectName || !context2.projectsConfigurations.projects[context2.projectName]) {
 throw new Error("The Build process failed because the context is not valid. Please run this command from a workspace.");
 }
-const workspaceRoot3 =
+const workspaceRoot3 = _chunkQQQD2SINjs.findWorkspaceRoot.call(void 0, );
 const projectRoot = context2.projectsConfigurations.projects[context2.projectName].root || workspaceRoot3;
 const sourceRoot = context2.projectsConfigurations.projects[context2.projectName].sourceRoot || projectRoot || workspaceRoot3;
 const projectName = context2.projectName;
 config.workspaceRoot = workspaceRoot3;
-
+_chunkQQQD2SINjs.writeInfo.call(void 0, `\u26A1 Running the ${name} executor for ${projectName} `, config);
 if (!executorOptions.skipReadingConfig) {
-
+_chunkQQQD2SINjs.writeTrace.call(void 0, `Loading the Storm Config from environment variables and storm.config.js file...
 - workspaceRoot: ${workspaceRoot3}
 - projectRoot: ${projectRoot}
 - sourceRoot: ${sourceRoot}
 - projectName: ${projectName}
 `, config);
-config = await
+config = await _chunkZ5XL2UL5js.getConfig.call(void 0, workspaceRoot3);
 }
 if (_optionalChain([executorOptions, 'optionalAccess', _7 => _7.hooks, 'optionalAccess', _8 => _8.applyDefaultOptions])) {
-
+_chunkQQQD2SINjs.writeDebug.call(void 0, "Running the applyDefaultOptions hook...", config);
 options = await Promise.resolve(executorOptions.hooks.applyDefaultOptions(options, config));
-
+_chunkQQQD2SINjs.writeDebug.call(void 0, "Completed the applyDefaultOptions hook", config);
 }
-
-${
+_chunkQQQD2SINjs.writeTrace.call(void 0, `Executor schema options \u2699\uFE0F
+${_chunkQQQD2SINjs.formatLogMessage.call(void 0, options)}
 `, config);
 const tokenized = await applyWorkspaceTokens(options, _defu.defu.call(void 0, {
 workspaceRoot: workspaceRoot3,
@@ -138,13 +138,13 @@ ${_chunkHWJBDRIGjs.formatLogMessage.call(void 0, options)}
 projectName,
 config
 }, config, context2.projectsConfigurations.projects[context2.projectName]), applyWorkspaceProjectTokens);
-
-${
+_chunkQQQD2SINjs.writeTrace.call(void 0, `Executor schema tokenized options \u2699\uFE0F
+${_chunkQQQD2SINjs.formatLogMessage.call(void 0, tokenized)}
 `, config);
 if (_optionalChain([executorOptions, 'optionalAccess', _9 => _9.hooks, 'optionalAccess', _10 => _10.preProcess])) {
-
+_chunkQQQD2SINjs.writeDebug.call(void 0, "Running the preProcess hook...", config);
 await Promise.resolve(executorOptions.hooks.preProcess(tokenized, config));
-
+_chunkQQQD2SINjs.writeDebug.call(void 0, "Completed the preProcess hook", config);
 }
 const ret = executorFn(tokenized, context2, config);
 if (_isFunction(_optionalChain([ret, 'optionalAccess', _11 => _11.next]))) {
@@ -154,26 +154,26 @@ ${_chunkHWJBDRIGjs.formatLogMessage.call(void 0, tokenized)}
 }
 const result = await Promise.resolve(ret);
 if (result && (!result.success || result.error && _optionalChain([result, 'optionalAccess', _12 => _12.error, 'optionalAccess', _13 => _13.message]) && typeof _optionalChain([result, 'optionalAccess', _14 => _14.error, 'optionalAccess', _15 => _15.message]) === "string" && _optionalChain([result, 'optionalAccess', _16 => _16.error, 'optionalAccess', _17 => _17.name]) && typeof _optionalChain([result, 'optionalAccess', _18 => _18.error, 'optionalAccess', _19 => _19.name]) === "string")) {
-
-${
+_chunkQQQD2SINjs.writeTrace.call(void 0, `Failure determined by the ${name} executor
+${_chunkQQQD2SINjs.formatLogMessage.call(void 0, result)}`, config);
 console.error(result);
 throw new Error(`The ${name} executor failed to run`, {
 cause: _optionalChain([result, 'optionalAccess', _20 => _20.error])
 });
 }
 if (_optionalChain([executorOptions, 'optionalAccess', _21 => _21.hooks, 'optionalAccess', _22 => _22.postProcess])) {
-
+_chunkQQQD2SINjs.writeDebug.call(void 0, "Running the postProcess hook...", config);
 await Promise.resolve(executorOptions.hooks.postProcess(config));
-
+_chunkQQQD2SINjs.writeDebug.call(void 0, "Completed the postProcess hook", config);
 }
-
+_chunkQQQD2SINjs.writeSuccess.call(void 0, `Completed running the ${name} task executor!
 `, config);
 return {
 success: true
 };
 } catch (error) {
-
-
+_chunkQQQD2SINjs.writeFatal.call(void 0, "A fatal error occurred while running the executor - the process was forced to terminate", config);
+_chunkQQQD2SINjs.writeError.call(void 0, `An exception was thrown in the executor's process
 - Details: ${error.message}
 - Stacktrace: ${error.stack}`, config);
 return {
@@ -476,8 +476,8 @@ var copyAssets = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async (con
 output: "src/"
 });
 }
-
-${pendingAssets.map((pendingAsset) => typeof pendingAsset === "string" ? ` - ${pendingAsset} -> ${outputPath}` : ` - ${pendingAsset.input}/${pendingAsset.glob} -> ${
+_chunkQQQD2SINjs.writeTrace.call(void 0, `\u{1F4DD} Copying the following assets to the output directory:
+${pendingAssets.map((pendingAsset) => typeof pendingAsset === "string" ? ` - ${pendingAsset} -> ${outputPath}` : ` - ${pendingAsset.input}/${pendingAsset.glob} -> ${_chunkQQQD2SINjs.joinPaths.call(void 0, outputPath, pendingAsset.output)}`).join("\n")}`, config);
 const assetHandler = new (0, _copyassetshandler.CopyAssetsHandler)({
 projectDir: projectRoot,
 rootDir: config.workspaceRoot,
@@ -486,12 +486,12 @@ ${pendingAssets.map((pendingAsset) => typeof pendingAsset === "string" ? ` - ${p
 });
 await assetHandler.processAllAssetsOnce();
 if (includeSrc === true) {
-
+_chunkQQQD2SINjs.writeDebug.call(void 0, `\u{1F4DD} Adding banner and writing source files: ${_chunkQQQD2SINjs.joinPaths.call(void 0, outputPath, "src")}`, config);
 const files = await _glob.glob.call(void 0, [
-
-
-
-
+_chunkQQQD2SINjs.joinPaths.call(void 0, config.workspaceRoot, outputPath, "src/**/*.ts"),
+_chunkQQQD2SINjs.joinPaths.call(void 0, config.workspaceRoot, outputPath, "src/**/*.tsx"),
+_chunkQQQD2SINjs.joinPaths.call(void 0, config.workspaceRoot, outputPath, "src/**/*.js"),
+_chunkQQQD2SINjs.joinPaths.call(void 0, config.workspaceRoot, outputPath, "src/**/*.jsx")
 ]);
 await Promise.allSettled(files.map(async (file) => _promises.writeFile.call(void 0, file, `${banner && typeof banner === "string" ? banner.startsWith("//") ? banner : `// ${banner}` : ""}
@@ -514,7 +514,7 @@ var addPackageDependencies = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0
 for (const project of projectDependencies.dependencies.filter((dep) => dep.node.type === "lib" && _optionalChain([dep, 'access', _29 => _29.node, 'access', _30 => _30.data, 'optionalAccess', _31 => _31.root]) !== projectRoot && _optionalChain([dep, 'access', _32 => _32.node, 'access', _33 => _33.data, 'optionalAccess', _34 => _34.root]) !== workspaceRoot3)) {
 const projectNode = project.node;
 if (projectNode.data.root) {
-const projectPackageJsonPath =
+const projectPackageJsonPath = _chunkQQQD2SINjs.joinPaths.call(void 0, workspaceRoot3, projectNode.data.root, "package.json");
 if (_fs.existsSync.call(void 0, projectPackageJsonPath)) {
 const projectPackageJsonContent = await _promises.readFile.call(void 0, projectPackageJsonPath, "utf8");
 const projectPackageJson = JSON.parse(projectPackageJsonContent);
@@ -525,8 +525,8 @@ var addPackageDependencies = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0
 }
 }
 if (localPackages.length > 0) {
-
-const projectJsonFile = await _promises.readFile.call(void 0,
+_chunkQQQD2SINjs.writeTrace.call(void 0, `\u{1F4E6} Adding local packages to package.json: ${localPackages.map((p) => p.name).join(", ")}`);
+const projectJsonFile = await _promises.readFile.call(void 0, _chunkQQQD2SINjs.joinPaths.call(void 0, projectRoot, "project.json"), "utf8");
 const projectJson = JSON.parse(projectJsonFile);
 const projectName2 = projectJson.name;
 const projectConfigurations = _projectgraph.readProjectsConfigurationFromProjectGraph.call(void 0, projectGraph);
@@ -535,7 +535,7 @@ var addPackageDependencies = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0
 }
 const implicitDependencies = _optionalChain([projectConfigurations, 'access', _37 => _37.projects, 'optionalAccess', _38 => _38[projectName2], 'access', _39 => _39.implicitDependencies, 'optionalAccess', _40 => _40.reduce, 'call', _41 => _41((ret, dep) => {
 if (_optionalChain([projectConfigurations, 'access', _42 => _42.projects, 'optionalAccess', _43 => _43[dep]])) {
-const depPackageJsonPath =
+const depPackageJsonPath = _chunkQQQD2SINjs.joinPaths.call(void 0, workspaceRoot3, projectConfigurations.projects[dep].root, "package.json");
 if (_fs.existsSync.call(void 0, depPackageJsonPath)) {
 const depPackageJsonContent = _fs.readFileSync.call(void 0, depPackageJsonPath, "utf8");
 const depPackageJson = JSON.parse(depPackageJsonContent);
@@ -559,13 +559,13 @@ var addPackageDependencies = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0
 return ret;
 }, _nullishCoalesce(packageJson.devDependencies, () => ( {})));
 } else {
-
+_chunkQQQD2SINjs.writeTrace.call(void 0, "\u{1F4E6} No local packages dependencies to add to package.json");
 }
 return packageJson;
 }, "addPackageDependencies");
 var addWorkspacePackageJsonFields = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async (config, projectRoot, sourceRoot, projectName, includeSrc = false, packageJson) => {
-const workspaceRoot3 = config.workspaceRoot ? config.workspaceRoot :
-const workspacePackageJsonContent = await _promises.readFile.call(void 0,
+const workspaceRoot3 = config.workspaceRoot ? config.workspaceRoot : _chunkQQQD2SINjs.findWorkspaceRoot.call(void 0, );
+const workspacePackageJsonContent = await _promises.readFile.call(void 0, _chunkQQQD2SINjs.joinPaths.call(void 0, workspaceRoot3, "package.json"), "utf8");
 const workspacePackageJson = JSON.parse(workspacePackageJsonContent);
 packageJson.type ??= "module";
 packageJson.sideEffects ??= false;
@@ -574,7 +574,7 @@ var addWorkspacePackageJsonFields = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call
 if (distSrc.startsWith("/")) {
 distSrc = distSrc.substring(1);
 }
-packageJson.source ??= `${
+packageJson.source ??= `${_chunkQQQD2SINjs.joinPaths.call(void 0, distSrc, "index.ts").replaceAll("\\", "/")}`;
 }
 packageJson.files ??= [
 "dist/**/*"
@@ -605,14 +605,14 @@ var addWorkspacePackageJsonFields = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call
 ];
 }
 packageJson.repository ??= workspacePackageJson.repository;
-packageJson.repository.directory ??= projectRoot ? projectRoot :
+packageJson.repository.directory ??= projectRoot ? projectRoot : _chunkQQQD2SINjs.joinPaths.call(void 0, "packages", projectName);
 return packageJson;
 }, "addWorkspacePackageJsonFields");
 // ../build-tools/src/utilities/get-entry-points.ts
 var getEntryPoints = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async (config, projectRoot, sourceRoot, entry, emitOnAll = false) => {
-const workspaceRoot3 = config.workspaceRoot ||
+const workspaceRoot3 = config.workspaceRoot || _chunkQQQD2SINjs.findWorkspaceRoot.call(void 0, );
 const entryPoints = [];
 if (entry) {
 if (typeof entry === "string") {
@@ -624,7 +624,7 @@ var getEntryPoints = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
 }
 }
 if (emitOnAll) {
-entryPoints.push(
+entryPoints.push(_chunkQQQD2SINjs.joinPaths.call(void 0, workspaceRoot3, sourceRoot || projectRoot, "**/*.{ts,tsx}"));
 }
 const results = await Promise.all(entryPoints.map(async (entryPoint) => {
 const paths = [];
@@ -636,9 +636,9 @@ var getEntryPoints = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
 ]
 });
 paths.push(...files.reduce((ret, filePath) => {
-const result =
+const result = _chunkQQQD2SINjs.correctPaths.call(void 0, _chunkQQQD2SINjs.joinPaths.call(void 0, filePath.path, filePath.name).replaceAll(_chunkQQQD2SINjs.correctPaths.call(void 0, workspaceRoot3), "").replaceAll(_chunkQQQD2SINjs.correctPaths.call(void 0, projectRoot), ""));
 if (result) {
-
+_chunkQQQD2SINjs.writeDebug.call(void 0, `Trying to add entry point ${result} at "${_chunkQQQD2SINjs.joinPaths.call(void 0, filePath.path, filePath.name)}"`, config);
 if (!paths.includes(result)) {
 paths.push(result);
 }
@@ -646,10 +646,9 @@ var getEntryPoints = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
 return ret;
 }, []));
 } else {
-
-
-
-paths.push(result);
+_chunkQQQD2SINjs.writeDebug.call(void 0, `Trying to add entry point ${entryPoint}"`, config);
+if (!paths.includes(entryPoint)) {
+paths.push(entryPoint);
 }
 }
 return paths;
@@ -962,7 +961,7 @@ var onErrorPlugin = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, (option
 setup(build4) {
 build4.onEnd((result) => {
 if (result.errors.length > 0 && process.env.WATCH !== "true") {
-
+_chunkQQQD2SINjs.writeError.call(void 0, `The following errors occurred during the build:
 ${result.errors.map((error) => error.text).join("\n")}
 `, resolvedOptions.config);
@@ -997,7 +996,7 @@ _chunkJ5SB6L2Ljs.__name.call(void 0, resolvePathsConfig, "resolvePathsConfig");
 var resolvePathsPlugin = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, (options, resolvedOptions) => ({
 name: "storm:resolve-paths",
 setup(build4) {
-const parentTsConfig = build4.initialOptions.tsconfig ? _chunkJ5SB6L2Ljs.__require.call(void 0,
+const parentTsConfig = build4.initialOptions.tsconfig ? _chunkJ5SB6L2Ljs.__require.call(void 0, _chunkQQQD2SINjs.joinPaths.call(void 0, resolvedOptions.config.workspaceRoot, build4.initialOptions.tsconfig)) : _chunkJ5SB6L2Ljs.__require.call(void 0, _chunkQQQD2SINjs.joinPaths.call(void 0, resolvedOptions.config.workspaceRoot, "tsconfig.json"));
 const resolvedTsPaths = resolvePathsConfig(parentTsConfig, options.projectRoot);
 const packagesRegex = new RegExp(`^(${Object.keys(resolvedTsPaths).join("|")})$`);
 build4.onResolve({
@@ -1021,7 +1020,7 @@ var _apiextractor = require('@microsoft/api-extractor');
 function bundleTypeDefinitions(filename, outfile, externals, options) {
-const { dependencies, peerDependencies, devDependencies } = _chunkJ5SB6L2Ljs.__require.call(void 0,
+const { dependencies, peerDependencies, devDependencies } = _chunkJ5SB6L2Ljs.__require.call(void 0, _chunkQQQD2SINjs.joinPaths.call(void 0, options.projectRoot, "package.json"));
 const dependenciesKeys = Object.keys(_nullishCoalesce(dependencies, () => ( {}))).flatMap((p) => [
 p,
 getTypeDependencyPackageName(p)
@@ -1057,13 +1056,13 @@ function bundleTypeDefinitions(filename, outfile, externals, options) {
 },
 dtsRollup: {
 enabled: true,
-untrimmedFilePath:
+untrimmedFilePath: _chunkQQQD2SINjs.joinPaths.call(void 0, options.outdir, `${outfile}.d.ts`)
 },
 tsdocMetadata: {
 enabled: false
 }
 },
-packageJsonFullPath:
+packageJsonFullPath: _chunkQQQD2SINjs.joinPaths.call(void 0, options.projectRoot, "package.json"),
 configObjectFullPath: void 0
 });
 const extractorResult = _apiextractor.Extractor.invoke(extractorConfig, {
@@ -1071,7 +1070,7 @@ function bundleTypeDefinitions(filename, outfile, externals, options) {
 localBuild: true
 });
 if (extractorResult.succeeded === false) {
-
+_chunkQQQD2SINjs.writeError.call(void 0, `API Extractor completed with ${extractorResult.errorCount} ${extractorResult.errorCount === 1 ? "error" : "errors"}`);
 throw new Error("API Extractor completed with errors");
 }
 }
@@ -1090,12 +1089,12 @@ var tscPlugin = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, (options, r
 const sourceRoot = resolvedOptions.sourceRoot.replaceAll(resolvedOptions.projectRoot, "");
 const typeOutDir = resolvedOptions.outdir;
 const entryPoint = resolvedOptions.entryPoints[0].replace(sourceRoot, "").replace(/\.ts$/, "");
-const bundlePath =
+const bundlePath = _chunkQQQD2SINjs.joinPaths.call(void 0, resolvedOptions.outdir, entryPoint);
 let dtsPath;
-if (_fs.existsSync.call(void 0,
-dtsPath =
-} else if (_fs.existsSync.call(void 0,
-dtsPath =
+if (_fs.existsSync.call(void 0, _chunkQQQD2SINjs.joinPaths.call(void 0, resolvedOptions.config.workspaceRoot, typeOutDir, `${entryPoint}.d.ts`))) {
+dtsPath = _chunkQQQD2SINjs.joinPaths.call(void 0, resolvedOptions.config.workspaceRoot, typeOutDir, `${entryPoint}.d.ts`);
+} else if (_fs.existsSync.call(void 0, _chunkQQQD2SINjs.joinPaths.call(void 0, resolvedOptions.config.workspaceRoot, typeOutDir, `${entryPoint.replace(/^src\//, "")}.d.ts`))) {
+dtsPath = _chunkQQQD2SINjs.joinPaths.call(void 0, resolvedOptions.config.workspaceRoot, typeOutDir, `${entryPoint.replace(/^src\//, "")}.d.ts`);
 }
 const ext = resolvedOptions.format === "esm" ? "d.mts" : "d.ts";
 if (process.env.WATCH !== "true" && process.env.DEV !== "true") {
@@ -1259,8 +1258,8 @@ var depsCheckPlugin = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, (bund
 const filteredMissingDeps = missingDependencies.filter((dep) => {
 return !missingIgnore.some((pattern) => dep.match(pattern)) && !peerDependencies.includes(dep);
 });
-
-
+_chunkQQQD2SINjs.writeWarning.call(void 0, `Unused Dependencies: ${JSON.stringify(filteredUnusedDeps)}`);
+_chunkQQQD2SINjs.writeError.call(void 0, `Missing Dependencies: ${JSON.stringify(filteredMissingDeps)}`);
 if (filteredMissingDeps.length > 0) {
 throw new Error(`Missing dependencies detected - please install them:
 ${JSON.stringify(filteredMissingDeps)}
@@ -1354,13 +1353,13 @@ var resolveOptions = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
 if (!workspaceRoot3) {
 throw new Error("Cannot find Nx workspace root");
 }
-const config = await
-
-const stopwatch =
+const config = await _chunkZ5XL2UL5js.getConfig.call(void 0, workspaceRoot3.dir);
+_chunkQQQD2SINjs.writeDebug.call(void 0, " \u2699\uFE0F Resolving build options", config);
+const stopwatch = _chunkQQQD2SINjs.getStopwatch.call(void 0, "Build options resolution");
 const projectGraph = await _devkit.createProjectGraphAsync.call(void 0, {
 exitOnError: true
 });
-const projectJsonPath =
+const projectJsonPath = _chunkQQQD2SINjs.joinPaths.call(void 0, workspaceRoot3.dir, projectRoot, "project.json");
 if (!_fs.existsSync.call(void 0, projectJsonPath)) {
 throw new Error("Cannot find project.json configuration");
 }
@@ -1374,7 +1373,7 @@ var resolveOptions = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
 const options = _defu2.default.call(void 0, userOptions, DEFAULT_BUILD_OPTIONS);
 options.name ??= `${projectName}-${options.format}`;
 options.target ??= DEFAULT_TARGET;
-const packageJsonPath =
+const packageJsonPath = _chunkQQQD2SINjs.joinPaths.call(void 0, workspaceRoot3.dir, options.projectRoot, "package.json");
 if (!_fs.existsSync.call(void 0, packageJsonPath)) {
 throw new Error("Cannot find package.json configuration");
 }
@@ -1391,24 +1390,23 @@ var resolveOptions = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
 "main"
 ],
 ...userOptions,
-tsconfig:
+tsconfig: _chunkQQQD2SINjs.joinPaths.call(void 0, projectRoot, userOptions.tsconfig ? userOptions.tsconfig.replace(projectRoot, "") : "tsconfig.json"),
 format: options.format || "cjs",
-entryPoints: await getEntryPoints(config, projectRoot, projectJson.sourceRoot, _nullishCoalesce(userOptions.entry, () => ( "./src/index.ts")),
-outdir: userOptions.outputPath ||
+entryPoints: await getEntryPoints(config, projectRoot, projectJson.sourceRoot, _nullishCoalesce(userOptions.entry, () => ( "./src/index.ts")), false),
+outdir: userOptions.outputPath || _chunkQQQD2SINjs.joinPaths.call(void 0, "dist", projectRoot),
 distDir: userOptions.distDir || "dist",
 plugins: [],
 name: userOptions.name || projectName,
 projectConfigurations,
 projectName,
 projectGraph,
-sourceRoot: userOptions.sourceRoot || projectJson.sourceRoot ||
+sourceRoot: userOptions.sourceRoot || projectJson.sourceRoot || _chunkQQQD2SINjs.joinPaths.call(void 0, projectRoot, "src"),
 minify: userOptions.minify || !userOptions.debug,
-verbose: userOptions.verbose ||
+verbose: userOptions.verbose || _chunkQQQD2SINjs.isVerbose.call(void 0, ) || userOptions.debug === true,
 includeSrc: userOptions.includeSrc === true,
 metafile: userOptions.metafile !== false,
 generatePackageJson: userOptions.generatePackageJson !== false,
 clean: userOptions.clean !== false,
-emitOnAll: userOptions.emitOnAll === true,
 assets: _nullishCoalesce(userOptions.assets, () => ( [])),
 injectShims: userOptions.injectShims !== true,
 bundle: userOptions.bundle !== false,
@@ -1439,8 +1437,8 @@ var resolveOptions = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
 }, {})
 },
 inject: [
-options.format === "cjs" && options.injectShims ?
-options.format === "esm" && options.injectShims && options.platform === "node" ?
+options.format === "cjs" && options.injectShims ? _chunkQQQD2SINjs.joinPaths.call(void 0, __dirname, "../assets/cjs_shims.js") : void 0,
+options.format === "esm" && options.injectShims && options.platform === "node" ? _chunkQQQD2SINjs.joinPaths.call(void 0, __dirname, "../assets/esm_shims.js") : void 0
 ].filter(Boolean)
 };
 result.plugins = _nullishCoalesce(userOptions.plugins, () => ( getDefaultBuildPlugins(userOptions, result)));
@@ -1458,14 +1456,14 @@ var resolveOptions = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
 return result;
 }, "resolveOptions");
 async function generatePackageJson(context2) {
-if (context2.options.generatePackageJson !== false && _fs.existsSync.call(void 0,
-
-const stopwatch =
-const packageJsonPath =
+if (context2.options.generatePackageJson !== false && _fs.existsSync.call(void 0, _chunkQQQD2SINjs.joinPaths.call(void 0, context2.options.projectRoot, "package.json"))) {
+_chunkQQQD2SINjs.writeDebug.call(void 0, " \u270D\uFE0F Writing package.json file", context2.options.config);
+const stopwatch = _chunkQQQD2SINjs.getStopwatch.call(void 0, "Write package.json file");
+const packageJsonPath = _chunkQQQD2SINjs.joinPaths.call(void 0, context2.options.projectRoot, "project.json");
 if (!_fs.existsSync.call(void 0, packageJsonPath)) {
 throw new Error("Cannot find package.json configuration");
 }
-const packageJsonFile = await _promises2.default.readFile(
+const packageJsonFile = await _promises2.default.readFile(_chunkQQQD2SINjs.joinPaths.call(void 0, context2.options.config.workspaceRoot, context2.options.projectRoot, "package.json"), "utf8");
 let packageJson = JSON.parse(packageJsonFile);
 if (!packageJson) {
 throw new Error("Cannot find package.json configuration file");
@@ -1485,7 +1483,7 @@ async function generatePackageJson(context2) {
 if (Array.isArray(context2.options.entryPoints)) {
 entryPoints = context2.options.entryPoints.map((entryPoint) => typeof entryPoint === "string" ? {
 in: entryPoint,
-out: entryPoint
+out: _chunkQQQD2SINjs.correctPaths.call(void 0, entryPoint.replaceAll(_chunkQQQD2SINjs.correctPaths.call(void 0, context2.options.config.workspaceRoot), "").replaceAll(_chunkQQQD2SINjs.correctPaths.call(void 0, context2.options.projectRoot), ""))
 } : entryPoint);
 }
 for (const entryPoint of entryPoints) {
@@ -1507,7 +1505,7 @@ async function generatePackageJson(context2) {
 }
 return ret;
 }, packageJson.exports);
-await _devkit.writeJsonFile.call(void 0,
+await _devkit.writeJsonFile.call(void 0, _chunkQQQD2SINjs.joinPaths.call(void 0, context2.options.outdir, "package.json"), packageJson);
 stopwatch();
 }
 return context2;
@@ -1533,8 +1531,8 @@ async function generateContext(getOptions) {
 }
 _chunkJ5SB6L2Ljs.__name.call(void 0, generateContext, "generateContext");
 async function executeEsBuild(context2) {
-
-const stopwatch =
+_chunkQQQD2SINjs.writeDebug.call(void 0, ` \u{1F680} Running ${context2.options.name} build`, context2.options.config);
+const stopwatch = _chunkQQQD2SINjs.getStopwatch.call(void 0, `${context2.options.name} build`);
 if (process.env.STORM_WATCH) {
 const ctx = await esbuild2.context(context2.options);
 watch(ctx, context2.options);
@@ -1542,7 +1540,7 @@ async function executeEsBuild(context2) {
 const options = {
 ...context2.options
 };
-options.outdir =
+options.outdir = _chunkQQQD2SINjs.joinPaths.call(void 0, context2.options.outdir, context2.options.distDir);
 if (!options.inject || !Array.isArray(options.inject) || options.inject.length === 0 || // eslint-disable-next-line no-constant-binary-expression, @typescript-eslint/no-explicit-any
 options.inject === {}) {
 delete options.inject;
@@ -1556,7 +1554,6 @@ async function executeEsBuild(context2) {
 delete options.clean;
 delete options.debug;
 delete options.generatePackageJson;
-delete options.emitOnAll;
 delete options.distDir;
 delete options.includeSrc;
 delete options.verbose;
@@ -1568,8 +1565,11 @@ async function executeEsBuild(context2) {
 delete options.config;
 delete options.injectShims;
 delete options.external;
-
-${
+_chunkQQQD2SINjs.writeTrace.call(void 0, `Run esbuild (${context2.options.name}) with the following options:
+${_chunkQQQD2SINjs.formatLogMessage.call(void 0, {
+...options,
+define: "<Hidden>"
+})}`, context2.options.config);
 const result = await esbuild2.build(options);
 await esbuild2.stop();
 if (result.metafile) {
@@ -1582,8 +1582,8 @@ ${_chunkHWJBDRIGjs.formatLogMessage.call(void 0, options)}`, context2.options.co
 _chunkJ5SB6L2Ljs.__name.call(void 0, executeEsBuild, "executeEsBuild");
 async function copyBuildAssets(context2) {
 if (_optionalChain([context2, 'access', _76 => _76.result, 'optionalAccess', _77 => _77.errors, 'access', _78 => _78.length]) === 0) {
-
-const stopwatch =
+_chunkQQQD2SINjs.writeDebug.call(void 0, ` \u{1F4CB} Copying asset files to output directory: ${context2.options.outdir}`, context2.options.config);
+const stopwatch = _chunkQQQD2SINjs.getStopwatch.call(void 0, `${context2.options.name} asset copy`);
 await copyAssets(context2.options.config, _nullishCoalesce(context2.options.assets, () => ( [])), context2.options.outdir, context2.options.projectRoot, context2.options.sourceRoot, true, false);
 stopwatch();
 }
@@ -1593,9 +1593,9 @@ _chunkJ5SB6L2Ljs.__name.call(void 0, copyBuildAssets, "copyBuildAssets");
 async function reportResults(context2) {
 if (_optionalChain([context2, 'access', _79 => _79.result, 'optionalAccess', _80 => _80.errors, 'access', _81 => _81.length]) === 0) {
 if (context2.result.warnings.length > 0) {
-
+_chunkQQQD2SINjs.writeWarning.call(void 0, ` \u{1F6A7} The following warnings occurred during the build: ${context2.result.warnings.map((warning) => warning.text).join("\n")}`, context2.options.config);
 }
-
+_chunkQQQD2SINjs.writeSuccess.call(void 0, ` \u{1F4E6} The ${context2.options.name} build completed successfully`, context2.options.config);
 }
 }
 _chunkJ5SB6L2Ljs.__name.call(void 0, reportResults, "reportResults");
@@ -1631,8 +1631,8 @@ async function dependencyCheck(options) {
 _chunkJ5SB6L2Ljs.__name.call(void 0, dependencyCheck, "dependencyCheck");
 async function cleanOutputPath(context2) {
 if (context2.options.clean !== false && context2.options.outdir) {
-
-const stopwatch =
+_chunkQQQD2SINjs.writeDebug.call(void 0, ` \u{1F9F9} Cleaning ${context2.options.name} output path: ${context2.options.outdir}`, context2.options.config);
+const stopwatch = _chunkQQQD2SINjs.getStopwatch.call(void 0, `${context2.options.name} output clean`);
 await cleanDirectories(context2.options.name, context2.options.outdir, context2.options.config);
 stopwatch();
 }
@@ -1640,8 +1640,8 @@ async function cleanOutputPath(context2) {
 }
 _chunkJ5SB6L2Ljs.__name.call(void 0, cleanOutputPath, "cleanOutputPath");
 async function build3(options) {
-
-const stopwatch =
+_chunkQQQD2SINjs.writeDebug.call(void 0, ` \u26A1 Executing Storm ESBuild pipeline`);
+const stopwatch = _chunkQQQD2SINjs.getStopwatch.call(void 0, "ESBuild pipeline");
 try {
 const opts = Array.isArray(options) ? options : [
 options
@@ -1651,9 +1651,9 @@ async function build3(options) {
 }
 void transduce.async(opts, dependencyCheck);
 await transduce.async(await createOptions(opts), pipe.async(generateContext, cleanOutputPath, generatePackageJson, executeEsBuild, copyBuildAssets, reportResults));
-
+_chunkQQQD2SINjs.writeSuccess.call(void 0, " \u{1F3C1} ESBuild pipeline build completed successfully");
 } catch (error) {
-
+_chunkQQQD2SINjs.writeFatal.call(void 0, "Fatal errors that the build process could not recover from have occured. The build process has been terminated.");
 throw error;
 } finally {
 stopwatch();
@@ -1681,9 +1681,9 @@ var watch = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, (context2, opti
 return context2.rebuild();
 });
 if (rebuildResult instanceof Error) {
-
+_chunkQQQD2SINjs.writeError.call(void 0, rebuildResult.message);
 }
-
+_chunkQQQD2SINjs.writeTrace.call(void 0, `${Date.now() - timeBefore}ms [${_nullishCoalesce(options.name, () => ( ""))}]`);
 }, 10);
 changeWatcher.on("change", fastRebuild);
 return void 0;
@@ -1691,7 +1691,7 @@ var watch = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, (context2, opti
 // ../workspace-tools/src/executors/esbuild/executor.ts
 async function esbuildExecutorFn(options, context2, config) {
-
+_chunkQQQD2SINjs.writeInfo.call(void 0, "\u{1F4E6} Running Storm ESBuild executor on the workspace", config);
 if (!_optionalChain([context2, 'access', _82 => _82.projectsConfigurations, 'optionalAccess', _83 => _83.projects]) || !context2.projectName || !context2.projectsConfigurations.projects[context2.projectName] || !_optionalChain([context2, 'access', _84 => _84.projectsConfigurations, 'access', _85 => _85.projects, 'access', _86 => _86[context2.projectName], 'optionalAccess', _87 => _87.root])) {
 throw new Error("The Build process failed because the context is not valid. Please run this command from a workspace.");
 }
@@ -1745,7 +1745,7 @@ async function sizeLimitExecutorFn(options, context2, config) {
 if (!_optionalChain([context2, 'optionalAccess', _96 => _96.projectName]) || !_optionalChain([context2, 'access', _97 => _97.projectsConfigurations, 'optionalAccess', _98 => _98.projects]) || !context2.projectsConfigurations.projects[context2.projectName]) {
 throw new Error("The Size-Limit process failed because the context is not valid. Please run this command from a workspace.");
 }
-
+_chunkQQQD2SINjs.writeInfo.call(void 0, `\u{1F4CF} Running Size-Limit on ${context2.projectName}`, config);
 _sizelimit2.default.call(void 0, [
 _file2.default,
 _esbuild3.default,
@@ -1753,7 +1753,7 @@ async function sizeLimitExecutorFn(options, context2, config) {
 ], {
 checks: _nullishCoalesce(_nullishCoalesce(options.entry, () => ( _optionalChain([context2, 'access', _99 => _99.projectsConfigurations, 'access', _100 => _100.projects, 'access', _101 => _101[context2.projectName], 'optionalAccess', _102 => _102.sourceRoot]))), () => ( _devkit.joinPathFragments.call(void 0, _nullishCoalesce(_optionalChain([context2, 'access', _103 => _103.projectsConfigurations, 'access', _104 => _104.projects, 'access', _105 => _105[context2.projectName], 'optionalAccess', _106 => _106.root]), () => ( "./")), "src")))
 }).then((result) => {
-
+_chunkQQQD2SINjs.writeInfo.call(void 0, `\u{1F4CF} ${context2.projectName} Size-Limit result: ${JSON.stringify(result)}`, config);
 });
 return {
 success: true
@@ -1774,11 +1774,11 @@ var _fsextra = require('fs-extra');
 var _TypiaProgrammerjs = require('typia/lib/programmers/TypiaProgrammer.js');
 async function typiaExecutorFn(options, _, config) {
 if (options.clean !== false) {
-
+_chunkQQQD2SINjs.writeInfo.call(void 0, `\u{1F9F9} Cleaning output path: ${options.outputPath}`, config);
 _fsextra.removeSync.call(void 0, options.outputPath);
 }
 await Promise.all(options.entry.map((entry) => {
-
+_chunkQQQD2SINjs.writeInfo.call(void 0, `\u{1F680} Running Typia on entry: ${entry}`, config);
 return _TypiaProgrammerjs.TypiaProgrammer.build({
 input: entry,
 output: options.outputPath,
@@ -1809,7 +1809,7 @@ var executor_default8 = withRunExecutor("Typia runtime validation generator", ty
 var _jiti = require('jiti');
 async function unbuildExecutorFn(options, context2, config) {
-
+_chunkQQQD2SINjs.writeInfo.call(void 0, "\u{1F4E6} Running Storm Unbuild executor on the workspace", config);
 if (!_optionalChain([context2, 'access', _107 => _107.projectsConfigurations, 'optionalAccess', _108 => _108.projects]) || !context2.projectName || !context2.projectsConfigurations.projects[context2.projectName]) {
 throw new Error("The Build process failed because the context is not valid. Please run this command from a workspace root directory.");
 }
@@ -1820,7 +1820,7 @@ async function unbuildExecutorFn(options, context2, config) {
 throw new Error("The Build process failed because the project's source root is not valid. Please run this command from a workspace root directory.");
 }
 const jiti = _jiti.createJiti.call(void 0, config.workspaceRoot, {
-fsCache: config.skipCache ? false :
+fsCache: config.skipCache ? false : _chunkQQQD2SINjs.joinPaths.call(void 0, config.workspaceRoot, config.directories.cache || "node_modules/.cache/storm", "jiti"),
 interopDefault: true
 });
 const stormUnbuild = await jiti.import(jiti.esmResolve("@storm-software/unbuild/build"));
@@ -1833,7 +1833,7 @@ async function unbuildExecutorFn(options, context2, config) {
 }, {
 stubOptions: {
 jiti: {
-fsCache: config.skipCache ? false :
+fsCache: config.skipCache ? false : _chunkQQQD2SINjs.joinPaths.call(void 0, config.workspaceRoot, config.directories.cache || "node_modules/.cache/storm", "jiti")
 }
 },
 rollup: {
@@ -1880,34 +1880,34 @@ var executor_default9 = withRunExecutor("TypeScript Unbuild build", unbuildExecu
 var withRunGenerator = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, (name, generatorFn, generatorOptions = {
 skipReadingConfig: false
 }) => async (tree, _options) => {
-const stopwatch =
+const stopwatch = _chunkQQQD2SINjs.getStopwatch.call(void 0, name);
 let options = _options;
 let config;
 try {
-
+_chunkQQQD2SINjs.writeInfo.call(void 0, `\u26A1 Running the ${name} generator...
 `, config);
-const workspaceRoot3 =
+const workspaceRoot3 = _chunkQQQD2SINjs.findWorkspaceRoot.call(void 0, );
 if (!generatorOptions.skipReadingConfig) {
-
+_chunkQQQD2SINjs.writeDebug.call(void 0, `Loading the Storm Config from environment variables and storm.config.js file...
 - workspaceRoot: ${workspaceRoot3}`, config);
-config = await
+config = await _chunkZ5XL2UL5js.getConfig.call(void 0, workspaceRoot3);
 }
 if (_optionalChain([generatorOptions, 'optionalAccess', _109 => _109.hooks, 'optionalAccess', _110 => _110.applyDefaultOptions])) {
-
+_chunkQQQD2SINjs.writeDebug.call(void 0, "Running the applyDefaultOptions hook...", config);
 options = await Promise.resolve(generatorOptions.hooks.applyDefaultOptions(options, config));
-
+_chunkQQQD2SINjs.writeDebug.call(void 0, "Completed the applyDefaultOptions hook", config);
 }
-
+_chunkQQQD2SINjs.writeTrace.call(void 0, `Generator schema options \u2699\uFE0F
 ${Object.keys(_nullishCoalesce(options, () => ( {}))).map((key) => ` - ${key}=${JSON.stringify(options[key])}`).join("\n")}`, config);
 const tokenized = await applyWorkspaceTokens(options, {
 workspaceRoot: tree.root,
 config
 }, applyWorkspaceBaseTokens);
 if (_optionalChain([generatorOptions, 'optionalAccess', _111 => _111.hooks, 'optionalAccess', _112 => _112.preProcess])) {
-
+_chunkQQQD2SINjs.writeDebug.call(void 0, "Running the preProcess hook...", config);
 await Promise.resolve(generatorOptions.hooks.preProcess(tokenized, config));
-
+_chunkQQQD2SINjs.writeDebug.call(void 0, "Completed the preProcess hook", config);
 }
 const result = await Promise.resolve(generatorFn(tree, tokenized, config));
 if (result) {
@@ -1920,18 +1920,18 @@ ${Object.keys(_nullishCoalesce(options, () => ( {}))).map((key) => ` - ${key}=${
 }
 }
 if (_optionalChain([generatorOptions, 'optionalAccess', _122 => _122.hooks, 'optionalAccess', _123 => _123.postProcess])) {
-
+_chunkQQQD2SINjs.writeDebug.call(void 0, "Running the postProcess hook...", config);
 await Promise.resolve(generatorOptions.hooks.postProcess(config));
-
+_chunkQQQD2SINjs.writeDebug.call(void 0, "Completed the postProcess hook", config);
 }
 return () => {
-
+_chunkQQQD2SINjs.writeSuccess.call(void 0, `Completed running the ${name} generator!
 `, config);
 };
 } catch (error) {
 return () => {
-
-
+_chunkQQQD2SINjs.writeFatal.call(void 0, "A fatal error occurred while running the generator - the process was forced to terminate", config);
+_chunkQQQD2SINjs.writeError.call(void 0, `An exception was thrown in the generator's process
 - Details: ${error.message}
 - Stacktrace: ${error.stack}`, config);
 };
@@ -1977,7 +1977,7 @@ async function typeScriptLibraryGeneratorFn(tree, options, config) {
 const projectConfig = {
 root: normalized.directory,
 projectType: "library",
-sourceRoot:
+sourceRoot: _chunkQQQD2SINjs.joinPaths.call(void 0, _nullishCoalesce(normalized.directory, () => ( "")), "src"),
 targets: {
 build: {
 executor: options.buildExecutor,
@@ -1986,11 +1986,11 @@ async function typeScriptLibraryGeneratorFn(tree, options, config) {
 ],
 options: {
 entry: [
-
+_chunkQQQD2SINjs.joinPaths.call(void 0, normalized.projectRoot, "src", "index.ts")
 ],
 outputPath: getOutputPath(normalized),
-tsconfig:
-project:
+tsconfig: _chunkQQQD2SINjs.joinPaths.call(void 0, normalized.projectRoot, "tsconfig.json"),
+project: _chunkQQQD2SINjs.joinPaths.call(void 0, normalized.projectRoot, "package.json"),
 defaultConfiguration: "production",
 platform: "neutral",
 assets: [
@@ -2044,7 +2044,7 @@ async function typeScriptLibraryGeneratorFn(tree, options, config) {
 if (!normalized.importPath) {
 normalized.importPath = normalized.name;
 }
-const packageJsonPath =
+const packageJsonPath = _chunkQQQD2SINjs.joinPaths.call(void 0, normalized.projectRoot, "package.json");
 if (tree.exists(packageJsonPath)) {
 _devkit.updateJson.call(void 0, tree, packageJsonPath, (json) => {
 if (!normalized.importPath) {
@@ -2101,10 +2101,10 @@ async function typeScriptLibraryGeneratorFn(tree, options, config) {
 }));
 }
 _js.addTsConfigPath.call(void 0, tree, normalized.importPath, [
-
+_chunkQQQD2SINjs.joinPaths.call(void 0, normalized.projectRoot, "./src", `index.${normalized.js ? "js" : "ts"}`)
 ]);
-_js.addTsConfigPath.call(void 0, tree,
-
+_js.addTsConfigPath.call(void 0, tree, _chunkQQQD2SINjs.joinPaths.call(void 0, normalized.importPath, "/*"), [
+_chunkQQQD2SINjs.joinPaths.call(void 0, normalized.projectRoot, "./src", "/*")
 ]);
 if (tree.exists("package.json")) {
 const packageJson = _devkit.readJson.call(void 0, tree, "package.json");
@@ -2115,7 +2115,7 @@ async function typeScriptLibraryGeneratorFn(tree, options, config) {
 description = packageJson.description;
 }
 }
-const tsconfigPath =
+const tsconfigPath = _chunkQQQD2SINjs.joinPaths.call(void 0, normalized.projectRoot, "tsconfig.json");
 if (tree.exists(tsconfigPath)) {
 _devkit.updateJson.call(void 0, tree, tsconfigPath, (json) => {
 json.composite ??= true;
@@ -2153,7 +2153,7 @@ function getOutputPath(options) {
 } else {
 parts.push(options.projectRoot);
 }
-return
+return _chunkQQQD2SINjs.joinPaths.call(void 0, ...parts);
 }
 _chunkJ5SB6L2Ljs.__name.call(void 0, getOutputPath, "getOutputPath");
 function createProjectTsConfigJson(tree, options) {
@@ -2162,7 +2162,7 @@ function createProjectTsConfigJson(tree, options) {
 ..._nullishCoalesce(_optionalChain([options, 'optionalAccess', _135 => _135.tsconfigOptions]), () => ( {})),
 compilerOptions: {
 ...options.rootProject ? _js.tsConfigBaseOptions : {},
-outDir:
+outDir: _chunkQQQD2SINjs.joinPaths.call(void 0, _devkit.offsetFromRoot.call(void 0, options.projectRoot), "dist/out-tsc"),
 noEmit: true,
 ..._nullishCoalesce(_optionalChain([options, 'optionalAccess', _136 => _136.tsconfigOptions, 'optionalAccess', _137 => _137.compilerOptions]), () => ( {}))
 },
@@ -2182,7 +2182,7 @@ function createProjectTsConfigJson(tree, options) {
 "src/**/*.test.ts"
 ]
 };
-_devkit.writeJson.call(void 0, tree,
+_devkit.writeJson.call(void 0, tree, _chunkQQQD2SINjs.joinPaths.call(void 0, options.projectRoot, "tsconfig.json"), tsconfig);
 }
 _chunkJ5SB6L2Ljs.__name.call(void 0, createProjectTsConfigJson, "createProjectTsConfigJson");
 async function normalizeOptions(tree, options, config) {
@@ -2242,7 +2242,7 @@ _chunkJ5SB6L2Ljs.__name.call(void 0, normalizeOptions, "normalizeOptions");
 // ../workspace-tools/src/generators/browser-library/generator.ts
 async function browserLibraryGeneratorFn(tree, schema, config) {
-const filesDir =
+const filesDir = _chunkQQQD2SINjs.joinPaths.call(void 0, __dirname, "src", "generators", "browser-library", "files");
 const tsLibraryGeneratorOptions = {
 buildExecutor: "@storm-software/workspace-tools:unbuild",
 platform: "browser",
@@ -2314,19 +2314,19 @@ var generator_default = withRunGenerator("TypeScript Library Creator (Browser Pl
 var _zodtojsonschema = require('zod-to-json-schema');
 async function configSchemaGeneratorFn(tree, options, config) {
-
-
-const jsonSchema = _zodtojsonschema.zodToJsonSchema.call(void 0,
+_chunkQQQD2SINjs.writeInfo.call(void 0, "\u{1F4E6} Running Storm Workspace Configuration JSON Schema generator", config);
+_chunkQQQD2SINjs.writeTrace.call(void 0, `Determining the Storm Workspace Configuration JSON Schema...`, config);
+const jsonSchema = _zodtojsonschema.zodToJsonSchema.call(void 0, _chunkQQQD2SINjs.stormWorkspaceConfigSchema, {
 name: "StormWorkspaceConfiguration"
 });
-
-const outputPath = options.outputFile.replaceAll("{workspaceRoot}", "").replaceAll(_nullishCoalesce(_optionalChain([config, 'optionalAccess', _146 => _146.workspaceRoot]), () => (
-
+_chunkQQQD2SINjs.writeTrace.call(void 0, jsonSchema, config);
+const outputPath = options.outputFile.replaceAll("{workspaceRoot}", "").replaceAll(_nullishCoalesce(_optionalChain([config, 'optionalAccess', _146 => _146.workspaceRoot]), () => ( _chunkQQQD2SINjs.findWorkspaceRoot.call(void 0, ))), _optionalChain([options, 'access', _147 => _147.outputFile, 'optionalAccess', _148 => _148.startsWith, 'call', _149 => _149("./")]) ? "" : "./");
+_chunkQQQD2SINjs.writeTrace.call(void 0, `\u{1F4DD} Writing Storm Configuration JSON Schema to "${outputPath}"`, config);
 _devkit.writeJson.call(void 0, tree, outputPath, jsonSchema, {
 spaces: 2
 });
 await _devkit.formatFiles.call(void 0, tree);
-
+_chunkQQQD2SINjs.writeSuccess.call(void 0, "\u{1F680} Storm Configuration JSON Schema creation has completed successfully!", config);
 return {
 success: true
 };
@@ -2347,7 +2347,7 @@ var generator_default2 = withRunGenerator("Configuration Schema Creator", config
 // ../workspace-tools/src/generators/neutral-library/generator.ts
 async function neutralLibraryGeneratorFn(tree, schema, config) {
-const filesDir =
+const filesDir = _chunkQQQD2SINjs.joinPaths.call(void 0, __dirname, "src", "generators", "neutral-library", "files");
 const tsLibraryGeneratorOptions = {
 ...schema,
 platform: "neutral",
@@ -2390,7 +2390,7 @@ var generator_default3 = withRunGenerator("TypeScript Library Creator (Neutral P
 // ../workspace-tools/src/generators/node-library/generator.ts
 async function nodeLibraryGeneratorFn(tree, schema, config) {
-const filesDir =
+const filesDir = _chunkQQQD2SINjs.joinPaths.call(void 0, __dirname, "src", "generators", "node-library", "files");
 const tsLibraryGeneratorOptions = {
 platform: "node",
 devDependencies: {
```