@storm-software/cloudflare-tools 0.51.1 → 0.52.0
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/README.md +1 -1
- package/dist/{chunk-TH4KH35P.js → chunk-343PZDII.js} +1 -2
- package/dist/{chunk-TKQA6SOX.mjs → chunk-42I5F5WI.mjs} +3 -3
- package/dist/{chunk-ABCBYQGV.js → chunk-AZ4TIZKT.js} +3 -3
- package/dist/{chunk-3JXOPSOV.mjs → chunk-EF5HLWXR.mjs} +1 -1
- package/dist/{chunk-XBFOA367.mjs → chunk-ISVNV2UX.mjs} +1 -2
- package/dist/{chunk-OTQ3T47C.mjs → chunk-IU4KGUV4.mjs} +1 -1
- package/dist/{chunk-GTA63S6V.js → chunk-JLSFFOSI.js} +1 -1
- package/dist/{chunk-ZKG6FHEK.mjs → chunk-KZF3SN32.mjs} +3 -3
- package/dist/{chunk-3JJSMPBT.js → chunk-OQPC7VB6.js} +41 -41
- package/dist/{chunk-UOZU7VQX.mjs → chunk-TOEMUWVP.mjs} +1 -1
- package/dist/{chunk-T5WCUJXN.js → chunk-U7TTYCSL.js} +12 -12
- package/dist/{chunk-6PVMDNO7.mjs → chunk-Y56OHY47.mjs} +3 -3
- package/dist/{chunk-MHBXIMZJ.js → chunk-YLIOH3VA.js} +155 -155
- package/dist/{chunk-O2XVQQLQ.js → chunk-YVLGBYMR.js} +16 -16
- package/dist/executors.js +5 -5
- package/dist/executors.mjs +5 -5
- package/dist/generators.js +5 -5
- package/dist/generators.mjs +4 -4
- package/dist/index.js +8 -8
- package/dist/index.mjs +7 -7
- package/dist/src/executors/cloudflare-publish/executor.js +3 -3
- package/dist/src/executors/cloudflare-publish/executor.mjs +3 -3
- package/dist/src/executors/r2-upload-publish/executor.js +5 -5
- package/dist/src/executors/r2-upload-publish/executor.mjs +4 -4
- package/dist/src/executors/serve/executor.js +4 -4
- package/dist/src/executors/serve/executor.mjs +3 -3
- package/dist/src/generators/init/generator.js +2 -2
- package/dist/src/generators/init/generator.mjs +1 -1
- package/dist/src/generators/worker/generator.js +5 -5
- package/dist/src/generators/worker/generator.mjs +4 -4
- package/dist/src/utils/index.js +3 -3
- package/dist/src/utils/index.mjs +2 -2
- package/dist/src/utils/r2-bucket-helpers.js +3 -3
- package/dist/src/utils/r2-bucket-helpers.mjs +2 -2
- package/package.json +1 -1
@@ -4,7 +4,7 @@
-var
+var _chunkOQPC7VB6js = require('./chunk-OQPC7VB6.js');
@@ -20,7 +20,7 @@ var _chunk3JJSMPBTjs = require('./chunk-3JJSMPBT.js');
-var
+var _chunk343PZDIIjs = require('./chunk-343PZDII.js');
@@ -53,7 +53,7 @@ var applyWorkspaceBaseTokens = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void
-result = result.replaceAll("{workspaceRoot}", _nullishCoalesce(_nullishCoalesce(tokenizerOptions.workspaceRoot, () => ( _optionalChain([tokenizerOptions, 'access', _4 => _4.config, 'optionalAccess', _5 => _5.workspaceRoot]))), () => (
+result = result.replaceAll("{workspaceRoot}", _nullishCoalesce(_nullishCoalesce(tokenizerOptions.workspaceRoot, () => ( _optionalChain([tokenizerOptions, 'access', _4 => _4.config, 'optionalAccess', _5 => _5.workspaceRoot]))), () => ( _chunk343PZDIIjs.findWorkspaceRoot.call(void 0, ))));
@@ -139,42 +139,42 @@ var applyWorkspaceExecutorTokens = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(
-result = result.replaceAll("{workspaceRoot}", _nullishCoalesce(tokenizerOptions.workspaceRoot, () => (
+result = result.replaceAll("{workspaceRoot}", _nullishCoalesce(tokenizerOptions.workspaceRoot, () => ( _chunk343PZDIIjs.findWorkspaceRoot.call(void 0, ))));
-const stopwatch =
+const stopwatch = _chunk343PZDIIjs.getStopwatch.call(void 0, name);
-
+_chunk343PZDIIjs.writeInfo.call(void 0, `\u26A1 Running the ${name} executor...
-const workspaceRoot3 =
+const workspaceRoot3 = _chunk343PZDIIjs.findWorkspaceRoot.call(void 0, );
-
+_chunk343PZDIIjs.writeTrace.call(void 0, `Loading the Storm Config from environment variables and storm.config.js file...
-config = await
+config = await _chunkOQPC7VB6js.loadStormConfig.call(void 0, workspaceRoot3);
-
+_chunk343PZDIIjs.writeDebug.call(void 0, "Running the applyDefaultOptions hook...", config);
-
+_chunk343PZDIIjs.writeDebug.call(void 0, "Completed the applyDefaultOptions hook", config);
-
+_chunk343PZDIIjs.writeTrace.call(void 0, `Executor schema options \u2699\uFE0F
@@ -186,9 +186,9 @@ ${Object.keys(options).map((key) => ` - ${key}=${_isFunction(options[key]) ? "<f
-
+_chunk343PZDIIjs.writeDebug.call(void 0, "Running the preProcess hook...", config);
-
+_chunk343PZDIIjs.writeDebug.call(void 0, "Completed the preProcess hook", config);
@@ -198,26 +198,26 @@ ${Object.keys(options).map((key) => ` - ${key}=${_isFunction(options[key]) ? "<f
-
-${
+_chunk343PZDIIjs.writeTrace.call(void 0, `Failure determined by the ${name} executor
+${_chunk343PZDIIjs.formatLogMessage.call(void 0, result)}`, config);
-
+_chunk343PZDIIjs.writeDebug.call(void 0, "Running the postProcess hook...", config);
-
+_chunk343PZDIIjs.writeDebug.call(void 0, "Completed the postProcess hook", config);
-
+_chunk343PZDIIjs.writeSuccess.call(void 0, `Completed running the ${name} task executor!
-
-
+_chunk343PZDIIjs.writeFatal.call(void 0, "A fatal error occurred while running the executor - the process was forced to terminate", config);
+_chunk343PZDIIjs.writeError.call(void 0, `An exception was thrown in the executor's process
@@ -502,10 +502,10 @@ var _promises = require('fs/promises'); var _promises2 = _interopRequireDefault(
-const config = await
+const config = await _chunkOQPC7VB6js.loadStormConfig.call(void 0, );
-const nxJsonPath =
+const nxJsonPath = _chunk343PZDIIjs.joinPaths.call(void 0, rootDir, "nx.json");
@@ -552,7 +552,7 @@ var copyAssets = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async (con
-throw new Error(`The Build process failed because the project does not have a valid build target in the project.json file. Check if the file exists in the root of the project at ${
+throw new Error(`The Build process failed because the project does not have a valid build target in the project.json file. Check if the file exists in the root of the project at ${_chunk343PZDIIjs.joinPaths.call(void 0, projectRoot, "project.json")}`);
@@ -567,18 +567,18 @@ var copyAssets = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async (con
-isVerbose:
+isVerbose: _chunk343PZDIIjs.isVerbose.call(void 0, config.logLevel)
-
+_chunk343PZDIIjs.writeDebug.call(void 0, `\u{1F4DD} Adding banner and writing source files: ${_chunk343PZDIIjs.joinPaths.call(void 0, outputPath, "src")}`, config);
-
-
-
-
+_chunk343PZDIIjs.joinPaths.call(void 0, config.workspaceRoot, outputPath, "src/**/*.ts"),
+_chunk343PZDIIjs.joinPaths.call(void 0, config.workspaceRoot, outputPath, "src/**/*.tsx"),
+_chunk343PZDIIjs.joinPaths.call(void 0, config.workspaceRoot, outputPath, "src/**/*.js"),
+_chunk343PZDIIjs.joinPaths.call(void 0, config.workspaceRoot, outputPath, "src/**/*.jsx")
@@ -600,7 +600,7 @@ var addPackageDependencies = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0
-const projectPackageJsonPath =
+const projectPackageJsonPath = _chunk343PZDIIjs.joinPaths.call(void 0, workspaceRoot3, projectNode.data.root, "package.json");
@@ -611,7 +611,7 @@ var addPackageDependencies = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0
-
+_chunk343PZDIIjs.writeTrace.call(void 0, `\u{1F4E6} Adding local packages to package.json: ${localPackages.map((p) => p.name).join(", ")}`);
@@ -633,13 +633,13 @@ var addPackageDependencies = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0
-
+_chunk343PZDIIjs.writeTrace.call(void 0, "\u{1F4E6} No local packages dependencies to add to package.json");
-const workspaceRoot3 = config.workspaceRoot ? config.workspaceRoot :
-const workspacePackageJsonContent = await _promises.readFile.call(void 0,
+const workspaceRoot3 = config.workspaceRoot ? config.workspaceRoot : _chunk343PZDIIjs.findWorkspaceRoot.call(void 0, );
+const workspacePackageJsonContent = await _promises.readFile.call(void 0, _chunk343PZDIIjs.joinPaths.call(void 0, workspaceRoot3, "package.json"), "utf8");
@@ -648,7 +648,7 @@ var addWorkspacePackageJsonFields = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call
-packageJson.source ??= `${
+packageJson.source ??= `${_chunk343PZDIIjs.joinPaths.call(void 0, distSrc, "index.ts").replaceAll("\\", "/")}`;
@@ -679,7 +679,7 @@ var addWorkspacePackageJsonFields = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call
-packageJson.repository.directory ??= projectRoot ? projectRoot :
+packageJson.repository.directory ??= projectRoot ? projectRoot : _chunk343PZDIIjs.joinPaths.call(void 0, "packages", projectName);
@@ -706,7 +706,7 @@ var addPackageJsonExport = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0,
-const workspaceRoot3 = config.workspaceRoot ? config.workspaceRoot :
+const workspaceRoot3 = config.workspaceRoot ? config.workspaceRoot : _chunk343PZDIIjs.findWorkspaceRoot.call(void 0, );
@@ -718,7 +718,7 @@ var getEntryPoints = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
-entryPoints.push(
+entryPoints.push(_chunk343PZDIIjs.joinPaths.call(void 0, workspaceRoot3, sourceRoot || projectRoot, "**/*.{ts,tsx}"));
@@ -727,9 +727,9 @@ var getEntryPoints = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
-const result =
+const result = _chunk343PZDIIjs.correctPaths.call(void 0, _chunk343PZDIIjs.joinPaths.call(void 0, filePath.path, filePath.name).replaceAll(_chunk343PZDIIjs.correctPaths.call(void 0, workspaceRoot3), "").replaceAll(_chunk343PZDIIjs.correctPaths.call(void 0, projectRoot), ""));
-
+_chunk343PZDIIjs.writeDebug.call(void 0, `Trying to add entry point ${result} at "${_chunk343PZDIIjs.joinPaths.call(void 0, filePath.path, filePath.name)}"`, config);
@@ -935,8 +935,8 @@ var RendererEngine = class {
-
-const stopwatch =
+_chunk343PZDIIjs.writeDebug.call(void 0, ` \u{1F9F9} Cleaning ${name} output path: ${directory}`, config);
+const stopwatch = _chunk343PZDIIjs.getStopwatch.call(void 0, `${name} output clean`);
@@ -1057,7 +1057,7 @@ var onErrorPlugin = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, (option
-
+_chunk343PZDIIjs.writeError.call(void 0, `The following errors occurred during the build:
@@ -1092,7 +1092,7 @@ _chunkJ5SB6L2Ljs.__name.call(void 0, resolvePathsConfig, "resolvePathsConfig");
-const parentTsConfig = build5.initialOptions.tsconfig ? _chunkJ5SB6L2Ljs.__require.call(void 0,
+const parentTsConfig = build5.initialOptions.tsconfig ? _chunkJ5SB6L2Ljs.__require.call(void 0, _chunk343PZDIIjs.joinPaths.call(void 0, resolvedOptions.config.workspaceRoot, build5.initialOptions.tsconfig)) : _chunkJ5SB6L2Ljs.__require.call(void 0, _chunk343PZDIIjs.joinPaths.call(void 0, resolvedOptions.config.workspaceRoot, "tsconfig.json"));
@@ -1115,7 +1115,7 @@ var resolvePathsPlugin = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, (o
-const { dependencies, peerDependencies, devDependencies } = _chunkJ5SB6L2Ljs.__require.call(void 0,
+const { dependencies, peerDependencies, devDependencies } = _chunkJ5SB6L2Ljs.__require.call(void 0, _chunk343PZDIIjs.joinPaths.call(void 0, options.projectRoot, "package.json"));
@@ -1151,13 +1151,13 @@ function bundleTypeDefinitions(filename, outfile, externals, options) {
-untrimmedFilePath:
+untrimmedFilePath: _chunk343PZDIIjs.joinPaths.call(void 0, options.outdir, `${outfile}.d.ts`)
-packageJsonFullPath:
+packageJsonFullPath: _chunk343PZDIIjs.joinPaths.call(void 0, options.projectRoot, "package.json"),
@@ -1165,7 +1165,7 @@ function bundleTypeDefinitions(filename, outfile, externals, options) {
-
+_chunk343PZDIIjs.writeError.call(void 0, `API Extractor completed with ${extractorResult.errorCount} ${extractorResult.errorCount === 1 ? "error" : "errors"}`);
@@ -1184,12 +1184,12 @@ var tscPlugin = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, (options, r
-const bundlePath =
+const bundlePath = _chunk343PZDIIjs.joinPaths.call(void 0, resolvedOptions.outdir, entryPoint);
-if (await _node.hfs.isFile(
-dtsPath =
-} else if (await _node.hfs.isFile(
-dtsPath =
+if (await _node.hfs.isFile(_chunk343PZDIIjs.joinPaths.call(void 0, resolvedOptions.config.workspaceRoot, typeOutDir, `${entryPoint}.d.ts`))) {
+dtsPath = _chunk343PZDIIjs.joinPaths.call(void 0, resolvedOptions.config.workspaceRoot, typeOutDir, `${entryPoint}.d.ts`);
+} else if (await _node.hfs.isFile(_chunk343PZDIIjs.joinPaths.call(void 0, resolvedOptions.config.workspaceRoot, typeOutDir, `${entryPoint.replace(/^src\//, "")}.d.ts`))) {
+dtsPath = _chunk343PZDIIjs.joinPaths.call(void 0, resolvedOptions.config.workspaceRoot, typeOutDir, `${entryPoint.replace(/^src\//, "")}.d.ts`);
@@ -1344,8 +1344,8 @@ var depsCheckPlugin = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, (bund
-
-
+_chunk343PZDIIjs.writeWarning.call(void 0, `Unused Dependencies: ${JSON.stringify(filteredUnusedDeps)}`);
+_chunk343PZDIIjs.writeError.call(void 0, `Missing Dependencies: ${JSON.stringify(filteredMissingDeps)}`);
@@ -1439,13 +1439,13 @@ var resolveOptions = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
-const config = await
-
-const stopwatch =
+const config = await _chunkOQPC7VB6js.loadStormConfig.call(void 0, workspaceRoot3.dir);
+_chunk343PZDIIjs.writeDebug.call(void 0, " \u2699\uFE0F Resolving build options", config);
+const stopwatch = _chunk343PZDIIjs.getStopwatch.call(void 0, "Build options resolution");
-const projectJsonPath =
+const projectJsonPath = _chunk343PZDIIjs.joinPaths.call(void 0, workspaceRoot3.dir, projectRoot, "project.json");
@@ -1458,7 +1458,7 @@ var resolveOptions = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
-const packageJsonPath =
+const packageJsonPath = _chunk343PZDIIjs.joinPaths.call(void 0, workspaceRoot3.dir, options.projectRoot, "package.json");
@@ -1482,20 +1482,20 @@ var resolveOptions = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
-tsconfig:
+tsconfig: _chunk343PZDIIjs.joinPaths.call(void 0, projectRoot, userOptions.tsconfig ? userOptions.tsconfig.replace(projectRoot, "") : "tsconfig.json"),
-outdir: userOptions.outputPath ||
+outdir: userOptions.outputPath || _chunk343PZDIIjs.joinPaths.call(void 0, "dist", projectRoot),
-sourceRoot: userOptions.sourceRoot || projectJson.sourceRoot ||
+sourceRoot: userOptions.sourceRoot || projectJson.sourceRoot || _chunk343PZDIIjs.joinPaths.call(void 0, projectRoot, "src"),
-verbose: userOptions.verbose ||
+verbose: userOptions.verbose || _chunk343PZDIIjs.isVerbose.call(void 0, ) || userOptions.debug === true,
@@ -1531,8 +1531,8 @@ var resolveOptions = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
-options.format === "cjs" && options.injectShims ?
-options.format === "esm" && options.injectShims && options.platform === "node" ?
+options.format === "cjs" && options.injectShims ? _chunk343PZDIIjs.joinPaths.call(void 0, __dirname, "../assets/cjs_shims.js") : "",
+options.format === "esm" && options.injectShims && options.platform === "node" ? _chunk343PZDIIjs.joinPaths.call(void 0, __dirname, "../assets/esm_shims.js") : "",
@@ -1541,14 +1541,14 @@ var resolveOptions = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
-if (context2.options.generatePackageJson !== false && await _node.hfs.isFile(
-
-const stopwatch =
-const packageJsonPath =
+if (context2.options.generatePackageJson !== false && await _node.hfs.isFile(_chunk343PZDIIjs.joinPaths.call(void 0, context2.options.projectRoot, "package.json"))) {
+_chunk343PZDIIjs.writeDebug.call(void 0, " \u270D\uFE0F Writing package.json file", context2.options.config);
+const stopwatch = _chunk343PZDIIjs.getStopwatch.call(void 0, "Write package.json file");
+const packageJsonPath = _chunk343PZDIIjs.joinPaths.call(void 0, context2.options.projectRoot, "project.json");
-let packageJson = await _node.hfs.json(
+let packageJson = await _node.hfs.json(_chunk343PZDIIjs.joinPaths.call(void 0, context2.options.config.workspaceRoot, context2.options.projectRoot, "package.json"));
@@ -1586,7 +1586,7 @@ async function generatePackageJson(context2) {
-await _devkit.writeJsonFile.call(void 0,
+await _devkit.writeJsonFile.call(void 0, _chunk343PZDIIjs.joinPaths.call(void 0, context2.options.outdir, "package.json"), packageJson);
@@ -1612,8 +1612,8 @@ async function generateContext(getOptions) {
-
-const stopwatch =
+_chunk343PZDIIjs.writeDebug.call(void 0, ` \u{1F680} Running ${context2.options.name} build`, context2.options.config);
+const stopwatch = _chunk343PZDIIjs.getStopwatch.call(void 0, `${context2.options.name} build`);
@@ -1629,8 +1629,8 @@ async function executeEsBuild(context2) {
-
-const stopwatch =
+_chunk343PZDIIjs.writeDebug.call(void 0, ` \u{1F4CB} Copying asset files to output directory: ${context2.options.outdir}`, context2.options.config);
+const stopwatch = _chunk343PZDIIjs.getStopwatch.call(void 0, `${context2.options.name} asset copy`);
@@ -1640,9 +1640,9 @@ _chunkJ5SB6L2Ljs.__name.call(void 0, copyBuildAssets, "copyBuildAssets");
-
+_chunk343PZDIIjs.writeWarning.call(void 0, ` \u{1F6A7} The following warnings occurred during the build: ${context2.result.warnings.map((warning) => warning.text).join("\n")}`, context2.options.config);
-
+_chunk343PZDIIjs.writeSuccess.call(void 0, ` \u{1F4E6} The ${context2.options.name} build completed successfully`, context2.options.config);
@@ -1684,8 +1684,8 @@ async function cleanOutputPath(context2) {
-
-const stopwatch =
+_chunk343PZDIIjs.writeDebug.call(void 0, ` \u26A1 Executing Storm ESBuild pipeline`);
+const stopwatch = _chunk343PZDIIjs.getStopwatch.call(void 0, "ESBuild pipeline");
@@ -1695,9 +1695,9 @@ async function build3(options) {
-
+_chunk343PZDIIjs.writeSuccess.call(void 0, " \u{1F3C1} ESBuild pipeline build completed successfully");
-
+_chunk343PZDIIjs.writeFatal.call(void 0, " \u274C Fatal errors occurred during the build that could not be recovered from. The build process has been terminated.");
@@ -1725,9 +1725,9 @@ var watch = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, (context2, opti
-
+_chunk343PZDIIjs.writeError.call(void 0, rebuildResult.message);
-
+_chunk343PZDIIjs.writeTrace.call(void 0, `${Date.now() - timeBefore}ms [${_nullishCoalesce(options.name, () => ( ""))}]`);
@@ -1735,7 +1735,7 @@ var watch = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, (context2, opti
-
+_chunk343PZDIIjs.writeInfo.call(void 0, "\u{1F4E6} Running Storm ESBuild executor on the workspace", config);
@@ -1789,7 +1789,7 @@ async function sizeLimitExecutorFn(options, context2, config) {
-
+_chunk343PZDIIjs.writeInfo.call(void 0, `\u{1F4CF} Running Size-Limit on ${context2.projectName}`, config);
@@ -1797,7 +1797,7 @@ async function sizeLimitExecutorFn(options, context2, config) {
-
+_chunk343PZDIIjs.writeInfo.call(void 0, `\u{1F4CF} ${context2.projectName} Size-Limit result: ${JSON.stringify(result)}`, config);
@@ -1823,8 +1823,8 @@ var _tsdown = require('tsdown');
-
-const stopwatch =
+_chunk343PZDIIjs.writeDebug.call(void 0, ` \u{1F9F9} Cleaning ${name} output path: ${directory}`, config);
+const stopwatch = _chunk343PZDIIjs.getStopwatch.call(void 0, `${name} output clean`);
@@ -1859,13 +1859,13 @@ var resolveOptions2 = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
-const config = await
-
-const stopwatch =
+const config = await _chunkOQPC7VB6js.loadStormConfig.call(void 0, workspaceRoot3.dir);
+_chunk343PZDIIjs.writeDebug.call(void 0, " \u2699\uFE0F Resolving build options", config);
+const stopwatch = _chunk343PZDIIjs.getStopwatch.call(void 0, "Build options resolution");
-const projectJsonPath =
+const projectJsonPath = _chunk343PZDIIjs.joinPaths.call(void 0, workspaceRoot3.dir, projectRoot, "project.json");
@@ -1878,7 +1878,7 @@ var resolveOptions2 = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
-const packageJsonPath =
+const packageJsonPath = _chunk343PZDIIjs.joinPaths.call(void 0, workspaceRoot3.dir, options.projectRoot, "package.json");
@@ -1887,20 +1887,20 @@ var resolveOptions2 = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
-tsconfig:
+tsconfig: _chunk343PZDIIjs.joinPaths.call(void 0, projectRoot, userOptions.tsconfig ? userOptions.tsconfig.replace(projectRoot, "") : "tsconfig.json"),
-outdir: userOptions.outputPath ||
+outdir: userOptions.outputPath || _chunk343PZDIIjs.joinPaths.call(void 0, "dist", projectRoot),
-sourceRoot: userOptions.sourceRoot || projectJson.sourceRoot ||
+sourceRoot: userOptions.sourceRoot || projectJson.sourceRoot || _chunk343PZDIIjs.joinPaths.call(void 0, projectRoot, "src"),
-verbose: userOptions.verbose ||
+verbose: userOptions.verbose || _chunk343PZDIIjs.isVerbose.call(void 0, ) || userOptions.debug === true,
@@ -1934,14 +1934,14 @@ var resolveOptions2 = /* @__PURE__ */ _chunkJ5SB6L2Ljs.__name.call(void 0, async
-if (options.generatePackageJson !== false && await _node.hfs.isFile(
-
-const stopwatch =
-const packageJsonPath =
+if (options.generatePackageJson !== false && await _node.hfs.isFile(_chunk343PZDIIjs.joinPaths.call(void 0, options.projectRoot, "package.json"))) {
+_chunk343PZDIIjs.writeDebug.call(void 0, " \u270D\uFE0F Writing package.json file", options.config);
+const stopwatch = _chunk343PZDIIjs.getStopwatch.call(void 0, "Write package.json file");
+const packageJsonPath = _chunk343PZDIIjs.joinPaths.call(void 0, options.projectRoot, "project.json");
-let packageJson = await _node.hfs.json(
+let packageJson = await _node.hfs.json(_chunk343PZDIIjs.joinPaths.call(void 0, options.config.workspaceRoot, options.projectRoot, "package.json"));
@@ -1979,15 +1979,15 @@ async function generatePackageJson2(options) {
-await _devkit.writeJsonFile.call(void 0,
+await _devkit.writeJsonFile.call(void 0, _chunk343PZDIIjs.joinPaths.call(void 0, options.outdir, "package.json"), packageJson);
-
-const stopwatch =
+_chunk343PZDIIjs.writeDebug.call(void 0, ` \u{1F680} Running ${options.name} build`, options.config);
+const stopwatch = _chunk343PZDIIjs.getStopwatch.call(void 0, `${options.name} build`);
@@ -1999,15 +1999,15 @@ async function executeTSDown(options) {
-
-const stopwatch =
+_chunk343PZDIIjs.writeDebug.call(void 0, ` \u{1F4CB} Copying asset files to output directory: ${options.outdir}`, options.config);
+const stopwatch = _chunk343PZDIIjs.getStopwatch.call(void 0, `${options.name} asset copy`);
-
+_chunk343PZDIIjs.writeSuccess.call(void 0, ` \u{1F4E6} The ${options.name} build completed successfully`, options.config);
@@ -2018,8 +2018,8 @@ async function cleanOutputPath2(options) {
-
-const stopwatch =
+_chunk343PZDIIjs.writeDebug.call(void 0, ` \u26A1 Executing Storm TSDown pipeline`);
+const stopwatch = _chunk343PZDIIjs.getStopwatch.call(void 0, "TSDown pipeline");
@@ -2037,11 +2037,11 @@ async function build4(options) {
-
+_chunk343PZDIIjs.writeWarning.call(void 0, " \u{1F6A7} No options were passed to TSBuild. Please check the parameters passed to the `build` function.");
-
+_chunk343PZDIIjs.writeSuccess.call(void 0, " \u{1F3C1} TSDown pipeline build completed successfully");
-
+_chunk343PZDIIjs.writeFatal.call(void 0, " \u274C Fatal errors occurred during the build that could not be recovered from. The build process has been terminated.");
@@ -2051,7 +2051,7 @@ _chunkJ5SB6L2Ljs.__name.call(void 0, build4, "build");
-
+_chunk343PZDIIjs.writeInfo.call(void 0, "\u{1F4E6} Running Storm TSDown build executor on the workspace", config);
@@ -2087,11 +2087,11 @@ var _fsextra = require('fs-extra');
-
+_chunk343PZDIIjs.writeInfo.call(void 0, `\u{1F9F9} Cleaning output path: ${options.outputPath}`, config);
-
+_chunk343PZDIIjs.writeInfo.call(void 0, `\u{1F680} Running Typia on entry: ${entry}`, config);
@@ -2122,15 +2122,15 @@ var executor_default9 = withRunExecutor("Typia runtime validation generator", ty
-
+_chunk343PZDIIjs.writeInfo.call(void 0, "\u{1F4E6} Running Storm Unbuild executor on the workspace", config);
-fsCache: config.skipCache ? false :
+fsCache: config.skipCache ? false : _chunk343PZDIIjs.joinPaths.call(void 0, config.directories.cache || "node_modules/.cache/storm", "jiti"),
-const stormUnbuild = await jiti.import("@storm-software/unbuild");
+const stormUnbuild = await jiti.import("@storm-software/unbuild/build.js");
@@ -2191,34 +2191,34 @@ var executor_default10 = withRunExecutor("TypeScript Unbuild build", unbuildExec
-const stopwatch =
+const stopwatch = _chunk343PZDIIjs.getStopwatch.call(void 0, name);
-
+_chunk343PZDIIjs.writeInfo.call(void 0, `\u26A1 Running the ${name} generator...
-const workspaceRoot3 =
+const workspaceRoot3 = _chunk343PZDIIjs.findWorkspaceRoot.call(void 0, );
-
+_chunk343PZDIIjs.writeDebug.call(void 0, `Loading the Storm Config from environment variables and storm.config.js file...
-config = await
+config = await _chunkOQPC7VB6js.loadStormConfig.call(void 0, workspaceRoot3);
-
+_chunk343PZDIIjs.writeDebug.call(void 0, "Running the applyDefaultOptions hook...", config);
-
+_chunk343PZDIIjs.writeDebug.call(void 0, "Completed the applyDefaultOptions hook", config);
-
+_chunk343PZDIIjs.writeTrace.call(void 0, `Generator schema options \u2699\uFE0F
-
+_chunk343PZDIIjs.writeDebug.call(void 0, "Running the preProcess hook...", config);
-
+_chunk343PZDIIjs.writeDebug.call(void 0, "Completed the preProcess hook", config);
@@ -2231,18 +2231,18 @@ ${Object.keys(_nullishCoalesce(options, () => ( {}))).map((key) => ` - ${key}=${
-
+_chunk343PZDIIjs.writeDebug.call(void 0, "Running the postProcess hook...", config);
-
+_chunk343PZDIIjs.writeDebug.call(void 0, "Completed the postProcess hook", config);
-
+_chunk343PZDIIjs.writeSuccess.call(void 0, `Completed running the ${name} generator!
-
-
+_chunk343PZDIIjs.writeFatal.call(void 0, "A fatal error occurred while running the generator - the process was forced to terminate", config);
+_chunk343PZDIIjs.writeError.call(void 0, `An exception was thrown in the generator's process
@@ -2288,7 +2288,7 @@ async function typeScriptLibraryGeneratorFn(tree, schema, config) {
-sourceRoot:
+sourceRoot: _chunk343PZDIIjs.joinPaths.call(void 0, _nullishCoalesce(options.directory, () => ( "")), "src"),
@@ -2297,11 +2297,11 @@ async function typeScriptLibraryGeneratorFn(tree, schema, config) {
-
+_chunk343PZDIIjs.joinPaths.call(void 0, options.projectRoot, "src", "index.ts")
-tsconfig:
-project:
+tsconfig: _chunk343PZDIIjs.joinPaths.call(void 0, options.projectRoot, "tsconfig.json"),
+project: _chunk343PZDIIjs.joinPaths.call(void 0, options.projectRoot, "package.json"),
@@ -2355,7 +2355,7 @@ async function typeScriptLibraryGeneratorFn(tree, schema, config) {
-const packageJsonPath =
+const packageJsonPath = _chunk343PZDIIjs.joinPaths.call(void 0, options.projectRoot, "package.json");
@@ -2412,10 +2412,10 @@ async function typeScriptLibraryGeneratorFn(tree, schema, config) {
-
+_chunk343PZDIIjs.joinPaths.call(void 0, options.projectRoot, "./src", `index.${options.js ? "js" : "ts"}`)
-_js.addTsConfigPath.call(void 0, tree,
-
+_js.addTsConfigPath.call(void 0, tree, _chunk343PZDIIjs.joinPaths.call(void 0, options.importPath, "/*"), [
+_chunk343PZDIIjs.joinPaths.call(void 0, options.projectRoot, "./src", "/*")
@@ -2426,7 +2426,7 @@ async function typeScriptLibraryGeneratorFn(tree, schema, config) {
-const tsconfigPath =
+const tsconfigPath = _chunk343PZDIIjs.joinPaths.call(void 0, options.projectRoot, "tsconfig.json");
@@ -2464,7 +2464,7 @@ function getOutputPath(options) {
-return
+return _chunk343PZDIIjs.joinPaths.call(void 0, ...parts);
@@ -2473,7 +2473,7 @@ function createProjectTsConfigJson(tree, options) {
-outDir:
+outDir: _chunk343PZDIIjs.joinPaths.call(void 0, _devkit.offsetFromRoot.call(void 0, options.projectRoot), "dist/out-tsc"),
@@ -2493,7 +2493,7 @@ function createProjectTsConfigJson(tree, options) {
-_devkit.writeJson.call(void 0, tree,
+_devkit.writeJson.call(void 0, tree, _chunk343PZDIIjs.joinPaths.call(void 0, options.projectRoot, "tsconfig.json"), tsconfig);
@@ -2619,19 +2619,19 @@ var generator_default = withRunGenerator("TypeScript Library Creator (Browser Pl
-
-
-const jsonSchema = _zodtojsonschema.zodToJsonSchema.call(void 0,
+_chunk343PZDIIjs.writeInfo.call(void 0, "\u{1F4E6} Running Storm Configuration JSON Schema generator", config);
+_chunk343PZDIIjs.writeTrace.call(void 0, `Determining the Storm Configuration JSON Schema...`, config);
+const jsonSchema = _zodtojsonschema.zodToJsonSchema.call(void 0, _chunk343PZDIIjs.StormConfigSchema, {
-
-const outputPath = options.outputFile.replaceAll("{workspaceRoot}", "").replaceAll(_nullishCoalesce(_optionalChain([config, 'optionalAccess', _175 => _175.workspaceRoot]), () => (
-
+_chunk343PZDIIjs.writeTrace.call(void 0, jsonSchema, config);
+const outputPath = options.outputFile.replaceAll("{workspaceRoot}", "").replaceAll(_nullishCoalesce(_optionalChain([config, 'optionalAccess', _175 => _175.workspaceRoot]), () => ( _chunk343PZDIIjs.findWorkspaceRoot.call(void 0, ))), _optionalChain([options, 'access', _176 => _176.outputFile, 'optionalAccess', _177 => _177.startsWith, 'call', _178 => _178("./")]) ? "" : "./");
+_chunk343PZDIIjs.writeTrace.call(void 0, `\u{1F4DD} Writing Storm Configuration JSON Schema to "${outputPath}"`, config);
-
+_chunk343PZDIIjs.writeSuccess.call(void 0, "\u{1F680} Storm Configuration JSON Schema creation has completed successfully!", config);