@mui/internal-docs-infra 0.3.1-canary.3 → 0.3.1-canary.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/CodeControllerContext/CodeControllerContext.js +2 -2
- package/esm/CodeExternalsContext/CodeExternalsContext.js +1 -1
- package/esm/CodeHighlighter/CodeHighlighter.js +247 -329
- package/esm/CodeHighlighter/CodeHighlighterClient.js +447 -653
- package/esm/CodeHighlighter/CodeHighlighterContext.js +2 -2
- package/esm/CodeHighlighter/CodeHighlighterFallbackContext.js +2 -2
- package/esm/CodeHighlighter/codeToFallbackProps.js +21 -37
- package/esm/CodeHighlighter/errors.js +248 -400
- package/esm/CodeHighlighter/parseControlledCode.js +12 -20
- package/esm/CodeProvider/CodeContext.js +3 -3
- package/esm/CodeProvider/CodeProvider.js +31 -40
- package/esm/abstractCreateDemo/abstractCreateDemo.js +13 -17
- package/esm/abstractCreateDemoClient/abstractCreateDemoClient.js +12 -12
- package/esm/cli/index.js +1 -1
- package/esm/cli/runValidate.js +160 -264
- package/esm/createDemoData/createDemoData.js +11 -12
- package/esm/createSitemap/createSitemap.js +2 -2
- package/esm/pipeline/getFileConventions/fileConventions.js +1 -1
- package/esm/pipeline/getFileConventions/getFileConventions.js +2 -15
- package/esm/pipeline/hastUtils/hastUtils.js +16 -17
- package/esm/pipeline/loadCodeVariant/addCodeVariantPaths.js +24 -24
- package/esm/pipeline/loadCodeVariant/applyCodeTransform.js +12 -22
- package/esm/pipeline/loadCodeVariant/calculateMainFilePath.js +30 -37
- package/esm/pipeline/loadCodeVariant/computeHastDeltas.js +107 -185
- package/esm/pipeline/loadCodeVariant/diffHast.js +18 -53
- package/esm/pipeline/loadCodeVariant/examineCodeVariant.js +24 -27
- package/esm/pipeline/loadCodeVariant/flattenCodeVariant.js +9 -10
- package/esm/pipeline/loadCodeVariant/hasAllCodeVariants.js +5 -5
- package/esm/pipeline/loadCodeVariant/loadCodeFallback.js +516 -731
- package/esm/pipeline/loadCodeVariant/loadCodeVariant.js +679 -1079
- package/esm/pipeline/loadCodeVariant/maybeCodeInitialData.js +14 -20
- package/esm/pipeline/loadCodeVariant/mergeCodeMetadata.js +53 -63
- package/esm/pipeline/loadCodeVariant/parseCode.js +40 -48
- package/esm/pipeline/loadCodeVariant/pathUtils.js +43 -64
- package/esm/pipeline/loadCodeVariant/transformSource.js +55 -125
- package/esm/pipeline/loadPrecomputedCodeHighlighter/loadPrecomputedCodeHighlighter.js +160 -221
- package/esm/pipeline/loadPrecomputedCodeHighlighter/parseCreateFactoryCall.js +377 -479
- package/esm/pipeline/loadPrecomputedCodeHighlighter/parseFunctionArguments.js +171 -173
- package/esm/pipeline/loadPrecomputedCodeHighlighter/performanceLogger.js +14 -30
- package/esm/pipeline/loadPrecomputedCodeHighlighter/replacePrecomputeValue.js +19 -21
- package/esm/pipeline/loadPrecomputedCodeHighlighter/serializeFunctionArguments.js +37 -71
- package/esm/pipeline/loadPrecomputedCodeHighlighterClient/filterRuntimeExternals.js +3 -9
- package/esm/pipeline/loadPrecomputedCodeHighlighterClient/generateImportStatements.js +54 -80
- package/esm/pipeline/loadPrecomputedCodeHighlighterClient/generateResolvedExternals.js +71 -98
- package/esm/pipeline/loadPrecomputedCodeHighlighterClient/injectImportsIntoSource.js +5 -5
- package/esm/pipeline/loadPrecomputedCodeHighlighterClient/loadPrecomputedCodeHighlighterClient.js +161 -211
- package/esm/pipeline/loadPrecomputedSitemap/loadPrecomputedSitemap.js +159 -207
- package/esm/pipeline/loadServerCodeMeta/loadServerCodeMeta.js +42 -64
- package/esm/pipeline/loadServerCodeMeta/resolveModulePathWithFs.js +20 -96
- package/esm/pipeline/loadServerPageIndex/loadServerPageIndex.js +66 -85
- package/esm/pipeline/loadServerSitemap/loadServerSitemap.js +71 -118
- package/esm/pipeline/loadServerSource/loadServerSource.js +121 -148
- package/esm/pipeline/loaderUtils/externalsToPackages.js +7 -7
- package/esm/pipeline/loaderUtils/extractNameAndSlugFromUrl.js +8 -12
- package/esm/pipeline/loaderUtils/fileUrlToPortablePath.js +5 -5
- package/esm/pipeline/loaderUtils/getFileNameFromUrl.js +19 -29
- package/esm/pipeline/loaderUtils/getLanguageFromExtension.js +3 -4
- package/esm/pipeline/loaderUtils/mergeExternals.js +15 -35
- package/esm/pipeline/loaderUtils/parseImportsAndComments.js +413 -433
- package/esm/pipeline/loaderUtils/processRelativeImports.js +153 -239
- package/esm/pipeline/loaderUtils/resolveModulePath.js +544 -1303
- package/esm/pipeline/loaderUtils/rewriteImports.js +73 -111
- package/esm/pipeline/parseSource/addLineGutters.js +33 -45
- package/esm/pipeline/parseSource/grammars.js +3 -3
- package/esm/pipeline/parseSource/parseSource.js +13 -31
- package/esm/pipeline/syncPageIndex/createMarkdownNodes.js +32 -55
- package/esm/pipeline/syncPageIndex/mergeMetadataMarkdown.js +107 -160
- package/esm/pipeline/syncPageIndex/metadataToMarkdown.js +846 -1033
- package/esm/pipeline/syncPageIndex/syncPageIndex.js +291 -438
- package/esm/pipeline/transformHtmlCodePrecomputed/transformHtmlCodePrecomputed.js +213 -311
- package/esm/pipeline/transformMarkdownBlockquoteCallouts/transformMarkdownBlockquoteCallouts.js +10 -10
- package/esm/pipeline/transformMarkdownCode/transformMarkdownCode.js +133 -193
- package/esm/pipeline/transformMarkdownDemoLinks/transformMarkdownDemoLinks.js +25 -27
- package/esm/pipeline/transformMarkdownMetadata/transformMarkdownMetadata.js +572 -717
- package/esm/pipeline/transformMarkdownRelativePaths/transformMarkdownRelativePaths.js +8 -8
- package/esm/pipeline/transformTypescriptToJavascript/removeTypes.js +84 -113
- package/esm/pipeline/transformTypescriptToJavascript/transformTypescriptToJavascript.js +10 -26
- package/esm/useCode/Pre.js +58 -62
- package/esm/useCode/useCode.js +59 -61
- package/esm/useCode/useCodeUtils.js +54 -63
- package/esm/useCode/useCopyFunctionality.js +10 -9
- package/esm/useCode/useFileNavigation.js +150 -212
- package/esm/useCode/useSourceEditing.js +17 -14
- package/esm/useCode/useTransformManagement.js +23 -26
- package/esm/useCode/useUIState.js +12 -20
- package/esm/useCode/useVariantSelection.js +62 -79
- package/esm/useCopier/index.js +29 -56
- package/esm/useDemo/createCodeSandbox.js +12 -15
- package/esm/useDemo/createStackBlitz.js +14 -20
- package/esm/useDemo/exportVariant.js +200 -180
- package/esm/useDemo/exportVariantAsCra.js +22 -25
- package/esm/useDemo/useDemo.js +80 -84
- package/esm/useErrors/ErrorsContext.js +1 -1
- package/esm/useErrors/useErrors.js +3 -3
- package/esm/useLocalStorageState/useLocalStorageState.js +23 -39
- package/esm/usePreference/PreferencesProvider.js +1 -1
- package/esm/usePreference/usePreference.js +9 -11
- package/esm/useSearch/useSearch.js +290 -387
- package/esm/useUrlHashState/useUrlHashState.js +11 -14
- package/esm/withDocsInfra/withDeploymentConfig.js +26 -21
- package/esm/withDocsInfra/withDocsInfra.js +99 -101
- package/package.json +7 -4
@@ -1,10 +1,3 @@
-import _regeneratorValues from "@babel/runtime/helpers/esm/regeneratorValues";
-import _toConsumableArray from "@babel/runtime/helpers/esm/toConsumableArray";
-import _extends from "@babel/runtime/helpers/esm/extends";
-import _regenerator from "@babel/runtime/helpers/esm/regenerator";
-import _createForOfIteratorHelper from "@babel/runtime/helpers/esm/createForOfIteratorHelper";
-import _slicedToArray from "@babel/runtime/helpers/esm/slicedToArray";
-import _asyncToGenerator from "@babel/runtime/helpers/esm/asyncToGenerator";
 import * as path from 'path-module';
 import { compress, strToU8 } from 'fflate';
 import { encode } from 'uint8-to-base64';
@@ -13,10 +6,9 @@ import { diffHast } from "./diffHast.js";
 import { getFileNameFromUrl, getLanguageFromExtension, normalizeLanguage } from "../loaderUtils/index.js";
 import { mergeExternals } from "../loaderUtils/mergeExternals.js";
 import { performanceMeasure } from "../loadPrecomputedCodeHighlighter/performanceLogger.js";
-function compressAsync(input) {
-
-
-  compress(input, options, function (err, output) {
+function compressAsync(input, options = {}) {
+  return new Promise((resolve, reject) => {
+    compress(input, options, (err, output) => {
       if (err) {
         reject(err);
       } else {
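The new compressAsync wraps fflate's callback-based compress in a Promise so the loader can await it. A minimal usage sketch (illustrative only, not part of the package; hastTree is a hypothetical value), mirroring the call this file makes when producing gzipped hast output:

    const gzipped = await compressAsync(strToU8(JSON.stringify(hastTree)), { consume: true, level: 9 });
    const base64 = encode(gzipped); // uint8-to-base64, as imported above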
@@ -47,21 +39,21 @@ function generateConflictFreeFilename(originalFilename, existingFiles) {
   }
 
   // Try with global_ prefix
-
+  const globalFilename = `global_${originalFilename}`;
   if (!existingFiles.has(globalFilename)) {
     return globalFilename;
   }
 
   // Use path.parse to cleanly split filename into name and extension
-
-
-
+  const parsed = path.parse(originalFilename);
+  const nameWithoutExt = parsed.name;
+  const extension = parsed.ext;
 
   // Add numbers until we find a free name, preserving extension
-
-
+  let counter = 1;
+  let candidateName;
   do {
-    candidateName =
+    candidateName = `global_${nameWithoutExt}_${counter}${extension}`;
     counter += 1;
   } while (existingFiles.has(candidateName));
   return candidateName;
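With its template literals restored, generateConflictFreeFilename resolves name clashes by prefixing and then numbering. A sketch of the expected results (illustrative only; the file names are assumptions, and the early-return path for non-conflicting names sits outside this hunk):

    generateConflictFreeFilename('theme.ts', new Set(['theme.ts']));                    // 'global_theme.ts'
    generateConflictFreeFilename('theme.ts', new Set(['theme.ts', 'global_theme.ts'])); // 'global_theme_1.ts'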
@@ -80,13 +72,13 @@ function convertKeyBasedOnDirectory(nestedKey, sourceFileKey) {
   }
 
   // Treat bare filenames as relative to current directory (same as ./filename)
-
+  let processedNestedKey = nestedKey;
   if (!nestedKey.startsWith('.')) {
-    processedNestedKey =
+    processedNestedKey = `./${nestedKey}`;
   }
 
   // Get the directory of the source file
-
+  const sourceDir = path.dirname(sourceFileKey);
 
   // If sourceDir is '.' (current directory), just return the processed nested key
   // This avoids path.resolve which can produce absolute paths on Windows
@@ -97,10 +89,10 @@ function convertKeyBasedOnDirectory(nestedKey, sourceFileKey) {
 
   // Use path.join instead of path.resolve to avoid producing absolute paths
   // path.join keeps paths relative, while path.resolve can make them absolute
-
+  const joinedPath = path.join(sourceDir, processedNestedKey);
 
   // Normalize the path to clean up any ../ or ./ segments
-
+  const normalizedPath = path.normalize(joinedPath);
 
   // Ensure we return a clean relative path (remove leading './' if present after normalization)
   if (normalizedPath.startsWith('./')) {
@@ -119,7 +111,7 @@ function normalizePathKey(key) {
   }
 
   // Use path.normalize to clean up the path, then remove leading './' if present
-
+  const normalized = path.normalize(key);
 
   // Convert './filename' to 'filename' using path.relative
   if (normalized.startsWith('./')) {
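Taken together, these path helpers keep extra-file keys as clean relative paths. A sketch of the expected behavior (illustrative only; the example keys are assumptions based on the visible logic):

    normalizePathKey('./Button.tsx');                                 // 'Button.tsx'
    convertKeyBasedOnDirectory('theme.ts', 'components/Button.tsx');  // 'components/theme.ts'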
@@ -132,1092 +124,700 @@ function normalizePathKey(key) {
|
|
|
132
124
|
* Loads and processes extra files recursively with support for relative paths
|
|
133
125
|
* and circular dependency detection. Uses Promise.all for parallel loading.
|
|
134
126
|
*/
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
if (finalSource) {
|
|
197
|
-
_context.n = 20;
|
|
198
|
-
break;
|
|
199
|
-
}
|
|
200
|
-
if (loadSource) {
|
|
201
|
-
_context.n = 1;
|
|
202
|
-
break;
|
|
203
|
-
}
|
|
204
|
-
throw new Error('"loadSource" function is required when source is not provided');
|
|
205
|
-
case 1:
|
|
206
|
-
if (url) {
|
|
207
|
-
_context.n = 2;
|
|
208
|
-
break;
|
|
209
|
-
}
|
|
210
|
-
throw new Error('URL is required when loading source');
|
|
211
|
-
case 2:
|
|
212
|
-
_context.p = 2;
|
|
213
|
-
// Check cache first to avoid duplicate loadSource calls
|
|
214
|
-
loadPromise = loadSourceCache.get(url);
|
|
215
|
-
if (!loadPromise) {
|
|
216
|
-
loadPromise = loadSource(url);
|
|
217
|
-
loadSourceCache.set(url, loadPromise);
|
|
218
|
-
}
|
|
219
|
-
_context.n = 3;
|
|
220
|
-
return loadPromise;
|
|
221
|
-
case 3:
|
|
222
|
-
loadResult = _context.v;
|
|
223
|
-
finalSource = loadResult.source;
|
|
224
|
-
extraFilesFromSource = loadResult.extraFiles;
|
|
225
|
-
extraDependenciesFromSource = loadResult.extraDependencies;
|
|
226
|
-
externalsFromSource = loadResult.externals;
|
|
227
|
-
currentMark = performanceMeasure(currentMark, {
|
|
228
|
-
mark: 'Loaded File',
|
|
229
|
-
measure: 'File Loading'
|
|
230
|
-
}, [functionName, url]);
|
|
231
|
-
|
|
232
|
-
// Validate that extraFiles from loadSource contain only absolute URLs as values
|
|
233
|
-
if (!extraFilesFromSource) {
|
|
234
|
-
_context.n = 7;
|
|
235
|
-
break;
|
|
236
|
-
}
|
|
237
|
-
_i = 0, _Object$entries = Object.entries(extraFilesFromSource);
|
|
238
|
-
case 4:
|
|
239
|
-
if (!(_i < _Object$entries.length)) {
|
|
240
|
-
_context.n = 7;
|
|
241
|
-
break;
|
|
242
|
-
}
|
|
243
|
-
_Object$entries$_i = _slicedToArray(_Object$entries[_i], 2), extraFileName = _Object$entries$_i[0], fileData = _Object$entries$_i[1];
|
|
244
|
-
if (!isAbsolutePath(extraFileName)) {
|
|
245
|
-
_context.n = 5;
|
|
246
|
-
break;
|
|
247
|
-
}
|
|
248
|
-
throw new Error("Invalid extraFiles from loadSource: key \"".concat(extraFileName, "\" appears to be an absolute path. ") + "extraFiles keys should be relative paths from the current file.");
|
|
249
|
-
case 5:
|
|
250
|
-
if (!(typeof fileData === 'string' && fileData.startsWith('.'))) {
|
|
251
|
-
_context.n = 6;
|
|
252
|
-
break;
|
|
253
|
-
}
|
|
254
|
-
throw new Error("Invalid extraFiles from loadSource: \"".concat(extraFileName, "\" has relative path \"").concat(fileData, "\". ") + "All extraFiles values must be absolute URLs.");
|
|
255
|
-
case 6:
|
|
256
|
-
_i++;
|
|
257
|
-
_context.n = 4;
|
|
258
|
-
break;
|
|
259
|
-
case 7:
|
|
260
|
-
if (!extraDependenciesFromSource) {
|
|
261
|
-
_context.n = 15;
|
|
262
|
-
break;
|
|
263
|
-
}
|
|
264
|
-
_iterator = _createForOfIteratorHelper(extraDependenciesFromSource);
|
|
265
|
-
_context.p = 8;
|
|
266
|
-
_iterator.s();
|
|
267
|
-
case 9:
|
|
268
|
-
if ((_step = _iterator.n()).done) {
|
|
269
|
-
_context.n = 12;
|
|
270
|
-
break;
|
|
271
|
-
}
|
|
272
|
-
dependency = _step.value;
|
|
273
|
-
if (!dependency.startsWith('.')) {
|
|
274
|
-
_context.n = 10;
|
|
275
|
-
break;
|
|
276
|
-
}
|
|
277
|
-
throw new Error("Invalid extraDependencies from loadSource: \"".concat(dependency, "\" is a relative path. ") + "All extraDependencies must be absolute URLs.");
|
|
278
|
-
case 10:
|
|
279
|
-
if (!(dependency === url)) {
|
|
280
|
-
_context.n = 11;
|
|
281
|
-
break;
|
|
127
|
+
|
|
128
|
+
async function loadSingleFile(variantName, fileName, source, url, loadSource, sourceParser, sourceTransformers, loadSourceCache, transforms, options = {}, allFilesListed = false, knownExtraFiles = new Set(), language) {
|
|
129
|
+
const {
|
|
130
|
+
disableTransforms = false,
|
|
131
|
+
disableParsing = false
|
|
132
|
+
} = options;
|
|
133
|
+
let finalSource = source;
|
|
134
|
+
let extraFilesFromSource;
|
|
135
|
+
let extraDependenciesFromSource;
|
|
136
|
+
let externalsFromSource;
|
|
137
|
+
const functionName = 'Load Variant File';
|
|
138
|
+
let currentMark = performanceMeasure(undefined, {
|
|
139
|
+
mark: 'Start',
|
|
140
|
+
measure: 'Start'
|
|
141
|
+
}, [functionName, url || fileName], true);
|
|
142
|
+
|
|
143
|
+
// Load source if not provided
|
|
144
|
+
if (!finalSource) {
|
|
145
|
+
if (!loadSource) {
|
|
146
|
+
throw new Error('"loadSource" function is required when source is not provided');
|
|
147
|
+
}
|
|
148
|
+
if (!url) {
|
|
149
|
+
throw new Error('URL is required when loading source');
|
|
150
|
+
}
|
|
151
|
+
try {
|
|
152
|
+
// Check cache first to avoid duplicate loadSource calls
|
|
153
|
+
let loadPromise = loadSourceCache.get(url);
|
|
154
|
+
if (!loadPromise) {
|
|
155
|
+
loadPromise = loadSource(url);
|
|
156
|
+
loadSourceCache.set(url, loadPromise);
|
|
157
|
+
}
|
|
158
|
+
const loadResult = await loadPromise;
|
|
159
|
+
finalSource = loadResult.source;
|
|
160
|
+
extraFilesFromSource = loadResult.extraFiles;
|
|
161
|
+
extraDependenciesFromSource = loadResult.extraDependencies;
|
|
162
|
+
externalsFromSource = loadResult.externals;
|
|
163
|
+
currentMark = performanceMeasure(currentMark, {
|
|
164
|
+
mark: 'Loaded File',
|
|
165
|
+
measure: 'File Loading'
|
|
166
|
+
}, [functionName, url]);
|
|
167
|
+
|
|
168
|
+
// Validate that extraFiles from loadSource contain only absolute URLs as values
|
|
169
|
+
if (extraFilesFromSource) {
|
|
170
|
+
for (const [extraFileName, fileData] of Object.entries(extraFilesFromSource)) {
|
|
171
|
+
// Validate that keys are relative paths (not absolute)
|
|
172
|
+
if (isAbsolutePath(extraFileName)) {
|
|
173
|
+
throw new Error(`Invalid extraFiles from loadSource: key "${extraFileName}" appears to be an absolute path. ` + `extraFiles keys should be relative paths from the current file.`);
|
|
174
|
+
}
|
|
175
|
+
|
|
176
|
+
// Validate that values are absolute URLs (not relative paths)
|
|
177
|
+
if (typeof fileData === 'string' && fileData.startsWith('.')) {
|
|
178
|
+
throw new Error(`Invalid extraFiles from loadSource: "${extraFileName}" has relative path "${fileData}". ` + `All extraFiles values must be absolute URLs.`);
|
|
179
|
+
}
|
|
180
|
+
}
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
// Validate that extraDependencies from loadSource contain only absolute URLs
|
|
184
|
+
if (extraDependenciesFromSource) {
|
|
185
|
+
for (const dependency of extraDependenciesFromSource) {
|
|
186
|
+
if (dependency.startsWith('.')) {
|
|
187
|
+
throw new Error(`Invalid extraDependencies from loadSource: "${dependency}" is a relative path. ` + `All extraDependencies must be absolute URLs.`);
|
|
282
188
|
}
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
_context.n = 9;
|
|
286
|
-
break;
|
|
287
|
-
case 12:
|
|
288
|
-
_context.n = 14;
|
|
289
|
-
break;
|
|
290
|
-
case 13:
|
|
291
|
-
_context.p = 13;
|
|
292
|
-
_t = _context.v;
|
|
293
|
-
_iterator.e(_t);
|
|
294
|
-
case 14:
|
|
295
|
-
_context.p = 14;
|
|
296
|
-
_iterator.f();
|
|
297
|
-
return _context.f(14);
|
|
298
|
-
case 15:
|
|
299
|
-
if (!(allFilesListed && (extraFilesFromSource || extraDependenciesFromSource))) {
|
|
300
|
-
_context.n = 17;
|
|
301
|
-
break;
|
|
189
|
+
if (dependency === url) {
|
|
190
|
+
throw new Error(`Invalid extraDependencies from loadSource: "${dependency}" is the same as the input URL. ` + `extraDependencies should not include the file being loaded.`);
|
|
302
191
|
}
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
192
|
+
}
|
|
193
|
+
}
|
|
194
|
+
|
|
195
|
+
// Check for new files when allFilesListed is enabled
|
|
196
|
+
if (allFilesListed && (extraFilesFromSource || extraDependenciesFromSource)) {
|
|
197
|
+
const newFiles = [];
|
|
198
|
+
if (extraFilesFromSource) {
|
|
199
|
+
// Check if any extraFiles keys are not in the known set
|
|
200
|
+
for (const extraFileKey of Object.keys(extraFilesFromSource)) {
|
|
201
|
+
if (!knownExtraFiles.has(extraFileKey)) {
|
|
202
|
+
newFiles.push(extraFileKey);
|
|
311
203
|
}
|
|
312
204
|
}
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
break;
|
|
321
|
-
}
|
|
322
|
-
console.warn(message);
|
|
323
|
-
_context.n = 17;
|
|
324
|
-
break;
|
|
325
|
-
case 16:
|
|
326
|
-
throw new Error(message);
|
|
327
|
-
case 17:
|
|
328
|
-
_context.n = 20;
|
|
329
|
-
break;
|
|
330
|
-
case 18:
|
|
331
|
-
_context.p = 18;
|
|
332
|
-
_t2 = _context.v;
|
|
333
|
-
if (!(_t2 instanceof Error && (_t2.message.startsWith('Invalid extraFiles from loadSource:') || _t2.message.startsWith('Invalid extraDependencies from loadSource:') || _t2.message.startsWith('Unexpected files discovered via loadSource when allFilesListed=true')))) {
|
|
334
|
-
_context.n = 19;
|
|
335
|
-
break;
|
|
336
|
-
}
|
|
337
|
-
throw _t2;
|
|
338
|
-
case 19:
|
|
339
|
-
throw new Error("Failed to load source code (variant: ".concat(variantName, ", file: ").concat(fileName, ", url: ").concat(url, "): ").concat(JSON.stringify(_t2)));
|
|
340
|
-
case 20:
|
|
341
|
-
// Apply source transformers if no transforms exist and transforms are not disabled
|
|
342
|
-
finalTransforms = transforms;
|
|
343
|
-
if (!(sourceTransformers && !finalTransforms && !disableTransforms && finalSource)) {
|
|
344
|
-
_context.n = 22;
|
|
345
|
-
break;
|
|
346
|
-
}
|
|
347
|
-
_context.n = 21;
|
|
348
|
-
return transformSource(finalSource, normalizePathKey(fileName), sourceTransformers);
|
|
349
|
-
case 21:
|
|
350
|
-
finalTransforms = _context.v;
|
|
351
|
-
currentMark = performanceMeasure(currentMark, {
|
|
352
|
-
mark: 'Transformed File',
|
|
353
|
-
measure: 'File Transforming'
|
|
354
|
-
}, [functionName, url || fileName]);
|
|
355
|
-
case 22:
|
|
356
|
-
if (!(typeof finalSource === 'string' && !disableParsing)) {
|
|
357
|
-
_context.n = 31;
|
|
358
|
-
break;
|
|
359
|
-
}
|
|
360
|
-
if (sourceParser) {
|
|
361
|
-
_context.n = 23;
|
|
362
|
-
break;
|
|
363
|
-
}
|
|
364
|
-
throw new Error('"sourceParser" function is required when source is a string and parsing is not disabled');
|
|
365
|
-
case 23:
|
|
366
|
-
_context.p = 23;
|
|
367
|
-
sourceString = finalSource;
|
|
368
|
-
_context.n = 24;
|
|
369
|
-
return sourceParser;
|
|
370
|
-
case 24:
|
|
371
|
-
parseSource = _context.v;
|
|
372
|
-
finalSource = parseSource(finalSource, fileName, language);
|
|
373
|
-
currentMark = performanceMeasure(currentMark, {
|
|
374
|
-
mark: 'Parsed File',
|
|
375
|
-
measure: 'File Parsing'
|
|
376
|
-
}, [functionName, url || fileName]);
|
|
377
|
-
if (!(finalTransforms && !disableTransforms)) {
|
|
378
|
-
_context.n = 26;
|
|
379
|
-
break;
|
|
380
|
-
}
|
|
381
|
-
_context.n = 25;
|
|
382
|
-
return diffHast(sourceString, finalSource, normalizePathKey(fileName), finalTransforms, parseSource);
|
|
383
|
-
case 25:
|
|
384
|
-
finalTransforms = _context.v;
|
|
385
|
-
currentMark = performanceMeasure(currentMark, {
|
|
386
|
-
mark: 'Transform Parsed File',
|
|
387
|
-
measure: 'Parsed File Transforming'
|
|
388
|
-
}, [functionName, url || fileName]);
|
|
389
|
-
case 26:
|
|
390
|
-
if (!(options.output === 'hastGzip' && process.env.NODE_ENV === 'production')) {
|
|
391
|
-
_context.n = 28;
|
|
392
|
-
break;
|
|
393
|
-
}
|
|
394
|
-
_t3 = encode;
|
|
395
|
-
_context.n = 27;
|
|
396
|
-
return compressAsync(strToU8(JSON.stringify(finalSource)), {
|
|
397
|
-
consume: true,
|
|
398
|
-
level: 9
|
|
399
|
-
});
|
|
400
|
-
case 27:
|
|
401
|
-
hastGzip = _t3(_context.v);
|
|
402
|
-
finalSource = {
|
|
403
|
-
hastGzip: hastGzip
|
|
404
|
-
};
|
|
405
|
-
currentMark = performanceMeasure(currentMark, {
|
|
406
|
-
mark: 'Compressed File',
|
|
407
|
-
measure: 'File Compression'
|
|
408
|
-
}, [functionName, url || fileName]);
|
|
409
|
-
_context.n = 29;
|
|
410
|
-
break;
|
|
411
|
-
case 28:
|
|
412
|
-
if (options.output === 'hastJson' || options.output === 'hastGzip') {
|
|
413
|
-
// in development, we skip compression but still convert to JSON
|
|
414
|
-
finalSource = {
|
|
415
|
-
hastJson: JSON.stringify(finalSource)
|
|
416
|
-
};
|
|
417
|
-
performanceMeasure(currentMark, {
|
|
418
|
-
mark: 'JSON Stringified File',
|
|
419
|
-
measure: 'File Stringification'
|
|
420
|
-
}, [functionName, url || fileName]);
|
|
205
|
+
}
|
|
206
|
+
if (newFiles.length > 0) {
|
|
207
|
+
const message = `Unexpected files discovered via loadSource when allFilesListed=true (variant: ${variantName}, file: ${fileName}). ` + `New files: ${newFiles.join(', ')}. ` + `Please update the loadVariantMeta function to provide the complete list of files upfront.`;
|
|
208
|
+
if (isProduction()) {
|
|
209
|
+
console.warn(message);
|
|
210
|
+
} else {
|
|
211
|
+
throw new Error(message);
|
|
421
212
|
}
|
|
422
|
-
|
|
423
|
-
_context.n = 31;
|
|
424
|
-
break;
|
|
425
|
-
case 30:
|
|
426
|
-
_context.p = 30;
|
|
427
|
-
_t4 = _context.v;
|
|
428
|
-
throw new Error("Failed to parse source code (variant: ".concat(variantName, ", file: ").concat(fileName, ", url: ").concat(url, "): ").concat(_t4 instanceof Error ? _t4.message : ''));
|
|
429
|
-
case 31:
|
|
430
|
-
return _context.a(2, {
|
|
431
|
-
source: finalSource,
|
|
432
|
-
transforms: finalTransforms,
|
|
433
|
-
extraFiles: extraFilesFromSource,
|
|
434
|
-
extraDependencies: extraDependenciesFromSource,
|
|
435
|
-
externals: externalsFromSource
|
|
436
|
-
});
|
|
213
|
+
}
|
|
437
214
|
}
|
|
438
|
-
}
|
|
439
|
-
|
|
440
|
-
|
|
215
|
+
} catch (error) {
|
|
216
|
+
// Re-throw validation errors without wrapping them
|
|
217
|
+
if (error instanceof Error && (error.message.startsWith('Invalid extraFiles from loadSource:') || error.message.startsWith('Invalid extraDependencies from loadSource:') || error.message.startsWith('Unexpected files discovered via loadSource when allFilesListed=true'))) {
|
|
218
|
+
throw error;
|
|
219
|
+
}
|
|
220
|
+
throw new Error(`Failed to load source code (variant: ${variantName}, file: ${fileName}, url: ${url}): ${JSON.stringify(error)}`);
|
|
221
|
+
}
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
// Apply source transformers if no transforms exist and transforms are not disabled
|
|
225
|
+
let finalTransforms = transforms;
|
|
226
|
+
if (sourceTransformers && !finalTransforms && !disableTransforms && finalSource) {
|
|
227
|
+
finalTransforms = await transformSource(finalSource, normalizePathKey(fileName), sourceTransformers);
|
|
228
|
+
currentMark = performanceMeasure(currentMark, {
|
|
229
|
+
mark: 'Transformed File',
|
|
230
|
+
measure: 'File Transforming'
|
|
231
|
+
}, [functionName, url || fileName]);
|
|
232
|
+
}
|
|
233
|
+
|
|
234
|
+
// Parse source if it's a string and parsing is not disabled
|
|
235
|
+
if (typeof finalSource === 'string' && !disableParsing) {
|
|
236
|
+
if (!sourceParser) {
|
|
237
|
+
throw new Error('"sourceParser" function is required when source is a string and parsing is not disabled');
|
|
238
|
+
}
|
|
239
|
+
try {
|
|
240
|
+
const sourceString = finalSource;
|
|
241
|
+
const parseSource = await sourceParser;
|
|
242
|
+
finalSource = parseSource(finalSource, fileName, language);
|
|
243
|
+
currentMark = performanceMeasure(currentMark, {
|
|
244
|
+
mark: 'Parsed File',
|
|
245
|
+
measure: 'File Parsing'
|
|
246
|
+
}, [functionName, url || fileName]);
|
|
247
|
+
if (finalTransforms && !disableTransforms) {
|
|
248
|
+
finalTransforms = await diffHast(sourceString, finalSource, normalizePathKey(fileName), finalTransforms, parseSource);
|
|
249
|
+
currentMark = performanceMeasure(currentMark, {
|
|
250
|
+
mark: 'Transform Parsed File',
|
|
251
|
+
measure: 'Parsed File Transforming'
|
|
252
|
+
}, [functionName, url || fileName]);
|
|
253
|
+
}
|
|
254
|
+
if (options.output === 'hastGzip' && process.env.NODE_ENV === 'production') {
|
|
255
|
+
const hastGzip = encode(await compressAsync(strToU8(JSON.stringify(finalSource)), {
|
|
256
|
+
consume: true,
|
|
257
|
+
level: 9
|
|
258
|
+
}));
|
|
259
|
+
finalSource = {
|
|
260
|
+
hastGzip
|
|
261
|
+
};
|
|
262
|
+
currentMark = performanceMeasure(currentMark, {
|
|
263
|
+
mark: 'Compressed File',
|
|
264
|
+
measure: 'File Compression'
|
|
265
|
+
}, [functionName, url || fileName]);
|
|
266
|
+
} else if (options.output === 'hastJson' || options.output === 'hastGzip') {
|
|
267
|
+
// in development, we skip compression but still convert to JSON
|
|
268
|
+
finalSource = {
|
|
269
|
+
hastJson: JSON.stringify(finalSource)
|
|
270
|
+
};
|
|
271
|
+
performanceMeasure(currentMark, {
|
|
272
|
+
mark: 'JSON Stringified File',
|
|
273
|
+
measure: 'File Stringification'
|
|
274
|
+
}, [functionName, url || fileName]);
|
|
275
|
+
}
|
|
276
|
+
} catch (error) {
|
|
277
|
+
throw new Error(`Failed to parse source code (variant: ${variantName}, file: ${fileName}, url: ${url}): ${error instanceof Error ? error.message : ''}`);
|
|
278
|
+
}
|
|
279
|
+
}
|
|
280
|
+
return {
|
|
281
|
+
source: finalSource,
|
|
282
|
+
transforms: finalTransforms,
|
|
283
|
+
extraFiles: extraFilesFromSource,
|
|
284
|
+
extraDependencies: extraDependenciesFromSource,
|
|
285
|
+
externals: externalsFromSource
|
|
286
|
+
};
|
|
441
287
|
}
|
|
442
|
-
|
|
443
|
-
|
|
288
|
+
|
|
289
|
+
/**
|
|
290
|
+
* Loads and processes extra files recursively with support for relative paths
|
|
291
|
+
* and circular dependency detection. Uses Promise.all for parallel loading.
|
|
292
|
+
*/
|
|
293
|
+
async function loadExtraFiles(variantName, extraFiles, baseUrl, entryUrl,
|
|
294
|
+
// Track the original entry file URL
|
|
295
|
+
loadSource, sourceParser, sourceTransformers, loadSourceCache, options = {}, allFilesListed = false, knownExtraFiles = new Set(), globalsFileKeys = new Set() // Track which files came from globals
|
|
296
|
+
) {
|
|
297
|
+
const {
|
|
298
|
+
maxDepth = 10,
|
|
299
|
+
loadedFiles = new Set()
|
|
300
|
+
} = options;
|
|
301
|
+
if (maxDepth <= 0) {
|
|
302
|
+
throw new Error('Maximum recursion depth reached while loading extra files');
|
|
303
|
+
}
|
|
304
|
+
const processedExtraFiles = {};
|
|
305
|
+
const allFilesUsed = [];
|
|
306
|
+
const allExternals = {};
|
|
307
|
+
|
|
308
|
+
// Start loading all extra files in parallel
|
|
309
|
+
const extraFilePromises = Object.entries(extraFiles).map(async ([fileName, fileData]) => {
|
|
310
|
+
try {
|
|
311
|
+
let fileUrl;
|
|
312
|
+
let sourceData;
|
|
313
|
+
let transforms;
|
|
314
|
+
if (typeof fileData === 'string') {
|
|
315
|
+
// fileData is a URL/path - use it directly, don't modify it
|
|
316
|
+
fileUrl = fileData;
|
|
317
|
+
|
|
318
|
+
// Check for circular dependencies
|
|
319
|
+
if (loadedFiles.has(fileUrl)) {
|
|
320
|
+
throw new Error(`Circular dependency detected: ${fileUrl}`);
|
|
321
|
+
}
|
|
322
|
+
loadedFiles.add(fileUrl);
|
|
323
|
+
} else {
|
|
324
|
+
// fileData is an object with source and/or transforms
|
|
325
|
+
sourceData = fileData.source;
|
|
326
|
+
transforms = fileData.transforms;
|
|
327
|
+
fileUrl = baseUrl; // Use base URL as fallback
|
|
328
|
+
}
|
|
329
|
+
|
|
330
|
+
// Derive language from fileName for extra files
|
|
331
|
+
const extraFileExtension = fileName.slice(fileName.lastIndexOf('.'));
|
|
332
|
+
const extraFileLanguage = getLanguageFromExtension(extraFileExtension);
|
|
333
|
+
|
|
334
|
+
// Load the file (this will handle recursive extra files)
|
|
335
|
+
const fileResult = await loadSingleFile(variantName, fileName, sourceData, fileUrl, loadSource, sourceParser, sourceTransformers, loadSourceCache, transforms, {
|
|
336
|
+
...options,
|
|
337
|
+
maxDepth: maxDepth - 1,
|
|
338
|
+
loadedFiles: new Set(loadedFiles)
|
|
339
|
+
}, allFilesListed, knownExtraFiles, extraFileLanguage);
|
|
340
|
+
|
|
341
|
+
// Collect files used from this file load
|
|
342
|
+
const filesUsedFromFile = [];
|
|
343
|
+
if (typeof fileData === 'string') {
|
|
344
|
+
filesUsedFromFile.push(fileUrl);
|
|
345
|
+
}
|
|
346
|
+
if (fileResult.extraDependencies) {
|
|
347
|
+
filesUsedFromFile.push(...fileResult.extraDependencies);
|
|
348
|
+
}
|
|
349
|
+
|
|
350
|
+
// Collect externals from this file load
|
|
351
|
+
const externalsFromFile = {};
|
|
352
|
+
if (fileResult.externals) {
|
|
353
|
+
Object.assign(externalsFromFile, fileResult.externals);
|
|
354
|
+
}
|
|
355
|
+
return {
|
|
356
|
+
fileName,
|
|
357
|
+
result: fileResult,
|
|
358
|
+
filesUsed: filesUsedFromFile,
|
|
359
|
+
externals: externalsFromFile
|
|
360
|
+
};
|
|
361
|
+
} catch (error) {
|
|
362
|
+
throw new Error(`Failed to load extra file (variant: ${variantName}, file: ${fileName}, url: ${baseUrl}): ${error instanceof Error ? error.message : ''}`);
|
|
363
|
+
}
|
|
364
|
+
});
|
|
365
|
+
|
|
366
|
+
// Wait for all extra files to load
|
|
367
|
+
const extraFileResults = await Promise.all(extraFilePromises);
|
|
368
|
+
|
|
369
|
+
// Process results and handle nested extra files
|
|
370
|
+
const nestedExtraFilesPromises = [];
|
|
371
|
+
for (const {
|
|
372
|
+
fileName,
|
|
373
|
+
result,
|
|
374
|
+
filesUsed,
|
|
375
|
+
externals
|
|
376
|
+
} of extraFileResults) {
|
|
377
|
+
const normalizedFileName = normalizePathKey(fileName);
|
|
378
|
+
const originalFileData = extraFiles[fileName];
|
|
379
|
+
|
|
380
|
+
// Preserve metadata flag if it exists in the original data, or if this file came from globals
|
|
381
|
+
let metadata;
|
|
382
|
+
if (typeof originalFileData !== 'string') {
|
|
383
|
+
metadata = originalFileData.metadata;
|
|
384
|
+
} else if (globalsFileKeys.has(fileName)) {
|
|
385
|
+
metadata = true;
|
|
386
|
+
}
|
|
387
|
+
|
|
388
|
+
// Derive language from fileName extension for extra files
|
|
389
|
+
const extraFileExtension = normalizedFileName.slice(normalizedFileName.lastIndexOf('.'));
|
|
390
|
+
const extraFileLanguage = getLanguageFromExtension(extraFileExtension);
|
|
391
|
+
processedExtraFiles[normalizedFileName] = {
|
|
392
|
+
source: result.source,
|
|
393
|
+
...(extraFileLanguage && {
|
|
394
|
+
language: extraFileLanguage
|
|
395
|
+
}),
|
|
396
|
+
...(result.transforms && {
|
|
397
|
+
transforms: result.transforms
|
|
398
|
+
}),
|
|
399
|
+
...(metadata !== undefined && {
|
|
400
|
+
metadata
|
|
401
|
+
})
|
|
402
|
+
};
|
|
403
|
+
|
|
404
|
+
// Add files used from this file load
|
|
405
|
+
allFilesUsed.push(...filesUsed);
|
|
406
|
+
|
|
407
|
+
// Add externals from this file load using proper merging
|
|
408
|
+
const mergedExternals = mergeExternals([allExternals, externals]);
|
|
409
|
+
Object.assign(allExternals, mergedExternals);
|
|
410
|
+
|
|
411
|
+
// Collect promises for nested extra files with their source key
|
|
412
|
+
if (result.extraFiles) {
|
|
413
|
+
let sourceFileUrl = baseUrl;
|
|
414
|
+
const fileData = extraFiles[fileName];
|
|
415
|
+
if (typeof fileData === 'string') {
|
|
416
|
+
sourceFileUrl = fileData; // Use the URL directly, don't modify it
|
|
417
|
+
}
|
|
418
|
+
nestedExtraFilesPromises.push(loadExtraFiles(variantName, result.extraFiles, sourceFileUrl,
|
|
419
|
+
// Use the source file's URL as base for its extra files
|
|
420
|
+
entryUrl,
|
|
421
|
+
// Keep the entry URL for final conversion
|
|
422
|
+
loadSource, sourceParser, sourceTransformers, loadSourceCache, {
|
|
423
|
+
...options,
|
|
424
|
+
maxDepth: maxDepth - 1,
|
|
425
|
+
loadedFiles: new Set(loadedFiles)
|
|
426
|
+
}, allFilesListed, knownExtraFiles, globalsFileKeys // Pass through globals file tracking
|
|
427
|
+
).then(nestedResult => ({
|
|
428
|
+
files: nestedResult.extraFiles,
|
|
429
|
+
allFilesUsed: nestedResult.allFilesUsed,
|
|
430
|
+
allExternals: nestedResult.allExternals,
|
|
431
|
+
sourceFileKey: normalizedFileName // Pass the normalized key
|
|
432
|
+
})));
|
|
433
|
+
}
|
|
434
|
+
}
|
|
435
|
+
|
|
436
|
+
// Wait for all nested extra files and merge them, converting paths based on key structure
|
|
437
|
+
if (nestedExtraFilesPromises.length > 0) {
|
|
438
|
+
const nestedExtraFilesResults = await Promise.all(nestedExtraFilesPromises);
|
|
439
|
+
for (const {
|
|
440
|
+
files: nestedExtraFiles,
|
|
441
|
+
allFilesUsed: nestedFilesUsed,
|
|
442
|
+
allExternals: nestedExternals,
|
|
443
|
+
sourceFileKey
|
|
444
|
+
} of nestedExtraFilesResults) {
|
|
445
|
+
// Add nested files used
|
|
446
|
+
allFilesUsed.push(...nestedFilesUsed);
|
|
447
|
+
|
|
448
|
+
// Add nested externals using proper merging
|
|
449
|
+
const mergedNestedExternals = mergeExternals([allExternals, nestedExternals]);
|
|
450
|
+
Object.assign(allExternals, mergedNestedExternals);
|
|
451
|
+
for (const [nestedKey, nestedValue] of Object.entries(nestedExtraFiles)) {
|
|
452
|
+
// Convert the key based on the directory structure of the source key
|
|
453
|
+
const convertedKey = convertKeyBasedOnDirectory(nestedKey, sourceFileKey);
|
|
454
|
+
const normalizedConvertedKey = normalizePathKey(convertedKey);
|
|
455
|
+
processedExtraFiles[normalizedConvertedKey] = nestedValue;
|
|
456
|
+
}
|
|
457
|
+
}
|
|
458
|
+
}
|
|
459
|
+
return {
|
|
460
|
+
extraFiles: processedExtraFiles,
|
|
461
|
+
allFilesUsed,
|
|
462
|
+
allExternals
|
|
463
|
+
};
|
|
444
464
|
}
|
|
465
|
+
|
|
445
466
|
/**
|
|
446
467
|
* Loads a variant with support for recursive extra file loading.
|
|
447
468
|
* The loadSource function can now return extraFiles that will be loaded recursively.
|
|
448
469
|
* Supports both relative and absolute paths for extra files.
|
|
449
470
|
* Uses Promise.all for efficient parallel loading of extra files.
|
|
450
471
|
*/
|
|
451
|
-
function
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
|
|
457
|
-
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
|
|
477
|
-
|
|
478
|
-
|
|
479
|
-
sourceFileKey,
|
|
480
|
-
mergedNestedExternals,
|
|
481
|
-
_i3,
|
|
482
|
-
_Object$entries2,
|
|
483
|
-
_Object$entries2$_i,
|
|
484
|
-
nestedKey,
|
|
485
|
-
nestedValue,
|
|
486
|
-
convertedKey,
|
|
487
|
-
normalizedConvertedKey,
|
|
488
|
-
_args4 = arguments,
|
|
489
|
-
_t6;
|
|
490
|
-
return _regenerator().w(function (_context4) {
|
|
491
|
-
while (1) switch (_context4.p = _context4.n) {
|
|
492
|
-
case 0:
|
|
493
|
-
options = _args4.length > 8 && _args4[8] !== undefined ? _args4[8] : {};
|
|
494
|
-
allFilesListed = _args4.length > 9 && _args4[9] !== undefined ? _args4[9] : false;
|
|
495
|
-
knownExtraFiles = _args4.length > 10 && _args4[10] !== undefined ? _args4[10] : new Set();
|
|
496
|
-
globalsFileKeys = _args4.length > 11 && _args4[11] !== undefined ? _args4[11] : new Set();
|
|
497
|
-
_options$maxDepth = options.maxDepth, maxDepth = _options$maxDepth === void 0 ? 10 : _options$maxDepth, _options$loadedFiles = options.loadedFiles, loadedFiles = _options$loadedFiles === void 0 ? new Set() : _options$loadedFiles;
|
|
498
|
-
if (!(maxDepth <= 0)) {
|
|
499
|
-
_context4.n = 1;
|
|
500
|
-
break;
|
|
501
|
-
}
|
|
502
|
-
throw new Error('Maximum recursion depth reached while loading extra files');
|
|
503
|
-
case 1:
|
|
504
|
-
processedExtraFiles = {};
|
|
505
|
-
allFilesUsed = [];
|
|
506
|
-
allExternals = {}; // Start loading all extra files in parallel
|
|
507
|
-
extraFilePromises = Object.entries(extraFiles).map(/*#__PURE__*/function () {
|
|
508
|
-
var _ref2 = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee2(_ref) {
|
|
509
|
-
var _ref3, fileName, fileData, fileUrl, sourceData, transforms, extraFileExtension, extraFileLanguage, fileResult, filesUsedFromFile, externalsFromFile, _t5;
|
|
510
|
-
return _regenerator().w(function (_context2) {
|
|
511
|
-
while (1) switch (_context2.p = _context2.n) {
|
|
512
|
-
case 0:
|
|
513
|
-
_ref3 = _slicedToArray(_ref, 2), fileName = _ref3[0], fileData = _ref3[1];
|
|
514
|
-
_context2.p = 1;
|
|
515
|
-
if (!(typeof fileData === 'string')) {
|
|
516
|
-
_context2.n = 3;
|
|
517
|
-
break;
|
|
518
|
-
}
|
|
519
|
-
// fileData is a URL/path - use it directly, don't modify it
|
|
520
|
-
fileUrl = fileData;
|
|
521
|
-
|
|
522
|
-
// Check for circular dependencies
|
|
523
|
-
if (!loadedFiles.has(fileUrl)) {
|
|
524
|
-
_context2.n = 2;
|
|
525
|
-
break;
|
|
526
|
-
}
|
|
527
|
-
throw new Error("Circular dependency detected: ".concat(fileUrl));
|
|
528
|
-
case 2:
|
|
529
|
-
loadedFiles.add(fileUrl);
|
|
530
|
-
_context2.n = 4;
|
|
531
|
-
break;
|
|
532
|
-
case 3:
|
|
533
|
-
// fileData is an object with source and/or transforms
|
|
534
|
-
sourceData = fileData.source;
|
|
535
|
-
transforms = fileData.transforms;
|
|
536
|
-
fileUrl = baseUrl; // Use base URL as fallback
|
|
537
|
-
case 4:
|
|
538
|
-
// Derive language from fileName for extra files
|
|
539
|
-
extraFileExtension = fileName.slice(fileName.lastIndexOf('.'));
|
|
540
|
-
extraFileLanguage = getLanguageFromExtension(extraFileExtension); // Load the file (this will handle recursive extra files)
|
|
541
|
-
_context2.n = 5;
|
|
542
|
-
return loadSingleFile(variantName, fileName, sourceData, fileUrl, loadSource, sourceParser, sourceTransformers, loadSourceCache, transforms, _extends(_extends({}, options), {}, {
|
|
543
|
-
maxDepth: maxDepth - 1,
|
|
544
|
-
loadedFiles: new Set(loadedFiles)
|
|
545
|
-
}), allFilesListed, knownExtraFiles, extraFileLanguage);
|
|
546
|
-
case 5:
|
|
547
|
-
fileResult = _context2.v;
|
|
548
|
-
// Collect files used from this file load
|
|
549
|
-
filesUsedFromFile = [];
|
|
550
|
-
if (typeof fileData === 'string') {
|
|
551
|
-
filesUsedFromFile.push(fileUrl);
|
|
552
|
-
}
|
|
553
|
-
if (fileResult.extraDependencies) {
|
|
554
|
-
filesUsedFromFile.push.apply(filesUsedFromFile, _toConsumableArray(fileResult.extraDependencies));
|
|
555
|
-
}
|
|
556
|
-
|
|
557
|
-
// Collect externals from this file load
|
|
558
|
-
externalsFromFile = {};
|
|
559
|
-
if (fileResult.externals) {
|
|
560
|
-
Object.assign(externalsFromFile, fileResult.externals);
|
|
561
|
-
}
|
|
562
|
-
return _context2.a(2, {
|
|
563
|
-
fileName: fileName,
|
|
564
|
-
result: fileResult,
|
|
565
|
-
filesUsed: filesUsedFromFile,
|
|
566
|
-
externals: externalsFromFile
|
|
567
|
-
});
|
|
568
|
-
case 6:
|
|
569
|
-
_context2.p = 6;
|
|
570
|
-
_t5 = _context2.v;
|
|
571
|
-
throw new Error("Failed to load extra file (variant: ".concat(variantName, ", file: ").concat(fileName, ", url: ").concat(baseUrl, "): ").concat(_t5 instanceof Error ? _t5.message : ''));
|
|
572
|
-
case 7:
|
|
573
|
-
return _context2.a(2);
|
|
574
|
-
}
|
|
575
|
-
}, _callee2, null, [[1, 6]]);
|
|
576
|
-
}));
|
|
577
|
-
return function (_x19) {
|
|
578
|
-
return _ref2.apply(this, arguments);
|
|
579
|
-
};
|
|
580
|
-
}()); // Wait for all extra files to load
|
|
581
|
-
_context4.n = 2;
|
|
582
|
-
return Promise.all(extraFilePromises);
|
|
583
|
-
case 2:
|
|
584
|
-
extraFileResults = _context4.v;
|
|
585
|
-
// Process results and handle nested extra files
|
|
586
|
-
nestedExtraFilesPromises = [];
|
|
587
|
-
_iterator2 = _createForOfIteratorHelper(extraFileResults);
|
|
588
|
-
_context4.p = 3;
|
|
589
|
-
_loop = /*#__PURE__*/_regenerator().m(function _loop() {
|
|
590
|
-
var _step2$value, fileName, result, filesUsed, externals, normalizedFileName, originalFileData, metadata, extraFileExtension, extraFileLanguage, mergedExternals, sourceFileUrl, fileData;
|
|
591
|
-
return _regenerator().w(function (_context3) {
|
|
592
|
-
while (1) switch (_context3.n) {
|
|
593
|
-
case 0:
|
|
594
|
-
_step2$value = _step2.value, fileName = _step2$value.fileName, result = _step2$value.result, filesUsed = _step2$value.filesUsed, externals = _step2$value.externals;
|
|
595
|
-
normalizedFileName = normalizePathKey(fileName);
|
|
596
|
-
originalFileData = extraFiles[fileName]; // Preserve metadata flag if it exists in the original data, or if this file came from globals
|
|
597
|
-
if (typeof originalFileData !== 'string') {
|
|
598
|
-
metadata = originalFileData.metadata;
|
|
599
|
-
} else if (globalsFileKeys.has(fileName)) {
|
|
600
|
-
metadata = true;
|
|
601
|
-
}
|
|
602
|
-
|
|
603
|
-
// Derive language from fileName extension for extra files
|
|
604
|
-
extraFileExtension = normalizedFileName.slice(normalizedFileName.lastIndexOf('.'));
|
|
605
|
-
extraFileLanguage = getLanguageFromExtension(extraFileExtension);
|
|
606
|
-
processedExtraFiles[normalizedFileName] = _extends(_extends(_extends({
|
|
607
|
-
source: result.source
|
|
608
|
-
}, extraFileLanguage && {
|
|
609
|
-
language: extraFileLanguage
|
|
610
|
-
}), result.transforms && {
|
|
611
|
-
transforms: result.transforms
|
|
612
|
-
}), metadata !== undefined && {
|
|
613
|
-
metadata: metadata
|
|
614
|
-
});
|
|
615
|
-
|
|
616
|
-
// Add files used from this file load
|
|
617
|
-
allFilesUsed.push.apply(allFilesUsed, _toConsumableArray(filesUsed));
|
|
618
|
-
|
|
619
|
-
// Add externals from this file load using proper merging
|
|
620
|
-
mergedExternals = mergeExternals([allExternals, externals]);
|
|
621
|
-
Object.assign(allExternals, mergedExternals);
|
|
622
|
-
|
|
623
|
-
// Collect promises for nested extra files with their source key
|
|
624
|
-
if (result.extraFiles) {
|
|
625
|
-
sourceFileUrl = baseUrl;
|
|
626
|
-
fileData = extraFiles[fileName];
|
|
627
|
-
if (typeof fileData === 'string') {
|
|
628
|
-
sourceFileUrl = fileData; // Use the URL directly, don't modify it
|
|
629
|
-
}
|
|
630
|
-
nestedExtraFilesPromises.push(loadExtraFiles(variantName, result.extraFiles, sourceFileUrl,
|
|
631
|
-
// Use the source file's URL as base for its extra files
|
|
632
|
-
entryUrl,
|
|
633
|
-
// Keep the entry URL for final conversion
|
|
634
|
-
loadSource, sourceParser, sourceTransformers, loadSourceCache, _extends(_extends({}, options), {}, {
|
|
635
|
-
maxDepth: maxDepth - 1,
|
|
636
|
-
loadedFiles: new Set(loadedFiles)
|
|
637
|
-
}), allFilesListed, knownExtraFiles, globalsFileKeys // Pass through globals file tracking
|
|
638
|
-
).then(function (nestedResult) {
|
|
639
|
-
return {
|
|
640
|
-
files: nestedResult.extraFiles,
|
|
641
|
-
allFilesUsed: nestedResult.allFilesUsed,
|
|
642
|
-
allExternals: nestedResult.allExternals,
|
|
643
|
-
sourceFileKey: normalizedFileName // Pass the normalized key
|
|
644
|
-
};
|
|
645
|
-
}));
|
|
646
|
-
}
|
|
647
|
-
case 1:
|
|
648
|
-
return _context3.a(2);
|
|
649
|
-
}
|
|
650
|
-
}, _loop);
|
|
651
|
-
});
|
|
652
|
-
_iterator2.s();
|
|
653
|
-
case 4:
|
|
654
|
-
if ((_step2 = _iterator2.n()).done) {
|
|
655
|
-
_context4.n = 6;
|
|
656
|
-
break;
|
|
657
|
-
}
|
|
658
|
-
return _context4.d(_regeneratorValues(_loop()), 5);
|
|
659
|
-
case 5:
|
|
660
|
-
_context4.n = 4;
|
|
661
|
-
break;
|
|
662
|
-
case 6:
|
|
663
|
-
_context4.n = 8;
|
|
664
|
-
break;
|
|
665
|
-
case 7:
|
|
666
|
-
_context4.p = 7;
|
|
667
|
-
_t6 = _context4.v;
|
|
668
|
-
_iterator2.e(_t6);
|
|
669
|
-
case 8:
|
|
670
|
-
_context4.p = 8;
|
|
671
|
-
_iterator2.f();
|
|
672
|
-
return _context4.f(8);
|
|
673
|
-
case 9:
|
|
674
|
-
if (!(nestedExtraFilesPromises.length > 0)) {
|
|
675
|
-
_context4.n = 11;
|
|
676
|
-
break;
|
|
677
|
-
}
|
|
678
|
-
_context4.n = 10;
|
|
679
|
-
return Promise.all(nestedExtraFilesPromises);
|
|
680
|
-
case 10:
|
|
681
|
-
nestedExtraFilesResults = _context4.v;
|
|
682
|
-
_iterator3 = _createForOfIteratorHelper(nestedExtraFilesResults);
|
|
683
|
-
try {
|
|
684
|
-
for (_iterator3.s(); !(_step3 = _iterator3.n()).done;) {
|
|
685
|
-
_step3$value = _step3.value, nestedExtraFiles = _step3$value.files, nestedFilesUsed = _step3$value.allFilesUsed, nestedExternals = _step3$value.allExternals, sourceFileKey = _step3$value.sourceFileKey;
|
|
686
|
-
// Add nested files used
|
|
687
|
-
allFilesUsed.push.apply(allFilesUsed, _toConsumableArray(nestedFilesUsed));
|
|
688
|
-
|
|
689
|
-
// Add nested externals using proper merging
|
|
690
|
-
mergedNestedExternals = mergeExternals([allExternals, nestedExternals]);
|
|
691
|
-
Object.assign(allExternals, mergedNestedExternals);
|
|
692
|
-
for (_i3 = 0, _Object$entries2 = Object.entries(nestedExtraFiles); _i3 < _Object$entries2.length; _i3++) {
|
|
693
|
-
_Object$entries2$_i = _slicedToArray(_Object$entries2[_i3], 2), nestedKey = _Object$entries2$_i[0], nestedValue = _Object$entries2$_i[1];
|
|
694
|
-
// Convert the key based on the directory structure of the source key
|
|
695
|
-
convertedKey = convertKeyBasedOnDirectory(nestedKey, sourceFileKey);
|
|
696
|
-
normalizedConvertedKey = normalizePathKey(convertedKey);
|
|
697
|
-
processedExtraFiles[normalizedConvertedKey] = nestedValue;
|
|
698
|
-
}
|
|
699
|
-
}
|
|
700
|
-
} catch (err) {
|
|
701
|
-
_iterator3.e(err);
|
|
702
|
-
} finally {
|
|
703
|
-
_iterator3.f();
|
|
704
|
-
}
|
|
705
|
-
case 11:
|
|
706
|
-
return _context4.a(2, {
|
|
707
|
-
extraFiles: processedExtraFiles,
|
|
708
|
-
allFilesUsed: allFilesUsed,
|
|
709
|
-
allExternals: allExternals
|
|
710
|
-
});
|
|
472
|
+
export async function loadCodeVariant(url, variantName, variant, options = {}) {
|
|
473
|
+
if (!variant) {
|
|
474
|
+
throw new Error(`Variant is missing from code: ${variantName}`);
|
|
475
|
+
}
|
|
476
|
+
const {
|
|
477
|
+
sourceParser,
|
|
478
|
+
loadSource,
|
|
479
|
+
loadVariantMeta,
|
|
480
|
+
sourceTransformers,
|
|
481
|
+
globalsCode,
|
|
482
|
+
disableParsing
|
|
483
|
+
} = options;
|
|
484
|
+
|
|
485
|
+
// Create a cache for loadSource calls scoped to this loadCodeVariant call
|
|
486
|
+
const loadSourceCache = new Map();
|
|
487
|
+
const functionName = 'Load Variant';
|
|
488
|
+
let currentMark = performanceMeasure(undefined, {
|
|
489
|
+
mark: 'Start',
|
|
490
|
+
measure: 'Start'
|
|
491
|
+
}, [functionName, url || variantName], true);
|
|
492
|
+
if (typeof variant === 'string') {
|
|
493
|
+
if (!loadVariantMeta) {
|
|
494
|
+
// Create a basic loadVariantMeta function as fallback
|
|
495
|
+
const {
|
|
496
|
+
fileName
|
|
497
|
+
} = getFileNameFromUrl(variant);
|
|
498
|
+
if (!fileName) {
|
|
499
|
+
throw new Error(`Cannot determine fileName from URL "${variant}" for variant "${variantName}". ` + `Please provide a loadVariantMeta function or ensure the URL has a valid file extension.`);
|
|
711
500
|
}
|
|
712
|
-
|
|
713
|
-
|
|
714
|
-
|
|
715
|
-
}
|
|
716
|
-
|
|
717
|
-
|
|
718
|
-
|
|
719
|
-
|
|
720
|
-
|
|
721
|
-
|
|
722
|
-
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
|
|
729
|
-
|
|
730
|
-
|
|
731
|
-
|
|
732
|
-
|
|
733
|
-
|
|
734
|
-
|
|
735
|
-
|
|
736
|
-
|
|
737
|
-
|
|
738
|
-
|
|
739
|
-
extraFileName
|
|
740
|
-
|
|
501
|
+
variant = {
|
|
502
|
+
url: variant,
|
|
503
|
+
fileName
|
|
504
|
+
};
|
|
505
|
+
} else {
|
|
506
|
+
try {
|
|
507
|
+
variant = await loadVariantMeta(variantName, variant);
|
|
508
|
+
} catch (error) {
|
|
509
|
+
throw new Error(`Failed to load variant code (variant: ${variantName}, url: ${variant}): ${JSON.stringify(error)}`);
|
|
510
|
+
}
|
|
511
|
+
currentMark = performanceMeasure(currentMark, {
|
|
512
|
+
mark: 'Loaded Variant Meta',
|
|
513
|
+
measure: 'Variant Meta Loading'
|
|
514
|
+
}, [functionName, url || variantName]);
|
|
515
|
+
}
|
|
516
|
+
}
|
|
517
|
+
const loadedFiles = new Set();
|
|
518
|
+
if (url) {
|
|
519
|
+
loadedFiles.add(url);
|
|
520
|
+
}
|
|
521
|
+
const allFilesUsed = url ? [url] : []; // Start with the main file URL if available
|
|
522
|
+
let allExternals = {}; // Collect externals from all sources
|
|
523
|
+
|
|
524
|
+
// Build set of known extra files from variant definition
|
|
525
|
+
const knownExtraFiles = new Set();
|
|
526
|
+
if (variant.extraFiles) {
|
|
527
|
+
for (const extraFileName of Object.keys(variant.extraFiles)) {
|
|
528
|
+
knownExtraFiles.add(extraFileName);
|
|
529
|
+
}
|
|
530
|
+
}
|
|
531
|
+
|
|
532
|
+
// Load main file
|
|
533
|
+
const fileName = variant.fileName || (url ? getFileNameFromUrl(url).fileName : undefined);
|
|
534
|
+
|
|
535
|
+
// Derive language from variant.language or from fileName extension
|
|
536
|
+
// Normalize the language to its canonical form (e.g., 'js' -> 'javascript')
|
|
537
|
+
let language = variant.language ? normalizeLanguage(variant.language) : undefined;
|
|
538
|
+
if (!language && fileName) {
|
|
539
|
+
const extension = fileName.slice(fileName.lastIndexOf('.'));
|
|
540
|
+
language = getLanguageFromExtension(extension);
|
|
541
|
+
}
|
|
542
|
+
|
|
543
|
+
// If we don't have a fileName and no URL, we can still parse if we have language
|
|
544
|
+
if (!fileName && !url) {
|
|
545
|
+
let finalSource = variant.source;
|
|
546
|
+
|
|
547
|
+
// Parse the source if we have language and sourceParser
|
|
548
|
+
if (typeof finalSource === 'string' && language && sourceParser && !disableParsing) {
|
|
549
|
+
const parseSource = await sourceParser;
|
|
550
|
+
finalSource = parseSource(finalSource, '', language);
|
|
551
|
+
} else if (typeof finalSource === 'string') {
|
|
552
|
+
// No language or parser - return as plain text
|
|
553
|
+
finalSource = {
|
|
554
|
+
type: 'root',
|
|
555
|
+
children: [{
|
|
556
|
+
type: 'text',
|
|
557
|
+
value: finalSource || ''
|
|
558
|
+
}]
|
|
559
|
+
};
|
|
560
|
+
}
|
|
561
|
+
const finalVariant = {
|
|
562
|
+
...variant,
|
|
741
563
|
language,
|
|
742
|
-
|
|
743
|
-
|
|
744
|
-
|
|
745
|
-
|
|
746
|
-
|
|
747
|
-
|
|
748
|
-
|
|
749
|
-
|
|
750
|
-
|
|
751
|
-
|
|
752
|
-
|
|
753
|
-
|
|
754
|
-
|
|
755
|
-
|
|
756
|
-
|
|
757
|
-
|
|
758
|
-
|
|
759
|
-
|
|
760
|
-
|
|
761
|
-
|
|
762
|
-
|
|
763
|
-
|
|
764
|
-
|
|
765
|
-
|
|
766
|
-
|
|
767
|
-
|
|
768
|
-
|
|
769
|
-
|
|
770
|
-
|
|
771
|
-
|
|
772
|
-
|
|
773
|
-
|
|
774
|
-
|
|
775
|
-
|
|
776
|
-
|
|
777
|
-
|
|
778
|
-
|
|
779
|
-
|
|
780
|
-
|
|
781
|
-
|
|
782
|
-
|
|
783
|
-
|
|
784
|
-
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
|
|
789
|
-
|
|
790
|
-
|
|
791
|
-
|
|
792
|
-
|
|
793
|
-
|
|
794
|
-
|
|
795
|
-
|
|
796
|
-
|
|
797
|
-
|
|
798
|
-
|
|
799
|
-
|
|
800
|
-
|
|
801
|
-
|
|
802
|
-
|
|
803
|
-
|
|
804
|
-
|
|
805
|
-
|
|
806
|
-
|
|
807
|
-
|
|
808
|
-
|
|
809
|
-
|
|
810
|
-
|
|
811
|
-
|
|
812
|
-
|
|
813
|
-
|
|
814
|
-
|
|
815
|
-
|
|
816
|
-
|
|
817
|
-
|
|
818
|
-
|
|
819
|
-
|
|
820
|
-
|
|
821
|
-
|
|
822
|
-
|
|
823
|
-
|
|
824
|
-
}
|
|
825
|
-
|
|
826
|
-
|
|
827
|
-
|
|
828
|
-
url: variant,
|
|
829
|
-
fileName: _fileName
|
|
564
|
+
source: finalSource
|
|
565
|
+
};
|
|
566
|
+
return {
|
|
567
|
+
code: finalVariant,
|
|
568
|
+
dependencies: [],
|
|
569
|
+
// No dependencies without URL
|
|
570
|
+
externals: {} // No externals without URL
|
|
571
|
+
};
|
|
572
|
+
}
|
|
+ if (!fileName) {
+ throw new Error(`No fileName available for variant "${variantName}". ` + `Please provide a fileName in the variant definition or ensure the URL has a valid file extension.`);
+ }
+ const mainFileResult = await loadSingleFile(variantName, fileName, variant.source, url, loadSource, sourceParser, sourceTransformers, loadSourceCache, variant.transforms, {
+ ...options,
+ loadedFiles
+ }, variant.allFilesListed || false, knownExtraFiles, language);
+
+ // Add files used from main file loading
+ if (mainFileResult.extraDependencies) {
+ allFilesUsed.push(...mainFileResult.extraDependencies);
+ }
+ currentMark = performanceMeasure(currentMark, {
+ mark: 'Loaded Main File',
+ measure: 'Main File Loading'
+ }, [functionName, url || fileName], true);
+
+ // Validate extraFiles keys from variant definition
+ if (variant.extraFiles) {
+ for (const extraFileName of Object.keys(variant.extraFiles)) {
+ // Check if key is an absolute URL (should be relative)
+ if (isAbsolutePath(extraFileName)) {
+ throw new Error(`Invalid extraFiles key in variant: "${extraFileName}" appears to be an absolute path. ` + `extraFiles keys in variant definition should be relative paths from the main file.`);
+ }
+ }
+ }
+
+ // Collect extra files from variant definition and from loaded source
+ const extraFilesToLoad = {
+ ...(variant.extraFiles || {}),
+ ...(mainFileResult.extraFiles || {})
+ };
+
+ // Add externals from main file loading
+ if (mainFileResult.externals) {
+ allExternals = mergeExternals([allExternals, mainFileResult.externals]);
+ }
+ const externalsMergedMark = performanceMeasure(currentMark, {
+ mark: 'Externals Merged',
+ measure: 'Merging Externals'
+ }, [functionName, url || fileName]);
+ currentMark = externalsMergedMark;
+
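
mergeExternals is used above (and repeatedly below) to fold the externals reported by the main file, the globals code, and the extra files into a single map, but its implementation is not part of this hunk. A hypothetical sketch of such a merge, assuming a map of module specifier to imported names (the value shape is an assumption, not the package's documented format):

    // Hypothetical helper; not the package's actual mergeExternals implementation.
    function mergeExternalsSketch(sources) {
      const merged = {};
      for (const externals of sources) {
        for (const [specifier, names] of Object.entries(externals || {})) {
          const combined = new Set([...(merged[specifier] || []), ...(names || [])]);
          merged[specifier] = Array.from(combined);
        }
      }
      return merged;
    }

    // Example: mergeExternalsSketch([{ react: ['useState'] }, { react: ['useEffect'] }])
    // yields { react: ['useState', 'useEffect'] }.
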
+ // Track which files come from globals for metadata marking
+ const globalsFileKeys = new Set(); // Track globals file keys for loadExtraFiles
+
+ // Process globalsCode array and add to extraFiles if provided
+ if (globalsCode && globalsCode.length > 0) {
+ // Collect existing filenames to avoid conflicts
+ const existingFiles = new Set();
+
+ // Add main variant filename if it exists
+ if (variant.fileName) {
+ existingFiles.add(variant.fileName);
+ }
+
+ // Add already loaded extra files
+ for (const key of Object.keys(extraFilesToLoad)) {
+ existingFiles.add(key);
+ }
+
+ // Process all globals items in parallel
+ const globalsPromises = globalsCode.map(async globalsItem => {
+ let globalsVariant;
+ if (typeof globalsItem === 'string') {
+ // Handle string case - load the variant metadata
+ if (!loadVariantMeta) {
+ // Create a basic variant as fallback
+ const {
+ fileName: globalsFileName
+ } = getFileNameFromUrl(globalsItem);
+ if (!globalsFileName) {
+ throw new Error(`Cannot determine fileName from globalsCode URL "${globalsItem}". ` + `Please provide a loadVariantMeta function or ensure the URL has a valid file extension.`);
+ }
+ globalsVariant = {
+ url: globalsItem,
+ fileName: globalsFileName
  };
- [14 removed lines; content not captured in this extract]
- case 6:
- currentMark = performanceMeasure(currentMark, {
- mark: 'Loaded Variant Meta',
- measure: 'Variant Meta Loading'
- }, [functionName, url || variantName]);
- case 7:
- loadedFiles = new Set();
- if (url) {
- loadedFiles.add(url);
- }
- allFilesUsed = url ? [url] : []; // Start with the main file URL if available
- allExternals = {}; // Collect externals from all sources
- // Build set of known extra files from variant definition
- knownExtraFiles = new Set();
- if (variant.extraFiles) {
- for (_i4 = 0, _Object$keys2 = Object.keys(variant.extraFiles); _i4 < _Object$keys2.length; _i4++) {
- extraFileName = _Object$keys2[_i4];
- knownExtraFiles.add(extraFileName);
- }
- }
+ } else {
+ try {
+ globalsVariant = await loadVariantMeta(variantName, globalsItem);
+ currentMark = performanceMeasure(currentMark, {
+ mark: 'Globals Variant Meta Loaded',
+ measure: 'Globals Variant Meta Loading'
+ }, [functionName, globalsItem, url || fileName]);
+ } catch (error) {
+ throw new Error(`Failed to load globalsCode variant metadata (variant: ${variantName}, url: ${globalsItem}): ${JSON.stringify(error)}`);
+ }
+ }
+ } else {
+ globalsVariant = globalsItem;
+ }

- [8 removed lines; content not captured in this extract]
+ // Load the globals code separately without affecting allFilesListed
+ try {
+ const globalsResult = await loadCodeVariant(globalsVariant.url, variantName, globalsVariant, {
+ ...options,
+ globalsCode: undefined
+ } // Prevent infinite recursion
+ );
+ currentMark = performanceMeasure(currentMark, {
+ mark: 'Globals Variant Loaded',
+ measure: 'Globals Variant Loading'
+ }, [functionName, globalsVariant.url || variantName, url || fileName]);
+ return globalsResult;
+ } catch (error) {
+ throw new Error(`Failed to load globalsCode (variant: ${variantName}): ${error instanceof Error ? error.message : JSON.stringify(error)}`);
+ }
+ });

- [21 removed lines; content not captured in this extract]
- type: 'root',
- children: [{
- type: 'text',
- value: finalSource || ''
- }]
+ // Wait for all globals to load
+ const globalsResults = await Promise.all(globalsPromises);
+
+ // Merge results from all globals
+ for (const globalsResult of globalsResults) {
+ // Add globals extraFiles (but NOT the main file)
+ if (globalsResult.code.extraFiles) {
+ // Add globals extra files with conflict-free naming and metadata flag
+ for (const [key, value] of Object.entries(globalsResult.code.extraFiles)) {
+ const conflictFreeKey = generateConflictFreeFilename(key, existingFiles);
+
+ // Always add metadata: true flag for globals files
+ if (typeof value === 'string') {
+ // For string URLs, we can't easily wrap them but need to track for later metadata addition
+ extraFilesToLoad[conflictFreeKey] = value;
+ globalsFileKeys.add(conflictFreeKey); // Track for loadExtraFiles
+ } else {
+ // For object values, add metadata directly
+ extraFilesToLoad[conflictFreeKey] = {
+ ...value,
+ metadata: true
  };
  }
- [3 removed lines; content not captured in this extract]
- source: finalSource
- });
- return _context6.a(2, {
- code: _finalVariant,
- dependencies: [],
- // No dependencies without URL
- externals: {} // No externals without URL
- });
- case 11:
- if (fileName) {
- _context6.n = 12;
- break;
- }
- throw new Error("No fileName available for variant \"".concat(variantName, "\". ") + "Please provide a fileName in the variant definition or ensure the URL has a valid file extension.");
- case 12:
- _context6.n = 13;
- return loadSingleFile(variantName, fileName, variant.source, url, loadSource, sourceParser, sourceTransformers, loadSourceCache, variant.transforms, _extends(_extends({}, options), {}, {
- loadedFiles: loadedFiles
- }), variant.allFilesListed || false, knownExtraFiles, language);
- case 13:
- mainFileResult = _context6.v;
- // Add files used from main file loading
- if (mainFileResult.extraDependencies) {
- allFilesUsed.push.apply(allFilesUsed, _toConsumableArray(mainFileResult.extraDependencies));
- }
- currentMark = performanceMeasure(currentMark, {
- mark: 'Loaded Main File',
- measure: 'Main File Loading'
- }, [functionName, url || fileName], true);
-
- // Validate extraFiles keys from variant definition
- if (!variant.extraFiles) {
- _context6.n = 16;
- break;
- }
- _i5 = 0, _Object$keys3 = Object.keys(variant.extraFiles);
- case 14:
- if (!(_i5 < _Object$keys3.length)) {
- _context6.n = 16;
- break;
- }
- _extraFileName = _Object$keys3[_i5];
- if (!isAbsolutePath(_extraFileName)) {
- _context6.n = 15;
- break;
- }
- throw new Error("Invalid extraFiles key in variant: \"".concat(_extraFileName, "\" appears to be an absolute path. ") + "extraFiles keys in variant definition should be relative paths from the main file.");
- case 15:
- _i5++;
- _context6.n = 14;
- break;
- case 16:
- // Collect extra files from variant definition and from loaded source
- extraFilesToLoad = _extends(_extends({}, variant.extraFiles || {}), mainFileResult.extraFiles || {}); // Add externals from main file loading
- if (mainFileResult.externals) {
- allExternals = mergeExternals([allExternals, mainFileResult.externals]);
- }
- externalsMergedMark = performanceMeasure(currentMark, {
- mark: 'Externals Merged',
- measure: 'Merging Externals'
- }, [functionName, url || fileName]);
- currentMark = externalsMergedMark;
-
- // Track which files come from globals for metadata marking
- globalsFileKeys = new Set(); // Track globals file keys for loadExtraFiles
- // Process globalsCode array and add to extraFiles if provided
- if (!(globalsCode && globalsCode.length > 0)) {
- _context6.n = 18;
- break;
- }
- // Collect existing filenames to avoid conflicts
- existingFiles = new Set(); // Add main variant filename if it exists
- if (variant.fileName) {
- existingFiles.add(variant.fileName);
- }
+ existingFiles.add(conflictFreeKey); // Track the added file for subsequent iterations
+ }
+ }

- [2 removed lines; content not captured in this extract]
- key = _Object$keys4[_i6];
- existingFiles.add(key);
- }
+ // Add globals dependencies
+ allFilesUsed.push(...globalsResult.dependencies);

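
In the globals merge above, extra files coming from globalsCode are renamed with generateConflictFreeFilename so they cannot collide with files already present, and each chosen key is added to existingFiles for the next iteration. The helper itself is defined elsewhere in the package; a minimal sketch of one way such a function could work (the counter-suffix scheme is an assumption):

    // Hypothetical sketch; the real generateConflictFreeFilename may use a different scheme.
    function conflictFreeFilenameSketch(fileName, existingFiles) {
      if (!existingFiles.has(fileName)) {
        return fileName;
      }
      const dotIndex = fileName.lastIndexOf('.');
      const base = dotIndex === -1 ? fileName : fileName.slice(0, dotIndex);
      const ext = dotIndex === -1 ? '' : fileName.slice(dotIndex);
      let counter = 1;
      while (existingFiles.has(`${base}-${counter}${ext}`)) {
        counter += 1;
      }
      return `${base}-${counter}${ext}`;
    }
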
- [38 removed lines; content not captured in this extract]
- }, [functionName, globalsItem, url || fileName]);
- _context5.n = 5;
- break;
- case 4:
- _context5.p = 4;
- _t7 = _context5.v;
- throw new Error("Failed to load globalsCode variant metadata (variant: ".concat(variantName, ", url: ").concat(globalsItem, "): ").concat(JSON.stringify(_t7)));
- case 5:
- _context5.n = 7;
- break;
- case 6:
- globalsVariant = globalsItem;
- case 7:
- _context5.p = 7;
- _context5.n = 8;
- return loadCodeVariant(globalsVariant.url, variantName, globalsVariant, _extends(_extends({}, options), {}, {
- globalsCode: undefined
- }) // Prevent infinite recursion
- );
- case 8:
- globalsResult = _context5.v;
- currentMark = performanceMeasure(currentMark, {
- mark: 'Globals Variant Loaded',
- measure: 'Globals Variant Loading'
- }, [functionName, globalsVariant.url || variantName, url || fileName]);
- return _context5.a(2, globalsResult);
- case 9:
- _context5.p = 9;
- _t8 = _context5.v;
- throw new Error("Failed to load globalsCode (variant: ".concat(variantName, "): ").concat(_t8 instanceof Error ? _t8.message : JSON.stringify(_t8)));
- case 10:
- return _context5.a(2);
- }
- }, _callee4, null, [[7, 9], [2, 4]]);
- }));
- return function (_x20) {
- return _ref4.apply(this, arguments);
- };
- }()); // Wait for all globals to load
- _context6.n = 17;
- return Promise.all(globalsPromises);
- case 17:
- globalsResults = _context6.v;
- // Merge results from all globals
- _iterator4 = _createForOfIteratorHelper(globalsResults);
- try {
- for (_iterator4.s(); !(_step4 = _iterator4.n()).done;) {
- globalsResult = _step4.value;
- // Add globals extraFiles (but NOT the main file)
- if (globalsResult.code.extraFiles) {
- // Add globals extra files with conflict-free naming and metadata flag
- for (_i7 = 0, _Object$entries3 = Object.entries(globalsResult.code.extraFiles); _i7 < _Object$entries3.length; _i7++) {
- _Object$entries3$_i = _slicedToArray(_Object$entries3[_i7], 2), _key = _Object$entries3$_i[0], value = _Object$entries3$_i[1];
- conflictFreeKey = generateConflictFreeFilename(_key, existingFiles); // Always add metadata: true flag for globals files
- if (typeof value === 'string') {
- // For string URLs, we can't easily wrap them but need to track for later metadata addition
- extraFilesToLoad[conflictFreeKey] = value;
- globalsFileKeys.add(conflictFreeKey); // Track for loadExtraFiles
- } else {
- // For object values, add metadata directly
- extraFilesToLoad[conflictFreeKey] = _extends(_extends({}, value), {}, {
- metadata: true
- });
- }
- existingFiles.add(conflictFreeKey); // Track the added file for subsequent iterations
- }
- }
-
- // Add globals dependencies
- allFilesUsed.push.apply(allFilesUsed, _toConsumableArray(globalsResult.dependencies));
-
- // Add globals externals
- allExternals = mergeExternals([allExternals, globalsResult.externals]);
- }
- } catch (err) {
- _iterator4.e(err);
- } finally {
- _iterator4.f();
- }
- case 18:
- currentMark = performanceMeasure(externalsMergedMark, {
- mark: 'Globals Loaded',
- measure: 'Globals Loading'
- }, [functionName, url || fileName], true);
- allExtraFiles = {}; // Load all extra files if any exist and we have a URL
- if (!(Object.keys(extraFilesToLoad).length > 0)) {
- _context6.n = 24;
- break;
- }
- if (url) {
- _context6.n = 21;
- break;
- }
- // If there's no URL, we can only load extra files that have inline source or absolute URLs
- loadableFiles = {};
- for (_i8 = 0, _Object$entries4 = Object.entries(extraFilesToLoad); _i8 < _Object$entries4.length; _i8++) {
- _Object$entries4$_i = _slicedToArray(_Object$entries4[_i8], 2), _key2 = _Object$entries4$_i[0], _value = _Object$entries4$_i[1];
- if (typeof _value !== 'string' && _value.source !== undefined) {
- // Inline source - can always load
- loadableFiles[_key2] = _value;
- } else if (typeof _value === 'string' && isAbsolutePath(_value)) {
- // Absolute URL - can load without base URL
- loadableFiles[_key2] = _value;
- } else {
- console.warn("Skipping extra file \"".concat(_key2, "\" - no URL provided and file requires loading from external source"));
- }
- }
- if (!(Object.keys(loadableFiles).length > 0)) {
- _context6.n = 20;
- break;
- }
- // Process loadable files: inline sources without URL-based loading, absolute URLs with loading
- for (_i9 = 0, _Object$entries5 = Object.entries(loadableFiles); _i9 < _Object$entries5.length; _i9++) {
- _Object$entries5$_i = _slicedToArray(_Object$entries5[_i9], 2), _key3 = _Object$entries5$_i[0], _value2 = _Object$entries5$_i[1];
- if (typeof _value2 !== 'string') {
- // Inline source - preserve metadata if it was marked as globals
- metadata = _value2.metadata || globalsFileKeys.has(_key3) ? true : undefined; // Derive language from filename extension
- _extension = _key3.slice(_key3.lastIndexOf('.'));
- extraFileLanguage = getLanguageFromExtension(_extension);
- allExtraFiles[normalizePathKey(_key3)] = _extends(_extends(_extends({
- source: _value2.source
- }, extraFileLanguage && {
+ // Add globals externals
+ allExternals = mergeExternals([allExternals, globalsResult.externals]);
+ }
+ }
+ currentMark = performanceMeasure(externalsMergedMark, {
+ mark: 'Globals Loaded',
+ measure: 'Globals Loading'
+ }, [functionName, url || fileName], true);
+ let allExtraFiles = {};
+
+ // Load all extra files if any exist and we have a URL
+ if (Object.keys(extraFilesToLoad).length > 0) {
+ if (!url) {
+ // If there's no URL, we can only load extra files that have inline source or absolute URLs
+ const loadableFiles = {};
+ for (const [key, value] of Object.entries(extraFilesToLoad)) {
+ if (typeof value !== 'string' && value.source !== undefined) {
+ // Inline source - can always load
+ loadableFiles[key] = value;
+ } else if (typeof value === 'string' && isAbsolutePath(value)) {
+ // Absolute URL - can load without base URL
+ loadableFiles[key] = value;
+ } else {
+ console.warn(`Skipping extra file "${key}" - no URL provided and file requires loading from external source`);
+ }
+ }
+ if (Object.keys(loadableFiles).length > 0) {
+ // Process loadable files: inline sources without URL-based loading, absolute URLs with loading
+ for (const [key, value] of Object.entries(loadableFiles)) {
+ if (typeof value !== 'string') {
+ // Inline source - preserve metadata if it was marked as globals
+ const metadata = value.metadata || globalsFileKeys.has(key) ? true : undefined;
+ // Derive language from filename extension
+ const extension = key.slice(key.lastIndexOf('.'));
+ const extraFileLanguage = getLanguageFromExtension(extension);
+ allExtraFiles[normalizePathKey(key)] = {
+ source: value.source,
+ ...(extraFileLanguage && {
  language: extraFileLanguage
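
The inline-source path above derives a highlighting language from the file extension via getLanguageFromExtension before storing the entry under a normalized path key. The actual lookup table lives elsewhere in the package; a reduced, illustrative version of that kind of mapping (the concrete entries are assumptions):

    // Illustrative only; the package's real extension-to-language table may differ.
    const EXTENSION_TO_LANGUAGE = {
      '.js': 'javascript',
      '.jsx': 'jsx',
      '.ts': 'typescript',
      '.tsx': 'tsx',
      '.css': 'css',
      '.json': 'json',
    };

    function languageFromExtensionSketch(extension) {
      return EXTENSION_TO_LANGUAGE[extension.toLowerCase()];
    }
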
- }),
- [5 removed lines; content not captured in this extract]
+ }),
+ ...(value.transforms && {
+ transforms: value.transforms
+ }),
+ ...(metadata !== undefined && {
+ metadata
+ })
+ };
  }
+ }

- [5 removed lines; content not captured in this extract]
- urlFilesToLoad[_key4] = _value3;
- }
- }
- if (!(Object.keys(urlFilesToLoad).length > 0)) {
- _context6.n = 20;
- break;
+ // For absolute URLs, we need to load them
+ const urlFilesToLoad = {};
+ for (const [key, value] of Object.entries(loadableFiles)) {
+ if (typeof value === 'string') {
+ urlFilesToLoad[key] = value;
  }
- [2 removed lines; content not captured in this extract]
+ }
+ if (Object.keys(urlFilesToLoad).length > 0) {
+ // Load absolute URL files even without base URL
+ const extraFilesResult = await loadExtraFiles(variantName, urlFilesToLoad, '',
  // No base URL needed for absolute URLs
  '',
  // No entry URL
- loadSource, sourceParser, sourceTransformers, loadSourceCache,
- [2 removed lines; content not captured in this extract]
+ loadSource, sourceParser, sourceTransformers, loadSourceCache, {
+ ...options,
+ loadedFiles
+ }, variant.allFilesListed || false, knownExtraFiles, globalsFileKeys // Pass globals file tracking
  );
- [4 removed lines; content not captured in this extract]
+ allExtraFiles = {
+ ...allExtraFiles,
+ ...extraFilesResult.extraFiles
+ };
+ allFilesUsed.push(...extraFilesResult.allFilesUsed);
  allExternals = mergeExternals([allExternals, extraFilesResult.allExternals]);
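
The rewritten entry construction above replaces the nested _extends(...) calls of the old build with conditional object spreads, so optional fields such as language, transforms, and metadata are simply omitted when absent. A small standalone example of the idiom:

    // Minimal illustration of the conditional-spread pattern; values are placeholders.
    const extraFileLanguage = 'tsx';
    const transforms = undefined;
    const entry = {
      source: 'export const x = 1;',
      ...(extraFileLanguage && { language: extraFileLanguage }),
      ...(transforms && { transforms }),
    };
    // entry is { source: 'export const x = 1;', language: 'tsx' } with no transforms key.
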
-
- _context6.n = 23;
- break;
- case 21:
- _context6.n = 22;
- return loadExtraFiles(variantName, extraFilesToLoad, url, url,
- // Entry URL is the same as the main file URL
- loadSource, sourceParser, sourceTransformers, loadSourceCache, _extends(_extends({}, options), {}, {
- loadedFiles: loadedFiles
- }), variant.allFilesListed || false, knownExtraFiles, globalsFileKeys // Pass globals file tracking
- );
- case 22:
- _extraFilesResult = _context6.v;
- allExtraFiles = _extraFilesResult.extraFiles;
- allFilesUsed.push.apply(allFilesUsed, _toConsumableArray(_extraFilesResult.allFilesUsed));
- allExternals = mergeExternals([allExternals, _extraFilesResult.allExternals]);
- case 23:
- currentMark = performanceMeasure(currentMark, {
- mark: 'Extra Files Loaded',
- measure: 'Extra Files Loading'
- }, [functionName, url || fileName], true);
- case 24:
- // Note: metadata marking is now handled during loadExtraFiles processing
- finalVariant = _extends(_extends({}, variant), {}, {
- language: language,
- source: mainFileResult.source,
- transforms: mainFileResult.transforms,
- extraFiles: Object.keys(allExtraFiles).length > 0 ? allExtraFiles : undefined,
- externals: Object.keys(allExternals).length > 0 ? Object.keys(allExternals) : undefined
- });
- return _context6.a(2, {
- code: finalVariant,
- dependencies: Array.from(new Set(allFilesUsed)),
- // Remove duplicates
- externals: allExternals
- });
+ }
  }
- }
- [2 removed lines; content not captured in this extract]
+ } else {
+ const extraFilesResult = await loadExtraFiles(variantName, extraFilesToLoad, url, url,
+ // Entry URL is the same as the main file URL
+ loadSource, sourceParser, sourceTransformers, loadSourceCache, {
+ ...options,
+ loadedFiles
+ }, variant.allFilesListed || false, knownExtraFiles, globalsFileKeys // Pass globals file tracking
+ );
+ allExtraFiles = extraFilesResult.extraFiles;
+ allFilesUsed.push(...extraFilesResult.allFilesUsed);
+ allExternals = mergeExternals([allExternals, extraFilesResult.allExternals]);
+ }
+ currentMark = performanceMeasure(currentMark, {
+ mark: 'Extra Files Loaded',
+ measure: 'Extra Files Loading'
+ }, [functionName, url || fileName], true);
+ }
+
+ // Note: metadata marking is now handled during loadExtraFiles processing
+
+ const finalVariant = {
+ ...variant,
+ language,
+ source: mainFileResult.source,
+ transforms: mainFileResult.transforms,
+ extraFiles: Object.keys(allExtraFiles).length > 0 ? allExtraFiles : undefined,
+ externals: Object.keys(allExternals).length > 0 ? Object.keys(allExternals) : undefined
+ };
+ return {
+ code: finalVariant,
+ dependencies: Array.from(new Set(allFilesUsed)),
+ // Remove duplicates
+ externals: allExternals
+ };
  }
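
Taken together, the rewritten function ends by returning the enriched variant alongside a de-duplicated dependency list and the merged externals map, mirroring the shape the removed regenerator-based code produced. A compact illustration of that result shape and the Set-based de-duplication used above (all values are placeholders):

    // Placeholder values; shows the { code, dependencies, externals } result shape.
    const allFilesUsed = ['./Demo.tsx', './theme.ts', './Demo.tsx'];
    const result = {
      code: { fileName: 'Demo.tsx', language: 'tsx', source: '/* ... */' },
      dependencies: Array.from(new Set(allFilesUsed)), // ['./Demo.tsx', './theme.ts']
      externals: { '@mui/material': ['Button'] },
    };
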