@mui/internal-docs-infra 0.3.1-canary.3 → 0.3.1-canary.5
This diff compares the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
- package/esm/CodeControllerContext/CodeControllerContext.js +2 -2
- package/esm/CodeExternalsContext/CodeExternalsContext.js +1 -1
- package/esm/CodeHighlighter/CodeHighlighter.js +247 -329
- package/esm/CodeHighlighter/CodeHighlighterClient.js +447 -653
- package/esm/CodeHighlighter/CodeHighlighterContext.js +2 -2
- package/esm/CodeHighlighter/CodeHighlighterFallbackContext.js +2 -2
- package/esm/CodeHighlighter/codeToFallbackProps.js +21 -37
- package/esm/CodeHighlighter/errors.js +248 -400
- package/esm/CodeHighlighter/parseControlledCode.js +12 -20
- package/esm/CodeProvider/CodeContext.js +3 -3
- package/esm/CodeProvider/CodeProvider.js +31 -40
- package/esm/abstractCreateDemo/abstractCreateDemo.js +13 -17
- package/esm/abstractCreateDemoClient/abstractCreateDemoClient.js +12 -12
- package/esm/cli/index.js +1 -1
- package/esm/cli/runValidate.js +160 -264
- package/esm/createDemoData/createDemoData.js +11 -12
- package/esm/createSitemap/createSitemap.js +2 -2
- package/esm/pipeline/getFileConventions/fileConventions.js +1 -1
- package/esm/pipeline/getFileConventions/getFileConventions.js +2 -15
- package/esm/pipeline/hastUtils/hastUtils.js +16 -17
- package/esm/pipeline/loadCodeVariant/addCodeVariantPaths.js +24 -24
- package/esm/pipeline/loadCodeVariant/applyCodeTransform.js +12 -22
- package/esm/pipeline/loadCodeVariant/calculateMainFilePath.js +30 -37
- package/esm/pipeline/loadCodeVariant/computeHastDeltas.js +107 -185
- package/esm/pipeline/loadCodeVariant/diffHast.js +18 -53
- package/esm/pipeline/loadCodeVariant/examineCodeVariant.js +24 -27
- package/esm/pipeline/loadCodeVariant/flattenCodeVariant.js +9 -10
- package/esm/pipeline/loadCodeVariant/hasAllCodeVariants.js +5 -5
- package/esm/pipeline/loadCodeVariant/loadCodeFallback.js +516 -731
- package/esm/pipeline/loadCodeVariant/loadCodeVariant.js +679 -1079
- package/esm/pipeline/loadCodeVariant/maybeCodeInitialData.js +14 -20
- package/esm/pipeline/loadCodeVariant/mergeCodeMetadata.js +53 -63
- package/esm/pipeline/loadCodeVariant/parseCode.js +40 -48
- package/esm/pipeline/loadCodeVariant/pathUtils.js +43 -64
- package/esm/pipeline/loadCodeVariant/transformSource.js +55 -125
- package/esm/pipeline/loadPrecomputedCodeHighlighter/loadPrecomputedCodeHighlighter.js +160 -221
- package/esm/pipeline/loadPrecomputedCodeHighlighter/parseCreateFactoryCall.js +377 -479
- package/esm/pipeline/loadPrecomputedCodeHighlighter/parseFunctionArguments.js +171 -173
- package/esm/pipeline/loadPrecomputedCodeHighlighter/performanceLogger.js +14 -30
- package/esm/pipeline/loadPrecomputedCodeHighlighter/replacePrecomputeValue.js +19 -21
- package/esm/pipeline/loadPrecomputedCodeHighlighter/serializeFunctionArguments.js +37 -71
- package/esm/pipeline/loadPrecomputedCodeHighlighterClient/filterRuntimeExternals.js +3 -9
- package/esm/pipeline/loadPrecomputedCodeHighlighterClient/generateImportStatements.js +54 -80
- package/esm/pipeline/loadPrecomputedCodeHighlighterClient/generateResolvedExternals.js +71 -98
- package/esm/pipeline/loadPrecomputedCodeHighlighterClient/injectImportsIntoSource.js +5 -5
- package/esm/pipeline/loadPrecomputedCodeHighlighterClient/loadPrecomputedCodeHighlighterClient.js +161 -211
- package/esm/pipeline/loadPrecomputedSitemap/loadPrecomputedSitemap.js +159 -207
- package/esm/pipeline/loadServerCodeMeta/loadServerCodeMeta.js +42 -64
- package/esm/pipeline/loadServerCodeMeta/resolveModulePathWithFs.js +20 -96
- package/esm/pipeline/loadServerPageIndex/loadServerPageIndex.js +66 -85
- package/esm/pipeline/loadServerSitemap/loadServerSitemap.js +71 -118
- package/esm/pipeline/loadServerSource/loadServerSource.js +121 -148
- package/esm/pipeline/loaderUtils/externalsToPackages.js +7 -7
- package/esm/pipeline/loaderUtils/extractNameAndSlugFromUrl.js +8 -12
- package/esm/pipeline/loaderUtils/fileUrlToPortablePath.js +5 -5
- package/esm/pipeline/loaderUtils/getFileNameFromUrl.js +19 -29
- package/esm/pipeline/loaderUtils/getLanguageFromExtension.js +3 -4
- package/esm/pipeline/loaderUtils/mergeExternals.js +15 -35
- package/esm/pipeline/loaderUtils/parseImportsAndComments.js +413 -433
- package/esm/pipeline/loaderUtils/processRelativeImports.js +153 -239
- package/esm/pipeline/loaderUtils/resolveModulePath.js +544 -1303
- package/esm/pipeline/loaderUtils/rewriteImports.js +73 -111
- package/esm/pipeline/parseSource/addLineGutters.js +33 -45
- package/esm/pipeline/parseSource/grammars.js +3 -3
- package/esm/pipeline/parseSource/parseSource.js +13 -31
- package/esm/pipeline/syncPageIndex/createMarkdownNodes.js +32 -55
- package/esm/pipeline/syncPageIndex/mergeMetadataMarkdown.js +107 -160
- package/esm/pipeline/syncPageIndex/metadataToMarkdown.js +846 -1033
- package/esm/pipeline/syncPageIndex/syncPageIndex.js +291 -438
- package/esm/pipeline/transformHtmlCodePrecomputed/transformHtmlCodePrecomputed.js +213 -311
- package/esm/pipeline/transformMarkdownBlockquoteCallouts/transformMarkdownBlockquoteCallouts.js +10 -10
- package/esm/pipeline/transformMarkdownCode/transformMarkdownCode.js +133 -193
- package/esm/pipeline/transformMarkdownDemoLinks/transformMarkdownDemoLinks.js +25 -27
- package/esm/pipeline/transformMarkdownMetadata/transformMarkdownMetadata.js +572 -717
- package/esm/pipeline/transformMarkdownRelativePaths/transformMarkdownRelativePaths.js +8 -8
- package/esm/pipeline/transformTypescriptToJavascript/removeTypes.js +84 -113
- package/esm/pipeline/transformTypescriptToJavascript/transformTypescriptToJavascript.js +10 -26
- package/esm/useCode/Pre.js +58 -62
- package/esm/useCode/useCode.js +59 -61
- package/esm/useCode/useCodeUtils.js +54 -63
- package/esm/useCode/useCopyFunctionality.js +10 -9
- package/esm/useCode/useFileNavigation.js +150 -212
- package/esm/useCode/useSourceEditing.js +17 -14
- package/esm/useCode/useTransformManagement.js +23 -26
- package/esm/useCode/useUIState.js +12 -20
- package/esm/useCode/useVariantSelection.js +62 -79
- package/esm/useCopier/index.js +29 -56
- package/esm/useDemo/createCodeSandbox.js +12 -15
- package/esm/useDemo/createStackBlitz.js +14 -20
- package/esm/useDemo/exportVariant.js +200 -180
- package/esm/useDemo/exportVariantAsCra.js +22 -25
- package/esm/useDemo/useDemo.js +80 -84
- package/esm/useErrors/ErrorsContext.js +1 -1
- package/esm/useErrors/useErrors.js +3 -3
- package/esm/useLocalStorageState/useLocalStorageState.js +23 -39
- package/esm/usePreference/PreferencesProvider.js +1 -1
- package/esm/usePreference/usePreference.js +9 -11
- package/esm/useSearch/useSearch.js +290 -387
- package/esm/useUrlHashState/useUrlHashState.js +11 -14
- package/esm/withDocsInfra/withDeploymentConfig.js +26 -21
- package/esm/withDocsInfra/withDocsInfra.js +99 -101
- package/package.json +7 -4
package/esm/pipeline/loadCodeVariant/addCodeVariantPaths.js

@@ -1,5 +1,3 @@
-import _slicedToArray from "@babel/runtime/helpers/esm/slicedToArray";
-import _extends from "@babel/runtime/helpers/esm/extends";
 /**
  * Add path utility to add path property to each file in a variant
  * Uses calculateMainFilePath utility and URL resolution for simplified path calculation
@@ -11,31 +9,32 @@ import { calculateMainFilePath } from "./calculateMainFilePath.js";
  * Add flat paths to all files in a variant
  */
 export function addPathsToVariant(variant) {
-
-
+  const url = variant.url || '';
+  const fileName = variant.fileName || '';

   // Calculate actual back navigation needed based on extraFiles
-
+  const backNavResult = variant.extraFiles ? calculateMaxBackNavigation(variant.extraFiles) : {
     maxBackNavigation: 0,
     maxSourceBackNavigation: 0
   };

   // Create a synthetic URL for variants without URL to ensure consistent processing
-
+  let effectiveUrl = url;
   if (!url && fileName) {
-    effectiveUrl =
+    effectiveUrl = `file:///${fileName}`;
   }

   // Calculate main file path using only the back navigation needed by source files
-
+  const mainFileUrl = effectiveUrl ? calculateMainFilePath(effectiveUrl, backNavResult.maxBackNavigation, backNavResult.maxSourceBackNavigation, variant.metadataPrefix, fileName || undefined // Only pass fileName if it's not empty
   ) : undefined;

   // Extract just the path part from the file:// URL and remove leading slash
-
-  return
-
-
-
+  const path = mainFileUrl ? new URL(mainFileUrl).pathname.slice(1) : undefined;
+  return {
+    ...variant,
+    path,
+    extraFiles: calculateExtraFilesPaths(variant.extraFiles, mainFileUrl || `file:///${backNavResult.maxBackNavigation > 0 ? `${createSyntheticDirectories(backNavResult.maxBackNavigation).join('/')}/` : ''}temp.txt`)
+  };
 }

 /**
@@ -45,24 +44,25 @@ function calculateExtraFilesPaths(extraFiles, mainFileUrl) {
   if (!extraFiles) {
     return undefined;
   }
-
-  for (
-    var _Object$entries$_i = _slicedToArray(_Object$entries[_i], 2),
-      relativePath = _Object$entries$_i[0],
-      fileContent = _Object$entries$_i[1];
+  const result = {};
+  for (const [relativePath, fileContent] of Object.entries(extraFiles)) {
     // Resolve the relative path against the main file URL
     try {
-
-
+      const resolvedUrl = new URL(relativePath, mainFileUrl);
+      const file = typeof fileContent === 'string' ? {
        source: fileContent
      } : fileContent;
-      result[relativePath] =
+      result[relativePath] = {
+        ...file,
        path: resolvedUrl.pathname.slice(1)
-      }
-    } catch
+      };
+    } catch {
      // If URL resolution fails, skip this file
      continue;
    }
  }
-  return
+  return {
+    ...extraFiles,
+    ...result
+  };
 }
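Both rewritten helpers above now lean on native WHATWG URL resolution instead of Babel runtime helpers. A minimal standalone sketch of that technique, using hypothetical file names rather than the package's actual variant shape:

// Resolve extra-file paths against a main file URL, as the rewritten code does.
const mainFileUrl = 'file:///docs/app/demos/Demo.tsx'; // hypothetical example
const extraFiles = {
  './styles.module.css': '/* css */',
  '../shared/theme.ts': 'export const theme = {};',
};
const withPaths = Object.fromEntries(
  Object.entries(extraFiles).map(([relativePath, source]) => {
    // URL resolution handles './' and '../' segments; slice(1) drops the leading '/'.
    const resolved = new URL(relativePath, mainFileUrl);
    return [relativePath, { source, path: resolved.pathname.slice(1) }];
  }),
);
console.log(withPaths['../shared/theme.ts'].path); // "docs/app/shared/theme.ts"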
package/esm/pipeline/loadCodeVariant/applyCodeTransform.js

@@ -1,4 +1,3 @@
-import _createForOfIteratorHelper from "@babel/runtime/helpers/esm/createForOfIteratorHelper";
 import { patch, clone } from 'jsondiffpatch';
 /**
  * Applies a specific transform to a variant source and returns the transformed source
@@ -9,25 +8,25 @@ import { patch, clone } from 'jsondiffpatch';
  * @throws Error if the transform key doesn't exist or patching fails
  */
 export function applyCodeTransform(source, transforms, transformKey) {
-
+  const transform = transforms[transformKey];
   if (!transform) {
-    throw new Error(
+    throw new Error(`Transform "${transformKey}" not found in transforms`);
   }

   // Determine the format of the source and apply the appropriate transform strategy
   if (typeof source === 'string') {
     // For string sources, deltas are typically line-array based (from transformSource)
-
-
+    const sourceLines = source.split('\n');
+    const patched = patch(sourceLines, transform.delta);
     if (!Array.isArray(patched)) {
-      throw new Error(
+      throw new Error(`Patch for transform "${transformKey}" did not return an array`);
     }
     return patched.join('\n');
   }

   // For Hast node sources, deltas are typically node-based (from diffHast)
-
-
+  let sourceRoot;
+  const isHastJson = 'hastJson' in source;
   if (isHastJson) {
     sourceRoot = JSON.parse(source.hastJson);
   } else {
@@ -35,9 +34,9 @@ export function applyCodeTransform(source, transforms, transformKey) {
   }

   // Apply the node-based delta
-
+  const patchedNodes = patch(clone(sourceRoot), transform.delta);
   if (!patchedNodes) {
-    throw new Error(
+    throw new Error(`Patch for transform "${transformKey}" returned null/undefined`);
   }

   // Return in the same format as the input
@@ -58,18 +57,9 @@ export function applyCodeTransform(source, transforms, transformKey) {
  * @throws Error if any transform key doesn't exist or patching fails
  */
 export function applyCodeTransforms(source, transforms, transformKeys) {
-
-
-
-  try {
-    for (_iterator.s(); !(_step = _iterator.n()).done;) {
-      var transformKey = _step.value;
-      currentSource = applyCodeTransform(currentSource, transforms, transformKey);
-    }
-  } catch (err) {
-    _iterator.e(err);
-  } finally {
-    _iterator.f();
+  let currentSource = source;
+  for (const transformKey of transformKeys) {
+    currentSource = applyCodeTransform(currentSource, transforms, transformKey);
   }
   return currentSource;
 }
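The string branch of applyCodeTransform treats a source as an array of lines and applies a jsondiffpatch delta to that array. A self-contained sketch of the round trip; it assumes jsondiffpatch exposes a named diff export alongside the patch and clone imports shown above:

import { diff, patch } from 'jsondiffpatch';

const original = ['const a = 1;', 'const b = 2;', 'console.log(a + b);'].join('\n');
const edited = ['const a = 1;', 'const b = 40;', 'console.log(a + b);'].join('\n');

// Deltas are computed and applied against line arrays, not the raw strings.
const delta = diff(original.split('\n'), edited.split('\n'));
const patched = patch(original.split('\n'), delta);

console.log(Array.isArray(patched)); // true
console.log(patched.join('\n') === edited); // true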
package/esm/pipeline/loadCodeVariant/calculateMainFilePath.js

@@ -1,9 +1,8 @@
-import _toConsumableArray from "@babel/runtime/helpers/esm/toConsumableArray";
 import { createSyntheticDirectories, buildPath } from "./pathUtils.js";
 export function calculateMainFilePath(url, maxBackNav, maxSourceBackNav, metadataPrefix, fileName) {
   // Handle optional parameters with defaults
-
-
+  const actualMaxSourceBackNav = maxSourceBackNav ?? maxBackNav;
+  const actualMetadataPrefix = metadataPrefix ?? '';

   // Handle empty URL
   if (!url) {
@@ -11,15 +10,15 @@ export function calculateMainFilePath(url, maxBackNav, maxSourceBackNav, metadat
   }

   // Extract URL parts and filename
-
-
-
+  let urlSegments;
+  let filename;
+  let hasTrailingSlash = false;
   try {
     // Try to parse as URL first
-
+    const urlObj = new URL(url);
     hasTrailingSlash = urlObj.pathname.endsWith('/');
-
-
+    const pathSegments = urlObj.pathname.split('/').filter(Boolean);
+    let baseFilename;
     if (hasTrailingSlash) {
       // If URL ends with /, there's no filename - all segments are path segments
       baseFilename = '';
@@ -36,73 +35,67 @@ export function calculateMainFilePath(url, maxBackNav, maxSourceBackNav, metadat
     } else {
       filename = baseFilename + urlObj.search + urlObj.hash;
       if (hasTrailingSlash && !baseFilename) {
-        filename =
+        filename = `${filename}/`;
       }
     }
-  } catch
+  } catch {
     // Fallback to simple string parsing for relative paths
     hasTrailingSlash = url.endsWith('/');
-
-
+    const urlParts = url.split('/');
+    let baseFilename;
     if (hasTrailingSlash) {
       // If URL ends with /, there's no filename - all segments are path segments
-
-      urlSegments = urlParts.filter(
-        return part !== '';
-      }); // Remove empty segments
+      baseFilename = urlParts.pop() || ''; // Remove the empty string after trailing slash
+      urlSegments = urlParts.filter(part => part !== ''); // Remove empty segments
     } else {
       // Normal case - last segment is the filename
-
-      urlSegments = urlParts.filter(
-        return part !== '';
-      }); // Remove empty segments
+      baseFilename = urlParts.pop() || '';
+      urlSegments = urlParts.filter(part => part !== ''); // Remove empty segments
     }

     // Use provided fileName or fallback to extracted baseFilename
     if (fileName !== undefined) {
       filename = fileName;
     } else {
-      filename =
-      if (hasTrailingSlash && !
-        filename =
+      filename = baseFilename;
+      if (hasTrailingSlash && !baseFilename) {
+        filename = `${filename}/`;
       }
     }
   }

   // Work with a copy of URL segments to avoid mutations
-
+  const remainingUrlSegments = [...urlSegments];

   // Take actualMaxSourceBackNav items from the end for sourcePath
-
+  const sourcePath = remainingUrlSegments.splice(-actualMaxSourceBackNav, actualMaxSourceBackNav);

   // Calculate unhandledBackNav, accounting for missing sourcePath segments
-
+  let unhandledBackNav = maxBackNav - actualMaxSourceBackNav;

   // Add any missing sourcePath segments to unhandledBackNav
-
+  const missingSourcePathSegments = actualMaxSourceBackNav - sourcePath.length;
   unhandledBackNav += missingSourcePathSegments;

   // Split actualMetadataPrefix and subtract that count from unhandledBackNav
-
-    return part !== '';
-  });
+  const metadataPrefixSegments = actualMetadataPrefix.split('/').filter(part => part !== '');
   unhandledBackNav -= metadataPrefixSegments.length;

   // Calculate metadataPath from remaining URL segments (what's left after sourcePath)
-
-
-
+  const metadataSegmentsNeeded = Math.max(0, unhandledBackNav);
+  const metadataSegmentsAvailable = Math.min(metadataSegmentsNeeded, remainingUrlSegments.length);
+  const metadataPath = remainingUrlSegments.splice(-metadataSegmentsAvailable, metadataSegmentsAvailable);

   // Update unhandledBackNav with segments we couldn't fulfill from URL
   unhandledBackNav = metadataSegmentsNeeded - metadataSegmentsAvailable;

   // Create synthetic directories for any remaining unhandledBackNav
-
+  const syntheticDirs = createSyntheticDirectories(unhandledBackNav);

   // Combine all parts to create the final path using buildPath utility
   // Order: [synthetic directories] + [metadataPath] + [metadataPrefix] + [sourcePath] + [filename]
-
+  const path = buildPath(syntheticDirs, metadataPath, metadataPrefixSegments, sourcePath, filename);

   // Return as file:// URL unless the result is empty
-  return path ?
+  return path ? `file:///${path}` : path;
 }
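calculateMainFilePath now parses with new URL(url) first and falls back to plain string splitting for relative paths, then peels segments off the end with splice. A condensed sketch of that parse-or-fallback pattern on a hypothetical URL (not the package's full segment bookkeeping):

function splitIntoSegments(url) {
  // Prefer real URL parsing; fall back to string splitting for relative paths.
  try {
    return new URL(url).pathname.split('/').filter(Boolean);
  } catch {
    return url.split('/').filter(part => part !== '');
  }
}

const segments = splitIntoSegments('file:///docs/data/components/button/demos/BasicButton.tsx');
const remaining = [...segments];            // work on a copy to avoid mutations
const filename = remaining.pop();           // "BasicButton.tsx"
const sourcePath = remaining.splice(-2, 2); // ["button", "demos"]
console.log(filename, sourcePath, remaining); // remaining: ["docs", "data", "components"]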
package/esm/pipeline/loadCodeVariant/computeHastDeltas.js

@@ -1,9 +1,3 @@
-import _createForOfIteratorHelper from "@babel/runtime/helpers/esm/createForOfIteratorHelper";
-import _regenerator from "@babel/runtime/helpers/esm/regenerator";
-import _extends from "@babel/runtime/helpers/esm/extends";
-import _asyncToGenerator from "@babel/runtime/helpers/esm/asyncToGenerator";
-import _typeof from "@babel/runtime/helpers/esm/typeof";
-import _slicedToArray from "@babel/runtime/helpers/esm/slicedToArray";
 import { toText } from 'hast-util-to-text';
 import { diffHast } from "./diffHast.js";

@@ -12,20 +6,16 @@ import { diffHast } from "./diffHast.js";
  * Returns entries of variants that have transforms requiring processing.
  */
 export function getVariantsToTransform(parsedCode) {
-  return Object.entries(parsedCode).filter(
-
-    variantCode = _ref2[1];
-    if (!variantCode || _typeof(variantCode) !== 'object') {
+  return Object.entries(parsedCode).filter(([, variantCode]) => {
+    if (!variantCode || typeof variantCode !== 'object') {
       return false;
     }

     // Check if main source has transforms and needs processing
-
+    const mainSourceNeedsTransform = variantCode.transforms && variantCode.source && typeof variantCode.source !== 'string' && !('hastJson' in variantCode.source);

     // Check if any extraFiles have transforms and need processing
-
-      return _typeof(fileContent) === 'object' && fileContent && fileContent.transforms && fileContent.source && typeof fileContent.source !== 'string' && !('hastJson' in fileContent.source);
-    }) : false;
+    const extraFilesNeedTransform = variantCode.extraFiles ? Object.values(variantCode.extraFiles).some(fileContent => typeof fileContent === 'object' && fileContent && fileContent.transforms && fileContent.source && typeof fileContent.source !== 'string' && !('hastJson' in fileContent.source)) : false;
     return mainSourceNeedsTransform || extraFilesNeedTransform;
   });
 }
@@ -35,22 +25,22 @@ export function getVariantsToTransform(parsedCode) {
  * Only includes transforms that have actual deltas (file changes), not just filename changes.
  */
 export function getAvailableTransforms(parsedCode, variantName) {
-
-  if (!currentVariant ||
+  const currentVariant = parsedCode?.[variantName];
+  if (!currentVariant || typeof currentVariant !== 'object') {
     return [];
   }
-
+  const transforms = new Set();

   // Check main variant transforms
   if (currentVariant.transforms) {
-    Object.keys(currentVariant.transforms).forEach(
-
+    Object.keys(currentVariant.transforms).forEach(transformKey => {
+      const transformData = currentVariant.transforms[transformKey];
       // Only include transforms that have actual deltas (file changes)
       // Check if delta exists and is not empty
-      if (transformData &&
-
+      if (transformData && typeof transformData === 'object' && 'delta' in transformData) {
+        const delta = transformData.delta;
         // Check if delta has meaningful content (not just an empty object)
-
+        const hasContent = delta && typeof delta === 'object' && Object.keys(delta).length > 0;
         if (hasContent) {
           transforms.add(transformKey);
         }
@@ -60,16 +50,16 @@ export function getAvailableTransforms(parsedCode, variantName) {

   // Check extraFiles for transforms with deltas
   if (currentVariant.extraFiles) {
-    Object.values(currentVariant.extraFiles).forEach(
-      if (fileData &&
-        Object.keys(fileData.transforms).forEach(
-
+    Object.values(currentVariant.extraFiles).forEach(fileData => {
+      if (fileData && typeof fileData === 'object' && 'transforms' in fileData && fileData.transforms) {
+        Object.keys(fileData.transforms).forEach(transformKey => {
+          const transformData = fileData.transforms[transformKey];
           // Only include transforms that have actual deltas (file changes)
           // Check if delta exists and is not empty
-          if (transformData &&
-
+          if (transformData && typeof transformData === 'object' && 'delta' in transformData) {
+            const delta = transformData.delta;
             // Check if delta has meaningful content (not just an empty object)
-
+            const hasContent = delta && typeof delta === 'object' && Object.keys(delta).length > 0;
             if (hasContent) {
               transforms.add(transformKey);
             }
@@ -85,8 +75,59 @@ export function getAvailableTransforms(parsedCode, variantName) {
  * Pure async function to transform a single variant's code and extraFiles.
  * Returns the transformed variant or the original if transformation fails.
  */
-export function computeVariantDeltas(
-
+export async function computeVariantDeltas(variant, variantCode, parseSource) {
+  // Type guard
+  if (typeof variantCode !== 'object' || !variantCode) {
+    return variantCode;
+  }
+  let mainTransformResult;
+  let transformedExtraFiles;
+
+  // Process main source transforms if applicable
+  if (variantCode.transforms && variantCode.source && typeof variantCode.source !== 'string' && !('hastJson' in variantCode.source)) {
+    const hastNodes = variantCode.source;
+    const sourceString = toText(hastNodes, {
+      whitespace: 'pre'
+    });
+    mainTransformResult = await diffHast(sourceString, hastNodes, variant,
+    // fileName
+    variantCode.transforms, parseSource);
+  }
+
+  // Process extraFiles transforms if applicable
+  if (variantCode.extraFiles) {
+    transformedExtraFiles = await Promise.all(Object.entries(variantCode.extraFiles).map(async ([fileName, fileContent]) => {
+      if (typeof fileContent === 'object' && fileContent && fileContent.transforms && fileContent.source && typeof fileContent.source !== 'string' && !('hastJson' in fileContent.source)) {
+        try {
+          const extraHastNodes = fileContent.source;
+          const extraSourceString = toText(extraHastNodes, {
+            whitespace: 'pre'
+          });
+          const extraTransformResult = await diffHast(extraSourceString, extraHastNodes, fileName, fileContent.transforms, parseSource);
+          return [fileName, {
+            ...fileContent,
+            transforms: extraTransformResult
+          }];
+        } catch (error) {
+          console.error(`Failed to transform extraFile ${fileName}:`, error);
+          return [fileName, fileContent];
+        }
+      }
+      return [fileName, fileContent];
+    })).then(entries => Object.fromEntries(entries));
+  }
+
+  // Update the variant with the computed results
+  const transformedVariant = {
+    ...variantCode,
+    ...(mainTransformResult && {
+      transforms: mainTransformResult
+    }),
+    ...(transformedExtraFiles && {
+      extraFiles: transformedExtraFiles
+    })
+  };
+  return transformedVariant;
 }

 /**
@@ -97,159 +138,40 @@ export function computeVariantDeltas(_x, _x2, _x3) {
  * @param parseSource - The parser function to parse source strings
  * @returns A promise that resolves to the code with computed transforms
  */
-function
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          variantCode.transforms, parseSource);
-        case 2:
-          mainTransformResult = _context2.v;
-        case 3:
-          if (!variantCode.extraFiles) {
-            _context2.n = 5;
-            break;
-          }
-          _context2.n = 4;
-          return Promise.all(Object.entries(variantCode.extraFiles).map(/*#__PURE__*/function () {
-            var _ref4 = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee(_ref3) {
-              var _ref5, fileName, fileContent, extraHastNodes, extraSourceString, extraTransformResult, _t;
-              return _regenerator().w(function (_context) {
-                while (1) switch (_context.p = _context.n) {
-                  case 0:
-                    _ref5 = _slicedToArray(_ref3, 2), fileName = _ref5[0], fileContent = _ref5[1];
-                    if (!(_typeof(fileContent) === 'object' && fileContent && fileContent.transforms && fileContent.source && typeof fileContent.source !== 'string' && !('hastJson' in fileContent.source))) {
-                      _context.n = 4;
-                      break;
-                    }
-                    _context.p = 1;
-                    extraHastNodes = fileContent.source;
-                    extraSourceString = toText(extraHastNodes, {
-                      whitespace: 'pre'
-                    });
-                    _context.n = 2;
-                    return diffHast(extraSourceString, extraHastNodes, fileName, fileContent.transforms, parseSource);
-                  case 2:
-                    extraTransformResult = _context.v;
-                    return _context.a(2, [fileName, _extends(_extends({}, fileContent), {}, {
-                      transforms: extraTransformResult
-                    })]);
-                  case 3:
-                    _context.p = 3;
-                    _t = _context.v;
-                    console.error("Failed to transform extraFile ".concat(fileName, ":"), _t);
-                    return _context.a(2, [fileName, fileContent]);
-                  case 4:
-                    return _context.a(2, [fileName, fileContent]);
-                }
-              }, _callee, null, [[1, 3]]);
-            }));
-            return function (_x6) {
-              return _ref4.apply(this, arguments);
-            };
-          }())).then(function (entries) {
-            return Object.fromEntries(entries);
-          });
-        case 4:
-          transformedExtraFiles = _context2.v;
-        case 5:
-          // Update the variant with the computed results
-          transformedVariant = _extends(_extends(_extends({}, variantCode), mainTransformResult && {
-            transforms: mainTransformResult
-          }), transformedExtraFiles && {
-            extraFiles: transformedExtraFiles
-          });
-          return _context2.a(2, transformedVariant);
-      }
-    }, _callee2);
-  }));
-  return _computeVariantDeltas.apply(this, arguments);
-}
-export function computeHastDeltas(_x4, _x5) {
-  return _computeHastDeltas.apply(this, arguments);
-}
-function _computeHastDeltas() {
-  _computeHastDeltas = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee4(parsedCode, parseSource) {
-    var variantsToTransform, results, enhancedCode, _iterator, _step, _step$value, variant, transformedVariant;
-    return _regenerator().w(function (_context4) {
-      while (1) switch (_context4.n) {
-        case 0:
-          variantsToTransform = getVariantsToTransform(parsedCode);
-          if (!(variantsToTransform.length === 0)) {
-            _context4.n = 1;
-            break;
-          }
-          return _context4.a(2, parsedCode);
-        case 1:
-          _context4.n = 2;
-          return Promise.all(variantsToTransform.map(/*#__PURE__*/function () {
-            var _ref7 = _asyncToGenerator(/*#__PURE__*/_regenerator().m(function _callee3(_ref6) {
-              var _ref8, variant, variantCode, transformedVariant, _t2;
-              return _regenerator().w(function (_context3) {
-                while (1) switch (_context3.p = _context3.n) {
-                  case 0:
-                    _ref8 = _slicedToArray(_ref6, 2), variant = _ref8[0], variantCode = _ref8[1];
-                    _context3.p = 1;
-                    _context3.n = 2;
-                    return computeVariantDeltas(variant, variantCode, parseSource);
-                  case 2:
-                    transformedVariant = _context3.v;
-                    return _context3.a(2, {
-                      variant: variant,
-                      transformedVariant: transformedVariant
-                    });
-                  case 3:
-                    _context3.p = 3;
-                    _t2 = _context3.v;
-                    // Keep original variant if transformation fails
-                    console.error("Failed to transform variant ".concat(variant, ":"), _t2);
-                    return _context3.a(2, {
-                      variant: variant,
-                      transformedVariant: variantCode
-                    });
-                }
-              }, _callee3, null, [[1, 3]]);
-            }));
-            return function (_x7) {
-              return _ref7.apply(this, arguments);
-            };
-          }()));
-        case 2:
-          results = _context4.v;
-          // Apply the transformations to create the enhanced code
-          enhancedCode = _extends({}, parsedCode);
-          _iterator = _createForOfIteratorHelper(results);
-          try {
-            for (_iterator.s(); !(_step = _iterator.n()).done;) {
-              _step$value = _step.value, variant = _step$value.variant, transformedVariant = _step$value.transformedVariant;
-              enhancedCode[variant] = transformedVariant;
-            }
-          } catch (err) {
-            _iterator.e(err);
-          } finally {
-            _iterator.f();
-          }
-          return _context4.a(2, enhancedCode);
-      }
-    }, _callee4);
+export async function computeHastDeltas(parsedCode, parseSource) {
+  const variantsToTransform = getVariantsToTransform(parsedCode);
+  if (variantsToTransform.length === 0) {
+    // No variants need transformation
+    return parsedCode;
+  }
+
+  // Process transformations for all variants
+  const results = await Promise.all(variantsToTransform.map(async ([variant, variantCode]) => {
+    try {
+      const transformedVariant = await computeVariantDeltas(variant, variantCode, parseSource);
+      return {
+        variant,
+        transformedVariant
+      };
+    } catch (error) {
+      // Keep original variant if transformation fails
+      console.error(`Failed to transform variant ${variant}:`, error);
+      return {
+        variant,
+        transformedVariant: variantCode
+      };
+    }
   }));
-
+
+  // Apply the transformations to create the enhanced code
+  const enhancedCode = {
+    ...parsedCode
+  };
+  for (const {
+    variant,
+    transformedVariant
+  } of results) {
+    enhancedCode[variant] = transformedVariant;
+  }
+  return enhancedCode;
 }
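The rewritten computeHastDeltas fans out over variants with Promise.all and keeps the original value when an individual transform throws, so one failing variant cannot reject the whole batch. A reduced sketch of that pattern, with a hypothetical async worker standing in for computeVariantDeltas:

// Hypothetical worker; the real computeVariantDeltas diffs hast trees via diffHast.
async function transformVariant(name, value) {
  if (value == null) {
    throw new Error(`nothing to transform for ${name}`);
  }
  return { ...value, transformed: true };
}

async function transformAll(parsed) {
  const results = await Promise.all(
    Object.entries(parsed).map(async ([variant, variantCode]) => {
      try {
        return { variant, transformedVariant: await transformVariant(variant, variantCode) };
      } catch (error) {
        // Keep the original value if one variant fails; log and move on.
        console.error(`Failed to transform variant ${variant}:`, error);
        return { variant, transformedVariant: variantCode };
      }
    }),
  );
  const enhanced = { ...parsed };
  for (const { variant, transformedVariant } of results) {
    enhanced[variant] = transformedVariant;
  }
  return enhanced;
}

transformAll({ js: { source: 'x' }, ts: null }).then(out => console.log(out));
// { js: { source: 'x', transformed: true }, ts: null }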