@mintlify/previewing 4.0.826 → 4.0.828
This diff shows the changes between publicly released versions of the package as published to a supported registry. It is provided for informational purposes only.
- package/dist/local-preview/listener/generateDependentSnippets.d.ts +1 -1
- package/dist/local-preview/listener/generateDependentSnippets.js +7 -7
- package/dist/local-preview/listener/generatePagesWithImports.d.ts +1 -1
- package/dist/local-preview/listener/generatePagesWithImports.js +5 -6
- package/dist/local-preview/listener/getSnippets.d.ts +2 -2
- package/dist/local-preview/listener/getSnippets.js +5 -5
- package/dist/local-preview/listener/index.js +7 -24
- package/dist/local-preview/listener/resolveAllImports.d.ts +1 -1
- package/dist/local-preview/listener/resolveAllImports.js +2 -3
- package/dist/local-preview/listener/utils.d.ts +0 -1
- package/dist/local-preview/listener/utils.js +0 -14
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/package.json +6 -6
package/dist/local-preview/listener/generateDependentSnippets.d.ts
@@ -1,2 +1,2 @@
 import type { FindAndRemoveImportsResponse } from '@mintlify/common';
-export declare const generateDependentSnippets: (changedFilename: string, newImportData: FindAndRemoveImportsResponse
+export declare const generateDependentSnippets: (changedFilename: string, newImportData: FindAndRemoveImportsResponse) => Promise<string[]>;

package/dist/local-preview/listener/generateDependentSnippets.js
@@ -4,7 +4,7 @@ import { join } from 'path';
 import { NEXT_PUBLIC_PATH } from '../../constants.js';
 import { getOriginalSnippets } from './getSnippets.js';
 import { resolveAllImports } from './resolveAllImports.js';
-const findAllDependents = async (initialFileWithSlash, allSnippets, processedDataCache
+const findAllDependents = async (initialFileWithSlash, allSnippets, processedDataCache) => {
 const affected = new Set([initialFileWithSlash]);
 const queue = [initialFileWithSlash];
 while (queue.length > 0) {
@@ -20,7 +20,7 @@ const findAllDependents = async (initialFileWithSlash, allSnippets, processedDat
 processedDataCache.set(potentialDependentFile, processedData);
 }
 const importsCurrentFile = Object.keys(processedData.importMap).some((importPath) => {
-const resolvedPath = resolveSnippetImportPath(importPath, potentialDependentFile
+const resolvedPath = resolveSnippetImportPath(importPath, potentialDependentFile);
 return resolvedPath === currentSourceFile;
 });
 if (importsCurrentFile) {
@@ -33,11 +33,11 @@ const findAllDependents = async (initialFileWithSlash, allSnippets, processedDat
 }
 return affected;
 };
-export const generateDependentSnippets = async (changedFilename, newImportData
+export const generateDependentSnippets = async (changedFilename, newImportData) => {
 const processedDataCache = new Map();
-const allOriginalSnippets = await getOriginalSnippets(
+const allOriginalSnippets = await getOriginalSnippets();
 const updatedSnippetFileKey = optionallyAddLeadingSlash(changedFilename);
-const affectedSnippets = await findAllDependents(updatedSnippetFileKey, allOriginalSnippets, processedDataCache
+const affectedSnippets = await findAllDependents(updatedSnippetFileKey, allOriginalSnippets, processedDataCache);
 const snippetPromises = Array.from(affectedSnippets).map(async (filename) => {
 const cachedData = processedDataCache.get(filename);
 if (cachedData)
@@ -66,7 +66,7 @@ export const generateDependentSnippets = async (changedFilename, newImportData,
 const graph = {};
 snippets.forEach((item) => {
 graph[item.filename] = Object.keys(item.importMap)
-.map((dep) => resolveSnippetImportPath(dep, item.filename
+.map((dep) => resolveSnippetImportPath(dep, item.filename))
 .filter((resolvedDep) => resolvedDep != null);
 });
 const sortedSnippets = topologicalSort(graph).reverse();
@@ -78,7 +78,7 @@ export const generateDependentSnippets = async (changedFilename, newImportData,
 for (const currentSnippet of orderedSnippets) {
 let processedTree = currentSnippet.tree;
 if (currentSnippet.filename !== updatedSnippetFileKey && hasImports(currentSnippet)) {
-processedTree = await resolveAllImports(currentSnippet
+processedTree = await resolveAllImports(currentSnippet);
 }
 const targetFilename = optionallyRemoveLeadingSlash(currentSnippet.filename);
 const targetPath = join(NEXT_PUBLIC_PATH, targetFilename);
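
For orientation, the search that findAllDependents performs in the hunks above is a plain breadth-first walk over the snippet import graph. The sketch below is illustrative only: the SnippetLike type and the resolve callback are stand-ins for the package's FileType data and resolveSnippetImportPath helper, not the package source.

// Illustrative sketch, not the package source: breadth-first search for every
// snippet that (transitively) imports the changed file.
type SnippetLike = { filename: string; importMap: Record<string, unknown> };

export const findDependentsSketch = (
  changedFile: string,
  snippets: SnippetLike[],
  resolve: (importPath: string, importer: string) => string | null, // stand-in for resolveSnippetImportPath
): Set<string> => {
  const affected = new Set<string>([changedFile]);
  const queue = [changedFile];
  while (queue.length > 0) {
    const current = queue.shift()!;
    for (const candidate of snippets) {
      if (affected.has(candidate.filename)) continue;
      // does any import in this snippet resolve to the file we just dequeued?
      const importsCurrent = Object.keys(candidate.importMap).some(
        (importPath) => resolve(importPath, candidate.filename) === current,
      );
      if (importsCurrent) {
        affected.add(candidate.filename);
        queue.push(candidate.filename);
      }
    }
  }
  return affected;
};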

package/dist/local-preview/listener/generatePagesWithImports.d.ts
@@ -1 +1 @@
-export declare const generatePagesWithImports: (updatedSnippets: Set<string
+export declare const generatePagesWithImports: (updatedSnippets: Set<string>) => Promise<void>;

package/dist/local-preview/listener/generatePagesWithImports.js
@@ -1,4 +1,4 @@
-import { findAndRemoveImports, optionallyRemoveLeadingSlash, resolveAllImports, resolveSnippetImportPath, stringifyTree, SNIPPET_EXTENSIONS,
+import { findAndRemoveImports, optionallyRemoveLeadingSlash, resolveAllImports, resolveSnippetImportPath, stringifyTree, SNIPPET_EXTENSIONS, } from '@mintlify/common';
 import { preparseMdxTree, getFileListSync } from '@mintlify/prebuild';
 import { promises as _promises } from 'fs';
 import { outputFile } from 'fs-extra';
@@ -6,11 +6,11 @@ import { join } from 'path';
 import { CMD_EXEC_PATH, NEXT_PROPS_PATH } from '../../constants.js';
 import { getProcessedSnippets } from './getSnippets.js';
 const { readFile } = _promises;
-export const generatePagesWithImports = async (updatedSnippets
-const snippets = await getProcessedSnippets(
+export const generatePagesWithImports = async (updatedSnippets) => {
+const snippets = await getProcessedSnippets();
 const pageFilenames = getFileListSync(CMD_EXEC_PATH).filter((file) => SNIPPET_EXTENSIONS.some((ext) => file.endsWith(ext)) &&
 !file.startsWith('_snippets/') &&
-!
+!file.startsWith('snippets/'));
 await Promise.all(pageFilenames.map(async (pageFilename) => {
 const sourcePath = join(CMD_EXEC_PATH, pageFilename);
 const contentStr = (await readFile(sourcePath)).toString();
@@ -18,14 +18,13 @@ export const generatePagesWithImports = async (updatedSnippets, snippetPatterns)
 const tree = await preparseMdxTree(contentStr, CMD_EXEC_PATH, sourcePath);
 const importsResponse = await findAndRemoveImports(tree);
 if (Object.keys(importsResponse.importMap).some((importPath) => {
-const resolvedPath = resolveSnippetImportPath(importPath, pageFilename
+const resolvedPath = resolveSnippetImportPath(importPath, pageFilename);
 return (resolvedPath != null &&
 updatedSnippets.has(optionallyRemoveLeadingSlash(resolvedPath)));
 })) {
 const content = await resolveAllImports({
 snippets,
 fileWithImports: { ...importsResponse, filename: pageFilename },
-snippetPatterns,
 });
 const targetPath = join(NEXT_PROPS_PATH, pageFilename);
 await outputFile(targetPath, stringifyTree(content), {
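
The page-selection filter completed on lines 11-13 of the new file reads more clearly pulled out on its own. A minimal sketch, assuming placeholder values for SNIPPET_EXTENSIONS (the real list comes from '@mintlify/common'):

// Sketch of the predicate the diff completes: a file counts as a page when it
// has a snippet extension but lives outside both the legacy `_snippets/` and
// the current `snippets/` directories. Extension list is a placeholder.
const SNIPPET_EXTENSIONS = ['.mdx', '.md'];

const isPageFile = (file: string): boolean =>
  SNIPPET_EXTENSIONS.some((ext) => file.endsWith(ext)) &&
  !file.startsWith('_snippets/') &&
  !file.startsWith('snippets/');

isPageFile('docs/intro.mdx');      // true: regenerated when a snippet it imports changes
isPageFile('snippets/button.mdx'); // false: handled by generateDependentSnippets instead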

package/dist/local-preview/listener/getSnippets.d.ts
@@ -1,3 +1,3 @@
 import { type FileType } from '@mintlify/common';
-export declare const getProcessedSnippets: (
-export declare const getOriginalSnippets: (
+export declare const getProcessedSnippets: () => Promise<FileType[]>;
+export declare const getOriginalSnippets: () => Promise<FileType[]>;

package/dist/local-preview/listener/getSnippets.js
@@ -1,11 +1,11 @@
-import { optionallyAddLeadingSlash, SNIPPET_EXTENSIONS
+import { optionallyAddLeadingSlash, SNIPPET_EXTENSIONS } from '@mintlify/common';
 import { getFileListSync, preparseMdxTree } from '@mintlify/prebuild';
 import { promises as _promises } from 'fs';
 import { join } from 'path';
 import { CMD_EXEC_PATH, NEXT_PUBLIC_PATH } from '../../constants.js';
 const { readFile } = _promises;
-const getSnippetBase = async (BASE_DIR
-const snippetFilenames = getFileListSync(BASE_DIR).filter((file) => SNIPPET_EXTENSIONS.some((ext) => file.endsWith(ext)) &&
+const getSnippetBase = async (BASE_DIR) => {
+const snippetFilenames = getFileListSync(BASE_DIR).filter((file) => SNIPPET_EXTENSIONS.some((ext) => file.endsWith(ext)) && file.startsWith('snippets/'));
 const promises = snippetFilenames.map(async (snippetFilename) => {
 try {
 const tree = await preparseMdxTree((await readFile(join(BASE_DIR, snippetFilename))).toString(), BASE_DIR, join(BASE_DIR, snippetFilename));
@@ -21,5 +21,5 @@ const getSnippetBase = async (BASE_DIR, snippetPatterns) => {
 });
 return (await Promise.all(promises)).filter(Boolean);
 };
-export const getProcessedSnippets = (
-export const getOriginalSnippets = (
+export const getProcessedSnippets = () => getSnippetBase(NEXT_PUBLIC_PATH);
+export const getOriginalSnippets = () => getSnippetBase(CMD_EXEC_PATH);
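
Both getters now take no arguments and differ only in the directory they hand to getSnippetBase. A hedged sketch of that pattern follows; the paths are placeholders rather than the package's NEXT_PUBLIC_PATH/CMD_EXEC_PATH constants, and listing filenames stands in for the MDX parsing the real module does.

// Sketch only (Node >= 18.17 for recursive readdir): two zero-argument getters
// sharing one base function that scans a fixed directory under `snippets/`.
import { readdir } from 'fs/promises';
import { join } from 'path';

const collectSnippetFiles = async (baseDir: string): Promise<string[]> => {
  // the real getSnippetBase also parses each file into an MDX tree; returning
  // the matching filenames keeps this sketch self-contained
  const entries = await readdir(join(baseDir, 'snippets'), { recursive: true });
  return entries
    .filter((f) => f.endsWith('.mdx') || f.endsWith('.md'))
    .map((f) => join('snippets', f));
};

export const getProcessedSnippetsSketch = () => collectSnippetFiles('/tmp/.mintlify/public'); // stand-in for NEXT_PUBLIC_PATH
export const getOriginalSnippetsSketch = () => collectSnippetFiles(process.cwd());            // stand-in for CMD_EXEC_PATH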

package/dist/local-preview/listener/index.js
@@ -16,13 +16,11 @@ import { generatePagesWithImports } from './generatePagesWithImports.js';
 import { getDocsState } from './getDocsState.js';
 import { resolveAllImports } from './resolveAllImports.js';
 import { updateCustomLanguages, updateGeneratedNav, updateOpenApiFiles, upsertOpenApiFile, } from './update.js';
-import {
+import { getMintIgnoreGlobs, isFileSizeValid, isJsonValid, shouldRegenerateNavForPage, } from './utils.js';
 const { readFile } = _promises;
 const frontmatterHashes = new Map();
-let previousSnippetPatternsJson = null;
 const listener = (callback, options = {}) => {
 const mintIgnoreGlobs = getMintIgnoreGlobs();
-previousSnippetPatternsJson = JSON.stringify(getSnippetPatterns() ?? []);
 chokidar
 .watch(CMD_EXEC_PATH, {
 ignoreInitial: true,
@@ -74,8 +72,7 @@ const onUnlinkEvent = async (filename, options) => {
 return;
 }
 try {
-const
-const potentialCategory = getFileCategory(filename, snippetPatterns);
+const potentialCategory = getFileCategory(filename);
 const targetPath = getTargetPath(potentialCategory, filename);
 if (potentialCategory === 'page' ||
 potentialCategory === 'snippet' ||
@@ -162,8 +159,7 @@ const validateConfigFiles = async () => {
 */
 const onUpdateEvent = async (filename, callback, options = {}) => {
 const filePath = pathUtil.join(CMD_EXEC_PATH, filename);
-const
-const potentialCategory = getFileCategory(filename, snippetPatterns);
+const potentialCategory = getFileCategory(filename);
 const targetPath = getTargetPath(potentialCategory, filename);
 let regenerateNav = false;
 let category = potentialCategory === 'potentialYamlOpenApiSpec' ||
@@ -177,7 +173,7 @@ const onUpdateEvent = async (filename, callback, options = {}) => {
 const tree = await preparseMdxTree(contentStr, CMD_EXEC_PATH, filePath);
 const importsResponse = await findAndRemoveImports(tree);
 if (hasImports(importsResponse)) {
-contentStr = stringifyTree(await resolveAllImports({ ...importsResponse, filename }
+contentStr = stringifyTree(await resolveAllImports({ ...importsResponse, filename }));
 }
 // set suppressErrLog true here to avoid double logging errors already logged in preparseMdxTree
 const { pageContent } = await createPage(filename, contentStr, CMD_EXEC_PATH, [], [], true);
@@ -195,13 +191,13 @@ const onUpdateEvent = async (filename, callback, options = {}) => {
 const tree = await preparseMdxTree(contentStr, CMD_EXEC_PATH, filePath);
 const importsResponse = await findAndRemoveImports(tree);
 if (hasImports(importsResponse)) {
-contentStr = stringifyTree(await resolveAllImports({ ...importsResponse, filename }
+contentStr = stringifyTree(await resolveAllImports({ ...importsResponse, filename }));
 }
 await fse.outputFile(targetPath, contentStr, {
 flag: 'w',
 });
-const updatedSnippets = await generateDependentSnippets(filename, importsResponse
-await generatePagesWithImports(new Set(updatedSnippets)
+const updatedSnippets = await generateDependentSnippets(filename, importsResponse);
+await generatePagesWithImports(new Set(updatedSnippets));
 break;
 }
 case 'mintConfig':
@@ -211,19 +207,6 @@ const onUpdateEvent = async (filename, callback, options = {}) => {
 addChangeLog(_jsx(ErrorLog, { message: `Syntax error in ${filename}: ${error}` }));
 return null;
 }
-if (potentialCategory === 'docsConfig') {
-const freshSnippetPatterns = getSnippetPatterns();
-const currentJson = JSON.stringify(freshSnippetPatterns ?? []);
-if (previousSnippetPatternsJson !== null && currentJson !== previousSnippetPatternsJson) {
-previousSnippetPatternsJson = currentJson;
-addChangeLog(_jsx(InfoLog, { message: "Snippet patterns changed. Rebuilding..." }));
-await fse.emptyDir(NEXT_PUBLIC_PATH);
-await fse.emptyDir(NEXT_PROPS_PATH);
-await prebuild(CMD_EXEC_PATH, options);
-break;
-}
-previousSnippetPatternsJson = currentJson;
-}
 regenerateNav = true;
 try {
 const { mintConfig, openApiFiles, docsConfig } = await getDocsState();
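
After this change the snippet branch of onUpdateEvent no longer tracks docs.json snippet patterns; it regenerates dependents and then the pages that import them. A hedged sketch of that flow, assuming a file alongside the listener modules shown in this diff (the wrapper function itself is illustrative, not part of the package):

// Illustrative wiring only; in the package this lives inside the switch/case of index.js.
import type { FindAndRemoveImportsResponse } from '@mintlify/common';
import { generateDependentSnippets } from './generateDependentSnippets.js';
import { generatePagesWithImports } from './generatePagesWithImports.js';

export const onSnippetChangedSketch = async (
  filename: string,
  importsResponse: FindAndRemoveImportsResponse,
): Promise<void> => {
  // 1. rebuild the changed snippet and every snippet that transitively imports it
  const updatedSnippets = await generateDependentSnippets(filename, importsResponse);
  // 2. regenerate any page whose imports resolve to one of those snippets
  await generatePagesWithImports(new Set(updatedSnippets));
};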

package/dist/local-preview/listener/resolveAllImports.d.ts
@@ -1,3 +1,3 @@
 import type { FileWithImports } from '@mintlify/common';
 import type { Root } from 'mdast';
-export declare const resolveAllImports: (fileWithImports: FileWithImports
+export declare const resolveAllImports: (fileWithImports: FileWithImports) => Promise<Root>;

package/dist/local-preview/listener/resolveAllImports.js
@@ -1,10 +1,9 @@
 import { resolveAllImports as baseResolveAllImports } from '@mintlify/common';
 import { getProcessedSnippets } from './getSnippets.js';
-export const resolveAllImports = async (fileWithImports
-const snippets = await getProcessedSnippets(
+export const resolveAllImports = async (fileWithImports) => {
+const snippets = await getProcessedSnippets();
 return await baseResolveAllImports({
 snippets,
 fileWithImports,
-snippetPatterns,
 });
 };

package/dist/local-preview/listener/utils.d.ts
@@ -3,7 +3,6 @@ export declare const isFileSizeValid: (path: string, maxFileSizeInMb: number) =>
 export declare function isError(obj: unknown): boolean;
 export declare const readJsonFile: (path: string) => Promise<any>;
 export declare const getMintIgnoreGlobs: () => string[];
-export declare const getSnippetPatterns: () => string[] | undefined;
 export declare const isJsonValid: (filePath: string) => {
 valid: boolean;
 error?: string;

package/dist/local-preview/listener/utils.js
@@ -28,20 +28,6 @@ export const getMintIgnoreGlobs = () => {
 }
 return [];
 };
-export const getSnippetPatterns = () => {
-const docsConfigPath = pathUtil.join(CMD_EXEC_PATH, 'docs.json');
-if (existsSync(docsConfigPath)) {
-try {
-const configContent = readFileSync(docsConfigPath, 'utf-8');
-const config = JSON.parse(configContent);
-return config.snippets;
-}
-catch {
-return undefined;
-}
-}
-return undefined;
-};
 export const isJsonValid = (filePath) => {
 try {
 const content = readFileSync(filePath, 'utf-8');