@mintlify/prebuild 1.0.779 → 1.0.780
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/prebuild/update/git/getGitBlame.d.ts +8 -0
- package/dist/prebuild/update/git/getGitBlame.js +62 -0
- package/dist/prebuild/update/index.d.ts +2 -0
- package/dist/prebuild/update/index.js +45 -2
- package/dist/prebuild/update/read/readContent.d.ts +7 -1
- package/dist/prebuild/update/read/readContent.js +9 -3
- package/dist/prebuild/update/write/writeRssFiles.d.ts +9 -0
- package/dist/prebuild/update/write/writeRssFiles.js +53 -13
- package/dist/tsconfig.build.tsbuildinfo +1 -1
- package/package.json +4 -4
package/dist/prebuild/update/git/getGitBlame.d.ts

```diff
@@ -0,0 +1,8 @@
+/**
+ * Gets git blame data for a file, returning a map of line numbers to ISO date strings.
+ * Uses author-time (when the change was originally created) rather than committer-time.
+ * @param filePath - Absolute path to the file
+ * @param repoPath - Path to the git repository root
+ * @returns Record<number, string> mapping line numbers (1-indexed) to ISO date strings
+ */
+export declare const getGitBlame: (filePath: string, repoPath: string) => Promise<Record<number, string>>;
```
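For orientation, a minimal usage sketch of the new helper (import path and file paths are hypothetical; the return shape comes from the declaration above):

```js
import { getGitBlame } from './getGitBlame.js';

// Hypothetical paths: repoPath must be the git repository root.
const blame = await getGitBlame('/repo/docs/changelog.mdx', '/repo');
// => e.g. { 1: '2024-03-01T11:54:56.000Z', 2: '2024-03-08T09:00:00.000Z' }
// Lines git blame could not attribute are simply absent from the map.
```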
package/dist/prebuild/update/git/getGitBlame.js

```diff
@@ -0,0 +1,62 @@
+import { exec } from 'child_process';
+import { promisify } from 'util';
+const execAsync = promisify(exec);
+/**
+ * Gets git blame data for a file, returning a map of line numbers to ISO date strings.
+ * Uses author-time (when the change was originally created) rather than committer-time.
+ * @param filePath - Absolute path to the file
+ * @param repoPath - Path to the git repository root
+ * @returns Record<number, string> mapping line numbers (1-indexed) to ISO date strings
+ */
+export const getGitBlame = async (filePath, repoPath) => {
+    try {
+        // Make file path relative to repo root for git blame
+        const relativeFilePath = filePath.startsWith(repoPath)
+            ? filePath.substring(repoPath.length + 1)
+            : filePath;
+        // Use git blame with porcelain format for easier parsing
+        // --line-porcelain gives us detailed info for each line
+        const { stdout } = await execAsync(`git blame --line-porcelain "${relativeFilePath}"`, {
+            cwd: repoPath,
+            maxBuffer: 10 * 1024 * 1024, // 10MB buffer for large files
+        });
+        const lineBlame = {};
+        const lines = stdout.split('\n');
+        for (let i = 0; i < lines.length; i++) {
+            const line = lines[i];
+            if (!line)
+                continue;
+            // Line starting with commit hash indicates a new blame entry
+            // Format: <commit-hash> <original-line> <final-line> <num-lines>
+            const commitMatch = line.match(/^[0-9a-f]{40}\s+\d+\s+(\d+)/);
+            if (commitMatch && commitMatch[1]) {
+                const lineNumber = parseInt(commitMatch[1], 10);
+                // Look ahead for 'author-time' field (Unix timestamp)
+                let authorTime;
+                for (let j = i + 1; j < lines.length && j < i + 15; j++) {
+                    const nextLine = lines[j];
+                    if (!nextLine)
+                        continue;
+                    if (nextLine.startsWith('author-time ')) {
+                        const timestamp = parseInt(nextLine.substring('author-time '.length), 10);
+                        authorTime = new Date(timestamp * 1000).toISOString();
+                        break;
+                    }
+                    // Stop when we hit the content line (starts with tab)
+                    if (nextLine.startsWith('\t')) {
+                        break;
+                    }
+                }
+                if (authorTime) {
+                    lineBlame[lineNumber] = authorTime;
+                }
+            }
+        }
+        return lineBlame;
+    }
+    catch (error) {
+        // If git blame fails (e.g., file not in git, no commits), silently return empty blame
+        // Server will use current date as fallback
+        return {};
+    }
+};
```
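The parser above keys off `git blame --line-porcelain` output, where each line of the blamed file produces a header block followed by the content line prefixed with a tab. An abbreviated, invented sample:

```text
49790a347dc4977ef1a3f4a4de55f9a58b537dcf 1 1 1
author Jane Doe
author-mail <jane@example.com>
author-time 1709294096
author-tz -0800
summary Add changelog entry
filename docs/changelog.mdx
	# Changelog
```

The first token matches `/^[0-9a-f]{40}\s+\d+\s+(\d+)/` (the captured group is the final line number), and `author-time 1709294096` becomes `new Date(1709294096 * 1000).toISOString()`, i.e. `'2024-03-01T11:54:56.000Z'`.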
package/dist/prebuild/update/index.d.ts

```diff
@@ -2279,7 +2279,9 @@ export declare const update: ({ contentDirectoryPath, staticFilenames, openApiFi
 }>;
 export declare const writeMdxFilesWithNoImports: (mdxFilesWithNoImports: {
     targetPath: string;
+    sourcePath: string;
     tree: Root;
+    metadata: unknown;
 }[]) => Promise<void>[];
 export * from './mintConfig/index.js';
 export * from './docsConfig/index.js';
```
package/dist/prebuild/update/index.js

```diff
@@ -1,6 +1,7 @@
 import { stringifyTree, containsUpdates } from '@mintlify/common';
 import { upgradeToDocsConfig } from '@mintlify/validation';
 import { outputFile } from 'fs-extra';
+import { join } from 'path';
 import { updateDocsConfigFile } from './docsConfig/index.js';
 import { updateMintConfigFile } from './mintConfig/index.js';
 import { readPageContents, readSnippetsV2Contents } from './read/readContent.js';
```
package/dist/prebuild/update/index.js

```diff
@@ -36,7 +37,49 @@ export const update = async ({ contentDirectoryPath, staticFilenames, openApiFil
         snippetV2Promises,
         pagePromises,
     ]);
-
+    // Filter pages with rss: true in frontmatter OR pages that contain Update components
+    const rssPages = mdxFilesWithNoImports.filter((page) => page.metadata.rss === true || containsUpdates(page.tree));
+    // Also include RSS pages that have imports (e.g., changelog entries as separate snippet files)
+    // For these, we need to extract the snippets using the importMap and include the component names
+    // so they can be properly resolved on the server side
+    const rssPagesWithImports = filesWithImports
+        .filter((file) => {
+        // Check if the file has rss: true metadata
+        if (file.metadata?.rss === true) {
+            return true;
+        }
+        // Check if the file itself contains Update components
+        if (containsUpdates(file.tree)) {
+            return true;
+        }
+        // Check if any of the imported snippets contain Update components
+        const importedSnippets = Object.keys(file.importMap).map((importPath) => snippetV2Contents.find((s) => s.filename === importPath));
+        return importedSnippets.some((snippet) => snippet && containsUpdates(snippet.tree));
+    })
+        .map((file) => {
+        // Extract snippets referenced by this page using its importMap
+        // Map component names (import specifiers) to snippets for proper injection
+        const referencedSnippets = [];
+        for (const [importPath, importSpecifiers] of Object.entries(file.importMap)) {
+            const snippet = snippetV2Contents.find((s) => s.filename === importPath);
+            if (snippet) {
+                // Get all the import names (component names) for this snippet
+                const importNames = importSpecifiers.map((spec) => spec.name);
+                referencedSnippets.push({
+                    ...snippet,
+                    importNames,
+                });
+            }
+        }
+        return {
+            targetPath: join('src', '_props', file.filename),
+            sourcePath: join(contentDirectoryPath, file.filename),
+            tree: file.tree,
+            metadata: file.metadata,
+            snippets: referencedSnippets,
+        };
+    });
+    const allRssPages = [...rssPages, ...rssPagesWithImports];
     const writeDevGroupsPromise = groups && groups.length > 0
         ? outputFile('src/_props/dev-groups.json', JSON.stringify({ groups }, null, 2), { flag: 'w' })
         : Promise.resolve();
```
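The `.map()` step above assumes an `importMap` keyed by snippet path, with each value carrying the import specifiers found in the page. A hypothetical entry (names invented; shape inferred from the code):

```js
// One snippet file imported under two component names.
const importMap = {
    'snippets/changelog-2024.mdx': [{ name: 'MarchUpdates' }, { name: 'AprilUpdates' }],
};
// importSpecifiers.map((spec) => spec.name) then yields
// importNames === ['MarchUpdates', 'AprilUpdates'] for that snippet.
```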
package/dist/prebuild/update/index.js

```diff
@@ -59,7 +102,7 @@
         writeFile(customLanguages, 'src/_props/customLanguages.json'),
         writeOpenApiData(newOpenApiFiles),
         writeAsyncApiFiles(newAsyncApiFiles),
-        writeRssFiles(newDocsConfig,
+        writeRssFiles(newDocsConfig, allRssPages, contentDirectoryPath, snippetV2Contents),
         updateFavicons(newDocsConfig, contentDirectoryPath),
         ...writeMdxFilesWithNoImports(mdxFilesWithNoImports),
         ...writeFiles(contentDirectoryPath, 'public', [...staticFilenames, ...snippets]),
```
package/dist/prebuild/update/read/readContent.d.ts

```diff
@@ -2,6 +2,10 @@ import type { FileWithImports } from '@mintlify/common';
 import { AsyncAPIFile } from '@mintlify/common';
 import type { OpenApiFile, DecoratedNavigationPage } from '@mintlify/models';
 import type { Root } from 'mdast';
+export type FileWithImportsAndMetadata = FileWithImports & {
+    metadata?: Record<string, unknown>;
+    contentDirectoryPath?: string;
+};
 type ReadPageContentsArgs = {
     contentDirectoryPath: string;
     openApiFiles: OpenApiFile[];
```
package/dist/prebuild/update/read/readContent.d.ts

```diff
@@ -12,10 +16,12 @@
 export declare const readPageContents: ({ contentDirectoryPath, openApiFiles, asyncApiFiles, contentFilenames, pagesAcc, }: ReadPageContentsArgs) => Promise<{
     mdxFilesWithNoImports: {
         targetPath: string;
+        sourcePath: string;
         tree: Root;
+        metadata: DecoratedNavigationPage;
     }[];
     pagesAcc: Record<string, DecoratedNavigationPage>;
-    filesWithImports:
+    filesWithImports: FileWithImportsAndMetadata[];
 }>;
 export declare const readSnippetsV2Contents: (contentDirectoryPath: string, snippetV2Filenames: string[]) => Promise<{
     filename: string;
```
package/dist/prebuild/update/read/readContent.js

```diff
@@ -1,4 +1,4 @@
-import { hasImports, findAndRemoveImports, getDecoratedNavPageAndSlug } from '@mintlify/common';
+import { hasImports, findAndRemoveImports, getDecoratedNavPageAndSlug, parseFrontmatter, } from '@mintlify/common';
 import { promises as _promises } from 'fs';
 import { join } from 'path';
 import { preparseMdxTree } from '../../../createPage/preparseMdx/index.js';
```
package/dist/prebuild/update/read/readContent.js

```diff
@@ -10,19 +10,25 @@ export const readPageContents = async ({ contentDirectoryPath, openApiFiles, asy
         const targetPath = join('src', '_props', filename);
         try {
             const contentStr = (await readFile(sourcePath)).toString();
-            // if is snippet add to static file array
             const tree = await preparseMdxTree(contentStr, contentDirectoryPath, sourcePath);
             const importsResponse = await findAndRemoveImports(tree);
             if (hasImports(importsResponse)) {
+                let metadata;
+                try {
+                    metadata = parseFrontmatter(contentStr).attributes;
+                }
+                catch (error) { }
                 filesWithImports.push({
                     ...importsResponse,
                     filename,
+                    metadata,
+                    contentDirectoryPath,
                 });
                 return;
             }
             const { slug, pageMetadata } = getDecoratedNavPageAndSlug(filename, contentStr, openApiFiles, asyncApiFiles);
             pagesAcc[slug] = pageMetadata;
-            return { targetPath, tree };
+            return { targetPath, sourcePath, tree, metadata: pageMetadata };
         }
         catch (error) {
             const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred reading and parsing file.';
```
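The frontmatter read here is best-effort: a parse failure is swallowed and `metadata` stays `undefined`. A sketch of the expected behavior, assuming `parseFrontmatter` returns the parsed YAML block as `attributes` (inferred from the call site above; the MDX content is invented):

```js
import { parseFrontmatter } from '@mintlify/common';

const contentStr = '---\nrss: true\n---\n\n# Changelog\n';
const { attributes } = parseFrontmatter(contentStr);
// attributes.rss === true, which later qualifies the page for RSS generation.
```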
package/dist/prebuild/update/write/writeRssFiles.d.ts

```diff
@@ -2,5 +2,14 @@ import { DocsConfig } from '@mintlify/validation';
 import type { Root } from 'mdast';
 export declare const writeRssFiles: (docsConfig: DocsConfig, rssPages: {
     targetPath: string;
+    sourcePath: string;
+    tree: Root;
+    snippets?: {
+        filename: string;
+        tree: Root;
+        importNames?: string[];
+    }[];
+}[], contentDirectoryPath: string, allSnippets?: {
+    filename: string;
     tree: Root;
 }[]) => Promise<void>;
```
package/dist/prebuild/update/write/writeRssFiles.js

```diff
@@ -1,33 +1,73 @@
-import {
+import { stringifyTree, findAndRemoveImports } from '@mintlify/common';
 import fse from 'fs-extra';
+import { getGitBlame } from '../git/getGitBlame.js';
 import { docsConfigToRss } from '../rss/docsConfigToRss.js';
 import { pageToRss } from '../rss/pageToRss.js';
 import { pathToRss } from '../rss/pathToRss.js';
-
+const extractSnippets = async (tree, allSnippets) => {
+    if (!allSnippets || allSnippets.length === 0) {
+        return [];
+    }
+    const clonedTree = structuredClone(tree);
+    const { importMap } = await findAndRemoveImports(clonedTree);
+    if (Object.keys(importMap).length === 0) {
+        return [];
+    }
+    const referencedSnippets = [];
+    for (const importPath of Object.keys(importMap)) {
+        const normalizedImportPath = importPath.startsWith('/') ? importPath.slice(1) : importPath;
+        const snippet = allSnippets.find((s) => s.filename === normalizedImportPath);
+        if (snippet) {
+            referencedSnippets.push({
+                path: snippet.filename,
+                content: stringifyTree(snippet.tree),
+            });
+        }
+    }
+    return referencedSnippets;
+};
+export const writeRssFiles = async (docsConfig, rssPages, contentDirectoryPath, allSnippets) => {
     const { orgName, orgDescription } = docsConfigToRss(docsConfig);
     const rssTargetPath = 'src/_props/rssFiles.json';
-    const
-        const { targetPath, tree } = page;
+    const rssItemsPromises = rssPages.map(async (page) => {
+        const { targetPath, sourcePath, tree, snippets: preExtractedSnippets } = page;
         const { title, description } = pageToRss(tree);
         const rssPath = pathToRss(targetPath);
-        const
-
-
-
-
+        const content = stringifyTree(tree);
+        let lineBlame = {};
+        try {
+            lineBlame = await getGitBlame(sourcePath, contentDirectoryPath);
+        }
+        catch (error) { }
+        let snippets;
+        if (preExtractedSnippets && preExtractedSnippets.length > 0) {
+            snippets = preExtractedSnippets.map((s) => ({
+                path: s.filename,
+                content: stringifyTree(s.tree),
+                importNames: s.importNames,
+            }));
+        }
+        else {
+            snippets = await extractSnippets(tree, allSnippets);
         }
-        const
-
+        const filePath = sourcePath.substring(contentDirectoryPath.length + 1);
+        const rssFileV4 = {
+            version: 'v4',
             rssPath,
             orgName,
             orgDescription,
             title,
             description,
-
+            filePath,
+            content,
+            lineBlame,
+            snippets: snippets.length > 0 ? snippets : undefined,
         };
+        return rssFileV4;
     });
+    const rssItemsToSave = await Promise.all(rssItemsPromises);
     await fse.remove(rssTargetPath);
-    await fse.outputFile(rssTargetPath, JSON.stringify(rssItemsToSave), {
+    await fse.outputFile(rssTargetPath, JSON.stringify(rssItemsToSave, null, 2), {
        flag: 'w',
     });
 };
```
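Taken together, each entry written to `src/_props/rssFiles.json` should now look roughly like this (all values invented; the field set follows the `rssFileV4` object above):

```js
// Illustrative rssFileV4 entry; actual values depend on the docs repo.
const exampleEntry = {
    version: 'v4',
    rssPath: '/changelog/rss.xml', // pathToRss(targetPath)
    orgName: 'Acme',
    orgDescription: 'Acme product docs',
    title: 'Changelog',
    description: 'Latest product changes',
    filePath: 'changelog.mdx', // sourcePath relative to the content directory
    content: '# Changelog\n...', // stringifyTree(tree)
    lineBlame: { 1: '2024-03-01T11:54:56.000Z' },
    snippets: [
        { path: 'snippets/changelog-2024.mdx', content: '...', importNames: ['MarchUpdates'] },
    ],
};
```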