mintlify 2.1.0 → 3.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (165)
  1. package/CONTRIBUTING.md +2 -16
  2. package/index.js +3 -0
  3. package/package.json +23 -46
  4. package/bin/browser.js +0 -24
  5. package/bin/browser.js.map +0 -1
  6. package/bin/constants.js +0 -32
  7. package/bin/constants.js.map +0 -1
  8. package/bin/downloadImage.js +0 -85
  9. package/bin/downloadImage.js.map +0 -1
  10. package/bin/index.js +0 -49
  11. package/bin/index.js.map +0 -1
  12. package/bin/local-preview/helper-commands/installDepsCommand.js +0 -12
  13. package/bin/local-preview/helper-commands/installDepsCommand.js.map +0 -1
  14. package/bin/local-preview/index.js +0 -154
  15. package/bin/local-preview/index.js.map +0 -1
  16. package/bin/local-preview/listener/categorize.js +0 -95
  17. package/bin/local-preview/listener/categorize.js.map +0 -1
  18. package/bin/local-preview/listener/categorizeFiles.js +0 -47
  19. package/bin/local-preview/listener/categorizeFiles.js.map +0 -1
  20. package/bin/local-preview/listener/generate.js +0 -89
  21. package/bin/local-preview/listener/generate.js.map +0 -1
  22. package/bin/local-preview/listener/index.js +0 -200
  23. package/bin/local-preview/listener/index.js.map +0 -1
  24. package/bin/local-preview/listener/update.js +0 -24
  25. package/bin/local-preview/listener/update.js.map +0 -1
  26. package/bin/local-preview/listener/utils/createPage.js +0 -167
  27. package/bin/local-preview/listener/utils/createPage.js.map +0 -1
  28. package/bin/local-preview/listener/utils/fileIsMdxOrMd.js +0 -12
  29. package/bin/local-preview/listener/utils/fileIsMdxOrMd.js.map +0 -1
  30. package/bin/local-preview/listener/utils/getOpenApiContext.js +0 -57
  31. package/bin/local-preview/listener/utils/getOpenApiContext.js.map +0 -1
  32. package/bin/local-preview/listener/utils/mintConfigFile.js +0 -22
  33. package/bin/local-preview/listener/utils/mintConfigFile.js.map +0 -1
  34. package/bin/local-preview/listener/utils/toTitleCase.js +0 -36
  35. package/bin/local-preview/listener/utils/toTitleCase.js.map +0 -1
  36. package/bin/local-preview/listener/utils/types.js +0 -2
  37. package/bin/local-preview/listener/utils/types.js.map +0 -1
  38. package/bin/local-preview/listener/utils.js +0 -67
  39. package/bin/local-preview/listener/utils.js.map +0 -1
  40. package/bin/local-preview/utils/categorizeFiles.js +0 -63
  41. package/bin/local-preview/utils/categorizeFiles.js.map +0 -1
  42. package/bin/local-preview/utils/getOpenApiContext.js +0 -58
  43. package/bin/local-preview/utils/getOpenApiContext.js.map +0 -1
  44. package/bin/local-preview/utils/injectFavicons.js +0 -72
  45. package/bin/local-preview/utils/injectFavicons.js.map +0 -1
  46. package/bin/local-preview/utils/listener.js +0 -116
  47. package/bin/local-preview/utils/listener.js.map +0 -1
  48. package/bin/local-preview/utils/metadata.js +0 -118
  49. package/bin/local-preview/utils/metadata.js.map +0 -1
  50. package/bin/local-preview/utils/mintConfigFile.js +0 -43
  51. package/bin/local-preview/utils/mintConfigFile.js.map +0 -1
  52. package/bin/local-preview/utils/openApiCheck.js +0 -15
  53. package/bin/local-preview/utils/openApiCheck.js.map +0 -1
  54. package/bin/local-preview/utils/slugToTitle.js +0 -8
  55. package/bin/local-preview/utils/slugToTitle.js.map +0 -1
  56. package/bin/navigation.js +0 -4
  57. package/bin/navigation.js.map +0 -1
  58. package/bin/pageTemplate.js +0 -30
  59. package/bin/pageTemplate.js.map +0 -1
  60. package/bin/scraping/combineNavWithEmptyGroupTitles.js +0 -20
  61. package/bin/scraping/combineNavWithEmptyGroupTitles.js.map +0 -1
  62. package/bin/scraping/detectFramework.js +0 -39
  63. package/bin/scraping/detectFramework.js.map +0 -1
  64. package/bin/scraping/downloadAllImages.js +0 -33
  65. package/bin/scraping/downloadAllImages.js.map +0 -1
  66. package/bin/scraping/downloadLogoImage.js +0 -13
  67. package/bin/scraping/downloadLogoImage.js.map +0 -1
  68. package/bin/scraping/getSitemapLinks.js +0 -18
  69. package/bin/scraping/getSitemapLinks.js.map +0 -1
  70. package/bin/scraping/replaceImagePaths.js +0 -17
  71. package/bin/scraping/replaceImagePaths.js.map +0 -1
  72. package/bin/scraping/scrapeFileGettingFileNameFromUrl.js +0 -43
  73. package/bin/scraping/scrapeFileGettingFileNameFromUrl.js.map +0 -1
  74. package/bin/scraping/scrapeGettingFileNameFromUrl.js +0 -13
  75. package/bin/scraping/scrapeGettingFileNameFromUrl.js.map +0 -1
  76. package/bin/scraping/scrapePage.js +0 -10
  77. package/bin/scraping/scrapePage.js.map +0 -1
  78. package/bin/scraping/scrapePageCommands.js +0 -55
  79. package/bin/scraping/scrapePageCommands.js.map +0 -1
  80. package/bin/scraping/scrapeSection.js +0 -12
  81. package/bin/scraping/scrapeSection.js.map +0 -1
  82. package/bin/scraping/scrapeSectionCommands.js +0 -66
  83. package/bin/scraping/scrapeSectionCommands.js.map +0 -1
  84. package/bin/scraping/site-scrapers/Intercom/scrapeIntercomPage.js +0 -27
  85. package/bin/scraping/site-scrapers/Intercom/scrapeIntercomPage.js.map +0 -1
  86. package/bin/scraping/site-scrapers/Intercom/scrapeIntercomSection.js +0 -32
  87. package/bin/scraping/site-scrapers/Intercom/scrapeIntercomSection.js.map +0 -1
  88. package/bin/scraping/site-scrapers/alternateGroupTitle.js +0 -9
  89. package/bin/scraping/site-scrapers/alternateGroupTitle.js.map +0 -1
  90. package/bin/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.js +0 -34
  91. package/bin/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.js.map +0 -1
  92. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursively.js +0 -38
  93. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursively.js.map +0 -1
  94. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursivelyGitBook.js +0 -38
  95. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursivelyGitBook.js.map +0 -1
  96. package/bin/scraping/site-scrapers/openNestedDocusaurusMenus.js +0 -30
  97. package/bin/scraping/site-scrapers/openNestedDocusaurusMenus.js.map +0 -1
  98. package/bin/scraping/site-scrapers/openNestedGitbookMenus.js +0 -17
  99. package/bin/scraping/site-scrapers/openNestedGitbookMenus.js.map +0 -1
  100. package/bin/scraping/site-scrapers/scrapeDocusaurusPage.js +0 -49
  101. package/bin/scraping/site-scrapers/scrapeDocusaurusPage.js.map +0 -1
  102. package/bin/scraping/site-scrapers/scrapeDocusaurusSection.js +0 -30
  103. package/bin/scraping/site-scrapers/scrapeDocusaurusSection.js.map +0 -1
  104. package/bin/scraping/site-scrapers/scrapeGitBookPage.js +0 -47
  105. package/bin/scraping/site-scrapers/scrapeGitBookPage.js.map +0 -1
  106. package/bin/scraping/site-scrapers/scrapeGitBookSection.js +0 -52
  107. package/bin/scraping/site-scrapers/scrapeGitBookSection.js.map +0 -1
  108. package/bin/scraping/site-scrapers/scrapeReadMePage.js +0 -36
  109. package/bin/scraping/site-scrapers/scrapeReadMePage.js.map +0 -1
  110. package/bin/scraping/site-scrapers/scrapeReadMeSection.js +0 -44
  111. package/bin/scraping/site-scrapers/scrapeReadMeSection.js.map +0 -1
  112. package/bin/util.js +0 -129
  113. package/bin/util.js.map +0 -1
  114. package/bin/validation/isValidLink.js +0 -11
  115. package/bin/validation/isValidLink.js.map +0 -1
  116. package/bin/validation/stopIfInvalidLink.js +0 -9
  117. package/bin/validation/stopIfInvalidLink.js.map +0 -1
  118. package/scraper.md +0 -121
  119. package/src/browser.ts +0 -24
  120. package/src/constants.ts +0 -40
  121. package/src/downloadImage.ts +0 -110
  122. package/src/index.ts +0 -112
  123. package/src/local-preview/helper-commands/installDepsCommand.ts +0 -13
  124. package/src/local-preview/index.ts +0 -195
  125. package/src/local-preview/listener/categorize.ts +0 -105
  126. package/src/local-preview/listener/generate.ts +0 -110
  127. package/src/local-preview/listener/index.ts +0 -228
  128. package/src/local-preview/listener/update.ts +0 -27
  129. package/src/local-preview/listener/utils/createPage.ts +0 -211
  130. package/src/local-preview/listener/utils/getOpenApiContext.ts +0 -77
  131. package/src/local-preview/listener/utils/mintConfigFile.ts +0 -28
  132. package/src/local-preview/listener/utils/toTitleCase.ts +0 -40
  133. package/src/local-preview/listener/utils/types.ts +0 -16
  134. package/src/local-preview/listener/utils.ts +0 -78
  135. package/src/scraping/combineNavWithEmptyGroupTitles.ts +0 -21
  136. package/src/scraping/detectFramework.ts +0 -47
  137. package/src/scraping/downloadAllImages.ts +0 -60
  138. package/src/scraping/downloadLogoImage.ts +0 -25
  139. package/src/scraping/getSitemapLinks.ts +0 -18
  140. package/src/scraping/replaceImagePaths.ts +0 -21
  141. package/src/scraping/scrapeFileGettingFileNameFromUrl.ts +0 -86
  142. package/src/scraping/scrapeGettingFileNameFromUrl.ts +0 -54
  143. package/src/scraping/scrapePage.ts +0 -24
  144. package/src/scraping/scrapePageCommands.ts +0 -66
  145. package/src/scraping/scrapeSection.ts +0 -28
  146. package/src/scraping/scrapeSectionCommands.ts +0 -110
  147. package/src/scraping/site-scrapers/Intercom/scrapeIntercomPage.ts +0 -51
  148. package/src/scraping/site-scrapers/Intercom/scrapeIntercomSection.ts +0 -53
  149. package/src/scraping/site-scrapers/alternateGroupTitle.ts +0 -8
  150. package/src/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.ts +0 -44
  151. package/src/scraping/site-scrapers/links-per-group/getLinksRecursively.ts +0 -45
  152. package/src/scraping/site-scrapers/links-per-group/getLinksRecursivelyGitBook.ts +0 -46
  153. package/src/scraping/site-scrapers/openNestedDocusaurusMenus.ts +0 -42
  154. package/src/scraping/site-scrapers/openNestedGitbookMenus.ts +0 -24
  155. package/src/scraping/site-scrapers/scrapeDocusaurusPage.ts +0 -81
  156. package/src/scraping/site-scrapers/scrapeDocusaurusSection.ts +0 -63
  157. package/src/scraping/site-scrapers/scrapeGitBookPage.ts +0 -74
  158. package/src/scraping/site-scrapers/scrapeGitBookSection.ts +0 -80
  159. package/src/scraping/site-scrapers/scrapeReadMePage.ts +0 -60
  160. package/src/scraping/site-scrapers/scrapeReadMeSection.ts +0 -70
  161. package/src/types.d.ts +0 -29
  162. package/src/util.ts +0 -161
  163. package/src/validation/isValidLink.ts +0 -9
  164. package/src/validation/stopIfInvalidLink.ts +0 -9
  165. package/tsconfig.json +0 -18
@@ -1,77 +0,0 @@
1
- export const extractMethodAndEndpoint = (
2
- api: string
3
- ): { method?: string; endpoint: string; filename?: string } => {
4
- const methodRegex = /(get|post|put|delete|patch)\s/i;
5
- const trimmed = api.trim();
6
- const foundMethod = trimmed.match(methodRegex);
7
-
8
- const startIndexOfMethod = foundMethod ? api.indexOf(foundMethod[0]) : 0;
9
- const endIndexOfMethod = foundMethod
10
- ? startIndexOfMethod + foundMethod[0].length - 1
11
- : 0;
12
-
13
- const filename = api.substring(0, startIndexOfMethod).trim();
14
-
15
- return {
16
- method: foundMethod ? foundMethod[0].slice(0, -1).toUpperCase() : undefined,
17
- endpoint: api.substring(endIndexOfMethod).trim(),
18
- filename: filename ? filename : undefined,
19
- };
20
- };
21
-
22
- export const getOpenApiOperationMethodAndEndpoint = (
23
- openApi: any,
24
- openApiMetaField: string
25
- ) => {
26
- const { endpoint, method, filename } =
27
- extractMethodAndEndpoint(openApiMetaField);
28
-
29
- let path: any;
30
-
31
- openApi.files?.forEach((file: any) => {
32
- const openApiFile = file.openapi;
33
- const openApiPath = openApiFile.paths && openApiFile.paths[endpoint];
34
- const isFilenameOrNone = !filename || filename === file.name;
35
- if (openApiPath && isFilenameOrNone) {
36
- path = openApiPath;
37
- }
38
- });
39
-
40
- if (path == null) {
41
- return {};
42
- }
43
-
44
- let operation;
45
- if (method) {
46
- operation = path[method.toLowerCase()];
47
- } else {
48
- const firstOperationKey = Object.keys(path)[0];
49
- operation = path[firstOperationKey];
50
- }
51
-
52
- return {
53
- operation,
54
- method,
55
- endpoint,
56
- };
57
- };
58
-
59
- export const getOpenApiTitleAndDescription = (openApi, openApiMetaField) => {
60
- if (openApi == null || !openApiMetaField || openApiMetaField == null) {
61
- return {};
62
- }
63
-
64
- const { operation } = getOpenApiOperationMethodAndEndpoint(
65
- openApi,
66
- openApiMetaField
67
- );
68
-
69
- if (operation == null) {
70
- return {};
71
- }
72
-
73
- return {
74
- title: operation.summary,
75
- description: operation.description,
76
- };
77
- };
@@ -1,28 +0,0 @@
1
- import { promises as _promises } from "fs";
2
- import { pathExists } from "fs-extra";
3
- import pathUtil from "path";
4
-
5
- const { readFile } = _promises;
6
-
7
- // TODO: Put in prebuild package
8
- export const getConfigPath = async (
9
- contentDirectoryPath: string
10
- ): Promise<string | null> => {
11
- if (await pathExists(pathUtil.join(contentDirectoryPath, "mint.json"))) {
12
- return pathUtil.join(contentDirectoryPath, "mint.json");
13
- }
14
- return null;
15
- };
16
-
17
- // TODO: Put in prebuild package
18
- export const getConfigObj = async (
19
- contentDirectoryPath: string
20
- ): Promise<any> => {
21
- const configPath = await getConfigPath(contentDirectoryPath);
22
- let configObj = null;
23
- if (configPath) {
24
- const configContents = await readFile(configPath);
25
- configObj = await JSON.parse(configContents.toString());
26
- }
27
- return configObj;
28
- };
@@ -1,40 +0,0 @@
1
/**
 * Converts text to title case: capitalizes each word except "small" words
 * (articles, conjunctions, short prepositions) that appear mid-title.
 *
 * Works by splitting on word separators (space, colon, en/em dash, hyphen),
 * KEEPING the separators in the array (capturing group), transforming each
 * token, and re-joining. The index offsets below (index - 3, index + 1, ...)
 * therefore step over separator tokens, not just words.
 *
 * NOTE(review): looks derived from a common "to title case" snippet — the
 * exact offset rules are preserved as-is; confirm before restructuring.
 */
export function toTitleCase(text: string) {
  // Words that stay lowercase when they appear in the middle of a title.
  const smallWords = /^(a|an|and|as|at|but|by|en|for|if|in|nor|of|on|or|per|the|to|v.?|vs.?|via)$/i;
  // First alphanumeric character of a token (incl. Latin-1 accented letters).
  const alphanumericPattern = /([A-Za-z0-9\u00C0-\u00FF])/;
  // Capturing group so separators are preserved in the split output.
  const wordSeparators = /([ :–—-])/;

  return text
    .split(wordSeparators)
    .map(function (current, index, array) {
      if (
        // Check for small words
        current.search(smallWords) > -1 &&
        // Skip first and last word
        index !== 0 &&
        index !== array.length - 1 &&
        // Ignore title end and subtitle start
        array[index - 3] !== ':' &&
        array[index + 1] !== ':' &&
        // Ignore small words that start a hyphenated phrase
        (array[index + 1] !== '-' || (array[index - 1] === '-' && array[index + 1] === '-'))
      ) {
        return current.toLowerCase();
      }

      // Ignore intentional capitalization: an uppercase letter past the first
      // character, or a dot followed by another character (e.g. "example.com").
      if (current.substring(1).search(/[A-Z]|\../) > -1) {
        return current;
      }

      // Ignore URLs (token followed by ':' and more text, e.g. "https://...").
      if (array[index + 1] === ':' && array[index + 2] !== '') {
        return current;
      }

      // Capitalize the first letter
      return current.replace(alphanumericPattern, function (match: string) {
        return match.toUpperCase();
      });
    })
    .join('');
}
@@ -1,16 +0,0 @@
1
// A loaded OpenAPI document: the file name it came from plus the parsed spec.
export type OpenApiFile = { name: string; spec: any };

// Provisional classification of a file based on its name/extension alone —
// "potential*OpenApiSpec" entries still need validation before they count
// as real specs.
export type PotentialFileCategory =
  | "page"
  | "snippet"
  | "mintConfig"
  | "potentialYamlOpenApiSpec"
  | "potentialJsonOpenApiSpec"
  | "staticFile";

// Final classification after potential specs have been validated: both
// potential OpenAPI categories collapse into "openApi".
export type FileCategory =
  | "page"
  | "snippet"
  | "mintConfig"
  | "openApi"
  | "staticFile";
@@ -1,78 +0,0 @@
1
- import SwaggerParser from "@apidevtools/swagger-parser";
2
- import { promises as _promises } from "fs";
3
-
4
- const { readdir, stat } = _promises;
5
-
6
- export const getFileExtension = (filename) => {
7
- return (
8
- filename.substring(filename.lastIndexOf(".") + 1, filename.length) ||
9
- filename
10
- );
11
- };
12
-
13
- export const openApiCheck = async (path) => {
14
- let spec;
15
- let isOpenApi = false;
16
- try {
17
- spec = await SwaggerParser.validate(path);
18
- isOpenApi = true;
19
- } catch {
20
- // not valid openApi
21
- }
22
- return { spec, isOpenApi };
23
- };
24
-
25
- export const filterOutNullInGroup = (group) => {
26
- const newPages = filterOutNullInPages(group.pages);
27
- const newGroup = {
28
- ...group,
29
- pages: newPages,
30
- };
31
- return newGroup;
32
- };
33
-
34
- const filterOutNullInPages = (pages) => {
35
- if (!Array.isArray(pages)) {
36
- return [];
37
- }
38
- const newPages = [];
39
- pages.forEach((page) => {
40
- if (page == null) {
41
- return;
42
- }
43
- if (page.hasOwnProperty("pages")) {
44
- const newGroup = filterOutNullInGroup(page);
45
- newPages.push(newGroup);
46
- } else {
47
- newPages.push(page);
48
- }
49
- });
50
-
51
- return newPages;
52
- };
53
-
54
- export const getFileList = async (dirName: string, og = dirName) => {
55
- let files = [];
56
- const items = await readdir(dirName, { withFileTypes: true });
57
-
58
- for (const item of items) {
59
- if (item.isDirectory()) {
60
- files = [...files, ...(await getFileList(`${dirName}/${item.name}`, og))];
61
- } else {
62
- const path = `${dirName}/${item.name}`;
63
- const name = path.replace(og, "");
64
- files.push(name);
65
- }
66
- }
67
-
68
- return files;
69
- };
70
-
71
- export const isFileSizeValid = async (
72
- path: string,
73
- maxFileSizeInMb: number
74
- ): Promise<boolean> => {
75
- const maxFileSizeBytes = maxFileSizeInMb * 1000000;
76
- const stats = await stat(path);
77
- return stats.size <= maxFileSizeBytes;
78
- };
@@ -1,21 +0,0 @@
1
- export default function combineNavWithEmptyGroupTitles(
2
- navArray: MintNavigation[]
3
- ): MintNavigation[] {
4
- let newNavArray = [];
5
-
6
- navArray.forEach((nav: MintNavigation) => {
7
- // The first run through the loop will always have -1 as the index.
8
- // JavaScript returns undefined when we look for an index outside the size of the array.
9
- const prev = newNavArray[newNavArray.length - 1];
10
- if (prev == null) {
11
- newNavArray.push(nav);
12
- } else if (!nav.group && !prev.group) {
13
- // Joins multiple groups without a title together IF they occur side by side
14
- prev.pages = prev.pages.concat(nav.pages);
15
- } else {
16
- newNavArray.push(nav);
17
- }
18
- });
19
-
20
- return newNavArray;
21
- }
@@ -1,47 +0,0 @@
1
- import cheerio from "cheerio";
2
-
3
// Documentation frameworks the scraper knows how to detect (see
// detectFramework below) and import content from.
export enum Frameworks {
  DOCUSAURUS = "DOCUSAURUS",
  GITBOOK = "GITBOOK",
  README = "README",
  INTERCOM = "INTERCOM",
}
9
-
10
- export function detectFramework(html) {
11
- const $ = cheerio.load(html);
12
- const docusaurusMeta = $('meta[name="generator"]');
13
-
14
- if (
15
- docusaurusMeta.length > 0 &&
16
- docusaurusMeta.attr("content").includes("Docusaurus")
17
- ) {
18
- if (docusaurusMeta.attr("content").includes("v3")) {
19
- return { framework: Frameworks.DOCUSAURUS, version: "3" };
20
- }
21
- if (docusaurusMeta.attr("content").includes("v2")) {
22
- return { framework: Frameworks.DOCUSAURUS, version: "2" };
23
- } else if (docusaurusMeta.attr("content").includes("v1")) {
24
- console.warn(
25
- "WARNING: We detected Docusaurus version 1 but we only support scraping versions 2 and 3."
26
- );
27
- return { framework: Frameworks.DOCUSAURUS, version: "1" };
28
- }
29
- }
30
-
31
- const isGitBook = $(".gitbook-root").length > 0;
32
- if (isGitBook) {
33
- return { framework: Frameworks.GITBOOK };
34
- }
35
-
36
- const isReadMe = $('meta[name="readme-deploy"]').length > 0;
37
- if (isReadMe) {
38
- return { framework: Frameworks.README };
39
- }
40
-
41
- const isIntercom = $("meta[name='intercom:trackingEvent']").length > 0;
42
- if (isIntercom) {
43
- return { framework: Frameworks.INTERCOM };
44
- }
45
-
46
- return undefined;
47
- }
@@ -1,60 +0,0 @@
1
- import path from "path";
2
- import downloadImage, {
3
- cleanImageSrc,
4
- isValidImageSrc,
5
- removeMetadataFromImageSrc,
6
- } from "../downloadImage.js";
7
-
8
- // To Do: Use CheerioElement instead of any when we bump the cheerio version
9
- export default async function downloadAllImages(
10
- $: any,
11
- content: any,
12
- origin: string,
13
- baseDir: string,
14
- overwrite: boolean,
15
- modifyFileName?: any,
16
- skipValidateImageExtension?: boolean
17
- ) {
18
- if (!baseDir) {
19
- console.debug("Skipping image downloading");
20
- return;
21
- }
22
-
23
- // We remove duplicates because some frameworks duplicate img tags
24
- // to show the image larger when clicked on.
25
- const imageSrcs = [
26
- ...new Set(
27
- content
28
- .find("img[src]")
29
- .map((i, image) => $(image).attr("src"))
30
- .toArray()
31
- ),
32
- ];
33
-
34
- // Wait to all images to download before continuing
35
- const origToNewArray = await Promise.all(
36
- imageSrcs.map(async (imageSrc: string) => {
37
- if (!isValidImageSrc(imageSrc, skipValidateImageExtension)) {
38
- return;
39
- }
40
-
41
- const imageHref = cleanImageSrc(imageSrc, origin);
42
-
43
- let fileName = removeMetadataFromImageSrc(path.basename(imageHref));
44
- if (modifyFileName) {
45
- fileName = modifyFileName(fileName);
46
- }
47
-
48
- const writePath = path.join(baseDir, fileName);
49
-
50
- await downloadImage(imageHref, writePath, overwrite);
51
-
52
- return { [imageSrc]: writePath };
53
- })
54
- );
55
-
56
- return origToNewArray.reduce(
57
- (result, current) => Object.assign(result, current),
58
- {}
59
- );
60
- }
@@ -1,25 +0,0 @@
1
- import path from "path";
2
- import downloadImage, {
3
- cleanImageSrc,
4
- isValidImageSrc,
5
- removeMetadataFromImageSrc,
6
- } from "../downloadImage.js";
7
- import { getFileExtension } from "../util.js";
8
-
9
- // To Do: Use CheerioElement instead of any when we bump the cheerio version
10
- export default async function downloadLogoImage(
11
- imageSrc: string,
12
- imageBaseDir: string,
13
- origin: string,
14
- overwrite: boolean,
15
- skipValidateImageExtension?: boolean
16
- ) {
17
- if (!isValidImageSrc(imageSrc, skipValidateImageExtension)) return;
18
-
19
- const imageHref = cleanImageSrc(imageSrc, origin);
20
-
21
- const ext = getFileExtension(removeMetadataFromImageSrc(imageSrc));
22
- const imagePath = path.join(imageBaseDir, "logo", "logo-light-mode." + ext);
23
-
24
- await downloadImage(imageHref, imagePath, overwrite);
25
- }
@@ -1,18 +0,0 @@
1
- import axios from "axios";
2
-
3
- // Not in use.
4
- // Gets all links in a sitemap.
5
- export const getSitemapLinks = async (url: URL) => {
6
- const hostname = url.hostname.replace(".", "\\.");
7
- const regex = new RegExp(`https?:\/\/${hostname}.+?(?=<\/loc>)`, "gmi");
8
-
9
- try {
10
- const indexData = (await axios.get(url.href)).data as string;
11
- const array = indexData.match(regex) as string[] | null;
12
- return array || [];
13
- } catch (err) {
14
- console.error(err);
15
- console.log("Skipping sitemap links because we encountered an error.");
16
- return [];
17
- }
18
- };
@@ -1,21 +0,0 @@
1
- export default function replaceImagePaths(
2
- origToWritePath: object,
3
- cliDir: string,
4
- markdown: string
5
- ) {
6
- if (origToWritePath == null) {
7
- return markdown;
8
- }
9
-
10
- // Change image paths to use the downloaded locations
11
- for (const [origHref, writePath] of Object.entries(origToWritePath)) {
12
- // Use relative paths within the folder we are in
13
- if (writePath.startsWith(cliDir)) {
14
- markdown = markdown.replaceAll(origHref, writePath.slice(cliDir.length));
15
- } else {
16
- markdown = markdown.replaceAll(origHref, writePath);
17
- }
18
- }
19
-
20
- return markdown;
21
- }
@@ -1,86 +0,0 @@
1
- import path from "path";
2
- import axios from "axios";
3
- import { getHtmlWithPuppeteer } from "../browser.js";
4
- import { createPage } from "../util.js";
5
-
6
/**
 * Scrapes one site path into a local MDX page and returns the navigation
 * entry for it.
 *
 * @param pathname - site-relative path of the page (absolute http(s) URLs
 *                   are returned untouched, i.e. treated as external links).
 * @param cliDir - root output directory for generated pages.
 * @param origin - site origin used to resolve `pathname` into a full URL.
 * @param scrapePageFunc - framework-specific scraper producing
 *                   title/description/markdown from the fetched HTML.
 * @param puppeteer - when true, render the page with Puppeteer instead of a
 *                   plain axios GET (needed for JS-rendered sites).
 * @param baseToRemove - optional path prefix stripped from the folder part.
 * @returns the nav entry (relative file path without leading slash), the
 *          pathname itself for external links, or undefined for empty pages.
 */
export async function scrapeFileGettingFileNameFromUrl(
  pathname: string,
  cliDir: string,
  origin: string,
  overwrite: boolean,
  scrapePageFunc: (
    html: string,
    origin: string,
    cliDir: string,
    imageBaseDir: string,
    overwrite: boolean,
    version: string | undefined
  ) => Promise<{
    title?: string;
    description?: string;
    markdown?: string;
  }>,
  puppeteer = false,
  version: string | undefined,
  baseToRemove?: string
): Promise<MintNavigationEntry> {
  // Skip scraping external links
  if (pathname.startsWith("https://") || pathname.startsWith("http://")) {
    return pathname;
  }

  // Removes file name from the end, leaving the folder portion.
  const splitSubpath = pathname.split("/");
  let folders = splitSubpath.slice(0, splitSubpath.length - 1).join("/");

  // Remove base dir if passed in
  if (baseToRemove && folders.startsWith(baseToRemove)) {
    folders = folders.replace(baseToRemove, "");
  }

  // TO DO: Improve this by putting each page's images in a separate
  // folder named after the title of the page.
  const imageBaseDir = path.join(cliDir, "images", folders);

  // Scrape each page separately. Puppeteer is used when the caller says the
  // site needs JS rendering; otherwise a plain GET suffices.
  const href = new URL(pathname, origin).href;
  let html: string;
  if (puppeteer) {
    html = await getHtmlWithPuppeteer(href);
  } else {
    const res = await axios.get(href);
    html = res.data;
  }

  const { title, description, markdown } = await scrapePageFunc(
    html,
    origin,
    cliDir,
    imageBaseDir,
    overwrite,
    version
  );

  // Check if page didn't have content — skip writing and drop it from nav.
  if (!title && !markdown) {
    return undefined;
  }

  const newFileLocation = folders ? path.join(cliDir, folders) : cliDir;

  // Default to introduction.mdx if we encountered index.html
  // (a trailing "/" leaves the last split segment empty).
  const fileName = splitSubpath[splitSubpath.length - 1] || "introduction";

  // Will create subfolders as needed.
  // NOTE(review): createPage's result is not awaited here; if it is async,
  // the file write may still be in flight when we return — confirm.
  createPage(
    title,
    description,
    markdown,
    overwrite,
    newFileLocation,
    fileName
  );

  // Removes first slash if we are in a folder, Mintlify doesn't need it
  return folders ? path.join(folders, fileName).substring(1) : fileName;
}
@@ -1,54 +0,0 @@
1
- import { scrapeFileGettingFileNameFromUrl } from "./scrapeFileGettingFileNameFromUrl.js";
2
-
3
- export async function scrapeGettingFileNameFromUrl(
4
- navEntry: MintNavigationEntry,
5
- cliDir: string,
6
- origin: string,
7
- overwrite: boolean,
8
- scrapePageFunc: (
9
- html: string,
10
- origin: string,
11
- cliDir: string,
12
- imageBaseDir: string,
13
- overwrite: boolean,
14
- version: string | undefined
15
- ) => Promise<{
16
- title?: string;
17
- description?: string;
18
- markdown?: string;
19
- }>,
20
- puppeteer = false,
21
- version: string | undefined,
22
- baseToRemove?: string
23
- ): Promise<MintNavigationEntry> {
24
- if (typeof navEntry !== "string") {
25
- const newPages = [];
26
- for (const nestedNavEntry of navEntry.pages) {
27
- newPages.push(
28
- await scrapeGettingFileNameFromUrl(
29
- nestedNavEntry,
30
- cliDir,
31
- origin,
32
- overwrite,
33
- scrapePageFunc,
34
- puppeteer,
35
- version,
36
- baseToRemove
37
- )
38
- );
39
- }
40
- navEntry.pages = newPages;
41
- return navEntry;
42
- }
43
-
44
- return await scrapeFileGettingFileNameFromUrl(
45
- navEntry,
46
- cliDir,
47
- origin,
48
- overwrite,
49
- scrapePageFunc,
50
- puppeteer,
51
- version,
52
- baseToRemove
53
- );
54
- }
@@ -1,24 +0,0 @@
1
- import path from "path";
2
- import { createPage, getOrigin } from "../util.js";
3
-
4
- export async function scrapePage(
5
- scrapeFunc: ScrapePageFn,
6
- href: string,
7
- html: string,
8
- overwrite: boolean,
9
- version: string | undefined
10
- ) {
11
- const origin = getOrigin(href);
12
- const cwd = process.cwd();
13
- const imageBaseDir = path.join(cwd, "images");
14
-
15
- const { title, description, markdown } = await scrapeFunc(
16
- html,
17
- origin,
18
- cwd,
19
- imageBaseDir,
20
- overwrite,
21
- version
22
- );
23
- createPage(title, description, markdown, overwrite, process.cwd());
24
- }
@@ -1,66 +0,0 @@
1
- import axios from "axios";
2
- import { scrapePage } from "./scrapePage.js";
3
- import { scrapeDocusaurusPage } from "./site-scrapers/scrapeDocusaurusPage.js";
4
- import { scrapeGitBookPage } from "./site-scrapers/scrapeGitBookPage.js";
5
- import { scrapeReadMePage } from "./site-scrapers/scrapeReadMePage.js";
6
- import { detectFramework, Frameworks } from "./detectFramework.js";
7
- import { getHrefFromArgs } from "../util.js";
8
- import { getHtmlWithPuppeteer } from "../browser.js";
9
- import { ArgumentsCamelCase } from "yargs";
10
- import { scrapeIntercomPage } from "./site-scrapers/Intercom/scrapeIntercomPage.js";
11
-
12
- function validateFramework(framework) {
13
- if (!framework) {
14
- console.log(
15
- "Could not detect the framework automatically. Please use one of:"
16
- );
17
- console.log("scrape-page-docusaurus");
18
- console.log("scrape-page-gitbook");
19
- console.log("scrape-page-readme");
20
- console.log("scrape-page-intercom");
21
- return process.exit(1);
22
- }
23
- }
24
-
25
- export async function scrapePageWrapper(
26
- argv: ArgumentsCamelCase,
27
- scrapeFunc: ScrapePageFn,
28
- options?: { version?: string; puppeteer?: boolean }
29
- ) {
30
- const href = getHrefFromArgs(argv);
31
- let html: string;
32
- if (options?.puppeteer) {
33
- html = await getHtmlWithPuppeteer(href);
34
- } else {
35
- const res = await axios.get(href);
36
- html = res.data;
37
- }
38
- await scrapePage(scrapeFunc, href, html, !!argv.overwrite, options?.version);
39
- process.exit(0);
40
- }
41
-
42
- export async function scrapePageAutomatically(argv: any) {
43
- const href = getHrefFromArgs(argv);
44
- const res = await axios.get(href);
45
- const html = res.data;
46
- const { framework, version } = detectFramework(html);
47
-
48
- validateFramework(framework);
49
-
50
- console.log("Detected framework: " + framework);
51
-
52
- switch (framework) {
53
- case Frameworks.DOCUSAURUS:
54
- await scrapePageWrapper(argv, scrapeDocusaurusPage, { version });
55
- break;
56
- case Frameworks.GITBOOK:
57
- await scrapePageWrapper(argv, scrapeGitBookPage, { puppeteer: true });
58
- break;
59
- case Frameworks.README:
60
- await scrapePageWrapper(argv, scrapeReadMePage);
61
- break;
62
- case Frameworks.INTERCOM:
63
- await scrapePageWrapper(argv, scrapeIntercomPage);
64
- break;
65
- }
66
- }