mintlify 1.1.5 → 1.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (224)
  1. package/CONTRIBUTING.md +5 -0
  2. package/bin/index.js +1 -8
  3. package/bin/index.js.map +1 -1
  4. package/bin/local-preview/index.js +4 -6
  5. package/bin/local-preview/index.js.map +1 -1
  6. package/bin/local-preview/injectNav.js +94 -0
  7. package/bin/local-preview/injectNav.js.map +1 -0
  8. package/bin/local-preview/utils/categorizeFiles.js +10 -3
  9. package/bin/local-preview/utils/categorizeFiles.js.map +1 -1
  10. package/bin/local-preview/utils/getOpenApiContext.js +21 -9
  11. package/bin/local-preview/utils/getOpenApiContext.js.map +1 -1
  12. package/bin/local-preview/utils/listener.js +3 -2
  13. package/bin/local-preview/utils/listener.js.map +1 -1
  14. package/bin/local-preview/utils/metadata.js +4 -8
  15. package/bin/local-preview/utils/metadata.js.map +1 -1
  16. package/bin/local-preview/utils/openApiCheck.js +3 -4
  17. package/bin/local-preview/utils/openApiCheck.js.map +1 -1
  18. package/bin/mint/client/.babel-plugin-macrosrc.json +5 -0
  19. package/bin/mint/client/.babelrc +4 -0
  20. package/bin/mint/client/.editorconfig +12 -0
  21. package/bin/mint/client/.eslintrc.json +7 -0
  22. package/bin/mint/client/.prettierignore +4 -0
  23. package/bin/mint/client/.prettierrc +14 -0
  24. package/bin/mint/client/.vscode/launch.json +28 -0
  25. package/bin/mint/client/README.md +44 -0
  26. package/bin/mint/client/jest.config.ts +195 -0
  27. package/bin/mint/client/next-env.d.ts +4 -0
  28. package/bin/mint/client/next.config.js +152 -0
  29. package/bin/mint/client/package.json +139 -0
  30. package/bin/mint/client/postcss.config.cjs +9 -0
  31. package/bin/mint/client/prebuild/faviconConfig.js +35 -0
  32. package/bin/mint/client/prebuild/getOpenApiContext.js +53 -0
  33. package/bin/mint/client/prebuild/index.js +117 -0
  34. package/bin/mint/client/prebuild/injectNav.js +115 -0
  35. package/bin/mint/client/prebuild/slugToTitle.js +7 -0
  36. package/bin/mint/client/rehype/withApiComponents.js +60 -0
  37. package/bin/mint/client/rehype/withCodeBlocks.js +54 -0
  38. package/bin/mint/client/rehype/withLayouts.js +113 -0
  39. package/bin/mint/client/rehype/withLinkRoles.js +13 -0
  40. package/bin/mint/client/rehype/withRawComponents.js +13 -0
  41. package/bin/mint/client/rehype/withStaticProps.js +25 -0
  42. package/bin/mint/client/rehype/withSyntaxHighlighting.js +60 -0
  43. package/bin/mint/client/remark/utils.js +369 -0
  44. package/bin/mint/client/remark/withFrames.js +55 -0
  45. package/bin/mint/client/remark/withImportsInjected.js +36 -0
  46. package/bin/mint/client/remark/withNextLinks.js +37 -0
  47. package/bin/mint/client/remark/withTableOfContents.js +71 -0
  48. package/bin/mint/client/scripts/local.js +177 -0
  49. package/bin/mint/client/sentry.client.config.js +15 -0
  50. package/bin/mint/client/sentry.properties +4 -0
  51. package/bin/mint/client/sentry.server.config.js +15 -0
  52. package/bin/mint/client/src/analytics/AbstractAnalyticsImplementation.ts +50 -0
  53. package/bin/mint/client/src/analytics/AnalyticsContext.ts +5 -0
  54. package/bin/mint/client/src/analytics/AnalyticsMediator.ts +101 -0
  55. package/bin/mint/client/src/analytics/FakeAnalyticsMediator.ts +9 -0
  56. package/bin/mint/client/src/analytics/GA4Script.tsx +33 -0
  57. package/bin/mint/client/src/analytics/implementations/amplitude.ts +26 -0
  58. package/bin/mint/client/src/analytics/implementations/fathom.ts +38 -0
  59. package/bin/mint/client/src/analytics/implementations/ga4.ts +33 -0
  60. package/bin/mint/client/src/analytics/implementations/hotjar.ts +53 -0
  61. package/bin/mint/client/src/analytics/implementations/mixpanel-browser.d.ts +1 -0
  62. package/bin/mint/client/src/analytics/implementations/mixpanel.ts +52 -0
  63. package/bin/mint/client/src/analytics/implementations/posthog.ts +37 -0
  64. package/bin/mint/client/src/components/Accordion/Accordion.tsx +43 -0
  65. package/bin/mint/client/src/components/Accordion/index.ts +4 -0
  66. package/bin/mint/client/src/components/ApiExample.tsx +9 -0
  67. package/bin/mint/client/src/components/Card.tsx +51 -0
  68. package/bin/mint/client/src/components/CodeGroup.tsx +132 -0
  69. package/bin/mint/client/src/components/Editor.tsx +12 -0
  70. package/bin/mint/client/src/components/Expandable.tsx +40 -0
  71. package/bin/mint/client/src/components/Heading.tsx +84 -0
  72. package/bin/mint/client/src/components/Param.tsx +56 -0
  73. package/bin/mint/client/src/components/Request.tsx +19 -0
  74. package/bin/mint/client/src/components/ResponseField.tsx +33 -0
  75. package/bin/mint/client/src/components/TabBar.tsx +61 -0
  76. package/bin/mint/client/src/config.ts +115 -0
  77. package/bin/mint/client/src/css/bar-of-progress.css +10 -0
  78. package/bin/mint/client/src/css/base.css +29 -0
  79. package/bin/mint/client/src/css/font-awesome.css +7 -0
  80. package/bin/mint/client/src/css/fonts.css +44 -0
  81. package/bin/mint/client/src/css/main.css +11 -0
  82. package/bin/mint/client/src/css/prism.css +270 -0
  83. package/bin/mint/client/src/css/utilities.css +43 -0
  84. package/bin/mint/client/src/enums/components.ts +8 -0
  85. package/bin/mint/client/src/fonts/FiraCode-VF.woff +0 -0
  86. package/bin/mint/client/src/fonts/FiraCode-VF.woff2 +0 -0
  87. package/bin/mint/client/src/fonts/IBMPlexMono-Regular.ttf +0 -0
  88. package/bin/mint/client/src/fonts/IBMPlexMono-SemiBold.ttf +0 -0
  89. package/bin/mint/client/src/fonts/Inter-italic-latin.var.woff2 +0 -0
  90. package/bin/mint/client/src/fonts/Inter-roman-latin.var.woff2 +0 -0
  91. package/bin/mint/client/src/fonts/Pally-Variable.ttf +0 -0
  92. package/bin/mint/client/src/fonts/SourceSansPro-Regular.otf +0 -0
  93. package/bin/mint/client/src/fonts/SourceSerifPro-Regular.ttf +0 -0
  94. package/bin/mint/client/src/fonts/Synonym-Variable.ttf +0 -0
  95. package/bin/mint/client/src/fonts/Ubuntu-Mono-bold.woff2 +0 -0
  96. package/bin/mint/client/src/fonts/generated/IBMPlexMono-Regular-subset.woff2 +0 -0
  97. package/bin/mint/client/src/fonts/generated/IBMPlexMono-Regular-subset.zopfli.woff +0 -0
  98. package/bin/mint/client/src/fonts/generated/IBMPlexMono-Regular.module.css +11 -0
  99. package/bin/mint/client/src/fonts/generated/IBMPlexMono-SemiBold-subset.woff2 +0 -0
  100. package/bin/mint/client/src/fonts/generated/IBMPlexMono-SemiBold-subset.zopfli.woff +0 -0
  101. package/bin/mint/client/src/fonts/generated/IBMPlexMono-SemiBold.module.css +11 -0
  102. package/bin/mint/client/src/fonts/generated/Pally-Variable-subset.woff2 +0 -0
  103. package/bin/mint/client/src/fonts/generated/Pally-Variable-subset.zopfli.woff +0 -0
  104. package/bin/mint/client/src/fonts/generated/Pally-Variable.module.css +11 -0
  105. package/bin/mint/client/src/fonts/generated/SourceSerifPro-Regular-subset.woff2 +0 -0
  106. package/bin/mint/client/src/fonts/generated/SourceSerifPro-Regular-subset.zopfli.woff +0 -0
  107. package/bin/mint/client/src/fonts/generated/SourceSerifPro-Regular.module.css +11 -0
  108. package/bin/mint/client/src/fonts/generated/Synonym-Variable-subset.woff2 +0 -0
  109. package/bin/mint/client/src/fonts/generated/Synonym-Variable-subset.zopfli.woff +0 -0
  110. package/bin/mint/client/src/fonts/generated/Synonym-Variable.module.css +11 -0
  111. package/bin/mint/client/src/fonts/generated/TenorSans-Regular-subset.woff2 +0 -0
  112. package/bin/mint/client/src/fonts/generated/TenorSans-Regular-subset.zopfli.woff +0 -0
  113. package/bin/mint/client/src/fonts/generated/TenorSans-Regular.module.css +11 -0
  114. package/bin/mint/client/src/hooks/useActionKey.ts +20 -0
  115. package/bin/mint/client/src/hooks/useIsomorphicLayoutEffect.ts +3 -0
  116. package/bin/mint/client/src/hooks/useMedia.ts +27 -0
  117. package/bin/mint/client/src/hooks/usePrevNext.ts +34 -0
  118. package/bin/mint/client/src/hooks/useTop.ts +15 -0
  119. package/bin/mint/client/src/icons/CopyToClipboard.tsx +33 -0
  120. package/bin/mint/client/src/index.d.ts +1 -0
  121. package/bin/mint/client/src/layouts/ApiSupplemental.tsx +173 -0
  122. package/bin/mint/client/src/layouts/ContentsLayout.tsx +256 -0
  123. package/bin/mint/client/src/layouts/DocumentationLayout.tsx +44 -0
  124. package/bin/mint/client/src/layouts/OpenApiContent.tsx +301 -0
  125. package/bin/mint/client/src/layouts/SidebarLayout.tsx +412 -0
  126. package/bin/mint/client/src/layouts/UserFeedback.tsx +73 -0
  127. package/bin/mint/client/src/layouts/getGroupsInDivision.ts +25 -0
  128. package/bin/mint/client/src/layouts/isPathInGroupPages.ts +10 -0
  129. package/bin/mint/client/src/metadata.ts +58 -0
  130. package/bin/mint/client/src/nav.json +219 -0
  131. package/bin/mint/client/src/openapi.ts +3 -0
  132. package/bin/mint/client/src/pages/404.tsx +73 -0
  133. package/bin/mint/client/src/pages/_app.tsx +138 -0
  134. package/bin/mint/client/src/pages/_document.tsx +57 -0
  135. package/bin/mint/client/src/pages/api/issue.ts +10 -0
  136. package/bin/mint/client/src/pages/api/name.ts +8 -0
  137. package/bin/mint/client/src/pages/api/request.ts +31 -0
  138. package/bin/mint/client/src/pages/api/suggest.ts +10 -0
  139. package/bin/mint/client/src/pages/api/syntax-highlighted-json.ts +13 -0
  140. package/bin/mint/client/src/pages/api/utils.ts +6 -0
  141. package/bin/mint/client/src/pages/index.tsx +31 -0
  142. package/bin/mint/client/src/ui/Api.tsx +359 -0
  143. package/bin/mint/client/src/ui/Footer.tsx +124 -0
  144. package/bin/mint/client/src/ui/Header.tsx +370 -0
  145. package/bin/mint/client/src/ui/Logo.tsx +55 -0
  146. package/bin/mint/client/src/ui/PageHeader.tsx +51 -0
  147. package/bin/mint/client/src/ui/Search.tsx +386 -0
  148. package/bin/mint/client/src/ui/ThemeToggle.tsx +285 -0
  149. package/bin/mint/client/src/ui/Title.tsx +22 -0
  150. package/bin/mint/client/src/ui/TopLevelLink.tsx +122 -0
  151. package/bin/mint/client/src/utils/api.ts +252 -0
  152. package/bin/mint/client/src/utils/brands.ts +217 -0
  153. package/bin/mint/client/src/utils/castArray.ts +3 -0
  154. package/bin/mint/client/src/utils/childrenArray.ts +3 -0
  155. package/bin/mint/client/src/utils/fit.ts +27 -0
  156. package/bin/mint/client/src/utils/fontAwesome.ts +577 -0
  157. package/bin/mint/client/src/utils/getAnalyticsConfig.ts +14 -0
  158. package/bin/mint/client/src/utils/getLogoHref.ts +9 -0
  159. package/bin/mint/client/src/utils/getOpenApiContext.ts +26 -0
  160. package/bin/mint/client/src/utils/importAll.ts +6 -0
  161. package/bin/mint/client/src/utils/isObject.ts +3 -0
  162. package/bin/mint/client/src/utils/kebabToTitleCase.ts +3 -0
  163. package/bin/mint/client/src/utils/loadImage.ts +8 -0
  164. package/bin/mint/client/src/utils/slugToTitle.ts +7 -0
  165. package/bin/mint/client/src/utils/wait.ts +5 -0
  166. package/bin/mint/client/tailwind.config.cjs +323 -0
  167. package/bin/mint/client/test/test.test.ts +5 -0
  168. package/bin/mint/client/tsconfig.json +36 -0
  169. package/bin/mint/client/yarn.lock +9702 -0
  170. package/bin/scraping/detectFramework.js +12 -3
  171. package/bin/scraping/detectFramework.js.map +1 -1
  172. package/bin/scraping/scrapeFileGettingFileNameFromUrl.js +2 -2
  173. package/bin/scraping/scrapeFileGettingFileNameFromUrl.js.map +1 -1
  174. package/bin/scraping/scrapeGettingFileNameFromUrl.js +3 -3
  175. package/bin/scraping/scrapeGettingFileNameFromUrl.js.map +1 -1
  176. package/bin/scraping/scrapePage.js +2 -2
  177. package/bin/scraping/scrapePage.js.map +1 -1
  178. package/bin/scraping/scrapePageCommands.js +6 -6
  179. package/bin/scraping/scrapePageCommands.js.map +1 -1
  180. package/bin/scraping/scrapeSection.js +2 -2
  181. package/bin/scraping/scrapeSection.js.map +1 -1
  182. package/bin/scraping/scrapeSectionCommands.js +8 -7
  183. package/bin/scraping/scrapeSectionCommands.js.map +1 -1
  184. package/bin/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.js +36 -0
  185. package/bin/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.js.map +1 -0
  186. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursively.js +38 -0
  187. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursively.js.map +1 -0
  188. package/bin/scraping/site-scrapers/scrapeDocusaurusPage.js +14 -8
  189. package/bin/scraping/site-scrapers/scrapeDocusaurusPage.js.map +1 -1
  190. package/bin/scraping/site-scrapers/scrapeDocusaurusSection.js +4 -29
  191. package/bin/scraping/site-scrapers/scrapeDocusaurusSection.js.map +1 -1
  192. package/bin/scraping/site-scrapers/scrapeGitBookPage.js +2 -1
  193. package/bin/scraping/site-scrapers/scrapeGitBookPage.js.map +1 -1
  194. package/bin/scraping/site-scrapers/scrapeGitBookSection.js +3 -3
  195. package/bin/scraping/site-scrapers/scrapeGitBookSection.js.map +1 -1
  196. package/bin/scraping/site-scrapers/scrapeReadMePage.js +2 -1
  197. package/bin/scraping/site-scrapers/scrapeReadMePage.js.map +1 -1
  198. package/bin/scraping/site-scrapers/scrapeReadMeSection.js +3 -3
  199. package/bin/scraping/site-scrapers/scrapeReadMeSection.js.map +1 -1
  200. package/package.json +1 -1
  201. package/src/index.ts +0 -16
  202. package/src/local-preview/index.ts +4 -6
  203. package/src/local-preview/utils/categorizeFiles.ts +12 -4
  204. package/src/local-preview/utils/getOpenApiContext.ts +27 -11
  205. package/src/local-preview/utils/listener.ts +9 -4
  206. package/src/local-preview/utils/metadata.ts +4 -8
  207. package/src/local-preview/utils/openApiCheck.ts +4 -5
  208. package/src/scraping/detectFramework.ts +13 -3
  209. package/src/scraping/scrapeFileGettingFileNameFromUrl.ts +5 -2
  210. package/src/scraping/scrapeGettingFileNameFromUrl.ts +5 -1
  211. package/src/scraping/scrapePage.ts +6 -3
  212. package/src/scraping/scrapePageCommands.ts +10 -6
  213. package/src/scraping/scrapeSection.ts +9 -2
  214. package/src/scraping/scrapeSectionCommands.ts +24 -7
  215. package/src/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.ts +46 -0
  216. package/src/scraping/site-scrapers/{getLinksRecursively.ts → links-per-group/getLinksRecursively.ts} +0 -0
  217. package/src/scraping/site-scrapers/scrapeDocusaurusPage.ts +20 -8
  218. package/src/scraping/site-scrapers/scrapeDocusaurusSection.ts +9 -33
  219. package/src/scraping/site-scrapers/scrapeGitBookPage.ts +2 -1
  220. package/src/scraping/site-scrapers/scrapeGitBookSection.ts +5 -3
  221. package/src/scraping/site-scrapers/scrapeReadMePage.ts +2 -1
  222. package/src/scraping/site-scrapers/scrapeReadMeSection.ts +4 -2
  223. package/bin/local-preview/helper-commands/cleanCommand.js +0 -8
  224. package/bin/local-preview/helper-commands/cleanCommand.js.map +0 -1
@@ -44,18 +44,16 @@ const { readFile } = _promises;
44
44
  const copyFiles = async (logger: any) => {
45
45
  logger.start("Syncing doc files...");
46
46
  shell.cd(CMD_EXEC_PATH);
47
- const { markdownFiles, staticFiles, openApiBuffer } = await categorizeFiles();
47
+ const { markdownFiles, staticFiles, openApi } = await categorizeFiles();
48
48
 
49
49
  const configObj = await updateConfigFile(logger);
50
50
 
51
51
  const openApiTargetPath = path.join(CLIENT_PATH, "src", "openapi.json");
52
- let openApiObj = null;
53
- if (openApiBuffer) {
52
+ if (openApi) {
54
53
  logger.succeed("OpenApi file synced");
55
- await fse.outputFile(openApiTargetPath, Buffer.from(openApiBuffer), {
54
+ await fse.outputFile(openApiTargetPath, JSON.stringify(openApi), {
56
55
  flag: "w",
57
56
  });
58
- openApiObj = JSON.parse(openApiBuffer.toString());
59
57
  } else {
60
58
  await fse.outputFile(openApiTargetPath, "{}", { flag: "w" });
61
59
  }
@@ -73,7 +71,7 @@ const copyFiles = async (logger: any) => {
73
71
 
74
72
  const fileContent = await readFile(sourcePath);
75
73
  const contentStr = fileContent.toString();
76
- const page = createPage(filename, contentStr, openApiObj);
74
+ const page = createPage(filename, contentStr, openApi);
77
75
  pages = {
78
76
  ...pages,
79
77
  ...page,
@@ -27,13 +27,13 @@ const getFileList = async (dirName: string, og = dirName) => {
27
27
  const categorizeFiles = async (): Promise<{
28
28
  markdownFiles: string[];
29
29
  staticFiles: string[];
30
- openApiBuffer: Buffer | undefined;
30
+ openApi: object | undefined;
31
31
  }> => {
32
32
  const allFilesInCmdExecutionPath = await getFileList(CMD_EXEC_PATH);
33
33
  const markdownFiles = [];
34
34
  const staticFiles = [];
35
35
  const promises = [];
36
- let openApiBuffer = undefined;
36
+ const openApiFiles = [];
37
37
  allFilesInCmdExecutionPath.forEach((file) => {
38
38
  promises.push(
39
39
  (async () => {
@@ -53,7 +53,11 @@ const categorizeFiles = async (): Promise<{
53
53
  );
54
54
  isOpenApi = openApiInfo.isOpenApi;
55
55
  if (isOpenApi) {
56
- openApiBuffer = openApiInfo.buffer;
56
+ const fileName = path.parse(file.name).base;
57
+ openApiFiles.push({
58
+ name: fileName.substring(0, fileName.lastIndexOf(".")),
59
+ openapi: openApiInfo.openapi,
60
+ });
57
61
  }
58
62
  } else if (
59
63
  (!file.name.endsWith("mint.config.json") ||
@@ -68,7 +72,11 @@ const categorizeFiles = async (): Promise<{
68
72
  });
69
73
  await Promise.all(promises);
70
74
 
71
- return { markdownFiles, staticFiles, openApiBuffer };
75
+ const openApi = {
76
+ files: openApiFiles,
77
+ };
78
+
79
+ return { markdownFiles, staticFiles, openApi };
72
80
  };
73
81
 
74
82
  export default categorizeFiles;
@@ -1,25 +1,41 @@
1
- export const extractMethodAndEndpoint = (api) => {
2
- const methodRegex = /^get|post|put|delete|patch/i;
1
+ export const extractMethodAndEndpoint = (
2
+ api: string
3
+ ): { method?: string; endpoint: string; filename?: string } => {
4
+ const methodRegex = /(get|post|put|delete|patch)\s/i;
3
5
  const trimmed = api.trim();
4
6
  const foundMethod = trimmed.match(methodRegex);
5
7
 
8
+ const startIndexOfMethod = foundMethod ? api.indexOf(foundMethod[0]) : 0;
6
9
  const endIndexOfMethod = foundMethod
7
- ? api.indexOf(foundMethod[0]) + foundMethod[0].length
10
+ ? startIndexOfMethod + foundMethod[0].length - 1
8
11
  : 0;
9
12
 
13
+ const filename = api.substring(0, startIndexOfMethod).trim();
14
+
10
15
  return {
11
- method: foundMethod ? foundMethod[0].toUpperCase() : undefined,
16
+ method: foundMethod ? foundMethod[0].slice(0, -1).toUpperCase() : undefined,
12
17
  endpoint: api.substring(endIndexOfMethod).trim(),
18
+ filename: filename ? filename : undefined,
13
19
  };
14
20
  };
15
21
 
16
22
  export const getOpenApiOperationMethodAndEndpoint = (
17
- openApiObj,
18
- openApiMetaField
23
+ openApi: any,
24
+ openApiMetaField: string
19
25
  ) => {
20
- const { endpoint, method } = extractMethodAndEndpoint(openApiMetaField);
26
+ const { endpoint, method, filename } =
27
+ extractMethodAndEndpoint(openApiMetaField);
28
+
29
+ let path: any;
21
30
 
22
- const path = openApiObj?.paths && openApiObj.paths[endpoint];
31
+ openApi.files?.forEach((file: any) => {
32
+ const openApiFile = file.openapi;
33
+ const openApiPath = openApiFile.paths && openApiFile.paths[endpoint];
34
+ const isFilenameOrNone = !filename || filename === file.name;
35
+ if (openApiPath && isFilenameOrNone) {
36
+ path = openApiPath;
37
+ }
38
+ });
23
39
 
24
40
  if (path == null) {
25
41
  return {};
@@ -40,13 +56,13 @@ export const getOpenApiOperationMethodAndEndpoint = (
40
56
  };
41
57
  };
42
58
 
43
- export const getOpenApiTitleAndDescription = (openApiObj, openApiMetaField) => {
44
- if (openApiObj == null || !openApiMetaField || openApiMetaField == null) {
59
+ export const getOpenApiTitleAndDescription = (openApi, openApiMetaField) => {
60
+ if (openApi == null || !openApiMetaField || openApiMetaField == null) {
45
61
  return {};
46
62
  }
47
63
 
48
64
  const { operation } = getOpenApiOperationMethodAndEndpoint(
49
- openApiObj,
65
+ openApi,
50
66
  openApiMetaField
51
67
  );
52
68
 
@@ -81,13 +81,18 @@ const listener = () => {
81
81
  );
82
82
  isOpenApi = openApiInfo.isOpenApi;
83
83
  if (isOpenApi) {
84
- await fse.outputFile(path.join(CLIENT_PATH, "src", "openapi.json"), Buffer.from(openApiInfo.buffer), {
85
- flag: "w",
86
- });
84
+ await fse.outputFile(
85
+ path.join(CLIENT_PATH, "src", "openapi.json"),
86
+ JSON.stringify(openApiInfo.openapi),
87
+ {
88
+ flag: "w",
89
+ }
90
+ );
87
91
  updateMetadata = true;
88
92
  }
89
93
  }
90
- if (!isOpenApi) { // all other files
94
+ if (!isOpenApi) {
95
+ // all other files
91
96
  const targetPath = path.join(CLIENT_PATH, "public", filename);
92
97
  await fse.copy(filePath, targetPath);
93
98
  }
@@ -54,14 +54,14 @@ const getMetadata = (fileContents: string) => {
54
54
  export const createPage = (
55
55
  path: string,
56
56
  content: string,
57
- openApiObj: object | null
57
+ openApi: object | null
58
58
  ) => {
59
59
  const slug = path.replace(/\.mdx?$/, "").substring(1);
60
60
  let defaultTitle = slugToTitle(slug);
61
61
  const metadata = getMetadata(content);
62
62
  // Append data from OpenAPI if it exists
63
63
  const { title, description } = getOpenApiTitleAndDescription(
64
- openApiObj,
64
+ openApi,
65
65
  metadata?.openapi
66
66
  );
67
67
  if (title) {
@@ -127,11 +127,7 @@ export const createMetadataFileFromPages = (pages: any, configObj: any) => {
127
127
 
128
128
  export const createMetadataFile = async () => {
129
129
  // create pages
130
- const { markdownFiles, openApiBuffer } = await categorizeFiles();
131
- let openApiObj = null;
132
- if (openApiBuffer) {
133
- openApiObj = JSON.parse(openApiBuffer.toString());
134
- }
130
+ const { markdownFiles, openApi } = await categorizeFiles();
135
131
  // create config object
136
132
  const configObj = await getConfigObj();
137
133
  let pages = {};
@@ -142,7 +138,7 @@ export const createMetadataFile = async () => {
142
138
  const sourcePath = path.join(CMD_EXEC_PATH, filename);
143
139
  const fileContent = await readFile(sourcePath);
144
140
  const contentStr = fileContent.toString();
145
- const page = createPage(filename, contentStr, openApiObj);
141
+ const page = createPage(filename, contentStr, openApi);
146
142
  pages = {
147
143
  ...pages,
148
144
  ...page,
@@ -3,17 +3,16 @@ import { promises as _promises } from "fs";
3
3
 
4
4
  const openApiCheck = async (
5
5
  path: string
6
- ): Promise<{ buffer: Buffer | undefined; isOpenApi: boolean }> => {
7
- let buffer = undefined;
6
+ ): Promise<{ openapi: any; isOpenApi: boolean }> => {
7
+ let openapi;
8
8
  let isOpenApi = false;
9
9
  try {
10
- const api = await SwaggerParser.validate(path);
11
- buffer = Buffer.from(JSON.stringify(api, null, 2), "utf-8");
10
+ openapi = await SwaggerParser.validate(path);
12
11
  isOpenApi = true;
13
12
  } catch {
14
13
  // not valid openApi
15
14
  }
16
- return { buffer, isOpenApi };
15
+ return { openapi, isOpenApi };
17
16
  };
18
17
 
19
18
  export default openApiCheck;
@@ -14,17 +14,27 @@ export function detectFramework(html) {
14
14
  docusaurusMeta.length > 0 &&
15
15
  docusaurusMeta.attr("content").includes("Docusaurus")
16
16
  ) {
17
- return Frameworks.DOCUSAURUS;
17
+ if (docusaurusMeta.attr("content").includes("v3")) {
18
+ return { framework: Frameworks.DOCUSAURUS, version: "3" };
19
+ }
20
+ if (docusaurusMeta.attr("content").includes("v2")) {
21
+ return { framework: Frameworks.DOCUSAURUS, version: "2" };
22
+ } else if (docusaurusMeta.attr("content").includes("v1")) {
23
+ console.warn(
24
+ "WARNING: We detected Docusaurus version 1 but we only support scraping versions 2 and 3."
25
+ );
26
+ return { framework: Frameworks.DOCUSAURUS, version: "1" };
27
+ }
18
28
  }
19
29
 
20
30
  const isGitBook = $(".gitbook-root").length > 0;
21
31
  if (isGitBook) {
22
- return Frameworks.GITBOOK;
32
+ return { framework: Frameworks.GITBOOK };
23
33
  }
24
34
 
25
35
  const isReadMe = $('meta[name="readme-deploy"]').length > 0;
26
36
  if (isReadMe) {
27
- return Frameworks.README;
37
+ return { framework: Frameworks.README };
28
38
  }
29
39
 
30
40
  return undefined;
@@ -12,13 +12,15 @@ export async function scrapeFileGettingFileNameFromUrl(
12
12
  html: string,
13
13
  origin: string,
14
14
  cliDir: string,
15
- imageBaseDir: string
15
+ imageBaseDir: string,
16
+ version: string | undefined
16
17
  ) => Promise<{
17
18
  title?: string;
18
19
  description?: string;
19
20
  markdown?: string;
20
21
  }>,
21
22
  puppeteer = false,
23
+ version: string | undefined,
22
24
  baseToRemove?: string
23
25
  ) {
24
26
  // Skip scraping external links
@@ -53,7 +55,8 @@ export async function scrapeFileGettingFileNameFromUrl(
53
55
  html,
54
56
  origin,
55
57
  cliDir,
56
- imageBaseDir
58
+ imageBaseDir,
59
+ version
57
60
  );
58
61
 
59
62
  // Check if page didn't have content
@@ -10,13 +10,15 @@ export async function scrapeGettingFileNameFromUrl(
10
10
  html: string,
11
11
  origin: string,
12
12
  cliDir: string,
13
- imageBaseDir: string
13
+ imageBaseDir: string,
14
+ version: string | undefined
14
15
  ) => Promise<{
15
16
  title?: string;
16
17
  description?: string;
17
18
  markdown?: string;
18
19
  }>,
19
20
  puppeteer = false,
21
+ version: string | undefined,
20
22
  baseToRemove?: string
21
23
  ): Promise<NavigationEntry> {
22
24
  if (isNavigation(navEntry)) {
@@ -30,6 +32,7 @@ export async function scrapeGettingFileNameFromUrl(
30
32
  overwrite,
31
33
  scrapePageFunc,
32
34
  puppeteer,
35
+ version,
33
36
  baseToRemove
34
37
  )
35
38
  );
@@ -45,6 +48,7 @@ export async function scrapeGettingFileNameFromUrl(
45
48
  overwrite,
46
49
  scrapePageFunc,
47
50
  puppeteer,
51
+ version,
48
52
  baseToRemove
49
53
  );
50
54
  }
@@ -6,11 +6,13 @@ export async function scrapePage(
6
6
  html: string,
7
7
  origin: string,
8
8
  cliDir: string,
9
- imageBaseDir: string
9
+ imageBaseDir: string,
10
+ version: string | undefined
10
11
  ) => Promise<any>,
11
12
  href: string,
12
13
  html: string,
13
- overwrite: boolean
14
+ overwrite: boolean,
15
+ version: string | undefined
14
16
  ) {
15
17
  const origin = getOrigin(href);
16
18
  const imageBaseDir = path.join(process.cwd(), "images");
@@ -18,7 +20,8 @@ export async function scrapePage(
18
20
  html,
19
21
  origin,
20
22
  process.cwd(),
21
- imageBaseDir
23
+ imageBaseDir,
24
+ version
22
25
  );
23
26
  createPage(title, description, markdown, overwrite, process.cwd());
24
27
  }
@@ -19,16 +19,20 @@ function validateFramework(framework) {
19
19
  }
20
20
  }
21
21
 
22
- export async function scrapePageWrapper(argv, scrapeFunc, puppeteer = false) {
22
+ export async function scrapePageWrapper(
23
+ argv: any,
24
+ scrapeFunc: any,
25
+ options?: { version?: string; puppeteer?: boolean }
26
+ ) {
23
27
  const href = getHrefFromArgs(argv);
24
28
  let html: string;
25
- if (puppeteer) {
29
+ if (options.puppeteer) {
26
30
  html = await getHtmlWithPuppeteer(href);
27
31
  } else {
28
32
  const res = await axios.default.get(href);
29
33
  html = res.data;
30
34
  }
31
- await scrapePage(scrapeFunc, href, html, argv.overwrite);
35
+ await scrapePage(scrapeFunc, href, html, argv.overwrite, options.version);
32
36
  process.exit(0);
33
37
  }
34
38
 
@@ -36,16 +40,16 @@ export async function scrapePageAutomatically(argv: any) {
36
40
  const href = getHrefFromArgs(argv);
37
41
  const res = await axios.default.get(href);
38
42
  const html = res.data;
39
- const framework = detectFramework(html);
43
+ const { framework, version } = detectFramework(html);
40
44
 
41
45
  validateFramework(framework);
42
46
 
43
47
  console.log("Detected framework: " + framework);
44
48
 
45
49
  if (framework === Frameworks.DOCUSAURUS) {
46
- await scrapePageWrapper(argv, scrapeDocusaurusPage);
50
+ await scrapePageWrapper(argv, scrapeDocusaurusPage, { version });
47
51
  } else if (framework === Frameworks.GITBOOK) {
48
- await scrapePageWrapper(argv, scrapeGitBookPage, true);
52
+ await scrapePageWrapper(argv, scrapeGitBookPage, { puppeteer: true });
49
53
  } else if (framework === Frameworks.README) {
50
54
  await scrapePageWrapper(argv, scrapeReadMePage);
51
55
  }
@@ -4,12 +4,19 @@ export async function scrapeSection(
4
4
  scrapeFunc: any,
5
5
  html: string,
6
6
  origin: string,
7
- overwrite: boolean
7
+ overwrite: boolean,
8
+ version: string | undefined
8
9
  ) {
9
10
  console.log(
10
11
  `Started scraping${overwrite ? ", overwrite mode is on" : ""}...`
11
12
  );
12
- const groupsConfig = await scrapeFunc(html, origin, process.cwd(), overwrite);
13
+ const groupsConfig = await scrapeFunc(
14
+ html,
15
+ origin,
16
+ process.cwd(),
17
+ overwrite,
18
+ version
19
+ );
13
20
  console.log("Finished scraping.");
14
21
  console.log("Add the following to your navigation in mint.json:");
15
22
  console.log(objToReadableString(groupsConfig));
@@ -13,15 +13,25 @@ export async function scrapeSectionAxiosWrapper(argv: any, scrapeFunc: any) {
13
13
  const href = getHrefFromArgs(argv);
14
14
  const res = await axios.default.get(href);
15
15
  const html = res.data;
16
- await scrapeSection(scrapeFunc, html, getOrigin(href), argv.overwrite);
16
+ await scrapeSection(
17
+ scrapeFunc,
18
+ html,
19
+ getOrigin(href),
20
+ argv.overwrite,
21
+ undefined
22
+ );
17
23
  process.exit(0);
18
24
  }
19
25
 
20
- export async function scrapeDocusaurusSectionCommand(argv: any) {
26
+ export async function scrapeDocusaurusSectionCommand(
27
+ argv: any,
28
+ version: string // "1" | "2" | "3"
29
+ ) {
21
30
  await scrapeSectionOpeningAllNested(
22
31
  argv,
23
32
  openNestedDocusaurusMenus,
24
- scrapeDocusaurusSection
33
+ scrapeDocusaurusSection,
34
+ version
25
35
  );
26
36
  }
27
37
 
@@ -36,7 +46,8 @@ export async function scrapeGitbookSectionCommand(argv: any) {
36
46
  async function scrapeSectionOpeningAllNested(
37
47
  argv: any,
38
48
  openLinks: any,
39
- scrapeFunc: any
49
+ scrapeFunc: any,
50
+ version?: string
40
51
  ) {
41
52
  const href = getHrefFromArgs(argv);
42
53
 
@@ -48,7 +59,13 @@ async function scrapeSectionOpeningAllNested(
48
59
 
49
60
  const html = await openLinks(page);
50
61
  browser.close();
51
- await scrapeSection(scrapeFunc, html, getOrigin(href), argv.overwrite);
62
+ await scrapeSection(
63
+ scrapeFunc,
64
+ html,
65
+ getOrigin(href),
66
+ argv.overwrite,
67
+ version
68
+ );
52
69
  process.exit(0);
53
70
  }
54
71
 
@@ -56,14 +73,14 @@ export async function scrapeSectionAutomatically(argv: any) {
56
73
  const href = getHrefFromArgs(argv);
57
74
  const res = await axios.default.get(href);
58
75
  const html = res.data;
59
- const framework = detectFramework(html);
76
+ const { framework, version } = detectFramework(html);
60
77
 
61
78
  validateFramework(framework);
62
79
 
63
80
  console.log("Detected framework: " + framework);
64
81
 
65
82
  if (framework === Frameworks.DOCUSAURUS) {
66
- await scrapeDocusaurusSectionCommand(argv);
83
+ await scrapeDocusaurusSectionCommand(argv, version);
67
84
  } else if (framework === Frameworks.GITBOOK) {
68
85
  await scrapeGitbookSectionCommand(argv);
69
86
  } else if (framework === Frameworks.README) {
@@ -0,0 +1,46 @@
1
+ import alternateGroupTitle from "../alternateGroupTitle.js";
2
+ import getLinksRecursively from "./getLinksRecursively.js";
3
+
4
+ export function getDocusaurusLinksPerGroup(
5
+ navigationSections: any,
6
+ $: any,
7
+ version: string | undefined
8
+ ) {
9
+ if (version === "3" || version === "2") {
10
+ return getDocusaurusLinksPerGroupLoop(navigationSections, $);
11
+ }
12
+ return [];
13
+ }
14
+
15
+ function getDocusaurusLinksPerGroupLoop(navigationSections: any, $: any) {
16
+ return navigationSections
17
+ .map((i, s) => {
18
+ const section = $(s);
19
+
20
+ // Links without a group
21
+ if (section.hasClass("theme-doc-sidebar-item-link")) {
22
+ const linkHref = section.find("a[href]").first().attr("href");
23
+ return {
24
+ group: "",
25
+ pages: [linkHref],
26
+ };
27
+ }
28
+
29
+ const firstLink = section
30
+ .find(".menu__list-item-collapsible")
31
+ .first()
32
+ .find("a[href]");
33
+
34
+ const sectionTitle = firstLink.text();
35
+ const firstHref = firstLink.attr("href");
36
+ const linkSections = section.children().eq(1).children();
37
+
38
+ const pages = getLinksRecursively(linkSections, $);
39
+
40
+ return {
41
+ group: sectionTitle || alternateGroupTitle(firstLink, pages),
42
+ pages: firstHref ? [firstHref, ...pages] : pages,
43
+ };
44
+ })
45
+ .toArray();
46
+ }
@@ -7,34 +7,46 @@ export async function scrapeDocusaurusPage(
7
7
  html: string,
8
8
  origin: string,
9
9
  cliDir: string,
10
- imageBaseDir: string
10
+ imageBaseDir: string,
11
+ version: string | undefined // expects "2", or "3". Have not written support for "1" yet
11
12
  ) {
12
13
  const $ = cheerio.load(html);
13
14
 
14
- const content = $(".theme-doc-markdown").first();
15
+ const article =
16
+ version === "3" ? $(".theme-doc-markdown").first() : $("article").first();
15
17
 
16
- // Index pages with no additional text don't have the markdown class
17
- if (content.length === 0) {
18
+ if (article.length === 0) {
19
+ // Index pages with no additional text don't have the markdown class
18
20
  return {};
19
21
  }
20
22
 
21
- const titleComponent = content.find("h1");
23
+ const titleComponent = article.find("h1");
22
24
  const title = titleComponent.text().trim();
23
25
 
24
26
  // Do not include title in the content when we insert it in our metadata
25
27
  titleComponent.remove();
26
28
 
29
+ const markdownContent =
30
+ version === "3" ? article : article.find(".markdown").first();
31
+
27
32
  const origToWritePath = await downloadAllImages(
28
33
  $,
29
- content,
34
+ markdownContent,
30
35
  origin,
31
36
  imageBaseDir
32
37
  );
33
38
 
34
- const contentHtml = content.html();
39
+ const markdownHtml = markdownContent.html();
35
40
 
36
41
  const nhm = new NodeHtmlMarkdown();
37
- let markdown = nhm.translate(contentHtml);
42
+ let markdown = nhm.translate(markdownHtml);
43
+
44
+ if (markdown == null) {
45
+ console.error(
46
+ "We do not support scraping this page. Content will be empty"
47
+ );
48
+ return { title, description: null, markdown: "" };
49
+ }
38
50
 
39
51
  // Description only exists in meta tags. The code is commented out because its prone to incorrectly
40
52
  // including a description if the first line of text had markdown annotations like `.
@@ -3,14 +3,14 @@ import { NavigationEntry } from "../..//navigation.js";
3
3
  import { scrapeGettingFileNameFromUrl } from "../scrapeGettingFileNameFromUrl.js";
4
4
  import combineNavWithEmptyGroupTitles from "../combineNavWithEmptyGroupTitles.js";
5
5
  import { scrapeDocusaurusPage } from "./scrapeDocusaurusPage.js";
6
- import getLinksRecursively from "./getLinksRecursively.js";
7
- import alternateGroupTitle from "./alternateGroupTitle.js";
6
+ import { getDocusaurusLinksPerGroup } from "./links-per-group/getDocusaurusLinksPerGroup.js";
8
7
 
9
8
  export async function scrapeDocusaurusSection(
10
9
  html: string,
11
10
  origin: string,
12
11
  cliDir: string,
13
- overwrite: boolean
12
+ overwrite: boolean,
13
+ version: string
14
14
  ) {
15
15
  const $ = cheerio.load(html);
16
16
 
@@ -18,36 +18,11 @@ export async function scrapeDocusaurusSection(
18
18
  const navigationSections = $(".theme-doc-sidebar-menu").first().children();
19
19
 
20
20
  // Get all links per group
21
- const groupsConfig = navigationSections
22
- .map((i, s) => {
23
- const section = $(s);
24
-
25
- // Links without a group
26
- if (section.hasClass("theme-doc-sidebar-item-link")) {
27
- const linkHref = section.find("a[href]").first().attr("href");
28
- return {
29
- group: "",
30
- pages: [linkHref],
31
- };
32
- }
33
-
34
- const firstLink = section
35
- .find(".menu__list-item-collapsible")
36
- .first()
37
- .find("a[href]");
38
-
39
- const sectionTitle = firstLink.text();
40
- const firstHref = firstLink.attr("href");
41
- const linkSections = section.children().eq(1).children();
42
-
43
- const pages = getLinksRecursively(linkSections, $);
44
-
45
- return {
46
- group: sectionTitle || alternateGroupTitle(firstLink, pages),
47
- pages: firstHref ? [firstHref, ...pages] : pages,
48
- };
49
- })
50
- .toArray();
21
+ const groupsConfig = getDocusaurusLinksPerGroup(
22
+ navigationSections,
23
+ $,
24
+ version
25
+ );
51
26
 
52
27
  // Merge groups with empty titles together
53
28
  const reducedGroupsConfig = combineNavWithEmptyGroupTitles(groupsConfig);
@@ -67,6 +42,7 @@ export async function scrapeDocusaurusSection(
67
42
  overwrite,
68
43
  scrapeDocusaurusPage,
69
44
  false,
45
+ version,
70
46
  "/docs"
71
47
  )
72
48
  )
@@ -7,7 +7,8 @@ export async function scrapeGitBookPage(
7
7
  html: string,
8
8
  origin: string,
9
9
  cliDir: string,
10
- imageBaseDir: string
10
+ imageBaseDir: string,
11
+ _: string | undefined // version
11
12
  ) {
12
13
  const $ = cheerio.load(html);
13
14