mintlify 1.0.6 → 1.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (286)
  1. package/README.md +27 -3
  2. package/bin/browser.js +24 -0
  3. package/bin/browser.js.map +1 -0
  4. package/bin/constants.js +8 -0
  5. package/bin/constants.js.map +1 -0
  6. package/bin/dev/getOpenApiContext.js +46 -0
  7. package/bin/dev/getOpenApiContext.js.map +1 -0
  8. package/bin/dev/index.js +164 -0
  9. package/bin/dev/index.js.map +1 -0
  10. package/bin/dev/injectNav.js +97 -0
  11. package/bin/dev/injectNav.js.map +1 -0
  12. package/bin/dev/slugToTitle.js +8 -0
  13. package/bin/dev/slugToTitle.js.map +1 -0
  14. package/bin/downloadImage.js +27 -0
  15. package/bin/downloadImage.js.map +1 -0
  16. package/bin/index.js +49 -106
  17. package/bin/index.js.map +1 -1
  18. package/bin/init-command/index.js +51 -0
  19. package/bin/init-command/index.js.map +1 -0
  20. package/bin/init-command/templates.js +41 -0
  21. package/bin/init-command/templates.js.map +1 -0
  22. package/bin/local-preview/categorizeFiles.js +56 -0
  23. package/bin/local-preview/categorizeFiles.js.map +1 -0
  24. package/bin/local-preview/getOpenApiContext.js +46 -0
  25. package/bin/local-preview/getOpenApiContext.js.map +1 -0
  26. package/bin/local-preview/index.js +138 -0
  27. package/bin/local-preview/index.js.map +1 -0
  28. package/bin/local-preview/injectFavicons.js +72 -0
  29. package/bin/local-preview/injectFavicons.js.map +1 -0
  30. package/bin/local-preview/listener.js +112 -0
  31. package/bin/local-preview/listener.js.map +1 -0
  32. package/bin/local-preview/metadata.js +121 -0
  33. package/bin/local-preview/metadata.js.map +1 -0
  34. package/bin/local-preview/mintConfigFile.js +43 -0
  35. package/bin/local-preview/mintConfigFile.js.map +1 -0
  36. package/bin/local-preview/openApiCheck.js +16 -0
  37. package/bin/local-preview/openApiCheck.js.map +1 -0
  38. package/bin/local-preview/slugToTitle.js +8 -0
  39. package/bin/local-preview/slugToTitle.js.map +1 -0
  40. package/bin/mint/client/.babel-plugin-macrosrc.json +5 -0
  41. package/bin/mint/client/.babelrc +4 -0
  42. package/bin/mint/client/.editorconfig +12 -0
  43. package/bin/mint/client/.eslintrc.json +7 -0
  44. package/bin/mint/client/.prettierignore +4 -0
  45. package/bin/mint/client/.prettierrc +14 -0
  46. package/bin/mint/client/.vscode/launch.json +28 -0
  47. package/bin/mint/client/README.md +46 -0
  48. package/bin/mint/client/jest.config.ts +195 -0
  49. package/bin/mint/client/next-env.d.ts +4 -0
  50. package/bin/mint/client/next.config.js +152 -0
  51. package/bin/mint/client/package.json +140 -0
  52. package/bin/mint/client/postcss.config.cjs +9 -0
  53. package/bin/mint/client/prebuild/faviconConfig.js +35 -0
  54. package/bin/mint/client/prebuild/getOpenApiContext.js +53 -0
  55. package/bin/mint/client/prebuild/index.js +117 -0
  56. package/bin/mint/client/prebuild/injectNav.js +115 -0
  57. package/bin/mint/client/prebuild/slugToTitle.js +7 -0
  58. package/bin/mint/client/rehype/withApiComponents.js +60 -0
  59. package/bin/mint/client/rehype/withCodeBlocks.js +54 -0
  60. package/bin/mint/client/rehype/withLayouts.js +113 -0
  61. package/bin/mint/client/rehype/withLinkRoles.js +13 -0
  62. package/bin/mint/client/rehype/withRawComponents.js +13 -0
  63. package/bin/mint/client/rehype/withStaticProps.js +25 -0
  64. package/bin/mint/client/rehype/withSyntaxHighlighting.js +60 -0
  65. package/bin/mint/client/remark/utils.js +369 -0
  66. package/bin/mint/client/remark/withFrames.js +55 -0
  67. package/bin/mint/client/remark/withImportsInjected.js +36 -0
  68. package/bin/mint/client/remark/withNextLinks.js +37 -0
  69. package/bin/mint/client/remark/withTableOfContents.js +71 -0
  70. package/bin/mint/client/scripts/local-to-docs.js +72 -0
  71. package/bin/mint/client/scripts/local.js +177 -0
  72. package/bin/mint/client/sentry.client.config.js +15 -0
  73. package/bin/mint/client/sentry.properties +4 -0
  74. package/bin/mint/client/sentry.server.config.js +15 -0
  75. package/bin/mint/client/src/analytics/AbstractAnalyticsImplementation.ts +50 -0
  76. package/bin/mint/client/src/analytics/AnalyticsContext.ts +5 -0
  77. package/bin/mint/client/src/analytics/AnalyticsMediator.ts +101 -0
  78. package/bin/mint/client/src/analytics/FakeAnalyticsMediator.ts +9 -0
  79. package/bin/mint/client/src/analytics/GA4Script.tsx +33 -0
  80. package/bin/mint/client/src/analytics/implementations/amplitude.ts +26 -0
  81. package/bin/mint/client/src/analytics/implementations/fathom.ts +38 -0
  82. package/bin/mint/client/src/analytics/implementations/ga4.ts +33 -0
  83. package/bin/mint/client/src/analytics/implementations/hotjar.ts +53 -0
  84. package/bin/mint/client/src/analytics/implementations/mixpanel-browser.d.ts +1 -0
  85. package/bin/mint/client/src/analytics/implementations/mixpanel.ts +52 -0
  86. package/bin/mint/client/src/analytics/implementations/posthog.ts +37 -0
  87. package/bin/mint/client/src/components/Accordion/Accordion.tsx +43 -0
  88. package/bin/mint/client/src/components/Accordion/index.ts +4 -0
  89. package/bin/mint/client/src/components/ApiExample.tsx +9 -0
  90. package/bin/mint/client/src/components/Card.tsx +51 -0
  91. package/bin/mint/client/src/components/CodeGroup.tsx +132 -0
  92. package/bin/mint/client/src/components/Editor.tsx +12 -0
  93. package/bin/mint/client/src/components/Expandable.tsx +40 -0
  94. package/bin/mint/client/src/components/Heading.tsx +84 -0
  95. package/bin/mint/client/src/components/Param.tsx +56 -0
  96. package/bin/mint/client/src/components/Request.tsx +19 -0
  97. package/bin/mint/client/src/components/ResponseField.tsx +33 -0
  98. package/bin/mint/client/src/components/TabBar.tsx +61 -0
  99. package/bin/mint/client/src/config.ts +115 -0
  100. package/bin/mint/client/src/css/bar-of-progress.css +10 -0
  101. package/bin/mint/client/src/css/base.css +29 -0
  102. package/bin/mint/client/src/css/font-awesome.css +7 -0
  103. package/bin/mint/client/src/css/fonts.css +44 -0
  104. package/bin/mint/client/src/css/main.css +11 -0
  105. package/bin/mint/client/src/css/prism.css +270 -0
  106. package/bin/mint/client/src/css/utilities.css +43 -0
  107. package/bin/mint/client/src/enums/components.ts +8 -0
  108. package/bin/mint/client/src/fonts/FiraCode-VF.woff +0 -0
  109. package/bin/mint/client/src/fonts/FiraCode-VF.woff2 +0 -0
  110. package/bin/mint/client/src/fonts/IBMPlexMono-Regular.ttf +0 -0
  111. package/bin/mint/client/src/fonts/IBMPlexMono-SemiBold.ttf +0 -0
  112. package/bin/mint/client/src/fonts/Inter-italic-latin.var.woff2 +0 -0
  113. package/bin/mint/client/src/fonts/Inter-roman-latin.var.woff2 +0 -0
  114. package/bin/mint/client/src/fonts/Pally-Variable.ttf +0 -0
  115. package/bin/mint/client/src/fonts/SourceSansPro-Regular.otf +0 -0
  116. package/bin/mint/client/src/fonts/SourceSerifPro-Regular.ttf +0 -0
  117. package/bin/mint/client/src/fonts/Synonym-Variable.ttf +0 -0
  118. package/bin/mint/client/src/fonts/Ubuntu-Mono-bold.woff2 +0 -0
  119. package/bin/mint/client/src/fonts/generated/IBMPlexMono-Regular-subset.woff2 +0 -0
  120. package/bin/mint/client/src/fonts/generated/IBMPlexMono-Regular-subset.zopfli.woff +0 -0
  121. package/bin/mint/client/src/fonts/generated/IBMPlexMono-Regular.module.css +11 -0
  122. package/bin/mint/client/src/fonts/generated/IBMPlexMono-SemiBold-subset.woff2 +0 -0
  123. package/bin/mint/client/src/fonts/generated/IBMPlexMono-SemiBold-subset.zopfli.woff +0 -0
  124. package/bin/mint/client/src/fonts/generated/IBMPlexMono-SemiBold.module.css +11 -0
  125. package/bin/mint/client/src/fonts/generated/Pally-Variable-subset.woff2 +0 -0
  126. package/bin/mint/client/src/fonts/generated/Pally-Variable-subset.zopfli.woff +0 -0
  127. package/bin/mint/client/src/fonts/generated/Pally-Variable.module.css +11 -0
  128. package/bin/mint/client/src/fonts/generated/SourceSerifPro-Regular-subset.woff2 +0 -0
  129. package/bin/mint/client/src/fonts/generated/SourceSerifPro-Regular-subset.zopfli.woff +0 -0
  130. package/bin/mint/client/src/fonts/generated/SourceSerifPro-Regular.module.css +11 -0
  131. package/bin/mint/client/src/fonts/generated/Synonym-Variable-subset.woff2 +0 -0
  132. package/bin/mint/client/src/fonts/generated/Synonym-Variable-subset.zopfli.woff +0 -0
  133. package/bin/mint/client/src/fonts/generated/Synonym-Variable.module.css +11 -0
  134. package/bin/mint/client/src/fonts/generated/TenorSans-Regular-subset.woff2 +0 -0
  135. package/bin/mint/client/src/fonts/generated/TenorSans-Regular-subset.zopfli.woff +0 -0
  136. package/bin/mint/client/src/fonts/generated/TenorSans-Regular.module.css +11 -0
  137. package/bin/mint/client/src/hooks/useActionKey.ts +20 -0
  138. package/bin/mint/client/src/hooks/useIsomorphicLayoutEffect.ts +3 -0
  139. package/bin/mint/client/src/hooks/useMedia.ts +27 -0
  140. package/bin/mint/client/src/hooks/usePrevNext.ts +34 -0
  141. package/bin/mint/client/src/hooks/useTop.ts +15 -0
  142. package/bin/mint/client/src/icons/CopyToClipboard.tsx +33 -0
  143. package/bin/mint/client/src/index.d.ts +1 -0
  144. package/bin/mint/client/src/layouts/ApiSupplemental.tsx +173 -0
  145. package/bin/mint/client/src/layouts/ContentsLayout.tsx +256 -0
  146. package/bin/mint/client/src/layouts/DocumentationLayout.tsx +44 -0
  147. package/bin/mint/client/src/layouts/OpenApiContent.tsx +301 -0
  148. package/bin/mint/client/src/layouts/SidebarLayout.tsx +412 -0
  149. package/bin/mint/client/src/layouts/UserFeedback.tsx +73 -0
  150. package/bin/mint/client/src/layouts/getGroupsInDivision.ts +25 -0
  151. package/bin/mint/client/src/layouts/isPathInGroupPages.ts +10 -0
  152. package/bin/mint/client/src/metadata.ts +58 -0
  153. package/bin/mint/client/src/openapi.ts +3 -0
  154. package/bin/mint/client/src/pages/404.tsx +73 -0
  155. package/bin/mint/client/src/pages/_app.tsx +138 -0
  156. package/bin/mint/client/src/pages/_document.tsx +57 -0
  157. package/bin/mint/client/src/pages/api/issue.ts +10 -0
  158. package/bin/mint/client/src/pages/api/name.ts +8 -0
  159. package/bin/mint/client/src/pages/api/request.ts +31 -0
  160. package/bin/mint/client/src/pages/api/suggest.ts +10 -0
  161. package/bin/mint/client/src/pages/api/syntax-highlighted-json.ts +13 -0
  162. package/bin/mint/client/src/pages/api/utils.ts +6 -0
  163. package/bin/mint/client/src/pages/index.tsx +31 -0
  164. package/bin/mint/client/src/ui/Api.tsx +359 -0
  165. package/bin/mint/client/src/ui/Footer.tsx +124 -0
  166. package/bin/mint/client/src/ui/Header.tsx +370 -0
  167. package/bin/mint/client/src/ui/Logo.tsx +55 -0
  168. package/bin/mint/client/src/ui/PageHeader.tsx +51 -0
  169. package/bin/mint/client/src/ui/Search.tsx +386 -0
  170. package/bin/mint/client/src/ui/ThemeToggle.tsx +285 -0
  171. package/bin/mint/client/src/ui/Title.tsx +22 -0
  172. package/bin/mint/client/src/ui/TopLevelLink.tsx +122 -0
  173. package/bin/mint/client/src/utils/api.ts +252 -0
  174. package/bin/mint/client/src/utils/brands.ts +217 -0
  175. package/bin/mint/client/src/utils/castArray.ts +3 -0
  176. package/bin/mint/client/src/utils/childrenArray.ts +3 -0
  177. package/bin/mint/client/src/utils/fit.ts +27 -0
  178. package/bin/mint/client/src/utils/fontAwesome.ts +577 -0
  179. package/bin/mint/client/src/utils/getAnalyticsConfig.ts +14 -0
  180. package/bin/mint/client/src/utils/getLogoHref.ts +9 -0
  181. package/bin/mint/client/src/utils/getOpenApiContext.ts +26 -0
  182. package/bin/mint/client/src/utils/importAll.ts +6 -0
  183. package/bin/mint/client/src/utils/isObject.ts +3 -0
  184. package/bin/mint/client/src/utils/kebabToTitleCase.ts +3 -0
  185. package/bin/mint/client/src/utils/loadImage.ts +8 -0
  186. package/bin/mint/client/src/utils/slugToTitle.ts +7 -0
  187. package/bin/mint/client/src/utils/wait.ts +5 -0
  188. package/bin/mint/client/tailwind.config.cjs +323 -0
  189. package/bin/mint/client/test/test.test.ts +5 -0
  190. package/bin/mint/client/tsconfig.json +36 -0
  191. package/bin/mint/client/yarn.lock +9702 -0
  192. package/bin/navigation.js +4 -0
  193. package/bin/navigation.js.map +1 -0
  194. package/bin/pageTemplate.js +30 -0
  195. package/bin/pageTemplate.js.map +1 -0
  196. package/bin/scraping/combineNavWithEmptyGroupTitles.js +20 -0
  197. package/bin/scraping/combineNavWithEmptyGroupTitles.js.map +1 -0
  198. package/bin/scraping/detectFramework.js +25 -0
  199. package/bin/scraping/detectFramework.js.map +1 -0
  200. package/bin/scraping/downloadAllImages.js +57 -0
  201. package/bin/scraping/downloadAllImages.js.map +1 -0
  202. package/bin/scraping/getSitemapLinks.js +18 -0
  203. package/bin/scraping/getSitemapLinks.js.map +1 -0
  204. package/bin/scraping/replaceImagePaths.js +17 -0
  205. package/bin/scraping/replaceImagePaths.js.map +1 -0
  206. package/bin/scraping/scrapeFileGettingFileNameFromUrl.js +43 -0
  207. package/bin/scraping/scrapeFileGettingFileNameFromUrl.js.map +1 -0
  208. package/bin/scraping/scrapeGettingFileNameFromUrl.js +14 -0
  209. package/bin/scraping/scrapeGettingFileNameFromUrl.js.map +1 -0
  210. package/bin/scraping/scrapePage.js +9 -0
  211. package/bin/scraping/scrapePage.js.map +1 -0
  212. package/bin/scraping/scrapePageCommands.js +48 -0
  213. package/bin/scraping/scrapePageCommands.js.map +1 -0
  214. package/bin/scraping/scrapeSection.js +9 -0
  215. package/bin/scraping/scrapeSection.js.map +1 -0
  216. package/bin/scraping/scrapeSectionCommands.js +90 -0
  217. package/bin/scraping/scrapeSectionCommands.js.map +1 -0
  218. package/bin/scraping/site-scrapers/getLinksRecursively.js +33 -0
  219. package/bin/scraping/site-scrapers/getLinksRecursively.js.map +1 -0
  220. package/bin/scraping/site-scrapers/scrapeDocusaurusPage.js +43 -0
  221. package/bin/scraping/site-scrapers/scrapeDocusaurusPage.js.map +1 -0
  222. package/bin/scraping/site-scrapers/scrapeDocusaurusSection.js +52 -0
  223. package/bin/scraping/site-scrapers/scrapeDocusaurusSection.js.map +1 -0
  224. package/bin/{scrapeGitBookPage.js → scraping/site-scrapers/scrapeGitBookPage.js} +10 -5
  225. package/bin/scraping/site-scrapers/scrapeGitBookPage.js.map +1 -0
  226. package/bin/scraping/site-scrapers/scrapeGitBookSection.js +51 -0
  227. package/bin/scraping/site-scrapers/scrapeGitBookSection.js.map +1 -0
  228. package/bin/scraping/site-scrapers/scrapeReadMePage.js +35 -0
  229. package/bin/scraping/site-scrapers/scrapeReadMePage.js.map +1 -0
  230. package/bin/scraping/site-scrapers/scrapeReadMeSection.js +38 -0
  231. package/bin/scraping/site-scrapers/scrapeReadMeSection.js.map +1 -0
  232. package/bin/util.js +47 -8
  233. package/bin/util.js.map +1 -1
  234. package/bin/validation/isValidLink.js +11 -0
  235. package/bin/validation/isValidLink.js.map +1 -0
  236. package/bin/validation/stopIfInvalidLink.js +9 -0
  237. package/bin/validation/stopIfInvalidLink.js.map +1 -0
  238. package/package.json +21 -4
  239. package/src/browser.ts +24 -0
  240. package/src/constants.ts +10 -0
  241. package/src/downloadImage.ts +35 -0
  242. package/src/index.ts +111 -122
  243. package/src/init-command/index.ts +59 -0
  244. package/src/{templates.ts → init-command/templates.ts} +0 -0
  245. package/src/local-preview/categorizeFiles.ts +74 -0
  246. package/src/local-preview/getOpenApiContext.ts +61 -0
  247. package/src/local-preview/index.ts +164 -0
  248. package/src/local-preview/injectFavicons.ts +76 -0
  249. package/src/local-preview/listener.ts +116 -0
  250. package/src/local-preview/metadata.ts +154 -0
  251. package/src/local-preview/mintConfigFile.ts +48 -0
  252. package/src/local-preview/openApiCheck.ts +19 -0
  253. package/src/local-preview/slugToTitle.ts +7 -0
  254. package/src/navigation.ts +12 -0
  255. package/src/pageTemplate.ts +32 -0
  256. package/src/scraping/combineNavWithEmptyGroupTitles.ts +21 -0
  257. package/src/scraping/detectFramework.ts +31 -0
  258. package/src/scraping/downloadAllImages.ts +79 -0
  259. package/src/scraping/getSitemapLinks.ts +18 -0
  260. package/src/scraping/replaceImagePaths.ts +21 -0
  261. package/src/scraping/scrapeFileGettingFileNameFromUrl.ts +81 -0
  262. package/src/scraping/scrapeGettingFileNameFromUrl.ts +50 -0
  263. package/src/scraping/scrapePage.ts +24 -0
  264. package/src/scraping/scrapePageCommands.ts +52 -0
  265. package/src/scraping/scrapeSection.ts +16 -0
  266. package/src/scraping/scrapeSectionCommands.ts +110 -0
  267. package/src/scraping/site-scrapers/getLinksRecursively.ts +40 -0
  268. package/src/scraping/site-scrapers/scrapeDocusaurusPage.ts +67 -0
  269. package/src/scraping/site-scrapers/scrapeDocusaurusSection.ts +80 -0
  270. package/src/{scrapeGitBookPage.ts → scraping/site-scrapers/scrapeGitBookPage.ts} +25 -5
  271. package/src/scraping/site-scrapers/scrapeGitBookSection.ts +77 -0
  272. package/src/scraping/site-scrapers/scrapeReadMePage.ts +57 -0
  273. package/src/scraping/site-scrapers/scrapeReadMeSection.ts +60 -0
  274. package/src/util.ts +53 -7
  275. package/src/validation/isValidLink.ts +9 -0
  276. package/src/validation/stopIfInvalidLink.ts +9 -0
  277. package/tsconfig.json +1 -1
  278. package/bin/scrapeGitBook.js +0 -28
  279. package/bin/scrapeGitBook.js.map +0 -1
  280. package/bin/scrapeGitBookPage.js.map +0 -1
  281. package/bin/scrapeReadMe.js +0 -60
  282. package/bin/scrapeReadMe.js.map +0 -1
  283. package/bin/scrapeReadMePage.js +0 -28
  284. package/bin/scrapeReadMePage.js.map +0 -1
  285. package/src/scrapeReadMe.ts +0 -79
  286. package/src/scrapeReadMePage.ts +0 -37
@@ -0,0 +1,4 @@
1
// Nav entries are either plain page paths (strings) or nested navigation
// group objects; anything that is not a string is treated as a group.
export function isNavigation(navEntry) {
  const isPlainPagePath = typeof navEntry === "string";
  return !isPlainPagePath;
}
4
+ //# sourceMappingURL=navigation.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"navigation.js","sourceRoot":"","sources":["../src/navigation.ts"],"names":[],"mappings":"AAOA,MAAM,UAAU,YAAY,CAC1B,QAAyB;IAEzB,OAAO,OAAO,QAAQ,KAAK,QAAQ,CAAC;AACtC,CAAC"}
@@ -0,0 +1,30 @@
1
+ import inquirer from "inquirer";
2
+ import { createPage } from "./util.js";
3
// Interactively prompts for a page title and description, then scaffolds
// the corresponding Mintlify page on disk. Exits the process when done.
const generatePageTemplate = () => {
  const questions = [
    {
      type: "input",
      name: "title",
      message: "What is the title of the new page?",
    },
    {
      type: "input",
      name: "description",
      message: "What is the description?",
      default: "",
    },
  ];
  inquirer
    .prompt(questions)
    .then(({ title, description }) => {
      createPage(title, description);
      console.log("🌱 Created initial files for Mintlify docs");
      process.exit(0);
    })
    .catch((error) => {
      console.error(error);
      process.exit(1);
    });
};
export default generatePageTemplate;
30
+ //# sourceMappingURL=pageTemplate.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"pageTemplate.js","sourceRoot":"","sources":["../src/pageTemplate.ts"],"names":[],"mappings":"AAAA,OAAO,QAAQ,MAAM,UAAU,CAAC;AAChC,OAAO,EAAE,UAAU,EAAE,MAAM,WAAW,CAAC;AAEvC,MAAM,oBAAoB,GAAG,GAAG,EAAE;IAChC,QAAQ;SACL,MAAM,CAAC;QACN;YACE,IAAI,EAAE,OAAO;YACb,IAAI,EAAE,OAAO;YACb,OAAO,EAAE,oCAAoC;SAC9C;QACD;YACE,IAAI,EAAE,OAAO;YACb,IAAI,EAAE,aAAa;YACnB,OAAO,EAAE,0BAA0B;YACnC,OAAO,EAAE,EAAE;SACZ;KACF,CAAC;SACD,IAAI,CAAC,CAAC,OAAO,EAAE,EAAE;QAChB,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC;QAEvC,UAAU,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC;QAC/B,OAAO,CAAC,GAAG,CAAC,4CAA4C,CAAC,CAAC;QAC1D,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC,CAAC;SACD,KAAK,CAAC,CAAC,KAAK,EAAE,EAAE;QACf,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;QACrB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC,CAAC,CAAC;AACP,CAAC,CAAC;AAEF,eAAe,oBAAoB,CAAC"}
@@ -0,0 +1,20 @@
1
// Collapses adjacent navigation groups that have no title into a single
// group so they render as one continuous section in the sidebar.
export default function combineNavWithEmptyGroupTitles(navArray) {
  const combined = [];
  for (const nav of navArray) {
    // On the first pass the index is -1, which simply yields undefined.
    const previous = combined[combined.length - 1];
    if (previous != null && !nav.group && !previous.group) {
      // Joins multiple groups without a title together IF they occur side by side.
      previous.pages = previous.pages.concat(nav.pages);
    } else {
      combined.push(nav);
    }
  }
  return combined;
}
20
+ //# sourceMappingURL=combineNavWithEmptyGroupTitles.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"combineNavWithEmptyGroupTitles.js","sourceRoot":"","sources":["../../src/scraping/combineNavWithEmptyGroupTitles.ts"],"names":[],"mappings":"AAEA,MAAM,CAAC,OAAO,UAAU,8BAA8B,CAAC,QAAsB;IAC3E,IAAI,WAAW,GAAG,EAAE,CAAC;IAErB,QAAQ,CAAC,OAAO,CAAC,CAAC,GAAe,EAAE,EAAE;QACnC,mEAAmE;QACnE,wFAAwF;QACxF,MAAM,IAAI,GAAG,WAAW,CAAC,WAAW,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC;QACjD,IAAI,IAAI,IAAI,IAAI,EAAE;YAChB,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;aAAM,IAAI,CAAC,GAAG,CAAC,KAAK,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE;YACpC,4EAA4E;YAC5E,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;SAC3C;aAAM;YACL,WAAW,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;SACvB;IACH,CAAC,CAAC,CAAC;IAEH,OAAO,WAAW,CAAC;AACrB,CAAC"}
@@ -0,0 +1,25 @@
1
+ import cheerio from "cheerio";
2
// Enum of the documentation frameworks the scraper can recognize
// (compiled-TypeScript string-enum pattern).
export var Frameworks;
(function (F) {
  F["DOCUSAURUS"] = "DOCUSAURUS";
  F["GITBOOK"] = "GITBOOK";
  F["README"] = "README";
})(Frameworks || (Frameworks = {}));
8
/**
 * Inspects a page's HTML to determine which documentation framework
 * generated it.
 *
 * @param {string} html - Raw HTML of the page.
 * @returns {Frameworks | undefined} The detected framework, or undefined
 *   when no known framework markers are present.
 */
export function detectFramework(html) {
  const $ = cheerio.load(html);
  // Docusaurus advertises itself via a generator meta tag.
  const docusaurusMeta = $('meta[name="generator"]');
  if (
    docusaurusMeta.length > 0 &&
    // attr() returns undefined when the tag has no content attribute;
    // fall back to "" so a malformed meta tag doesn't throw.
    (docusaurusMeta.attr("content") ?? "").includes("Docusaurus")
  ) {
    return Frameworks.DOCUSAURUS;
  }
  // GitBook wraps the page in a .gitbook-root container.
  const isGitBook = $(".gitbook-root").length > 0;
  if (isGitBook) {
    return Frameworks.GITBOOK;
  }
  // ReadMe deployments emit a readme-deploy meta tag.
  const isReadMe = $('meta[name="readme-deploy"]').length > 0;
  if (isReadMe) {
    return Frameworks.README;
  }
  return undefined;
}
25
+ //# sourceMappingURL=detectFramework.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"detectFramework.js","sourceRoot":"","sources":["../../src/scraping/detectFramework.ts"],"names":[],"mappings":"AAAA,OAAO,OAAO,MAAM,SAAS,CAAC;AAE9B,MAAM,CAAN,IAAY,UAIX;AAJD,WAAY,UAAU;IACpB,uCAAyB,CAAA;IACzB,iCAAmB,CAAA;IACnB,+BAAiB,CAAA;AACnB,CAAC,EAJW,UAAU,GAAV,UAAU,KAAV,UAAU,QAIrB;AAED,MAAM,UAAU,eAAe,CAAC,IAAI;IAClC,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAC7B,MAAM,cAAc,GAAG,CAAC,CAAC,wBAAwB,CAAC,CAAC;IAEnD,IACE,cAAc,CAAC,MAAM,GAAG,CAAC;QACzB,cAAc,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,QAAQ,CAAC,YAAY,CAAC,EACrD;QACA,OAAO,UAAU,CAAC,UAAU,CAAC;KAC9B;IAED,MAAM,SAAS,GAAG,CAAC,CAAC,eAAe,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;IAChD,IAAI,SAAS,EAAE;QACb,OAAO,UAAU,CAAC,OAAO,CAAC;KAC3B;IAED,MAAM,QAAQ,GAAG,CAAC,CAAC,4BAA4B,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;IAC5D,IAAI,QAAQ,EAAE;QACZ,OAAO,UAAU,CAAC,MAAM,CAAC;KAC1B;IAED,OAAO,SAAS,CAAC;AACnB,CAAC"}
@@ -0,0 +1,57 @@
1
+ import path from "path";
2
+ import downloadImage from "../downloadImage.js";
3
// To Do: Use CheerioElement instead of any when we bump the cheerio version
/**
 * Downloads every image referenced by img[src] tags inside `content` and
 * returns a mapping from each original src to the path it was written to.
 * Returns undefined (and downloads nothing) when no baseDir is supplied.
 */
export default async function downloadAllImages($, content, origin, baseDir, modifyFileName) {
  if (!baseDir) {
    console.debug("Skipping image downloading");
    return;
  }
  // We remove duplicates because some frameworks duplicate img tags
  // to show the image larger when clicked on.
  const uniqueSrcs = new Set(
    content
      .find("img[src]")
      .map((i, image) => $(image).attr("src"))
      .toArray()
  );
  // Wait for every image to finish downloading before continuing.
  const srcToPathPairs = await Promise.all(
    [...uniqueSrcs].map(async (src) => {
      // Resolve relative sources against the site origin.
      const imageHref = src.startsWith("http") ? src : new URL(src, origin).href;
      let fileName = removeMetadataFromExtension(path.basename(imageHref));
      if (modifyFileName) {
        fileName = modifyFileName(fileName);
      }
      if (!fileName) {
        console.error("Invalid image path " + imageHref);
        return;
      }
      const writePath = path.join(baseDir, fileName);
      try {
        await downloadImage(imageHref, writePath);
        console.log("🖼️ - " + writePath);
      } catch (e) {
        if (e.code === "EEXIST") {
          console.log(`❌ Skipping existing image ${writePath}`);
        } else {
          console.error(e);
        }
      }
      return { [src]: writePath };
    })
  );
  // Merge the individual { src: path } pairs into one lookup object;
  // Object.assign silently skips the undefined entries from invalid paths.
  return srcToPathPairs.reduce((result, current) => Object.assign(result, current), {});
}
48
// Strips URL metadata that some frameworks append after the file extension.
function removeMetadataFromExtension(src) {
  // "?" and "#" are the divider symbols defined by the URL standard.
  for (const dividerSymbol of ["?", "#"]) {
    src = src.split(dividerSymbol)[0];
  }
  return src;
}
57
+ //# sourceMappingURL=downloadAllImages.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"downloadAllImages.js","sourceRoot":"","sources":["../../src/scraping/downloadAllImages.ts"],"names":[],"mappings":"AAAA,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,aAAa,MAAM,qBAAqB,CAAC;AAEhD,4EAA4E;AAC5E,MAAM,CAAC,OAAO,CAAC,KAAK,UAAU,iBAAiB,CAC7C,CAAM,EACN,OAAY,EACZ,MAAc,EACd,OAAe,EACf,cAAoB;IAEpB,IAAI,CAAC,OAAO,EAAE;QACZ,OAAO,CAAC,KAAK,CAAC,4BAA4B,CAAC,CAAC;QAC5C,OAAO;KACR;IAED,kEAAkE;IAClE,4CAA4C;IAC5C,MAAM,SAAS,GAAG;QAChB,GAAG,IAAI,GAAG,CACR,OAAO;aACJ,IAAI,CAAC,UAAU,CAAC;aAChB,GAAG,CAAC,CAAC,CAAC,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;aACvC,OAAO,EAAE,CACb;KACF,CAAC;IAEF,mDAAmD;IACnD,MAAM,cAAc,GAAG,MAAM,OAAO,CAAC,GAAG,CACtC,SAAS,CAAC,GAAG,CAAC,KAAK,EAAE,YAAoB,EAAE,EAAE;QAC3C,0DAA0D;QAC1D,MAAM,SAAS,GAAG,YAAY,CAAC,UAAU,CAAC,MAAM,CAAC;YAC/C,CAAC,CAAC,YAAY;YACd,CAAC,CAAC,IAAI,GAAG,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,IAAI,CAAC;QAEvC,IAAI,QAAQ,GAAG,2BAA2B,CAAC,IAAI,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,CAAC;QACrE,IAAI,cAAc,EAAE;YAClB,QAAQ,GAAG,cAAc,CAAC,QAAQ,CAAC,CAAC;SACrC;QAED,IAAI,CAAC,QAAQ,EAAE;YACb,OAAO,CAAC,KAAK,CAAC,qBAAqB,GAAG,SAAS,CAAC,CAAC;YACjD,OAAO;SACR;QAED,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC;QAE/C,MAAM,aAAa,CAAC,SAAS,EAAE,SAAS,CAAC;aACtC,IAAI,CAAC,GAAG,EAAE;YACT,OAAO,CAAC,GAAG,CAAC,QAAQ,GAAG,SAAS,CAAC,CAAC;QACpC,CAAC,CAAC;aACD,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE;YACX,IAAI,CAAC,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACvB,OAAO,CAAC,GAAG,CAAC,6BAA6B,SAAS,EAAE,CAAC,CAAC;aACvD;iBAAM;gBACL,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;aAClB;QACH,CAAC,CAAC,CAAC;QAEL,OAAO,EAAE,CAAC,YAAY,CAAC,EAAE,SAAS,EAAE,CAAC;IACvC,CAAC,CAAC,CACH,CAAC;IAEF,OAAO,cAAc,CAAC,MAAM,CAC1B,CAAC,MAAM,EAAE,OAAO,EAAE,EAAE,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EACnD,EAAE,CACH,CAAC;AACJ,CAAC;AAED,SAAS,2BAA2B,CAAC,GAAW;IAC9C,2BAA2B;IAC3B,MAAM,eAAe,GAAG,CAAC,GAAG,EAAE,GAAG,CAAC,CAAC;IAEnC,eAAe,CAAC,OAAO,CAAC,CAAC,aAAa,EAAE,EAAE;QACxC,iFAAiF;QACjF,GAAG,GAAG,GAAG,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,CAAC;IACpC,CAAC,CAAC,CAAC;IACH,OAAO,GAAG,CAAC;AACb,CAA
C"}
@@ -0,0 +1,18 @@
1
+ import axios from "axios";
2
// Not in use.
// Gets all links in a sitemap.
/**
 * Fetches a sitemap XML document and extracts every URL belonging to the
 * sitemap's host (everything up to the closing </loc> tag).
 *
 * @param {URL} url - URL of the sitemap to fetch.
 * @returns {Promise<string[]>} Matched links, or [] on fetch failure.
 */
export const getSitemapLinks = async (url) => {
  // Escape every dot in the hostname — replace(".", ...) only handled the
  // first dot, leaving later dots as regex wildcards that could match
  // unrelated hosts.
  const hostname = url.hostname.replaceAll(".", "\\.");
  const regex = new RegExp(`https?:\/\/${hostname}.+?(?=<\/loc>)`, "gmi");
  try {
    const indexData = (await axios.default.get(url.href)).data;
    const array = indexData.match(regex);
    return array || [];
  } catch (err) {
    console.error(err);
    console.log("Skipping sitemap links because we encountered an error.");
    return [];
  }
};
18
+ //# sourceMappingURL=getSitemapLinks.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"getSitemapLinks.js","sourceRoot":"","sources":["../../src/scraping/getSitemapLinks.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,OAAO,CAAC;AAE1B,cAAc;AACd,+BAA+B;AAC/B,MAAM,CAAC,MAAM,eAAe,GAAG,KAAK,EAAE,GAAQ,EAAE,EAAE;IAChD,MAAM,QAAQ,GAAG,GAAG,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;IAClD,MAAM,KAAK,GAAG,IAAI,MAAM,CAAC,cAAc,QAAQ,gBAAgB,EAAE,KAAK,CAAC,CAAC;IAExE,IAAI;QACF,MAAM,SAAS,GAAG,CAAC,MAAM,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,IAAc,CAAC;QACrE,MAAM,KAAK,GAAG,SAAS,CAAC,KAAK,CAAC,KAAK,CAAoB,CAAC;QACxD,OAAO,KAAK,IAAI,EAAE,CAAC;KACpB;IAAC,OAAO,GAAG,EAAE;QACZ,OAAO,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;QACnB,OAAO,CAAC,GAAG,CAAC,yDAAyD,CAAC,CAAC;QACvE,OAAO,EAAE,CAAC;KACX;AACH,CAAC,CAAC"}
@@ -0,0 +1,17 @@
1
// Rewrites image references in scraped markdown so they point at the
// locally downloaded copies.
export default function replaceImagePaths(origToWritePath, cliDir, markdown) {
  if (origToWritePath == null) {
    return markdown;
  }
  // Swap each original image href for the path we downloaded it to.
  Object.entries(origToWritePath).forEach(([origHref, writePath]) => {
    // Paths inside the CLI directory become relative to that folder.
    const replacement = writePath.startsWith(cliDir)
      ? writePath.slice(cliDir.length)
      : writePath;
    markdown = markdown.replaceAll(origHref, replacement);
  });
  return markdown;
}
17
+ //# sourceMappingURL=replaceImagePaths.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"replaceImagePaths.js","sourceRoot":"","sources":["../../src/scraping/replaceImagePaths.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,OAAO,UAAU,iBAAiB,CACvC,eAAuB,EACvB,MAAc,EACd,QAAgB;IAEhB,IAAI,eAAe,IAAI,IAAI,EAAE;QAC3B,OAAO,QAAQ,CAAC;KACjB;IAED,qDAAqD;IACrD,KAAK,MAAM,CAAC,QAAQ,EAAE,SAAS,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,eAAe,CAAC,EAAE;QACnE,iDAAiD;QACjD,IAAI,SAAS,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE;YAChC,QAAQ,GAAG,QAAQ,CAAC,UAAU,CAAC,QAAQ,EAAE,SAAS,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC;SAC1E;aAAM;YACL,QAAQ,GAAG,QAAQ,CAAC,UAAU,CAAC,QAAQ,EAAE,SAAS,CAAC,CAAC;SACrD;KACF;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC"}
@@ -0,0 +1,43 @@
1
+ import path from "path";
2
+ import axios from "axios";
3
+ import { getHtmlWithPuppeteer } from "../browser.js";
4
+ import { createPage } from "../util.js";
5
/**
 * Scrapes a single page of a site and writes it to disk as a page file,
 * deriving the output file name and folder structure from the URL path.
 *
 * @param {string} pathname - Path of the page relative to `origin`;
 *   external http(s) links are returned unchanged and not scraped.
 * @param {string} cliDir - Directory the CLI writes pages and images into.
 * @param {string} origin - Site origin used to resolve relative paths.
 * @param {boolean} overwrite - Whether to overwrite an existing page file.
 * @param {Function} scrapePageFunc - Framework-specific scraper returning
 *   { title, description, markdown } for one page's HTML.
 * @param {boolean} [puppeteer=false] - Fetch with Puppeteer instead of
 *   axios, for sites that need JavaScript rendering.
 * @param {string} [baseToRemove] - Leading path segment to strip from the
 *   generated folder structure.
 * @returns {Promise<string | undefined>} Navigation entry for the created
 *   page, the original pathname for external links, or undefined when the
 *   page had no content.
 */
export async function scrapeFileGettingFileNameFromUrl(pathname, cliDir, origin, overwrite, scrapePageFunc, puppeteer = false, baseToRemove) {
    // Skip scraping external links
    if (pathname.startsWith("https://") || pathname.startsWith("http://")) {
        return pathname;
    }
    // Removes file name from the end
    const splitSubpath = pathname.split("/");
    let folders = splitSubpath.slice(0, splitSubpath.length - 1).join("/");
    // Remove base dir if passed in
    // NOTE(review): replace() removes only the first occurrence, which is
    // correct here because startsWith guarantees it is the leading segment.
    if (baseToRemove && folders.startsWith(baseToRemove)) {
        folders = folders.replace(baseToRemove, "");
    }
    // TO DO: Improve this by putting each page's images in a separate
    // folder named after the title of the page.
    const imageBaseDir = path.join(cliDir, "images", folders);
    // Scrape each page separately
    const href = new URL(pathname, origin).href;
    let html;
    if (puppeteer) {
        html = await getHtmlWithPuppeteer(href);
    }
    else {
        const res = await axios.default.get(href);
        html = res.data;
    }
    const { title, description, markdown } = await scrapePageFunc(html, origin, cliDir, imageBaseDir);
    // Check if page didn't have content
    if (!title && !markdown) {
        return undefined;
    }
    const newFileLocation = folders ? path.join(cliDir, folders) : cliDir;
    // Default to introduction.mdx if we encountered index.html
    const fileName = splitSubpath[splitSubpath.length - 1] || "introduction";
    // Will create subfolders as needed
    createPage(title, description, markdown, overwrite, newFileLocation, fileName);
    // Removes first slash if we are in a folder, Mintlify doesn't need it
    return folders ? path.join(folders, fileName).substring(1) : fileName;
}
43
+ //# sourceMappingURL=scrapeFileGettingFileNameFromUrl.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"scrapeFileGettingFileNameFromUrl.js","sourceRoot":"","sources":["../../src/scraping/scrapeFileGettingFileNameFromUrl.ts"],"names":[],"mappings":"AAAA,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,KAAK,MAAM,OAAO,CAAC;AAC1B,OAAO,EAAE,oBAAoB,EAAE,MAAM,eAAe,CAAC;AACrD,OAAO,EAAE,UAAU,EAAE,MAAM,YAAY,CAAC;AAExC,MAAM,CAAC,KAAK,UAAU,gCAAgC,CACpD,QAAgB,EAChB,MAAc,EACd,MAAc,EACd,SAAkB,EAClB,cASE,EACF,SAAS,GAAG,KAAK,EACjB,YAAqB;IAErB,+BAA+B;IAC/B,IAAI,QAAQ,CAAC,UAAU,CAAC,UAAU,CAAC,IAAI,QAAQ,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;QACrE,OAAO,QAAQ,CAAC;KACjB;IAED,iCAAiC;IACjC,MAAM,YAAY,GAAG,QAAQ,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC;IACzC,IAAI,OAAO,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC,EAAE,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;IAEvE,+BAA+B;IAC/B,IAAI,YAAY,IAAI,OAAO,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE;QACpD,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,YAAY,EAAE,EAAE,CAAC,CAAC;KAC7C;IAED,kEAAkE;IAClE,4CAA4C;IAC5C,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC;IAE1D,8BAA8B;IAC9B,MAAM,IAAI,GAAG,IAAI,GAAG,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC,IAAI,CAAC;IAC5C,IAAI,IAAY,CAAC;IACjB,IAAI,SAAS,EAAE;QACb,IAAI,GAAG,MAAM,oBAAoB,CAAC,IAAI,CAAC,CAAC;KACzC;SAAM;QACL,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QAC1C,IAAI,GAAG,GAAG,CAAC,IAAI,CAAC;KACjB;IAED,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,QAAQ,EAAE,GAAG,MAAM,cAAc,CAC3D,IAAI,EACJ,MAAM,EACN,MAAM,EACN,YAAY,CACb,CAAC;IAEF,oCAAoC;IACpC,IAAI,CAAC,KAAK,IAAI,CAAC,QAAQ,EAAE;QACvB,OAAO,SAAS,CAAC;KAClB;IAED,MAAM,eAAe,GAAG,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;IAEtE,2DAA2D;IAC3D,MAAM,QAAQ,GAAG,YAAY,CAAC,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC,IAAI,cAAc,CAAC;IAEzE,mCAAmC;IACnC,UAAU,CACR,KAAK,EACL,WAAW,EACX,QAAQ,EACR,SAAS,EACT,eAAe,EACf,QAAQ,CACT,CAAC;IAEF,sEAAsE;IACtE,OAAO,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC;AACxE,CAAC"}
@@ -0,0 +1,14 @@
1
+ import { isNavigation } from "../navigation.js";
2
+ import { scrapeFileGettingFileNameFromUrl } from "./scrapeFileGettingFileNameFromUrl.js";
3
// Recursively scrapes one navigation entry. Group entries ({ group, pages })
// have each nested entry scraped in order and their pages replaced with the
// scrape results; leaf entries (page URLs) are scraped directly and resolve
// to the written file name.
export async function scrapeGettingFileNameFromUrl(navEntry, cliDir, origin, overwrite, scrapePageFunc, puppeteer = false, baseToRemove) {
    if (!isNavigation(navEntry)) {
        // Leaf page: scrape it and return the resulting file name.
        return await scrapeFileGettingFileNameFromUrl(navEntry, cliDir, origin, overwrite, scrapePageFunc, puppeteer, baseToRemove);
    }
    // Group entry: scrape each nested entry sequentially, preserving order.
    const scrapedPages = [];
    for (const childEntry of navEntry.pages) {
        scrapedPages.push(await scrapeGettingFileNameFromUrl(childEntry, cliDir, origin, overwrite, scrapePageFunc, puppeteer, baseToRemove));
    }
    navEntry.pages = scrapedPages;
    return navEntry;
}
14
+ //# sourceMappingURL=scrapeGettingFileNameFromUrl.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"scrapeGettingFileNameFromUrl.js","sourceRoot":"","sources":["../../src/scraping/scrapeGettingFileNameFromUrl.ts"],"names":[],"mappings":"AAAA,OAAO,EAAmB,YAAY,EAAE,MAAM,kBAAkB,CAAC;AACjE,OAAO,EAAE,gCAAgC,EAAE,MAAM,uCAAuC,CAAC;AAEzF,MAAM,CAAC,KAAK,UAAU,4BAA4B,CAChD,QAAyB,EACzB,MAAc,EACd,MAAc,EACd,SAAkB,EAClB,cASE,EACF,SAAS,GAAG,KAAK,EACjB,YAAqB;IAErB,IAAI,YAAY,CAAC,QAAQ,CAAC,EAAE;QAC1B,MAAM,QAAQ,GAAG,EAAE,CAAC;QACpB,KAAK,MAAM,cAAc,IAAI,QAAQ,CAAC,KAAK,EAAE;YAC3C,QAAQ,CAAC,IAAI,CACX,MAAM,4BAA4B,CAChC,cAAc,EACd,MAAM,EACN,MAAM,EACN,SAAS,EACT,cAAc,EACd,SAAS,EACT,YAAY,CACb,CACF,CAAC;SACH;QACD,QAAQ,CAAC,KAAK,GAAG,QAAQ,CAAC;QAC1B,OAAO,QAAQ,CAAC;KACjB;IAED,OAAO,MAAM,gCAAgC,CAC3C,QAAQ,EACR,MAAM,EACN,MAAM,EACN,SAAS,EACT,cAAc,EACd,SAAS,EACT,YAAY,CACb,CAAC;AACJ,CAAC"}
@@ -0,0 +1,9 @@
1
+ import path from "path";
2
+ import { createPage, getOrigin } from "../util.js";
3
// Scrapes a single page: runs the site-specific scrape function over the
// fetched HTML, then writes the resulting page into the current working
// directory (downloaded images go under ./images).
export async function scrapePage(scrapeFunc, href, html, overwrite) {
    const cliDir = process.cwd();
    const imageBaseDir = path.join(cliDir, "images");
    const origin = getOrigin(href);
    const { title, description, markdown } = await scrapeFunc(html, origin, cliDir, imageBaseDir);
    createPage(title, description, markdown, overwrite, cliDir);
}
9
+ //# sourceMappingURL=scrapePage.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"scrapePage.js","sourceRoot":"","sources":["../../src/scraping/scrapePage.ts"],"names":[],"mappings":"AAAA,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,MAAM,YAAY,CAAC;AAEnD,MAAM,CAAC,KAAK,UAAU,UAAU,CAC9B,UAKiB,EACjB,IAAY,EACZ,IAAY,EACZ,SAAkB;IAElB,MAAM,MAAM,GAAG,SAAS,CAAC,IAAI,CAAC,CAAC;IAC/B,MAAM,YAAY,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,QAAQ,CAAC,CAAC;IACxD,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,QAAQ,EAAE,GAAG,MAAM,UAAU,CACvD,IAAI,EACJ,MAAM,EACN,OAAO,CAAC,GAAG,EAAE,EACb,YAAY,CACb,CAAC;IACF,UAAU,CAAC,KAAK,EAAE,WAAW,EAAE,QAAQ,EAAE,SAAS,EAAE,OAAO,CAAC,GAAG,EAAE,CAAC,CAAC;AACrE,CAAC"}
@@ -0,0 +1,48 @@
1
+ import axios from "axios";
2
+ import { scrapePage } from "./scrapePage.js";
3
+ import { scrapeDocusaurusPage } from "./site-scrapers/scrapeDocusaurusPage.js";
4
+ import { scrapeGitBookPage } from "./site-scrapers/scrapeGitBookPage.js";
5
+ import { scrapeReadMePage } from "./site-scrapers/scrapeReadMePage.js";
6
+ import { detectFramework, Frameworks } from "./detectFramework.js";
7
+ import { getHrefFromArgs } from "../util.js";
8
+ import { getHtmlWithPuppeteer } from "../browser.js";
9
// When no framework was auto-detected, prints the manual scrape-page
// commands the user can fall back to and exits with a non-zero status.
// A detected (truthy) framework passes through without side effects.
function validateFramework(framework) {
    if (framework) {
        return;
    }
    console.log("Could not detect the framework automatically. Please use one of:");
    console.log("scrape-page-docusaurus");
    console.log("scrape-page-gitbook");
    console.log("scrape-page-readme");
    return process.exit(1);
}
18
// Fetches a page's HTML — via Puppeteer when the site renders client-side,
// plain HTTP otherwise — scrapes it into a page on disk, then terminates
// the process with a success status.
export async function scrapePageWrapper(argv, scrapeFunc, puppeteer = false) {
    const href = getHrefFromArgs(argv);
    const html = puppeteer
        ? await getHtmlWithPuppeteer(href)
        : (await axios.default.get(href)).data;
    await scrapePage(scrapeFunc, href, html, argv.overwrite);
    process.exit(0);
}
31
// Detects the documentation framework behind a URL and dispatches to the
// matching page scraper. validateFramework exits the process when nothing
// was detected.
export async function scrapePageAutomatically(argv) {
    const href = getHrefFromArgs(argv);
    const res = await axios.default.get(href);
    const framework = detectFramework(res.data);
    validateFramework(framework);
    console.log("Detected framework: " + framework);
    switch (framework) {
        case Frameworks.DOCUSAURUS:
            await scrapePageWrapper(argv, scrapeDocusaurusPage);
            break;
        case Frameworks.GITBOOK:
            // GitBook pages are rendered client-side, so use Puppeteer.
            await scrapePageWrapper(argv, scrapeGitBookPage, true);
            break;
        case Frameworks.README:
            await scrapePageWrapper(argv, scrapeReadMePage);
            break;
    }
}
48
+ //# sourceMappingURL=scrapePageCommands.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"scrapePageCommands.js","sourceRoot":"","sources":["../../src/scraping/scrapePageCommands.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,OAAO,CAAC;AAC1B,OAAO,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC7C,OAAO,EAAE,oBAAoB,EAAE,MAAM,yCAAyC,CAAC;AAC/E,OAAO,EAAE,iBAAiB,EAAE,MAAM,sCAAsC,CAAC;AACzE,OAAO,EAAE,gBAAgB,EAAE,MAAM,qCAAqC,CAAC;AACvE,OAAO,EAAE,eAAe,EAAE,UAAU,EAAE,MAAM,sBAAsB,CAAC;AACnE,OAAO,EAAE,eAAe,EAAE,MAAM,YAAY,CAAC;AAC7C,OAAO,EAAE,oBAAoB,EAAE,MAAM,eAAe,CAAC;AAErD,SAAS,iBAAiB,CAAC,SAAS;IAClC,IAAI,CAAC,SAAS,EAAE;QACd,OAAO,CAAC,GAAG,CACT,kEAAkE,CACnE,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,CAAC;QACtC,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAC;QACnC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,CAAC;QAClC,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;KACxB;AACH,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,iBAAiB,CAAC,IAAI,EAAE,UAAU,EAAE,SAAS,GAAG,KAAK;IACzE,MAAM,IAAI,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;IACnC,IAAI,IAAY,CAAC;IACjB,IAAI,SAAS,EAAE;QACb,IAAI,GAAG,MAAM,oBAAoB,CAAC,IAAI,CAAC,CAAC;KACzC;SAAM;QACL,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;QAC1C,IAAI,GAAG,GAAG,CAAC,IAAI,CAAC;KACjB;IACD,MAAM,UAAU,CAAC,UAAU,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;IACzD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClB,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,uBAAuB,CAAC,IAAS;IACrD,MAAM,IAAI,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;IACnC,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;IAC1C,MAAM,IAAI,GAAG,GAAG,CAAC,IAAI,CAAC;IACtB,MAAM,SAAS,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;IAExC,iBAAiB,CAAC,SAAS,CAAC,CAAC;IAE7B,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,SAAS,CAAC,CAAC;IAEhD,IAAI,SAAS,KAAK,UAAU,CAAC,UAAU,EAAE;QACvC,MAAM,iBAAiB,CAAC,IAAI,EAAE,oBAAoB,CAAC,CAAC;KACrD;SAAM,IAAI,SAAS,KAAK,UAAU,CAAC,OAAO,EAAE;QAC3C,MAAM,iBAAiB,CAAC,IAAI,EAAE,iBAAiB,EAAE,IAAI,CAAC,CAAC;KACxD;SAAM,IAAI,SAAS,KAAK,UAAU,CAAC,MAAM,EAAE;QAC1C,MAAM,iBAAiB,CAAC,IAAI,EAAE,gBAAgB,CAAC,CAAC;KACjD;AACH,CAAC"}
@@ -0,0 +1,9 @@
1
+ import { objToReadableString } from "../util.js";
2
// Runs a site-specific section scraper over the given HTML and prints the
// resulting navigation config for the user to paste into mint.json.
export async function scrapeSection(scrapeFunc, html, origin, overwrite) {
    const modeSuffix = overwrite ? ", overwrite mode is on" : "";
    console.log(`Started scraping${modeSuffix}...`);
    const groupsConfig = await scrapeFunc(html, origin, process.cwd(), overwrite);
    console.log("Finished scraping.");
    console.log("Add the following to your navigation in mint.json:");
    console.log(objToReadableString(groupsConfig));
}
9
+ //# sourceMappingURL=scrapeSection.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"scrapeSection.js","sourceRoot":"","sources":["../../src/scraping/scrapeSection.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,mBAAmB,EAAE,MAAM,YAAY,CAAC;AAEjD,MAAM,CAAC,KAAK,UAAU,aAAa,CACjC,UAAe,EACf,IAAY,EACZ,MAAc,EACd,SAAkB;IAElB,OAAO,CAAC,GAAG,CACT,mBAAmB,SAAS,CAAC,CAAC,CAAC,wBAAwB,CAAC,CAAC,CAAC,EAAE,KAAK,CAClE,CAAC;IACF,MAAM,YAAY,GAAG,MAAM,UAAU,CAAC,IAAI,EAAE,MAAM,EAAE,OAAO,CAAC,GAAG,EAAE,EAAE,SAAS,CAAC,CAAC;IAC9E,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,CAAC;IAClC,OAAO,CAAC,GAAG,CAAC,oDAAoD,CAAC,CAAC;IAClE,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,YAAY,CAAC,CAAC,CAAC;AACjD,CAAC"}
@@ -0,0 +1,90 @@
1
+ import axios from "axios";
2
+ import { detectFramework, Frameworks } from "./detectFramework.js";
3
+ import { getHrefFromArgs, getOrigin } from "../util.js";
4
+ import { scrapeSection } from "./scrapeSection.js";
5
+ import { scrapeDocusaurusSection } from "./site-scrapers/scrapeDocusaurusSection.js";
6
+ import { scrapeGitBookSection } from "./site-scrapers/scrapeGitBookSection.js";
7
+ import { scrapeReadMeSection } from "./site-scrapers/scrapeReadMeSection.js";
8
+ import { startBrowser } from "../browser.js";
9
// When no framework was auto-detected, prints the manual section-scrape
// commands the user can run instead and exits with a non-zero status.
// A detected (truthy) framework passes through without side effects.
// NOTE(review): the original listed the scrape-page-* commands here,
// apparently copy-pasted from scrapePageCommands — this file implements the
// section scrapers, so the scrape-section-* commands are suggested instead.
function validateFramework(framework) {
    if (!framework) {
        console.log("Could not detect the framework automatically. Please use one of:");
        console.log("scrape-section-docusaurus");
        console.log("scrape-section-gitbook");
        console.log("scrape-section-readme");
        return process.exit(1);
    }
}
18
// Fetches a section's HTML over plain HTTP and runs the given section
// scraper on it, then terminates the process with a success status.
export async function scrapeSectionAxiosWrapper(argv, scrapeFunc) {
    const href = getHrefFromArgs(argv);
    const { data: html } = await axios.default.get(href);
    await scrapeSection(scrapeFunc, html, getOrigin(href), argv.overwrite);
    process.exit(0);
}
25
// Entry point for scraping a GitBook section: GitBook renders its nav
// client-side, so this delegates to the Puppeteer-based section wrapper
// with the GitBook-specific scraper.
export async function scrapeGitbookSectionCommand(argv) {
    await scrapeSectionGitBookWrapper(argv, scrapeGitBookSection);
}
28
// Scrapes a GitBook section with Puppeteer. GitBook lazily renders nested
// navigation groups, so we repeatedly click every unexpanded group chevron
// (identified by its SVG path) until an iteration discovers no new links,
// then hand the fully-expanded page HTML to the section scraper.
//
// Fix over the original: browser.close() was a floating promise and was
// never reached when navigation/scraping threw, leaking the Chromium
// process. The browser is now closed (and awaited) in a finally block.
async function scrapeSectionGitBookWrapper(argv, scrapeFunc) {
    const href = getHrefFromArgs(argv);
    const browser = await startBrowser();
    let html;
    try {
        const page = await browser.newPage();
        await page.goto(href, {
            waitUntil: "networkidle2",
        });
        let prevEncountered = [];
        let encounteredHref = ["fake"]; // seed so the loop runs at least once
        // Loop until we've encountered every link
        while (!encounteredHref.every((href) => prevEncountered.includes(href))) {
            prevEncountered = encounteredHref;
            encounteredHref = await page.evaluate((encounteredHref) => {
                const icons = Array.from(document.querySelectorAll('path[d="M9 18l6-6-6-6"]'));
                const linksFound = [];
                icons.forEach(async (icon) => {
                    var _a, _b;
                    const toClick = (_a = icon === null || icon === void 0 ? void 0 : icon.parentElement) === null || _a === void 0 ? void 0 : _a.parentElement;
                    const link = (_b = toClick === null || toClick === void 0 ? void 0 : toClick.parentElement) === null || _b === void 0 ? void 0 : _b.parentElement;
                    // Skip icons not in the side navigation
                    if (!(link === null || link === void 0 ? void 0 : link.hasAttribute("href"))) {
                        return;
                    }
                    const href = link.getAttribute("href");
                    // Should never occur but we keep it as a fail-safe
                    if ((href === null || href === void 0 ? void 0 : href.startsWith("https://")) || (href === null || href === void 0 ? void 0 : href.startsWith("http://"))) {
                        return;
                    }
                    // Click any links we haven't seen before
                    if (href && !encounteredHref.includes(href)) {
                        toClick === null || toClick === void 0 ? void 0 : toClick.click();
                    }
                    if (href) {
                        linksFound.push(href);
                    }
                });
                return linksFound;
            }, encounteredHref // Need to pass array into the browser
            );
        }
        html = await page.content();
    }
    finally {
        await browser.close();
    }
    await scrapeSection(scrapeFunc, html, getOrigin(href), argv.overwrite);
    process.exit(0);
}
73
// Detects the documentation framework behind a URL and dispatches to the
// matching section scraper. validateFramework exits the process when
// nothing was detected.
export async function scrapeSectionAutomatically(argv) {
    const href = getHrefFromArgs(argv);
    const res = await axios.default.get(href);
    const framework = detectFramework(res.data);
    validateFramework(framework);
    console.log("Detected framework: " + framework);
    switch (framework) {
        case Frameworks.DOCUSAURUS:
            await scrapeSectionAxiosWrapper(argv, scrapeDocusaurusSection);
            break;
        case Frameworks.GITBOOK:
            // GitBook needs the Puppeteer-based flow to expand its nav.
            await scrapeGitbookSectionCommand(argv);
            break;
        case Frameworks.README:
            await scrapeSectionAxiosWrapper(argv, scrapeReadMeSection);
            break;
    }
}
90
+ //# sourceMappingURL=scrapeSectionCommands.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"scrapeSectionCommands.js","sourceRoot":"","sources":["../../src/scraping/scrapeSectionCommands.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,OAAO,CAAC;AAC1B,OAAO,EAAE,eAAe,EAAE,UAAU,EAAE,MAAM,sBAAsB,CAAC;AACnE,OAAO,EAAE,eAAe,EAAE,SAAS,EAAE,MAAM,YAAY,CAAC;AACxD,OAAO,EAAE,aAAa,EAAE,MAAM,oBAAoB,CAAC;AACnD,OAAO,EAAE,uBAAuB,EAAE,MAAM,4CAA4C,CAAC;AACrF,OAAO,EAAE,oBAAoB,EAAE,MAAM,yCAAyC,CAAC;AAC/E,OAAO,EAAE,mBAAmB,EAAE,MAAM,wCAAwC,CAAC;AAC7E,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAC;AAE7C,SAAS,iBAAiB,CAAC,SAAiC;IAC1D,IAAI,CAAC,SAAS,EAAE;QACd,OAAO,CAAC,GAAG,CACT,kEAAkE,CACnE,CAAC;QACF,OAAO,CAAC,GAAG,CAAC,wBAAwB,CAAC,CAAC;QACtC,OAAO,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAC;QACnC,OAAO,CAAC,GAAG,CAAC,oBAAoB,CAAC,CAAC;QAClC,OAAO,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;KACxB;AACH,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,yBAAyB,CAAC,IAAS,EAAE,UAAe;IACxE,MAAM,IAAI,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;IACnC,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;IAC1C,MAAM,IAAI,GAAG,GAAG,CAAC,IAAI,CAAC;IACtB,MAAM,aAAa,CAAC,UAAU,EAAE,IAAI,EAAE,SAAS,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;IACvE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClB,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,2BAA2B,CAAC,IAAS;IACzD,MAAM,2BAA2B,CAAC,IAAI,EAAE,oBAAoB,CAAC,CAAC;AAChE,CAAC;AAED,KAAK,UAAU,2BAA2B,CAAC,IAAS,EAAE,UAAe;IACnE,MAAM,IAAI,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;IAEnC,MAAM,OAAO,GAAG,MAAM,YAAY,EAAE,CAAC;IACrC,MAAM,IAAI,GAAG,MAAM,OAAO,CAAC,OAAO,EAAE,CAAC;IACrC,MAAM,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE;QACpB,SAAS,EAAE,cAAc;KAC1B,CAAC,CAAC;IAEH,IAAI,eAAe,GAAa,EAAE,CAAC;IACnC,IAAI,eAAe,GAAG,CAAC,MAAM,CAAC,CAAC;IAE/B,0CAA0C;IAC1C,OAAO,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,eAAe,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,EAAE;QACvE,eAAe,GAAG,eAAe,CAAC;QAClC,eAAe,GAAG,MAAM,IAAI,CAAC,QAAQ,CACnC,CAAC,eAAe,EAAE,EAAE;YAClB,MAAM,KAAK,GAAkB,KAAK,CAAC,IAAI,CACrC,QAAQ,CAAC,gBAAgB,CAAC,yBAAyB,CAAC,CACrD,CAAC;YAEF,MAAM,UAAU,GAAa,EAAE,CAAC;YAChC,KAAK,CAAC,OAAO,CAAC,KAAK,EAAE,IAAiB,EAAE,EAAE;;gBACxC,MAAM,OAAO,GAAG,MAAA,IAAI,aAAJ,IAAI,uBAAJ,IAAI,CA
AE,aAAa,0CAAE,aAAa,CAAC;gBACnD,MAAM,IAAI,GAAG,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,aAAa,0CAAE,aAAa,CAAC;gBAEnD,wCAAwC;gBACxC,IAAI,CAAC,CAAA,IAAI,aAAJ,IAAI,uBAAJ,IAAI,CAAE,YAAY,CAAC,MAAM,CAAC,CAAA,EAAE;oBAC/B,OAAO;iBACR;gBAED,MAAM,IAAI,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,CAAC;gBAEvC,mDAAmD;gBACnD,IAAI,CAAA,IAAI,aAAJ,IAAI,uBAAJ,IAAI,CAAE,UAAU,CAAC,UAAU,CAAC,MAAI,IAAI,aAAJ,IAAI,uBAAJ,IAAI,CAAE,UAAU,CAAC,SAAS,CAAC,CAAA,EAAE;oBAC/D,OAAO;iBACR;gBAED,yCAAyC;gBACzC,IAAI,IAAI,IAAI,CAAC,eAAe,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE;oBAC3C,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,KAAK,EAAE,CAAC;iBAClB;gBACD,IAAI,IAAI,EAAE;oBACR,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;iBACvB;YACH,CAAC,CAAC,CAAC;YAEH,OAAO,UAAU,CAAC;QACpB,CAAC,EACD,eAAe,CAAC,sCAAsC;SACvD,CAAC;KACH;IAED,MAAM,IAAI,GAAG,MAAM,IAAI,CAAC,OAAO,EAAE,CAAC;IAClC,OAAO,CAAC,KAAK,EAAE,CAAC;IAChB,MAAM,aAAa,CAAC,UAAU,EAAE,IAAI,EAAE,SAAS,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;IACvE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClB,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,0BAA0B,CAAC,IAAS;IACxD,MAAM,IAAI,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;IACnC,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;IAC1C,MAAM,IAAI,GAAG,GAAG,CAAC,IAAI,CAAC;IACtB,MAAM,SAAS,GAAG,eAAe,CAAC,IAAI,CAAC,CAAC;IAExC,iBAAiB,CAAC,SAAS,CAAC,CAAC;IAE7B,OAAO,CAAC,GAAG,CAAC,sBAAsB,GAAG,SAAS,CAAC,CAAC;IAEhD,IAAI,SAAS,KAAK,UAAU,CAAC,UAAU,EAAE;QACvC,MAAM,yBAAyB,CAAC,IAAI,EAAE,uBAAuB,CAAC,CAAC;KAChE;SAAM,IAAI,SAAS,KAAK,UAAU,CAAC,OAAO,EAAE;QAC3C,MAAM,2BAA2B,CAAC,IAAI,CAAC,CAAC;KACzC;SAAM,IAAI,SAAS,KAAK,UAAU,CAAC,MAAM,EAAE;QAC1C,MAAM,yBAAyB,CAAC,IAAI,EAAE,mBAAmB,CAAC,CAAC;KAC5D;AACH,CAAC"}
@@ -0,0 +1,33 @@
1
+ // Used by GitBook and ReadMe section scrapers
2
+ export default function getLinksRecursively(linkSections, $) {
3
+ if (linkSections == null || linkSections.length === 0) {
4
+ return [];
5
+ }
6
+ return linkSections
7
+ .map((i, s) => {
8
+ const subsection = $(s);
9
+ const link = subsection.children().first();
10
+ const linkHref = link.attr("href");
11
+ // Skip missing links. For example, GitBook uses
12
+ // empty divs are used for styling a line beside the nav.
13
+ // Skip external links until Mintlify supports them
14
+ if (!linkHref ||
15
+ linkHref.startsWith("https://") ||
16
+ linkHref.startsWith("http://")) {
17
+ return undefined;
18
+ }
19
+ const childLinks = subsection.children().eq(1).children();
20
+ if (childLinks.length > 0) {
21
+ // Put the section link in the list of pages.
22
+ // When we support the section itself being a link we should update this
23
+ return {
24
+ group: link.text(),
25
+ pages: [linkHref, ...getLinksRecursively(childLinks, $)],
26
+ };
27
+ }
28
+ return linkHref;
29
+ })
30
+ .toArray()
31
+ .filter(Boolean);
32
+ }
33
+ //# sourceMappingURL=getLinksRecursively.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"getLinksRecursively.js","sourceRoot":"","sources":["../../../src/scraping/site-scrapers/getLinksRecursively.ts"],"names":[],"mappings":"AAAA,8CAA8C;AAC9C,MAAM,CAAC,OAAO,UAAU,mBAAmB,CAAC,YAAiB,EAAE,CAAM;IACnE,IAAI,YAAY,IAAI,IAAI,IAAI,YAAY,CAAC,MAAM,KAAK,CAAC,EAAE;QACrD,OAAO,EAAE,CAAC;KACX;IAED,OAAO,YAAY;SAChB,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE;QACZ,MAAM,UAAU,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACxB,MAAM,IAAI,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC,KAAK,EAAE,CAAC;QAE3C,MAAM,QAAQ,GAAG,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAEnC,gDAAgD;QAChD,yDAAyD;QACzD,mDAAmD;QACnD,IACE,CAAC,QAAQ;YACT,QAAQ,CAAC,UAAU,CAAC,UAAU,CAAC;YAC/B,QAAQ,CAAC,UAAU,CAAC,SAAS,CAAC,EAC9B;YACA,OAAO,SAAS,CAAC;SAClB;QAED,MAAM,UAAU,GAAG,UAAU,CAAC,QAAQ,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC;QAE1D,IAAI,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;YACzB,6CAA6C;YAC7C,wEAAwE;YACxE,OAAO;gBACL,KAAK,EAAE,IAAI,CAAC,IAAI,EAAE;gBAClB,KAAK,EAAE,CAAC,QAAQ,EAAE,GAAG,mBAAmB,CAAC,UAAU,EAAE,CAAC,CAAC,CAAC;aACzD,CAAC;SACH;QAED,OAAO,QAAQ,CAAC;IAClB,CAAC,CAAC;SACD,OAAO,EAAE;SACT,MAAM,CAAC,OAAO,CAAC,CAAC;AACrB,CAAC"}
@@ -0,0 +1,43 @@
1
+ import cheerio from "cheerio";
2
+ import { NodeHtmlMarkdown } from "node-html-markdown";
3
+ import downloadAllImages from "../downloadAllImages.js";
4
+ import replaceImagePaths from "../replaceImagePaths.js";
5
// Scrapes a single Docusaurus docs page: extracts the markdown content
// area, downloads its images, converts the HTML to markdown, and cleans up
// Docusaurus-specific artifacts.
// Returns { title, description, markdown }, or {} for index pages that
// have no markdown content area.
export async function scrapeDocusaurusPage(html, origin, cliDir, imageBaseDir) {
    const $ = cheerio.load(html);
    const content = $(".theme-doc-markdown").first();
    // Index pages with no additional text don't have the markdown class
    if (content.length === 0) {
        return {};
    }
    const titleComponent = content.find("h1");
    const title = titleComponent.text().trim();
    // Do not include title in the content when we insert it in our metadata
    titleComponent.remove();
    const origToWritePath = await downloadAllImages($, content, origin, imageBaseDir);
    const contentHtml = content.html();
    const nhm = new NodeHtmlMarkdown();
    let markdown = nhm.translate(contentHtml);
    // Description only exists in meta tags. The code is commented out because its prone to incorrectly
    // including a description if the first line of text had markdown annotations like `.
    // The commented out alternative is to ignore description if it's the first line of text,
    // this means it was not set in the metadata and Docusaurus defaulted to the text.
    const description = null;
    // let description = $('meta[property="og:description"]').attr("content");
    // if (markdown.startsWith(description)) {
    //   description = null;
    // }
    // Remove Docusaurus links from headers
    // When we parse their HTML the parser adds things like:
    // [](#setup "Direct link to heading")
    // to the end of each header.
    markdown = markdown.replace(/\[\]\(#.+ ".+"\)\n/g, "\n");
    // Remove unnecessary nonwidth blank space characters
    markdown = markdown.replace(/\u200b/g, "");
    // Collapse any run of 3+ newlines to a single blank line. The original
    // /\n\n\n/g pattern left residual blank lines for runs of four or more
    // newlines because a global replace does not rescan its replacement.
    markdown = markdown.replace(/\n{3,}/g, "\n\n");
    // Mintlify doesn't support bolded headers, remove the asterisks
    markdown = markdown.replace(/(\n#+) \*\*(.*)\*\*\n/g, "$1 $2\n");
    markdown = replaceImagePaths(origToWritePath, cliDir, markdown);
    return { title, description, markdown };
}
43
+ //# sourceMappingURL=scrapeDocusaurusPage.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"scrapeDocusaurusPage.js","sourceRoot":"","sources":["../../../src/scraping/site-scrapers/scrapeDocusaurusPage.ts"],"names":[],"mappings":"AAAA,OAAO,OAAO,MAAM,SAAS,CAAC;AAC9B,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AACtD,OAAO,iBAAiB,MAAM,yBAAyB,CAAC;AACxD,OAAO,iBAAiB,MAAM,yBAAyB,CAAC;AAExD,MAAM,CAAC,KAAK,UAAU,oBAAoB,CACxC,IAAY,EACZ,MAAc,EACd,MAAc,EACd,YAAoB;IAEpB,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE7B,MAAM,OAAO,GAAG,CAAC,CAAC,qBAAqB,CAAC,CAAC,KAAK,EAAE,CAAC;IAEjD,oEAAoE;IACpE,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;QACxB,OAAO,EAAE,CAAC;KACX;IAED,MAAM,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAC1C,MAAM,KAAK,GAAG,cAAc,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,CAAC;IAE3C,wEAAwE;IACxE,cAAc,CAAC,MAAM,EAAE,CAAC;IAExB,MAAM,eAAe,GAAG,MAAM,iBAAiB,CAC7C,CAAC,EACD,OAAO,EACP,MAAM,EACN,YAAY,CACb,CAAC;IAEF,MAAM,WAAW,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC;IAEnC,MAAM,GAAG,GAAG,IAAI,gBAAgB,EAAE,CAAC;IACnC,IAAI,QAAQ,GAAG,GAAG,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IAE1C,mGAAmG;IACnG,qFAAqF;IACrF,yFAAyF;IACzF,kFAAkF;IAClF,MAAM,WAAW,GAAG,IAAI,CAAC;IACzB,0EAA0E;IAC1E,0CAA0C;IAC1C,wBAAwB;IACxB,IAAI;IAEJ,uCAAuC;IACvC,wDAAwD;IACxD,sCAAsC;IACtC,6BAA6B;IAC7B,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,qBAAqB,EAAE,IAAI,CAAC,CAAC;IAEzD,qDAAqD;IACrD,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC;IAE3C,iCAAiC;IACjC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;IAE/C,gEAAgE;IAChE,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,wBAAwB,EAAE,SAAS,CAAC,CAAC;IAEjE,QAAQ,GAAG,iBAAiB,CAAC,eAAe,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;IAEhE,OAAO,EAAE,KAAK,EAAE,WAAW,EAAE,QAAQ,EAAE,CAAC;AAC1C,CAAC"}
@@ -0,0 +1,52 @@
1
+ import cheerio from "cheerio";
2
+ import { scrapeGettingFileNameFromUrl } from "../scrapeGettingFileNameFromUrl.js";
3
+ import { scrapeDocusaurusPage } from "./scrapeDocusaurusPage.js";
4
// Scrapes an entire Docusaurus docs section: reads the sidebar navigation
// out of the HTML, scrapes every linked page, and returns mint.json-style
// group configs whose pages are the written file names.
export async function scrapeDocusaurusSection(html, origin, cliDir, overwrite) {
    const $ = cheerio.load(html);
    // Each top-level child of the sidebar menu is either a single link or a
    // collapsible category of links.
    const sidebarSections = $(".theme-doc-sidebar-menu").first().children();
    const groupsConfig = sidebarSections
        .map((index, sectionEl) => {
            const section = $(sectionEl);
            // A bare link outside any category becomes an untitled group.
            if (section.hasClass("theme-doc-sidebar-item-link")) {
                return {
                    group: "",
                    pages: [section.find("a[href]").first().attr("href")],
                };
            }
            const groupTitle = section
                .find(".menu__list-item-collapsible")
                .first()
                .text();
            // The category title can be a page too so we find from the
            // section component instead of the more specific menu__list child
            const hrefs = section
                .find("a[href]")
                .map((i, anchor) => $(anchor).attr("href"))
                .filter((i, candidate) => candidate !== "#")
                .toArray();
            // Follows the same structure as mint.json
            return { group: groupTitle, pages: hrefs };
        })
        .toArray();
    // Scrape every page concurrently, replacing each href with the local
    // file name the scrape produced.
    return Promise.all(groupsConfig.map(async (groupConfig) => {
        const scraped = await Promise.all(groupConfig.pages.map((pathname) =>
            // Docusaurus requires a directory on all sections whereas we use
            // root. /docs is their default directory so we remove it.
            scrapeGettingFileNameFromUrl(pathname, cliDir, origin, overwrite, scrapeDocusaurusPage, false, "/docs")));
        // Remove skipped index pages (they resolve to undefined).
        groupConfig.pages = scraped.filter(Boolean);
        return groupConfig;
    }));
}
52
+ //# sourceMappingURL=scrapeDocusaurusSection.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"scrapeDocusaurusSection.js","sourceRoot":"","sources":["../../../src/scraping/site-scrapers/scrapeDocusaurusSection.ts"],"names":[],"mappings":"AAAA,OAAO,OAAO,MAAM,SAAS,CAAC;AAC9B,OAAO,EAAE,4BAA4B,EAAE,MAAM,oCAAoC,CAAC;AAClF,OAAO,EAAE,oBAAoB,EAAE,MAAM,2BAA2B,CAAC;AAEjE,MAAM,CAAC,KAAK,UAAU,uBAAuB,CAC3C,IAAY,EACZ,MAAc,EACd,MAAc,EACd,SAAkB;IAElB,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE7B,kCAAkC;IAClC,MAAM,kBAAkB,GAAG,CAAC,CAAC,yBAAyB,CAAC,CAAC,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC;IAE3E,0BAA0B;IAC1B,MAAM,YAAY,GAAG,kBAAkB;SACpC,GAAG,CAAC,CAAC,CAAC,EAAE,OAAO,EAAE,EAAE;QAClB,MAAM,gBAAgB,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC;QAEpC,wBAAwB;QACxB,IAAI,gBAAgB,CAAC,QAAQ,CAAC,6BAA6B,CAAC,EAAE;YAC5D,MAAM,QAAQ,GAAG,gBAAgB,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,KAAK,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;YACvE,OAAO;gBACL,KAAK,EAAE,EAAE;gBACT,KAAK,EAAE,CAAC,QAAQ,CAAC;aAClB,CAAC;SACH;QAED,MAAM,YAAY,GAAG,gBAAgB;aAClC,IAAI,CAAC,8BAA8B,CAAC;aACpC,KAAK,EAAE;aACP,IAAI,EAAE,CAAC;QAEV,2DAA2D;QAC3D,kEAAkE;QAClE,MAAM,SAAS,GAAG,gBAAgB;aAC/B,IAAI,CAAC,SAAS,CAAC;aACf,GAAG,CAAC,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE;YACf,OAAO,CAAC,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC9B,CAAC,CAAC;aACD,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,EAAE,EAAE,CAAC,IAAI,KAAK,GAAG,CAAC;aACjC,OAAO,EAAE,CAAC;QAEb,0CAA0C;QAC1C,OAAO;YACL,KAAK,EAAE,YAAY;YACnB,KAAK,EAAE,SAAS;SACjB,CAAC;IACJ,CAAC,CAAC;SACD,OAAO,EAAE,CAAC;IAEb,sCAAsC;IACtC,MAAM,sBAAsB,GAAG,MAAM,OAAO,CAAC,GAAG,CAC9C,YAAY,CAAC,GAAG,CAAC,KAAK,EAAE,WAAW,EAAE,EAAE;QACrC,WAAW,CAAC,KAAK,GAAG,CAClB,MAAM,OAAO,CAAC,GAAG,CACf,WAAW,CAAC,KAAK,CAAC,GAAG,CAAC,KAAK,EAAE,QAAgB,EAAE,EAAE;QAC/C,sEAAsE;QACtE,mDAAmD;QACnD,4BAA4B,CAC1B,QAAQ,EACR,MAAM,EACN,MAAM,EACN,SAAS,EACT,oBAAoB,EACpB,KAAK,EACL,OAAO,CACR,CACF,CACF,CACF;YACC,6EAA6E;aAC5E,MAAM,CAAC,OAAO,CAAC,CAAC;QACnB,OAAO,WAAW,CAAC;IACrB,CAAC,CAAC,CACH,CAAC;IAEF,OAAO,sBAAsB,CAAC;AAChC,CAAC"}