@mintlify/scraping 3.0.187 → 3.0.189

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (345) hide show
  1. package/README.md +0 -5
  2. package/bin/assert.d.ts +5 -0
  3. package/bin/assert.js +13 -0
  4. package/bin/assert.js.map +1 -0
  5. package/bin/cli.js +43 -72
  6. package/bin/cli.js.map +1 -1
  7. package/bin/components/Accordion.d.ts +5 -0
  8. package/bin/components/Accordion.js +54 -0
  9. package/bin/components/Accordion.js.map +1 -0
  10. package/bin/components/AccordionGroup.d.ts +5 -0
  11. package/bin/components/AccordionGroup.js +52 -0
  12. package/bin/components/AccordionGroup.js.map +1 -0
  13. package/bin/components/Callout.d.ts +5 -0
  14. package/bin/components/Callout.js +114 -0
  15. package/bin/components/Callout.js.map +1 -0
  16. package/bin/components/Card.d.ts +5 -0
  17. package/bin/components/Card.js +135 -0
  18. package/bin/components/Card.js.map +1 -0
  19. package/bin/components/CardGroup.d.ts +5 -0
  20. package/bin/components/CardGroup.js +52 -0
  21. package/bin/components/CardGroup.js.map +1 -0
  22. package/bin/components/CodeGroup.d.ts +5 -0
  23. package/bin/components/CodeGroup.js +166 -0
  24. package/bin/components/CodeGroup.js.map +1 -0
  25. package/bin/components/Frame.d.ts +5 -0
  26. package/bin/components/Frame.js +51 -0
  27. package/bin/components/Frame.js.map +1 -0
  28. package/bin/components/Tabs.d.ts +5 -0
  29. package/bin/components/Tabs.js +122 -0
  30. package/bin/components/Tabs.js.map +1 -0
  31. package/bin/components/link.d.ts +2 -0
  32. package/bin/components/link.js +16 -0
  33. package/bin/components/link.js.map +1 -0
  34. package/bin/constants.d.ts +6 -7
  35. package/bin/constants.js +31 -12
  36. package/bin/constants.js.map +1 -1
  37. package/bin/customComponents/create.d.ts +10 -0
  38. package/bin/customComponents/create.js +69 -0
  39. package/bin/customComponents/create.js.map +1 -0
  40. package/bin/customComponents/plugin.d.ts +2 -0
  41. package/bin/customComponents/plugin.js +26 -0
  42. package/bin/customComponents/plugin.js.map +1 -0
  43. package/bin/customComponents/selective.d.ts +6 -0
  44. package/bin/customComponents/selective.js +29 -0
  45. package/bin/customComponents/selective.js.map +1 -0
  46. package/bin/nav/iterate.d.ts +2 -0
  47. package/bin/nav/iterate.js +15 -0
  48. package/bin/nav/iterate.js.map +1 -0
  49. package/bin/nav/listItems.d.ts +8 -0
  50. package/bin/nav/listItems.js +62 -0
  51. package/bin/nav/listItems.js.map +1 -0
  52. package/bin/nav/retrieve.d.ts +3 -0
  53. package/bin/nav/retrieve.js +75 -0
  54. package/bin/nav/retrieve.js.map +1 -0
  55. package/bin/nav/root.d.ts +2 -0
  56. package/bin/nav/root.js +40 -0
  57. package/bin/nav/root.js.map +1 -0
  58. package/bin/openapi/generateOpenApiPages.js +2 -2
  59. package/bin/openapi/generateOpenApiPages.js.map +1 -1
  60. package/bin/root/retrieve.d.ts +2 -0
  61. package/bin/root/retrieve.js +46 -0
  62. package/bin/root/retrieve.js.map +1 -0
  63. package/bin/scrapingPipeline/group.d.ts +5 -0
  64. package/bin/scrapingPipeline/group.js +46 -0
  65. package/bin/scrapingPipeline/group.js.map +1 -0
  66. package/bin/scrapingPipeline/icon.d.ts +2 -0
  67. package/bin/scrapingPipeline/icon.js +22 -0
  68. package/bin/scrapingPipeline/icon.js.map +1 -0
  69. package/bin/scrapingPipeline/images.d.ts +3 -0
  70. package/bin/scrapingPipeline/images.js +50 -0
  71. package/bin/scrapingPipeline/images.js.map +1 -0
  72. package/bin/scrapingPipeline/logo.d.ts +5 -0
  73. package/bin/scrapingPipeline/logo.js +92 -0
  74. package/bin/scrapingPipeline/logo.js.map +1 -0
  75. package/bin/scrapingPipeline/page.d.ts +6 -0
  76. package/bin/scrapingPipeline/page.js +102 -0
  77. package/bin/scrapingPipeline/page.js.map +1 -0
  78. package/bin/scrapingPipeline/root.d.ts +2 -0
  79. package/bin/scrapingPipeline/root.js +8 -0
  80. package/bin/scrapingPipeline/root.js.map +1 -0
  81. package/bin/scrapingPipeline/site.d.ts +7 -0
  82. package/bin/scrapingPipeline/site.js +129 -0
  83. package/bin/scrapingPipeline/site.js.map +1 -0
  84. package/bin/scrapingPipeline/tabs.d.ts +3 -0
  85. package/bin/scrapingPipeline/tabs.js +67 -0
  86. package/bin/scrapingPipeline/tabs.js.map +1 -0
  87. package/bin/tabs/retrieveReadme.d.ts +3 -0
  88. package/bin/tabs/retrieveReadme.js +78 -0
  89. package/bin/tabs/retrieveReadme.js.map +1 -0
  90. package/bin/tsconfig.build.tsbuildinfo +1 -1
  91. package/bin/types/components.d.ts +2 -0
  92. package/bin/types/components.js +2 -0
  93. package/bin/types/components.js.map +1 -0
  94. package/bin/types/framework.d.ts +8 -0
  95. package/bin/types/framework.js +3 -0
  96. package/bin/types/framework.js.map +1 -0
  97. package/bin/types/hast.d.ts +6 -0
  98. package/bin/types/hast.js +2 -0
  99. package/bin/types/hast.js.map +1 -0
  100. package/bin/types/result.d.ts +7 -0
  101. package/bin/types/result.js +2 -0
  102. package/bin/types/result.js.map +1 -0
  103. package/bin/types/scrapeFunc.d.ts +3 -0
  104. package/bin/types/scrapeFunc.js +2 -0
  105. package/bin/types/scrapeFunc.js.map +1 -0
  106. package/bin/utils/append.d.ts +1 -0
  107. package/bin/utils/append.js +12 -0
  108. package/bin/utils/append.js.map +1 -0
  109. package/bin/utils/children.d.ts +5 -0
  110. package/bin/utils/children.js +35 -0
  111. package/bin/utils/children.js.map +1 -0
  112. package/bin/utils/className.d.ts +3 -0
  113. package/bin/utils/className.js +13 -0
  114. package/bin/utils/className.js.map +1 -0
  115. package/bin/utils/detectFramework.d.ts +4 -0
  116. package/bin/utils/detectFramework.js +60 -0
  117. package/bin/utils/detectFramework.js.map +1 -0
  118. package/bin/utils/emptyParagraphs.d.ts +3 -0
  119. package/bin/utils/emptyParagraphs.js +19 -0
  120. package/bin/utils/emptyParagraphs.js.map +1 -0
  121. package/bin/utils/errors.d.ts +3 -0
  122. package/bin/utils/errors.js +16 -0
  123. package/bin/utils/errors.js.map +1 -0
  124. package/bin/utils/escape.d.ts +2 -0
  125. package/bin/utils/escape.js +25 -0
  126. package/bin/utils/escape.js.map +1 -0
  127. package/bin/utils/extension.d.ts +3 -0
  128. package/bin/utils/extension.js +18 -0
  129. package/bin/utils/extension.js.map +1 -0
  130. package/bin/utils/file.d.ts +4 -0
  131. package/bin/utils/file.js +43 -0
  132. package/bin/utils/file.js.map +1 -0
  133. package/bin/utils/firstChild.d.ts +2 -0
  134. package/bin/utils/firstChild.js +12 -0
  135. package/bin/utils/firstChild.js.map +1 -0
  136. package/bin/utils/images.d.ts +5 -0
  137. package/bin/utils/images.js +86 -0
  138. package/bin/utils/images.js.map +1 -0
  139. package/bin/utils/img.d.ts +2 -0
  140. package/bin/utils/img.js +15 -0
  141. package/bin/utils/img.js.map +1 -0
  142. package/bin/utils/log.d.ts +18 -0
  143. package/bin/utils/log.js +68 -0
  144. package/bin/utils/log.js.map +1 -0
  145. package/bin/utils/nestedRoots.d.ts +7 -0
  146. package/bin/utils/nestedRoots.js +19 -0
  147. package/bin/utils/nestedRoots.js.map +1 -0
  148. package/bin/utils/network.d.ts +5 -0
  149. package/bin/utils/network.js +82 -0
  150. package/bin/utils/network.js.map +1 -0
  151. package/bin/utils/path.d.ts +1 -0
  152. package/bin/utils/path.js +22 -0
  153. package/bin/utils/path.js.map +1 -0
  154. package/bin/utils/position.d.ts +3 -0
  155. package/bin/utils/position.js +12 -0
  156. package/bin/utils/position.js.map +1 -0
  157. package/bin/utils/reservedNames.d.ts +4 -0
  158. package/bin/utils/reservedNames.js +27 -0
  159. package/bin/utils/reservedNames.js.map +1 -0
  160. package/bin/utils/strings.d.ts +2 -0
  161. package/bin/utils/strings.js +7 -0
  162. package/bin/utils/strings.js.map +1 -0
  163. package/bin/utils/text.d.ts +2 -0
  164. package/bin/utils/text.js +11 -0
  165. package/bin/utils/text.js.map +1 -0
  166. package/bin/utils/title.d.ts +10 -0
  167. package/bin/utils/title.js +58 -0
  168. package/bin/utils/title.js.map +1 -0
  169. package/bin/utils/url.d.ts +3 -0
  170. package/bin/utils/url.js +10 -0
  171. package/bin/utils/url.js.map +1 -0
  172. package/package.json +20 -11
  173. package/src/assert.ts +15 -0
  174. package/src/cli.ts +53 -90
  175. package/src/components/Accordion.ts +84 -0
  176. package/src/components/AccordionGroup.ts +69 -0
  177. package/src/components/Callout.ts +159 -0
  178. package/src/components/Card.ts +168 -0
  179. package/src/components/CardGroup.ts +69 -0
  180. package/src/components/CodeGroup.ts +209 -0
  181. package/src/components/Frame.ts +86 -0
  182. package/src/components/Tabs.ts +154 -0
  183. package/src/components/link.ts +17 -0
  184. package/src/constants.ts +37 -19
  185. package/src/customComponents/create.ts +106 -0
  186. package/src/customComponents/plugin.ts +31 -0
  187. package/src/customComponents/selective.ts +37 -0
  188. package/src/nav/iterate.ts +18 -0
  189. package/src/nav/listItems.ts +82 -0
  190. package/src/nav/retrieve.ts +88 -0
  191. package/src/nav/root.ts +47 -0
  192. package/src/openapi/generateOpenApiPages.ts +2 -2
  193. package/src/root/retrieve.ts +52 -0
  194. package/src/scrapingPipeline/group.ts +62 -0
  195. package/src/scrapingPipeline/icon.ts +26 -0
  196. package/src/scrapingPipeline/images.ts +67 -0
  197. package/src/scrapingPipeline/logo.ts +127 -0
  198. package/src/scrapingPipeline/page.ts +130 -0
  199. package/src/scrapingPipeline/root.ts +10 -0
  200. package/src/scrapingPipeline/site.ts +161 -0
  201. package/src/scrapingPipeline/tabs.ts +87 -0
  202. package/src/tabs/retrieveReadme.ts +99 -0
  203. package/src/types/components.ts +3 -0
  204. package/src/types/framework.ts +10 -0
  205. package/src/types/hast.ts +12 -0
  206. package/src/types/result.ts +1 -0
  207. package/src/types/scrapeFunc.ts +9 -0
  208. package/src/utils/append.ts +9 -0
  209. package/src/utils/children.ts +51 -0
  210. package/src/utils/className.ts +14 -0
  211. package/src/utils/detectFramework.ts +72 -0
  212. package/src/utils/emptyParagraphs.ts +21 -0
  213. package/src/utils/errors.ts +24 -0
  214. package/src/utils/escape.ts +30 -0
  215. package/src/utils/extension.ts +19 -0
  216. package/src/utils/file.ts +58 -0
  217. package/src/utils/firstChild.ts +13 -0
  218. package/src/utils/images.ts +101 -0
  219. package/src/utils/img.ts +17 -0
  220. package/src/utils/log.ts +82 -0
  221. package/src/utils/nestedRoots.ts +20 -0
  222. package/src/utils/network.ts +95 -0
  223. package/src/utils/path.ts +27 -0
  224. package/src/utils/position.ts +14 -0
  225. package/src/utils/reservedNames.ts +31 -0
  226. package/src/utils/strings.ts +7 -0
  227. package/src/utils/text.ts +11 -0
  228. package/src/utils/title.ts +68 -0
  229. package/src/utils/url.ts +8 -0
  230. package/bin/browser.d.ts +0 -2
  231. package/bin/browser.js +0 -24
  232. package/bin/browser.js.map +0 -1
  233. package/bin/checks.d.ts +0 -8
  234. package/bin/checks.js +0 -24
  235. package/bin/checks.js.map +0 -1
  236. package/bin/downloadImage.d.ts +0 -5
  237. package/bin/downloadImage.js +0 -88
  238. package/bin/downloadImage.js.map +0 -1
  239. package/bin/scraping/combineNavWithEmptyGroupTitles.d.ts +0 -2
  240. package/bin/scraping/combineNavWithEmptyGroupTitles.js +0 -20
  241. package/bin/scraping/combineNavWithEmptyGroupTitles.js.map +0 -1
  242. package/bin/scraping/detectFramework.d.ts +0 -9
  243. package/bin/scraping/detectFramework.js +0 -36
  244. package/bin/scraping/detectFramework.js.map +0 -1
  245. package/bin/scraping/downloadAllImages.d.ts +0 -4
  246. package/bin/scraping/downloadAllImages.js +0 -36
  247. package/bin/scraping/downloadAllImages.js.map +0 -1
  248. package/bin/scraping/downloadLogoImage.d.ts +0 -1
  249. package/bin/scraping/downloadLogoImage.js +0 -12
  250. package/bin/scraping/downloadLogoImage.js.map +0 -1
  251. package/bin/scraping/replaceImagePaths.d.ts +0 -1
  252. package/bin/scraping/replaceImagePaths.js +0 -14
  253. package/bin/scraping/replaceImagePaths.js.map +0 -1
  254. package/bin/scraping/scrapeFileGettingFileNameFromUrl.d.ts +0 -6
  255. package/bin/scraping/scrapeFileGettingFileNameFromUrl.js +0 -46
  256. package/bin/scraping/scrapeFileGettingFileNameFromUrl.js.map +0 -1
  257. package/bin/scraping/scrapeGettingFileNameFromUrl.d.ts +0 -6
  258. package/bin/scraping/scrapeGettingFileNameFromUrl.js +0 -13
  259. package/bin/scraping/scrapeGettingFileNameFromUrl.js.map +0 -1
  260. package/bin/scraping/scrapePage.d.ts +0 -8
  261. package/bin/scraping/scrapePage.js +0 -10
  262. package/bin/scraping/scrapePage.js.map +0 -1
  263. package/bin/scraping/scrapePageCommands.d.ts +0 -7
  264. package/bin/scraping/scrapePageCommands.js +0 -50
  265. package/bin/scraping/scrapePageCommands.js.map +0 -1
  266. package/bin/scraping/scrapeSection.d.ts +0 -3
  267. package/bin/scraping/scrapeSection.js +0 -12
  268. package/bin/scraping/scrapeSection.js.map +0 -1
  269. package/bin/scraping/scrapeSectionCommands.d.ts +0 -6
  270. package/bin/scraping/scrapeSectionCommands.js +0 -63
  271. package/bin/scraping/scrapeSectionCommands.js.map +0 -1
  272. package/bin/scraping/site-scrapers/Intercom/scrapeIntercomPage.d.ts +0 -5
  273. package/bin/scraping/site-scrapers/Intercom/scrapeIntercomPage.js +0 -29
  274. package/bin/scraping/site-scrapers/Intercom/scrapeIntercomPage.js.map +0 -1
  275. package/bin/scraping/site-scrapers/Intercom/scrapeIntercomSection.d.ts +0 -2
  276. package/bin/scraping/site-scrapers/Intercom/scrapeIntercomSection.js +0 -31
  277. package/bin/scraping/site-scrapers/Intercom/scrapeIntercomSection.js.map +0 -1
  278. package/bin/scraping/site-scrapers/alternateGroupTitle.d.ts +0 -3
  279. package/bin/scraping/site-scrapers/alternateGroupTitle.js +0 -9
  280. package/bin/scraping/site-scrapers/alternateGroupTitle.js.map +0 -1
  281. package/bin/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.d.ts +0 -5
  282. package/bin/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.js +0 -33
  283. package/bin/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.js.map +0 -1
  284. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursively.d.ts +0 -3
  285. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursively.js +0 -35
  286. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursively.js.map +0 -1
  287. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursivelyGitBook.d.ts +0 -3
  288. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursivelyGitBook.js +0 -33
  289. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursivelyGitBook.js.map +0 -1
  290. package/bin/scraping/site-scrapers/openNestedDocusaurusMenus.d.ts +0 -2
  291. package/bin/scraping/site-scrapers/openNestedDocusaurusMenus.js +0 -30
  292. package/bin/scraping/site-scrapers/openNestedDocusaurusMenus.js.map +0 -1
  293. package/bin/scraping/site-scrapers/openNestedGitbookMenus.d.ts +0 -2
  294. package/bin/scraping/site-scrapers/openNestedGitbookMenus.js +0 -21
  295. package/bin/scraping/site-scrapers/openNestedGitbookMenus.js.map +0 -1
  296. package/bin/scraping/site-scrapers/scrapeDocusaurusPage.d.ts +0 -5
  297. package/bin/scraping/site-scrapers/scrapeDocusaurusPage.js +0 -53
  298. package/bin/scraping/site-scrapers/scrapeDocusaurusPage.js.map +0 -1
  299. package/bin/scraping/site-scrapers/scrapeDocusaurusSection.d.ts +0 -2
  300. package/bin/scraping/site-scrapers/scrapeDocusaurusSection.js +0 -32
  301. package/bin/scraping/site-scrapers/scrapeDocusaurusSection.js.map +0 -1
  302. package/bin/scraping/site-scrapers/scrapeGitBookPage.d.ts +0 -5
  303. package/bin/scraping/site-scrapers/scrapeGitBookPage.js +0 -56
  304. package/bin/scraping/site-scrapers/scrapeGitBookPage.js.map +0 -1
  305. package/bin/scraping/site-scrapers/scrapeGitBookSection.d.ts +0 -2
  306. package/bin/scraping/site-scrapers/scrapeGitBookSection.js +0 -42
  307. package/bin/scraping/site-scrapers/scrapeGitBookSection.js.map +0 -1
  308. package/bin/scraping/site-scrapers/scrapeReadMePage.d.ts +0 -5
  309. package/bin/scraping/site-scrapers/scrapeReadMePage.js +0 -38
  310. package/bin/scraping/site-scrapers/scrapeReadMePage.js.map +0 -1
  311. package/bin/scraping/site-scrapers/scrapeReadMeSection.d.ts +0 -2
  312. package/bin/scraping/site-scrapers/scrapeReadMeSection.js +0 -39
  313. package/bin/scraping/site-scrapers/scrapeReadMeSection.js.map +0 -1
  314. package/bin/util.d.ts +0 -29
  315. package/bin/util.js +0 -97
  316. package/bin/util.js.map +0 -1
  317. package/src/browser.ts +0 -24
  318. package/src/checks.ts +0 -32
  319. package/src/downloadImage.ts +0 -102
  320. package/src/scraping/combineNavWithEmptyGroupTitles.ts +0 -21
  321. package/src/scraping/detectFramework.ts +0 -55
  322. package/src/scraping/downloadAllImages.ts +0 -61
  323. package/src/scraping/downloadLogoImage.ts +0 -24
  324. package/src/scraping/replaceImagePaths.ts +0 -17
  325. package/src/scraping/scrapeFileGettingFileNameFromUrl.ts +0 -84
  326. package/src/scraping/scrapeGettingFileNameFromUrl.ts +0 -56
  327. package/src/scraping/scrapePage.ts +0 -40
  328. package/src/scraping/scrapePageCommands.ts +0 -68
  329. package/src/scraping/scrapeSection.ts +0 -30
  330. package/src/scraping/scrapeSectionCommands.ts +0 -98
  331. package/src/scraping/site-scrapers/Intercom/scrapeIntercomPage.ts +0 -52
  332. package/src/scraping/site-scrapers/Intercom/scrapeIntercomSection.ts +0 -54
  333. package/src/scraping/site-scrapers/alternateGroupTitle.ts +0 -11
  334. package/src/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.ts +0 -45
  335. package/src/scraping/site-scrapers/links-per-group/getLinksRecursively.ts +0 -47
  336. package/src/scraping/site-scrapers/links-per-group/getLinksRecursivelyGitBook.ts +0 -44
  337. package/src/scraping/site-scrapers/openNestedDocusaurusMenus.ts +0 -42
  338. package/src/scraping/site-scrapers/openNestedGitbookMenus.ts +0 -27
  339. package/src/scraping/site-scrapers/scrapeDocusaurusPage.ts +0 -85
  340. package/src/scraping/site-scrapers/scrapeDocusaurusSection.ts +0 -63
  341. package/src/scraping/site-scrapers/scrapeGitBookPage.ts +0 -82
  342. package/src/scraping/site-scrapers/scrapeGitBookSection.ts +0 -69
  343. package/src/scraping/site-scrapers/scrapeReadMePage.ts +0 -56
  344. package/src/scraping/site-scrapers/scrapeReadMeSection.ts +0 -66
  345. package/src/util.ts +0 -122
@@ -1 +0,0 @@
1
- {"version":3,"file":"scrapeReadMePage.js","sourceRoot":"","sources":["../../../src/scraping/site-scrapers/scrapeReadMePage.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,OAAO,MAAM,SAAS,CAAC;AACnC,OAAO,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAC;AAEtD,OAAO,iBAAiB,MAAM,yBAAyB,CAAC;AACxD,OAAO,iBAAiB,MAAM,yBAAyB,CAAC;AAExD,MAAM,CAAC,KAAK,UAAU,gBAAgB,CACpC,IAAY,EACZ,MAAc,EACd,MAAc,EACd,YAAoB,EACpB,SAAkB,EAClB,CAAqB,CAAC,UAAU;;IAEhC,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE7B,MAAM,cAAc,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,CAAC;IACvC,MAAM,KAAK,GAAG,cAAc,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,CAAC;IAC3C,IAAI,WAAW,GAAG,CAAC,CAAC,gBAAgB,EAAE,cAAc,CAAC,MAAM,EAAE,CAAC,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,CAAC;IAC7E,IAAI,CAAC,WAAW,EAAE,CAAC;QACjB,WAAW,GAAG,CAAC,CAAC,wBAAwB,CAAC,CAAC,IAAI,EAAE,CAAC,IAAI,EAAE,CAAC;IAC1D,CAAC;IAED,IAAI,OAAO,GAAG,CAAC,CAAC,8BAA8B,CAAC,CAAC,KAAK,EAAE,CAAC;IACxD,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACzB,OAAO,GAAG,CAAC,CAAC,8BAA8B,CAAC,CAAC;IAC9C,CAAC;IAED,+EAA+E;IAC/E,MAAM,WAAW,GAAG,OAAO,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC;IAEzC,MAAM,eAAe,GAAG,MAAM,iBAAiB,CAAC,CAAC,EAAE,OAAO,EAAE,MAAM,EAAE,YAAY,EAAE,SAAS,CAAC,CAAC;IAE7F,MAAM,GAAG,GAAG,IAAI,gBAAgB,CAAC,EAAE,cAAc,EAAE,KAAK,EAAE,CAAC,CAAC;IAC5D,IAAI,QAAQ,GAAG,GAAG,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;IAE1C,2DAA2D;IAC3D,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,SAAS,EAAE,KAAK,CAAC,CAAC;IAE9C,qDAAqD;IACrD,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC;IAE3C,6BAA6B;IAC7B,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,kBAAkB,EAAE,IAAI,CAAC,CAAC;IAEtD,iCAAiC;IACjC,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC,CAAC;IAE/C,gEAAgE;IAChE,QAAQ,GAAG,QAAQ,CAAC,OAAO,CAAC,wBAAwB,EAAE,SAAS,CAAC,CAAC;IACjE,IAAI,eAAe,EAAE,CAAC;QACpB,QAAQ,GAAG,iBAAiB,CAAC,eAAe,EAAE,MAAM,EAAE,QAAQ,CAAC,CAAC;IAClE,CAAC;IAED,OAAO,EAAE,KAAK,EAAE,WAAW,EAAE,QAAQ,EAAE,CAAC;AAC1C,CAAC"}
@@ -1,2 +0,0 @@
1
- import { NavigationEntry } from '@mintlify/models';
2
- export declare function scrapeReadMeSection(html: string, origin: string, cliDir: string, imageBaseDir: string, overwrite: boolean, version: string | undefined): Promise<NavigationEntry[]>;
@@ -1,39 +0,0 @@
1
- import * as cheerio from 'cheerio';
2
- import downloadLogoImage from '../downloadLogoImage.js';
3
- import { scrapeGettingFileNameFromUrl } from '../scrapeGettingFileNameFromUrl.js';
4
- import getLinksRecursively from './links-per-group/getLinksRecursively.js';
5
- import { scrapeReadMePage } from './scrapeReadMePage.js';
6
- export async function scrapeReadMeSection(html, origin, cliDir, imageBaseDir, overwrite, version) {
7
- const $ = cheerio.load(html);
8
- // Download the logo
9
- const logoSrc = $('.rm-Logo-img').first().attr('src');
10
- downloadLogoImage(logoSrc, imageBaseDir, origin, overwrite).catch(console.error);
11
- // Get all the navigation sections, but only from the first
12
- // sidebar found. There are multiple in the HTML for mobile
13
- // responsiveness but they all have the same links.
14
- const navigationSections = $('.rm-Sidebar').first().find('.rm-Sidebar-section');
15
- const groupsConfig = navigationSections.toArray().map((s) => {
16
- const section = $(s);
17
- const sectionTitle = section.find('h3').first().text();
18
- // Get all links, then use filter to remove duplicates.
19
- // There are duplicates because of nested navigation, eg:
20
- // subgroupTitle -> /first-page
21
- // -- First Page -> /first-page ** DUPLICATE **
22
- // -- Second Page -> /second-page
23
- const linkSections = section.find('.rm-Sidebar-list').first().children();
24
- const pages = getLinksRecursively(linkSections, $).filter((value, index, array) => array.indexOf(value) === index);
25
- // Follows the same structure as mint.json
26
- return {
27
- group: sectionTitle,
28
- pages: pages,
29
- };
30
- });
31
- // Scrape each link in the navigation.
32
- return Promise.all(groupsConfig.map(async (navEntry) => {
33
- return await scrapeGettingFileNameFromUrl(
34
- // ReadMe requires a directory on all sections whereas we use root.
35
- // /docs is their default directory so we remove it
36
- navEntry, cliDir, origin, overwrite, scrapeReadMePage, false, version, '/docs');
37
- }));
38
- }
39
- //# sourceMappingURL=scrapeReadMeSection.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"scrapeReadMeSection.js","sourceRoot":"","sources":["../../../src/scraping/site-scrapers/scrapeReadMeSection.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,OAAO,MAAM,SAAS,CAAC;AAEnC,OAAO,iBAAiB,MAAM,yBAAyB,CAAC;AACxD,OAAO,EAAE,4BAA4B,EAAE,MAAM,oCAAoC,CAAC;AAClF,OAAO,mBAAmB,MAAM,0CAA0C,CAAC;AAC3E,OAAO,EAAE,gBAAgB,EAAE,MAAM,uBAAuB,CAAC;AAEzD,MAAM,CAAC,KAAK,UAAU,mBAAmB,CACvC,IAAY,EACZ,MAAc,EACd,MAAc,EACd,YAAoB,EACpB,SAAkB,EAClB,OAA2B;IAE3B,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IAE7B,oBAAoB;IACpB,MAAM,OAAO,GAAG,CAAC,CAAC,cAAc,CAAC,CAAC,KAAK,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;IACtD,iBAAiB,CAAC,OAAO,EAAE,YAAY,EAAE,MAAM,EAAE,SAAS,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;IAEjF,2DAA2D;IAC3D,2DAA2D;IAC3D,mDAAmD;IACnD,MAAM,kBAAkB,GAAG,CAAC,CAAC,aAAa,CAAC,CAAC,KAAK,EAAE,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC;IAEhF,MAAM,YAAY,GAAe,kBAAkB,CAAC,OAAO,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE;QACtE,MAAM,OAAO,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;QACrB,MAAM,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,KAAK,EAAE,CAAC,IAAI,EAAE,CAAC;QAEvD,uDAAuD;QACvD,yDAAyD;QACzD,+BAA+B;QAC/B,iDAAiD;QACjD,iCAAiC;QACjC,MAAM,YAAY,GAAG,OAAO,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,KAAK,EAAE,CAAC,QAAQ,EAAE,CAAC;QACzE,MAAM,KAAK,GAAG,mBAAmB,CAAC,YAAY,EAAE,CAAC,CAAC,CAAC,MAAM,CACvD,CAAC,KAAK,EAAE,KAAK,EAAE,KAAK,EAAE,EAAE,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAC,KAAK,KAAK,CACxD,CAAC;QAEF,0CAA0C;QAC1C,OAAO;YACL,KAAK,EAAE,YAAY;YACnB,KAAK,EAAE,KAAK;SACb,CAAC;IACJ,CAAC,CAAC,CAAC;IAEH,sCAAsC;IACtC,OAAO,OAAO,CAAC,GAAG,CAChB,YAAY,CAAC,GAAG,CAAC,KAAK,EAAE,QAAyB,EAAE,EAAE;QACnD,OAAO,MAAM,4BAA4B;QACvC,mEAAmE;QACnE,mDAAmD;QACnD,QAAQ,EACR,MAAM,EACN,MAAM,EACN,SAAS,EACT,gBAAgB,EAChB,KAAK,EACL,OAAO,EACP,OAAO,CACR,CAAC;IACJ,CAAC,CAAC,CACH,CAAC;AACJ,CAAC"}
package/bin/util.d.ts DELETED
@@ -1,29 +0,0 @@
1
- import { NavigationEntry } from '@mintlify/models';
2
- import { Ora as OraType } from 'ora';
3
- export declare const MintConfig: (name: string, color: string, ctaName: string, ctaUrl: string, filename: string) => {
4
- name: string;
5
- logo: string;
6
- favicon: string;
7
- colors: {
8
- primary: string;
9
- };
10
- topbarLinks: never[];
11
- topbarCtaButton: {
12
- name: string;
13
- url: string;
14
- };
15
- anchors: never[];
16
- navigation: {
17
- group: string;
18
- pages: string[];
19
- }[];
20
- };
21
- export declare const Page: (title: string, description?: string, markdown?: string) => string;
22
- export declare function getOrigin(url: string): string;
23
- export declare function objToReadableString(objs: NavigationEntry[]): string;
24
- export declare const toFilename: (title: string) => string;
25
- export declare const addMdx: (fileName: string) => string;
26
- export declare const createPage: (title: string, description?: string, markdown?: string, overwrite?: boolean, rootDir?: string, fileName?: string) => void;
27
- export declare const buildLogger: (startText?: string) => OraType;
28
- export declare const getFileExtension: (filename: string) => string | undefined;
29
- export declare const fileBelongsInPagesFolder: (filename: string) => boolean | "" | undefined;
package/bin/util.js DELETED
@@ -1,97 +0,0 @@
1
- import { existsSync, mkdirSync, writeFileSync } from 'fs';
2
- import Ora from 'ora';
3
- import path from 'path';
4
- export const MintConfig = (name, color, ctaName, ctaUrl, filename) => {
5
- return {
6
- name,
7
- logo: '',
8
- favicon: '',
9
- colors: {
10
- primary: color,
11
- },
12
- topbarLinks: [],
13
- topbarCtaButton: {
14
- name: ctaName,
15
- url: ctaUrl,
16
- },
17
- anchors: [],
18
- navigation: [
19
- {
20
- group: 'Home',
21
- pages: [filename],
22
- },
23
- ],
24
- // footerSocials: {}, // support object type for footer tyoes
25
- };
26
- };
27
- export const Page = (title, description, markdown) => {
28
- // If we are an empty String we want to add two quotes,
29
- // if we added as we went we would detect the first quote
30
- // as the closing quote.
31
- const startsWithQuote = title.startsWith('"');
32
- const endsWithQuote = title.startsWith('"');
33
- if (!startsWithQuote) {
34
- title = '"' + title;
35
- }
36
- if (!endsWithQuote) {
37
- title = title + '"';
38
- }
39
- const optionalDescription = description ? `\ndescription: "${description}"` : '';
40
- return `---\ntitle: ${title}${optionalDescription}\n---\n\n${markdown}`;
41
- };
42
- export function getOrigin(url) {
43
- // eg. https://google.com -> https://google.com
44
- // https://google.com/page -> https://google.com
45
- return new URL(url).origin;
46
- }
47
- export function objToReadableString(objs) {
48
- // Two spaces as indentation
49
- return objs.map((obj) => JSON.stringify(obj, null, 2)).join(',\n');
50
- }
51
- export const toFilename = (title) => {
52
- // Gets rid of special characters at the start and end
53
- // of the name by converting to spaces then using trim.
54
- return title
55
- .replace(/[^a-z0-9]/gi, ' ')
56
- .trim()
57
- .replace(/ /g, '-')
58
- .toLowerCase();
59
- };
60
- export const addMdx = (fileName) => {
61
- if (fileName.endsWith('.mdx')) {
62
- return fileName;
63
- }
64
- return fileName + '.mdx';
65
- };
66
- export const createPage = (title, description, markdown, overwrite = false, rootDir = '', fileName) => {
67
- const writePath = path.join(rootDir, addMdx(fileName || toFilename(title)));
68
- // Create the folders needed if they're missing
69
- mkdirSync(rootDir, { recursive: true });
70
- if (!overwrite && existsSync(writePath)) {
71
- console.log(`❌ Skipping existing file ${writePath}`);
72
- return;
73
- }
74
- // Write the page to disk
75
- try {
76
- writeFileSync(writePath, Page(title, description, markdown));
77
- console.log('✏️ - ' + writePath);
78
- }
79
- catch (e) {
80
- console.error(e);
81
- }
82
- };
83
- export const buildLogger = (startText = '') => {
84
- const logger = Ora().start(startText);
85
- return logger;
86
- };
87
- export const getFileExtension = (filename) => {
88
- const ext = filename.substring(filename.lastIndexOf('.') + 1, filename.length);
89
- if (filename === ext)
90
- return undefined;
91
- return ext.toLowerCase();
92
- };
93
- export const fileBelongsInPagesFolder = (filename) => {
94
- const extension = getFileExtension(filename);
95
- return extension && (extension === 'mdx' || extension === 'md' || extension === 'tsx');
96
- };
97
- //# sourceMappingURL=util.js.map
package/bin/util.js.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"file":"util.js","sourceRoot":"","sources":["../src/util.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,UAAU,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,IAAI,CAAC;AAC1D,OAAO,GAAuB,MAAM,KAAK,CAAC;AAC1C,OAAO,IAAI,MAAM,MAAM,CAAC;AAExB,MAAM,CAAC,MAAM,UAAU,GAAG,CACxB,IAAY,EACZ,KAAa,EACb,OAAe,EACf,MAAc,EACd,QAAgB,EAChB,EAAE;IACF,OAAO;QACL,IAAI;QACJ,IAAI,EAAE,EAAE;QACR,OAAO,EAAE,EAAE;QACX,MAAM,EAAE;YACN,OAAO,EAAE,KAAK;SACf;QACD,WAAW,EAAE,EAAE;QACf,eAAe,EAAE;YACf,IAAI,EAAE,OAAO;YACb,GAAG,EAAE,MAAM;SACZ;QACD,OAAO,EAAE,EAAE;QACX,UAAU,EAAE;YACV;gBACE,KAAK,EAAE,MAAM;gBACb,KAAK,EAAE,CAAC,QAAQ,CAAC;aAClB;SACF;QACD,6DAA6D;KAC9D,CAAC;AACJ,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,IAAI,GAAG,CAAC,KAAa,EAAE,WAAoB,EAAE,QAAiB,EAAE,EAAE;IAC7E,uDAAuD;IACvD,yDAAyD;IACzD,wBAAwB;IACxB,MAAM,eAAe,GAAG,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC9C,MAAM,aAAa,GAAG,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC5C,IAAI,CAAC,eAAe,EAAE,CAAC;QACrB,KAAK,GAAG,GAAG,GAAG,KAAK,CAAC;IACtB,CAAC;IACD,IAAI,CAAC,aAAa,EAAE,CAAC;QACnB,KAAK,GAAG,KAAK,GAAG,GAAG,CAAC;IACtB,CAAC;IAED,MAAM,mBAAmB,GAAG,WAAW,CAAC,CAAC,CAAC,mBAAmB,WAAW,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;IACjF,OAAO,eAAe,KAAK,GAAG,mBAAmB,YAAY,QAAQ,EAAE,CAAC;AAC1E,CAAC,CAAC;AAEF,MAAM,UAAU,SAAS,CAAC,GAAW;IACnC,+CAA+C;IAC/C,gDAAgD;IAChD,OAAO,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC;AAC7B,CAAC;AAED,MAAM,UAAU,mBAAmB,CAAC,IAAuB;IACzD,4BAA4B;IAC5B,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACrE,CAAC;AAED,MAAM,CAAC,MAAM,UAAU,GAAG,CAAC,KAAa,EAAE,EAAE;IAC1C,sDAAsD;IACtD,uDAAuD;IACvD,OAAO,KAAK;SACT,OAAO,CAAC,aAAa,EAAE,GAAG,CAAC;SAC3B,IAAI,EAAE;SACN,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC;SAClB,WAAW,EAAE,CAAC;AACnB,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAG,CAAC,QAAgB,EAAE,EAAE;IACzC,IAAI,QAAQ,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC;QAC9B,OAAO,QAAQ,CAAC;IAClB,CAAC;IACD,OAAO,QAAQ,GAAG,MAAM,CAAC;AAC3B,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAAG,CACxB,KAAa,EACb,WAAoB,EACpB,QAAiB,EACjB,SAAS,GAAG,KAAK,EACjB,OAAO,GAAG,EAAE,EACZ,QAAiB,EAC
jB,EAAE;IACF,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,QAAQ,IAAI,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAE5E,+CAA+C;IAC/C,SAAS,CAAC,OAAO,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAExC,IAAI,CAAC,SAAS,IAAI,UAAU,CAAC,SAAS,CAAC,EAAE,CAAC;QACxC,OAAO,CAAC,GAAG,CAAC,4BAA4B,SAAS,EAAE,CAAC,CAAC;QACrD,OAAO;IACT,CAAC;IAED,yBAAyB;IACzB,IAAI,CAAC;QACH,aAAa,CAAC,SAAS,EAAE,IAAI,CAAC,KAAK,EAAE,WAAW,EAAE,QAAQ,CAAC,CAAC,CAAC;QAC7D,OAAO,CAAC,GAAG,CAAC,OAAO,GAAG,SAAS,CAAC,CAAC;IACnC,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IACnB,CAAC;AACH,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,WAAW,GAAG,CAAC,SAAS,GAAG,EAAE,EAAW,EAAE;IACrD,MAAM,MAAM,GAAG,GAAG,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC;IACtC,OAAO,MAAM,CAAC;AAChB,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAAC,QAAgB,EAAE,EAAE;IACnD,MAAM,GAAG,GAAG,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,WAAW,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,QAAQ,CAAC,MAAM,CAAC,CAAC;IAC/E,IAAI,QAAQ,KAAK,GAAG;QAAE,OAAO,SAAS,CAAC;IACvC,OAAO,GAAG,CAAC,WAAW,EAAE,CAAC;AAC3B,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,wBAAwB,GAAG,CAAC,QAAgB,EAAE,EAAE;IAC3D,MAAM,SAAS,GAAG,gBAAgB,CAAC,QAAQ,CAAC,CAAC;IAC7C,OAAO,SAAS,IAAI,CAAC,SAAS,KAAK,KAAK,IAAI,SAAS,KAAK,IAAI,IAAI,SAAS,KAAK,KAAK,CAAC,CAAC;AACzF,CAAC,CAAC"}
package/src/browser.ts DELETED
@@ -1,24 +0,0 @@
1
- import { launch } from 'puppeteer';
2
-
3
- export async function startBrowser() {
4
- try {
5
- return await launch({
6
- headless: true,
7
- ignoreHTTPSErrors: true,
8
- });
9
- } catch (err) {
10
- console.log('Could not create a browser instance: ', err);
11
- process.exit(1);
12
- }
13
- }
14
-
15
- export async function getHtmlWithPuppeteer(href: string) {
16
- const browser = await startBrowser();
17
- const page = await browser.newPage();
18
- await page.goto(href, {
19
- waitUntil: 'networkidle2',
20
- });
21
- const html = await page.content();
22
- void browser.close();
23
- return html;
24
- }
package/src/checks.ts DELETED
@@ -1,32 +0,0 @@
1
- import { Framework } from './scraping/detectFramework.js';
2
-
3
- // This checks the link is written correctly, not that the page exists.
4
- export function checkUrl({ url }: { url: string }) {
5
- try {
6
- new URL(url);
7
- } catch {
8
- throw Error(`Invalid link: ${url}\nMake sure the link starts with http:// or https://`);
9
- }
10
- return true;
11
- }
12
-
13
- export function checkVersion({
14
- tool,
15
- docusaurusVersion,
16
- }: {
17
- tool: Framework | undefined;
18
- docusaurusVersion: string | undefined;
19
- }) {
20
- if (tool === 'docusaurus') {
21
- if (docusaurusVersion === undefined) {
22
- throw Error(
23
- 'When using Docusaurus, you must specify the version (1,2,3) using the --docusaurusVersion flag'
24
- );
25
- }
26
- } else {
27
- if (docusaurusVersion !== undefined) {
28
- throw Error('The --docusaurusVersion flag is only applicable when using Docusaurus.');
29
- }
30
- }
31
- return true;
32
- }
@@ -1,102 +0,0 @@
1
- import axios from 'axios';
2
- import { existsSync, mkdirSync, createWriteStream } from 'fs';
3
- import path from 'path';
4
-
5
- import { SUPPORTED_MEDIA_EXTENSIONS } from './constants.js';
6
- import { getFileExtension } from './util.js';
7
-
8
- async function writeImageToFile(imageSrc: string, writePath: string, overwrite: boolean) {
9
- // Avoid unnecessary downloads
10
- if (existsSync(writePath) && !overwrite) {
11
- return Promise.reject({
12
- code: 'EEXIST',
13
- });
14
- }
15
-
16
- // Create the folders needed if they're missing
17
- mkdirSync(path.dirname(writePath), { recursive: true });
18
-
19
- const writer = createWriteStream(writePath);
20
-
21
- try {
22
- const response = await axios.get(imageSrc, {
23
- responseType: 'stream',
24
- });
25
- // wx prevents overwriting an image with the exact same name
26
- // being created in the time we were downloading
27
- response.data.pipe(writer, {
28
- flag: 'wx',
29
- });
30
-
31
- return new Promise((resolve, reject) => {
32
- writer.on('finish', resolve);
33
- writer.on('error', reject);
34
- });
35
- } catch (e) {
36
- return Promise.reject({
37
- code: 'ENOTFOUND',
38
- });
39
- }
40
- }
41
-
42
- export function isValidImageSrc(src: string) {
43
- if (!src) {
44
- return false;
45
- }
46
- // We do not support downloading base64 in-line images.
47
- if (src.startsWith('data:')) {
48
- return false;
49
- }
50
-
51
- const imageHref = removeMetadataFromImageSrc(src);
52
- const ext = getFileExtension(imageHref);
53
-
54
- if (ext && !SUPPORTED_MEDIA_EXTENSIONS.includes(ext)) {
55
- console.error('🚨 We do not support the file extension: ' + ext);
56
- return false;
57
- }
58
-
59
- return true;
60
- }
61
-
62
- export function getLengthUntilMetadata(src: string, ext: string) {
63
- const lengthUntilMetadata = src.indexOf(`.${ext}`) + `.${ext}`.length;
64
- return src.slice(0, lengthUntilMetadata);
65
- }
66
-
67
- export function removeMetadataFromImageSrc(src: string) {
68
- // Some frameworks add metadata after the file extension, we need to remove that.
69
- if (src.includes('gitbook/image')) {
70
- for (const ext of SUPPORTED_MEDIA_EXTENSIONS) {
71
- if (src.includes(`.${ext}`)) {
72
- return getLengthUntilMetadata(src, ext);
73
- }
74
- }
75
- }
76
- return src.split('#')[0]!.split('?')[0]!;
77
- }
78
-
79
- export function cleanImageSrc(src: string, origin: string) {
80
- // Add origin if the image tags are using relative sources
81
- return src.startsWith('http') ? src : new URL(src, origin).href;
82
- }
83
-
84
- export default async function downloadImage(
85
- imageSrc: string,
86
- writePath: string,
87
- overwrite = false
88
- ) {
89
- await writeImageToFile(imageSrc, writePath, overwrite)
90
- .then(() => {
91
- console.log('🖼️ - ' + writePath);
92
- })
93
- .catch((e) => {
94
- if (e.code === 'EEXIST') {
95
- console.log(`❌ Skipping existing image ${writePath}`);
96
- } else if (e.code === 'ENOTFOUND') {
97
- console.error(`🚨 Cannot download the image, address not found ${imageSrc}`);
98
- } else {
99
- console.error(e);
100
- }
101
- });
102
- }
@@ -1,21 +0,0 @@
1
- import { Navigation, NavigationGroup } from '@mintlify/models';
2
-
3
- export default function combineNavWithEmptyGroupTitles(navArray: Navigation): Navigation {
4
- const newNavArray: Navigation = [];
5
-
6
- navArray.forEach((nav: NavigationGroup) => {
7
- // The first run through the loop will always have -1 as the index.
8
- // JavaScript returns undefined when we look for an index outside the size of the array.
9
- const prev = newNavArray[newNavArray.length - 1];
10
- if (prev == null) {
11
- newNavArray.push(nav);
12
- } else if (!nav.group && !prev.group) {
13
- // Joins multiple groups without a title together IF they occur side by side
14
- prev.pages = prev.pages.concat(nav.pages);
15
- } else {
16
- newNavArray.push(nav);
17
- }
18
- });
19
-
20
- return newNavArray;
21
- }
@@ -1,55 +0,0 @@
1
- import * as cheerio from 'cheerio';
2
-
3
// Documentation frameworks the scraper knows how to detect.
export const frameworks = ['docusaurus', 'gitbook', 'readme', 'intercom'] as const;
export type Framework = (typeof frameworks)[number];

// Result of framework detection. Docusaurus is the only variant that carries
// a version; `framework: undefined` means no known framework was recognized.
export type FrameworkHint =
  | {
      framework: 'docusaurus';
      version: '1' | '2' | '3';
    }
  | {
      framework: 'gitbook' | 'readme' | 'intercom' | undefined;
    };
14
-
15
- export function detectFramework(html: string): FrameworkHint {
16
- const $ = cheerio.load(html);
17
- const docusaurusMeta = $('meta[name="generator"]');
18
-
19
- if (
20
- docusaurusMeta.length > 0 &&
21
- docusaurusMeta.attr('content') &&
22
- typeof docusaurusMeta.attr('content') === 'string' &&
23
- (docusaurusMeta.attr('content') as string).includes('Docusaurus')
24
- ) {
25
- const metaAttrString = docusaurusMeta.attr('content') as string;
26
- if (metaAttrString.includes('v3')) {
27
- return { framework: 'docusaurus', version: '3' };
28
- }
29
- if (metaAttrString.includes('v2')) {
30
- return { framework: 'docusaurus', version: '2' };
31
- } else if (metaAttrString.includes('v1')) {
32
- console.warn(
33
- 'WARNING: We detected Docusaurus version 1 but we only support scraping versions 2 and 3.'
34
- );
35
- return { framework: 'docusaurus', version: '1' };
36
- }
37
- }
38
-
39
- const isGitBook = $('head link[rel="preconnect"][href="https://api.gitbook.com"]').length > 0;
40
- if (isGitBook) {
41
- return { framework: 'gitbook' };
42
- }
43
-
44
- const isReadMe = $('meta[name="readme-deploy"]').length > 0;
45
- if (isReadMe) {
46
- return { framework: 'readme' };
47
- }
48
-
49
- const isIntercom = $("meta[name='intercom:trackingEvent']").length > 0;
50
- if (isIntercom) {
51
- return { framework: 'intercom' };
52
- }
53
-
54
- return { framework: undefined };
55
- }
@@ -1,61 +0,0 @@
1
- import { Cheerio, CheerioAPI, Element } from 'cheerio';
2
- import path from 'path';
3
-
4
- import downloadImage, {
5
- cleanImageSrc,
6
- isValidImageSrc,
7
- removeMetadataFromImageSrc,
8
- } from '../downloadImage.js';
9
-
10
- export default async function downloadAllImages(
11
- $: CheerioAPI,
12
- content: Cheerio<Element>,
13
- origin: string,
14
- baseDir: string,
15
- overwrite: boolean,
16
- modifyFileName?: (fileName: string) => string
17
- ) {
18
- if (!baseDir) {
19
- console.debug('Skipping image downloading');
20
- return;
21
- }
22
-
23
- // We remove duplicates because some frameworks duplicate img tags
24
- // to show the image larger when clicked on.
25
- const imageSrcs = [
26
- ...new Set(
27
- content
28
- .find('img[src]')
29
- .map((_, image) => $(image).attr('src'))
30
- .toArray()
31
- ),
32
- ];
33
-
34
- // Wait to all images to download before continuing
35
- const origToNewArray = await Promise.all(
36
- imageSrcs.map(async (imageSrc) => {
37
- if (!imageSrc || !isValidImageSrc(imageSrc)) {
38
- return {};
39
- }
40
-
41
- const imageHref = cleanImageSrc(imageSrc, origin);
42
- let imageBasename = imageHref;
43
- if (!imageHref.includes('gitbook/image')) {
44
- imageBasename = path.basename(imageHref);
45
- }
46
-
47
- let fileName = removeMetadataFromImageSrc(imageBasename) || '';
48
- if (modifyFileName) {
49
- fileName = modifyFileName(fileName) || '';
50
- }
51
-
52
- const writePath = path.join(baseDir, fileName);
53
-
54
- await downloadImage(imageHref, writePath, overwrite);
55
-
56
- return { [imageSrc]: writePath };
57
- })
58
- );
59
-
60
- return origToNewArray.reduce((result, current) => Object.assign(result, current), {});
61
- }
@@ -1,24 +0,0 @@
1
- import path from 'path';
2
-
3
- import downloadImage, {
4
- cleanImageSrc,
5
- isValidImageSrc,
6
- removeMetadataFromImageSrc,
7
- } from '../downloadImage.js';
8
- import { getFileExtension } from '../util.js';
9
-
10
- export default async function downloadLogoImage(
11
- imageSrc: string | undefined,
12
- imageBaseDir: string,
13
- origin: string,
14
- overwrite: boolean
15
- ) {
16
- if (!imageSrc || !isValidImageSrc(imageSrc)) return;
17
-
18
- const imageHref = cleanImageSrc(imageSrc, origin);
19
-
20
- const ext = getFileExtension(removeMetadataFromImageSrc(imageSrc));
21
- const imagePath = path.join(imageBaseDir, 'logo', 'logo-light-mode.' + ext);
22
-
23
- await downloadImage(imageHref, imagePath, overwrite);
24
- }
@@ -1,17 +0,0 @@
1
- export default function replaceImagePaths(
2
- origToWritePath: Record<string, string>,
3
- cliDir: string,
4
- markdown: string
5
- ) {
6
- // Change image paths to use the downloaded locations
7
- for (const [origHref, writePath] of Object.entries(origToWritePath)) {
8
- // Use relative paths within the folder we are in
9
- if (writePath.startsWith(cliDir)) {
10
- markdown = markdown.replaceAll(origHref, writePath.slice(cliDir.length));
11
- } else {
12
- markdown = markdown.replaceAll(origHref, writePath);
13
- }
14
- }
15
-
16
- return markdown;
17
- }
@@ -1,84 +0,0 @@
1
- import { NavigationEntry } from '@mintlify/models';
2
- import axios from 'axios';
3
- import path from 'path';
4
-
5
- import { getHtmlWithPuppeteer } from '../browser.js';
6
- import { createPage } from '../util.js';
7
-
8
/**
 * Scrapes the single page at `pathname` (relative to `origin`), writes it to
 * disk under `cliDir`, and returns the navigation entry for the written file.
 *
 * External (http/https) links are returned unchanged without scraping.
 * A page with neither title nor markdown yields an empty group entry
 * (`{ group: '', pages: [] }`).
 *
 * @param pathname - site-relative path of the page ('' or '/...'), or an
 *   absolute external URL (returned as-is).
 * @param cliDir - root output directory for pages and images.
 * @param origin - site origin used to resolve `pathname` and relative images.
 * @param overwrite - whether to overwrite existing files on disk.
 * @param scrapePageFunc - framework-specific page scraper returning
 *   title/description/markdown for one HTML document.
 * @param puppeteer - when true, render the page with Puppeteer instead of a
 *   plain HTTP GET (for client-rendered sites).
 * @param version - framework version, forwarded to `scrapePageFunc`.
 * @param baseToRemove - optional leading folder prefix to strip from output
 *   paths.
 * @returns the page's navigation entry: a path string on success, or an
 *   empty group when the page had no content.
 */
export async function scrapeFileGettingFileNameFromUrl(
  pathname: string,
  cliDir: string,
  origin: string,
  overwrite: boolean,
  scrapePageFunc: (
    html: string,
    origin: string,
    cliDir: string,
    imageBaseDir: string,
    overwrite: boolean,
    version: string | undefined
  ) => Promise<{
    title?: string;
    description?: string;
    markdown?: string;
  }>,
  puppeteer = false,
  version: string | undefined,
  baseToRemove?: string
): Promise<NavigationEntry> {
  // Skip scraping external links
  if (pathname.startsWith('https://') || pathname.startsWith('http://')) {
    return pathname;
  }

  // Removes file name from the end
  const splitSubpath = pathname.split('/');
  let folders = splitSubpath.slice(0, splitSubpath.length - 1).join('/');

  // Remove base dir if passed in
  // NOTE(review): replace() only removes the first occurrence, which is the
  // leading prefix thanks to the startsWith() guard.
  if (baseToRemove && folders.startsWith(baseToRemove)) {
    folders = folders.replace(baseToRemove, '');
  }

  // TO DO: Improve this by putting each page's images in a separate
  // folder named after the title of the page.
  const imageBaseDir = path.join(cliDir, 'images', folders);

  // Scrape each page separately
  const href = new URL(pathname, origin).href;
  let html: string;
  if (puppeteer) {
    html = await getHtmlWithPuppeteer(href);
  } else {
    const res = await axios.get(href);
    html = res.data;
  }

  const { title, description, markdown } = await scrapePageFunc(
    html,
    origin,
    cliDir,
    imageBaseDir,
    overwrite,
    version
  );

  // Check if page didn't have content
  if (!title && !markdown) {
    return {
      group: '',
      pages: [],
    };
  }

  const newFileLocation = folders ? path.join(cliDir, folders) : cliDir;

  // Default to introduction.mdx if we encountered index.html
  const fileName = splitSubpath[splitSubpath.length - 1] || 'introduction';

  // Will create subfolders as needed
  createPage(title ?? '', description, markdown, overwrite, newFileLocation, fileName);

  // Removes first slash if we are in a folder, Mintlify doesn't need it
  return folders ? path.join(folders, fileName).substring(1) : fileName;
}
@@ -1,56 +0,0 @@
1
- import { NavigationEntry } from '@mintlify/models';
2
-
3
- import { scrapeFileGettingFileNameFromUrl } from './scrapeFileGettingFileNameFromUrl.js';
4
-
5
- export async function scrapeGettingFileNameFromUrl(
6
- navEntry: NavigationEntry,
7
- cliDir: string,
8
- origin: string,
9
- overwrite: boolean,
10
- scrapePageFunc: (
11
- html: string,
12
- origin: string,
13
- cliDir: string,
14
- imageBaseDir: string,
15
- overwrite: boolean,
16
- version: string | undefined
17
- ) => Promise<{
18
- title?: string;
19
- description?: string;
20
- markdown?: string;
21
- }>,
22
- puppeteer = false,
23
- version: string | undefined,
24
- baseToRemove?: string
25
- ): Promise<NavigationEntry> {
26
- if (typeof navEntry !== 'string') {
27
- const newPages: NavigationEntry[] = [];
28
- for (const nestedNavEntry of navEntry.pages) {
29
- newPages.push(
30
- await scrapeGettingFileNameFromUrl(
31
- nestedNavEntry,
32
- cliDir,
33
- origin,
34
- overwrite,
35
- scrapePageFunc,
36
- puppeteer,
37
- version,
38
- baseToRemove
39
- )
40
- );
41
- }
42
- navEntry.pages = newPages;
43
- return navEntry;
44
- }
45
-
46
- return await scrapeFileGettingFileNameFromUrl(
47
- navEntry,
48
- cliDir,
49
- origin,
50
- overwrite,
51
- scrapePageFunc,
52
- puppeteer,
53
- version,
54
- baseToRemove
55
- );
56
- }