mintlify 2.1.0 → 3.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CONTRIBUTING.md +2 -16
- package/index.js +3 -0
- package/package.json +23 -46
- package/bin/browser.js +0 -24
- package/bin/browser.js.map +0 -1
- package/bin/constants.js +0 -32
- package/bin/constants.js.map +0 -1
- package/bin/downloadImage.js +0 -85
- package/bin/downloadImage.js.map +0 -1
- package/bin/index.js +0 -49
- package/bin/index.js.map +0 -1
- package/bin/local-preview/helper-commands/installDepsCommand.js +0 -12
- package/bin/local-preview/helper-commands/installDepsCommand.js.map +0 -1
- package/bin/local-preview/index.js +0 -154
- package/bin/local-preview/index.js.map +0 -1
- package/bin/local-preview/listener/categorize.js +0 -95
- package/bin/local-preview/listener/categorize.js.map +0 -1
- package/bin/local-preview/listener/categorizeFiles.js +0 -47
- package/bin/local-preview/listener/categorizeFiles.js.map +0 -1
- package/bin/local-preview/listener/generate.js +0 -89
- package/bin/local-preview/listener/generate.js.map +0 -1
- package/bin/local-preview/listener/index.js +0 -200
- package/bin/local-preview/listener/index.js.map +0 -1
- package/bin/local-preview/listener/update.js +0 -24
- package/bin/local-preview/listener/update.js.map +0 -1
- package/bin/local-preview/listener/utils/createPage.js +0 -167
- package/bin/local-preview/listener/utils/createPage.js.map +0 -1
- package/bin/local-preview/listener/utils/fileIsMdxOrMd.js +0 -12
- package/bin/local-preview/listener/utils/fileIsMdxOrMd.js.map +0 -1
- package/bin/local-preview/listener/utils/getOpenApiContext.js +0 -57
- package/bin/local-preview/listener/utils/getOpenApiContext.js.map +0 -1
- package/bin/local-preview/listener/utils/mintConfigFile.js +0 -22
- package/bin/local-preview/listener/utils/mintConfigFile.js.map +0 -1
- package/bin/local-preview/listener/utils/toTitleCase.js +0 -36
- package/bin/local-preview/listener/utils/toTitleCase.js.map +0 -1
- package/bin/local-preview/listener/utils/types.js +0 -2
- package/bin/local-preview/listener/utils/types.js.map +0 -1
- package/bin/local-preview/listener/utils.js +0 -67
- package/bin/local-preview/listener/utils.js.map +0 -1
- package/bin/local-preview/utils/categorizeFiles.js +0 -63
- package/bin/local-preview/utils/categorizeFiles.js.map +0 -1
- package/bin/local-preview/utils/getOpenApiContext.js +0 -58
- package/bin/local-preview/utils/getOpenApiContext.js.map +0 -1
- package/bin/local-preview/utils/injectFavicons.js +0 -72
- package/bin/local-preview/utils/injectFavicons.js.map +0 -1
- package/bin/local-preview/utils/listener.js +0 -116
- package/bin/local-preview/utils/listener.js.map +0 -1
- package/bin/local-preview/utils/metadata.js +0 -118
- package/bin/local-preview/utils/metadata.js.map +0 -1
- package/bin/local-preview/utils/mintConfigFile.js +0 -43
- package/bin/local-preview/utils/mintConfigFile.js.map +0 -1
- package/bin/local-preview/utils/openApiCheck.js +0 -15
- package/bin/local-preview/utils/openApiCheck.js.map +0 -1
- package/bin/local-preview/utils/slugToTitle.js +0 -8
- package/bin/local-preview/utils/slugToTitle.js.map +0 -1
- package/bin/navigation.js +0 -4
- package/bin/navigation.js.map +0 -1
- package/bin/pageTemplate.js +0 -30
- package/bin/pageTemplate.js.map +0 -1
- package/bin/scraping/combineNavWithEmptyGroupTitles.js +0 -20
- package/bin/scraping/combineNavWithEmptyGroupTitles.js.map +0 -1
- package/bin/scraping/detectFramework.js +0 -39
- package/bin/scraping/detectFramework.js.map +0 -1
- package/bin/scraping/downloadAllImages.js +0 -33
- package/bin/scraping/downloadAllImages.js.map +0 -1
- package/bin/scraping/downloadLogoImage.js +0 -13
- package/bin/scraping/downloadLogoImage.js.map +0 -1
- package/bin/scraping/getSitemapLinks.js +0 -18
- package/bin/scraping/getSitemapLinks.js.map +0 -1
- package/bin/scraping/replaceImagePaths.js +0 -17
- package/bin/scraping/replaceImagePaths.js.map +0 -1
- package/bin/scraping/scrapeFileGettingFileNameFromUrl.js +0 -43
- package/bin/scraping/scrapeFileGettingFileNameFromUrl.js.map +0 -1
- package/bin/scraping/scrapeGettingFileNameFromUrl.js +0 -13
- package/bin/scraping/scrapeGettingFileNameFromUrl.js.map +0 -1
- package/bin/scraping/scrapePage.js +0 -10
- package/bin/scraping/scrapePage.js.map +0 -1
- package/bin/scraping/scrapePageCommands.js +0 -55
- package/bin/scraping/scrapePageCommands.js.map +0 -1
- package/bin/scraping/scrapeSection.js +0 -12
- package/bin/scraping/scrapeSection.js.map +0 -1
- package/bin/scraping/scrapeSectionCommands.js +0 -66
- package/bin/scraping/scrapeSectionCommands.js.map +0 -1
- package/bin/scraping/site-scrapers/Intercom/scrapeIntercomPage.js +0 -27
- package/bin/scraping/site-scrapers/Intercom/scrapeIntercomPage.js.map +0 -1
- package/bin/scraping/site-scrapers/Intercom/scrapeIntercomSection.js +0 -32
- package/bin/scraping/site-scrapers/Intercom/scrapeIntercomSection.js.map +0 -1
- package/bin/scraping/site-scrapers/alternateGroupTitle.js +0 -9
- package/bin/scraping/site-scrapers/alternateGroupTitle.js.map +0 -1
- package/bin/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.js +0 -34
- package/bin/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.js.map +0 -1
- package/bin/scraping/site-scrapers/links-per-group/getLinksRecursively.js +0 -38
- package/bin/scraping/site-scrapers/links-per-group/getLinksRecursively.js.map +0 -1
- package/bin/scraping/site-scrapers/links-per-group/getLinksRecursivelyGitBook.js +0 -38
- package/bin/scraping/site-scrapers/links-per-group/getLinksRecursivelyGitBook.js.map +0 -1
- package/bin/scraping/site-scrapers/openNestedDocusaurusMenus.js +0 -30
- package/bin/scraping/site-scrapers/openNestedDocusaurusMenus.js.map +0 -1
- package/bin/scraping/site-scrapers/openNestedGitbookMenus.js +0 -17
- package/bin/scraping/site-scrapers/openNestedGitbookMenus.js.map +0 -1
- package/bin/scraping/site-scrapers/scrapeDocusaurusPage.js +0 -49
- package/bin/scraping/site-scrapers/scrapeDocusaurusPage.js.map +0 -1
- package/bin/scraping/site-scrapers/scrapeDocusaurusSection.js +0 -30
- package/bin/scraping/site-scrapers/scrapeDocusaurusSection.js.map +0 -1
- package/bin/scraping/site-scrapers/scrapeGitBookPage.js +0 -47
- package/bin/scraping/site-scrapers/scrapeGitBookPage.js.map +0 -1
- package/bin/scraping/site-scrapers/scrapeGitBookSection.js +0 -52
- package/bin/scraping/site-scrapers/scrapeGitBookSection.js.map +0 -1
- package/bin/scraping/site-scrapers/scrapeReadMePage.js +0 -36
- package/bin/scraping/site-scrapers/scrapeReadMePage.js.map +0 -1
- package/bin/scraping/site-scrapers/scrapeReadMeSection.js +0 -44
- package/bin/scraping/site-scrapers/scrapeReadMeSection.js.map +0 -1
- package/bin/util.js +0 -129
- package/bin/util.js.map +0 -1
- package/bin/validation/isValidLink.js +0 -11
- package/bin/validation/isValidLink.js.map +0 -1
- package/bin/validation/stopIfInvalidLink.js +0 -9
- package/bin/validation/stopIfInvalidLink.js.map +0 -1
- package/scraper.md +0 -121
- package/src/browser.ts +0 -24
- package/src/constants.ts +0 -40
- package/src/downloadImage.ts +0 -110
- package/src/index.ts +0 -112
- package/src/local-preview/helper-commands/installDepsCommand.ts +0 -13
- package/src/local-preview/index.ts +0 -195
- package/src/local-preview/listener/categorize.ts +0 -105
- package/src/local-preview/listener/generate.ts +0 -110
- package/src/local-preview/listener/index.ts +0 -228
- package/src/local-preview/listener/update.ts +0 -27
- package/src/local-preview/listener/utils/createPage.ts +0 -211
- package/src/local-preview/listener/utils/getOpenApiContext.ts +0 -77
- package/src/local-preview/listener/utils/mintConfigFile.ts +0 -28
- package/src/local-preview/listener/utils/toTitleCase.ts +0 -40
- package/src/local-preview/listener/utils/types.ts +0 -16
- package/src/local-preview/listener/utils.ts +0 -78
- package/src/scraping/combineNavWithEmptyGroupTitles.ts +0 -21
- package/src/scraping/detectFramework.ts +0 -47
- package/src/scraping/downloadAllImages.ts +0 -60
- package/src/scraping/downloadLogoImage.ts +0 -25
- package/src/scraping/getSitemapLinks.ts +0 -18
- package/src/scraping/replaceImagePaths.ts +0 -21
- package/src/scraping/scrapeFileGettingFileNameFromUrl.ts +0 -86
- package/src/scraping/scrapeGettingFileNameFromUrl.ts +0 -54
- package/src/scraping/scrapePage.ts +0 -24
- package/src/scraping/scrapePageCommands.ts +0 -66
- package/src/scraping/scrapeSection.ts +0 -28
- package/src/scraping/scrapeSectionCommands.ts +0 -110
- package/src/scraping/site-scrapers/Intercom/scrapeIntercomPage.ts +0 -51
- package/src/scraping/site-scrapers/Intercom/scrapeIntercomSection.ts +0 -53
- package/src/scraping/site-scrapers/alternateGroupTitle.ts +0 -8
- package/src/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.ts +0 -44
- package/src/scraping/site-scrapers/links-per-group/getLinksRecursively.ts +0 -45
- package/src/scraping/site-scrapers/links-per-group/getLinksRecursivelyGitBook.ts +0 -46
- package/src/scraping/site-scrapers/openNestedDocusaurusMenus.ts +0 -42
- package/src/scraping/site-scrapers/openNestedGitbookMenus.ts +0 -24
- package/src/scraping/site-scrapers/scrapeDocusaurusPage.ts +0 -81
- package/src/scraping/site-scrapers/scrapeDocusaurusSection.ts +0 -63
- package/src/scraping/site-scrapers/scrapeGitBookPage.ts +0 -74
- package/src/scraping/site-scrapers/scrapeGitBookSection.ts +0 -80
- package/src/scraping/site-scrapers/scrapeReadMePage.ts +0 -60
- package/src/scraping/site-scrapers/scrapeReadMeSection.ts +0 -70
- package/src/types.d.ts +0 -29
- package/src/util.ts +0 -161
- package/src/validation/isValidLink.ts +0 -9
- package/src/validation/stopIfInvalidLink.ts +0 -9
- package/tsconfig.json +0 -18
package/bin/util.js.map
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"util.js","sourceRoot":"","sources":["../src/util.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,IAAI,CAAC;AAC9C,OAAO,GAAG,MAAM,KAAK,CAAC;AACtB,OAAO,IAAI,MAAM,MAAM,CAAC;AACxB,OAAO,KAAK,MAAM,SAAS,CAAC;AAC5B,OAAO,iBAAiB,MAAM,mCAAmC,CAAC;AAElE,MAAM,CAAC,MAAM,UAAU,GAAG,CACxB,IAAY,EACZ,KAAa,EACb,OAAe,EACf,MAAc,EACd,QAAgB,EAChB,EAAE;IACF,OAAO;QACL,IAAI;QACJ,IAAI,EAAE,EAAE;QACR,OAAO,EAAE,EAAE;QACX,MAAM,EAAE;YACN,OAAO,EAAE,KAAK;SACf;QACD,WAAW,EAAE,EAAE;QACf,eAAe,EAAE;YACf,IAAI,EAAE,OAAO;YACb,GAAG,EAAE,MAAM;SACZ;QACD,OAAO,EAAE,EAAE;QACX,UAAU,EAAE;YACV;gBACE,KAAK,EAAE,MAAM;gBACb,KAAK,EAAE,CAAC,QAAQ,CAAC;aAClB;SACF;QACD,6DAA6D;KAC9D,CAAC;AACJ,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,IAAI,GAAG,CAClB,KAAa,EACb,WAAoB,EACpB,QAAiB,EACjB,EAAE;IACF,uDAAuD;IACvD,yDAAyD;IACzD,wBAAwB;IACxB,MAAM,eAAe,GAAG,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC9C,MAAM,aAAa,GAAG,KAAK,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;IAC5C,IAAI,CAAC,eAAe,EAAE;QACpB,KAAK,GAAG,GAAG,GAAG,KAAK,CAAC;KACrB;IACD,IAAI,CAAC,aAAa,EAAE;QAClB,KAAK,GAAG,KAAK,GAAG,GAAG,CAAC;KACrB;IAED,MAAM,mBAAmB,GAAG,WAAW;QACrC,CAAC,CAAC,mBAAmB,WAAW,GAAG;QACnC,CAAC,CAAC,EAAE,CAAC;IACP,OAAO,eAAe,KAAK,GAAG,mBAAmB,YAAY,QAAQ,EAAE,CAAC;AAC1E,CAAC,CAAC;AAEF,MAAM,UAAU,SAAS,CAAC,GAAW;IACnC,+CAA+C;IAC/C,gDAAgD;IAChD,OAAO,IAAI,GAAG,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC;AAC7B,CAAC;AAED,MAAM,UAAU,mBAAmB,CAAC,IAA2B;IAC7D,4BAA4B;IAC5B,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,GAAG,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;AACrE,CAAC;AAED,MAAM,CAAC,MAAM,UAAU,GAAG,CAAC,KAAa,EAAE,EAAE;IAC1C,sDAAsD;IACtD,uDAAuD;IACvD,OAAO,KAAK;SACT,OAAO,CAAC,aAAa,EAAE,GAAG,CAAC;SAC3B,IAAI,EAAE;SACN,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC;SAClB,WAAW,EAAE,CAAC;AACnB,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,MAAM,GAAG,CAAC,QAAgB,EAAE,EAAE;IACzC,IAAI,QAAQ,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE;QAC7B,OAAO,QAAQ,CAAC;KACjB;IACD,OAAO,QAAQ,GAAG,MAAM,CAAC;AAC3B,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAAG,CACxB,KAAa,EACb,WAAoB,EACpB,QAAiB,EACjB,YAAqB,KAAK,EAC1B,UAAkB,E
AAE,EACpB,QAAiB,EACjB,EAAE;IACF,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,MAAM,CAAC,QAAQ,IAAI,UAAU,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;IAE5E,+CAA+C;IAC/C,SAAS,CAAC,OAAO,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;IAExC,2BAA2B;IAC3B,IAAI,SAAS,EAAE;QACb,aAAa,CAAC,SAAS,EAAE,IAAI,CAAC,KAAK,EAAE,WAAW,EAAE,QAAQ,CAAC,CAAC,CAAC;QAC7D,OAAO,CAAC,GAAG,CAAC,OAAO,GAAG,SAAS,CAAC,CAAC;KAClC;SAAM;QACL,IAAI;YACF,aAAa,CAAC,SAAS,EAAE,IAAI,CAAC,KAAK,EAAE,WAAW,EAAE,QAAQ,CAAC,EAAE;gBAC3D,IAAI,EAAE,IAAI;aACX,CAAC,CAAC;YACH,OAAO,CAAC,GAAG,CAAC,OAAO,GAAG,SAAS,CAAC,CAAC;SAClC;QAAC,OAAO,CAAC,EAAE;YACV,yEAAyE;YACzE,sDAAsD;YACtD,IAAI,CAAC,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACvB,OAAO,CAAC,GAAG,CAAC,4BAA4B,SAAS,EAAE,CAAC,CAAC;aACtD;iBAAM;gBACL,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;aAClB;SACF;KACF;AACH,CAAC,CAAC;AAEF,MAAM,UAAU,eAAe,CAAC,IAAS;IACvC,MAAM,IAAI,GAAG,IAAI,CAAC,GAAG,CAAC;IACtB,iBAAiB,CAAC,IAAI,CAAC,CAAC;IACxB,OAAO,IAAI,CAAC;AACd,CAAC;AAED,MAAM,CAAC,MAAM,WAAW,GAAG,CAAC,YAAoB,EAAE,EAAE,EAAE;IACpD,MAAM,MAAM,GAAG,GAAG,EAAE,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC;IACtC,OAAO,MAAM,CAAC;AAChB,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAAC,QAAgB,EAAE,EAAE;IACnD,MAAM,GAAG,GAAG,QAAQ,CAAC,SAAS,CAC5B,QAAQ,CAAC,WAAW,CAAC,GAAG,CAAC,GAAG,CAAC,EAC7B,QAAQ,CAAC,MAAM,CAChB,CAAC;IACF,IAAI,QAAQ,KAAK,GAAG;QAAE,OAAO,SAAS,CAAC;IACvC,OAAO,GAAG,CAAC;AACb,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,wBAAwB,GAAG,CAAC,QAAgB,EAAE,EAAE;IAC3D,MAAM,SAAS,GAAG,gBAAgB,CAAC,QAAQ,CAAC,CAAC;IAC7C,OAAO,CACL,SAAS;QACT,CAAC,SAAS,KAAK,KAAK,IAAI,SAAS,KAAK,IAAI,IAAI,SAAS,KAAK,KAAK,CAAC,CACnE,CAAC;AACJ,CAAC,CAAC;AAEF,MAAM,CAAC,MAAM,UAAU,GAAG,CAAC,MAAW,EAAE,EAAE;IACxC,MAAM,aAAa,GAAG,KAAK,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC;IAC1C,IAAI,CAAC,aAAa,EAAE;QAClB,MAAM,CAAC,IAAI,CAAC;;;;KAIX,CAAC,CAAC;QACH,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;KACjB;AACH,CAAC,CAAC"}
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"isValidLink.js","sourceRoot":"","sources":["../../src/validation/isValidLink.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,OAAO,UAAU,WAAW,CAAC,IAAY;IAC9C,uEAAuE;IACvE,IAAI;QACF,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC;QACd,OAAO,IAAI,CAAC;KACb;IAAC,OAAO,CAAC,EAAE;QACV,OAAO,KAAK,CAAC;KACd;AACH,CAAC"}
|
|
@@ -1,9 +0,0 @@
|
|
|
1
|
-
import isValidLink from "./isValidLink.js";
|
|
2
|
-
export default function stopIfInvalidLink(href) {
|
|
3
|
-
if (!isValidLink(href)) {
|
|
4
|
-
console.log("Invalid link: " + href);
|
|
5
|
-
console.log("Make sure the link starts with http:// or https://");
|
|
6
|
-
process.exit(1);
|
|
7
|
-
}
|
|
8
|
-
}
|
|
9
|
-
//# sourceMappingURL=stopIfInvalidLink.js.map
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"stopIfInvalidLink.js","sourceRoot":"","sources":["../../src/validation/stopIfInvalidLink.ts"],"names":[],"mappings":"AAAA,OAAO,WAAW,MAAM,kBAAkB,CAAC;AAE3C,MAAM,CAAC,OAAO,UAAU,iBAAiB,CAAC,IAAY;IACpD,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE;QACtB,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,IAAI,CAAC,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,oDAAoD,CAAC,CAAC;QAClE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;KACjB;AACH,CAAC"}
|
package/scraper.md
DELETED
|
@@ -1,121 +0,0 @@
|
|
|
1
|
-
# CLI Scraping
|
|
2
|
-
|
|
3
|
-
The CLI has many commands. This doc focuses on how we coded scraping websites.
|
|
4
|
-
|
|
5
|
-
## User Interface
|
|
6
|
-
|
|
7
|
-
There are two main commands:
|
|
8
|
-
|
|
9
|
-
`mintlify scrape-page [url]`
|
|
10
|
-
|
|
11
|
-
and
|
|
12
|
-
|
|
13
|
-
`mintlify scrape-section [url]`
|
|
14
|
-
|
|
15
|
-
Scraping a page downloads a single page’s content. Scraping a section goes through the navigation and scrapes each page. The code for downloading a page’s content is shared between the two commands.
|
|
16
|
-
|
|
17
|
-
Important files: `scraping/scrapePageCommands.ts`, `scraping/scrapeSectionAutomatically.ts`
|
|
18
|
-
|
|
19
|
-
We have `scrape-gitbook-page` and similar commands for debugging. Ignore them, they just call internal functions directly. You should not need to use them unless you are debugging issues with Detecting Frameworks.
|
|
20
|
-
|
|
21
|
-
## Overwriting
|
|
22
|
-
|
|
23
|
-
The user has to add a `--overwrite` flag if they want to overwrite their current files.
|
|
24
|
-
|
|
25
|
-
## Sections vs Websites
|
|
26
|
-
|
|
27
|
-
We call the command `scrape-section` instead of `scrape-website` because we cannot scrape pages not in the navigation of the URL first passed in. For example, ReadMe has API Reference and other sections accessible through a separate top-navigation which we do not parse. We only scrape the navigation on the left: [https://docs.readme.com/main/docs](https://docs.readme.com/main/docs)
|
|
28
|
-
|
|
29
|
-
## Detecting Frameworks
|
|
30
|
-
|
|
31
|
-
The commands look in the page HTML to detect what framework scraper to use. For example, all Docusaurus sites have a metatag with the word Docusaurus in it. Some times, the metatag even has the Docusaurus version.
|
|
32
|
-
|
|
33
|
-
Each framework’s scrapers live in `scraping/site-scrapers/`
|
|
34
|
-
|
|
35
|
-
We currently support:
|
|
36
|
-
|
|
37
|
-
- Docusaurus
|
|
38
|
-
- GitBook
|
|
39
|
-
- ReadMe
|
|
40
|
-
- Intercom
|
|
41
|
-
|
|
42
|
-
## Terminal Output
|
|
43
|
-
|
|
44
|
-
We print a line in the terminal for every file we write. `util.ts` has a createPage function that takes care of writing the file and logging.
|
|
45
|
-
|
|
46
|
-
We use a pencil emoji when we successfully write a file. Images get a picture emoji. Likewise, we print a X emoji when we find a file that already exists and the user has not enabled overwriting files. We use emojis so you can tell what the command is doing without reading each file path.
|
|
47
|
-
|
|
48
|
-
We also print the file paths when scraping sections so the user can easily copy paste them into mint.json. Note that pages the user already added in Mintlify are not included in the printed example. We do not generate mint.json completely, we are just giving a small example to help users starting from scratch.
|
|
49
|
-
|
|
50
|
-
```jsx
|
|
51
|
-
Add the following to your navigation in mint.json:
|
|
52
|
-
|
|
53
|
-
{
|
|
54
|
-
"group": "Guides",
|
|
55
|
-
"pages": ["page-we-scraped"]
|
|
56
|
-
}
|
|
57
|
-
```
|
|
58
|
-
|
|
59
|
-
# Navigation Scraping
|
|
60
|
-
|
|
61
|
-
Most sites use JavaScript to open navigation menus which do not automatically include the menu buttons in the HTML. We use Puppeteer to click every nested menu so the site adds the menu buttons to the HTML. For example the original site’s HTML:
|
|
62
|
-
|
|
63
|
-
```jsx
|
|
64
|
-
<div>
|
|
65
|
-
<a id="my-nested-menu"></a>
|
|
66
|
-
</div>
|
|
67
|
-
```
|
|
68
|
-
|
|
69
|
-
can turn into this after opening the nested menu:
|
|
70
|
-
|
|
71
|
-
```jsx
|
|
72
|
-
<div>
|
|
73
|
-
<a id="my-nested-menu" aria-expanded=true></a>
|
|
74
|
-
<div>
|
|
75
|
-
<a href="/page"></a>
|
|
76
|
-
<a href="/other-page"></a>
|
|
77
|
-
</div>
|
|
78
|
-
</div>
|
|
79
|
-
```
|
|
80
|
-
|
|
81
|
-
Ultimately, all section scrapers need to find an array of links to visit then call the scrape page function in a loop.
|
|
82
|
-
|
|
83
|
-
We use axios instead of Puppeteer if a site doesn’t hide links. Puppeteer is slow.
|
|
84
|
-
|
|
85
|
-
# Image File Locations
|
|
86
|
-
|
|
87
|
-
Images go in an `images/` folder because that’s what most users want. Scraping per section uses the same root-level images folder. Scraping per page downloads them to the current location. Thus, scraping a single page from a folder means the user always has to move the images themselves. That’s a trade-off we are comfortable with — trying to detect an existing images folder gets too complicated too fast.
|
|
88
|
-
|
|
89
|
-
# Cheerio
|
|
90
|
-
|
|
91
|
-
Cheerio is a library to scrape/handle the HTML after we have it in a string. Most of the work is using inspect-element to view a website and figure out where the content we want is, then writing the corresponding Cheerio code.
|
|
92
|
-
|
|
93
|
-
# HTML to MDX
|
|
94
|
-
|
|
95
|
-
We use an open-source library to convert HTML to Markdown: https://github.com/crosstype/node-html-markdown
|
|
96
|
-
|
|
97
|
-
The `util.ts` createPage function assembles the MDX metadata, we just need to return an object of the form `{ title, description, content }` from each page scraper.
|
|
98
|
-
|
|
99
|
-
## Parsing Issues
|
|
100
|
-
|
|
101
|
-
Parsing struggles when documentation websites are using non-standard HTML. For example, code blocks are supposed to use. `<pre><code></code></pre>` but GitBook just uses divs.
|
|
102
|
-
|
|
103
|
-
We can write custom translators for the library that determine how we parse certain objects.
|
|
104
|
-
|
|
105
|
-
In some cases, we will want custom translators even if parsing succeeds. For example, ReadMe callouts are using quote syntax
|
|
106
|
-
|
|
107
|
-
```jsx
|
|
108
|
-
> 💡
|
|
109
|
-
> Callout text
|
|
110
|
-
>
|
|
111
|
-
```
|
|
112
|
-
|
|
113
|
-
When we want to convert them to:
|
|
114
|
-
|
|
115
|
-
```jsx
|
|
116
|
-
<Tip>Callout text</Tip>
|
|
117
|
-
```
|
|
118
|
-
|
|
119
|
-
## Regex
|
|
120
|
-
|
|
121
|
-
You can use regex to make small changes where translators are overkill or there’s no obvious component to modify. For example, here’s the end of `scrapeDocusaurusPage.ts`:
|
package/src/browser.ts
DELETED
|
@@ -1,24 +0,0 @@
|
|
|
1
|
-
import { launch } from "puppeteer";
|
|
2
|
-
|
|
3
|
-
export async function startBrowser() {
|
|
4
|
-
try {
|
|
5
|
-
return await launch({
|
|
6
|
-
headless: true,
|
|
7
|
-
ignoreHTTPSErrors: true,
|
|
8
|
-
});
|
|
9
|
-
} catch (err) {
|
|
10
|
-
console.log("Could not create a browser instance: ", err);
|
|
11
|
-
process.exit(1);
|
|
12
|
-
}
|
|
13
|
-
}
|
|
14
|
-
|
|
15
|
-
export async function getHtmlWithPuppeteer(href: string) {
|
|
16
|
-
const browser = await startBrowser();
|
|
17
|
-
const page = await browser.newPage();
|
|
18
|
-
await page.goto(href, {
|
|
19
|
-
waitUntil: "networkidle2",
|
|
20
|
-
});
|
|
21
|
-
const html = await page.content();
|
|
22
|
-
browser.close();
|
|
23
|
-
return html;
|
|
24
|
-
}
|
package/src/constants.ts
DELETED
|
@@ -1,40 +0,0 @@
|
|
|
1
|
-
import path from "path";
|
|
2
|
-
import * as url from "url";
|
|
3
|
-
import os from "os";
|
|
4
|
-
|
|
5
|
-
// Change this to bump to a newer version of mint's client
|
|
6
|
-
export const TARGET_MINT_VERSION = "v0.0.9";
|
|
7
|
-
|
|
8
|
-
// package installation location
|
|
9
|
-
export const INSTALL_PATH = url.fileURLToPath(new URL(".", import.meta.url));
|
|
10
|
-
|
|
11
|
-
export const HOME_DIR = os.homedir();
|
|
12
|
-
|
|
13
|
-
export const DOT_MINTLIFY = path.join(HOME_DIR, ".mintlify");
|
|
14
|
-
|
|
15
|
-
export const VERSION_PATH = path.join(DOT_MINTLIFY, "mint", "mint-version.txt");
|
|
16
|
-
|
|
17
|
-
export const CLIENT_PATH = path.join(DOT_MINTLIFY, "mint", "client");
|
|
18
|
-
|
|
19
|
-
export const MINT_PATH = path.join(DOT_MINTLIFY, "mint");
|
|
20
|
-
|
|
21
|
-
// command execution location
|
|
22
|
-
export const CMD_EXEC_PATH = process.cwd();
|
|
23
|
-
|
|
24
|
-
export const SUPPORTED_MEDIA_EXTENSIONS = [
|
|
25
|
-
"jpeg",
|
|
26
|
-
"jpg",
|
|
27
|
-
"jfif",
|
|
28
|
-
"pjpeg",
|
|
29
|
-
"pjp",
|
|
30
|
-
"png",
|
|
31
|
-
"svg",
|
|
32
|
-
"svgz",
|
|
33
|
-
"ico",
|
|
34
|
-
"webp",
|
|
35
|
-
"gif",
|
|
36
|
-
"apng",
|
|
37
|
-
"avif",
|
|
38
|
-
"bmp",
|
|
39
|
-
"mp4",
|
|
40
|
-
];
|
package/src/downloadImage.ts
DELETED
|
@@ -1,110 +0,0 @@
|
|
|
1
|
-
import { existsSync, mkdirSync, createWriteStream } from "fs";
|
|
2
|
-
import path from "path";
|
|
3
|
-
import axios from "axios";
|
|
4
|
-
import { getFileExtension } from "./util.js";
|
|
5
|
-
import { SUPPORTED_MEDIA_EXTENSIONS } from "./constants.js";
|
|
6
|
-
|
|
7
|
-
async function writeImageToFile(
|
|
8
|
-
imageSrc: string,
|
|
9
|
-
writePath: string,
|
|
10
|
-
overwrite: boolean
|
|
11
|
-
) {
|
|
12
|
-
// Avoid unnecessary downloads
|
|
13
|
-
if (existsSync(writePath) && !overwrite) {
|
|
14
|
-
return Promise.reject({
|
|
15
|
-
code: "EEXIST",
|
|
16
|
-
});
|
|
17
|
-
}
|
|
18
|
-
|
|
19
|
-
// Create the folders needed if they're missing
|
|
20
|
-
mkdirSync(path.dirname(writePath), { recursive: true });
|
|
21
|
-
|
|
22
|
-
const writer = createWriteStream(writePath);
|
|
23
|
-
|
|
24
|
-
try {
|
|
25
|
-
const response = await axios.get(imageSrc, {
|
|
26
|
-
responseType: "stream",
|
|
27
|
-
});
|
|
28
|
-
// wx prevents overwriting an image with the exact same name
|
|
29
|
-
// being created in the time we were downloading
|
|
30
|
-
response.data.pipe(writer, {
|
|
31
|
-
flag: "wx",
|
|
32
|
-
});
|
|
33
|
-
|
|
34
|
-
return new Promise((resolve, reject) => {
|
|
35
|
-
writer.on("finish", resolve);
|
|
36
|
-
writer.on("error", reject);
|
|
37
|
-
});
|
|
38
|
-
} catch (e) {
|
|
39
|
-
return Promise.reject({
|
|
40
|
-
code: "ENOTFOUND",
|
|
41
|
-
});
|
|
42
|
-
}
|
|
43
|
-
}
|
|
44
|
-
|
|
45
|
-
export function isValidImageSrc(
|
|
46
|
-
src: string,
|
|
47
|
-
skipValidateImageExtension?: boolean
|
|
48
|
-
) {
|
|
49
|
-
if (!src) {
|
|
50
|
-
return false;
|
|
51
|
-
}
|
|
52
|
-
|
|
53
|
-
// We do not support downloading base64 in-line images.
|
|
54
|
-
if (src.startsWith("data:")) {
|
|
55
|
-
return false;
|
|
56
|
-
}
|
|
57
|
-
|
|
58
|
-
if (skipValidateImageExtension) {
|
|
59
|
-
return true;
|
|
60
|
-
}
|
|
61
|
-
|
|
62
|
-
const imageHref = removeMetadataFromImageSrc(src);
|
|
63
|
-
const ext = getFileExtension(imageHref);
|
|
64
|
-
|
|
65
|
-
if (!SUPPORTED_MEDIA_EXTENSIONS.includes(ext)) {
|
|
66
|
-
console.error("🚨 We do not support the file extension: " + ext);
|
|
67
|
-
return false;
|
|
68
|
-
}
|
|
69
|
-
|
|
70
|
-
return true;
|
|
71
|
-
}
|
|
72
|
-
|
|
73
|
-
export function removeMetadataFromImageSrc(src: string) {
|
|
74
|
-
// Part of the URL standard
|
|
75
|
-
const metadataSymbols = ["?", "#"];
|
|
76
|
-
|
|
77
|
-
metadataSymbols.forEach((dividerSymbol) => {
|
|
78
|
-
// Some frameworks add metadata after the file extension, we need to remove that.
|
|
79
|
-
src = src.split(dividerSymbol)[0];
|
|
80
|
-
});
|
|
81
|
-
|
|
82
|
-
return src;
|
|
83
|
-
}
|
|
84
|
-
|
|
85
|
-
export function cleanImageSrc(src: string, origin: string) {
|
|
86
|
-
// Add origin if the image tags are using relative sources
|
|
87
|
-
return src.startsWith("http") ? src : new URL(src, origin).href;
|
|
88
|
-
}
|
|
89
|
-
|
|
90
|
-
export default async function downloadImage(
|
|
91
|
-
imageSrc: string,
|
|
92
|
-
writePath: string,
|
|
93
|
-
overwrite: boolean = false
|
|
94
|
-
) {
|
|
95
|
-
await writeImageToFile(imageSrc, writePath, overwrite)
|
|
96
|
-
.then(() => {
|
|
97
|
-
console.log("🖼️ - " + writePath);
|
|
98
|
-
})
|
|
99
|
-
.catch((e) => {
|
|
100
|
-
if (e.code === "EEXIST") {
|
|
101
|
-
console.log(`❌ Skipping existing image ${writePath}`);
|
|
102
|
-
} else if (e.code === "ENOTFOUND") {
|
|
103
|
-
console.error(
|
|
104
|
-
`🚨 Cannot download the image, address not found ${imageSrc}`
|
|
105
|
-
);
|
|
106
|
-
} else {
|
|
107
|
-
console.error(e);
|
|
108
|
-
}
|
|
109
|
-
});
|
|
110
|
-
}
|
package/src/index.ts
DELETED
|
@@ -1,112 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env node
|
|
2
|
-
|
|
3
|
-
import yargs from "yargs";
|
|
4
|
-
import { hideBin } from "yargs/helpers";
|
|
5
|
-
import {
|
|
6
|
-
scrapePageAutomatically,
|
|
7
|
-
scrapePageWrapper,
|
|
8
|
-
} from "./scraping/scrapePageCommands.js";
|
|
9
|
-
import { scrapeGitBookPage } from "./scraping/site-scrapers/scrapeGitBookPage.js";
|
|
10
|
-
import { scrapeReadMePage } from "./scraping/site-scrapers/scrapeReadMePage.js";
|
|
11
|
-
import {
|
|
12
|
-
scrapeSectionAutomatically,
|
|
13
|
-
scrapeSectionAxiosWrapper,
|
|
14
|
-
scrapeGitbookSectionCommand,
|
|
15
|
-
} from "./scraping/scrapeSectionCommands.js";
|
|
16
|
-
import { scrapeReadMeSection } from "./scraping/site-scrapers/scrapeReadMeSection.js";
|
|
17
|
-
import dev from "./local-preview/index.js";
|
|
18
|
-
import installDepsCommand from "./local-preview/helper-commands/installDepsCommand.js";
|
|
19
|
-
import { scrapeIntercomPage } from "./scraping/site-scrapers/Intercom/scrapeIntercomPage.js";
|
|
20
|
-
import { scrapeIntercomSection } from "./scraping/site-scrapers/Intercom/scrapeIntercomSection.js";
|
|
21
|
-
|
|
22
|
-
yargs(hideBin(process.argv))
|
|
23
|
-
.command(
|
|
24
|
-
"dev",
|
|
25
|
-
"Runs Mintlify locally (Must run in directory with mint.json)",
|
|
26
|
-
() => {},
|
|
27
|
-
async (argv) => {
|
|
28
|
-
await dev(argv);
|
|
29
|
-
}
|
|
30
|
-
)
|
|
31
|
-
.command(
|
|
32
|
-
"install",
|
|
33
|
-
"Install dependencies for local Mintlify",
|
|
34
|
-
() => {},
|
|
35
|
-
installDepsCommand
|
|
36
|
-
)
|
|
37
|
-
.command(
|
|
38
|
-
"scrape-page [url]",
|
|
39
|
-
"Scrapes a page",
|
|
40
|
-
() => {},
|
|
41
|
-
async (argv) => {
|
|
42
|
-
await scrapePageAutomatically(argv);
|
|
43
|
-
}
|
|
44
|
-
)
|
|
45
|
-
.command(
|
|
46
|
-
"scrape-gitbook-page [url]",
|
|
47
|
-
"Scrapes a GitBook page",
|
|
48
|
-
() => {},
|
|
49
|
-
async (argv) => {
|
|
50
|
-
await scrapePageWrapper(argv, scrapeGitBookPage);
|
|
51
|
-
}
|
|
52
|
-
)
|
|
53
|
-
.command(
|
|
54
|
-
"scrape-readme-page [url]",
|
|
55
|
-
"Scrapes a ReadMe page",
|
|
56
|
-
() => {},
|
|
57
|
-
async (argv) => {
|
|
58
|
-
await scrapePageWrapper(argv, scrapeReadMePage);
|
|
59
|
-
}
|
|
60
|
-
)
|
|
61
|
-
.command(
|
|
62
|
-
"scrape-intercom-page [url]",
|
|
63
|
-
"Scrapes a Intercom page",
|
|
64
|
-
() => {},
|
|
65
|
-
async (argv) => {
|
|
66
|
-
await scrapePageWrapper(argv, scrapeIntercomPage);
|
|
67
|
-
}
|
|
68
|
-
)
|
|
69
|
-
.command(
|
|
70
|
-
"scrape-section [url]",
|
|
71
|
-
"Scrapes the docs in the section",
|
|
72
|
-
() => {},
|
|
73
|
-
async (argv) => {
|
|
74
|
-
await scrapeSectionAutomatically(argv);
|
|
75
|
-
}
|
|
76
|
-
)
|
|
77
|
-
.command(
|
|
78
|
-
"scrape-gitbook-section [url]",
|
|
79
|
-
"Scrapes the Gitbook section",
|
|
80
|
-
() => {},
|
|
81
|
-
async (argv) => {
|
|
82
|
-
await scrapeGitbookSectionCommand(argv);
|
|
83
|
-
}
|
|
84
|
-
)
|
|
85
|
-
.command(
|
|
86
|
-
"scrape-readme-section [url]",
|
|
87
|
-
"Scrapes the ReadMe section",
|
|
88
|
-
() => {},
|
|
89
|
-
async (argv) => {
|
|
90
|
-
await scrapeSectionAxiosWrapper(argv, scrapeReadMeSection);
|
|
91
|
-
}
|
|
92
|
-
)
|
|
93
|
-
.command(
|
|
94
|
-
"scrape-intercom-section [url]",
|
|
95
|
-
"Scrapes the Intercom section",
|
|
96
|
-
() => {},
|
|
97
|
-
async (argv) => {
|
|
98
|
-
await scrapeSectionAxiosWrapper(argv, scrapeIntercomSection);
|
|
99
|
-
}
|
|
100
|
-
)
|
|
101
|
-
// Print the help menu when the user enters an invalid command.
|
|
102
|
-
.strictCommands()
|
|
103
|
-
.demandCommand(
|
|
104
|
-
1,
|
|
105
|
-
"Unknown command. See above for the list of supported commands."
|
|
106
|
-
)
|
|
107
|
-
|
|
108
|
-
// Alias option flags --help = -h, --version = -v
|
|
109
|
-
.alias("h", "help")
|
|
110
|
-
.alias("v", "version")
|
|
111
|
-
|
|
112
|
-
.parse();
|
|
@@ -1,13 +0,0 @@
|
|
|
1
|
-
import shell from "shelljs";
|
|
2
|
-
import { CLIENT_PATH } from "../../constants.js";
|
|
3
|
-
import { buildLogger, ensureYarn } from "../../util.js";
|
|
4
|
-
|
|
5
|
-
const installDeps = async () => {
|
|
6
|
-
const logger = buildLogger("");
|
|
7
|
-
ensureYarn(logger);
|
|
8
|
-
shell.cd(CLIENT_PATH);
|
|
9
|
-
shell.exec("yarn");
|
|
10
|
-
logger.succeed("Dependencies installed.");
|
|
11
|
-
};
|
|
12
|
-
|
|
13
|
-
export default installDeps;
|
|
@@ -1,195 +0,0 @@
|
|
|
1
|
-
import Chalk from "chalk";
|
|
2
|
-
import child_process from "child_process";
|
|
3
|
-
import open from "open";
|
|
4
|
-
import fse, { pathExists } from "fs-extra";
|
|
5
|
-
import inquirer from "inquirer";
|
|
6
|
-
import { isInternetAvailable } from "is-internet-available";
|
|
7
|
-
import path from "path";
|
|
8
|
-
import shell from "shelljs";
|
|
9
|
-
import { Octokit } from "@octokit/rest";
|
|
10
|
-
import {
|
|
11
|
-
CLIENT_PATH,
|
|
12
|
-
HOME_DIR,
|
|
13
|
-
DOT_MINTLIFY,
|
|
14
|
-
CMD_EXEC_PATH,
|
|
15
|
-
TARGET_MINT_VERSION,
|
|
16
|
-
VERSION_PATH,
|
|
17
|
-
MINT_PATH,
|
|
18
|
-
} from "../constants.js";
|
|
19
|
-
import { buildLogger, ensureYarn } from "../util.js";
|
|
20
|
-
import listener from "./listener/index.js";
|
|
21
|
-
import { ArgumentsCamelCase } from "yargs";
|
|
22
|
-
import { getConfigPath } from "./listener/utils/mintConfigFile.js";
|
|
23
|
-
|
|
24
|
-
const nodeModulesExists = async () => {
|
|
25
|
-
return pathExists(path.join(DOT_MINTLIFY, "mint", "client", "node_modules"));
|
|
26
|
-
};
|
|
27
|
-
|
|
28
|
-
const promptForYarn = async () => {
|
|
29
|
-
const yarnInstalled = shell.which("yarn");
|
|
30
|
-
if (!yarnInstalled) {
|
|
31
|
-
await inquirer
|
|
32
|
-
.prompt([
|
|
33
|
-
{
|
|
34
|
-
type: "confirm",
|
|
35
|
-
name: "confirm",
|
|
36
|
-
message: "yarn must be globally installed. Install yarn?",
|
|
37
|
-
default: true,
|
|
38
|
-
},
|
|
39
|
-
])
|
|
40
|
-
.then(({ confirm }) => {
|
|
41
|
-
if (confirm) {
|
|
42
|
-
shell.exec("npm install --global yarn");
|
|
43
|
-
} else {
|
|
44
|
-
console.log("Installation cancelled.");
|
|
45
|
-
}
|
|
46
|
-
});
|
|
47
|
-
}
|
|
48
|
-
};
|
|
49
|
-
|
|
50
|
-
/**
 * Downloads the pinned Mintlify framework release (TARGET_MINT_VERSION) from
 * GitHub, extracts its `client` directory into CLIENT_PATH, records the
 * installed version in VERSION_PATH, and installs client dependencies.
 *
 * NOTE(review): the `tar` command below uses relative paths, so this assumes
 * the current working directory is MINT_PATH when called — the caller (`dev`)
 * does `shell.cd(MINT_PATH)` first; confirm before reusing elsewhere.
 *
 * @param logger - spinner-style logger (ora-like: mutable `.text`).
 */
const downloadTargetMint = async (logger) => {
  // Wipe any previous framework download so stale files can't linger.
  fse.emptyDirSync(MINT_PATH);

  logger.text = "Downloading Mintlify framework...";

  // Unauthenticated Octokit client: public repo tarball download.
  const octokit = new Octokit();
  const downloadRes = await octokit.repos.downloadTarballArchive({
    owner: "mintlify",
    repo: "mint",
    ref: TARGET_MINT_VERSION,
  });

  logger.text = "Extracting Mintlify framework...";
  const TAR_PATH = path.join(MINT_PATH, "mint.tar.gz");
  fse.writeFileSync(TAR_PATH, Buffer.from(downloadRes.data as any));

  // strip-components 1 removes the top level directory from the unzipped content
  // which is a folder with the release sha
  fse.mkdirSync(path.join(MINT_PATH, "mint-tmp"));
  shell.exec("tar -xzf mint.tar.gz -C mint-tmp --strip-components 1", {
    silent: true,
  });

  // The tarball is no longer needed once extracted.
  fse.removeSync(TAR_PATH);

  // Only the `client` subdirectory of the release is kept.
  fse.moveSync(
    path.join(MINT_PATH, "mint-tmp", "client"),
    path.join(CLIENT_PATH)
  );

  // Persist which release is installed so `dev` can skip re-downloading.
  fse.writeFileSync(VERSION_PATH, TARGET_MINT_VERSION);

  // Delete unnecessary content downloaded from GitHub
  fse.removeSync(path.join(MINT_PATH, "mint-tmp"));

  logger.text = "Installing dependencies...";

  ensureYarn(logger);
  shell.cd(CLIENT_PATH);
  shell.exec("yarn", { silent: true });
};
|
|
91
|
-
|
|
92
|
-
const checkForMintJson = async (logger) => {
|
|
93
|
-
const configPath = await getConfigPath(CMD_EXEC_PATH);
|
|
94
|
-
if (configPath == null) {
|
|
95
|
-
logger.fail("Must be ran in a directory where a mint.json file exists.");
|
|
96
|
-
process.exit(1);
|
|
97
|
-
}
|
|
98
|
-
return;
|
|
99
|
-
};
|
|
100
|
-
|
|
101
|
-
/**
 * Entry point for `mintlify dev`: prepares a local Mintlify instance and
 * launches the preview site.
 *
 * Flow: ensure yarn exists -> (online) download/refresh the pinned framework
 * version -> verify dependencies are installed -> verify a mint.json exists
 * -> preconfigure the client against the user's docs -> start the dev server.
 *
 * @param argv - parsed yargs arguments; only `argv.port` is read here.
 */
const dev = async (argv: ArgumentsCamelCase) => {
  shell.cd(HOME_DIR);
  await promptForYarn();
  const logger = buildLogger("Preparing local Mintlify instance...");
  await fse.ensureDir(MINT_PATH);
  // downloadTargetMint runs `tar` with relative paths, so the cwd must be
  // MINT_PATH before it is called.
  shell.cd(MINT_PATH);

  const internet = await isInternetAvailable();
  // First run requires a download; offline with no prior client is fatal.
  if (!internet && !(await pathExists(CLIENT_PATH))) {
    logger.fail(
      "Running mintlify dev for the first time requires an internet connection."
    );
    process.exit(1);
  }

  if (internet) {
    const mintVersionExists = await pathExists(VERSION_PATH);

    // Download when no version marker exists, or when the recorded version
    // differs from the version this CLI release is pinned to.
    let needToDownloadTargetMint = !mintVersionExists;

    if (mintVersionExists) {
      const currVersion = fse.readFileSync(VERSION_PATH, "utf8");
      if (currVersion !== TARGET_MINT_VERSION) {
        needToDownloadTargetMint = true;
      }
    }

    if (needToDownloadTargetMint) {
      await downloadTargetMint(logger);
    }
  }

  // Dependencies can be missing even when the client exists (interrupted
  // install); point the user at `mintlify install` rather than failing later.
  if (!(await nodeModulesExists())) {
    if (!internet) {
      logger.fail(`Dependencies are missing and you are offline. Connect to the internet and run

mintlify install

`);
    } else {
      logger.fail(`Dependencies were not installed correctly, run

mintlify install

`);
    }
    process.exit(1);
  }
  await checkForMintJson(logger);
  shell.cd(CLIENT_PATH);
  // Path from the client checkout to the user's docs, passed to the
  // client's preconfigure script.
  const relativePath = path.relative(CLIENT_PATH, CMD_EXEC_PATH);
  child_process.spawnSync("yarn preconfigure", [relativePath], { shell: true });
  logger.succeed("Local Mintlify instance is ready. Launching your site...");
  // NOTE(review): yargs types `argv.port` loosely, hence the cast; `||`
  // (not `??`) is intentional here so an empty string also falls back.
  run((argv.port as string) || "3000");
};
|
|
156
|
-
|
|
157
|
-
const run = (port: string) => {
|
|
158
|
-
shell.cd(CLIENT_PATH);
|
|
159
|
-
|
|
160
|
-
// next-remote-watch can only receive ports as env variables
|
|
161
|
-
// https://github.com/hashicorp/next-remote-watch/issues/23
|
|
162
|
-
const mintlifyDevProcess = child_process.spawn("npm run dev-watch", {
|
|
163
|
-
env: {
|
|
164
|
-
...process.env,
|
|
165
|
-
PORT: port,
|
|
166
|
-
},
|
|
167
|
-
cwd: CLIENT_PATH,
|
|
168
|
-
stdio: "pipe",
|
|
169
|
-
shell: true,
|
|
170
|
-
});
|
|
171
|
-
mintlifyDevProcess.stdout.on("data", (data) => {
|
|
172
|
-
const output = data.toString();
|
|
173
|
-
console.log(output);
|
|
174
|
-
if (output.startsWith("> Ready on http://localhost:")) {
|
|
175
|
-
console.log(
|
|
176
|
-
`🌿 ${Chalk.green(
|
|
177
|
-
`Your local preview is available at http://localhost:${port}`
|
|
178
|
-
)}`
|
|
179
|
-
);
|
|
180
|
-
console.log(
|
|
181
|
-
`🌿 ${Chalk.green("Press Ctrl+C any time to stop the local preview.")}`
|
|
182
|
-
);
|
|
183
|
-
open(`http://localhost:${port}`);
|
|
184
|
-
}
|
|
185
|
-
});
|
|
186
|
-
const onExit = () => {
|
|
187
|
-
mintlifyDevProcess.kill("SIGINT");
|
|
188
|
-
process.exit(0);
|
|
189
|
-
};
|
|
190
|
-
process.on("SIGINT", onExit);
|
|
191
|
-
process.on("SIGTERM", onExit);
|
|
192
|
-
listener();
|
|
193
|
-
};
|
|
194
|
-
|
|
195
|
-
export default dev;
|