@mintlify/scraping 3.0.140 → 3.0.142

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. package/bin/browser.js +1 -1
  2. package/bin/browser.js.map +1 -1
  3. package/bin/checks.d.ts +8 -0
  4. package/bin/checks.js +24 -0
  5. package/bin/checks.js.map +1 -0
  6. package/bin/cli.js +49 -45
  7. package/bin/cli.js.map +1 -1
  8. package/bin/scraping/detectFramework.d.ts +7 -14
  9. package/bin/scraping/detectFramework.js +8 -14
  10. package/bin/scraping/detectFramework.js.map +1 -1
  11. package/bin/scraping/downloadAllImages.d.ts +2 -1
  12. package/bin/scraping/downloadAllImages.js +1 -2
  13. package/bin/scraping/downloadAllImages.js.map +1 -1
  14. package/bin/scraping/downloadLogoImage.js +0 -1
  15. package/bin/scraping/downloadLogoImage.js.map +1 -1
  16. package/bin/scraping/replaceImagePaths.d.ts +1 -1
  17. package/bin/scraping/replaceImagePaths.js +0 -3
  18. package/bin/scraping/replaceImagePaths.js.map +1 -1
  19. package/bin/scraping/scrapePageCommands.d.ts +3 -3
  20. package/bin/scraping/scrapePageCommands.js +22 -27
  21. package/bin/scraping/scrapePageCommands.js.map +1 -1
  22. package/bin/scraping/scrapeSectionCommands.d.ts +5 -5
  23. package/bin/scraping/scrapeSectionCommands.js +27 -30
  24. package/bin/scraping/scrapeSectionCommands.js.map +1 -1
  25. package/bin/scraping/site-scrapers/Intercom/scrapeIntercomPage.js +1 -1
  26. package/bin/scraping/site-scrapers/Intercom/scrapeIntercomPage.js.map +1 -1
  27. package/bin/scraping/site-scrapers/Intercom/scrapeIntercomSection.js +2 -2
  28. package/bin/scraping/site-scrapers/Intercom/scrapeIntercomSection.js.map +1 -1
  29. package/bin/scraping/site-scrapers/alternateGroupTitle.d.ts +3 -1
  30. package/bin/scraping/site-scrapers/alternateGroupTitle.js +1 -1
  31. package/bin/scraping/site-scrapers/alternateGroupTitle.js.map +1 -1
  32. package/bin/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.d.ts +5 -1
  33. package/bin/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.js +1 -1
  34. package/bin/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.js.map +1 -1
  35. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursively.d.ts +3 -1
  36. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursively.js +1 -4
  37. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursively.js.map +1 -1
  38. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursivelyGitBook.d.ts +3 -1
  39. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursivelyGitBook.js +0 -3
  40. package/bin/scraping/site-scrapers/links-per-group/getLinksRecursivelyGitBook.js.map +1 -1
  41. package/bin/scraping/site-scrapers/openNestedDocusaurusMenus.js +3 -3
  42. package/bin/scraping/site-scrapers/openNestedDocusaurusMenus.js.map +1 -1
  43. package/bin/scraping/site-scrapers/openNestedGitbookMenus.js +2 -2
  44. package/bin/scraping/site-scrapers/openNestedGitbookMenus.js.map +1 -1
  45. package/bin/scraping/site-scrapers/scrapeDocusaurusPage.js +1 -1
  46. package/bin/scraping/site-scrapers/scrapeDocusaurusPage.js.map +1 -1
  47. package/bin/scraping/site-scrapers/scrapeDocusaurusSection.js +2 -2
  48. package/bin/scraping/site-scrapers/scrapeDocusaurusSection.js.map +1 -1
  49. package/bin/scraping/site-scrapers/scrapeGitBookPage.js +1 -1
  50. package/bin/scraping/site-scrapers/scrapeGitBookPage.js.map +1 -1
  51. package/bin/scraping/site-scrapers/scrapeGitBookSection.js +1 -1
  52. package/bin/scraping/site-scrapers/scrapeGitBookSection.js.map +1 -1
  53. package/bin/scraping/site-scrapers/scrapeReadMePage.js +1 -1
  54. package/bin/scraping/site-scrapers/scrapeReadMePage.js.map +1 -1
  55. package/bin/scraping/site-scrapers/scrapeReadMeSection.js +2 -2
  56. package/bin/scraping/site-scrapers/scrapeReadMeSection.js.map +1 -1
  57. package/bin/tsconfig.build.tsbuildinfo +1 -1
  58. package/bin/util.d.ts +0 -1
  59. package/bin/util.js +9 -26
  60. package/bin/util.js.map +1 -1
  61. package/package.json +7 -7
  62. package/src/browser.ts +1 -1
  63. package/src/checks.ts +32 -0
  64. package/src/cli.ts +48 -74
  65. package/src/scraping/detectFramework.ts +20 -15
  66. package/src/scraping/downloadAllImages.ts +7 -7
  67. package/src/scraping/downloadLogoImage.ts +0 -1
  68. package/src/scraping/replaceImagePaths.ts +1 -5
  69. package/src/scraping/scrapePageCommands.ts +32 -29
  70. package/src/scraping/scrapeSectionCommands.ts +38 -34
  71. package/src/scraping/site-scrapers/Intercom/scrapeIntercomPage.ts +1 -1
  72. package/src/scraping/site-scrapers/Intercom/scrapeIntercomSection.ts +2 -2
  73. package/src/scraping/site-scrapers/alternateGroupTitle.ts +5 -2
  74. package/src/scraping/site-scrapers/links-per-group/getDocusaurusLinksPerGroup.ts +7 -5
  75. package/src/scraping/site-scrapers/links-per-group/getLinksRecursively.ts +8 -6
  76. package/src/scraping/site-scrapers/links-per-group/getLinksRecursivelyGitBook.ts +7 -5
  77. package/src/scraping/site-scrapers/openNestedDocusaurusMenus.ts +3 -3
  78. package/src/scraping/site-scrapers/openNestedGitbookMenus.ts +3 -3
  79. package/src/scraping/site-scrapers/scrapeDocusaurusPage.ts +1 -1
  80. package/src/scraping/site-scrapers/scrapeDocusaurusSection.ts +2 -2
  81. package/src/scraping/site-scrapers/scrapeGitBookPage.ts +1 -1
  82. package/src/scraping/site-scrapers/scrapeGitBookSection.ts +3 -3
  83. package/src/scraping/site-scrapers/scrapeReadMePage.ts +1 -1
  84. package/src/scraping/site-scrapers/scrapeReadMeSection.ts +3 -3
  85. package/src/util.ts +10 -26
  86. package/tsconfig.json +0 -1
  87. package/bin/validation/isValidLink.d.ts +0 -1
  88. package/bin/validation/isValidLink.js +0 -11
  89. package/bin/validation/isValidLink.js.map +0 -1
  90. package/bin/validation/stopIfInvalidLink.d.ts +0 -1
  91. package/bin/validation/stopIfInvalidLink.js +0 -9
  92. package/bin/validation/stopIfInvalidLink.js.map +0 -1
  93. package/src/validation/isValidLink.ts +0 -9
  94. package/src/validation/stopIfInvalidLink.ts +0 -9
@@ -1,9 +1,9 @@
1
1
  import axios from 'axios';
2
- import { ArgumentsCamelCase } from 'yargs';
2
+ import { Page } from 'puppeteer';
3
3
 
4
4
  import { startBrowser } from '../browser.js';
5
- import { getHrefFromArgs, getOrigin } from '../util.js';
6
- import { detectFramework, Frameworks } from './detectFramework.js';
5
+ import { getOrigin } from '../util.js';
6
+ import { detectFramework, Framework, FrameworkHint } from './detectFramework.js';
7
7
  import { ScrapeSectionFn, scrapeSection } from './scrapeSection.js';
8
8
  import { scrapeIntercomSection } from './site-scrapers/Intercom/scrapeIntercomSection.js';
9
9
  import openNestedDocusaurusMenus from './site-scrapers/openNestedDocusaurusMenus.js';
@@ -13,78 +13,82 @@ import { scrapeGitBookSection } from './site-scrapers/scrapeGitBookSection.js';
13
13
  import { scrapeReadMeSection } from './site-scrapers/scrapeReadMeSection.js';
14
14
 
15
15
  export async function scrapeSectionAxiosWrapper(
16
- argv: ArgumentsCamelCase,
16
+ url: string,
17
+ overwrite: boolean,
17
18
  scrapeFunc: ScrapeSectionFn
18
19
  ) {
19
- const href = getHrefFromArgs(argv);
20
- const res = await axios.get(href);
20
+ const res = await axios.get(url);
21
21
  const html = res.data;
22
- await scrapeSection(scrapeFunc, html, getOrigin(href), !!argv.overwrite, undefined);
22
+ await scrapeSection(scrapeFunc, html, getOrigin(url), overwrite, undefined);
23
23
  process.exit(0);
24
24
  }
25
25
 
26
26
  export async function scrapeDocusaurusSectionCommand(
27
- argv: any,
27
+ url: string,
28
+ overwrite: boolean,
28
29
  version: string | undefined // "1" | "2" | "3"
29
30
  ) {
30
31
  await scrapeSectionOpeningAllNested(
31
- argv,
32
+ url,
33
+ overwrite,
32
34
  openNestedDocusaurusMenus,
33
35
  scrapeDocusaurusSection,
34
36
  version
35
37
  );
36
38
  }
37
39
 
38
- export async function scrapeGitbookSectionCommand(argv: any) {
39
- await scrapeSectionOpeningAllNested(argv, openNestedGitbookMenus, scrapeGitBookSection);
40
+ export async function scrapeGitbookSectionCommand(url: string, overwrite: boolean) {
41
+ await scrapeSectionOpeningAllNested(url, overwrite, openNestedGitbookMenus, scrapeGitBookSection);
40
42
  }
41
43
 
42
44
  async function scrapeSectionOpeningAllNested(
43
- argv: any,
44
- openLinks: any,
45
+ url: string,
46
+ overwrite: boolean,
47
+ openLinks: (page: Page) => Promise<string>,
45
48
  scrapeFunc: ScrapeSectionFn,
46
49
  version?: string
47
50
  ) {
48
- const href = getHrefFromArgs(argv);
49
-
50
51
  const browser = await startBrowser();
51
52
  const page = await browser.newPage();
52
- await page.goto(href, {
53
+ await page.goto(url, {
53
54
  waitUntil: 'networkidle2',
54
55
  });
55
56
 
56
57
  const html = await openLinks(page);
57
- browser.close();
58
- await scrapeSection(scrapeFunc, html, getOrigin(href), !!argv.overwrite, version);
58
+ void browser.close();
59
+ await scrapeSection(scrapeFunc, html, getOrigin(url), overwrite, version);
59
60
  process.exit(0);
60
61
  }
61
62
 
62
- export async function scrapeSectionAutomatically(argv: any) {
63
- const href = getHrefFromArgs(argv);
64
- const res = await axios.get(href);
63
+ export async function scrapeSectionAutomatically(
64
+ url: string,
65
+ overwrite: boolean,
66
+ frameworkHint: FrameworkHint
67
+ ) {
68
+ const res = await axios.get(url);
65
69
  const html = res.data;
66
- const { framework, version } = detectFramework(html);
70
+ frameworkHint = frameworkHint.framework ? frameworkHint : detectFramework(html);
67
71
 
68
- validateFramework(framework);
69
- console.log('Detected framework: ' + framework);
72
+ validateFramework(frameworkHint.framework);
73
+ console.log('Detected framework: ' + frameworkHint.framework);
70
74
 
71
- switch (framework) {
72
- case Frameworks.DOCUSAURUS:
73
- await scrapeDocusaurusSectionCommand(argv, version);
75
+ switch (frameworkHint.framework) {
76
+ case 'docusaurus':
77
+ await scrapeDocusaurusSectionCommand(url, overwrite, frameworkHint.version);
74
78
  break;
75
- case Frameworks.GITBOOK:
76
- await scrapeGitbookSectionCommand(argv);
79
+ case 'gitbook':
80
+ await scrapeGitbookSectionCommand(url, overwrite);
77
81
  break;
78
- case Frameworks.README:
79
- await scrapeSectionAxiosWrapper(argv, scrapeReadMeSection);
82
+ case 'readme':
83
+ await scrapeSectionAxiosWrapper(url, overwrite, scrapeReadMeSection);
80
84
  break;
81
- case Frameworks.INTERCOM:
82
- await scrapeSectionAxiosWrapper(argv, scrapeIntercomSection);
85
+ case 'intercom':
86
+ await scrapeSectionAxiosWrapper(url, overwrite, scrapeIntercomSection);
83
87
  break;
84
88
  }
85
89
  }
86
90
 
87
- function validateFramework(framework: Frameworks | undefined) {
91
+ function validateFramework(framework: Framework | undefined) {
88
92
  if (!framework) {
89
93
  console.log(
90
94
  'Could not detect the framework automatically. We only support Docusaurus (V2 and V3), GitBook, and ReadMe.'
@@ -1,4 +1,4 @@
1
- import cheerio from 'cheerio';
1
+ import * as cheerio from 'cheerio';
2
2
  import { NodeHtmlMarkdown } from 'node-html-markdown';
3
3
 
4
4
  import downloadAllImages from '../../downloadAllImages.js';
@@ -1,6 +1,6 @@
1
1
  import { Navigation, NavigationEntry } from '@mintlify/models';
2
2
  import axios from 'axios';
3
- import cheerio from 'cheerio';
3
+ import * as cheerio from 'cheerio';
4
4
 
5
5
  import downloadLogoImage from '../../downloadLogoImage.js';
6
6
  import { scrapeGettingFileNameFromUrl } from '../../scrapeGettingFileNameFromUrl.js';
@@ -17,7 +17,7 @@ export async function scrapeIntercomSection(
17
17
  let $ = cheerio.load(html);
18
18
 
19
19
  const logoSrc = $('.header__logo img').first().attr('src');
20
- downloadLogoImage(logoSrc, imageBaseDir, origin, overwrite);
20
+ void downloadLogoImage(logoSrc, imageBaseDir, origin, overwrite);
21
21
 
22
22
  const collectionsLink = $('.section .g__space a');
23
23
  const collectionsMap = collectionsLink.toArray().map(async (s: cheerio.Element) => {
@@ -1,8 +1,11 @@
1
- export default function alternateGroupTitle(firstLink: cheerio.Cheerio, pages) {
1
+ import { NavigationEntry } from '@mintlify/models';
2
+ import { Cheerio, Element } from 'cheerio';
3
+
4
+ export default function alternateGroupTitle(firstLink: Cheerio<Element>, pages: NavigationEntry[]) {
2
5
  // Only assign titles to nested navigation menus outside a section.
3
6
  // Others should not have a title so we can merge them into one section.
4
7
  if (pages.length > 0) {
5
- return firstLink?.text();
8
+ return firstLink.text();
6
9
  }
7
10
  return '';
8
11
  }
@@ -1,9 +1,11 @@
1
+ import { Cheerio, CheerioAPI, Element } from 'cheerio';
2
+
1
3
  import alternateGroupTitle from '../alternateGroupTitle.js';
2
4
  import getLinksRecursively from './getLinksRecursively.js';
3
5
 
4
6
  export function getDocusaurusLinksPerGroup(
5
- navigationSections: any,
6
- $: any,
7
+ navigationSections: Cheerio<Element>,
8
+ $: CheerioAPI,
7
9
  version: string | undefined
8
10
  ) {
9
11
  if (version === '3' || version === '2') {
@@ -12,8 +14,8 @@ export function getDocusaurusLinksPerGroup(
12
14
  return [];
13
15
  }
14
16
 
15
- function getDocusaurusLinksPerGroupLoop(navigationSections: any, $: any) {
16
- return navigationSections.toArray().map((s: string) => {
17
+ function getDocusaurusLinksPerGroupLoop(navigationSections: Cheerio<Element>, $: CheerioAPI) {
18
+ return navigationSections.toArray().map((s) => {
17
19
  const section = $(s);
18
20
 
19
21
  // Links without a group
@@ -21,7 +23,7 @@ function getDocusaurusLinksPerGroupLoop(navigationSections: any, $: any) {
21
23
  const linkHref = section.find('a[href]').first().attr('href');
22
24
  return {
23
25
  group: '',
24
- pages: [linkHref],
26
+ pages: linkHref !== undefined ? [linkHref] : [],
25
27
  };
26
28
  }
27
29
 
@@ -1,11 +1,13 @@
1
- // Used by Docusaurus and ReadMe section scrapers
2
- export default function getLinksRecursively(linkSections: any, $: any) {
3
- if (linkSections == null || linkSections.length === 0) {
4
- return [];
5
- }
1
+ import { NavigationEntry } from '@mintlify/models';
2
+ import { Cheerio, CheerioAPI, Element } from 'cheerio';
6
3
 
4
+ // Used by Docusaurus and ReadMe section scrapers
5
+ export default function getLinksRecursively(
6
+ linkSections: Cheerio<Element>,
7
+ $: CheerioAPI
8
+ ): NavigationEntry[] {
7
9
  return linkSections
8
- .map((i, s) => {
10
+ .map((_, s) => {
9
11
  const subsection = $(s);
10
12
  let link = subsection.children().first();
11
13
 
@@ -1,9 +1,11 @@
1
- // Used by GitBook section scraper
2
- export default function getLinksRecursivelyGitBook(linkSections: any, $: any) {
3
- if (linkSections == null || linkSections.length === 0) {
4
- return [];
5
- }
1
+ import { NavigationEntry } from '@mintlify/models';
2
+ import { Cheerio, CheerioAPI, Element } from 'cheerio';
6
3
 
4
+ // Used by GitBook section scraper
5
+ export default function getLinksRecursivelyGitBook(
6
+ linkSections: Cheerio<Element>,
7
+ $: CheerioAPI
8
+ ): NavigationEntry[] {
7
9
  return linkSections
8
10
  .map((_, s) => {
9
11
  let subsection = $(s);
@@ -14,8 +14,8 @@ export default async function openNestedDocusaurusMenus(page: Page) {
14
14
  );
15
15
 
16
16
  const linksFound: string[] = [];
17
- collapsible.forEach(async (collapsibleItem: HTMLElement) => {
18
- const href = collapsibleItem?.getAttribute('href');
17
+ collapsible.forEach((collapsibleItem) => {
18
+ const href = collapsibleItem.getAttribute('href');
19
19
 
20
20
  // Should never occur but we keep it as a fail-safe
21
21
  if (href?.startsWith('https://') || href?.startsWith('http://')) {
@@ -24,7 +24,7 @@ export default async function openNestedDocusaurusMenus(page: Page) {
24
24
 
25
25
  // Click any links we haven't seen before
26
26
  if (href && !encounteredHref.includes(href)) {
27
- collapsibleItem?.click();
27
+ collapsibleItem.click();
28
28
  }
29
29
 
30
30
  if (href) {
@@ -7,10 +7,10 @@ export default async function openNestedGitbookMenus(page: Page) {
7
7
  while (clickedAny) {
8
8
  clickedAny = await page.evaluate(() => {
9
9
  // Right pointing arrow. Only closed menus have this icon
10
- const icons: HTMLElement[] = Array.from(document.querySelectorAll('path[d="M9 18l6-6-6-6"]'));
10
+ const icons = Array.from(document.querySelectorAll('path[d="M9 18l6-6-6-6"]'));
11
11
 
12
- icons.forEach(async (icon: HTMLElement) => {
13
- const toClick = icon?.parentElement?.parentElement;
12
+ icons.forEach((icon) => {
13
+ const toClick = icon.parentElement?.parentElement;
14
14
  if (toClick) {
15
15
  toClick.click();
16
16
  }
@@ -1,4 +1,4 @@
1
- import cheerio from 'cheerio';
1
+ import * as cheerio from 'cheerio';
2
2
  import { NodeHtmlMarkdown } from 'node-html-markdown';
3
3
 
4
4
  import downloadAllImages from '../downloadAllImages.js';
@@ -1,5 +1,5 @@
1
1
  import { Navigation, NavigationEntry } from '@mintlify/models';
2
- import cheerio from 'cheerio';
2
+ import * as cheerio from 'cheerio';
3
3
 
4
4
  import combineNavWithEmptyGroupTitles from '../combineNavWithEmptyGroupTitles.js';
5
5
  import downloadLogoImage from '../downloadLogoImage.js';
@@ -19,7 +19,7 @@ export async function scrapeDocusaurusSection(
19
19
 
20
20
  // Download the logo
21
21
  const logoSrc = $('.navbar__logo img').attr('src');
22
- downloadLogoImage(logoSrc, imageBaseDir, origin, overwrite);
22
+ void downloadLogoImage(logoSrc, imageBaseDir, origin, overwrite);
23
23
 
24
24
  // Get all the navigation sections
25
25
  const navigationSections = $('.theme-doc-sidebar-menu').first().children();
@@ -1,4 +1,4 @@
1
- import cheerio from 'cheerio';
1
+ import * as cheerio from 'cheerio';
2
2
  import { NodeHtmlMarkdown } from 'node-html-markdown';
3
3
 
4
4
  import downloadAllImages from '../downloadAllImages.js';
@@ -1,5 +1,5 @@
1
1
  import { Navigation, NavigationEntry } from '@mintlify/models';
2
- import cheerio from 'cheerio';
2
+ import * as cheerio from 'cheerio';
3
3
 
4
4
  import combineNavWithEmptyGroupTitles from '../combineNavWithEmptyGroupTitles.js';
5
5
  import downloadLogoImage from '../downloadLogoImage.js';
@@ -25,7 +25,7 @@ export async function scrapeGitBookSection(
25
25
  // Get all the navigation sections
26
26
  // Some variants of the GitBook UI show the logo and search base in the side navigation bar,
27
27
  // but the navigation sections are always the last value.
28
- const navigationSections: cheerio.Cheerio = $(
28
+ const navigationSections = $(
29
29
  'div[data-testid="page.desktopTableOfContents"] > nav > div:first-child'
30
30
  )
31
31
  .children()
@@ -45,7 +45,7 @@ export async function scrapeGitBookSection(
45
45
  const firstLink = section.children().eq(0);
46
46
  const firstHref = firstLink.attr('href');
47
47
 
48
- const linkSections: cheerio.Cheerio = section.children().eq(1).children();
48
+ const linkSections = section.children().eq(1).children();
49
49
  const pages = getLinksRecursivelyGitBook(linkSections, $);
50
50
 
51
51
  return {
@@ -1,4 +1,4 @@
1
- import cheerio from 'cheerio';
1
+ import * as cheerio from 'cheerio';
2
2
  import { NodeHtmlMarkdown } from 'node-html-markdown';
3
3
 
4
4
  import downloadAllImages from '../downloadAllImages.js';
@@ -1,5 +1,5 @@
1
1
  import { Navigation, NavigationEntry } from '@mintlify/models';
2
- import cheerio from 'cheerio';
2
+ import * as cheerio from 'cheerio';
3
3
 
4
4
  import downloadLogoImage from '../downloadLogoImage.js';
5
5
  import { scrapeGettingFileNameFromUrl } from '../scrapeGettingFileNameFromUrl.js';
@@ -25,7 +25,7 @@ export async function scrapeReadMeSection(
25
25
  // responsiveness but they all have the same links.
26
26
  const navigationSections = $('.rm-Sidebar').first().find('.rm-Sidebar-section');
27
27
 
28
- const groupsConfig: Navigation = navigationSections.toArray().map((s: cheerio.Element) => {
28
+ const groupsConfig: Navigation = navigationSections.toArray().map((s) => {
29
29
  const section = $(s);
30
30
  const sectionTitle = section.find('h3').first().text();
31
31
 
@@ -36,7 +36,7 @@ export async function scrapeReadMeSection(
36
36
  // -- Second Page -> /second-page
37
37
  const linkSections = section.find('.rm-Sidebar-list').first().children();
38
38
  const pages = getLinksRecursively(linkSections, $).filter(
39
- (value: string, index: number, self: any) => self.indexOf(value) === index
39
+ (value, index, array) => array.indexOf(value) === index
40
40
  );
41
41
 
42
42
  // Follows the same structure as mint.json
package/src/util.ts CHANGED
@@ -1,10 +1,8 @@
1
1
  import { NavigationEntry } from '@mintlify/models';
2
- import { mkdirSync, writeFileSync } from 'fs';
2
+ import { existsSync, mkdirSync, writeFileSync } from 'fs';
3
3
  import Ora, { Ora as OraType } from 'ora';
4
4
  import path from 'path';
5
5
 
6
- import stopIfInvalidLink from './validation/stopIfInvalidLink.js';
7
-
8
6
  export const MintConfig = (
9
7
  name: string,
10
8
  color: string,
@@ -93,34 +91,20 @@ export const createPage = (
93
91
  // Create the folders needed if they're missing
94
92
  mkdirSync(rootDir, { recursive: true });
95
93
 
96
- // Write the page to memory
97
- if (overwrite) {
94
+ if (!overwrite && existsSync(writePath)) {
95
+ console.log(`❌ Skipping existing file ${writePath}`);
96
+ return;
97
+ }
98
+
99
+ // Write the page to disk
100
+ try {
98
101
  writeFileSync(writePath, Page(title, description, markdown));
99
102
  console.log('✏️ - ' + writePath);
100
- } else {
101
- try {
102
- writeFileSync(writePath, Page(title, description, markdown), {
103
- flag: 'wx',
104
- });
105
- console.log('✏️ - ' + writePath);
106
- } catch (e) {
107
- // We do a try-catch instead of an if-statement to avoid a race condition
108
- // of the file being created after we started writing.
109
- if ((e as { code: string })?.code === 'EEXIST') {
110
- console.log(`❌ Skipping existing file ${writePath}`);
111
- } else {
112
- console.error(e);
113
- }
114
- }
103
+ } catch (e) {
104
+ console.error(e);
115
105
  }
116
106
  };
117
107
 
118
- export function getHrefFromArgs(argv: any) {
119
- const href = argv.url;
120
- stopIfInvalidLink(href);
121
- return href;
122
- }
123
-
124
108
  export const buildLogger = (startText = ''): OraType => {
125
109
  const logger = Ora().start(startText);
126
110
  return logger;
package/tsconfig.json CHANGED
@@ -5,7 +5,6 @@
5
5
  "sourceMap": true,
6
6
  "outDir": "bin",
7
7
  "declaration": true,
8
- "noImplicitAny": false,
9
8
  "types": ["vitest/globals"]
10
9
  },
11
10
  "include": ["**/*.ts"],
@@ -1 +0,0 @@
1
- export default function isValidLink(href: string): boolean;
@@ -1,11 +0,0 @@
1
- export default function isValidLink(href) {
2
- // This checks the link is written correctly, not that the page exists.
3
- try {
4
- new URL(href);
5
- return true;
6
- }
7
- catch (_) {
8
- return false;
9
- }
10
- }
11
- //# sourceMappingURL=isValidLink.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"isValidLink.js","sourceRoot":"","sources":["../../src/validation/isValidLink.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,OAAO,UAAU,WAAW,CAAC,IAAY;IAC9C,uEAAuE;IACvE,IAAI,CAAC;QACH,IAAI,GAAG,CAAC,IAAI,CAAC,CAAC;QACd,OAAO,IAAI,CAAC;IACd,CAAC;IAAC,OAAO,CAAC,EAAE,CAAC;QACX,OAAO,KAAK,CAAC;IACf,CAAC;AACH,CAAC"}
@@ -1 +0,0 @@
1
- export default function stopIfInvalidLink(href: string): void;
@@ -1,9 +0,0 @@
1
- import isValidLink from './isValidLink.js';
2
- export default function stopIfInvalidLink(href) {
3
- if (!isValidLink(href)) {
4
- console.log('Invalid link: ' + href);
5
- console.log('Make sure the link starts with http:// or https://');
6
- process.exit(1);
7
- }
8
- }
9
- //# sourceMappingURL=stopIfInvalidLink.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"stopIfInvalidLink.js","sourceRoot":"","sources":["../../src/validation/stopIfInvalidLink.ts"],"names":[],"mappings":"AAAA,OAAO,WAAW,MAAM,kBAAkB,CAAC;AAE3C,MAAM,CAAC,OAAO,UAAU,iBAAiB,CAAC,IAAY;IACpD,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,EAAE,CAAC;QACvB,OAAO,CAAC,GAAG,CAAC,gBAAgB,GAAG,IAAI,CAAC,CAAC;QACrC,OAAO,CAAC,GAAG,CAAC,oDAAoD,CAAC,CAAC;QAClE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC;AACH,CAAC"}
@@ -1,9 +0,0 @@
1
- export default function isValidLink(href: string) {
2
- // This checks the link is written correctly, not that the page exists.
3
- try {
4
- new URL(href);
5
- return true;
6
- } catch (_) {
7
- return false;
8
- }
9
- }
@@ -1,9 +0,0 @@
1
- import isValidLink from './isValidLink.js';
2
-
3
- export default function stopIfInvalidLink(href: string) {
4
- if (!isValidLink(href)) {
5
- console.log('Invalid link: ' + href);
6
- console.log('Make sure the link starts with http:// or https://');
7
- process.exit(1);
8
- }
9
- }