@mintlify/cli 4.0.1079 → 4.0.1081
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/__test__/brokenLinks.test.ts +10 -5
- package/__test__/openApiCheck.test.ts +7 -2
- package/bin/cli.js +1 -81
- package/bin/tsconfig.build.tsbuildinfo +1 -1
- package/package.json +6 -8
- package/src/cli.tsx +1 -117
- package/__test__/migrateMdx.test.ts +0 -236
- package/bin/migrateMdx.js +0 -378
- package/bin/scrape.js +0 -108
- package/src/migrateMdx.tsx +0 -469
- package/src/scrape.tsx +0 -122
package/bin/migrateMdx.js
DELETED
|
@@ -1,378 +0,0 @@
|
|
|
1
|
-
// TypeScript-emitted downlevel helper for async/await: drives a generator
// whose `yield`s behave like `await`, returning a Promise for the result.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Normalize a yielded value into an instance of the Promise implementation P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Resume the generator with the awaited value, or throw into it on rejection.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Settle the outer promise when the generator finishes; otherwise chain the next step.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
10
|
-
import { jsx as _jsx } from "react/jsx-runtime";
|
|
11
|
-
import { potentiallyParseOpenApiString, parseFrontmatter } from '@mintlify/common';
|
|
12
|
-
import { getConfigObj, getConfigPath } from '@mintlify/prebuild';
|
|
13
|
-
import { addLog, ErrorLog, SuccessLog } from '@mintlify/previewing';
|
|
14
|
-
import { divisions, validateDocsConfig, } from '@mintlify/validation';
|
|
15
|
-
import fs from 'fs';
|
|
16
|
-
import { outputFile } from 'fs-extra';
|
|
17
|
-
import inquirer from 'inquirer';
|
|
18
|
-
import yaml from 'js-yaml';
|
|
19
|
-
import path from 'path';
|
|
20
|
-
import { CMD_EXEC_PATH } from './constants.js';
|
|
21
|
-
// Specs mutated during migration, keyed by path; flushed to disk at the end of migrateMdx.
const specCache = {};
// All parseable OpenAPI documents discovered in the repo, keyed by absolute path
// (filled once by buildCandidateSpecCacheIfNeeded).
const candidateSpecCache = {};
// Per-spec promise queues: tail of the chain serializing edits to each spec file.
const specLocks = new Map();
|
|
24
|
-
/**
 * Run `task` under a per-spec-file mutex: tasks that target the same
 * resolved spec path execute one after another, never concurrently.
 * The lock is always released, even when the task throws; errors from
 * the task still propagate to the caller.
 */
async function withSpecLock(specPath, task) {
    const lockKey = path.resolve(specPath);
    // Queue behind whatever task currently holds this spec's lock (if any).
    const priorHolder = specLocks.get(lockKey) ?? Promise.resolve();
    let release;
    const gate = new Promise((resolve) => {
        release = resolve;
    });
    // Publish our gate as the new tail before waiting, so later callers queue behind us.
    specLocks.set(lockKey, gate);
    await priorHolder;
    try {
        await task();
    }
    finally {
        release();
    }
}
|
|
43
|
-
// Tail of the promise chain serializing interactive prompts (see withInquirerLock).
let inquirerLockQueue = Promise.resolve();
|
|
44
|
-
/**
 * Global mutex for inquirer prompts: ensures only one interactive prompt
 * is on screen at a time. Resolves to the task's return value; the lock
 * is released in all cases, including when the task throws.
 */
async function withInquirerLock(task) {
    const waitFor = inquirerLockQueue;
    let release;
    // Become the new tail of the queue before awaiting our predecessor.
    inquirerLockQueue = new Promise((resolve) => {
        release = resolve;
    });
    await waitFor;
    try {
        return await task();
    }
    finally {
        release();
    }
}
|
|
61
|
-
// Entry point for the MDX migration command: moves `openapi` frontmatter out
// of MDX pages and into `x-mint` extensions inside the spec files themselves,
// rewrites the docs.json navigation accordingly, then writes every touched
// spec back to disk.
export function migrateMdx() {
    return __awaiter(this, void 0, void 0, function* () {
        // Locate and validate docs.json before mutating anything.
        const docsConfigPath = yield getConfigPath(CMD_EXEC_PATH, 'docs');
        if (!docsConfigPath) {
            addLog(_jsx(ErrorLog, { message: "docs.json not found in current directory" }));
            return;
        }
        // The raw JSON is kept alongside the validated config so unknown keys
        // survive the round-trip; only `navigation` is replaced below.
        const rawConfig = JSON.parse(yield fs.promises.readFile(docsConfigPath, 'utf-8'));
        const docsConfigObj = yield getConfigObj(CMD_EXEC_PATH, 'docs');
        const validationResults = yield validateDocsConfig(docsConfigObj);
        if (!validationResults.success) {
            addLog(_jsx(ErrorLog, { message: "docs.json is invalid" }));
            return;
        }
        const validatedDocsConfig = validationResults.data;
        // One-time scan of the repo for candidate OpenAPI documents.
        yield buildCandidateSpecCacheIfNeeded(CMD_EXEC_PATH);
        const updatedNavigation = yield processNav(validatedDocsConfig.navigation);
        rawConfig.navigation = updatedNavigation;
        yield outputFile(docsConfigPath, JSON.stringify(rawConfig, null, 2));
        addLog(_jsx(SuccessLog, { message: "docs.json updated" }));
        // Flush every spec mutated during navigation processing, preserving the
        // original serialization format (JSON vs YAML) based on file extension.
        for (const specPath in specCache) {
            const specObj = specCache[specPath];
            const ext = path.extname(specPath).toLowerCase();
            const stringified = ext === '.json' ? JSON.stringify(specObj, null, 2) : yaml.dump(specObj);
            yield outputFile(specPath, stringified);
            addLog(_jsx(SuccessLog, { message: `updated ${path.relative(CMD_EXEC_PATH, specPath)}` }));
        }
        addLog(_jsx(SuccessLog, { message: "migration complete" }));
    });
}
|
|
91
|
-
// Recursively walk one navigation node. String page entries whose MDX file
// carries `openapi` frontmatter are replaced with an OpenAPI page reference
// ("<spec> <method> <endpoint>"); the page's remaining frontmatter/content is
// pushed into the spec via migrateToXMint and the MDX file is deleted.
// Returns a new navigation object; the input object itself is not mutated.
function processNav(nav) {
    return __awaiter(this, void 0, void 0, function* () {
        let newNav = Object.assign({}, nav);
        if ('pages' in newNav) {
            // Pages are processed concurrently; interactive prompts and edits to
            // the same spec file are serialized by withInquirerLock/withSpecLock.
            const newPages = yield Promise.all(newNav.pages.map((page) => __awaiter(this, void 0, void 0, function* () {
                // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
                if (typeof page === 'object' && page !== null && 'group' in page) {
                    return processNav(page);
                }
                // Only bare page paths (no whitespace) can map to an MDX file.
                if (typeof page === 'string' && !/\s/.test(page)) {
                    const mdxCandidatePath = path.join(CMD_EXEC_PATH, `${page}.mdx`);
                    if (!fs.existsSync(mdxCandidatePath)) {
                        return page;
                    }
                    const fmParsed = parseFrontmatter(yield fs.promises.readFile(mdxCandidatePath, 'utf-8'));
                    const frontmatter = fmParsed.attributes;
                    const content = fmParsed.body;
                    if (!frontmatter.openapi) {
                        return page;
                    }
                    const parsed = potentiallyParseOpenApiString(frontmatter.openapi);
                    if (!parsed) {
                        addLog(_jsx(ErrorLog, { message: `invalid openapi frontmatter in ${mdxCandidatePath}: ${frontmatter.openapi}` }));
                        return page;
                    }
                    const { filename, method, endpoint: endpointPath } = parsed;
                    let specPath = filename;
                    // Remote (URL) specs are left untouched.
                    if (specPath && URL.canParse(specPath)) {
                        return page;
                    }
                    if (!specPath) {
                        // Frontmatter did not name a spec file: search the repo for one
                        // defining this operation.
                        const methodLower = method.toLowerCase();
                        const matchingSpecs = yield findMatchingOpenApiSpecs({
                            method: methodLower,
                            endpointPath,
                        }, candidateSpecCache);
                        if (matchingSpecs.length === 0) {
                            addLog(_jsx(ErrorLog, { message: `no OpenAPI spec found for ${method.toUpperCase()} ${endpointPath} in repository` }));
                            return page;
                        }
                        if (matchingSpecs.length === 1) {
                            specPath = path.relative(CMD_EXEC_PATH, matchingSpecs[0]);
                        }
                        else {
                            // Ambiguous match: ask the user, one prompt at a time.
                            const answer = yield withInquirerLock(() => inquirer.prompt([
                                {
                                    type: 'list',
                                    name: 'chosen',
                                    message: `multiple OpenAPI specs found for ${method.toUpperCase()} ${endpointPath}. which one should be used for ${path.relative(CMD_EXEC_PATH, mdxCandidatePath)}?`,
                                    choices: matchingSpecs.map((p) => ({
                                        name: path.relative(CMD_EXEC_PATH, p),
                                        value: path.relative(CMD_EXEC_PATH, p),
                                    })),
                                },
                            ]));
                            specPath = answer.chosen;
                        }
                    }
                    const href = `/${page}`;
                    const pageName = specPath ? `${specPath} ${method} ${endpointPath}` : frontmatter.openapi;
                    // `openapi` moves into the spec; the rest of the frontmatter
                    // becomes x-mint metadata.
                    delete frontmatter.openapi;
                    // NOTE(review): path.resolve(specPath) resolves against process.cwd(),
                    // while migrateToXMint reads via path.join(CMD_EXEC_PATH, specPath) —
                    // confirm cwd always equals CMD_EXEC_PATH when this command runs.
                    yield withSpecLock(path.resolve(specPath), () => migrateToXMint({
                        specPath,
                        method,
                        endpointPath,
                        frontmatter,
                        content,
                        href,
                    }));
                    try {
                        yield fs.promises.unlink(mdxCandidatePath);
                    }
                    catch (err) {
                        addLog(_jsx(ErrorLog, { message: `failed to delete ${mdxCandidatePath}: ${err.message}` }));
                    }
                    return pageName;
                }
                return page;
            })));
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            newNav.pages = newPages;
        }
        // Recurse into groups and every known navigation division.
        for (const division of ['groups', ...divisions]) {
            if (division in newNav) {
                const items = newNav[division];
                newNav = Object.assign(Object.assign({}, newNav), { [division]: yield Promise.all(items.map((item) => processNav(item))) });
            }
        }
        return newNav;
    });
}
|
|
182
|
-
// Load the spec named by args.specPath (or reuse the cached, already-mutated
// copy), write the page's frontmatter/content/href into the matching
// operation as an `x-mint` extension, and stash the mutated document in
// specCache for later write-back by migrateMdx. Errors are logged, not thrown.
function migrateToXMint(args) {
    return __awaiter(this, void 0, void 0, function* () {
        const { specPath, method, endpointPath, frontmatter, content, href } = args;
        // NOTE(review): existence is checked relative to process.cwd() here, but
        // the file is read from path.join(CMD_EXEC_PATH, specPath) below — confirm
        // cwd always equals CMD_EXEC_PATH when this runs.
        if (!fs.existsSync(specPath)) {
            addLog(_jsx(ErrorLog, { message: `spec file not found: ${specPath}` }));
            return;
        }
        let specObj;
        // Reuse an already-mutated document so multiple pages editing the same
        // spec do not clobber each other's changes.
        if (path.resolve(specPath) in specCache) {
            specObj = specCache[path.resolve(specPath)];
        }
        else {
            const pathname = path.join(CMD_EXEC_PATH, specPath);
            const file = yield fs.promises.readFile(pathname, 'utf-8');
            const ext = path.extname(specPath).toLowerCase();
            if (ext === '.json') {
                specObj = JSON.parse(file);
            }
            else if (ext === '.yml' || ext === '.yaml') {
                specObj = yaml.load(file);
            }
            else {
                addLog(_jsx(ErrorLog, { message: `unsupported spec file extension: ${specPath}` }));
                return;
            }
        }
        const methodLower = method.toLowerCase();
        // Empty metadata/content are omitted rather than written as empty values.
        if (!editXMint(specObj, endpointPath, methodLower, {
            metadata: Object.keys(frontmatter).length > 0 ? frontmatter : undefined,
            content: content.length > 0 ? content : undefined,
            href,
        })) {
            addLog(_jsx(ErrorLog, { message: `operation not found in spec: ${method.toUpperCase()} ${endpointPath} in ${specPath}` }));
            return;
        }
        // Cache only after a successful edit so failed pages leave no stale entry.
        specCache[path.resolve(specPath)] = specObj;
    });
}
|
|
220
|
-
/**
 * Attach `newXMint` as the `x-mint` extension of the operation found at
 * `paths[path][method]` in an OpenAPI document. The pseudo-method
 * 'webhook' is delegated to editWebhookXMint. If the operation carries a
 * legacy `x-mcp` extension and the new x-mint has no `mcp` key, the value
 * is moved under `x-mint.mcp` and `x-mcp` is removed.
 * Returns true when a matching operation was found and updated.
 */
function editXMint(document, path, method, newXMint) {
    if (method === 'webhook') {
        return editWebhookXMint(document, path, newXMint);
    }
    const pathItem = document.paths ? document.paths[path] : undefined;
    if (!pathItem) {
        return false;
    }
    const operation = pathItem[method.toLowerCase()];
    if (!operation) {
        return false;
    }
    operation['x-mint'] = newXMint;
    // Migrate the legacy x-mcp extension unless the caller already supplied mcp.
    if ('x-mcp' in operation && !('mcp' in operation['x-mint'])) {
        operation['x-mint']['mcp'] = operation['x-mcp'];
        delete operation['x-mcp'];
    }
    return true;
}
|
|
240
|
-
/**
 * Attach `newXMint` as the `x-mint` extension of the `post` operation of
 * webhook `path` under `document.webhooks`. A legacy `x-mcp` extension is
 * migrated into `x-mint.mcp` when the new extension does not already
 * define `mcp`. Returns true when the webhook post operation was updated.
 */
function editWebhookXMint(document, path, newXMint) {
    const webhookObject = document.webhooks?.[path];
    const isUsable = Boolean(webhookObject) && typeof webhookObject === 'object';
    if (!isUsable) {
        return false;
    }
    const postOperation = webhookObject['post'];
    if (!postOperation) {
        return false;
    }
    postOperation['x-mint'] = newXMint;
    // Migrate the legacy x-mcp extension unless the caller already supplied mcp.
    if ('x-mcp' in postOperation && !('mcp' in postOperation['x-mint'])) {
        postOperation['x-mint']['mcp'] = postOperation['x-mcp'];
        delete postOperation['x-mcp'];
    }
    return true;
}
|
|
257
|
-
// Return the absolute paths of every OpenAPI document — from docsByPath when
// provided, otherwise discovered on disk under CMD_EXEC_PATH — that defines
// the requested operation. The pseudo-method 'webhook' matches webhook
// entries that have a `post` operation; the endpoint is matched both with
// and without its leading slash. Result is de-duplicated, first-seen order.
function findMatchingOpenApiSpecs(args, docsByPath) {
    return __awaiter(this, void 0, void 0, function* () {
        const { method, endpointPath } = args;
        const docsEntries = docsByPath
            ? Object.entries(docsByPath)
            : (yield collectOpenApiFiles(CMD_EXEC_PATH)).map((absPath) => [absPath, undefined]);
        const normalizedMethod = method.toLowerCase();
        // Tolerate authors writing "v1/pets" vs "/v1/pets".
        const endpointVariants = new Set([endpointPath]);
        if (!endpointPath.startsWith('/')) {
            endpointVariants.add(`/${endpointPath}`);
        }
        else {
            endpointVariants.add(endpointPath.replace(/^\/+/, ''));
        }
        const matches = [];
        for (const [absPath, maybeDoc] of docsEntries) {
            try {
                const doc = maybeDoc || (yield loadOpenApiDocument(absPath));
                if (!doc)
                    continue;
                if (normalizedMethod === 'webhook') {
                    const webhooks = doc.webhooks;
                    if (!webhooks)
                        continue;
                    for (const key of Object.keys(webhooks)) {
                        if (endpointVariants.has(key)) {
                            const pathItem = webhooks[key];
                            if (pathItem && typeof pathItem === 'object' && 'post' in pathItem && pathItem.post) {
                                matches.push(absPath);
                                break;
                            }
                        }
                    }
                    continue;
                }
                if (!doc.paths)
                    continue;
                for (const variant of endpointVariants) {
                    const pathItem = doc.paths[variant];
                    if (!pathItem)
                        continue;
                    const hasOperation = !!pathItem[normalizedMethod];
                    if (hasOperation) {
                        matches.push(absPath);
                        break;
                    }
                }
            }
            // Unreadable or malformed candidates are skipped deliberately.
            catch (_a) { }
        }
        // Resolve to absolute paths and de-duplicate, preserving first-seen order.
        return matches.map((abs) => path.resolve(abs)).filter((v, i, a) => a.indexOf(v) === i);
    });
}
|
|
310
|
-
/**
 * Recursively gather the absolute paths of every .json/.yml/.yaml file
 * under rootDir, skipping common dependency/build/VCS directories.
 * Directories are walked sequentially, depth-first.
 */
async function collectOpenApiFiles(rootDir) {
    const found = [];
    const skippedDirs = new Set([
        'node_modules',
        '.git',
        'dist',
        'build',
        '.next',
        '.vercel',
        'out',
        'coverage',
        'tmp',
        'temp',
    ]);
    async function walk(currentDir) {
        const entries = await fs.promises.readdir(currentDir, { withFileTypes: true });
        for (const entry of entries) {
            const abs = path.join(currentDir, entry.name);
            if (entry.isDirectory()) {
                if (!skippedDirs.has(entry.name)) {
                    await walk(abs);
                }
            }
            else if (entry.isFile() && /\.(ya?ml|json)$/i.test(entry.name)) {
                found.push(abs);
            }
        }
    }
    await walk(rootDir);
    return found;
}
|
|
347
|
-
/**
 * Read and parse an OpenAPI document from absPath (.json, .yml, or .yaml).
 * Returns undefined for unsupported extensions or on any read/parse error —
 * callers treat an unreadable candidate as "not a spec".
 */
async function loadOpenApiDocument(absPath) {
    try {
        const raw = await fs.promises.readFile(absPath, 'utf-8');
        switch (path.extname(absPath).toLowerCase()) {
            case '.json':
                return JSON.parse(raw);
            case '.yml':
            case '.yaml':
                return yaml.load(raw);
            default:
                // Unsupported extension: mirror the read-failure result.
                return undefined;
        }
    }
    catch (_err) {
        return undefined;
    }
}
|
|
366
|
-
/**
 * Populate candidateSpecCache with every parseable OpenAPI document found
 * under rootDir, keyed by absolute path. No-op when the cache already has
 * entries, so repeated calls pay the disk scan only once.
 */
async function buildCandidateSpecCacheIfNeeded(rootDir) {
    const alreadyBuilt = Object.keys(candidateSpecCache).length > 0;
    if (alreadyBuilt) {
        return;
    }
    const specFiles = await collectOpenApiFiles(rootDir);
    // Parse all candidates concurrently; unparseable files are simply skipped.
    const loaders = specFiles.map(async (absFile) => {
        const doc = await loadOpenApiDocument(absFile);
        if (doc) {
            candidateSpecCache[path.resolve(absFile)] = doc;
        }
    });
    await Promise.all(loaders);
}
|
package/bin/scrape.js
DELETED
|
@@ -1,108 +0,0 @@
|
|
|
1
|
-
// TypeScript-emitted downlevel helper for async/await: drives a generator
// whose `yield`s behave like `await`, returning a Promise for the result.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Normalize a yielded value into an instance of the Promise implementation P.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        // Resume the generator with the awaited value, or throw into it on rejection.
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        // Settle the outer promise when the generator finishes; otherwise chain the next step.
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
10
|
-
import { jsx as _jsx } from "react/jsx-runtime";
|
|
11
|
-
import { addLog, ErrorLog, SuccessLog, SpinnerLog, InfoLog } from '@mintlify/previewing';
|
|
12
|
-
import { scrapePageGroup, scrapeAllSiteTabs, htmlToHast, detectFramework, framework, fetchPageHtml, write, getErrorMessage, generateOpenApiPages, FINAL_SUCCESS_MESSAGE, } from '@mintlify/scraping';
|
|
13
|
-
import { upgradeToDocsConfig } from '@mintlify/validation';
|
|
14
|
-
import { terminate } from './helpers.js';
|
|
15
|
-
/**
 * Scrape an entire documentation site at `url` (optionally filtered) into
 * local Mintlify pages, write a docs.json config, and terminate the
 * process — exit code 0 on success, 1 on any failure.
 */
export async function scrapeSite(url, filter) {
    try {
        const target = new URL(url);
        addLog(_jsx(SpinnerLog, { message: `Fetching ${target.toString()}...` }));
        const html = await fetchPageHtml(target);
        addLog(_jsx(SuccessLog, { message: `Successfully retrieved HTML from ${target.toString()}` }));
        addLog(_jsx(SpinnerLog, { message: "Scraping site..." }));
        const scrapeResult = await scrapeAllSiteTabs(html, target, { filter });
        if (scrapeResult.success) {
            // Upgrade the scraped mint config to the docs.json format.
            const docsConfig = upgradeToDocsConfig(scrapeResult.data, {
                shouldUpgradeTheme: true,
            });
            docsConfig.theme = 'aspen';
            write('docs.json', JSON.stringify(docsConfig, undefined, 2));
            addLog(_jsx(SuccessLog, { message: FINAL_SUCCESS_MESSAGE }));
        }
        else {
            addLog(_jsx(ErrorLog, { message: scrapeResult.message }));
            await terminate(1);
        }
        await terminate(0);
    }
    catch (error) {
        addLog(_jsx(ErrorLog, { message: getErrorMessage(error) }));
        await terminate(1);
    }
}
|
|
46
|
-
/**
 * Scrape a single documentation page at `url` into a local Mintlify page
 * and terminate the process — exit code 0 on success, 1 on any failure.
 * A headless browser is used when the source framework requires it.
 */
export async function scrapePage(url) {
    try {
        const target = new URL(url);
        addLog(_jsx(SpinnerLog, { message: `Fetching ${target.toString()}...` }));
        const html = await fetchPageHtml(target);
        addLog(_jsx(SuccessLog, { message: `Successfully retrieved HTML from ${target.toString()}` }));
        // detectFramework mutates the shared `framework` singleton read below.
        detectFramework(htmlToHast(html));
        const needsBrowser = framework.vendor === 'gitbook';
        addLog(_jsx(SpinnerLog, { message: "Scraping page..." }));
        const groupResults = await scrapePageGroup([target], needsBrowser);
        const result = groupResults[0] || {
            success: false,
            message: `An unknown error occurred when scraping ${url}`,
        };
        if (result.success) {
            addLog(_jsx(SuccessLog, { message: `Successfully scraped ${url} ${result.data ? `into ${result.data[1]}` : ''}` }));
        }
        else {
            addLog(_jsx(ErrorLog, { message: result.message }));
            await terminate(1);
        }
        await terminate(0);
    }
    catch (error) {
        addLog(_jsx(ErrorLog, { message: getErrorMessage(error) }));
        await terminate(1);
    }
}
|
|
78
|
-
/**
 * Generate Mintlify pages from an OpenAPI spec (local path or URL), log a
 * suggested navigation object (and, for remote specs, an `openapi:` config
 * suggestion), then terminate — exit code 0 on success, 1 on failure.
 */
export async function scrapeOpenApi({ openapiLocation, writeFiles, outDir, overwrite, }) {
    try {
        addLog(_jsx(SpinnerLog, { message: `Processing OpenAPI spec from ${openapiLocation}...` }));
        const { nav, isUrl } = await generateOpenApiPages(openapiLocation, {
            openApiFilePath: undefined,
            version: undefined,
            writeFiles,
            outDir,
            overwrite,
        });
        addLog(_jsx(SuccessLog, { message: "Successfully generated OpenAPI pages" }));
        addLog(_jsx(InfoLog, { message: "Navigation object suggestion:" }));
        addLog(_jsx(InfoLog, { message: JSON.stringify(nav, undefined, 2) }));
        if (isUrl) {
            addLog(_jsx(InfoLog, { message: "OpenAPI location suggestion:" }));
            addLog(_jsx(InfoLog, { message: `openapi: ${openapiLocation}` }));
        }
        await terminate(0);
    }
    catch (error) {
        const message = error instanceof Error ? error.message : String(error);
        addLog(_jsx(ErrorLog, { message: message }));
        await terminate(1);
    }
}
|