skrypt-ai 0.4.2 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/dist/auth/index.d.ts +13 -3
  2. package/dist/auth/index.js +94 -9
  3. package/dist/auth/keychain.d.ts +5 -0
  4. package/dist/auth/keychain.js +82 -0
  5. package/dist/auth/notices.d.ts +3 -0
  6. package/dist/auth/notices.js +42 -0
  7. package/dist/autofix/index.js +10 -3
  8. package/dist/cli.js +16 -3
  9. package/dist/commands/generate.js +37 -1
  10. package/dist/commands/import.d.ts +2 -0
  11. package/dist/commands/import.js +157 -0
  12. package/dist/commands/init.js +19 -7
  13. package/dist/commands/login.js +15 -4
  14. package/dist/commands/review-pr.js +10 -0
  15. package/dist/commands/security.d.ts +2 -0
  16. package/dist/commands/security.js +103 -0
  17. package/dist/generator/writer.js +12 -3
  18. package/dist/importers/confluence.d.ts +5 -0
  19. package/dist/importers/confluence.js +137 -0
  20. package/dist/importers/detect.d.ts +20 -0
  21. package/dist/importers/detect.js +121 -0
  22. package/dist/importers/docusaurus.d.ts +5 -0
  23. package/dist/importers/docusaurus.js +279 -0
  24. package/dist/importers/gitbook.d.ts +5 -0
  25. package/dist/importers/gitbook.js +189 -0
  26. package/dist/importers/github.d.ts +8 -0
  27. package/dist/importers/github.js +99 -0
  28. package/dist/importers/index.d.ts +15 -0
  29. package/dist/importers/index.js +30 -0
  30. package/dist/importers/markdown.d.ts +6 -0
  31. package/dist/importers/markdown.js +105 -0
  32. package/dist/importers/mintlify.d.ts +5 -0
  33. package/dist/importers/mintlify.js +172 -0
  34. package/dist/importers/notion.d.ts +5 -0
  35. package/dist/importers/notion.js +174 -0
  36. package/dist/importers/readme.d.ts +5 -0
  37. package/dist/importers/readme.js +184 -0
  38. package/dist/importers/transform.d.ts +90 -0
  39. package/dist/importers/transform.js +457 -0
  40. package/dist/importers/types.d.ts +37 -0
  41. package/dist/importers/types.js +1 -0
  42. package/dist/plugins/index.js +7 -0
  43. package/dist/scanner/index.js +37 -24
  44. package/dist/scanner/python.js +17 -0
  45. package/dist/template/public/search-index.json +1 -1
  46. package/dist/template/scripts/build-search-index.mjs +67 -9
  47. package/dist/template/src/lib/search-types.ts +4 -1
  48. package/dist/template/src/lib/search.ts +30 -7
  49. package/dist/utils/files.d.ts +9 -1
  50. package/dist/utils/files.js +59 -10
  51. package/package.json +4 -1
@@ -0,0 +1,279 @@
1
+ import { readFileSync, existsSync, readdirSync, statSync } from 'fs';
2
+ import { join, relative, basename, extname } from 'path';
3
+ import { findMdxFiles } from '../utils/files.js';
4
+ import { transformDocusaurusAdmonitions, transformDocusaurusTabs, stripDocusaurusImports, normalizeFrontmatter, getSortWeight, rewriteImagePaths, } from './transform.js';
5
/**
 * Import Docusaurus documentation from `dir` into an ImportResult.
 *
 * Reads the site title/tagline from docusaurus.config.ts|js, builds
 * navigation from the sidebars config when available (falling back to
 * walking docs/ with _category_.json metadata), and registers any images
 * found under static/ as assets.
 */
export function importDocusaurus(dir, name) {
    const result = createEmptyResult(name);
    const stats = { callouts: 0, tabs: 0, codeGroups: 0, steps: 0, accordions: 0, images: 0, other: 0 };
    // Project metadata: prefer the TS config file, fall back to JS.
    const tsConfig = join(dir, 'docusaurus.config.ts');
    const configPath = existsSync(tsConfig) ? tsConfig : join(dir, 'docusaurus.config.js');
    if (existsSync(configPath)) {
        const configSource = readFileSync(configPath, 'utf-8');
        const titleMatch = configSource.match(/title:\s*['"]([^'"]+)['"]/);
        const taglineMatch = configSource.match(/tagline:\s*['"]([^'"]+)['"]/);
        if (titleMatch) {
            result.name = titleMatch[1];
        }
        if (taglineMatch) {
            result.description = taglineMatch[1];
        }
    }
    // An explicitly supplied name always wins over the config title.
    if (name) {
        result.name = name;
    }
    // Content lives in docs/ when present, otherwise in the root itself.
    const docsDir = existsSync(join(dir, 'docs')) ? join(dir, 'docs') : dir;
    const sidebarNav = parseSidebars(dir, docsDir);
    if (sidebarNav && sidebarNav.length > 0) {
        // Navigation driven by the sidebars config.
        for (const group of sidebarNav) {
            const pages = [];
            for (const pageId of group.pageIds) {
                const page = processDocusaurusPage(docsDir, pageId, stats, result);
                if (page) {
                    pages.push(page);
                }
            }
            if (pages.length > 0) {
                result.navigation.push({ group: group.label, pages });
            }
        }
    }
    else {
        // Fallback: walk docs/ directories, honoring _category_.json.
        for (const group of walkDocsDirectory(docsDir)) {
            const pages = [];
            for (const file of group.files) {
                const pageId = relative(docsDir, file).replace(/\.(md|mdx)$/, '').replace(/\\/g, '/');
                const page = processDocusaurusPage(docsDir, pageId, stats, result);
                if (page) {
                    pages.push(page);
                }
            }
            if (pages.length > 0) {
                result.navigation.push({ group: group.label, pages });
            }
        }
    }
    // Register image assets under static/ and rewrite in-page references.
    const staticDir = join(dir, 'static');
    if (existsSync(staticDir)) {
        collectStaticAssets(staticDir, result, stats);
    }
    result.stats = {
        pages: result.files.size,
        groups: result.navigation.length,
        transforms: stats,
    };
    return result;
}
71
/**
 * Load one Docusaurus page by id, apply MDX transforms, register the
 * output file on `result`, and return page metadata — or null (with a
 * warning recorded) when the page cannot be found.
 */
function processDocusaurusPage(docsDir, pageId, stats, result) {
    // Resolve the id against the four file layouts Docusaurus allows.
    const candidates = [
        join(docsDir, `${pageId}.mdx`),
        join(docsDir, `${pageId}.md`),
        join(docsDir, pageId, 'index.mdx'),
        join(docsDir, pageId, 'index.md'),
    ];
    const filePath = candidates.find(p => existsSync(p));
    if (!filePath) {
        result.warnings.push(`Page not found: ${pageId}`);
        return null;
    }
    const originalContent = readFileSync(filePath, 'utf-8');
    // Apply transforms in sequence.
    let content = stripDocusaurusImports(originalContent);
    content = transformDocusaurusAdmonitions(content);
    content = transformDocusaurusTabs(content);
    content = normalizeFrontmatter(content);
    // Strip Docusaurus-specific code fence metadata ({1,3-5} line highlighting).
    content = content.replace(/^(```\w+)(?:\s+\{[\d,-]+\})/gm, '$1');
    // Count how many components the transforms introduced.
    stats.callouts += countNewOccurrences(originalContent, content, '<Callout');
    stats.tabs += countNewOccurrences(originalContent, content, '<TabPanel');
    const title = extractTitle(content, filePath);
    const slug = pageId.replace(/\\/g, '/');
    result.files.set(`content/docs/${slug}.mdx`, content);
    return { title, slug, sourcePath: relative(docsDir, filePath), content };
}
101
/**
 * Parse the Docusaurus sidebars config with regexes to recover navigation
 * groups. Handles explicit category objects, `autogenerated` entries
 * (expanded by walking the referenced directory), and bare string doc ids.
 *
 * Fixed: now checks `sidebars.ts` as well as `sidebars.js`, mirroring the
 * `.ts`-first lookup used for docusaurus.config above.
 *
 * @param rootDir Project root containing the sidebars file.
 * @param docsDir Directory holding the markdown pages.
 * @returns Array of { label, pageIds } groups, or null when no sidebars
 *          file exists or nothing could be parsed out of it.
 */
function parseSidebars(rootDir, docsDir) {
    const sidebarPath = existsSync(join(rootDir, 'sidebars.ts'))
        ? join(rootDir, 'sidebars.ts')
        : join(rootDir, 'sidebars.js');
    if (!existsSync(sidebarPath))
        return null;
    try {
        const content = readFileSync(sidebarPath, 'utf-8');
        const groups = [];
        // Match category objects: { type: 'category', label: 'X', items: [...] }
        const categoryRegex = /\{\s*type:\s*['"]category['"],\s*label:\s*['"]([^'"]+)['"]\s*,\s*items:\s*\[([\s\S]*?)\]\s*\}/g;
        let match;
        while ((match = categoryRegex.exec(content)) !== null) {
            const label = match[1];
            const itemsStr = match[2];
            const pageIds = extractPageIds(itemsStr);
            groups.push({ label, pageIds });
        }
        // Match autogenerated: { type: 'autogenerated', dirName: 'x' }
        const autoRegex = /\{\s*type:\s*['"]autogenerated['"],\s*dirName:\s*['"]([^'"]+)['"]\s*\}/g;
        while ((match = autoRegex.exec(content)) !== null) {
            const dirName = match[1];
            const subDir = join(docsDir, dirName);
            if (existsSync(subDir)) {
                const files = findMdxFiles(subDir);
                const pageIds = files.map(f => relative(docsDir, f).replace(/\.(md|mdx)$/, '').replace(/\\/g, '/'));
                // Title-case the directory name for the group label.
                const label = dirName.replace(/[-_]/g, ' ').replace(/\b\w/g, c => c.toUpperCase());
                groups.push({ label, pageIds });
            }
        }
        // Fallback: top-level string references such as 'intro', 'getting-started'.
        if (groups.length === 0) {
            const stringRefs = content.match(/['"]([a-zA-Z0-9/_-]+)['"]/g);
            if (stringRefs) {
                const pageIds = stringRefs.map(s => s.replace(/['"]/g, '')).filter(s => !['category', 'doc', 'autogenerated'].includes(s));
                if (pageIds.length > 0) {
                    groups.push({ label: 'Documentation', pageIds });
                }
            }
        }
        return groups.length > 0 ? groups : null;
    }
    catch {
        // Unreadable sidebars file — caller falls back to walking docs/.
        return null;
    }
}
145
/**
 * Pull doc ids out of a sidebars `items: [...]` array body.
 * Matches quoted string shorthands ('page-id') while skipping the
 * reserved type keywords that also appear quoted in the config.
 */
function extractPageIds(itemsStr) {
    const stringRegex = /['"]([a-zA-Z0-9/_-]+)['"]/g;
    const reserved = ['doc', 'category', 'autogenerated'];
    const ids = [];
    for (let m = stringRegex.exec(itemsStr); m !== null; m = stringRegex.exec(itemsStr)) {
        if (!reserved.includes(m[1])) {
            ids.push(m[1]);
        }
    }
    return ids;
}
158
/**
 * Fallback navigation builder: group markdown files by directory under
 * `docsDir`, using each directory's _category_.json for label/position
 * when present. Root-level files form a "Documentation" group pinned at
 * position 0; groups are returned sorted by position.
 */
function walkDocsDirectory(docsDir) {
    const groups = [];
    // Root-level markdown files come first.
    const rootFiles = getMdFilesInDir(docsDir);
    if (rootFiles.length > 0) {
        groups.push({ label: 'Documentation', files: rootFiles, position: 0 });
    }
    try {
        for (const entry of readdirSync(docsDir)) {
            const fullPath = join(docsDir, entry);
            const skip = !statSync(fullPath).isDirectory() || entry.startsWith('.') || entry === 'node_modules';
            if (skip) {
                continue;
            }
            const files = findMdxFiles(fullPath);
            if (files.length === 0) {
                continue;
            }
            // Defaults: title-cased directory name, sorted after any
            // directory that declares an explicit position.
            let label = entry.replace(/[-_]/g, ' ').replace(/\b\w/g, c => c.toUpperCase());
            let position = Infinity;
            const categoryPath = join(fullPath, '_category_.json');
            if (existsSync(categoryPath)) {
                try {
                    const cat = JSON.parse(readFileSync(categoryPath, 'utf-8'));
                    if (cat.label) {
                        label = cat.label;
                    }
                    if (typeof cat.position === 'number') {
                        position = cat.position;
                    }
                }
                catch { /* malformed _category_.json — keep defaults */ }
            }
            groups.push({ label, files: sortByWeight(files), position });
        }
    }
    catch { /* unreadable docs dir — return whatever was collected */ }
    return groups.sort((a, b) => a.position - b.position);
}
195
/**
 * List the .md/.mdx files directly inside `dir` (non-recursive).
 * Returns an empty array when the directory cannot be read.
 */
function getMdFilesInDir(dir) {
    try {
        const names = readdirSync(dir);
        return names.filter(f => /\.(md|mdx)$/.test(f)).map(f => join(dir, f));
    }
    catch {
        return [];
    }
}
205
/**
 * Sort markdown files for display: index/readme files first, then by the
 * sidebar weight read from each file's content, then alphabetically.
 * Sorts in place (Array.prototype.sort) and returns the same array.
 *
 * Fixed: the original comparator returned -1 whenever the first file was
 * index/readme — even when the second one was too — making it inconsistent
 * (compare(a, b) and compare(b, a) could both be negative). Rank the two
 * names and only short-circuit when the ranks differ.
 */
function sortByWeight(files) {
    // Rank 0 = index/readme (always first), rank 1 = everything else.
    const rank = (stem) => (stem === 'index' || stem === 'readme') ? 0 : 1;
    return files.sort((a, b) => {
        const aName = basename(a, extname(a)).toLowerCase();
        const bName = basename(b, extname(b)).toLowerCase();
        const rankDiff = rank(aName) - rank(bName);
        if (rankDiff !== 0)
            return rankDiff;
        try {
            const aWeight = getSortWeight(readFileSync(a, 'utf-8'));
            const bWeight = getSortWeight(readFileSync(b, 'utf-8'));
            if (aWeight !== bWeight)
                return aWeight - bWeight;
        }
        catch { /* unreadable file — fall through to name comparison */ }
        return aName.localeCompare(bName);
    });
}
223
/**
 * Recursively register image files under static/ as public assets and
 * rewrite references in already-processed pages to the new locations.
 *
 * Fixed: normalize Windows backslashes out of the relative asset path so
 * the URL-style `/static/...` mapping keys match page content — consistent
 * with the `.replace(/\\/g, '/')` slug handling used elsewhere in this
 * importer.
 */
function collectStaticAssets(staticDir, result, stats) {
    const imgExts = new Set(['.png', '.jpg', '.jpeg', '.gif', '.svg', '.webp', '.ico']);
    const imageMapping = new Map();
    function walk(dir) {
        try {
            for (const entry of readdirSync(dir)) {
                const fullPath = join(dir, entry);
                if (statSync(fullPath).isDirectory()) {
                    walk(fullPath);
                }
                else if (imgExts.has(extname(entry).toLowerCase())) {
                    const rel = relative(staticDir, fullPath).replace(/\\/g, '/');
                    const dest = `public/${rel}`;
                    result.assets.set(dest, fullPath);
                    // Map both root-relative and bare spellings of the source path.
                    imageMapping.set(`/static/${rel}`, `/${rel}`);
                    imageMapping.set(`static/${rel}`, `/${rel}`);
                    stats.images++;
                }
            }
        }
        catch { /* unreadable directory — skip */ }
    }
    walk(staticDir);
    // Rewrite image paths in every collected page.
    if (imageMapping.size > 0) {
        for (const [path, content] of result.files) {
            result.files.set(path, rewriteImagePaths(content, imageMapping));
        }
    }
}
253
/**
 * Derive a page title: the frontmatter `title:` field when present, else
 * the first H1 heading, else the file name title-cased.
 */
function extractTitle(content, filePath) {
    const frontmatter = content.match(/^---\n[\s\S]*?title:\s*["']?([^"'\n]+)["']?\s*\n[\s\S]*?---/m);
    if (frontmatter) {
        return frontmatter[1].trim();
    }
    const heading = content.match(/^#\s+(.+)$/m);
    if (heading) {
        return heading[1].trim();
    }
    const stem = basename(filePath, extname(filePath));
    return stem.replace(/[-_]/g, ' ').replace(/\b\w/g, c => c.toUpperCase());
}
262
/**
 * How many additional occurrences of `marker` the transform introduced.
 * Never negative: returns 0 when the transformed text has no more
 * occurrences than the original.
 */
function countNewOccurrences(original, transformed, marker) {
    const escaped = marker.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    const pattern = new RegExp(escaped, 'g');
    const tally = (text) => (text.match(pattern) || []).length;
    return Math.max(0, tally(transformed) - tally(original));
}
268
/**
 * Build a fresh, empty ImportResult skeleton for the Docusaurus importer.
 * The name defaults to 'Documentation' when none is supplied.
 */
function createEmptyResult(name) {
    const transforms = { callouts: 0, tabs: 0, codeGroups: 0, steps: 0, accordions: 0, images: 0, other: 0 };
    return {
        navigation: [],
        name: name || 'Documentation',
        description: 'Imported from Docusaurus',
        files: new Map(),
        assets: new Map(),
        warnings: [],
        stats: { pages: 0, groups: 0, transforms },
        sourceFormat: 'docusaurus',
    };
}
@@ -0,0 +1,5 @@
1
import type { ImportResult } from './types.js';
/**
 * Import GitBook documentation.
 *
 * @param dir - Root of the GitBook project (a `.gitbook.yaml` `root:`
 *   entry, when present, redirects content lookup to a subdirectory).
 * @param name - Optional display name overriding the default.
 * @returns The imported pages, navigation groups, assets, and stats.
 */
export declare function importGitBook(dir: string, name?: string): ImportResult;
@@ -0,0 +1,189 @@
1
+ import { readFileSync, existsSync, readdirSync, statSync } from 'fs';
2
+ import { join, relative, basename, extname } from 'path';
3
+ import { findMdxFiles } from '../utils/files.js';
4
+ import { transformGitBookHints, transformGitBookTabs, transformGitBookSteps, transformGitBookExpandable, transformGitBookContentRef, transformGitBookEmbed, normalizeFrontmatter, rewriteImagePaths, } from './transform.js';
5
/**
 * Import GitBook documentation from `dir` into an ImportResult.
 *
 * Honors a `root:` override in .gitbook.yaml, builds navigation from
 * SUMMARY.md when present (otherwise imports every markdown file into a
 * single group), and registers images found under .gitbook/assets/.
 */
export function importGitBook(dir, name) {
    const result = createEmptyResult(name);
    const stats = { callouts: 0, tabs: 0, codeGroups: 0, steps: 0, accordions: 0, images: 0, other: 0 };
    // .gitbook.yaml may point the content root at a subdirectory.
    let rootPath = '';
    const gitbookYaml = join(dir, '.gitbook.yaml');
    if (existsSync(gitbookYaml)) {
        try {
            const rootMatch = readFileSync(gitbookYaml, 'utf-8').match(/root:\s*(.+)/);
            if (rootMatch) {
                rootPath = rootMatch[1].trim().replace(/^\.\//, '');
            }
        }
        catch { /* unreadable yaml — fall back to the repo root */ }
    }
    const contentDir = rootPath ? join(dir, rootPath) : dir;
    // SUMMARY.md, when present, defines the navigation tree.
    const summaryPath = join(contentDir, 'SUMMARY.md');
    if (existsSync(summaryPath)) {
        for (const group of parseSummaryMd(readFileSync(summaryPath, 'utf-8'))) {
            const pages = [];
            for (const ref of group.pageRefs) {
                const page = processGitBookPage(contentDir, ref.path, ref.title, stats, result);
                if (page) {
                    pages.push(page);
                }
            }
            if (pages.length > 0) {
                result.navigation.push({ group: group.label, pages });
            }
        }
    }
    else {
        // No SUMMARY.md — walk every markdown file into one group.
        const pages = [];
        for (const filePath of findMdxFiles(contentDir)) {
            if (basename(filePath).toUpperCase() === 'SUMMARY.MD') {
                continue;
            }
            const page = processGitBookPage(contentDir, relative(contentDir, filePath), undefined, stats, result);
            if (page) {
                pages.push(page);
            }
        }
        if (pages.length > 0) {
            result.navigation.push({ group: 'Documentation', pages });
        }
    }
    // Register image assets and rewrite in-page references.
    const assetsDir = join(dir, '.gitbook', 'assets');
    if (existsSync(assetsDir)) {
        collectGitBookAssets(assetsDir, result, stats);
    }
    result.stats = {
        pages: result.files.size,
        groups: result.navigation.length,
        transforms: stats,
    };
    return result;
}
69
/**
 * Parse GitBook's SUMMARY.md table of contents into navigation groups.
 * Headings (#, ##, ###) open a new group; list items with markdown links
 * become page references (auto-creating a 'Documentation' group when a
 * link appears before any heading). When no structured groups are found,
 * every link in the file is collected into one 'Documentation' group.
 */
function parseSummaryMd(content) {
    const groups = [];
    let open = null;
    const commit = () => {
        // Only keep groups that actually collected page references.
        if (open && open.pageRefs.length > 0) {
            groups.push(open);
        }
    };
    for (const line of content.split('\n')) {
        const heading = line.match(/^#{1,3}\s+(.+)/);
        if (heading) {
            commit();
            open = { label: heading[1].trim(), pageRefs: [] };
            continue;
        }
        // Page references: * [Title](path.md) or - [Title](path.md)
        const link = line.match(/^[\s]*[*-]\s+\[([^\]]+)\]\(([^)]+)\)/);
        if (link) {
            open = open || { label: 'Documentation', pageRefs: [] };
            open.pageRefs.push({ title: link[1], path: link[2] });
        }
    }
    commit();
    // Fallback: treat every link in the file as one flat group.
    if (groups.length === 0) {
        const linkRegex = /\[([^\]]+)\]\(([^)]+)\)/g;
        const allLinks = [];
        let m;
        while ((m = linkRegex.exec(content)) !== null) {
            allLinks.push({ title: m[1], path: m[2] });
        }
        if (allLinks.length > 0) {
            groups.push({ label: 'Documentation', pageRefs: allLinks });
        }
    }
    return groups;
}
109
/**
 * Load one GitBook page, apply GitBook→MDX transforms, register the output
 * file on `result`, and return page metadata (or null with a warning).
 *
 * Fixed: the traversal guard used `filePath.startsWith(contentDir)`, which
 * a sibling directory sharing the prefix defeats (e.g. contentDir `/docs`
 * accepts `/docs-evil/x.md`). Use path.relative(): any resolved path that
 * escapes contentDir yields a relative path beginning with '..'.
 */
function processGitBookPage(contentDir, pagePath, title, stats, result) {
    const filePath = join(contentDir, pagePath);
    // Guard: prevent path traversal outside the content directory.
    const rel = relative(contentDir, filePath);
    if (rel.startsWith('..')) {
        result.warnings.push(`Path traversal blocked: ${pagePath}`);
        return null;
    }
    if (!existsSync(filePath)) {
        result.warnings.push(`Page not found: ${pagePath}`);
        return null;
    }
    let content = readFileSync(filePath, 'utf-8');
    const originalContent = content;
    // Apply all GitBook transforms in sequence.
    content = transformGitBookHints(content);
    content = transformGitBookTabs(content);
    content = transformGitBookSteps(content);
    content = transformGitBookExpandable(content);
    content = transformGitBookContentRef(content);
    content = transformGitBookEmbed(content);
    content = normalizeFrontmatter(content);
    // Count how many components the transforms introduced.
    stats.callouts += countNewOccurrences(originalContent, content, '<Callout');
    stats.tabs += countNewOccurrences(originalContent, content, '<TabPanel');
    stats.steps += countNewOccurrences(originalContent, content, '<Step>');
    stats.accordions += countNewOccurrences(originalContent, content, '<Accordion');
    const pageTitle = title || extractTitle(content, filePath);
    const slug = pagePath.replace(/\.(md|mdx)$/, '').replace(/\\/g, '/');
    const outputPath = `content/docs/${slug}.mdx`;
    result.files.set(outputPath, content);
    return { title: pageTitle, slug, sourcePath: pagePath, content };
}
141
/**
 * Register every file under .gitbook/assets/ as a public image asset and
 * rewrite in-page references to the new /images/ locations.
 */
function collectGitBookAssets(assetsDir, result, stats) {
    const imageMapping = new Map();
    try {
        for (const entry of readdirSync(assetsDir)) {
            const fullPath = join(assetsDir, entry);
            if (!statSync(fullPath).isFile()) {
                continue;
            }
            result.assets.set(`public/images/${entry}`, fullPath);
            // Map both the root-relative and one-level-up spellings used in pages.
            imageMapping.set(`.gitbook/assets/${entry}`, `/images/${entry}`);
            imageMapping.set(`../.gitbook/assets/${entry}`, `/images/${entry}`);
            stats.images++;
        }
    }
    catch { /* unreadable assets dir — skip */ }
    // Rewrite image paths in every collected page.
    if (imageMapping.size > 0) {
        for (const [path, content] of result.files) {
            result.files.set(path, rewriteImagePaths(content, imageMapping));
        }
    }
}
163
/**
 * Derive a page title: the frontmatter `title:` field when present, else
 * the first H1 heading, else the file name title-cased.
 */
function extractTitle(content, filePath) {
    const frontmatter = content.match(/^---\n[\s\S]*?title:\s*["']?([^"'\n]+)["']?\s*\n[\s\S]*?---/m);
    if (frontmatter) {
        return frontmatter[1].trim();
    }
    const heading = content.match(/^#\s+(.+)$/m);
    if (heading) {
        return heading[1].trim();
    }
    const stem = basename(filePath, extname(filePath));
    return stem.replace(/[-_]/g, ' ').replace(/\b\w/g, c => c.toUpperCase());
}
172
/**
 * How many additional occurrences of `marker` the transform introduced.
 * Never negative: returns 0 when the transformed text has no more
 * occurrences than the original.
 */
function countNewOccurrences(original, transformed, marker) {
    const escaped = marker.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
    const pattern = new RegExp(escaped, 'g');
    const tally = (text) => (text.match(pattern) || []).length;
    return Math.max(0, tally(transformed) - tally(original));
}
178
/**
 * Build a fresh, empty ImportResult skeleton for the GitBook importer.
 * The name defaults to 'Documentation' when none is supplied.
 */
function createEmptyResult(name) {
    const transforms = { callouts: 0, tabs: 0, codeGroups: 0, steps: 0, accordions: 0, images: 0, other: 0 };
    return {
        navigation: [],
        name: name || 'Documentation',
        description: 'Imported from GitBook',
        files: new Map(),
        assets: new Map(),
        warnings: [],
        stats: { pages: 0, groups: 0, transforms },
        sourceFormat: 'gitbook',
    };
}
@@ -0,0 +1,8 @@
1
import type { ImportResult, ImportFormat } from './types.js';
/**
 * Fetch docs from a GitHub URL and import them.
 *
 * Downloads the tree at `path` (at git ref `ref`) via the GitHub contents
 * API into a temporary directory, then runs the matching importer. Reads
 * GITHUB_TOKEN from the environment for higher API rate limits.
 *
 * @param owner - Repository owner (user or organization).
 * @param repo - Repository name.
 * @param path - Directory path within the repository ('' for the root).
 * @param ref - Branch, tag, or commit SHA.
 * @param options - Optional forced import format and display name.
 */
export declare function importFromGitHub(owner: string, repo: string, path: string, ref: string, options?: {
    format?: ImportFormat;
    name?: string;
}): Promise<ImportResult>;
@@ -0,0 +1,99 @@
1
+ import { mkdirSync, writeFileSync, rmSync } from 'fs';
2
+ import { join, extname } from 'path';
3
+ import { tmpdir } from 'os';
4
+ import { detectFormat } from './detect.js';
5
+ import { runImport } from './index.js';
6
// Only fetch text-based files for import
// File-type allowlists for the GitHub fetcher: text docs sources and
// common image formats are downloaded; every other extension is skipped.
const TEXT_EXTENSIONS = new Set([
    '.md', '.mdx', '.json', '.yaml', '.yml', '.js', '.ts', '.txt',
    '.html', '.htm', '.xml', '.css', '.toml', '.cfg', '.ini', '.rst',
]);
const IMAGE_EXTENSIONS = new Set(['.png', '.jpg', '.jpeg', '.gif', '.svg', '.webp', '.ico']);
const MAX_FILE_SIZE = 10 * 1024 * 1024; // 10MB limit per file
const MAX_DEPTH = 20; // Prevent unbounded recursion
14
/**
 * Fetch docs from a GitHub URL and import them.
 *
 * Recursively downloads text and image files from the repository contents
 * API into a temporary directory, detects the docs format (unless forced
 * via options.format), runs the matching importer, and removes the temp
 * directory on the way out (best-effort). Uses GITHUB_TOKEN from the
 * environment, when set, for higher API rate limits.
 *
 * Fixed: the contents API returns a single object — not an array — when
 * the requested path is a file; iterating it with for...of would throw.
 * The response is now normalized with Array.isArray before iteration.
 *
 * @param {string} owner Repository owner (user or organization).
 * @param {string} repo Repository name.
 * @param {string} path Path inside the repository to fetch ('' for root).
 * @param {string} ref Branch, tag, or commit SHA.
 * @param {{format?: string, name?: string}} [options]
 * @returns {Promise<import('./types.js').ImportResult>}
 * @throws {Error} On GitHub API failures, including rate-limit exhaustion.
 */
export async function importFromGitHub(owner, repo, path, ref, options) {
    const token = process.env.GITHUB_TOKEN;
    const headers = {
        Accept: 'application/vnd.github.v3+json',
        'User-Agent': 'skrypt-cli',
    };
    if (token) {
        headers.Authorization = `Bearer ${token}`;
    }
    const tempDir = join(tmpdir(), `skrypt-import-${Date.now()}`);
    mkdirSync(tempDir, { recursive: true });
    try {
        console.log(` Fetching from GitHub: ${owner}/${repo}/${path || ''}...`);
        let fileCount = 0;
        async function fetchDir(dirPath, localDir, depth) {
            if (depth > MAX_DEPTH) {
                console.warn(` Warning: Max directory depth (${MAX_DEPTH}) reached, skipping deeper paths`);
                return;
            }
            const apiUrl = `https://api.github.com/repos/${owner}/${repo}/contents/${dirPath}?ref=${ref}`;
            const res = await fetch(apiUrl, { headers });
            if (res.status === 403) {
                // Distinguish rate limiting from other 403s for a clearer message.
                const rateLimitRemaining = res.headers.get('X-RateLimit-Remaining');
                if (rateLimitRemaining === '0') {
                    const resetTime = res.headers.get('X-RateLimit-Reset');
                    const resetDate = resetTime ? new Date(parseInt(resetTime) * 1000) : null;
                    throw new Error(`GitHub API rate limit exceeded. ${!token ? 'Set GITHUB_TOKEN for higher limits. ' : ''}${resetDate ? `Resets at ${resetDate.toLocaleTimeString()}.` : ''}`);
                }
                throw new Error(`GitHub API forbidden: ${res.statusText}`);
            }
            if (!res.ok) {
                throw new Error(`GitHub API error: ${res.status} ${res.statusText}`);
            }
            const body = await res.json();
            // Normalize: a file path returns one object instead of an array.
            const entries = Array.isArray(body) ? body : [body];
            for (const entry of entries) {
                if (entry.type === 'dir') {
                    const subDir = join(localDir, entry.name);
                    mkdirSync(subDir, { recursive: true });
                    await fetchDir(entry.path, subDir, depth + 1);
                }
                else if (entry.type === 'file' && entry.download_url) {
                    // Skip files that are too large
                    if (entry.size && entry.size > MAX_FILE_SIZE) {
                        console.warn(` Skipping large file: ${entry.name} (${Math.round(entry.size / 1024)}KB)`);
                        continue;
                    }
                    const ext = extname(entry.name).toLowerCase();
                    const isText = TEXT_EXTENSIONS.has(ext);
                    const isImage = IMAGE_EXTENSIONS.has(ext);
                    if (!isText && !isImage)
                        continue;
                    const fileRes = await fetch(entry.download_url, { headers });
                    if (fileRes.ok) {
                        if (isImage) {
                            // Fetch binary content properly for images
                            const buffer = Buffer.from(await fileRes.arrayBuffer());
                            writeFileSync(join(localDir, entry.name), buffer);
                        }
                        else {
                            const content = await fileRes.text();
                            writeFileSync(join(localDir, entry.name), content);
                        }
                        fileCount++;
                        process.stdout.write(`\r Fetched ${fileCount} files...`);
                    }
                }
            }
        }
        await fetchDir(path, tempDir, 0);
        console.log(`\r Fetched ${fileCount} files from GitHub`);
        const format = options?.format || detectFormat(tempDir);
        return runImport(tempDir, format, options?.name);
    }
    finally {
        // Clean up temp directory (best-effort; ignore failures).
        try {
            rmSync(tempDir, { recursive: true, force: true });
        }
        catch {
            // Best-effort cleanup
        }
    }
}
@@ -0,0 +1,15 @@
1
export type { ImportFormat, ImportResult, ImportedGroup, ImportedPage, TransformStats } from './types.js';
export { detectFormat, isGitHubUrl, parseGitHubUrl } from './detect.js';
export { importMarkdown } from './markdown.js';
export { importMintlify } from './mintlify.js';
export { importDocusaurus } from './docusaurus.js';
export { importGitBook } from './gitbook.js';
export { importReadme } from './readme.js';
export { importNotion } from './notion.js';
export { importConfluence } from './confluence.js';
export { importFromGitHub } from './github.js';
import type { ImportFormat, ImportResult } from './types.js';
/**
 * Run the appropriate importer for a detected format.
 *
 * @param dir - Directory containing the source documentation.
 * @param format - Format as produced by `detectFormat` (or user-forced).
 * @param name - Optional display name passed through to the importer.
 */
export declare function runImport(dir: string, format: ImportFormat, name?: string): ImportResult;
@@ -0,0 +1,30 @@
1
+ export { detectFormat, isGitHubUrl, parseGitHubUrl } from './detect.js';
2
+ export { importMarkdown } from './markdown.js';
3
+ export { importMintlify } from './mintlify.js';
4
+ export { importDocusaurus } from './docusaurus.js';
5
+ export { importGitBook } from './gitbook.js';
6
+ export { importReadme } from './readme.js';
7
+ export { importNotion } from './notion.js';
8
+ export { importConfluence } from './confluence.js';
9
+ export { importFromGitHub } from './github.js';
10
+ import { importMarkdown } from './markdown.js';
11
+ import { importMintlify } from './mintlify.js';
12
+ import { importDocusaurus } from './docusaurus.js';
13
+ import { importGitBook } from './gitbook.js';
14
+ import { importReadme } from './readme.js';
15
+ import { importNotion } from './notion.js';
16
+ import { importConfluence } from './confluence.js';
17
/**
 * Run the appropriate importer for a detected format.
 *
 * Fixed: the switch previously had no default, so an unrecognized format
 * value silently returned undefined; it now fails loudly.
 *
 * @param {string} dir Directory containing the source documentation.
 * @param {string} format One of the supported import formats.
 * @param {string} [name] Optional display name passed to the importer.
 * @throws {Error} When `format` is not a known import format.
 */
export function runImport(dir, format, name) {
    switch (format) {
        case 'mintlify': return importMintlify(dir, name);
        case 'docusaurus': return importDocusaurus(dir, name);
        case 'gitbook': return importGitBook(dir, name);
        case 'readme': return importReadme(dir, name);
        case 'notion': return importNotion(dir, name);
        case 'confluence': return importConfluence(dir, name);
        case 'markdown': return importMarkdown(dir, name);
        default:
            throw new Error(`Unknown import format: ${format}`);
    }
}
@@ -0,0 +1,6 @@
1
import type { ImportResult } from './types.js';
/**
 * Import a plain Markdown/MDX directory.
 * Folder structure → groups, files → pages.
 *
 * @param dir - Directory to scan for .md/.mdx files.
 * @param name - Optional display name for the imported docs.
 */
export declare function importMarkdown(dir: string, name?: string): ImportResult;