@larkiny/astro-github-loader 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +675 -0
- package/dist/github.cleanup.d.ts +5 -0
- package/dist/github.cleanup.js +216 -0
- package/dist/github.constants.d.ts +24 -0
- package/dist/github.constants.js +24 -0
- package/dist/github.content.d.ts +138 -0
- package/dist/github.content.js +1016 -0
- package/dist/github.dryrun.d.ts +72 -0
- package/dist/github.dryrun.js +247 -0
- package/dist/github.link-transform.d.ts +77 -0
- package/dist/github.link-transform.js +321 -0
- package/dist/github.loader.d.ts +14 -0
- package/dist/github.loader.js +143 -0
- package/dist/github.loader.spec.d.ts +1 -0
- package/dist/github.loader.spec.js +96 -0
- package/dist/github.logger.d.ts +132 -0
- package/dist/github.logger.js +260 -0
- package/dist/github.sync.d.ts +5 -0
- package/dist/github.sync.js +292 -0
- package/dist/github.types.d.ts +315 -0
- package/dist/github.types.js +1 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +5 -0
- package/package.json +66 -0
- package/src/github.cleanup.ts +243 -0
- package/src/github.constants.ts +25 -0
- package/src/github.content.ts +1205 -0
- package/src/github.dryrun.ts +339 -0
- package/src/github.link-transform.ts +452 -0
- package/src/github.loader.spec.ts +106 -0
- package/src/github.loader.ts +189 -0
- package/src/github.logger.ts +324 -0
- package/src/github.types.ts +339 -0
- package/src/index.ts +5 -0
|
@@ -0,0 +1,321 @@
|
|
|
1
|
+
import { slug } from 'github-slugger';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
/**
 * Split a link into its path and anchor fragment.
 * The anchor starts at the first '#' (inclusive); when no '#' is present the
 * anchor is the empty string.
 *
 * @param {string} link - The raw link URL.
 * @returns {{path: string, anchor: string}} Path portion and '#'-prefixed anchor.
 */
function extractAnchor(link) {
    const hashIndex = link.indexOf('#');
    if (hashIndex === -1) {
        return { path: link, anchor: '' };
    }
    return {
        path: link.slice(0, hashIndex),
        anchor: link.slice(hashIndex),
    };
}
|
|
12
|
+
/**
 * Check if a link is external (should not be transformed).
 * External links are left completely unchanged by all transformations:
 * explicit protocols (http/https/mailto/tel/ftp/ftps/data/file), anything
 * containing '://', and same-page anchor-only links.
 *
 * @param {string} link - The raw link URL.
 * @returns {boolean} True when the link must be left untouched.
 */
function isExternalLink(link) {
    // Anchor-only links target the same page
    if (link.startsWith('#')) {
        return true;
    }
    // Any protocol with an authority component (http, ftps, file, ...)
    if (link.includes('://')) {
        return true;
    }
    // Remaining explicit protocol prefixes
    const protocolPatterns = [
        /^https?:\/\//,
        /^mailto:/,
        /^tel:/,
        /^ftp:/,
        /^ftps:\/\//,
        /^data:/,
        /^file:\/\//,
    ];
    return protocolPatterns.some((pattern) => pattern.test(link));
}
|
|
33
|
+
/**
 * Normalize path separators and resolve relative paths.
 *
 * Relative links ('./x' or anything containing '../') are joined onto the
 * directory of `currentFilePath` using POSIX semantics and normalized; all
 * other paths are returned unchanged.
 *
 * @param {string} linkPath - Link target (anchor fragment already removed).
 * @param {string} currentFilePath - Source path of the file containing the link.
 * @param {object} [logger] - Optional logger with a debug(message) method.
 * @returns {string} The normalized link path.
 */
function normalizePath(linkPath, currentFilePath, logger) {
    logger?.debug(`[normalizePath] BEFORE: linkPath="${linkPath}", currentFilePath="${currentFilePath}"`);
    // Handle relative paths
    if (linkPath.startsWith('./') || linkPath.includes('../')) {
        const currentDir = path.dirname(currentFilePath);
        // posix join/normalize collapses './' and resolves '../' segments
        const resolved = path.posix.normalize(path.posix.join(currentDir, linkPath));
        logger?.debug(`[normalizePath] RELATIVE PATH RESOLVED: "${linkPath}" -> "${resolved}" (currentDir: "${currentDir}")`);
        return resolved;
    }
    // FIX: removed the unreachable `if (linkPath.startsWith('./'))` strip
    // branch — any './' path was already handled by the condition above.
    logger?.debug(`[normalizePath] AFTER: "${linkPath}" (no changes)`);
    return linkPath;
}
|
|
52
|
+
/**
 * Apply link mappings to transform a URL.
 *
 * Mappings are applied in order and may chain: each matched mapping rewrites
 * `transformedPath` and later mappings see the rewritten value. The only
 * early exit is the automatic relative-link handling, which returns
 * immediately once a known TypeDoc-style directory link is resolved.
 *
 * @param {string} linkUrl - The full link (path plus optional '#anchor').
 * @param {Array} linkMappings - Mappings with pattern/replacement and optional
 *   contextFilter / relativeLinks flags.
 * @param {object} context - Link context; reads currentFile.linkContext and
 *   global.stripPrefixes.
 * @returns {string} The transformed URL with the original anchor re-attached.
 */
function applyLinkMappings(linkUrl, linkMappings, context) {
    const { path: linkPath, anchor } = extractAnchor(linkUrl);
    let transformedPath = linkPath;
    for (const mapping of linkMappings) {
        // Check if contextFilter allows this mapping to be applied
        if (mapping.contextFilter && context.currentFile.linkContext) {
            if (!mapping.contextFilter(context.currentFile.linkContext)) {
                continue; // Skip this mapping
            }
        }
        // Handle relative links automatically if enabled
        if (mapping.relativeLinks && context.currentFile.linkContext) {
            // Check if this is a relative link (doesn't start with /, http, etc.)
            if (!linkPath.startsWith('/') && !isExternalLink(linkPath)) {
                // Check if the link points to a known directory structure
                // (TypeDoc-style output folders)
                const knownPaths = ['modules/', 'classes/', 'interfaces/', 'enums/'];
                const isKnownPath = knownPaths.some(p => linkPath.startsWith(p));
                if (isKnownPath) {
                    // Strip .md extension from the link path
                    const cleanLinkPath = linkPath.replace(/\.md$/, '');
                    // Convert relative path to absolute path using the target base
                    const targetBase = generateSiteUrl(context.currentFile.linkContext.basePath, context.global.stripPrefixes);
                    // Construct final URL with proper Starlight formatting
                    let finalUrl = targetBase.replace(/\/$/, '') + '/' + cleanLinkPath;
                    // Add trailing slash if it doesn't end with one and isn't empty
                    if (finalUrl && !finalUrl.endsWith('/')) {
                        finalUrl += '/';
                    }
                    transformedPath = finalUrl;
                    // Early return: relative-link resolution is terminal,
                    // no further mappings are applied.
                    return transformedPath + anchor;
                }
            }
        }
        let matched = false;
        let replacement = '';
        if (typeof mapping.pattern === 'string') {
            // String pattern - exact match or contains
            // (String.prototype.replace replaces only the first occurrence)
            if (transformedPath.includes(mapping.pattern)) {
                matched = true;
                if (typeof mapping.replacement === 'string') {
                    replacement = transformedPath.replace(mapping.pattern, mapping.replacement);
                }
                else {
                    // Function replacement receives the path, anchor and context
                    replacement = mapping.replacement(transformedPath, anchor, context);
                }
            }
        }
        else {
            // RegExp pattern
            const match = transformedPath.match(mapping.pattern);
            if (match) {
                matched = true;
                if (typeof mapping.replacement === 'string') {
                    replacement = transformedPath.replace(mapping.pattern, mapping.replacement);
                }
                else {
                    replacement = mapping.replacement(transformedPath, anchor, context);
                }
            }
        }
        if (matched) {
            // Apply the transformation and continue with next mapping
            transformedPath = replacement;
            // Note: We continue applying other mappings to allow chaining
        }
    }
    return transformedPath + anchor;
}
|
|
123
|
+
/**
 * Convert a target path to a site-compatible URL.
 *
 * Steps: strip the first matching configured prefix, drop any leading slash,
 * drop a .md/.mdx extension, collapse index files onto their parent
 * directory, slugify each path segment, then re-assemble with leading and
 * trailing slashes.
 *
 * @param {string} targetPath - Target file path (e.g. 'src/content/docs/a/b.md').
 * @param {string[]} stripPrefixes - Prefixes removed before URL generation.
 * @returns {string} Site URL with leading and trailing slashes, or '/' when empty.
 */
function generateSiteUrl(targetPath, stripPrefixes) {
    let url = targetPath;
    // Strip the first matching configured prefix only
    for (const prefix of stripPrefixes) {
        if (url.startsWith(prefix)) {
            url = url.slice(prefix.length);
            break;
        }
    }
    // Remove leading slash if present
    url = url.replace(/^\//, '');
    // Remove markdown file extension
    url = url.replace(/\.(md|mdx)$/i, '');
    // Handle index files - they should resolve to parent directory.
    // FIX: remove the '/index' SUFFIX; the previous url.replace('/index', '')
    // removed the first occurrence anywhere in the path, corrupting paths
    // such as 'a/index/b/index' -> 'a/b/index'.
    if (url.endsWith('/index')) {
        url = url.slice(0, -'/index'.length);
    }
    else if (url === 'index') {
        url = '';
    }
    // Split path into segments and slugify each
    const segments = url.split('/').map(segment => segment ? slug(segment) : '');
    // Reconstruct URL, dropping empty segments
    url = segments.filter(s => s).join('/');
    // Ensure leading slash
    if (url && !url.startsWith('/')) {
        url = '/' + url;
    }
    // Add trailing slash for non-empty paths
    if (url && !url.endsWith('/')) {
        url = url + '/';
    }
    return url || '/';
}
|
|
160
|
+
/**
 * Transform a single markdown link.
 *
 * Processing order:
 * 1. Skip external links (no transformation)
 * 2. Normalize path relative to current file
 * 3. Apply global path mappings to normalized path
 * 4. Check if link targets imported file in sourceToTargetMap
 * 5. Apply non-global path mappings if unresolved
 * 6. Check custom handlers
 *
 * @param {string} linkText - The markdown link label.
 * @param {string} linkUrl - The raw link target (may include '#anchor').
 * @param {object} context - Per-link context; reads currentFile.sourcePath and
 *   the global context (sourceToTargetMap, linkMappings, customHandlers, ...).
 * @returns {string} The rebuilt markdown link '[text](url)'.
 */
function transformLink(linkText, linkUrl, context) {
    // Skip external links FIRST - no transformations should ever be applied to them
    if (isExternalLink(linkUrl)) {
        return `[${linkText}](${linkUrl})`;
    }
    const { path: linkPath, anchor } = extractAnchor(linkUrl);
    // Normalize the link path relative to current file FIRST
    const normalizedPath = normalizePath(linkPath, context.currentFile.sourcePath, context.global.logger);
    // Apply global path mappings to the normalized path
    let processedNormalizedPath = normalizedPath;
    if (context.global.linkMappings) {
        const globalMappings = context.global.linkMappings.filter(m => m.global);
        if (globalMappings.length > 0) {
            processedNormalizedPath = applyLinkMappings(normalizedPath + anchor, globalMappings, context);
            // Extract path again after global mappings (the anchor is
            // re-attached separately below)
            const { path: newPath } = extractAnchor(processedNormalizedPath);
            processedNormalizedPath = newPath;
        }
    }
    // Check if this links to an imported file.
    // NOTE: the lookup uses the pre-global-mapping normalizedPath, not
    // processedNormalizedPath — sourceToTargetMap is keyed by source paths.
    const targetPath = context.global.sourceToTargetMap.get(normalizedPath);
    if (targetPath) {
        // This is an internal link to an imported file
        const siteUrl = generateSiteUrl(targetPath, context.global.stripPrefixes);
        return `[${linkText}](${siteUrl}${anchor})`;
    }
    // Apply non-global path mappings to unresolved links
    if (context.global.linkMappings) {
        const nonGlobalMappings = context.global.linkMappings.filter(m => !m.global);
        if (nonGlobalMappings.length > 0) {
            const mappedUrl = applyLinkMappings(processedNormalizedPath + anchor, nonGlobalMappings, context);
            // Only short-circuit when a mapping actually changed the URL
            if (mappedUrl !== (processedNormalizedPath + anchor)) {
                return `[${linkText}](${mappedUrl})`;
            }
        }
    }
    // Check custom handlers (first matching handler wins)
    if (context.global.customHandlers) {
        for (const handler of context.global.customHandlers) {
            const currentUrl = processedNormalizedPath + anchor;
            if (handler.test(currentUrl, context)) {
                const transformedUrl = handler.transform(currentUrl, context);
                return `[${linkText}](${transformedUrl})`;
            }
        }
    }
    // No transformation needed - return processed URL
    return `[${linkText}](${processedNormalizedPath + anchor})`;
}
|
|
220
|
+
/**
 * Global link transformation function.
 * Builds a global lookup (source path -> target path / entry id) from the
 * imported file set, then rewrites every inline markdown link in every file
 * via transformLink.
 *
 * @param {Array} importedFiles - Files with id/sourcePath/targetPath/content.
 * @param {object} options - stripPrefixes, customHandlers, linkMappings, logger.
 * @returns {Array} New file objects with transformed content.
 */
export function globalLinkTransform(importedFiles, options) {
    // Build the global lookup tables
    const sourceToTargetMap = new Map();
    const sourceToIdMap = new Map();
    importedFiles.forEach((file) => {
        sourceToTargetMap.set(file.sourcePath, file.targetPath);
        sourceToIdMap.set(file.sourcePath, file.id);
    });
    const globalContext = {
        sourceToTargetMap,
        sourceToIdMap,
        stripPrefixes: options.stripPrefixes,
        customHandlers: options.customHandlers,
        linkMappings: options.linkMappings,
        logger: options.logger,
    };
    // Matches inline markdown links: [text](url)
    const markdownLinkRegex = /\[([^\]]*)\]\(([^)]+)\)/g;
    return importedFiles.map((file) => {
        const rewritten = file.content.replace(markdownLinkRegex, (match, linkText, linkUrl) => {
            // Per-link context layered over the shared global context
            const linkContext = {
                currentFile: file,
                originalLink: linkUrl,
                anchor: extractAnchor(linkUrl).anchor,
                global: globalContext,
            };
            return transformLink(linkText, linkUrl, linkContext);
        });
        return { ...file, content: rewritten };
    });
}
|
|
255
|
+
/**
 * Infer cross-section path from basePath.
 * Drops a leading 'src/content/docs' prefix and any trailing slash; an empty
 * result maps to '/'.
 *
 * @param basePath - The base path from include pattern (e.g. 'src/content/docs/reference/api')
 * @returns Inferred cross-section path (e.g. '/reference/api')
 */
function inferCrossSectionPath(basePath) {
    let inferred = basePath.replace(/^src\/content\/docs/, '');
    inferred = inferred.replace(/\/$/, '');
    return inferred === '' ? '/' : inferred;
}
|
|
265
|
+
/**
 * Generate link mappings automatically from pathMappings in include patterns.
 *
 * Folder mappings (source key ends with '/') produce a regex with a capture
 * group so the sub-path is carried through; file mappings produce an exact
 * whole-string match. All generated mappings are marked global: true.
 *
 * @param includes - Array of include patterns with pathMappings
 * @param stripPrefixes - Prefixes to strip when generating URLs
 * @returns Array of generated link mappings
 */
export function generateAutoLinkMappings(includes, stripPrefixes = []) {
    const linkMappings = [];
    for (const includePattern of includes) {
        if (!includePattern.pathMappings)
            continue;
        // Fallback cross-section path derived from the include's basePath
        const inferredCrossSection = inferCrossSectionPath(includePattern.basePath);
        for (const [sourcePath, mappingValue] of Object.entries(includePattern.pathMappings)) {
            // Handle both string and enhanced object formats
            const targetPath = typeof mappingValue === 'string' ? mappingValue : mappingValue.target;
            const crossSectionPath = typeof mappingValue === 'object' && mappingValue.crossSectionPath
                ? mappingValue.crossSectionPath
                : inferredCrossSection;
            if (sourcePath.endsWith('/')) {
                // Folder mapping - use regex with capture group.
                // Escape regex metacharacters in the literal source path.
                const sourcePattern = sourcePath.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
                linkMappings.push({
                    pattern: new RegExp(`^${sourcePattern}(.+)$`),
                    // Closure captures sourcePattern/targetPath/crossSectionPath
                    // for this specific mapping entry.
                    replacement: (transformedPath, anchor, context) => {
                        const relativePath = transformedPath.replace(new RegExp(`^${sourcePattern}`), '');
                        let finalPath;
                        if (crossSectionPath && crossSectionPath !== '/') {
                            finalPath = targetPath === ''
                                ? `${crossSectionPath}/${relativePath}`
                                : `${crossSectionPath}/${targetPath}${relativePath}`;
                        }
                        else {
                            finalPath = targetPath === '' ? relativePath : `${targetPath}${relativePath}`;
                        }
                        return generateSiteUrl(finalPath, stripPrefixes);
                    },
                    global: true,
                });
            }
            else {
                // File mapping - exact string match
                const sourcePattern = sourcePath.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
                linkMappings.push({
                    pattern: new RegExp(`^${sourcePattern}$`),
                    replacement: (transformedPath, anchor, context) => {
                        const finalPath = crossSectionPath && crossSectionPath !== '/'
                            ? `${crossSectionPath}/${targetPath}`
                            : targetPath;
                        return generateSiteUrl(finalPath, stripPrefixes);
                    },
                    global: true,
                });
            }
        }
    }
    return linkMappings;
}
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import type { Loader, GithubLoaderOptions } from "./github.types.js";
|
|
2
|
+
/**
 * Loads data from GitHub repositories based on the provided configurations and options.
 *
 * Features:
 * - Sequential processing with spinner feedback for long-running operations
 * - Dry run mode for change detection without actual imports
 * - Configurable logging levels per configuration
 * - Import state tracking for incremental updates
 * - Content store management with optional clearing
 *
 * @return A loader object responsible for managing the data loading process.
 */
export declare function githubLoader({ octokit, configs, fetchOptions, clear, dryRun, logLevel, force, }: GithubLoaderOptions): Loader;
|
|
@@ -0,0 +1,143 @@
|
|
|
1
|
+
import { toCollectionEntry } from "./github.content.js";
|
|
2
|
+
import { performSelectiveCleanup } from "./github.cleanup.js";
|
|
3
|
+
import { performDryRun, displayDryRunResults, updateImportState } from "./github.dryrun.js";
|
|
4
|
+
import { createLogger } from "./github.logger.js";
|
|
5
|
+
/**
 * Performs selective cleanup for configurations with basePath.
 * Configs are processed one at a time; a failure in one config is logged and
 * does not stop the remaining configs.
 *
 * @param configs - Array of configuration objects
 * @param context - Loader context
 * @param octokit - GitHub API client
 * @internal
 */
async function performSelectiveCleanups(configs, context, octokit) {
    const results = [];
    // Process each config sequentially to avoid overwhelming Astro's file watcher
    for (const config of configs) {
        const label = config.name || `${config.owner}/${config.repo}`;
        if (config.enabled === false) {
            context.logger.debug(`Skipping disabled config: ${label}`);
            continue;
        }
        try {
            results.push(await performSelectiveCleanup(config, context, octokit));
        }
        catch (error) {
            // Continue with other configs even if one fails
            context.logger.error(`Selective cleanup failed for ${label}: ${error}`);
        }
    }
    return results;
}
|
|
31
|
+
/**
 * Loads data from GitHub repositories based on the provided configurations and options.
 *
 * Features:
 * - Sequential processing with spinner feedback for long-running operations
 * - Dry run mode for change detection without actual imports
 * - Configurable logging levels per configuration
 * - Import state tracking for incremental updates
 * - Content store management with optional clearing
 *
 * @return A loader object responsible for managing the data loading process.
 */
export function githubLoader({ octokit, configs, fetchOptions = {}, clear = false, dryRun = false, logLevel, force = false, }) {
    return {
        name: "github-loader",
        load: async (context) => {
            const { store } = context;
            // Create global logger with specified level or default
            const globalLogger = createLogger(logLevel || 'default');
            if (dryRun) {
                globalLogger.info("🔍 Dry run mode enabled - checking for changes only");
                try {
                    const results = await performDryRun(configs, context, octokit);
                    displayDryRunResults(results, context.logger);
                    globalLogger.info("\n🚫 Dry run complete - no imports performed");
                    globalLogger.info("💡 Set dryRun: false to perform actual imports");
                    return; // Exit without importing
                }
                catch (error) {
                    globalLogger.error(`Dry run failed: ${error.message}`);
                    throw error;
                }
            }
            globalLogger.debug(`Loading data from ${configs.length} sources`);
            // Always use standard processing - no file deletions to avoid Astro issues
            globalLogger.info(clear ? "Processing with content store clear" : "Processing without content store clear");
            if (clear) {
                store.clear();
            }
            // Process each config sequentially to avoid overwhelming GitHub API/CDN
            for (let i = 0; i < configs.length; i++) {
                const config = configs[i];
                if (config.enabled === false) {
                    globalLogger.debug(`Skipping disabled config: ${config.name || `${config.owner}/${config.repo}`}`);
                    continue;
                }
                // Add small delay between configs to be gentler on GitHub's CDN
                if (i > 0) {
                    await new Promise(resolve => setTimeout(resolve, 1000));
                }
                // Determine the effective log level for this config
                // (loader-level logLevel overrides the per-config setting)
                const effectiveLogLevel = logLevel || config.logLevel || 'default';
                const configLogger = createLogger(effectiveLogLevel);
                const configName = config.name || `${config.owner}/${config.repo}`;
                const repository = `${config.owner}/${config.repo}`;
                // Summary starts in 'error' state and is flipped to 'success'
                // only after a completed import.
                let summary = {
                    configName,
                    repository,
                    ref: config.ref,
                    filesProcessed: 0,
                    filesUpdated: 0,
                    filesUnchanged: 0,
                    duration: 0,
                    status: 'error',
                };
                const startTime = Date.now();
                try {
                    // Perform the import with spinner
                    const stats = await globalLogger.withSpinner(`🔄 Importing ${configName}...`, () => toCollectionEntry({
                        context: { ...context, logger: configLogger },
                        octokit,
                        options: config,
                        fetchOptions,
                        force,
                    }), `✅ ${configName} imported successfully`, `❌ ${configName} import failed`);
                    summary.duration = Date.now() - startTime;
                    summary.filesProcessed = stats?.processed || 0;
                    summary.filesUpdated = stats?.updated || 0;
                    summary.filesUnchanged = stats?.unchanged || 0;
                    summary.assetsDownloaded = stats?.assetsDownloaded || 0;
                    summary.assetsCached = stats?.assetsCached || 0;
                    summary.status = 'success';
                    // Log structured summary
                    configLogger.logImportSummary(summary);
                    // Update state tracking for future dry runs
                    try {
                        // Get the latest commit info to track state
                        const { data } = await octokit.rest.repos.listCommits({
                            owner: config.owner,
                            repo: config.repo,
                            sha: config.ref || 'main',
                            per_page: 1
                        });
                        if (data.length > 0) {
                            await updateImportState(process.cwd(), config, data[0].sha);
                        }
                    }
                    catch (error) {
                        // Don't fail the import if state tracking fails
                        configLogger.debug(`Failed to update import state for ${configName}: ${error}`);
                    }
                }
                catch (error) {
                    summary.duration = Date.now() - startTime;
                    summary.status = 'error';
                    summary.error = error.message;
                    configLogger.logImportSummary(summary);
                    // Continue with other configs even if one fails
                }
            }
        },
    };
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
import { beforeEach, describe, it, expect } from "vitest";
|
|
2
|
+
import { githubLoader } from "./github.loader.js";
|
|
3
|
+
import { globalLinkTransform } from "./github.link-transform.js";
|
|
4
|
+
import { createLogger } from "./github.logger.js";
|
|
5
|
+
import { Octokit } from "octokit";
|
|
6
|
+
// Live-repo fixture used by the smoke test below.
// NOTE(review): the smoke test hits the real GitHub API and presumably
// requires GITHUB_TOKEN in the environment — confirm before running in CI.
const FIXTURES = [
    {
        owner: "awesome-algorand",
        repo: "algokit-cli",
        ref: "docs/starlight-preview",
        path: ".devportal/starlight",
    },
];
describe("githubLoader", () => {
    let octokit;
    beforeEach(() => {
        octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
    });
    // Smoke test: only verifies the loader factory returns without throwing.
    it("should work", async () => {
        const result = githubLoader({ octokit, configs: FIXTURES });
        console.log(result);
    });
    describe("context-aware link transformations", () => {
        it("should handle relative links from API files with contextFilter", () => {
            // Two imported files sharing the same basePath; the first contains
            // a relative TypeDoc-style link ('modules/') to be resolved.
            const testFiles = [
                {
                    id: "api-readme",
                    sourcePath: "docs/code/README.md",
                    targetPath: "src/content/docs/reference/algokit-utils-ts/api/README.md",
                    content: 'Check out the [modules](modules/) for more info.',
                    linkContext: {
                        sourcePath: "docs/code/README.md",
                        targetPath: "src/content/docs/reference/algokit-utils-ts/api/README.md",
                        basePath: "src/content/docs/reference/algokit-utils-ts/api",
                        pathMappings: { "docs/code/": "" }
                    }
                },
                {
                    id: "modules-index",
                    sourcePath: "docs/code/modules/index.md",
                    targetPath: "src/content/docs/reference/algokit-utils-ts/api/modules/index.md",
                    content: 'This is the modules index.',
                    linkContext: {
                        sourcePath: "docs/code/modules/index.md",
                        targetPath: "src/content/docs/reference/algokit-utils-ts/api/modules/index.md",
                        basePath: "src/content/docs/reference/algokit-utils-ts/api",
                        pathMappings: { "docs/code/": "" }
                    }
                }
            ];
            const result = globalLinkTransform(testFiles, {
                stripPrefixes: ['src/content/docs'],
                linkMappings: [
                    {
                        // Mapping applies only to files imported from docs/code/
                        contextFilter: (context) => context.sourcePath.startsWith('docs/code/'),
                        relativeLinks: true,
                        pattern: /.*/,
                        replacement: '',
                        global: false
                    }
                ]
            });
            // The relative link `modules/` should be transformed to `/reference/algokit-utils-ts/api/modules/`
            expect(result[0].content).toContain('[modules](/reference/algokit-utils-ts/api/modules/)');
        });
    });
    describe("logging system", () => {
        it("should create logger with different levels", () => {
            const silentLogger = createLogger('silent');
            const defaultLogger = createLogger('default');
            const verboseLogger = createLogger('verbose');
            const debugLogger = createLogger('debug');
            expect(silentLogger.getLevel()).toBe('silent');
            expect(defaultLogger.getLevel()).toBe('default');
            expect(verboseLogger.getLevel()).toBe('verbose');
            expect(debugLogger.getLevel()).toBe('debug');
        });
        it("should format import summary correctly", () => {
            const logger = createLogger('default');
            const summary = {
                configName: 'Test Config',
                repository: 'test/repo',
                ref: 'main',
                filesProcessed: 10,
                filesUpdated: 5,
                filesUnchanged: 5,
                assetsDownloaded: 3,
                assetsCached: 2,
                duration: 1500,
                status: 'success'
            };
            // This test mainly verifies the types work correctly
            expect(() => logger.logImportSummary(summary)).not.toThrow();
        });
    });
});
|