@larkiny/astro-github-loader 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +675 -0
- package/dist/github.cleanup.d.ts +5 -0
- package/dist/github.cleanup.js +216 -0
- package/dist/github.constants.d.ts +24 -0
- package/dist/github.constants.js +24 -0
- package/dist/github.content.d.ts +138 -0
- package/dist/github.content.js +1016 -0
- package/dist/github.dryrun.d.ts +72 -0
- package/dist/github.dryrun.js +247 -0
- package/dist/github.link-transform.d.ts +77 -0
- package/dist/github.link-transform.js +321 -0
- package/dist/github.loader.d.ts +14 -0
- package/dist/github.loader.js +143 -0
- package/dist/github.loader.spec.d.ts +1 -0
- package/dist/github.loader.spec.js +96 -0
- package/dist/github.logger.d.ts +132 -0
- package/dist/github.logger.js +260 -0
- package/dist/github.sync.d.ts +5 -0
- package/dist/github.sync.js +292 -0
- package/dist/github.types.d.ts +315 -0
- package/dist/github.types.js +1 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.js +5 -0
- package/package.json +66 -0
- package/src/github.cleanup.ts +243 -0
- package/src/github.constants.ts +25 -0
- package/src/github.content.ts +1205 -0
- package/src/github.dryrun.ts +339 -0
- package/src/github.link-transform.ts +452 -0
- package/src/github.loader.spec.ts +106 -0
- package/src/github.loader.ts +189 -0
- package/src/github.logger.ts +324 -0
- package/src/github.types.ts +339 -0
- package/src/index.ts +5 -0
package/dist/github.logger.d.ts
@@ -0,0 +1,132 @@
+/**
+ * Multi-level logging system for astro-github-loader
+ */
+export type LogLevel = 'silent' | 'default' | 'verbose' | 'debug';
+export interface LoggerOptions {
+    level: LogLevel;
+    prefix?: string;
+}
+export interface ImportSummary {
+    configName: string;
+    repository: string;
+    ref?: string;
+    filesProcessed: number;
+    filesUpdated: number;
+    filesUnchanged: number;
+    assetsDownloaded?: number;
+    assetsCached?: number;
+    duration: number;
+    status: 'success' | 'error' | 'cancelled';
+    error?: string;
+}
+export interface SyncSummary {
+    added: number;
+    updated: number;
+    deleted: number;
+    unchanged: number;
+    duration: number;
+}
+export interface CleanupSummary {
+    deleted: number;
+    duration: number;
+}
+/**
+ * Centralized logger with configurable verbosity levels and spinner support for long-running operations
+ */
+export declare class Logger {
+    private level;
+    private prefix;
+    private spinnerInterval?;
+    private spinnerChars;
+    private spinnerIndex;
+    private spinnerStartTime?;
+    constructor(options: LoggerOptions);
+    /**
+     * Set the logging level
+     */
+    setLevel(level: LogLevel): void;
+    /**
+     * Get the current logging level
+     */
+    getLevel(): LogLevel;
+    /**
+     * Check if a specific level should be logged
+     */
+    private shouldLog;
+    /**
+     * Format message with prefix
+     */
+    private formatMessage;
+    /**
+     * Silent level - no output
+     */
+    silent(): void;
+    /**
+     * Default level - summary information only
+     */
+    info(message: string): void;
+    /**
+     * Verbose level - detailed operation information
+     */
+    verbose(message: string): void;
+    /**
+     * Debug level - all information including diagnostics
+     */
+    debug(message: string): void;
+    /**
+     * Error - always shown unless silent
+     */
+    error(message: string): void;
+    /**
+     * Warning - shown at default level and above
+     */
+    warn(message: string): void;
+    /**
+     * Log structured import summary (default level)
+     */
+    logImportSummary(summary: ImportSummary): void;
+    /**
+     * Log sync operation summary (default level)
+     */
+    logSyncSummary(configName: string, summary: SyncSummary): void;
+    /**
+     * Log cleanup operation summary (default level)
+     */
+    logCleanupSummary(configName: string, summary: CleanupSummary): void;
+    /**
+     * Log file-level processing (verbose level)
+     */
+    logFileProcessing(action: string, filePath: string, details?: string): void;
+    /**
+     * Log asset processing (verbose level)
+     */
+    logAssetProcessing(action: string, assetPath: string, details?: string): void;
+    /**
+     * Create a child logger with additional prefix
+     */
+    child(prefix: string): Logger;
+    /**
+     * Time a function execution and log the result
+     */
+    time<T>(label: string, fn: () => Promise<T>): Promise<T>;
+    /**
+     * Format duration in human-readable format
+     */
+    private formatDuration;
+    /**
+     * Start a spinner with duration timer for long-running operations
+     */
+    startSpinner(message?: string): void;
+    /**
+     * Stop the spinner and optionally show a final message
+     */
+    stopSpinner(finalMessage?: string): void;
+    /**
+     * Execute a function with spinner feedback
+     */
+    withSpinner<T>(message: string, fn: () => Promise<T>, successMessage?: string, errorMessage?: string): Promise<T>;
+}
+/**
+ * Create a logger instance with the specified level
+ */
+export declare function createLogger(level?: LogLevel, prefix?: string): Logger;
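The declaration file above is the package's full logging surface. As a rough usage sketch (not taken from the package README; the import path and option values are assumptions based only on the types declared above), a consumer inside the package could drive it like this:

import { createLogger } from "./github.logger.js";

// 'verbose' also prints per-file and per-asset messages; 'default' prints summaries only.
const logger = createLogger('verbose', '[astro-github-loader]');

logger.info('Starting import');            // shown at default, verbose, and debug
logger.verbose('Fetching docs/index.md');  // shown at verbose and debug
logger.debug('Raw API response received'); // shown only at debug

logger.logImportSummary({
    configName: 'docs',
    repository: 'owner/repo',
    ref: 'main',
    filesProcessed: 12,
    filesUpdated: 3,
    filesUnchanged: 9,
    duration: 4200,
    status: 'success',
});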
package/dist/github.logger.js
@@ -0,0 +1,260 @@
+/**
+ * Multi-level logging system for astro-github-loader
+ */
+/**
+ * Centralized logger with configurable verbosity levels and spinner support for long-running operations
+ */
+export class Logger {
+    constructor(options) {
+        this.spinnerChars = ['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'];
+        this.spinnerIndex = 0;
+        this.level = options.level;
+        this.prefix = options.prefix || '';
+    }
+    /**
+     * Set the logging level
+     */
+    setLevel(level) {
+        this.level = level;
+    }
+    /**
+     * Get the current logging level
+     */
+    getLevel() {
+        return this.level;
+    }
+    /**
+     * Check if a specific level should be logged
+     */
+    shouldLog(level) {
+        const levels = {
+            silent: 0,
+            default: 1,
+            verbose: 2,
+            debug: 3,
+        };
+        return levels[this.level] >= levels[level];
+    }
+    /**
+     * Format message with prefix
+     */
+    formatMessage(message) {
+        return this.prefix ? `${this.prefix} ${message}` : message;
+    }
+    /**
+     * Silent level - no output
+     */
+    silent() {
+        // Intentionally empty
+    }
+    /**
+     * Default level - summary information only
+     */
+    info(message) {
+        if (this.shouldLog('default')) {
+            console.log(this.formatMessage(message));
+        }
+    }
+    /**
+     * Verbose level - detailed operation information
+     */
+    verbose(message) {
+        if (this.shouldLog('verbose')) {
+            console.log(this.formatMessage(message));
+        }
+    }
+    /**
+     * Debug level - all information including diagnostics
+     */
+    debug(message) {
+        if (this.shouldLog('debug')) {
+            console.log(this.formatMessage(message));
+        }
+    }
+    /**
+     * Error - always shown unless silent
+     */
+    error(message) {
+        if (this.shouldLog('default')) {
+            console.error(this.formatMessage(message));
+        }
+    }
+    /**
+     * Warning - shown at default level and above
+     */
+    warn(message) {
+        if (this.shouldLog('default')) {
+            console.warn(this.formatMessage(message));
+        }
+    }
+    /**
+     * Log structured import summary (default level)
+     */
+    logImportSummary(summary) {
+        if (!this.shouldLog('default'))
+            return;
+        const statusIcon = summary.status === 'success' ? '✅' : summary.status === 'error' ? '❌' : '🚫';
+        this.info('');
+        this.info(`📊 Import Summary: ${summary.configName}`);
+        this.info(`├─ Repository: ${summary.repository}${summary.ref ? `@${summary.ref}` : ''}`);
+        this.info(`├─ Files: ${summary.filesProcessed} processed, ${summary.filesUpdated} updated, ${summary.filesUnchanged} unchanged`);
+        if (summary.assetsDownloaded !== undefined || summary.assetsCached !== undefined) {
+            const downloaded = summary.assetsDownloaded || 0;
+            const cached = summary.assetsCached || 0;
+            this.info(`├─ Assets: ${downloaded} downloaded, ${cached} cached`);
+        }
+        this.info(`├─ Duration: ${(summary.duration / 1000).toFixed(1)}s`);
+        this.info(`└─ Status: ${statusIcon} ${summary.status === 'success' ? 'Success' : summary.status === 'error' ? `Error: ${summary.error}` : 'Cancelled'}`);
+        this.info('');
+    }
+    /**
+     * Log sync operation summary (default level)
+     */
+    logSyncSummary(configName, summary) {
+        if (!this.shouldLog('default'))
+            return;
+        if (summary.added > 0 || summary.updated > 0 || summary.deleted > 0) {
+            this.info(`Sync completed for ${configName}: ${summary.added} added, ${summary.updated} updated, ${summary.deleted} deleted (${summary.duration}ms)`);
+        }
+        else {
+            this.info(`No changes needed for ${configName} (${summary.duration}ms)`);
+        }
+    }
+    /**
+     * Log cleanup operation summary (default level)
+     */
+    logCleanupSummary(configName, summary) {
+        if (!this.shouldLog('default'))
+            return;
+        if (summary.deleted > 0) {
+            this.info(`Cleanup completed for ${configName}: ${summary.deleted} obsolete files deleted (${summary.duration}ms)`);
+        }
+        else {
+            this.debug(`No cleanup needed for ${configName} (${summary.duration}ms)`);
+        }
+    }
+    /**
+     * Log file-level processing (verbose level)
+     */
+    logFileProcessing(action, filePath, details) {
+        const message = details ? `${action}: ${filePath} - ${details}` : `${action}: ${filePath}`;
+        this.verbose(message);
+    }
+    /**
+     * Log asset processing (verbose level)
+     */
+    logAssetProcessing(action, assetPath, details) {
+        const message = details ? `Asset ${action}: ${assetPath} - ${details}` : `Asset ${action}: ${assetPath}`;
+        this.verbose(message);
+    }
+    /**
+     * Create a child logger with additional prefix
+     */
+    child(prefix) {
+        return new Logger({
+            level: this.level,
+            prefix: this.prefix ? `${this.prefix}${prefix}` : prefix,
+        });
+    }
+    /**
+     * Time a function execution and log the result
+     */
+    async time(label, fn) {
+        const startTime = Date.now();
+        this.debug(`⏱️ Starting: ${label}`);
+        try {
+            const result = await fn();
+            const duration = Date.now() - startTime;
+            this.verbose(`✅ Completed: ${label} (${duration}ms)`);
+            return result;
+        }
+        catch (error) {
+            const duration = Date.now() - startTime;
+            this.error(`❌ Failed: ${label} (${duration}ms): ${error}`);
+            throw error;
+        }
+    }
+    /**
+     * Format duration in human-readable format
+     */
+    formatDuration(seconds) {
+        if (seconds < 60) {
+            return `${seconds}s`;
+        }
+        else if (seconds < 3600) {
+            const mins = Math.floor(seconds / 60);
+            const secs = seconds % 60;
+            return `${mins}m ${secs}s`;
+        }
+        else {
+            const hours = Math.floor(seconds / 3600);
+            const mins = Math.floor((seconds % 3600) / 60);
+            return `${hours}h ${mins}m`;
+        }
+    }
+    /**
+     * Start a spinner with duration timer for long-running operations
+     */
+    startSpinner(message = 'Processing...') {
+        if (this.level === 'silent')
+            return;
+        this.spinnerStartTime = Date.now();
+        this.spinnerIndex = 0;
+        const updateSpinner = () => {
+            const elapsed = Math.floor((Date.now() - this.spinnerStartTime) / 1000);
+            const spinner = this.spinnerChars[this.spinnerIndex];
+            const duration = this.formatDuration(elapsed);
+            const formattedMessage = this.formatMessage(`${message} ${spinner} (${duration})`);
+            process.stdout.write(`\r${formattedMessage}`);
+            this.spinnerIndex = (this.spinnerIndex + 1) % this.spinnerChars.length;
+        };
+        // Initial display
+        updateSpinner();
+        // Update every 100ms
+        this.spinnerInterval = setInterval(updateSpinner, 100);
+    }
+    /**
+     * Stop the spinner and optionally show a final message
+     */
+    stopSpinner(finalMessage) {
+        if (this.spinnerInterval) {
+            clearInterval(this.spinnerInterval);
+            this.spinnerInterval = undefined;
+        }
+        if (finalMessage && this.spinnerStartTime) {
+            const totalTime = Math.floor((Date.now() - this.spinnerStartTime) / 1000);
+            const duration = this.formatDuration(totalTime);
+            const formattedMessage = this.formatMessage(`${finalMessage} (${duration})`);
+            process.stdout.write(`\r${formattedMessage}\n`);
+        }
+        else if (finalMessage) {
+            const formattedMessage = this.formatMessage(finalMessage);
+            process.stdout.write(`\r${formattedMessage}\n`);
+        }
+        else {
+            process.stdout.write('\r\x1b[K'); // Clear the line
+        }
+        this.spinnerStartTime = undefined;
+    }
+    /**
+     * Execute a function with spinner feedback
+     */
+    async withSpinner(message, fn, successMessage, errorMessage) {
+        this.startSpinner(message);
+        try {
+            const result = await fn();
+            this.stopSpinner(successMessage || `✅ ${message.replace(/^[🔄⏳]?\s*/, '')} completed`);
+            return result;
+        }
+        catch (error) {
+            this.stopSpinner(errorMessage || `❌ ${message.replace(/^[🔄⏳]?\s*/, '')} failed`);
+            throw error;
+        }
+    }
+}
+/**
+ * Create a logger instance with the specified level
+ */
+export function createLogger(level = 'default', prefix) {
+    return new Logger({ level, prefix });
+}
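A short sketch of how the spinner and child-logger helpers compose (illustrative only; fetchAllFiles and writeEntries are hypothetical placeholders, not functions from this package):

import { createLogger } from "./github.logger.js";

const logger = createLogger('default');
const repoLogger = logger.child('[owner/repo]'); // every message now carries this prefix

// Renders "[owner/repo] Importing files ⠋ (3s)" on one line, redrawing every 100ms,
// then replaces it with the success or error message plus the total duration.
const files = await repoLogger.withSpinner(
    'Importing files',
    async () => fetchAllFiles(),   // hypothetical helper
    '✅ Import finished',
    '❌ Import failed',
);

// time() logs the start at debug level and the completion (with ms) at verbose level.
await repoLogger.time('write content entries', () => writeEntries(files)); // hypothetical helper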
package/dist/github.sync.d.ts
@@ -0,0 +1,5 @@
+import type { ImportOptions, SyncStats, LoaderContext } from "./github.types.js";
+/**
+ * Performs incremental sync for a single import configuration
+ */
+export declare function performIncrementalSync(config: ImportOptions, context: LoaderContext, octokit: any, signal?: AbortSignal): Promise<SyncStats>;
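ImportOptions, SyncStats, and LoaderContext are defined in github.types.ts, which is not reproduced in this excerpt. For orientation, the shapes the implementation below actually reads and writes look roughly like this (field names inferred from usage in github.sync.js, not copied from github.types.ts):

// Inferred shapes only; the authoritative definitions live in github.types.ts.
interface ManifestEntry {
    path: string;          // path of the file inside the GitHub repository
    localPath: string;     // where the file is written locally
    lastModified?: string;
    etag?: string;         // blob SHA, used as a change marker
}

interface SyncManifest {
    files: Record<string, ManifestEntry>; // keyed by generateId(path)
    lastSync: string;                     // ISO timestamp
    configHash?: string;                  // md5 of { owner, repo, ref, includes }
}

interface SyncStats {
    added: number;
    updated: number;
    deleted: number;
    unchanged: number;
    duration: number; // milliseconds
}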
package/dist/github.sync.js
@@ -0,0 +1,292 @@
+import { promises as fs } from "node:fs";
+import { existsSync } from "node:fs";
+import { join } from "node:path";
+import { createHash } from "node:crypto";
+import { generateId, generatePath, shouldIncludeFile, syncEntry } from "./github.content.js";
+const MANIFEST_FILENAME = '.astro-github-manifest.json';
+const SLEEP_BETWEEN_DELETES = 10; // ms between file deletions
+/**
+ * Creates a hash of the configuration to detect changes
+ */
+function createConfigHash(options) {
+    const configForHashing = {
+        owner: options.owner,
+        repo: options.repo,
+        ref: options.ref,
+        includes: options.includes
+    };
+    return createHash('md5').update(JSON.stringify(configForHashing)).digest('hex');
+}
+/**
+ * Loads the sync manifest from disk
+ */
+async function loadManifest(basePath) {
+    const manifestPath = join(basePath, MANIFEST_FILENAME);
+    if (!existsSync(manifestPath)) {
+        return {
+            files: {},
+            lastSync: new Date().toISOString()
+        };
+    }
+    try {
+        const content = await fs.readFile(manifestPath, 'utf-8');
+        return JSON.parse(content);
+    }
+    catch (error) {
+        console.warn(`Failed to load manifest from ${manifestPath}, starting fresh:`, error);
+        return {
+            files: {},
+            lastSync: new Date().toISOString()
+        };
+    }
+}
+/**
+ * Saves the sync manifest to disk
+ */
+async function saveManifest(basePath, manifest) {
+    const manifestPath = join(basePath, MANIFEST_FILENAME);
+    // Ensure directory exists
+    if (!existsSync(basePath)) {
+        await fs.mkdir(basePath, { recursive: true });
+    }
+    try {
+        await fs.writeFile(manifestPath, JSON.stringify(manifest, null, 2), 'utf-8');
+    }
+    catch (error) {
+        console.warn(`Failed to save manifest to ${manifestPath}:`, error);
+    }
+}
+/**
+ * Discovers all files in a GitHub repository directory
+ */
+async function discoverRemoteFiles(octokit, options, signal) {
+    const { owner, repo, ref = "main" } = options;
+    const files = new Map();
+    // Get all unique directory prefixes from include patterns to limit scanning
+    const directoriesToScan = new Set();
+    if (options.includes && options.includes.length > 0) {
+        for (const includePattern of options.includes) {
+            // Extract directory part from pattern (before any glob wildcards)
+            const pattern = includePattern.pattern;
+            const beforeGlob = pattern.split(/[*?{]/)[0];
+            const dirPart = beforeGlob.includes('/') ? beforeGlob.substring(0, beforeGlob.lastIndexOf('/')) : '';
+            directoriesToScan.add(dirPart);
+        }
+    }
+    else {
+        // If no includes specified, scan from root
+        directoriesToScan.add('');
+    }
+    async function processDirectory(dirPath) {
+        try {
+            const { data } = await octokit.rest.repos.getContent({
+                owner,
+                repo,
+                path: dirPath,
+                ref,
+                request: { signal }
+            });
+            if (!Array.isArray(data)) {
+                // Single file
+                if (data.type === 'file' && shouldIncludeFile(data.path, options).included) {
+                    const id = generateId(data.path);
+                    const includeResult = shouldIncludeFile(data.path, options);
+                    const localPath = generatePath(data.path, includeResult.included ? includeResult.matchedPattern : null, options);
+                    files.set(id, {
+                        path: data.path,
+                        localPath,
+                        lastModified: data.last_modified || undefined,
+                        etag: data.sha // Use SHA as ETag equivalent
+                    });
+                }
+                return;
+            }
+            // Directory listing
+            const promises = data
+                .filter(({ type, path }) => {
+                    if (type === "dir")
+                        return true;
+                    if (type === "file")
+                        return shouldIncludeFile(path, options).included;
+                    return false;
+                })
+                .map(async ({ type, path: itemPath }) => {
+                    if (type === "dir") {
+                        await processDirectory(itemPath);
+                    }
+                    else if (type === "file") {
+                        const id = generateId(itemPath);
+                        const includeResult = shouldIncludeFile(itemPath, options);
+                        const localPath = generatePath(itemPath, includeResult.included ? includeResult.matchedPattern : null, options);
+                        files.set(id, {
+                            path: itemPath,
+                            localPath,
+                            etag: data.find(item => item.path === itemPath)?.sha
+                        });
+                    }
+                });
+            await Promise.all(promises);
+        }
+        catch (error) {
+            if (signal?.aborted)
+                throw error;
+            console.warn(`Failed to process directory ${dirPath}:`, error);
+        }
+    }
+    // Process only the directories that match our include patterns
+    for (const dirPath of directoriesToScan) {
+        await processDirectory(dirPath);
+    }
+    return files;
+}
+/**
+ * Creates a sync plan by comparing remote files with the local manifest
+ */
+async function createSyncPlan(remoteFiles, manifest, options) {
+    const plan = {
+        toAdd: [],
+        toUpdate: [],
+        toDelete: [],
+        unchanged: []
+    };
+    // Check remote files against local manifest
+    for (const [id, remoteEntry] of remoteFiles) {
+        const localEntry = manifest.files[id];
+        if (!localEntry) {
+            // New file
+            plan.toAdd.push(remoteEntry);
+        }
+        else if (remoteEntry.etag !== localEntry.etag ||
+            remoteEntry.lastModified !== localEntry.lastModified ||
+            !existsSync(localEntry.localPath)) {
+            // Changed or missing local file
+            plan.toUpdate.push({ ...remoteEntry, localPath: localEntry.localPath });
+        }
+        else {
+            // Unchanged
+            plan.unchanged.push(localEntry);
+        }
+    }
+    // Check for files to delete (in local manifest but not in remote)
+    for (const [id, localEntry] of Object.entries(manifest.files)) {
+        if (!remoteFiles.has(id)) {
+            plan.toDelete.push(localEntry);
+        }
+    }
+    return plan;
+}
+/**
+ * Sleep utility for pacing file operations
+ */
+function sleep(ms) {
+    return new Promise(resolve => setTimeout(resolve, ms));
+}
+/**
+ * Executes the sync plan with proper pacing to avoid Astro issues
+ */
+async function executeSyncPlan(plan, options, context, octokit, signal) {
+    const { logger } = context;
+    // Delete obsolete files first (with pacing)
+    for (const entry of plan.toDelete) {
+        try {
+            if (existsSync(entry.localPath)) {
+                await fs.unlink(entry.localPath);
+                logger.debug(`Deleted ${entry.localPath}`);
+            }
+            await sleep(SLEEP_BETWEEN_DELETES);
+        }
+        catch (error) {
+            logger.warn(`Failed to delete ${entry.localPath}: ${error}`);
+        }
+    }
+    // Process additions and updates (can be done in parallel)
+    const processFile = async (entry) => {
+        try {
+            const { owner, repo, ref = "main" } = options;
+            const { data } = await octokit.rest.repos.getContent({
+                owner,
+                repo,
+                path: entry.path,
+                ref,
+                request: { signal }
+            });
+            if (Array.isArray(data) || data.type !== 'file' || !data.download_url) {
+                throw new Error(`${entry.path} is not a valid file`);
+            }
+            await syncEntry(context, { url: data.download_url, editUrl: data.url }, entry.path, options, octokit, { signal });
+            logger.debug(`Synced ${entry.path} -> ${entry.localPath}`);
+        }
+        catch (error) {
+            if (signal?.aborted)
+                throw error;
+            logger.error(`Failed to sync ${entry.path}: ${error}`);
+        }
+    };
+    // Process additions and updates with controlled concurrency
+    const allFilesToProcess = [...plan.toAdd, ...plan.toUpdate];
+    const concurrency = 5; // Limit concurrent operations
+    for (let i = 0; i < allFilesToProcess.length; i += concurrency) {
+        const batch = allFilesToProcess.slice(i, i + concurrency);
+        await Promise.all(batch.map(processFile));
+    }
+}
+/**
+ * Performs incremental sync for a single import configuration
+ */
+export async function performIncrementalSync(config, context, octokit, signal) {
+    const startTime = Date.now();
+    const { logger } = context;
+    const configName = config.name || `${config.owner}/${config.repo}`;
+    if (!config.includes || config.includes.length === 0) {
+        throw new Error(`includes patterns are required for incremental sync in config: ${configName}`);
+    }
+    logger.debug(`Starting incremental sync for ${configName}`);
+    try {
+        // Load existing manifest (using first include pattern's base path)
+        const manifestPath = config.includes[0].basePath;
+        const manifest = await loadManifest(manifestPath);
+        // Check if config changed (force full sync if it did)
+        const currentConfigHash = createConfigHash(config);
+        const configChanged = manifest.configHash && manifest.configHash !== currentConfigHash;
+        if (configChanged) {
+            logger.info(`Configuration changed for ${configName}, performing full sync`);
+            manifest.files = {}; // Clear manifest to force full re-sync
+        }
+        // Discover remote files
+        const remoteFiles = await discoverRemoteFiles(octokit, config, signal);
+        // Create sync plan
+        const plan = await createSyncPlan(remoteFiles, manifest, config);
+        // Execute the sync plan
+        await executeSyncPlan(plan, config, context, octokit, signal);
+        // Update manifest
+        const newManifest = {
+            files: Object.fromEntries(remoteFiles),
+            lastSync: new Date().toISOString(),
+            configHash: currentConfigHash
+        };
+        await saveManifest(manifestPath, newManifest);
+        const duration = Date.now() - startTime;
+        const stats = {
+            added: plan.toAdd.length,
+            updated: plan.toUpdate.length,
+            deleted: plan.toDelete.length,
+            unchanged: plan.unchanged.length,
+            duration
+        };
+        // Log summary
+        logger.info(`Sync completed for ${configName}: ` +
+            `${stats.added} added, ${stats.updated} updated, ` +
+            `${stats.deleted} deleted, ${stats.unchanged} unchanged ` +
+            `(${duration}ms)`);
+        return stats;
+    }
+    catch (error) {
+        if (signal?.aborted) {
+            logger.info(`Sync cancelled for ${configName}`);
+            throw error;
+        }
+        const duration = Date.now() - startTime;
+        logger.error(`Sync failed for ${configName} after ${duration}ms: ${error}`);
+        throw error;
+    }
+}
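performIncrementalSync is normally invoked by the loader in github.loader.ts (not shown in this excerpt). A minimal standalone sketch of the call shape, assuming the octokit package for the client and a config matching the ImportOptions fields read above (owner, repo, ref, name, includes[].pattern / includes[].basePath), might look like:

import { Octokit } from "octokit";
import { performIncrementalSync } from "./github.sync.js";
import type { LoaderContext } from "./github.types.js";

const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
const controller = new AbortController();

// Illustrative config only; see github.types.ts for the real ImportOptions shape.
const config = {
    name: 'docs',
    owner: 'withastro',
    repo: 'docs',
    ref: 'main',
    includes: [
        { pattern: 'src/content/docs/en/**/*.md', basePath: 'src/content/imported' },
    ],
};

// The real LoaderContext is supplied by Astro's content loader API at runtime;
// among other things it carries the `logger` used throughout the sync code above.
declare const loaderContext: LoaderContext;

const stats = await performIncrementalSync(config, loaderContext, octokit, controller.signal);
console.log(stats); // { added, updated, deleted, unchanged, duration }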