@larkiny/astro-github-loader 0.11.3 → 0.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +35 -55
- package/dist/github.assets.d.ts +70 -0
- package/dist/github.assets.js +253 -0
- package/dist/github.auth.js +13 -9
- package/dist/github.cleanup.d.ts +3 -2
- package/dist/github.cleanup.js +30 -23
- package/dist/github.constants.d.ts +0 -16
- package/dist/github.constants.js +0 -16
- package/dist/github.content.d.ts +5 -131
- package/dist/github.content.js +152 -794
- package/dist/github.dryrun.d.ts +9 -5
- package/dist/github.dryrun.js +49 -25
- package/dist/github.link-transform.d.ts +2 -2
- package/dist/github.link-transform.js +68 -57
- package/dist/github.loader.js +30 -46
- package/dist/github.logger.d.ts +2 -2
- package/dist/github.logger.js +33 -24
- package/dist/github.paths.d.ts +76 -0
- package/dist/github.paths.js +190 -0
- package/dist/github.storage.d.ts +16 -0
- package/dist/github.storage.js +115 -0
- package/dist/github.types.d.ts +40 -4
- package/dist/index.d.ts +8 -6
- package/dist/index.js +3 -6
- package/dist/test-helpers.d.ts +130 -0
- package/dist/test-helpers.js +194 -0
- package/package.json +3 -1
- package/src/github.assets.spec.ts +717 -0
- package/src/github.assets.ts +365 -0
- package/src/github.auth.spec.ts +245 -0
- package/src/github.auth.ts +24 -10
- package/src/github.cleanup.spec.ts +380 -0
- package/src/github.cleanup.ts +91 -47
- package/src/github.constants.ts +0 -17
- package/src/github.content.spec.ts +305 -454
- package/src/github.content.ts +259 -957
- package/src/github.dryrun.spec.ts +598 -0
- package/src/github.dryrun.ts +108 -54
- package/src/github.link-transform.spec.ts +1345 -0
- package/src/github.link-transform.ts +177 -95
- package/src/github.loader.spec.ts +75 -50
- package/src/github.loader.ts +101 -76
- package/src/github.logger.spec.ts +795 -0
- package/src/github.logger.ts +77 -35
- package/src/github.paths.spec.ts +523 -0
- package/src/github.paths.ts +259 -0
- package/src/github.storage.spec.ts +377 -0
- package/src/github.storage.ts +135 -0
- package/src/github.types.ts +54 -9
- package/src/index.ts +43 -6
- package/src/test-helpers.ts +215 -0
package/src/github.cleanup.spec.ts
ADDED
@@ -0,0 +1,380 @@
+import { beforeEach, describe, it, expect, vi, afterEach } from "vitest";
+import { performSelectiveCleanup } from "./github.cleanup.js";
+import type { ImportOptions, SyncStats } from "./github.types.js";
+import { createMockContext, createMockOctokit } from "./test-helpers.js";
+import { promises as fs } from "node:fs";
+import { existsSync } from "node:fs";
+
+// Mock the filesystem modules
+vi.mock("node:fs/promises");
+vi.mock("node:fs");
+
+describe("github.cleanup", () => {
+  beforeEach(() => {
+    vi.clearAllMocks();
+  });
+
+  afterEach(() => {
+    vi.restoreAllMocks();
+  });
+
+  describe("performSelectiveCleanup", () => {
+    it("should return zero stats when repository has no include patterns", async () => {
+      const { octokit } = createMockOctokit();
+      const ctx = createMockContext();
+
+      const config: ImportOptions = {
+        name: "Test Repo",
+        owner: "test-owner",
+        repo: "test-repo",
+        ref: "main",
+        includes: [],
+      };
+
+      const stats: SyncStats = await performSelectiveCleanup(
+        config,
+        ctx as any,
+        octokit,
+      );
+
+      expect(stats.deleted).toBe(0);
+      expect(stats.added).toBe(0);
+      expect(stats.updated).toBe(0);
+      expect(stats.unchanged).toBe(0);
+      expect(stats.duration).toBeGreaterThanOrEqual(0);
+    });
+
+    it("should detect orphaned files that are not in the remote repository tree", async () => {
+      const { octokit, spies } = createMockOctokit();
+      const ctx = createMockContext();
+
+      const config: ImportOptions = {
+        name: "Test Repo",
+        owner: "test-owner",
+        repo: "test-repo",
+        ref: "main",
+        includes: [
+          {
+            pattern: "docs/**/*.md",
+            basePath: "/test-base",
+          },
+        ],
+      };
+
+      // Mock existsSync to return true for basePath and orphaned file
+      vi.mocked(existsSync).mockImplementation((path) => {
+        return path === "/test-base" || path === "/test-base/orphaned-file.md";
+      });
+
+      // Mock readdir to return local files including an orphaned file
+      vi.mocked(fs.readdir).mockResolvedValue([
+        {
+          name: "orphaned-file.md",
+          isFile: () => true,
+          isDirectory: () => false,
+        } as any,
+      ]);
+
+      // Mock getContent to return only files that exist in remote
+      spies.getContentSpy.mockResolvedValueOnce({
+        data: [
+          {
+            type: "file",
+            path: "docs/valid-file.md",
+            name: "valid-file.md",
+            sha: "abc123",
+          },
+        ],
+        status: 200,
+        url: "",
+        headers: {},
+      } as any);
+
+      // Mock unlink for file deletion
+      vi.mocked(fs.unlink).mockResolvedValue(undefined);
+
+      const stats: SyncStats = await performSelectiveCleanup(
+        config,
+        ctx as any,
+        octokit,
+      );
+
+      // The orphaned file should be detected and deleted
+      expect(stats.deleted).toBe(1);
+      expect(vi.mocked(fs.unlink)).toHaveBeenCalledWith(
+        "/test-base/orphaned-file.md",
+      );
+    });
+
+    it("should track deletion stats correctly", async () => {
+      const { octokit, spies } = createMockOctokit();
+      const ctx = createMockContext();
+
+      const config: ImportOptions = {
+        name: "Test Repo",
+        owner: "test-owner",
+        repo: "test-repo",
+        ref: "main",
+        includes: [
+          {
+            pattern: "docs/**/*.md",
+            basePath: "/test-base",
+          },
+        ],
+      };
+
+      // Mock existsSync
+      vi.mocked(existsSync).mockImplementation((path) => {
+        return (
+          path === "/test-base" ||
+          path === "/test-base/orphan1.md" ||
+          path === "/test-base/orphan2.md" ||
+          path === "/test-base/orphan3.md"
+        );
+      });
+
+      // Mock readdir to return multiple orphaned files
+      vi.mocked(fs.readdir).mockResolvedValue([
+        {
+          name: "orphan1.md",
+          isFile: () => true,
+          isDirectory: () => false,
+        } as any,
+        {
+          name: "orphan2.md",
+          isFile: () => true,
+          isDirectory: () => false,
+        } as any,
+        {
+          name: "orphan3.md",
+          isFile: () => true,
+          isDirectory: () => false,
+        } as any,
+      ]);
+
+      // Mock getContent to return empty (no remote files match)
+      spies.getContentSpy.mockResolvedValue({
+        data: [],
+        status: 200,
+        url: "",
+        headers: {},
+      } as any);
+
+      // Mock unlink
+      vi.mocked(fs.unlink).mockResolvedValue(undefined);
+
+      const stats: SyncStats = await performSelectiveCleanup(
+        config,
+        ctx as any,
+        octokit,
+      );
+
+      expect(stats.deleted).toBe(3);
+      expect(vi.mocked(fs.unlink)).toHaveBeenCalledTimes(3);
+      expect(stats.duration).toBeGreaterThanOrEqual(0);
+    });
+
+    it("should handle Octokit API failures gracefully", async () => {
+      const { octokit, spies } = createMockOctokit();
+      const ctx = createMockContext();
+
+      const config: ImportOptions = {
+        name: "Test Repo",
+        owner: "test-owner",
+        repo: "test-repo",
+        ref: "main",
+        includes: [
+          {
+            pattern: "docs/**/*.md",
+            basePath: "/test-base",
+          },
+        ],
+      };
+
+      // Mock existsSync
+      vi.mocked(existsSync).mockReturnValue(true);
+
+      // Mock readdir to return local files
+      vi.mocked(fs.readdir).mockResolvedValue([
+        {
+          name: "some-file.md",
+          isFile: () => true,
+          isDirectory: () => false,
+        } as any,
+      ]);
+
+      // Mock unlink for file deletion
+      vi.mocked(fs.unlink).mockResolvedValue(undefined);
+
+      // Mock getContent to throw an error (this happens when fetching expected files)
+      // NOTE: Current behavior - when API fails, processDirectory catches the error and logs a warning,
+      // but getExpectedFiles returns an empty Set. This causes all local files to be treated as orphans.
+      spies.getContentSpy.mockRejectedValue(new Error("API rate limit exceeded"));
+
+      const stats: SyncStats = await performSelectiveCleanup(
+        config,
+        ctx as any,
+        octokit,
+      );
+
+      // Current behavior: When API fails, expectedFiles is empty, so all local files are deleted
+      expect(stats.deleted).toBe(1);
+      expect(stats.added).toBe(0);
+      expect(stats.updated).toBe(0);
+      expect(stats.unchanged).toBe(0);
+      expect(stats.duration).toBeGreaterThanOrEqual(0);
+
+      // Should log a warning from processDirectory
+      expect(ctx.logger.warn).toHaveBeenCalled();
+
+      // Files are deleted because empty expectedFiles makes all local files orphans
+      expect(vi.mocked(fs.unlink)).toHaveBeenCalledWith("/test-base/some-file.md");
+    });
+
+    it("should skip cleanup when no local files exist (fresh import)", async () => {
+      const { octokit } = createMockOctokit();
+      const ctx = createMockContext();
+
+      const config: ImportOptions = {
+        name: "Test Repo",
+        owner: "test-owner",
+        repo: "test-repo",
+        ref: "main",
+        includes: [
+          {
+            pattern: "docs/**/*.md",
+            basePath: "/test-base",
+          },
+        ],
+      };
+
+      // Mock existsSync to return false (basePath doesn't exist)
+      vi.mocked(existsSync).mockReturnValue(false);
+
+      const stats: SyncStats = await performSelectiveCleanup(
+        config,
+        ctx as any,
+        octokit,
+      );
+
+      expect(stats.deleted).toBe(0);
+      expect(vi.mocked(fs.readdir)).not.toHaveBeenCalled();
+      expect(vi.mocked(fs.unlink)).not.toHaveBeenCalled();
+    });
+
+    it("should handle file deletion errors without failing the entire cleanup", async () => {
+      const { octokit, spies } = createMockOctokit();
+      const ctx = createMockContext();
+
+      const config: ImportOptions = {
+        name: "Test Repo",
+        owner: "test-owner",
+        repo: "test-repo",
+        ref: "main",
+        includes: [
+          {
+            pattern: "docs/**/*.md",
+            basePath: "/test-base",
+          },
+        ],
+      };
+
+      // Mock existsSync
+      vi.mocked(existsSync).mockImplementation((path) => {
+        return (
+          path === "/test-base" ||
+          path === "/test-base/file1.md" ||
+          path === "/test-base/file2.md"
+        );
+      });
+
+      // Mock readdir
+      vi.mocked(fs.readdir).mockResolvedValue([
+        {
+          name: "file1.md",
+          isFile: () => true,
+          isDirectory: () => false,
+        } as any,
+        {
+          name: "file2.md",
+          isFile: () => true,
+          isDirectory: () => false,
+        } as any,
+      ]);
+
+      // Mock getContent to return empty
+      spies.getContentSpy.mockResolvedValue({
+        data: [],
+        status: 200,
+        url: "",
+        headers: {},
+      } as any);
+
+      // Mock unlink to fail on first file, succeed on second
+      vi.mocked(fs.unlink)
+        .mockRejectedValueOnce(new Error("Permission denied"))
+        .mockResolvedValueOnce(undefined);
+
+      const stats: SyncStats = await performSelectiveCleanup(
+        config,
+        ctx as any,
+        octokit,
+      );
+
+      // Should still delete the second file despite first failure
+      expect(stats.deleted).toBe(1);
+      expect(ctx.logger.warn).toHaveBeenCalled();
+    });
+
+    it("should handle AbortSignal cancellation", async () => {
+      const { octokit, spies } = createMockOctokit();
+      const ctx = createMockContext();
+      const abortController = new AbortController();
+
+      const config: ImportOptions = {
+        name: "Test Repo",
+        owner: "test-owner",
+        repo: "test-repo",
+        ref: "main",
+        includes: [
+          {
+            pattern: "docs/**/*.md",
+            basePath: "/test-base",
+          },
+        ],
+      };
+
+      // Mock existsSync
+      vi.mocked(existsSync).mockReturnValue(true);
+
+      // Mock readdir
+      vi.mocked(fs.readdir).mockResolvedValue([
+        {
+          name: "file.md",
+          isFile: () => true,
+          isDirectory: () => false,
+        } as any,
+      ]);
+
+      // Mock getContent to throw abort error
+      const abortError = new Error("Aborted");
+      spies.getContentSpy.mockRejectedValue(abortError);
+
+      // Abort the operation
+      abortController.abort();
+
+      await expect(
+        performSelectiveCleanup(
+          config,
+          ctx as any,
+          octokit,
+          abortController.signal,
+        ),
+      ).rejects.toThrow();
+
+      expect(ctx.logger.info).toHaveBeenCalledWith(
+        expect.stringContaining("cancelled"),
+      );
+    });
+  });
+});
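The suite above pins down the new performSelectiveCleanup signature: the repository config, a loader context, an explicit Octokit client, and an optional AbortSignal, resolving to a SyncStats object. A minimal calling sketch, assuming it is invoked from inside the package source; the relative imports and the hand-rolled context object are illustrative stand-ins, not part of the published API:

  import { Octokit } from "octokit";
  import { performSelectiveCleanup } from "./github.cleanup.js";
  import type { ImportOptions, SyncStats } from "./github.types.js";

  // Stand-in context: only the logger methods exercised by the cleanup code.
  const context = {
    logger: { info: console.log, warn: console.warn, error: console.error, debug: console.debug },
  } as any;

  const config: ImportOptions = {
    name: "Docs",
    owner: "test-owner",
    repo: "test-repo",
    ref: "main",
    includes: [{ pattern: "docs/**/*.md", basePath: "/test-base" }],
  };

  const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
  const controller = new AbortController();

  // Deletes local files under the include basePath that no longer exist upstream.
  const stats: SyncStats = await performSelectiveCleanup(
    config,
    context,
    octokit,
    controller.signal,
  );
  console.log(`deleted ${stats.deleted} orphaned file(s) in ${stats.duration}ms`);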
package/src/github.cleanup.ts
CHANGED
@@ -1,8 +1,18 @@
 import { promises as fs } from "node:fs";
 import { existsSync } from "node:fs";
 import { join } from "node:path";
-import {
-
+import {
+  generateId,
+  generatePath,
+  shouldIncludeFile,
+} from "./github.content.js";
+import { Octokit } from "octokit";
+import type { Logger } from "./github.logger.js";
+import type {
+  ExtendedLoaderContext,
+  ImportOptions,
+  SyncStats,
+} from "./github.types.js";
 
 const SLEEP_BETWEEN_DELETES = 10; // ms between file deletions
 
@@ -10,16 +20,17 @@ const SLEEP_BETWEEN_DELETES = 10; // ms between file deletions
  * Sleep utility for pacing file operations
  */
 function sleep(ms: number): Promise<void> {
-  return new Promise(resolve => setTimeout(resolve, ms));
+  return new Promise((resolve) => setTimeout(resolve, ms));
 }
 
 /**
  * Gets all files that should exist locally based on remote repository state
  */
 async function getExpectedFiles(
-  octokit:
+  octokit: Octokit,
   options: ImportOptions,
-
+  logger: Logger,
+  signal?: AbortSignal,
 ): Promise<Set<string>> {
   const { owner, repo, ref = "main" } = options;
   const expectedFiles = new Set<string>();
@@ -31,12 +42,14 @@ async function getExpectedFiles(
       // Extract directory part from pattern (before any glob wildcards)
       const pattern = includePattern.pattern;
       const beforeGlob = pattern.split(/[*?{]/)[0];
-      const dirPart = beforeGlob.includes(
+      const dirPart = beforeGlob.includes("/")
+        ? beforeGlob.substring(0, beforeGlob.lastIndexOf("/"))
+        : "";
       directoriesToScan.add(dirPart);
     }
   } else {
     // If no includes specified, scan from root
-    directoriesToScan.add(
+    directoriesToScan.add("");
   }
 
   async function processDirectory(dirPath: string) {
@@ -46,17 +59,26 @@ async function getExpectedFiles(
         repo,
         path: dirPath,
         ref,
-        request: { signal }
+        request: { signal },
       });
 
       if (!Array.isArray(data)) {
         // Single file
-        if (
+        if (
+          data.type === "file" &&
+          shouldIncludeFile(data.path, options).included
+        ) {
           const id = generateId(data.path);
           const includeResult = shouldIncludeFile(data.path, options);
-          const localPath = generatePath(
+          const localPath = generatePath(
+            data.path,
+            includeResult.included ? includeResult.matchedPattern : null,
+            options,
+          );
           // Convert to absolute path for consistent comparison
-          const absolutePath = localPath.startsWith(
+          const absolutePath = localPath.startsWith("/")
+            ? localPath
+            : join(process.cwd(), localPath);
           expectedFiles.add(absolutePath);
         }
         return;
@@ -75,17 +97,23 @@ async function getExpectedFiles(
         } else if (type === "file") {
           const id = generateId(itemPath);
           const includeResult = shouldIncludeFile(itemPath, options);
-          const localPath = generatePath(
+          const localPath = generatePath(
+            itemPath,
+            includeResult.included ? includeResult.matchedPattern : null,
+            options,
+          );
           // Convert to absolute path for consistent comparison
-          const absolutePath = localPath.startsWith(
+          const absolutePath = localPath.startsWith("/")
+            ? localPath
+            : join(process.cwd(), localPath);
           expectedFiles.add(absolutePath);
         }
       });
 
       await Promise.all(promises);
-    } catch (error:
+    } catch (error: unknown) {
       if (signal?.aborted) throw error;
-
+      logger.warn(`Failed to process directory ${dirPath}: ${error}`);
     }
   }
 
@@ -99,9 +127,12 @@ async function getExpectedFiles(
 /**
  * Gets all existing local files in the basePath as absolute paths
  */
-async function getExistingFiles(
+async function getExistingFiles(
+  basePath: string,
+  logger: Logger,
+): Promise<Set<string>> {
   const existingFiles = new Set<string>();
-
+
   if (!existsSync(basePath)) {
     return existingFiles;
   }
@@ -109,24 +140,24 @@ async function getExistingFiles(basePath: string): Promise<Set<string>> {
   async function walkDirectory(dirPath: string) {
     try {
       const entries = await fs.readdir(dirPath, { withFileTypes: true });
-
+
       for (const entry of entries) {
         const fullPath = join(dirPath, entry.name);
-
+
         if (entry.isDirectory()) {
           // Skip manifest files and other system directories
-          if (!entry.name.startsWith(
+          if (!entry.name.startsWith(".")) {
             await walkDirectory(fullPath);
           }
         } else if (entry.isFile()) {
           // Skip manifest and system files
-          if (!entry.name.startsWith(
+          if (!entry.name.startsWith(".")) {
            existingFiles.add(fullPath);
           }
         }
       }
     } catch (error) {
-
+      logger.warn(`Failed to read directory ${dirPath}: ${error}`);
     }
   }
 
@@ -139,14 +170,14 @@ async function getExistingFiles(basePath: string): Promise<Set<string>> {
  */
 export async function performSelectiveCleanup(
   config: ImportOptions,
-  context:
-  octokit:
-  signal?: AbortSignal
+  context: ExtendedLoaderContext,
+  octokit: Octokit,
+  signal?: AbortSignal,
 ): Promise<SyncStats> {
   const startTime = Date.now();
   const { logger } = context;
   const configName = config.name || `${config.owner}/${config.repo}`;
-
+
   if (!config.includes || config.includes.length === 0) {
     // No cleanup needed if no include patterns specified
     return {
@@ -154,7 +185,7 @@ export async function performSelectiveCleanup(
       updated: 0,
       deleted: 0,
       unchanged: 0,
-      duration: Date.now() - startTime
+      duration: Date.now() - startTime,
     };
   }
 
@@ -164,25 +195,35 @@ export async function performSelectiveCleanup(
     // Get existing local files from all include pattern base paths
     const allExistingFiles = new Set<string>();
     for (const includePattern of config.includes) {
-      const existingFiles = await getExistingFiles(
-
+      const existingFiles = await getExistingFiles(
+        includePattern.basePath,
+        logger,
+      );
+      existingFiles.forEach((file) => allExistingFiles.add(file));
     }
-
+
     // If no existing files, skip cleanup (fresh import)
     if (allExistingFiles.size === 0) {
-      logger.debug(
+      logger.debug(
+        `No existing files found in any base paths, skipping cleanup`,
+      );
       return {
         added: 0,
         updated: 0,
         deleted: 0,
         unchanged: 0,
-        duration: Date.now() - startTime
+        duration: Date.now() - startTime,
       };
     }
-
+
     // Get expected files from remote repository
-    const expectedFiles = await getExpectedFiles(
-
+    const expectedFiles = await getExpectedFiles(
+      octokit,
+      config,
+      logger,
+      signal,
+    );
+
     // Find files to delete (exist locally but not in remote)
     const filesToDelete: string[] = [];
     for (const existingFile of allExistingFiles) {
@@ -190,7 +231,7 @@ export async function performSelectiveCleanup(
         filesToDelete.push(existingFile);
       }
     }
-
+
     // Delete obsolete files with pacing
     let deletedCount = 0;
     for (const filePath of filesToDelete) {
@@ -205,39 +246,42 @@ export async function performSelectiveCleanup(
         logger.warn(`Failed to delete ${filePath}: ${error}`);
       }
     }
-
+
     const duration = Date.now() - startTime;
     const stats: SyncStats = {
       added: 0, // Will be counted by main sync process
-      updated: 0, // Will be counted by main sync process
+      updated: 0, // Will be counted by main sync process
       deleted: deletedCount,
       unchanged: 0, // Will be counted by main sync process
-      duration
+      duration,
     };
 
     if (deletedCount > 0) {
-      logger.info(
+      logger.info(
+        `Cleanup completed for ${configName}: ${deletedCount} obsolete files deleted (${duration}ms)`,
+      );
     } else {
       logger.debug(`No cleanup needed for ${configName} (${duration}ms)`);
     }
 
     return stats;
-
-  } catch (error: any) {
+  } catch (error: unknown) {
     if (signal?.aborted) {
      logger.info(`Cleanup cancelled for ${configName}`);
      throw error;
    }
-
+
    const duration = Date.now() - startTime;
-    logger.error(
+    logger.error(
+      `Cleanup failed for ${configName} after ${duration}ms: ${error instanceof Error ? error.message : String(error)}`,
+    );
    // Don't throw - let the main sync process continue
    return {
      added: 0,
-      updated: 0,
+      updated: 0,
      deleted: 0,
      unchanged: 0,
-      duration
+      duration,
    };
  }
-}
+}
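One behavioral detail in the @@ -31,12 +42,14 @@ hunk above: the directory to scan is now derived from the literal prefix of each include pattern, up to the first glob wildcard. A standalone sketch of that logic, extracted from the diff for illustration only (the package does not export such a helper):

  // Reproduces the dirPart computation from getExpectedFiles.
  function directoryToScan(pattern: string): string {
    // Take the literal prefix before the first *, ? or { wildcard,
    // then keep everything up to the last "/" (or fall back to the root).
    const beforeGlob = pattern.split(/[*?{]/)[0];
    return beforeGlob.includes("/")
      ? beforeGlob.substring(0, beforeGlob.lastIndexOf("/"))
      : "";
  }

  directoryToScan("docs/**/*.md"); // "docs" - only that tree is listed via the GitHub contents API
  directoryToScan("*.md");         // ""     - scans from the repository root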
package/src/github.constants.ts
CHANGED
@@ -6,20 +6,3 @@
  * @internal
  */
 export const INVALID_STRING_ERROR = "Invalid string";
-/**
- * Represents an error message indicating that a provided URL is invalid.
- * This constant is typically used for validation or error handling when a URL
- * does not conform to the expected format or requirements.
- *
- * @internal
- */
-export const INVALID_URL_ERROR = "Invalid url";
-
-/**
- * A constant that holds a default error message indicating that a service response is invalid.
- * This value is typically used to signify that the response from a service or API call
- * does not meet the expected format, structure, or criteria.
- *
- * @internal
- */
-export const INVALID_SERVICE_RESPONSE = "Invalid service response";