docs-cache 0.4.3 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (90) hide show
  1. package/dist/cli.mjs +13 -13
  2. package/dist/esm/api.d.ts +14 -0
  3. package/dist/esm/api.mjs +14 -0
  4. package/dist/esm/cache/cache-layout.d.ts +1 -0
  5. package/dist/esm/cache/cache-layout.mjs +12 -0
  6. package/dist/esm/cache/lock.d.ts +21 -0
  7. package/dist/esm/cache/lock.mjs +91 -0
  8. package/dist/esm/cache/manifest.d.ts +11 -0
  9. package/dist/esm/cache/manifest.mjs +68 -0
  10. package/dist/esm/cache/materialize.d.ts +26 -0
  11. package/dist/esm/cache/materialize.mjs +442 -0
  12. package/dist/esm/cache/targets.d.ts +19 -0
  13. package/dist/esm/cache/targets.mjs +66 -0
  14. package/dist/esm/cache/toc.d.ts +12 -0
  15. package/dist/esm/cache/toc.mjs +167 -0
  16. package/dist/esm/cli/exit-code.d.ts +11 -0
  17. package/dist/esm/cli/exit-code.mjs +5 -0
  18. package/dist/esm/cli/index.d.ts +5 -0
  19. package/dist/esm/cli/index.mjs +345 -0
  20. package/dist/esm/cli/live-output.d.ts +12 -0
  21. package/dist/esm/cli/live-output.mjs +30 -0
  22. package/dist/esm/cli/parse-args.d.ts +13 -0
  23. package/dist/esm/cli/parse-args.mjs +295 -0
  24. package/dist/esm/cli/run.d.ts +1 -0
  25. package/dist/esm/cli/run.mjs +2 -0
  26. package/dist/esm/cli/task-reporter.d.ts +32 -0
  27. package/dist/esm/cli/task-reporter.mjs +122 -0
  28. package/dist/esm/cli/types.d.ts +51 -0
  29. package/dist/esm/cli/types.mjs +0 -0
  30. package/dist/esm/cli/ui.d.ts +21 -0
  31. package/dist/esm/cli/ui.mjs +64 -0
  32. package/dist/esm/commands/add.d.ts +20 -0
  33. package/dist/esm/commands/add.mjs +81 -0
  34. package/dist/esm/commands/clean-git-cache.d.ts +10 -0
  35. package/dist/esm/commands/clean-git-cache.mjs +48 -0
  36. package/dist/esm/commands/clean.d.ts +10 -0
  37. package/dist/esm/commands/clean.mjs +27 -0
  38. package/dist/esm/commands/init.d.ts +19 -0
  39. package/dist/esm/commands/init.mjs +179 -0
  40. package/dist/esm/commands/prune.d.ts +11 -0
  41. package/dist/esm/commands/prune.mjs +52 -0
  42. package/dist/esm/commands/remove.d.ts +12 -0
  43. package/dist/esm/commands/remove.mjs +87 -0
  44. package/dist/esm/commands/status.d.ts +16 -0
  45. package/dist/esm/commands/status.mjs +78 -0
  46. package/dist/esm/commands/sync.d.ts +33 -0
  47. package/dist/esm/commands/sync.mjs +730 -0
  48. package/dist/esm/commands/verify.d.ts +11 -0
  49. package/dist/esm/commands/verify.mjs +120 -0
  50. package/dist/esm/config/index.d.ts +15 -0
  51. package/dist/esm/config/index.mjs +196 -0
  52. package/dist/esm/config/io.d.ts +30 -0
  53. package/dist/esm/config/io.mjs +112 -0
  54. package/dist/esm/config/schema.d.ts +171 -0
  55. package/dist/esm/config/schema.mjs +69 -0
  56. package/dist/esm/errors.d.ts +3 -0
  57. package/dist/esm/errors.mjs +2 -0
  58. package/dist/esm/git/cache-dir.d.ts +16 -0
  59. package/dist/esm/git/cache-dir.mjs +23 -0
  60. package/dist/esm/git/fetch-source.d.ts +19 -0
  61. package/dist/esm/git/fetch-source.mjs +477 -0
  62. package/dist/esm/git/redact.d.ts +1 -0
  63. package/dist/esm/git/redact.mjs +4 -0
  64. package/dist/esm/git/resolve-remote.d.ts +15 -0
  65. package/dist/esm/git/resolve-remote.mjs +87 -0
  66. package/dist/esm/git/resolve-repo.d.ts +5 -0
  67. package/dist/esm/git/resolve-repo.mjs +52 -0
  68. package/dist/esm/gitignore.d.ts +18 -0
  69. package/dist/esm/gitignore.mjs +80 -0
  70. package/dist/esm/paths.d.ts +8 -0
  71. package/dist/esm/paths.mjs +34 -0
  72. package/dist/esm/source-id.d.ts +1 -0
  73. package/dist/esm/source-id.mjs +29 -0
  74. package/dist/esm/types/sync.d.ts +25 -0
  75. package/dist/esm/types/sync.mjs +0 -0
  76. package/package.json +138 -91
  77. package/dist/chunks/add.mjs +0 -3
  78. package/dist/chunks/clean-git-cache.mjs +0 -2
  79. package/dist/chunks/clean.mjs +0 -2
  80. package/dist/chunks/init.mjs +0 -3
  81. package/dist/chunks/prune.mjs +0 -2
  82. package/dist/chunks/remove.mjs +0 -3
  83. package/dist/chunks/status.mjs +0 -2
  84. package/dist/chunks/sync.mjs +0 -9
  85. package/dist/chunks/verify.mjs +0 -2
  86. package/dist/shared/docs-cache.BOr9BnyP.mjs +0 -5
  87. package/dist/shared/docs-cache.BSvQNKuf.mjs +0 -2
  88. package/dist/shared/docs-cache.CQiaFDb_.mjs +0 -7
  89. package/dist/shared/docs-cache.CaOcl4OS.mjs +0 -3
  90. package/dist/shared/docs-cache.kK1DPQIQ.mjs +0 -2
@@ -0,0 +1,730 @@
1
+ import { createHash } from "node:crypto";
2
+ import { access, mkdir, readFile } from "node:fs/promises";
3
+ import path from "node:path";
4
+ import pc from "picocolors";
5
+ import { readLock, resolveLockPath, writeLock } from "#cache/lock";
6
+ import { MANIFEST_FILENAME } from "#cache/manifest";
7
+ import { computeManifestHash, materializeSource } from "#cache/materialize";
8
+ import { applyTargetDir } from "#cache/targets";
9
+ import { writeToc } from "#cache/toc";
10
+ import { TaskReporter } from "#cli/task-reporter";
11
+ import { isSilentMode, symbols, ui } from "#cli/ui";
12
+ import { verifyCache } from "#commands/verify";
13
+ import {
14
+ DEFAULT_CACHE_DIR,
15
+ DEFAULT_CONFIG,
16
+ loadConfig
17
+ } from "#config";
18
+ import { resolveCacheDir, resolveTargetDir } from "#core/paths";
19
+ import { fetchSource } from "#git/fetch-source";
20
+ import { resolveRemoteCommit } from "#git/resolve-remote";
21
// Render a byte count for humans: exact integer "B" below 1 KiB, otherwise
// one decimal place in the largest unit that applies (capped at TB).
const formatBytes = (value) => {
  if (value < 1024) {
    return `${value} B`;
  }
  const units = ["KB", "MB", "GB", "TB"];
  let remaining = value;
  let unitIndex = -1;
  // Entry is guaranteed >= 1024 here, so the do/while divides at least once.
  do {
    remaining /= 1024;
    unitIndex += 1;
  } while (remaining >= 1024 && unitIndex < units.length - 1);
  return `${remaining.toFixed(1)} ${units[unitIndex]}`;
};
34
// True when `target` is accessible on disk (any file type); any access
// error (missing path, permissions) maps to false.
const exists = async (target) => {
  let present = true;
  try {
    await access(target);
  } catch {
    present = false;
  }
  return present;
};
42
// A source counts as materialized only when both its cache folder and the
// manifest file inside it exist.
const hasDocs = async (cacheDir, sourceId) => {
  const sourceDir = path.join(cacheDir, sourceId);
  const dirPresent = await exists(sourceDir);
  if (!dirPresent) {
    return false;
  }
  return exists(path.join(sourceDir, MANIFEST_FILENAME));
};
49
// Canonicalize a glob-pattern list so logically equal configs compare equal:
// trim entries, drop empties, dedupe, and sort.
const normalizePatterns = (patterns) => {
  const input = patterns || [];
  const cleaned = input
    .map((pattern) => pattern.trim())
    .filter((pattern) => pattern.length > 0);
  return [...new Set(cleaned)].sort();
};
56
// NOTE(review): RULES_HASH_BLACKLIST is not referenced anywhere in this
// module — it appears to document the source fields deliberately excluded
// from the rules hash (identity/placement fields). Confirm before removing.
const RULES_HASH_BLACKLIST = [
  "id", "repo", "ref", "targetDir",
  "targetMode", "required", "integrity", "toc"
];
// Content-shaping fields that feed computeRulesHash; changing any of these
// forces a re-materialize.
const RULES_HASH_KEYS = [
  "mode", "include", "exclude", "maxBytes",
  "maxFiles", "ignoreHidden", "unwrapSingleRootDir"
];
75
// Normalize one rules value before hashing: include/exclude pattern arrays
// are canonicalized; every other value is hashed as-is.
const normalizeRulesValue = (key, value) => {
  const isPatternList =
    (key === "include" || key === "exclude") && Array.isArray(value);
  return isPatternList ? normalizePatterns(value) : value;
};
84
// Stable sha256 digest over the content-shaping rule fields. Keys are
// sorted before serialization so key order never changes the hash.
const computeRulesHash = (source) => {
  const entries = RULES_HASH_KEYS
    .map((key) => [key, normalizeRulesValue(key, source[key])])
    .sort(([left], [right]) => left.localeCompare(right));
  return createHash("sha256")
    .update(JSON.stringify(Object.fromEntries(entries)))
    .digest("hex");
};
97
/**
 * Build the full sync plan without mutating the cache: load config + lock,
 * resolve every configured source (remote commit when online, lock/cache
 * state when offline), and classify each as up-to-date / changed / missing.
 *
 * @param options - CLI options; reads configPath, cacheDirOverride,
 *   sourceFilter, offline, timeoutMs, verbose, json.
 * @param deps - optional injection point; deps.resolveRemoteCommit replaces
 *   the real remote resolver (used by tests).
 * @returns { config, configPath, cacheDir, lockPath, lockExists, lockData,
 *   results, sources, defaults }
 */
export const getSyncPlan = async (options, deps = {}) => {
  const { config, resolvedPath, sources } = await loadConfig(
    options.configPath
  );
  const defaults = config.defaults ?? DEFAULT_CONFIG.defaults;
  // Precedence: CLI override > config.cacheDir > built-in default.
  const resolvedCacheDir = resolveCacheDir(
    resolvedPath,
    config.cacheDir ?? DEFAULT_CACHE_DIR,
    options.cacheDirOverride
  );
  const lockPath = resolveLockPath(resolvedPath);
  const lockExists = await exists(lockPath);
  let lockData = null;
  if (lockExists) {
    lockData = await readLock(lockPath);
  }
  const resolveCommit = deps.resolveRemoteCommit ?? resolveRemoteCommit;
  // --source filter narrows the plan to the requested ids only.
  const filteredSources = options.sourceFilter?.length ? sources.filter((source) => options.sourceFilter?.includes(source.id)) : sources;
  // Classify all sources in parallel; offline classification never touches
  // the network and relies on lock + local cache state only.
  const results = await Promise.all(
    filteredSources.map(async (source) => {
      const lockEntry = lockData?.sources?.[source.id];
      const rulesSha256 = computeRulesSha(source, defaults);
      if (options.offline) {
        return buildOfflineResult({
          source,
          lockEntry,
          defaults,
          resolvedCacheDir,
          rulesSha256
        });
      }
      return buildOnlineResult({
        source,
        lockEntry,
        defaults,
        options,
        resolveCommit,
        rulesSha256
      });
    })
  );
  return {
    config,
    configPath: resolvedPath,
    cacheDir: resolvedCacheDir,
    lockPath,
    lockExists,
    lockData,
    results,
    sources: filteredSources,
    defaults
  };
};
150
// Read one candidate package.json; returns its version string ("0.0.0" when
// the version field is absent/non-string) or null when unreadable/unparsable.
const readPackageVersion = async (target) => {
  try {
    const raw = await readFile(target, "utf8");
    const pkg = JSON.parse(raw.toString());
    return typeof pkg.version === "string" ? pkg.version : "0.0.0";
  } catch {
    return null;
  }
};
/**
 * Resolve this tool's own version for the lockfile's `toolVersion` field.
 *
 * Fix: the original consulted `process.cwd()/package.json` FIRST, so running
 * the CLI inside a consumer project recorded the consumer's version as the
 * tool version. Module-relative locations (the built package's own
 * package.json at ../ or ../../ from this file) are now tried first; the
 * working directory is only a last-resort fallback for in-repo dev runs.
 *
 * @returns version string, "0.0.0" when no package.json can be read.
 */
const loadToolVersion = async () => {
  const candidates = [
    new URL("../package.json", import.meta.url),
    new URL("../../package.json", import.meta.url),
    path.resolve(process.cwd(), "package.json")
  ];
  for (const candidate of candidates) {
    const version = await readPackageVersion(candidate);
    if (version !== null) {
      return version;
    }
  }
  return "0.0.0";
};
178
// Translate one sync result into its lockfile entry. Stats a run did not
// recompute fall back to the prior lock entry, then to 0 (or, for the
// manifest hash, to the resolved commit as a placeholder).
const buildLockSource = (result, prior, now) => {
  const entry = {
    repo: result.repo,
    ref: result.ref,
    resolvedCommit: result.resolvedCommit,
    bytes: result.bytes ?? prior?.bytes ?? 0,
    fileCount: result.fileCount ?? prior?.fileCount ?? 0,
    manifestSha256:
      result.manifestSha256 ?? prior?.manifestSha256 ?? result.resolvedCommit,
    rulesSha256: result.rulesSha256 ?? prior?.rulesSha256,
    updatedAt: now
  };
  return entry;
};
188
// Fold this run's results into a fresh v1 lock document, keeping entries for
// sources that were not part of the run.
const buildLock = async (plan, previous) => {
  const toolVersion = await loadToolVersion();
  const timestamp = new Date().toISOString();
  const sources = Object.assign({}, previous?.sources ?? {});
  for (const result of plan.results) {
    sources[result.id] = buildLockSource(result, sources[result.id], timestamp);
  }
  return {
    version: 1,
    generatedAt: timestamp,
    toolVersion,
    sources
  };
};
203
// Shared skeleton of a sync result. Explicit repo/ref (from remote
// resolution) win over the lock entry, which wins over the raw config; stats
// start from whatever the lock already knew.
const buildSyncResultBase = (params) => {
  const {
    source,
    lockEntry,
    defaults,
    resolvedCommit,
    rulesSha256,
    repo,
    ref
  } = params;
  const effectiveRepo = repo ?? lockEntry?.repo ?? source.repo;
  const effectiveRef = ref ?? lockEntry?.ref ?? source.ref ?? defaults.ref;
  return {
    id: source.id,
    repo: effectiveRepo,
    ref: effectiveRef,
    resolvedCommit,
    lockCommit: lockEntry?.resolvedCommit ?? null,
    lockRulesSha256: lockEntry?.rulesSha256,
    bytes: lockEntry?.bytes,
    fileCount: lockEntry?.fileCount,
    manifestSha256: lockEntry?.manifestSha256,
    rulesSha256
  };
};
226
// Hash the *effective* rules for a source: per-source include/exclude
// override the config-level defaults before hashing.
const computeRulesSha = (source, defaults) =>
  computeRulesHash({
    ...source,
    include: source.include ?? defaults.include,
    exclude: source.exclude ?? defaults.exclude
  });
235
// Offline plan entry: a source is "up-to-date" only when both a lock entry
// and materialized docs already exist locally; never touches the network.
// Without a lock entry the resolved commit is the sentinel "offline".
const buildOfflineResult = async (params) => {
  const { source, lockEntry, defaults, resolvedCacheDir, rulesSha256 } = params;
  const docsPresent = await hasDocs(resolvedCacheDir, source.id);
  const status = lockEntry && docsPresent ? "up-to-date" : "missing";
  return {
    ...buildSyncResultBase({
      source,
      lockEntry,
      defaults,
      resolvedCommit: lockEntry?.resolvedCommit ?? "offline",
      rulesSha256
    }),
    status
  };
};
251
// Online plan entry: resolve the remote commit, then classify against the
// lock — "missing" without a lock entry, "up-to-date" when both commit and
// rules hash match the lock, otherwise "changed".
const buildOnlineResult = async (params) => {
  const { source, lockEntry, defaults, options, resolveCommit, rulesSha256 } = params;
  const resolved = await resolveCommit({
    repo: source.repo,
    ref: source.ref,
    allowHosts: defaults.allowHosts,
    timeoutMs: options.timeoutMs,
    logger: options.verbose && !options.json ? ui.debug : void 0
  });
  const matchesLock =
    lockEntry?.resolvedCommit === resolved.resolvedCommit &&
    lockEntry?.rulesSha256 === rulesSha256;
  let status = "missing";
  if (lockEntry) {
    status = matchesLock ? "up-to-date" : "changed";
  }
  const base = buildSyncResultBase({
    source,
    lockEntry,
    defaults,
    resolvedCommit: resolved.resolvedCommit,
    rulesSha256,
    repo: resolved.repo,
    ref: resolved.ref
  });
  return { ...base, status };
};
276
// Announce where a repo came from: live reporter gets a debug line, plain
// (non-JSON) output gets a step line, JSON mode stays silent.
const logFetchStatus = (reporter, options, sourceId, fromCache) => {
  if (reporter) {
    const verb = fromCache ? "restored from cache" : "downloaded";
    reporter.debug(`${sourceId}: ${verb}`);
  } else if (!options.json) {
    ui.step(fromCache ? "Restoring from cache" : "Downloading repo", sourceId);
  }
};
287
// Announce the start of materialization for one source (reporter debug line,
// plain step line, or silence in JSON mode).
const logMaterializeStart = (reporter, options, sourceId) => {
  if (reporter) {
    reporter.debug(`${sourceId}: materializing`);
  } else if (!options.json) {
    ui.step("Materializing", sourceId);
  }
};
296
// Report that a source's content is unchanged (manifest reuse fast path).
const reportNoChanges = (reporter, options, sourceId) => {
  if (reporter) {
    reporter.success(sourceId, "no content changes");
  } else if (!options.json) {
    ui.item(symbols.success, sourceId, "no content changes");
  }
};
305
// Report a completed sync for one source, including its file count.
const reportSynced = (reporter, options, sourceId, fileCount) => {
  if (reporter) {
    reporter.success(sourceId, `synced ${fileCount} files`, symbols.synced);
  } else if (!options.json) {
    ui.item(symbols.synced, sourceId, `synced ${fileCount} files`);
  }
};
314
// Build per-source log callbacks: debug logging only for verbose non-JSON
// runs (routed to the reporter when live), progress forwarding only when a
// live reporter exists.
const createLoggers = (reporter, options, sourceId) => {
  let logDebug;
  if (options.verbose && !options.json) {
    logDebug = reporter ? (msg) => reporter.debug(msg) : ui.debug;
  }
  const logProgress = reporter
    ? (msg) => reporter.debug(`${sourceId}: ${msg}`)
    : void 0;
  return { logDebug, logProgress };
};
319
// Mirror a synced source from the cache into its configured targetDir.
// No-op when the source has no targetDir.
const applyTargetIfNeeded = async (plan, defaults, source) => {
  if (!source.targetDir) {
    return;
  }
  await applyTargetDir({
    sourceDir: path.join(plan.cacheDir, source.id),
    targetDir: resolveTargetDir(plan.configPath, source.targetDir),
    mode: source.targetMode ?? defaults.targetMode,
    // Per-source targetMode counts as explicit even when it equals the default.
    explicitTargetMode: source.targetMode !== void 0,
    unwrapSingleRootDir: source.unwrapSingleRootDir
  });
};
332
// Materialize one source: copy docs from the fetched repo into the cache,
// mirror to the target dir, then fold the stats into the mutable `result`
// and report completion.
const materializeJob = async (params) => {
  const {
    plan,
    options,
    defaults,
    reporter,
    source,
    fetch,
    runMaterialize,
    result
  } = params;
  logMaterializeStart(reporter, options, source.id);
  const progressLogger = reporter
    ? (msg) => reporter.debug(`${source.id}: ${msg}`)
    : void 0;
  const stats = await runMaterialize({
    sourceId: source.id,
    repoDir: fetch.repoDir,
    cacheDir: plan.cacheDir,
    include: source.include ?? defaults.include,
    exclude: source.exclude,
    maxBytes: source.maxBytes ?? defaults.maxBytes,
    maxFiles: source.maxFiles ?? defaults.maxFiles,
    ignoreHidden: source.ignoreHidden ?? defaults.ignoreHidden,
    unwrapSingleRootDir: source.unwrapSingleRootDir,
    json: options.json,
    progressLogger
  });
  await applyTargetIfNeeded(plan, defaults, source);
  // Mutate the shared result so the lockfile build sees fresh stats.
  result.bytes = stats.bytes;
  result.fileCount = stats.fileCount;
  result.manifestSha256 = stats.manifestSha256;
  result.status = "up-to-date";
  reportSynced(reporter, options, source.id, stats.fileCount);
};
364
// Post-sync integrity pass: verify the whole cache and, when sources fail,
// force-rebuild just those sources once and verify again. Returns the number
// of warnings to add to the run summary (0, or 1 when failures remain after
// the single repair attempt). Skipped entirely in offline mode, since a
// repair would require re-fetching.
const verifyAndRepairCache = async (params) => {
  const { plan, options, docsPresence, defaults, reporter, runJobs } = params;
  if (options.offline) {
    return 0;
  }
  // NOTE(review): this only skips verification for "JSON mode with zero
  // results"; presumably intended as a cheap no-op guard — confirm intent.
  const shouldVerify = !options.json || plan.results.length > 0;
  if (!shouldVerify) {
    return 0;
  }
  const verifyReport = await verifyCache({
    configPath: plan.configPath,
    cacheDirOverride: plan.cacheDir,
    json: true
  });
  const failed = verifyReport.results.filter((result) => !result.ok);
  if (failed.length === 0) {
    return 0;
  }
  // Rebuild only the failing sources; force=true bypasses the docs-presence
  // shortcut so they are re-materialized unconditionally.
  const retryJobs = await buildJobs(
    plan,
    options,
    docsPresence,
    failed.map((result) => result.id),
    true
  );
  if (retryJobs.length > 0) {
    await runJobs(retryJobs);
    await ensureTargets(plan, defaults);
  }
  // Second verification decides whether the repair actually worked.
  const retryReport = await verifyCache({
    configPath: plan.configPath,
    cacheDirOverride: plan.cacheDir,
    json: true
  });
  const stillFailed = retryReport.results.filter((result) => !result.ok);
  if (stillFailed.length === 0) {
    return 0;
  }
  reportVerifyFailures(reporter, options, stillFailed);
  return 1;
};
405
// Fast path: skip re-materializing when the cached manifest provably matches
// what the current repo + rules would produce. On success mutates `result`
// with the recomputed stats, reports "no content changes", and returns true;
// returns false whenever a full materialize is still required.
const tryReuseManifest = async (params) => {
  const {
    result,
    source,
    lockEntry,
    plan,
    defaults,
    fetch,
    reporter,
    options
  } = params;
  // Cheap eligibility checks first: not already up-to-date, lock has a
  // manifest hash, and the rules hash has not drifted since the lock.
  const eligible =
    result.status !== "up-to-date" &&
    Boolean(lockEntry?.manifestSha256) &&
    lockEntry.rulesSha256 === result.rulesSha256;
  if (!eligible) {
    return false;
  }
  const manifestPath = path.join(plan.cacheDir, source.id, MANIFEST_FILENAME);
  const manifestPresent = await exists(manifestPath);
  if (!manifestPresent) {
    return false;
  }
  // Recompute the would-be manifest hash from the fetched repo and compare.
  const computed = await computeManifestHash({
    sourceId: source.id,
    repoDir: fetch.repoDir,
    cacheDir: plan.cacheDir,
    include: source.include ?? defaults.include,
    exclude: source.exclude,
    maxBytes: source.maxBytes ?? defaults.maxBytes,
    maxFiles: source.maxFiles ?? defaults.maxFiles,
    ignoreHidden: source.ignoreHidden ?? defaults.ignoreHidden
  });
  if (computed.manifestSha256 !== lockEntry.manifestSha256) {
    return false;
  }
  result.bytes = computed.bytes;
  result.fileCount = computed.fileCount;
  result.manifestSha256 = computed.manifestSha256;
  result.status = "up-to-date";
  reportNoChanges(reporter, options, source.id);
  return true;
};
449
// Decide which plan results still need a fetch+materialize pass. `ids`
// narrows the candidates; `force` queues every candidate regardless of
// status (used by the verify/repair retry). Offline candidates without a
// locked commit are dropped — there is nothing to restore them from.
// `docsPresence` memoizes hasDocs lookups across calls.
const buildJobs = async (plan, options, docsPresence, ids, force) => {
  const selected = ids?.length
    ? plan.results.filter((result) => ids.includes(result.id))
    : plan.results;
  const maybeJobs = await Promise.all(
    selected.map(async (result) => {
      const source = plan.sources.find((entry) => entry.id === result.id);
      if (!source) {
        return null;
      }
      if (options.offline) {
        const lockEntry = plan.lockData?.sources?.[result.id];
        if (!lockEntry?.resolvedCommit) {
          return null;
        }
      }
      if (force) {
        return { result, source };
      }
      let docsPresent = docsPresence.get(result.id);
      if (docsPresent === void 0) {
        docsPresent = await hasDocs(plan.cacheDir, result.id);
        docsPresence.set(result.id, docsPresent);
      }
      const needsWork = result.status !== "up-to-date" || !docsPresent;
      return needsWork ? { result, source } : null;
    })
  );
  return maybeJobs.filter(Boolean);
};
480
// Re-create any configured target dirs that are missing on disk (e.g. after
// a manual delete), mirroring from the cache. Existing targets are left
// untouched; sources without a targetDir are skipped.
const ensureTargets = async (plan, defaults) => {
  const tasks = plan.sources.map(async (source) => {
    if (!source.targetDir) {
      return;
    }
    const resolvedTarget = resolveTargetDir(
      plan.configPath,
      source.targetDir
    );
    const alreadyPresent = await exists(resolvedTarget);
    if (alreadyPresent) {
      return;
    }
    await applyTargetDir({
      sourceDir: path.join(plan.cacheDir, source.id),
      targetDir: resolvedTarget,
      mode: source.targetMode ?? defaults.targetMode,
      explicitTargetMode: source.targetMode !== void 0,
      unwrapSingleRootDir: source.unwrapSingleRootDir
    });
  });
  await Promise.all(tasks);
};
503
// Aggregate byte and file totals across all results; results with no stats
// contribute 0.
const summarizePlan = (plan) => {
  let totalBytes = 0;
  let totalFiles = 0;
  for (const result of plan.results) {
    totalBytes += result.bytes ?? 0;
    totalFiles += result.fileCount ?? 0;
  }
  return { totalBytes, totalFiles };
};
514
// Surface verify failures that survived the repair pass: one warn per source
// under a live reporter, a single combined warning line in plain mode, and
// nothing in JSON mode.
const reportVerifyFailures = (reporter, options, stillFailed) => {
  if (stillFailed.length === 0) {
    return;
  }
  if (reporter) {
    for (const failed of stillFailed) {
      reporter.warn(failed.id, failed.issues.join("; "));
    }
    return;
  }
  if (options.json) {
    return;
  }
  const details = stillFailed
    .map((result) => `${result.id} (${result.issues.join("; ")})`)
    .join(", ");
  ui.line(
    `${symbols.warn} Verify failed for ${stillFailed.length} source(s): ${details}`
  );
};
531
// Finish a sync run: persist the lockfile, print the summary (reporter line
// or plain completed-in line; JSON stays silent), write the TOC, mark the
// lock as present on the plan, and return the plan.
const finalizeSync = async (params) => {
  const { plan, previous, reporter, options, startTime, warningCount } = params;
  const lock = await buildLock(plan, previous);
  await writeLock(plan.lockPath, lock);
  const { totalBytes, totalFiles } = summarizePlan(plan);
  if (reporter) {
    reporter.finish(`${symbols.info} ${formatBytes(totalBytes)} \xB7 ${totalFiles} files`);
  } else if (!options.json) {
    const elapsedMs = Number(process.hrtime.bigint() - startTime) / 1e6;
    const warningsSuffix = warningCount
      ? ` \xB7 ${warningCount} warning${warningCount === 1 ? "" : "s"}`
      : "";
    ui.line(
      `${symbols.info} Completed in ${elapsedMs.toFixed(0)}ms \xB7 ${formatBytes(totalBytes)} \xB7 ${totalFiles} files${warningsSuffix}`
    );
  }
  await writeToc({
    cacheDir: plan.cacheDir,
    configPath: plan.configPath,
    lock,
    sources: plan.sources,
    results: plan.results
  });
  plan.lockExists = true;
  return plan;
};
556
// Build the job executor used by runSync: runs `jobs` through a small worker
// pool (width = options.concurrency, default 4). Each worker recursively
// pulls the next job off the shared list until it is drained. For each job:
// start reporter task, fetch the repo, try the manifest-reuse fast path,
// otherwise materialize; the fetched repo dir is always cleaned up.
const createJobRunner = (params) => {
  const { plan, options, defaults, reporter, runFetch, runMaterialize } = params;
  return async (jobs) => {
    const concurrencyRaw = options.concurrency ?? 4;
    const concurrency = Math.floor(concurrencyRaw);
    if (!Number.isFinite(concurrencyRaw) || concurrency < 1) {
      throw new TypeError(
        "Invalid options.concurrency; must be a positive number."
      );
    }
    // Shared cursor over `jobs`. The read below and the increment happen with
    // no await in between, so concurrent workers cannot claim the same job.
    let index = 0;
    const runNext = async () => {
      const job = jobs[index];
      if (!job || !job.source) {
        return;
      }
      index += 1;
      const { result, source } = job;
      const lockEntry = plan.lockData?.sources?.[source.id];
      const { logDebug, logProgress } = createLoggers(
        reporter,
        options,
        source.id
      );
      if (reporter) {
        reporter.start(source.id);
      }
      const fetch = await runFetch({
        sourceId: source.id,
        repo: source.repo,
        ref: source.ref,
        resolvedCommit: result.resolvedCommit,
        cacheDir: plan.cacheDir,
        include: source.include ?? defaults.include,
        timeoutMs: options.timeoutMs,
        logger: logDebug,
        progressLogger: logProgress,
        offline: options.offline
      });
      logFetchStatus(reporter, options, source.id, fetch.fromCache);
      try {
        // Fast path: reuse the cached manifest when it provably matches.
        const reusedManifest = await tryReuseManifest({
          result,
          source,
          lockEntry,
          plan,
          defaults,
          fetch,
          reporter,
          options
        });
        if (reusedManifest) {
          await runNext();
          return;
        }
        await materializeJob({
          plan,
          options,
          defaults,
          reporter,
          source,
          fetch,
          runMaterialize,
          result
        });
      } finally {
        // Release the fetched repo dir even when materialize throws.
        await fetch.cleanup();
      }
      await runNext();
    };
    // Never spawn more workers than there are jobs.
    await Promise.all(
      Array.from({ length: Math.min(concurrency, jobs.length) }, runNext)
    );
  };
};
631
/**
 * Execute a full sync: build the plan, fetch/materialize whatever changed,
 * verify the cache (repairing failures once), then write the lockfile and
 * TOC. Returns the (mutated) plan from finalizeSync.
 *
 * @param options - CLI options; reads json, failOnMiss, lockOnly, offline,
 *   concurrency, timeoutMs, verbose (plus what getSyncPlan reads).
 * @param deps - optional test doubles: resolveRemoteCommit, fetchSource,
 *   materializeSource.
 * @throws Error when options.failOnMiss is set and a required source is
 *   missing.
 */
export const runSync = async (options, deps = {}) => {
  const startTime = process.hrtime.bigint();
  let warningCount = 0;
  const plan = await getSyncPlan(options, deps);
  await mkdir(plan.cacheDir, { recursive: true });
  // Live task output only for interactive TTY runs (not JSON, silent, or the
  // node --test runner).
  const isTestRunner = process.argv.includes("--test");
  const useLiveOutput = !options.json && !isSilentMode() && process.stdout.isTTY && !isTestRunner;
  const reporter = useLiveOutput ? new TaskReporter() : null;
  // Snapshot the previous lock before this run's results are merged into it.
  const previous = plan.lockData;
  // Sources default to required unless the config says otherwise.
  const requiredMissing = plan.results.filter((result) => {
    const source = plan.sources.find((entry) => entry.id === result.id);
    return result.status === "missing" && (source?.required ?? true);
  });
  if (options.failOnMiss && requiredMissing.length > 0) {
    throw new Error(
      `Missing required source(s): ${requiredMissing.map((result) => result.id).join(", ")}.`
    );
  }
  if (!options.lockOnly) {
    const defaults = plan.defaults;
    // Injected fetch/materialize doubles support testing.
    const runFetch = deps.fetchSource ?? fetchSource;
    const runMaterialize = deps.materializeSource ?? materializeSource;
    // Shared memo of "does this source already have docs?" lookups.
    const docsPresence = /* @__PURE__ */ new Map();
    const runJobs = createJobRunner({
      plan,
      options,
      defaults,
      reporter,
      runFetch,
      runMaterialize
    });
    const initialJobs = await buildJobs(plan, options, docsPresence);
    await runJobs(initialJobs);
    await ensureTargets(plan, defaults);
    warningCount += await verifyAndRepairCache({
      plan,
      options,
      docsPresence,
      defaults,
      reporter,
      runJobs
    });
  }
  return finalizeSync({
    plan,
    previous,
    reporter,
    options,
    startTime,
    warningCount
  });
};
683
// Pretty-print a dry-run plan: a summary line, then one status line per
// source. A "changed" source whose commit is unchanged but whose rules hash
// moved is labeled "rules changed".
export const printSyncPlan = (plan) => {
  if (plan.results.length === 0) {
    ui.line(`${symbols.info} No sources to sync.`);
    return;
  }
  const countByStatus = (status) =>
    plan.results.filter((result) => result.status === status).length;
  ui.line(
    `${symbols.info} ${plan.results.length} sources (${countByStatus("up-to-date")} up-to-date, ${countByStatus("changed")} changed, ${countByStatus("missing")} missing)`
  );
  for (const result of plan.results) {
    const shortResolved = ui.hash(result.resolvedCommit);
    const shortLock = ui.hash(result.lockCommit);
    const rulesChanged =
      Boolean(result.lockRulesSha256) &&
      Boolean(result.rulesSha256) &&
      result.lockRulesSha256 !== result.rulesSha256;
    if (result.status === "up-to-date") {
      ui.item(
        symbols.success,
        result.id,
        `${pc.dim("up-to-date")} ${pc.gray(shortResolved)}`
      );
    } else if (result.status === "changed") {
      if (result.lockCommit === result.resolvedCommit && rulesChanged) {
        ui.item(
          symbols.warn,
          result.id,
          `${pc.dim("rules changed")} ${pc.gray(shortResolved)}`
        );
      } else {
        ui.item(
          symbols.warn,
          result.id,
          `${pc.dim("changed")} ${pc.gray(shortLock)} ${pc.dim("->")} ${pc.gray(shortResolved)}`
        );
      }
    } else {
      ui.item(
        symbols.warn,
        result.id,
        `${pc.dim("missing")} ${pc.gray(shortResolved)}`
      );
    }
  }
};