claudekit-cli 1.4.0 → 1.4.1
This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
- package/CHANGELOG.md +7 -0
- package/dist/index.js +40 -18
- package/package.json +1 -1
- package/src/lib/download.ts +44 -9
- package/src/lib/merge.ts +14 -11
- package/src/types.ts +7 -0
- package/src/utils/file-scanner.ts +1 -1
- package/tests/lib/merge.test.ts +60 -8
- package/test-integration/demo/.mcp.json +0 -13
- package/test-integration/demo/.repomixignore +0 -15
- package/test-integration/demo/CLAUDE.md +0 -34
package/CHANGELOG.md
CHANGED
@@ -1,3 +1,10 @@
+## [1.4.1](https://github.com/mrgoonie/claudekit-cli/compare/v1.4.0...v1.4.1) (2025-10-21)
+
+
+### Bug Fixes
+
+* handle protected files during merge ([fe90767](https://github.com/mrgoonie/claudekit-cli/commit/fe907670932fc5b39521586ef798f73cd1130180))
+
 # [1.4.0](https://github.com/mrgoonie/claudekit-cli/compare/v1.3.0...v1.4.0) (2025-10-21)
 
 
package/dist/index.js
CHANGED
@@ -6403,7 +6403,7 @@ var cac = (name = "") => new CAC(name);
 // package.json
 var package_default = {
   name: "claudekit-cli",
-  version: "1.3.0",
+  version: "1.4.0",
   description: "CLI tool for bootstrapping and updating ClaudeKit projects",
   type: "module",
   bin: {
@@ -11056,6 +11056,10 @@ var PROTECTED_PATTERNS = [
   "*.key",
   "*.pem",
   "*.p12",
+  ".gitignore",
+  ".repomixignore",
+  ".mcp.json",
+  "CLAUDE.md",
   "node_modules/**",
   ".git/**",
   "dist/**",
@@ -21292,6 +21296,22 @@ class DownloadManager {
   shouldExclude(filePath) {
     return this.ig.ignores(filePath);
   }
+  decodeFilePath(path8) {
+    if (!path8.includes("%")) {
+      return path8;
+    }
+    try {
+      if (/%[0-9A-F]{2}/i.test(path8)) {
+        const decoded = decodeURIComponent(path8);
+        logger.debug(`Decoded path: ${path8} -> ${decoded}`);
+        return decoded;
+      }
+      return path8;
+    } catch (error2) {
+      logger.warning(`Failed to decode path "${path8}": ${error2 instanceof Error ? error2.message : "Unknown error"}`);
+      return path8;
+    }
+  }
   isPathSafe(basePath, targetPath) {
     const resolvedBase = resolve(basePath);
     const resolvedTarget = resolve(targetPath);
@@ -21443,22 +21463,23 @@ class DownloadManager {
       cwd: tempExtractDir,
       strip: 0,
       filter: (path8) => {
-        const shouldInclude = !this.shouldExclude(path8);
+        const decodedPath = this.decodeFilePath(path8);
+        const shouldInclude = !this.shouldExclude(decodedPath);
         if (!shouldInclude) {
-          logger.debug(`Excluding: ${path8}`);
+          logger.debug(`Excluding: ${decodedPath}`);
         }
         return shouldInclude;
       }
     });
     logger.debug(`Extracted TAR.GZ to temp: ${tempExtractDir}`);
-    const entries = await readdir(tempExtractDir);
+    const entries = await readdir(tempExtractDir, { encoding: "utf8" });
     logger.debug(`Root entries: ${entries.join(", ")}`);
     if (entries.length === 1) {
       const rootEntry = entries[0];
       const rootPath = pathJoin(tempExtractDir, rootEntry);
       const rootStat = await stat(rootPath);
       if (rootStat.isDirectory()) {
-        const rootContents = await readdir(rootPath);
+        const rootContents = await readdir(rootPath, { encoding: "utf8" });
         logger.debug(`Root directory '${rootEntry}' contains: ${rootContents.join(", ")}`);
         const isWrapper = this.isWrapperDirectory(rootEntry);
         logger.debug(`Is wrapper directory: ${isWrapper}`);
@@ -21499,14 +21520,14 @@ class DownloadManager {
     try {
       await import_extract_zip.default(archivePath, { dir: tempExtractDir });
       logger.debug(`Extracted ZIP to temp: ${tempExtractDir}`);
-      const entries = await readdir(tempExtractDir);
+      const entries = await readdir(tempExtractDir, { encoding: "utf8" });
       logger.debug(`Root entries: ${entries.join(", ")}`);
       if (entries.length === 1) {
         const rootEntry = entries[0];
         const rootPath = pathJoin(tempExtractDir, rootEntry);
         const rootStat = await stat(rootPath);
         if (rootStat.isDirectory()) {
-          const rootContents = await readdir(rootPath);
+          const rootContents = await readdir(rootPath, { encoding: "utf8" });
           logger.debug(`Root directory '${rootEntry}' contains: ${rootContents.join(", ")}`);
           const isWrapper = this.isWrapperDirectory(rootEntry);
           logger.debug(`Is wrapper directory: ${isWrapper}`);
@@ -21538,7 +21559,7 @@ class DownloadManager {
     const { readdir, stat, mkdir: mkdirPromise, copyFile } = await import("node:fs/promises");
     const { join: pathJoin, relative: relative2 } = await import("node:path");
     await mkdirPromise(destDir, { recursive: true });
-    const entries = await readdir(sourceDir);
+    const entries = await readdir(sourceDir, { encoding: "utf8" });
     for (const entry of entries) {
       const sourcePath = pathJoin(sourceDir, entry);
       const destPath = pathJoin(destDir, entry);
@@ -21564,7 +21585,7 @@ class DownloadManager {
     const { readdir, stat, mkdir: mkdirPromise, copyFile } = await import("node:fs/promises");
     const { join: pathJoin, relative: relative2 } = await import("node:path");
     await mkdirPromise(destDir, { recursive: true });
-    const entries = await readdir(sourceDir);
+    const entries = await readdir(sourceDir, { encoding: "utf8" });
     for (const entry of entries) {
       const sourcePath = pathJoin(sourceDir, entry);
       const destPath = pathJoin(destDir, entry);
@@ -21600,7 +21621,7 @@ class DownloadManager {
     const { join: pathJoin } = await import("node:path");
     const { constants: constants2 } = await import("node:fs");
     try {
-      const entries = await readdir(extractDir);
+      const entries = await readdir(extractDir, { encoding: "utf8" });
       logger.debug(`Extracted files: ${entries.join(", ")}`);
       if (entries.length === 0) {
         throw new ExtractionError("Extraction resulted in no files");
@@ -21811,11 +21832,12 @@ class FileMerger {
     const files = await this.getFiles(sourceDir);
     for (const file of files) {
       const relativePath = relative2(sourceDir, file);
-      if (this.ig.ignores(relativePath)) {
-        continue;
-      }
       const destPath = join4(destDir, relativePath);
       if (await import_fs_extra.pathExists(destPath)) {
+        if (this.ig.ignores(relativePath)) {
+          logger.debug(`Protected file exists but won't be overwritten: ${relativePath}`);
+          continue;
+        }
         conflicts.push(relativePath);
       }
     }
@@ -21827,12 +21849,12 @@ class FileMerger {
     let skippedCount = 0;
     for (const file of files) {
       const relativePath = relative2(sourceDir, file);
-      if (this.ig.ignores(relativePath)) {
-        logger.debug(`Skipping protected file: ${relativePath}`);
+      const destPath = join4(destDir, relativePath);
+      if (this.ig.ignores(relativePath) && await import_fs_extra.pathExists(destPath)) {
+        logger.debug(`Skipping protected file (exists in destination): ${relativePath}`);
         skippedCount++;
         continue;
       }
-      const destPath = join4(destDir, relativePath);
       await import_fs_extra.copy(file, destPath, { overwrite: true });
       copiedCount++;
     }
@@ -21840,7 +21862,7 @@ class FileMerger {
   }
   async getFiles(dir) {
     const files = [];
-    const entries = await import_fs_extra.readdir(dir);
+    const entries = await import_fs_extra.readdir(dir, { encoding: "utf8" });
     for (const entry of entries) {
       const fullPath = join4(dir, entry);
       const stats = await import_fs_extra.stat(fullPath);
@@ -22092,7 +22114,7 @@ class FileScanner {
     return files;
   }
   try {
-    const entries = await import_fs_extra3.readdir(dirPath);
+    const entries = await import_fs_extra3.readdir(dirPath, { encoding: "utf8" });
    for (const entry of entries) {
      const fullPath = join5(dirPath, entry);
      if (!FileScanner.isSafePath(basePath, fullPath)) {
package/package.json
CHANGED
package/src/lib/download.ts
CHANGED
@@ -82,6 +82,39 @@ export class DownloadManager {
     return this.ig.ignores(filePath);
   }

+  /**
+   * Decode percent-encoded file paths to handle Mojibake issues
+   *
+   * GitHub tarballs may contain percent-encoded paths (e.g., %20 for space, %C3%A9 for é)
+   * that need to be decoded to prevent character encoding corruption.
+   *
+   * @param path - File path that may contain URL-encoded characters
+   * @returns Decoded path, or original path if decoding fails
+   * @private
+   */
+  private decodeFilePath(path: string): string {
+    // Early exit for non-encoded paths (performance optimization)
+    if (!path.includes("%")) {
+      return path;
+    }
+
+    try {
+      // Only decode if path contains valid percent-encoding pattern (%XX)
+      if (/%[0-9A-F]{2}/i.test(path)) {
+        const decoded = decodeURIComponent(path);
+        logger.debug(`Decoded path: ${path} -> ${decoded}`);
+        return decoded;
+      }
+      return path;
+    } catch (error) {
+      // If decoding fails (malformed encoding), return original path
+      logger.warning(
+        `Failed to decode path "${path}": ${error instanceof Error ? error.message : "Unknown error"}`,
+      );
+      return path;
+    }
+  }
+
   /**
    * Validate path to prevent path traversal attacks (zip slip)
    */
@@ -334,10 +367,12 @@ export class DownloadManager {
       cwd: tempExtractDir,
       strip: 0, // Don't strip yet - we'll decide based on wrapper detection
       filter: (path: string) => {
+        // Decode percent-encoded paths from GitHub tarballs
+        const decodedPath = this.decodeFilePath(path);
         // Exclude unwanted files
-        const shouldInclude = !this.shouldExclude(path);
+        const shouldInclude = !this.shouldExclude(decodedPath);
         if (!shouldInclude) {
-          logger.debug(`Excluding: ${path}`);
+          logger.debug(`Excluding: ${decodedPath}`);
         }
         return shouldInclude;
       },
@@ -346,7 +381,7 @@ export class DownloadManager {
     logger.debug(`Extracted TAR.GZ to temp: ${tempExtractDir}`);

     // Apply same wrapper detection logic as zip
-    const entries = await readdir(tempExtractDir);
+    const entries = await readdir(tempExtractDir, { encoding: "utf8" });
     logger.debug(`Root entries: ${entries.join(", ")}`);

     if (entries.length === 1) {
@@ -356,7 +391,7 @@ export class DownloadManager {

       if (rootStat.isDirectory()) {
         // Check contents of root directory
-        const rootContents = await readdir(rootPath);
+        const rootContents = await readdir(rootPath, { encoding: "utf8" });
         logger.debug(`Root directory '${rootEntry}' contains: ${rootContents.join(", ")}`);

         // Only strip if root is a version/release wrapper
@@ -430,7 +465,7 @@ export class DownloadManager {
     logger.debug(`Extracted ZIP to temp: ${tempExtractDir}`);

     // Find the root directory in the zip (if any)
-    const entries = await readdir(tempExtractDir);
+    const entries = await readdir(tempExtractDir, { encoding: "utf8" });
     logger.debug(`Root entries: ${entries.join(", ")}`);

     // If there's a single root directory, check if it's a wrapper
@@ -441,7 +476,7 @@ export class DownloadManager {

       if (rootStat.isDirectory()) {
         // Check contents of root directory
-        const rootContents = await readdir(rootPath);
+        const rootContents = await readdir(rootPath, { encoding: "utf8" });
         logger.debug(`Root directory '${rootEntry}' contains: ${rootContents.join(", ")}`);

         // Only strip if root is a version/release wrapper
@@ -492,7 +527,7 @@ export class DownloadManager {

     await mkdirPromise(destDir, { recursive: true });

-    const entries = await readdir(sourceDir);
+    const entries = await readdir(sourceDir, { encoding: "utf8" });

     for (const entry of entries) {
       const sourcePath = pathJoin(sourceDir, entry);
@@ -534,7 +569,7 @@ export class DownloadManager {

     await mkdirPromise(destDir, { recursive: true });

-    const entries = await readdir(sourceDir);
+    const entries = await readdir(sourceDir, { encoding: "utf8" });

     for (const entry of entries) {
       const sourcePath = pathJoin(sourceDir, entry);
@@ -591,7 +626,7 @@ export class DownloadManager {

     try {
       // Check if extract directory exists and is not empty
-      const entries = await readdir(extractDir);
+      const entries = await readdir(extractDir, { encoding: "utf8" });
       logger.debug(`Extracted files: ${entries.join(", ")}`);

       if (entries.length === 0) {
package/src/lib/merge.ts
CHANGED
@@ -37,6 +37,7 @@ export class FileMerger {

   /**
    * Detect files that will be overwritten
+   * Protected files that exist in destination are not considered conflicts (they won't be overwritten)
    */
   private async detectConflicts(sourceDir: string, destDir: string): Promise<string[]> {
     const conflicts: string[] = [];
@@ -44,14 +45,15 @@ export class FileMerger {

     for (const file of files) {
       const relativePath = relative(sourceDir, file);
-
-      // Skip protected files
-      if (this.ig.ignores(relativePath)) {
-        continue;
-      }
-
       const destPath = join(destDir, relativePath);
+
+      // Check if file exists in destination
       if (await pathExists(destPath)) {
+        // Protected files won't be overwritten, so they're not conflicts
+        if (this.ig.ignores(relativePath)) {
+          logger.debug(`Protected file exists but won't be overwritten: ${relativePath}`);
+          continue;
+        }
         conflicts.push(relativePath);
       }
     }
@@ -69,15 +71,16 @@ export class FileMerger {

     for (const file of files) {
       const relativePath = relative(sourceDir, file);
+      const destPath = join(destDir, relativePath);

-      // Skip protected files
-      if (this.ig.ignores(relativePath)) {
-        logger.debug(`Skipping protected file: ${relativePath}`);
+      // Skip protected files ONLY if they already exist in destination
+      // This allows new protected files to be added, but prevents overwriting existing ones
+      if (this.ig.ignores(relativePath) && (await pathExists(destPath))) {
+        logger.debug(`Skipping protected file (exists in destination): ${relativePath}`);
         skippedCount++;
         continue;
       }

-      const destPath = join(destDir, relativePath);
       await copy(file, destPath, { overwrite: true });
       copiedCount++;
     }
@@ -90,7 +93,7 @@ export class FileMerger {
   */
   private async getFiles(dir: string): Promise<string[]> {
     const files: string[] = [];
-    const entries = await readdir(dir);
+    const entries = await readdir(dir, { encoding: "utf8" });

     for (const entry of entries) {
       const fullPath = join(dir, entry);
package/src/types.ts
CHANGED
@@ -105,12 +105,19 @@ export const AVAILABLE_KITS: Record<KitType, KitConfig> = {

 // Protected file patterns (files to skip during update)
 export const PROTECTED_PATTERNS = [
+  // Environment and secrets
   ".env",
   ".env.local",
   ".env.*.local",
   "*.key",
   "*.pem",
   "*.p12",
+  // User configuration files (only skip if they exist)
+  ".gitignore",
+  ".repomixignore",
+  ".mcp.json",
+  "CLAUDE.md",
+  // Dependencies and build artifacts
   "node_modules/**",
   ".git/**",
   "dist/**",
package/tests/lib/merge.test.ts
CHANGED
@@ -62,26 +62,60 @@ describe("FileMerger", () => {
     expect(existsSync(join(testDestDir, "readme.md"))).toBe(true);
   });

-  test("should skip protected files like .env", async () => {
-    // Create test files
+  test("should skip protected files like .env if they exist in destination", async () => {
+    // Create test files in source
+    await writeFile(join(testSourceDir, "normal.txt"), "normal");
+    await writeFile(join(testSourceDir, ".env"), "NEW_SECRET=new_value");
+
+    // Create existing .env in destination
+    await writeFile(join(testDestDir, ".env"), "OLD_SECRET=old_value");
+
+    await merger.merge(testSourceDir, testDestDir, true);
+
+    // Verify normal file was copied
+    expect(existsSync(join(testDestDir, "normal.txt"))).toBe(true);
+
+    // Verify .env was NOT overwritten (still has old value)
+    const envContent = await Bun.file(join(testDestDir, ".env")).text();
+    expect(envContent).toBe("OLD_SECRET=old_value");
+  });
+
+  test("should copy protected files like .env if they don't exist in destination", async () => {
+    // Create test files in source
     await writeFile(join(testSourceDir, "normal.txt"), "normal");
     await writeFile(join(testSourceDir, ".env"), "SECRET=value");

     await merger.merge(testSourceDir, testDestDir, true);

-    // Verify
+    // Verify both files were copied (no existing .env to protect)
     expect(existsSync(join(testDestDir, "normal.txt"))).toBe(true);
-    expect(existsSync(join(testDestDir, ".env"))).toBe(false);
+    expect(existsSync(join(testDestDir, ".env"))).toBe(true);
   });

-  test("should skip protected patterns like *.key", async () => {
+  test("should skip protected patterns like *.key if they exist in destination", async () => {
+    await writeFile(join(testSourceDir, "normal.txt"), "normal");
+    await writeFile(join(testSourceDir, "private.key"), "new key data");
+
+    // Create existing key file in destination
+    await writeFile(join(testDestDir, "private.key"), "old key data");
+
+    await merger.merge(testSourceDir, testDestDir, true);
+
+    expect(existsSync(join(testDestDir, "normal.txt"))).toBe(true);
+
+    // Verify key file was NOT overwritten
+    const keyContent = await Bun.file(join(testDestDir, "private.key")).text();
+    expect(keyContent).toBe("old key data");
+  });
+
+  test("should copy protected patterns like *.key if they don't exist in destination", async () => {
     await writeFile(join(testSourceDir, "normal.txt"), "normal");
     await writeFile(join(testSourceDir, "private.key"), "key data");

     await merger.merge(testSourceDir, testDestDir, true);

     expect(existsSync(join(testDestDir, "normal.txt"))).toBe(true);
-    expect(existsSync(join(testDestDir, "private.key"))).toBe(false);
+    expect(existsSync(join(testDestDir, "private.key"))).toBe(true);
   });

   test("should handle nested directories", async () => {
@@ -123,7 +157,25 @@ describe("FileMerger", () => {
     expect(existsSync(join(testDestDir, specialFileName))).toBe(true);
   });

-  test("should skip custom ignore patterns", async () => {
+  test("should skip custom ignore patterns if they exist in destination", async () => {
+    merger.addIgnorePatterns(["custom-*"]);
+
+    await writeFile(join(testSourceDir, "normal.txt"), "normal");
+    await writeFile(join(testSourceDir, "custom-ignore.txt"), "new content");
+
+    // Create existing file in destination
+    await writeFile(join(testDestDir, "custom-ignore.txt"), "old content");
+
+    await merger.merge(testSourceDir, testDestDir, true);
+
+    expect(existsSync(join(testDestDir, "normal.txt"))).toBe(true);
+
+    // Verify custom file was NOT overwritten
+    const customContent = await Bun.file(join(testDestDir, "custom-ignore.txt")).text();
+    expect(customContent).toBe("old content");
+  });
+
+  test("should copy custom ignore patterns if they don't exist in destination", async () => {
     merger.addIgnorePatterns(["custom-*"]);

     await writeFile(join(testSourceDir, "normal.txt"), "normal");
@@ -132,7 +184,7 @@ describe("FileMerger", () => {
     await merger.merge(testSourceDir, testDestDir, true);

     expect(existsSync(join(testDestDir, "normal.txt"))).toBe(true);
-    expect(existsSync(join(testDestDir, "custom-ignore.txt"))).toBe(false);
+    expect(existsSync(join(testDestDir, "custom-ignore.txt"))).toBe(true);
   });
 });

package/test-integration/demo/CLAUDE.md
REMOVED

@@ -1,34 +0,0 @@
-# CLAUDE.md
-
-This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
-
-## Role & Responsibilities
-
-Your role is to analyze user requirements, delegate tasks to appropriate sub-agents, and ensure cohesive delivery of features that meet specifications and architectural standards.
-
-## Workflows
-
-- Primary workflow: `./.claude/workflows/primary-workflow.md`
-- Development rules: `./.claude/workflows/development-rules.md`
-- Orchestration protocols: `./.claude/workflows/orchestration-protocol.md`
-- Documentation management: `./.claude/workflows/documentation-management.md`
-
-**IMPORTANT:** You must follow strictly the development rules in `./.claude/workflows/development-rules.md` file.
-**IMPORTANT:** Before you plan or proceed any implementation, always read the `./README.md` file first to get context.
-**IMPORTANT:** Sacrifice grammar for the sake of concision when writing reports.
-**IMPORTANT:** In reports, list any unresolved questions at the end, if any.
-
-## Documentation Management
-
-We keep all important docs in `./docs` folder and keep updating them, structure like below:
-
-```
-./docs
-├── project-overview-pdr.md
-├── code-standards.md
-├── codebase-summary.md
-├── design-guidelines.md
-├── deployment-guide.md
-├── system-architecture.md
-└── project-roadmap.md
-```