claudekit-cli 1.3.0 → 1.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,57 @@
+ name: Claude Code Review
+
+ on:
+ pull_request:
+ types: [opened, synchronize]
+ # Optional: Only run on specific file changes
+ # paths:
+ # - "src/**/*.ts"
+ # - "src/**/*.tsx"
+ # - "src/**/*.js"
+ # - "src/**/*.jsx"
+
+ jobs:
+ claude-review:
+ # Optional: Filter by PR author
+ # if: |
+ # github.event.pull_request.user.login == 'external-contributor' ||
+ # github.event.pull_request.user.login == 'new-developer' ||
+ # github.event.pull_request.author_association == 'FIRST_TIME_CONTRIBUTOR'
+
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ pull-requests: read
+ issues: read
+ id-token: write
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+
+ - name: Run Claude Code Review
+ id: claude-review
+ uses: anthropics/claude-code-action@v1
+ with:
+ claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
+ prompt: |
+ REPO: ${{ github.repository }}
+ PR NUMBER: ${{ github.event.pull_request.number }}
+
+ Please review this pull request and provide feedback on:
+ - Code quality and best practices
+ - Potential bugs or issues
+ - Performance considerations
+ - Security concerns
+ - Test coverage
+
+ Use the repository's CLAUDE.md for guidance on style and conventions. Be constructive and helpful in your feedback.
+
+ Use `gh pr comment` with your Bash tool to leave your review as a comment on the PR.
+
+ # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
+ # or https://docs.claude.com/en/docs/claude-code/cli-reference for available options
+ claude_args: '--allowed-tools "Bash(gh issue view:*),Bash(gh search:*),Bash(gh issue list:*),Bash(gh pr comment:*),Bash(gh pr diff:*),Bash(gh pr view:*),Bash(gh pr list:*)"'
+
@@ -0,0 +1,50 @@
+ name: Claude Code
+
+ on:
+ issue_comment:
+ types: [created]
+ pull_request_review_comment:
+ types: [created]
+ issues:
+ types: [opened, assigned]
+ pull_request_review:
+ types: [submitted]
+
+ jobs:
+ claude:
+ if: |
+ (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) ||
+ (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) ||
+ (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) ||
+ (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude')))
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ pull-requests: read
+ issues: read
+ id-token: write
+ actions: read # Required for Claude to read CI results on PRs
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 1
+
+ - name: Run Claude Code
+ id: claude
+ uses: anthropics/claude-code-action@v1
+ with:
+ claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
+
+ # This is an optional setting that allows Claude to read CI results on PRs
+ additional_permissions: |
+ actions: read
+
+ # Optional: Give a custom prompt to Claude. If this is not specified, Claude will perform the instructions specified in the comment that tagged it.
+ # prompt: 'Update the pull request description to include a summary of changes.'
+
+ # Optional: Add claude_args to customize behavior and configuration
+ # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md
+ # or https://docs.claude.com/en/docs/claude-code/cli-reference for available options
+ # claude_args: '--allowed-tools Bash(gh pr:*)'
+
package/CHANGELOG.md CHANGED
@@ -1,3 +1,17 @@
+ ## [1.4.1](https://github.com/mrgoonie/claudekit-cli/compare/v1.4.0...v1.4.1) (2025-10-21)
+
+
+ ### Bug Fixes
+
+ * handle protected files during merge ([fe90767](https://github.com/mrgoonie/claudekit-cli/commit/fe907670932fc5b39521586ef798f73cd1130180))
+
+ # [1.4.0](https://github.com/mrgoonie/claudekit-cli/compare/v1.3.0...v1.4.0) (2025-10-21)
+
+
+ ### Features
+
+ * add --exclude flag to new and update commands ([8a0d7a0](https://github.com/mrgoonie/claudekit-cli/commit/8a0d7a00de70823d4fecac26d4c7e82c4df2ab0f))
+
 # [1.3.0](https://github.com/mrgoonie/claudekit-cli/compare/v1.2.2...v1.3.0) (2025-10-21)


package/README.md CHANGED
@@ -79,6 +79,12 @@ ck new --dir my-project --kit engineer

 # Specific version
 ck new --kit engineer --version v1.0.0
+
+ # With exclude patterns
+ ck new --kit engineer --exclude "*.log" --exclude "temp/**"
+
+ # Multiple patterns
+ ck new --exclude "*.log" --exclude "*.tmp" --exclude "cache/**"
 ```

 ### Update Existing Project
@@ -94,6 +100,9 @@ ck update --kit engineer

 # Specific version
 ck update --kit engineer --version v1.0.0
+
+ # With exclude patterns
+ ck update --exclude "local-config/**" --exclude "*.local"
 ```

 ### List Available Versions
@@ -204,6 +213,95 @@ The following file patterns are protected and will not be overwritten during upd
 - `node_modules/**`, `.git/**`
 - `dist/**`, `build/**`

+ ## Excluding Files
+
+ Use the `--exclude` flag to skip specific files or directories during download and extraction. This is useful for:
+
+ - Excluding temporary or cache directories
+ - Skipping log files or debug output
+ - Omitting files you want to manage manually
+ - Avoiding unnecessary large files
+
+ ### Basic Usage
+
+ ```bash
+ # Exclude log files
+ ck new --exclude "*.log"
+
+ # Exclude multiple patterns
+ ck new --exclude "*.log" --exclude "temp/**" --exclude "cache/**"
+
+ # Common exclude patterns for updates
+ ck update --exclude "node_modules/**" --exclude "dist/**" --exclude ".env.*"
+ ```
+
+ ### Supported Glob Patterns
+
+ The `--exclude` flag accepts standard glob patterns:
+
+ - `*` - Match any characters except `/` (e.g., `*.log` matches all log files)
+ - `**` - Match any characters including `/` (e.g., `temp/**` matches all files in temp directory)
+ - `?` - Match single character (e.g., `file?.txt` matches `file1.txt`, `file2.txt`)
+ - `[abc]` - Match characters in brackets (e.g., `[Tt]emp` matches `Temp` or `temp`)
+ - `{a,b}` - Match alternatives (e.g., `*.{log,tmp}` matches `*.log` and `*.tmp`)
+
+ ### Common Exclude Patterns
+
+ ```bash
+ # Exclude all log files
+ --exclude "*.log" --exclude "**/*.log"
+
+ # Exclude temporary directories
+ --exclude "tmp/**" --exclude "temp/**" --exclude ".tmp/**"
+
+ # Exclude cache directories
+ --exclude "cache/**" --exclude ".cache/**" --exclude "**/.cache/**"
+
+ # Exclude build artifacts
+ --exclude "dist/**" --exclude "build/**" --exclude "out/**"
+
+ # Exclude local configuration
+ --exclude "*.local" --exclude "local/**" --exclude ".env.local"
+
+ # Exclude IDE/editor files
+ --exclude ".vscode/**" --exclude ".idea/**" --exclude "*.swp"
+ ```
+
+ ### Important Notes
+
+ **Additive Behavior:**
+ - User exclude patterns are ADDED to the default protected patterns
+ - They do not replace the built-in protections
+ - All patterns work together to determine which files to skip
+
+ **Security Restrictions:**
+ - Absolute paths (starting with `/`) are not allowed
+ - Path traversal patterns (containing `..`) are not allowed
+ - Patterns must be between 1-500 characters
+ - These restrictions prevent accidental or malicious file system access
+
+ **Pattern Matching:**
+ - Patterns are case-sensitive on Linux/macOS
+ - Patterns are case-insensitive on Windows
+ - Patterns are applied during both extraction and merge phases
+ - Excluded files are never written to disk, saving time and space
+
+ **Examples of Invalid Patterns:**
+
+ ```bash
+ # ❌ Absolute paths not allowed
+ ck new --exclude "/etc/passwd"
+
+ # ❌ Path traversal not allowed
+ ck new --exclude "../../secret"
+
+ # ❌ Empty patterns not allowed
+ ck new --exclude ""
+
+ # ✅ Correct way to exclude root-level files
+ ck new --exclude "secret.txt" --exclude "config.local.json"
+ ```
+
 ### Custom .claude Files

 When updating a project, the CLI automatically preserves your custom `.claude/` files that don't exist in the new release package. This allows you to maintain:
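The additive behavior documented in the new README section can be sketched with the `ignore` package the CLI already bundles for pattern matching; the pattern lists below are illustrative, not the CLI's exact defaults.

```ts
import ignore from "ignore";

// Built-in protected patterns (abbreviated) plus user --exclude patterns.
// User patterns are appended, never substituted for the defaults.
const protectedPatterns = [".env", "*.key", "node_modules/**", "dist/**"];
const userExcludes = ["*.log", "temp/**"];

const ig = ignore().add([...protectedPatterns, ...userExcludes]);

ig.ignores("temp/cache.txt"); // true  - user pattern "temp/**"
ig.ignores(".env");           // true  - built-in protection still applies
ig.ignores("src/index.ts");   // false - copied as usual
```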
package/dist/index.js CHANGED
@@ -6400,6 +6400,66 @@ class CAC extends EventEmitter {
6400
6400
  }
6401
6401
  }
6402
6402
  var cac = (name = "") => new CAC(name);
6403
+ // package.json
6404
+ var package_default = {
6405
+ name: "claudekit-cli",
6406
+ version: "1.4.0",
6407
+ description: "CLI tool for bootstrapping and updating ClaudeKit projects",
6408
+ type: "module",
6409
+ bin: {
6410
+ ck: "./dist/index.js"
6411
+ },
6412
+ scripts: {
6413
+ dev: "bun run src/index.ts >> logs.txt 2>&1",
6414
+ build: "bun build src/index.ts --outdir dist --target node --external keytar --external @octokit/rest >> logs.txt 2>&1",
6415
+ compile: "bun build src/index.ts --compile --outfile ck >> logs.txt 2>&1",
6416
+ test: "bun test >> logs.txt 2>&1",
6417
+ "test:watch": "bun test --watch >> logs.txt 2>&1",
6418
+ lint: "biome check . >> logs.txt 2>&1",
6419
+ format: "biome format --write . >> logs.txt 2>&1",
6420
+ typecheck: "tsc --noEmit >> logs.txt 2>&1"
6421
+ },
6422
+ keywords: [
6423
+ "cli",
6424
+ "claudekit",
6425
+ "boilerplate",
6426
+ "bootstrap",
6427
+ "template"
6428
+ ],
6429
+ author: "ClaudeKit",
6430
+ license: "MIT",
6431
+ engines: {
6432
+ bun: ">=1.0.0"
6433
+ },
6434
+ dependencies: {
6435
+ "@clack/prompts": "^0.7.0",
6436
+ "@octokit/rest": "^22.0.0",
6437
+ cac: "^6.7.14",
6438
+ "cli-progress": "^3.12.0",
6439
+ "extract-zip": "^2.0.1",
6440
+ "fs-extra": "^11.2.0",
6441
+ ignore: "^5.3.2",
6442
+ keytar: "^7.9.0",
6443
+ ora: "^9.0.0",
6444
+ picocolors: "^1.1.1",
6445
+ tar: "^7.4.3",
6446
+ tmp: "^0.2.3",
6447
+ zod: "^3.23.8"
6448
+ },
6449
+ devDependencies: {
6450
+ "@biomejs/biome": "^1.9.4",
6451
+ "@semantic-release/changelog": "^6.0.3",
6452
+ "@semantic-release/git": "^10.0.1",
6453
+ "@types/bun": "latest",
6454
+ "@types/cli-progress": "^3.11.6",
6455
+ "@types/fs-extra": "^11.0.4",
6456
+ "@types/node": "^22.10.1",
6457
+ "@types/tar": "^6.1.13",
6458
+ "@types/tmp": "^0.2.6",
6459
+ "semantic-release": "^24.2.0",
6460
+ typescript: "^5.7.2"
6461
+ }
6462
+ };
6403
6463
 
6404
6464
  // src/commands/new.ts
6405
6465
  var import_fs_extra2 = __toESM(require_lib(), 1);
@@ -10922,16 +10982,19 @@ var coerce = {
10922
10982
  var NEVER = INVALID;
10923
10983
  // src/types.ts
10924
10984
  var KitType = exports_external.enum(["engineer", "marketing"]);
10985
+ var ExcludePatternSchema = exports_external.string().trim().min(1, "Exclude pattern cannot be empty").max(500, "Exclude pattern too long").refine((val) => !val.startsWith("/"), "Absolute paths not allowed in exclude patterns").refine((val) => !val.includes(".."), "Path traversal not allowed in exclude patterns");
10925
10986
  var NewCommandOptionsSchema = exports_external.object({
10926
10987
  dir: exports_external.string().default("."),
10927
10988
  kit: KitType.optional(),
10928
10989
  version: exports_external.string().optional(),
10929
- force: exports_external.boolean().default(false)
10990
+ force: exports_external.boolean().default(false),
10991
+ exclude: exports_external.array(ExcludePatternSchema).optional().default([])
10930
10992
  });
10931
10993
  var UpdateCommandOptionsSchema = exports_external.object({
10932
10994
  dir: exports_external.string().default("."),
10933
10995
  kit: KitType.optional(),
10934
- version: exports_external.string().optional()
10996
+ version: exports_external.string().optional(),
10997
+ exclude: exports_external.array(ExcludePatternSchema).optional().default([])
10935
10998
  });
10936
10999
  var VersionCommandOptionsSchema = exports_external.object({
10937
11000
  kit: KitType.optional(),
@@ -10993,6 +11056,10 @@ var PROTECTED_PATTERNS = [
10993
11056
  "*.key",
10994
11057
  "*.pem",
10995
11058
  "*.p12",
11059
+ ".gitignore",
11060
+ ".repomixignore",
11061
+ ".mcp.json",
11062
+ "CLAUDE.md",
10996
11063
  "node_modules/**",
10997
11064
  ".git/**",
10998
11065
  "dist/**",
@@ -21213,9 +21280,37 @@ class DownloadManager {
21213
21280
  "*.log"
21214
21281
  ];
21215
21282
  totalExtractedSize = 0;
21283
+ ig;
21284
+ userExcludePatterns = [];
21285
+ constructor() {
21286
+ this.ig = import_ignore.default().add(DownloadManager.EXCLUDE_PATTERNS);
21287
+ }
21288
+ setExcludePatterns(patterns) {
21289
+ this.userExcludePatterns = patterns;
21290
+ this.ig = import_ignore.default().add([...DownloadManager.EXCLUDE_PATTERNS, ...this.userExcludePatterns]);
21291
+ if (patterns.length > 0) {
21292
+ logger.info(`Added ${patterns.length} custom exclude pattern(s)`);
21293
+ patterns.forEach((p) => logger.debug(` - ${p}`));
21294
+ }
21295
+ }
21216
21296
  shouldExclude(filePath) {
21217
- const ig = import_ignore.default().add(DownloadManager.EXCLUDE_PATTERNS);
21218
- return ig.ignores(filePath);
21297
+ return this.ig.ignores(filePath);
21298
+ }
21299
+ decodeFilePath(path8) {
21300
+ if (!path8.includes("%")) {
21301
+ return path8;
21302
+ }
21303
+ try {
21304
+ if (/%[0-9A-F]{2}/i.test(path8)) {
21305
+ const decoded = decodeURIComponent(path8);
21306
+ logger.debug(`Decoded path: ${path8} -> ${decoded}`);
21307
+ return decoded;
21308
+ }
21309
+ return path8;
21310
+ } catch (error2) {
21311
+ logger.warning(`Failed to decode path "${path8}": ${error2 instanceof Error ? error2.message : "Unknown error"}`);
21312
+ return path8;
21313
+ }
21219
21314
  }
21220
21315
  isPathSafe(basePath, targetPath) {
21221
21316
  const resolvedBase = resolve(basePath);
@@ -21368,22 +21463,23 @@ class DownloadManager {
21368
21463
  cwd: tempExtractDir,
21369
21464
  strip: 0,
21370
21465
  filter: (path8) => {
21371
- const shouldInclude = !this.shouldExclude(path8);
21466
+ const decodedPath = this.decodeFilePath(path8);
21467
+ const shouldInclude = !this.shouldExclude(decodedPath);
21372
21468
  if (!shouldInclude) {
21373
- logger.debug(`Excluding: ${path8}`);
21469
+ logger.debug(`Excluding: ${decodedPath}`);
21374
21470
  }
21375
21471
  return shouldInclude;
21376
21472
  }
21377
21473
  });
21378
21474
  logger.debug(`Extracted TAR.GZ to temp: ${tempExtractDir}`);
21379
- const entries = await readdir(tempExtractDir);
21475
+ const entries = await readdir(tempExtractDir, { encoding: "utf8" });
21380
21476
  logger.debug(`Root entries: ${entries.join(", ")}`);
21381
21477
  if (entries.length === 1) {
21382
21478
  const rootEntry = entries[0];
21383
21479
  const rootPath = pathJoin(tempExtractDir, rootEntry);
21384
21480
  const rootStat = await stat(rootPath);
21385
21481
  if (rootStat.isDirectory()) {
21386
- const rootContents = await readdir(rootPath);
21482
+ const rootContents = await readdir(rootPath, { encoding: "utf8" });
21387
21483
  logger.debug(`Root directory '${rootEntry}' contains: ${rootContents.join(", ")}`);
21388
21484
  const isWrapper = this.isWrapperDirectory(rootEntry);
21389
21485
  logger.debug(`Is wrapper directory: ${isWrapper}`);
@@ -21424,14 +21520,14 @@ class DownloadManager {
21424
21520
  try {
21425
21521
  await import_extract_zip.default(archivePath, { dir: tempExtractDir });
21426
21522
  logger.debug(`Extracted ZIP to temp: ${tempExtractDir}`);
21427
- const entries = await readdir(tempExtractDir);
21523
+ const entries = await readdir(tempExtractDir, { encoding: "utf8" });
21428
21524
  logger.debug(`Root entries: ${entries.join(", ")}`);
21429
21525
  if (entries.length === 1) {
21430
21526
  const rootEntry = entries[0];
21431
21527
  const rootPath = pathJoin(tempExtractDir, rootEntry);
21432
21528
  const rootStat = await stat(rootPath);
21433
21529
  if (rootStat.isDirectory()) {
21434
- const rootContents = await readdir(rootPath);
21530
+ const rootContents = await readdir(rootPath, { encoding: "utf8" });
21435
21531
  logger.debug(`Root directory '${rootEntry}' contains: ${rootContents.join(", ")}`);
21436
21532
  const isWrapper = this.isWrapperDirectory(rootEntry);
21437
21533
  logger.debug(`Is wrapper directory: ${isWrapper}`);
@@ -21463,7 +21559,7 @@ class DownloadManager {
21463
21559
  const { readdir, stat, mkdir: mkdirPromise, copyFile } = await import("node:fs/promises");
21464
21560
  const { join: pathJoin, relative: relative2 } = await import("node:path");
21465
21561
  await mkdirPromise(destDir, { recursive: true });
21466
- const entries = await readdir(sourceDir);
21562
+ const entries = await readdir(sourceDir, { encoding: "utf8" });
21467
21563
  for (const entry of entries) {
21468
21564
  const sourcePath = pathJoin(sourceDir, entry);
21469
21565
  const destPath = pathJoin(destDir, entry);
@@ -21489,7 +21585,7 @@ class DownloadManager {
21489
21585
  const { readdir, stat, mkdir: mkdirPromise, copyFile } = await import("node:fs/promises");
21490
21586
  const { join: pathJoin, relative: relative2 } = await import("node:path");
21491
21587
  await mkdirPromise(destDir, { recursive: true });
21492
- const entries = await readdir(sourceDir);
21588
+ const entries = await readdir(sourceDir, { encoding: "utf8" });
21493
21589
  for (const entry of entries) {
21494
21590
  const sourcePath = pathJoin(sourceDir, entry);
21495
21591
  const destPath = pathJoin(destDir, entry);
@@ -21525,7 +21621,7 @@ class DownloadManager {
21525
21621
  const { join: pathJoin } = await import("node:path");
21526
21622
  const { constants: constants2 } = await import("node:fs");
21527
21623
  try {
21528
- const entries = await readdir(extractDir);
21624
+ const entries = await readdir(extractDir, { encoding: "utf8" });
21529
21625
  logger.debug(`Extracted files: ${entries.join(", ")}`);
21530
21626
  if (entries.length === 0) {
21531
21627
  throw new ExtractionError("Extraction resulted in no files");
@@ -21736,11 +21832,12 @@ class FileMerger {
21736
21832
  const files = await this.getFiles(sourceDir);
21737
21833
  for (const file of files) {
21738
21834
  const relativePath = relative2(sourceDir, file);
21739
- if (this.ig.ignores(relativePath)) {
21740
- continue;
21741
- }
21742
21835
  const destPath = join4(destDir, relativePath);
21743
21836
  if (await import_fs_extra.pathExists(destPath)) {
21837
+ if (this.ig.ignores(relativePath)) {
21838
+ logger.debug(`Protected file exists but won't be overwritten: ${relativePath}`);
21839
+ continue;
21840
+ }
21744
21841
  conflicts.push(relativePath);
21745
21842
  }
21746
21843
  }
@@ -21752,12 +21849,12 @@ class FileMerger {
21752
21849
  let skippedCount = 0;
21753
21850
  for (const file of files) {
21754
21851
  const relativePath = relative2(sourceDir, file);
21755
- if (this.ig.ignores(relativePath)) {
21756
- logger.debug(`Skipping protected file: ${relativePath}`);
21852
+ const destPath = join4(destDir, relativePath);
21853
+ if (this.ig.ignores(relativePath) && await import_fs_extra.pathExists(destPath)) {
21854
+ logger.debug(`Skipping protected file (exists in destination): ${relativePath}`);
21757
21855
  skippedCount++;
21758
21856
  continue;
21759
21857
  }
21760
- const destPath = join4(destDir, relativePath);
21761
21858
  await import_fs_extra.copy(file, destPath, { overwrite: true });
21762
21859
  copiedCount++;
21763
21860
  }
@@ -21765,7 +21862,7 @@ class FileMerger {
21765
21862
  }
21766
21863
  async getFiles(dir) {
21767
21864
  const files = [];
21768
- const entries = await import_fs_extra.readdir(dir);
21865
+ const entries = await import_fs_extra.readdir(dir, { encoding: "utf8" });
21769
21866
  for (const entry of entries) {
21770
21867
  const fullPath = join4(dir, entry);
21771
21868
  const stats = await import_fs_extra.stat(fullPath);
@@ -21950,6 +22047,9 @@ async function newCommand(options) {
21950
22047
  logger.info(`Download source: ${downloadInfo.type}`);
21951
22048
  logger.debug(`Download URL: ${downloadInfo.url}`);
21952
22049
  const downloadManager = new DownloadManager;
22050
+ if (validOptions.exclude && validOptions.exclude.length > 0) {
22051
+ downloadManager.setExcludePatterns(validOptions.exclude);
22052
+ }
21953
22053
  const tempDir = await downloadManager.createTempDir();
21954
22054
  const { token } = await AuthManager.getToken();
21955
22055
  let archivePath;
@@ -21985,6 +22085,9 @@ async function newCommand(options) {
21985
22085
  await downloadManager.extractArchive(archivePath, extractDir);
21986
22086
  await downloadManager.validateExtraction(extractDir);
21987
22087
  const merger = new FileMerger;
22088
+ if (validOptions.exclude && validOptions.exclude.length > 0) {
22089
+ merger.addIgnorePatterns(validOptions.exclude);
22090
+ }
21988
22091
  await merger.merge(extractDir, resolvedDir, true);
21989
22092
  prompts.outro(`✨ Project created successfully at ${resolvedDir}`);
21990
22093
  prompts.note(`cd ${targetDir !== "." ? targetDir : "into the directory"}
@@ -22011,7 +22114,7 @@ class FileScanner {
22011
22114
  return files;
22012
22115
  }
22013
22116
  try {
22014
- const entries = await import_fs_extra3.readdir(dirPath);
22117
+ const entries = await import_fs_extra3.readdir(dirPath, { encoding: "utf8" });
22015
22118
  for (const entry of entries) {
22016
22119
  const fullPath = join5(dirPath, entry);
22017
22120
  if (!FileScanner.isSafePath(basePath, fullPath)) {
@@ -22107,6 +22210,9 @@ async function updateCommand(options) {
22107
22210
  logger.info(`Download source: ${downloadInfo.type}`);
22108
22211
  logger.debug(`Download URL: ${downloadInfo.url}`);
22109
22212
  const downloadManager = new DownloadManager;
22213
+ if (validOptions.exclude && validOptions.exclude.length > 0) {
22214
+ downloadManager.setExcludePatterns(validOptions.exclude);
22215
+ }
22110
22216
  const tempDir = await downloadManager.createTempDir();
22111
22217
  const { token } = await AuthManager.getToken();
22112
22218
  let archivePath;
@@ -22148,6 +22254,9 @@ async function updateCommand(options) {
22148
22254
  merger.addIgnorePatterns(customClaudeFiles);
22149
22255
  logger.success(`Protected ${customClaudeFiles.length} custom .claude file(s)`);
22150
22256
  }
22257
+ if (validOptions.exclude && validOptions.exclude.length > 0) {
22258
+ merger.addIgnorePatterns(validOptions.exclude);
22259
+ }
22151
22260
  await merger.merge(extractDir, resolvedDir, false);
22152
22261
  prompts.outro(`✨ Project updated successfully at ${resolvedDir}`);
22153
22262
  const protectedNote = customClaudeFiles.length > 0 ? `Your project has been updated with the latest version.
@@ -22354,10 +22463,6 @@ class Logger2 {
22354
22463
  }
22355
22464
  }
22356
22465
  var logger2 = new Logger2;
22357
- // src/version.json
22358
- var version_default = {
22359
- version: "1.2.1"
22360
- };
22361
22466
 
22362
22467
  // src/index.ts
22363
22468
  if (process.stdout.setEncoding) {
@@ -22366,14 +22471,20 @@ if (process.stdout.setEncoding) {
22366
22471
  if (process.stderr.setEncoding) {
22367
22472
  process.stderr.setEncoding("utf8");
22368
22473
  }
22369
- var packageVersion = version_default.version;
22474
+ var packageVersion = package_default.version;
22370
22475
  var cli = cac("ck");
22371
22476
  cli.option("--verbose, -v", "Enable verbose logging for debugging");
22372
22477
  cli.option("--log-file <path>", "Write logs to file");
22373
- cli.command("new", "Bootstrap a new ClaudeKit project").option("--dir <dir>", "Target directory (default: .)").option("--kit <kit>", "Kit to use (engineer, marketing)").option("--version <version>", "Specific version to download (default: latest)").option("--force", "Overwrite existing files without confirmation").action(async (options) => {
22478
+ cli.command("new", "Bootstrap a new ClaudeKit project").option("--dir <dir>", "Target directory (default: .)").option("--kit <kit>", "Kit to use (engineer, marketing)").option("--version <version>", "Specific version to download (default: latest)").option("--force", "Overwrite existing files without confirmation").option("--exclude <pattern>", "Exclude files matching glob pattern (can be used multiple times)").action(async (options) => {
22479
+ if (options.exclude && !Array.isArray(options.exclude)) {
22480
+ options.exclude = [options.exclude];
22481
+ }
22374
22482
  await newCommand(options);
22375
22483
  });
22376
- cli.command("update", "Update existing ClaudeKit project").option("--dir <dir>", "Target directory (default: .)").option("--kit <kit>", "Kit to use (engineer, marketing)").option("--version <version>", "Specific version to download (default: latest)").action(async (options) => {
22484
+ cli.command("update", "Update existing ClaudeKit project").option("--dir <dir>", "Target directory (default: .)").option("--kit <kit>", "Kit to use (engineer, marketing)").option("--version <version>", "Specific version to download (default: latest)").option("--exclude <pattern>", "Exclude files matching glob pattern (can be used multiple times)").action(async (options) => {
22485
+ if (options.exclude && !Array.isArray(options.exclude)) {
22486
+ options.exclude = [options.exclude];
22487
+ }
22377
22488
  await updateCommand(options);
22378
22489
  });
22379
22490
  cli.command("versions", "List available versions of ClaudeKit repositories").option("--kit <kit>", "Filter by specific kit (engineer, marketing)").option("--limit <limit>", "Number of releases to show (default: 30)").option("--all", "Show all releases including prereleases").action(async (options) => {
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "claudekit-cli",
- "version": "1.3.0",
+ "version": "1.4.1",
 "description": "CLI tool for bootstrapping and updating ClaudeKit projects",
 "type": "module",
 "bin": {
package/src/commands/new.ts CHANGED
@@ -110,6 +110,12 @@ export async function newCommand(options: NewCommandOptions): Promise<void> {

 // Download asset
 const downloadManager = new DownloadManager();
+
+ // Apply user exclude patterns if provided
+ if (validOptions.exclude && validOptions.exclude.length > 0) {
+ downloadManager.setExcludePatterns(validOptions.exclude);
+ }
+
 const tempDir = await downloadManager.createTempDir();

 // Get authentication token for API requests
@@ -157,6 +163,12 @@ export async function newCommand(options: NewCommandOptions): Promise<void> {

 // Copy files to target directory
 const merger = new FileMerger();
+
+ // Apply user exclude patterns if provided
+ if (validOptions.exclude && validOptions.exclude.length > 0) {
+ merger.addIgnorePatterns(validOptions.exclude);
+ }
+
 await merger.merge(extractDir, resolvedDir, true); // Skip confirmation for new projects

 prompts.outro(`✨ Project created successfully at ${resolvedDir}`);
package/src/commands/update.ts CHANGED
@@ -83,6 +83,12 @@ export async function updateCommand(options: UpdateCommandOptions): Promise<void

 // Download asset
 const downloadManager = new DownloadManager();
+
+ // Apply user exclude patterns if provided
+ if (validOptions.exclude && validOptions.exclude.length > 0) {
+ downloadManager.setExcludePatterns(validOptions.exclude);
+ }
+
 const tempDir = await downloadManager.createTempDir();

 // Get authentication token for API requests
@@ -141,6 +147,11 @@ export async function updateCommand(options: UpdateCommandOptions): Promise<void
 logger.success(`Protected ${customClaudeFiles.length} custom .claude file(s)`);
 }

+ // Apply user exclude patterns if provided
+ if (validOptions.exclude && validOptions.exclude.length > 0) {
+ merger.addIgnorePatterns(validOptions.exclude);
+ }
+
 await merger.merge(extractDir, resolvedDir, false); // Show confirmation for updates

 prompts.outro(`✨ Project updated successfully at ${resolvedDir}`);
package/src/index.ts CHANGED
@@ -1,11 +1,11 @@
 #!/usr/bin/env bun

 import { cac } from "cac";
+ import packageInfo from "../package.json" assert { type: "json" };
 import { newCommand } from "./commands/new.js";
 import { updateCommand } from "./commands/update.js";
 import { versionCommand } from "./commands/version.js";
 import { logger } from "./utils/logger.js";
- import versionInfo from "./version.json" assert { type: "json" };

 // Set proper output encoding to prevent unicode rendering issues
 if (process.stdout.setEncoding) {
@@ -15,7 +15,7 @@ if (process.stderr.setEncoding) {
 process.stderr.setEncoding("utf8");
 }

- const packageVersion = versionInfo.version;
+ const packageVersion = packageInfo.version;

 const cli = cac("ck");

@@ -30,7 +30,12 @@ cli
 .option("--kit <kit>", "Kit to use (engineer, marketing)")
 .option("--version <version>", "Specific version to download (default: latest)")
 .option("--force", "Overwrite existing files without confirmation")
+ .option("--exclude <pattern>", "Exclude files matching glob pattern (can be used multiple times)")
 .action(async (options) => {
+ // Normalize exclude to always be an array (CAC may pass string for single value)
+ if (options.exclude && !Array.isArray(options.exclude)) {
+ options.exclude = [options.exclude];
+ }
 await newCommand(options);
 });

@@ -40,7 +45,12 @@ cli
 .option("--dir <dir>", "Target directory (default: .)")
 .option("--kit <kit>", "Kit to use (engineer, marketing)")
 .option("--version <version>", "Specific version to download (default: latest)")
+ .option("--exclude <pattern>", "Exclude files matching glob pattern (can be used multiple times)")
 .action(async (options) => {
+ // Normalize exclude to always be an array (CAC may pass string for single value)
+ if (options.exclude && !Array.isArray(options.exclude)) {
+ options.exclude = [options.exclude];
+ }
 await updateCommand(options);
 });

@@ -41,12 +41,78 @@ export class DownloadManager {
41
41
  */
42
42
  private totalExtractedSize = 0;
43
43
 
44
+ /**
45
+ * Instance-level ignore object with combined default and user patterns
46
+ */
47
+ private ig: ReturnType<typeof ignore>;
48
+
49
+ /**
50
+ * Store user-defined exclude patterns
51
+ */
52
+ private userExcludePatterns: string[] = [];
53
+
54
+ /**
55
+ * Initialize DownloadManager with default exclude patterns
56
+ */
57
+ constructor() {
58
+ // Initialize ignore with default patterns
59
+ this.ig = ignore().add(DownloadManager.EXCLUDE_PATTERNS);
60
+ }
61
+
62
+ /**
63
+ * Set additional user-defined exclude patterns
64
+ * These are added to (not replace) the default EXCLUDE_PATTERNS
65
+ */
66
+ setExcludePatterns(patterns: string[]): void {
67
+ this.userExcludePatterns = patterns;
68
+ // Reinitialize ignore with both default and user patterns
69
+ this.ig = ignore().add([...DownloadManager.EXCLUDE_PATTERNS, ...this.userExcludePatterns]);
70
+
71
+ if (patterns.length > 0) {
72
+ logger.info(`Added ${patterns.length} custom exclude pattern(s)`);
73
+ patterns.forEach((p) => logger.debug(` - ${p}`));
74
+ }
75
+ }
76
+
44
77
  /**
45
78
  * Check if file path should be excluded
79
+ * Uses instance-level ignore with both default and user patterns
46
80
  */
47
81
  private shouldExclude(filePath: string): boolean {
48
- const ig = ignore().add(DownloadManager.EXCLUDE_PATTERNS);
49
- return ig.ignores(filePath);
82
+ return this.ig.ignores(filePath);
83
+ }
84
+
85
+ /**
86
+ * Decode percent-encoded file paths to handle Mojibake issues
87
+ *
88
+ * GitHub tarballs may contain percent-encoded paths (e.g., %20 for space, %C3%A9 for é)
89
+ * that need to be decoded to prevent character encoding corruption.
90
+ *
91
+ * @param path - File path that may contain URL-encoded characters
92
+ * @returns Decoded path, or original path if decoding fails
93
+ * @private
94
+ */
95
+ private decodeFilePath(path: string): string {
96
+ // Early exit for non-encoded paths (performance optimization)
97
+ if (!path.includes("%")) {
98
+ return path;
99
+ }
100
+
101
+ try {
102
+ // Only decode if path contains valid percent-encoding pattern (%XX)
103
+ if (/%[0-9A-F]{2}/i.test(path)) {
104
+ const decoded = decodeURIComponent(path);
105
+ logger.debug(`Decoded path: ${path} -> ${decoded}`);
106
+ return decoded;
107
+ }
108
+ return path;
109
+ } catch (error) {
110
+ // If decoding fails (malformed encoding), return original path
111
+ logger.warning(
112
+ `Failed to decode path "${path}": ${error instanceof Error ? error.message : "Unknown error"}`,
113
+ );
114
+ return path;
115
+ }
50
116
  }
51
117
 
52
118
  /**
@@ -301,10 +367,12 @@ export class DownloadManager {
301
367
  cwd: tempExtractDir,
302
368
  strip: 0, // Don't strip yet - we'll decide based on wrapper detection
303
369
  filter: (path: string) => {
370
+ // Decode percent-encoded paths from GitHub tarballs
371
+ const decodedPath = this.decodeFilePath(path);
304
372
  // Exclude unwanted files
305
- const shouldInclude = !this.shouldExclude(path);
373
+ const shouldInclude = !this.shouldExclude(decodedPath);
306
374
  if (!shouldInclude) {
307
- logger.debug(`Excluding: ${path}`);
375
+ logger.debug(`Excluding: ${decodedPath}`);
308
376
  }
309
377
  return shouldInclude;
310
378
  },
@@ -313,7 +381,7 @@ export class DownloadManager {
313
381
  logger.debug(`Extracted TAR.GZ to temp: ${tempExtractDir}`);
314
382
 
315
383
  // Apply same wrapper detection logic as zip
316
- const entries = await readdir(tempExtractDir);
384
+ const entries = await readdir(tempExtractDir, { encoding: "utf8" });
317
385
  logger.debug(`Root entries: ${entries.join(", ")}`);
318
386
 
319
387
  if (entries.length === 1) {
@@ -323,7 +391,7 @@ export class DownloadManager {
323
391
 
324
392
  if (rootStat.isDirectory()) {
325
393
  // Check contents of root directory
326
- const rootContents = await readdir(rootPath);
394
+ const rootContents = await readdir(rootPath, { encoding: "utf8" });
327
395
  logger.debug(`Root directory '${rootEntry}' contains: ${rootContents.join(", ")}`);
328
396
 
329
397
  // Only strip if root is a version/release wrapper
@@ -397,7 +465,7 @@ export class DownloadManager {
397
465
  logger.debug(`Extracted ZIP to temp: ${tempExtractDir}`);
398
466
 
399
467
  // Find the root directory in the zip (if any)
400
- const entries = await readdir(tempExtractDir);
468
+ const entries = await readdir(tempExtractDir, { encoding: "utf8" });
401
469
  logger.debug(`Root entries: ${entries.join(", ")}`);
402
470
 
403
471
  // If there's a single root directory, check if it's a wrapper
@@ -408,7 +476,7 @@ export class DownloadManager {
408
476
 
409
477
  if (rootStat.isDirectory()) {
410
478
  // Check contents of root directory
411
- const rootContents = await readdir(rootPath);
479
+ const rootContents = await readdir(rootPath, { encoding: "utf8" });
412
480
  logger.debug(`Root directory '${rootEntry}' contains: ${rootContents.join(", ")}`);
413
481
 
414
482
  // Only strip if root is a version/release wrapper
@@ -459,7 +527,7 @@ export class DownloadManager {
459
527
 
460
528
  await mkdirPromise(destDir, { recursive: true });
461
529
 
462
- const entries = await readdir(sourceDir);
530
+ const entries = await readdir(sourceDir, { encoding: "utf8" });
463
531
 
464
532
  for (const entry of entries) {
465
533
  const sourcePath = pathJoin(sourceDir, entry);
@@ -501,7 +569,7 @@ export class DownloadManager {
501
569
 
502
570
  await mkdirPromise(destDir, { recursive: true });
503
571
 
504
- const entries = await readdir(sourceDir);
572
+ const entries = await readdir(sourceDir, { encoding: "utf8" });
505
573
 
506
574
  for (const entry of entries) {
507
575
  const sourcePath = pathJoin(sourceDir, entry);
@@ -558,7 +626,7 @@ export class DownloadManager {
558
626
 
559
627
  try {
560
628
  // Check if extract directory exists and is not empty
561
- const entries = await readdir(extractDir);
629
+ const entries = await readdir(extractDir, { encoding: "utf8" });
562
630
  logger.debug(`Extracted files: ${entries.join(", ")}`);
563
631
 
564
632
  if (entries.length === 0) {
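The `decodeFilePath` helper added in the DownloadManager changes above only rewrites entries that contain a plausible `%XX` escape and falls back to the original string when decoding throws. A minimal sketch of that rule, with illustrative inputs:

```ts
// Sketch of the decoding rule applied to GitHub tarball entries (not the exact implementation).
const decodeFilePath = (path: string): string => {
  if (!path.includes("%")) return path; // fast path: nothing looks encoded
  try {
    // Only decode when a valid percent-escape (%XX) is present
    return /%[0-9A-F]{2}/i.test(path) ? decodeURIComponent(path) : path;
  } catch {
    return path; // malformed encoding: keep the original name
  }
};

decodeFilePath("docs/r%C3%A9sum%C3%A9.md"); // "docs/résumé.md"
decodeFilePath("50%_done.txt");             // unchanged: "%_d" is not a valid %XX escape
decodeFilePath("plain/path.ts");            // unchanged: early exit
```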
package/src/lib/merge.ts CHANGED
@@ -37,6 +37,7 @@ export class FileMerger {

 /**
 * Detect files that will be overwritten
+ * Protected files that exist in destination are not considered conflicts (they won't be overwritten)
 */
 private async detectConflicts(sourceDir: string, destDir: string): Promise<string[]> {
 const conflicts: string[] = [];
@@ -44,14 +45,15 @@ export class FileMerger {

 for (const file of files) {
 const relativePath = relative(sourceDir, file);
-
- // Skip protected files
- if (this.ig.ignores(relativePath)) {
- continue;
- }
-
 const destPath = join(destDir, relativePath);
+
+ // Check if file exists in destination
 if (await pathExists(destPath)) {
+ // Protected files won't be overwritten, so they're not conflicts
+ if (this.ig.ignores(relativePath)) {
+ logger.debug(`Protected file exists but won't be overwritten: ${relativePath}`);
+ continue;
+ }
 conflicts.push(relativePath);
 }
 }
@@ -69,15 +71,16 @@ export class FileMerger {

 for (const file of files) {
 const relativePath = relative(sourceDir, file);
+ const destPath = join(destDir, relativePath);

- // Skip protected files
- if (this.ig.ignores(relativePath)) {
- logger.debug(`Skipping protected file: ${relativePath}`);
+ // Skip protected files ONLY if they already exist in destination
+ // This allows new protected files to be added, but prevents overwriting existing ones
+ if (this.ig.ignores(relativePath) && (await pathExists(destPath))) {
+ logger.debug(`Skipping protected file (exists in destination): ${relativePath}`);
 skippedCount++;
 continue;
 }

- const destPath = join(destDir, relativePath);
 await copy(file, destPath, { overwrite: true });
 copiedCount++;
 }
@@ -90,7 +93,7 @@ export class FileMerger {
 */
 private async getFiles(dir: string): Promise<string[]> {
 const files: string[] = [];
- const entries = await readdir(dir);
+ const entries = await readdir(dir, { encoding: "utf8" });

 for (const entry of entries) {
 const fullPath = join(dir, entry);
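The 1.4.1 fix above changes what "protected" means during a merge: a protected or excluded file is skipped only when it already exists in the destination, so a brand-new project still receives the kit's copy. A rough sketch of that check, assuming the `ignore` matcher and fs-extra's `pathExists` as used in the diff:

```ts
import ignore from "ignore";
import { pathExists } from "fs-extra";

// Mirrors the copy rule shown above (illustrative, not the exact implementation):
// a protected/excluded path is skipped only when the destination already has it,
// so a kit's CLAUDE.md or .gitignore still lands in a brand-new project.
async function shouldSkipCopy(
  ig: ReturnType<typeof ignore>,
  relativePath: string,
  destPath: string,
): Promise<boolean> {
  return ig.ignores(relativePath) && (await pathExists(destPath));
}
```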
package/src/types.ts CHANGED
@@ -4,12 +4,22 @@ import { z } from "zod";
 export const KitType = z.enum(["engineer", "marketing"]);
 export type KitType = z.infer<typeof KitType>;

+ // Exclude pattern validation schema
+ export const ExcludePatternSchema = z
+ .string()
+ .trim()
+ .min(1, "Exclude pattern cannot be empty")
+ .max(500, "Exclude pattern too long")
+ .refine((val) => !val.startsWith("/"), "Absolute paths not allowed in exclude patterns")
+ .refine((val) => !val.includes(".."), "Path traversal not allowed in exclude patterns");
+
 // Command options schemas
 export const NewCommandOptionsSchema = z.object({
 dir: z.string().default("."),
 kit: KitType.optional(),
 version: z.string().optional(),
 force: z.boolean().default(false),
+ exclude: z.array(ExcludePatternSchema).optional().default([]),
 });
 export type NewCommandOptions = z.infer<typeof NewCommandOptionsSchema>;

@@ -17,6 +27,7 @@ export const UpdateCommandOptionsSchema = z.object({
 dir: z.string().default("."),
 kit: KitType.optional(),
 version: z.string().optional(),
+ exclude: z.array(ExcludePatternSchema).optional().default([]),
 });
 export type UpdateCommandOptions = z.infer<typeof UpdateCommandOptionsSchema>;

@@ -94,12 +105,19 @@ export const AVAILABLE_KITS: Record<KitType, KitConfig> = {

 // Protected file patterns (files to skip during update)
 export const PROTECTED_PATTERNS = [
+ // Environment and secrets
 ".env",
 ".env.local",
 ".env.*.local",
 "*.key",
 "*.pem",
 "*.p12",
+ // User configuration files (only skip if they exist)
+ ".gitignore",
+ ".repomixignore",
+ ".mcp.json",
+ "CLAUDE.md",
+ // Dependencies and build artifacts
 "node_modules/**",
 ".git/**",
 "dist/**",
@@ -29,7 +29,7 @@ export class FileScanner {
 }

 try {
- const entries = await readdir(dirPath);
+ const entries = await readdir(dirPath, { encoding: "utf8" });

 for (const entry of entries) {
 const fullPath = join(dirPath, entry);
@@ -62,26 +62,60 @@ describe("FileMerger", () => {
62
62
  expect(existsSync(join(testDestDir, "readme.md"))).toBe(true);
63
63
  });
64
64
 
65
- test("should skip protected files like .env", async () => {
66
- // Create test files including protected ones
65
+ test("should skip protected files like .env if they exist in destination", async () => {
66
+ // Create test files in source
67
+ await writeFile(join(testSourceDir, "normal.txt"), "normal");
68
+ await writeFile(join(testSourceDir, ".env"), "NEW_SECRET=new_value");
69
+
70
+ // Create existing .env in destination
71
+ await writeFile(join(testDestDir, ".env"), "OLD_SECRET=old_value");
72
+
73
+ await merger.merge(testSourceDir, testDestDir, true);
74
+
75
+ // Verify normal file was copied
76
+ expect(existsSync(join(testDestDir, "normal.txt"))).toBe(true);
77
+
78
+ // Verify .env was NOT overwritten (still has old value)
79
+ const envContent = await Bun.file(join(testDestDir, ".env")).text();
80
+ expect(envContent).toBe("OLD_SECRET=old_value");
81
+ });
82
+
83
+ test("should copy protected files like .env if they don't exist in destination", async () => {
84
+ // Create test files in source
67
85
  await writeFile(join(testSourceDir, "normal.txt"), "normal");
68
86
  await writeFile(join(testSourceDir, ".env"), "SECRET=value");
69
87
 
70
88
  await merger.merge(testSourceDir, testDestDir, true);
71
89
 
72
- // Verify normal file was copied but .env was not
90
+ // Verify both files were copied (no existing .env to protect)
73
91
  expect(existsSync(join(testDestDir, "normal.txt"))).toBe(true);
74
- expect(existsSync(join(testDestDir, ".env"))).toBe(false);
92
+ expect(existsSync(join(testDestDir, ".env"))).toBe(true);
75
93
  });
76
94
 
77
- test("should skip protected patterns like *.key", async () => {
95
+ test("should skip protected patterns like *.key if they exist in destination", async () => {
96
+ await writeFile(join(testSourceDir, "normal.txt"), "normal");
97
+ await writeFile(join(testSourceDir, "private.key"), "new key data");
98
+
99
+ // Create existing key file in destination
100
+ await writeFile(join(testDestDir, "private.key"), "old key data");
101
+
102
+ await merger.merge(testSourceDir, testDestDir, true);
103
+
104
+ expect(existsSync(join(testDestDir, "normal.txt"))).toBe(true);
105
+
106
+ // Verify key file was NOT overwritten
107
+ const keyContent = await Bun.file(join(testDestDir, "private.key")).text();
108
+ expect(keyContent).toBe("old key data");
109
+ });
110
+
111
+ test("should copy protected patterns like *.key if they don't exist in destination", async () => {
78
112
  await writeFile(join(testSourceDir, "normal.txt"), "normal");
79
113
  await writeFile(join(testSourceDir, "private.key"), "key data");
80
114
 
81
115
  await merger.merge(testSourceDir, testDestDir, true);
82
116
 
83
117
  expect(existsSync(join(testDestDir, "normal.txt"))).toBe(true);
84
- expect(existsSync(join(testDestDir, "private.key"))).toBe(false);
118
+ expect(existsSync(join(testDestDir, "private.key"))).toBe(true);
85
119
  });
86
120
 
87
121
  test("should handle nested directories", async () => {
@@ -123,7 +157,25 @@ describe("FileMerger", () => {
123
157
  expect(existsSync(join(testDestDir, specialFileName))).toBe(true);
124
158
  });
125
159
 
126
- test("should skip custom ignore patterns", async () => {
160
+ test("should skip custom ignore patterns if they exist in destination", async () => {
161
+ merger.addIgnorePatterns(["custom-*"]);
162
+
163
+ await writeFile(join(testSourceDir, "normal.txt"), "normal");
164
+ await writeFile(join(testSourceDir, "custom-ignore.txt"), "new content");
165
+
166
+ // Create existing file in destination
167
+ await writeFile(join(testDestDir, "custom-ignore.txt"), "old content");
168
+
169
+ await merger.merge(testSourceDir, testDestDir, true);
170
+
171
+ expect(existsSync(join(testDestDir, "normal.txt"))).toBe(true);
172
+
173
+ // Verify custom file was NOT overwritten
174
+ const customContent = await Bun.file(join(testDestDir, "custom-ignore.txt")).text();
175
+ expect(customContent).toBe("old content");
176
+ });
177
+
178
+ test("should copy custom ignore patterns if they don't exist in destination", async () => {
127
179
  merger.addIgnorePatterns(["custom-*"]);
128
180
 
129
181
  await writeFile(join(testSourceDir, "normal.txt"), "normal");
@@ -132,7 +184,7 @@ describe("FileMerger", () => {
132
184
  await merger.merge(testSourceDir, testDestDir, true);
133
185
 
134
186
  expect(existsSync(join(testDestDir, "normal.txt"))).toBe(true);
135
- expect(existsSync(join(testDestDir, "custom-ignore.txt"))).toBe(false);
187
+ expect(existsSync(join(testDestDir, "custom-ignore.txt"))).toBe(true);
136
188
  });
137
189
  });
138
190
 
@@ -5,6 +5,7 @@ import {
5
5
  ClaudeKitError,
6
6
  ConfigSchema,
7
7
  DownloadError,
8
+ ExcludePatternSchema,
8
9
  ExtractionError,
9
10
  GitHubError,
10
11
  GitHubReleaseAssetSchema,
@@ -29,6 +30,44 @@ describe("Types and Schemas", () => {
29
30
  });
30
31
  });
31
32
 
33
+ describe("ExcludePatternSchema", () => {
34
+ test("should accept valid glob patterns", () => {
35
+ const validPatterns = ["*.log", "**/*.tmp", "temp/**", "logs/*.txt", "cache/**/*"];
36
+ validPatterns.forEach((pattern) => {
37
+ expect(() => ExcludePatternSchema.parse(pattern)).not.toThrow();
38
+ });
39
+ });
40
+
41
+ test("should reject absolute paths", () => {
42
+ expect(() => ExcludePatternSchema.parse("/etc/passwd")).toThrow("Absolute paths not allowed");
43
+ expect(() => ExcludePatternSchema.parse("/var/log/**")).toThrow("Absolute paths not allowed");
44
+ });
45
+
46
+ test("should reject path traversal", () => {
47
+ expect(() => ExcludePatternSchema.parse("../../etc/passwd")).toThrow(
48
+ "Path traversal not allowed",
49
+ );
50
+ expect(() => ExcludePatternSchema.parse("../../../secret")).toThrow(
51
+ "Path traversal not allowed",
52
+ );
53
+ });
54
+
55
+ test("should reject empty patterns", () => {
56
+ expect(() => ExcludePatternSchema.parse("")).toThrow("Exclude pattern cannot be empty");
57
+ expect(() => ExcludePatternSchema.parse(" ")).toThrow("Exclude pattern cannot be empty");
58
+ });
59
+
60
+ test("should reject overly long patterns", () => {
61
+ const longPattern = "a".repeat(501);
62
+ expect(() => ExcludePatternSchema.parse(longPattern)).toThrow("Exclude pattern too long");
63
+ });
64
+
65
+ test("should trim whitespace", () => {
66
+ const result = ExcludePatternSchema.parse(" *.log ");
67
+ expect(result).toBe("*.log");
68
+ });
69
+ });
70
+
32
71
  describe("NewCommandOptionsSchema", () => {
33
72
  test("should validate correct options", () => {
34
73
  const result = NewCommandOptionsSchema.parse({
@@ -46,6 +85,7 @@ describe("Types and Schemas", () => {
46
85
  expect(result.dir).toBe(".");
47
86
  expect(result.kit).toBeUndefined();
48
87
  expect(result.version).toBeUndefined();
88
+ expect(result.exclude).toEqual([]);
49
89
  });
50
90
 
51
91
  test("should accept optional fields", () => {
@@ -53,6 +93,23 @@ describe("Types and Schemas", () => {
53
93
  expect(result.dir).toBe("./custom");
54
94
  expect(result.kit).toBeUndefined();
55
95
  });
96
+
97
+ test("should validate exclude patterns", () => {
98
+ const result = NewCommandOptionsSchema.parse({
99
+ dir: "./test",
100
+ exclude: ["*.log", "temp/**"],
101
+ });
102
+ expect(result.exclude).toEqual(["*.log", "temp/**"]);
103
+ });
104
+
105
+ test("should reject invalid exclude patterns", () => {
106
+ expect(() =>
107
+ NewCommandOptionsSchema.parse({
108
+ dir: "./test",
109
+ exclude: ["/etc/passwd"],
110
+ }),
111
+ ).toThrow();
112
+ });
56
113
  });
57
114
 
58
115
  describe("UpdateCommandOptionsSchema", () => {
@@ -70,6 +127,24 @@ describe("Types and Schemas", () => {
70
127
  test("should use default values", () => {
71
128
  const result = UpdateCommandOptionsSchema.parse({});
72
129
  expect(result.dir).toBe(".");
130
+ expect(result.exclude).toEqual([]);
131
+ });
132
+
133
+ test("should validate exclude patterns", () => {
134
+ const result = UpdateCommandOptionsSchema.parse({
135
+ dir: "./test",
136
+ exclude: ["*.log", "**/*.tmp"],
137
+ });
138
+ expect(result.exclude).toEqual(["*.log", "**/*.tmp"]);
139
+ });
140
+
141
+ test("should reject invalid exclude patterns", () => {
142
+ expect(() =>
143
+ UpdateCommandOptionsSchema.parse({
144
+ dir: "./test",
145
+ exclude: ["../../../etc"],
146
+ }),
147
+ ).toThrow();
73
148
  });
74
149
  });
75
150
 
@@ -1,13 +0,0 @@
1
- {
2
- "mcpServers": {
3
- "human": {
4
- "command": "npx",
5
- "args": ["-y", "@goonnguyen/human-mcp@latest"],
6
- "env": {
7
- "GOOGLE_GEMINI_API_KEY": "",
8
- "TRANSPORT_TYPE": "stdio",
9
- "LOG_LEVEL": "info"
10
- }
11
- }
12
- }
13
- }
@@ -1,15 +0,0 @@
1
- docs/*
2
- plans/*
3
- assets/*
4
- dist/*
5
- coverage/*
6
- build/*
7
- ios/*
8
- android/*
9
-
10
- .claude/*
11
- .serena/*
12
- .pnpm-store/*
13
- .github/*
14
- .dart_tool/*
15
- .idea/*
@@ -1,34 +0,0 @@
1
- # CLAUDE.md
2
-
3
- This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
4
-
5
- ## Role & Responsibilities
6
-
7
- Your role is to analyze user requirements, delegate tasks to appropriate sub-agents, and ensure cohesive delivery of features that meet specifications and architectural standards.
8
-
9
- ## Workflows
10
-
11
- - Primary workflow: `./.claude/workflows/primary-workflow.md`
12
- - Development rules: `./.claude/workflows/development-rules.md`
13
- - Orchestration protocols: `./.claude/workflows/orchestration-protocol.md`
14
- - Documentation management: `./.claude/workflows/documentation-management.md`
15
-
16
- **IMPORTANT:** You must follow strictly the development rules in `./.claude/workflows/development-rules.md` file.
17
- **IMPORTANT:** Before you plan or proceed any implementation, always read the `./README.md` file first to get context.
18
- **IMPORTANT:** Sacrifice grammar for the sake of concision when writing reports.
19
- **IMPORTANT:** In reports, list any unresolved questions at the end, if any.
20
-
21
- ## Documentation Management
22
-
23
- We keep all important docs in `./docs` folder and keep updating them, structure like below:
24
-
25
- ```
26
- ./docs
27
- ├── project-overview-pdr.md
28
- ├── code-standards.md
29
- ├── codebase-summary.md
30
- ├── design-guidelines.md
31
- ├── deployment-guide.md
32
- ├── system-architecture.md
33
- └── project-roadmap.md
34
- ```