claudekit-cli 1.2.1 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/ci.yml +3 -3
- package/CHANGELOG.md +23 -0
- package/README.md +2 -0
- package/biome.json +1 -1
- package/bun.lock +58 -433
- package/dist/index.js +2717 -52
- package/package.json +10 -11
- package/src/commands/new.ts +30 -7
- package/src/commands/update.ts +3 -0
- package/src/index.ts +5 -9
- package/src/lib/download.ts +236 -27
- package/src/types.ts +1 -0
- package/src/version.json +3 -0
- package/test-integration/demo/.mcp.json +13 -0
- package/test-integration/demo/.repomixignore +15 -0
- package/test-integration/demo/CLAUDE.md +34 -0
- package/tests/integration/cli.test.ts +252 -0
- package/tests/lib/download.test.ts +230 -8
package/package.json
CHANGED

@@ -1,20 +1,20 @@
 {
   "name": "claudekit-cli",
-  "version": "1.2.1",
+  "version": "1.3.0",
   "description": "CLI tool for bootstrapping and updating ClaudeKit projects",
   "type": "module",
   "bin": {
     "ck": "./dist/index.js"
   },
   "scripts": {
-    "dev": "bun run src/index.ts",
-    "build": "bun build src/index.ts --outdir dist --target node --external keytar --external @octokit/rest",
-    "compile": "bun build src/index.ts --compile --outfile ck",
-    "test": "bun test",
-    "test:watch": "bun test --watch",
-    "lint": "biome check .",
-    "format": "biome format --write .",
-    "typecheck": "tsc --noEmit"
+    "dev": "bun run src/index.ts >> logs.txt 2>&1",
+    "build": "bun build src/index.ts --outdir dist --target node --external keytar --external @octokit/rest >> logs.txt 2>&1",
+    "compile": "bun build src/index.ts --compile --outfile ck >> logs.txt 2>&1",
+    "test": "bun test >> logs.txt 2>&1",
+    "test:watch": "bun test --watch >> logs.txt 2>&1",
+    "lint": "biome check . >> logs.txt 2>&1",
+    "format": "biome format --write . >> logs.txt 2>&1",
+    "typecheck": "tsc --noEmit >> logs.txt 2>&1"
   },
   "keywords": [
     "cli",
@@ -33,6 +33,7 @@
     "@octokit/rest": "^22.0.0",
     "cac": "^6.7.14",
     "cli-progress": "^3.12.0",
+    "extract-zip": "^2.0.1",
     "fs-extra": "^11.2.0",
     "ignore": "^5.3.2",
     "keytar": "^7.9.0",
@@ -40,7 +41,6 @@
     "picocolors": "^1.1.1",
     "tar": "^7.4.3",
     "tmp": "^0.2.3",
-    "unzipper": "^0.12.3",
     "zod": "^3.23.8"
   },
   "devDependencies": {
@@ -53,7 +53,6 @@
     "@types/node": "^22.10.1",
     "@types/tar": "^6.1.13",
     "@types/tmp": "^0.2.6",
-    "@types/unzipper": "^0.10.10",
     "semantic-release": "^24.2.0",
     "typescript": "^5.7.2"
   }
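The only dependency change here is swapping unzipper (plus its type stubs) for extract-zip. A minimal sketch of the API difference, not taken from the package; the archive and destination paths are placeholders, and extract-zip requires the destination to be an absolute path:

```ts
import { createReadStream } from "node:fs";
import { resolve } from "node:path";
import extractZip from "extract-zip";
import unzipper from "unzipper";

const archive = "/tmp/kit.zip";      // placeholder paths
const dest = resolve("extracted");

// Before (1.2.1-style): stream the archive through unzipper's Extract transform.
await createReadStream(archive)
  .pipe(unzipper.Extract({ path: dest }))
  .promise();

// After (1.3.0): a single promise-based call; `dir` must be an absolute path.
await extractZip(archive, { dir: dest });
```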
package/src/commands/new.ts
CHANGED

@@ -19,12 +19,19 @@ export async function newCommand(options: NewCommandOptions): Promise<void> {
   // Validate and parse options
   const validOptions = NewCommandOptionsSchema.parse(options);
 
+  // Detect non-interactive mode
+  const isNonInteractive =
+    !process.stdin.isTTY || process.env.CI === "true" || process.env.NON_INTERACTIVE === "true";
+
   // Load config for defaults
   const config = await ConfigManager.get();
 
   // Get kit selection
   let kit = validOptions.kit || config.defaults?.kit;
   if (!kit) {
+    if (isNonInteractive) {
+      throw new Error("Kit must be specified via --kit flag in non-interactive mode");
+    }
     kit = await prompts.selectKit();
   }
 
@@ -34,7 +41,11 @@
   // Get target directory
   let targetDir = validOptions.dir || config.defaults?.dir || ".";
   if (!validOptions.dir && !config.defaults?.dir) {
-    targetDir = await prompts.getDirectory(targetDir);
+    if (isNonInteractive) {
+      targetDir = ".";
+    } else {
+      targetDir = await prompts.getDirectory(targetDir);
+    }
   }
 
   const resolvedDir = resolve(targetDir);
@@ -45,12 +56,21 @@
     const files = await readdir(resolvedDir);
     const isEmpty = files.length === 0;
     if (!isEmpty) {
-      const continueAnyway = await prompts.confirm(
-        "Directory is not empty. Files may be overwritten. Continue?",
-      );
-      if (!continueAnyway) {
-        logger.warning("Operation cancelled");
-        return;
+      if (isNonInteractive) {
+        if (!validOptions.force) {
+          throw new Error(
+            "Directory is not empty. Use --force flag to overwrite in non-interactive mode",
+          );
+        }
+        logger.info("Directory is not empty. Proceeding with --force flag");
+      } else {
+        const continueAnyway = await prompts.confirm(
+          "Directory is not empty. Files may be overwritten. Continue?",
+        );
+        if (!continueAnyway) {
+          logger.warning("Operation cancelled");
+          return;
+        }
       }
     }
   }
@@ -132,6 +152,9 @@
   const extractDir = `${tempDir}/extracted`;
   await downloadManager.extractArchive(archivePath, extractDir);
 
+  // Validate extraction
+  await downloadManager.validateExtraction(extractDir);
+
   // Copy files to target directory
   const merger = new FileMerger();
   await merger.merge(extractDir, resolvedDir, true); // Skip confirmation for new projects
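For reference, the new non-interactive branch keys off three signals: no TTY on stdin, `CI=true`, or `NON_INTERACTIVE=true`. A standalone sketch of the same check (the helper name is ours, not part of the package):

```ts
// Mirrors the detection added in new.ts: no TTY on stdin, or an explicit env opt-out.
function detectNonInteractive(env: NodeJS.ProcessEnv = process.env): boolean {
  return !process.stdin.isTTY || env.CI === "true" || env.NON_INTERACTIVE === "true";
}

// In CI this means `ck new --kit engineer --force` runs without prompts, while
// `ck new` with no --kit now fails fast instead of blocking on a prompt.
```

Note that the environment variables must be the literal string "true"; `CI=1`, which some CI systems set, would not trigger this branch.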
package/src/commands/update.ts
CHANGED

@@ -125,6 +125,9 @@ export async function updateCommand(options: UpdateCommandOptions): Promise<void> {
   const extractDir = `${tempDir}/extracted`;
   await downloadManager.extractArchive(archivePath, extractDir);
 
+  // Validate extraction
+  await downloadManager.validateExtraction(extractDir);
+
   // Identify custom .claude files to preserve
   logger.info("Scanning for custom .claude files...");
   const customClaudeFiles = await FileScanner.findCustomFiles(resolvedDir, extractDir, ".claude");
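Both `new` and `update` now run the same extract-then-validate sequence. A condensed sketch of that shared flow, with the surrounding download and temp-dir handling elided and the import path assumed:

```ts
import { DownloadManager } from "../lib/download.js"; // path as the commands would see it (assumed)

async function extractAndValidate(
  downloadManager: DownloadManager,
  archivePath: string,
  tempDir: string,
): Promise<string> {
  const extractDir = `${tempDir}/extracted`;
  await downloadManager.extractArchive(archivePath, extractDir);
  // New in 1.3.0: throws ExtractionError if nothing was extracted,
  // and warns when .claude/ or CLAUDE.md is missing from the result.
  await downloadManager.validateExtraction(extractDir);
  return extractDir;
}
```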
package/src/index.ts
CHANGED

@@ -1,13 +1,11 @@
 #!/usr/bin/env bun
 
-import { readFileSync } from "node:fs";
-import { join } from "node:path";
-import { fileURLToPath } from "node:url";
 import { cac } from "cac";
 import { newCommand } from "./commands/new.js";
 import { updateCommand } from "./commands/update.js";
 import { versionCommand } from "./commands/version.js";
 import { logger } from "./utils/logger.js";
+import versionInfo from "./version.json" assert { type: "json" };
 
 // Set proper output encoding to prevent unicode rendering issues
 if (process.stdout.setEncoding) {
@@ -17,10 +15,7 @@ if (process.stderr.setEncoding) {
   process.stderr.setEncoding("utf8");
 }
 
-const
-
-// Read package.json for version
-const packageJson = JSON.parse(readFileSync(join(__dirname, "../package.json"), "utf-8"));
+const packageVersion = versionInfo.version;
 
 const cli = cac("ck");
 
@@ -34,6 +29,7 @@ cli
   .option("--dir <dir>", "Target directory (default: .)")
   .option("--kit <kit>", "Kit to use (engineer, marketing)")
   .option("--version <version>", "Specific version to download (default: latest)")
+  .option("--force", "Overwrite existing files without confirmation")
   .action(async (options) => {
     await newCommand(options);
   });
@@ -59,7 +55,7 @@ cli
   });
 
 // Version
-cli.version(packageJson.version);
+cli.version(packageVersion);
 
 // Help
 cli.help();
@@ -85,7 +81,7 @@ if (parsed.options.logFile) {
 
 // Log startup info in verbose mode
 logger.verbose("ClaudeKit CLI starting", {
-  version: packageJson.version,
+  version: packageVersion,
   command: parsed.args[0] || "none",
   options: parsed.options,
   cwd: process.cwd(),
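The new src/version.json (three lines in this release) is not shown in the diff; presumably it holds just the version string, which the JSON import makes available at build time without resolving package.json on disk. A sketch under that assumption:

```ts
// Assumed contents of src/version.json:
// {
//   "version": "1.3.0"
// }
import versionInfo from "./version.json" assert { type: "json" };

const packageVersion: string = versionInfo.version;
// Used for both cli.version(packageVersion) and the verbose startup log,
// replacing the old readFileSync(join(__dirname, "../package.json")) lookup
// that assumed package.json sits next to the executing file.
```

Note that `assert { type: "json" }` is the older import-assertion spelling; newer Node.js and TypeScript versions prefer `with { type: "json" }`.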
package/src/lib/download.ts
CHANGED

@@ -1,13 +1,11 @@
-import {
+import { createWriteStream } from "node:fs";
 import { mkdir } from "node:fs/promises";
 import { tmpdir } from "node:os";
-import { join } from "node:path";
-import { pipeline } from "node:stream";
-import { promisify } from "node:util";
+import { join, relative, resolve } from "node:path";
 import cliProgress from "cli-progress";
+import extractZip from "extract-zip";
 import ignore from "ignore";
 import * as tar from "tar";
-import unzipper from "unzipper";
 import {
   type ArchiveType,
   DownloadError,
@@ -17,9 +15,12 @@ import {
 import { logger } from "../utils/logger.js";
 import { createSpinner } from "../utils/safe-spinner.js";
 
-const streamPipeline = promisify(pipeline);
-
 export class DownloadManager {
+  /**
+   * Maximum extraction size (500MB) to prevent archive bombs
+   */
+  private static MAX_EXTRACTION_SIZE = 500 * 1024 * 1024; // 500MB
+
   /**
    * Patterns to exclude from extraction
    */
@@ -35,6 +36,11 @@
     "*.log",
   ];
 
+  /**
+   * Track total extracted size to prevent archive bombs
+   */
+  private totalExtractedSize = 0;
+
   /**
    * Check if file path should be excluded
    */
@@ -43,6 +49,45 @@
     return ig.ignores(filePath);
   }
 
+  /**
+   * Validate path to prevent path traversal attacks (zip slip)
+   */
+  private isPathSafe(basePath: string, targetPath: string): boolean {
+    // Resolve both paths to their absolute canonical forms
+    const resolvedBase = resolve(basePath);
+    const resolvedTarget = resolve(targetPath);
+
+    // Calculate relative path from base to target
+    const relativePath = relative(resolvedBase, resolvedTarget);
+
+    // If path starts with .. or is absolute, it's trying to escape
+    // Also block if relative path is empty but resolved paths differ (edge case)
+    return (
+      !relativePath.startsWith("..") &&
+      !relativePath.startsWith("/") &&
+      resolvedTarget.startsWith(resolvedBase)
+    );
+  }
+
+  /**
+   * Track extracted file size and check against limit
+   */
+  private checkExtractionSize(fileSize: number): void {
+    this.totalExtractedSize += fileSize;
+    if (this.totalExtractedSize > DownloadManager.MAX_EXTRACTION_SIZE) {
+      throw new ExtractionError(
+        `Archive exceeds maximum extraction size of ${this.formatBytes(DownloadManager.MAX_EXTRACTION_SIZE)}. Possible archive bomb detected.`,
+      );
+    }
+  }
+
+  /**
+   * Reset extraction size tracker
+   */
+  private resetExtractionSize(): void {
+    this.totalExtractedSize = 0;
+  }
+
   /**
    * Download asset from URL with progress tracking
    */
@@ -212,6 +257,9 @@
     const spinner = createSpinner("Extracting files...").start();
 
     try {
+      // Reset extraction size tracker
+      this.resetExtractionSize();
+
       // Detect archive type from filename if not provided
       const detectedType = archiveType || this.detectArchiveType(archivePath);
 
@@ -239,19 +287,96 @@
    * Extract tar.gz archive
    */
   private async extractTarGz(archivePath: string, destDir: string): Promise<void> {
-    await
-
-
-
-
-
-
-
-
+    const { readdir, stat, mkdir: mkdirPromise, copyFile, rm } = await import("node:fs/promises");
+    const { join: pathJoin } = await import("node:path");
+
+    // Extract to a temporary directory first
+    const tempExtractDir = `${destDir}-temp`;
+    await mkdirPromise(tempExtractDir, { recursive: true });
+
+    try {
+      // Extract without stripping first
+      await tar.extract({
+        file: archivePath,
+        cwd: tempExtractDir,
+        strip: 0, // Don't strip yet - we'll decide based on wrapper detection
+        filter: (path: string) => {
+          // Exclude unwanted files
+          const shouldInclude = !this.shouldExclude(path);
+          if (!shouldInclude) {
+            logger.debug(`Excluding: ${path}`);
+          }
+          return shouldInclude;
+        },
+      });
+
+      logger.debug(`Extracted TAR.GZ to temp: ${tempExtractDir}`);
+
+      // Apply same wrapper detection logic as zip
+      const entries = await readdir(tempExtractDir);
+      logger.debug(`Root entries: ${entries.join(", ")}`);
+
+      if (entries.length === 1) {
+        const rootEntry = entries[0];
+        const rootPath = pathJoin(tempExtractDir, rootEntry);
+        const rootStat = await stat(rootPath);
+
+        if (rootStat.isDirectory()) {
+          // Check contents of root directory
+          const rootContents = await readdir(rootPath);
+          logger.debug(`Root directory '${rootEntry}' contains: ${rootContents.join(", ")}`);
+
+          // Only strip if root is a version/release wrapper
+          const isWrapper = this.isWrapperDirectory(rootEntry);
+          logger.debug(`Is wrapper directory: ${isWrapper}`);
+
+          if (isWrapper) {
+            // Strip wrapper and move contents
+            logger.debug(`Stripping wrapper directory: ${rootEntry}`);
+            await this.moveDirectoryContents(rootPath, destDir);
+          } else {
+            // Keep root directory - move everything including root
+            logger.debug("Preserving complete directory structure");
+            await this.moveDirectoryContents(tempExtractDir, destDir);
+          }
+        } else {
+          // Single file, just move it
+          await mkdirPromise(destDir, { recursive: true });
+          await copyFile(rootPath, pathJoin(destDir, rootEntry));
         }
-
-
-
+      } else {
+        // Multiple entries at root, move them all
+        logger.debug("Multiple root entries - moving all");
+        await this.moveDirectoryContents(tempExtractDir, destDir);
+      }
+
+      logger.debug(`Moved contents to: ${destDir}`);
+
+      // Clean up temp directory
+      await rm(tempExtractDir, { recursive: true, force: true });
+    } catch (error) {
+      // Clean up temp directory on error
+      try {
+        await rm(tempExtractDir, { recursive: true, force: true });
+      } catch {
+        // Ignore cleanup errors
+      }
+      throw error;
+    }
+  }
+
+  /**
+   * Check if directory name is a version/release wrapper
+   * Examples: claudekit-engineer-v1.0.0, claudekit-engineer-1.0.0, repo-abc1234,
+   * project-v1.0.0-alpha, project-1.2.3-beta.1, repo-v2.0.0-rc.5
+   */
+  private isWrapperDirectory(dirName: string): boolean {
+    // Match version patterns with optional prerelease: project-v1.0.0, project-1.0.0-alpha, project-v2.0.0-rc.1
+    const versionPattern = /^[\w-]+-v?\d+\.\d+\.\d+(-[\w.]+)?$/;
+    // Match commit hash patterns: project-abc1234 (7-40 chars for short/full SHA)
+    const hashPattern = /^[\w-]+-[a-f0-9]{7,40}$/;
+
+    return versionPattern.test(dirName) || hashPattern.test(dirName);
   }
 
   /**
@@ -266,24 +391,39 @@
     await mkdirPromise(tempExtractDir, { recursive: true });
 
     try {
-      // Extract zip to temp directory
-      await
-
-
-      );
+      // Extract zip to temp directory using extract-zip
+      await extractZip(archivePath, { dir: tempExtractDir });
+
+      logger.debug(`Extracted ZIP to temp: ${tempExtractDir}`);
 
       // Find the root directory in the zip (if any)
       const entries = await readdir(tempExtractDir);
+      logger.debug(`Root entries: ${entries.join(", ")}`);
 
-      // If there's a single root directory,
+      // If there's a single root directory, check if it's a wrapper
       if (entries.length === 1) {
        const rootEntry = entries[0];
        const rootPath = pathJoin(tempExtractDir, rootEntry);
        const rootStat = await stat(rootPath);

        if (rootStat.isDirectory()) {
-          //
-          await
+          // Check contents of root directory
+          const rootContents = await readdir(rootPath);
+          logger.debug(`Root directory '${rootEntry}' contains: ${rootContents.join(", ")}`);
+
+          // Only strip if root is a version/release wrapper
+          const isWrapper = this.isWrapperDirectory(rootEntry);
+          logger.debug(`Is wrapper directory: ${isWrapper}`);
+
+          if (isWrapper) {
+            // Strip wrapper and move contents
+            logger.debug(`Stripping wrapper directory: ${rootEntry}`);
+            await this.moveDirectoryContents(rootPath, destDir);
+          } else {
+            // Keep root directory - move everything including root
+            logger.debug("Preserving complete directory structure");
+            await this.moveDirectoryContents(tempExtractDir, destDir);
+          }
        } else {
          // Single file, just move it
          await mkdirPromise(destDir, { recursive: true });
@@ -291,9 +431,12 @@
         }
       } else {
         // Multiple entries at root, move them all
+        logger.debug("Multiple root entries - moving all");
         await this.moveDirectoryContents(tempExtractDir, destDir);
       }
 
+      logger.debug(`Moved contents to: ${destDir}`);
+
       // Clean up temp directory
       await rm(tempExtractDir, { recursive: true, force: true });
     } catch (error) {
@@ -323,6 +466,12 @@
       const destPath = pathJoin(destDir, entry);
       const relativePath = relative(sourceDir, sourcePath);
 
+      // Validate path safety (prevent path traversal)
+      if (!this.isPathSafe(destDir, destPath)) {
+        logger.warning(`Skipping unsafe path: ${relativePath}`);
+        throw new ExtractionError(`Path traversal attempt detected: ${relativePath}`);
+      }
+
       // Skip excluded files
       if (this.shouldExclude(relativePath)) {
         logger.debug(`Excluding: ${relativePath}`);
@@ -335,6 +484,8 @@
         // Recursively copy directory
         await this.copyDirectory(sourcePath, destPath);
       } else {
+        // Track file size and check limit
+        this.checkExtractionSize(entryStat.size);
         // Copy file
         await copyFile(sourcePath, destPath);
       }
@@ -357,6 +508,12 @@
       const destPath = pathJoin(destDir, entry);
       const relativePath = relative(sourceDir, sourcePath);
 
+      // Validate path safety (prevent path traversal)
+      if (!this.isPathSafe(destDir, destPath)) {
+        logger.warning(`Skipping unsafe path: ${relativePath}`);
+        throw new ExtractionError(`Path traversal attempt detected: ${relativePath}`);
+      }
+
       // Skip excluded files
       if (this.shouldExclude(relativePath)) {
         logger.debug(`Excluding: ${relativePath}`);
@@ -369,6 +526,8 @@
         // Recursively copy directory
         await this.copyDirectory(sourcePath, destPath);
       } else {
+        // Track file size and check limit
+        this.checkExtractionSize(entryStat.size);
         // Copy file
         await copyFile(sourcePath, destPath);
       }
@@ -388,6 +547,56 @@
       throw new ExtractionError(`Cannot detect archive type from filename: ${filename}`);
     }
   }
 
+  /**
+   * Validate extraction results
+   * @throws {ExtractionError} If validation fails
+   */
+  async validateExtraction(extractDir: string): Promise<void> {
+    const { readdir, access } = await import("node:fs/promises");
+    const { join: pathJoin } = await import("node:path");
+    const { constants } = await import("node:fs");
+
+    try {
+      // Check if extract directory exists and is not empty
+      const entries = await readdir(extractDir);
+      logger.debug(`Extracted files: ${entries.join(", ")}`);
+
+      if (entries.length === 0) {
+        throw new ExtractionError("Extraction resulted in no files");
+      }
+
+      // Verify critical paths exist
+      const criticalPaths = [".claude", "CLAUDE.md"];
+      const missingPaths: string[] = [];
+
+      for (const path of criticalPaths) {
+        try {
+          await access(pathJoin(extractDir, path), constants.F_OK);
+          logger.debug(`✓ Found: ${path}`);
+        } catch {
+          logger.warning(`Expected path not found: ${path}`);
+          missingPaths.push(path);
+        }
+      }
+
+      // Warn if critical paths are missing but don't fail validation
+      if (missingPaths.length > 0) {
+        logger.warning(
+          `Some expected paths are missing: ${missingPaths.join(", ")}. This may not be a ClaudeKit project.`,
+        );
+      }
+
+      logger.debug("Extraction validation passed");
+    } catch (error) {
+      if (error instanceof ExtractionError) {
+        throw error;
+      }
+      throw new ExtractionError(
+        `Validation failed: ${error instanceof Error ? error.message : "Unknown error"}`,
+      );
+    }
+  }
+
   /**
    * Create temporary download directory
    */
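To make the new wrapper-stripping behavior concrete, here are the two patterns from isWrapperDirectory (a private method) copied into a standalone snippet and run against the names its doc comment lists, plus one non-wrapper folder:

```ts
// Standalone copies of the patterns used by DownloadManager.isWrapperDirectory.
const versionPattern = /^[\w-]+-v?\d+\.\d+\.\d+(-[\w.]+)?$/;
const hashPattern = /^[\w-]+-[a-f0-9]{7,40}$/;

const isWrapper = (name: string) => versionPattern.test(name) || hashPattern.test(name);

console.log(isWrapper("claudekit-engineer-v1.0.0")); // true  - release wrapper, stripped
console.log(isWrapper("project-1.2.3-beta.1"));      // true  - prerelease wrapper, stripped
console.log(isWrapper("repo-abc1234"));              // true  - commit-hash wrapper, stripped
console.log(isWrapper("docs"));                      // false - real top-level folder, preserved
```

Anything that does not look like a `name-version` or `name-commitsha` wrapper is kept, so archives whose top-level folder is meaningful are no longer flattened.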
package/src/types.ts
CHANGED

@@ -9,6 +9,7 @@ export const NewCommandOptionsSchema = z.object({
   dir: z.string().default("."),
   kit: KitType.optional(),
   version: z.string().optional(),
+  force: z.boolean().default(false),
 });
 export type NewCommandOptions = z.infer<typeof NewCommandOptionsSchema>;
 
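A reduced sketch of the schema with the new field, showing the parsed defaults; KitType is simplified to an inline enum here (the real definition lives elsewhere in the package):

```ts
import { z } from "zod";

// Simplified stand-in for the package's KitType.
const KitType = z.enum(["engineer", "marketing"]);

const NewCommandOptionsSchema = z.object({
  dir: z.string().default("."),
  kit: KitType.optional(),
  version: z.string().optional(),
  force: z.boolean().default(false),
});

// `--force` arrives from cac as a boolean; when omitted, the default keeps it false.
const parsed = NewCommandOptionsSchema.parse({ kit: "engineer" });
console.log(parsed.dir, parsed.force); // "." false

const forced = NewCommandOptionsSchema.parse({ kit: "engineer", force: true });
console.log(forced.force); // true
```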
package/src/version.json
ADDED

package/test-integration/demo/CLAUDE.md
ADDED

@@ -0,0 +1,34 @@
+# CLAUDE.md
+
+This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
+
+## Role & Responsibilities
+
+Your role is to analyze user requirements, delegate tasks to appropriate sub-agents, and ensure cohesive delivery of features that meet specifications and architectural standards.
+
+## Workflows
+
+- Primary workflow: `./.claude/workflows/primary-workflow.md`
+- Development rules: `./.claude/workflows/development-rules.md`
+- Orchestration protocols: `./.claude/workflows/orchestration-protocol.md`
+- Documentation management: `./.claude/workflows/documentation-management.md`
+
+**IMPORTANT:** You must follow strictly the development rules in `./.claude/workflows/development-rules.md` file.
+**IMPORTANT:** Before you plan or proceed any implementation, always read the `./README.md` file first to get context.
+**IMPORTANT:** Sacrifice grammar for the sake of concision when writing reports.
+**IMPORTANT:** In reports, list any unresolved questions at the end, if any.
+
+## Documentation Management
+
+We keep all important docs in `./docs` folder and keep updating them, structure like below:
+
+```
+./docs
+├── project-overview-pdr.md
+├── code-standards.md
+├── codebase-summary.md
+├── design-guidelines.md
+├── deployment-guide.md
+├── system-architecture.md
+└── project-roadmap.md
+```