@khanhcan148/mk 0.1.12 → 0.1.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@khanhcan148/mk",
3
- "version": "0.1.12",
3
+ "version": "0.1.14",
4
4
  "description": "CLI to install and manage MyClaudeKit (.claude/) in your projects",
5
5
  "type": "module",
6
6
  "bin": {
@@ -14,7 +14,7 @@
14
14
  "node": ">=18.0.0"
15
15
  },
16
16
  "scripts": {
17
- "test": "node --test test/lib/*.test.js test/commands/*.test.js test/integration/*.test.js test/characterization/*.characterization.test.js test/hooks/*.test.cjs",
17
+ "test": "node --test test/lib/*.test.js test/commands/*.test.js test/integration/*.test.js test/characterization/*.characterization.test.js .claude/hooks/tests/*.test.cjs",
18
18
  "lint": "node --check src/**/*.js bin/**/*.js 2>/dev/null",
19
19
  "selftest": "python3 .claude/skills/mk-selftest/scripts/validate_kit.py"
20
20
  },
@@ -5,7 +5,7 @@ import { fileURLToPath } from 'node:url';
5
5
  import { copyKitFiles, mergeSettingsJson } from '../lib/copy.js';
6
6
  import { writeManifest } from '../lib/manifest.js';
7
7
  import { computeChecksum } from '../lib/checksum.js';
8
- import { resolveTargetDir, resolveManifestPath } from '../lib/paths.js';
8
+ import { resolveSourceDir, resolveTargetDir, resolveManifestPath } from '../lib/paths.js';
9
9
  import { MANIFEST_FILENAME } from '../lib/constants.js';
10
10
  import { resolveTokenOrLogin } from '../lib/auth.js';
11
11
  import { writeToken, readStoredToken } from '../lib/config.js';
@@ -6,7 +6,7 @@ import { fileURLToPath } from 'node:url';
6
6
  import { readManifest, updateManifest, diffManifest } from '../lib/manifest.js';
7
7
  import { computeChecksum } from '../lib/checksum.js';
8
8
  import { copyKitFiles, collectDiskFiles, mergeSettingsJson } from '../lib/copy.js';
9
- import { resolveTargetDir, resolveManifestPath, deriveProjectRoot, assertSafePath } from '../lib/paths.js';
9
+ import { resolveSourceDir, resolveTargetDir, resolveManifestPath, deriveProjectRoot, assertSafePath } from '../lib/paths.js';
10
10
  import { resolveTokenOrLogin } from '../lib/auth.js';
11
11
  import { writeToken, readStoredToken } from '../lib/config.js';
12
12
  import { downloadAndExtractKit, cleanupTempDir } from '../lib/download.js';
@@ -67,6 +67,10 @@ export async function runUpdate(params = {}) {
67
67
  const projectRoot = deriveProjectRoot(manifest, manifestPath);
68
68
  const claudeRoot = resolve(join(projectRoot, '.claude'));
69
69
  const sourceFileList = copyKitFiles(sourceDir, targetDir, { dryRun: true });
70
+ // Fix 11-12 (performance): Build a Map for O(1) lookups in applyCopy.
71
+ // Previously sourceFileList.find() in applyCopy was O(n) per call, causing O(n²)
72
+ // behaviour when many files need updating. The Map is built once in O(n).
73
+ const sourceFileMap = new Map(sourceFileList.map(e => [e.relativePath, e]));
70
74
  const sourceFiles = {};
71
75
  for (const entry of sourceFileList) {
72
76
  const checksum = computeChecksum(entry.sourceAbsPath);
@@ -116,10 +120,11 @@ export async function runUpdate(params = {}) {
116
120
 
117
121
  /**
118
122
  * Copy a source file entry to its destination.
123
+ * Fix 11-12: Uses O(1) Map lookup instead of O(n) Array.find to eliminate O(n²) worst case.
119
124
  * @param {string} relPath
120
125
  */
121
126
  function applyCopy(relPath) {
122
- const entry = sourceFileList.find(f => f.relativePath === relPath);
127
+ const entry = sourceFileMap.get(relPath);
123
128
  if (!entry) return;
124
129
  const destAbs = join(projectRoot, relPath);
125
130
  // Bounds check: destination must stay inside .claude/ subtree
package/src/lib/auth.js CHANGED
@@ -5,7 +5,11 @@ import { GITHUB_API, KIT_REPO } from './constants.js';
5
5
  // Constants
6
6
  // ---------------------------------------------------------------------------
7
7
 
8
- export const GITHUB_CLIENT_ID = 'Ov23li35aA2A1xVa01B6';
8
+ // Public OAuth App client ID (no secret) — override via env for rotation without a code release.
9
+ // This is a public-flow client ID: GitHub Device Flow does not use a client secret,
10
+ // so committing this value is safe. The env override allows operators to rotate the
11
+ // OAuth app without publishing a new npm package.
12
+ export const GITHUB_CLIENT_ID = process.env.GITHUB_CLIENT_ID || 'Ov23li35aA2A1xVa01B6';
9
13
  export const GITHUB_DEVICE_CODE_URL = 'https://github.com/login/device/code';
10
14
  export const GITHUB_TOKEN_URL = 'https://github.com/login/oauth/access_token';
11
15
 
package/src/lib/config.js CHANGED
@@ -1,72 +1,90 @@
1
- import { join } from 'node:path';
2
- import { homedir } from 'node:os';
3
- import { mkdirSync, writeFileSync, readFileSync, unlinkSync, chmodSync, existsSync } from 'node:fs';
4
-
5
- /**
6
- * Returns the config directory for mk.
7
- * XDG-compliant: ~/.config/mk on Unix/macOS, %APPDATA%/mk on Windows.
8
- * @returns {string}
9
- */
10
- export function getConfigDir() {
11
- const appData = process.env.APPDATA;
12
- // Reject UNC paths (\\server\share) — these could redirect token storage to attacker-controlled
13
- // network shares. Fall back to the XDG path if APPDATA looks suspicious.
14
- if (appData && !appData.startsWith('\\\\')) {
15
- return join(appData, 'mk');
16
- }
17
- return join(homedir(), '.config', 'mk');
18
- }
19
-
20
- /**
21
- * Returns the token file path.
22
- * @returns {string}
23
- */
24
- export function getTokenPath() {
25
- return join(getConfigDir(), 'token');
26
- }
27
-
28
- /**
29
- * Read the stored token from disk.
30
- * @returns {string|null} Token string or null if not found.
31
- */
32
- export function readStoredToken() {
33
- const tokenPath = getTokenPath();
34
- if (!existsSync(tokenPath)) {
35
- return null;
36
- }
37
- try {
38
- return readFileSync(tokenPath, 'utf8').trim();
39
- } catch {
40
- return null;
41
- }
42
- }
43
-
44
- /**
45
- * Write the token to disk. Creates config dir if needed. Sets chmod 600 on Unix.
46
- * @param {string} token
47
- */
48
- export function writeToken(token) {
49
- const configDir = getConfigDir();
50
- const tokenPath = getTokenPath();
51
- mkdirSync(configDir, { recursive: true });
52
- // Write with mode 0o600 atomically prevents TOCTOU window between write and chmod.
53
- // On Windows the mode flag is ignored; home-dir ACLs provide equivalent protection.
54
- writeFileSync(tokenPath, token, { encoding: 'utf8', mode: 0o600 });
55
- if (process.platform !== 'win32') {
56
- // Explicit chmod ensures mode is set even if umask overrides the O_CREAT mode.
57
- chmodSync(tokenPath, 0o600);
58
- }
59
- }
60
-
61
- /**
62
- * Delete the stored token file.
63
- * Does not throw if the file does not exist.
64
- */
65
- export function deleteToken() {
66
- const tokenPath = getTokenPath();
67
- try {
68
- unlinkSync(tokenPath);
69
- } catch {
70
- // File doesn't existthat's fine
71
- }
72
- }
1
+ import { join } from 'node:path';
2
+ import { homedir } from 'node:os';
3
+ import { mkdirSync, writeFileSync, readFileSync, unlinkSync, chmodSync, existsSync } from 'node:fs';
4
+
5
+ /**
6
+ * Returns the config directory for mk.
7
+ * XDG-compliant: ~/.config/mk on Unix/macOS, %APPDATA%/mk on Windows.
8
+ * @returns {string}
9
+ */
10
+ export function getConfigDir() {
11
+ const appData = process.env.APPDATA;
12
+ // Reject UNC paths (\\server\share) — these could redirect token storage to attacker-controlled
13
+ // network shares. Fall back to the XDG path if APPDATA looks suspicious.
14
+ if (appData && !appData.startsWith('\\\\')) {
15
+ return join(appData, 'mk');
16
+ }
17
+ return join(homedir(), '.config', 'mk');
18
+ }
19
+
20
+ /**
21
+ * Returns the token file path.
22
+ * @returns {string}
23
+ */
24
+ export function getTokenPath() {
25
+ return join(getConfigDir(), 'token');
26
+ }
27
+
28
+ /**
29
+ * Read the stored token from disk.
30
+ * @returns {string|null} Token string or null if not found.
31
+ */
32
+ export function readStoredToken() {
33
+ const tokenPath = getTokenPath();
34
+ if (!existsSync(tokenPath)) {
35
+ return null;
36
+ }
37
+ try {
38
+ return readFileSync(tokenPath, 'utf8').trim();
39
+ } catch {
40
+ return null;
41
+ }
42
+ }
43
+
44
+ /**
45
+ * Validate that a token string matches the expected GitHub token format.
46
+ * Accepted prefixes: ghp_ (personal access token), gho_ (OAuth token),
47
+ * github_pat_ (fine-grained PAT). Rejects garbage values from spoofed API responses.
48
+ * @param {string} token
49
+ * @throws {Error} if the token format is invalid
50
+ */
51
+ function assertTokenFormat(token) {
52
+ if (typeof token !== 'string' || !/^(ghp_|gho_|github_pat_)[A-Za-z0-9_]+$/.test(token)) {
53
+ throw new Error(
54
+ `Invalid GitHub token format. Expected a token starting with ghp_, gho_, or github_pat_. ` +
55
+ `Got: "${typeof token === 'string' ? '<redacted>' : typeof token}"`
56
+ );
57
+ }
58
+ }
59
+
60
+ /**
61
+ * Write the token to disk. Creates config dir if needed. Sets chmod 600 on Unix.
62
+ * Validates token format before writing to reject spoofed or malformed values.
63
+ * @param {string} token
64
+ */
65
+ export function writeToken(token) {
66
+ assertTokenFormat(token);
67
+ const configDir = getConfigDir();
68
+ const tokenPath = getTokenPath();
69
+ mkdirSync(configDir, { recursive: true });
70
+ // Write with mode 0o600 atomically prevents TOCTOU window between write and chmod.
71
+ // On Windows the mode flag is ignored; home-dir ACLs provide equivalent protection.
72
+ writeFileSync(tokenPath, token, { encoding: 'utf8', mode: 0o600 });
73
+ if (process.platform !== 'win32') {
74
+ // Explicit chmod ensures mode is set even if umask overrides the O_CREAT mode.
75
+ chmodSync(tokenPath, 0o600);
76
+ }
77
+ }
78
+
79
+ /**
80
+ * Delete the stored token file.
81
+ * Does not throw if the file does not exist.
82
+ */
83
+ export function deleteToken() {
84
+ const tokenPath = getTokenPath();
85
+ try {
86
+ unlinkSync(tokenPath);
87
+ } catch {
88
+ // File doesn't exist — that's fine
89
+ }
90
+ }
package/src/lib/copy.js CHANGED
@@ -1,5 +1,5 @@
1
1
  import { join, relative } from 'node:path';
2
- import { statSync, lstatSync, existsSync, readFileSync, writeFileSync, mkdirSync } from 'node:fs';
2
+ import { statSync, lstatSync, existsSync, readFileSync, writeFileSync, mkdirSync, copyFileSync } from 'node:fs';
3
3
  import fsExtra from 'fs-extra';
4
4
  import { KIT_SUBDIRS, COPY_FILTER_PATTERNS, WINDOWS_PATH_WARN_LENGTH } from './constants.js';
5
5
 
@@ -154,16 +154,38 @@ export function copyKitFiles(sourceDir, targetDir, options = {}) {
154
154
  return fileList;
155
155
  }
156
156
 
157
+ /**
158
+ * Create a timestamped backup of a file.
159
+ * @param {string} filePath - Absolute path to the file to back up
160
+ * @returns {string|null} Backup path, or null if source doesn't exist
161
+ */
162
+ function backupFile(filePath) {
163
+ if (!existsSync(filePath)) return null;
164
+ const now = new Date();
165
+ const ts = [
166
+ now.getFullYear(), String(now.getMonth() + 1).padStart(2, '0'),
167
+ String(now.getDate()).padStart(2, '0'), '-',
168
+ String(now.getHours()).padStart(2, '0'), String(now.getMinutes()).padStart(2, '0')
169
+ ].join('');
170
+ const backupPath = `${filePath}.${ts}.bak`;
171
+ copyFileSync(filePath, backupPath);
172
+ return backupPath;
173
+ }
174
+
157
175
  /**
158
176
  * Merge kit's settings.json into user's existing settings.json.
159
- * Strategy: deep-merge "hooks" key from kit source; preserve all other user keys.
160
- * If user has no settings.json, copy kit source as-is.
177
+ * Strategy: merge "hooks" key from kit source; preserve all other user keys.
178
+ * When a matcher already exists, the kit's hooks REPLACE the user's hooks for
179
+ * that matcher — this ensures updated hook commands propagate on update instead
180
+ * of accumulating stale duplicates.
181
+ * If user has no settings.json, create one with hooks only.
161
182
  * If kit source has no settings.json, do nothing.
183
+ * A timestamped backup is created before any write to an existing file.
162
184
  *
163
185
  * @param {string} sourceDir - Absolute path to source .claude/
164
186
  * @param {string} targetDir - Absolute path to target .claude/
165
187
  * @param {{ dryRun: boolean }} options
166
- * @returns {{ action: 'created'|'merged'|'skipped', merged?: string[] }}
188
+ * @returns {{ action: 'created'|'merged'|'skipped', merged?: string[], backup?: string }}
167
189
  */
168
190
  export function mergeSettingsJson(sourceDir, targetDir, options = {}) {
169
191
  const { dryRun = false } = options;
@@ -179,11 +201,14 @@ export function mergeSettingsJson(sourceDir, targetDir, options = {}) {
179
201
  return { action: 'skipped' };
180
202
  }
181
203
 
182
- // No existing user settings — copy kit source as-is
204
+ // No existing user settings — create with hooks only (not permissions or other keys).
205
+ // Copying the full kit settings.json would duplicate permissions.deny entries when both
206
+ // global (~/.claude/settings.json) and project (.claude/settings.json) are initialised.
183
207
  if (!existsSync(destPath)) {
184
208
  if (!dryRun) {
209
+ const hooksOnly = kitSettings.hooks ? { hooks: kitSettings.hooks } : {};
185
210
  mkdirSync(targetDir, { recursive: true });
186
- writeFileSync(destPath, JSON.stringify(kitSettings, null, 2) + '\n', 'utf-8');
211
+ writeFileSync(destPath, JSON.stringify(hooksOnly, null, 2) + '\n', 'utf-8');
187
212
  }
188
213
  return { action: 'created' };
189
214
  }
@@ -199,7 +224,9 @@ export function mergeSettingsJson(sourceDir, targetDir, options = {}) {
199
224
 
200
225
  const merged = [];
201
226
 
202
- // Merge hooks: kit entries are added/updated, user entries not in kit are preserved
227
+ // Merge hooks: kit entries are added or replaced by matcher; user-only matchers preserved.
228
+ // Replace strategy: when the kit ships an updated hook command for an existing matcher,
229
+ // the old command is replaced instead of appended — preventing stale duplicates.
203
230
  if (kitSettings.hooks) {
204
231
  if (!userSettings.hooks) userSettings.hooks = {};
205
232
  for (const [event, kitEntries] of Object.entries(kitSettings.hooks)) {
@@ -207,31 +234,25 @@ export function mergeSettingsJson(sourceDir, targetDir, options = {}) {
207
234
  userSettings.hooks[event] = kitEntries;
208
235
  merged.push(event);
209
236
  } else {
210
- // Merge by matcher: add kit entries whose matcher doesn't already exist
211
237
  for (const kitEntry of kitEntries) {
212
238
  const kitMatcher = kitEntry.matcher || '*';
213
- const exists = userSettings.hooks[event].some(
239
+ const idx = userSettings.hooks[event].findIndex(
214
240
  e => (e.matcher || '*') === kitMatcher
215
241
  );
216
- if (!exists) {
242
+ if (idx === -1) {
243
+ // New matcher — add it
217
244
  userSettings.hooks[event].push(kitEntry);
218
- merged.push(`${event}[${kitMatcher}]`);
219
- }
220
- // If matcher exists, check if kit hooks are present
221
- if (exists) {
222
- const userEntry = userSettings.hooks[event].find(
223
- e => (e.matcher || '*') === kitMatcher
224
- );
225
- if (userEntry && userEntry.hooks && kitEntry.hooks) {
226
- for (const kh of kitEntry.hooks) {
227
- const hookExists = userEntry.hooks.some(
228
- uh => uh.command === kh.command
229
- );
230
- if (!hookExists) {
231
- userEntry.hooks.push(kh);
232
- merged.push(`${event}[${kitMatcher}]:${kh.command}`);
233
- }
234
- }
245
+ merged.push(`${event}[${kitMatcher}]:added`);
246
+ } else {
247
+ // Existing matcher — replace with kit version if hooks differ
248
+ const userEntry = userSettings.hooks[event][idx];
249
+ const userCmds = (userEntry.hooks || []).map(h => h.command).sort();
250
+ const kitCmds = (kitEntry.hooks || []).map(h => h.command).sort();
251
+ const same = userCmds.length === kitCmds.length &&
252
+ userCmds.every((c, i) => c === kitCmds[i]);
253
+ if (!same) {
254
+ userSettings.hooks[event][idx] = kitEntry;
255
+ merged.push(`${event}[${kitMatcher}]:replaced`);
235
256
  }
236
257
  }
237
258
  }
@@ -242,7 +263,9 @@ export function mergeSettingsJson(sourceDir, targetDir, options = {}) {
242
263
  if (merged.length === 0) return { action: 'skipped' };
243
264
 
244
265
  if (!dryRun) {
266
+ const backup = backupFile(destPath);
245
267
  writeFileSync(destPath, JSON.stringify(userSettings, null, 2) + '\n', 'utf-8');
268
+ return { action: 'merged', merged, backup };
246
269
  }
247
270
  return { action: 'merged', merged };
248
271
  }
@@ -1,276 +1,323 @@
1
- import { createGunzip } from 'node:zlib';
2
- import { mkdirSync, writeFileSync, rmSync, mkdtempSync } from 'node:fs';
3
- import { join, dirname, resolve, sep } from 'node:path';
4
- import { tmpdir } from 'node:os';
5
- import { Writable, Readable } from 'node:stream';
6
- import { pipeline } from 'node:stream/promises';
7
- import { GITHUB_API, KIT_REPO } from './constants.js';
8
-
9
- // ---------------------------------------------------------------------------
10
- // Constants
11
- // ---------------------------------------------------------------------------
12
-
13
- const KIT_BRANCH = 'main';
14
- const TARBALL_URL = `${GITHUB_API}/repos/${KIT_REPO}/tarball/${KIT_BRANCH}`;
15
-
16
- // ---------------------------------------------------------------------------
17
- // Manual tar stream parser (zero-dependency, handles regular files + dirs)
18
- // ---------------------------------------------------------------------------
19
-
20
- /**
21
- * A Writable stream that parses tar format and writes matching entries to disk.
22
- * Only processes entries whose paths contain '.claude/' after stripping the root prefix.
23
- *
24
- * Tar format: 512-byte header blocks followed by data blocks (padded to 512 bytes).
25
- * Two consecutive 512-byte zero blocks mark end of archive.
26
- */
27
- class TarExtractor extends Writable {
28
- /**
29
- * @param {string} destDir - Destination directory (resolved to absolute path)
30
- */
31
- constructor(destDir) {
32
- super();
33
- this.destDir = resolve(destDir);
34
- // Chunk list avoids O(n²) Buffer.concat on every write
35
- this._chunks = [];
36
- this._totalLen = 0;
37
- this._state = 'header'; // 'header' | 'data' | 'skip'
38
- this._remaining = 0; // bytes left in current entry data
39
- this._paddedSize = 0; // padded size of current entry (multiple of 512)
40
- this._currentPath = ''; // relative path being written ('' if not .claude/)
41
- this._rootPrefix = null; // first root directory prefix to strip
42
- this._zeroBlocks = 0;
43
- }
44
-
45
- _write(chunk, encoding, callback) {
46
- this._chunks.push(chunk);
47
- this._totalLen += chunk.length;
48
- try {
49
- this._process();
50
- callback();
51
- } catch (err) {
52
- callback(err);
53
- }
54
- }
55
-
56
- /** Consolidate pending chunks into one Buffer (lazy — only when access needed). */
57
- _getBuffer() {
58
- if (this._chunks.length !== 1) {
59
- this._chunks = [Buffer.concat(this._chunks)];
60
- }
61
- return this._chunks[0];
62
- }
63
-
64
- /** Consume n bytes from the front of the chunk list. */
65
- _consumeBuffer(n) {
66
- const buf = this._getBuffer();
67
- const remaining = buf.slice(n);
68
- this._chunks = remaining.length > 0 ? [remaining] : [];
69
- this._totalLen -= n;
70
- }
71
-
72
- _process() {
73
- while (this._totalLen >= 512) {
74
- if (this._state === 'header') {
75
- this._parseHeader();
76
- } else if (this._state === 'data') {
77
- this._readData();
78
- } else if (this._state === 'skip') {
79
- this._skipData();
80
- }
81
-
82
- // Don't loop if we can't make progress (need paddedSize bytes to consume a data/skip entry)
83
- if (this._state === 'data' && this._totalLen < this._paddedSize) break;
84
- if (this._state === 'skip' && this._totalLen < this._paddedSize) break;
85
- }
86
- }
87
-
88
- /**
89
- * Assert that resolvedPath is safely contained within this.destDir.
90
- * Throws if the path would escape the destination directory.
91
- * @param {string} resolvedPath
92
- */
93
- _assertSafe(resolvedPath) {
94
- if (resolvedPath !== this.destDir && !resolvedPath.startsWith(this.destDir + sep)) {
95
- throw new Error(`Path traversal detected: "${resolvedPath}" escapes destination directory`);
96
- }
97
- }
98
-
99
- _parseHeader() {
100
- const block = this._getBuffer().slice(0, 512);
101
-
102
- // Check for zero block (end of archive)
103
- if (block[0] === 0 && block.every(b => b === 0)) {
104
- this._zeroBlocks++;
105
- this._consumeBuffer(512);
106
- return;
107
- }
108
- this._zeroBlocks = 0;
109
-
110
- // Parse header fields
111
- const rawName = block.slice(0, 100).toString('utf8').replace(/\0+$/, '');
112
- const prefix = block.slice(345, 500).toString('utf8').replace(/\0+$/, '');
113
- const fullName = prefix ? `${prefix}/${rawName}` : rawName;
114
-
115
- const sizeOctal = block.slice(124, 136).toString('utf8').replace(/\0/g, '').trim();
116
- const size = sizeOctal ? parseInt(sizeOctal, 8) : 0;
117
-
118
- const typeFlag = String.fromCharCode(block[156]) || '0';
119
-
120
- // Skip PAX extended headers ('x') and PAX global headers ('g') without
121
- // participating in root-prefix detection. GitHub tarballs prepend a
122
- // pax_global_header whose name has no slash; if we let it set rootPrefix
123
- // to '' every subsequent entry fails the '.claude/' filter → 0 files.
124
- if (typeFlag === 'g' || typeFlag === 'x') {
125
- this._consumeBuffer(512);
126
- if (size > 0) {
127
- this._paddedSize = Math.ceil(size / 512) * 512;
128
- this._state = 'skip';
129
- }
130
- return;
131
- }
132
-
133
- // Detect and strip root prefix (first directory component)
134
- if (this._rootPrefix === null) {
135
- const firstSlash = fullName.indexOf('/');
136
- this._rootPrefix = firstSlash >= 0 ? fullName.slice(0, firstSlash + 1) : '';
137
- }
138
-
139
- const strippedName = fullName.startsWith(this._rootPrefix)
140
- ? fullName.slice(this._rootPrefix.length)
141
- : fullName;
142
-
143
- this._consumeBuffer(512);
144
-
145
- // Block symlinks and hard links entirely — prevents symlink-based escapes
146
- if (typeFlag === '1' || typeFlag === '2') {
147
- this._state = 'header';
148
- return;
149
- }
150
-
151
- // Only process entries under .claude/
152
- const isClaudePath = strippedName.startsWith('.claude/');
153
-
154
- if (typeFlag === '5' || typeFlag === '\0' || typeFlag === '') {
155
- // Directory entry
156
- if (isClaudePath && strippedName) {
157
- const dirPath = resolve(join(this.destDir, strippedName));
158
- this._assertSafe(dirPath);
159
- mkdirSync(dirPath, { recursive: true });
160
- }
161
- this._state = 'header';
162
- return;
163
- }
164
-
165
- // Regular file entry (typeFlag '0' or empty/null)
166
- if (size === 0) {
167
- if (isClaudePath) {
168
- const filePath = resolve(join(this.destDir, strippedName));
169
- this._assertSafe(filePath);
170
- mkdirSync(dirname(filePath), { recursive: true });
171
- writeFileSync(filePath, Buffer.alloc(0));
172
- }
173
- this._state = 'header';
174
- return;
175
- }
176
-
177
- this._remaining = size;
178
- this._paddedSize = Math.ceil(size / 512) * 512;
179
-
180
- if (isClaudePath) {
181
- this._currentPath = strippedName;
182
- this._state = 'data';
183
- } else {
184
- this._state = 'skip';
185
- }
186
- }
187
-
188
- _readData() {
189
- if (this._totalLen < this._paddedSize && this._totalLen < 512) {
190
- return; // Wait for more data
191
- }
192
-
193
- if (this._totalLen >= this._paddedSize) {
194
- // We have all the data for this entry
195
- const buf = this._getBuffer();
196
- const rawData = buf.slice(0, this._remaining);
197
- this._consumeBuffer(this._paddedSize);
198
-
199
- const filePath = resolve(join(this.destDir, this._currentPath));
200
- this._assertSafe(filePath);
201
- mkdirSync(dirname(filePath), { recursive: true });
202
- writeFileSync(filePath, rawData);
203
-
204
- this._currentPath = '';
205
- this._state = 'header';
206
- }
207
- // else: not enough data yet — wait
208
- }
209
-
210
- _skipData() {
211
- if (this._totalLen < this._paddedSize) {
212
- return; // Wait for more data
213
- }
214
- this._consumeBuffer(this._paddedSize);
215
- this._state = 'header';
216
- }
217
-
218
- _final(callback) {
219
- callback();
220
- }
221
- }
222
-
223
- // ---------------------------------------------------------------------------
224
- // Public API
225
- // ---------------------------------------------------------------------------
226
-
227
- /**
228
- * Download the kit repository as a tarball and extract .claude/ to targetDir.
229
- *
230
- * @param {string} token - GitHub Bearer token
231
- * @param {{ targetDir?: string, url?: string }} [opts]
232
- * - url: override the download URL (e.g. a release tarball URL). Defaults to main-branch TARBALL_URL.
233
- * @returns {Promise<string>} The targetDir path
234
- */
235
- export async function downloadAndExtractKit(token, opts = {}) {
236
- const { targetDir = mkdtempSync(join(tmpdir(), 'mk-kit-')), url = TARBALL_URL } = opts;
237
-
238
- let res;
239
- try {
240
- res = await fetch(url, {
241
- headers: {
242
- Authorization: `Bearer ${token}`,
243
- Accept: 'application/vnd.github.v3+json'
244
- },
245
- redirect: 'follow'
246
- });
247
- } catch (err) {
248
- throw new Error(`Network connection failed: ${err.message}`);
249
- }
250
-
251
- if (!res.ok) {
252
- throw new Error(`GitHub API error: ${res.status} ${res.statusText}`);
253
- }
254
-
255
- mkdirSync(targetDir, { recursive: true });
256
-
257
- const gunzip = createGunzip();
258
- const extractor = new TarExtractor(targetDir);
259
-
260
- await pipeline(
261
- Readable.fromWeb(res.body),
262
- gunzip,
263
- extractor
264
- );
265
-
266
- return targetDir;
267
- }
268
-
269
- /**
270
- * Remove a temp directory created by downloadAndExtractKit.
271
- *
272
- * @param {string} tempDir
273
- */
274
- export function cleanupTempDir(tempDir) {
275
- rmSync(tempDir, { recursive: true, force: true });
276
- }
1
+ import { createGunzip } from 'node:zlib';
2
+ import { mkdirSync, writeFileSync, rmSync, mkdtempSync } from 'node:fs';
3
+ import { join, dirname, resolve, sep } from 'node:path';
4
+ import { tmpdir } from 'node:os';
5
+ import { Writable, Readable } from 'node:stream';
6
+ import { pipeline } from 'node:stream/promises';
7
+ import { GITHUB_API, KIT_REPO } from './constants.js';
8
+
9
+ // ---------------------------------------------------------------------------
10
+ // Constants
11
+ // ---------------------------------------------------------------------------
12
+
13
+ const KIT_BRANCH = 'main';
14
+ const TARBALL_URL = `${GITHUB_API}/repos/${KIT_REPO}/tarball/${KIT_BRANCH}`;
15
+
16
+ /** Maximum size (bytes) allowed for a single tar entry. Prevents memory-exhaustion
17
+ * from crafted tarballs with large size fields. 50 MB is well above any kit file. */
18
+ const MAX_ENTRY_SIZE = 50 * 1024 * 1024; // 52428800 bytes
19
+
20
+ /**
21
+ * Validate that the download URL's hostname is a GitHub domain.
22
+ * Prevents SSRF: caller-supplied URLs (e.g. tarballUrl from GitHub API JSON) could be
23
+ * redirected to an attacker-controlled host, exfiltrating the Bearer token.
24
+ * Allowed: api.github.com, *.github.com (e.g. codeload.github.com)
25
+ * @param {string} url
26
+ * @throws {Error} if hostname is not a GitHub domain
27
+ */
28
+ function assertGitHubHostname(url) {
29
+ let parsed;
30
+ try {
31
+ parsed = new URL(url);
32
+ } catch {
33
+ throw new Error(`SSRF guard: invalid URL "${url}"`);
34
+ }
35
+ const { hostname } = parsed;
36
+ const ALLOWED_HOSTS = new Set(['github.com', 'api.github.com', 'codeload.github.com']);
37
+ if (!ALLOWED_HOSTS.has(hostname)) {
38
+ throw new Error(
39
+ `SSRF guard: URL hostname "${hostname}" is not allowed. ` +
40
+ `Only github.com domains are permitted for kit downloads.`
41
+ );
42
+ }
43
+ }
44
+
45
+ // ---------------------------------------------------------------------------
46
+ // Manual tar stream parser (zero-dependency, handles regular files + dirs)
47
+ // ---------------------------------------------------------------------------
48
+
49
+ /**
50
+ * A Writable stream that parses tar format and writes matching entries to disk.
51
+ * Only processes entries whose paths contain '.claude/' after stripping the root prefix.
52
+ *
53
+ * Tar format: 512-byte header blocks followed by data blocks (padded to 512 bytes).
54
+ * Two consecutive 512-byte zero blocks mark end of archive.
55
+ */
56
+ class TarExtractor extends Writable {
57
+ /**
58
+ * @param {string} destDir - Destination directory (resolved to absolute path)
59
+ */
60
+ constructor(destDir) {
61
+ super();
62
+ this.destDir = resolve(destDir);
63
+ // Chunk list avoids O(n²) Buffer.concat on every write
64
+ this._chunks = [];
65
+ this._totalLen = 0;
66
+ this._state = 'header'; // 'header' | 'data' | 'skip'
67
+ this._remaining = 0; // bytes left in current entry data
68
+ this._paddedSize = 0; // padded size of current entry (multiple of 512)
69
+ this._currentPath = ''; // relative path being written ('' if not .claude/)
70
+ this._rootPrefix = null; // first root directory prefix to strip
71
+ this._zeroBlocks = 0;
72
+ }
73
+
74
+ _write(chunk, encoding, callback) {
75
+ this._chunks.push(chunk);
76
+ this._totalLen += chunk.length;
77
+ try {
78
+ this._process();
79
+ callback();
80
+ } catch (err) {
81
+ callback(err);
82
+ }
83
+ }
84
+
85
+ /** Consolidate pending chunks into one Buffer (lazy — only when access needed). */
86
+ _getBuffer() {
87
+ if (this._chunks.length !== 1) {
88
+ this._chunks = [Buffer.concat(this._chunks)];
89
+ }
90
+ return this._chunks[0];
91
+ }
92
+
93
+ /** Consume n bytes from the front of the chunk list. */
94
+ _consumeBuffer(n) {
95
+ const buf = this._getBuffer();
96
+ const remaining = buf.slice(n);
97
+ this._chunks = remaining.length > 0 ? [remaining] : [];
98
+ this._totalLen -= n;
99
+ }
100
+
101
+ _process() {
102
+ while (this._totalLen >= 512) {
103
+ if (this._state === 'header') {
104
+ this._parseHeader();
105
+ } else if (this._state === 'data') {
106
+ this._readData();
107
+ } else if (this._state === 'skip') {
108
+ this._skipData();
109
+ }
110
+
111
+ // Don't loop if we can't make progress (need paddedSize bytes to consume a data/skip entry)
112
+ if (this._state === 'data' && this._totalLen < this._paddedSize) break;
113
+ if (this._state === 'skip' && this._totalLen < this._paddedSize) break;
114
+ }
115
+ }
116
+
117
+ /**
118
+ * Assert that resolvedPath is safely contained within this.destDir.
119
+ * Throws if the path would escape the destination directory.
120
+ * Fix 7: On case-insensitive filesystems (win32, darwin) compare lowercased paths
121
+ * to prevent mixed-case bypass (e.g. /tmp/Mk-Kit-abc matching /tmp/mk-kit-abc).
122
+ * @param {string} resolvedPath
123
+ */
124
+ _assertSafe(resolvedPath) {
125
+ const isCaseInsensitive = process.platform === 'win32' || process.platform === 'darwin';
126
+ const a = isCaseInsensitive ? resolvedPath.toLowerCase() : resolvedPath;
127
+ const b = isCaseInsensitive ? this.destDir.toLowerCase() : this.destDir;
128
+ if (a !== b && !a.startsWith(b + sep)) {
129
+ throw new Error(`Path traversal detected: "${resolvedPath}" escapes destination directory`);
130
+ }
131
+ }
132
+
133
+ _parseHeader() {
134
+ const block = this._getBuffer().slice(0, 512);
135
+
136
+ // Check for zero block (end of archive)
137
+ if (block[0] === 0 && block.every(b => b === 0)) {
138
+ this._zeroBlocks++;
139
+ this._consumeBuffer(512);
140
+ return;
141
+ }
142
+ this._zeroBlocks = 0;
143
+
144
+ // Parse header fields
145
+ const rawName = block.slice(0, 100).toString('utf8').replace(/\0+$/, '');
146
+ const prefix = block.slice(345, 500).toString('utf8').replace(/\0+$/, '');
147
+ const fullName = prefix ? `${prefix}/${rawName}` : rawName;
148
+
149
+ const sizeOctal = block.slice(124, 136).toString('utf8').replace(/\0/g, '').trim();
150
+ const size = sizeOctal ? parseInt(sizeOctal, 8) : 0;
151
+
152
+ // Fix 6: Per-entry size cap — reject crafted tarballs with enormous size fields
153
+ // that would cause _readData to buffer the whole entry in memory (DoS vector).
154
+ if (size > MAX_ENTRY_SIZE) {
155
+ throw new Error(
156
+ `Tar entry size ${size} bytes exceeds maximum allowed ${MAX_ENTRY_SIZE} bytes (50 MB): ` +
157
+ `entry "${fullName}"`
158
+ );
159
+ }
160
+
161
+ const typeFlag = String.fromCharCode(block[156]) || '0';
162
+
163
+ // Skip PAX extended headers ('x') and PAX global headers ('g') without
164
+ // participating in root-prefix detection. GitHub tarballs prepend a
165
+ // pax_global_header whose name has no slash; if we let it set rootPrefix
166
+ // to '' every subsequent entry fails the '.claude/' filter → 0 files.
167
+ if (typeFlag === 'g' || typeFlag === 'x') {
168
+ this._consumeBuffer(512);
169
+ if (size > 0) {
170
+ this._paddedSize = Math.ceil(size / 512) * 512;
171
+ this._state = 'skip';
172
+ }
173
+ return;
174
+ }
175
+
176
+ // Detect and strip root prefix (first directory component)
177
+ if (this._rootPrefix === null) {
178
+ const firstSlash = fullName.indexOf('/');
179
+ this._rootPrefix = firstSlash >= 0 ? fullName.slice(0, firstSlash + 1) : '';
180
+ }
181
+
182
+ const strippedName = fullName.startsWith(this._rootPrefix)
183
+ ? fullName.slice(this._rootPrefix.length)
184
+ : fullName;
185
+
186
+ this._consumeBuffer(512);
187
+
188
+ // Block symlinks and hard links entirely — prevents symlink-based escapes
189
+ if (typeFlag === '1' || typeFlag === '2') {
190
+ this._state = 'header';
191
+ return;
192
+ }
193
+
194
+ // Only process entries under .claude/
195
+ const isClaudePath = strippedName.startsWith('.claude/');
196
+
197
+ if (typeFlag === '5' || typeFlag === '\0' || typeFlag === '') {
198
+ // Directory entry
199
+ if (isClaudePath && strippedName) {
200
+ const dirPath = resolve(join(this.destDir, strippedName));
201
+ this._assertSafe(dirPath);
202
+ mkdirSync(dirPath, { recursive: true });
203
+ }
204
+ this._state = 'header';
205
+ return;
206
+ }
207
+
208
+ // Regular file entry (typeFlag '0' or empty/null)
209
+ if (size === 0) {
210
+ if (isClaudePath) {
211
+ const filePath = resolve(join(this.destDir, strippedName));
212
+ this._assertSafe(filePath);
213
+ mkdirSync(dirname(filePath), { recursive: true });
214
+ writeFileSync(filePath, Buffer.alloc(0));
215
+ }
216
+ this._state = 'header';
217
+ return;
218
+ }
219
+
220
+ this._remaining = size;
221
+ this._paddedSize = Math.ceil(size / 512) * 512;
222
+
223
+ if (isClaudePath) {
224
+ this._currentPath = strippedName;
225
+ this._state = 'data';
226
+ } else {
227
+ this._state = 'skip';
228
+ }
229
+ }
230
+
231
+ _readData() {
232
+ if (this._totalLen < this._paddedSize && this._totalLen < 512) {
233
+ return; // Wait for more data
234
+ }
235
+
236
+ if (this._totalLen >= this._paddedSize) {
237
+ // We have all the data for this entry
238
+ const buf = this._getBuffer();
239
+ const rawData = buf.slice(0, this._remaining);
240
+ this._consumeBuffer(this._paddedSize);
241
+
242
+ const filePath = resolve(join(this.destDir, this._currentPath));
243
+ this._assertSafe(filePath);
244
+ mkdirSync(dirname(filePath), { recursive: true });
245
+ writeFileSync(filePath, rawData);
246
+
247
+ this._currentPath = '';
248
+ this._state = 'header';
249
+ }
250
+ // else: not enough data yet — wait
251
+ }
252
+
253
+ _skipData() {
254
+ if (this._totalLen < this._paddedSize) {
255
+ return; // Wait for more data
256
+ }
257
+ this._consumeBuffer(this._paddedSize);
258
+ this._state = 'header';
259
+ }
260
+
261
+ _final(callback) {
262
+ callback();
263
+ }
264
+ }
265
+
266
+ // ---------------------------------------------------------------------------
267
+ // Public API
268
+ // ---------------------------------------------------------------------------
269
+
270
+ /**
271
+ * Download the kit repository as a tarball and extract .claude/ to targetDir.
272
+ *
273
+ * @param {string} token - GitHub Bearer token
274
+ * @param {{ targetDir?: string, url?: string }} [opts]
275
+ * - url: override the download URL (e.g. a release tarball URL). Defaults to main-branch TARBALL_URL.
276
+ * @returns {Promise<string>} The targetDir path
277
+ */
278
+ export async function downloadAndExtractKit(token, opts = {}) {
279
+ const { targetDir = mkdtempSync(join(tmpdir(), 'mk-kit-')), url = TARBALL_URL } = opts;
280
+
281
+ // Fix 4: SSRF guard — assert hostname is a GitHub domain before forwarding Bearer token.
282
+ // A compromised or MITM'd GitHub API response could supply an attacker-controlled URL.
283
+ assertGitHubHostname(url);
284
+
285
+ let res;
286
+ try {
287
+ res = await fetch(url, {
288
+ headers: {
289
+ Authorization: `Bearer ${token}`,
290
+ Accept: 'application/vnd.github.v3+json'
291
+ },
292
+ redirect: 'follow'
293
+ });
294
+ } catch (err) {
295
+ throw new Error(`Network connection failed: ${err.message}`);
296
+ }
297
+
298
+ if (!res.ok) {
299
+ throw new Error(`GitHub API error: ${res.status} ${res.statusText}`);
300
+ }
301
+
302
+ mkdirSync(targetDir, { recursive: true });
303
+
304
+ const gunzip = createGunzip();
305
+ const extractor = new TarExtractor(targetDir);
306
+
307
+ await pipeline(
308
+ Readable.fromWeb(res.body),
309
+ gunzip,
310
+ extractor
311
+ );
312
+
313
+ return targetDir;
314
+ }
315
+
316
+ /**
317
+ * Remove a temp directory created by downloadAndExtractKit.
318
+ *
319
+ * @param {string} tempDir
320
+ */
321
+ export function cleanupTempDir(tempDir) {
322
+ rmSync(tempDir, { recursive: true, force: true });
323
+ }