@khanhcan148/mk 0.1.13 → 0.1.15

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -134,7 +134,7 @@ User → /mk-* command (skill) → spawns utility agents → agents use knowledg
134
134
  | `/mk-docs` | Generate and update project documentation; maintains AGENTS.md; Impact Areas analysis produces human-readable "What changed / Who is affected / What could go wrong" narrative |
135
135
  | `/mk-git` | Git operations: branch, commit, push, PR, merge |
136
136
  | `/mk-research` | Deep multi-source research on technical topics |
137
- | `/mk-spike` | Investigate external service integrations: fetch API docs, evaluate options, produce spike.md with Go/No-Go |
137
+ | `/mk-spike` | Investigate external service integrations: fetch API docs, evaluate options, produce <service-slug>-spike.md with Go/No-Go |
138
138
  | `/mk-overview` | Synthesize project artifacts into multi-tier stakeholder overview: Executive Brief, Product Report, Technical Report |
139
139
  | `/mk-workflow` | Trace REST endpoint call chains with upstream caller detection, variant branching, side effects/feature flags, Mermaid diagrams |
140
140
  | `/mk-log-analysis` | Analyze production logs from Datadog or Azure Application Insights via MCP; progressive severity triage, pattern detection, mandatory stack trace investigation, mk-debug integration |
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@khanhcan148/mk",
3
- "version": "0.1.13",
3
+ "version": "0.1.15",
4
4
  "description": "CLI to install and manage MyClaudeKit (.claude/) in your projects",
5
5
  "type": "module",
6
6
  "bin": {
@@ -14,7 +14,7 @@
14
14
  "node": ">=18.0.0"
15
15
  },
16
16
  "scripts": {
17
- "test": "node --test test/lib/*.test.js test/commands/*.test.js test/integration/*.test.js test/characterization/*.characterization.test.js test/hooks/*.test.cjs",
17
+ "test": "node --test test/lib/*.test.js test/commands/*.test.js test/integration/*.test.js test/characterization/*.characterization.test.js .claude/hooks/tests/*.test.cjs",
18
18
  "lint": "node --check src/**/*.js bin/**/*.js 2>/dev/null",
19
19
  "selftest": "python3 .claude/skills/mk-selftest/scripts/validate_kit.py"
20
20
  },
@@ -5,7 +5,7 @@ import { fileURLToPath } from 'node:url';
5
5
  import { copyKitFiles, mergeSettingsJson } from '../lib/copy.js';
6
6
  import { writeManifest } from '../lib/manifest.js';
7
7
  import { computeChecksum } from '../lib/checksum.js';
8
- import { resolveTargetDir, resolveManifestPath } from '../lib/paths.js';
8
+ import { resolveSourceDir, resolveTargetDir, resolveManifestPath } from '../lib/paths.js';
9
9
  import { MANIFEST_FILENAME } from '../lib/constants.js';
10
10
  import { resolveTokenOrLogin } from '../lib/auth.js';
11
11
  import { writeToken, readStoredToken } from '../lib/config.js';
@@ -6,13 +6,39 @@ import { fileURLToPath } from 'node:url';
6
6
  import { readManifest, updateManifest, diffManifest } from '../lib/manifest.js';
7
7
  import { computeChecksum } from '../lib/checksum.js';
8
8
  import { copyKitFiles, collectDiskFiles, mergeSettingsJson } from '../lib/copy.js';
9
- import { resolveTargetDir, resolveManifestPath, deriveProjectRoot, assertSafePath } from '../lib/paths.js';
9
+ import { resolveSourceDir, resolveTargetDir, resolveManifestPath, deriveProjectRoot, assertSafePath } from '../lib/paths.js';
10
10
  import { resolveTokenOrLogin } from '../lib/auth.js';
11
11
  import { writeToken, readStoredToken } from '../lib/config.js';
12
12
  import { downloadAndExtractKit, cleanupTempDir } from '../lib/download.js';
13
13
  import { fetchLatestRelease, compareVersions } from '../lib/releases.js';
14
14
  import { isEmptyDir } from '../lib/fs-utils.js';
15
15
 
16
+ // ---------------------------------------------------------------------------
17
+ // Security: strip terminal escape sequences from untrusted content
18
+ // ---------------------------------------------------------------------------
19
+
20
+ /**
21
+ * Strip terminal escape sequences from a string to prevent terminal injection
22
+ * when printing content sourced from the GitHub API (e.g. release notes).
23
+ *
24
+ * Removes:
25
+ * - CSI sequences: ESC [ ... <letter> (e.g. color codes, cursor movement, screen clear)
26
+ * - OSC sequences: ESC ] ... BEL/ST (e.g. window title manipulation)
27
+ * - Fe two-character sequences: ESC <char> (e.g. ESC c = RIS terminal reset, ESC P = DCS)
28
+ * - Raw C0 control characters (0x00-0x08, 0x0b, 0x0c, 0x0e-0x1f) excluding
29
+ * printable whitespace (\t, \n, \r which are 0x09, 0x0a, 0x0d)
30
+ *
31
+ * @param {string} str
32
+ * @returns {string}
33
+ */
34
+ function stripTerminalEscapes(str) {
35
+ return str
36
+ .replace(/\x1b\[[0-9;]*[a-zA-Z]/g, '') // CSI sequences
37
+ .replace(/\x1b\].*?(\x07|\x1b\\)/gs, '') // OSC sequences (dotAll for multiline)
38
+ .replace(/\x1b[^[\]]/g, '') // Fe two-char sequences (ESC c, ESC P, etc.)
39
+ .replace(/[\x00-\x08\x0b\x0c\x0e-\x1f]/g, ''); // raw control chars (preserve \t \n \r)
40
+ }
41
+
16
42
  // ---------------------------------------------------------------------------
17
43
  // Prompt helper
18
44
  // ---------------------------------------------------------------------------
@@ -67,6 +93,10 @@ export async function runUpdate(params = {}) {
67
93
  const projectRoot = deriveProjectRoot(manifest, manifestPath);
68
94
  const claudeRoot = resolve(join(projectRoot, '.claude'));
69
95
  const sourceFileList = copyKitFiles(sourceDir, targetDir, { dryRun: true });
96
+ // Fix 11-12 (performance): Build a Map for O(1) lookups in applyCopy.
97
+ // Previously sourceFileList.find() in applyCopy was O(n) per call, causing O(n²)
98
+ // behaviour when many files need updating. The Map is built once in O(n).
99
+ const sourceFileMap = new Map(sourceFileList.map(e => [e.relativePath, e]));
70
100
  const sourceFiles = {};
71
101
  for (const entry of sourceFileList) {
72
102
  const checksum = computeChecksum(entry.sourceAbsPath);
@@ -116,10 +146,11 @@ export async function runUpdate(params = {}) {
116
146
 
117
147
  /**
118
148
  * Copy a source file entry to its destination.
149
+ * Fix 11-12: Uses O(1) Map lookup instead of O(n) Array.find to eliminate O(n²) worst case.
119
150
  * @param {string} relPath
120
151
  */
121
152
  function applyCopy(relPath) {
122
- const entry = sourceFileList.find(f => f.relativePath === relPath);
153
+ const entry = sourceFileMap.get(relPath);
123
154
  if (!entry) return;
124
155
  const destAbs = join(projectRoot, relPath);
125
156
  // Bounds check: destination must stay inside .claude/ subtree
@@ -322,10 +353,13 @@ export async function updateAction(options = {}, deps = {}) {
322
353
  chalk.cyan(`Update available: v${local} -> v${remote}\n`)
323
354
  );
324
355
 
325
- // Show release notes (if any), truncated to 500 chars
356
+ // Show release notes (if any), truncated to 500 chars.
357
+ // S4: Strip terminal escape sequences before printing to prevent injection via
358
+ // crafted GitHub release bodies (CSI/OSC sequences can clear screen, set window titles, etc.).
326
359
  const body = release.body;
327
360
  if (body && body.trim().length > 0) {
328
- const notes = body.length > 500 ? body.slice(0, 500) + '...' : body;
361
+ const rawNotes = body.length > 500 ? body.slice(0, 500) + '...' : body;
362
+ const notes = stripTerminalEscapes(rawNotes);
329
363
  process.stdout.write('\nRelease notes:\n');
330
364
  process.stdout.write(notes + '\n\n');
331
365
  }
package/src/lib/auth.js CHANGED
@@ -5,7 +5,11 @@ import { GITHUB_API, KIT_REPO } from './constants.js';
5
5
  // Constants
6
6
  // ---------------------------------------------------------------------------
7
7
 
8
- export const GITHUB_CLIENT_ID = 'Ov23li35aA2A1xVa01B6';
8
+ // Public OAuth App client ID (no secret) — override via env for rotation without a code release.
9
+ // This is a public-flow client ID: GitHub Device Flow does not use a client secret,
10
+ // so committing this value is safe. The env override allows operators to rotate the
11
+ // OAuth app without publishing a new npm package.
12
+ export const GITHUB_CLIENT_ID = process.env.GITHUB_CLIENT_ID || 'Ov23li35aA2A1xVa01B6';
9
13
  export const GITHUB_DEVICE_CODE_URL = 'https://github.com/login/device/code';
10
14
  export const GITHUB_TOKEN_URL = 'https://github.com/login/oauth/access_token';
11
15
 
@@ -123,7 +127,9 @@ export async function startDeviceFlow(opts = {}) {
123
127
  'Content-Type': 'application/x-www-form-urlencoded',
124
128
  Accept: 'application/json'
125
129
  },
126
- body: `client_id=${GITHUB_CLIENT_ID}&scope=repo`
130
+ // Empty scope sufficient for public repos (5000 req/hr). Set MK_OAUTH_SCOPE=repo for private forks.
131
+ // Use URLSearchParams to prevent parameter injection via env var containing '&' chars.
132
+ body: new URLSearchParams({ client_id: GITHUB_CLIENT_ID, scope: process.env.MK_OAUTH_SCOPE || '' }).toString()
127
133
  });
128
134
 
129
135
  if (!codeRes.ok) {
package/src/lib/config.js CHANGED
@@ -1,72 +1,90 @@
1
- import { join } from 'node:path';
2
- import { homedir } from 'node:os';
3
- import { mkdirSync, writeFileSync, readFileSync, unlinkSync, chmodSync, existsSync } from 'node:fs';
4
-
5
- /**
6
- * Returns the config directory for mk.
7
- * XDG-compliant: ~/.config/mk on Unix/macOS, %APPDATA%/mk on Windows.
8
- * @returns {string}
9
- */
10
- export function getConfigDir() {
11
- const appData = process.env.APPDATA;
12
- // Reject UNC paths (\\server\share) — these could redirect token storage to attacker-controlled
13
- // network shares. Fall back to the XDG path if APPDATA looks suspicious.
14
- if (appData && !appData.startsWith('\\\\')) {
15
- return join(appData, 'mk');
16
- }
17
- return join(homedir(), '.config', 'mk');
18
- }
19
-
20
- /**
21
- * Returns the token file path.
22
- * @returns {string}
23
- */
24
- export function getTokenPath() {
25
- return join(getConfigDir(), 'token');
26
- }
27
-
28
- /**
29
- * Read the stored token from disk.
30
- * @returns {string|null} Token string or null if not found.
31
- */
32
- export function readStoredToken() {
33
- const tokenPath = getTokenPath();
34
- if (!existsSync(tokenPath)) {
35
- return null;
36
- }
37
- try {
38
- return readFileSync(tokenPath, 'utf8').trim();
39
- } catch {
40
- return null;
41
- }
42
- }
43
-
44
- /**
45
- * Write the token to disk. Creates config dir if needed. Sets chmod 600 on Unix.
46
- * @param {string} token
47
- */
48
- export function writeToken(token) {
49
- const configDir = getConfigDir();
50
- const tokenPath = getTokenPath();
51
- mkdirSync(configDir, { recursive: true });
52
- // Write with mode 0o600 atomically prevents TOCTOU window between write and chmod.
53
- // On Windows the mode flag is ignored; home-dir ACLs provide equivalent protection.
54
- writeFileSync(tokenPath, token, { encoding: 'utf8', mode: 0o600 });
55
- if (process.platform !== 'win32') {
56
- // Explicit chmod ensures mode is set even if umask overrides the O_CREAT mode.
57
- chmodSync(tokenPath, 0o600);
58
- }
59
- }
60
-
61
- /**
62
- * Delete the stored token file.
63
- * Does not throw if the file does not exist.
64
- */
65
- export function deleteToken() {
66
- const tokenPath = getTokenPath();
67
- try {
68
- unlinkSync(tokenPath);
69
- } catch {
70
- // File doesn't exist — that's fine
71
- }
72
- }
1
+ import { join } from 'node:path';
2
+ import { homedir } from 'node:os';
3
+ import { mkdirSync, writeFileSync, readFileSync, unlinkSync, chmodSync, existsSync } from 'node:fs';
4
+
5
+ /**
6
+ * Returns the config directory for mk.
7
+ * XDG-compliant: ~/.config/mk on Unix/macOS, %APPDATA%/mk on Windows.
8
+ * @returns {string}
9
+ */
10
+ export function getConfigDir() {
11
+ const appData = process.env.APPDATA;
12
+ // Reject UNC paths (\\server\share) — these could redirect token storage to attacker-controlled
13
+ // network shares. Fall back to the XDG path if APPDATA looks suspicious.
14
+ if (appData && !appData.startsWith('\\\\')) {
15
+ return join(appData, 'mk');
16
+ }
17
+ return join(homedir(), '.config', 'mk');
18
+ }
19
+
20
+ /**
21
+ * Returns the token file path.
22
+ * @returns {string}
23
+ */
24
+ export function getTokenPath() {
25
+ return join(getConfigDir(), 'token');
26
+ }
27
+
28
+ /**
29
+ * Read the stored token from disk.
30
+ * @returns {string|null} Token string or null if not found.
31
+ */
32
+ export function readStoredToken() {
33
+ const tokenPath = getTokenPath();
34
+ if (!existsSync(tokenPath)) {
35
+ return null;
36
+ }
37
+ try {
38
+ return readFileSync(tokenPath, 'utf8').trim();
39
+ } catch {
40
+ return null;
41
+ }
42
+ }
43
+
44
+ /**
45
+ * Validate that a token string matches the expected GitHub token format.
46
+ * Accepted prefixes: ghp_ (personal access token), gho_ (OAuth token),
47
+ * github_pat_ (fine-grained PAT). Rejects garbage values from spoofed API responses.
48
+ * @param {string} token
49
+ * @throws {Error} if the token format is invalid
50
+ */
51
+ function assertTokenFormat(token) {
52
+ if (typeof token !== 'string' || !/^(ghp_|gho_|github_pat_)[A-Za-z0-9_]+$/.test(token)) {
53
+ throw new Error(
54
+ `Invalid GitHub token format. Expected a token starting with ghp_, gho_, or github_pat_. ` +
55
+ `Got: "${typeof token === 'string' ? '<redacted>' : typeof token}"`
56
+ );
57
+ }
58
+ }
59
+
60
+ /**
61
+ * Write the token to disk. Creates config dir if needed. Sets chmod 600 on Unix.
62
+ * Validates token format before writing to reject spoofed or malformed values.
63
+ * @param {string} token
64
+ */
65
+ export function writeToken(token) {
66
+ assertTokenFormat(token);
67
+ const configDir = getConfigDir();
68
+ const tokenPath = getTokenPath();
69
+ mkdirSync(configDir, { recursive: true });
70
+ // Write with mode 0o600 atomically prevents TOCTOU window between write and chmod.
71
+ // On Windows the mode flag is ignored; home-dir ACLs provide equivalent protection.
72
+ writeFileSync(tokenPath, token, { encoding: 'utf8', mode: 0o600 });
73
+ if (process.platform !== 'win32') {
74
+ // Explicit chmod ensures mode is set even if umask overrides the O_CREAT mode.
75
+ chmodSync(tokenPath, 0o600);
76
+ }
77
+ }
78
+
79
+ /**
80
+ * Delete the stored token file.
81
+ * Does not throw if the file does not exist.
82
+ */
83
+ export function deleteToken() {
84
+ const tokenPath = getTokenPath();
85
+ try {
86
+ unlinkSync(tokenPath);
87
+ } catch {
88
+ // File doesn't exist — that's fine
89
+ }
90
+ }
package/src/lib/copy.js CHANGED
@@ -1,5 +1,5 @@
1
1
  import { join, relative } from 'node:path';
2
- import { statSync, lstatSync, existsSync, readFileSync, writeFileSync, mkdirSync } from 'node:fs';
2
+ import { statSync, lstatSync, existsSync, readFileSync, writeFileSync, mkdirSync, copyFileSync } from 'node:fs';
3
3
  import fsExtra from 'fs-extra';
4
4
  import { KIT_SUBDIRS, COPY_FILTER_PATTERNS, WINDOWS_PATH_WARN_LENGTH } from './constants.js';
5
5
 
@@ -154,16 +154,38 @@ export function copyKitFiles(sourceDir, targetDir, options = {}) {
154
154
  return fileList;
155
155
  }
156
156
 
157
+ /**
158
+ * Create a timestamped backup of a file.
159
+ * @param {string} filePath - Absolute path to the file to back up
160
+ * @returns {string|null} Backup path, or null if source doesn't exist
161
+ */
162
+ function backupFile(filePath) {
163
+ if (!existsSync(filePath)) return null;
164
+ const now = new Date();
165
+ const ts = [
166
+ now.getFullYear(), String(now.getMonth() + 1).padStart(2, '0'),
167
+ String(now.getDate()).padStart(2, '0'), '-',
168
+ String(now.getHours()).padStart(2, '0'), String(now.getMinutes()).padStart(2, '0')
169
+ ].join('');
170
+ const backupPath = `${filePath}.${ts}.bak`;
171
+ copyFileSync(filePath, backupPath);
172
+ return backupPath;
173
+ }
174
+
157
175
  /**
158
176
  * Merge kit's settings.json into user's existing settings.json.
159
- * Strategy: deep-merge "hooks" key from kit source; preserve all other user keys.
160
- * If user has no settings.json, copy kit source as-is.
177
+ * Strategy: merge "hooks" key from kit source; preserve all other user keys.
178
+ * When a matcher already exists, the kit's hooks REPLACE the user's hooks for
179
+ * that matcher — this ensures updated hook commands propagate on update instead
180
+ * of accumulating stale duplicates.
181
+ * If user has no settings.json, create one with hooks only.
161
182
  * If kit source has no settings.json, do nothing.
183
+ * A timestamped backup is created before any write to an existing file.
162
184
  *
163
185
  * @param {string} sourceDir - Absolute path to source .claude/
164
186
  * @param {string} targetDir - Absolute path to target .claude/
165
187
  * @param {{ dryRun: boolean }} options
166
- * @returns {{ action: 'created'|'merged'|'skipped', merged?: string[] }}
188
+ * @returns {{ action: 'created'|'merged'|'skipped', merged?: string[], backup?: string }}
167
189
  */
168
190
  export function mergeSettingsJson(sourceDir, targetDir, options = {}) {
169
191
  const { dryRun = false } = options;
@@ -179,11 +201,14 @@ export function mergeSettingsJson(sourceDir, targetDir, options = {}) {
179
201
  return { action: 'skipped' };
180
202
  }
181
203
 
182
- // No existing user settings — copy kit source as-is
204
+ // No existing user settings — create with hooks only (not permissions or other keys).
205
+ // Copying the full kit settings.json would duplicate permissions.deny entries when both
206
+ // global (~/.claude/settings.json) and project (.claude/settings.json) are initialised.
183
207
  if (!existsSync(destPath)) {
184
208
  if (!dryRun) {
209
+ const hooksOnly = kitSettings.hooks ? { hooks: kitSettings.hooks } : {};
185
210
  mkdirSync(targetDir, { recursive: true });
186
- writeFileSync(destPath, JSON.stringify(kitSettings, null, 2) + '\n', 'utf-8');
211
+ writeFileSync(destPath, JSON.stringify(hooksOnly, null, 2) + '\n', 'utf-8');
187
212
  }
188
213
  return { action: 'created' };
189
214
  }
@@ -199,7 +224,9 @@ export function mergeSettingsJson(sourceDir, targetDir, options = {}) {
199
224
 
200
225
  const merged = [];
201
226
 
202
- // Merge hooks: kit entries are added/updated, user entries not in kit are preserved
227
+ // Merge hooks: kit entries are added or replaced by matcher; user-only matchers preserved.
228
+ // Replace strategy: when the kit ships an updated hook command for an existing matcher,
229
+ // the old command is replaced instead of appended — preventing stale duplicates.
203
230
  if (kitSettings.hooks) {
204
231
  if (!userSettings.hooks) userSettings.hooks = {};
205
232
  for (const [event, kitEntries] of Object.entries(kitSettings.hooks)) {
@@ -207,31 +234,25 @@ export function mergeSettingsJson(sourceDir, targetDir, options = {}) {
207
234
  userSettings.hooks[event] = kitEntries;
208
235
  merged.push(event);
209
236
  } else {
210
- // Merge by matcher: add kit entries whose matcher doesn't already exist
211
237
  for (const kitEntry of kitEntries) {
212
238
  const kitMatcher = kitEntry.matcher || '*';
213
- const exists = userSettings.hooks[event].some(
239
+ const idx = userSettings.hooks[event].findIndex(
214
240
  e => (e.matcher || '*') === kitMatcher
215
241
  );
216
- if (!exists) {
242
+ if (idx === -1) {
243
+ // New matcher — add it
217
244
  userSettings.hooks[event].push(kitEntry);
218
- merged.push(`${event}[${kitMatcher}]`);
219
- }
220
- // If matcher exists, check if kit hooks are present
221
- if (exists) {
222
- const userEntry = userSettings.hooks[event].find(
223
- e => (e.matcher || '*') === kitMatcher
224
- );
225
- if (userEntry && userEntry.hooks && kitEntry.hooks) {
226
- for (const kh of kitEntry.hooks) {
227
- const hookExists = userEntry.hooks.some(
228
- uh => uh.command === kh.command
229
- );
230
- if (!hookExists) {
231
- userEntry.hooks.push(kh);
232
- merged.push(`${event}[${kitMatcher}]:${kh.command}`);
233
- }
234
- }
245
+ merged.push(`${event}[${kitMatcher}]:added`);
246
+ } else {
247
+ // Existing matcher — replace with kit version if hooks differ
248
+ const userEntry = userSettings.hooks[event][idx];
249
+ const userCmds = (userEntry.hooks || []).map(h => h.command).sort();
250
+ const kitCmds = (kitEntry.hooks || []).map(h => h.command).sort();
251
+ const same = userCmds.length === kitCmds.length &&
252
+ userCmds.every((c, i) => c === kitCmds[i]);
253
+ if (!same) {
254
+ userSettings.hooks[event][idx] = kitEntry;
255
+ merged.push(`${event}[${kitMatcher}]:replaced`);
235
256
  }
236
257
  }
237
258
  }
@@ -242,7 +263,9 @@ export function mergeSettingsJson(sourceDir, targetDir, options = {}) {
242
263
  if (merged.length === 0) return { action: 'skipped' };
243
264
 
244
265
  if (!dryRun) {
266
+ const backup = backupFile(destPath);
245
267
  writeFileSync(destPath, JSON.stringify(userSettings, null, 2) + '\n', 'utf-8');
268
+ return { action: 'merged', merged, backup };
246
269
  }
247
270
  return { action: 'merged', merged };
248
271
  }
@@ -1,276 +1,352 @@
1
- import { createGunzip } from 'node:zlib';
2
- import { mkdirSync, writeFileSync, rmSync, mkdtempSync } from 'node:fs';
3
- import { join, dirname, resolve, sep } from 'node:path';
4
- import { tmpdir } from 'node:os';
5
- import { Writable, Readable } from 'node:stream';
6
- import { pipeline } from 'node:stream/promises';
7
- import { GITHUB_API, KIT_REPO } from './constants.js';
8
-
9
- // ---------------------------------------------------------------------------
10
- // Constants
11
- // ---------------------------------------------------------------------------
12
-
13
- const KIT_BRANCH = 'main';
14
- const TARBALL_URL = `${GITHUB_API}/repos/${KIT_REPO}/tarball/${KIT_BRANCH}`;
15
-
16
- // ---------------------------------------------------------------------------
17
- // Manual tar stream parser (zero-dependency, handles regular files + dirs)
18
- // ---------------------------------------------------------------------------
19
-
20
- /**
21
- * A Writable stream that parses tar format and writes matching entries to disk.
22
- * Only processes entries whose paths contain '.claude/' after stripping the root prefix.
23
- *
24
- * Tar format: 512-byte header blocks followed by data blocks (padded to 512 bytes).
25
- * Two consecutive 512-byte zero blocks mark end of archive.
26
- */
27
- class TarExtractor extends Writable {
28
- /**
29
- * @param {string} destDir - Destination directory (resolved to absolute path)
30
- */
31
- constructor(destDir) {
32
- super();
33
- this.destDir = resolve(destDir);
34
- // Chunk list avoids O(n²) Buffer.concat on every write
35
- this._chunks = [];
36
- this._totalLen = 0;
37
- this._state = 'header'; // 'header' | 'data' | 'skip'
38
- this._remaining = 0; // bytes left in current entry data
39
- this._paddedSize = 0; // padded size of current entry (multiple of 512)
40
- this._currentPath = ''; // relative path being written ('' if not .claude/)
41
- this._rootPrefix = null; // first root directory prefix to strip
42
- this._zeroBlocks = 0;
43
- }
44
-
45
- _write(chunk, encoding, callback) {
46
- this._chunks.push(chunk);
47
- this._totalLen += chunk.length;
48
- try {
49
- this._process();
50
- callback();
51
- } catch (err) {
52
- callback(err);
53
- }
54
- }
55
-
56
- /** Consolidate pending chunks into one Buffer (lazy only when access needed). */
57
- _getBuffer() {
58
- if (this._chunks.length !== 1) {
59
- this._chunks = [Buffer.concat(this._chunks)];
60
- }
61
- return this._chunks[0];
62
- }
63
-
64
- /** Consume n bytes from the front of the chunk list. */
65
- _consumeBuffer(n) {
66
- const buf = this._getBuffer();
67
- const remaining = buf.slice(n);
68
- this._chunks = remaining.length > 0 ? [remaining] : [];
69
- this._totalLen -= n;
70
- }
71
-
72
- _process() {
73
- while (this._totalLen >= 512) {
74
- if (this._state === 'header') {
75
- this._parseHeader();
76
- } else if (this._state === 'data') {
77
- this._readData();
78
- } else if (this._state === 'skip') {
79
- this._skipData();
80
- }
81
-
82
- // Don't loop if we can't make progress (need paddedSize bytes to consume a data/skip entry)
83
- if (this._state === 'data' && this._totalLen < this._paddedSize) break;
84
- if (this._state === 'skip' && this._totalLen < this._paddedSize) break;
85
- }
86
- }
87
-
88
- /**
89
- * Assert that resolvedPath is safely contained within this.destDir.
90
- * Throws if the path would escape the destination directory.
91
- * @param {string} resolvedPath
92
- */
93
- _assertSafe(resolvedPath) {
94
- if (resolvedPath !== this.destDir && !resolvedPath.startsWith(this.destDir + sep)) {
95
- throw new Error(`Path traversal detected: "${resolvedPath}" escapes destination directory`);
96
- }
97
- }
98
-
99
- _parseHeader() {
100
- const block = this._getBuffer().slice(0, 512);
101
-
102
- // Check for zero block (end of archive)
103
- if (block[0] === 0 && block.every(b => b === 0)) {
104
- this._zeroBlocks++;
105
- this._consumeBuffer(512);
106
- return;
107
- }
108
- this._zeroBlocks = 0;
109
-
110
- // Parse header fields
111
- const rawName = block.slice(0, 100).toString('utf8').replace(/\0+$/, '');
112
- const prefix = block.slice(345, 500).toString('utf8').replace(/\0+$/, '');
113
- const fullName = prefix ? `${prefix}/${rawName}` : rawName;
114
-
115
- const sizeOctal = block.slice(124, 136).toString('utf8').replace(/\0/g, '').trim();
116
- const size = sizeOctal ? parseInt(sizeOctal, 8) : 0;
117
-
118
- const typeFlag = String.fromCharCode(block[156]) || '0';
119
-
120
- // Skip PAX extended headers ('x') and PAX global headers ('g') without
121
- // participating in root-prefix detection. GitHub tarballs prepend a
122
- // pax_global_header whose name has no slash; if we let it set rootPrefix
123
- // to '' every subsequent entry fails the '.claude/' filter → 0 files.
124
- if (typeFlag === 'g' || typeFlag === 'x') {
125
- this._consumeBuffer(512);
126
- if (size > 0) {
127
- this._paddedSize = Math.ceil(size / 512) * 512;
128
- this._state = 'skip';
129
- }
130
- return;
131
- }
132
-
133
- // Detect and strip root prefix (first directory component)
134
- if (this._rootPrefix === null) {
135
- const firstSlash = fullName.indexOf('/');
136
- this._rootPrefix = firstSlash >= 0 ? fullName.slice(0, firstSlash + 1) : '';
137
- }
138
-
139
- const strippedName = fullName.startsWith(this._rootPrefix)
140
- ? fullName.slice(this._rootPrefix.length)
141
- : fullName;
142
-
143
- this._consumeBuffer(512);
144
-
145
- // Block symlinks and hard links entirely — prevents symlink-based escapes
146
- if (typeFlag === '1' || typeFlag === '2') {
147
- this._state = 'header';
148
- return;
149
- }
150
-
151
- // Only process entries under .claude/
152
- const isClaudePath = strippedName.startsWith('.claude/');
153
-
154
- if (typeFlag === '5' || typeFlag === '\0' || typeFlag === '') {
155
- // Directory entry
156
- if (isClaudePath && strippedName) {
157
- const dirPath = resolve(join(this.destDir, strippedName));
158
- this._assertSafe(dirPath);
159
- mkdirSync(dirPath, { recursive: true });
160
- }
161
- this._state = 'header';
162
- return;
163
- }
164
-
165
- // Regular file entry (typeFlag '0' or empty/null)
166
- if (size === 0) {
167
- if (isClaudePath) {
168
- const filePath = resolve(join(this.destDir, strippedName));
169
- this._assertSafe(filePath);
170
- mkdirSync(dirname(filePath), { recursive: true });
171
- writeFileSync(filePath, Buffer.alloc(0));
172
- }
173
- this._state = 'header';
174
- return;
175
- }
176
-
177
- this._remaining = size;
178
- this._paddedSize = Math.ceil(size / 512) * 512;
179
-
180
- if (isClaudePath) {
181
- this._currentPath = strippedName;
182
- this._state = 'data';
183
- } else {
184
- this._state = 'skip';
185
- }
186
- }
187
-
188
- _readData() {
189
- if (this._totalLen < this._paddedSize && this._totalLen < 512) {
190
- return; // Wait for more data
191
- }
192
-
193
- if (this._totalLen >= this._paddedSize) {
194
- // We have all the data for this entry
195
- const buf = this._getBuffer();
196
- const rawData = buf.slice(0, this._remaining);
197
- this._consumeBuffer(this._paddedSize);
198
-
199
- const filePath = resolve(join(this.destDir, this._currentPath));
200
- this._assertSafe(filePath);
201
- mkdirSync(dirname(filePath), { recursive: true });
202
- writeFileSync(filePath, rawData);
203
-
204
- this._currentPath = '';
205
- this._state = 'header';
206
- }
207
- // else: not enough data yet — wait
208
- }
209
-
210
- _skipData() {
211
- if (this._totalLen < this._paddedSize) {
212
- return; // Wait for more data
213
- }
214
- this._consumeBuffer(this._paddedSize);
215
- this._state = 'header';
216
- }
217
-
218
- _final(callback) {
219
- callback();
220
- }
221
- }
222
-
223
- // ---------------------------------------------------------------------------
224
- // Public API
225
- // ---------------------------------------------------------------------------
226
-
227
- /**
228
- * Download the kit repository as a tarball and extract .claude/ to targetDir.
229
- *
230
- * @param {string} token - GitHub Bearer token
231
- * @param {{ targetDir?: string, url?: string }} [opts]
232
- * - url: override the download URL (e.g. a release tarball URL). Defaults to main-branch TARBALL_URL.
233
- * @returns {Promise<string>} The targetDir path
234
- */
235
- export async function downloadAndExtractKit(token, opts = {}) {
236
- const { targetDir = mkdtempSync(join(tmpdir(), 'mk-kit-')), url = TARBALL_URL } = opts;
237
-
238
- let res;
239
- try {
240
- res = await fetch(url, {
241
- headers: {
242
- Authorization: `Bearer ${token}`,
243
- Accept: 'application/vnd.github.v3+json'
244
- },
245
- redirect: 'follow'
246
- });
247
- } catch (err) {
248
- throw new Error(`Network connection failed: ${err.message}`);
249
- }
250
-
251
- if (!res.ok) {
252
- throw new Error(`GitHub API error: ${res.status} ${res.statusText}`);
253
- }
254
-
255
- mkdirSync(targetDir, { recursive: true });
256
-
257
- const gunzip = createGunzip();
258
- const extractor = new TarExtractor(targetDir);
259
-
260
- await pipeline(
261
- Readable.fromWeb(res.body),
262
- gunzip,
263
- extractor
264
- );
265
-
266
- return targetDir;
267
- }
268
-
269
- /**
270
- * Remove a temp directory created by downloadAndExtractKit.
271
- *
272
- * @param {string} tempDir
273
- */
274
- export function cleanupTempDir(tempDir) {
275
- rmSync(tempDir, { recursive: true, force: true });
276
- }
1
+ import { createGunzip } from 'node:zlib';
2
+ import { mkdirSync, writeFileSync, rmSync, mkdtempSync } from 'node:fs';
3
+ import { join, dirname, resolve, sep } from 'node:path';
4
+ import { tmpdir } from 'node:os';
5
+ import { Writable, Readable } from 'node:stream';
6
+ import { pipeline } from 'node:stream/promises';
7
+ import { GITHUB_API, KIT_REPO } from './constants.js';
8
+
9
+ // ---------------------------------------------------------------------------
10
+ // Constants
11
+ // ---------------------------------------------------------------------------
12
+
13
+ const KIT_BRANCH = 'main';
14
+ const TARBALL_URL = `${GITHUB_API}/repos/${KIT_REPO}/tarball/${KIT_BRANCH}`;
15
+
16
+ /** Maximum size (bytes) allowed for a single tar entry. Prevents memory-exhaustion
17
+ * from crafted tarballs with large size fields. 50 MB is well above any kit file. */
18
+ const MAX_ENTRY_SIZE = 50 * 1024 * 1024; // 52428800 bytes
19
+
20
/** Hostnames allowed for kit downloads. Hoisted to module scope to avoid rebuilding on each call. */
const ALLOWED_HOSTS = new Set(['github.com', 'api.github.com', 'codeload.github.com']);

/** HTTP status codes that indicate a redirect. Hoisted to module scope alongside ALLOWED_HOSTS. */
const REDIRECT_STATUSES = new Set([301, 302, 303, 307, 308]);

/**
 * Validate that the download URL's hostname is an allow-listed GitHub domain.
 * Prevents SSRF: caller-supplied URLs (e.g. tarballUrl from GitHub API JSON) could be
 * redirected to an attacker-controlled host, exfiltrating the Bearer token.
 * NOTE: this is an EXACT-match allow-list (github.com, api.github.com,
 * codeload.github.com) — arbitrary *.github.com subdomains are NOT accepted,
 * which also rejects lookalike hosts such as "api.github.com.evil.com".
 * Exported so that releases.js can validate tarball_url at the source.
 * @param {string} url
 * @throws {Error} if the URL is malformed or its hostname is not allow-listed
 */
export function assertGitHubHostname(url) {
  let parsed;
  try {
    parsed = new URL(url);
  } catch {
    throw new Error(`SSRF guard: invalid URL "${url}"`);
  }
  const { hostname } = parsed;
  if (!ALLOWED_HOSTS.has(hostname)) {
    throw new Error(
      `SSRF guard: URL hostname "${hostname}" is not allowed. ` +
      `Only github.com domains are permitted for kit downloads.`
    );
  }
}
50
+
51
+ // ---------------------------------------------------------------------------
52
+ // Manual tar stream parser (zero-dependency, handles regular files + dirs)
53
+ // ---------------------------------------------------------------------------
54
+
55
/**
 * A Writable stream that parses tar format and writes matching entries to disk.
 * Only materializes entries whose stripped paths start with '.claude/' after
 * removing the tarball's root directory prefix.
 *
 * Tar format: 512-byte header blocks followed by data blocks (padded to 512 bytes).
 * Two consecutive 512-byte zero blocks mark end of archive.
 *
 * Security properties:
 *  - Path traversal: every resolved path is checked against destDir (_assertSafe).
 *  - Symlinks, hard links, and other non-file entry types are never materialized.
 *  - A per-entry size cap prevents memory exhaustion from crafted size fields.
 */
class TarExtractor extends Writable {
  /** Maximum size (bytes) allowed for a single tar entry. Prevents memory-exhaustion
   * from crafted tarballs with large size fields. 50 MB is well above any kit file. */
  static #MAX_ENTRY_SIZE = 50 * 1024 * 1024; // 52428800 bytes

  /**
   * @param {string} destDir - Destination directory (resolved to absolute path)
   */
  constructor(destDir) {
    super();
    this.destDir = resolve(destDir);
    // Chunk list avoids O(n²) Buffer.concat on every write
    this._chunks = [];
    this._totalLen = 0;
    this._state = 'header'; // 'header' | 'data' | 'skip'
    this._remaining = 0; // bytes left in current entry data
    this._paddedSize = 0; // padded size of current entry (multiple of 512)
    this._currentPath = ''; // relative path being written ('' if not .claude/)
    this._rootPrefix = null; // first root directory prefix to strip
    this._zeroBlocks = 0;
  }

  _write(chunk, encoding, callback) {
    this._chunks.push(chunk);
    this._totalLen += chunk.length;
    try {
      this._process();
      callback();
    } catch (err) {
      callback(err);
    }
  }

  /** Consolidate pending chunks into one Buffer (lazy — only when access needed). */
  _getBuffer() {
    if (this._chunks.length !== 1) {
      this._chunks = [Buffer.concat(this._chunks)];
    }
    return this._chunks[0];
  }

  /** Consume n bytes from the front of the chunk list. */
  _consumeBuffer(n) {
    // Buffer#subarray shares memory (same as the deprecated Buffer#slice).
    const rest = this._getBuffer().subarray(n);
    this._chunks = rest.length > 0 ? [rest] : [];
    this._totalLen -= n;
  }

  /** Drive the state machine while at least one full 512-byte block is buffered. */
  _process() {
    while (this._totalLen >= 512) {
      if (this._state === 'header') {
        this._parseHeader();
      } else if (this._state === 'data') {
        this._readData();
      } else if (this._state === 'skip') {
        this._skipData();
      }

      // Stop when the current data/skip entry needs more bytes than are buffered.
      if (this._state !== 'header' && this._totalLen < this._paddedSize) break;
    }
  }

  /**
   * Assert that resolvedPath is safely contained within this.destDir.
   * Throws if the path would escape the destination directory.
   * On case-insensitive filesystems (win32, darwin) compare lowercased paths
   * to prevent mixed-case bypass (e.g. /tmp/Mk-Kit-abc matching /tmp/mk-kit-abc).
   * @param {string} resolvedPath
   */
  _assertSafe(resolvedPath) {
    const isCaseInsensitive = process.platform === 'win32' || process.platform === 'darwin';
    const a = isCaseInsensitive ? resolvedPath.toLowerCase() : resolvedPath;
    const b = isCaseInsensitive ? this.destDir.toLowerCase() : this.destDir;
    if (a !== b && !a.startsWith(b + sep)) {
      throw new Error(`Path traversal detected: "${resolvedPath}" escapes destination directory`);
    }
  }

  /** Parse one 512-byte header block and transition to 'data'/'skip'/'header'. */
  _parseHeader() {
    const block = this._getBuffer().subarray(0, 512);

    // Check for zero block (end of archive)
    if (block[0] === 0 && block.every((b) => b === 0)) {
      this._zeroBlocks++;
      this._consumeBuffer(512);
      return;
    }
    this._zeroBlocks = 0;

    // Parse header fields (name may be split across name + ustar prefix fields)
    const rawName = block.subarray(0, 100).toString('utf8').replace(/\0+$/, '');
    const prefix = block.subarray(345, 500).toString('utf8').replace(/\0+$/, '');
    const fullName = prefix ? `${prefix}/${rawName}` : rawName;

    const sizeOctal = block.subarray(124, 136).toString('utf8').replace(/\0/g, '').trim();
    const size = sizeOctal ? parseInt(sizeOctal, 8) : 0;

    // Per-entry size cap — reject crafted tarballs with enormous size fields
    // that would cause _readData to buffer the whole entry in memory (DoS vector).
    if (size > TarExtractor.#MAX_ENTRY_SIZE) {
      throw new Error(
        `Tar entry size ${size} bytes exceeds maximum allowed ${TarExtractor.#MAX_ENTRY_SIZE} bytes (50 MB): ` +
        `entry "${fullName}"`
      );
    }

    const typeFlag = String.fromCharCode(block[156]);

    // Skip PAX extended headers ('x') and PAX global headers ('g') without
    // participating in root-prefix detection. GitHub tarballs prepend a
    // pax_global_header whose name has no slash; if we let it set rootPrefix
    // to '' every subsequent entry fails the '.claude/' filter → 0 files.
    if (typeFlag === 'g' || typeFlag === 'x') {
      this._consumeBuffer(512);
      if (size > 0) {
        this._paddedSize = Math.ceil(size / 512) * 512;
        this._state = 'skip';
      }
      return;
    }

    // Detect and strip root prefix (first directory component)
    if (this._rootPrefix === null) {
      const firstSlash = fullName.indexOf('/');
      this._rootPrefix = firstSlash >= 0 ? fullName.slice(0, firstSlash + 1) : '';
    }

    const strippedName = fullName.startsWith(this._rootPrefix)
      ? fullName.slice(this._rootPrefix.length)
      : fullName;

    this._consumeBuffer(512);

    // Only materialize entries under .claude/
    const isClaudePath = strippedName.startsWith('.claude/');

    // Directory entry
    if (typeFlag === '5') {
      if (isClaudePath && strippedName) {
        const dirPath = resolve(join(this.destDir, strippedName));
        this._assertSafe(dirPath);
        mkdirSync(dirPath, { recursive: true });
      }
      this._state = 'header';
      return;
    }

    // BUGFIX: per POSIX, typeflag '\0' (old V7-format archives) marks a REGULAR
    // FILE, not a directory — the previous code routed '\0' to the directory
    // branch, desynchronizing the parser on any old-format file that has data
    // (the data blocks were then parsed as headers).
    // Everything that is not a regular file ('0' or '\0') — hard links '1' and
    // symlinks '2' (blocked entirely to prevent symlink-based escapes), devices,
    // FIFOs, unknown types — is skipped together with any data blocks it carries
    // so the parser stays 512-byte aligned.
    if (typeFlag !== '0' && typeFlag !== '\0') {
      if (size > 0) {
        this._paddedSize = Math.ceil(size / 512) * 512;
        this._state = 'skip';
      } else {
        this._state = 'header';
      }
      return;
    }

    // Regular file entry with no data: write an empty file immediately.
    if (size === 0) {
      if (isClaudePath) {
        const filePath = resolve(join(this.destDir, strippedName));
        this._assertSafe(filePath);
        mkdirSync(dirname(filePath), { recursive: true });
        writeFileSync(filePath, Buffer.alloc(0));
      }
      this._state = 'header';
      return;
    }

    // Regular file entry with data: buffer it ('data') or discard it ('skip').
    this._remaining = size;
    this._paddedSize = Math.ceil(size / 512) * 512;

    if (isClaudePath) {
      this._currentPath = strippedName;
      this._state = 'data';
    } else {
      this._state = 'skip';
    }
  }

  /** Once the whole (padded) entry is buffered, write it to disk and resync. */
  _readData() {
    if (this._totalLen < this._paddedSize) {
      return; // Wait until the entire entry (incl. 512-byte padding) is buffered
    }

    const rawData = this._getBuffer().subarray(0, this._remaining);
    this._consumeBuffer(this._paddedSize);

    const filePath = resolve(join(this.destDir, this._currentPath));
    this._assertSafe(filePath);
    mkdirSync(dirname(filePath), { recursive: true });
    writeFileSync(filePath, rawData);

    this._currentPath = '';
    this._state = 'header';
  }

  /** Discard the (padded) data blocks of an entry we do not materialize. */
  _skipData() {
    if (this._totalLen < this._paddedSize) {
      return; // Wait for more data
    }
    this._consumeBuffer(this._paddedSize);
    this._state = 'header';
  }

  _final(callback) {
    callback();
  }
}
271
+
272
+ // ---------------------------------------------------------------------------
273
+ // Public API
274
+ // ---------------------------------------------------------------------------
275
+
276
/**
 * Download the kit repository as a tarball and extract .claude/ to targetDir.
 *
 * @param {string} token - GitHub Bearer token
 * @param {{ targetDir?: string, url?: string }} [opts]
 * - url: override the download URL (e.g. a release tarball URL). Defaults to main-branch TARBALL_URL.
 * @returns {Promise<string>} The targetDir path
 */
export async function downloadAndExtractKit(token, opts = {}) {
  const targetDir = opts.targetDir !== undefined
    ? opts.targetDir
    : mkdtempSync(join(tmpdir(), 'mk-kit-'));
  const url = opts.url !== undefined ? opts.url : TARBALL_URL;

  // SSRF guard — the hostname must be an allow-listed GitHub domain before the
  // Bearer token is attached. A compromised or MITM'd GitHub API response could
  // otherwise supply an attacker-controlled URL.
  assertGitHubHostname(url);

  let response;
  try {
    response = await fetch(url, {
      headers: {
        Authorization: `Bearer ${token}`,
        Accept: 'application/vnd.github.v3+json'
      },
      // Redirects are handled by hand so the Authorization header is never
      // forwarded to the redirect target (e.g. codeload.github.com): validate
      // the Location hostname, then re-fetch without the auth header.
      redirect: 'manual'
    });
  } catch (err) {
    throw new Error(`Network connection failed: ${err.message}`);
  }

  // 3xx handling: validate the Location hostname, then re-fetch without auth.
  // This keeps the Bearer token from leaking via a compromised redirect.
  if (REDIRECT_STATUSES.has(response.status)) {
    const location = response.headers.get('location');
    if (!location) {
      throw new Error(`GitHub API redirect (${response.status}) had no Location header`);
    }
    assertGitHubHostname(location);
    try {
      response = await fetch(location, {
        // No Authorization header on the redirect target — the token is only for api.github.com
        headers: { Accept: 'application/vnd.github.v3+json' },
        redirect: 'follow'
      });
    } catch (err) {
      throw new Error(`Network connection failed on redirect: ${err.message}`);
    }
  }

  if (!response.ok) {
    throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
  }

  mkdirSync(targetDir, { recursive: true });

  // Stream: HTTP body → gunzip → tar extractor (only .claude/ entries are written).
  await pipeline(
    Readable.fromWeb(response.body),
    createGunzip(),
    new TarExtractor(targetDir)
  );

  return targetDir;
}
344
+
345
/**
 * Remove a temp directory created by downloadAndExtractKit.
 * force: true makes this a no-op (rather than an error) when the path
 * no longer exists, so it is safe to call unconditionally during cleanup.
 *
 * @param {string} tempDir
 */
export function cleanupTempDir(tempDir) {
  rmSync(tempDir, { force: true, recursive: true });
}
@@ -1,5 +1,6 @@
1
1
  import semver from 'semver';
2
2
  import { GITHUB_API, KIT_REPO } from './constants.js';
3
+ import { assertGitHubHostname } from './download.js';
3
4
 
4
5
  // ---------------------------------------------------------------------------
5
6
  // GitHub Releases API helpers
@@ -53,6 +54,14 @@ export async function fetchLatestRelease(token) {
53
54
 
54
55
  const { tag_name: tag, body = null, tarball_url: tarballUrl } = data;
55
56
 
57
+ // S3: Validate tarball_url hostname before returning it to callers.
58
+ // A compromised GitHub API response could supply an attacker-controlled URL for SSRF.
59
+ try {
60
+ assertGitHubHostname(tarballUrl);
61
+ } catch {
62
+ return { available: false, reason: 'Tarball URL failed hostname validation' };
63
+ }
64
+
56
65
  // Parse version: try clean() first (handles v-prefix), then coerce() as fallback.
57
66
  // coerce('release-0.2.0') => '0.2.0', coerce('totally-not-semver') => null
58
67
  const version = semver.clean(tag) ?? (semver.coerce(tag) ? semver.coerce(tag).version : null);