@khanhcan148/mk 0.1.17 → 0.1.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -88,7 +88,7 @@ cp -r .claude ~/.claude/
88
88
 
89
89
  ```
90
90
  ├── .claude/
91
- │ ├── agents/ # 32 agents (5 primary + 27 utility: implementers, quality, docs, specialized, concerns)
91
+ │ ├── agents/ # 36 agents (5 primary + 31 utility: implementers, quality, docs, specialized, concerns, brainstorm critics)
92
92
  │ ├── skills/ # 67 skill packages (SKILL.md + scripts/references/assets)
93
93
  │ │ ├── mk-*/ # 20 workflow commands (/mk-audit, /mk-brainstorm, /mk-log-analysis, /mk-overview, /mk-wiki, etc.)
94
94
  │ │ └── ... # Domain skills (frontend, backend, testing, browser automation, etc.)
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@khanhcan148/mk",
3
- "version": "0.1.17",
3
+ "version": "0.1.19",
4
4
  "description": "CLI to install and manage MyClaudeKit (.claude/) in your projects",
5
5
  "type": "module",
6
6
  "bin": {
@@ -138,6 +138,17 @@ export async function statusAction(deps = {}) {
138
138
  process.stdout.write(chalk.green('Repo access: granted\n'));
139
139
  } else {
140
140
  process.stdout.write(chalk.red('Repo access: denied\n'));
141
- process.stdout.write('Contact the repository owner for collaborator access.\n');
141
+ const hasRepoScope = access.scopes?.some((s) => s === 'repo');
142
+ if (access.status === 404 && access.scopes && !hasRepoScope) {
143
+ // 404 with no 'repo' scope usually means the token can't see the private
144
+ // KIT_REPO — re-authenticating picks up the new default scope.
145
+ const current = access.scopes.length ? access.scopes.join(', ') : 'none';
146
+ process.stdout.write(
147
+ `Token is missing the 'repo' scope (current scopes: ${current}).\n` +
148
+ "Run 'mk auth logout && mk auth login' to re-authenticate with the required scope.\n"
149
+ );
150
+ } else {
151
+ process.stdout.write('Contact the repository owner for collaborator access.\n');
152
+ }
142
153
  }
143
154
  }
@@ -5,6 +5,7 @@ import { fileURLToPath } from 'node:url';
5
5
  import { copyKitFiles, mergeSettingsJson } from '../lib/copy.js';
6
6
  import { writeManifest } from '../lib/manifest.js';
7
7
  import { computeChecksum } from '../lib/checksum.js';
8
+ import { pLimit, DEFAULT_CONCURRENCY_CAP } from '../lib/concurrency.js';
8
9
  import { resolveSourceDir, resolveTargetDir, resolveManifestPath } from '../lib/paths.js';
9
10
  import { MANIFEST_FILENAME } from '../lib/constants.js';
10
11
  import { resolveTokenOrLogin } from '../lib/auth.js';
@@ -59,13 +60,18 @@ export async function runInit(params = {}) {
59
60
  'utf8'
60
61
  ));
61
62
 
62
- const files = {};
63
- for (const entry of fileList) {
64
- if (existsSync(entry.absolutePath)) {
65
- const checksum = computeChecksum(entry.absolutePath);
66
- files[entry.relativePath] = { checksum, size: entry.size };
67
- }
68
- }
63
+ // Compute checksums with bounded parallelism — pLimit(cap=16) keeps concurrent
64
+ // file descriptors well under macOS default ulimit 256 so large installs
65
+ // never hit EMFILE. Output order matches input (see pLimit contract).
66
+ const existingEntries = fileList.filter(entry => existsSync(entry.absolutePath));
67
+ const fileChecksumEntries = await pLimit(
68
+ existingEntries.map((entry) => async () => {
69
+ const checksum = await computeChecksum(entry.absolutePath);
70
+ return [entry.relativePath, { checksum, size: entry.size }];
71
+ }),
72
+ DEFAULT_CONCURRENCY_CAP
73
+ );
74
+ const files = Object.fromEntries(fileChecksumEntries);
69
75
 
70
76
  // Write manifest.
71
77
  // Use explicitVersion when provided (e.g. release.version from initAction);
@@ -5,6 +5,7 @@ import { join, dirname, resolve, sep } from 'node:path';
5
5
  import { fileURLToPath } from 'node:url';
6
6
  import { readManifest, updateManifest, diffManifest } from '../lib/manifest.js';
7
7
  import { computeChecksum } from '../lib/checksum.js';
8
+ import { pLimit, DEFAULT_CONCURRENCY_CAP } from '../lib/concurrency.js';
8
9
  import { copyKitFiles, collectDiskFiles, mergeSettingsJson } from '../lib/copy.js';
9
10
  import { resolveSourceDir, resolveTargetDir, resolveManifestPath, deriveProjectRoot, assertSafePath } from '../lib/paths.js';
10
11
  import { resolveTokenOrLogin } from '../lib/auth.js';
@@ -31,7 +32,7 @@ import { isEmptyDir } from '../lib/fs-utils.js';
31
32
  * @param {string} str
32
33
  * @returns {string}
33
34
  */
34
- function stripTerminalEscapes(str) {
35
+ export function stripTerminalEscapes(str) {
35
36
  return str
36
37
  .replace(/\x1b\[[0-9;]*[a-zA-Z]/g, '') // CSI sequences
37
38
  .replace(/\x1b\].*?(\x07|\x1b\\)/gs, '') // OSC sequences (dotAll for multiline)
@@ -97,14 +98,19 @@ export async function runUpdate(params = {}) {
97
98
  // Previously sourceFileList.find() in applyCopy was O(n) per call, causing O(n²)
98
99
  // behaviour when many files need updating. The Map is built once in O(n).
99
100
  const sourceFileMap = new Map(sourceFileList.map(e => [e.relativePath, e]));
100
- const sourceFiles = {};
101
- for (const entry of sourceFileList) {
102
- const checksum = computeChecksum(entry.sourceAbsPath);
103
- sourceFiles[entry.relativePath] = { checksum, size: entry.size };
104
- }
101
+ // Compute source checksums under a bounded pool (M3) — see concurrency.js.
102
+ const sourceChecksumEntries = await pLimit(
103
+ sourceFileList.map((entry) => async () => {
104
+ const checksum = await computeChecksum(entry.sourceAbsPath);
105
+ return [entry.relativePath, { checksum, size: entry.size }];
106
+ }),
107
+ DEFAULT_CONCURRENCY_CAP
108
+ );
109
+ const sourceFiles = Object.fromEntries(sourceChecksumEntries);
105
110
 
106
- // Get disk checksums for files currently in manifest
107
- const diskChecksums = {};
111
+ // Get disk checksums for files currently in manifest.
112
+ // Filter to only existing safe paths first, then parallelise checksum I/O.
113
+ const safeManifestPaths = [];
108
114
  for (const relPath of Object.keys(manifest.files)) {
109
115
  // relPath is like '.claude/agents/foo.md' — relative to project root
110
116
  const absPath = join(projectRoot, relPath);
@@ -116,9 +122,14 @@ export async function runUpdate(params = {}) {
116
122
  continue;
117
123
  }
118
124
  if (existsSync(absPath)) {
119
- diskChecksums[relPath] = computeChecksum(absPath);
125
+ safeManifestPaths.push({ relPath, absPath });
120
126
  }
121
127
  }
128
+ const diskChecksumEntries = await pLimit(
129
+ safeManifestPaths.map(({ relPath, absPath }) => async () => [relPath, await computeChecksum(absPath)]),
130
+ DEFAULT_CONCURRENCY_CAP
131
+ );
132
+ const diskChecksums = Object.fromEntries(diskChecksumEntries);
122
133
 
123
134
  // Three-way diff
124
135
  const diff = diffManifest(manifest, sourceFiles, diskChecksums);
@@ -209,7 +220,9 @@ export async function runUpdate(params = {}) {
209
220
  const orphanParentDirs = new Set();
210
221
  for (const relPath of diskFiles) {
211
222
  if (relPath in sourceFiles) continue; // present in new source — keep
212
- const absPath = join(projectRoot, relPath);
223
+ // M7: pre-resolve so assertSafePath checks the canonical absolute path
224
+ // (not a `..`-relative traversal that slipped through the manifest).
225
+ const absPath = resolve(join(projectRoot, relPath));
213
226
  try {
214
227
  assertSafePath(absPath, claudeRoot, `orphan "${relPath}"`);
215
228
  } catch (err) {
@@ -220,8 +233,14 @@ export async function runUpdate(params = {}) {
220
233
  unlinkSync(absPath);
221
234
  orphans.push(relPath);
222
235
  orphanParentDirs.add(dirname(absPath));
223
- } catch {
224
- // Already missing — skip
236
+ } catch (err) {
237
+ // M7: ENOENT is benign (already missing). Anything else — EACCES, EPERM,
238
+ // EBUSY — is surfaced so operators can react instead of losing signal.
239
+ if (err && err.code !== 'ENOENT') {
240
+ // Use err.code only — err.message on fs errors embeds the absolute path, which
241
+ // we already redact elsewhere (H4, H6). Fall through to a generic label.
242
+ process.stderr.write(chalk.yellow(` warning: orphan delete failed for ${relPath}: ${err.code || 'unknown error'}\n`));
243
+ }
225
244
  }
226
245
  }
227
246
 
package/src/lib/auth.js CHANGED
@@ -82,8 +82,15 @@ export async function validateToken(token) {
82
82
  /**
83
83
  * Check whether the token has access to the kit repository.
84
84
  *
85
+ * Returns the HTTP status and any scopes reported via the `x-oauth-scopes`
86
+ * response header so callers can disambiguate common failure modes:
87
+ * - 404 + no 'repo' scope → token lacks scope (most common for a private KIT_REPO)
88
+ * - 404 + has 'repo' scope → authenticated account is not a collaborator
89
+ * - 403 → SSO or rate-limit block
90
+ * - 0 → network error
91
+ *
85
92
  * @param {string} token
86
- * @returns {Promise<{ accessible: boolean }>}
93
+ * @returns {Promise<{ accessible: boolean, status: number, scopes: string[] }>}
87
94
  */
88
95
  export async function checkRepoAccess(token) {
89
96
  try {
@@ -93,12 +100,17 @@ export async function checkRepoAccess(token) {
93
100
  Accept: 'application/vnd.github.v3+json'
94
101
  }
95
102
  });
96
- return { accessible: res.ok };
103
+ return { accessible: res.ok, status: res.status, scopes: parseScopes(res) };
97
104
  } catch {
98
- return { accessible: false };
105
+ return { accessible: false, status: 0, scopes: [] };
99
106
  }
100
107
  }
101
108
 
109
+ function parseScopes(res) {
110
+ const raw = res.headers?.get?.('x-oauth-scopes') || '';
111
+ return raw.split(',').map((s) => s.trim()).filter(Boolean);
112
+ }
113
+
102
114
  // ---------------------------------------------------------------------------
103
115
  // OAuth Device Flow
104
116
  // ---------------------------------------------------------------------------
@@ -127,9 +139,15 @@ export async function startDeviceFlow(opts = {}) {
127
139
  'Content-Type': 'application/x-www-form-urlencoded',
128
140
  Accept: 'application/json'
129
141
  },
130
- // Empty scope sufficient for public repos (5000 req/hr). Set MK_OAUTH_SCOPE=repo for private forks.
142
+ // KIT_REPO is private, so 'repo' scope is required to read it. Default to 'repo'.
143
+ // Override with MK_OAUTH_SCOPE for public forks (e.g. MK_OAUTH_SCOPE='' for no scope,
144
+ // or MK_OAUTH_SCOPE='public_repo' for public-only access).
145
+ // `??` (not `||`) so an explicit empty string is honored as an opt-out.
131
146
  // Use URLSearchParams to prevent parameter injection via env var containing '&' chars.
132
- body: new URLSearchParams({ client_id: GITHUB_CLIENT_ID, scope: process.env.MK_OAUTH_SCOPE || '' }).toString()
147
+ body: new URLSearchParams({
148
+ client_id: GITHUB_CLIENT_ID,
149
+ scope: process.env.MK_OAUTH_SCOPE ?? 'repo'
150
+ }).toString()
133
151
  });
134
152
 
135
153
  if (!codeRes.ok) {
@@ -155,7 +173,13 @@ export async function startDeviceFlow(opts = {}) {
155
173
  'Content-Type': 'application/x-www-form-urlencoded',
156
174
  Accept: 'application/json'
157
175
  },
158
- body: `client_id=${GITHUB_CLIENT_ID}&device_code=${device_code}&grant_type=urn:ietf:params:oauth:grant-type:device_code`
176
+ // Use URLSearchParams to prevent parameter injection if device_code ever contains '&' chars
177
+ // (mirrors the same safe pattern used in Step 1 above for the initial device-code request).
178
+ body: new URLSearchParams({
179
+ client_id: GITHUB_CLIENT_ID,
180
+ device_code,
181
+ grant_type: 'urn:ietf:params:oauth:grant-type:device_code'
182
+ }).toString()
159
183
  });
160
184
 
161
185
  const tokenData = await tokenRes.json();
@@ -1,13 +1,28 @@
1
1
  import { createHash } from 'node:crypto';
2
- import { readFileSync } from 'node:fs';
2
+ import { createReadStream } from 'node:fs';
3
3
 
4
4
  /**
5
- * Compute SHA-256 checksum of a file.
5
+ * Compute SHA-256 checksum of a file asynchronously using a streaming pipeline.
6
+ * Using createReadStream avoids loading the entire file into memory, which is
7
+ * important for large binary assets in the kit. Promise.all callers can parallelise
8
+ * multiple checksums without blocking the event loop.
9
+ *
6
10
  * @param {string} filePath - Absolute path to file
7
- * @returns {string} Checksum string prefixed with 'sha256:'
11
+ * @returns {Promise<string>} Checksum string prefixed with 'sha256:'
8
12
  */
9
13
  export function computeChecksum(filePath) {
10
- const content = readFileSync(filePath);
11
- const hash = createHash('sha256').update(content).digest('hex');
12
- return `sha256:${hash}`;
14
+ return new Promise((resolve, reject) => {
15
+ const hash = createHash('sha256');
16
+ const stream = createReadStream(filePath);
17
+ stream.on('data', (chunk) => hash.update(chunk));
18
+ stream.on('end', () => resolve(`sha256:${hash.digest('hex')}`));
19
+ // H6: wrap the raw fs error so the absolute path in err.path never reaches
20
+ // user stderr while preserving the causal chain for debuggers. The top-level
21
+ // message is the errno code (callers branch on err.code; err.cause retains
22
+ // the original for stack-trace inspection when needed).
23
+ stream.on('error', (e) => {
24
+ const code = e && e.code ? e.code : 'checksum read failed';
25
+ reject(new Error(code, e ? { cause: e } : undefined));
26
+ });
27
+ });
13
28
  }
@@ -0,0 +1,36 @@
1
+ /**
2
+ * Default in-flight worker cap for `pLimit` over file-descriptor-bound tasks.
3
+ *
4
+ * 16 is 8× typical disk parallelism and well under macOS default `ulimit -n 256`.
5
+ * Chosen empirically — higher values don't meaningfully speed up SHA-256 of
6
+ * kit-sized files, and lower values serialise too aggressively on SSD.
7
+ */
8
+ export const DEFAULT_CONCURRENCY_CAP = 16;
9
+
10
+ /**
11
+ * Bounded-concurrency helper for fan-out over async tasks.
12
+ *
13
+ * M3 — `Promise.all(items.map(computeChecksum))` opens N file descriptors at
14
+ * once. On installs with hundreds of files this blows past the default
15
+ * `ulimit -n` (256 on macOS) and causes EMFILE. `pLimit` caps the in-flight
16
+ * worker pool; results preserve input order via explicit index assignment.
17
+ *
18
+ * @template T
19
+ * @param {Array<() => Promise<T>>} tasks Array of thunks. Each thunk is
20
+ * invoked at most once by exactly one worker.
21
+ * @param {number} cap Max concurrent workers (>=1).
22
+ * @returns {Promise<T[]>} Results indexed to match `tasks`.
23
+ */
24
+ export async function pLimit(tasks, cap) {
25
+ const results = new Array(tasks.length);
26
+ let next = 0;
27
+ const workerCount = Math.max(1, Math.min(cap, tasks.length));
28
+ const workers = Array.from({ length: workerCount }, async () => {
29
+ while (next < tasks.length) {
30
+ const idx = next++;
31
+ results[idx] = await tasks[idx]();
32
+ }
33
+ });
34
+ await Promise.all(workers);
35
+ return results;
36
+ }
package/src/lib/copy.js CHANGED
@@ -81,6 +81,14 @@ export function collectDiskFiles(targetDir) {
81
81
  return results;
82
82
  }
83
83
 
84
+ /**
85
+ * @typedef {Object} FileEntry
86
+ * @property {string} relativePath - Path relative to the target .claude/ (POSIX separators, e.g. ".claude/agents/foo.md")
87
+ * @property {string} absolutePath - Destination absolute path under targetDir (where the file is copied to)
88
+ * @property {string} sourceAbsPath - Source absolute path under sourceDir (where the file is copied from)
89
+ * @property {number} size - File size in bytes, captured at copy time
90
+ */
91
+
84
92
  /**
85
93
  * Copy kit files from sourceDir (.claude/) to targetDir (.claude/).
86
94
  * Only copies KIT_SUBDIRS (agents/, skills/, workflows/).
@@ -88,7 +96,7 @@ export function collectDiskFiles(targetDir) {
88
96
  * @param {string} sourceDir - Absolute path to source .claude/
89
97
  * @param {string} targetDir - Absolute path to target .claude/
90
98
  * @param {{ dryRun: boolean }} options
91
- * @returns {Array<{ relativePath: string, absolutePath: string, sourceAbsPath: string, size: number }>}
99
+ * @returns {FileEntry[]}
92
100
  * @remarks Naming convention: `absolutePath` is the destination (under targetDir),
93
101
  * `sourceAbsPath` is the source (under sourceDir). The asymmetry is intentional —
94
102
  * renaming would break consumers (update.js). See DEBT-016.
@@ -39,6 +39,24 @@ export function assertGitHubHostname(url) {
39
39
  } catch {
40
40
  throw new Error(`SSRF guard: invalid URL "${url}"`);
41
41
  }
42
+ // H9/H10: block non-TLS schemes. Plain http:// to github.com can be MITM-redirected,
43
+ // and the kit download path has no reason to accept anything but https.
44
+ if (parsed.protocol !== 'https:') {
45
+ // Truncate to keep the error message bounded if a crafted URL carries an unusually long scheme.
46
+ const safeScheme = String(parsed.protocol).slice(0, 20);
47
+ throw new Error(
48
+ `SSRF guard: scheme "${safeScheme}" is not allowed. ` +
49
+ `Only https: is permitted for kit downloads.`
50
+ );
51
+ }
52
+ // Block userinfo-prefixed URLs (user:pass@host) — they can mask the true
53
+ // hostname in server-side url parsers or confuse logging/auditing.
54
+ if (parsed.username || parsed.password) {
55
+ throw new Error(
56
+ `SSRF guard: userinfo-prefixed URL is not allowed. ` +
57
+ `Credentials in the URL (user:pass@host) are rejected for kit downloads.`
58
+ );
59
+ }
42
60
  const { hostname } = parsed;
43
61
  if (!ALLOWED_HOSTS.has(hostname)) {
44
62
  throw new Error(