@khanhcan148/mk 0.1.17 → 0.1.18

package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@khanhcan148/mk",
- "version": "0.1.17",
+ "version": "0.1.18",
  "description": "CLI to install and manage MyClaudeKit (.claude/) in your projects",
  "type": "module",
  "bin": {
@@ -5,6 +5,7 @@ import { fileURLToPath } from 'node:url';
  import { copyKitFiles, mergeSettingsJson } from '../lib/copy.js';
  import { writeManifest } from '../lib/manifest.js';
  import { computeChecksum } from '../lib/checksum.js';
+ import { pLimit, DEFAULT_CONCURRENCY_CAP } from '../lib/concurrency.js';
  import { resolveSourceDir, resolveTargetDir, resolveManifestPath } from '../lib/paths.js';
  import { MANIFEST_FILENAME } from '../lib/constants.js';
  import { resolveTokenOrLogin } from '../lib/auth.js';
@@ -59,13 +60,18 @@ export async function runInit(params = {}) {
  'utf8'
  ));

- const files = {};
- for (const entry of fileList) {
- if (existsSync(entry.absolutePath)) {
- const checksum = computeChecksum(entry.absolutePath);
- files[entry.relativePath] = { checksum, size: entry.size };
- }
- }
+ // Compute checksums with bounded parallelism — pLimit(cap=16) keeps concurrent
+ // file descriptors well under the macOS default ulimit of 256, so large installs
+ // never hit EMFILE. Output order matches input (see pLimit contract).
+ const existingEntries = fileList.filter(entry => existsSync(entry.absolutePath));
+ const fileChecksumEntries = await pLimit(
+ existingEntries.map((entry) => async () => {
+ const checksum = await computeChecksum(entry.absolutePath);
+ return [entry.relativePath, { checksum, size: entry.size }];
+ }),
+ DEFAULT_CONCURRENCY_CAP
+ );
+ const files = Object.fromEntries(fileChecksumEntries);

  // Write manifest.
  // Use explicitVersion when provided (e.g. release.version from initAction);
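For context, a rough sketch of what the fan-out above produces. The `relativePath → { checksum, size }` shape and the `sha256:` prefix come from the diff; the specific paths, hashes, and sizes are illustrative only.

```js
// Hypothetical result of the pLimit fan-out above, assuming two kit files exist.
// Each thunk resolves to a [relativePath, { checksum, size }] pair, and
// Object.fromEntries turns the ordered pairs into the manifest `files` map.
const files = {
  '.claude/agents/foo.md': { checksum: 'sha256:9f86d08…', size: 1284 },
  '.claude/settings.json': { checksum: 'sha256:2c26b46…', size: 512 }
};
```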
@@ -5,6 +5,7 @@ import { join, dirname, resolve, sep } from 'node:path';
  import { fileURLToPath } from 'node:url';
  import { readManifest, updateManifest, diffManifest } from '../lib/manifest.js';
  import { computeChecksum } from '../lib/checksum.js';
+ import { pLimit, DEFAULT_CONCURRENCY_CAP } from '../lib/concurrency.js';
  import { copyKitFiles, collectDiskFiles, mergeSettingsJson } from '../lib/copy.js';
  import { resolveSourceDir, resolveTargetDir, resolveManifestPath, deriveProjectRoot, assertSafePath } from '../lib/paths.js';
  import { resolveTokenOrLogin } from '../lib/auth.js';
@@ -97,14 +98,19 @@ export async function runUpdate(params = {}) {
  // Previously sourceFileList.find() in applyCopy was O(n) per call, causing O(n²)
  // behaviour when many files need updating. The Map is built once in O(n).
  const sourceFileMap = new Map(sourceFileList.map(e => [e.relativePath, e]));
- const sourceFiles = {};
- for (const entry of sourceFileList) {
- const checksum = computeChecksum(entry.sourceAbsPath);
- sourceFiles[entry.relativePath] = { checksum, size: entry.size };
- }
+ // Compute source checksums under a bounded pool (M3) — see concurrency.js.
+ const sourceChecksumEntries = await pLimit(
+ sourceFileList.map((entry) => async () => {
+ const checksum = await computeChecksum(entry.sourceAbsPath);
+ return [entry.relativePath, { checksum, size: entry.size }];
+ }),
+ DEFAULT_CONCURRENCY_CAP
+ );
+ const sourceFiles = Object.fromEntries(sourceChecksumEntries);

- // Get disk checksums for files currently in manifest
- const diskChecksums = {};
+ // Get disk checksums for files currently in manifest.
+ // Filter to only existing safe paths first, then parallelise checksum I/O.
+ const safeManifestPaths = [];
  for (const relPath of Object.keys(manifest.files)) {
  // relPath is like '.claude/agents/foo.md' — relative to project root
  const absPath = join(projectRoot, relPath);
@@ -116,9 +122,14 @@ export async function runUpdate(params = {}) {
  continue;
  }
  if (existsSync(absPath)) {
- diskChecksums[relPath] = computeChecksum(absPath);
+ safeManifestPaths.push({ relPath, absPath });
  }
  }
+ const diskChecksumEntries = await pLimit(
+ safeManifestPaths.map(({ relPath, absPath }) => async () => [relPath, await computeChecksum(absPath)]),
+ DEFAULT_CONCURRENCY_CAP
+ );
+ const diskChecksums = Object.fromEntries(diskChecksumEntries);

  // Three-way diff
  const diff = diffManifest(manifest, sourceFiles, diskChecksums);
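At this point the three inputs to diffManifest look roughly as follows. The value shapes follow from the code above (manifest and source entries carry `{ checksum, size }`, diskChecksums holds bare `sha256:` strings); the paths, hashes, and the interpretation in the last comment are illustrative, since diffManifest's own return value is not shown in this diff.

```js
// Illustrative input shapes for diffManifest(manifest, sourceFiles, diskChecksums).
const manifest = { files: { '.claude/agents/foo.md': { checksum: 'sha256:aaa…', size: 1200 } } };
const sourceFiles = { '.claude/agents/foo.md': { checksum: 'sha256:bbb…', size: 1250 } };
const diskChecksums = { '.claude/agents/foo.md': 'sha256:aaa…' };
// Disk still matches the manifest while the source changed — presumably the
// safe-update case, since the user has not modified the file locally.
```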
@@ -209,7 +220,9 @@ export async function runUpdate(params = {}) {
  const orphanParentDirs = new Set();
  for (const relPath of diskFiles) {
  if (relPath in sourceFiles) continue; // present in new source — keep
- const absPath = join(projectRoot, relPath);
+ // M7: pre-resolve so assertSafePath checks the canonical absolute path
+ // (not a `..`-relative traversal that slipped through the manifest).
+ const absPath = resolve(join(projectRoot, relPath));
  try {
  assertSafePath(absPath, claudeRoot, `orphan "${relPath}"`);
  } catch (err) {
@@ -220,8 +233,14 @@ export async function runUpdate(params = {}) {
  unlinkSync(absPath);
  orphans.push(relPath);
  orphanParentDirs.add(dirname(absPath));
- } catch {
- // Already missing — skip
+ } catch (err) {
+ // M7: ENOENT is benign (already missing). Anything else (EACCES, EPERM,
+ // EBUSY, ...) is surfaced so operators can react instead of losing signal.
+ if (err && err.code !== 'ENOENT') {
+ // Use err.code only — err.message on fs errors embeds the absolute path, which
+ // we already redact elsewhere (H4, H6). Fall through to a generic label.
+ process.stderr.write(chalk.yellow(` warning: orphan delete failed for ${relPath}: ${err.code || 'unknown error'}\n`));
+ }
  }
  }
  }

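In isolation, the delete-and-warn pattern above looks roughly like this. A minimal sketch: `removeOrphan` is a hypothetical helper name, and chalk is used here only because the diff already writes warnings through it.

```js
import { unlinkSync } from 'node:fs';
import chalk from 'chalk';

// Minimal sketch of the M7 pattern: treat ENOENT as "already gone",
// report every other failure by errno code only (no absolute paths).
function removeOrphan(absPath, relPath) {
  try {
    unlinkSync(absPath);
    return true;
  } catch (err) {
    if (err && err.code !== 'ENOENT') {
      process.stderr.write(chalk.yellow(`  warning: orphan delete failed for ${relPath}: ${err.code || 'unknown error'}\n`));
    }
    return false;
  }
}
```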
package/src/lib/auth.js CHANGED
@@ -155,7 +155,13 @@ export async function startDeviceFlow(opts = {}) {
  'Content-Type': 'application/x-www-form-urlencoded',
  Accept: 'application/json'
  },
- body: `client_id=${GITHUB_CLIENT_ID}&device_code=${device_code}&grant_type=urn:ietf:params:oauth:grant-type:device_code`
+ // Use URLSearchParams to prevent parameter injection if device_code ever contains '&' chars
+ // (mirrors the same safe pattern used in Step 1 above for the initial device-code request).
+ body: new URLSearchParams({
+ client_id: GITHUB_CLIENT_ID,
+ device_code,
+ grant_type: 'urn:ietf:params:oauth:grant-type:device_code'
+ }).toString()
  });

  const tokenData = await tokenRes.json();
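The URLSearchParams encoding can be checked in isolation: a malformed device_code is percent-encoded rather than splitting the form body. The values below are made up; only the parameter names match the diff.

```js
// Hypothetical device_code containing '&' and '=' — with the old template string
// this would have injected an extra form parameter; URLSearchParams escapes it.
const body = new URLSearchParams({
  client_id: 'Iv1.example',
  device_code: 'abc&scope=repo',
  grant_type: 'urn:ietf:params:oauth:grant-type:device_code'
}).toString();
console.log(body);
// client_id=Iv1.example&device_code=abc%26scope%3Drepo&grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Adevice_code
```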
@@ -1,13 +1,28 @@
  import { createHash } from 'node:crypto';
- import { readFileSync } from 'node:fs';
+ import { createReadStream } from 'node:fs';

  /**
- * Compute SHA-256 checksum of a file.
+ * Compute SHA-256 checksum of a file asynchronously using a streaming pipeline.
+ * Using createReadStream avoids loading the entire file into memory, which is
+ * important for large binary assets in the kit. Callers using Promise.all or pLimit
+ * can parallelise multiple checksums without blocking the event loop.
+ *
  * @param {string} filePath - Absolute path to file
- * @returns {string} Checksum string prefixed with 'sha256:'
+ * @returns {Promise<string>} Checksum string prefixed with 'sha256:'
  */
  export function computeChecksum(filePath) {
- const content = readFileSync(filePath);
- const hash = createHash('sha256').update(content).digest('hex');
- return `sha256:${hash}`;
+ return new Promise((resolve, reject) => {
+ const hash = createHash('sha256');
+ const stream = createReadStream(filePath);
+ stream.on('data', (chunk) => hash.update(chunk));
+ stream.on('end', () => resolve(`sha256:${hash.digest('hex')}`));
+ // H6: wrap the raw fs error so the absolute path in err.path never reaches
+ // user stderr while preserving the causal chain for debuggers. The top-level
+ // message is the errno code (callers can branch on it or on err.cause.code;
+ // err.cause retains the original for stack-trace inspection when needed).
+ stream.on('error', (e) => {
+ const code = e && e.code ? e.code : 'checksum read failed';
+ reject(new Error(code, e ? { cause: e } : undefined));
+ });
+ });
  }
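Call sites now await the checksum and can branch on the errno carried by the wrapped error. A minimal sketch; the import path and file path are placeholders.

```js
import { computeChecksum } from '../lib/checksum.js';

// Hypothetical caller: the resolved value keeps the 'sha256:' prefix, and a
// missing file surfaces as an Error whose message is the errno ('ENOENT'),
// with the original fs error preserved on err.cause.
try {
  const checksum = await computeChecksum('/tmp/example-kit-file.md');
  console.log(checksum); // e.g. 'sha256:9f86d081884c7d65…'
} catch (err) {
  if (err.message === 'ENOENT') {
    console.log('file disappeared between listing and hashing');
  } else {
    throw err;
  }
}
```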
@@ -0,0 +1,36 @@
+ /**
+ * Default in-flight worker cap for `pLimit` over file-descriptor-bound tasks.
+ *
+ * 16 is 8× typical disk parallelism and well under macOS default `ulimit -n 256`.
+ * Chosen empirically — higher values don't meaningfully speed up SHA-256 of
+ * kit-sized files, and lower values serialise too aggressively on SSD.
+ */
+ export const DEFAULT_CONCURRENCY_CAP = 16;
+
+ /**
+ * Bounded-concurrency helper for fan-out over async tasks.
+ *
+ * M3 — `Promise.all(items.map(computeChecksum))` opens N file descriptors at
+ * once. On installs with hundreds of files this blows past the default
+ * `ulimit -n` (256 on macOS) and causes EMFILE. `pLimit` caps the in-flight
+ * worker pool; results preserve input order via explicit index assignment.
+ *
+ * @template T
+ * @param {Array<() => Promise<T>>} tasks Array of thunks. Each thunk is
+ * invoked at most once by exactly one worker.
+ * @param {number} cap Max concurrent workers (>=1).
+ * @returns {Promise<T[]>} Results indexed to match `tasks`.
+ */
+ export async function pLimit(tasks, cap) {
+ const results = new Array(tasks.length);
+ let next = 0;
+ const workerCount = Math.max(1, Math.min(cap, tasks.length));
+ const workers = Array.from({ length: workerCount }, async () => {
+ while (next < tasks.length) {
+ const idx = next++;
+ results[idx] = await tasks[idx]();
+ }
+ });
+ await Promise.all(workers);
+ return results;
+ }
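Usage follows the thunk-array contract documented above: each element is a zero-argument async function, and results come back in input order. A minimal sketch with stand-in work; the task bodies are made up and the import path is shown as if from a sibling module.

```js
import { pLimit, DEFAULT_CONCURRENCY_CAP } from './concurrency.js';

// Stand-in tasks: thunks are only invoked once a worker picks them up, so at
// most DEFAULT_CONCURRENCY_CAP (16) are in flight at any moment.
const tasks = Array.from({ length: 100 }, (_, i) => async () => {
  await new Promise((r) => setTimeout(r, 10)); // placeholder for checksum I/O
  return i * 2;
});

const results = await pLimit(tasks, DEFAULT_CONCURRENCY_CAP);
console.log(results.length); // 100
console.log(results[3]);     // 6 — order matches the input array
```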