akm-cli 0.0.20 → 0.0.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. package/README.md +8 -5
  2. package/dist/asset-spec.js +96 -9
  3. package/dist/cli.js +195 -55
  4. package/dist/common.js +15 -2
  5. package/dist/config-cli.js +65 -6
  6. package/dist/config.js +206 -22
  7. package/dist/create-provider-registry.js +18 -0
  8. package/dist/db.js +156 -53
  9. package/dist/embedder.js +36 -18
  10. package/dist/errors.js +6 -0
  11. package/dist/file-context.js +18 -19
  12. package/dist/frontmatter.js +19 -3
  13. package/dist/indexer.js +126 -89
  14. package/dist/{stash-registry.js → installed-kits.js} +16 -24
  15. package/dist/kit-include.js +108 -0
  16. package/dist/local-search.js +429 -0
  17. package/dist/lockfile.js +47 -5
  18. package/dist/matchers.js +6 -0
  19. package/dist/metadata.js +22 -16
  20. package/dist/paths.js +4 -0
  21. package/dist/providers/skills-sh.js +3 -2
  22. package/dist/providers/static-index.js +4 -9
  23. package/dist/registry-build-index.js +356 -0
  24. package/dist/registry-factory.js +19 -0
  25. package/dist/registry-install.js +114 -109
  26. package/dist/registry-resolve.js +44 -9
  27. package/dist/registry-search.js +14 -9
  28. package/dist/renderers.js +23 -7
  29. package/dist/ripgrep-install.js +9 -4
  30. package/dist/self-update.js +31 -4
  31. package/dist/stash-add.js +75 -6
  32. package/dist/stash-clone.js +1 -1
  33. package/dist/stash-provider-factory.js +52 -0
  34. package/dist/stash-provider.js +1 -0
  35. package/dist/stash-providers/filesystem.js +42 -0
  36. package/dist/stash-providers/index.js +9 -0
  37. package/dist/stash-providers/openviking.js +337 -0
  38. package/dist/stash-resolve.js +33 -3
  39. package/dist/stash-search.js +70 -402
  40. package/dist/stash-show.js +24 -5
  41. package/dist/stash-source.js +19 -11
  42. package/dist/walker.js +15 -10
  43. package/dist/warn.js +7 -0
  44. package/package.json +1 -1
  45. package/dist/provider-registry.js +0 -8
@@ -5,8 +5,10 @@ import path from "node:path";
5
5
  import { TYPE_DIRS } from "./asset-spec";
6
6
  import { fetchWithRetry, isWithin } from "./common";
7
7
  import { loadConfig, saveConfig } from "./config";
8
+ import { copyIncludedPaths, findNearestIncludeConfig } from "./kit-include";
8
9
  import { getRegistryCacheDir as _getRegistryCacheDir } from "./paths";
9
- import { parseRegistryRef, resolveRegistryArtifact } from "./registry-resolve";
10
+ import { parseRegistryRef, resolveRegistryArtifact, validateGitRef, validateGitUrl } from "./registry-resolve";
11
+ import { warn } from "./warn";
10
12
  const REGISTRY_STASH_DIR_NAMES = new Set(Object.values(TYPE_DIRS));
11
13
  export async function installRegistryRef(ref, options) {
12
14
  const parsed = parseRegistryRef(ref);
@@ -48,13 +50,30 @@ export async function installRegistryRef(ref, options) {
48
50
  }
49
51
  }
50
52
  fs.mkdirSync(cacheDir, { recursive: true });
51
- await downloadArchive(resolved.artifactUrl, archivePath);
52
- verifyArchiveIntegrity(archivePath, resolved.resolvedRevision, resolved.source);
53
- const integrity = await computeFileHash(archivePath);
54
- extractTarGzSecure(archivePath, extractedDir);
55
- const provisionalKitRoot = detectStashRoot(extractedDir);
56
- const installRoot = applyAgentikitIncludeConfig(provisionalKitRoot, cacheDir, extractedDir) ?? provisionalKitRoot;
57
- const stashRoot = detectStashRoot(installRoot);
53
+ let integrity;
54
+ let provisionalKitRoot;
55
+ let installRoot;
56
+ let stashRoot;
57
+ try {
58
+ await downloadArchive(resolved.artifactUrl, archivePath);
59
+ verifyArchiveIntegrity(archivePath, resolved.resolvedRevision, resolved.source);
60
+ integrity = await computeFileHash(archivePath);
61
+ extractTarGzSecure(archivePath, extractedDir);
62
+ provisionalKitRoot = detectStashRoot(extractedDir);
63
+ installRoot = applyAgentikitIncludeConfig(provisionalKitRoot, cacheDir, extractedDir) ?? provisionalKitRoot;
64
+ stashRoot = detectStashRoot(installRoot);
65
+ }
66
+ catch (err) {
67
+ // Clean up the cache directory so stale or partially-extracted artifacts
68
+ // don't cause false cache hits on the next install attempt.
69
+ try {
70
+ fs.rmSync(cacheDir, { recursive: true, force: true });
71
+ }
72
+ catch {
73
+ // Best-effort cleanup; ignore errors
74
+ }
75
+ throw err;
76
+ }
58
77
  return {
59
78
  id: resolved.id,
60
79
  source: resolved.source,
@@ -121,24 +140,44 @@ async function installGitRegistryRef(parsed, options) {
121
140
  }
122
141
  }
123
142
  fs.mkdirSync(cacheDir, { recursive: true });
124
- const cloneArgs = ["clone", "--depth", "1"];
125
- if (parsed.requestedRef) {
126
- cloneArgs.push("--branch", parsed.requestedRef);
143
+ // Validate URL and ref before passing to git to prevent command injection
144
+ validateGitUrl(parsed.url);
145
+ if (parsed.requestedRef)
146
+ validateGitRef(parsed.requestedRef);
147
+ let provisionalKitRoot;
148
+ let installRoot;
149
+ let stashRoot;
150
+ try {
151
+ const cloneArgs = ["clone", "--depth", "1"];
152
+ if (parsed.requestedRef) {
153
+ cloneArgs.push("--branch", parsed.requestedRef);
154
+ }
155
+ cloneArgs.push(parsed.url, cloneDir);
156
+ const cloneResult = spawnSync("git", cloneArgs, { encoding: "utf8", timeout: 120_000 });
157
+ if (cloneResult.status !== 0) {
158
+ const err = cloneResult.stderr?.trim() || cloneResult.error?.message || "unknown error";
159
+ throw new Error(`Failed to clone ${parsed.url}: ${err}`);
160
+ }
161
+ // Copy contents to extracted dir without .git
162
+ fs.mkdirSync(extractedDir, { recursive: true });
163
+ copyDirectoryContents(cloneDir, extractedDir);
164
+ // Clean up the clone dir
165
+ fs.rmSync(cloneDir, { recursive: true, force: true });
166
+ provisionalKitRoot = detectStashRoot(extractedDir);
167
+ installRoot = applyAgentikitIncludeConfig(provisionalKitRoot, cacheDir, extractedDir) ?? provisionalKitRoot;
168
+ stashRoot = detectStashRoot(installRoot);
127
169
  }
128
- cloneArgs.push(parsed.url, cloneDir);
129
- const cloneResult = spawnSync("git", cloneArgs, { encoding: "utf8", timeout: 120_000 });
130
- if (cloneResult.status !== 0) {
131
- const err = cloneResult.stderr?.trim() || cloneResult.error?.message || "unknown error";
132
- throw new Error(`Failed to clone ${parsed.url}: ${err}`);
170
+ catch (err) {
171
+ // Clean up the cache directory so stale or partially-cloned artifacts
172
+ // don't cause false cache hits on the next install attempt.
173
+ try {
174
+ fs.rmSync(cacheDir, { recursive: true, force: true });
175
+ }
176
+ catch {
177
+ // Best-effort cleanup; ignore errors
178
+ }
179
+ throw err;
133
180
  }
134
- // Copy contents to extracted dir without .git
135
- fs.mkdirSync(extractedDir, { recursive: true });
136
- copyDirectoryContents(cloneDir, extractedDir);
137
- // Clean up the clone dir
138
- fs.rmSync(cloneDir, { recursive: true, force: true });
139
- const provisionalKitRoot = detectStashRoot(extractedDir);
140
- const installRoot = applyAgentikitIncludeConfig(provisionalKitRoot, cacheDir, extractedDir) ?? provisionalKitRoot;
141
- const stashRoot = detectStashRoot(installRoot);
142
181
  return {
143
182
  id: resolved.id,
144
183
  source: resolved.source,
@@ -204,13 +243,13 @@ function buildInstallCacheDir(cacheRootDir, source, id, version) {
204
243
  return path.join(cacheRootDir, slug || source, versionSlug);
205
244
  }
206
245
  function applyAgentikitIncludeConfig(sourceRoot, cacheDir, searchRoot = sourceRoot) {
207
- const includeConfig = findNearestAgentikitIncludeConfig(sourceRoot, searchRoot);
246
+ const includeConfig = findNearestIncludeConfig(sourceRoot, searchRoot);
208
247
  if (!includeConfig)
209
248
  return undefined;
210
249
  const selectedDir = path.join(cacheDir, "selected");
211
250
  fs.rmSync(selectedDir, { recursive: true, force: true });
212
251
  fs.mkdirSync(selectedDir, { recursive: true });
213
- copyIncludedPaths(includeConfig.baseDir, includeConfig.include, selectedDir);
252
+ copyIncludedPaths(includeConfig.include, includeConfig.baseDir, selectedDir);
214
253
  return selectedDir;
215
254
  }
216
255
  async function downloadArchive(url, destination) {
@@ -260,9 +299,10 @@ export function verifyArchiveIntegrity(archivePath, expected, source) {
260
299
  }
261
300
  return;
262
301
  }
263
- // Unrecognized format — skip verification
302
+ // Unrecognized format — warn and skip verification
303
+ warn("Unrecognized integrity format: %s — verification skipped", expected);
264
304
  }
265
- function extractTarGzSecure(archivePath, destinationDir) {
305
+ export function extractTarGzSecure(archivePath, destinationDir) {
266
306
  const listResult = spawnSync("tar", ["tzf", archivePath], { encoding: "utf8" });
267
307
  if (listResult.status !== 0) {
268
308
  const err = listResult.stderr?.trim() || listResult.error?.message || "unknown error";
@@ -271,15 +311,42 @@ function extractTarGzSecure(archivePath, destinationDir) {
271
311
  validateTarEntries(listResult.stdout);
272
312
  fs.rmSync(destinationDir, { recursive: true, force: true });
273
313
  fs.mkdirSync(destinationDir, { recursive: true });
274
- const extractResult = spawnSync("tar", ["xzf", archivePath, "--strip-components=1", "-C", destinationDir], {
275
- encoding: "utf8",
276
- });
314
+ const extractResult = spawnSync("tar", ["xzf", archivePath, "--no-same-owner", "--strip-components=1", "-C", destinationDir], { encoding: "utf8" });
277
315
  if (extractResult.status !== 0) {
278
316
  const err = extractResult.stderr?.trim() || extractResult.error?.message || "unknown error";
279
317
  throw new Error(`Failed to extract archive ${archivePath}: ${err}`);
280
318
  }
319
+ // Post-extraction scan: verify all extracted files are within destinationDir
320
+ // This mitigates TOCTOU between validateTarEntries (list) and tar extract.
321
+ scanExtractedFiles(destinationDir, destinationDir);
322
+ }
323
+ function scanExtractedFiles(dir, root) {
324
+ let entries;
325
+ try {
326
+ entries = fs.readdirSync(dir, { withFileTypes: true });
327
+ }
328
+ catch {
329
+ return;
330
+ }
331
+ for (const entry of entries) {
332
+ const fullPath = path.join(dir, entry.name);
333
+ // Check for ".." segments in names (e.g. symlink tricks or crafted filenames)
334
+ if (entry.name.includes("..")) {
335
+ throw new Error(`Post-extraction scan: suspicious entry name: ${fullPath}`);
336
+ }
337
+ // Resolve symlinks to detect escapes outside the destination directory
338
+ if (entry.isSymbolicLink()) {
339
+ const target = fs.realpathSync(fullPath);
340
+ if (!isWithin(target, root)) {
341
+ throw new Error(`Post-extraction scan: symlink escapes destination directory: ${fullPath} -> ${target}`);
342
+ }
343
+ }
344
+ if (entry.isDirectory()) {
345
+ scanExtractedFiles(fullPath, root);
346
+ }
347
+ }
281
348
  }
282
- function validateTarEntries(listOutput) {
349
+ export function validateTarEntries(listOutput) {
283
350
  const lines = listOutput.split(/\r?\n/).filter(Boolean);
284
351
  for (const rawLine of lines) {
285
352
  const entry = rawLine.trim();
@@ -313,83 +380,6 @@ function isDirectory(target) {
313
380
  return false;
314
381
  }
315
382
  }
316
- function readAgentikitIncludeConfigAtDir(dirPath) {
317
- const packageJsonPath = path.join(dirPath, "package.json");
318
- if (!fs.existsSync(packageJsonPath))
319
- return undefined;
320
- let pkg;
321
- try {
322
- pkg = JSON.parse(fs.readFileSync(packageJsonPath, "utf8"));
323
- }
324
- catch {
325
- return undefined;
326
- }
327
- if (typeof pkg !== "object" || pkg === null || Array.isArray(pkg))
328
- return undefined;
329
- const akmConfig = pkg.akm;
330
- if (typeof akmConfig !== "object" || akmConfig === null || Array.isArray(akmConfig))
331
- return undefined;
332
- const include = akmConfig.include;
333
- if (!Array.isArray(include))
334
- return undefined;
335
- const parsedInclude = include
336
- .filter((value) => typeof value === "string")
337
- .map((value) => value.trim())
338
- .filter(Boolean);
339
- return parsedInclude.length > 0 ? { baseDir: dirPath, include: parsedInclude } : undefined;
340
- }
341
- function findNearestAgentikitIncludeConfig(startDir, stopDir) {
342
- let current = path.resolve(startDir);
343
- const boundary = path.resolve(stopDir);
344
- while (isWithin(current, boundary)) {
345
- const config = readAgentikitIncludeConfigAtDir(current);
346
- if (config)
347
- return config;
348
- if (current === boundary)
349
- break;
350
- const parent = path.dirname(current);
351
- if (parent === current)
352
- break;
353
- current = parent;
354
- }
355
- return undefined;
356
- }
357
- function copyIncludedPaths(baseDir, include, destinationDir) {
358
- for (const entry of include) {
359
- const resolvedSource = path.resolve(baseDir, entry);
360
- if (!isWithin(resolvedSource, baseDir)) {
361
- throw new Error(`Path in akm.include escapes the package root: ${entry}`);
362
- }
363
- if (!fs.existsSync(resolvedSource)) {
364
- throw new Error(`Path in akm.include does not exist: ${entry}`);
365
- }
366
- if (path.basename(resolvedSource) === ".git") {
367
- continue;
368
- }
369
- const relativePath = path.relative(baseDir, resolvedSource);
370
- if (!relativePath || relativePath === ".") {
371
- copyDirectoryContents(baseDir, destinationDir);
372
- continue;
373
- }
374
- copyPath(resolvedSource, path.join(destinationDir, relativePath));
375
- }
376
- }
377
- function copyDirectoryContents(sourceDir, destinationDir) {
378
- for (const entry of fs.readdirSync(sourceDir, { withFileTypes: true })) {
379
- if (entry.name === ".git")
380
- continue;
381
- copyPath(path.join(sourceDir, entry.name), path.join(destinationDir, entry.name));
382
- }
383
- }
384
- function copyPath(sourcePath, destinationPath) {
385
- const stat = fs.statSync(sourcePath);
386
- fs.mkdirSync(path.dirname(destinationPath), { recursive: true });
387
- if (stat.isDirectory()) {
388
- fs.cpSync(sourcePath, destinationPath, { recursive: true, force: true });
389
- return;
390
- }
391
- fs.copyFileSync(sourcePath, destinationPath);
392
- }
393
383
  function hasStashDirs(dirPath) {
394
384
  if (!isDirectory(dirPath))
395
385
  return false;
@@ -452,6 +442,21 @@ function normalizeInstalledEntry(entry) {
452
442
  cacheDir: path.resolve(entry.cacheDir),
453
443
  };
454
444
  }
445
+ function copyDirectoryContents(sourceDir, destinationDir) {
446
+ for (const entry of fs.readdirSync(sourceDir, { withFileTypes: true })) {
447
+ if (entry.name === ".git")
448
+ continue;
449
+ const src = path.join(sourceDir, entry.name);
450
+ const dest = path.join(destinationDir, entry.name);
451
+ fs.mkdirSync(path.dirname(dest), { recursive: true });
452
+ if (entry.isDirectory()) {
453
+ fs.cpSync(src, dest, { recursive: true, force: true });
454
+ }
455
+ else {
456
+ fs.copyFileSync(src, dest);
457
+ }
458
+ }
459
+ }
455
460
  async function computeFileHash(filePath) {
456
461
  const data = fs.readFileSync(filePath);
457
462
  const hash = createHash("sha256").update(data).digest("hex");
@@ -2,9 +2,37 @@ import { spawnSync } from "node:child_process";
2
2
  import fs from "node:fs";
3
3
  import os from "node:os";
4
4
  import path from "node:path";
5
- import { pathToFileURL } from "node:url";
5
+ import { fileURLToPath, pathToFileURL } from "node:url";
6
6
  import { fetchWithRetry } from "./common";
7
+ import { UsageError } from "./errors";
7
8
  import { asRecord, asString, GITHUB_API_BASE, githubHeaders } from "./github";
9
+ /**
10
+ * Validate that a URL is safe to pass to git.
11
+ * Allowlists https:, http:, ssh:, git: schemes and git@ SSH shorthand.
12
+ * Rejects git protocol helpers (ext::, fd::) that can execute arbitrary commands.
13
+ */
14
+ export function validateGitUrl(url) {
15
+ // git@ SSH shorthand: git@host:path
16
+ if (/^git@[^:]+:.+$/.test(url))
17
+ return;
18
+ let parsed;
19
+ try {
20
+ parsed = new URL(url);
21
+ }
22
+ catch {
23
+ throw new UsageError(`Invalid git URL: ${url}`);
24
+ }
25
+ const allowed = ["https:", "http:", "ssh:", "git:"];
26
+ if (!allowed.includes(parsed.protocol)) {
27
+ throw new UsageError(`Unsafe git URL scheme "${parsed.protocol}" in "${url}". Allowed: https, http, ssh, git, git@host:path`);
28
+ }
29
+ }
30
+ /** Validate that a git ref (branch/tag/commit) contains only safe characters. */
31
+ export function validateGitRef(ref) {
32
+ if (!/^[a-zA-Z0-9._\-/]+$/.test(ref)) {
33
+ throw new UsageError(`Unsafe git ref "${ref}": only alphanumerics, '.', '_', '-', '/' are allowed`);
34
+ }
35
+ }
8
36
  export function parseRegistryRef(rawRef) {
9
37
  const ref = rawRef.trim();
10
38
  if (!ref)
@@ -285,7 +313,10 @@ async function resolveGithubArtifact(parsed) {
285
313
  };
286
314
  }
287
315
  async function resolveGitArtifact(parsed) {
316
+ validateGitUrl(parsed.url);
288
317
  const ref = parsed.requestedRef ?? "HEAD";
318
+ if (parsed.requestedRef)
319
+ validateGitRef(parsed.requestedRef);
289
320
  const result = spawnSync("git", ["ls-remote", parsed.url, ref], { encoding: "utf8", timeout: 30_000 });
290
321
  let resolvedRevision;
291
322
  if (result.status === 0) {
@@ -370,19 +401,23 @@ function stripGitTransport(ref) {
370
401
  }
371
402
  /**
372
403
  * Convert a `file:` URI to a local filesystem path.
373
- * Supports `file:./relative`, `file:../relative`, and `file:///absolute`.
404
+ *
405
+ * Standard `file:///absolute` forms are handled by Node's `fileURLToPath`.
406
+ * Non-standard `file:./relative` and `file:../relative` shorthand forms
407
+ * (not a valid RFC 8089 URL) are handled with a custom fallback.
374
408
  */
375
409
  function fileUriToPath(ref) {
376
410
  const after = ref.slice(5); // strip "file:"
377
- // file:///absolute/path or file:///C:/path
378
- if (after.startsWith("///")) {
379
- return after.slice(2); // keep one leading /
380
- }
381
- // file://hostname/path (rare, treat hostname/path as absolute)
411
+ // Standard file:///absolute/path delegate to Node's implementation
382
412
  if (after.startsWith("//")) {
383
- return after.slice(1);
413
+ try {
414
+ return fileURLToPath(ref);
415
+ }
416
+ catch {
417
+ // Fall through to custom handling
418
+ }
384
419
  }
385
- // file:./relative or file:../relative or file:/absolute
420
+ // Non-standard file:./relative or file:../relative or file:/absolute
386
421
  return after;
387
422
  }
388
423
  /**
@@ -1,5 +1,6 @@
1
+ import { toErrorMessage } from "./common";
1
2
  import { DEFAULT_CONFIG, loadConfig } from "./config";
2
- import { resolveProviderFactory } from "./provider-registry";
3
+ import { resolveProviderFactory } from "./registry-factory";
3
4
  // ── Eagerly import providers to trigger self-registration ───────────────────
4
5
  import "./providers/static-index";
5
6
  import "./providers/skills-sh";
@@ -63,11 +64,18 @@ export function resolveRegistries(configRegistries) {
63
64
  // Allow env var override (comma-separated URLs) — CI escape hatch
64
65
  const envUrls = process.env.AKM_REGISTRY_URL?.trim();
65
66
  if (envUrls) {
66
- return envUrls
67
- .split(",")
68
- .map((u) => u.trim())
69
- .filter(Boolean)
70
- .map((url) => ({ url }));
67
+ const entries = [];
68
+ for (const raw of envUrls.split(",")) {
69
+ const url = raw.trim();
70
+ if (!url)
71
+ continue;
72
+ if (!url.startsWith("http://") && !url.startsWith("https://")) {
73
+ console.warn(`[agentikit] Ignoring AKM_REGISTRY_URL entry: must start with http:// or https://, got "${url}"`);
74
+ continue;
75
+ }
76
+ entries.push({ url });
77
+ }
78
+ return entries;
71
79
  }
72
80
  const registries = configRegistries ?? loadConfig().registries ?? DEFAULT_CONFIG.registries ?? [];
73
81
  return registries.filter((r) => r.enabled !== false);
@@ -89,6 +97,3 @@ function clampLimit(limit) {
89
97
  return 20;
90
98
  return Math.min(100, Math.max(1, Math.trunc(limit)));
91
99
  }
92
- function toErrorMessage(error) {
93
- return error instanceof Error ? error.message : String(error);
94
- }
package/dist/renderers.js CHANGED
@@ -54,7 +54,7 @@ export function extractCommentTags(filePath) {
54
54
  catch {
55
55
  return {};
56
56
  }
57
- const lines = content.split(/\r?\n/).slice(0, 50);
57
+ const lines = content.split(/\r?\n/, 50);
58
58
  const hints = {};
59
59
  for (const line of lines) {
60
60
  const trimmed = line.trim();
@@ -89,10 +89,11 @@ export function extractCommentTags(filePath) {
89
89
  export function detectExecHints(filePath) {
90
90
  const ext = path.extname(filePath).toLowerCase();
91
91
  const hints = {};
92
- // Interpreter from extension
92
+ // Interpreter from extension — use basename so the run command is portable
93
+ // relative to the stash root (callers set cwd to the file's directory).
93
94
  const interpreter = INTERPRETER_MAP[ext];
94
95
  if (interpreter) {
95
- hints.run = `${interpreter} ${filePath}`;
96
+ hints.run = `${interpreter} ${path.basename(filePath)}`;
96
97
  }
97
98
  // Setup from nearby dependency files
98
99
  const dir = path.dirname(filePath);
@@ -206,7 +207,7 @@ const commandMdRenderer = {
206
207
  action: "Fill $ARGUMENTS placeholders in the template, then dispatch",
207
208
  description: toStringOrUndefined(parsedMd.data.description),
208
209
  template,
209
- modelHint: parsedMd.data.model,
210
+ modelHint: typeof parsedMd.data.model === "string" ? parsedMd.data.model : undefined,
210
211
  agent: toStringOrUndefined(parsedMd.data.agent),
211
212
  parameters: extractParameters(template),
212
213
  };
@@ -226,7 +227,7 @@ const agentMdRenderer = {
226
227
  description: toStringOrUndefined(parsedMd.data.description),
227
228
  prompt: parsedMd.content,
228
229
  toolPolicy: parsedMd.data.tools,
229
- modelHint: parsedMd.data.model,
230
+ modelHint: typeof parsedMd.data.model === "string" ? parsedMd.data.model : undefined,
230
231
  };
231
232
  },
232
233
  };
@@ -308,7 +309,21 @@ const knowledgeMdRenderer = {
308
309
  }
309
310
  },
310
311
  };
311
- // ── 5. script-source ─────────────────────────────────────────────────────────
312
+ // ── 5. memory-md ─────────────────────────────────────────────────────────────
313
+ const memoryMdRenderer = {
314
+ name: "memory-md",
315
+ buildShowResponse(ctx) {
316
+ const name = deriveName(ctx);
317
+ return {
318
+ type: "memory",
319
+ name,
320
+ path: ctx.absPath,
321
+ action: "Recall context — read the content below",
322
+ content: ctx.content(),
323
+ };
324
+ },
325
+ };
326
+ // ── 6. script-source ─────────────────────────────────────────────────────────
312
327
  const scriptSourceRenderer = {
313
328
  name: "script-source",
314
329
  buildShowResponse(ctx) {
@@ -371,6 +386,7 @@ const builtinRenderers = [
371
386
  commandMdRenderer,
372
387
  agentMdRenderer,
373
388
  knowledgeMdRenderer,
389
+ memoryMdRenderer,
374
390
  scriptSourceRenderer,
375
391
  ];
376
392
  /**
@@ -383,4 +399,4 @@ export function registerBuiltinRenderers() {
383
399
  }
384
400
  }
385
401
  // ── Named exports for testing ────────────────────────────────────────────────
386
- export { skillMdRenderer, commandMdRenderer, agentMdRenderer, knowledgeMdRenderer, scriptSourceRenderer, INTERPRETER_MAP, SETUP_SIGNALS, };
402
+ export { skillMdRenderer, commandMdRenderer, agentMdRenderer, knowledgeMdRenderer, memoryMdRenderer, scriptSourceRenderer, INTERPRETER_MAP, SETUP_SIGNALS, };
@@ -118,9 +118,13 @@ function downloadAndExtractZip(url, archiveName, destBinary) {
118
118
  if (dlResult.status !== 0) {
119
119
  throw new Error(dlResult.stderr?.trim() || "download failed");
120
120
  }
121
- // Extract the zip archive using separate spawnSync calls with argument arrays
122
- // to avoid shell injection via path interpolation in PowerShell -Command strings
123
- const expandResult = spawnSync("powershell", ["-Command", "Expand-Archive", "-Path", tmpZip, "-DestinationPath", destDir, "-Force"], {
121
+ // Extract the zip archive. Use a single-string -Command with quoted paths to
122
+ // prevent PowerShell from treating subsequent array elements as separate
123
+ // arguments to the interpreter itself (PowerShell -Command arg1 arg2 ... would
124
+ // concatenate them with spaces, causing unexpected evaluation on paths with
125
+ // backticks or semicolons).
126
+ const expandCmd = `Expand-Archive -Path '${tmpZip.replace(/'/g, "''")}' -DestinationPath '${destDir.replace(/'/g, "''")}' -Force`;
127
+ const expandResult = spawnSync("powershell", ["-NonInteractive", "-NoProfile", "-Command", expandCmd], {
124
128
  encoding: "utf8",
125
129
  timeout: 60_000,
126
130
  env: process.env,
@@ -129,7 +133,8 @@ function downloadAndExtractZip(url, archiveName, destBinary) {
129
133
  throw new Error(expandResult.stderr?.trim() || "extraction failed");
130
134
  }
131
135
  const srcRgExe = path.join(destDir, archiveName, "rg.exe");
132
- const moveResult = spawnSync("powershell", ["-Command", "Move-Item", "-Force", "-Path", srcRgExe, "-Destination", destBinary], {
136
+ const moveCmd = `Move-Item -Force -Path '${srcRgExe.replace(/'/g, "''")}' -Destination '${destBinary.replace(/'/g, "''")}'`;
137
+ const moveResult = spawnSync("powershell", ["-NonInteractive", "-NoProfile", "-Command", moveCmd], {
133
138
  encoding: "utf8",
134
139
  timeout: 60_000,
135
140
  env: process.env,
@@ -91,11 +91,21 @@ export async function performUpgrade(check, opts) {
91
91
  throw new Error(`Failed to download binary: ${binaryResponse.status} ${binaryResponse.statusText}`);
92
92
  }
93
93
  const binaryData = new Uint8Array(await binaryResponse.arrayBuffer());
94
- // Download and verify checksum
94
+ // Download and verify checksum (mandatory — upgrade is blocked if checksums cannot be fetched)
95
95
  let checksumVerified = false;
96
+ const skipChecksum = opts?.skipChecksum === true;
96
97
  try {
97
98
  const checksumsResponse = await fetchWithRetry(checksumsUrl);
98
- if (checksumsResponse.ok) {
99
+ if (!checksumsResponse.ok) {
100
+ if (skipChecksum) {
101
+ console.warn(`WARNING: checksums.txt fetch failed (HTTP ${checksumsResponse.status}). Proceeding without verification because --skip-checksum was provided.`);
102
+ }
103
+ else {
104
+ throw new Error(`Checksum verification failed: could not fetch ${checksumsUrl} (HTTP ${checksumsResponse.status}). ` +
105
+ `Use --skip-checksum to bypass (not recommended).`);
106
+ }
107
+ }
108
+ else {
99
109
  const checksumsText = await checksumsResponse.text();
100
110
  const expectedHash = parseChecksumForFile(checksumsText, binaryName);
101
111
  if (expectedHash) {
@@ -105,13 +115,30 @@ export async function performUpgrade(check, opts) {
105
115
  }
106
116
  checksumVerified = true;
107
117
  }
118
+ else {
119
+ if (skipChecksum) {
120
+ console.warn(`WARNING: ${binaryName} not found in checksums.txt. Proceeding without verification because --skip-checksum was provided.`);
121
+ }
122
+ else {
123
+ throw new Error(`Checksum verification failed: ${binaryName} not listed in checksums.txt. ` +
124
+ `Use --skip-checksum to bypass (not recommended).`);
125
+ }
126
+ }
108
127
  }
109
128
  }
110
129
  catch (err) {
111
- if (err instanceof Error && err.message.includes("Checksum mismatch")) {
130
+ if (err instanceof Error &&
131
+ (err.message.includes("Checksum mismatch") || err.message.includes("Checksum verification failed"))) {
112
132
  throw err;
113
133
  }
114
- // Non-fatal: checksum file missing or unparseable
134
+ // Network or parse failure
135
+ if (skipChecksum) {
136
+ console.warn(`WARNING: Could not fetch or parse checksums: ${err instanceof Error ? err.message : String(err)}. Proceeding because --skip-checksum was provided.`);
137
+ }
138
+ else {
139
+ throw new Error(`Checksum verification failed: ${err instanceof Error ? err.message : String(err)}. ` +
140
+ `Use --skip-checksum to bypass (not recommended).`);
141
+ }
115
142
  }
116
143
  const execPath = process.execPath;
117
144
  const execDir = path.dirname(execPath);