akm-cli 0.1.3 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,11 +1,13 @@
1
1
  import { loadConfig } from "./config";
2
- import { ACTION_BUILDERS, buildLocalAction, rendererForType, searchLocal, TYPE_TO_RENDERER } from "./local-search";
2
+ import { closeDatabase, openDatabase } from "./db";
3
+ import { searchLocal } from "./local-search";
3
4
  import { resolveStashProviders } from "./stash-provider-factory";
4
5
  // Eagerly import stash providers to trigger self-registration
5
6
  import "./stash-providers/index";
6
7
  import { UsageError } from "./errors";
7
8
  import { searchRegistry } from "./registry-search";
8
9
  import { resolveStashSources } from "./search-source";
10
+ import { insertUsageEvent } from "./usage-events";
9
11
  const DEFAULT_LIMIT = 20;
10
12
  export async function akmSearch(input) {
11
13
  const t0 = Date.now();
@@ -19,7 +21,7 @@ export async function akmSearch(input) {
19
21
  if (sources.length === 0) {
20
22
  // stashDir: "" is a safe sentinel here — the response carries zero hits
21
23
  // and a warning, so no downstream code will try to use the empty path.
22
- return {
24
+ const response = {
23
25
  schemaVersion: 1,
24
26
  stashDir: "",
25
27
  source,
@@ -27,12 +29,16 @@ export async function akmSearch(input) {
27
29
  warnings: ["No stashes configured. Run `akm init` to create your working stash."],
28
30
  timing: { totalMs: Date.now() - t0 },
29
31
  };
32
+ logSearchEvent(query, response);
33
+ return response;
30
34
  }
31
35
  // Primary stash directory — used for DB path lookups and as the default
32
36
  // stash root. Safe because the empty-sources case is handled above.
33
37
  const stashDir = sources[0].path;
34
- // Resolve additional stash providers (e.g. OpenViking) from config
35
- const additionalStashProviders = resolveStashProviders(config);
38
+ // Resolve additional stash providers (e.g. OpenViking) from config.
39
+ // Exclude filesystem (handled by resolveStashSources) and context-hub/git
40
+ // (content now indexed through the unified FTS5 pipeline).
41
+ const additionalStashProviders = resolveStashProviders(config).filter((p) => p.type !== "filesystem" && p.type !== "context-hub" && p.type !== "git");
36
42
  const localResult = source === "registry"
37
43
  ? undefined
38
44
  : await searchLocal({
@@ -43,8 +49,8 @@ export async function akmSearch(input) {
43
49
  sources,
44
50
  config,
45
51
  });
46
- // Query additional stash providers (e.g. OpenViking)
47
- const additionalStashResults = source === "registry" || additionalStashProviders.length === 0 || !query
52
+ // Pass original case to providers — FTS5 requires lowercase, but remote providers handle case themselves
53
+ const additionalStashResults = source === "registry" || additionalStashProviders.length === 0
48
54
  ? []
49
55
  : await Promise.all(additionalStashProviders.map(async (provider) => {
50
56
  try {
@@ -65,7 +71,7 @@ export async function akmSearch(input) {
65
71
  const allStashHits = mergeStashHits(localResult?.hits ?? [], additionalHits, limit);
66
72
  const localWarnings = [...(localResult?.warnings ?? []), ...additionalWarnings];
67
73
  const hasResults = allStashHits.length > 0;
68
- return {
74
+ const response = {
69
75
  schemaVersion: 1,
70
76
  stashDir,
71
77
  source,
@@ -74,9 +80,14 @@ export async function akmSearch(input) {
74
80
  warnings: localWarnings.length > 0 ? localWarnings : undefined,
75
81
  timing: { totalMs: Date.now() - t0, rankMs: localResult?.rankMs, embedMs: localResult?.embedMs },
76
82
  };
83
+ logSearchEvent(query, response);
84
+ return response;
77
85
  }
78
86
  const registryHits = (registryResult?.hits ?? []).map((hit) => {
79
- const installRef = hit.source === "npm" ? `npm:${hit.ref}` : hit.source === "git" ? `git+${hit.ref}` : `github:${hit.ref}`;
87
+ // Use the provider-supplied installRef when available (already correctly
88
+ // prefixed), otherwise derive it from source + ref for backward compat.
89
+ const installRef = hit.installRef ??
90
+ (hit.source === "npm" ? `npm:${hit.ref}` : hit.source === "git" ? `git+${hit.ref}` : `github:${hit.ref}`);
80
91
  return {
81
92
  type: "registry",
82
93
  name: hit.title,
@@ -89,76 +100,118 @@ export async function akmSearch(input) {
89
100
  };
90
101
  });
91
102
  if (source === "registry") {
92
- const hits = registryHits.slice(0, limit);
93
- const hasResults = hits.length > 0;
94
- return {
103
+ const slicedRegistryHits = registryHits.slice(0, limit);
104
+ const hasResults = slicedRegistryHits.length > 0;
105
+ const response = {
95
106
  schemaVersion: 1,
96
107
  stashDir,
97
108
  source,
98
- hits,
109
+ hits: [],
110
+ registryHits: slicedRegistryHits,
99
111
  tip: hasResults ? undefined : "No matching registry entries were found.",
100
112
  warnings: registryResult?.warnings.length ? registryResult.warnings : undefined,
101
113
  timing: { totalMs: Date.now() - t0 },
102
114
  };
115
+ logSearchEvent(query, response);
116
+ return response;
103
117
  }
104
118
  // source === "both"
105
119
  const allStashHits = mergeStashHits(localResult?.hits ?? [], additionalHits, limit * 2);
106
- const mergedHits = mergeSearchHits(allStashHits, registryHits, limit);
107
120
  const warnings = [...(localResult?.warnings ?? []), ...additionalWarnings, ...(registryResult?.warnings ?? [])];
108
- const hasResults = mergedHits.length > 0;
109
- return {
121
+ const hasResults = allStashHits.length > 0 || registryHits.length > 0;
122
+ const response = {
110
123
  schemaVersion: 1,
111
124
  stashDir,
112
125
  source,
113
- hits: mergedHits,
126
+ hits: allStashHits.slice(0, limit),
127
+ registryHits,
114
128
  tip: hasResults ? undefined : "No matching stash assets or registry entries were found.",
115
129
  warnings: warnings.length ? warnings : undefined,
116
130
  timing: { totalMs: Date.now() - t0 },
117
131
  };
132
+ logSearchEvent(query, response);
133
+ return response;
118
134
  }
119
- // Re-export searchLocal so existing callers (filesystem.ts) still work via this module
120
- export { searchLocal };
121
- // ── Type renderer and action builder registration ────────────────────────────
122
- export function registerTypeRenderer(type, rendererName) {
123
- TYPE_TO_RENDERER[type] = rendererName;
135
+ /**
136
+ * Resolve entry IDs by file_path lookup (exact match, not LIKE).
137
+ */
138
+ function resolveEntryIds(db, hits) {
139
+ const results = [];
140
+ const stmt = db.prepare("SELECT id FROM entries WHERE file_path = ? LIMIT 1");
141
+ for (const hit of hits) {
142
+ try {
143
+ const row = stmt.get(hit.path);
144
+ if (row)
145
+ results.push({ entryId: row.id, ref: hit.ref });
146
+ }
147
+ catch {
148
+ /* skip unresolvable */
149
+ }
150
+ }
151
+ return results;
124
152
  }
125
- export function registerActionBuilder(type, builder) {
126
- ACTION_BUILDERS[type] = builder;
153
+ /**
154
+ * Fire-and-forget: log a search event to the usage_events table.
155
+ * Never blocks the caller; errors are silently ignored.
156
+ */
157
+ function logSearchEvent(query, response, existingDb) {
158
+ try {
159
+ const db = existingDb ?? openDatabase();
160
+ try {
161
+ const stashHits = response.hits.filter((h) => h.type !== "registry").slice(0, 50);
162
+ const resolved = resolveEntryIds(db, stashHits);
163
+ for (const { entryId, ref } of resolved) {
164
+ insertUsageEvent(db, {
165
+ event_type: "search",
166
+ query,
167
+ entry_id: entryId,
168
+ entry_ref: ref,
169
+ });
170
+ }
171
+ insertUsageEvent(db, {
172
+ event_type: "search",
173
+ query,
174
+ metadata: JSON.stringify({ resultCount: response.hits.length, resolvedCount: resolved.length }),
175
+ });
176
+ }
177
+ finally {
178
+ if (!existingDb)
179
+ closeDatabase(db);
180
+ }
181
+ }
182
+ catch {
183
+ /* fire-and-forget */
184
+ }
127
185
  }
128
- // Re-export for consumers that were already importing from stash-search
129
- export { buildLocalAction, rendererForType };
130
186
  // ── Helpers ──────────────────────────────────────────────────────────────────
131
187
  /**
132
- * Merge hits from local stash and additional providers using Reciprocal Rank
133
- * Fusion (RRF). Each list is already internally sorted by relevance. RRF
134
- * assigns scores based on rank position rather than raw score values, so
135
- * sources with incompatible score scales (e.g. RRF ~0.01-0.03 vs 0-1 or
136
- * 0-100) are merged fairly.
188
+ * Merge local and additional stash hits into a single ranked list.
189
+ *
190
+ * Provider hits (e.g. OpenViking) keep their original scores and compete
191
+ * fairly alongside local hits. Duplicates are resolved in favour of the
192
+ * local version.
193
+ *
194
+ * 1. Build set of local hit keys for dedup.
195
+ * 2. Filter provider hits that aren't duplicates.
196
+ * 3. Combine local + non-duplicate provider hits.
197
+ * 4. Sort by score descending.
198
+ * 5. Slice to limit.
137
199
  */
138
200
  export function mergeStashHits(localHits, additionalHits, limit) {
139
201
  if (additionalHits.length === 0)
140
202
  return localHits.slice(0, limit);
141
- const RRF_K = 60;
142
- const scoreMap = new Map();
143
- const applyRankedList = (hits) => {
144
- for (let i = 0; i < hits.length; i++) {
145
- const key = hits[i].path ?? hits[i].ref ?? hits[i].name;
146
- const rrf = 1 / (RRF_K + i + 1);
147
- const existing = scoreMap.get(key);
148
- if (existing) {
149
- existing.score += rrf;
150
- }
151
- else {
152
- scoreMap.set(key, { hit: hits[i], score: rrf });
153
- }
154
- }
155
- };
156
- applyRankedList(localHits);
157
- applyRankedList(additionalHits);
158
- return [...scoreMap.values()]
159
- .sort((a, b) => b.score - a.score)
160
- .slice(0, limit)
161
- .map((v) => ({ ...v.hit, score: Math.round(v.score * 10000) / 10000 }));
203
+ // Track local hits by a dedup key (path > ref > name)
204
+ const localKeys = new Set();
205
+ for (const h of localHits) {
206
+ localKeys.add(h.path ?? h.ref ?? h.name);
207
+ }
208
+ // Keep non-duplicate provider hits with their original scores
209
+ const providerOnly = additionalHits.filter((h) => {
210
+ const key = h.path ?? h.ref ?? h.name;
211
+ return !localKeys.has(key);
212
+ });
213
+ // Combine and sort by score descending
214
+ return [...localHits, ...providerOnly].sort((a, b) => (b.score ?? 0) - (a.score ?? 0)).slice(0, limit);
162
215
  }
163
216
  function normalizeLimit(limit) {
164
217
  if (typeof limit !== "number" || Number.isNaN(limit) || limit <= 0) {
@@ -177,45 +230,8 @@ export function parseSearchSource(source) {
177
230
  throw new UsageError(`Invalid value for --source: ${String(source)}. Expected one of: stash|registry|both`);
178
231
  }
179
232
  /**
180
- * Merge stash hits and registry hits using RRF, same rationale as mergeStashHits.
233
+ * Merge stash hits and registry hits via simple concatenation.
181
234
  */
182
235
  export function mergeSearchHits(localHits, registryHits, limit) {
183
- if (registryHits.length === 0)
184
- return localHits.slice(0, limit);
185
- if (localHits.length === 0)
186
- return registryHits.slice(0, limit);
187
- const RRF_K = 60;
188
- const scoreMap = new Map();
189
- const applyStashList = (hits) => {
190
- for (let i = 0; i < hits.length; i++) {
191
- const key = hits[i].path ?? hits[i].ref ?? hits[i].name;
192
- const rrf = 1 / (RRF_K + i + 1);
193
- const existing = scoreMap.get(key);
194
- if (existing) {
195
- existing.score += rrf;
196
- }
197
- else {
198
- scoreMap.set(key, { hit: hits[i], score: rrf });
199
- }
200
- }
201
- };
202
- const applyRegistryList = (hits) => {
203
- for (let i = 0; i < hits.length; i++) {
204
- const key = `registry:${hits[i].id ?? hits[i].name}`;
205
- const rrf = 1 / (RRF_K + i + 1);
206
- const existing = scoreMap.get(key);
207
- if (existing) {
208
- existing.score += rrf;
209
- }
210
- else {
211
- scoreMap.set(key, { hit: hits[i], score: rrf });
212
- }
213
- }
214
- };
215
- applyStashList(localHits);
216
- applyRegistryList(registryHits);
217
- return [...scoreMap.values()]
218
- .sort((a, b) => b.score - a.score)
219
- .slice(0, limit)
220
- .map((v) => ({ ...v.hit, score: Math.round(v.score * 10000) / 10000 }));
236
+ return [...localHits, ...registryHits].slice(0, limit);
221
237
  }
@@ -1,27 +1,86 @@
1
+ import path from "node:path";
1
2
  import { loadConfig } from "./config";
3
+ import { closeDatabase, openDatabase } from "./db";
2
4
  import { NotFoundError, UsageError } from "./errors";
3
5
  import { buildFileContext, buildRenderContext, getRenderer, runMatchers } from "./file-context";
6
+ import { parseFrontmatter, toStringOrUndefined } from "./frontmatter";
7
+ import { loadStashFile } from "./metadata";
4
8
  import { resolveSourcesForOrigin } from "./origin-resolve";
5
9
  import { buildEditHint, findSourceForPath, isEditable, resolveStashSources } from "./search-source";
6
10
  import { resolveStashProviders } from "./stash-provider-factory";
7
11
  import { parseAssetRef } from "./stash-ref";
8
12
  import { resolveAssetPath } from "./stash-resolve";
13
+ import { insertUsageEvent } from "./usage-events";
9
14
  // Eagerly import stash providers to trigger self-registration
10
15
  import "./stash-providers/index";
11
16
  /**
12
- * Unified show: routes to the first stash provider that can handle the ref.
13
- * viking:// refs are handled by OpenViking provider; everything else by filesystem show.
17
+ * Unified show: tries local FTS5 index first, then remote providers.
18
+ *
19
+ * When `detail` is `"summary"`, the response omits content/template/prompt and
20
+ * returns only compact metadata (name, type, description, tags, parameters).
14
21
  */
15
22
  export async function akmShowUnified(input) {
16
23
  const ref = input.ref.trim();
17
- // Try stash providers first (e.g. OpenViking for viking:// URIs)
24
+ // 1. Try local filesystem first (FTS5 index lookup)
25
+ let localError;
26
+ try {
27
+ const result = await showLocal(input);
28
+ logShowEvent(ref);
29
+ return result;
30
+ }
31
+ catch (err) {
32
+ // Only fall through to remote providers on NotFoundError
33
+ if (!(err instanceof NotFoundError))
34
+ throw err;
35
+ localError = err;
36
+ }
37
+ // 2. Try remote providers (e.g. OpenViking)
18
38
  const config = loadConfig();
19
- const provider = resolveStashProviders(config).find((p) => p.canShow(ref));
20
- if (provider) {
21
- return provider.show(ref, input.view);
39
+ const providers = resolveStashProviders(config).filter((p) => p.type !== "filesystem" && p.canShow(ref));
40
+ for (const provider of providers) {
41
+ try {
42
+ const response = await provider.show(ref, input.view);
43
+ logShowEvent(ref);
44
+ if (input.detail === "summary") {
45
+ return buildSummaryResponse(response);
46
+ }
47
+ return response;
48
+ }
49
+ catch (err) {
50
+ if (!(err instanceof NotFoundError))
51
+ throw err;
52
+ }
53
+ }
54
+ // Nothing found anywhere — rethrow the original local error with its specific message
55
+ throw localError;
56
+ }
57
+ /**
58
+ * Fire-and-forget: log a show event to the usage_events table.
59
+ * Never blocks the caller; errors are silently ignored.
60
+ */
61
+ function logShowEvent(ref, existingDb) {
62
+ try {
63
+ const db = existingDb ?? openDatabase();
64
+ try {
65
+ const parsed = parseAssetRef(ref);
66
+ const safeName = parsed.name.replace(/%/g, "\\%").replace(/_/g, "\\_");
67
+ const row = db
68
+ .prepare("SELECT id FROM entries WHERE entry_key LIKE ? ESCAPE '\\' AND entry_type = ? LIMIT 1")
69
+ .get(`%:${parsed.type}:${safeName}`, parsed.type);
70
+ insertUsageEvent(db, {
71
+ event_type: "show",
72
+ entry_ref: ref,
73
+ entry_id: row?.id,
74
+ });
75
+ }
76
+ finally {
77
+ if (!existingDb)
78
+ closeDatabase(db);
79
+ }
80
+ }
81
+ catch {
82
+ /* fire-and-forget */
22
83
  }
23
- // Default: local filesystem show
24
- return showLocal(input);
25
84
  }
26
85
  /** @internal Use akmShowUnified() for all external callers. */
27
86
  export async function showLocal(input) {
@@ -71,10 +130,67 @@ export async function showLocal(input) {
71
130
  const renderCtx = buildRenderContext(fileCtx, match, allStashDirs);
72
131
  const response = renderer.buildShowResponse(renderCtx);
73
132
  const editable = isEditable(assetPath, config);
74
- return {
133
+ const fullResponse = {
75
134
  ...response,
76
135
  origin: source?.registryId ?? null,
77
136
  editable,
78
137
  ...(!editable ? { editHint: buildEditHint(assetPath, parsed.type, parsed.name, source?.registryId) } : {}),
79
138
  };
139
+ if (input.detail === "summary") {
140
+ return buildSummaryResponse(fullResponse, assetPath);
141
+ }
142
+ return fullResponse;
143
+ }
144
+ /**
145
+ * Build a compact summary response from a full ShowResponse.
146
+ *
147
+ * Strips content/template/prompt and returns only metadata fields:
148
+ * type, name, path, description, tags, parameters, action.
149
+ * Enriches description and tags from frontmatter or .stash.json when available.
150
+ *
151
+ * Enrichment via frontmatter and .stash.json is only performed when `assetPath`
152
+ * is supplied (local assets). Remote provider responses (e.g. OpenViking) rely
153
+ * on the provider having already populated description and tags.
154
+ *
155
+ * The resulting JSON should be under 200 tokens.
156
+ */
157
+ function buildSummaryResponse(full, assetPath) {
158
+ // Try to enrich metadata from .stash.json if description or tags are missing
159
+ let description = full.description;
160
+ let tags = full.tags;
161
+ if (assetPath) {
162
+ // Try frontmatter extraction from content fields
163
+ const textContent = full.content ?? full.template ?? full.prompt;
164
+ if (textContent && !description) {
165
+ const parsed = parseFrontmatter(textContent);
166
+ description = toStringOrUndefined(parsed.data.description);
167
+ }
168
+ // Try .stash.json for richer metadata (tags especially)
169
+ const dir = path.dirname(assetPath);
170
+ const stashFile = loadStashFile(dir);
171
+ if (stashFile) {
172
+ const fileName = path.basename(assetPath);
173
+ const entry = stashFile.entries.find((e) => e.filename === fileName);
174
+ if (entry) {
175
+ if (!description && entry.description) {
176
+ description = entry.description;
177
+ }
178
+ if (!tags && entry.tags) {
179
+ tags = entry.tags;
180
+ }
181
+ }
182
+ }
183
+ }
184
+ const summary = {
185
+ type: full.type,
186
+ name: full.name,
187
+ path: full.path,
188
+ ...(description ? { description } : {}),
189
+ ...(tags && tags.length > 0 ? { tags } : {}),
190
+ ...(full.parameters ? { parameters: full.parameters } : {}),
191
+ ...(full.action ? { action: full.action } : {}),
192
+ ...(full.run ? { run: full.run } : {}),
193
+ ...(full.origin !== undefined ? { origin: full.origin } : {}),
194
+ };
195
+ return summary;
80
196
  }
@@ -0,0 +1,73 @@
1
+ /**
2
+ * Usage event helpers for telemetry and utility-based re-ranking.
3
+ *
4
+ * Schema (created by ensureUsageEventsSchema):
5
+ * id, event_type, query, entry_id (nullable), entry_ref, signal, metadata, created_at
6
+ */
7
+ // ── Schema ──────────────────────────────────────────────────────────────────
8
+ export function ensureUsageEventsSchema(db) {
9
+ db.exec(`
10
+ CREATE TABLE IF NOT EXISTS usage_events (
11
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
12
+ event_type TEXT NOT NULL,
13
+ query TEXT,
14
+ entry_id INTEGER,
15
+ entry_ref TEXT,
16
+ signal TEXT,
17
+ metadata TEXT,
18
+ created_at TEXT NOT NULL DEFAULT (datetime('now'))
19
+ );
20
+ CREATE INDEX IF NOT EXISTS idx_usage_events_entry ON usage_events(entry_id);
21
+ CREATE INDEX IF NOT EXISTS idx_usage_events_type ON usage_events(event_type);
22
+ CREATE INDEX IF NOT EXISTS idx_usage_events_ref ON usage_events(entry_ref);
23
+ `);
24
+ }
25
+ // ── Insert ───────────────────────────────────────────────────────────────────
26
+ /**
27
+ * Insert a usage event into the database. Fire-and-forget: errors are
28
+ * silently caught so callers are never blocked or disrupted.
29
+ */
30
+ export function insertUsageEvent(db, event) {
31
+ try {
32
+ db.prepare(`INSERT INTO usage_events (event_type, query, entry_id, entry_ref, signal, metadata)
33
+ VALUES (?, ?, ?, ?, ?, ?)`).run(event.event_type, event.query ?? null, event.entry_id ?? null, event.entry_ref ?? null, event.signal ?? null, event.metadata ?? null);
34
+ }
35
+ catch {
36
+ /* fire-and-forget: silently ignore errors */
37
+ }
38
+ }
39
+ // ── Query ────────────────────────────────────────────────────────────────────
40
+ /**
41
+ * Retrieve usage events, optionally filtered by event_type and/or entry_ref.
42
+ */
43
+ export function getUsageEvents(db, filters) {
44
+ const conditions = [];
45
+ const params = [];
46
+ if (filters?.event_type) {
47
+ conditions.push("event_type = ?");
48
+ params.push(filters.event_type);
49
+ }
50
+ if (filters?.entry_ref) {
51
+ conditions.push("entry_ref = ?");
52
+ params.push(filters.entry_ref);
53
+ }
54
+ const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
55
+ const sql = `SELECT id, event_type, query, entry_id, entry_ref, signal, metadata, created_at
56
+ FROM usage_events ${where}
57
+ ORDER BY id ASC`;
58
+ return db.prepare(sql).all(...params);
59
+ }
60
+ /**
61
+ * Delete usage events older than the given number of days.
62
+ */
63
+ export function purgeOldUsageEvents(db, retentionDays) {
64
+ if (!Number.isFinite(retentionDays) || retentionDays <= 0)
65
+ return;
66
+ try {
67
+ const cutoff = new Date(Date.now() - retentionDays * 86_400_000).toISOString();
68
+ db.prepare("DELETE FROM usage_events WHERE created_at < ?").run(cutoff);
69
+ }
70
+ catch {
71
+ /* Table may not exist yet */
72
+ }
73
+ }
@@ -0,0 +1,20 @@
1
+ import fs from "node:fs";
2
+ import path from "node:path";
3
+ // Version: prefer compile-time define, then package.json, then fallback
4
+ export const pkgVersion = (() => {
5
+ // Injected at compile time via `bun build --define`
6
+ if (typeof AKM_VERSION !== "undefined")
7
+ return AKM_VERSION;
8
+ try {
9
+ const pkgPath = path.resolve(import.meta.dir ?? __dirname, "../package.json");
10
+ if (fs.existsSync(pkgPath)) {
11
+ const pkg = JSON.parse(fs.readFileSync(pkgPath, "utf-8"));
12
+ if (typeof pkg.version === "string")
13
+ return pkg.version;
14
+ }
15
+ }
16
+ catch {
17
+ // swallow — running as compiled binary without package.json
18
+ }
19
+ return "0.0.0-dev";
20
+ })();
package/dist/walker.js CHANGED
@@ -9,6 +9,7 @@ import fs from "node:fs";
9
9
  import path from "node:path";
10
10
  import { isRelevantAssetFile } from "./asset-spec";
11
11
  import { buildFileContext } from "./file-context";
12
+ const SKIP_DIRS = new Set([".git", "node_modules", "bin", ".cache"]);
12
13
  /**
13
14
  * Walk a type root directory and return files grouped by their parent directory.
14
15
  *
@@ -82,7 +83,6 @@ function walkStashGit(stashRoot) {
82
83
  // result.success is false if the process exited non-zero OR git was not found
83
84
  if (!result.success)
84
85
  return null;
85
- const SKIP_DIRS = new Set([".git", "node_modules", "bin", ".cache"]);
86
86
  const SKIP_FILES = new Set([".stash.json", ".gitignore", ".gitattributes"]);
87
87
  const stdout = Buffer.isBuffer(result.stdout) ? result.stdout.toString("utf8") : String(result.stdout ?? "");
88
88
  const files = stdout
@@ -139,7 +139,6 @@ function isInsideGitRepo(dir) {
139
139
  /** Manual walk for non-git directories. */
140
140
  function walkStashManual(stashRoot) {
141
141
  const results = [];
142
- const SKIP_DIRS = new Set([".git", "node_modules", "bin", ".cache"]);
143
142
  const stack = [stashRoot];
144
143
  while (stack.length > 0) {
145
144
  const current = stack.pop();
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "akm-cli",
3
- "version": "0.1.3",
3
+ "version": "0.2.1",
4
4
  "type": "module",
5
5
  "description": "CLI tool to search, open, and run extension assets from an akm stash directory.",
6
6
  "keywords": [
@@ -51,7 +51,6 @@
51
51
  "typescript": "^5.9.3"
52
52
  },
53
53
  "optionalDependencies": {
54
- "@xenova/transformers": "^2.17.0",
55
54
  "sqlite-vec": "0.1.7-alpha.2"
56
55
  },
57
56
  "engines": {
@@ -59,6 +58,8 @@
59
58
  },
60
59
  "dependencies": {
61
60
  "@clack/prompts": "^1.1.0",
62
- "citty": "^0.2.1"
61
+ "@huggingface/transformers": "^3.8.1",
62
+ "citty": "^0.2.1",
63
+ "yaml": "^2.8.2"
63
64
  }
64
65
  }