claude-mem-lite 2.26.1 → 2.28.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +1 -1
- package/.claude-plugin/plugin.json +1 -1
- package/README.md +22 -5
- package/README.zh-CN.md +47 -63
- package/cli.mjs +14 -2
- package/commands/mem.md +1 -1
- package/commands/memory.md +1 -1
- package/commands/recall.md +1 -1
- package/commands/recent.md +1 -1
- package/commands/search.md +1 -1
- package/commands/timeline.md +1 -1
- package/commands/tools.md +9 -20
- package/commands/update.md +1 -1
- package/hook-episode.mjs +1 -1
- package/hook-llm.mjs +31 -4
- package/hook-memory.mjs +9 -3
- package/hook-semaphore.mjs +5 -4
- package/hook-update.mjs +1 -1
- package/hook.mjs +20 -2
- package/hooks/hooks.json +10 -0
- package/install.mjs +30 -13
- package/mem-cli.mjs +171 -30
- package/package.json +5 -1
- package/registry-enricher.mjs +101 -0
- package/registry-github.mjs +54 -0
- package/registry-importer.mjs +358 -0
- package/registry.mjs +39 -2
- package/schema.mjs +5 -5
- package/scoring-sql.mjs +3 -3
- package/scripts/post-tool-use.sh +1 -1
- package/scripts/pre-skill-bridge.js +83 -0
- package/scripts/prompt-search-utils.mjs +47 -2
- package/scripts/user-prompt-search.js +110 -6
- package/server.mjs +207 -31
- package/tfidf.mjs +2 -2
- package/tool-schemas.mjs +9 -2
- package/utils.mjs +22 -3
|
@@ -0,0 +1,358 @@
|
|
|
1
|
+
// claude-mem-lite: Registry importer — tree discovery, frontmatter parsing, keyword extraction, GitHub import pipeline
|
|
2
|
+
// GitHub API helpers (parseGitHubUrl, buildTreeUrl, buildContentUrl, buildHeaders)
|
|
3
|
+
// are in registry-github.mjs.
|
|
4
|
+
|
|
5
|
+
import { parseGitHubUrl, buildTreeUrl, buildContentUrl, buildRepoUrl, buildHeaders } from './registry-github.mjs';
|
|
6
|
+
import { upsertResource } from './registry.mjs';
|
|
7
|
+
import { debugLog, isPathConfined } from './utils.mjs';
|
|
8
|
+
import { createHash } from 'crypto';
|
|
9
|
+
import { mkdirSync, writeFileSync } from 'fs';
|
|
10
|
+
import { join } from 'path';
|
|
11
|
+
import { homedir } from 'os';
|
|
12
|
+
|
|
13
|
+
const MANAGED_DIR = join(homedir(), '.claude-mem-lite', 'managed');
|
|
14
|
+
|
|
15
|
+
// ─── Tree Discovery ─────────────────────────────────────────────────────────

// Recognized layouts, tried in priority order by classifyTreePath():
//   plugin  plugins/x/skills/y/SKILL.md  → name "x/y"
//   flat    skills/name/SKILL.md         → name "name"
//   agent   agents/name/AGENT.md         → name "name"
//   root    ./SKILL.md                   → name "root"
//   generic any-dir/SKILL.md|AGENT.md    (non-standard layouts)
const SKILL_RE = /(?:^|\/)(skills\/([^/]+)\/SKILL\.md)$/;
const AGENT_RE = /(?:^|\/)(agents\/([^/]+)\/AGENT\.md)$/;
const PLUGIN_SKILL_RE = /^plugins\/([^/]+)\/skills\/([^/]+)\/SKILL\.md$/;
const ROOT_SKILL_RE = /^SKILL\.md$/;
const GENERIC_SKILL_RE = /^([^/]+)\/SKILL\.md$/;
const GENERIC_AGENT_RE = /^([^/]+)\/AGENT\.md$/;

/**
 * Classify one repo-relative path as a skill/agent definition file.
 * Order matters: plugin-nested paths must win before the flat and generic
 * patterns get a chance to match.
 * @param {string} p Repo-relative blob path
 * @returns {{ name: string, type: 'skill'|'agent' }|null} null when unrecognized
 */
function classifyTreePath(p) {
  const plugin = PLUGIN_SKILL_RE.exec(p);
  if (plugin) return { name: `${plugin[1]}/${plugin[2]}`, type: 'skill' };

  const flat = SKILL_RE.exec(p);
  if (flat) return { name: flat[2], type: 'skill' };

  const agent = AGENT_RE.exec(p);
  if (agent) return { name: agent[2], type: 'agent' };

  if (ROOT_SKILL_RE.test(p)) return { name: 'root', type: 'skill' };

  const looseSkill = GENERIC_SKILL_RE.exec(p);
  if (looseSkill) return { name: looseSkill[1], type: 'skill' };

  const looseAgent = GENERIC_AGENT_RE.exec(p);
  if (looseAgent) return { name: looseAgent[1], type: 'agent' };

  return null;
}

/**
 * Discover skills/agents from a GitHub tree API response.
 * Supports flat (skills/name/SKILL.md), plugin (plugins/x/skills/y/SKILL.md),
 * and root (./SKILL.md) layouts.
 * @param {object} treeData GitHub API tree response { tree: [{ path, type }] }
 * @param {string} pathFilter Only include paths under this prefix (empty = all)
 * @returns {Array<{ name: string, type: 'skill'|'agent', filePath: string }>}
 */
export function discoverFromTree(treeData, pathFilter) {
  const found = [];
  for (const entry of treeData?.tree ?? []) {
    if (entry.type !== 'blob') continue;           // directories/submodules skipped
    if (pathFilter && !entry.path.startsWith(pathFilter)) continue;
    const hit = classifyTreePath(entry.path);
    if (hit) found.push({ name: hit.name, type: hit.type, filePath: entry.path });
  }
  return found;
}
|
|
85
|
+
|
|
86
|
+
// ─── YAML Frontmatter Parser ────────────────────────────────────────────────
// Lightweight YAML subset parser for skill/agent frontmatter.
// Known limitations: does not handle YAML arrays (- item), nested objects,
// or unquoted values containing colons (e.g. bare URLs). For such fields,
// wrap the value in quotes in the frontmatter: url: "https://..."

/**
 * Parse YAML frontmatter from SKILL.md / AGENT.md content.
 * Handles basic key: value, multiline (|, >), JSON arrays ([...]), quoted strings.
 * When no frontmatter fence is found at the very start, the whole input is
 * returned untouched as `body` with an empty frontmatter object.
 * @param {string} content Full file content
 * @returns {{ frontmatter: Record<string, any>, body: string }}
 */
export function parseFrontmatter(content) {
  // Opening '---' must sit at offset 0; the closing '---' must start a line,
  // but trailing characters after it on that line are tolerated (lazy match).
  const match = content.match(/^---\r?\n([\s\S]*?)\r?\n---/);
  if (!match) return { frontmatter: {}, body: content };

  const raw = match[1];
  const body = content.slice(match[0].length).trim();
  const fm = {};
  // Multiline state: while inMultiline, indented or blank lines are folded
  // into currentValue joined by single spaces. '|' and '>' are treated the
  // same — both behave as folded style; literal newlines are NOT preserved.
  let currentKey = null, currentValue = '', inMultiline = false;

  for (const line of raw.split('\n')) {
    // Continuation line of a multiline value (indented or blank).
    if (inMultiline && (line.startsWith(' ') || line.startsWith('\t') || line.trim() === '')) {
      currentValue += ' ' + line.trim();
      continue;
    }
    // A non-indented line terminates the multiline block: commit folded value.
    if (inMultiline && currentKey) { fm[currentKey] = currentValue.trim(); inMultiline = false; }

    const kv = line.match(/^(\w[\w-]*)\s*:\s*(.*)/);
    if (kv) {
      currentKey = kv[1];
      let val = kv[2].trim();
      // Block scalar indicator with no inline value → enter multiline mode.
      if (val === '|' || val === '>') { inMultiline = true; currentValue = ''; continue; }
      // JSON-style inline array; fall back to the raw string if unparsable.
      if (val.startsWith('[') && val.endsWith(']')) {
        try { fm[currentKey] = JSON.parse(val); } catch { fm[currentKey] = val; }
        continue;
      }
      // Strip one layer of matching single or double quotes.
      if ((val.startsWith('"') && val.endsWith('"')) || (val.startsWith("'") && val.endsWith("'")))
        val = val.slice(1, -1);
      // Special case: 'description' always absorbs following indented lines,
      // even without a block indicator (skill descriptions commonly wrap).
      if (currentKey === 'description' && val) { inMultiline = true; currentValue = val; continue; }
      fm[currentKey] = val;
    }
  }
  // Frontmatter ended while still in multiline mode — commit the tail value.
  if (inMultiline && currentKey) fm[currentKey] = currentValue.trim();
  return { frontmatter: fm, body };
}
|
|
132
|
+
|
|
133
|
+
// ─── Keyword Extraction ─────────────────────────────────────────────────────

// Common English words excluded from keyword frequency counting.
const STOP_WORDS = new Set([
  'the', 'a', 'an', 'and', 'or', 'but', 'in', 'on', 'at', 'to', 'for', 'of', 'with',
  'by', 'from', 'up', 'about', 'into', 'through', 'during', 'before', 'after',
  'is', 'are', 'was', 'were', 'be', 'been', 'being', 'have', 'has', 'had', 'do',
  'does', 'did', 'will', 'would', 'could', 'should', 'may', 'might', 'shall',
  'not', 'no', 'nor', 'so', 'if', 'then', 'than', 'that', 'this', 'these', 'those',
  'it', 'its', 'as', 'such', 'which', 'who', 'whom', 'what', 'when', 'where', 'how',
  'all', 'each', 'every', 'both', 'few', 'more', 'most', 'other', 'some', 'any',
  'can', 'use', 'using', 'used', 'also', 'just', 'very', 'only', 'own', 'same',
  'make', 'like', 'get', 'set', 'new', 'one', 'two', 'see', 'way', 'well',
]);

// Intent tag → trigger patterns. A tag applies when ANY pattern matches.
const INTENT_MAP = {
  test: [/\btest\b/i, /\btdd\b/i, /\bunit\s*test/i, /\be2e\b/i, /\bspec\b/i, /\bcoverage\b/i],
  debug: [/\bdebug\b/i, /\btroubleshoot\b/i, /\bdiagnose\b/i, /\berror\b/i, /\bbug\b/i],
  deploy: [/\bdeploy\b/i, /\bci[\s/]*cd\b/i, /\bpipeline\b/i, /\brelease\b/i, /\bship\b/i, /\bpublish\b/i],
  review: [/\breview\b/i, /\baudit\b/i, /\blint\b/i, /\binspect\b/i, /code\s*quality/i],
  generate: [/\bcreate\b/i, /\bscaffold\b/i, /\bgenerate\b/i, /\bboilerplate\b/i],
  refactor: [/\brefactor\b/i, /\boptimize\b/i, /\bclean\s*up\b/i, /\bsimplify\b/i],
  document: [/\bdocument\b/i, /\bdocs?\b/i, /\breadme\b/i, /\bjsdoc\b/i],
  plan: [/\bplan\b/i, /\bdesign\b/i, /\barchitect\b/i, /\bblueprint\b/i],
  security: [/\bsecurity\b/i, /\bvulnerab/i, /\bauthenticat/i, /\bencrypt/i],
  performance:[/\bperformance\b/i, /\bprofil/i, /\bbenchmark\b/i, /\blatency\b/i],
  migrate: [/\bmigrat/i, /\bupgrad/i, /\blegacy\b/i],
};

// Domain tag → technology patterns. A tag applies when ANY pattern matches.
const DOMAIN_PATTERNS = {
  frontend: [/\breact\b/i, /\bvue\b/i, /\bangular\b/i, /\bsvelte\b/i, /\bnext\.?js\b/i, /\bcss\b/i, /\btailwind\b/i, /\bhtml\b/i],
  backend: [/\bexpress\b/i, /\bfastapi\b/i, /\bdjango\b/i, /\bflask\b/i, /\brails\b/i, /\bspring\b/i],
  database: [/\bpostgres/i, /\bmysql\b/i, /\bmongodb\b/i, /\bredis\b/i, /\bsqlite\b/i, /\bsql\b/i],
  infrastructure: [/\bdocker\b/i, /\bkubernetes\b/i, /\bterraform\b/i, /\bansible\b/i, /\bcloud\b/i, /\baws\b/i, /\bgcp\b/i, /\bazure\b/i],
  javascript: [/\bjavascript\b/i, /\btypescript\b/i, /\bnode\b/i, /\bnpm\b/i, /\besm\b/i],
  python: [/\bpython\b/i, /\bpip\b/i, /\bpydantic\b/i, /\bpoetry\b/i],
  testing: [/\bjest\b/i, /\bvitest\b/i, /\bpytest\b/i, /\bcypress\b/i, /\bplaywright\b/i],
  security: [/\boauth\b/i, /\bjwt\b/i, /\bssl\b/i, /\btls\b/i, /\brbac\b/i],
  ml: [/\bmachine\s*learning\b/i, /\bneural\b/i, /\btensor/i, /\bpytorch\b/i, /\bllm\b/i],
  mobile: [/\bios\b/i, /\bandroid\b/i, /react.native/i, /\bflutter\b/i, /\bswift\b/i],
};

/**
 * Extract keywords, intent tags, and domain tags from content.
 * Keywords are the ten most frequent non-stop-words (≥3 chars, first char a
 * letter); ties keep first-occurrence order. Tags are space-joined in the
 * declaration order of their tables.
 * @param {string} content Full text
 * @returns {{ keywords: string, intentTags: string, domainTags: string }}
 */
export function extractKeywords(content) {
  if (!content) return { keywords: '', intentTags: '', domainTags: '' };

  const text = content.toLowerCase();

  // ── Keywords: stop-word filtered frequency counting, top 10 ────────────
  const counts = new Map();
  for (const token of text.match(/\b[a-z][a-z0-9]{2,}\b/g) ?? []) {
    if (STOP_WORDS.has(token)) continue;
    counts.set(token, (counts.get(token) ?? 0) + 1);
  }
  const keywords = [...counts.entries()]
    .sort((a, b) => b[1] - a[1])            // stable sort: ties keep insertion order
    .slice(0, 10)
    .map((entry) => entry[0])
    .join(' ');

  // ── Intent / domain tags: shared matcher over a pattern table ──────────
  const tagsMatching = (table) =>
    Object.entries(table)
      .filter(([, patterns]) => patterns.some((re) => re.test(text)))
      .map(([tag]) => tag)
      .join(' ');

  return {
    keywords,
    intentTags: tagsMatching(INTENT_MAP),
    domainTags: tagsMatching(DOMAIN_PATTERNS),
  };
}
|
|
212
|
+
|
|
213
|
+
// ─── GitHub Import Pipeline ─────────────────────────────────────────────────

/**
 * Import skills/agents from a GitHub URL into the registry.
 * Stage 1 only — pure code, no LLM.
 *
 * Pipeline: parse URL → fetch repo metadata → fetch recursive tree →
 * discover SKILL.md/AGENT.md files → per item: fetch content, parse
 * frontmatter, extract keywords, dedup by content hash, write to the managed
 * directory, upsert a registry row. Per-item failures are logged and skipped;
 * only failures in the metadata/tree phase throw.
 *
 * @param {Database} db Registry database
 * @param {string} url GitHub URL
 * @param {object} opts Options
 * @param {Function} opts.fetchFn Override fetch function (for testing)
 * @param {string} opts.managedDir Override managed directory (for testing)
 * @returns {Promise<Array<{ name: string, type: string, id: number }>>}
 * @throws {Error} On invalid URL, missing repo/branch, rate limiting, or
 *   other GitHub API errors before the per-item loop begins.
 */
export async function importFromGitHub(db, url, opts = {}) {
  const fetchFn = opts.fetchFn || globalThis.fetch;
  const managedDir = opts.managedDir || MANAGED_DIR;
  const headers = buildHeaders();

  // 1. Parse GitHub URL
  const parsed = parseGitHubUrl(url);
  if (!parsed) throw new Error('Invalid GitHub URL');
  const { owner, repo, branch, path: pathFilter } = parsed;

  // 2. Fetch repo metadata (stars, forks, updated_at)
  const repoResp = await fetchFn(buildRepoUrl(owner, repo), { headers });
  if (!repoResp.ok) {
    if (repoResp.status === 404) throw new Error(`Repository not found: ${owner}/${repo}`);
    // 403 from the GitHub API typically means rate limiting for this flow.
    if (repoResp.status === 403) throw new Error(`GitHub API rate limit exceeded`);
    throw new Error(`GitHub API error: ${repoResp.status}`);
  }
  const repoMeta = await repoResp.json();
  const repoStars = repoMeta.stargazers_count || 0;
  const repoForks = repoMeta.forks_count || 0;
  const repoUpdatedAt = repoMeta.updated_at || null;

  // 3. Fetch file tree via GitHub API (recursive)
  const treeResp = await fetchFn(buildTreeUrl(owner, repo, branch), { headers });
  if (!treeResp.ok) {
    if (treeResp.status === 404) throw new Error(`Branch not found: ${branch}`);
    if (treeResp.status === 403) throw new Error(`GitHub API rate limit exceeded`);
    throw new Error(`GitHub API error: ${treeResp.status}`);
  }
  const treeData = await treeResp.json();

  // 4. Discover skills/agents from tree
  const discovered = discoverFromTree(treeData, pathFilter);
  if (discovered.length === 0) return [];

  const repoUrl = `https://github.com/${owner}/${repo}`;
  const results = [];

  // 5. Process each discovered item (sequentially — one await per iteration)
  for (const item of discovered) {
    try {
      // 5a. Fetch content via raw GitHub URL
      const contentUrl = buildContentUrl(owner, repo, branch, item.filePath);
      const contentResp = await fetchFn(contentUrl, { headers });
      if (!contentResp.ok) {
        debugLog('WARN', 'importer', `Failed to fetch ${item.filePath}: ${contentResp.status}`);
        continue;
      }
      const content = await contentResp.text();

      // 5b. Parse frontmatter
      const { frontmatter, body } = parseFrontmatter(content);

      // Root skill naming: use frontmatter name if present, else repo name for root, else discovered name
      const rawName = frontmatter.name || (item.name === 'root' ? repo : item.name);
      // Sanitize: anything outside [a-zA-Z0-9._-] becomes '_' (so plugin
      // names like "x/y" become "x_y" on disk).
      const name = rawName.replace(/[^a-zA-Z0-9._-]/g, '_');
      // Path traversal guard: reject names that would escape managed directory
      const typeDir = item.type === 'agent' ? 'agents' : 'skills';
      if (!isPathConfined(join(managedDir, typeDir, name), managedDir)) {
        debugLog('WARN', 'importer', `Rejected path-traversal name: ${rawName}`);
        continue;
      }
      const description = frontmatter.description || '';
      const fullText = `${name} ${description} ${body}`;

      // 5c. Extract keywords/intents/domains
      const { keywords, intentTags, domainTags } = extractKeywords(fullText);

      // 5d. SHA-256 hash for dedup — hashes the raw file bytes, so even a
      // formatting-only change counts as an update.
      const fileHash = createHash('sha256').update(content).digest('hex');
      const existing = db.prepare(
        'SELECT file_hash FROM resources WHERE type = ? AND name = ?'
      ).get(item.type, name);
      if (existing && existing.file_hash === fileHash) {
        // NOTE: unchanged items also skip the stars/forks refresh in 5g.
        debugLog('DEBUG', 'importer', `Skipping ${name} — unchanged`);
        continue;
      }

      // 5e. Download to managed directory
      const destDir = join(managedDir, typeDir, name);
      mkdirSync(destDir, { recursive: true });
      const fileName = item.type === 'agent' ? 'AGENT.md' : 'SKILL.md';
      writeFileSync(join(destDir, fileName), content, 'utf8');

      // 5f. Upsert to registry DB. Frontmatter keys are accepted in both
      // kebab-case and snake_case variants.
      const resourceId = upsertResource(db, {
        name,
        type: item.type,
        status: 'active',
        source: 'github',
        repo_url: repoUrl,
        repo_stars: repoStars,
        local_path: join(destDir, fileName),
        file_hash: fileHash,
        invocation_name: frontmatter['invocation-name'] || frontmatter.invocation_name || '',
        intent_tags: intentTags,
        domain_tags: domainTags,
        action_type: frontmatter.action_type || frontmatter['action-type'] || '',
        trigger_patterns: frontmatter.trigger_patterns || frontmatter['trigger-patterns'] || '',
        capability_summary: description,
        input_type: frontmatter.input_type || frontmatter['input-type'] || '',
        output_type: frontmatter.output_type || frontmatter['output-type'] || '',
        prerequisites: frontmatter.prerequisites || '{}',
        keywords,
        tech_stack: frontmatter.tech_stack || frontmatter['tech-stack'] || '',
        use_cases: frontmatter.use_cases || frontmatter['use-cases'] || '',
        complexity: frontmatter.complexity || 'intermediate',
        quality_tier: 'community',
        indexed_at: new Date().toISOString(),
      });

      // 5g. Update repo_forks and repo_updated_at (not in upsert SQL)
      db.prepare(
        'UPDATE resources SET repo_forks = ?, repo_updated_at = ?, quality_tier = ? WHERE id = ?'
      ).run(repoForks, repoUpdatedAt, 'community', resourceId);

      results.push({ name, type: item.type, id: resourceId });
      debugLog('INFO', 'importer', `Imported ${item.type}:${name} (id=${resourceId})`);
    } catch (err) {
      debugLog('ERROR', 'importer', `Failed to import ${item.name}: ${err.message}`);
      // Skip individual failures, continue with next
    }
  }

  // 6. Rebuild FTS5 index (FTS5 'rebuild' special command); best-effort only.
  try {
    db.exec("INSERT INTO resources_fts(resources_fts) VALUES('rebuild')");
  } catch (err) {
    debugLog('WARN', 'importer', `FTS rebuild failed: ${err.message}`);
  }

  // 7. Return imported resources
  return results;
}
|
package/registry.mjs
CHANGED
|
@@ -15,7 +15,7 @@ const RESOURCES_SCHEMA = `
|
|
|
15
15
|
type TEXT NOT NULL CHECK(type IN ('skill','agent')),
|
|
16
16
|
status TEXT NOT NULL DEFAULT 'active'
|
|
17
17
|
CHECK(status IN ('active','disabled','error','indexing')),
|
|
18
|
-
source TEXT NOT NULL CHECK(source IN ('preinstalled','user')),
|
|
18
|
+
source TEXT NOT NULL CHECK(source IN ('preinstalled','user','github')),
|
|
19
19
|
repo_url TEXT,
|
|
20
20
|
repo_stars INTEGER DEFAULT 0,
|
|
21
21
|
local_path TEXT NOT NULL,
|
|
@@ -47,7 +47,11 @@ const RESOURCES_SCHEMA = `
|
|
|
47
47
|
recommendation_mode TEXT DEFAULT 'proactive',
|
|
48
48
|
indexed_at TEXT,
|
|
49
49
|
created_at TEXT DEFAULT (datetime('now')),
|
|
50
|
-
updated_at TEXT DEFAULT (datetime('now'))
|
|
50
|
+
updated_at TEXT DEFAULT (datetime('now')),
|
|
51
|
+
enrichment_status TEXT DEFAULT NULL,
|
|
52
|
+
enriched_at INTEGER DEFAULT NULL,
|
|
53
|
+
repo_updated_at TEXT DEFAULT NULL,
|
|
54
|
+
repo_forks INTEGER DEFAULT 0
|
|
51
55
|
);
|
|
52
56
|
|
|
53
57
|
CREATE UNIQUE INDEX IF NOT EXISTS idx_res_type_name
|
|
@@ -182,10 +186,43 @@ export function ensureRegistryDb(dbPath) {
|
|
|
182
186
|
if (!resCols.has('quality_tier')) db.exec("ALTER TABLE resources ADD COLUMN quality_tier TEXT DEFAULT 'community'");
|
|
183
187
|
if (!resCols.has('popularity_score')) db.exec("ALTER TABLE resources ADD COLUMN popularity_score REAL DEFAULT 0");
|
|
184
188
|
if (!resCols.has('personal_score')) db.exec("ALTER TABLE resources ADD COLUMN personal_score REAL DEFAULT 0");
|
|
189
|
+
if (!resCols.has('enrichment_status')) db.exec("ALTER TABLE resources ADD COLUMN enrichment_status TEXT DEFAULT NULL");
|
|
190
|
+
if (!resCols.has('enriched_at')) db.exec("ALTER TABLE resources ADD COLUMN enriched_at INTEGER DEFAULT NULL");
|
|
191
|
+
if (!resCols.has('repo_updated_at')) db.exec("ALTER TABLE resources ADD COLUMN repo_updated_at TEXT DEFAULT NULL");
|
|
192
|
+
if (!resCols.has('repo_forks')) db.exec("ALTER TABLE resources ADD COLUMN repo_forks INTEGER DEFAULT 0");
|
|
185
193
|
// Auto-set quality_tier for installed preinstalled resources
|
|
186
194
|
db.exec("UPDATE resources SET quality_tier = 'installed' WHERE source = 'preinstalled' AND quality_tier = 'community'");
|
|
187
195
|
} catch (e) { debugCatch(e, 'resources-column-migration'); }
|
|
188
196
|
|
|
197
|
+
// Migrate: add 'github' to source CHECK constraint (required for smart import)
|
|
198
|
+
// Must disable FK checks during table recreation (RENAME triggers FK validation)
|
|
199
|
+
try {
|
|
200
|
+
const resSchema = db.prepare(`SELECT sql FROM sqlite_master WHERE type='table' AND name='resources'`).get();
|
|
201
|
+
if (resSchema?.sql && !resSchema.sql.includes("'github'")) {
|
|
202
|
+
db.pragma('foreign_keys = OFF');
|
|
203
|
+
try {
|
|
204
|
+
db.transaction(() => {
|
|
205
|
+
const hasOld = db.prepare(`SELECT 1 FROM sqlite_master WHERE type='table' AND name='resources_old'`).get();
|
|
206
|
+
if (hasOld) db.exec(`DROP TABLE resources_old`);
|
|
207
|
+
// Drop FTS triggers first (reference resources table)
|
|
208
|
+
db.exec(`DROP TRIGGER IF EXISTS res_fts_insert`);
|
|
209
|
+
db.exec(`DROP TRIGGER IF EXISTS res_fts_update`);
|
|
210
|
+
db.exec(`DROP TRIGGER IF EXISTS res_fts_delete`);
|
|
211
|
+
db.exec(`ALTER TABLE resources RENAME TO resources_old`);
|
|
212
|
+
db.exec(RESOURCES_SCHEMA);
|
|
213
|
+
// Copy all existing data
|
|
214
|
+
const cols = db.prepare("PRAGMA table_info(resources_old)").all().map(c => c.name);
|
|
215
|
+
const newCols = new Set(db.prepare("PRAGMA table_info(resources)").all().map(c => c.name));
|
|
216
|
+
const common = cols.filter(c => newCols.has(c)).join(', ');
|
|
217
|
+
db.exec(`INSERT INTO resources (${common}) SELECT ${common} FROM resources_old`);
|
|
218
|
+
db.exec(`DROP TABLE resources_old`);
|
|
219
|
+
})();
|
|
220
|
+
} finally {
|
|
221
|
+
db.pragma('foreign_keys = ON');
|
|
222
|
+
}
|
|
223
|
+
}
|
|
224
|
+
} catch (e) { debugCatch(e, 'resources-source-check-migration'); }
|
|
225
|
+
|
|
189
226
|
// FTS5: create if not exists
|
|
190
227
|
const hasFts = db.prepare(`SELECT 1 FROM sqlite_master WHERE type='table' AND name='resources_fts'`).get();
|
|
191
228
|
if (!hasFts) {
|
package/schema.mjs
CHANGED
|
@@ -13,7 +13,7 @@ export const DB_PATH = join(DB_DIR, 'claude-mem-lite.db');
|
|
|
13
13
|
export const REGISTRY_DB_PATH = join(DB_DIR, 'resource-registry.db');
|
|
14
14
|
|
|
15
15
|
// Increment when schema changes (tables, columns, indexes, FTS, migrations)
|
|
16
|
-
export const CURRENT_SCHEMA_VERSION =
|
|
16
|
+
export const CURRENT_SCHEMA_VERSION = 20;
|
|
17
17
|
|
|
18
18
|
const CORE_SCHEMA = `
|
|
19
19
|
CREATE TABLE IF NOT EXISTS sdk_sessions (
|
|
@@ -173,11 +173,11 @@ export function initSchema(db) {
|
|
|
173
173
|
db.exec(`CREATE INDEX IF NOT EXISTS idx_sessions_project ON sdk_sessions(project)`);
|
|
174
174
|
db.exec(`CREATE INDEX IF NOT EXISTS idx_obs_not_compressed ON observations(created_at_epoch DESC) WHERE COALESCE(compressed_into, 0) = 0`);
|
|
175
175
|
|
|
176
|
-
// FTS5 migration:
|
|
177
|
-
// Detect old FTS5 table missing lesson_learned and recreate with full column set
|
|
176
|
+
// FTS5 migration: recreate observations_fts when columns are missing (one-time)
|
|
177
|
+
// Detect old FTS5 table missing lesson_learned or search_aliases and recreate with full column set
|
|
178
178
|
try {
|
|
179
179
|
const ftsDdl = db.prepare(`SELECT sql FROM sqlite_master WHERE type='table' AND name='observations_fts'`).get();
|
|
180
|
-
if (ftsDdl && !ftsDdl.sql.includes('lesson_learned')) {
|
|
180
|
+
if (ftsDdl && (!ftsDdl.sql.includes('lesson_learned') || !ftsDdl.sql.includes('search_aliases'))) {
|
|
181
181
|
db.exec(`DROP TRIGGER IF EXISTS observations_ai`);
|
|
182
182
|
db.exec(`DROP TRIGGER IF EXISTS observations_ad`);
|
|
183
183
|
db.exec(`DROP TRIGGER IF EXISTS observations_au`);
|
|
@@ -284,7 +284,7 @@ export function initSchema(db) {
|
|
|
284
284
|
// Strategy 2: substring match for aliases (e.g., "claude-mem-lite" → match project containing "mem")
|
|
285
285
|
// Extract the most distinctive token from the short name for fuzzy matching
|
|
286
286
|
if (!canonical) {
|
|
287
|
-
const tokens = shortName.split(/[-_.]/).filter(t => t.length >=
|
|
287
|
+
const tokens = shortName.split(/[-_.]/).filter(t => t.length >= 5);
|
|
288
288
|
for (const token of tokens) {
|
|
289
289
|
canonical = db.prepare(
|
|
290
290
|
`SELECT project FROM observations WHERE project LIKE ? AND project LIKE '%--_%'
|
package/scoring-sql.mjs
CHANGED
|
@@ -18,14 +18,14 @@ export const DEFAULT_DECAY_HALF_LIFE_MS = 14 * 86400000;
|
|
|
18
18
|
// Single source of truth for FTS5 BM25 weight expressions.
// Column order must match ensureFTS() calls in schema.mjs.

/** observations_fts BM25 weights: title=10, subtitle=5, narrative=5, text=3, facts=3, concepts=2, lesson_learned=8, search_aliases=5 */
export const OBS_BM25 = 'bm25(observations_fts, 10, 5, 5, 3, 3, 2, 8, 5)';

/** session_summaries_fts BM25 weights: request=5, investigated=3, learned=3, completed=3, next_steps=2, notes=1, remaining_items=1 */
export const SESS_BM25 = 'bm25(session_summaries_fts, 5, 3, 3, 3, 2, 1, 1)';

/** FTS5 columns for observations (must match BM25 weight order: 8 columns ↔ 8 weights in OBS_BM25) */
export const OBS_FTS_COLUMNS = ['title', 'subtitle', 'narrative', 'text', 'facts', 'concepts', 'lesson_learned', 'search_aliases'];
|
|
29
29
|
|
|
30
30
|
/** SQL CASE for type-differentiated recency decay half-lives (milliseconds) */
|
|
31
31
|
export const TYPE_DECAY_CASE = `(
|
package/scripts/pre-skill-bridge.js
CHANGED
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
#!/usr/bin/env node
// claude-mem-lite: PreToolUse Skill bridge — loads managed skills from registry
// Intercepts Skill("name") calls for skills in ~/.claude-mem-lite/managed/
// Lightweight standalone (~30ms): only imports better-sqlite3, fs, path, os

import { existsSync, readFileSync } from 'fs';
import { join, resolve, sep } from 'path';
import { homedir } from 'os';

const REGISTRY_DB_PATH = join(homedir(), '.claude-mem-lite', 'resource-registry.db');
// Confinement root: only files under ~/.claude-mem-lite/managed/ may be served.
// FIX: previously this confined to ~/.claude-mem-lite (the whole data dir), so
// a local_path like '.../managed/../registry-notes.md' passed both the LIKE
// filter and the resolve() check while escaping the managed directory.
const MANAGED_DIR = join(homedir(), '.claude-mem-lite', 'managed');
// SQL LIKE pre-filter on local_path. NOTE(review): uses '/' separators —
// assumes stored paths are POSIX-style; confirm behavior on Windows.
const MANAGED_MARKER = '/.claude-mem-lite/managed/';

try {
  // Skip if recursive hook (avoid re-entrancy when our own tooling runs)
  if (process.env.CLAUDE_MEM_HOOK_RUNNING) process.exit(0);

  // Read the hook event JSON from stdin
  let input = '';
  for await (const chunk of process.stdin) input += chunk;

  // Parse event; malformed input → exit silently (never block the Skill tool)
  let skillName;
  try {
    const event = JSON.parse(input);
    skillName = event.tool_input?.skill;
  } catch { process.exit(0); }

  if (!skillName || typeof skillName !== 'string') process.exit(0);

  // Skip if registry DB doesn't exist
  if (!existsSync(REGISTRY_DB_PATH)) process.exit(0);

  // Open DB readonly; short busy timeout so a concurrent writer can't stall us
  const Database = (await import('better-sqlite3')).default;
  let db;
  try {
    db = new Database(REGISTRY_DB_PATH, { readonly: true });
    db.pragma('busy_timeout = 1000');
  } catch { process.exit(0); }

  try {
    // Query: find by name or invocation_name, ONLY if managed path
    const row = db.prepare(`
      SELECT name, local_path FROM resources
      WHERE status = 'active'
      AND (name = ? OR invocation_name = ?)
      AND local_path LIKE ?
      LIMIT 1
    `).get(skillName, skillName, `%${MANAGED_MARKER}%`);

    if (!row || !row.local_path) process.exit(0);

    // Resolve path: directory skills → SKILL.md (agents always have full .md paths)
    let skillPath = row.local_path;
    if (!skillPath.endsWith('.md')) {
      const candidate = join(skillPath, 'SKILL.md');
      if (existsSync(candidate)) skillPath = candidate;
    }

    if (!existsSync(skillPath)) process.exit(0);

    // Path confinement check — prevent LIKE bypass via '../' in local_path.
    // Confine strictly to the managed directory itself, not the whole data dir.
    const resolvedPath = resolve(skillPath);
    if (resolvedPath !== MANAGED_DIR && !resolvedPath.startsWith(MANAGED_DIR + sep)) process.exit(0);

    // Read and output
    const content = readFileSync(skillPath, 'utf8');
    // Token budget: ~4 chars per token, 4000 token limit = 16000 chars
    if (content.length > 16000) {
      const summary = content.slice(0, 800);
      console.log(`<skill-bridge name="${row.name}" source="managed" truncated="true">\n${summary}\n...\n</skill-bridge>\n\nSkill content truncated. Use mem_use(name="${row.name}") to load full content.`);
    } else {
      console.log(`<skill-bridge name="${row.name}" source="managed">\n${content}\n</skill-bridge>\n\nThis skill was loaded from the managed registry. Follow the instructions above.`);
    }
  } catch {
    // Silent failure — never block Skill tool
  } finally {
    try { db.close(); } catch {}
  }
} catch {
  // Top-level catch — exit 0 no matter what
}
|
|
@@ -34,10 +34,25 @@ export const INTENTS = [
|
|
|
34
34
|
];
|
|
35
35
|
|
|
36
36
|
/**
 * Detect the dominant intent of a prompt against the INTENTS table.
 * Returns the matching intent object, or null when nothing matches.
 * @param {string} text User prompt
 * @returns {object|null}
 */
export function detectIntent(text) {
  // Gather every intent whose pattern hits — patterns may overlap.
  const hits = INTENTS.filter((candidate) => candidate.pattern.test(text));
  if (hits.length === 0) return null;

  if (hits.length > 1) {
    // Disambiguation: specifically when bugfix and recall both match, use
    // position-based resolution — the pattern appearing earlier in text wins.
    // "I remember we fixed..." → recall leads. "fix the bug from before" → bugfix leads.
    const [primary, runnerUp] = hits;
    if (primary.type === 'bugfix' && runnerUp.useRecent
        && text.search(runnerUp.pattern) < text.search(primary.pattern)) {
      return runnerUp;
    }
  }
  return hits[0];
}
|
|
42
57
|
|
|
43
58
|
// ─── Result Dedup ───────────────────────────────────────────────────────────
|
|
@@ -65,6 +80,36 @@ export function shouldSkipByDedup(newIds, injectedFile) {
|
|
|
65
80
|
} catch { return false; }
|
|
66
81
|
}
|
|
67
82
|
|
|
83
|
+
// ─── Registry Skill Name Matching ───────────────────────────────────────────

/**
 * Check if prompt text contains a known managed skill name.
 * Returns the matched name or null.
 * Names are matched longest-first so "code-review-expert" wins over
 * "code-review", and only on word boundaries (the characters immediately
 * before and after the match must be non-alphanumeric or string edges).
 * FIX: scans EVERY occurrence of each name, not just the first — previously
 * "recode-review then code-review" returned null because the embedded first
 * occurrence failed the boundary test and later occurrences were never tried.
 * @param {string} text - user prompt
 * @param {Set<string>} skillNames - set of known managed skill names (lowercase)
 * @returns {string|null}
 */
export function matchRegistrySkillName(text, skillNames) {
  if (!text || skillNames.size === 0) return null;
  const lower = text.toLowerCase();

  // Sort names longest-first to match "code-review-expert" before "code-review"
  const sorted = [...skillNames].sort((a, b) => b.length - a.length);

  for (const name of sorted) {
    // Walk every occurrence until one sits on clean word boundaries.
    let idx = lower.indexOf(name);
    while (idx !== -1) {
      const before = idx === 0 ? ' ' : lower[idx - 1];
      const end = idx + name.length;
      const after = end >= lower.length ? ' ' : lower[end];
      if (!/[a-z0-9]/.test(before) && !/[a-z0-9]/.test(after)) return name;
      idx = lower.indexOf(name, idx + 1);
    }
  }
  return null;
}
|
|
112
|
+
|
|
68
113
|
// ─── File Path Detection ─────────────────────────────────────────────────────
|
|
69
114
|
|
|
70
115
|
/** Detect file paths in text */
|