smart-context-mcp 1.18.0 → 1.19.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +24 -13
- package/package.json +1 -1
- package/server.json +2 -2
- package/src/embeddings/embedder.js +28 -0
- package/src/embeddings/hashing.js +77 -0
- package/src/embeddings/index.js +91 -0
- package/src/embeddings/tokenize.js +46 -0
- package/src/global-memory/scrub.js +46 -0
- package/src/global-memory/store.js +275 -0
- package/src/index-watcher.js +224 -0
- package/src/index.js +91 -9
- package/src/orchestration/adapters/claude-adapter.js +26 -0
- package/src/orchestration/adapters/cursor-adapter.js +26 -0
- package/src/orchestration/policy/soft-prompts.js +97 -0
- package/src/parsers/registry.js +26 -0
- package/src/playbooks/builtin/debug-flake.yaml +17 -0
- package/src/playbooks/builtin/doc-sync.yaml +16 -0
- package/src/playbooks/builtin/preflight-merge.yaml +20 -0
- package/src/playbooks/builtin/ramp-up.yaml +14 -0
- package/src/playbooks/builtin/refactor-safe.yaml +18 -0
- package/src/playbooks/loader.js +123 -0
- package/src/playbooks/runner.js +182 -0
- package/src/playbooks/yaml-mini.js +162 -0
- package/src/server.js +54 -3
- package/src/tools/global-memory.js +99 -0
- package/src/tools/smart-playbook.js +63 -0
- package/src/tools/smart-search.js +30 -1
- package/src/tools/smart-status.js +13 -0
- package/src/turn/next-actions.js +4 -1
package/README.md
CHANGED
|
@@ -56,7 +56,7 @@ Restart your AI client. Done.
|
|
|
56
56
|
# Check installed version
|
|
57
57
|
npm list -g smart-context-mcp
|
|
58
58
|
|
|
59
|
-
# Should show: smart-context-mcp@1.
|
|
59
|
+
# Should show: smart-context-mcp@1.19.0 (or later)
|
|
60
60
|
|
|
61
61
|
# Update to latest version
|
|
62
62
|
npm update -g smart-context-mcp
|
|
@@ -278,22 +278,30 @@ Check actual usage:
|
|
|
278
278
|
|
|
279
279
|
Provides **two key components**:
|
|
280
280
|
|
|
281
|
-
### 1. Specialized Tools (
|
|
281
|
+
### 1. Specialized Tools (20 tools)
|
|
282
282
|
|
|
283
283
|
| Tool | Purpose | Savings |
|
|
284
284
|
|------|---------|---------|
|
|
285
|
-
| `smart_read` | Read files in outline/signatures mode | 90% |
|
|
285
|
+
| `smart_read` | Read files in outline / signatures / symbol / explain mode | 90% |
|
|
286
286
|
| `smart_read_batch` | Read multiple files in one call | 90% |
|
|
287
|
-
| `smart_search` | Intent-aware code search with ranking | 95% |
|
|
288
|
-
| `smart_context` | One-call context builder | 85% |
|
|
289
|
-
| `
|
|
290
|
-
| `
|
|
291
|
-
| `
|
|
292
|
-
| `
|
|
293
|
-
| `build_index` | Symbol index builder | - |
|
|
287
|
+
| `smart_search` | Intent-aware code search with ranking and `kinds` filter (incl. ADRs) | 95% |
|
|
288
|
+
| `smart_context` | One-call context builder; `paths: { from, to }` mode traverses the import graph | 85% |
|
|
289
|
+
| `smart_shell` | Safe command execution (TAP / git-log / diff compression) | 94% |
|
|
290
|
+
| `smart_test` | Affected tests via graph + sandboxed runner + persisted `last_failure` | - |
|
|
291
|
+
| `smart_review` | One-call review preflight: diff + callers + tests + heuristic findings | - |
|
|
292
|
+
| `build_index` | Symbol index builder (incremental) | - |
|
|
294
293
|
| `warm_cache` | File preloading (5x faster cold start) | - |
|
|
295
294
|
| `git_blame` | Function-level code attribution | - |
|
|
296
295
|
| `cross_project` | Multi-project context | - |
|
|
296
|
+
| `smart_summary` | Task checkpoint management with rolling window | 98% |
|
|
297
|
+
| `smart_status` | Quick session / project state inspection | - |
|
|
298
|
+
| `smart_doctor` | Health checks for storage, index, hooks | - |
|
|
299
|
+
| `smart_edit` | Targeted symbol-aware edits | - |
|
|
300
|
+
| `smart_turn` | Turn boundary + `nextActions[]` machine-readable plan | - |
|
|
301
|
+
| `smart_resume` | Lightweight alias for `smart_turn(phase: 'start', verbosity: 'minimal')` | - |
|
|
302
|
+
| `smart_metrics` | Token usage inspection | - |
|
|
303
|
+
| `smart_playbook` | Declarative workflows: composes `smart_*` tools in one call (preflight-merge, debug-flake, refactor-safe, doc-sync, ramp-up) | - |
|
|
304
|
+
| `global_memory` | Cross-project memory in `~/.devctx/global.db` (opt-in, scrubbed for secrets, semantic recall) | - |
|
|
297
305
|
|
|
298
306
|
### 2. Agent Rules (Task-Specific Guidance)
|
|
299
307
|
|
|
@@ -646,9 +654,12 @@ npm run verify
|
|
|
646
654
|
## Storage
|
|
647
655
|
|
|
648
656
|
Data stored in `.devctx/`:
|
|
649
|
-
- `index.json` - Symbol index
|
|
650
|
-
- `state.sqlite` -
|
|
651
|
-
- `metrics.jsonl` -
|
|
657
|
+
- `index.json` - Symbol index (`INDEX_VERSION 7`: ADR + ADR sections, richer Python/Go)
|
|
658
|
+
- `state.sqlite` - Sessions, metrics, patterns, task handoffs, test failures, explain cache (Node 22+)
|
|
659
|
+
- `metrics.jsonl` - Opt-in legacy file, only when `DEVCTX_METRICS_FILE=path.jsonl` is set
|
|
660
|
+
|
|
661
|
+
Cross-project (opt-in via `DEVCTX_GLOBAL_MEMORY=true`):
|
|
662
|
+
- `~/.devctx/global.db` - Scrubbed decisions, patterns, playbooks, notes with semantic recall
|
|
652
663
|
|
|
653
664
|
Add to `.gitignore`:
|
|
654
665
|
```
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "smart-context-mcp",
|
|
3
3
|
"mcpName": "io.github.Arrayo/smart-context-mcp",
|
|
4
|
-
"version": "1.
|
|
4
|
+
"version": "1.19.0",
|
|
5
5
|
"description": "MCP server that reduces agent token usage by 90% with intelligent context compression, task checkpoint persistence, and workflow-aware agent guidance.",
|
|
6
6
|
"author": "Francisco Caballero Portero <fcp1978@hotmail.com>",
|
|
7
7
|
"type": "module",
|
package/server.json
CHANGED
|
@@ -6,12 +6,12 @@
|
|
|
6
6
|
"url": "https://github.com/Arrayo/smart-context-mcp",
|
|
7
7
|
"source": "github"
|
|
8
8
|
},
|
|
9
|
-
"version": "1.
|
|
9
|
+
"version": "1.19.0",
|
|
10
10
|
"packages": [
|
|
11
11
|
{
|
|
12
12
|
"registryType": "npm",
|
|
13
13
|
"identifier": "smart-context-mcp",
|
|
14
|
-
"version": "1.
|
|
14
|
+
"version": "1.19.0",
|
|
15
15
|
"transport": {
|
|
16
16
|
"type": "stdio"
|
|
17
17
|
},
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import { embed as hashingEmbed, cosineSimilarity, buildCorpusIdf, DEFAULT_DIMENSIONS } from './hashing.js';
|
|
2
|
+
|
|
3
|
+
// Built-in embedder backed by the deterministic hashing implementation.
// Kept as one shared object so resetEmbedder() restores the exact same
// reference the module started with.
const HASHING_EMBEDDER = {
  id: 'hashing-v1',
  dimensions: DEFAULT_DIMENSIONS,
  embed: (text, options = {}) => hashingEmbed(text, options),
  similarity: cosineSimilarity,
  buildCorpusIdf,
};

// Mutable module-level slot for whichever embedder is currently active.
let activeEmbedder = HASHING_EMBEDDER;

/** Returns the embedder currently in use. */
export const getEmbedder = () => activeEmbedder;

/**
 * Swaps in a custom embedder.
 *
 * The replacement must provide embed() and similarity(); missing optional
 * fields (id, dimensions, buildCorpusIdf) fall back to the hashing defaults.
 * Throws when the required functions are absent.
 */
export const setEmbedder = (embedder) => {
  const hasRequiredApi =
    embedder &&
    typeof embedder.embed === 'function' &&
    typeof embedder.similarity === 'function';
  if (!hasRequiredApi) {
    throw new Error('Embedder must implement embed(text, opts) and similarity(a, b)');
  }
  activeEmbedder = {
    id: embedder.id ?? 'custom',
    dimensions: embedder.dimensions ?? DEFAULT_DIMENSIONS,
    embed: embedder.embed,
    similarity: embedder.similarity,
    buildCorpusIdf: embedder.buildCorpusIdf ?? buildCorpusIdf,
  };
};

/** Restores the default hashing embedder. */
export const resetEmbedder = () => {
  activeEmbedder = HASHING_EMBEDDER;
};
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import { tokenize } from './tokenize.js';
|
|
2
|
+
|
|
3
|
+
// Fixed dimensionality of hashing-trick embedding vectors.
export const DEFAULT_DIMENSIONS = 256;

/**
 * 32-bit FNV-1a hash over UTF-16 code units of `str`.
 * Math.imul + >>> 0 keep the multiply-and-wrap in unsigned 32-bit space.
 */
const fnv1a = (str) => {
  let acc = 0x811c9dc5;
  let i = 0;
  while (i < str.length) {
    acc ^= str.charCodeAt(i);
    acc = Math.imul(acc, 0x01000193) >>> 0;
    i += 1;
  }
  return acc;
};

/**
 * Maps a token to a bucket index in [0, dims) plus a +/-1 sign.
 * The sign bit is taken from an independent part of the hash (bit 16) so
 * sign and bucket are decorrelated — standard signed feature hashing.
 */
const signedBucket = (token, dims) => {
  const h = fnv1a(token);
  return {
    bucket: h % dims,
    sign: ((h >>> 16) & 1) === 0 ? 1 : -1,
  };
};

/**
 * Normalizes `vector` to unit L2 length in place and returns it.
 * An all-zero vector is returned untouched (avoids division by zero).
 */
const l2Normalize = (vector) => {
  const sumSq = vector.reduce((sum, value) => sum + value * value, 0);
  if (sumSq === 0) return vector;
  const norm = Math.sqrt(sumSq);
  for (let i = 0; i < vector.length; i += 1) {
    vector[i] /= norm;
  }
  return vector;
};
|
|
29
|
+
|
|
30
|
+
/**
 * Embeds `text` as an L2-normalized Float32Array via the hashing trick:
 * each token lands in a signed bucket weighted by log-scaled term frequency,
 * optionally multiplied by a caller-supplied IDF map (tokens missing from the
 * map implicitly weigh 1). Returns an all-zero vector when no tokens remain.
 */
export const embed = (text, { dimensions = DEFAULT_DIMENSIONS, idf = null } = {}) => {
  const vec = new Float32Array(dimensions);
  const tokens = tokenize(text);
  if (tokens.length === 0) return vec;

  // Term frequencies in first-occurrence order; Map preserves insertion
  // order, which keeps the float accumulation below deterministic.
  const tf = new Map();
  tokens.forEach((tok) => {
    tf.set(tok, (tf.get(tok) ?? 0) + 1);
  });

  tf.forEach((count, tok) => {
    const scaled = 1 + Math.log(count);
    const weight = idf ? scaled * (idf.get(tok) ?? 1) : scaled;
    const { bucket, sign } = signedBucket(tok, dimensions);
    vec[bucket] += sign * weight;
  });

  return l2Normalize(vec);
};
|
|
49
|
+
|
|
50
|
+
/**
 * Dot product of two equal-length vectors. Because embed() L2-normalizes its
 * output, the dot product of two embeddings equals their cosine similarity.
 * Returns 0 for missing or length-mismatched inputs.
 */
export const cosineSimilarity = (a, b) => {
  if (!a || !b) return 0;
  if (a.length !== b.length) return 0;
  let acc = 0;
  for (let i = 0; i < a.length; ++i) {
    acc += a[i] * b[i];
  }
  return acc;
};
|
|
56
|
+
|
|
57
|
+
/**
 * Builds a smoothed inverse-document-frequency map over an iterable of
 * documents: idf = ln((N + 1) / (df + 1)) + 1. Tokens absent from the map
 * implicitly weigh 1 when consumed by embed(); frequent tokens weigh less.
 */
export const buildCorpusIdf = (documents) => {
  const docFreq = new Map();
  let total = 0;
  for (const doc of documents) {
    total += 1;
    // Set dedupes per-document tokens while preserving first-occurrence order.
    for (const token of new Set(tokenize(doc))) {
      docFreq.set(token, (docFreq.get(token) ?? 0) + 1);
    }
  }
  const idf = new Map();
  docFreq.forEach((freq, token) => {
    idf.set(token, Math.log((total + 1) / (freq + 1)) + 1);
  });
  return idf;
};

// Exposed for unit tests only.
export const _internal = { fnv1a, signedBucket, l2Normalize };
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
import { getEmbedder } from './embedder.js';
|
|
2
|
+
|
|
3
|
+
/**
 * Flattens a symbol record into a space-joined searchable string:
 * name, then any of parent/kind/signature/snippet that are present, then
 * decorators, then the path split into segments (backslashes normalized).
 */
const symbolToText = (symbol, filePath) => {
  const { name, parent, kind, signature, snippet, decorators } = symbol;
  const words = [name];
  for (const field of [parent, kind, signature, snippet]) {
    if (field) words.push(field);
  }
  if (Array.isArray(decorators)) words.push(...decorators);
  if (filePath) {
    words.push(...filePath.replace(/\\/g, '/').split('/'));
  }
  return words.join(' ');
};

/**
 * Flattens a file entry into a searchable string: the relative path followed
 * by each symbol's name and (when present) signature.
 */
const fileToText = (relPath, fileInfo) => {
  const words = [relPath];
  const symbols = fileInfo?.symbols ?? [];
  symbols.forEach((sym) => {
    words.push(sym.name);
    if (sym.signature) words.push(sym.signature);
  });
  return words.join(' ');
};
|
|
26
|
+
|
|
27
|
+
/**
 * Builds an IDF map over the whole index: one document per file plus one
 * per symbol, in file order (file text first, then its symbols).
 */
export const buildIndexCorpusIdf = (index) => {
  const corpus = Object.entries(index?.files ?? {}).flatMap(([relPath, fileInfo]) => [
    fileToText(relPath, fileInfo),
    ...(fileInfo.symbols ?? []).map((symbol) => symbolToText(symbol, relPath)),
  ]);
  return getEmbedder().buildCorpusIdf(corpus);
};

/** Embeds a free-text query with the active embedder. */
export const embedQuery = (query, options = {}) => getEmbedder().embed(query, options);

/** Embeds a file's searchable text (path + symbol names/signatures). */
export const embedFile = (relPath, fileInfo, options = {}) =>
  getEmbedder().embed(fileToText(relPath, fileInfo), options);

/** Embeds a single symbol's searchable text. */
export const embedSymbol = (symbol, relPath, options = {}) =>
  getEmbedder().embed(symbolToText(symbol, relPath), options);
|
|
53
|
+
|
|
54
|
+
/**
 * Ranks every symbol in the index against `query` by embedding similarity.
 * Only strictly positive scores are kept; results come back best-first,
 * capped at `limit`. Returns [] when query or index is missing.
 */
export const semanticRankSymbols = ({ query, index, limit = 10, idf = null }) => {
  if (!query || !index) return [];
  const embedder = getEmbedder();
  const queryVec = embedder.embed(query, { idf });

  const scored = [];
  for (const [relPath, fileInfo] of Object.entries(index.files ?? {})) {
    for (const symbol of fileInfo.symbols ?? []) {
      const symbolVec = embedder.embed(symbolToText(symbol, relPath), { idf });
      const score = embedder.similarity(queryVec, symbolVec);
      if (score > 0) scored.push({ score, path: relPath, symbol });
    }
  }
  return scored.sort((a, b) => b.score - a.score).slice(0, limit);
};

/**
 * Ranks whole files in the index against `query` by embedding similarity.
 * Mirrors semanticRankSymbols but scores one document per file and reports
 * each file's symbol count alongside the score.
 */
export const semanticRankFiles = ({ query, index, limit = 10, idf = null }) => {
  if (!query || !index) return [];
  const embedder = getEmbedder();
  const queryVec = embedder.embed(query, { idf });

  const scored = [];
  for (const [relPath, fileInfo] of Object.entries(index.files ?? {})) {
    const fileVec = embedder.embed(fileToText(relPath, fileInfo), { idf });
    const score = embedder.similarity(queryVec, fileVec);
    if (score > 0) {
      scored.push({ score, path: relPath, symbolCount: fileInfo.symbols?.length ?? 0 });
    }
  }
  return scored.sort((a, b) => b.score - a.score).slice(0, limit);
};
|
|
87
|
+
|
|
88
|
+
export { getEmbedder, setEmbedder, resetEmbedder } from './embedder.js';
|
|
89
|
+
export { tokenize } from './tokenize.js';
|
|
90
|
+
export { embed, cosineSimilarity, buildCorpusIdf, DEFAULT_DIMENSIONS } from './hashing.js';
|
|
91
|
+
export const _internal = { symbolToText, fileToText };
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
// Words carrying little retrieval signal; dropped during tokenization.
// (Fix: the original literal listed 'this' twice — harmless in a Set, but
// redundant; the duplicate is removed.)
const STOP_WORDS = new Set([
  'the', 'a', 'an', 'and', 'or', 'but', 'is', 'in', 'on', 'at', 'to', 'for', 'of', 'with',
  'as', 'by', 'from', 'this', 'that', 'these', 'those', 'it', 'its', 'be', 'been', 'being',
  'are', 'was', 'were', 'has', 'have', 'had', 'do', 'does', 'did', 'will', 'would', 'should',
  'can', 'could', 'may', 'might', 'must', 'shall', 'not', 'no', 'yes', 'if', 'else', 'then',
  'return', 'true', 'false', 'null', 'undefined', 'self', 'use', 'using', 'used', 'via',
]);

// lower/digit followed by upper — camelCase boundary.
const CAMEL_RE = /([a-z0-9])([A-Z])/g;
// snake_case / kebab-case separators.
const SNAKE_RE = /[_-]+/g;
// Anything that is not an identifier character.
const NON_WORD_RE = /[^A-Za-z0-9_]+/g;
// Pure numeric tokens are discarded.
const NUMBER_RE = /^\d+$/;

/**
 * Expands one identifier into its lowercase forms: the whole identifier plus
 * its camelCase/snake_case parts. The whole-token form is always kept so
 * exact-name queries still match; parts are filtered for stop words, pure
 * numbers, and single characters. Returns tokens in first-seen order.
 */
const splitIdentifier = (token) => {
  if (!token) return [];
  const camelExpanded = token.replace(CAMEL_RE, '$1 $2');
  const parts = camelExpanded.replace(SNAKE_RE, ' ').split(/\s+/).filter(Boolean);
  const out = new Set();
  out.add(token.toLowerCase());
  for (const part of parts) {
    const lower = part.toLowerCase();
    if (!lower || NUMBER_RE.test(lower)) continue;
    if (STOP_WORDS.has(lower)) continue;
    if (lower.length < 2) continue;
    out.add(lower);
  }
  return [...out];
};

/**
 * Tokenizes arbitrary text for embedding: strips non-word characters, drops
 * pure numbers, expands identifiers, and filters stop words and 1-char
 * tokens. Non-string input yields []. Note the stop-word/length filter also
 * applies to the whole-identifier form added by splitIdentifier.
 */
export const tokenize = (text) => {
  if (!text || typeof text !== 'string') return [];
  const raw = text.replace(NON_WORD_RE, ' ').split(/\s+/).filter(Boolean);
  const out = [];
  for (const token of raw) {
    if (NUMBER_RE.test(token)) continue;
    const expanded = splitIdentifier(token);
    for (const part of expanded) {
      if (!STOP_WORDS.has(part) && part.length >= 2) {
        out.push(part);
      }
    }
  }
  return out;
};

// Exposed for unit tests only.
export const _internal = { STOP_WORDS, splitIdentifier };
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
// Patterns identifying likely secrets / PII. Each whole match is replaced
// with REDACTION, so a leaked `key = value` pair loses both key and value.
const SECRET_PATTERNS = [
  // generic `key: value` / `key = value` credential assignments
  /(api[_-]?key|secret|token|password|passwd|pwd|bearer|authorization)\s*[:=]\s*['"]?([A-Za-z0-9_\-+/=]{8,})['"]?/gi,
  /(aws_access_key_id|aws_secret_access_key)\s*[:=]\s*['"]?([A-Za-z0-9_\-+/=]{8,})['"]?/gi,
  /sk-[A-Za-z0-9]{20,}/g, // OpenAI-style keys
  /ghp_[A-Za-z0-9]{20,}/g, // GitHub personal access tokens
  /xoxb-[A-Za-z0-9-]{20,}/g, // Slack bot tokens
  /AIza[A-Za-z0-9_-]{30,}/g, // Google API keys
  /eyJ[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+/g, // JWTs
  /-----BEGIN [A-Z ]+PRIVATE KEY-----[\s\S]+?-----END [A-Z ]+PRIVATE KEY-----/g,
  /(?:postgres|postgresql|mysql|mongodb|redis):\/\/[^\s'"]+/gi, // connection URLs (may embed credentials)
  /[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}/g, // email addresses (PII)
];

// Home-directory prefixes are collapsed to `~` so stored text does not
// leak local usernames.
const HOME_PATH_RE = /\/home\/[^/\s'"]+/g;
const USER_PATH_RE = /\/Users\/[^/\s'"]+/g;
// Fix: generalized from the original C:-only pattern — any drive letter
// now matches (D:\Users\..., etc.).
const WIN_USER_RE = /[A-Za-z]:\\Users\\[^\\/\s'"]+/g;

const REDACTION = '[REDACTED]';

/**
 * Returns `content` with secret-looking spans replaced by [REDACTED] and
 * home-directory prefixes collapsed to `~`. Non-string input is returned
 * unchanged. Safe to call repeatedly (idempotent on already-scrubbed text).
 */
export const scrubContent = (content) => {
  if (typeof content !== 'string') return content;
  let cleaned = content;
  for (const pattern of SECRET_PATTERNS) {
    cleaned = cleaned.replace(pattern, REDACTION);
  }
  // Path anonymization runs after secret redaction.
  cleaned = cleaned.replace(HOME_PATH_RE, '~').replace(USER_PATH_RE, '~').replace(WIN_USER_RE, '~');
  return cleaned;
};

/**
 * True when any secret pattern matches `content`. Path patterns are NOT
 * consulted — a home path alone is not treated as a secret. lastIndex is
 * reset before each test because the /g regexes are stateful.
 */
export const containsLikelySecret = (content) => {
  if (typeof content !== 'string') return false;
  return SECRET_PATTERNS.some((re) => {
    re.lastIndex = 0;
    return re.test(content);
  });
};

/**
 * Deterministic FNV-1a hash of an absolute project path, as `proj-<base36>`.
 * Lets entries be grouped per project without storing the raw path.
 * (FNV is duplicated here on purpose to keep this module dependency-free.)
 * Returns null for missing/non-string input.
 */
export const hashProjectPath = (absolutePath) => {
  if (!absolutePath || typeof absolutePath !== 'string') return null;
  let hash = 0x811c9dc5;
  for (let i = 0; i < absolutePath.length; i += 1) {
    hash ^= absolutePath.charCodeAt(i);
    hash = Math.imul(hash, 0x01000193) >>> 0;
  }
  return `proj-${hash.toString(36)}`;
};
|
|
@@ -0,0 +1,275 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import os from 'node:os';
|
|
4
|
+
import { scrubContent, hashProjectPath } from './scrub.js';
|
|
5
|
+
import { embed, cosineSimilarity, buildCorpusIdf } from '../embeddings/hashing.js';
|
|
6
|
+
|
|
7
|
+
// Default on-disk location: ~/.devctx/global.db (overridable via DEVCTX_GLOBAL_DB).
const DEFAULT_GLOBAL_DIR = path.join(os.homedir(), '.devctx');
const DEFAULT_GLOBAL_DB = path.join(DEFAULT_GLOBAL_DIR, 'global.db');
const SCHEMA_VERSION = 1;

// node:sqlite is imported lazily; the (possibly rejected) promise is cached,
// so on older Node the same error resurfaces on every call without retrying.
let sqliteModulePromise = null;

const loadSqliteModule = async () => {
  sqliteModulePromise ??= import('node:sqlite').catch(() => {
    throw new Error('Global memory requires Node 22+ (node:sqlite)');
  });
  return sqliteModulePromise;
};

/** Resolves the database path, honoring a non-empty DEVCTX_GLOBAL_DB override. */
export const getGlobalDbPath = () => {
  const override = process.env.DEVCTX_GLOBAL_DB?.trim();
  return override ? override : DEFAULT_GLOBAL_DB;
};

// Accepted truthy spellings for the opt-in flag.
const ENABLED_VALUES = new Set(['1', 'true', 'yes', 'on']);

/** True only when the user explicitly opted in via DEVCTX_GLOBAL_MEMORY. */
export const isGlobalMemoryEnabled = () => {
  const flag = String(process.env.DEVCTX_GLOBAL_MEMORY ?? '').trim().toLowerCase();
  return ENABLED_VALUES.has(flag);
};

/** Creates the parent directory of `filePath` if it does not exist yet. */
const ensureDir = (filePath) => {
  const parent = path.dirname(filePath);
  if (!fs.existsSync(parent)) {
    fs.mkdirSync(parent, { recursive: true });
  }
};
|
|
37
|
+
|
|
38
|
+
// Schema for the global-memory database. `meta` tracks schema_version so a
// future release can migrate; `entries` holds one row per remembered item.
// Executed via db.exec() on every writable open — CREATE ... IF NOT EXISTS
// keeps that idempotent. (SQL text is runtime data; do not reformat.)
const SCHEMA_SQL = `
CREATE TABLE IF NOT EXISTS meta (
  key TEXT PRIMARY KEY,
  value TEXT NOT NULL
);

CREATE TABLE IF NOT EXISTS entries (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  kind TEXT NOT NULL,
  content TEXT NOT NULL,
  tags TEXT,
  project_hash TEXT,
  created_at INTEGER NOT NULL,
  updated_at INTEGER NOT NULL,
  usage_count INTEGER NOT NULL DEFAULT 0,
  last_used_at INTEGER
);

CREATE INDEX IF NOT EXISTS idx_entries_kind ON entries(kind);
CREATE INDEX IF NOT EXISTS idx_entries_project ON entries(project_hash);
CREATE INDEX IF NOT EXISTS idx_entries_created ON entries(created_at DESC);
`;

// The only entry kinds saveEntry() accepts (re-exported as VALID_GLOBAL_KINDS).
const VALID_KINDS = new Set(['decision', 'pattern', 'playbook', 'note']);
|
|
62
|
+
|
|
63
|
+
/**
 * Opens the global DB, runs `fn(db)`, and always closes the handle.
 *
 * Write mode: creates the parent directory, applies the idempotent schema,
 * and seeds the schema_version row on first use. Read-only mode: when the DB
 * file does not exist, `fn` is invoked with null so callers can short-circuit
 * WITHOUT creating an empty database.
 *
 * `fn` runs synchronously against node:sqlite's DatabaseSync; only the module
 * import itself is awaited.
 */
const withDb = async (fn, { filePath = getGlobalDbPath(), readOnly = false } = {}) => {
  const { DatabaseSync } = await loadSqliteModule();
  if (!readOnly) ensureDir(filePath);

  // Read-only callers tolerate a missing DB; never create one on their behalf.
  if (readOnly && !fs.existsSync(filePath)) {
    return fn(null);
  }

  const db = new DatabaseSync(filePath, { readOnly });
  try {
    if (!readOnly) {
      db.exec(SCHEMA_SQL);
      const meta = db.prepare('SELECT value FROM meta WHERE key = ?').get('schema_version');
      if (!meta) {
        db.prepare('INSERT INTO meta(key, value) VALUES(?, ?)').run('schema_version', String(SCHEMA_VERSION));
      }
    }
    return fn(db);
  } finally {
    // Guarantees the handle is released even when fn throws.
    db.close();
  }
};
|
|
85
|
+
|
|
86
|
+
/**
 * Canonicalizes user-supplied tags for storage: arrays become a JSON string
 * of their string members, strings pass through untouched, anything else
 * (including falsy values) becomes null.
 */
const normalizeTags = (tags) => {
  if (!tags) return null;
  if (typeof tags === 'string') return tags;
  if (!Array.isArray(tags)) return null;
  const stringTags = tags.filter((tag) => typeof tag === 'string');
  return JSON.stringify(stringTags);
};

/**
 * Inverse of normalizeTags: a JSON array parses back to an array; any other
 * valid JSON yields []; a non-JSON string falls back to comma-splitting with
 * trimming; falsy / non-string input yields [].
 */
const parseTags = (raw) => {
  if (!raw) return [];
  try {
    const parsed = JSON.parse(raw);
    return Array.isArray(parsed) ? parsed : [];
  } catch {
    if (typeof raw !== 'string') return [];
    return raw
      .split(',')
      .map((piece) => piece.trim())
      .filter(Boolean);
  }
};
|
|
102
|
+
|
|
103
|
+
/**
 * Persists one scrubbed memory entry.
 *
 * Validates `kind` against VALID_KINDS and requires non-empty string content.
 * Content is passed through scrubContent() BEFORE it reaches disk, and
 * `projectPath` is stored only as its FNV hash — the raw path never persists.
 *
 * @returns summary of the inserted row: { id, kind, contentLength,
 *   projectHash, tags, createdAt }.
 * @throws on invalid kind/content, or when node:sqlite is unavailable
 *   (Node < 22) via withDb.
 */
export const saveEntry = async ({
  kind,
  content,
  tags,
  projectPath,
  filePath = getGlobalDbPath(),
} = {}) => {
  if (!VALID_KINDS.has(kind)) {
    throw new Error(`Invalid kind: ${kind}. Must be one of: ${[...VALID_KINDS].join(', ')}`);
  }
  if (typeof content !== 'string' || content.trim().length === 0) {
    throw new Error('content must be a non-empty string');
  }

  const scrubbed = scrubContent(content);
  const projectHash = projectPath ? hashProjectPath(projectPath) : null;
  const tagsJson = normalizeTags(tags);
  const now = Date.now();

  return withDb((db) => {
    const stmt = db.prepare(`
      INSERT INTO entries (kind, content, tags, project_hash, created_at, updated_at)
      VALUES (?, ?, ?, ?, ?, ?)
    `);
    // created_at === updated_at on insert.
    const result = stmt.run(kind, scrubbed, tagsJson, projectHash, now, now);
    return {
      id: Number(result.lastInsertRowid),
      kind,
      contentLength: scrubbed.length,
      projectHash,
      // Round-trip through parseTags so callers always see an array.
      tags: parseTags(tagsJson),
      createdAt: now,
    };
  }, { filePath });
};
|
|
138
|
+
|
|
139
|
+
/**
 * Retrieves entries, optionally filtered by kind/project and semantically
 * ranked against `query`.
 *
 * Opens the DB read-only; a missing DB file yields { hits: [], total: 0 }
 * rather than creating an empty database. At most the 500 newest matching
 * rows form the candidate pool (NOTE: entries older than that window are
 * invisible to recall). Without a query, the newest `limit` rows return with
 * score 0. With a query, a corpus-local IDF is built over the pool, rows are
 * embedded, and only strictly positive cosine scores survive, best-first.
 * `total` always reports the pool size, not the number of hits.
 */
export const recallEntries = async ({
  kind,
  query,
  limit = 10,
  projectPath,
  filePath = getGlobalDbPath(),
} = {}) => {
  return withDb((db) => {
    if (!db) return { hits: [], total: 0 };
    const conditions = [];
    const params = [];
    if (kind) {
      conditions.push('kind = ?');
      params.push(kind);
    }
    if (projectPath) {
      // Projects are matched by hash, mirroring how saveEntry stores them.
      conditions.push('project_hash = ?');
      params.push(hashProjectPath(projectPath));
    }
    const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';

    const rows = db.prepare(`
      SELECT id, kind, content, tags, project_hash, created_at, updated_at, usage_count
      FROM entries
      ${where}
      ORDER BY created_at DESC
      LIMIT 500
    `).all(...params);

    // No query → plain recency listing.
    if (!query || query.trim().length === 0) {
      return {
        hits: rows.slice(0, limit).map((r) => ({
          id: r.id,
          kind: r.kind,
          content: r.content,
          tags: parseTags(r.tags),
          createdAt: r.created_at,
          usageCount: r.usage_count,
          score: 0,
        })),
        total: rows.length,
      };
    }

    // Semantic path: IDF is rebuilt per call from the candidate pool.
    const idf = buildCorpusIdf(rows.map((r) => r.content));
    const queryVec = embed(query, { idf });
    const ranked = rows
      .map((r) => {
        const docVec = embed(r.content, { idf });
        const score = cosineSimilarity(queryVec, docVec);
        return { row: r, score };
      })
      .filter((x) => x.score > 0)
      .sort((a, b) => b.score - a.score)
      .slice(0, limit);

    return {
      hits: ranked.map(({ row, score }) => ({
        id: row.id,
        kind: row.kind,
        content: row.content,
        tags: parseTags(row.tags),
        createdAt: row.created_at,
        usageCount: row.usage_count,
        // Rounded for stable, readable output.
        score: Number(score.toFixed(4)),
      })),
      total: rows.length,
    };
  }, { filePath, readOnly: true });
};
|
|
209
|
+
|
|
210
|
+
/**
 * Increments an entry's usage counter and stamps last_used_at with "now".
 *
 * @returns { id, updated, lastUsedAt } — `updated` is false when no row
 *   matched `id`.
 * NOTE: opens the DB in write mode, so this creates the DB file (and schema)
 * if it does not exist yet.
 */
export const markEntryUsed = async ({
  id,
  filePath = getGlobalDbPath(),
} = {}) => {
  const now = Date.now();
  return withDb((db) => {
    const result = db.prepare(`
      UPDATE entries
      SET usage_count = usage_count + 1, last_used_at = ?
      WHERE id = ?
    `).run(now, id);
    return { id, updated: Number(result.changes) > 0, lastUsedAt: now };
  }, { filePath });
};
|
|
224
|
+
|
|
225
|
+
/**
 * Deletes one entry by id.
 * @returns { id, deleted } — `deleted` is false when no row matched.
 * NOTE: write-mode open; creates the DB file if missing (a no-op delete).
 */
export const deleteEntry = async ({ id, filePath = getGlobalDbPath() } = {}) => {
  return withDb((db) => {
    const result = db.prepare('DELETE FROM entries WHERE id = ?').run(id);
    return { id, deleted: Number(result.changes) > 0 };
  }, { filePath });
};
|
|
231
|
+
|
|
232
|
+
/**
 * Summarizes stored entries per kind: count and newest created_at, ordered by
 * descending count. Read-only; a missing DB yields { kinds: [], total: 0 }.
 */
export const listKinds = async ({ filePath = getGlobalDbPath() } = {}) => {
  return withDb((db) => {
    if (!db) return { kinds: [], total: 0 };
    const rows = db.prepare(`
      SELECT kind, COUNT(*) as count, MAX(created_at) as latest
      FROM entries
      GROUP BY kind
      ORDER BY count DESC
    `).all();
    return {
      // Number() guards against node:sqlite returning BigInt aggregates.
      kinds: rows.map((r) => ({ kind: r.kind, count: Number(r.count), latest: r.latest })),
      total: rows.reduce((sum, r) => sum + Number(r.count), 0),
    };
  }, { filePath, readOnly: true });
};
|
|
247
|
+
|
|
248
|
+
/**
 * Reports database health/overview for smart_status & doctor style tooling.
 *
 * Read-only: when the DB file does not exist, returns { exists: false, ... }
 * with zeroed counts and no sizeBytes field. Otherwise returns total entry
 * count, per-kind counts, the resolved file path, the opt-in flag state, and
 * the on-disk size in bytes.
 */
export const getStats = async ({ filePath = getGlobalDbPath() } = {}) => {
  return withDb((db) => {
    if (!db) {
      return {
        exists: false,
        filePath,
        enabled: isGlobalMemoryEnabled(),
        totalEntries: 0,
        byKind: {},
      };
    }
    const total = db.prepare('SELECT COUNT(*) as c FROM entries').get();
    const byKindRows = db.prepare('SELECT kind, COUNT(*) as c FROM entries GROUP BY kind').all();
    const byKind = {};
    // Number() guards against node:sqlite returning BigInt aggregates.
    for (const r of byKindRows) byKind[r.kind] = Number(r.c);
    const sizeBytes = fs.existsSync(filePath) ? fs.statSync(filePath).size : 0;
    return {
      exists: true,
      filePath,
      enabled: isGlobalMemoryEnabled(),
      totalEntries: Number(total.c),
      byKind,
      sizeBytes,
    };
  }, { filePath, readOnly: true });
};

// Public alias for the accepted entry kinds (same Set instance as VALID_KINDS).
export const VALID_GLOBAL_KINDS = VALID_KINDS;
|