@grainulation/silo 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +13 -0
- package/LICENSE +21 -0
- package/README.md +111 -0
- package/bin/silo.js +327 -0
- package/lib/analytics.js +76 -0
- package/lib/import-export.js +174 -0
- package/lib/index.js +28 -0
- package/lib/packs.js +184 -0
- package/lib/search.js +128 -0
- package/lib/serve-mcp.js +337 -0
- package/lib/server.js +425 -0
- package/lib/store.js +145 -0
- package/lib/templates.js +139 -0
- package/package.json +48 -0
- package/packs/api-design.json +189 -0
- package/packs/architecture.json +175 -0
- package/packs/ci-cd.json +175 -0
- package/packs/compliance.json +203 -0
- package/packs/data-engineering.json +175 -0
- package/packs/frontend.json +175 -0
- package/packs/migration.json +147 -0
- package/packs/observability.json +175 -0
- package/packs/security.json +175 -0
- package/packs/team-process.json +175 -0
- package/packs/testing.json +147 -0
- package/public/grainulation-tokens.css +321 -0
- package/public/index.html +803 -0
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* import-export.js — Pull claims between sprints and silos
|
|
3
|
+
*
|
|
4
|
+
* Handles the core workflow: take claims from a silo collection
|
|
5
|
+
* (or built-in pack) and merge them into a sprint's claims.json.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
const fs = require('node:fs');
|
|
9
|
+
const path = require('node:path');
|
|
10
|
+
const { Store } = require('./store.js');
|
|
11
|
+
|
|
12
|
+
/**
 * Normalize a claim to wheat's canonical schema.
 *
 * Legacy field names are mapped (tier -> evidence, text -> content) and then
 * dropped, missing required fields are filled with defaults, and a string
 * (or absent) source is promoted to the canonical object form.
 *
 * @param {object} claim - Raw claim, possibly using legacy field names
 * @returns {object} A new, schema-complete claim (the input is not mutated)
 */
function normalizeClaim(claim) {
  // Peel the legacy fields off; every other own property carries over as-is.
  const { tier, text, ...normalized } = claim;

  if (Object.hasOwn(claim, 'tier') && !('evidence' in normalized)) {
    normalized.evidence = tier;
  }
  if (Object.hasOwn(claim, 'text') && !('content' in normalized)) {
    normalized.content = text;
  }

  // Fill required wheat fields. Deliberately uses || semantics, so falsy
  // values ('' / 0 / null) also fall back, matching the original contract.
  const defaults = {
    id: null,
    type: 'factual',
    topic: '',
    content: '',
    evidence: 'stated',
    status: 'active',
    phase_added: 'import',
    conflicts_with: [],
    resolved_by: null,
    tags: [],
  };
  for (const [field, fallback] of Object.entries(defaults)) {
    normalized[field] = normalized[field] || fallback;
  }
  normalized.timestamp = normalized.timestamp || new Date().toISOString();

  // Promote a missing or string-valued source to the object form.
  if (!normalized.source || typeof normalized.source === 'string') {
    normalized.source = {
      origin: typeof normalized.source === 'string' ? normalized.source : 'silo-import',
      artifact: null,
      connector: null,
    };
  }

  return normalized;
}
|
|
55
|
+
|
|
56
|
+
class ImportExport {
  /**
   * @param {Store} [store] - Silo store used to resolve collections.
   *   Defaults to a new Store over the default silo directory.
   */
  constructor(store) {
    this.store = store || new Store();
  }

  /**
   * Pull claims from a silo collection (or built-in pack) into a target
   * claims file. Imported claims are re-prefixed to avoid ID collisions
   * and deduplicated against the target by case-insensitive content.
   *
   * @param {string} source - Collection name/id or built-in pack name
   * @param {string} targetPath - Path to claims.json to merge into
   * @param {object} opts
   * @param {string} [opts.prefix='imp'] - Claim ID prefix for imported claims
   * @param {string[]} [opts.types] - Only import these claim types
   * @param {string[]} [opts.ids] - Only import claims with these original IDs
   * @param {boolean} [opts.dryRun=false] - Report what would be imported without writing
   * @returns {object} Summary counts, or { wouldImport, claims } for a dry run
   * @throws {Error} If the source collection/pack cannot be found
   */
  pull(source, targetPath, opts = {}) {
    const { prefix = 'imp', types, ids, dryRun = false } = opts;

    // Resolve source: silo store first, then built-in packs.
    let sourceClaims = this._resolveSource(source);
    if (!sourceClaims) {
      throw new Error(`Collection or pack "${source}" not found`);
    }

    // Normalize all claims to wheat's canonical schema.
    sourceClaims = sourceClaims.map(normalizeClaim);

    // Optional filter by the claims' original IDs.
    if (ids && ids.length > 0) {
      const idSet = new Set(ids);
      sourceClaims = sourceClaims.filter((c) => idSet.has(c.id));
    }

    // Optional filter by claim type.
    if (types && types.length > 0) {
      sourceClaims = sourceClaims.filter((c) => types.includes(c.type));
    }

    // Re-prefix claim IDs (imp001, imp002, ...) and stamp provenance.
    const imported = sourceClaims.map((claim, i) => ({
      ...claim,
      id: `${prefix}${String(i + 1).padStart(3, '0')}`,
      importedFrom: source,
      importedAt: new Date().toISOString(),
    }));

    if (dryRun) {
      return { wouldImport: imported.length, claims: imported };
    }

    // Read the target exactly once, remembering its shape so we can write it
    // back in the same shape (bare array vs { claims: [...] } wrapper).
    // BUGFIX: the previous implementation re-read the file just before
    // writing, which threw ENOENT whenever the target did not exist yet
    // (the very case the existsSync branch below is meant to handle) and
    // raced against concurrent writers.
    let existing = [];
    let targetIsArray = true; // a brand-new target is written as a bare array
    if (fs.existsSync(targetPath)) {
      const raw = JSON.parse(fs.readFileSync(targetPath, 'utf-8'));
      targetIsArray = Array.isArray(raw);
      existing = targetIsArray ? raw : raw.claims || [];
    }

    // Deduplicate by content (wheat-canonical field; legacy `text` honored).
    const existingTexts = new Set(
      existing.map((c) => (c.content || c.text || '').toLowerCase()),
    );
    const deduped = imported.filter(
      (c) => !existingTexts.has((c.content || '').toLowerCase()),
    );

    const merged = [...existing, ...deduped];
    const output = targetIsArray ? merged : { claims: merged };

    // Atomic write: temp file in the same directory, then rename over target.
    const tmp = targetPath + '.tmp.' + process.pid;
    fs.writeFileSync(tmp, JSON.stringify(output, null, 2) + '\n', 'utf-8');
    fs.renameSync(tmp, targetPath);

    return {
      imported: deduped.length,
      skippedDuplicates: imported.length - deduped.length,
      totalClaims: merged.length,
    };
  }

  /**
   * Export claims from a sprint's claims.json into the silo store.
   *
   * @param {string} sourcePath - Path to claims.json (bare array or { claims })
   * @param {string} name - Name for the stored collection
   * @param {object} [meta] - Additional metadata passed through to the store
   * @returns {*} Whatever the store's storeClaims() returns
   * @throws {Error} If sourcePath does not exist
   */
  push(sourcePath, name, meta = {}) {
    if (!fs.existsSync(sourcePath)) {
      throw new Error(`Claims file not found: ${sourcePath}`);
    }

    const raw = JSON.parse(fs.readFileSync(sourcePath, 'utf-8'));
    const claims = Array.isArray(raw) ? raw : raw.claims || [];

    return this.store.storeClaims(name, claims, meta);
  }

  /**
   * Resolve a source name to an array of claims: the silo store wins,
   * then packs bundled under ../packs. Returns null when neither matches.
   */
  _resolveSource(source) {
    const stored = this.store.getClaims(source);
    if (stored) return stored.claims;

    const packPath = path.join(__dirname, '..', 'packs', `${source}.json`);
    if (fs.existsSync(packPath)) {
      const pack = JSON.parse(fs.readFileSync(packPath, 'utf-8'));
      return pack.claims || [];
    }

    return null;
  }
}
|
|
173
|
+
|
|
174
|
+
// Public surface: the import/export workflow class, plus the standalone
// claim normalizer so other modules can reuse it directly.
module.exports = { ImportExport, normalizeClaim };
|
package/lib/index.js
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
/**
 * index.js — Unified entry point for @grainulation/silo
 *
 * Re-exports all public modules so consumers can import from a single path.
 */

const { readFileSync } = require('node:fs');
const { join } = require('node:path');
const { Store, DEFAULT_SILO_DIR } = require('./store.js');
const { Search } = require('./search.js');
const { Packs } = require('./packs.js');
const { ImportExport } = require('./import-export.js');
const { Templates } = require('./templates.js');

// Read our own package.json once at require time so the exported
// version/description always mirror the published manifest.
const pkg = JSON.parse(readFileSync(join(__dirname, '..', 'package.json'), 'utf-8'));

module.exports = {
  // Package identity, sourced from package.json (except the short name).
  name: 'silo',
  version: pkg.version,
  description: pkg.description,

  // Public classes and constants re-exported from ./lib modules.
  Store,
  DEFAULT_SILO_DIR,
  Search,
  Packs,
  ImportExport,
  Templates,
};
|
package/lib/packs.js
ADDED
|
@@ -0,0 +1,184 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* packs.js — Knowledge pack bundling
|
|
3
|
+
*
|
|
4
|
+
* Packs are versioned bundles of claims + templates that teams can
|
|
5
|
+
* publish and subscribe to. A pack is a directory or JSON file with
|
|
6
|
+
* a manifest, claims, and optional templates.
|
|
7
|
+
*/
|
|
8
|
+
|
|
9
|
+
const fs = require('node:fs');
|
|
10
|
+
const path = require('node:path');
|
|
11
|
+
const crypto = require('node:crypto');
|
|
12
|
+
const { Store } = require('./store.js');
|
|
13
|
+
|
|
14
|
+
const BUILT_IN_PACKS_DIR = path.join(__dirname, '..', 'packs');
|
|
15
|
+
|
|
16
|
+
class Packs {
  /**
   * @param {Store} [store] - Store providing packsDir for local packs.
   *   Defaults to a new Store over the default silo directory.
   */
  constructor(store) {
    this.store = store || new Store();
  }

  /**
   * Scan a directory for *.json pack files and summarize each one.
   * Malformed JSON files are skipped silently. Private helper for list(),
   * which previously duplicated this loop for both directories.
   *
   * @param {string} dir - Directory to scan (may not exist)
   * @param {string} source - Label recorded on each entry ('built-in' | 'local')
   * @returns {object[]} Pack summary entries
   */
  _scanPackDir(dir, source) {
    const entries = [];
    if (!fs.existsSync(dir)) return entries;

    for (const file of fs.readdirSync(dir)) {
      if (!file.endsWith('.json')) continue;
      try {
        const data = JSON.parse(fs.readFileSync(path.join(dir, file), 'utf-8'));
        entries.push({
          // basename() strips only the trailing '.json'; the old
          // file.replace('.json', '') removed the first occurrence anywhere.
          id: path.basename(file, '.json'),
          name: data.name,
          description: data.description,
          claimCount: (data.claims || []).length,
          version: data.version || '1.0.0',
          source,
        });
      } catch {
        // skip malformed pack files
      }
    }
    return entries;
  }

  /** Slugify a pack name into a filesystem-safe ID (shared by bundle/install). */
  _slugify(name) {
    return name
      .toLowerCase()
      .replace(/[^a-z0-9]+/g, '-')
      .replace(/^-|-$/g, '');
  }

  /** List all available packs (built-in first, then locally installed). */
  list() {
    return [
      ...this._scanPackDir(BUILT_IN_PACKS_DIR, 'built-in'),
      ...this._scanPackDir(this.store.packsDir, 'local'),
    ];
  }

  /**
   * Get a full pack manifest by ID (checks built-in first, then local).
   *
   * @param {string} id - Pack ID (the slugged file name without .json)
   * @returns {object|null} Parsed pack, or null when not found
   */
  get(id) {
    const builtIn = path.join(BUILT_IN_PACKS_DIR, `${id}.json`);
    if (fs.existsSync(builtIn)) {
      return JSON.parse(fs.readFileSync(builtIn, 'utf-8'));
    }

    const local = path.join(this.store.packsDir, `${id}.json`);
    if (fs.existsSync(local)) {
      return JSON.parse(fs.readFileSync(local, 'utf-8'));
    }

    return null;
  }

  /**
   * Bundle claims from the silo into a publishable pack.
   *
   * @param {string} name - Pack name
   * @param {string[]} collectionIds - Claim collection IDs to include
   * @param {object} [meta] - Pack metadata (description, version, author)
   * @returns {{id: string, path: string, claimCount: number}}
   */
  bundle(name, collectionIds, meta = {}) {
    this.store.init();
    const allClaims = [];

    for (const id of collectionIds) {
      const data = this.store.getClaims(id);
      if (data) {
        allClaims.push(...(data.claims || []));
      }
    }

    const pack = {
      name,
      description: meta.description || '',
      version: meta.version || '1.0.0',
      author: meta.author || '',
      createdAt: new Date().toISOString(),
      // Integrity hash over the claims payload; re-checked by install().
      hash: crypto.createHash('sha256').update(JSON.stringify(allClaims)).digest('hex'),
      sources: collectionIds,
      claims: allClaims,
    };

    const slug = this._slugify(name);
    const packPath = path.join(this.store.packsDir, `${slug}.json`);
    // Atomic write: temp file + rename.
    const tmp = packPath + '.tmp.' + process.pid;
    fs.writeFileSync(tmp, JSON.stringify(pack, null, 2) + '\n', 'utf-8');
    fs.renameSync(tmp, packPath);

    return { id: slug, path: packPath, claimCount: allClaims.length };
  }

  /**
   * Install a pack from a file path into the local silo.
   *
   * @param {string} filePath - Path to the pack JSON file
   * @param {object} [options]
   * @param {boolean} [options.force] - Install despite hash mismatch,
   *   same-version reinstall, or downgrade
   * @returns {object} { id, claimCount } plus { skipped, reason } when skipped
   * @throws {Error} If the file is missing or fails the integrity check
   */
  install(filePath, options = {}) {
    if (!fs.existsSync(filePath)) {
      throw new Error(`Pack file not found: ${filePath}`);
    }

    const pack = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
    const slug = this._slugify(pack.name || path.basename(filePath, '.json'));

    // Verify pack integrity when a hash is present. The prefix match keeps
    // legacy truncated (12-char) hashes working alongside full sha256 hex.
    if (pack.hash && pack.claims) {
      const actual = crypto.createHash('sha256').update(JSON.stringify(pack.claims)).digest('hex');
      if (pack.hash !== actual && !actual.startsWith(pack.hash)) {
        if (!options.force) {
          throw new Error(`Pack integrity check failed: hash mismatch. Use --force to install anyway.`);
        }
      }
    }

    this.store.init();
    const dest = path.join(this.store.packsDir, `${slug}.json`);

    // Refuse same-version reinstalls and downgrades unless forced.
    if (fs.existsSync(dest) && !options.force) {
      const existing = JSON.parse(fs.readFileSync(dest, 'utf-8'));
      const cmp = _compareSemver(pack.version || '0.0.0', existing.version || '0.0.0');
      if (cmp === 0) {
        return { id: slug, claimCount: (pack.claims || []).length, skipped: true, reason: 'same version' };
      }
      if (cmp < 0) {
        return { id: slug, claimCount: (pack.claims || []).length, skipped: true, reason: `downgrade (${existing.version} → ${pack.version}). Use --force to override.` };
      }
    }

    fs.copyFileSync(filePath, dest);
    return { id: slug, claimCount: (pack.claims || []).length };
  }
}
|
|
172
|
+
|
|
173
|
+
/**
 * Compare two dotted version strings numerically (major.minor.patch).
 * Missing or non-numeric components count as 0, so '1.2' equals '1.2.0'
 * and a prerelease suffix like '1.0.0-beta' compares as '1.0.0'.
 *
 * @param {string} a
 * @param {string} b
 * @returns {number} 1 when a > b, -1 when a < b, 0 when equal.
 */
function _compareSemver(a, b) {
  const left = (a || '0.0.0').split('.');
  const right = (b || '0.0.0').split('.');
  for (let i = 0; i < 3; i += 1) {
    const x = Number(left[i]) || 0;
    const y = Number(right[i]) || 0;
    if (x !== y) {
      return x > y ? 1 : -1;
    }
  }
  return 0;
}
|
|
183
|
+
|
|
184
|
+
// Public API: Packs. The semver comparator is exported as well — presumably
// for unit tests or external reuse; NOTE(review): confirm external callers.
module.exports = { Packs, _compareSemver };
|
package/lib/search.js
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* search.js — Full-text search across stored claims
|
|
3
|
+
*
|
|
4
|
+
* Simple but effective: tokenize query, scan all claim files,
|
|
5
|
+
* rank by term frequency. No external deps.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
const fs = require('node:fs');
|
|
9
|
+
const path = require('node:path');
|
|
10
|
+
const { Store } = require('./store.js');
|
|
11
|
+
|
|
12
|
+
class Search {
  /**
   * @param {Store} [store] - Store whose claimsDir is scanned.
   *   Defaults to a new Store over the default silo directory.
   */
  constructor(store) {
    this.store = store || new Store();
  }

  /**
   * Search across all stored claims.
   * Returns matches sorted by relevance (highest first). Each substring hit
   * scores 1; a whole-word hit earns an extra 0.5.
   *
   * @param {string} query - Search terms (space-separated, OR logic)
   * @param {object} opts
   * @param {string} [opts.type] - Filter by claim type (constraint, risk, etc.)
   * @param {string} [opts.evidence] - Filter by evidence tier (legacy 'tier' accepted)
   * @param {number} [opts.limit=20] - Max results
   * @returns {{claim: object, collection: string, score: number}[]}
   */
  query(query, opts = {}) {
    const { type, evidence, tier, limit = 20 } = opts;
    const evidenceFilter = evidence || tier; // support legacy 'tier' option
    const tokens = this._tokenize(query);
    if (tokens.length === 0) return [];

    const results = [];
    const claimsDir = this.store.claimsDir;

    if (!fs.existsSync(claimsDir)) return [];

    const files = fs.readdirSync(claimsDir).filter((f) => f.endsWith('.json'));

    for (const file of files) {
      const filePath = path.join(claimsDir, file);
      let data;
      try {
        data = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
      } catch {
        continue; // skip malformed collection files
      }

      const collectionName = data.meta?.name || file.replace('.json', '');
      const claims = data.claims || [];

      for (const claim of claims) {
        // Apply filters before doing any scoring work.
        if (type && claim.type !== type) continue;
        const claimEvidence = claim.evidence || claim.tier; // legacy field
        if (evidenceFilter && claimEvidence !== evidenceFilter) continue;

        // Build one lowercase haystack from the searchable fields
        // (both canonical `content` and legacy `text` are supported).
        const sourceStr = typeof claim.source === 'object'
          ? [claim.source?.origin, claim.source?.artifact].filter(Boolean).join(' ')
          : (claim.source || '');
        const searchable = [
          claim.content || claim.text || '',
          claim.type || '',
          claim.tags?.join(' ') || '',
          sourceStr,
          collectionName,
        ]
          .join(' ')
          .toLowerCase();

        // BUGFIX: the old whole-word bonus checked ` token ` / startsWith
        // only, so a token at the very end of the haystack (or equal to the
        // whole haystack) never got the bonus. A word set covers all
        // positions and is O(1) per token.
        const words = new Set(searchable.split(/\s+/));
        let score = 0;
        for (const token of tokens) {
          if (searchable.includes(token)) {
            score += 1;
            if (words.has(token)) {
              score += 0.5; // exact whole-word match bonus
            }
          }
        }

        if (score > 0) {
          results.push({ claim, collection: collectionName, score });
        }
      }
    }

    results.sort((a, b) => b.score - a.score);
    return results.slice(0, limit);
  }

  /** List all unique tags across stored claims, sorted alphabetically. */
  tags() {
    const tagSet = new Set();
    const claimsDir = this.store.claimsDir;
    if (!fs.existsSync(claimsDir)) return [];

    const files = fs.readdirSync(claimsDir).filter((f) => f.endsWith('.json'));
    for (const file of files) {
      try {
        const data = JSON.parse(fs.readFileSync(path.join(claimsDir, file), 'utf-8'));
        for (const claim of data.claims || []) {
          for (const tag of claim.tags || []) {
            tagSet.add(tag);
          }
        }
      } catch {
        // skip malformed files
      }
    }
    return [...tagSet].sort();
  }

  /** Lowercase, split on whitespace, drop single-character tokens. */
  _tokenize(str) {
    return str
      .toLowerCase()
      .split(/\s+/)
      .filter((t) => t.length > 1);
  }
}
|
|
127
|
+
|
|
128
|
+
// Public API of this module.
module.exports = { Search };
|