@cccarv82/freya 2.3.13 → 2.4.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.agent/rules/freya/agents/coach.mdc +7 -16
- package/.agent/rules/freya/agents/ingestor.mdc +1 -89
- package/.agent/rules/freya/agents/master.mdc +3 -0
- package/.agent/rules/freya/agents/oracle.mdc +7 -23
- package/cli/web-ui.css +860 -182
- package/cli/web-ui.js +547 -175
- package/cli/web.js +690 -536
- package/package.json +6 -3
- package/scripts/build-vector-index.js +85 -0
- package/scripts/export-obsidian.js +6 -16
- package/scripts/generate-blockers-report.js +5 -17
- package/scripts/generate-daily-summary.js +25 -58
- package/scripts/generate-executive-report.js +22 -204
- package/scripts/generate-sm-weekly-report.js +27 -92
- package/scripts/lib/DataLayer.js +92 -0
- package/scripts/lib/DataManager.js +198 -0
- package/scripts/lib/Embedder.js +59 -0
- package/scripts/lib/schema.js +23 -0
- package/scripts/migrate-v1-v2.js +184 -0
- package/scripts/validate-data.js +48 -51
- package/scripts/validate-structure.js +12 -58
- package/templates/base/scripts/build-vector-index.js +85 -0
- package/templates/base/scripts/export-obsidian.js +143 -0
- package/templates/base/scripts/generate-daily-summary.js +25 -58
- package/templates/base/scripts/generate-executive-report.js +14 -225
- package/templates/base/scripts/generate-sm-weekly-report.js +9 -91
- package/templates/base/scripts/index/build-index.js +13 -0
- package/templates/base/scripts/index/update-index.js +15 -0
- package/templates/base/scripts/lib/DataLayer.js +92 -0
- package/templates/base/scripts/lib/DataManager.js +198 -0
- package/templates/base/scripts/lib/Embedder.js +59 -0
- package/templates/base/scripts/lib/index-utils.js +407 -0
- package/templates/base/scripts/lib/schema.js +23 -0
- package/templates/base/scripts/lib/search-utils.js +183 -0
- package/templates/base/scripts/migrate-v1-v2.js +184 -0
- package/templates/base/scripts/validate-data.js +48 -51
- package/templates/base/scripts/validate-structure.js +10 -32
|
@@ -0,0 +1,198 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* DataManager.js (V2 - SQLite Powered)
|
|
3
|
+
* Centralized data access layer for F.R.E.Y.A. reports and scripts.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
const fs = require('fs');
|
|
7
|
+
const path = require('path');
|
|
8
|
+
const { isWithinRange, formatDate } = require('./date-utils');
|
|
9
|
+
const SCHEMA = require('./schema');
|
|
10
|
+
const { defaultInstance: dl } = require('./DataLayer');
|
|
11
|
+
const { defaultEmbedder } = require('./Embedder');
|
|
12
|
+
|
|
13
|
+
// Normalize a Date (or date-like value) to a full ISO-8601 timestamp.
// Null/undefined passes through as null (callers must supply both bounds;
// SQL comparisons against NULL match nothing).
const toIsoTimestamp = (d) => (d ? (d instanceof Date ? d.toISOString() : new Date(d).toISOString()) : null);

// Defensive JSON parse for metadata columns: one corrupt row must not
// abort an entire report run, so parse failures fall back to {}.
const parseMetadata = (raw) => {
  if (!raw) return {};
  try {
    return JSON.parse(raw);
  } catch {
    return {};
  }
};

/**
 * Centralized data access facade over the SQLite store (via DataLayer's
 * shared `dl` instance) for F.R.E.Y.A. reports and scripts.
 */
class DataManager {
  /**
   * @param {string} dataDir - Legacy data directory (unused; kept so existing callers keep working).
   * @param {string} logsDir - Legacy logs directory (unused; kept so existing callers keep working).
   */
  constructor(dataDir, logsDir) {
    // Keeping args for backwards compatibility, though SQLite handles the storage now
    this.dataDir = dataDir;
    this.logsDir = logsDir;
  }

  // --- Helpers (Deprecated but kept for backwards compat if needed elsewhere) ---
  // Deprecated stub: always returns the fallback (file-based storage is gone).
  readJsonOrQuarantine(filePath, fallback) {
    return fallback;
  }

  // --- Tasks ---

  /** All tasks, with snake_case columns mapped to camelCase fields. */
  getTasksRaw() {
    return dl.db.prepare('SELECT * FROM tasks').all().map((t) => {
      const meta = parseMetadata(t.metadata);
      return {
        id: t.id,
        projectSlug: t.project_slug,
        description: t.description,
        category: t.category,
        status: t.status,
        createdAt: t.created_at,
        completedAt: t.completed_at,
        priority: meta.priority,
        streamSlug: meta.streamSlug
      };
    });
  }

  /**
   * Task overview for a reporting window.
   * @param {Date|string} start - Window start (inclusive).
   * @param {Date|string} end - Window end (inclusive).
   * @returns {{tasks: Array, completed: Array, pending: Array}} all tasks,
   *   tasks completed within the window, and PENDING/DO_NOW tasks.
   */
  getTasks(start, end) {
    // SQLite date func: use ISO format
    const startIso = toIsoTimestamp(start);
    const endIso = toIsoTimestamp(end);

    const tasks = this.getTasksRaw();
    const completed = dl.db.prepare(`
      SELECT * FROM tasks
      WHERE status = 'COMPLETED'
      AND completed_at >= ? AND completed_at <= ?
    `).all(startIso, endIso).map(t => ({ ...t, completedAt: t.completed_at, createdAt: t.created_at }));

    const pending = dl.db.prepare(`
      SELECT * FROM tasks
      WHERE status = 'PENDING' AND category = 'DO_NOW'
    `).all().map(t => ({ ...t, createdAt: t.created_at }));

    return { tasks, completed, pending };
  }

  // --- Blockers ---

  /** All blockers, with snake_case columns mapped to camelCase fields. */
  getBlockersRaw() {
    return dl.db.prepare('SELECT * FROM blockers').all().map((b) => {
      const meta = parseMetadata(b.metadata);
      return {
        id: b.id,
        projectSlug: b.project_slug,
        title: b.title,
        severity: b.severity,
        status: b.status,
        owner: b.owner,
        nextAction: b.next_action,
        createdAt: b.created_at,
        resolvedAt: b.resolved_at,
        streamSlug: meta.streamSlug
      };
    });
  }

  /**
   * Blocker overview for a reporting window.
   * @returns {{blockers: Array, open: Array, openedRecent: Array, resolvedRecent: Array}}
   *   all blockers; currently open ones ordered by severity then age;
   *   those opened / resolved within [start, end].
   */
  getBlockers(start, end) {
    const startIso = toIsoTimestamp(start);
    const endIso = toIsoTimestamp(end);

    const blockers = this.getBlockersRaw();

    // Open = not in a terminal status AND never stamped resolved.
    const open = dl.db.prepare(`
      SELECT * FROM blockers
      WHERE status NOT IN ('RESOLVED', 'CLOSED', 'DONE', 'FIXED')
      AND resolved_at IS NULL
      ORDER BY
        CASE severity
          WHEN 'CRITICAL' THEN 0
          WHEN 'HIGH' THEN 1
          WHEN 'MEDIUM' THEN 2
          WHEN 'LOW' THEN 3
          ELSE 99
        END ASC,
        created_at ASC
    `).all().map(b => ({ ...b, projectSlug: b.project_slug, createdAt: b.created_at }));

    const openedRecent = dl.db.prepare(`
      SELECT * FROM blockers
      WHERE created_at >= ? AND created_at <= ?
    `).all(startIso, endIso).map(b => ({ ...b, projectSlug: b.project_slug, createdAt: b.created_at }));

    const resolvedRecent = dl.db.prepare(`
      SELECT * FROM blockers
      WHERE resolved_at >= ? AND resolved_at <= ?
    `).all(startIso, endIso).map(b => ({ ...b, projectSlug: b.project_slug, resolvedAt: b.resolved_at }));

    return { blockers, open, openedRecent, resolvedRecent };
  }

  // --- Project Updates ---

  /**
   * Status updates for active projects that have history within [start, end].
   * @returns {Array<{client, project, slug, currentStatus, events}>}
   */
  getProjectUpdates(start, end) {
    const startIso = toIsoTimestamp(start);
    const endIso = toIsoTimestamp(end);

    const updates = [];

    // Find active projects with recent history
    const activeProjects = dl.db.prepare(`SELECT * FROM projects WHERE is_active = 1`).all();

    for (const proj of activeProjects) {
      const recentHistory = dl.db.prepare(`
        SELECT status_text, date FROM project_status_history
        WHERE project_id = ? AND date >= ? AND date <= ?
        ORDER BY date DESC
      `).all(proj.id, startIso, endIso).map(h => ({
        content: h.status_text,
        date: h.date
      }));

      // We determine currentStatus as the latest from history overall (not
      // just within the window), or a placeholder if none exists yet.
      const latest = dl.db.prepare(`
        SELECT status_text FROM project_status_history
        WHERE project_id = ? ORDER BY date DESC LIMIT 1
      `).get(proj.id);

      const currentStatus = latest ? latest.status_text : "Initialized";

      if (recentHistory.length > 0) {
        updates.push({
          client: proj.client,
          project: proj.name,
          slug: proj.slug,
          currentStatus: currentStatus,
          events: recentHistory
        });
      }
    }

    return updates;
  }

  // --- Daily Logs ---

  /**
   * Daily log markdown rows with date in [start, end], ordered ascending.
   * Dates are compared as YYYY-MM-DD strings; prefers the shared formatDate
   * helper when available.
   */
  getDailyLogs(start, end) {
    function toIso(d) { return (d instanceof Date ? d : new Date(d)).toISOString().slice(0, 10); }
    const startIso = formatDate ? formatDate(start) : toIso(start);
    const endIso = formatDate ? formatDate(end) : toIso(end);

    return dl.db.prepare(`
      SELECT date, raw_markdown as content FROM daily_logs
      WHERE date >= ? AND date <= ?
      ORDER BY date ASC
    `).all(startIso, endIso);
  }

  // --- RAG (Vector Search) ---

  /**
   * Embed the query and rank all stored chunks by cosine similarity.
   * @param {string} query - Free-text query.
   * @param {number} [topK=10] - Number of best chunks to return.
   * @returns {Promise<Array>} top chunks, each row plus a `score` field.
   */
  async semanticSearch(query, topK = 10) {
    const queryVector = await defaultEmbedder.embedText(query);

    // Fetch all stored embeddings. For a local personal tool with < 100k chunks, in-memory cosine sim is perfectly fast.
    const rows = dl.db.prepare(`
      SELECT reference_type, reference_id, chunk_index, text_chunk, embedding
      FROM document_embeddings
    `).all();

    const scored = [];
    for (const row of rows) {
      const chunkVector = defaultEmbedder.bufferToVector(row.embedding);
      const score = defaultEmbedder.cosineSimilarity(queryVector, chunkVector);
      scored.push({ ...row, score });
    }

    // Sort pure descending by similarity score
    scored.sort((a, b) => b.score - a.score);

    return scored.slice(0, topK);
  }
}

module.exports = DataManager;
|
|
@@ -0,0 +1,59 @@
|
|
|
1
|
+
const { pipeline } = require('@xenova/transformers');
|
|
2
|
+
|
|
3
|
+
/**
 * Local text-embedding helper built on @xenova/transformers.
 * Lazily loads a quantized MiniLM feature-extraction pipeline and provides
 * converters between Float32Array vectors and SQLite BLOB Buffers.
 */
class Embedder {
  constructor() {
    this.modelName = 'Xenova/all-MiniLM-L6-v2';
    this.extractorInfo = null; // resolved pipeline instance, set by init()
    this.initPromise = null;   // in-flight load, shared by concurrent callers
  }

  /**
   * Load the feature-extraction pipeline exactly once; concurrent callers
   * await the same promise. A failed load clears the promise so a later
   * call can retry.
   * @throws whatever the pipeline loader throws (e.g. download failure).
   */
  async init() {
    if (this.extractorInfo) return;
    if (!this.initPromise) {
      this.initPromise = pipeline('feature-extraction', this.modelName, { quantized: true })
        .then((ex) => {
          this.extractorInfo = ex;
        })
        .catch((err) => {
          this.initPromise = null; // allow retry after a failed load
          throw err;
        });
    }
    await this.initPromise;
  }

  /**
   * Embed text into a mean-pooled, normalized Float32Array.
   * Blank input short-circuits to a 384-dim zero vector (the model's
   * output dimension) without touching the model.
   */
  async embedText(text) {
    await this.init();
    const cleanText = String(text || '').trim();
    if (!cleanText) return new Float32Array(384); // Empty zero vector for model

    const output = await this.extractorInfo(cleanText, { pooling: 'mean', normalize: true });
    return new Float32Array(output.data);
  }

  /**
   * Serialize a Float32Array into a Node.js Buffer for SQLite BLOB storage.
   * Fix: honor the view's byteOffset/byteLength — the previous
   * Buffer.from(arr.buffer) copied the WHOLE underlying ArrayBuffer,
   * yielding wrong bytes whenever the array was a subarray/offset view.
   */
  vectorToBuffer(float32Array) {
    return Buffer.from(float32Array.buffer, float32Array.byteOffset, float32Array.byteLength);
  }

  /**
   * Parse a SQLite BLOB Buffer back into a Float32Array.
   * Fix: Node Buffers are frequently views into a shared pool at arbitrary
   * byte offsets; constructing a Float32Array over a non-4-byte-aligned
   * offset throws a RangeError, so copy into an aligned buffer first.
   */
  bufferToVector(buffer) {
    const length = buffer.length / Float32Array.BYTES_PER_ELEMENT;
    if (buffer.byteOffset % Float32Array.BYTES_PER_ELEMENT === 0) {
      return new Float32Array(buffer.buffer, buffer.byteOffset, length);
    }
    const aligned = new Uint8Array(buffer.length);
    aligned.set(buffer);
    return new Float32Array(aligned.buffer, 0, length);
  }

  /**
   * Cosine similarity of two vectors (iterates over vecA's length).
   * Returns 0 when either vector has zero norm.
   */
  cosineSimilarity(vecA, vecB) {
    let dotProduct = 0;
    let normA = 0;
    let normB = 0;
    for (let i = 0; i < vecA.length; i++) {
      dotProduct += vecA[i] * vecB[i];
      normA += vecA[i] * vecA[i];
      normB += vecB[i] * vecB[i];
    }
    if (normA === 0 || normB === 0) return 0;
    return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
  }
}
|
|
57
|
+
|
|
58
|
+
// Shared singleton: reusing one instance means the model pipeline is
// initialized at most once per process (init() caches its promise).
const defaultEmbedder = new Embedder();
module.exports = { Embedder, defaultEmbedder };
|
|
@@ -0,0 +1,407 @@
|
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
|
|
4
|
+
// Ticket-style ID formats that receive high-priority index entries
// (e.g. PTI2024-1, INC12345, CHG678).
const ID_PATTERNS = [
  /\bPTI\d{4,}-\d+\b/gi,
  /\bINC\d+\b/gi,
  /\bCHG\d+\b/gi
];

// Only these plain-text extensions are indexed.
const TEXT_EXTS = new Set(['.md', '.txt', '.log', '.json', '.yaml', '.yml']);
// Generic keyword tokens: 3+ word-ish chars. NOTE: shared /g regex is
// stateful (lastIndex) — users reset it after iterating.
const TOKEN_RE = /[A-Za-z0-9_-]{3,}/g;

// Files larger than this many bytes are skipped.
const DEFAULT_MAX_SIZE = 2 * 1024 * 1024;
// At most this many distinct keyword tokens are indexed per file.
const DEFAULT_TOKEN_LIMIT = 500;
|
|
15
|
+
|
|
16
|
+
/**
 * Pull ticket-style IDs (see ID_PATTERNS) out of a query string.
 * Returns de-duplicated, upper-cased matches.
 */
function extractIdTokens(query) {
  const found = new Set();
  const text = String(query || '');
  for (const pattern of ID_PATTERNS) {
    for (const hit of text.match(pattern) || []) {
      found.add(hit.toUpperCase());
    }
  }
  return Array.from(found);
}
|
|
27
|
+
|
|
28
|
+
/**
 * Split a query into word-ish tokens of 2+ chars (letters, digits, '_', '-'),
 * in order of appearance. Non-string/empty input yields [].
 */
function tokenizeQuery(query) {
  const text = String(query || '');
  return text.match(/[A-Za-z0-9_-]{2,}/g) || [];
}
|
|
38
|
+
|
|
39
|
+
/**
 * Recursively collect indexable text files (see TEXT_EXTS) under `dir`
 * into `files`. Unreadable directories are skipped silently. Returns `files`.
 * NOTE(review): no guard against symlink cycles — assumed absent; confirm.
 */
function listFilesRecursive(dir, files = []) {
  let entries;
  try {
    entries = fs.readdirSync(dir, { withFileTypes: true });
  } catch {
    return files;
  }
  for (const entry of entries) {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      listFilesRecursive(fullPath, files);
      continue;
    }
    if (entry.isFile() && TEXT_EXTS.has(path.extname(entry.name).toLowerCase())) {
      files.push(fullPath);
    }
  }
  return files;
}
|
|
57
|
+
|
|
58
|
+
/**
 * Format a millisecond timestamp as local-time 'YYYY-MM-DD'.
 * Returns '' for anything that does not produce a valid Date.
 */
function toDateString(ms) {
  try {
    const date = new Date(ms);
    if (Number.isNaN(date.getTime())) return '';
    const pad = (v) => String(v).padStart(2, '0');
    return `${date.getFullYear()}-${pad(date.getMonth() + 1)}-${pad(date.getDate())}`;
  } catch {
    return '';
  }
}
|
|
70
|
+
|
|
71
|
+
/**
 * Prefer a YYYY-MM-DD date embedded in the file path; otherwise fall
 * back to the file's mtime formatted by toDateString.
 */
function inferDateFromPath(filePath, mtimeMs) {
  const embedded = String(filePath).match(/\b(\d{4}-\d{2}-\d{2})\b/);
  return embedded && embedded[1] ? embedded[1] : toDateString(mtimeMs);
}
|
|
76
|
+
|
|
77
|
+
/**
 * Build a one-line snippet around a match at `index` of `length` chars:
 * ~80 chars of leading and ~120 of trailing context, whitespace collapsed,
 * with '…' marking truncated edges. When index < 0 (no known position),
 * returns the start of the text capped at 220 chars.
 */
function buildSnippet(text, index, length) {
  const raw = String(text || '');
  if (index < 0) {
    const flat = raw.replace(/\s+/g, ' ').trim();
    return flat.length > 220 ? `${flat.slice(0, 220)}…` : flat;
  }
  const from = Math.max(0, index - 80);
  const to = Math.min(raw.length, index + length + 120);
  const core = raw.slice(from, to).replace(/\s+/g, ' ').trim();
  return `${from > 0 ? '…' : ''}${core}${to < raw.length ? '…' : ''}`;
}
|
|
90
|
+
|
|
91
|
+
/** Create `dir` (and any missing parents); no-op if it already exists. */
function ensureDir(dir) {
  fs.mkdirSync(dir, { recursive: true });
}
|
|
94
|
+
|
|
95
|
+
/**
 * Path of `fullPath` relative to the workspace root, normalized to
 * forward slashes regardless of platform.
 */
function normalizeRelPath(workspaceDir, fullPath) {
  const rel = path.relative(workspaceDir, fullPath);
  return rel.split('\\').join('/');
}
|
|
98
|
+
|
|
99
|
+
/** On-disk location of the search index: <workspace>/data/index/search-index.json. */
function indexPathFor(workspaceDir) {
  return path.join(workspaceDir, 'data', 'index', 'search-index.json');
}
|
|
102
|
+
|
|
103
|
+
/**
 * Enumerate every indexable file in the workspace's content directories,
 * returning { file, mtimeMs } records. Missing directories and stat
 * failures are silently skipped.
 */
function listTargetFiles(workspaceDir) {
  const targetDirs = [
    path.join(workspaceDir, 'logs', 'daily'),
    path.join(workspaceDir, 'data', 'tasks'),
    path.join(workspaceDir, 'data', 'Clients'),
    path.join(workspaceDir, 'docs', 'reports')
  ];

  const results = [];
  for (const dir of targetDirs) {
    if (!fs.existsSync(dir)) continue;
    for (const file of listFilesRecursive(dir, [])) {
      try {
        const stats = fs.statSync(file);
        if (stats.isFile()) results.push({ file, mtimeMs: stats.mtimeMs });
      } catch {
        // Unreadable entry: leave it out of the index.
      }
    }
  }
  return results;
}
|
|
127
|
+
|
|
128
|
+
/**
 * Find every ticket-style ID occurrence in `text`, with its position.
 * Returns [{ key: upper-cased match, index, length }] in pattern order.
 * matchAll clones each /g regex, so the shared ID_PATTERNS keep
 * lastIndex = 0 (same net effect as the previous manual reset).
 */
function extractIdMatches(text) {
  const matches = [];
  for (const re of ID_PATTERNS) {
    for (const m of String(text).matchAll(re)) {
      matches.push({
        key: m[0].toUpperCase(),
        index: m.index,
        length: m[0].length
      });
    }
  }
  return matches;
}
|
|
143
|
+
|
|
144
|
+
/**
 * Map each distinct token in `textLower` to the index of its first
 * occurrence, stopping once `tokenLimit` distinct tokens are recorded.
 * TOKEN_RE is a shared stateful /g regex, so lastIndex is reset before
 * returning (the loop may exit early via break).
 */
function extractKeywordIndexMap(textLower, tokenLimit) {
  const firstSeen = new Map();
  let match;
  while ((match = TOKEN_RE.exec(textLower)) !== null) {
    const token = match[0];
    if (firstSeen.has(token)) continue;
    firstSeen.set(token, match.index);
    if (firstSeen.size >= tokenLimit) break;
  }
  TOKEN_RE.lastIndex = 0;
  return firstSeen;
}
|
|
157
|
+
|
|
158
|
+
/**
 * Record that `key` occurs in `relPath`. Only the first sighting of a
 * (key, path) pair keeps its date/snippet; duplicates are ignored.
 */
function addEntry(entriesMap, key, relPath, date, snippet) {
  let fileMap = entriesMap.get(key);
  if (!fileMap) {
    fileMap = new Map();
    entriesMap.set(key, fileMap);
  }
  if (!fileMap.has(relPath)) {
    fileMap.set(relPath, { path: relPath, date, snippet });
  }
}
|
|
165
|
+
|
|
166
|
+
/**
 * Drop every entry pointing at `relPath`; keys left with no files are
 * pruned from the map entirely.
 */
function removeFileFromEntries(entriesMap, relPath) {
  for (const [key, fileMap] of entriesMap.entries()) {
    if (!fileMap.delete(relPath)) continue;
    if (fileMap.size === 0) entriesMap.delete(key);
  }
}
|
|
173
|
+
|
|
174
|
+
/**
 * Index one file into entriesMap: ticket-ID matches first, then generic
 * keyword tokens (up to tokenLimit distinct ones). Files that are missing,
 * oversized, unreadable, empty, or binary-looking (contain NUL) are skipped.
 */
function indexSingleFile(workspaceDir, file, mtimeMs, opts, entriesMap) {
  const maxSize = Math.max(1024, Number(opts.maxSize || DEFAULT_MAX_SIZE));
  const tokenLimit = Math.max(50, Number(opts.tokenLimit || DEFAULT_TOKEN_LIMIT));

  let stats;
  try {
    stats = fs.statSync(file);
  } catch {
    return;
  }
  if (!stats.isFile() || stats.size > maxSize) return;

  let text;
  try {
    text = fs.readFileSync(file, 'utf8');
  } catch {
    return;
  }
  if (!text || text.includes('\u0000')) return;

  const relPath = normalizeRelPath(workspaceDir, file);
  const date = inferDateFromPath(relPath, mtimeMs);

  for (const match of extractIdMatches(text)) {
    addEntry(entriesMap, match.key, relPath, date, buildSnippet(text, match.index, match.length));
  }

  const lower = text.toLowerCase();
  for (const [token, index] of extractKeywordIndexMap(lower, tokenLimit)) {
    if (!token) continue;
    addEntry(entriesMap, token, relPath, date, buildSnippet(text, index, token.length));
  }
}
|
|
211
|
+
|
|
212
|
+
/**
 * Rehydrate the serialized entries array (as stored in JSON) into the
 * in-memory Map-of-Maps shape: key -> (path -> {path, date, snippet}).
 * Malformed entries/files are dropped; keys with no valid files are omitted.
 */
function entriesToMap(entries) {
  const result = new Map();
  if (!Array.isArray(entries)) return result;

  const sanitizeFile = (f) =>
    f && typeof f.path === 'string'
      ? {
          path: f.path,
          date: typeof f.date === 'string' ? f.date : '',
          snippet: typeof f.snippet === 'string' ? f.snippet : ''
        }
      : null;

  for (const entry of entries) {
    if (!entry || typeof entry.key !== 'string' || !Array.isArray(entry.files)) continue;
    const fileMap = new Map();
    for (const candidate of entry.files) {
      const clean = sanitizeFile(candidate);
      if (clean) fileMap.set(clean.path, clean);
    }
    if (fileMap.size > 0) result.set(entry.key, fileMap);
  }
  return result;
}
|
|
230
|
+
|
|
231
|
+
/**
 * Serialize the in-memory Map-of-Maps into a JSON-friendly array of
 * { key, files } records, sorted by key for stable on-disk output.
 */
function mapToEntries(entriesMap) {
  return Array.from(entriesMap.keys())
    .sort()
    .map((key) => ({ key, files: Array.from(entriesMap.get(key).values()) }));
}
|
|
241
|
+
|
|
242
|
+
/**
 * Load and parse the search index JSON file.
 * Returns null when the file is absent, unreadable, not valid JSON,
 * or does not parse to an object.
 */
function readIndex(indexPath) {
  try {
    if (!fs.existsSync(indexPath)) return null;
    const parsed = JSON.parse(fs.readFileSync(indexPath, 'utf8'));
    return parsed && typeof parsed === 'object' ? parsed : null;
  } catch {
    return null;
  }
}
|
|
253
|
+
|
|
254
|
+
/**
 * Rebuild the search index from scratch for a workspace and write it to
 * disk (pretty-printed JSON with a trailing newline).
 * @returns {{indexPath: string, fileCount: number, keyCount: number}}
 */
function buildIndex(workspaceDir, opts = {}) {
  const entriesMap = new Map();
  const files = listTargetFiles(workspaceDir);
  const metaFiles = {};

  // Single pass: index content and record each file's mtime for later
  // incremental updates.
  for (const { file, mtimeMs } of files) {
    indexSingleFile(workspaceDir, file, mtimeMs, opts, entriesMap);
    metaFiles[normalizeRelPath(workspaceDir, file)] = mtimeMs;
  }

  const index = {
    meta: {
      lastRun: new Date().toISOString(),
      files: metaFiles
    },
    entries: mapToEntries(entriesMap)
  };

  const indexPath = indexPathFor(workspaceDir);
  ensureDir(path.dirname(indexPath));
  fs.writeFileSync(indexPath, JSON.stringify(index, null, 2) + '\n', 'utf8');

  return { indexPath, fileCount: files.length, keyCount: index.entries.length };
}
|
|
282
|
+
|
|
283
|
+
/**
 * Incrementally refresh the search index: re-index only files whose mtime
 * changed, drop entries for deleted files, and rewrite the index on disk.
 * Falls back to a full buildIndex() when no valid index exists yet.
 * @returns {{indexPath, fileCount, keyCount, changed, removed}}
 */
function updateIndex(workspaceDir, opts = {}) {
  const indexPath = indexPathFor(workspaceDir);
  const currentFiles = listTargetFiles(workspaceDir);
  // relPath -> { file, mtimeMs } for everything currently on disk.
  const currentMap = new Map();
  for (const { file, mtimeMs } of currentFiles) {
    const relPath = normalizeRelPath(workspaceDir, file);
    currentMap.set(relPath, { file, mtimeMs });
  }

  const existing = readIndex(indexPath);
  // No usable previous index -> full rebuild.
  if (!existing || !existing.meta || !existing.meta.files || !Array.isArray(existing.entries)) {
    return buildIndex(workspaceDir, opts);
  }

  const entriesMap = entriesToMap(existing.entries);
  const prevFiles = existing.meta.files || {};

  // Files present in the old index but gone from disk.
  const removed = new Set();
  for (const relPath of Object.keys(prevFiles)) {
    if (!currentMap.has(relPath)) removed.add(relPath);
  }

  // New files, plus files whose recorded mtime no longer matches.
  const changed = [];
  for (const [relPath, info] of currentMap.entries()) {
    const prev = prevFiles[relPath];
    if (!prev || Number(prev) !== Number(info.mtimeMs)) {
      changed.push(info);
    }
  }

  for (const relPath of removed) {
    removeFileFromEntries(entriesMap, relPath);
  }

  // Order matters: purge a changed file's stale entries before re-indexing it.
  for (const info of changed) {
    const relPath = normalizeRelPath(workspaceDir, info.file);
    removeFileFromEntries(entriesMap, relPath);
    indexSingleFile(workspaceDir, info.file, info.mtimeMs, opts, entriesMap);
  }

  const metaFiles = {};
  for (const [relPath, info] of currentMap.entries()) {
    metaFiles[relPath] = info.mtimeMs;
  }

  const index = {
    meta: {
      lastRun: new Date().toISOString(),
      files: metaFiles
    },
    entries: mapToEntries(entriesMap)
  };

  ensureDir(path.dirname(indexPath));
  fs.writeFileSync(indexPath, JSON.stringify(index, null, 2) + '\n', 'utf8');

  return { indexPath, fileCount: currentFiles.length, keyCount: index.entries.length, changed: changed.length, removed: removed.size };
}
|
|
341
|
+
|
|
342
|
+
/**
 * Query the on-disk search index. Ticket-ID matches are weighted 100,
 * generic keyword matches 2, with a +10 bonus when a key equals the whole
 * lower-cased query. Results are sorted by score, then date (descending),
 * and capped at opts.limit (1..20, default 8).
 * @returns {Array<{file, date, score, snippet}>}
 */
function searchIndex(workspaceDir, query, opts = {}) {
  const q = String(query || '').trim();
  if (!q) return [];
  const limit = Math.max(1, Math.min(20, Number(opts.limit || 8)));

  const indexPath = indexPathFor(workspaceDir);
  const index = readIndex(indexPath);
  if (!index || !Array.isArray(index.entries)) return [];

  // key -> files[] lookup built from the serialized entries.
  const entriesMap = new Map();
  for (const entry of index.entries) {
    if (!entry || typeof entry.key !== 'string' || !Array.isArray(entry.files)) continue;
    entriesMap.set(entry.key, entry.files);
  }

  const idTokens = extractIdTokens(q);
  const tokens = tokenizeQuery(q).map((t) => t.toLowerCase());
  const queryLower = q.toLowerCase();

  const resultsMap = new Map();

  // Accumulate score per file; the snippet shown is the one from the
  // highest-weight match seen so far.
  function applyMatches(keys, weight) {
    for (const key of keys) {
      if (!key) continue;
      const entryFiles = entriesMap.get(key);
      if (!entryFiles) continue;
      for (const f of entryFiles) {
        if (!f || !f.path) continue;
        const prev = resultsMap.get(f.path) || { file: f.path, date: f.date || '', score: 0, snippet: '' , weight: 0 };
        // NOTE(review): ID keys are upper-cased while queryLower is
        // lower-cased, so this bonus can only fire via the keyword pass —
        // confirm that is intended.
        const bonus = (key === queryLower) ? 10 : 0;
        const nextScore = prev.score + weight + bonus;
        const nextWeight = Math.max(prev.weight, weight);
        const snippet = (nextWeight > prev.weight && f.snippet) ? f.snippet : (prev.snippet || f.snippet || '');
        resultsMap.set(f.path, {
          file: f.path,
          date: f.date || prev.date || '',
          score: nextScore,
          snippet,
          weight: nextWeight
        });
      }
    }
  }

  applyMatches(idTokens.map((t) => t.toUpperCase()), 100);
  applyMatches(tokens, 2);

  // Strip the internal `weight` bookkeeping field before returning.
  const results = Array.from(resultsMap.values()).map((r) => {
    const { weight, ...rest } = r;
    return rest;
  });

  results.sort((a, b) => {
    if (b.score !== a.score) return b.score - a.score;
    return String(b.date || '').localeCompare(String(a.date || ''));
  });

  return results.slice(0, limit);
}
|
|
401
|
+
|
|
402
|
+
// Public API: full rebuild, incremental update, query, and index location.
module.exports = {
  buildIndex,
  updateIndex,
  searchIndex,
  indexPathFor
};
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
/**
 * schema.js
 * Centralized schema constants for F.R.E.Y.A. data structures.
 * This is the single source of truth for validation and ingestion.
 */

module.exports = {
  TASK: {
    // Triage buckets a task can be filed under.
    CATEGORIES: ['DO_NOW', 'SCHEDULE', 'DELEGATE', 'IGNORE'],
    STATUSES: ['PENDING', 'COMPLETED', 'ARCHIVED'],
    PRIORITIES: ['LOW', 'MEDIUM', 'HIGH', 'CRITICAL']
  },
  BLOCKER: {
    STATUSES: ['OPEN', 'MITIGATING', 'RESOLVED'],
    SEVERITIES: ['LOW', 'MEDIUM', 'HIGH', 'CRITICAL']
  },
  CAREER: {
    TYPES: ['Achievement', 'Feedback', 'Certification', 'Goal']
  },
  PROJECT: {
    // Kinds of events recorded in a project's status history.
    HISTORY_TYPES: ['Status', 'Decision', 'Risk', 'Achievement', 'Feedback', 'Goal', 'Blocker']
  }
};
|