@cccarv82/freya 2.3.13 → 2.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. package/.agent/rules/freya/agents/coach.mdc +7 -16
  2. package/.agent/rules/freya/agents/ingestor.mdc +1 -89
  3. package/.agent/rules/freya/agents/master.mdc +3 -0
  4. package/.agent/rules/freya/agents/oracle.mdc +7 -23
  5. package/cli/web-ui.css +965 -182
  6. package/cli/web-ui.js +551 -173
  7. package/cli/web.js +863 -536
  8. package/package.json +7 -4
  9. package/scripts/build-vector-index.js +85 -0
  10. package/scripts/export-obsidian.js +6 -16
  11. package/scripts/generate-blockers-report.js +5 -17
  12. package/scripts/generate-daily-summary.js +25 -58
  13. package/scripts/generate-executive-report.js +22 -204
  14. package/scripts/generate-sm-weekly-report.js +27 -92
  15. package/scripts/lib/DataLayer.js +92 -0
  16. package/scripts/lib/DataManager.js +198 -0
  17. package/scripts/lib/Embedder.js +59 -0
  18. package/scripts/lib/schema.js +23 -0
  19. package/scripts/migrate-v1-v2.js +184 -0
  20. package/scripts/validate-data.js +48 -51
  21. package/scripts/validate-structure.js +12 -58
  22. package/templates/base/scripts/build-vector-index.js +85 -0
  23. package/templates/base/scripts/export-obsidian.js +143 -0
  24. package/templates/base/scripts/generate-daily-summary.js +25 -58
  25. package/templates/base/scripts/generate-executive-report.js +14 -225
  26. package/templates/base/scripts/generate-sm-weekly-report.js +9 -91
  27. package/templates/base/scripts/index/build-index.js +13 -0
  28. package/templates/base/scripts/index/update-index.js +15 -0
  29. package/templates/base/scripts/lib/DataLayer.js +92 -0
  30. package/templates/base/scripts/lib/DataManager.js +198 -0
  31. package/templates/base/scripts/lib/Embedder.js +59 -0
  32. package/templates/base/scripts/lib/index-utils.js +407 -0
  33. package/templates/base/scripts/lib/schema.js +23 -0
  34. package/templates/base/scripts/lib/search-utils.js +183 -0
  35. package/templates/base/scripts/migrate-v1-v2.js +184 -0
  36. package/templates/base/scripts/validate-data.js +48 -51
  37. package/templates/base/scripts/validate-structure.js +10 -32
@@ -0,0 +1,184 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+ const { defaultInstance: dl } = require('./lib/DataLayer');
4
+ const DataManager = require('./lib/DataManager');
5
+
6
+ const dataDir = path.join(__dirname, '..', 'data');
7
+ const logsDir = path.join(__dirname, '..', 'logs', 'daily');
8
+
9
+ console.log('--- Iniciando Migração V1 (JSON) para V2 (SQLite) ---');
10
+
11
/**
 * Copies every V1 task from the JSON task log into the V2 `tasks` table.
 * Uses INSERT OR IGNORE so re-running the migration is idempotent (rows whose
 * id already exists are skipped). Tasks whose status is not one of the known
 * V1 statuses are reported instead of being dropped silently.
 */
function migrateTasks() {
  console.log('Migrando Tasks...');
  const dm = new DataManager(dataDir, logsDir);
  const tasks = dm.getTasksRaw();
  const stmt = dl.db.prepare(`
    INSERT OR IGNORE INTO tasks
    (id, project_slug, description, category, status, created_at, completed_at, metadata)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
  `);

  let count = 0;
  // V1 statuses, inserted in this fixed order (pending first, archived last).
  const KNOWN_STATUSES = ['PENDING', 'COMPLETED', 'ARCHIVED'];

  for (const status of KNOWN_STATUSES) {
    for (const t of tasks.filter((task) => task.status === status)) {
      // Fields without a dedicated column travel in the JSON metadata blob.
      const meta = { priority: t.priority, streamSlug: t.streamSlug };
      stmt.run(
        t.id,
        t.projectSlug || null,
        t.description,
        t.category,
        t.status,
        t.createdAt || new Date().toISOString(),
        t.completedAt || null,
        JSON.stringify(meta)
      );
      count++;
    }
  }

  // Surface tasks with an unknown status instead of dropping them silently.
  const skipped = tasks.length - count;
  if (skipped > 0) {
    console.warn(`⚠️ ${skipped} task(s) com status desconhecido não foram migradas.`);
  }

  console.log(`✅ ${count} tasks migradas.`);
}
50
+
51
/**
 * Copies V1 blockers into the V2 `blockers` table.
 * Active blockers (OPEN/MITIGATING) are inserted before closed ones
 * (RESOLVED/CLOSED), mirroring the V1 export order. INSERT OR IGNORE keeps
 * the migration idempotent.
 */
function migrateBlockers() {
  console.log('Migrando Blockers...');
  const dm = new DataManager(dataDir, logsDir);
  const blockers = dm.getBlockersRaw();
  const stmt = dl.db.prepare(`
    INSERT OR IGNORE INTO blockers
    (id, project_slug, title, severity, status, owner, next_action, created_at, resolved_at, metadata)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
  `);

  let migrated = 0;

  // Insert one blocker row, defaulting the optional V1 fields.
  const insertBlocker = (b) => {
    stmt.run(
      b.id,
      b.projectSlug || null,
      b.title,
      b.severity || 'MEDIUM',
      b.status || 'OPEN',
      b.owner || null,
      b.nextAction || null,
      b.createdAt || new Date().toISOString(),
      b.resolvedAt || null,
      JSON.stringify({ streamSlug: b.streamSlug })
    );
    migrated++;
  };

  blockers
    .filter((b) => b.status === 'OPEN' || b.status === 'MITIGATING')
    .forEach(insertBlocker);
  blockers
    .filter((b) => b.status === 'RESOLVED' || b.status === 'CLOSED')
    .forEach(insertBlocker);

  console.log(`✅ ${migrated} blockers migrados.`);
}
89
+
90
/**
 * Walks data/Clients/<client>/<project>/status.json files and migrates each
 * project (plus its status history entries) into the V2 `projects` and
 * `project_status_history` tables. Projects whose status.json cannot be
 * parsed are reported and skipped — previously they were dropped silently.
 */
function migrateProjects() {
  console.log('Migrando Projetos...');
  const clientsDir = path.join(dataDir, 'Clients');
  if (!fs.existsSync(clientsDir)) return;

  const insertProj = dl.db.prepare(`
    INSERT OR IGNORE INTO projects (slug, client, name, is_active)
    VALUES (?, ?, ?, ?)
  `);
  const getProjId = dl.db.prepare("SELECT id FROM projects WHERE slug = ?");
  const insertHist = dl.db.prepare(`
    INSERT INTO project_status_history (project_id, status_text, date)
    VALUES (?, ?, ?)
  `);

  let pCount = 0;
  let hCount = 0;

  for (const clientSlug of fs.readdirSync(clientsDir)) {
    const clientPath = path.join(clientsDir, clientSlug);
    if (!fs.statSync(clientPath).isDirectory()) continue;

    for (const projectSlug of fs.readdirSync(clientPath)) {
      const projectPath = path.join(clientPath, projectSlug);
      if (!fs.statSync(projectPath).isDirectory()) continue;

      const statusPath = path.join(projectPath, 'status.json');
      if (!fs.existsSync(statusPath)) continue;

      let pData;
      try {
        pData = JSON.parse(fs.readFileSync(statusPath, 'utf8'));
      } catch (e) {
        // Don't abort the whole migration, but don't lose data silently either.
        console.warn(`⚠️ status.json inválido, projeto ignorado: ${statusPath} (${e.message})`);
        continue;
      }

      const fullSlug = `${clientSlug}-${projectSlug}`;
      // Only an explicit `active: false` marks a project inactive.
      const isActive = pData.active !== false ? 1 : 0;

      insertProj.run(fullSlug, pData.client || clientSlug, pData.project || projectSlug, isActive);
      pCount++;

      const pRow = getProjId.get(fullSlug);
      if (pRow && pData.history && Array.isArray(pData.history)) {
        for (const log of pData.history) {
          // History entries may be bare strings or objects; normalize both.
          let content = '';
          let date = new Date().toISOString();
          if (typeof log === 'string') {
            content = log;
          } else if (typeof log === 'object') {
            content = log.content || log.summary || JSON.stringify(log);
            date = log.date || date;
          }
          insertHist.run(pRow.id, content, date);
          hCount++;
        }
      }
    }
  }

  console.log(`✅ ${pCount} projetos migrados, com ${hCount} atualizações de status.`);
}
152
+
153
/**
 * Imports every daily markdown log (logs/daily/YYYY-MM-DD.md) into the V2
 * `daily_logs` table: one row per file, keyed by the date taken from the
 * file name. INSERT OR IGNORE keeps re-runs idempotent.
 */
function migrateLogs() {
  console.log('Migrando Logs Diários...');
  if (!fs.existsSync(logsDir)) {
    console.log('Nenhum diretório de log encontrado.');
    return;
  }
  const stmt = dl.db.prepare(`
    INSERT OR IGNORE INTO daily_logs (date, raw_markdown)
    VALUES (?, ?)
  `);

  let migrated = 0;
  for (const fileName of fs.readdirSync(logsDir)) {
    if (!fileName.endsWith('.md')) continue;
    const markdown = fs.readFileSync(path.join(logsDir, fileName), 'utf8');
    stmt.run(fileName.replace('.md', ''), markdown);
    migrated++;
  }
  console.log(`✅ ${migrated} logs diários migrados.`);
}
174
+
175
// Run the whole migration atomically: either every table is populated,
// or a thrown error rolls all of it back.
const runMigration = dl.db.transaction(() => {
  migrateProjects();
  migrateTasks();
  migrateBlockers();
  migrateLogs();
});
runMigration();

console.log('--- Migração concluída com sucesso! ---');
dl.close();
@@ -2,6 +2,7 @@ const fs = require('fs');
2
2
  const path = require('path');
3
3
 
4
4
  const { safeReadJson, quarantineCorruptedFile } = require('./lib/fs-utils');
5
+ const SCHEMA = require('./lib/schema');
5
6
 
6
7
  const DATA_DIR = path.join(__dirname, '../data');
7
8
 
@@ -24,14 +25,14 @@ function validateTaskLog(json, file) {
24
25
  if (!task.status) errors.push(`Task[${index}] missing 'status'.`);
25
26
  if (!task.createdAt) errors.push(`Task[${index}] missing 'createdAt'.`);
26
27
 
27
- const validCategories = ['DO_NOW', 'SCHEDULE', 'DELEGATE', 'IGNORE'];
28
+ const validCategories = SCHEMA.TASK.CATEGORIES;
28
29
  if (task.category && !validCategories.includes(task.category)) {
29
- errors.push(`Task[${index}] invalid category '${task.category}'.`);
30
+ errors.push(`Task[${index}] invalid category '${task.category}'.`);
30
31
  }
31
32
 
32
- const validStatuses = ['PENDING', 'COMPLETED', 'ARCHIVED'];
33
+ const validStatuses = SCHEMA.TASK.STATUSES;
33
34
  if (task.status && !validStatuses.includes(task.status)) {
34
- errors.push(`Task[${index}] invalid status '${task.status}'.`);
35
+ errors.push(`Task[${index}] invalid status '${task.status}'.`);
35
36
  }
36
37
  });
37
38
 
@@ -54,9 +55,9 @@ function validateCareerLog(json, file) {
54
55
  if (!entry.type) errors.push(`Entry[${index}] missing 'type'.`);
55
56
  if (!entry.description) errors.push(`Entry[${index}] missing 'description'.`);
56
57
 
57
- const validTypes = ['Achievement', 'Feedback', 'Certification', 'Goal'];
58
+ const validTypes = SCHEMA.CAREER.TYPES;
58
59
  if (entry.type && !validTypes.includes(entry.type)) {
59
- errors.push(`Entry[${index}] invalid type '${entry.type}'.`);
60
+ errors.push(`Entry[${index}] invalid type '${entry.type}'.`);
60
61
  }
61
62
  });
62
63
 
@@ -66,19 +67,19 @@ function validateCareerLog(json, file) {
66
67
  function validateProjectStatus(json, file) {
67
68
  const errors = [];
68
69
  const requiredFields = ['client', 'project', 'active', 'currentStatus', 'lastUpdated', 'history'];
69
-
70
+
70
71
  requiredFields.forEach(field => {
71
- if (json[field] === undefined) errors.push(`Missing field '${field}'.`);
72
+ if (json[field] === undefined) errors.push(`Missing field '${field}'.`);
72
73
  });
73
74
 
74
75
  if (Array.isArray(json.history)) {
75
- json.history.forEach((item, index) => {
76
- if (!item.date) errors.push(`History[${index}] missing 'date'.`);
77
- if (!item.type) errors.push(`History[${index}] missing 'type'.`);
78
- if (!item.content) errors.push(`History[${index}] missing 'content'.`);
79
- });
76
+ json.history.forEach((item, index) => {
77
+ if (!item.date) errors.push(`History[${index}] missing 'date'.`);
78
+ if (!item.type) errors.push(`History[${index}] missing 'type'.`);
79
+ if (!item.content) errors.push(`History[${index}] missing 'content'.`);
80
+ });
80
81
  } else if (json.history !== undefined) {
81
- errors.push(`'history' must be an array.`);
82
+ errors.push(`'history' must be an array.`);
82
83
  }
83
84
 
84
85
  return errors;
@@ -94,8 +95,8 @@ function validateBlockerLog(json, file) {
94
95
  return errors;
95
96
  }
96
97
 
97
- const validStatuses = ['OPEN', 'MITIGATING', 'RESOLVED'];
98
- const validSeverities = ['LOW', 'MEDIUM', 'HIGH', 'CRITICAL'];
98
+ const validStatuses = SCHEMA.BLOCKER.STATUSES;
99
+ const validSeverities = SCHEMA.BLOCKER.SEVERITIES;
99
100
 
100
101
  json.blockers.forEach((b, i) => {
101
102
  const prefix = `Blocker[${i}]`;
@@ -148,53 +149,49 @@ function validateData() {
148
149
 
149
150
  const files = walk(DATA_DIR);
150
151
  console.log(`Found ${files.length} json files.`);
151
-
152
+
152
153
  let errorCount = 0;
153
154
 
154
155
  files.forEach(file => {
155
- const relativePath = path.relative(DATA_DIR, file);
156
- const result = safeReadJson(file);
157
- if (!result.ok) {
158
- if (result.error.type === 'parse') {
159
- quarantineCorruptedFile(file, result.error.message);
160
- console.warn(`⚠️ [${relativePath}] JSON parse failed; quarantined to _corrupted.`);
161
- } else {
162
- console.error(`❌ [${relativePath}] Read failed: ${result.error.message}`);
163
- }
164
- errorCount++;
165
- return;
156
+ const relativePath = path.relative(DATA_DIR, file);
157
+ const result = safeReadJson(file);
158
+ if (!result.ok) {
159
+ if (result.error.type === 'parse') {
160
+ quarantineCorruptedFile(file, result.error.message);
161
+ console.warn(`⚠️ [${relativePath}] JSON parse failed; quarantined to _corrupted.`);
162
+ } else {
163
+ console.error(`❌ [${relativePath}] Read failed: ${result.error.message}`);
166
164
  }
165
+ errorCount++;
166
+ return;
167
+ }
167
168
 
168
- const json = result.json;
169
+ const json = result.json;
169
170
 
170
- let fileErrors = [];
171
+ let fileErrors = [];
171
172
 
172
- // Route validation based on filename/path
173
- if (file.endsWith('task-log.json')) {
174
- fileErrors = validateTaskLog(json, relativePath);
175
- } else if (file.endsWith('career-log.json')) {
176
- fileErrors = validateCareerLog(json, relativePath);
177
- } else if (file.endsWith('status.json')) {
178
- fileErrors = validateProjectStatus(json, relativePath);
179
- } else if (file.endsWith('blocker-log.json')) {
180
- fileErrors = validateBlockerLog(json, relativePath);
181
- } else {
182
- // Optional: warn about unknown files, or ignore
183
- // console.warn(`⚠️ [${relativePath}] Unknown JSON file type. Skipping schema validation.`);
184
- }
173
+ // Route validation based on filename/path
174
+ if (file.endsWith('career-log.json')) {
175
+ fileErrors = validateCareerLog(json, relativePath);
176
+ } else if (file.endsWith('task-log.json') || file.endsWith('status.json') || file.endsWith('blocker-log.json')) {
177
+ // Obsoleted by SQLite, ignore
178
+ } else {
179
+ // Optional: warn about unknown files, or ignore
180
+ // console.warn(`⚠️ [${relativePath}] Unknown JSON file type. Skipping schema validation.`);
181
+ }
185
182
 
186
- if (fileErrors.length > 0) {
187
- console.error(`❌ [${relativePath}] Validation failed:`);
188
- fileErrors.forEach(e => console.error(` - ${e}`));
189
- errorCount++;
190
- }
183
+ if (fileErrors.length > 0) {
184
+ console.error(`❌ [${relativePath}] Validation failed:`);
185
+ fileErrors.forEach(e => console.error(` - ${e}`));
186
+ errorCount++;
187
+ }
191
188
  });
192
189
 
193
190
  if (errorCount === 0) {
194
- console.log('✅ All systems operational');
191
+ console.log('✅ All systems operational');
195
192
  } else {
196
- console.error(`❌ Validation completed with errors in ${errorCount} file(s).`);
197
- process.exit(1);
193
+ console.error(`❌ Validation completed with errors in ${errorCount} file(s).`);
194
+ process.exit(1);
198
195
  }
199
196
 
200
197
  } catch (err) {
@@ -7,6 +7,7 @@ const DATA_DIR = path.join(ROOT, 'data');
7
7
  const DOCS_DIR = path.join(ROOT, 'docs');
8
8
  const CLIENTS_DIR = path.join(DATA_DIR, 'Clients');
9
9
 
10
+ const { defaultInstance: dl } = require('./lib/DataLayer');
10
11
  const errors = [];
11
12
 
12
13
  function exists(p) {
@@ -44,78 +45,31 @@ function parseFrontmatter(text) {
44
45
  return fm;
45
46
  }
46
47
 
47
- function validateDailyLogs() {
48
- if (!exists(LOGS_DIR)) return;
49
- const files = fs.readdirSync(LOGS_DIR)
50
- .filter((f) => /^\d{4}-\d{2}-\d{2}\.md$/.test(f));
51
-
52
- for (const name of files) {
53
- const full = path.join(LOGS_DIR, name);
54
- const body = readFileSafe(full);
55
- const fm = parseFrontmatter(body);
56
- const date = name.replace(/\.md$/, '');
57
- if (!fm) {
58
- errors.push(`Daily log missing frontmatter: ${path.relative(ROOT, full)}`);
59
- continue;
60
- }
61
- const type = String(fm.Type || '').toLowerCase();
62
- const fmDate = String(fm.Date || '').trim();
63
- if (type !== 'daily') {
64
- errors.push(`Daily log frontmatter Type must be 'daily': ${path.relative(ROOT, full)}`);
65
- }
66
- if (fmDate !== date) {
67
- errors.push(`Daily log frontmatter Date must match filename (${date}): ${path.relative(ROOT, full)}`);
68
- }
69
- }
70
- }
71
-
48
+ // Daily logs are now managed by SQLite in V2.
72
49
// Returns the lowercase slugs of all active projects, read straight from
// the V2 SQLite store (V1 walked data/Clients/**/status.json instead).
function collectProjectSlugs() {
  const rows = dl.db.prepare('SELECT slug FROM projects WHERE is_active = 1').all();
  return rows.map((row) => row.slug.toLowerCase());
}
84
52
 
85
53
/**
 * Checks each project's status-history row count in SQLite.
 * Currently a no-op placeholder: history is not enforced for brand-new
 * projects (the intended check is kept commented below). The count
 * statement is prepared once and reused, instead of being re-compiled
 * inside the loop for every project.
 */
function validateProjectStatusHistory() {
  const countHistory = dl.db.prepare(
    'SELECT count(*) as count FROM project_status_history WHERE project_id = ?'
  );
  const projects = dl.db.prepare('SELECT id, name FROM projects').all();
  for (const p of projects) {
    const historyCount = countHistory.get(p.id).count;
    // We don't strictly enforce history to exist for brand new projects, but if we did:
    // if (historyCount === 0) errors.push(`Project missing status history: ${p.name}`);
  }
}
101
61
 
102
62
  function validateTaskProjectSlugs() {
103
63
  const slugs = collectProjectSlugs();
104
- if (!slugs.length) return; // no known slugs -> skip
105
-
106
- const taskFile = path.join(DATA_DIR, 'tasks', 'task-log.json');
107
- if (!exists(taskFile)) return;
64
+ if (!slugs.length) return;
108
65
 
109
- let json;
110
- try { json = JSON.parse(readFileSafe(taskFile) || '{}'); } catch { return; }
111
- const tasks = Array.isArray(json.tasks) ? json.tasks : [];
66
+ const tasks = dl.db.prepare('SELECT id, description, project_slug FROM tasks').all();
112
67
 
113
68
  for (const task of tasks) {
114
- if (!task || typeof task !== 'object') continue;
115
69
  const desc = String(task.description || '').toLowerCase();
116
70
  if (!desc) continue;
117
71
  const mentioned = slugs.find((slug) => desc.includes(slug));
118
- if (mentioned && !task.projectSlug) {
72
+ if (mentioned && !task.project_slug) {
119
73
  errors.push(`Task missing projectSlug for mentioned project (${mentioned}): ${task.id || task.description}`);
120
74
  }
121
75
  }
@@ -135,7 +89,7 @@ function validateDocsHubs() {
135
89
  }
136
90
 
137
91
  function main() {
138
- validateDailyLogs();
92
+ // validateDailyLogs(); removed context since migrated to SQLite
139
93
  validateProjectStatusHistory();
140
94
  validateTaskProjectSlugs();
141
95
  validateDocsHubs();
@@ -0,0 +1,85 @@
1
+ const { defaultInstance: dl } = require('./lib/DataLayer');
2
+ const { defaultEmbedder } = require('./lib/Embedder');
3
+
4
/**
 * Splits text into overlapping chunks of at most `maxChars` characters,
 * preferring to break at a newline (or, failing that, a space) so chunks
 * end on clean boundaries. Consecutive chunks overlap by roughly `overlap`
 * characters so embedding context is preserved across chunk edges.
 *
 * Fixes over the previous version: the loop now stops once a chunk reaches
 * the end of the text (the old `i = end - overlap` step could re-enter the
 * loop and emit a duplicate tail chunk fully contained in the previous one),
 * whitespace-only chunks are skipped, and forward progress is guaranteed
 * even if `overlap >= maxChars`.
 *
 * @param {string} text - Input text (markdown or plain).
 * @param {number} [maxChars=800] - Maximum characters per chunk.
 * @param {number} [overlap=150] - Approximate overlap between chunks.
 * @returns {string[]} Trimmed, non-empty chunks.
 */
function chunkText(text, maxChars = 800, overlap = 150) {
  if (!text) return [];
  const chunks = [];
  let i = 0;
  while (i < text.length) {
    let end = i + maxChars;
    if (end < text.length) {
      // Prefer a newline, then a space, as long as the chunk still
      // carries at least `overlap` characters of fresh content.
      const nextNewline = text.lastIndexOf('\n', end);
      if (nextNewline > i + overlap) {
        end = nextNewline;
      } else {
        const nextSpace = text.lastIndexOf(' ', end);
        if (nextSpace > i + overlap) end = nextSpace;
      }
    }
    const chunk = text.slice(i, end).trim();
    if (chunk) chunks.push(chunk); // skip whitespace-only tails
    if (end >= text.length) break; // final chunk reached the end — done
    i = Math.max(end - overlap, i + 1); // always advance to avoid infinite loops
  }
  return chunks;
}
28
+
29
/**
 * Builds the RAG vector index: for every daily log in SQLite that has no
 * embedding rows yet, chunks its markdown, embeds each chunk, and stores
 * the vectors in `document_embeddings`. Logs that already have any
 * embedding row are skipped (presence check, not a content hash — enough
 * for initialization).
 *
 * The count/insert statements and the insert transaction are now prepared
 * once up front instead of being re-prepared on every loop iteration.
 */
async function buildVectorIndex() {
  console.log('[RAG] Booting Embedding Engine...');
  await defaultEmbedder.init();
  console.log('[RAG] Model ready.');

  const countStmt = dl.db.prepare(
    "SELECT count(*) as count FROM document_embeddings WHERE reference_type = 'daily_log' AND reference_id = ?"
  );
  const insertStmt = dl.db.prepare(`
    INSERT INTO document_embeddings
    (reference_type, reference_id, chunk_index, text_chunk, embedding)
    VALUES (?, ?, ?, ?, ?)
  `);
  // One transaction per log keeps the chunk inserts atomic and fast.
  const insertChunks = dl.db.transaction((logDate, chunksArr) => {
    for (let i = 0; i < chunksArr.length; i++) {
      insertStmt.run('daily_log', logDate, i, chunksArr[i].text, chunksArr[i].buffer);
    }
  });

  const allLogs = dl.db.prepare('SELECT * FROM daily_logs').all();
  let updatedCount = 0;

  for (const log of allLogs) {
    // Assume the log is processed if we already have *any* embedding for it.
    const existing = countStmt.get(log.date);
    if (existing && existing.count > 0) continue;

    console.log(`[RAG] Generating embeddings for Daily Log: ${log.date}`);
    const chunks = chunkText(`Daily Log Date: ${log.date}\n\n${log.raw_markdown}`);

    // Vectors are computed outside the transaction because the embedder is async.
    const preparedChunks = [];
    for (const chunk of chunks) {
      const vector = await defaultEmbedder.embedText(chunk);
      preparedChunks.push({ text: chunk, buffer: defaultEmbedder.vectorToBuffer(vector) });
    }

    insertChunks(log.date, preparedChunks);
    updatedCount++;
  }

  console.log(`[RAG] Vector Index Built. Processed ${updatedCount} un-indexed logs.`);
}
80
+
81
if (require.main === module) {
  // Fail the process (non-zero exit) when the build crashes so CI and shell
  // callers can detect it; the old `.catch(console.error)` still exited 0.
  buildVectorIndex().catch((err) => {
    console.error(err);
    process.exitCode = 1;
  });
}

module.exports = { buildVectorIndex };
@@ -0,0 +1,143 @@
1
+ #!/usr/bin/env node
2
+
3
+ const fs = require('fs');
4
+ const path = require('path');
5
+
6
// True when `p` is accessible on disk (any file type), false otherwise.
function exists(p) {
  try {
    fs.accessSync(p);
    return true;
  } catch {
    return false;
  }
}
9
+
10
// Create directory `p` (and any missing parents); no-op if it already exists.
function ensureDir(p) {
  const opts = { recursive: true };
  fs.mkdirSync(p, opts);
}
13
+
14
+ const { defaultInstance: dl } = require('./lib/DataLayer');
15
+ const DataManager = require('./lib/DataManager');
16
+
17
/**
 * Normalizes an arbitrary title into a safe lowercase file name:
 * keeps only [a-z0-9-_/ ], turns whitespace into hyphens, collapses
 * hyphen runs, strips leading/trailing hyphens and slashes, caps the
 * result at 80 characters, and falls back to 'note' for empty input.
 */
function slugifyFileName(s) {
  let slug = String(s || '').toLowerCase().trim();
  slug = slug.replace(/[^a-z0-9\-_/ ]+/g, '');
  slug = slug.replace(/[\s]+/g, '-');
  slug = slug.replace(/-+/g, '-');
  slug = slug.replace(/^[-/]+|[-/]+$/g, '');
  slug = slug.slice(0, 80);
  return slug || 'note';
}
27
+
28
// Serialize any value as a double-quoted JSON string (always safe inside
// YAML frontmatter). null/undefined become the empty string "".
function yamlEscape(v) {
  if (v == null) return JSON.stringify('');
  return JSON.stringify(String(v));
}
33
+
34
// Render tags as an inline YAML list of quoted strings, dropping falsy
// entries and duplicates while preserving first-seen order.
function fmtTags(tags) {
  const seen = new Set();
  const parts = [];
  for (const tag of tags) {
    if (!tag || seen.has(tag)) continue;
    seen.add(tag);
    parts.push(yamlEscape(tag));
  }
  return `[${parts.join(', ')}]`;
}
38
+
39
// Write a markdown note to <baseDir>/<relPathNoExt>.md, creating any
// missing parent directories, and return the absolute output path.
function writeNote(baseDir, relPathNoExt, md) {
  const outPath = path.join(baseDir, `${relPathNoExt}.md`);
  ensureDir(path.dirname(outPath));
  fs.writeFileSync(outPath, md, 'utf8');
  return outPath;
}
45
+
46
/**
 * Exports F.R.E.Y.A. data to an Obsidian-friendly vault under docs/notes:
 * one incident note per OPEN/MITIGATING blocker, plus a lightweight daily
 * index note. Prints a JSON summary of created files to stdout.
 * Decomposed from one ~95-line function into helpers below; behavior is
 * unchanged.
 */
function main() {
  const workspaceDir = path.resolve(process.cwd());
  const dm = new DataManager(path.join(workspaceDir, 'data'), path.join(workspaceDir, 'logs'));

  const blockers = dm.getBlockersRaw();
  const tasks = dm.getTasksRaw();

  const notesRoot = path.join(workspaceDir, 'docs', 'notes');
  ensureDir(notesRoot);

  const created = exportBlockerNotes(blockers, notesRoot);

  const today = new Date().toISOString().slice(0, 10);
  created.push(writeNote(notesRoot, path.join('daily', today), buildDailyIndex(blockers, tasks, today)));

  process.stdout.write(JSON.stringify({ ok: true, created: created.map((p) => path.relative(workspaceDir, p).replace(/\\/g, '/')) }, null, 2) + '\n');
}

// Writes one incident note per OPEN/MITIGATING blocker under
// notesRoot/incidents/<projectSlug|unclassified>/; returns the created paths.
function exportBlockerNotes(blockers, notesRoot) {
  const created = [];

  for (const b of blockers) {
    if (!b || typeof b !== 'object') continue;
    const status = String(b.status || '').toUpperCase();
    if (status !== 'OPEN' && status !== 'MITIGATING') continue;

    const title = String(b.title || '').trim();
    if (!title) continue;

    const projectSlug = String(b.projectSlug || '').trim();
    const sev = String(b.severity || '').toUpperCase();

    // Tags: one per slug path segment, plus severity, plus 'blocker'.
    const tags = [];
    if (projectSlug) projectSlug.split('/').forEach((p) => tags.push(p));
    if (sev) tags.push(sev.toLowerCase());
    tags.push('blocker');

    const relBase = projectSlug ? path.join('incidents', projectSlug) : path.join('incidents', 'unclassified');
    const relPath = path.join(relBase, slugifyFileName(title));

    // filter(Boolean) drops the empty separator entries and the optional
    // Context/Next-action sections when the blocker lacks those fields.
    const md = [
      '---',
      `type: ${yamlEscape('incident')}`,
      `id: ${yamlEscape(b.id || '')}`,
      `title: ${yamlEscape(title)}`,
      `status: ${yamlEscape(status)}`,
      `severity: ${yamlEscape(sev || '')}`,
      `projectSlug: ${yamlEscape(projectSlug)}`,
      `createdAt: ${yamlEscape(b.createdAt || '')}`,
      `tags: ${fmtTags(tags.map((t) => '#' + String(t).replace(/^#/, '')))}`,
      '---',
      '',
      `# ${title}`,
      '',
      b.description ? `## Context\n${String(b.description).trim()}\n` : '',
      b.nextAction ? `## Next action\n${String(b.nextAction).trim()}\n` : '',
      '',
      '## Links',
      '- Related reports: see `docs/reports/`',
      '- Related tasks: see F.R.E.Y.A. SQLite tasks table',
      ''
    ].filter(Boolean).join('\n');

    created.push(writeNote(notesRoot, relPath, md));
  }

  return created;
}

// Builds the markdown body of the daily index note: up to 20 open blockers
// and up to 20 pending DO_NOW tasks.
function buildDailyIndex(blockers, tasks, today) {
  return [
    '---',
    `type: ${yamlEscape('daily-index')}`,
    `date: ${yamlEscape(today)}`,
    '---',
    '',
    `# Daily Index ${today}`,
    '',
    '## Open blockers',
    ...blockers
      .filter((b) => b && (String(b.status || '').toUpperCase() === 'OPEN' || String(b.status || '').toUpperCase() === 'MITIGATING'))
      .slice(0, 20)
      .map((b) => {
        const ps = b.projectSlug ? ` [${b.projectSlug}]` : '';
        return `- ${String(b.title || '').trim()}${ps}`;
      }),
    '',
    '## DO_NOW tasks',
    ...tasks
      .filter((t) => t && String(t.status || '').toUpperCase() === 'PENDING' && String(t.category || '') === 'DO_NOW')
      .slice(0, 20)
      .map((t) => {
        const ps = t.projectSlug ? ` [${t.projectSlug}]` : '';
        return `- [ ] ${String(t.description || '').trim()}${ps}`;
      }),
    ''
  ].join('\n');
}

main();