@cccarv82/freya 2.16.0 → 2.17.0

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
package/package.json CHANGED
@@ -1,12 +1,11 @@
1
1
  {
2
2
  "name": "@cccarv82/freya",
3
- "version": "2.16.0",
3
+ "version": "2.17.0",
4
4
  "description": "Personal AI Assistant with local-first persistence",
5
5
  "scripts": {
6
6
  "health": "node scripts/validate-data.js && node scripts/validate-structure.js",
7
7
  "migrate": "node scripts/migrate-data.js",
8
- "report": "node scripts/generate-weekly-report.js",
9
- "sm-weekly": "node scripts/generate-sm-weekly-report.js",
8
+ "sm-weekly": "node scripts/generate-sm-weekly-report.js",
10
9
  "daily": "node scripts/generate-daily-summary.js",
11
10
  "status": "node scripts/generate-executive-report.js",
12
11
  "blockers": "node scripts/generate-blockers-report.js",
@@ -26,57 +26,109 @@ function chunkText(text, maxChars = 800, overlap = 150) {
26
26
  return chunks;
27
27
  }
28
28
 
29
+ async function embedAndInsert(refType, refId, text) {
30
+ const existing = dl.db.prepare(
31
+ "SELECT count(*) as count FROM document_embeddings WHERE reference_type = ? AND reference_id = ?"
32
+ ).get(refType, refId);
33
+
34
+ if (existing && existing.count > 0) return false;
35
+
36
+ const chunks = chunkText(text);
37
+ if (chunks.length === 0) return false;
38
+
39
+ const insertStmt = dl.db.prepare(`
40
+ INSERT INTO document_embeddings
41
+ (reference_type, reference_id, chunk_index, text_chunk, embedding)
42
+ VALUES (?, ?, ?, ?, ?)
43
+ `);
44
+
45
+ const insertTx = dl.db.transaction((chunksArr) => {
46
+ for (let i = 0; i < chunksArr.length; i++) {
47
+ insertStmt.run(refType, refId, i, chunksArr[i].text, chunksArr[i].buffer);
48
+ }
49
+ });
50
+
51
+ const preparedChunks = [];
52
+ for (let i = 0; i < chunks.length; i++) {
53
+ const vector = await defaultEmbedder.embedText(chunks[i]);
54
+ const buffer = defaultEmbedder.vectorToBuffer(vector);
55
+ preparedChunks.push({ text: chunks[i], buffer });
56
+ }
57
+
58
+ insertTx(preparedChunks);
59
+ return true;
60
+ }
61
+
29
62
  async function buildVectorIndex() {
30
63
  await ready;
31
64
  console.log('[RAG] Booting Embedding Engine...');
32
65
  await defaultEmbedder.init();
33
66
  console.log('[RAG] Model ready.');
34
67
 
35
- // Find daily logs that haven't been indexed completely
36
- const allLogs = dl.db.prepare('SELECT * FROM daily_logs').all();
37
68
  let updatedCount = 0;
38
69
 
70
+ // 1. Index daily logs
71
+ const allLogs = dl.db.prepare('SELECT * FROM daily_logs').all();
39
72
  for (const log of allLogs) {
40
- // Assume log is processed if we have *any* embedding for it.
41
- // For total correctness, we would compare hash of raw_markdown,
42
- // but skipping if exists is enough for initialization.
43
- const existing = dl.db.prepare(
44
- "SELECT count(*) as count FROM document_embeddings WHERE reference_type = 'daily_log' AND reference_id = ?"
45
- ).get(log.date);
46
-
47
- if (existing && existing.count > 0) {
48
- continue;
73
+ const text = `Daily Log Date: ${log.date}\n\n${log.raw_markdown}`;
74
+ const indexed = await embedAndInsert('daily_log', log.date, text);
75
+ if (indexed) {
76
+ console.log(`[RAG] Indexed daily log: ${log.date}`);
77
+ updatedCount++;
49
78
  }
79
+ }
50
80
 
51
- console.log(`[RAG] Generating embeddings for Daily Log: ${log.date}`);
52
- const chunks = chunkText(`Daily Log Date: ${log.date}\n\n${log.raw_markdown}`);
53
-
54
- const insertStmt = dl.db.prepare(`
55
- INSERT INTO document_embeddings
56
- (reference_type, reference_id, chunk_index, text_chunk, embedding)
57
- VALUES (?, ?, ?, ?, ?)
58
- `);
81
+ // 2. Index tasks
82
+ const allTasks = dl.db.prepare('SELECT * FROM tasks').all();
83
+ for (const t of allTasks) {
84
+ let meta = {};
85
+ try { meta = t.metadata ? JSON.parse(t.metadata) : {}; } catch { meta = {}; }
86
+ const parts = [
87
+ `Task: ${t.description}`,
88
+ t.project_slug ? `Project: ${t.project_slug}` : '',
89
+ `Category: ${t.category}`,
90
+ `Status: ${t.status}`,
91
+ meta.priority ? `Priority: ${meta.priority}` : '',
92
+ meta.streamSlug ? `Stream: ${meta.streamSlug}` : '',
93
+ t.due_date ? `Due: ${t.due_date}` : '',
94
+ t.created_at ? `Created: ${String(t.created_at).slice(0, 10)}` : '',
95
+ t.completed_at ? `Completed: ${String(t.completed_at).slice(0, 10)}` : '',
96
+ meta.comments ? `Comments: ${meta.comments}` : '',
97
+ ].filter(Boolean).join('\n');
59
98
 
60
- // Use transaction for speed
61
- const insertTx = dl.db.transaction((chunksArr) => {
62
- for (let i = 0; i < chunksArr.length; i++) {
63
- insertStmt.run('daily_log', log.date, i, chunksArr[i].text, chunksArr[i].buffer);
64
- }
65
- });
66
-
67
- // Compute vectors asynchronously (since transformers is async) then insert
68
- const preparedChunks = [];
69
- for (let i = 0; i < chunks.length; i++) {
70
- const vector = await defaultEmbedder.embedText(chunks[i]);
71
- const buffer = defaultEmbedder.vectorToBuffer(vector);
72
- preparedChunks.push({ text: chunks[i], buffer });
99
+ const indexed = await embedAndInsert('task', t.id, parts);
100
+ if (indexed) {
101
+ console.log(`[RAG] Indexed task: ${t.id}`);
102
+ updatedCount++;
73
103
  }
104
+ }
74
105
 
75
- insertTx(preparedChunks);
76
- updatedCount++;
106
+ // 3. Index blockers
107
+ const allBlockers = dl.db.prepare('SELECT * FROM blockers').all();
108
+ for (const b of allBlockers) {
109
+ let meta = {};
110
+ try { meta = b.metadata ? JSON.parse(b.metadata) : {}; } catch { meta = {}; }
111
+ const parts = [
112
+ `Blocker: ${b.title}`,
113
+ b.project_slug ? `Project: ${b.project_slug}` : '',
114
+ `Severity: ${b.severity}`,
115
+ `Status: ${b.status}`,
116
+ b.owner ? `Owner: ${b.owner}` : '',
117
+ b.next_action ? `Next Action: ${b.next_action}` : '',
118
+ meta.description ? `Description: ${meta.description}` : '',
119
+ meta.streamSlug ? `Stream: ${meta.streamSlug}` : '',
120
+ b.created_at ? `Created: ${String(b.created_at).slice(0, 10)}` : '',
121
+ b.resolved_at ? `Resolved: ${String(b.resolved_at).slice(0, 10)}` : '',
122
+ ].filter(Boolean).join('\n');
123
+
124
+ const indexed = await embedAndInsert('blocker', b.id, parts);
125
+ if (indexed) {
126
+ console.log(`[RAG] Indexed blocker: ${b.id}`);
127
+ updatedCount++;
128
+ }
77
129
  }
78
130
 
79
- console.log(`[RAG] Vector Index Built. Processed ${updatedCount} un-indexed logs.`);
131
+ console.log(`[RAG] Vector Index Built. Processed ${updatedCount} new items (daily logs + tasks + blockers).`);
80
132
  }
81
133
 
82
134
  if (require.main === module) {
@@ -26,57 +26,109 @@ function chunkText(text, maxChars = 800, overlap = 150) {
26
26
  return chunks;
27
27
  }
28
28
 
29
+ async function embedAndInsert(refType, refId, text) {
30
+ const existing = dl.db.prepare(
31
+ "SELECT count(*) as count FROM document_embeddings WHERE reference_type = ? AND reference_id = ?"
32
+ ).get(refType, refId);
33
+
34
+ if (existing && existing.count > 0) return false;
35
+
36
+ const chunks = chunkText(text);
37
+ if (chunks.length === 0) return false;
38
+
39
+ const insertStmt = dl.db.prepare(`
40
+ INSERT INTO document_embeddings
41
+ (reference_type, reference_id, chunk_index, text_chunk, embedding)
42
+ VALUES (?, ?, ?, ?, ?)
43
+ `);
44
+
45
+ const insertTx = dl.db.transaction((chunksArr) => {
46
+ for (let i = 0; i < chunksArr.length; i++) {
47
+ insertStmt.run(refType, refId, i, chunksArr[i].text, chunksArr[i].buffer);
48
+ }
49
+ });
50
+
51
+ const preparedChunks = [];
52
+ for (let i = 0; i < chunks.length; i++) {
53
+ const vector = await defaultEmbedder.embedText(chunks[i]);
54
+ const buffer = defaultEmbedder.vectorToBuffer(vector);
55
+ preparedChunks.push({ text: chunks[i], buffer });
56
+ }
57
+
58
+ insertTx(preparedChunks);
59
+ return true;
60
+ }
61
+
29
62
  async function buildVectorIndex() {
30
63
  await ready;
31
64
  console.log('[RAG] Booting Embedding Engine...');
32
65
  await defaultEmbedder.init();
33
66
  console.log('[RAG] Model ready.');
34
67
 
35
- // Find daily logs that haven't been indexed completely
36
- const allLogs = dl.db.prepare('SELECT * FROM daily_logs').all();
37
68
  let updatedCount = 0;
38
69
 
70
+ // 1. Index daily logs
71
+ const allLogs = dl.db.prepare('SELECT * FROM daily_logs').all();
39
72
  for (const log of allLogs) {
40
- // Assume log is processed if we have *any* embedding for it.
41
- // For total correctness, we would compare hash of raw_markdown,
42
- // but skipping if exists is enough for initialization.
43
- const existing = dl.db.prepare(
44
- "SELECT count(*) as count FROM document_embeddings WHERE reference_type = 'daily_log' AND reference_id = ?"
45
- ).get(log.date);
46
-
47
- if (existing && existing.count > 0) {
48
- continue;
73
+ const text = `Daily Log Date: ${log.date}\n\n${log.raw_markdown}`;
74
+ const indexed = await embedAndInsert('daily_log', log.date, text);
75
+ if (indexed) {
76
+ console.log(`[RAG] Indexed daily log: ${log.date}`);
77
+ updatedCount++;
49
78
  }
79
+ }
50
80
 
51
- console.log(`[RAG] Generating embeddings for Daily Log: ${log.date}`);
52
- const chunks = chunkText(`Daily Log Date: ${log.date}\n\n${log.raw_markdown}`);
53
-
54
- const insertStmt = dl.db.prepare(`
55
- INSERT INTO document_embeddings
56
- (reference_type, reference_id, chunk_index, text_chunk, embedding)
57
- VALUES (?, ?, ?, ?, ?)
58
- `);
81
+ // 2. Index tasks
82
+ const allTasks = dl.db.prepare('SELECT * FROM tasks').all();
83
+ for (const t of allTasks) {
84
+ let meta = {};
85
+ try { meta = t.metadata ? JSON.parse(t.metadata) : {}; } catch { meta = {}; }
86
+ const parts = [
87
+ `Task: ${t.description}`,
88
+ t.project_slug ? `Project: ${t.project_slug}` : '',
89
+ `Category: ${t.category}`,
90
+ `Status: ${t.status}`,
91
+ meta.priority ? `Priority: ${meta.priority}` : '',
92
+ meta.streamSlug ? `Stream: ${meta.streamSlug}` : '',
93
+ t.due_date ? `Due: ${t.due_date}` : '',
94
+ t.created_at ? `Created: ${String(t.created_at).slice(0, 10)}` : '',
95
+ t.completed_at ? `Completed: ${String(t.completed_at).slice(0, 10)}` : '',
96
+ meta.comments ? `Comments: ${meta.comments}` : '',
97
+ ].filter(Boolean).join('\n');
59
98
 
60
- // Use transaction for speed
61
- const insertTx = dl.db.transaction((chunksArr) => {
62
- for (let i = 0; i < chunksArr.length; i++) {
63
- insertStmt.run('daily_log', log.date, i, chunksArr[i].text, chunksArr[i].buffer);
64
- }
65
- });
66
-
67
- // Compute vectors asynchronously (since transformers is async) then insert
68
- const preparedChunks = [];
69
- for (let i = 0; i < chunks.length; i++) {
70
- const vector = await defaultEmbedder.embedText(chunks[i]);
71
- const buffer = defaultEmbedder.vectorToBuffer(vector);
72
- preparedChunks.push({ text: chunks[i], buffer });
99
+ const indexed = await embedAndInsert('task', t.id, parts);
100
+ if (indexed) {
101
+ console.log(`[RAG] Indexed task: ${t.id}`);
102
+ updatedCount++;
73
103
  }
104
+ }
74
105
 
75
- insertTx(preparedChunks);
76
- updatedCount++;
106
+ // 3. Index blockers
107
+ const allBlockers = dl.db.prepare('SELECT * FROM blockers').all();
108
+ for (const b of allBlockers) {
109
+ let meta = {};
110
+ try { meta = b.metadata ? JSON.parse(b.metadata) : {}; } catch { meta = {}; }
111
+ const parts = [
112
+ `Blocker: ${b.title}`,
113
+ b.project_slug ? `Project: ${b.project_slug}` : '',
114
+ `Severity: ${b.severity}`,
115
+ `Status: ${b.status}`,
116
+ b.owner ? `Owner: ${b.owner}` : '',
117
+ b.next_action ? `Next Action: ${b.next_action}` : '',
118
+ meta.description ? `Description: ${meta.description}` : '',
119
+ meta.streamSlug ? `Stream: ${meta.streamSlug}` : '',
120
+ b.created_at ? `Created: ${String(b.created_at).slice(0, 10)}` : '',
121
+ b.resolved_at ? `Resolved: ${String(b.resolved_at).slice(0, 10)}` : '',
122
+ ].filter(Boolean).join('\n');
123
+
124
+ const indexed = await embedAndInsert('blocker', b.id, parts);
125
+ if (indexed) {
126
+ console.log(`[RAG] Indexed blocker: ${b.id}`);
127
+ updatedCount++;
128
+ }
77
129
  }
78
130
 
79
- console.log(`[RAG] Vector Index Built. Processed ${updatedCount} un-indexed logs.`);
131
+ console.log(`[RAG] Vector Index Built. Processed ${updatedCount} new items (daily logs + tasks + blockers).`);
80
132
  }
81
133
 
82
134
  if (require.main === module) {
@@ -1,128 +0,0 @@
1
- /**
2
- * generate-weekly-report.js
3
- * Generates a weekly Markdown report aggregating Tasks, Blockers, Career entries,
4
- * and Project Updates from the SQLite database.
5
- *
6
- * Usage: node scripts/generate-weekly-report.js
7
- */
8
-
9
- const fs = require('fs');
10
- const path = require('path');
11
-
12
- const { toIsoDate, safeParseToMs } = require('./lib/date-utils');
13
- const DataManager = require('./lib/DataManager');
14
- const { ready } = require('./lib/DataLayer');
15
-
16
- // --- Configuration (BUG-30: use FREYA_WORKSPACE_DIR instead of __dirname) ---
17
- const WORKSPACE_DIR = process.env.FREYA_WORKSPACE_DIR
18
- ? path.resolve(process.env.FREYA_WORKSPACE_DIR)
19
- : path.join(__dirname, '..'); // fallback: scripts/ is one level below repo root
20
-
21
- const REPORT_DIR = path.join(WORKSPACE_DIR, 'docs', 'reports');
22
-
23
- // --- Date Logic ---
24
- const now = new Date();
25
- const oneDay = 24 * 60 * 60 * 1000;
26
-
27
- function isWithinWeek(dateStr) {
28
- const ms = safeParseToMs(dateStr);
29
- if (!Number.isFinite(ms)) return false;
30
- const sevenDaysAgo = now.getTime() - (7 * oneDay);
31
- return ms >= sevenDaysAgo && ms <= now.getTime();
32
- }
33
-
34
- function getFormattedDate() {
35
- return toIsoDate(now);
36
- }
37
-
38
- function getFormattedTime() {
39
- const hh = String(now.getHours()).padStart(2, '0');
40
- const mm = String(now.getMinutes()).padStart(2, '0');
41
- const ss = String(now.getSeconds()).padStart(2, '0');
42
- return `${hh}${mm}${ss}`;
43
- }
44
-
45
- // --- Report Generation ---
46
- async function generateWeeklyReport() {
47
- await ready;
48
-
49
- const start = new Date(now.getTime() - 7 * oneDay);
50
- const end = now;
51
-
52
- const dm = new DataManager();
53
-
54
- // Fetch data from SQLite
55
- const { completed: completedTasks } = dm.getTasks(start, end);
56
- const { open: openBlockers, resolvedRecent } = dm.getBlockers(start, end);
57
- const projectUpdates = dm.getProjectUpdates(start, end);
58
- const careerEntries = dm.getCareerEntries ? dm.getCareerEntries(start, end) : [];
59
-
60
- // Ensure output dir exists
61
- if (!fs.existsSync(REPORT_DIR)) {
62
- fs.mkdirSync(REPORT_DIR, { recursive: true });
63
- }
64
-
65
- const reportDate = getFormattedDate();
66
- const reportTime = getFormattedTime();
67
- let report = `# Weekly Report - ${reportDate}\n\n`;
68
-
69
- // Projects
70
- report += '## Project Updates\n';
71
- if (projectUpdates.length > 0) {
72
- projectUpdates.forEach(p => {
73
- report += `### ${p.client || 'Unknown'} - ${p.project || p.slug || 'Unknown'}\n`;
74
- const events = Array.isArray(p.events) ? p.events : [];
75
- events.forEach(e => {
76
- const dateStr = e.date ? String(e.date).slice(0, 10) : 'Unknown Date';
77
- report += `- **${dateStr}**: ${e.content || ''}\n`;
78
- });
79
- report += '\n';
80
- });
81
- } else {
82
- report += 'No project updates recorded this week.\n\n';
83
- }
84
-
85
- // Completed Tasks
86
- report += '## Completed Tasks\n';
87
- if (completedTasks.length > 0) {
88
- completedTasks.forEach(t => {
89
- const projectTag = t.projectSlug || t.project_slug ? `[${t.projectSlug || t.project_slug}] ` : '';
90
- report += `- ${projectTag}${t.description}\n`;
91
- });
92
- } else {
93
- report += 'No tasks completed this week.\n';
94
- }
95
- report += '\n';
96
-
97
- // Open Blockers
98
- report += '## Open Blockers\n';
99
- if (openBlockers.length > 0) {
100
- openBlockers.forEach(b => {
101
- const sev = b.severity ? `[${b.severity}] ` : '';
102
- report += `- ${sev}${b.title}\n`;
103
- });
104
- } else {
105
- report += 'No open blockers.\n';
106
- }
107
- report += '\n';
108
-
109
- // Career entries (if DataManager supports it)
110
- if (Array.isArray(careerEntries) && careerEntries.length > 0) {
111
- report += '## Career Highlights\n';
112
- careerEntries.forEach(e => {
113
- report += `- **[${e.type || 'Note'}]**: ${e.description || e.content || ''}\n`;
114
- });
115
- report += '\n';
116
- }
117
-
118
- // 3. Save and Output
119
- const outputPath = path.join(REPORT_DIR, `weekly-${reportDate}-${reportTime}.md`);
120
- fs.writeFileSync(outputPath, report);
121
-
122
- console.log(`Report generated at: ${outputPath}`);
123
- console.log('---------------------------------------------------');
124
- console.log(report);
125
- console.log('---------------------------------------------------');
126
- }
127
-
128
- generateWeeklyReport().catch(err => { console.error(err); process.exit(1); });