@cccarv82/freya 2.3.13 → 2.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37) hide show
  1. package/.agent/rules/freya/agents/coach.mdc +7 -16
  2. package/.agent/rules/freya/agents/ingestor.mdc +1 -89
  3. package/.agent/rules/freya/agents/master.mdc +3 -0
  4. package/.agent/rules/freya/agents/oracle.mdc +7 -23
  5. package/cli/web-ui.css +965 -182
  6. package/cli/web-ui.js +551 -173
  7. package/cli/web.js +863 -536
  8. package/package.json +7 -4
  9. package/scripts/build-vector-index.js +85 -0
  10. package/scripts/export-obsidian.js +6 -16
  11. package/scripts/generate-blockers-report.js +5 -17
  12. package/scripts/generate-daily-summary.js +25 -58
  13. package/scripts/generate-executive-report.js +22 -204
  14. package/scripts/generate-sm-weekly-report.js +27 -92
  15. package/scripts/lib/DataLayer.js +92 -0
  16. package/scripts/lib/DataManager.js +198 -0
  17. package/scripts/lib/Embedder.js +59 -0
  18. package/scripts/lib/schema.js +23 -0
  19. package/scripts/migrate-v1-v2.js +184 -0
  20. package/scripts/validate-data.js +48 -51
  21. package/scripts/validate-structure.js +12 -58
  22. package/templates/base/scripts/build-vector-index.js +85 -0
  23. package/templates/base/scripts/export-obsidian.js +143 -0
  24. package/templates/base/scripts/generate-daily-summary.js +25 -58
  25. package/templates/base/scripts/generate-executive-report.js +14 -225
  26. package/templates/base/scripts/generate-sm-weekly-report.js +9 -91
  27. package/templates/base/scripts/index/build-index.js +13 -0
  28. package/templates/base/scripts/index/update-index.js +15 -0
  29. package/templates/base/scripts/lib/DataLayer.js +92 -0
  30. package/templates/base/scripts/lib/DataManager.js +198 -0
  31. package/templates/base/scripts/lib/Embedder.js +59 -0
  32. package/templates/base/scripts/lib/index-utils.js +407 -0
  33. package/templates/base/scripts/lib/schema.js +23 -0
  34. package/templates/base/scripts/lib/search-utils.js +183 -0
  35. package/templates/base/scripts/migrate-v1-v2.js +184 -0
  36. package/templates/base/scripts/validate-data.js +48 -51
  37. package/templates/base/scripts/validate-structure.js +10 -32
@@ -0,0 +1,183 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+
4
// Regexes recognizing well-known ticket/record identifiers in free text.
const ID_PATTERNS = [
  /\bPTI\d{4,}-\d+\b/gi,
  /\bINC\d+\b/gi,
  /\bCHG\d+\b/gi
];

// File extensions considered searchable plain text.
const TEXT_EXTS = new Set(['.md', '.txt', '.log', '.json', '.yaml', '.yml']);

/**
 * Pulls ticket-style identifiers (PTI…, INC…, CHG…) out of a query string.
 * @param {string} query - Free-form search text (nullish tolerated).
 * @returns {string[]} Unique identifiers, upper-cased, in first-seen order.
 */
function extractIdTokens(query) {
  const text = String(query || '');
  const found = new Set();
  ID_PATTERNS.forEach((pattern) => {
    (text.match(pattern) || []).forEach((hit) => found.add(hit.toUpperCase()));
  });
  return [...found];
}
23
+
24
/**
 * Splits a query into word-ish tokens: runs of letters, digits, '_' or '-'
 * that are at least two characters long.
 * @param {string} query - Free-form search text (nullish tolerated).
 * @returns {string[]} Matched tokens in order of appearance.
 */
function tokenizeQuery(query) {
  const text = String(query || '');
  return text.match(/[A-Za-z0-9_-]{2,}/g) || [];
}
34
+
35
/**
 * Recursively collects text files (extensions in TEXT_EXTS) under a directory.
 * Directories that cannot be read are skipped silently.
 * @param {string} dir - Directory to walk.
 * @param {string[]} [files] - Accumulator for matched paths.
 * @returns {string[]} The accumulator with matching file paths appended.
 */
function listFilesRecursive(dir, files = []) {
  let entries = [];
  try {
    entries = fs.readdirSync(dir, { withFileTypes: true });
  } catch {
    return files; // unreadable directory — best-effort walk
  }
  entries.forEach((entry) => {
    const fullPath = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      listFilesRecursive(fullPath, files);
      return;
    }
    if (entry.isFile() && TEXT_EXTS.has(path.extname(entry.name).toLowerCase())) {
      files.push(fullPath);
    }
  });
  return files;
}
53
+
54
/**
 * Formats a millisecond timestamp as a local-time YYYY-MM-DD string.
 * @param {number} ms - Milliseconds since the Unix epoch.
 * @returns {string} Local date string, or '' when the timestamp is invalid.
 */
function toDateString(ms) {
  try {
    const when = new Date(ms);
    if (Number.isNaN(when.getTime())) return '';
    const pad = (n) => String(n).padStart(2, '0');
    return `${when.getFullYear()}-${pad(when.getMonth() + 1)}-${pad(when.getDate())}`;
  } catch {
    return ''; // e.g. new Date(Symbol) throws
  }
}
66
+
67
/**
 * Derives a YYYY-MM-DD date for a file: prefers a date embedded in the path,
 * falling back to the file's modification time.
 * @param {string} filePath - Path that may contain a YYYY-MM-DD segment.
 * @param {number} mtimeMs - Fallback modification time (ms since epoch).
 * @returns {string} Date string, or '' when neither source yields one.
 */
function inferDateFromPath(filePath, mtimeMs) {
  const embedded = String(filePath).match(/\b(\d{4}-\d{2}-\d{2})\b/);
  return embedded && embedded[1] ? embedded[1] : toDateString(mtimeMs);
}
72
+
73
/**
 * Finds the earliest position at which any needle occurs in the text.
 * @param {string} textLower - Haystack (expected lower-cased by caller).
 * @param {string[]} needles - Substrings to look for; falsy entries skipped.
 * @returns {number} Smallest matching index, or -1 when nothing matches.
 */
function findFirstMatchIndex(textLower, needles) {
  let earliest = -1;
  needles.filter(Boolean).forEach((needle) => {
    const at = textLower.indexOf(needle);
    if (at !== -1 && (earliest === -1 || at < earliest)) earliest = at;
  });
  return earliest;
}
82
+
83
/**
 * Builds a short display snippet around a match position.
 * With no match (index < 0) returns the whitespace-collapsed text, capped at
 * 220 characters with a trailing ellipsis.
 * @param {string} text - Full source text.
 * @param {number} index - Match position, or negative for "no match".
 * @param {number} length - Approximate match length (extends the window).
 * @returns {string} Whitespace-collapsed snippet, ellipsized when truncated.
 */
function buildSnippet(text, index, length) {
  const raw = String(text || '');
  if (index < 0) {
    const collapsed = raw.replace(/\s+/g, ' ').trim();
    return collapsed.length > 220 ? `${collapsed.slice(0, 220)}…` : collapsed;
  }
  // Window: 80 chars of left context, match, 120 chars of right context.
  const from = Math.max(0, index - 80);
  const to = Math.min(raw.length, index + length + 120);
  const core = raw.slice(from, to).replace(/\s+/g, ' ').trim();
  const prefix = from > 0 ? '…' : '';
  const suffix = to < raw.length ? '…' : '';
  return prefix + core + suffix;
}
96
+
97
/**
 * Scores a document against a query: +10 for a whole-query hit, +2 per word
 * token found, +100 per identifier token found.
 * @param {string} textLower - Lower-cased document text.
 * @param {string} queryLower - Lower-cased full query.
 * @param {string[]} tokensLower - Lower-cased word tokens.
 * @param {string[]} idTokensLower - Lower-cased identifier tokens.
 * @returns {number} Accumulated relevance score (0 = no match).
 */
function scoreText(textLower, queryLower, tokensLower, idTokensLower) {
  const hits = (needles, weight) =>
    needles.reduce((sum, n) => (n && textLower.includes(n) ? sum + weight : sum), 0);
  const wholeQuery = queryLower && textLower.includes(queryLower) ? 10 : 0;
  return wholeQuery + hits(tokensLower, 2) + hits(idTokensLower, 100);
}
110
+
111
/**
 * Ranked full-text search across well-known workspace directories.
 *
 * Scans logs/daily, data/tasks, data/Clients and docs/reports for text files,
 * scores each one (see scoreText) and returns the top matches sorted by score
 * descending, ties broken by date (newest first).
 *
 * @param {string} workspaceDir - Workspace root directory.
 * @param {string} query - Free-form search text; blank/nullish yields [].
 * @param {Object} [opts]
 * @param {number} [opts.limit=8] - Max results, clamped to 1..20.
 * @param {number} [opts.maxSize=2097152] - Skip files larger than this many
 *   bytes (floor 1024).
 * @returns {Array<{file: string, date: string, score: number, snippet: string}>}
 */
function searchWorkspace(workspaceDir, query, opts = {}) {
  const q = String(query || '').trim();
  if (!q) return [];

  // BUGFIX: Number(...) on a non-numeric option yields NaN, which previously
  // made results.slice(0, NaN) return an empty list and disabled the size cap
  // (st.size > NaN is always false). The trailing `|| N` restores defaults.
  const limit = Math.max(1, Math.min(20, Number(opts.limit || 8) || 8));
  const maxSize = Math.max(1024, Number(opts.maxSize || 2 * 1024 * 1024) || 2 * 1024 * 1024);

  const targetDirs = [
    path.join(workspaceDir, 'logs', 'daily'),
    path.join(workspaceDir, 'data', 'tasks'),
    path.join(workspaceDir, 'data', 'Clients'),
    path.join(workspaceDir, 'docs', 'reports')
  ];

  const idTokens = extractIdTokens(q);
  const tokens = tokenizeQuery(q);
  const tokensLower = tokens.map((t) => t.toLowerCase());
  const idTokensLower = idTokens.map((t) => t.toLowerCase());
  const queryLower = q.toLowerCase();

  const results = [];
  for (const dir of targetDirs) {
    if (!fs.existsSync(dir)) continue;
    const files = listFilesRecursive(dir, []);
    for (const file of files) {
      let st;
      try {
        st = fs.statSync(file);
      } catch {
        continue; // file vanished between listing and stat — skip
      }
      if (!st.isFile() || st.size > maxSize) continue;
      let text;
      try {
        text = fs.readFileSync(file, 'utf8');
      } catch {
        continue; // unreadable file — best-effort search, skip silently
      }
      // A NUL byte means the file is almost certainly binary despite its
      // text extension.
      if (!text || text.includes('\u0000')) continue;
      const textLower = text.toLowerCase();
      const score = scoreText(textLower, queryLower, tokensLower, idTokensLower);
      if (score <= 0) continue;

      // The snippet anchors on the earliest occurrence of any needle.
      const needles = [];
      if (queryLower) needles.push(queryLower);
      for (const t of tokensLower) needles.push(t);
      for (const id of idTokensLower) needles.push(id);
      const idx = findFirstMatchIndex(textLower, needles);
      const snippet = buildSnippet(text, idx, queryLower.length || 12);
      const relPath = path.relative(workspaceDir, file).replace(/\\/g, '/');
      const date = inferDateFromPath(relPath, st.mtimeMs);

      results.push({
        file: relPath,
        date,
        score,
        snippet
      });
    }
  }

  // Highest score first; equal scores ordered newest-date first.
  results.sort((a, b) => {
    if (b.score !== a.score) return b.score - a.score;
    return String(b.date || '').localeCompare(String(a.date || ''));
  });

  return results.slice(0, limit);
}
179
+
180
// Public API: only identifier extraction and the search entry point are
// exported; the scoring/snippet/walk helpers above are internal.
module.exports = {
  extractIdTokens,
  searchWorkspace
};
@@ -0,0 +1,184 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+ const { defaultInstance: dl } = require('./lib/DataLayer');
4
+ const DataManager = require('./lib/DataManager');
5
+
6
+ const dataDir = path.join(__dirname, '..', 'data');
7
+ const logsDir = path.join(__dirname, '..', 'logs', 'daily');
8
+
9
+ console.log('--- Iniciando Migração V1 (JSON) para V2 (SQLite) ---');
10
+
11
/**
 * Copies V1 JSON tasks into the SQLite `tasks` table.
 * Only PENDING, COMPLETED and ARCHIVED tasks are migrated (in that order);
 * re-runs are safe because of INSERT OR IGNORE.
 */
function migrateTasks() {
  console.log('Migrando Tasks...');
  const manager = new DataManager(dataDir, logsDir);
  const allTasks = manager.getTasksRaw();
  const insertTask = dl.db.prepare(`
    INSERT OR IGNORE INTO tasks
    (id, project_slug, description, category, status, created_at, completed_at, metadata)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
  `);

  let migrated = 0;
  const insertAll = (taskList) => {
    taskList.forEach((task) => {
      insertTask.run(
        task.id,
        task.projectSlug || null,
        task.description,
        task.category,
        task.status,
        task.createdAt || new Date().toISOString(),
        task.completedAt || null,
        // Fields without a dedicated column are preserved as JSON metadata.
        JSON.stringify({ priority: task.priority, streamSlug: task.streamSlug })
      );
      migrated++;
    });
  };

  // Same processing order as V1: PENDING, then COMPLETED, then ARCHIVED.
  ['PENDING', 'COMPLETED', 'ARCHIVED'].forEach((status) => {
    insertAll(allTasks.filter((t) => t.status === status));
  });

  console.log(`✅ ${migrated} tasks migradas.`);
}
50
+
51
/**
 * Copies V1 JSON blockers into the SQLite `blockers` table.
 * OPEN/MITIGATING blockers are migrated first, then RESOLVED/CLOSED ones;
 * re-runs are safe because of INSERT OR IGNORE.
 */
function migrateBlockers() {
  console.log('Migrando Blockers...');
  const manager = new DataManager(dataDir, logsDir);
  const allBlockers = manager.getBlockersRaw();
  const insertBlocker = dl.db.prepare(`
    INSERT OR IGNORE INTO blockers
    (id, project_slug, title, severity, status, owner, next_action, created_at, resolved_at, metadata)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
  `);

  let migrated = 0;
  const insertAll = (blockerList) => {
    blockerList.forEach((blocker) => {
      insertBlocker.run(
        blocker.id,
        blocker.projectSlug || null,
        blocker.title,
        blocker.severity || 'MEDIUM',
        blocker.status || 'OPEN',
        blocker.owner || null,
        blocker.nextAction || null,
        blocker.createdAt || new Date().toISOString(),
        blocker.resolvedAt || null,
        // Fields without a dedicated column are preserved as JSON metadata.
        JSON.stringify({ streamSlug: blocker.streamSlug })
      );
      migrated++;
    });
  };

  // Same ordering as V1: active blockers first, then resolved/closed ones.
  insertAll(allBlockers.filter((b) => b.status === 'OPEN' || b.status === 'MITIGATING'));
  insertAll(allBlockers.filter((b) => b.status === 'RESOLVED' || b.status === 'CLOSED'));

  console.log(`✅ ${migrated} blockers migrados.`);
}
89
+
90
/**
 * Migrates per-project status.json files into the `projects` and
 * `project_status_history` tables.
 *
 * Walks data/Clients/<client>/<project>/status.json; unparseable files are
 * skipped. The project slug becomes "<client>-<project>"; history entries may
 * be plain strings or objects with content/summary/date fields.
 */
function migrateProjects() {
  console.log('Migrando Projetos...');
  const clientsDir = path.join(dataDir, 'Clients');
  if (!fs.existsSync(clientsDir)) return;

  const insertProj = dl.db.prepare(`
    INSERT OR IGNORE INTO projects (slug, client, name, is_active)
    VALUES (?, ?, ?, ?)
  `);

  const getProjId = dl.db.prepare("SELECT id FROM projects WHERE slug = ?");

  const insertHist = dl.db.prepare(`
    INSERT INTO project_status_history (project_id, status_text, date)
    VALUES (?, ?, ?)
  `);

  let pCount = 0;
  let hCount = 0;

  for (const clientSlug of fs.readdirSync(clientsDir)) {
    const clientPath = path.join(clientsDir, clientSlug);
    if (!fs.statSync(clientPath).isDirectory()) continue;

    for (const projectSlug of fs.readdirSync(clientPath)) {
      const projectPath = path.join(clientPath, projectSlug);
      if (!fs.statSync(projectPath).isDirectory()) continue;

      const statusPath = path.join(projectPath, 'status.json');
      if (!fs.existsSync(statusPath)) continue;

      let pData;
      try {
        pData = JSON.parse(fs.readFileSync(statusPath, 'utf8'));
      } catch (e) { continue; } // corrupt status.json — skip this project

      const fullSlug = `${clientSlug}-${projectSlug}`;
      // Projects are active unless explicitly flagged with active: false.
      const isActive = pData.active !== false ? 1 : 0;

      insertProj.run(fullSlug, pData.client || clientSlug, pData.project || projectSlug, isActive);
      pCount++;

      const pRow = getProjId.get(fullSlug);
      if (pRow && Array.isArray(pData.history)) {
        for (const log of pData.history) {
          let content = '';
          let date = new Date().toISOString();
          if (typeof log === 'string') {
            content = log;
          } else if (log && typeof log === 'object') {
            // BUGFIX: typeof null === 'object', so a null history entry
            // previously crashed the whole migration on log.content; the
            // truthiness guard makes null entries migrate as empty text.
            content = log.content || log.summary || JSON.stringify(log);
            date = log.date || date;
          }
          insertHist.run(pRow.id, content, date);
          hCount++;
        }
      }
    }
  }

  console.log(`✅ ${pCount} projetos migrados, com ${hCount} atualizações de status.`);
}
152
+
153
/**
 * Loads every logs/daily/*.md file into the `daily_logs` table, keyed by the
 * date taken from the file name; re-runs are safe via INSERT OR IGNORE.
 */
function migrateLogs() {
  console.log('Migrando Logs Diários...');
  if (!fs.existsSync(logsDir)) {
    console.log('Nenhum diretório de log encontrado.');
    return;
  }
  const files = fs.readdirSync(logsDir).filter(f => f.endsWith('.md'));
  const stmt = dl.db.prepare(`
    INSERT OR IGNORE INTO daily_logs (date, raw_markdown)
    VALUES (?, ?)
  `);

  let count = 0;
  for (const file of files) {
    // BUGFIX: file.replace('.md', '') removed the FIRST '.md' in the name,
    // mangling any name containing '.md' before the extension;
    // path.basename(file, '.md') strips only the trailing extension.
    const date = path.basename(file, '.md');
    const content = fs.readFileSync(path.join(logsDir, file), 'utf8');
    stmt.run(date, content);
    count++;
  }
  console.log(`✅ ${count} logs diários migrados.`);
}
174
+
175
// Transaction wrapper: dl.db.transaction(fn) returns a callable that runs fn
// inside a single database transaction (note the trailing () invoking it), so
// a thrown error rolls back every migration step at once.
// Steps run in order: projects, tasks, blockers, daily logs.
dl.db.transaction(() => {
  migrateProjects();
  migrateTasks();
  migrateBlockers();
  migrateLogs();
})();

console.log('--- Migração concluída com sucesso! ---');
// Release the database handle; only reached when the transaction committed.
dl.close();
@@ -2,6 +2,7 @@ const fs = require('fs');
2
2
  const path = require('path');
3
3
 
4
4
  const { safeReadJson, quarantineCorruptedFile } = require('./lib/fs-utils');
5
+ const SCHEMA = require('./lib/schema');
5
6
 
6
7
  const DATA_DIR = path.join(__dirname, '../data');
7
8
 
@@ -24,14 +25,14 @@ function validateTaskLog(json, file) {
24
25
  if (!task.status) errors.push(`Task[${index}] missing 'status'.`);
25
26
  if (!task.createdAt) errors.push(`Task[${index}] missing 'createdAt'.`);
26
27
 
27
- const validCategories = ['DO_NOW', 'SCHEDULE', 'DELEGATE', 'IGNORE'];
28
+ const validCategories = SCHEMA.TASK.CATEGORIES;
28
29
  if (task.category && !validCategories.includes(task.category)) {
29
- errors.push(`Task[${index}] invalid category '${task.category}'.`);
30
+ errors.push(`Task[${index}] invalid category '${task.category}'.`);
30
31
  }
31
32
 
32
- const validStatuses = ['PENDING', 'COMPLETED', 'ARCHIVED'];
33
+ const validStatuses = SCHEMA.TASK.STATUSES;
33
34
  if (task.status && !validStatuses.includes(task.status)) {
34
- errors.push(`Task[${index}] invalid status '${task.status}'.`);
35
+ errors.push(`Task[${index}] invalid status '${task.status}'.`);
35
36
  }
36
37
  });
37
38
 
@@ -54,9 +55,9 @@ function validateCareerLog(json, file) {
54
55
  if (!entry.type) errors.push(`Entry[${index}] missing 'type'.`);
55
56
  if (!entry.description) errors.push(`Entry[${index}] missing 'description'.`);
56
57
 
57
- const validTypes = ['Achievement', 'Feedback', 'Certification', 'Goal'];
58
+ const validTypes = SCHEMA.CAREER.TYPES;
58
59
  if (entry.type && !validTypes.includes(entry.type)) {
59
- errors.push(`Entry[${index}] invalid type '${entry.type}'.`);
60
+ errors.push(`Entry[${index}] invalid type '${entry.type}'.`);
60
61
  }
61
62
  });
62
63
 
@@ -66,19 +67,19 @@ function validateCareerLog(json, file) {
66
67
  function validateProjectStatus(json, file) {
67
68
  const errors = [];
68
69
  const requiredFields = ['client', 'project', 'active', 'currentStatus', 'lastUpdated', 'history'];
69
-
70
+
70
71
  requiredFields.forEach(field => {
71
- if (json[field] === undefined) errors.push(`Missing field '${field}'.`);
72
+ if (json[field] === undefined) errors.push(`Missing field '${field}'.`);
72
73
  });
73
74
 
74
75
  if (Array.isArray(json.history)) {
75
- json.history.forEach((item, index) => {
76
- if (!item.date) errors.push(`History[${index}] missing 'date'.`);
77
- if (!item.type) errors.push(`History[${index}] missing 'type'.`);
78
- if (!item.content) errors.push(`History[${index}] missing 'content'.`);
79
- });
76
+ json.history.forEach((item, index) => {
77
+ if (!item.date) errors.push(`History[${index}] missing 'date'.`);
78
+ if (!item.type) errors.push(`History[${index}] missing 'type'.`);
79
+ if (!item.content) errors.push(`History[${index}] missing 'content'.`);
80
+ });
80
81
  } else if (json.history !== undefined) {
81
- errors.push(`'history' must be an array.`);
82
+ errors.push(`'history' must be an array.`);
82
83
  }
83
84
 
84
85
  return errors;
@@ -94,8 +95,8 @@ function validateBlockerLog(json, file) {
94
95
  return errors;
95
96
  }
96
97
 
97
- const validStatuses = ['OPEN', 'MITIGATING', 'RESOLVED'];
98
- const validSeverities = ['LOW', 'MEDIUM', 'HIGH', 'CRITICAL'];
98
+ const validStatuses = SCHEMA.BLOCKER.STATUSES;
99
+ const validSeverities = SCHEMA.BLOCKER.SEVERITIES;
99
100
 
100
101
  json.blockers.forEach((b, i) => {
101
102
  const prefix = `Blocker[${i}]`;
@@ -148,53 +149,49 @@ function validateData() {
148
149
 
149
150
  const files = walk(DATA_DIR);
150
151
  console.log(`Found ${files.length} json files.`);
151
-
152
+
152
153
  let errorCount = 0;
153
154
 
154
155
  files.forEach(file => {
155
- const relativePath = path.relative(DATA_DIR, file);
156
- const result = safeReadJson(file);
157
- if (!result.ok) {
158
- if (result.error.type === 'parse') {
159
- quarantineCorruptedFile(file, result.error.message);
160
- console.warn(`⚠️ [${relativePath}] JSON parse failed; quarantined to _corrupted.`);
161
- } else {
162
- console.error(`❌ [${relativePath}] Read failed: ${result.error.message}`);
163
- }
164
- errorCount++;
165
- return;
156
+ const relativePath = path.relative(DATA_DIR, file);
157
+ const result = safeReadJson(file);
158
+ if (!result.ok) {
159
+ if (result.error.type === 'parse') {
160
+ quarantineCorruptedFile(file, result.error.message);
161
+ console.warn(`⚠️ [${relativePath}] JSON parse failed; quarantined to _corrupted.`);
162
+ } else {
163
+ console.error(`❌ [${relativePath}] Read failed: ${result.error.message}`);
166
164
  }
165
+ errorCount++;
166
+ return;
167
+ }
167
168
 
168
- const json = result.json;
169
+ const json = result.json;
169
170
 
170
- let fileErrors = [];
171
+ let fileErrors = [];
171
172
 
172
- // Route validation based on filename/path
173
- if (file.endsWith('task-log.json')) {
174
- fileErrors = validateTaskLog(json, relativePath);
175
- } else if (file.endsWith('career-log.json')) {
176
- fileErrors = validateCareerLog(json, relativePath);
177
- } else if (file.endsWith('status.json')) {
178
- fileErrors = validateProjectStatus(json, relativePath);
179
- } else if (file.endsWith('blocker-log.json')) {
180
- fileErrors = validateBlockerLog(json, relativePath);
181
- } else {
182
- // Optional: warn about unknown files, or ignore
183
- // console.warn(`⚠️ [${relativePath}] Unknown JSON file type. Skipping schema validation.`);
184
- }
173
+ // Route validation based on filename/path
174
+ if (file.endsWith('career-log.json')) {
175
+ fileErrors = validateCareerLog(json, relativePath);
176
+ } else if (file.endsWith('task-log.json') || file.endsWith('status.json') || file.endsWith('blocker-log.json')) {
177
+ // Obsoleted by SQLite, ignore
178
+ } else {
179
+ // Optional: warn about unknown files, or ignore
180
+ // console.warn(`⚠️ [${relativePath}] Unknown JSON file type. Skipping schema validation.`);
181
+ }
185
182
 
186
- if (fileErrors.length > 0) {
187
- console.error(`❌ [${relativePath}] Validation failed:`);
188
- fileErrors.forEach(e => console.error(` - ${e}`));
189
- errorCount++;
190
- }
183
+ if (fileErrors.length > 0) {
184
+ console.error(`❌ [${relativePath}] Validation failed:`);
185
+ fileErrors.forEach(e => console.error(` - ${e}`));
186
+ errorCount++;
187
+ }
191
188
  });
192
189
 
193
190
  if (errorCount === 0) {
194
- console.log('✅ All systems operational');
191
+ console.log('✅ All systems operational');
195
192
  } else {
196
- console.error(`❌ Validation completed with errors in ${errorCount} file(s).`);
197
- process.exit(1);
193
+ console.error(`❌ Validation completed with errors in ${errorCount} file(s).`);
194
+ process.exit(1);
198
195
  }
199
196
 
200
197
  } catch (err) {
@@ -7,6 +7,7 @@ const DATA_DIR = path.join(ROOT, 'data');
7
7
  const DOCS_DIR = path.join(ROOT, 'docs');
8
8
  const CLIENTS_DIR = path.join(DATA_DIR, 'Clients');
9
9
 
10
+ const { defaultInstance: dl } = require('./lib/DataLayer');
10
11
  const errors = [];
11
12
 
12
13
  function exists(p) {
@@ -70,52 +71,29 @@ function validateDailyLogs() {
70
71
  }
71
72
 
72
73
  function collectProjectSlugs() {
73
- if (!exists(CLIENTS_DIR)) return [];
74
- const slugs = [];
75
- const files = walk(CLIENTS_DIR).filter((f) => f.endsWith('status.json'));
76
- for (const file of files) {
77
- const rel = path.relative(CLIENTS_DIR, path.dirname(file));
78
- if (!rel) continue;
79
- const slug = rel.split(path.sep).join('/').toLowerCase();
80
- slugs.push(slug);
81
- }
82
- return Array.from(new Set(slugs));
74
+ return dl.db.prepare('SELECT slug FROM projects WHERE is_active = 1').all().map(p => p.slug.toLowerCase());
83
75
  }
84
76
 
85
77
  function validateProjectStatusHistory() {
86
- if (!exists(CLIENTS_DIR)) return;
87
- const files = walk(CLIENTS_DIR).filter((f) => f.endsWith('status.json'));
88
- for (const file of files) {
89
- const raw = readFileSafe(file);
90
- if (!raw) continue;
91
- try {
92
- const json = JSON.parse(raw);
93
- if (!Array.isArray(json.history)) {
94
- errors.push(`status.json must include history array: ${path.relative(ROOT, file)}`);
95
- }
96
- } catch (e) {
97
- errors.push(`Invalid JSON in status.json: ${path.relative(ROOT, file)}`);
98
- }
78
+ const projects = dl.db.prepare('SELECT id, name FROM projects').all();
79
+ for (const p of projects) {
80
+ const historyCount = dl.db.prepare('SELECT count(*) as count FROM project_status_history WHERE project_id = ?').get(p.id).count;
81
+ // We don't strictly enforce history to exist for brand new projects, but if we did:
82
+ // if (historyCount === 0) errors.push(`Project missing status history: ${p.name}`);
99
83
  }
100
84
  }
101
85
 
102
86
  function validateTaskProjectSlugs() {
103
87
  const slugs = collectProjectSlugs();
104
- if (!slugs.length) return; // no known slugs -> skip
105
-
106
- const taskFile = path.join(DATA_DIR, 'tasks', 'task-log.json');
107
- if (!exists(taskFile)) return;
88
+ if (!slugs.length) return;
108
89
 
109
- let json;
110
- try { json = JSON.parse(readFileSafe(taskFile) || '{}'); } catch { return; }
111
- const tasks = Array.isArray(json.tasks) ? json.tasks : [];
90
+ const tasks = dl.db.prepare('SELECT id, description, project_slug FROM tasks').all();
112
91
 
113
92
  for (const task of tasks) {
114
- if (!task || typeof task !== 'object') continue;
115
93
  const desc = String(task.description || '').toLowerCase();
116
94
  if (!desc) continue;
117
95
  const mentioned = slugs.find((slug) => desc.includes(slug));
118
- if (mentioned && !task.projectSlug) {
96
+ if (mentioned && !task.project_slug) {
119
97
  errors.push(`Task missing projectSlug for mentioned project (${mentioned}): ${task.id || task.description}`);
120
98
  }
121
99
  }