@cccarv82/freya 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +92 -0
- package/bin/freya.js +11 -0
- package/cli/index.js +62 -0
- package/cli/init.js +133 -0
- package/package.json +27 -0
- package/templates/base/.agent/rules/freya/agents/coach.mdc +72 -0
- package/templates/base/.agent/rules/freya/agents/ingestor.mdc +183 -0
- package/templates/base/.agent/rules/freya/agents/master.mdc +93 -0
- package/templates/base/.agent/rules/freya/agents/oracle.mdc +102 -0
- package/templates/base/.agent/rules/freya/freya.mdc +31 -0
- package/templates/base/README.md +50 -0
- package/templates/base/USER_GUIDE.md +160 -0
- package/templates/base/data/blockers/blocker-log.json +4 -0
- package/templates/base/data/career/career-log.json +4 -0
- package/templates/base/data/schemas.md +66 -0
- package/templates/base/data/tasks/task-log.json +4 -0
- package/templates/base/scripts/generate-blockers-report.js +215 -0
- package/templates/base/scripts/generate-daily-summary.js +96 -0
- package/templates/base/scripts/generate-executive-report.js +240 -0
- package/templates/base/scripts/generate-sm-weekly-report.js +207 -0
- package/templates/base/scripts/generate-weekly-report.js +134 -0
- package/templates/base/scripts/lib/date-utils.js +37 -0
- package/templates/base/scripts/lib/fs-utils.js +61 -0
- package/templates/base/scripts/migrate-data.js +80 -0
- package/templates/base/scripts/validate-data.js +206 -0
package/templates/base/scripts/lib/date-utils.js
@@ -0,0 +1,37 @@
+// scripts/lib/date-utils.js
+// Small, dependency-free helpers to keep date handling consistent.
+
+function toIsoDate(date) {
+  return new Date(date).toISOString().slice(0, 10);
+}
+
+function safeParseToMs(value) {
+  if (!value) return NaN;
+  if (typeof value === 'number') return value;
+
+  // Handle common case: YYYY-MM-DD (from filenames)
+  if (/^\d{4}-\d{2}-\d{2}$/.test(value)) {
+    // Interpret as UTC midnight for consistency.
+    return Date.parse(`${value}T00:00:00.000Z`);
+  }
+
+  // Prefer explicit UTC if missing timezone.
+  // If string already contains Z or an offset (+/-HH:MM), keep as-is.
+  if (typeof value === 'string' && /^\d{4}-\d{2}-\d{2}T/.test(value) && !/[zZ]|[+-]\d{2}:?\d{2}$/.test(value)) {
+    return Date.parse(`${value}Z`);
+  }
+
+  return Date.parse(value);
+}
+
+function isWithinRange(dateValue, start, end) {
+  const ms = safeParseToMs(dateValue);
+  if (!Number.isFinite(ms)) return false;
+  return ms >= start.getTime() && ms <= end.getTime();
+}
+
+module.exports = {
+  toIsoDate,
+  safeParseToMs,
+  isWithinRange,
+};
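For orientation, a minimal usage sketch of these helpers (illustrative only, not part of the published package; the require path assumes a caller sitting in `scripts/`, next to `lib/`):

```js
// Illustrative usage of the exported helpers; all input values are hypothetical.
const { toIsoDate, safeParseToMs, isWithinRange } = require('./lib/date-utils');

toIsoDate('2024-03-05T14:30:00Z');      // '2024-03-05'
safeParseToMs('2024-03-05');            // bare date: parsed as UTC midnight
safeParseToMs('2024-03-05T14:30:00');   // no timezone given, so 'Z' is appended before parsing
isWithinRange(
  '2024-03-05',
  new Date('2024-03-01T00:00:00Z'),
  new Date('2024-03-31T23:59:59Z')
);                                      // true
```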
package/templates/base/scripts/lib/fs-utils.js
@@ -0,0 +1,61 @@
+const fs = require('fs');
+const path = require('path');
+
+function safeReadJson(filePath) {
+  let content;
+  try {
+    content = fs.readFileSync(filePath, 'utf8');
+  } catch (error) {
+    return { ok: false, error: { type: 'read', message: error.message, cause: error } };
+  }
+
+  try {
+    const json = JSON.parse(content);
+    return { ok: true, json };
+  } catch (error) {
+    return { ok: false, error: { type: 'parse', message: error.message, cause: error } };
+  }
+}
+
+function timestampForFilename(date = new Date()) {
+  return date.toISOString().replace(/[:.]/g, '-');
+}
+
+function quarantineCorruptedFile(filePath, reason) {
+  const dir = path.dirname(filePath);
+  const corruptedDir = path.join(dir, '_corrupted');
+  if (!fs.existsSync(corruptedDir)) {
+    fs.mkdirSync(corruptedDir, { recursive: true });
+  }
+
+  const parsed = path.parse(filePath);
+  const timestamp = timestampForFilename();
+  const quarantinedName = `${parsed.name}-${timestamp}${parsed.ext}`;
+  const quarantinedPath = path.join(corruptedDir, quarantinedName);
+
+  try {
+    fs.renameSync(filePath, quarantinedPath);
+  } catch (error) {
+    fs.copyFileSync(filePath, quarantinedPath);
+    fs.unlinkSync(filePath);
+  }
+
+  const notePath = `${quarantinedPath}.md`;
+  const note = [
+    '# Quarantined JSON',
+    '',
+    `- Original: ${filePath}`,
+    `- Quarantined: ${quarantinedPath}`,
+    `- Timestamp: ${new Date().toISOString()}`,
+    `- Reason: ${reason || 'Unknown JSON parse error'}`
+  ].join('\n');
+
+  fs.writeFileSync(notePath, note, 'utf8');
+
+  return { quarantinedPath, notePath };
+}
+
+module.exports = {
+  safeReadJson,
+  quarantineCorruptedFile
+};
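A sketch of how the result object and the quarantine helper are meant to be combined by callers (illustrative only; the target path is hypothetical):

```js
const { safeReadJson, quarantineCorruptedFile } = require('./lib/fs-utils');

const target = 'data/tasks/task-log.json'; // hypothetical path
const result = safeReadJson(target);

if (result.ok) {
  console.log(result.json);
} else if (result.error.type === 'parse') {
  // Unparsable JSON is moved into a sibling _corrupted/ folder with a .md note.
  const { quarantinedPath } = quarantineCorruptedFile(target, result.error.message);
  console.warn(`Quarantined to ${quarantinedPath}`);
} else {
  // 'read' errors (missing file, permissions, ...) leave the file in place.
  console.error(result.error.message);
}
```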
package/templates/base/scripts/migrate-data.js
@@ -0,0 +1,80 @@
+const fs = require('fs');
+const path = require('path');
+
+const { safeReadJson, quarantineCorruptedFile } = require('./lib/fs-utils');
+
+const DATA_DIR = process.env.DATA_DIR
+  ? path.resolve(process.env.DATA_DIR)
+  : path.join(__dirname, '../data');
+
+const KNOWN_FILES = [
+  { relPath: path.join('tasks', 'task-log.json'), label: 'tasks/task-log.json' },
+  { relPath: path.join('career', 'career-log.json'), label: 'career/career-log.json' },
+  { relPath: path.join('blockers', 'blocker-log.json'), label: 'blockers/blocker-log.json' }
+];
+
+function atomicWriteJson(filePath, json) {
+  const dir = path.dirname(filePath);
+  const tmpName = `.${path.basename(filePath)}.tmp-${process.pid}-${Date.now()}`;
+  const tmpPath = path.join(dir, tmpName);
+  fs.writeFileSync(tmpPath, JSON.stringify(json, null, 2), 'utf8');
+  fs.renameSync(tmpPath, filePath);
+}
+
+function migrateFile(filePath, label, summary) {
+  if (!fs.existsSync(filePath)) {
+    summary.missing.push(label);
+    return;
+  }
+
+  const res = safeReadJson(filePath);
+  if (!res.ok) {
+    if (res.error.type === 'parse') {
+      quarantineCorruptedFile(filePath, res.error.message);
+      summary.quarantined.push(label);
+    } else {
+      summary.skipped.push(label);
+    }
+    return;
+  }
+
+  const json = res.json;
+  if (!json || typeof json !== 'object' || Array.isArray(json)) {
+    summary.skipped.push(label);
+    return;
+  }
+
+  if (json.schemaVersion === undefined) {
+    json.schemaVersion = 1;
+    atomicWriteJson(filePath, json);
+    summary.updated.push(label);
+  } else {
+    summary.already.push(label);
+  }
+}
+
+function run() {
+  const summary = {
+    updated: [],
+    already: [],
+    missing: [],
+    quarantined: [],
+    skipped: []
+  };
+
+  KNOWN_FILES.forEach(({ relPath, label }) => {
+    const filePath = path.join(DATA_DIR, relPath);
+    migrateFile(filePath, label, summary);
+  });
+
+  const parts = [];
+  parts.push(`updated ${summary.updated.length}`);
+  if (summary.updated.length) parts.push(`updated files: ${summary.updated.join(', ')}`);
+  if (summary.quarantined.length) parts.push(`quarantined ${summary.quarantined.length}`);
+  if (summary.missing.length) parts.push(`missing ${summary.missing.length}`);
+  if (summary.skipped.length) parts.push(`skipped ${summary.skipped.length}`);
+
+  console.log(`Migration summary: ${parts.join('; ')}`);
+}
+
+run();
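The migration's core step, reproduced on an in-memory object as a sketch (the real script applies it to the three known log files under `data/`, or under `DATA_DIR` when that environment variable is set; the log contents below are hypothetical):

```js
// Hypothetical pre-migration log: valid JSON object, but no schemaVersion yet.
const log = { tasks: [] };

if (log.schemaVersion === undefined) {
  log.schemaVersion = 1; // the same stamp migrate-data.js writes back atomically
}

console.log(JSON.stringify(log, null, 2)); // prints "tasks" first, then "schemaVersion": 1
```

Files that already carry `schemaVersion` are reported as already migrated; unparsable JSON is quarantined via `quarantineCorruptedFile`, and unreadable or non-object files are skipped.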
package/templates/base/scripts/validate-data.js
@@ -0,0 +1,206 @@
+const fs = require('fs');
+const path = require('path');
+
+const { safeReadJson, quarantineCorruptedFile } = require('./lib/fs-utils');
+
+const DATA_DIR = path.join(__dirname, '../data');
+
+// --- Validation Helpers ---
+
+function validateTaskLog(json, file) {
+  const errors = [];
+  if (json.schemaVersion !== undefined && typeof json.schemaVersion !== 'number') {
+    errors.push("Root must have numeric 'schemaVersion' when present.");
+  }
+  if (!Array.isArray(json.tasks)) {
+    errors.push(`Root must have 'tasks' array.`);
+    return errors;
+  }
+
+  json.tasks.forEach((task, index) => {
+    if (!task.id) errors.push(`Task[${index}] missing 'id'.`);
+    if (!task.description) errors.push(`Task[${index}] missing 'description'.`);
+    if (!task.category) errors.push(`Task[${index}] missing 'category'.`);
+    if (!task.status) errors.push(`Task[${index}] missing 'status'.`);
+    if (!task.createdAt) errors.push(`Task[${index}] missing 'createdAt'.`);
+
+    const validCategories = ['DO_NOW', 'SCHEDULE', 'DELEGATE', 'IGNORE'];
+    if (task.category && !validCategories.includes(task.category)) {
+      errors.push(`Task[${index}] invalid category '${task.category}'.`);
+    }
+
+    const validStatuses = ['PENDING', 'COMPLETED', 'ARCHIVED'];
+    if (task.status && !validStatuses.includes(task.status)) {
+      errors.push(`Task[${index}] invalid status '${task.status}'.`);
+    }
+  });
+
+  return errors;
+}
+
+function validateCareerLog(json, file) {
+  const errors = [];
+  if (json.schemaVersion !== undefined && typeof json.schemaVersion !== 'number') {
+    errors.push("Root must have numeric 'schemaVersion' when present.");
+  }
+  if (!Array.isArray(json.entries)) {
+    errors.push(`Root must have 'entries' array.`);
+    return errors;
+  }
+
+  json.entries.forEach((entry, index) => {
+    if (!entry.id) errors.push(`Entry[${index}] missing 'id'.`);
+    if (!entry.date) errors.push(`Entry[${index}] missing 'date'.`);
+    if (!entry.type) errors.push(`Entry[${index}] missing 'type'.`);
+    if (!entry.description) errors.push(`Entry[${index}] missing 'description'.`);
+
+    const validTypes = ['Achievement', 'Feedback', 'Certification', 'Goal'];
+    if (entry.type && !validTypes.includes(entry.type)) {
+      errors.push(`Entry[${index}] invalid type '${entry.type}'.`);
+    }
+  });
+
+  return errors;
+}
+
+function validateProjectStatus(json, file) {
+  const errors = [];
+  const requiredFields = ['client', 'project', 'active', 'currentStatus', 'lastUpdated', 'history'];
+
+  requiredFields.forEach(field => {
+    if (json[field] === undefined) errors.push(`Missing field '${field}'.`);
+  });
+
+  if (Array.isArray(json.history)) {
+    json.history.forEach((item, index) => {
+      if (!item.date) errors.push(`History[${index}] missing 'date'.`);
+      if (!item.type) errors.push(`History[${index}] missing 'type'.`);
+      if (!item.content) errors.push(`History[${index}] missing 'content'.`);
+    });
+  } else if (json.history !== undefined) {
+    errors.push(`'history' must be an array.`);
+  }
+
+  return errors;
+}
+
+function validateBlockerLog(json, file) {
+  const errors = [];
+  if (typeof json.schemaVersion !== 'number') {
+    errors.push("Root must have numeric 'schemaVersion'.");
+  }
+  if (!Array.isArray(json.blockers)) {
+    errors.push("Root must have 'blockers' array.");
+    return errors;
+  }
+
+  const validStatuses = ['OPEN', 'MITIGATING', 'RESOLVED'];
+  const validSeverities = ['LOW', 'MEDIUM', 'HIGH', 'CRITICAL'];
+
+  json.blockers.forEach((b, i) => {
+    const prefix = `Blocker[${i}]`;
+    if (!b.id) errors.push(`${prefix} missing 'id'.`);
+    if (!b.title) errors.push(`${prefix} missing 'title'.`);
+    if (!b.description) errors.push(`${prefix} missing 'description'.`);
+    if (!b.createdAt) errors.push(`${prefix} missing 'createdAt'.`);
+    if (!b.status) errors.push(`${prefix} missing 'status'.`);
+    if (!b.severity) errors.push(`${prefix} missing 'severity'.`);
+
+    if (b.status && !validStatuses.includes(String(b.status).toUpperCase())) {
+      errors.push(`${prefix} invalid status '${b.status}'.`);
+    }
+    if (b.severity && !validSeverities.includes(String(b.severity).toUpperCase())) {
+      errors.push(`${prefix} invalid severity '${b.severity}'.`);
+    }
+  });
+
+  return errors;
+}
+
+// --- Main Logic ---
+
+function walk(dir, fileList = []) {
+  const files = fs.readdirSync(dir);
+  files.forEach(file => {
+    const filePath = path.join(dir, file);
+    const stat = fs.statSync(filePath);
+    if (stat.isDirectory()) {
+      if (file === '_corrupted') {
+        return;
+      }
+      walk(filePath, fileList);
+    } else {
+      if (path.extname(file) === '.json') {
+        fileList.push(filePath);
+      }
+    }
+  });
+  return fileList;
+}
+
+function validateData() {
+  console.log('🔍 Starting validation...');
+  try {
+    if (!fs.existsSync(DATA_DIR)) {
+      console.error('❌ Data directory not found:', DATA_DIR);
+      process.exit(1);
+    }
+
+    const files = walk(DATA_DIR);
+    console.log(`Found ${files.length} json files.`);
+
+    let errorCount = 0;
+
+    files.forEach(file => {
+      const relativePath = path.relative(DATA_DIR, file);
+      const result = safeReadJson(file);
+      if (!result.ok) {
+        if (result.error.type === 'parse') {
+          quarantineCorruptedFile(file, result.error.message);
+          console.warn(`⚠️ [${relativePath}] JSON parse failed; quarantined to _corrupted.`);
+        } else {
+          console.error(`❌ [${relativePath}] Read failed: ${result.error.message}`);
+        }
+        errorCount++;
+        return;
+      }
+
+      const json = result.json;
+
+      let fileErrors = [];
+
+      // Route validation based on filename/path
+      if (file.endsWith('task-log.json')) {
+        fileErrors = validateTaskLog(json, relativePath);
+      } else if (file.endsWith('career-log.json')) {
+        fileErrors = validateCareerLog(json, relativePath);
+      } else if (file.endsWith('status.json')) {
+        fileErrors = validateProjectStatus(json, relativePath);
+      } else if (file.endsWith('blocker-log.json')) {
+        fileErrors = validateBlockerLog(json, relativePath);
      } else {
+        // Optional: warn about unknown files, or ignore
+        // console.warn(`⚠️ [${relativePath}] Unknown JSON file type. Skipping schema validation.`);
+      }
+
+      if (fileErrors.length > 0) {
+        console.error(`❌ [${relativePath}] Validation failed:`);
+        fileErrors.forEach(e => console.error(` - ${e}`));
+        errorCount++;
+      }
+    });
+
+    if (errorCount === 0) {
+      console.log('✅ All systems operational');
+    } else {
+      console.error(`❌ Validation completed with errors in ${errorCount} file(s).`);
+      process.exit(1);
+    }
+
+  } catch (err) {
+    console.error('❌ Fatal error:', err);
+    process.exit(1);
+  }
+}
+
+validateData();
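To make the per-file rules concrete, here is a minimal `blockers/blocker-log.json` payload that would pass `validateBlockerLog` (illustrative only; the field values are hypothetical):

```js
// Not shipped with the package; shows the shape the validator expects.
const passingBlockerLog = {
  schemaVersion: 1,
  blockers: [
    {
      id: 'BLK-1',
      title: 'Example blocker',
      description: 'Waiting on an external dependency.',
      createdAt: '2024-03-05T12:00:00Z',
      status: 'OPEN',      // OPEN | MITIGATING | RESOLVED
      severity: 'HIGH'     // LOW | MEDIUM | HIGH | CRITICAL
    }
  ]
};
```

Running `node scripts/validate-data.js` walks `data/` recursively (skipping `_corrupted/`), routes each JSON file to its validator by filename suffix, quarantines files that fail to parse, and exits with code 1 if any file produced errors.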