@cccarv82/freya 2.1.6 → 2.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.agent/rules/freya/freya.mdc +8 -0
- package/cli/init.js +1 -1
- package/package.json +3 -3
- package/scripts/validate-structure.js +151 -0
- package/templates/base/.agent/rules/freya/agents/ingestor.mdc +8 -0
- package/templates/base/.agent/rules/freya/agents/master.mdc +5 -0
- package/templates/base/docs/career/Career Hub.md +13 -0
- package/templates/base/docs/reports/Reports Hub.md +13 -0
- package/templates/base/docs/standards/Standards Hub.md +11 -0
- package/templates/base/scripts/validate-structure.js +151 -0
|
@@ -35,3 +35,11 @@ Como posso ajudar você hoje?
|
|
|
35
35
|
[4] General Assistance
|
|
36
36
|
```
|
|
37
37
|
</menu-display>
|
|
38
|
+
|
|
39
|
+
## Registro Padrao
|
|
40
|
+
|
|
41
|
+
- Logs diarios (raw input, notas cronologicas): `logs/daily/YYYY-MM-DD.md`
|
|
42
|
+
- Dados estruturados (status, tarefas, carreira): `data/**`
|
|
43
|
+
- Sintese e navegacao (hubs, reports): `docs/**`
|
|
44
|
+
|
|
45
|
+
Regra: nunca gravar logs diarios em data/ ou docs/. Nunca gravar dados estruturados em logs/.
|
package/cli/init.js
CHANGED
|
@@ -81,7 +81,7 @@ function ensurePackageJson(targetDir, force, summary) {
|
|
|
81
81
|
const existing = readJsonSafe(pkgPath);
|
|
82
82
|
|
|
83
83
|
const scriptsToEnsure = {
|
|
84
|
-
health: 'node scripts/validate-data.js',
|
|
84
|
+
health: 'node scripts/validate-data.js && node scripts/validate-structure.js',
|
|
85
85
|
migrate: 'node scripts/migrate-data.js',
|
|
86
86
|
report: 'node scripts/generate-weekly-report.js',
|
|
87
87
|
'sm-weekly': 'node scripts/generate-sm-weekly-report.js',
|
package/package.json
CHANGED
|
@@ -1,9 +1,9 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@cccarv82/freya",
|
|
3
|
-
"version": "2.1.
|
|
3
|
+
"version": "2.1.7",
|
|
4
4
|
"description": "Personal AI Assistant with local-first persistence",
|
|
5
5
|
"scripts": {
|
|
6
|
-
"health": "node scripts/validate-data.js",
|
|
6
|
+
"health": "node scripts/validate-data.js && node scripts/validate-structure.js",
|
|
7
7
|
"migrate": "node scripts/migrate-data.js",
|
|
8
8
|
"report": "node scripts/generate-weekly-report.js",
|
|
9
9
|
"sm-weekly": "node scripts/generate-sm-weekly-report.js",
|
|
@@ -13,7 +13,7 @@
|
|
|
13
13
|
"export-obsidian": "node scripts/export-obsidian.js",
|
|
14
14
|
"build-index": "node scripts/index/build-index.js",
|
|
15
15
|
"update-index": "node scripts/index/update-index.js",
|
|
16
|
-
"test": "node tests/unit/test-package-config.js && node tests/unit/test-cli-init.js && node tests/unit/test-cli-web-help.js && node tests/unit/test-web-static-assets.js && node tests/unit/test-fs-utils.js && node tests/unit/test-search-utils.js && node tests/unit/test-index-utils.js && node tests/unit/test-task-schema.js && node tests/unit/test-daily-generation.js && node tests/unit/test-report-generation.js && node tests/unit/test-executive-report-logs.js && node tests/unit/test-oracle-retrieval.js && node tests/unit/test-task-completion.js && node tests/unit/test-migrate-data.js && node tests/unit/test-blockers-validation.js && node tests/unit/test-blockers-report.js && node tests/unit/test-sm-weekly-report.js && node tests/integration/test-ingestor-task.js"
|
|
16
|
+
"test": "node tests/unit/test-package-config.js && node tests/unit/test-cli-init.js && node tests/unit/test-cli-web-help.js && node tests/unit/test-web-static-assets.js && node tests/unit/test-fs-utils.js && node tests/unit/test-search-utils.js && node tests/unit/test-index-utils.js && node tests/unit/test-task-schema.js && node tests/unit/test-daily-generation.js && node tests/unit/test-report-generation.js && node tests/unit/test-executive-report-logs.js && node tests/unit/test-oracle-retrieval.js && node tests/unit/test-task-completion.js && node tests/unit/test-migrate-data.js && node tests/unit/test-blockers-validation.js && node tests/unit/test-blockers-report.js && node tests/unit/test-sm-weekly-report.js && node tests/integration/test-ingestor-task.js && node tests/unit/test-structure-validation.js"
|
|
17
17
|
},
|
|
18
18
|
"keywords": [],
|
|
19
19
|
"author": "",
|
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
|
|
4
|
+
const ROOT = path.join(__dirname, '..');
|
|
5
|
+
const LOGS_DIR = path.join(ROOT, 'logs', 'daily');
|
|
6
|
+
const DATA_DIR = path.join(ROOT, 'data');
|
|
7
|
+
const DOCS_DIR = path.join(ROOT, 'docs');
|
|
8
|
+
const CLIENTS_DIR = path.join(DATA_DIR, 'Clients');
|
|
9
|
+
|
|
10
|
+
const errors = [];
|
|
11
|
+
|
|
12
|
+
// Return true when a filesystem entry at `p` is accessible, false otherwise.
// Uses accessSync so permission failures also count as "missing".
function exists(p) {
  try {
    fs.accessSync(p);
    return true;
  } catch {
    return false;
  }
}
|
|
15
|
+
|
|
16
|
+
// Read a UTF-8 text file; return its contents, or null on any read failure.
function readFileSafe(p) {
  try {
    return fs.readFileSync(p, 'utf8');
  } catch {
    return null;
  }
}
|
|
19
|
+
|
|
20
|
+
// Recursively collect the paths of all non-directory entries under `dir`,
// appending into (and returning) the `out` accumulator. A missing directory
// contributes nothing.
function walk(dir, out = []) {
  if (!exists(dir)) return out;
  const entries = fs.readdirSync(dir, { withFileTypes: true });
  for (const entry of entries) {
    const resolved = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      walk(resolved, out);
    } else {
      out.push(resolved);
    }
  }
  return out;
}
|
|
29
|
+
|
|
30
|
+
// Parse a minimal `key: value` frontmatter block opened by a '---' first line
// and closed by the next '---' line. Returns a plain object of trimmed
// key/value strings, or null when `text` is empty or has no frontmatter.
function parseFrontmatter(text) {
  if (!text) return null;
  const lines = text.split(/\r?\n/);
  if (lines.length === 0 || lines[0].trim() !== '---') return null;
  const fm = {};
  for (const line of lines.slice(1)) {
    if (line.trim() === '---') break;
    const sep = line.indexOf(':');
    if (sep === -1) continue; // not a key: value line; ignore
    fm[line.slice(0, sep).trim()] = line.slice(sep + 1).trim();
  }
  return fm;
}
|
|
46
|
+
|
|
47
|
+
// Check every logs/daily/YYYY-MM-DD.md file: it must carry frontmatter whose
// Type is 'daily' and whose Date matches the filename. Each violation is
// appended to the module-level `errors` list.
function validateDailyLogs() {
  if (!exists(LOGS_DIR)) return;
  const dailyNamePattern = /^\d{4}-\d{2}-\d{2}\.md$/;
  const names = fs.readdirSync(LOGS_DIR).filter((f) => dailyNamePattern.test(f));

  for (const name of names) {
    const full = path.join(LOGS_DIR, name);
    const fm = parseFrontmatter(readFileSafe(full));
    const expectedDate = name.replace(/\.md$/, '');
    if (!fm) {
      errors.push(`Daily log missing frontmatter: ${path.relative(ROOT, full)}`);
      continue;
    }
    if (String(fm.Type || '').toLowerCase() !== 'daily') {
      errors.push(`Daily log frontmatter Type must be 'daily': ${path.relative(ROOT, full)}`);
    }
    if (String(fm.Date || '').trim() !== expectedDate) {
      errors.push(`Daily log frontmatter Date must match filename (${expectedDate}): ${path.relative(ROOT, full)}`);
    }
  }
}
|
|
71
|
+
|
|
72
|
+
// Derive the set of known project slugs from data/Clients/**/status.json.
// A slug is the lowercased, forward-slash-joined directory path of each
// status.json relative to the Clients root. Returns unique slugs in
// discovery order.
function collectProjectSlugs() {
  if (!exists(CLIENTS_DIR)) return [];
  const seen = new Set();
  for (const file of walk(CLIENTS_DIR)) {
    if (!file.endsWith('status.json')) continue;
    const rel = path.relative(CLIENTS_DIR, path.dirname(file));
    if (!rel) continue; // status.json sitting directly in Clients/ has no slug
    seen.add(rel.split(path.sep).join('/').toLowerCase());
  }
  return [...seen];
}
|
|
84
|
+
|
|
85
|
+
// Ensure every data/Clients/**/status.json parses as JSON and contains a
// `history` array; each violation is appended to `errors`. Unreadable files
// are skipped silently (consistent with readFileSafe's best-effort contract).
function validateProjectStatusHistory() {
  if (!exists(CLIENTS_DIR)) return;
  const files = walk(CLIENTS_DIR).filter((f) => f.endsWith('status.json'));
  for (const file of files) {
    const raw = readFileSafe(file);
    if (!raw) continue;
    try {
      const json = JSON.parse(raw);
      if (!Array.isArray(json.history)) {
        errors.push(`status.json must include history array: ${path.relative(ROOT, file)}`);
      }
    } catch {
      // Bare catch: the error object is unused, matching the style of the
      // other try/catch blocks in this script.
      errors.push(`Invalid JSON in status.json: ${path.relative(ROOT, file)}`);
    }
  }
}
|
|
101
|
+
|
|
102
|
+
// Cross-check data/tasks/task-log.json against the known project slugs:
// when a task's description mentions a slug but the task has no
// `projectSlug` field, record an error. Missing or unparseable task files
// and an empty slug set cause a silent skip.
function validateTaskProjectSlugs() {
  const slugs = collectProjectSlugs();
  if (slugs.length === 0) return; // no known slugs -> skip

  const taskFile = path.join(DATA_DIR, 'tasks', 'task-log.json');
  if (!exists(taskFile)) return;

  let json;
  try {
    json = JSON.parse(readFileSafe(taskFile) || '{}');
  } catch {
    return;
  }
  const tasks = Array.isArray(json.tasks) ? json.tasks : [];

  for (const task of tasks) {
    if (!task || typeof task !== 'object') continue;
    const desc = String(task.description || '').toLowerCase();
    if (!desc || task.projectSlug) continue;
    const mentioned = slugs.find((slug) => desc.includes(slug));
    if (mentioned) {
      errors.push(`Task missing projectSlug for mentioned project (${mentioned}): ${task.id || task.description}`);
    }
  }
}
|
|
123
|
+
|
|
124
|
+
// Each hub document under docs/ must exist; report every missing one.
function validateDocsHubs() {
  const requiredHubs = [
    ['reports', 'Reports Hub.md'],
    ['career', 'Career Hub.md'],
    ['standards', 'Standards Hub.md'],
  ];
  for (const [folder, file] of requiredHubs) {
    const hub = path.join(DOCS_DIR, folder, file);
    if (!exists(hub)) {
      errors.push(`Missing hub doc: ${path.relative(ROOT, hub)}`);
    }
  }
}
|
|
136
|
+
|
|
137
|
+
// Run every structural check, then report the outcome: print a success line
// when the layout is clean, otherwise list all problems and exit with code 1.
function main() {
  validateDailyLogs();
  validateProjectStatusHistory();
  validateTaskProjectSlugs();
  validateDocsHubs();

  if (errors.length === 0) {
    console.log('✅ Structure validation passed');
    return;
  }
  console.error('❌ Structure validation failed:');
  for (const err of errors) console.error('-', err);
  process.exit(1);
}

main();
|
|
@@ -14,6 +14,14 @@ Before ANY attempt to parse, classify, or understand the input, you MUST write t
|
|
|
14
14
|
This ensures no data is lost even if the subsequent steps fail.
|
|
15
15
|
</critical-rule>
|
|
16
16
|
|
|
17
|
+
<structure-guardrails>
|
|
18
|
+
**Pasta correta, sempre:**
|
|
19
|
+
- Logs diários brutos → `logs/daily/YYYY-MM-DD.md`
|
|
20
|
+
- Dados estruturados → `data/**` (tasks, career, Clients/.../status.json)
|
|
21
|
+
- Documentos de síntese/hubs/relatórios → `docs/**`
|
|
22
|
+
Nunca gravar dados estruturados em `logs/` e nunca colocar notas diárias em `docs/`.
|
|
23
|
+
</structure-guardrails>
|
|
24
|
+
|
|
17
25
|
<workflow>
|
|
18
26
|
1. **Receive Input:** The user provides text (status update, blocker, random thought, etc.).
|
|
19
27
|
2. **Safe Log (PRIORITY):**
|
|
@@ -81,6 +81,11 @@ You must fully embody this agent's persona and follow all activation instruction
|
|
|
81
81
|
- If user asks "Relatório Scrum Master", "SM weekly" or "weekly scrum" -> Execute `npm run sm-weekly` via the Shell tool.
|
|
82
82
|
- If user asks "Relatório de blockers", "blockers report", "riscos" -> Execute `npm run blockers` via the Shell tool.
|
|
83
83
|
- Inform the user where the file was saved when applicable.
|
|
84
|
+
- **Structure Guardrail (ALWAYS)**:
|
|
85
|
+
- Logs diários brutos → `logs/daily/`
|
|
86
|
+
- Dados estruturados → `data/`
|
|
87
|
+
- Hubs e relatórios → `docs/`
|
|
88
|
+
- Nunca misturar camadas.
|
|
84
89
|
- **Git Operations**: If user asks "Commit changes", "Save my work", or "Generate commit" ->
|
|
85
90
|
1. Execute `git status --porcelain` via Shell.
|
|
86
91
|
2. If output is empty, inform the user "No changes to commit".
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
---
|
|
2
|
+
Type: career-hub
|
|
3
|
+
Tags: [career, hub]
|
|
4
|
+
DataPath: data/career/career-log.json
|
|
5
|
+
---
|
|
6
|
+
|
|
7
|
+
# Career Hub
|
|
8
|
+
|
|
9
|
+
Centro de navegação para evolução de carreira dentro da FREYA.
|
|
10
|
+
|
|
11
|
+
## Como usar
|
|
12
|
+
- Registrar feedbacks e conquistas em `data/career/career-log.json`.
|
|
13
|
+
- Criar relatórios de carreira em `docs/reports`.
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
---
|
|
2
|
+
Type: reports-hub
|
|
3
|
+
Tags: [reports, hub]
|
|
4
|
+
DirPath: docs/reports
|
|
5
|
+
---
|
|
6
|
+
|
|
7
|
+
# Reports Hub
|
|
8
|
+
|
|
9
|
+
Ponto único para visualizar relatórios gerados pela FREYA.
|
|
10
|
+
|
|
11
|
+
## Como usar
|
|
12
|
+
- Relatórios gerados via scripts aparecem aqui.
|
|
13
|
+
- Mantenha links para relatórios importantes.
|
|
@@ -0,0 +1,151 @@
|
|
|
1
|
+
const fs = require('fs');
const path = require('path');

// Workspace roots, resolved relative to this script's parent directory:
// raw daily logs, structured data, synthesis docs, and per-client projects.
const ROOT = path.join(__dirname, '..');
const LOGS_DIR = path.join(ROOT, 'logs', 'daily');
const DATA_DIR = path.join(ROOT, 'data');
const DOCS_DIR = path.join(ROOT, 'docs');
const CLIENTS_DIR = path.join(DATA_DIR, 'Clients');

// Accumulated validation failures; reported all at once by main().
const errors = [];

// Return true when `p` is accessible on disk, false otherwise.
function exists(p) {
  try { fs.accessSync(p); return true; } catch { return false; }
}

// Read a UTF-8 file, or return null when it cannot be read.
function readFileSafe(p) {
  try { return fs.readFileSync(p, 'utf8'); } catch { return null; }
}

// Recursively collect all non-directory entry paths under `dir` into `out`.
function walk(dir, out = []) {
  if (!exists(dir)) return out;
  for (const ent of fs.readdirSync(dir, { withFileTypes: true })) {
    const full = path.join(dir, ent.name);
    if (ent.isDirectory()) walk(full, out);
    else out.push(full);
  }
  return out;
}

// Parse a minimal `key: value` frontmatter block delimited by '---' lines.
// Returns an object of trimmed key/value strings, or null when `text` is
// empty or does not start with a '---' line.
function parseFrontmatter(text) {
  if (!text) return null;
  const lines = text.split(/\r?\n/);
  if (!lines.length || lines[0].trim() !== '---') return null;
  const fm = {};
  for (let i = 1; i < lines.length; i++) {
    const line = lines[i];
    if (line.trim() === '---') break;
    const idx = line.indexOf(':');
    if (idx === -1) continue; // not a key: value line; skip
    const key = line.slice(0, idx).trim();
    const value = line.slice(idx + 1).trim();
    fm[key] = value;
  }
  return fm;
}

// Each logs/daily/YYYY-MM-DD.md must carry frontmatter with Type 'daily'
// and a Date equal to the date in its filename; violations go to `errors`.
function validateDailyLogs() {
  if (!exists(LOGS_DIR)) return;
  const files = fs.readdirSync(LOGS_DIR)
    .filter((f) => /^\d{4}-\d{2}-\d{2}\.md$/.test(f));

  for (const name of files) {
    const full = path.join(LOGS_DIR, name);
    const body = readFileSafe(full);
    const fm = parseFrontmatter(body);
    const date = name.replace(/\.md$/, '');
    if (!fm) {
      errors.push(`Daily log missing frontmatter: ${path.relative(ROOT, full)}`);
      continue;
    }
    const type = String(fm.Type || '').toLowerCase();
    const fmDate = String(fm.Date || '').trim();
    if (type !== 'daily') {
      errors.push(`Daily log frontmatter Type must be 'daily': ${path.relative(ROOT, full)}`);
    }
    if (fmDate !== date) {
      errors.push(`Daily log frontmatter Date must match filename (${date}): ${path.relative(ROOT, full)}`);
    }
  }
}

// Derive unique project slugs from data/Clients/**/status.json: each slug is
// the lowercased, forward-slash-joined project directory path.
function collectProjectSlugs() {
  if (!exists(CLIENTS_DIR)) return [];
  const slugs = [];
  const files = walk(CLIENTS_DIR).filter((f) => f.endsWith('status.json'));
  for (const file of files) {
    const rel = path.relative(CLIENTS_DIR, path.dirname(file));
    if (!rel) continue; // status.json directly in Clients/ has no slug
    const slug = rel.split(path.sep).join('/').toLowerCase();
    slugs.push(slug);
  }
  return Array.from(new Set(slugs));
}

// Every status.json must parse as JSON and include a `history` array;
// unreadable files are skipped, other violations go to `errors`.
function validateProjectStatusHistory() {
  if (!exists(CLIENTS_DIR)) return;
  const files = walk(CLIENTS_DIR).filter((f) => f.endsWith('status.json'));
  for (const file of files) {
    const raw = readFileSafe(file);
    if (!raw) continue;
    try {
      const json = JSON.parse(raw);
      if (!Array.isArray(json.history)) {
        errors.push(`status.json must include history array: ${path.relative(ROOT, file)}`);
      }
    } catch (e) {
      // `e` is intentionally unused; only the fact that parsing failed matters.
      errors.push(`Invalid JSON in status.json: ${path.relative(ROOT, file)}`);
    }
  }
}

// When a task description in data/tasks/task-log.json mentions a known
// project slug but the task lacks `projectSlug`, record an error. Missing
// or unparseable task files and an empty slug set cause a silent skip.
function validateTaskProjectSlugs() {
  const slugs = collectProjectSlugs();
  if (!slugs.length) return; // no known slugs -> skip

  const taskFile = path.join(DATA_DIR, 'tasks', 'task-log.json');
  if (!exists(taskFile)) return;

  let json;
  try { json = JSON.parse(readFileSafe(taskFile) || '{}'); } catch { return; }
  const tasks = Array.isArray(json.tasks) ? json.tasks : [];

  for (const task of tasks) {
    if (!task || typeof task !== 'object') continue;
    const desc = String(task.description || '').toLowerCase();
    if (!desc) continue;
    const mentioned = slugs.find((slug) => desc.includes(slug));
    if (mentioned && !task.projectSlug) {
      errors.push(`Task missing projectSlug for mentioned project (${mentioned}): ${task.id || task.description}`);
    }
  }
}

// Each hub document under docs/ must exist; report every missing one.
function validateDocsHubs() {
  const hubs = [
    path.join(DOCS_DIR, 'reports', 'Reports Hub.md'),
    path.join(DOCS_DIR, 'career', 'Career Hub.md'),
    path.join(DOCS_DIR, 'standards', 'Standards Hub.md'),
  ];
  for (const hub of hubs) {
    if (!exists(hub)) {
      errors.push(`Missing hub doc: ${path.relative(ROOT, hub)}`);
    }
  }
}

// Run every structural check; exit 1 with a list of problems, or print a
// success line when the workspace layout is clean.
function main() {
  validateDailyLogs();
  validateProjectStatusHistory();
  validateTaskProjectSlugs();
  validateDocsHubs();

  if (errors.length) {
    console.error('❌ Structure validation failed:');
    for (const err of errors) console.error('-', err);
    process.exit(1);
  }
  console.log('✅ Structure validation passed');
}

main();
|