@cccarv82/freya 3.7.7 → 3.8.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/freya.js +45 -5
- package/cli/init.js +1 -1
- package/cli/retroactive-ingest.js +430 -0
- package/package.json +1 -1
- package/scripts/lib/DataLayer.js +28 -6
- package/templates/base/scripts/lib/DataLayer.js +28 -6
package/bin/freya.js
CHANGED
|
@@ -8,11 +8,17 @@ const { cmdWeb } = require('../cli/web');
|
|
|
8
8
|
const DEFAULT_PORT = 3872;
|
|
9
9
|
|
|
10
10
|
function usage() {
|
|
11
|
-
return `FREYA
|
|
11
|
+
return `FREYA — Personal AI Assistant
|
|
12
12
|
|
|
13
13
|
Usage:
|
|
14
14
|
freya [--port <n>] [--dir <path>] [--no-open] [--dev]
|
|
15
|
+
freya retroactive-ingest [--dry-run] [--days N] [--embeddings-only] [--skip-embeddings]
|
|
15
16
|
freya --version | -v
|
|
17
|
+
|
|
18
|
+
Commands:
|
|
19
|
+
(default) Start the web server
|
|
20
|
+
retroactive-ingest Process all historical daily logs (sync, embeddings, task extraction)
|
|
21
|
+
|
|
16
22
|
Options:
|
|
17
23
|
--port <n> Port to bind (default: ${DEFAULT_PORT})
|
|
18
24
|
--dir <path> Workspace directory (default: ./freya)
|
|
@@ -22,8 +28,8 @@ Options:
|
|
|
22
28
|
Examples:
|
|
23
29
|
freya
|
|
24
30
|
freya --port 4000
|
|
25
|
-
freya
|
|
26
|
-
freya --
|
|
31
|
+
freya retroactive-ingest
|
|
32
|
+
freya retroactive-ingest --days 7
|
|
27
33
|
`;
|
|
28
34
|
}
|
|
29
35
|
|
|
@@ -56,7 +62,6 @@ function parseArgs(argv) {
|
|
|
56
62
|
|
|
57
63
|
async function run(argv) {
|
|
58
64
|
const { args, flags, kv } = parseArgs(argv);
|
|
59
|
-
const positionals = args.filter((a) => a !== 'web');
|
|
60
65
|
|
|
61
66
|
if (flags.has('--help') || flags.has('-h')) {
|
|
62
67
|
process.stdout.write(usage());
|
|
@@ -69,8 +74,43 @@ async function run(argv) {
|
|
|
69
74
|
return;
|
|
70
75
|
}
|
|
71
76
|
|
|
77
|
+
// Check for subcommands
|
|
78
|
+
const subcommand = args[0] || '';
|
|
79
|
+
|
|
80
|
+
if (subcommand === 'retroactive-ingest') {
|
|
81
|
+
// Run retroactive ingestion directly from the global package
|
|
82
|
+
const dir = kv['--dir'] ? path.resolve(process.cwd(), kv['--dir']) : path.resolve(process.cwd(), 'freya');
|
|
83
|
+
// Ensure workspace exists first
|
|
84
|
+
const { initWorkspace } = require('../cli/init');
|
|
85
|
+
const { autoUpdate } = require('../cli/auto-update');
|
|
86
|
+
try {
|
|
87
|
+
const fs = require('fs');
|
|
88
|
+
if (!fs.existsSync(dir)) {
|
|
89
|
+
await initWorkspace({ targetDir: dir, force: false, forceData: false, forceLogs: false });
|
|
90
|
+
console.log('[FREYA] Workspace initialized at', dir);
|
|
91
|
+
}
|
|
92
|
+
await autoUpdate(dir);
|
|
93
|
+
} catch (e) {
|
|
94
|
+
console.error('[FREYA] Warning:', e.message || String(e));
|
|
95
|
+
}
|
|
96
|
+
// Forward remaining flags
|
|
97
|
+
const fwdArgs = [];
|
|
98
|
+
if (flags.has('--dry-run')) fwdArgs.push('--dry-run');
|
|
99
|
+
if (flags.has('--embeddings-only')) fwdArgs.push('--embeddings-only');
|
|
100
|
+
if (flags.has('--skip-embeddings')) fwdArgs.push('--skip-embeddings');
|
|
101
|
+
if (kv['--days']) { fwdArgs.push('--days'); fwdArgs.push(kv['--days']); }
|
|
102
|
+
// Set workspace dir and run directly
|
|
103
|
+
process.env.FREYA_WORKSPACE_DIR = dir;
|
|
104
|
+
// Inject args for the script to parse
|
|
105
|
+
process.argv = [process.argv[0], 'retroactive-ingest', ...fwdArgs];
|
|
106
|
+
require('../cli/retroactive-ingest')(dir, fwdArgs);
|
|
107
|
+
return;
|
|
108
|
+
}
|
|
109
|
+
|
|
110
|
+
// Filter out 'web' positional for backwards compat
|
|
111
|
+
const positionals = args.filter((a) => a !== 'web');
|
|
72
112
|
if (positionals.length > 0) {
|
|
73
|
-
process.stderr.write(`Unknown
|
|
113
|
+
process.stderr.write(`Unknown command: ${positionals.join(' ')}\n`);
|
|
74
114
|
process.stdout.write(usage());
|
|
75
115
|
process.exitCode = 1;
|
|
76
116
|
return;
|
package/cli/init.js
CHANGED
|
@@ -89,7 +89,7 @@ function ensurePackageJson(targetDir, force, summary) {
|
|
|
89
89
|
daily: 'node scripts/generate-daily-summary.js',
|
|
90
90
|
status: 'node scripts/generate-executive-report.js',
|
|
91
91
|
blockers: 'node scripts/generate-blockers-report.js',
|
|
92
|
-
'retroactive-ingest': '
|
|
92
|
+
'retroactive-ingest': 'freya retroactive-ingest'
|
|
93
93
|
};
|
|
94
94
|
|
|
95
95
|
const depsToEnsure = {
|
|
@@ -0,0 +1,430 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* cli/retroactive-ingest.js
|
|
3
|
+
*
|
|
4
|
+
* Retroactive ingestion — runs from the GLOBAL package (not workspace copy).
|
|
5
|
+
* Reuses the same DataLayer, DataManager, run(), getCopilotCmd() as web.js.
|
|
6
|
+
*
|
|
7
|
+
* Called from: freya retroactive-ingest [flags]
|
|
8
|
+
*/
|
|
9
|
+
'use strict';
|
|
10
|
+
|
|
11
|
+
const fs = require('fs');
|
|
12
|
+
const path = require('path');
|
|
13
|
+
const crypto = require('crypto');
|
|
14
|
+
const { spawn, execSync } = require('child_process');
|
|
15
|
+
const os = require('os');
|
|
16
|
+
const { defaultInstance: dl, ready, configure: configureDataLayer } = require('../scripts/lib/DataLayer');
|
|
17
|
+
let DataManager;
|
|
18
|
+
try { DataManager = require('../scripts/lib/DataManager'); } catch { DataManager = null; }
|
|
19
|
+
|
|
20
|
+
/**
 * Hex-encoded SHA-1 digest of `text`; used as a stable dedupe key.
 * @param {string} text
 * @returns {string} 40-char lowercase hex digest
 */
function sha1(text) {
  const hasher = crypto.createHash('sha1');
  hasher.update(text);
  return hasher.digest('hex');
}
|
|
23
|
+
/**
 * Collapse every run of whitespace to a single space and trim the ends.
 * Falsy input (null, undefined, '', 0) yields ''.
 * @param {*} t
 * @returns {string}
 */
function normalizeWhitespace(t) {
  const tokens = String(t || '').split(/\s+/);
  return tokens.filter(Boolean).join(' ');
}

/**
 * Whitespace-normalized, lowercased form of `t` — the canonical text
 * used when hashing for duplicate detection.
 * @param {*} t
 * @returns {string}
 */
function normalizeTextForKey(t) {
  const collapsed = normalizeWhitespace(t);
  return collapsed.toLowerCase();
}
|
|
29
|
+
|
|
30
|
+
// ---------- run() — shared with web.js (quoting hardened) ----------
/**
 * Spawn `cmd` with `args` in `cwd` and capture the whole output.
 * Never rejects: every failure path resolves with { code: 1, stdout, stderr }.
 * On Windows the invocation is routed through PowerShell with single-quote
 * escaping; on POSIX, spawn runs through a shell (`shell: true`), so args
 * are interpreted by /bin/sh.
 *
 * @param {string} cmd - executable path or name on PATH
 * @param {string[]} args - command arguments
 * @param {string} cwd - working directory for the child
 * @param {Object<string,string>} [extraEnv] - extra env vars merged over process.env
 * @returns {Promise<{code: number, stdout: string, stderr: string}>}
 */
function run(cmd, args, cwd, extraEnv) {
  return new Promise((resolve) => {
    let child;
    const env = extraEnv ? { ...process.env, ...extraEnv } : process.env;
    try {
      if (process.platform === 'win32') {
        // PowerShell single-quote escaping: double any embedded apostrophe.
        const psQuote = (s) => `'${String(s).replace(/'/g, "''")}'`;
        // FIX: quote `cmd` too — previously only the args were escaped, so a
        // command path containing an apostrophe broke the PowerShell line.
        const psCommand = `& ${psQuote(cmd)} ${args.map(psQuote).join(' ')}`;
        child = spawn('powershell.exe', [
          '-NoLogo', '-Command', psCommand
        ], { cwd, env, windowsHide: true });
      } else {
        child = spawn(cmd, args, { cwd, shell: true, env });
      }
    } catch (e) {
      // spawn itself threw (e.g. invalid options) — report as a failed run.
      return resolve({ code: 1, stdout: '', stderr: e.message || String(e) });
    }
    let stdout = '';
    let stderr = '';
    child.stdout && child.stdout.on('data', (d) => { stdout += d.toString(); });
    child.stderr && child.stderr.on('data', (d) => { stderr += d.toString(); });
    // 'error' fires when the process could not be started at all.
    child.on('error', (e) => { stderr += `\n${e.message}`; resolve({ code: 1, stdout, stderr }); });
    child.on('close', (code) => resolve({ code: code ?? 0, stdout, stderr }));
  });
}
|
|
59
|
+
|
|
60
|
+
// ---------- getCopilotCmd() — exact copy from web.js ----------
// Process-wide cache: an absolute path, a 'gh-copilot:<gh path>' marker
// (meaning "invoke `gh copilot ...`"), or the bare fallback 'copilot'.
let _copilotPathCache = null;
/**
 * Resolve how to invoke the Copilot CLI, caching the result for the process.
 * Resolution order:
 *   1. COPILOT_CMD env var (explicit override, used verbatim);
 *   2. a `copilot` binary on PATH;
 *   3. a `gh` binary on PATH, returned as 'gh-copilot:<path>'
 *      (see copilotSpawnArgs for how that marker is consumed);
 *   4. the bare string 'copilot' — lets a later spawn fail with a clear error.
 * @returns {string} resolved command (see cache comment above for formats)
 */
function getCopilotCmd() {
  if (_copilotPathCache !== null) return _copilotPathCache;

  if (process.env.COPILOT_CMD) {
    _copilotPathCache = process.env.COPILOT_CMD;
    return _copilotPathCache;
  }

  if (process.platform === 'win32') {
    // Try `where` first (fast); fall back to PowerShell Get-Command,
    // which also finds shims/aliases `where` can miss.
    try {
      const p = execSync('where copilot 2>nul', { encoding: 'utf8', timeout: 5000 }).trim().split(/\r?\n/)[0];
      if (p && fs.existsSync(p)) { _copilotPathCache = p; return p; }
    } catch { }
    try {
      const p = execSync(
        'powershell.exe -NoProfile -Command "(Get-Command copilot -ErrorAction SilentlyContinue).Source"',
        { encoding: 'utf8', timeout: 10000 }
      ).trim();
      if (p && fs.existsSync(p)) { _copilotPathCache = p; return p; }
    } catch { }
    try {
      const p = execSync(
        'powershell.exe -NoProfile -Command "(Get-Command gh -ErrorAction SilentlyContinue).Source"',
        { encoding: 'utf8', timeout: 10000 }
      ).trim();
      if (p && fs.existsSync(p)) { _copilotPathCache = `gh-copilot:${p}`; return _copilotPathCache; }
    } catch { }
  } else {
    // POSIX: `which` is sufficient; no existsSync check needed since
    // `which` only reports executables that exist.
    try {
      const p = execSync('which copilot 2>/dev/null', { encoding: 'utf8', timeout: 5000 }).trim();
      if (p) { _copilotPathCache = p; return p; }
    } catch { }
    try {
      const p = execSync('which gh 2>/dev/null', { encoding: 'utf8', timeout: 5000 }).trim();
      if (p) { _copilotPathCache = `gh-copilot:${p}`; return _copilotPathCache; }
    } catch { }
  }

  // Nothing found — cache the bare name so we only probe once per process.
  _copilotPathCache = 'copilot';
  return _copilotPathCache;
}
|
|
103
|
+
|
|
104
|
+
/**
 * Translate a resolved copilot command into concrete spawn parameters.
 * A 'gh-copilot:<path>' marker (produced by getCopilotCmd) means the gh
 * CLI should be invoked as `gh copilot <extraArgs...>`; any other value
 * is treated as the copilot binary itself.
 * @param {string} copilotCmd - value returned by getCopilotCmd()
 * @param {string[]} extraArgs - arguments to append
 * @returns {{cmd: string, args: string[]}}
 */
function copilotSpawnArgs(copilotCmd, extraArgs) {
  const marker = 'gh-copilot:';
  if (!copilotCmd.startsWith(marker)) {
    return { cmd: copilotCmd, args: extraArgs };
  }
  const ghPath = copilotCmd.slice(marker.length);
  return { cmd: ghPath, args: ['copilot', ...extraArgs] };
}
|
|
111
|
+
|
|
112
|
+
// ---------- Helpers ----------
|
|
113
|
+
/**
 * Return the first balanced `{...}` span found in `text`, or null.
 *
 * FIX: brace counting is now string-aware. The original counted every
 * `{`/`}` byte, so a brace inside a JSON string literal truncated the
 * match (e.g. input '{"a":"}"}' returned the unparsable '{"a":"}').
 * We now skip braces while inside a double-quoted string, honoring
 * backslash escapes. Same interface: string on success, null otherwise.
 *
 * @param {string} text - raw planner output, possibly with surrounding noise
 * @returns {string|null}
 */
function extractFirstJsonObject(text) {
  if (!text) return null;
  const start = text.indexOf('{');
  if (start === -1) return null;
  let depth = 0;
  let inString = false;
  let escaped = false;
  for (let i = start; i < text.length; i++) {
    const ch = text[i];
    if (inString) {
      if (escaped) escaped = false;
      else if (ch === '\\') escaped = true;
      else if (ch === '"') inString = false;
      continue;
    }
    if (ch === '"') { inString = true; continue; }
    if (ch === '{') depth++;
    else if (ch === '}') { depth--; if (depth === 0) return text.slice(start, i + 1); }
  }
  // Unbalanced braces — no complete object found.
  return null;
}
|
|
124
|
+
|
|
125
|
+
/**
 * Re-escape raw control characters so near-valid planner JSON gets a
 * second chance at JSON.parse. Newline, carriage return and tab are kept
 * verbatim (they are legal as inter-token whitespace); every other C0
 * control character plus DEL becomes a \uXXXX escape.
 * @param {string} jsonText
 * @returns {string}
 */
function escapeJsonControlChars(jsonText) {
  const keepVerbatim = new Set(['\n', '\r', '\t']);
  const toUnicodeEscape = (ch) =>
    '\\u' + ch.charCodeAt(0).toString(16).padStart(4, '0');
  return jsonText.replace(/[\x00-\x1F\x7F]/g, (ch) =>
    keepVerbatim.has(ch) ? ch : toUnicodeEscape(ch)
  );
}
|
|
131
|
+
|
|
132
|
+
/**
 * Best-effort read of the keyword→project-slug map from the workspace
 * settings. The file is optional: any read or parse failure yields {}.
 * @param {string} wsDir - workspace root directory
 * @returns {Object<string,string>}
 */
function readProjectSlugMap(wsDir) {
  const mapPath = path.join(wsDir, 'data', 'settings', 'project-slug-map.json');
  try {
    const raw = fs.readFileSync(mapPath, 'utf8');
    return JSON.parse(raw);
  } catch {
    return {};
  }
}
|
|
136
|
+
|
|
137
|
+
/**
 * Infer a project slug by scanning `text` (case-insensitively) for the
 * keywords of `map`; the LONGEST matching keyword wins. Returns '' when
 * nothing matches or inputs are unusable.
 * @param {string} text - free text (task description / blocker title)
 * @param {Object<string,string>} map - keyword → slug
 * @returns {string}
 */
function inferProjectSlug(text, map) {
  if (!text || !map || typeof map !== 'object') return '';
  const haystack = text.toLowerCase();
  let winner = '';
  let winnerLen = 0;
  for (const [keyword, slug] of Object.entries(map)) {
    // Strictly longer keywords only, so the first of equal-length matches wins.
    if (keyword.length <= winnerLen) continue;
    if (haystack.includes(keyword.toLowerCase())) {
      winner = slug;
      winnerLen = keyword.length;
    }
  }
  return winner;
}
|
|
148
|
+
|
|
149
|
+
// ---------- Main ----------
|
|
150
|
+
/**
 * Retroactively ingest every historical daily log in the workspace:
 *   Step 1 — upsert each logs/daily/YYYY-MM-DD.md into the daily_logs table;
 *   Step 2 — generate embeddings per log (skipped with --skip-embeddings);
 *   Step 3 — ask the Copilot CLI planner to extract tasks/blockers from each
 *            log and insert de-duplicated rows (skipped with --embeddings-only);
 *   Step 4 — generate embeddings for the newly inserted tasks/blockers.
 *
 * @param {string} workspaceDir - absolute path to the FREYA workspace
 * @param {string[]} fwdArgs - forwarded flags: --dry-run, --days N,
 *   --embeddings-only, --skip-embeddings
 * @returns {Promise<void>} calls process.exit(1) when logs/daily is missing
 */
async function retroactiveIngest(workspaceDir, fwdArgs) {
  const DRY_RUN = fwdArgs.includes('--dry-run');
  const EMBEDDINGS_ONLY = fwdArgs.includes('--embeddings-only');
  const SKIP_EMBEDDINGS = fwdArgs.includes('--skip-embeddings');
  const daysIdx = fwdArgs.indexOf('--days');
  // A `--days` with a missing/non-numeric value parses to NaN; NaN > 0 is
  // false below, so it degrades to "process all days".
  const MAX_DAYS = daysIdx >= 0 ? parseInt(fwdArgs[daysIdx + 1], 10) : 0;

  // Configure DataLayer to use workspace DB
  await configureDataLayer(workspaceDir);
  await ready;

  console.log('╔══════════════════════════════════════════════════════╗');
  console.log('║ FREYA — Retroactive Ingestion ║');
  console.log('╚══════════════════════════════════════════════════════╝');
  console.log(`Workspace: ${workspaceDir}`);
  console.log(`Mode: ${DRY_RUN ? 'DRY RUN' : EMBEDDINGS_ONLY ? 'EMBEDDINGS ONLY' : 'FULL INGESTION'}`);
  console.log('');

  const logsDir = path.join(workspaceDir, 'logs', 'daily');
  if (!fs.existsSync(logsDir)) {
    console.log('❌ No daily logs directory found at:', logsDir);
    process.exit(1);
  }

  // Only files named exactly YYYY-MM-DD.md count as daily logs; the sort is
  // lexicographic, which for this date format is also chronological.
  let files = fs.readdirSync(logsDir)
    .filter(f => /^\d{4}-\d{2}-\d{2}\.md$/.test(f))
    .sort();

  // --days N keeps only the N most recent logs.
  if (MAX_DAYS > 0) files = files.slice(-MAX_DAYS);

  console.log(`📁 Found ${files.length} daily log files to process`);
  console.log('');

  // Step 1: Sync all daily logs to SQLite
  console.log('── Step 1: Syncing daily logs to SQLite ──');
  const upsert = dl.db.prepare(`
    INSERT INTO daily_logs (date, raw_markdown) VALUES (?, ?)
    ON CONFLICT(date) DO UPDATE SET raw_markdown = excluded.raw_markdown
  `);
  // Single transaction keeps the bulk upsert atomic and fast.
  const syncTx = dl.db.transaction((fileList) => {
    for (const file of fileList) {
      const date = file.replace('.md', '');
      const content = fs.readFileSync(path.join(logsDir, file), 'utf8');
      upsert.run(date, content);
    }
  });
  syncTx(files);
  console.log(`✅ ${files.length} daily logs synced to SQLite`);
  console.log('');

  // Step 2: Generate embeddings
  if (!SKIP_EMBEDDINGS) {
    console.log('── Step 2: Generating embeddings ──');
    // NOTE(review): DataManager is null when its require failed at module
    // load (see top of file) — `new DataManager` would throw here and the
    // error is not caught. Confirm a guard is intended.
    const dm = new DataManager(workspaceDir, logsDir);
    let totalChunks = 0;
    for (let i = 0; i < files.length; i++) {
      const date = files[i].replace('.md', '');
      const content = fs.readFileSync(path.join(logsDir, files[i]), 'utf8');
      try {
        const count = await dm.generateEmbeddings('daily_log', date, content);
        totalChunks += count;
        // \r-prefixed writes keep a single in-place progress line.
        process.stdout.write(`\r [${i + 1}/${files.length}] ${date} — ${count} chunks`);
      } catch (err) {
        process.stdout.write(`\r [${i + 1}/${files.length}] ${date} — ❌ ${err.message}`);
      }
    }
    console.log(`\n✅ Generated ${totalChunks} embedding chunks total`);
    console.log('');
  }

  if (EMBEDDINGS_ONLY) {
    console.log('── Embeddings-only mode. Skipping task/blocker extraction. ──');
    dl.db.save();
    console.log('\n✅ Done!');
    return;
  }

  // Step 3: Extract tasks/blockers via copilot planner
  console.log('── Step 3: Extracting tasks & blockers via planner ──');

  const copilotResolved = getCopilotCmd();
  console.log(` ℹ Copilot resolved to: ${copilotResolved}`);

  // Verify it works
  const { cmd: testCmd, args: testArgsArr } = copilotSpawnArgs(copilotResolved, ['--version']);
  const testResult = await run(testCmd, testArgsArr, workspaceDir);
  if (testResult.code === 0) {
    console.log(` ✓ Found: ${(testResult.stdout || '').trim().split(/\r?\n/)[0]}`);
  } else {
    // No usable copilot CLI: persist what Steps 1-2 did and stop gracefully.
    console.log(' ❌ Could not find copilot CLI.');
    console.log(` stderr: ${(testResult.stderr || '').slice(0, 200)}`);
    console.log(' Skipping task/blocker extraction.');
    dl.db.save();
    console.log('\n✅ Retroactive ingestion complete (embeddings only)!');
    return;
  }

  const agentEnv = { FREYA_WORKSPACE_DIR: workspaceDir };
  const slugMap = readProjectSlugMap(workspaceDir);
  const validTaskCats = new Set(['DO_NOW', 'SCHEDULE', 'DELEGATE', 'IGNORE']);

  // Shape contract the planner must emit; serialized into the prompt below.
  const schema = {
    actions: [
      { type: 'create_task', description: '<string>', priority: 'HIGH|MEDIUM|LOW', category: 'DO_NOW|SCHEDULE|DELEGATE|IGNORE', projectSlug: '<string optional>' },
      { type: 'create_blocker', title: '<string>', severity: 'CRITICAL|HIGH|MEDIUM|LOW', notes: '<string>', projectSlug: '<string optional>' }
    ]
  };

  // Planner system prompt (Portuguese, matching the product's planner).
  const sysInstructions = `Você é o planner do sistema F.R.E.Y.A.

Analise o daily log abaixo e extraia TODAS as tarefas e blockers mencionados.
Procure por: ações mencionadas, pendências, problemas, impedimentos, decisões que geram trabalho.
Se NÃO houver tarefas ou blockers claros, retorne: {"actions":[]}
Retorne APENAS JSON válido no formato: ${JSON.stringify(schema)}
NÃO use code fences. NÃO inclua texto extra.
IMPORTANTE: Extraia APENAS informações explícitas do log. NÃO invente dados.`;

  let totalTasks = 0, totalBlockers = 0, totalSkipped = 0, totalErrors = 0;

  const insertTask = dl.db.prepare(`INSERT OR IGNORE INTO tasks (id, project_slug, description, category, status, created_at, metadata) VALUES (?, ?, ?, ?, ?, ?, ?)`);
  const insertBlocker = dl.db.prepare(`INSERT OR IGNORE INTO blockers (id, project_slug, title, severity, status, created_at, metadata) VALUES (?, ?, ?, ?, ?, ?, ?)`);

  // Pre-hash existing rows so duplicates across runs are skipped.
  // NOTE(review): these keys hash the bare description/title, while the
  // insert-time keys below prepend the project slug — a slugged item can
  // therefore collide differently than a pre-existing unslugged one; confirm.
  const existingTaskDescs = new Set(
    dl.db.prepare("SELECT description FROM tasks").all().map(t => sha1(normalizeTextForKey(t.description)))
  );
  const existingBlockerTitles = new Set(
    dl.db.prepare("SELECT title FROM blockers").all().map(b => sha1(normalizeTextForKey(b.title)))
  );

  // Prompts longer than this go through a temp file instead of argv.
  const SAFE_ARG_LEN = 24000;

  for (let i = 0; i < files.length; i++) {
    const file = files[i];
    const date = file.replace('.md', '');
    const content = fs.readFileSync(path.join(logsDir, file), 'utf8');

    // Near-empty logs are not worth a planner round-trip.
    if (content.trim().length < 50) {
      process.stdout.write(`\r [${i + 1}/${files.length}] ${date} — skipped (too small)`);
      continue;
    }

    const fullPrompt = `${sysInstructions}\n\nDAILY LOG (${date}):\n${content}\n`;

    try {
      let r;
      const copilotExtra = ['-s', '--no-color', '--stream', 'off'];

      if (fullPrompt.length > SAFE_ARG_LEN) {
        // Large prompt: write it to a temp file and have the agent read it
        // (needs --add-dir/--allow-all-tools so the CLI may open the file).
        const tmpFile = path.join(os.tmpdir(), `freya-retro-${Date.now()}.txt`);
        fs.writeFileSync(tmpFile, fullPrompt, 'utf8');
        const filePrompt = `Leia o arquivo abaixo e extraia tasks/blockers conforme as instruções contidas nele.\nARQUIVO: ${tmpFile}`;
        const { cmd: sc, args: sa } = copilotSpawnArgs(copilotResolved, [...copilotExtra, '--add-dir', os.tmpdir(), '--allow-all-tools', '-p', filePrompt]);
        r = await run(sc, sa, workspaceDir, agentEnv);
        try { fs.unlinkSync(tmpFile); } catch { }
      } else {
        const { cmd: sc, args: sa } = copilotSpawnArgs(copilotResolved, [...copilotExtra, '-p', fullPrompt]);
        r = await run(sc, sa, workspaceDir, agentEnv);
      }

      const out = (r.stdout + r.stderr).trim();
      if (r.code !== 0 || !out) {
        totalErrors++;
        // Print a verbose diagnostic only for the first failure.
        if (totalErrors === 1) {
          console.log(`\n ⚠ Planner diagnostic for ${date}:`);
          console.log(` Copilot: ${copilotResolved}`);
          console.log(` Exit code: ${r.code}`);
          console.log(` stdout: ${(r.stdout || '').slice(0, 300)}`);
          console.log(` stderr: ${(r.stderr || '').slice(0, 300)}`);
        }
        process.stdout.write(`\r [${i + 1}/${files.length}] ${date} — ❌ planner error (code=${r.code}) `);
        continue;
      }

      // Parse plan: try the first JSON object found; on failure, retry with
      // control characters re-escaped.
      const jsonText = extractFirstJsonObject(out) || out;
      let plan;
      try {
        plan = JSON.parse(jsonText);
      } catch {
        try { plan = JSON.parse(escapeJsonControlChars(jsonText)); } catch {
          totalErrors++;
          process.stdout.write(`\r [${i + 1}/${files.length}] ${date} — ❌ invalid JSON `);
          continue;
        }
      }

      const actions = Array.isArray(plan.actions) ? plan.actions : [];
      let fileTasks = 0, fileBlockers = 0, fileSkipped = 0;

      if (!DRY_RUN) {
        // Apply all of this file's inserts atomically.
        const applyTx = dl.db.transaction(() => {
          for (const a of actions) {
            if (!a || typeof a !== 'object') continue;

            if (a.type === 'create_task' && a.description) {
              const desc = normalizeWhitespace(a.description);
              if (!desc) continue;
              const projectSlug = String(a.projectSlug || '').trim() || inferProjectSlug(desc, slugMap);
              const key = sha1(normalizeTextForKey((projectSlug ? projectSlug + ' ' : '') + desc));
              if (existingTaskDescs.has(key)) { fileSkipped++; continue; }
              const id = `t-retro-${Date.now()}-${Math.random().toString(16).slice(2, 8)}`;
              const category = validTaskCats.has(String(a.category || '').trim()) ? String(a.category).trim() : 'DO_NOW';
              const metadata = JSON.stringify({ priority: a.priority || 'medium', source: 'retroactive', sourceDate: date });
              // created_at is pinned to noon UTC of the source log's date.
              insertTask.run(id, projectSlug || null, desc, category, 'PENDING', `${date}T12:00:00.000Z`, metadata);
              existingTaskDescs.add(key);
              fileTasks++;
            }

            if (a.type === 'create_blocker' && a.title) {
              const title = normalizeWhitespace(a.title);
              if (!title) continue;
              const notes = normalizeWhitespace(a.notes);
              const projectSlug = String(a.projectSlug || '').trim() || inferProjectSlug(title + ' ' + notes, slugMap);
              const key = sha1(normalizeTextForKey((projectSlug ? projectSlug + ' ' : '') + title));
              if (existingBlockerTitles.has(key)) { fileSkipped++; continue; }
              const id = `b-retro-${Date.now()}-${Math.random().toString(16).slice(2, 8)}`;
              // NOTE(review): severity is uppercased but not validated against
              // CRITICAL|HIGH|MEDIUM|LOW (unlike task category) — confirm.
              const severity = String(a.severity || 'MEDIUM').toUpperCase();
              const metadata = JSON.stringify({ description: notes || title, source: 'retroactive', sourceDate: date });
              insertBlocker.run(id, projectSlug || null, title, severity, 'OPEN', `${date}T12:00:00.000Z`, metadata);
              existingBlockerTitles.add(key);
              fileBlockers++;
            }
          }
        });
        applyTx();
      } else {
        // Dry run: count what WOULD be created, without dedupe checks.
        for (const a of actions) {
          if (a && a.type === 'create_task' && a.description) fileTasks++;
          if (a && a.type === 'create_blocker' && a.title) fileBlockers++;
        }
      }

      totalTasks += fileTasks;
      totalBlockers += fileBlockers;
      totalSkipped += fileSkipped;

      const status = fileTasks || fileBlockers
        ? `${fileTasks}T ${fileBlockers}B${fileSkipped ? ` (${fileSkipped} dup)` : ''}`
        : 'no actions';
      process.stdout.write(`\r [${i + 1}/${files.length}] ${date} — ${status} `);

      // Throttle: brief pause between planner invocations.
      if (i < files.length - 1) {
        await new Promise(r => setTimeout(r, 500));
      }

    } catch (err) {
      totalErrors++;
      process.stdout.write(`\r [${i + 1}/${files.length}] ${date} — ❌ ${err.message} `);
    }
  }

  dl.db.save();

  console.log('\n');
  console.log('══════════════════════════════════════════════════════');
  console.log(` 📊 Results ${DRY_RUN ? '(DRY RUN)' : ''}`);
  console.log(` Tasks created: ${totalTasks}`);
  console.log(` Blockers created: ${totalBlockers}`);
  console.log(` Duplicates skipped: ${totalSkipped}`);
  console.log(` Errors: ${totalErrors}`);
  console.log('══════════════════════════════════════════════════════');

  // Step 4: embed the rows created above. The query selects ALL rows with
  // source='retroactive', so rows from previous runs are re-embedded too.
  if (!DRY_RUN && !SKIP_EMBEDDINGS && (totalTasks > 0 || totalBlockers > 0)) {
    console.log('\n── Step 4: Generating embeddings for new tasks/blockers ──');
    const dm = new DataManager(workspaceDir, logsDir);
    const newTasks = dl.db.prepare("SELECT id, description FROM tasks WHERE json_extract(metadata, '$.source') = 'retroactive'").all();
    const newBlockers = dl.db.prepare("SELECT id, title, json_extract(metadata, '$.description') as notes FROM blockers WHERE json_extract(metadata, '$.source') = 'retroactive'").all();
    let embCount = 0;
    for (const t of newTasks) {
      try { embCount += await dm.generateEmbeddings('task', t.id, t.description); } catch { }
    }
    for (const b of newBlockers) {
      try { embCount += await dm.generateEmbeddings('blocker', b.id, b.title + ' ' + (b.notes || '')); } catch { }
    }
    console.log(`✅ Generated ${embCount} embedding chunks for new entities`);
  }

  console.log('\n✅ Retroactive ingestion complete!');
}
|
|
429
|
+
|
|
430
|
+
module.exports = retroactiveIngest;
|
package/package.json
CHANGED
package/scripts/lib/DataLayer.js
CHANGED
|
@@ -100,15 +100,37 @@ class SqlJsDatabase {
|
|
|
100
100
|
|
|
101
101
|
/**
|
|
102
102
|
* Persist the in-memory database to disk.
|
|
103
|
+
* Retries with backoff on EBUSY (OneDrive/cloud sync locking).
|
|
103
104
|
*/
|
|
104
105
|
_save() {
  // Pure in-memory database (no backing file) — nothing to persist.
  if (!this._filePath) return;
  // Export the whole sql.js database to bytes once, up front, so each
  // retry attempt rewrites identical data without re-exporting.
  const data = this._db.export();
  const buffer = Buffer.from(data);
  const maxRetries = 5;
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    try {
      // Write to temp file + rename for atomic operation
      const tmpPath = this._filePath + '.tmp';
      fs.writeFileSync(tmpPath, buffer);
      try {
        fs.renameSync(tmpPath, this._filePath);
      } catch (renameErr) {
        // On Windows, rename can fail if target is locked; fall back to direct write
        // (this fallback is NOT atomic — a crash mid-write can leave a partial file).
        try { fs.unlinkSync(tmpPath); } catch { }
        fs.writeFileSync(this._filePath, buffer);
      }
      return; // success
    } catch (err) {
      // EBUSY/EPERM typically mean a cloud-sync client (OneDrive etc.)
      // briefly holds the file; retry with backoff.
      if ((err.code === 'EBUSY' || err.code === 'EPERM') && attempt < maxRetries) {
        // Wait with exponential backoff: 100ms, 200ms, 400ms, 800ms, 1600ms
        // NOTE(review): this spin-loop blocks the event loop for up to ~3.1s
        // total (deliberate — _save is synchronous); confirm acceptable.
        const delay = 100 * Math.pow(2, attempt);
        const start = Date.now();
        while (Date.now() - start < delay) { /* busy wait — sync context */ }
        continue;
      }
      // Non-retryable error or retries exhausted: log and swallow so a
      // failed save never crashes the caller.
      console.error('[DataLayer] Failed to save database:', err.message);
      return;
    }
  }
}
|
|
114
136
|
|
|
@@ -100,15 +100,37 @@ class SqlJsDatabase {
|
|
|
100
100
|
|
|
101
101
|
/**
|
|
102
102
|
* Persist the in-memory database to disk.
|
|
103
|
+
* Retries with backoff on EBUSY (OneDrive/cloud sync locking).
|
|
103
104
|
*/
|
|
104
105
|
_save() {
  // Pure in-memory database (no backing file) — nothing to persist.
  if (!this._filePath) return;
  // Export the whole sql.js database to bytes once, up front, so each
  // retry attempt rewrites identical data without re-exporting.
  const data = this._db.export();
  const buffer = Buffer.from(data);
  const maxRetries = 5;
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    try {
      // Write to temp file + rename for atomic operation
      const tmpPath = this._filePath + '.tmp';
      fs.writeFileSync(tmpPath, buffer);
      try {
        fs.renameSync(tmpPath, this._filePath);
      } catch (renameErr) {
        // On Windows, rename can fail if target is locked; fall back to direct write
        // (this fallback is NOT atomic — a crash mid-write can leave a partial file).
        try { fs.unlinkSync(tmpPath); } catch { }
        fs.writeFileSync(this._filePath, buffer);
      }
      return; // success
    } catch (err) {
      // EBUSY/EPERM typically mean a cloud-sync client (OneDrive etc.)
      // briefly holds the file; retry with backoff.
      if ((err.code === 'EBUSY' || err.code === 'EPERM') && attempt < maxRetries) {
        // Wait with exponential backoff: 100ms, 200ms, 400ms, 800ms, 1600ms
        // NOTE(review): this spin-loop blocks the event loop for up to ~3.1s
        // total (deliberate — _save is synchronous); confirm acceptable.
        const delay = 100 * Math.pow(2, attempt);
        const start = Date.now();
        while (Date.now() - start < delay) { /* busy wait — sync context */ }
        continue;
      }
      // Non-retryable error or retries exhausted: log and swallow so a
      // failed save never crashes the caller.
      console.error('[DataLayer] Failed to save database:', err.message);
      return;
    }
  }
}
|
|
114
136
|
|