@virtengine/openfleet 0.25.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env.example +914 -0
- package/LICENSE +190 -0
- package/README.md +500 -0
- package/agent-endpoint.mjs +918 -0
- package/agent-hook-bridge.mjs +230 -0
- package/agent-hooks.mjs +1188 -0
- package/agent-pool.mjs +2403 -0
- package/agent-prompts.mjs +689 -0
- package/agent-sdk.mjs +141 -0
- package/anomaly-detector.mjs +1195 -0
- package/autofix.mjs +1294 -0
- package/claude-shell.mjs +708 -0
- package/cli.mjs +906 -0
- package/codex-config.mjs +1274 -0
- package/codex-model-profiles.mjs +135 -0
- package/codex-shell.mjs +762 -0
- package/config-doctor.mjs +613 -0
- package/config.mjs +1720 -0
- package/conflict-resolver.mjs +248 -0
- package/container-runner.mjs +450 -0
- package/copilot-shell.mjs +827 -0
- package/daemon-restart-policy.mjs +56 -0
- package/diff-stats.mjs +282 -0
- package/error-detector.mjs +829 -0
- package/fetch-runtime.mjs +34 -0
- package/fleet-coordinator.mjs +838 -0
- package/get-telegram-chat-id.mjs +71 -0
- package/git-safety.mjs +170 -0
- package/github-reconciler.mjs +403 -0
- package/hook-profiles.mjs +651 -0
- package/kanban-adapter.mjs +4491 -0
- package/lib/logger.mjs +645 -0
- package/maintenance.mjs +828 -0
- package/merge-strategy.mjs +1171 -0
- package/monitor.mjs +12207 -0
- package/openfleet.config.example.json +115 -0
- package/openfleet.schema.json +465 -0
- package/package.json +203 -0
- package/postinstall.mjs +187 -0
- package/pr-cleanup-daemon.mjs +978 -0
- package/preflight.mjs +408 -0
- package/prepublish-check.mjs +90 -0
- package/presence.mjs +328 -0
- package/primary-agent.mjs +282 -0
- package/publish.mjs +151 -0
- package/repo-root.mjs +29 -0
- package/restart-controller.mjs +100 -0
- package/review-agent.mjs +557 -0
- package/rotate-agent-logs.sh +133 -0
- package/sdk-conflict-resolver.mjs +973 -0
- package/session-tracker.mjs +880 -0
- package/setup.mjs +3937 -0
- package/shared-knowledge.mjs +410 -0
- package/shared-state-manager.mjs +841 -0
- package/shared-workspace-cli.mjs +199 -0
- package/shared-workspace-registry.mjs +537 -0
- package/shared-workspaces.json +18 -0
- package/startup-service.mjs +1070 -0
- package/sync-engine.mjs +1063 -0
- package/task-archiver.mjs +801 -0
- package/task-assessment.mjs +550 -0
- package/task-claims.mjs +924 -0
- package/task-complexity.mjs +581 -0
- package/task-executor.mjs +5111 -0
- package/task-store.mjs +753 -0
- package/telegram-bot.mjs +9281 -0
- package/telegram-sentinel.mjs +2010 -0
- package/ui/app.js +867 -0
- package/ui/app.legacy.js +1464 -0
- package/ui/app.monolith.js +2488 -0
- package/ui/components/charts.js +226 -0
- package/ui/components/chat-view.js +567 -0
- package/ui/components/command-palette.js +587 -0
- package/ui/components/diff-viewer.js +190 -0
- package/ui/components/forms.js +327 -0
- package/ui/components/kanban-board.js +451 -0
- package/ui/components/session-list.js +305 -0
- package/ui/components/shared.js +473 -0
- package/ui/index.html +70 -0
- package/ui/modules/api.js +297 -0
- package/ui/modules/icons.js +461 -0
- package/ui/modules/router.js +81 -0
- package/ui/modules/settings-schema.js +261 -0
- package/ui/modules/state.js +679 -0
- package/ui/modules/telegram.js +331 -0
- package/ui/modules/utils.js +270 -0
- package/ui/styles/animations.css +140 -0
- package/ui/styles/base.css +98 -0
- package/ui/styles/components.css +1915 -0
- package/ui/styles/kanban.css +286 -0
- package/ui/styles/layout.css +809 -0
- package/ui/styles/sessions.css +827 -0
- package/ui/styles/variables.css +188 -0
- package/ui/styles.css +141 -0
- package/ui/styles.monolith.css +1046 -0
- package/ui/tabs/agents.js +1417 -0
- package/ui/tabs/chat.js +74 -0
- package/ui/tabs/control.js +887 -0
- package/ui/tabs/dashboard.js +515 -0
- package/ui/tabs/infra.js +537 -0
- package/ui/tabs/logs.js +783 -0
- package/ui/tabs/settings.js +1487 -0
- package/ui/tabs/tasks.js +1385 -0
- package/ui-server.mjs +4073 -0
- package/update-check.mjs +465 -0
- package/utils.mjs +172 -0
- package/ve-kanban.mjs +654 -0
- package/ve-kanban.ps1 +1365 -0
- package/ve-kanban.sh +18 -0
- package/ve-orchestrator.mjs +340 -0
- package/ve-orchestrator.ps1 +6546 -0
- package/ve-orchestrator.sh +18 -0
- package/vibe-kanban-wrapper.mjs +41 -0
- package/vk-error-resolver.mjs +470 -0
- package/vk-log-stream.mjs +914 -0
- package/whatsapp-channel.mjs +520 -0
- package/workspace-monitor.mjs +581 -0
- package/workspace-reaper.mjs +405 -0
- package/workspace-registry.mjs +238 -0
- package/worktree-manager.mjs +1266 -0
|
@@ -0,0 +1,801 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* task-archiver.mjs
|
|
3
|
+
*
|
|
4
|
+
* Automatically archives completed VK tasks to local .cache after 1+ days.
|
|
5
|
+
* Keeps VK database clean and fast by moving old completed tasks out of sight.
|
|
6
|
+
*
|
|
7
|
+
* Storage format: one JSON file per day (YYYY-MM-DD.json) containing an array
|
|
8
|
+
* of archived task entries. This keeps the archive directory compact while
|
|
9
|
+
* still allowing easy browsing by date.
|
|
10
|
+
*
|
|
11
|
+
* Robustness features:
|
|
12
|
+
* - Idempotent: re-archiving an already-archived task is a no-op
|
|
13
|
+
* - Atomic writes via temp file + rename to prevent corruption
|
|
14
|
+
* - Archive pruning: removes archives older than retention period
|
|
15
|
+
* - Graceful handling of corrupted archive files
|
|
16
|
+
* - Session cleanup is best-effort and never blocks archival
|
|
17
|
+
* - Auto-migrates legacy per-task files into daily grouped files
|
|
18
|
+
*/
|
|
19
|
+
|
|
20
|
+
import {
|
|
21
|
+
mkdir,
|
|
22
|
+
writeFile,
|
|
23
|
+
readdir,
|
|
24
|
+
readFile,
|
|
25
|
+
rename,
|
|
26
|
+
rm,
|
|
27
|
+
stat,
|
|
28
|
+
unlink,
|
|
29
|
+
} from "node:fs/promises";
|
|
30
|
+
import { existsSync } from "node:fs";
|
|
31
|
+
import { resolve, dirname } from "node:path";
|
|
32
|
+
import { fileURLToPath } from "node:url";
|
|
33
|
+
import { randomBytes } from "node:crypto";
|
|
34
|
+
|
|
35
|
+
// Location of this module on disk — the archive directory lives alongside the
// installed package (inside its .cache folder), not in the user's repo.
const __dirname = dirname(fileURLToPath(import.meta.url));

/** @type {string} Default archive directory (one JSON file per day) */
export const ARCHIVE_DIR = resolve(__dirname, ".cache", "completed-tasks");

/** @type {number} Archive tasks completed more than this many hours ago */
export const ARCHIVE_AGE_HOURS = 24;

/** @type {number} Prune archive files older than this many days */
export const ARCHIVE_RETENTION_DAYS = 90;

/** @type {number} Max tasks to archive per sweep to avoid overload */
export const DEFAULT_MAX_ARCHIVE = 50;

// ── Daily-file helpers ───────────────────────────────────────────────────────
|
|
50
|
+
|
|
51
|
+
/**
 * Resolve the on-disk location of a daily archive file:
 * `<archiveDir>/YYYY-MM-DD.json`.
 *
 * @param {string} dateStr - date key in YYYY-MM-DD form
 * @param {string} archiveDir - directory holding the archive files
 * @returns {string} absolute path of the daily file
 */
function dailyFilePath(dateStr, archiveDir) {
  const fileName = `${dateStr}.json`;
  return resolve(archiveDir, fileName);
}
|
|
57
|
+
|
|
58
|
+
/**
 * Read the entries array from a daily archive file.
 *
 * Returns [] for missing or corrupted files so callers never need to handle
 * errors. Accepts either a bare array or a `{ entries: [...] }` wrapper.
 *
 * @param {string} dateStr - YYYY-MM-DD date key
 * @param {string} [archiveDir] - override archive directory
 * @returns {Promise<object[]>}
 */
export async function readDailyArchive(dateStr, archiveDir = ARCHIVE_DIR) {
  const filePath = dailyFilePath(dateStr, archiveDir);
  try {
    if (!existsSync(filePath)) return [];
    const parsed = JSON.parse(await readFile(filePath, "utf8"));
    if (Array.isArray(parsed)) return parsed;
    return parsed?.entries ?? [];
  } catch {
    // Missing, unreadable, or malformed JSON — behave as an empty archive.
    return [];
  }
}
|
|
73
|
+
|
|
74
|
+
/**
 * Persist an entries array to a daily archive file.
 *
 * Writes to a random `.tmp-*` scratch file first and renames it into place so
 * a crash mid-write cannot corrupt an existing archive. If rename fails
 * (e.g. across devices), falls back to a direct (non-atomic) write.
 *
 * @param {string} dateStr - YYYY-MM-DD date key
 * @param {object[]} entries - archive entries to serialize
 * @param {string} archiveDir - target directory (created if absent)
 */
async function writeDailyArchive(dateStr, entries, archiveDir) {
  await mkdir(archiveDir, { recursive: true });
  const target = dailyFilePath(dateStr, archiveDir);
  const scratch = resolve(
    archiveDir,
    `.tmp-${randomBytes(6).toString("hex")}.json`,
  );
  const serialized = JSON.stringify(entries, null, 2);
  await writeFile(scratch, serialized);
  try {
    await rename(scratch, target);
  } catch {
    // rename can fail across filesystems; fall back to a plain write
    await writeFile(target, serialized);
    await rm(scratch, { force: true }).catch(() => {});
  }
}
|
|
94
|
+
|
|
95
|
+
/**
 * Check whether a task has already been archived to the local file store.
 *
 * Two layouts are recognized:
 *  - legacy per-task files, where the task ID is embedded in the filename;
 *  - daily grouped files (YYYY-MM-DD.json), whose entries are searched for a
 *    matching `task.id`.
 *
 * Never throws — any I/O or parse failure is treated as "not archived".
 *
 * @param {string} taskId
 * @param {string} [archiveDir]
 * @returns {Promise<boolean>}
 */
export async function isAlreadyArchived(taskId, archiveDir = ARCHIVE_DIR) {
  if (!taskId) return false;
  try {
    if (!existsSync(archiveDir)) return false;

    for (const name of await readdir(archiveDir)) {
      if (!name.endsWith(".json") || name.startsWith(".tmp-")) continue;

      // Legacy layout: one file per task, ID in the filename.
      if (name.includes(taskId)) return true;

      // Daily layout: YYYY-MM-DD.json holding an array of entries.
      if (!/^\d{4}-\d{2}-\d{2}\.json$/.test(name)) continue;
      try {
        const parsed = JSON.parse(
          await readFile(resolve(archiveDir, name), "utf8"),
        );
        const entries = Array.isArray(parsed)
          ? parsed
          : (parsed?.entries ?? []);
        if (entries.some((entry) => entry.task?.id === taskId)) return true;
      } catch {
        // Corrupted daily file — treat as not containing the task.
      }
    }
    return false;
  } catch {
    return false;
  }
}
|
|
137
|
+
|
|
138
|
+
/**
 * Archive a single task into the daily grouped file for its completion date.
 * Idempotent — returns the daily file path if the task was already archived.
 *
 * @param {object} task - VK task object; must carry an `id`
 * @param {object|null} attemptData - optional attempt details stored alongside
 * @param {string} [archiveDir] - override archive directory
 * @returns {Promise<string|null>} path to the daily archive file, or null on failure
 */
export async function archiveTaskToFile(
  task,
  attemptData = null,
  archiveDir = ARCHIVE_DIR,
) {
  try {
    // Guard: without an id the task can neither be stored nor deduplicated.
    // Log an honest diagnostic (previously this echoed a fabricated TypeError
    // message that never actually occurred, which misled debugging).
    if (!task || !task.id) {
      console.error(
        `[archiver] Failed to archive task ${task?.id}: invalid task object (missing id)`,
      );
      return null;
    }

    await mkdir(archiveDir, { recursive: true });

    // Bucket by completion date; fall back to last update, then "now".
    const completedAt = new Date(
      task.completed_at || task.updated_at || Date.now(),
    );
    const dateStr = completedAt.toISOString().split("T")[0]; // YYYY-MM-DD

    // Read existing daily file (empty array if absent/corrupted)
    const entries = await readDailyArchive(dateStr, archiveDir);

    // Idempotent: skip if already in the daily file
    if (entries.some((e) => e.task?.id === task.id)) {
      return dailyFilePath(dateStr, archiveDir);
    }

    const archiveEntry = {
      task,
      attempt: attemptData,
      archived_at: new Date().toISOString(),
      archiver_version: 3,
    };

    entries.push(archiveEntry);
    await writeDailyArchive(dateStr, entries, archiveDir);

    return dailyFilePath(dateStr, archiveDir);
  } catch (err) {
    // Includes invalid completed_at dates (toISOString throws RangeError).
    console.error(
      `[archiver] Failed to archive task ${task?.id}: ${err.message}`,
    );
    return null;
  }
}
|
|
193
|
+
|
|
194
|
+
/**
 * Fetch completed ("done" or "cancelled") tasks for a project from the VK API.
 *
 * @param {function} fetchVk - VK API fetch helper returning { success, data }
 * @param {string} projectId - VK project ID
 * @returns {Promise<object[]>} combined task list; [] on any failure
 */
export async function fetchCompletedTasks(fetchVk, projectId) {
  if (!fetchVk || !projectId) return [];
  try {
    const collected = [];
    for (const status of ["done", "cancelled"]) {
      const res = await fetchVk(
        `/api/tasks?project_id=${projectId}&status=${status}`,
      );
      const rows = res?.success && Array.isArray(res.data) ? res.data : [];
      collected.push(...rows);
    }
    return collected;
  } catch (err) {
    console.error(`[archiver] Failed to fetch completed tasks: ${err.message}`);
    return [];
  }
}
|
|
221
|
+
|
|
222
|
+
/**
 * Decide whether a completed task has aged past the archive threshold.
 *
 * Uses `completed_at`, falling back to `updated_at`; an unparsable timestamp
 * means the task is never considered old enough.
 *
 * @param {object} task
 * @param {{ ageHours?: number, nowMs?: number }} [opts]
 * @returns {boolean}
 */
export function isOldEnoughToArchive(task, opts = {}) {
  const thresholdHours = opts.ageHours ?? ARCHIVE_AGE_HOURS;
  const referenceMs = opts.nowMs ?? Date.now();
  const finishedMs = new Date(
    task.completed_at || task.updated_at,
  ).getTime();
  if (Number.isNaN(finishedMs)) return false;
  const elapsedHours = (referenceMs - finishedMs) / (1000 * 60 * 60);
  return elapsedHours >= thresholdHours;
}
|
|
236
|
+
|
|
237
|
+
/**
 * Clean up agent sessions (Copilot/Codex/Claude) associated with a task.
 * Best-effort — never throws; returns the count of cleaned sessions.
 *
 * Codex/Claude sessions are removed by scanning `~/.codex/sessions` and
 * `~/.claude/sessions` for files whose names contain the task or attempt ID.
 * Copilot sessions are removed via the `gh` CLI when available.
 *
 * @param {string} taskId
 * @param {string} attemptId
 * @returns {Promise<number>}
 */
export async function cleanupAgentSessions(taskId, attemptId) {
  if (!taskId) return 0;
  let cleaned = 0;

  const homeDir = process.env.HOME || process.env.USERPROFILE;
  if (!homeDir) return 0;

  // Helper: remove session files whose name references the task or attempt.
  async function cleanDir(sessionDir) {
    try {
      if (!existsSync(sessionDir)) return 0;
      const sessionFiles = await readdir(sessionDir);
      let dirCleaned = 0;

      for (const file of sessionFiles) {
        if (file.includes(taskId) || (attemptId && file.includes(attemptId))) {
          await rm(resolve(sessionDir, file), { force: true, recursive: true });
          dirCleaned++;
        }
      }
      return dirCleaned;
    } catch {
      // Unreadable session dir — nothing cleaned here.
      return 0;
    }
  }

  // Codex SDK sessions
  cleaned += await cleanDir(resolve(homeDir, ".codex", "sessions"));

  // Claude SDK sessions
  cleaned += await cleanDir(resolve(homeDir, ".claude", "sessions"));

  // Copilot sessions — try via CLI (best-effort, fast timeout).
  // execFileSync with an argv array is used instead of execSync with an
  // interpolated command string: session IDs come from external CLI output
  // and must never be interpreted by a shell (command-injection hardening).
  try {
    const { execFileSync } = await import("node:child_process");
    const sessionsOutput = execFileSync(
      "gh",
      ["copilot", "session", "list", "--json"],
      {
        encoding: "utf8",
        timeout: 5000,
        stdio: ["pipe", "pipe", "ignore"],
      },
    );
    const sessions = JSON.parse(sessionsOutput);
    if (Array.isArray(sessions)) {
      for (const session of sessions) {
        if (
          session.id?.includes(taskId) ||
          (attemptId && session.id?.includes(attemptId))
        ) {
          execFileSync("gh", ["copilot", "session", "delete", session.id], {
            timeout: 5000,
            stdio: ["pipe", "pipe", "ignore"],
          });
          cleaned++;
        }
      }
    }
  } catch {
    // Copilot CLI might not be available or no sessions to clean
  }

  return cleaned;
}
|
|
306
|
+
|
|
307
|
+
/**
 * Remove an archived task from VK.
 *
 * Attempts a hard DELETE first; if that does not report success, falls back
 * to marking the task `archived` via PUT.
 *
 * @param {function} fetchVk - VK API fetch helper returning { success }
 * @param {string} taskId
 * @returns {Promise<boolean>} true if either removal path succeeded
 */
export async function deleteTaskFromVK(fetchVk, taskId) {
  if (!fetchVk || !taskId) return false;
  const endpoint = `/api/tasks/${taskId}`;
  try {
    // Preferred path: hard delete.
    const removal = await fetchVk(endpoint, { method: "DELETE" });
    if (removal?.success) return true;

    // Fallback path: flag the task as archived instead.
    const update = await fetchVk(endpoint, {
      method: "PUT",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({ status: "archived" }),
    });
    return update?.success || false;
  } catch (err) {
    console.error(`[archiver] Failed to delete task ${taskId}: ${err.message}`);
    return false;
  }
}
|
|
338
|
+
|
|
339
|
+
/**
 * Prune archive files whose modification time exceeds the retention period.
 *
 * Applies to every `.json` file in the directory (including stale `.tmp-*`
 * scratch files), judged by filesystem mtime rather than filename date.
 *
 * @param {{ retentionDays?: number, archiveDir?: string, nowMs?: number }} [opts]
 * @returns {Promise<number>} number of files pruned
 */
export async function pruneOldArchives(opts = {}) {
  const retentionDays = opts.retentionDays ?? ARCHIVE_RETENTION_DAYS;
  const archiveDir = opts.archiveDir ?? ARCHIVE_DIR;
  const nowMs = opts.nowMs ?? Date.now();
  const cutoffMs = retentionDays * 24 * 60 * 60 * 1000;

  let pruned = 0;
  try {
    if (!existsSync(archiveDir)) return 0;

    for (const file of await readdir(archiveDir)) {
      if (!file.endsWith(".json")) continue;
      const filePath = resolve(archiveDir, file);
      try {
        const { mtimeMs } = await stat(filePath);
        if (nowMs - mtimeMs > cutoffMs) {
          await rm(filePath, { force: true });
          pruned++;
        }
      } catch {
        // Skip files we can't stat
      }
    }

    if (pruned > 0) {
      console.log(
        `[archiver] Pruned ${pruned} archive files older than ${retentionDays} days`,
      );
    }
  } catch (err) {
    console.warn(`[archiver] Archive pruning failed: ${err.message}`);
  }
  return pruned;
}
|
|
379
|
+
|
|
380
|
+
/**
 * Gather archive statistics: number of JSON files, number of archived tasks,
 * and total size in bytes. Temp (`.tmp-*`) files are excluded.
 *
 * Daily files contribute their entry count; any other JSON file is assumed to
 * be a legacy per-task file and counts as one task.
 *
 * @param {string} [archiveDir]
 * @returns {Promise<{ count: number, taskCount: number, totalBytes: number }>}
 */
export async function getArchiveStats(archiveDir = ARCHIVE_DIR) {
  try {
    if (!existsSync(archiveDir))
      return { count: 0, taskCount: 0, totalBytes: 0 };

    const names = (await readdir(archiveDir)).filter(
      (f) => f.endsWith(".json") && !f.startsWith(".tmp-"),
    );

    let totalBytes = 0;
    let taskCount = 0;
    for (const name of names) {
      try {
        const filePath = resolve(archiveDir, name);
        totalBytes += (await stat(filePath)).size;

        if (/^\d{4}-\d{2}-\d{2}\.json$/.test(name)) {
          // Daily grouped file — count its entries.
          const parsed = JSON.parse(await readFile(filePath, "utf8"));
          const arr = Array.isArray(parsed)
            ? parsed
            : (parsed?.entries ?? []);
          taskCount += arr.length;
        } else {
          // Legacy per-task file
          taskCount += 1;
        }
      } catch {
        // skip unreadable files
      }
    }

    return { count: names.length, taskCount, totalBytes };
  } catch {
    return { count: 0, taskCount: 0, totalBytes: 0 };
  }
}
|
|
424
|
+
|
|
425
|
+
/**
 * Migrate legacy per-task archive files (YYYY-MM-DD-{uuid}.json) into daily
 * grouped files (YYYY-MM-DD.json). Idempotent — safe to call on every sweep.
 *
 * Only legacy files that were successfully merged (or confirmed to be
 * duplicates of already-migrated entries) are deleted; a legacy file that
 * fails to read/parse is left on disk so its data is never silently lost.
 *
 * @param {string} [archiveDir]
 * @returns {Promise<{ migrated: number, errors: number }>}
 */
export async function migrateLegacyArchives(archiveDir = ARCHIVE_DIR) {
  const result = { migrated: 0, errors: 0 };
  try {
    if (!existsSync(archiveDir)) return result;

    const files = await readdir(archiveDir);
    // Legacy files match YYYY-MM-DD-<more-chars>.json; pure daily files
    // (YYYY-MM-DD.json) and temp files are excluded.
    const legacyFiles = files.filter((f) => {
      if (!f.endsWith(".json") || f.startsWith(".tmp-")) return false;
      if (/^\d{4}-\d{2}-\d{2}\.json$/.test(f)) return false;
      return /^\d{4}-\d{2}-\d{2}-.+\.json$/.test(f);
    });

    if (legacyFiles.length === 0) return result;

    // Group legacy files by their YYYY-MM-DD prefix
    /** @type {Map<string, string[]>} */
    const grouped = new Map();
    for (const f of legacyFiles) {
      const datePrefix = f.slice(0, 10); // YYYY-MM-DD
      const arr = grouped.get(datePrefix) ?? [];
      arr.push(f);
      grouped.set(datePrefix, arr);
    }

    for (const [datePrefix, fileNames] of grouped) {
      try {
        // Read existing daily file (may already have entries)
        const existing = await readDailyArchive(datePrefix, archiveDir);

        // Set of already-present task IDs to avoid duplicate entries
        const existingIds = new Set(
          existing.map((e) => e.task?.id).filter(Boolean),
        );

        // Track which legacy files were safely absorbed; only those may be
        // deleted afterwards (fixes silent data loss on parse failures).
        const absorbed = [];
        for (const legacyFile of fileNames) {
          try {
            const raw = await readFile(resolve(archiveDir, legacyFile), "utf8");
            const entry = JSON.parse(raw);
            const taskId = entry?.task?.id;

            if (taskId && existingIds.has(taskId)) {
              // Already merged — the legacy copy is redundant.
              absorbed.push(legacyFile);
              continue;
            }

            existing.push(entry);
            if (taskId) existingIds.add(taskId);
            absorbed.push(legacyFile);
            result.migrated++;
          } catch {
            // Unreadable legacy file: keep it on disk rather than lose data.
            result.errors++;
          }
        }

        // Write consolidated daily file, then remove the absorbed legacy files
        await writeDailyArchive(datePrefix, existing, archiveDir);
        for (const legacyFile of absorbed) {
          try {
            const p = resolve(archiveDir, legacyFile);
            if (existsSync(p)) await unlink(p);
          } catch {
            // best effort
          }
        }
      } catch {
        result.errors++;
      }
    }

    if (result.migrated > 0) {
      console.log(
        `[archiver] Migrated ${result.migrated} legacy files into daily archives`,
      );
    }
  } catch (err) {
    console.error(`[archiver] Migration error: ${err.message}`);
    result.errors++;
  }
  return result;
}
|
|
516
|
+
|
|
517
|
+
/**
 * Main archiver function — runs during the maintenance sweep.
 *
 * Pipeline per sweep: (1) migrate legacy per-task files, (2) fetch completed
 * tasks from VK and filter by age, (3) for each old task: archive to the
 * local daily file, clean up agent sessions, delete from VK, (4) prune
 * archives past the retention period. Ordering matters: a task is only
 * deleted from VK after it has been safely written to the local archive.
 *
 * @param {function} fetchVk - VK API fetch function
 * @param {string} projectId - VK project ID
 * @param {object} [options]
 * @param {boolean} [options.dryRun=false] - if true, archive to file but don't delete from VK or touch sessions
 * @param {number} [options.maxArchive=50] - max tasks to archive per cycle
 * @param {number} [options.ageHours] - override age threshold
 * @param {boolean} [options.prune=true] - prune old archives
 * @param {string} [options.archiveDir] - override archive directory
 * @returns {Promise<{ archived: number, deleted: number, skipped: number, sessionsCleaned: number, pruned: number, migrated: number, errors: number }>}
 */
export async function archiveCompletedTasks(fetchVk, projectId, options = {}) {
  const dryRun = options.dryRun ?? false;
  const maxArchive = options.maxArchive ?? DEFAULT_MAX_ARCHIVE;
  const ageHours = options.ageHours ?? ARCHIVE_AGE_HOURS;
  const shouldPrune = options.prune ?? true;
  const archiveDir = options.archiveDir ?? ARCHIVE_DIR;

  console.log(
    `[archiver] Scanning for completed tasks older than ${ageHours}h...`,
  );

  // Counters returned to the caller; also returned (partially filled) if the
  // sweep aborts with an error.
  const result = {
    archived: 0,
    deleted: 0,
    skipped: 0,
    sessionsCleaned: 0,
    pruned: 0,
    migrated: 0,
    errors: 0,
  };

  try {
    // Auto-migrate legacy per-task files on every sweep
    const migration = await migrateLegacyArchives(archiveDir);
    result.migrated = migration.migrated;

    const completedTasks = await fetchCompletedTasks(fetchVk, projectId);
    const oldTasks = completedTasks.filter((t) =>
      isOldEnoughToArchive(t, { ageHours }),
    );

    if (oldTasks.length === 0) {
      console.log(`[archiver] No old completed tasks to archive`);
    } else {
      console.log(
        `[archiver] Found ${oldTasks.length} tasks to archive (limit: ${maxArchive})`,
      );

      for (const task of oldTasks.slice(0, maxArchive)) {
        // Skip already-archived tasks (idempotent guard)
        if (await isAlreadyArchived(task.id, archiveDir)) {
          result.skipped++;
          continue;
        }

        // Archive to file first — never delete from VK before the local
        // copy is confirmed written.
        const archivePath = await archiveTaskToFile(task, null, archiveDir);
        if (!archivePath) {
          result.errors++;
          continue;
        }

        result.archived++;
        console.log(
          `[archiver] Archived task "${task.title}" (${task.id.substring(0, 8)})`,
        );

        // Clean up agent sessions (best-effort, never blocks)
        if (!dryRun) {
          const attemptId = task.latest_attempt_id || task.attempt_id || "";
          const sessionsCleanedForTask = await cleanupAgentSessions(
            task.id,
            attemptId,
          );
          if (sessionsCleanedForTask > 0) {
            result.sessionsCleaned += sessionsCleanedForTask;
            console.log(
              `[archiver] Cleaned ${sessionsCleanedForTask} agent session(s) for task "${task.title}"`,
            );
          }
        }

        // Delete from VK unless dry-run
        if (!dryRun) {
          const deleteSuccess = await deleteTaskFromVK(fetchVk, task.id);
          if (deleteSuccess) {
            result.deleted++;
          } else {
            // Deletion failure is logged but not counted as an error — the
            // task remains archived locally and will be skipped next sweep.
            console.warn(
              `[archiver] Failed to delete task "${task.title}" (${task.id.substring(0, 8)}) from VK`,
            );
          }
        }
      }
    }

    // Prune old archives beyond retention period
    if (shouldPrune) {
      result.pruned = await pruneOldArchives({ archiveDir });
    }

    return result;
  } catch (err) {
    console.error(`[archiver] Archive sweep failed: ${err.message}`);
    result.errors++;
    return result;
  }
}
|
|
628
|
+
|
|
629
|
+
/**
 * Load archived tasks for sprint review.
 * Reads both daily grouped files (v3) and legacy per-task files (v2).
 *
 * @param {object} [options]
 * @param {string|Date} [options.since] - include archives after this date
 * @param {string|Date} [options.until] - include archives before this date
 * @param {string} [options.status] - filter by task status
 * @param {string} [options.archiveDir] - override archive directory
 * @returns {Promise<object[]>} entries sorted newest-archived first
 */
export async function loadArchivedTasks(options = {}) {
  const since = options.since ? new Date(options.since) : null;
  const until = options.until ? new Date(options.until) : null;
  const statusFilter = options.status ?? null;
  const archiveDir = options.archiveDir ?? ARCHIVE_DIR;

  try {
    if (!existsSync(archiveDir)) return [];

    const names = (await readdir(archiveDir)).filter(
      (f) => f.endsWith(".json") && !f.startsWith(".tmp-"),
    );
    const collected = [];

    for (const name of names) {
      try {
        const data = JSON.parse(
          await readFile(resolve(archiveDir, name), "utf8"),
        );

        if (Array.isArray(data)) {
          // Daily grouped file (v3): array of archive entries.
          for (const entry of data) {
            if (matchesFilters(entry, since, until, statusFilter)) {
              collected.push(entry);
            }
          }
        } else if (data && typeof data === "object" && data.task) {
          // Legacy per-task file (v2): single { task, archived_at, ... }.
          if (matchesFilters(data, since, until, statusFilter)) {
            collected.push(data);
          }
        }
      } catch {
        // Skip corrupted files silently
      }
    }

    return collected.sort(
      (a, b) => new Date(b.archived_at) - new Date(a.archived_at),
    );
  } catch (err) {
    console.error(`[archiver] Failed to load archived tasks: ${err.message}`);
    return [];
  }
}
|
|
692
|
+
|
|
693
|
+
/**
 * Check whether an archive entry passes the date-range and status filters.
 * A null/absent filter always passes; entries with an unparsable
 * `archived_at` pass the date filters (NaN comparisons are false).
 */
function matchesFilters(entry, since, until, statusFilter) {
  const archivedAt = new Date(entry.archived_at);
  const tooEarly = since ? archivedAt < since : false;
  const tooLate = until ? archivedAt > until : false;
  const wrongStatus = statusFilter
    ? entry.task?.status !== statusFilter
    : false;
  return !tooEarly && !tooLate && !wrongStatus;
}
|
|
703
|
+
|
|
704
|
+
/**
 * Generate a sprint review report from archived tasks.
 *
 * `total` counts every archive entry (even entries missing a `task`);
 * groupings and the task list include only entries with a task object.
 *
 * @param {object[]} archivedTasks
 * @returns {{ total: number, by_status: object, by_priority: object, by_date: object, tasks: object[] }}
 */
export function generateSprintReport(archivedTasks) {
  if (!Array.isArray(archivedTasks))
    return { total: 0, by_status: {}, by_priority: {}, by_date: {}, tasks: [] };

  const report = {
    total: archivedTasks.length,
    by_status: {},
    by_priority: {},
    by_date: {},
    tasks: [],
  };

  // Increment a named counter in one of the grouping buckets.
  const bump = (bucket, key) => {
    bucket[key] = (bucket[key] || 0) + 1;
  };

  for (const item of archivedTasks) {
    const task = item.task;
    if (!task) continue;

    const finished = new Date(task.completed_at || task.updated_at);
    const dateKey = Number.isNaN(finished.getTime())
      ? "unknown"
      : finished.toISOString().split("T")[0];

    bump(report.by_status, task.status);
    bump(report.by_priority, task.priority || "unknown");
    bump(report.by_date, dateKey);

    report.tasks.push({
      id: task.id,
      title: task.title,
      status: task.status,
      priority: task.priority,
      completed_at: task.completed_at,
      archived_at: item.archived_at,
    });
  }

  return report;
}
|
|
753
|
+
|
|
754
|
+
/**
 * Format a sprint report as plain text for Telegram/console output.
 * Shows at most the first 10 tasks, each titled up to 60 characters with an
 * 8-character short ID.
 *
 * @param {object} report - result of generateSprintReport()
 * @returns {string}
 */
export function formatSprintReport(report) {
  if (!report || typeof report !== "object") return "No report data.";

  const out = [];
  out.push("=== Sprint Review Report ===");
  out.push(`Total Tasks Completed: ${report.total ?? 0}`);
  out.push("");

  // Render one "grouping" section (skipped entirely when empty).
  const pushSection = (heading, bucket, fmt) => {
    if (!bucket || Object.keys(bucket).length === 0) return;
    out.push(heading);
    for (const [key, count] of Object.entries(bucket)) {
      out.push(fmt(key, count));
    }
    out.push("");
  };

  pushSection("By Status:", report.by_status, (k, c) => `  ${k}: ${c}`);
  pushSection("By Priority:", report.by_priority, (k, c) => `  ${k}: ${c}`);
  pushSection("By Date:", report.by_date, (k, c) => `  ${k}: ${c} tasks`);

  if (Array.isArray(report.tasks) && report.tasks.length > 0) {
    out.push("Recent Tasks:");
    for (const task of report.tasks.slice(0, 10)) {
      const title = (task.title || "untitled").substring(0, 60);
      const shortId = (task.id || "?").substring(0, 8);
      out.push(`  [${task.status || "?"}] ${title} (${shortId})`);
    }
  }

  return out.join("\n");
}
|