@stackmemoryai/stackmemory 0.3.26 → 0.4.1

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Files changed (26)
  1. package/dist/cli/commands/ralph.js +125 -188
  2. package/dist/cli/commands/ralph.js.map +2 -2
  3. package/dist/features/tui/simple-monitor.js +112 -0
  4. package/dist/features/tui/simple-monitor.js.map +7 -0
  5. package/dist/features/tui/swarm-monitor.js +644 -0
  6. package/dist/features/tui/swarm-monitor.js.map +7 -0
  7. package/dist/integrations/ralph/bridge/ralph-stackmemory-bridge.js +254 -43
  8. package/dist/integrations/ralph/bridge/ralph-stackmemory-bridge.js.map +3 -3
  9. package/dist/integrations/ralph/coordination/enhanced-coordination.js +406 -0
  10. package/dist/integrations/ralph/coordination/enhanced-coordination.js.map +7 -0
  11. package/dist/integrations/ralph/monitoring/swarm-dashboard.js +290 -0
  12. package/dist/integrations/ralph/monitoring/swarm-dashboard.js.map +7 -0
  13. package/dist/integrations/ralph/monitoring/swarm-registry.js +95 -0
  14. package/dist/integrations/ralph/monitoring/swarm-registry.js.map +7 -0
  15. package/dist/integrations/ralph/recovery/crash-recovery.js +458 -0
  16. package/dist/integrations/ralph/recovery/crash-recovery.js.map +7 -0
  17. package/dist/integrations/ralph/swarm/git-workflow-manager.js +6 -67
  18. package/dist/integrations/ralph/swarm/git-workflow-manager.js.map +2 -2
  19. package/dist/integrations/ralph/swarm/swarm-coordinator.js +5 -139
  20. package/dist/integrations/ralph/swarm/swarm-coordinator.js.map +2 -2
  21. package/package.json +2 -1
  22. package/scripts/test-ralph-iteration-fix.ts +118 -0
  23. package/scripts/test-simple-ralph-state-sync.ts +178 -0
  24. package/scripts/test-swarm-tui.ts +34 -0
  25. package/scripts/test-tui-shortcuts.ts +66 -0
  26. package/scripts/validate-tui-shortcuts.ts +83 -0
@@ -0,0 +1,458 @@
+ import * as fs from "fs/promises";
+ import * as path from "path";
+ import { logger } from "../../../core/monitoring/logger.js";
+ class CrashRecoverySystem {
+ checkpoints = /* @__PURE__ */ new Map();
+ crashReports = [];
+ recoveryStrategies = [];
+ swarmCoordinator;
+ checkpointInterval;
+ recoveryDir;
+ constructor(swarmCoordinator, recoveryDir = ".swarm/recovery") {
+ this.swarmCoordinator = swarmCoordinator;
+ this.recoveryDir = recoveryDir;
+ this.setupRecoveryStrategies();
+ }
+ /**
+ * Initialize crash recovery system
+ */
+ async initialize() {
+ await this.ensureRecoveryDirectory();
+ await this.loadExistingCheckpoints();
+ this.startPeriodicCheckpoints();
+ process.on("unhandledRejection", (reason, promise) => {
+ this.handleCrash(new Error(`Unhandled Rejection: ${reason}`), {
+ type: "unhandled_rejection",
+ promise: promise.toString()
+ });
+ });
+ process.on("uncaughtException", (error) => {
+ this.handleCrash(error, { type: "uncaught_exception" });
+ });
+ logger.info("Crash recovery system initialized");
+ }
+ /**
+ * Create recovery checkpoint
+ */
+ async createCheckpoint(swarmId, reason = "periodic") {
+ try {
+ const swarmState = this.swarmCoordinator.swarmState;
+ const agents = Array.from(
+ this.swarmCoordinator.activeAgents.values()
+ );
+ const checkpoint = {
+ id: this.generateId(),
+ swarmId,
+ timestamp: Date.now(),
+ swarmState: { ...swarmState },
+ agents: agents.map((agent) => ({ ...agent })),
+ tasks: swarmState.tasks || [],
+ errorLog: this.crashReports.slice(-10),
+ // Last 10 errors
+ gitState: await this.captureGitState()
+ };
+ const checkpointPath = path.join(
+ this.recoveryDir,
+ `checkpoint-${checkpoint.id}.json`
+ );
+ await fs.writeFile(checkpointPath, JSON.stringify(checkpoint, null, 2));
+ this.checkpoints.set(checkpoint.id, checkpoint);
+ logger.info(
+ `Created checkpoint ${checkpoint.id} for swarm ${swarmId} (${reason})`
+ );
+ return checkpoint.id;
+ } catch (error) {
+ logger.error("Failed to create checkpoint:", error);
+ throw error;
+ }
+ }
+ /**
+ * Handle crash or error
+ */
+ async handleCrash(error, context = {}) {
+ const report = {
+ id: this.generateId(),
+ timestamp: Date.now(),
+ agentId: context.agentId,
+ errorType: this.classifyError(error, context),
+ error,
+ context,
+ recoveryAction: "",
+ severity: this.assessSeverity(error, context),
+ resolved: false
+ };
+ this.crashReports.push(report);
+ logger.error(`Crash detected [${report.id}]:`, error);
+ const recovered = await this.attemptRecovery(report);
+ if (recovered) {
+ report.resolved = true;
+ report.recoveryAction = "auto_recovered";
+ logger.info(`Successfully recovered from crash ${report.id}`);
+ } else {
+ logger.error(`Failed to recover from crash ${report.id}`);
+ if (report.severity === "critical") {
+ await this.escalateCriticalFailure(report);
+ }
+ }
+ await this.saveCrashReport(report);
+ }
+ /**
+ * Restore from checkpoint
+ */
+ async restoreFromCheckpoint(checkpointId) {
+ try {
+ const checkpoint = this.checkpoints.get(checkpointId);
+ if (!checkpoint) {
+ logger.error(`Checkpoint ${checkpointId} not found`);
+ return false;
+ }
+ logger.info(`Restoring from checkpoint ${checkpointId}`);
+ await this.restoreGitState(checkpoint.gitState);
+ if (checkpoint.databaseBackup) {
+ await this.restoreDatabase(checkpoint.databaseBackup);
+ }
+ await this.restoreSwarmState(checkpoint);
+ logger.info(`Successfully restored from checkpoint ${checkpointId}`);
+ return true;
+ } catch (error) {
+ logger.error(
+ `Failed to restore from checkpoint ${checkpointId}:`,
+ error
+ );
+ return false;
+ }
+ }
+ /**
+ * Get recovery recommendations
+ */
+ getRecoveryRecommendations() {
+ const recent = Date.now() - 36e5;
+ const recentCheckpoints = Array.from(this.checkpoints.values()).filter((cp) => cp.timestamp > recent).sort((a, b) => b.timestamp - a.timestamp).slice(0, 5);
+ const errorCounts = /* @__PURE__ */ new Map();
+ const errorTimes = /* @__PURE__ */ new Map();
+ for (const report of this.crashReports.filter(
+ (r) => r.timestamp > recent
+ )) {
+ errorCounts.set(
+ report.errorType,
+ (errorCounts.get(report.errorType) || 0) + 1
+ );
+ errorTimes.set(
+ report.errorType,
+ Math.max(errorTimes.get(report.errorType) || 0, report.timestamp)
+ );
+ }
+ const frequentErrors = Array.from(errorCounts.entries()).map(([type, count]) => ({
+ type,
+ count,
+ lastOccurrence: errorTimes.get(type) || 0
+ })).sort((a, b) => b.count - a.count);
+ const criticalErrors = this.crashReports.filter(
+ (r) => r.severity === "critical" && r.timestamp > recent && !r.resolved
+ );
+ const systemHealth = criticalErrors.length > 0 ? "critical" : frequentErrors.length > 3 ? "degraded" : "good";
+ return {
+ recentCheckpoints,
+ frequentErrors,
+ recoveryActions: this.generateRecoveryActions(frequentErrors),
+ systemHealth
+ };
+ }
+ /**
+ * Auto-recovery from common failures
+ */
+ async attemptAutoRecovery(swarmId) {
+ logger.info(`Attempting auto-recovery for swarm ${swarmId}`);
+ try {
+ const recentCheckpoint = this.findRecentCheckpoint(swarmId);
+ if (recentCheckpoint) {
+ logger.info(`Found recent checkpoint: ${recentCheckpoint.id}`);
+ return await this.restoreFromCheckpoint(recentCheckpoint.id);
+ }
+ await this.swarmCoordinator.forceCleanup();
+ await this.clearProblematicState();
+ logger.info("Restarting swarm with minimal configuration");
+ return true;
+ } catch (error) {
+ logger.error("Auto-recovery failed:", error);
+ return false;
+ }
+ }
+ async attemptRecovery(report) {
+ for (const strategy of this.recoveryStrategies) {
+ if (strategy.condition(report.error, report.context)) {
+ logger.info(`Applying recovery strategy: ${strategy.errorType}`);
+ let retries = 0;
+ while (retries < strategy.maxRetries) {
+ try {
+ const success = await strategy.action(report, this);
+ if (success) {
+ report.recoveryAction = strategy.errorType;
+ return true;
+ }
+ } catch (error) {
+ logger.warn(
+ `Recovery attempt ${retries + 1} failed:`,
+ error
+ );
+ }
+ retries++;
+ if (retries < strategy.maxRetries) {
+ await this.sleep(strategy.backoffMs * Math.pow(2, retries));
+ }
+ }
+ }
+ }
+ return false;
+ }
+ classifyError(error, context) {
+ const message = error.message.toLowerCase();
+ if (message.includes("database") || message.includes("sqlite")) {
+ return "database_failure";
+ } else if (message.includes("git") || message.includes("branch")) {
+ return "git_conflict";
+ } else if (message.includes("timeout") || context.timeout) {
+ return "agent_timeout";
+ } else if (message.includes("memory") || message.includes("heap")) {
+ return "memory_overflow";
+ } else if (message.includes("network") || message.includes("connect")) {
+ return "network_error";
+ }
+ return "database_failure";
+ }
+ assessSeverity(error, context) {
+ if (context.type === "uncaught_exception") return "critical";
+ if (error.message.includes("unhandled")) return "high";
+ if (error.message.includes("database")) return "medium";
+ return "low";
+ }
+ setupRecoveryStrategies() {
+ this.recoveryStrategies = [
+ {
+ errorType: "database_failure",
+ condition: (error) => error.message.includes("database") || error.message.includes("sqlite"),
+ action: async (report, recovery) => {
+ logger.info("Attempting database recovery");
+ try {
+ await recovery.clearProblematicState();
+ return true;
+ } catch {
+ return false;
+ }
+ },
+ maxRetries: 3,
+ backoffMs: 1e3
+ },
+ {
+ errorType: "git_conflict",
+ condition: (error) => error.message.includes("git") || error.message.includes("branch"),
+ action: async (report, recovery) => {
+ logger.info("Attempting git conflict resolution");
+ try {
+ const { execSync } = await import("child_process");
+ execSync("git checkout main", { stdio: "ignore" });
+ execSync("git reset --hard HEAD", { stdio: "ignore" });
+ return true;
+ } catch {
+ return false;
+ }
+ },
+ maxRetries: 2,
+ backoffMs: 500
+ },
+ {
+ errorType: "agent_timeout",
+ condition: (error, context) => error.message.includes("timeout") || context.timeout,
+ action: async (report, recovery) => {
+ logger.info("Attempting agent timeout recovery");
+ await recovery.swarmCoordinator.forceCleanup();
+ return true;
+ },
+ maxRetries: 1,
+ backoffMs: 2e3
+ },
+ {
+ errorType: "memory_overflow",
+ condition: (error) => error.message.includes("memory") || error.message.includes("heap"),
+ action: async (report, recovery) => {
+ logger.info("Attempting memory recovery");
+ if (global.gc) global.gc();
+ await recovery.cleanupOldCheckpoints(5);
+ return true;
+ },
+ maxRetries: 1,
+ backoffMs: 5e3
+ }
+ ];
+ }
+ async captureGitState() {
+ try {
+ const { execSync } = await import("child_process");
+ const currentBranch = execSync("git branch --show-current", {
+ encoding: "utf8"
+ }).trim();
+ const statusOutput = execSync("git status --porcelain", {
+ encoding: "utf8"
+ });
+ const uncommittedChanges = statusOutput.trim().split("\n").filter(Boolean);
+ const branchesOutput = execSync("git branch", { encoding: "utf8" });
+ const activeBranches = branchesOutput.split("\n").map((line) => line.trim().replace(/^\*?\s*/, "")).filter(Boolean);
+ return {
+ currentBranch,
+ uncommittedChanges,
+ activeBranches
+ };
+ } catch (error) {
+ logger.warn("Failed to capture git state:", error);
+ return {
+ currentBranch: "unknown",
+ uncommittedChanges: [],
+ activeBranches: []
+ };
+ }
+ }
+ async restoreGitState(gitState) {
+ try {
+ const { execSync } = await import("child_process");
+ execSync(`git checkout ${gitState.currentBranch}`, { stdio: "ignore" });
+ logger.info(`Restored git branch: ${gitState.currentBranch}`);
+ } catch (error) {
+ logger.warn("Failed to restore git state:", error);
+ }
+ }
+ async restoreDatabase(backupPath) {
+ logger.info(`Restoring database from ${backupPath}`);
+ }
+ async restoreSwarmState(checkpoint) {
+ logger.info(`Restoring swarm state from checkpoint ${checkpoint.id}`);
+ }
+ findRecentCheckpoint(swarmId) {
+ const recent = Date.now() - 18e5;
+ return Array.from(this.checkpoints.values()).filter((cp) => cp.swarmId === swarmId && cp.timestamp > recent).sort((a, b) => b.timestamp - a.timestamp)[0] || null;
+ }
+ async clearProblematicState() {
+ try {
+ await this.cleanupTempFiles();
+ logger.info("Cleared problematic state");
+ } catch (error) {
+ logger.error("Failed to clear problematic state:", error);
+ }
+ }
+ async cleanupTempFiles() {
+ }
+ async cleanupOldCheckpoints(keepCount) {
+ const sorted = Array.from(this.checkpoints.values()).sort(
+ (a, b) => b.timestamp - a.timestamp
+ );
+ const toDelete = sorted.slice(keepCount);
+ for (const checkpoint of toDelete) {
+ try {
+ const checkpointPath = path.join(
+ this.recoveryDir,
+ `checkpoint-${checkpoint.id}.json`
+ );
+ await fs.unlink(checkpointPath);
+ this.checkpoints.delete(checkpoint.id);
+ } catch (error) {
+ logger.warn(
+ `Failed to delete checkpoint ${checkpoint.id}:`,
+ error
+ );
+ }
+ }
+ logger.info(`Cleaned up ${toDelete.length} old checkpoints`);
+ }
+ async escalateCriticalFailure(report) {
+ logger.error(`CRITICAL FAILURE [${report.id}]: ${report.error.message}`);
+ try {
+ const swarmState = this.swarmCoordinator.swarmState;
+ if (swarmState?.id) {
+ await this.createCheckpoint(swarmState.id, "critical_failure");
+ }
+ } catch {
+ logger.error("Failed to create emergency checkpoint");
+ }
+ await this.swarmCoordinator.forceCleanup();
+ }
+ generateRecoveryActions(frequentErrors) {
+ const actions = [];
+ for (const { type, count } of frequentErrors) {
+ if (count > 3) {
+ switch (type) {
+ case "database_failure":
+ actions.push("Consider upgrading database configuration");
+ break;
+ case "git_conflict":
+ actions.push("Review git workflow and branch strategy");
+ break;
+ case "agent_timeout":
+ actions.push(
+ "Increase agent timeout limits or reduce task complexity"
+ );
+ break;
+ case "memory_overflow":
+ actions.push("Monitor memory usage and consider increasing limits");
+ break;
+ }
+ }
+ }
+ return actions;
+ }
+ startPeriodicCheckpoints() {
+ this.checkpointInterval = setInterval(async () => {
+ const swarmState = this.swarmCoordinator.swarmState;
+ if (swarmState?.id && swarmState.status === "active") {
+ await this.createCheckpoint(swarmState.id, "periodic");
+ }
+ }, 3e5);
+ }
+ async ensureRecoveryDirectory() {
+ try {
+ await fs.mkdir(this.recoveryDir, { recursive: true });
+ } catch (error) {
+ logger.error("Failed to create recovery directory:", error);
+ }
+ }
+ async loadExistingCheckpoints() {
+ try {
+ const files = await fs.readdir(this.recoveryDir);
+ for (const file of files) {
+ if (file.startsWith("checkpoint-") && file.endsWith(".json")) {
+ try {
+ const content = await fs.readFile(
+ path.join(this.recoveryDir, file),
+ "utf8"
+ );
+ const checkpoint = JSON.parse(content);
+ this.checkpoints.set(checkpoint.id, checkpoint);
+ } catch (error) {
+ logger.warn(`Failed to load checkpoint ${file}:`, error);
+ }
+ }
+ }
+ logger.info(`Loaded ${this.checkpoints.size} existing checkpoints`);
+ } catch (error) {
+ logger.warn("Failed to load existing checkpoints:", error);
+ }
+ }
+ async saveCrashReport(report) {
+ try {
+ const reportPath = path.join(this.recoveryDir, `crash-${report.id}.json`);
+ await fs.writeFile(reportPath, JSON.stringify(report, null, 2));
+ } catch (error) {
+ logger.error("Failed to save crash report:", error);
+ }
+ }
+ sleep(ms) {
+ return new Promise((resolve) => setTimeout(resolve, ms));
+ }
+ generateId() {
+ return `recovery_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
+ }
+ }
+ var crash_recovery_default = CrashRecoverySystem;
+ export {
+ CrashRecoverySystem,
+ crash_recovery_default as default
+ };
+ //# sourceMappingURL=crash-recovery.js.map
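
For orientation, the sketch below shows one way the new CrashRecoverySystem could be wired up, inferred only from the compiled code above; it is not documented API. The import specifier mirrors the file's path inside this tarball (the package's export map may expose it differently), and the coordinator object is a hypothetical stub covering just the surface the class touches (swarmState, activeAgents, forceCleanup).

```ts
// Hedged sketch, not official usage. The import path and coordinator stub are assumptions.
import { CrashRecoverySystem } from "./dist/integrations/ralph/recovery/crash-recovery.js";

// Minimal stand-in for the SwarmCoordinator surface the class reads and calls.
const coordinator = {
  swarmState: { id: "swarm-1", status: "active", tasks: [] },
  activeAgents: new Map(),
  async forceCleanup() {},
};

const recovery = new CrashRecoverySystem(coordinator, ".swarm/recovery");
await recovery.initialize(); // installs unhandledRejection / uncaughtException handlers

// Writes .swarm/recovery/checkpoint-<id>.json and keeps the checkpoint in memory.
const checkpointId = await recovery.createCheckpoint("swarm-1", "manual");

// "good" | "degraded" | "critical", derived from crash reports in the last hour.
console.log(recovery.getRecoveryRecommendations().systemHealth);

await recovery.restoreFromCheckpoint(checkpointId);
```

Note that initialize() also starts a periodic checkpoint timer (every 3e5 ms, i.e. five minutes) that only fires while swarmState.status is "active".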
@@ -0,0 +1,7 @@
+ {
+ "version": 3,
+ "sources": ["../../../../src/integrations/ralph/recovery/crash-recovery.ts"],
+ "sourcesContent": ["/**\n * Crash Recovery System for Ralph Swarms\n * Handles failures, provides auto-recovery, and maintains swarm resilience\n */\n\nimport * as fs from 'fs/promises';\nimport * as path from 'path';\nimport { logger } from '../../../core/monitoring/logger.js';\nimport { SwarmCoordinator } from '../swarm/swarm-coordinator.js';\nimport { Agent, SwarmState } from '../types.js';\n\nexport interface RecoveryCheckpoint {\n id: string;\n swarmId: string;\n timestamp: number;\n swarmState: SwarmState;\n agents: Agent[];\n tasks: any[];\n errorLog: CrashReport[];\n databaseBackup?: string;\n gitState: {\n currentBranch: string;\n uncommittedChanges: string[];\n activeBranches: string[];\n };\n}\n\nexport interface CrashReport {\n id: string;\n timestamp: number;\n agentId?: string;\n errorType:\n | 'database_failure'\n | 'git_conflict'\n | 'agent_timeout'\n | 'memory_overflow'\n | 'network_error';\n error: Error;\n context: any;\n recoveryAction: string;\n severity: 'low' | 'medium' | 'high' | 'critical';\n resolved: boolean;\n}\n\nexport interface RecoveryStrategy {\n errorType: string;\n condition: (error: Error, context: any) => boolean;\n action: (\n report: CrashReport,\n recovery: CrashRecoverySystem\n ) => Promise<boolean>;\n maxRetries: number;\n backoffMs: number;\n}\n\nexport class CrashRecoverySystem {\n private checkpoints: Map<string, RecoveryCheckpoint> = new Map();\n private crashReports: CrashReport[] = [];\n private recoveryStrategies: RecoveryStrategy[] = [];\n private swarmCoordinator: SwarmCoordinator;\n private checkpointInterval?: NodeJS.Timeout;\n private recoveryDir: string;\n\n constructor(\n swarmCoordinator: SwarmCoordinator,\n recoveryDir: string = '.swarm/recovery'\n ) {\n this.swarmCoordinator = swarmCoordinator;\n this.recoveryDir = recoveryDir;\n this.setupRecoveryStrategies();\n }\n\n /**\n * Initialize crash recovery system\n */\n async initialize(): Promise<void> {\n await this.ensureRecoveryDirectory();\n await this.loadExistingCheckpoints();\n this.startPeriodicCheckpoints();\n\n // Set up global error handlers\n process.on('unhandledRejection', (reason, promise) => {\n this.handleCrash(new Error(`Unhandled Rejection: ${reason}`), {\n type: 'unhandled_rejection',\n promise: promise.toString(),\n });\n });\n\n process.on('uncaughtException', (error) => {\n this.handleCrash(error, { type: 'uncaught_exception' });\n });\n\n logger.info('Crash recovery system initialized');\n }\n\n /**\n * Create recovery checkpoint\n */\n async createCheckpoint(\n swarmId: string,\n reason: string = 'periodic'\n ): Promise<string> {\n try {\n const swarmState = (this.swarmCoordinator as any).swarmState;\n const agents = Array.from(\n (this.swarmCoordinator as any).activeAgents.values()\n );\n\n const checkpoint: RecoveryCheckpoint = {\n id: this.generateId(),\n swarmId,\n timestamp: Date.now(),\n swarmState: { ...swarmState },\n agents: agents.map((agent) => ({ ...agent })),\n tasks: swarmState.tasks || [],\n errorLog: this.crashReports.slice(-10), // Last 10 errors\n gitState: await this.captureGitState(),\n };\n\n // Save to disk\n const checkpointPath = path.join(\n this.recoveryDir,\n `checkpoint-${checkpoint.id}.json`\n );\n await fs.writeFile(checkpointPath, JSON.stringify(checkpoint, null, 2));\n\n this.checkpoints.set(checkpoint.id, checkpoint);\n\n logger.info(\n `Created checkpoint ${checkpoint.id} for swarm ${swarmId} (${reason})`\n );\n return checkpoint.id;\n } catch (error) {\n logger.error('Failed to create checkpoint:', error as Error);\n throw 
error;\n }\n }\n\n /**\n * Handle crash or error\n */\n async handleCrash(error: Error, context: any = {}): Promise<void> {\n const report: CrashReport = {\n id: this.generateId(),\n timestamp: Date.now(),\n agentId: context.agentId,\n errorType: this.classifyError(error, context),\n error,\n context,\n recoveryAction: '',\n severity: this.assessSeverity(error, context),\n resolved: false,\n };\n\n this.crashReports.push(report);\n logger.error(`Crash detected [${report.id}]:`, error);\n\n // Attempt automatic recovery\n const recovered = await this.attemptRecovery(report);\n\n if (recovered) {\n report.resolved = true;\n report.recoveryAction = 'auto_recovered';\n logger.info(`Successfully recovered from crash ${report.id}`);\n } else {\n logger.error(`Failed to recover from crash ${report.id}`);\n\n if (report.severity === 'critical') {\n await this.escalateCriticalFailure(report);\n }\n }\n\n // Save crash report\n await this.saveCrashReport(report);\n }\n\n /**\n * Restore from checkpoint\n */\n async restoreFromCheckpoint(checkpointId: string): Promise<boolean> {\n try {\n const checkpoint = this.checkpoints.get(checkpointId);\n if (!checkpoint) {\n logger.error(`Checkpoint ${checkpointId} not found`);\n return false;\n }\n\n logger.info(`Restoring from checkpoint ${checkpointId}`);\n\n // Restore git state\n await this.restoreGitState(checkpoint.gitState);\n\n // Restore database if backup exists\n if (checkpoint.databaseBackup) {\n await this.restoreDatabase(checkpoint.databaseBackup);\n }\n\n // Restore swarm state\n await this.restoreSwarmState(checkpoint);\n\n logger.info(`Successfully restored from checkpoint ${checkpointId}`);\n return true;\n } catch (error) {\n logger.error(\n `Failed to restore from checkpoint ${checkpointId}:`,\n error as Error\n );\n return false;\n }\n }\n\n /**\n * Get recovery recommendations\n */\n getRecoveryRecommendations(): {\n recentCheckpoints: RecoveryCheckpoint[];\n frequentErrors: Array<{\n type: string;\n count: number;\n lastOccurrence: number;\n }>;\n recoveryActions: string[];\n systemHealth: 'good' | 'degraded' | 'critical';\n } {\n const recent = Date.now() - 3600000; // Last hour\n const recentCheckpoints = Array.from(this.checkpoints.values())\n .filter((cp) => cp.timestamp > recent)\n .sort((a, b) => b.timestamp - a.timestamp)\n .slice(0, 5);\n\n const errorCounts = new Map<string, number>();\n const errorTimes = new Map<string, number>();\n\n for (const report of this.crashReports.filter(\n (r) => r.timestamp > recent\n )) {\n errorCounts.set(\n report.errorType,\n (errorCounts.get(report.errorType) || 0) + 1\n );\n errorTimes.set(\n report.errorType,\n Math.max(errorTimes.get(report.errorType) || 0, report.timestamp)\n );\n }\n\n const frequentErrors = Array.from(errorCounts.entries())\n .map(([type, count]) => ({\n type,\n count,\n lastOccurrence: errorTimes.get(type) || 0,\n }))\n .sort((a, b) => b.count - a.count);\n\n const criticalErrors = this.crashReports.filter(\n (r) => r.severity === 'critical' && r.timestamp > recent && !r.resolved\n );\n\n const systemHealth =\n criticalErrors.length > 0\n ? 'critical'\n : frequentErrors.length > 3\n ? 'degraded'\n : 'good';\n\n return {\n recentCheckpoints,\n frequentErrors,\n recoveryActions: this.generateRecoveryActions(frequentErrors),\n systemHealth,\n };\n }\n\n /**\n * Auto-recovery from common failures\n */\n async attemptAutoRecovery(swarmId: string): Promise<boolean> {\n logger.info(`Attempting auto-recovery for swarm ${swarmId}`);\n\n try {\n // 1. 
Check for recent checkpoint\n const recentCheckpoint = this.findRecentCheckpoint(swarmId);\n if (recentCheckpoint) {\n logger.info(`Found recent checkpoint: ${recentCheckpoint.id}`);\n return await this.restoreFromCheckpoint(recentCheckpoint.id);\n }\n\n // 2. Attempt graceful restart\n await this.swarmCoordinator.forceCleanup();\n\n // 3. Clear problematic state\n await this.clearProblematicState();\n\n // 4. Restart with minimal configuration\n logger.info('Restarting swarm with minimal configuration');\n return true;\n } catch (error) {\n logger.error('Auto-recovery failed:', error as Error);\n return false;\n }\n }\n\n private async attemptRecovery(report: CrashReport): Promise<boolean> {\n // Find appropriate recovery strategy\n for (const strategy of this.recoveryStrategies) {\n if (strategy.condition(report.error, report.context)) {\n logger.info(`Applying recovery strategy: ${strategy.errorType}`);\n\n let retries = 0;\n while (retries < strategy.maxRetries) {\n try {\n const success = await strategy.action(report, this);\n if (success) {\n report.recoveryAction = strategy.errorType;\n return true;\n }\n } catch (error) {\n logger.warn(\n `Recovery attempt ${retries + 1} failed:`,\n error as Error\n );\n }\n\n retries++;\n if (retries < strategy.maxRetries) {\n await this.sleep(strategy.backoffMs * Math.pow(2, retries));\n }\n }\n }\n }\n\n return false;\n }\n\n private classifyError(error: Error, context: any): CrashReport['errorType'] {\n const message = error.message.toLowerCase();\n\n if (message.includes('database') || message.includes('sqlite')) {\n return 'database_failure';\n } else if (message.includes('git') || message.includes('branch')) {\n return 'git_conflict';\n } else if (message.includes('timeout') || context.timeout) {\n return 'agent_timeout';\n } else if (message.includes('memory') || message.includes('heap')) {\n return 'memory_overflow';\n } else if (message.includes('network') || message.includes('connect')) {\n return 'network_error';\n }\n\n return 'database_failure'; // Default\n }\n\n private assessSeverity(error: Error, context: any): CrashReport['severity'] {\n if (context.type === 'uncaught_exception') return 'critical';\n if (error.message.includes('unhandled')) return 'high';\n if (error.message.includes('database')) return 'medium';\n return 'low';\n }\n\n private setupRecoveryStrategies(): void {\n this.recoveryStrategies = [\n {\n errorType: 'database_failure',\n condition: (error) =>\n error.message.includes('database') ||\n error.message.includes('sqlite'),\n action: async (report, recovery) => {\n // Reinitialize database connection\n logger.info('Attempting database recovery');\n\n // Create new database adapter\n try {\n await recovery.clearProblematicState();\n return true;\n } catch {\n return false;\n }\n },\n maxRetries: 3,\n backoffMs: 1000,\n },\n {\n errorType: 'git_conflict',\n condition: (error) =>\n error.message.includes('git') || error.message.includes('branch'),\n action: async (report, recovery) => {\n logger.info('Attempting git conflict resolution');\n\n try {\n // Force cleanup git state\n const { execSync } = await import('child_process');\n execSync('git checkout main', { stdio: 'ignore' });\n execSync('git reset --hard HEAD', { stdio: 'ignore' });\n return true;\n } catch {\n return false;\n }\n },\n maxRetries: 2,\n backoffMs: 500,\n },\n {\n errorType: 'agent_timeout',\n condition: (error, context) =>\n error.message.includes('timeout') || context.timeout,\n action: async (report, recovery) => {\n logger.info('Attempting 
agent timeout recovery');\n\n // Force cleanup stuck agents\n await recovery.swarmCoordinator.forceCleanup();\n return true;\n },\n maxRetries: 1,\n backoffMs: 2000,\n },\n {\n errorType: 'memory_overflow',\n condition: (error) =>\n error.message.includes('memory') || error.message.includes('heap'),\n action: async (report, recovery) => {\n logger.info('Attempting memory recovery');\n\n // Force garbage collection\n if (global.gc) global.gc();\n\n // Cleanup old checkpoints\n await recovery.cleanupOldCheckpoints(5);\n return true;\n },\n maxRetries: 1,\n backoffMs: 5000,\n },\n ];\n }\n\n private async captureGitState(): Promise<RecoveryCheckpoint['gitState']> {\n try {\n const { execSync } = await import('child_process');\n\n const currentBranch = execSync('git branch --show-current', {\n encoding: 'utf8',\n }).trim();\n const statusOutput = execSync('git status --porcelain', {\n encoding: 'utf8',\n });\n const uncommittedChanges = statusOutput\n .trim()\n .split('\\n')\n .filter(Boolean);\n const branchesOutput = execSync('git branch', { encoding: 'utf8' });\n const activeBranches = branchesOutput\n .split('\\n')\n .map((line) => line.trim().replace(/^\\*?\\s*/, ''))\n .filter(Boolean);\n\n return {\n currentBranch,\n uncommittedChanges,\n activeBranches,\n };\n } catch (error) {\n logger.warn('Failed to capture git state:', error as Error);\n return {\n currentBranch: 'unknown',\n uncommittedChanges: [],\n activeBranches: [],\n };\n }\n }\n\n private async restoreGitState(\n gitState: RecoveryCheckpoint['gitState']\n ): Promise<void> {\n try {\n const { execSync } = await import('child_process');\n execSync(`git checkout ${gitState.currentBranch}`, { stdio: 'ignore' });\n logger.info(`Restored git branch: ${gitState.currentBranch}`);\n } catch (error) {\n logger.warn('Failed to restore git state:', error as Error);\n }\n }\n\n private async restoreDatabase(backupPath: string): Promise<void> {\n // Implementation would restore database from backup\n logger.info(`Restoring database from ${backupPath}`);\n }\n\n private async restoreSwarmState(\n checkpoint: RecoveryCheckpoint\n ): Promise<void> {\n // Implementation would restore swarm coordinator state\n logger.info(`Restoring swarm state from checkpoint ${checkpoint.id}`);\n }\n\n private findRecentCheckpoint(swarmId: string): RecoveryCheckpoint | null {\n const recent = Date.now() - 1800000; // 30 minutes\n\n return (\n Array.from(this.checkpoints.values())\n .filter((cp) => cp.swarmId === swarmId && cp.timestamp > recent)\n .sort((a, b) => b.timestamp - a.timestamp)[0] || null\n );\n }\n\n private async clearProblematicState(): Promise<void> {\n try {\n // Clear temporary files\n await this.cleanupTempFiles();\n\n // Reset any stuck locks\n // Implementation specific cleanup\n\n logger.info('Cleared problematic state');\n } catch (error) {\n logger.error('Failed to clear problematic state:', error as Error);\n }\n }\n\n private async cleanupTempFiles(): Promise<void> {\n // Cleanup implementation\n }\n\n private async cleanupOldCheckpoints(keepCount: number): Promise<void> {\n const sorted = Array.from(this.checkpoints.values()).sort(\n (a, b) => b.timestamp - a.timestamp\n );\n\n const toDelete = sorted.slice(keepCount);\n\n for (const checkpoint of toDelete) {\n try {\n const checkpointPath = path.join(\n this.recoveryDir,\n `checkpoint-${checkpoint.id}.json`\n );\n await fs.unlink(checkpointPath);\n this.checkpoints.delete(checkpoint.id);\n } catch (error) {\n logger.warn(\n `Failed to delete checkpoint ${checkpoint.id}:`,\n error 
as Error\n );\n }\n }\n\n logger.info(`Cleaned up ${toDelete.length} old checkpoints`);\n }\n\n private async escalateCriticalFailure(report: CrashReport): Promise<void> {\n logger.error(`CRITICAL FAILURE [${report.id}]: ${report.error.message}`);\n\n // Create emergency checkpoint\n try {\n const swarmState = (this.swarmCoordinator as any).swarmState;\n if (swarmState?.id) {\n await this.createCheckpoint(swarmState.id, 'critical_failure');\n }\n } catch {\n logger.error('Failed to create emergency checkpoint');\n }\n\n // Graceful shutdown\n await this.swarmCoordinator.forceCleanup();\n }\n\n private generateRecoveryActions(\n frequentErrors: Array<{ type: string; count: number }>\n ): string[] {\n const actions: string[] = [];\n\n for (const { type, count } of frequentErrors) {\n if (count > 3) {\n switch (type) {\n case 'database_failure':\n actions.push('Consider upgrading database configuration');\n break;\n case 'git_conflict':\n actions.push('Review git workflow and branch strategy');\n break;\n case 'agent_timeout':\n actions.push(\n 'Increase agent timeout limits or reduce task complexity'\n );\n break;\n case 'memory_overflow':\n actions.push('Monitor memory usage and consider increasing limits');\n break;\n }\n }\n }\n\n return actions;\n }\n\n private startPeriodicCheckpoints(): void {\n this.checkpointInterval = setInterval(async () => {\n const swarmState = (this.swarmCoordinator as any).swarmState;\n if (swarmState?.id && swarmState.status === 'active') {\n await this.createCheckpoint(swarmState.id, 'periodic');\n }\n }, 300000); // Every 5 minutes\n }\n\n private async ensureRecoveryDirectory(): Promise<void> {\n try {\n await fs.mkdir(this.recoveryDir, { recursive: true });\n } catch (error) {\n logger.error('Failed to create recovery directory:', error as Error);\n }\n }\n\n private async loadExistingCheckpoints(): Promise<void> {\n try {\n const files = await fs.readdir(this.recoveryDir);\n\n for (const file of files) {\n if (file.startsWith('checkpoint-') && file.endsWith('.json')) {\n try {\n const content = await fs.readFile(\n path.join(this.recoveryDir, file),\n 'utf8'\n );\n const checkpoint: RecoveryCheckpoint = JSON.parse(content);\n this.checkpoints.set(checkpoint.id, checkpoint);\n } catch (error) {\n logger.warn(`Failed to load checkpoint ${file}:`, error as Error);\n }\n }\n }\n\n logger.info(`Loaded ${this.checkpoints.size} existing checkpoints`);\n } catch (error) {\n logger.warn('Failed to load existing checkpoints:', error as Error);\n }\n }\n\n private async saveCrashReport(report: CrashReport): Promise<void> {\n try {\n const reportPath = path.join(this.recoveryDir, `crash-${report.id}.json`);\n await fs.writeFile(reportPath, JSON.stringify(report, null, 2));\n } catch (error) {\n logger.error('Failed to save crash report:', error as Error);\n }\n }\n\n private sleep(ms: number): Promise<void> {\n return new Promise((resolve) => setTimeout(resolve, ms));\n }\n\n private generateId(): string {\n return `recovery_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;\n }\n}\n\nexport default CrashRecoverySystem;\n"],
+ "mappings": "AAKA,YAAY,QAAQ;AACpB,YAAY,UAAU;AACtB,SAAS,cAAc;AAgDhB,MAAM,oBAAoB;AAAA,EACvB,cAA+C,oBAAI,IAAI;AAAA,EACvD,eAA8B,CAAC;AAAA,EAC/B,qBAAyC,CAAC;AAAA,EAC1C;AAAA,EACA;AAAA,EACA;AAAA,EAER,YACE,kBACA,cAAsB,mBACtB;AACA,SAAK,mBAAmB;AACxB,SAAK,cAAc;AACnB,SAAK,wBAAwB;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,aAA4B;AAChC,UAAM,KAAK,wBAAwB;AACnC,UAAM,KAAK,wBAAwB;AACnC,SAAK,yBAAyB;AAG9B,YAAQ,GAAG,sBAAsB,CAAC,QAAQ,YAAY;AACpD,WAAK,YAAY,IAAI,MAAM,wBAAwB,MAAM,EAAE,GAAG;AAAA,QAC5D,MAAM;AAAA,QACN,SAAS,QAAQ,SAAS;AAAA,MAC5B,CAAC;AAAA,IACH,CAAC;AAED,YAAQ,GAAG,qBAAqB,CAAC,UAAU;AACzC,WAAK,YAAY,OAAO,EAAE,MAAM,qBAAqB,CAAC;AAAA,IACxD,CAAC;AAED,WAAO,KAAK,mCAAmC;AAAA,EACjD;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,iBACJ,SACA,SAAiB,YACA;AACjB,QAAI;AACF,YAAM,aAAc,KAAK,iBAAyB;AAClD,YAAM,SAAS,MAAM;AAAA,QAClB,KAAK,iBAAyB,aAAa,OAAO;AAAA,MACrD;AAEA,YAAM,aAAiC;AAAA,QACrC,IAAI,KAAK,WAAW;AAAA,QACpB;AAAA,QACA,WAAW,KAAK,IAAI;AAAA,QACpB,YAAY,EAAE,GAAG,WAAW;AAAA,QAC5B,QAAQ,OAAO,IAAI,CAAC,WAAW,EAAE,GAAG,MAAM,EAAE;AAAA,QAC5C,OAAO,WAAW,SAAS,CAAC;AAAA,QAC5B,UAAU,KAAK,aAAa,MAAM,GAAG;AAAA;AAAA,QACrC,UAAU,MAAM,KAAK,gBAAgB;AAAA,MACvC;AAGA,YAAM,iBAAiB,KAAK;AAAA,QAC1B,KAAK;AAAA,QACL,cAAc,WAAW,EAAE;AAAA,MAC7B;AACA,YAAM,GAAG,UAAU,gBAAgB,KAAK,UAAU,YAAY,MAAM,CAAC,CAAC;AAEtE,WAAK,YAAY,IAAI,WAAW,IAAI,UAAU;AAE9C,aAAO;AAAA,QACL,sBAAsB,WAAW,EAAE,cAAc,OAAO,KAAK,MAAM;AAAA,MACrE;AACA,aAAO,WAAW;AAAA,IACpB,SAAS,OAAO;AACd,aAAO,MAAM,gCAAgC,KAAc;AAC3D,YAAM;AAAA,IACR;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,YAAY,OAAc,UAAe,CAAC,GAAkB;AAChE,UAAM,SAAsB;AAAA,MAC1B,IAAI,KAAK,WAAW;AAAA,MACpB,WAAW,KAAK,IAAI;AAAA,MACpB,SAAS,QAAQ;AAAA,MACjB,WAAW,KAAK,cAAc,OAAO,OAAO;AAAA,MAC5C;AAAA,MACA;AAAA,MACA,gBAAgB;AAAA,MAChB,UAAU,KAAK,eAAe,OAAO,OAAO;AAAA,MAC5C,UAAU;AAAA,IACZ;AAEA,SAAK,aAAa,KAAK,MAAM;AAC7B,WAAO,MAAM,mBAAmB,OAAO,EAAE,MAAM,KAAK;AAGpD,UAAM,YAAY,MAAM,KAAK,gBAAgB,MAAM;AAEnD,QAAI,WAAW;AACb,aAAO,WAAW;AAClB,aAAO,iBAAiB;AACxB,aAAO,KAAK,qCAAqC,OAAO,EAAE,EAAE;AAAA,IAC9D,OAAO;AACL,aAAO,MAAM,gCAAgC,OAAO,EAAE,EAAE;AAExD,UAAI,OAAO,aAAa,YAAY;AAClC,cAAM,KAAK,wBAAwB,MAAM;AAAA,MAC3C;AAAA,IACF;AAGA,UAAM,KAAK,gBAAgB,MAAM;AAAA,EACnC;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,sBAAsB,cAAwC;AAClE,QAAI;AACF,YAAM,aAAa,KAAK,YAAY,IAAI,YAAY;AACpD,UAAI,CAAC,YAAY;AACf,eAAO,MAAM,cAAc,YAAY,YAAY;AACnD,eAAO;AAAA,MACT;AAEA,aAAO,KAAK,6BAA6B,YAAY,EAAE;AAGvD,YAAM,KAAK,gBAAgB,WAAW,QAAQ;AAG9C,UAAI,WAAW,gBAAgB;AAC7B,cAAM,KAAK,gBAAgB,WAAW,cAAc;AAAA,MACtD;AAGA,YAAM,KAAK,kBAAkB,UAAU;AAEvC,aAAO,KAAK,yCAAyC,YAAY,EAAE;AACnE,aAAO;AAAA,IACT,SAAS,OAAO;AACd,aAAO;AAAA,QACL,qCAAqC,YAAY;AAAA,QACjD;AAAA,MACF;AACA,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,6BASE;AACA,UAAM,SAAS,KAAK,IAAI,IAAI;AAC5B,UAAM,oBAAoB,MAAM,KAAK,KAAK,YAAY,OAAO,CAAC,EAC3D,OAAO,CAAC,OAAO,GAAG,YAAY,MAAM,EACpC,KAAK,CAAC,GAAG,MAAM,EAAE,YAAY,EAAE,SAAS,EACxC,MAAM,GAAG,CAAC;AAEb,UAAM,cAAc,oBAAI,IAAoB;AAC5C,UAAM,aAAa,oBAAI,IAAoB;AAE3C,eAAW,UAAU,KAAK,aAAa;AAAA,MACrC,CAAC,MAAM,EAAE,YAAY;AAAA,IACvB,GAAG;AACD,kBAAY;AAAA,QACV,OAAO;AAAA,SACN,YAAY,IAAI,OAAO,SAAS,KAAK,KAAK;AAAA,MAC7C;AACA,iBAAW;AAAA,QACT,OAAO;AAAA,QACP,KAAK,IAAI,WAAW,IAAI,OAAO,SAAS,KAAK,GAAG,OAAO,SAAS;AAAA,MAClE;AAAA,IACF;AAEA,UAAM,iBAAiB,MAAM,KAAK,YAAY,QAAQ,CAAC,EACpD,IAAI,CAAC,CAAC,MAAM,KAAK,OAAO;AAAA,MACvB;AAAA,MACA;AAAA,MACA,gBAAgB,WAAW,IAAI,IAAI,KAAK;AAAA,IAC1C,EAAE,EACD,KAAK,CAAC,GAAG,MAAM,EAAE,QAAQ,EAAE,KAAK;AAEnC,UAAM,iBAAiB,KAAK,aAAa;AAAA,MACvC,CAAC,MAAM,EAAE,aAAa,cAAc,EAAE,YAAY,UAAU,CAAC,EAAE;AAAA,IACjE;AAEA,UAAM,eACJ,eAAe,SAAS,IACpB,aACA,eAAe,SAAS,IACtB,aACA;AAER,WAAO;AAAA,MACL;AAAA,MACA;AAAA,MACA,iBAAiB,KAAK,wBAAwB,cAAc;AAAA,MAC5D;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,oBAAoB,SAAmC;AA
C3D,WAAO,KAAK,sCAAsC,OAAO,EAAE;AAE3D,QAAI;AAEF,YAAM,mBAAmB,KAAK,qBAAqB,OAAO;AAC1D,UAAI,kBAAkB;AACpB,eAAO,KAAK,4BAA4B,iBAAiB,EAAE,EAAE;AAC7D,eAAO,MAAM,KAAK,sBAAsB,iBAAiB,EAAE;AAAA,MAC7D;AAGA,YAAM,KAAK,iBAAiB,aAAa;AAGzC,YAAM,KAAK,sBAAsB;AAGjC,aAAO,KAAK,6CAA6C;AACzD,aAAO;AAAA,IACT,SAAS,OAAO;AACd,aAAO,MAAM,yBAAyB,KAAc;AACpD,aAAO;AAAA,IACT;AAAA,EACF;AAAA,EAEA,MAAc,gBAAgB,QAAuC;AAEnE,eAAW,YAAY,KAAK,oBAAoB;AAC9C,UAAI,SAAS,UAAU,OAAO,OAAO,OAAO,OAAO,GAAG;AACpD,eAAO,KAAK,+BAA+B,SAAS,SAAS,EAAE;AAE/D,YAAI,UAAU;AACd,eAAO,UAAU,SAAS,YAAY;AACpC,cAAI;AACF,kBAAM,UAAU,MAAM,SAAS,OAAO,QAAQ,IAAI;AAClD,gBAAI,SAAS;AACX,qBAAO,iBAAiB,SAAS;AACjC,qBAAO;AAAA,YACT;AAAA,UACF,SAAS,OAAO;AACd,mBAAO;AAAA,cACL,oBAAoB,UAAU,CAAC;AAAA,cAC/B;AAAA,YACF;AAAA,UACF;AAEA;AACA,cAAI,UAAU,SAAS,YAAY;AACjC,kBAAM,KAAK,MAAM,SAAS,YAAY,KAAK,IAAI,GAAG,OAAO,CAAC;AAAA,UAC5D;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,cAAc,OAAc,SAAwC;AAC1E,UAAM,UAAU,MAAM,QAAQ,YAAY;AAE1C,QAAI,QAAQ,SAAS,UAAU,KAAK,QAAQ,SAAS,QAAQ,GAAG;AAC9D,aAAO;AAAA,IACT,WAAW,QAAQ,SAAS,KAAK,KAAK,QAAQ,SAAS,QAAQ,GAAG;AAChE,aAAO;AAAA,IACT,WAAW,QAAQ,SAAS,SAAS,KAAK,QAAQ,SAAS;AACzD,aAAO;AAAA,IACT,WAAW,QAAQ,SAAS,QAAQ,KAAK,QAAQ,SAAS,MAAM,GAAG;AACjE,aAAO;AAAA,IACT,WAAW,QAAQ,SAAS,SAAS,KAAK,QAAQ,SAAS,SAAS,GAAG;AACrE,aAAO;AAAA,IACT;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,eAAe,OAAc,SAAuC;AAC1E,QAAI,QAAQ,SAAS,qBAAsB,QAAO;AAClD,QAAI,MAAM,QAAQ,SAAS,WAAW,EAAG,QAAO;AAChD,QAAI,MAAM,QAAQ,SAAS,UAAU,EAAG,QAAO;AAC/C,WAAO;AAAA,EACT;AAAA,EAEQ,0BAAgC;AACtC,SAAK,qBAAqB;AAAA,MACxB;AAAA,QACE,WAAW;AAAA,QACX,WAAW,CAAC,UACV,MAAM,QAAQ,SAAS,UAAU,KACjC,MAAM,QAAQ,SAAS,QAAQ;AAAA,QACjC,QAAQ,OAAO,QAAQ,aAAa;AAElC,iBAAO,KAAK,8BAA8B;AAG1C,cAAI;AACF,kBAAM,SAAS,sBAAsB;AACrC,mBAAO;AAAA,UACT,QAAQ;AACN,mBAAO;AAAA,UACT;AAAA,QACF;AAAA,QACA,YAAY;AAAA,QACZ,WAAW;AAAA,MACb;AAAA,MACA;AAAA,QACE,WAAW;AAAA,QACX,WAAW,CAAC,UACV,MAAM,QAAQ,SAAS,KAAK,KAAK,MAAM,QAAQ,SAAS,QAAQ;AAAA,QAClE,QAAQ,OAAO,QAAQ,aAAa;AAClC,iBAAO,KAAK,oCAAoC;AAEhD,cAAI;AAEF,kBAAM,EAAE,SAAS,IAAI,MAAM,OAAO,eAAe;AACjD,qBAAS,qBAAqB,EAAE,OAAO,SAAS,CAAC;AACjD,qBAAS,yBAAyB,EAAE,OAAO,SAAS,CAAC;AACrD,mBAAO;AAAA,UACT,QAAQ;AACN,mBAAO;AAAA,UACT;AAAA,QACF;AAAA,QACA,YAAY;AAAA,QACZ,WAAW;AAAA,MACb;AAAA,MACA;AAAA,QACE,WAAW;AAAA,QACX,WAAW,CAAC,OAAO,YACjB,MAAM,QAAQ,SAAS,SAAS,KAAK,QAAQ;AAAA,QAC/C,QAAQ,OAAO,QAAQ,aAAa;AAClC,iBAAO,KAAK,mCAAmC;AAG/C,gBAAM,SAAS,iBAAiB,aAAa;AAC7C,iBAAO;AAAA,QACT;AAAA,QACA,YAAY;AAAA,QACZ,WAAW;AAAA,MACb;AAAA,MACA;AAAA,QACE,WAAW;AAAA,QACX,WAAW,CAAC,UACV,MAAM,QAAQ,SAAS,QAAQ,KAAK,MAAM,QAAQ,SAAS,MAAM;AAAA,QACnE,QAAQ,OAAO,QAAQ,aAAa;AAClC,iBAAO,KAAK,4BAA4B;AAGxC,cAAI,OAAO,GAAI,QAAO,GAAG;AAGzB,gBAAM,SAAS,sBAAsB,CAAC;AACtC,iBAAO;AAAA,QACT;AAAA,QACA,YAAY;AAAA,QACZ,WAAW;AAAA,MACb;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,kBAA2D;AACvE,QAAI;AACF,YAAM,EAAE,SAAS,IAAI,MAAM,OAAO,eAAe;AAEjD,YAAM,gBAAgB,SAAS,6BAA6B;AAAA,QAC1D,UAAU;AAAA,MACZ,CAAC,EAAE,KAAK;AACR,YAAM,eAAe,SAAS,0BAA0B;AAAA,QACtD,UAAU;AAAA,MACZ,CAAC;AACD,YAAM,qBAAqB,aACxB,KAAK,EACL,MAAM,IAAI,EACV,OAAO,OAAO;AACjB,YAAM,iBAAiB,SAAS,cAAc,EAAE,UAAU,OAAO,CAAC;AAClE,YAAM,iBAAiB,eACpB,MAAM,IAAI,EACV,IAAI,CAAC,SAAS,KAAK,KAAK,EAAE,QAAQ,WAAW,EAAE,CAAC,EAChD,OAAO,OAAO;AAEjB,aAAO;AAAA,QACL;AAAA,QACA;AAAA,QACA;AAAA,MACF;AAAA,IACF,SAAS,OAAO;AACd,aAAO,KAAK,gCAAgC,KAAc;AAC1D,aAAO;AAAA,QACL,eAAe;AAAA,QACf,oBAAoB,CAAC;AAAA,QACrB,gBAAgB,CAAC;AAAA,MACnB;AAAA,IACF;AAAA,EACF;AAAA,EAEA,MAAc,gBACZ,UACe;AACf,QAAI;AACF,YAAM,EAAE,SAAS,IAAI,MAAM,OAAO,eAAe;AACjD,eAAS,gBAAgB,SAAS,aAAa,IAAI,EAAE,OAAO,SAAS,CAAC;AACtE,aAAO,KAAK,wBAAwB,SAAS,aAAa,EAAE;AAAA,IAC9D,SAAS,OAAO;AACd,aAAO,KAAK,gCAAgC,KAAc;AAAA,IAC5D;AAAA,EACF;AAAA,EAEA,MAAc,gBAAgB,YAAmC;AAE/D,WAAO,KA
AK,2BAA2B,UAAU,EAAE;AAAA,EACrD;AAAA,EAEA,MAAc,kBACZ,YACe;AAEf,WAAO,KAAK,yCAAyC,WAAW,EAAE,EAAE;AAAA,EACtE;AAAA,EAEQ,qBAAqB,SAA4C;AACvE,UAAM,SAAS,KAAK,IAAI,IAAI;AAE5B,WACE,MAAM,KAAK,KAAK,YAAY,OAAO,CAAC,EACjC,OAAO,CAAC,OAAO,GAAG,YAAY,WAAW,GAAG,YAAY,MAAM,EAC9D,KAAK,CAAC,GAAG,MAAM,EAAE,YAAY,EAAE,SAAS,EAAE,CAAC,KAAK;AAAA,EAEvD;AAAA,EAEA,MAAc,wBAAuC;AACnD,QAAI;AAEF,YAAM,KAAK,iBAAiB;AAK5B,aAAO,KAAK,2BAA2B;AAAA,IACzC,SAAS,OAAO;AACd,aAAO,MAAM,sCAAsC,KAAc;AAAA,IACnE;AAAA,EACF;AAAA,EAEA,MAAc,mBAAkC;AAAA,EAEhD;AAAA,EAEA,MAAc,sBAAsB,WAAkC;AACpE,UAAM,SAAS,MAAM,KAAK,KAAK,YAAY,OAAO,CAAC,EAAE;AAAA,MACnD,CAAC,GAAG,MAAM,EAAE,YAAY,EAAE;AAAA,IAC5B;AAEA,UAAM,WAAW,OAAO,MAAM,SAAS;AAEvC,eAAW,cAAc,UAAU;AACjC,UAAI;AACF,cAAM,iBAAiB,KAAK;AAAA,UAC1B,KAAK;AAAA,UACL,cAAc,WAAW,EAAE;AAAA,QAC7B;AACA,cAAM,GAAG,OAAO,cAAc;AAC9B,aAAK,YAAY,OAAO,WAAW,EAAE;AAAA,MACvC,SAAS,OAAO;AACd,eAAO;AAAA,UACL,+BAA+B,WAAW,EAAE;AAAA,UAC5C;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,WAAO,KAAK,cAAc,SAAS,MAAM,kBAAkB;AAAA,EAC7D;AAAA,EAEA,MAAc,wBAAwB,QAAoC;AACxE,WAAO,MAAM,qBAAqB,OAAO,EAAE,MAAM,OAAO,MAAM,OAAO,EAAE;AAGvE,QAAI;AACF,YAAM,aAAc,KAAK,iBAAyB;AAClD,UAAI,YAAY,IAAI;AAClB,cAAM,KAAK,iBAAiB,WAAW,IAAI,kBAAkB;AAAA,MAC/D;AAAA,IACF,QAAQ;AACN,aAAO,MAAM,uCAAuC;AAAA,IACtD;AAGA,UAAM,KAAK,iBAAiB,aAAa;AAAA,EAC3C;AAAA,EAEQ,wBACN,gBACU;AACV,UAAM,UAAoB,CAAC;AAE3B,eAAW,EAAE,MAAM,MAAM,KAAK,gBAAgB;AAC5C,UAAI,QAAQ,GAAG;AACb,gBAAQ,MAAM;AAAA,UACZ,KAAK;AACH,oBAAQ,KAAK,2CAA2C;AACxD;AAAA,UACF,KAAK;AACH,oBAAQ,KAAK,yCAAyC;AACtD;AAAA,UACF,KAAK;AACH,oBAAQ;AAAA,cACN;AAAA,YACF;AACA;AAAA,UACF,KAAK;AACH,oBAAQ,KAAK,qDAAqD;AAClE;AAAA,QACJ;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,EACT;AAAA,EAEQ,2BAAiC;AACvC,SAAK,qBAAqB,YAAY,YAAY;AAChD,YAAM,aAAc,KAAK,iBAAyB;AAClD,UAAI,YAAY,MAAM,WAAW,WAAW,UAAU;AACpD,cAAM,KAAK,iBAAiB,WAAW,IAAI,UAAU;AAAA,MACvD;AAAA,IACF,GAAG,GAAM;AAAA,EACX;AAAA,EAEA,MAAc,0BAAyC;AACrD,QAAI;AACF,YAAM,GAAG,MAAM,KAAK,aAAa,EAAE,WAAW,KAAK,CAAC;AAAA,IACtD,SAAS,OAAO;AACd,aAAO,MAAM,wCAAwC,KAAc;AAAA,IACrE;AAAA,EACF;AAAA,EAEA,MAAc,0BAAyC;AACrD,QAAI;AACF,YAAM,QAAQ,MAAM,GAAG,QAAQ,KAAK,WAAW;AAE/C,iBAAW,QAAQ,OAAO;AACxB,YAAI,KAAK,WAAW,aAAa,KAAK,KAAK,SAAS,OAAO,GAAG;AAC5D,cAAI;AACF,kBAAM,UAAU,MAAM,GAAG;AAAA,cACvB,KAAK,KAAK,KAAK,aAAa,IAAI;AAAA,cAChC;AAAA,YACF;AACA,kBAAM,aAAiC,KAAK,MAAM,OAAO;AACzD,iBAAK,YAAY,IAAI,WAAW,IAAI,UAAU;AAAA,UAChD,SAAS,OAAO;AACd,mBAAO,KAAK,6BAA6B,IAAI,KAAK,KAAc;AAAA,UAClE;AAAA,QACF;AAAA,MACF;AAEA,aAAO,KAAK,UAAU,KAAK,YAAY,IAAI,uBAAuB;AAAA,IACpE,SAAS,OAAO;AACd,aAAO,KAAK,wCAAwC,KAAc;AAAA,IACpE;AAAA,EACF;AAAA,EAEA,MAAc,gBAAgB,QAAoC;AAChE,QAAI;AACF,YAAM,aAAa,KAAK,KAAK,KAAK,aAAa,SAAS,OAAO,EAAE,OAAO;AACxE,YAAM,GAAG,UAAU,YAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,CAAC;AAAA,IAChE,SAAS,OAAO;AACd,aAAO,MAAM,gCAAgC,KAAc;AAAA,IAC7D;AAAA,EACF;AAAA,EAEQ,MAAM,IAA2B;AACvC,WAAO,IAAI,QAAQ,CAAC,YAAY,WAAW,SAAS,EAAE,CAAC;AAAA,EACzD;AAAA,EAEQ,aAAqB;AAC3B,WAAO,YAAY,KAAK,IAAI,CAAC,IAAI,KAAK,OAAO,EAAE,SAAS,EAAE,EAAE,OAAO,GAAG,CAAC,CAAC;AAAA,EAC1E;AACF;AAEA,IAAO,yBAAQ;",
+ "names": []
+ }
@@ -1,12 +1,5 @@
  import { execSync } from "child_process";
  import { logger } from "../../../core/monitoring/logger.js";
- class GitWorkflowError extends Error {
- constructor(message, context) {
- super(message);
- this.context = context;
- this.name = "GitWorkflowError";
- }
- }
  class GitWorkflowManager {
  config;
  agentBranches = /* @__PURE__ */ new Map();
@@ -162,12 +155,7 @@ class GitWorkflowManager {
  }
  // Private helper methods
  getCurrentBranch() {
- try {
- return execSync("git rev-parse --abbrev-ref HEAD", { encoding: "utf8" }).trim();
- } catch (error) {
- logger.warn("Failed to get current branch", error);
- return "main";
- }
+ return execSync("git rev-parse --abbrev-ref HEAD", { encoding: "utf8" }).trim();
  }
  getMainBranch() {
  try {
@@ -175,7 +163,6 @@ class GitWorkflowManager {
  if (branches.includes("origin/main")) return "main";
  if (branches.includes("origin/master")) return "master";
  } catch (error) {
- logger.debug("Could not detect main branch from remotes", error);
  }
  return this.getCurrentBranch();
  }
@@ -192,46 +179,10 @@ class GitWorkflowManager {
  }
  }
  createBranch(branchName) {
- try {
- try {
- const currentBranch = execSync("git branch --show-current", { encoding: "utf8" }).trim();
- if (currentBranch === branchName) {
- try {
- execSync("git checkout main", { encoding: "utf8" });
- } catch {
- execSync("git checkout master", { encoding: "utf8" });
- }
- }
- try {
- const worktrees = execSync("git worktree list --porcelain", { encoding: "utf8" });
- const lines = worktrees.split("\n");
- for (let i = 0; i < lines.length; i++) {
- if (lines[i].startsWith("branch ") && lines[i].includes(branchName)) {
- const worktreetPath = lines[i - 1].replace("worktree ", "");
- execSync(`git worktree remove --force "${worktreetPath}"`, { encoding: "utf8" });
- logger.info(`Removed worktree at ${worktreetPath} for branch ${branchName}`);
- }
- }
- } catch (worktreeError) {
- logger.warn("Failed to check/remove worktrees", worktreeError);
- }
- execSync(`git branch -D ${branchName}`, { encoding: "utf8" });
- logger.info(`Deleted existing branch ${branchName} for fresh start`);
- } catch {
- }
- execSync(`git checkout -b ${branchName}`, { encoding: "utf8" });
- } catch (error) {
- logger.error(`Failed to create branch ${branchName}`, error);
- throw new GitWorkflowError(`Failed to create branch: ${branchName}`, { branchName });
- }
+ execSync(`git checkout -b ${branchName}`, { encoding: "utf8" });
  }
  checkoutBranch(branchName) {
- try {
- execSync(`git checkout ${branchName}`, { encoding: "utf8" });
- } catch (error) {
- logger.error(`Failed to checkout branch ${branchName}`, error);
- throw new GitWorkflowError(`Failed to checkout branch: ${branchName}`, { branchName });
- }
+ execSync(`git checkout ${branchName}`, { encoding: "utf8" });
  }
  branchExists(branchName) {
  try {
@@ -268,13 +219,8 @@ class GitWorkflowManager {
  }
  }
  hasUncommittedChanges() {
- try {
- const status = execSync("git status --porcelain", { encoding: "utf8" });
- return status.trim().length > 0;
- } catch (error) {
- logger.warn("Failed to check git status", error);
- return false;
- }
+ const status = execSync("git status --porcelain", { encoding: "utf8" });
+ return status.trim().length > 0;
  }
  hasRemote() {
  try {
@@ -295,16 +241,10 @@ class GitWorkflowManager {
  }
  }
  isAgentFile(file, agent) {
- if (!file || !agent?.role || !agent?.id) {
- return false;
- }
  return file.includes(agent.role) || file.includes(agent.id);
  }
  generateCommitMessage(agent, task) {
- const role = agent?.role || "agent";
- const title = task?.title || "task";
- const iteration = agent?.performance?.tasksCompleted || 1;
- return `[${role}] ${title} - Iteration ${iteration}`;
+ return `[${agent.role}] ${task.title} - Iteration ${agent.performance?.tasksCompleted || 1}`;
  }
  scheduleAutoCommit(agent, task) {
  const intervalMs = this.config.commitFrequency * 60 * 1e3;
@@ -363,7 +303,6 @@ Generated by Swarm Coordinator
  }
  const gitWorkflowManager = new GitWorkflowManager();
  export {
- GitWorkflowError,
  GitWorkflowManager,
  gitWorkflowManager
  };
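
The removals in git-workflow-manager.js drop the GitWorkflowError class, the try/catch wrappers around getCurrentBranch, createBranch, checkoutBranch, and hasUncommittedChanges, and the defensive guards in isAgentFile and generateCommitMessage. The hedged sketch below illustrates the caller-side effect; the import path mirrors the tarball layout, the branch name is hypothetical, and whether these helpers are meant to be called directly is not shown in this diff.

```ts
// Sketch of the 0.4.1 behavior: git failures now propagate as whatever execSync
// throws, instead of a GitWorkflowError carrying { branchName } context as in 0.3.26.
import { gitWorkflowManager } from "./dist/integrations/ralph/swarm/git-workflow-manager.js";

try {
  gitWorkflowManager.checkoutBranch("ralph/example-branch"); // hypothetical branch name
} catch (error) {
  // Non-zero exit status, missing branch, stale worktree, etc. all land here now.
  console.error("checkout failed:", (error as Error).message);
}
```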