claude-mpm 4.2.51__py3-none-any.whl → 4.3.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26)
  1. claude_mpm/VERSION +1 -1
  2. claude_mpm/agents/BASE_PM.md +77 -447
  3. claude_mpm/agents/OUTPUT_STYLE.md +0 -39
  4. claude_mpm/agents/PM_INSTRUCTIONS.md +145 -0
  5. claude_mpm/agents/WORKFLOW.md +74 -368
  6. claude_mpm/agents/templates/prompt-engineer.json +294 -0
  7. claude_mpm/agents/templates/vercel_ops_agent.json +153 -32
  8. claude_mpm/cli/commands/uninstall.py +0 -1
  9. claude_mpm/core/framework_loader.py +72 -24
  10. claude_mpm/core/log_manager.py +52 -0
  11. claude_mpm/core/logging_utils.py +30 -12
  12. claude_mpm/services/agents/deployment/agent_template_builder.py +260 -18
  13. claude_mpm/services/agents/deployment/multi_source_deployment_service.py +35 -16
  14. claude_mpm/services/agents/local_template_manager.py +0 -1
  15. claude_mpm/services/monitor/daemon_manager.py +1 -3
  16. claude_mpm/services/monitor/event_emitter.py +5 -1
  17. claude_mpm/services/monitor/handlers/hooks.py +0 -2
  18. claude_mpm/tools/code_tree_analyzer.py +1 -3
  19. claude_mpm/utils/log_cleanup.py +612 -0
  20. {claude_mpm-4.2.51.dist-info → claude_mpm-4.3.4.dist-info}/METADATA +41 -28
  21. {claude_mpm-4.2.51.dist-info → claude_mpm-4.3.4.dist-info}/RECORD +26 -23
  22. /claude_mpm/agents/{INSTRUCTIONS.md → INSTRUCTIONS_OLD_DEPRECATED.md} +0 -0
  23. {claude_mpm-4.2.51.dist-info → claude_mpm-4.3.4.dist-info}/WHEEL +0 -0
  24. {claude_mpm-4.2.51.dist-info → claude_mpm-4.3.4.dist-info}/entry_points.txt +0 -0
  25. {claude_mpm-4.2.51.dist-info → claude_mpm-4.3.4.dist-info}/licenses/LICENSE +0 -0
  26. {claude_mpm-4.2.51.dist-info → claude_mpm-4.3.4.dist-info}/top_level.txt +0 -0
@@ -575,9 +575,7 @@ class DaemonManager:
             stdout=log_file,
             stderr=subprocess.STDOUT if self.log_file else subprocess.DEVNULL,
             start_new_session=True,  # Create new process group
-            close_fds=(
-                not self.log_file
-            ),  # Keep log file open if redirecting
+            close_fds=(not self.log_file),  # Keep log file open if redirecting
             env=env,  # Pass modified environment
         )
 
@@ -186,7 +186,11 @@ class AsyncEventEmitter:
         return False
 
     async def _emit_http(
-        self, namespace: str, event: str, data: Dict[str, Any], endpoint: Optional[str] = None
+        self,
+        namespace: str,
+        event: str,
+        data: Dict[str, Any],
+        endpoint: Optional[str] = None,
     ) -> bool:
         """Emit event via HTTP with connection pooling."""
         if not self._http_session:
@@ -428,7 +428,6 @@ class HookHandler:
             "original_event": data,  # Keep original for debugging
         }
 
-
     def _process_hook_event(self, data: Dict) -> Dict:
         """Process and normalize hook event data.
 
@@ -447,7 +446,6 @@ class HookHandler:
             "processed_at": asyncio.get_event_loop().time(),
         }
 
-
    def _update_session_tracking(self, session_id: str, event: Dict):
        """Update session tracking with new event.
 
@@ -1756,9 +1756,7 @@ class CodeTreeAnalyzer:
             return node.name not in important_magic
 
         # Filter very generic getters/setters only if they're trivial
-        if (name_lower.startswith(("get_", "set_"))) and len(
-            node.name
-        ) <= 8:
+        if (name_lower.startswith(("get_", "set_"))) and len(node.name) <= 8:
             return True
 
         # Don't filter single underscore functions - they're often important
@@ -0,0 +1,612 @@
+"""
+Comprehensive log cleanup utility for Claude MPM.
+
+This module provides automated log cleanup with both size and age-based policies,
+including session directory cleanup, archived log removal, and rotation management.
+"""
+
+import gzip
+import logging
+import os
+import shutil
+from datetime import datetime, timedelta
+from pathlib import Path
+from typing import Dict, Optional, Tuple
+
+logger = logging.getLogger(__name__)
+
+
+class LogCleanupConfig:
+    """Configuration for log cleanup operations."""
+
+    # Default retention periods (in days)
+    DEFAULT_SESSION_MAX_AGE_DAYS = 7
+    DEFAULT_ARCHIVED_MAX_AGE_DAYS = 30
+    DEFAULT_LOG_MAX_AGE_DAYS = 14
+    DEFAULT_PROMPT_MAX_AGE_DAYS = 7
+
+    # Size thresholds
+    DEFAULT_MAX_LOG_SIZE_MB = 5
+    DEFAULT_MAX_TOTAL_SIZE_GB = 1
+
+    # File patterns
+    LOG_PATTERNS = {
+        "mpm": "mpm_*.log",
+        "startup": "startup-*.log",
+        "system": "system_*.log",
+        "agent": "agent_*.log",
+    }
+
+    ARCHIVE_EXTENSIONS = [".gz", ".zip", ".tar", ".bz2"]
+
+
+class LogCleanupUtility:
+    """
+    Comprehensive log cleanup utility with age and size-based policies.
+
+    Features:
+    - Age-based cleanup for session directories
+    - Cleanup of old archived logs (.gz files)
+    - Size-based rotation trigger
+    - Dry-run mode for testing
+    - Detailed statistics reporting
+    - Error handling for locked/permission issues
+    """
+
+    def __init__(self, base_log_dir: Optional[Path] = None):
+        """
+        Initialize the log cleanup utility.
+
+        Args:
+            base_log_dir: Base directory for logs (default: .claude-mpm/logs)
+        """
+        if base_log_dir is None:
+            base_log_dir = Path.cwd() / ".claude-mpm" / "logs"
+
+        self.base_log_dir = Path(base_log_dir)
+        self.stats = {
+            "sessions_removed": 0,
+            "archives_removed": 0,
+            "logs_removed": 0,
+            "space_freed_mb": 0.0,
+            "errors": [],
+        }
+
+    def cleanup_old_sessions(
+        self,
+        max_age_days: int = LogCleanupConfig.DEFAULT_SESSION_MAX_AGE_DAYS,
+        dry_run: bool = False,
+    ) -> Tuple[int, float]:
+        """
+        Remove session directories older than specified days.
+
+        Args:
+            max_age_days: Maximum age in days for session directories
+            dry_run: If True, only report what would be deleted
+
+        Returns:
+            Tuple of (directories removed, space freed in MB)
+        """
+        sessions_dir = self.base_log_dir / "sessions"
+        if not sessions_dir.exists():
+            logger.info(f"Sessions directory not found: {sessions_dir}")
+            return 0, 0.0
+
+        cutoff_time = datetime.now() - timedelta(days=max_age_days)
+        removed_count = 0
+        total_size = 0.0
+
+        logger.info(
+            f"Scanning for session directories older than {max_age_days} days..."
+        )
+
+        try:
+            for session_dir in sessions_dir.iterdir():
+                if not session_dir.is_dir():
+                    continue
+
+                try:
+                    # Check directory modification time
+                    mtime = datetime.fromtimestamp(session_dir.stat().st_mtime)
+
+                    if mtime < cutoff_time:
+                        # Calculate directory size
+                        dir_size = self._get_directory_size(session_dir)
+                        total_size += dir_size
+
+                        if dry_run:
+                            logger.info(
+                                f"[DRY RUN] Would remove session: {session_dir.name} "
+                                f"(age: {(datetime.now() - mtime).days} days, "
+                                f"size: {dir_size:.2f} MB)"
+                            )
+                        else:
+                            shutil.rmtree(session_dir)
+                            logger.info(
+                                f"Removed session: {session_dir.name} "
+                                f"(age: {(datetime.now() - mtime).days} days, "
+                                f"size: {dir_size:.2f} MB)"
+                            )
+
+                        removed_count += 1
+
+                except (PermissionError, OSError) as e:
+                    error_msg = f"Could not remove {session_dir.name}: {e}"
+                    logger.warning(error_msg)
+                    self.stats["errors"].append(error_msg)
+
+        except Exception as e:
+            logger.error(f"Error scanning sessions directory: {e}")
+            self.stats["errors"].append(str(e))
+
+        self.stats["sessions_removed"] += removed_count
+        self.stats["space_freed_mb"] += total_size
+
+        return removed_count, total_size
+
+    def cleanup_archived_logs(
+        self,
+        max_age_days: int = LogCleanupConfig.DEFAULT_ARCHIVED_MAX_AGE_DAYS,
+        dry_run: bool = False,
+    ) -> Tuple[int, float]:
+        """
+        Remove archived log files older than specified days.
+
+        Args:
+            max_age_days: Maximum age in days for archived files
+            dry_run: If True, only report what would be deleted
+
+        Returns:
+            Tuple of (files removed, space freed in MB)
+        """
+        cutoff_time = datetime.now() - timedelta(days=max_age_days)
+        removed_count = 0
+        total_size = 0.0
+
+        logger.info(f"Scanning for archived files older than {max_age_days} days...")
+
+        # Search for archived files in all subdirectories
+        for ext in LogCleanupConfig.ARCHIVE_EXTENSIONS:
+            for archive_file in self.base_log_dir.rglob(f"*{ext}"):
+                try:
+                    mtime = datetime.fromtimestamp(archive_file.stat().st_mtime)
+
+                    if mtime < cutoff_time:
+                        file_size = archive_file.stat().st_size / (1024 * 1024)  # MB
+                        total_size += file_size
+
+                        if dry_run:
+                            logger.info(
+                                f"[DRY RUN] Would remove archive: {archive_file.name} "
+                                f"(age: {(datetime.now() - mtime).days} days, "
+                                f"size: {file_size:.2f} MB)"
+                            )
+                        else:
+                            archive_file.unlink()
+                            logger.info(
+                                f"Removed archive: {archive_file.name} "
+                                f"(age: {(datetime.now() - mtime).days} days, "
+                                f"size: {file_size:.2f} MB)"
+                            )
+
+                        removed_count += 1
+
+                except (PermissionError, OSError) as e:
+                    error_msg = f"Could not remove {archive_file.name}: {e}"
+                    logger.warning(error_msg)
+                    self.stats["errors"].append(error_msg)
+
+        self.stats["archives_removed"] += removed_count
+        self.stats["space_freed_mb"] += total_size
+
+        return removed_count, total_size
+
+    def cleanup_old_logs(
+        self,
+        max_age_days: int = LogCleanupConfig.DEFAULT_LOG_MAX_AGE_DAYS,
+        dry_run: bool = False,
+        log_type: Optional[str] = None,
+    ) -> Tuple[int, float]:
+        """
+        Remove old log files based on age.
+
+        Args:
+            max_age_days: Maximum age in days for log files
+            dry_run: If True, only report what would be deleted
+            log_type: Specific log type to clean (mpm, startup, etc.) or None for all
+
+        Returns:
+            Tuple of (files removed, space freed in MB)
+        """
+        cutoff_time = datetime.now() - timedelta(days=max_age_days)
+        removed_count = 0
+        total_size = 0.0
+
+        patterns = (
+            [LogCleanupConfig.LOG_PATTERNS.get(log_type)]
+            if log_type
+            else LogCleanupConfig.LOG_PATTERNS.values()
+        )
+
+        logger.info(f"Scanning for log files older than {max_age_days} days...")
+
+        for pattern in patterns:
+            for subdir in ["mpm", "startup", "system", "agents"]:
+                log_dir = self.base_log_dir / subdir
+                if not log_dir.exists():
+                    continue
+
+                for log_file in log_dir.glob(pattern):
+                    try:
+                        mtime = datetime.fromtimestamp(log_file.stat().st_mtime)
+
+                        if mtime < cutoff_time:
+                            file_size = log_file.stat().st_size / (1024 * 1024)  # MB
+                            total_size += file_size
+
+                            if dry_run:
+                                logger.info(
+                                    f"[DRY RUN] Would remove log: {log_file.name} "
+                                    f"(age: {(datetime.now() - mtime).days} days, "
+                                    f"size: {file_size:.2f} MB)"
+                                )
+                            else:
+                                log_file.unlink()
+                                logger.info(
+                                    f"Removed log: {log_file.name} "
+                                    f"(age: {(datetime.now() - mtime).days} days, "
+                                    f"size: {file_size:.2f} MB)"
+                                )
+
+                            removed_count += 1
+
+                    except (PermissionError, OSError) as e:
+                        error_msg = f"Could not remove {log_file.name}: {e}"
+                        logger.warning(error_msg)
+                        self.stats["errors"].append(error_msg)
+
+        self.stats["logs_removed"] += removed_count
+        self.stats["space_freed_mb"] += total_size
+
+        return removed_count, total_size
+
+    def cleanup_empty_directories(self, dry_run: bool = False) -> int:
+        """
+        Remove empty directories in the log tree.
+
+        Args:
+            dry_run: If True, only report what would be deleted
+
+        Returns:
+            Number of directories removed
+        """
+        removed_count = 0
+
+        # Walk bottom-up to remove empty parent directories
+        for root, dirs, files in os.walk(self.base_log_dir, topdown=False):
+            root_path = Path(root)
+
+            # Skip the base log directory itself
+            if root_path == self.base_log_dir:
+                continue
+
+            try:
+                # Check if directory is empty
+                if not any(root_path.iterdir()):
+                    if dry_run:
+                        logger.info(
+                            f"[DRY RUN] Would remove empty directory: {root_path}"
+                        )
+                    else:
+                        root_path.rmdir()
+                        logger.info(f"Removed empty directory: {root_path}")
+                    removed_count += 1
+
+            except (PermissionError, OSError) as e:
+                error_msg = f"Could not remove directory {root_path}: {e}"
+                logger.debug(error_msg)  # Debug level since this is common
+
+        return removed_count
+
+    def compress_old_logs(
+        self, age_days: int = 7, dry_run: bool = False
+    ) -> Tuple[int, float]:
+        """
+        Compress log files older than specified days.
+
+        Args:
+            age_days: Compress files older than this many days
+            dry_run: If True, only report what would be compressed
+
+        Returns:
+            Tuple of (files compressed, space saved in MB)
+        """
+        cutoff_time = datetime.now() - timedelta(days=age_days)
+        compressed_count = 0
+        space_saved = 0.0
+
+        for log_file in self.base_log_dir.rglob("*.log"):
+            # Skip already compressed files
+            if log_file.suffix in LogCleanupConfig.ARCHIVE_EXTENSIONS:
+                continue
+
+            try:
+                mtime = datetime.fromtimestamp(log_file.stat().st_mtime)
+
+                if mtime < cutoff_time:
+                    original_size = log_file.stat().st_size / (1024 * 1024)  # MB
+                    compressed_path = log_file.with_suffix(".log.gz")
+
+                    if dry_run:
+                        # Estimate compression ratio (typically 80-90% for logs)
+                        estimated_saved = original_size * 0.85
+                        logger.info(
+                            f"[DRY RUN] Would compress: {log_file.name} "
+                            f"(size: {original_size:.2f} MB, "
+                            f"estimated savings: {estimated_saved:.2f} MB)"
+                        )
+                        space_saved += estimated_saved
+                    else:
+                        # Actually compress the file
+                        with open(log_file, "rb") as f_in:
+                            with gzip.open(
+                                compressed_path, "wb", compresslevel=9
+                            ) as f_out:
+                                shutil.copyfileobj(f_in, f_out)
+
+                        compressed_size = compressed_path.stat().st_size / (1024 * 1024)
+                        saved = original_size - compressed_size
+                        space_saved += saved
+
+                        # Remove original file
+                        log_file.unlink()
+
+                        logger.info(
+                            f"Compressed: {log_file.name} "
+                            f"({original_size:.2f} MB → {compressed_size:.2f} MB, "
+                            f"saved: {saved:.2f} MB)"
+                        )
+
+                    compressed_count += 1
+
+            except Exception as e:
+                error_msg = f"Could not compress {log_file.name}: {e}"
+                logger.warning(error_msg)
+                self.stats["errors"].append(error_msg)
+
+        return compressed_count, space_saved
+
+    def get_statistics(self) -> Dict:
+        """
+        Get current statistics about the log directory.
+
+        Returns:
+            Dictionary with statistics
+        """
+        stats = {
+            "total_size_mb": 0.0,
+            "session_count": 0,
+            "archive_count": 0,
+            "log_count": 0,
+            "oldest_session": None,
+            "oldest_log": None,
+            "directory_sizes": {},
+        }
+
+        # Calculate total size
+        stats["total_size_mb"] = self._get_directory_size(self.base_log_dir)
+
+        # Count sessions
+        sessions_dir = self.base_log_dir / "sessions"
+        if sessions_dir.exists():
+            sessions = list(sessions_dir.iterdir())
+            stats["session_count"] = len([s for s in sessions if s.is_dir()])
+
+            # Find oldest session
+            if sessions:
+                oldest = min(sessions, key=lambda p: p.stat().st_mtime)
+                stats["oldest_session"] = {
+                    "name": oldest.name,
+                    "age_days": (
+                        datetime.now() - datetime.fromtimestamp(oldest.stat().st_mtime)
+                    ).days,
+                }
+
+        # Count archives
+        for ext in LogCleanupConfig.ARCHIVE_EXTENSIONS:
+            stats["archive_count"] += len(list(self.base_log_dir.rglob(f"*{ext}")))
+
+        # Count logs (excluding symlinks)
+        stats["log_count"] = len(
+            [p for p in self.base_log_dir.rglob("*.log") if not p.is_symlink()]
+        )
+
+        # Find oldest log (excluding symlinks)
+        all_logs = [p for p in self.base_log_dir.rglob("*.log") if not p.is_symlink()]
+        if all_logs:
+            oldest_log = min(all_logs, key=lambda p: p.stat().st_mtime)
+            stats["oldest_log"] = {
+                "name": oldest_log.name,
+                "path": str(oldest_log.relative_to(self.base_log_dir)),
+                "age_days": (
+                    datetime.now() - datetime.fromtimestamp(oldest_log.stat().st_mtime)
+                ).days,
+            }
+
+        # Calculate directory sizes
+        for subdir in ["sessions", "mpm", "startup", "system", "agents", "prompts"]:
+            dir_path = self.base_log_dir / subdir
+            if dir_path.exists():
+                stats["directory_sizes"][subdir] = self._get_directory_size(dir_path)
+
+        return stats
+
+    def perform_full_cleanup(
+        self,
+        session_max_age_days: int = LogCleanupConfig.DEFAULT_SESSION_MAX_AGE_DAYS,
+        archive_max_age_days: int = LogCleanupConfig.DEFAULT_ARCHIVED_MAX_AGE_DAYS,
+        log_max_age_days: int = LogCleanupConfig.DEFAULT_LOG_MAX_AGE_DAYS,
+        compress_age_days: Optional[int] = None,
+        dry_run: bool = False,
+    ) -> Dict:
+        """
+        Perform a complete cleanup operation.
+
+        Args:
+            session_max_age_days: Maximum age for session directories
+            archive_max_age_days: Maximum age for archived files
+            log_max_age_days: Maximum age for log files
+            compress_age_days: Age threshold for compression (None to skip)
+            dry_run: If True, only report what would be done
+
+        Returns:
+            Summary statistics dictionary
+        """
+        mode = "[DRY RUN] " if dry_run else ""
+        logger.info(f"{mode}Starting comprehensive log cleanup...")
+
+        # Get initial statistics
+        initial_stats = self.get_statistics()
+
+        # Reset stats
+        self.stats = {
+            "sessions_removed": 0,
+            "archives_removed": 0,
+            "logs_removed": 0,
+            "files_compressed": 0,
+            "empty_dirs_removed": 0,
+            "space_freed_mb": 0.0,
+            "space_saved_mb": 0.0,
+            "errors": [],
+        }
+
+        # Cleanup operations
+        sessions_removed, sessions_space = self.cleanup_old_sessions(
+            session_max_age_days, dry_run
+        )
+
+        archives_removed, archives_space = self.cleanup_archived_logs(
+            archive_max_age_days, dry_run
+        )
+
+        logs_removed, logs_space = self.cleanup_old_logs(log_max_age_days, dry_run)
+
+        # Optional compression
+        if compress_age_days is not None:
+            compressed, space_saved = self.compress_old_logs(compress_age_days, dry_run)
+            self.stats["files_compressed"] = compressed
+            self.stats["space_saved_mb"] = space_saved
+
+        # Cleanup empty directories
+        empty_removed = self.cleanup_empty_directories(dry_run)
+        self.stats["empty_dirs_removed"] = empty_removed
+
+        # Get final statistics
+        final_stats = self.get_statistics() if not dry_run else initial_stats
+
+        # Prepare summary
+        summary = {
+            "mode": "DRY RUN" if dry_run else "EXECUTED",
+            "initial_stats": initial_stats,
+            "final_stats": final_stats,
+            "operations": self.stats,
+            "total_removed": (
+                self.stats["sessions_removed"]
+                + self.stats["archives_removed"]
+                + self.stats["logs_removed"]
+            ),
+            "total_space_impact_mb": (
+                self.stats["space_freed_mb"] + self.stats.get("space_saved_mb", 0)
+            ),
+        }
+
+        # Log summary
+        logger.info(
+            f"{mode}Cleanup complete: "
+            f"Removed {summary['total_removed']} items, "
+            f"freed {self.stats['space_freed_mb']:.2f} MB"
+        )
+
+        if self.stats.get("files_compressed"):
+            logger.info(
+                f"Compressed {self.stats['files_compressed']} files, "
+                f"saved {self.stats['space_saved_mb']:.2f} MB"
+            )
+
+        if self.stats["errors"]:
+            logger.warning(
+                f"Encountered {len(self.stats['errors'])} errors during cleanup"
+            )
+
+        return summary
+
+    def _get_directory_size(self, path: Path) -> float:
+        """
+        Calculate total size of a directory in MB.
+
+        Args:
+            path: Directory path
+
+        Returns:
+            Size in megabytes
+        """
+        total_size = 0
+        try:
+            for item in path.rglob("*"):
+                if item.is_file():
+                    total_size += item.stat().st_size
+        except Exception as e:
+            logger.debug(f"Error calculating size for {path}: {e}")
+
+        return total_size / (1024 * 1024)  # Convert to MB
+
+
+def run_cleanup_on_startup(
+    base_log_dir: Optional[Path] = None, config: Optional[Dict] = None
+) -> Optional[Dict]:
+    """
+    Run automatic cleanup on application startup.
+
+    This function is designed to be called during application initialization
+    to perform routine log maintenance.
+
+    Args:
+        base_log_dir: Base directory for logs
+        config: Optional configuration dictionary
+
+    Returns:
+        Cleanup summary or None if disabled
+    """
+    # Check if cleanup is enabled
+    if config and not config.get("auto_cleanup_enabled", True):
+        logger.debug("Automatic log cleanup is disabled")
+        return None
+
+    try:
+        cleaner = LogCleanupUtility(base_log_dir)
+
+        # Use configuration or defaults
+        session_days = config.get("session_retention_days", 7) if config else 7
+        archive_days = config.get("archive_retention_days", 30) if config else 30
+        log_days = config.get("log_retention_days", 14) if config else 14
+
+        # Run cleanup (not dry-run)
+        summary = cleaner.perform_full_cleanup(
+            session_max_age_days=session_days,
+            archive_max_age_days=archive_days,
+            log_max_age_days=log_days,
+            compress_age_days=None,  # Don't compress on startup
+            dry_run=False,
+        )
+
+        logger.info(
+            f"Startup cleanup completed: "
+            f"Removed {summary['total_removed']} items, "
+            f"freed {summary['total_space_impact_mb']:.2f} MB"
+        )
+
+        return summary
+
+    except Exception as e:
+        logger.error(f"Error during startup cleanup: {e}")
+        return None
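Usage note: a minimal sketch of how the new log cleanup utility might be driven, based only on the signatures visible in the diff above. The import path follows the file's location in RECORD (claude_mpm/utils/log_cleanup.py); the explicit base_log_dir and the retention values are illustrative assumptions, not defaults taken from claude-mpm configuration.

    from pathlib import Path

    from claude_mpm.utils.log_cleanup import LogCleanupUtility, run_cleanup_on_startup

    # Dry run: report what a full cleanup would remove without deleting anything.
    cleaner = LogCleanupUtility(base_log_dir=Path(".claude-mpm/logs"))  # path is illustrative
    summary = cleaner.perform_full_cleanup(dry_run=True)
    print(summary["total_removed"], summary["operations"]["space_freed_mb"])

    # Startup hook with custom retention periods; a config containing
    # {"auto_cleanup_enabled": False} would skip cleanup entirely.
    run_cleanup_on_startup(config={"session_retention_days": 3, "log_retention_days": 7})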