gitflow-analytics 1.3.6__py3-none-any.whl → 3.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. gitflow_analytics/_version.py +1 -1
  2. gitflow_analytics/classification/batch_classifier.py +156 -4
  3. gitflow_analytics/cli.py +897 -179
  4. gitflow_analytics/config/loader.py +40 -1
  5. gitflow_analytics/config/schema.py +4 -0
  6. gitflow_analytics/core/cache.py +20 -0
  7. gitflow_analytics/core/data_fetcher.py +1254 -228
  8. gitflow_analytics/core/git_auth.py +169 -0
  9. gitflow_analytics/core/git_timeout_wrapper.py +347 -0
  10. gitflow_analytics/core/metrics_storage.py +12 -3
  11. gitflow_analytics/core/progress.py +219 -18
  12. gitflow_analytics/core/subprocess_git.py +145 -0
  13. gitflow_analytics/extractors/ml_tickets.py +3 -2
  14. gitflow_analytics/extractors/tickets.py +93 -8
  15. gitflow_analytics/integrations/jira_integration.py +1 -1
  16. gitflow_analytics/integrations/orchestrator.py +47 -29
  17. gitflow_analytics/metrics/branch_health.py +3 -2
  18. gitflow_analytics/models/database.py +72 -1
  19. gitflow_analytics/pm_framework/adapters/jira_adapter.py +12 -5
  20. gitflow_analytics/pm_framework/orchestrator.py +8 -3
  21. gitflow_analytics/qualitative/classifiers/llm/openai_client.py +24 -4
  22. gitflow_analytics/qualitative/classifiers/llm_commit_classifier.py +3 -1
  23. gitflow_analytics/qualitative/core/llm_fallback.py +34 -2
  24. gitflow_analytics/reports/narrative_writer.py +118 -74
  25. gitflow_analytics/security/__init__.py +11 -0
  26. gitflow_analytics/security/config.py +189 -0
  27. gitflow_analytics/security/extractors/__init__.py +7 -0
  28. gitflow_analytics/security/extractors/dependency_checker.py +379 -0
  29. gitflow_analytics/security/extractors/secret_detector.py +197 -0
  30. gitflow_analytics/security/extractors/vulnerability_scanner.py +333 -0
  31. gitflow_analytics/security/llm_analyzer.py +347 -0
  32. gitflow_analytics/security/reports/__init__.py +5 -0
  33. gitflow_analytics/security/reports/security_report.py +358 -0
  34. gitflow_analytics/security/security_analyzer.py +414 -0
  35. gitflow_analytics/tui/app.py +3 -1
  36. gitflow_analytics/tui/progress_adapter.py +313 -0
  37. gitflow_analytics/tui/screens/analysis_progress_screen.py +407 -46
  38. gitflow_analytics/tui/screens/results_screen.py +219 -206
  39. gitflow_analytics/ui/__init__.py +21 -0
  40. gitflow_analytics/ui/progress_display.py +1477 -0
  41. gitflow_analytics/verify_activity.py +697 -0
  42. {gitflow_analytics-1.3.6.dist-info → gitflow_analytics-3.3.0.dist-info}/METADATA +2 -1
  43. {gitflow_analytics-1.3.6.dist-info → gitflow_analytics-3.3.0.dist-info}/RECORD +47 -31
  44. gitflow_analytics/cli_rich.py +0 -503
  45. {gitflow_analytics-1.3.6.dist-info → gitflow_analytics-3.3.0.dist-info}/WHEEL +0 -0
  46. {gitflow_analytics-1.3.6.dist-info → gitflow_analytics-3.3.0.dist-info}/entry_points.txt +0 -0
  47. {gitflow_analytics-1.3.6.dist-info → gitflow_analytics-3.3.0.dist-info}/licenses/LICENSE +0 -0
  48. {gitflow_analytics-1.3.6.dist-info → gitflow_analytics-3.3.0.dist-info}/top_level.txt +0 -0
gitflow_analytics/verify_activity.py (new file)
@@ -0,0 +1,697 @@
+ """Project activity verification tool for GitFlow Analytics.
+
+ This module provides functionality to verify day-by-day activity for projects
+ without pulling code, using the GitHub API or local git commands to query metadata.
+ """
+
+ import logging
+ from collections import defaultdict
+ from datetime import datetime, timedelta, timezone
+ from pathlib import Path
+ from typing import Any, Dict, List, Optional
+
+ import click
+ import git
+ from github import Github
+ from github.GithubException import GithubException, RateLimitExceededException
+ from tabulate import tabulate
+ from tqdm import tqdm
+
+ from .config import Config, ConfigLoader
+
+ logger = logging.getLogger(__name__)
+
+
+ class ActivityVerifier:
+     """Verify project activity without pulling code."""
+
+     def __init__(self, config: Config, weeks: int = 4, config_dir: Optional[Path] = None):
+         """Initialize the activity verifier.
+
+         Args:
+             config: Configuration object
+             weeks: Number of weeks to analyze
+             config_dir: Directory containing the config file (for resolving relative paths)
+         """
+         self.config = config
+         self.weeks = weeks
+         self.config_dir = config_dir or Path.cwd()
+         self.end_date = datetime.now(timezone.utc).replace(
+             hour=23, minute=59, second=59, microsecond=999999
+         )
+         self.start_date = (self.end_date - timedelta(weeks=weeks)).replace(
+             hour=0, minute=0, second=0, microsecond=0
+         )
+
+         # Initialize GitHub client if configured
+         self.github_client = None
+         if config.github and config.github.token:
+             self.github_client = Github(config.github.token)
+
+     def verify_all_projects(self) -> Dict[str, Any]:
+         """Verify activity for all configured projects.
+
+         Returns:
+             Dictionary containing activity data for all projects
+         """
+         results = {
+             "period": {
+                 "start": self.start_date.isoformat(),
+                 "end": self.end_date.isoformat(),
+                 "weeks": self.weeks,
+             },
+             "projects": {},
+             "daily_matrix": self._initialize_daily_matrix(),
+             "summary": {
+                 "total_commits": 0,
+                 "total_branches": 0,
+                 "active_days": set(),
+                 "inactive_projects": [],
+             },
+         }
+
+         # Get list of repositories to analyze
+         repositories = self._get_repositories()
+
+         if not repositories:
+             logger.warning("No repositories found to analyze")
+             return results
+
+         # Analyze each repository
+         with tqdm(repositories, desc="Analyzing repositories", unit="repo") as pbar:
+             for repo_info in pbar:
+                 pbar.set_description(f"Analyzing {repo_info['name']}")
+
+                 try:
+                     project_data = self._verify_project_activity(repo_info)
+                     results["projects"][repo_info["name"]] = project_data
+
+                     # Update summary statistics
+                     results["summary"]["total_commits"] += project_data["total_commits"]
+                     results["summary"]["total_branches"] += len(project_data["branches"])
+
+                     # Update daily matrix
+                     self._update_daily_matrix(
+                         results["daily_matrix"], repo_info["name"], project_data["daily_commits"]
+                     )
+
+                     # Track active days
+                     for date_str in project_data["daily_commits"]:
+                         if project_data["daily_commits"][date_str] > 0:
+                             results["summary"]["active_days"].add(date_str)
+
+                     # Check if project is inactive
+                     if project_data["total_commits"] == 0:
+                         results["summary"]["inactive_projects"].append(repo_info["name"])
+
+                 except Exception as e:
+                     logger.error(f"Error analyzing {repo_info['name']}: {e}")
+                     results["projects"][repo_info["name"]] = {
+                         "error": str(e),
+                         "total_commits": 0,
+                         "branches": [],
+                         "daily_commits": {},
+                     }
+
+         return results
+
+     def _get_repositories(self) -> List[Dict[str, Any]]:
+         """Get list of repositories to analyze.
+
+         Returns:
+             List of repository information dictionaries
+         """
+         repositories = []
+
+         # Add explicitly configured repositories
+         if self.config.repositories:
+             # Handle both list and dict formats
+             if isinstance(self.config.repositories, list):
+                 for repo_config in self.config.repositories:
+                     # Handle RepositoryConfig objects
+                     if hasattr(repo_config, "name"):
+                         repo_name = repo_config.name
+                         repo_path = repo_config.path if hasattr(repo_config, "path") else None
+                         github_repo = (
+                             repo_config.github_repo if hasattr(repo_config, "github_repo") else None
+                         )
+                     # Handle dict format
+                     else:
+                         repo_name = repo_config.get("name", "")
+                         repo_path = repo_config.get("path", None)
+                         github_repo = repo_config.get("github_repo", None)
+
+                     # Resolve relative paths relative to config directory
+                     if repo_path:
+                         path = Path(repo_path)
+                         if not path.is_absolute():
+                             path = self.config_dir / path
+                         path = path.resolve()
+                     else:
+                         path = None
+
+                     repo_info = {
+                         "name": repo_name,
+                         "path": path,
+                         "is_local": True,
+                         "github_name": github_repo,
+                     }
+
+                     # Check if it's a GitHub repo
+                     if github_repo:
+                         repo_info["github_name"] = github_repo
+                         repo_info["is_local"] = bool(path)  # Could be both local and GitHub
+                     elif self.github_client and "/" in repo_name:
+                         repo_info["github_name"] = repo_name
+                         repo_info["is_local"] = bool(path)
+
+                     repositories.append(repo_info)
+             elif isinstance(self.config.repositories, dict):
+                 for repo_key, repo_config in self.config.repositories.items():
+                     # Handle RepositoryConfig objects
+                     if hasattr(repo_config, "path"):
+                         repo_path = repo_config.path
+                         github_repo = (
+                             repo_config.github_repo if hasattr(repo_config, "github_repo") else None
+                         )
+                     else:
+                         repo_path = None
+                         github_repo = None
+
+                     # Resolve relative paths relative to config directory
+                     if repo_path:
+                         path = Path(repo_path)
+                         if not path.is_absolute():
+                             path = self.config_dir / path
+                         path = path.resolve()
+                     else:
+                         path = None
+
+                     repo_info = {
+                         "name": repo_key,
+                         "path": path,
+                         "is_local": True,
+                         "github_name": github_repo,
+                     }
+
+                     # Check if it's a GitHub repo
+                     if github_repo:
+                         repo_info["github_name"] = github_repo
+                         repo_info["is_local"] = bool(path)
+                     elif self.github_client and "/" in repo_key:
+                         repo_info["github_name"] = repo_key
+                         repo_info["is_local"] = bool(path)
+
+                     repositories.append(repo_info)
+
+         # Add GitHub organization repositories if configured
+         if self.config.github and self.config.github.organization and self.github_client:
+             try:
+                 org = self.github_client.get_organization(self.config.github.organization)
+                 for repo in org.get_repos(type="all"):
+                     if not repo.archived:
+                         # Check if not already added
+                         if not any(r["name"] == repo.full_name for r in repositories):
+                             repositories.append(
+                                 {
+                                     "name": repo.full_name,
+                                     "path": None,
+                                     "is_local": False,
+                                     "github_name": repo.full_name,
+                                 }
+                             )
+             except GithubException as e:
+                 logger.error(f"Error fetching organization repos: {e}")
+
+         return repositories
+
+     def _verify_project_activity(self, repo_info: Dict[str, Any]) -> Dict[str, Any]:
+         """Verify activity for a single project.
+
+         Args:
+             repo_info: Repository information dictionary
+
+         Returns:
+             Dictionary containing project activity data
+         """
+         if repo_info.get("github_name") and self.github_client:
+             return self._verify_github_activity(repo_info["github_name"])
+         elif repo_info.get("path"):
+             return self._verify_local_activity(repo_info["path"])
+         else:
+             raise ValueError(f"No valid path or GitHub name for repository {repo_info['name']}")
+
+     def _verify_github_activity(self, repo_name: str) -> Dict[str, Any]:
+         """Verify activity for a GitHub repository using the API.
+
+         Args:
+             repo_name: Full repository name (owner/repo)
+
+         Returns:
+             Dictionary containing activity data
+         """
+         result = {
+             "total_commits": 0,
+             "branches": [],
+             "daily_commits": defaultdict(int),
+             "last_activity": None,
+         }
+
+         try:
+             repo = self.github_client.get_repo(repo_name)
+
+             # Get branches with their last activity
+             branches_data = []
+             for branch in repo.get_branches():
+                 try:
+                     commit = branch.commit
+                     commit_date = commit.commit.author.date.replace(tzinfo=timezone.utc)
+                     branches_data.append(
+                         {
+                             "name": branch.name,
+                             "last_activity": commit_date.isoformat(),
+                             "sha": commit.sha[:8],
+                         }
+                     )
+
+                     # Update last activity
+                     if not result["last_activity"] or commit_date > datetime.fromisoformat(
+                         result["last_activity"]
+                     ):
+                         result["last_activity"] = commit_date.isoformat()
+                 except Exception as e:
+                     logger.debug(f"Error processing branch {branch.name}: {e}")
+
+             result["branches"] = sorted(
+                 branches_data, key=lambda x: x["last_activity"], reverse=True
+             )
+
+             # Get commits in the date range
+             # We'll fetch commits from all branches to get complete activity
+             seen_shas = set()
+
+             for branch in repo.get_branches():
+                 try:
+                     commits = repo.get_commits(
+                         sha=branch.name, since=self.start_date, until=self.end_date
+                     )
+
+                     for commit in commits:
+                         if commit.sha not in seen_shas:
+                             seen_shas.add(commit.sha)
+                             commit_date = commit.commit.author.date.replace(tzinfo=timezone.utc)
+
+                             # Only count commits within our date range
+                             if self.start_date <= commit_date <= self.end_date:
+                                 date_str = commit_date.strftime("%Y-%m-%d")
+                                 result["daily_commits"][date_str] += 1
+                                 result["total_commits"] += 1
+
+                 except RateLimitExceededException:
+                     logger.warning(f"Rate limit reached while fetching commits for {repo_name}")
+                     break
+                 except Exception as e:
+                     logger.debug(f"Error fetching commits from branch {branch.name}: {e}")
+
+             # Ensure all dates are present in daily_commits
+             current_date = self.start_date
+             while current_date <= self.end_date:
+                 date_str = current_date.strftime("%Y-%m-%d")
+                 if date_str not in result["daily_commits"]:
+                     result["daily_commits"][date_str] = 0
+                 current_date += timedelta(days=1)
+
+         except Exception as e:
+             logger.error(f"Error verifying GitHub activity for {repo_name}: {e}")
+             raise
+
+         return result
+
+     def _verify_local_activity(self, repo_path: Path) -> Dict[str, Any]:
+         """Verify activity for a local Git repository.
+
+         Args:
+             repo_path: Path to the local repository
+
+         Returns:
+             Dictionary containing activity data
+         """
+         result = {
+             "total_commits": 0,
+             "branches": [],
+             "daily_commits": defaultdict(int),
+             "last_activity": None,
+         }
+
+         try:
+             repo = git.Repo(repo_path)
+
+             # Get all branches (local and remote)
+             branches_data = []
+
+             # Local branches
+             for branch in repo.heads:
+                 try:
+                     commit = branch.commit
+                     commit_date = datetime.fromtimestamp(commit.committed_date, tz=timezone.utc)
+                     branches_data.append(
+                         {
+                             "name": branch.name,
+                             "last_activity": commit_date.isoformat(),
+                             "sha": commit.hexsha[:8],
+                             "type": "local",
+                         }
+                     )
+
+                     # Update last activity
+                     if not result["last_activity"] or commit_date > datetime.fromisoformat(
+                         result["last_activity"]
+                     ):
+                         result["last_activity"] = commit_date.isoformat()
+                 except Exception as e:
+                     logger.debug(f"Error processing branch {branch.name}: {e}")
+
+             # Remote branches
+             for remote in repo.remotes:
+                 try:
+                     remote.fetch(prune=True, dry_run=True)  # Dry-run fetch: probe the remote without updating local refs
+                     for ref in remote.refs:
+                         if not ref.name.endswith("/HEAD"):
+                             try:
+                                 commit = ref.commit
+                                 commit_date = datetime.fromtimestamp(
+                                     commit.committed_date, tz=timezone.utc
+                                 )
+                                 branches_data.append(
+                                     {
+                                         "name": ref.name,
+                                         "last_activity": commit_date.isoformat(),
+                                         "sha": commit.hexsha[:8],
+                                         "type": "remote",
+                                     }
+                                 )
+                             except Exception as e:
+                                 logger.debug(f"Error processing remote branch {ref.name}: {e}")
+                 except Exception as e:
+                     logger.debug(f"Error fetching remote {remote.name}: {e}")
+
+             result["branches"] = sorted(
+                 branches_data, key=lambda x: x["last_activity"], reverse=True
+             )
+
+             # Get commits in the date range from all branches
+             seen_shas = set()
+
+             # Analyze all branches
+             all_refs = list(repo.heads) + [
+                 ref
+                 for remote in repo.remotes
+                 for ref in remote.refs
+                 if not ref.name.endswith("/HEAD")
+             ]
+
+             for ref in all_refs:
+                 try:
+                     # Use git log with date filtering
+                     commits = list(
+                         repo.iter_commits(
+                             ref.name,
+                             since=self.start_date.strftime("%Y-%m-%d"),
+                             until=self.end_date.strftime("%Y-%m-%d"),
+                         )
+                     )
+
+                     for commit in commits:
+                         if commit.hexsha not in seen_shas:
+                             seen_shas.add(commit.hexsha)
+                             commit_date = datetime.fromtimestamp(
+                                 commit.committed_date, tz=timezone.utc
+                             )
+
+                             # Only count commits within our date range
+                             if self.start_date <= commit_date <= self.end_date:
+                                 date_str = commit_date.strftime("%Y-%m-%d")
+                                 result["daily_commits"][date_str] += 1
+                                 result["total_commits"] += 1
+
+                 except Exception as e:
+                     logger.debug(f"Error processing commits from {ref.name}: {e}")
+
+             # Ensure all dates are present in daily_commits
+             current_date = self.start_date
+             while current_date <= self.end_date:
+                 date_str = current_date.strftime("%Y-%m-%d")
+                 if date_str not in result["daily_commits"]:
+                     result["daily_commits"][date_str] = 0
+                 current_date += timedelta(days=1)
+
+         except Exception as e:
+             logger.error(f"Error verifying local activity for {repo_path}: {e}")
+             raise
+
+         return result
+
+     def _initialize_daily_matrix(self) -> Dict[str, Dict[str, int]]:
+         """Initialize the daily activity matrix structure.
+
+         Returns:
+             Dictionary with dates as keys and empty project dictionaries
+         """
+         matrix = {}
+         current_date = self.start_date
+
+         while current_date <= self.end_date:
+             date_str = current_date.strftime("%Y-%m-%d")
+             matrix[date_str] = {}
+             current_date += timedelta(days=1)
+
+         return matrix
+
+     def _update_daily_matrix(
+         self, matrix: Dict[str, Dict[str, int]], project_name: str, daily_commits: Dict[str, int]
+     ) -> None:
+         """Update the daily matrix with project commit data.
+
+         Args:
+             matrix: Daily matrix to update
+             project_name: Name of the project
+             daily_commits: Daily commit counts for the project
+         """
+         for date_str, count in daily_commits.items():
+             if date_str in matrix:
+                 matrix[date_str][project_name] = count
+
+     def format_report(self, results: Dict[str, Any]) -> str:
+         """Format the verification results as a readable report.
+
+         Args:
+             results: Verification results dictionary
+
+         Returns:
+             Formatted report string
+         """
+         lines = []
+
+         # Header
+         lines.append("=" * 80)
+         lines.append("Activity Verification Report")
+         lines.append("=" * 80)
+         lines.append(
+             f"Period: {results['period']['start'][:10]} to {results['period']['end'][:10]}"
+         )
+         lines.append(f"Analysis duration: {results['period']['weeks']} weeks")
+         lines.append("")
+
+         # Summary statistics
+         lines.append("Summary Statistics:")
+         lines.append("-" * 40)
+         lines.append(f"Total commits: {results['summary']['total_commits']}")
+         lines.append(f"Total branches: {results['summary']['total_branches']}")
+         lines.append(f"Active days: {len(results['summary']['active_days'])}")
+         lines.append(f"Inactive projects: {len(results['summary']['inactive_projects'])}")
+
+         if results["summary"]["inactive_projects"]:
+             lines.append(
+                 f"  Projects with no activity: {', '.join(results['summary']['inactive_projects'])}"
+             )
+         lines.append("")
+
+         # Daily Activity Matrix
+         lines.append("Daily Activity Matrix:")
+         lines.append("-" * 40)
+
+         # Prepare matrix data for tabulation
+         matrix_data = []
+         dates = sorted(results["daily_matrix"].keys())
+         projects = sorted(
+             set(
+                 project
+                 for date_data in results["daily_matrix"].values()
+                 for project in date_data.keys()
+             )
+         )
+
+         if projects and dates:
+             # Create condensed view - show week summaries
+             week_data = defaultdict(lambda: defaultdict(int))
+             week_starts = []
+
+             for date_str in dates:
+                 date = datetime.strptime(date_str, "%Y-%m-%d")
+                 week_start = date - timedelta(days=date.weekday())
+                 week_key = week_start.strftime("%m/%d")
+
+                 if week_key not in week_starts:
+                     week_starts.append(week_key)
+
+                 for project in projects:
+                     count = results["daily_matrix"][date_str].get(project, 0)
+                     week_data[project][week_key] += count
+
+             # Build table rows
+             headers = ["Project"] + week_starts + ["Total"]
+
+             for project in projects:
+                 row = [project[:20]]  # Truncate long project names
+                 total = 0
+                 for week in week_starts:
+                     count = week_data[project].get(week, 0)
+                     total += count
+                     # Use symbols for readability
+                     if count == 0:
+                         row.append("-")
+                     elif count < 10:
+                         row.append(str(count))
+                     else:
+                         row.append(f"{count}+")
+                 row.append(str(total))
+                 matrix_data.append(row)
+
+             # Add summary row
+             summary_row = ["TOTAL"]
+             grand_total = 0
+             for week in week_starts:
+                 week_total = sum(week_data[p].get(week, 0) for p in projects)
+                 grand_total += week_total
+                 if week_total == 0:
+                     summary_row.append("-")
+                 else:
+                     summary_row.append(str(week_total))
+             summary_row.append(str(grand_total))
+             matrix_data.append(summary_row)
+
+             lines.append(tabulate(matrix_data, headers=headers, tablefmt="grid"))
+         else:
+             lines.append("No activity data available")
+
+         lines.append("")
+
+         # Branch Summary for each project
+         lines.append("Branch Summary by Project:")
+         lines.append("-" * 40)
+
+         for project_name, project_data in sorted(results["projects"].items()):
+             lines.append(f"\n{project_name}:")
+
+             if "error" in project_data:
+                 lines.append(f"  ERROR: {project_data['error']}")
+                 continue
+
+             lines.append(f"  Total commits: {project_data['total_commits']}")
+
+             if project_data.get("last_activity"):
+                 lines.append(f"  Last activity: {project_data['last_activity'][:10]}")
+
+             if project_data.get("branches"):
+                 lines.append(f"  Branches ({len(project_data['branches'])}):")
+                 # Show top 5 most recently active branches
+                 for branch in project_data["branches"][:5]:
+                     branch_type = f" [{branch.get('type', 'unknown')}]" if "type" in branch else ""
+                     lines.append(
+                         f"    - {branch['name']}{branch_type}: "
+                         f"last activity {branch['last_activity'][:10]} "
+                         f"({branch['sha']})"
+                     )
+                 if len(project_data["branches"]) > 5:
+                     lines.append(f"    ... and {len(project_data['branches']) - 5} more branches")
+             else:
+                 lines.append("  No branches found")
+
+         lines.append("")
+
+         # Days with zero activity
+         lines.append("Days with Zero Activity:")
+         lines.append("-" * 40)
+
+         zero_activity_days = []
+         for date_str in sorted(results["daily_matrix"].keys()):
+             total_commits = (
+                 sum(results["daily_matrix"][date_str].get(p, 0) for p in projects)
+                 if projects
+                 else 0
+             )
+
+             if total_commits == 0:
+                 date = datetime.strptime(date_str, "%Y-%m-%d")
+                 zero_activity_days.append(date.strftime("%a %Y-%m-%d"))
+
+         if zero_activity_days:
+             # Print the days in groups of seven per line
+             lines.append(f"Found {len(zero_activity_days)} days with no activity:")
+             for i in range(0, len(zero_activity_days), 7):
+                 lines.append(f"  {', '.join(zero_activity_days[i:i+7])}")
+         else:
+             lines.append("No days with zero activity found!")
+
+         lines.append("")
+         lines.append("=" * 80)
+
+         return "\n".join(lines)
+
+
+ def verify_activity_command(
+     config_path: Path, weeks: int, output_path: Optional[Path] = None
+ ) -> None:
+     """Run the activity verification command.
+
+     Args:
+         config_path: Path to configuration file
+         weeks: Number of weeks to analyze
+         output_path: Optional path to save the report
+     """
+     # Load configuration
+     click.echo(f"Loading configuration from {config_path}...")
+     config = ConfigLoader.load(config_path)
+
+     # Store config directory for resolving relative paths
+     config_dir = config_path.parent
+
+     # Create verifier
+     verifier = ActivityVerifier(config, weeks, config_dir)
+
+     # Run verification
+     click.echo(f"Verifying activity for the last {weeks} weeks...")
+     results = verifier.verify_all_projects()
+
+     # Format and display report
+     report = verifier.format_report(results)
+
+     # Output to console
+     click.echo(report)
+
+     # Save to file if requested
+     if output_path:
+         output_path.write_text(report)
+         click.echo(f"\nReport saved to: {output_path}")
+
+     # Highlight any issues found
+     if results["summary"]["inactive_projects"]:
+         click.echo("\n⚠️ WARNING: Found projects with no activity!")
+         for project in results["summary"]["inactive_projects"]:
+             click.echo(f"  - {project}")
+
+     zero_days = len(
+         [d for d in results["daily_matrix"].keys() if sum(results["daily_matrix"][d].values()) == 0]
+     )
+     if zero_days > 0:
+         click.echo(f"\n⚠️ WARNING: Found {zero_days} days with zero activity across all projects!")
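
For orientation, here is a minimal sketch (not part of the release) of driving the new module programmatically. It uses only names visible in this diff (ConfigLoader.load, ActivityVerifier, verify_all_projects, format_report); the config file path is hypothetical.

    from pathlib import Path

    from gitflow_analytics.config import ConfigLoader
    from gitflow_analytics.verify_activity import ActivityVerifier

    config_path = Path("gitflow-config.yaml")  # hypothetical config location
    config = ConfigLoader.load(config_path)

    # Analyze the last 4 weeks; relative repository paths are resolved
    # against the directory containing the config file.
    verifier = ActivityVerifier(config, weeks=4, config_dir=config_path.parent)
    results = verifier.verify_all_projects()  # keys: "period", "projects", "daily_matrix", "summary"
    print(verifier.format_report(results))

The verify_activity_command helper added above wraps this same flow, optionally writing the report to a file and echoing warnings for inactive projects and zero-activity days.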