gitflow-analytics 1.3.11__py3-none-any.whl → 3.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. gitflow_analytics/_version.py +1 -1
  2. gitflow_analytics/classification/batch_classifier.py +156 -4
  3. gitflow_analytics/cli.py +803 -135
  4. gitflow_analytics/config/loader.py +39 -1
  5. gitflow_analytics/config/schema.py +1 -0
  6. gitflow_analytics/core/cache.py +20 -0
  7. gitflow_analytics/core/data_fetcher.py +1051 -117
  8. gitflow_analytics/core/git_auth.py +169 -0
  9. gitflow_analytics/core/git_timeout_wrapper.py +347 -0
  10. gitflow_analytics/core/metrics_storage.py +12 -3
  11. gitflow_analytics/core/progress.py +219 -18
  12. gitflow_analytics/core/subprocess_git.py +145 -0
  13. gitflow_analytics/extractors/ml_tickets.py +3 -2
  14. gitflow_analytics/extractors/tickets.py +93 -8
  15. gitflow_analytics/integrations/jira_integration.py +1 -1
  16. gitflow_analytics/integrations/orchestrator.py +47 -29
  17. gitflow_analytics/metrics/branch_health.py +3 -2
  18. gitflow_analytics/models/database.py +72 -1
  19. gitflow_analytics/pm_framework/adapters/jira_adapter.py +12 -5
  20. gitflow_analytics/pm_framework/orchestrator.py +8 -3
  21. gitflow_analytics/qualitative/classifiers/llm/openai_client.py +24 -4
  22. gitflow_analytics/qualitative/classifiers/llm_commit_classifier.py +3 -1
  23. gitflow_analytics/qualitative/core/llm_fallback.py +34 -2
  24. gitflow_analytics/reports/narrative_writer.py +118 -74
  25. gitflow_analytics/security/__init__.py +11 -0
  26. gitflow_analytics/security/config.py +189 -0
  27. gitflow_analytics/security/extractors/__init__.py +7 -0
  28. gitflow_analytics/security/extractors/dependency_checker.py +379 -0
  29. gitflow_analytics/security/extractors/secret_detector.py +197 -0
  30. gitflow_analytics/security/extractors/vulnerability_scanner.py +333 -0
  31. gitflow_analytics/security/llm_analyzer.py +347 -0
  32. gitflow_analytics/security/reports/__init__.py +5 -0
  33. gitflow_analytics/security/reports/security_report.py +358 -0
  34. gitflow_analytics/security/security_analyzer.py +414 -0
  35. gitflow_analytics/tui/app.py +3 -1
  36. gitflow_analytics/tui/progress_adapter.py +313 -0
  37. gitflow_analytics/tui/screens/analysis_progress_screen.py +407 -46
  38. gitflow_analytics/tui/screens/results_screen.py +219 -206
  39. gitflow_analytics/ui/__init__.py +21 -0
  40. gitflow_analytics/ui/progress_display.py +1477 -0
  41. gitflow_analytics/verify_activity.py +697 -0
  42. {gitflow_analytics-1.3.11.dist-info → gitflow_analytics-3.3.0.dist-info}/METADATA +2 -1
  43. {gitflow_analytics-1.3.11.dist-info → gitflow_analytics-3.3.0.dist-info}/RECORD +47 -31
  44. gitflow_analytics/cli_rich.py +0 -503
  45. {gitflow_analytics-1.3.11.dist-info → gitflow_analytics-3.3.0.dist-info}/WHEEL +0 -0
  46. {gitflow_analytics-1.3.11.dist-info → gitflow_analytics-3.3.0.dist-info}/entry_points.txt +0 -0
  47. {gitflow_analytics-1.3.11.dist-info → gitflow_analytics-3.3.0.dist-info}/licenses/LICENSE +0 -0
  48. {gitflow_analytics-1.3.11.dist-info → gitflow_analytics-3.3.0.dist-info}/top_level.txt +0 -0
gitflow_analytics/tui/screens/analysis_progress_screen.py
@@ -2,22 +2,24 @@
 
 import asyncio
 import time
+from concurrent.futures import ThreadPoolExecutor
 from datetime import datetime, timedelta, timezone
 from pathlib import Path
 from typing import Any, Optional
 
-from rich.pretty import Pretty
 from textual.binding import Binding
 from textual.containers import Container, Vertical
 from textual.screen import Screen
-from textual.widgets import Footer, Header, Label, Log
+from textual.widgets import Footer, Header, Label, Log, Static
 
 from gitflow_analytics.config import Config
+from gitflow_analytics.core import progress as core_progress
 from gitflow_analytics.core.analyzer import GitAnalyzer
 from gitflow_analytics.core.cache import GitAnalysisCache
 from gitflow_analytics.core.identity import DeveloperIdentityResolver
 from gitflow_analytics.integrations.orchestrator import IntegrationOrchestrator
 
+from ..progress_adapter import TUIProgressService
 from ..widgets.progress_widget import AnalysisProgressWidget
 
 
@@ -56,6 +58,8 @@ class AnalysisProgressScreen(Screen):
         self.analysis_task: Optional[asyncio.Task] = None
         self.analysis_results = {}
         self.start_time = time.time()
+        self.progress_service = None  # Will be initialized on mount
+        self.executor: Optional[ThreadPoolExecutor] = None  # Managed executor for cleanup
 
     def compose(self):
         """Compose the analysis progress screen."""
@@ -82,7 +86,7 @@ class AnalysisProgressScreen(Screen):
         # Live statistics panel
         with Container(classes="stats-panel"):
             yield Label("Live Statistics", classes="panel-title")
-            yield Pretty({}, id="live-stats")
+            yield Static("No statistics yet...", id="live-stats")
 
         # Analysis log
         with Container(classes="log-panel"):
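
Note: swapping rich's `Pretty` for textual's `Static` means the stats dict must be pre-formatted into a string before `update()` is called. A small sketch of the formatting this diff settles on in `_update_live_stats` (the sample dict is made up):

    # One bullet per key; snake_case keys become title-cased labels.
    stats = {"total_commits": 128, "current_repo": "gitflow-analytics"}
    stats_text = "\n".join(
        f"• {key.replace('_', ' ').title()}: {value}" for key, value in stats.items()
    )
    # stats_text == "• Total Commits: 128\n• Current Repo: gitflow-analytics"
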
@@ -93,7 +97,37 @@ class AnalysisProgressScreen(Screen):
 
     def on_mount(self) -> None:
         """Start analysis when screen mounts."""
-        self.analysis_task = asyncio.create_task(self._run_analysis())
+        # Initialize progress service for TUI
+        self.progress_service = TUIProgressService(asyncio.get_event_loop())
+        self.analysis_task = asyncio.create_task(self._run_analysis_wrapper())
+
+    def on_unmount(self) -> None:
+        """Cleanup when screen unmounts."""
+        # Cancel the analysis task if it's still running
+        if self.analysis_task and not self.analysis_task.done():
+            self.analysis_task.cancel()
+            # Don't wait for cancellation to complete to avoid blocking
+
+        # Shutdown the executor to cleanup threads immediately
+        if self.executor:
+            self.executor.shutdown(wait=False)
+            self.executor = None
+
+    async def _run_analysis_wrapper(self) -> None:
+        """Wrapper for analysis that handles cancellation gracefully."""
+        try:
+            await self._run_analysis()
+        except asyncio.CancelledError:
+            # Silently handle cancellation - this is expected during shutdown
+            pass
+        except Exception as e:
+            # Log unexpected errors if the app is still running
+            if self.app and self.app.is_running:
+                try:
+                    log = self.query_one("#analysis-log", Log)
+                    log.write_line(f"❌ Unexpected error: {e}")
+                except Exception:
+                    pass
 
     async def _run_analysis(self) -> None:
         """
@@ -168,12 +202,26 @@ class AnalysisProgressScreen(Screen):
             )
 
         except asyncio.CancelledError:
-            log.write_line("❌ Analysis cancelled by user")
-            overall_progress.update_progress(0, "Cancelled")
+            # Check if the app is still running before updating UI
+            if self.app and self.app.is_running:
+                try:
+                    log.write_line("❌ Analysis cancelled by user")
+                    overall_progress.update_progress(0, "Cancelled")
+                except Exception:
+                    # Silently ignore if we can't update the UI
+                    pass
+            # Re-raise for the wrapper to handle
+            raise
         except Exception as e:
-            log.write_line(f"❌ Analysis failed: {e}")
-            overall_progress.update_progress(0, f"Error: {str(e)[:50]}...")
-            self.notify(f"Analysis failed: {e}", severity="error")
+            # Check if the app is still running before updating UI
+            if self.app and self.app.is_running:
+                try:
+                    log.write_line(f"❌ Analysis failed: {e}")
+                    overall_progress.update_progress(0, f"Error: {str(e)[:50]}...")
+                    self.notify(f"Analysis failed: {e}", severity="error")
+                except Exception:
+                    # Silently ignore if we can't update the UI
+                    pass
 
     async def _initialize_components(self, log: Log) -> None:
         """Initialize analysis components."""
@@ -191,12 +239,20 @@ class AnalysisProgressScreen(Screen):
         )
 
         log.write_line("🔍 Initializing analyzer...")
+
+        # Enable branch analysis with progress logging for TUI
+        branch_analysis_config = {
+            "enable_progress_logging": True,
+            "strategy": "all",
+        }
+
         self.analyzer = GitAnalyzer(
             self.cache,
             branch_mapping_rules=self.config.analysis.branch_mapping_rules,
             allowed_ticket_platforms=getattr(self.config.analysis, "ticket_platforms", None),
             exclude_paths=self.config.analysis.exclude_paths,
             story_point_patterns=self.config.analysis.story_point_patterns,
+            branch_analysis_config=branch_analysis_config,
         )
 
         log.write_line("🔗 Initializing integrations...")
@@ -242,7 +298,10 @@ class AnalysisProgressScreen(Screen):
 
     async def _analyze_repositories(self, repositories: list, log: Log) -> tuple:
         """Analyze all repositories and return commits and PRs."""
+        # Import progress module at the top of the function
+
         repo_progress = self.query_one("#repo-progress", AnalysisProgressWidget)
+        overall_progress = self.query_one("#overall-progress", AnalysisProgressWidget)
 
         all_commits = []
         all_prs = []
@@ -251,53 +310,177 @@ class AnalysisProgressScreen(Screen):
         end_date = datetime.now(timezone.utc)
         start_date = end_date - timedelta(weeks=self.weeks)
 
-        for i, repo_config in enumerate(repositories):
-            progress = (i / len(repositories)) * 100
-            repo_progress.update_progress(progress, f"Analyzing {repo_config.name}...")
+        # Create progress adapter for repository analysis
+        repo_adapter = self.progress_service.create_adapter("repo", repo_progress)
 
-            log.write_line(f"📁 Analyzing {repo_config.name}...")
+        # Set initial stats for the adapter
+        repo_adapter.processing_stats["total"] = len(repositories)
 
-            try:
-                # Clone repository if needed
-                if not repo_config.path.exists() and repo_config.github_repo:
-                    log.write_line(f"   📥 Cloning {repo_config.github_repo}...")
-                    await self._clone_repository(repo_config, log)
-
-                # Analyze commits
-                commits = self.analyzer.analyze_repository(
-                    repo_config.path, start_date, repo_config.branch
-                )
+        # Temporarily replace the global progress service with our adapter
+        original_progress_service = core_progress._progress_service
+        core_progress._progress_service = repo_adapter
 
-                # Add project key and resolve identities
-                for commit in commits:
-                    commit["project_key"] = repo_config.project_key or commit.get(
-                        "inferred_project", "UNKNOWN"
-                    )
-                    commit["canonical_id"] = self.identity_resolver.resolve_developer(
-                        commit["author_name"], commit["author_email"]
-                    )
+        total_repos = len(repositories)
+
+        # Clone repositories that don't exist locally first
+        for repo_config in repositories:
+            if not repo_config.path.exists() and repo_config.github_repo:
+                log.write_line(f"   📥 Cloning {repo_config.github_repo}...")
+                await self._clone_repository(repo_config, log)
 
-                all_commits.extend(commits)
-                log.write_line(f"   ✅ Found {len(commits)} commits")
+        # Check if we should use async processing (for multiple repositories)
+        # We use async processing for 2+ repositories to keep the UI responsive
+        use_async = len(repositories) > 1
 
-                # Update live stats
-                await self._update_live_stats(
+        if use_async:
+            log.write_line(f"🚀 Starting async analysis of {len(repositories)} repositories...")
+
+            # Import data fetcher for parallel processing
+            from gitflow_analytics.core.data_fetcher import GitDataFetcher
+            from gitflow_analytics.tui.progress_adapter import TUIProgressAdapter
+
+            # Create and set up progress adapter for parallel processing
+            tui_progress_adapter = TUIProgressAdapter(repo_progress)
+            tui_progress_adapter.set_event_loop(asyncio.get_event_loop())
+
+            # Replace the global progress service so parallel processing can use it
+            # We'll restore the original one after processing
+            core_progress._progress_service = tui_progress_adapter
+
+            # Create data fetcher
+            # Use skip_remote_fetch=True when analyzing already-cloned repositories
+            # to avoid authentication issues with expired tokens
+            data_fetcher = GitDataFetcher(cache=self.cache, skip_remote_fetch=True)
+
+            # Prepare repository configurations for parallel processing
+            repo_configs = []
+            for repo_config in repositories:
+                repo_configs.append(
                     {
-                        "repositories_analyzed": i + 1,
-                        "total_repositories": len(repositories),
-                        "total_commits": len(all_commits),
-                        "current_repo": repo_config.name,
+                        "path": str(repo_config.path),
+                        "project_key": repo_config.project_key or repo_config.name,
+                        "branch_patterns": [repo_config.branch] if repo_config.branch else None,
                     }
                 )
 
-                # Small delay to allow UI updates
-                await asyncio.sleep(0.1)
+            # Run parallel processing in executor to avoid blocking
+            loop = asyncio.get_event_loop()
+
+            # Update overall progress
+            overall_progress.update_progress(25, "Running parallel repository analysis...")
+
+            try:
+                # Process repositories asynchronously with yielding for UI updates
+                parallel_results = await self._process_repositories_async(
+                    data_fetcher,
+                    repo_configs,
+                    start_date,
+                    end_date,
+                    repo_progress,
+                    overall_progress,
+                    log,
+                )
+
+                # Process results
+                for project_key, result in parallel_results["results"].items():
+                    if result and "commits" in result:
+                        commits_data = result["commits"]
+                        # Add project key and resolve identities
+                        for commit in commits_data:
+                            commit["project_key"] = project_key
+                            commit["canonical_id"] = self.identity_resolver.resolve_developer(
+                                commit["author_name"], commit["author_email"]
+                            )
+                        all_commits.extend(commits_data)
+                        log.write_line(f"   ✅ {project_key}: {len(commits_data)} commits")
+
+                # Log final statistics
+                stats = parallel_results.get("statistics", {})
+                log.write_line("\n📊 Analysis Statistics:")
+                log.write_line(f"   Total: {stats.get('total', 0)} repositories")
+                log.write_line(f"   Success: {stats.get('success', 0)} (have commits)")
+                log.write_line(
+                    f"   No Commits: {stats.get('no_commits', 0)} (no activity in period)"
+                )
+                log.write_line(f"   Failed: {stats.get('failed', 0)} (processing errors)")
+                log.write_line(f"   Timeout: {stats.get('timeout', 0)}")
 
             except Exception as e:
-                log.write_line(f"   ❌ Error analyzing {repo_config.name}: {e}")
-                continue
+                log.write_line(f"   ❌ Async processing failed: {e}")
+                log.write_line("   Falling back to sequential processing...")
+                use_async = False
+            finally:
+                # Restore original progress service
+                core_progress._progress_service = original_progress_service
+
+        # Sequential processing fallback or for single repository
+        if not use_async:
+            # Ensure we have an executor for sequential processing
+            if not self.executor:
+                self.executor = ThreadPoolExecutor(max_workers=1)
+
+            for i, repo_config in enumerate(repositories):
+                # Update overall progress based on repository completion
+                overall_pct = 20 + ((i / total_repos) * 30)  # 20-50% range for repo analysis
+                overall_progress.update_progress(
+                    overall_pct, f"Analyzing repositories ({i+1}/{total_repos})..."
+                )
+
+                repo_progress.update_progress(0, f"Analyzing {repo_config.name}...")
+
+                log.write_line(f"📁 Analyzing {repo_config.name}...")
+
+                try:
+                    log.write_line(f"   ⏳ Starting analysis of {repo_config.name}...")
+
+                    # Run repository analysis in a thread to avoid blocking
+                    loop = asyncio.get_event_loop()
+                    commits = await loop.run_in_executor(
+                        (
+                            self.executor if self.executor else None
+                        ),  # Use managed executor if available
+                        self.analyzer.analyze_repository,
+                        repo_config.path,
+                        start_date,
+                        repo_config.branch,
+                    )
+
+                    log.write_line(f"   ✓ Analysis complete for {repo_config.name}")
+
+                    # Add project key and resolve identities
+                    for commit in commits:
+                        commit["project_key"] = repo_config.project_key or commit.get(
+                            "inferred_project", "UNKNOWN"
+                        )
+                        commit["canonical_id"] = self.identity_resolver.resolve_developer(
+                            commit["author_name"], commit["author_email"]
+                        )
+
+                    all_commits.extend(commits)
+                    log.write_line(f"   ✅ Found {len(commits)} commits")
+
+                    # Update live stats
+                    await self._update_live_stats(
+                        {
+                            "repositories_analyzed": i + 1,
+                            "total_repositories": len(repositories),
+                            "total_commits": len(all_commits),
+                            "current_repo": repo_config.name,
+                        }
+                    )
+
+                    # Small delay to allow UI updates
+                    await asyncio.sleep(0.05)  # Reduced delay for more responsive updates
+
+                except Exception as e:
+                    log.write_line(f"   ❌ Error analyzing {repo_config.name}: {e}")
+                    continue
+
+            # Restore original progress service
+            core_progress._progress_service = original_progress_service
 
         repo_progress.complete(f"Completed {len(repositories)} repositories")
+        overall_progress.update_progress(50, f"Analyzed {len(all_commits)} commits")
         return all_commits, all_prs
 
     async def _enrich_with_integrations(self, repositories: list, commits: list, log: Log) -> None:
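
Note: two techniques carry the hunk above. The module-level service in `gitflow_analytics.core.progress` is swapped for a TUI adapter and restored afterwards, and blocking git work runs through `run_in_executor` with a short `asyncio.sleep` between repositories so the event loop can repaint. A condensed sketch of that swap-and-restore shape, with a stand-in `progress` module (illustrative, not the package's actual module contents):

    import asyncio
    from concurrent.futures import ThreadPoolExecutor
    from types import SimpleNamespace

    # Stand-in for gitflow_analytics.core.progress and its module-level service.
    progress = SimpleNamespace(_progress_service=None)

    async def analyze_all(repos, adapter, analyze_one) -> None:
        original = progress._progress_service
        progress._progress_service = adapter  # workers report to the TUI adapter
        executor = ThreadPoolExecutor(max_workers=1)
        loop = asyncio.get_event_loop()
        try:
            for repo in repos:
                # Blocking analysis runs off the event loop...
                await loop.run_in_executor(executor, analyze_one, repo)
                # ...and a tiny sleep yields control so widgets can redraw.
                await asyncio.sleep(0.01)
        finally:
            progress._progress_service = original  # always restore the service
            executor.shutdown(wait=False)
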
@@ -441,6 +624,166 @@ class AnalysisProgressScreen(Screen):
             log.write_line(f"   ❌ Qualitative analysis failed: {e}")
             qual_progress.update_progress(0, f"Error: {str(e)[:30]}...")
 
+    async def _process_repositories_async(
+        self,
+        data_fetcher,
+        repo_configs: list,
+        start_date: datetime,
+        end_date: datetime,
+        repo_progress: AnalysisProgressWidget,
+        overall_progress: AnalysisProgressWidget,
+        log: Log,
+    ) -> dict:
+        """
+        Process repositories asynchronously with proper yielding for UI updates.
+
+        This method processes repositories one at a time but yields control back
+        to the event loop between each repository to allow UI updates.
+        """
+        results = {
+            "results": {},
+            "statistics": {
+                "total": len(repo_configs),
+                "processed": 0,
+                "success": 0,
+                "no_commits": 0,
+                "failed": 0,
+                "timeout": 0,
+            },
+        }
+
+        stats = results["statistics"]
+        loop = asyncio.get_event_loop()
+
+        # Create a managed executor for this analysis
+        if not self.executor:
+            self.executor = ThreadPoolExecutor(max_workers=1)
+
+        for i, repo_config in enumerate(repo_configs):
+            project_key = repo_config["project_key"]
+
+            # Update progress before processing
+            percentage = (i / stats["total"]) * 100
+            repo_progress.update_progress(
+                percentage, f"Processing {project_key} ({i+1}/{stats['total']})..."
+            )
+
+            # Update overall progress
+            overall_percentage = 25 + ((i / stats["total"]) * 25)  # 25-50% range
+            overall_progress.update_progress(
+                overall_percentage, f"Analyzing repository {i+1}/{stats['total']}: {project_key}"
+            )
+
+            log.write_line(f"🔍 Processing {project_key} ({i+1}/{stats['total']})...")
+
+            try:
+                # Run the actual repository processing in a thread to avoid blocking
+                # but await it properly so we can yield between repositories
+                result = await loop.run_in_executor(
+                    self.executor,  # Use managed executor instead of default
+                    self._process_single_repository_sync,
+                    data_fetcher,
+                    repo_config,
+                    self.weeks,
+                    start_date,
+                    end_date,
+                )
+
+                # Check for commits - data fetcher returns 'daily_commits' not 'commits'
+                if result:
+                    # Extract commits from daily_commits structure
+                    daily_commits = result.get("daily_commits", {})
+                    total_commits = result.get("stats", {}).get("total_commits", 0)
+
+                    # Convert daily_commits to flat commits list
+                    commits = []
+                    for date_str, day_commits in daily_commits.items():
+                        commits.extend(day_commits)
+
+                    # Add flattened commits to result for compatibility
+                    result["commits"] = commits
+
+                    if total_commits > 0 or commits:
+                        results["results"][project_key] = result
+                        stats["success"] += 1
+                        log.write_line(f"   ✅ {project_key}: {total_commits} commits")
+                    else:
+                        stats["no_commits"] += 1
+                        log.write_line(f"   ⏸️ {project_key}: No commits in analysis period")
+                else:
+                    stats["failed"] += 1
+                    log.write_line(f"   ❌ {project_key}: Failed to process")
+
+            except Exception as e:
+                stats["failed"] += 1
+                log.write_line(f"   ❌ {project_key}: Error - {str(e)[:50]}...")
+
+            stats["processed"] += 1
+
+            # Update progress after processing
+            percentage = ((i + 1) / stats["total"]) * 100
+            repo_progress.update_progress(
+                percentage, f"Completed {project_key} ({i+1}/{stats['total']})"
+            )
+
+            # Yield control to event loop for UI updates
+            # This is the key to keeping the UI responsive
+            await asyncio.sleep(0.01)
+
+            # Also update live stats
+            await self._update_live_stats(
+                {
+                    "repositories_analyzed": stats["processed"],
+                    "total_repositories": stats["total"],
+                    "successful": stats["success"],
+                    "no_commits": stats["no_commits"],
+                    "failed": stats["failed"],
+                    "current_repo": project_key if i < len(repo_configs) - 1 else "Complete",
+                }
+            )
+
+        # Final progress update
+        repo_progress.complete(f"Processed {stats['total']} repositories")
+
+        # Cleanup executor after processing
+        if self.executor:
+            self.executor.shutdown(wait=False)
+            self.executor = None
+
+        return results
+
+    def _process_single_repository_sync(
+        self,
+        data_fetcher,
+        repo_config: dict,
+        weeks_back: int,
+        start_date: datetime,
+        end_date: datetime,
+    ) -> Optional[dict]:
+        """
+        Synchronous wrapper for processing a single repository.
+
+        This runs in a thread executor to avoid blocking the event loop.
+        """
+        try:
+            # Process the repository using data fetcher
+            result = data_fetcher.fetch_repository_data(
+                repo_path=Path(repo_config["path"]),
+                project_key=repo_config["project_key"],
+                weeks_back=weeks_back,
+                branch_patterns=repo_config.get("branch_patterns"),
+                jira_integration=None,
+                progress_callback=None,
+                start_date=start_date,
+                end_date=end_date,
+            )
+            return result
+        except Exception as e:
+            import logging
+
+            logging.getLogger(__name__).error(f"Error processing {repo_config['project_key']}: {e}")
+            return None
+
 
     async def _clone_repository(self, repo_config, log: Log) -> None:
         """Clone repository if needed."""
@@ -477,14 +820,32 @@ class AnalysisProgressScreen(Screen):
 
     async def _update_live_stats(self, stats: dict[str, Any]) -> None:
         """Update live statistics display."""
-        stats_widget = self.query_one("#live-stats", Pretty)
-        stats_widget.update(stats)
+        try:
+            stats_widget = self.query_one("#live-stats", Static)
+
+            # Format stats for display
+            stats_text = "\n".join(
+                [f"• {key.replace('_', ' ').title()}: {value}" for key, value in stats.items()]
+            )
+            stats_widget.update(stats_text)
+        except Exception:
+            # Silently ignore if widget doesn't exist (e.g., in testing)
+            pass
 
     def action_cancel(self) -> None:
         """Cancel the analysis."""
         if self.analysis_task and not self.analysis_task.done():
             self.analysis_task.cancel()
-        self.app.pop_screen()
+            # Give the task a moment to cancel cleanly
+            asyncio.create_task(self._delayed_pop_screen())
+        else:
+            self.app.pop_screen()
+
+    async def _delayed_pop_screen(self) -> None:
+        """Pop screen after a brief delay to allow cancellation to complete."""
+        await asyncio.sleep(0.1)
+        if self.app and self.app.is_running:
+            self.app.pop_screen()
 
     def action_back(self) -> None:
         """Go back to main screen."""