gitflow-analytics 1.0.1__py3-none-any.whl → 1.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57) hide show
  1. gitflow_analytics/__init__.py +11 -11
  2. gitflow_analytics/_version.py +2 -2
  3. gitflow_analytics/cli.py +612 -258
  4. gitflow_analytics/cli_rich.py +353 -0
  5. gitflow_analytics/config.py +251 -141
  6. gitflow_analytics/core/analyzer.py +140 -103
  7. gitflow_analytics/core/branch_mapper.py +132 -132
  8. gitflow_analytics/core/cache.py +240 -169
  9. gitflow_analytics/core/identity.py +210 -173
  10. gitflow_analytics/extractors/base.py +13 -11
  11. gitflow_analytics/extractors/story_points.py +70 -59
  12. gitflow_analytics/extractors/tickets.py +101 -87
  13. gitflow_analytics/integrations/github_integration.py +84 -77
  14. gitflow_analytics/integrations/jira_integration.py +116 -104
  15. gitflow_analytics/integrations/orchestrator.py +86 -85
  16. gitflow_analytics/metrics/dora.py +181 -177
  17. gitflow_analytics/models/database.py +190 -53
  18. gitflow_analytics/qualitative/__init__.py +30 -0
  19. gitflow_analytics/qualitative/classifiers/__init__.py +13 -0
  20. gitflow_analytics/qualitative/classifiers/change_type.py +468 -0
  21. gitflow_analytics/qualitative/classifiers/domain_classifier.py +399 -0
  22. gitflow_analytics/qualitative/classifiers/intent_analyzer.py +436 -0
  23. gitflow_analytics/qualitative/classifiers/risk_analyzer.py +412 -0
  24. gitflow_analytics/qualitative/core/__init__.py +13 -0
  25. gitflow_analytics/qualitative/core/llm_fallback.py +653 -0
  26. gitflow_analytics/qualitative/core/nlp_engine.py +373 -0
  27. gitflow_analytics/qualitative/core/pattern_cache.py +457 -0
  28. gitflow_analytics/qualitative/core/processor.py +540 -0
  29. gitflow_analytics/qualitative/models/__init__.py +25 -0
  30. gitflow_analytics/qualitative/models/schemas.py +272 -0
  31. gitflow_analytics/qualitative/utils/__init__.py +13 -0
  32. gitflow_analytics/qualitative/utils/batch_processor.py +326 -0
  33. gitflow_analytics/qualitative/utils/cost_tracker.py +343 -0
  34. gitflow_analytics/qualitative/utils/metrics.py +347 -0
  35. gitflow_analytics/qualitative/utils/text_processing.py +243 -0
  36. gitflow_analytics/reports/analytics_writer.py +11 -4
  37. gitflow_analytics/reports/csv_writer.py +51 -31
  38. gitflow_analytics/reports/narrative_writer.py +16 -14
  39. gitflow_analytics/tui/__init__.py +5 -0
  40. gitflow_analytics/tui/app.py +721 -0
  41. gitflow_analytics/tui/screens/__init__.py +8 -0
  42. gitflow_analytics/tui/screens/analysis_progress_screen.py +487 -0
  43. gitflow_analytics/tui/screens/configuration_screen.py +547 -0
  44. gitflow_analytics/tui/screens/loading_screen.py +358 -0
  45. gitflow_analytics/tui/screens/main_screen.py +304 -0
  46. gitflow_analytics/tui/screens/results_screen.py +698 -0
  47. gitflow_analytics/tui/widgets/__init__.py +7 -0
  48. gitflow_analytics/tui/widgets/data_table.py +257 -0
  49. gitflow_analytics/tui/widgets/export_modal.py +301 -0
  50. gitflow_analytics/tui/widgets/progress_widget.py +192 -0
  51. {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.0.3.dist-info}/METADATA +31 -4
  52. gitflow_analytics-1.0.3.dist-info/RECORD +62 -0
  53. gitflow_analytics-1.0.1.dist-info/RECORD +0 -31
  54. {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.0.3.dist-info}/WHEEL +0 -0
  55. {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.0.3.dist-info}/entry_points.txt +0 -0
  56. {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.0.3.dist-info}/licenses/LICENSE +0 -0
  57. {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.0.3.dist-info}/top_level.txt +0 -0
gitflow_analytics/cli.py CHANGED
@@ -1,13 +1,16 @@
1
1
  """Command-line interface for GitFlow Analytics."""
2
+
2
3
  import sys
3
- from datetime import datetime, timedelta
4
+ from datetime import datetime, timedelta, timezone
4
5
  from pathlib import Path
5
- from typing import Optional
6
+ from typing import Any, Optional, cast
6
7
 
7
8
  import click
8
9
  import git
9
10
  import pandas as pd
10
11
 
12
+ from ._version import __version__
13
+ from .cli_rich import create_rich_display
11
14
  from .config import ConfigLoader
12
15
  from .core.analyzer import GitAnalyzer
13
16
  from .core.cache import GitAnalysisCache
@@ -21,299 +24,606 @@ from .reports.narrative_writer import NarrativeReportGenerator
21
24
 
22
25
 
23
26
  @click.group()
24
- @click.version_option(version='0.1.0', prog_name='GitFlow Analytics')
25
- def cli():
27
+ @click.version_option(version=__version__, prog_name="GitFlow Analytics")
28
+ def cli() -> None:
26
29
  """GitFlow Analytics - Analyze Git repositories for productivity insights."""
27
30
  pass
28
31
 
29
32
 
33
+ # TUI command removed - replaced with rich CLI output
34
+ # Legacy TUI code preserved but not exposed
35
+
36
+
30
37
  @cli.command()
31
- @click.option('--config', '-c',
32
- type=click.Path(exists=True, path_type=Path),
33
- required=True,
34
- help='Path to YAML configuration file')
35
- @click.option('--weeks', '-w',
36
- type=int,
37
- default=12,
38
- help='Number of weeks to analyze (default: 12)')
39
- @click.option('--output', '-o',
40
- type=click.Path(path_type=Path),
41
- default=None,
42
- help='Output directory for reports (overrides config file)')
43
- @click.option('--anonymize',
44
- is_flag=True,
45
- help='Anonymize developer information in reports')
46
- @click.option('--no-cache',
47
- is_flag=True,
48
- help='Disable caching (slower but always fresh)')
49
- @click.option('--validate-only',
50
- is_flag=True,
51
- help='Validate configuration without running analysis')
52
- @click.option('--clear-cache',
53
- is_flag=True,
54
- help='Clear cache before running analysis')
55
- def analyze(config: Path, weeks: int, output: Optional[Path], anonymize: bool,
56
- no_cache: bool, validate_only: bool, clear_cache: bool):
38
+ @click.option(
39
+ "--config",
40
+ "-c",
41
+ type=click.Path(exists=True, path_type=Path),
42
+ required=True,
43
+ help="Path to YAML configuration file",
44
+ )
45
+ @click.option(
46
+ "--weeks", "-w", type=int, default=12, help="Number of weeks to analyze (default: 12)"
47
+ )
48
+ @click.option(
49
+ "--output",
50
+ "-o",
51
+ type=click.Path(path_type=Path),
52
+ default=None,
53
+ help="Output directory for reports (overrides config file)",
54
+ )
55
+ @click.option("--anonymize", is_flag=True, help="Anonymize developer information in reports")
56
+ @click.option("--no-cache", is_flag=True, help="Disable caching (slower but always fresh)")
57
+ @click.option(
58
+ "--validate-only", is_flag=True, help="Validate configuration without running analysis"
59
+ )
60
+ @click.option("--clear-cache", is_flag=True, help="Clear cache before running analysis")
61
+ @click.option("--enable-qualitative", is_flag=True, help="Enable qualitative analysis (requires additional dependencies)")
62
+ @click.option("--qualitative-only", is_flag=True, help="Run only qualitative analysis on existing commits")
63
+ @click.option("--rich", is_flag=True, default=True, help="Use rich terminal output (default: enabled)")
64
+ def analyze(
65
+ config: Path,
66
+ weeks: int,
67
+ output: Optional[Path],
68
+ anonymize: bool,
69
+ no_cache: bool,
70
+ validate_only: bool,
71
+ clear_cache: bool,
72
+ enable_qualitative: bool,
73
+ qualitative_only: bool,
74
+ rich: bool,
75
+ ) -> None:
57
76
  """Analyze Git repositories using configuration file."""
77
+
78
+ # Initialize display - use rich by default, fall back to simple output if needed
79
+ display = create_rich_display() if rich else None
58
80
 
59
81
  try:
82
+ if display:
83
+ display.show_header()
84
+
60
85
  # Load configuration
61
- click.echo(f"📋 Loading configuration from {config}...")
86
+ if display:
87
+ display.print_status(f"Loading configuration from {config}...", "info")
88
+ else:
89
+ click.echo(f"📋 Loading configuration from {config}...")
90
+
62
91
  cfg = ConfigLoader.load(config)
63
-
92
+
64
93
  # Validate configuration
65
94
  warnings = ConfigLoader.validate_config(cfg)
66
95
  if warnings:
67
- click.echo("⚠️ Configuration warnings:")
68
- for warning in warnings:
69
- click.echo(f" - {warning}")
70
-
96
+ warning_msg = "Configuration warnings:\n" + "\n".join(f" {w}" for w in warnings)
97
+ if display:
98
+ display.show_warning(warning_msg)
99
+ else:
100
+ click.echo("⚠️ Configuration warnings:")
101
+ for warning in warnings:
102
+ click.echo(f" - {warning}")
103
+
71
104
  if validate_only:
72
105
  if not warnings:
73
- click.echo("✅ Configuration is valid!")
106
+ if display:
107
+ display.print_status("Configuration is valid!", "success")
108
+ else:
109
+ click.echo("✅ Configuration is valid!")
74
110
  else:
75
- click.echo("❌ Configuration has issues that should be addressed.")
111
+ if display:
112
+ display.print_status("Configuration has issues that should be addressed.", "error")
113
+ else:
114
+ click.echo("❌ Configuration has issues that should be addressed.")
76
115
  return
77
-
116
+
78
117
  # Use output directory from CLI or config
79
118
  if output is None:
80
- output = cfg.output.directory if cfg.output.directory else Path('./reports')
81
-
119
+ output = cfg.output.directory if cfg.output.directory else Path("./reports")
120
+
82
121
  # Setup output directory
83
122
  output.mkdir(parents=True, exist_ok=True)
84
123
 
124
+ # Show configuration status in rich display
125
+ if display:
126
+ github_org = cfg.github.organization if cfg.github else None
127
+ github_token_valid = bool(cfg.github and cfg.github.token)
128
+ jira_configured = bool(cfg.jira and cfg.jira.base_url)
129
+ jira_valid = jira_configured # Simplified validation
130
+
131
+ display.show_configuration_status(
132
+ config,
133
+ github_org=github_org,
134
+ github_token_valid=github_token_valid,
135
+ jira_configured=jira_configured,
136
+ jira_valid=jira_valid,
137
+ analysis_weeks=weeks
138
+ )
139
+
85
140
  # Initialize components
86
141
  cache_dir = cfg.cache.directory
87
142
  if clear_cache:
88
- click.echo("🗑️ Clearing cache...")
143
+ if display:
144
+ display.print_status("Clearing cache...", "info")
145
+ else:
146
+ click.echo("🗑️ Clearing cache...")
89
147
  import shutil
148
+
90
149
  if cache_dir.exists():
91
150
  shutil.rmtree(cache_dir)
92
-
93
- cache = GitAnalysisCache(
94
- cache_dir,
95
- ttl_hours=0 if no_cache else cfg.cache.ttl_hours
96
- )
97
-
151
+
152
+ cache = GitAnalysisCache(cache_dir, ttl_hours=0 if no_cache else cfg.cache.ttl_hours)
153
+
98
154
  identity_resolver = DeveloperIdentityResolver(
99
- cache_dir / 'identities.db',
155
+ cache_dir / "identities.db",
100
156
  similarity_threshold=cfg.analysis.similarity_threshold,
101
- manual_mappings=cfg.analysis.manual_identity_mappings
157
+ manual_mappings=cfg.analysis.manual_identity_mappings,
102
158
  )
103
-
159
+
104
160
  analyzer = GitAnalyzer(
105
- cache,
161
+ cache,
106
162
  branch_mapping_rules=cfg.analysis.branch_mapping_rules,
107
- allowed_ticket_platforms=getattr(cfg.analysis, 'ticket_platforms', None),
108
- exclude_paths=cfg.analysis.exclude_paths
163
+ allowed_ticket_platforms=getattr(cfg.analysis, "ticket_platforms", None),
164
+ exclude_paths=cfg.analysis.exclude_paths,
109
165
  )
110
166
  orchestrator = IntegrationOrchestrator(cfg, cache)
111
-
167
+
112
168
  # Discovery organization repositories if needed
113
169
  repositories_to_analyze = cfg.repositories
114
170
  if cfg.github.organization and not repositories_to_analyze:
115
- click.echo(f"🔍 Discovering repositories from organization: {cfg.github.organization}")
171
+ if display:
172
+ display.print_status(f"Discovering repositories from organization: {cfg.github.organization}", "info")
173
+ else:
174
+ click.echo(f"🔍 Discovering repositories from organization: {cfg.github.organization}")
116
175
  try:
117
176
  # Use a 'repos' directory in the config directory for cloned repositories
118
177
  config_dir = Path(config).parent if config else Path.cwd()
119
178
  repos_dir = config_dir / "repos"
120
179
  discovered_repos = cfg.discover_organization_repositories(clone_base_path=repos_dir)
121
180
  repositories_to_analyze = discovered_repos
122
- click.echo(f" ✅ Found {len(discovered_repos)} repositories in organization")
123
- for repo in discovered_repos:
124
- click.echo(f" - {repo.name} ({repo.github_repo})")
181
+
182
+ if display:
183
+ display.print_status(f"Found {len(discovered_repos)} repositories in organization", "success")
184
+ # Show repository discovery in structured format
185
+ repo_data = [{
186
+ "name": repo.name,
187
+ "github_repo": repo.github_repo,
188
+ "exists": repo.path.exists()
189
+ } for repo in discovered_repos]
190
+ display.show_repository_discovery(repo_data)
191
+ else:
192
+ click.echo(f" ✅ Found {len(discovered_repos)} repositories in organization")
193
+ for repo in discovered_repos:
194
+ click.echo(f" - {repo.name} ({repo.github_repo})")
125
195
  except Exception as e:
126
- click.echo(f" ❌ Failed to discover repositories: {e}")
196
+ if display:
197
+ display.show_error(f"Failed to discover repositories: {e}")
198
+ else:
199
+ click.echo(f" ❌ Failed to discover repositories: {e}")
127
200
  return
128
-
129
- # Analysis period
130
- end_date = datetime.now()
201
+
202
+ # Analysis period (timezone-aware to match commit timestamps)
203
+ end_date = datetime.now(timezone.utc)
131
204
  start_date = end_date - timedelta(weeks=weeks)
132
-
133
- click.echo(f"\n🚀 Analyzing {len(repositories_to_analyze)} repositories...")
134
- click.echo(f" Period: {start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')}")
135
-
205
+
206
+ if display:
207
+ display.print_status(f"Analyzing {len(repositories_to_analyze)} repositories...", "info")
208
+ display.print_status(f"Period: {start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')}", "info")
209
+ # Start live progress display
210
+ display.start_live_display()
211
+ display.add_progress_task("repos", "Processing repositories", len(repositories_to_analyze))
212
+ else:
213
+ click.echo(f"\n🚀 Analyzing {len(repositories_to_analyze)} repositories...")
214
+ click.echo(
215
+ f" Period: {start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')}"
216
+ )
217
+
136
218
  # Analyze repositories
137
219
  all_commits = []
138
220
  all_prs = []
139
221
  all_enrichments = {}
140
-
222
+
141
223
  for repo_config in repositories_to_analyze:
142
- click.echo(f"\n📁 Analyzing {repo_config.name}...")
143
-
224
+ if display:
225
+ display.update_progress_task("repos", description=f"Analyzing {repo_config.name}...")
226
+ else:
227
+ click.echo(f"\n📁 Analyzing {repo_config.name}...")
228
+
144
229
  # Check if repo exists, clone if needed
145
230
  if not repo_config.path.exists():
146
231
  # Try to clone if we have a github_repo configured
147
232
  if repo_config.github_repo and cfg.github.organization:
148
- click.echo(" 📥 Cloning repository from GitHub...")
233
+ if display:
234
+ display.print_status("Cloning repository from GitHub...", "info")
235
+ else:
236
+ click.echo(" 📥 Cloning repository from GitHub...")
149
237
  try:
150
238
  # Ensure parent directory exists
151
239
  repo_config.path.parent.mkdir(parents=True, exist_ok=True)
152
-
240
+
153
241
  # Clone the repository
154
242
  clone_url = f"https://github.com/{repo_config.github_repo}.git"
155
243
  if cfg.github.token:
156
244
  # Use token for authentication
157
245
  clone_url = f"https://{cfg.github.token}@github.com/{repo_config.github_repo}.git"
158
-
246
+
159
247
  git.Repo.clone_from(clone_url, repo_config.path, branch=repo_config.branch)
160
- click.echo(f" ✅ Successfully cloned {repo_config.github_repo}")
248
+ if display:
249
+ display.print_status(f"Successfully cloned {repo_config.github_repo}", "success")
250
+ else:
251
+ click.echo(f" ✅ Successfully cloned {repo_config.github_repo}")
161
252
  except Exception as e:
162
- click.echo(f" ❌ Failed to clone repository: {e}")
253
+ if display:
254
+ display.print_status(f"Failed to clone repository: {e}", "error")
255
+ else:
256
+ click.echo(f" ❌ Failed to clone repository: {e}")
163
257
  continue
164
258
  else:
165
- click.echo(f" ❌ Repository path not found: {repo_config.path}")
259
+ if display:
260
+ display.print_status(f"Repository path not found: {repo_config.path}", "error")
261
+ else:
262
+ click.echo(f" ❌ Repository path not found: {repo_config.path}")
166
263
  continue
167
-
264
+
168
265
  # Analyze repository
169
266
  try:
170
267
  commits = analyzer.analyze_repository(
171
- repo_config.path,
172
- start_date,
173
- repo_config.branch
268
+ repo_config.path, start_date, repo_config.branch
174
269
  )
175
-
270
+
176
271
  # Add project key and resolve developer identities
177
272
  for commit in commits:
178
273
  # Use configured project key or fall back to inferred project
179
- if repo_config.project_key and repo_config.project_key != 'UNKNOWN':
180
- commit['project_key'] = repo_config.project_key
274
+ if repo_config.project_key and repo_config.project_key != "UNKNOWN":
275
+ commit["project_key"] = repo_config.project_key
181
276
  else:
182
- commit['project_key'] = commit.get('inferred_project', 'UNKNOWN')
183
-
184
- commit['canonical_id'] = identity_resolver.resolve_developer(
185
- commit['author_name'],
186
- commit['author_email']
277
+ commit["project_key"] = commit.get("inferred_project", "UNKNOWN")
278
+
279
+ commit["canonical_id"] = identity_resolver.resolve_developer(
280
+ commit["author_name"], commit["author_email"]
187
281
  )
188
-
282
+
189
283
  all_commits.extend(commits)
190
- click.echo(f" ✅ Found {len(commits)} commits")
191
-
284
+ if display:
285
+ display.print_status(f"Found {len(commits)} commits", "success")
286
+ else:
287
+ click.echo(f" ✅ Found {len(commits)} commits")
288
+
192
289
  # Enrich with integration data
193
- enrichment = orchestrator.enrich_repository_data(
194
- repo_config, commits, start_date
195
- )
290
+ enrichment = orchestrator.enrich_repository_data(repo_config, commits, start_date)
196
291
  all_enrichments[repo_config.name] = enrichment
197
-
198
- if enrichment['prs']:
199
- all_prs.extend(enrichment['prs'])
200
- click.echo(f" ✅ Found {len(enrichment['prs'])} pull requests")
201
-
292
+
293
+ if enrichment["prs"]:
294
+ all_prs.extend(enrichment["prs"])
295
+ if display:
296
+ display.print_status(f"Found {len(enrichment['prs'])} pull requests", "success")
297
+ else:
298
+ click.echo(f" ✅ Found {len(enrichment['prs'])} pull requests")
299
+
202
300
  except Exception as e:
203
- click.echo(f" ❌ Error: {e}")
301
+ if display:
302
+ display.print_status(f"Error: {e}", "error")
303
+ else:
304
+ click.echo(f" ❌ Error: {e}")
204
305
  continue
306
+ finally:
307
+ if display:
308
+ display.update_progress_task("repos", advance=1)
309
+
310
+ # Stop repository progress and clean up display
311
+ if display:
312
+ display.complete_progress_task("repos", "Repository analysis complete")
313
+ display.stop_live_display()
205
314
 
206
315
  if not all_commits:
207
- click.echo("\n❌ No commits found in the specified period!")
316
+ if display:
317
+ display.show_error("No commits found in the specified period!")
318
+ else:
319
+ click.echo("\n❌ No commits found in the specified period!")
208
320
  return
209
-
321
+
210
322
  # Update developer statistics
211
- click.echo("\n👥 Resolving developer identities...")
323
+ if display:
324
+ display.print_status("Resolving developer identities...", "info")
325
+ else:
326
+ click.echo("\n👥 Resolving developer identities...")
327
+
212
328
  identity_resolver.update_commit_stats(all_commits)
213
329
  developer_stats = identity_resolver.get_developer_stats()
214
- click.echo(f" ✅ Identified {len(developer_stats)} unique developers")
215
330
 
331
+ if display:
332
+ display.print_status(f"Identified {len(developer_stats)} unique developers", "success")
333
+ else:
334
+ click.echo(f" ✅ Identified {len(developer_stats)} unique developers")
335
+
216
336
  # Analyze tickets
217
- click.echo("\n🎫 Analyzing ticket references...")
218
- ticket_extractor = TicketExtractor(allowed_platforms=getattr(cfg.analysis, 'ticket_platforms', None))
337
+ if display:
338
+ display.print_status("Analyzing ticket references...", "info")
339
+ else:
340
+ click.echo("\n🎫 Analyzing ticket references...")
341
+
342
+ ticket_extractor = TicketExtractor(
343
+ allowed_platforms=getattr(cfg.analysis, "ticket_platforms", None)
344
+ )
219
345
  ticket_analysis = ticket_extractor.analyze_ticket_coverage(all_commits, all_prs)
346
+
347
+ for platform, count in ticket_analysis["ticket_summary"].items():
348
+ if display:
349
+ display.print_status(f"{platform.title()}: {count} unique tickets", "success")
350
+ else:
351
+ click.echo(f" - {platform.title()}: {count} unique tickets")
352
+
353
+ # Perform qualitative analysis if enabled
354
+ qualitative_results = []
355
+ if (enable_qualitative or qualitative_only) and cfg.qualitative and cfg.qualitative.enabled:
356
+ if display:
357
+ display.print_status("Performing qualitative analysis...", "info")
358
+ else:
359
+ click.echo("\n🧠 Performing qualitative analysis...")
360
+
361
+ try:
362
+ from .qualitative import QualitativeProcessor
363
+ from .models.database import Database
364
+
365
+ # Initialize qualitative analysis components
366
+ qual_db = Database(cfg.cache.directory / "qualitative.db")
367
+ qual_processor = QualitativeProcessor(cfg.qualitative, qual_db)
368
+
369
+ # Validate setup
370
+ is_valid, issues = qual_processor.validate_setup()
371
+ if not is_valid:
372
+ issue_msg = "Qualitative analysis setup issues:\n" + "\n".join(f"• {issue}" for issue in issues)
373
+ if issues:
374
+ issue_msg += "\n\n💡 Install dependencies: pip install spacy scikit-learn openai tiktoken"
375
+ issue_msg += "\n💡 Download spaCy model: python -m spacy download en_core_web_sm"
376
+
377
+ if display:
378
+ display.show_warning(issue_msg)
379
+ else:
380
+ click.echo(" ⚠️ Qualitative analysis setup issues:")
381
+ for issue in issues:
382
+ click.echo(f" - {issue}")
383
+ if issues:
384
+ click.echo(" 💡 Install dependencies: pip install spacy scikit-learn openai tiktoken")
385
+ click.echo(" 💡 Download spaCy model: python -m spacy download en_core_web_sm")
386
+
387
+ # Convert commits to qualitative format
388
+ commits_for_qual = []
389
+ for commit in all_commits:
390
+ commit_dict = {
391
+ 'hash': commit.hash,
392
+ 'message': commit.message,
393
+ 'author_name': commit.author_name,
394
+ 'author_email': commit.author_email,
395
+ 'timestamp': commit.timestamp,
396
+ 'files_changed': commit.files_changed or [],
397
+ 'insertions': commit.insertions,
398
+ 'deletions': commit.deletions,
399
+ 'branch': getattr(commit, 'branch', 'main')
400
+ }
401
+ commits_for_qual.append(commit_dict)
402
+
403
+ # Perform qualitative analysis with progress tracking
404
+ if display:
405
+ display.start_live_display()
406
+ display.add_progress_task("qualitative", "Analyzing commits with qualitative insights", len(commits_for_qual))
407
+
408
+ qualitative_results = qual_processor.process_commits(commits_for_qual, show_progress=True)
409
+
410
+ if display:
411
+ display.complete_progress_task("qualitative", "Qualitative analysis complete")
412
+ display.stop_live_display()
413
+ display.print_status(f"Analyzed {len(qualitative_results)} commits with qualitative insights", "success")
414
+ else:
415
+ click.echo(f" ✅ Analyzed {len(qualitative_results)} commits with qualitative insights")
416
+
417
+ # Get processing statistics and show them
418
+ qual_stats = qual_processor.get_processing_statistics()
419
+ if display:
420
+ display.show_qualitative_stats(qual_stats)
421
+ else:
422
+ processing_summary = qual_stats['processing_summary']
423
+ click.echo(f" 📈 Processing: {processing_summary['commits_per_second']:.1f} commits/sec")
424
+ click.echo(f" 🎯 Methods: {processing_summary['method_breakdown']['cache']:.1f}% cached, "
425
+ f"{processing_summary['method_breakdown']['nlp']:.1f}% NLP, "
426
+ f"{processing_summary['method_breakdown']['llm']:.1f}% LLM")
427
+
428
+ if qual_stats['llm_statistics']['model_usage'] == 'available':
429
+ llm_stats = qual_stats['llm_statistics']['cost_tracking']
430
+ if llm_stats['total_cost'] > 0:
431
+ click.echo(f" 💰 LLM Cost: ${llm_stats['total_cost']:.4f}")
432
+
433
+ except ImportError as e:
434
+ error_msg = f"Qualitative analysis dependencies missing: {e}\n\n💡 Install with: pip install spacy scikit-learn openai tiktoken"
435
+ if display:
436
+ display.show_error(error_msg)
437
+ else:
438
+ click.echo(f" ❌ Qualitative analysis dependencies missing: {e}")
439
+ click.echo(" 💡 Install with: pip install spacy scikit-learn openai tiktoken")
440
+
441
+ if not qualitative_only:
442
+ if display:
443
+ display.print_status("Continuing with standard analysis...", "info")
444
+ else:
445
+ click.echo(" ⏭️ Continuing with standard analysis...")
446
+ else:
447
+ if display:
448
+ display.show_error("Cannot perform qualitative-only analysis without dependencies")
449
+ else:
450
+ click.echo(" ❌ Cannot perform qualitative-only analysis without dependencies")
451
+ return
452
+ except Exception as e:
453
+ error_msg = f"Qualitative analysis failed: {e}"
454
+ if display:
455
+ display.show_error(error_msg)
456
+ else:
457
+ click.echo(f" ❌ Qualitative analysis failed: {e}")
458
+
459
+ if qualitative_only:
460
+ if display:
461
+ display.show_error("Cannot continue with qualitative-only analysis")
462
+ else:
463
+ click.echo(" ❌ Cannot continue with qualitative-only analysis")
464
+ return
465
+ else:
466
+ if display:
467
+ display.print_status("Continuing with standard analysis...", "info")
468
+ else:
469
+ click.echo(" ⏭️ Continuing with standard analysis...")
470
+ elif enable_qualitative and not cfg.qualitative:
471
+ warning_msg = "Qualitative analysis requested but not configured in config file\n\nAdd a 'qualitative:' section to your configuration"
472
+ if display:
473
+ display.show_warning(warning_msg)
474
+ else:
475
+ click.echo("\n⚠️ Qualitative analysis requested but not configured in config file")
476
+ click.echo(" Add a 'qualitative:' section to your configuration")
220
477
 
221
- for platform, count in ticket_analysis['ticket_summary'].items():
222
- click.echo(f" - {platform.title()}: {count} unique tickets")
223
-
478
+ # Skip standard analysis if qualitative-only mode
479
+ if qualitative_only:
480
+ if display:
481
+ display.print_status("Qualitative-only analysis completed!", "success")
482
+ else:
483
+ click.echo("\n✅ Qualitative-only analysis completed!")
484
+ return
485
+
224
486
  # Generate reports
225
- click.echo("\n📊 Generating reports...")
487
+ if display:
488
+ display.print_status("Generating reports...", "info")
489
+ else:
490
+ click.echo("\n📊 Generating reports...")
226
491
  report_gen = CSVReportGenerator(anonymize=anonymize or cfg.output.anonymize_enabled)
227
- analytics_gen = AnalyticsReportGenerator(anonymize=anonymize or cfg.output.anonymize_enabled)
228
-
229
- # Weekly metrics report
230
- weekly_report = output / f'weekly_metrics_{datetime.now().strftime("%Y%m%d")}.csv'
231
- report_gen.generate_weekly_report(
232
- all_commits,
233
- developer_stats,
234
- weekly_report,
235
- weeks
492
+ analytics_gen = AnalyticsReportGenerator(
493
+ anonymize=anonymize or cfg.output.anonymize_enabled
236
494
  )
237
- click.echo(f" ✅ Weekly metrics: {weekly_report}")
495
+
496
+ # Collect generated report files for display
497
+ generated_reports = []
238
498
 
499
+ # Weekly metrics report
500
+ weekly_report = output / f'weekly_metrics_{datetime.now(timezone.utc).strftime("%Y%m%d")}.csv'
501
+ try:
502
+ report_gen.generate_weekly_report(all_commits, developer_stats, weekly_report, weeks)
503
+ generated_reports.append(weekly_report.name)
504
+ if not display:
505
+ click.echo(f" ✅ Weekly metrics: {weekly_report}")
506
+ except Exception as e:
507
+ click.echo(f" ❌ Error generating weekly metrics report: {e}")
508
+ click.echo(f" 🔍 Error type: {type(e).__name__}")
509
+ click.echo(f" 📍 Error details: {str(e)}")
510
+ import traceback
511
+ traceback.print_exc()
512
+ raise
513
+
239
514
  # Summary report
240
515
  summary_report = output / f'summary_{datetime.now().strftime("%Y%m%d")}.csv'
241
- report_gen.generate_summary_report(
242
- all_commits,
243
- all_prs,
244
- developer_stats,
245
- ticket_analysis,
246
- summary_report
247
- )
248
- click.echo(f" ✅ Summary stats: {summary_report}")
249
-
516
+ try:
517
+ report_gen.generate_summary_report(
518
+ all_commits, all_prs, developer_stats, ticket_analysis, summary_report
519
+ )
520
+ generated_reports.append(summary_report.name)
521
+ if not display:
522
+ click.echo(f" ✅ Summary stats: {summary_report}")
523
+ except Exception as e:
524
+ click.echo(f" ❌ Error generating summary report: {e}")
525
+ click.echo(f" 🔍 Error type: {type(e).__name__}")
526
+ click.echo(f" 📍 Error details: {str(e)}")
527
+ import traceback
528
+ traceback.print_exc()
529
+ raise
530
+
250
531
  # Developer report
251
532
  developer_report = output / f'developers_{datetime.now().strftime("%Y%m%d")}.csv'
252
- report_gen.generate_developer_report(
253
- developer_stats,
254
- developer_report
255
- )
256
- click.echo(f" ✅ Developer stats: {developer_report}")
257
-
533
+ try:
534
+ report_gen.generate_developer_report(developer_stats, developer_report)
535
+ generated_reports.append(developer_report.name)
536
+ if not display:
537
+ click.echo(f" ✅ Developer stats: {developer_report}")
538
+ except Exception as e:
539
+ click.echo(f" ❌ Error generating developer report: {e}")
540
+ click.echo(f" 🔍 Error type: {type(e).__name__}")
541
+ click.echo(f" 📍 Error details: {str(e)}")
542
+ import traceback
543
+ traceback.print_exc()
544
+ raise
545
+
258
546
  # Activity distribution report
259
547
  activity_report = output / f'activity_distribution_{datetime.now().strftime("%Y%m%d")}.csv'
260
- analytics_gen.generate_activity_distribution_report(
261
- all_commits,
262
- developer_stats,
263
- activity_report
264
- )
265
- click.echo(f" ✅ Activity distribution: {activity_report}")
266
-
548
+ try:
549
+ analytics_gen.generate_activity_distribution_report(
550
+ all_commits, developer_stats, activity_report
551
+ )
552
+ generated_reports.append(activity_report.name)
553
+ if not display:
554
+ click.echo(f" ✅ Activity distribution: {activity_report}")
555
+ except Exception as e:
556
+ click.echo(f" ❌ Error generating activity distribution report: {e}")
557
+ click.echo(f" 🔍 Error type: {type(e).__name__}")
558
+ click.echo(f" 📍 Error details: {str(e)}")
559
+ import traceback
560
+ traceback.print_exc()
561
+ raise
562
+
267
563
  # Developer focus report
268
564
  focus_report = output / f'developer_focus_{datetime.now().strftime("%Y%m%d")}.csv'
269
- analytics_gen.generate_developer_focus_report(
270
- all_commits,
271
- developer_stats,
272
- focus_report,
273
- weeks
274
- )
275
- click.echo(f" ✅ Developer focus: {focus_report}")
276
-
565
+ try:
566
+ analytics_gen.generate_developer_focus_report(
567
+ all_commits, developer_stats, focus_report, weeks
568
+ )
569
+ generated_reports.append(focus_report.name)
570
+ if not display:
571
+ click.echo(f" ✅ Developer focus: {focus_report}")
572
+ except Exception as e:
573
+ click.echo(f" ❌ Error generating developer focus report: {e}")
574
+ click.echo(f" 🔍 Error type: {type(e).__name__}")
575
+ click.echo(f" 📍 Error details: {str(e)}")
576
+ import traceback
577
+ traceback.print_exc()
578
+ raise
579
+
277
580
  # Qualitative insights report
278
581
  insights_report = output / f'qualitative_insights_{datetime.now().strftime("%Y%m%d")}.csv'
279
- analytics_gen.generate_qualitative_insights_report(
280
- all_commits,
281
- developer_stats,
282
- ticket_analysis,
283
- insights_report
284
- )
285
- click.echo(f" ✅ Qualitative insights: {insights_report}")
286
-
582
+ try:
583
+ analytics_gen.generate_qualitative_insights_report(
584
+ all_commits, developer_stats, ticket_analysis, insights_report
585
+ )
586
+ generated_reports.append(insights_report.name)
587
+ if not display:
588
+ click.echo(f" ✅ Qualitative insights: {insights_report}")
589
+ except Exception as e:
590
+ click.echo(f" ❌ Error generating qualitative insights report: {e}")
591
+ click.echo(f" 🔍 Error type: {type(e).__name__}")
592
+ click.echo(f" 📍 Error details: {str(e)}")
593
+ import traceback
594
+ traceback.print_exc()
595
+ raise
596
+
287
597
  # Calculate DORA metrics
288
598
  dora_calculator = DORAMetricsCalculator()
289
599
  dora_metrics = dora_calculator.calculate_dora_metrics(
290
600
  all_commits, all_prs, start_date, end_date
291
601
  )
292
-
602
+
293
603
  # Aggregate PR metrics
294
604
  pr_metrics = {}
295
605
  for enrichment in all_enrichments.values():
296
- if enrichment.get('pr_metrics'):
606
+ if enrichment.get("pr_metrics"):
297
607
  # Combine metrics (simplified - in production would properly aggregate)
298
- pr_metrics = enrichment['pr_metrics']
608
+ pr_metrics = enrichment["pr_metrics"]
299
609
  break
300
-
610
+
301
611
  # Generate narrative report if markdown format is enabled
302
- if 'markdown' in cfg.output.formats:
612
+ if "markdown" in cfg.output.formats:
303
613
  narrative_gen = NarrativeReportGenerator()
304
-
614
+
305
615
  # Load activity distribution data
306
616
  activity_df = pd.read_csv(activity_report)
307
- activity_data = activity_df.to_dict('records')
308
-
617
+ activity_data = cast(list[dict[str, Any]], activity_df.to_dict("records"))
618
+
309
619
  # Load focus data
310
620
  focus_df = pd.read_csv(focus_report)
311
- focus_data = focus_df.to_dict('records')
312
-
621
+ focus_data = cast(list[dict[str, Any]], focus_df.to_dict("records"))
622
+
313
623
  # Load insights data
314
624
  insights_df = pd.read_csv(insights_report)
315
- insights_data = insights_df.to_dict('records')
316
-
625
+ insights_data = cast(list[dict[str, Any]], insights_df.to_dict("records"))
626
+
317
627
  narrative_report = output / f'narrative_report_{datetime.now().strftime("%Y%m%d")}.md'
318
628
  narrative_gen.generate_narrative_report(
319
629
  all_commits,
@@ -325,190 +635,234 @@ def analyze(config: Path, weeks: int, output: Optional[Path], anonymize: bool,
325
635
  ticket_analysis,
326
636
  pr_metrics,
327
637
  narrative_report,
328
- weeks
638
+ weeks,
329
639
  )
330
- click.echo(f" ✅ Narrative report: {narrative_report}")
331
-
640
+ generated_reports.append(narrative_report.name)
641
+ if not display:
642
+ click.echo(f" ✅ Narrative report: {narrative_report}")
643
+
332
644
  # Generate JSON export if enabled
333
- if 'json' in cfg.output.formats:
645
+ if "json" in cfg.output.formats:
334
646
  json_report = output / f'gitflow_export_{datetime.now().strftime("%Y%m%d")}.json'
335
-
647
+
336
648
  project_metrics = {
337
- 'ticket_analysis': ticket_analysis,
338
- 'pr_metrics': pr_metrics,
339
- 'enrichments': all_enrichments
649
+ "ticket_analysis": ticket_analysis,
650
+ "pr_metrics": pr_metrics,
651
+ "enrichments": all_enrichments,
340
652
  }
341
-
653
+
342
654
  orchestrator.export_to_json(
343
655
  all_commits,
344
656
  all_prs,
345
657
  developer_stats,
346
658
  project_metrics,
347
659
  dora_metrics,
348
- str(json_report)
660
+ str(json_report),
349
661
  )
350
- click.echo(f" ✅ JSON export: {json_report}")
351
-
352
- # Print summary
353
- click.echo("\n📈 Analysis Summary:")
354
- click.echo(f" - Total commits: {len(all_commits)}")
355
- click.echo(f" - Total PRs: {len(all_prs)}")
356
- click.echo(f" - Active developers: {len(developer_stats)}")
357
- click.echo(f" - Ticket coverage: {ticket_analysis['commit_coverage_pct']:.1f}%")
358
-
359
- total_story_points = sum(c.get('story_points', 0) or 0 for c in all_commits)
360
- click.echo(f" - Total story points: {total_story_points}")
361
-
362
- if dora_metrics:
363
- click.echo("\n🎯 DORA Metrics:")
364
- click.echo(f" - Deployment frequency: {dora_metrics['deployment_frequency']['category']}")
365
- click.echo(f" - Lead time: {dora_metrics['lead_time_hours']:.1f} hours")
366
- click.echo(f" - Change failure rate: {dora_metrics['change_failure_rate']:.1f}%")
367
- click.echo(f" - MTTR: {dora_metrics['mttr_hours']:.1f} hours")
368
- click.echo(f" - Performance level: {dora_metrics['performance_level']}")
369
-
370
- click.echo(f"\n✅ Analysis complete! Reports saved to {output}")
371
-
662
+ generated_reports.append(json_report.name)
663
+ if not display:
664
+ click.echo(f" ✅ JSON export: {json_report}")
665
+
666
+ total_story_points = sum(c.get("story_points", 0) or 0 for c in all_commits)
667
+ qualitative_count = len(qualitative_results) if qualitative_results else 0
668
+
669
+ # Show results summary
670
+ if display:
671
+ display.show_analysis_summary(
672
+ total_commits=len(all_commits),
673
+ total_prs=len(all_prs),
674
+ active_developers=len(developer_stats),
675
+ ticket_coverage=ticket_analysis['commit_coverage_pct'],
676
+ story_points=total_story_points,
677
+ qualitative_analyzed=qualitative_count
678
+ )
679
+
680
+ # Show DORA metrics
681
+ if dora_metrics:
682
+ display.show_dora_metrics(dora_metrics)
683
+
684
+ # Show generated reports
685
+ display.show_reports_generated(output, generated_reports)
686
+
687
+ display.print_status("Analysis complete!", "success")
688
+ else:
689
+ # Print summary in simple format
690
+ click.echo("\n📈 Analysis Summary:")
691
+ click.echo(f" - Total commits: {len(all_commits)}")
692
+ click.echo(f" - Total PRs: {len(all_prs)}")
693
+ click.echo(f" - Active developers: {len(developer_stats)}")
694
+ click.echo(f" - Ticket coverage: {ticket_analysis['commit_coverage_pct']:.1f}%")
695
+ click.echo(f" - Total story points: {total_story_points}")
696
+
697
+ if dora_metrics:
698
+ click.echo("\n🎯 DORA Metrics:")
699
+ click.echo(
700
+ f" - Deployment frequency: {dora_metrics['deployment_frequency']['category']}"
701
+ )
702
+ click.echo(f" - Lead time: {dora_metrics['lead_time_hours']:.1f} hours")
703
+ click.echo(f" - Change failure rate: {dora_metrics['change_failure_rate']:.1f}%")
704
+ click.echo(f" - MTTR: {dora_metrics['mttr_hours']:.1f} hours")
705
+ click.echo(f" - Performance level: {dora_metrics['performance_level']}")
706
+
707
+ click.echo(f"\n✅ Analysis complete! Reports saved to {output}")
708
+
372
709
  except Exception as e:
373
- click.echo(f"\n❌ Error: {e}", err=True)
374
- if '--debug' in sys.argv:
710
+ if display:
711
+ display.show_error(str(e), show_debug_hint=True)
712
+ else:
713
+ click.echo(f"\n❌ Error: {e}", err=True)
714
+
715
+ if "--debug" in sys.argv:
375
716
  raise
376
717
  sys.exit(1)
377
718
 
378
719
 
379
720
@cli.command()
@click.option(
    "--config",
    "-c",
    type=click.Path(exists=True, path_type=Path),
    required=True,
    help="Path to YAML configuration file",
)
def cache_stats(config: Path) -> None:
    """Show cache statistics.

    Loads the configuration, queries the git analysis cache for entry and
    staleness counts, then reports the total on-disk size of the cache
    directory. Exits with status 1 on any error.
    """
    try:
        cfg = ConfigLoader.load(config)
        cache = GitAnalysisCache(cfg.cache.directory)

        stats = cache.get_cache_stats()

        click.echo("📊 Cache Statistics:")
        click.echo(f" - Cached commits: {stats['cached_commits']}")
        click.echo(f" - Cached PRs: {stats['cached_prs']}")
        click.echo(f" - Cached issues: {stats['cached_issues']}")
        click.echo(f" - Stale entries: {stats['stale_commits']}")

        # Sum file sizes recursively. pathlib is used here for consistency
        # with the rest of the module (cfg.cache.directory is a Path),
        # replacing the previous os.walk/os.path.getsize approach.
        cache_size = sum(
            f.stat().st_size for f in cfg.cache.directory.rglob("*") if f.is_file()
        )

        click.echo(f" - Cache size: {cache_size / 1024 / 1024:.1f} MB")

    except Exception as e:
        click.echo(f"❌ Error: {e}", err=True)
        sys.exit(1)
410
755
 
411
756
 
412
757
@cli.command()
@click.option(
    "--config",
    "-c",
    type=click.Path(exists=True, path_type=Path),
    required=True,
    help="Path to YAML configuration file",
)
@click.argument("dev1")
@click.argument("dev2")
def merge_identity(config: Path, dev1: str, dev2: str) -> None:
    """Merge two developer identities.

    DEV2 is folded into DEV1 using the identity database stored in the
    configured cache directory. Exits with status 1 on any error.
    """
    try:
        cfg = ConfigLoader.load(config)

        # The identity database lives alongside the analysis cache.
        resolver = DeveloperIdentityResolver(cfg.cache.directory / "identities.db")

        click.echo(f"🔄 Merging {dev2} into {dev1}...")
        resolver.merge_identities(dev1, dev2)
        click.echo("✅ Identities merged successfully!")

    except Exception as e:
        click.echo(f"❌ Error: {e}", err=True)
        sys.exit(1)
434
780
 
435
781
 
436
782
@cli.command()
@click.option(
    "--config",
    "-c",
    type=click.Path(exists=True, path_type=Path),
    required=True,
    help="Path to YAML configuration file",
)
def discover_jira_fields(config: Path) -> None:
    """Discover available JIRA fields, particularly story point fields.

    Connects to the configured JIRA instance, scans for candidate story
    point fields, and prints a ready-to-paste YAML snippet for the
    ``jira_integration.story_point_fields`` configuration key.
    """
    try:
        cfg = ConfigLoader.load(config)

        # Guard clause: nothing to do without a JIRA section in the config.
        if not cfg.jira or not cfg.jira.base_url:
            click.echo("❌ JIRA is not configured in the configuration file")
            return

        # Imported lazily so the command only requires JIRA support when used.
        from .integrations.jira_integration import JIRAIntegration

        # The integration needs a cache instance even for field discovery.
        analysis_cache = GitAnalysisCache(cfg.cache.directory)
        jira_client = JIRAIntegration(
            cfg.jira.base_url,
            cfg.jira.access_user,
            cfg.jira.access_token,
            analysis_cache,
        )

        # Fail fast if the credentials or URL are wrong.
        click.echo(f"🔗 Connecting to JIRA at {cfg.jira.base_url}...")
        if not jira_client.validate_connection():
            click.echo("❌ Failed to connect to JIRA. Check your credentials.")
            return

        click.echo("✅ Connected successfully!\n")
        click.echo("🔍 Discovering fields with potential story point data...")

        candidates = jira_client.discover_fields()

        if not candidates:
            click.echo("No potential story point fields found.")
        else:
            click.echo(f"\nFound {len(candidates)} potential story point fields:")
            click.echo(
                "\nAdd these to your configuration under jira_integration.story_point_fields:"
            )
            # Emit a copy-pastable YAML block.
            click.echo("```yaml")
            click.echo("jira_integration:")
            click.echo(" story_point_fields:")
            for field_id, field_info in candidates.items():
                click.echo(f' - "{field_id}" # {field_info["name"]}')
            click.echo("```")

    except Exception as e:
        click.echo(f"❌ Error: {e}", err=True)
        sys.exit(1)
840
 
488
841
 
489
842
  @cli.command()
490
- @click.option('--config', '-c',
491
- type=click.Path(exists=True, path_type=Path),
492
- required=True,
493
- help='Path to YAML configuration file')
494
- def list_developers(config: Path):
843
+ @click.option(
844
+ "--config",
845
+ "-c",
846
+ type=click.Path(exists=True, path_type=Path),
847
+ required=True,
848
+ help="Path to YAML configuration file",
849
+ )
850
+ def list_developers(config: Path) -> None:
495
851
  """List all known developers."""
496
852
  try:
497
853
  cfg = ConfigLoader.load(config)
498
- identity_resolver = DeveloperIdentityResolver(
499
- cfg.cache.directory / 'identities.db'
500
- )
501
-
854
+ identity_resolver = DeveloperIdentityResolver(cfg.cache.directory / "identities.db")
855
+
502
856
  developers = identity_resolver.get_developer_stats()
503
-
857
+
504
858
  if not developers:
505
859
  click.echo("No developers found. Run analysis first.")
506
860
  return
507
-
861
+
508
862
  click.echo("👥 Known Developers:")
509
863
  click.echo(f"{'Name':<30} {'Email':<40} {'Commits':<10} {'Points':<10} {'Aliases'}")
510
864
  click.echo("-" * 100)
511
-
865
+
512
866
  for dev in developers[:20]: # Show top 20
513
867
  click.echo(
514
868
  f"{dev['primary_name']:<30} "
@@ -517,19 +871,19 @@ def list_developers(config: Path):
517
871
  f"{dev['total_story_points']:<10} "
518
872
  f"{dev['alias_count']}"
519
873
  )
520
-
874
+
521
875
  if len(developers) > 20:
522
876
  click.echo(f"\n... and {len(developers) - 20} more developers")
523
-
877
+
524
878
  except Exception as e:
525
879
  click.echo(f"❌ Error: {e}", err=True)
526
880
  sys.exit(1)
527
881
 
528
882
 
529
def main() -> None:
    """Console-script entry point; delegates to the click command group."""
    cli()
532
886
 
533
887
 
534
# Support direct execution of the module (e.g. `python cli.py`).
if __name__ == "__main__":
    main()