gitflow-analytics 3.13.0-py3-none-any.whl → 3.13.6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
gitflow_analytics/cli.py CHANGED
@@ -429,8 +429,7 @@ def cli(ctx: click.Context) -> None:
     "--use-batch-classification/--use-legacy-classification",
     default=True,
     help=(
-        "Use batch LLM classification on pre-fetched data (Step 2 of 2) - "
-        "now the default behavior"
+        "Use batch LLM classification on pre-fetched data (Step 2 of 2) - now the default behavior"
     ),
 )
 @click.option(
@@ -1213,8 +1212,7 @@ def analyze(
     display.update_progress_task(
         "main",
         description=(
-            f"🔍 Discovering repositories from organization: "
-            f"{cfg.github.organization}"
+            f"🔍 Discovering repositories from organization: {cfg.github.organization}"
         ),
         completed=15,
     )
@@ -1457,8 +1455,7 @@ def analyze(
     display.update_progress_task(
         "repos",
         description=(
-            f"Step 1: Fetching data for "
-            f"{len(repos_needing_analysis)} repositories"
+            f"Step 1: Fetching data for {len(repos_needing_analysis)} repositories"
         ),
         completed=0,
     )
@@ -1532,7 +1529,6 @@ def analyze(
     description="Processing repositories",
     unit="repos",
 ) as repos_progress_ctx:
-
     for idx, repo_config in enumerate(repos_needing_analysis, 1):
         try:
             repo_path = Path(repo_config.path)
@@ -1594,6 +1590,68 @@ def analyze(
 total_commits += result["stats"]["total_commits"]
 total_tickets += result["stats"]["unique_tickets"]

+# Fetch and enrich with GitHub PRs after data collection
+if repo_config.github_repo:
+    try:
+        if display:
+            display.print_status(
+                " 📥 Fetching pull requests from GitHub...",
+                "info",
+            )
+
+        # Load commits that were just fetched from cache
+        with cache.get_session() as session:
+            from gitflow_analytics.models.database import CachedCommit
+
+            cached_commits = (
+                session.query(CachedCommit)
+                .filter(
+                    CachedCommit.repo_path == str(repo_path),
+                    CachedCommit.timestamp >= start_date,
+                    CachedCommit.timestamp <= end_date,
+                )
+                .all()
+            )
+
+            # Convert to dict format for enrichment
+            commits_for_enrichment = []
+            for cached_commit in cached_commits:
+                commit_dict = {
+                    "hash": cached_commit.commit_hash,
+                    "author_name": cached_commit.author_name,
+                    "author_email": cached_commit.author_email,
+                    "date": cached_commit.timestamp,
+                    "message": cached_commit.message,
+                }
+                commits_for_enrichment.append(commit_dict)
+
+        # Enrich with GitHub PR data
+        enrichment = orchestrator.enrich_repository_data(
+            repo_config, commits_for_enrichment, start_date
+        )
+
+        if enrichment["prs"]:
+            pr_count = len(enrichment["prs"])
+            if display:
+                display.print_status(
+                    f" ✅ Found {pr_count} pull requests",
+                    "success",
+                )
+            else:
+                click.echo(f" ✅ Found {pr_count} pull requests")
+
+    except Exception as e:
+        logger.warning(
+            f"Failed to fetch PRs for {repo_config.github_repo}: {e}"
+        )
+        if display:
+            display.print_status(
+                f" ⚠️ Could not fetch PRs: {e}",
+                "warning",
+            )
+        else:
+            click.echo(f" ⚠️ Could not fetch PRs: {e}")
+
 # Collect unique developers if available
 if "developers" in result["stats"]:
     total_developers.update(result["stats"]["developers"])
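The same 62-line enrichment block is inserted at two call sites (the second occurrence follows in the next hunk). To make the data flow easier to follow, here is a condensed sketch of what it does; `fetch_prs_for_repo` is a hypothetical helper name, not part of the package, and `cache`, `orchestrator`, `repo_config`, and the date bounds are assumed to be the objects already set up by the surrounding command:

    # Hypothetical condensation of the enrichment block above; a sketch, not package code.
    from gitflow_analytics.models.database import CachedCommit

    def fetch_prs_for_repo(cache, orchestrator, repo_config, repo_path, start_date, end_date):
        with cache.get_session() as session:
            rows = (
                session.query(CachedCommit)
                .filter(
                    CachedCommit.repo_path == str(repo_path),
                    CachedCommit.timestamp >= start_date,
                    CachedCommit.timestamp <= end_date,
                )
                .all()
            )
            # Convert ORM rows to plain dicts while the session is still open.
            commits = [
                {
                    "hash": r.commit_hash,
                    "author_name": r.author_name,
                    "author_email": r.author_email,
                    "date": r.timestamp,
                    "message": r.message,
                }
                for r in rows
            ]
        # Plain dicts outlive the session, so enrichment can run afterwards.
        return orchestrator.enrich_repository_data(repo_config, commits, start_date)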
@@ -2092,6 +2150,68 @@ def analyze(
 total_commits += result["stats"]["total_commits"]
 total_tickets += result["stats"]["unique_tickets"]

+# Fetch and enrich with GitHub PRs after data collection
+if repo_config.github_repo:
+    try:
+        if display:
+            display.print_status(
+                " 📥 Fetching pull requests from GitHub...",
+                "info",
+            )
+
+        # Load commits that were just fetched from cache
+        with cache.get_session() as session:
+            from gitflow_analytics.models.database import CachedCommit
+
+            cached_commits = (
+                session.query(CachedCommit)
+                .filter(
+                    CachedCommit.repo_path == str(repo_path),
+                    CachedCommit.timestamp >= start_date,
+                    CachedCommit.timestamp <= end_date,
+                )
+                .all()
+            )
+
+            # Convert to dict format for enrichment
+            commits_for_enrichment = []
+            for cached_commit in cached_commits:
+                commit_dict = {
+                    "hash": cached_commit.commit_hash,
+                    "author_name": cached_commit.author_name,
+                    "author_email": cached_commit.author_email,
+                    "date": cached_commit.timestamp,
+                    "message": cached_commit.message,
+                }
+                commits_for_enrichment.append(commit_dict)
+
+        # Enrich with GitHub PR data
+        enrichment = orchestrator.enrich_repository_data(
+            repo_config, commits_for_enrichment, start_date
+        )
+
+        if enrichment["prs"]:
+            pr_count = len(enrichment["prs"])
+            if display:
+                display.print_status(
+                    f" ✅ Found {pr_count} pull requests",
+                    "success",
+                )
+            else:
+                click.echo(f" ✅ Found {pr_count} pull requests")
+
+    except Exception as e:
+        logger.warning(
+            f"Failed to fetch PRs for {repo_config.github_repo}: {e}"
+        )
+        if display:
+            display.print_status(
+                f" ⚠️ Could not fetch PRs: {e}",
+                "warning",
+            )
+        else:
+            click.echo(f" ⚠️ Could not fetch PRs: {e}")
+
 # Collect unique developers if available
 if "developers" in result["stats"]:
     total_developers.update(result["stats"]["developers"])
@@ -2285,11 +2405,9 @@ def analyze(
             f" 🗃️ Database state: {pre_classification_commits} commits, "
             f"{pre_classification_batches} batches"
         )
-        click.echo(
-            " 💡 Commits exist but no daily batches - " "batch creation failed"
-        )
+        click.echo(" 💡 Commits exist but no daily batches - batch creation failed")
         raise click.ClickException(
-            "No batches available for classification - " "batch creation process failed"
+            "No batches available for classification - batch creation process failed"
         )

     if display:
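Both removed messages relied on Python's implicit concatenation of adjacent string literals, a leftover from automatic line wrapping; the replacements fold each message into a single literal. The two forms are equivalent, as a quick check shows:

    # Adjacent string literals concatenate at compile time:
    a = "No batches available for classification - " "batch creation process failed"
    b = "No batches available for classification - batch creation process failed"
    assert a == b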
@@ -2717,7 +2835,9 @@ def analyze(
         )
         commit["canonical_id"] = canonical_id
         # Also add canonical display name for reports
-        commit["canonical_name"] = identity_resolver.get_canonical_name(canonical_id)
+        commit["canonical_name"] = identity_resolver.get_canonical_name(
+            canonical_id
+        )

     all_commits.extend(commits)
     if display:
@@ -2934,9 +3054,9 @@ def analyze(
     ):
         existing_mappings.append(new_mapping)

-    config_data["analysis"]["identity"][
-        "manual_mappings"
-    ] = existing_mappings
+    config_data["analysis"]["identity"]["manual_mappings"] = (
+        existing_mappings
+    )

     # Apply bot exclusions
     if suggested_config.get("exclude", {}).get("authors"):
@@ -3382,7 +3502,7 @@ def analyze(
 # Weekly metrics report (only if CSV generation is enabled)
 if generate_csv:
     weekly_report = (
-        output / f'weekly_metrics_{datetime.now(timezone.utc).strftime("%Y%m%d")}.csv'
+        output / f"weekly_metrics_{datetime.now(timezone.utc).strftime('%Y%m%d')}.csv"
     )
     try:
         logger.debug("Starting weekly metrics report generation")
@@ -3402,7 +3522,7 @@ def analyze(
 if generate_csv:
     activity_summary_report = (
         output
-        / f'developer_activity_summary_{datetime.now(timezone.utc).strftime("%Y%m%d")}.csv'
+        / f"developer_activity_summary_{datetime.now(timezone.utc).strftime('%Y%m%d')}.csv"
     )
     try:
         logger.debug("Starting developer activity summary report generation")
@@ -3429,7 +3549,7 @@ def analyze(

 # Summary report (only if CSV generation is enabled)
 if generate_csv:
-    summary_report = output / f'summary_{datetime.now().strftime("%Y%m%d")}.csv'
+    summary_report = output / f"summary_{datetime.now().strftime('%Y%m%d')}.csv"
     try:
         report_gen.generate_summary_report(
             all_commits,
@@ -3453,7 +3573,7 @@ def analyze(

 # Developer report (only if CSV generation is enabled)
 if generate_csv:
-    developer_report = output / f'developers_{datetime.now().strftime("%Y%m%d")}.csv'
+    developer_report = output / f"developers_{datetime.now().strftime('%Y%m%d')}.csv"
     try:
         report_gen.generate_developer_report(developer_stats, developer_report)
         generated_reports.append(developer_report.name)
@@ -3471,7 +3591,7 @@ def analyze(
 # Untracked commits report (only if CSV generation is enabled)
 if generate_csv:
     untracked_commits_report = (
-        output / f'untracked_commits_{datetime.now().strftime("%Y%m%d")}.csv'
+        output / f"untracked_commits_{datetime.now().strftime('%Y%m%d')}.csv"
     )
     try:
         report_gen.generate_untracked_commits_report(
@@ -3492,7 +3612,7 @@ def analyze(
 # Weekly Categorization report (only if CSV generation is enabled)
 if generate_csv:
     weekly_categorization_report = (
-        output / f'weekly_categorization_{datetime.now().strftime("%Y%m%d")}.csv'
+        output / f"weekly_categorization_{datetime.now().strftime('%Y%m%d')}.csv"
     )
     try:
         logger.debug("Starting weekly categorization report generation")
@@ -3510,7 +3630,7 @@ def analyze(
 # PM Correlations report (if PM data is available and CSV generation is enabled)
 if aggregated_pm_data and generate_csv:
     pm_correlations_report = (
-        output / f'pm_correlations_{datetime.now().strftime("%Y%m%d")}.csv'
+        output / f"pm_correlations_{datetime.now().strftime('%Y%m%d')}.csv"
     )
     try:
         report_gen.generate_pm_correlations_report(
@@ -3525,7 +3645,7 @@ def analyze(
 # Story Point Correlation report (only if CSV generation is enabled)
 if generate_csv:
     story_point_correlation_report = (
-        output / f'story_point_correlation_{datetime.now().strftime("%Y%m%d")}.csv'
+        output / f"story_point_correlation_{datetime.now().strftime('%Y%m%d')}.csv"
     )
     try:
         logger.debug("Starting story point correlation report generation")
@@ -3541,7 +3661,7 @@ def analyze(
         click.echo(f" ⚠️ Warning: Story point correlation report failed: {e}")

 # Activity distribution report (always generate data, optionally write CSV)
-activity_report = output / f'activity_distribution_{datetime.now().strftime("%Y%m%d")}.csv'
+activity_report = output / f"activity_distribution_{datetime.now().strftime('%Y%m%d')}.csv"
 try:
     logger.debug("Starting activity distribution report generation")
     analytics_gen.generate_activity_distribution_report(
@@ -3565,7 +3685,7 @@ def analyze(
     raise

 # Developer focus report (always generate data, optionally write CSV)
-focus_report = output / f'developer_focus_{datetime.now().strftime("%Y%m%d")}.csv'
+focus_report = output / f"developer_focus_{datetime.now().strftime('%Y%m%d')}.csv"
 try:
     logger.debug("Starting developer focus report generation")
     analytics_gen.generate_developer_focus_report(
@@ -3589,7 +3709,7 @@ def analyze(
     raise

 # Qualitative insights report (always generate data, optionally write CSV)
-insights_report = output / f'qualitative_insights_{datetime.now().strftime("%Y%m%d")}.csv'
+insights_report = output / f"qualitative_insights_{datetime.now().strftime('%Y%m%d')}.csv"
 try:
     logger.debug("Starting qualitative insights report generation")
     analytics_gen.generate_qualitative_insights_report(
@@ -3609,7 +3729,7 @@ def analyze(

 branch_health_gen = BranchHealthReportGenerator()

-branch_health_report = output / f'branch_health_{datetime.now().strftime("%Y%m%d")}.csv'
+branch_health_report = output / f"branch_health_{datetime.now().strftime('%Y%m%d')}.csv"
 try:
     logger.debug("Starting branch health report generation")
     branch_health_gen.generate_csv_report(branch_health_metrics, branch_health_report)
@@ -3623,7 +3743,7 @@ def analyze(

 # Detailed branch report
 detailed_branch_report = (
-    output / f'branch_details_{datetime.now().strftime("%Y%m%d")}.csv'
+    output / f"branch_details_{datetime.now().strftime('%Y%m%d')}.csv"
 )
 try:
     branch_health_gen.generate_detailed_branch_report(
@@ -3663,7 +3783,7 @@ def analyze(

 # Weekly trends report (includes developer and project trends) (only if CSV generation is enabled)
 if generate_csv:
-    trends_report = output / f'weekly_trends_{datetime.now().strftime("%Y%m%d")}.csv'
+    trends_report = output / f"weekly_trends_{datetime.now().strftime('%Y%m%d')}.csv"
     try:
         logger.debug("Starting weekly trends report generation")
         analytics_gen.generate_weekly_trends_report(
@@ -3739,7 +3859,7 @@ def analyze(
 # Weekly velocity report (only if CSV generation is enabled)
 if generate_csv:
     weekly_velocity_report = (
-        output / f'weekly_velocity_{datetime.now().strftime("%Y%m%d")}.csv'
+        output / f"weekly_velocity_{datetime.now().strftime('%Y%m%d')}.csv"
     )
     try:
         logger.debug("Starting weekly velocity report generation")
@@ -3765,7 +3885,7 @@ def analyze(
 # Weekly DORA metrics report (only if CSV generation is enabled)
 if generate_csv:
     weekly_dora_report = (
-        output / f'weekly_dora_metrics_{datetime.now().strftime("%Y%m%d")}.csv'
+        output / f"weekly_dora_metrics_{datetime.now().strftime('%Y%m%d')}.csv"
     )
     try:
         logger.debug("Starting weekly DORA metrics report generation")
@@ -3983,7 +4103,7 @@ def analyze(
 logger.debug("Starting comprehensive JSON export generation")
 click.echo(" 🔄 Generating comprehensive JSON export...")
 json_report = (
-    output / f'comprehensive_export_{datetime.now().strftime("%Y%m%d")}.json'
+    output / f"comprehensive_export_{datetime.now().strftime('%Y%m%d')}.json"
 )

 # Initialize comprehensive JSON exporter
@@ -5178,7 +5298,7 @@ def identities(config: Path, weeks: int, apply: bool) -> None:

 # Run analysis
 identity_report_path = (
-    cfg.cache.directory / f'identity_analysis_{datetime.now().strftime("%Y%m%d")}.yaml'
+    cfg.cache.directory / f"identity_analysis_{datetime.now().strftime('%Y%m%d')}.yaml"
 )
 identity_result = analysis_pass.run_analysis(
     all_commits, output_path=identity_report_path, apply_to_config=False
@@ -5525,7 +5645,9 @@ def aliases_command(
 confidence_color = (
     "green"
     if alias.confidence >= 0.9
-    else "yellow" if alias.confidence >= 0.8 else "red"
+    else "yellow"
+    if alias.confidence >= 0.8
+    else "red"
 )
 click.echo(" Confidence: ", nl=False)
 click.secho(f"{alias.confidence:.0%}", fg=confidence_color)
@@ -5941,12 +6063,10 @@ def create_alias_interactive(config: Path, output: Optional[Path]) -> None:
 )
 @click.option(
     "--old-name",
-    required=True,
     help="Current canonical name to rename (must match a name in manual_mappings)",
 )
 @click.option(
     "--new-name",
-    required=True,
     help="New canonical display name to use in reports",
 )
 @click.option(
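Dropping `required=True` (with no `default`) means click passes `None` when a flag is omitted, which is what lets the `--interactive` path added below take over. A minimal, standalone sketch of that click behavior (the `demo` command is hypothetical, not part of this package):

    import click

    @click.command()
    @click.option("--old-name")  # not required, no default: None when omitted
    def demo(old_name):
        click.echo(f"old_name is {old_name!r}")

    if __name__ == "__main__":
        demo()  # invoked with no flags, prints: old_name is None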
@@ -5959,12 +6079,19 @@ def create_alias_interactive(config: Path, output: Optional[Path]) -> None:
     is_flag=True,
     help="Show what would be changed without applying changes",
 )
+@click.option(
+    "--interactive",
+    "-i",
+    is_flag=True,
+    help="Interactive mode: select developer from numbered list",
+)
 def alias_rename(
     config: Path,
     old_name: str,
     new_name: str,
     update_cache: bool,
     dry_run: bool,
+    interactive: bool,
 ) -> None:
     """Rename a developer's canonical display name.

@@ -5975,6 +6102,9 @@ def alias_rename(

     \b
     EXAMPLES:
+        # Interactive mode: select from numbered list
+        gitflow-analytics alias-rename -c config.yaml --interactive
+
         # Rename with dry-run to see changes
         gitflow-analytics alias-rename -c config.yaml \\
             --old-name "bianco-zaelot" \\
6001
6131
  try:
6002
6132
  from .core.identity import DeveloperIdentityResolver
6003
6133
 
6004
- # Validate inputs
6005
- if not old_name.strip():
6006
- click.echo("❌ Error: --old-name cannot be empty", err=True)
6007
- sys.exit(1)
6008
-
6009
- if not new_name.strip():
6010
- click.echo("❌ Error: --new-name cannot be empty", err=True)
6011
- sys.exit(1)
6012
-
6013
- old_name = old_name.strip()
6014
- new_name = new_name.strip()
6015
-
6016
- if old_name == new_name:
6017
- click.echo("❌ Error: old-name and new-name are identical", err=True)
6018
- sys.exit(1)
6019
-
6020
6134
  # Load the YAML config file
6021
6135
  click.echo(f"\n📋 Loading configuration from {config}...")
6022
6136
 
6023
6137
  try:
6024
- with open(config, "r", encoding="utf-8") as f:
6138
+ with open(config, encoding="utf-8") as f:
6025
6139
  config_data = yaml.safe_load(f)
6026
6140
  except Exception as e:
6027
6141
  click.echo(f"❌ Error loading config file: {e}", err=True)
@@ -6037,7 +6151,9 @@ def alias_rename(
             sys.exit(1)

         if "manual_mappings" not in config_data["analysis"]["identity"]:
-            click.echo("❌ Error: 'analysis.identity.manual_mappings' not found in config", err=True)
+            click.echo(
+                "❌ Error: 'analysis.identity.manual_mappings' not found in config", err=True
+            )
             sys.exit(1)

         manual_mappings = config_data["analysis"]["identity"]["manual_mappings"]
@@ -6046,6 +6162,62 @@ def alias_rename(
             click.echo("❌ Error: manual_mappings is empty", err=True)
             sys.exit(1)

+        # Interactive mode: display numbered list and prompt for selection
+        if interactive or not old_name or not new_name:
+            click.echo("\n" + "=" * 60)
+            click.echo(click.style("Current Developers:", fg="cyan", bold=True))
+            click.echo("=" * 60 + "\n")
+
+            developer_names = []
+            for idx, mapping in enumerate(manual_mappings, 1):
+                name = mapping.get("name", "Unknown")
+                email = mapping.get("primary_email", "N/A")
+                alias_count = len(mapping.get("aliases", []))
+
+                developer_names.append(name)
+                click.echo(f" {idx}. {click.style(name, fg='green')}")
+                click.echo(f" Email: {email}")
+                click.echo(f" Aliases: {alias_count} email(s)")
+                click.echo()
+
+            # Prompt for selection
+            try:
+                selection = click.prompt(
+                    "Select developer number to rename (or 0 to cancel)",
+                    type=click.IntRange(0, len(developer_names)),
+                )
+            except click.Abort:
+                click.echo("\n👋 Cancelled by user.")
+                sys.exit(0)
+
+            if selection == 0:
+                click.echo("\n👋 Cancelled.")
+                sys.exit(0)
+
+            # Get selected developer name
+            old_name = developer_names[selection - 1]
+            click.echo(f"\n📝 Selected: {click.style(old_name, fg='green')}")
+
+            # Prompt for new name if not provided
+            if not new_name:
+                new_name = click.prompt("Enter new canonical name", type=str)
+
+        # Validate inputs
+        if not old_name or not old_name.strip():
+            click.echo("❌ Error: --old-name cannot be empty", err=True)
+            sys.exit(1)
+
+        if not new_name or not new_name.strip():
+            click.echo("❌ Error: --new-name cannot be empty", err=True)
+            sys.exit(1)
+
+        old_name = old_name.strip()
+        new_name = new_name.strip()
+
+        if old_name == new_name:
+            click.echo("❌ Error: old-name and new-name are identical", err=True)
+            sys.exit(1)
+
         # Find the matching entry
         matching_entry = None
         matching_index = None
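The selection prompt above leans on `click.prompt` with a `click.IntRange` type, which keeps re-asking until the input parses as an integer in range. The pattern in isolation, with a hypothetical `names` list standing in for the mapped developers:

    import click

    names = ["Alice Example", "Bob Example"]  # hypothetical stand-ins
    for i, n in enumerate(names, 1):
        click.echo(f" {i}. {n}")
    choice = click.prompt(
        "Select developer number to rename (or 0 to cancel)",
        type=click.IntRange(0, len(names)),  # rejects values outside 0..len(names)
    )
    if choice:
        click.echo(f"Selected: {names[choice - 1]}")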
@@ -6065,24 +6237,30 @@ def alias_rename(
             sys.exit(1)

         # Display what will be changed
-        click.echo(f"\n🔍 Found matching entry:")
+        click.echo("\n🔍 Found matching entry:")
         click.echo(f" Current name: {old_name}")
         click.echo(f" New name: {new_name}")
         click.echo(f" Email: {matching_entry.get('primary_email', 'N/A')}")
         click.echo(f" Aliases: {len(matching_entry.get('aliases', []))} email(s)")

         if dry_run:
-            click.echo(f"\n🔎 DRY RUN - No changes will be made")
+            click.echo("\n🔎 DRY RUN - No changes will be made")

         # Update the config file
         if not dry_run:
-            click.echo(f"\n📝 Updating configuration file...")
+            click.echo("\n📝 Updating configuration file...")
             manual_mappings[matching_index]["name"] = new_name

             try:
                 with open(config, "w", encoding="utf-8") as f:
-                    yaml.dump(config_data, f, default_flow_style=False, allow_unicode=True, sort_keys=False)
-                click.echo(f"✅ Configuration file updated")
+                    yaml.dump(
+                        config_data,
+                        f,
+                        default_flow_style=False,
+                        allow_unicode=True,
+                        sort_keys=False,
+                    )
+                click.echo("✅ Configuration file updated")
             except Exception as e:
                 click.echo(f"❌ Error writing config file: {e}", err=True)
                 sys.exit(1)
@@ -6091,7 +6269,7 @@ def alias_rename(

         # Update database cache if requested
         if update_cache:
-            click.echo(f"\n💾 Checking database cache...")
+            click.echo("\n💾 Checking database cache...")

             # Load config to get cache directory
             cfg = ConfigLoader.load(config)
@@ -6099,7 +6277,7 @@ def alias_rename(

             if not identity_db_path.exists():
                 click.echo(f"⚠️ Warning: Identity database not found at {identity_db_path}")
-                click.echo(f" Skipping cache update")
+                click.echo(" Skipping cache update")
             else:
                 # Initialize identity resolver to access database
                 identity_resolver = DeveloperIdentityResolver(
@@ -6113,15 +6291,17 @@ def alias_rename(
                 with identity_resolver.get_session() as session:
                     # Count developer_identities records
                     result = session.execute(
-                        text("SELECT COUNT(*) FROM developer_identities WHERE primary_name = :old_name"),
-                        {"old_name": old_name}
+                        text(
+                            "SELECT COUNT(*) FROM developer_identities WHERE primary_name = :old_name"
+                        ),
+                        {"old_name": old_name},
                     )
                     identity_count = result.scalar()

                     # Count developer_aliases records
                     result = session.execute(
                         text("SELECT COUNT(*) FROM developer_aliases WHERE name = :old_name"),
-                        {"old_name": old_name}
+                        {"old_name": old_name},
                     )
                     alias_count = result.scalar()

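The reformatted queries keep SQLAlchemy's `text()` construct with named bind parameters, so values are bound by the driver rather than interpolated into the SQL string. A self-contained sketch of the same pattern against a throwaway in-memory SQLite database (table name borrowed from the diff, schema simplified):

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite:///:memory:")
    with engine.begin() as conn:
        conn.execute(text("CREATE TABLE developer_identities (primary_name TEXT)"))
        conn.execute(
            text("INSERT INTO developer_identities VALUES (:name)"),
            {"name": "old-name"},
        )
        # :old_name is bound as a parameter, never formatted into the string.
        count = conn.execute(
            text("SELECT COUNT(*) FROM developer_identities WHERE primary_name = :old_name"),
            {"old_name": "old-name"},
        ).scalar()
        print(count)  # 1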
@@ -6129,28 +6309,34 @@ def alias_rename(
                 click.echo(f" Found {alias_count} alias record(s)")

                 if identity_count == 0 and alias_count == 0:
-                    click.echo(f" ℹ️ No database records to update")
+                    click.echo(" ℹ️ No database records to update")
                 elif not dry_run:
-                    click.echo(f" Updating database records...")
+                    click.echo(" Updating database records...")

                     with identity_resolver.get_session() as session:
                         # Update developer_identities
                         if identity_count > 0:
                             session.execute(
-                                text("UPDATE developer_identities SET primary_name = :new_name WHERE primary_name = :old_name"),
-                                {"new_name": new_name, "old_name": old_name}
+                                text(
+                                    "UPDATE developer_identities SET primary_name = :new_name WHERE primary_name = :old_name"
+                                ),
+                                {"new_name": new_name, "old_name": old_name},
                             )

                         # Update developer_aliases
                         if alias_count > 0:
                             session.execute(
-                                text("UPDATE developer_aliases SET name = :new_name WHERE name = :old_name"),
-                                {"new_name": new_name, "old_name": old_name}
+                                text(
+                                    "UPDATE developer_aliases SET name = :new_name WHERE name = :old_name"
+                                ),
+                                {"new_name": new_name, "old_name": old_name},
                             )

-                    click.echo(f" ✅ Database updated")
+                    click.echo(" ✅ Database updated")
                 else:
-                    click.echo(f" [Would update {identity_count + alias_count} database record(s)]")
+                    click.echo(
+                        f" [Would update {identity_count + alias_count} database record(s)]"
+                    )

         # Summary
         click.echo(f"\n{'🔎 DRY RUN SUMMARY' if dry_run else '✅ RENAME COMPLETE'}")
@@ -6160,14 +6346,14 @@ def alias_rename(
         if update_cache:
             click.echo(f" Cache: {'Would update' if dry_run else 'Updated'}")
         else:
-            click.echo(f" Cache: Skipped (use --update-cache to update)")
+            click.echo(" Cache: Skipped (use --update-cache to update)")

         if dry_run:
-            click.echo(f"\n💡 Run without --dry-run to apply changes")
+            click.echo("\n💡 Run without --dry-run to apply changes")
         else:
-            click.echo(f"\n💡 Next steps:")
+            click.echo("\n💡 Next steps:")
             click.echo(f" - Review the updated config file: {config}")
-            click.echo(f" - Re-run analysis to see updated reports with new name")
+            click.echo(" - Re-run analysis to see updated reports with new name")

     except KeyboardInterrupt:
         click.echo("\n\n👋 Interrupted by user. Exiting.")
@@ -6175,6 +6361,7 @@ def alias_rename(
     except Exception as e:
         click.echo(f"❌ Unexpected error: {e}", err=True)
         import traceback
+
         traceback.print_exc()
         sys.exit(1)

@@ -6750,7 +6937,10 @@ def training_statistics(config: Path) -> None:

     # Initialize trainer to access statistics
     trainer = CommitClassificationTrainer(
-        config=cfg, cache=cache, orchestrator=None, training_config={}  # Not needed for stats
+        config=cfg,
+        cache=cache,
+        orchestrator=None,
+        training_config={},  # Not needed for stats
     )

     stats = trainer.get_training_statistics()