gitflow_analytics-3.12.5-py3-none-any.whl → gitflow_analytics-3.13.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
gitflow_analytics/cli.py CHANGED
@@ -18,6 +18,7 @@ import yaml
  
  from ._version import __version__
  from .config import ConfigLoader
+ from .config.errors import ConfigurationError
  from .ui.progress_display import create_progress_display
  
  # Heavy imports are lazy-loaded to improve CLI startup time
@@ -158,7 +159,8 @@ def handle_timezone_error(
  for i, commit in enumerate(sample_commits):
  timestamp = commit.get("timestamp")
  logger.error(
- f" Sample commit {i}: timestamp={timestamp} (tzinfo: {getattr(timestamp, 'tzinfo', 'N/A')})"
+ f" Sample commit {i}: timestamp={timestamp} "
+ f"(tzinfo: {getattr(timestamp, 'tzinfo', 'N/A')})"
  )
  
  click.echo(f" ❌ Timezone comparison error in {report_name}")
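The handler above logs each sample commit's tzinfo because mixing timezone-aware and naive datetimes is exactly what raises the TypeError being diagnosed here. A minimal standalone illustration of the failure mode and of the getattr-based logging (not the package's code):

    from datetime import datetime, timezone

    aware = datetime(2024, 1, 1, tzinfo=timezone.utc)  # timezone-aware
    naive = datetime(2024, 1, 1)                       # tzinfo is None

    try:
        _ = aware < naive  # comparing aware and naive datetimes raises TypeError
    except TypeError as exc:
        # getattr(..., "tzinfo", "N/A") stays safe even if the value is not a datetime
        print(f"timestamp={naive} (tzinfo: {getattr(naive, 'tzinfo', 'N/A')})")
        print(f"error: {exc}")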
@@ -300,16 +302,16 @@ def cli(ctx: click.Context) -> None:
  
  \b
  QUICK START:
- 1. Create a configuration file (see config-sample.yaml)
- 2. Run analysis: gitflow-analytics -c config.yaml --weeks 4
+ 1. Create a configuration file named config.yaml (see config-sample.yaml)
+ 2. Run analysis: gitflow-analytics --weeks 4
  3. View reports in the output directory
  
  \b
  COMMON WORKFLOWS:
- Analyze last 4 weeks: gitflow-analytics -c config.yaml --weeks 4
- Generate specific report: gitflow-analytics -c config.yaml --format csv
- Clear cache and analyze: gitflow-analytics -c config.yaml --clear-cache
- Validate configuration: gitflow-analytics -c config.yaml --validate-only
+ Analyze last 4 weeks: gitflow-analytics --weeks 4
+ Use custom config: gitflow-analytics -c myconfig.yaml --weeks 4
+ Clear cache and analyze: gitflow-analytics --clear-cache
+ Validate configuration: gitflow-analytics --validate-only
  
  \b
  COMMANDS:
@@ -328,21 +330,28 @@ def cli(ctx: click.Context) -> None:
  gitflow-analytics install
  
  # Interactive launcher
- gitflow-analytics run -c config.yaml
+ gitflow-analytics run
  
  # Generate developer aliases
- gitflow-analytics aliases -c config.yaml --apply
+ gitflow-analytics aliases --apply
  
- # Run analysis
- gitflow-analytics -c config.yaml --weeks 4
+ # Run analysis (uses config.yaml by default)
+ gitflow-analytics --weeks 4
  
  \b
  For detailed command help: gitflow-analytics COMMAND --help
  For documentation: https://github.com/yourusername/gitflow-analytics
  """
- # If no subcommand was invoked, show help
+ # If no subcommand was invoked, show interactive menu or help
  if ctx.invoked_subcommand is None:
- click.echo(ctx.get_help())
+ # Check if running in interactive terminal
+ if sys.stdin.isatty() and sys.stdout.isatty():
+ from gitflow_analytics.cli_wizards.menu import show_main_menu
+
+ show_main_menu()
+ else:
+ # Non-interactive terminal, show help
+ click.echo(ctx.get_help())
  ctx.exit(0)
  
  
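The new fallback opens the menu only when both stdin and stdout are attached to a terminal, so piped output and CI runs still get plain help text. A minimal sketch of the same guard, with show_main_menu standing in for the package's wizard:

    import sys

    def show_main_menu() -> None:
        # Stand-in for gitflow_analytics.cli_wizards.menu.show_main_menu
        print("1) analyze  2) aliases  3) quit")

    def entry_point() -> None:
        if sys.stdin.isatty() and sys.stdout.isatty():
            show_main_menu()  # interactive terminal
        else:
            print("usage: gitflow-analytics [OPTIONS] COMMAND")  # pipes, CI runners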
@@ -350,9 +359,9 @@ def cli(ctx: click.Context) -> None:
  @click.option(
  "--config",
  "-c",
- type=click.Path(exists=True, path_type=Path),
- required=True,
- help="Path to YAML configuration file",
+ type=click.Path(path_type=Path),
+ default="config.yaml",
+ help="Path to YAML configuration file (default: config.yaml)",
  )
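Dropping exists=True and required=True in favor of default="config.yaml" means Click no longer rejects a missing file itself; the existence check moves into the command body, where a friendlier message can be produced (see the ConfigLoader hunk further down). A standalone sketch of that division of labor, using only stock Click:

    from pathlib import Path

    import click

    @click.command()
    @click.option(
        "--config", "-c",
        type=click.Path(path_type=Path),  # no exists=True: validated manually below
        default="config.yaml",
        help="Path to YAML configuration file (default: config.yaml)",
    )
    def analyze(config: Path) -> None:
        if not config.exists():
            # Friendlier than Click's built-in "Invalid value" error
            raise click.ClickException(f"Configuration file not found: {config}")
        click.echo(f"Loading {config}...")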
  @click.option(
  "--weeks", "-w", type=int, default=12, help="Number of weeks to analyze (default: 12)"
@@ -419,7 +428,10 @@ def cli(ctx: click.Context) -> None:
  @click.option(
  "--use-batch-classification/--use-legacy-classification",
  default=True,
- help="Use batch LLM classification on pre-fetched data (Step 2 of 2) - now the default behavior",
+ help=(
+ "Use batch LLM classification on pre-fetched data (Step 2 of 2) - "
+ "now the default behavior"
+ ),
  )
  @click.option(
  "--force-fetch", is_flag=True, help="Force fetch fresh data even if cached data exists"
@@ -472,20 +484,23 @@ def analyze_subcommand(
  
  \b
  EXAMPLES:
- # Basic analysis of last 4 weeks
- gitflow-analytics analyze -c config.yaml --weeks 4
+ # Basic analysis of last 4 weeks (uses config.yaml by default)
+ gitflow-analytics analyze --weeks 4
+
+ # Use a custom configuration file
+ gitflow-analytics analyze -c myconfig.yaml --weeks 4
  
  # Generate CSV reports with fresh data
- gitflow-analytics analyze -c config.yaml --generate-csv --clear-cache
+ gitflow-analytics analyze --generate-csv --clear-cache
  
  # Quick validation of configuration
- gitflow-analytics analyze -c config.yaml --validate-only
+ gitflow-analytics analyze --validate-only
  
  # Analyze with qualitative insights
- gitflow-analytics analyze -c config.yaml --enable-qualitative
+ gitflow-analytics analyze --enable-qualitative
  
  # Run only security analysis (requires security config)
- gitflow-analytics analyze -c config.yaml --security-only
+ gitflow-analytics analyze --security-only
  
  \b
  OUTPUT FILES:
@@ -626,7 +641,28 @@ def analyze(
  else:
  click.echo(f"📋 Loading configuration from {config}...")
  
- cfg = ConfigLoader.load(config)
+ try:
+ cfg = ConfigLoader.load(config)
+ except (FileNotFoundError, ConfigurationError) as e:
+ # Provide user-friendly guidance for missing config file
+ error_msg = str(e)
+ if "not found" in error_msg.lower() or isinstance(e, FileNotFoundError):
+ friendly_msg = (
+ f"❌ Configuration file not found: {config}\n\n"
+ "To get started:\n"
+ " 1. Copy the sample: cp examples/config/config-sample.yaml config.yaml\n"
+ " 2. Edit config.yaml with your repository settings\n"
+ " 3. Run: gitflow-analytics -w 4\n\n"
+ "Or use the interactive installer: gitflow-analytics install"
+ )
+ if display:
+ display.print_status(friendly_msg, "error")
+ else:
+ click.echo(friendly_msg, err=True)
+ sys.exit(1)
+ else:
+ # Re-raise other configuration errors (they already have good messages)
+ raise
  
  # Helper function to check if qualitative analysis is enabled
  # Supports both top-level cfg.qualitative and nested cfg.analysis.qualitative
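The except clause pairs with the new ConfigurationError import from the first hunk. The class itself is not shown in this diff; the shape below is an assumption, kept minimal purely to illustrate the handler's branching:

    from pathlib import Path

    class ConfigurationError(Exception):
        """Assumed shape: raised when a config file is missing or invalid."""

    def load_config(path: str) -> dict:
        # Stand-in for ConfigLoader.load
        if not Path(path).exists():
            raise ConfigurationError(f"Configuration file not found: {path}")
        return {}

    try:
        cfg = load_config("config.yaml")
    except (FileNotFoundError, ConfigurationError) as e:
        if isinstance(e, FileNotFoundError) or "not found" in str(e).lower():
            print(f"{e}\nTo get started, copy config-sample.yaml to config.yaml.")
        else:
            raise  # other configuration errors already carry good messages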
@@ -797,7 +833,10 @@ def analyze(
  if display and display._live:
  display.update_progress_task(
  "main",
- description=f"Cache cleared: {cleared_counts['commits']} commits, {cleared_counts['total']} total",
+ description=(
+ f"Cache cleared: {cleared_counts['commits']} commits, "
+ f"{cleared_counts['total']} total"
+ ),
  completed=10,
  )
  elif display:
@@ -935,7 +974,6 @@ def analyze(
  from .security.reports import SecurityReportGenerator
  
  # GitAnalysisCache already imported at module level (line 24)
-
  # Load security configuration
  security_config = SecurityConfig.from_dict(
  cfg.analysis.security if hasattr(cfg.analysis, "security") else {}
@@ -1174,7 +1212,10 @@ def analyze(
  # We're in full-screen mode, update the task
  display.update_progress_task(
  "main",
- description=f"🔍 Discovering repositories from organization: {cfg.github.organization}",
+ description=(
+ f"🔍 Discovering repositories from organization: "
+ f"{cfg.github.organization}"
+ ),
  completed=15,
  )
  else:
@@ -1211,7 +1252,10 @@ def analyze(
  # We're in full-screen mode, update progress and initialize repo list
  display.update_progress_task(
  "main",
- description=f"✅ Found {len(discovered_repos)} repositories in {cfg.github.organization}",
+ description=(
+ f"✅ Found {len(discovered_repos)} repositories in "
+ f"{cfg.github.organization}"
+ ),
  completed=20,
  )
  # Initialize repository list for the full-screen display
@@ -1341,12 +1385,16 @@ def analyze(
  if display and hasattr(display, "_live") and display._live:
  display.update_progress_task(
  "repos",
- description=f"Found {len(cached_repos)} repos with cached data ({total_cached_commits} commits)",
+ description=(
+ f"Found {len(cached_repos)} repos with cached data "
+ f"({total_cached_commits} commits)"
+ ),
  completed=10,
  )
  else:
  click.echo(
- f"✅ Found {len(cached_repos)} repos with cached data ({total_cached_commits} commits)"
+ f"✅ Found {len(cached_repos)} repos with cached data "
+ f"({total_cached_commits} commits)"
  )
  else:
  # Force fetch: analyze all repositories
@@ -1370,12 +1418,16 @@ def analyze(
  if display and display._live:
  display.update_progress_task(
  "repos",
- description=f"Step 1: Fetching data for {len(repos_needing_analysis)} repositories...",
+ description=(
+ f"Step 1: Fetching data for "
+ f"{len(repos_needing_analysis)} repositories..."
+ ),
  completed=15,
  )
  else:
  click.echo(
- f"📥 Step 1: Fetching data for {len(repos_needing_analysis)} repositories..."
+ f"📥 Step 1: Fetching data for "
+ f"{len(repos_needing_analysis)} repositories..."
  )
  
  # Perform data fetch for repositories that need analysis
@@ -1404,7 +1456,10 @@ def analyze(
  # Update the existing task since display was already started
  display.update_progress_task(
  "repos",
- description=f"Step 1: Fetching data for {len(repos_needing_analysis)} repositories",
+ description=(
+ f"Step 1: Fetching data for "
+ f"{len(repos_needing_analysis)} repositories"
+ ),
  completed=0,
  )
  
@@ -1450,7 +1505,8 @@ def analyze(
  description=f"Analyzing {len(repos_needing_analysis)} repositories",
  )
  
- # Initialize ALL repositories (both cached and to-be-fetched) with their status
+ # Initialize ALL repositories (both cached and to-be-fetched)
+ # with their status
  all_repo_list = []
  
  # Add cached repos as COMPLETE
@@ -1486,14 +1542,18 @@ def analyze(
  repo_display_name = repo_config.name or project_key
  progress.set_description(
  repos_progress_ctx,
- f"🔄 Analyzing repository: {repo_display_name} ({idx}/{len(repos_needing_analysis)})",
+ f"🔄 Analyzing repository: {repo_display_name} "
+ f"({idx}/{len(repos_needing_analysis)})",
  )
  
  # Also update the display if available
  if display:
  display.update_progress_task(
  "repos",
- description=f"🔄 Processing: {repo_display_name} ({idx}/{len(repos_needing_analysis)})",
+ description=(
+ f"🔄 Processing: {repo_display_name} "
+ f"({idx}/{len(repos_needing_analysis)})"
+ ),
  completed=idx - 1,
  )
  # Update repository status to processing
@@ -1551,7 +1611,8 @@ def analyze(
  
  if display:
  display.print_status(
- f" ✅ {project_key}: {result['stats']['total_commits']} commits, "
+ f" ✅ {project_key}: "
+ f"{result['stats']['total_commits']} commits, "
  f"{result['stats']['unique_tickets']} tickets",
  "success",
  )
@@ -1624,21 +1685,27 @@ def analyze(
  )
  if repo_status["failed_updates"] > 0:
  logger.warning(
- " ⚠️ Some repositories failed to fetch updates. Analysis uses potentially stale data.\n"
- " Check authentication, network connectivity, or try with --skip-remote-fetch."
+ " ⚠️ Some repositories failed to fetch updates. "
+ "Analysis uses potentially stale data.\n"
+ " Check authentication, network connectivity, or try "
+ "with --skip-remote-fetch."
  )
  
  if display and display._live:
  display.update_progress_task(
  "repos",
- description=f"Step 1 complete: {total_commits} commits, {total_tickets} tickets fetched",
+ description=(
+ f"Step 1 complete: {total_commits} commits, "
+ f"{total_tickets} tickets fetched"
+ ),
  completed=100,
  )
  # Stop the live display after Step 1
  display.stop_live_display()
  else:
  click.echo(
- f"📥 Step 1 complete: {total_commits} commits, {total_tickets} tickets fetched"
+ f"📥 Step 1 complete: {total_commits} commits, "
+ f"{total_tickets} tickets fetched"
  )
  else:
  if display and display._live:
@@ -1690,41 +1757,50 @@ def analyze(
  validation_passed = True
  if display:
  display.print_status(
- f"✅ Data validation passed: {stored_commits} commits, {existing_batches} batches ready",
+ f"✅ Data validation passed: {stored_commits} commits, "
+ f"{existing_batches} batches ready",
  "success",
  )
  else:
  click.echo(
- f"✅ Data validation passed: {stored_commits} commits, {existing_batches} batches ready"
+ f"✅ Data validation passed: {stored_commits} commits, "
+ f"{existing_batches} batches ready"
  )
  
  elif stored_commits > 0 and existing_batches == 0:
  # We have commits but no batches - this shouldn't happen but we can recover
  if display:
  display.print_status(
- f"⚠️ Found {stored_commits} commits but no daily batches - data inconsistency detected",
+ f"⚠️ Found {stored_commits} commits but no daily batches - "
+ f"data inconsistency detected",
  "warning",
  )
  else:
  click.echo(
- f"⚠️ Found {stored_commits} commits but no daily batches - data inconsistency detected"
+ f"⚠️ Found {stored_commits} commits but no daily batches - "
+ f"data inconsistency detected"
  )
  
  elif stored_commits == 0 and total_commits > 0:
  # Step 1 claimed success but no commits were stored - critical error
- error_msg = f"❌ VALIDATION FAILED: Step 1 reported {total_commits} commits but database contains 0 commits for date range"
+ error_msg = (
+ f"❌ VALIDATION FAILED: Step 1 reported {total_commits} commits "
+ f"but database contains 0 commits for date range"
+ )
  if display:
  display.print_status(error_msg, "error")
  else:
  click.echo(error_msg)
  click.echo(
- f" 📅 Date range: {start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')}"
+ f" 📅 Date range: {start_date.strftime('%Y-%m-%d')} to "
+ f"{end_date.strftime('%Y-%m-%d')}"
  )
  click.echo(
  f" 📊 Step 1 stats: {total_commits} commits, {total_tickets} tickets"
  )
  click.echo(
- f" 🗃️ Database reality: {stored_commits} commits, {existing_batches} batches"
+ f" 🗃️ Database reality: {stored_commits} commits, "
+ f"{existing_batches} batches"
  )
  click.echo(
  " 💡 This suggests a timezone, date filtering, or database storage issue"
@@ -1737,12 +1813,14 @@ def analyze(
  # No data at all - need to fetch or explain why
  if display:
  display.print_status(
- "📊 No commits or batches found for date range - proceeding with data fetch",
+ "📊 No commits or batches found for date range - "
+ "proceeding with data fetch",
  "warning",
  )
  else:
  click.echo(
- "📊 No commits or batches found for date range - proceeding with data fetch"
+ "📊 No commits or batches found for date range - "
+ "proceeding with data fetch"
  )
  
  # PROCEED WITH INITIAL FETCH if validation didn't pass
@@ -1761,15 +1839,18 @@ def analyze(
  if repos_needing_analysis:
  if display:
  display.print_status(
- f"Initial fetch: Fetching data for {len(repos_needing_analysis)} repositories...",
+ f"Initial fetch: Fetching data for "
+ f"{len(repos_needing_analysis)} repositories...",
  "info",
  )
  else:
  click.echo(
- f"🚨 Initial fetch: Fetching data for {len(repos_needing_analysis)} repositories..."
+ f"🚨 Initial fetch: Fetching data for "
+ f"{len(repos_needing_analysis)} repositories..."
  )
  click.echo(
- " 📋 Reason: Need to ensure commits and batches exist for classification"
+ " 📋 Reason: Need to ensure commits and batches exist "
+ "for classification"
  )
  
  # Perform data fetch for repositories that need analysis
@@ -1812,22 +1893,26 @@ def analyze(
  if retry_count > 0:
  if display:
  display.print_status(
- f" 🔄 Retry {retry_count}/{max_retries}: {repo_config.github_repo}",
+ f" 🔄 Retry {retry_count}/{max_retries}: "
+ f"{repo_config.github_repo}",
  "warning",
  )
  else:
  click.echo(
- f" 🔄 Retry {retry_count}/{max_retries}: {repo_config.github_repo}"
+ f" 🔄 Retry {retry_count}/{max_retries}: "
+ f"{repo_config.github_repo}"
  )
  else:
  if display:
  display.print_status(
- f" 📥 Cloning {repo_config.github_repo} from GitHub...",
+ f" 📥 Cloning {repo_config.github_repo} "
+ f"from GitHub...",
  "info",
  )
  else:
  click.echo(
- f" 📥 Cloning {repo_config.github_repo} from GitHub..."
+ f" 📥 Cloning {repo_config.github_repo} "
+ f"from GitHub..."
  )
  
  try:
@@ -1870,7 +1955,7 @@ def analyze(
  cmd,
  env=env,
  stdout=subprocess.PIPE,
- stderr=None, # Let stderr (progress) flow to terminal
+ stderr=None, # Let stderr flow to terminal
  text=True,
  timeout=timeout_seconds,
  )
@@ -1890,12 +1975,14 @@ def analyze(
  ):
  if display:
  display.print_status(
- f" ❌ Authentication failed for {repo_config.github_repo}",
+ f" ❌ Authentication failed for "
+ f"{repo_config.github_repo}",
  "error",
  )
  else:
  click.echo(
- f" ❌ Authentication failed for {repo_config.github_repo}"
+ f" ❌ Authentication failed for "
+ f"{repo_config.github_repo}"
  )
  break # Don't retry auth failures
  else:
@@ -1909,24 +1996,28 @@ def analyze(
  clone_success = True
  if display:
  display.print_status(
- f" ✅ Cloned {repo_config.github_repo} ({elapsed:.1f}s)",
+ f" ✅ Cloned {repo_config.github_repo} "
+ f"({elapsed:.1f}s)",
  "success",
  )
  else:
  click.echo(
- f" ✅ Cloned {repo_config.github_repo} ({elapsed:.1f}s)"
+ f" ✅ Cloned {repo_config.github_repo} "
+ f"({elapsed:.1f}s)"
  )
  
  except subprocess.TimeoutExpired:
  retry_count += 1
  if display:
  display.print_status(
- f" ⏱️ Clone timeout after {timeout_seconds}s: {repo_config.github_repo}",
+ f" ⏱️ Clone timeout ({timeout_seconds}s): "
+ f"{repo_config.github_repo}",
  "error",
  )
  else:
  click.echo(
- f" ⏱️ Clone timeout after {timeout_seconds}s: {repo_config.github_repo}"
+ f" ⏱️ Clone timeout ({timeout_seconds}s): "
+ f"{repo_config.github_repo}"
  )
  # Clean up partial clone
  if repo_path.exists():
@@ -1936,12 +2027,14 @@ def analyze(
  if retry_count > max_retries:
  if display:
  display.print_status(
- f" ❌ Skipping {repo_config.github_repo} after {max_retries} timeouts",
+ f" ❌ Skipping {repo_config.github_repo} "
+ f"after {max_retries} timeouts",
  "error",
  )
  else:
  click.echo(
- f" ❌ Skipping {repo_config.github_repo} after {max_retries} timeouts"
+ f" ❌ Skipping {repo_config.github_repo} "
+ f"after {max_retries} timeouts"
  )
  break
  continue # Try again
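The hunks above reword messages inside a clone loop that retries on timeout, deletes partial clones before retrying, and refuses to retry authentication failures. A condensed standalone sketch of that control flow (retry limits and the command line are illustrative, not the package's exact values):

    import shutil
    import subprocess
    from pathlib import Path

    def clone_with_retry(url: str, dest: Path, max_retries: int = 2,
                         timeout_seconds: int = 300) -> bool:
        retry_count = 0
        while retry_count <= max_retries:
            try:
                result = subprocess.run(
                    ["git", "clone", "--progress", url, str(dest)],
                    stdout=subprocess.PIPE,
                    stderr=None,  # let git's progress output flow to the terminal
                    text=True,
                    timeout=timeout_seconds,
                )
                if result.returncode == 0:
                    return True
                return False  # e.g. authentication failure: don't retry
            except subprocess.TimeoutExpired:
                retry_count += 1
                if dest.exists():
                    shutil.rmtree(dest)  # clean up the partial clone before retrying
        return False  # skipped after repeated timeouts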
@@ -2016,7 +2109,8 @@ def analyze(
  
  if display:
  display.print_status(
- f" ✅ {project_key}: {result['stats']['total_commits']} commits, "
+ f" ✅ {project_key}: "
+ f"{result['stats']['total_commits']} commits, "
  f"{result['stats']['unique_tickets']} tickets",
  "success",
  )
@@ -2037,7 +2131,8 @@ def analyze(
  except Exception as e:
  if display:
  display.print_status(
- f" ❌ Error fetching {project_key}: {e}", "error"
+ f" ❌ Error fetching {project_key}: {e}",
+ "error",
  )
  else:
  click.echo(f" ❌ Error fetching {project_key}: {e}")
@@ -2045,12 +2140,14 @@ def analyze(
  
  if display:
  display.print_status(
- f"Initial fetch complete: {total_commits} commits, {total_tickets} tickets",
+ f"Initial fetch complete: {total_commits} commits, "
+ f"{total_tickets} tickets",
  "success",
  )
  else:
  click.echo(
- f"🚨 Initial fetch complete: {total_commits} commits, {total_tickets} tickets"
+ f"🚨 Initial fetch complete: {total_commits} commits, "
+ f"{total_tickets} tickets"
  )
  
  # RE-VALIDATE after initial fetch
@@ -2078,7 +2175,10 @@ def analyze(
  )
  
  if final_commits == 0:
- error_msg = "❌ CRITICAL: Initial fetch completed but still 0 commits stored in database"
+ error_msg = (
+ "❌ CRITICAL: Initial fetch completed but still 0 commits "
+ "stored in database"
+ )
  if display:
  display.print_status(error_msg, "error")
  else:
@@ -2090,11 +2190,13 @@ def analyze(
  f" 📊 Initial fetch stats: {total_commits} commits reported"
  )
  click.echo(
- f" 🗃️ Database result: {final_commits} commits, {final_batches} batches"
+ f" 🗃️ Database result: {final_commits} commits, "
+ f"{final_batches} batches"
  )
  click.echo(" 🔍 Possible causes:")
  click.echo(
- " - Timezone mismatch between commit timestamps and analysis range"
+ " - Timezone mismatch between commit timestamps "
+ "and analysis range"
  )
  click.echo(" - Date filtering excluding all commits")
  click.echo(" - Database transaction not committed")
@@ -2102,17 +2204,20 @@ def analyze(
  " - Repository has no commits in the specified time range"
  )
  raise click.ClickException(
- "Initial fetch failed validation - no data available for classification"
+ "Initial fetch failed validation - "
+ "no data available for classification"
  )
  
  if display:
  display.print_status(
- f"✅ Post-fetch validation: {final_commits} commits, {final_batches} batches confirmed",
+ f"✅ Post-fetch validation: {final_commits} commits, "
+ f"{final_batches} batches confirmed",
  "success",
  )
  else:
  click.echo(
- f"✅ Post-fetch validation: {final_commits} commits, {final_batches} batches confirmed"
+ f"✅ Post-fetch validation: {final_commits} commits, "
+ f"{final_batches} batches confirmed"
  )
  
  # FINAL PRE-CLASSIFICATION CHECK: Ensure we have data before starting batch classifier
@@ -2139,39 +2244,52 @@ def analyze(
  )
  
  if pre_classification_commits == 0:
- error_msg = "❌ PRE-CLASSIFICATION CHECK FAILED: No commits available for batch classification"
+ error_msg = (
+ "❌ PRE-CLASSIFICATION CHECK FAILED: "
+ "No commits available for batch classification"
+ )
  if display:
  display.print_status(error_msg, "error")
  else:
  click.echo(error_msg)
  click.echo(
- f" 📅 Date range: {start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')}"
+ f" 📅 Date range: {start_date.strftime('%Y-%m-%d')} to "
+ f"{end_date.strftime('%Y-%m-%d')}"
  )
  click.echo(
- f" 🗃️ Database state: {pre_classification_commits} commits, {pre_classification_batches} batches"
+ f" 🗃️ Database state: {pre_classification_commits} commits, "
+ f"{pre_classification_batches} batches"
  )
  click.echo(
- " 💡 This indicates all previous validation and fetch steps failed to store any data"
+ " 💡 This indicates all previous validation and fetch steps "
+ "failed to store any data"
  )
  raise click.ClickException(
  "No data available for batch classification - cannot proceed"
  )
  
  if pre_classification_batches == 0:
- error_msg = "❌ PRE-CLASSIFICATION CHECK FAILED: No daily batches available for classification"
+ error_msg = (
+ "❌ PRE-CLASSIFICATION CHECK FAILED: "
+ "No daily batches available for classification"
+ )
  if display:
  display.print_status(error_msg, "error")
  else:
  click.echo(error_msg)
  click.echo(
- f" 📅 Date range: {start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')}"
+ f" 📅 Date range: {start_date.strftime('%Y-%m-%d')} to "
+ f"{end_date.strftime('%Y-%m-%d')}"
+ )
+ click.echo(
+ f" 🗃️ Database state: {pre_classification_commits} commits, "
+ f"{pre_classification_batches} batches"
  )
  click.echo(
- f" 🗃️ Database state: {pre_classification_commits} commits, {pre_classification_batches} batches"
+ " 💡 Commits exist but no daily batches - " "batch creation failed"
  )
- click.echo(" 💡 Commits exist but no daily batches - batch creation failed")
  raise click.ClickException(
- "No batches available for classification - batch creation process failed"
+ "No batches available for classification - " "batch creation process failed"
  )
  
  if display:
@@ -2227,7 +2345,7 @@ def analyze(
  cache_dir=cfg.cache.directory,
  llm_config=llm_config,
  batch_size=50,
- confidence_threshold=cfg.analysis.llm_classification.confidence_threshold,
+ confidence_threshold=(cfg.analysis.llm_classification.confidence_threshold),
  fallback_enabled=True,
  )
  
@@ -2238,7 +2356,8 @@ def analyze(
  project_keys.append(project_key)
  
  # Run batch classification
- # Note: The batch classifier will create its own progress bars, but our display should remain active
+ # Note: The batch classifier will create its own progress bars,
+ # but our display should remain active
  classification_result = batch_classifier.classify_date_range(
  start_date=start_date,
  end_date=end_date,
@@ -2249,7 +2368,8 @@ def analyze(
  # Update display progress after classification
  if display and hasattr(display, "update_progress_task"):
  display.update_progress_task(
- "repos", completed=total_batches if "total_batches" in locals() else 0
+ "repos",
+ completed=total_batches if "total_batches" in locals() else 0,
  )
  
  if display:
@@ -2257,7 +2377,8 @@ def analyze(
  display.complete_progress_task("repos", "Batch classification complete")
  display.stop_live_display()
  display.print_status(
- f"✅ Batch classification completed: {classification_result['processed_batches']} batches, "
+ f"✅ Batch classification completed: "
+ f"{classification_result['processed_batches']} batches, "
  f"{classification_result['total_commits']} commits",
  "success",
  )
@@ -2591,9 +2712,12 @@ def analyze(
  else:
  commit["project_key"] = commit.get("inferred_project", "UNKNOWN")
  
- commit["canonical_id"] = identity_resolver.resolve_developer(
+ canonical_id = identity_resolver.resolve_developer(
  commit["author_name"], commit["author_email"]
  )
+ commit["canonical_id"] = canonical_id
+ # Also add canonical display name for reports
+ commit["canonical_name"] = identity_resolver.get_canonical_name(canonical_id)
  
  all_commits.extend(commits)
  if display:
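Splitting the resolve call into a local variable lets 3.13.0 attach both the canonical ID and, newly, a canonical display name to each commit, so reports can group a developer's multiple emails under one label. A toy resolver with the same two-method interface (the real one is database-backed and not shown in this diff):

    class ToyIdentityResolver:
        """Maps (name, email) pairs onto one canonical identity per developer."""

        def __init__(self, aliases: dict, names: dict) -> None:
            self._aliases = aliases  # email -> canonical id
            self._names = names      # canonical id -> display name

        def resolve_developer(self, name: str, email: str) -> str:
            return self._aliases.get(email.lower(), email.lower())

        def get_canonical_name(self, canonical_id: str) -> str:
            return self._names.get(canonical_id, canonical_id)

    resolver = ToyIdentityResolver(
        aliases={"jdoe@old.example": "jdoe@example.com"},
        names={"jdoe@example.com": "Jane Doe"},
    )
    commit = {"author_name": "jdoe", "author_email": "jdoe@old.example"}
    canonical_id = resolver.resolve_developer(commit["author_name"], commit["author_email"])
    commit["canonical_id"] = canonical_id
    commit["canonical_name"] = resolver.get_canonical_name(canonical_id)  # "Jane Doe"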
@@ -3128,7 +3252,10 @@ def analyze(
  click.echo(f" 💰 LLM Cost: ${llm_stats['total_cost']:.4f}")
  
  except ImportError as e:
- error_msg = f"Qualitative analysis dependencies missing: {e}\n\n💡 Install with: pip install spacy scikit-learn openai tiktoken"
+ error_msg = (
+ f"Qualitative analysis dependencies missing: {e}\n\n"
+ "💡 Install with: pip install spacy scikit-learn openai tiktoken"
+ )
  if display:
  display.show_error(error_msg)
  else:
@@ -3169,7 +3296,11 @@ def analyze(
  else:
  click.echo(" ⏭️ Continuing with standard analysis...")
  elif enable_qualitative and not get_qualitative_config():
- warning_msg = "Qualitative analysis requested but not configured in config file\n\nAdd a 'qualitative:' section (top-level or under 'analysis:') to your configuration"
+ warning_msg = (
+ "Qualitative analysis requested but not configured in config file\n\n"
+ "Add a 'qualitative:' section (top-level or under 'analysis:') "
+ "to your configuration"
+ )
  if display:
  display.show_warning(warning_msg)
  else:
@@ -4115,7 +4246,8 @@ def analyze(
  "\n💡 Note: Token/cost tracking is only available with qualitative analysis enabled."
  )
  click.echo(
- " Add 'qualitative:' section (top-level or under 'analysis:') to your config to enable detailed LLM cost tracking."
+ " Add 'qualitative:' section (top-level or under 'analysis:') "
+ "to your config to enable detailed LLM cost tracking."
  )
  
  # Display cache statistics in simple format
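The large hunk below adds two identity-management commands. The core of the first, create-alias-interactive, is turning input such as "1 3 7" into a validated selection of at least two developers before a merge is offered; reduced to a pure function, that parsing looks roughly like this (an illustrative sketch, not the command's exact code):

    def parse_selection(raw: str, count: int) -> list:
        """Parse space-separated 1-based indices; raise ValueError on bad input."""
        indices = []
        for token in raw.split():
            num = int(token)  # non-numeric tokens raise ValueError here
            if not 1 <= num <= count:
                raise ValueError(f"{num} is out of range (1-{count})")
            indices.append(num)
        if len(set(indices)) < 2:
            raise ValueError("select at least 2 developers to merge")
        return sorted(set(indices))

    assert parse_selection("3 1 3", count=5) == [1, 3]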
@@ -5521,6 +5653,532 @@ def list_developers(config: Path) -> None:
  sys.exit(1)
  
  
+ @cli.command(name="create-alias-interactive")
+ @click.option(
+ "--config",
+ "-c",
+ type=click.Path(exists=True, path_type=Path),
+ required=True,
+ help="Path to YAML configuration file",
+ )
+ @click.option(
+ "--output",
+ "-o",
+ type=click.Path(path_type=Path),
+ help="Output path for aliases.yaml (default: same dir as config)",
+ )
+ def create_alias_interactive(config: Path, output: Optional[Path]) -> None:
+ """Create developer aliases interactively with numbered selection.
+
+ \b
+ This command provides an interactive interface to create developer
+ aliases by selecting from a numbered list of developers in the database.
+ You can merge multiple developer identities and save them to aliases.yaml.
+
+ \b
+ EXAMPLES:
+ # Start interactive alias creation
+ gitflow-analytics create-alias-interactive -c config.yaml
+
+ # Save to specific location
+ gitflow-analytics create-alias-interactive -c config.yaml -o ~/shared/aliases.yaml
+
+ \b
+ WORKFLOW:
+ 1. Displays numbered list of all developers from database
+ 2. Select multiple developer numbers to merge (space-separated)
+ 3. Choose which one should be the primary identity
+ 4. Create alias mapping
+ 5. Option to save to aliases.yaml
+ 6. Option to continue creating more aliases
+
+ \b
+ Useful for:
+ - Consolidating developer identities across email addresses
+ - Cleaning up duplicate developer entries
+ - Maintaining consistent identity resolution
+ """
+ from .config.aliases import AliasesManager, DeveloperAlias
+ from .core.identity import DeveloperIdentityResolver
+
+ try:
+ # Load configuration
+ cfg = ConfigLoader.load(config)
+
+ # Determine output path for aliases file
+ if not output:
+ output = config.parent / "aliases.yaml"
+
+ # Initialize identity resolver
+ identity_resolver = DeveloperIdentityResolver(cfg.cache.directory / "identities.db")
+
+ # Initialize aliases manager
+ aliases_manager = AliasesManager(output if output.exists() else None)
+
+ click.echo("\n" + "=" * 80)
+ click.echo(click.style("🔧 Interactive Alias Creator", fg="cyan", bold=True))
+ click.echo("=" * 80 + "\n")
+
+ # Main loop for creating multiple aliases
+ continue_creating = True
+
+ while continue_creating:
+ # Get all developers from database
+ developers = identity_resolver.get_developer_stats()
+
+ if not developers:
+ click.echo("❌ No developers found. Run analysis first.")
+ sys.exit(1)
+
+ # Display numbered list of developers
+ click.echo(
+ click.style(f"\n📋 Found {len(developers)} developers:\n", fg="green", bold=True)
+ )
+ click.echo(f"{'#':<6} {'Name':<30} {'Email':<40} {'Commits':<10}")
+ click.echo("-" * 86)
+
+ for idx, dev in enumerate(developers, start=1):
+ click.echo(
+ f"{idx:<6} "
+ f"{dev['primary_name']:<30} "
+ f"{dev['primary_email']:<40} "
+ f"{dev['total_commits']:<10}"
+ )
+
+ click.echo()
+
+ # Get user selection
+ while True:
+ try:
+ selection_input = click.prompt(
+ click.style(
+ "Select developers to merge (enter numbers separated by spaces, or 'q' to quit)",
+ fg="yellow",
+ ),
+ type=str,
+ ).strip()
+
+ # Handle quit
+ if selection_input.lower() in ["q", "quit", "exit"]:
+ click.echo("\n👋 Exiting alias creation.")
+ sys.exit(0)
+
+ # Parse selection
+ selected_indices = []
+ for num_str in selection_input.split():
+ try:
+ num = int(num_str)
+ if 1 <= num <= len(developers):
+ selected_indices.append(num)
+ else:
+ click.echo(
+ click.style(
+ f"⚠️ Number {num} is out of range (1-{len(developers)})",
+ fg="red",
+ )
+ )
+ raise ValueError("Invalid range")
+ except ValueError:
+ click.echo(
+ click.style(
+ f"⚠️ Invalid input: '{num_str}' is not a valid number", fg="red"
+ )
+ )
+ raise
+
+ # Check minimum selection
+ if len(selected_indices) < 2:
+ click.echo(
+ click.style(
+ "⚠️ You must select at least 2 developers to merge", fg="red"
+ )
+ )
+ continue
+
+ # Remove duplicates and sort
+ selected_indices = sorted(set(selected_indices))
+ break
+
+ except ValueError:
+ continue
+ except click.exceptions.Abort:
+ click.echo("\n\n👋 Exiting alias creation.")
+ sys.exit(0)
+
+ # Display selected developers
+ selected_devs = [developers[idx - 1] for idx in selected_indices]
+
+ click.echo(click.style("\n✅ Selected developers:", fg="green", bold=True))
+ for idx, dev in zip(selected_indices, selected_devs):
+ click.echo(
+ f" [{idx}] {dev['primary_name']} <{dev['primary_email']}> "
+ f"({dev['total_commits']} commits)"
+ )
+
+ # Ask which one should be primary
+ click.echo()
+ while True:
+ try:
+ primary_input = click.prompt(
+ click.style(
+ f"Which developer should be the primary identity? "
+ f"Enter number ({', '.join(map(str, selected_indices))})",
+ fg="yellow",
+ ),
+ type=int,
+ )
+
+ if primary_input in selected_indices:
+ primary_idx = primary_input
+ break
+ else:
+ click.echo(
+ click.style(
+ f"⚠️ Please select one of: {', '.join(map(str, selected_indices))}",
+ fg="red",
+ )
+ )
+ except ValueError:
+ click.echo(click.style("⚠️ Please enter a valid number", fg="red"))
+ except click.exceptions.Abort:
+ click.echo("\n\n👋 Exiting alias creation.")
+ sys.exit(0)
+
+ # Build alias configuration
+ primary_dev = developers[primary_idx - 1]
+ alias_emails = [
+ dev["primary_email"]
+ for idx, dev in zip(selected_indices, selected_devs)
+ if idx != primary_idx
+ ]
+
+ # Create the alias
+ new_alias = DeveloperAlias(
+ primary_email=primary_dev["primary_email"],
+ aliases=alias_emails,
+ name=primary_dev["primary_name"],
+ confidence=1.0, # Manual aliases have full confidence
+ reasoning="Manually created via interactive CLI",
+ )
+
+ # Display the alias configuration
+ click.echo(click.style("\n📝 Alias Configuration:", fg="cyan", bold=True))
+ click.echo(f" Primary: {new_alias.name} <{new_alias.primary_email}>")
+ click.echo(" Aliases:")
+ for alias_email in new_alias.aliases:
+ click.echo(f" - {alias_email}")
+
+ # Add to aliases manager
+ aliases_manager.add_alias(new_alias)
+
+ # Ask if user wants to save
+ click.echo()
+ if click.confirm(click.style(f"💾 Save alias to {output}?", fg="green"), default=True):
+ try:
+ aliases_manager.save()
+ click.echo(click.style(f"✅ Alias saved to {output}", fg="green"))
+
+ # Also update the database directly by merging identities
+ # For each alias email, find its canonical_id and merge with primary
+ for alias_email in alias_emails:
+ # Find the developer entry for this alias email
+ alias_dev = next(
+ (dev for dev in developers if dev["primary_email"] == alias_email), None
+ )
+
+ if alias_dev:
+ # Merge using canonical IDs
+ identity_resolver.merge_identities(
+ primary_dev["canonical_id"], # Primary's canonical_id
+ alias_dev["canonical_id"], # Alias's canonical_id
+ )
+ else:
+ # Edge case: alias email doesn't match any developer
+ # This shouldn't happen, but log a warning
+ click.echo(
+ click.style(
+ f"⚠️ Warning: Could not find developer entry for {alias_email}",
+ fg="yellow",
+ )
+ )
+
+ click.echo(
+ click.style("✅ Database updated with merged identities", fg="green")
+ )
+
+ except Exception as e:
+ click.echo(click.style(f"❌ Error saving alias: {e}", fg="red"), err=True)
+ else:
+ click.echo(click.style("⏭️ Alias not saved", fg="yellow"))
+
+ # Ask if user wants to create more aliases
+ click.echo()
+ if not click.confirm(click.style("🔄 Create another alias?", fg="cyan"), default=True):
+ continue_creating = False
+
+ click.echo(click.style("\n✅ Alias creation completed!", fg="green", bold=True))
+ click.echo(f"📄 Aliases file: {output}")
+ click.echo(f"\n💡 To use these aliases, ensure your config references: {output}\n")
+
+ except KeyboardInterrupt:
+ click.echo("\n\n👋 Interrupted by user. Exiting.")
+ sys.exit(0)
+ except Exception as e:
+ click.echo(click.style(f"\n❌ Error: {e}", fg="red"), err=True)
+ import traceback
+
+ traceback.print_exc()
+ sys.exit(1)
+
+
+ @cli.command(name="alias-rename")
+ @click.option(
+ "--config",
+ "-c",
+ type=click.Path(exists=True, path_type=Path),
+ required=True,
+ help="Path to YAML configuration file",
+ )
+ @click.option(
+ "--old-name",
+ required=True,
+ help="Current canonical name to rename (must match a name in manual_mappings)",
+ )
+ @click.option(
+ "--new-name",
+ required=True,
+ help="New canonical display name to use in reports",
+ )
+ @click.option(
+ "--update-cache",
+ is_flag=True,
+ help="Update cached database records with the new name",
+ )
+ @click.option(
+ "--dry-run",
+ is_flag=True,
+ help="Show what would be changed without applying changes",
+ )
+ def alias_rename(
+ config: Path,
+ old_name: str,
+ new_name: str,
+ update_cache: bool,
+ dry_run: bool,
+ ) -> None:
+ """Rename a developer's canonical display name.
+
+ \b
+ Updates the developer's name in:
+ - Configuration file (analysis.identity.manual_mappings)
+ - Database cache (if --update-cache is specified)
+
+ \b
+ EXAMPLES:
+ # Rename with dry-run to see changes
+ gitflow-analytics alias-rename -c config.yaml \\
+ --old-name "bianco-zaelot" \\
+ --new-name "Emiliozzo Bianco" \\
+ --dry-run
+
+ # Apply rename to config only
+ gitflow-analytics alias-rename -c config.yaml \\
+ --old-name "bianco-zaelot" \\
+ --new-name "Emiliozzo Bianco"
+
+ # Apply rename to config and update cache
+ gitflow-analytics alias-rename -c config.yaml \\
+ --old-name "bianco-zaelot" \\
+ --new-name "Emiliozzo Bianco" \\
+ --update-cache
+
+ \b
+ NOTE:
+ This command searches through analysis.identity.manual_mappings
+ in your config file and updates the 'name' field for the matching
+ entry. It preserves all other fields (primary_email, aliases).
+ """
+ try:
+ from .core.identity import DeveloperIdentityResolver
+
+ # Validate inputs
+ if not old_name.strip():
+ click.echo("❌ Error: --old-name cannot be empty", err=True)
+ sys.exit(1)
+
+ if not new_name.strip():
+ click.echo("❌ Error: --new-name cannot be empty", err=True)
+ sys.exit(1)
+
+ old_name = old_name.strip()
+ new_name = new_name.strip()
+
+ if old_name == new_name:
+ click.echo("❌ Error: old-name and new-name are identical", err=True)
+ sys.exit(1)
+
+ # Load the YAML config file
+ click.echo(f"\n📋 Loading configuration from {config}...")
+
+ try:
+ with open(config, "r", encoding="utf-8") as f:
+ config_data = yaml.safe_load(f)
+ except Exception as e:
+ click.echo(f"❌ Error loading config file: {e}", err=True)
+ sys.exit(1)
+
+ # Navigate to analysis.identity.manual_mappings
+ if "analysis" not in config_data:
+ click.echo("❌ Error: 'analysis' section not found in config", err=True)
+ sys.exit(1)
+
+ if "identity" not in config_data["analysis"]:
+ click.echo("❌ Error: 'analysis.identity' section not found in config", err=True)
+ sys.exit(1)
+
+ if "manual_mappings" not in config_data["analysis"]["identity"]:
+ click.echo("❌ Error: 'analysis.identity.manual_mappings' not found in config", err=True)
+ sys.exit(1)
+
+ manual_mappings = config_data["analysis"]["identity"]["manual_mappings"]
+
+ if not manual_mappings:
+ click.echo("❌ Error: manual_mappings is empty", err=True)
+ sys.exit(1)
+
+ # Find the matching entry
+ matching_entry = None
+ matching_index = None
+
+ for idx, mapping in enumerate(manual_mappings):
+ if mapping.get("name") == old_name:
+ matching_entry = mapping
+ matching_index = idx
+ break
+
+ if not matching_entry:
+ click.echo(f"❌ Error: No manual mapping found with name '{old_name}'", err=True)
+ click.echo("\nAvailable names in manual_mappings:")
+ for mapping in manual_mappings:
+ if "name" in mapping:
+ click.echo(f" - {mapping['name']}")
+ sys.exit(1)
+
+ # Display what will be changed
+ click.echo(f"\n🔍 Found matching entry:")
+ click.echo(f" Current name: {old_name}")
+ click.echo(f" New name: {new_name}")
+ click.echo(f" Email: {matching_entry.get('primary_email', 'N/A')}")
+ click.echo(f" Aliases: {len(matching_entry.get('aliases', []))} email(s)")
+
+ if dry_run:
+ click.echo(f"\n🔎 DRY RUN - No changes will be made")
+
+ # Update the config file
+ if not dry_run:
+ click.echo(f"\n📝 Updating configuration file...")
+ manual_mappings[matching_index]["name"] = new_name
+
+ try:
+ with open(config, "w", encoding="utf-8") as f:
+ yaml.dump(config_data, f, default_flow_style=False, allow_unicode=True, sort_keys=False)
+ click.echo(f"✅ Configuration file updated")
+ except Exception as e:
+ click.echo(f"❌ Error writing config file: {e}", err=True)
+ sys.exit(1)
+ else:
+ click.echo(f" [Would update config: {config}]")
+
+ # Update database cache if requested
+ if update_cache:
+ click.echo(f"\n💾 Checking database cache...")
+
+ # Load config to get cache directory
+ cfg = ConfigLoader.load(config)
+ identity_db_path = cfg.cache.directory / "identities.db"
+
+ if not identity_db_path.exists():
+ click.echo(f"⚠️ Warning: Identity database not found at {identity_db_path}")
+ click.echo(f" Skipping cache update")
+ else:
+ # Initialize identity resolver to access database
+ identity_resolver = DeveloperIdentityResolver(
+ str(identity_db_path),
+ manual_mappings=None, # Don't apply mappings during rename
+ )
+
+ # Count affected records
+ from sqlalchemy import text
+
+ with identity_resolver.get_session() as session:
+ # Count developer_identities records
+ result = session.execute(
+ text("SELECT COUNT(*) FROM developer_identities WHERE primary_name = :old_name"),
+ {"old_name": old_name}
+ )
+ identity_count = result.scalar()
+
+ # Count developer_aliases records
+ result = session.execute(
+ text("SELECT COUNT(*) FROM developer_aliases WHERE name = :old_name"),
+ {"old_name": old_name}
+ )
+ alias_count = result.scalar()
+
+ click.echo(f" Found {identity_count} identity record(s)")
+ click.echo(f" Found {alias_count} alias record(s)")
+
+ if identity_count == 0 and alias_count == 0:
+ click.echo(f" ℹ️ No database records to update")
+ elif not dry_run:
+ click.echo(f" Updating database records...")
+
+ with identity_resolver.get_session() as session:
+ # Update developer_identities
+ if identity_count > 0:
+ session.execute(
+ text("UPDATE developer_identities SET primary_name = :new_name WHERE primary_name = :old_name"),
+ {"new_name": new_name, "old_name": old_name}
+ )
+
+ # Update developer_aliases
+ if alias_count > 0:
+ session.execute(
+ text("UPDATE developer_aliases SET name = :new_name WHERE name = :old_name"),
+ {"new_name": new_name, "old_name": old_name}
+ )
+
+ click.echo(f" ✅ Database updated")
+ else:
+ click.echo(f" [Would update {identity_count + alias_count} database record(s)]")
+
+ # Summary
+ click.echo(f"\n{'🔎 DRY RUN SUMMARY' if dry_run else '✅ RENAME COMPLETE'}")
+ click.echo(f" Old name: {old_name}")
+ click.echo(f" New name: {new_name}")
+ click.echo(f" Config: {'Would update' if dry_run else 'Updated'}")
+ if update_cache:
+ click.echo(f" Cache: {'Would update' if dry_run else 'Updated'}")
+ else:
+ click.echo(f" Cache: Skipped (use --update-cache to update)")
+
+ if dry_run:
+ click.echo(f"\n💡 Run without --dry-run to apply changes")
+ else:
+ click.echo(f"\n💡 Next steps:")
+ click.echo(f" - Review the updated config file: {config}")
+ click.echo(f" - Re-run analysis to see updated reports with new name")
+
+ except KeyboardInterrupt:
+ click.echo("\n\n👋 Interrupted by user. Exiting.")
+ sys.exit(0)
+ except Exception as e:
+ click.echo(f"❌ Unexpected error: {e}", err=True)
+ import traceback
+ traceback.print_exc()
+ sys.exit(1)
+
+
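The cache update in alias-rename goes through SQLAlchemy's text() with bound parameters rather than string interpolation, so a name containing quotes cannot break or inject into the SQL. A standalone equivalent against a throwaway SQLite database (the one-column table is a stand-in for the real identity cache schema):

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite:///:memory:")
    with engine.begin() as conn:  # begin() commits on success
        conn.execute(text("CREATE TABLE developer_identities (primary_name TEXT)"))
        conn.execute(text("INSERT INTO developer_identities VALUES ('bianco-zaelot')"))
        # Bound parameters (:old_name / :new_name) keep arbitrary names safe
        conn.execute(
            text(
                "UPDATE developer_identities SET primary_name = :new_name "
                "WHERE primary_name = :old_name"
            ),
            {"new_name": "Emiliozzo Bianco", "old_name": "bianco-zaelot"},
        )
        renamed = conn.execute(
            text("SELECT primary_name FROM developer_identities")
        ).scalar()
        assert renamed == "Emiliozzo Bianco"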
  @cli.command()
  @click.option(
  "--config",
@@ -5943,31 +6601,31 @@ def show_help() -> None:
  ────────────────────
  1. Create a configuration file:
  cp config-sample.yaml myconfig.yaml
-
+
  2. Edit configuration with your repositories:
  repositories:
  - path: /path/to/repo
  branch: main
-
+
  3. Run your first analysis:
  gitflow-analytics -c myconfig.yaml --weeks 4
-
+
  4. View reports in the output directory
  
  🔧 COMMON WORKFLOWS
  ──────────────────
  Weekly team report:
  gitflow-analytics -c config.yaml --weeks 1
-
+
  Monthly metrics with all formats:
  gitflow-analytics -c config.yaml --weeks 4 --generate-csv
-
+
  Identity resolution:
  gitflow-analytics identities -c config.yaml
-
+
  Fresh analysis (bypass cache):
  gitflow-analytics -c config.yaml --clear-cache
-
+
  Quick config validation:
  gitflow-analytics -c config.yaml --validate-only
  
@@ -5984,15 +6642,15 @@ def show_help() -> None:
  Slow analysis?
  → Use caching (default) or reduce --weeks
  → Check cache stats: cache-stats command
-
+
  Wrong developer names?
  → Run: identities command
  → Add manual mappings to config
-
+
  Missing ticket references?
  → Check ticket_platforms configuration
  → Verify commit message format
-
+
  API errors?
  → Verify credentials in config or .env
  → Check rate limits
@@ -6005,13 +6663,13 @@ def show_help() -> None:
  • weekly_metrics: Time-based trends
  • activity_distribution: Work patterns
  • untracked_commits: Process gaps
-
+
  Narrative Report (default):
  • Executive summary
  • Team composition analysis
  • Development patterns
  • Recommendations
-
+
  JSON Export:
  • Complete data for integration
  • All metrics and metadata
@@ -6022,12 +6680,12 @@ def show_help() -> None:
  • Pull requests and reviews
  • Issues and milestones
  • DORA metrics
-
+
  JIRA:
  • Story points and velocity
  • Sprint tracking
  • Issue types
-
+
  ClickUp:
  • Task tracking
  • Time estimates