gitflow-analytics 3.12.6__py3-none-any.whl → 3.13.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gitflow_analytics/_version.py +1 -1
- gitflow_analytics/cli.py +853 -129
- gitflow_analytics/cli_wizards/__init__.py +9 -3
- gitflow_analytics/cli_wizards/menu.py +798 -0
- gitflow_analytics/config/loader.py +3 -1
- gitflow_analytics/config/profiles.py +1 -2
- gitflow_analytics/core/data_fetcher.py +0 -2
- gitflow_analytics/core/identity.py +2 -0
- gitflow_analytics/extractors/tickets.py +3 -1
- gitflow_analytics/integrations/github_integration.py +1 -1
- gitflow_analytics/integrations/jira_integration.py +1 -1
- gitflow_analytics/qualitative/chatgpt_analyzer.py +15 -15
- gitflow_analytics/qualitative/classifiers/llm/prompts.py +1 -1
- gitflow_analytics/qualitative/core/processor.py +1 -2
- gitflow_analytics/qualitative/enhanced_analyzer.py +24 -8
- gitflow_analytics/reports/narrative_writer.py +13 -9
- gitflow_analytics/security/reports/__init__.py +5 -0
- gitflow_analytics/security/reports/security_report.py +358 -0
- gitflow_analytics/ui/progress_display.py +14 -6
- gitflow_analytics/verify_activity.py +1 -1
- {gitflow_analytics-3.12.6.dist-info → gitflow_analytics-3.13.5.dist-info}/METADATA +37 -1
- {gitflow_analytics-3.12.6.dist-info → gitflow_analytics-3.13.5.dist-info}/RECORD +26 -23
- {gitflow_analytics-3.12.6.dist-info → gitflow_analytics-3.13.5.dist-info}/WHEEL +0 -0
- {gitflow_analytics-3.12.6.dist-info → gitflow_analytics-3.13.5.dist-info}/entry_points.txt +0 -0
- {gitflow_analytics-3.12.6.dist-info → gitflow_analytics-3.13.5.dist-info}/licenses/LICENSE +0 -0
- {gitflow_analytics-3.12.6.dist-info → gitflow_analytics-3.13.5.dist-info}/top_level.txt +0 -0
gitflow_analytics/cli.py CHANGED
@@ -18,6 +18,7 @@ import yaml
 
 from ._version import __version__
 from .config import ConfigLoader
+from .config.errors import ConfigurationError
 from .ui.progress_display import create_progress_display
 
 # Heavy imports are lazy-loaded to improve CLI startup time
@@ -158,7 +159,8 @@ def handle_timezone_error(
     for i, commit in enumerate(sample_commits):
         timestamp = commit.get("timestamp")
         logger.error(
-            f"  Sample commit {i}: timestamp={timestamp} (tzinfo: {getattr(timestamp, 'tzinfo', 'N/A')})"
+            f"  Sample commit {i}: timestamp={timestamp} "
+            f"(tzinfo: {getattr(timestamp, 'tzinfo', 'N/A')})"
         )
 
     click.echo(f"   ❌ Timezone comparison error in {report_name}")
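Most hunks below apply the same mechanical fix: a long message string that previously overflowed the line-length limit is split into adjacent f-string literals, which Python concatenates at compile time. A minimal sketch of the pattern (the variable names here are placeholders, not taken from the source):

```python
# Adjacent string literals are joined at compile time, so a long message
# can be wrapped across lines without "+" or str.join().
total_commits, total_tickets = 120, 34  # placeholder values
message = (
    f"Step 1 complete: {total_commits} commits, "
    f"{total_tickets} tickets fetched"
)
assert message == "Step 1 complete: 120 commits, 34 tickets fetched"
```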
@@ -300,16 +302,16 @@ def cli(ctx: click.Context) -> None:
 
     \b
     QUICK START:
-    1. Create a configuration file (see config-sample.yaml)
-    2. Run analysis: gitflow-analytics
+    1. Create a configuration file named config.yaml (see config-sample.yaml)
+    2. Run analysis: gitflow-analytics --weeks 4
     3. View reports in the output directory
 
     \b
     COMMON WORKFLOWS:
-    Analyze last 4 weeks: gitflow-analytics
-
-    Clear cache and analyze: gitflow-analytics
-    Validate configuration: gitflow-analytics
+    Analyze last 4 weeks: gitflow-analytics --weeks 4
+    Use custom config: gitflow-analytics -c myconfig.yaml --weeks 4
+    Clear cache and analyze: gitflow-analytics --clear-cache
+    Validate configuration: gitflow-analytics --validate-only
 
     \b
     COMMANDS:
@@ -328,21 +330,28 @@ def cli(ctx: click.Context) -> None:
         gitflow-analytics install
 
         # Interactive launcher
-        gitflow-analytics run
+        gitflow-analytics run
 
         # Generate developer aliases
-        gitflow-analytics aliases
+        gitflow-analytics aliases --apply
 
-        # Run analysis
-        gitflow-analytics
+        # Run analysis (uses config.yaml by default)
+        gitflow-analytics --weeks 4
 
     \b
     For detailed command help: gitflow-analytics COMMAND --help
     For documentation: https://github.com/yourusername/gitflow-analytics
     """
-    # If no subcommand was invoked, show help
+    # If no subcommand was invoked, show interactive menu or help
    if ctx.invoked_subcommand is None:
-        click.echo(ctx.get_help())
+        # Check if running in interactive terminal
+        if sys.stdin.isatty() and sys.stdout.isatty():
+            from gitflow_analytics.cli_wizards.menu import show_main_menu
+
+            show_main_menu()
+        else:
+            # Non-interactive terminal, show help
+            click.echo(ctx.get_help())
     ctx.exit(0)
 
 
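The new no-subcommand behavior hinges on TTY detection: the interactive menu opens only when both stdin and stdout are terminals, so piped or CI invocations still get plain help text. A self-contained sketch of that gate (the `show_main_menu` import mirrors the diff; everything else is standard Click):

```python
import sys

import click


@click.group(invoke_without_command=True)
@click.pass_context
def cli(ctx: click.Context) -> None:
    if ctx.invoked_subcommand is None:
        if sys.stdin.isatty() and sys.stdout.isatty():
            # Interactive terminal: launch the menu added in this release.
            from gitflow_analytics.cli_wizards.menu import show_main_menu

            show_main_menu()
        else:
            # Non-interactive (pipe, redirect, CI): fall back to help text.
            click.echo(ctx.get_help())
        ctx.exit(0)
```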
@@ -350,9 +359,9 @@
 @click.option(
     "--config",
     "-c",
-    type=click.Path(exists=True, path_type=Path),
-    required=True,
-    help="Path to YAML configuration file",
+    type=click.Path(path_type=Path),
+    default="config.yaml",
+    help="Path to YAML configuration file (default: config.yaml)",
 )
 @click.option(
     "--weeks", "-w", type=int, default=12, help="Number of weeks to analyze (default: 12)"
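With the option no longer validating existence up front, Click accepts a missing `config.yaml` and defers the error, which is why the friendly not-found handling appears later in this diff. A minimal sketch of the changed declaration (the command body is illustrative only):

```python
from pathlib import Path

import click


@click.command()
@click.option(
    "--config",
    "-c",
    type=click.Path(path_type=Path),  # no exists=True: a missing file is reported later
    default="config.yaml",
    help="Path to YAML configuration file (default: config.yaml)",
)
def analyze(config: Path) -> None:
    click.echo(f"📋 Loading configuration from {config}...")
```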
@@ -419,7 +428,9 @@
 @click.option(
     "--use-batch-classification/--use-legacy-classification",
     default=True,
-    help="Use batch LLM classification on pre-fetched data (Step 2 of 2) - now the default behavior",
+    help=(
+        "Use batch LLM classification on pre-fetched data (Step 2 of 2) - now the default behavior"
+    ),
 )
 @click.option(
     "--force-fetch", is_flag=True, help="Force fetch fresh data even if cached data exists"
@@ -472,20 +483,23 @@ def analyze_subcommand(
 
     \b
     EXAMPLES:
-        # Basic analysis of last 4 weeks
-        gitflow-analytics analyze
+        # Basic analysis of last 4 weeks (uses config.yaml by default)
+        gitflow-analytics analyze --weeks 4
+
+        # Use a custom configuration file
+        gitflow-analytics analyze -c myconfig.yaml --weeks 4
 
         # Generate CSV reports with fresh data
-        gitflow-analytics analyze
+        gitflow-analytics analyze --generate-csv --clear-cache
 
         # Quick validation of configuration
-        gitflow-analytics analyze
+        gitflow-analytics analyze --validate-only
 
         # Analyze with qualitative insights
-        gitflow-analytics analyze
+        gitflow-analytics analyze --enable-qualitative
 
         # Run only security analysis (requires security config)
-        gitflow-analytics analyze
+        gitflow-analytics analyze --security-only
 
     \b
     OUTPUT FILES:
@@ -626,7 +640,28 @@ def analyze(
     else:
         click.echo(f"📋 Loading configuration from {config}...")
 
-    cfg = ConfigLoader.load(config)
+    try:
+        cfg = ConfigLoader.load(config)
+    except (FileNotFoundError, ConfigurationError) as e:
+        # Provide user-friendly guidance for missing config file
+        error_msg = str(e)
+        if "not found" in error_msg.lower() or isinstance(e, FileNotFoundError):
+            friendly_msg = (
+                f"❌ Configuration file not found: {config}\n\n"
+                "To get started:\n"
+                "  1. Copy the sample: cp examples/config/config-sample.yaml config.yaml\n"
+                "  2. Edit config.yaml with your repository settings\n"
+                "  3. Run: gitflow-analytics -w 4\n\n"
+                "Or use the interactive installer: gitflow-analytics install"
+            )
+            if display:
+                display.print_status(friendly_msg, "error")
+            else:
+                click.echo(friendly_msg, err=True)
+            sys.exit(1)
+        else:
+            # Re-raise other configuration errors (they already have good messages)
+            raise
 
     # Helper function to check if qualitative analysis is enabled
     # Supports both top-level cfg.qualitative and nested cfg.analysis.qualitative
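The hunk above distinguishes a missing file (actionable, user-facing guidance) from other configuration errors (re-raised with their own messages). A condensed, self-contained sketch with stand-ins for the package's loader and error type:

```python
import sys
from pathlib import Path


class ConfigurationError(Exception):
    """Stand-in for gitflow_analytics.config.errors.ConfigurationError."""


def load_config(path: Path) -> dict:
    # Stand-in for ConfigLoader.load; parsing elided.
    if not path.exists():
        raise FileNotFoundError(f"Configuration file not found: {path}")
    return {}


try:
    cfg = load_config(Path("config.yaml"))
except (FileNotFoundError, ConfigurationError) as e:
    if isinstance(e, FileNotFoundError) or "not found" in str(e).lower():
        print(f"❌ {e}\nCopy the sample config and re-run.", file=sys.stderr)
        sys.exit(1)
    raise  # other configuration errors already carry good messages
```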
@@ -797,7 +832,10 @@ def analyze(
     if display and display._live:
         display.update_progress_task(
             "main",
-            description=f"Cache cleared: {cleared_counts['commits']} commits, {cleared_counts['total']} total",
+            description=(
+                f"Cache cleared: {cleared_counts['commits']} commits, "
+                f"{cleared_counts['total']} total"
+            ),
             completed=10,
         )
     elif display:
@@ -935,7 +973,6 @@ def analyze(
         from .security.reports import SecurityReportGenerator
 
         # GitAnalysisCache already imported at module level (line 24)
-
         # Load security configuration
         security_config = SecurityConfig.from_dict(
             cfg.analysis.security if hasattr(cfg.analysis, "security") else {}
@@ -1174,7 +1211,9 @@ def analyze(
             # We're in full-screen mode, update the task
             display.update_progress_task(
                 "main",
-                description=f"🔍 Discovering repositories from organization: {cfg.github.organization}",
+                description=(
+                    f"🔍 Discovering repositories from organization: {cfg.github.organization}"
+                ),
                 completed=15,
             )
         else:
@@ -1211,7 +1250,10 @@ def analyze(
             # We're in full-screen mode, update progress and initialize repo list
             display.update_progress_task(
                 "main",
-                description=f"✅ Found {len(discovered_repos)} repositories in {cfg.github.organization}",
+                description=(
+                    f"✅ Found {len(discovered_repos)} repositories in "
+                    f"{cfg.github.organization}"
+                ),
                 completed=20,
             )
             # Initialize repository list for the full-screen display
@@ -1341,12 +1383,16 @@ def analyze(
                 if display and hasattr(display, "_live") and display._live:
                     display.update_progress_task(
                         "repos",
-                        description=f"Found {len(cached_repos)} repos with cached data ({total_cached_commits} commits)",
+                        description=(
+                            f"Found {len(cached_repos)} repos with cached data "
+                            f"({total_cached_commits} commits)"
+                        ),
                         completed=10,
                     )
                 else:
                     click.echo(
-                        f"✅ Found {len(cached_repos)} repos with cached data ({total_cached_commits} commits)"
+                        f"✅ Found {len(cached_repos)} repos with cached data "
+                        f"({total_cached_commits} commits)"
                     )
             else:
                 # Force fetch: analyze all repositories
@@ -1370,12 +1416,16 @@ def analyze(
             if display and display._live:
                 display.update_progress_task(
                     "repos",
-                    description=f"Step 1: Fetching data for {len(repos_needing_analysis)} repositories...",
+                    description=(
+                        f"Step 1: Fetching data for "
+                        f"{len(repos_needing_analysis)} repositories..."
+                    ),
                     completed=15,
                 )
             else:
                 click.echo(
-                    f"📥 Step 1: Fetching data for {len(repos_needing_analysis)} repositories..."
+                    f"📥 Step 1: Fetching data for "
+                    f"{len(repos_needing_analysis)} repositories..."
                 )
 
         # Perform data fetch for repositories that need analysis
@@ -1404,7 +1454,9 @@ def analyze(
                 # Update the existing task since display was already started
                 display.update_progress_task(
                     "repos",
-                    description=f"Step 1: Fetching data for {len(repos_needing_analysis)} repositories",
+                    description=(
+                        f"Step 1: Fetching data for {len(repos_needing_analysis)} repositories"
+                    ),
                     completed=0,
                 )
 
@@ -1450,7 +1502,8 @@ def analyze(
                     description=f"Analyzing {len(repos_needing_analysis)} repositories",
                 )
 
-                # Initialize ALL repositories (both cached and to-be-fetched) with their status
+                # Initialize ALL repositories (both cached and to-be-fetched)
+                # with their status
                 all_repo_list = []
 
                 # Add cached repos as COMPLETE
@@ -1476,7 +1529,6 @@ def analyze(
                 description="Processing repositories",
                 unit="repos",
             ) as repos_progress_ctx:
-
                 for idx, repo_config in enumerate(repos_needing_analysis, 1):
                     try:
                         repo_path = Path(repo_config.path)
@@ -1486,14 +1538,18 @@ def analyze(
                         repo_display_name = repo_config.name or project_key
                         progress.set_description(
                             repos_progress_ctx,
-                            f"🔄 Analyzing repository: {repo_display_name} ({idx}/{len(repos_needing_analysis)})",
+                            f"🔄 Analyzing repository: {repo_display_name} "
+                            f"({idx}/{len(repos_needing_analysis)})",
                         )
 
                         # Also update the display if available
                         if display:
                             display.update_progress_task(
                                 "repos",
-                                description=f"🔄 Processing: {repo_display_name} ({idx}/{len(repos_needing_analysis)})",
+                                description=(
+                                    f"🔄 Processing: {repo_display_name} "
+                                    f"({idx}/{len(repos_needing_analysis)})"
+                                ),
                                 completed=idx - 1,
                             )
                         # Update repository status to processing
@@ -1551,7 +1607,8 @@ def analyze(
 
                         if display:
                             display.print_status(
-                                f"  ✅ {project_key}: {result['stats']['total_commits']} commits, "
+                                f"  ✅ {project_key}: "
+                                f"{result['stats']['total_commits']} commits, "
                                 f"{result['stats']['unique_tickets']} tickets",
                                 "success",
                             )
@@ -1624,21 +1681,27 @@ def analyze(
                     )
                     if repo_status["failed_updates"] > 0:
                         logger.warning(
-                            "  ⚠️ Some repositories failed to fetch updates. Analysis uses potentially stale data.\n"
-                            "     Check authentication, network connectivity, or try with --skip-remote-fetch."
+                            "  ⚠️ Some repositories failed to fetch updates. "
+                            "Analysis uses potentially stale data.\n"
+                            "     Check authentication, network connectivity, or try "
+                            "with --skip-remote-fetch."
                         )
 
                 if display and display._live:
                     display.update_progress_task(
                         "repos",
-                        description=f"Step 1 complete: {total_commits} commits, {total_tickets} tickets fetched",
+                        description=(
+                            f"Step 1 complete: {total_commits} commits, "
+                            f"{total_tickets} tickets fetched"
+                        ),
                         completed=100,
                     )
                     # Stop the live display after Step 1
                     display.stop_live_display()
                 else:
                     click.echo(
-                        f"📥 Step 1 complete: {total_commits} commits, {total_tickets} tickets fetched"
+                        f"📥 Step 1 complete: {total_commits} commits, "
+                        f"{total_tickets} tickets fetched"
                    )
             else:
                 if display and display._live:
@@ -1690,41 +1753,50 @@ def analyze(
            validation_passed = True
            if display:
                display.print_status(
-                    f"✅ Data validation passed: {stored_commits} commits, {existing_batches} batches ready",
+                    f"✅ Data validation passed: {stored_commits} commits, "
+                    f"{existing_batches} batches ready",
                    "success",
                )
            else:
                click.echo(
-                    f"✅ Data validation passed: {stored_commits} commits, {existing_batches} batches ready"
+                    f"✅ Data validation passed: {stored_commits} commits, "
+                    f"{existing_batches} batches ready"
                )
 
        elif stored_commits > 0 and existing_batches == 0:
            # We have commits but no batches - this shouldn't happen but we can recover
            if display:
                display.print_status(
-                    f"⚠️ Found {stored_commits} commits but no daily batches - data inconsistency detected",
+                    f"⚠️ Found {stored_commits} commits but no daily batches - "
+                    f"data inconsistency detected",
                    "warning",
                )
            else:
                click.echo(
-                    f"⚠️ Found {stored_commits} commits but no daily batches - data inconsistency detected"
+                    f"⚠️ Found {stored_commits} commits but no daily batches - "
+                    f"data inconsistency detected"
                )
 
        elif stored_commits == 0 and total_commits > 0:
            # Step 1 claimed success but no commits were stored - critical error
-            error_msg = f"❌ VALIDATION FAILED: Step 1 reported {total_commits} commits but database contains 0 commits for date range"
+            error_msg = (
+                f"❌ VALIDATION FAILED: Step 1 reported {total_commits} commits "
+                f"but database contains 0 commits for date range"
+            )
            if display:
                display.print_status(error_msg, "error")
            else:
                click.echo(error_msg)
                click.echo(
-                    f"  📅 Date range: {start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')}"
+                    f"  📅 Date range: {start_date.strftime('%Y-%m-%d')} to "
+                    f"{end_date.strftime('%Y-%m-%d')}"
                )
                click.echo(
                    f"  📊 Step 1 stats: {total_commits} commits, {total_tickets} tickets"
                )
                click.echo(
-                    f"  🗃️ Database reality: {stored_commits} commits, {existing_batches} batches"
+                    f"  🗃️ Database reality: {stored_commits} commits, "
+                    f"{existing_batches} batches"
                )
                click.echo(
                    " 💡 This suggests a timezone, date filtering, or database storage issue"
@@ -1737,12 +1809,14 @@ def analyze(
            # No data at all - need to fetch or explain why
            if display:
                display.print_status(
-                    "📊 No commits or batches found for date range - proceeding with data fetch",
+                    "📊 No commits or batches found for date range - "
+                    "proceeding with data fetch",
                    "warning",
                )
            else:
                click.echo(
-                    "📊 No commits or batches found for date range - proceeding with data fetch"
+                    "📊 No commits or batches found for date range - "
+                    "proceeding with data fetch"
                )
 
        # PROCEED WITH INITIAL FETCH if validation didn't pass
@@ -1761,15 +1835,18 @@ def analyze(
        if repos_needing_analysis:
            if display:
                display.print_status(
-                    f"Initial fetch: Fetching data for {len(repos_needing_analysis)} repositories...",
+                    f"Initial fetch: Fetching data for "
+                    f"{len(repos_needing_analysis)} repositories...",
                    "info",
                )
            else:
                click.echo(
-                    f"🚨 Initial fetch: Fetching data for {len(repos_needing_analysis)} repositories..."
+                    f"🚨 Initial fetch: Fetching data for "
+                    f"{len(repos_needing_analysis)} repositories..."
                )
                click.echo(
-                    "  📋 Reason: Need to ensure commits and batches exist for classification"
+                    "  📋 Reason: Need to ensure commits and batches exist "
+                    "for classification"
                )
 
            # Perform data fetch for repositories that need analysis
@@ -1812,22 +1889,26 @@ def analyze(
                    if retry_count > 0:
                        if display:
                            display.print_status(
-                                f"  🔄 Retry {retry_count}/{max_retries}: {repo_config.github_repo}",
+                                f"  🔄 Retry {retry_count}/{max_retries}: "
+                                f"{repo_config.github_repo}",
                                "warning",
                            )
                        else:
                            click.echo(
-                                f"  🔄 Retry {retry_count}/{max_retries}: {repo_config.github_repo}"
+                                f"  🔄 Retry {retry_count}/{max_retries}: "
+                                f"{repo_config.github_repo}"
                            )
                    else:
                        if display:
                            display.print_status(
-                                f"  📥 Cloning {repo_config.github_repo} from GitHub...",
+                                f"  📥 Cloning {repo_config.github_repo} "
+                                f"from GitHub...",
                                "info",
                            )
                        else:
                            click.echo(
-                                f"  📥 Cloning {repo_config.github_repo} from GitHub..."
+                                f"  📥 Cloning {repo_config.github_repo} "
+                                f"from GitHub..."
                            )
 
                    try:
@@ -1870,7 +1951,7 @@ def analyze(
                            cmd,
                            env=env,
                            stdout=subprocess.PIPE,
-                            stderr=None,  # Let stderr
+                            stderr=None,  # Let stderr flow to terminal
                            text=True,
                            timeout=timeout_seconds,
                        )
@@ -1890,12 +1971,14 @@ def analyze(
                            ):
                                if display:
                                    display.print_status(
-                                        f"  ❌ Authentication failed for {repo_config.github_repo}",
+                                        f"  ❌ Authentication failed for "
+                                        f"{repo_config.github_repo}",
                                        "error",
                                    )
                                else:
                                    click.echo(
-                                        f"  ❌ Authentication failed for {repo_config.github_repo}"
+                                        f"  ❌ Authentication failed for "
+                                        f"{repo_config.github_repo}"
                                    )
                                break  # Don't retry auth failures
                            else:
@@ -1909,24 +1992,28 @@ def analyze(
                                clone_success = True
                                if display:
                                    display.print_status(
-                                        f"  ✅ Cloned {repo_config.github_repo} ({elapsed:.1f}s)",
+                                        f"  ✅ Cloned {repo_config.github_repo} "
+                                        f"({elapsed:.1f}s)",
                                        "success",
                                    )
                                else:
                                    click.echo(
-                                        f"  ✅ Cloned {repo_config.github_repo} ({elapsed:.1f}s)"
+                                        f"  ✅ Cloned {repo_config.github_repo} "
+                                        f"({elapsed:.1f}s)"
                                    )
 
                        except subprocess.TimeoutExpired:
                            retry_count += 1
                            if display:
                                display.print_status(
-                                    f"  ⏱️ Clone timeout ({timeout_seconds}s): {repo_config.github_repo}",
+                                    f"  ⏱️ Clone timeout ({timeout_seconds}s): "
+                                    f"{repo_config.github_repo}",
                                    "error",
                                )
                            else:
                                click.echo(
-                                    f"  ⏱️ Clone timeout ({timeout_seconds}s): {repo_config.github_repo}"
+                                    f"  ⏱️ Clone timeout ({timeout_seconds}s): "
+                                    f"{repo_config.github_repo}"
                                )
                            # Clean up partial clone
                            if repo_path.exists():
@@ -1936,12 +2023,14 @@ def analyze(
                            if retry_count > max_retries:
                                if display:
                                    display.print_status(
-                                        f"  ❌ Skipping {repo_config.github_repo} after {max_retries} timeouts",
+                                        f"  ❌ Skipping {repo_config.github_repo} "
+                                        f"after {max_retries} timeouts",
                                        "error",
                                    )
                                else:
                                    click.echo(
-                                        f"  ❌ Skipping {repo_config.github_repo} after {max_retries} timeouts"
+                                        f"  ❌ Skipping {repo_config.github_repo} "
+                                        f"after {max_retries} timeouts"
                                    )
                                break
                            continue  # Try again
@@ -2016,7 +2105,8 @@ def analyze(
 
                    if display:
                        display.print_status(
-                            f"  ✅ {project_key}: {result['stats']['total_commits']} commits, "
+                            f"  ✅ {project_key}: "
+                            f"{result['stats']['total_commits']} commits, "
                            f"{result['stats']['unique_tickets']} tickets",
                            "success",
                        )
@@ -2037,7 +2127,8 @@ def analyze(
                except Exception as e:
                    if display:
                        display.print_status(
-                            f"  ❌ Error fetching {project_key}: {e}",
+                            f"  ❌ Error fetching {project_key}: {e}",
+                            "error",
                        )
                    else:
                        click.echo(f"  ❌ Error fetching {project_key}: {e}")
@@ -2045,12 +2136,14 @@ def analyze(
 
            if display:
                display.print_status(
-                    f"Initial fetch complete: {total_commits} commits, {total_tickets} tickets",
+                    f"Initial fetch complete: {total_commits} commits, "
+                    f"{total_tickets} tickets",
                    "success",
                )
            else:
                click.echo(
-                    f"🚨 Initial fetch complete: {total_commits} commits, {total_tickets} tickets"
+                    f"🚨 Initial fetch complete: {total_commits} commits, "
+                    f"{total_tickets} tickets"
                )
 
            # RE-VALIDATE after initial fetch
@@ -2078,7 +2171,10 @@ def analyze(
            )
 
            if final_commits == 0:
-                error_msg = "❌ CRITICAL: Initial fetch completed but still 0 commits stored in database"
+                error_msg = (
+                    "❌ CRITICAL: Initial fetch completed but still 0 commits "
+                    "stored in database"
+                )
                if display:
                    display.print_status(error_msg, "error")
                else:
@@ -2090,11 +2186,13 @@ def analyze(
                        f"  📊 Initial fetch stats: {total_commits} commits reported"
                    )
                    click.echo(
-                        f"  🗃️ Database result: {final_commits} commits, {final_batches} batches"
+                        f"  🗃️ Database result: {final_commits} commits, "
+                        f"{final_batches} batches"
                    )
                    click.echo("  🔍 Possible causes:")
                    click.echo(
-                        "    - Timezone mismatch between commit timestamps and analysis range"
+                        "    - Timezone mismatch between commit timestamps "
+                        "and analysis range"
                    )
                    click.echo("    - Date filtering excluding all commits")
                    click.echo("    - Database transaction not committed")
@@ -2102,17 +2200,20 @@ def analyze(
                        "    - Repository has no commits in the specified time range"
                    )
                raise click.ClickException(
-                    "Initial fetch failed validation - no data available for classification"
+                    "Initial fetch failed validation - "
+                    "no data available for classification"
                )
 
            if display:
                display.print_status(
-                    f"✅ Post-fetch validation: {final_commits} commits, {final_batches} batches confirmed",
+                    f"✅ Post-fetch validation: {final_commits} commits, "
+                    f"{final_batches} batches confirmed",
                    "success",
                )
            else:
                click.echo(
-                    f"✅ Post-fetch validation: {final_commits} commits, {final_batches} batches confirmed"
+                    f"✅ Post-fetch validation: {final_commits} commits, "
+                    f"{final_batches} batches confirmed"
                )
 
        # FINAL PRE-CLASSIFICATION CHECK: Ensure we have data before starting batch classifier
@@ -2139,35 +2240,46 @@ def analyze(
            )
 
            if pre_classification_commits == 0:
-                error_msg = "❌ PRE-CLASSIFICATION CHECK FAILED: No commits available for batch classification"
+                error_msg = (
+                    "❌ PRE-CLASSIFICATION CHECK FAILED: "
+                    "No commits available for batch classification"
+                )
                if display:
                    display.print_status(error_msg, "error")
                else:
                    click.echo(error_msg)
                    click.echo(
-                        f"  📅 Date range: {start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')}"
+                        f"  📅 Date range: {start_date.strftime('%Y-%m-%d')} to "
+                        f"{end_date.strftime('%Y-%m-%d')}"
                    )
                    click.echo(
-                        f"  🗃️ Database state: {pre_classification_commits} commits, {pre_classification_batches} batches"
+                        f"  🗃️ Database state: {pre_classification_commits} commits, "
+                        f"{pre_classification_batches} batches"
                    )
                    click.echo(
-                        " 💡 This indicates all previous validation and fetch steps failed to store any data"
+                        " 💡 This indicates all previous validation and fetch steps "
+                        "failed to store any data"
                    )
                raise click.ClickException(
                    "No data available for batch classification - cannot proceed"
                )
 
            if pre_classification_batches == 0:
-                error_msg = "❌ PRE-CLASSIFICATION CHECK FAILED: No daily batches available for classification"
+                error_msg = (
+                    "❌ PRE-CLASSIFICATION CHECK FAILED: "
+                    "No daily batches available for classification"
+                )
                if display:
                    display.print_status(error_msg, "error")
                else:
                    click.echo(error_msg)
                    click.echo(
-                        f"  📅 Date range: {start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')}"
+                        f"  📅 Date range: {start_date.strftime('%Y-%m-%d')} to "
+                        f"{end_date.strftime('%Y-%m-%d')}"
                    )
                    click.echo(
-                        f"  🗃️ Database state: {pre_classification_commits} commits, {pre_classification_batches} batches"
+                        f"  🗃️ Database state: {pre_classification_commits} commits, "
+                        f"{pre_classification_batches} batches"
                    )
                click.echo(" 💡 Commits exist but no daily batches - batch creation failed")
                raise click.ClickException(
@@ -2227,7 +2339,7 @@ def analyze(
                cache_dir=cfg.cache.directory,
                llm_config=llm_config,
                batch_size=50,
-                confidence_threshold=cfg.analysis.llm_classification.confidence_threshold,
+                confidence_threshold=(cfg.analysis.llm_classification.confidence_threshold),
                fallback_enabled=True,
            )
 
@@ -2238,7 +2350,8 @@ def analyze(
                project_keys.append(project_key)
 
            # Run batch classification
-            # Note: The batch classifier will create its own progress bars, but our display should remain active
+            # Note: The batch classifier will create its own progress bars,
+            # but our display should remain active
            classification_result = batch_classifier.classify_date_range(
                start_date=start_date,
                end_date=end_date,
@@ -2249,7 +2362,8 @@ def analyze(
            # Update display progress after classification
            if display and hasattr(display, "update_progress_task"):
                display.update_progress_task(
-                    "repos", completed=total_batches if "total_batches" in locals() else 0
+                    "repos",
+                    completed=total_batches if "total_batches" in locals() else 0,
                )
 
            if display:
@@ -2257,7 +2371,8 @@ def analyze(
                display.complete_progress_task("repos", "Batch classification complete")
                display.stop_live_display()
                display.print_status(
-                    f"✅ Batch classification completed: {classification_result['processed_batches']} batches, "
+                    f"✅ Batch classification completed: "
+                    f"{classification_result['processed_batches']} batches, "
                    f"{classification_result['total_commits']} commits",
                    "success",
                )
@@ -2591,9 +2706,14 @@ def analyze(
                    else:
                        commit["project_key"] = commit.get("inferred_project", "UNKNOWN")
 
-                    commit["canonical_id"] = identity_resolver.resolve_developer(
+                    canonical_id = identity_resolver.resolve_developer(
                        commit["author_name"], commit["author_email"]
                    )
+                    commit["canonical_id"] = canonical_id
+                    # Also add canonical display name for reports
+                    commit["canonical_name"] = identity_resolver.get_canonical_name(
+                        canonical_id
+                    )
 
                all_commits.extend(commits)
                if display:
@@ -2810,9 +2930,9 @@ def analyze(
                ):
                    existing_mappings.append(new_mapping)
 
-                config_data["analysis"]["identity"][
-                    "manual_mappings"
-                ] = existing_mappings
+                config_data["analysis"]["identity"]["manual_mappings"] = (
+                    existing_mappings
+                )
 
                # Apply bot exclusions
                if suggested_config.get("exclude", {}).get("authors"):
@@ -3128,7 +3248,10 @@ def analyze(
                click.echo(f"  💰 LLM Cost: ${llm_stats['total_cost']:.4f}")
 
        except ImportError as e:
-            error_msg = f"Qualitative analysis dependencies missing: {e}\n\n💡 Install with: pip install spacy scikit-learn openai tiktoken"
+            error_msg = (
+                f"Qualitative analysis dependencies missing: {e}\n\n"
+                "💡 Install with: pip install spacy scikit-learn openai tiktoken"
+            )
            if display:
                display.show_error(error_msg)
            else:
@@ -3169,7 +3292,11 @@ def analyze(
        else:
            click.echo("  ⏭️ Continuing with standard analysis...")
    elif enable_qualitative and not get_qualitative_config():
-        warning_msg = "Qualitative analysis requested but not configured in config file\n\nAdd a 'qualitative:' section (top-level or under 'analysis:') to your configuration"
+        warning_msg = (
+            "Qualitative analysis requested but not configured in config file\n\n"
+            "Add a 'qualitative:' section (top-level or under 'analysis:') "
+            "to your configuration"
+        )
        if display:
            display.show_warning(warning_msg)
        else:
@@ -3251,7 +3378,7 @@ def analyze(
    # Weekly metrics report (only if CSV generation is enabled)
    if generate_csv:
        weekly_report = (
-            output / f
+            output / f"weekly_metrics_{datetime.now(timezone.utc).strftime('%Y%m%d')}.csv"
        )
        try:
            logger.debug("Starting weekly metrics report generation")
@@ -3271,7 +3398,7 @@ def analyze(
    if generate_csv:
        activity_summary_report = (
            output
-            / f
+            / f"developer_activity_summary_{datetime.now(timezone.utc).strftime('%Y%m%d')}.csv"
        )
        try:
            logger.debug("Starting developer activity summary report generation")
@@ -3298,7 +3425,7 @@ def analyze(
 
    # Summary report (only if CSV generation is enabled)
    if generate_csv:
-        summary_report = output / f
+        summary_report = output / f"summary_{datetime.now().strftime('%Y%m%d')}.csv"
        try:
            report_gen.generate_summary_report(
                all_commits,
@@ -3322,7 +3449,7 @@ def analyze(
 
    # Developer report (only if CSV generation is enabled)
    if generate_csv:
-        developer_report = output / f
+        developer_report = output / f"developers_{datetime.now().strftime('%Y%m%d')}.csv"
        try:
            report_gen.generate_developer_report(developer_stats, developer_report)
            generated_reports.append(developer_report.name)
@@ -3340,7 +3467,7 @@ def analyze(
    # Untracked commits report (only if CSV generation is enabled)
    if generate_csv:
        untracked_commits_report = (
-            output / f
+            output / f"untracked_commits_{datetime.now().strftime('%Y%m%d')}.csv"
        )
        try:
            report_gen.generate_untracked_commits_report(
@@ -3361,7 +3488,7 @@ def analyze(
    # Weekly Categorization report (only if CSV generation is enabled)
    if generate_csv:
        weekly_categorization_report = (
-            output / f
+            output / f"weekly_categorization_{datetime.now().strftime('%Y%m%d')}.csv"
        )
        try:
            logger.debug("Starting weekly categorization report generation")
@@ -3379,7 +3506,7 @@ def analyze(
    # PM Correlations report (if PM data is available and CSV generation is enabled)
    if aggregated_pm_data and generate_csv:
        pm_correlations_report = (
-            output / f
+            output / f"pm_correlations_{datetime.now().strftime('%Y%m%d')}.csv"
        )
        try:
            report_gen.generate_pm_correlations_report(
@@ -3394,7 +3521,7 @@ def analyze(
    # Story Point Correlation report (only if CSV generation is enabled)
    if generate_csv:
        story_point_correlation_report = (
-            output / f
+            output / f"story_point_correlation_{datetime.now().strftime('%Y%m%d')}.csv"
        )
        try:
            logger.debug("Starting story point correlation report generation")
@@ -3410,7 +3537,7 @@ def analyze(
        click.echo(f"  ⚠️ Warning: Story point correlation report failed: {e}")
 
    # Activity distribution report (always generate data, optionally write CSV)
-    activity_report = output / f
+    activity_report = output / f"activity_distribution_{datetime.now().strftime('%Y%m%d')}.csv"
    try:
        logger.debug("Starting activity distribution report generation")
        analytics_gen.generate_activity_distribution_report(
@@ -3434,7 +3561,7 @@ def analyze(
        raise
 
    # Developer focus report (always generate data, optionally write CSV)
-    focus_report = output / f
+    focus_report = output / f"developer_focus_{datetime.now().strftime('%Y%m%d')}.csv"
    try:
        logger.debug("Starting developer focus report generation")
        analytics_gen.generate_developer_focus_report(
@@ -3458,7 +3585,7 @@ def analyze(
        raise
 
    # Qualitative insights report (always generate data, optionally write CSV)
-    insights_report = output / f
+    insights_report = output / f"qualitative_insights_{datetime.now().strftime('%Y%m%d')}.csv"
    try:
        logger.debug("Starting qualitative insights report generation")
        analytics_gen.generate_qualitative_insights_report(
@@ -3478,7 +3605,7 @@ def analyze(
 
    branch_health_gen = BranchHealthReportGenerator()
 
-    branch_health_report = output / f
+    branch_health_report = output / f"branch_health_{datetime.now().strftime('%Y%m%d')}.csv"
    try:
        logger.debug("Starting branch health report generation")
        branch_health_gen.generate_csv_report(branch_health_metrics, branch_health_report)
@@ -3492,7 +3619,7 @@ def analyze(
 
    # Detailed branch report
    detailed_branch_report = (
-        output / f
+        output / f"branch_details_{datetime.now().strftime('%Y%m%d')}.csv"
    )
    try:
        branch_health_gen.generate_detailed_branch_report(
@@ -3532,7 +3659,7 @@ def analyze(
 
    # Weekly trends report (includes developer and project trends) (only if CSV generation is enabled)
    if generate_csv:
-        trends_report = output / f
+        trends_report = output / f"weekly_trends_{datetime.now().strftime('%Y%m%d')}.csv"
        try:
            logger.debug("Starting weekly trends report generation")
            analytics_gen.generate_weekly_trends_report(
@@ -3608,7 +3735,7 @@ def analyze(
    # Weekly velocity report (only if CSV generation is enabled)
    if generate_csv:
        weekly_velocity_report = (
-            output / f
+            output / f"weekly_velocity_{datetime.now().strftime('%Y%m%d')}.csv"
        )
        try:
            logger.debug("Starting weekly velocity report generation")
@@ -3634,7 +3761,7 @@ def analyze(
    # Weekly DORA metrics report (only if CSV generation is enabled)
    if generate_csv:
        weekly_dora_report = (
-            output / f
+            output / f"weekly_dora_metrics_{datetime.now().strftime('%Y%m%d')}.csv"
        )
        try:
            logger.debug("Starting weekly DORA metrics report generation")
@@ -3852,7 +3979,7 @@ def analyze(
        logger.debug("Starting comprehensive JSON export generation")
        click.echo("  🔄 Generating comprehensive JSON export...")
        json_report = (
-            output / f
+            output / f"comprehensive_export_{datetime.now().strftime('%Y%m%d')}.json"
        )
 
        # Initialize comprehensive JSON exporter
@@ -4115,7 +4242,8 @@ def analyze(
            "\n💡 Note: Token/cost tracking is only available with qualitative analysis enabled."
        )
        click.echo(
-            "   Add 'qualitative:' section (top-level or under 'analysis:') to your config to enable detailed LLM cost tracking."
+            "   Add 'qualitative:' section (top-level or under 'analysis:') "
+            "to your config to enable detailed LLM cost tracking."
        )
 
    # Display cache statistics in simple format
@@ -5046,7 +5174,7 @@ def identities(config: Path, weeks: int, apply: bool) -> None:
 
    # Run analysis
    identity_report_path = (
-        cfg.cache.directory / f
+        cfg.cache.directory / f"identity_analysis_{datetime.now().strftime('%Y%m%d')}.yaml"
    )
    identity_result = analysis_pass.run_analysis(
        all_commits, output_path=identity_report_path, apply_to_config=False
@@ -5393,7 +5521,9 @@ def aliases_command(
            confidence_color = (
                "green"
                if alias.confidence >= 0.9
-                else "yellow"
+                else "yellow"
+                if alias.confidence >= 0.8
+                else "red"
            )
            click.echo("    Confidence: ", nl=False)
            click.secho(f"{alias.confidence:.0%}", fg=confidence_color)
@@ -5521,6 +5651,597 @@ def list_developers(config: Path) -> None:
|
|
|
5521
5651
|
sys.exit(1)
|
|
5522
5652
|
|
|
5523
5653
|
|
|
5654
|
+
@cli.command(name="create-alias-interactive")
|
|
5655
|
+
@click.option(
|
|
5656
|
+
"--config",
|
|
5657
|
+
"-c",
|
|
5658
|
+
type=click.Path(exists=True, path_type=Path),
|
|
5659
|
+
required=True,
|
|
5660
|
+
help="Path to YAML configuration file",
|
|
5661
|
+
)
|
|
5662
|
+
@click.option(
|
|
5663
|
+
"--output",
|
|
5664
|
+
"-o",
|
|
5665
|
+
type=click.Path(path_type=Path),
|
|
5666
|
+
help="Output path for aliases.yaml (default: same dir as config)",
|
|
5667
|
+
)
|
|
5668
|
+
def create_alias_interactive(config: Path, output: Optional[Path]) -> None:
|
|
5669
|
+
"""Create developer aliases interactively with numbered selection.
|
|
5670
|
+
|
|
5671
|
+
\b
|
|
5672
|
+
This command provides an interactive interface to create developer
|
|
5673
|
+
aliases by selecting from a numbered list of developers in the database.
|
|
5674
|
+
You can merge multiple developer identities and save them to aliases.yaml.
|
|
5675
|
+
|
|
5676
|
+
\b
|
|
5677
|
+
EXAMPLES:
|
|
5678
|
+
# Start interactive alias creation
|
|
5679
|
+
gitflow-analytics create-alias-interactive -c config.yaml
|
|
5680
|
+
|
|
5681
|
+
# Save to specific location
|
|
5682
|
+
gitflow-analytics create-alias-interactive -c config.yaml -o ~/shared/aliases.yaml
|
|
5683
|
+
|
|
5684
|
+
\b
|
|
5685
|
+
WORKFLOW:
|
|
5686
|
+
1. Displays numbered list of all developers from database
|
|
5687
|
+
2. Select multiple developer numbers to merge (space-separated)
|
|
5688
|
+
3. Choose which one should be the primary identity
|
|
5689
|
+
4. Create alias mapping
|
|
5690
|
+
5. Option to save to aliases.yaml
|
|
5691
|
+
6. Option to continue creating more aliases
|
|
5692
|
+
|
|
5693
|
+
\b
|
|
5694
|
+
Useful for:
|
|
5695
|
+
- Consolidating developer identities across email addresses
|
|
5696
|
+
- Cleaning up duplicate developer entries
|
|
5697
|
+
- Maintaining consistent identity resolution
|
|
5698
|
+
"""
|
|
5699
|
+
from .config.aliases import AliasesManager, DeveloperAlias
|
|
5700
|
+
from .core.identity import DeveloperIdentityResolver
|
|
5701
|
+
|
|
5702
|
+
try:
|
|
5703
|
+
# Load configuration
|
|
5704
|
+
cfg = ConfigLoader.load(config)
|
|
5705
|
+
|
|
5706
|
+
# Determine output path for aliases file
|
|
5707
|
+
if not output:
|
|
5708
|
+
output = config.parent / "aliases.yaml"
|
|
5709
|
+
|
|
5710
|
+
# Initialize identity resolver
|
|
5711
|
+
identity_resolver = DeveloperIdentityResolver(cfg.cache.directory / "identities.db")
|
|
5712
|
+
|
|
5713
|
+
# Initialize aliases manager
|
|
5714
|
+
aliases_manager = AliasesManager(output if output.exists() else None)
|
|
5715
|
+
|
|
5716
|
+
click.echo("\n" + "=" * 80)
|
|
5717
|
+
click.echo(click.style("🔧 Interactive Alias Creator", fg="cyan", bold=True))
|
|
5718
|
+
click.echo("=" * 80 + "\n")
|
|
5719
|
+
|
|
5720
|
+
# Main loop for creating multiple aliases
|
|
5721
|
+
continue_creating = True
|
|
5722
|
+
|
|
5723
|
+
while continue_creating:
|
|
5724
|
+
# Get all developers from database
|
|
5725
|
+
developers = identity_resolver.get_developer_stats()
|
|
5726
|
+
|
|
5727
|
+
if not developers:
|
|
5728
|
+
click.echo("❌ No developers found. Run analysis first.")
|
|
5729
|
+
sys.exit(1)
|
|
5730
|
+
|
|
5731
|
+
# Display numbered list of developers
|
|
5732
|
+
click.echo(
|
|
5733
|
+
click.style(f"\n📋 Found {len(developers)} developers:\n", fg="green", bold=True)
|
|
5734
|
+
)
|
|
5735
|
+
click.echo(f"{'#':<6} {'Name':<30} {'Email':<40} {'Commits':<10}")
|
|
5736
|
+
click.echo("-" * 86)
|
|
5737
|
+
|
|
5738
|
+
for idx, dev in enumerate(developers, start=1):
|
|
5739
|
+
click.echo(
|
|
5740
|
+
f"{idx:<6} "
|
|
5741
|
+
f"{dev['primary_name']:<30} "
|
|
5742
|
+
f"{dev['primary_email']:<40} "
|
|
5743
|
+
f"{dev['total_commits']:<10}"
|
|
5744
|
+
)
|
|
5745
|
+
|
|
5746
|
+
click.echo()
|
|
5747
|
+
|
|
5748
|
+
# Get user selection
|
|
5749
|
+
while True:
|
|
5750
|
+
try:
|
|
5751
|
+
selection_input = click.prompt(
|
|
5752
|
+
click.style(
|
|
5753
|
+
"Select developers to merge (enter numbers separated by spaces, or 'q' to quit)",
|
|
5754
|
+
fg="yellow",
|
|
5755
|
+
),
|
|
5756
|
+
type=str,
|
|
5757
|
+
).strip()
|
|
5758
|
+
|
|
5759
|
+
# Handle quit
|
|
5760
|
+
if selection_input.lower() in ["q", "quit", "exit"]:
|
|
5761
|
+
click.echo("\n👋 Exiting alias creation.")
|
|
5762
|
+
sys.exit(0)
|
|
5763
|
+
|
|
5764
|
+
# Parse selection
|
|
5765
|
+
selected_indices = []
|
|
5766
|
+
for num_str in selection_input.split():
|
|
5767
|
+
try:
|
|
5768
|
+
num = int(num_str)
|
|
5769
|
+
if 1 <= num <= len(developers):
|
|
5770
|
+
selected_indices.append(num)
|
|
5771
|
+
else:
|
|
5772
|
+
click.echo(
|
|
5773
|
+
click.style(
|
|
5774
|
+
f"⚠️ Number {num} is out of range (1-{len(developers)})",
|
|
5775
|
+
fg="red",
|
|
5776
|
+
)
|
|
5777
|
+
)
|
|
5778
|
+
raise ValueError("Invalid range")
|
|
5779
|
+
except ValueError:
|
|
5780
|
+
click.echo(
|
|
5781
|
+
click.style(
|
|
5782
|
+
f"⚠️ Invalid input: '{num_str}' is not a valid number", fg="red"
|
|
5783
|
+
)
|
|
5784
|
+
)
|
|
5785
|
+
raise
|
|
5786
|
+
|
|
5787
|
+
# Check minimum selection
|
|
5788
|
+
if len(selected_indices) < 2:
|
|
5789
|
+
click.echo(
|
|
5790
|
+
click.style(
|
|
5791
|
+
"⚠️ You must select at least 2 developers to merge", fg="red"
|
|
5792
|
+
)
|
|
5793
|
+
)
|
|
5794
|
+
continue
|
|
5795
|
+
|
|
5796
|
+
# Remove duplicates and sort
|
|
5797
|
+
selected_indices = sorted(set(selected_indices))
|
|
5798
|
+
break
|
|
5799
|
+
|
|
5800
|
+
except ValueError:
|
|
5801
|
+
continue
|
|
5802
|
+
except click.exceptions.Abort:
|
|
5803
|
+
click.echo("\n\n👋 Exiting alias creation.")
|
|
5804
|
+
sys.exit(0)
|
|
5805
|
+
|
|
5806
|
+
# Display selected developers
|
|
5807
|
+
selected_devs = [developers[idx - 1] for idx in selected_indices]
|
|
5808
|
+
|
|
5809
|
+
click.echo(click.style("\n✅ Selected developers:", fg="green", bold=True))
|
|
5810
|
+
for idx, dev in zip(selected_indices, selected_devs):
|
|
5811
|
+
click.echo(
|
|
5812
|
+
f" [{idx}] {dev['primary_name']} <{dev['primary_email']}> "
|
|
5813
|
+
f"({dev['total_commits']} commits)"
|
|
5814
|
+
)
|
|
5815
|
+
|
|
5816
|
+
# Ask which one should be primary
|
|
5817
|
+
click.echo()
|
|
5818
|
+
while True:
|
|
5819
|
+
try:
|
|
5820
|
+
primary_input = click.prompt(
|
|
5821
|
+
click.style(
|
|
5822
|
+
f"Which developer should be the primary identity? "
|
|
5823
|
+
f"Enter number ({', '.join(map(str, selected_indices))})",
|
|
5824
|
+
fg="yellow",
|
|
5825
|
+
),
|
|
5826
|
+
type=int,
|
|
5827
|
+
)
|
|
5828
|
+
|
|
5829
|
+
if primary_input in selected_indices:
|
|
5830
|
+
primary_idx = primary_input
|
|
5831
|
+
break
|
|
5832
|
+
else:
|
|
5833
|
+
click.echo(
|
|
5834
|
+
click.style(
|
|
5835
|
+
f"⚠️ Please select one of: {', '.join(map(str, selected_indices))}",
|
|
5836
|
+
fg="red",
|
|
5837
|
+
)
|
|
5838
|
+
)
|
|
5839
|
+
except ValueError:
|
|
5840
|
+
click.echo(click.style("⚠️ Please enter a valid number", fg="red"))
|
|
5841
|
+
except click.exceptions.Abort:
|
|
5842
|
+
click.echo("\n\n👋 Exiting alias creation.")
|
|
5843
|
+
sys.exit(0)
|
|
5844
|
+
|
|
5845
|
+
# Build alias configuration
|
|
5846
|
+
primary_dev = developers[primary_idx - 1]
|
|
5847
|
+
alias_emails = [
|
|
5848
|
+
dev["primary_email"]
|
|
5849
|
+
for idx, dev in zip(selected_indices, selected_devs)
|
|
5850
|
+
if idx != primary_idx
|
|
5851
|
+
]
|
|
5852
|
+
|
|
5853
|
+
# Create the alias
|
|
5854
|
+
new_alias = DeveloperAlias(
|
|
5855
|
+
primary_email=primary_dev["primary_email"],
|
|
5856
|
+
aliases=alias_emails,
|
|
5857
|
+
name=primary_dev["primary_name"],
|
|
5858
|
+
confidence=1.0, # Manual aliases have full confidence
|
|
5859
|
+
reasoning="Manually created via interactive CLI",
|
|
5860
|
+
)
|
|
5861
|
+
|
|
5862
|
+
# Display the alias configuration
|
|
5863
|
+
click.echo(click.style("\n📝 Alias Configuration:", fg="cyan", bold=True))
|
|
5864
|
+
click.echo(f" Primary: {new_alias.name} <{new_alias.primary_email}>")
|
|
5865
|
+
click.echo(" Aliases:")
|
|
5866
|
+
for alias_email in new_alias.aliases:
|
|
5867
|
+
click.echo(f" - {alias_email}")
|
|
5868
|
+
|
|
5869
|
+
# Add to aliases manager
|
|
5870
|
+
aliases_manager.add_alias(new_alias)
|
|
5871
|
+
|
|
5872
|
+
# Ask if user wants to save
|
|
5873
|
+
click.echo()
|
|
5874
|
+
if click.confirm(click.style(f"💾 Save alias to {output}?", fg="green"), default=True):
|
|
5875
|
+
try:
|
|
5876
|
+
aliases_manager.save()
|
|
5877
|
+
click.echo(click.style(f"✅ Alias saved to {output}", fg="green"))
|
|
5878
|
+
|
|
5879
|
+
# Also update the database directly by merging identities
|
|
5880
|
+
# For each alias email, find its canonical_id and merge with primary
|
|
5881
|
+
for alias_email in alias_emails:
|
|
5882
|
+
# Find the developer entry for this alias email
|
|
5883
|
+
alias_dev = next(
|
|
5884
|
+
(dev for dev in developers if dev["primary_email"] == alias_email), None
|
|
5885
|
+
)
|
|
5886
|
+
|
|
5887
|
+
if alias_dev:
|
|
5888
|
+
# Merge using canonical IDs
|
|
5889
|
+
identity_resolver.merge_identities(
|
|
5890
|
+
primary_dev["canonical_id"], # Primary's canonical_id
|
|
5891
|
+
alias_dev["canonical_id"], # Alias's canonical_id
|
|
5892
|
+
)
|
|
5893
|
+
else:
|
|
5894
|
+
# Edge case: alias email doesn't match any developer
|
|
5895
|
+
# This shouldn't happen, but log a warning
|
|
5896
|
+
click.echo(
|
|
5897
|
+
click.style(
|
|
5898
|
+
f"⚠️ Warning: Could not find developer entry for {alias_email}",
|
|
5899
|
+
fg="yellow",
|
|
5900
|
+
)
|
|
5901
|
+
)
|
|
5902
|
+
|
|
5903
|
+
click.echo(
|
|
5904
|
+
click.style("✅ Database updated with merged identities", fg="green")
|
|
5905
|
+
)
|
|
5906
|
+
|
|
5907
|
+
except Exception as e:
|
|
5908
|
+
click.echo(click.style(f"❌ Error saving alias: {e}", fg="red"), err=True)
|
|
5909
|
+
else:
|
|
5910
|
+
click.echo(click.style("⏭️ Alias not saved", fg="yellow"))
|
|
5911
|
+
|
|
5912
|
+
# Ask if user wants to create more aliases
|
|
5913
|
+
click.echo()
|
|
5914
|
+
if not click.confirm(click.style("🔄 Create another alias?", fg="cyan"), default=True):
|
|
5915
|
+
continue_creating = False
|
|
5916
|
+
|
|
5917
|
+
click.echo(click.style("\n✅ Alias creation completed!", fg="green", bold=True))
|
|
5918
|
+
click.echo(f"📄 Aliases file: {output}")
|
|
5919
|
+
click.echo("\n💡 To use these aliases, ensure your config references: {output}\n")
|
|
5920
|
+
|
|
5921
|
+
except KeyboardInterrupt:
|
|
5922
|
+
click.echo("\n\n👋 Interrupted by user. Exiting.")
|
|
5923
|
+
sys.exit(0)
|
|
5924
|
+
except Exception as e:
|
|
5925
|
+
click.echo(click.style(f"\n❌ Error: {e}", fg="red"), err=True)
|
|
5926
|
+
import traceback
|
|
5927
|
+
|
|
5928
|
+
traceback.print_exc()
|
|
5929
|
+
sys.exit(1)
|
|
5930
|
+
|
|
5931
|
+
|
|
5932
|
+
@cli.command(name="alias-rename")
|
|
5933
|
+
@click.option(
|
|
5934
|
+
"--config",
|
|
5935
|
+
"-c",
|
|
5936
|
+
type=click.Path(exists=True, path_type=Path),
|
|
5937
|
+
required=True,
|
|
5938
|
+
help="Path to YAML configuration file",
|
|
5939
|
+
)
|
|
5940
|
+
@click.option(
|
|
5941
|
+
"--old-name",
|
|
5942
|
+
help="Current canonical name to rename (must match a name in manual_mappings)",
|
|
5943
|
+
)
|
|
5944
|
+
@click.option(
|
|
5945
|
+
"--new-name",
|
|
5946
|
+
help="New canonical display name to use in reports",
|
|
5947
|
+
)
|
|
5948
|
+
@click.option(
|
|
5949
|
+
"--update-cache",
|
|
5950
|
+
is_flag=True,
|
|
5951
|
+
help="Update cached database records with the new name",
|
|
5952
|
+
)
|
|
5953
|
+
@click.option(
|
|
5954
|
+
"--dry-run",
|
|
5955
|
+
is_flag=True,
|
|
5956
|
+
help="Show what would be changed without applying changes",
|
|
5957
|
+
)
|
|
5958
|
+
@click.option(
|
|
5959
|
+
"--interactive",
|
|
5960
|
+
"-i",
|
|
5961
|
+
is_flag=True,
|
|
5962
|
+
help="Interactive mode: select developer from numbered list",
|
|
5963
|
+
)
|
|
5964
|
+
def alias_rename(
|
|
5965
|
+
config: Path,
|
|
5966
|
+
old_name: str,
|
|
5967
|
+
new_name: str,
|
|
5968
|
+
update_cache: bool,
|
|
5969
|
+
dry_run: bool,
|
|
5970
|
+
interactive: bool,
|
|
5971
|
+
) -> None:
|
|
5972
|
+
"""Rename a developer's canonical display name.
|
|
5973
|
+
|
|
5974
|
+
\b
|
|
5975
|
+
Updates the developer's name in:
|
|
5976
|
+
- Configuration file (analysis.identity.manual_mappings)
|
|
5977
|
+
- Database cache (if --update-cache is specified)
|
|
5978
|
+
|
|
5979
|
+
\b
|
|
5980
|
+
EXAMPLES:
|
|
5981
|
+
# Interactive mode: select from numbered list
|
|
5982
|
+
gitflow-analytics alias-rename -c config.yaml --interactive
|
|
5983
|
+
|
|
5984
|
+
# Rename with dry-run to see changes
|
|
5985
|
+
gitflow-analytics alias-rename -c config.yaml \\
|
|
5986
|
+
--old-name "bianco-zaelot" \\
|
|
5987
|
+
--new-name "Emiliozzo Bianco" \\
|
|
5988
|
+
--dry-run
|
|
5989
|
+
|
|
5990
|
+
# Apply rename to config only
|
|
5991
|
+
gitflow-analytics alias-rename -c config.yaml \\
|
|
5992
|
+
--old-name "bianco-zaelot" \\
|
|
5993
|
+
--new-name "Emiliozzo Bianco"
|
|
5994
|
+
|
|
5995
|
+
# Apply rename to config and update cache
|
|
5996
|
+
gitflow-analytics alias-rename -c config.yaml \\
|
|
5997
|
+
--old-name "bianco-zaelot" \\
|
|
5998
|
+
--new-name "Emiliozzo Bianco" \\
|
|
5999
|
+
--update-cache
|
|
6000
|
+
|
|
6001
|
+
\b
|
|
6002
|
+
NOTE:
|
|
6003
|
+
This command searches through analysis.identity.manual_mappings
|
|
6004
|
+
in your config file and updates the 'name' field for the matching
|
|
6005
|
+
entry. It preserves all other fields (primary_email, aliases).
|
|
6006
|
+
"""
|
|
6007
|
+
try:
|
|
6008
|
+
from .core.identity import DeveloperIdentityResolver
|
|
6009
|
+
|
|
6010
|
+
# Load the YAML config file
|
|
6011
|
+
click.echo(f"\n📋 Loading configuration from {config}...")
|
|
6012
|
+
|
|
6013
|
+
try:
|
|
6014
|
+
with open(config, encoding="utf-8") as f:
|
|
6015
|
+
config_data = yaml.safe_load(f)
|
|
6016
|
+
except Exception as e:
|
|
6017
|
+
click.echo(f"❌ Error loading config file: {e}", err=True)
|
|
6018
|
+
sys.exit(1)
|
|
6019
|
+
|
|
6020
|
+
# Navigate to analysis.identity.manual_mappings
|
|
6021
|
+
if "analysis" not in config_data:
|
|
6022
|
+
click.echo("❌ Error: 'analysis' section not found in config", err=True)
|
|
6023
|
+
sys.exit(1)
|
|
6024
|
+
|
|
6025
|
+
if "identity" not in config_data["analysis"]:
|
|
6026
|
+
click.echo("❌ Error: 'analysis.identity' section not found in config", err=True)
|
|
6027
|
+
sys.exit(1)
|
|
6028
|
+
|
|
6029
|
+
if "manual_mappings" not in config_data["analysis"]["identity"]:
|
|
6030
|
+
click.echo(
|
|
6031
|
+
"❌ Error: 'analysis.identity.manual_mappings' not found in config", err=True
|
|
6032
|
+
)
|
|
6033
|
+
sys.exit(1)
|
|
6034
|
+
|
|
6035
|
+
manual_mappings = config_data["analysis"]["identity"]["manual_mappings"]
|
|
6036
|
+
|
|
6037
|
+
if not manual_mappings:
|
|
6038
|
+
click.echo("❌ Error: manual_mappings is empty", err=True)
|
|
6039
|
+
sys.exit(1)
|
|
6040
|
+
|
|
6041
|
+
# Interactive mode: display numbered list and prompt for selection
|
|
6042
|
+
if interactive or not old_name or not new_name:
|
|
6043
|
+
click.echo("\n" + "=" * 60)
|
|
6044
|
+
click.echo(click.style("Current Developers:", fg="cyan", bold=True))
|
|
6045
|
+
click.echo("=" * 60 + "\n")
|
|
6046
|
+
|
|
6047
|
+
developer_names = []
|
|
6048
|
+
for idx, mapping in enumerate(manual_mappings, 1):
|
|
6049
|
+
name = mapping.get("name", "Unknown")
|
|
6050
|
+
email = mapping.get("primary_email", "N/A")
|
|
6051
|
+
alias_count = len(mapping.get("aliases", []))
|
|
6052
|
+
|
|
6053
|
+
developer_names.append(name)
|
|
6054
|
+
click.echo(f" {idx}. {click.style(name, fg='green')}")
|
|
6055
|
+
click.echo(f" Email: {email}")
|
|
6056
|
+
click.echo(f" Aliases: {alias_count} email(s)")
|
|
6057
|
+
click.echo()
|
|
6058
|
+
|
|
6059
|
+
# Prompt for selection
|
|
6060
|
+
try:
|
|
6061
|
+
selection = click.prompt(
|
|
6062
|
+
"Select developer number to rename (or 0 to cancel)",
|
|
6063
|
+
type=click.IntRange(0, len(developer_names)),
|
|
6064
|
+
)
|
|
6065
|
+
except click.Abort:
|
|
6066
|
+
click.echo("\n👋 Cancelled by user.")
|
|
6067
|
+
sys.exit(0)
|
|
6068
|
+
|
|
6069
|
+
if selection == 0:
|
|
6070
|
+
click.echo("\n👋 Cancelled.")
|
|
6071
|
+
sys.exit(0)
|
|
6072
|
+
|
|
6073
|
+
# Get selected developer name
|
|
6074
|
+
old_name = developer_names[selection - 1]
|
|
6075
|
+
click.echo(f"\n📝 Selected: {click.style(old_name, fg='green')}")
|
|
6076
|
+
|
|
6077
|
+
# Prompt for new name if not provided
|
|
6078
|
+
if not new_name:
|
|
6079
|
+
new_name = click.prompt("Enter new canonical name", type=str)
|
|
6080
|
+
|
|
6081
|
+
# Validate inputs
|
|
6082
|
+
if not old_name or not old_name.strip():
|
|
6083
|
+
click.echo("❌ Error: --old-name cannot be empty", err=True)
|
|
6084
|
+
sys.exit(1)
|
|
6085
|
+
|
|
6086
|
+
if not new_name or not new_name.strip():
|
|
6087
|
+
click.echo("❌ Error: --new-name cannot be empty", err=True)
|
|
6088
|
+
sys.exit(1)
|
|
6089
|
+
|
|
6090
|
+
old_name = old_name.strip()
|
|
6091
|
+
new_name = new_name.strip()
|
|
6092
|
+
|
|
6093
|
+
if old_name == new_name:
|
|
6094
|
+
click.echo("❌ Error: old-name and new-name are identical", err=True)
|
|
6095
|
+
sys.exit(1)
|
|
6096
|
+
|
|
6097
|
+
# Find the matching entry
|
|
6098
|
+
matching_entry = None
|
|
6099
|
+
matching_index = None
|
|
6100
|
+
|
|
6101
|
+
for idx, mapping in enumerate(manual_mappings):
|
|
6102
|
+
if mapping.get("name") == old_name:
|
|
6103
|
+
matching_entry = mapping
|
|
6104
|
+
matching_index = idx
|
|
6105
|
+
break
|
|
6106
|
+
|
|
6107
|
+
if not matching_entry:
|
|
6108
|
+
click.echo(f"❌ Error: No manual mapping found with name '{old_name}'", err=True)
|
|
6109
|
+
click.echo("\nAvailable names in manual_mappings:")
|
|
6110
|
+
for mapping in manual_mappings:
|
|
6111
|
+
if "name" in mapping:
|
|
6112
|
+
click.echo(f" - {mapping['name']}")
|
|
6113
|
+
sys.exit(1)
|
|
6114
|
+
|
|
6115
|
+
# Display what will be changed
|
|
6116
|
+
click.echo("\n🔍 Found matching entry:")
|
|
6117
|
+
click.echo(f" Current name: {old_name}")
|
|
6118
|
+
click.echo(f" New name: {new_name}")
|
|
6119
|
+
click.echo(f" Email: {matching_entry.get('primary_email', 'N/A')}")
|
|
6120
|
+
click.echo(f" Aliases: {len(matching_entry.get('aliases', []))} email(s)")
|
|
6121
|
+
|
|
6122
|
+
if dry_run:
|
|
6123
|
+
click.echo("\n🔎 DRY RUN - No changes will be made")
|
|
6124
|
+
|
|
6125
|
+
# Update the config file
|
|
6126
|
+
if not dry_run:
|
|
6127
|
+
click.echo("\n📝 Updating configuration file...")
|
|
6128
|
+
manual_mappings[matching_index]["name"] = new_name
|
|
6129
|
+
|
|
6130
|
+
try:
|
|
6131
|
+
with open(config, "w", encoding="utf-8") as f:
|
|
6132
|
+
yaml.dump(
|
|
6133
|
+
config_data,
|
|
6134
|
+
f,
|
|
6135
|
+
default_flow_style=False,
|
|
6136
|
+
allow_unicode=True,
|
|
6137
|
+
sort_keys=False,
|
|
6138
|
+
)
|
|
6139
|
+
click.echo("✅ Configuration file updated")
|
|
6140
|
+
except Exception as e:
|
|
6141
|
+
click.echo(f"❌ Error writing config file: {e}", err=True)
|
|
6142
|
+
sys.exit(1)
|
|
6143
|
+
else:
|
|
6144
|
+
click.echo(f" [Would update config: {config}]")
|
|
6145
|
+
|
|
6146
|
+
# Update database cache if requested
|
|
6147
|
+
if update_cache:
|
|
6148
|
+
click.echo("\n💾 Checking database cache...")
|
|
6149
|
+
|
|
6150
|
+
# Load config to get cache directory
|
|
6151
|
+
cfg = ConfigLoader.load(config)
|
|
6152
|
+
identity_db_path = cfg.cache.directory / "identities.db"
|
|
6153
|
+
|
|
6154
|
+
if not identity_db_path.exists():
|
|
6155
|
+
click.echo(f"⚠️ Warning: Identity database not found at {identity_db_path}")
|
|
6156
|
+
click.echo(" Skipping cache update")
|
|
6157
|
+
else:
|
|
6158
|
+
# Initialize identity resolver to access database
|
|
6159
|
+
identity_resolver = DeveloperIdentityResolver(
|
|
6160
|
+
str(identity_db_path),
|
|
6161
|
+
manual_mappings=None, # Don't apply mappings during rename
|
|
6162
|
+
)
|
|
6163
|
+
|
|
6164
|
+
# Count affected records
|
|
6165
|
+
from sqlalchemy import text
|
|
6166
|
+
|
|
6167
|
+
with identity_resolver.get_session() as session:
|
|
6168
|
+
# Count developer_identities records
|
|
6169
|
+
result = session.execute(
|
|
6170
|
+
text(
|
|
6171
|
+
"SELECT COUNT(*) FROM developer_identities WHERE primary_name = :old_name"
|
|
6172
|
+
),
|
|
6173
|
+
{"old_name": old_name},
|
|
6174
|
+
)
|
|
6175
|
+
identity_count = result.scalar()
|
|
6176
|
+
|
|
6177
|
+
# Count developer_aliases records
|
|
6178
|
+
result = session.execute(
|
|
6179
|
+
text("SELECT COUNT(*) FROM developer_aliases WHERE name = :old_name"),
|
|
6180
|
+
{"old_name": old_name},
|
|
6181
|
+
)
|
|
6182
|
+
alias_count = result.scalar()
|
|
6183
|
+
|
|
6184
|
+
click.echo(f" Found {identity_count} identity record(s)")
|
|
6185
|
+
click.echo(f" Found {alias_count} alias record(s)")
|
|
6186
|
+
|
|
6187
|
+
if identity_count == 0 and alias_count == 0:
|
|
6188
|
+
click.echo(" ℹ️ No database records to update")
|
|
6189
|
+
elif not dry_run:
|
|
6190
|
+
click.echo(" Updating database records...")
|
|
6191
|
+
|
|
6192
|
+
with identity_resolver.get_session() as session:
|
|
6193
|
+
# Update developer_identities
|
|
6194
|
+
if identity_count > 0:
|
|
6195
|
+
session.execute(
|
|
6196
|
+
text(
|
|
6197
|
+
"UPDATE developer_identities SET primary_name = :new_name WHERE primary_name = :old_name"
|
|
6198
|
+
),
|
|
6199
|
+
{"new_name": new_name, "old_name": old_name},
|
|
6200
|
+
)
|
|
6201
|
+
|
|
6202
|
+
# Update developer_aliases
|
|
6203
|
+
if alias_count > 0:
|
|
6204
|
+
session.execute(
|
|
6205
|
+
text(
|
|
6206
|
+
"UPDATE developer_aliases SET name = :new_name WHERE name = :old_name"
|
|
6207
|
+
),
|
|
6208
|
+
{"new_name": new_name, "old_name": old_name},
|
|
6209
|
+
)
|
|
6210
|
+
|
|
6211
|
+
click.echo(" ✅ Database updated")
|
|
6212
|
+
else:
|
|
6213
|
+
click.echo(
|
|
6214
|
+
f" [Would update {identity_count + alias_count} database record(s)]"
|
|
6215
|
+
)
|
|
6216
|
+
|
|
6217
|
+
# Summary
|
|
6218
|
+
click.echo(f"\n{'🔎 DRY RUN SUMMARY' if dry_run else '✅ RENAME COMPLETE'}")
|
|
6219
|
+
click.echo(f" Old name: {old_name}")
|
|
6220
|
+
click.echo(f" New name: {new_name}")
|
|
6221
|
+
click.echo(f" Config: {'Would update' if dry_run else 'Updated'}")
|
|
6222
|
+
if update_cache:
|
|
6223
|
+
click.echo(f" Cache: {'Would update' if dry_run else 'Updated'}")
|
|
6224
|
+
else:
|
|
6225
|
+
click.echo(" Cache: Skipped (use --update-cache to update)")
|
|
6226
|
+
|
|
6227
|
+
if dry_run:
|
|
6228
|
+
click.echo("\n💡 Run without --dry-run to apply changes")
|
|
6229
|
+
else:
|
|
6230
|
+
click.echo("\n💡 Next steps:")
|
|
6231
|
+
click.echo(f" - Review the updated config file: {config}")
|
|
6232
|
+
click.echo(" - Re-run analysis to see updated reports with new name")
|
|
6233
|
+
|
|
6234
|
+
except KeyboardInterrupt:
|
|
6235
|
+
click.echo("\n\n👋 Interrupted by user. Exiting.")
|
|
6236
|
+
sys.exit(0)
|
|
6237
|
+
except Exception as e:
|
|
6238
|
+
click.echo(f"❌ Unexpected error: {e}", err=True)
|
|
6239
|
+
import traceback
|
|
6240
|
+
|
|
6241
|
+
traceback.print_exc()
|
|
6242
|
+
sys.exit(1)
|
|
6243
|
+
|
|
6244
|
+
|
|
 @cli.command()
 @click.option(
     "--config",
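Note: the alias-rename hunk above assumes manual_mappings entries shaped as name / primary_email / aliases. A minimal standalone sketch of the same rename logic, runnable without gitflow-analytics — the developer names and email addresses are hypothetical placeholders:

import yaml

# Hypothetical config fragment with the structure alias-rename expects:
# analysis.identity.manual_mappings entries carrying name, primary_email, aliases.
config_data = yaml.safe_load(
    """
analysis:
  identity:
    manual_mappings:
      - name: bianco-zaelot
        primary_email: bianco@example.com
        aliases:
          - ebianco@example.com
"""
)

mappings = config_data["analysis"]["identity"]["manual_mappings"]
for mapping in mappings:
    if mapping.get("name") == "bianco-zaelot":
        mapping["name"] = "Emiliozzo Bianco"  # only the display name changes
        break  # primary_email and aliases are left untouched

# Same dump options the command uses, so key order and unicode survive the rewrite.
print(yaml.dump(config_data, default_flow_style=False, allow_unicode=True, sort_keys=False))

Dumping with sort_keys=False mirrors the command's yaml.dump call, which is what keeps the rewritten config file in its original key order.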
@@ -5943,31 +6664,31 @@ def show_help() -> None:
 ────────────────────
 1. Create a configuration file:
    cp config-sample.yaml myconfig.yaml
-
+
 2. Edit configuration with your repositories:
    repositories:
      - path: /path/to/repo
        branch: main
-
+
 3. Run your first analysis:
    gitflow-analytics -c myconfig.yaml --weeks 4
-
+
 4. View reports in the output directory
 
 🔧 COMMON WORKFLOWS
 ──────────────────
 Weekly team report:
    gitflow-analytics -c config.yaml --weeks 1
-
+
 Monthly metrics with all formats:
    gitflow-analytics -c config.yaml --weeks 4 --generate-csv
-
+
 Identity resolution:
    gitflow-analytics identities -c config.yaml
-
+
 Fresh analysis (bypass cache):
    gitflow-analytics -c config.yaml --clear-cache
-
+
 Quick config validation:
    gitflow-analytics -c config.yaml --validate-only
 
@@ -5984,15 +6705,15 @@ def show_help() -> None:
 Slow analysis?
    → Use caching (default) or reduce --weeks
    → Check cache stats: cache-stats command
-
+
 Wrong developer names?
    → Run: identities command
    → Add manual mappings to config
-
+
 Missing ticket references?
    → Check ticket_platforms configuration
    → Verify commit message format
-
+
 API errors?
    → Verify credentials in config or .env
    → Check rate limits
@@ -6005,13 +6726,13 @@ def show_help() -> None:
    • weekly_metrics: Time-based trends
    • activity_distribution: Work patterns
    • untracked_commits: Process gaps
-
+
 Narrative Report (default):
    • Executive summary
    • Team composition analysis
    • Development patterns
    • Recommendations
-
+
 JSON Export:
    • Complete data for integration
    • All metrics and metadata
@@ -6022,12 +6743,12 @@ def show_help() -> None:
    • Pull requests and reviews
    • Issues and milestones
    • DORA metrics
-
+
 JIRA:
    • Story points and velocity
    • Sprint tracking
    • Issue types
-
+
 ClickUp:
    • Task tracking
    • Time estimates
@@ -6092,7 +6813,10 @@ def training_statistics(config: Path) -> None:
 
     # Initialize trainer to access statistics
     trainer = CommitClassificationTrainer(
-        config=cfg,
+        config=cfg,
+        cache=cache,
+        orchestrator=None,
+        training_config={},  # Not needed for stats
     )
 
     stats = trainer.get_training_statistics()
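A note on the cache update in the alias-rename hunk: rows are renamed through SQLAlchemy text() statements with bound parameters rather than string interpolation. A self-contained sketch of that pattern against a throwaway in-memory SQLite database — the table and column names (developer_identities.primary_name, developer_aliases.name) come from the diff, while the one-column schema is an illustrative stand-in, not the real cache schema:

from sqlalchemy import create_engine, text

# Throwaway in-memory database; the one-column tables are stand-ins for
# the real identity cache, but the names mirror the diff.
engine = create_engine("sqlite:///:memory:")

with engine.begin() as conn:
    conn.execute(text("CREATE TABLE developer_identities (primary_name TEXT)"))
    conn.execute(text("CREATE TABLE developer_aliases (name TEXT)"))
    conn.execute(text("INSERT INTO developer_identities VALUES ('bianco-zaelot')"))
    conn.execute(text("INSERT INTO developer_aliases VALUES ('bianco-zaelot')"))

    # Bound parameters keep arbitrary developer names safe to pass in,
    # matching the :old_name/:new_name placeholders used by the command.
    params = {"new_name": "Emiliozzo Bianco", "old_name": "bianco-zaelot"}
    conn.execute(
        text(
            "UPDATE developer_identities SET primary_name = :new_name "
            "WHERE primary_name = :old_name"
        ),
        params,
    )
    conn.execute(
        text("UPDATE developer_aliases SET name = :new_name WHERE name = :old_name"),
        params,
    )

    renamed = conn.execute(
        text("SELECT COUNT(*) FROM developer_identities WHERE primary_name = :name"),
        {"name": "Emiliozzo Bianco"},
    ).scalar()
    print(f"identity records renamed: {renamed}")  # expect 1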