gitflow-analytics 3.4.7__py3-none-any.whl → 3.5.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gitflow_analytics/_version.py +1 -1
- gitflow_analytics/cli.py +353 -0
- gitflow_analytics/cli_wizards/install_wizard.py +263 -18
- gitflow_analytics/cli_wizards/run_launcher.py +98 -8
- gitflow_analytics/config/__init__.py +3 -0
- gitflow_analytics/config/aliases.py +306 -0
- gitflow_analytics/config/loader.py +35 -1
- gitflow_analytics/config/schema.py +1 -0
- {gitflow_analytics-3.4.7.dist-info → gitflow_analytics-3.5.2.dist-info}/METADATA +1 -1
- {gitflow_analytics-3.4.7.dist-info → gitflow_analytics-3.5.2.dist-info}/RECORD +14 -13
- {gitflow_analytics-3.4.7.dist-info → gitflow_analytics-3.5.2.dist-info}/WHEEL +0 -0
- {gitflow_analytics-3.4.7.dist-info → gitflow_analytics-3.5.2.dist-info}/entry_points.txt +0 -0
- {gitflow_analytics-3.4.7.dist-info → gitflow_analytics-3.5.2.dist-info}/licenses/LICENSE +0 -0
- {gitflow_analytics-3.4.7.dist-info → gitflow_analytics-3.5.2.dist-info}/top_level.txt +0 -0
gitflow_analytics/_version.py
CHANGED
gitflow_analytics/cli.py
CHANGED
|
@@ -342,11 +342,28 @@ def cli(ctx: click.Context) -> None:
|
|
|
342
342
|
\b
|
|
343
343
|
COMMANDS:
|
|
344
344
|
analyze Analyze repositories and generate reports (default)
|
|
345
|
+
install Interactive installation wizard
|
|
346
|
+
run Interactive launcher with preferences
|
|
347
|
+
aliases Generate developer identity aliases using LLM
|
|
345
348
|
identities Manage developer identity resolution
|
|
346
349
|
train Train ML models for commit classification
|
|
347
350
|
fetch Fetch external data (GitHub PRs, PM tickets)
|
|
348
351
|
help Show detailed help and documentation
|
|
349
352
|
|
|
353
|
+
\b
|
|
354
|
+
EXAMPLES:
|
|
355
|
+
# Interactive installation
|
|
356
|
+
gitflow-analytics install
|
|
357
|
+
|
|
358
|
+
# Interactive launcher
|
|
359
|
+
gitflow-analytics run -c config.yaml
|
|
360
|
+
|
|
361
|
+
# Generate developer aliases
|
|
362
|
+
gitflow-analytics aliases -c config.yaml --apply
|
|
363
|
+
|
|
364
|
+
# Run analysis
|
|
365
|
+
gitflow-analytics -c config.yaml --weeks 4
|
|
366
|
+
|
|
350
367
|
\b
|
|
351
368
|
For detailed command help: gitflow-analytics COMMAND --help
|
|
352
369
|
For documentation: https://github.com/yourusername/gitflow-analytics
|
|
@@ -4857,6 +4874,342 @@ def identities(config: Path, weeks: int, apply: bool) -> None:
|
|
|
4857
4874
|
sys.exit(1)
|
|
4858
4875
|
|
|
4859
4876
|
|
|
4877
|
+
@cli.command(name="aliases")
|
|
4878
|
+
@click.option(
|
|
4879
|
+
"--config",
|
|
4880
|
+
"-c",
|
|
4881
|
+
type=click.Path(exists=True, path_type=Path),
|
|
4882
|
+
required=True,
|
|
4883
|
+
help="Path to configuration file",
|
|
4884
|
+
)
|
|
4885
|
+
@click.option(
|
|
4886
|
+
"--output",
|
|
4887
|
+
"-o",
|
|
4888
|
+
type=click.Path(path_type=Path),
|
|
4889
|
+
help="Output path for aliases.yaml (default: same dir as config)",
|
|
4890
|
+
)
|
|
4891
|
+
@click.option(
|
|
4892
|
+
"--confidence-threshold",
|
|
4893
|
+
type=float,
|
|
4894
|
+
default=0.9,
|
|
4895
|
+
help="Minimum confidence threshold for LLM matches (default: 0.9)",
|
|
4896
|
+
)
|
|
4897
|
+
@click.option(
|
|
4898
|
+
"--apply", is_flag=True, help="Automatically update config to use generated aliases file"
|
|
4899
|
+
)
|
|
4900
|
+
@click.option(
|
|
4901
|
+
"--weeks", type=int, default=12, help="Number of weeks of history to analyze (default: 12)"
|
|
4902
|
+
)
|
|
4903
|
+
def aliases_command(
|
|
4904
|
+
config: Path,
|
|
4905
|
+
output: Optional[Path],
|
|
4906
|
+
confidence_threshold: float,
|
|
4907
|
+
apply: bool,
|
|
4908
|
+
weeks: int,
|
|
4909
|
+
) -> None:
|
|
4910
|
+
"""Generate developer identity aliases using LLM analysis.
|
|
4911
|
+
|
|
4912
|
+
\b
|
|
4913
|
+
This command analyzes commit history and uses LLM to identify
|
|
4914
|
+
developer aliases (same person with different email addresses).
|
|
4915
|
+
Results are saved to aliases.yaml which can be shared across
|
|
4916
|
+
multiple config files.
|
|
4917
|
+
|
|
4918
|
+
\b
|
|
4919
|
+
EXAMPLES:
|
|
4920
|
+
# Generate aliases and review
|
|
4921
|
+
gitflow-analytics aliases -c config.yaml
|
|
4922
|
+
|
|
4923
|
+
# Generate and apply automatically
|
|
4924
|
+
gitflow-analytics aliases -c config.yaml --apply
|
|
4925
|
+
|
|
4926
|
+
# Save to specific location
|
|
4927
|
+
gitflow-analytics aliases -c config.yaml -o ~/shared/aliases.yaml
|
|
4928
|
+
|
|
4929
|
+
# Use longer history for better accuracy
|
|
4930
|
+
gitflow-analytics aliases -c config.yaml --weeks 24
|
|
4931
|
+
|
|
4932
|
+
\b
|
|
4933
|
+
CONFIGURATION:
|
|
4934
|
+
Aliases are saved to aliases.yaml and can be referenced in
|
|
4935
|
+
multiple config files for consistent identity resolution.
|
|
4936
|
+
"""
|
|
4937
|
+
try:
|
|
4938
|
+
from .config.aliases import AliasesManager, DeveloperAlias
|
|
4939
|
+
from .identity_llm.analyzer import LLMIdentityAnalyzer
|
|
4940
|
+
|
|
4941
|
+
# Load configuration
|
|
4942
|
+
click.echo(f"\n📋 Loading configuration from {config}...")
|
|
4943
|
+
cfg = ConfigLoader.load(config)
|
|
4944
|
+
|
|
4945
|
+
# Determine output path
|
|
4946
|
+
if not output:
|
|
4947
|
+
output = config.parent / "aliases.yaml"
|
|
4948
|
+
|
|
4949
|
+
click.echo(f"🔍 Analyzing developer identities (last {weeks} weeks)")
|
|
4950
|
+
click.echo(f"📊 Confidence threshold: {confidence_threshold:.0%}")
|
|
4951
|
+
click.echo(f"💾 Output: {output}\n")
|
|
4952
|
+
|
|
4953
|
+
# Set up date range
|
|
4954
|
+
end_date = datetime.now(timezone.utc)
|
|
4955
|
+
start_date = end_date - timedelta(weeks=weeks)
|
|
4956
|
+
|
|
4957
|
+
# Analyze repositories to collect commits
|
|
4958
|
+
click.echo("📥 Fetching commit history...\n")
|
|
4959
|
+
cache = GitAnalysisCache(cfg.cache.directory)
|
|
4960
|
+
|
|
4961
|
+
# Prepare ML categorization config for analyzer
|
|
4962
|
+
ml_config = None
|
|
4963
|
+
if hasattr(cfg.analysis, "ml_categorization"):
|
|
4964
|
+
ml_config = {
|
|
4965
|
+
"enabled": cfg.analysis.ml_categorization.enabled,
|
|
4966
|
+
"min_confidence": cfg.analysis.ml_categorization.min_confidence,
|
|
4967
|
+
"semantic_weight": cfg.analysis.ml_categorization.semantic_weight,
|
|
4968
|
+
"file_pattern_weight": cfg.analysis.ml_categorization.file_pattern_weight,
|
|
4969
|
+
"hybrid_threshold": cfg.analysis.ml_categorization.hybrid_threshold,
|
|
4970
|
+
"cache_duration_days": cfg.analysis.ml_categorization.cache_duration_days,
|
|
4971
|
+
"batch_size": cfg.analysis.ml_categorization.batch_size,
|
|
4972
|
+
"enable_caching": cfg.analysis.ml_categorization.enable_caching,
|
|
4973
|
+
"spacy_model": cfg.analysis.ml_categorization.spacy_model,
|
|
4974
|
+
}
|
|
4975
|
+
|
|
4976
|
+
# LLM classification configuration
|
|
4977
|
+
llm_config = {
|
|
4978
|
+
"enabled": cfg.analysis.llm_classification.enabled,
|
|
4979
|
+
"api_key": cfg.analysis.llm_classification.api_key,
|
|
4980
|
+
"model": cfg.analysis.llm_classification.model,
|
|
4981
|
+
"confidence_threshold": cfg.analysis.llm_classification.confidence_threshold,
|
|
4982
|
+
"max_tokens": cfg.analysis.llm_classification.max_tokens,
|
|
4983
|
+
"temperature": cfg.analysis.llm_classification.temperature,
|
|
4984
|
+
"timeout_seconds": cfg.analysis.llm_classification.timeout_seconds,
|
|
4985
|
+
"cache_duration_days": cfg.analysis.llm_classification.cache_duration_days,
|
|
4986
|
+
"enable_caching": cfg.analysis.llm_classification.enable_caching,
|
|
4987
|
+
"max_daily_requests": cfg.analysis.llm_classification.max_daily_requests,
|
|
4988
|
+
"domain_terms": cfg.analysis.llm_classification.domain_terms,
|
|
4989
|
+
}
|
|
4990
|
+
|
|
4991
|
+
# Configure branch analysis
|
|
4992
|
+
branch_analysis_config = {
|
|
4993
|
+
"strategy": cfg.analysis.branch_analysis.strategy,
|
|
4994
|
+
"max_branches_per_repo": cfg.analysis.branch_analysis.max_branches_per_repo,
|
|
4995
|
+
"active_days_threshold": cfg.analysis.branch_analysis.active_days_threshold,
|
|
4996
|
+
"include_main_branches": cfg.analysis.branch_analysis.include_main_branches,
|
|
4997
|
+
"always_include_patterns": cfg.analysis.branch_analysis.always_include_patterns,
|
|
4998
|
+
"always_exclude_patterns": cfg.analysis.branch_analysis.always_exclude_patterns,
|
|
4999
|
+
"enable_progress_logging": cfg.analysis.branch_analysis.enable_progress_logging,
|
|
5000
|
+
"branch_commit_limit": cfg.analysis.branch_analysis.branch_commit_limit,
|
|
5001
|
+
}
|
|
5002
|
+
|
|
5003
|
+
analyzer = GitAnalyzer(
|
|
5004
|
+
cache,
|
|
5005
|
+
branch_mapping_rules=cfg.analysis.branch_mapping_rules,
|
|
5006
|
+
allowed_ticket_platforms=getattr(
|
|
5007
|
+
cfg.analysis, "ticket_platforms", ["jira", "github", "clickup", "linear"]
|
|
5008
|
+
),
|
|
5009
|
+
exclude_paths=cfg.analysis.exclude_paths,
|
|
5010
|
+
story_point_patterns=cfg.analysis.story_point_patterns,
|
|
5011
|
+
ml_categorization_config=ml_config,
|
|
5012
|
+
llm_config=llm_config,
|
|
5013
|
+
branch_analysis_config=branch_analysis_config,
|
|
5014
|
+
)
|
|
5015
|
+
|
|
5016
|
+
all_commits = []
|
|
5017
|
+
|
|
5018
|
+
# Get repositories to analyze
|
|
5019
|
+
repositories = cfg.repositories if cfg.repositories else []
|
|
5020
|
+
|
|
5021
|
+
if not repositories:
|
|
5022
|
+
click.echo("❌ No repositories configured", err=True)
|
|
5023
|
+
sys.exit(1)
|
|
5024
|
+
|
|
5025
|
+
# Collect commits from all repositories
|
|
5026
|
+
with click.progressbar(
|
|
5027
|
+
repositories,
|
|
5028
|
+
label="Analyzing repositories",
|
|
5029
|
+
item_show_func=lambda r: r.name if r else "",
|
|
5030
|
+
) as repos:
|
|
5031
|
+
for repo_config in repos:
|
|
5032
|
+
try:
|
|
5033
|
+
if not repo_config.path.exists():
|
|
5034
|
+
continue
|
|
5035
|
+
|
|
5036
|
+
# Fetch commits
|
|
5037
|
+
repo_commits = analyzer.analyze_repository(
|
|
5038
|
+
repo_config.path, start_date=start_date, branch=repo_config.branch
|
|
5039
|
+
)
|
|
5040
|
+
|
|
5041
|
+
if repo_commits:
|
|
5042
|
+
all_commits.extend(repo_commits)
|
|
5043
|
+
|
|
5044
|
+
except Exception as e:
|
|
5045
|
+
click.echo(f"\n⚠️ Warning: Failed to analyze repository: {e}", err=True)
|
|
5046
|
+
continue
|
|
5047
|
+
|
|
5048
|
+
click.echo(f"\n✅ Collected {len(all_commits)} commits\n")
|
|
5049
|
+
|
|
5050
|
+
if not all_commits:
|
|
5051
|
+
click.echo("❌ No commits found to analyze", err=True)
|
|
5052
|
+
sys.exit(1)
|
|
5053
|
+
|
|
5054
|
+
# Initialize LLM identity analyzer
|
|
5055
|
+
click.echo("🤖 Running LLM identity analysis...\n")
|
|
5056
|
+
|
|
5057
|
+
# Get OpenRouter API key from config
|
|
5058
|
+
api_key = None
|
|
5059
|
+
if cfg.chatgpt and cfg.chatgpt.api_key:
|
|
5060
|
+
# Resolve environment variable if needed
|
|
5061
|
+
api_key_value = cfg.chatgpt.api_key
|
|
5062
|
+
if api_key_value.startswith("${") and api_key_value.endswith("}"):
|
|
5063
|
+
var_name = api_key_value[2:-1]
|
|
5064
|
+
api_key = os.getenv(var_name)
|
|
5065
|
+
else:
|
|
5066
|
+
api_key = api_key_value
|
|
5067
|
+
|
|
5068
|
+
if not api_key:
|
|
5069
|
+
click.echo(
|
|
5070
|
+
"⚠️ No OpenRouter API key configured - using heuristic analysis only", err=True
|
|
5071
|
+
)
|
|
5072
|
+
|
|
5073
|
+
llm_analyzer = LLMIdentityAnalyzer(
|
|
5074
|
+
api_key=api_key, confidence_threshold=confidence_threshold
|
|
5075
|
+
)
|
|
5076
|
+
|
|
5077
|
+
# Run analysis
|
|
5078
|
+
result = llm_analyzer.analyze_identities(all_commits)
|
|
5079
|
+
|
|
5080
|
+
click.echo("✅ Analysis complete:")
|
|
5081
|
+
click.echo(f" - Found {len(result.clusters)} identity clusters")
|
|
5082
|
+
click.echo(f" - {len(result.unresolved_identities)} unresolved identities")
|
|
5083
|
+
click.echo(f" - Method: {result.analysis_metadata.get('analysis_method', 'unknown')}\n")
|
|
5084
|
+
|
|
5085
|
+
# Create aliases manager and add clusters
|
|
5086
|
+
aliases_mgr = AliasesManager(output)
|
|
5087
|
+
|
|
5088
|
+
# Load existing aliases if file exists
|
|
5089
|
+
if output.exists():
|
|
5090
|
+
click.echo(f"📂 Loading existing aliases from {output}...")
|
|
5091
|
+
aliases_mgr.load()
|
|
5092
|
+
existing_count = len(aliases_mgr.aliases)
|
|
5093
|
+
click.echo(f" Found {existing_count} existing aliases\n")
|
|
5094
|
+
|
|
5095
|
+
# Add new clusters
|
|
5096
|
+
new_count = 0
|
|
5097
|
+
updated_count = 0
|
|
5098
|
+
|
|
5099
|
+
for cluster in result.clusters:
|
|
5100
|
+
# Check if this is a new or updated alias
|
|
5101
|
+
existing = aliases_mgr.get_alias(cluster.canonical_email)
|
|
5102
|
+
|
|
5103
|
+
alias = DeveloperAlias(
|
|
5104
|
+
name=cluster.preferred_display_name or cluster.canonical_name,
|
|
5105
|
+
primary_email=cluster.canonical_email,
|
|
5106
|
+
aliases=[a.email for a in cluster.aliases],
|
|
5107
|
+
confidence=cluster.confidence,
|
|
5108
|
+
reasoning=(
|
|
5109
|
+
cluster.reasoning[:200] if cluster.reasoning else ""
|
|
5110
|
+
), # Truncate for readability
|
|
5111
|
+
)
|
|
5112
|
+
|
|
5113
|
+
if existing:
|
|
5114
|
+
updated_count += 1
|
|
5115
|
+
else:
|
|
5116
|
+
new_count += 1
|
|
5117
|
+
|
|
5118
|
+
aliases_mgr.add_alias(alias)
|
|
5119
|
+
|
|
5120
|
+
# Save aliases
|
|
5121
|
+
click.echo("💾 Saving aliases...\n")
|
|
5122
|
+
aliases_mgr.save()
|
|
5123
|
+
|
|
5124
|
+
click.echo(f"✅ Saved to {output}")
|
|
5125
|
+
click.echo(f" - New aliases: {new_count}")
|
|
5126
|
+
click.echo(f" - Updated aliases: {updated_count}")
|
|
5127
|
+
click.echo(f" - Total aliases: {len(aliases_mgr.aliases)}\n")
|
|
5128
|
+
|
|
5129
|
+
# Display summary
|
|
5130
|
+
if aliases_mgr.aliases:
|
|
5131
|
+
click.echo("📋 Generated Aliases:\n")
|
|
5132
|
+
|
|
5133
|
+
for alias in sorted(aliases_mgr.aliases, key=lambda a: a.primary_email):
|
|
5134
|
+
name_display = (
|
|
5135
|
+
f"{alias.name} <{alias.primary_email}>" if alias.name else alias.primary_email
|
|
5136
|
+
)
|
|
5137
|
+
click.echo(f" • {name_display}")
|
|
5138
|
+
|
|
5139
|
+
if alias.aliases:
|
|
5140
|
+
for alias_email in alias.aliases:
|
|
5141
|
+
click.echo(f" → {alias_email}")
|
|
5142
|
+
|
|
5143
|
+
if alias.confidence < 1.0:
|
|
5144
|
+
confidence_color = (
|
|
5145
|
+
"green"
|
|
5146
|
+
if alias.confidence >= 0.9
|
|
5147
|
+
else "yellow" if alias.confidence >= 0.8 else "red"
|
|
5148
|
+
)
|
|
5149
|
+
click.echo(" Confidence: ", nl=False)
|
|
5150
|
+
click.secho(f"{alias.confidence:.0%}", fg=confidence_color)
|
|
5151
|
+
|
|
5152
|
+
click.echo() # Blank line between aliases
|
|
5153
|
+
|
|
5154
|
+
# Apply to config if requested
|
|
5155
|
+
if apply:
|
|
5156
|
+
click.echo(f"🔄 Updating {config} to reference aliases file...\n")
|
|
5157
|
+
|
|
5158
|
+
# Read current config
|
|
5159
|
+
with open(config) as f:
|
|
5160
|
+
config_data = yaml.safe_load(f)
|
|
5161
|
+
|
|
5162
|
+
# Ensure analysis section exists
|
|
5163
|
+
if "analysis" not in config_data:
|
|
5164
|
+
config_data["analysis"] = {}
|
|
5165
|
+
|
|
5166
|
+
if "identity" not in config_data["analysis"]:
|
|
5167
|
+
config_data["analysis"]["identity"] = {}
|
|
5168
|
+
|
|
5169
|
+
# Calculate relative path from config to aliases file
|
|
5170
|
+
try:
|
|
5171
|
+
rel_path = output.relative_to(config.parent)
|
|
5172
|
+
config_data["analysis"]["identity"]["aliases_file"] = str(rel_path)
|
|
5173
|
+
except ValueError:
|
|
5174
|
+
# Not relative, use absolute
|
|
5175
|
+
config_data["analysis"]["identity"]["aliases_file"] = str(output)
|
|
5176
|
+
|
|
5177
|
+
# Remove manual_mappings if present (now in aliases file)
|
|
5178
|
+
if "manual_identity_mappings" in config_data["analysis"].get("identity", {}):
|
|
5179
|
+
del config_data["analysis"]["identity"]["manual_identity_mappings"]
|
|
5180
|
+
click.echo(" Removed inline manual_identity_mappings (now in aliases file)")
|
|
5181
|
+
|
|
5182
|
+
# Save updated config
|
|
5183
|
+
with open(config, "w") as f:
|
|
5184
|
+
yaml.dump(config_data, f, default_flow_style=False, sort_keys=False)
|
|
5185
|
+
|
|
5186
|
+
click.echo(f"✅ Updated {config}")
|
|
5187
|
+
click.echo(
|
|
5188
|
+
f" Added: analysis.identity.aliases_file = "
|
|
5189
|
+
f"{config_data['analysis']['identity']['aliases_file']}\n"
|
|
5190
|
+
)
|
|
5191
|
+
|
|
5192
|
+
# Summary and next steps
|
|
5193
|
+
click.echo("✨ Identity alias generation complete!\n")
|
|
5194
|
+
|
|
5195
|
+
if not apply:
|
|
5196
|
+
click.echo("💡 Next steps:")
|
|
5197
|
+
click.echo(f" 1. Review the aliases in {output}")
|
|
5198
|
+
click.echo(" 2. Update your config.yaml to reference the aliases file:")
|
|
5199
|
+
click.echo(" analysis:")
|
|
5200
|
+
click.echo(" identity:")
|
|
5201
|
+
click.echo(f" aliases_file: {output.name}")
|
|
5202
|
+
click.echo(" 3. Or run with --apply flag to update automatically\n")
|
|
5203
|
+
|
|
5204
|
+
except Exception as e:
|
|
5205
|
+
click.echo(f"\n❌ Error generating aliases: {e}", err=True)
|
|
5206
|
+
import traceback
|
|
5207
|
+
|
|
5208
|
+
if os.getenv("GITFLOW_DEBUG"):
|
|
5209
|
+
traceback.print_exc()
|
|
5210
|
+
sys.exit(1)
|
|
5211
|
+
|
|
5212
|
+
|
|
4860
5213
|
@cli.command()
|
|
4861
5214
|
@click.option(
|
|
4862
5215
|
"--config",
|
|
@@ -4,10 +4,12 @@ This module provides a user-friendly installation experience with credential val
|
|
|
4
4
|
and comprehensive configuration generation.
|
|
5
5
|
"""
|
|
6
6
|
|
|
7
|
+
import getpass
|
|
7
8
|
import logging
|
|
8
9
|
import os
|
|
9
10
|
import stat
|
|
10
11
|
import subprocess
|
|
12
|
+
import sys
|
|
11
13
|
import time
|
|
12
14
|
from datetime import datetime
|
|
13
15
|
from pathlib import Path
|
|
@@ -27,6 +29,55 @@ logger = logging.getLogger(__name__)
|
|
|
27
29
|
class InstallWizard:
|
|
28
30
|
"""Interactive installation wizard for GitFlow Analytics setup."""
|
|
29
31
|
|
|
32
|
+
# Installation profiles
|
|
33
|
+
PROFILES = {
|
|
34
|
+
"1": {
|
|
35
|
+
"name": "Standard",
|
|
36
|
+
"description": "GitHub + JIRA + AI (Full featured)",
|
|
37
|
+
"github": True,
|
|
38
|
+
"repositories": "manual",
|
|
39
|
+
"jira": True,
|
|
40
|
+
"ai": True,
|
|
41
|
+
"analysis": True,
|
|
42
|
+
},
|
|
43
|
+
"2": {
|
|
44
|
+
"name": "GitHub Only",
|
|
45
|
+
"description": "GitHub integration without PM tools",
|
|
46
|
+
"github": True,
|
|
47
|
+
"repositories": "manual",
|
|
48
|
+
"jira": False,
|
|
49
|
+
"ai": False,
|
|
50
|
+
"analysis": True,
|
|
51
|
+
},
|
|
52
|
+
"3": {
|
|
53
|
+
"name": "Organization Mode",
|
|
54
|
+
"description": "Auto-discover repos from GitHub org",
|
|
55
|
+
"github": True,
|
|
56
|
+
"repositories": "organization",
|
|
57
|
+
"jira": True,
|
|
58
|
+
"ai": True,
|
|
59
|
+
"analysis": True,
|
|
60
|
+
},
|
|
61
|
+
"4": {
|
|
62
|
+
"name": "Minimal",
|
|
63
|
+
"description": "Local repos only, no integrations",
|
|
64
|
+
"github": False,
|
|
65
|
+
"repositories": "local",
|
|
66
|
+
"jira": False,
|
|
67
|
+
"ai": False,
|
|
68
|
+
"analysis": True,
|
|
69
|
+
},
|
|
70
|
+
"5": {
|
|
71
|
+
"name": "Custom",
|
|
72
|
+
"description": "Configure everything manually",
|
|
73
|
+
"github": None, # Ask user
|
|
74
|
+
"repositories": None, # Ask user
|
|
75
|
+
"jira": None, # Ask user
|
|
76
|
+
"ai": None, # Ask user
|
|
77
|
+
"analysis": True,
|
|
78
|
+
},
|
|
79
|
+
}
|
|
80
|
+
|
|
30
81
|
def __init__(self, output_dir: Path, skip_validation: bool = False):
|
|
31
82
|
"""Initialize the installation wizard.
|
|
32
83
|
|
|
@@ -38,10 +89,56 @@ class InstallWizard:
|
|
|
38
89
|
self.skip_validation = skip_validation
|
|
39
90
|
self.config_data = {}
|
|
40
91
|
self.env_data = {}
|
|
92
|
+
self.profile = None # Selected installation profile
|
|
41
93
|
|
|
42
94
|
# Ensure output directory exists
|
|
43
95
|
self.output_dir.mkdir(parents=True, exist_ok=True)
|
|
44
96
|
|
|
97
|
+
def _is_interactive(self) -> bool:
|
|
98
|
+
"""Check if running in interactive terminal.
|
|
99
|
+
|
|
100
|
+
Returns:
|
|
101
|
+
True if stdin and stdout are connected to a TTY
|
|
102
|
+
"""
|
|
103
|
+
return sys.stdin.isatty() and sys.stdout.isatty()
|
|
104
|
+
|
|
105
|
+
def _get_password(self, prompt: str, field_name: str = "password") -> str:
|
|
106
|
+
"""Get password input with non-interactive detection.
|
|
107
|
+
|
|
108
|
+
Args:
|
|
109
|
+
prompt: Prompt text to display
|
|
110
|
+
field_name: Field name for error messages
|
|
111
|
+
|
|
112
|
+
Returns:
|
|
113
|
+
Password string
|
|
114
|
+
"""
|
|
115
|
+
if self._is_interactive():
|
|
116
|
+
return getpass.getpass(prompt)
|
|
117
|
+
else:
|
|
118
|
+
click.echo(f"⚠️ Non-interactive mode detected - {field_name} will be visible", err=True)
|
|
119
|
+
return click.prompt(prompt, hide_input=False)
|
|
120
|
+
|
|
121
|
+
def _select_profile(self) -> dict:
|
|
122
|
+
"""Let user select installation profile."""
|
|
123
|
+
click.echo("\n📋 Installation Profiles")
|
|
124
|
+
click.echo("=" * 60 + "\n")
|
|
125
|
+
|
|
126
|
+
for key, profile in self.PROFILES.items():
|
|
127
|
+
click.echo(f" {key}. {profile['name']}")
|
|
128
|
+
click.echo(f" {profile['description']}")
|
|
129
|
+
click.echo()
|
|
130
|
+
|
|
131
|
+
profile_choice = click.prompt(
|
|
132
|
+
"Select installation profile",
|
|
133
|
+
type=click.Choice(list(self.PROFILES.keys())),
|
|
134
|
+
default="1",
|
|
135
|
+
)
|
|
136
|
+
|
|
137
|
+
selected = self.PROFILES[profile_choice].copy()
|
|
138
|
+
click.echo(f"\n✅ Selected: {selected['name']}\n")
|
|
139
|
+
|
|
140
|
+
return selected
|
|
141
|
+
|
|
45
142
|
def run(self) -> bool:
|
|
46
143
|
"""Run the installation wizard.
|
|
47
144
|
|
|
@@ -53,22 +150,48 @@ class InstallWizard:
|
|
|
53
150
|
click.echo("=" * 50)
|
|
54
151
|
click.echo()
|
|
55
152
|
|
|
56
|
-
# Step
|
|
57
|
-
|
|
58
|
-
return False
|
|
153
|
+
# Step 0: Select profile
|
|
154
|
+
self.profile = self._select_profile()
|
|
59
155
|
|
|
60
|
-
# Step
|
|
61
|
-
if not
|
|
156
|
+
# Step 1: GitHub Setup (conditional based on profile)
|
|
157
|
+
if self.profile["github"] is not False:
|
|
158
|
+
if not self._setup_github():
|
|
159
|
+
return False
|
|
160
|
+
else:
|
|
161
|
+
# Minimal mode - no GitHub
|
|
162
|
+
pass
|
|
163
|
+
|
|
164
|
+
# Step 2: Repository Configuration (based on profile)
|
|
165
|
+
if self.profile["repositories"] == "organization":
|
|
166
|
+
# Organization mode - already handled in GitHub setup
|
|
167
|
+
pass
|
|
168
|
+
elif self.profile["repositories"] == "manual":
|
|
169
|
+
if not self._setup_repositories():
|
|
170
|
+
return False
|
|
171
|
+
elif self.profile["repositories"] == "local":
|
|
172
|
+
if not self._setup_local_repositories():
|
|
173
|
+
return False
|
|
174
|
+
elif self.profile["repositories"] is None and not self._setup_repositories():
|
|
175
|
+
# Custom mode - ask user
|
|
62
176
|
return False
|
|
63
177
|
|
|
64
|
-
# Step 3: JIRA Setup (
|
|
65
|
-
self.
|
|
178
|
+
# Step 3: JIRA Setup (conditional based on profile)
|
|
179
|
+
if self.profile["jira"]:
|
|
180
|
+
self._setup_jira()
|
|
181
|
+
elif self.profile["jira"] is None:
|
|
182
|
+
# Custom mode - ask user
|
|
183
|
+
self._setup_jira()
|
|
66
184
|
|
|
67
|
-
# Step 4: OpenRouter/ChatGPT Setup (
|
|
68
|
-
self.
|
|
185
|
+
# Step 4: OpenRouter/ChatGPT Setup (conditional based on profile)
|
|
186
|
+
if self.profile["ai"]:
|
|
187
|
+
self._setup_ai()
|
|
188
|
+
elif self.profile["ai"] is None:
|
|
189
|
+
# Custom mode - ask user
|
|
190
|
+
self._setup_ai()
|
|
69
191
|
|
|
70
192
|
# Step 5: Analysis Configuration
|
|
71
|
-
self.
|
|
193
|
+
if self.profile["analysis"]:
|
|
194
|
+
self._setup_analysis()
|
|
72
195
|
|
|
73
196
|
# Step 6: Generate Files
|
|
74
197
|
if not self._generate_files():
|
|
@@ -123,10 +246,8 @@ class InstallWizard:
|
|
|
123
246
|
click.echo(f"⏳ Waiting {delay} seconds before retry...")
|
|
124
247
|
time.sleep(delay)
|
|
125
248
|
|
|
126
|
-
token =
|
|
127
|
-
"Enter GitHub Personal Access Token",
|
|
128
|
-
hide_input=True,
|
|
129
|
-
type=str,
|
|
249
|
+
token = self._get_password(
|
|
250
|
+
"Enter GitHub Personal Access Token: ", "GitHub token"
|
|
130
251
|
).strip()
|
|
131
252
|
|
|
132
253
|
if not token:
|
|
@@ -319,6 +440,61 @@ class InstallWizard:
|
|
|
319
440
|
self.config_data["github"]["repositories"] = repositories
|
|
320
441
|
return True
|
|
321
442
|
|
|
443
|
+
def _setup_local_repositories(self) -> bool:
|
|
444
|
+
"""Setup local repository paths (no GitHub).
|
|
445
|
+
|
|
446
|
+
Returns:
|
|
447
|
+
True if setup successful, False otherwise
|
|
448
|
+
"""
|
|
449
|
+
click.echo("\n📦 Local Repository Mode")
|
|
450
|
+
click.echo("Specify local Git repository paths to analyze.")
|
|
451
|
+
click.echo()
|
|
452
|
+
|
|
453
|
+
repositories = []
|
|
454
|
+
while True:
|
|
455
|
+
repo_path_str = click.prompt(
|
|
456
|
+
"Enter repository path (or press Enter to finish)",
|
|
457
|
+
type=str,
|
|
458
|
+
default="",
|
|
459
|
+
show_default=False,
|
|
460
|
+
).strip()
|
|
461
|
+
|
|
462
|
+
if not repo_path_str:
|
|
463
|
+
if not repositories:
|
|
464
|
+
click.echo("❌ At least one repository is required")
|
|
465
|
+
continue
|
|
466
|
+
break
|
|
467
|
+
|
|
468
|
+
# Validate path is safe
|
|
469
|
+
path_obj = self._validate_directory_path(repo_path_str, "Repository path")
|
|
470
|
+
if path_obj is None:
|
|
471
|
+
continue # Re-prompt
|
|
472
|
+
|
|
473
|
+
if not path_obj.exists():
|
|
474
|
+
click.echo(f"⚠️ Path does not exist: {path_obj}")
|
|
475
|
+
if not click.confirm("Add anyway?", default=False):
|
|
476
|
+
continue
|
|
477
|
+
|
|
478
|
+
# Check if it's a git repository
|
|
479
|
+
if (path_obj / ".git").exists():
|
|
480
|
+
click.echo(f"✅ Valid git repository: {path_obj}")
|
|
481
|
+
else:
|
|
482
|
+
click.echo(f"⚠️ Not a git repository: {path_obj}")
|
|
483
|
+
if not click.confirm("Add anyway?", default=False):
|
|
484
|
+
continue
|
|
485
|
+
|
|
486
|
+
repo_name = click.prompt("Repository name", default=path_obj.name)
|
|
487
|
+
|
|
488
|
+
repositories.append({"name": repo_name, "path": str(path_obj)})
|
|
489
|
+
click.echo(f"Added repository #{len(repositories)}\n")
|
|
490
|
+
|
|
491
|
+
if not click.confirm("Add another repository?", default=False):
|
|
492
|
+
break
|
|
493
|
+
|
|
494
|
+
# Store repositories directly without GitHub section
|
|
495
|
+
self.config_data["repositories"] = repositories
|
|
496
|
+
return True
|
|
497
|
+
|
|
322
498
|
def _setup_jira(self) -> None:
|
|
323
499
|
"""Setup JIRA integration (optional)."""
|
|
324
500
|
click.echo("\n📋 Step 3: JIRA Setup (OPTIONAL)")
|
|
@@ -347,7 +523,7 @@ class InstallWizard:
|
|
|
347
523
|
|
|
348
524
|
base_url = click.prompt("JIRA base URL", type=str).strip()
|
|
349
525
|
access_user = click.prompt("JIRA email", type=str).strip()
|
|
350
|
-
access_token =
|
|
526
|
+
access_token = self._get_password("JIRA API token: ", "JIRA token").strip()
|
|
351
527
|
|
|
352
528
|
if not all([base_url, access_user, access_token]):
|
|
353
529
|
click.echo("❌ All JIRA fields are required")
|
|
@@ -550,7 +726,7 @@ class InstallWizard:
|
|
|
550
726
|
click.echo(f"⏳ Waiting {delay} seconds before retry...")
|
|
551
727
|
time.sleep(delay)
|
|
552
728
|
|
|
553
|
-
api_key =
|
|
729
|
+
api_key = self._get_password("Enter API key: ", "AI API key").strip()
|
|
554
730
|
|
|
555
731
|
if not api_key:
|
|
556
732
|
click.echo("❌ API key cannot be empty")
|
|
@@ -700,6 +876,75 @@ class InstallWizard:
|
|
|
700
876
|
self.config_data["analysis"]["output_directory"] = output_dir
|
|
701
877
|
self.config_data["analysis"]["cache_directory"] = cache_dir
|
|
702
878
|
|
|
879
|
+
# NEW: Aliases configuration
|
|
880
|
+
click.echo("\n🔗 Developer Identity Aliases")
|
|
881
|
+
click.echo("-" * 40 + "\n")
|
|
882
|
+
|
|
883
|
+
click.echo("Aliases consolidate multiple email addresses for the same developer.")
|
|
884
|
+
click.echo("You can use a shared aliases.yaml file across multiple configs.\n")
|
|
885
|
+
|
|
886
|
+
use_aliases = click.confirm("Configure aliases file?", default=True)
|
|
887
|
+
|
|
888
|
+
if use_aliases:
|
|
889
|
+
aliases_options = [
|
|
890
|
+
"1. Create new aliases.yaml in this directory",
|
|
891
|
+
"2. Use existing shared aliases file",
|
|
892
|
+
"3. Generate aliases using LLM (after installation)",
|
|
893
|
+
]
|
|
894
|
+
|
|
895
|
+
click.echo("\nOptions:")
|
|
896
|
+
for option in aliases_options:
|
|
897
|
+
click.echo(f" {option}")
|
|
898
|
+
|
|
899
|
+
aliases_choice = click.prompt(
|
|
900
|
+
"\nSelect option", type=click.Choice(["1", "2", "3"]), default="1"
|
|
901
|
+
)
|
|
902
|
+
|
|
903
|
+
if aliases_choice == "1":
|
|
904
|
+
# Create new aliases file
|
|
905
|
+
aliases_path = "aliases.yaml"
|
|
906
|
+
|
|
907
|
+
# Ensure analysis.identity section exists
|
|
908
|
+
if "identity" not in self.config_data.get("analysis", {}):
|
|
909
|
+
if "analysis" not in self.config_data:
|
|
910
|
+
self.config_data["analysis"] = {}
|
|
911
|
+
self.config_data["analysis"]["identity"] = {}
|
|
912
|
+
|
|
913
|
+
self.config_data["analysis"]["identity"]["aliases_file"] = aliases_path
|
|
914
|
+
|
|
915
|
+
# Create empty aliases file
|
|
916
|
+
from ..config.aliases import AliasesManager
|
|
917
|
+
|
|
918
|
+
aliases_full_path = self.output_dir / aliases_path
|
|
919
|
+
aliases_mgr = AliasesManager(aliases_full_path)
|
|
920
|
+
aliases_mgr.save() # Creates empty file with comments
|
|
921
|
+
|
|
922
|
+
click.echo(f"\n✅ Created {aliases_path}")
|
|
923
|
+
click.echo(" Generate aliases after installation with:")
|
|
924
|
+
click.echo(" gitflow-analytics aliases -c config.yaml --apply\n")
|
|
925
|
+
|
|
926
|
+
elif aliases_choice == "2":
|
|
927
|
+
# Use existing file
|
|
928
|
+
aliases_path = click.prompt(
|
|
929
|
+
"Path to aliases.yaml (relative to config)", default="../shared/aliases.yaml"
|
|
930
|
+
)
|
|
931
|
+
|
|
932
|
+
# Ensure analysis.identity section exists
|
|
933
|
+
if "identity" not in self.config_data.get("analysis", {}):
|
|
934
|
+
if "analysis" not in self.config_data:
|
|
935
|
+
self.config_data["analysis"] = {}
|
|
936
|
+
self.config_data["analysis"]["identity"] = {}
|
|
937
|
+
|
|
938
|
+
self.config_data["analysis"]["identity"]["aliases_file"] = aliases_path
|
|
939
|
+
|
|
940
|
+
click.echo(f"\n✅ Configured to use: {aliases_path}\n")
|
|
941
|
+
|
|
942
|
+
else: # choice == "3"
|
|
943
|
+
# Will generate after installation
|
|
944
|
+
click.echo("\n💡 After installation, run:")
|
|
945
|
+
click.echo(" gitflow-analytics aliases -c config.yaml --apply")
|
|
946
|
+
click.echo(" This will analyze your repos and generate aliases automatically.\n")
|
|
947
|
+
|
|
703
948
|
def _clear_sensitive_data(self) -> None:
|
|
704
949
|
"""Clear sensitive data from memory after use."""
|
|
705
950
|
sensitive_keys = ["TOKEN", "KEY", "PASSWORD", "SECRET"]
|
|
@@ -852,7 +1097,7 @@ class InstallWizard:
|
|
|
852
1097
|
from ..config import ConfigLoader
|
|
853
1098
|
|
|
854
1099
|
ConfigLoader.load(config_path)
|
|
855
|
-
click.echo("✅ Configuration
|
|
1100
|
+
click.echo("✅ Configuration validated successfully")
|
|
856
1101
|
|
|
857
1102
|
# Offer to run first analysis
|
|
858
1103
|
if click.confirm("\nRun initial analysis now?", default=False):
|
|
@@ -861,7 +1106,7 @@ class InstallWizard:
|
|
|
861
1106
|
return True
|
|
862
1107
|
|
|
863
1108
|
except Exception as e:
|
|
864
|
-
click.echo("
|
|
1109
|
+
click.echo(f"❌ Configuration validation failed: {e}", err=True)
|
|
865
1110
|
click.echo("You may need to adjust the configuration manually.")
|
|
866
1111
|
logger.error(f"Configuration validation error type: {type(e).__name__}")
|
|
867
1112
|
return True # Don't fail installation on validation error
|
|
@@ -139,13 +139,104 @@ class InteractiveLauncher:
|
|
|
139
139
|
logger.warning(f"Could not load preferences: {e}")
|
|
140
140
|
self.preferences = {}
|
|
141
141
|
|
|
142
|
+
def _get_available_repositories(self) -> list[str]:
    """Get list of available repositories from config.

    Supports both explicit repository configuration and organization mode.
    In organization mode the repository list is discovered live from the
    GitHub API; otherwise (or on any failure) the explicitly configured
    repositories are used.

    Returns:
        List of repository names
    """
    discovered: list[str] = []

    try:
        # Read the raw YAML directly so we can detect organization mode,
        # which the parsed config object does not expose here.
        with open(self.config_path) as handle:
            raw_config = yaml.safe_load(handle)

        gh_section = raw_config.get("github", {})

        if gh_section.get("organization"):
            # Organization mode - fetch repos from GitHub
            api_token = self._resolve_env_var(gh_section.get("token", ""))
            if api_token:
                from github import Github

                try:
                    client = Github(api_token)
                    org_name = gh_section["organization"]
                    organization = client.get_organization(org_name)

                    # Collect every non-archived repository in the org.
                    for gh_repo in organization.get_repos(type="all"):
                        if not gh_repo.archived:
                            discovered.append(gh_repo.full_name)

                    if discovered:
                        click.echo(
                            f"🔍 Discovered {len(discovered)} repositories from organization '{org_name}'\n"
                        )
                except Exception as e:
                    click.echo(
                        f"⚠️ Could not fetch organization repos: {type(e).__name__}", err=True
                    )
                    logger.error(f"Organization repo fetch error: {e}")

        # Fall back to explicit repositories
        if not discovered and self.config and self.config.repositories:
            discovered = [repo.path.name for repo in self.config.repositories]

    except Exception as e:
        logger.warning(f"Could not determine repositories: {e}")
        # Final fallback
        if self.config and self.config.repositories:
            discovered = [repo.path.name for repo in self.config.repositories]

    return discovered
|
|
196
|
+
|
|
197
|
+
def _resolve_env_var(self, value: str) -> str:
|
|
198
|
+
"""Resolve environment variable in config value.
|
|
199
|
+
|
|
200
|
+
Args:
|
|
201
|
+
value: Config value that may contain ${VAR_NAME}
|
|
202
|
+
|
|
203
|
+
Returns:
|
|
204
|
+
Resolved value
|
|
205
|
+
"""
|
|
206
|
+
import os
|
|
207
|
+
import re
|
|
208
|
+
|
|
209
|
+
# Load .env file if it exists next to config
|
|
210
|
+
env_path = self.config_path.parent / ".env"
|
|
211
|
+
if env_path.exists():
|
|
212
|
+
try:
|
|
213
|
+
with open(env_path) as f:
|
|
214
|
+
for line in f:
|
|
215
|
+
line = line.strip()
|
|
216
|
+
if line and not line.startswith("#") and "=" in line:
|
|
217
|
+
key, val = line.split("=", 1)
|
|
218
|
+
os.environ.setdefault(key.strip(), val.strip())
|
|
219
|
+
except Exception as e:
|
|
220
|
+
logger.debug(f"Could not load .env: {e}")
|
|
221
|
+
|
|
222
|
+
# Resolve ${VAR_NAME} patterns
|
|
223
|
+
pattern = r"\$\{([^}]+)\}"
|
|
224
|
+
|
|
225
|
+
def replace_var(match):
|
|
226
|
+
var_name = match.group(1)
|
|
227
|
+
return os.environ.get(var_name, match.group(0))
|
|
228
|
+
|
|
229
|
+
return re.sub(pattern, replace_var, value)
|
|
230
|
+
|
|
142
231
|
def _select_repositories(self) -> list[str]:
|
|
143
232
|
"""Interactive repository selection with multi-select.
|
|
144
233
|
|
|
145
234
|
Returns:
|
|
146
235
|
List of selected repository names.
|
|
147
236
|
"""
|
|
148
|
-
|
|
237
|
+
# Check for organization mode first
|
|
238
|
+
repos = self._get_available_repositories()
|
|
239
|
+
if not repos:
|
|
149
240
|
click.echo("❌ No repositories configured!")
|
|
150
241
|
return []
|
|
151
242
|
|
|
@@ -155,10 +246,9 @@ class InteractiveLauncher:
|
|
|
155
246
|
last_selected = self.preferences.get("last_selected_repos", [])
|
|
156
247
|
|
|
157
248
|
# Display repositories with numbering and selection status
|
|
158
|
-
for i,
|
|
159
|
-
repo_name = repo.path.name
|
|
249
|
+
for i, repo_name in enumerate(repos, 1):
|
|
160
250
|
status = "✓" if repo_name in last_selected else " "
|
|
161
|
-
click.echo(f" [{status}] {i}. {repo_name}
|
|
251
|
+
click.echo(f" [{status}] {i}. {repo_name}")
|
|
162
252
|
|
|
163
253
|
# Get user selection
|
|
164
254
|
click.echo("\n📝 Select repositories:")
|
|
@@ -169,7 +259,7 @@ class InteractiveLauncher:
|
|
|
169
259
|
selection = click.prompt("Selection", default="", show_default=False).strip()
|
|
170
260
|
|
|
171
261
|
if selection.lower() == "all":
|
|
172
|
-
selected =
|
|
262
|
+
selected = repos
|
|
173
263
|
click.echo(f"✅ Selected all {len(selected)} repositories\n")
|
|
174
264
|
return selected
|
|
175
265
|
elif not selection and last_selected:
|
|
@@ -177,7 +267,7 @@ class InteractiveLauncher:
|
|
|
177
267
|
return last_selected
|
|
178
268
|
elif not selection:
|
|
179
269
|
# Default to all repos if no previous selection
|
|
180
|
-
selected =
|
|
270
|
+
selected = repos
|
|
181
271
|
click.echo(f"✅ Selected all {len(selected)} repositories (default)\n")
|
|
182
272
|
return selected
|
|
183
273
|
else:
|
|
@@ -186,8 +276,8 @@ class InteractiveLauncher:
|
|
|
186
276
|
indices = [int(x.strip()) for x in selection.split(",")]
|
|
187
277
|
selected = []
|
|
188
278
|
for i in indices:
|
|
189
|
-
if 1 <= i <= len(
|
|
190
|
-
selected.append(
|
|
279
|
+
if 1 <= i <= len(repos):
|
|
280
|
+
selected.append(repos[i - 1])
|
|
191
281
|
else:
|
|
192
282
|
click.echo(f"⚠️ Invalid index: {i} (ignored)")
|
|
193
283
|
|
|
@@ -6,6 +6,7 @@ sub-modules while maintaining backward compatibility.
|
|
|
6
6
|
"""
|
|
7
7
|
|
|
8
8
|
# Re-export main interfaces for backward compatibility
|
|
9
|
+
from .aliases import AliasesManager, DeveloperAlias
|
|
9
10
|
from .loader import ConfigLoader
|
|
10
11
|
from .schema import (
|
|
11
12
|
AnalysisConfig,
|
|
@@ -40,4 +41,6 @@ __all__ = [
|
|
|
40
41
|
"LLMClassificationConfig",
|
|
41
42
|
"CommitClassificationConfig",
|
|
42
43
|
"BranchAnalysisConfig",
|
|
44
|
+
"AliasesManager",
|
|
45
|
+
"DeveloperAlias",
|
|
43
46
|
]
|
|
@@ -0,0 +1,306 @@
|
|
|
1
|
+
"""Developer identity aliases management.
|
|
2
|
+
|
|
3
|
+
This module provides functionality for managing developer identity aliases
|
|
4
|
+
across multiple configuration files. Aliases can be shared to maintain
|
|
5
|
+
consistent identity resolution across different analysis configurations.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
import logging
|
|
9
|
+
from dataclasses import dataclass, field
|
|
10
|
+
from pathlib import Path
|
|
11
|
+
from typing import Any, Optional
|
|
12
|
+
|
|
13
|
+
import yaml
|
|
14
|
+
|
|
15
|
+
logger = logging.getLogger(__name__)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
@dataclass
class DeveloperAlias:
    """A developer alias configuration.

    Represents a single developer with their primary email and all known aliases.
    Supports both manual and LLM-generated alias configurations with confidence scores.
    """

    primary_email: str
    aliases: list[str] = field(default_factory=list)
    name: Optional[str] = None
    confidence: float = 1.0
    reasoning: str = ""

    def to_dict(self) -> dict[str, Any]:
        """Serialize this alias into a YAML-friendly dictionary.

        Optional fields are omitted when unset; confidence is only emitted
        for LLM-generated entries (confidence below 1.0).

        Returns:
            Dictionary representation with optional fields omitted if not set
        """
        payload: dict[str, Any] = {
            "primary_email": self.primary_email,
            "aliases": self.aliases,
        }

        if self.name:
            payload["name"] = self.name

        # Manual aliases (confidence == 1.0) skip the confidence field entirely.
        if self.confidence < 1.0:
            payload["confidence"] = round(self.confidence, 2)
        if self.reasoning:
            payload["reasoning"] = self.reasoning

        return payload
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class AliasesManager:
|
|
56
|
+
"""Manages developer identity aliases.
|
|
57
|
+
|
|
58
|
+
Provides functionality to load, save, and manipulate developer identity aliases.
|
|
59
|
+
Supports both manual aliases (confidence=1.0) and LLM-generated aliases with
|
|
60
|
+
confidence scores and reasoning.
|
|
61
|
+
|
|
62
|
+
Example:
|
|
63
|
+
>>> manager = AliasesManager(Path("aliases.yaml"))
|
|
64
|
+
>>> manager.load()
|
|
65
|
+
>>> manager.add_alias(DeveloperAlias(
|
|
66
|
+
... primary_email="john@company.com",
|
|
67
|
+
... aliases=["jdoe@gmail.com"],
|
|
68
|
+
... name="John Doe"
|
|
69
|
+
... ))
|
|
70
|
+
>>> manager.save()
|
|
71
|
+
"""
|
|
72
|
+
|
|
73
|
+
def __init__(self, aliases_path: Optional[Path] = None):
|
|
74
|
+
"""Initialize aliases manager.
|
|
75
|
+
|
|
76
|
+
Args:
|
|
77
|
+
aliases_path: Path to aliases.yaml file. If None, aliases must be
|
|
78
|
+
added programmatically or loaded from another source.
|
|
79
|
+
"""
|
|
80
|
+
self.aliases_path = aliases_path
|
|
81
|
+
self.aliases: list[DeveloperAlias] = []
|
|
82
|
+
|
|
83
|
+
if aliases_path and aliases_path.exists():
|
|
84
|
+
self.load()
|
|
85
|
+
|
|
86
|
+
def load(self) -> None:
|
|
87
|
+
"""Load aliases from file.
|
|
88
|
+
|
|
89
|
+
Loads developer aliases from the configured YAML file. If the file
|
|
90
|
+
doesn't exist or is empty, initializes with an empty alias list.
|
|
91
|
+
|
|
92
|
+
Raises:
|
|
93
|
+
yaml.YAMLError: If the YAML file is malformed
|
|
94
|
+
"""
|
|
95
|
+
if not self.aliases_path or not self.aliases_path.exists():
|
|
96
|
+
logger.debug("No aliases file found or path not set")
|
|
97
|
+
return
|
|
98
|
+
|
|
99
|
+
try:
|
|
100
|
+
with open(self.aliases_path) as f:
|
|
101
|
+
data = yaml.safe_load(f) or {}
|
|
102
|
+
|
|
103
|
+
self.aliases = []
|
|
104
|
+
for alias_data in data.get("developer_aliases", []):
|
|
105
|
+
# Support both 'primary_email' (new) and 'canonical_email' (old)
|
|
106
|
+
primary_email = alias_data.get("primary_email") or alias_data.get("canonical_email")
|
|
107
|
+
|
|
108
|
+
if not primary_email:
|
|
109
|
+
logger.warning(f"Skipping alias entry without primary_email: {alias_data}")
|
|
110
|
+
continue
|
|
111
|
+
|
|
112
|
+
self.aliases.append(
|
|
113
|
+
DeveloperAlias(
|
|
114
|
+
primary_email=primary_email,
|
|
115
|
+
aliases=alias_data.get("aliases", []),
|
|
116
|
+
name=alias_data.get("name"),
|
|
117
|
+
confidence=alias_data.get("confidence", 1.0),
|
|
118
|
+
reasoning=alias_data.get("reasoning", ""),
|
|
119
|
+
)
|
|
120
|
+
)
|
|
121
|
+
|
|
122
|
+
logger.info(f"Loaded {len(self.aliases)} developer aliases from {self.aliases_path}")
|
|
123
|
+
|
|
124
|
+
except yaml.YAMLError as e:
|
|
125
|
+
logger.error(f"Error parsing aliases file {self.aliases_path}: {e}")
|
|
126
|
+
raise
|
|
127
|
+
except Exception as e:
|
|
128
|
+
logger.error(f"Error loading aliases file {self.aliases_path}: {e}")
|
|
129
|
+
raise
|
|
130
|
+
|
|
131
|
+
def save(self) -> None:
|
|
132
|
+
"""Save aliases to file.
|
|
133
|
+
|
|
134
|
+
Writes all developer aliases to the configured YAML file with proper
|
|
135
|
+
formatting and comments. Creates the parent directory if it doesn't exist.
|
|
136
|
+
|
|
137
|
+
Raises:
|
|
138
|
+
OSError: If file cannot be written
|
|
139
|
+
"""
|
|
140
|
+
if not self.aliases_path:
|
|
141
|
+
logger.warning("No aliases path configured, cannot save")
|
|
142
|
+
return
|
|
143
|
+
|
|
144
|
+
# Ensure directory exists
|
|
145
|
+
self.aliases_path.parent.mkdir(parents=True, exist_ok=True)
|
|
146
|
+
|
|
147
|
+
try:
|
|
148
|
+
# Build data structure with comments
|
|
149
|
+
data = {
|
|
150
|
+
"# Developer Identity Aliases": None,
|
|
151
|
+
"# Generated by GitFlow Analytics": None,
|
|
152
|
+
"# Share this file across multiple config files": None,
|
|
153
|
+
"# Each alias maps multiple email addresses to a single developer": None,
|
|
154
|
+
"developer_aliases": [alias.to_dict() for alias in self.aliases],
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
with open(self.aliases_path, "w") as f:
|
|
158
|
+
# Custom YAML dump to preserve comments
|
|
159
|
+
f.write("# Developer Identity Aliases\n")
|
|
160
|
+
f.write("# Generated by GitFlow Analytics\n")
|
|
161
|
+
f.write("# Share this file across multiple config files\n")
|
|
162
|
+
f.write("# Each alias maps multiple email addresses to a single developer\n\n")
|
|
163
|
+
|
|
164
|
+
# Write the aliases list
|
|
165
|
+
yaml.dump(
|
|
166
|
+
{"developer_aliases": data["developer_aliases"]},
|
|
167
|
+
f,
|
|
168
|
+
default_flow_style=False,
|
|
169
|
+
sort_keys=False,
|
|
170
|
+
allow_unicode=True,
|
|
171
|
+
)
|
|
172
|
+
|
|
173
|
+
logger.info(f"Saved {len(self.aliases)} developer aliases to {self.aliases_path}")
|
|
174
|
+
|
|
175
|
+
except Exception as e:
|
|
176
|
+
logger.error(f"Error saving aliases file {self.aliases_path}: {e}")
|
|
177
|
+
raise
|
|
178
|
+
|
|
179
|
+
def add_alias(self, alias: DeveloperAlias) -> None:
|
|
180
|
+
"""Add or update a developer alias.
|
|
181
|
+
|
|
182
|
+
If an alias with the same primary email already exists, it will be replaced.
|
|
183
|
+
This ensures there is only one alias configuration per developer.
|
|
184
|
+
|
|
185
|
+
Args:
|
|
186
|
+
alias: The developer alias to add or update
|
|
187
|
+
"""
|
|
188
|
+
# Remove existing alias for same primary email
|
|
189
|
+
self.aliases = [a for a in self.aliases if a.primary_email != alias.primary_email]
|
|
190
|
+
self.aliases.append(alias)
|
|
191
|
+
logger.debug(f"Added/updated alias for {alias.primary_email}")
|
|
192
|
+
|
|
193
|
+
def remove_alias(self, primary_email: str) -> bool:
|
|
194
|
+
"""Remove a developer alias by primary email.
|
|
195
|
+
|
|
196
|
+
Args:
|
|
197
|
+
primary_email: The primary email of the alias to remove
|
|
198
|
+
|
|
199
|
+
Returns:
|
|
200
|
+
True if an alias was removed, False if not found
|
|
201
|
+
"""
|
|
202
|
+
original_count = len(self.aliases)
|
|
203
|
+
self.aliases = [a for a in self.aliases if a.primary_email != primary_email]
|
|
204
|
+
removed = len(self.aliases) < original_count
|
|
205
|
+
if removed:
|
|
206
|
+
logger.debug(f"Removed alias for {primary_email}")
|
|
207
|
+
return removed
|
|
208
|
+
|
|
209
|
+
def get_alias(self, primary_email: str) -> Optional[DeveloperAlias]:
|
|
210
|
+
"""Get a developer alias by primary email.
|
|
211
|
+
|
|
212
|
+
Args:
|
|
213
|
+
primary_email: The primary email to look up
|
|
214
|
+
|
|
215
|
+
Returns:
|
|
216
|
+
The developer alias if found, None otherwise
|
|
217
|
+
"""
|
|
218
|
+
for alias in self.aliases:
|
|
219
|
+
if alias.primary_email == primary_email:
|
|
220
|
+
return alias
|
|
221
|
+
return None
|
|
222
|
+
|
|
223
|
+
def to_manual_mappings(self) -> list[dict[str, Any]]:
|
|
224
|
+
"""Convert aliases to config manual_identity_mappings format.
|
|
225
|
+
|
|
226
|
+
Converts the internal alias representation to the format expected
|
|
227
|
+
by the GitFlow Analytics configuration's manual_identity_mappings field.
|
|
228
|
+
|
|
229
|
+
Returns:
|
|
230
|
+
List of manual identity mapping dictionaries
|
|
231
|
+
"""
|
|
232
|
+
mappings = []
|
|
233
|
+
for alias in self.aliases:
|
|
234
|
+
mapping: dict[str, Any] = {"primary_email": alias.primary_email}
|
|
235
|
+
|
|
236
|
+
if alias.name:
|
|
237
|
+
mapping["name"] = alias.name
|
|
238
|
+
|
|
239
|
+
mapping["aliases"] = alias.aliases
|
|
240
|
+
|
|
241
|
+
# Include confidence and reasoning for LLM-generated mappings
|
|
242
|
+
if alias.confidence < 1.0:
|
|
243
|
+
mapping["confidence"] = alias.confidence
|
|
244
|
+
if alias.reasoning:
|
|
245
|
+
mapping["reasoning"] = alias.reasoning
|
|
246
|
+
|
|
247
|
+
mappings.append(mapping)
|
|
248
|
+
|
|
249
|
+
return mappings
|
|
250
|
+
|
|
251
|
+
def merge_from_mappings(self, mappings: list[dict[str, Any]]) -> None:
|
|
252
|
+
"""Merge aliases from manual identity mappings.
|
|
253
|
+
|
|
254
|
+
Takes manual identity mappings from a config file and merges them
|
|
255
|
+
into the current alias set. Existing aliases are preserved unless
|
|
256
|
+
they conflict with the new mappings.
|
|
257
|
+
|
|
258
|
+
Args:
|
|
259
|
+
mappings: List of manual identity mapping dictionaries
|
|
260
|
+
"""
|
|
261
|
+
for mapping in mappings:
|
|
262
|
+
# Support both field name variants
|
|
263
|
+
primary_email = mapping.get("primary_email") or mapping.get("canonical_email")
|
|
264
|
+
|
|
265
|
+
if not primary_email:
|
|
266
|
+
logger.warning(f"Skipping mapping without primary_email: {mapping}")
|
|
267
|
+
continue
|
|
268
|
+
|
|
269
|
+
alias = DeveloperAlias(
|
|
270
|
+
primary_email=primary_email,
|
|
271
|
+
aliases=mapping.get("aliases", []),
|
|
272
|
+
name=mapping.get("name"),
|
|
273
|
+
confidence=mapping.get("confidence", 1.0),
|
|
274
|
+
reasoning=mapping.get("reasoning", ""),
|
|
275
|
+
)
|
|
276
|
+
|
|
277
|
+
self.add_alias(alias)
|
|
278
|
+
|
|
279
|
+
def get_statistics(self) -> dict[str, Any]:
|
|
280
|
+
"""Get statistics about the aliases.
|
|
281
|
+
|
|
282
|
+
Returns:
|
|
283
|
+
Dictionary with statistics including total aliases, manual vs LLM-generated,
|
|
284
|
+
average confidence, etc.
|
|
285
|
+
"""
|
|
286
|
+
if not self.aliases:
|
|
287
|
+
return {
|
|
288
|
+
"total_aliases": 0,
|
|
289
|
+
"manual_aliases": 0,
|
|
290
|
+
"llm_aliases": 0,
|
|
291
|
+
"avg_confidence": 0.0,
|
|
292
|
+
"total_email_addresses": 0,
|
|
293
|
+
}
|
|
294
|
+
|
|
295
|
+
manual_count = sum(1 for a in self.aliases if a.confidence == 1.0)
|
|
296
|
+
llm_count = len(self.aliases) - manual_count
|
|
297
|
+
avg_confidence = sum(a.confidence for a in self.aliases) / len(self.aliases)
|
|
298
|
+
total_emails = sum(len(a.aliases) + 1 for a in self.aliases) # +1 for primary
|
|
299
|
+
|
|
300
|
+
return {
|
|
301
|
+
"total_aliases": len(self.aliases),
|
|
302
|
+
"manual_aliases": manual_count,
|
|
303
|
+
"llm_aliases": llm_count,
|
|
304
|
+
"avg_confidence": round(avg_confidence, 3),
|
|
305
|
+
"total_email_addresses": total_emails,
|
|
306
|
+
}
|
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
"""YAML configuration loading and environment variable expansion."""
|
|
2
2
|
|
|
3
|
+
import logging
|
|
3
4
|
import os
|
|
4
5
|
from pathlib import Path
|
|
5
6
|
from typing import Any, Optional, Union
|
|
@@ -33,6 +34,8 @@ from .schema import (
|
|
|
33
34
|
)
|
|
34
35
|
from .validator import ConfigValidator
|
|
35
36
|
|
|
37
|
+
logger = logging.getLogger(__name__)
|
|
38
|
+
|
|
36
39
|
|
|
37
40
|
class ConfigLoader:
|
|
38
41
|
"""Load and validate configuration from YAML files."""
|
|
@@ -533,6 +536,36 @@ class ConfigLoader:
|
|
|
533
536
|
BranchAnalysisConfig(**branch_data) if branch_data else BranchAnalysisConfig()
|
|
534
537
|
)
|
|
535
538
|
|
|
539
|
+
# Process aliases file and manual identity mappings
|
|
540
|
+
manual_mappings = list(analysis_data.get("identity", {}).get("manual_mappings", []))
|
|
541
|
+
aliases_file_path = None
|
|
542
|
+
|
|
543
|
+
# Load aliases from external file if specified
|
|
544
|
+
aliases_file = analysis_data.get("identity", {}).get("aliases_file")
|
|
545
|
+
if aliases_file:
|
|
546
|
+
aliases_path = Path(aliases_file).expanduser()
|
|
547
|
+
# Make relative paths relative to config file directory
|
|
548
|
+
if not aliases_path.is_absolute():
|
|
549
|
+
aliases_path = config_path.parent / aliases_path
|
|
550
|
+
|
|
551
|
+
aliases_file_path = aliases_path
|
|
552
|
+
|
|
553
|
+
# Load and merge aliases if file exists
|
|
554
|
+
if aliases_path.exists():
|
|
555
|
+
try:
|
|
556
|
+
from .aliases import AliasesManager
|
|
557
|
+
|
|
558
|
+
aliases_mgr = AliasesManager(aliases_path)
|
|
559
|
+
# Merge aliases with existing manual mappings
|
|
560
|
+
manual_mappings.extend(aliases_mgr.to_manual_mappings())
|
|
561
|
+
logger.info(
|
|
562
|
+
f"Loaded {len(aliases_mgr.aliases)} identity aliases from {aliases_path}"
|
|
563
|
+
)
|
|
564
|
+
except Exception as e:
|
|
565
|
+
logger.warning(f"Could not load aliases file {aliases_path}: {e}")
|
|
566
|
+
else:
|
|
567
|
+
logger.warning(f"Aliases file not found: {aliases_path}")
|
|
568
|
+
|
|
536
569
|
return AnalysisConfig(
|
|
537
570
|
story_point_patterns=analysis_data.get(
|
|
538
571
|
"story_point_patterns",
|
|
@@ -550,7 +583,8 @@ class ConfigLoader:
|
|
|
550
583
|
similarity_threshold=analysis_data.get("identity", {}).get(
|
|
551
584
|
"similarity_threshold", 0.85
|
|
552
585
|
),
|
|
553
|
-
manual_identity_mappings=
|
|
586
|
+
manual_identity_mappings=manual_mappings,
|
|
587
|
+
aliases_file=aliases_file_path,
|
|
554
588
|
default_ticket_platform=analysis_data.get("default_ticket_platform"),
|
|
555
589
|
branch_mapping_rules=analysis_data.get("branch_mapping_rules", {}),
|
|
556
590
|
ticket_platforms=analysis_data.get("ticket_platforms"),
|
|
@@ -301,6 +301,7 @@ class AnalysisConfig:
|
|
|
301
301
|
exclude_paths: list[str] = field(default_factory=list)
|
|
302
302
|
similarity_threshold: float = 0.85
|
|
303
303
|
manual_identity_mappings: list[dict[str, Any]] = field(default_factory=list)
|
|
304
|
+
aliases_file: Optional[Path] = None # Path to shared aliases.yaml file
|
|
304
305
|
default_ticket_platform: Optional[str] = None
|
|
305
306
|
branch_mapping_rules: dict[str, list[str]] = field(default_factory=dict)
|
|
306
307
|
ticket_platforms: Optional[list[str]] = None
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
gitflow_analytics/__init__.py,sha256=yN1dyAUu4l9qX-YNAGRItEf4RFFe-5GQiOntXPIfdxo,683
|
|
2
|
-
gitflow_analytics/_version.py,sha256=
|
|
3
|
-
gitflow_analytics/cli.py,sha256=
|
|
2
|
+
gitflow_analytics/_version.py,sha256=VlHh9eAQ34bIRsu31l61B7n-gUeASgbwjJXQ9Hp_IbQ,137
|
|
3
|
+
gitflow_analytics/cli.py,sha256=gKfN-fbEPQqEQVgE693xU5yKSlR4TeVXjoI-vG5oIPA,258433
|
|
4
4
|
gitflow_analytics/config.py,sha256=XRuxvzLWyn_ML7mDCcuZ9-YFNAEsnt33vIuWxQQ_jxg,1033
|
|
5
5
|
gitflow_analytics/constants.py,sha256=GXEncUJS9ijOI5KWtQCTANwdqxPfXpw-4lNjhaWTKC4,2488
|
|
6
6
|
gitflow_analytics/verify_activity.py,sha256=aRQnmypf5NDasXudf2iz_WdJnCWtwlbAiJ5go0DJLSU,27050
|
|
@@ -11,14 +11,15 @@ gitflow_analytics/classification/feature_extractor.py,sha256=W82vztPQO8-MFw9Yt17
|
|
|
11
11
|
gitflow_analytics/classification/linguist_analyzer.py,sha256=HjLx9mM7hGXtrvMba6osovHJLAacTx9oDmN6CS5w0bE,17687
|
|
12
12
|
gitflow_analytics/classification/model.py,sha256=2KbmFh9MpyvHMcNHbqwUTAAVLHHu3MiTfFIPyZSGa-8,16356
|
|
13
13
|
gitflow_analytics/cli_wizards/__init__.py,sha256=D73D97cS1hZsB_fCQQaAiWtd_w2Lb8TtcGc9Pn2DIyE,343
|
|
14
|
-
gitflow_analytics/cli_wizards/install_wizard.py,sha256=
|
|
15
|
-
gitflow_analytics/cli_wizards/run_launcher.py,sha256=
|
|
16
|
-
gitflow_analytics/config/__init__.py,sha256=
|
|
14
|
+
gitflow_analytics/cli_wizards/install_wizard.py,sha256=aQepE3ThhBPyOjkDj-_p0fKZ2OEVTlq7d_6HEt4JrkQ,44871
|
|
15
|
+
gitflow_analytics/cli_wizards/run_launcher.py,sha256=J6G_C7IqxPg7_GhAfbV99D1dIIWwb1s_qmHC7Iv2iGI,15038
|
|
16
|
+
gitflow_analytics/config/__init__.py,sha256=KziRIbBJctB5LOLcKLzELWA1rXwjS6-C2_DeM_hT9rM,1133
|
|
17
|
+
gitflow_analytics/config/aliases.py,sha256=z9F0X6qbbF544Tw7sHlOoBj5mpRSddMkCpoKLzvVzDU,10960
|
|
17
18
|
gitflow_analytics/config/errors.py,sha256=IBKhAIwJ4gscZFnLDyE3jEp03wn2stPR7JQJXNSIfok,10386
|
|
18
|
-
gitflow_analytics/config/loader.py,sha256=
|
|
19
|
+
gitflow_analytics/config/loader.py,sha256=EiksTB1Uqey63FxIvuud_kMdab3sNDfuICE_RwMLYFA,37290
|
|
19
20
|
gitflow_analytics/config/profiles.py,sha256=yUjFAWW6uzOUdi5qlPE-QV9681HigyrLiSJFpL8X9A0,7967
|
|
20
21
|
gitflow_analytics/config/repository.py,sha256=maptMAdCKDsuMAfoTAaTrMPVfVd_tKNLRenvuPe1-t4,4350
|
|
21
|
-
gitflow_analytics/config/schema.py,sha256=
|
|
22
|
+
gitflow_analytics/config/schema.py,sha256=lFN80-YcSqu33UwiJryFHn8F5_zX619AaJXSuJ3aht8,15271
|
|
22
23
|
gitflow_analytics/config/validator.py,sha256=l7AHjXYJ8wEmyA1rn2WiItZXtAiRb9YBLjFCDl53qKM,5907
|
|
23
24
|
gitflow_analytics/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
24
25
|
gitflow_analytics/core/analyzer.py,sha256=59kGObzjziOb8geFyZMKCUvWmo3hcXE0eTgrjYEc1XA,58736
|
|
@@ -132,9 +133,9 @@ gitflow_analytics/tui/widgets/export_modal.py,sha256=L-XKPOc6u-fow2TudPgDnC0kXZM
|
|
|
132
133
|
gitflow_analytics/tui/widgets/progress_widget.py,sha256=Qny6Q1nU0Pr3aj4aHfXLaRjya9MH3rldR2HWYiaQyGE,6167
|
|
133
134
|
gitflow_analytics/ui/__init__.py,sha256=UBhYhZMvwlSrCuGWjkIdoP2zNbiQxOHOli-I8mqIZUE,441
|
|
134
135
|
gitflow_analytics/ui/progress_display.py,sha256=3xJnCOSs1DRVAfS-rTu37EsLfWDFW5-mbv-bPS9NMm4,59182
|
|
135
|
-
gitflow_analytics-3.
|
|
136
|
-
gitflow_analytics-3.
|
|
137
|
-
gitflow_analytics-3.
|
|
138
|
-
gitflow_analytics-3.
|
|
139
|
-
gitflow_analytics-3.
|
|
140
|
-
gitflow_analytics-3.
|
|
136
|
+
gitflow_analytics-3.5.2.dist-info/licenses/LICENSE,sha256=xwvSwY1GYXpRpmbnFvvnbmMwpobnrdN9T821sGvjOY0,1066
|
|
137
|
+
gitflow_analytics-3.5.2.dist-info/METADATA,sha256=7TWOkZxt3KP7_w30_hufQiB8JKsNC6ZGJcPHwNRyMAc,34122
|
|
138
|
+
gitflow_analytics-3.5.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
|
|
139
|
+
gitflow_analytics-3.5.2.dist-info/entry_points.txt,sha256=a3y8HnfLOvK1QVOgAkDY6VQXXm3o9ZSQRZrpiaS3hEM,65
|
|
140
|
+
gitflow_analytics-3.5.2.dist-info/top_level.txt,sha256=CQyxZXjKvpSB1kgqqtuE0PCRqfRsXZJL8JrYpJKtkrk,18
|
|
141
|
+
gitflow_analytics-3.5.2.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|