gitflow-analytics 1.0.0-py3-none-any.whl → 1.0.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gitflow_analytics/__init__.py +11 -9
- gitflow_analytics/_version.py +2 -2
- gitflow_analytics/cli.py +691 -243
- gitflow_analytics/cli_rich.py +353 -0
- gitflow_analytics/config.py +389 -96
- gitflow_analytics/core/analyzer.py +175 -78
- gitflow_analytics/core/branch_mapper.py +132 -132
- gitflow_analytics/core/cache.py +242 -173
- gitflow_analytics/core/identity.py +214 -178
- gitflow_analytics/extractors/base.py +13 -11
- gitflow_analytics/extractors/story_points.py +70 -59
- gitflow_analytics/extractors/tickets.py +111 -88
- gitflow_analytics/integrations/github_integration.py +91 -77
- gitflow_analytics/integrations/jira_integration.py +284 -0
- gitflow_analytics/integrations/orchestrator.py +99 -72
- gitflow_analytics/metrics/dora.py +183 -179
- gitflow_analytics/models/database.py +191 -54
- gitflow_analytics/qualitative/__init__.py +30 -0
- gitflow_analytics/qualitative/classifiers/__init__.py +13 -0
- gitflow_analytics/qualitative/classifiers/change_type.py +468 -0
- gitflow_analytics/qualitative/classifiers/domain_classifier.py +399 -0
- gitflow_analytics/qualitative/classifiers/intent_analyzer.py +436 -0
- gitflow_analytics/qualitative/classifiers/risk_analyzer.py +412 -0
- gitflow_analytics/qualitative/core/__init__.py +13 -0
- gitflow_analytics/qualitative/core/llm_fallback.py +653 -0
- gitflow_analytics/qualitative/core/nlp_engine.py +373 -0
- gitflow_analytics/qualitative/core/pattern_cache.py +457 -0
- gitflow_analytics/qualitative/core/processor.py +540 -0
- gitflow_analytics/qualitative/models/__init__.py +25 -0
- gitflow_analytics/qualitative/models/schemas.py +272 -0
- gitflow_analytics/qualitative/utils/__init__.py +13 -0
- gitflow_analytics/qualitative/utils/batch_processor.py +326 -0
- gitflow_analytics/qualitative/utils/cost_tracker.py +343 -0
- gitflow_analytics/qualitative/utils/metrics.py +347 -0
- gitflow_analytics/qualitative/utils/text_processing.py +243 -0
- gitflow_analytics/reports/analytics_writer.py +25 -8
- gitflow_analytics/reports/csv_writer.py +60 -32
- gitflow_analytics/reports/narrative_writer.py +21 -15
- gitflow_analytics/tui/__init__.py +5 -0
- gitflow_analytics/tui/app.py +721 -0
- gitflow_analytics/tui/screens/__init__.py +8 -0
- gitflow_analytics/tui/screens/analysis_progress_screen.py +487 -0
- gitflow_analytics/tui/screens/configuration_screen.py +547 -0
- gitflow_analytics/tui/screens/loading_screen.py +358 -0
- gitflow_analytics/tui/screens/main_screen.py +304 -0
- gitflow_analytics/tui/screens/results_screen.py +698 -0
- gitflow_analytics/tui/widgets/__init__.py +7 -0
- gitflow_analytics/tui/widgets/data_table.py +257 -0
- gitflow_analytics/tui/widgets/export_modal.py +301 -0
- gitflow_analytics/tui/widgets/progress_widget.py +192 -0
- gitflow_analytics-1.0.3.dist-info/METADATA +490 -0
- gitflow_analytics-1.0.3.dist-info/RECORD +62 -0
- gitflow_analytics-1.0.0.dist-info/METADATA +0 -201
- gitflow_analytics-1.0.0.dist-info/RECORD +0 -30
- {gitflow_analytics-1.0.0.dist-info → gitflow_analytics-1.0.3.dist-info}/WHEEL +0 -0
- {gitflow_analytics-1.0.0.dist-info → gitflow_analytics-1.0.3.dist-info}/entry_points.txt +0 -0
- {gitflow_analytics-1.0.0.dist-info → gitflow_analytics-1.0.3.dist-info}/licenses/LICENSE +0 -0
- {gitflow_analytics-1.0.0.dist-info → gitflow_analytics-1.0.3.dist-info}/top_level.txt +0 -0
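The largest change below is the rewritten cli.py: the `analyze` command gains new flags (`--enable-qualitative`, `--qualitative-only`, `--rich`, `--clear-cache`, `--validate-only`) and the CLI group gains new subcommands (`cache_stats`, `merge_identity`, `discover_jira_fields`, `list_developers`). A minimal sketch of exercising the new flags through click's test runner, handy for comparing 1.0.0 and 1.0.3 behavior without a shell; the config.yaml path is a placeholder, and a configured `qualitative:` section plus the optional NLP dependencies are assumptions, not things this diff guarantees about your setup:

from click.testing import CliRunner

from gitflow_analytics.cli import cli  # the click group defined in cli.py below

runner = CliRunner()

# Validate the configuration without running the analysis (exercises --validate-only).
# --config uses click.Path(exists=True), so the file must exist.
result = runner.invoke(cli, ["analyze", "-c", "config.yaml", "--validate-only"])
print(result.output)

# Full run with the new 1.0.3 qualitative pass (assumes config.yaml has a
# 'qualitative:' section and spacy/scikit-learn/openai/tiktoken are installed).
result = runner.invoke(
    cli, ["analyze", "-c", "config.yaml", "--weeks", "4", "--enable-qualitative"]
)
print(result.exit_code)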
gitflow_analytics/cli.py
CHANGED
@@ -1,278 +1,629 @@
 """Command-line interface for GitFlow Analytics."""
-
-import yaml
-from pathlib import Path
-from datetime import datetime, timedelta
-from typing import Optional
+
 import sys
+from datetime import datetime, timedelta, timezone
+from pathlib import Path
+from typing import Any, Optional, cast
+
+import click
+import git
 import pandas as pd

-from .
-from .
+from ._version import __version__
+from .cli_rich import create_rich_display
+from .config import ConfigLoader
 from .core.analyzer import GitAnalyzer
+from .core.cache import GitAnalysisCache
 from .core.identity import DeveloperIdentityResolver
-from .extractors.story_points import StoryPointExtractor
 from .extractors.tickets import TicketExtractor
-from .
+from .integrations.orchestrator import IntegrationOrchestrator
+from .metrics.dora import DORAMetricsCalculator
 from .reports.analytics_writer import AnalyticsReportGenerator
+from .reports.csv_writer import CSVReportGenerator
 from .reports.narrative_writer import NarrativeReportGenerator
-from .metrics.dora import DORAMetricsCalculator
-from .integrations.orchestrator import IntegrationOrchestrator


 @click.group()
-@click.version_option(version=
-def cli():
+@click.version_option(version=__version__, prog_name="GitFlow Analytics")
+def cli() -> None:
     """GitFlow Analytics - Analyze Git repositories for productivity insights."""
     pass


+# TUI command removed - replaced with rich CLI output
+# Legacy TUI code preserved but not exposed
+
+
 @cli.command()
-@click.option(
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-@click.option(
-
-
-
-
-
-
-
+@click.option(
+    "--config",
+    "-c",
+    type=click.Path(exists=True, path_type=Path),
+    required=True,
+    help="Path to YAML configuration file",
+)
+@click.option(
+    "--weeks", "-w", type=int, default=12, help="Number of weeks to analyze (default: 12)"
+)
+@click.option(
+    "--output",
+    "-o",
+    type=click.Path(path_type=Path),
+    default=None,
+    help="Output directory for reports (overrides config file)",
+)
+@click.option("--anonymize", is_flag=True, help="Anonymize developer information in reports")
+@click.option("--no-cache", is_flag=True, help="Disable caching (slower but always fresh)")
+@click.option(
+    "--validate-only", is_flag=True, help="Validate configuration without running analysis"
+)
+@click.option("--clear-cache", is_flag=True, help="Clear cache before running analysis")
+@click.option("--enable-qualitative", is_flag=True, help="Enable qualitative analysis (requires additional dependencies)")
+@click.option("--qualitative-only", is_flag=True, help="Run only qualitative analysis on existing commits")
+@click.option("--rich", is_flag=True, default=True, help="Use rich terminal output (default: enabled)")
+def analyze(
+    config: Path,
+    weeks: int,
+    output: Optional[Path],
+    anonymize: bool,
+    no_cache: bool,
+    validate_only: bool,
+    clear_cache: bool,
+    enable_qualitative: bool,
+    qualitative_only: bool,
+    rich: bool,
+) -> None:
     """Analyze Git repositories using configuration file."""
+
+    # Initialize display - use rich by default, fall back to simple output if needed
+    display = create_rich_display() if rich else None

     try:
+        if display:
+            display.show_header()
+
         # Load configuration
-
+        if display:
+            display.print_status(f"Loading configuration from {config}...", "info")
+        else:
+            click.echo(f"📋 Loading configuration from {config}...")
+
         cfg = ConfigLoader.load(config)
-
+
         # Validate configuration
         warnings = ConfigLoader.validate_config(cfg)
         if warnings:
-
-
-
-
+            warning_msg = "Configuration warnings:\n" + "\n".join(f"• {w}" for w in warnings)
+            if display:
+                display.show_warning(warning_msg)
+            else:
+                click.echo("⚠️ Configuration warnings:")
+                for warning in warnings:
+                    click.echo(f"   - {warning}")
+
         if validate_only:
             if not warnings:
-
+                if display:
+                    display.print_status("Configuration is valid!", "success")
+                else:
+                    click.echo("✅ Configuration is valid!")
             else:
-
+                if display:
+                    display.print_status("Configuration has issues that should be addressed.", "error")
+                else:
+                    click.echo("❌ Configuration has issues that should be addressed.")
             return
-
+
         # Use output directory from CLI or config
         if output is None:
-            output = cfg.output.directory if cfg.output.directory else Path(
-
+            output = cfg.output.directory if cfg.output.directory else Path("./reports")
+
         # Setup output directory
         output.mkdir(parents=True, exist_ok=True)

+        # Show configuration status in rich display
+        if display:
+            github_org = cfg.github.organization if cfg.github else None
+            github_token_valid = bool(cfg.github and cfg.github.token)
+            jira_configured = bool(cfg.jira and cfg.jira.base_url)
+            jira_valid = jira_configured  # Simplified validation
+
+            display.show_configuration_status(
+                config,
+                github_org=github_org,
+                github_token_valid=github_token_valid,
+                jira_configured=jira_configured,
+                jira_valid=jira_valid,
+                analysis_weeks=weeks
+            )
+
         # Initialize components
         cache_dir = cfg.cache.directory
         if clear_cache:
-
+            if display:
+                display.print_status("Clearing cache...", "info")
+            else:
+                click.echo("🗑️ Clearing cache...")
             import shutil
+
             if cache_dir.exists():
                 shutil.rmtree(cache_dir)
-
-        cache = GitAnalysisCache(
-
-            ttl_hours=0 if no_cache else cfg.cache.ttl_hours
-        )
-
+
+        cache = GitAnalysisCache(cache_dir, ttl_hours=0 if no_cache else cfg.cache.ttl_hours)
+
         identity_resolver = DeveloperIdentityResolver(
-            cache_dir /
+            cache_dir / "identities.db",
             similarity_threshold=cfg.analysis.similarity_threshold,
-            manual_mappings=cfg.analysis.manual_identity_mappings
+            manual_mappings=cfg.analysis.manual_identity_mappings,
+        )
+
+        analyzer = GitAnalyzer(
+            cache,
+            branch_mapping_rules=cfg.analysis.branch_mapping_rules,
+            allowed_ticket_platforms=getattr(cfg.analysis, "ticket_platforms", None),
+            exclude_paths=cfg.analysis.exclude_paths,
         )
-
-        analyzer = GitAnalyzer(cache, branch_mapping_rules=cfg.analysis.branch_mapping_rules)
         orchestrator = IntegrationOrchestrator(cfg, cache)
-
-        #
-
+
+        # Discovery organization repositories if needed
+        repositories_to_analyze = cfg.repositories
+        if cfg.github.organization and not repositories_to_analyze:
+            if display:
+                display.print_status(f"Discovering repositories from organization: {cfg.github.organization}", "info")
+            else:
+                click.echo(f"🔍 Discovering repositories from organization: {cfg.github.organization}")
+            try:
+                # Use a 'repos' directory in the config directory for cloned repositories
+                config_dir = Path(config).parent if config else Path.cwd()
+                repos_dir = config_dir / "repos"
+                discovered_repos = cfg.discover_organization_repositories(clone_base_path=repos_dir)
+                repositories_to_analyze = discovered_repos
+
+                if display:
+                    display.print_status(f"Found {len(discovered_repos)} repositories in organization", "success")
+                    # Show repository discovery in structured format
+                    repo_data = [{
+                        "name": repo.name,
+                        "github_repo": repo.github_repo,
+                        "exists": repo.path.exists()
+                    } for repo in discovered_repos]
+                    display.show_repository_discovery(repo_data)
+                else:
+                    click.echo(f"   ✅ Found {len(discovered_repos)} repositories in organization")
+                    for repo in discovered_repos:
+                        click.echo(f"      - {repo.name} ({repo.github_repo})")
+            except Exception as e:
+                if display:
+                    display.show_error(f"Failed to discover repositories: {e}")
+                else:
+                    click.echo(f"   ❌ Failed to discover repositories: {e}")
+                return
+
+        # Analysis period (timezone-aware to match commit timestamps)
+        end_date = datetime.now(timezone.utc)
         start_date = end_date - timedelta(weeks=weeks)
-
-
-
-
+
+        if display:
+            display.print_status(f"Analyzing {len(repositories_to_analyze)} repositories...", "info")
+            display.print_status(f"Period: {start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')}", "info")
+            # Start live progress display
+            display.start_live_display()
+            display.add_progress_task("repos", "Processing repositories", len(repositories_to_analyze))
+        else:
+            click.echo(f"\n🚀 Analyzing {len(repositories_to_analyze)} repositories...")
+            click.echo(
+                f"   Period: {start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')}"
+            )
+
         # Analyze repositories
         all_commits = []
         all_prs = []
         all_enrichments = {}
-
-        for repo_config in
-
-
-
+
+        for repo_config in repositories_to_analyze:
+            if display:
+                display.update_progress_task("repos", description=f"Analyzing {repo_config.name}...")
+            else:
+                click.echo(f"\n📁 Analyzing {repo_config.name}...")
+
+            # Check if repo exists, clone if needed
             if not repo_config.path.exists():
-
-
-
+                # Try to clone if we have a github_repo configured
+                if repo_config.github_repo and cfg.github.organization:
+                    if display:
+                        display.print_status("Cloning repository from GitHub...", "info")
+                    else:
+                        click.echo("   📥 Cloning repository from GitHub...")
+                    try:
+                        # Ensure parent directory exists
+                        repo_config.path.parent.mkdir(parents=True, exist_ok=True)
+
+                        # Clone the repository
+                        clone_url = f"https://github.com/{repo_config.github_repo}.git"
+                        if cfg.github.token:
+                            # Use token for authentication
+                            clone_url = f"https://{cfg.github.token}@github.com/{repo_config.github_repo}.git"
+
+                        git.Repo.clone_from(clone_url, repo_config.path, branch=repo_config.branch)
+                        if display:
+                            display.print_status(f"Successfully cloned {repo_config.github_repo}", "success")
+                        else:
+                            click.echo(f"   ✅ Successfully cloned {repo_config.github_repo}")
+                    except Exception as e:
+                        if display:
+                            display.print_status(f"Failed to clone repository: {e}", "error")
+                        else:
+                            click.echo(f"   ❌ Failed to clone repository: {e}")
+                        continue
+                else:
+                    if display:
+                        display.print_status(f"Repository path not found: {repo_config.path}", "error")
+                    else:
+                        click.echo(f"   ❌ Repository path not found: {repo_config.path}")
+                    continue
+
             # Analyze repository
             try:
                 commits = analyzer.analyze_repository(
-                    repo_config.path,
-                    start_date,
-                    repo_config.branch
+                    repo_config.path, start_date, repo_config.branch
                 )
-
+
                 # Add project key and resolve developer identities
                 for commit in commits:
                     # Use configured project key or fall back to inferred project
-                    if repo_config.project_key and repo_config.project_key !=
-                        commit[
+                    if repo_config.project_key and repo_config.project_key != "UNKNOWN":
+                        commit["project_key"] = repo_config.project_key
                     else:
-                        commit[
-
-                    commit[
-                        commit[
-                        commit['author_email']
+                        commit["project_key"] = commit.get("inferred_project", "UNKNOWN")
+
+                    commit["canonical_id"] = identity_resolver.resolve_developer(
+                        commit["author_name"], commit["author_email"]
                     )
-
+
                 all_commits.extend(commits)
-
-
+                if display:
+                    display.print_status(f"Found {len(commits)} commits", "success")
+                else:
+                    click.echo(f"   ✅ Found {len(commits)} commits")
+
                 # Enrich with integration data
-                enrichment = orchestrator.enrich_repository_data(
-                    repo_config, commits, start_date
-                )
+                enrichment = orchestrator.enrich_repository_data(repo_config, commits, start_date)
                 all_enrichments[repo_config.name] = enrichment
-
-                if enrichment[
-                    all_prs.extend(enrichment[
-
-
+
+                if enrichment["prs"]:
+                    all_prs.extend(enrichment["prs"])
+                    if display:
+                        display.print_status(f"Found {len(enrichment['prs'])} pull requests", "success")
+                    else:
+                        click.echo(f"   ✅ Found {len(enrichment['prs'])} pull requests")
+
             except Exception as e:
-
+                if display:
+                    display.print_status(f"Error: {e}", "error")
+                else:
+                    click.echo(f"   ❌ Error: {e}")
                 continue
+            finally:
+                if display:
+                    display.update_progress_task("repos", advance=1)
+
+        # Stop repository progress and clean up display
+        if display:
+            display.complete_progress_task("repos", "Repository analysis complete")
+            display.stop_live_display()

         if not all_commits:
-
+            if display:
+                display.show_error("No commits found in the specified period!")
+            else:
+                click.echo("\n❌ No commits found in the specified period!")
             return
-
+
         # Update developer statistics
-
+        if display:
+            display.print_status("Resolving developer identities...", "info")
+        else:
+            click.echo("\n👥 Resolving developer identities...")
+
         identity_resolver.update_commit_stats(all_commits)
         developer_stats = identity_resolver.get_developer_stats()
-        click.echo(f"   ✅ Identified {len(developer_stats)} unique developers")

+        if display:
+            display.print_status(f"Identified {len(developer_stats)} unique developers", "success")
+        else:
+            click.echo(f"   ✅ Identified {len(developer_stats)} unique developers")
+
         # Analyze tickets
-
-
+        if display:
+            display.print_status("Analyzing ticket references...", "info")
+        else:
+            click.echo("\n🎫 Analyzing ticket references...")
+
+        ticket_extractor = TicketExtractor(
+            allowed_platforms=getattr(cfg.analysis, "ticket_platforms", None)
+        )
         ticket_analysis = ticket_extractor.analyze_ticket_coverage(all_commits, all_prs)
+
+        for platform, count in ticket_analysis["ticket_summary"].items():
+            if display:
+                display.print_status(f"{platform.title()}: {count} unique tickets", "success")
+            else:
+                click.echo(f"   - {platform.title()}: {count} unique tickets")
+
+        # Perform qualitative analysis if enabled
+        qualitative_results = []
+        if (enable_qualitative or qualitative_only) and cfg.qualitative and cfg.qualitative.enabled:
+            if display:
+                display.print_status("Performing qualitative analysis...", "info")
+            else:
+                click.echo("\n🧠 Performing qualitative analysis...")
+
+            try:
+                from .qualitative import QualitativeProcessor
+                from .models.database import Database
+
+                # Initialize qualitative analysis components
+                qual_db = Database(cfg.cache.directory / "qualitative.db")
+                qual_processor = QualitativeProcessor(cfg.qualitative, qual_db)
+
+                # Validate setup
+                is_valid, issues = qual_processor.validate_setup()
+                if not is_valid:
+                    issue_msg = "Qualitative analysis setup issues:\n" + "\n".join(f"• {issue}" for issue in issues)
+                    if issues:
+                        issue_msg += "\n\n💡 Install dependencies: pip install spacy scikit-learn openai tiktoken"
+                        issue_msg += "\n💡 Download spaCy model: python -m spacy download en_core_web_sm"
+
+                    if display:
+                        display.show_warning(issue_msg)
+                    else:
+                        click.echo("   ⚠️ Qualitative analysis setup issues:")
+                        for issue in issues:
+                            click.echo(f"      - {issue}")
+                        if issues:
+                            click.echo("   💡 Install dependencies: pip install spacy scikit-learn openai tiktoken")
+                            click.echo("   💡 Download spaCy model: python -m spacy download en_core_web_sm")
+
+                # Convert commits to qualitative format
+                commits_for_qual = []
+                for commit in all_commits:
+                    commit_dict = {
+                        'hash': commit.hash,
+                        'message': commit.message,
+                        'author_name': commit.author_name,
+                        'author_email': commit.author_email,
+                        'timestamp': commit.timestamp,
+                        'files_changed': commit.files_changed or [],
+                        'insertions': commit.insertions,
+                        'deletions': commit.deletions,
+                        'branch': getattr(commit, 'branch', 'main')
+                    }
+                    commits_for_qual.append(commit_dict)
+
+                # Perform qualitative analysis with progress tracking
+                if display:
+                    display.start_live_display()
+                    display.add_progress_task("qualitative", "Analyzing commits with qualitative insights", len(commits_for_qual))
+
+                qualitative_results = qual_processor.process_commits(commits_for_qual, show_progress=True)
+
+                if display:
+                    display.complete_progress_task("qualitative", "Qualitative analysis complete")
+                    display.stop_live_display()
+                    display.print_status(f"Analyzed {len(qualitative_results)} commits with qualitative insights", "success")
+                else:
+                    click.echo(f"   ✅ Analyzed {len(qualitative_results)} commits with qualitative insights")
+
+                # Get processing statistics and show them
+                qual_stats = qual_processor.get_processing_statistics()
+                if display:
+                    display.show_qualitative_stats(qual_stats)
+                else:
+                    processing_summary = qual_stats['processing_summary']
+                    click.echo(f"   📈 Processing: {processing_summary['commits_per_second']:.1f} commits/sec")
+                    click.echo(f"   🎯 Methods: {processing_summary['method_breakdown']['cache']:.1f}% cached, "
+                               f"{processing_summary['method_breakdown']['nlp']:.1f}% NLP, "
+                               f"{processing_summary['method_breakdown']['llm']:.1f}% LLM")
+
+                    if qual_stats['llm_statistics']['model_usage'] == 'available':
+                        llm_stats = qual_stats['llm_statistics']['cost_tracking']
+                        if llm_stats['total_cost'] > 0:
+                            click.echo(f"   💰 LLM Cost: ${llm_stats['total_cost']:.4f}")
+
+            except ImportError as e:
+                error_msg = f"Qualitative analysis dependencies missing: {e}\n\n💡 Install with: pip install spacy scikit-learn openai tiktoken"
+                if display:
+                    display.show_error(error_msg)
+                else:
+                    click.echo(f"   ❌ Qualitative analysis dependencies missing: {e}")
+                    click.echo("   💡 Install with: pip install spacy scikit-learn openai tiktoken")
+
+                if not qualitative_only:
+                    if display:
+                        display.print_status("Continuing with standard analysis...", "info")
+                    else:
+                        click.echo("   ⏭️ Continuing with standard analysis...")
+                else:
+                    if display:
+                        display.show_error("Cannot perform qualitative-only analysis without dependencies")
+                    else:
+                        click.echo("   ❌ Cannot perform qualitative-only analysis without dependencies")
+                    return
+            except Exception as e:
+                error_msg = f"Qualitative analysis failed: {e}"
+                if display:
+                    display.show_error(error_msg)
+                else:
+                    click.echo(f"   ❌ Qualitative analysis failed: {e}")
+
+                if qualitative_only:
+                    if display:
+                        display.show_error("Cannot continue with qualitative-only analysis")
+                    else:
+                        click.echo("   ❌ Cannot continue with qualitative-only analysis")
+                    return
+                else:
+                    if display:
+                        display.print_status("Continuing with standard analysis...", "info")
+                    else:
+                        click.echo("   ⏭️ Continuing with standard analysis...")
+        elif enable_qualitative and not cfg.qualitative:
+            warning_msg = "Qualitative analysis requested but not configured in config file\n\nAdd a 'qualitative:' section to your configuration"
+            if display:
+                display.show_warning(warning_msg)
+            else:
+                click.echo("\n⚠️ Qualitative analysis requested but not configured in config file")
+                click.echo("   Add a 'qualitative:' section to your configuration")

-
-
-
+        # Skip standard analysis if qualitative-only mode
+        if qualitative_only:
+            if display:
+                display.print_status("Qualitative-only analysis completed!", "success")
+            else:
+                click.echo("\n✅ Qualitative-only analysis completed!")
+            return
+
         # Generate reports
-
+        if display:
+            display.print_status("Generating reports...", "info")
+        else:
+            click.echo("\n📊 Generating reports...")
         report_gen = CSVReportGenerator(anonymize=anonymize or cfg.output.anonymize_enabled)
-        analytics_gen = AnalyticsReportGenerator(
-
-        # Weekly metrics report
-        weekly_report = output / f'weekly_metrics_{datetime.now().strftime("%Y%m%d")}.csv'
-        report_gen.generate_weekly_report(
-            all_commits,
-            developer_stats,
-            weekly_report,
-            weeks
+        analytics_gen = AnalyticsReportGenerator(
+            anonymize=anonymize or cfg.output.anonymize_enabled
         )
-
+
+        # Collect generated report files for display
+        generated_reports = []

+        # Weekly metrics report
+        weekly_report = output / f'weekly_metrics_{datetime.now(timezone.utc).strftime("%Y%m%d")}.csv'
+        try:
+            report_gen.generate_weekly_report(all_commits, developer_stats, weekly_report, weeks)
+            generated_reports.append(weekly_report.name)
+            if not display:
+                click.echo(f"   ✅ Weekly metrics: {weekly_report}")
+        except Exception as e:
+            click.echo(f"   ❌ Error generating weekly metrics report: {e}")
+            click.echo(f"   🔍 Error type: {type(e).__name__}")
+            click.echo(f"   📍 Error details: {str(e)}")
+            import traceback
+            traceback.print_exc()
+            raise
+
         # Summary report
         summary_report = output / f'summary_{datetime.now().strftime("%Y%m%d")}.csv'
-
-
-
-
-
-
-
-
-
+        try:
+            report_gen.generate_summary_report(
+                all_commits, all_prs, developer_stats, ticket_analysis, summary_report
+            )
+            generated_reports.append(summary_report.name)
+            if not display:
+                click.echo(f"   ✅ Summary stats: {summary_report}")
+        except Exception as e:
+            click.echo(f"   ❌ Error generating summary report: {e}")
+            click.echo(f"   🔍 Error type: {type(e).__name__}")
+            click.echo(f"   📍 Error details: {str(e)}")
+            import traceback
+            traceback.print_exc()
+            raise
+
         # Developer report
         developer_report = output / f'developers_{datetime.now().strftime("%Y%m%d")}.csv'
-
-            developer_stats,
-            developer_report
-
-
-
+        try:
+            report_gen.generate_developer_report(developer_stats, developer_report)
+            generated_reports.append(developer_report.name)
+            if not display:
+                click.echo(f"   ✅ Developer stats: {developer_report}")
+        except Exception as e:
+            click.echo(f"   ❌ Error generating developer report: {e}")
+            click.echo(f"   🔍 Error type: {type(e).__name__}")
+            click.echo(f"   📍 Error details: {str(e)}")
+            import traceback
+            traceback.print_exc()
+            raise
+
         # Activity distribution report
         activity_report = output / f'activity_distribution_{datetime.now().strftime("%Y%m%d")}.csv'
-
-
-
-
-
-
-
+        try:
+            analytics_gen.generate_activity_distribution_report(
+                all_commits, developer_stats, activity_report
+            )
+            generated_reports.append(activity_report.name)
+            if not display:
+                click.echo(f"   ✅ Activity distribution: {activity_report}")
+        except Exception as e:
+            click.echo(f"   ❌ Error generating activity distribution report: {e}")
+            click.echo(f"   🔍 Error type: {type(e).__name__}")
+            click.echo(f"   📍 Error details: {str(e)}")
+            import traceback
+            traceback.print_exc()
+            raise
+
         # Developer focus report
         focus_report = output / f'developer_focus_{datetime.now().strftime("%Y%m%d")}.csv'
-
-
-
-
-
-
-
-
+        try:
+            analytics_gen.generate_developer_focus_report(
+                all_commits, developer_stats, focus_report, weeks
+            )
+            generated_reports.append(focus_report.name)
+            if not display:
+                click.echo(f"   ✅ Developer focus: {focus_report}")
+        except Exception as e:
+            click.echo(f"   ❌ Error generating developer focus report: {e}")
+            click.echo(f"   🔍 Error type: {type(e).__name__}")
+            click.echo(f"   📍 Error details: {str(e)}")
+            import traceback
+            traceback.print_exc()
+            raise
+
         # Qualitative insights report
         insights_report = output / f'qualitative_insights_{datetime.now().strftime("%Y%m%d")}.csv'
-
-
-
-
-            insights_report
-
-
-
+        try:
+            analytics_gen.generate_qualitative_insights_report(
+                all_commits, developer_stats, ticket_analysis, insights_report
+            )
+            generated_reports.append(insights_report.name)
+            if not display:
+                click.echo(f"   ✅ Qualitative insights: {insights_report}")
+        except Exception as e:
+            click.echo(f"   ❌ Error generating qualitative insights report: {e}")
+            click.echo(f"   🔍 Error type: {type(e).__name__}")
+            click.echo(f"   📍 Error details: {str(e)}")
+            import traceback
+            traceback.print_exc()
+            raise
+
         # Calculate DORA metrics
         dora_calculator = DORAMetricsCalculator()
         dora_metrics = dora_calculator.calculate_dora_metrics(
             all_commits, all_prs, start_date, end_date
         )
-
+
         # Aggregate PR metrics
         pr_metrics = {}
         for enrichment in all_enrichments.values():
-            if enrichment.get(
+            if enrichment.get("pr_metrics"):
                 # Combine metrics (simplified - in production would properly aggregate)
-                pr_metrics = enrichment[
+                pr_metrics = enrichment["pr_metrics"]
                 break
-
+
         # Generate narrative report if markdown format is enabled
-        if
+        if "markdown" in cfg.output.formats:
             narrative_gen = NarrativeReportGenerator()
-
+
             # Load activity distribution data
             activity_df = pd.read_csv(activity_report)
-            activity_data = activity_df.to_dict(
-
+            activity_data = cast(list[dict[str, Any]], activity_df.to_dict("records"))
+
             # Load focus data
             focus_df = pd.read_csv(focus_report)
-            focus_data = focus_df.to_dict(
-
+            focus_data = cast(list[dict[str, Any]], focus_df.to_dict("records"))
+
             # Load insights data
             insights_df = pd.read_csv(insights_report)
-            insights_data = insights_df.to_dict(
-
+            insights_data = cast(list[dict[str, Any]], insights_df.to_dict("records"))
+
             narrative_report = output / f'narrative_report_{datetime.now().strftime("%Y%m%d")}.md'
             narrative_gen.generate_narrative_report(
                 all_commits,
@@ -284,137 +635,234 @@ def analyze(config: Path, weeks: int, output: Optional[Path], anonymize: bool,
                 ticket_analysis,
                 pr_metrics,
                 narrative_report,
-                weeks
+                weeks,
             )
-
-
+            generated_reports.append(narrative_report.name)
+            if not display:
+                click.echo(f"   ✅ Narrative report: {narrative_report}")
+
         # Generate JSON export if enabled
-        if
+        if "json" in cfg.output.formats:
             json_report = output / f'gitflow_export_{datetime.now().strftime("%Y%m%d")}.json'
-
+
             project_metrics = {
-
-
-
+                "ticket_analysis": ticket_analysis,
+                "pr_metrics": pr_metrics,
+                "enrichments": all_enrichments,
             }
-
+
             orchestrator.export_to_json(
                 all_commits,
                 all_prs,
                 developer_stats,
                 project_metrics,
                 dora_metrics,
-                str(json_report)
+                str(json_report),
             )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            generated_reports.append(json_report.name)
+            if not display:
+                click.echo(f"   ✅ JSON export: {json_report}")
+
+        total_story_points = sum(c.get("story_points", 0) or 0 for c in all_commits)
+        qualitative_count = len(qualitative_results) if qualitative_results else 0
+
+        # Show results summary
+        if display:
+            display.show_analysis_summary(
+                total_commits=len(all_commits),
+                total_prs=len(all_prs),
+                active_developers=len(developer_stats),
+                ticket_coverage=ticket_analysis['commit_coverage_pct'],
+                story_points=total_story_points,
+                qualitative_analyzed=qualitative_count
+            )
+
+            # Show DORA metrics
+            if dora_metrics:
+                display.show_dora_metrics(dora_metrics)
+
+            # Show generated reports
+            display.show_reports_generated(output, generated_reports)
+
+            display.print_status("Analysis complete!", "success")
+        else:
+            # Print summary in simple format
+            click.echo("\n📈 Analysis Summary:")
+            click.echo(f"   - Total commits: {len(all_commits)}")
+            click.echo(f"   - Total PRs: {len(all_prs)}")
+            click.echo(f"   - Active developers: {len(developer_stats)}")
+            click.echo(f"   - Ticket coverage: {ticket_analysis['commit_coverage_pct']:.1f}%")
+            click.echo(f"   - Total story points: {total_story_points}")
+
+            if dora_metrics:
+                click.echo("\n🎯 DORA Metrics:")
+                click.echo(
+                    f"   - Deployment frequency: {dora_metrics['deployment_frequency']['category']}"
+                )
+                click.echo(f"   - Lead time: {dora_metrics['lead_time_hours']:.1f} hours")
+                click.echo(f"   - Change failure rate: {dora_metrics['change_failure_rate']:.1f}%")
+                click.echo(f"   - MTTR: {dora_metrics['mttr_hours']:.1f} hours")
+                click.echo(f"   - Performance level: {dora_metrics['performance_level']}")
+
+            click.echo(f"\n✅ Analysis complete! Reports saved to {output}")
+
     except Exception as e:
-
-
+        if display:
+            display.show_error(str(e), show_debug_hint=True)
+        else:
+            click.echo(f"\n❌ Error: {e}", err=True)
+
+        if "--debug" in sys.argv:
             raise
         sys.exit(1)


 @cli.command()
-@click.option(
-
-
-
-
+@click.option(
+    "--config",
+    "-c",
+    type=click.Path(exists=True, path_type=Path),
+    required=True,
+    help="Path to YAML configuration file",
+)
+def cache_stats(config: Path) -> None:
     """Show cache statistics."""
     try:
         cfg = ConfigLoader.load(config)
         cache = GitAnalysisCache(cfg.cache.directory)
-
+
         stats = cache.get_cache_stats()
-
+
         click.echo("📊 Cache Statistics:")
         click.echo(f"   - Cached commits: {stats['cached_commits']}")
         click.echo(f"   - Cached PRs: {stats['cached_prs']}")
         click.echo(f"   - Cached issues: {stats['cached_issues']}")
         click.echo(f"   - Stale entries: {stats['stale_commits']}")
-
+
         # Calculate cache size
         import os
+
         cache_size = 0
-        for root,
+        for root, _dirs, files in os.walk(cfg.cache.directory):
             for f in files:
                 cache_size += os.path.getsize(os.path.join(root, f))
-
+
         click.echo(f"   - Cache size: {cache_size / 1024 / 1024:.1f} MB")
-
+
     except Exception as e:
         click.echo(f"❌ Error: {e}", err=True)
         sys.exit(1)


 @cli.command()
-@click.option(
-
-
-
-
-
-
+@click.option(
+    "--config",
+    "-c",
+    type=click.Path(exists=True, path_type=Path),
+    required=True,
+    help="Path to YAML configuration file",
+)
+@click.argument("dev1")
+@click.argument("dev2")
+def merge_identity(config: Path, dev1: str, dev2: str) -> None:
     """Merge two developer identities."""
     try:
         cfg = ConfigLoader.load(config)
-        identity_resolver = DeveloperIdentityResolver(
-
-        )
-
+        identity_resolver = DeveloperIdentityResolver(cfg.cache.directory / "identities.db")
+
         click.echo(f"🔄 Merging {dev2} into {dev1}...")
         identity_resolver.merge_identities(dev1, dev2)
         click.echo("✅ Identities merged successfully!")
-
+
     except Exception as e:
         click.echo(f"❌ Error: {e}", err=True)
         sys.exit(1)


 @cli.command()
-@click.option(
-
-
-
-
-    "
+@click.option(
+    "--config",
+    "-c",
+    type=click.Path(exists=True, path_type=Path),
+    required=True,
+    help="Path to YAML configuration file",
+)
+def discover_jira_fields(config: Path) -> None:
+    """Discover available JIRA fields, particularly story point fields."""
     try:
         cfg = ConfigLoader.load(config)
-
-
+
+        # Check if JIRA is configured
+        if not cfg.jira or not cfg.jira.base_url:
+            click.echo("❌ JIRA is not configured in the configuration file")
+            return
+
+        # Initialize JIRA integration
+        from .integrations.jira_integration import JIRAIntegration
+
+        # Create minimal cache for JIRA integration
+        cache = GitAnalysisCache(cfg.cache.directory)
+        jira = JIRAIntegration(
+            cfg.jira.base_url,
+            cfg.jira.access_user,
+            cfg.jira.access_token,
+            cache,
         )
-
+
+        # Validate connection
+        click.echo(f"🔗 Connecting to JIRA at {cfg.jira.base_url}...")
+        if not jira.validate_connection():
+            click.echo("❌ Failed to connect to JIRA. Check your credentials.")
+            return
+
+        click.echo("✅ Connected successfully!\n")
+        click.echo("🔍 Discovering fields with potential story point data...")
+
+        fields = jira.discover_fields()
+
+        if not fields:
+            click.echo("No potential story point fields found.")
+        else:
+            click.echo(f"\nFound {len(fields)} potential story point fields:")
+            click.echo(
+                "\nAdd these to your configuration under jira_integration.story_point_fields:"
+            )
+            click.echo("```yaml")
+            click.echo("jira_integration:")
+            click.echo("  story_point_fields:")
+            for field_id, field_info in fields.items():
+                click.echo(f'    - "{field_id}"  # {field_info["name"]}')
+            click.echo("```")
+
+    except Exception as e:
+        click.echo(f"❌ Error: {e}", err=True)
+        sys.exit(1)
+
+
+@cli.command()
+@click.option(
+    "--config",
+    "-c",
+    type=click.Path(exists=True, path_type=Path),
+    required=True,
+    help="Path to YAML configuration file",
+)
+def list_developers(config: Path) -> None:
+    """List all known developers."""
+    try:
+        cfg = ConfigLoader.load(config)
+        identity_resolver = DeveloperIdentityResolver(cfg.cache.directory / "identities.db")
+
         developers = identity_resolver.get_developer_stats()
-
+
         if not developers:
             click.echo("No developers found. Run analysis first.")
             return
-
+
         click.echo("👥 Known Developers:")
         click.echo(f"{'Name':<30} {'Email':<40} {'Commits':<10} {'Points':<10} {'Aliases'}")
         click.echo("-" * 100)
-
+
         for dev in developers[:20]:  # Show top 20
             click.echo(
                 f"{dev['primary_name']:<30} "
@@ -423,19 +871,19 @@ def list_developers(config: Path):
                 f"{dev['total_story_points']:<10} "
                 f"{dev['alias_count']}"
             )
-
+
         if len(developers) > 20:
             click.echo(f"\n... and {len(developers) - 20} more developers")
-
+
     except Exception as e:
         click.echo(f"❌ Error: {e}", err=True)
         sys.exit(1)


-def main():
+def main() -> None:
     """Main entry point."""
     cli()


-if __name__ ==
-    main()
+if __name__ == "__main__":
+    main()