gitflow-analytics 3.13.0__py3-none-any.whl → 3.13.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gitflow_analytics/_version.py +1 -1
- gitflow_analytics/cli.py +267 -77
- gitflow_analytics/cli_wizards/menu.py +147 -6
- gitflow_analytics/config/loader.py +3 -1
- gitflow_analytics/config/profiles.py +1 -2
- gitflow_analytics/core/data_fetcher.py +0 -2
- gitflow_analytics/core/git_auth.py +74 -0
- gitflow_analytics/core/git_timeout_wrapper.py +8 -0
- gitflow_analytics/extractors/tickets.py +3 -1
- gitflow_analytics/integrations/github_integration.py +1 -1
- gitflow_analytics/integrations/jira_integration.py +1 -1
- gitflow_analytics/qualitative/chatgpt_analyzer.py +15 -15
- gitflow_analytics/qualitative/classifiers/llm/prompts.py +1 -1
- gitflow_analytics/qualitative/core/processor.py +1 -2
- gitflow_analytics/qualitative/enhanced_analyzer.py +24 -8
- gitflow_analytics/ui/progress_display.py +14 -6
- gitflow_analytics/verify_activity.py +1 -1
- {gitflow_analytics-3.13.0.dist-info → gitflow_analytics-3.13.6.dist-info}/METADATA +37 -1
- {gitflow_analytics-3.13.0.dist-info → gitflow_analytics-3.13.6.dist-info}/RECORD +23 -23
- {gitflow_analytics-3.13.0.dist-info → gitflow_analytics-3.13.6.dist-info}/WHEEL +0 -0
- {gitflow_analytics-3.13.0.dist-info → gitflow_analytics-3.13.6.dist-info}/entry_points.txt +0 -0
- {gitflow_analytics-3.13.0.dist-info → gitflow_analytics-3.13.6.dist-info}/licenses/LICENSE +0 -0
- {gitflow_analytics-3.13.0.dist-info → gitflow_analytics-3.13.6.dist-info}/top_level.txt +0 -0
@@ -5,6 +5,7 @@ is run without arguments, offering options for configuration, alias management,
 analysis execution, and more.
 """

+import contextlib
 import logging
 import os
 import subprocess

@@ -98,10 +99,8 @@ def _atomic_yaml_write(config_path: Path, config_data: dict) -> None:
     except Exception as e:
         # Cleanup temp file on error
         if temp_fd is not None:
-            try:
+            with contextlib.suppress(Exception):
                 os.close(temp_fd)
-            except Exception:
-                pass

         if temp_path and temp_path.exists():
             temp_path.unlink(missing_ok=True)

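For context on the change above: `contextlib.suppress` is the standard-library one-statement equivalent of a `try/except: pass` block. A minimal, self-contained sketch (not code from the package) showing the two forms side by side:

```python
import contextlib
import os
import tempfile

temp_fd, temp_path = tempfile.mkstemp(suffix=".yaml")

# Old pattern removed above: swallow any error raised while closing.
try:
    os.close(temp_fd)
except Exception:
    pass

# New pattern: contextlib.suppress() is the one-statement equivalent.
# Closing the already-closed descriptor raises OSError, which is suppressed here.
with contextlib.suppress(Exception):
    os.close(temp_fd)

os.unlink(temp_path)  # tidy up the temp file created for this demo
```
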
@@ -242,7 +241,7 @@ def validate_config(config_path: Path) -> bool:

             # Line before
             if mark.line > 0:
-                click.echo(f" {mark.line}: {lines[mark.line-1].rstrip()}", err=True)
+                click.echo(f" {mark.line}: {lines[mark.line - 1].rstrip()}", err=True)

             # Problematic line (highlighted)
             click.echo(

@@ -256,7 +255,7 @@ def validate_config(config_path: Path) -> bool:

             # Line after
             if mark.line + 1 < len(lines):
-                click.echo(f" {mark.line + 2}: {lines[mark.line+1].rstrip()}", err=True)
+                click.echo(f" {mark.line + 2}: {lines[mark.line + 1].rstrip()}", err=True)
         except Exception:
             # If we can't read file, just skip context
             pass

@@ -562,6 +561,145 @@ def run_full_analysis(config_path: Path) -> bool:
     return success


+def rename_developer_alias(config_path: Path) -> bool:
+    """Interactive interface for renaming developer aliases.
+
+    Args:
+        config_path: Path to config.yaml file
+
+    Returns:
+        True if rename succeeded, False otherwise.
+    """
+    click.echo("\n" + "=" * 60)
+    click.echo(click.style("Rename Developer Alias", fg="cyan", bold=True))
+    click.echo("=" * 60 + "\n")
+
+    click.echo("Update a developer's canonical display name in reports.")
+    click.echo("This updates the configuration file and optionally the cache.\n")
+
+    try:
+        # Load config to get manual_mappings
+        with open(config_path) as f:
+            config_data = yaml.safe_load(f)
+
+        # Navigate to manual_mappings
+        manual_mappings = (
+            config_data.get("analysis", {}).get("identity", {}).get("manual_mappings", [])
+        )
+
+        if not manual_mappings:
+            click.echo(
+                click.style(
+                    "❌ No manual_mappings found in config. Please add developers first.", fg="red"
+                ),
+                err=True,
+            )
+            return False
+
+        # Display numbered list of developers
+        click.echo(click.style("Current Developers:", fg="cyan", bold=True))
+        click.echo()
+
+        developer_names = []
+        for idx, mapping in enumerate(manual_mappings, 1):
+            name = mapping.get("name", "Unknown")
+            email = mapping.get("primary_email", "N/A")
+            alias_count = len(mapping.get("aliases", []))
+
+            developer_names.append(name)
+            click.echo(f" {idx}. {click.style(name, fg='green')}")
+            click.echo(f" Email: {email}")
+            click.echo(f" Aliases: {alias_count} email(s)")
+            click.echo()
+
+        # Prompt for selection
+        try:
+            selection = click.prompt(
+                "Select developer number to rename (or 0 to cancel)",
+                type=click.IntRange(0, len(developer_names)),
+            )
+        except click.Abort:
+            click.echo(click.style("\n❌ Cancelled", fg="yellow"))
+            return False
+
+        if selection == 0:
+            click.echo(click.style("\n❌ Cancelled", fg="yellow"))
+            return False
+
+        # Get selected developer name
+        old_name = developer_names[selection - 1]
+        click.echo(f"\n📝 Selected: {click.style(old_name, fg='green')}")
+
+        # Prompt for new name
+        new_name = click.prompt("Enter new canonical name", type=str)
+
+        # Validate new name
+        new_name = new_name.strip()
+        if not new_name:
+            click.echo(click.style("❌ New name cannot be empty", fg="red"), err=True)
+            return False
+
+        if new_name == old_name:
+            click.echo(click.style("❌ New name is identical to current name", fg="yellow"))
+            return False
+
+        # Ask about cache update
+        update_cache = click.confirm("\nAlso update database cache?", default=True)
+
+        # Show what will be done
+        click.echo("\n" + "=" * 60)
+        click.echo(click.style("Summary", fg="yellow", bold=True))
+        click.echo("=" * 60)
+        click.echo(f" Old name: {old_name}")
+        click.echo(f" New name: {new_name}")
+        click.echo(f" Update cache: {'Yes' if update_cache else 'No'}")
+        click.echo()
+
+        # Confirm
+        if not click.confirm("Proceed with rename?", default=True):
+            click.echo(click.style("\n❌ Cancelled", fg="yellow"))
+            return False
+
+    except Exception as e:
+        click.echo(click.style(f"❌ Error reading config: {e}", fg="red"), err=True)
+        logger.error(f"Config read error: {type(e).__name__}: {e}")
+        return False
+
+    try:
+        # Validate config path
+        _validate_subprocess_path(config_path)
+    except ValueError as e:
+        click.echo(click.style(f"❌ Invalid config path: {e}", fg="red"), err=True)
+        logger.error(f"Config path validation failed: {e}")
+        return False
+
+    # Build command
+    cmd = [
+        sys.executable,
+        "-m",
+        "gitflow_analytics.cli",
+        "alias-rename",
+        "-c",
+        str(config_path),
+        "--old-name",
+        old_name,
+        "--new-name",
+        new_name,
+    ]
+
+    if update_cache:
+        cmd.append("--update-cache")
+
+    # Run with timeout
+    success = _run_subprocess_safely(cmd, operation_name="Alias Rename", timeout=60)
+
+    if success:
+        click.echo(click.style("\n✅ Rename completed successfully!", fg="green"))
+        click.echo(f"Future reports will show '{new_name}' instead of '{old_name}'")
+
+    return success
+
+
 def show_main_menu(config_path: Optional[Path] = None) -> None:
     """Display main interactive menu.

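The wizard added in this hunk does not rename anything itself; it builds and runs the `alias-rename` subcommand shown in its `cmd` list. A hedged sketch of calling that subcommand directly, with placeholder config path and developer names:

```python
import subprocess
import sys

# Placeholder values; substitute your own config path and developer names.
cmd = [
    sys.executable,
    "-m",
    "gitflow_analytics.cli",
    "alias-rename",
    "-c", "config.yaml",
    "--old-name", "jsmith",
    "--new-name", "Jane Smith",
    "--update-cache",  # optional: also rewrite cached identity records
]

# The wizard wraps this call in a helper with a 60-second timeout; a plain
# equivalent looks like this.
result = subprocess.run(cmd, timeout=60)
print("rename succeeded" if result.returncode == 0 else "rename failed")
```
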
@@ -597,13 +735,14 @@ def show_main_menu(config_path: Optional[Path] = None) -> None:
     click.echo(" 3. Re-pull Data (Re-run Analysis)")
     click.echo(" 4. Set Number of Weeks")
     click.echo(" 5. Run Full Analysis")
+    click.echo(" 6. Rename Developer Alias")
     click.echo(" 0. Exit")

     # Get user choice
     click.echo()
     choice = click.prompt(
         click.style("Enter your choice", fg="yellow"),
-        type=click.Choice(["0", "1", "2", "3", "4", "5"], case_sensitive=False),
+        type=click.Choice(["0", "1", "2", "3", "4", "5", "6"], case_sensitive=False),
         show_choices=False,
     )

@@ -623,6 +762,8 @@ def show_main_menu(config_path: Optional[Path] = None) -> None:
             success = set_weeks(config_path)
         elif choice == "5":
             success = run_full_analysis(config_path)
+        elif choice == "6":
+            success = rename_developer_alias(config_path)

         # Show warning if operation failed
         if not success and choice != "0":

@@ -968,7 +968,9 @@ class ConfigLoader:
                     (
                         cls._resolve_env_var(item)
                         if isinstance(item, str)
-                        else cls._resolve_config_dict(item)
+                        else cls._resolve_config_dict(item)
+                        if isinstance(item, dict)
+                        else item
                     )
                     for item in value
                 ]

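The extra `isinstance(item, dict)` branch above means list items that are neither strings nor dicts (numbers, booleans, nulls) now pass through untouched instead of being handed to the dict resolver. A standalone sketch of that pattern, using `os.path.expandvars` as a stand-in for the loader's own `_resolve_env_var` (assumed behavior, not the real API):

```python
import os


def resolve_value(value):
    """Illustrative stand-in for the loader's env-var resolution, not the real API."""
    if isinstance(value, str):
        return os.path.expandvars(value)  # "${HOME}" -> "/home/alice", etc.
    if isinstance(value, dict):
        return {key: resolve_value(val) for key, val in value.items()}
    if isinstance(value, list):
        # Mirrors the fixed comprehension: strings get expanded, dicts recurse,
        # and anything else (ints, bools, None) is passed through unchanged.
        return [
            resolve_value(item)
            if isinstance(item, str)
            else resolve_value(item)
            if isinstance(item, dict)
            else item
            for item in value
        ]
    return value


print(resolve_value(["${HOME}", {"weeks": "${WEEKS}"}, 12, True]))
```
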
@@ -234,8 +234,7 @@ class ProfileManager:
         if not profile_class:
             available = ", ".join(cls._profiles.keys())
             raise ValueError(
-                f"Unknown configuration profile: {profile_name}. "
-                f"Available profiles: {available}"
+                f"Unknown configuration profile: {profile_name}. Available profiles: {available}"
             )

         profile_settings = profile_class.get_settings()

@@ -192,7 +192,6 @@ class GitDataFetcher:
             description=f"📊 Processing repository: {project_key}",
             unit="steps",
         ) as repo_progress_ctx:
-
             # Step 1: Fetch commits
             progress.set_description(repo_progress_ctx, f"🔍 {project_key}: Fetching commits")
             daily_commits = self._fetch_commits_by_day(

@@ -538,7 +537,6 @@ class GitDataFetcher:
             unit="days",
             nested=True,
         ) as day_progress_ctx:
-
             for day_date in days_to_process:
                 # Update description to show current repository and day clearly
                 day_str = day_date.strftime("%Y-%m-%d")

@@ -112,6 +112,80 @@ def setup_git_credentials(token: str, username: str = "git") -> bool:
         return False


+def ensure_remote_url_has_token(repo_path: Path, token: str) -> bool:
+    """Embed GitHub token in remote URL for HTTPS authentication.
+
+    This is needed because subprocess git operations may not have access
+    to the credential helper store due to environment variable restrictions
+    (GIT_CREDENTIAL_HELPER="" and GIT_ASKPASS="/bin/echo" in git_timeout_wrapper).
+
+    Args:
+        repo_path: Path to the git repository
+        token: GitHub personal access token
+
+    Returns:
+        True if URL was updated with token, False if already has token,
+        not applicable (SSH URL), or operation failed
+    """
+    if not token:
+        logger.debug("No token provided, skipping remote URL update")
+        return False
+
+    try:
+        # Get current origin remote URL
+        result = subprocess.run(
+            ["git", "remote", "get-url", "origin"],
+            cwd=repo_path,
+            capture_output=True,
+            text=True,
+            check=True,
+        )
+        current_url = result.stdout.strip()
+
+        if not current_url:
+            logger.debug(f"No origin remote found for {repo_path}")
+            return False
+
+        # Check if it's an HTTPS GitHub URL without embedded token
+        if current_url.startswith("https://github.com/"):
+            # URL format: https://github.com/org/repo.git
+            # New format: https://git:TOKEN@github.com/org/repo.git
+            new_url = current_url.replace("https://github.com/", f"https://git:{token}@github.com/")
+
+            # Update the remote URL
+            subprocess.run(
+                ["git", "remote", "set-url", "origin", new_url],
+                cwd=repo_path,
+                capture_output=True,
+                text=True,
+                check=True,
+            )
+            logger.debug(f"Updated remote URL with embedded token for {repo_path.name}")
+            return True
+
+        elif "@github.com" in current_url:
+            # Already has authentication embedded (either token or SSH)
+            logger.debug(f"Remote URL already has authentication for {repo_path.name}")
+            return False
+
+        elif current_url.startswith("git@github.com:"):
+            # SSH URL, no need to modify
+            logger.debug(f"Using SSH authentication for {repo_path.name}")
+            return False
+
+        else:
+            # Unknown URL format
+            logger.debug(f"Unknown URL format for {repo_path.name}: {current_url}")
+            return False
+
+    except subprocess.CalledProcessError as e:
+        logger.warning(f"Could not update remote URL for {repo_path.name}: {e.stderr}")
+        return False
+    except Exception as e:
+        logger.warning(f"Unexpected error updating remote URL for {repo_path.name}: {e}")
+        return False
+
+
 def preflight_git_authentication(config: dict) -> bool:
     """Run pre-flight checks for git authentication and setup credentials.

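The new `ensure_remote_url_has_token` helper boils down to one string rewrite on the origin URL; everything else is guard clauses and logging. A minimal sketch of that rewrite with a placeholder token:

```python
def embed_token(url: str, token: str) -> str:
    """Sketch of the URL rewrite performed by ensure_remote_url_has_token()."""
    if url.startswith("https://github.com/"):
        return url.replace("https://github.com/", f"https://git:{token}@github.com/")
    return url  # SSH URLs and already-authenticated URLs are left alone


# "ghp_example123" is a placeholder, not a real token.
print(embed_token("https://github.com/acme/widgets.git", "ghp_example123"))
# -> https://git:ghp_example123@github.com/acme/widgets.git
print(embed_token("git@github.com:acme/widgets.git", "ghp_example123"))
# -> unchanged
```

One practical consequence of rewriting the remote with `git remote set-url` is that the embedded token is stored in plain text in the repository's `.git/config`.
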
@@ -14,6 +14,7 @@ from pathlib import Path
 from typing import Callable, Optional, TypeVar

 from ..constants import Timeouts
+from .git_auth import ensure_remote_url_has_token

 logger = logging.getLogger(__name__)

@@ -208,6 +209,13 @@ class GitTimeoutWrapper:
             True if fetch succeeded, False otherwise
         """
         try:
+            # Embed GitHub token in remote URL if available
+            # This is necessary because git operations run with GIT_CREDENTIAL_HELPER=""
+            # and GIT_ASKPASS="/bin/echo", which disable credential helpers
+            token = os.environ.get("GITHUB_TOKEN")
+            if token:
+                ensure_remote_url_has_token(repo_path, token)
+
             self.run_git_command(
                 ["git", "fetch", "--all"], cwd=repo_path, timeout=timeout, check=True
             )

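This path only activates when `GITHUB_TOKEN` is present in the environment, so exporting the token before a run is what enables authenticated fetches here. A hypothetical call site mirroring the new logic (the repository path is a placeholder):

```python
import os
from pathlib import Path

from gitflow_analytics.core.git_auth import ensure_remote_url_has_token

# Hypothetical usage; the path below is a placeholder, not a real repository.
token = os.environ.get("GITHUB_TOKEN")
if token:
    ensure_remote_url_has_token(Path("~/repos/widgets").expanduser(), token)
else:
    print("GITHUB_TOKEN not set; fetch relies on whatever auth the remote URL already has")
```
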
@@ -503,7 +503,9 @@ class TicketExtractor:
                         :100
                     ], # Increased from 60 to 100
                     "full_message": commit.get("message", ""),
-                    "author": commit.get(
+                    "author": commit.get(
+                        "canonical_name", commit.get("author_name", "Unknown")
+                    ),
                     "author_email": commit.get("author_email", ""),
                     "canonical_id": commit.get("canonical_id", commit.get("author_email", "")),
                     "timestamp": commit.get("timestamp"),

@@ -101,7 +101,7 @@ class GitHubIntegration:

         if cache_hits > 0 or cache_misses > 0:
             print(
-                f" 📊 GitHub PR cache: {cache_hits} hits, {cache_misses} misses ({cache_hits/(cache_hits+cache_misses)*100:.1f}% hit rate)"
+                f" 📊 GitHub PR cache: {cache_hits} hits, {cache_misses} misses ({cache_hits / (cache_hits + cache_misses) * 100:.1f}% hit rate)"
                 if (cache_hits + cache_misses) > 0
                 else ""
             )

@@ -186,7 +186,7 @@ class JIRAIntegration:

         if cache_hits > 0 or cache_misses > 0:
             print(
-                f" 📊 JIRA cache: {cache_hits} hits, {cache_misses} misses ({cache_hits/(cache_hits+cache_misses)*100:.1f}% hit rate)"
+                f" 📊 JIRA cache: {cache_hits} hits, {cache_misses} misses ({cache_hits / (cache_hits + cache_misses) * 100:.1f}% hit rate)"
             )

         # Fetch missing tickets from JIRA

@@ -169,16 +169,16 @@ class ChatGPTQualitativeAnalyzer:
     def _create_executive_summary_prompt(self, summary_data: dict[str, Any]) -> str:
         """Create the prompt for ChatGPT."""

-        prompt = f"""Based on the following GitFlow Analytics data from the past {summary_data[
+        prompt = f"""Based on the following GitFlow Analytics data from the past {summary_data["period_weeks"]} weeks, provide a comprehensive executive summary with qualitative insights:

 ## Key Metrics:
-- Total Commits: {summary_data[
-- Active Developers: {summary_data[
-- Lines Changed: {summary_data[
-- Story Points Delivered: {summary_data[
-- Ticket Coverage: {summary_data[
-- Team Health Score: {summary_data[
-- Velocity Trend: {summary_data[
+- Total Commits: {summary_data["total_commits"]:,}
+- Active Developers: {summary_data["total_developers"]}
+- Lines Changed: {summary_data["lines_changed"]:,}
+- Story Points Delivered: {summary_data["story_points"]}
+- Ticket Coverage: {summary_data["ticket_coverage"]:.1f}%
+- Team Health Score: {summary_data["team_health_score"]:.1f}/100 ({summary_data["team_health_rating"]})
+- Velocity Trend: {summary_data["velocity_trend"]}

 ## Top Contributors:
 """

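The rebuilt prompt leans on standard Python format specs: `:,` for thousands separators and `:.1f` for one decimal place. A small sketch with made-up sample values (not from a real analysis run) showing how one metrics block renders:

```python
# Sample values chosen for illustration only; they are not from a real analysis run.
summary_data = {
    "period_weeks": 12,
    "total_commits": 1482,
    "total_developers": 9,
    "lines_changed": 210345,
    "ticket_coverage": 63.4567,
}

print(f"- Total Commits: {summary_data['total_commits']:,}")         # -> 1,482
print(f"- Lines Changed: {summary_data['lines_changed']:,}")         # -> 210,345
print(f"- Ticket Coverage: {summary_data['ticket_coverage']:.1f}%")  # -> 63.5%
```
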
@@ -222,18 +222,18 @@ Report only statistical patterns, measurable trends, and process gaps. Use factu

         return f"""## Executive Summary

-Over the past {summary_data[
+Over the past {summary_data["period_weeks"]} weeks, the development team generated {summary_data["total_commits"]:,} commits across {summary_data["total_developers"]} active developers.

-The team health score measured {summary_data[
+The team health score measured {summary_data["team_health_score"]:.1f}/100 ({summary_data["team_health_rating"]}). Ticket coverage reached {summary_data["ticket_coverage"]:.1f}% of total commits with trackable references.

 ### Measured Outputs:
-- Code changes: {summary_data[
-- Story points completed: {summary_data[
-- Velocity trend: {summary_data[
+- Code changes: {summary_data["lines_changed"]:,} lines modified
+- Story points completed: {summary_data["story_points"]}
+- Velocity trend: {summary_data["velocity_trend"]}

 ### Process Recommendations:
-1. {
-2. {
+1. {"Maintain current output rate" if summary_data["velocity_trend"] == "increasing" else "Analyze velocity decline factors"}
+2. {"Sustain current tracking rate" if summary_data["ticket_coverage"] > 60 else "Increase commit-ticket linking to reach 70% coverage target"}
 3. Review projects with health scores below 60/100 for process gaps

 *Note: This is a fallback summary. For detailed analysis, configure ChatGPT integration.*

@@ -353,7 +353,7 @@ Response (format: CATEGORY confidence reasoning):",
         """
         formatted = []
         for i, example in enumerate(examples, 1):
-            formatted.append(f
+            formatted.append(f'{i}. Message: "{example["message"]}"')
             formatted.append(f" Response: {example['response']}")
         return "\n".join(formatted)

@@ -577,8 +577,7 @@ class QualitativeProcessor:
         llm_pct = (llm_processed / total_commits) * 100 if total_commits > 0 else 0

         self.logger.info(
-            f"Processing breakdown: {cache_pct:.1f}% cached, "
-            f"{nlp_pct:.1f}% NLP, {llm_pct:.1f}% LLM"
+            f"Processing breakdown: {cache_pct:.1f}% cached, {nlp_pct:.1f}% NLP, {llm_pct:.1f}% LLM"
         )

     def _should_optimize_cache(self) -> bool:

@@ -906,7 +906,9 @@ class EnhancedQualitativeAnalyzer:
                 "status": (
                     "excellent"
                     if activity_score >= 80
-                    else "good"
+                    else "good"
+                    if activity_score >= 60
+                    else "needs_improvement"
                 ),
             },
             "contributor_diversity": {

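This hunk and the similar ones that follow encode the same 80/60 banding for the score-based statuses (activity, ticket coverage, message quality, commit size, overall score). A hedged helper expressing that mapping, for illustration only; the package keeps the inline conditional form:

```python
def score_to_status(score: float) -> str:
    """Illustrative helper: the 80/60 banding used by the chained conditionals above."""
    if score >= 80:
        return "excellent"
    if score >= 60:
        return "good"
    return "needs_improvement"


assert score_to_status(85) == "excellent"
assert score_to_status(72) == "good"
assert score_to_status(41) == "needs_improvement"
```
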
@@ -915,7 +917,9 @@ class EnhancedQualitativeAnalyzer:
                 "status": (
                     "excellent"
                     if len(contributors) >= 4
-                    else "good"
+                    else "good"
+                    if len(contributors) >= 2
+                    else "concerning"
                 ),
             },
             "pr_velocity": {

@@ -929,7 +933,9 @@ class EnhancedQualitativeAnalyzer:
                 "status": (
                     "excellent"
                     if ticket_coverage >= 80
-                    else "good"
+                    else "good"
+                    if ticket_coverage >= 60
+                    else "needs_improvement"
                 ),
             },
         }

@@ -948,7 +954,9 @@ class EnhancedQualitativeAnalyzer:
             "status": (
                 "excellent"
                 if overall_score >= 80
-                else "good"
+                else "good"
+                if overall_score >= 60
+                else "needs_improvement"
             ),
         }

@@ -1918,7 +1926,9 @@ class EnhancedQualitativeAnalyzer:
                 "status": (
                     "excellent"
                     if ticket_coverage >= 80
-                    else "good"
+                    else "good"
+                    if ticket_coverage >= 60
+                    else "needs_improvement"
                 ),
             },
             "message_quality": {

@@ -1926,7 +1936,9 @@ class EnhancedQualitativeAnalyzer:
                 "status": (
                     "excellent"
                     if message_quality >= 80
-                    else "good"
+                    else "good"
+                    if message_quality >= 60
+                    else "needs_improvement"
                 ),
             },
             "commit_size_compliance": {

@@ -1934,7 +1946,9 @@ class EnhancedQualitativeAnalyzer:
                 "status": (
                     "excellent"
                     if size_compliance >= 80
-                    else "good"
+                    else "good"
+                    if size_compliance >= 60
+                    else "needs_improvement"
                 ),
             },
             "pr_approval_rate": {"score": pr_approval_rate, "status": "good"}, # Placeholder

@@ -1986,7 +2000,9 @@ class EnhancedQualitativeAnalyzer:
             "collaboration_level": (
                 "high"
                 if collaboration_score >= 70
-                else "medium"
+                else "medium"
+                if collaboration_score >= 40
+                else "low"
             ),
             "patterns": {
                 "multi_project_engagement": cross_collaboration_rate >= 50,

@@ -457,12 +457,16 @@ class RichProgressDisplay:
             mem_icon = (
                 "🟢"
                 if self.statistics.memory_usage < 500
-                else "🟡"
+                else "🟡"
+                if self.statistics.memory_usage < 1000
+                else "🔴"
             )
             cpu_icon = (
                 "🟢"
                 if self.statistics.cpu_percent < 50
-                else "🟡"
+                else "🟡"
+                if self.statistics.cpu_percent < 80
+                else "🔴"
             )
             system_stats.append(f"{mem_icon} Memory: {self.statistics.memory_usage:.0f} MB")
             system_stats.append(f"{cpu_icon} CPU: {self.statistics.cpu_percent:.1f}%")

@@ -471,7 +475,9 @@ class RichProgressDisplay:
             speed_icon = (
                 "🚀"
                 if self.statistics.processing_speed > 100
-                else "⚡"
+                else "⚡"
+                if self.statistics.processing_speed > 50
+                else "🐢"
             )
             system_stats.append(
                 f"{speed_icon} Speed: {self.statistics.processing_speed:.1f} commits/s"

@@ -484,7 +490,9 @@ class RichProgressDisplay:
         phase_indicator = (
             "⚙️"
             if "Processing" in self.statistics.current_phase
-            else "🔍"
+            else "🔍"
+            if "Analyzing" in self.statistics.current_phase
+            else "✨"
         )
         phase_text = f"{phase_indicator} [bold green]{self.statistics.current_phase}[/bold green]"
         elapsed_text = f"⏱️ [bold blue]{self.statistics.get_elapsed_time()}[/bold blue]"

@@ -1250,9 +1258,9 @@ class SimpleProgressDisplay:
     # Compatibility methods for CLI interface
     def show_header(self):
         """Display header - compatibility method for CLI."""
-        print(f"\n{'='*60}")
+        print(f"\n{'=' * 60}")
         print(f"GitFlow Analytics v{self.version}")
-        print(f"{'='*60}\n")
+        print(f"{'=' * 60}\n")

     def start_live_display(self):
         """Start live display - compatibility wrapper for start()."""

@@ -636,7 +636,7 @@ class ActivityVerifier:
             # Group consecutive days
             lines.append(f"Found {len(zero_activity_days)} days with no activity:")
             for i in range(0, len(zero_activity_days), 7):
-                lines.append(f" {', '.join(zero_activity_days[i:i+7])}")
+                lines.append(f" {', '.join(zero_activity_days[i : i + 7])}")
         else:
             lines.append("No days with zero activity found!")

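The reformatted slice above is the usual pattern for printing a long list in fixed-width rows: step the start index by 7 and slice 7 items at a time. A tiny example with placeholder dates:

```python
# Placeholder dates for illustration; the verifier prints real zero-activity days.
zero_activity_days = [f"2025-01-{day:02d}" for day in range(1, 18)]

for i in range(0, len(zero_activity_days), 7):
    print(", ".join(zero_activity_days[i : i + 7]))  # one row of up to 7 days
```
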
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: gitflow-analytics
-Version: 3.13.0
+Version: 3.13.6
 Summary: Analyze Git repositories for developer productivity insights
 Author-email: Bob Matyas <bobmatnyc@gmail.com>
 License: MIT

@@ -1183,6 +1183,42 @@ gitflow-analytics analyze -c config.yaml --debug

 Contributions are welcome! Please feel free to submit a Pull Request.

+### Development Setup
+
+```bash
+# Clone the repository
+git clone https://github.com/bobmatnyc/gitflow-analytics.git
+cd gitflow-analytics
+
+# Install development dependencies
+make install-dev
+
+# Run tests
+make test
+
+# Format code
+make format
+
+# Run all quality checks
+make quality-gate
+```
+
+### Release Workflow
+
+This project uses a Makefile-based release workflow for simplicity and transparency. See [RELEASE.md](RELEASE.md) for detailed documentation.
+
+**Quick Reference:**
+```bash
+make release-patch # Bug fixes (3.13.1 → 3.13.2)
+make release-minor # New features (3.13.1 → 3.14.0)
+make release-major # Breaking changes (3.13.1 → 4.0.0)
+```
+
+For more details, see:
+- [RELEASE.md](RELEASE.md) - Comprehensive release guide
+- [RELEASE_QUICKREF.md](RELEASE_QUICKREF.md) - Quick reference card
+- `make help` - All available commands
+
 ## License

 This project is licensed under the MIT License - see the LICENSE file for details.