gitflow-analytics 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,311 @@
1
+ """CSV report generation for GitFlow Analytics."""
2
+ import csv
3
+ from datetime import datetime, timedelta
4
+ from pathlib import Path
5
+ from typing import List, Dict, Any, Optional
6
+ from collections import defaultdict
7
+ import pandas as pd
8
+
9
+
10
class CSVReportGenerator:
    """Generate CSV reports with weekly metrics.

    Produces three CSVs: a weekly per-developer/per-project metrics report,
    a summary statistics report, and a per-developer statistics report.
    Optionally anonymizes developer names, emails and ids with stable
    pseudonyms (stable within one generator instance).
    """

    def __init__(self, anonymize: bool = False):
        """Initialize report generator.

        Args:
            anonymize: When True, developer identifiers are replaced with
                stable pseudonyms; identical inputs map to identical outputs
                for the lifetime of this instance.
        """
        self.anonymize = anonymize
        # (field_type, value) -> pseudonym.  Keyed by field type as well as
        # the raw value so the same string reused across fields (e.g. a name
        # equal to an email local part) cannot receive a pseudonym in the
        # wrong format.
        self._anonymization_map: Dict[Any, str] = {}
        self._anonymous_counter = 0

    def generate_weekly_report(self, commits: List[Dict[str, Any]],
                               developer_stats: List[Dict[str, Any]],
                               output_path: Path,
                               weeks: int = 12) -> Path:
        """Generate weekly metrics CSV report.

        Args:
            commits: Enriched commit dicts; 'timestamp' is required, metric
                keys (insertions, deletions, ...) default to 0 when absent.
            developer_stats: Per-developer summaries keyed by 'canonical_id'.
            output_path: Destination CSV path.
            weeks: Trailing number of weeks (ending now) to include.

        Returns:
            The path the CSV was written to.
        """
        # Reporting window: trailing `weeks` weeks ending at the current
        # local time (naive datetimes throughout).
        end_date = datetime.now()
        start_date = end_date - timedelta(weeks=weeks)

        # (week_start, canonical_id, project_key) -> aggregated metrics
        weekly_data = self._aggregate_weekly_data(commits, start_date, end_date)

        # Fast lookup of developer details for each aggregated row.
        dev_lookup = {dev['canonical_id']: dev for dev in developer_stats}

        rows = []
        for (week_start, canonical_id, project_key), metrics in weekly_data.items():
            developer = dev_lookup.get(canonical_id, {})

            row = {
                'week_start': week_start.strftime('%Y-%m-%d'),
                'developer_id': self._anonymize_value(canonical_id, 'id'),
                'developer_name': self._anonymize_value(
                    developer.get('primary_name', 'Unknown'), 'name'
                ),
                'developer_email': self._anonymize_value(
                    developer.get('primary_email', 'unknown@example.com'), 'email'
                ),
                'project': project_key,
                'commits': metrics['commits'],
                'story_points': metrics['story_points'],
                'lines_added': metrics['lines_added'],
                'lines_removed': metrics['lines_removed'],
                'files_changed': metrics['files_changed'],
                'complexity_delta': round(metrics['complexity_delta'], 2),
                'ticket_coverage_pct': round(metrics['ticket_coverage_pct'], 1),
                'avg_commit_size': round(metrics['avg_commit_size'], 1),
                'unique_tickets': metrics['unique_tickets'],
                'prs_involved': metrics['prs_involved']
            }
            rows.append(row)

        # Deterministic ordering makes successive report runs diffable.
        rows.sort(key=lambda x: (x['week_start'], x['developer_name'], x['project']))

        if rows:
            df = pd.DataFrame(rows)
            df.to_csv(output_path, index=False)
        else:
            # No activity in the window: still emit a header-only CSV so
            # downstream consumers find a well-formed file.
            with open(output_path, 'w', newline='') as f:
                writer = csv.DictWriter(f, fieldnames=[
                    'week_start', 'developer_id', 'developer_name', 'developer_email',
                    'project', 'commits', 'story_points', 'lines_added', 'lines_removed',
                    'files_changed', 'complexity_delta', 'ticket_coverage_pct',
                    'avg_commit_size', 'unique_tickets', 'prs_involved'
                ])
                writer.writeheader()

        return output_path

    def generate_summary_report(self, commits: List[Dict[str, Any]],
                                prs: List[Dict[str, Any]],
                                developer_stats: List[Dict[str, Any]],
                                ticket_analysis: Dict[str, Any],
                                output_path: Path) -> Path:
        """Generate summary statistics CSV.

        Emits metric/value/category rows covering overall totals, ticket
        coverage, per-platform ticket counts and top-contributor stats.

        Args:
            commits: Enriched commit dicts (expects 'insertions'/'deletions').
            prs: Pull request dicts (currently unused; kept for interface
                stability).
            developer_stats: Per-developer summaries.
            ticket_analysis: Coverage/summary metrics; missing keys default
                to 0 or empty.
            output_path: Destination CSV path.

        Returns:
            The path the CSV was written to.
        """
        summary_data = []

        total_commits = len(commits)
        # story_points may be absent or explicitly None; treat both as 0.
        total_story_points = sum(c.get('story_points', 0) or 0 for c in commits)
        total_lines = sum(c['insertions'] + c['deletions'] for c in commits)

        summary_data.append({
            'metric': 'Total Commits',
            'value': total_commits,
            'category': 'Overall'
        })

        summary_data.append({
            'metric': 'Total Story Points',
            'value': total_story_points,
            'category': 'Overall'
        })

        summary_data.append({
            'metric': 'Total Lines Changed',
            'value': total_lines,
            'category': 'Overall'
        })

        summary_data.append({
            'metric': 'Active Developers',
            'value': len(developer_stats),
            'category': 'Overall'
        })

        # Ticket coverage percentages.
        summary_data.append({
            'metric': 'Commit Ticket Coverage %',
            'value': round(ticket_analysis.get('commit_coverage_pct', 0), 1),
            'category': 'Tracking'
        })

        summary_data.append({
            'metric': 'PR Ticket Coverage %',
            'value': round(ticket_analysis.get('pr_coverage_pct', 0), 1),
            'category': 'Tracking'
        })

        # Per-platform ticket counts (jira, github, ...).
        for platform, count in ticket_analysis.get('ticket_summary', {}).items():
            summary_data.append({
                'metric': f'{platform.title()} Tickets',
                'value': count,
                'category': 'Platforms'
            })

        # Top contributor by commit count.
        if developer_stats:
            top_contributor = max(developer_stats, key=lambda x: x['total_commits'])
            summary_data.append({
                'metric': 'Top Contributor',
                'value': self._anonymize_value(top_contributor['primary_name'], 'name'),
                'category': 'Developers'
            })

            summary_data.append({
                'metric': 'Top Contributor Commits',
                'value': top_contributor['total_commits'],
                'category': 'Developers'
            })

        df = pd.DataFrame(summary_data)
        df.to_csv(output_path, index=False)

        return output_path

    def generate_developer_report(self, developer_stats: List[Dict[str, Any]],
                                  output_path: Path) -> Path:
        """Generate developer statistics CSV, sorted by commit count.

        Args:
            developer_stats: Per-developer summaries (expects 'canonical_id',
                'primary_name', 'primary_email', 'total_commits',
                'total_story_points', 'alias_count', 'first_seen',
                'last_seen'; 'github_username' is optional).
            output_path: Destination CSV path.

        Returns:
            The path the CSV was written to.
        """
        rows = []

        for dev in developer_stats:
            row = {
                'developer_id': self._anonymize_value(dev['canonical_id'], 'id'),
                'name': self._anonymize_value(dev['primary_name'], 'name'),
                'email': self._anonymize_value(dev['primary_email'], 'email'),
                'github_username': self._anonymize_value(
                    dev.get('github_username', ''), 'username'
                ) if dev.get('github_username') else '',
                'total_commits': dev['total_commits'],
                'total_story_points': dev['total_story_points'],
                'alias_count': dev['alias_count'],
                'first_seen': dev['first_seen'].strftime('%Y-%m-%d') if dev['first_seen'] else '',
                'last_seen': dev['last_seen'].strftime('%Y-%m-%d') if dev['last_seen'] else '',
                # max(..., 1) guards against division by zero for
                # zero-commit developers.
                'avg_story_points_per_commit': round(
                    dev['total_story_points'] / max(dev['total_commits'], 1), 2
                )
            }
            rows.append(row)

        # Most active developers first.
        rows.sort(key=lambda x: x['total_commits'], reverse=True)

        df = pd.DataFrame(rows)
        df.to_csv(output_path, index=False)

        return output_path

    def _aggregate_weekly_data(self, commits: List[Dict[str, Any]],
                               start_date: datetime,
                               end_date: datetime) -> Dict[tuple, Dict[str, Any]]:
        """Aggregate commit data by (week_start, developer, project).

        Commits outside [start_date, end_date] are skipped.  Timezone-aware
        timestamps are converted to naive UTC before comparison (start/end
        are assumed naive).

        Returns:
            Mapping of (week_start, canonical_id, project_key) to a metrics
            dict including derived 'ticket_coverage_pct', 'avg_commit_size',
            'unique_tickets' and 'prs_involved'.
        """
        weekly_data = defaultdict(lambda: {
            'commits': 0,
            'story_points': 0,
            'lines_added': 0,
            'lines_removed': 0,
            'files_changed': 0,
            'complexity_delta': 0.0,
            'commits_with_tickets': 0,
            'tickets': set(),
            'prs': set()
        })

        for commit in commits:
            timestamp = commit['timestamp']
            if hasattr(timestamp, 'tzinfo') and timestamp.tzinfo is not None:
                # Convert aware -> naive UTC.  Subtracting utcoffset()
                # performs the actual conversion to UTC; a bare
                # replace(tzinfo=None) would silently keep local wall-clock
                # time and could bucket commits into the wrong week.
                timestamp = (timestamp - timestamp.utcoffset()).replace(tzinfo=None)

            if timestamp < start_date or timestamp > end_date:
                continue

            # Weeks start on Monday at midnight.
            week_start = self._get_week_start(timestamp)

            project_key = commit.get('project_key', 'unknown')

            # Fall back to the raw author email when identity resolution
            # did not assign a canonical id.
            canonical_id = commit.get('canonical_id', commit.get('author_email', 'unknown'))

            key = (week_start, canonical_id, project_key)

            weekly_data[key]['commits'] += 1
            weekly_data[key]['story_points'] += commit.get('story_points', 0) or 0
            weekly_data[key]['lines_added'] += commit.get('insertions', 0)
            weekly_data[key]['lines_removed'] += commit.get('deletions', 0)
            weekly_data[key]['files_changed'] += commit.get('files_changed', 0)
            weekly_data[key]['complexity_delta'] += commit.get('complexity_delta', 0.0)

            # Ticket references may be dicts ({'full_id': ...}) or plain
            # strings; normalize to strings in the set.
            if commit.get('ticket_references'):
                weekly_data[key]['commits_with_tickets'] += 1
                for ticket in commit['ticket_references']:
                    if isinstance(ticket, dict):
                        weekly_data[key]['tickets'].add(ticket.get('full_id', ''))
                    else:
                        weekly_data[key]['tickets'].add(str(ticket))

            if commit.get('pr_number'):
                weekly_data[key]['prs'].add(commit['pr_number'])

        # Derive percentage/average metrics and drop the working sets so the
        # result is plain CSV-serializable data.
        result = {}
        for key, metrics in weekly_data.items():
            commits_count = metrics['commits']
            if commits_count > 0:
                metrics['ticket_coverage_pct'] = (
                    metrics['commits_with_tickets'] / commits_count * 100
                )
                metrics['avg_commit_size'] = (
                    (metrics['lines_added'] + metrics['lines_removed']) / commits_count
                )
            else:
                metrics['ticket_coverage_pct'] = 0
                metrics['avg_commit_size'] = 0

            metrics['unique_tickets'] = len(metrics['tickets'])
            metrics['prs_involved'] = len(metrics['prs'])

            del metrics['tickets']
            del metrics['prs']
            del metrics['commits_with_tickets']

            result[key] = metrics

        return result

    def _get_week_start(self, date: datetime) -> datetime:
        """Return midnight on the Monday of *date*'s week."""
        days_since_monday = date.weekday()  # Monday == 0
        monday = date - timedelta(days=days_since_monday)
        return monday.replace(hour=0, minute=0, second=0, microsecond=0)

    def _anonymize_value(self, value: str, field_type: str) -> str:
        """Anonymize a value if anonymization is enabled.

        Args:
            value: Raw value to anonymize (may be empty).
            field_type: 'name', 'email', 'id' or any other label; selects
                the pseudonym format.

        Returns:
            The original value when anonymization is off (or value is
            falsy), otherwise a stable pseudonym.  For emails only the
            local part is pseudonymized; the domain is preserved.
        """
        if not self.anonymize or not value:
            return value

        if field_type == 'email' and '@' in value:
            # Keep the domain for context; pseudonymize only the local part.
            local, domain = value.split('@', 1)
            value = local
            suffix = f"@{domain}"
        else:
            suffix = ""

        key = (field_type, value)
        if key not in self._anonymization_map:
            self._anonymous_counter += 1
            if field_type == 'name':
                anonymous = f"Developer{self._anonymous_counter}"
            elif field_type == 'email':
                anonymous = f"dev{self._anonymous_counter}"
            elif field_type == 'id':
                anonymous = f"ID{self._anonymous_counter:04d}"
            else:
                anonymous = f"anon{self._anonymous_counter}"

            self._anonymization_map[key] = anonymous

        return self._anonymization_map[key] + suffix
@@ -0,0 +1,263 @@
1
+ """Narrative report generation in Markdown format."""
2
+ from datetime import datetime
3
+ from pathlib import Path
4
+ from typing import List, Dict, Any
5
+ from io import StringIO
6
+
7
+
8
class NarrativeReportGenerator:
    """Generate human-readable narrative reports in Markdown."""

    def __init__(self):
        """Initialize narrative report generator."""
        # Sentence templates for narrative text.
        # NOTE(review): no method in this class references self.templates —
        # presumably consumed by callers or planned features; confirm before
        # removing.
        self.templates = {
            'high_performer': "{name} led development with {commits} commits ({pct}% of total activity)",
            'multi_project': "{name} worked across {count} projects, primarily on {primary} ({primary_pct}%)",
            'focused_developer': "{name} showed strong focus on {project} with {pct}% of their time",
            'ticket_coverage': "The team maintained {coverage}% ticket coverage, indicating {quality} process adherence",
            'work_distribution': "Work distribution shows a {distribution} pattern with a Gini coefficient of {gini}"
        }

    def generate_narrative_report(self,
                                  commits: List[Dict[str, Any]],
                                  prs: List[Dict[str, Any]],
                                  developer_stats: List[Dict[str, Any]],
                                  activity_dist: List[Dict[str, Any]],
                                  focus_data: List[Dict[str, Any]],
                                  insights: List[Dict[str, Any]],
                                  ticket_analysis: Dict[str, Any],
                                  pr_metrics: Dict[str, Any],
                                  output_path: Path,
                                  weeks: int) -> Path:
        """Generate comprehensive narrative report.

        Assembles all sections into an in-memory buffer, then writes the
        Markdown file in one pass.

        Args:
            commits: Enriched commit dicts.
            prs: Pull request dicts.
            developer_stats: Per-developer summaries, expected sorted by
                activity (index 0 is treated as the top contributor).
            activity_dist: Per-developer/per-project activity rows.
            focus_data: Per-developer focus metrics.
            insights: Qualitative insight dicts (category/insight/value/impact).
            ticket_analysis: Ticket coverage/summary metrics.
            pr_metrics: Aggregate PR metrics; section skipped when empty.
            output_path: Destination Markdown path.
            weeks: Length of the analysis period, for the header.

        Returns:
            The path the report was written to.
        """
        report = StringIO()

        # Header
        report.write("# GitFlow Analytics Report\n\n")
        report.write(f"**Generated**: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n")
        report.write(f"**Analysis Period**: Last {weeks} weeks\n\n")

        report.write("## Executive Summary\n\n")
        self._write_executive_summary(report, commits, developer_stats, ticket_analysis)

        report.write("\n## Team Composition\n\n")
        self._write_team_composition(report, developer_stats, focus_data)

        report.write("\n## Project Activity\n\n")
        self._write_project_activity(report, activity_dist, commits)

        report.write("\n## Development Patterns\n\n")
        self._write_development_patterns(report, insights, focus_data)

        # PR section only when there is PR data to report on.
        if pr_metrics and pr_metrics.get('total_prs', 0) > 0:
            report.write("\n## Pull Request Analysis\n\n")
            self._write_pr_analysis(report, pr_metrics, prs)

        report.write("\n## Issue Tracking\n\n")
        self._write_ticket_tracking(report, ticket_analysis)

        report.write("\n## Recommendations\n\n")
        self._write_recommendations(report, insights, ticket_analysis, focus_data)

        with open(output_path, 'w') as f:
            f.write(report.getvalue())

        return output_path

    def _write_executive_summary(self, report: StringIO, commits: List[Dict[str, Any]],
                                 developer_stats: List[Dict[str, Any]],
                                 ticket_analysis: Dict[str, Any]):
        """Write executive summary section (headline totals)."""
        total_commits = len(commits)
        total_developers = len(developer_stats)
        total_lines = sum(c['insertions'] + c['deletions'] for c in commits)

        report.write(f"- **Total Commits**: {total_commits:,}\n")
        report.write(f"- **Active Developers**: {total_developers}\n")
        report.write(f"- **Lines Changed**: {total_lines:,}\n")
        # Default to 0 rather than raising when coverage was not computed,
        # consistent with the CSV summary report.
        report.write(f"- **Ticket Coverage**: {ticket_analysis.get('commit_coverage_pct', 0):.1f}%\n")

        projects = set(c.get('project_key', 'UNKNOWN') for c in commits)
        report.write(f"- **Active Projects**: {len(projects)} ({', '.join(sorted(projects))})\n")

        # developer_stats is assumed pre-sorted; index 0 is the top contributor.
        if developer_stats:
            top_dev = developer_stats[0]
            report.write(f"- **Top Contributor**: {top_dev['primary_name']} ")
            report.write(f"({top_dev['total_commits']} commits)\n")

    def _write_team_composition(self, report: StringIO, developer_stats: List[Dict[str, Any]],
                                focus_data: List[Dict[str, Any]]):
        """Write team composition analysis (top-10 developer profiles)."""
        report.write("### Developer Profiles\n\n")

        # Focus rows are joined to developers by display name.
        focus_lookup = {d['developer']: d for d in focus_data}

        for dev in developer_stats[:10]:  # Top 10 developers
            name = dev['primary_name']
            commits = dev['total_commits']

            report.write(f"**{name}**\n")
            report.write(f"- Commits: {commits}\n")

            if name in focus_lookup:
                focus = focus_lookup[name]
                report.write(f"- Primary Project: {focus['primary_project']} ")
                report.write(f"({focus['primary_project_pct']:.1f}% of time)\n")
                report.write(f"- Work Style: {focus['work_style']}\n")
                report.write(f"- Active Pattern: {focus['time_pattern']}\n")

            report.write("\n")

    def _write_project_activity(self, report: StringIO, activity_dist: List[Dict[str, Any]],
                                commits: List[Dict[str, Any]]):
        """Write project activity breakdown, most active project first."""
        # Roll per-developer rows up to per-project totals.
        project_totals = {}
        for row in activity_dist:
            project = row['project']
            if project not in project_totals:
                project_totals[project] = {
                    'commits': 0,
                    'lines': 0,
                    'developers': set()
                }
            project_totals[project]['commits'] += row['commits']
            project_totals[project]['lines'] += row['lines_changed']
            project_totals[project]['developers'].add(row['developer'])

        sorted_projects = sorted(project_totals.items(),
                                 key=lambda x: x[1]['commits'], reverse=True)

        report.write("### Activity by Project\n\n")
        for project, data in sorted_projects:
            report.write(f"**{project}**\n")
            report.write(f"- Commits: {data['commits']} ")
            report.write(f"({data['commits'] / len(commits) * 100:.1f}% of total)\n")
            report.write(f"- Lines Changed: {data['lines']:,}\n")
            report.write(f"- Active Developers: {len(data['developers'])}\n\n")

    def _write_development_patterns(self, report: StringIO, insights: List[Dict[str, Any]],
                                    focus_data: List[Dict[str, Any]]):
        """Write development patterns analysis grouped by insight category."""
        report.write("### Key Patterns Identified\n\n")

        by_category = {}
        for insight in insights:
            by_category.setdefault(insight['category'], []).append(insight)

        for category, category_insights in by_category.items():
            report.write(f"**{category}**:\n")
            for insight in category_insights:
                report.write(f"- {insight['insight']}: {insight['value']} ")
                report.write(f"({insight['impact']})\n")
            report.write("\n")

        # Summarize the team's average focus score with a qualitative label.
        if focus_data:
            avg_focus = sum(d['focus_score'] for d in focus_data) / len(focus_data)
            report.write(f"**Developer Focus**: Average focus score of {avg_focus:.1f}% ")

            if avg_focus > 80:
                report.write("indicates strong project concentration\n")
            elif avg_focus > 60:
                report.write("shows moderate multi-project work\n")
            else:
                report.write("suggests high context switching\n")

    def _write_pr_analysis(self, report: StringIO, pr_metrics: Dict[str, Any],
                           prs: List[Dict[str, Any]]):
        """Write pull request analysis.

        Caller guarantees total_prs > 0.  `prs` is currently unused; kept
        for interface stability.
        """
        report.write(f"- **Total PRs Merged**: {pr_metrics['total_prs']}\n")
        report.write(f"- **Average PR Size**: {pr_metrics['avg_pr_size']:.0f} lines\n")
        report.write(f"- **Average PR Lifetime**: {pr_metrics['avg_pr_lifetime_hours']:.1f} hours\n")
        report.write(f"- **Story Point Coverage**: {pr_metrics['story_point_coverage']:.1f}%\n")

        if pr_metrics['total_review_comments'] > 0:
            report.write(f"- **Total Review Comments**: {pr_metrics['total_review_comments']}\n")
            avg_comments = pr_metrics['total_review_comments'] / pr_metrics['total_prs']
            report.write(f"- **Average Comments per PR**: {avg_comments:.1f}\n")

    def _write_ticket_tracking(self, report: StringIO, ticket_analysis: Dict[str, Any]):
        """Write ticket tracking analysis (platform usage + coverage)."""
        report.write("### Platform Usage\n\n")

        ticket_summary = ticket_analysis.get('ticket_summary', {})
        total_tickets = sum(ticket_summary.values())

        for platform, count in sorted(ticket_summary.items(),
                                      key=lambda x: x[1], reverse=True):
            # Guard against division by zero when no tickets were found.
            pct = count / total_tickets * 100 if total_tickets > 0 else 0
            report.write(f"- **{platform.title()}**: {count} tickets ({pct:.1f}%)\n")

        report.write("\n### Coverage Analysis\n\n")
        report.write(f"- **Commits with Tickets**: {ticket_analysis.get('commits_with_tickets', 0)} ")
        report.write(f"of {ticket_analysis.get('total_commits', 0)} ")
        report.write(f"({ticket_analysis.get('commit_coverage_pct', 0):.1f}%)\n")

        # Show at most 5 notable commits lacking ticket references.
        if ticket_analysis.get('untracked_commits'):
            report.write("\n### Significant Untracked Work\n\n")
            for commit in ticket_analysis['untracked_commits'][:5]:
                report.write(f"- `{commit['hash']}`: {commit['message']} ")
                report.write(f"({commit['files_changed']} files)\n")

    def _write_recommendations(self, report: StringIO, insights: List[Dict[str, Any]],
                               ticket_analysis: Dict[str, Any], focus_data: List[Dict[str, Any]]):
        """Write recommendations derived from coverage, insights and focus data."""
        recommendations = []

        # Low ticket coverage -> process recommendation.  Missing coverage
        # data is treated as 0 (recommendation emitted).
        coverage = ticket_analysis.get('commit_coverage_pct', 0)
        if coverage < 50:
            recommendations.append(
                "🎫 **Improve ticket tracking**: Current coverage is below 50%. "
                "Consider enforcing ticket references in commit messages or PR descriptions."
            )

        # Unbalanced work distribution insight -> workload recommendation.
        for insight in insights:
            if insight['insight'] == 'Work distribution':
                if 'unbalanced' in insight['value'].lower():
                    recommendations.append(
                        "⚖️ **Balance workload**: Work is concentrated among few developers. "
                        "Consider distributing tasks more evenly or adding team members."
                    )

        # Majority of developers below 50% focus -> context-switching warning.
        if focus_data:
            low_focus = [d for d in focus_data if d['focus_score'] < 50]
            if len(low_focus) > len(focus_data) / 2:
                recommendations.append(
                    "🎯 **Reduce context switching**: Many developers work across multiple projects. "
                    "Consider more focused project assignments to improve efficiency."
                )

        # Heavy merge-commit usage -> branching recommendation.
        for insight in insights:
            if insight['insight'] == 'Branching strategy' and 'Heavy' in insight['value']:
                recommendations.append(
                    "🌿 **Review branching strategy**: High percentage of merge commits suggests "
                    "complex branching. Consider simplifying the Git workflow."
                )

        if recommendations:
            for rec in recommendations:
                report.write(f"{rec}\n\n")
        else:
            report.write("✅ The team shows healthy development patterns. ")
            report.write("Continue current practices while monitoring for changes.\n")