gitflow-analytics 1.0.3__py3-none-any.whl → 1.3.6__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- gitflow_analytics/_version.py +1 -1
- gitflow_analytics/classification/__init__.py +31 -0
- gitflow_analytics/classification/batch_classifier.py +752 -0
- gitflow_analytics/classification/classifier.py +464 -0
- gitflow_analytics/classification/feature_extractor.py +725 -0
- gitflow_analytics/classification/linguist_analyzer.py +574 -0
- gitflow_analytics/classification/model.py +455 -0
- gitflow_analytics/cli.py +4108 -350
- gitflow_analytics/cli_rich.py +198 -48
- gitflow_analytics/config/__init__.py +43 -0
- gitflow_analytics/config/errors.py +261 -0
- gitflow_analytics/config/loader.py +904 -0
- gitflow_analytics/config/profiles.py +264 -0
- gitflow_analytics/config/repository.py +124 -0
- gitflow_analytics/config/schema.py +441 -0
- gitflow_analytics/config/validator.py +154 -0
- gitflow_analytics/config.py +44 -508
- gitflow_analytics/core/analyzer.py +1209 -98
- gitflow_analytics/core/cache.py +1337 -29
- gitflow_analytics/core/data_fetcher.py +1193 -0
- gitflow_analytics/core/identity.py +363 -14
- gitflow_analytics/core/metrics_storage.py +526 -0
- gitflow_analytics/core/progress.py +372 -0
- gitflow_analytics/core/schema_version.py +269 -0
- gitflow_analytics/extractors/ml_tickets.py +1100 -0
- gitflow_analytics/extractors/story_points.py +8 -1
- gitflow_analytics/extractors/tickets.py +749 -11
- gitflow_analytics/identity_llm/__init__.py +6 -0
- gitflow_analytics/identity_llm/analysis_pass.py +231 -0
- gitflow_analytics/identity_llm/analyzer.py +464 -0
- gitflow_analytics/identity_llm/models.py +76 -0
- gitflow_analytics/integrations/github_integration.py +175 -11
- gitflow_analytics/integrations/jira_integration.py +461 -24
- gitflow_analytics/integrations/orchestrator.py +124 -1
- gitflow_analytics/metrics/activity_scoring.py +322 -0
- gitflow_analytics/metrics/branch_health.py +470 -0
- gitflow_analytics/metrics/dora.py +379 -20
- gitflow_analytics/models/database.py +843 -53
- gitflow_analytics/pm_framework/__init__.py +115 -0
- gitflow_analytics/pm_framework/adapters/__init__.py +50 -0
- gitflow_analytics/pm_framework/adapters/jira_adapter.py +1845 -0
- gitflow_analytics/pm_framework/base.py +406 -0
- gitflow_analytics/pm_framework/models.py +211 -0
- gitflow_analytics/pm_framework/orchestrator.py +652 -0
- gitflow_analytics/pm_framework/registry.py +333 -0
- gitflow_analytics/qualitative/__init__.py +9 -10
- gitflow_analytics/qualitative/chatgpt_analyzer.py +259 -0
- gitflow_analytics/qualitative/classifiers/__init__.py +3 -3
- gitflow_analytics/qualitative/classifiers/change_type.py +518 -244
- gitflow_analytics/qualitative/classifiers/domain_classifier.py +272 -165
- gitflow_analytics/qualitative/classifiers/intent_analyzer.py +321 -222
- gitflow_analytics/qualitative/classifiers/llm/__init__.py +35 -0
- gitflow_analytics/qualitative/classifiers/llm/base.py +193 -0
- gitflow_analytics/qualitative/classifiers/llm/batch_processor.py +383 -0
- gitflow_analytics/qualitative/classifiers/llm/cache.py +479 -0
- gitflow_analytics/qualitative/classifiers/llm/cost_tracker.py +435 -0
- gitflow_analytics/qualitative/classifiers/llm/openai_client.py +403 -0
- gitflow_analytics/qualitative/classifiers/llm/prompts.py +373 -0
- gitflow_analytics/qualitative/classifiers/llm/response_parser.py +287 -0
- gitflow_analytics/qualitative/classifiers/llm_commit_classifier.py +607 -0
- gitflow_analytics/qualitative/classifiers/risk_analyzer.py +215 -189
- gitflow_analytics/qualitative/core/__init__.py +4 -4
- gitflow_analytics/qualitative/core/llm_fallback.py +239 -235
- gitflow_analytics/qualitative/core/nlp_engine.py +157 -148
- gitflow_analytics/qualitative/core/pattern_cache.py +214 -192
- gitflow_analytics/qualitative/core/processor.py +381 -248
- gitflow_analytics/qualitative/enhanced_analyzer.py +2236 -0
- gitflow_analytics/qualitative/example_enhanced_usage.py +420 -0
- gitflow_analytics/qualitative/models/__init__.py +7 -7
- gitflow_analytics/qualitative/models/schemas.py +155 -121
- gitflow_analytics/qualitative/utils/__init__.py +4 -4
- gitflow_analytics/qualitative/utils/batch_processor.py +136 -123
- gitflow_analytics/qualitative/utils/cost_tracker.py +142 -140
- gitflow_analytics/qualitative/utils/metrics.py +172 -158
- gitflow_analytics/qualitative/utils/text_processing.py +146 -104
- gitflow_analytics/reports/__init__.py +100 -0
- gitflow_analytics/reports/analytics_writer.py +539 -14
- gitflow_analytics/reports/base.py +648 -0
- gitflow_analytics/reports/branch_health_writer.py +322 -0
- gitflow_analytics/reports/classification_writer.py +924 -0
- gitflow_analytics/reports/cli_integration.py +427 -0
- gitflow_analytics/reports/csv_writer.py +1676 -212
- gitflow_analytics/reports/data_models.py +504 -0
- gitflow_analytics/reports/database_report_generator.py +427 -0
- gitflow_analytics/reports/example_usage.py +344 -0
- gitflow_analytics/reports/factory.py +499 -0
- gitflow_analytics/reports/formatters.py +698 -0
- gitflow_analytics/reports/html_generator.py +1116 -0
- gitflow_analytics/reports/interfaces.py +489 -0
- gitflow_analytics/reports/json_exporter.py +2770 -0
- gitflow_analytics/reports/narrative_writer.py +2287 -158
- gitflow_analytics/reports/story_point_correlation.py +1144 -0
- gitflow_analytics/reports/weekly_trends_writer.py +389 -0
- gitflow_analytics/training/__init__.py +5 -0
- gitflow_analytics/training/model_loader.py +377 -0
- gitflow_analytics/training/pipeline.py +550 -0
- gitflow_analytics/tui/__init__.py +1 -1
- gitflow_analytics/tui/app.py +129 -126
- gitflow_analytics/tui/screens/__init__.py +3 -3
- gitflow_analytics/tui/screens/analysis_progress_screen.py +188 -179
- gitflow_analytics/tui/screens/configuration_screen.py +154 -178
- gitflow_analytics/tui/screens/loading_screen.py +100 -110
- gitflow_analytics/tui/screens/main_screen.py +89 -72
- gitflow_analytics/tui/screens/results_screen.py +305 -281
- gitflow_analytics/tui/widgets/__init__.py +2 -2
- gitflow_analytics/tui/widgets/data_table.py +67 -69
- gitflow_analytics/tui/widgets/export_modal.py +76 -76
- gitflow_analytics/tui/widgets/progress_widget.py +41 -46
- gitflow_analytics-1.3.6.dist-info/METADATA +1015 -0
- gitflow_analytics-1.3.6.dist-info/RECORD +122 -0
- gitflow_analytics-1.0.3.dist-info/METADATA +0 -490
- gitflow_analytics-1.0.3.dist-info/RECORD +0 -62
- {gitflow_analytics-1.0.3.dist-info → gitflow_analytics-1.3.6.dist-info}/WHEEL +0 -0
- {gitflow_analytics-1.0.3.dist-info → gitflow_analytics-1.3.6.dist-info}/entry_points.txt +0 -0
- {gitflow_analytics-1.0.3.dist-info → gitflow_analytics-1.3.6.dist-info}/licenses/LICENSE +0 -0
- {gitflow_analytics-1.0.3.dist-info → gitflow_analytics-1.3.6.dist-info}/top_level.txt +0 -0
gitflow_analytics/core/metrics_storage.py (new file)

@@ -0,0 +1,526 @@

```python
"""Daily metrics storage system for GitFlow Analytics.

WHY: This module handles the storage and retrieval of daily classified activity
metrics for developers and projects. It provides the foundation for database-backed
reporting with trend analysis capabilities.
"""

import logging
from collections import defaultdict
from contextlib import contextmanager
from datetime import date, datetime
from pathlib import Path
from typing import Any, Optional

from sqlalchemy import and_, func
from sqlalchemy.orm import Session

from ..models.database import DailyMetrics, Database

logger = logging.getLogger(__name__)


class DailyMetricsStorage:
    """Storage manager for daily developer/project activity metrics.

    WHY: Centralized storage management ensures consistent data aggregation
    and enables efficient querying for reports and trend analysis.
    """

    def __init__(self, db_path: Path):
        """Initialize daily metrics storage.

        Args:
            db_path: Path to SQLite database file
        """
        self.db = Database(db_path)
        logger.info(f"Initialized daily metrics storage at {db_path}")

    @contextmanager
    def get_session(self):
        """Get database session context manager."""
        session = self.db.get_session()
        try:
            yield session
            session.commit()
        except Exception:
            session.rollback()
            raise
        finally:
            session.close()

    def store_daily_metrics(
        self,
        analysis_date: date,
        commits: list[dict[str, Any]],
        developer_identities: dict[str, dict[str, str]],
    ) -> int:
        """Store daily metrics from commit analysis.

        WHY: Aggregates and stores daily metrics per developer-project combination
        to enable fast report generation and trend analysis.

        Args:
            analysis_date: Date for the metrics
            commits: List of analyzed commits with categorization
            developer_identities: Mapping of email to canonical developer info

        Returns:
            Number of daily metric records created/updated
        """
        daily_aggregates = self._aggregate_commits_by_day(
            commits, developer_identities, analysis_date
        )

        records_processed = 0
        session = self.db.get_session()
        try:
            for (dev_id, project_key), metrics in daily_aggregates.items():
                try:
                    # Get or create daily metrics record
                    existing = (
                        session.query(DailyMetrics)
                        .filter(
                            and_(
                                DailyMetrics.date == analysis_date,
                                DailyMetrics.developer_id == dev_id,
                                DailyMetrics.project_key == project_key,
                            )
                        )
                        .first()
                    )

                    if existing:
                        # Update existing record
                        self._update_metrics_record(existing, metrics)
                        existing.updated_at = datetime.utcnow()
                        logger.debug(
                            f"Updated existing daily metrics for {dev_id} in {project_key} on {analysis_date}"
                        )
                    else:
                        # Create new record
                        new_record = DailyMetrics(
                            date=analysis_date,
                            developer_id=dev_id,
                            project_key=project_key,
                            developer_name=metrics["developer_name"],
                            developer_email=metrics["developer_email"],
                            **{
                                k: v
                                for k, v in metrics.items()
                                if k not in ["developer_name", "developer_email"]
                            },
                        )
                        session.add(new_record)
                        logger.debug(
                            f"Created new daily metrics for {dev_id} in {project_key} on {analysis_date}"
                        )

                    # Commit this record individually to avoid constraint violations
                    session.commit()
                    records_processed += 1

                except Exception as e:
                    logger.warning(
                        f"Failed to store/update daily metrics for {dev_id} in {project_key} on {analysis_date}: {e}"
                    )
                    session.rollback()
                    # Try to handle UNIQUE constraint violations by doing another lookup
                    try:
                        existing = (
                            session.query(DailyMetrics)
                            .filter(
                                and_(
                                    DailyMetrics.date == analysis_date,
                                    DailyMetrics.developer_id == dev_id,
                                    DailyMetrics.project_key == project_key,
                                )
                            )
                            .first()
                        )
                        if existing:
                            # Record was created by another process, just update it
                            self._update_metrics_record(existing, metrics)
                            existing.updated_at = datetime.utcnow()
                            session.commit()
                            records_processed += 1
                            logger.info(
                                f"Updated metrics after constraint violation for {dev_id} in {project_key} on {analysis_date}"
                            )
                        else:
                            logger.error(
                                f"Could not resolve constraint violation for {dev_id} in {project_key} on {analysis_date}"
                            )
                    except Exception as retry_e:
                        logger.error(
                            f"Retry failed for {dev_id} in {project_key} on {analysis_date}: {retry_e}"
                        )
                        session.rollback()
        finally:
            session.close()

        logger.info(f"Stored/updated {records_processed} daily metrics records for {analysis_date}")
        return records_processed

    def get_date_range_metrics(
        self,
        start_date: date,
        end_date: date,
        developer_ids: Optional[list[str]] = None,
        project_keys: Optional[list[str]] = None,
    ) -> list[dict[str, Any]]:
        """Retrieve daily metrics for a date range.

        Args:
            start_date: Start date (inclusive)
            end_date: End date (inclusive)
            developer_ids: Optional filter by developer IDs
            project_keys: Optional filter by project keys

        Returns:
            List of daily metrics dictionaries
        """
        with self.get_session() as session:
            query = session.query(DailyMetrics).filter(
                and_(DailyMetrics.date >= start_date, DailyMetrics.date <= end_date)
            )

            if developer_ids:
                query = query.filter(DailyMetrics.developer_id.in_(developer_ids))

            if project_keys:
                query = query.filter(DailyMetrics.project_key.in_(project_keys))

            results = query.order_by(DailyMetrics.date, DailyMetrics.developer_id).all()

            return [self._metrics_record_to_dict(record) for record in results]

    def calculate_weekly_trends(
        self, start_date: date, end_date: date
    ) -> dict[tuple[str, str], dict[str, float]]:
        """Calculate week-over-week trends for developer-project combinations.

        WHY: Pre-calculated trends improve report performance and provide
        consistent trend analysis across different report types.

        Args:
            start_date: Analysis start date
            end_date: Analysis end date

        Returns:
            Dict mapping (developer_id, project_key) to trend metrics
        """
        trends = {}

        with self.get_session() as session:
            # Get all unique developer-project combinations in the date range
            combinations = (
                session.query(DailyMetrics.developer_id, DailyMetrics.project_key)
                .filter(and_(DailyMetrics.date >= start_date, DailyMetrics.date <= end_date))
                .distinct()
                .all()
            )

            for dev_id, project_key in combinations:
                trend_data = self._calculate_developer_project_trend(
                    session, dev_id, project_key, start_date, end_date
                )

                if trend_data:
                    trends[(dev_id, project_key)] = trend_data

        logger.info(f"Calculated trends for {len(trends)} developer-project combinations")
        return trends

    def get_classification_summary(
        self, start_date: date, end_date: date
    ) -> dict[str, dict[str, int]]:
        """Get classification summary across all developers and projects.

        Args:
            start_date: Start date for summary
            end_date: End date for summary

        Returns:
            Dict with classification counts by developer and project
        """
        with self.get_session() as session:
            # Classification totals by developer
            dev_query = (
                session.query(
                    DailyMetrics.developer_name,
                    func.sum(DailyMetrics.feature_commits).label("features"),
                    func.sum(DailyMetrics.bug_fix_commits).label("bug_fixes"),
                    func.sum(DailyMetrics.refactor_commits).label("refactors"),
                    func.sum(DailyMetrics.total_commits).label("total"),
                )
                .filter(and_(DailyMetrics.date >= start_date, DailyMetrics.date <= end_date))
                .group_by(DailyMetrics.developer_name)
                .all()
            )

            # Classification totals by project
            proj_query = (
                session.query(
                    DailyMetrics.project_key,
                    func.sum(DailyMetrics.feature_commits).label("features"),
                    func.sum(DailyMetrics.bug_fix_commits).label("bug_fixes"),
                    func.sum(DailyMetrics.refactor_commits).label("refactors"),
                    func.sum(DailyMetrics.total_commits).label("total"),
                )
                .filter(and_(DailyMetrics.date >= start_date, DailyMetrics.date <= end_date))
                .group_by(DailyMetrics.project_key)
                .all()
            )

            return {
                "by_developer": {
                    row.developer_name: {
                        "features": row.features or 0,
                        "bug_fixes": row.bug_fixes or 0,
                        "refactors": row.refactors or 0,
                        "total": row.total or 0,
                    }
                    for row in dev_query
                },
                "by_project": {
                    row.project_key: {
                        "features": row.features or 0,
                        "bug_fixes": row.bug_fixes or 0,
                        "refactors": row.refactors or 0,
                        "total": row.total or 0,
                    }
                    for row in proj_query
                },
            }

    def _aggregate_commits_by_day(
        self,
        commits: list[dict[str, Any]],
        developer_identities: dict[str, dict[str, str]],
        target_date: date,
    ) -> dict[tuple[str, str], dict[str, Any]]:
        """Aggregate commits into daily metrics by developer-project.

        WHY: Groups commits by developer and project for the target date,
        calculating all relevant metrics for storage.
        """
        # Group commits by developer and project for the target date
        daily_groups = defaultdict(
            lambda: {
                "total_commits": 0,
                "feature_commits": 0,
                "bug_fix_commits": 0,
                "refactor_commits": 0,
                "documentation_commits": 0,
                "maintenance_commits": 0,
                "test_commits": 0,
                "style_commits": 0,
                "build_commits": 0,
                "other_commits": 0,
                "files_changed": 0,
                "lines_added": 0,
                "lines_deleted": 0,
                "story_points": 0,
                "tracked_commits": 0,
                "untracked_commits": 0,
                "unique_tickets": set(),
                "merge_commits": 0,
                "complex_commits": 0,
                "developer_name": "",
                "developer_email": "",
            }
        )

        for commit in commits:
            # Filter to target date
            commit_date = commit.get("timestamp")
            if not commit_date:
                continue

            # Handle both datetime and date objects
            if isinstance(commit_date, datetime) or hasattr(commit_date, "date"):
                if commit_date.date() != target_date:
                    continue
            elif str(commit_date)[:10] != str(target_date):
                continue

            # Get developer identity
            author_email = commit.get("author_email", "")
            dev_identity = developer_identities.get(
                author_email,
                {
                    "canonical_id": author_email,
                    "name": commit.get("author_name", "Unknown"),
                    "email": author_email,
                },
            )

            dev_id = dev_identity.get("canonical_id", author_email)
            project_key = commit.get("project_key", "default")

            metrics = daily_groups[(dev_id, project_key)]

            # Set developer info (will be overwritten with same values, that's OK)
            metrics["developer_name"] = dev_identity.get(
                "name", commit.get("author_name", "Unknown")
            )
            metrics["developer_email"] = dev_identity.get("email", author_email)

            # Aggregate basic metrics
            metrics["total_commits"] += 1
            # Handle files_changed safely - could be int or list
            files_changed = commit.get("files_changed", 0)
            if isinstance(files_changed, list):
                metrics["files_changed"] += len(files_changed)
            elif isinstance(files_changed, int):
                metrics["files_changed"] += files_changed
            else:
                # Fallback for unexpected types
                metrics["files_changed"] += 0
            metrics["lines_added"] += commit.get("insertions", 0)
            metrics["lines_deleted"] += commit.get("deletions", 0)
            metrics["story_points"] += commit.get("story_points", 0) or 0

            # Classification counts
            category = commit.get("category", "other")
            category_field = f"{category}_commits"
            if category_field in metrics:
                metrics[category_field] += 1
            else:
                metrics["other_commits"] += 1

            # Ticket tracking
            ticket_refs = commit.get("ticket_references", [])
            if ticket_refs:
                metrics["tracked_commits"] += 1
                metrics["unique_tickets"].update(ticket_refs)
            else:
                metrics["untracked_commits"] += 1

            # Work patterns
            if commit.get("is_merge", False):
                metrics["merge_commits"] += 1

            if commit.get("files_changed", 0) > 5:
                metrics["complex_commits"] += 1

        # Convert sets to counts
        for metrics in daily_groups.values():
            metrics["unique_tickets"] = len(metrics["unique_tickets"])

        return dict(daily_groups)

    def _update_metrics_record(self, record: DailyMetrics, metrics: dict[str, Any]) -> None:
        """Update an existing DailyMetrics record with new data."""
        for key, value in metrics.items():
            if hasattr(record, key) and key not in ["developer_name", "developer_email"]:
                setattr(record, key, value)

    def _metrics_record_to_dict(self, record: DailyMetrics) -> dict[str, Any]:
        """Convert DailyMetrics SQLAlchemy record to dictionary."""
        return {
            "date": record.date,
            "developer_id": record.developer_id,
            "developer_name": record.developer_name,
            "developer_email": record.developer_email,
            "project_key": record.project_key,
            "feature_commits": record.feature_commits,
            "bug_fix_commits": record.bug_fix_commits,
            "refactor_commits": record.refactor_commits,
            "documentation_commits": record.documentation_commits,
            "maintenance_commits": record.maintenance_commits,
            "test_commits": record.test_commits,
            "style_commits": record.style_commits,
            "build_commits": record.build_commits,
            "other_commits": record.other_commits,
            "total_commits": record.total_commits,
            "files_changed": record.files_changed,
            "lines_added": record.lines_added,
            "lines_deleted": record.lines_deleted,
            "story_points": record.story_points,
            "tracked_commits": record.tracked_commits,
            "untracked_commits": record.untracked_commits,
            "unique_tickets": record.unique_tickets,
            "merge_commits": record.merge_commits,
            "complex_commits": record.complex_commits,
            "created_at": record.created_at,
            "updated_at": record.updated_at,
        }

    def _calculate_developer_project_trend(
        self, session: Session, dev_id: str, project_key: str, start_date: date, end_date: date
    ) -> Optional[dict[str, float]]:
        """Calculate trend data for a specific developer-project combination."""
        # Get weekly aggregates
        weekly_data = self._get_weekly_aggregates(
            session, dev_id, project_key, start_date, end_date
        )

        if len(weekly_data) < 2:
            # Need at least 2 weeks for trend calculation
            return None

        # Calculate week-over-week changes for the most recent week
        current_week = weekly_data[-1]
        previous_week = weekly_data[-2]

        def calculate_change(current: int, previous: int) -> float:
            """Calculate percentage change."""
            if previous == 0:
                return 100.0 if current > 0 else 0.0
            return ((current - previous) / previous) * 100.0

        return {
            "total_commits_change": calculate_change(
                current_week["total_commits"], previous_week["total_commits"]
            ),
            "feature_commits_change": calculate_change(
                current_week["feature_commits"], previous_week["feature_commits"]
            ),
            "bug_fix_commits_change": calculate_change(
                current_week["bug_fix_commits"], previous_week["bug_fix_commits"]
            ),
            "refactor_commits_change": calculate_change(
                current_week["refactor_commits"], previous_week["refactor_commits"]
            ),
            "current_week_total": current_week["total_commits"],
            "previous_week_total": previous_week["total_commits"],
        }

    def _get_weekly_aggregates(
        self, session: Session, dev_id: str, project_key: str, start_date: date, end_date: date
    ) -> list[dict[str, Any]]:
        """Get weekly aggregated data for trend calculation."""
        # Query daily metrics and group by week
        results = (
            session.query(
                func.strftime("%Y-%W", DailyMetrics.date).label("week"),
                func.sum(DailyMetrics.total_commits).label("total_commits"),
                func.sum(DailyMetrics.feature_commits).label("feature_commits"),
                func.sum(DailyMetrics.bug_fix_commits).label("bug_fix_commits"),
                func.sum(DailyMetrics.refactor_commits).label("refactor_commits"),
            )
            .filter(
                and_(
                    DailyMetrics.developer_id == dev_id,
                    DailyMetrics.project_key == project_key,
                    DailyMetrics.date >= start_date,
                    DailyMetrics.date <= end_date,
                )
            )
            .group_by("week")
            .order_by("week")
            .all()
        )

        return [
            {
                "week": result.week,
                "total_commits": result.total_commits or 0,
                "feature_commits": result.feature_commits or 0,
                "bug_fix_commits": result.bug_fix_commits or 0,
                "refactor_commits": result.refactor_commits or 0,
            }
            for result in results
        ]
```
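The new `metrics_storage` module above is self-contained enough to exercise directly. The sketch below is a minimal usage example, not part of the release: the commit-dict keys (`timestamp`, `author_email`, `category`, `ticket_references`, and so on) are the ones `_aggregate_commits_by_day` reads above, while the concrete values, the `metrics.db` path, and the `dev-001` canonical ID are illustrative assumptions, as is the expectation that `Database(db_path)` initializes the SQLite schema on first use.

```python
from datetime import date, datetime, timezone
from pathlib import Path

from gitflow_analytics.core.metrics_storage import DailyMetricsStorage

# Hypothetical database path; Database(db_path) is assumed to create the schema.
storage = DailyMetricsStorage(Path("metrics.db"))

# One analyzed commit, using the keys the aggregator reads. files_changed is
# passed as an int here; the files_changed counter also accepts a list of paths.
commits = [
    {
        "timestamp": datetime(2024, 5, 6, 14, 30, tzinfo=timezone.utc),
        "author_email": "jane@example.com",
        "author_name": "Jane Doe",
        "project_key": "WEBAPP",             # illustrative project key
        "category": "feature",               # increments feature_commits
        "files_changed": 2,
        "insertions": 120,
        "deletions": 8,
        "story_points": 3,
        "ticket_references": ["WEBAPP-42"],  # counts as a tracked commit
        "is_merge": False,
    }
]

# Maps raw author emails to canonical identities; dev-001 is hypothetical.
identities = {
    "jane@example.com": {
        "canonical_id": "dev-001",
        "name": "Jane Doe",
        "email": "jane@example.com",
    }
}

# Aggregate and upsert one day of metrics, then query them back.
stored = storage.store_daily_metrics(date(2024, 5, 6), commits, identities)
rows = storage.get_date_range_metrics(date(2024, 5, 1), date(2024, 5, 7))
trends = storage.calculate_weekly_trends(date(2024, 4, 1), date(2024, 5, 7))
print(stored, len(rows), trends.get(("dev-001", "WEBAPP")))  # trend is None until 2+ weeks exist
```

Note the design choice visible in `store_daily_metrics`: each record is committed individually so a UNIQUE constraint race on (date, developer_id, project_key) can be retried as an update without losing the rest of the batch, trading bulk-insert throughput for resilience.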