gitflow_analytics-1.0.1-py3-none-any.whl → gitflow_analytics-1.0.3-py3-none-any.whl
This diff compares the contents of two package versions that were publicly released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.
- gitflow_analytics/__init__.py +11 -11
- gitflow_analytics/_version.py +2 -2
- gitflow_analytics/cli.py +612 -258
- gitflow_analytics/cli_rich.py +353 -0
- gitflow_analytics/config.py +251 -141
- gitflow_analytics/core/analyzer.py +140 -103
- gitflow_analytics/core/branch_mapper.py +132 -132
- gitflow_analytics/core/cache.py +240 -169
- gitflow_analytics/core/identity.py +210 -173
- gitflow_analytics/extractors/base.py +13 -11
- gitflow_analytics/extractors/story_points.py +70 -59
- gitflow_analytics/extractors/tickets.py +101 -87
- gitflow_analytics/integrations/github_integration.py +84 -77
- gitflow_analytics/integrations/jira_integration.py +116 -104
- gitflow_analytics/integrations/orchestrator.py +86 -85
- gitflow_analytics/metrics/dora.py +181 -177
- gitflow_analytics/models/database.py +190 -53
- gitflow_analytics/qualitative/__init__.py +30 -0
- gitflow_analytics/qualitative/classifiers/__init__.py +13 -0
- gitflow_analytics/qualitative/classifiers/change_type.py +468 -0
- gitflow_analytics/qualitative/classifiers/domain_classifier.py +399 -0
- gitflow_analytics/qualitative/classifiers/intent_analyzer.py +436 -0
- gitflow_analytics/qualitative/classifiers/risk_analyzer.py +412 -0
- gitflow_analytics/qualitative/core/__init__.py +13 -0
- gitflow_analytics/qualitative/core/llm_fallback.py +653 -0
- gitflow_analytics/qualitative/core/nlp_engine.py +373 -0
- gitflow_analytics/qualitative/core/pattern_cache.py +457 -0
- gitflow_analytics/qualitative/core/processor.py +540 -0
- gitflow_analytics/qualitative/models/__init__.py +25 -0
- gitflow_analytics/qualitative/models/schemas.py +272 -0
- gitflow_analytics/qualitative/utils/__init__.py +13 -0
- gitflow_analytics/qualitative/utils/batch_processor.py +326 -0
- gitflow_analytics/qualitative/utils/cost_tracker.py +343 -0
- gitflow_analytics/qualitative/utils/metrics.py +347 -0
- gitflow_analytics/qualitative/utils/text_processing.py +243 -0
- gitflow_analytics/reports/analytics_writer.py +11 -4
- gitflow_analytics/reports/csv_writer.py +51 -31
- gitflow_analytics/reports/narrative_writer.py +16 -14
- gitflow_analytics/tui/__init__.py +5 -0
- gitflow_analytics/tui/app.py +721 -0
- gitflow_analytics/tui/screens/__init__.py +8 -0
- gitflow_analytics/tui/screens/analysis_progress_screen.py +487 -0
- gitflow_analytics/tui/screens/configuration_screen.py +547 -0
- gitflow_analytics/tui/screens/loading_screen.py +358 -0
- gitflow_analytics/tui/screens/main_screen.py +304 -0
- gitflow_analytics/tui/screens/results_screen.py +698 -0
- gitflow_analytics/tui/widgets/__init__.py +7 -0
- gitflow_analytics/tui/widgets/data_table.py +257 -0
- gitflow_analytics/tui/widgets/export_modal.py +301 -0
- gitflow_analytics/tui/widgets/progress_widget.py +192 -0
- {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.0.3.dist-info}/METADATA +31 -4
- gitflow_analytics-1.0.3.dist-info/RECORD +62 -0
- gitflow_analytics-1.0.1.dist-info/RECORD +0 -31
- {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.0.3.dist-info}/WHEEL +0 -0
- {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.0.3.dist-info}/entry_points.txt +0 -0
- {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.0.3.dist-info}/licenses/LICENSE +0 -0
- {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.0.3.dist-info}/top_level.txt +0 -0
gitflow_analytics/core/cache.py
CHANGED
@@ -1,8 +1,9 @@
 """Caching layer for Git analysis with SQLite backend."""
+
 from contextlib import contextmanager
 from datetime import datetime, timedelta
 from pathlib import Path
-from typing import Any,
+from typing import Any, Optional
 
 from sqlalchemy import and_
 
@@ -11,15 +12,15 @@ from ..models.database import CachedCommit, Database, IssueCache, PullRequestCac
 
 class GitAnalysisCache:
     """Cache for Git analysis results."""
-
-    def __init__(self, cache_dir: Path, ttl_hours: int = 168):
+
+    def __init__(self, cache_dir: Path, ttl_hours: int = 168) -> None:
         """Initialize cache with SQLite backend."""
         self.cache_dir = cache_dir
         self.ttl_hours = ttl_hours
-        self.db = Database(cache_dir /
-
+        self.db = Database(cache_dir / "gitflow_cache.db")
+
     @contextmanager
-    def get_session(self):
+    def get_session(self) -> Any:
         """Get database session context manager."""
         session = self.db.get_session()
         try:
@@ -30,33 +31,40 @@ class GitAnalysisCache:
             raise
         finally:
             session.close()
-
-    def get_cached_commit(self, repo_path: str, commit_hash: str) -> Optional[
+
+    def get_cached_commit(self, repo_path: str, commit_hash: str) -> Optional[dict[str, Any]]:
         """Retrieve cached commit data if not stale."""
         with self.get_session() as session:
-            cached =
-
-
-
+            cached = (
+                session.query(CachedCommit)
+                .filter(
+                    and_(
+                        CachedCommit.repo_path == repo_path, CachedCommit.commit_hash == commit_hash
+                    )
                 )
-
-
+                .first()
+            )
+
             if cached and not self._is_stale(cached.cached_at):
                 return self._commit_to_dict(cached)
-
+
         return None
-
-    def cache_commit(self, repo_path: str, commit_data:
+
+    def cache_commit(self, repo_path: str, commit_data: dict[str, Any]) -> None:
         """Cache commit analysis results."""
         with self.get_session() as session:
             # Check if already exists
-            existing =
-
-
-
+            existing = (
+                session.query(CachedCommit)
+                .filter(
+                    and_(
+                        CachedCommit.repo_path == repo_path,
+                        CachedCommit.commit_hash == commit_data["hash"],
+                    )
                 )
-
-
+                .first()
+            )
+
             if existing:
                 # Update existing
                 for key, value in commit_data.items():
@@ -67,207 +75,270 @@ class GitAnalysisCache:
                 # Create new
                 cached_commit = CachedCommit(
                     repo_path=repo_path,
-                    commit_hash=commit_data[
-                    author_name=commit_data.get(
-                    author_email=commit_data.get(
-                    message=commit_data.get(
-                    timestamp=commit_data.get(
-                    branch=commit_data.get(
-                    is_merge=commit_data.get(
-                    files_changed=commit_data.get(
-                    insertions=commit_data.get(
-                    deletions=commit_data.get(
-                    complexity_delta=commit_data.get(
-                    story_points=commit_data.get(
-                    ticket_references=commit_data.get(
+                    commit_hash=commit_data["hash"],
+                    author_name=commit_data.get("author_name"),
+                    author_email=commit_data.get("author_email"),
+                    message=commit_data.get("message"),
+                    timestamp=commit_data.get("timestamp"),
+                    branch=commit_data.get("branch"),
+                    is_merge=commit_data.get("is_merge", False),
+                    files_changed=commit_data.get("files_changed", 0),
+                    insertions=commit_data.get("insertions", 0),
+                    deletions=commit_data.get("deletions", 0),
+                    complexity_delta=commit_data.get("complexity_delta", 0.0),
+                    story_points=commit_data.get("story_points"),
+                    ticket_references=commit_data.get("ticket_references", []),
                 )
                 session.add(cached_commit)
-
-    def cache_commits_batch(self, repo_path: str, commits:
+
+    def cache_commits_batch(self, repo_path: str, commits: list[dict[str, Any]]) -> None:
         """Cache multiple commits in a single transaction."""
         with self.get_session() as session:
             for commit_data in commits:
                 # Check if already exists
-                existing =
-
-
-
+                existing = (
+                    session.query(CachedCommit)
+                    .filter(
+                        and_(
+                            CachedCommit.repo_path == repo_path,
+                            CachedCommit.commit_hash == commit_data["hash"],
+                        )
                     )
-
-
+                    .first()
+                )
+
                 if existing:
                     # Update existing
                     for key, value in commit_data.items():
-                        if key !=
+                        if key != "hash" and hasattr(existing, key):
                             setattr(existing, key, value)
                     existing.cached_at = datetime.utcnow()
                 else:
                     # Create new
                     cached_commit = CachedCommit(
                         repo_path=repo_path,
-                        commit_hash=commit_data[
-                        author_name=commit_data.get(
-                        author_email=commit_data.get(
-                        message=commit_data.get(
-                        timestamp=commit_data.get(
-                        branch=commit_data.get(
-                        is_merge=commit_data.get(
-                        files_changed=commit_data.get(
-                        insertions=commit_data.get(
-                        deletions=commit_data.get(
-                        complexity_delta=commit_data.get(
-                        story_points=commit_data.get(
-                        ticket_references=commit_data.get(
+                        commit_hash=commit_data["hash"],
+                        author_name=commit_data.get("author_name"),
+                        author_email=commit_data.get("author_email"),
+                        message=commit_data.get("message"),
+                        timestamp=commit_data.get("timestamp"),
+                        branch=commit_data.get("branch"),
+                        is_merge=commit_data.get("is_merge", False),
+                        files_changed=commit_data.get("files_changed", 0),
+                        insertions=commit_data.get("insertions", 0),
+                        deletions=commit_data.get("deletions", 0),
+                        complexity_delta=commit_data.get("complexity_delta", 0.0),
+                        story_points=commit_data.get("story_points"),
+                        ticket_references=commit_data.get("ticket_references", []),
                     )
                     session.add(cached_commit)
-
-    def get_cached_pr(self, repo_path: str, pr_number: int) -> Optional[
+
+    def get_cached_pr(self, repo_path: str, pr_number: int) -> Optional[dict[str, Any]]:
         """Retrieve cached pull request data."""
         with self.get_session() as session:
-            cached =
-
-
-
+            cached = (
+                session.query(PullRequestCache)
+                .filter(
+                    and_(
+                        PullRequestCache.repo_path == repo_path,
+                        PullRequestCache.pr_number == pr_number,
+                    )
                 )
-
-
+                .first()
+            )
+
             if cached and not self._is_stale(cached.cached_at):
                 return self._pr_to_dict(cached)
-
+
         return None
-
-    def cache_pr(self, repo_path: str, pr_data:
+
+    def cache_pr(self, repo_path: str, pr_data: dict[str, Any]) -> None:
         """Cache pull request data."""
         with self.get_session() as session:
-
-
-
-
-
-
-
-
-
-
-                commit_hashes=pr_data.get('commit_hashes', [])
+            # Check if already exists
+            existing = (
+                session.query(PullRequestCache)
+                .filter(
+                    and_(
+                        PullRequestCache.repo_path == repo_path,
+                        PullRequestCache.pr_number == pr_data["number"],
+                    )
+                )
+                .first()
             )
-
-
-
+
+            if existing:
+                # Update existing
+                existing.title = pr_data.get("title")
+                existing.description = pr_data.get("description")
+                existing.author = pr_data.get("author")
+                existing.created_at = pr_data.get("created_at")
+                existing.merged_at = pr_data.get("merged_at")
+                existing.story_points = pr_data.get("story_points")
+                existing.labels = pr_data.get("labels", [])
+                existing.commit_hashes = pr_data.get("commit_hashes", [])
+                existing.cached_at = datetime.utcnow()
+            else:
+                # Create new
+                cached_pr = PullRequestCache(
+                    repo_path=repo_path,
+                    pr_number=pr_data["number"],
+                    title=pr_data.get("title"),
+                    description=pr_data.get("description"),
+                    author=pr_data.get("author"),
+                    created_at=pr_data.get("created_at"),
+                    merged_at=pr_data.get("merged_at"),
+                    story_points=pr_data.get("story_points"),
+                    labels=pr_data.get("labels", []),
+                    commit_hashes=pr_data.get("commit_hashes", []),
+                )
+                session.add(cached_pr)
+
+    def cache_issue(self, platform: str, issue_data: dict[str, Any]) -> None:
         """Cache issue data from various platforms."""
         with self.get_session() as session:
-
-
-
-
-
-
-
-
-
-
-                resolved_at=issue_data.get('resolved_at'),
-                story_points=issue_data.get('story_points'),
-                labels=issue_data.get('labels', []),
-                platform_data=issue_data.get('platform_data', {})
+            # Check if already exists
+            existing = (
+                session.query(IssueCache)
+                .filter(
+                    and_(
+                        IssueCache.platform == platform,
+                        IssueCache.issue_id == str(issue_data["id"]),
+                    )
+                )
+                .first()
             )
-
-
-
+
+            if existing:
+                # Update existing
+                existing.project_key = issue_data["project_key"]
+                existing.title = issue_data.get("title")
+                existing.description = issue_data.get("description")
+                existing.status = issue_data.get("status")
+                existing.assignee = issue_data.get("assignee")
+                existing.created_at = issue_data.get("created_at")
+                existing.updated_at = issue_data.get("updated_at")
+                existing.resolved_at = issue_data.get("resolved_at")
+                existing.story_points = issue_data.get("story_points")
+                existing.labels = issue_data.get("labels", [])
+                existing.platform_data = issue_data.get("platform_data", {})
+                existing.cached_at = datetime.utcnow()
+            else:
+                # Create new
+                cached_issue = IssueCache(
+                    platform=platform,
+                    issue_id=str(issue_data["id"]),
+                    project_key=issue_data["project_key"],
+                    title=issue_data.get("title"),
+                    description=issue_data.get("description"),
+                    status=issue_data.get("status"),
+                    assignee=issue_data.get("assignee"),
+                    created_at=issue_data.get("created_at"),
+                    updated_at=issue_data.get("updated_at"),
+                    resolved_at=issue_data.get("resolved_at"),
+                    story_points=issue_data.get("story_points"),
+                    labels=issue_data.get("labels", []),
+                    platform_data=issue_data.get("platform_data", {}),
+                )
+                session.add(cached_issue)
+
+    def get_cached_issues(self, platform: str, project_key: str) -> list[dict[str, Any]]:
         """Get all cached issues for a platform and project."""
         with self.get_session() as session:
-            issues =
-
-
-                IssueCache.project_key == project_key
+            issues = (
+                session.query(IssueCache)
+                .filter(
+                    and_(IssueCache.platform == platform, IssueCache.project_key == project_key)
                 )
-
-
-
-
-
-
+                .all()
+            )
+
+            return [
+                self._issue_to_dict(issue)
+                for issue in issues
+                if not self._is_stale(issue.cached_at)
+            ]
+
+    def clear_stale_cache(self) -> None:
         """Remove stale cache entries."""
         cutoff_time = datetime.utcnow() - timedelta(hours=self.ttl_hours)
-
+
         with self.get_session() as session:
-            session.query(CachedCommit).filter(
-
-            ).delete()
-
+            session.query(CachedCommit).filter(CachedCommit.cached_at < cutoff_time).delete()
+
             session.query(PullRequestCache).filter(
                 PullRequestCache.cached_at < cutoff_time
             ).delete()
-
-            session.query(IssueCache).filter(
-
-
-
-    def get_cache_stats(self) -> Dict[str, int]:
+
+            session.query(IssueCache).filter(IssueCache.cached_at < cutoff_time).delete()
+
+    def get_cache_stats(self) -> dict[str, int]:
         """Get cache statistics."""
         with self.get_session() as session:
             stats = {
-
-
-
-
+                "cached_commits": session.query(CachedCommit).count(),
+                "cached_prs": session.query(PullRequestCache).count(),
+                "cached_issues": session.query(IssueCache).count(),
+                "stale_commits": session.query(CachedCommit)
+                .filter(
                     CachedCommit.cached_at < datetime.utcnow() - timedelta(hours=self.ttl_hours)
-                )
+                )
+                .count(),
             }
             return stats
-
+
     def _is_stale(self, cached_at: datetime) -> bool:
         """Check if cache entry is stale."""
         if self.ttl_hours == 0:  # No expiration
             return False
         return cached_at < datetime.utcnow() - timedelta(hours=self.ttl_hours)
-
-    def _commit_to_dict(self, commit: CachedCommit) ->
+
+    def _commit_to_dict(self, commit: CachedCommit) -> dict[str, Any]:
         """Convert CachedCommit to dictionary."""
         return {
-
-
-
-
-
-
-
-
-
-
-
-
-
+            "hash": commit.commit_hash,
+            "author_name": commit.author_name,
+            "author_email": commit.author_email,
+            "message": commit.message,
+            "timestamp": commit.timestamp,
+            "branch": commit.branch,
+            "is_merge": commit.is_merge,
+            "files_changed": commit.files_changed,
+            "insertions": commit.insertions,
+            "deletions": commit.deletions,
+            "complexity_delta": commit.complexity_delta,
+            "story_points": commit.story_points,
+            "ticket_references": commit.ticket_references or [],
         }
-
-    def _pr_to_dict(self, pr: PullRequestCache) ->
+
+    def _pr_to_dict(self, pr: PullRequestCache) -> dict[str, Any]:
         """Convert PullRequestCache to dictionary."""
         return {
-
-
-
-
-
-
-
-
-
+            "number": pr.pr_number,
+            "title": pr.title,
+            "description": pr.description,
+            "author": pr.author,
+            "created_at": pr.created_at,
+            "merged_at": pr.merged_at,
+            "story_points": pr.story_points,
+            "labels": pr.labels or [],
+            "commit_hashes": pr.commit_hashes or [],
         }
-
-    def _issue_to_dict(self, issue: IssueCache) ->
+
+    def _issue_to_dict(self, issue: IssueCache) -> dict[str, Any]:
         """Convert IssueCache to dictionary."""
         return {
-
-
-
-
-
-
-
-
-
-
-
-
-
-        }
+            "platform": issue.platform,
+            "id": issue.issue_id,
+            "project_key": issue.project_key,
+            "title": issue.title,
+            "description": issue.description,
+            "status": issue.status,
+            "assignee": issue.assignee,
+            "created_at": issue.created_at,
+            "updated_at": issue.updated_at,
+            "resolved_at": issue.resolved_at,
+            "story_points": issue.story_points,
+            "labels": issue.labels or [],
+            "platform_data": issue.platform_data or {},
+        }
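For orientation, the annotated signatures in this diff (GitAnalysisCache.__init__, cache_commit, get_cached_commit, get_cache_stats) suggest a usage pattern along the following lines. This is a minimal sketch inferred only from the method names and type annotations above; the cache directory, repository path, and commit fields are illustrative placeholders rather than values documented by the package.

```python
from pathlib import Path

from gitflow_analytics.core.cache import GitAnalysisCache

# Illustrative cache location; ttl_hours=168 (one week) is the default
# shown in the __init__ signature above.
cache_dir = Path(".gitflow-cache")
cache_dir.mkdir(parents=True, exist_ok=True)  # assumption: directory must exist
cache = GitAnalysisCache(cache_dir, ttl_hours=168)

# cache_commit() takes a dict keyed by the CachedCommit fields; "hash" is
# the only key the diff accesses without a default.
commit = {
    "hash": "abc123",
    "author_name": "Jane Doe",
    "author_email": "jane@example.com",
    "message": "fix: handle empty branches",
    "files_changed": 2,
    "insertions": 10,
    "deletions": 3,
}
cache.cache_commit("/path/to/repo", commit)

# get_cached_commit() now returns Optional[dict[str, Any]] and skips stale entries.
cached = cache.get_cached_commit("/path/to/repo", "abc123")
if cached is not None:
    print(cached["message"])

# get_cache_stats() returns dict[str, int] with cached and stale counts.
print(cache.get_cache_stats())
```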