gitflow-analytics 1.0.0-py3-none-any.whl → 1.0.3-py3-none-any.whl
This diff compares publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
- gitflow_analytics/__init__.py +11 -9
- gitflow_analytics/_version.py +2 -2
- gitflow_analytics/cli.py +691 -243
- gitflow_analytics/cli_rich.py +353 -0
- gitflow_analytics/config.py +389 -96
- gitflow_analytics/core/analyzer.py +175 -78
- gitflow_analytics/core/branch_mapper.py +132 -132
- gitflow_analytics/core/cache.py +242 -173
- gitflow_analytics/core/identity.py +214 -178
- gitflow_analytics/extractors/base.py +13 -11
- gitflow_analytics/extractors/story_points.py +70 -59
- gitflow_analytics/extractors/tickets.py +111 -88
- gitflow_analytics/integrations/github_integration.py +91 -77
- gitflow_analytics/integrations/jira_integration.py +284 -0
- gitflow_analytics/integrations/orchestrator.py +99 -72
- gitflow_analytics/metrics/dora.py +183 -179
- gitflow_analytics/models/database.py +191 -54
- gitflow_analytics/qualitative/__init__.py +30 -0
- gitflow_analytics/qualitative/classifiers/__init__.py +13 -0
- gitflow_analytics/qualitative/classifiers/change_type.py +468 -0
- gitflow_analytics/qualitative/classifiers/domain_classifier.py +399 -0
- gitflow_analytics/qualitative/classifiers/intent_analyzer.py +436 -0
- gitflow_analytics/qualitative/classifiers/risk_analyzer.py +412 -0
- gitflow_analytics/qualitative/core/__init__.py +13 -0
- gitflow_analytics/qualitative/core/llm_fallback.py +653 -0
- gitflow_analytics/qualitative/core/nlp_engine.py +373 -0
- gitflow_analytics/qualitative/core/pattern_cache.py +457 -0
- gitflow_analytics/qualitative/core/processor.py +540 -0
- gitflow_analytics/qualitative/models/__init__.py +25 -0
- gitflow_analytics/qualitative/models/schemas.py +272 -0
- gitflow_analytics/qualitative/utils/__init__.py +13 -0
- gitflow_analytics/qualitative/utils/batch_processor.py +326 -0
- gitflow_analytics/qualitative/utils/cost_tracker.py +343 -0
- gitflow_analytics/qualitative/utils/metrics.py +347 -0
- gitflow_analytics/qualitative/utils/text_processing.py +243 -0
- gitflow_analytics/reports/analytics_writer.py +25 -8
- gitflow_analytics/reports/csv_writer.py +60 -32
- gitflow_analytics/reports/narrative_writer.py +21 -15
- gitflow_analytics/tui/__init__.py +5 -0
- gitflow_analytics/tui/app.py +721 -0
- gitflow_analytics/tui/screens/__init__.py +8 -0
- gitflow_analytics/tui/screens/analysis_progress_screen.py +487 -0
- gitflow_analytics/tui/screens/configuration_screen.py +547 -0
- gitflow_analytics/tui/screens/loading_screen.py +358 -0
- gitflow_analytics/tui/screens/main_screen.py +304 -0
- gitflow_analytics/tui/screens/results_screen.py +698 -0
- gitflow_analytics/tui/widgets/__init__.py +7 -0
- gitflow_analytics/tui/widgets/data_table.py +257 -0
- gitflow_analytics/tui/widgets/export_modal.py +301 -0
- gitflow_analytics/tui/widgets/progress_widget.py +192 -0
- gitflow_analytics-1.0.3.dist-info/METADATA +490 -0
- gitflow_analytics-1.0.3.dist-info/RECORD +62 -0
- gitflow_analytics-1.0.0.dist-info/METADATA +0 -201
- gitflow_analytics-1.0.0.dist-info/RECORD +0 -30
- {gitflow_analytics-1.0.0.dist-info → gitflow_analytics-1.0.3.dist-info}/WHEEL +0 -0
- {gitflow_analytics-1.0.0.dist-info → gitflow_analytics-1.0.3.dist-info}/entry_points.txt +0 -0
- {gitflow_analytics-1.0.0.dist-info → gitflow_analytics-1.0.3.dist-info}/licenses/LICENSE +0 -0
- {gitflow_analytics-1.0.0.dist-info → gitflow_analytics-1.0.3.dist-info}/top_level.txt +0 -0
gitflow_analytics/core/cache.py
CHANGED
@@ -1,27 +1,26 @@
 """Caching layer for Git analysis with SQLite backend."""
 
+from contextlib import contextmanager
 from datetime import datetime, timedelta
-from typing import List, Optional, Dict, Any
 from pathlib import Path
-from
+from typing import Any, Optional
 
-from sqlalchemy.orm import Session
 from sqlalchemy import and_
 
-from ..models.database import Database,
+from ..models.database import CachedCommit, Database, IssueCache, PullRequestCache
 
 
 class GitAnalysisCache:
     """Cache for Git analysis results."""
 
-    def __init__(self, cache_dir: Path, ttl_hours: int = 168):
+    def __init__(self, cache_dir: Path, ttl_hours: int = 168) -> None:
         """Initialize cache with SQLite backend."""
         self.cache_dir = cache_dir
         self.ttl_hours = ttl_hours
-        self.db = Database(cache_dir /
+        self.db = Database(cache_dir / "gitflow_cache.db")
 
     @contextmanager
-    def get_session(self):
+    def get_session(self) -> Any:
         """Get database session context manager."""
         session = self.db.get_session()
         try:
@@ -32,33 +31,40 @@ class GitAnalysisCache:
             raise
         finally:
             session.close()
 
-    def get_cached_commit(self, repo_path: str, commit_hash: str) -> Optional[
+    def get_cached_commit(self, repo_path: str, commit_hash: str) -> Optional[dict[str, Any]]:
         """Retrieve cached commit data if not stale."""
         with self.get_session() as session:
-            cached =
+            cached = (
+                session.query(CachedCommit)
+                .filter(
+                    and_(
+                        CachedCommit.repo_path == repo_path, CachedCommit.commit_hash == commit_hash
+                    )
                 )
+                .first()
+            )
 
             if cached and not self._is_stale(cached.cached_at):
                 return self._commit_to_dict(cached)
 
             return None
 
-    def cache_commit(self, repo_path: str, commit_data:
+    def cache_commit(self, repo_path: str, commit_data: dict[str, Any]) -> None:
         """Cache commit analysis results."""
         with self.get_session() as session:
             # Check if already exists
-            existing =
+            existing = (
+                session.query(CachedCommit)
+                .filter(
+                    and_(
+                        CachedCommit.repo_path == repo_path,
+                        CachedCommit.commit_hash == commit_data["hash"],
+                    )
                 )
+                .first()
+            )
 
             if existing:
                 # Update existing
                 for key, value in commit_data.items():
@@ -69,207 +75,270 @@ class GitAnalysisCache:
                 # Create new
                 cached_commit = CachedCommit(
                     repo_path=repo_path,
-                    commit_hash=commit_data[
-                    author_name=commit_data.get(
-                    author_email=commit_data.get(
-                    message=commit_data.get(
-                    timestamp=commit_data.get(
-                    branch=commit_data.get(
-                    is_merge=commit_data.get(
-                    files_changed=commit_data.get(
-                    insertions=commit_data.get(
-                    deletions=commit_data.get(
-                    complexity_delta=commit_data.get(
-                    story_points=commit_data.get(
-                    ticket_references=commit_data.get(
+                    commit_hash=commit_data["hash"],
+                    author_name=commit_data.get("author_name"),
+                    author_email=commit_data.get("author_email"),
+                    message=commit_data.get("message"),
+                    timestamp=commit_data.get("timestamp"),
+                    branch=commit_data.get("branch"),
+                    is_merge=commit_data.get("is_merge", False),
+                    files_changed=commit_data.get("files_changed", 0),
+                    insertions=commit_data.get("insertions", 0),
+                    deletions=commit_data.get("deletions", 0),
+                    complexity_delta=commit_data.get("complexity_delta", 0.0),
+                    story_points=commit_data.get("story_points"),
+                    ticket_references=commit_data.get("ticket_references", []),
                 )
                 session.add(cached_commit)
 
-    def cache_commits_batch(self, repo_path: str, commits:
+    def cache_commits_batch(self, repo_path: str, commits: list[dict[str, Any]]) -> None:
         """Cache multiple commits in a single transaction."""
         with self.get_session() as session:
             for commit_data in commits:
                 # Check if already exists
-                existing =
+                existing = (
+                    session.query(CachedCommit)
+                    .filter(
+                        and_(
+                            CachedCommit.repo_path == repo_path,
+                            CachedCommit.commit_hash == commit_data["hash"],
+                        )
                     )
+                    .first()
+                )
 
                 if existing:
                     # Update existing
                     for key, value in commit_data.items():
-                        if key !=
+                        if key != "hash" and hasattr(existing, key):
                             setattr(existing, key, value)
                     existing.cached_at = datetime.utcnow()
                 else:
                     # Create new
                     cached_commit = CachedCommit(
                         repo_path=repo_path,
-                        commit_hash=commit_data[
-                        author_name=commit_data.get(
-                        author_email=commit_data.get(
-                        message=commit_data.get(
-                        timestamp=commit_data.get(
-                        branch=commit_data.get(
-                        is_merge=commit_data.get(
-                        files_changed=commit_data.get(
-                        insertions=commit_data.get(
-                        deletions=commit_data.get(
-                        complexity_delta=commit_data.get(
-                        story_points=commit_data.get(
-                        ticket_references=commit_data.get(
+                        commit_hash=commit_data["hash"],
+                        author_name=commit_data.get("author_name"),
+                        author_email=commit_data.get("author_email"),
+                        message=commit_data.get("message"),
+                        timestamp=commit_data.get("timestamp"),
+                        branch=commit_data.get("branch"),
+                        is_merge=commit_data.get("is_merge", False),
+                        files_changed=commit_data.get("files_changed", 0),
+                        insertions=commit_data.get("insertions", 0),
+                        deletions=commit_data.get("deletions", 0),
+                        complexity_delta=commit_data.get("complexity_delta", 0.0),
+                        story_points=commit_data.get("story_points"),
+                        ticket_references=commit_data.get("ticket_references", []),
                     )
                     session.add(cached_commit)
 
-    def get_cached_pr(self, repo_path: str, pr_number: int) -> Optional[
+    def get_cached_pr(self, repo_path: str, pr_number: int) -> Optional[dict[str, Any]]:
         """Retrieve cached pull request data."""
         with self.get_session() as session:
-            cached =
+            cached = (
+                session.query(PullRequestCache)
+                .filter(
+                    and_(
+                        PullRequestCache.repo_path == repo_path,
+                        PullRequestCache.pr_number == pr_number,
+                    )
                 )
+                .first()
+            )
 
             if cached and not self._is_stale(cached.cached_at):
                 return self._pr_to_dict(cached)
 
             return None
 
-    def cache_pr(self, repo_path: str, pr_data:
+    def cache_pr(self, repo_path: str, pr_data: dict[str, Any]) -> None:
         """Cache pull request data."""
         with self.get_session() as session:
-                commit_hashes=pr_data.get('commit_hashes', [])
+            # Check if already exists
+            existing = (
+                session.query(PullRequestCache)
+                .filter(
+                    and_(
+                        PullRequestCache.repo_path == repo_path,
+                        PullRequestCache.pr_number == pr_data["number"],
+                    )
+                )
+                .first()
             )
+
+            if existing:
+                # Update existing
+                existing.title = pr_data.get("title")
+                existing.description = pr_data.get("description")
+                existing.author = pr_data.get("author")
+                existing.created_at = pr_data.get("created_at")
+                existing.merged_at = pr_data.get("merged_at")
+                existing.story_points = pr_data.get("story_points")
+                existing.labels = pr_data.get("labels", [])
+                existing.commit_hashes = pr_data.get("commit_hashes", [])
+                existing.cached_at = datetime.utcnow()
+            else:
+                # Create new
+                cached_pr = PullRequestCache(
+                    repo_path=repo_path,
+                    pr_number=pr_data["number"],
+                    title=pr_data.get("title"),
+                    description=pr_data.get("description"),
+                    author=pr_data.get("author"),
+                    created_at=pr_data.get("created_at"),
+                    merged_at=pr_data.get("merged_at"),
+                    story_points=pr_data.get("story_points"),
+                    labels=pr_data.get("labels", []),
+                    commit_hashes=pr_data.get("commit_hashes", []),
+                )
+                session.add(cached_pr)
+
+    def cache_issue(self, platform: str, issue_data: dict[str, Any]) -> None:
         """Cache issue data from various platforms."""
         with self.get_session() as session:
-                resolved_at=issue_data.get('resolved_at'),
-                story_points=issue_data.get('story_points'),
-                labels=issue_data.get('labels', []),
-                platform_data=issue_data.get('platform_data', {})
+            # Check if already exists
+            existing = (
+                session.query(IssueCache)
+                .filter(
+                    and_(
+                        IssueCache.platform == platform,
+                        IssueCache.issue_id == str(issue_data["id"]),
+                    )
+                )
+                .first()
             )
+
+            if existing:
+                # Update existing
+                existing.project_key = issue_data["project_key"]
+                existing.title = issue_data.get("title")
+                existing.description = issue_data.get("description")
+                existing.status = issue_data.get("status")
+                existing.assignee = issue_data.get("assignee")
+                existing.created_at = issue_data.get("created_at")
+                existing.updated_at = issue_data.get("updated_at")
+                existing.resolved_at = issue_data.get("resolved_at")
+                existing.story_points = issue_data.get("story_points")
+                existing.labels = issue_data.get("labels", [])
+                existing.platform_data = issue_data.get("platform_data", {})
+                existing.cached_at = datetime.utcnow()
+            else:
+                # Create new
+                cached_issue = IssueCache(
+                    platform=platform,
+                    issue_id=str(issue_data["id"]),
+                    project_key=issue_data["project_key"],
+                    title=issue_data.get("title"),
+                    description=issue_data.get("description"),
+                    status=issue_data.get("status"),
+                    assignee=issue_data.get("assignee"),
+                    created_at=issue_data.get("created_at"),
+                    updated_at=issue_data.get("updated_at"),
+                    resolved_at=issue_data.get("resolved_at"),
+                    story_points=issue_data.get("story_points"),
+                    labels=issue_data.get("labels", []),
+                    platform_data=issue_data.get("platform_data", {}),
+                )
+                session.add(cached_issue)
+
+    def get_cached_issues(self, platform: str, project_key: str) -> list[dict[str, Any]]:
         """Get all cached issues for a platform and project."""
         with self.get_session() as session:
-            issues =
-                    IssueCache.project_key == project_key
+            issues = (
+                session.query(IssueCache)
+                .filter(
+                    and_(IssueCache.platform == platform, IssueCache.project_key == project_key)
                 )
+                .all()
+            )
+
+            return [
+                self._issue_to_dict(issue)
+                for issue in issues
+                if not self._is_stale(issue.cached_at)
+            ]
+
+    def clear_stale_cache(self) -> None:
         """Remove stale cache entries."""
         cutoff_time = datetime.utcnow() - timedelta(hours=self.ttl_hours)
 
         with self.get_session() as session:
-            session.query(CachedCommit).filter(
-            ).delete()
+            session.query(CachedCommit).filter(CachedCommit.cached_at < cutoff_time).delete()
+
             session.query(PullRequestCache).filter(
                 PullRequestCache.cached_at < cutoff_time
             ).delete()
-            session.query(IssueCache).filter(
-    def get_cache_stats(self) -> Dict[str, int]:
+
+            session.query(IssueCache).filter(IssueCache.cached_at < cutoff_time).delete()
+
+    def get_cache_stats(self) -> dict[str, int]:
         """Get cache statistics."""
         with self.get_session() as session:
             stats = {
+                "cached_commits": session.query(CachedCommit).count(),
+                "cached_prs": session.query(PullRequestCache).count(),
+                "cached_issues": session.query(IssueCache).count(),
+                "stale_commits": session.query(CachedCommit)
+                .filter(
                     CachedCommit.cached_at < datetime.utcnow() - timedelta(hours=self.ttl_hours)
-                )
+                )
+                .count(),
             }
             return stats
 
     def _is_stale(self, cached_at: datetime) -> bool:
         """Check if cache entry is stale."""
         if self.ttl_hours == 0:  # No expiration
             return False
         return cached_at < datetime.utcnow() - timedelta(hours=self.ttl_hours)
 
-    def _commit_to_dict(self, commit: CachedCommit) ->
+    def _commit_to_dict(self, commit: CachedCommit) -> dict[str, Any]:
         """Convert CachedCommit to dictionary."""
         return {
+            "hash": commit.commit_hash,
+            "author_name": commit.author_name,
+            "author_email": commit.author_email,
+            "message": commit.message,
+            "timestamp": commit.timestamp,
+            "branch": commit.branch,
+            "is_merge": commit.is_merge,
+            "files_changed": commit.files_changed,
+            "insertions": commit.insertions,
+            "deletions": commit.deletions,
+            "complexity_delta": commit.complexity_delta,
+            "story_points": commit.story_points,
+            "ticket_references": commit.ticket_references or [],
         }
 
-    def _pr_to_dict(self, pr: PullRequestCache) ->
+    def _pr_to_dict(self, pr: PullRequestCache) -> dict[str, Any]:
         """Convert PullRequestCache to dictionary."""
         return {
+            "number": pr.pr_number,
+            "title": pr.title,
+            "description": pr.description,
+            "author": pr.author,
+            "created_at": pr.created_at,
+            "merged_at": pr.merged_at,
+            "story_points": pr.story_points,
+            "labels": pr.labels or [],
+            "commit_hashes": pr.commit_hashes or [],
         }
 
-    def _issue_to_dict(self, issue: IssueCache) ->
+    def _issue_to_dict(self, issue: IssueCache) -> dict[str, Any]:
         """Convert IssueCache to dictionary."""
         return {
-        }
+            "platform": issue.platform,
+            "id": issue.issue_id,
+            "project_key": issue.project_key,
+            "title": issue.title,
+            "description": issue.description,
+            "status": issue.status,
+            "assignee": issue.assignee,
+            "created_at": issue.created_at,
+            "updated_at": issue.updated_at,
+            "resolved_at": issue.resolved_at,
+            "story_points": issue.story_points,
+            "labels": issue.labels or [],
+            "platform_data": issue.platform_data or {},
+        }
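For orientation, here is a minimal usage sketch of the `GitAnalysisCache` API from the 1.0.3 version of this module. The import path, constructor, and method signatures follow the diff above; the cache directory, repository path, and commit fields are hypothetical illustration values, and the explicit `mkdir` is an assumption, since the diff does not show whether `Database` creates the directory itself.

```python
from datetime import datetime, timezone
from pathlib import Path

from gitflow_analytics.core.cache import GitAnalysisCache

# Hypothetical cache location; the cache keeps a SQLite file
# (gitflow_cache.db) inside this directory.
cache_dir = Path(".gitflow-cache")
cache_dir.mkdir(exist_ok=True)  # assumption: directory must exist before use

# ttl_hours=168 (one week) is the default; ttl_hours=0 disables expiration.
cache = GitAnalysisCache(cache_dir, ttl_hours=168)

# Cache one analyzed commit; keys mirror the CachedCommit columns used above.
commit = {
    "hash": "0123abcd",  # illustrative values only
    "author_name": "Jane Doe",
    "author_email": "jane@example.com",
    "message": "fix: handle empty diffs [PROJ-123]",
    "timestamp": datetime.now(timezone.utc),
    "files_changed": 2,
    "insertions": 10,
    "deletions": 3,
    "ticket_references": ["PROJ-123"],
}
cache.cache_commit("/repos/example", commit)

# Hits return a dict until the entry exceeds the TTL; misses return None.
print(cache.get_cached_commit("/repos/example", "0123abcd"))
print(cache.get_cache_stats())
```

The same pattern applies to `cache_pr`, `cache_issue`, and their lookup counterparts, which in 1.0.3 update an existing row when one is present instead of always inserting a new one.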