gitflow-analytics 1.0.0-py3-none-any.whl → 1.0.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. gitflow_analytics/__init__.py +11 -9
  2. gitflow_analytics/_version.py +2 -2
  3. gitflow_analytics/cli.py +691 -243
  4. gitflow_analytics/cli_rich.py +353 -0
  5. gitflow_analytics/config.py +389 -96
  6. gitflow_analytics/core/analyzer.py +175 -78
  7. gitflow_analytics/core/branch_mapper.py +132 -132
  8. gitflow_analytics/core/cache.py +242 -173
  9. gitflow_analytics/core/identity.py +214 -178
  10. gitflow_analytics/extractors/base.py +13 -11
  11. gitflow_analytics/extractors/story_points.py +70 -59
  12. gitflow_analytics/extractors/tickets.py +111 -88
  13. gitflow_analytics/integrations/github_integration.py +91 -77
  14. gitflow_analytics/integrations/jira_integration.py +284 -0
  15. gitflow_analytics/integrations/orchestrator.py +99 -72
  16. gitflow_analytics/metrics/dora.py +183 -179
  17. gitflow_analytics/models/database.py +191 -54
  18. gitflow_analytics/qualitative/__init__.py +30 -0
  19. gitflow_analytics/qualitative/classifiers/__init__.py +13 -0
  20. gitflow_analytics/qualitative/classifiers/change_type.py +468 -0
  21. gitflow_analytics/qualitative/classifiers/domain_classifier.py +399 -0
  22. gitflow_analytics/qualitative/classifiers/intent_analyzer.py +436 -0
  23. gitflow_analytics/qualitative/classifiers/risk_analyzer.py +412 -0
  24. gitflow_analytics/qualitative/core/__init__.py +13 -0
  25. gitflow_analytics/qualitative/core/llm_fallback.py +653 -0
  26. gitflow_analytics/qualitative/core/nlp_engine.py +373 -0
  27. gitflow_analytics/qualitative/core/pattern_cache.py +457 -0
  28. gitflow_analytics/qualitative/core/processor.py +540 -0
  29. gitflow_analytics/qualitative/models/__init__.py +25 -0
  30. gitflow_analytics/qualitative/models/schemas.py +272 -0
  31. gitflow_analytics/qualitative/utils/__init__.py +13 -0
  32. gitflow_analytics/qualitative/utils/batch_processor.py +326 -0
  33. gitflow_analytics/qualitative/utils/cost_tracker.py +343 -0
  34. gitflow_analytics/qualitative/utils/metrics.py +347 -0
  35. gitflow_analytics/qualitative/utils/text_processing.py +243 -0
  36. gitflow_analytics/reports/analytics_writer.py +25 -8
  37. gitflow_analytics/reports/csv_writer.py +60 -32
  38. gitflow_analytics/reports/narrative_writer.py +21 -15
  39. gitflow_analytics/tui/__init__.py +5 -0
  40. gitflow_analytics/tui/app.py +721 -0
  41. gitflow_analytics/tui/screens/__init__.py +8 -0
  42. gitflow_analytics/tui/screens/analysis_progress_screen.py +487 -0
  43. gitflow_analytics/tui/screens/configuration_screen.py +547 -0
  44. gitflow_analytics/tui/screens/loading_screen.py +358 -0
  45. gitflow_analytics/tui/screens/main_screen.py +304 -0
  46. gitflow_analytics/tui/screens/results_screen.py +698 -0
  47. gitflow_analytics/tui/widgets/__init__.py +7 -0
  48. gitflow_analytics/tui/widgets/data_table.py +257 -0
  49. gitflow_analytics/tui/widgets/export_modal.py +301 -0
  50. gitflow_analytics/tui/widgets/progress_widget.py +192 -0
  51. gitflow_analytics-1.0.3.dist-info/METADATA +490 -0
  52. gitflow_analytics-1.0.3.dist-info/RECORD +62 -0
  53. gitflow_analytics-1.0.0.dist-info/METADATA +0 -201
  54. gitflow_analytics-1.0.0.dist-info/RECORD +0 -30
  55. {gitflow_analytics-1.0.0.dist-info → gitflow_analytics-1.0.3.dist-info}/WHEEL +0 -0
  56. {gitflow_analytics-1.0.0.dist-info → gitflow_analytics-1.0.3.dist-info}/entry_points.txt +0 -0
  57. {gitflow_analytics-1.0.0.dist-info → gitflow_analytics-1.0.3.dist-info}/licenses/LICENSE +0 -0
  58. {gitflow_analytics-1.0.0.dist-info → gitflow_analytics-1.0.3.dist-info}/top_level.txt +0 -0
gitflow_analytics/core/cache.py
@@ -1,27 +1,26 @@
 """Caching layer for Git analysis with SQLite backend."""
-import hashlib
+
+from contextlib import contextmanager
 from datetime import datetime, timedelta
-from typing import List, Optional, Dict, Any
 from pathlib import Path
-from contextlib import contextmanager
+from typing import Any, Optional

-from sqlalchemy.orm import Session
 from sqlalchemy import and_

-from ..models.database import Database, CachedCommit, PullRequestCache, IssueCache
+from ..models.database import CachedCommit, Database, IssueCache, PullRequestCache


 class GitAnalysisCache:
     """Cache for Git analysis results."""
-
-    def __init__(self, cache_dir: Path, ttl_hours: int = 168):
+
+    def __init__(self, cache_dir: Path, ttl_hours: int = 168) -> None:
         """Initialize cache with SQLite backend."""
         self.cache_dir = cache_dir
         self.ttl_hours = ttl_hours
-        self.db = Database(cache_dir / 'gitflow_cache.db')
-
+        self.db = Database(cache_dir / "gitflow_cache.db")
+
     @contextmanager
-    def get_session(self):
+    def get_session(self) -> Any:
         """Get database session context manager."""
         session = self.db.get_session()
         try:
@@ -32,33 +31,40 @@ class GitAnalysisCache:
             raise
         finally:
             session.close()
-
-    def get_cached_commit(self, repo_path: str, commit_hash: str) -> Optional[Dict[str, Any]]:
+
+    def get_cached_commit(self, repo_path: str, commit_hash: str) -> Optional[dict[str, Any]]:
         """Retrieve cached commit data if not stale."""
         with self.get_session() as session:
-            cached = session.query(CachedCommit).filter(
-                and_(
-                    CachedCommit.repo_path == repo_path,
-                    CachedCommit.commit_hash == commit_hash
+            cached = (
+                session.query(CachedCommit)
+                .filter(
+                    and_(
+                        CachedCommit.repo_path == repo_path, CachedCommit.commit_hash == commit_hash
+                    )
                 )
-            ).first()
-
+                .first()
+            )
+
             if cached and not self._is_stale(cached.cached_at):
                 return self._commit_to_dict(cached)
-
+
             return None
-
-    def cache_commit(self, repo_path: str, commit_data: Dict[str, Any]):
+
+    def cache_commit(self, repo_path: str, commit_data: dict[str, Any]) -> None:
         """Cache commit analysis results."""
         with self.get_session() as session:
             # Check if already exists
-            existing = session.query(CachedCommit).filter(
-                and_(
-                    CachedCommit.repo_path == repo_path,
-                    CachedCommit.commit_hash == commit_data['hash']
+            existing = (
+                session.query(CachedCommit)
+                .filter(
+                    and_(
+                        CachedCommit.repo_path == repo_path,
+                        CachedCommit.commit_hash == commit_data["hash"],
+                    )
                 )
-            ).first()
-
+                .first()
+            )
+
             if existing:
                 # Update existing
                 for key, value in commit_data.items():
@@ -69,207 +75,270 @@ class GitAnalysisCache:
                 # Create new
                 cached_commit = CachedCommit(
                     repo_path=repo_path,
-                    commit_hash=commit_data['hash'],
-                    author_name=commit_data.get('author_name'),
-                    author_email=commit_data.get('author_email'),
-                    message=commit_data.get('message'),
-                    timestamp=commit_data.get('timestamp'),
-                    branch=commit_data.get('branch'),
-                    is_merge=commit_data.get('is_merge', False),
-                    files_changed=commit_data.get('files_changed', 0),
-                    insertions=commit_data.get('insertions', 0),
-                    deletions=commit_data.get('deletions', 0),
-                    complexity_delta=commit_data.get('complexity_delta', 0.0),
-                    story_points=commit_data.get('story_points'),
-                    ticket_references=commit_data.get('ticket_references', [])
+                    commit_hash=commit_data["hash"],
+                    author_name=commit_data.get("author_name"),
+                    author_email=commit_data.get("author_email"),
+                    message=commit_data.get("message"),
+                    timestamp=commit_data.get("timestamp"),
+                    branch=commit_data.get("branch"),
+                    is_merge=commit_data.get("is_merge", False),
+                    files_changed=commit_data.get("files_changed", 0),
+                    insertions=commit_data.get("insertions", 0),
+                    deletions=commit_data.get("deletions", 0),
+                    complexity_delta=commit_data.get("complexity_delta", 0.0),
+                    story_points=commit_data.get("story_points"),
+                    ticket_references=commit_data.get("ticket_references", []),
                 )
                 session.add(cached_commit)
-
-    def cache_commits_batch(self, repo_path: str, commits: List[Dict[str, Any]]):
+
+    def cache_commits_batch(self, repo_path: str, commits: list[dict[str, Any]]) -> None:
         """Cache multiple commits in a single transaction."""
         with self.get_session() as session:
             for commit_data in commits:
                 # Check if already exists
-                existing = session.query(CachedCommit).filter(
-                    and_(
-                        CachedCommit.repo_path == repo_path,
-                        CachedCommit.commit_hash == commit_data['hash']
+                existing = (
+                    session.query(CachedCommit)
+                    .filter(
+                        and_(
+                            CachedCommit.repo_path == repo_path,
+                            CachedCommit.commit_hash == commit_data["hash"],
+                        )
                     )
-                ).first()
-
+                    .first()
+                )
+
                 if existing:
                     # Update existing
                     for key, value in commit_data.items():
-                        if key != 'hash' and hasattr(existing, key):
+                        if key != "hash" and hasattr(existing, key):
                             setattr(existing, key, value)
                     existing.cached_at = datetime.utcnow()
                 else:
                     # Create new
                     cached_commit = CachedCommit(
                         repo_path=repo_path,
-                        commit_hash=commit_data['hash'],
-                        author_name=commit_data.get('author_name'),
-                        author_email=commit_data.get('author_email'),
-                        message=commit_data.get('message'),
-                        timestamp=commit_data.get('timestamp'),
-                        branch=commit_data.get('branch'),
-                        is_merge=commit_data.get('is_merge', False),
-                        files_changed=commit_data.get('files_changed', 0),
-                        insertions=commit_data.get('insertions', 0),
-                        deletions=commit_data.get('deletions', 0),
-                        complexity_delta=commit_data.get('complexity_delta', 0.0),
-                        story_points=commit_data.get('story_points'),
-                        ticket_references=commit_data.get('ticket_references', [])
+                        commit_hash=commit_data["hash"],
+                        author_name=commit_data.get("author_name"),
+                        author_email=commit_data.get("author_email"),
+                        message=commit_data.get("message"),
+                        timestamp=commit_data.get("timestamp"),
+                        branch=commit_data.get("branch"),
+                        is_merge=commit_data.get("is_merge", False),
+                        files_changed=commit_data.get("files_changed", 0),
+                        insertions=commit_data.get("insertions", 0),
+                        deletions=commit_data.get("deletions", 0),
+                        complexity_delta=commit_data.get("complexity_delta", 0.0),
+                        story_points=commit_data.get("story_points"),
+                        ticket_references=commit_data.get("ticket_references", []),
                     )
                     session.add(cached_commit)
-
-    def get_cached_pr(self, repo_path: str, pr_number: int) -> Optional[Dict[str, Any]]:
+
+    def get_cached_pr(self, repo_path: str, pr_number: int) -> Optional[dict[str, Any]]:
         """Retrieve cached pull request data."""
         with self.get_session() as session:
-            cached = session.query(PullRequestCache).filter(
-                and_(
-                    PullRequestCache.repo_path == repo_path,
-                    PullRequestCache.pr_number == pr_number
+            cached = (
+                session.query(PullRequestCache)
+                .filter(
+                    and_(
+                        PullRequestCache.repo_path == repo_path,
+                        PullRequestCache.pr_number == pr_number,
+                    )
                 )
-            ).first()
-
+                .first()
+            )
+
             if cached and not self._is_stale(cached.cached_at):
                 return self._pr_to_dict(cached)
-
+
             return None
-
-    def cache_pr(self, repo_path: str, pr_data: Dict[str, Any]):
+
+    def cache_pr(self, repo_path: str, pr_data: dict[str, Any]) -> None:
         """Cache pull request data."""
         with self.get_session() as session:
-            cached_pr = PullRequestCache(
-                repo_path=repo_path,
-                pr_number=pr_data['number'],
-                title=pr_data.get('title'),
-                description=pr_data.get('description'),
-                author=pr_data.get('author'),
-                created_at=pr_data.get('created_at'),
-                merged_at=pr_data.get('merged_at'),
-                story_points=pr_data.get('story_points'),
-                labels=pr_data.get('labels', []),
-                commit_hashes=pr_data.get('commit_hashes', [])
+            # Check if already exists
+            existing = (
+                session.query(PullRequestCache)
+                .filter(
+                    and_(
+                        PullRequestCache.repo_path == repo_path,
+                        PullRequestCache.pr_number == pr_data["number"],
+                    )
+                )
+                .first()
             )
-            session.merge(cached_pr)
-
-    def cache_issue(self, platform: str, issue_data: Dict[str, Any]):
+
+            if existing:
+                # Update existing
+                existing.title = pr_data.get("title")
+                existing.description = pr_data.get("description")
+                existing.author = pr_data.get("author")
+                existing.created_at = pr_data.get("created_at")
+                existing.merged_at = pr_data.get("merged_at")
+                existing.story_points = pr_data.get("story_points")
+                existing.labels = pr_data.get("labels", [])
+                existing.commit_hashes = pr_data.get("commit_hashes", [])
+                existing.cached_at = datetime.utcnow()
+            else:
+                # Create new
+                cached_pr = PullRequestCache(
+                    repo_path=repo_path,
+                    pr_number=pr_data["number"],
+                    title=pr_data.get("title"),
+                    description=pr_data.get("description"),
+                    author=pr_data.get("author"),
+                    created_at=pr_data.get("created_at"),
+                    merged_at=pr_data.get("merged_at"),
+                    story_points=pr_data.get("story_points"),
+                    labels=pr_data.get("labels", []),
+                    commit_hashes=pr_data.get("commit_hashes", []),
+                )
+                session.add(cached_pr)
+
+    def cache_issue(self, platform: str, issue_data: dict[str, Any]) -> None:
         """Cache issue data from various platforms."""
         with self.get_session() as session:
-            cached_issue = IssueCache(
-                platform=platform,
-                issue_id=str(issue_data['id']),
-                project_key=issue_data['project_key'],
-                title=issue_data.get('title'),
-                description=issue_data.get('description'),
-                status=issue_data.get('status'),
-                assignee=issue_data.get('assignee'),
-                created_at=issue_data.get('created_at'),
-                updated_at=issue_data.get('updated_at'),
-                resolved_at=issue_data.get('resolved_at'),
-                story_points=issue_data.get('story_points'),
-                labels=issue_data.get('labels', []),
-                platform_data=issue_data.get('platform_data', {})
+            # Check if already exists
+            existing = (
+                session.query(IssueCache)
+                .filter(
+                    and_(
+                        IssueCache.platform == platform,
+                        IssueCache.issue_id == str(issue_data["id"]),
+                    )
+                )
+                .first()
             )
-            session.merge(cached_issue)
-
-    def get_cached_issues(self, platform: str, project_key: str) -> List[Dict[str, Any]]:
+
+            if existing:
+                # Update existing
+                existing.project_key = issue_data["project_key"]
+                existing.title = issue_data.get("title")
+                existing.description = issue_data.get("description")
+                existing.status = issue_data.get("status")
+                existing.assignee = issue_data.get("assignee")
+                existing.created_at = issue_data.get("created_at")
+                existing.updated_at = issue_data.get("updated_at")
+                existing.resolved_at = issue_data.get("resolved_at")
+                existing.story_points = issue_data.get("story_points")
+                existing.labels = issue_data.get("labels", [])
+                existing.platform_data = issue_data.get("platform_data", {})
+                existing.cached_at = datetime.utcnow()
+            else:
+                # Create new
+                cached_issue = IssueCache(
+                    platform=platform,
+                    issue_id=str(issue_data["id"]),
+                    project_key=issue_data["project_key"],
+                    title=issue_data.get("title"),
+                    description=issue_data.get("description"),
+                    status=issue_data.get("status"),
+                    assignee=issue_data.get("assignee"),
+                    created_at=issue_data.get("created_at"),
+                    updated_at=issue_data.get("updated_at"),
+                    resolved_at=issue_data.get("resolved_at"),
+                    story_points=issue_data.get("story_points"),
+                    labels=issue_data.get("labels", []),
+                    platform_data=issue_data.get("platform_data", {}),
+                )
+                session.add(cached_issue)
+
+    def get_cached_issues(self, platform: str, project_key: str) -> list[dict[str, Any]]:
         """Get all cached issues for a platform and project."""
         with self.get_session() as session:
-            issues = session.query(IssueCache).filter(
-                and_(
-                    IssueCache.platform == platform,
-                    IssueCache.project_key == project_key
+            issues = (
+                session.query(IssueCache)
+                .filter(
+                    and_(IssueCache.platform == platform, IssueCache.project_key == project_key)
                 )
-            ).all()
-
-            return [self._issue_to_dict(issue) for issue in issues
-                    if not self._is_stale(issue.cached_at)]
-
-    def clear_stale_cache(self):
+                .all()
+            )
+
+            return [
+                self._issue_to_dict(issue)
+                for issue in issues
+                if not self._is_stale(issue.cached_at)
+            ]
+
+    def clear_stale_cache(self) -> None:
         """Remove stale cache entries."""
         cutoff_time = datetime.utcnow() - timedelta(hours=self.ttl_hours)
-
+
         with self.get_session() as session:
-            session.query(CachedCommit).filter(
-                CachedCommit.cached_at < cutoff_time
-            ).delete()
-
+            session.query(CachedCommit).filter(CachedCommit.cached_at < cutoff_time).delete()
+
             session.query(PullRequestCache).filter(
                 PullRequestCache.cached_at < cutoff_time
             ).delete()
-
-            session.query(IssueCache).filter(
-                IssueCache.cached_at < cutoff_time
-            ).delete()
-
-    def get_cache_stats(self) -> Dict[str, int]:
+
+            session.query(IssueCache).filter(IssueCache.cached_at < cutoff_time).delete()
+
+    def get_cache_stats(self) -> dict[str, int]:
         """Get cache statistics."""
         with self.get_session() as session:
             stats = {
-                'cached_commits': session.query(CachedCommit).count(),
-                'cached_prs': session.query(PullRequestCache).count(),
-                'cached_issues': session.query(IssueCache).count(),
-                'stale_commits': session.query(CachedCommit).filter(
+                "cached_commits": session.query(CachedCommit).count(),
+                "cached_prs": session.query(PullRequestCache).count(),
+                "cached_issues": session.query(IssueCache).count(),
+                "stale_commits": session.query(CachedCommit)
+                .filter(
                     CachedCommit.cached_at < datetime.utcnow() - timedelta(hours=self.ttl_hours)
-                ).count()
+                )
+                .count(),
             }
             return stats
-
+
     def _is_stale(self, cached_at: datetime) -> bool:
         """Check if cache entry is stale."""
         if self.ttl_hours == 0:  # No expiration
             return False
         return cached_at < datetime.utcnow() - timedelta(hours=self.ttl_hours)
-
-    def _commit_to_dict(self, commit: CachedCommit) -> Dict[str, Any]:
+
+    def _commit_to_dict(self, commit: CachedCommit) -> dict[str, Any]:
         """Convert CachedCommit to dictionary."""
         return {
-            'hash': commit.commit_hash,
-            'author_name': commit.author_name,
-            'author_email': commit.author_email,
-            'message': commit.message,
-            'timestamp': commit.timestamp,
-            'branch': commit.branch,
-            'is_merge': commit.is_merge,
-            'files_changed': commit.files_changed,
-            'insertions': commit.insertions,
-            'deletions': commit.deletions,
-            'complexity_delta': commit.complexity_delta,
-            'story_points': commit.story_points,
-            'ticket_references': commit.ticket_references or []
+            "hash": commit.commit_hash,
+            "author_name": commit.author_name,
+            "author_email": commit.author_email,
+            "message": commit.message,
+            "timestamp": commit.timestamp,
+            "branch": commit.branch,
+            "is_merge": commit.is_merge,
+            "files_changed": commit.files_changed,
+            "insertions": commit.insertions,
+            "deletions": commit.deletions,
+            "complexity_delta": commit.complexity_delta,
+            "story_points": commit.story_points,
+            "ticket_references": commit.ticket_references or [],
         }
-
-    def _pr_to_dict(self, pr: PullRequestCache) -> Dict[str, Any]:
+
+    def _pr_to_dict(self, pr: PullRequestCache) -> dict[str, Any]:
         """Convert PullRequestCache to dictionary."""
         return {
-            'number': pr.pr_number,
-            'title': pr.title,
-            'description': pr.description,
-            'author': pr.author,
-            'created_at': pr.created_at,
-            'merged_at': pr.merged_at,
-            'story_points': pr.story_points,
-            'labels': pr.labels or [],
-            'commit_hashes': pr.commit_hashes or []
+            "number": pr.pr_number,
+            "title": pr.title,
+            "description": pr.description,
+            "author": pr.author,
+            "created_at": pr.created_at,
+            "merged_at": pr.merged_at,
+            "story_points": pr.story_points,
+            "labels": pr.labels or [],
+            "commit_hashes": pr.commit_hashes or [],
         }
-
-    def _issue_to_dict(self, issue: IssueCache) -> Dict[str, Any]:
+
+    def _issue_to_dict(self, issue: IssueCache) -> dict[str, Any]:
         """Convert IssueCache to dictionary."""
         return {
-            'platform': issue.platform,
-            'id': issue.issue_id,
-            'project_key': issue.project_key,
-            'title': issue.title,
-            'description': issue.description,
-            'status': issue.status,
-            'assignee': issue.assignee,
-            'created_at': issue.created_at,
-            'updated_at': issue.updated_at,
-            'resolved_at': issue.resolved_at,
-            'story_points': issue.story_points,
-            'labels': issue.labels or [],
-            'platform_data': issue.platform_data or {}
-        }
+            "platform": issue.platform,
+            "id": issue.issue_id,
+            "project_key": issue.project_key,
+            "title": issue.title,
+            "description": issue.description,
+            "status": issue.status,
+            "assignee": issue.assignee,
+            "created_at": issue.created_at,
+            "updated_at": issue.updated_at,
+            "resolved_at": issue.resolved_at,
+            "story_points": issue.story_points,
+            "labels": issue.labels or [],
+            "platform_data": issue.platform_data or {},
+        }
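
For orientation, a minimal usage sketch of the reworked cache layer, based only on the class and method signatures visible in the diff above; the cache directory, repository path, and commit payload are illustrative values, not data shipped with the package.

from datetime import datetime
from pathlib import Path

from gitflow_analytics.core.cache import GitAnalysisCache

# Illustrative cache location; per __init__, the SQLite file gitflow_cache.db lives inside it.
cache_dir = Path(".gitflow-cache")
cache_dir.mkdir(parents=True, exist_ok=True)

# ttl_hours=168 is the default shown in __init__; 0 disables expiration (see _is_stale).
cache = GitAnalysisCache(cache_dir, ttl_hours=168)

# Keys mirror the columns written by cache_commit(); "hash" is the only key read directly,
# everything else is fetched with .get() and therefore optional.
commit = {
    "hash": "0123abcd",
    "author_name": "Jane Doe",
    "author_email": "jane@example.com",
    "message": "fix: tolerate empty repositories",
    "timestamp": datetime.utcnow(),
    "files_changed": 3,
    "insertions": 42,
    "deletions": 7,
    "ticket_references": ["PROJ-123"],
}

cache.cache_commit("/path/to/repo", commit)  # insert-or-update behavior in 1.0.3
hit = cache.get_cached_commit("/path/to/repo", "0123abcd")  # dict, or None if missing/stale
print(hit["message"] if hit else "cache miss")
print(cache.get_cache_stats())  # cached_commits / cached_prs / cached_issues / stale_commits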