gitflow-analytics 1.0.1__py3-none-any.whl → 1.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. gitflow_analytics/__init__.py +11 -11
  2. gitflow_analytics/_version.py +2 -2
  3. gitflow_analytics/cli.py +612 -258
  4. gitflow_analytics/cli_rich.py +353 -0
  5. gitflow_analytics/config.py +251 -141
  6. gitflow_analytics/core/analyzer.py +140 -103
  7. gitflow_analytics/core/branch_mapper.py +132 -132
  8. gitflow_analytics/core/cache.py +240 -169
  9. gitflow_analytics/core/identity.py +210 -173
  10. gitflow_analytics/extractors/base.py +13 -11
  11. gitflow_analytics/extractors/story_points.py +70 -59
  12. gitflow_analytics/extractors/tickets.py +101 -87
  13. gitflow_analytics/integrations/github_integration.py +84 -77
  14. gitflow_analytics/integrations/jira_integration.py +116 -104
  15. gitflow_analytics/integrations/orchestrator.py +86 -85
  16. gitflow_analytics/metrics/dora.py +181 -177
  17. gitflow_analytics/models/database.py +190 -53
  18. gitflow_analytics/qualitative/__init__.py +30 -0
  19. gitflow_analytics/qualitative/classifiers/__init__.py +13 -0
  20. gitflow_analytics/qualitative/classifiers/change_type.py +468 -0
  21. gitflow_analytics/qualitative/classifiers/domain_classifier.py +399 -0
  22. gitflow_analytics/qualitative/classifiers/intent_analyzer.py +436 -0
  23. gitflow_analytics/qualitative/classifiers/risk_analyzer.py +412 -0
  24. gitflow_analytics/qualitative/core/__init__.py +13 -0
  25. gitflow_analytics/qualitative/core/llm_fallback.py +653 -0
  26. gitflow_analytics/qualitative/core/nlp_engine.py +373 -0
  27. gitflow_analytics/qualitative/core/pattern_cache.py +457 -0
  28. gitflow_analytics/qualitative/core/processor.py +540 -0
  29. gitflow_analytics/qualitative/models/__init__.py +25 -0
  30. gitflow_analytics/qualitative/models/schemas.py +272 -0
  31. gitflow_analytics/qualitative/utils/__init__.py +13 -0
  32. gitflow_analytics/qualitative/utils/batch_processor.py +326 -0
  33. gitflow_analytics/qualitative/utils/cost_tracker.py +343 -0
  34. gitflow_analytics/qualitative/utils/metrics.py +347 -0
  35. gitflow_analytics/qualitative/utils/text_processing.py +243 -0
  36. gitflow_analytics/reports/analytics_writer.py +11 -4
  37. gitflow_analytics/reports/csv_writer.py +51 -31
  38. gitflow_analytics/reports/narrative_writer.py +16 -14
  39. gitflow_analytics/tui/__init__.py +5 -0
  40. gitflow_analytics/tui/app.py +721 -0
  41. gitflow_analytics/tui/screens/__init__.py +8 -0
  42. gitflow_analytics/tui/screens/analysis_progress_screen.py +487 -0
  43. gitflow_analytics/tui/screens/configuration_screen.py +547 -0
  44. gitflow_analytics/tui/screens/loading_screen.py +358 -0
  45. gitflow_analytics/tui/screens/main_screen.py +304 -0
  46. gitflow_analytics/tui/screens/results_screen.py +698 -0
  47. gitflow_analytics/tui/widgets/__init__.py +7 -0
  48. gitflow_analytics/tui/widgets/data_table.py +257 -0
  49. gitflow_analytics/tui/widgets/export_modal.py +301 -0
  50. gitflow_analytics/tui/widgets/progress_widget.py +192 -0
  51. {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.0.3.dist-info}/METADATA +31 -4
  52. gitflow_analytics-1.0.3.dist-info/RECORD +62 -0
  53. gitflow_analytics-1.0.1.dist-info/RECORD +0 -31
  54. {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.0.3.dist-info}/WHEEL +0 -0
  55. {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.0.3.dist-info}/entry_points.txt +0 -0
  56. {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.0.3.dist-info}/licenses/LICENSE +0 -0
  57. {gitflow_analytics-1.0.1.dist-info → gitflow_analytics-1.0.3.dist-info}/top_level.txt +0 -0
@@ -1,8 +1,9 @@
 """Caching layer for Git analysis with SQLite backend."""
+
 from contextlib import contextmanager
 from datetime import datetime, timedelta
 from pathlib import Path
-from typing import Any, Dict, List, Optional
+from typing import Any, Optional

 from sqlalchemy import and_

@@ -11,15 +12,15 @@ from ..models.database import CachedCommit, Database, IssueCache, PullRequestCac

 class GitAnalysisCache:
     """Cache for Git analysis results."""
-
-    def __init__(self, cache_dir: Path, ttl_hours: int = 168):
+
+    def __init__(self, cache_dir: Path, ttl_hours: int = 168) -> None:
         """Initialize cache with SQLite backend."""
         self.cache_dir = cache_dir
         self.ttl_hours = ttl_hours
-        self.db = Database(cache_dir / 'gitflow_cache.db')
-
+        self.db = Database(cache_dir / "gitflow_cache.db")
+
     @contextmanager
-    def get_session(self):
+    def get_session(self) -> Any:
         """Get database session context manager."""
         session = self.db.get_session()
         try:
@@ -30,33 +31,40 @@ class GitAnalysisCache:
             raise
         finally:
             session.close()
-
-    def get_cached_commit(self, repo_path: str, commit_hash: str) -> Optional[Dict[str, Any]]:
+
+    def get_cached_commit(self, repo_path: str, commit_hash: str) -> Optional[dict[str, Any]]:
         """Retrieve cached commit data if not stale."""
         with self.get_session() as session:
-            cached = session.query(CachedCommit).filter(
-                and_(
-                    CachedCommit.repo_path == repo_path,
-                    CachedCommit.commit_hash == commit_hash
+            cached = (
+                session.query(CachedCommit)
+                .filter(
+                    and_(
+                        CachedCommit.repo_path == repo_path, CachedCommit.commit_hash == commit_hash
+                    )
                 )
-            ).first()
-
+                .first()
+            )
+
             if cached and not self._is_stale(cached.cached_at):
                 return self._commit_to_dict(cached)
-
+
             return None
-
-    def cache_commit(self, repo_path: str, commit_data: Dict[str, Any]):
+
+    def cache_commit(self, repo_path: str, commit_data: dict[str, Any]) -> None:
         """Cache commit analysis results."""
         with self.get_session() as session:
             # Check if already exists
-            existing = session.query(CachedCommit).filter(
-                and_(
-                    CachedCommit.repo_path == repo_path,
-                    CachedCommit.commit_hash == commit_data['hash']
+            existing = (
+                session.query(CachedCommit)
+                .filter(
+                    and_(
+                        CachedCommit.repo_path == repo_path,
+                        CachedCommit.commit_hash == commit_data["hash"],
+                    )
                 )
-            ).first()
-
+                .first()
+            )
+
             if existing:
                 # Update existing
                 for key, value in commit_data.items():
@@ -67,207 +75,270 @@ class GitAnalysisCache:
                 # Create new
                 cached_commit = CachedCommit(
                     repo_path=repo_path,
-                    commit_hash=commit_data['hash'],
-                    author_name=commit_data.get('author_name'),
-                    author_email=commit_data.get('author_email'),
-                    message=commit_data.get('message'),
-                    timestamp=commit_data.get('timestamp'),
-                    branch=commit_data.get('branch'),
-                    is_merge=commit_data.get('is_merge', False),
-                    files_changed=commit_data.get('files_changed', 0),
-                    insertions=commit_data.get('insertions', 0),
-                    deletions=commit_data.get('deletions', 0),
-                    complexity_delta=commit_data.get('complexity_delta', 0.0),
-                    story_points=commit_data.get('story_points'),
-                    ticket_references=commit_data.get('ticket_references', [])
+                    commit_hash=commit_data["hash"],
+                    author_name=commit_data.get("author_name"),
+                    author_email=commit_data.get("author_email"),
+                    message=commit_data.get("message"),
+                    timestamp=commit_data.get("timestamp"),
+                    branch=commit_data.get("branch"),
+                    is_merge=commit_data.get("is_merge", False),
+                    files_changed=commit_data.get("files_changed", 0),
+                    insertions=commit_data.get("insertions", 0),
+                    deletions=commit_data.get("deletions", 0),
+                    complexity_delta=commit_data.get("complexity_delta", 0.0),
+                    story_points=commit_data.get("story_points"),
+                    ticket_references=commit_data.get("ticket_references", []),
                 )
                 session.add(cached_commit)
-
-    def cache_commits_batch(self, repo_path: str, commits: List[Dict[str, Any]]):
+
+    def cache_commits_batch(self, repo_path: str, commits: list[dict[str, Any]]) -> None:
         """Cache multiple commits in a single transaction."""
         with self.get_session() as session:
             for commit_data in commits:
                 # Check if already exists
-                existing = session.query(CachedCommit).filter(
-                    and_(
-                        CachedCommit.repo_path == repo_path,
-                        CachedCommit.commit_hash == commit_data['hash']
+                existing = (
+                    session.query(CachedCommit)
+                    .filter(
+                        and_(
+                            CachedCommit.repo_path == repo_path,
+                            CachedCommit.commit_hash == commit_data["hash"],
+                        )
                     )
-                ).first()
-
+                    .first()
+                )
+
                 if existing:
                     # Update existing
                     for key, value in commit_data.items():
-                        if key != 'hash' and hasattr(existing, key):
+                        if key != "hash" and hasattr(existing, key):
                             setattr(existing, key, value)
                     existing.cached_at = datetime.utcnow()
                 else:
                     # Create new
                     cached_commit = CachedCommit(
                         repo_path=repo_path,
-                        commit_hash=commit_data['hash'],
-                        author_name=commit_data.get('author_name'),
-                        author_email=commit_data.get('author_email'),
-                        message=commit_data.get('message'),
-                        timestamp=commit_data.get('timestamp'),
-                        branch=commit_data.get('branch'),
-                        is_merge=commit_data.get('is_merge', False),
-                        files_changed=commit_data.get('files_changed', 0),
-                        insertions=commit_data.get('insertions', 0),
-                        deletions=commit_data.get('deletions', 0),
-                        complexity_delta=commit_data.get('complexity_delta', 0.0),
-                        story_points=commit_data.get('story_points'),
-                        ticket_references=commit_data.get('ticket_references', [])
+                        commit_hash=commit_data["hash"],
+                        author_name=commit_data.get("author_name"),
+                        author_email=commit_data.get("author_email"),
+                        message=commit_data.get("message"),
+                        timestamp=commit_data.get("timestamp"),
+                        branch=commit_data.get("branch"),
+                        is_merge=commit_data.get("is_merge", False),
+                        files_changed=commit_data.get("files_changed", 0),
+                        insertions=commit_data.get("insertions", 0),
+                        deletions=commit_data.get("deletions", 0),
+                        complexity_delta=commit_data.get("complexity_delta", 0.0),
+                        story_points=commit_data.get("story_points"),
+                        ticket_references=commit_data.get("ticket_references", []),
                     )
                     session.add(cached_commit)
-
-    def get_cached_pr(self, repo_path: str, pr_number: int) -> Optional[Dict[str, Any]]:
+
+    def get_cached_pr(self, repo_path: str, pr_number: int) -> Optional[dict[str, Any]]:
         """Retrieve cached pull request data."""
         with self.get_session() as session:
-            cached = session.query(PullRequestCache).filter(
-                and_(
-                    PullRequestCache.repo_path == repo_path,
-                    PullRequestCache.pr_number == pr_number
+            cached = (
+                session.query(PullRequestCache)
+                .filter(
+                    and_(
+                        PullRequestCache.repo_path == repo_path,
+                        PullRequestCache.pr_number == pr_number,
+                    )
                 )
-            ).first()
-
+                .first()
+            )
+
             if cached and not self._is_stale(cached.cached_at):
                 return self._pr_to_dict(cached)
-
+
             return None
-
-    def cache_pr(self, repo_path: str, pr_data: Dict[str, Any]):
+
+    def cache_pr(self, repo_path: str, pr_data: dict[str, Any]) -> None:
         """Cache pull request data."""
         with self.get_session() as session:
-            cached_pr = PullRequestCache(
-                repo_path=repo_path,
-                pr_number=pr_data['number'],
-                title=pr_data.get('title'),
-                description=pr_data.get('description'),
-                author=pr_data.get('author'),
-                created_at=pr_data.get('created_at'),
-                merged_at=pr_data.get('merged_at'),
-                story_points=pr_data.get('story_points'),
-                labels=pr_data.get('labels', []),
-                commit_hashes=pr_data.get('commit_hashes', [])
+            # Check if already exists
+            existing = (
+                session.query(PullRequestCache)
+                .filter(
+                    and_(
+                        PullRequestCache.repo_path == repo_path,
+                        PullRequestCache.pr_number == pr_data["number"],
+                    )
+                )
+                .first()
             )
-            session.merge(cached_pr)
-
-    def cache_issue(self, platform: str, issue_data: Dict[str, Any]):
+
+            if existing:
+                # Update existing
+                existing.title = pr_data.get("title")
+                existing.description = pr_data.get("description")
+                existing.author = pr_data.get("author")
+                existing.created_at = pr_data.get("created_at")
+                existing.merged_at = pr_data.get("merged_at")
+                existing.story_points = pr_data.get("story_points")
+                existing.labels = pr_data.get("labels", [])
+                existing.commit_hashes = pr_data.get("commit_hashes", [])
+                existing.cached_at = datetime.utcnow()
+            else:
+                # Create new
+                cached_pr = PullRequestCache(
+                    repo_path=repo_path,
+                    pr_number=pr_data["number"],
+                    title=pr_data.get("title"),
+                    description=pr_data.get("description"),
+                    author=pr_data.get("author"),
+                    created_at=pr_data.get("created_at"),
+                    merged_at=pr_data.get("merged_at"),
+                    story_points=pr_data.get("story_points"),
+                    labels=pr_data.get("labels", []),
+                    commit_hashes=pr_data.get("commit_hashes", []),
+                )
+                session.add(cached_pr)
+
+    def cache_issue(self, platform: str, issue_data: dict[str, Any]) -> None:
         """Cache issue data from various platforms."""
         with self.get_session() as session:
-            cached_issue = IssueCache(
-                platform=platform,
-                issue_id=str(issue_data['id']),
-                project_key=issue_data['project_key'],
-                title=issue_data.get('title'),
-                description=issue_data.get('description'),
-                status=issue_data.get('status'),
-                assignee=issue_data.get('assignee'),
-                created_at=issue_data.get('created_at'),
-                updated_at=issue_data.get('updated_at'),
-                resolved_at=issue_data.get('resolved_at'),
-                story_points=issue_data.get('story_points'),
-                labels=issue_data.get('labels', []),
-                platform_data=issue_data.get('platform_data', {})
+            # Check if already exists
+            existing = (
+                session.query(IssueCache)
+                .filter(
+                    and_(
+                        IssueCache.platform == platform,
+                        IssueCache.issue_id == str(issue_data["id"]),
+                    )
+                )
+                .first()
             )
-            session.merge(cached_issue)
-
-    def get_cached_issues(self, platform: str, project_key: str) -> List[Dict[str, Any]]:
+
+            if existing:
+                # Update existing
+                existing.project_key = issue_data["project_key"]
+                existing.title = issue_data.get("title")
+                existing.description = issue_data.get("description")
+                existing.status = issue_data.get("status")
+                existing.assignee = issue_data.get("assignee")
+                existing.created_at = issue_data.get("created_at")
+                existing.updated_at = issue_data.get("updated_at")
+                existing.resolved_at = issue_data.get("resolved_at")
+                existing.story_points = issue_data.get("story_points")
+                existing.labels = issue_data.get("labels", [])
+                existing.platform_data = issue_data.get("platform_data", {})
+                existing.cached_at = datetime.utcnow()
+            else:
+                # Create new
+                cached_issue = IssueCache(
+                    platform=platform,
+                    issue_id=str(issue_data["id"]),
+                    project_key=issue_data["project_key"],
+                    title=issue_data.get("title"),
+                    description=issue_data.get("description"),
+                    status=issue_data.get("status"),
+                    assignee=issue_data.get("assignee"),
+                    created_at=issue_data.get("created_at"),
+                    updated_at=issue_data.get("updated_at"),
+                    resolved_at=issue_data.get("resolved_at"),
+                    story_points=issue_data.get("story_points"),
+                    labels=issue_data.get("labels", []),
+                    platform_data=issue_data.get("platform_data", {}),
+                )
+                session.add(cached_issue)
+
+    def get_cached_issues(self, platform: str, project_key: str) -> list[dict[str, Any]]:
         """Get all cached issues for a platform and project."""
         with self.get_session() as session:
-            issues = session.query(IssueCache).filter(
-                and_(
-                    IssueCache.platform == platform,
-                    IssueCache.project_key == project_key
+            issues = (
+                session.query(IssueCache)
+                .filter(
+                    and_(IssueCache.platform == platform, IssueCache.project_key == project_key)
                 )
-            ).all()
-
-            return [self._issue_to_dict(issue) for issue in issues
-                    if not self._is_stale(issue.cached_at)]
-
-    def clear_stale_cache(self):
+                .all()
+            )
+
+            return [
+                self._issue_to_dict(issue)
+                for issue in issues
+                if not self._is_stale(issue.cached_at)
+            ]
+
+    def clear_stale_cache(self) -> None:
         """Remove stale cache entries."""
         cutoff_time = datetime.utcnow() - timedelta(hours=self.ttl_hours)
-
+
         with self.get_session() as session:
-            session.query(CachedCommit).filter(
-                CachedCommit.cached_at < cutoff_time
-            ).delete()
-
+            session.query(CachedCommit).filter(CachedCommit.cached_at < cutoff_time).delete()
+
             session.query(PullRequestCache).filter(
                 PullRequestCache.cached_at < cutoff_time
             ).delete()
-
-            session.query(IssueCache).filter(
-                IssueCache.cached_at < cutoff_time
-            ).delete()
-
-    def get_cache_stats(self) -> Dict[str, int]:
+
+            session.query(IssueCache).filter(IssueCache.cached_at < cutoff_time).delete()
+
+    def get_cache_stats(self) -> dict[str, int]:
         """Get cache statistics."""
         with self.get_session() as session:
             stats = {
-                'cached_commits': session.query(CachedCommit).count(),
-                'cached_prs': session.query(PullRequestCache).count(),
-                'cached_issues': session.query(IssueCache).count(),
-                'stale_commits': session.query(CachedCommit).filter(
+                "cached_commits": session.query(CachedCommit).count(),
+                "cached_prs": session.query(PullRequestCache).count(),
+                "cached_issues": session.query(IssueCache).count(),
+                "stale_commits": session.query(CachedCommit)
+                .filter(
                     CachedCommit.cached_at < datetime.utcnow() - timedelta(hours=self.ttl_hours)
-                ).count()
+                )
+                .count(),
             }
             return stats
-
+
     def _is_stale(self, cached_at: datetime) -> bool:
         """Check if cache entry is stale."""
         if self.ttl_hours == 0:  # No expiration
             return False
         return cached_at < datetime.utcnow() - timedelta(hours=self.ttl_hours)
-
-    def _commit_to_dict(self, commit: CachedCommit) -> Dict[str, Any]:
+
+    def _commit_to_dict(self, commit: CachedCommit) -> dict[str, Any]:
         """Convert CachedCommit to dictionary."""
         return {
-            'hash': commit.commit_hash,
-            'author_name': commit.author_name,
-            'author_email': commit.author_email,
-            'message': commit.message,
-            'timestamp': commit.timestamp,
-            'branch': commit.branch,
-            'is_merge': commit.is_merge,
-            'files_changed': commit.files_changed,
-            'insertions': commit.insertions,
-            'deletions': commit.deletions,
-            'complexity_delta': commit.complexity_delta,
-            'story_points': commit.story_points,
-            'ticket_references': commit.ticket_references or []
+            "hash": commit.commit_hash,
+            "author_name": commit.author_name,
+            "author_email": commit.author_email,
+            "message": commit.message,
+            "timestamp": commit.timestamp,
+            "branch": commit.branch,
+            "is_merge": commit.is_merge,
+            "files_changed": commit.files_changed,
+            "insertions": commit.insertions,
+            "deletions": commit.deletions,
+            "complexity_delta": commit.complexity_delta,
+            "story_points": commit.story_points,
+            "ticket_references": commit.ticket_references or [],
         }
-
-    def _pr_to_dict(self, pr: PullRequestCache) -> Dict[str, Any]:
+
+    def _pr_to_dict(self, pr: PullRequestCache) -> dict[str, Any]:
         """Convert PullRequestCache to dictionary."""
         return {
-            'number': pr.pr_number,
-            'title': pr.title,
-            'description': pr.description,
-            'author': pr.author,
-            'created_at': pr.created_at,
-            'merged_at': pr.merged_at,
-            'story_points': pr.story_points,
-            'labels': pr.labels or [],
-            'commit_hashes': pr.commit_hashes or []
+            "number": pr.pr_number,
+            "title": pr.title,
+            "description": pr.description,
+            "author": pr.author,
+            "created_at": pr.created_at,
+            "merged_at": pr.merged_at,
+            "story_points": pr.story_points,
+            "labels": pr.labels or [],
+            "commit_hashes": pr.commit_hashes or [],
         }
-
-    def _issue_to_dict(self, issue: IssueCache) -> Dict[str, Any]:
+
+    def _issue_to_dict(self, issue: IssueCache) -> dict[str, Any]:
         """Convert IssueCache to dictionary."""
         return {
-            'platform': issue.platform,
-            'id': issue.issue_id,
-            'project_key': issue.project_key,
-            'title': issue.title,
-            'description': issue.description,
-            'status': issue.status,
-            'assignee': issue.assignee,
-            'created_at': issue.created_at,
-            'updated_at': issue.updated_at,
-            'resolved_at': issue.resolved_at,
-            'story_points': issue.story_points,
-            'labels': issue.labels or [],
-            'platform_data': issue.platform_data or {}
-        }
+            "platform": issue.platform,
+            "id": issue.issue_id,
+            "project_key": issue.project_key,
+            "title": issue.title,
+            "description": issue.description,
+            "status": issue.status,
+            "assignee": issue.assignee,
+            "created_at": issue.created_at,
+            "updated_at": issue.updated_at,
+            "resolved_at": issue.resolved_at,
+            "story_points": issue.story_points,
+            "labels": issue.labels or [],
+            "platform_data": issue.platform_data or {},
+        }
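
For orientation, a minimal sketch of how the GitAnalysisCache class changed above might be exercised. The module path, class, and method names come from the files listed in this diff; the cache directory, repo path, and commit field values are hypothetical.

from datetime import datetime
from pathlib import Path

from gitflow_analytics.core.cache import GitAnalysisCache

# Hypothetical cache location; 168 hours is the default TTL shown in the diff.
cache_dir = Path(".gitflow-cache")
cache_dir.mkdir(parents=True, exist_ok=True)
cache = GitAnalysisCache(cache_dir, ttl_hours=168)

# Cache one commit record, then read it back. Per the 1.0.3 changes above,
# re-caching the same hash updates the existing row instead of using session.merge().
cache.cache_commit(
    "/path/to/repo",
    {
        "hash": "abc123",
        "author_name": "Jane Doe",
        "message": "fix: example commit",
        "timestamp": datetime.utcnow(),
        "files_changed": 3,
        "insertions": 42,
        "deletions": 7,
    },
)
print(cache.get_cached_commit("/path/to/repo", "abc123"))
print(cache.get_cache_stats())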