@theihtisham/dev-pulse 1.0.0 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.editorconfig +12 -0
- package/.github/ISSUE_TEMPLATE/bug_report.yml +43 -0
- package/.github/ISSUE_TEMPLATE/feature_request.yml +33 -0
- package/.github/PULL_REQUEST_TEMPLATE.md +18 -0
- package/.github/dependabot.yml +16 -0
- package/.github/workflows/ci.yml +33 -0
- package/CODE_OF_CONDUCT.md +27 -0
- package/Dockerfile +8 -0
- package/LICENSE +21 -21
- package/README.md +135 -39
- package/SECURITY.md +22 -0
- package/devpulse/__init__.py +4 -4
- package/devpulse/api/__init__.py +1 -1
- package/devpulse/api/app.py +371 -371
- package/devpulse/cli/__init__.py +1 -1
- package/devpulse/cli/dashboard.py +131 -131
- package/devpulse/cli/main.py +678 -678
- package/devpulse/cli/render.py +175 -175
- package/devpulse/core/__init__.py +34 -34
- package/devpulse/core/analytics.py +487 -487
- package/devpulse/core/config.py +77 -77
- package/devpulse/core/database.py +612 -612
- package/devpulse/core/github_client.py +281 -281
- package/devpulse/core/models.py +142 -142
- package/devpulse/core/report_generator.py +454 -454
- package/devpulse/static/.gitkeep +1 -1
- package/devpulse/templates/report.html +64 -64
- package/package.json +35 -35
- package/pyproject.toml +80 -80
- package/requirements.txt +14 -14
- package/tests/__init__.py +1 -1
- package/tests/conftest.py +208 -208
- package/tests/test_analytics.py +284 -284
- package/tests/test_api.py +313 -313
- package/tests/test_cli.py +204 -204
- package/tests/test_config.py +47 -47
- package/tests/test_database.py +255 -255
- package/tests/test_models.py +107 -107
- package/tests/test_report_generator.py +173 -173
- package/jest.config.js +0 -7
|
@@ -1,612 +1,612 @@
|
|
|
1
|
-
"""SQLite database layer for DevPulse."""
|
|
2
|
-
|
|
3
|
-
import sqlite3
|
|
4
|
-
import json
|
|
5
|
-
from datetime import datetime, date
|
|
6
|
-
from pathlib import Path
|
|
7
|
-
from typing import Any, Optional
|
|
8
|
-
|
|
9
|
-
from devpulse.core.config import get_settings
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
class Database:
    """Manages the SQLite database for DevPulse.

    Every public method opens a short-lived connection, does its work, and
    closes the connection in a ``finally`` block, so an instance holds no
    open handle between calls.
    """

    def __init__(self, db_path: Optional[str] = None) -> None:
        """Open (creating if necessary) the database at *db_path*.

        When *db_path* is None, falls back to ``database_path`` from the
        application settings.
        """
        if db_path is None:
            settings = get_settings()
            db_path = settings.database_path
        self.db_path = db_path
        # Ensure the parent directory exists before SQLite creates the file.
        Path(db_path).parent.mkdir(parents=True, exist_ok=True)
        self._init_db()

    def _connect(self) -> sqlite3.Connection:
        """Return a fresh connection with name-based row access and WAL mode."""
        conn = sqlite3.connect(self.db_path)
        conn.row_factory = sqlite3.Row
        conn.execute("PRAGMA journal_mode=WAL")
        conn.execute("PRAGMA foreign_keys=ON")
        return conn

    def _init_db(self) -> None:
        """Create tables if they do not exist."""
        conn = self._connect()
        try:
            conn.executescript(SCHEMA)
            conn.commit()
        finally:
            conn.close()

    # ── Commits ──────────────────────────────────────────────────────

    def upsert_commits(self, commits: list[dict[str, Any]]) -> int:
        """Insert or update commits.

        Returns the number of affected rows.  NOTE: ``rowcount`` is also 1
        when the ``ON CONFLICT`` UPDATE branch fires, so this counts both
        inserts and updates, not only brand-new rows.
        """
        conn = self._connect()
        try:
            count = 0
            for c in commits:
                cur = conn.execute(
                    """INSERT INTO commits (sha, repo, author, author_date, message, additions, deletions, url)
                       VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                       ON CONFLICT(sha) DO UPDATE SET
                         additions=excluded.additions,
                         deletions=excluded.deletions""",
                    (
                        c["sha"],
                        c["repo"],
                        c["author"],
                        c["author_date"],
                        c["message"],
                        c.get("additions", 0),
                        c.get("deletions", 0),
                        c.get("url", ""),
                    ),
                )
                if cur.rowcount > 0:
                    count += 1
            conn.commit()
            return count
        finally:
            conn.close()

    def get_commits(
        self,
        repo: Optional[str] = None,
        author: Optional[str] = None,
        since: Optional[str] = None,
        until: Optional[str] = None,
        limit: int = 500,
    ) -> list[dict[str, Any]]:
        """Query commits with optional filters, newest first."""
        clauses: list[str] = []
        params: list[Any] = []

        if repo:
            clauses.append("repo = ?")
            params.append(repo)
        if author:
            clauses.append("author = ?")
            params.append(author)
        if since:
            clauses.append("author_date >= ?")
            params.append(since)
        if until:
            # If until is a plain date, extend to end of day so the whole
            # day is included in the range.
            until_val = until if "T" in until else f"{until}T23:59:59Z"
            clauses.append("author_date <= ?")
            params.append(until_val)

        where = f"WHERE {' AND '.join(clauses)}" if clauses else ""
        params.append(limit)

        conn = self._connect()
        try:
            rows = conn.execute(
                f"SELECT * FROM commits {where} ORDER BY author_date DESC LIMIT ?",
                params,
            ).fetchall()
            return [dict(r) for r in rows]
        finally:
            conn.close()

    def get_commit_count_by_day(
        self, author: Optional[str] = None, days: int = 365
    ) -> list[dict[str, Any]]:
        """Get daily commit counts (``day``, ``count``) for the heatmap."""
        conn = self._connect()
        try:
            query = """
                SELECT DATE(author_date) as day, COUNT(*) as count
                FROM commits
                WHERE author_date >= date('now', ?)"""
            params: list[Any] = [f"-{days} days"]
            if author:
                query += " AND author = ?"
                params.append(author)
            query += " GROUP BY DATE(author_date) ORDER BY day"
            rows = conn.execute(query, params).fetchall()
            return [dict(r) for r in rows]
        finally:
            conn.close()

    # ── Pull Requests ────────────────────────────────────────────────

    def upsert_pull_requests(self, prs: list[dict[str, Any]]) -> int:
        """Insert or update pull requests; returns the number of affected rows."""
        conn = self._connect()
        try:
            count = 0
            for p in prs:
                cur = conn.execute(
                    """INSERT INTO pull_requests
                       (number, repo, title, author, state, created_at, merged_at, closed_at,
                        additions, deletions, changed_files, review_comments, url)
                       VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                       ON CONFLICT(repo, number) DO UPDATE SET
                         state=excluded.state,
                         merged_at=excluded.merged_at,
                         closed_at=excluded.closed_at,
                         review_comments=excluded.review_comments""",
                    (
                        p["number"],
                        p["repo"],
                        p["title"],
                        p["author"],
                        p["state"],
                        p["created_at"],
                        p.get("merged_at"),
                        p.get("closed_at"),
                        p.get("additions", 0),
                        p.get("deletions", 0),
                        p.get("changed_files", 0),
                        p.get("review_comments", 0),
                        p.get("url", ""),
                    ),
                )
                if cur.rowcount > 0:
                    count += 1
            conn.commit()
            return count
        finally:
            conn.close()

    def get_pull_requests(
        self,
        repo: Optional[str] = None,
        author: Optional[str] = None,
        state: Optional[str] = None,
        since: Optional[str] = None,
        limit: int = 200,
    ) -> list[dict[str, Any]]:
        """Query pull requests with optional filters, newest first."""
        clauses: list[str] = []
        params: list[Any] = []
        if repo:
            clauses.append("repo = ?")
            params.append(repo)
        if author:
            clauses.append("author = ?")
            params.append(author)
        if state:
            clauses.append("state = ?")
            params.append(state)
        if since:
            clauses.append("created_at >= ?")
            params.append(since)
        where = f"WHERE {' AND '.join(clauses)}" if clauses else ""
        params.append(limit)
        conn = self._connect()
        try:
            rows = conn.execute(
                f"SELECT * FROM pull_requests {where} ORDER BY created_at DESC LIMIT ?",
                params,
            ).fetchall()
            return [dict(r) for r in rows]
        finally:
            conn.close()

    # ── Issues ───────────────────────────────────────────────────────

    def upsert_issues(self, issues: list[dict[str, Any]]) -> int:
        """Insert or update issues; returns the number of affected rows.

        ``labels`` is stored as a JSON-encoded list in a TEXT column.
        """
        conn = self._connect()
        try:
            count = 0
            for i in issues:
                cur = conn.execute(
                    """INSERT INTO issues
                       (number, repo, title, author, state, labels, created_at, closed_at, url)
                       VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
                       ON CONFLICT(repo, number) DO UPDATE SET
                         state=excluded.state,
                         closed_at=excluded.closed_at,
                         labels=excluded.labels""",
                    (
                        i["number"],
                        i["repo"],
                        i["title"],
                        i["author"],
                        i["state"],
                        json.dumps(i.get("labels", [])),
                        i["created_at"],
                        i.get("closed_at"),
                        i.get("url", ""),
                    ),
                )
                if cur.rowcount > 0:
                    count += 1
            conn.commit()
            return count
        finally:
            conn.close()

    def get_issues(
        self,
        repo: Optional[str] = None,
        state: Optional[str] = None,
        since: Optional[str] = None,
        limit: int = 200,
    ) -> list[dict[str, Any]]:
        """Query issues with optional filters, newest first."""
        clauses: list[str] = []
        params: list[Any] = []
        if repo:
            clauses.append("repo = ?")
            params.append(repo)
        if state:
            clauses.append("state = ?")
            params.append(state)
        if since:
            clauses.append("created_at >= ?")
            params.append(since)
        where = f"WHERE {' AND '.join(clauses)}" if clauses else ""
        params.append(limit)
        conn = self._connect()
        try:
            rows = conn.execute(
                f"SELECT * FROM issues {where} ORDER BY created_at DESC LIMIT ?",
                params,
            ).fetchall()
            return [dict(r) for r in rows]
        finally:
            conn.close()

    # ── Reviews ──────────────────────────────────────────────────────

    def upsert_reviews(self, reviews: list[dict[str, Any]]) -> int:
        """Insert or update reviews; returns the number of affected rows."""
        conn = self._connect()
        try:
            count = 0
            for r in reviews:
                cur = conn.execute(
                    """INSERT INTO reviews
                       (id, repo, pr_number, author, state, submitted_at, body)
                       VALUES (?, ?, ?, ?, ?, ?, ?)
                       ON CONFLICT(id) DO UPDATE SET
                         state=excluded.state""",
                    (
                        r["id"],
                        r["repo"],
                        r["pr_number"],
                        r["author"],
                        r["state"],
                        r["submitted_at"],
                        r.get("body", ""),
                    ),
                )
                if cur.rowcount > 0:
                    count += 1
            conn.commit()
            return count
        finally:
            conn.close()

    def get_reviews(
        self,
        repo: Optional[str] = None,
        author: Optional[str] = None,
        since: Optional[str] = None,
        limit: int = 200,
    ) -> list[dict[str, Any]]:
        """Query reviews with optional filters, newest first."""
        clauses: list[str] = []
        params: list[Any] = []
        if repo:
            clauses.append("repo = ?")
            params.append(repo)
        if author:
            clauses.append("author = ?")
            params.append(author)
        if since:
            clauses.append("submitted_at >= ?")
            params.append(since)
        where = f"WHERE {' AND '.join(clauses)}" if clauses else ""
        params.append(limit)
        conn = self._connect()
        try:
            rows = conn.execute(
                f"SELECT * FROM reviews {where} ORDER BY submitted_at DESC LIMIT ?",
                params,
            ).fetchall()
            return [dict(r) for r in rows]
        finally:
            conn.close()

    # ── Goals ────────────────────────────────────────────────────────

    def upsert_goal(self, goal: dict[str, Any]) -> int:
        """Create or update a goal and return its row id.

        If ``goal["id"]`` is set, the existing row is updated; otherwise a
        new row is inserted and its autoincrement id returned.
        """
        conn = self._connect()
        try:
            if goal.get("id"):
                conn.execute(
                    """UPDATE goals SET title=?, description=?, target_value=?, current_value=?,
                       metric=?, deadline=?, status=? WHERE id=?""",
                    (
                        goal["title"],
                        goal.get("description", ""),
                        goal["target_value"],
                        goal.get("current_value", 0),
                        goal["metric"],
                        goal.get("deadline"),
                        goal.get("status", "active"),
                        goal["id"],
                    ),
                )
                gid = goal["id"]
            else:
                cur = conn.execute(
                    """INSERT INTO goals (title, description, target_value, current_value, metric, deadline, status)
                       VALUES (?, ?, ?, ?, ?, ?, ?)""",
                    (
                        goal["title"],
                        goal.get("description", ""),
                        goal["target_value"],
                        goal.get("current_value", 0),
                        goal["metric"],
                        goal.get("deadline"),
                        goal.get("status", "active"),
                    ),
                )
                gid = cur.lastrowid
            conn.commit()
            return gid  # type: ignore[return-value]
        finally:
            conn.close()

    def get_goals(self, status: Optional[str] = None) -> list[dict[str, Any]]:
        """Return goals ordered by deadline, optionally filtered by status."""
        conn = self._connect()
        try:
            if status:
                rows = conn.execute(
                    "SELECT * FROM goals WHERE status = ? ORDER BY deadline", (status,)
                ).fetchall()
            else:
                rows = conn.execute("SELECT * FROM goals ORDER BY deadline").fetchall()
            return [dict(r) for r in rows]
        finally:
            conn.close()

    def delete_goal(self, goal_id: int) -> bool:
        """Delete a goal by id; returns True if a row was removed."""
        conn = self._connect()
        try:
            cur = conn.execute("DELETE FROM goals WHERE id = ?", (goal_id,))
            conn.commit()
            return cur.rowcount > 0
        finally:
            conn.close()

    # ── Reports cache ────────────────────────────────────────────────

    def save_report(self, report_type: str, period: str, content: str) -> None:
        """Cache a generated report, replacing any previous one for the
        same (report_type, period) pair."""
        # datetime.utcnow() is deprecated (Python 3.12) and naive; store a
        # timezone-aware UTC timestamp instead.
        from datetime import timezone

        conn = self._connect()
        try:
            conn.execute(
                """INSERT INTO reports (report_type, period, content, generated_at)
                   VALUES (?, ?, ?, ?)
                   ON CONFLICT(report_type, period) DO UPDATE SET
                     content=excluded.content,
                     generated_at=excluded.generated_at""",
                (report_type, period, content, datetime.now(timezone.utc).isoformat()),
            )
            conn.commit()
        finally:
            conn.close()

    def get_report(self, report_type: str, period: str) -> Optional[str]:
        """Return a cached report's content, or None if not cached."""
        conn = self._connect()
        try:
            row = conn.execute(
                "SELECT content FROM reports WHERE report_type = ? AND period = ?",
                (report_type, period),
            ).fetchone()
            # sqlite3.Row supports key access directly; no dict copy needed.
            return row["content"] if row else None
        finally:
            conn.close()

    # ── Sprint snapshots ─────────────────────────────────────────────

    def save_sprint_snapshot(self, snapshot: dict[str, Any]) -> None:
        """Append a burndown snapshot for a sprint (one row per call)."""
        conn = self._connect()
        try:
            conn.execute(
                """INSERT INTO sprint_snapshots (sprint_name, snapshot_date, total_points,
                   completed_points, remaining_points, added_points)
                   VALUES (?, ?, ?, ?, ?, ?)""",
                (
                    snapshot["sprint_name"],
                    snapshot.get("snapshot_date", date.today().isoformat()),
                    snapshot["total_points"],
                    snapshot["completed_points"],
                    snapshot["remaining_points"],
                    snapshot.get("added_points", 0),
                ),
            )
            conn.commit()
        finally:
            conn.close()

    def get_sprint_snapshots(self, sprint_name: str) -> list[dict[str, Any]]:
        """Return all snapshots for a sprint in chronological order."""
        conn = self._connect()
        try:
            rows = conn.execute(
                "SELECT * FROM sprint_snapshots WHERE sprint_name = ? ORDER BY snapshot_date",
                (sprint_name,),
            ).fetchall()
            return [dict(r) for r in rows]
        finally:
            conn.close()

    # ── Code quality snapshots ───────────────────────────────────────

    def save_quality_snapshot(self, snapshot: dict[str, Any]) -> None:
        """Append a code-quality snapshot for a repo (one row per call)."""
        conn = self._connect()
        try:
            conn.execute(
                """INSERT INTO code_quality (repo, snapshot_date, test_coverage, open_bugs,
                   tech_debt_score, lines_added, lines_removed, files_changed)
                   VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
                (
                    snapshot["repo"],
                    snapshot.get("snapshot_date", date.today().isoformat()),
                    snapshot.get("test_coverage", 0.0),
                    snapshot.get("open_bugs", 0),
                    snapshot.get("tech_debt_score", 0.0),
                    snapshot.get("lines_added", 0),
                    snapshot.get("lines_removed", 0),
                    snapshot.get("files_changed", 0),
                ),
            )
            conn.commit()
        finally:
            conn.close()

    def get_quality_snapshots(
        self, repo: Optional[str] = None, days: int = 90
    ) -> list[dict[str, Any]]:
        """Return quality snapshots from the last *days* days, oldest first,
        optionally restricted to one repo."""
        conn = self._connect()
        try:
            if repo:
                rows = conn.execute(
                    """SELECT * FROM code_quality
                       WHERE repo = ? AND snapshot_date >= date('now', ?)
                       ORDER BY snapshot_date""",
                    (repo, f"-{days} days"),
                ).fetchall()
            else:
                rows = conn.execute(
                    """SELECT * FROM code_quality
                       WHERE snapshot_date >= date('now', ?)
                       ORDER BY snapshot_date""",
                    (f"-{days} days",),
                ).fetchall()
            return [dict(r) for r in rows]
        finally:
            conn.close()


SCHEMA = """\
CREATE TABLE IF NOT EXISTS commits (
    sha TEXT PRIMARY KEY,
    repo TEXT NOT NULL,
    author TEXT NOT NULL,
    author_date TEXT NOT NULL,
    message TEXT,
    additions INTEGER DEFAULT 0,
    deletions INTEGER DEFAULT 0,
    url TEXT DEFAULT '',
    fetched_at TEXT DEFAULT CURRENT_TIMESTAMP
);

CREATE TABLE IF NOT EXISTS pull_requests (
    repo TEXT NOT NULL,
    number INTEGER NOT NULL,
    title TEXT,
    author TEXT,
    state TEXT,
    created_at TEXT,
    merged_at TEXT,
    closed_at TEXT,
    additions INTEGER DEFAULT 0,
    deletions INTEGER DEFAULT 0,
    changed_files INTEGER DEFAULT 0,
    review_comments INTEGER DEFAULT 0,
    url TEXT DEFAULT '',
    fetched_at TEXT DEFAULT CURRENT_TIMESTAMP,
    PRIMARY KEY (repo, number)
);

CREATE TABLE IF NOT EXISTS issues (
    repo TEXT NOT NULL,
    number INTEGER NOT NULL,
    title TEXT,
    author TEXT,
    state TEXT,
    labels TEXT DEFAULT '[]',
    created_at TEXT,
    closed_at TEXT,
    url TEXT DEFAULT '',
    fetched_at TEXT DEFAULT CURRENT_TIMESTAMP,
    PRIMARY KEY (repo, number)
);

CREATE TABLE IF NOT EXISTS reviews (
    id INTEGER PRIMARY KEY,
    repo TEXT NOT NULL,
    pr_number INTEGER NOT NULL,
    author TEXT,
    state TEXT,
    submitted_at TEXT,
    body TEXT DEFAULT ''
);

CREATE TABLE IF NOT EXISTS goals (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    title TEXT NOT NULL,
    description TEXT DEFAULT '',
    target_value REAL NOT NULL,
    current_value REAL DEFAULT 0,
    metric TEXT NOT NULL,
    deadline TEXT,
    status TEXT DEFAULT 'active',
    created_at TEXT DEFAULT CURRENT_TIMESTAMP,
    updated_at TEXT DEFAULT CURRENT_TIMESTAMP
);

CREATE TABLE IF NOT EXISTS reports (
    report_type TEXT NOT NULL,
    period TEXT NOT NULL,
    content TEXT NOT NULL,
    generated_at TEXT,
    PRIMARY KEY (report_type, period)
);

CREATE TABLE IF NOT EXISTS sprint_snapshots (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    sprint_name TEXT NOT NULL,
    snapshot_date TEXT NOT NULL,
    total_points REAL DEFAULT 0,
    completed_points REAL DEFAULT 0,
    remaining_points REAL DEFAULT 0,
    added_points REAL DEFAULT 0
);

CREATE TABLE IF NOT EXISTS code_quality (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    repo TEXT NOT NULL,
    snapshot_date TEXT NOT NULL,
    test_coverage REAL DEFAULT 0.0,
    open_bugs INTEGER DEFAULT 0,
    tech_debt_score REAL DEFAULT 0.0,
    lines_added INTEGER DEFAULT 0,
    lines_removed INTEGER DEFAULT 0,
    files_changed INTEGER DEFAULT 0
);

CREATE INDEX IF NOT EXISTS idx_commits_repo ON commits(repo);
CREATE INDEX IF NOT EXISTS idx_commits_author ON commits(author);
CREATE INDEX IF NOT EXISTS idx_commits_date ON commits(author_date);
CREATE INDEX IF NOT EXISTS idx_prs_repo ON pull_requests(repo);
CREATE INDEX IF NOT EXISTS idx_prs_author ON pull_requests(author);
CREATE INDEX IF NOT EXISTS idx_issues_repo ON issues(repo);
CREATE INDEX IF NOT EXISTS idx_issues_state ON issues(state);
CREATE INDEX IF NOT EXISTS idx_reviews_repo ON reviews(repo);
CREATE INDEX IF NOT EXISTS idx_reviews_author ON reviews(author);
CREATE INDEX IF NOT EXISTS idx_goals_status ON goals(status);
CREATE INDEX IF NOT EXISTS idx_sprint_name ON sprint_snapshots(sprint_name);
CREATE INDEX IF NOT EXISTS idx_quality_repo ON code_quality(repo);
"""
|
|
1
|
+
"""SQLite database layer for DevPulse."""
|
|
2
|
+
|
|
3
|
+
import sqlite3
|
|
4
|
+
import json
|
|
5
|
+
from datetime import datetime, date
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
from typing import Any, Optional
|
|
8
|
+
|
|
9
|
+
from devpulse.core.config import get_settings
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class Database:
|
|
13
|
+
"""Manages the SQLite database for DevPulse."""
|
|
14
|
+
|
|
15
|
+
def __init__(self, db_path: Optional[str] = None) -> None:
|
|
16
|
+
if db_path is None:
|
|
17
|
+
settings = get_settings()
|
|
18
|
+
db_path = settings.database_path
|
|
19
|
+
self.db_path = db_path
|
|
20
|
+
Path(db_path).parent.mkdir(parents=True, exist_ok=True)
|
|
21
|
+
self._init_db()
|
|
22
|
+
|
|
23
|
+
def _connect(self) -> sqlite3.Connection:
|
|
24
|
+
conn = sqlite3.connect(self.db_path)
|
|
25
|
+
conn.row_factory = sqlite3.Row
|
|
26
|
+
conn.execute("PRAGMA journal_mode=WAL")
|
|
27
|
+
conn.execute("PRAGMA foreign_keys=ON")
|
|
28
|
+
return conn
|
|
29
|
+
|
|
30
|
+
def _init_db(self) -> None:
|
|
31
|
+
"""Create tables if they do not exist."""
|
|
32
|
+
conn = self._connect()
|
|
33
|
+
try:
|
|
34
|
+
conn.executescript(SCHEMA)
|
|
35
|
+
conn.commit()
|
|
36
|
+
finally:
|
|
37
|
+
conn.close()
|
|
38
|
+
|
|
39
|
+
# ── Commits ──────────────────────────────────────────────────────
|
|
40
|
+
|
|
41
|
+
def upsert_commits(self, commits: list[dict[str, Any]]) -> int:
|
|
42
|
+
"""Insert or update commits. Returns count of new rows."""
|
|
43
|
+
conn = self._connect()
|
|
44
|
+
try:
|
|
45
|
+
count = 0
|
|
46
|
+
for c in commits:
|
|
47
|
+
cur = conn.execute(
|
|
48
|
+
"""INSERT INTO commits (sha, repo, author, author_date, message, additions, deletions, url)
|
|
49
|
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
|
|
50
|
+
ON CONFLICT(sha) DO UPDATE SET
|
|
51
|
+
additions=excluded.additions,
|
|
52
|
+
deletions=excluded.deletions""",
|
|
53
|
+
(
|
|
54
|
+
c["sha"],
|
|
55
|
+
c["repo"],
|
|
56
|
+
c["author"],
|
|
57
|
+
c["author_date"],
|
|
58
|
+
c["message"],
|
|
59
|
+
c.get("additions", 0),
|
|
60
|
+
c.get("deletions", 0),
|
|
61
|
+
c.get("url", ""),
|
|
62
|
+
),
|
|
63
|
+
)
|
|
64
|
+
if cur.rowcount > 0:
|
|
65
|
+
count += 1
|
|
66
|
+
conn.commit()
|
|
67
|
+
return count
|
|
68
|
+
finally:
|
|
69
|
+
conn.close()
|
|
70
|
+
|
|
71
|
+
def get_commits(
|
|
72
|
+
self,
|
|
73
|
+
repo: Optional[str] = None,
|
|
74
|
+
author: Optional[str] = None,
|
|
75
|
+
since: Optional[str] = None,
|
|
76
|
+
until: Optional[str] = None,
|
|
77
|
+
limit: int = 500,
|
|
78
|
+
) -> list[dict[str, Any]]:
|
|
79
|
+
"""Query commits with optional filters."""
|
|
80
|
+
clauses: list[str] = []
|
|
81
|
+
params: list[Any] = []
|
|
82
|
+
|
|
83
|
+
if repo:
|
|
84
|
+
clauses.append("repo = ?")
|
|
85
|
+
params.append(repo)
|
|
86
|
+
if author:
|
|
87
|
+
clauses.append("author = ?")
|
|
88
|
+
params.append(author)
|
|
89
|
+
if since:
|
|
90
|
+
clauses.append("author_date >= ?")
|
|
91
|
+
params.append(since)
|
|
92
|
+
if until:
|
|
93
|
+
# If until is a plain date, extend to end of day
|
|
94
|
+
until_val = until if "T" in until else f"{until}T23:59:59Z"
|
|
95
|
+
clauses.append("author_date <= ?")
|
|
96
|
+
params.append(until_val)
|
|
97
|
+
|
|
98
|
+
where = f"WHERE {' AND '.join(clauses)}" if clauses else ""
|
|
99
|
+
params.append(limit)
|
|
100
|
+
|
|
101
|
+
conn = self._connect()
|
|
102
|
+
try:
|
|
103
|
+
rows = conn.execute(
|
|
104
|
+
f"SELECT * FROM commits {where} ORDER BY author_date DESC LIMIT ?",
|
|
105
|
+
params,
|
|
106
|
+
).fetchall()
|
|
107
|
+
return [dict(r) for r in rows]
|
|
108
|
+
finally:
|
|
109
|
+
conn.close()
|
|
110
|
+
|
|
111
|
+
def get_commit_count_by_day(
|
|
112
|
+
self, author: Optional[str] = None, days: int = 365
|
|
113
|
+
) -> list[dict[str, Any]]:
|
|
114
|
+
"""Get daily commit counts for heatmap."""
|
|
115
|
+
conn = self._connect()
|
|
116
|
+
try:
|
|
117
|
+
query = """
|
|
118
|
+
SELECT DATE(author_date) as day, COUNT(*) as count
|
|
119
|
+
FROM commits
|
|
120
|
+
WHERE author_date >= date('now', ?)"""
|
|
121
|
+
params: list[Any] = [f"-{days} days"]
|
|
122
|
+
if author:
|
|
123
|
+
query += " AND author = ?"
|
|
124
|
+
params.append(author)
|
|
125
|
+
query += " GROUP BY DATE(author_date) ORDER BY day"
|
|
126
|
+
rows = conn.execute(query, params).fetchall()
|
|
127
|
+
return [dict(r) for r in rows]
|
|
128
|
+
finally:
|
|
129
|
+
conn.close()
|
|
130
|
+
|
|
131
|
+
# ── Pull Requests ────────────────────────────────────────────────
|
|
132
|
+
|
|
133
|
+
def upsert_pull_requests(self, prs: list[dict[str, Any]]) -> int:
|
|
134
|
+
"""Insert or update pull requests."""
|
|
135
|
+
conn = self._connect()
|
|
136
|
+
try:
|
|
137
|
+
count = 0
|
|
138
|
+
for p in prs:
|
|
139
|
+
cur = conn.execute(
|
|
140
|
+
"""INSERT INTO pull_requests
|
|
141
|
+
(number, repo, title, author, state, created_at, merged_at, closed_at,
|
|
142
|
+
additions, deletions, changed_files, review_comments, url)
|
|
143
|
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
144
|
+
ON CONFLICT(repo, number) DO UPDATE SET
|
|
145
|
+
state=excluded.state,
|
|
146
|
+
merged_at=excluded.merged_at,
|
|
147
|
+
closed_at=excluded.closed_at,
|
|
148
|
+
review_comments=excluded.review_comments""",
|
|
149
|
+
(
|
|
150
|
+
p["number"],
|
|
151
|
+
p["repo"],
|
|
152
|
+
p["title"],
|
|
153
|
+
p["author"],
|
|
154
|
+
p["state"],
|
|
155
|
+
p["created_at"],
|
|
156
|
+
p.get("merged_at"),
|
|
157
|
+
p.get("closed_at"),
|
|
158
|
+
p.get("additions", 0),
|
|
159
|
+
p.get("deletions", 0),
|
|
160
|
+
p.get("changed_files", 0),
|
|
161
|
+
p.get("review_comments", 0),
|
|
162
|
+
p.get("url", ""),
|
|
163
|
+
),
|
|
164
|
+
)
|
|
165
|
+
if cur.rowcount > 0:
|
|
166
|
+
count += 1
|
|
167
|
+
conn.commit()
|
|
168
|
+
return count
|
|
169
|
+
finally:
|
|
170
|
+
conn.close()
|
|
171
|
+
|
|
172
|
+
def get_pull_requests(
|
|
173
|
+
self,
|
|
174
|
+
repo: Optional[str] = None,
|
|
175
|
+
author: Optional[str] = None,
|
|
176
|
+
state: Optional[str] = None,
|
|
177
|
+
since: Optional[str] = None,
|
|
178
|
+
limit: int = 200,
|
|
179
|
+
) -> list[dict[str, Any]]:
|
|
180
|
+
clauses: list[str] = []
|
|
181
|
+
params: list[Any] = []
|
|
182
|
+
if repo:
|
|
183
|
+
clauses.append("repo = ?")
|
|
184
|
+
params.append(repo)
|
|
185
|
+
if author:
|
|
186
|
+
clauses.append("author = ?")
|
|
187
|
+
params.append(author)
|
|
188
|
+
if state:
|
|
189
|
+
clauses.append("state = ?")
|
|
190
|
+
params.append(state)
|
|
191
|
+
if since:
|
|
192
|
+
clauses.append("created_at >= ?")
|
|
193
|
+
params.append(since)
|
|
194
|
+
where = f"WHERE {' AND '.join(clauses)}" if clauses else ""
|
|
195
|
+
params.append(limit)
|
|
196
|
+
conn = self._connect()
|
|
197
|
+
try:
|
|
198
|
+
rows = conn.execute(
|
|
199
|
+
f"SELECT * FROM pull_requests {where} ORDER BY created_at DESC LIMIT ?",
|
|
200
|
+
params,
|
|
201
|
+
).fetchall()
|
|
202
|
+
return [dict(r) for r in rows]
|
|
203
|
+
finally:
|
|
204
|
+
conn.close()
|
|
205
|
+
|
|
206
|
+
# ── Issues ───────────────────────────────────────────────────────
|
|
207
|
+
|
|
208
|
+
def upsert_issues(self, issues: list[dict[str, Any]]) -> int:
|
|
209
|
+
conn = self._connect()
|
|
210
|
+
try:
|
|
211
|
+
count = 0
|
|
212
|
+
for i in issues:
|
|
213
|
+
cur = conn.execute(
|
|
214
|
+
"""INSERT INTO issues
|
|
215
|
+
(number, repo, title, author, state, labels, created_at, closed_at, url)
|
|
216
|
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
217
|
+
ON CONFLICT(repo, number) DO UPDATE SET
|
|
218
|
+
state=excluded.state,
|
|
219
|
+
closed_at=excluded.closed_at,
|
|
220
|
+
labels=excluded.labels""",
|
|
221
|
+
(
|
|
222
|
+
i["number"],
|
|
223
|
+
i["repo"],
|
|
224
|
+
i["title"],
|
|
225
|
+
i["author"],
|
|
226
|
+
i["state"],
|
|
227
|
+
json.dumps(i.get("labels", [])),
|
|
228
|
+
i["created_at"],
|
|
229
|
+
i.get("closed_at"),
|
|
230
|
+
i.get("url", ""),
|
|
231
|
+
),
|
|
232
|
+
)
|
|
233
|
+
if cur.rowcount > 0:
|
|
234
|
+
count += 1
|
|
235
|
+
conn.commit()
|
|
236
|
+
return count
|
|
237
|
+
finally:
|
|
238
|
+
conn.close()
|
|
239
|
+
|
|
240
|
+
def get_issues(
    self,
    repo: Optional[str] = None,
    state: Optional[str] = None,
    since: Optional[str] = None,
    limit: int = 200,
) -> list[dict[str, Any]]:
    """Fetch cached issues, newest first, with optional filters.

    ``since`` is an inclusive lower bound on ``created_at``.  At most
    ``limit`` rows are returned, each as a plain dict.
    """
    filters = [
        ("repo = ?", repo),
        ("state = ?", state),
        ("created_at >= ?", since),
    ]
    active = [(sql, value) for sql, value in filters if value]
    where = f"WHERE {' AND '.join(sql for sql, _ in active)}" if active else ""
    args: list[Any] = [value for _, value in active]
    args.append(limit)
    conn = self._connect()
    try:
        cursor = conn.execute(
            f"SELECT * FROM issues {where} ORDER BY created_at DESC LIMIT ?",
            args,
        )
        return [dict(row) for row in cursor.fetchall()]
    finally:
        conn.close()
|
|
269
|
+
|
|
270
|
+
# ── Reviews ──────────────────────────────────────────────────────
|
|
271
|
+
|
|
272
|
+
def upsert_reviews(self, reviews: list[dict[str, Any]]) -> int:
    """Insert or refresh cached pull-request reviews.

    Rows are keyed on the review id; an existing row only has its
    ``state`` refreshed (body/author/etc. keep their first-seen values).
    Returns the number of rows touched.
    """
    conn = self._connect()
    try:
        touched = 0
        for review in reviews:
            values = (
                review["id"],
                review["repo"],
                review["pr_number"],
                review["author"],
                review["state"],
                review["submitted_at"],
                review.get("body", ""),
            )
            cursor = conn.execute(
                """INSERT INTO reviews
                   (id, repo, pr_number, author, state, submitted_at, body)
                   VALUES (?, ?, ?, ?, ?, ?, ?)
                   ON CONFLICT(id) DO UPDATE SET
                     state=excluded.state""",
                values,
            )
            touched += 1 if cursor.rowcount > 0 else 0
        conn.commit()
        return touched
    finally:
        conn.close()
|
|
299
|
+
|
|
300
|
+
def get_reviews(
    self,
    repo: Optional[str] = None,
    author: Optional[str] = None,
    since: Optional[str] = None,
    limit: int = 200,
) -> list[dict[str, Any]]:
    """Fetch cached reviews, most recently submitted first.

    ``since`` is an inclusive lower bound on ``submitted_at``.
    """
    filters = [
        ("repo = ?", repo),
        ("author = ?", author),
        ("submitted_at >= ?", since),
    ]
    active = [(sql, value) for sql, value in filters if value]
    where = f"WHERE {' AND '.join(sql for sql, _ in active)}" if active else ""
    args: list[Any] = [value for _, value in active]
    args.append(limit)
    conn = self._connect()
    try:
        cursor = conn.execute(
            f"SELECT * FROM reviews {where} ORDER BY submitted_at DESC LIMIT ?",
            args,
        )
        return [dict(row) for row in cursor.fetchall()]
    finally:
        conn.close()
|
|
329
|
+
|
|
330
|
+
# ── Goals ────────────────────────────────────────────────────────
|
|
331
|
+
|
|
332
|
+
def upsert_goal(self, goal: dict[str, Any]) -> int:
    """Create or update a goal and return its row id.

    A dict carrying a truthy "id" updates that row in place; otherwise a
    new row is inserted.  Fix: the schema tracks ``updated_at`` but the
    UPDATE path never refreshed it, so edits looked stale — the SET
    clause now bumps it to CURRENT_TIMESTAMP.  The shared column tuple
    is built once instead of being duplicated per branch.
    """
    conn = self._connect()
    try:
        # Columns common to both the UPDATE and the INSERT, in SQL order.
        common = (
            goal["title"],
            goal.get("description", ""),
            goal["target_value"],
            goal.get("current_value", 0),
            goal["metric"],
            goal.get("deadline"),
            goal.get("status", "active"),
        )
        if goal.get("id"):
            conn.execute(
                """UPDATE goals SET title=?, description=?, target_value=?, current_value=?,
                   metric=?, deadline=?, status=?, updated_at=CURRENT_TIMESTAMP WHERE id=?""",
                (*common, goal["id"]),
            )
            gid = goal["id"]
        else:
            cur = conn.execute(
                """INSERT INTO goals (title, description, target_value, current_value, metric, deadline, status)
                   VALUES (?, ?, ?, ?, ?, ?, ?)""",
                common,
            )
            gid = cur.lastrowid
        conn.commit()
        return gid  # type: ignore[return-value]
    finally:
        conn.close()
|
|
370
|
+
|
|
371
|
+
def get_goals(self, status: Optional[str] = None) -> list[dict[str, Any]]:
    """List goals ordered by deadline, optionally restricted to one status."""
    conn = self._connect()
    try:
        if not status:
            cursor = conn.execute("SELECT * FROM goals ORDER BY deadline")
        else:
            cursor = conn.execute(
                "SELECT * FROM goals WHERE status = ? ORDER BY deadline", (status,)
            )
        return [dict(row) for row in cursor]
    finally:
        conn.close()
|
|
383
|
+
|
|
384
|
+
def delete_goal(self, goal_id: int) -> bool:
    """Remove a goal by id; True when a row was actually deleted."""
    conn = self._connect()
    try:
        removed = conn.execute("DELETE FROM goals WHERE id = ?", (goal_id,)).rowcount
        conn.commit()
        return removed > 0
    finally:
        conn.close()
|
|
392
|
+
|
|
393
|
+
# ── Reports cache ────────────────────────────────────────────────
|
|
394
|
+
|
|
395
|
+
def save_report(self, report_type: str, period: str, content: str) -> None:
    """Cache a generated report, replacing any prior (type, period) entry.

    Fix: ``datetime.utcnow()`` is deprecated since Python 3.12.  We take
    an aware UTC "now" and strip the tzinfo so the stored ISO string
    keeps exactly the old naive format (no "+00:00" suffix) and remains
    lexically comparable with previously written rows.
    """
    from datetime import timezone  # local import: module header is outside this chunk

    generated = datetime.now(timezone.utc).replace(tzinfo=None).isoformat()
    conn = self._connect()
    try:
        conn.execute(
            """INSERT INTO reports (report_type, period, content, generated_at)
               VALUES (?, ?, ?, ?)
               ON CONFLICT(report_type, period) DO UPDATE SET
                 content=excluded.content,
                 generated_at=excluded.generated_at""",
            (report_type, period, content, generated),
        )
        conn.commit()
    finally:
        conn.close()
|
|
409
|
+
|
|
410
|
+
def get_report(self, report_type: str, period: str) -> Optional[str]:
    """Return the cached report body for (type, period), or None if absent."""
    conn = self._connect()
    try:
        found = conn.execute(
            "SELECT content FROM reports WHERE report_type = ? AND period = ?",
            (report_type, period),
        ).fetchone()
        if found is None:
            return None
        return found["content"]
    finally:
        conn.close()
|
|
420
|
+
|
|
421
|
+
# ── Sprint snapshots ─────────────────────────────────────────────
|
|
422
|
+
|
|
423
|
+
def save_sprint_snapshot(self, snapshot: dict[str, Any]) -> None:
    """Append one burndown data point for a sprint.

    ``snapshot_date`` defaults to today (ISO) and ``added_points`` to 0;
    the remaining point fields are required.
    """
    record = (
        snapshot["sprint_name"],
        snapshot.get("snapshot_date", date.today().isoformat()),
        snapshot["total_points"],
        snapshot["completed_points"],
        snapshot["remaining_points"],
        snapshot.get("added_points", 0),
    )
    conn = self._connect()
    try:
        conn.execute(
            """INSERT INTO sprint_snapshots (sprint_name, snapshot_date, total_points,
               completed_points, remaining_points, added_points)
               VALUES (?, ?, ?, ?, ?, ?)""",
            record,
        )
        conn.commit()
    finally:
        conn.close()
|
|
442
|
+
|
|
443
|
+
def get_sprint_snapshots(self, sprint_name: str) -> list[dict[str, Any]]:
    """All burndown snapshots for one sprint, oldest first."""
    conn = self._connect()
    try:
        result = conn.execute(
            "SELECT * FROM sprint_snapshots WHERE sprint_name = ? ORDER BY snapshot_date",
            (sprint_name,),
        )
        return [dict(row) for row in result]
    finally:
        conn.close()
|
|
453
|
+
|
|
454
|
+
# ── Code quality snapshots ───────────────────────────────────────
|
|
455
|
+
|
|
456
|
+
def save_quality_snapshot(self, snapshot: dict[str, Any]) -> None:
    """Append one code-quality data point for a repository.

    Only ``repo`` is required; ``snapshot_date`` defaults to today (ISO)
    and every metric defaults to zero.
    """
    record = (
        snapshot["repo"],
        snapshot.get("snapshot_date", date.today().isoformat()),
        snapshot.get("test_coverage", 0.0),
        snapshot.get("open_bugs", 0),
        snapshot.get("tech_debt_score", 0.0),
        snapshot.get("lines_added", 0),
        snapshot.get("lines_removed", 0),
        snapshot.get("files_changed", 0),
    )
    conn = self._connect()
    try:
        conn.execute(
            """INSERT INTO code_quality (repo, snapshot_date, test_coverage, open_bugs,
               tech_debt_score, lines_added, lines_removed, files_changed)
               VALUES (?, ?, ?, ?, ?, ?, ?, ?)""",
            record,
        )
        conn.commit()
    finally:
        conn.close()
|
|
477
|
+
|
|
478
|
+
def get_quality_snapshots(
    self, repo: Optional[str] = None, days: int = 90
) -> list[dict[str, Any]]:
    """Quality snapshots from the last ``days`` days, oldest first.

    When ``repo`` is given, only that repository's rows are returned;
    the date window is evaluated by SQLite via date('now', '-N days').
    """
    window = f"-{days} days"
    if repo:
        query = """SELECT * FROM code_quality
                   WHERE repo = ? AND snapshot_date >= date('now', ?)
                   ORDER BY snapshot_date"""
        args: tuple[Any, ...] = (repo, window)
    else:
        query = """SELECT * FROM code_quality
                   WHERE snapshot_date >= date('now', ?)
                   ORDER BY snapshot_date"""
        args = (window,)
    conn = self._connect()
    try:
        return [dict(row) for row in conn.execute(query, args).fetchall()]
    finally:
        conn.close()
|
|
500
|
+
|
|
501
|
+
|
|
502
|
+
SCHEMA = """\
|
|
503
|
+
CREATE TABLE IF NOT EXISTS commits (
|
|
504
|
+
sha TEXT PRIMARY KEY,
|
|
505
|
+
repo TEXT NOT NULL,
|
|
506
|
+
author TEXT NOT NULL,
|
|
507
|
+
author_date TEXT NOT NULL,
|
|
508
|
+
message TEXT,
|
|
509
|
+
additions INTEGER DEFAULT 0,
|
|
510
|
+
deletions INTEGER DEFAULT 0,
|
|
511
|
+
url TEXT DEFAULT '',
|
|
512
|
+
fetched_at TEXT DEFAULT CURRENT_TIMESTAMP
|
|
513
|
+
);
|
|
514
|
+
|
|
515
|
+
CREATE TABLE IF NOT EXISTS pull_requests (
|
|
516
|
+
repo TEXT NOT NULL,
|
|
517
|
+
number INTEGER NOT NULL,
|
|
518
|
+
title TEXT,
|
|
519
|
+
author TEXT,
|
|
520
|
+
state TEXT,
|
|
521
|
+
created_at TEXT,
|
|
522
|
+
merged_at TEXT,
|
|
523
|
+
closed_at TEXT,
|
|
524
|
+
additions INTEGER DEFAULT 0,
|
|
525
|
+
deletions INTEGER DEFAULT 0,
|
|
526
|
+
changed_files INTEGER DEFAULT 0,
|
|
527
|
+
review_comments INTEGER DEFAULT 0,
|
|
528
|
+
url TEXT DEFAULT '',
|
|
529
|
+
fetched_at TEXT DEFAULT CURRENT_TIMESTAMP,
|
|
530
|
+
PRIMARY KEY (repo, number)
|
|
531
|
+
);
|
|
532
|
+
|
|
533
|
+
CREATE TABLE IF NOT EXISTS issues (
|
|
534
|
+
repo TEXT NOT NULL,
|
|
535
|
+
number INTEGER NOT NULL,
|
|
536
|
+
title TEXT,
|
|
537
|
+
author TEXT,
|
|
538
|
+
state TEXT,
|
|
539
|
+
labels TEXT DEFAULT '[]',
|
|
540
|
+
created_at TEXT,
|
|
541
|
+
closed_at TEXT,
|
|
542
|
+
url TEXT DEFAULT '',
|
|
543
|
+
fetched_at TEXT DEFAULT CURRENT_TIMESTAMP,
|
|
544
|
+
PRIMARY KEY (repo, number)
|
|
545
|
+
);
|
|
546
|
+
|
|
547
|
+
CREATE TABLE IF NOT EXISTS reviews (
|
|
548
|
+
id INTEGER PRIMARY KEY,
|
|
549
|
+
repo TEXT NOT NULL,
|
|
550
|
+
pr_number INTEGER NOT NULL,
|
|
551
|
+
author TEXT,
|
|
552
|
+
state TEXT,
|
|
553
|
+
submitted_at TEXT,
|
|
554
|
+
body TEXT DEFAULT ''
|
|
555
|
+
);
|
|
556
|
+
|
|
557
|
+
CREATE TABLE IF NOT EXISTS goals (
|
|
558
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
559
|
+
title TEXT NOT NULL,
|
|
560
|
+
description TEXT DEFAULT '',
|
|
561
|
+
target_value REAL NOT NULL,
|
|
562
|
+
current_value REAL DEFAULT 0,
|
|
563
|
+
metric TEXT NOT NULL,
|
|
564
|
+
deadline TEXT,
|
|
565
|
+
status TEXT DEFAULT 'active',
|
|
566
|
+
created_at TEXT DEFAULT CURRENT_TIMESTAMP,
|
|
567
|
+
updated_at TEXT DEFAULT CURRENT_TIMESTAMP
|
|
568
|
+
);
|
|
569
|
+
|
|
570
|
+
CREATE TABLE IF NOT EXISTS reports (
|
|
571
|
+
report_type TEXT NOT NULL,
|
|
572
|
+
period TEXT NOT NULL,
|
|
573
|
+
content TEXT NOT NULL,
|
|
574
|
+
generated_at TEXT,
|
|
575
|
+
PRIMARY KEY (report_type, period)
|
|
576
|
+
);
|
|
577
|
+
|
|
578
|
+
CREATE TABLE IF NOT EXISTS sprint_snapshots (
|
|
579
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
580
|
+
sprint_name TEXT NOT NULL,
|
|
581
|
+
snapshot_date TEXT NOT NULL,
|
|
582
|
+
total_points REAL DEFAULT 0,
|
|
583
|
+
completed_points REAL DEFAULT 0,
|
|
584
|
+
remaining_points REAL DEFAULT 0,
|
|
585
|
+
added_points REAL DEFAULT 0
|
|
586
|
+
);
|
|
587
|
+
|
|
588
|
+
CREATE TABLE IF NOT EXISTS code_quality (
|
|
589
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
590
|
+
repo TEXT NOT NULL,
|
|
591
|
+
snapshot_date TEXT NOT NULL,
|
|
592
|
+
test_coverage REAL DEFAULT 0.0,
|
|
593
|
+
open_bugs INTEGER DEFAULT 0,
|
|
594
|
+
tech_debt_score REAL DEFAULT 0.0,
|
|
595
|
+
lines_added INTEGER DEFAULT 0,
|
|
596
|
+
lines_removed INTEGER DEFAULT 0,
|
|
597
|
+
files_changed INTEGER DEFAULT 0
|
|
598
|
+
);
|
|
599
|
+
|
|
600
|
+
CREATE INDEX IF NOT EXISTS idx_commits_repo ON commits(repo);
|
|
601
|
+
CREATE INDEX IF NOT EXISTS idx_commits_author ON commits(author);
|
|
602
|
+
CREATE INDEX IF NOT EXISTS idx_commits_date ON commits(author_date);
|
|
603
|
+
CREATE INDEX IF NOT EXISTS idx_prs_repo ON pull_requests(repo);
|
|
604
|
+
CREATE INDEX IF NOT EXISTS idx_prs_author ON pull_requests(author);
|
|
605
|
+
CREATE INDEX IF NOT EXISTS idx_issues_repo ON issues(repo);
|
|
606
|
+
CREATE INDEX IF NOT EXISTS idx_issues_state ON issues(state);
|
|
607
|
+
CREATE INDEX IF NOT EXISTS idx_reviews_repo ON reviews(repo);
|
|
608
|
+
CREATE INDEX IF NOT EXISTS idx_reviews_author ON reviews(author);
|
|
609
|
+
CREATE INDEX IF NOT EXISTS idx_goals_status ON goals(status);
|
|
610
|
+
CREATE INDEX IF NOT EXISTS idx_sprint_name ON sprint_snapshots(sprint_name);
|
|
611
|
+
CREATE INDEX IF NOT EXISTS idx_quality_repo ON code_quality(repo);
|
|
612
|
+
"""
|