@theihtisham/dev-pulse 1.0.0 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/.editorconfig +12 -0
  2. package/.github/ISSUE_TEMPLATE/bug_report.yml +43 -0
  3. package/.github/ISSUE_TEMPLATE/feature_request.yml +33 -0
  4. package/.github/PULL_REQUEST_TEMPLATE.md +18 -0
  5. package/.github/dependabot.yml +16 -0
  6. package/.github/workflows/ci.yml +33 -0
  7. package/CODE_OF_CONDUCT.md +27 -0
  8. package/Dockerfile +8 -0
  9. package/LICENSE +21 -21
  10. package/README.md +135 -39
  11. package/SECURITY.md +22 -0
  12. package/devpulse/__init__.py +4 -4
  13. package/devpulse/api/__init__.py +1 -1
  14. package/devpulse/api/app.py +371 -371
  15. package/devpulse/cli/__init__.py +1 -1
  16. package/devpulse/cli/dashboard.py +131 -131
  17. package/devpulse/cli/main.py +678 -678
  18. package/devpulse/cli/render.py +175 -175
  19. package/devpulse/core/__init__.py +34 -34
  20. package/devpulse/core/analytics.py +487 -487
  21. package/devpulse/core/config.py +77 -77
  22. package/devpulse/core/database.py +612 -612
  23. package/devpulse/core/github_client.py +281 -281
  24. package/devpulse/core/models.py +142 -142
  25. package/devpulse/core/report_generator.py +454 -454
  26. package/devpulse/static/.gitkeep +1 -1
  27. package/devpulse/templates/report.html +64 -64
  28. package/package.json +35 -35
  29. package/pyproject.toml +80 -80
  30. package/requirements.txt +14 -14
  31. package/tests/__init__.py +1 -1
  32. package/tests/conftest.py +208 -208
  33. package/tests/test_analytics.py +284 -284
  34. package/tests/test_api.py +313 -313
  35. package/tests/test_cli.py +204 -204
  36. package/tests/test_config.py +47 -47
  37. package/tests/test_database.py +255 -255
  38. package/tests/test_models.py +107 -107
  39. package/tests/test_report_generator.py +173 -173
  40. package/jest.config.js +0 -7
@@ -1,281 +1,281 @@
1
- """GitHub API client with rate limiting and caching."""
2
-
3
- import time
4
- import logging
5
- from datetime import datetime, timedelta
6
- from typing import Any, Optional
7
-
8
- import requests
9
-
10
- from devpulse.core.config import get_settings
11
-
12
- logger = logging.getLogger("devpulse.github")
13
-
14
- # Suppress requests logging of URLs (may contain tokens)
15
- logging.getLogger("urllib3").setLevel(logging.WARNING)
16
-
17
-
18
- class GitHubClient:
19
- """Authenticated GitHub API client with rate-limit awareness."""
20
-
21
- BASE = "https://api.github.com"
22
-
23
- def __init__(
24
- self,
25
- token: Optional[str] = None,
26
- username: Optional[str] = None,
27
- org: Optional[str] = None,
28
- ) -> None:
29
- settings = get_settings()
30
- self.token = token or settings.github_token
31
- self.username = username or settings.github_username
32
- self.org = org or settings.github_org
33
- self.base_url = settings.github_api_url
34
- self.cache_ttl = settings.github_cache_ttl_seconds
35
- self.rate_limit_rpm = settings.github_rate_limit_rpm
36
-
37
- self._session = requests.Session()
38
- if self.token:
39
- self._session.headers.update(
40
- {
41
- "Authorization": f"token {self.token}",
42
- "Accept": "application/vnd.github+json",
43
- }
44
- )
45
- self._session.headers.update({"User-Agent": "DevPulse/1.0"})
46
-
47
- self._last_request_time: float = 0.0
48
- self._min_interval = 60.0 / max(self.rate_limit_rpm, 1)
49
- self._cache: dict[str, tuple[float, Any]] = {}
50
-
51
- # ── Rate limiting ────────────────────────────────────────────────
52
-
53
- def _throttle(self) -> None:
54
- elapsed = time.time() - self._last_request_time
55
- if elapsed < self._min_interval:
56
- time.sleep(self._min_interval - elapsed)
57
- self._last_request_time = time.time()
58
-
59
- def _get(self, url: str, params: Optional[dict[str, Any]] = None) -> Any:
60
- """GET with rate limiting and caching."""
61
- cache_key = f"{url}:{sorted((params or {}).items())}"
62
- now = time.time()
63
- if cache_key in self._cache:
64
- ts, data = self._cache[cache_key]
65
- if now - ts < self.cache_ttl:
66
- return data
67
-
68
- self._throttle()
69
- resp = self._session.get(url, params=params, timeout=30)
70
- resp.raise_for_status()
71
-
72
- data = resp.json()
73
- self._cache[cache_key] = (now, data)
74
-
75
- # Check remaining rate limit
76
- remaining = resp.headers.get("X-RateLimit-Remaining")
77
- if remaining and int(remaining) < 5:
78
- reset_time = int(resp.headers.get("X-RateLimit-Reset", 0))
79
- wait = max(reset_time - int(time.time()), 1)
80
- logger.warning("Rate limit low (%s remaining), waiting %ds", remaining, wait)
81
- time.sleep(wait)
82
-
83
- return data
84
-
85
- def _paginated(self, url: str, params: Optional[dict[str, Any]] = None, max_pages: int = 10) -> list[dict[str, Any]]:
86
- """Fetch all pages of a paginated endpoint."""
87
- results: list[dict[str, Any]] = []
88
- params = dict(params or {})
89
- params.setdefault("per_page", 100)
90
- params["page"] = 1
91
-
92
- for _ in range(max_pages):
93
- data = self._get(url, params)
94
- if not isinstance(data, list) or not data:
95
- break
96
- results.extend(data)
97
- if len(data) < params["per_page"]:
98
- break
99
- params["page"] += 1
100
-
101
- return results
102
-
103
- # ── Repository listing ───────────────────────────────────────────
104
-
105
- def get_repos(self) -> list[str]:
106
- """Get list of repository full names."""
107
- if self.org:
108
- repos = self._paginated(f"{self.base_url}/orgs/{self.org}/repos", {"type": "all"})
109
- elif self.username:
110
- repos = self._paginated(f"{self.base_url}/users/{self.username}/repos", {"type": "all"})
111
- else:
112
- return []
113
- return [r["full_name"] for r in repos if isinstance(r, dict)]
114
-
115
- # ── Commits ──────────────────────────────────────────────────────
116
-
117
- def get_commits(
118
- self, repo: str, since: Optional[str] = None, until: Optional[str] = None, author: Optional[str] = None
119
- ) -> list[dict[str, Any]]:
120
- """Fetch commits for a repository."""
121
- params: dict[str, Any] = {"per_page": 100}
122
- if since:
123
- params["since"] = since
124
- if until:
125
- params["until"] = until
126
- if author:
127
- params["author"] = author
128
-
129
- raw = self._paginated(f"{self.base_url}/repos/{repo}/commits", params, max_pages=5)
130
- commits: list[dict[str, Any]] = []
131
- for c in raw:
132
- commit_data = c.get("commit", c)
133
- author_info = commit_data.get("author", {})
134
- commits.append(
135
- {
136
- "sha": c.get("sha", ""),
137
- "repo": repo,
138
- "author": author_info.get("name", ""),
139
- "author_date": author_info.get("date", ""),
140
- "message": commit_data.get("message", "").split("\n")[0][:200],
141
- "additions": 0,
142
- "deletions": 0,
143
- "url": c.get("html_url", ""),
144
- }
145
- )
146
- return commits
147
-
148
- def get_commit_detail(self, repo: str, sha: str) -> dict[str, Any]:
149
- """Get detailed stats for a single commit."""
150
- data = self._get(f"{self.base_url}/repos/{repo}/commits/{sha}")
151
- stats = data.get("stats", {})
152
- return {"additions": stats.get("additions", 0), "deletions": stats.get("deletions", 0)}
153
-
154
- # ── Pull Requests ────────────────────────────────────────────────
155
-
156
- def get_pull_requests(
157
- self, repo: str, state: str = "all", since: Optional[str] = None
158
- ) -> list[dict[str, Any]]:
159
- """Fetch pull requests for a repository."""
160
- params: dict[str, Any] = {"state": state, "per_page": 100}
161
- raw = self._paginated(f"{self.base_url}/repos/{repo}/pulls", params, max_pages=5)
162
-
163
- prs: list[dict[str, Any]] = []
164
- for p in raw:
165
- if since and p.get("created_at", "") < since:
166
- continue
167
- prs.append(
168
- {
169
- "number": p["number"],
170
- "repo": repo,
171
- "title": p.get("title", ""),
172
- "author": p.get("user", {}).get("login", ""),
173
- "state": "merged" if p.get("merged_at") else p.get("state", "open"),
174
- "created_at": p.get("created_at", ""),
175
- "merged_at": p.get("merged_at"),
176
- "closed_at": p.get("closed_at"),
177
- "additions": p.get("additions", 0),
178
- "deletions": p.get("deletions", 0),
179
- "changed_files": p.get("changed_files", 0),
180
- "review_comments": p.get("comments", 0) + p.get("review_comments", 0),
181
- "url": p.get("html_url", ""),
182
- }
183
- )
184
- return prs
185
-
186
- # ── Issues ───────────────────────────────────────────────────────
187
-
188
- def get_issues(
189
- self, repo: str, state: str = "all", since: Optional[str] = None
190
- ) -> list[dict[str, Any]]:
191
- """Fetch issues (excluding PRs) for a repository."""
192
- params: dict[str, Any] = {"state": state, "per_page": 100}
193
- if since:
194
- params["since"] = since
195
- raw = self._paginated(f"{self.base_url}/repos/{repo}/issues", params, max_pages=5)
196
-
197
- issues: list[dict[str, Any]] = []
198
- for i in raw:
199
- if "pull_request" in i:
200
- continue # skip PRs returned by issues endpoint
201
- issues.append(
202
- {
203
- "number": i["number"],
204
- "repo": repo,
205
- "title": i.get("title", ""),
206
- "author": i.get("user", {}).get("login", ""),
207
- "state": i.get("state", "open"),
208
- "labels": [lbl.get("name", "") for lbl in i.get("labels", [])],
209
- "created_at": i.get("created_at", ""),
210
- "closed_at": i.get("closed_at"),
211
- "url": i.get("html_url", ""),
212
- }
213
- )
214
- return issues
215
-
216
- # ── Reviews ──────────────────────────────────────────────────────
217
-
218
- def get_reviews(self, repo: str, pr_number: int) -> list[dict[str, Any]]:
219
- """Fetch reviews for a pull request."""
220
- raw = self._paginated(
221
- f"{self.base_url}/repos/{repo}/pulls/{pr_number}/reviews", {"per_page": 100}
222
- )
223
- reviews: list[dict[str, Any]] = []
224
- for r in raw:
225
- reviews.append(
226
- {
227
- "id": r["id"],
228
- "repo": repo,
229
- "pr_number": pr_number,
230
- "author": r.get("user", {}).get("login", ""),
231
- "state": r.get("state", ""),
232
- "submitted_at": r.get("submitted_at", ""),
233
- "body": r.get("body", ""),
234
- }
235
- )
236
- return reviews
237
-
238
- # ── Sync ─────────────────────────────────────────────────────────
239
-
240
- def sync_all(
241
- self,
242
- repos: Optional[list[str]] = None,
243
- since: Optional[str] = None,
244
- db: Optional[Any] = None,
245
- ) -> dict[str, int]:
246
- """Sync commits, PRs, issues, and reviews from GitHub."""
247
- if db is None:
248
- from devpulse.core.database import Database
249
- db = Database()
250
-
251
- if repos is None:
252
- repos = self.get_repos()
253
-
254
- if not since:
255
- since = (datetime.utcnow() - timedelta(days=30)).isoformat()
256
-
257
- counts: dict[str, int] = {"commits": 0, "prs": 0, "issues": 0, "reviews": 0}
258
-
259
- for repo in repos:
260
- logger.info("Syncing %s ...", repo)
261
- try:
262
- commits = self.get_commits(repo, since=since)
263
- counts["commits"] += db.upsert_commits(commits)
264
-
265
- prs = self.get_pull_requests(repo, since=since)
266
- counts["prs"] += db.upsert_pull_requests(prs)
267
-
268
- # Fetch reviews for each PR
269
- for pr in prs:
270
- try:
271
- reviews = self.get_reviews(repo, pr["number"])
272
- counts["reviews"] += db.upsert_reviews(reviews)
273
- except Exception:
274
- pass
275
-
276
- issues = self.get_issues(repo, since=since)
277
- counts["issues"] += db.upsert_issues(issues)
278
- except Exception as exc:
279
- logger.error("Error syncing %s: %s", repo, exc)
280
-
281
- return counts
1
+ """GitHub API client with rate limiting and caching."""
2
+
3
+ import time
4
+ import logging
5
+ from datetime import datetime, timedelta
6
+ from typing import Any, Optional
7
+
8
+ import requests
9
+
10
+ from devpulse.core.config import get_settings
11
+
12
# Package-scoped logger for all GitHub client activity.
logger = logging.getLogger("devpulse.github")

# urllib3 logs full request URLs at DEBUG/INFO, and those URLs may embed
# authentication tokens — keep it at WARNING and above.
logging.getLogger("urllib3").setLevel(logging.WARNING)
16
+
17
+
18
class GitHubClient:
    """Authenticated GitHub API client with rate-limit awareness.

    Three layers of protection around the GitHub REST API:
      * client-side throttling to ``github_rate_limit_rpm`` requests/minute,
      * an in-memory TTL response cache (``github_cache_ttl_seconds``),
      * a back-off sleep when GitHub reports fewer than 5 remaining calls.
    """

    # Fallback API root; the effective root comes from settings.github_api_url.
    BASE = "https://api.github.com"

    def __init__(
        self,
        token: Optional[str] = None,
        username: Optional[str] = None,
        org: Optional[str] = None,
    ) -> None:
        """Create a client; unset arguments fall back to application settings."""
        settings = get_settings()
        self.token = token or settings.github_token
        self.username = username or settings.github_username
        self.org = org or settings.github_org
        self.base_url = settings.github_api_url
        self.cache_ttl = settings.github_cache_ttl_seconds
        self.rate_limit_rpm = settings.github_rate_limit_rpm

        self._session = requests.Session()
        if self.token:
            self._session.headers.update(
                {
                    "Authorization": f"token {self.token}",
                    "Accept": "application/vnd.github+json",
                }
            )
        self._session.headers.update({"User-Agent": "DevPulse/1.0"})

        self._last_request_time: float = 0.0
        # Seconds between requests; guard against a zero/negative RPM setting.
        self._min_interval = 60.0 / max(self.rate_limit_rpm, 1)
        # cache_key -> (timestamp, parsed JSON). Entries expire by TTL check only.
        self._cache: dict[str, tuple[float, Any]] = {}

    # ── Rate limiting ────────────────────────────────────────────────

    def _throttle(self) -> None:
        """Sleep just long enough to respect the configured requests-per-minute."""
        elapsed = time.time() - self._last_request_time
        if elapsed < self._min_interval:
            time.sleep(self._min_interval - elapsed)
        self._last_request_time = time.time()

    def _get(self, url: str, params: Optional[dict[str, Any]] = None) -> Any:
        """GET *url* with throttling and TTL-based response caching.

        Raises:
            requests.HTTPError: for non-2xx responses.
        """
        cache_key = f"{url}:{sorted((params or {}).items())}"
        if cache_key in self._cache:
            ts, cached = self._cache[cache_key]
            if time.time() - ts < self.cache_ttl:
                return cached

        self._throttle()
        resp = self._session.get(url, params=params, timeout=30)
        resp.raise_for_status()

        data = resp.json()
        # Timestamp AFTER the request so throttle/network time doesn't
        # prematurely age the cache entry.
        self._cache[cache_key] = (time.time(), data)

        # Back off when GitHub reports the rate limit is nearly exhausted.
        remaining = resp.headers.get("X-RateLimit-Remaining")
        if remaining and int(remaining) < 5:
            reset_time = int(resp.headers.get("X-RateLimit-Reset", 0))
            wait = max(reset_time - int(time.time()), 1)
            logger.warning("Rate limit low (%s remaining), waiting %ds", remaining, wait)
            time.sleep(wait)

        return data

    def _paginated(self, url: str, params: Optional[dict[str, Any]] = None, max_pages: int = 10) -> list[dict[str, Any]]:
        """Fetch up to *max_pages* pages of a paginated endpoint.

        A short page (fewer than ``per_page`` items) ends the walk early.
        The caller's *params* dict is copied, never mutated.
        """
        results: list[dict[str, Any]] = []
        params = dict(params or {})
        params.setdefault("per_page", 100)
        params["page"] = 1

        for _ in range(max_pages):
            data = self._get(url, params)
            if not isinstance(data, list) or not data:
                break
            results.extend(data)
            if len(data) < params["per_page"]:
                break
            params["page"] += 1

        return results

    # ── Repository listing ───────────────────────────────────────────

    def get_repos(self) -> list[str]:
        """Return repository full names for the configured org (preferred) or user."""
        if self.org:
            repos = self._paginated(f"{self.base_url}/orgs/{self.org}/repos", {"type": "all"})
        elif self.username:
            repos = self._paginated(f"{self.base_url}/users/{self.username}/repos", {"type": "all"})
        else:
            return []
        return [r["full_name"] for r in repos if isinstance(r, dict)]

    # ── Commits ──────────────────────────────────────────────────────

    @staticmethod
    def _parse_commit(c: dict[str, Any], repo: str) -> dict[str, Any]:
        """Flatten a raw GitHub commit object into DevPulse's commit record."""
        commit_data = c.get("commit") or c
        # "author" can be present but null (unattributed commits) — never
        # assume it is a dict.
        author_info = commit_data.get("author") or {}
        return {
            "sha": c.get("sha", ""),
            "repo": repo,
            "author": author_info.get("name", ""),
            "author_date": author_info.get("date", ""),
            # First line of the message only, capped at 200 chars.
            "message": (commit_data.get("message") or "").split("\n")[0][:200],
            # Line stats require a per-commit detail call; see get_commit_detail.
            "additions": 0,
            "deletions": 0,
            "url": c.get("html_url", ""),
        }

    def get_commits(
        self, repo: str, since: Optional[str] = None, until: Optional[str] = None, author: Optional[str] = None
    ) -> list[dict[str, Any]]:
        """Fetch commits for *repo*, optionally bounded by ISO timestamps/author."""
        params: dict[str, Any] = {"per_page": 100}
        if since:
            params["since"] = since
        if until:
            params["until"] = until
        if author:
            params["author"] = author

        raw = self._paginated(f"{self.base_url}/repos/{repo}/commits", params, max_pages=5)
        return [self._parse_commit(c, repo) for c in raw]

    def get_commit_detail(self, repo: str, sha: str) -> dict[str, Any]:
        """Return ``{"additions": int, "deletions": int}`` for a single commit."""
        data = self._get(f"{self.base_url}/repos/{repo}/commits/{sha}")
        stats = data.get("stats", {})
        return {"additions": stats.get("additions", 0), "deletions": stats.get("deletions", 0)}

    # ── Pull Requests ────────────────────────────────────────────────

    @staticmethod
    def _parse_pr(p: dict[str, Any], repo: str) -> dict[str, Any]:
        """Flatten a raw GitHub pull-request object into DevPulse's PR record."""
        return {
            "number": p["number"],
            "repo": repo,
            "title": p.get("title", ""),
            # "user" is null for deleted ("ghost") accounts.
            "author": (p.get("user") or {}).get("login", ""),
            # GitHub reports merged PRs as "closed"; expose "merged" explicitly.
            "state": "merged" if p.get("merged_at") else p.get("state", "open"),
            "created_at": p.get("created_at", ""),
            "merged_at": p.get("merged_at"),
            "closed_at": p.get("closed_at"),
            "additions": p.get("additions", 0),
            "deletions": p.get("deletions", 0),
            "changed_files": p.get("changed_files", 0),
            "review_comments": p.get("comments", 0) + p.get("review_comments", 0),
            "url": p.get("html_url", ""),
        }

    def get_pull_requests(
        self, repo: str, state: str = "all", since: Optional[str] = None
    ) -> list[dict[str, Any]]:
        """Fetch pull requests for *repo*; *since* filters on created_at.

        The pulls endpoint has no server-side "since" parameter, so the
        filter is applied client-side after fetching.
        """
        params: dict[str, Any] = {"state": state, "per_page": 100}
        raw = self._paginated(f"{self.base_url}/repos/{repo}/pulls", params, max_pages=5)
        return [
            self._parse_pr(p, repo)
            for p in raw
            if not (since and p.get("created_at", "") < since)
        ]

    # ── Issues ───────────────────────────────────────────────────────

    @staticmethod
    def _parse_issue(i: dict[str, Any], repo: str) -> dict[str, Any]:
        """Flatten a raw GitHub issue object into DevPulse's issue record."""
        return {
            "number": i["number"],
            "repo": repo,
            "title": i.get("title", ""),
            # "user" is null for deleted ("ghost") accounts.
            "author": (i.get("user") or {}).get("login", ""),
            "state": i.get("state", "open"),
            "labels": [lbl.get("name", "") for lbl in (i.get("labels") or [])],
            "created_at": i.get("created_at", ""),
            "closed_at": i.get("closed_at"),
            "url": i.get("html_url", ""),
        }

    def get_issues(
        self, repo: str, state: str = "all", since: Optional[str] = None
    ) -> list[dict[str, Any]]:
        """Fetch issues for *repo*, excluding pull requests."""
        params: dict[str, Any] = {"state": state, "per_page": 100}
        if since:
            params["since"] = since
        raw = self._paginated(f"{self.base_url}/repos/{repo}/issues", params, max_pages=5)
        # The issues endpoint also returns PRs; they carry a "pull_request" key.
        return [self._parse_issue(i, repo) for i in raw if "pull_request" not in i]

    # ── Reviews ──────────────────────────────────────────────────────

    @staticmethod
    def _parse_review(r: dict[str, Any], repo: str, pr_number: int) -> dict[str, Any]:
        """Flatten a raw GitHub review object into DevPulse's review record."""
        return {
            "id": r["id"],
            "repo": repo,
            "pr_number": pr_number,
            # "user" is null for deleted ("ghost") accounts.
            "author": (r.get("user") or {}).get("login", ""),
            "state": r.get("state", ""),
            "submitted_at": r.get("submitted_at", ""),
            "body": r.get("body", ""),
        }

    def get_reviews(self, repo: str, pr_number: int) -> list[dict[str, Any]]:
        """Fetch reviews for pull request *pr_number* in *repo*."""
        raw = self._paginated(
            f"{self.base_url}/repos/{repo}/pulls/{pr_number}/reviews", {"per_page": 100}
        )
        return [self._parse_review(r, repo, pr_number) for r in raw]

    # ── Sync ─────────────────────────────────────────────────────────

    def sync_all(
        self,
        repos: Optional[list[str]] = None,
        since: Optional[str] = None,
        db: Optional[Any] = None,
    ) -> dict[str, int]:
        """Sync commits, PRs, issues, and reviews from GitHub into the database.

        Args:
            repos: Repository full names; defaults to everything visible
                via :meth:`get_repos`.
            since: ISO-8601 lower bound; defaults to the last 30 days.
            db: Object exposing ``upsert_commits`` / ``upsert_pull_requests`` /
                ``upsert_issues`` / ``upsert_reviews``; a fresh ``Database``
                is created when omitted.

        Returns:
            Counts of upserted rows per entity type. A failure on one repo
            is logged and does not abort the remaining repos.
        """
        if db is None:
            from devpulse.core.database import Database
            db = Database()

        if repos is None:
            repos = self.get_repos()

        if not since:
            # GitHub expects an ISO-8601 UTC timestamp ("...Z"); naive
            # utcnow().isoformat() lacks the designator, so build it explicitly.
            from datetime import timezone
            since = (datetime.now(timezone.utc) - timedelta(days=30)).strftime("%Y-%m-%dT%H:%M:%SZ")

        counts: dict[str, int] = {"commits": 0, "prs": 0, "issues": 0, "reviews": 0}

        for repo in repos:
            logger.info("Syncing %s ...", repo)
            try:
                commits = self.get_commits(repo, since=since)
                counts["commits"] += db.upsert_commits(commits)

                prs = self.get_pull_requests(repo, since=since)
                counts["prs"] += db.upsert_pull_requests(prs)

                # Reviews are per-PR; one failure should not abort the repo sync,
                # but it should not vanish silently either.
                for pr in prs:
                    try:
                        reviews = self.get_reviews(repo, pr["number"])
                        counts["reviews"] += db.upsert_reviews(reviews)
                    except Exception as exc:
                        logger.debug("Skipping reviews for %s#%s: %s", repo, pr["number"], exc)

                issues = self.get_issues(repo, since=since)
                counts["issues"] += db.upsert_issues(issues)
            except Exception as exc:
                logger.error("Error syncing %s: %s", repo, exc)

        return counts