bb-stats 0.1.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bb_stats-0.1.1/PKG-INFO +11 -0
- bb_stats-0.1.1/README.md +85 -0
- bb_stats-0.1.1/bb_stats.egg-info/PKG-INFO +11 -0
- bb_stats-0.1.1/bb_stats.egg-info/SOURCES.txt +9 -0
- bb_stats-0.1.1/bb_stats.egg-info/dependency_links.txt +1 -0
- bb_stats-0.1.1/bb_stats.egg-info/entry_points.txt +2 -0
- bb_stats-0.1.1/bb_stats.egg-info/requires.txt +6 -0
- bb_stats-0.1.1/bb_stats.egg-info/top_level.txt +1 -0
- bb_stats-0.1.1/bb_stats.py +829 -0
- bb_stats-0.1.1/pyproject.toml +24 -0
- bb_stats-0.1.1/setup.cfg +4 -0
bb_stats-0.1.1/PKG-INFO
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: bb-stats
|
|
3
|
+
Version: 0.1.1
|
|
4
|
+
Summary: Terminal Bitbucket pull request and commit statistics viewer
|
|
5
|
+
Requires-Python: >=3.11
|
|
6
|
+
Requires-Dist: python-dotenv>=1.0
|
|
7
|
+
Requires-Dist: requests>=2.31
|
|
8
|
+
Requires-Dist: rich>=13.0
|
|
9
|
+
Requires-Dist: textual>=0.80.0
|
|
10
|
+
Requires-Dist: ty
|
|
11
|
+
Requires-Dist: ruff
|
bb_stats-0.1.1/README.md
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
# bitbucket-stats
|
|
2
|
+
|
|
3
|
+
Terminal-based Bitbucket pull request and commit statistics viewer built with [Textual](https://textual.textualize.io/).
|
|
4
|
+
|
|
5
|
+

|
|
6
|
+
|
|
7
|
+
## Features
|
|
8
|
+
|
|
9
|
+
- Interactive workspace and repository selector (`--list`)
|
|
10
|
+
- Tabbed TUI with keyboard navigation (keys `1`–`8`)
|
|
11
|
+
- **Overview** — merged/open PR counts, age stats, top author and reviewer
|
|
12
|
+
- **Activity** — weekly and monthly merge bar charts
|
|
13
|
+
- **Open PRs** — sortable table of currently open pull requests
|
|
14
|
+
- **Authors** — per-author PR count, avg/median age, comment stats
|
|
15
|
+
- **Reviews** — reviewer leaderboard with approval rates
|
|
16
|
+
- **Branches** — target branch merge frequency and age
|
|
17
|
+
- **Slowest** — the 25 longest-lived merged PRs
|
|
18
|
+
- **Commits** — weekly/monthly/day-of-week/hour-of-day charts and author breakdown
|
|
19
|
+
- Local JSON caching per workspace/repo to avoid redundant API calls
|
|
20
|
+
|
|
21
|
+
## Setup
|
|
22
|
+
|
|
23
|
+
### Environment variables
|
|
24
|
+
|
|
25
|
+
Create a `.env` file (or export the variables) with your Bitbucket **app password** and account email:
|
|
26
|
+
|
|
27
|
+
```
|
|
28
|
+
BITBUCKET=your_app_password
|
|
29
|
+
BITBUCKET_EMAIL=your@email.com
|
|
30
|
+
```
|
|
31
|
+
|
|
32
|
+
### Install
|
|
33
|
+
|
|
34
|
+
```bash
|
|
35
|
+
uv sync
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
Or run directly as a script without installing:
|
|
39
|
+
|
|
40
|
+
```bash
|
|
41
|
+
uv run bb_stats.py
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
## Usage
|
|
45
|
+
|
|
46
|
+
```bash
|
|
47
|
+
# Run with default workspace/repo (configured in bb_stats.py)
|
|
48
|
+
bb-stats
|
|
49
|
+
|
|
50
|
+
# Interactive workspace → repo picker
|
|
51
|
+
bb-stats --list # or -l
|
|
52
|
+
|
|
53
|
+
# Force refresh cached data
|
|
54
|
+
bb-stats --refresh # or -r
|
|
55
|
+
|
|
56
|
+
# Combine flags
|
|
57
|
+
bb-stats -l -r
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
### Keyboard shortcuts
|
|
61
|
+
|
|
62
|
+
| Key | Tab |
|
|
63
|
+
|-----|------------|
|
|
64
|
+
| `1` | Overview |
|
|
65
|
+
| `2` | Activity |
|
|
66
|
+
| `3` | Open PRs |
|
|
67
|
+
| `4` | Authors |
|
|
68
|
+
| `5` | Reviews |
|
|
69
|
+
| `6` | Branches |
|
|
70
|
+
| `7` | Slowest |
|
|
71
|
+
| `8` | Commits |
|
|
72
|
+
| `q` | Quit |
|
|
73
|
+
|
|
74
|
+
## Development
|
|
75
|
+
|
|
76
|
+
```bash
|
|
77
|
+
# Lint
|
|
78
|
+
uvx ruff check bb_stats.py
|
|
79
|
+
|
|
80
|
+
# Format
|
|
81
|
+
uvx ruff format bb_stats.py
|
|
82
|
+
|
|
83
|
+
# Type check
|
|
84
|
+
uvx ty check bb_stats.py
|
|
85
|
+
```
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: bb-stats
|
|
3
|
+
Version: 0.1.1
|
|
4
|
+
Summary: Terminal Bitbucket pull request and commit statistics viewer
|
|
5
|
+
Requires-Python: >=3.11
|
|
6
|
+
Requires-Dist: python-dotenv>=1.0
|
|
7
|
+
Requires-Dist: requests>=2.31
|
|
8
|
+
Requires-Dist: rich>=13.0
|
|
9
|
+
Requires-Dist: textual>=0.80.0
|
|
10
|
+
Requires-Dist: ty
|
|
11
|
+
Requires-Dist: ruff
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
bb_stats
|
|
@@ -0,0 +1,829 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
# Copyright (c) 2026. All rights reserved.
|
|
3
|
+
"""Terminal Bitbucket pull request and commit statistics viewer."""
|
|
4
|
+
|
|
5
|
+
# /// script
|
|
6
|
+
# requires-python = ">=3.11"
|
|
7
|
+
# dependencies = ["requests", "rich", "python-dotenv", "textual>=0.80.0"]
|
|
8
|
+
# ///
|
|
9
|
+
# Can also be installed as a package: uvx bb-stats
|
|
10
|
+
|
|
11
|
+
import argparse
|
|
12
|
+
import json
|
|
13
|
+
import logging
|
|
14
|
+
import os
|
|
15
|
+
import sys
|
|
16
|
+
from collections import defaultdict
|
|
17
|
+
from concurrent.futures import ThreadPoolExecutor, as_completed
|
|
18
|
+
from datetime import UTC, datetime
|
|
19
|
+
from pathlib import Path
|
|
20
|
+
from statistics import mean, median
|
|
21
|
+
from typing import Any, ClassVar
|
|
22
|
+
|
|
23
|
+
import requests
|
|
24
|
+
from dotenv import load_dotenv
|
|
25
|
+
from requests.adapters import HTTPAdapter
|
|
26
|
+
from rich.console import Console
|
|
27
|
+
from textual import work
|
|
28
|
+
from textual.app import App, ComposeResult
|
|
29
|
+
from textual.binding import Binding
|
|
30
|
+
from textual.containers import Vertical
|
|
31
|
+
from textual.screen import Screen
|
|
32
|
+
from textual.widgets import (
|
|
33
|
+
DataTable,
|
|
34
|
+
Footer,
|
|
35
|
+
Header,
|
|
36
|
+
Input,
|
|
37
|
+
Label,
|
|
38
|
+
ListItem,
|
|
39
|
+
ListView,
|
|
40
|
+
LoadingIndicator,
|
|
41
|
+
Static,
|
|
42
|
+
TabbedContent,
|
|
43
|
+
TabPane,
|
|
44
|
+
)
|
|
45
|
+
from urllib3.util.retry import Retry
|
|
46
|
+
|
|
47
|
+
# Plain-message logging; styled output goes through the rich Console below.
logging.basicConfig(level=logging.INFO, format="%(message)s")
logger = logging.getLogger(__name__)

# Base path used to derive per-repository cache files (see cache_path()).
CACHE_FILE = Path(__file__).parent / "bb_cache.json"
console = Console()
# Per-request timeout (seconds) applied to every Bitbucket API call.
DEFAULT_TIMEOUT = 30

# Module-level authenticated HTTP session shared by all fetch helpers.
# NOTE(review): declared but not assigned here — presumably assigned via
# _build_session() at startup before any fetch helper runs; confirm in main().
SESSION: requests.Session
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def _build_session(auth: tuple[str, str]) -> requests.Session:
    """Create an authenticated requests session with retries and pooling.

    Retries GETs on rate-limit (429) and transient server errors with
    exponential backoff, and pools connections for the threaded fetches.
    """
    retry_policy = Retry(
        total=4,
        backoff_factor=1,
        status_forcelist=(429, 500, 502, 503, 504),
        allowed_methods=("GET",),
        raise_on_status=False,
    )
    adapter = HTTPAdapter(max_retries=retry_policy, pool_connections=20, pool_maxsize=20)
    sess = requests.Session()
    # Install the retrying adapter for both schemes.
    for scheme in ("https://", "http://"):
        sess.mount(scheme, adapter)
    sess.auth = auth
    return sess
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
# ---------------------------------------------------------------------------
|
|
74
|
+
# API helpers
|
|
75
|
+
# ---------------------------------------------------------------------------
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def _paginate(url: str, params: dict | None = None) -> list[dict]:
    """Follow Bitbucket's paginated API and collect every page's values."""
    collected: list[dict] = []
    query = dict(params) if params else {}
    next_url: str | None = url
    while next_url:
        response = SESSION.get(next_url, params=query, timeout=DEFAULT_TIMEOUT)
        response.raise_for_status()
        payload = response.json()
        collected.extend(payload.get("values", []))
        # The "next" link already embeds the query string, so clear params.
        next_url = payload.get("next")
        query = {}
    return collected
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def fetch_workspaces() -> list[dict]:
    """Fetch every workspace visible to the authenticated user.

    Returns:
        Workspace payloads, most recently updated first.

    """
    query = {"pagelen": 50, "sort": "-updated_on"}
    return _paginate("https://api.bitbucket.org/2.0/workspaces", query)
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
def fetch_repos(workspace: str) -> list[dict]:
    """Fetch the most recently updated repositories in *workspace*.

    Only the first page (40 repos) is requested, which is enough for the
    interactive picker.

    Returns:
        Repository payloads from the Bitbucket API.

    """
    response = SESSION.get(
        f"https://api.bitbucket.org/2.0/repositories/{workspace}",
        params={"pagelen": 40, "sort": "-updated_on"},
        timeout=DEFAULT_TIMEOUT,
    )
    response.raise_for_status()
    payload = response.json()
    return payload.get("values", [])
|
|
115
|
+
|
|
116
|
+
|
|
117
|
+
def fetch_all_prs(workspace: str, repo_slug: str, state: str) -> list[dict]:
    """Fetch every pull request of the repository in the given *state*.

    Returns:
        Pull request payloads matching the requested state.

    """
    endpoint = f"https://api.bitbucket.org/2.0/repositories/{workspace}/{repo_slug}/pullrequests"
    query = {"state": state, "pagelen": 50}
    return _paginate(endpoint, query)
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
def fetch_pr_detail(workspace: str, repo_slug: str, pr_id: int) -> dict:
    """Fetch the expanded payload for one pull request.

    The list endpoint omits participants; this per-PR endpoint includes them.

    Returns:
        The expanded pull request payload from Bitbucket.

    """
    url = f"https://api.bitbucket.org/2.0/repositories/{workspace}/{repo_slug}/pullrequests/{pr_id}"
    response = SESSION.get(url, timeout=DEFAULT_TIMEOUT)
    response.raise_for_status()
    return response.json()
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
def fetch_commits(workspace: str, repo_slug: str) -> list[dict]:
    """Fetch commit history trimmed to the fields the stats need.

    Returns:
        Commit payloads containing hash, date, and author fields.

    """
    # Restrict the payload to the fields used downstream, keeping pages small.
    wanted_fields = "next,values.hash,values.date,values.author.raw,values.author.user.display_name"
    endpoint = f"https://api.bitbucket.org/2.0/repositories/{workspace}/{repo_slug}/commits"
    return _paginate(endpoint, {"pagelen": 100, "fields": wanted_fields})
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def commit_author(commit: dict) -> str:
    """Resolve a readable author name for *commit*.

    Prefers the linked Bitbucket user account; otherwise falls back to the
    raw ``Name <email>`` string with the email portion stripped.

    Returns:
        The best available author display name.

    """
    author = commit.get("author", {})
    linked_user = author.get("user")
    if linked_user:
        return linked_user.get("display_name", "Unknown")
    raw = author.get("raw", "Unknown")
    if "<" in raw:
        return raw.split("<")[0].strip()
    return raw
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
def pr_author_name(pr: dict) -> str:
    """Look up the PR author's display name.

    Returns:
        The author display name, or ``"Unknown"`` when it is missing.

    """
    author = pr.get("author", {})
    return author.get("display_name", "Unknown")
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
def pr_branch_name(pr: dict) -> str:
    """Look up the PR's destination (target) branch name.

    Returns:
        The target branch name, or ``"Unknown"`` when it is missing.

    """
    branch = pr.get("destination", {}).get("branch", {})
    return branch.get("name", "Unknown")
|
|
192
|
+
|
|
193
|
+
|
|
194
|
+
def pr_title(pr: dict, max_len: int = 52) -> str:
    """Return a truncated PR title suitable for table display.

    Args:
        pr: Pull request payload.
        max_len: Maximum number of characters kept from the title.

    Returns:
        The title truncated to *max_len* characters, or ``"Untitled"``
        when the title is missing or explicitly ``None``.

    """
    # `or "Untitled"` also covers a present-but-None title, which the old
    # dict.get(default) form missed and which would crash the slice below.
    title = pr.get("title") or "Untitled"
    return title[:max_len]
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def reviewer_name(participant: dict) -> str | None:
|
|
205
|
+
"""Return a participant reviewer display name when present.
|
|
206
|
+
|
|
207
|
+
Returns:
|
|
208
|
+
The reviewer display name, or ``None`` if it is absent.
|
|
209
|
+
|
|
210
|
+
"""
|
|
211
|
+
user = participant.get("user") or {}
|
|
212
|
+
return user.get("display_name")
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
def enrich_with_participants(workspace: str, repo_slug: str, prs: list[dict]) -> list[dict]:
    """Fetch the expanded payload for each PR, preserving input order.

    A PR whose detail fetch fails falls back to its summary payload so a
    transient API error never drops data from the stats.

    Returns:
        A list of PR payloads aligned to the input order.

    """
    total = len(prs)
    console.print(f"[dim]Fetching full details for {total} PRs...[/dim]")
    by_id = {pr["id"]: pr for pr in prs}
    expanded: dict[int, dict] = {}
    with ThreadPoolExecutor(max_workers=20) as pool:
        pending = {pool.submit(fetch_pr_detail, workspace, repo_slug, pr["id"]): pr["id"] for pr in prs}
        for done, future in enumerate(as_completed(pending), 1):
            pr_id = pending[future]
            try:
                expanded[pr_id] = future.result()
            except requests.RequestException as exc:
                # Keep the summary payload rather than losing the PR entirely.
                logger.warning("Warning: failed to fetch PR details for #%s: %s", pr_id, exc)
                expanded[pr_id] = by_id[pr_id]
            if done % 50 == 0 or done == total:
                console.print(f"[dim]  {done}/{total}[/dim]")
    # Re-emit in the caller's original order.
    return [expanded[pr["id"]] for pr in prs]
|
|
237
|
+
|
|
238
|
+
|
|
239
|
+
# ---------------------------------------------------------------------------
|
|
240
|
+
# Cache — per workspace/repo
|
|
241
|
+
# ---------------------------------------------------------------------------
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
def cache_path(workspace: str, repo_slug: str) -> Path:
    """Build the cache file location for one workspace/repository pair.

    Returns:
        The per-repo JSON cache path, next to the default cache file.

    """
    filename = f"bb_cache_{workspace}_{repo_slug}.json"
    return CACHE_FILE.parent / filename
|
|
252
|
+
|
|
253
|
+
|
|
254
|
+
def load_cache(workspace: str, repo_slug: str) -> tuple[list[dict], list[dict], list[dict], str] | None:
    """Load cached PR and commit data for a repository if it exists.

    A missing, unreadable, or structurally invalid cache file is treated as
    a cache miss instead of crashing the app — the data can simply be
    re-fetched from the API.

    Returns:
        The cached merged PRs, open PRs, commits, and fetch timestamp,
        or ``None`` on any cache miss.

    """
    path = cache_path(workspace, repo_slug)
    if not path.exists():
        return None
    try:
        data = json.loads(path.read_text())
        return data["merged"], data["open"], data.get("commits", []), data.get("fetched_at", "unknown")
    except (OSError, json.JSONDecodeError, KeyError, TypeError):
        # Corrupt, truncated, or wrong-shaped cache: fall back to a fresh fetch.
        return None
|
|
266
|
+
|
|
267
|
+
|
|
268
|
+
def save_cache(workspace: str, repo_slug: str, snapshot: dict[str, Any]) -> None:
    """Persist a repository snapshot to the local JSON cache.

    The snapshot is written to a temporary file and then atomically renamed
    into place (os.replace), so an interrupted run never leaves a truncated,
    unparseable cache file behind.
    """
    target = cache_path(workspace, repo_slug)
    tmp = target.with_suffix(".json.tmp")
    tmp.write_text(json.dumps(snapshot))
    os.replace(tmp, target)
|
|
271
|
+
|
|
272
|
+
|
|
273
|
+
# ---------------------------------------------------------------------------
|
|
274
|
+
# Stats
|
|
275
|
+
# ---------------------------------------------------------------------------
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
def _matching_commit_date(commit_dates: dict[str, datetime], commit_hash: str | None) -> datetime | None:
|
|
279
|
+
if not commit_hash:
|
|
280
|
+
return None
|
|
281
|
+
if commit_hash in commit_dates:
|
|
282
|
+
return commit_dates[commit_hash]
|
|
283
|
+
for known_hash, dt in commit_dates.items():
|
|
284
|
+
if known_hash.startswith(commit_hash) or commit_hash.startswith(known_hash):
|
|
285
|
+
return dt
|
|
286
|
+
return None
|
|
287
|
+
|
|
288
|
+
|
|
289
|
+
def pr_age_days(
|
|
290
|
+
pr: dict,
|
|
291
|
+
reference_now: datetime | None = None,
|
|
292
|
+
merge_commit_dates: dict[str, datetime] | None = None,
|
|
293
|
+
) -> float:
|
|
294
|
+
"""Return PR age in days using cached snapshot time for open PRs.
|
|
295
|
+
|
|
296
|
+
Returns:
|
|
297
|
+
The PR age expressed in fractional days.
|
|
298
|
+
|
|
299
|
+
"""
|
|
300
|
+
created = datetime.fromisoformat(pr["created_on"])
|
|
301
|
+
if pr["state"] == "OPEN":
|
|
302
|
+
end = reference_now or datetime.now(UTC)
|
|
303
|
+
else:
|
|
304
|
+
merge_hash = pr.get("merge_commit", {}).get("hash")
|
|
305
|
+
# Bitbucket's PR payload here does not expose a direct merged timestamp.
|
|
306
|
+
# We infer merge time from the merge commit date when available, then fall
|
|
307
|
+
# back to updated_on for older or partial payloads.
|
|
308
|
+
end = _matching_commit_date(merge_commit_dates or {}, merge_hash) or datetime.fromisoformat(pr["updated_on"])
|
|
309
|
+
return max(0.0, (end - created).total_seconds() / 86400)
|
|
310
|
+
|
|
311
|
+
|
|
312
|
+
def _build_commit_stats(commits: list[dict] | None) -> dict[str, Any]:
    """Aggregate commit counts into chart-ready series.

    Returns:
        A mapping with per-author, weekly, monthly, day-of-week, and
        hour-of-day commit counters plus the total commit count.

    """
    day_names = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
    by_author: dict[str, int] = defaultdict(int)
    by_week: dict[str, int] = defaultdict(int)
    by_month: dict[str, int] = defaultdict(int)
    by_dow: dict[str, int] = defaultdict(int)
    by_hour: dict[int, int] = defaultdict(int)

    all_commits = commits or []
    for commit in all_commits:
        by_author[commit_author(commit)] += 1
        when = datetime.fromisoformat(commit["date"])
        by_week[when.strftime("%Y-W%W")] += 1
        by_month[when.strftime("%Y-%m")] += 1
        by_dow[day_names[when.weekday()]] += 1
        by_hour[when.hour] += 1

    return {
        # Authors sorted by descending commit count.
        "commit_authors": dict(sorted(by_author.items(), key=lambda item: -item[1])),
        "commit_weekly": dict(sorted(by_week.items())),
        "commit_monthly": dict(sorted(by_month.items())),
        # Day-of-week series is emitted in calendar order, zero-filled.
        "commit_dow": {day: by_dow[day] for day in day_names},
        "commit_hour": dict(sorted(by_hour.items())),
        "total_commits": len(all_commits),
    }
|
|
342
|
+
|
|
343
|
+
|
|
344
|
+
def build_stats(
    merged: list[dict],
    open_prs: list[dict],
    commits: list[dict] | None = None,
    reference_now: datetime | None = None,
) -> dict:
    """Build the full dataset consumed by the Textual UI.

    Args:
        merged: Merged PR payloads.
        open_prs: Currently open PR payloads.
        commits: Optional commit payloads used to infer merge timestamps
            and build the commit charts.
        reference_now: Snapshot time used to age open PRs; defaults to now.

    Returns:
        A stats mapping ready for the Textual application.

    """
    # Commit hash -> commit date; used to infer merge timestamps for PRs.
    commit_dates = {
        commit["hash"]: datetime.fromisoformat(commit["date"])
        for commit in commits or []
        if commit.get("hash") and commit.get("date")
    }
    # Merge counts keyed by "YYYY-Www" and "YYYY-MM".
    weekly: dict[str, int] = defaultdict(int)
    monthly: dict[str, int] = defaultdict(int)
    for pr in merged:
        merge_dt = _matching_commit_date(commit_dates, pr.get("merge_commit", {}).get("hash"))
        # Fall back to updated_on when the merge commit date is unknown.
        dt = merge_dt or datetime.fromisoformat(pr["updated_on"])
        weekly[dt.strftime("%Y-W%W")] += 1
        monthly[dt.strftime("%Y-%m")] += 1

    # Merged PRs grouped by author display name.
    author_prs: dict[str, list[dict]] = defaultdict(list)
    for pr in merged:
        author_prs[pr_author_name(pr)].append(pr)

    # Reviewer leaderboard: reviewed/approved counts per display name.
    reviewer_counts: dict[str, dict] = defaultdict(lambda: {"reviewed": 0, "approved": 0})
    for pr in merged:
        for p in pr.get("participants", []):
            if p.get("role") == "REVIEWER":
                name = reviewer_name(p)
                if not name:
                    continue
                reviewer_counts[name]["reviewed"] += 1
                if p.get("approved"):
                    reviewer_counts[name]["approved"] += 1

    # Merge ages (in days) grouped by destination branch.
    branch_counts: dict[str, list[float]] = defaultdict(list)
    for pr in merged:
        branch_counts[pr_branch_name(pr)].append(pr_age_days(pr, merge_commit_dates=commit_dates))

    stats = {
        "merged": merged,
        "open": open_prs,
        "merged_ages": [pr_age_days(pr, merge_commit_dates=commit_dates) for pr in merged],
        "weekly": dict(sorted(weekly.items())),
        "monthly": dict(sorted(monthly.items())),
        "author_prs": dict(author_prs),
        "reviewer_counts": dict(reviewer_counts),
        "branch_counts": dict(branch_counts),
        "merge_commit_dates": commit_dates,
        # Snapshot time so open-PR ages stay stable for cached data.
        "reference_now": reference_now or datetime.now(UTC),
    }
    # Fold in the commit-centric chart series (commit_* keys, total_commits).
    stats.update(_build_commit_stats(commits))
    return stats
|
|
402
|
+
|
|
403
|
+
|
|
404
|
+
def bar_chart(data: dict[str, int], width: int = 40, last_n: int = 16) -> str:
    """Draw a horizontal unicode bar chart for the last *last_n* entries.

    Each row shows a right-aligned 7-character label, a bar scaled to the
    series maximum, and the raw value.

    Returns:
        A multiline chart string, or ``"No data"`` for an empty series.

    """
    tail = list(data.items())[-last_n:]
    if not tail:
        return "No data"
    peak = max(count for _, count in tail)
    rows = []
    for label, count in tail:
        filled = int((count / peak) * width) if peak else 0
        rows.append(f" {label[-7:]:>7} │{'█' * filled:<{width}} {count}")
    return "\n".join(rows)
|
|
420
|
+
|
|
421
|
+
|
|
422
|
+
# ---------------------------------------------------------------------------
|
|
423
|
+
# Selection TUI
|
|
424
|
+
# ---------------------------------------------------------------------------
|
|
425
|
+
|
|
426
|
+
|
|
427
|
+
class ItemSelectScreen(Screen[str | None]):
    """Filterable list. Dismisses with selected value or None on Escape."""

    BINDINGS: ClassVar[tuple[Binding, ...]] = (Binding("escape", "dismiss(None)", "Back"),)

    def __init__(self, title: str, items: list[tuple[str, str]]) -> None:
        """Store the picker title and available (display, value) items."""
        super().__init__()
        self._title = title
        # Full item set; never mutated, acts as the filter source.
        self._all_items = items
        # Items currently displayed; kept aligned with the ListView rows so
        # a selected index can be mapped back to its value.
        self._visible: list[tuple[str, str]] = list(items)

    def compose(self) -> ComposeResult:
        """Compose the filter input and selectable list.

        Yields:
            The widgets used by the filterable picker screen.

        """
        yield Header()
        yield Label(f" {self._title}", id="sel-title")
        yield Input(placeholder="Type to filter...", id="sel-input")
        yield ListView(id="sel-list")
        yield Footer()

    def on_mount(self) -> None:
        """Render the initial item set and focus the filter input."""
        self._render_list(self._all_items)
        self.query_one(Input).focus()

    def _render_list(self, items: list[tuple[str, str]]) -> None:
        # Rebuild the ListView to mirror *items*; _visible must stay in
        # lockstep with the widget rows for index-based selection to work.
        self._visible = list(items)
        lv = self.query_one(ListView)
        lv.clear()
        for display, _ in items:
            lv.append(ListItem(Label(display)))

    def on_input_changed(self, event: Input.Changed) -> None:
        """Filter visible items as the query changes (case-insensitive substring)."""
        q = event.value.lower()
        self._render_list([(d, v) for d, v in self._all_items if q in d.lower()])

    def on_list_view_selected(self, event: ListView.Selected) -> None:
        """Dismiss the screen with the value of the highlighted row."""
        idx = event.list_view.index
        # Guard against a stale index after the list was re-filtered.
        if idx is not None and 0 <= idx < len(self._visible):
            self.dismiss(self._visible[idx][1])
|
|
474
|
+
|
|
475
|
+
|
|
476
|
+
class SelectionApp(App[tuple[str, str] | None]):
    """Workspace → repo picker."""

    CSS = """
    #sel-title { padding: 1 2; color: $accent; text-style: bold; }
    #sel-input { margin: 0 1; }
    ListView { height: 1fr; }
    LoadingIndicator { height: 1fr; }
    """

    def compose(self) -> ComposeResult:  # noqa: PLR6301
        """Compose the loading shell used while fetching options.

        Yields:
            The widgets used by the selection app shell.

        """
        yield Header()
        yield LoadingIndicator()
        yield Footer()

    def on_mount(self) -> None:
        """Kick off the workspace fetch on startup."""
        self._load_workspaces()

    @work(thread=True)
    def _load_workspaces(self) -> None:
        # Worker thread: network I/O must not block the UI event loop.
        workspaces = fetch_workspaces()
        items = [(f"{w.get('name', w['slug'])} ({w['slug']})", w["slug"]) for w in workspaces]
        # UI mutations are marshalled back onto the event-loop thread.
        self.call_from_thread(
            self.push_screen, ItemSelectScreen("Select workspace", items), self._on_workspace_selected
        )

    def _on_workspace_selected(self, workspace: str | None) -> None:
        # None means the user escaped out of the picker.
        if workspace is None:
            self.exit(None)
            return
        # NOTE(review): _workspace is not initialized in __init__; it is only
        # read in _on_repo_selected, which runs after this assignment.
        self._workspace = workspace
        self._load_repos(workspace)

    @work(thread=True)
    def _load_repos(self, workspace: str) -> None:
        # Worker thread: fetch repos, then push the repo picker on the UI thread.
        repos = fetch_repos(workspace)
        items = [(f"{r.get('name', r['slug'])} ({r['slug']})", r["slug"]) for r in repos]
        self.call_from_thread(self.push_screen, ItemSelectScreen("Select repository", items), self._on_repo_selected)

    def _on_repo_selected(self, repo: str | None) -> None:
        if repo is None:
            self.exit(None)
            return
        # Exit the picker app, handing the (workspace, repo) pair to the caller.
        self.exit((self._workspace, repo))
|
|
527
|
+
|
|
528
|
+
|
|
529
|
+
# ---------------------------------------------------------------------------
|
|
530
|
+
# Stats TUI
|
|
531
|
+
# ---------------------------------------------------------------------------
|
|
532
|
+
|
|
533
|
+
|
|
534
|
+
class BBStatsApp(App):
    """Display Bitbucket PR and commit statistics in a tabbed Textual UI."""

    CSS = """
    Screen { background: $surface; }
    DataTable { height: 1fr; }
    Static.chart { padding: 1 2; height: 1fr; overflow-y: auto; }
    Static.overview { padding: 2 4; }
    #commits { height: 1fr; }
    #commits > Vertical { height: 1fr; }
    #commits-charts { height: 1fr; overflow-y: auto; padding: 1 2; }
    #commits-table { height: 1fr; max-height: 12; }
    """

    # One binding per tab so keys 1-8 jump straight to the matching pane id.
    BINDINGS: ClassVar[tuple[Binding, ...]] = (
        Binding("q", "quit", "Quit"),
        Binding("1", "switch_tab('overview')", "Overview"),
        Binding("2", "switch_tab('activity')", "Activity"),
        Binding("3", "switch_tab('open')", "Open PRs"),
        Binding("4", "switch_tab('authors')", "Authors"),
        Binding("5", "switch_tab('reviews')", "Reviews"),
        Binding("6", "switch_tab('branches')", "Branches"),
        Binding("7", "switch_tab('slowest')", "Slowest"),
        Binding("8", "switch_tab('commits')", "Commits"),
    )
|
|
559
|
+
|
|
560
|
+
    def __init__(self, workspace: str, repo_slug: str, stats: dict, fetched_at: str = "") -> None:
        """Store repository metadata and the precomputed stats payload.

        Args:
            workspace: Bitbucket workspace slug (display only).
            repo_slug: Repository slug within the workspace (display only).
            stats: Output of ``build_stats`` consumed by the populate methods.
            fetched_at: ISO timestamp of the data snapshot; empty when unknown.
        """
        super().__init__()
        self.workspace = workspace
        self.repo_slug = repo_slug
        self.stats = stats
        self.fetched_at = fetched_at
|
|
567
|
+
|
|
568
|
+
    def compose(self) -> ComposeResult:  # noqa: PLR6301
        """Compose the tabbed stats layout.

        Pane ids match the tab names used by the number-key bindings.

        Yields:
            The widgets used by the stats application.

        """
        yield Header(show_clock=True)
        with TabbedContent(initial="overview"):
            with TabPane("Overview [1]", id="overview"):
                yield Static(id="overview-static", classes="overview")
            with TabPane("Activity [2]", id="activity"):
                yield Static(id="activity-static", classes="chart")
            with TabPane("Open PRs [3]", id="open"):
                yield DataTable(id="open-table", zebra_stripes=True)
            with TabPane("Authors [4]", id="authors"):
                yield DataTable(id="authors-table", zebra_stripes=True)
            with TabPane("Reviews [5]", id="reviews"):
                yield DataTable(id="reviews-table", zebra_stripes=True)
            with TabPane("Branches [6]", id="branches"):
                yield DataTable(id="branches-table", zebra_stripes=True)
            with TabPane("Slowest [7]", id="slowest"):
                yield DataTable(id="slowest-table", zebra_stripes=True)
            # The commits pane stacks the charts above the author table.
            with TabPane("Commits [8]", id="commits"), Vertical():
                yield Static(id="commits-charts", classes="chart")
                yield DataTable(id="commits-table", zebra_stripes=True)
        yield Footer()
|
|
595
|
+
|
|
596
|
+
    def on_mount(self) -> None:
        """Populate all tables and charts once the UI mounts.

        All data is precomputed in self.stats, so every tab can be filled
        eagerly up front.
        """
        self._populate_overview()
        self._populate_activity()
        self._populate_open_prs()
        self._populate_authors()
        self._populate_reviews()
        self._populate_branches()
        self._populate_slowest()
        self._populate_commits()
|
|
606
|
+
|
|
607
|
+
    def _populate_overview(self) -> None:
        # Render the headline numbers for the Overview tab into a Static.
        s = self.stats
        ages = s["merged_ages"]
        open_prs = s["open"]
        # Sentinel [0.0] keeps max(open_ages) safe when nothing is open.
        open_ages = [pr_age_days(pr, reference_now=s["reference_now"]) for pr in open_prs] if open_prs else [0.0]
        top_author = max(s["author_prs"].items(), key=lambda x: len(x[1]))[0] if s["author_prs"] else "—"
        top_reviewer = (
            max(s["reviewer_counts"].items(), key=lambda x: x[1]["reviewed"])[0] if s["reviewer_counts"] else "—"
        )
        # fetched_at[:19] trims an ISO timestamp to seconds precision.
        cache_line = f" Data as of : [dim]{self.fetched_at[:19]}[/dim]\n" if self.fetched_at else ""
        avg_age = f"{mean(ages):.1f} days" if ages else "—"
        median_age = f"{median(ages):.1f} days" if ages else "—"
        fastest_age = f"{min(ages):.1f} days" if ages else "—"
        slowest_age = f"{max(ages):.1f} days" if ages else "—"
        text = (
            f"[bold cyan]PR Statistics — {self.workspace}/{self.repo_slug}[/bold cyan]\n\n"
            + cache_line
            + "\n[bold]Merged PRs[/bold]\n"
            f" Total : [cyan]{len(s['merged'])}[/cyan]\n"
            f" Avg age : [cyan]{avg_age}[/cyan]\n"
            f" Median age : [cyan]{median_age}[/cyan]\n"
            f" Fastest merge : [green]{fastest_age}[/green]\n"
            f" Slowest merge : [red]{slowest_age}[/red]\n\n"
            f"[bold]Open PRs[/bold]\n"
            f" Currently open : [yellow]{len(open_prs)}[/yellow]\n"
            f" Oldest open : [red]{max(open_ages):.1f} days[/red]\n\n"
            f"[bold]Team[/bold]\n"
            f" Most PRs merged : [cyan]{top_author}[/cyan]\n"
            f" Most reviews : [cyan]{top_reviewer}[/cyan]\n"
        )
        self.query_one("#overview-static", Static).update(text)
|
|
638
|
+
|
|
639
|
+
    def _populate_activity(self) -> None:
        # Render the weekly and monthly merge bar charts into the Activity tab.
        s = self.stats
        text = (
            "[bold]PRs merged per week (last 16 weeks)[/bold]\n\n"
            + bar_chart(s["weekly"], width=40, last_n=16)
            + "\n\n[bold]PRs merged per month (last 12 months)[/bold]\n\n"
            + bar_chart(s["monthly"], width=40, last_n=12)
        )
        self.query_one("#activity-static", Static).update(text)
|
|
648
|
+
|
|
649
|
+
def _populate_open_prs(self) -> None:
    """Fill the open-PRs table, oldest pull requests first."""
    table = self.query_one("#open-table", DataTable)
    table.add_columns("Title", "Author", "Age (days)", "Target Branch", "Reviewers")
    now = self.stats["reference_now"]
    ordered = sorted(
        self.stats["open"],
        key=lambda p: pr_age_days(p, reference_now=now),
        reverse=True,
    )
    for pr in ordered:
        # Collect reviewer display names, skipping participants without one.
        names = []
        for participant in pr.get("participants", []):
            if participant.get("role") != "REVIEWER":
                continue
            name = reviewer_name(participant)
            if name:
                names.append(name)
        reviewers = ", ".join(names) or "—"
        age = pr_age_days(pr, reference_now=now)
        table.add_row(
            pr_title(pr),
            pr_author_name(pr),
            f"{age:.1f}",
            pr_branch_name(pr),
            reviewers[:40],  # keep the column narrow
        )
|
|
673
|
+
|
|
674
|
+
def _populate_authors(self) -> None:
    """Fill the per-author table, most prolific authors first."""
    table = self.query_one("#authors-table", DataTable)
    table.add_columns("Author", "PRs", "Avg age (days)", "Median (days)", "Avg comments")
    merge_dates = self.stats["merge_commit_dates"]
    # sorted() is stable, so reverse=True on len matches the original -len key.
    ranked = sorted(self.stats["author_prs"].items(), key=lambda item: len(item[1]), reverse=True)
    for author, prs in ranked:
        ages = [pr_age_days(pr, merge_commit_dates=merge_dates) for pr in prs]
        avg_comments = mean(pr.get("comment_count", 0) for pr in prs)
        table.add_row(
            author,
            str(len(prs)),
            f"{mean(ages):.1f}",
            f"{median(ages):.1f}",
            f"{avg_comments:.1f}",
        )
|
|
687
|
+
|
|
688
|
+
def _populate_reviews(self) -> None:
    """Fill the reviewer table, most active reviewers first."""
    table = self.query_one("#reviews-table", DataTable)
    table.add_columns("Reviewer", "PRs Reviewed", "PRs Approved", "Approval Rate")
    ranked = sorted(
        self.stats["reviewer_counts"].items(),
        key=lambda item: item[1]["reviewed"],
        reverse=True,
    )
    for name, counts in ranked:
        reviewed = counts["reviewed"]
        approved = counts["approved"]
        # Guard against division by zero for reviewers with no reviewed PRs.
        rate = (approved / reviewed * 100) if reviewed else 0
        table.add_row(name, str(reviewed), str(approved), f"{rate:.0f}%")
|
|
694
|
+
|
|
695
|
+
def _populate_branches(self) -> None:
    """Fill the target-branch table, busiest branches first."""
    table = self.query_one("#branches-table", DataTable)
    table.add_columns("Target Branch", "PR Count", "Avg age (days)", "Median (days)")
    ranked = sorted(
        self.stats["branch_counts"].items(),
        key=lambda item: len(item[1]),
        reverse=True,
    )
    for branch, ages in ranked:
        table.add_row(branch, str(len(ages)), f"{mean(ages):.1f}", f"{median(ages):.1f}")
|
|
700
|
+
|
|
701
|
+
def _populate_slowest(self) -> None:
    """Fill the table with the 25 slowest merged pull requests."""
    table = self.query_one("#slowest-table", DataTable)
    table.add_columns("Title", "Author", "Days", "Target Branch", "Comments")
    merge_dates = self.stats["merge_commit_dates"]
    slowest = sorted(
        self.stats["merged"],
        key=lambda p: pr_age_days(p, merge_commit_dates=merge_dates),
        reverse=True,
    )[:25]
    for pr in slowest:
        age = pr_age_days(pr, merge_commit_dates=merge_dates)
        table.add_row(
            pr_title(pr),
            pr_author_name(pr),
            f"{age:.1f}",
            pr_branch_name(pr),
            str(pr.get("comment_count", 0)),
        )
|
|
716
|
+
|
|
717
|
+
def _populate_commits(self) -> None:
    """Render the commit-activity charts and the per-author commit table."""
    stats = self.stats
    total = stats["total_commits"]
    if not total:
        # Commit data is only present in caches written after it was added.
        self.query_one("#commits-charts", Static).update("[dim]No commit data — run with --refresh[/dim]")
        return

    hour_labels = {f"{h:02d}h": v for h, v in stats["commit_hour"].items()}
    parts = [
        f"[bold]Total commits: [cyan]{total}[/cyan][/bold]\n\n",
        "[bold]Commits per week (last 16 weeks)[/bold]\n\n",
        bar_chart(stats["commit_weekly"], width=40, last_n=16),
        "\n\n[bold]By day of week[/bold]\n\n",
        bar_chart(stats["commit_dow"], width=30, last_n=7),
        "\n\n[bold]By hour of day[/bold]\n\n",
        bar_chart(hour_labels, width=30, last_n=24),
    ]
    self.query_one("#commits-charts", Static).update("".join(parts))

    table = self.query_one("#commits-table", DataTable)
    table.add_columns("Author", "Commits", "% of total")
    for author, count in stats["commit_authors"].items():
        table.add_row(author, str(count), f"{count / total * 100:.1f}%")
|
|
741
|
+
|
|
742
|
+
def action_switch_tab(self, tab_id: str) -> None:
    """Activate the tab with *tab_id*, invoked from a keyboard binding."""
    tabs = self.query_one(TabbedContent)
    tabs.active = tab_id
|
|
745
|
+
|
|
746
|
+
|
|
747
|
+
# ---------------------------------------------------------------------------
|
|
748
|
+
# Entry point
|
|
749
|
+
# ---------------------------------------------------------------------------
|
|
750
|
+
|
|
751
|
+
|
|
752
|
+
def _parse_args() -> argparse.Namespace:
|
|
753
|
+
parser = argparse.ArgumentParser(
|
|
754
|
+
prog="bb-stats",
|
|
755
|
+
description="Terminal Bitbucket pull request and commit statistics viewer.",
|
|
756
|
+
)
|
|
757
|
+
parser.add_argument("-l", "--list", action="store_true", help="interactively pick workspace and repository")
|
|
758
|
+
parser.add_argument("-r", "--refresh", action="store_true", help="force refresh cached data from the API")
|
|
759
|
+
parser.add_argument("-w", "--workspace", default=None, help="Bitbucket workspace slug")
|
|
760
|
+
parser.add_argument("-R", "--repo", default=None, help="Bitbucket repository slug")
|
|
761
|
+
return parser.parse_args()
|
|
762
|
+
|
|
763
|
+
|
|
764
|
+
def main() -> None:
    """Parse CLI flags, load PR/commit data (from cache or API), and launch the TUI.

    Exits with status 1 when the BITBUCKET / BITBUCKET_EMAIL environment
    variables are not set.
    """
    load_dotenv()

    # Parse arguments before validating credentials so that `--help`
    # (and argparse error messages) work even without env vars set.
    args = _parse_args()

    token = os.environ.get("BITBUCKET")
    email = os.environ.get("BITBUCKET_EMAIL")
    if not token or not email:
        logger.error("Error: BITBUCKET and BITBUCKET_EMAIL env vars must be set")
        sys.exit(1)

    global SESSION  # noqa: PLW0603
    SESSION = _build_session((email, token))

    workspace = args.workspace
    repo_slug = args.repo

    # Fall back to the interactive picker when either slug is missing
    # or the user explicitly asked for it with --list.
    if args.list or not workspace or not repo_slug:
        result = SelectionApp().run()
        if not result:
            return
        workspace, repo_slug = result

    cached = load_cache(workspace, repo_slug) if not args.refresh else None

    if cached:
        merged, open_prs, commits, fetched_at = cached
        console.print(f"[dim]Loaded from cache (fetched {fetched_at[:19]}). Use --refresh to update.[/dim]")
    else:
        console.print("[dim]Fetching merged PRs...[/dim]")
        merged = enrich_with_participants(workspace, repo_slug, fetch_all_prs(workspace, repo_slug, "MERGED"))
        console.print("[dim]Fetching open PRs...[/dim]")
        open_prs = enrich_with_participants(workspace, repo_slug, fetch_all_prs(workspace, repo_slug, "OPEN"))
        console.print("[dim]Fetching commits...[/dim]")
        commits = fetch_commits(workspace, repo_slug)
        fetched_at = datetime.now(UTC).isoformat()
        save_cache(
            workspace,
            repo_slug,
            {
                "fetched_at": fetched_at,
                "merged": merged,
                "open": open_prs,
                "commits": commits,
            },
        )
        console.print(
            f"[dim]Loaded {len(merged)} merged + {len(open_prs)} open + {len(commits)} commits. Cache saved.[/dim]"
        )

    # Ages are computed relative to the fetch time so cached data stays
    # self-consistent; fall back to "now" for unparsable timestamps.
    try:
        reference_now = datetime.fromisoformat(fetched_at)
    except ValueError:
        reference_now = datetime.now(UTC)

    BBStatsApp(
        workspace,
        repo_slug,
        build_stats(merged, open_prs, commits, reference_now=reference_now),
        fetched_at,
    ).run()
|
|
826
|
+
|
|
827
|
+
|
|
828
|
+
# Allow running the module as a script in addition to the console entry point.
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "bb-stats"
|
|
3
|
+
version = "0.1.1"
|
|
4
|
+
description = "Terminal Bitbucket pull request and commit statistics viewer"
|
|
5
|
+
requires-python = ">=3.11"
|
|
6
|
+
dependencies = [
|
|
7
|
+
"python-dotenv>=1.0",
|
|
8
|
+
"requests>=2.31",
|
|
9
|
+
"rich>=13.0",
|
|
10
|
+
"textual>=0.80.0",
|
|
11
|
+
"ty",
|
|
12
|
+
"ruff",
|
|
13
|
+
]
|
|
14
|
+
|
|
15
|
+
[project.scripts]
|
|
16
|
+
bb-stats = "bb_stats:main"
|
|
17
|
+
|
|
18
|
+
[tool.ruff]
|
|
19
|
+
target-version = "py311"
|
|
20
|
+
line-length = 120
|
|
21
|
+
|
|
22
|
+
[tool.ruff.lint]
|
|
23
|
+
select = ["ALL"]
|
|
24
|
+
preview = true
|
bb_stats-0.1.1/setup.cfg
ADDED