gitaura-0.1.1.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gitaura-0.1.1/LICENSE +21 -0
- gitaura-0.1.1/PKG-INFO +34 -0
- gitaura-0.1.1/README.md +156 -0
- gitaura-0.1.1/gitaura/__init__.py +1 -0
- gitaura-0.1.1/gitaura/gitaura.py +1011 -0
- gitaura-0.1.1/gitaura.egg-info/PKG-INFO +34 -0
- gitaura-0.1.1/gitaura.egg-info/SOURCES.txt +10 -0
- gitaura-0.1.1/gitaura.egg-info/dependency_links.txt +1 -0
- gitaura-0.1.1/gitaura.egg-info/entry_points.txt +2 -0
- gitaura-0.1.1/gitaura.egg-info/top_level.txt +1 -0
- gitaura-0.1.1/setup.cfg +4 -0
- gitaura-0.1.1/setup.py +37 -0
gitaura-0.1.1/LICENSE
ADDED
@@ -0,0 +1,21 @@
The MIT License

Copyright (c) 2026 Harsh Nagarkar. https://harshfr.me

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
gitaura-0.1.1/PKG-INFO
ADDED
@@ -0,0 +1,34 @@
Metadata-Version: 2.4
Name: gitaura
Version: 0.1.1
Summary: A utility for getting information about a git repository
Home-page: https://github.com/HarshNagarkar17/gitaura
Download-URL: https://github.com/HarshNagarkar17/gitaura
Author: Harsh Nagarkar
Author-email: harshvn871@gmail.com
License: MIT License
Classifier: Topic :: Utilities
Classifier: Intended Audience :: Developers
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Natural Language :: English
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Software Development :: Version Control :: Git
Requires-Python: >=3.6
License-File: LICENSE
Dynamic: author
Dynamic: author-email
Dynamic: classifier
Dynamic: download-url
Dynamic: home-page
Dynamic: license
Dynamic: license-file
Dynamic: requires-python
Dynamic: summary
gitaura-0.1.1/README.md
ADDED
@@ -0,0 +1,156 @@
# gitaura

A Python CLI for git repository analytics. Visualize commit activity, analyze file churn, explore branch history, and discover where you work most—all from the terminal.

## Installation

```bash
pip install gitaura
```

```
248 commits over 12 month(s)
2024-01 12 ▀▀▀▀▀
2024-02 28 ▀▀▀▀▀▀▀▀▀▀▀▀▀
2024-03 45 ▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
...
```

---

## Commands

### Commit Activity

| Command | Description |
| ---------- | ---------------------------------------------------- |
| (default) | Bar chart of commits by period (day/week/month/year) |
| `-p day` | Daily granularity |
| `-p week` | Weekly (ISO week) |
| `-p month` | Monthly (default) |
| `-p year` | Yearly |
| `-f` | Fill missing dates with zero-commit bars |

### File Analysis

| Command | Description |
| --------------- | -------------------------------------------------- |
| `--files` | File type breakdown (% JS, TS, MD, etc.) |
| `--churn` | File churn ranking (most modified files) |
| `--hotspots` | Hotspot detection (high churn + many contributors) |
| `--refactor` | Refactor density (balanced add/del ≈ refactoring) |
| `--min-churn N` | Minimum churn for hotspots (default: 50) |

### Branch Analysis

| Command | Description |
| ------------------- | -------------------------------------------------------------------- |
| `--branches` | Active branches ranked by recency |
| `--stale` | Stale branches (no commits in N days) |
| `--stale-days N` | Staleness threshold (default: 30) |
| `--divergence` | Ahead/behind counts vs main |
| `--branch-lifetime` | Creation → merge duration for merged branches |
| `--merges` | Merge frequency timeline (aggregated) |
| `--merge-timeline` | Timeline of branch merges (date + branch name) |
| `--compare A B` | Compare two branches statistically |
| `--main BRANCH` | Main branch for divergence/lifetime/merges (default: main or master) |

### Activity Map

| Command | Description |
| ------------ | ------------------------------------------------ |
| `--work-map` | “Where do I work most?” — directory activity map |

---

## Global Options

Apply to most commands:

| Option | Description |
| -------------------- | ---------------------------------- |
| `-a, --after DATE` | After date (e.g. `2024-01-01`) |
| `-b, --before DATE` | Before date |
| `-u, --author EMAIL` | Filter by author (substring match) |

---

## Examples

**Commit bars by week, filtered by author**

```bash
gitaura -p week -u "alice@company.com"
```

**File type breakdown for 2024**

```bash
gitaura --files -a 2024-01-01 -b 2024-12-31
```

**Most modified files**

```bash
gitaura --churn
```

**Hotspots — high churn and many contributors**

```bash
gitaura --hotspots --min-churn 100
```

**Refactor vs feature density**

```bash
gitaura --refactor
```

**Stale branches (no activity in 60 days)**

```bash
gitaura --stale --stale-days 60
```

**Branch divergence from main**

```bash
gitaura --divergence
```

**Compare main and feature branch**

```bash
gitaura --compare main feature-branch
gitaura --compare main origin/feature-branch
```

**Where do I work most?**

```bash
gitaura --work-map
gitaura --work-map -u "me@email.com"
```

**Merge timeline**

```bash
gitaura --merge-timeline
```

---

## Output Notes

- **Weekend bars** are colored blue in commit activity view.
- **Churn** = lines added + lines deleted per file.
- **Refactor density** uses “balanced” edits (adds ≈ dels) as a proxy for refactoring.
- **Hotspots** combine churn and contributor count; files with both rank higher.
- **Work map** shows directories by edit count and lines changed; hierarchy is indented.

---

## License

MIT
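The Output Notes above define churn as lines added plus lines deleted and treat balanced add/delete pairs as refactor-like. A minimal, self-contained sketch of those two formulas on hard-coded numstat-style rows; it is illustrative only and not part of the package, but it mirrors the `2 * min(adds, dels)` accounting used in `gitaura/gitaura.py` further down.

```python
# Illustrative recomputation of the README's churn and refactor-density metrics
# on hard-coded (path, additions, deletions) rows.
rows = [
    ("src/app.py", 120, 110),        # balanced edits -> mostly refactor-like
    ("src/new_feature.py", 300, 5),  # mostly additions -> feature-like
    ("README.md", 10, 2),
]

for path, adds, dels in rows:
    churn = adds + dels                   # churn = lines added + lines deleted
    refactor_lines = 2 * min(adds, dels)  # "balanced" portion of the change
    density = refactor_lines / churn if churn else 0.0
    print("%-20s churn=%4d refactor=%3.0f%%" % (path, churn, 100 * density))
```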
gitaura-0.1.1/gitaura/__init__.py
ADDED
@@ -0,0 +1 @@
__all__ = ["gitaura"]
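`gitaura/__init__.py` only re-exports the `gitaura` module, so the analyses behind the CLI flags can also be reached from Python. A sketch, assuming the package is installed and the interpreter is started inside a git work tree; programmatic use is not documented by the package, and the call signatures below are simply the ones defined in `gitaura/gitaura.py` (next hunk).

```python
# Library-style use of gitaura's analysis helpers (undocumented; sketch only).
from gitaura import gitaura as ga

# (path, additions, deletions, commits) tuples, sorted by churn
ranking = ga.get_file_churn_ranking(after="2024-01-01")
ga.print_file_churn(ranking, limit=10)

# (path, churn, contributors, score) tuples for high-churn, multi-author files
hotspots = ga.get_hotspots(min_churn=100)
ga.print_hotspots(hotspots, limit=5)
```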
gitaura-0.1.1/gitaura/gitaura.py
ADDED
@@ -0,0 +1,1011 @@
#!/usr/bin/python

"""
gitaura produces a simple commit visualisation for a git repository.

gitaura is a Python utility that uses 'git log'
to produce a simple bar graph visualisation for
commit activity in a git repository.

Licensed under the MIT License.
"""

import os
import sys
import argparse
import datetime
from subprocess import check_output, DEVNULL
from collections import OrderedDict, namedtuple, Counter, defaultdict

try:
    import pkg_resources
    __version__ = pkg_resources.require("gitaura")[0].version
except Exception:
    __version__ = "0.0.0"


Item = namedtuple(
    "Item", ["timestamp", "is_weekend", "author", "commits", "score"])
Item.__new__.__defaults__ = (None, False, "none", 0, 0)


def print_bars(items, block=u"\u2580", width=50):
    """Print unicode bar representations of dates and scores."""
    for i in items:
        num = str(items[i].commits)

        sys.stdout.write(i)
        sys.stdout.write(" ")
        sys.stdout.write(num)
        sys.stdout.write((5 - len(num)) * " ")

        # Colour the weekend bars.
        if items[i].is_weekend:
            sys.stdout.write("\033[94m")

        sys.stdout.write(block * int(items[i].score * width))

        if items[i].is_weekend:
            sys.stdout.write("\x1b[0m")

        sys.stdout.write("\n")


def filter(items, periodicity="day", author=""):
    """Filter entries by periodicity and author."""
    bars = OrderedDict()

    for i in items:
        # Extract the day/month/year part of the date.
        d = i.timestamp
        if periodicity == "week":
            label = d.strftime("%Y/%V")
        elif periodicity == "month":
            label = d.strftime("%Y-%m")
        elif periodicity == "year":
            label = d.strftime("%Y")
        else:
            label = d.strftime("%Y-%m-%d %a")

        # Filter by author.
        if author != "":
            if author not in i.author:
                continue

        if label not in bars:
            bars[label] = Item(*i)
        else:
            bars[label] = bars[label]._replace(
                commits=bars[label].commits + i.commits)

    return bars


def get_scores(items):
    """Compute normalized scores (0-1) for commit numbers."""
    vals = [items[i].commits for i in items]
    vals.append(0)

    xmin = min(vals)
    xmax = max(vals)

    # Normalize.
    out = OrderedDict()
    for i in items:
        out[i] = Item(
            *(items[i]))._replace(score=normalize(items[i].commits, xmin, xmax))

    return out


def get_log(after, before, reverse=False, fill=False):
    """Return the list of git log from the git log command."""
    # 2018-01-01 00:00:00|author@author.com
    args = ["git", "log", "--pretty=format:%ai|%ae", "--reverse"]

    if after:
        args.append("--after=%s" % (after,))
    if before:
        args.append("--before=%s" % (before,))

    items = []
    for o in check_output(args, universal_newlines=True, shell=False).split("\n"):
        c = o.split("|")
        t = datetime.datetime.strptime(c[0], "%Y-%m-%d %H:%M:%S %z")
        items.append(
            Item(timestamp=t, author=c[1], is_weekend=t.weekday() > 4, commits=1))

    if fill:
        fill_dates(items)

    if not reverse:
        items.reverse()

    return items


def normalize(x, xmin, xmax):
    """Normalize a number to a 0-1 range given a min and max of its set."""
    return float(x - xmin) / float(xmax - xmin)


def fill_dates(data):
    """Fill missing dates where there were no commits."""
    n = len(data)
    i = 0
    while i < n - 1:
        cur = data[i].timestamp
        if (data[i+1].timestamp - cur).days > 1:
            data.insert(i+1, Item(
                timestamp=cur + datetime.timedelta(days=1),
                is_weekend=cur.weekday() > 4
            ))
            n += 1
        i += 1


def get_file_breakdown(after="", before="", author=""):
    """Get file type breakdown from git log (counts each file change per commit)."""
    args = ["git", "log", "--format=", "--name-only"]

    if after:
        args.append("--after=%s" % (after,))
    if before:
        args.append("--before=%s" % (before,))
    if author:
        args.append("--author=%s" % (author,))

    output = check_output(args, universal_newlines=True, shell=False)
    counts = Counter()

    for line in output.split("\n"):
        line = line.strip()
        if not line:
            continue
        ext = os.path.splitext(line)[1]
        if ext:
            counts[ext.lstrip(".").upper()] += 1
        else:
            counts["(none)"] += 1

    return dict(counts)


def print_file_breakdown(counts, block=u"\u2580", width=40):
    """Print file type breakdown as percentages with bar visualization."""
    total = sum(counts.values())
    if total == 0:
        print("No files to breakdown")
        return

    # Sort by count descending
    sorted_items = sorted(counts.items(), key=lambda x: -x[1])

    print("%d file changes by type:\n" % total)
    for ext, count in sorted_items:
        pct = 100.0 * count / total
        bar_len = int(pct / 100.0 * width)
        num = str(count)
        pct_str = "%5.1f%%" % pct
        sys.stdout.write("%-8s %6s %4s " % (ext, num, pct_str))
        sys.stdout.write(block * bar_len)
        sys.stdout.write("\n")


def get_file_stats(after="", before="", author=""):
    """
    Get per-file stats from git log --numstat.
    Returns: (file_churn, file_contributors, file_refactor_lines, total_refactor, total_churn)
    - file_churn: dict path -> (additions, deletions, commits)
    - file_contributors: dict path -> set of author emails
    - file_refactor_lines: dict path -> (refactor_lines, total_lines) for density
    - total_refactor, total_churn: repo-level refactor density
    """
    args = ["git", "log", "--numstat", "--format=%H|%ae"]
    if after:
        args.append("--after=%s" % after)
    if before:
        args.append("--before=%s" % before)
    if author:
        args.append("--author=%s" % author)

    out = check_output(args, universal_newlines=True, shell=False)
    file_churn = defaultdict(lambda: [0, 0, 0])  # adds, dels, commits
    file_contributors = defaultdict(set)
    file_refactor = defaultdict(lambda: [0, 0])  # refactor_lines, total_lines
    total_refactor = 0
    total_churn = 0

    current_author = ""
    for line in out.split("\n"):
        line = line.rstrip()
        if "|" in line and not line[0].isdigit() and "-" not in line[:3]:
            parts = line.split("|", 1)
            if len(parts) == 2:
                current_author = parts[1].strip()
            continue
        if not line.strip():
            continue
        parts = line.split("\t", 2)
        if len(parts) < 3:
            continue
        add_s, del_s, path = parts[0], parts[1], parts[2].strip()
        if not path:
            continue
        try:
            adds = int(add_s) if add_s != "-" else 0
            dels = int(del_s) if del_s != "-" else 0
        except ValueError:
            continue
        churn = adds + dels
        if churn == 0:
            continue

        file_churn[path][0] += adds
        file_churn[path][1] += dels
        file_churn[path][2] += 1
        file_contributors[path].add(current_author)
        refactor_lines = 2 * min(adds, dels)
        file_refactor[path][0] += refactor_lines
        file_refactor[path][1] += churn
        total_refactor += refactor_lines
        total_churn += churn

    return (dict(file_churn), dict(file_contributors), dict(file_refactor),
            total_refactor, total_churn)


def get_file_churn_ranking(after="", before="", author=""):
    """Return list of (path, adds, dels, commits) sorted by churn (adds+dels) desc."""
    file_churn, _, _, _, _ = get_file_stats(after, before, author)
    result = []
    for path, (adds, dels, commits) in file_churn.items():
        result.append((path, adds, dels, commits))
    return sorted(result, key=lambda x: -(x[1] + x[2]))


def print_file_churn(churn_list, limit=25, block=u"\u2580", width=40):
    """Print file churn ranking."""
    if not churn_list:
        print("No file changes to rank")
        return
    total_churn = sum(adds + dels for _, adds, dels, _ in churn_list)
    print("File churn ranking (most modified, %d total lines changed):\n" % total_churn)
    vals = [adds + dels for _, adds, dels, _ in churn_list]
    xmin, xmax = min(vals), max(vals)
    for path, adds, dels, commits in churn_list[:limit]:
        churn = adds + dels
        score = normalize(churn, xmin, xmax) if xmax > xmin else 1.0
        bar_len = int(score * width)
        sys.stdout.write("%-45s %6d (+%d -%d) " % (path[:45], churn, adds, dels))
        sys.stdout.write(block * bar_len)
        sys.stdout.write("\n")
    if len(churn_list) > limit:
        print("\n... and %d more files" % (len(churn_list) - limit))


def get_hotspots(after="", before="", author="", min_churn=50):
    """Return list of (path, churn, contributors, score) for hotspot files."""
    file_churn, file_contributors, _, _, _ = get_file_stats(after, before, author)
    result = []
    churns = [sum(c[0:2]) for c in file_churn.values()]
    max_churn = max(churns) if churns else 1
    max_contrib = max(len(s) for s in file_contributors.values()) if file_contributors else 1

    for path in file_churn:
        adds, dels, _ = file_churn[path]
        churn = adds + dels
        contribs = len(file_contributors.get(path, set()))
        if churn < min_churn:
            continue
        churn_norm = churn / max_churn if max_churn else 0
        contrib_norm = contribs / max_contrib if max_contrib else 0
        score = churn_norm * (0.5 + 0.5 * contrib_norm)
        result.append((path, churn, contribs, score))
    return sorted(result, key=lambda x: -x[3])


def print_hotspots(hotspots, limit=20, block=u"\u2580", width=30):
    """Print hotspot files (high churn + high contributor count)."""
    print("Hotspot detection (high churn + many contributors):\n")
    if not hotspots:
        print("No hotspots found")
        return
    scores = [h[3] for h in hotspots]
    xmin, xmax = min(scores), max(scores)
    for path, churn, contribs, score in hotspots[:limit]:
        bar_len = int(normalize(score, xmin, xmax) * width) if xmax > xmin else width
        sys.stdout.write("%-45s churn:%5d authors:%2d " % (path[:45], churn, contribs))
        sys.stdout.write(block * bar_len)
        sys.stdout.write("\n")
    if len(hotspots) > limit:
        print("\n... and %d more" % (len(hotspots) - limit))


def get_refactor_density(after="", before="", author=""):
    """Return (overall_density, per_file_list) for refactor density estimation."""
    _, _, file_refactor, total_refactor, total_churn = get_file_stats(after, before, author)
    overall = total_refactor / total_churn if total_churn else 0.0
    per_file = []
    for path, (refactor, churn) in file_refactor.items():
        if churn > 0:
            density = refactor / churn
            per_file.append((path, refactor, churn, density))
    per_file.sort(key=lambda x: -x[3])
    return overall, per_file


def print_refactor_density(overall, per_file, limit=15, block=u"\u2580", width=40):
    """Print refactor density estimation."""
    print("Refactor density (balanced add/del ≈ refactoring):\n")
    print("Overall: %.1f%% of changes are refactor-like (add ≈ del)\n" % (overall * 100))
    if not per_file:
        print("No file-level data")
        return
    print("Top files by refactor density:\n")
    for path, refactor, churn, density in per_file[:limit]:
        pct = 100 * density
        bar_len = int(density * width)
        sys.stdout.write("%-45s %5.0f%% " % (path[:45], pct))
        sys.stdout.write(block * bar_len)
        sys.stdout.write(" (%d / %d lines)\n" % (refactor, churn))
    if len(per_file) > limit:
        print("\n... and %d more" % (len(per_file) - limit))


def get_main_branch():
    """Detect main branch (main or master)."""
    for candidate in ["main", "master"]:
        try:
            check_output(
                ["git", "rev-parse", "--verify", candidate],
                universal_newlines=True, shell=False, stderr=DEVNULL
            )
            return candidate
        except Exception:
            continue
    return None


# --- Branch features ---

def get_active_branches():
    """Return branches ranked by most recent commit (name, date)."""
    fmt = "%(refname:short)\t%(committerdate:iso8601)"
    out = check_output(
        ["git", "for-each-ref", "--sort=-committerdate",
         "--format=" + fmt, "refs/heads/"],
        universal_newlines=True, shell=False
    )
    result = []
    for line in out.strip().split("\n"):
        if not line:
            continue
        parts = line.split("\t", 1)
        if len(parts) == 2:
            name, date_str = parts
            try:
                dt = datetime.datetime.fromisoformat(
                    date_str.replace("Z", "+00:00")[:19])
                result.append((name, dt))
            except ValueError:
                result.append((name, None))
    return result


def print_active_branches(branches, limit=20):
    """Print active branches ranked by recency."""
    print("Active branches (most recent first):\n")
    now = datetime.datetime.now()
    for i, (name, dt) in enumerate(branches[:limit]):
        if dt:
            d = dt.replace(tzinfo=None) if dt.tzinfo else dt
            age = now - d
            days = age.days
            if days == 0:
                rel = "today"
            elif days == 1:
                rel = "1 day ago"
            else:
                rel = "%d days ago" % days
            sys.stdout.write("%-40s %s\n" % (name, rel))
        else:
            sys.stdout.write("%s\n" % name)
    if len(branches) > limit:
        print("\n... and %d more" % (len(branches) - limit))


def get_stale_branches(stale_days=30):
    """Return branches with no commits in the last stale_days days."""
    branches = get_active_branches()
    cutoff = datetime.datetime.now() - datetime.timedelta(days=stale_days)
    cutoff_naive = cutoff.replace(tzinfo=None) if cutoff.tzinfo else cutoff
    stale = []
    for name, dt in branches:
        if dt:
            d = dt.replace(tzinfo=None) if dt.tzinfo else dt
            if d < cutoff_naive:
                stale.append((name, dt))
        else:
            stale.append((name, None))
    return stale


def print_stale_branches(branches, stale_days):
    """Print stale branches."""
    print("Stale branches (no commits in %d+ days):\n" % stale_days)
    if not branches:
        print("None")
        return
    for name, dt in branches:
        if dt:
            age = datetime.datetime.now() - (dt.replace(tzinfo=None) if dt.tzinfo else dt)
            sys.stdout.write("%-40s %d days\n" % (name, age.days))
        else:
            sys.stdout.write("%s\n" % name)


def get_branch_divergence(base_branch):
    """Return list of (branch, ahead, behind) for each branch."""
    branches = check_output(
        ["git", "for-each-ref", "--format=%(refname:short)", "refs/heads/"],
        universal_newlines=True, shell=False
    ).strip().split("\n")
    branches = [b.strip() for b in branches if b.strip() and b.strip() != base_branch]

    result = []
    for branch in branches:
        try:
            out = check_output(
                ["git", "rev-list", "--left-right", "--count",
                 "%s...%s" % (base_branch, branch)],
                universal_newlines=True, shell=False
            ).strip()
            ahead, behind = map(int, out.split())
            result.append((branch, ahead, behind))
        except Exception:
            result.append((branch, None, None))
    return result


def print_branch_divergence(divergence, base_branch):
    """Print ahead/behind summary."""
    print("Branch divergence vs '%s' (ahead / behind):\n" % base_branch)
    for branch, ahead, behind in sorted(divergence, key=lambda x: (-(x[1] or 0), x[0])):
        if ahead is not None and behind is not None:
            sys.stdout.write("%-40s +%d / -%d\n" % (branch, ahead, behind))
        else:
            sys.stdout.write("%-40s (unable to compare)\n" % branch)


def get_branch_lifetimes(base_branch):
    """Return (branch, created_at, merged_at, days) for merged branches."""
    try:
        out = check_output(
            ["git", "log", "--merges", "--first-parent", base_branch,
             "--format=%H %ai %P"],
            universal_newlines=True, shell=False
        )
    except Exception:
        return []

    result = []
    for line in out.strip().split("\n"):
        if not line.strip():
            continue
        parts = line.split(None, 3)
        if len(parts) < 4:
            continue
        merge_hash, merge_date_str, p1, p2 = parts[0], parts[1], parts[2], parts[3]
        try:
            merged_at = datetime.datetime.strptime(
                merge_date_str[:19], "%Y-%m-%d %H:%M:%S")
        except ValueError:
            continue
        merge_base = check_output(
            ["git", "merge-base", p1, p2],
            universal_newlines=True, shell=False
        ).strip()
        branch_name = check_output(
            ["git", "name-rev", "--name-only", p2],
            universal_newlines=True, shell=False
        ).strip().split("~")[0].split("^")[0].strip()
        if "/" in branch_name:
            branch_name = branch_name.split("/")[-1]
        try:
            first_commit = check_output(
                ["git", "log", "--reverse", "--format=%ai", "%s..%s" % (merge_base, p2)],
                universal_newlines=True, shell=False
            ).strip().split("\n")[0]
            if not first_commit:
                continue
            created_at = datetime.datetime.strptime(
                first_commit[:19], "%Y-%m-%d %H:%M:%S")
            days = (merged_at - created_at).days
            result.append((branch_name, created_at, merged_at, days))
        except Exception:
            continue
    return result


def print_branch_lifetimes(lifetimes):
    """Print branch creation to merge duration."""
    print("Branch lifetime (creation → merge):\n")
    if not lifetimes:
        print("No merged branches found")
        return
    for name, created, merged, days in sorted(lifetimes, key=lambda x: -x[3]):
        sys.stdout.write("%-35s %3d days (%s → %s)\n" % (
            name, days,
            created.strftime("%Y-%m-%d"),
            merged.strftime("%Y-%m-%d")))


def get_merge_timeline(after="", before="", periodicity="month"):
    """Return merge counts per period."""
    args = ["git", "log", "--merges", "--pretty=format:%ai"]
    if after:
        args.append("--after=%s" % after)
    if before:
        args.append("--before=%s" % before)

    out = check_output(args, universal_newlines=True, shell=False)
    counts = OrderedDict()
    for line in out.strip().split("\n"):
        if not line:
            continue
        try:
            t = datetime.datetime.strptime(line[:19], "%Y-%m-%d %H:%M:%S")
            if periodicity == "week":
                label = t.strftime("%Y/%V")
            elif periodicity == "day":
                label = t.strftime("%Y-%m-%d %a")
            elif periodicity == "year":
                label = t.strftime("%Y")
            else:
                label = t.strftime("%Y-%m")
            counts[label] = counts.get(label, 0) + 1
        except ValueError:
            continue
    return counts


def print_merge_timeline(counts, block=u"\u2580", width=40):
    """Print merge frequency timeline."""
    total = sum(counts.values())
    if total == 0:
        print("No merges to plot")
        return
    print("%d merges by period:\n" % total)
    vals = list(counts.values()) + [0]
    xmin, xmax = min(vals), max(vals)
    for label in sorted(counts.keys()):
        n = counts[label]
        score = normalize(n, xmin, xmax) if xmax > xmin else 1.0
        bar_len = int(score * width)
        sys.stdout.write("%-20s %3d " % (label, n))
        sys.stdout.write(block * bar_len)
        sys.stdout.write("\n")


def get_merge_timeline_events(base_branch=None, after="", before=""):
    """Return list of (date, branch_name, merge_hash) for each merge event."""
    args = ["git", "log", "--merges", "--format=%H %ai %P"]
    if base_branch:
        args.extend(["--first-parent", base_branch])  # merges into base_branch
    if after:
        args.append("--after=%s" % after)
    if before:
        args.append("--before=%s" % before)

    try:
        out = check_output(args, universal_newlines=True, shell=False)
    except Exception:
        return []

    result = []
    for line in out.strip().split("\n"):
        if not line.strip():
            continue
        parts = line.split(None, 3)
        if len(parts) < 4:
            continue
        merge_hash, date_str, p1, p2 = parts[0], parts[1], parts[2], parts[3]
        try:
            merged_at = datetime.datetime.strptime(date_str[:19], "%Y-%m-%d %H:%M:%S")
        except ValueError:
            continue
        branch_name = check_output(
            ["git", "name-rev", "--name-only", p2],
            universal_newlines=True, shell=False
        ).strip().split("~")[0].split("^")[0].strip()
        if "/" in branch_name:
            branch_name = branch_name.split("/")[-1]
        result.append((merged_at, branch_name, merge_hash))
    return result


def print_merge_timeline_view(events, base_branch, limit=50, block=u"\u2580", width=30):
    """Print timeline view of branch merges (date + branch name)."""
    base = base_branch or "HEAD"
    into = "into %s" % base if base else "all merges"
    print("Merge timeline (%s):\n" % into)
    if not events:
        print("No merges found")
        return
    total = len(events)
    max_w = max(len(e[1]) for e in events) if events else 20
    for merged_at, branch_name, _ in events[:limit]:
        date_str = merged_at.strftime("%Y-%m-%d %H:%M")
        sys.stdout.write("%s %-*s " % (date_str, max(min(max_w, 35), 15), branch_name))
        sys.stdout.write(block)
        sys.stdout.write("\n")
    if total > limit:
        print("\n... and %d more merges" % (total - limit))


def compare_branches(branch_a, branch_b):
    """
    Compare two branches statistically.
    Returns: dict with commits_a, commits_b, ahead, behind, files_changed, adds, dels, authors
    """
    result = {"branch_a": branch_a, "branch_b": branch_b}
    try:
        out = check_output(
            ["git", "rev-list", "--left-right", "--count", "%s...%s" % (branch_a, branch_b)],
            universal_newlines=True, shell=False
        ).strip().split()
        result["commits_only_a"] = int(out[0])
        result["commits_only_b"] = int(out[1])
    except Exception:
        result["commits_only_a"] = result["commits_only_b"] = None

    merge_base = ""
    try:
        merge_base = check_output(
            ["git", "merge-base", branch_a, branch_b],
            universal_newlines=True, shell=False
        ).strip()
    except Exception:
        pass

    if merge_base:
        try:
            diff_out = check_output(
                ["git", "diff", "--numstat", "%s..%s" % (merge_base, branch_b)],
                universal_newlines=True, shell=False
            )
            adds = dels = 0
            files = 0
            for line in diff_out.strip().split("\n"):
                if not line:
                    continue
                parts = line.split("\t", 2)
                if len(parts) >= 2:
                    try:
                        a = int(parts[0]) if parts[0] != "-" else 0
                        d = int(parts[1]) if parts[1] != "-" else 0
                        adds += a
                        dels += d
                        files += 1
                    except ValueError:
                        pass
            result["files_changed"] = files
            result["additions"] = adds
            result["deletions"] = dels
        except Exception:
            result["files_changed"] = result["additions"] = result["deletions"] = None

        try:
            auth_out = check_output(
                ["git", "shortlog", "-sne", "%s..%s" % (merge_base, branch_b)],
                universal_newlines=True, shell=False
            )
            authors = set()
            for line in auth_out.strip().split("\n"):
                if "\t" in line:
                    authors.add(line.split("\t", 1)[1].strip())
            result["authors"] = len(authors)
        except Exception:
            result["authors"] = None
    else:
        result["files_changed"] = result["additions"] = result["deletions"] = result["authors"] = None

    return result


def print_branch_compare(stats):
    """Print two-branch comparison."""
    a, b = stats["branch_a"], stats["branch_b"]
    print("Branch comparison: %s vs %s\n" % (a, b))
    print("%-25s %10s %10s" % ("", a, b))
    print("-" * 50)
    ca, cb = stats["commits_only_a"], stats["commits_only_b"]
    if ca is not None and cb is not None:
        print("%-25s %10d %10d" % ("Commits (unique)", ca, cb))
    if stats.get("files_changed") is not None:
        print("%-25s %10s %10d" % ("Files changed (in %s)" % b, "", stats["files_changed"]))
    if stats.get("additions") is not None:
        print("%-25s %10s %10d" % ("Lines added (in %s)" % b, "", stats["additions"]))
    if stats.get("deletions") is not None:
        print("%-25s %10s %10d" % ("Lines deleted (in %s)" % b, "", stats["deletions"]))
    if stats.get("authors") is not None:
        print("%-25s %10s %10d" % ("Authors (in %s)" % b, "", stats["authors"]))
    if ca is not None and cb is not None:
        print("\nSummary: %s has %d commits not in %s; %s has %d commits not in %s" %
              (a, ca, b, b, cb, a))


def get_work_map(after="", before="", author=""):
    """
    'Where do I work most?' - directory/path breakdown by commit activity.
    Returns: dict of dir_path -> (commits, churn) for top-level dirs and nested.
    """
    args = ["git", "log", "--numstat", "--format=%H|%ae"]
    if after:
        args.append("--after=%s" % after)
    if before:
        args.append("--before=%s" % before)
    if author:
        args.append("--author=%s" % author)

    out = check_output(args, universal_newlines=True, shell=False)
    dir_edits = defaultdict(int)
    dir_churn = defaultdict(lambda: [0, 0])

    current_author = ""
    for line in out.split("\n"):
        line = line.rstrip()
        if "|" in line and not line[0].isdigit() and "-" not in line[:3]:
            parts = line.split("|", 1)
            if len(parts) == 2:
                current_author = parts[1].strip()
            continue
        if not line.strip():
            continue
        parts = line.split("\t", 2)
        if len(parts) < 3:
            continue
        add_s, del_s, path = parts[0], parts[1], parts[2].strip()
        if not path:
            continue
        try:
            adds = int(add_s) if add_s != "-" else 0
            dels = int(del_s) if del_s != "-" else 0
        except ValueError:
            continue

        dirs = []
        head = ""
        for part in path.split("/"):
            head = head + part + "/" if head else part + "/"
            dirs.append(head.rstrip("/"))
        for d in dirs:
            dir_edits[d] += 1
        for d in dirs:
            dir_churn[d][0] += adds
            dir_churn[d][1] += dels

    combined = []
    for d in set(dir_edits.keys()) | set(dir_churn.keys()):
        edits = dir_edits.get(d, 0)
        churn = dir_churn.get(d, [0, 0])
        total_churn = churn[0] + churn[1]
        combined.append((d, edits, total_churn))

    combined.sort(key=lambda x: -(x[1] + x[2]))
    return combined


def print_work_map(work_data, limit=20, block=u"\u2580", width=40):
    """Print 'Where do I work most?' directory map."""
    print("Where do I work most? (by directory)\n")
    if not work_data:
        print("No activity")
        return
    total_commits = sum(x[1] for x in work_data)
    total_churn = sum(x[2] for x in work_data)
    max_val = max(x[1] + x[2] for x in work_data) if work_data else 1
    for path, commits, churn in work_data[:limit]:
        score = (commits + churn) / max_val if max_val else 0
        bar_len = int(score * width)
        depth = path.count("/") + 1
        indent = " " * (depth - 1)
        disp = path if len(path) <= 45 else "..." + path[-42:]
        sys.stdout.write("%s%-45s %4d edits %5d lines " % (indent, disp[:45], commits, churn))
        sys.stdout.write(block * bar_len)
        sys.stdout.write("\n")
    print("\n(edits = file touches, lines = adds+dels)")


def main():
    """Commandline entry point."""
    p = argparse.ArgumentParser(description="Shows git commit count bars. "
                                "Weekends are coloured. (version " + __version__ + ")")
    p.add_argument("-p", "--periodicity", action="store", dest="periodicity",
                   type=str, required=False, default="month",
                   choices=["day", "week", "month", "year"])

    p.add_argument("-u", "--author", action="store", dest="author",
                   type=str, required=False, default="",
                   help="filter by author's e-mail (substring)")

    p.add_argument("-a", "--after", action="store", dest="after",
                   type=str, required=False, default="",
                   help="after date (yyyy-mm-dd hh:mm)")

    p.add_argument("-b", "--before", action="store", dest="before",
                   type=str, required=False, default="",
                   help="before date (yyyy-mm-dd hh:mm)")

    p.add_argument("-r", "--reverse", action="store", dest="reverse",
                   type=bool, required=False, default=False,
                   help="reverse date order")

    p.add_argument("-f", "--fill", action="store", dest="fill",
                   type=bool, required=False, default=False,
                   help="fill dates (with no commits) on the graph")

    p.add_argument("--files", action="store_true", dest="files",
                   help="show file type breakdown (%% JS, TS, MD, etc.)")
    p.add_argument("--churn", action="store_true", dest="churn",
                   help="file churn ranking (most modified files)")
    p.add_argument("--hotspots", action="store_true", dest="hotspots",
                   help="hotspot detection (high churn + high contributor count)")
    p.add_argument("--refactor", action="store_true", dest="refactor",
                   help="refactor density estimation")
    p.add_argument("--min-churn", action="store", type=int, default=50,
                   dest="min_churn", help="minimum churn for hotspots (default: 50)")

    p.add_argument("--branches", action="store_true", dest="branches",
                   help="active branches ranked by recency")
    p.add_argument("--stale", action="store_true", dest="stale",
                   help="detect stale branches (no commits in N days)")
    p.add_argument("--stale-days", action="store", type=int, default=30,
                   dest="stale_days", help="days threshold for stale (default: 30)")
    p.add_argument("--divergence", action="store_true", dest="divergence",
                   help="branch divergence summary (ahead/behind vs main)")
    p.add_argument("--branch-lifetime", action="store_true", dest="branch_lifetime",
                   help="branch lifetime (creation → merge duration)")
    p.add_argument("--merges", action="store_true", dest="merges",
                   help="merge frequency timeline")
    p.add_argument("--merge-timeline", action="store_true", dest="merge_timeline",
                   help="timeline view of branch merges (date + branch name)")
    p.add_argument("--compare", action="store", dest="compare", nargs=2, metavar=("A", "B"),
                   help="compare two branches statistically")
    p.add_argument("--work-map", action="store_true", dest="work_map",
                   help="'where do I work most?' directory map")
    p.add_argument("--main", action="store", dest="main_branch",
                   type=str, default="", help="main branch for divergence/lifetime/merge-timeline (default: main or master)")

    args = p.parse_args()

    """Invoke the utility."""
    if args.files:
        try:
            counts = get_file_breakdown(args.after, args.before, args.author)
            print_file_breakdown(counts)
        except Exception as e:
            print("error running 'git log': %s" % (e,))
        return

    if args.churn:
        try:
            churn_list = get_file_churn_ranking(args.after, args.before, args.author)
            print_file_churn(churn_list)
        except Exception as e:
            print("error: %s" % (e,))
        return

    if args.hotspots:
        try:
            hotspots = get_hotspots(args.after, args.before, args.author, args.min_churn)
            print_hotspots(hotspots)
        except Exception as e:
            print("error: %s" % (e,))
        return

    if args.refactor:
        try:
            overall, per_file = get_refactor_density(args.after, args.before, args.author)
            print_refactor_density(overall, per_file)
        except Exception as e:
            print("error: %s" % (e,))
        return

    if args.branches:
        try:
            branches = get_active_branches()
            print_active_branches(branches)
        except Exception as e:
            print("error: %s" % (e,))
        return

    if args.stale:
        try:
            branches = get_stale_branches(args.stale_days)
            print_stale_branches(branches, args.stale_days)
        except Exception as e:
            print("error: %s" % (e,))
        return

    if args.divergence:
        try:
            base = args.main_branch or get_main_branch()
            if not base:
                print("error: no main branch (use --main to specify)")
                return
            divergence = get_branch_divergence(base)
            print_branch_divergence(divergence, base)
        except Exception as e:
            print("error: %s" % (e,))
        return

    if args.branch_lifetime:
        try:
            base = args.main_branch or get_main_branch()
            if not base:
                print("error: no main branch (use --main to specify)")
                return
            lifetimes = get_branch_lifetimes(base)
            print_branch_lifetimes(lifetimes)
        except Exception as e:
            print("error: %s" % (e,))
        return

    if args.merges:
        try:
            counts = get_merge_timeline(args.after, args.before, args.periodicity)
            print_merge_timeline(counts)
        except Exception as e:
            print("error: %s" % (e,))
        return

    if args.merge_timeline:
        try:
            base = args.main_branch or get_main_branch() or ""
            events = get_merge_timeline_events(base, args.after, args.before)
            print_merge_timeline_view(events, base or "HEAD")
        except Exception as e:
            print("error: %s" % (e,))
        return

    if args.compare:
        try:
            a, b = args.compare
            stats = compare_branches(a, b)
            print_branch_compare(stats)
        except Exception as e:
            print("error: %s" % (e,))
        return

    if args.work_map:
        try:
            work_data = get_work_map(args.after, args.before, args.author)
            print_work_map(work_data)
        except Exception as e:
            print("error: %s" % (e,))
        return

    items = []
    try:
        items = get_log(args.after, args.before, args.reverse, args.fill)
    except Exception as e:
        print("error running 'git log': %s" % (e,))
        return

    filtered = filter(items, args.periodicity, args.author)

    scores = get_scores(filtered)
    if scores:
        print("%d commits over %d %s(s)" %
              (sum([filtered[f].commits for f in filtered]),
               len(scores),
               args.periodicity))
        print_bars(scores)
    else:
        print("No commits to plot")


if __name__ == "__main__":
    main()
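Two of the helpers above, `normalize()` and `fill_dates()`, are pure and easy to sanity-check in isolation. A minimal test sketch, assuming gitaura is importable; this file is not shipped with the package.

```python
# Illustrative checks against helpers defined in gitaura/gitaura.py.
import datetime
from gitaura.gitaura import Item, fill_dates, normalize

def test_normalize():
    assert normalize(5, 0, 10) == 0.5
    assert normalize(10, 0, 10) == 1.0

def test_fill_dates_inserts_missing_day():
    day1 = datetime.datetime(2024, 1, 1)
    day3 = datetime.datetime(2024, 1, 3)
    items = [Item(timestamp=day1, commits=1), Item(timestamp=day3, commits=1)]
    fill_dates(items)                      # mutates the list in place
    assert len(items) == 3
    assert items[1].timestamp == day1 + datetime.timedelta(days=1)
    assert items[1].commits == 0           # the gap day gets the zero-commit default

if __name__ == "__main__":
    test_normalize()
    test_fill_dates_inserts_missing_day()
    print("ok")
```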
gitaura-0.1.1/gitaura.egg-info/PKG-INFO
ADDED
@@ -0,0 +1,34 @@
Metadata-Version: 2.4
Name: gitaura
Version: 0.1.1
Summary: A utility for getting information about a git repository
Home-page: https://github.com/HarshNagarkar17/gitaura
Download-URL: https://github.com/HarshNagarkar17/gitaura
Author: Harsh Nagarkar
Author-email: harshvn871@gmail.com
License: MIT License
Classifier: Topic :: Utilities
Classifier: Intended Audience :: Developers
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Natural Language :: English
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Software Development :: Version Control :: Git
Requires-Python: >=3.6
License-File: LICENSE
Dynamic: author
Dynamic: author-email
Dynamic: classifier
Dynamic: download-url
Dynamic: home-page
Dynamic: license
Dynamic: license-file
Dynamic: requires-python
Dynamic: summary
gitaura-0.1.1/gitaura.egg-info/dependency_links.txt
ADDED
@@ -0,0 +1 @@

gitaura-0.1.1/gitaura.egg-info/top_level.txt
ADDED
@@ -0,0 +1 @@
gitaura
gitaura-0.1.1/setup.cfg
ADDED
gitaura-0.1.1/setup.py
ADDED
@@ -0,0 +1,37 @@
#!/usr/bin/env python

from setuptools import setup

setup(
    name="gitaura",
    version="0.1.1",
    description="A utility for getting information about a git repository",
    author="Harsh Nagarkar",
    author_email="harshvn871@gmail.com",
    url="https://github.com/HarshNagarkar17/gitaura",
    packages=["gitaura"],
    download_url="https://github.com/HarshNagarkar17/gitaura",
    license="MIT License",
    python_requires=">=3.6",
    entry_points={
        "console_scripts": [
            "gitaura = gitaura.gitaura:main",
        ],
    },
    classifiers=[
        "Topic :: Utilities",
        "Intended Audience :: Developers",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.12",
        "Natural Language :: English",
        "License :: OSI Approved :: MIT License",
        "Topic :: Software Development :: Version Control :: Git",
    ],
)
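The `console_scripts` entry point maps the `gitaura` command to `gitaura.gitaura:main`, and the module already guards `main()` behind `if __name__ == "__main__"`, so the installed wrapper is a thin shim. What the generated script effectively runs (illustrative only, not part of the sdist):

```python
# Equivalent of the console_scripts wrapper that pip generates for "gitaura".
from gitaura.gitaura import main

if __name__ == "__main__":
    main()  # same behaviour as invoking the installed `gitaura` command
```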