github_rest_api 0.26.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- github_rest_api-0.26.0/.gitignore +21 -0
- github_rest_api-0.26.0/PKG-INFO +13 -0
- github_rest_api-0.26.0/README.md +3 -0
- github_rest_api-0.26.0/github_rest_api/__init__.py +5 -0
- github_rest_api-0.26.0/github_rest_api/actions/__init__.py +1 -0
- github_rest_api-0.26.0/github_rest_api/actions/cargo/__init__.py +0 -0
- github_rest_api-0.26.0/github_rest_api/actions/cargo/benchmark.py +253 -0
- github_rest_api-0.26.0/github_rest_api/actions/cargo/profiling.py +126 -0
- github_rest_api-0.26.0/github_rest_api/actions/cargo/utils.py +10 -0
- github_rest_api-0.26.0/github_rest_api/actions/utils.py +92 -0
- github_rest_api-0.26.0/github_rest_api/github.py +318 -0
- github_rest_api-0.26.0/github_rest_api/utils.py +26 -0
- github_rest_api-0.26.0/pyproject.toml +28 -0
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
.DS_Store
|
|
2
|
+
.idea/
|
|
3
|
+
.theia/
|
|
4
|
+
.vscode/
|
|
5
|
+
*.ipr
|
|
6
|
+
*.iws
|
|
7
|
+
.ipynb_checkpoints/
|
|
8
|
+
.coverage
|
|
9
|
+
.mypy/
|
|
10
|
+
.mypy_cache/
|
|
11
|
+
.pytype/
|
|
12
|
+
*.crc
|
|
13
|
+
__pycache__/
|
|
14
|
+
venv/
|
|
15
|
+
.venv/
|
|
16
|
+
target/
|
|
17
|
+
dist/
|
|
18
|
+
*.egg-info/
|
|
19
|
+
doc*/_build/
|
|
20
|
+
*.prof
|
|
21
|
+
core
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: github_rest_api
|
|
3
|
+
Version: 0.26.0
|
|
4
|
+
Summary: Simple wrapper of GitHub REST APIs.
|
|
5
|
+
Author-email: Ben Du <longendu@yahoo.com>
|
|
6
|
+
Requires-Python: <4,>=3.11
|
|
7
|
+
Requires-Dist: psutil>=5.9.4
|
|
8
|
+
Requires-Dist: requests>=2.28.2
|
|
9
|
+
Description-Content-Type: text/markdown
|
|
10
|
+
|
|
11
|
+
# GitHub REST APIs | [@GitHub](https://github.com/legendu-net/github_rest_api) | [@PyPI](https://pypi.org/project/github-rest-api/)
|
|
12
|
+
|
|
13
|
+
Simple wrapper of GitHub REST APIs with a focus on making GitHub Actions automation easy.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"""GitHub Actions related utils."""
|
|
File without changes
|
|
@@ -0,0 +1,253 @@
|
|
|
1
|
+
"""Benchmark action using cargo criterion."""
|
|
2
|
+
|
|
3
|
+
from typing import Callable
|
|
4
|
+
import tempfile
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
import datetime
|
|
7
|
+
import shutil
|
|
8
|
+
from ..utils import (
|
|
9
|
+
config_git,
|
|
10
|
+
create_branch,
|
|
11
|
+
switch_branch,
|
|
12
|
+
push_branch,
|
|
13
|
+
gen_temp_branch,
|
|
14
|
+
commit_benchmarks,
|
|
15
|
+
)
|
|
16
|
+
from ...utils import run_cmd
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def _copy_last_dev_bench(bench_dir: Path) -> None:
|
|
20
|
+
branch = gen_temp_branch()
|
|
21
|
+
create_branch(branch)
|
|
22
|
+
switch_branch(branch="gh-pages", fetch=True)
|
|
23
|
+
src = bench_dir / "dev/criterion"
|
|
24
|
+
tmpdir = tempfile.mkdtemp()
|
|
25
|
+
if src.is_dir():
|
|
26
|
+
shutil.copytree(src, tmpdir, dirs_exist_ok=True)
|
|
27
|
+
switch_branch(branch=branch, fetch=False)
|
|
28
|
+
target = Path("target/criterion")
|
|
29
|
+
target.mkdir(parents=True, exist_ok=True)
|
|
30
|
+
shutil.copytree(tmpdir, target, dirs_exist_ok=True)
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def _cargo_criterion(bench_dir: Path, env: str = "") -> None:
|
|
34
|
+
"""Run `cargo criterion` to benchmark the specified branch.
|
|
35
|
+
Notice that a temp branch is created for benchmarking.
|
|
36
|
+
|
|
37
|
+
:param branch: The branch to benchmark.
|
|
38
|
+
"""
|
|
39
|
+
_copy_last_dev_bench(bench_dir=bench_dir)
|
|
40
|
+
run_cmd(f"{env} cargo criterion --all-features --message-format=json")
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def _copy_bench_results(bench_dir: Path, storage: str) -> None:
|
|
44
|
+
"""Copy benchmark results into the right directory of the gh-pages branch.
|
|
45
|
+
:param bench_dir: The root benchmark directory
|
|
46
|
+
(under the gh-pages branch).
|
|
47
|
+
:param storage: The directory relative to bench_dir for storing this benchmark results.
|
|
48
|
+
"""
|
|
49
|
+
switch_branch("gh-pages", fetch=True)
|
|
50
|
+
src = Path("target/criterion")
|
|
51
|
+
dst = bench_dir / storage / "criterion"
|
|
52
|
+
dst.mkdir(parents=True, exist_ok=True)
|
|
53
|
+
shutil.copytree(src, dst, dirs_exist_ok=True)
|
|
54
|
+
|
|
55
|
+
|
|
56
|
+
def _git_push_gh_pages(bench_dir: Path, pr_number: str) -> str:
|
|
57
|
+
"""Push benchmark results to a branch
|
|
58
|
+
with the pattern gh-pages_{pr_number}_yyyymmdd_HHMMSS.
|
|
59
|
+
:param bench_dir: The root benchmark directory
|
|
60
|
+
(under the gh-pages branch).
|
|
61
|
+
:return: The name of the pushed branch.
|
|
62
|
+
"""
|
|
63
|
+
commit_benchmarks(bench_dir=bench_dir)
|
|
64
|
+
timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
|
|
65
|
+
branch = f"gh-pages_{pr_number}_{timestamp}"
|
|
66
|
+
create_branch(branch=branch)
|
|
67
|
+
push_branch(branch=branch)
|
|
68
|
+
return branch
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def _rename_bench_reports(dirs: list[Path]):
|
|
72
|
+
for dir_ in dirs:
|
|
73
|
+
for path in dir_.glob("**/history.html"):
|
|
74
|
+
path.rename(path.with_name("index.html"))
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def _parse_metric_ci(path: str | Path) -> tuple[str, str, str]:
|
|
78
|
+
if isinstance(path, str):
|
|
79
|
+
path = Path(path)
|
|
80
|
+
if path.is_dir():
|
|
81
|
+
path = path / "index.html"
|
|
82
|
+
with path.open(encoding="utf-8") as fin:
|
|
83
|
+
lines = fin.readlines()
|
|
84
|
+
|
|
85
|
+
def _find_start_line(lines: list[str]) -> int | None:
|
|
86
|
+
for i, line in enumerate(lines):
|
|
87
|
+
if "Change in Value:" in line:
|
|
88
|
+
return i
|
|
89
|
+
return None
|
|
90
|
+
|
|
91
|
+
def _extract_value_from_line(line: str):
|
|
92
|
+
start = line.find(">")
|
|
93
|
+
if start == -1:
|
|
94
|
+
return "0"
|
|
95
|
+
end = line.find("<", start + 1)
|
|
96
|
+
if end == -1:
|
|
97
|
+
return "0"
|
|
98
|
+
return line[start + 1 : end].strip()
|
|
99
|
+
|
|
100
|
+
start = _find_start_line(lines)
|
|
101
|
+
if start is None:
|
|
102
|
+
return ("0", "0", "0")
|
|
103
|
+
lower = _extract_value_from_line(lines[start + 1])
|
|
104
|
+
middle = _extract_value_from_line(lines[start + 2])
|
|
105
|
+
upper = _extract_value_from_line(lines[start + 3])
|
|
106
|
+
return (lower, middle, upper)
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def _gen_report_links_markdown(
|
|
110
|
+
dir_: str | Path, extract_benchmark_name: Callable
|
|
111
|
+
) -> str:
|
|
112
|
+
if isinstance(dir_, str):
|
|
113
|
+
dir_ = Path(dir_)
|
|
114
|
+
paths = (dir_ / "criterion/reports").iterdir()
|
|
115
|
+
paths = list(path for path in paths if path.is_dir())
|
|
116
|
+
paths.sort()
|
|
117
|
+
cips = [(_parse_metric_ci(p), p) for p in paths]
|
|
118
|
+
links = "\n".join(
|
|
119
|
+
_gen_report_link_markdown(cip, extract_benchmark_name) for cip in cips
|
|
120
|
+
)
|
|
121
|
+
links_sorted = "\n".join(
|
|
122
|
+
_gen_report_link_markdown(cip, extract_benchmark_name)
|
|
123
|
+
for cip in _sort_cips(cips)
|
|
124
|
+
)
|
|
125
|
+
return f"""## {dir_} - Sorted By Performance Change
|
|
126
|
+
{links_sorted}
|
|
127
|
+
## {dir_} - Sorted By Name
|
|
128
|
+
{links}
|
|
129
|
+
"""
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
def _gen_report_link_markdown(
|
|
133
|
+
cip: tuple[tuple[str, str, str], Path], extract_benchmark_name: Callable
|
|
134
|
+
) -> str:
|
|
135
|
+
ci, p = cip
|
|
136
|
+
|
|
137
|
+
def _gen_link_md(path: Path):
|
|
138
|
+
text = extract_benchmark_name(path)
|
|
139
|
+
path = "/".join(path.parts[1:])
|
|
140
|
+
link = f"{path}/index.html"
|
|
141
|
+
return f"[{text}]({link})"
|
|
142
|
+
|
|
143
|
+
return f"- {_gen_report_ci_markdown(ci)} {_gen_link_md(p)}"
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def _gen_report_ci_markdown(ci: tuple[str, str, str]) -> str:
|
|
147
|
+
lower, middle, upper = ci
|
|
148
|
+
|
|
149
|
+
def _significance(lower: str, middle: str, upper: str) -> int:
|
|
150
|
+
def _reg_value(value: str):
|
|
151
|
+
return float(value.strip("%").strip())
|
|
152
|
+
|
|
153
|
+
lower = _reg_value(lower)
|
|
154
|
+
middle = _reg_value(middle)
|
|
155
|
+
upper = _reg_value(upper)
|
|
156
|
+
if abs(middle) < 1:
|
|
157
|
+
return 0
|
|
158
|
+
if lower < 0 and upper < 0:
|
|
159
|
+
return -1
|
|
160
|
+
if lower > 0 and upper > 0:
|
|
161
|
+
return 1
|
|
162
|
+
return 0
|
|
163
|
+
|
|
164
|
+
color = {0: "black", -1: "green", 1: "red"}[_significance(lower, middle, upper)]
|
|
165
|
+
return f'<span style="color:{color}"> [{lower}, <b>{middle}</b>, {upper}] </span>'
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
def _sort_cips(
|
|
169
|
+
cips: list[tuple[tuple[str, str, str], Path]],
|
|
170
|
+
) -> list[tuple[tuple[str, str, str], Path]]:
|
|
171
|
+
"""Sort confidence intervals according to the average performance changes."""
|
|
172
|
+
|
|
173
|
+
def _avg_perf_change(elem: tuple[tuple[str, str, str], Path]) -> float:
|
|
174
|
+
perf_change = elem[0][1]
|
|
175
|
+
return -float(perf_change.replace("%", "").strip())
|
|
176
|
+
|
|
177
|
+
return sorted(cips, key=_avg_perf_change)
|
|
178
|
+
|
|
179
|
+
|
|
180
|
+
def _clean_bench_dirs(bench_dir: Path, history: int) -> list[Path]:
|
|
181
|
+
dirs = sorted(bench_dir.glob("[1-9]*/"))
|
|
182
|
+
for path in dirs[:-history]:
|
|
183
|
+
shutil.rmtree(path)
|
|
184
|
+
dirs = dirs[-history:]
|
|
185
|
+
dev = bench_dir / "dev"
|
|
186
|
+
if dev.is_dir():
|
|
187
|
+
dirs.append(dev)
|
|
188
|
+
return dirs
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def _gen_markdown(dirs: list[Path], extract_benchmark_name) -> str:
|
|
192
|
+
sections = "\n".join(
|
|
193
|
+
_gen_report_links_markdown(dir_, extract_benchmark_name)
|
|
194
|
+
for dir_ in reversed(dirs)
|
|
195
|
+
)
|
|
196
|
+
return f"# Benchmarks\n{sections}\n"
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
def gen_index_markdown(
|
|
200
|
+
bench_dir: str | Path = "bench",
|
|
201
|
+
history: int = 1,
|
|
202
|
+
extract_benchmark_name: Callable[[Path], str] = lambda path: path.stem,
|
|
203
|
+
) -> None:
|
|
204
|
+
"""Generate index.md under the benchmark directory.
|
|
205
|
+
:param bench_dir: The root benchmark directory (under the gh-pages branch).
|
|
206
|
+
:param history: The number of historical benchmark results to keep.
|
|
207
|
+
:param extract_benchmark_name: A function to extract a benchmark name from a path.
|
|
208
|
+
"""
|
|
209
|
+
if isinstance(bench_dir, str):
|
|
210
|
+
bench_dir = Path(bench_dir)
|
|
211
|
+
dirs = _clean_bench_dirs(bench_dir=bench_dir, history=history)
|
|
212
|
+
_rename_bench_reports(dirs)
|
|
213
|
+
(bench_dir / "index.md").write_text(
|
|
214
|
+
_gen_markdown(dirs, extract_benchmark_name), encoding="utf-8"
|
|
215
|
+
)
|
|
216
|
+
|
|
217
|
+
|
|
218
|
+
def benchmark(
|
|
219
|
+
local_repo_dir: str | Path,
|
|
220
|
+
pr_number: str,
|
|
221
|
+
bench_dir: str | Path = "bench",
|
|
222
|
+
storage: str = "",
|
|
223
|
+
extract_benchmark_name: Callable[[Path], str] = lambda path: path.stem,
|
|
224
|
+
env: str = "",
|
|
225
|
+
) -> str:
|
|
226
|
+
"""Benchmark using `cargo criterion` and push benchmark results to gh-pages.
|
|
227
|
+
|
|
228
|
+
:param local_repo_dir: Root directory of the local repository.
|
|
229
|
+
:param pr_number: The number of the corresponding PR.
|
|
230
|
+
:param bench_dir: The root benchmark directory (under the gh-pages branch).
|
|
231
|
+
:param storage: The directory relative to bench_dir for storing this benchmark results.
|
|
232
|
+
If not specified (empty or None), pr_number is used.
|
|
233
|
+
:param extract_benchmark_name: A function to extract a benchmark name from a path.
|
|
234
|
+
:param env: Environment variables configuration with the format `var1=val1 var2=val2`
|
|
235
|
+
for the command `cargo-criterion`).
|
|
236
|
+
"""
|
|
237
|
+
if isinstance(bench_dir, str):
|
|
238
|
+
bench_dir = Path(bench_dir)
|
|
239
|
+
if not storage:
|
|
240
|
+
storage = pr_number
|
|
241
|
+
config_git(
|
|
242
|
+
local_repo_dir=local_repo_dir,
|
|
243
|
+
user_email="bench-bot@github.com",
|
|
244
|
+
user_name="bench-bot",
|
|
245
|
+
)
|
|
246
|
+
_cargo_criterion(bench_dir=bench_dir, env=env) # _branch_*
|
|
247
|
+
_copy_bench_results(bench_dir=bench_dir, storage=storage) # gh-pages
|
|
248
|
+
gen_index_markdown(
|
|
249
|
+
bench_dir=bench_dir, history=1, extract_benchmark_name=extract_benchmark_name
|
|
250
|
+
)
|
|
251
|
+
return _git_push_gh_pages(
|
|
252
|
+
bench_dir=bench_dir, pr_number=pr_number
|
|
253
|
+
) # gh-pages_pr_yyyymmdd_hhmmss
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
"""Utils for profiling Rust applications."""
|
|
2
|
+
|
|
3
|
+
from typing import Iterable
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
import time
|
|
6
|
+
import datetime
|
|
7
|
+
import subprocess as sp
|
|
8
|
+
import psutil
|
|
9
|
+
from .utils import build_project
|
|
10
|
+
from ..utils import config_git, switch_branch, push_branch, commit_profiling
|
|
11
|
+
from ...utils import partition, run_cmd
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def launch_application(cmd: list[str]) -> int:
|
|
15
|
+
"""Launch the application to be profiled and return the pid of the process.
|
|
16
|
+
Notice that this works inside Docker containers too.
|
|
17
|
+
"""
|
|
18
|
+
print(
|
|
19
|
+
"Launched application for profiling at ",
|
|
20
|
+
datetime.datetime.now(),
|
|
21
|
+
" using the following command:\n",
|
|
22
|
+
" sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)\n",
|
|
23
|
+
"where cmd is as below:\n ",
|
|
24
|
+
cmd,
|
|
25
|
+
"\n",
|
|
26
|
+
sep="",
|
|
27
|
+
)
|
|
28
|
+
proc = sp.Popen(cmd, stdout=sp.DEVNULL, stderr=sp.PIPE) # pylint: disable=R1732
|
|
29
|
+
time.sleep(3)
|
|
30
|
+
status = proc.poll()
|
|
31
|
+
if status:
|
|
32
|
+
_, stderr = proc.communicate()
|
|
33
|
+
stderr = b"" if stderr is None else stderr
|
|
34
|
+
raise RuntimeError(
|
|
35
|
+
f"The launched application failed with the error code {status}!\n"
|
|
36
|
+
f"Stderr:\n{stderr.decode()}\n"
|
|
37
|
+
)
|
|
38
|
+
if status == 0:
|
|
39
|
+
raise ValueError(
|
|
40
|
+
"The launched application has already finished! "
|
|
41
|
+
"Please use a long running command instead."
|
|
42
|
+
)
|
|
43
|
+
return _find_process_id(proc)
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def _find_process_id(proc: sp.Popen) -> int:
|
|
47
|
+
pids = []
|
|
48
|
+
for process in psutil.process_iter():
|
|
49
|
+
if process.cmdline() == proc.args:
|
|
50
|
+
pids.append(process.pid)
|
|
51
|
+
return min(pids, key=lambda pid: abs(pid - proc.pid))
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def nperf(pid: int, prof_name: str, prof_dir: str | Path = ".") -> Path:
|
|
55
|
+
"""Profile the given process using nperf (not-perf).
|
|
56
|
+
:param pid: The id of the process to be profiled.
|
|
57
|
+
:param prof_name: The name of the profiling.
|
|
58
|
+
:param prof_dir: The directory (the current working directory by default)
|
|
59
|
+
for saving profiling data.
|
|
60
|
+
"""
|
|
61
|
+
if isinstance(prof_dir, str):
|
|
62
|
+
prof_dir = Path(prof_dir)
|
|
63
|
+
time = datetime.datetime.now()
|
|
64
|
+
yymmdd = time.strftime("%Y%m%d")
|
|
65
|
+
prof_dir.mkdir(exist_ok=True, parents=True)
|
|
66
|
+
data_file = prof_dir / f"{yymmdd}_{prof_name}"
|
|
67
|
+
run_cmd(f"nperf record -p {pid} -o '{data_file}'")
|
|
68
|
+
return _gen_flamegraph(data_file)
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def _gen_flamegraph(data_file: Path) -> Path:
|
|
72
|
+
flamegraph = data_file.with_name(data_file.name + ".svg")
|
|
73
|
+
run_cmd(f"nperf flamegraph '{data_file}' > '{flamegraph}'")
|
|
74
|
+
return flamegraph
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def _save_flamegraph(prof_dir: Path, history: int = 5):
|
|
78
|
+
yyyymmdd = (datetime.datetime.today() - datetime.timedelta(days=history)).strftime(
|
|
79
|
+
"%Y%m%d"
|
|
80
|
+
)
|
|
81
|
+
switch_branch("gh-pages", fetch=True)
|
|
82
|
+
for path in prof_dir.iterdir():
|
|
83
|
+
if path.suffix == "":
|
|
84
|
+
path.unlink()
|
|
85
|
+
svgs_keep, svgs_drop = partition(
|
|
86
|
+
lambda p: p.name > yyyymmdd, prof_dir.glob("*.svg")
|
|
87
|
+
)
|
|
88
|
+
for svg in svgs_drop:
|
|
89
|
+
svg.unlink()
|
|
90
|
+
_gen_markdown(svgs=sorted(svgs_keep, reverse=True), prof_dir=prof_dir)
|
|
91
|
+
commit_profiling(prof_dir=prof_dir)
|
|
92
|
+
yyyymmdd = datetime.datetime.now().strftime("%Y%m%d")
|
|
93
|
+
push_branch(branch="gh-pages", branch_alt="gh-pages_prof_" + yyyymmdd)
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def _gen_markdown(svgs: Iterable[Path], prof_dir: Path) -> None:
|
|
97
|
+
def _gen_link(svg: Path):
|
|
98
|
+
svg: str = svg.name
|
|
99
|
+
yyyymmdd = svg[:8]
|
|
100
|
+
prof_name = svg[9:-4]
|
|
101
|
+
return f"- [{prof_name} - {yyyymmdd}]({svg})"
|
|
102
|
+
|
|
103
|
+
links = "\n".join(_gen_link(svg) for svg in svgs)
|
|
104
|
+
markdown = f"# Profiling\n{links}"
|
|
105
|
+
(prof_dir / "index.md").write_text(markdown, encoding="utf-8")
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
def profiling(
|
|
109
|
+
local_repo_dir: str | Path,
|
|
110
|
+
apps: dict[str, list[str]],
|
|
111
|
+
profile: str = "release",
|
|
112
|
+
prof_dir: str | Path = "profiling",
|
|
113
|
+
):
|
|
114
|
+
"""Profiling specified applications."""
|
|
115
|
+
if isinstance(prof_dir, str):
|
|
116
|
+
prof_dir = Path(prof_dir)
|
|
117
|
+
config_git(
|
|
118
|
+
local_repo_dir=local_repo_dir,
|
|
119
|
+
user_email="profiling-bot@github.com",
|
|
120
|
+
user_name="profiling-bot",
|
|
121
|
+
)
|
|
122
|
+
build_project(profile=profile)
|
|
123
|
+
for name, cmd in apps.items():
|
|
124
|
+
pid = launch_application(cmd=cmd)
|
|
125
|
+
nperf(pid=pid, prof_name=name, prof_dir=prof_dir)
|
|
126
|
+
_save_flamegraph(prof_dir=prof_dir)
|
|
@@ -0,0 +1,10 @@
|
|
|
1
|
+
"""Util functions for building GitHub Actions for Rust projects."""
|
|
2
|
+
|
|
3
|
+
from ...utils import run_cmd
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def build_project(profile: str = "release") -> None:
|
|
7
|
+
"""Build the Rust project.
|
|
8
|
+
:param profile: The profile for building.
|
|
9
|
+
"""
|
|
10
|
+
run_cmd(f"RUSTFLAGS=-Awarnings cargo build --profile {profile}")
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
"""Util functions for GitHub actions."""
|
|
2
|
+
|
|
3
|
+
from typing import Iterable
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
import random
|
|
6
|
+
from ..utils import run_cmd
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class FailToPushToGitHubException(Exception):
|
|
10
|
+
"""Exception for failure to push a branch to GitHub."""
|
|
11
|
+
|
|
12
|
+
def __init__(self, branch: str, branch_alt: str):
|
|
13
|
+
msg = f"Failed to push the branch {branch} to GitHub!"
|
|
14
|
+
if branch_alt:
|
|
15
|
+
msg += f" Pushed to {branch_alt} instead."
|
|
16
|
+
super().__init__(msg)
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def config_git(local_repo_dir: str | Path, user_email: str, user_name: str):
|
|
20
|
+
"""Config Git.
|
|
21
|
+
:param local_repo_dir: The root directory of the project.
|
|
22
|
+
:param user_email: The email of the user (no need to be a valid one).
|
|
23
|
+
:param user_name: The name of the user.
|
|
24
|
+
"""
|
|
25
|
+
cmd = f"""git config --global --add safe.directory {local_repo_dir} \
|
|
26
|
+
&& git config --global user.email "{user_email}" \
|
|
27
|
+
&& git config --global user.name "{user_name}"
|
|
28
|
+
"""
|
|
29
|
+
run_cmd(cmd)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def create_branch(branch: str) -> None:
|
|
33
|
+
"""Create a new local branch.
|
|
34
|
+
:param branch: The new local branch to create.
|
|
35
|
+
"""
|
|
36
|
+
run_cmd(f"git checkout -b {branch}")
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def switch_branch(branch: str, fetch: bool) -> None:
|
|
40
|
+
"""Switch to another branch.
|
|
41
|
+
:param branch: The branch to checkout.
|
|
42
|
+
:param fetch: If true, fetch the branch from remote first.
|
|
43
|
+
"""
|
|
44
|
+
if fetch:
|
|
45
|
+
run_cmd(f"git fetch origin {branch}")
|
|
46
|
+
run_cmd(f"git checkout {branch}")
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def gen_temp_branch(
|
|
50
|
+
prefix: str = "_branch_", chars: Iterable[str | int] = range(10), nrand: int = 10
|
|
51
|
+
) -> str:
|
|
52
|
+
"""Generate a name for a (temp) branch.
|
|
53
|
+
:param prefix: The prefix of the name.
|
|
54
|
+
:param chars: An iterable of characters to sample from to form the suffix of the name.
|
|
55
|
+
:param nrand: The number of characters for the suffix of the name.
|
|
56
|
+
"""
|
|
57
|
+
if not isinstance(chars, (list, tuple)):
|
|
58
|
+
chars = list(chars)
|
|
59
|
+
chars = random.sample(chars, nrand)
|
|
60
|
+
return prefix + "".join(str(char) for char in chars)
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def push_branch(branch: str, branch_alt: str = ""):
|
|
64
|
+
"""Try pushing a local Git branch to remote.
|
|
65
|
+
On failure, fork an alternative branch (if specified) and push it to GitHub.
|
|
66
|
+
:param branch: The local branch to push to GitHub.
|
|
67
|
+
:param branch_alt: An alternative branch name to push to GitHub.
|
|
68
|
+
"""
|
|
69
|
+
try:
|
|
70
|
+
run_cmd(f"git push origin {branch}")
|
|
71
|
+
except Exception as err:
|
|
72
|
+
if branch_alt:
|
|
73
|
+
cmd = f"""git checkout {branch} \
|
|
74
|
+
&& git checkout -b {branch_alt} \
|
|
75
|
+
&& git push origin {branch_alt}
|
|
76
|
+
"""
|
|
77
|
+
run_cmd(cmd)
|
|
78
|
+
raise FailToPushToGitHubException(branch, branch_alt) from err
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
def commit_benchmarks(bench_dir: str | Path):
|
|
82
|
+
"""Commit changes in the benchmark directory.
|
|
83
|
+
:param bench_dir: The benchmark directory.
|
|
84
|
+
"""
|
|
85
|
+
run_cmd(f"git add {bench_dir} && git commit -m 'add benchmarks'")
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def commit_profiling(prof_dir: str | Path):
|
|
89
|
+
"""Commit changes in the profiling directory.
|
|
90
|
+
:param prof_dir: The profiling directory.
|
|
91
|
+
"""
|
|
92
|
+
run_cmd(f"git add {prof_dir} && git commit -m 'update profiling results'")
|
|
@@ -0,0 +1,318 @@
|
|
|
1
|
+
"""Simple wrapper of GitHub REST APIs."""
|
|
2
|
+
|
|
3
|
+
from enum import StrEnum
|
|
4
|
+
from typing import Any, Callable
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
import requests
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def build_http_headers(token: str) -> dict[str, str]:
|
|
10
|
+
"""Build headers for GitHub REST APIs.
|
|
11
|
+
:param token: The authentication token for GitHub REST APIs.
|
|
12
|
+
"""
|
|
13
|
+
headers = {
|
|
14
|
+
"Accept": "application/vnd.github+json",
|
|
15
|
+
}
|
|
16
|
+
if token:
|
|
17
|
+
headers["Authorization"] = f"Bearer {token}"
|
|
18
|
+
return headers
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def _is_rust(file: str) -> bool:
|
|
22
|
+
path = Path(file)
|
|
23
|
+
if path.name in ("Cargo.toml", "Cargo.lock"):
|
|
24
|
+
return True
|
|
25
|
+
if path.suffix == ".rs":
|
|
26
|
+
return True
|
|
27
|
+
return False
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class GitHub:
|
|
31
|
+
def __init__(self, token: str):
|
|
32
|
+
self._token = token
|
|
33
|
+
self._headers = build_http_headers(token)
|
|
34
|
+
|
|
35
|
+
def get(
|
|
36
|
+
self, url: str, raise_for_status: bool = True, **kwargs
|
|
37
|
+
) -> requests.Response:
|
|
38
|
+
resp = requests.get(
|
|
39
|
+
url=url,
|
|
40
|
+
headers=self._headers,
|
|
41
|
+
timeout=10,
|
|
42
|
+
**kwargs,
|
|
43
|
+
)
|
|
44
|
+
if raise_for_status:
|
|
45
|
+
resp.raise_for_status()
|
|
46
|
+
return resp
|
|
47
|
+
|
|
48
|
+
def post(
|
|
49
|
+
self, url: str, headers=None, raise_for_status: bool = True, **kwargs
|
|
50
|
+
) -> requests.Response:
|
|
51
|
+
if headers is None:
|
|
52
|
+
headers = self._headers
|
|
53
|
+
resp = requests.post(
|
|
54
|
+
url=url,
|
|
55
|
+
headers=headers,
|
|
56
|
+
timeout=10,
|
|
57
|
+
**kwargs,
|
|
58
|
+
)
|
|
59
|
+
if raise_for_status:
|
|
60
|
+
resp.raise_for_status()
|
|
61
|
+
return resp
|
|
62
|
+
|
|
63
|
+
def delete(self, url, raise_for_status: bool = True) -> requests.Response:
|
|
64
|
+
resp = requests.delete(url=url, headers=self._headers, timeout=10)
|
|
65
|
+
if raise_for_status:
|
|
66
|
+
resp.raise_for_status()
|
|
67
|
+
return resp
|
|
68
|
+
|
|
69
|
+
def put(self, url, raise_for_status: bool = True) -> requests.Response:
|
|
70
|
+
resp = requests.put(
|
|
71
|
+
url=url,
|
|
72
|
+
headers=self._headers,
|
|
73
|
+
timeout=10,
|
|
74
|
+
)
|
|
75
|
+
if raise_for_status:
|
|
76
|
+
resp.raise_for_status()
|
|
77
|
+
return resp
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
class Repository(GitHub):
|
|
81
|
+
"""Abstraction of a GitHub repository."""
|
|
82
|
+
|
|
83
|
+
def __init__(self, token: str, owner: str, repo: str):
|
|
84
|
+
"""Initialize Repository.
|
|
85
|
+
:param token: An authorization token for GitHub REST APIs.
|
|
86
|
+
:param owner: The owner of the repository.
|
|
87
|
+
:param repo: The name of the repository.
|
|
88
|
+
"""
|
|
89
|
+
super().__init__(token)
|
|
90
|
+
self._owner = owner
|
|
91
|
+
self._repo = repo
|
|
92
|
+
self._url_pull = f"https://api.github.com/repos/{owner}/{repo}/pulls"
|
|
93
|
+
self._url_branches = f"https://api.github.com/repos/{owner}/{repo}/branches"
|
|
94
|
+
self._url_refs = f"https://api.github.com/repos/{owner}/{repo}/git/refs"
|
|
95
|
+
self._url_issues = f"https://api.github.com/repos/{owner}/{repo}/issues"
|
|
96
|
+
self._url_releases = f"https://api.github.com/repos/{owner}/{repo}/releases"
|
|
97
|
+
|
|
98
|
+
def get_releases(self) -> list[dict[str, Any]]:
|
|
99
|
+
"""List releases in this repository."""
|
|
100
|
+
return self.get(url=self._url_releases).json()
|
|
101
|
+
|
|
102
|
+
def get_release_latest(self) -> dict[str, Any]:
|
|
103
|
+
return self.get(url=f"{self._url_releases}/latest").json()
|
|
104
|
+
|
|
105
|
+
def get_release_assets(self, release: int) -> list[dict[str, Any]]:
|
|
106
|
+
return requests.get(url=f"{self._url_releases}/{release}/assets").json()
|
|
107
|
+
|
|
108
|
+
def create_release(self, json: dict[str, Any]):
|
|
109
|
+
"""Create a release.
|
|
110
|
+
|
|
111
|
+
:param json: A dict containing the following info for the release.
|
|
112
|
+
- tag_name
|
|
113
|
+
- target_commitish
|
|
114
|
+
- name
|
|
115
|
+
- body
|
|
116
|
+
- draft
|
|
117
|
+
- prerelease
|
|
118
|
+
- generate_release_notes
|
|
119
|
+
It's passed to the json parameter of requests.post.
|
|
120
|
+
For more details, please refer to
|
|
121
|
+
https://docs.github.com/en/rest/releases/releases#create-a-release.
|
|
122
|
+
"""
|
|
123
|
+
if not isinstance(json, dict):
|
|
124
|
+
raise ValueError("A dict value is required for `json`.")
|
|
125
|
+
return self.post(
|
|
126
|
+
url=self._url_releases,
|
|
127
|
+
json=json,
|
|
128
|
+
).json()
|
|
129
|
+
|
|
130
|
+
def upload_release_asset(
|
|
131
|
+
self, release: int, name: str, path: str | Path
|
|
132
|
+
) -> dict[str, Any]:
|
|
133
|
+
if isinstance(path, str):
|
|
134
|
+
path = Path(path)
|
|
135
|
+
with path.open(mode="rb") as fin:
|
|
136
|
+
return self.post(
|
|
137
|
+
url=f"{self._url_releases.replace('api', 'uploads', 1)}/{release}/assets",
|
|
138
|
+
params={
|
|
139
|
+
"name": name,
|
|
140
|
+
},
|
|
141
|
+
headers=self._headers
|
|
142
|
+
| {
|
|
143
|
+
"Content-Type": "application/octet-stream",
|
|
144
|
+
},
|
|
145
|
+
data=fin,
|
|
146
|
+
).json()
|
|
147
|
+
|
|
148
|
+
def get_pull_requests(self) -> list[dict[str, Any]]:
|
|
149
|
+
"""List pull requests in this repository."""
|
|
150
|
+
return self.get(url=self._url_pull).json()
|
|
151
|
+
|
|
152
|
+
def create_pull_request(self, json: dict[str, str]) -> dict[str, Any] | None:
|
|
153
|
+
"""Create a pull request.
|
|
154
|
+
|
|
155
|
+
:param json: A dict containing info (e.g., base, head, title, body, etc.)
|
|
156
|
+
about the pull request to be created.
|
|
157
|
+
It's passed to the json parameter of requests.post.
|
|
158
|
+
"""
|
|
159
|
+
if not isinstance(json, dict):
|
|
160
|
+
raise ValueError("A dict value is required for `json`.")
|
|
161
|
+
if not ("head" in json and "base" in json):
|
|
162
|
+
raise ValueError("The data dict must contains keys head and base!")
|
|
163
|
+
# return an existing PR
|
|
164
|
+
prs = self.get_pull_requests()
|
|
165
|
+
for pr in prs:
|
|
166
|
+
if pr["head"]["ref"] == json["head"] and pr["base"]["ref"] == json["base"]:
|
|
167
|
+
return pr
|
|
168
|
+
# creat a new PR
|
|
169
|
+
resp = self.post(
|
|
170
|
+
url=self._url_pull,
|
|
171
|
+
json=json,
|
|
172
|
+
raise_for_status=False,
|
|
173
|
+
)
|
|
174
|
+
if resp.status_code == 422:
|
|
175
|
+
return None
|
|
176
|
+
resp.raise_for_status()
|
|
177
|
+
return resp.json()
|
|
178
|
+
|
|
179
|
+
def merge_pull_request(self, pr_number: int) -> dict[str, Any]:
|
|
180
|
+
"""Merge a pull request in this repository.
|
|
181
|
+
:param pr_number: The number of the pull quest to be merged.
|
|
182
|
+
"""
|
|
183
|
+
if not isinstance(pr_number, int):
|
|
184
|
+
raise ValueError("An integer value is required for `pr_number`.")
|
|
185
|
+
return self.put(
|
|
186
|
+
url=f"{self._url_pull}/{pr_number}/merge",
|
|
187
|
+
).json()
|
|
188
|
+
|
|
189
|
+
def update_branch(self, update: str, upstream: str) -> dict[str, Any] | None:
|
|
190
|
+
"""Update a branch by creating a PR from upstream and then merge it.
|
|
191
|
+
:param update: The branch to update.
|
|
192
|
+
:param upstream: The upstream branch.
|
|
193
|
+
"""
|
|
194
|
+
if not isinstance(update, str):
|
|
195
|
+
raise ValueError("A string value is required for `update`.")
|
|
196
|
+
if not isinstance(upstream, str):
|
|
197
|
+
raise ValueError("A string value is required for `upstream`.")
|
|
198
|
+
pr = self.create_pull_request(
|
|
199
|
+
{
|
|
200
|
+
"base": update,
|
|
201
|
+
"head": upstream,
|
|
202
|
+
"title": f"Merge {upstream} into {update}",
|
|
203
|
+
},
|
|
204
|
+
)
|
|
205
|
+
if pr is None:
|
|
206
|
+
return
|
|
207
|
+
return self.merge_pull_request(pr["number"])
|
|
208
|
+
|
|
209
|
+
def get_pull_request_files(self, pr_number: int) -> list[dict[str, Any]]:
|
|
210
|
+
"""List changed files in the specified GitHub pull request.
|
|
211
|
+
|
|
212
|
+
:param pr_number: The number of the pull request.
|
|
213
|
+
"""
|
|
214
|
+
if not isinstance(pr_number, int):
|
|
215
|
+
raise ValueError("An integer value is required for `pr_number`.")
|
|
216
|
+
return self.get(url=f"{self._url_pull}/{pr_number}/files").json()
|
|
217
|
+
|
|
218
|
+
def get_branches(self) -> list[dict[str, Any]]:
|
|
219
|
+
"""List branches in this repository."""
|
|
220
|
+
return self.get(url=self._url_branches).json()
|
|
221
|
+
|
|
222
|
+
def delete_ref(self, ref: str) -> dict[str, Any]:
|
|
223
|
+
"""Delete a reference from this repository.
|
|
224
|
+
:param ref: The reference to delete from this repository.
|
|
225
|
+
"""
|
|
226
|
+
if not isinstance(ref, str):
|
|
227
|
+
raise ValueError("A string value is required for `ref`.")
|
|
228
|
+
return self.delete(
|
|
229
|
+
url=f"{self._url_refs}/{ref}",
|
|
230
|
+
).json()
|
|
231
|
+
|
|
232
|
+
def delete_branch(self, branch: str) -> dict[str, Any]:
|
|
233
|
+
"""Delete a branch from this repository.
|
|
234
|
+
:param branch: The branch to delete from this repository.
|
|
235
|
+
"""
|
|
236
|
+
return self.delete_ref(ref=f"heads/{branch}")
|
|
237
|
+
|
|
238
|
+
def pr_has_change(
|
|
239
|
+
self, pr_number: int, pred: Callable[[str], bool] = lambda _: True
|
|
240
|
+
) -> bool:
|
|
241
|
+
"""Check whether a PR has any change satisfying pred.
|
|
242
|
+
|
|
243
|
+
:param pr_number: The number of the corresponding pull request.
|
|
244
|
+
:param pred: A boolean predictor (always true by default)
|
|
245
|
+
checking whether a single file has specific changes.
|
|
246
|
+
"""
|
|
247
|
+
files = self.get_pull_request_files(pr_number)
|
|
248
|
+
return any(pred(file["filename"]) for file in files)
|
|
249
|
+
|
|
250
|
+
def pr_has_rust_change(
    self, pr_number: int, pred: Callable[[str], bool] = _is_rust
) -> bool:
    """Check whether a PR has any Rust-related changes.

    :param pr_number: The number of the corresponding pull request.
    :param pred: A customized boolean predicate checking Rust-related changes
        (defaults to the module-level ``_is_rust`` check).
    """
    # Thin wrapper over pr_has_change with a Rust-specific default predicate.
    return self.pr_has_change(pr_number=pr_number, pred=pred)
|
|
260
|
+
|
|
261
|
+
def create_issue_comment(self, issue_number: int, body: str) -> dict[str, Any]:
    """Post a new comment on an issue.

    :param issue_number: The number of the issue.
    :param body: Body text of the new comment.
    :raises ValueError: If ``issue_number`` is not an int or ``body`` is not a str.
    """
    if not isinstance(issue_number, int):
        raise ValueError("An integer value is required for `issue_number`.")
    if not isinstance(body, str):
        raise ValueError("A string message is required for `body`.")
    endpoint = f"{self._url_issues}/{issue_number}/comments"
    response = self.post(url=endpoint, json={"body": body}, timeout=10)
    return response.json()
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
class RepositoryType(StrEnum):
    """Repository visibility filter passed as the ``type`` query parameter
    when listing an organization's repositories (see ``Organization.get_repositories``).
    """

    ALL = "all"
    PUBLIC = "public"
    PRIVATE = "private"
|
|
282
|
+
|
|
283
|
+
|
|
284
|
+
class Organization(GitHub):
    """Wrapper of GitHub REST APIs scoped to a single organization."""

    def __init__(self, token: str, owner: str):
        """Initialize Organization.

        :param token: An authorization token for GitHub REST APIs.
        :param owner: The organization that owns the repositories.
        """
        super().__init__(token)
        self._owner = owner
        self._url_repos = f"https://api.github.com/orgs/{owner}/repos"

    def get_repositories(
        self, type_: RepositoryType = RepositoryType.ALL
    ) -> list[dict[str, Any]]:
        """Get all accessible repositories, following pagination.

        :param type_: Type of repositories (e.g., public).
        """
        per_page = 100
        page = 1
        repos: list[dict[str, Any]] = []
        while True:
            resp = self.get(
                url=self._url_repos,
                params={"type": type_, "page": page, "per_page": per_page},
            )
            resp.raise_for_status()
            data = resp.json()
            repos.extend(data)
            # A short (or empty) page signals the final page of results.
            if len(data) < per_page:
                return repos
            page += 1

    def instantiate_repository(self, repo: str) -> Repository:
        """Build a Repository helper for a repo owned by this organization.

        :param repo: The name of the repository.
        """
        return Repository(token=self._token, owner=self._owner, repo=repo)
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
"""Some generally useful util functions."""
|
|
2
|
+
|
|
3
|
+
from itertools import tee, filterfalse
|
|
4
|
+
import logging
|
|
5
|
+
import subprocess as sp
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def partition(pred, iterable):
    """Split an iterable into entries that satisfy *pred* and entries that do not.

    :param pred: A function takes one parameter and return a boolean value.
    :param iterable: An iterable of values to partition.
    :return: A pair of lazy iterators ``(true_entries, false_entries)``.
    """
    # Two independent copies so each returned iterator can advance on its own.
    matched, unmatched = tee(iterable)
    return filter(pred, matched), filterfalse(pred, unmatched)
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def run_cmd(cmd: list | str, capture_output: bool = False) -> None:
|
|
18
|
+
"""Run a shell command.
|
|
19
|
+
|
|
20
|
+
:param cmd: The command to run.
|
|
21
|
+
:param capture_output: Whether to capture stdout and stderr of the command.
|
|
22
|
+
"""
|
|
23
|
+
proc = sp.run(
|
|
24
|
+
cmd, shell=isinstance(cmd, str), check=True, capture_output=capture_output
|
|
25
|
+
)
|
|
26
|
+
logging.debug(proc.args)
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "github_rest_api"
|
|
3
|
+
version = "0.26.0"
|
|
4
|
+
description = "Simple wrapper of GitHub REST APIs."
|
|
5
|
+
authors = [{ name = "Ben Du", email = "longendu@yahoo.com" }]
|
|
6
|
+
requires-python = ">=3.11,<4"
|
|
7
|
+
readme = "README.md"
|
|
8
|
+
dependencies = [
|
|
9
|
+
"requests>=2.28.2",
|
|
10
|
+
"psutil>=5.9.4",
|
|
11
|
+
]
|
|
12
|
+
|
|
13
|
+
[dependency-groups]
|
|
14
|
+
dev = [
|
|
15
|
+
"pyright>=1.1.407",
|
|
16
|
+
"ruff>=0.14.10",
|
|
17
|
+
"ty>=0.0.8",
|
|
18
|
+
]
|
|
19
|
+
|
|
20
|
+
[tool.hatch.build.targets.sdist]
|
|
21
|
+
include = ["github_rest_api"]
|
|
22
|
+
|
|
23
|
+
[tool.hatch.build.targets.wheel]
|
|
24
|
+
include = ["github_rest_api"]
|
|
25
|
+
|
|
26
|
+
[build-system]
|
|
27
|
+
requires = ["hatchling"]
|
|
28
|
+
build-backend = "hatchling.build"
|