stouputils-1.14.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- stouputils/__init__.py +40 -0
- stouputils/__main__.py +86 -0
- stouputils/_deprecated.py +37 -0
- stouputils/all_doctests.py +160 -0
- stouputils/applications/__init__.py +22 -0
- stouputils/applications/automatic_docs.py +634 -0
- stouputils/applications/upscaler/__init__.py +39 -0
- stouputils/applications/upscaler/config.py +128 -0
- stouputils/applications/upscaler/image.py +247 -0
- stouputils/applications/upscaler/video.py +287 -0
- stouputils/archive.py +344 -0
- stouputils/backup.py +488 -0
- stouputils/collections.py +244 -0
- stouputils/continuous_delivery/__init__.py +27 -0
- stouputils/continuous_delivery/cd_utils.py +243 -0
- stouputils/continuous_delivery/github.py +522 -0
- stouputils/continuous_delivery/pypi.py +130 -0
- stouputils/continuous_delivery/pyproject.py +147 -0
- stouputils/continuous_delivery/stubs.py +86 -0
- stouputils/ctx.py +408 -0
- stouputils/data_science/config/get.py +51 -0
- stouputils/data_science/config/set.py +125 -0
- stouputils/data_science/data_processing/image/__init__.py +66 -0
- stouputils/data_science/data_processing/image/auto_contrast.py +79 -0
- stouputils/data_science/data_processing/image/axis_flip.py +58 -0
- stouputils/data_science/data_processing/image/bias_field_correction.py +74 -0
- stouputils/data_science/data_processing/image/binary_threshold.py +73 -0
- stouputils/data_science/data_processing/image/blur.py +59 -0
- stouputils/data_science/data_processing/image/brightness.py +54 -0
- stouputils/data_science/data_processing/image/canny.py +110 -0
- stouputils/data_science/data_processing/image/clahe.py +92 -0
- stouputils/data_science/data_processing/image/common.py +30 -0
- stouputils/data_science/data_processing/image/contrast.py +53 -0
- stouputils/data_science/data_processing/image/curvature_flow_filter.py +74 -0
- stouputils/data_science/data_processing/image/denoise.py +378 -0
- stouputils/data_science/data_processing/image/histogram_equalization.py +123 -0
- stouputils/data_science/data_processing/image/invert.py +64 -0
- stouputils/data_science/data_processing/image/laplacian.py +60 -0
- stouputils/data_science/data_processing/image/median_blur.py +52 -0
- stouputils/data_science/data_processing/image/noise.py +59 -0
- stouputils/data_science/data_processing/image/normalize.py +65 -0
- stouputils/data_science/data_processing/image/random_erase.py +66 -0
- stouputils/data_science/data_processing/image/resize.py +69 -0
- stouputils/data_science/data_processing/image/rotation.py +80 -0
- stouputils/data_science/data_processing/image/salt_pepper.py +68 -0
- stouputils/data_science/data_processing/image/sharpening.py +55 -0
- stouputils/data_science/data_processing/image/shearing.py +64 -0
- stouputils/data_science/data_processing/image/threshold.py +64 -0
- stouputils/data_science/data_processing/image/translation.py +71 -0
- stouputils/data_science/data_processing/image/zoom.py +83 -0
- stouputils/data_science/data_processing/image_augmentation.py +118 -0
- stouputils/data_science/data_processing/image_preprocess.py +183 -0
- stouputils/data_science/data_processing/prosthesis_detection.py +359 -0
- stouputils/data_science/data_processing/technique.py +481 -0
- stouputils/data_science/dataset/__init__.py +45 -0
- stouputils/data_science/dataset/dataset.py +292 -0
- stouputils/data_science/dataset/dataset_loader.py +135 -0
- stouputils/data_science/dataset/grouping_strategy.py +296 -0
- stouputils/data_science/dataset/image_loader.py +100 -0
- stouputils/data_science/dataset/xy_tuple.py +696 -0
- stouputils/data_science/metric_dictionnary.py +106 -0
- stouputils/data_science/metric_utils.py +847 -0
- stouputils/data_science/mlflow_utils.py +206 -0
- stouputils/data_science/models/abstract_model.py +149 -0
- stouputils/data_science/models/all.py +85 -0
- stouputils/data_science/models/base_keras.py +765 -0
- stouputils/data_science/models/keras/all.py +38 -0
- stouputils/data_science/models/keras/convnext.py +62 -0
- stouputils/data_science/models/keras/densenet.py +50 -0
- stouputils/data_science/models/keras/efficientnet.py +60 -0
- stouputils/data_science/models/keras/mobilenet.py +56 -0
- stouputils/data_science/models/keras/resnet.py +52 -0
- stouputils/data_science/models/keras/squeezenet.py +233 -0
- stouputils/data_science/models/keras/vgg.py +42 -0
- stouputils/data_science/models/keras/xception.py +38 -0
- stouputils/data_science/models/keras_utils/callbacks/__init__.py +20 -0
- stouputils/data_science/models/keras_utils/callbacks/colored_progress_bar.py +219 -0
- stouputils/data_science/models/keras_utils/callbacks/learning_rate_finder.py +148 -0
- stouputils/data_science/models/keras_utils/callbacks/model_checkpoint_v2.py +31 -0
- stouputils/data_science/models/keras_utils/callbacks/progressive_unfreezing.py +249 -0
- stouputils/data_science/models/keras_utils/callbacks/warmup_scheduler.py +66 -0
- stouputils/data_science/models/keras_utils/losses/__init__.py +12 -0
- stouputils/data_science/models/keras_utils/losses/next_generation_loss.py +56 -0
- stouputils/data_science/models/keras_utils/visualizations.py +416 -0
- stouputils/data_science/models/model_interface.py +939 -0
- stouputils/data_science/models/sandbox.py +116 -0
- stouputils/data_science/range_tuple.py +234 -0
- stouputils/data_science/scripts/augment_dataset.py +77 -0
- stouputils/data_science/scripts/exhaustive_process.py +133 -0
- stouputils/data_science/scripts/preprocess_dataset.py +70 -0
- stouputils/data_science/scripts/routine.py +168 -0
- stouputils/data_science/utils.py +285 -0
- stouputils/decorators.py +605 -0
- stouputils/image.py +441 -0
- stouputils/installer/__init__.py +18 -0
- stouputils/installer/common.py +67 -0
- stouputils/installer/downloader.py +101 -0
- stouputils/installer/linux.py +144 -0
- stouputils/installer/main.py +223 -0
- stouputils/installer/windows.py +136 -0
- stouputils/io.py +486 -0
- stouputils/parallel.py +483 -0
- stouputils/print.py +482 -0
- stouputils/py.typed +1 -0
- stouputils/stouputils/__init__.pyi +15 -0
- stouputils/stouputils/_deprecated.pyi +12 -0
- stouputils/stouputils/all_doctests.pyi +46 -0
- stouputils/stouputils/applications/__init__.pyi +2 -0
- stouputils/stouputils/applications/automatic_docs.pyi +106 -0
- stouputils/stouputils/applications/upscaler/__init__.pyi +3 -0
- stouputils/stouputils/applications/upscaler/config.pyi +18 -0
- stouputils/stouputils/applications/upscaler/image.pyi +109 -0
- stouputils/stouputils/applications/upscaler/video.pyi +60 -0
- stouputils/stouputils/archive.pyi +67 -0
- stouputils/stouputils/backup.pyi +109 -0
- stouputils/stouputils/collections.pyi +86 -0
- stouputils/stouputils/continuous_delivery/__init__.pyi +5 -0
- stouputils/stouputils/continuous_delivery/cd_utils.pyi +129 -0
- stouputils/stouputils/continuous_delivery/github.pyi +162 -0
- stouputils/stouputils/continuous_delivery/pypi.pyi +53 -0
- stouputils/stouputils/continuous_delivery/pyproject.pyi +67 -0
- stouputils/stouputils/continuous_delivery/stubs.pyi +39 -0
- stouputils/stouputils/ctx.pyi +211 -0
- stouputils/stouputils/decorators.pyi +252 -0
- stouputils/stouputils/image.pyi +172 -0
- stouputils/stouputils/installer/__init__.pyi +5 -0
- stouputils/stouputils/installer/common.pyi +39 -0
- stouputils/stouputils/installer/downloader.pyi +24 -0
- stouputils/stouputils/installer/linux.pyi +39 -0
- stouputils/stouputils/installer/main.pyi +57 -0
- stouputils/stouputils/installer/windows.pyi +31 -0
- stouputils/stouputils/io.pyi +213 -0
- stouputils/stouputils/parallel.pyi +216 -0
- stouputils/stouputils/print.pyi +136 -0
- stouputils/stouputils/version_pkg.pyi +15 -0
- stouputils/version_pkg.py +189 -0
- stouputils-1.14.0.dist-info/METADATA +178 -0
- stouputils-1.14.0.dist-info/RECORD +140 -0
- stouputils-1.14.0.dist-info/WHEEL +4 -0
- stouputils-1.14.0.dist-info/entry_points.txt +3 -0

stouputils/continuous_delivery/github.py
@@ -0,0 +1,522 @@
+""" This module contains utilities for continuous delivery on GitHub.
+
+- upload_to_github: Upload the project to GitHub using the credentials and the configuration
+    (make a release and upload the assets, handle existing tag, generate changelog, etc.)
+
+.. image:: https://raw.githubusercontent.com/Stoupy51/stouputils/refs/heads/main/assets/continuous_delivery/github_module.gif
+    :alt: stouputils upload_to_github examples
+"""
+
+# Imports
+import os
+from typing import Any
+
+from ..decorators import handle_error, measure_time
+from ..io import clean_path
+from ..print import info, progress, warning
+from .cd_utils import clean_version, handle_response, version_to_float
+
+# Constants
+GITHUB_API_URL: str = "https://api.github.com"
+PROJECT_ENDPOINT: str = f"{GITHUB_API_URL}/repos"
+COMMIT_TYPES: dict[str, str] = {
+    "feat": "Features",
+    "fix": "Bug Fixes",
+    "docs": "Documentation",
+    "style": "Style",
+    "refactor": "Code Refactoring",
+    "perf": "Performance Improvements",
+    "test": "Tests",
+    "build": "Build System",
+    "ci": "CI/CD",
+    "chore": "Chores",
+    "revert": "Reverts",
+    "uwu": "UwU ༼ つ ◕_◕ ༽つ",
+}
+
+def validate_credentials(credentials: dict[str, dict[str, str]]) -> tuple[str, dict[str, str]]:
+    """ Get and validate GitHub credentials
+
+    Args:
+        credentials (dict[str, dict[str, str]]): Credentials for the GitHub API
+    Returns:
+        tuple[str, dict[str, str]]:
+            str: Owner (the username of the account to use)
+
+            dict[str, str]: Headers (for the requests to the GitHub API)
+    """
+    if "github" not in credentials:
+        raise ValueError(
+            "The credentials file must contain a 'github' key, which is a dictionary containing a 'api_key' key"
+            "(a PAT for the GitHub API: https://github.com/settings/tokens) "
+            "and a 'username' key (the username of the account to use)"
+        )
+    if "api_key" not in credentials["github"]:
+        raise ValueError(
+            "The credentials file must contain a 'github' key, which is a dictionary containing a 'api_key' key"
+            "(a PAT for the GitHub API: https://github.com/settings/tokens) "
+            "and a 'username' key (the username of the account to use)"
+        )
+    if "username" not in credentials["github"]:
+        raise ValueError(
+            "The credentials file must contain a 'github' key, which is a dictionary containing a 'api_key' key"
+            "(a PAT for the GitHub API: https://github.com/settings/tokens) "
+            "and a 'username' key (the username of the account to use)"
+        )
+
+    api_key: str = credentials["github"]["api_key"]
+    owner: str = credentials["github"]["username"]
+    headers: dict[str, str] = {"Authorization": f"Bearer {api_key}"}
+    return owner, headers
+
+def validate_config(github_config: dict[str, Any]) -> tuple[str, str, str, list[str]]:
+    """ Validate GitHub configuration
+
+    Args:
+        github_config (dict[str, str]): Configuration for the GitHub project
+    Returns:
+        tuple[str, str, str, list[str]]:
+            str: Project name on GitHub
+
+            str: Version of the project
+
+            str: Build folder path containing zip files to upload to the release
+
+            list[str]: List of zip files to upload to the release
+    """
+    if "project_name" not in github_config:
+        raise ValueError(
+            "The github_config file must contain a 'project_name' key, "
+            "which is the name of the project on GitHub"
+        )
+    if "version" not in github_config:
+        raise ValueError(
+            "The github_config file must contain a 'version' key, "
+            "which is the version of the project"
+        )
+    if "build_folder" not in github_config:
+        raise ValueError(
+            "The github_config file must contain a 'build_folder' key, "
+            "which is the folder containing the build of the project "
+            "(datapack and resourcepack zip files)"
+        )
+
+    project_name: str = github_config["project_name"]
+    version: str = github_config["version"]
+    build_folder: str = github_config["build_folder"]
+    endswith: list[str] = github_config.get("endswith", [])
+
+    return project_name, version, build_folder, endswith
+
+def handle_existing_tag(owner: str, project_name: str, version: str, headers: dict[str, str]) -> bool:
+    """ Check if tag exists and handle deletion if needed
+
+    Args:
+        owner (str): GitHub username
+        project_name (str): Name of the GitHub repository
+        version (str): Version to check for existing tag
+        headers (dict[str, str]): Headers for GitHub API requests
+    Returns:
+        bool: True if the tag was deleted or if it was not found, False otherwise
+    """
+    # Get the tag URL and check if it exists
+    import requests
+    tag_url: str = f"{PROJECT_ENDPOINT}/{owner}/{project_name}/git/refs/tags/v{version}"
+    response: requests.Response = requests.get(tag_url, headers=headers)
+
+    # If the tag exists, ask the user if they want to delete it
+    if response.status_code == 200:
+        warning(f"A tag v{version} already exists. Do you want to delete it? (y/N): ")
+        if input().lower() == "y":
+            delete_existing_release(owner, project_name, version, headers)
+            delete_existing_tag(tag_url, headers)
+            return True
+        else:
+            return False
+    return True
+
+def delete_existing_release(owner: str, project_name: str, version: str, headers: dict[str, str]) -> None:
+    """ Delete existing release for a version
+
+    Args:
+        owner (str): GitHub username
+        project_name (str): Name of the GitHub repository
+        version (str): Version of the release to delete
+        headers (dict[str, str]): Headers for GitHub API requests
+    """
+    # Get the release URL and check if it exists
+    import requests
+    releases_url: str = f"{PROJECT_ENDPOINT}/{owner}/{project_name}/releases/tags/v{version}"
+    release_response: requests.Response = requests.get(releases_url, headers=headers)
+
+    # If the release exists, delete it
+    if release_response.status_code == 200:
+        release_id: int = release_response.json()["id"]
+        delete_release: requests.Response = requests.delete(
+            f"{PROJECT_ENDPOINT}/{owner}/{project_name}/releases/{release_id}",
+            headers=headers
+        )
+        handle_response(delete_release, "Failed to delete existing release")
+        info(f"Deleted existing release for v{version}")
+
+def delete_existing_tag(tag_url: str, headers: dict[str, str]) -> None:
+    """ Delete existing tag
+
+    Args:
+        tag_url (str): URL of the tag to delete
+        headers (dict[str, str]): Headers for GitHub API requests
+    """
+    import requests
+    delete_response: requests.Response = requests.delete(tag_url, headers=headers)
+    handle_response(delete_response, "Failed to delete existing tag")
+    info("Deleted existing tag")
+
+def get_latest_tag(
+    owner: str, project_name: str, version: str, headers: dict[str, str]
+) -> tuple[str, str] | tuple[None, None]:
+    """ Get latest tag information
+
+    Args:
+        owner (str): GitHub username
+        project_name (str): Name of the GitHub repository
+        version (str): Version to remove from the list of tags
+        headers (dict[str, str]): Headers for GitHub API requests
+    Returns:
+        str|None: SHA of the latest tag commit, None if no tags exist
+        str|None: Version number of the latest tag, None if no tags exist
+    """
+    # Get the tags list
+    import requests
+    tags_url: str = f"{PROJECT_ENDPOINT}/{owner}/{project_name}/tags"
+    response = requests.get(tags_url, headers=headers)
+    handle_response(response, "Failed to get tags")
+    tags: list[dict[str, Any]] = response.json()
+
+    # Remove the version from the list of tags and sort the tags by their float values
+    tags = [tag for tag in tags if tag["name"] != f"v{version}"]
+    tags.sort(key=lambda x: version_to_float(x.get("name", "0")), reverse=True)
+
+    # If there are no tags, return None
+    if len(tags) == 0:
+        return None, None
+    else:
+        return tags[0]["commit"]["sha"], clean_version(tags[0]["name"], keep="ab")
+
+def get_commits_since_tag(
+    owner: str, project_name: str, latest_tag_sha: str|None, headers: dict[str, str]
+) -> list[dict[str, Any]]:
+    """ Get commits since last tag
+
+    Args:
+        owner (str): GitHub username
+        project_name (str): Name of the GitHub repository
+        latest_tag_sha (str|None): SHA of the latest tag commit
+        headers (dict[str, str]): Headers for GitHub API requests
+    Returns:
+        list[dict]: List of commits since the last tag
+    """
+    # Get the commits URL and parameters
+    import requests
+    commits_url: str = f"{PROJECT_ENDPOINT}/{owner}/{project_name}/commits"
+    commits_params: dict[str, str] = {"per_page": "100"}
+
+    # Initialize tag_date as None
+    tag_date: str|None = None  # type: ignore
+
+    # If there is a latest tag, use it to get the commits since the tag date
+    if latest_tag_sha:
+
+        # Get the date of the latest tag
+        tag_commit_url = f"{PROJECT_ENDPOINT}/{owner}/{project_name}/commits/{latest_tag_sha}"
+        tag_response = requests.get(tag_commit_url, headers=headers)
+        handle_response(tag_response, "Failed to get tag commit")
+        tag_date: str = tag_response.json()["commit"]["committer"]["date"]
+
+        # Use the date as the 'since' parameter to get all commits after that date
+        commits_params["since"] = tag_date
+
+    # Paginate through all commits
+    commits: list[dict[str, Any]] = []
+    page = 1
+    while True:
+        params = commits_params.copy()
+        params["page"] = str(page)
+        response = requests.get(commits_url, headers=headers, params=params)
+        handle_response(response, "Failed to get commits")
+        page_commits = response.json()
+        if not page_commits:
+            break
+        commits.extend(page_commits)
+        if len(page_commits) < 100:
+            break
+        page += 1
+
+    # Filter commits only if we have a tag_date
+    if tag_date:
+        commits = [c for c in commits if c["commit"]["committer"]["date"] != tag_date]
+    return commits
+
+def generate_changelog(
+    commits: list[dict[str, Any]], owner: str, project_name: str, latest_tag_version: str|None, version: str
+) -> str:
+    """ Generate changelog from commits. They must follow the conventional commits convention.
+
+    Convention format: <type>: <description> or <type>(<sub-category>): <description>
+
+    Args:
+        commits (list[dict]): List of commits to generate changelog from
+        owner (str): GitHub username
+        project_name (str): Name of the GitHub repository
+        latest_tag_version (str|None): Version number of the latest tag
+        version (str): Current version being released
+    Returns:
+        str: Generated changelog text
+    Source:
+        https://www.conventionalcommits.org/en/v1.0.0/
+    """
+    # Initialize the commit groups
+    commit_groups: dict[str, list[tuple[str, str, str | None]]] = {}
+
+    # Iterate over the commits
+    for commit in commits:
+        message: str = commit["commit"]["message"].split("\n")[0]
+        sha: str = commit["sha"]
+
+        # If the message contains a colon, split the message into a type and a description
+        if ":" in message:
+            commit_type_part, desc = message.split(":", 1)
+
+            # Check for breaking change indicator (!)
+            is_breaking: bool = False
+            if "!" in commit_type_part:
+                is_breaking = True
+                commit_type_part = commit_type_part.replace("!", "")
+
+            # Extract sub-category if present (e.g., 'feat(Project)' -> 'feat', 'Project')
+            sub_category: str|None = None
+            if "(" in commit_type_part and ")" in commit_type_part:
+                # Extract the base type (before parentheses)
+                commit_type: str = commit_type_part.split('(')[0].split('/')[0]
+                # Extract the sub-category (between parentheses)
+                sub_category = commit_type_part.split('(')[1].split(')')[0]
+            else:
+                # No sub-category, just clean the type
+                commit_type: str = commit_type_part.split('/')[0]
+
+            # Clean the type to only keep letters
+            commit_type = "".join(c for c in commit_type.lower().strip() if c in "abcdefghijklmnopqrstuvwxyz")
+            commit_type = COMMIT_TYPES.get(commit_type, commit_type.title())
+
+            # Prepend emoji if breaking change
+            formatted_desc = f"🚨 {desc.strip()}" if is_breaking else desc.strip()
+
+            # Add the commit to the commit groups
+            if commit_type not in commit_groups:
+                commit_groups[commit_type] = []
+            commit_groups[commit_type].append((formatted_desc, sha, sub_category))
+
+    # Initialize the changelog
+    changelog: str = "## Changelog\n\n"
+
+    # Iterate over the commit groups
+    for commit_type in sorted(commit_groups.keys()):
+        changelog += f"### {commit_type}\n"
+
+        # Group commits by sub-category
+        sub_category_groups: dict[str|None, list[tuple[str, str, str|None]]] = {}
+        for desc, sha, sub_category in commit_groups[commit_type]:
+            if sub_category not in sub_category_groups:
+                sub_category_groups[sub_category] = []
+            sub_category_groups[sub_category].append((desc, sha, sub_category))
+
+        # Sort sub-categories (None comes first, then alphabetical)
+        sorted_sub_categories = sorted(
+            sub_category_groups.keys(),
+            key=lambda x: (x is None, x or "")
+        )
+
+        # Iterate over sub-categories
+        for sub_category in sorted_sub_categories:
+
+            # Add commits for this sub-category
+            for desc, sha, _ in reversed(sub_category_groups[sub_category]):
+
+                # Prepend sub-category to description if present
+                if sub_category:
+                    words: list[str] = [
+                        word[0].upper() + word[1:]  # We don't use title() because we don't want to lowercase any letter
+                        for word in sub_category.replace('_', ' ').split()
+                    ]
+                    formatted_sub_category: str = ' '.join(words)
+                    formatted_desc = f"[{formatted_sub_category}] {desc}"
+                else:
+                    formatted_desc = desc
+                changelog += f"- {formatted_desc} ([{sha[:7]}](https://github.com/{owner}/{project_name}/commit/{sha}))\n"
+
+        changelog += "\n"
+
+    # Add the full changelog link if there is a latest tag and return the changelog
+    if latest_tag_version:
+        changelog += f"**Full Changelog**: https://github.com/{owner}/{project_name}/compare/v{latest_tag_version}...v{version}\n"
+    return changelog
+
+def create_tag(owner: str, project_name: str, version: str, headers: dict[str, str]) -> None:
+    """ Create a new tag
+
+    Args:
+        owner (str): GitHub username
+        project_name (str): Name of the GitHub repository
+        version (str): Version for the new tag
+        headers (dict[str, str]): Headers for GitHub API requests
+    """
+    # Message and prepare urls
+    import requests
+    progress(f"Creating tag v{version}")
+    create_tag_url: str = f"{PROJECT_ENDPOINT}/{owner}/{project_name}/git/refs"
+    latest_commit_url: str = f"{PROJECT_ENDPOINT}/{owner}/{project_name}/git/refs/heads/main"
+
+    # Get the latest commit SHA
+    commit_response: requests.Response = requests.get(latest_commit_url, headers=headers)
+    handle_response(commit_response, "Failed to get latest commit")
+    commit_sha: str = commit_response.json()["object"]["sha"]
+
+    # Create the tag
+    tag_data: dict[str, str] = {
+        "ref": f"refs/tags/v{version}",
+        "sha": commit_sha
+    }
+    response: requests.Response = requests.post(create_tag_url, headers=headers, json=tag_data)
+    handle_response(response, "Failed to create tag")
+
+def create_release(owner: str, project_name: str, version: str, changelog: str, headers: dict[str, str]) -> int:
+    """ Create a new release
+
+    Args:
+        owner (str): GitHub username
+        project_name (str): Name of the GitHub repository
+        version (str): Version for the new release
+        changelog (str): Changelog text for the release
+        headers (dict[str, str]): Headers for GitHub API requests
+    Returns:
+        int: ID of the created release
+    """
+    # Message and prepare urls
+    import requests
+    progress(f"Creating release v{version}")
+    release_url: str = f"{PROJECT_ENDPOINT}/{owner}/{project_name}/releases"
+    release_data: dict[str, str|bool] = {
+        "tag_name": f"v{version}",
+        "name": f"{project_name} [v{version}]",
+        "body": changelog,
+        "draft": False,
+        "prerelease": False
+    }
+
+    # Create the release and return the release ID
+    response: requests.Response = requests.post(release_url, headers=headers, json=release_data)
+    handle_response(response, "Failed to create release")
+    return response.json()["id"]
+
+def upload_assets(
+    owner: str, project_name: str, release_id: int, build_folder: str, headers: dict[str, str], endswith: list[str]
+) -> None:
+    """ Upload release assets
+
+    Args:
+        owner (str): GitHub username
+        project_name (str): Name of the GitHub repository
+        release_id (int): ID of the release to upload assets to
+        build_folder (str): Folder containing assets to upload
+        headers (dict[str, str]): Headers for GitHub API requests
+        endswith (list[str]): List of files to upload to the release
+            (every file ending with one of these strings will be uploaded)
+    """
+    endswith_tuple: tuple[str, ...] = tuple(endswith)
+
+    # If there is no build folder, return
+    if not build_folder:
+        return
+    progress("Uploading assets")
+
+    # Get the release details
+    import requests
+    release_url: str = f"{PROJECT_ENDPOINT}/{owner}/{project_name}/releases/{release_id}"
+    response: requests.Response = requests.get(release_url, headers=headers)
+    handle_response(response, "Failed to get release details")
+    upload_url_template: str = response.json()["upload_url"]
+    upload_url_base: str = upload_url_template.split("{", maxsplit=1)[0]
+
+    # Iterate over the files in the build folder
+    for file in os.listdir(build_folder):
+        if file.endswith(endswith_tuple):
+            file_path: str = f"{clean_path(build_folder)}/{file}"
+            with open(file_path, "rb") as f:
+
+                # Prepare the headers and params
+                headers_with_content: dict[str, str] = {
+                    **headers,
+                    "Content-Type": "application/zip"
+                }
+                params: dict[str, str] = {"name": file}
+
+                # Upload the file
+                response: requests.Response = requests.post(
+                    upload_url_base,
+                    headers=headers_with_content,
+                    params=params,
+                    data=f.read()
+                )
+                handle_response(response, f"Failed to upload {file}")
+                progress(f"Uploaded {file}")
+
+@measure_time(message="Uploading to GitHub took")
+@handle_error()
+def upload_to_github(credentials: dict[str, Any], github_config: dict[str, Any]) -> str:
+    """ Upload the project to GitHub using the credentials and the configuration
+
+    Args:
+        credentials (dict[str, Any]): Credentials for the GitHub API
+        github_config (dict[str, Any]): Configuration for the GitHub project
+    Returns:
+        str: Generated changelog text
+    Examples:
+
+        .. code-block:: python
+
+            > upload_to_github(
+                credentials={
+                    "github": {
+                        "api_key": "ghp_...",
+                        "username": "Stoupy"
+                    }
+                },
+                github_config={
+                    "project_name": "stouputils",
+                    "version": "1.0.0",
+                    "build_folder": "build",
+                    "endswith": [".zip"]
+                }
+            )
+    """
+    import requests  # type: ignore  # noqa: F401
+
+    # Validate credentials and configuration
+    owner, headers = validate_credentials(credentials)
+    project_name, version, build_folder, endswith = validate_config(github_config)
+
+    # Handle existing tag
+    can_create: bool = handle_existing_tag(owner, project_name, version, headers)
+
+    # Get the latest tag and commits since the tag
+    latest_tag_sha, latest_tag_version = get_latest_tag(owner, project_name, version, headers)
+    commits: list[dict[str, Any]] = get_commits_since_tag(owner, project_name, latest_tag_sha, headers)
+    changelog: str = generate_changelog(commits, owner, project_name, latest_tag_version, version)
+
+    # Create the tag and release if needed
+    if can_create:
+        create_tag(owner, project_name, version, headers)
+        release_id: int = create_release(owner, project_name, version, changelog, headers)
+        upload_assets(owner, project_name, release_id, build_folder, headers, endswith)
+        info(f"Project '{project_name}' updated on GitHub!")
+    return changelog
+
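
For readers skimming the diff, here is a minimal sketch (not part of the released code) of how generate_changelog above groups conventional commits. The fake commit dictionaries only carry the two fields the function reads ("sha" and "commit" -> "message"); the SHAs, messages, and version numbers are illustrative placeholders.

    # Illustrative sketch: feed a few conventional-commit messages through generate_changelog.
    # The function is pure (no network calls), so this runs offline.
    from stouputils.continuous_delivery.github import generate_changelog

    fake_commits = [
        {"sha": "a" * 40, "commit": {"message": "feat(Parser): add YAML support"}},
        {"sha": "b" * 40, "commit": {"message": "fix!: crash when config is empty"}},
        {"sha": "c" * 40, "commit": {"message": "docs: update README"}},
    ]

    print(generate_changelog(
        commits=fake_commits,
        owner="Stoupy51",
        project_name="stouputils",
        latest_tag_version="1.13.0",
        version="1.14.0",
    ))

Given the code above, the output starts with "## Changelog", then one "### <group>" heading per mapped commit type ("Bug Fixes", "Documentation", "Features"), a 🚨 marker on the breaking fix, a "[Parser]" prefix taken from the sub-category, short-SHA commit links, and a closing Full Changelog compare link.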

stouputils/continuous_delivery/pypi.py
@@ -0,0 +1,130 @@
+""" This module contains utilities for PyPI.
+(Using build and twine packages)
+
+- pypi_full_routine: Upload the most recent file(s) to PyPI after updating pip and required packages and building the package (using build and twine)
+- pypi_full_routine_using_uv: Full build and publish routine using 'uv' command line tool
+
+.. image:: https://raw.githubusercontent.com/Stoupy51/stouputils/refs/heads/main/assets/continuous_delivery/pypi_module.gif
+    :alt: stouputils pypi examples
+"""
+
+# Imports
+import os
+import sys
+from collections.abc import Callable
+from typing import Any
+
+from ..decorators import LogLevels, handle_error
+from .pyproject import read_pyproject
+
+
+def update_pip_and_required_packages() -> int:
+    """ Update pip and required packages.
+
+    Returns:
+        int: Return code of the os.system call.
+    """
+    return os.system(f"{sys.executable} -m pip install --upgrade pip setuptools build twine pkginfo packaging")
+
+def build_package() -> int:
+    """ Build the package.
+
+    Returns:
+        int: Return code of the os.system call.
+    """
+    return os.system(f"{sys.executable} -m build")
+
+def upload_package(repository: str, filepath: str) -> int:
+    """ Upload the package to PyPI.
+
+    Args:
+        repository (str): Repository to upload to.
+        filepath (str): Path to the file to upload.
+
+    Returns:
+        int: Return code of the os.system call.
+    """
+    return os.system(f"{sys.executable} -m twine upload --verbose -r {repository} {filepath}")
+
+@handle_error(message="Error while doing the pypi full routine", error_log=LogLevels.ERROR_TRACEBACK)
+def pypi_full_routine(
+    repository: str,
+    dist_directory: str,
+    last_files: int = 1,
+    endswith: str = ".tar.gz",
+
+    update_all_function: Callable[[], int] = update_pip_and_required_packages,
+    build_package_function: Callable[[], int] = build_package,
+    upload_package_function: Callable[[str, str], int] = upload_package,
+) -> None:
+    """ Upload the most recent file(s) to PyPI after updating pip and required packages and building the package.
+
+    Args:
+        repository (str): Repository to upload to.
+        dist_directory (str): Directory to upload from.
+        last_files (int): Number of most recent files to upload. Defaults to 1.
+        endswith (str): End of the file name to upload. Defaults to ".tar.gz".
+        update_all_function (Callable[[], int]): Function to update pip and required packages.
+            Defaults to :func:`update_pip_and_required_packages`.
+        build_package_function (Callable[[], int]): Function to build the package.
+            Defaults to :func:`build_package`.
+        upload_package_function (Callable[[str, str], int]): Function to upload the package.
+            Defaults to :func:`upload_package`.
+
+    Returns:
+        int: Return code of the command.
+    """
+    if update_all_function() != 0:
+        raise Exception("Error while updating pip and required packages")
+
+    if build_package_function() != 0:
+        raise Exception("Error while building the package")
+
+    # Get list of tar.gz files in dist directory sorted by modification time
+    files: list[str] = sorted(
+        [x for x in os.listdir(dist_directory) if x.endswith(endswith)],  # Get list of tar.gz files in dist directory
+        key=lambda x: os.path.getmtime(f"{dist_directory}/{x}"),  # Sort by modification time
+        reverse=True  # Sort in reverse order
+    )
+
+    # Upload the most recent file(s)
+    for file in files[:last_files]:
+        upload_package_function(repository, f"{dist_directory}/{file}")
+
+def pypi_full_routine_using_uv() -> None:
+    """ Full build and publish routine using 'uv' command line tool.
+
+    Steps:
+        1. Generate stubs unless '--no-stubs' is passed
+        2. Increment version in pyproject.toml (patch by default, minor if 'minor' is passed as last argument, 'major' if 'major' is passed)
+        3. Build the package using 'uv build'
+        4. Upload the most recent file to PyPI using 'uv publish'
+    """
+    # Get package name from pyproject.toml
+    pyproject_data: dict[str, Any] = read_pyproject("pyproject.toml")
+    package_name: str = pyproject_data["project"]["name"]
+    package_dir: str = package_name
+    if not os.path.isdir(package_dir):
+        package_dir = "src/" + package_name
+
+    # Generate stubs unless '--no-stubs' is passed
+    if "--no-stubs" not in sys.argv and "--no_stubs" not in sys.argv:
+        from .stubs import stubs_full_routine
+        stubs_full_routine(package_name, output_directory=package_dir, clean_before=True)
+
+    # Increment version in pyproject.toml
+    if "--no-bump" not in sys.argv and "--no_bump" not in sys.argv:
+        increment: str = "patch" if sys.argv[-1] not in ("minor", "major") else sys.argv[-1]
+        if os.system(f"uv version --bump {increment} --frozen") != 0:
+            raise Exception("Error while incrementing version using 'uv version'")
+
+    # Build the package using 'uv build'
+    import shutil
+    shutil.rmtree("dist", ignore_errors=True)
+    if os.system(f"{sys.executable} -m uv build") != 0:
+        raise Exception("Error while building the package using 'uv build'")
+
+    # Upload the most recent file to PyPI using 'uv publish'
+    if os.system(f"{sys.executable} -m uv publish") != 0:
+        raise Exception("Error while publishing the package using 'uv publish'")
+
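
Finally, a sketch of one way the two modules in this diff could be wired together in a project's own release script; the token, username, version, empty build_folder, and the "dist"/"pypi" names are placeholders for illustration, and upload_to_github performs real GitHub API calls, so this is not intended to be run as-is.

    # Illustrative only: combine the GitHub release helper with the PyPI routine.
    from stouputils.continuous_delivery.github import upload_to_github
    from stouputils.continuous_delivery.pypi import pypi_full_routine

    changelog: str = upload_to_github(
        credentials={"github": {"api_key": "ghp_...", "username": "Stoupy51"}},
        github_config={
            "project_name": "stouputils",
            "version": "1.14.0",
            "build_folder": "",   # empty string: upload_assets returns early, no release assets attached
            "endswith": [],
        },
    )

    # Update pip/build/twine, run `python -m build`, then upload the newest .tar.gz from dist/ with twine.
    pypi_full_routine(repository="pypi", dist_directory="dist", last_files=1)

For a uv-based workflow, pypi_full_routine_using_uv() is the zero-argument alternative shown above: it reads the package name from pyproject.toml, optionally regenerates stubs, bumps the version, and builds and publishes with 'uv'.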