micropython-stubber 1.20.5__py3-none-any.whl → 1.20.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {micropython_stubber-1.20.5.dist-info → micropython_stubber-1.20.6.dist-info}/LICENSE +30 -30
- {micropython_stubber-1.20.5.dist-info → micropython_stubber-1.20.6.dist-info}/METADATA +1 -1
- micropython_stubber-1.20.6.dist-info/RECORD +159 -0
- mpflash/README.md +184 -184
- mpflash/libusb_flash.ipynb +203 -203
- mpflash/mpflash/add_firmware.py +98 -98
- mpflash/mpflash/ask_input.py +236 -236
- mpflash/mpflash/bootloader/__init__.py +37 -36
- mpflash/mpflash/bootloader/manual.py +102 -102
- mpflash/mpflash/bootloader/micropython.py +10 -10
- mpflash/mpflash/bootloader/touch1200.py +45 -45
- mpflash/mpflash/cli_download.py +129 -129
- mpflash/mpflash/cli_flash.py +219 -219
- mpflash/mpflash/cli_group.py +98 -98
- mpflash/mpflash/cli_list.py +81 -81
- mpflash/mpflash/cli_main.py +41 -41
- mpflash/mpflash/common.py +164 -164
- mpflash/mpflash/config.py +47 -47
- mpflash/mpflash/connected.py +74 -74
- mpflash/mpflash/download.py +360 -360
- mpflash/mpflash/downloaded.py +129 -129
- mpflash/mpflash/errors.py +9 -9
- mpflash/mpflash/flash.py +52 -52
- mpflash/mpflash/flash_esp.py +59 -59
- mpflash/mpflash/flash_stm32.py +24 -24
- mpflash/mpflash/flash_stm32_cube.py +111 -111
- mpflash/mpflash/flash_stm32_dfu.py +101 -101
- mpflash/mpflash/flash_uf2.py +67 -67
- mpflash/mpflash/flash_uf2_boardid.py +15 -15
- mpflash/mpflash/flash_uf2_linux.py +123 -123
- mpflash/mpflash/flash_uf2_macos.py +34 -34
- mpflash/mpflash/flash_uf2_windows.py +34 -34
- mpflash/mpflash/list.py +89 -89
- mpflash/mpflash/logger.py +41 -41
- mpflash/mpflash/mpboard_id/__init__.py +93 -93
- mpflash/mpflash/mpboard_id/add_boards.py +255 -255
- mpflash/mpflash/mpboard_id/board.py +37 -37
- mpflash/mpflash/mpboard_id/board_id.py +86 -86
- mpflash/mpflash/mpboard_id/store.py +43 -43
- mpflash/mpflash/mpremoteboard/__init__.py +221 -221
- mpflash/mpflash/mpremoteboard/mpy_fw_info.py +141 -141
- mpflash/mpflash/mpremoteboard/runner.py +140 -140
- mpflash/mpflash/uf2disk.py +12 -12
- mpflash/mpflash/vendor/basicgit.py +288 -288
- mpflash/mpflash/vendor/click_aliases.py +91 -91
- mpflash/mpflash/vendor/dfu.py +165 -165
- mpflash/mpflash/vendor/pydfu.py +605 -605
- mpflash/mpflash/vendor/readme.md +2 -2
- mpflash/mpflash/vendor/versions.py +119 -117
- mpflash/mpflash/worklist.py +170 -170
- mpflash/poetry.lock +1588 -1588
- mpflash/pyproject.toml +60 -60
- mpflash/stm32_udev_rules.md +62 -62
- stubber/__init__.py +3 -3
- stubber/basicgit.py +294 -288
- stubber/board/board_info.csv +193 -193
- stubber/board/boot.py +34 -34
- stubber/board/createstubs.py +986 -986
- stubber/board/createstubs_db.py +825 -825
- stubber/board/createstubs_db_min.py +331 -331
- stubber/board/createstubs_db_mpy.mpy +0 -0
- stubber/board/createstubs_lvgl.py +741 -741
- stubber/board/createstubs_lvgl_min.py +741 -741
- stubber/board/createstubs_mem.py +766 -766
- stubber/board/createstubs_mem_min.py +306 -306
- stubber/board/createstubs_mem_mpy.mpy +0 -0
- stubber/board/createstubs_min.py +294 -294
- stubber/board/createstubs_mpy.mpy +0 -0
- stubber/board/fw_info.py +141 -141
- stubber/board/info.py +183 -183
- stubber/board/main.py +19 -19
- stubber/board/modulelist.txt +247 -247
- stubber/board/pyrightconfig.json +34 -34
- stubber/bulk/mcu_stubber.py +454 -454
- stubber/codemod/_partials/__init__.py +48 -48
- stubber/codemod/_partials/db_main.py +147 -147
- stubber/codemod/_partials/lvgl_main.py +77 -77
- stubber/codemod/_partials/modules_reader.py +80 -80
- stubber/codemod/add_comment.py +53 -53
- stubber/codemod/add_method.py +65 -65
- stubber/codemod/board.py +317 -317
- stubber/codemod/enrich.py +145 -145
- stubber/codemod/merge_docstub.py +284 -284
- stubber/codemod/modify_list.py +54 -54
- stubber/codemod/utils.py +57 -57
- stubber/commands/build_cmd.py +94 -94
- stubber/commands/cli.py +51 -51
- stubber/commands/clone_cmd.py +66 -66
- stubber/commands/config_cmd.py +29 -29
- stubber/commands/enrich_folder_cmd.py +70 -70
- stubber/commands/get_core_cmd.py +69 -69
- stubber/commands/get_docstubs_cmd.py +87 -87
- stubber/commands/get_frozen_cmd.py +112 -112
- stubber/commands/get_mcu_cmd.py +56 -56
- stubber/commands/merge_cmd.py +66 -66
- stubber/commands/publish_cmd.py +119 -119
- stubber/commands/stub_cmd.py +30 -30
- stubber/commands/switch_cmd.py +54 -54
- stubber/commands/variants_cmd.py +48 -48
- stubber/cst_transformer.py +178 -178
- stubber/data/board_info.csv +193 -193
- stubber/data/board_info.json +1729 -1729
- stubber/data/micropython_tags.csv +15 -15
- stubber/data/requirements-core-micropython.txt +38 -38
- stubber/data/requirements-core-pycopy.txt +39 -39
- stubber/downloader.py +36 -36
- stubber/freeze/common.py +68 -68
- stubber/freeze/freeze_folder.py +69 -69
- stubber/freeze/freeze_manifest_2.py +113 -113
- stubber/freeze/get_frozen.py +127 -127
- stubber/get_cpython.py +101 -101
- stubber/get_lobo.py +59 -59
- stubber/minify.py +418 -418
- stubber/publish/bump.py +86 -86
- stubber/publish/candidates.py +262 -262
- stubber/publish/database.py +18 -18
- stubber/publish/defaults.py +45 -45
- stubber/publish/enums.py +24 -24
- stubber/publish/helpers.py +29 -29
- stubber/publish/merge_docstubs.py +130 -130
- stubber/publish/missing_class_methods.py +49 -49
- stubber/publish/package.py +146 -146
- stubber/publish/pathnames.py +51 -51
- stubber/publish/publish.py +120 -120
- stubber/publish/pypi.py +38 -38
- stubber/publish/stubpackage.py +1029 -1029
- stubber/rst/__init__.py +9 -9
- stubber/rst/classsort.py +77 -77
- stubber/rst/lookup.py +530 -530
- stubber/rst/output_dict.py +401 -401
- stubber/rst/reader.py +822 -822
- stubber/rst/report_return.py +69 -69
- stubber/rst/rst_utils.py +540 -540
- stubber/stubber.py +38 -38
- stubber/stubs_from_docs.py +90 -90
- stubber/tools/manifestfile.py +610 -610
- stubber/tools/readme.md +5 -5
- stubber/update_fallback.py +117 -117
- stubber/update_module_list.py +123 -123
- stubber/utils/__init__.py +5 -5
- stubber/utils/config.py +127 -127
- stubber/utils/makeversionhdr.py +54 -54
- stubber/utils/manifest.py +92 -92
- stubber/utils/post.py +79 -79
- stubber/utils/repos.py +157 -154
- stubber/utils/stubmaker.py +139 -139
- stubber/utils/typed_config_toml.py +77 -77
- stubber/utils/versions.py +128 -120
- stubber/variants.py +106 -106
- micropython_stubber-1.20.5.dist-info/RECORD +0 -159
- {micropython_stubber-1.20.5.dist-info → micropython_stubber-1.20.6.dist-info}/WHEEL +0 -0
- {micropython_stubber-1.20.5.dist-info → micropython_stubber-1.20.6.dist-info}/entry_points.txt +0 -0
stubber/publish/stubpackage.py
CHANGED
@@ -1,1029 +1,1029 @@
|
|
1
|
-
"""Create a stub-only package for a specific version of micropython"""
|
2
|
-
|
3
|
-
import hashlib
|
4
|
-
import json
|
5
|
-
import shutil
|
6
|
-
import subprocess
|
7
|
-
from pathlib import Path
|
8
|
-
from typing import Any, Dict, List, Optional, Tuple, Union
|
9
|
-
|
10
|
-
import tenacity
|
11
|
-
|
12
|
-
from stubber.basicgit import get_git_describe
|
13
|
-
from stubber.publish.helpers import get_module_docstring
|
14
|
-
|
15
|
-
try:
|
16
|
-
import tomllib # type: ignore
|
17
|
-
except ModuleNotFoundError:
|
18
|
-
import tomli as tomllib # type: ignore
|
19
|
-
|
20
|
-
from typing import NewType
|
21
|
-
|
22
|
-
import tomli_w
|
23
|
-
from loguru import logger as log
|
24
|
-
from packaging.version import Version, parse
|
25
|
-
from pysondb import PysonDB
|
26
|
-
|
27
|
-
from stubber.publish.bump import bump_version
|
28
|
-
from stubber.publish.defaults import GENERIC_U, default_board
|
29
|
-
from stubber.publish.enums import StubSource
|
30
|
-
from stubber.publish.pypi import Version, get_pypi_versions
|
31
|
-
from stubber.utils.config import CONFIG
|
32
|
-
from stubber.utils.versions import SET_PREVIEW, V_PREVIEW, clean_version
|
33
|
-
|
34
|
-
Status = NewType("Status", Dict[str, Union[str, None]])
|
35
|
-
StubSources = List[Tuple[StubSource, Path]]
|
36
|
-
|
37
|
-
# indicates which stubs will be skipped when copying for these stub sources
|
38
|
-
STUBS_COPY_FILTER = {
|
39
|
-
StubSource.FROZEN: [
|
40
|
-
"espnow", # merged stubs + documentation of the espnow module is better than the info in the forzen stubs
|
41
|
-
],
|
42
|
-
StubSource.FIRMWARE: [
|
43
|
-
"builtins",
|
44
|
-
"collections", # collections must be in stdlib
|
45
|
-
],
|
46
|
-
StubSource.MERGED: [
|
47
|
-
"collections", # collections must be in stdlib
|
48
|
-
],
|
49
|
-
}
|
50
|
-
|
51
|
-
# these modules will be replaced by a simple import statement to import from stdlib
|
52
|
-
STDLIB_UMODULES = ["ucollections"]
|
53
|
-
|
54
|
-
|
55
|
-
class VersionedPackage(object):
|
56
|
-
"""
|
57
|
-
Represents a versioned package.
|
58
|
-
|
59
|
-
Attributes:
|
60
|
-
package_name (str): The name of the package.
|
61
|
-
mpy_version (str): The MicroPython version.
|
62
|
-
|
63
|
-
Methods:
|
64
|
-
__init__(self, package_name: str, mpy_version: str): Initializes a new instance of the VersionedPackage class.
|
65
|
-
is_preview(self): Checks if the package is a preview version.
|
66
|
-
pkg_version(self) -> str: Returns the version of the package.
|
67
|
-
pkg_version(self, version: str) -> None: Sets the version of the package.
|
68
|
-
get_prerelease_package_version(self, production: bool = False) -> str: Gets the next prerelease version for the package.
|
69
|
-
get_next_package_version(self, prod: bool = False, rc=False) -> str: Gets the next version for the package.
|
70
|
-
next_pkg_version(self, production: bool) -> str: Gets the next version for the package.
|
71
|
-
bump(self, *, rc: int = 0) -> str: Bumps the postrelease version of the package.
|
72
|
-
"""
|
73
|
-
|
74
|
-
def __init__(self, package_name: str, *, mpy_version: str):
|
75
|
-
super().__init__()
|
76
|
-
self.package_name: str = package_name
|
77
|
-
self.mpy_version: str = mpy_version
|
78
|
-
self._pkg_version: str = mpy_version
|
79
|
-
|
80
|
-
def __str__(self) -> str:
|
81
|
-
return f"{self.package_name}=={self.mpy_version}"
|
82
|
-
|
83
|
-
def __repr__(self) -> str:
|
84
|
-
return f"{self.package_name}=={self.mpy_version}"
|
85
|
-
|
86
|
-
def __eq__(self, o: object) -> bool:
|
87
|
-
return str(self) == str(o)
|
88
|
-
|
89
|
-
def __hash__(self) -> int:
|
90
|
-
return hash(str(self))
|
91
|
-
|
92
|
-
@property
|
93
|
-
def pkg_version(self) -> str:
|
94
|
-
"return the version of the package"
|
95
|
-
return self._pkg_version
|
96
|
-
|
97
|
-
@pkg_version.setter
|
98
|
-
def pkg_version(self, version: str) -> None:
|
99
|
-
"set the version of the package"
|
100
|
-
self._pkg_version = version
|
101
|
-
|
102
|
-
def next_package_version(self, production: bool) -> str:
|
103
|
-
# sourcery skip: assign-if-exp
|
104
|
-
"""Get the next version for the package"""
|
105
|
-
if self.is_preview():
|
106
|
-
return self._get_next_preview_package_version(production)
|
107
|
-
else:
|
108
|
-
return self._get_next_package_version(production)
|
109
|
-
|
110
|
-
def is_preview(self):
|
111
|
-
return self.mpy_version in SET_PREVIEW or V_PREVIEW in self.mpy_version
|
112
|
-
|
113
|
-
def _get_next_preview_package_version(self, production: bool = False) -> str:
|
114
|
-
"""
|
115
|
-
Get the next prerelease version for the package.
|
116
|
-
this is used for preview versions of micropython (-preview, formerly known as 'latest')
|
117
|
-
"""
|
118
|
-
rc = 1
|
119
|
-
if not (describe := get_git_describe(CONFIG.mpy_path.as_posix())):
|
120
|
-
return "99.99.99post99"
|
121
|
-
# use versiontag and the number of commits since the last tag
|
122
|
-
# "v1.19.1-841-g3446"
|
123
|
-
# 'v1.20.0-dirty'
|
124
|
-
# 'v1.22.0-preview-19-g8eb7721b4'
|
125
|
-
parts = describe.split("-", 3)
|
126
|
-
ver = parts[0]
|
127
|
-
if len(parts) > 1:
|
128
|
-
rc = parts[1] if parts[1].isdigit() else parts[2] if len(parts) > 2 and parts[2].isdigit() else 1
|
129
|
-
rc = int(rc)
|
130
|
-
base = bump_version(Version(ver), minor_bump=True) if parts[1] != V_PREVIEW else Version(ver)
|
131
|
-
return str(bump_version(base, rc=rc))
|
132
|
-
# raise ValueError("cannot determine next version number micropython")
|
133
|
-
|
134
|
-
def _get_next_package_version(self, prod: bool = False, rc=False) -> str:
|
135
|
-
"""Get the next version for the package."""
|
136
|
-
base = Version(self.pkg_version)
|
137
|
-
if pypi_versions := get_pypi_versions(self.package_name, production=prod, base=base):
|
138
|
-
# get the latest version from pypi
|
139
|
-
self.pkg_version = str(pypi_versions[-1])
|
140
|
-
else:
|
141
|
-
# no published package found , so we start at base version then bump 1 post release
|
142
|
-
self.pkg_version = Version(self.pkg_version).base_version
|
143
|
-
return self.bump()
|
144
|
-
|
145
|
-
def bump(self, *, rc: int = 0) -> str:
|
146
|
-
"""
|
147
|
-
bump the postrelease version of the package, and write the change to disk
|
148
|
-
if rc >= 1, the version is bumped to the specified release candidate
|
149
|
-
"""
|
150
|
-
try:
|
151
|
-
current = Version(self.pkg_version)
|
152
|
-
assert isinstance(current, Version)
|
153
|
-
# bump the version
|
154
|
-
self.pkg_version = str(bump_version(post_bump=True, current=current, rc=rc))
|
155
|
-
except Exception as e: # pragma: no cover
|
156
|
-
log.error(f"Error: {e}")
|
157
|
-
return self.pkg_version
|
158
|
-
|
159
|
-
|
160
|
-
class Builder(VersionedPackage):
|
161
|
-
"""
|
162
|
-
Builder class for creating and updating MicroPython stub packages.
|
163
|
-
|
164
|
-
Args:
|
165
|
-
package_name (str): The name of the package.
|
166
|
-
mpy_version (str, optional): The version of MicroPython. Defaults to "0.0.1".
|
167
|
-
port (str): The port for the package.
|
168
|
-
board (str, optional): The board for the package. Defaults to GENERIC_U.
|
169
|
-
description (str, optional): The description of the package. Defaults to "MicroPython stubs".
|
170
|
-
stubs (Optional[StubSources], optional): The stub sources for the package. Defaults to None.
|
171
|
-
|
172
|
-
Attributes:
|
173
|
-
package_name (str): The name of the package.
|
174
|
-
mpy_version (str): The version of MicroPython.
|
175
|
-
port (str): The port for the package.
|
176
|
-
board (str): The board for the package.
|
177
|
-
description (str): The description of the package.
|
178
|
-
stub_sources (Optional[StubSources]): The stub sources for the package.
|
179
|
-
hash (None): The hash of all the files in the package.
|
180
|
-
stub_hash (None): The hash of the stub files.
|
181
|
-
|
182
|
-
Properties:
|
183
|
-
package_path (Path): The package path based on the package name and version, relative to the publish folder.
|
184
|
-
toml_path (Path): The path to the `pyproject.toml` file.
|
185
|
-
pyproject (Union[Dict[str, Any], None]): The parsed pyproject.toml or None.
|
186
|
-
|
187
|
-
Methods:
|
188
|
-
create_update_pyproject_toml(): Create or update/overwrite a `pyproject.toml` file.
|
189
|
-
check(): Check if the package is valid.
|
190
|
-
clean(): Remove the stub files from the package folder.
|
191
|
-
copy_stubs(): Copy files from all listed stub folders to the package folder.
|
192
|
-
update_package_files(): Update the stub-only package for a specific version of MicroPython.
|
193
|
-
write_package_json(): Write the package.json file to disk.
|
194
|
-
to_dict(): Return the package as a dict to store in the jsondb.
|
195
|
-
from_dict(json_data: Dict): Load the package from a dict (from the jsondb).
|
196
|
-
calculate_hash(include_md: bool = True): Create a SHA1 hash of all files in the package.
|
197
|
-
update_hashes(): Update the package hashes.
|
198
|
-
is_changed(include_md: bool = True): Check if the package has changed.
|
199
|
-
"""
|
200
|
-
|
201
|
-
# BUF_SIZE is totally arbitrary,
|
202
|
-
BUF_SIZE = 65536 * 16 # lets read stuff in 16 x 64kb chunks!
|
203
|
-
|
204
|
-
def __init__(
|
205
|
-
self,
|
206
|
-
package_name: str,
|
207
|
-
*,
|
208
|
-
mpy_version: str = "0.0.1",
|
209
|
-
port: str,
|
210
|
-
board: str = GENERIC_U,
|
211
|
-
description: str = "MicroPython stubs",
|
212
|
-
stubs: Optional[StubSources] = None,
|
213
|
-
# json_data: Optional[Dict[str, Any]] = None,
|
214
|
-
): # port: str, board: str
|
215
|
-
super().__init__(package_name=package_name, mpy_version=mpy_version)
|
216
|
-
self._publish = True # intended for publishing
|
217
|
-
self.package_name = package_name
|
218
|
-
self.mpy_version = mpy_version
|
219
|
-
self.port = port
|
220
|
-
self.board = board
|
221
|
-
self.description = description
|
222
|
-
self.stub_sources = stubs or []
|
223
|
-
self.hash = None # intial hash
|
224
|
-
"""Hash of all the files in the package"""
|
225
|
-
self.stub_hash = None # intial hash
|
226
|
-
"""Hash of all .pyi files"""
|
227
|
-
|
228
|
-
@property
|
229
|
-
def package_path(self) -> Path:
|
230
|
-
"package path based on the package name and version and relative to the publish folder"
|
231
|
-
parts = self.package_name.split("-")
|
232
|
-
parts[1:1] = [clean_version(self.mpy_version, flat=True)]
|
233
|
-
return CONFIG.publish_path / "-".join(parts)
|
234
|
-
|
235
|
-
@property
|
236
|
-
def toml_path(self) -> Path:
|
237
|
-
"the path to the `pyproject.toml` file"
|
238
|
-
# todo: make sure this is always relative to the root path
|
239
|
-
return self.package_path / "pyproject.toml"
|
240
|
-
|
241
|
-
# -----------------------------------------------
|
242
|
-
@property
|
243
|
-
def pyproject(self) -> Union[Dict[str, Any], None]:
|
244
|
-
"parsed pyproject.toml or None"
|
245
|
-
pyproject = None
|
246
|
-
_toml = self.toml_path
|
247
|
-
if (_toml).exists():
|
248
|
-
with open(_toml, "rb") as f:
|
249
|
-
pyproject = tomllib.load(f)
|
250
|
-
return pyproject
|
251
|
-
|
252
|
-
@pyproject.setter
|
253
|
-
def pyproject(self, pyproject: Dict) -> None:
|
254
|
-
# check if the result is a valid toml file
|
255
|
-
try:
|
256
|
-
tomllib.loads(tomli_w.dumps(pyproject))
|
257
|
-
except tomllib.TOMLDecodeError as e:
|
258
|
-
print("Could not create a valid TOML file")
|
259
|
-
raise (e)
|
260
|
-
# make sure parent folder exists
|
261
|
-
_toml = self.toml_path
|
262
|
-
(_toml).parent.mkdir(parents=True, exist_ok=True)
|
263
|
-
with open(_toml, "wb") as output:
|
264
|
-
tomli_w.dump(pyproject, output)
|
265
|
-
|
266
|
-
# -----------------------------------------------
|
267
|
-
def create_update_pyproject_toml(self) -> None:
|
268
|
-
"""
|
269
|
-
create or update/overwrite a `pyproject.toml` file by combining a template file
|
270
|
-
with the given parameters.
|
271
|
-
"""
|
272
|
-
raise NotImplementedError("create_update_pyproject_toml not implemented")
|
273
|
-
|
274
|
-
# -----------------------------------------------
|
275
|
-
|
276
|
-
def check(self) -> bool:
|
277
|
-
"""Check if the package is valid, to be implemented by the subclass"""
|
278
|
-
return True
|
279
|
-
|
280
|
-
def clean(self) -> None:
|
281
|
-
"""
|
282
|
-
Remove the stub files from the package folder
|
283
|
-
|
284
|
-
This is used before update the stub package, to avoid lingering stub files,
|
285
|
-
and after the package has been built, to avoid needing to store files multiple times.
|
286
|
-
|
287
|
-
`.gitignore` cannot be used as this will prevent poetry from processing the files.
|
288
|
-
"""
|
289
|
-
# remove all *.py and *.pyi files in the folder
|
290
|
-
for wc in ["*.py", "*.pyi", "modules.json"]:
|
291
|
-
for f in (self.package_path).rglob(wc):
|
292
|
-
f.unlink()
|
293
|
-
|
294
|
-
def copy_stubs(self) -> None:
|
295
|
-
"""
|
296
|
-
Copy files from all listed stub folders to the package folder
|
297
|
-
the order of the stub folders is relevant as "last copy wins"
|
298
|
-
|
299
|
-
- 1 - Copy all MCU stubs/merged to the package folder
|
300
|
-
- 2 - copy the remaining stubs to the package folder
|
301
|
-
- 3 - remove *.py files from the package folder
|
302
|
-
"""
|
303
|
-
try:
|
304
|
-
# Check if all stub source folders exist
|
305
|
-
for stub_type, src_path in self.stub_sources:
|
306
|
-
if not (CONFIG.stub_path / src_path).exists():
|
307
|
-
raise FileNotFoundError(f"Could not find stub source folder {CONFIG.stub_path / src_path}")
|
308
|
-
|
309
|
-
# 1 - Copy the stubs to the package, directly in the package folder (no folders)
|
310
|
-
# for stub_type, fw_path in [s for s in self.stub_sources]:
|
311
|
-
for n in range(len(self.stub_sources)):
|
312
|
-
stub_type, src_path = self.stub_sources[n]
|
313
|
-
try:
|
314
|
-
log.debug(f"Copying {stub_type} from {src_path}")
|
315
|
-
self.copy_folder(stub_type, src_path)
|
316
|
-
except OSError as e:
|
317
|
-
if stub_type != StubSource.FROZEN:
|
318
|
-
raise FileNotFoundError(f"Could not find stub source folder {src_path}") from e
|
319
|
-
else:
|
320
|
-
log.debug(f"Error copying stubs from : {CONFIG.stub_path / src_path}, {e}")
|
321
|
-
finally:
|
322
|
-
# 3 - clean up a little bit
|
323
|
-
# delete all the .py files in the package folder if there is a corresponding .pyi file
|
324
|
-
for f in self.package_path.rglob("*.py"):
|
325
|
-
if f.with_suffix(".pyi").exists():
|
326
|
-
f.unlink()
|
327
|
-
self.update_umodules()
|
328
|
-
|
329
|
-
def update_umodules(self):
|
330
|
-
"""
|
331
|
-
Replace the STDLIB umodules with a simple import statement
|
332
|
-
in order to allow the typecheckers to resove the stdlib modules in the usual stdlib location.
|
333
|
-
"""
|
334
|
-
for f in self.package_path.rglob("*.pyi"):
|
335
|
-
if f.stem in STDLIB_UMODULES:
|
336
|
-
# read the docstring of the module
|
337
|
-
docstring = get_module_docstring(f) or ""
|
338
|
-
comment = "# import module from stdlib/module"
|
339
|
-
# replace the file with a simple import statement
|
340
|
-
f.write_text(f'"""\n{docstring}\n"""\n{comment}\nfrom {f.stem[1:]} import *')
|
341
|
-
|
342
|
-
def copy_folder(self, stub_type: StubSource, src_path: Path):
|
343
|
-
Path(self.package_path).mkdir(parents=True, exist_ok=True)
|
344
|
-
for item in (CONFIG.stub_path / src_path).rglob("*"):
|
345
|
-
if item.is_file():
|
346
|
-
# filter the 'poorly' decorated files
|
347
|
-
if stub_type in STUBS_COPY_FILTER and item.stem in STUBS_COPY_FILTER[stub_type]:
|
348
|
-
continue
|
349
|
-
|
350
|
-
target = Path(self.package_path) / item.relative_to(CONFIG.stub_path / src_path)
|
351
|
-
target.parent.mkdir(parents=True, exist_ok=True)
|
352
|
-
target.write_bytes(item.read_bytes())
|
353
|
-
|
354
|
-
def update_package_files(self) -> None:
|
355
|
-
"""
|
356
|
-
Update the stub-only package for a specific version of micropython
|
357
|
-
- cleans the package folder
|
358
|
-
- copies the stubs from the list of stubs.
|
359
|
-
- creates/updates the readme and the license file
|
360
|
-
"""
|
361
|
-
# create the package folder
|
362
|
-
self.package_path.mkdir(parents=True, exist_ok=True)
|
363
|
-
self.clean() # Delete any previous *.py? files
|
364
|
-
self.copy_stubs()
|
365
|
-
self.create_readme()
|
366
|
-
self.create_license()
|
367
|
-
|
368
|
-
def write_package_json(self) -> None:
|
369
|
-
"""write the package.json file to disk"""
|
370
|
-
# make sure folder exists
|
371
|
-
if not self.package_path.exists():
|
372
|
-
self.package_path.mkdir(parents=True, exist_ok=True)
|
373
|
-
# write the json to a file
|
374
|
-
with open(self.package_path / "package.json", "w") as f:
|
375
|
-
json.dump(self.to_dict(), f, indent=4)
|
376
|
-
|
377
|
-
def to_dict(self) -> dict:
|
378
|
-
"""return the package as a dict to store in the jsondb
|
379
|
-
|
380
|
-
need to simplify some of the Objects to allow serialization to json
|
381
|
-
- the paths to posix paths
|
382
|
-
- the version (semver) to a string
|
383
|
-
- toml file to list of lines
|
384
|
-
|
385
|
-
"""
|
386
|
-
return {
|
387
|
-
"name": self.package_name,
|
388
|
-
"mpy_version": self.mpy_version,
|
389
|
-
"publish": self._publish,
|
390
|
-
"pkg_version": str(self.pkg_version),
|
391
|
-
"path": self.package_path.name, # only store the folder name , as it is relative to the publish folder
|
392
|
-
"stub_sources": [(name, Path(path).as_posix()) for (name, path) in self.stub_sources],
|
393
|
-
"description": self.description,
|
394
|
-
"hash": self.hash,
|
395
|
-
"stub_hash": self.stub_hash,
|
396
|
-
}
|
397
|
-
|
398
|
-
def from_dict(self, json_data: Dict) -> None:
|
399
|
-
"""load the package from a dict (from the jsondb)"""
|
400
|
-
self.package_name = json_data["name"]
|
401
|
-
# self.package_path = Path(json_data["path"])
|
402
|
-
self.description = json_data["description"]
|
403
|
-
self.mpy_version = json_data["mpy_version"]
|
404
|
-
self._publish = json_data["publish"]
|
405
|
-
self.hash = json_data["hash"]
|
406
|
-
self.stub_hash = json_data["stub_hash"]
|
407
|
-
# create folder
|
408
|
-
if not self.package_path.exists():
|
409
|
-
self.package_path.mkdir(parents=True, exist_ok=True)
|
410
|
-
# create the pyproject.toml file
|
411
|
-
self.create_update_pyproject_toml()
|
412
|
-
# set pkg version after creating the toml file
|
413
|
-
self.pkg_version = json_data["pkg_version"]
|
414
|
-
self.stub_sources = []
|
415
|
-
for name, path in json_data["stub_sources"]:
|
416
|
-
if path.startswith("stubs/"):
|
417
|
-
path = path.replace("stubs/", "")
|
418
|
-
self.stub_sources.append((name, Path(path)))
|
419
|
-
|
420
|
-
def calculate_hash(self, include_md: bool = True) -> str:
|
421
|
-
# sourcery skip: reintroduce-else, swap-if-else-branches, use-named-expression
|
422
|
-
"""
|
423
|
-
Create a SHA1 hash of all files in the package, excluding the pyproject.toml file itself.
|
424
|
-
the hash is based on the content of the .py/.pyi and .md files in the package.
|
425
|
-
if include_md is False , the .md files are not hased, allowing the files in the packeges to be compared simply
|
426
|
-
As a single hash is created across all files, the files are sorted prior to hashing to ensure that the hash is stable.
|
427
|
-
|
428
|
-
Note: A changed hash will not indicate which of the files in the package have been changed.
|
429
|
-
"""
|
430
|
-
file_hash = hashlib.sha1()
|
431
|
-
# Stubs Only
|
432
|
-
files = list((self.package_path).rglob("**/*.pyi"))
|
433
|
-
if include_md:
|
434
|
-
files += (
|
435
|
-
[self.package_path / "LICENSE.md"]
|
436
|
-
+ [self.package_path / "README.md"]
|
437
|
-
# do not include [self.toml_file]
|
438
|
-
)
|
439
|
-
for file in sorted(files):
|
440
|
-
try:
|
441
|
-
# retry on file not found
|
442
|
-
self.add_file_hash(file, file_hash)
|
443
|
-
except FileNotFoundError:
|
444
|
-
log.warning(f"File not found {file}")
|
445
|
-
# ignore file not found errors to allow the hash to be created WHILE GIT / VIRUS SCANNERS HOLD LINGERING FILES
|
446
|
-
return file_hash.hexdigest()
|
447
|
-
|
448
|
-
@tenacity.retry(wait=tenacity.wait_fixed(0.2), stop=tenacity.stop_after_attempt(3))
|
449
|
-
def add_file_hash(self, file, file_hash):
|
450
|
-
"""
|
451
|
-
Adds the hash of a file to the given file hash object.
|
452
|
-
If an error occurs, the file is retried up to 3 times with a 0.2 second delay
|
453
|
-
|
454
|
-
Args:
|
455
|
-
file (str): The path to the file.
|
456
|
-
file_hash (hashlib._Hash): The file hash object to update.
|
457
|
-
|
458
|
-
Returns:
|
459
|
-
None
|
460
|
-
"""
|
461
|
-
with open(file, "rb") as f:
|
462
|
-
while True:
|
463
|
-
if data := f.read(Builder.BUF_SIZE):
|
464
|
-
file_hash.update(data)
|
465
|
-
else:
|
466
|
-
break
|
467
|
-
|
468
|
-
def update_hashes(self, ret=False) -> None:
|
469
|
-
"""Update the package hashes. Resets is_changed() to False"""
|
470
|
-
self.hash = self.calculate_hash()
|
471
|
-
self.stub_hash = self.calculate_hash(include_md=False)
|
472
|
-
|
473
|
-
def is_changed(self, include_md: bool = True) -> bool:
|
474
|
-
"""Check if the package has changed, based on the current and the stored hash.
|
475
|
-
The default checks the hash of all files, including the .md files.
|
476
|
-
"""
|
477
|
-
current = self.calculate_hash(include_md=include_md)
|
478
|
-
stored = self.hash if include_md else self.stub_hash
|
479
|
-
log.trace(f"changed = {self.hash != current} | Stored: {stored} | Current: {current}")
|
480
|
-
return stored != current
|
481
|
-
|
482
|
-
def create_license(self) -> None:
|
483
|
-
"""
|
484
|
-
Create a license file for the package
|
485
|
-
- copied from the template license file
|
486
|
-
"""
|
487
|
-
# copy the license file from the template to the package folder
|
488
|
-
# option : append other license files
|
489
|
-
shutil.copy(CONFIG.template_path / "LICENSE.md", self.package_path)
|
490
|
-
|
491
|
-
def create_readme(self) -> None:
|
492
|
-
"""
|
493
|
-
Create a readme file for the package
|
494
|
-
- based on the template readme file
|
495
|
-
- with a list of all included stub folders added to it (not the individual stub-files)
|
496
|
-
"""
|
497
|
-
# read the readme file and update the version and description
|
498
|
-
with open(CONFIG.template_path / "README.md", "r") as f:
|
499
|
-
TEMPLATE_README = f.read()
|
500
|
-
|
501
|
-
# add a readme with the names of the stub-folders
|
502
|
-
|
503
|
-
# read informations from firmware_stubs.json
|
504
|
-
firmware_stubs = {}
|
505
|
-
doc_stubs = {}
|
506
|
-
core_stubs = {}
|
507
|
-
try:
|
508
|
-
with open(self.package_path / "firmware_stubs.json", "r") as f:
|
509
|
-
firmware_stubs = json.load(f)
|
510
|
-
with open(self.package_path / "doc_stubs.json", "r") as f:
|
511
|
-
doc_stubs = json.load(f)
|
512
|
-
with open(self.package_path / "modules.json", "r") as f:
|
513
|
-
core_stubs = json.load(f)
|
514
|
-
except FileNotFoundError:
|
515
|
-
pass
|
516
|
-
|
517
|
-
# Prettify this by merging with template text
|
518
|
-
with open(self.package_path / "README.md", "w") as f:
|
519
|
-
f.write(f"# {self.package_name}\n\n")
|
520
|
-
f.write(TEMPLATE_README)
|
521
|
-
f.write(f"Included stubs:\n")
|
522
|
-
for name, folder in self.stub_sources:
|
523
|
-
f.write(f"* {name} from `stubs/{Path(folder).as_posix()}`\n")
|
524
|
-
|
525
|
-
f.write(f"\n\n")
|
526
|
-
f.write(f"origin | Family | Port | Board | Version\n")
|
527
|
-
f.write(f"-------|--------|------|-------|--------\n")
|
528
|
-
try:
|
529
|
-
f.write(
|
530
|
-
f"Firmware | {firmware_stubs['firmware']['family']} | {firmware_stubs['firmware']['port']} | {firmware_stubs['firmware']['machine']} | {clean_version(firmware_stubs['firmware']['version'])} \n"
|
531
|
-
)
|
532
|
-
except Exception:
|
533
|
-
pass
|
534
|
-
try:
|
535
|
-
f.write(
|
536
|
-
f"Documentation | {doc_stubs['firmware']['family']} | {doc_stubs['firmware']['port']} | - | {clean_version(doc_stubs['firmware']['version'])} \n"
|
537
|
-
)
|
538
|
-
except Exception:
|
539
|
-
pass
|
540
|
-
try:
|
541
|
-
f.write(
|
542
|
-
f"Core | {core_stubs['firmware']['family']} | {core_stubs['firmware']['port']} | - | {clean_version(core_stubs['firmware']['version'])} \n"
|
543
|
-
)
|
544
|
-
except Exception:
|
545
|
-
pass
|
546
|
-
|
547
|
-
|
548
|
-
class PoetryBuilder(Builder):
    """
    Build a package using Poetry

    Wraps the `poetry` command line tool to build, check and publish the
    stub package, and reads/writes the package version directly from/to
    the package's `pyproject.toml` file.
    """

    def __init__(
        self,
        package_name: str,
        *,
        port: str,
        mpy_version: str = "0.0.1",
        board: str = GENERIC_U,
        description: str = "MicroPython stubs",
        stubs: Optional[StubSources] = None,
        # NOTE(review): accepted but never used or forwarded — confirm whether callers rely on it
        json_data: Optional[Dict[str, Any]] = None,
    ):
        super().__init__(
            package_name=package_name,
            mpy_version=mpy_version,
            port=port,
            board=board,
            description=description,
            stubs=stubs,
        )

    # -----------------------------------------------
    # get and set the version of the package directly from the toml file
    @property
    def pkg_version(self) -> str:
        "return the version of the package"
        # read the version from the toml file
        _toml = self.toml_path
        if not _toml.exists():
            # no pyproject.toml yet: fall back to the micropython version
            return self.mpy_version
        with open(_toml, "rb") as f:
            pyproject = tomllib.load(f)
        ver = pyproject["tool"]["poetry"]["version"]
        # normalize via packaging.parse, except for preview markers which are not valid semver
        return str(parse(ver)) if ver not in SET_PREVIEW else ver

    @pkg_version.setter
    def pkg_version(self, version: str) -> None:
        # sourcery skip: remove-unnecessary-cast
        "set the version of the package"
        if not isinstance(version, str):  # type: ignore
            version = str(version)
        # read the current file
        _toml = self.toml_path
        try:
            with open(_toml, "rb") as f:
                pyproject = tomllib.load(f)
            pyproject["tool"]["poetry"]["version"] = version
            # update the version in the toml file
            with open(_toml, "wb") as output:
                tomli_w.dump(pyproject, output)
        except FileNotFoundError as e:
            # unlike the getter, the setter requires the toml file to exist
            raise FileNotFoundError(f"pyproject.toml file not found at {_toml}") from e

    # -----------------------------------------------

    def poetry_build(self) -> bool:
        """build the package by running `poetry build`"""
        return self.run_poetry(["build", "-vvv"])

    def poetry_publish(self, production: bool = False) -> bool:
        """Publish the package with `poetry publish`.

        :param production: True publishes to PyPi, False to Test-PyPi (repo 'test-pypi').
        :return: True when the poetry command succeeded.
        """
        if not self._publish:
            log.warning(f"Publishing is disabled for {self.package_name}")
            return False
        # update the package info
        self.write_package_json()
        if production:
            # NOTE(review): 'pypy.org' in this log text looks like a typo for 'pypi.org'
            log.debug("Publishing to PRODUCTION https://pypy.org")
            params = ["publish"]
        else:
            log.debug("Publishing to TEST-PyPi https://test.pypy.org")
            params = ["publish", "-r", "test-pypi"]
        r = self.run_poetry(params)
        print("")  # add a newline after the output
        return r

    def run_poetry(self, parameters: List[str]) -> bool:
        """Run a poetry commandline in the package folder.
        Note: this may write some output to the console ('All set!')

        :param parameters: arguments appended after the `poetry` executable.
        :return: True on success, False when poetry is missing or exits non-zero.
        """
        # check for pyproject.toml in folder
        if not (self.package_path / "pyproject.toml").exists():  # pragma: no cover
            log.error(f"No pyproject.toml file found in {self.package_path}")
            return False
        # todo: call poetry directly to improve error handling
        try:
            log.debug(f"poetry {parameters} starting")
            subprocess.run(
                ["poetry"] + parameters,
                cwd=self.package_path,
                check=True,
                # stdout=subprocess.PIPE,
                stdout=subprocess.PIPE,  # interestingly: errors on stdout , output on stderr .....
                universal_newlines=True,
                encoding="utf-8",
            )
            log.trace(f"poetry {parameters} completed")
        except (NotADirectoryError, FileNotFoundError) as e:  # pragma: no cover # InvalidVersion
            log.error("Exception on process, {}".format(e))
            return False
        except subprocess.CalledProcessError as e:  # pragma: no cover
            # Detect and log error detection om upload
            # UploadError
            # HTTP Error 400: File already exists. See https://test.pypi.org/help/#file-name-reuse for more information.
            # TODO: how to return the state so it can be handled
            print()  # linefeed after output
            errors = [l for l in e.stdout.splitlines()[1:7] if "Error" in l]
            # NOTE(review): the loop variable below shadows the exception `e`
            for e in errors:
                log.error(e)

            # log.error("Exception on process, {}".format(e))
            return False
        return True

    def check(self) -> bool:
        """check if the package is valid by running `poetry check`
        Note: this will write some output to the console ('All set!')
        """
        return self.run_poetry(["check", "-vvv"])

    def create_update_pyproject_toml(self) -> None:
        """
        create or update/overwrite a `pyproject.toml` file by combining a template file
        with the given parameters.
        and updating it with the pyi files included
        """
        if (self.toml_path).exists():
            # do not overwrite the version of a pre-existing file
            _pyproject = self.pyproject
            assert _pyproject is not None
            # clear out the packages section
            _pyproject["tool"]["poetry"]["packages"] = []
            # update the dependencies section by reading these from the template file
            with open(CONFIG.template_path / "pyproject.toml", "rb") as f:
                tpl = tomllib.load(f)
            _pyproject["tool"]["poetry"]["dependencies"] = tpl["tool"]["poetry"]["dependencies"]

        else:
            # read the template pyproject.toml file from the template folder
            try:
                with open(CONFIG.template_path / "pyproject.toml", "rb") as f:
                    _pyproject = tomllib.load(f)
                # note: can be 'latest' which is not semver
                _pyproject["tool"]["poetry"]["version"] = self.mpy_version
            except FileNotFoundError as e:
                log.error(f"Could not find template pyproject.toml file {e}")
                raise (e)

        # update the name , version and description of the package
        _pyproject["tool"]["poetry"]["name"] = self.package_name
        _pyproject["tool"]["poetry"]["description"] = self.description
        # write out the pyproject.toml file (the property setter validates and writes it)
        self.pyproject = _pyproject

    def update_pyproject_stubs(self) -> int:
        "Add the stub files to the pyproject.toml file"
        _pyproject = self.pyproject
        assert _pyproject is not None, "No pyproject.toml file found"
        # list every .pyi file, with a path relative to the package folder
        _pyproject["tool"]["poetry"]["packages"] = [
            {"include": p.relative_to(self.package_path).as_posix()} for p in sorted((self.package_path).rglob("*.pyi"))
        ]
        # write out the pyproject.toml file
        self.pyproject = _pyproject
        return len(_pyproject["tool"]["poetry"]["packages"])
|
715
|
-
|
716
|
-
|
717
|
-
class StubPackage(PoetryBuilder):
|
718
|
-
"""
|
719
|
-
Create a stub-only package for a specific version , port and board of micropython
|
720
|
-
|
721
|
-
properties:
|
722
|
-
- toml_path - the path to the `pyproject.toml` file
|
723
|
-
- package_path - the path to the folder where the package info will be stored ('./publish').
|
724
|
-
- pkg_version - the version of the package as used on PyPi (semver). Is stored directly in the `pyproject.toml` file
|
725
|
-
- pyproject - the contents of the `pyproject.toml` file
|
726
|
-
|
727
|
-
methods:
|
728
|
-
- from_json - load the package from json
|
729
|
-
- to_json - return the package as json
|
730
|
-
|
731
|
-
- create_update_pyproject_toml - create or update the `pyproject.toml` file
|
732
|
-
- create_readme - create the readme file
|
733
|
-
- create_license - create the license file
|
734
|
-
- copy_stubs - copy the stubs to the package folder
|
735
|
-
- update_included_stubs - update the included stubs in the `pyproject.toml` file
|
736
|
-
- create_hash - create a hash of the package files
|
737
|
-
|
738
|
-
- update_package_files - combines clean, copy, and create reeadme & updates
|
739
|
-
"""
|
740
|
-
|
741
|
-
    def __init__(
        self,
        package_name: str,
        port: str,
        *,
        board: str = GENERIC_U,
        version: str = "0.0.1",
        description: str = "MicroPython stubs",
        stubs: Optional[StubSources] = None,
        json_data: Optional[Dict[str, Any]] = None,
    ):
        """
        Create a stub-only package for a specific version of micropython
        parameters:

            - package_name - the name of the package as used on PyPi
            - version - the version of the package as used on PyPi (semver)
            - description
            - stubs - a list of tuples (name, path) of the stubs to copy
            - json_data - Optional: a json databse record that will be used to create the package from.
                When `json_data` is provided, the version, description and stubs parameters are ignored

        paths:
            ROOT_PATH - the root path of the project ('./')
            PUBLISH_PATH - root-relative path to the folder where the package info will be stored ('./publish').
            TEMPLATE_PATH - root-relative path to the folder where the template files are stored ('./publish/template').
            STUB_PATH - root-relative path to the folder where the stubs are stored ('./stubs').

        """
        self.port = port
        self.board = board
        if json_data is not None:
            # restore all state from a previously saved database record
            self.from_dict(json_data)
        else:
            # store essentials
            self.package_name = package_name
            self.description = description
            self.mpy_version = clean_version(version, drop_v=True)  # Initial version

            # NOTE(review): called before super().__init__ — relies only on the
            # attributes assigned above; confirm this ordering is intentional
            self.create_update_pyproject_toml()

            self.stub_sources: StubSources = []
            # save the stub sources
            if stubs:
                self.stub_sources = stubs

        self.status: Status = Status(
            {
                "result": "-",
                "name": self.package_name,
                "version": self.pkg_version,
                "error": None,
                "path": self.package_path.as_posix(),
            }
        )
        # initialize the PoetryBuilder / Builder base with the (possibly restored) state
        super().__init__(
            package_name=package_name,
            mpy_version=self.mpy_version,
            port=port,
            board=board,
            description=description,
            stubs=self.stub_sources,
        )
|
804
|
-
|
805
|
-
def update_sources(self) -> StubSources:
|
806
|
-
"""
|
807
|
-
Update the stub sources to:
|
808
|
-
- FIRMWARE: prefer -merged stubs over bare MCU stubs
|
809
|
-
- FROZEN: fallback to use the GENERIC folder for the frozen sources if no board specific folder exists
|
810
|
-
"""
|
811
|
-
updated_sources = []
|
812
|
-
# TODO: find a way to simplify this code as this is a bit magic (and hard to understand)
|
813
|
-
for stub_type, fw_path in self.stub_sources:
|
814
|
-
# prefer -merged stubs over bare MCU stubs
|
815
|
-
if stub_type == StubSource.FIRMWARE:
|
816
|
-
# Check if -merged folder exists and use that instead
|
817
|
-
if fw_path.name.endswith("-merged"):
|
818
|
-
merged_path = fw_path
|
819
|
-
else:
|
820
|
-
merged_path = fw_path.with_name(f"{fw_path.name}-merged")
|
821
|
-
if (CONFIG.stub_path / merged_path).exists():
|
822
|
-
updated_sources.append((stub_type, merged_path))
|
823
|
-
else:
|
824
|
-
updated_sources.append((stub_type, fw_path))
|
825
|
-
elif stub_type == StubSource.FROZEN:
|
826
|
-
# use if folder exists , else use GENERIC folder
|
827
|
-
if (CONFIG.stub_path / fw_path).exists():
|
828
|
-
updated_sources.append((stub_type, fw_path))
|
829
|
-
elif fw_path.with_name("GENERIC").exists():
|
830
|
-
updated_sources.append((stub_type, fw_path.with_name("GENERIC")))
|
831
|
-
elif stub_type == StubSource.MERGED:
|
832
|
-
# Use the default board folder instead of the GENERIC board folder (if it exists)
|
833
|
-
if self.board.upper() == GENERIC_U:
|
834
|
-
family = fw_path.name.split("-")[0]
|
835
|
-
default_path = Path(
|
836
|
-
f"{family}-{clean_version(self.mpy_version, flat=True)}-{self.port}-{default_board(self.port, self.mpy_version)}-merged"
|
837
|
-
)
|
838
|
-
if (CONFIG.stub_path / default_path).exists():
|
839
|
-
fw_path = default_path
|
840
|
-
updated_sources.append((stub_type, fw_path))
|
841
|
-
# ---------
|
842
|
-
else:
|
843
|
-
updated_sources.append((stub_type, fw_path))
|
844
|
-
return updated_sources
|
845
|
-
|
846
|
-
    def update_distribution(self, production: bool) -> bool:
        """Update the package .pyi files, if all the sources are available

        :param production: forwarded to next_package_version (PyPI vs Test-PyPi lookup)
        :return: True when the package was updated and `poetry check` passed.
        """
        log.info(f"- Update {self.package_path.name}")
        log.trace(f"{self.package_path.as_posix()}")

        # check if the sources exist
        ok = self.are_package_sources_available()
        if not ok:
            log.debug(f"{self.package_name}: skipping as one or more source stub folders are missing")
            self.status["error"] = "Skipped, stub folder(s) missing"
            # remove the (incomplete) package folder and disable publishing for this package
            shutil.rmtree(self.package_path.as_posix())
            self._publish = False  # type: ignore
            return False
        try:
            # update to -merged and fallback to GENERIC
            self.stub_sources = self.update_sources()
            self.update_package_files()
            self.update_pyproject_stubs()
            # for a new package the version could be 'latest', which is not a valid semver, so update
            self.pkg_version = self.next_package_version(production)
            return self.check()
        except Exception as e:  # pragma: no cover
            # any failure during the update is recorded in the status and reported as False
            log.error(f"{self.package_name}: {e}")
            self.status["error"] = str(e)
            return False
|
871
|
-
|
872
|
-
    def build_distribution(
        self,
        production: bool,  # PyPI or Test-PyPi - USED TO FIND THE NEXT VERSION NUMBER
        force=False,  # BUILD even if no changes
    ) -> bool:  # sourcery skip: default-mutable-arg, extract-duplicate-method, require-parameter-annotation
        """
        Build a package
        look up the previous package version in the dabase
        - update package files
        - build the wheels and sdist

        :param production: PyPI or Test-PyPi -
        :param force: BUILD even if no changes
        :return: True if the package was built
        """
        log.info(f"Build: {self.package_path.name}")

        ok = self.update_distribution(production)
        self.status["version"] = self.pkg_version
        if not ok:
            log.info(f"{self.package_name}: skip - Could not build/update package")
            if not self.status["error"]:
                self.status["error"] = "Could not build/update package"
            return False

        # If there are changes to the package, then publish it
        if self.is_changed() or force:
            if force:
                log.info(f"Force build: {self.package_name} {self.pkg_version} ")
            else:
                log.info(f"Found changes to package sources: {self.package_name} {self.pkg_version} ")
                log.trace(f"Old hash {self.hash} != New hash {self.calculate_hash()}")
            # Build the distribution files
            old_ver = self.pkg_version
            # bump the version (again, after update_distribution) so the build gets a fresh number
            self.pkg_version = self.next_package_version(production)
            self.status["version"] = self.pkg_version
            # to get the next version
            log.debug(
                f"{self.package_name}: bump version for {old_ver} to {self.pkg_version } {'production' if production else 'test'}"
            )
            self.write_package_json()
            log.trace(f"New hash: {self.package_name} {self.pkg_version} {self.hash}")
            if self.poetry_build():
                self.status["result"] = "Build OK"
            else:
                log.warning(f"{self.package_name}: skipping as build failed")
                self.status["error"] = "Poetry build failed"
                return False
        # note: also returns True when there were no changes and nothing was built
        return True
|
921
|
-
|
922
|
-
    def publish_distribution_ifchanged(
        self,
        db: PysonDB,
        *,
        production: bool,  # PyPI or Test-PyPi
        build=False,  #
        force=False,  # publish even if no changes
        dry_run=False,  # do not actually publish
        clean: bool = False,  # clean up afterwards
    ) -> (
        bool
    ):  # sourcery skip: assign-if-exp, default-mutable-arg, extract-method, remove-unnecessary-else, require-parameter-annotation, swap-if-else-branches, swap-if-expression
        """
        Publish a package to PyPi
        look up the previous package version in the dabase, and only publish if there are changes to the package
        - change determied by hash across all files

        Build
        - update package files
        - build the wheels and sdist
        Publish
        - publish to PyPi
        - update database with new hash
        """
        log.info(f"Publish: {self.package_path.name}")
        # count .pyi files in the package
        filecount = len(list(self.package_path.rglob("*.pyi")))
        if filecount == 0:
            log.debug(f"{self.package_name}: starting build as no .pyi files found")
            build = True

        if build or force or self.is_changed():
            # NOTE(review): the build result is not checked here; a failed build is
            # only caught later via self._publish / is_changed — confirm intentional
            self.build_distribution(production=production, force=force)

        if not self._publish:
            log.debug(f"{self.package_name}: skip publishing")
            return False

        # refresh the version; the return value is intentionally unused here
        self.next_package_version(production=production)
        # Publish the package to PyPi, Test-PyPi or Github
        if self.is_changed():
            if self.mpy_version in SET_PREVIEW and production and not force:
                # preview/'latest' builds are distributed via Github only
                log.warning("version: `latest` package will only be available on Github, and not published to PyPi.")
                self.status["result"] = "Published to GitHub"
            else:
                return self.publish_distribution(dry_run, production, db)
        elif force:
            return self.publish_distribution(dry_run, production, db)
        else:
            log.info(f"No changes to package : {self.package_name} {self.pkg_version}")

        if clean:
            self.clean()
        return True
|
976
|
-
|
977
|
-
    def publish_distribution(self, dry_run, production, db) -> bool:
        """
        Publishes the package to PyPi or Test-PyPi.

        Args:
            dry_run (bool): If True, performs a dry run without actually publishing.
            production (bool): If True, publishes to PyPi. If False, publishes to Test-PyPi.
            db: The database object to save the package state.

        Returns:
            bool: True if the publish was successful, False otherwise.
        """
        self.update_hashes()  # resets is_changed to False
        if not dry_run:
            pub_ok = self.poetry_publish(production=production)
        else:
            log.warning(f"{self.package_name}: Dry run, not publishing to {'' if production else 'Test-'}PyPi")
            pub_ok = True
        if not pub_ok:
            log.warning(f"{self.package_name}: Publish failed for {self.pkg_version}")
            self.status["error"] = "Publish failed"
            return False
        self.status["result"] = "Published to PyPi" if production else "Published to Test-PyPi"
        # NOTE(review): hashes were already updated above; this second call looks
        # redundant unless publishing changed package files — confirm
        self.update_hashes()
        if dry_run:
            log.warning(f"{self.package_name}: Dry run, not saving to database")
        else:
            # get the package state and add it to the database
            db.add(self.to_dict())
            db.commit()
        return True
|
1008
|
-
|
1009
|
-
def are_package_sources_available(self) -> bool:
|
1010
|
-
"""
|
1011
|
-
Check if (all) the packages sources exist.
|
1012
|
-
"""
|
1013
|
-
ok = True
|
1014
|
-
for stub_type, src_path in self.update_sources():
|
1015
|
-
if (CONFIG.stub_path / src_path).exists():
|
1016
|
-
continue
|
1017
|
-
if stub_type == StubSource.FROZEN:
|
1018
|
-
# not a blocking issue if there are no frozen stubs, perhaps this port/board does not have any
|
1019
|
-
continue
|
1020
|
-
# todo: below is a workaround for different types, but where is the source of this difference coming from?
|
1021
|
-
msg = (
|
1022
|
-
f"{self.package_name}: source '{stub_type.value}' not found: {CONFIG.stub_path / src_path}"
|
1023
|
-
if isinstance(stub_type, StubSource) # type: ignore
|
1024
|
-
else f"{self.package_name}: source '{stub_type}' not found: {CONFIG.stub_path / src_path}"
|
1025
|
-
)
|
1026
|
-
self.status["error"] = msg
|
1027
|
-
log.debug(msg)
|
1028
|
-
ok = False
|
1029
|
-
return ok
|
1
|
+
"""Create a stub-only package for a specific version of micropython"""
|
2
|
+
|
3
|
+
import hashlib
|
4
|
+
import json
|
5
|
+
import shutil
|
6
|
+
import subprocess
|
7
|
+
from pathlib import Path
|
8
|
+
from typing import Any, Dict, List, Optional, Tuple, Union
|
9
|
+
|
10
|
+
import tenacity
|
11
|
+
|
12
|
+
from stubber.basicgit import get_git_describe
|
13
|
+
from stubber.publish.helpers import get_module_docstring
|
14
|
+
|
15
|
+
try:
|
16
|
+
import tomllib # type: ignore
|
17
|
+
except ModuleNotFoundError:
|
18
|
+
import tomli as tomllib # type: ignore
|
19
|
+
|
20
|
+
from typing import NewType
|
21
|
+
|
22
|
+
import tomli_w
|
23
|
+
from loguru import logger as log
|
24
|
+
from packaging.version import Version, parse
|
25
|
+
from pysondb import PysonDB
|
26
|
+
|
27
|
+
from stubber.publish.bump import bump_version
|
28
|
+
from stubber.publish.defaults import GENERIC_U, default_board
|
29
|
+
from stubber.publish.enums import StubSource
|
30
|
+
from stubber.publish.pypi import Version, get_pypi_versions
|
31
|
+
from stubber.utils.config import CONFIG
|
32
|
+
from stubber.utils.versions import SET_PREVIEW, V_PREVIEW, clean_version
|
33
|
+
|
34
|
+
# status record kept per package: result / name / version / error / path
Status = NewType("Status", Dict[str, Union[str, None]])
# a stub source is a (type, path) pair; the path is relative to CONFIG.stub_path
StubSources = List[Tuple[StubSource, Path]]

# indicates which stubs will be skipped when copying for these stub sources
STUBS_COPY_FILTER = {
    StubSource.FROZEN: [
        "espnow",  # merged stubs + documentation of the espnow module is better than the info in the frozen stubs
    ],
    StubSource.FIRMWARE: [
        "builtins",
        "collections",  # collections must be in stdlib
    ],
    StubSource.MERGED: [
        "collections",  # collections must be in stdlib
    ],
}

# these modules will be replaced by a simple import statement to import from stdlib
STDLIB_UMODULES = ["ucollections"]
|
53
|
+
|
54
|
+
|
55
|
+
class VersionedPackage(object):
    """
    Represents a versioned package.

    Attributes:
        package_name (str): The name of the package.
        mpy_version (str): The MicroPython version.

    Methods:
        is_preview(self): Checks if the package is a preview version.
        pkg_version (property): Gets/sets the version of the package.
        next_package_version(self, production: bool) -> str: Gets the next version for the package.
        bump(self, *, rc: int = 0) -> str: Bumps the postrelease version of the package.
    """

    def __init__(self, package_name: str, *, mpy_version: str):
        super().__init__()
        self.package_name: str = package_name
        self.mpy_version: str = mpy_version
        self._pkg_version: str = mpy_version

    def __str__(self) -> str:
        return f"{self.package_name}=={self.mpy_version}"

    def __repr__(self) -> str:
        return f"{self.package_name}=={self.mpy_version}"

    def __eq__(self, o: object) -> bool:
        return str(self) == str(o)

    def __hash__(self) -> int:
        return hash(str(self))

    @property
    def pkg_version(self) -> str:
        "return the version of the package"
        return self._pkg_version

    @pkg_version.setter
    def pkg_version(self, version: str) -> None:
        "set the version of the package"
        self._pkg_version = version

    def next_package_version(self, production: bool) -> str:
        # sourcery skip: assign-if-exp
        """Get the next version for the package"""
        if self.is_preview():
            return self._get_next_preview_package_version(production)
        else:
            return self._get_next_package_version(production)

    def is_preview(self):
        "True when the mpy_version is a preview marker or contains the preview tag"
        return self.mpy_version in SET_PREVIEW or V_PREVIEW in self.mpy_version

    def _get_next_preview_package_version(self, production: bool = False) -> str:
        """
        Get the next prerelease version for the package.
        this is used for preview versions of micropython (-preview, formerly known as 'latest')
        """
        rc = 1
        if not (describe := get_git_describe(CONFIG.mpy_path.as_posix())):
            return "99.99.99post99"
        # use versiontag and the number of commits since the last tag
        # "v1.19.1-841-g3446"
        # 'v1.20.0-dirty'
        # 'v1.22.0-preview-19-g8eb7721b4'
        parts = describe.split("-", 3)
        ver = parts[0]
        if len(parts) > 1:
            rc = parts[1] if parts[1].isdigit() else parts[2] if len(parts) > 2 and parts[2].isdigit() else 1
            rc = int(rc)
        # FIX: guard parts[1] — a describe that is a bare tag (e.g. 'v1.22.0')
        # has no '-' suffix and previously raised IndexError here.
        is_preview_tag = len(parts) > 1 and parts[1] == V_PREVIEW
        base = Version(ver) if is_preview_tag else bump_version(Version(ver), minor_bump=True)
        return str(bump_version(base, rc=rc))

    def _get_next_package_version(self, prod: bool = False, rc=False) -> str:
        """Get the next version for the package."""
        base = Version(self.pkg_version)
        if pypi_versions := get_pypi_versions(self.package_name, production=prod, base=base):
            # get the latest version from pypi
            self.pkg_version = str(pypi_versions[-1])
        else:
            # no published package found , so we start at base version then bump 1 post release
            self.pkg_version = Version(self.pkg_version).base_version
        return self.bump()

    def bump(self, *, rc: int = 0) -> str:
        """
        bump the postrelease version of the package, and write the change to disk
        if rc >= 1, the version is bumped to the specified release candidate
        """
        try:
            current = Version(self.pkg_version)
            assert isinstance(current, Version)
            # bump the version
            self.pkg_version = str(bump_version(post_bump=True, current=current, rc=rc))
        except Exception as e:  # pragma: no cover
            log.error(f"Error: {e}")
        return self.pkg_version
|
158
|
+
|
159
|
+
|
160
|
+
class Builder(VersionedPackage):
|
161
|
+
"""
|
162
|
+
Builder class for creating and updating MicroPython stub packages.
|
163
|
+
|
164
|
+
Args:
|
165
|
+
package_name (str): The name of the package.
|
166
|
+
mpy_version (str, optional): The version of MicroPython. Defaults to "0.0.1".
|
167
|
+
port (str): The port for the package.
|
168
|
+
board (str, optional): The board for the package. Defaults to GENERIC_U.
|
169
|
+
description (str, optional): The description of the package. Defaults to "MicroPython stubs".
|
170
|
+
stubs (Optional[StubSources], optional): The stub sources for the package. Defaults to None.
|
171
|
+
|
172
|
+
Attributes:
|
173
|
+
package_name (str): The name of the package.
|
174
|
+
mpy_version (str): The version of MicroPython.
|
175
|
+
port (str): The port for the package.
|
176
|
+
board (str): The board for the package.
|
177
|
+
description (str): The description of the package.
|
178
|
+
stub_sources (Optional[StubSources]): The stub sources for the package.
|
179
|
+
hash (None): The hash of all the files in the package.
|
180
|
+
stub_hash (None): The hash of the stub files.
|
181
|
+
|
182
|
+
Properties:
|
183
|
+
package_path (Path): The package path based on the package name and version, relative to the publish folder.
|
184
|
+
toml_path (Path): The path to the `pyproject.toml` file.
|
185
|
+
pyproject (Union[Dict[str, Any], None]): The parsed pyproject.toml or None.
|
186
|
+
|
187
|
+
Methods:
|
188
|
+
create_update_pyproject_toml(): Create or update/overwrite a `pyproject.toml` file.
|
189
|
+
check(): Check if the package is valid.
|
190
|
+
clean(): Remove the stub files from the package folder.
|
191
|
+
copy_stubs(): Copy files from all listed stub folders to the package folder.
|
192
|
+
update_package_files(): Update the stub-only package for a specific version of MicroPython.
|
193
|
+
write_package_json(): Write the package.json file to disk.
|
194
|
+
to_dict(): Return the package as a dict to store in the jsondb.
|
195
|
+
from_dict(json_data: Dict): Load the package from a dict (from the jsondb).
|
196
|
+
calculate_hash(include_md: bool = True): Create a SHA1 hash of all files in the package.
|
197
|
+
update_hashes(): Update the package hashes.
|
198
|
+
is_changed(include_md: bool = True): Check if the package has changed.
|
199
|
+
"""
|
200
|
+
|
201
|
+
# BUF_SIZE is totally arbitrary,
|
202
|
+
BUF_SIZE = 65536 * 16 # lets read stuff in 16 x 64kb chunks!
|
203
|
+
|
204
|
+
    def __init__(
        self,
        package_name: str,
        *,
        mpy_version: str = "0.0.1",
        port: str,
        board: str = GENERIC_U,
        description: str = "MicroPython stubs",
        stubs: Optional[StubSources] = None,
        # json_data: Optional[Dict[str, Any]] = None,
    ):  # port: str, board: str
        super().__init__(package_name=package_name, mpy_version=mpy_version)
        self._publish = True  # intended for publishing
        self.package_name = package_name
        self.mpy_version = mpy_version
        self.port = port
        self.board = board
        self.description = description
        # defensive copy semantics: an empty list when no stubs are given
        self.stub_sources = stubs or []
        self.hash = None  # initial hash
        """Hash of all the files in the package"""
        self.stub_hash = None  # initial hash
        """Hash of all .pyi files"""
|
227
|
+
|
228
|
+
@property
|
229
|
+
def package_path(self) -> Path:
|
230
|
+
"package path based on the package name and version and relative to the publish folder"
|
231
|
+
parts = self.package_name.split("-")
|
232
|
+
parts[1:1] = [clean_version(self.mpy_version, flat=True)]
|
233
|
+
return CONFIG.publish_path / "-".join(parts)
|
234
|
+
|
235
|
+
@property
|
236
|
+
def toml_path(self) -> Path:
|
237
|
+
"the path to the `pyproject.toml` file"
|
238
|
+
# todo: make sure this is always relative to the root path
|
239
|
+
return self.package_path / "pyproject.toml"
|
240
|
+
|
241
|
+
# -----------------------------------------------
|
242
|
+
    @property
    def pyproject(self) -> Union[Dict[str, Any], None]:
        "parsed pyproject.toml or None"
        pyproject = None
        _toml = self.toml_path
        if (_toml).exists():
            with open(_toml, "rb") as f:
                pyproject = tomllib.load(f)
        return pyproject

    @pyproject.setter
    def pyproject(self, pyproject: Dict) -> None:
        "validate, then write the given dict to the `pyproject.toml` file"
        # check if the result is a valid toml file: round-trip through tomli_w/tomllib
        # BEFORE touching the file on disk, so an invalid dict never clobbers it
        try:
            tomllib.loads(tomli_w.dumps(pyproject))
        except tomllib.TOMLDecodeError as e:
            print("Could not create a valid TOML file")
            raise (e)
        # make sure parent folder exists
        _toml = self.toml_path
        (_toml).parent.mkdir(parents=True, exist_ok=True)
        with open(_toml, "wb") as output:
            tomli_w.dump(pyproject, output)
|
265
|
+
|
266
|
+
# -----------------------------------------------
|
267
|
+
def create_update_pyproject_toml(self) -> None:
|
268
|
+
"""
|
269
|
+
create or update/overwrite a `pyproject.toml` file by combining a template file
|
270
|
+
with the given parameters.
|
271
|
+
"""
|
272
|
+
raise NotImplementedError("create_update_pyproject_toml not implemented")
|
273
|
+
|
274
|
+
# -----------------------------------------------
|
275
|
+
|
276
|
+
# -----------------------------------------------

def check(self) -> bool:
    """Check if the package is valid, to be implemented by the subclass.

    The base implementation accepts every package (always returns True).
    """
    return True
|
279
|
+
|
280
|
+
def clean(self) -> None:
    """
    Delete the stub files from the package folder.

    Run before updating the stub package (to avoid lingering stub files)
    and after building it (to avoid storing the same files twice).

    `.gitignore` cannot be used as this will prevent poetry from processing the files.
    """
    # remove every *.py / *.pyi stub and any modules.json manifest, recursively
    for pattern in ("*.py", "*.pyi", "modules.json"):
        for stale in self.package_path.rglob(pattern):
            stale.unlink()
|
293
|
+
|
294
|
+
def copy_stubs(self) -> None:
    """
    Copy files from all listed stub folders to the package folder.
    The order of the stub folders is relevant as "last copy wins".

    - 1 - Copy all MCU stubs/merged to the package folder
    - 2 - copy the remaining stubs to the package folder
    - 3 - remove *.py files from the package folder that are shadowed by a .pyi file

    Raises:
        FileNotFoundError: when a stub source folder is missing
            (missing FROZEN sources are tolerated - this port/board may not have any).
    """
    try:
        # Check if all stub source folders exist before copying anything
        for stub_type, src_path in self.stub_sources:
            if not (CONFIG.stub_path / src_path).exists():
                raise FileNotFoundError(f"Could not find stub source folder {CONFIG.stub_path / src_path}")

        # 1 + 2 - Copy the stubs to the package, directly in the package folder (no folders)
        # (idiomatic tuple iteration instead of indexing self.stub_sources[n])
        for stub_type, src_path in self.stub_sources:
            try:
                log.debug(f"Copying {stub_type} from {src_path}")
                self.copy_folder(stub_type, src_path)
            except OSError as e:
                if stub_type != StubSource.FROZEN:
                    raise FileNotFoundError(f"Could not find stub source folder {src_path}") from e
                # frozen stubs are optional - log and continue
                log.debug(f"Error copying stubs from : {CONFIG.stub_path / src_path}, {e}")
    finally:
        # 3 - clean up a little bit:
        # delete all the .py files in the package folder if there is a corresponding .pyi file
        for f in self.package_path.rglob("*.py"):
            if f.with_suffix(".pyi").exists():
                f.unlink()
        self.update_umodules()
|
328
|
+
|
329
|
+
def update_umodules(self):
    """
    Replace the STDLIB umodule stubs (u-prefixed, e.g. `uos`) with a simple
    re-export, so typecheckers resolve the stdlib modules in their usual
    stdlib location.
    """
    umodule_stubs = (p for p in self.package_path.rglob("*.pyi") if p.stem in STDLIB_UMODULES)
    for stub in umodule_stubs:
        # keep the module docstring, then re-export everything from the
        # un-prefixed stdlib module (stem without the leading 'u')
        doc = get_module_docstring(stub) or ""
        comment = "# import module from stdlib/module"
        stub.write_text(f'"""\n{doc}\n"""\n{comment}\nfrom {stub.stem[1:]} import *')
|
341
|
+
|
342
|
+
def copy_folder(self, stub_type: StubSource, src_path: Path):
    """Copy all files from one stub source folder into the package folder, preserving sub-paths.

    Files listed in STUBS_COPY_FILTER for this stub type ('poorly' decorated files) are skipped.
    """
    source_root = CONFIG.stub_path / src_path
    Path(self.package_path).mkdir(parents=True, exist_ok=True)
    for item in source_root.rglob("*"):
        if not item.is_file():
            continue
        # filter the 'poorly' decorated files
        if item.stem in STUBS_COPY_FILTER.get(stub_type, ()):
            continue
        target = Path(self.package_path) / item.relative_to(source_root)
        target.parent.mkdir(parents=True, exist_ok=True)
        target.write_bytes(item.read_bytes())
|
353
|
+
|
354
|
+
def update_package_files(self) -> None:
    """
    Update the stub-only package for a specific version of micropython:
    - cleans the package folder
    - copies the stubs from the list of stubs
    - creates/updates the readme and the license file
    """
    # create the package folder, then run the update steps in order:
    # clean out old *.py? files first, copy fresh stubs, regenerate the docs
    self.package_path.mkdir(parents=True, exist_ok=True)
    for step in (self.clean, self.copy_stubs, self.create_readme, self.create_license):
        step()
|
367
|
+
|
368
|
+
def write_package_json(self) -> None:
    """Serialize the package metadata (`to_dict`) to `package.json` in the package folder."""
    # make sure the folder exists (no-op when it already does)
    self.package_path.mkdir(parents=True, exist_ok=True)
    with open(self.package_path / "package.json", "w") as f:
        json.dump(self.to_dict(), f, indent=4)
|
376
|
+
|
377
|
+
def to_dict(self) -> dict:
    """Return the package as a dict to store in the jsondb.

    Some members are simplified to allow serialization to json:
    - paths become posix path strings
    - the (semver) version becomes a string
    """
    sources = [(name, Path(path).as_posix()) for (name, path) in self.stub_sources]
    return dict(
        name=self.package_name,
        mpy_version=self.mpy_version,
        publish=self._publish,
        pkg_version=str(self.pkg_version),
        # only the folder name is stored, as it is relative to the publish folder
        path=self.package_path.name,
        stub_sources=sources,
        description=self.description,
        hash=self.hash,
        stub_hash=self.stub_hash,
    )
|
397
|
+
|
398
|
+
def from_dict(self, json_data: Dict) -> None:
    """Load the package state from a dict (a jsondb record).

    Restores name/description/version/hashes, (re)creates the package folder and
    `pyproject.toml`, then rebuilds the stub source list.
    """
    self.package_name = json_data["name"]
    # self.package_path = Path(json_data["path"])
    self.description = json_data["description"]
    self.mpy_version = json_data["mpy_version"]
    self._publish = json_data["publish"]
    self.hash = json_data["hash"]
    self.stub_hash = json_data["stub_hash"]
    # create folder
    if not self.package_path.exists():
        self.package_path.mkdir(parents=True, exist_ok=True)
    # create the pyproject.toml file
    self.create_update_pyproject_toml()
    # set pkg version after creating the toml file
    self.pkg_version = json_data["pkg_version"]
    self.stub_sources = []
    for name, path in json_data["stub_sources"]:
        if path.startswith("stubs/"):
            # strip only the leading "stubs/" prefix; str.replace would also
            # remove any later "stubs/" occurrences inside the path
            path = path[len("stubs/"):]
        self.stub_sources.append((name, Path(path)))
|
419
|
+
|
420
|
+
def calculate_hash(self, include_md: bool = True) -> str:
    # sourcery skip: reintroduce-else, swap-if-else-branches, use-named-expression
    """
    Create a SHA1 hash of all files in the package, excluding the pyproject.toml file itself.

    The hash is based on the content of the .py/.pyi and .md files in the package.
    If include_md is False, the .md files are not hashed, allowing the files in the
    packages to be compared independently of the generated documentation.
    As a single hash is created across all files, the files are sorted prior to
    hashing to ensure that the hash is stable.

    Note: A changed hash will not indicate which of the files in the package have been changed.
    """
    digest = hashlib.sha1()
    # the stub files ...
    hash_list = list(self.package_path.rglob("**/*.pyi"))
    # ... plus the doc files when requested (the toml file is deliberately excluded)
    if include_md:
        hash_list.append(self.package_path / "LICENSE.md")
        hash_list.append(self.package_path / "README.md")
    for file in sorted(hash_list):
        try:
            # add_file_hash retries on transient read failures
            self.add_file_hash(file, digest)
        except FileNotFoundError:
            # ignore file-not-found so the hash can still be created
            # WHILE GIT / VIRUS SCANNERS HOLD LINGERING FILES
            log.warning(f"File not found {file}")
    return digest.hexdigest()
|
447
|
+
|
448
|
+
@tenacity.retry(wait=tenacity.wait_fixed(0.2), stop=tenacity.stop_after_attempt(3))
def add_file_hash(self, file, file_hash):
    """
    Add the content of a file to the given hash object, reading in chunks.

    If an error occurs, the read is retried up to 3 times with a 0.2 second
    delay (handled by the tenacity decorator).

    Args:
        file (str): The path to the file.
        file_hash (hashlib._Hash): The file hash object to update.

    Returns:
        None
    """
    with open(file, "rb") as stream:
        # read in BUF_SIZE chunks until EOF
        while chunk := stream.read(Builder.BUF_SIZE):
            file_hash.update(chunk)
|
467
|
+
|
468
|
+
def update_hashes(self, ret=False) -> None:
    """Update the stored package hashes; resets is_changed() to False.

    Note: the `ret` parameter is unused; kept for backward compatibility with callers.
    """
    self.hash = self.calculate_hash()
    self.stub_hash = self.calculate_hash(include_md=False)
|
472
|
+
|
473
|
+
def is_changed(self, include_md: bool = True) -> bool:
    """Check if the package has changed, based on the current and the stored hash.

    The default (include_md=True) checks the hash of all files, including the
    .md files; with include_md=False only the stub files are compared.
    """
    current = self.calculate_hash(include_md=include_md)
    stored = self.hash if include_md else self.stub_hash
    # log the same comparison that is returned; the previous `self.hash != current`
    # logged the wrong 'changed' value whenever include_md was False
    log.trace(f"changed = {stored != current} | Stored: {stored} | Current: {current}")
    return stored != current
|
481
|
+
|
482
|
+
def create_license(self) -> None:
    """
    Create a license file for the package
    - copied verbatim from the template license file
    """
    # copy the license file from the template to the package folder
    # option : append other license files
    shutil.copy(CONFIG.template_path / "LICENSE.md", self.package_path)
|
490
|
+
|
491
|
+
def create_readme(self) -> None:
    """
    Create a readme file for the package
    - based on the template readme file
    - with a list of all included stub folders added to it (not the individual stub-files)
    - with an origin table built from the optional stub-source manifests
    """
    # read the readme template
    with open(CONFIG.template_path / "README.md", "r") as f:
        TEMPLATE_README = f.read()

    # read information from the stub-source manifests (best effort - any may be absent)
    firmware_stubs = {}
    doc_stubs = {}
    core_stubs = {}
    try:
        with open(self.package_path / "firmware_stubs.json", "r") as f:
            firmware_stubs = json.load(f)
        with open(self.package_path / "doc_stubs.json", "r") as f:
            doc_stubs = json.load(f)
        with open(self.package_path / "modules.json", "r") as f:
            core_stubs = json.load(f)
    except FileNotFoundError:
        pass

    # Prettify this by merging with template text
    with open(self.package_path / "README.md", "w") as f:
        f.write(f"# {self.package_name}\n\n")
        f.write(TEMPLATE_README)
        f.write("Included stubs:\n")
        for name, folder in self.stub_sources:
            f.write(f"* {name} from `stubs/{Path(folder).as_posix()}`\n")

        f.write("\n\n")
        f.write("origin | Family | Port | Board | Version\n")
        f.write("-------|--------|------|-------|--------\n")
        # the board column is only meaningful for the firmware manifest
        self._write_origin_row(f, "Firmware", firmware_stubs, with_board=True)
        self._write_origin_row(f, "Documentation", doc_stubs, with_board=False)
        self._write_origin_row(f, "Core", core_stubs, with_board=False)

@staticmethod
def _write_origin_row(f, label: str, stubs: dict, *, with_board: bool) -> None:
    """Write one row of the origin table; silently skip when the manifest is missing keys."""
    try:
        fw = stubs["firmware"]
        board_col = fw["machine"] if with_board else "-"
        f.write(f"{label} | {fw['family']} | {fw['port']} | {board_col} | {clean_version(fw['version'])} \n")
    except Exception:
        # manifests are optional / may be malformed - the row is simply omitted (best effort)
        pass
|
546
|
+
|
547
|
+
|
548
|
+
class PoetryBuilder(Builder):
    """
    Build a package using Poetry.

    Adds version handling via `pyproject.toml` and wraps the
    `poetry build / publish / check` commandlines.
    """

    def __init__(
        self,
        package_name: str,
        *,
        port: str,
        mpy_version: str = "0.0.1",
        board: str = GENERIC_U,
        description: str = "MicroPython stubs",
        stubs: Optional[StubSources] = None,
        json_data: Optional[Dict[str, Any]] = None,
    ):
        super().__init__(
            package_name=package_name,
            mpy_version=mpy_version,
            port=port,
            board=board,
            description=description,
            stubs=stubs,
        )

    # -----------------------------------------------
    # get and set the version of the package directly from the toml file
    @property
    def pkg_version(self) -> str:
        "return the version of the package"
        # read the version from the toml file; fall back to the mpy version
        # when the toml file does not (yet) exist
        _toml = self.toml_path
        if not _toml.exists():
            return self.mpy_version
        with open(_toml, "rb") as f:
            pyproject = tomllib.load(f)
        ver = pyproject["tool"]["poetry"]["version"]
        # preview versions (e.g. 'latest') are not semver - return them unparsed
        return str(parse(ver)) if ver not in SET_PREVIEW else ver

    @pkg_version.setter
    def pkg_version(self, version: str) -> None:
        # sourcery skip: remove-unnecessary-cast
        "set the version of the package"
        if not isinstance(version, str):  # type: ignore
            version = str(version)
        # read the current file, patch the version, write it back
        _toml = self.toml_path
        try:
            with open(_toml, "rb") as f:
                pyproject = tomllib.load(f)
            pyproject["tool"]["poetry"]["version"] = version
            # update the version in the toml file
            with open(_toml, "wb") as output:
                tomli_w.dump(pyproject, output)
        except FileNotFoundError as e:
            raise FileNotFoundError(f"pyproject.toml file not found at {_toml}") from e

    # -----------------------------------------------

    def poetry_build(self) -> bool:
        """build the package by running `poetry build`"""
        return self.run_poetry(["build", "-vvv"])

    def poetry_publish(self, production: bool = False) -> bool:
        """Publish the package to PyPi (production=True) or Test-PyPi.

        Returns False when publishing is disabled for this package or the
        poetry command fails.
        """
        if not self._publish:
            log.warning(f"Publishing is disabled for {self.package_name}")
            return False
        # update the package info
        self.write_package_json()
        if production:
            # fixed: previous message pointed at pypy.org (the PyPy project), not pypi.org
            log.debug("Publishing to PRODUCTION https://pypi.org")
            params = ["publish"]
        else:
            log.debug("Publishing to TEST-PyPi https://test.pypi.org")
            params = ["publish", "-r", "test-pypi"]
        r = self.run_poetry(params)
        print("")  # add a newline after the output
        return r

    def run_poetry(self, parameters: List[str]) -> bool:
        """Run a poetry commandline in the package folder.
        Note: this may write some output to the console ('All set!')

        Returns True when the command completed successfully.
        """
        # check for pyproject.toml in folder
        if not (self.package_path / "pyproject.toml").exists():  # pragma: no cover
            log.error(f"No pyproject.toml file found in {self.package_path}")
            return False
        # todo: call poetry directly to improve error handling
        try:
            log.debug(f"poetry {parameters} starting")
            subprocess.run(
                ["poetry"] + parameters,
                cwd=self.package_path,
                check=True,
                # interestingly: errors on stdout , output on stderr .....
                stdout=subprocess.PIPE,
                universal_newlines=True,
                encoding="utf-8",
            )
            log.trace(f"poetry {parameters} completed")
        except (NotADirectoryError, FileNotFoundError) as e:  # pragma: no cover # InvalidVersion
            log.error("Exception on process, {}".format(e))
            return False
        except subprocess.CalledProcessError as e:  # pragma: no cover
            # Detect and log errors on upload, e.g.:
            # HTTP Error 400: File already exists. See https://test.pypi.org/help/#file-name-reuse for more information.
            # TODO: how to return the state so it can be handled
            print()  # linefeed after output
            # renamed loop variables: the old code shadowed the exception `e`
            # and used the ambiguous name `l`
            error_lines = [line for line in e.stdout.splitlines()[1:7] if "Error" in line]
            for error_line in error_lines:
                log.error(error_line)
            return False
        return True

    def check(self) -> bool:
        """check if the package is valid by running `poetry check`
        Note: this will write some output to the console ('All set!')
        """
        return self.run_poetry(["check", "-vvv"])

    def create_update_pyproject_toml(self) -> None:
        """
        create or update/overwrite a `pyproject.toml` file by combining a template file
        with the given parameters,
        and updating it with the pyi files included
        """
        if (self.toml_path).exists():
            # do not overwrite the version of a pre-existing file
            _pyproject = self.pyproject
            assert _pyproject is not None
            # clear out the packages section
            _pyproject["tool"]["poetry"]["packages"] = []
            # update the dependencies section by reading these from the template file
            with open(CONFIG.template_path / "pyproject.toml", "rb") as f:
                tpl = tomllib.load(f)
            _pyproject["tool"]["poetry"]["dependencies"] = tpl["tool"]["poetry"]["dependencies"]
        else:
            # read the template pyproject.toml file from the template folder
            try:
                with open(CONFIG.template_path / "pyproject.toml", "rb") as f:
                    _pyproject = tomllib.load(f)
                # note: can be 'latest' which is not semver
                _pyproject["tool"]["poetry"]["version"] = self.mpy_version
            except FileNotFoundError as e:
                log.error(f"Could not find template pyproject.toml file {e}")
                raise (e)

        # update the name , version and description of the package
        _pyproject["tool"]["poetry"]["name"] = self.package_name
        _pyproject["tool"]["poetry"]["description"] = self.description
        # write out the pyproject.toml file
        self.pyproject = _pyproject

    def update_pyproject_stubs(self) -> int:
        "Add the stub files to the pyproject.toml file; returns the number of included files."
        _pyproject = self.pyproject
        assert _pyproject is not None, "No pyproject.toml file found"
        # sorted so the package list (and therefore the toml file) is stable
        _pyproject["tool"]["poetry"]["packages"] = [
            {"include": p.relative_to(self.package_path).as_posix()} for p in sorted((self.package_path).rglob("*.pyi"))
        ]
        # write out the pyproject.toml file
        self.pyproject = _pyproject
        return len(_pyproject["tool"]["poetry"]["packages"])
|
715
|
+
|
716
|
+
|
717
|
+
class StubPackage(PoetryBuilder):
    """
    Create a stub-only package for a specific version , port and board of micropython

    properties:
        - toml_path - the path to the `pyproject.toml` file
        - package_path - the path to the folder where the package info will be stored ('./publish').
        - pkg_version - the version of the package as used on PyPi (semver). Is stored directly in the `pyproject.toml` file
        - pyproject - the contents of the `pyproject.toml` file

    methods:
        - from_json - load the package from json
        - to_json - return the package as json

        - create_update_pyproject_toml - create or update the `pyproject.toml` file
        - create_readme - create the readme file
        - create_license - create the license file
        - copy_stubs - copy the stubs to the package folder
        - update_included_stubs - update the included stubs in the `pyproject.toml` file
        - create_hash - create a hash of the package files

        - update_package_files - combines clean, copy, and create readme & updates
    """

    def __init__(
        self,
        package_name: str,
        port: str,
        *,
        board: str = GENERIC_U,
        version: str = "0.0.1",
        description: str = "MicroPython stubs",
        stubs: Optional[StubSources] = None,
        json_data: Optional[Dict[str, Any]] = None,
    ):
        """
        Create a stub-only package for a specific version of micropython
        parameters:

            - package_name - the name of the package as used on PyPi
            - version - the version of the package as used on PyPi (semver)
            - description
            - stubs - a list of tuples (name, path) of the stubs to copy
            - json_data - Optional: a json database record that will be used to create the package from.
              When `json_data` is provided, the version, description and stubs parameters are ignored

        paths:
            ROOT_PATH - the root path of the project ('./')
            PUBLISH_PATH - root-relative path to the folder where the package info will be stored ('./publish').
            TEMPLATE_PATH - root-relative path to the folder where the template files are stored ('./publish/template').
            STUB_PATH - root-relative path to the folder where the stubs are stored ('./stubs').

        """
        self.port = port
        self.board = board
        if json_data is not None:
            # restore all state from the database record
            self.from_dict(json_data)
        else:
            # store essentials
            self.package_name = package_name
            self.description = description
            self.mpy_version = clean_version(version, drop_v=True)  # Initial version

            self.create_update_pyproject_toml()

            self.stub_sources: StubSources = []
            # save the stub sources
            if stubs:
                self.stub_sources = stubs

        # track the result of the last build/publish operation for reporting
        self.status: Status = Status(
            {
                "result": "-",
                "name": self.package_name,
                "version": self.pkg_version,
                "error": None,
                "path": self.package_path.as_posix(),
            }
        )
        super().__init__(
            package_name=package_name,
            mpy_version=self.mpy_version,
            port=port,
            board=board,
            description=description,
            stubs=self.stub_sources,
        )

    def update_sources(self) -> StubSources:
        """
        Update the stub sources to:
        - FIRMWARE: prefer -merged stubs over bare MCU stubs
        - FROZEN: fallback to use the GENERIC folder for the frozen sources if no board specific folder exists
        """
        updated_sources = []
        # TODO: find a way to simplify this code as this is a bit magic (and hard to understand)
        for stub_type, fw_path in self.stub_sources:
            # prefer -merged stubs over bare MCU stubs
            if stub_type == StubSource.FIRMWARE:
                # Check if -merged folder exists and use that instead
                if fw_path.name.endswith("-merged"):
                    merged_path = fw_path
                else:
                    merged_path = fw_path.with_name(f"{fw_path.name}-merged")
                if (CONFIG.stub_path / merged_path).exists():
                    updated_sources.append((stub_type, merged_path))
                else:
                    updated_sources.append((stub_type, fw_path))
            elif stub_type == StubSource.FROZEN:
                # use if folder exists , else use GENERIC folder
                # NOTE(review): this second check is relative to the cwd, not
                # CONFIG.stub_path (unlike every sibling check) - confirm intended
                if (CONFIG.stub_path / fw_path).exists():
                    updated_sources.append((stub_type, fw_path))
                elif fw_path.with_name("GENERIC").exists():
                    updated_sources.append((stub_type, fw_path.with_name("GENERIC")))
                # NOTE(review): when neither folder exists the FROZEN source is
                # silently dropped from the result - confirm intended
            elif stub_type == StubSource.MERGED:
                # Use the default board folder instead of the GENERIC board folder (if it exists)
                if self.board.upper() == GENERIC_U:
                    family = fw_path.name.split("-")[0]
                    default_path = Path(
                        f"{family}-{clean_version(self.mpy_version, flat=True)}-{self.port}-{default_board(self.port, self.mpy_version)}-merged"
                    )
                    if (CONFIG.stub_path / default_path).exists():
                        fw_path = default_path
                updated_sources.append((stub_type, fw_path))
            # ---------
            else:
                updated_sources.append((stub_type, fw_path))
        return updated_sources

    def update_distribution(self, production: bool) -> bool:
        """Update the package .pyi files, if all the sources are available.

        Returns False (and records the error in self.status) when a source
        folder is missing or any update step raises.
        """
        log.info(f"- Update {self.package_path.name}")
        log.trace(f"{self.package_path.as_posix()}")

        # check if the sources exist
        ok = self.are_package_sources_available()
        if not ok:
            log.debug(f"{self.package_name}: skipping as one or more source stub folders are missing")
            self.status["error"] = "Skipped, stub folder(s) missing"
            # remove the incomplete package folder and disable publishing
            shutil.rmtree(self.package_path.as_posix())
            self._publish = False  # type: ignore
            return False
        try:
            # update to -merged and fallback to GENERIC
            self.stub_sources = self.update_sources()
            self.update_package_files()
            self.update_pyproject_stubs()
            # for a new package the version could be 'latest', which is not a valid semver, so update
            self.pkg_version = self.next_package_version(production)
            return self.check()
        except Exception as e:  # pragma: no cover
            log.error(f"{self.package_name}: {e}")
            self.status["error"] = str(e)
            return False

    def build_distribution(
        self,
        production: bool,  # PyPI or Test-PyPi - USED TO FIND THE NEXT VERSION NUMBER
        force=False,  # BUILD even if no changes
    ) -> bool:  # sourcery skip: default-mutable-arg, extract-duplicate-method, require-parameter-annotation
        """
        Build a package
        look up the previous package version in the database
        - update package files
        - build the wheels and sdist

        :param production: PyPI or Test-PyPi -
        :param force: BUILD even if no changes
        :return: True if the package was built
        """
        log.info(f"Build: {self.package_path.name}")

        ok = self.update_distribution(production)
        self.status["version"] = self.pkg_version
        if not ok:
            log.info(f"{self.package_name}: skip - Could not build/update package")
            if not self.status["error"]:
                self.status["error"] = "Could not build/update package"
            return False

        # If there are changes to the package, then publish it
        if self.is_changed() or force:
            if force:
                log.info(f"Force build: {self.package_name} {self.pkg_version} ")
            else:
                log.info(f"Found changes to package sources: {self.package_name} {self.pkg_version} ")
                log.trace(f"Old hash {self.hash} != New hash {self.calculate_hash()}")
            # Build the distribution files
            old_ver = self.pkg_version
            self.pkg_version = self.next_package_version(production)
            self.status["version"] = self.pkg_version
            # to get the next version
            log.debug(
                f"{self.package_name}: bump version for {old_ver} to {self.pkg_version } {'production' if production else 'test'}"
            )
            self.write_package_json()
            log.trace(f"New hash: {self.package_name} {self.pkg_version} {self.hash}")
            if self.poetry_build():
                self.status["result"] = "Build OK"
            else:
                log.warning(f"{self.package_name}: skipping as build failed")
                self.status["error"] = "Poetry build failed"
                return False
        return True

    def publish_distribution_ifchanged(
        self,
        db: PysonDB,
        *,
        production: bool,  # PyPI or Test-PyPi
        build=False,  #
        force=False,  # publish even if no changes
        dry_run=False,  # do not actually publish
        clean: bool = False,  # clean up afterwards
    ) -> (
        bool
    ):  # sourcery skip: assign-if-exp, default-mutable-arg, extract-method, remove-unnecessary-else, require-parameter-annotation, swap-if-else-branches, swap-if-expression
        """
        Publish a package to PyPi
        look up the previous package version in the database, and only publish if there are changes to the package
        - change determined by hash across all files

        Build
        - update package files
        - build the wheels and sdist
        Publish
        - publish to PyPi
        - update database with new hash
        """
        log.info(f"Publish: {self.package_path.name}")
        # count .pyi files in the package
        filecount = len(list(self.package_path.rglob("*.pyi")))
        if filecount == 0:
            # nothing has been built yet - force a build first
            log.debug(f"{self.package_name}: starting build as no .pyi files found")
            build = True

        if build or force or self.is_changed():
            self.build_distribution(production=production, force=force)

        if not self._publish:
            log.debug(f"{self.package_name}: skip publishing")
            return False

        self.next_package_version(production=production)
        # Publish the package to PyPi, Test-PyPi or Github
        if self.is_changed():
            if self.mpy_version in SET_PREVIEW and production and not force:
                log.warning("version: `latest` package will only be available on Github, and not published to PyPi.")
                self.status["result"] = "Published to GitHub"
            else:
                return self.publish_distribution(dry_run, production, db)
        elif force:
            return self.publish_distribution(dry_run, production, db)
        else:
            log.info(f"No changes to package : {self.package_name} {self.pkg_version}")

        if clean:
            self.clean()
        return True

    def publish_distribution(self, dry_run, production, db):
        """
        Publishes the package to PyPi or Test-PyPi.

        Args:
            dry_run (bool): If True, performs a dry run without actually publishing.
            production (bool): If True, publishes to PyPi. If False, publishes to Test-PyPi.
            db: The database object to save the package state.

        Returns:
            bool: True if the publish was successful, False otherwise.
        """
        self.update_hashes()  # resets is_changed to False
        if not dry_run:
            pub_ok = self.poetry_publish(production=production)
        else:
            log.warning(f"{self.package_name}: Dry run, not publishing to {'' if production else 'Test-'}PyPi")
            pub_ok = True
        if not pub_ok:
            log.warning(f"{self.package_name}: Publish failed for {self.pkg_version}")
            self.status["error"] = "Publish failed"
            return False
        self.status["result"] = "Published to PyPi" if production else "Published to Test-PyPi"
        self.update_hashes()
        if dry_run:
            log.warning(f"{self.package_name}: Dry run, not saving to database")
        else:
            # get the package state and add it to the database
            db.add(self.to_dict())
            db.commit()
        return True

    def are_package_sources_available(self) -> bool:
        """
        Check if (all) the packages sources exist.

        Missing FROZEN sources are not blocking (this port/board may simply not
        have frozen modules); any other missing source marks the package as
        not-buildable and records the error in self.status.
        """
        ok = True
        for stub_type, src_path in self.update_sources():
            if (CONFIG.stub_path / src_path).exists():
                continue
            if stub_type == StubSource.FROZEN:
                # not a blocking issue if there are no frozen stubs, perhaps this port/board does not have any
                continue
            # todo: below is a workaround for different types, but where is the source of this difference coming from?
            msg = (
                f"{self.package_name}: source '{stub_type.value}' not found: {CONFIG.stub_path / src_path}"
                if isinstance(stub_type, StubSource)  # type: ignore
                else f"{self.package_name}: source '{stub_type}' not found: {CONFIG.stub_path / src_path}"
            )
            self.status["error"] = msg
            log.debug(msg)
            ok = False
        return ok