micropython-stubber 1.23.1__py3-none-any.whl → 1.23.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (153)
  1. {micropython_stubber-1.23.1.dist-info → micropython_stubber-1.23.2.dist-info}/LICENSE +30 -30
  2. {micropython_stubber-1.23.1.dist-info → micropython_stubber-1.23.2.dist-info}/METADATA +32 -15
  3. micropython_stubber-1.23.2.dist-info/RECORD +158 -0
  4. micropython_stubber-1.23.2.dist-info/entry_points.txt +5 -0
  5. mpflash/README.md +220 -194
  6. mpflash/libusb_flash.ipynb +203 -203
  7. mpflash/mpflash/add_firmware.py +98 -98
  8. mpflash/mpflash/ask_input.py +236 -236
  9. mpflash/mpflash/basicgit.py +284 -284
  10. mpflash/mpflash/bootloader/__init__.py +2 -2
  11. mpflash/mpflash/bootloader/activate.py +60 -60
  12. mpflash/mpflash/bootloader/detect.py +82 -82
  13. mpflash/mpflash/bootloader/manual.py +101 -101
  14. mpflash/mpflash/bootloader/micropython.py +12 -12
  15. mpflash/mpflash/bootloader/touch1200.py +36 -36
  16. mpflash/mpflash/cli_download.py +129 -129
  17. mpflash/mpflash/cli_flash.py +224 -219
  18. mpflash/mpflash/cli_group.py +111 -111
  19. mpflash/mpflash/cli_list.py +87 -81
  20. mpflash/mpflash/cli_main.py +39 -39
  21. mpflash/mpflash/common.py +210 -165
  22. mpflash/mpflash/config.py +44 -44
  23. mpflash/mpflash/connected.py +96 -78
  24. mpflash/mpflash/download.py +364 -364
  25. mpflash/mpflash/downloaded.py +130 -130
  26. mpflash/mpflash/errors.py +9 -9
  27. mpflash/mpflash/flash/__init__.py +55 -55
  28. mpflash/mpflash/flash/esp.py +59 -59
  29. mpflash/mpflash/flash/stm32.py +19 -19
  30. mpflash/mpflash/flash/stm32_dfu.py +104 -104
  31. mpflash/mpflash/flash/uf2/__init__.py +88 -88
  32. mpflash/mpflash/flash/uf2/boardid.py +15 -15
  33. mpflash/mpflash/flash/uf2/linux.py +136 -130
  34. mpflash/mpflash/flash/uf2/macos.py +42 -42
  35. mpflash/mpflash/flash/uf2/uf2disk.py +12 -12
  36. mpflash/mpflash/flash/uf2/windows.py +43 -43
  37. mpflash/mpflash/flash/worklist.py +170 -170
  38. mpflash/mpflash/list.py +106 -99
  39. mpflash/mpflash/logger.py +41 -41
  40. mpflash/mpflash/mpboard_id/__init__.py +93 -93
  41. mpflash/mpflash/mpboard_id/add_boards.py +251 -251
  42. mpflash/mpflash/mpboard_id/board.py +37 -37
  43. mpflash/mpflash/mpboard_id/board_id.py +86 -86
  44. mpflash/mpflash/mpboard_id/store.py +43 -43
  45. mpflash/mpflash/mpremoteboard/__init__.py +266 -222
  46. mpflash/mpflash/mpremoteboard/mpy_fw_info.py +141 -141
  47. mpflash/mpflash/mpremoteboard/runner.py +140 -140
  48. mpflash/mpflash/vendor/click_aliases.py +91 -91
  49. mpflash/mpflash/vendor/dfu.py +165 -165
  50. mpflash/mpflash/vendor/pydfu.py +605 -605
  51. mpflash/mpflash/vendor/readme.md +2 -2
  52. mpflash/mpflash/versions.py +135 -135
  53. mpflash/poetry.lock +1599 -1599
  54. mpflash/pyproject.toml +65 -65
  55. mpflash/stm32_udev_rules.md +62 -62
  56. stubber/__init__.py +3 -3
  57. stubber/board/board_info.csv +193 -193
  58. stubber/board/boot.py +34 -34
  59. stubber/board/createstubs.py +1004 -986
  60. stubber/board/createstubs_db.py +826 -825
  61. stubber/board/createstubs_db_min.py +332 -331
  62. stubber/board/createstubs_db_mpy.mpy +0 -0
  63. stubber/board/createstubs_lvgl.py +741 -741
  64. stubber/board/createstubs_lvgl_min.py +741 -741
  65. stubber/board/createstubs_mem.py +767 -766
  66. stubber/board/createstubs_mem_min.py +307 -306
  67. stubber/board/createstubs_mem_mpy.mpy +0 -0
  68. stubber/board/createstubs_min.py +295 -294
  69. stubber/board/createstubs_mpy.mpy +0 -0
  70. stubber/board/fw_info.py +141 -141
  71. stubber/board/info.py +183 -183
  72. stubber/board/main.py +19 -19
  73. stubber/board/modulelist.txt +247 -247
  74. stubber/board/pyrightconfig.json +34 -34
  75. stubber/bulk/mcu_stubber.py +437 -454
  76. stubber/codemod/_partials/__init__.py +48 -48
  77. stubber/codemod/_partials/db_main.py +147 -147
  78. stubber/codemod/_partials/lvgl_main.py +77 -77
  79. stubber/codemod/_partials/modules_reader.py +80 -80
  80. stubber/codemod/add_comment.py +53 -53
  81. stubber/codemod/add_method.py +65 -65
  82. stubber/codemod/board.py +317 -317
  83. stubber/codemod/enrich.py +151 -145
  84. stubber/codemod/merge_docstub.py +284 -284
  85. stubber/codemod/modify_list.py +54 -54
  86. stubber/codemod/utils.py +56 -56
  87. stubber/commands/build_cmd.py +94 -94
  88. stubber/commands/cli.py +49 -55
  89. stubber/commands/clone_cmd.py +78 -78
  90. stubber/commands/config_cmd.py +29 -29
  91. stubber/commands/enrich_folder_cmd.py +71 -71
  92. stubber/commands/get_core_cmd.py +71 -71
  93. stubber/commands/get_docstubs_cmd.py +92 -89
  94. stubber/commands/get_frozen_cmd.py +117 -114
  95. stubber/commands/get_mcu_cmd.py +102 -61
  96. stubber/commands/merge_cmd.py +66 -66
  97. stubber/commands/publish_cmd.py +118 -118
  98. stubber/commands/stub_cmd.py +31 -31
  99. stubber/commands/switch_cmd.py +62 -62
  100. stubber/commands/variants_cmd.py +48 -48
  101. stubber/cst_transformer.py +178 -178
  102. stubber/data/board_info.csv +193 -193
  103. stubber/data/board_info.json +1729 -1729
  104. stubber/data/micropython_tags.csv +15 -15
  105. stubber/data/requirements-core-micropython.txt +38 -38
  106. stubber/data/requirements-core-pycopy.txt +39 -39
  107. stubber/downloader.py +37 -36
  108. stubber/freeze/common.py +72 -68
  109. stubber/freeze/freeze_folder.py +69 -69
  110. stubber/freeze/freeze_manifest_2.py +126 -113
  111. stubber/freeze/get_frozen.py +131 -127
  112. stubber/get_cpython.py +112 -101
  113. stubber/get_lobo.py +59 -59
  114. stubber/minify.py +423 -419
  115. stubber/publish/bump.py +86 -86
  116. stubber/publish/candidates.py +275 -256
  117. stubber/publish/database.py +18 -18
  118. stubber/publish/defaults.py +40 -40
  119. stubber/publish/enums.py +24 -24
  120. stubber/publish/helpers.py +29 -29
  121. stubber/publish/merge_docstubs.py +136 -130
  122. stubber/publish/missing_class_methods.py +51 -49
  123. stubber/publish/package.py +150 -146
  124. stubber/publish/pathnames.py +51 -51
  125. stubber/publish/publish.py +120 -120
  126. stubber/publish/pypi.py +42 -38
  127. stubber/publish/stubpackage.py +1055 -1027
  128. stubber/rst/__init__.py +9 -9
  129. stubber/rst/classsort.py +78 -77
  130. stubber/rst/lookup.py +533 -530
  131. stubber/rst/output_dict.py +401 -401
  132. stubber/rst/reader.py +814 -814
  133. stubber/rst/report_return.py +77 -69
  134. stubber/rst/rst_utils.py +541 -540
  135. stubber/stubber.py +38 -38
  136. stubber/stubs_from_docs.py +90 -90
  137. stubber/tools/manifestfile.py +654 -654
  138. stubber/tools/readme.md +6 -6
  139. stubber/update_fallback.py +117 -117
  140. stubber/update_module_list.py +123 -123
  141. stubber/utils/__init__.py +6 -6
  142. stubber/utils/config.py +137 -125
  143. stubber/utils/makeversionhdr.py +54 -54
  144. stubber/utils/manifest.py +90 -90
  145. stubber/utils/post.py +80 -79
  146. stubber/utils/repos.py +156 -150
  147. stubber/utils/stubmaker.py +139 -139
  148. stubber/utils/typed_config_toml.py +80 -77
  149. stubber/variants.py +106 -106
  150. micropython_stubber-1.23.1.dist-info/RECORD +0 -159
  151. micropython_stubber-1.23.1.dist-info/entry_points.txt +0 -3
  152. mpflash/basicgit.py +0 -288
  153. {micropython_stubber-1.23.1.dist-info → micropython_stubber-1.23.2.dist-info}/WHEEL +0 -0
@@ -1,1027 +1,1055 @@
1
- """Create a stub-only package for a specific version of micropython"""
2
-
3
- import hashlib
4
- import json
5
- import shutil
6
- import subprocess
7
- from pathlib import Path
8
- from typing import Any, Dict, List, Optional, Tuple, Union
9
-
10
- import tenacity
11
-
12
- from mpflash.basicgit import get_git_describe
13
- from stubber.publish.helpers import get_module_docstring
14
-
15
- try:
16
- import tomllib # type: ignore
17
- except ModuleNotFoundError:
18
- import tomli as tomllib # type: ignore
19
-
20
- from typing import NewType
21
-
22
- import tomli_w
23
- from loguru import logger as log
24
- from packaging.version import Version, parse
25
- from pysondb import PysonDB
26
-
27
- from mpflash.versions import SET_PREVIEW, V_PREVIEW, clean_version
28
- from stubber.publish.bump import bump_version
29
- from stubber.publish.defaults import GENERIC_U, default_board
30
- from stubber.publish.enums import StubSource
31
- from stubber.publish.pypi import Version, get_pypi_versions
32
- from stubber.utils.config import CONFIG
33
-
34
# Status maps step/field names to an optional string result for a publish run.
Status = NewType("Status", Dict[str, Union[str, None]])
# Ordered list of (stub source kind, path) pairs; order matters as "last copy wins".
StubSources = List[Tuple[StubSource, Path]]

# indicates which stubs will be skipped when copying for these stub sources
STUBS_COPY_FILTER = {
    StubSource.FROZEN: [
        "espnow",  # merged stubs + documentation of the espnow module is better than the info in the frozen stubs
    ],
    StubSource.FIRMWARE: [
        "builtins",
        "collections",  # collections must be in stdlib
    ],
    StubSource.MERGED: [
        "collections",  # collections must be in stdlib
    ],
}

# these modules will be replaced by a simple import statement to import from stdlib
STDLIB_UMODULES = ["ucollections"]
53
-
54
-
55
class VersionedPackage(object):
    """
    Represents a versioned package.

    Attributes:
        package_name (str): The name of the package.
        mpy_version (str): The MicroPython version.

    Methods:
        is_preview(self): Checks if the package is a preview version.
        pkg_version (property): Gets/sets the version of the package.
        next_package_version(self, production: bool) -> str: Gets the next version for the package.
        bump(self, *, rc: int = 0) -> str: Bumps the postrelease version of the package.
    """

    def __init__(self, package_name: str, *, mpy_version: str):
        super().__init__()
        self.package_name: str = package_name
        self.mpy_version: str = mpy_version
        self._pkg_version: str = mpy_version

    def __str__(self) -> str:
        return f"{self.package_name}=={self.mpy_version}"

    def __repr__(self) -> str:
        return f"{self.package_name}=={self.mpy_version}"

    def __eq__(self, o: object) -> bool:
        return str(self) == str(o)

    def __hash__(self) -> int:
        return hash(str(self))

    @property
    def pkg_version(self) -> str:
        "return the version of the package"
        return self._pkg_version

    @pkg_version.setter
    def pkg_version(self, version: str) -> None:
        "set the version of the package"
        self._pkg_version = version

    def next_package_version(self, production: bool) -> str:
        """Get the next version for the package"""
        if self.is_preview():
            return self._get_next_preview_package_version(production)
        return self._get_next_package_version(production)

    def is_preview(self):
        """True if the MicroPython version is a preview ('-preview' / legacy 'latest')."""
        return self.mpy_version in SET_PREVIEW or V_PREVIEW in self.mpy_version

    def _get_next_preview_package_version(self, production: bool = False) -> str:
        """
        Get the next prerelease version for the package.
        this is used for preview versions of micropython (-preview, formerly known as 'latest')

        Note: `production` is currently unused; kept for signature symmetry with
        `_get_next_package_version`.
        """
        rc = 1
        if not (describe := get_git_describe(CONFIG.mpy_path.as_posix())):
            return "99.99.99post99"
        # use versiontag and the number of commits since the last tag
        # "v1.19.1-841-g3446"
        # 'v1.20.0-dirty'
        # 'v1.22.0-preview-19-g8eb7721b4'
        parts = describe.split("-", 3)
        ver = parts[0]
        if len(parts) > 1:
            rc = parts[1] if parts[1].isdigit() else parts[2] if len(parts) > 2 and parts[2].isdigit() else 1
            rc = int(rc)
        # Fix: guard the parts[1] access — a clean tag such as 'v1.20.0' has no
        # suffix and previously raised IndexError here.
        if len(parts) > 1 and parts[1] == V_PREVIEW:
            base = Version(ver)
        else:
            base = bump_version(Version(ver), minor_bump=True)
        return str(bump_version(base, rc=rc))

    def _get_next_package_version(self, prod: bool = False, rc=False) -> str:
        """Get the next version for the package."""
        base = Version(self.pkg_version)
        if pypi_versions := get_pypi_versions(self.package_name, production=prod, base=base):
            # get the latest version from pypi
            self.pkg_version = str(pypi_versions[-1])
        else:
            # no published package found , so we start at base version then bump 1 post release
            self.pkg_version = Version(self.pkg_version).base_version
        return self.bump()

    def bump(self, *, rc: int = 0) -> str:
        """
        bump the postrelease version of the package, and write the change to disk
        if rc >= 1, the version is bumped to the specified release candidate
        """
        try:
            current = Version(self.pkg_version)
            assert isinstance(current, Version)
            # bump the version
            self.pkg_version = str(bump_version(post_bump=True, current=current, rc=rc))
        except Exception as e:  # pragma: no cover
            log.error(f"Error: {e}")
        return self.pkg_version
158
-
159
-
160
class Builder(VersionedPackage):
    """
    Builder class for creating and updating MicroPython stub packages.

    Args:
        package_name (str): The name of the package.
        mpy_version (str, optional): The version of MicroPython. Defaults to "0.0.1".
        port (str): The port for the package.
        board (str, optional): The board for the package. Defaults to GENERIC_U.
        description (str, optional): The description of the package. Defaults to "MicroPython stubs".
        stubs (Optional[StubSources], optional): The stub sources for the package. Defaults to None.

    Attributes:
        package_name, mpy_version, port, board, description: as passed in.
        stub_sources (StubSources): The stub sources for the package.
        hash: SHA1 hash of all the files in the package (None until calculated).
        stub_hash: SHA1 hash of the stub (.pyi) files only (None until calculated).

    Properties:
        package_path (Path): The package path, relative to the publish folder.
        toml_path (Path): The path to the `pyproject.toml` file.
        pyproject (Union[Dict[str, Any], None]): The parsed pyproject.toml or None.
    """

    # BUF_SIZE is totally arbitrary,
    BUF_SIZE = 65536 * 16  # lets read stuff in 16 x 64kb chunks!

    def __init__(
        self,
        package_name: str,
        *,
        mpy_version: str = "0.0.1",
        port: str,
        board: str = GENERIC_U,
        description: str = "MicroPython stubs",
        stubs: Optional[StubSources] = None,
    ):
        super().__init__(package_name=package_name, mpy_version=mpy_version)
        self._publish = True  # intended for publishing
        self.package_name = package_name
        self.mpy_version = mpy_version
        self.port = port
        self.board = board
        self.description = description
        self.stub_sources = stubs or []
        self.hash = None  # initial hash of all the files in the package
        self.stub_hash = None  # initial hash of all .pyi files

    @property
    def package_path(self) -> Path:
        "package path based on the package name and version and relative to the publish folder"
        parts = self.package_name.split("-")
        # insert the flattened version after the first name part, e.g. micropython-1_20_0-esp32-stubs
        parts[1:1] = [clean_version(self.mpy_version, flat=True)]
        return CONFIG.publish_path / "-".join(parts)

    @property
    def toml_path(self) -> Path:
        "the path to the `pyproject.toml` file"
        # todo: make sure this is always relative to the root path
        return self.package_path / "pyproject.toml"

    # -----------------------------------------------
    @property
    def pyproject(self) -> Union[Dict[str, Any], None]:
        "parsed pyproject.toml or None"
        pyproject = None
        _toml = self.toml_path
        if _toml.exists():
            with open(_toml, "rb") as f:
                pyproject = tomllib.load(f)
        return pyproject

    @pyproject.setter
    def pyproject(self, pyproject: Dict) -> None:
        # check if the result is a valid toml file before writing anything
        try:
            tomllib.loads(tomli_w.dumps(pyproject))
        except tomllib.TOMLDecodeError as e:
            print("Could not create a valid TOML file")
            raise (e)
        # make sure parent folder exists
        _toml = self.toml_path
        _toml.parent.mkdir(parents=True, exist_ok=True)
        with open(_toml, "wb") as output:
            tomli_w.dump(pyproject, output)

    # -----------------------------------------------
    def create_update_pyproject_toml(self) -> None:
        """
        create or update/overwrite a `pyproject.toml` file by combining a template file
        with the given parameters.
        """
        raise NotImplementedError("create_update_pyproject_toml not implemented")

    # -----------------------------------------------

    def check(self) -> bool:
        """Check if the package is valid, to be implemented by the subclass"""
        return True

    def clean(self) -> None:
        """
        Remove the stub files from the package folder

        This is used before update the stub package, to avoid lingering stub files,
        and after the package has been built, to avoid needing to store files multiple times.

        `.gitignore` cannot be used as this will prevent poetry from processing the files.
        """
        # remove all *.py and *.pyi files in the folder
        for wc in ["*.py", "*.pyi", "modules.json"]:
            for f in self.package_path.rglob(wc):
                f.unlink()

    def copy_stubs(self) -> None:
        """
        Copy files from all listed stub folders to the package folder
        the order of the stub folders is relevant as "last copy wins"

        - 1 - Copy all MCU stubs/merged to the package folder
        - 2 - copy the remaining stubs to the package folder
        - 3 - remove *.py files from the package folder
        """
        try:
            # Check if all stub source folders exist
            for _stub_type, src_path in self.stub_sources:
                if not (CONFIG.stub_path / src_path).exists():
                    raise FileNotFoundError(f"Could not find stub source folder {CONFIG.stub_path / src_path}")

            # 1 - Copy the stubs to the package, directly in the package folder (no folders)
            for stub_type, src_path in self.stub_sources:
                try:
                    log.debug(f"Copying {stub_type} from {src_path}")
                    self.copy_folder(stub_type, src_path)
                except OSError as e:
                    # frozen stubs are optional; all other sources must exist
                    if stub_type != StubSource.FROZEN:
                        raise FileNotFoundError(f"Could not find stub source folder {src_path}") from e
                    else:
                        log.debug(f"Error copying stubs from : {CONFIG.stub_path / src_path}, {e}")
        finally:
            # 3 - clean up a little bit
            # delete all the .py files in the package folder if there is a corresponding .pyi file
            for f in self.package_path.rglob("*.py"):
                if f.with_suffix(".pyi").exists():
                    f.unlink()
            self.update_umodules()

    def update_umodules(self):
        """
        Replace the STDLIB umodules with a simple import statement
        in order to allow the typecheckers to resolve the stdlib modules in the usual stdlib location.
        """
        for f in self.package_path.rglob("*.pyi"):
            if f.stem in STDLIB_UMODULES:
                # read the docstring of the module
                docstring = get_module_docstring(f) or ""
                comment = "# import module from stdlib/module"
                # replace the file with a simple import statement (strip the leading 'u')
                f.write_text(f'"""\n{docstring}\n"""\n{comment}\nfrom {f.stem[1:]} import *')

    def copy_folder(self, stub_type: StubSource, src_path: Path):
        """Copy all files from one stub source folder into the package folder, applying the copy filter."""
        Path(self.package_path).mkdir(parents=True, exist_ok=True)
        for item in (CONFIG.stub_path / src_path).rglob("*"):
            if item.is_file():
                # filter the 'poorly' decorated files
                if stub_type in STUBS_COPY_FILTER and item.stem in STUBS_COPY_FILTER[stub_type]:
                    continue

                target = Path(self.package_path) / item.relative_to(CONFIG.stub_path / src_path)
                target.parent.mkdir(parents=True, exist_ok=True)
                target.write_bytes(item.read_bytes())

    def update_package_files(self) -> None:
        """
        Update the stub-only package for a specific version of micropython
        - cleans the package folder
        - copies the stubs from the list of stubs.
        - creates/updates the readme and the license file
        """
        # create the package folder
        self.package_path.mkdir(parents=True, exist_ok=True)
        self.clean()  # Delete any previous *.py? files
        self.copy_stubs()
        self.create_readme()
        self.create_license()

    def write_package_json(self) -> None:
        """write the package.json file to disk"""
        # make sure folder exists
        if not self.package_path.exists():
            self.package_path.mkdir(parents=True, exist_ok=True)
        # write the json to a file
        with open(self.package_path / "package.json", "w") as f:
            json.dump(self.to_dict(), f, indent=4)

    def to_dict(self) -> dict:
        """return the package as a dict to store in the jsondb

        need to simplify some of the Objects to allow serialization to json
        - the paths to posix paths
        - the version (semver) to a string
        - toml file to list of lines

        """
        return {
            "name": self.package_name,
            "mpy_version": self.mpy_version,
            "publish": self._publish,
            "pkg_version": str(self.pkg_version),
            "path": self.package_path.name,  # only store the folder name , as it is relative to the publish folder
            "stub_sources": [(name, Path(path).as_posix()) for (name, path) in self.stub_sources],
            "description": self.description,
            "hash": self.hash,
            "stub_hash": self.stub_hash,
        }

    def from_dict(self, json_data: Dict) -> None:
        """load the package from a dict (from the jsondb)"""
        self.package_name = json_data["name"]
        self.description = json_data["description"]
        self.mpy_version = json_data["mpy_version"]
        self._publish = json_data["publish"]
        self.hash = json_data["hash"]
        self.stub_hash = json_data["stub_hash"]
        # create folder
        if not self.package_path.exists():
            self.package_path.mkdir(parents=True, exist_ok=True)
        # create the pyproject.toml file
        self.create_update_pyproject_toml()
        # set pkg version after creating the toml file
        self.pkg_version = json_data["pkg_version"]
        self.stub_sources = []
        for name, path in json_data["stub_sources"]:
            # older entries stored the path including the 'stubs/' prefix; strip it
            if path.startswith("stubs/"):
                path = path.replace("stubs/", "")
            self.stub_sources.append((name, Path(path)))

    def calculate_hash(self, include_md: bool = True) -> str:
        """
        Create a SHA1 hash of all files in the package, excluding the pyproject.toml file itself.
        the hash is based on the content of the .py/.pyi and .md files in the package.
        if include_md is False, the .md files are not hashed, allowing the files in the packages to be compared simply
        As a single hash is created across all files, the files are sorted prior to hashing to ensure that the hash is stable.

        Note: A changed hash will not indicate which of the files in the package have been changed.
        """
        file_hash = hashlib.sha1()
        # Stubs Only
        files = list((self.package_path).rglob("**/*.pyi"))
        if include_md:
            files += (
                [self.package_path / "LICENSE.md"]
                + [self.package_path / "README.md"]
                # do not include [self.toml_file]
            )
        for file in sorted(files):
            try:
                # retry on file not found
                self.add_file_hash(file, file_hash)
            except FileNotFoundError:
                log.warning(f"File not found {file}")
                # ignore file not found errors to allow the hash to be created WHILE GIT / VIRUS SCANNERS HOLD LINGERING FILES
        return file_hash.hexdigest()

    @tenacity.retry(wait=tenacity.wait_fixed(0.2), stop=tenacity.stop_after_attempt(3))
    def add_file_hash(self, file, file_hash):
        """
        Adds the hash of a file to the given file hash object.
        If an error occurs, the file is retried up to 3 times with a 0.2 second delay

        Args:
            file (str): The path to the file.
            file_hash (hashlib._Hash): The file hash object to update.

        Returns:
            None
        """
        with open(file, "rb") as f:
            while True:
                if data := f.read(Builder.BUF_SIZE):
                    file_hash.update(data)
                else:
                    break

    def update_hashes(self, ret=False) -> None:
        """Update the package hashes. Resets is_changed() to False"""
        # `ret` is unused but kept for backward compatibility with existing callers
        self.hash = self.calculate_hash()
        self.stub_hash = self.calculate_hash(include_md=False)

    def is_changed(self, include_md: bool = True) -> bool:
        """Check if the package has changed, based on the current and the stored hash.
        The default checks the hash of all files, including the .md files.
        """
        current = self.calculate_hash(include_md=include_md)
        stored = self.hash if include_md else self.stub_hash
        # fix: log the same comparison that is returned (was: always compared self.hash)
        log.trace(f"changed = {stored != current} | Stored: {stored} | Current: {current}")
        return stored != current

    def create_license(self) -> None:
        """
        Create a license file for the package
        - copied from the template license file
        """
        # copy the license file from the template to the package folder
        # option : append other license files
        shutil.copy(CONFIG.template_path / "LICENSE.md", self.package_path)

    def create_readme(self) -> None:
        """
        Create a readme file for the package
        - based on the template readme file
        - with a list of all included stub folders added to it (not the individual stub-files)
        """
        # read the readme file and update the version and description
        with open(CONFIG.template_path / "README.md", "r") as f:
            TEMPLATE_README = f.read()

        # add a readme with the names of the stub-folders

        # read informations from firmware_stubs.json
        firmware_stubs = {}
        doc_stubs = {}
        core_stubs = {}
        try:
            with open(self.package_path / "firmware_stubs.json", "r") as f:
                firmware_stubs = json.load(f)
            with open(self.package_path / "doc_stubs.json", "r") as f:
                doc_stubs = json.load(f)
            with open(self.package_path / "modules.json", "r") as f:
                core_stubs = json.load(f)
        except FileNotFoundError:
            pass

        # Prettify this by merging with template text
        with open(self.package_path / "README.md", "w") as f:
            f.write(f"# {self.package_name}\n\n")
            f.write(TEMPLATE_README)
            f.write("Included stubs:\n")
            for name, folder in self.stub_sources:
                f.write(f"* {name} from `stubs/{Path(folder).as_posix()}`\n")

            f.write("\n\n")
            f.write("origin | Family | Port | Board | Version\n")
            f.write("-------|--------|------|-------|--------\n")
            # each of the json files is optional; missing keys are silently skipped
            try:
                f.write(
                    f"Firmware | {firmware_stubs['firmware']['family']} | {firmware_stubs['firmware']['port']} | {firmware_stubs['firmware']['machine']} | {clean_version(firmware_stubs['firmware']['version'])} \n"
                )
            except Exception:
                pass
            try:
                f.write(
                    f"Documentation | {doc_stubs['firmware']['family']} | {doc_stubs['firmware']['port']} | - | {clean_version(doc_stubs['firmware']['version'])} \n"
                )
            except Exception:
                pass
            try:
                f.write(
                    f"Core | {core_stubs['firmware']['family']} | {core_stubs['firmware']['port']} | - | {clean_version(core_stubs['firmware']['version'])} \n"
                )
            except Exception:
                pass
546
-
547
-
548
- class PoetryBuilder(Builder):
549
- """
550
- Build a package using Poetry
551
- """
552
-
553
    def __init__(
        self,
        package_name: str,
        *,
        port: str,
        mpy_version: str = "0.0.1",
        board: str = GENERIC_U,
        description: str = "MicroPython stubs",
        stubs: Optional[StubSources] = None,
        json_data: Optional[Dict[str, Any]] = None,  # accepted for API compatibility; not forwarded to Builder
    ):
        """Initialize a PoetryBuilder; all real initialization is delegated to Builder."""
        super().__init__(
            package_name=package_name,
            mpy_version=mpy_version,
            port=port,
            board=board,
            description=description,
            stubs=stubs,
        )
572
-
573
- # -----------------------------------------------
574
- # get and set the version of the package directly from the toml file
575
    @property
    def pkg_version(self) -> str:
        "return the version of the package"
        # read the version from the toml file
        _toml = self.toml_path
        if not _toml.exists():
            # no pyproject.toml yet: fall back to the MicroPython version
            return self.mpy_version
        with open(_toml, "rb") as f:
            pyproject = tomllib.load(f)
        ver = pyproject["tool"]["poetry"]["version"]
        # normalize via packaging.parse, except for preview markers which must stay verbatim
        return str(parse(ver)) if ver not in SET_PREVIEW else ver
586
-
587
    @pkg_version.setter
    def pkg_version(self, version: str) -> None:
        # sourcery skip: remove-unnecessary-cast
        "set the version of the package"
        if not isinstance(version, str):  # type: ignore
            # callers sometimes pass a packaging.Version; persist it as a string
            version = str(version)
        # read the current file
        _toml = self.toml_path
        try:
            with open(_toml, "rb") as f:
                pyproject = tomllib.load(f)
            pyproject["tool"]["poetry"]["version"] = version
            # update the version in the toml file
            with open(_toml, "wb") as output:
                tomli_w.dump(pyproject, output)
        except FileNotFoundError as e:
            # unlike the getter, writing requires the toml file to already exist
            raise FileNotFoundError(f"pyproject.toml file not found at {_toml}") from e
604
-
605
- # -----------------------------------------------
606
-
607
    def poetry_build(self) -> bool:
        """build the package by running `poetry build`; returns True on success"""
        return self.run_poetry(["build", "-vvv"])
610
-
611
- def poetry_publish(self, production: bool = False) -> bool:
612
- if not self._publish:
613
- log.warning(f"Publishing is disabled for {self.package_name}")
614
- return False
615
- # update the package info
616
- self.write_package_json()
617
- if production:
618
- log.debug("Publishing to PRODUCTION https://pypy.org")
619
- params = ["publish"]
620
- else:
621
- log.debug("Publishing to TEST-PyPi https://test.pypy.org")
622
- params = ["publish", "-r", "test-pypi"]
623
- r = self.run_poetry(params)
624
- print("") # add a newline after the output
625
- return r
626
-
627
- def run_poetry(self, parameters: List[str]) -> bool:
628
- """Run a poetry commandline in the package folder.
629
- Note: this may write some output to the console ('All set!')
630
- """
631
- # check for pyproject.toml in folder
632
- if not (self.package_path / "pyproject.toml").exists(): # pragma: no cover
633
- log.error(f"No pyproject.toml file found in {self.package_path}")
634
- return False
635
- # todo: call poetry directly to improve error handling
636
- try:
637
- log.debug(f"poetry {parameters} starting")
638
- subprocess.run(
639
- ["poetry"] + parameters,
640
- cwd=self.package_path,
641
- check=True,
642
- # stdout=subprocess.PIPE,
643
- stdout=subprocess.PIPE, # interestingly: errors on stdout , output on stderr .....
644
- universal_newlines=True,
645
- encoding="utf-8",
646
- )
647
- log.trace(f"poetry {parameters} completed")
648
- except (NotADirectoryError, FileNotFoundError) as e: # pragma: no cover # InvalidVersion
649
- log.error("Exception on process, {}".format(e))
650
- return False
651
- except subprocess.CalledProcessError as e: # pragma: no cover
652
- # Detect and log error detection om upload
653
- # UploadError
654
- # HTTP Error 400: File already exists. See https://test.pypi.org/help/#file-name-reuse for more information.
655
- # TODO: how to return the state so it can be handled
656
- print() # linefeed after output
657
- errors = [l for l in e.stdout.splitlines()[1:7] if "Error" in l]
658
- for e in errors:
659
- log.error(e)
660
-
661
- # log.error("Exception on process, {}".format(e))
662
- return False
663
- return True
664
-
665
- def check(self) -> bool:
666
- """check if the package is valid by running `poetry check`
667
- Note: this will write some output to the console ('All set!')
668
- """
669
- return self.run_poetry(["check", "-vvv"])
670
-
671
- def create_update_pyproject_toml(self) -> None:
672
- """
673
- create or update/overwrite a `pyproject.toml` file by combining a template file
674
- with the given parameters.
675
- and updating it with the pyi files included
676
- """
677
- if (self.toml_path).exists():
678
- # do not overwrite the version of a pre-existing file
679
- _pyproject = self.pyproject
680
- assert _pyproject is not None
681
- # clear out the packages section
682
- _pyproject["tool"]["poetry"]["packages"] = []
683
- # update the dependencies section by reading these from the template file
684
- with open(CONFIG.template_path / "pyproject.toml", "rb") as f:
685
- tpl = tomllib.load(f)
686
- _pyproject["tool"]["poetry"]["dependencies"] = tpl["tool"]["poetry"]["dependencies"]
687
-
688
- else:
689
- # read the template pyproject.toml file from the template folder
690
- try:
691
- with open(CONFIG.template_path / "pyproject.toml", "rb") as f:
692
- _pyproject = tomllib.load(f)
693
- # note: can be 'latest' which is not semver
694
- _pyproject["tool"]["poetry"]["version"] = self.mpy_version
695
- except FileNotFoundError as e:
696
- log.error(f"Could not find template pyproject.toml file {e}")
697
- raise (e)
698
-
699
- # update the name , version and description of the package
700
- _pyproject["tool"]["poetry"]["name"] = self.package_name
701
- _pyproject["tool"]["poetry"]["description"] = self.description
702
- # write out the pyproject.toml file
703
- self.pyproject = _pyproject
704
-
705
- def update_pyproject_stubs(self) -> int:
706
- "Add the stub files to the pyproject.toml file"
707
- _pyproject = self.pyproject
708
- assert _pyproject is not None, "No pyproject.toml file found"
709
- _pyproject["tool"]["poetry"]["packages"] = [
710
- {"include": p.relative_to(self.package_path).as_posix()} for p in sorted((self.package_path).rglob("*.pyi"))
711
- ]
712
- # write out the pyproject.toml file
713
- self.pyproject = _pyproject
714
- return len(_pyproject["tool"]["poetry"]["packages"])
715
-
716
-
717
- class StubPackage(PoetryBuilder):
718
- """
719
- Create a stub-only package for a specific version , port and board of micropython
720
-
721
- properties:
722
- - toml_path - the path to the `pyproject.toml` file
723
- - package_path - the path to the folder where the package info will be stored ('./publish').
724
- - pkg_version - the version of the package as used on PyPi (semver). Is stored directly in the `pyproject.toml` file
725
- - pyproject - the contents of the `pyproject.toml` file
726
-
727
- methods:
728
- - from_json - load the package from json
729
- - to_json - return the package as json
730
-
731
- - create_update_pyproject_toml - create or update the `pyproject.toml` file
732
- - create_readme - create the readme file
733
- - create_license - create the license file
734
- - copy_stubs - copy the stubs to the package folder
735
- - update_included_stubs - update the included stubs in the `pyproject.toml` file
736
- - create_hash - create a hash of the package files
737
-
738
- - update_package_files - combines clean, copy, and create reeadme & updates
739
- """
740
-
741
- def __init__(
742
- self,
743
- package_name: str,
744
- port: str,
745
- *,
746
- board: str = GENERIC_U,
747
- version: str = "0.0.1",
748
- description: str = "MicroPython stubs",
749
- stubs: Optional[StubSources] = None,
750
- json_data: Optional[Dict[str, Any]] = None,
751
- ):
752
- """
753
- Create a stub-only package for a specific version of micropython
754
- parameters:
755
-
756
- - package_name - the name of the package as used on PyPi
757
- - version - the version of the package as used on PyPi (semver)
758
- - description
759
- - stubs - a list of tuples (name, path) of the stubs to copy
760
- - json_data - Optional: a json databse record that will be used to create the package from.
761
- When `json_data` is provided, the version, description and stubs parameters are ignored
762
-
763
- paths:
764
- ROOT_PATH - the root path of the project ('./')
765
- PUBLISH_PATH - root-relative path to the folder where the package info will be stored ('./publish').
766
- TEMPLATE_PATH - root-relative path to the folder where the template files are stored ('./publish/template').
767
- STUB_PATH - root-relative path to the folder where the stubs are stored ('./stubs').
768
-
769
- """
770
- self.port = port
771
- self.board = board
772
- if json_data is not None:
773
- self.from_dict(json_data)
774
- else:
775
- # store essentials
776
- self.package_name = package_name
777
- self.description = description
778
- self.mpy_version = clean_version(version, drop_v=True) # Initial version
779
-
780
- self.create_update_pyproject_toml()
781
-
782
- self.stub_sources: StubSources = []
783
- # save the stub sources
784
- if stubs:
785
- self.stub_sources = stubs
786
-
787
- self.status: Status = Status(
788
- {
789
- "result": "-",
790
- "name": self.package_name,
791
- "version": self.pkg_version,
792
- "error": None,
793
- "path": self.package_path.as_posix(),
794
- }
795
- )
796
- super().__init__(
797
- package_name=package_name,
798
- mpy_version=self.mpy_version,
799
- port=port,
800
- board=board,
801
- description=description,
802
- stubs=self.stub_sources,
803
- )
804
-
805
- def update_sources(self) -> StubSources:
806
- """
807
- Update the stub sources to:
808
- - FIRMWARE: prefer -merged stubs over bare MCU stubs
809
- - FROZEN: fallback to use the GENERIC folder for the frozen sources if no board specific folder exists
810
- """
811
- updated_sources = []
812
- # TODO: find a way to simplify this code as this is a bit magic (and hard to understand)
813
- for stub_type, fw_path in self.stub_sources:
814
- # prefer -merged stubs over bare MCU stubs
815
- if stub_type == StubSource.FIRMWARE:
816
- # Check if -merged folder exists and use that instead
817
- if fw_path.name.endswith("-merged"):
818
- merged_path = fw_path
819
- else:
820
- merged_path = fw_path.with_name(f"{fw_path.name}-merged")
821
- if (CONFIG.stub_path / merged_path).exists():
822
- updated_sources.append((stub_type, merged_path))
823
- else:
824
- updated_sources.append((stub_type, fw_path))
825
- elif stub_type == StubSource.FROZEN:
826
- # use if folder exists , else use GENERIC folder
827
- if (CONFIG.stub_path / fw_path).exists():
828
- updated_sources.append((stub_type, fw_path))
829
- elif fw_path.with_name("GENERIC").exists():
830
- updated_sources.append((stub_type, fw_path.with_name("GENERIC")))
831
- elif stub_type == StubSource.MERGED:
832
- # Use the default board folder instead of the GENERIC board folder (if it exists)
833
- if self.board.upper() == GENERIC_U:
834
- family = fw_path.name.split("-")[0]
835
- default_path = Path(
836
- f"{family}-{clean_version(self.mpy_version, flat=True)}-{self.port}-{default_board(self.port, self.mpy_version)}-merged"
837
- )
838
- if (CONFIG.stub_path / default_path).exists():
839
- fw_path = default_path
840
- updated_sources.append((stub_type, fw_path))
841
- # ---------
842
- else:
843
- updated_sources.append((stub_type, fw_path))
844
- return updated_sources
845
-
846
- def update_distribution(self, production: bool) -> bool:
847
- """Update the package .pyi files, if all the sources are available"""
848
- log.info(f"- Update {self.package_path.name}")
849
- log.trace(f"{self.package_path.as_posix()}")
850
-
851
- # check if the sources exist
852
- ok = self.are_package_sources_available()
853
- if not ok:
854
- log.debug(f"{self.package_name}: skipping as one or more source stub folders are missing")
855
- self.status["error"] = "Skipped, stub folder(s) missing"
856
- shutil.rmtree(self.package_path.as_posix())
857
- self._publish = False # type: ignore
858
- return False
859
- try:
860
- # update to -merged and fallback to GENERIC
861
- self.stub_sources = self.update_sources()
862
- self.update_package_files()
863
- self.update_pyproject_stubs()
864
- # for a new package the version could be 'latest', which is not a valid semver, so update
865
- self.pkg_version = self.next_package_version(production)
866
- return self.check()
867
- except Exception as e: # pragma: no cover
868
- log.error(f"{self.package_name}: {e}")
869
- self.status["error"] = str(e)
870
- return False
871
-
872
- def build_distribution(
873
- self,
874
- production: bool, # PyPI or Test-PyPi - USED TO FIND THE NEXT VERSION NUMBER
875
- force=False, # BUILD even if no changes
876
- ) -> bool: # sourcery skip: default-mutable-arg, extract-duplicate-method, require-parameter-annotation
877
- """
878
- Build a package
879
- look up the previous package version in the dabase
880
- - update package files
881
- - build the wheels and sdist
882
-
883
- :param production: PyPI or Test-PyPi -
884
- :param force: BUILD even if no changes
885
- :return: True if the package was built
886
- """
887
- log.info(f"Build: {self.package_path.name}")
888
-
889
- ok = self.update_distribution(production)
890
- self.status["version"] = self.pkg_version
891
- if not ok:
892
- log.info(f"{self.package_name}: skip - Could not build/update package")
893
- if not self.status["error"]:
894
- self.status["error"] = "Could not build/update package"
895
- return False
896
-
897
- # If there are changes to the package, then publish it
898
- if self.is_changed() or force:
899
- if force:
900
- log.info(f"Force build: {self.package_name} {self.pkg_version} ")
901
- else:
902
- log.info(f"Found changes to package sources: {self.package_name} {self.pkg_version} ")
903
- log.trace(f"Old hash {self.hash} != New hash {self.calculate_hash()}")
904
- # Build the distribution files
905
- old_ver = self.pkg_version
906
- self.pkg_version = self.next_package_version(production)
907
- self.status["version"] = self.pkg_version
908
- # to get the next version
909
- log.debug(f"{self.package_name}: bump version for {old_ver} to {self.pkg_version } {'production' if production else 'test'}")
910
- self.write_package_json()
911
- log.trace(f"New hash: {self.package_name} {self.pkg_version} {self.hash}")
912
- if self.poetry_build():
913
- self.status["result"] = "Build OK"
914
- else:
915
- log.warning(f"{self.package_name}: skipping as build failed")
916
- self.status["error"] = "Poetry build failed"
917
- return False
918
- return True
919
-
920
- def publish_distribution_ifchanged(
921
- self,
922
- db: PysonDB,
923
- *,
924
- production: bool, # PyPI or Test-PyPi
925
- build=False, #
926
- force=False, # publish even if no changes
927
- dry_run=False, # do not actually publish
928
- clean: bool = False, # clean up afterwards
929
- ) -> (
930
- bool
931
- ): # sourcery skip: assign-if-exp, default-mutable-arg, extract-method, remove-unnecessary-else, require-parameter-annotation, swap-if-else-branches, swap-if-expression
932
- """
933
- Publish a package to PyPi
934
- look up the previous package version in the dabase, and only publish if there are changes to the package
935
- - change determied by hash across all files
936
-
937
- Build
938
- - update package files
939
- - build the wheels and sdist
940
- Publish
941
- - publish to PyPi
942
- - update database with new hash
943
- """
944
- log.info(f"Publish: {self.package_path.name}")
945
- # count .pyi files in the package
946
- filecount = len(list(self.package_path.rglob("*.pyi")))
947
- if filecount == 0:
948
- log.debug(f"{self.package_name}: starting build as no .pyi files found")
949
- build = True
950
-
951
- if build or force or self.is_changed():
952
- self.build_distribution(production=production, force=force)
953
-
954
- if not self._publish:
955
- log.debug(f"{self.package_name}: skip publishing")
956
- return False
957
-
958
- self.next_package_version(production=production)
959
- # Publish the package to PyPi, Test-PyPi or Github
960
- if self.is_changed():
961
- if self.mpy_version in SET_PREVIEW and production and not force:
962
- log.warning("version: `latest` package will only be available on Github, and not published to PyPi.")
963
- self.status["result"] = "Published to GitHub"
964
- else:
965
- return self.publish_distribution(dry_run, production, db)
966
- elif force:
967
- return self.publish_distribution(dry_run, production, db)
968
- else:
969
- log.info(f"No changes to package : {self.package_name} {self.pkg_version}")
970
-
971
- if clean:
972
- self.clean()
973
- return True
974
-
975
- def publish_distribution(self, dry_run, production, db):
976
- """
977
- Publishes the package to PyPi or Test-PyPi.
978
-
979
- Args:
980
- dry_run (bool): If True, performs a dry run without actually publishing.
981
- production (bool): If True, publishes to PyPi. If False, publishes to Test-PyPi.
982
- db: The database object to save the package state.
983
-
984
- Returns:
985
- bool: True if the publish was successful, False otherwise.
986
- """
987
- self.update_hashes() # resets is_changed to False
988
- if not dry_run:
989
- pub_ok = self.poetry_publish(production=production)
990
- else:
991
- log.warning(f"{self.package_name}: Dry run, not publishing to {'' if production else 'Test-'}PyPi")
992
- pub_ok = True
993
- if not pub_ok:
994
- log.warning(f"{self.package_name}: Publish failed for {self.pkg_version}")
995
- self.status["error"] = "Publish failed"
996
- return False
997
- self.status["result"] = "Published to PyPi" if production else "Published to Test-PyPi"
998
- self.update_hashes()
999
- if dry_run:
1000
- log.warning(f"{self.package_name}: Dry run, not saving to database")
1001
- else:
1002
- # get the package state and add it to the database
1003
- db.add(self.to_dict())
1004
- db.commit()
1005
- return True
1006
-
1007
- def are_package_sources_available(self) -> bool:
1008
- """
1009
- Check if (all) the packages sources exist.
1010
- """
1011
- ok = True
1012
- for stub_type, src_path in self.update_sources():
1013
- if (CONFIG.stub_path / src_path).exists():
1014
- continue
1015
- if stub_type == StubSource.FROZEN:
1016
- # not a blocking issue if there are no frozen stubs, perhaps this port/board does not have any
1017
- continue
1018
- # todo: below is a workaround for different types, but where is the source of this difference coming from?
1019
- msg = (
1020
- f"{self.package_name}: source '{stub_type.value}' not found: {CONFIG.stub_path / src_path}"
1021
- if isinstance(stub_type, StubSource) # type: ignore
1022
- else f"{self.package_name}: source '{stub_type}' not found: {CONFIG.stub_path / src_path}"
1023
- )
1024
- self.status["error"] = msg
1025
- log.debug(msg)
1026
- ok = False
1027
- return ok
1
+ """Create a stub-only package for a specific version of micropython"""
2
+
3
+ import hashlib
4
+ import json
5
+ import shutil
6
+ import subprocess
7
+ from pathlib import Path
8
+ import sys
9
+ from typing import Any, Dict, List, Optional, Tuple, Union
10
+
11
+ import tenacity
12
+
13
+ from mpflash.basicgit import get_git_describe
14
+ from stubber.publish.helpers import get_module_docstring
15
+
16
+ if sys.version_info >= (3, 11):
17
+ import tomllib # type: ignore
18
+ else:
19
+ import tomli as tomllib # type: ignore
20
+
21
+ from typing import NewType
22
+
23
+ import tomli_w
24
+ from mpflash.logger import log
25
+ from packaging.version import Version, parse
26
+ from pysondb import PysonDB
27
+
28
+ from mpflash.versions import SET_PREVIEW, V_PREVIEW, clean_version
29
+ from stubber.publish.bump import bump_version
30
+ from stubber.publish.defaults import GENERIC_U, default_board
31
+ from stubber.publish.enums import StubSource
32
+ from stubber.publish.pypi import Version, get_pypi_versions
33
+ from stubber.utils.config import CONFIG
34
+
35
+ Status = NewType("Status", Dict[str, Union[str, None]])
36
+ StubSources = List[Tuple[StubSource, Path]]
37
+
38
+ # indicates which stubs will be skipped when copying for these stub sources
39
+ STUBS_COPY_FILTER = {
40
+ StubSource.FROZEN: [
41
+ "espnow", # merged stubs + documentation of the espnow module is better than the info in the forzen stubs
42
+ ],
43
+ StubSource.FIRMWARE: [
44
+ "builtins",
45
+ "collections", # collections must be in stdlib
46
+ ],
47
+ StubSource.MERGED: [
48
+ "collections", # collections must be in stdlib
49
+ ],
50
+ }
51
+
52
+ # these modules will be replaced by a simple import statement to import from stdlib
53
+ STDLIB_UMODULES = ["ucollections"]
54
+
55
+
56
+ class VersionedPackage(object):
57
+ """
58
+ Represents a versioned package.
59
+
60
+ Attributes:
61
+ package_name (str): The name of the package.
62
+ mpy_version (str): The MicroPython version.
63
+
64
+ Methods:
65
+ __init__(self, package_name: str, mpy_version: str): Initializes a new instance of the VersionedPackage class.
66
+ is_preview(self): Checks if the package is a preview version.
67
+ pkg_version(self) -> str: Returns the version of the package.
68
+ pkg_version(self, version: str) -> None: Sets the version of the package.
69
+ get_prerelease_package_version(self, production: bool = False) -> str: Gets the next prerelease version for the package.
70
+ get_next_package_version(self, prod: bool = False, rc=False) -> str: Gets the next version for the package.
71
+ next_pkg_version(self, production: bool) -> str: Gets the next version for the package.
72
+ bump(self, *, rc: int = 0) -> str: Bumps the postrelease version of the package.
73
+ """
74
+
75
+ def __init__(self, package_name: str, *, mpy_version: str):
76
+ super().__init__()
77
+ self.package_name: str = package_name
78
+ self.mpy_version: str = mpy_version
79
+ self._pkg_version: str = mpy_version
80
+
81
+ def __str__(self) -> str:
82
+ return f"{self.package_name}=={self.mpy_version}"
83
+
84
+ def __repr__(self) -> str:
85
+ return f"{self.package_name}=={self.mpy_version}"
86
+
87
+ def __eq__(self, o: object) -> bool:
88
+ return str(self) == str(o)
89
+
90
+ def __hash__(self) -> int:
91
+ return hash(str(self))
92
+
93
+ @property
94
+ def pkg_version(self) -> str:
95
+ "return the version of the package"
96
+ return self._pkg_version
97
+
98
+ @pkg_version.setter
99
+ def pkg_version(self, version: str) -> None:
100
+ "set the version of the package"
101
+ self._pkg_version = version
102
+
103
+ def next_package_version(self, production: bool) -> str:
104
+ # sourcery skip: assign-if-exp
105
+ """Get the next version for the package"""
106
+ if self.is_preview():
107
+ return self._get_next_preview_package_version(production)
108
+ else:
109
+ return self._get_next_package_version(production)
110
+
111
+ def is_preview(self):
112
+ return self.mpy_version in SET_PREVIEW or V_PREVIEW in self.mpy_version
113
+
114
+ def _get_next_preview_package_version(self, production: bool = False) -> str:
115
+ """
116
+ Get the next prerelease version for the package.
117
+ this is used for preview versions of micropython (-preview, formerly known as 'latest')
118
+ """
119
+ rc = 1
120
+ if not (describe := get_git_describe(CONFIG.mpy_path.as_posix())):
121
+ return "99.99.99post99"
122
+ # use versiontag and the number of commits since the last tag
123
+ # "v1.19.1-841-g3446"
124
+ # 'v1.20.0-dirty'
125
+ # 'v1.22.0-preview-19-g8eb7721b4'
126
+ parts = describe.split("-", 3)
127
+ ver = parts[0]
128
+ if len(parts) > 1:
129
+ rc = (
130
+ parts[1]
131
+ if parts[1].isdigit()
132
+ else parts[2] if len(parts) > 2 and parts[2].isdigit() else 1
133
+ )
134
+ rc = int(rc)
135
+ base = (
136
+ bump_version(Version(ver), minor_bump=True) if parts[1] != V_PREVIEW else Version(ver)
137
+ )
138
+ return str(bump_version(base, rc=rc))
139
+ # raise ValueError("cannot determine next version number micropython")
140
+
141
+ def _get_next_package_version(self, prod: bool = False, rc=False) -> str:
142
+ """Get the next version for the package."""
143
+ base = Version(self.pkg_version)
144
+ if pypi_versions := get_pypi_versions(self.package_name, production=prod, base=base):
145
+ # get the latest version from pypi
146
+ self.pkg_version = str(pypi_versions[-1])
147
+ else:
148
+ # no published package found , so we start at base version then bump 1 post release
149
+ self.pkg_version = Version(self.pkg_version).base_version
150
+ return self.bump()
151
+
152
+ def bump(self, *, rc: int = 0) -> str:
153
+ """
154
+ bump the postrelease version of the package, and write the change to disk
155
+ if rc >= 1, the version is bumped to the specified release candidate
156
+ """
157
+ try:
158
+ current = Version(self.pkg_version)
159
+ assert isinstance(current, Version)
160
+ # bump the version
161
+ self.pkg_version = str(bump_version(post_bump=True, current=current, rc=rc))
162
+ except Exception as e: # pragma: no cover
163
+ log.error(f"Error: {e}")
164
+ return self.pkg_version
165
+
166
+
167
+ class Builder(VersionedPackage):
168
+ """
169
+ Builder class for creating and updating MicroPython stub packages.
170
+
171
+ Args:
172
+ package_name (str): The name of the package.
173
+ mpy_version (str, optional): The version of MicroPython. Defaults to "0.0.1".
174
+ port (str): The port for the package.
175
+ board (str, optional): The board for the package. Defaults to GENERIC_U.
176
+ description (str, optional): The description of the package. Defaults to "MicroPython stubs".
177
+ stubs (Optional[StubSources], optional): The stub sources for the package. Defaults to None.
178
+
179
+ Attributes:
180
+ package_name (str): The name of the package.
181
+ mpy_version (str): The version of MicroPython.
182
+ port (str): The port for the package.
183
+ board (str): The board for the package.
184
+ description (str): The description of the package.
185
+ stub_sources (Optional[StubSources]): The stub sources for the package.
186
+ hash (None): The hash of all the files in the package.
187
+ stub_hash (None): The hash of the stub files.
188
+
189
+ Properties:
190
+ package_path (Path): The package path based on the package name and version, relative to the publish folder.
191
+ toml_path (Path): The path to the `pyproject.toml` file.
192
+ pyproject (Union[Dict[str, Any], None]): The parsed pyproject.toml or None.
193
+
194
+ Methods:
195
+ create_update_pyproject_toml(): Create or update/overwrite a `pyproject.toml` file.
196
+ check(): Check if the package is valid.
197
+ clean(): Remove the stub files from the package folder.
198
+ copy_stubs(): Copy files from all listed stub folders to the package folder.
199
+ update_package_files(): Update the stub-only package for a specific version of MicroPython.
200
+ write_package_json(): Write the package.json file to disk.
201
+ to_dict(): Return the package as a dict to store in the jsondb.
202
+ from_dict(json_data: Dict): Load the package from a dict (from the jsondb).
203
+ calculate_hash(include_md: bool = True): Create a SHA1 hash of all files in the package.
204
+ update_hashes(): Update the package hashes.
205
+ is_changed(include_md: bool = True): Check if the package has changed.
206
+ """
207
+
208
+ # BUF_SIZE is totally arbitrary,
209
+ BUF_SIZE = 65536 * 16 # lets read stuff in 16 x 64kb chunks!
210
+
211
+ def __init__(
212
+ self,
213
+ package_name: str,
214
+ *,
215
+ mpy_version: str = "0.0.1",
216
+ port: str,
217
+ board: str = GENERIC_U,
218
+ description: str = "MicroPython stubs",
219
+ stubs: Optional[StubSources] = None,
220
+ # json_data: Optional[Dict[str, Any]] = None,
221
+ ): # port: str, board: str
222
+ super().__init__(package_name=package_name, mpy_version=mpy_version)
223
+ self._publish = True # intended for publishing
224
+ self.package_name = package_name
225
+ self.mpy_version = mpy_version
226
+ self.port = port
227
+ self.board = board
228
+ self.description = description
229
+ self.stub_sources = stubs or []
230
+ self.hash = None # intial hash
231
+ """Hash of all the files in the package"""
232
+ self.stub_hash = None # intial hash
233
+ """Hash of all .pyi files"""
234
+
235
+ @property
236
+ def package_path(self) -> Path:
237
+ "package path based on the package name and version and relative to the publish folder"
238
+ parts = self.package_name.split("-")
239
+ parts[1:1] = [clean_version(self.mpy_version, flat=True)]
240
+ return CONFIG.publish_path / "-".join(parts)
241
+
242
+ @property
243
+ def toml_path(self) -> Path:
244
+ "the path to the `pyproject.toml` file"
245
+ # todo: make sure this is always relative to the root path
246
+ return self.package_path / "pyproject.toml"
247
+
248
+ # -----------------------------------------------
249
+ @property
250
+ def pyproject(self) -> Union[Dict[str, Any], None]:
251
+ "parsed pyproject.toml or None"
252
+ pyproject = None
253
+ _toml = self.toml_path
254
+ if (_toml).exists():
255
+ log.info(f"Load pyproject from {_toml}")
256
+ try:
257
+ with open(_toml, "rb") as f:
258
+ pyproject = tomllib.load(f)
259
+ except tomllib.TOMLDecodeError as e:
260
+ log.error(f"Could not load pyproject.toml file {e}")
261
+ return pyproject
262
+
263
+ @pyproject.setter
264
+ def pyproject(self, pyproject: Dict) -> None:
265
+ # check if the result is a valid toml file
266
+ try:
267
+ tomllib.loads(tomli_w.dumps(pyproject))
268
+ except tomllib.TOMLDecodeError as e:
269
+ print("Could not create a valid TOML file")
270
+ raise (e)
271
+ # make sure parent folder exists
272
+ _toml = self.toml_path
273
+ (_toml).parent.mkdir(parents=True, exist_ok=True)
274
+ with open(_toml, "wb") as output:
275
+ tomli_w.dump(pyproject, output)
276
+
277
+ # -----------------------------------------------
278
+ def create_update_pyproject_toml(self) -> None:
279
+ """
280
+ create or update/overwrite a `pyproject.toml` file by combining a template file
281
+ with the given parameters.
282
+ """
283
+ raise NotImplementedError("create_update_pyproject_toml not implemented")
284
+
285
+ # -----------------------------------------------
286
+
287
+ def check(self) -> bool:
288
+ """Check if the package is valid, to be implemented by the subclass"""
289
+ return True
290
+
291
+ def clean(self) -> None:
292
+ """
293
+ Remove the stub files from the package folder
294
+
295
+ This is used before update the stub package, to avoid lingering stub files,
296
+ and after the package has been built, to avoid needing to store files multiple times.
297
+
298
+ `.gitignore` cannot be used as this will prevent poetry from processing the files.
299
+ """
300
+ # remove all *.py and *.pyi files in the folder
301
+ for wc in ["*.py", "*.pyi", "modules.json"]:
302
+ for f in (self.package_path).rglob(wc):
303
+ f.unlink()
304
+
305
+ def copy_stubs(self) -> None:
306
+ """
307
+ Copy files from all listed stub folders to the package folder
308
+ the order of the stub folders is relevant as "last copy wins"
309
+
310
+ - 1 - Copy all MCU stubs/merged to the package folder
311
+ - 2 - copy the remaining stubs to the package folder
312
+ - 3 - remove *.py files from the package folder
313
+ """
314
+ try:
315
+ # Check if all stub source folders exist
316
+ for stub_type, src_path in self.stub_sources:
317
+ if not (CONFIG.stub_path / src_path).exists():
318
+ raise FileNotFoundError(
319
+ f"Could not find stub source folder {CONFIG.stub_path / src_path}"
320
+ )
321
+
322
+ # 1 - Copy the stubs to the package, directly in the package folder (no folders)
323
+ # for stub_type, fw_path in [s for s in self.stub_sources]:
324
+ for n in range(len(self.stub_sources)):
325
+ stub_type, src_path = self.stub_sources[n]
326
+ try:
327
+ log.debug(f"Copying {stub_type} from {src_path}")
328
+ self.copy_folder(stub_type, src_path)
329
+ except OSError as e:
330
+ if stub_type != StubSource.FROZEN:
331
+ raise FileNotFoundError(
332
+ f"Could not find stub source folder {src_path}"
333
+ ) from e
334
+ else:
335
+ log.debug(f"Error copying stubs from : {CONFIG.stub_path / src_path}, {e}")
336
+ finally:
337
+ # 3 - clean up a little bit
338
+ # delete all the .py files in the package folder if there is a corresponding .pyi file
339
+ for f in self.package_path.rglob("*.py"):
340
+ if f.with_suffix(".pyi").exists():
341
+ f.unlink()
342
+ self.update_umodules()
343
+
344
+ def update_umodules(self):
345
+ """
346
+ Replace the STDLIB umodules with a simple import statement
347
+ in order to allow the typecheckers to resove the stdlib modules in the usual stdlib location.
348
+ """
349
+ for f in self.package_path.rglob("*.pyi"):
350
+ if f.stem in STDLIB_UMODULES:
351
+ # read the docstring of the module
352
+ docstring = get_module_docstring(f) or ""
353
+ comment = "# import module from stdlib/module"
354
+ # replace the file with a simple import statement
355
+ f.write_text(f'"""\n{docstring}\n"""\n{comment}\nfrom {f.stem[1:]} import *')
356
+
357
+ def copy_folder(self, stub_type: StubSource, src_path: Path):
358
+ Path(self.package_path).mkdir(parents=True, exist_ok=True)
359
+ for item in (CONFIG.stub_path / src_path).rglob("*"):
360
+ if item.is_file():
361
+ # filter the 'poorly' decorated files
362
+ if stub_type in STUBS_COPY_FILTER and item.stem in STUBS_COPY_FILTER[stub_type]:
363
+ continue
364
+
365
+ target = Path(self.package_path) / item.relative_to(CONFIG.stub_path / src_path)
366
+ target.parent.mkdir(parents=True, exist_ok=True)
367
+ target.write_bytes(item.read_bytes())
368
+
369
+ def update_package_files(self) -> None:
370
+ """
371
+ Update the stub-only package for a specific version of micropython
372
+ - cleans the package folder
373
+ - copies the stubs from the list of stubs.
374
+ - creates/updates the readme and the license file
375
+ """
376
+ # create the package folder
377
+ self.package_path.mkdir(parents=True, exist_ok=True)
378
+ self.clean() # Delete any previous *.py? files
379
+ self.copy_stubs()
380
+ self.create_readme()
381
+ self.create_license()
382
+
383
+ def write_package_json(self) -> None:
384
+ """write the package.json file to disk"""
385
+ # make sure folder exists
386
+ if not self.package_path.exists():
387
+ self.package_path.mkdir(parents=True, exist_ok=True)
388
+ # write the json to a file
389
+ with open(self.package_path / "package.json", "w") as f:
390
+ json.dump(self.to_dict(), f, indent=4)
391
+
392
+ def to_dict(self) -> dict:
393
+ """return the package as a dict to store in the jsondb
394
+
395
+ need to simplify some of the Objects to allow serialization to json
396
+ - the paths to posix paths
397
+ - the version (semver) to a string
398
+ - toml file to list of lines
399
+
400
+ """
401
+ return {
402
+ "name": self.package_name,
403
+ "mpy_version": self.mpy_version,
404
+ "publish": self._publish,
405
+ "pkg_version": str(self.pkg_version),
406
+ "path": self.package_path.name, # only store the folder name , as it is relative to the publish folder
407
+ "stub_sources": [(name, Path(path).as_posix()) for (name, path) in self.stub_sources],
408
+ "description": self.description,
409
+ "hash": self.hash,
410
+ "stub_hash": self.stub_hash,
411
+ }
412
+
413
+ def from_dict(self, json_data: Dict) -> None:
414
+ """load the package from a dict (from the jsondb)"""
415
+ self.package_name = json_data["name"]
416
+ # self.package_path = Path(json_data["path"])
417
+ self.description = json_data["description"]
418
+ self.mpy_version = json_data["mpy_version"]
419
+ self._publish = json_data["publish"]
420
+ self.hash = json_data["hash"]
421
+ self.stub_hash = json_data["stub_hash"]
422
+ # create folder
423
+ if not self.package_path.exists():
424
+ self.package_path.mkdir(parents=True, exist_ok=True)
425
+ # create the pyproject.toml file
426
+ self.create_update_pyproject_toml()
427
+ # set pkg version after creating the toml file
428
+ self.pkg_version = json_data["pkg_version"]
429
+ self.stub_sources = []
430
+ for name, path in json_data["stub_sources"]:
431
+ if path.startswith("stubs/"):
432
+ path = path.replace("stubs/", "")
433
+ self.stub_sources.append((name, Path(path)))
434
+
435
+ def calculate_hash(self, include_md: bool = True) -> str:
436
+ # sourcery skip: reintroduce-else, swap-if-else-branches, use-named-expression
437
+ """
438
+ Create a SHA1 hash of all files in the package, excluding the pyproject.toml file itself.
439
+ the hash is based on the content of the .py/.pyi and .md files in the package.
440
+ if include_md is False , the .md files are not hased, allowing the files in the packeges to be compared simply
441
+ As a single hash is created across all files, the files are sorted prior to hashing to ensure that the hash is stable.
442
+
443
+ Note: A changed hash will not indicate which of the files in the package have been changed.
444
+ """
445
+ file_hash = hashlib.sha1()
446
+ # Stubs Only
447
+ files = list((self.package_path).rglob("**/*.pyi"))
448
+ if include_md:
449
+ files += (
450
+ [self.package_path / "LICENSE.md"]
451
+ + [self.package_path / "README.md"]
452
+ # do not include [self.toml_file]
453
+ )
454
+ for file in sorted(files):
455
+ try:
456
+ # retry on file not found
457
+ self.add_file_hash(file, file_hash)
458
+ except FileNotFoundError:
459
+ log.warning(f"File not found {file}")
460
+ # ignore file not found errors to allow the hash to be created WHILE GIT / VIRUS SCANNERS HOLD LINGERING FILES
461
+ return file_hash.hexdigest()
462
+
463
+ @tenacity.retry(wait=tenacity.wait_fixed(0.2), stop=tenacity.stop_after_attempt(3))
464
+ def add_file_hash(self, file, file_hash):
465
+ """
466
+ Adds the hash of a file to the given file hash object.
467
+ If an error occurs, the file is retried up to 3 times with a 0.2 second delay
468
+
469
+ Args:
470
+ file (str): The path to the file.
471
+ file_hash (hashlib._Hash): The file hash object to update.
472
+
473
+ Returns:
474
+ None
475
+ """
476
+ with open(file, "rb") as f:
477
+ while True:
478
+ if data := f.read(Builder.BUF_SIZE):
479
+ file_hash.update(data)
480
+ else:
481
+ break
482
+
483
+ def update_hashes(self, ret=False) -> None:
484
+ """Update the package hashes. Resets is_changed() to False"""
485
+ self.hash = self.calculate_hash()
486
+ self.stub_hash = self.calculate_hash(include_md=False)
487
+
488
+ def is_changed(self, include_md: bool = True) -> bool:
489
+ """Check if the package has changed, based on the current and the stored hash.
490
+ The default checks the hash of all files, including the .md files.
491
+ """
492
+ current = self.calculate_hash(include_md=include_md)
493
+ stored = self.hash if include_md else self.stub_hash
494
+ log.trace(f"changed = {self.hash != current} | Stored: {stored} | Current: {current}")
495
+ return stored != current
496
+
497
+ def create_license(self) -> None:
498
+ """
499
+ Create a license file for the package
500
+ - copied from the template license file
501
+ """
502
+ # copy the license file from the template to the package folder
503
+ # option : append other license files
504
+ shutil.copy(CONFIG.template_path / "LICENSE.md", self.package_path)
505
+
506
+ def create_readme(self) -> None:
507
+ """
508
+ Create a readme file for the package
509
+ - based on the template readme file
510
+ - with a list of all included stub folders added to it (not the individual stub-files)
511
+ """
512
+ # read the readme file and update the version and description
513
+ with open(CONFIG.template_path / "README.md", "r") as f:
514
+ TEMPLATE_README = f.read()
515
+
516
+ # add a readme with the names of the stub-folders
517
+
518
+ # read informations from firmware_stubs.json
519
+ firmware_stubs = {}
520
+ doc_stubs = {}
521
+ core_stubs = {}
522
+ try:
523
+ with open(self.package_path / "firmware_stubs.json", "r") as f:
524
+ firmware_stubs = json.load(f)
525
+ with open(self.package_path / "doc_stubs.json", "r") as f:
526
+ doc_stubs = json.load(f)
527
+ with open(self.package_path / "modules.json", "r") as f:
528
+ core_stubs = json.load(f)
529
+ except FileNotFoundError:
530
+ pass
531
+
532
+ # Prettify this by merging with template text
533
+ with open(self.package_path / "README.md", "w") as f:
534
+ f.write(f"# {self.package_name}\n\n")
535
+ f.write(TEMPLATE_README)
536
+ f.write(f"Included stubs:\n")
537
+ for name, folder in self.stub_sources:
538
+ f.write(f"* {name} from `stubs/{Path(folder).as_posix()}`\n")
539
+
540
+ f.write(f"\n\n")
541
+ f.write(f"origin | Family | Port | Board | Version\n")
542
+ f.write(f"-------|--------|------|-------|--------\n")
543
+ try:
544
+ f.write(
545
+ f"Firmware | {firmware_stubs['firmware']['family']} | {firmware_stubs['firmware']['port']} | {firmware_stubs['firmware']['machine']} | {clean_version(firmware_stubs['firmware']['version'])} \n"
546
+ )
547
+ except Exception:
548
+ pass
549
+ try:
550
+ f.write(
551
+ f"Documentation | {doc_stubs['firmware']['family']} | {doc_stubs['firmware']['port']} | - | {clean_version(doc_stubs['firmware']['version'])} \n"
552
+ )
553
+ except Exception:
554
+ pass
555
+ try:
556
+ f.write(
557
+ f"Core | {core_stubs['firmware']['family']} | {core_stubs['firmware']['port']} | - | {clean_version(core_stubs['firmware']['version'])} \n"
558
+ )
559
+ except Exception:
560
+ pass
561
+
562
+
563
class PoetryBuilder(Builder):
    """
    Build a package using Poetry.
    """

    def __init__(
        self,
        package_name: str,
        *,
        port: str,
        mpy_version: str = "0.0.1",
        board: str = GENERIC_U,
        description: str = "MicroPython stubs",
        stubs: Optional[StubSources] = None,
        # NOTE: json_data is accepted for signature compatibility with subclasses
        # but is not used by this class
        json_data: Optional[Dict[str, Any]] = None,
    ):
        super().__init__(
            package_name=package_name,
            mpy_version=mpy_version,
            port=port,
            board=board,
            description=description,
            stubs=stubs,
        )

    # -----------------------------------------------
    # get and set the version of the package directly from the toml file
    @property
    def pkg_version(self) -> str:
        "return the version of the package"
        # read the version from the toml file
        _toml = self.toml_path
        if not _toml.exists():
            return self.mpy_version
        with open(_toml, "rb") as f:
            pyproject = tomllib.load(f)
        ver = pyproject["tool"]["poetry"]["version"]
        # 'preview' style versions are not semver, so pass them through unparsed
        return str(parse(ver)) if ver not in SET_PREVIEW else ver

    @pkg_version.setter
    def pkg_version(self, version: str) -> None:
        # sourcery skip: remove-unnecessary-cast
        "set the version of the package"
        if not isinstance(version, str):  # type: ignore
            version = str(version)
        # read the current file
        _toml = self.toml_path
        try:
            with open(_toml, "rb") as f:
                pyproject = tomllib.load(f)
            pyproject["tool"]["poetry"]["version"] = version
            # update the version in the toml file
            with open(_toml, "wb") as output:
                tomli_w.dump(pyproject, output)
        except FileNotFoundError as e:
            raise FileNotFoundError(f"pyproject.toml file not found at {_toml}") from e

    # -----------------------------------------------

    def poetry_build(self) -> bool:
        """build the package by running `poetry build`"""
        return self.run_poetry(["build", "-vvv"])

    def poetry_publish(self, production: bool = False) -> bool:
        """Publish the package with poetry to PyPi (production) or Test-PyPi."""
        if not self._publish:
            log.warning(f"Publishing is disabled for {self.package_name}")
            return False
        # update the package info
        self.write_package_json()
        if production:
            # fix: corrected URL (was https://pypy.org)
            log.debug("Publishing to PRODUCTION https://pypi.org")
            params = ["publish"]
        else:
            # fix: corrected URL (was https://test.pypy.org)
            log.debug("Publishing to TEST-PyPi https://test.pypi.org")
            params = ["publish", "-r", "test-pypi"]
        r = self.run_poetry(params)
        print("")  # add a newline after the output
        return r

    def run_poetry(self, parameters: List[str]) -> bool:
        """Run a poetry commandline in the package folder.
        Note: this may write some output to the console ('All set!')
        """
        # check for pyproject.toml in folder
        if not (self.package_path / "pyproject.toml").exists():  # pragma: no cover
            log.error(f"No pyproject.toml file found in {self.package_path}")
            return False
        # todo: call poetry directly to improve error handling
        try:
            log.debug(f"poetry {parameters} starting")
            subprocess.run(
                ["poetry"] + parameters,
                cwd=self.package_path,
                check=True,
                stdout=subprocess.PIPE,  # interestingly: errors on stdout , output on stderr .....
                universal_newlines=True,
                encoding="utf-8",
            )
            log.trace(f"poetry {parameters} completed")
        except (NotADirectoryError, FileNotFoundError) as e:  # pragma: no cover # InvalidVersion
            log.error("Exception on process, {}".format(e))
            return False
        except subprocess.CalledProcessError as e:  # pragma: no cover
            # Detect and log error detection on upload
            # UploadError
            # HTTP Error 400: File already exists. See https://test.pypi.org/help/#file-name-reuse for more information.
            # TODO: how to return the state so it can be handled
            print()  # linefeed after output
            # fix: do not reuse `e` as the loop variable — it clobbered the
            # caught exception inside this handler
            errors = [line for line in e.stdout.splitlines()[1:7] if "Error" in line]
            for err in errors:
                log.error(err)
            return False
        return True

    def check(self) -> bool:
        """check if the package is valid by running `poetry check`
        Note: this will write some output to the console ('All set!')
        """
        return self.run_poetry(["check", "-vvv"])

    def create_update_pyproject_toml(self) -> None:
        """
        create or update/overwrite a `pyproject.toml` file by combining a template file
        with the given parameters,
        and updating it with the pyi files included.
        """
        if (self.toml_path).exists():
            # do not overwrite the version of a pre-existing file
            _pyproject = self.pyproject
            assert _pyproject is not None
            # clear out the packages section
            _pyproject["tool"]["poetry"]["packages"] = []
            # update the dependencies section by reading these from the template file
            with open(CONFIG.template_path / "pyproject.toml", "rb") as f:
                tpl = tomllib.load(f)
            _pyproject["tool"]["poetry"]["dependencies"] = tpl["tool"]["poetry"]["dependencies"]
        else:
            # read the template pyproject.toml file from the template folder
            try:
                with open(CONFIG.template_path / "pyproject.toml", "rb") as f:
                    _pyproject = tomllib.load(f)
                # note: can be 'latest' which is not semver
                _pyproject["tool"]["poetry"]["version"] = self.mpy_version
            except FileNotFoundError as e:
                log.error(f"Could not find template pyproject.toml file {e}")
                raise (e)

        # update the name , version and description of the package
        _pyproject["tool"]["poetry"]["name"] = self.package_name
        _pyproject["tool"]["poetry"]["description"] = self.description
        # write out the pyproject.toml file
        self.pyproject = _pyproject

    def update_pyproject_stubs(self) -> int:
        "Add the stub files to the pyproject.toml file"
        _pyproject = self.pyproject
        assert _pyproject is not None, "No pyproject.toml file found"
        _pyproject["tool"]["poetry"]["packages"] = [
            {"include": p.relative_to(self.package_path).as_posix()}
            for p in sorted((self.package_path).rglob("*.pyi"))
        ]
        # write out the pyproject.toml file
        self.pyproject = _pyproject
        return len(_pyproject["tool"]["poetry"]["packages"])
731
+
732
+
733
+ class StubPackage(PoetryBuilder):
734
+ """
735
+ Create a stub-only package for a specific version , port and board of micropython
736
+
737
+ properties:
738
+ - toml_path - the path to the `pyproject.toml` file
739
+ - package_path - the path to the folder where the package info will be stored ('./publish').
740
+ - pkg_version - the version of the package as used on PyPi (semver). Is stored directly in the `pyproject.toml` file
741
+ - pyproject - the contents of the `pyproject.toml` file
742
+
743
+ methods:
744
+ - from_json - load the package from json
745
+ - to_json - return the package as json
746
+
747
+ - create_update_pyproject_toml - create or update the `pyproject.toml` file
748
+ - create_readme - create the readme file
749
+ - create_license - create the license file
750
+ - copy_stubs - copy the stubs to the package folder
751
+ - update_included_stubs - update the included stubs in the `pyproject.toml` file
752
+ - create_hash - create a hash of the package files
753
+
754
+ - update_package_files - combines clean, copy, and create readme & updates
755
+ """
756
+
757
    def __init__(
        self,
        package_name: str,
        port: str,
        *,
        board: str = GENERIC_U,
        version: str = "0.0.1",
        description: str = "MicroPython stubs",
        stubs: Optional[StubSources] = None,
        json_data: Optional[Dict[str, Any]] = None,
    ):
        """
        Create a stub-only package for a specific version of micropython
        parameters:

        - package_name - the name of the package as used on PyPi
        - version - the version of the package as used on PyPi (semver)
        - description
        - stubs - a list of tuples (name, path) of the stubs to copy
        - json_data - Optional: a json database record that will be used to create the package from.
          When `json_data` is provided, the version, description and stubs parameters are ignored

        paths:
        ROOT_PATH - the root path of the project ('./')
        PUBLISH_PATH - root-relative path to the folder where the package info will be stored ('./publish').
        TEMPLATE_PATH - root-relative path to the folder where the template files are stored ('./publish/template').
        STUB_PATH - root-relative path to the folder where the stubs are stored ('./stubs').
        """
        self.port = port
        self.board = board
        if json_data is not None:
            # restore package state (name, version, hashes, sources) from the db record;
            # the version/description/stubs parameters are ignored in this branch
            self.from_dict(json_data)
        else:
            # store essentials
            self.package_name = package_name
            self.description = description
            self.mpy_version = clean_version(version, drop_v=True)  # Initial version

            # the toml file must exist before pkg_version can be read/written
            self.create_update_pyproject_toml()

            self.stub_sources: StubSources = []
            # save the stub sources
            if stubs:
                self.stub_sources = stubs

        # status record used for progress/error reporting by the build/publish flow
        self.status: Status = Status(
            {
                "result": "-",
                "name": self.package_name,
                "version": self.pkg_version,
                "error": None,
                "path": self.package_path.as_posix(),
            }
        )
        # NOTE(review): super().__init__ is deliberately called last, after the
        # attributes above are populated (possibly via from_dict) — confirm the
        # base class relies on mpy_version/stub_sources being set here
        super().__init__(
            package_name=package_name,
            mpy_version=self.mpy_version,
            port=port,
            board=board,
            description=description,
            stubs=self.stub_sources,
        )
820
+
821
    def update_sources(self) -> StubSources:
        """
        Update the stub sources to:
        - FIRMWARE: prefer -merged stubs over bare MCU stubs
        - FROZEN: fallback to use the GENERIC folder for the frozen sources if no board specific folder exists
        - MERGED: for a GENERIC board, prefer the port's default-board merged folder if it exists

        Returns a new list of (stub_type, path) tuples; self.stub_sources is not modified.
        """
        updated_sources = []
        # TODO: find a way to simplify this code as this is a bit magic (and hard to understand)
        for stub_type, fw_path in self.stub_sources:
            # prefer -merged stubs over bare MCU stubs
            if stub_type == StubSource.FIRMWARE:
                # Check if -merged folder exists and use that instead
                if fw_path.name.endswith("-merged"):
                    merged_path = fw_path
                else:
                    merged_path = fw_path.with_name(f"{fw_path.name}-merged")
                if (CONFIG.stub_path / merged_path).exists():
                    updated_sources.append((stub_type, merged_path))
                else:
                    updated_sources.append((stub_type, fw_path))
            elif stub_type == StubSource.FROZEN:
                # use if folder exists , else use GENERIC folder
                if (CONFIG.stub_path / fw_path).exists():
                    updated_sources.append((stub_type, fw_path))
                elif fw_path.with_name("GENERIC").exists():
                    # NOTE(review): unlike the sibling checks this tests the raw
                    # fw_path, not CONFIG.stub_path / fw_path — confirm intentional.
                    # If neither folder exists, the FROZEN source is dropped entirely.
                    updated_sources.append((stub_type, fw_path.with_name("GENERIC")))
            elif stub_type == StubSource.MERGED:
                # Use the default board folder instead of the GENERIC board folder (if it exists)
                if self.board.upper() == GENERIC_U:
                    family = fw_path.name.split("-")[0]
                    default_path = Path(
                        f"{family}-{clean_version(self.mpy_version, flat=True)}-{self.port}-{default_board(self.port, self.mpy_version)}-merged"
                    )
                    if (CONFIG.stub_path / default_path).exists():
                        fw_path = default_path
                updated_sources.append((stub_type, fw_path))
            # ---------
            else:
                # all other stub types pass through unchanged
                updated_sources.append((stub_type, fw_path))
        return updated_sources
861
+
862
+ def update_distribution(self, production: bool) -> bool:
863
+ """Update the package .pyi files, if all the sources are available"""
864
+ log.info(f"- Update {self.package_path.name}")
865
+ log.trace(f"{self.package_path.as_posix()}")
866
+
867
+ # check if the sources exist
868
+ ok = self.are_package_sources_available()
869
+ if not ok:
870
+ log.debug(
871
+ f"{self.package_name}: skipping as one or more source stub folders are missing"
872
+ )
873
+ self.status["error"] = "Skipped, stub folder(s) missing"
874
+ shutil.rmtree(self.package_path.as_posix())
875
+ self._publish = False # type: ignore
876
+ return False
877
+ try:
878
+ # update to -merged and fallback to GENERIC
879
+ self.stub_sources = self.update_sources()
880
+ self.update_package_files()
881
+ self.update_pyproject_stubs()
882
+ # for a new package the version could be 'latest', which is not a valid semver, so update
883
+ self.pkg_version = self.next_package_version(production)
884
+ return self.check()
885
+ except Exception as e: # pragma: no cover
886
+ log.error(f"{self.package_name}: {e}")
887
+ self.status["error"] = str(e)
888
+ return False
889
+
890
    def build_distribution(
        self,
        production: bool,  # PyPI or Test-PyPi - USED TO FIND THE NEXT VERSION NUMBER
        force=False,  # BUILD even if no changes
    ) -> (
        bool
    ):  # sourcery skip: default-mutable-arg, extract-duplicate-method, require-parameter-annotation
        """
        Build a package
        look up the previous package version in the database
        - update package files
        - build the wheels and sdist

        :param production: PyPI or Test-PyPi -
        :param force: BUILD even if no changes
        :return: True if the package was built
        """
        log.info(f"Build: {self.package_path.name}")

        ok = self.update_distribution(production)
        self.status["version"] = self.pkg_version
        if not ok:
            log.info(f"{self.package_name}: skip - Could not build/update package")
            if not self.status["error"]:
                self.status["error"] = "Could not build/update package"
            return False

        # If there are changes to the package, then publish it
        if self.is_changed() or force:
            if force:
                log.info(f"Force build: {self.package_name} {self.pkg_version} ")
            else:
                log.info(
                    f"Found changes to package sources: {self.package_name} {self.pkg_version} "
                )
                log.trace(f"Old hash {self.hash} != New hash {self.calculate_hash()}")
            # Build the distribution files
            old_ver = self.pkg_version
            # bump the version (again) to get the next version number
            self.pkg_version = self.next_package_version(production)
            self.status["version"] = self.pkg_version
            log.debug(
                f"{self.package_name}: bump version for {old_ver} to {self.pkg_version } {'production' if production else 'test'}"
            )
            self.write_package_json()
            log.trace(f"New hash: {self.package_name} {self.pkg_version} {self.hash}")
            if self.poetry_build():
                self.status["result"] = "Build OK"
            else:
                log.warning(f"{self.package_name}: skipping as build failed")
                self.status["error"] = "Poetry build failed"
                return False
        # no changes and not forced: nothing to build, but that is not an error
        return True
943
+
944
    def publish_distribution_ifchanged(
        self,
        db: PysonDB,
        *,
        production: bool,  # PyPI or Test-PyPi
        build=False,  # force a build before publishing
        force=False,  # publish even if no changes
        dry_run=False,  # do not actually publish
        clean: bool = False,  # clean up afterwards
    ) -> (
        bool
    ):  # sourcery skip: assign-if-exp, default-mutable-arg, extract-method, remove-unnecessary-else, require-parameter-annotation, swap-if-else-branches, swap-if-expression
        """
        Publish a package to PyPi
        look up the previous package version in the database, and only publish if there are changes to the package
        - change determined by hash across all files

        Build
        - update package files
        - build the wheels and sdist
        Publish
        - publish to PyPi
        - update database with new hash
        """
        log.info(f"Publish: {self.package_path.name}")
        # count .pyi files in the package; an empty package always needs a build first
        filecount = len(list(self.package_path.rglob("*.pyi")))
        if filecount == 0:
            log.debug(f"{self.package_name}: starting build as no .pyi files found")
            build = True

        if build or force or self.is_changed():
            self.build_distribution(production=production, force=force)

        if not self._publish:
            log.debug(f"{self.package_name}: skip publishing")
            return False

        # note: return value intentionally unused; called for its side effects
        self.next_package_version(production=production)
        # Publish the package to PyPi, Test-PyPi or Github
        if self.is_changed():
            if self.mpy_version in SET_PREVIEW and production and not force:
                # preview versions are never pushed to production PyPi
                log.warning(
                    "version: `latest` package will only be available on Github, and not published to PyPi."
                )
                self.status["result"] = "Published to GitHub"
            else:
                return self.publish_distribution(dry_run, production, db)
        elif force:
            return self.publish_distribution(dry_run, production, db)
        else:
            log.info(f"No changes to package : {self.package_name} {self.pkg_version}")

        if clean:
            self.clean()
        return True
1000
+
1001
+ def publish_distribution(self, dry_run, production, db):
1002
+ """
1003
+ Publishes the package to PyPi or Test-PyPi.
1004
+
1005
+ Args:
1006
+ dry_run (bool): If True, performs a dry run without actually publishing.
1007
+ production (bool): If True, publishes to PyPi. If False, publishes to Test-PyPi.
1008
+ db: The database object to save the package state.
1009
+
1010
+ Returns:
1011
+ bool: True if the publish was successful, False otherwise.
1012
+ """
1013
+ self.update_hashes() # resets is_changed to False
1014
+ if not dry_run:
1015
+ pub_ok = self.poetry_publish(production=production)
1016
+ else:
1017
+ log.warning(
1018
+ f"{self.package_name}: Dry run, not publishing to {'' if production else 'Test-'}PyPi"
1019
+ )
1020
+ pub_ok = True
1021
+ if not pub_ok:
1022
+ log.warning(f"{self.package_name}: Publish failed for {self.pkg_version}")
1023
+ self.status["error"] = "Publish failed"
1024
+ return False
1025
+ self.status["result"] = "Published to PyPi" if production else "Published to Test-PyPi"
1026
+ self.update_hashes()
1027
+ if dry_run:
1028
+ log.warning(f"{self.package_name}: Dry run, not saving to database")
1029
+ else:
1030
+ # get the package state and add it to the database
1031
+ db.add(self.to_dict())
1032
+ db.commit()
1033
+ return True
1034
+
1035
+ def are_package_sources_available(self) -> bool:
1036
+ """
1037
+ Check if (all) the packages sources exist.
1038
+ """
1039
+ ok = True
1040
+ for stub_type, src_path in self.update_sources():
1041
+ if (CONFIG.stub_path / src_path).exists():
1042
+ continue
1043
+ if stub_type == StubSource.FROZEN:
1044
+ # not a blocking issue if there are no frozen stubs, perhaps this port/board does not have any
1045
+ continue
1046
+ # todo: below is a workaround for different types, but where is the source of this difference coming from?
1047
+ msg = (
1048
+ f"{self.package_name}: source '{stub_type.value}' not found: {CONFIG.stub_path / src_path}"
1049
+ if isinstance(stub_type, StubSource) # type: ignore
1050
+ else f"{self.package_name}: source '{stub_type}' not found: {CONFIG.stub_path / src_path}"
1051
+ )
1052
+ self.status["error"] = msg
1053
+ log.debug(msg)
1054
+ ok = False
1055
+ return ok