micropython-stubber 1.23.1.post1__py3-none-any.whl → 1.23.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (152)
  1. {micropython_stubber-1.23.1.post1.dist-info → micropython_stubber-1.23.2.dist-info}/LICENSE +30 -30
  2. {micropython_stubber-1.23.1.post1.dist-info → micropython_stubber-1.23.2.dist-info}/METADATA +4 -4
  3. micropython_stubber-1.23.2.dist-info/RECORD +158 -0
  4. mpflash/README.md +220 -220
  5. mpflash/libusb_flash.ipynb +203 -203
  6. mpflash/mpflash/add_firmware.py +98 -98
  7. mpflash/mpflash/ask_input.py +236 -236
  8. mpflash/mpflash/basicgit.py +284 -284
  9. mpflash/mpflash/bootloader/__init__.py +2 -2
  10. mpflash/mpflash/bootloader/activate.py +60 -60
  11. mpflash/mpflash/bootloader/detect.py +82 -82
  12. mpflash/mpflash/bootloader/manual.py +101 -101
  13. mpflash/mpflash/bootloader/micropython.py +12 -12
  14. mpflash/mpflash/bootloader/touch1200.py +36 -36
  15. mpflash/mpflash/cli_download.py +129 -129
  16. mpflash/mpflash/cli_flash.py +224 -216
  17. mpflash/mpflash/cli_group.py +111 -111
  18. mpflash/mpflash/cli_list.py +87 -87
  19. mpflash/mpflash/cli_main.py +39 -39
  20. mpflash/mpflash/common.py +210 -166
  21. mpflash/mpflash/config.py +44 -44
  22. mpflash/mpflash/connected.py +96 -77
  23. mpflash/mpflash/download.py +364 -364
  24. mpflash/mpflash/downloaded.py +130 -130
  25. mpflash/mpflash/errors.py +9 -9
  26. mpflash/mpflash/flash/__init__.py +55 -55
  27. mpflash/mpflash/flash/esp.py +59 -59
  28. mpflash/mpflash/flash/stm32.py +19 -19
  29. mpflash/mpflash/flash/stm32_dfu.py +104 -104
  30. mpflash/mpflash/flash/uf2/__init__.py +88 -88
  31. mpflash/mpflash/flash/uf2/boardid.py +15 -15
  32. mpflash/mpflash/flash/uf2/linux.py +136 -130
  33. mpflash/mpflash/flash/uf2/macos.py +42 -42
  34. mpflash/mpflash/flash/uf2/uf2disk.py +12 -12
  35. mpflash/mpflash/flash/uf2/windows.py +43 -43
  36. mpflash/mpflash/flash/worklist.py +170 -170
  37. mpflash/mpflash/list.py +106 -106
  38. mpflash/mpflash/logger.py +41 -41
  39. mpflash/mpflash/mpboard_id/__init__.py +93 -93
  40. mpflash/mpflash/mpboard_id/add_boards.py +251 -251
  41. mpflash/mpflash/mpboard_id/board.py +37 -37
  42. mpflash/mpflash/mpboard_id/board_id.py +86 -86
  43. mpflash/mpflash/mpboard_id/store.py +43 -43
  44. mpflash/mpflash/mpremoteboard/__init__.py +266 -266
  45. mpflash/mpflash/mpremoteboard/mpy_fw_info.py +141 -141
  46. mpflash/mpflash/mpremoteboard/runner.py +140 -140
  47. mpflash/mpflash/vendor/click_aliases.py +91 -91
  48. mpflash/mpflash/vendor/dfu.py +165 -165
  49. mpflash/mpflash/vendor/pydfu.py +605 -605
  50. mpflash/mpflash/vendor/readme.md +2 -2
  51. mpflash/mpflash/versions.py +135 -135
  52. mpflash/poetry.lock +1599 -1599
  53. mpflash/pyproject.toml +65 -65
  54. mpflash/stm32_udev_rules.md +62 -62
  55. stubber/__init__.py +3 -3
  56. stubber/board/board_info.csv +193 -193
  57. stubber/board/boot.py +34 -34
  58. stubber/board/createstubs.py +1004 -986
  59. stubber/board/createstubs_db.py +826 -825
  60. stubber/board/createstubs_db_min.py +332 -331
  61. stubber/board/createstubs_db_mpy.mpy +0 -0
  62. stubber/board/createstubs_lvgl.py +741 -741
  63. stubber/board/createstubs_lvgl_min.py +741 -741
  64. stubber/board/createstubs_mem.py +767 -766
  65. stubber/board/createstubs_mem_min.py +307 -306
  66. stubber/board/createstubs_mem_mpy.mpy +0 -0
  67. stubber/board/createstubs_min.py +295 -294
  68. stubber/board/createstubs_mpy.mpy +0 -0
  69. stubber/board/fw_info.py +141 -141
  70. stubber/board/info.py +183 -183
  71. stubber/board/main.py +19 -19
  72. stubber/board/modulelist.txt +247 -247
  73. stubber/board/pyrightconfig.json +34 -34
  74. stubber/bulk/mcu_stubber.py +437 -437
  75. stubber/codemod/_partials/__init__.py +48 -48
  76. stubber/codemod/_partials/db_main.py +147 -147
  77. stubber/codemod/_partials/lvgl_main.py +77 -77
  78. stubber/codemod/_partials/modules_reader.py +80 -80
  79. stubber/codemod/add_comment.py +53 -53
  80. stubber/codemod/add_method.py +65 -65
  81. stubber/codemod/board.py +317 -317
  82. stubber/codemod/enrich.py +151 -145
  83. stubber/codemod/merge_docstub.py +284 -284
  84. stubber/codemod/modify_list.py +54 -54
  85. stubber/codemod/utils.py +56 -56
  86. stubber/commands/build_cmd.py +94 -94
  87. stubber/commands/cli.py +49 -49
  88. stubber/commands/clone_cmd.py +78 -78
  89. stubber/commands/config_cmd.py +29 -29
  90. stubber/commands/enrich_folder_cmd.py +71 -71
  91. stubber/commands/get_core_cmd.py +71 -71
  92. stubber/commands/get_docstubs_cmd.py +92 -92
  93. stubber/commands/get_frozen_cmd.py +117 -117
  94. stubber/commands/get_mcu_cmd.py +102 -102
  95. stubber/commands/merge_cmd.py +66 -66
  96. stubber/commands/publish_cmd.py +118 -118
  97. stubber/commands/stub_cmd.py +31 -31
  98. stubber/commands/switch_cmd.py +62 -62
  99. stubber/commands/variants_cmd.py +48 -48
  100. stubber/cst_transformer.py +178 -178
  101. stubber/data/board_info.csv +193 -193
  102. stubber/data/board_info.json +1729 -1729
  103. stubber/data/micropython_tags.csv +15 -15
  104. stubber/data/requirements-core-micropython.txt +38 -38
  105. stubber/data/requirements-core-pycopy.txt +39 -39
  106. stubber/downloader.py +37 -37
  107. stubber/freeze/common.py +72 -72
  108. stubber/freeze/freeze_folder.py +69 -69
  109. stubber/freeze/freeze_manifest_2.py +126 -126
  110. stubber/freeze/get_frozen.py +131 -131
  111. stubber/get_cpython.py +112 -112
  112. stubber/get_lobo.py +59 -59
  113. stubber/minify.py +423 -423
  114. stubber/publish/bump.py +86 -86
  115. stubber/publish/candidates.py +275 -275
  116. stubber/publish/database.py +18 -18
  117. stubber/publish/defaults.py +40 -40
  118. stubber/publish/enums.py +24 -24
  119. stubber/publish/helpers.py +29 -29
  120. stubber/publish/merge_docstubs.py +136 -132
  121. stubber/publish/missing_class_methods.py +51 -51
  122. stubber/publish/package.py +150 -150
  123. stubber/publish/pathnames.py +51 -51
  124. stubber/publish/publish.py +120 -120
  125. stubber/publish/pypi.py +42 -42
  126. stubber/publish/stubpackage.py +1055 -1051
  127. stubber/rst/__init__.py +9 -9
  128. stubber/rst/classsort.py +78 -78
  129. stubber/rst/lookup.py +533 -531
  130. stubber/rst/output_dict.py +401 -401
  131. stubber/rst/reader.py +814 -814
  132. stubber/rst/report_return.py +77 -77
  133. stubber/rst/rst_utils.py +541 -541
  134. stubber/stubber.py +38 -38
  135. stubber/stubs_from_docs.py +90 -90
  136. stubber/tools/manifestfile.py +654 -654
  137. stubber/tools/readme.md +6 -6
  138. stubber/update_fallback.py +117 -117
  139. stubber/update_module_list.py +123 -123
  140. stubber/utils/__init__.py +6 -6
  141. stubber/utils/config.py +137 -137
  142. stubber/utils/makeversionhdr.py +54 -54
  143. stubber/utils/manifest.py +90 -90
  144. stubber/utils/post.py +80 -80
  145. stubber/utils/repos.py +156 -156
  146. stubber/utils/stubmaker.py +139 -139
  147. stubber/utils/typed_config_toml.py +80 -80
  148. stubber/variants.py +106 -106
  149. micropython_stubber-1.23.1.post1.dist-info/RECORD +0 -159
  150. mpflash/basicgit.py +0 -288
  151. {micropython_stubber-1.23.1.post1.dist-info → micropython_stubber-1.23.2.dist-info}/WHEEL +0 -0
  152. {micropython_stubber-1.23.1.post1.dist-info → micropython_stubber-1.23.2.dist-info}/entry_points.txt +0 -0
@@ -1,1051 +1,1055 @@
1
- """Create a stub-only package for a specific version of micropython"""
2
-
3
- import hashlib
4
- import json
5
- import shutil
6
- import subprocess
7
- from pathlib import Path
8
- import sys
9
- from typing import Any, Dict, List, Optional, Tuple, Union
10
-
11
- import tenacity
12
-
13
- from mpflash.basicgit import get_git_describe
14
- from stubber.publish.helpers import get_module_docstring
15
-
16
- if sys.version_info >= (3, 11):
17
- import tomllib # type: ignore
18
- else:
19
- import tomli as tomllib # type: ignore
20
-
21
- from typing import NewType
22
-
23
- import tomli_w
24
- from mpflash.logger import log
25
- from packaging.version import Version, parse
26
- from pysondb import PysonDB
27
-
28
- from mpflash.versions import SET_PREVIEW, V_PREVIEW, clean_version
29
- from stubber.publish.bump import bump_version
30
- from stubber.publish.defaults import GENERIC_U, default_board
31
- from stubber.publish.enums import StubSource
32
- from stubber.publish.pypi import Version, get_pypi_versions
33
- from stubber.utils.config import CONFIG
34
-
35
- Status = NewType("Status", Dict[str, Union[str, None]])
36
- StubSources = List[Tuple[StubSource, Path]]
37
-
38
- # indicates which stubs will be skipped when copying for these stub sources
39
- STUBS_COPY_FILTER = {
40
- StubSource.FROZEN: [
41
- "espnow", # merged stubs + documentation of the espnow module is better than the info in the forzen stubs
42
- ],
43
- StubSource.FIRMWARE: [
44
- "builtins",
45
- "collections", # collections must be in stdlib
46
- ],
47
- StubSource.MERGED: [
48
- "collections", # collections must be in stdlib
49
- ],
50
- }
51
-
52
- # these modules will be replaced by a simple import statement to import from stdlib
53
- STDLIB_UMODULES = ["ucollections"]
54
-
55
-
56
- class VersionedPackage(object):
57
- """
58
- Represents a versioned package.
59
-
60
- Attributes:
61
- package_name (str): The name of the package.
62
- mpy_version (str): The MicroPython version.
63
-
64
- Methods:
65
- __init__(self, package_name: str, mpy_version: str): Initializes a new instance of the VersionedPackage class.
66
- is_preview(self): Checks if the package is a preview version.
67
- pkg_version(self) -> str: Returns the version of the package.
68
- pkg_version(self, version: str) -> None: Sets the version of the package.
69
- get_prerelease_package_version(self, production: bool = False) -> str: Gets the next prerelease version for the package.
70
- get_next_package_version(self, prod: bool = False, rc=False) -> str: Gets the next version for the package.
71
- next_pkg_version(self, production: bool) -> str: Gets the next version for the package.
72
- bump(self, *, rc: int = 0) -> str: Bumps the postrelease version of the package.
73
- """
74
-
75
- def __init__(self, package_name: str, *, mpy_version: str):
76
- super().__init__()
77
- self.package_name: str = package_name
78
- self.mpy_version: str = mpy_version
79
- self._pkg_version: str = mpy_version
80
-
81
- def __str__(self) -> str:
82
- return f"{self.package_name}=={self.mpy_version}"
83
-
84
- def __repr__(self) -> str:
85
- return f"{self.package_name}=={self.mpy_version}"
86
-
87
- def __eq__(self, o: object) -> bool:
88
- return str(self) == str(o)
89
-
90
- def __hash__(self) -> int:
91
- return hash(str(self))
92
-
93
- @property
94
- def pkg_version(self) -> str:
95
- "return the version of the package"
96
- return self._pkg_version
97
-
98
- @pkg_version.setter
99
- def pkg_version(self, version: str) -> None:
100
- "set the version of the package"
101
- self._pkg_version = version
102
-
103
- def next_package_version(self, production: bool) -> str:
104
- # sourcery skip: assign-if-exp
105
- """Get the next version for the package"""
106
- if self.is_preview():
107
- return self._get_next_preview_package_version(production)
108
- else:
109
- return self._get_next_package_version(production)
110
-
111
- def is_preview(self):
112
- return self.mpy_version in SET_PREVIEW or V_PREVIEW in self.mpy_version
113
-
114
- def _get_next_preview_package_version(self, production: bool = False) -> str:
115
- """
116
- Get the next prerelease version for the package.
117
- this is used for preview versions of micropython (-preview, formerly known as 'latest')
118
- """
119
- rc = 1
120
- if not (describe := get_git_describe(CONFIG.mpy_path.as_posix())):
121
- return "99.99.99post99"
122
- # use versiontag and the number of commits since the last tag
123
- # "v1.19.1-841-g3446"
124
- # 'v1.20.0-dirty'
125
- # 'v1.22.0-preview-19-g8eb7721b4'
126
- parts = describe.split("-", 3)
127
- ver = parts[0]
128
- if len(parts) > 1:
129
- rc = (
130
- parts[1]
131
- if parts[1].isdigit()
132
- else parts[2] if len(parts) > 2 and parts[2].isdigit() else 1
133
- )
134
- rc = int(rc)
135
- base = (
136
- bump_version(Version(ver), minor_bump=True) if parts[1] != V_PREVIEW else Version(ver)
137
- )
138
- return str(bump_version(base, rc=rc))
139
- # raise ValueError("cannot determine next version number micropython")
140
-
141
- def _get_next_package_version(self, prod: bool = False, rc=False) -> str:
142
- """Get the next version for the package."""
143
- base = Version(self.pkg_version)
144
- if pypi_versions := get_pypi_versions(self.package_name, production=prod, base=base):
145
- # get the latest version from pypi
146
- self.pkg_version = str(pypi_versions[-1])
147
- else:
148
- # no published package found , so we start at base version then bump 1 post release
149
- self.pkg_version = Version(self.pkg_version).base_version
150
- return self.bump()
151
-
152
- def bump(self, *, rc: int = 0) -> str:
153
- """
154
- bump the postrelease version of the package, and write the change to disk
155
- if rc >= 1, the version is bumped to the specified release candidate
156
- """
157
- try:
158
- current = Version(self.pkg_version)
159
- assert isinstance(current, Version)
160
- # bump the version
161
- self.pkg_version = str(bump_version(post_bump=True, current=current, rc=rc))
162
- except Exception as e: # pragma: no cover
163
- log.error(f"Error: {e}")
164
- return self.pkg_version
165
-
166
-
167
- class Builder(VersionedPackage):
168
- """
169
- Builder class for creating and updating MicroPython stub packages.
170
-
171
- Args:
172
- package_name (str): The name of the package.
173
- mpy_version (str, optional): The version of MicroPython. Defaults to "0.0.1".
174
- port (str): The port for the package.
175
- board (str, optional): The board for the package. Defaults to GENERIC_U.
176
- description (str, optional): The description of the package. Defaults to "MicroPython stubs".
177
- stubs (Optional[StubSources], optional): The stub sources for the package. Defaults to None.
178
-
179
- Attributes:
180
- package_name (str): The name of the package.
181
- mpy_version (str): The version of MicroPython.
182
- port (str): The port for the package.
183
- board (str): The board for the package.
184
- description (str): The description of the package.
185
- stub_sources (Optional[StubSources]): The stub sources for the package.
186
- hash (None): The hash of all the files in the package.
187
- stub_hash (None): The hash of the stub files.
188
-
189
- Properties:
190
- package_path (Path): The package path based on the package name and version, relative to the publish folder.
191
- toml_path (Path): The path to the `pyproject.toml` file.
192
- pyproject (Union[Dict[str, Any], None]): The parsed pyproject.toml or None.
193
-
194
- Methods:
195
- create_update_pyproject_toml(): Create or update/overwrite a `pyproject.toml` file.
196
- check(): Check if the package is valid.
197
- clean(): Remove the stub files from the package folder.
198
- copy_stubs(): Copy files from all listed stub folders to the package folder.
199
- update_package_files(): Update the stub-only package for a specific version of MicroPython.
200
- write_package_json(): Write the package.json file to disk.
201
- to_dict(): Return the package as a dict to store in the jsondb.
202
- from_dict(json_data: Dict): Load the package from a dict (from the jsondb).
203
- calculate_hash(include_md: bool = True): Create a SHA1 hash of all files in the package.
204
- update_hashes(): Update the package hashes.
205
- is_changed(include_md: bool = True): Check if the package has changed.
206
- """
207
-
208
- # BUF_SIZE is totally arbitrary,
209
- BUF_SIZE = 65536 * 16 # lets read stuff in 16 x 64kb chunks!
210
-
211
- def __init__(
212
- self,
213
- package_name: str,
214
- *,
215
- mpy_version: str = "0.0.1",
216
- port: str,
217
- board: str = GENERIC_U,
218
- description: str = "MicroPython stubs",
219
- stubs: Optional[StubSources] = None,
220
- # json_data: Optional[Dict[str, Any]] = None,
221
- ): # port: str, board: str
222
- super().__init__(package_name=package_name, mpy_version=mpy_version)
223
- self._publish = True # intended for publishing
224
- self.package_name = package_name
225
- self.mpy_version = mpy_version
226
- self.port = port
227
- self.board = board
228
- self.description = description
229
- self.stub_sources = stubs or []
230
- self.hash = None # intial hash
231
- """Hash of all the files in the package"""
232
- self.stub_hash = None # intial hash
233
- """Hash of all .pyi files"""
234
-
235
- @property
236
- def package_path(self) -> Path:
237
- "package path based on the package name and version and relative to the publish folder"
238
- parts = self.package_name.split("-")
239
- parts[1:1] = [clean_version(self.mpy_version, flat=True)]
240
- return CONFIG.publish_path / "-".join(parts)
241
-
242
- @property
243
- def toml_path(self) -> Path:
244
- "the path to the `pyproject.toml` file"
245
- # todo: make sure this is always relative to the root path
246
- return self.package_path / "pyproject.toml"
247
-
248
- # -----------------------------------------------
249
- @property
250
- def pyproject(self) -> Union[Dict[str, Any], None]:
251
- "parsed pyproject.toml or None"
252
- pyproject = None
253
- _toml = self.toml_path
254
- if (_toml).exists():
255
- with open(_toml, "rb") as f:
256
- pyproject = tomllib.load(f)
257
- return pyproject
258
-
259
- @pyproject.setter
260
- def pyproject(self, pyproject: Dict) -> None:
261
- # check if the result is a valid toml file
262
- try:
263
- tomllib.loads(tomli_w.dumps(pyproject))
264
- except tomllib.TOMLDecodeError as e:
265
- print("Could not create a valid TOML file")
266
- raise (e)
267
- # make sure parent folder exists
268
- _toml = self.toml_path
269
- (_toml).parent.mkdir(parents=True, exist_ok=True)
270
- with open(_toml, "wb") as output:
271
- tomli_w.dump(pyproject, output)
272
-
273
- # -----------------------------------------------
274
- def create_update_pyproject_toml(self) -> None:
275
- """
276
- create or update/overwrite a `pyproject.toml` file by combining a template file
277
- with the given parameters.
278
- """
279
- raise NotImplementedError("create_update_pyproject_toml not implemented")
280
-
281
- # -----------------------------------------------
282
-
283
- def check(self) -> bool:
284
- """Check if the package is valid, to be implemented by the subclass"""
285
- return True
286
-
287
- def clean(self) -> None:
288
- """
289
- Remove the stub files from the package folder
290
-
291
- This is used before update the stub package, to avoid lingering stub files,
292
- and after the package has been built, to avoid needing to store files multiple times.
293
-
294
- `.gitignore` cannot be used as this will prevent poetry from processing the files.
295
- """
296
- # remove all *.py and *.pyi files in the folder
297
- for wc in ["*.py", "*.pyi", "modules.json"]:
298
- for f in (self.package_path).rglob(wc):
299
- f.unlink()
300
-
301
- def copy_stubs(self) -> None:
302
- """
303
- Copy files from all listed stub folders to the package folder
304
- the order of the stub folders is relevant as "last copy wins"
305
-
306
- - 1 - Copy all MCU stubs/merged to the package folder
307
- - 2 - copy the remaining stubs to the package folder
308
- - 3 - remove *.py files from the package folder
309
- """
310
- try:
311
- # Check if all stub source folders exist
312
- for stub_type, src_path in self.stub_sources:
313
- if not (CONFIG.stub_path / src_path).exists():
314
- raise FileNotFoundError(
315
- f"Could not find stub source folder {CONFIG.stub_path / src_path}"
316
- )
317
-
318
- # 1 - Copy the stubs to the package, directly in the package folder (no folders)
319
- # for stub_type, fw_path in [s for s in self.stub_sources]:
320
- for n in range(len(self.stub_sources)):
321
- stub_type, src_path = self.stub_sources[n]
322
- try:
323
- log.debug(f"Copying {stub_type} from {src_path}")
324
- self.copy_folder(stub_type, src_path)
325
- except OSError as e:
326
- if stub_type != StubSource.FROZEN:
327
- raise FileNotFoundError(
328
- f"Could not find stub source folder {src_path}"
329
- ) from e
330
- else:
331
- log.debug(f"Error copying stubs from : {CONFIG.stub_path / src_path}, {e}")
332
- finally:
333
- # 3 - clean up a little bit
334
- # delete all the .py files in the package folder if there is a corresponding .pyi file
335
- for f in self.package_path.rglob("*.py"):
336
- if f.with_suffix(".pyi").exists():
337
- f.unlink()
338
- self.update_umodules()
339
-
340
- def update_umodules(self):
341
- """
342
- Replace the STDLIB umodules with a simple import statement
343
- in order to allow the typecheckers to resove the stdlib modules in the usual stdlib location.
344
- """
345
- for f in self.package_path.rglob("*.pyi"):
346
- if f.stem in STDLIB_UMODULES:
347
- # read the docstring of the module
348
- docstring = get_module_docstring(f) or ""
349
- comment = "# import module from stdlib/module"
350
- # replace the file with a simple import statement
351
- f.write_text(f'"""\n{docstring}\n"""\n{comment}\nfrom {f.stem[1:]} import *')
352
-
353
- def copy_folder(self, stub_type: StubSource, src_path: Path):
354
- Path(self.package_path).mkdir(parents=True, exist_ok=True)
355
- for item in (CONFIG.stub_path / src_path).rglob("*"):
356
- if item.is_file():
357
- # filter the 'poorly' decorated files
358
- if stub_type in STUBS_COPY_FILTER and item.stem in STUBS_COPY_FILTER[stub_type]:
359
- continue
360
-
361
- target = Path(self.package_path) / item.relative_to(CONFIG.stub_path / src_path)
362
- target.parent.mkdir(parents=True, exist_ok=True)
363
- target.write_bytes(item.read_bytes())
364
-
365
- def update_package_files(self) -> None:
366
- """
367
- Update the stub-only package for a specific version of micropython
368
- - cleans the package folder
369
- - copies the stubs from the list of stubs.
370
- - creates/updates the readme and the license file
371
- """
372
- # create the package folder
373
- self.package_path.mkdir(parents=True, exist_ok=True)
374
- self.clean() # Delete any previous *.py? files
375
- self.copy_stubs()
376
- self.create_readme()
377
- self.create_license()
378
-
379
- def write_package_json(self) -> None:
380
- """write the package.json file to disk"""
381
- # make sure folder exists
382
- if not self.package_path.exists():
383
- self.package_path.mkdir(parents=True, exist_ok=True)
384
- # write the json to a file
385
- with open(self.package_path / "package.json", "w") as f:
386
- json.dump(self.to_dict(), f, indent=4)
387
-
388
- def to_dict(self) -> dict:
389
- """return the package as a dict to store in the jsondb
390
-
391
- need to simplify some of the Objects to allow serialization to json
392
- - the paths to posix paths
393
- - the version (semver) to a string
394
- - toml file to list of lines
395
-
396
- """
397
- return {
398
- "name": self.package_name,
399
- "mpy_version": self.mpy_version,
400
- "publish": self._publish,
401
- "pkg_version": str(self.pkg_version),
402
- "path": self.package_path.name, # only store the folder name , as it is relative to the publish folder
403
- "stub_sources": [(name, Path(path).as_posix()) for (name, path) in self.stub_sources],
404
- "description": self.description,
405
- "hash": self.hash,
406
- "stub_hash": self.stub_hash,
407
- }
408
-
409
- def from_dict(self, json_data: Dict) -> None:
410
- """load the package from a dict (from the jsondb)"""
411
- self.package_name = json_data["name"]
412
- # self.package_path = Path(json_data["path"])
413
- self.description = json_data["description"]
414
- self.mpy_version = json_data["mpy_version"]
415
- self._publish = json_data["publish"]
416
- self.hash = json_data["hash"]
417
- self.stub_hash = json_data["stub_hash"]
418
- # create folder
419
- if not self.package_path.exists():
420
- self.package_path.mkdir(parents=True, exist_ok=True)
421
- # create the pyproject.toml file
422
- self.create_update_pyproject_toml()
423
- # set pkg version after creating the toml file
424
- self.pkg_version = json_data["pkg_version"]
425
- self.stub_sources = []
426
- for name, path in json_data["stub_sources"]:
427
- if path.startswith("stubs/"):
428
- path = path.replace("stubs/", "")
429
- self.stub_sources.append((name, Path(path)))
430
-
431
- def calculate_hash(self, include_md: bool = True) -> str:
432
- # sourcery skip: reintroduce-else, swap-if-else-branches, use-named-expression
433
- """
434
- Create a SHA1 hash of all files in the package, excluding the pyproject.toml file itself.
435
- the hash is based on the content of the .py/.pyi and .md files in the package.
436
- if include_md is False , the .md files are not hased, allowing the files in the packeges to be compared simply
437
- As a single hash is created across all files, the files are sorted prior to hashing to ensure that the hash is stable.
438
-
439
- Note: A changed hash will not indicate which of the files in the package have been changed.
440
- """
441
- file_hash = hashlib.sha1()
442
- # Stubs Only
443
- files = list((self.package_path).rglob("**/*.pyi"))
444
- if include_md:
445
- files += (
446
- [self.package_path / "LICENSE.md"]
447
- + [self.package_path / "README.md"]
448
- # do not include [self.toml_file]
449
- )
450
- for file in sorted(files):
451
- try:
452
- # retry on file not found
453
- self.add_file_hash(file, file_hash)
454
- except FileNotFoundError:
455
- log.warning(f"File not found {file}")
456
- # ignore file not found errors to allow the hash to be created WHILE GIT / VIRUS SCANNERS HOLD LINGERING FILES
457
- return file_hash.hexdigest()
458
-
459
- @tenacity.retry(wait=tenacity.wait_fixed(0.2), stop=tenacity.stop_after_attempt(3))
460
- def add_file_hash(self, file, file_hash):
461
- """
462
- Adds the hash of a file to the given file hash object.
463
- If an error occurs, the file is retried up to 3 times with a 0.2 second delay
464
-
465
- Args:
466
- file (str): The path to the file.
467
- file_hash (hashlib._Hash): The file hash object to update.
468
-
469
- Returns:
470
- None
471
- """
472
- with open(file, "rb") as f:
473
- while True:
474
- if data := f.read(Builder.BUF_SIZE):
475
- file_hash.update(data)
476
- else:
477
- break
478
-
479
- def update_hashes(self, ret=False) -> None:
480
- """Update the package hashes. Resets is_changed() to False"""
481
- self.hash = self.calculate_hash()
482
- self.stub_hash = self.calculate_hash(include_md=False)
483
-
484
- def is_changed(self, include_md: bool = True) -> bool:
485
- """Check if the package has changed, based on the current and the stored hash.
486
- The default checks the hash of all files, including the .md files.
487
- """
488
- current = self.calculate_hash(include_md=include_md)
489
- stored = self.hash if include_md else self.stub_hash
490
- log.trace(f"changed = {self.hash != current} | Stored: {stored} | Current: {current}")
491
- return stored != current
492
-
493
- def create_license(self) -> None:
494
- """
495
- Create a license file for the package
496
- - copied from the template license file
497
- """
498
- # copy the license file from the template to the package folder
499
- # option : append other license files
500
- shutil.copy(CONFIG.template_path / "LICENSE.md", self.package_path)
501
-
502
- def create_readme(self) -> None:
503
- """
504
- Create a readme file for the package
505
- - based on the template readme file
506
- - with a list of all included stub folders added to it (not the individual stub-files)
507
- """
508
- # read the readme file and update the version and description
509
- with open(CONFIG.template_path / "README.md", "r") as f:
510
- TEMPLATE_README = f.read()
511
-
512
- # add a readme with the names of the stub-folders
513
-
514
- # read informations from firmware_stubs.json
515
- firmware_stubs = {}
516
- doc_stubs = {}
517
- core_stubs = {}
518
- try:
519
- with open(self.package_path / "firmware_stubs.json", "r") as f:
520
- firmware_stubs = json.load(f)
521
- with open(self.package_path / "doc_stubs.json", "r") as f:
522
- doc_stubs = json.load(f)
523
- with open(self.package_path / "modules.json", "r") as f:
524
- core_stubs = json.load(f)
525
- except FileNotFoundError:
526
- pass
527
-
528
- # Prettify this by merging with template text
529
- with open(self.package_path / "README.md", "w") as f:
530
- f.write(f"# {self.package_name}\n\n")
531
- f.write(TEMPLATE_README)
532
- f.write(f"Included stubs:\n")
533
- for name, folder in self.stub_sources:
534
- f.write(f"* {name} from `stubs/{Path(folder).as_posix()}`\n")
535
-
536
- f.write(f"\n\n")
537
- f.write(f"origin | Family | Port | Board | Version\n")
538
- f.write(f"-------|--------|------|-------|--------\n")
539
- try:
540
- f.write(
541
- f"Firmware | {firmware_stubs['firmware']['family']} | {firmware_stubs['firmware']['port']} | {firmware_stubs['firmware']['machine']} | {clean_version(firmware_stubs['firmware']['version'])} \n"
542
- )
543
- except Exception:
544
- pass
545
- try:
546
- f.write(
547
- f"Documentation | {doc_stubs['firmware']['family']} | {doc_stubs['firmware']['port']} | - | {clean_version(doc_stubs['firmware']['version'])} \n"
548
- )
549
- except Exception:
550
- pass
551
- try:
552
- f.write(
553
- f"Core | {core_stubs['firmware']['family']} | {core_stubs['firmware']['port']} | - | {clean_version(core_stubs['firmware']['version'])} \n"
554
- )
555
- except Exception:
556
- pass
557
-
558
-
559
- class PoetryBuilder(Builder):
560
- """
561
- Build a package using Poetry
562
- """
563
-
564
- def __init__(
565
- self,
566
- package_name: str,
567
- *,
568
- port: str,
569
- mpy_version: str = "0.0.1",
570
- board: str = GENERIC_U,
571
- description: str = "MicroPython stubs",
572
- stubs: Optional[StubSources] = None,
573
- json_data: Optional[Dict[str, Any]] = None,
574
- ):
575
- super().__init__(
576
- package_name=package_name,
577
- mpy_version=mpy_version,
578
- port=port,
579
- board=board,
580
- description=description,
581
- stubs=stubs,
582
- )
583
-
584
- # -----------------------------------------------
585
- # get and set the version of the package directly from the toml file
586
- @property
587
- def pkg_version(self) -> str:
588
- "return the version of the package"
589
- # read the version from the toml file
590
- _toml = self.toml_path
591
- if not _toml.exists():
592
- return self.mpy_version
593
- with open(_toml, "rb") as f:
594
- pyproject = tomllib.load(f)
595
- ver = pyproject["tool"]["poetry"]["version"]
596
- return str(parse(ver)) if ver not in SET_PREVIEW else ver
597
-
598
- @pkg_version.setter
599
- def pkg_version(self, version: str) -> None:
600
- # sourcery skip: remove-unnecessary-cast
601
- "set the version of the package"
602
- if not isinstance(version, str): # type: ignore
603
- version = str(version)
604
- # read the current file
605
- _toml = self.toml_path
606
- try:
607
- with open(_toml, "rb") as f:
608
- pyproject = tomllib.load(f)
609
- pyproject["tool"]["poetry"]["version"] = version
610
- # update the version in the toml file
611
- with open(_toml, "wb") as output:
612
- tomli_w.dump(pyproject, output)
613
- except FileNotFoundError as e:
614
- raise FileNotFoundError(f"pyproject.toml file not found at {_toml}") from e
615
-
616
- # -----------------------------------------------
617
-
618
- def poetry_build(self) -> bool:
619
- """build the package by running `poetry build`"""
620
- return self.run_poetry(["build", "-vvv"])
621
-
622
- def poetry_publish(self, production: bool = False) -> bool:
623
- if not self._publish:
624
- log.warning(f"Publishing is disabled for {self.package_name}")
625
- return False
626
- # update the package info
627
- self.write_package_json()
628
- if production:
629
- log.debug("Publishing to PRODUCTION https://pypy.org")
630
- params = ["publish"]
631
- else:
632
- log.debug("Publishing to TEST-PyPi https://test.pypy.org")
633
- params = ["publish", "-r", "test-pypi"]
634
- r = self.run_poetry(params)
635
- print("") # add a newline after the output
636
- return r
637
-
638
- def run_poetry(self, parameters: List[str]) -> bool:
639
- """Run a poetry commandline in the package folder.
640
- Note: this may write some output to the console ('All set!')
641
- """
642
- # check for pyproject.toml in folder
643
- if not (self.package_path / "pyproject.toml").exists(): # pragma: no cover
644
- log.error(f"No pyproject.toml file found in {self.package_path}")
645
- return False
646
- # todo: call poetry directly to improve error handling
647
- try:
648
- log.debug(f"poetry {parameters} starting")
649
- subprocess.run(
650
- ["poetry"] + parameters,
651
- cwd=self.package_path,
652
- check=True,
653
- # stdout=subprocess.PIPE,
654
- stdout=subprocess.PIPE, # interestingly: errors on stdout , output on stderr .....
655
- universal_newlines=True,
656
- encoding="utf-8",
657
- )
658
- log.trace(f"poetry {parameters} completed")
659
- except (NotADirectoryError, FileNotFoundError) as e: # pragma: no cover # InvalidVersion
660
- log.error("Exception on process, {}".format(e))
661
- return False
662
- except subprocess.CalledProcessError as e: # pragma: no cover
663
- # Detect and log error detection om upload
664
- # UploadError
665
- # HTTP Error 400: File already exists. See https://test.pypi.org/help/#file-name-reuse for more information.
666
- # TODO: how to return the state so it can be handled
667
- print() # linefeed after output
668
- errors = [l for l in e.stdout.splitlines()[1:7] if "Error" in l]
669
- for e in errors:
670
- log.error(e)
671
-
672
- # log.error("Exception on process, {}".format(e))
673
- return False
674
- return True
675
-
676
- def check(self) -> bool:
677
- """check if the package is valid by running `poetry check`
678
- Note: this will write some output to the console ('All set!')
679
- """
680
- return self.run_poetry(["check", "-vvv"])
681
-
682
    def create_update_pyproject_toml(self) -> None:
        """
        create or update/overwrite a `pyproject.toml` file by combining a template file
        with the given parameters.
        and updating it with the pyi files included

        Raises:
            FileNotFoundError: when the template pyproject.toml cannot be found
            (only for a new package; re-raised after logging).
        """
        if (self.toml_path).exists():
            # do not overwrite the version of a pre-existing file
            _pyproject = self.pyproject
            assert _pyproject is not None
            # clear out the packages section
            _pyproject["tool"]["poetry"]["packages"] = []
            # update the dependencies section by reading these from the template file
            with open(CONFIG.template_path / "pyproject.toml", "rb") as f:
                tpl = tomllib.load(f)
            _pyproject["tool"]["poetry"]["dependencies"] = tpl["tool"]["poetry"]["dependencies"]

        else:
            # read the template pyproject.toml file from the template folder
            try:
                with open(CONFIG.template_path / "pyproject.toml", "rb") as f:
                    _pyproject = tomllib.load(f)
                # note: can be 'latest' which is not semver
                _pyproject["tool"]["poetry"]["version"] = self.mpy_version
            except FileNotFoundError as e:
                log.error(f"Could not find template pyproject.toml file {e}")
                raise (e)

        # update the name , version and description of the package
        _pyproject["tool"]["poetry"]["name"] = self.package_name
        _pyproject["tool"]["poetry"]["description"] = self.description
        # write out the pyproject.toml file (via the `pyproject` property setter)
        self.pyproject = _pyproject
715
-
716
- def update_pyproject_stubs(self) -> int:
717
- "Add the stub files to the pyproject.toml file"
718
- _pyproject = self.pyproject
719
- assert _pyproject is not None, "No pyproject.toml file found"
720
- _pyproject["tool"]["poetry"]["packages"] = [
721
- {"include": p.relative_to(self.package_path).as_posix()}
722
- for p in sorted((self.package_path).rglob("*.pyi"))
723
- ]
724
- # write out the pyproject.toml file
725
- self.pyproject = _pyproject
726
- return len(_pyproject["tool"]["poetry"]["packages"])
727
-
728
-
729
- class StubPackage(PoetryBuilder):
730
- """
731
- Create a stub-only package for a specific version , port and board of micropython
732
-
733
- properties:
734
- - toml_path - the path to the `pyproject.toml` file
735
- - package_path - the path to the folder where the package info will be stored ('./publish').
736
- - pkg_version - the version of the package as used on PyPi (semver). Is stored directly in the `pyproject.toml` file
737
- - pyproject - the contents of the `pyproject.toml` file
738
-
739
- methods:
740
- - from_json - load the package from json
741
- - to_json - return the package as json
742
-
743
- - create_update_pyproject_toml - create or update the `pyproject.toml` file
744
- - create_readme - create the readme file
745
- - create_license - create the license file
746
- - copy_stubs - copy the stubs to the package folder
747
- - update_included_stubs - update the included stubs in the `pyproject.toml` file
748
- - create_hash - create a hash of the package files
749
-
750
- - update_package_files - combines clean, copy, and create reeadme & updates
751
- """
752
-
753
    def __init__(
        self,
        package_name: str,
        port: str,
        *,
        board: str = GENERIC_U,
        version: str = "0.0.1",
        description: str = "MicroPython stubs",
        stubs: Optional[StubSources] = None,
        json_data: Optional[Dict[str, Any]] = None,
    ):
        """
        Create a stub-only package for a specific version of micropython
        parameters:

        - package_name - the name of the package as used on PyPi
        - version - the version of the package as used on PyPi (semver)
        - description
        - stubs - a list of tuples (name, path) of the stubs to copy
        - json_data - Optional: a json database record that will be used to create the package from.
          When `json_data` is provided, the version, description and stubs parameters are ignored

        paths:
        ROOT_PATH - the root path of the project ('./')
        PUBLISH_PATH - root-relative path to the folder where the package info will be stored ('./publish').
        TEMPLATE_PATH - root-relative path to the folder where the template files are stored ('./publish/template').
        STUB_PATH - root-relative path to the folder where the stubs are stored ('./stubs').

        """
        self.port = port
        self.board = board
        if json_data is not None:
            # restore name, version, hashes and stub sources from the database record
            self.from_dict(json_data)
        else:
            # store essentials
            self.package_name = package_name
            self.description = description
            self.mpy_version = clean_version(version, drop_v=True)  # Initial version

            # the toml file must exist before `self.pkg_version` can be read below
            self.create_update_pyproject_toml()

            self.stub_sources: StubSources = []
            # save the stub sources
            if stubs:
                self.stub_sources = stubs

        self.status: Status = Status(
            {
                "result": "-",
                "name": self.package_name,
                "version": self.pkg_version,
                "error": None,
                "path": self.package_path.as_posix(),
            }
        )
        # NOTE(review): super().__init__ is called last, after the attributes it
        # depends on have been set up above — confirm before reordering.
        super().__init__(
            package_name=package_name,
            mpy_version=self.mpy_version,
            port=port,
            board=board,
            description=description,
            stubs=self.stub_sources,
        )
816
-
817
    def update_sources(self) -> StubSources:
        """
        Update the stub sources to:
        - FIRMWARE: prefer -merged stubs over bare MCU stubs
        - FROZEN: fallback to use the GENERIC folder for the frozen sources if no board specific folder exists
        - MERGED: for a GENERIC board, use the port's default board folder instead (if it exists)
        """
        updated_sources = []
        # TODO: find a way to simplify this code as this is a bit magic (and hard to understand)
        for stub_type, fw_path in self.stub_sources:
            # prefer -merged stubs over bare MCU stubs
            if stub_type == StubSource.FIRMWARE:
                # Check if -merged folder exists and use that instead
                if fw_path.name.endswith("-merged"):
                    merged_path = fw_path
                else:
                    merged_path = fw_path.with_name(f"{fw_path.name}-merged")
                if (CONFIG.stub_path / merged_path).exists():
                    updated_sources.append((stub_type, merged_path))
                else:
                    updated_sources.append((stub_type, fw_path))
            elif stub_type == StubSource.FROZEN:
                # use if folder exists , else use GENERIC folder
                if (CONFIG.stub_path / fw_path).exists():
                    updated_sources.append((stub_type, fw_path))
                elif fw_path.with_name("GENERIC").exists():
                    updated_sources.append((stub_type, fw_path.with_name("GENERIC")))
                # NOTE(review): when neither folder exists the FROZEN source is
                # dropped silently — confirm this is intended.
            elif stub_type == StubSource.MERGED:
                # Use the default board folder instead of the GENERIC board folder (if it exists)
                if self.board.upper() == GENERIC_U:
                    family = fw_path.name.split("-")[0]
                    default_path = Path(
                        f"{family}-{clean_version(self.mpy_version, flat=True)}-{self.port}-{default_board(self.port, self.mpy_version)}-merged"
                    )
                    if (CONFIG.stub_path / default_path).exists():
                        fw_path = default_path
                updated_sources.append((stub_type, fw_path))
            # ---------
            else:
                updated_sources.append((stub_type, fw_path))
        return updated_sources
857
-
858
    def update_distribution(self, production: bool) -> bool:
        """Update the package .pyi files, if all the sources are available.

        Args:
            production: PyPI or Test-PyPi, used to look up the next version number.

        Returns:
            True when the package was updated and passes `poetry check`.
        """
        log.info(f"- Update {self.package_path.name}")
        log.trace(f"{self.package_path.as_posix()}")

        # check if the sources exist
        ok = self.are_package_sources_available()
        if not ok:
            log.debug(
                f"{self.package_name}: skipping as one or more source stub folders are missing"
            )
            self.status["error"] = "Skipped, stub folder(s) missing"
            # remove the package folder and exclude this package from publishing
            shutil.rmtree(self.package_path.as_posix())
            self._publish = False  # type: ignore
            return False
        try:
            # update to -merged and fallback to GENERIC
            self.stub_sources = self.update_sources()
            self.update_package_files()
            self.update_pyproject_stubs()
            # for a new package the version could be 'latest', which is not a valid semver, so update
            self.pkg_version = self.next_package_version(production)
            return self.check()
        except Exception as e:  # pragma: no cover
            log.error(f"{self.package_name}: {e}")
            self.status["error"] = str(e)
            return False
885
-
886
    def build_distribution(
        self,
        production: bool,  # PyPI or Test-PyPi - USED TO FIND THE NEXT VERSION NUMBER
        force=False,  # BUILD even if no changes
    ) -> (
        bool
    ):  # sourcery skip: default-mutable-arg, extract-duplicate-method, require-parameter-annotation
        """
        Build a package
        look up the previous package version in the database
        - update package files
        - build the wheels and sdist

        :param production: PyPI or Test-PyPi -
        :param force: BUILD even if no changes
        :return: True if the package was built
        """
        log.info(f"Build: {self.package_path.name}")

        # refresh the package files and version first
        ok = self.update_distribution(production)
        self.status["version"] = self.pkg_version
        if not ok:
            log.info(f"{self.package_name}: skip - Could not build/update package")
            if not self.status["error"]:
                self.status["error"] = "Could not build/update package"
            return False

        # If there are changes to the package, then publish it
        if self.is_changed() or force:
            if force:
                log.info(f"Force build: {self.package_name} {self.pkg_version} ")
            else:
                log.info(
                    f"Found changes to package sources: {self.package_name} {self.pkg_version} "
                )
                log.trace(f"Old hash {self.hash} != New hash {self.calculate_hash()}")
            # Build the distribution files
            old_ver = self.pkg_version
            self.pkg_version = self.next_package_version(production)
            self.status["version"] = self.pkg_version
            # to get the next version
            log.debug(
                f"{self.package_name}: bump version for {old_ver} to {self.pkg_version } {'production' if production else 'test'}"
            )
            self.write_package_json()
            log.trace(f"New hash: {self.package_name} {self.pkg_version} {self.hash}")
            if self.poetry_build():
                self.status["result"] = "Build OK"
            else:
                log.warning(f"{self.package_name}: skipping as build failed")
                self.status["error"] = "Poetry build failed"
                return False
        return True
939
-
940
    def publish_distribution_ifchanged(
        self,
        db: PysonDB,
        *,
        production: bool,  # PyPI or Test-PyPi
        build=False,  # force a (re)build before publishing
        force=False,  # publish even if no changes
        dry_run=False,  # do not actually publish
        clean: bool = False,  # clean up afterwards
    ) -> (
        bool
    ):  # sourcery skip: assign-if-exp, default-mutable-arg, extract-method, remove-unnecessary-else, require-parameter-annotation, swap-if-else-branches, swap-if-expression
        """
        Publish a package to PyPi
        look up the previous package version in the database, and only publish if there are changes to the package
        - change determined by hash across all files

        Build
        - update package files
        - build the wheels and sdist
        Publish
        - publish to PyPi
        - update database with new hash
        """
        log.info(f"Publish: {self.package_path.name}")
        # count .pyi files in the package
        filecount = len(list(self.package_path.rglob("*.pyi")))
        if filecount == 0:
            # nothing has been built yet, so force a build
            log.debug(f"{self.package_name}: starting build as no .pyi files found")
            build = True

        if build or force or self.is_changed():
            self.build_distribution(production=production, force=force)

        if not self._publish:
            log.debug(f"{self.package_name}: skip publishing")
            return False

        # refreshes pkg_version as a side effect
        self.next_package_version(production=production)
        # Publish the package to PyPi, Test-PyPi or Github
        if self.is_changed():
            if self.mpy_version in SET_PREVIEW and production and not force:
                log.warning(
                    "version: `latest` package will only be available on Github, and not published to PyPi."
                )
                self.status["result"] = "Published to GitHub"
            else:
                return self.publish_distribution(dry_run, production, db)
        elif force:
            return self.publish_distribution(dry_run, production, db)
        else:
            log.info(f"No changes to package : {self.package_name} {self.pkg_version}")

        if clean:
            self.clean()
        return True
996
-
997
    def publish_distribution(self, dry_run, production, db):
        """
        Publishes the package to PyPi or Test-PyPi.

        Args:
            dry_run (bool): If True, performs a dry run without actually publishing.
            production (bool): If True, publishes to PyPi. If False, publishes to Test-PyPi.
            db: The database object to save the package state.

        Returns:
            bool: True if the publish was successful, False otherwise.
        """
        self.update_hashes()  # resets is_changed to False
        if not dry_run:
            pub_ok = self.poetry_publish(production=production)
        else:
            log.warning(
                f"{self.package_name}: Dry run, not publishing to {'' if production else 'Test-'}PyPi"
            )
            pub_ok = True
        if not pub_ok:
            log.warning(f"{self.package_name}: Publish failed for {self.pkg_version}")
            self.status["error"] = "Publish failed"
            return False
        self.status["result"] = "Published to PyPi" if production else "Published to Test-PyPi"
        self.update_hashes()
        if dry_run:
            log.warning(f"{self.package_name}: Dry run, not saving to database")
        else:
            # get the package state and add it to the database
            db.add(self.to_dict())
            db.commit()
        return True
1030
-
1031
- def are_package_sources_available(self) -> bool:
1032
- """
1033
- Check if (all) the packages sources exist.
1034
- """
1035
- ok = True
1036
- for stub_type, src_path in self.update_sources():
1037
- if (CONFIG.stub_path / src_path).exists():
1038
- continue
1039
- if stub_type == StubSource.FROZEN:
1040
- # not a blocking issue if there are no frozen stubs, perhaps this port/board does not have any
1041
- continue
1042
- # todo: below is a workaround for different types, but where is the source of this difference coming from?
1043
- msg = (
1044
- f"{self.package_name}: source '{stub_type.value}' not found: {CONFIG.stub_path / src_path}"
1045
- if isinstance(stub_type, StubSource) # type: ignore
1046
- else f"{self.package_name}: source '{stub_type}' not found: {CONFIG.stub_path / src_path}"
1047
- )
1048
- self.status["error"] = msg
1049
- log.debug(msg)
1050
- ok = False
1051
- return ok
1
+ """Create a stub-only package for a specific version of micropython"""
2
+
3
+ import hashlib
4
+ import json
5
+ import shutil
6
+ import subprocess
7
+ from pathlib import Path
8
+ import sys
9
+ from typing import Any, Dict, List, Optional, Tuple, Union
10
+
11
+ import tenacity
12
+
13
+ from mpflash.basicgit import get_git_describe
14
+ from stubber.publish.helpers import get_module_docstring
15
+
16
+ if sys.version_info >= (3, 11):
17
+ import tomllib # type: ignore
18
+ else:
19
+ import tomli as tomllib # type: ignore
20
+
21
+ from typing import NewType
22
+
23
+ import tomli_w
24
+ from mpflash.logger import log
25
+ from packaging.version import Version, parse
26
+ from pysondb import PysonDB
27
+
28
+ from mpflash.versions import SET_PREVIEW, V_PREVIEW, clean_version
29
+ from stubber.publish.bump import bump_version
30
+ from stubber.publish.defaults import GENERIC_U, default_board
31
+ from stubber.publish.enums import StubSource
32
+ from stubber.publish.pypi import Version, get_pypi_versions
33
+ from stubber.utils.config import CONFIG
34
+
35
# per-package build/publish status record (result, name, version, error, path)
Status = NewType("Status", Dict[str, Union[str, None]])
# list of (stub source type, path relative to CONFIG.stub_path) pairs
StubSources = List[Tuple[StubSource, Path]]

# indicates which stubs will be skipped when copying for these stub sources
STUBS_COPY_FILTER = {
    StubSource.FROZEN: [
        "espnow",  # merged stubs + documentation of the espnow module is better than the info in the frozen stubs
    ],
    StubSource.FIRMWARE: [
        "builtins",
        "collections",  # collections must be in stdlib
    ],
    StubSource.MERGED: [
        "collections",  # collections must be in stdlib
    ],
}

# these modules will be replaced by a simple import statement to import from stdlib
STDLIB_UMODULES = ["ucollections"]
54
+
55
+
56
class VersionedPackage(object):
    """
    Represents a versioned package.

    Attributes:
        package_name (str): The name of the package.
        mpy_version (str): The MicroPython version.

    Methods:
        __init__(self, package_name: str, mpy_version: str): Initializes a new instance of the VersionedPackage class.
        is_preview(self): Checks if the package is a preview version.
        pkg_version(self) -> str: Returns the version of the package.
        pkg_version(self, version: str) -> None: Sets the version of the package.
        next_package_version(self, production: bool) -> str: Gets the next version for the package.
        bump(self, *, rc: int = 0) -> str: Bumps the postrelease version of the package.
    """

    def __init__(self, package_name: str, *, mpy_version: str):
        super().__init__()
        self.package_name: str = package_name
        self.mpy_version: str = mpy_version
        self._pkg_version: str = mpy_version

    def __str__(self) -> str:
        return f"{self.package_name}=={self.mpy_version}"

    def __repr__(self) -> str:
        return f"{self.package_name}=={self.mpy_version}"

    def __eq__(self, o: object) -> bool:
        # equality is based on the 'name==version' string representation
        return str(self) == str(o)

    def __hash__(self) -> int:
        return hash(str(self))

    @property
    def pkg_version(self) -> str:
        "return the version of the package"
        return self._pkg_version

    @pkg_version.setter
    def pkg_version(self, version: str) -> None:
        "set the version of the package"
        self._pkg_version = version

    def next_package_version(self, production: bool) -> str:
        # sourcery skip: assign-if-exp
        """Get the next version for the package"""
        if self.is_preview():
            return self._get_next_preview_package_version(production)
        else:
            return self._get_next_package_version(production)

    def is_preview(self):
        """True when the MicroPython version is a preview (formerly 'latest') version."""
        return self.mpy_version in SET_PREVIEW or V_PREVIEW in self.mpy_version

    def _get_next_preview_package_version(self, production: bool = False) -> str:
        """
        Get the next prerelease version for the package.
        this is used for preview versions of micropython (-preview, formerly known as 'latest')
        """
        rc = 1
        if not (describe := get_git_describe(CONFIG.mpy_path.as_posix())):
            return "99.99.99post99"
        # use versiontag and the number of commits since the last tag
        # "v1.19.1-841-g3446"
        # 'v1.20.0-dirty'
        # 'v1.22.0-preview-19-g8eb7721b4'
        parts = describe.split("-", 3)
        ver = parts[0]
        if len(parts) > 1:
            rc = (
                parts[1]
                if parts[1].isdigit()
                else parts[2] if len(parts) > 2 and parts[2].isdigit() else 1
            )
        rc = int(rc)
        # FIX: guard the parts[1] access. A describe that is exactly a tag
        # (e.g. 'v1.22.0') has no '-', and the unguarded `parts[1]` previously
        # raised IndexError here; treat it the same as a '-preview' tag.
        if len(parts) > 1 and parts[1] != V_PREVIEW:
            base = bump_version(Version(ver), minor_bump=True)
        else:
            base = Version(ver)
        return str(bump_version(base, rc=rc))
        # raise ValueError("cannot determine next version number micropython")

    def _get_next_package_version(self, prod: bool = False, rc=False) -> str:
        """Get the next version for the package."""
        base = Version(self.pkg_version)
        if pypi_versions := get_pypi_versions(self.package_name, production=prod, base=base):
            # get the latest version from pypi
            self.pkg_version = str(pypi_versions[-1])
        else:
            # no published package found , so we start at base version then bump 1 post release
            self.pkg_version = Version(self.pkg_version).base_version
        return self.bump()

    def bump(self, *, rc: int = 0) -> str:
        """
        bump the postrelease version of the package, and write the change to disk
        if rc >= 1, the version is bumped to the specified release candidate
        """
        try:
            current = Version(self.pkg_version)
            assert isinstance(current, Version)
            # bump the version
            self.pkg_version = str(bump_version(post_bump=True, current=current, rc=rc))
        except Exception as e:  # pragma: no cover
            log.error(f"Error: {e}")
        return self.pkg_version
165
+
166
+
167
+ class Builder(VersionedPackage):
168
+ """
169
+ Builder class for creating and updating MicroPython stub packages.
170
+
171
+ Args:
172
+ package_name (str): The name of the package.
173
+ mpy_version (str, optional): The version of MicroPython. Defaults to "0.0.1".
174
+ port (str): The port for the package.
175
+ board (str, optional): The board for the package. Defaults to GENERIC_U.
176
+ description (str, optional): The description of the package. Defaults to "MicroPython stubs".
177
+ stubs (Optional[StubSources], optional): The stub sources for the package. Defaults to None.
178
+
179
+ Attributes:
180
+ package_name (str): The name of the package.
181
+ mpy_version (str): The version of MicroPython.
182
+ port (str): The port for the package.
183
+ board (str): The board for the package.
184
+ description (str): The description of the package.
185
+ stub_sources (Optional[StubSources]): The stub sources for the package.
186
+ hash (None): The hash of all the files in the package.
187
+ stub_hash (None): The hash of the stub files.
188
+
189
+ Properties:
190
+ package_path (Path): The package path based on the package name and version, relative to the publish folder.
191
+ toml_path (Path): The path to the `pyproject.toml` file.
192
+ pyproject (Union[Dict[str, Any], None]): The parsed pyproject.toml or None.
193
+
194
+ Methods:
195
+ create_update_pyproject_toml(): Create or update/overwrite a `pyproject.toml` file.
196
+ check(): Check if the package is valid.
197
+ clean(): Remove the stub files from the package folder.
198
+ copy_stubs(): Copy files from all listed stub folders to the package folder.
199
+ update_package_files(): Update the stub-only package for a specific version of MicroPython.
200
+ write_package_json(): Write the package.json file to disk.
201
+ to_dict(): Return the package as a dict to store in the jsondb.
202
+ from_dict(json_data: Dict): Load the package from a dict (from the jsondb).
203
+ calculate_hash(include_md: bool = True): Create a SHA1 hash of all files in the package.
204
+ update_hashes(): Update the package hashes.
205
+ is_changed(include_md: bool = True): Check if the package has changed.
206
+ """
207
+
208
+ # BUF_SIZE is totally arbitrary,
209
+ BUF_SIZE = 65536 * 16 # lets read stuff in 16 x 64kb chunks!
210
+
211
    def __init__(
        self,
        package_name: str,
        *,
        mpy_version: str = "0.0.1",
        port: str,
        board: str = GENERIC_U,
        description: str = "MicroPython stubs",
        stubs: Optional[StubSources] = None,
        # json_data: Optional[Dict[str, Any]] = None,
    ):  # port: str, board: str
        """Initialize the builder for a stub package of one port/board/version."""
        super().__init__(package_name=package_name, mpy_version=mpy_version)
        self._publish = True  # intended for publishing
        # NOTE(review): package_name and mpy_version are also set by super().__init__;
        # the two assignments below look redundant but are kept as-is.
        self.package_name = package_name
        self.mpy_version = mpy_version
        self.port = port
        self.board = board
        self.description = description
        self.stub_sources = stubs or []
        self.hash = None  # initial hash
        """Hash of all the files in the package"""
        self.stub_hash = None  # initial hash
        """Hash of all .pyi files"""
234
+
235
+ @property
236
+ def package_path(self) -> Path:
237
+ "package path based on the package name and version and relative to the publish folder"
238
+ parts = self.package_name.split("-")
239
+ parts[1:1] = [clean_version(self.mpy_version, flat=True)]
240
+ return CONFIG.publish_path / "-".join(parts)
241
+
242
+ @property
243
+ def toml_path(self) -> Path:
244
+ "the path to the `pyproject.toml` file"
245
+ # todo: make sure this is always relative to the root path
246
+ return self.package_path / "pyproject.toml"
247
+
248
+ # -----------------------------------------------
249
    @property
    def pyproject(self) -> Union[Dict[str, Any], None]:
        """Parsed `pyproject.toml` as a dict, or None when the file is missing or invalid."""
        pyproject = None
        _toml = self.toml_path
        if (_toml).exists():
            log.info(f"Load pyproject from {_toml}")
            try:
                with open(_toml, "rb") as f:
                    pyproject = tomllib.load(f)
            except tomllib.TOMLDecodeError as e:
                # invalid toml: log the error and fall through, returning None
                log.error(f"Could not load pyproject.toml file {e}")
        return pyproject
262
+
263
    @pyproject.setter
    def pyproject(self, pyproject: Dict) -> None:
        """Write the given dict to `pyproject.toml`, creating parent folders as needed.

        Raises:
            tomllib.TOMLDecodeError: when the dict cannot round-trip to valid TOML.
        """
        # check if the result is a valid toml file
        try:
            tomllib.loads(tomli_w.dumps(pyproject))
        except tomllib.TOMLDecodeError as e:
            print("Could not create a valid TOML file")
            raise (e)
        # make sure parent folder exists
        _toml = self.toml_path
        (_toml).parent.mkdir(parents=True, exist_ok=True)
        with open(_toml, "wb") as output:
            tomli_w.dump(pyproject, output)
276
+
277
+ # -----------------------------------------------
278
    def create_update_pyproject_toml(self) -> None:
        """
        create or update/overwrite a `pyproject.toml` file by combining a template file
        with the given parameters.

        Raises:
            NotImplementedError: always; must be implemented by a subclass.
        """
        raise NotImplementedError("create_update_pyproject_toml not implemented")
284
+
285
+ # -----------------------------------------------
286
+
287
    def check(self) -> bool:
        """Check if the package is valid, to be implemented by the subclass"""
        # base implementation accepts everything
        return True
290
+
291
+ def clean(self) -> None:
292
+ """
293
+ Remove the stub files from the package folder
294
+
295
+ This is used before update the stub package, to avoid lingering stub files,
296
+ and after the package has been built, to avoid needing to store files multiple times.
297
+
298
+ `.gitignore` cannot be used as this will prevent poetry from processing the files.
299
+ """
300
+ # remove all *.py and *.pyi files in the folder
301
+ for wc in ["*.py", "*.pyi", "modules.json"]:
302
+ for f in (self.package_path).rglob(wc):
303
+ f.unlink()
304
+
305
    def copy_stubs(self) -> None:
        """
        Copy files from all listed stub folders to the package folder
        the order of the stub folders is relevant as "last copy wins"

        - 1 - Copy all MCU stubs/merged to the package folder
        - 2 - copy the remaining stubs to the package folder
        - 3 - remove *.py files from the package folder

        Raises:
            FileNotFoundError: when a required (non-FROZEN) stub source folder is missing.
        """
        try:
            # Check if all stub source folders exist
            for stub_type, src_path in self.stub_sources:
                if not (CONFIG.stub_path / src_path).exists():
                    raise FileNotFoundError(
                        f"Could not find stub source folder {CONFIG.stub_path / src_path}"
                    )

            # 1 - Copy the stubs to the package, directly in the package folder (no folders)
            # for stub_type, fw_path in [s for s in self.stub_sources]:
            for n in range(len(self.stub_sources)):
                stub_type, src_path = self.stub_sources[n]
                try:
                    log.debug(f"Copying {stub_type} from {src_path}")
                    self.copy_folder(stub_type, src_path)
                except OSError as e:
                    # missing FROZEN stubs are tolerated; anything else is fatal
                    if stub_type != StubSource.FROZEN:
                        raise FileNotFoundError(
                            f"Could not find stub source folder {src_path}"
                        ) from e
                    else:
                        log.debug(f"Error copying stubs from : {CONFIG.stub_path / src_path}, {e}")
        finally:
            # 3 - clean up a little bit
            # delete all the .py files in the package folder if there is a corresponding .pyi file
            for f in self.package_path.rglob("*.py"):
                if f.with_suffix(".pyi").exists():
                    f.unlink()
            self.update_umodules()
343
+
344
+ def update_umodules(self):
345
+ """
346
+ Replace the STDLIB umodules with a simple import statement
347
+ in order to allow the typecheckers to resove the stdlib modules in the usual stdlib location.
348
+ """
349
+ for f in self.package_path.rglob("*.pyi"):
350
+ if f.stem in STDLIB_UMODULES:
351
+ # read the docstring of the module
352
+ docstring = get_module_docstring(f) or ""
353
+ comment = "# import module from stdlib/module"
354
+ # replace the file with a simple import statement
355
+ f.write_text(f'"""\n{docstring}\n"""\n{comment}\nfrom {f.stem[1:]} import *')
356
+
357
+ def copy_folder(self, stub_type: StubSource, src_path: Path):
358
+ Path(self.package_path).mkdir(parents=True, exist_ok=True)
359
+ for item in (CONFIG.stub_path / src_path).rglob("*"):
360
+ if item.is_file():
361
+ # filter the 'poorly' decorated files
362
+ if stub_type in STUBS_COPY_FILTER and item.stem in STUBS_COPY_FILTER[stub_type]:
363
+ continue
364
+
365
+ target = Path(self.package_path) / item.relative_to(CONFIG.stub_path / src_path)
366
+ target.parent.mkdir(parents=True, exist_ok=True)
367
+ target.write_bytes(item.read_bytes())
368
+
369
    def update_package_files(self) -> None:
        """
        Update the stub-only package for a specific version of micropython
        - cleans the package folder
        - copies the stubs from the list of stubs.
        - creates/updates the readme and the license file
        """
        # create the package folder
        self.package_path.mkdir(parents=True, exist_ok=True)
        self.clean()  # Delete any previous *.py? files
        self.copy_stubs()
        self.create_readme()
        self.create_license()
382
+
383
+ def write_package_json(self) -> None:
384
+ """write the package.json file to disk"""
385
+ # make sure folder exists
386
+ if not self.package_path.exists():
387
+ self.package_path.mkdir(parents=True, exist_ok=True)
388
+ # write the json to a file
389
+ with open(self.package_path / "package.json", "w") as f:
390
+ json.dump(self.to_dict(), f, indent=4)
391
+
392
+ def to_dict(self) -> dict:
393
+ """return the package as a dict to store in the jsondb
394
+
395
+ need to simplify some of the Objects to allow serialization to json
396
+ - the paths to posix paths
397
+ - the version (semver) to a string
398
+ - toml file to list of lines
399
+
400
+ """
401
+ return {
402
+ "name": self.package_name,
403
+ "mpy_version": self.mpy_version,
404
+ "publish": self._publish,
405
+ "pkg_version": str(self.pkg_version),
406
+ "path": self.package_path.name, # only store the folder name , as it is relative to the publish folder
407
+ "stub_sources": [(name, Path(path).as_posix()) for (name, path) in self.stub_sources],
408
+ "description": self.description,
409
+ "hash": self.hash,
410
+ "stub_hash": self.stub_hash,
411
+ }
412
+
413
+ def from_dict(self, json_data: Dict) -> None:
414
+ """load the package from a dict (from the jsondb)"""
415
+ self.package_name = json_data["name"]
416
+ # self.package_path = Path(json_data["path"])
417
+ self.description = json_data["description"]
418
+ self.mpy_version = json_data["mpy_version"]
419
+ self._publish = json_data["publish"]
420
+ self.hash = json_data["hash"]
421
+ self.stub_hash = json_data["stub_hash"]
422
+ # create folder
423
+ if not self.package_path.exists():
424
+ self.package_path.mkdir(parents=True, exist_ok=True)
425
+ # create the pyproject.toml file
426
+ self.create_update_pyproject_toml()
427
+ # set pkg version after creating the toml file
428
+ self.pkg_version = json_data["pkg_version"]
429
+ self.stub_sources = []
430
+ for name, path in json_data["stub_sources"]:
431
+ if path.startswith("stubs/"):
432
+ path = path.replace("stubs/", "")
433
+ self.stub_sources.append((name, Path(path)))
434
+
435
+ def calculate_hash(self, include_md: bool = True) -> str:
436
+ # sourcery skip: reintroduce-else, swap-if-else-branches, use-named-expression
437
+ """
438
+ Create a SHA1 hash of all files in the package, excluding the pyproject.toml file itself.
439
+ the hash is based on the content of the .py/.pyi and .md files in the package.
440
+ if include_md is False , the .md files are not hased, allowing the files in the packeges to be compared simply
441
+ As a single hash is created across all files, the files are sorted prior to hashing to ensure that the hash is stable.
442
+
443
+ Note: A changed hash will not indicate which of the files in the package have been changed.
444
+ """
445
+ file_hash = hashlib.sha1()
446
+ # Stubs Only
447
+ files = list((self.package_path).rglob("**/*.pyi"))
448
+ if include_md:
449
+ files += (
450
+ [self.package_path / "LICENSE.md"]
451
+ + [self.package_path / "README.md"]
452
+ # do not include [self.toml_file]
453
+ )
454
+ for file in sorted(files):
455
+ try:
456
+ # retry on file not found
457
+ self.add_file_hash(file, file_hash)
458
+ except FileNotFoundError:
459
+ log.warning(f"File not found {file}")
460
+ # ignore file not found errors to allow the hash to be created WHILE GIT / VIRUS SCANNERS HOLD LINGERING FILES
461
+ return file_hash.hexdigest()
462
+
463
+ @tenacity.retry(wait=tenacity.wait_fixed(0.2), stop=tenacity.stop_after_attempt(3))
464
+ def add_file_hash(self, file, file_hash):
465
+ """
466
+ Adds the hash of a file to the given file hash object.
467
+ If an error occurs, the file is retried up to 3 times with a 0.2 second delay
468
+
469
+ Args:
470
+ file (str): The path to the file.
471
+ file_hash (hashlib._Hash): The file hash object to update.
472
+
473
+ Returns:
474
+ None
475
+ """
476
+ with open(file, "rb") as f:
477
+ while True:
478
+ if data := f.read(Builder.BUF_SIZE):
479
+ file_hash.update(data)
480
+ else:
481
+ break
482
+
483
+ def update_hashes(self, ret=False) -> None:
484
+ """Update the package hashes. Resets is_changed() to False"""
485
+ self.hash = self.calculate_hash()
486
+ self.stub_hash = self.calculate_hash(include_md=False)
487
+
488
+ def is_changed(self, include_md: bool = True) -> bool:
489
+ """Check if the package has changed, based on the current and the stored hash.
490
+ The default checks the hash of all files, including the .md files.
491
+ """
492
+ current = self.calculate_hash(include_md=include_md)
493
+ stored = self.hash if include_md else self.stub_hash
494
+ log.trace(f"changed = {self.hash != current} | Stored: {stored} | Current: {current}")
495
+ return stored != current
496
+
497
+ def create_license(self) -> None:
498
+ """
499
+ Create a license file for the package
500
+ - copied from the template license file
501
+ """
502
+ # copy the license file from the template to the package folder
503
+ # option : append other license files
504
+ shutil.copy(CONFIG.template_path / "LICENSE.md", self.package_path)
505
+
506
    def create_readme(self) -> None:
        """
        Create a readme file for the package
        - based on the template readme file
        - with a list of all included stub folders added to it (not the individual stub-files)
        """
        # read the readme file and update the version and description
        with open(CONFIG.template_path / "README.md", "r") as f:
            TEMPLATE_README = f.read()

        # add a readme with the names of the stub-folders

        # read informations from firmware_stubs.json
        # NOTE(review): a missing json file aborts reading the REMAINING files too
        # (single try around all three opens) - presumably intentional best-effort; confirm
        firmware_stubs = {}
        doc_stubs = {}
        core_stubs = {}
        try:
            with open(self.package_path / "firmware_stubs.json", "r") as f:
                firmware_stubs = json.load(f)
            with open(self.package_path / "doc_stubs.json", "r") as f:
                doc_stubs = json.load(f)
            with open(self.package_path / "modules.json", "r") as f:
                core_stubs = json.load(f)
        except FileNotFoundError:
            pass

        # Prettify this by merging with template text
        with open(self.package_path / "README.md", "w") as f:
            f.write(f"# {self.package_name}\n\n")
            f.write(TEMPLATE_README)
            f.write(f"Included stubs:\n")
            for name, folder in self.stub_sources:
                f.write(f"* {name} from `stubs/{Path(folder).as_posix()}`\n")

            # markdown table listing the origin of each stub source
            f.write(f"\n\n")
            f.write(f"origin | Family | Port | Board | Version\n")
            f.write(f"-------|--------|------|-------|--------\n")
            # each row is best-effort: missing keys in the json simply skip the row
            try:
                f.write(
                    f"Firmware | {firmware_stubs['firmware']['family']} | {firmware_stubs['firmware']['port']} | {firmware_stubs['firmware']['machine']} | {clean_version(firmware_stubs['firmware']['version'])} \n"
                )
            except Exception:
                pass
            try:
                f.write(
                    f"Documentation | {doc_stubs['firmware']['family']} | {doc_stubs['firmware']['port']} | - | {clean_version(doc_stubs['firmware']['version'])} \n"
                )
            except Exception:
                pass
            try:
                f.write(
                    f"Core | {core_stubs['firmware']['family']} | {core_stubs['firmware']['port']} | - | {clean_version(core_stubs['firmware']['version'])} \n"
                )
            except Exception:
                pass
561
+
562
+
563
class PoetryBuilder(Builder):
    """
    Build a package using Poetry.

    Adds pyproject.toml-backed version management and wrappers around the
    `poetry build` / `poetry publish` / `poetry check` command line.
    """

    def __init__(
        self,
        package_name: str,
        *,
        port: str,
        mpy_version: str = "0.0.1",
        board: str = GENERIC_U,
        description: str = "MicroPython stubs",
        stubs: Optional[StubSources] = None,
        # json_data is accepted for signature compatibility with subclasses; not used here
        json_data: Optional[Dict[str, Any]] = None,
    ):
        super().__init__(
            package_name=package_name,
            mpy_version=mpy_version,
            port=port,
            board=board,
            description=description,
            stubs=stubs,
        )

    # -----------------------------------------------
    # get and set the version of the package directly from the toml file
    @property
    def pkg_version(self) -> str:
        "return the version of the package"
        # read the version from the toml file
        _toml = self.toml_path
        if not _toml.exists():
            return self.mpy_version
        with open(_toml, "rb") as f:
            pyproject = tomllib.load(f)
        ver = pyproject["tool"]["poetry"]["version"]
        # preview versions (e.g. 'latest') are not valid semver; pass them through unchanged
        return str(parse(ver)) if ver not in SET_PREVIEW else ver

    @pkg_version.setter
    def pkg_version(self, version: str) -> None:
        # sourcery skip: remove-unnecessary-cast
        "set the version of the package"
        if not isinstance(version, str):  # type: ignore
            version = str(version)
        # read the current file
        _toml = self.toml_path
        try:
            with open(_toml, "rb") as f:
                pyproject = tomllib.load(f)
            pyproject["tool"]["poetry"]["version"] = version
            # update the version in the toml file
            with open(_toml, "wb") as output:
                tomli_w.dump(pyproject, output)
        except FileNotFoundError as e:
            raise FileNotFoundError(f"pyproject.toml file not found at {_toml}") from e

    # -----------------------------------------------

    def poetry_build(self) -> bool:
        """build the package by running `poetry build`"""
        return self.run_poetry(["build", "-vvv"])

    def poetry_publish(self, production: bool = False) -> bool:
        """Publish the package with poetry to PyPi (production) or Test-PyPi."""
        if not self._publish:
            log.warning(f"Publishing is disabled for {self.package_name}")
            return False
        # update the package info
        self.write_package_json()
        if production:
            # bugfix: log messages pointed at pypy.org (the PyPy project) instead of pypi.org
            log.debug("Publishing to PRODUCTION https://pypi.org")
            params = ["publish"]
        else:
            log.debug("Publishing to TEST-PyPi https://test.pypi.org")
            params = ["publish", "-r", "test-pypi"]
        r = self.run_poetry(params)
        print("")  # add a newline after the output
        return r

    def run_poetry(self, parameters: List[str]) -> bool:
        """Run a poetry commandline in the package folder.
        Note: this may write some output to the console ('All set!')

        :return: True when the command completed without error
        """
        # check for pyproject.toml in folder
        if not (self.package_path / "pyproject.toml").exists():  # pragma: no cover
            log.error(f"No pyproject.toml file found in {self.package_path}")
            return False
        # todo: call poetry directly to improve error handling
        try:
            log.debug(f"poetry {parameters} starting")
            subprocess.run(
                ["poetry"] + parameters,
                cwd=self.package_path,
                check=True,
                stdout=subprocess.PIPE,  # interestingly: errors on stdout , output on stderr .....
                universal_newlines=True,
                encoding="utf-8",
            )
            log.trace(f"poetry {parameters} completed")
        except (NotADirectoryError, FileNotFoundError) as e:  # pragma: no cover # InvalidVersion
            log.error("Exception on process, {}".format(e))
            return False
        except subprocess.CalledProcessError as e:  # pragma: no cover
            # Detect and log error detection on upload
            # UploadError
            # HTTP Error 400: File already exists. See https://test.pypi.org/help/#file-name-reuse for more information.
            # TODO: how to return the state so it can be handled
            print()  # linefeed after output
            # bugfix: do not shadow the caught exception `e` with the loop variable
            errors = [line for line in e.stdout.splitlines()[1:7] if "Error" in line]
            for error_line in errors:
                log.error(error_line)
            return False
        return True

    def check(self) -> bool:
        """check if the package is valid by running `poetry check`
        Note: this will write some output to the console ('All set!')
        """
        return self.run_poetry(["check", "-vvv"])

    def create_update_pyproject_toml(self) -> None:
        """
        create or update/overwrite a `pyproject.toml` file by combining a template file
        with the given parameters.
        and updating it with the pyi files included
        """
        if (self.toml_path).exists():
            # do not overwrite the version of a pre-existing file
            _pyproject = self.pyproject
            assert _pyproject is not None
            # clear out the packages section
            _pyproject["tool"]["poetry"]["packages"] = []
            # update the dependencies section by reading these from the template file
            with open(CONFIG.template_path / "pyproject.toml", "rb") as f:
                tpl = tomllib.load(f)
            _pyproject["tool"]["poetry"]["dependencies"] = tpl["tool"]["poetry"]["dependencies"]
        else:
            # read the template pyproject.toml file from the template folder
            try:
                with open(CONFIG.template_path / "pyproject.toml", "rb") as f:
                    _pyproject = tomllib.load(f)
                # note: can be 'latest' which is not semver
                _pyproject["tool"]["poetry"]["version"] = self.mpy_version
            except FileNotFoundError as e:
                log.error(f"Could not find template pyproject.toml file {e}")
                raise e

        # update the name , version and description of the package
        _pyproject["tool"]["poetry"]["name"] = self.package_name
        _pyproject["tool"]["poetry"]["description"] = self.description
        # write out the pyproject.toml file
        self.pyproject = _pyproject

    def update_pyproject_stubs(self) -> int:
        "Add the stub files to the pyproject.toml file"
        _pyproject = self.pyproject
        assert _pyproject is not None, "No pyproject.toml file found"
        _pyproject["tool"]["poetry"]["packages"] = [
            {"include": p.relative_to(self.package_path).as_posix()}
            for p in sorted((self.package_path).rglob("*.pyi"))
        ]
        # write out the pyproject.toml file
        self.pyproject = _pyproject
        return len(_pyproject["tool"]["poetry"]["packages"])
731
+
732
+
733
+ class StubPackage(PoetryBuilder):
734
+ """
735
+ Create a stub-only package for a specific version , port and board of micropython
736
+
737
+ properties:
738
+ - toml_path - the path to the `pyproject.toml` file
739
+ - package_path - the path to the folder where the package info will be stored ('./publish').
740
+ - pkg_version - the version of the package as used on PyPi (semver). Is stored directly in the `pyproject.toml` file
741
+ - pyproject - the contents of the `pyproject.toml` file
742
+
743
+ methods:
744
+ - from_json - load the package from json
745
+ - to_json - return the package as json
746
+
747
+ - create_update_pyproject_toml - create or update the `pyproject.toml` file
748
+ - create_readme - create the readme file
749
+ - create_license - create the license file
750
+ - copy_stubs - copy the stubs to the package folder
751
+ - update_included_stubs - update the included stubs in the `pyproject.toml` file
752
+ - create_hash - create a hash of the package files
753
+
754
+ - update_package_files - combines clean, copy, and create reeadme & updates
755
+ """
756
+
757
    def __init__(
        self,
        package_name: str,
        port: str,
        *,
        board: str = GENERIC_U,
        version: str = "0.0.1",
        description: str = "MicroPython stubs",
        stubs: Optional[StubSources] = None,
        json_data: Optional[Dict[str, Any]] = None,
    ):
        """
        Create a stub-only package for a specific version of micropython
        parameters:

        - package_name - the name of the package as used on PyPi
        - version - the version of the package as used on PyPi (semver)
        - description
        - stubs - a list of tuples (name, path) of the stubs to copy
        - json_data - Optional: a json database record that will be used to create the package from.
            When `json_data` is provided, the version, description and stubs parameters are ignored

        paths:
            ROOT_PATH - the root path of the project ('./')
            PUBLISH_PATH - root-relative path to the folder where the package info will be stored ('./publish').
            TEMPLATE_PATH - root-relative path to the folder where the template files are stored ('./publish/template').
            STUB_PATH - root-relative path to the folder where the stubs are stored ('./stubs').

        """
        # port/board are set first: the package_path / pkg_version properties used below depend on them
        self.port = port
        self.board = board
        if json_data is not None:
            # restore the full package state from the json database record
            self.from_dict(json_data)
        else:
            # store essentials
            self.package_name = package_name
            self.description = description
            self.mpy_version = clean_version(version, drop_v=True)  # Initial version

            self.create_update_pyproject_toml()

            self.stub_sources: StubSources = []
            # save the stub sources
            if stubs:
                self.stub_sources = stubs

        self.status: Status = Status(
            {
                "result": "-",
                "name": self.package_name,
                "version": self.pkg_version,
                "error": None,
                "path": self.package_path.as_posix(),
            }
        )
        # NOTE(review): super().__init__ runs LAST, after state has already been initialised
        # above - confirm PoetryBuilder/Builder.__init__ does not overwrite that state
        super().__init__(
            package_name=package_name,
            mpy_version=self.mpy_version,
            port=port,
            board=board,
            description=description,
            stubs=self.stub_sources,
        )
820
+
821
    def update_sources(self) -> StubSources:
        """
        Update the stub sources to:
        - FIRMWARE: prefer -merged stubs over bare MCU stubs
        - FROZEN: fallback to use the GENERIC folder for the frozen sources if no board specific folder exists
        """
        updated_sources = []
        # TODO: find a way to simplify this code as this is a bit magic (and hard to understand)
        for stub_type, fw_path in self.stub_sources:
            # prefer -merged stubs over bare MCU stubs
            if stub_type == StubSource.FIRMWARE:
                # Check if -merged folder exists and use that instead
                if fw_path.name.endswith("-merged"):
                    merged_path = fw_path
                else:
                    merged_path = fw_path.with_name(f"{fw_path.name}-merged")
                if (CONFIG.stub_path / merged_path).exists():
                    updated_sources.append((stub_type, merged_path))
                else:
                    updated_sources.append((stub_type, fw_path))
            elif stub_type == StubSource.FROZEN:
                # use if folder exists , else use GENERIC folder
                if (CONFIG.stub_path / fw_path).exists():
                    updated_sources.append((stub_type, fw_path))
                # NOTE(review): this existence check is on the RELATIVE path (cwd-based),
                # unlike the CONFIG.stub_path-based check above - confirm this is intended.
                # Also: when neither folder exists the FROZEN source is silently dropped.
                elif fw_path.with_name("GENERIC").exists():
                    updated_sources.append((stub_type, fw_path.with_name("GENERIC")))
            elif stub_type == StubSource.MERGED:
                # Use the default board folder instead of the GENERIC board folder (if it exists)
                if self.board.upper() == GENERIC_U:
                    family = fw_path.name.split("-")[0]
                    default_path = Path(
                        f"{family}-{clean_version(self.mpy_version, flat=True)}-{self.port}-{default_board(self.port, self.mpy_version)}-merged"
                    )
                    if (CONFIG.stub_path / default_path).exists():
                        fw_path = default_path
                updated_sources.append((stub_type, fw_path))
            # ---------
            else:
                # all other stub types are passed through unchanged
                updated_sources.append((stub_type, fw_path))
        return updated_sources
861
+
862
    def update_distribution(self, production: bool) -> bool:
        """Update the package .pyi files, if all the sources are available"""
        log.info(f"- Update {self.package_path.name}")
        log.trace(f"{self.package_path.as_posix()}")

        # check if the sources exist
        ok = self.are_package_sources_available()
        if not ok:
            log.debug(
                f"{self.package_name}: skipping as one or more source stub folders are missing"
            )
            self.status["error"] = "Skipped, stub folder(s) missing"
            # remove the (incomplete) package folder and disable publishing for this package
            shutil.rmtree(self.package_path.as_posix())
            self._publish = False  # type: ignore
            return False
        try:
            # update to -merged and fallback to GENERIC
            self.stub_sources = self.update_sources()
            self.update_package_files()
            self.update_pyproject_stubs()
            # for a new package the version could be 'latest', which is not a valid semver, so update
            self.pkg_version = self.next_package_version(production)
            # `poetry check` validates the resulting pyproject.toml
            return self.check()
        except Exception as e:  # pragma: no cover
            # record the failure in the status so callers can report it
            log.error(f"{self.package_name}: {e}")
            self.status["error"] = str(e)
            return False
889
+
890
    def build_distribution(
        self,
        production: bool,  # PyPI or Test-PyPi - USED TO FIND THE NEXT VERSION NUMBER
        force=False,  # BUILD even if no changes
    ) -> (
        bool
    ):  # sourcery skip: default-mutable-arg, extract-duplicate-method, require-parameter-annotation
        """
        Build a package
        look up the previous package version in the database
        - update package files
        - build the wheels and sdist

        :param production: PyPI or Test-PyPi -
        :param force: BUILD even if no changes
        :return: True if the package was built
        """
        log.info(f"Build: {self.package_path.name}")

        ok = self.update_distribution(production)
        self.status["version"] = self.pkg_version
        if not ok:
            log.info(f"{self.package_name}: skip - Could not build/update package")
            if not self.status["error"]:
                self.status["error"] = "Could not build/update package"
            return False

        # If there are changes to the package, then publish it
        if self.is_changed() or force:
            if force:
                log.info(f"Force build: {self.package_name} {self.pkg_version} ")
            else:
                log.info(
                    f"Found changes to package sources: {self.package_name} {self.pkg_version} "
                )
                log.trace(f"Old hash {self.hash} != New hash {self.calculate_hash()}")
            # Build the distribution files
            old_ver = self.pkg_version
            # bump to the next version before building
            self.pkg_version = self.next_package_version(production)
            self.status["version"] = self.pkg_version
            log.debug(
                f"{self.package_name}: bump version for {old_ver} to {self.pkg_version } {'production' if production else 'test'}"
            )
            self.write_package_json()
            log.trace(f"New hash: {self.package_name} {self.pkg_version} {self.hash}")
            if self.poetry_build():
                self.status["result"] = "Build OK"
            else:
                log.warning(f"{self.package_name}: skipping as build failed")
                self.status["error"] = "Poetry build failed"
                return False
        return True
943
+
944
    def publish_distribution_ifchanged(
        self,
        db: PysonDB,
        *,
        production: bool,  # PyPI or Test-PyPi
        build=False,  # force a build even when .pyi files are already present
        force=False,  # publish even if no changes
        dry_run=False,  # do not actually publish
        clean: bool = False,  # clean up afterwards
    ) -> (
        bool
    ):  # sourcery skip: assign-if-exp, default-mutable-arg, extract-method, remove-unnecessary-else, require-parameter-annotation, swap-if-else-branches, swap-if-expression
        """
        Publish a package to PyPi
        look up the previous package version in the database, and only publish if there are changes to the package
        - change determined by hash across all files

        Build
        - update package files
        - build the wheels and sdist
        Publish
        - publish to PyPi
        - update database with new hash
        """
        log.info(f"Publish: {self.package_path.name}")
        # count .pyi files in the package
        filecount = len(list(self.package_path.rglob("*.pyi")))
        if filecount == 0:
            # an empty package must always be (re)built first
            log.debug(f"{self.package_name}: starting build as no .pyi files found")
            build = True

        if build or force or self.is_changed():
            self.build_distribution(production=production, force=force)

        if not self._publish:
            log.debug(f"{self.package_name}: skip publishing")
            return False

        self.next_package_version(production=production)
        # Publish the package to PyPi, Test-PyPi or Github
        if self.is_changed():
            if self.mpy_version in SET_PREVIEW and production and not force:
                # preview / 'latest' versions are only distributed via Github
                log.warning(
                    "version: `latest` package will only be available on Github, and not published to PyPi."
                )
                self.status["result"] = "Published to GitHub"
            else:
                return self.publish_distribution(dry_run, production, db)
        elif force:
            return self.publish_distribution(dry_run, production, db)
        else:
            log.info(f"No changes to package : {self.package_name} {self.pkg_version}")

        if clean:
            self.clean()
        return True
1000
+
1001
+ def publish_distribution(self, dry_run, production, db):
1002
+ """
1003
+ Publishes the package to PyPi or Test-PyPi.
1004
+
1005
+ Args:
1006
+ dry_run (bool): If True, performs a dry run without actually publishing.
1007
+ production (bool): If True, publishes to PyPi. If False, publishes to Test-PyPi.
1008
+ db: The database object to save the package state.
1009
+
1010
+ Returns:
1011
+ bool: True if the publish was successful, False otherwise.
1012
+ """
1013
+ self.update_hashes() # resets is_changed to False
1014
+ if not dry_run:
1015
+ pub_ok = self.poetry_publish(production=production)
1016
+ else:
1017
+ log.warning(
1018
+ f"{self.package_name}: Dry run, not publishing to {'' if production else 'Test-'}PyPi"
1019
+ )
1020
+ pub_ok = True
1021
+ if not pub_ok:
1022
+ log.warning(f"{self.package_name}: Publish failed for {self.pkg_version}")
1023
+ self.status["error"] = "Publish failed"
1024
+ return False
1025
+ self.status["result"] = "Published to PyPi" if production else "Published to Test-PyPi"
1026
+ self.update_hashes()
1027
+ if dry_run:
1028
+ log.warning(f"{self.package_name}: Dry run, not saving to database")
1029
+ else:
1030
+ # get the package state and add it to the database
1031
+ db.add(self.to_dict())
1032
+ db.commit()
1033
+ return True
1034
+
1035
+ def are_package_sources_available(self) -> bool:
1036
+ """
1037
+ Check if (all) the packages sources exist.
1038
+ """
1039
+ ok = True
1040
+ for stub_type, src_path in self.update_sources():
1041
+ if (CONFIG.stub_path / src_path).exists():
1042
+ continue
1043
+ if stub_type == StubSource.FROZEN:
1044
+ # not a blocking issue if there are no frozen stubs, perhaps this port/board does not have any
1045
+ continue
1046
+ # todo: below is a workaround for different types, but where is the source of this difference coming from?
1047
+ msg = (
1048
+ f"{self.package_name}: source '{stub_type.value}' not found: {CONFIG.stub_path / src_path}"
1049
+ if isinstance(stub_type, StubSource) # type: ignore
1050
+ else f"{self.package_name}: source '{stub_type}' not found: {CONFIG.stub_path / src_path}"
1051
+ )
1052
+ self.status["error"] = msg
1053
+ log.debug(msg)
1054
+ ok = False
1055
+ return ok