ruyi 0.39.0a20250731__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the package content as it appears in the respective public registries.
Files changed (101)
  1. ruyi/__init__.py +21 -0
  2. ruyi/__main__.py +98 -0
  3. ruyi/cli/__init__.py +5 -0
  4. ruyi/cli/builtin_commands.py +14 -0
  5. ruyi/cli/cmd.py +224 -0
  6. ruyi/cli/completer.py +50 -0
  7. ruyi/cli/completion.py +26 -0
  8. ruyi/cli/config_cli.py +153 -0
  9. ruyi/cli/main.py +111 -0
  10. ruyi/cli/self_cli.py +295 -0
  11. ruyi/cli/user_input.py +127 -0
  12. ruyi/cli/version_cli.py +45 -0
  13. ruyi/config/__init__.py +401 -0
  14. ruyi/config/editor.py +92 -0
  15. ruyi/config/errors.py +76 -0
  16. ruyi/config/news.py +39 -0
  17. ruyi/config/schema.py +197 -0
  18. ruyi/device/__init__.py +0 -0
  19. ruyi/device/provision.py +591 -0
  20. ruyi/device/provision_cli.py +40 -0
  21. ruyi/log/__init__.py +272 -0
  22. ruyi/mux/.gitignore +1 -0
  23. ruyi/mux/__init__.py +0 -0
  24. ruyi/mux/runtime.py +213 -0
  25. ruyi/mux/venv/__init__.py +12 -0
  26. ruyi/mux/venv/emulator_cfg.py +41 -0
  27. ruyi/mux/venv/maker.py +782 -0
  28. ruyi/mux/venv/venv_cli.py +92 -0
  29. ruyi/mux/venv_cfg.py +214 -0
  30. ruyi/pluginhost/__init__.py +0 -0
  31. ruyi/pluginhost/api.py +206 -0
  32. ruyi/pluginhost/ctx.py +222 -0
  33. ruyi/pluginhost/paths.py +135 -0
  34. ruyi/pluginhost/plugin_cli.py +37 -0
  35. ruyi/pluginhost/unsandboxed.py +246 -0
  36. ruyi/py.typed +0 -0
  37. ruyi/resource_bundle/__init__.py +20 -0
  38. ruyi/resource_bundle/__main__.py +55 -0
  39. ruyi/resource_bundle/data.py +26 -0
  40. ruyi/ruyipkg/__init__.py +0 -0
  41. ruyi/ruyipkg/admin_checksum.py +88 -0
  42. ruyi/ruyipkg/admin_cli.py +83 -0
  43. ruyi/ruyipkg/atom.py +184 -0
  44. ruyi/ruyipkg/augmented_pkg.py +212 -0
  45. ruyi/ruyipkg/canonical_dump.py +320 -0
  46. ruyi/ruyipkg/checksum.py +39 -0
  47. ruyi/ruyipkg/cli_completion.py +42 -0
  48. ruyi/ruyipkg/distfile.py +208 -0
  49. ruyi/ruyipkg/entity.py +387 -0
  50. ruyi/ruyipkg/entity_cli.py +123 -0
  51. ruyi/ruyipkg/entity_provider.py +273 -0
  52. ruyi/ruyipkg/fetch.py +271 -0
  53. ruyi/ruyipkg/host.py +55 -0
  54. ruyi/ruyipkg/install.py +554 -0
  55. ruyi/ruyipkg/install_cli.py +150 -0
  56. ruyi/ruyipkg/list.py +126 -0
  57. ruyi/ruyipkg/list_cli.py +79 -0
  58. ruyi/ruyipkg/list_filter.py +173 -0
  59. ruyi/ruyipkg/msg.py +99 -0
  60. ruyi/ruyipkg/news.py +123 -0
  61. ruyi/ruyipkg/news_cli.py +78 -0
  62. ruyi/ruyipkg/news_store.py +183 -0
  63. ruyi/ruyipkg/pkg_manifest.py +657 -0
  64. ruyi/ruyipkg/profile.py +208 -0
  65. ruyi/ruyipkg/profile_cli.py +33 -0
  66. ruyi/ruyipkg/protocols.py +55 -0
  67. ruyi/ruyipkg/repo.py +763 -0
  68. ruyi/ruyipkg/state.py +345 -0
  69. ruyi/ruyipkg/unpack.py +369 -0
  70. ruyi/ruyipkg/unpack_method.py +91 -0
  71. ruyi/ruyipkg/update_cli.py +54 -0
  72. ruyi/telemetry/__init__.py +0 -0
  73. ruyi/telemetry/aggregate.py +72 -0
  74. ruyi/telemetry/event.py +41 -0
  75. ruyi/telemetry/node_info.py +192 -0
  76. ruyi/telemetry/provider.py +411 -0
  77. ruyi/telemetry/scope.py +43 -0
  78. ruyi/telemetry/store.py +238 -0
  79. ruyi/telemetry/telemetry_cli.py +127 -0
  80. ruyi/utils/__init__.py +0 -0
  81. ruyi/utils/ar.py +74 -0
  82. ruyi/utils/ci.py +63 -0
  83. ruyi/utils/frontmatter.py +38 -0
  84. ruyi/utils/git.py +169 -0
  85. ruyi/utils/global_mode.py +204 -0
  86. ruyi/utils/l10n.py +83 -0
  87. ruyi/utils/markdown.py +73 -0
  88. ruyi/utils/nuitka.py +33 -0
  89. ruyi/utils/porcelain.py +51 -0
  90. ruyi/utils/prereqs.py +77 -0
  91. ruyi/utils/ssl_patch.py +170 -0
  92. ruyi/utils/templating.py +34 -0
  93. ruyi/utils/toml.py +115 -0
  94. ruyi/utils/url.py +7 -0
  95. ruyi/utils/xdg_basedir.py +80 -0
  96. ruyi/version.py +67 -0
  97. ruyi-0.39.0a20250731.dist-info/LICENSE-Apache.txt +201 -0
  98. ruyi-0.39.0a20250731.dist-info/METADATA +403 -0
  99. ruyi-0.39.0a20250731.dist-info/RECORD +101 -0
  100. ruyi-0.39.0a20250731.dist-info/WHEEL +4 -0
  101. ruyi-0.39.0a20250731.dist-info/entry_points.txt +3 -0
ruyi/ruyipkg/canonical_dump.py
@@ -0,0 +1,320 @@
+from copy import deepcopy
+import re
+from typing import Final
+
+from tomlkit import comment, document, nl, string, table, ws
+from tomlkit.items import AoT, Array, InlineTable, Table, Trivia
+from tomlkit.toml_document import TOMLDocument
+
+from .pkg_manifest import (
+    BinaryDeclType,
+    BinaryFileDeclType,
+    BlobDeclType,
+    DistfileDeclType,
+    EmulatorDeclType,
+    EmulatorProgramDeclType,
+    FetchRestrictionDeclType,
+    PackageManifest,
+    PackageMetadataDeclType,
+    ProvisionableDeclType,
+    ServiceLevelDeclType,
+    SourceDeclType,
+    ToolchainComponentDeclType,
+    ToolchainDeclType,
+    VendorDeclType,
+)
+from ..utils.toml import (
+    extract_footer_comments,
+    extract_header_comments,
+    inline_table_with_spaces,
+    sorted_table,
+    str_array,
+)
+
+RE_INDENT_FIX: Final = re.compile(r"(?m)^    ([\"'{\[])")
+
+
+# XXX: To workaround https://github.com/python-poetry/tomlkit/issues/290,
+# post-process the output to have all leading 4-space indentation before
+# strings, lists or tables replaced by 2-space ones.
+def _fix_indent(s: str) -> str:
+    return RE_INDENT_FIX.sub(r"  \1", s)
+
+
+def dumps_canonical_package_manifest_toml(
+    pm: PackageManifest,
+) -> str:
+    return _fix_indent(_dump_canonical_package_manifest_toml(pm).as_string())
+
+
+def _dump_canonical_package_manifest_toml(
+    pm: PackageManifest,
+) -> TOMLDocument:
+    x = pm.to_raw()
+    doc = pm.raw_doc
+
+    y = document()
+
+    if doc is not None:
+        if header_comments := extract_header_comments(doc):
+            last_is_ws = False
+            for c in header_comments:
+                if c.startswith("#"):
+                    last_is_ws = False
+                    y.add(comment(c[1:].strip()))
+                else:
+                    last_is_ws = True
+                    y.add(ws(c))
+
+            if not last_is_ws:
+                y.add(nl())
+
+    y.add("format", string(x["format"]))
+
+    dump_metadata_decl_into(y, x["metadata"])
+    dump_distfile_decls_into(y, x["distfiles"])
+    maybe_dump_binary_decls_into(y, x.get("binary"))
+    maybe_dump_blob_decl_into(y, x.get("blob"))
+    maybe_dump_emulator_decl_into(y, x.get("emulator"))
+    maybe_dump_provisionable_decl_into(y, x.get("provisionable"))
+    maybe_dump_source_decl_into(y, x.get("source"))
+    maybe_dump_toolchain_decl_into(y, x.get("toolchain"))
+
+    if doc is not None:
+        if footer_comments := extract_footer_comments(doc):
+            if footer_comments[0].startswith("#"):
+                y.add(nl())
+
+            for c in footer_comments:
+                if c.startswith("#"):
+                    y.add(comment(c[1:].strip()))
+                else:
+                    y.add(ws(c))
+
+    return y
+
+
+def dump_service_level_entry(x: ServiceLevelDeclType) -> Table:
+    y = table()
+    y.add("level", x["level"])
+    if msgid := x.get("msgid"):
+        y.add("msgid", string(msgid))
+    if params := x.get("params"):
+        y.add("params", sorted_table(params))
+    return y
+
+
+def dump_service_level_decls(x: list[ServiceLevelDeclType]) -> AoT:
+    return AoT([dump_service_level_entry(i) for i in x])
+
+
+def dump_metadata_decl(x: PackageMetadataDeclType) -> Table:
+    y = table()
+    y.add("desc", string(x["desc"]))
+    y.add("vendor", dump_vendor_decl(x["vendor"]))
+    if "slug" in x:
+        y.add("slug", string(x["slug"]))
+    if uv := x.get("upstream_version"):
+        y.add("upstream_version", string(uv))
+    if sl := x.get("service_level"):
+        y.add(nl())
+        y.add("service_level", dump_service_level_decls(sl))
+    return y
+
+
+def dump_metadata_decl_into(doc: TOMLDocument, x: PackageMetadataDeclType) -> None:
+    doc.add(nl())
+    doc.add("metadata", dump_metadata_decl(x))
+
+
+def dump_vendor_decl(x: VendorDeclType) -> InlineTable:
+    y = inline_table_with_spaces()
+    with y:
+        y.add("name", string(x["name"]))
+        y.add("eula", string(x["eula"] if x["eula"] is not None else ""))
+    return y
+
+
+def dump_distfile_decls(x: list[DistfileDeclType]) -> AoT:
+    return AoT([dump_distfile_entry(i) for i in x])
+
+
+def dump_distfile_decls_into(doc: TOMLDocument, x: list[DistfileDeclType]) -> None:
+    doc.add(nl())
+    doc.add("distfiles", dump_distfile_decls(x))
+
+
+def dump_distfile_entry(x: DistfileDeclType) -> Table:
+    y = table()
+    y.add("name", x["name"])
+    if v := x.get("unpack"):
+        y.add("unpack", string(v))
+    y.add("size", x["size"])
+    if s := x.get("strip_components"):
+        if s != 1:
+            y.add("strip_components", s)
+    if p := x.get("prefixes_to_unpack"):
+        y.add("prefixes_to_unpack", str_array(p, multiline=len(p) > 1))
+    if "urls" in x:
+        # XXX: https://github.com/python-poetry/tomlkit/issues/290 prevents us
+        # from using 2-space indentation for the array items for now.
+        y.add("urls", str_array([str(i) for i in x["urls"]], multiline=True))
+    if r := x.get("restrict"):
+        # If `restrict` is a string, convert it to a list, fixing a common
+        # oversight in package manifests.
+        if isinstance(r, str):
+            r = [r]
+        y.add("restrict", [str(i) for i in r])
+    if f := x.get("fetch_restriction"):
+        y.add("fetch_restriction", dump_fetch_restriction(f))
+    y.add("checksums", sorted_table(x["checksums"]))
+    return y
+
+
+def dump_fetch_restriction(x: FetchRestrictionDeclType) -> Table:
+    y = table()
+    y.add("msgid", x["msgid"])
+    if "params" in x:
+        y.add("params", sorted_table(x["params"]))
+    return y
+
+
+def dump_blob_decl(x: BlobDeclType) -> Table:
+    y = table()
+    y.add("distfiles", str_array(x["distfiles"], multiline=True))
+    return y
+
+
+def maybe_dump_blob_decl_into(doc: TOMLDocument, x: BlobDeclType | None) -> None:
+    if x is None:
+        return
+    doc.add(nl())
+    doc.add("blob", dump_blob_decl(x))
+
+
+def dump_provisionable_decl(x: ProvisionableDeclType) -> Table:
+    y = table()
+    y.add("strategy", x["strategy"])
+    y.add(
+        "partition_map",
+        sorted_table({str(k): v for k, v in x["partition_map"].items()}),
+    )
+    return y
+
+
+def maybe_dump_provisionable_decl_into(
+    doc: TOMLDocument,
+    x: ProvisionableDeclType | None,
+) -> None:
+    if x is None:
+        return
+    doc.add(nl())
+    doc.add("provisionable", dump_provisionable_decl(x))
+
+
+def dump_binary_decl(x: BinaryFileDeclType, last: bool) -> Table:
+    y = table()
+    y.add("host", string(x["host"]))
+    multiline_distfiles = len(x["distfiles"]) > 1
+    y.add("distfiles", str_array(x["distfiles"], multiline=multiline_distfiles))
+    if cmds := x.get("commands", {}):
+        y.add("commands", sorted_table(cmds))
+    if not last:
+        y.add(nl())
+    return y
+
+
+def dump_binary_decls(x: list[BinaryFileDeclType]) -> AoT:
+    return AoT([dump_binary_decl(elem, i == len(x) - 1) for i, elem in enumerate(x)])
+
+
+def maybe_dump_binary_decls_into(doc: TOMLDocument, x: BinaryDeclType | None) -> None:
+    if x is None:
+        return
+    doc.add("binary", dump_binary_decls(x))
+
+
+def dump_emulator_program_decl(x: EmulatorProgramDeclType) -> Table:
+    y = table()
+    y.add("path", string(x["path"]))
+    y.add("flavor", string(x["flavor"]))
+    y.add("supported_arches", str_array(x["supported_arches"]))
+    if "binfmt_misc" in x:
+        y.add("binfmt_misc", string(x["binfmt_misc"]))
+    return y
+
+
+def dump_emulator_decl(x: EmulatorDeclType) -> Table:
+    y = table()
+    # Prefer `quirks` to `flavors`
+    quirks = x.get("quirks")
+    if quirks is None:
+        quirks = x.get("flavors", [])
+    y.add("quirks", str_array(quirks))
+    y.add("programs", AoT([dump_emulator_program_decl(i) for i in x["programs"]]))
+    return y
+
+
+def maybe_dump_emulator_decl_into(
+    doc: TOMLDocument, x: EmulatorDeclType | None
+) -> None:
+    if x is None:
+        return
+    doc.add(nl())
+    doc.add("emulator", dump_emulator_decl(x))
+
+
+def dump_source_decl(x: SourceDeclType) -> Table:
+    y = table()
+    multiline_distfiles = len(x["distfiles"]) > 1
+    y.add("distfiles", str_array(x["distfiles"], multiline=multiline_distfiles))
+    return y
+
+
+def maybe_dump_source_decl_into(doc: TOMLDocument, x: SourceDeclType | None) -> None:
+    if x is None:
+        return
+    doc.add(nl())
+    doc.add("source", dump_source_decl(x))
+
+
+def dump_toolchain_component_decl(x: ToolchainComponentDeclType) -> InlineTable:
+    y = inline_table_with_spaces()
+    with y:
+        y.add("name", string(x["name"]))
+        y.add("version", string(x["version"]))
+    return y
+
+
+def dump_toolchain_component_decls(x: list[ToolchainComponentDeclType]) -> Array:
+    sorted_x = deepcopy(x)
+    sorted_x.sort(key=lambda i: i["name"])
+    return Array(
+        [dump_toolchain_component_decl(i) for i in sorted_x],
+        Trivia(),
+        multiline=True,
+    )
+
+
+def dump_toolchain_decl(x: ToolchainDeclType) -> Table:
+    y = table()
+    y.add("target", string(x["target"]))
+    # Prefer `quirks` to `flavors`
+    quirks = x.get("quirks")
+    if quirks is None:
+        quirks = x.get("flavors", [])
+    y.add("quirks", str_array(quirks))
+    y.add("components", dump_toolchain_component_decls(x["components"]))
+    if "included_sysroot" in x:
+        y.add("included_sysroot", x["included_sysroot"])
+    return y
+
+
+def maybe_dump_toolchain_decl_into(
+    doc: TOMLDocument,
+    x: ToolchainDeclType | None,
+) -> None:
+    if x is None:
+        return
+    doc.add(nl())
+    doc.add("toolchain", dump_toolchain_decl(x))
ruyi/ruyipkg/checksum.py
@@ -0,0 +1,39 @@
+import hashlib
+from typing import BinaryIO, Final, Iterable
+
+SUPPORTED_CHECKSUM_KINDS: Final = {"sha256", "sha512"}
+
+
+def get_hash_instance(kind: str) -> "hashlib._Hash":
+    if kind not in SUPPORTED_CHECKSUM_KINDS:
+        raise ValueError(f"checksum algorithm {kind} not supported")
+    return hashlib.new(kind)
+
+
+class Checksummer:
+    def __init__(self, file: BinaryIO, checksums: dict[str, str]) -> None:
+        self.file = file
+        self.checksums = checksums
+
+    def check(self) -> None:
+        computed_csums = self.compute()
+        for kind, expected_csum in self.checksums.items():
+            if computed_csums[kind] != expected_csum:
+                raise ValueError(
+                    f"wrong {kind} checksum: want {expected_csum}, got {computed_csums[kind]}"
+                )
+
+    def compute(
+        self,
+        kinds: Iterable[str] | None = None,
+        chunksize: int = 4096,
+    ) -> dict[str, str]:
+        if kinds is None:
+            kinds = self.checksums.keys()
+
+        checksummers = {kind: get_hash_instance(kind) for kind in kinds}
+        while chunk := self.file.read(chunksize):
+            for h in checksummers.values():
+                h.update(chunk)
+
+        return {kind: h.hexdigest() for kind, h in checksummers.items()}
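The `Checksummer.compute` loop above feeds each chunk to every requested digest so the file is read only once, even when both sha256 and sha512 are declared. A minimal sketch of the same single-pass idea using only the standard library; the file path and expected checksums here are hypothetical:

import hashlib


def compute_digests(
    path: str,
    kinds: tuple[str, ...] = ("sha256", "sha512"),
    chunksize: int = 4096,
) -> dict[str, str]:
    # One hashlib object per requested algorithm; all are updated per chunk.
    hashers = {kind: hashlib.new(kind) for kind in kinds}
    with open(path, "rb") as fp:
        while chunk := fp.read(chunksize):
            for h in hashers.values():
                h.update(chunk)
    return {kind: h.hexdigest() for kind, h in hashers.items()}


# Hypothetical usage: verify against checksums taken from a manifest.
# expected = {"sha256": "..."}
# computed = compute_digests("/tmp/some-distfile.tar.zst")
# assert all(computed[k] == v for k, v in expected.items())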
ruyi/ruyipkg/cli_completion.py
@@ -0,0 +1,42 @@
+from typing import Any, Callable, TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from ..cli.completer import DynamicCompleter
+    from ..config import GlobalConfig
+
+
+def package_completer_builder(
+    cfg: "GlobalConfig",
+    filters: list[Callable[[str], bool]] | None = None,
+) -> "DynamicCompleter":
+    # Lazy import to avoid circular dependency
+    from ..ruyipkg.augmented_pkg import (
+        AugmentedPkg,
+    )  # pylint: disable=import-outside-toplevel
+    from ..ruyipkg.list_filter import (
+        ListFilter,
+    )  # pylint: disable=import-outside-toplevel
+
+    all_pkgs = list(
+        AugmentedPkg.yield_from_repo(
+            cfg,
+            cfg.repo,
+            ListFilter(),
+            ensure_repo=False,
+        )
+    )
+    if filters is not None:
+        all_pkgs = [
+            pkg
+            for pkg in all_pkgs
+            if pkg.name is not None and all(f(pkg.name) for f in filters)
+        ]
+
+    def f(prefix: str, parsed_args: object, **kwargs: Any) -> list[str]:
+        return [
+            pkg.name
+            for pkg in all_pkgs
+            if pkg.name is not None and pkg.name.startswith(prefix)
+        ]
+
+    return f
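`package_completer_builder` enumerates the repository once up front, applies any name filters, and returns a closure that only does cheap prefix matching at completion time. A minimal sketch of the same closure pattern over a plain list of names; the example names and the `(prefix, parsed_args, **kwargs)` call shape are taken as assumptions for illustration only:

from typing import Any, Callable


def prefix_completer_builder(names: list[str]) -> Callable[..., list[str]]:
    # Capture the candidate list once; the returned callable only filters by
    # the prefix currently being completed.
    def complete(prefix: str, parsed_args: object = None, **kwargs: Any) -> list[str]:
        return [n for n in names if n.startswith(prefix)]

    return complete


# Hypothetical usage:
complete = prefix_completer_builder(["gnu-plct", "gnu-upstream", "llvm-upstream"])
print(complete("gnu-"))  # ['gnu-plct', 'gnu-upstream']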
ruyi/ruyipkg/distfile.py
@@ -0,0 +1,208 @@
+from functools import cached_property
+import os
+from typing import Final
+
+from ..log import RuyiLogger
+from .checksum import Checksummer
+from .fetch import BaseFetcher
+from .pkg_manifest import DistfileDecl
+from .repo import MetadataRepo
+from .unpack import do_unpack, do_unpack_or_symlink
+from .unpack_method import UnpackMethod
+
+
+# https://github.com/ruyisdk/ruyi/issues/46
+HELP_ERROR_FETCHING: Final = """
+Downloads can fail for a multitude of reasons, most of which should not and
+cannot be handled by [yellow]Ruyi[/]. For your convenience though, please check if any
+of the following common failure modes apply to you, and take actions
+accordingly if one of them turns out to be the case:
+
+* Basic connectivity problems
+  - is [yellow]the gateway[/] reachable?
+  - are [yellow]common websites[/] reachable?
+  - is there any [yellow]DNS pollution[/]?
+* Organizational and/or ISP restrictions
+  - is there a [yellow]firewall[/] preventing Ruyi traffic?
+  - is your [yellow]ISP blocking access[/] to the source website?
+* Volatile upstream
+  - is the recorded [yellow]link dead[/]? (Please raise a Ruyi issue for a fix!)
+"""
+
+
+class Distfile:
+    def __init__(
+        self,
+        decl: DistfileDecl,
+        mr: MetadataRepo,
+    ) -> None:
+        self._decl = decl
+        self._mr = mr
+
+    @cached_property
+    def dest(self) -> str:
+        destdir = self._mr.global_config.ensure_distfiles_dir()
+        return os.path.join(destdir, self._decl.name)
+
+    @property
+    def size(self) -> int:
+        return self._decl.size
+
+    @property
+    def csums(self) -> dict[str, str]:
+        return self._decl.checksums
+
+    @property
+    def prefixes_to_unpack(self) -> list[str] | None:
+        return self._decl.prefixes_to_unpack
+
+    @property
+    def strip_components(self) -> int:
+        return self._decl.strip_components
+
+    @property
+    def unpack_method(self) -> UnpackMethod:
+        return self._decl.unpack_method
+
+    @property
+    def is_fetch_restricted(self) -> bool:
+        return self._decl.is_restricted("fetch")
+
+    @cached_property
+    def urls(self) -> list[str]:
+        return self._mr.get_distfile_urls(self._decl)
+
+    def render_fetch_instructions(self, logger: RuyiLogger, lang_code: str) -> str:
+        fr = self._decl.fetch_restriction
+        if fr is None:
+            return ""
+
+        params = {
+            "dest_path": self.dest,
+        }
+        if "params" in fr:
+            for k in params.keys():
+                # Don't allow package-defined params to override preset params,
+                # to reduce surprises for packagers.
+                if k in fr["params"]:
+                    logger.F(
+                        f"malformed package fetch instructions: the param named '{k}' is reserved and cannot be overridden by packages"
+                    )
+                    raise RuntimeError("malformed package fetch instructions")
+
+            params.update(fr["params"])
+
+        return self._mr.messages.render_message(fr["msgid"], lang_code, params)
+
+    def is_downloaded(self) -> bool:
+        """Check if the distfile has been downloaded. A return value of True
+        does NOT guarantee integrity."""
+
+        try:
+            st = os.stat(self.dest)
+            return st.st_size == self.size
+        except FileNotFoundError:
+            return False
+
+    def ensure(self, logger: RuyiLogger) -> None:
+        logger.D(f"checking {self.dest}")
+        try:
+            st = os.stat(self.dest)
+        except FileNotFoundError:
+            logger.D(f"file {self.dest} not existent")
+            return self.fetch_and_ensure_integrity(logger)
+
+        if st.st_size < self.size:
+            # assume incomplete transmission, try to resume
+            logger.D(
+                f"file {self.dest} appears incomplete: size {st.st_size} < {self.size}; resuming"
+            )
+            return self.fetch_and_ensure_integrity(logger, resume=True)
+        elif st.st_size == self.size:
+            if self.ensure_integrity_or_rm(logger):
+                logger.D(f"file {self.dest} passed checks")
+                return
+
+            # the file is already gone, re-fetch
+            logger.D(f"re-fetching {self.dest}")
+            return self.fetch_and_ensure_integrity(logger)
+
+        logger.W(
+            f"file {self.dest} is corrupt: size too big ({st.st_size} > {self.size}); deleting"
+        )
+        os.remove(self.dest)
+        return self.fetch_and_ensure_integrity(logger)
+
+    def ensure_integrity_or_rm(self, logger: RuyiLogger) -> bool:
+        try:
+            with open(self.dest, "rb") as fp:
+                cs = Checksummer(fp, self.csums)
+                cs.check()
+                return True
+        except ValueError as e:
+            logger.W(f"file {self.dest} is corrupt: {e}; deleting")
+            os.remove(self.dest)
+            return False
+
+    def fetch_and_ensure_integrity(
+        self,
+        logger: RuyiLogger,
+        *,
+        resume: bool = False,
+    ) -> None:
+        if self.is_fetch_restricted:
+            # the file must be re-fetched if we arrive here, but we cannot,
+            # because of the fetch restriction.
+            #
+            # notify the user and die
+            # TODO: allow rendering instructions for all missing fetch-restricted
+            # files at once
+            logger.F(
+                f"the file [yellow]'{self.dest}'[/] cannot be automatically fetched"
+            )
+            logger.I("instructions on fetching this file:")
+            logger.I(
+                self.render_fetch_instructions(logger, self._mr.global_config.lang_code)
+            )
+            raise SystemExit(1)
+
+        try:
+            return self._fetch_and_ensure_integrity(logger, resume=resume)
+        except RuntimeError as e:
+            logger.F(f"{e}")
+            logger.stdout(HELP_ERROR_FETCHING)
+            raise SystemExit(1)
+
+    def _fetch_and_ensure_integrity(
+        self,
+        logger: RuyiLogger,
+        *,
+        resume: bool = False,
+    ) -> None:
+        fetcher = BaseFetcher.new(logger, self.urls, self.dest)
+        fetcher.fetch(resume=resume)
+
+        if not self.ensure_integrity_or_rm(logger):
+            raise RuntimeError(
+                f"failed to fetch distfile: {self.dest} failed integrity checks"
+            )
+
+    def unpack(self, root: str | None, logger: RuyiLogger) -> None:
+        return do_unpack(
+            logger,
+            self.dest,
+            root,
+            self.strip_components,
+            self.unpack_method,
+            prefixes_to_unpack=self.prefixes_to_unpack,
+        )
+
+    def unpack_or_symlink(self, root: str | None, logger: RuyiLogger) -> None:
+        return do_unpack_or_symlink(
+            logger,
+            self.dest,
+            root,
+            self.strip_components,
+            self.unpack_method,
+            prefixes_to_unpack=self.prefixes_to_unpack,
+        )
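The `ensure` method above chooses between a fresh fetch, a resumed fetch, and a re-fetch purely from the on-disk size versus the declared size, and only runs checksum verification in the size-match case. A condensed standalone sketch of that decision table; the `fetch` and `verify_or_rm` callables are stand-ins for the Ruyi methods, not the actual APIs:

import os
from typing import Callable


def ensure_file(
    dest: str,
    expected_size: int,
    fetch: Callable[[bool], None],     # stand-in for fetch_and_ensure_integrity(resume=...)
    verify_or_rm: Callable[[], bool],  # stand-in for ensure_integrity_or_rm()
) -> None:
    try:
        actual = os.stat(dest).st_size
    except FileNotFoundError:
        return fetch(False)  # nothing on disk: fresh download
    if actual < expected_size:
        return fetch(True)  # partial file: try to resume
    if actual == expected_size:
        if verify_or_rm():
            return  # size and checksums both match
        return fetch(False)  # corrupt copy was removed: re-fetch
    os.remove(dest)  # larger than declared: discard and re-fetch
    return fetch(False)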