ruyi 0.39.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ruyi/__init__.py +21 -0
- ruyi/__main__.py +98 -0
- ruyi/cli/__init__.py +5 -0
- ruyi/cli/builtin_commands.py +14 -0
- ruyi/cli/cmd.py +224 -0
- ruyi/cli/completer.py +50 -0
- ruyi/cli/completion.py +26 -0
- ruyi/cli/config_cli.py +153 -0
- ruyi/cli/main.py +111 -0
- ruyi/cli/self_cli.py +295 -0
- ruyi/cli/user_input.py +127 -0
- ruyi/cli/version_cli.py +45 -0
- ruyi/config/__init__.py +401 -0
- ruyi/config/editor.py +92 -0
- ruyi/config/errors.py +76 -0
- ruyi/config/news.py +39 -0
- ruyi/config/schema.py +197 -0
- ruyi/device/__init__.py +0 -0
- ruyi/device/provision.py +591 -0
- ruyi/device/provision_cli.py +40 -0
- ruyi/log/__init__.py +272 -0
- ruyi/mux/.gitignore +1 -0
- ruyi/mux/__init__.py +0 -0
- ruyi/mux/runtime.py +213 -0
- ruyi/mux/venv/__init__.py +12 -0
- ruyi/mux/venv/emulator_cfg.py +41 -0
- ruyi/mux/venv/maker.py +782 -0
- ruyi/mux/venv/venv_cli.py +92 -0
- ruyi/mux/venv_cfg.py +214 -0
- ruyi/pluginhost/__init__.py +0 -0
- ruyi/pluginhost/api.py +206 -0
- ruyi/pluginhost/ctx.py +222 -0
- ruyi/pluginhost/paths.py +135 -0
- ruyi/pluginhost/plugin_cli.py +37 -0
- ruyi/pluginhost/unsandboxed.py +246 -0
- ruyi/py.typed +0 -0
- ruyi/resource_bundle/__init__.py +20 -0
- ruyi/resource_bundle/__main__.py +55 -0
- ruyi/resource_bundle/data.py +26 -0
- ruyi/ruyipkg/__init__.py +0 -0
- ruyi/ruyipkg/admin_checksum.py +88 -0
- ruyi/ruyipkg/admin_cli.py +83 -0
- ruyi/ruyipkg/atom.py +184 -0
- ruyi/ruyipkg/augmented_pkg.py +212 -0
- ruyi/ruyipkg/canonical_dump.py +320 -0
- ruyi/ruyipkg/checksum.py +39 -0
- ruyi/ruyipkg/cli_completion.py +42 -0
- ruyi/ruyipkg/distfile.py +208 -0
- ruyi/ruyipkg/entity.py +387 -0
- ruyi/ruyipkg/entity_cli.py +123 -0
- ruyi/ruyipkg/entity_provider.py +273 -0
- ruyi/ruyipkg/fetch.py +271 -0
- ruyi/ruyipkg/host.py +55 -0
- ruyi/ruyipkg/install.py +554 -0
- ruyi/ruyipkg/install_cli.py +150 -0
- ruyi/ruyipkg/list.py +126 -0
- ruyi/ruyipkg/list_cli.py +79 -0
- ruyi/ruyipkg/list_filter.py +173 -0
- ruyi/ruyipkg/msg.py +99 -0
- ruyi/ruyipkg/news.py +123 -0
- ruyi/ruyipkg/news_cli.py +78 -0
- ruyi/ruyipkg/news_store.py +183 -0
- ruyi/ruyipkg/pkg_manifest.py +657 -0
- ruyi/ruyipkg/profile.py +208 -0
- ruyi/ruyipkg/profile_cli.py +33 -0
- ruyi/ruyipkg/protocols.py +55 -0
- ruyi/ruyipkg/repo.py +763 -0
- ruyi/ruyipkg/state.py +345 -0
- ruyi/ruyipkg/unpack.py +369 -0
- ruyi/ruyipkg/unpack_method.py +91 -0
- ruyi/ruyipkg/update_cli.py +54 -0
- ruyi/telemetry/__init__.py +0 -0
- ruyi/telemetry/aggregate.py +72 -0
- ruyi/telemetry/event.py +41 -0
- ruyi/telemetry/node_info.py +192 -0
- ruyi/telemetry/provider.py +411 -0
- ruyi/telemetry/scope.py +43 -0
- ruyi/telemetry/store.py +238 -0
- ruyi/telemetry/telemetry_cli.py +127 -0
- ruyi/utils/__init__.py +0 -0
- ruyi/utils/ar.py +74 -0
- ruyi/utils/ci.py +63 -0
- ruyi/utils/frontmatter.py +38 -0
- ruyi/utils/git.py +169 -0
- ruyi/utils/global_mode.py +204 -0
- ruyi/utils/l10n.py +83 -0
- ruyi/utils/markdown.py +73 -0
- ruyi/utils/nuitka.py +33 -0
- ruyi/utils/porcelain.py +51 -0
- ruyi/utils/prereqs.py +77 -0
- ruyi/utils/ssl_patch.py +170 -0
- ruyi/utils/templating.py +34 -0
- ruyi/utils/toml.py +115 -0
- ruyi/utils/url.py +7 -0
- ruyi/utils/xdg_basedir.py +80 -0
- ruyi/version.py +67 -0
- ruyi-0.39.0.dist-info/LICENSE-Apache.txt +201 -0
- ruyi-0.39.0.dist-info/METADATA +403 -0
- ruyi-0.39.0.dist-info/RECORD +101 -0
- ruyi-0.39.0.dist-info/WHEEL +4 -0
- ruyi-0.39.0.dist-info/entry_points.txt +3 -0
|
@@ -0,0 +1,273 @@
|
|
|
1
|
+
import abc
|
|
2
|
+
import json
|
|
3
|
+
import os
|
|
4
|
+
import pathlib
|
|
5
|
+
import sys
|
|
6
|
+
from typing import Any, Mapping, Sequence
|
|
7
|
+
|
|
8
|
+
if sys.version_info >= (3, 11):
|
|
9
|
+
import tomllib
|
|
10
|
+
else:
|
|
11
|
+
import tomli as tomllib
|
|
12
|
+
|
|
13
|
+
from ..log import RuyiLogger
|
|
14
|
+
from ..utils.porcelain import PorcelainEntity, PorcelainEntityType
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class PorcelainEntityListOutputV1(PorcelainEntity):
    """Version 1 of the porcelain (machine-readable) record describing one
    entity, mirroring the fields produced by ``BaseEntity.to_porcelain()``.
    """

    entity_type: str  # type of the entity (the key under which its payload lives)
    entity_id: str  # ID of the entity within its type
    display_name: str | None  # human-readable name, or None when absent/malformed
    data: Mapping[str, Any]  # raw backing data of the entity
    related_refs: list[str]  # references this entity declares to other entities
    reverse_refs: list[str]  # references other entities declare back to this one
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
class EntityError(Exception):
    """Root of the entity subsystem's exception hierarchy."""
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class EntityValidationError(EntityError):
    """Raised when an entity's data does not pass schema validation."""

    def __init__(self, entity_type: str, entity_id: str, cause: Exception) -> None:
        # keep the identifying pieces around for programmatic inspection
        self.entity_type = entity_type
        self.entity_id = entity_id
        self.cause = cause
        super().__init__(
            f"Entity validation failed for entity '{entity_type}:{entity_id}': {cause}"
        )
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class BaseEntity:
    """Common behavior shared by every entity kind.

    An entity is identified by its ``(entity_type, id)`` pair.  The backing
    ``data`` mapping is expected to carry the type-specific payload under a
    top-level key named after the entity type.
    """

    def __init__(
        self,
        entity_type: str,
        entity_id: str,
        data: Mapping[str, Any],
    ) -> None:
        self._entity_type = entity_type
        self._id = entity_id
        self._data = data

        # IDs of entities that point *to* this one; filled in incrementally
        # via _add_reverse_ref() while the entity graph is being linked.
        self._reverse_refs: set[str] = set()

    @property
    def entity_type(self) -> str:
        """Type of the entity."""
        return self._entity_type

    @property
    def id(self) -> str:
        """ID of the entity."""
        return self._id

    @property
    def display_name(self) -> str | None:
        """Human-readable name of the entity, or None when absent or malformed."""
        name = self._data[self.entity_type].get("display_name", None)
        # tolerate malformed data: any non-string value maps to None
        return name if isinstance(name, str) else None

    @property
    def unique_among_type_during_traversal(self) -> bool:
        """Whether the entity should be unique among all entities of the same
        type during traversal.

        For example, with this property set to ``True`` on both ``arch:foo64``
        and ``arch:foo32``, any traversal path involving them contains at most
        one ``arch:foo*`` entity, so a hypothetical traversal starting from a
        "foo64" device will not return entities only related to the "foo32"
        architecture.
        """
        # only a literal boolean True in the data enables this; a missing,
        # falsy, or wrongly-typed value all count as False
        return self._data.get("unique_among_type_during_traversal", None) is True

    @property
    def data(self) -> Any:
        """Raw data of the entity (the payload under the entity-type key)."""
        return self._data[self.entity_type]

    @property
    def related_refs(self) -> list[str]:
        """List of related entity references; empty when absent or malformed."""
        refs = self._data.get("related")
        if isinstance(refs, list) and refs:
            return refs
        return []

    @property
    def reverse_refs(self) -> list[str]:
        """List of reverse-related entity references."""
        return list(self._reverse_refs)

    def _add_reverse_ref(self, ref: str) -> None:
        # record that `ref` points at this entity
        self._reverse_refs.add(ref)

    def to_porcelain(self) -> PorcelainEntityListOutputV1:
        """Serialize this entity into the porcelain (machine) output format."""
        return {
            "ty": PorcelainEntityType.EntityListOutputV1,
            "entity_type": self.entity_type,
            "entity_id": self.id,
            "display_name": self.display_name,
            "data": self._data,
            "related_refs": self.related_refs,
            "reverse_refs": self.reverse_refs,
        }

    def __str__(self) -> str:
        return f"{self.entity_type}:{self.id}"

    def __hash__(self) -> int:
        return hash((self.entity_type, self.id))

    def __eq__(self, other: object) -> bool:
        # identity is the (type, id) pair; anything else is incomparable
        if isinstance(other, BaseEntity):
            return (self.entity_type, self.id) == (other.entity_type, other.id)
        return NotImplemented
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
class BaseEntityProvider(abc.ABC):
    """Abstract base class for entity data providers.

    Entity providers are responsible for discovering the available entity
    schemas and for loading the raw entity data itself; consumers work purely
    against this interface.
    """

    @abc.abstractmethod
    def discover_schemas(self) -> dict[str, object]:
        """Discover available entity schemas.

        Returns:
            A dictionary mapping entity types to their schema objects.
            An empty dict is a valid result when no schemas are found.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def load_entities(
        self,
        entity_types: Sequence[str],
    ) -> Mapping[str, Mapping[str, Mapping[str, Any]]]:
        """Load entities of the given types.

        Args:
            entity_types: Sequence of entity types to load

        Returns:
            A nested dictionary mapping entity types to entity IDs to raw
            entity data.
        """
        raise NotImplementedError
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
class FSEntityProvider(BaseEntityProvider):
    """Entity provider backed by an on-disk directory tree.

    Expected layout: ``<root>/_schemas/<type>.jsonschema`` holds the per-type
    JSON schemas, and ``<root>/<type>/<id>.toml`` holds the individual
    entities of each type.
    """

    def __init__(self, logger: RuyiLogger, entities_root: os.PathLike[Any]) -> None:
        """Initialize the filesystem-based entity provider.

        Args:
            logger: Logger instance to use.
            entities_root: Root directory of the entity tree; the
                ``_schemas`` directory is expected directly underneath it.
        """
        self._logger = logger
        self._entities_root = pathlib.Path(entities_root)
        self._schemas_root = self._entities_root / "_schemas"

    def discover_schemas(self) -> dict[str, object]:
        """Scan the ``_schemas`` directory for ``*.jsonschema`` files.

        Returns:
            A dictionary mapping entity types to their parsed schema objects.
            Unreadable or malformed schema files are skipped with a debug log.
        """
        result: dict[str, object] = {}

        if not os.path.isdir(self._schemas_root):
            self._logger.D(f"entity schemas directory not found: {self._schemas_root}")
            return result

        try:
            schema_files = list(self._schemas_root.glob("*.jsonschema"))
        except IOError as e:
            self._logger.W(
                f"failed to access entity schemas directory {self._schemas_root}: {e}"
            )
            return result

        for schema_path in schema_files:
            # the entity type is the filename minus the ".jsonschema" suffix
            entity_type = schema_path.name[: -len(".jsonschema")]

            try:
                with open(schema_path, "r", encoding="utf-8") as f:
                    parsed_schema = json.load(f)
            except (IOError, json.JSONDecodeError) as e:
                self._logger.D(
                    f"failed to load schema for entity type '{entity_type}': {e}"
                )
                continue

            # cache the parsed schema under its entity type
            result[entity_type] = parsed_schema

        self._logger.D(f"discovered entity types from schemas: {list(result.keys())}")
        return result

    def load_entities(
        self,
        entity_types: Sequence[str],
    ) -> Mapping[str, Mapping[str, Mapping[str, Any]]]:
        """Read ``*.toml`` entity files for each requested type.

        Args:
            entity_types: The entity types to load.

        Returns:
            A nested mapping of entity type -> entity ID -> raw entity data.
            Files that fail to read or parse are skipped with a warning.
        """
        loaded: dict[str, dict[str, dict[str, Any]]] = {t: {} for t in entity_types}

        for entity_type in entity_types:
            type_dir = self._entities_root / entity_type

            if not type_dir.exists():
                self._logger.D(f"entity type directory does not exist: {type_dir}")
                continue

            for toml_path in type_dir.glob("*.toml"):
                try:
                    with open(toml_path, "rb") as f:
                        raw_data = tomllib.load(f)
                except (IOError, tomllib.TOMLDecodeError) as e:
                    self._logger.W(f"failed to load entity from {toml_path}: {e}")
                    continue

                # the entity ID is the filename minus the ".toml" suffix
                loaded[entity_type][toml_path.name[: -len(".toml")]] = raw_data

        entity_counts = {t: len(e) for t, e in loaded.items()}
        self._logger.D(f"count of loaded entities from filesystem: {entity_counts}")
        return loaded
|
ruyi/ruyipkg/fetch.py
ADDED
|
@@ -0,0 +1,271 @@
|
|
|
1
|
+
import abc
|
|
2
|
+
import mmap
|
|
3
|
+
import os
|
|
4
|
+
import subprocess
|
|
5
|
+
from typing import Any, Final
|
|
6
|
+
|
|
7
|
+
import requests
|
|
8
|
+
from rich import progress
|
|
9
|
+
|
|
10
|
+
from ..log import RuyiLogger
|
|
11
|
+
|
|
12
|
+
# Environment variable that, when set, forces selection of the named
# fetcher backend (see get_usable_fetcher_cls()).
ENV_OVERRIDE_FETCHER: Final = "RUYI_OVERRIDE_FETCHER"
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
def _is_url_ftp(url: str) -> bool:
|
|
16
|
+
return url.lower().startswith("ftp://")
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class BaseFetcher:
    """Common interface for download backends.

    Concrete subclasses implement ``is_available()`` and ``fetch_one()``;
    this base class layers retry and multi-URL fallback logic on top.
    """

    def __init__(self, logger: RuyiLogger, urls: list[str], dest: str) -> None:
        self._logger = logger
        self.urls = urls
        self.dest = dest

    @classmethod
    @abc.abstractmethod
    def is_available(cls, logger: RuyiLogger) -> bool:
        """Whether this fetcher can be used on the current system."""
        return False

    @abc.abstractmethod
    def fetch_one(self, url: str, dest: str, resume: bool) -> bool:
        """Attempt a single download of ``url`` into ``dest``; True on success."""
        return False

    def fetch_one_with_retry(
        self,
        url: str,
        dest: str,
        resume: bool,
        retries: int,
    ) -> bool:
        """Try ``fetch_one`` up to ``retries`` times, returning True once any
        attempt succeeds."""
        for attempt in range(1, retries + 1):
            if attempt > 1:
                self._logger.I(f"retrying download ({attempt} of {retries} times)")
            if self.fetch_one(url, dest, resume):
                return True
        return False

    def fetch(self, *, resume: bool = False, retries: int = 3) -> None:
        """Download ``self.dest`` from the first of ``self.urls`` that works.

        Raises:
            RuntimeError: if every source URL fails after the configured
                number of retries.
        """
        for url in self.urls:
            self._logger.I(f"downloading {url} to {self.dest}")
            if self.fetch_one_with_retry(url, self.dest, resume, retries):
                return
        # all URLs have been tried and all have failed
        raise RuntimeError(
            f"failed to fetch '{self.dest}': all source URLs have failed"
        )

    @classmethod
    def new(cls, logger: RuyiLogger, urls: list[str], dest: str) -> "BaseFetcher":
        """Instantiate the first usable fetcher implementation on this system."""
        return get_usable_fetcher_cls(logger)(logger, urls, dest)
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
# Registry of fetcher backends keyed by name; populated via register_fetcher().
# Insertion order doubles as probe order in get_usable_fetcher_cls().
KNOWN_FETCHERS: Final[dict[str, type[BaseFetcher]]] = {}
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
def register_fetcher(name: str, f: type[BaseFetcher]) -> None:
    """Register the fetcher class ``f`` under ``name`` for later selection."""
    # NOTE: can add priority support if needed
    KNOWN_FETCHERS[name] = f
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
# Memoization state for get_usable_fetcher_cls(): once the flag is set, the
# cached class (possibly None, meaning "nothing usable") is returned directly.
_fetcher_cache_populated: bool = False
_cached_usable_fetcher_class: type[BaseFetcher] | None = None
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def get_usable_fetcher_cls(logger: RuyiLogger) -> type[BaseFetcher]:
    """Return the fetcher class to use on this system, memoized per process.

    Honors the RUYI_OVERRIDE_FETCHER environment variable when set; otherwise
    picks the first registered fetcher whose ``is_available()`` returns True.

    Raises:
        RuntimeError: if the override names an unknown or unavailable
            fetcher, or if no registered fetcher is usable.
    """
    global _fetcher_cache_populated
    global _cached_usable_fetcher_class

    # fast path: reuse the earlier selection (or re-raise its failure)
    if _fetcher_cache_populated:
        if _cached_usable_fetcher_class is None:
            raise RuntimeError("no fetcher is available on the system")
        return _cached_usable_fetcher_class

    # NOTE(review): the flag is set before selection completes, so a failed
    # override lookup below leaves the cache "populated but None" and later
    # calls report the generic "no fetcher" error instead of the override
    # error — confirm this is intended.
    _fetcher_cache_populated = True

    if override_name := os.environ.get(ENV_OVERRIDE_FETCHER):
        logger.D(f"forcing fetcher '{override_name}'")

        cls = KNOWN_FETCHERS.get(override_name)
        if cls is None:
            raise RuntimeError(f"unknown fetcher '{override_name}'")
        if not cls.is_available(logger):
            raise RuntimeError(
                f"the requested fetcher '{override_name}' is unavailable on the system"
            )
        _cached_usable_fetcher_class = cls
        return cls

    # probe registered fetchers in registration order
    for name, cls in KNOWN_FETCHERS.items():
        if not cls.is_available(logger):
            logger.D(f"fetcher '{name}' is unavailable")
            continue
        _cached_usable_fetcher_class = cls
        return cls

    raise RuntimeError("no fetcher is available on the system")
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
class CurlFetcher(BaseFetcher):
    """Fetcher backed by the external ``curl`` command-line tool."""

    def __init__(self, logger: RuyiLogger, urls: list[str], dest: str) -> None:
        super().__init__(logger, urls, dest)

    @classmethod
    def is_available(cls, logger: RuyiLogger) -> bool:
        # curl is considered usable iff "curl --version" exits successfully
        try:
            retcode = subprocess.call(["curl", "--version"], stdout=subprocess.DEVNULL)
        except Exception as e:
            logger.D("exception occurred when trying to curl --version:", e)
            return False
        return retcode == 0

    def fetch_one(self, url: str, dest: str, resume: bool) -> bool:
        argv = ["curl"]
        if resume:
            argv += ["-C", "-"]

        # curl 8.14.1 (and only that version) fails to recognize the
        # `--ftp-pasv` flag, yet that version still ships in some popular
        # distros.  Passing the flag only for FTP URLs keeps the vast
        # majority of (non-FTP) downloads working even with the buggy curl.
        #
        # See: https://github.com/curl/curl/issues/17545
        # See: https://github.com/ruyisdk/ruyi/issues/316
        if _is_url_ftp(url):
            argv.append("--ftp-pasv")

        argv += ["-L", "--connect-timeout", "60", "-o", dest, url]

        retcode = subprocess.call(argv)
        if retcode == 0:
            return True

        self._logger.W(
            f"failed to fetch distfile: command '{' '.join(argv)}' returned {retcode}"
        )
        return False
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
# Registered first, so curl is probed first by get_usable_fetcher_cls().
register_fetcher("curl", CurlFetcher)
|
|
163
|
+
|
|
164
|
+
|
|
165
|
+
class WgetFetcher(BaseFetcher):
    """Fetcher backed by the external ``wget`` command-line tool."""

    def __init__(self, logger: RuyiLogger, urls: list[str], dest: str) -> None:
        super().__init__(logger, urls, dest)

    @classmethod
    def is_available(cls, logger: RuyiLogger) -> bool:
        # wget is considered usable iff "wget --version" exits successfully
        try:
            retcode = subprocess.call(["wget", "--version"], stdout=subprocess.DEVNULL)
        except Exception as e:
            logger.D("exception occurred when trying to wget --version:", e)
            return False
        return retcode == 0

    def fetch_one(self, url: str, dest: str, resume: bool) -> bool:
        # These arguments are taken from Gentoo
        argv = ["wget"]
        if resume:
            argv.append("-c")

        # wget does not suffer from curl 8.14.1's --ftp-pasv bug, but to be
        # safe the passive-FTP flag is likewise only added for FTP URLs.
        if _is_url_ftp(url):
            argv.append("--passive-ftp")

        argv += ["-T", "60", "-O", dest, url]

        retcode = subprocess.call(argv)
        if retcode == 0:
            return True

        self._logger.W(
            f"failed to fetch distfile: command '{' '.join(argv)}' returned {retcode}"
        )
        return False
|
|
198
|
+
|
|
199
|
+
|
|
200
|
+
# wget is the second-choice external downloader, probed after curl.
register_fetcher("wget", WgetFetcher)
|
|
201
|
+
|
|
202
|
+
|
|
203
|
+
class PythonRequestsFetcher(BaseFetcher):
    """Pure-Python fallback fetcher built on the ``requests`` library.

    Always available (needs no external binaries); renders a download
    progress bar via ``rich``.
    """

    def __init__(self, logger: RuyiLogger, urls: list[str], dest: str) -> None:
        super().__init__(logger, urls, dest)

        # read the HTTP response body in page-aligned chunks
        self.chunk_size = 4 * mmap.PAGESIZE
        # TODO: User-Agent

    @classmethod
    def is_available(cls, logger: RuyiLogger) -> bool:
        # requests is a hard dependency of ruyi, so this fetcher always works
        return True

    def fetch_one(self, url: str, dest: str, resume: bool) -> bool:
        """Download ``url`` into ``dest``, optionally resuming a partial file.

        Returns True on success and False on failure (e.g. an HTTP error
        status), matching the contract of the other fetchers so that the
        retry/mirror logic in BaseFetcher can kick in.
        """
        self._logger.D(f"downloading [cyan]{url}[/] to [cyan]{dest}")

        open_mode = "ab" if resume else "wb"
        start_from = 0
        headers: dict[str, str] = {}
        if resume:
            filesize = os.stat(dest).st_size
            self._logger.D(f"resuming from position {filesize}")
            start_from = filesize
            headers["Range"] = f"bytes={filesize}-"

        # timeout guards against indefinite connect/read hangs, in line with
        # the 60-second connect timeout the curl/wget fetchers already use;
        # the `with` block ensures the streamed connection is always released
        with requests.get(url, headers=headers, stream=True, timeout=60) as r:
            if not r.ok:
                # previously an error page (404/500/...) would be written to
                # dest and reported as success; treat any non-2xx status as a
                # failed attempt instead
                self._logger.W(
                    f"failed to fetch distfile: HTTP {r.status_code} for {url}"
                )
                return False

            if resume and r.status_code != 206:
                # the server ignored our Range request and is sending the
                # whole file; appending would corrupt dest, so restart the
                # download from scratch
                self._logger.D("server ignored Range request; restarting download")
                open_mode = "wb"
                start_from = 0

            total_len: int | None = None
            if total_len_str := r.headers.get("Content-Length"):
                # Content-Length covers only the remaining bytes when a
                # partial (206) response is served, hence the start offset
                total_len = int(total_len_str) + start_from

            try:
                trc = progress.TimeRemainingColumn(compact=True, elapsed_when_finished=True)  # type: ignore[call-arg,unused-ignore]
            except TypeError:
                # rich < 12.0.0 does not support the styles we're asking here,
                # so just downgrade UX in favor of basic usability.
                #
                # see https://github.com/Textualize/rich/pull/1992
                trc = progress.TimeRemainingColumn()

            columns = (
                progress.SpinnerColumn(),
                progress.BarColumn(),
                progress.DownloadColumn(),
                progress.TransferSpeedColumn(),
                trc,
            )
            dest_filename = os.path.basename(dest)
            with open(dest, open_mode) as f:
                with progress.Progress(*columns, console=self._logger.log_console) as pg:
                    indeterminate = total_len is None
                    kwargs: dict[str, Any]
                    if indeterminate:
                        # be compatible with rich <= 12.3.0 where add_task()'s
                        # `total` parameter cannot be None
                        # see https://github.com/Textualize/rich/commit/052b15785876ad85
                        kwargs = {"start": False}
                    else:
                        kwargs = {"total": total_len}

                    task = pg.add_task(dest_filename, completed=start_from, **kwargs)
                    for chunk in r.iter_content(self.chunk_size):
                        f.write(chunk)
                        # according to the docs it's probably not okay to pulse
                        # the progress bar if the total number of steps is not
                        # yet known
                        if not indeterminate:
                            pg.advance(task, len(chunk))

        return True
|
|
269
|
+
|
|
270
|
+
|
|
271
|
+
# Registered last: the always-available pure-Python fallback.
register_fetcher("requests", PythonRequestsFetcher)
|
ruyi/ruyipkg/host.py
ADDED
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import platform
|
|
2
|
+
import sys
|
|
3
|
+
from typing import NamedTuple
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
class RuyiHost(NamedTuple):
    """A host platform expressed as an ``(os, arch)`` pair."""

    os: str
    arch: str

    def __str__(self) -> str:
        # rendered in the conventional "os/arch" form
        return "/".join((self.os, self.arch))

    def canonicalize(self) -> "RuyiHost":
        """Return a copy with both fields normalized to canonical spellings."""
        canon_os = canonicalize_os_str(self.os)
        canon_arch = canonicalize_arch_str(self.arch)
        return RuyiHost(os=canon_os, arch=canon_arch)
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def canonicalize_host_str(host: str | RuyiHost) -> str:
    """Canonicalize a host spec into the "os/arch" string form.

    A plain string may be either "os/arch" or a bare architecture, in
    which case the OS defaults to "linux".
    """
    if not isinstance(host, str):
        return str(host.canonicalize())

    os_part, sep, arch_part = host.partition("/")
    if not sep:
        # bare architecture string: assume a Linux host
        return str(RuyiHost("linux", os_part).canonicalize())
    return str(RuyiHost(os_part, arch_part).canonicalize())
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def canonicalize_arch_str(arch: str) -> str:
    """Map vendor/OS-specific architecture spellings to canonical names.

    Unknown architectures are passed through lower-cased.

    Information sources:

    * https://bugs.python.org/issue7146#msg94134
    * https://superuser.com/questions/305901/possible-values-of-processor-architecture
    """
    lowered = arch.lower()
    aliases = {
        "amd64": "x86_64",
        "em64t": "x86_64",
        "arm64": "aarch64",
        "x86": "i686",
    }
    return aliases.get(lowered, lowered)
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def canonicalize_os_str(os: str) -> str:
    """Map OS identifiers (e.g. ``sys.platform`` values) to canonical names.

    Only "win32" needs rewriting; everything else passes through unchanged.
    """
    return "windows" if os == "win32" else os
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
def get_native_host() -> RuyiHost:
    """Return the canonicalized (os, arch) pair of the running system."""
    raw_host = RuyiHost(os=sys.platform, arch=platform.machine())
    return raw_host.canonicalize()
|