ruyi 0.39.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ruyi/__init__.py +21 -0
- ruyi/__main__.py +98 -0
- ruyi/cli/__init__.py +5 -0
- ruyi/cli/builtin_commands.py +14 -0
- ruyi/cli/cmd.py +224 -0
- ruyi/cli/completer.py +50 -0
- ruyi/cli/completion.py +26 -0
- ruyi/cli/config_cli.py +153 -0
- ruyi/cli/main.py +111 -0
- ruyi/cli/self_cli.py +295 -0
- ruyi/cli/user_input.py +127 -0
- ruyi/cli/version_cli.py +45 -0
- ruyi/config/__init__.py +401 -0
- ruyi/config/editor.py +92 -0
- ruyi/config/errors.py +76 -0
- ruyi/config/news.py +39 -0
- ruyi/config/schema.py +197 -0
- ruyi/device/__init__.py +0 -0
- ruyi/device/provision.py +591 -0
- ruyi/device/provision_cli.py +40 -0
- ruyi/log/__init__.py +272 -0
- ruyi/mux/.gitignore +1 -0
- ruyi/mux/__init__.py +0 -0
- ruyi/mux/runtime.py +213 -0
- ruyi/mux/venv/__init__.py +12 -0
- ruyi/mux/venv/emulator_cfg.py +41 -0
- ruyi/mux/venv/maker.py +782 -0
- ruyi/mux/venv/venv_cli.py +92 -0
- ruyi/mux/venv_cfg.py +214 -0
- ruyi/pluginhost/__init__.py +0 -0
- ruyi/pluginhost/api.py +206 -0
- ruyi/pluginhost/ctx.py +222 -0
- ruyi/pluginhost/paths.py +135 -0
- ruyi/pluginhost/plugin_cli.py +37 -0
- ruyi/pluginhost/unsandboxed.py +246 -0
- ruyi/py.typed +0 -0
- ruyi/resource_bundle/__init__.py +20 -0
- ruyi/resource_bundle/__main__.py +55 -0
- ruyi/resource_bundle/data.py +26 -0
- ruyi/ruyipkg/__init__.py +0 -0
- ruyi/ruyipkg/admin_checksum.py +88 -0
- ruyi/ruyipkg/admin_cli.py +83 -0
- ruyi/ruyipkg/atom.py +184 -0
- ruyi/ruyipkg/augmented_pkg.py +212 -0
- ruyi/ruyipkg/canonical_dump.py +320 -0
- ruyi/ruyipkg/checksum.py +39 -0
- ruyi/ruyipkg/cli_completion.py +42 -0
- ruyi/ruyipkg/distfile.py +208 -0
- ruyi/ruyipkg/entity.py +387 -0
- ruyi/ruyipkg/entity_cli.py +123 -0
- ruyi/ruyipkg/entity_provider.py +273 -0
- ruyi/ruyipkg/fetch.py +271 -0
- ruyi/ruyipkg/host.py +55 -0
- ruyi/ruyipkg/install.py +554 -0
- ruyi/ruyipkg/install_cli.py +150 -0
- ruyi/ruyipkg/list.py +126 -0
- ruyi/ruyipkg/list_cli.py +79 -0
- ruyi/ruyipkg/list_filter.py +173 -0
- ruyi/ruyipkg/msg.py +99 -0
- ruyi/ruyipkg/news.py +123 -0
- ruyi/ruyipkg/news_cli.py +78 -0
- ruyi/ruyipkg/news_store.py +183 -0
- ruyi/ruyipkg/pkg_manifest.py +657 -0
- ruyi/ruyipkg/profile.py +208 -0
- ruyi/ruyipkg/profile_cli.py +33 -0
- ruyi/ruyipkg/protocols.py +55 -0
- ruyi/ruyipkg/repo.py +763 -0
- ruyi/ruyipkg/state.py +345 -0
- ruyi/ruyipkg/unpack.py +369 -0
- ruyi/ruyipkg/unpack_method.py +91 -0
- ruyi/ruyipkg/update_cli.py +54 -0
- ruyi/telemetry/__init__.py +0 -0
- ruyi/telemetry/aggregate.py +72 -0
- ruyi/telemetry/event.py +41 -0
- ruyi/telemetry/node_info.py +192 -0
- ruyi/telemetry/provider.py +411 -0
- ruyi/telemetry/scope.py +43 -0
- ruyi/telemetry/store.py +238 -0
- ruyi/telemetry/telemetry_cli.py +127 -0
- ruyi/utils/__init__.py +0 -0
- ruyi/utils/ar.py +74 -0
- ruyi/utils/ci.py +63 -0
- ruyi/utils/frontmatter.py +38 -0
- ruyi/utils/git.py +169 -0
- ruyi/utils/global_mode.py +204 -0
- ruyi/utils/l10n.py +83 -0
- ruyi/utils/markdown.py +73 -0
- ruyi/utils/nuitka.py +33 -0
- ruyi/utils/porcelain.py +51 -0
- ruyi/utils/prereqs.py +77 -0
- ruyi/utils/ssl_patch.py +170 -0
- ruyi/utils/templating.py +34 -0
- ruyi/utils/toml.py +115 -0
- ruyi/utils/url.py +7 -0
- ruyi/utils/xdg_basedir.py +80 -0
- ruyi/version.py +67 -0
- ruyi-0.39.0.dist-info/LICENSE-Apache.txt +201 -0
- ruyi-0.39.0.dist-info/METADATA +403 -0
- ruyi-0.39.0.dist-info/RECORD +101 -0
- ruyi-0.39.0.dist-info/WHEEL +4 -0
- ruyi-0.39.0.dist-info/entry_points.txt +3 -0
ruyi/ruyipkg/repo.py
ADDED
|
@@ -0,0 +1,763 @@
|
|
|
1
|
+
import glob
|
|
2
|
+
import itertools
|
|
3
|
+
import os.path
|
|
4
|
+
import pathlib
|
|
5
|
+
import sys
|
|
6
|
+
from typing import (
|
|
7
|
+
Any,
|
|
8
|
+
Final,
|
|
9
|
+
Iterable,
|
|
10
|
+
Mapping,
|
|
11
|
+
Sequence,
|
|
12
|
+
TypedDict,
|
|
13
|
+
TypeGuard,
|
|
14
|
+
TYPE_CHECKING,
|
|
15
|
+
cast,
|
|
16
|
+
)
|
|
17
|
+
from urllib import parse
|
|
18
|
+
|
|
19
|
+
from pygit2 import clone_repository
|
|
20
|
+
from pygit2.repository import Repository
|
|
21
|
+
|
|
22
|
+
from ..log import RuyiLogger
|
|
23
|
+
from ..pluginhost.ctx import PluginHostContext
|
|
24
|
+
from ..telemetry.scope import TelemetryScopeConfig
|
|
25
|
+
from ..utils.git import RemoteGitProgressIndicator, pull_ff_or_die
|
|
26
|
+
from ..utils.url import urljoin_for_sure
|
|
27
|
+
from .entity import EntityStore
|
|
28
|
+
from .entity_provider import BaseEntityProvider, FSEntityProvider
|
|
29
|
+
from .msg import RepoMessageStore
|
|
30
|
+
from .news_store import NewsItemStore
|
|
31
|
+
from .pkg_manifest import (
|
|
32
|
+
BoundPackageManifest,
|
|
33
|
+
DistfileDecl,
|
|
34
|
+
InputPackageManifestType,
|
|
35
|
+
is_prerelease,
|
|
36
|
+
)
|
|
37
|
+
from .profile import PluginProfileProvider, ProfileProxy
|
|
38
|
+
from .protocols import ProvidesPackageManifests
|
|
39
|
+
|
|
40
|
+
if sys.version_info >= (3, 11):
|
|
41
|
+
import tomllib
|
|
42
|
+
else:
|
|
43
|
+
import tomli as tomllib
|
|
44
|
+
|
|
45
|
+
if TYPE_CHECKING:
|
|
46
|
+
from typing_extensions import NotRequired
|
|
47
|
+
|
|
48
|
+
# for avoiding circular import
|
|
49
|
+
from ..config import GlobalConfig
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
class RepoConfigV0Type(TypedDict):
    """Legacy v0 repo config schema (identified by the *absence* of the
    ``ruyi-repo`` version marker key)."""

    # base URL from which distfiles are downloaded
    dist: str
    # optional URL of the repo's documentation site
    doc_uri: "NotRequired[str]"
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def validate_repo_config_v0(x: object) -> TypeGuard[RepoConfigV0Type]:
    """Structurally validate *x* as a v0 repo config.

    A v0 config is a dict carrying a string ``dist``, optionally a string
    ``doc_uri``, and no ``ruyi-repo`` version marker (whose presence would
    indicate a v1+ config instead).
    """
    if not isinstance(x, dict):
        return False
    # the version marker means this is v1 or later, hence not v0
    if "ruyi-repo" in x:
        return False
    if not isinstance(x.get("dist"), str):
        return False
    # doc_uri is optional, but must be a string when present
    return "doc_uri" not in x or isinstance(x["doc_uri"], str)
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class RepoConfigV1Repo(TypedDict):
    """The ``[repo]`` section of a v1 repo config."""

    # optional URL of the repo's documentation site
    doc_uri: "NotRequired[str]"
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
class RepoConfigV1Mirror(TypedDict):
    """One ``[[mirrors]]`` entry: a named list of base URLs."""

    # mirror name as referenced by mirror:// URLs
    id: str
    # base URLs to resolve file paths against, in declaration order
    urls: list[str]
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
class RepoConfigV1Telemetry(TypedDict):
    """One ``[[telemetry]]`` entry: an upload endpoint for one telemetry
    scope."""

    id: str
    scope: TelemetryScopeConfig
    # endpoint URL telemetry of this scope is uploaded to
    url: str
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
# Functional TypedDict syntax is required because "ruyi-repo" is not a valid
# Python identifier.
RepoConfigV1Type = TypedDict(
    "RepoConfigV1Type",
    {
        # version marker; must be "v1"
        "ruyi-repo": str,
        "repo": "NotRequired[RepoConfigV1Repo]",
        "mirrors": list[RepoConfigV1Mirror],
        "telemetry": "NotRequired[list[RepoConfigV1Telemetry]]",
    },
)
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def validate_repo_config_v1(x: object) -> TypeGuard[RepoConfigV1Type]:
    """Structurally validate *x* as a v1 repo config.

    Only the ``ruyi-repo = "v1"`` version marker is checked; the remaining
    fields are trusted to be well-formed.
    """
    if not isinstance(x, dict):
        return False
    marker = cast(dict[str, object], x).get("ruyi-repo", "")
    return marker == "v1"
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
# ID of the implicit default mirror carrying ruyi's own distfiles; v0 configs
# are mapped onto this mirror, and scheme-less dist URLs resolve against it.
MIRROR_ID_RUYI_DIST: Final = "ruyi-dist"
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
class RepoConfig:
    """Parsed repository-level configuration (``config.toml``).

    Knows how to build itself from either the legacy v0 or the current v1
    schema, and how to expand ``mirror://``-style distfile URLs into
    concrete download URLs.
    """

    def __init__(
        self,
        mirrors: list[RepoConfigV1Mirror],
        repo: RepoConfigV1Repo | None,
        telemetry_apis: list[RepoConfigV1Telemetry] | None,
    ) -> None:
        # index mirrors and telemetry API declarations by ID for O(1) lookup
        self.mirrors = {m["id"]: m["urls"] for m in mirrors}
        self.repo = repo

        self.telemetry_apis: dict[str, RepoConfigV1Telemetry]
        self.telemetry_apis = (
            {t["id"]: t for t in telemetry_apis}
            if telemetry_apis is not None
            else {}
        )

    @classmethod
    def from_object(cls, obj: object) -> "RepoConfig":
        """Dispatch to the schema-specific parser based on the version
        marker key."""
        if not isinstance(obj, dict):
            raise ValueError("repo config must be a dict")
        parser = cls.from_v1 if "ruyi-repo" in obj else cls.from_v0
        return parser(cast(object, obj))

    @classmethod
    def from_v0(cls, obj: object) -> "RepoConfig":
        """Build a config from the legacy v0 schema."""
        if not validate_repo_config_v0(obj):
            # TODO: more detail in the error message
            raise RuntimeError("malformed v0 repo config")

        # v0 only knew a single dist base URL; express it as the v1-style
        # default dist mirror
        mirrors: list[RepoConfigV1Mirror] = [
            {
                "id": MIRROR_ID_RUYI_DIST,
                "urls": [urljoin_for_sure(obj["dist"], "dist/")],
            },
        ]

        repo_section: RepoConfigV1Repo | None
        repo_section = {"doc_uri": obj["doc_uri"]} if "doc_uri" in obj else None

        return cls(mirrors, repo_section, None)

    @classmethod
    def from_v1(cls, obj: object) -> "RepoConfig":
        """Build a config from the v1 schema."""
        if not validate_repo_config_v1(obj):
            # TODO: more detail in the error message
            raise RuntimeError("malformed v1 repo config")
        return cls(obj["mirrors"], obj.get("repo"), obj.get("telemetry"))

    def get_dist_urls_for_file(self, logger: RuyiLogger, url: str) -> list[str]:
        """Expand *url* into the concrete download URLs to try, in order.

        Scheme-less URLs resolve against the default dist mirror,
        ``mirror://<id>/<path>`` against the named mirror; http(s) URLs
        pass through unchanged, and anything else is rejected with a
        warning.
        """
        parts = parse.urlparse(url)
        rel_path = parts.path.lstrip("/")
        scheme = parts.scheme
        if scheme == "":
            return self.get_mirror_urls_for_file(MIRROR_ID_RUYI_DIST, rel_path)
        if scheme == "mirror":
            return self.get_mirror_urls_for_file(parts.netloc, rel_path)
        if scheme in ("http", "https"):
            # pass-through known protocols
            return [url]
        # deny others
        logger.W(f"unrecognized dist URL scheme: {scheme}")
        return []

    def get_mirror_urls_for_file(self, mirror_id: str, path: str) -> list[str]:
        """Resolve *path* against every base URL of the given mirror.

        Unknown mirror IDs yield an empty list.
        """
        bases = self.mirrors.get(mirror_id, [])
        return [parse.urljoin(b, path) for b in bases]

    def get_telemetry_api_url(self, scope: TelemetryScopeConfig) -> str | None:
        """Return the endpoint URL of the first telemetry API declared for
        *scope*, or None when no declaration matches."""
        for decl in self.telemetry_apis.values():
            if decl.get("scope", "") == scope:
                return decl.get("url", None)
        return None
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
class ArchProfileStore:
    """Eagerly-built cache of all profiles one arch's profile plugin
    provides."""

    def __init__(self, phctx: PluginHostContext[Any, Any], arch: str) -> None:
        self._arch = arch
        # profiles for an arch come from the plugin named "ruyi-profile-<arch>"
        self._provider = PluginProfileProvider(phctx, f"ruyi-profile-{arch}")
        self._init_cache()

    def _init_cache(self) -> None:
        # wrap every profile ID the provider knows about in a proxy, up front
        self._profiles_cache: dict[str, ProfileProxy] = {
            pid: ProfileProxy(self._provider, self._arch, pid)
            for pid in self._provider.list_all_profile_ids()
        }

    def __contains__(self, profile_id: str) -> bool:
        return profile_id in self._profiles_cache

    def __getitem__(self, profile_id: str) -> ProfileProxy:
        try:
            return self._profiles_cache[profile_id]
        except KeyError as e:
            # re-raise with a friendlier message, keeping the original cause
            raise KeyError(
                f"profile '{profile_id}' is not supported by this arch"
            ) from e

    def get(self, profile_id: str) -> ProfileProxy | None:
        """Like ``[]`` but returns None instead of raising."""
        return self._profiles_cache.get(profile_id)

    def iter_profiles(self) -> Iterable[ProfileProxy]:
        """Iterate over every profile known for this arch."""
        return self._profiles_cache.values()
|
|
214
|
+
|
|
215
|
+
|
|
216
|
+
class MetadataRepo(ProvidesPackageManifests):
    """Local checkout of the ruyi metadata repository.

    Wraps the on-disk git clone (lazily opening or cloning it on first
    use) and exposes lazily-built caches over its contents: repo config,
    messages, package manifests, profiles, news items, entities and
    plugins.
    """

    def __init__(self, gc: "GlobalConfig") -> None:
        # repo location, remote URL and branch all come from global config
        self._gc = gc
        self.root = gc.get_repo_dir()
        self.remote = gc.get_repo_url()
        self.branch = gc.get_repo_branch()
        # pygit2 handle; stays None until ensure_git_repo() first runs
        self.repo: Repository | None = None

        # lazily-initialized caches, populated by the ensure_* methods
        self._cfg: RepoConfig | None = None
        self._cfg_initialized = False
        self._messages: RepoMessageStore | None = None
        # package name -> version -> manifest
        self._pkgs: dict[str, dict[str, BoundPackageManifest]] = {}
        # category -> package name -> version -> manifest
        self._categories: dict[str, dict[str, dict[str, BoundPackageManifest]]] = {}
        self._slug_cache: dict[str, BoundPackageManifest] = {}
        self._supported_arches: set[str] | None = None
        self._arch_profile_stores: dict[str, ArchProfileStore] = {}
        self._news_cache: NewsItemStore | None = None
        # entities come both from the repo's "entities" directory and from
        # package manifests (via MetadataRepoEntityProvider)
        self._entity_store: EntityStore = EntityStore(
            gc.logger,
            FSEntityProvider(gc.logger, pathlib.Path(self.root) / "entities"),
            MetadataRepoEntityProvider(self),
        )
        self._plugin_host_ctx = PluginHostContext.new(gc.logger, self.plugin_root)
        self._plugin_fn_evaluator = self._plugin_host_ctx.make_evaluator()

    @property
    def repo_id(self) -> str:
        """Stable identifier of this repository."""
        # TODO: proper multi-repo support
        return "ruyisdk"

    @property
    def logger(self) -> RuyiLogger:
        return self._gc.logger

    @property
    def plugin_root(self) -> pathlib.Path:
        """Directory holding the repo's plugins, one subdirectory per
        plugin."""
        return pathlib.Path(self.root) / "plugins"

    def iter_plugin_ids(self) -> Iterable[str]:
        """Yield the ID (directory name) of every plugin present in the
        repo; yields nothing if the plugin directory is absent."""
        try:
            for p in self.plugin_root.iterdir():
                if p.is_dir():
                    yield p.name
        except (FileNotFoundError, NotADirectoryError):
            # plugin root missing or not a directory (e.g. repo not cloned
            # yet): treat as "no plugins"
            pass

    def get_from_plugin(self, plugin_id: str, key: str) -> object | None:
        """Fetch the value bound to *key* in the given plugin, or None."""
        return self._plugin_host_ctx.get_from_plugin(plugin_id, key)

    def eval_plugin_fn(
        self,
        function: object,
        *args: object,
        **kwargs: object,
    ) -> object:
        """Evaluates a function from a plugin.

        NOTE: There is security implication for the unsandboxed plugin backend,
        which provides **NO GUARDS** against arbitrary inputs for the ``function``
        argument because there is **no sandbox**."""

        return self._plugin_fn_evaluator.eval_function(function, *args, **kwargs)

    def ensure_git_repo(self) -> Repository:
        """Return the pygit2 repo handle, opening the existing checkout or
        cloning from the configured remote if nothing exists yet."""
        if self.repo is not None:
            return self.repo

        if os.path.exists(self.root):
            self.repo = Repository(self.root)
            return self.repo

        self.logger.D(f"{self.root} does not exist, cloning from {self.remote}")

        with RemoteGitProgressIndicator() as pr:
            repo = clone_repository(
                self.remote,
                self.root,
                checkout_branch=self.branch,
                callbacks=pr,
            )
            # pygit2's type info is incomplete as of 1.16.0, and pyright
            # will not look at the typeshed stub for the appropriate signature
            # because pygit2 has the py.typed marker. Workaround the error for
            # now by explicitly casting to the right runtime type.
            self.repo = cast(Repository, repo)  # type: ignore[redundant-cast]

        # reinit config after cloning
        self._cfg_initialized = False
        self._read_config(False)

        return self.repo

    def sync(self) -> None:
        """Fast-forward the local checkout to the configured remote branch,
        cloning first if necessary."""
        repo = self.ensure_git_repo()

        # only manage the repo settings on the user's behalf if the user
        # has not overridden the repo directory themselves
        allow_auto_management = self._gc.override_repo_dir is None

        return pull_ff_or_die(
            self.logger,
            repo,
            "origin",
            self.remote,
            self.branch,
            allow_auto_management=allow_auto_management,
        )

    @property
    def global_config(self) -> "GlobalConfig":
        return self._gc

    @property
    def config(self) -> RepoConfig:
        """The repo config, cloning the repo first if it is absent."""
        x = self._read_config(True)
        assert x is not None
        return x

    @property
    def maybe_config(self) -> RepoConfig | None:
        """Like ``config``, but does not pull down the repo in case the repo is
        not locally present at invocation time."""
        return self._read_config(False)

    def _read_config(self, ensure_if_not_existing: bool) -> RepoConfig | None:
        """Parse and cache ``config.toml``; returns None (and caches the
        miss) when the file does not exist."""
        if self._cfg_initialized:
            return self._cfg

        if ensure_if_not_existing:
            self.ensure_git_repo()

        # we can read the config file directly because we're operating from a
        # working tree (as opposed to a bare repo)
        #
        # this is a fake loop (that "loops" only once)
        # here it's only for being able to use break's
        while True:
            try:
                with open(os.path.join(self.root, "config.toml"), "rb") as fp:
                    obj = tomllib.load(fp)
                break
            except FileNotFoundError:
                pass

            # no config file: remember that so we don't re-stat every call
            self._cfg_initialized = True
            return None

        self._cfg_initialized = True
        self._cfg = RepoConfig.from_object(obj)
        return self._cfg

    @property
    def messages(self) -> RepoMessageStore:
        """Parsed ``messages.toml``; an empty store if the file is
        missing."""
        if self._messages is not None:
            return self._messages

        self.ensure_git_repo()

        obj: dict[str, object] = {}
        try:
            with open(os.path.join(self.root, "messages.toml"), "rb") as fp:
                obj = tomllib.load(fp)
        except FileNotFoundError:
            # missing messages file is fine: fall through with empty dict
            pass

        self._messages = RepoMessageStore.from_object(obj)
        return self._messages

    def iter_pkg_manifests(
        self,
        ensure_repo: bool = True,
    ) -> Iterable[BoundPackageManifest]:
        """Yield every package manifest found under ``manifests/``.

        When *ensure_repo* is True the repo is cloned first if absent;
        otherwise a missing manifests directory simply yields nothing.
        """
        if ensure_repo:
            self.ensure_git_repo()

        manifests_dir = os.path.join(self.root, "manifests")
        try:
            for f in os.scandir(manifests_dir):
                # each subdirectory of manifests/ is a category
                if not f.is_dir():
                    continue
                yield from self._iter_pkg_manifests_from_category(f.path)
        except FileNotFoundError:
            return

    def _iter_pkg_manifests_from_category(
        self,
        category_dir: str,
    ) -> Iterable[BoundPackageManifest]:
        """Yield manifests from one category directory, whose layout is
        ``<category>/<pkg-name>/<version>.toml``."""
        self.ensure_git_repo()

        category = os.path.basename(category_dir)

        # all valid semver strings start with a number
        for f in glob.iglob("*/[0-9]*.toml", root_dir=category_dir):
            pkg_name, pkg_ver = os.path.split(f)
            pkg_ver = pkg_ver[:-5]  # strip the ".toml" suffix
            with open(os.path.join(category_dir, f), "rb") as fp:
                yield BoundPackageManifest(
                    category,
                    pkg_name,
                    pkg_ver,
                    cast(InputPackageManifestType, tomllib.load(fp)),
                    self,
                )

    def get_supported_arches(self) -> list[str]:
        """List arches that have a profile plugin (``ruyi-profile-<arch>``)
        in the repo; cached after the first call."""
        if self._supported_arches is not None:
            return list(self._supported_arches)

        res: set[str] = set()
        for plugin_id in self.iter_plugin_ids():
            if plugin_id.startswith("ruyi-profile-"):
                # keep the part after the "ruyi-profile-" prefix (13 chars)
                res.add(plugin_id[13:])
        self._supported_arches = res
        return list(res)

    def get_profile(self, name: str) -> ProfileProxy | None:
        """Find a profile by name across all supported arches, returning
        the first match (arch iteration order is unspecified)."""
        # TODO: deprecate this after making sure every call site has gained
        # arch-awareness
        for arch in self.get_supported_arches():
            store = self.ensure_profile_store_for_arch(arch)
            if p := store.get(name):
                return p
        return None

    def get_profile_for_arch(self, arch: str, name: str) -> ProfileProxy | None:
        """Find the named profile for one specific arch, or None."""
        store = self.ensure_profile_store_for_arch(arch)
        return store.get(name)

    def iter_profiles_for_arch(self, arch: str) -> Iterable[ProfileProxy]:
        """Iterate all profiles provided for *arch*."""
        store = self.ensure_profile_store_for_arch(arch)
        return store.iter_profiles()

    def ensure_profile_store_for_arch(self, arch: str) -> ArchProfileStore:
        """Get (creating and caching on first use) the profile store for
        *arch*."""
        if arch in self._arch_profile_stores:
            return self._arch_profile_stores[arch]

        self.ensure_git_repo()
        store = ArchProfileStore(self._plugin_host_ctx, arch)
        self._arch_profile_stores[arch] = store
        return store

    def ensure_pkg_cache(
        self,
        ensure_repo: bool = True,
    ) -> None:
        """Build the by-name / by-category / by-slug manifest caches; no-op
        if already populated."""
        if self._pkgs:
            return

        if ensure_repo:
            self.ensure_git_repo()

        cache_by_name: dict[str, dict[str, BoundPackageManifest]] = {}
        cache_by_category: dict[str, dict[str, dict[str, BoundPackageManifest]]] = {}
        slug_cache: dict[str, BoundPackageManifest] = {}
        for pm in self.iter_pkg_manifests(ensure_repo=ensure_repo):
            if pm.name not in cache_by_name:
                cache_by_name[pm.name] = {}
            cache_by_name[pm.name][pm.ver] = pm

            if pm.category not in cache_by_category:
                cache_by_category[pm.category] = {pm.name: {}}
            if pm.name not in cache_by_category[pm.category]:
                cache_by_category[pm.category][pm.name] = {}
            cache_by_category[pm.category][pm.name][pm.ver] = pm

            # slugs are optional and assumed unique across the repo;
            # a duplicate slug would silently keep only the last manifest
            if pm.slug:
                slug_cache[pm.slug] = pm

        self._pkgs = cache_by_name
        self._categories = cache_by_category
        self._slug_cache = slug_cache

    def iter_pkgs(
        self,
        ensure_repo: bool = True,
    ) -> Iterable[tuple[str, str, dict[str, BoundPackageManifest]]]:
        """Yield ``(category, pkg_name, {version: manifest})`` for every
        known package."""
        if not self._pkgs:
            self.ensure_pkg_cache(ensure_repo=ensure_repo)

        for cat, cat_pkgs in self._categories.items():
            for pkg_name, pkg_vers in cat_pkgs.items():
                yield (cat, pkg_name, pkg_vers)

    def get_pkg_by_slug(
        self,
        slug: str,
        ensure_repo: bool = True,
    ) -> BoundPackageManifest | None:
        """Look up a package manifest by its (optional, unique) slug."""
        if not self._pkgs:
            self.ensure_pkg_cache(ensure_repo=ensure_repo)

        return self._slug_cache.get(slug)

    def iter_pkg_vers(
        self,
        name: str,
        category: str | None = None,
        ensure_repo: bool = True,
    ) -> Iterable[BoundPackageManifest]:
        """Iterate all versions of the named package.

        Raises KeyError if the package (or category) is unknown.
        """
        if not self._pkgs:
            self.ensure_pkg_cache(ensure_repo=ensure_repo)

        if category is not None:
            return self._categories[category][name].values()
        return self._pkgs[name].values()

    def get_pkg(
        self,
        name: str,
        category: str,
        ver: str,
        *,
        ensure_repo: bool = True,
    ) -> BoundPackageManifest | None:
        """Look up one exact (category, name, version) manifest, or None."""
        if not self._pkgs:
            self.ensure_pkg_cache(ensure_repo=ensure_repo)

        try:
            return self._categories[category][name][ver]
        except KeyError:
            return None

    def get_pkg_latest_ver(
        self,
        name: str,
        category: str | None = None,
        include_prerelease_vers: bool = False,
        ensure_repo: bool = True,
    ) -> BoundPackageManifest:
        """Return the manifest with the highest semver among the package's
        versions.

        Raises KeyError for an unknown package, and ValueError when every
        version is filtered out (e.g. all are prereleases and
        *include_prerelease_vers* is False).
        """
        if not self._pkgs:
            self.ensure_pkg_cache(ensure_repo=ensure_repo)

        if category is not None:
            pkgset = self._categories[category]
        else:
            pkgset = self._pkgs

        all_semvers = [pm.semver for pm in pkgset[name].values()]
        if not include_prerelease_vers:
            all_semvers = [sv for sv in all_semvers if not is_prerelease(sv)]
        latest_ver = max(all_semvers)
        return pkgset[name][str(latest_ver)]

    def get_distfile_urls(self, decl: DistfileDecl) -> list[str]:
        """Expand a distfile declaration into the full list of candidate
        download URLs (default mirror first, then declared URLs)."""
        urls_to_expand: list[str] = []
        # unless mirror usage is restricted, try the default dist mirror
        if not decl.is_restricted("mirror"):
            urls_to_expand.append(f"mirror://{MIRROR_ID_RUYI_DIST}/{decl.name}")

        if decl.urls:
            urls_to_expand.extend(decl.urls)

        cfg = self.config
        return list(
            itertools.chain(
                *(
                    cfg.get_dist_urls_for_file(self.logger, url)
                    for url in urls_to_expand
                )
            )
        )

    def ensure_news_cache(
        self,
        ensure_repo: bool = True,
    ) -> None:
        """Build the news item store from ``news/*.md``; no-op if already
        built."""
        if self._news_cache is not None:
            return

        if ensure_repo:
            self.ensure_git_repo()
        news_dir = os.path.join(self.root, "news")

        rs_store = self._gc.news_read_status
        rs_store.load()

        cache = NewsItemStore(rs_store)
        try:
            for f in glob.iglob("*.md", root_dir=news_dir):
                with open(os.path.join(news_dir, f), "r", encoding="utf-8") as fp:
                    try:
                        contents = fp.read()
                    except UnicodeDecodeError:
                        # skip (but log) items that are not valid UTF-8
                        self.logger.W(
                            f"UnicodeDecodeError: {os.path.join(news_dir, f)}"
                        )
                        continue
                cache.add(f, contents)  # may fail but failures are harmless
        except FileNotFoundError:
            # no news directory: finalize an empty cache below
            pass

        cache.finalize()
        self._news_cache = cache

    def news_store(
        self,
        ensure_repo: bool = True,
    ) -> NewsItemStore:
        """Return the (lazily built) news item store."""
        if self._news_cache is None:
            self.ensure_news_cache(ensure_repo=ensure_repo)
        assert self._news_cache is not None
        return self._news_cache

    def run_plugin_cmd(self, cmd_name: str, args: list[str]) -> int:
        """Run the ``ruyi-cmd-<name>`` plugin's entrypoint with *args* and
        return its exit code.

        Raises RuntimeError when the plugin exposes no entrypoint.
        """
        plugin_id = f"ruyi-cmd-{cmd_name.lower()}"

        plugin_entrypoint = self._plugin_host_ctx.get_from_plugin(
            plugin_id,
            "plugin_cmd_main_v1",
            is_cmd_plugin=True,  # allow access to host FS for command plugins
        )
        if plugin_entrypoint is None:
            raise RuntimeError(f"cmd entrypoint not found in plugin '{plugin_id}'")

        ret = self.eval_plugin_fn(plugin_entrypoint, args)
        if not isinstance(ret, int):
            # tolerate misbehaving plugins, but surface the problem loudly
            self.logger.W(
                f"unexpected return type of cmd plugin '{plugin_id}': {type(ret)} is not int."
            )
            self.logger.I("forcing return code to 1; the plugin should be fixed")
            ret = 1
        return ret

    @property
    def entity_store(self) -> EntityStore:
        """Get the entity store for this repository."""
        return self._entity_store

    def get_telemetry_api_url(self, scope: TelemetryScopeConfig) -> str | None:
        """Resolve the telemetry upload URL for *scope* without forcing a
        clone of the metadata repo."""
        # do not clone the metadata repo if it is absent, in case the user
        # is simply trying trivial commands like `ruyi version`.
        if repo_cfg := self.maybe_config:
            return repo_cfg.get_telemetry_api_url(scope)
        return None
|
|
650
|
+
|
|
651
|
+
|
|
652
|
+
# Entity type name under which packages are exposed to the entity store.
PACKAGE_ENTITY_TYPE = "pkg"
# JSON Schema (draft-07) describing the synthesized "pkg" entity documents
# produced by MetadataRepoEntityProvider.
PACKAGE_ENTITY_TYPE_SCHEMA = {
    "$schema": "http://json-schema.org/draft-07/schema#",
    "required": ["pkg"],
    "properties": {
        "pkg": {
            "type": "object",
            "properties": {
                "id": {"type": "string"},
                "display_name": {"type": "string"},
                "name": {"type": "string"},
                "category": {"type": "string"},
            },
            "required": ["id", "display_name", "name", "category"],
        },
        "related": {
            "type": "array",
            "description": "List of related entity references",
            "items": {"type": "string", "pattern": "^.+:.+"},
        },
        "unique_among_type_during_traversal": {
            "type": "boolean",
            "description": "Whether this entity should be unique among all entities of the same type during traversal",
        },
    },
}
|
|
678
|
+
|
|
679
|
+
|
|
680
|
+
class PackageEntityData(TypedDict):
    """Payload of a "pkg" entity; mirrors the "pkg" object in
    PACKAGE_ENTITY_TYPE_SCHEMA."""

    # "<category>/<name>"
    id: str
    display_name: str
    name: str
    category: str
|
|
685
|
+
|
|
686
|
+
|
|
687
|
+
class PackageEntity(TypedDict):
    """A complete "pkg" entity document as fed to the entity store."""

    pkg: PackageEntityData
    # references to related entities, each of the form "type:name"
    related: "NotRequired[list[str]]"
    unique_among_type_during_traversal: "NotRequired[bool]"
|
|
691
|
+
|
|
692
|
+
|
|
693
|
+
class MetadataRepoEntityProvider(BaseEntityProvider):
    """Entity provider that synthesizes "pkg" entities from the repo's
    package manifests."""

    def __init__(self, repo: MetadataRepo) -> None:
        super().__init__()
        self._repo = repo

    def discover_schemas(self) -> dict[str, object]:
        """Expose the schema of every entity type this provider serves."""
        return {
            PACKAGE_ENTITY_TYPE: PACKAGE_ENTITY_TYPE_SCHEMA,
        }

    def load_entities(
        self,
        entity_types: Sequence[str],
    ) -> Mapping[str, Mapping[str, Mapping[str, Any]]]:
        """Load entities of the requested types; only "pkg" is understood,
        unrecognized types are silently omitted from the result."""
        result: dict[str, Mapping[str, Mapping[str, Any]]] = {}
        for ty in entity_types:
            if ty == PACKAGE_ENTITY_TYPE:
                result[ty] = self._load_package_entities()
        return result

    def _load_package_entities(self) -> dict[str, PackageEntity]:
        """Synthesize one "pkg" entity per package known to the repo.

        A package whose versions are all toolchains targeting the same
        arch gains an ``arch:<target>`` relation; a package whose versions
        are all emulators gains one ``arch:`` relation per arch supported
        by every version (set intersection across versions).
        """
        result: dict[str, PackageEntity] = {}
        for cat, pkg_name, pkg_vers in self._repo.iter_pkgs():
            full_name = f"{cat}/{pkg_name}"
            relations = []

            # see if all versions of the package are toolchains and share the
            # same arch
            tc_arch: str | None = None
            for pkg_ver in pkg_vers.values():
                tm = pkg_ver.toolchain_metadata
                if not tm:
                    # BUGFIX: previously this bailed out without resetting,
                    # so an arch gathered from earlier versions survived and
                    # a mixed toolchain/non-toolchain package was wrongly
                    # tagged as a toolchain package.
                    tc_arch = None
                    break
                if tc_arch is None:
                    tc_arch = tm.target_arch
                elif tc_arch != tm.target_arch:
                    # versions disagree on the target arch: not uniform
                    tc_arch = None
                    break
            if tc_arch is not None:
                # this is a toolchain package, add the arch as a related entity
                relations.append(f"arch:{tc_arch}")

            # similarly, check for the emulator kind
            emu_arches: set[str] | None = None
            for pkg_ver in pkg_vers.values():
                em = pkg_ver.emulator_metadata
                if not em:
                    # BUGFIX: same as the toolchain loop above — reset the
                    # accumulator before bailing, so a package that is only
                    # partially an emulator gains no arch relations.
                    emu_arches = None
                    break
                # union of arches supported by this version's programs
                pkg_ver_arches: set[str] = set()
                for p in em.programs:
                    pkg_ver_arches.update(p.supported_arches)
                if emu_arches is None:
                    emu_arches = pkg_ver_arches
                elif emu_arches != pkg_ver_arches:
                    # keep only arches every version supports
                    emu_arches = emu_arches.intersection(pkg_ver_arches)
            if emu_arches is not None:
                for emu_arch in emu_arches:
                    relations.append(f"arch:{emu_arch}")

            result[full_name] = {
                "pkg": {
                    "id": full_name,
                    "display_name": full_name,
                    "name": pkg_name,
                    "category": cat,
                },
                "related": relations,
            }
        return result
|