py-rattler 0.22.0__cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68) hide show
  1. py_rattler-0.22.0.dist-info/METADATA +208 -0
  2. py_rattler-0.22.0.dist-info/RECORD +68 -0
  3. py_rattler-0.22.0.dist-info/WHEEL +4 -0
  4. rattler/__init__.py +114 -0
  5. rattler/channel/__init__.py +5 -0
  6. rattler/channel/channel.py +94 -0
  7. rattler/channel/channel_config.py +43 -0
  8. rattler/channel/channel_priority.py +14 -0
  9. rattler/exceptions.py +120 -0
  10. rattler/explicit_environment/__init__.py +3 -0
  11. rattler/explicit_environment/environment.py +69 -0
  12. rattler/index/__init__.py +3 -0
  13. rattler/index/index.py +112 -0
  14. rattler/install/__init__.py +3 -0
  15. rattler/install/installer.py +96 -0
  16. rattler/lock/__init__.py +23 -0
  17. rattler/lock/channel.py +52 -0
  18. rattler/lock/environment.py +213 -0
  19. rattler/lock/hash.py +33 -0
  20. rattler/lock/lock_file.py +118 -0
  21. rattler/lock/package.py +302 -0
  22. rattler/match_spec/__init__.py +4 -0
  23. rattler/match_spec/match_spec.py +294 -0
  24. rattler/match_spec/nameless_match_spec.py +185 -0
  25. rattler/networking/__init__.py +21 -0
  26. rattler/networking/client.py +74 -0
  27. rattler/networking/fetch_repo_data.py +103 -0
  28. rattler/networking/middleware.py +234 -0
  29. rattler/package/__init__.py +26 -0
  30. rattler/package/about_json.py +329 -0
  31. rattler/package/index_json.py +437 -0
  32. rattler/package/no_arch_type.py +142 -0
  33. rattler/package/package_name.py +204 -0
  34. rattler/package/package_name_matcher.py +81 -0
  35. rattler/package/paths_json.py +696 -0
  36. rattler/package/run_exports_json.py +268 -0
  37. rattler/package_streaming/__init__.py +26 -0
  38. rattler/platform/__init__.py +4 -0
  39. rattler/platform/arch.py +59 -0
  40. rattler/platform/platform.py +217 -0
  41. rattler/prefix/__init__.py +4 -0
  42. rattler/prefix/prefix_paths.py +442 -0
  43. rattler/prefix/prefix_record.py +234 -0
  44. rattler/pty/__init__.py +25 -0
  45. rattler/pty/pty_process.py +391 -0
  46. rattler/pty/pty_session.py +241 -0
  47. rattler/py.typed +0 -0
  48. rattler/rattler.abi3.so +0 -0
  49. rattler/repo_data/__init__.py +19 -0
  50. rattler/repo_data/gateway.py +337 -0
  51. rattler/repo_data/package_record.py +938 -0
  52. rattler/repo_data/patch_instructions.py +22 -0
  53. rattler/repo_data/record.py +164 -0
  54. rattler/repo_data/repo_data.py +74 -0
  55. rattler/repo_data/source.py +85 -0
  56. rattler/repo_data/sparse.py +356 -0
  57. rattler/shell/__init__.py +3 -0
  58. rattler/shell/shell.py +134 -0
  59. rattler/solver/__init__.py +3 -0
  60. rattler/solver/solver.py +220 -0
  61. rattler/utils/rattler_version.py +19 -0
  62. rattler/version/__init__.py +5 -0
  63. rattler/version/version.py +591 -0
  64. rattler/version/version_spec.py +184 -0
  65. rattler/version/with_source.py +80 -0
  66. rattler/virtual_package/__init__.py +4 -0
  67. rattler/virtual_package/generic.py +136 -0
  68. rattler/virtual_package/virtual_package.py +201 -0
@@ -0,0 +1,337 @@
1
+ from __future__ import annotations
2
+
3
+ import os
4
+ from dataclasses import dataclass
5
+ from typing import TYPE_CHECKING, Any, Iterable, List, Optional, Union
6
+
7
+ from rattler.channel.channel import Channel
8
+ from rattler.match_spec.match_spec import MatchSpec
9
+ from rattler.networking.client import Client
10
+ from rattler.networking.fetch_repo_data import CacheAction
11
+ from rattler.package.package_name import PackageName
12
+ from rattler.platform.platform import Platform, PlatformLiteral
13
+ from rattler.rattler import PyGateway, PyMatchSpec, PySourceConfig
14
+ from rattler.repo_data.record import RepoDataRecord
15
+
16
+ if TYPE_CHECKING:
17
+ from rattler.repo_data.source import RepoDataSource
18
+
19
+
20
+ class _RepoDataSourceAdapter:
21
+ """Adapter that wraps a user's RepoDataSource and converts FFI types.
22
+
23
+ This adapter receives raw PyPlatform and PyPackageName from Rust,
24
+ converts them to the proper Python wrapper types (Platform, PackageName),
25
+ and calls the user's implementation.
26
+ """
27
+
28
+ def __init__(self, source: RepoDataSource) -> None:
29
+ self._source = source
30
+
31
+ async def fetch_package_records(self, py_platform: Any, py_name: Any) -> List[RepoDataRecord]:
32
+ """Convert FFI types and delegate to the wrapped source."""
33
+ # Wrap raw FFI types in Python wrapper classes
34
+ platform = Platform._from_py_platform(py_platform)
35
+ name = PackageName._from_py_package_name(py_name)
36
+
37
+ # Call the user's implementation with proper Python types
38
+ return await self._source.fetch_package_records(platform, name)
39
+
40
+ def package_names(self, py_platform: Any) -> List[str]:
41
+ """Convert FFI types and delegate to the wrapped source."""
42
+ platform = Platform._from_py_platform(py_platform)
43
+ return self._source.package_names(platform)
44
+
45
+
46
@dataclass
class SourceConfig:
    """
    Describes properties about a channel.

    This can be used to configure the Gateway to handle channels in a certain
    way.
    """

    jlap_enabled: bool = True
    """Whether the JLAP compression is enabled or not."""

    zstd_enabled: bool = True
    """Whether the ZSTD compression is enabled or not."""

    bz2_enabled: bool = True
    """Whether the BZ2 compression is enabled or not."""

    sharded_enabled: bool = True
    """Whether sharded repodata is enabled or not."""

    cache_action: CacheAction = "cache-or-fetch"
    """How to interact with the cache.

    * `'cache-or-fetch'` (default): Use the cache if it's up to date or fetch from the URL if there is no valid cached value.
    * `'use-cache-only'`: Only use the cache, but error out if the cache is not up to date
    * `'force-cache-only'`: Only use the cache, ignore whether or not it is up to date.
    * `'no-cache'`: Do not use the cache even if there is an up to date entry
    """

    def _into_py(self) -> PySourceConfig:
        """
        Converts this object into a type that can be used by the Rust code.

        Examples
        --------
        ```python
        >>> SourceConfig()._into_py() # doctest: +ELLIPSIS
        <builtins.PySourceConfig object at 0x...>
        >>>
        ```
        """
        # Mirror every field into the FFI struct; keyword arguments keep the
        # mapping explicit and robust against field reordering on either side.
        return PySourceConfig(
            jlap_enabled=self.jlap_enabled,
            zstd_enabled=self.zstd_enabled,
            bz2_enabled=self.bz2_enabled,
            sharded_enabled=self.sharded_enabled,
            cache_action=self.cache_action,
        )
95
+
96
+
97
class Gateway:
    """
    The gateway manages all the quirks and complex bits of efficiently acquiring
    repodata. It implements all the necessary logic to fetch the repodata from a
    remote server, cache it locally and convert it into python objects.

    The gateway can also easily be used concurrently, as it is designed to be
    thread-safe. When two threads are querying the same channel at the same time,
    their requests are coalesced into a single request. This is done to reduce the
    number of requests made to the remote server and reduce the overall memory usage.

    The gateway caches the repodata internally, so if the same channel is queried
    multiple times the records will only be fetched once. However, the conversion
    of the records to a python object is done every time the query method is called.
    Therefore, instead of requesting records directly, it's more efficient to pass the
    gateway itself to methods that accept it.
    """

    def __init__(
        self,
        cache_dir: Optional[os.PathLike[str]] = None,
        default_config: Optional[SourceConfig] = None,
        per_channel_config: Optional[dict[str, SourceConfig]] = None,
        max_concurrent_requests: int = 100,
        client: Optional[Client] = None,
        show_progress: bool = False,
    ) -> None:
        """
        Arguments:
            cache_dir: The directory where the repodata should be cached. If not specified the
                default cache directory is used.
            default_config: The default configuration for channels.
            per_channel_config: Source configuration on a per-URL basis. This URL is used as a
                prefix, so any channel that starts with the URL uses the configuration.
                The configuration with the longest matching prefix is used.
            max_concurrent_requests: The maximum number of concurrent requests that can be made.
            client: An authenticated client to use for acquiring repodata. If not specified a default
                client will be used.
            show_progress: Whether to show progress bars when fetching repodata.

        Examples
        --------
        ```python
        >>> Gateway()
        Gateway()
        >>>
        ```
        """
        default_config = default_config or SourceConfig()

        # All real work happens in the Rust-backed PyGateway; Python-side
        # wrapper types are converted to their FFI counterparts up front.
        self._gateway = PyGateway(
            cache_dir=cache_dir,
            default_config=default_config._into_py(),
            per_channel_config={channel: config._into_py() for channel, config in (per_channel_config or {}).items()},
            max_concurrent_requests=max_concurrent_requests,
            client=client._client if client is not None else None,
            show_progress=show_progress,
        )

    async def query(
        self,
        sources: Iterable[Union[Channel, str, RepoDataSource]],
        platforms: Iterable[Platform | PlatformLiteral],
        specs: Iterable[MatchSpec | PackageName | str],
        recursive: bool = True,
    ) -> List[List[RepoDataRecord]]:
        """Queries the gateway for repodata from channels and custom sources.

        If `recursive` is `True` the gateway will recursively fetch the dependencies of the
        encountered records. If `recursive` is `False` only the records with the package names
        specified in `specs` are returned.

        The `specs` can either be a `MatchSpec`, `PackageName` or a string. If a string or a
        `PackageName` is provided it will be converted into a MatchSpec that matches any record
        with the given name. If a `MatchSpec` is provided all records that match the name
        specified in the spec will be returned, but only the dependencies of the records
        that match the entire spec are recursively fetched.

        The gateway caches records from channels internally, so if the same channel is queried
        multiple times the records will only be fetched once. However, the conversion of the
        records to a python object is done every time the query method is called.

        Note: Custom RepoDataSource implementations are **not cached** by the gateway. If caching
        is needed for custom sources, it must be implemented within the source itself.

        Arguments:
            sources: The sources to query. Can be channels (by name, URL, or Channel object)
                or custom RepoDataSource implementations.
            platforms: The platforms to query.
            specs: The specs to query.
            recursive: Whether to recursively fetch dependencies or not.

        Returns:
            A list of lists of `RepoDataRecord`s. The outer list contains the results for each
            source in the same order they are provided in the `sources` argument.

        Examples
        --------
        ```python
        >>> import asyncio
        >>> gateway = Gateway()
        >>> records = asyncio.run(gateway.query(["conda-forge"], ["linux-aarch64"], ["python"]))
        >>> assert len(records) == 1
        >>>
        ```
        """
        py_records = await self._gateway.query(
            sources=_convert_sources(sources),
            # Accept both Platform objects and literal strings; normalize to
            # the FFI handle either way.
            platforms=[
                platform._inner if isinstance(platform, Platform) else Platform(platform)._inner
                for platform in platforms
            ],
            # Strings and PackageNames are parsed into a name-only MatchSpec.
            specs=[
                spec._match_spec if isinstance(spec, MatchSpec) else PyMatchSpec(str(spec), True, True)
                for spec in specs
            ],
            recursive=recursive,
        )

        # Convert the records into python objects
        return [[RepoDataRecord._from_py_record(record) for record in records] for records in py_records]

    async def names(
        self,
        sources: Iterable[Union[Channel, str, RepoDataSource]],
        platforms: Iterable[Platform | PlatformLiteral],
    ) -> List[PackageName]:
        """Queries all the names of packages in channels or custom sources.

        Arguments:
            sources: The sources to query. Can be channels (by name, URL, or Channel object)
                or custom RepoDataSource implementations.
            platforms: The platforms to query.

        Returns:
            A list of package names that are present in the given subdirectories.

        Examples
        --------
        ```python
        >>> import asyncio
        >>> gateway = Gateway()
        >>> records = asyncio.run(gateway.names(["conda-forge"], ["linux-64"]))
        >>> PackageName("python") in records
        True
        >>>
        ```
        """

        py_package_names = await self._gateway.names(
            sources=_convert_sources(sources),
            # Accept both Platform objects and literal strings; normalize to
            # the FFI handle either way.
            platforms=[
                platform._inner if isinstance(platform, Platform) else Platform(platform)._inner
                for platform in platforms
            ],
        )

        # Convert the records into python objects
        return [PackageName._from_py_package_name(package_name) for package_name in py_package_names]

    def clear_repodata_cache(
        self,
        channel: Channel | str,
        subdirs: Optional[Iterable[Platform | PlatformLiteral]] = None,
        clear_disk: bool = False,
    ) -> None:
        """
        Clears the cache for the given channel.

        Any subsequent query will re-fetch any required data from the source.

        Arguments:
            channel: The channel to clear the cache for.
            subdirs: A selection of subdirectories to clear, if `None` is specified
                all subdirectories of the channel are cleared.
            clear_disk: If `True`, also clears the on-disk cache. By default only the
                in-memory cache is cleared.

        Examples
        --------
        ```python
        >>> gateway = Gateway()
        >>> gateway.clear_repodata_cache("conda-forge", ["linux-64"])
        >>> gateway.clear_repodata_cache("robostack")
        >>> gateway.clear_repodata_cache("conda-forge", clear_disk=True)
        >>>
        ```
        """
        self._gateway.clear_repodata_cache(
            channel._channel if isinstance(channel, Channel) else Channel(channel)._channel,
            # Subdirs are passed as a set of FFI platform handles, or None to
            # clear every subdirectory of the channel.
            {subdir._inner if isinstance(subdir, Platform) else Platform(subdir)._inner for subdir in subdirs}
            if subdirs is not None
            else None,
            clear_disk,
        )

    def __repr__(self) -> str:
        """
        Returns a representation of the Gateway.

        Examples
        --------
        ```python
        >>> Gateway()
        Gateway()
        >>>
        ```
        """
        return f"{type(self).__name__}()"
306
+
307
+
308
def _convert_sources(sources: Iterable[Any]) -> List[Any]:
    """Normalize a heterogeneous iterable of sources for the Rust gateway.

    Channel names and `Channel` objects are reduced to their internal
    PyChannel handles; custom `RepoDataSource` implementations are wrapped
    in an adapter that converts between FFI types and the public Python
    wrapper types.

    Raises:
        TypeError: If a source doesn't implement the required interface.
    """
    # Imported lazily to avoid a circular import at module load time.
    from rattler.repo_data.source import RepoDataSource

    normalized: List[Any] = []
    for entry in sources:
        if isinstance(entry, str):
            # Channel name or URL - build a Channel and take its handle.
            normalized.append(Channel(entry)._channel)
        elif isinstance(entry, Channel):
            # Already a Channel object - take its handle directly.
            normalized.append(entry._channel)
        elif isinstance(entry, RepoDataSource):
            # Custom source - adapt it so FFI types are converted per call.
            normalized.append(_RepoDataSourceAdapter(entry))
        else:
            raise TypeError(
                f"Expected Channel, str, or object implementing RepoDataSource protocol, "
                f"got {type(entry).__name__}. "
                f"See rattler.RepoDataSource for the required interface."
            )
    return normalized