b10-transfer 0.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
b10_transfer/utils.py ADDED
@@ -0,0 +1,355 @@
1
import functools
import getpass
import logging
import os
import time
from contextlib import contextmanager
from pathlib import Path
from typing import Generator, Any
8
+
9
# Module-level logger shared by every helper in this file.
logger = logging.getLogger(__name__)

# Lock file settings
LOCK_WAIT_SLEEP_SECONDS = 1.0  # How long to wait between lock file checks
13
+
14
+
15
class CacheError(Exception):
    """Base cache operation error."""
19
+
20
+
21
class CacheValidationError(CacheError):
    """Path validation or compatibility check failed."""
25
+
26
+
27
def get_current_username() -> str:
    """Return the name of the user running this process.

    Delegates to getpass.getuser() — the same mechanism PyTorch uses —
    so username-derived cache paths stay consistent with PyTorch's.

    Returns:
        str: Current username.

    Raises:
        RuntimeError: If the username cannot be determined.
    """
    try:
        username = getpass.getuser()
    except Exception as e:
        raise RuntimeError(f"Unable to determine current username: {e}") from e
    return username
43
+
44
+
45
def validate_path_security(
    path: str,
    allowed_prefixes: list[str],
    name: str,
    exception_class: type = EnvironmentError,
) -> str:
    """
    Validate that a path is secure and within allowed directory prefixes.

    This function prevents directory traversal attacks and ensures paths
    are within expected locations for security. It handles symlinks like
    macOS /tmp -> /private/tmp by resolving both path and prefixes.

    Args:
        path: The path string to validate.
        allowed_prefixes: List of allowed directory prefix strings.
        name: Name of the configuration for error messages.
        exception_class: Exception class to raise on validation failure.
            Defaults to EnvironmentError.

    Returns:
        str: The validated resolved path.

    Raises:
        exception_class: If path is empty, contains unsafe components, or is
            outside all allowed prefixes.
    """
    if not path:
        raise exception_class(f"{name} cannot be empty")

    # Reject traversal components and surrounding whitespace up front,
    # before touching the filesystem at all.
    if ".." in path or path != path.strip():
        raise exception_class(f"{name} contains unsafe path components: {path}")

    # Resolve to canonical form (follows symlinks, normalizes relative parts).
    try:
        resolved_path = str(Path(path).resolve())
    except (OSError, ValueError) as e:
        raise exception_class(f"{name} path resolution failed: {e}") from e

    def _within(candidate: str, prefix: str) -> bool:
        # Match only at a path-component boundary: a bare startswith() would
        # let an allowed prefix of "/tmp" admit "/tmpevil/...".
        return candidate == prefix or candidate.startswith(
            prefix.rstrip(os.sep) + os.sep
        )

    # Validate against allowed prefixes.
    # Handle symlinks like macOS /tmp -> /private/tmp by checking both
    # resolved and as-given forms of each prefix.
    path_matches = False
    for prefix in allowed_prefixes:
        try:
            resolved_prefix = str(Path(prefix).resolve())
            if _within(resolved_path, resolved_prefix):
                path_matches = True
                break
        except (OSError, ValueError):
            # If prefix resolution fails, fall back to comparing against the
            # prefix string as given.
            if _within(resolved_path, prefix):
                path_matches = True
                break

    if not path_matches:
        raise exception_class(
            f"{name} path '{resolved_path}' must start with one of: {allowed_prefixes}"
        )

    return resolved_path
107
+
108
+
109
def validate_boolean_env(env_var: str, name: str) -> str:
    """Check that an environment variable holds a safe boolean-like string.

    Args:
        env_var: The environment variable value to validate.
        name: Name of the configuration for error messages.

    Returns:
        str: The value unchanged, if it is a recognized boolean string.

    Raises:
        CacheValidationError: If the value is not a recognized boolean string.
    """
    valid_values = {"0", "1", "true", "false", "True", "False", ""}
    if env_var in valid_values:
        return env_var
    raise CacheValidationError(
        f"{name} must be one of {valid_values}, got: {env_var}"
    )
129
+
130
+
131
def apply_cap(value: int, cap: int, name: str) -> int:
    """Clamp a user-provided value to a security/stability cap.

    Not a hard guarantee (a user could still edit the installed package
    source), but it guards against accidental environment-variable values
    that could cause resource exhaustion.

    Args:
        value: The requested value.
        cap: Maximum allowed value.
        name: Name used in the warning message.

    Returns:
        int: The original value, or the cap when the value exceeds it.
    """
    if value <= cap:
        return value
    logger.warning(
        f"{name} capped at {cap} (requested {value}) for security/stability"
    )
    return cap
144
+
145
+
146
def timed_fn(logger=None, name=None):
    """Decorator to log function execution time.

    This decorator logs when a function starts and finishes, including the
    total execution time in seconds.

    Args:
        logger: Logger instance to use for logging. Defaults to this
            module's logger (the previous eagerly-bound default resolved to
            the same ``logging.getLogger(__name__)`` instance).
        name: Custom name to use in log messages. If None, uses function name.

    Returns:
        Decorator function that wraps the target function with timing logic.
    """

    def decorator(fn):
        # Resolve lazily so the default stays the module logger without
        # shadowing it at definition time.
        log = logger if logger is not None else logging.getLogger(__name__)
        label = name or fn.__name__

        # functools.wraps preserves __name__/__doc__ of the wrapped function,
        # which the original decorator discarded.
        @functools.wraps(fn)
        def wrapper(*args, **kwargs):
            log.info(f"{label} started")
            start = time.perf_counter()
            result = fn(*args, **kwargs)
            log.info(f"{label} finished in {time.perf_counter() - start:.2f}s")
            return result

        return wrapper

    return decorator
173
+
174
+
175
def safe_execute(error_message: str, default_return: Any = None):
    """Decorator to safely execute a function with error handling.

    This decorator catches all exceptions from the wrapped function and logs
    them with a custom error message, then returns a default value instead
    of propagating the exception.

    Args:
        error_message: Message to log when an exception occurs.
        default_return: Value to return if the function raises an exception.
            Defaults to None.

    Returns:
        Decorator function that wraps the target function with error handling.
    """

    def decorator(func):
        # functools.wraps preserves __name__/__doc__ of the wrapped function,
        # which the original decorator discarded.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except Exception as e:
                # Swallowing is deliberate: callers prefer a fallback value
                # over a crash for these best-effort operations.
                logging.getLogger(__name__).error(f"{error_message}: {e}")
                return default_return

        return wrapper

    return decorator
202
+
203
+
204
+ # TODO(SR): Make the 1-second sleep a configurable parameter + document what it does.
205
+ # FIXME(SR): There's a race condition here. If a single process creates a lock file
206
+ # (say because they are copy-ing in or copy-ing out, and the pod/replica crashes for whatever reason),
207
+ # then the lock file will never be released. This is bad because then a bunch of other replicas will
208
+ # be blocked from doing anything (loading in the cache or saving out the cache).
209
+ # We either need to find a way to ENSURE that the lock file will be released if the pod/replica crashes OR in a certain amount of time.
210
+ # OR enforce some retry-timeout logic to ensure that other replicas proceed with reading from the cache/writing to the cache if they are "held up" by the lock file N number of times or seconds perhaps
211
+ # Just a thought...need to think more + test this out.
212
def critical_section_b10fs_file_lock(name):
    """Decorator to ensure critical section for b10fs file operations.

    The wrapper blocks until it can atomically create ``<name>.lock`` in the
    b10fs cache directory, runs the function, then removes the lock.

    NOTE(review): a pod that crashes while holding the lock still leaves the
    file behind forever, blocking other replicas — a TTL or retry timeout is
    still needed (see the FIXME above this function).

    Args:
        name: The name of the operation, used for the lock file name.

    Returns:
        The decorated function with critical section handling.
    """

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Import here to avoid circular dependency
            from .constants import B10FS_CACHE_DIR

            lock_dir = Path(B10FS_CACHE_DIR)
            lock_dir.mkdir(parents=True, exist_ok=True)

            lock_file = lock_dir / f"{name}.lock"
            # Acquire atomically with O_CREAT|O_EXCL. The previous
            # exists()-then-touch() sequence was a TOCTOU race: two processes
            # could both pass the exists() check and both "hold" the lock.
            while True:
                try:
                    fd = os.open(lock_file, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
                    os.close(fd)
                    break
                except FileExistsError:
                    logger.debug("Waiting for lock file to be released...")
                    time.sleep(LOCK_WAIT_SLEEP_SECONDS)

            try:
                return func(*args, **kwargs)
            finally:
                lock_file.unlink(missing_ok=True)

        return wrapper

    return decorator
248
+
249
+
250
def safe_unlink(
    file_path: Path, error_message: str, success_message: str = None
) -> None:
    """Delete a file, tolerating missing files and dead filesystems.

    Attempts to remove the file, treating a missing file as success
    (missing_ok=True). Any failure — e.g. a b10fs mount dying mid-operation —
    is logged rather than raised.

    Args:
        file_path: Path to the file to delete.
        error_message: Message to log if deletion fails.
        success_message: Optional message to log if deletion succeeds.

    Raises:
        No exceptions are raised; all errors are caught and logged.
    """
    try:
        file_path.unlink(missing_ok=True)
        if success_message:
            logger.debug(success_message)
    except Exception as exc:
        logger.error(f"{error_message}: {exc}")
273
+
274
+
275
@contextmanager
def temp_file_cleanup(temp_path: Path) -> Generator[Path, None, None]:
    """Context manager yielding *temp_path* with guaranteed safe cleanup.

    Cleanup goes through safe_unlink, so a missing file or a filesystem that
    became unavailable during the operation is logged instead of raised.

    Args:
        temp_path: Path to the temporary file to manage.

    Yields:
        Path: The temporary file path for use within the context.

    Raises:
        Cleanup errors are handled gracefully and logged but not raised.
    """
    cleanup_error = f"Failed to delete temporary file {temp_path}"
    try:
        yield temp_path
    finally:
        safe_unlink(temp_path, cleanup_error)
296
+
297
+
298
def _is_b10fs_enabled() -> bool:
    """Report whether the b10fs shared filesystem is enabled.

    Reads the BASETEN_FS_ENABLED setting and treats "1", "True", or "true"
    as enabled.

    Returns:
        bool: True if b10fs is enabled, False otherwise.
    """
    # Import here to avoid circular dependency
    from .constants import BASETEN_FS_ENABLED

    return BASETEN_FS_ENABLED in {"1", "True", "true"}
311
+
312
+
313
def _validate_b10fs_available() -> None:
    """Raise unless the b10fs filesystem is enabled.

    Call this before any operation that requires b10fs access.

    Raises:
        CacheValidationError: If b10fs is not enabled (BASETEN_FS_ENABLED
            is not set to "1"/"True"/"true").
    """
    if _is_b10fs_enabled():
        return
    raise CacheValidationError(
        "b10fs is not enabled. Set BASETEN_FS_ENABLED=1 or BASETEN_FS_ENABLED=True to enable cache operations."
    )
327
+
328
+
329
@contextmanager
def cache_operation(operation_name: str) -> Generator[None, None, None]:
    """Context manager for cache operations with b10fs validation and error handling.

    Validates that b10fs is available before the wrapped operation runs and
    logs any failure consistently before re-raising it.

    Args:
        operation_name: Name of the operation for error logging (e.g., "Load", "Save").

    Yields:
        None: Context for the operation to execute.

    Raises:
        CacheValidationError: If b10fs is not available (re-raised after logging).
        Exception: Any other errors during the operation (re-raised after logging).
    """
    try:
        _validate_b10fs_available()
        yield
    except Exception as e:
        # The original had two byte-identical handlers (CacheValidationError
        # and Exception); a single handler logs and re-raises both the same way.
        logger.debug(f"{operation_name} failed: {e}")
        raise
@@ -0,0 +1,219 @@
1
+ Metadata-Version: 2.3
2
+ Name: b10-transfer
3
+ Version: 0.0.1
4
+ Summary: Distributed PyTorch compilation cache for Baseten - Environment-aware, lock-free compilation cache management
5
+ License: MIT
6
+ Keywords: pytorch,torch.compile,cache,machine-learning,inference
7
+ Author: Shounak Ray
8
+ Author-email: shounak.noreply@baseten.co
9
+ Maintainer: Fred Liu
10
+ Maintainer-email: fred.liu.noreply@baseten.co
11
+ Requires-Python: >=3.9,<4.0
12
+ Classifier: Development Status :: 4 - Beta
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: License :: OSI Approved :: MIT License
15
+ Classifier: Programming Language :: Python :: 3
16
+ Classifier: Programming Language :: Python :: 3.9
17
+ Classifier: Programming Language :: Python :: 3.10
18
+ Classifier: Programming Language :: Python :: 3.11
19
+ Classifier: Programming Language :: Python :: 3.12
20
+ Classifier: Programming Language :: Python :: 3.13
21
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
22
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
23
+ Requires-Dist: torch (>=2.0.0)
24
+ Requires-Dist: triton (>=2.0.0)
25
+ Project-URL: Documentation, https://docs.baseten.co/development/model/b10-transfer
26
+ Project-URL: Homepage, https://docs.baseten.co/development/model/b10-transfer
27
+ Project-URL: Repository, https://pypi.org/project/b10-transfer/
28
+ Description-Content-Type: text/markdown
29
+
30
+ https://www.notion.so/ml-infra/mega-base-cache-24291d247273805b8e20fe26677b7b0f
31
+
32
+ # B10 Transfer
33
+
34
+ PyTorch compilation cache for Baseten deployments.
35
+
36
+ ## Usage
37
+
38
+ ### Synchronous Operations (Blocking)
39
+
40
+ ```python
41
+ import b10_transfer
42
+
43
+ # Inside model.load() function
44
+ def load():
45
+ # Load cache before torch.compile()
46
+ status = b10_transfer.load_compile_cache()
47
+
48
+ # ...
49
+
50
+ # Your model compilation
51
+ model = torch.compile(model)
52
+     # Warm up the model with dummy prompts and the argument values (e.g. resolutions) typically used in your requests
53
+ dummy_input = "What is the capital of France?"
54
+ model(dummy_input)
55
+
56
+ # ...
57
+
58
+ # Save cache after compilation
59
+ if status != b10_transfer.LoadStatus.SUCCESS:
60
+ b10_transfer.save_compile_cache()
61
+ ```
62
+
63
+ ### Asynchronous Operations (Non-blocking)
64
+
65
+ ```python
66
+ import b10_transfer
67
+
68
+ def load_with_async_cache():
69
+ # Start async cache load (returns immediately with operation ID)
70
+ operation_id = b10_transfer.load_compile_cache_async()
71
+
72
+ # Check status periodically
73
+ while not b10_transfer.is_transfer_complete(operation_id):
74
+ status = b10_transfer.get_transfer_status(operation_id)
75
+ print(f"Cache load status: {status.status}")
76
+ time.sleep(1)
77
+
78
+ # Get final status
79
+ final_status = b10_transfer.get_transfer_status(operation_id)
80
+ if final_status.status == b10_transfer.AsyncTransferStatus.SUCCESS:
81
+ print("Cache loaded successfully!")
82
+
83
+ # Your model compilation...
84
+ model = torch.compile(model)
85
+
86
+ # Async save
87
+ save_op_id = b10_transfer.save_compile_cache_async()
88
+
89
+ # You can continue with other work while save happens in background
90
+ # Or wait for completion if needed
91
+ b10_transfer.wait_for_completion(save_op_id, timeout=300) # 5 minute timeout
92
+
93
+ # With progress callback
94
+ def on_progress(operation_id: str):
95
+ status = b10_transfer.get_transfer_status(operation_id)
96
+ print(f"Transfer {operation_id}: {status.status}")
97
+
98
+ operation_id = b10_transfer.load_compile_cache_async(progress_callback=on_progress)
99
+ ```
100
+
101
+ ### Generic Async Operations
102
+
103
+ You can also use the generic async system for custom transfer operations:
104
+
105
+ ```python
106
+ import b10_transfer
107
+ from pathlib import Path
108
+
109
+ def my_custom_callback(source: Path, dest: Path):
110
+ # Your custom transfer logic here
111
+ # This could be any file operation, compression, etc.
112
+ shutil.copy2(source, dest)
113
+
114
+ # Start a generic async transfer
115
+ operation_id = b10_transfer.start_transfer_async(
116
+ source=Path("/source/file.txt"),
117
+ dest=Path("/dest/file.txt"),
118
+ callback=my_custom_callback,
119
+ operation_name="custom_file_copy",
120
+ monitor_local=True,
121
+ monitor_b10fs=False
122
+ )
123
+
124
+ # Use the same progress tracking as torch cache operations
125
+ b10_transfer.wait_for_completion(operation_id)
126
+ ```
127
+
128
+ ## Configuration
129
+
130
+ Configure via environment variables:
131
+
132
+ ```bash
133
+ # Cache directories
134
+ export TORCH_CACHE_DIR="/tmp/torchinductor_root" # Default
135
+ export B10FS_CACHE_DIR="/cache/model/compile_cache" # Default
136
+ export LOCAL_WORK_DIR="/app" # Default
137
+
138
+ # Cache limits
139
+ export MAX_CACHE_SIZE_MB="1024" # 1GB default
140
+ ```
141
+
142
+ ## How It Works
143
+
144
+ ### Environment-Specific Caching
145
+
146
+ The library automatically creates unique cache keys based on your environment:
147
+
148
+ ```
149
+ torch-2.1.0_cuda-12.1_cc-8.6_triton-2.1.0 → cache_a1b2c3d4e5f6.latest.tar.gz
150
+ torch-2.0.1_cuda-11.8_cc-7.5_triton-2.0.1 → cache_x9y8z7w6v5u4.latest.tar.gz
151
+ torch-2.1.0_cpu_triton-none → cache_m1n2o3p4q5r6.latest.tar.gz
152
+ ```
153
+
154
+ **Components used:**
155
+ - **PyTorch version** (e.g., `torch-2.1.0`)
156
+ - **CUDA version** (e.g., `cuda-12.1` or `cpu`)
157
+ - **GPU compute capability** (e.g., `cc-8.6` for A100)
158
+ - **Triton version** (e.g., `triton-2.1.0` or `triton-none`)
159
+
160
+ ### Cache Workflow
161
+
162
+ 1. **Load Phase** (startup): Generate environment key, check for matching cache in B10FS, extract to local directory
163
+ 2. **Save Phase** (after compilation): Create archive, atomic copy to B10FS with environment-specific filename
164
+
165
+ ### Lock-Free Race Prevention
166
+
167
+ Uses journal pattern with atomic filesystem operations for parallel-safe cache saves.
168
+
169
+ ## API Reference
170
+
171
+ ### Synchronous Functions
172
+
173
+ - `load_compile_cache() -> LoadStatus`: Load cache from B10FS for current environment
174
+ - `save_compile_cache() -> SaveStatus`: Save cache to B10FS with environment-specific filename
175
+ - `clear_local_cache() -> bool`: Clear local cache directory
176
+ - `get_cache_info() -> Dict[str, Any]`: Get cache status information for current environment
177
+ - `list_available_caches() -> Dict[str, Any]`: List all cache files with environment details
178
+
179
+ ### Generic Asynchronous Functions
180
+
181
+ - `start_transfer_async(source, dest, callback, operation_name, **kwargs) -> str`: Start any async transfer operation
182
+ - `get_transfer_status(operation_id: str) -> TransferProgress`: Get current status of async operation
183
+ - `is_transfer_complete(operation_id: str) -> bool`: Check if async operation has completed
184
+ - `wait_for_completion(operation_id: str, timeout=None) -> bool`: Wait for async operation to complete
185
+ - `cancel_transfer(operation_id: str) -> bool`: Attempt to cancel running operation
186
+ - `list_active_transfers() -> Dict[str, TransferProgress]`: Get all active transfer operations
187
+
188
+ ### Torch Cache Async Functions
189
+
190
+ - `load_compile_cache_async(progress_callback=None) -> str`: Start async cache load, returns operation ID
191
+ - `save_compile_cache_async(progress_callback=None) -> str`: Start async cache save, returns operation ID
192
+
193
+ ### Status Enums
194
+
195
+ - `LoadStatus`: SUCCESS, ERROR, DOES_NOT_EXIST, SKIPPED
196
+ - `SaveStatus`: SUCCESS, ERROR, SKIPPED
197
+ - `AsyncTransferStatus`: NOT_STARTED, IN_PROGRESS, SUCCESS, ERROR, INTERRUPTED, CANCELLED
198
+
199
+ ### Data Classes
200
+
201
+ - `TransferProgress`: Contains operation_id, status, started_at, completed_at, error_message
202
+
203
+ ### Exceptions
204
+
205
+ - `CacheError`: Base exception for cache operations
206
+ - `CacheValidationError`: Path validation or compatibility check failed
207
+ - `CacheOperationInterrupted`: Operation interrupted due to insufficient disk space
208
+
209
+ ## Performance Impact
210
+
211
+ ### Debugging
212
+
213
+ Enable debug logging:
214
+
215
+ ```python
216
+ import logging
217
+ logging.getLogger('b10_transfer').setLevel(logging.DEBUG)
218
+ ```
219
+
@@ -0,0 +1,15 @@
1
+ b10_transfer/__init__.py,sha256=o1ej-OtAOsfrJbvh5C3PnqxW2qfcO7l8rllVD-07lXE,1400
2
+ b10_transfer/archive.py,sha256=GKb0mi0-YeM7ch4FLAoOLHXw0T6LkRerYad2N2y9TYM,6400
3
+ b10_transfer/async_torch_cache.py,sha256=4hMjVR44SLlGes25e_cjgMTywFfIYjH0TnUmg9o-iyI,1903
4
+ b10_transfer/async_transfers.py,sha256=AAML562qYzF9NyX9AdfiJ0OcQw6vXr985IZWXZSot9Q,9083
5
+ b10_transfer/cleanup.py,sha256=xjKStmBjaarZPxhPTT1-Ds_pvUR7kdJw5Kp19BLvzzY,6224
6
+ b10_transfer/constants.py,sha256=KjSUO6heScDJXQwFlHdeNV4KBBqKz7CKeJzo44-9qMM,4745
7
+ b10_transfer/core.py,sha256=BOnA6FXkZRm74_CtQBMudpx3q7HTEGEORUV26fb6cvQ,5920
8
+ b10_transfer/environment.py,sha256=aC0biEMQrtHk0ke_3epdcq1X9J5fPmPpBVt0fH7XF2Y,5625
9
+ b10_transfer/info.py,sha256=I3iOuImZ5r6DMJTDeBtVvzlSn6IuyPJbLJYUO_OF0ks,6299
10
+ b10_transfer/space_monitor.py,sha256=5pwW643KAHI3mtT61hYf29953UD9LekzWFF1K-QeYbw,10529
11
+ b10_transfer/torch_cache.py,sha256=Oe_OeUPGAlmK9wY-L9w4aPaXOoMnL_kD596hew6ETcw,14192
12
+ b10_transfer/utils.py,sha256=Stee0DFK-8MRRYNIocqaK64cJvfs4jPW3Mpx7zkWV6Y,11932
13
+ b10_transfer-0.0.1.dist-info/METADATA,sha256=hESeWyidAEbtWkIgepBn1Cxlo9--jIj9vcLxM4zP7lY,7502
14
+ b10_transfer-0.0.1.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
15
+ b10_transfer-0.0.1.dist-info/RECORD,,
@@ -0,0 +1,4 @@
1
+ Wheel-Version: 1.0
2
+ Generator: poetry-core 2.1.3
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any