b10-transfer 0.3.10__tar.gz → 0.3.11__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/PKG-INFO +1 -1
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/pyproject.toml +1 -1
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/src/b10_transfer/__init__.py +1 -1
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/src/b10_transfer/cache.py +1 -0
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/src/b10_transfer/cache_cli.py +3 -0
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/README.md +0 -0
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/src/b10_transfer/archive.py +0 -0
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/src/b10_transfer/cleanup.py +0 -0
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/src/b10_transfer/config.py +0 -0
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/src/b10_transfer/constants.py +0 -0
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/src/b10_transfer/core.py +0 -0
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/src/b10_transfer/environment.py +0 -0
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/src/b10_transfer/info.py +0 -0
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/src/b10_transfer/logging_utils.py +0 -0
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/src/b10_transfer/space_monitor.py +0 -0
- {b10_transfer-0.3.10 → b10_transfer-0.3.11}/src/b10_transfer/utils.py +0 -0
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.3
|
2
2
|
Name: b10-transfer
|
3
|
-
Version: 0.3.10
|
3
|
+
Version: 0.3.11
|
4
4
|
Summary: Distributed PyTorch file transfer for Baseten - Environment-aware, lock-free file transfer management
|
5
5
|
License: MIT
|
6
6
|
Keywords: pytorch,file-transfer,cache,machine-learning,inference
|
@@ -4,7 +4,7 @@ build-backend = "poetry.core.masonry.api"
|
|
4
4
|
|
5
5
|
[tool.poetry]
|
6
6
|
name = "b10-transfer"
|
7
|
-
version = "0.3.10"
|
7
|
+
version = "0.3.11"
|
8
8
|
description = "Distributed PyTorch file transfer for Baseten - Environment-aware, lock-free file transfer management"
|
9
9
|
authors = ["Shounak Ray <shounak.noreply@baseten.co>", "Fred Liu <fred.liu.noreply@baseten.co>"]
|
10
10
|
maintainers = ["Fred Liu <fred.liu.noreply@baseten.co>", "Shounak Ray <shounak.noreply@baseten.co>"]
|
@@ -166,6 +166,7 @@ def load_compile_cache() -> OperationStatus:
|
|
166
166
|
"""
|
167
167
|
with cache_operation("Load"):
|
168
168
|
b10fs_dir, torch_dir, work_dir = _setup_cache_paths()
|
169
|
+
logger.info(f"TORCHINDUCTOR_CACHE_DIR IS {config.TORCH_CACHE_DIR}")
|
169
170
|
|
170
171
|
cache_filename = get_cache_filename()
|
171
172
|
final_file, _ = _get_cache_file_paths(cache_filename, b10fs_dir)
|
@@ -89,6 +89,8 @@ def main() -> None:
|
|
89
89
|
|
90
90
|
logger = _setup_logging(cfg.loglevel)
|
91
91
|
|
92
|
+
logger.info(f"TORCHINDUCTOR_CACHE_DIR IS {config.TORCH_CACHE_DIR}")
|
93
|
+
|
92
94
|
# 1) Preload any existing cache (non-fatal on error)
|
93
95
|
try:
|
94
96
|
if load_compile_cache() == OperationStatus.SUCCESS:
|
@@ -114,5 +116,6 @@ def main() -> None:
|
|
114
116
|
except Exception as e:
|
115
117
|
logger.exception("save_compile_cache() failed: %s", e)
|
116
118
|
sys.exit(3)
|
119
|
+
logger.info(f"TORCHINDUCTOR_CACHE_DIR IS {config.TORCH_CACHE_DIR}")
|
117
120
|
|
118
121
|
logger.info("vLLM automatic torch compile cache done.")
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|