matrice-compute 0.1.28__tar.gz → 0.1.30__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (24)
  1. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/PKG-INFO +1 -1
  2. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/matrice_compute.egg-info/PKG-INFO +1 -1
  3. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/src/matrice_compute/action_instance.py +1 -3
  4. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/src/matrice_compute/scaling.py +41 -0
  5. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/LICENSE.txt +0 -0
  6. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/README.md +0 -0
  7. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/matrice_compute.egg-info/SOURCES.txt +0 -0
  8. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/matrice_compute.egg-info/dependency_links.txt +0 -0
  9. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/matrice_compute.egg-info/not-zip-safe +0 -0
  10. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/matrice_compute.egg-info/top_level.txt +0 -0
  11. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/pyproject.toml +0 -0
  12. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/setup.cfg +0 -0
  13. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/setup.py +0 -0
  14. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/src/matrice_compute/__init__.py +0 -0
  15. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/src/matrice_compute/actions_manager.py +0 -0
  16. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/src/matrice_compute/actions_scaledown_manager.py +0 -0
  17. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/src/matrice_compute/compute_operations_handler.py +0 -0
  18. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/src/matrice_compute/instance_manager.py +0 -0
  19. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/src/matrice_compute/instance_utils.py +0 -0
  20. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/src/matrice_compute/prechecks.py +0 -0
  21. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/src/matrice_compute/py.typed +0 -0
  22. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/src/matrice_compute/resources_tracker.py +0 -0
  23. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/src/matrice_compute/shutdown_manager.py +0 -0
  24. {matrice_compute-0.1.28 → matrice_compute-0.1.30}/src/matrice_compute/task_utils.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: matrice_compute
3
- Version: 0.1.28
3
+ Version: 0.1.30
4
4
  Summary: Common server utilities for Matrice.ai services
5
5
  Author-email: "Matrice.ai" <dipendra@matrice.ai>
6
6
  License-Expression: MIT
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: matrice_compute
3
- Version: 0.1.28
3
+ Version: 0.1.30
4
4
  Summary: Common server utilities for Matrice.ai services
5
5
  Author-email: "Matrice.ai" <dipendra@matrice.ai>
6
6
  License-Expression: MIT
@@ -1587,10 +1587,8 @@ def redis_setup_execute(self: ActionInstance):
1587
1587
  # Redis container with --net=host (Port: 6379)
1588
1588
  redis_cmd = (
1589
1589
  f"docker run -d --net=host "
1590
- f"--name redis_container"
1590
+ f"--name redis_container "
1591
1591
  f"--restart unless-stopped "
1592
- f"--memory=32g "
1593
- f"--cpus=8 "
1594
1592
  f"{redis_image} "
1595
1593
  f"redis-server --bind 0.0.0.0 "
1596
1594
  f"--appendonly no "
@@ -1266,6 +1266,38 @@ class Scaling:
1266
1266
  if "jetson" in model or "tegra" in model:
1267
1267
  is_jetson = True
1268
1268
  gpu_provider = "NVIDIA"
1269
+
1270
+ # Detect specific Jetson model for GPU architecture
1271
+ if "orin" in model:
1272
+ if "agx" in model:
1273
+ gpu_arch_family = "Jetson Orin AGX"
1274
+ elif "nx" in model:
1275
+ gpu_arch_family = "Jetson Orin NX"
1276
+ elif "nano" in model:
1277
+ gpu_arch_family = "Jetson Orin Nano"
1278
+ else:
1279
+ gpu_arch_family = "Jetson Orin"
1280
+ elif "thor" in model:
1281
+ gpu_arch_family = "Jetson Thor"
1282
+ elif "xavier" in model:
1283
+ if "agx" in model:
1284
+ gpu_arch_family = "Jetson Xavier AGX"
1285
+ elif "nx" in model:
1286
+ gpu_arch_family = "Jetson Xavier NX"
1287
+ else:
1288
+ gpu_arch_family = "Jetson Xavier"
1289
+ elif "nano" in model and "orin" not in model:
1290
+ gpu_arch_family = "Jetson Nano"
1291
+ elif "tx2" in model:
1292
+ gpu_arch_family = "Jetson TX2"
1293
+ elif "tx1" in model:
1294
+ gpu_arch_family = "Jetson TX1"
1295
+ else:
1296
+ gpu_arch_family = "Jetson (Unknown Model)"
1297
+
1298
+ # Set gpu_arch to the full model string for detailed info
1299
+ gpu_arch = model.strip()
1300
+
1269
1301
  try:
1270
1302
  cuda_result = subprocess.run(["nvcc", "--version"], capture_output=True, text=True)
1271
1303
  if cuda_result.returncode == 0:
@@ -1275,6 +1307,15 @@ class Scaling:
1275
1307
  break
1276
1308
  except Exception:
1277
1309
  pass
1310
+ # Fallback to nvidia-smi for CUDA version if nvcc failed
1311
+ if not cuda_version:
1312
+ try:
1313
+ nvidia_smi_result = subprocess.run(["nvidia-smi", "--query-gpu=driver_version", "--format=csv,noheader"], capture_output=True, text=True)
1314
+ if nvidia_smi_result.returncode == 0:
1315
+ # nvidia-smi doesn't directly give CUDA version, but we can infer it's available
1316
+ cuda_version = "Available (via nvidia-smi)"
1317
+ except Exception:
1318
+ pass
1278
1319
  except Exception:
1279
1320
  pass
1280
1321