matrice-compute 0.1.16__tar.gz → 0.1.17__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/PKG-INFO +1 -1
  2. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/matrice_compute.egg-info/PKG-INFO +1 -1
  3. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/src/matrice_compute/action_instance.py +76 -6
  4. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/src/matrice_compute/scaling.py +3 -3
  5. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/LICENSE.txt +0 -0
  6. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/README.md +0 -0
  7. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/matrice_compute.egg-info/SOURCES.txt +0 -0
  8. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/matrice_compute.egg-info/dependency_links.txt +0 -0
  9. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/matrice_compute.egg-info/not-zip-safe +0 -0
  10. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/matrice_compute.egg-info/top_level.txt +0 -0
  11. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/pyproject.toml +0 -0
  12. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/setup.cfg +0 -0
  13. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/setup.py +0 -0
  14. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/src/matrice_compute/__init__.py +0 -0
  15. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/src/matrice_compute/actions_manager.py +0 -0
  16. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/src/matrice_compute/actions_scaledown_manager.py +0 -0
  17. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/src/matrice_compute/instance_manager.py +0 -0
  18. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/src/matrice_compute/instance_utils.py +0 -0
  19. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/src/matrice_compute/prechecks.py +0 -0
  20. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/src/matrice_compute/py.typed +0 -0
  21. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/src/matrice_compute/resources_tracker.py +0 -0
  22. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/src/matrice_compute/shutdown_manager.py +0 -0
  23. {matrice_compute-0.1.16 → matrice_compute-0.1.17}/src/matrice_compute/task_utils.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: matrice_compute
3
- Version: 0.1.16
3
+ Version: 0.1.17
4
4
  Summary: Common server utilities for Matrice.ai services
5
5
  Author-email: "Matrice.ai" <dipendra@matrice.ai>
6
6
  License-Expression: MIT
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: matrice_compute
3
- Version: 0.1.16
3
+ Version: 0.1.17
4
4
  Summary: Common server utilities for Matrice.ai services
5
5
  Author-email: "Matrice.ai" <dipendra@matrice.ai>
6
6
  License-Expression: MIT
@@ -75,7 +75,10 @@ class ActionInstance:
75
75
  "facial_recognition_setup": facial_recognition_setup_execute,
76
76
  "fe_fs_streaming": fe_fs_streaming_execute,
77
77
  "inference_ws_server": inference_ws_server_execute,
78
- "lpr_setup": lpr_setup_execute
78
+ "lpr_setup": lpr_setup_execute,
79
+ "fe_analytics_service": fe_analytics_service_execute,
80
+ "lpr_setup": lpr_setup_execute,
81
+ "tracker_server": inference_tracker_setup_execute
79
82
  }
80
83
  if self.action_type not in self.actions_map:
81
84
  raise ValueError(f"Unknown action type: {self.action_type}")
@@ -471,11 +474,12 @@ class ActionInstance:
471
474
  # Try to get model codebase URLs from action_details first
472
475
  model_codebase_url = job_params.get("model_codebase_url")
473
476
  model_requirements_url = job_params.get("model_requirements_url")
477
+ dockerId = job_params.get("_idDocker")
474
478
 
475
479
  # Fallback to API calls if not provided in action_details
476
480
  if not model_codebase_url:
477
481
  model_codebase_url, error, message = self.scaling.get_model_codebase(
478
- model_family
482
+ dockerId
479
483
  )
480
484
  if error:
481
485
  logging.warning(f"Failed to get model codebase URL: {message}")
@@ -486,7 +490,7 @@ class ActionInstance:
486
490
  model_codebase_requirements_url = model_requirements_url
487
491
  else:
488
492
  model_codebase_requirements_url, error, message = (
489
- self.scaling.get_model_codebase_requirements(model_family)
493
+ self.scaling.get_model_codebase_requirements(dockerId)
490
494
  )
491
495
  if error:
492
496
  logging.warning(
@@ -1173,7 +1177,7 @@ def inference_ws_server_execute(self: ActionInstance):
1173
1177
  def fe_fs_streaming_execute(self: ActionInstance):
1174
1178
  """
1175
1179
  Creates and setup the frontend for fs streaming.
1176
- Frontend streaming runs on port 3001 (localhost only with --net=host).
1180
+ Frontend streaming runs on port 3000 (localhost only with --net=host).
1177
1181
  """
1178
1182
  action_details = self.get_action_details()
1179
1183
 
@@ -1183,7 +1187,7 @@ def fe_fs_streaming_execute(self: ActionInstance):
1183
1187
 
1184
1188
  self.setup_action_requirements(action_details)
1185
1189
 
1186
- # Frontend streaming with --net=host (Port: 3001)
1190
+ # Frontend streaming with --net=host (Port: 3000)
1187
1191
  worker_cmd = (
1188
1192
  f"docker run -d --pull=always --net=host "
1189
1193
  f"--name fe_streaming "
@@ -1191,14 +1195,49 @@ def fe_fs_streaming_execute(self: ActionInstance):
1191
1195
  f'-e ENV="{os.environ.get("ENV", "prod")}" '
1192
1196
  f'-e MATRICE_SECRET_ACCESS_KEY="{self.matrice_secret_access_key}" '
1193
1197
  f'-e MATRICE_ACCESS_KEY_ID="{self.matrice_access_key_id}" '
1198
+ f"-e PORT=3000 "
1194
1199
  f"{image}"
1195
1200
  )
1196
- logging.info("Starting frontend streaming (Port: 3001): %s", worker_cmd)
1201
+ logging.info("Starting frontend streaming (Port: 3000): %s", worker_cmd)
1197
1202
 
1198
1203
  # Docker Command run
1199
1204
  self.start(worker_cmd, "fe_fs_streaming")
1200
1205
 
1201
1206
 
1207
+ @log_errors(raise_exception=False)
1208
+ def fe_analytics_service_execute(self: ActionInstance):
1209
+ """
1210
+ Creates and setup the frontend analytics service.
1211
+ Frontend analytics service runs on port 3001 (localhost only with --net=host).
1212
+ """
1213
+ action_details = self.get_action_details()
1214
+
1215
+ if not action_details:
1216
+ return
1217
+ image = action_details["actionDetails"].get("docker")
1218
+
1219
+ self.setup_action_requirements(action_details)
1220
+
1221
+ project_id = action_details["_idProject"]
1222
+
1223
+ # Frontend analytics service with --net=host (Port: 3001)
1224
+ worker_cmd = (
1225
+ f"docker run -d --pull=always --net=host "
1226
+ f"--name fe-analytics "
1227
+ f'-e NEXT_PUBLIC_DEPLOYMENT_ENV="{os.environ.get("ENV", "prod")}" '
1228
+ f'-e MATRICE_SECRET_ACCESS_KEY="{self.matrice_secret_access_key}" '
1229
+ f'-e MATRICE_ACCESS_KEY_ID="{self.matrice_access_key_id}" '
1230
+ f'-e ACTION_ID="{self.action_record_id}" '
1231
+ f"-e PORT=3001 "
1232
+ f'-e PROJECT_ID="{project_id}" '
1233
+ f"{image}"
1234
+ )
1235
+ logging.info("Starting frontend analytics service (Port: 3001): %s", worker_cmd)
1236
+
1237
+ # Docker Command run
1238
+ self.start(worker_cmd, "fe_analytics_service")
1239
+
1240
+
1202
1241
  @log_errors(raise_exception=False)
1203
1242
  def synthetic_dataset_generation_execute(self: ActionInstance):
1204
1243
  """Execute synthetic dataset generation task."""
@@ -1571,3 +1610,34 @@ def kafka_setup_execute(self: ActionInstance):
1571
1610
 
1572
1611
  logging.info("Starting Kafka container (Ports: 9092, 9093): %s", cmd)
1573
1612
  self.start(cmd, "kafka_setup")
1613
+
1614
+
1615
+ @log_errors(raise_exception=False)
1616
+ def inference_tracker_setup_execute(self: ActionInstance):
1617
+
1618
+ """
1619
+ Creates and start inference tracker.
1620
+ Inference tracker runs on port 8110 (localhost only with --net=host).
1621
+ """
1622
+
1623
+ action_details = self.get_action_details()
1624
+ if not action_details:
1625
+ return
1626
+
1627
+ image = self.docker_container
1628
+
1629
+ self.setup_action_requirements(action_details)
1630
+
1631
+ # This is the existing Docker run command
1632
+ worker_cmd = (
1633
+ f"docker run -d --pull=always --net=host "
1634
+ f"--name inference-tracker-worker "
1635
+ f"-v matrice_myvol:/matrice_data "
1636
+ f'-e ENV="{os.environ.get("ENV", "prod")}" '
1637
+ f'-e MATRICE_SECRET_ACCESS_KEY="{self.matrice_secret_access_key}" '
1638
+ f'-e MATRICE_ACCESS_KEY_ID="{self.matrice_access_key_id}" '
1639
+ f'-e ACTION_ID="{self.action_record_id}" '
1640
+ f"{image}"
1641
+ )
1642
+
1643
+ self.start(worker_cmd, "inference_tracker_setup")
@@ -674,16 +674,16 @@ class Scaling:
674
674
  )
675
675
 
676
676
  @log_errors(log_error=True)
677
- def get_model_codebase_requirements(self, model_family_id):
677
+ def get_model_codebase_requirements(self, dockerId):
678
678
  """Get model codebase requirements.
679
679
 
680
680
  Args:
681
- model_family_id: ID of the model family
681
+ dockerId: ID of the docker
682
682
 
683
683
  Returns:
684
684
  Tuple of (data, error, message) from API response
685
685
  """
686
- path = f"/v1/model_store/get_user_requirements_download_path/{model_family_id}"
686
+ path = f"/v1/model_store/get_user_requirements_download_path/{dockerId}"
687
687
  resp = self.rpc.get(path=path)
688
688
  return self.handle_response(
689
689
  resp,