matrice 1.0.99294-py3-none-any.whl → 1.0.99296-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -23,6 +23,7 @@ from ..core.config import BaseConfig, AlertConfig, ZoneConfig
 class AbandonedObjectConfig(BaseConfig):
     """Configuration for abandoned object detection use case."""
     enable_smoothing: bool = True
+    centroid_threshold: float = 10.0
     smoothing_algorithm: str = "observability"
     smoothing_window_size: int = 20
     smoothing_cooldown_frames: int = 5
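For context, `centroid_threshold` is a new tunable on the config object. A minimal usage sketch, assuming `AbandonedObjectConfig` is a dataclass-style config that accepts its fields as keyword arguments (the import path is inferred from the RECORD entries below):

```python
from matrice.deploy.utils.post_processing.usecases.abandoned_object_detection import (
    AbandonedObjectConfig,
)

# Hypothetical override: allow up to 15 px of centroid drift before a tracked
# object stops counting as stationary (the new default is 10.0).
config = AbandonedObjectConfig(
    enable_smoothing=True,
    centroid_threshold=15.0,
)
```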
@@ -193,7 +194,7 @@ class AbandonedObjectDetectionUseCase(BaseProcessor):
         track_info['bbox'] = bbox

         # Check if centroid has moved significantly
-        if self._is_centroid_stationary(centroid, prev_centroid):
+        if self._is_centroid_stationary(centroid, prev_centroid, config.centroid_threshold):
             if track_info['frame_count'] >= config.stationary_threshold_frames:
                 # Check for overlap with person
                 if not self._overlaps_with_person(bbox):
@@ -222,6 +223,7 @@ class AbandonedObjectDetectionUseCase(BaseProcessor):

     def _is_centroid_stationary(self, centroid: tuple, prev_centroid: tuple, threshold: float = 5.0) -> bool:
         """Check if centroid movement is within threshold."""
+        threshold = threshold or self.config.centroid_threshold
         x1, y1 = centroid
         x2, y2 = prev_centroid
         distance = ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5
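The configured threshold now flows into the stationarity check. As a rough standalone sketch of what the helper computes (re-implemented here for illustration, mirroring the distance calculation in the hunk above; the comparison operator is an assumption, since the return statement falls outside the hunk):

```python
def is_centroid_stationary(centroid: tuple, prev_centroid: tuple, threshold: float = 10.0) -> bool:
    """Return True when the centroid moved no more than `threshold` pixels between frames."""
    x1, y1 = centroid
    x2, y2 = prev_centroid
    distance = ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5  # Euclidean distance
    return distance <= threshold

# A 4x4 px drift is about 5.66 px, still stationary under the new 10.0 default.
print(is_centroid_stationary((100, 100), (104, 104)))  # True
```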
@@ -230,7 +232,7 @@ class AbandonedObjectDetectionUseCase(BaseProcessor):
     def _overlaps_with_person(self, bbox: Dict) -> bool:
         """Check if bbox overlaps with any person bbox."""
         for person_bbox in self._person_bboxes:
-            if person_bbox and self._compute_iou(bbox, person_bbox) > 0.1:
+            if person_bbox and self._compute_iou(bbox, person_bbox) > 0.5:
                 return True
         return False

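Raising the IoU cut-off from 0.1 to 0.5 means an object box must overlap a person box substantially before it is treated as attended. A minimal IoU sketch for intuition only; the corner-dict keys (`xmin`/`ymin`/`xmax`/`ymax`) are an assumption and not taken from `_compute_iou`:

```python
def compute_iou(a: dict, b: dict) -> float:
    """Intersection-over-union of two axis-aligned boxes given as corner dicts."""
    ix1, iy1 = max(a["xmin"], b["xmin"]), max(a["ymin"], b["ymin"])
    ix2, iy2 = min(a["xmax"], b["xmax"]), min(a["ymax"], b["ymax"])
    inter = max(0.0, ix2 - ix1) * max(0.0, iy2 - iy1)
    area_a = (a["xmax"] - a["xmin"]) * (a["ymax"] - a["ymin"])
    area_b = (b["xmax"] - b["xmin"]) * (b["ymax"] - b["ymin"])
    union = area_a + area_b - inter
    return inter / union if union else 0.0

# Two 10x10 boxes overlapping on half their width: IoU = 50 / 150 ≈ 0.33,
# which passed the old 0.1 threshold but not the new 0.5 one.
box = {"xmin": 0, "ymin": 0, "xmax": 10, "ymax": 10}
shifted = {"xmin": 5, "ymin": 0, "xmax": 15, "ymax": 10}
print(compute_iou(box, shifted))  # ≈ 0.333
```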
@@ -35,9 +35,16 @@ class ColorDetectionConfig(BaseConfig):
     confidence_threshold: float = 0.5
     top_k_colors: int = 3
     frame_skip: int = 1
-    target_categories: Optional[List[str]] = field(default_factory=lambda: [
-        "person", "people", "car", "cars", "truck", "trucks", "motorcycle", "motorcycles", "vehicle", "vehicles", "bus", "bicycle"
-    ])
+    usecase_categories: List[str] = field(
+        default_factory=lambda: [
+            "bicycle", "car", "motorbike", "auto rickshaw", "bus", "garbagevan",
+            "truck", "minibus", "army vehicle", "pickup", "policecar", "rickshaw",
+            "scooter", "suv", "taxi", "three wheelers -CNG-", "human hauler", "van", "wheelbarrow"
+        ]
+    )
+    target_categories: List[str] = field(
+        default_factory=lambda: ['car', 'bicycle', 'bus', 'garbagevan', 'truck', 'motorbike', 'van']
+    )
     fps: Optional[float] = None
     bbox_format: str = "auto"
     index_to_category: Optional[Dict[int, str]] = None
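A short sketch of overriding the new category defaults at construction time, assuming `ColorDetectionConfig` accepts its fields as keyword arguments:

```python
from matrice.deploy.utils.post_processing.usecases.color_detection import ColorDetectionConfig

# Hypothetical override: only analyse colours of heavy vehicles.
config = ColorDetectionConfig(
    confidence_threshold=0.5,
    target_categories=["bus", "truck", "garbagevan"],  # subset of the new default list
)
print(config.usecase_categories[:3])  # ['bicycle', 'car', 'motorbike']
```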
@@ -51,6 +58,30 @@ class ColorDetectionConfig(BaseConfig):
     smoothing_window_size: int = 20
     smoothing_cooldown_frames: int = 5
     smoothing_confidence_range_factor: float = 0.5
+    index_to_category: Optional[Dict[int, str]] = field(
+        default_factory=lambda: {
+            0: "ambulance",
+            1: "army vehicle",
+            2: "car",
+            3: "bicycle",
+            4: "bus",
+            5: "auto rickshaw",
+            6: "garbagevan",
+            7: "truck",
+            8: "minibus",
+            9: "minivan",
+            10: "motorbike",
+            11: "pickup",
+            12: "policecar",
+            13: "rickshaw",
+            14: "scooter",
+            15: "suv",
+            16: "taxi",
+            17: "three wheelers -CNG-",
+            18: "human hauler",
+            19: "van",
+            20: "wheelbarrow"
+        })

     def validate(self) -> List[str]:
         """Validate configuration parameters."""
@@ -74,12 +105,20 @@ class ColorDetectionConfig(BaseConfig):

 class ColorDetectionUseCase(BaseProcessor):
     """Color detection processor for analyzing object colors in video streams with tracking."""
+    CATEGORY_DISPLAY = {
+        "bicycle": "Bicycle", "car": "Car", "motorbike": "Motorbike", "auto rickshaw": "Auto Rickshaw",
+        "bus": "Bus", "garbagevan": "Garbage Van", "truck": "Truck", "minibus": "Minibus",
+        "army vehicle": "Army Vehicle", "pickup": "Pickup", "policecar": "Police Car",
+        "rickshaw": "Rickshaw", "scooter": "Scooter", "suv": "SUV", "taxi": "Taxi",
+        "three wheelers -CNG-": "Three Wheelers (CNG)", "human hauler": "Human Hauler",
+        "van": "Van", "wheelbarrow": "Wheelbarrow"
+    }

     def __init__(self):
         super().__init__("color_detection")
         self.category = "visual_appearance"

-        self.target_categories = ["person", "people", "car", "cars", "truck", "trucks", "motorcycle", "motorcycles", "vehicle", "vehicles", "bus", "bicycle"]
+        self.target_categories = ['car', 'bicycle', 'bus', 'garbagevan', 'truck', 'motorbike', 'van']

         self.CASE_TYPE: Optional[str] = 'color_detection'
         self.CASE_VERSION: Optional[str] = '1.3'
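`CATEGORY_DISPLAY` provides human-readable labels for the internal category names. A small usage sketch; the title-case fallback is illustrative and not part of the package:

```python
CATEGORY_DISPLAY = {
    "auto rickshaw": "Auto Rickshaw",
    "three wheelers -CNG-": "Three Wheelers (CNG)",
    "suv": "SUV",
}  # excerpt of the class attribute above

def display_label(category: str) -> str:
    # Fall back to simple title-casing for categories without an explicit entry.
    return CATEGORY_DISPLAY.get(category, category.title())

print(display_label("three wheelers -CNG-"))  # Three Wheelers (CNG)
print(display_label("van"))                   # Van (via the fallback)
```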
@@ -292,10 +331,14 @@ class ColorDetectionUseCase(BaseProcessor):
         if config.index_to_category:
             processed_data = apply_category_mapping(processed_data, config.index_to_category)
             self.logger.debug("Applied category mapping")
+
+        if config.target_categories:
+            color_processed_data = [d for d in processed_data if d.get('category') in self.target_categories]
+            self.logger.debug("Applied category filtering")

         # Step 2.5: Filter to only include target categories
-        color_processed_data = filter_by_categories(processed_data.copy(), config.target_categories)
-        self.logger.debug(f"Applied target category filtering for: {config.target_categories}")
+        # color_processed_data = filter_by_categories(processed_data.copy(), config.target_categories)
+        # self.logger.debug(f"Applied target category filtering for: {config.target_categories}")

         raw_processed_data = [copy.deepcopy(det) for det in color_processed_data]
         # Step 3: Apply bounding box smoothing if enabled
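The processing step now filters detections in line against `self.target_categories` instead of calling `filter_by_categories`. A standalone sketch of the same filter, assuming each detection is a dict with a `category` key:

```python
target_categories = ["car", "bicycle", "bus", "garbagevan", "truck", "motorbike", "van"]

processed_data = [
    {"category": "car", "confidence": 0.88},
    {"category": "person", "confidence": 0.95},
    {"category": "van", "confidence": 0.71},
]

# Keep only detections whose category is in the target list (same effect as the
# list comprehension added in the hunk above).
color_processed_data = [d for d in processed_data if d.get("category") in target_categories]
print([d["category"] for d in color_processed_data])  # ['car', 'van']
```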
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: matrice
-Version: 1.0.99294
+Version: 1.0.99296
 Summary: SDK for connecting to matrice.ai services
 Home-page: https://github.com/matrice-ai/python-sdk
 Author: Matrice.ai
@@ -157,7 +157,7 @@ matrice/deploy/utils/post_processing/test_cases/test_utilities.py,sha256=lmT5bp5
 matrice/deploy/utils/post_processing/test_cases/test_utils.py,sha256=bfmOT1rr9asv3jpr-p_UrjnnSZ1qEWM2LEqNKkyvJZ8,29370
 matrice/deploy/utils/post_processing/usecases/Histopathological_Cancer_Detection_img.py,sha256=bHDXxxG3QgWMFZbDuBaJWpkIvxTXsFMTqCPBCFm3SDs,30247
 matrice/deploy/utils/post_processing/usecases/__init__.py,sha256=aTVBEy8PnIEQdxFKpyB6_bYW5WWed6S2JBkYK57VT2s,9571
-matrice/deploy/utils/post_processing/usecases/abandoned_object_detection.py,sha256=W6thVUxNzZc2vDaXG69DNvENP8WVPQh-vE52YXtLEHU,31750
+matrice/deploy/utils/post_processing/usecases/abandoned_object_detection.py,sha256=l0s0BZKuK6XpfD85zJSb-cgbfLxWnP9Mzb5JwAFPX7k,31878
 matrice/deploy/utils/post_processing/usecases/advanced_customer_service.py,sha256=ELt5euxr6P4X2s8-YGngmj27QscOHefjOsx3774sNFk,75914
 matrice/deploy/utils/post_processing/usecases/age_detection.py,sha256=yn1LXOgbnOWSMDnsCds6-uN6W-I1Hy4_-AMrjbT5PtY,41318
 matrice/deploy/utils/post_processing/usecases/anti_spoofing_detection.py,sha256=XdtDdXGzZMLQdWcoOoiE5t4LPYHhgOtJ7tZCNlq1A2E,31329
@@ -170,7 +170,7 @@ matrice/deploy/utils/post_processing/usecases/car_part_segmentation.py,sha256=Jb
 matrice/deploy/utils/post_processing/usecases/cardiomegaly_classification.py,sha256=1P6DyOU6R1XKmQ-55BbKMU8CSsm4-wR5wS827UJG2JU,41244
 matrice/deploy/utils/post_processing/usecases/chicken_pose_detection.py,sha256=-e8di7Am-E-FCQFrSY8qJTO1aWtdRAVJoE-VKBgcyyI,29291
 matrice/deploy/utils/post_processing/usecases/child_monitoring.py,sha256=z3oymoqq4hDGwA8MkdEONZW_Vx5CAZmvzZaNLsqmCfw,39380
-matrice/deploy/utils/post_processing/usecases/color_detection.py,sha256=s7Qi4i-44DekS9tWM9NLkx8yVoTUCHfyMqi_qed-wjc,67335
+matrice/deploy/utils/post_processing/usecases/color_detection.py,sha256=n6oI0nSTKYz0ZAPVvLovMVEgrBz3QfcWEokH3JyZ93Y,69096
 matrice/deploy/utils/post_processing/usecases/color_map_utils.py,sha256=SP-AEVcjLmL8rxblu-ixqUJC2fqlcr7ab4hWo4Fcr_k,2677
 matrice/deploy/utils/post_processing/usecases/concrete_crack_detection.py,sha256=pxhOH_hG4hq9yytNepbGMdk2W_lTG8D1_2RAagaPBkg,40252
 matrice/deploy/utils/post_processing/usecases/crop_weed_detection.py,sha256=Ao1k5fJDYU_f6yZ8VO-jW8-esECV0-zY5Q570c_fako,35674
@@ -244,8 +244,8 @@ matrice/deployment/camera_manager.py,sha256=e1Lc81RJP5wUWRdTgHO6tMWF9BkBdHOSVyx3
 matrice/deployment/deployment.py,sha256=HFt151eWq6iqIAMsQvurpV2WNxW6Cx_gIUVfnVy5SWE,48093
 matrice/deployment/inference_pipeline.py,sha256=6b4Mm3-qt-Zy0BeiJfFQdImOn3FzdNCY-7ET7Rp8PMk,37911
 matrice/deployment/streaming_gateway_manager.py,sha256=ifYGl3g25wyU39HwhPQyI2OgF3M6oIqKMWt8RXtMxY8,21401
-matrice-1.0.99294.dist-info/licenses/LICENSE.txt,sha256=2bm9uFabQZ3Ykb_SaSU_uUbAj2-htc6WJQmS_65qD00,1073
-matrice-1.0.99294.dist-info/METADATA,sha256=lXvLsYMYQbH8vQxZlhIDVAdn4rKiem6xy5gtVfRVaig,14624
-matrice-1.0.99294.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-matrice-1.0.99294.dist-info/top_level.txt,sha256=P97js8ur6o5ClRqMH3Cjoab_NqbJ6sOQ3rJmVzKBvMc,8
-matrice-1.0.99294.dist-info/RECORD,,
+matrice-1.0.99296.dist-info/licenses/LICENSE.txt,sha256=2bm9uFabQZ3Ykb_SaSU_uUbAj2-htc6WJQmS_65qD00,1073
+matrice-1.0.99296.dist-info/METADATA,sha256=X8NmjUp1h3-0ggOqCfmTFha8Ki0wkJMZB7SAGViJb7k,14624
+matrice-1.0.99296.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+matrice-1.0.99296.dist-info/top_level.txt,sha256=P97js8ur6o5ClRqMH3Cjoab_NqbJ6sOQ3rJmVzKBvMc,8
+matrice-1.0.99296.dist-info/RECORD,,