matrice-1.0.99295-py3-none-any.whl → matrice-1.0.99297-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -35,9 +35,16 @@ class ColorDetectionConfig(BaseConfig):
     confidence_threshold: float = 0.5
     top_k_colors: int = 3
     frame_skip: int = 1
-    target_categories: Optional[List[str]] = field(default_factory=lambda: [
-        "person", "people", "car", "cars", "truck", "trucks", "motorcycle", "motorcycles", "vehicle", "vehicles", "bus", "bicycle"
-    ])
+    usecase_categories: List[str] = field(
+        default_factory=lambda: [
+            "bicycle", "car", "motorbike", "auto rickshaw", "bus", "garbagevan",
+            "truck", "minibus", "army vehicle", "pickup", "policecar", "rickshaw",
+            "scooter", "suv", "taxi", "three wheelers -CNG-", "human hauler", "van", "wheelbarrow"
+        ]
+    )
+    target_categories: List[str] = field(
+        default_factory=lambda: ['car', 'bicycle', 'bus', 'garbagevan', 'truck', 'motorbike', 'van']
+    )
     fps: Optional[float] = None
     bbox_format: str = "auto"
     index_to_category: Optional[Dict[int, str]] = None
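
For context, a minimal sketch (not taken from the package) of how the new dataclass defaults behave: the simplified ColorDetectionConfig below stands in for the real BaseConfig subclass, and default_factory gives each instance its own mutable list rather than a shared one.

    # Illustrative stand-in for the real config class; field names mirror the diff above.
    from dataclasses import dataclass, field
    from typing import List

    @dataclass
    class ColorDetectionConfig:
        confidence_threshold: float = 0.5
        usecase_categories: List[str] = field(
            default_factory=lambda: ["bicycle", "car", "motorbike", "bus", "truck", "van"]
        )
        target_categories: List[str] = field(
            default_factory=lambda: ["car", "bicycle", "bus", "garbagevan", "truck", "motorbike", "van"]
        )

    cfg = ColorDetectionConfig()                                      # uses the new vehicle defaults
    custom = ColorDetectionConfig(target_categories=["car", "bus"])   # per-deployment override
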
@@ -51,6 +58,30 @@ class ColorDetectionConfig(BaseConfig):
     smoothing_window_size: int = 20
     smoothing_cooldown_frames: int = 5
     smoothing_confidence_range_factor: float = 0.5
+    index_to_category: Optional[Dict[int, str]] = field(
+        default_factory=lambda: {
+            0: "ambulance",
+            1: "army vehicle",
+            2: "car",
+            3: "bicycle",
+            4: "bus",
+            5: "auto rickshaw",
+            6: "garbagevan",
+            7: "truck",
+            8: "minibus",
+            9: "minivan",
+            10: "motorbike",
+            11: "pickup",
+            12: "policecar",
+            13: "rickshaw",
+            14: "scooter",
+            15: "suv",
+            16: "taxi",
+            17: "three wheelers -CNG-",
+            18: "human hauler",
+            19: "van",
+            20: "wheelbarrow"
+        })
 
     def validate(self) -> List[str]:
         """Validate configuration parameters."""
@@ -74,12 +105,20 @@ class ColorDetectionConfig(BaseConfig):
 
 class ColorDetectionUseCase(BaseProcessor):
     """Color detection processor for analyzing object colors in video streams with tracking."""
+    CATEGORY_DISPLAY = {
+        "bicycle": "Bicycle", "car": "Car", "motorbike": "Motorbike", "auto rickshaw": "Auto Rickshaw",
+        "bus": "Bus", "garbagevan": "Garbage Van", "truck": "Truck", "minibus": "Minibus",
+        "army vehicle": "Army Vehicle", "pickup": "Pickup", "policecar": "Police Car",
+        "rickshaw": "Rickshaw", "scooter": "Scooter", "suv": "SUV", "taxi": "Taxi",
+        "three wheelers -CNG-": "Three Wheelers (CNG)", "human hauler": "Human Hauler",
+        "van": "Van", "wheelbarrow": "Wheelbarrow"
+    }
 
     def __init__(self):
         super().__init__("color_detection")
         self.category = "visual_appearance"
 
-        self.target_categories = ["person", "people", "car", "cars", "truck", "trucks", "motorcycle", "motorcycles", "vehicle", "vehicles", "bus", "bicycle"]
+        self.target_categories = ['car', 'bicycle', 'bus', 'garbagevan', 'truck', 'motorbike', 'van']
 
         self.CASE_TYPE: Optional[str] = 'color_detection'
         self.CASE_VERSION: Optional[str] = '1.3'
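
CATEGORY_DISPLAY gives reporting code a lookup from internal category keys to human-readable labels. A self-contained sketch of how such a lookup might be used follows; the display_name helper and its title-case fallback are illustrative, not part of the package.

    # Excerpt of the class attribute added above.
    CATEGORY_DISPLAY = {
        "suv": "SUV",
        "garbagevan": "Garbage Van",
        "three wheelers -CNG-": "Three Wheelers (CNG)",
    }

    def display_name(category: str) -> str:
        # Fall back to title case for keys not in the mapping (an assumption,
        # not behaviour shown in this diff).
        return CATEGORY_DISPLAY.get(category, category.title())

    print(display_name("three wheelers -CNG-"))  # Three Wheelers (CNG)
    print(display_name("minivan"))               # Minivan (fallback)
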
@@ -292,10 +331,17 @@ class ColorDetectionUseCase(BaseProcessor):
         if config.index_to_category:
             processed_data = apply_category_mapping(processed_data, config.index_to_category)
             self.logger.debug("Applied category mapping")
+
+        if config.target_categories:
+            color_processed_data = [d for d in processed_data if d.get('category') in self.target_categories]
+            self.logger.debug("Applied category filtering")
+            print("-------------------COLOR_PROCESSED_DATA-------------------")
+            print(color_processed_data)
+            print("-------------------COLOR_PROCESSED_DATA-------------------")
 
         # Step 2.5: Filter to only include target categories
-        color_processed_data = filter_by_categories(processed_data.copy(), config.target_categories)
-        self.logger.debug(f"Applied target category filtering for: {config.target_categories}")
+        # color_processed_data = filter_by_categories(processed_data.copy(), config.target_categories)
+        # self.logger.debug(f"Applied target category filtering for: {config.target_categories}")
 
         raw_processed_data = [copy.deepcopy(det) for det in color_processed_data]
         # Step 3: Apply bounding box smoothing if enabled
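
Note that the new inline filter is gated on config.target_categories but tests membership against self.target_categories, and it replaces the previous filter_by_categories call, which is now commented out. A standalone sketch of the same list-comprehension filtering (sample detections are made up for illustration):

    processed_data = [
        {"category": "car", "bounding_box": [0, 0, 10, 10]},
        {"category": "person", "bounding_box": [5, 5, 20, 20]},
    ]
    target_categories = ["car", "bicycle", "bus", "garbagevan", "truck", "motorbike", "van"]

    # Keep only detections whose category is in the target list.
    color_processed_data = [d for d in processed_data if d.get("category") in target_categories]
    # -> only the "car" detection survives
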
@@ -350,6 +396,9 @@ class ColorDetectionUseCase(BaseProcessor):
             input_bytes,
             config
         )
+        print("-------------------COLOR_ANALYSIS-------------------")
+        print(color_analysis)
+        print("-------------------COLOR_ANALYSIS-------------------")
 
         # Step 8: Calculate summaries
         color_summary = self._calculate_color_summary(color_analysis, config)
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: matrice
-Version: 1.0.99295
+Version: 1.0.99297
 Summary: SDK for connecting to matrice.ai services
 Home-page: https://github.com/matrice-ai/python-sdk
 Author: Matrice.ai
@@ -170,7 +170,7 @@ matrice/deploy/utils/post_processing/usecases/car_part_segmentation.py,sha256=Jb
 matrice/deploy/utils/post_processing/usecases/cardiomegaly_classification.py,sha256=1P6DyOU6R1XKmQ-55BbKMU8CSsm4-wR5wS827UJG2JU,41244
 matrice/deploy/utils/post_processing/usecases/chicken_pose_detection.py,sha256=-e8di7Am-E-FCQFrSY8qJTO1aWtdRAVJoE-VKBgcyyI,29291
 matrice/deploy/utils/post_processing/usecases/child_monitoring.py,sha256=z3oymoqq4hDGwA8MkdEONZW_Vx5CAZmvzZaNLsqmCfw,39380
-matrice/deploy/utils/post_processing/usecases/color_detection.py,sha256=s7Qi4i-44DekS9tWM9NLkx8yVoTUCHfyMqi_qed-wjc,67335
+matrice/deploy/utils/post_processing/usecases/color_detection.py,sha256=BoVX1Dd0VzKEWIF-q_UI7VDjqU_aKka0mflsCmzQeHE,69478
 matrice/deploy/utils/post_processing/usecases/color_map_utils.py,sha256=SP-AEVcjLmL8rxblu-ixqUJC2fqlcr7ab4hWo4Fcr_k,2677
 matrice/deploy/utils/post_processing/usecases/concrete_crack_detection.py,sha256=pxhOH_hG4hq9yytNepbGMdk2W_lTG8D1_2RAagaPBkg,40252
 matrice/deploy/utils/post_processing/usecases/crop_weed_detection.py,sha256=Ao1k5fJDYU_f6yZ8VO-jW8-esECV0-zY5Q570c_fako,35674
@@ -244,8 +244,8 @@ matrice/deployment/camera_manager.py,sha256=e1Lc81RJP5wUWRdTgHO6tMWF9BkBdHOSVyx3
 matrice/deployment/deployment.py,sha256=HFt151eWq6iqIAMsQvurpV2WNxW6Cx_gIUVfnVy5SWE,48093
 matrice/deployment/inference_pipeline.py,sha256=6b4Mm3-qt-Zy0BeiJfFQdImOn3FzdNCY-7ET7Rp8PMk,37911
 matrice/deployment/streaming_gateway_manager.py,sha256=ifYGl3g25wyU39HwhPQyI2OgF3M6oIqKMWt8RXtMxY8,21401
-matrice-1.0.99295.dist-info/licenses/LICENSE.txt,sha256=2bm9uFabQZ3Ykb_SaSU_uUbAj2-htc6WJQmS_65qD00,1073
-matrice-1.0.99295.dist-info/METADATA,sha256=ZdeolA_TMO2WIHB1CSLTPNWywK-uF3t4IP4QZAiMA-Q,14624
-matrice-1.0.99295.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-matrice-1.0.99295.dist-info/top_level.txt,sha256=P97js8ur6o5ClRqMH3Cjoab_NqbJ6sOQ3rJmVzKBvMc,8
-matrice-1.0.99295.dist-info/RECORD,,
+matrice-1.0.99297.dist-info/licenses/LICENSE.txt,sha256=2bm9uFabQZ3Ykb_SaSU_uUbAj2-htc6WJQmS_65qD00,1073
+matrice-1.0.99297.dist-info/METADATA,sha256=WL2htBzZzbugAk-mI6bbvIuXVm08F-Zzd_BomZZU7Pg,14624
+matrice-1.0.99297.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+matrice-1.0.99297.dist-info/top_level.txt,sha256=P97js8ur6o5ClRqMH3Cjoab_NqbJ6sOQ3rJmVzKBvMc,8
+matrice-1.0.99297.dist-info/RECORD,,