ultralytics 8.3.36__py3-none-any.whl → 8.3.38__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -280,8 +280,8 @@ class RepC3(nn.Module):
  """Initialize CSP Bottleneck with a single convolution using input channels, output channels, and number."""
  super().__init__()
  c_ = int(c2 * e) # hidden channels
- self.cv1 = Conv(c1, c2, 1, 1)
- self.cv2 = Conv(c1, c2, 1, 1)
+ self.cv1 = Conv(c1, c_, 1, 1)
+ self.cv2 = Conv(c1, c_, 1, 1)
  self.m = nn.Sequential(*[RepConv(c_, c_) for _ in range(n)])
  self.cv3 = Conv(c_, c2, 1, 1) if c_ != c2 else nn.Identity()
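The RepC3 change above fixes the hidden-channel width: cv1 and cv2 previously projected to c2 rather than c_ = int(c2 * e), which broke the block whenever e != 1.0. A minimal sketch of exercising the corrected module, assuming ultralytics 8.3.38 is installed:

```python
import torch
from ultralytics.nn.modules.block import RepC3

# With e=0.5 the hidden width c_ is 64, so cv1/cv2 now project 128 -> 64 to match the RepConv stack.
block = RepC3(c1=128, c2=256, n=3, e=0.5)
x = torch.randn(1, 128, 40, 40)
print(block(x).shape)  # torch.Size([1, 256, 40, 40]); 8.3.36 raised a channel-mismatch error for e != 1.0
```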
@@ -50,7 +50,7 @@ class Conv(nn.Module):
  return self.act(self.bn(self.conv(x)))

  def forward_fuse(self, x):
- """Perform transposed convolution of 2D data."""
+ """Apply convolution and activation without batch normalization."""
  return self.act(self.conv(x))
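The corrected forward_fuse docstring describes the fused inference path: once BatchNorm has been folded into the convolution weights, only conv followed by activation runs. A minimal sketch of triggering that path, assuming a standard YOLO11 checkpoint is available:

```python
from ultralytics import YOLO

model = YOLO("yolo11n.pt")
model.fuse()  # folds BatchNorm into Conv weights; fused Conv modules then route through forward_fuse (conv + act only)
results = model("https://ultralytics.com/images/bus.jpg")
```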
@@ -89,7 +89,7 @@ class ParkingPtsSelection:
  """Uploads and displays an image on the canvas, resizing it to fit within specified dimensions."""
  from PIL import Image, ImageTk # scope because ImageTk requires tkinter package

- self.image = Image.open(self.filedialog.askopenfilename(filetypes=[("Image Files", "*.png;*.jpg;*.jpeg")]))
+ self.image = Image.open(self.filedialog.askopenfilename(filetypes=[("Image Files", "*.png *.jpg *.jpeg")]))
  if not self.image:
  return
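The filetypes fix above matters because Tkinter expects the glob patterns of a single filter entry to be space-separated (or passed as a tuple); the old semicolon-joined string is treated as one literal pattern, so the image filter matched nothing. A standalone sketch outside Ultralytics, assuming a desktop session where Tk can open a dialog:

```python
from tkinter import Tk, filedialog

root = Tk()
root.withdraw()  # hide the empty main window, we only want the file dialog
path = filedialog.askopenfilename(
    filetypes=[("Image Files", "*.png *.jpg *.jpeg")]  # space-separated patterns; "*.png;*.jpg" would be one literal pattern
)
print(path or "no file selected")
```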
@@ -44,7 +44,7 @@ class BaseTrack:
  start_frame (int): The frame number where tracking started.
  frame_id (int): The most recent frame ID processed by the track.
  time_since_update (int): Frames passed since the last update.
- location (Tuple): The location of the object in the context of multi-camera tracking.
+ location (tuple): The location of the object in the context of multi-camera tracking.

  Methods:
  end_frame: Returns the ID of the last frame where the object was tracked.
@@ -27,10 +27,9 @@ def linear_assignment(cost_matrix: np.ndarray, thresh: float, use_lap: bool = Tr
  use_lap (bool): Use lap.lapjv for the assignment. If False, scipy.optimize.linear_sum_assignment is used.

  Returns:
- (tuple): A tuple containing:
- - matched_indices (np.ndarray): Array of matched indices of shape (K, 2), where K is the number of matches.
- - unmatched_a (np.ndarray): Array of unmatched indices from the first set, with shape (L,).
- - unmatched_b (np.ndarray): Array of unmatched indices from the second set, with shape (M,).
+ matched_indices (np.ndarray): Array of matched indices of shape (K, 2), where K is the number of matches.
+ unmatched_a (np.ndarray): Array of unmatched indices from the first set, with shape (L,).
+ unmatched_b (np.ndarray): Array of unmatched indices from the second set, with shape (M,).

  Examples:
  >>> cost_matrix = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
@@ -607,13 +607,12 @@ def is_raspberrypi() -> bool:

  def is_jetson() -> bool:
  """
- Determines if the Python environment is running on a Jetson Nano or Jetson Orin device by checking the device model
- information.
+ Determines if the Python environment is running on an NVIDIA Jetson device by checking the device model information.

  Returns:
- (bool): True if running on a Jetson Nano or Jetson Orin, False otherwise.
+ (bool): True if running on an NVIDIA Jetson device, False otherwise.
  """
- return "NVIDIA" in PROC_DEVICE_MODEL # i.e. "NVIDIA Jetson Nano" or "NVIDIA Orin NX"
+ return any(keyword in PROC_DEVICE_MODEL.lower() for keyword in ("nvidia", "jetson"))


  def is_online() -> bool:
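The broadened check above no longer depends on the exact strings of specific boards; any device model containing "nvidia" or "jetson" (case-insensitive) now counts. A standalone illustration of the new predicate (in the library, PROC_DEVICE_MODEL holds the contents of /proc/device-tree/model on Linux hosts; the device_model parameter here is only for demonstration):

```python
def is_jetson(device_model: str) -> bool:
    """Return True if the device model string identifies an NVIDIA Jetson board."""
    return any(keyword in device_model.lower() for keyword in ("nvidia", "jetson"))

print(is_jetson("NVIDIA Jetson Nano Developer Kit"))  # True, matched by both the old and new checks
print(is_jetson("NVIDIA Orin NX"))                    # True
print(is_jetson("Jetson AGX Xavier"))                 # True under the new check even without an "NVIDIA" prefix
print(is_jetson("Raspberry Pi 5 Model B Rev 1.0"))    # False
```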
@@ -1255,9 +1254,12 @@ class SettingsManager(JSONDict):
  self.update(self.defaults)


- def deprecation_warn(arg, new_arg):
+ def deprecation_warn(arg, new_arg=None):
  """Issue a deprecation warning when a deprecated argument is used, suggesting an updated argument."""
- LOGGER.warning(f"WARNING ⚠️ '{arg}' is deprecated and will be removed in in the future. Use '{new_arg}' instead.")
+ msg = f"WARNING ⚠️ '{arg}' is deprecated and will be removed in in the future."
+ if new_arg is not None:
+ msg += f" Use '{new_arg}' instead."
+ LOGGER.warning(msg)


  def clean_url(url):
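With new_arg now optional, the helper can also flag options that are being retired without a replacement. A minimal sketch of both call styles (the argument names are illustrative, not a list of currently deprecated options):

```python
from ultralytics.utils import deprecation_warn

deprecation_warn("hide_labels", "show_labels")  # warns and suggests the replacement argument
deprecation_warn("some_retired_option")         # new in 8.3.38: warns without suggesting a replacement
```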
ultralytics/utils/loss.py CHANGED
@@ -552,9 +552,8 @@ class v8PoseLoss(v8DetectionLoss):
  pred_kpts (torch.Tensor): Predicted keypoints, shape (BS, N_anchors, N_kpts_per_object, kpts_dim).

  Returns:
- (tuple): Returns a tuple containing:
- - kpts_loss (torch.Tensor): The keypoints loss.
- - kpts_obj_loss (torch.Tensor): The keypoints object loss.
+ kpts_loss (torch.Tensor): The keypoints loss.
+ kpts_obj_loss (torch.Tensor): The keypoints object loss.
  """
  batch_idx = batch_idx.flatten()
  batch_size = len(masks)
@@ -549,19 +549,18 @@ def ap_per_class(
  prefix (str, optional): A prefix string for saving the plot files. Defaults to an empty string.

  Returns:
- (tuple): A tuple of six arrays and one array of unique classes, where:
- tp (np.ndarray): True positive counts at threshold given by max F1 metric for each class.Shape: (nc,).
- fp (np.ndarray): False positive counts at threshold given by max F1 metric for each class. Shape: (nc,).
- p (np.ndarray): Precision values at threshold given by max F1 metric for each class. Shape: (nc,).
- r (np.ndarray): Recall values at threshold given by max F1 metric for each class. Shape: (nc,).
- f1 (np.ndarray): F1-score values at threshold given by max F1 metric for each class. Shape: (nc,).
- ap (np.ndarray): Average precision for each class at different IoU thresholds. Shape: (nc, 10).
- unique_classes (np.ndarray): An array of unique classes that have data. Shape: (nc,).
- p_curve (np.ndarray): Precision curves for each class. Shape: (nc, 1000).
- r_curve (np.ndarray): Recall curves for each class. Shape: (nc, 1000).
- f1_curve (np.ndarray): F1-score curves for each class. Shape: (nc, 1000).
- x (np.ndarray): X-axis values for the curves. Shape: (1000,).
- prec_values: Precision values at mAP@0.5 for each class. Shape: (nc, 1000).
+ tp (np.ndarray): True positive counts at threshold given by max F1 metric for each class.Shape: (nc,).
+ fp (np.ndarray): False positive counts at threshold given by max F1 metric for each class. Shape: (nc,).
+ p (np.ndarray): Precision values at threshold given by max F1 metric for each class. Shape: (nc,).
+ r (np.ndarray): Recall values at threshold given by max F1 metric for each class. Shape: (nc,).
+ f1 (np.ndarray): F1-score values at threshold given by max F1 metric for each class. Shape: (nc,).
+ ap (np.ndarray): Average precision for each class at different IoU thresholds. Shape: (nc, 10).
+ unique_classes (np.ndarray): An array of unique classes that have data. Shape: (nc,).
+ p_curve (np.ndarray): Precision curves for each class. Shape: (nc, 1000).
+ r_curve (np.ndarray): Recall curves for each class. Shape: (nc, 1000).
+ f1_curve (np.ndarray): F1-score curves for each class. Shape: (nc, 1000).
+ x (np.ndarray): X-axis values for the curves. Shape: (1000,).
+ prec_values (np.ndarray): Precision values at mAP@0.5 for each class. Shape: (nc, 1000).
  """
  # Sort by objectness
  i = np.argsort(-conf)
ultralytics/utils/ops.py CHANGED
@@ -317,11 +317,11 @@ def clip_boxes(boxes, shape):
  Takes a list of bounding boxes and a shape (height, width) and clips the bounding boxes to the shape.

  Args:
- boxes (torch.Tensor): the bounding boxes to clip
- shape (tuple): the shape of the image
+ boxes (torch.Tensor): The bounding boxes to clip.
+ shape (tuple): The shape of the image.

  Returns:
- (torch.Tensor | numpy.ndarray): Clipped boxes
+ (torch.Tensor | numpy.ndarray): The clipped boxes.
  """
  if isinstance(boxes, torch.Tensor): # faster individually (WARNING: inplace .clamp_() Apple MPS bug)
  boxes[..., 0] = boxes[..., 0].clamp(0, shape[1]) # x1
@@ -359,9 +359,9 @@ def scale_image(masks, im0_shape, ratio_pad=None):
  Takes a mask, and resizes it to the original image size.

  Args:
- masks (np.ndarray): resized and padded masks/images, [h, w, num]/[h, w, 3].
- im0_shape (tuple): the original image shape
- ratio_pad (tuple): the ratio of the padding to the original image.
+ masks (np.ndarray): Resized and padded masks/images, [h, w, num]/[h, w, 3].
+ im0_shape (tuple): The original image shape.
+ ratio_pad (tuple): The ratio of the padding to the original image.

  Returns:
  masks (np.ndarray): The masks that are being returned with shape [h, w, num].
@@ -692,12 +692,12 @@ def process_mask_native(protos, masks_in, bboxes, shape):

  Args:
  protos (torch.Tensor): [mask_dim, mask_h, mask_w]
- masks_in (torch.Tensor): [n, mask_dim], n is number of masks after nms
- bboxes (torch.Tensor): [n, 4], n is number of masks after nms
- shape (tuple): the size of the input image (h,w)
+ masks_in (torch.Tensor): [n, mask_dim], n is number of masks after nms.
+ bboxes (torch.Tensor): [n, 4], n is number of masks after nms.
+ shape (tuple): The size of the input image (h,w).

  Returns:
- masks (torch.Tensor): The returned masks with dimensions [h, w, n]
+ masks (torch.Tensor): The returned masks with dimensions [h, w, n].
  """
  c, mh, mw = protos.shape # CHW
  masks = (masks_in @ protos.float().view(c, -1)).view(-1, mh, mw)
@@ -783,23 +783,29 @@ def regularize_rboxes(rboxes):
  return torch.stack([x, y, w_, h_, t], dim=-1) # regularized boxes


- def masks2segments(masks, strategy="largest"):
+ def masks2segments(masks, strategy="all"):
  """
  It takes a list of masks(n,h,w) and returns a list of segments(n,xy).

  Args:
  masks (torch.Tensor): the output of the model, which is a tensor of shape (batch_size, 160, 160)
- strategy (str): 'concat' or 'largest'. Defaults to largest
+ strategy (str): 'all' or 'largest'. Defaults to all

  Returns:
  segments (List): list of segment masks
  """
+ from ultralytics.data.converter import merge_multi_segment
+
  segments = []
  for x in masks.int().cpu().numpy().astype("uint8"):
  c = cv2.findContours(x, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[0]
  if c:
- if strategy == "concat": # concatenate all segments
- c = np.concatenate([x.reshape(-1, 2) for x in c])
+ if strategy == "all": # merge and concatenate all segments
+ c = (
+ np.concatenate(merge_multi_segment([x.reshape(-1, 2) for x in c]))
+ if len(c) > 1
+ else c[0].reshape(-1, 2)
+ )
  elif strategy == "largest": # select largest segment
  c = np.array(c[np.array([len(x) for x in c]).argmax()]).reshape(-1, 2)
  else:
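masks2segments now defaults to strategy="all", which merges a mask's disjoint contours via merge_multi_segment instead of silently keeping only the largest one. A minimal sketch of calling it on segmentation output, assuming the yolo11n-seg.pt weights and the sample image are reachable:

```python
from ultralytics import YOLO
from ultralytics.utils.ops import masks2segments

model = YOLO("yolo11n-seg.pt")
result = model("https://ultralytics.com/images/bus.jpg")[0]
if result.masks is not None:
    merged = masks2segments(result.masks.data)               # default "all": one merged polygon per instance
    largest = masks2segments(result.masks.data, "largest")   # keep only the biggest contour per instance
    print(len(merged), merged[0].shape)                      # e.g. N instances, (num_points, 2) polygon each
```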
@@ -584,8 +584,8 @@ class Annotator:
  Displays queue counts on an image centered at the points with customizable font size and colors.

  Args:
- label (str): queue counts label
- points (tuple): region points for center point calculation to display text
+ label (str): Queue counts label.
+ points (tuple): Region points for center point calculation to display text.
  region_color (tuple): RGB queue region color.
  txt_color (tuple): RGB text display color.
  """
@@ -624,13 +624,13 @@ class Annotator:
  Display the bounding boxes labels in parking management app.

  Args:
- im0 (ndarray): inference image
- text (str): object/class name
- txt_color (tuple): display color for text foreground
- bg_color (tuple): display color for text background
- x_center (float): x position center point for bounding box
- y_center (float): y position center point for bounding box
- margin (int): gap between text and rectangle for better display
+ im0 (ndarray): Inference image.
+ text (str): Object/class name.
+ txt_color (tuple): Display color for text foreground.
+ bg_color (tuple): Display color for text background.
+ x_center (float): The x position center point for bounding box.
+ y_center (float): The y position center point for bounding box.
+ margin (int): The gap between text and rectangle for better display.
  """
  text_size = cv2.getTextSize(text, 0, fontScale=self.sf, thickness=self.tf)[0]
  text_x = x_center - text_size[0] // 2
@@ -648,11 +648,11 @@ class Annotator:
  Display the overall statistics for parking lots.

  Args:
- im0 (ndarray): inference image
- text (dict): labels dictionary
- txt_color (tuple): display color for text foreground
- bg_color (tuple): display color for text background
- margin (int): gap between text and rectangle for better display
+ im0 (ndarray): Inference image.
+ text (dict): Labels dictionary.
+ txt_color (tuple): Display color for text foreground.
+ bg_color (tuple): Display color for text background.
+ margin (int): Gap between text and rectangle for better display.
  """
  horizontal_gap = int(im0.shape[1] * 0.02)
  vertical_gap = int(im0.shape[0] * 0.01)
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ultralytics
- Version: 8.3.36
+ Version: 8.3.38
  Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
  Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -238,8 +238,8 @@ See [Segmentation Docs](https://docs.ultralytics.com/tasks/segment/) for usage e
  | [YOLO11l-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11l-seg.pt) | 640 | 53.4 | 42.9 | 344.2 ± 3.2 | 7.8 ± 0.2 | 27.6 | 142.2 |
  | [YOLO11x-seg](https://github.com/ultralytics/assets/releases/download/v8.3.0/yolo11x-seg.pt) | 640 | 54.7 | 43.8 | 664.5 ± 3.2 | 15.8 ± 0.7 | 62.1 | 319.0 |

- - **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org/) dataset. <br>Reproduce by `yolo val segment data=coco-seg.yaml device=0`
- - **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val segment data=coco-seg.yaml batch=1 device=0|cpu`
+ - **mAP<sup>val</sup>** values are for single-model single-scale on [COCO val2017](https://cocodataset.org/) dataset. <br>Reproduce by `yolo val segment data=coco.yaml device=0`
+ - **Speed** averaged over COCO val images using an [Amazon EC2 P4d](https://aws.amazon.com/ec2/instance-types/p4/) instance. <br>Reproduce by `yolo val segment data=coco.yaml batch=1 device=0|cpu`

  </details>
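The reproduce commands in the segmentation table now point at coco.yaml instead of coco-seg.yaml. A rough Python-API equivalent of the first command, as a sketch (downloads the COCO dataset on first run and assumes a CUDA device):

```python
from ultralytics import YOLO

model = YOLO("yolo11n-seg.pt")
metrics = model.val(data="coco.yaml", device=0)  # was data="coco-seg.yaml" in the 8.3.36 README
print(metrics.seg.map)  # mask mAP50-95
```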
@@ -7,11 +7,11 @@ tests/test_exports.py,sha256=1MvhcQ2qHdbJImHII-bFarcaIcm-kPlEK-OdFLxnj7o,8769
  tests/test_integrations.py,sha256=f5-QCUk1SU_-qn4mBCZwS3GN3tXEBIIXo4z2EhExbHw,6126
  tests/test_python.py,sha256=I1RRdCwLdrc3jX06huVxct8HX8ccQOmQgVpuEflRl0U,23560
  tests/test_solutions.py,sha256=HlDe-XOgBX0k1cLhRTAhhawMHk6p-5dg5xl2AIRjfdk,3790
- ultralytics/__init__.py,sha256=8zf8uvs_KTLPV49QQoIaydRNBC_k4H1qsXDKoxjxuy4,681
+ ultralytics/__init__.py,sha256=E7u0cCuS67ALDjZVP9cgv7qP2VVAgLMVCk0-Vhgc0ug,681
  ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
  ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
- ultralytics/cfg/__init__.py,sha256=ArJow4-pOPN3y6aKOv5KcVXimikI6vAQvQlSRb7IdWE,38743
- ultralytics/cfg/default.yaml,sha256=jlSdLkFAngX6HvrzJHdZ9kdi-xO7utyLc4X2M3NWhEI,8342
+ ultralytics/cfg/__init__.py,sha256=4O7zcTGSWzT1O4zg71f7XSh-PywdeJ4PrBiuEZiBeiM,38771
+ ultralytics/cfg/default.yaml,sha256=FcXbvTXXvMpssk9fSwdlnVTtyqfmlYE9gAcHsf0OMf8,8347
  ultralytics/cfg/datasets/Argoverse.yaml,sha256=FyeuJT5CHq_9d4hlfAf0kpZlnbUMO0S--UJ1yIqcdKk,3134
  ultralytics/cfg/datasets/DOTAv1.5.yaml,sha256=QVfp_Qp-4rukuicaB4qx86NxSHM8Mrzym8l_fIDo8gw,1195
  ultralytics/cfg/datasets/DOTAv1.yaml,sha256=sxe2P7nY-cCPufH3G1pymnQVtNoGH1y0ETG5CyWfK9g,1165
@@ -91,17 +91,17 @@ ultralytics/cfg/trackers/botsort.yaml,sha256=FDIrZ3hAhRtMfDl654pt1HIexmPqlFQK-3l
  ultralytics/cfg/trackers/bytetrack.yaml,sha256=rBWY4RjjX6PTO2o6TUJFYHVgXNZHCN5TuBuzwuPYVjA,723
  ultralytics/data/__init__.py,sha256=VGe-ATG7j35F4A4r8Jmzffjlhve4JAJPgRa5ahKTU18,616
  ultralytics/data/annotator.py,sha256=JNmS6uELlEABrU5ViVJiPnjt44v-Us7j39Bwoug_73Y,3117
- ultralytics/data/augment.py,sha256=1yBz98EO0uVvzVEk6rvuO8YwmxwyYZfe5NV0mNIsHkI,120509
+ ultralytics/data/augment.py,sha256=Cfa3cufMjNMBqnzSpCFrF7IjR5B-NkpOJ6NwpVdEAWo,120461
  ultralytics/data/base.py,sha256=ZCIhAyFfxXVp5fVnYD8mwbksNALJTayBKIR5FKGV7ZM,15168
  ultralytics/data/build.py,sha256=AfMmz0sHIYmwry_90tEJFRk_kz0S3SolScVXqYHiT08,7261
  ultralytics/data/converter.py,sha256=RIfTXNrazwZqmTYOYoJtupDMtNzm8dxsrVp6q2m8gyg,24388
  ultralytics/data/dataset.py,sha256=D556AW0ZEsW3V8c5zJiHM_prc_YfZqymIkDKPw3k9Io,22936
- ultralytics/data/loaders.py,sha256=Fr70Q9p9t7buLW_8R2_lI_nyCMG033gWSxvwy1M-a-U,28449
+ ultralytics/data/loaders.py,sha256=k1Vq7Rxv6tpsRsYuMdZeI3_f2BciAaZwhDQU8iHhVJM,28506
  ultralytics/data/split_dota.py,sha256=eFafJ7Vg52wj6KDCHFJAf1tKzyPD5YaPB8kM4VX5Aeg,10688
  ultralytics/data/utils.py,sha256=bmWEIrdogj4kssZQSJdSbIF8QsJU00lo-EY-Mgcqv4M,31073
  ultralytics/engine/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
- ultralytics/engine/exporter.py,sha256=hTzJ09-7E0WCehrqws_joG613VBhyIfWZYrRCsnafjQ,66832
- ultralytics/engine/model.py,sha256=TfuTczFjNJ3GW0E_qWVH6OaJ_2I-_Srx7i_4GQebDoo,51472
+ ultralytics/engine/exporter.py,sha256=137idYe5ct3KuJBpjjjNRUAb6Gx0PeETKm21GZm43Nk,66972
+ ultralytics/engine/model.py,sha256=VthPB0IK4tsT0VAmu8Jz7q-crWsggCLFH17NwwIxnOo,51962
  ultralytics/engine/predictor.py,sha256=nO6lzxG75GXyQsUNEimLk5MLfcMwl8AkRAaoYMPwQug,17687
  ultralytics/engine/results.py,sha256=a1XFZRPwqgKDBOEAibHuT9nP2xefLiWVsMoBJbcr4iA,75058
  ultralytics/engine/trainer.py,sha256=Cd95QLJ3C4fncoOX1YgauLA9aWVYRd1G6x0Au2xX86k,37335
@@ -127,17 +127,17 @@ ultralytics/models/rtdetr/model.py,sha256=2VkppF1_581XmQ0UI7lo8fX7MqhAJPXVMr2jyM
  ultralytics/models/rtdetr/predict.py,sha256=cxULdJAzL9RM11Y24tIguKcNJZXwynNsrWRCW-jUYEQ,3568
  ultralytics/models/rtdetr/train.py,sha256=m8S9Z94kNaH0HN9TR51iQpToIDV8AUoXpkI5qMdLB7Q,3847
  ultralytics/models/rtdetr/val.py,sha256=xVjZShZ1AvES97wVekl2q_1g20Pq-IIHhkJdWtxMncs,5566
- ultralytics/models/sam/__init__.py,sha256=o4_D6y8YJlOXIK7Lwo9RHnIJJ9xoFNi4zK99QSc1kdM,176
+ ultralytics/models/sam/__init__.py,sha256=E4IHie-T0HYCklKW6-kqlW84GJJdD6rujf7W_SgRlrs,218
  ultralytics/models/sam/amg.py,sha256=GrmO_8YfIDt_QkPEMF_WFjPZkhwhf7iwx7ig8JgOUnE,8709
  ultralytics/models/sam/build.py,sha256=ac7Pop5f51TVzGgfV6bbXSFDA9fBVxERUc_6WDQ-9Ys,12487
- ultralytics/models/sam/model.py,sha256=2KFUp8SHiqOgwUjkdqdau0oduJwKQxm4N9GHWjdhUFo,7382
- ultralytics/models/sam/predict.py,sha256=gmvnzRlGNnmXFh-sPJA00mlZ168k-SEYKOOPIrNgsSk,40444
+ ultralytics/models/sam/model.py,sha256=CE4ruw1Iwrp7-9aHGspQihQaTVsqagYrQLWmpXYodLw,7382
+ ultralytics/models/sam/predict.py,sha256=fv9s1kYx8Er2ZsaMpmiB9Phz5l0mGdjCyqQpmM2CpcE,82535
  ultralytics/models/sam/modules/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
  ultralytics/models/sam/modules/blocks.py,sha256=Q-KwhFbdyZhl1tjG_kP2LcQkZbzoNt618i-NRrKNx2Y,45919
  ultralytics/models/sam/modules/decoders.py,sha256=mODsqnTN_CjE3H0Sh9cd8PfTnHANPjGB1bjqHxfezSg,25830
  ultralytics/models/sam/modules/encoders.py,sha256=Ay3sYeUonCf6URXBdB0dDwyngovevW8hUDgULRnNIoA,34824
  ultralytics/models/sam/modules/memory_attention.py,sha256=XilWBnRfH8wZxIoL2-yEk-dRypCsS0Jf_9t8WJxXKg0,9722
- ultralytics/models/sam/modules/sam.py,sha256=H0EJpbwwYUJ-Hx4d_5OVCH0rZInmS937cu1183lzpcc,53102
+ ultralytics/models/sam/modules/sam.py,sha256=ED_1CwDJ_eoELMkKlfAQpWLlHsBYj6gNL63imtSloro,52685
  ultralytics/models/sam/modules/tiny_encoder.py,sha256=NyzeFMLnmqwcFQFs-JBM9PCWSsYoYZ_6h59Un1DeDV0,41332
  ultralytics/models/sam/modules/transformer.py,sha256=nuhF_14LGrr5uYCAP9XCXps-zlVcT4OWO0evXWDxPwI,16081
  ultralytics/models/sam/modules/utils.py,sha256=Y36V6BVy6GeaAvKE8gHmoDIa-f5LjJpmSVwywNkv2yk,12315
@@ -174,8 +174,8 @@ ultralytics/nn/autobackend.py,sha256=Arke5BaRQmr4yQd-xr6Z8P7kbTBNLI-O0fsDPFLOXMw
  ultralytics/nn/tasks.py,sha256=pqRe1F1HOH8AjLZpFaZCGb5gSYsXH0eVnHITKDTFFhI,48527
  ultralytics/nn/modules/__init__.py,sha256=xhW2BennT9U_VaMXVpRu-bdLgp1BXt9L8mkIUBE3idU,2625
  ultralytics/nn/modules/activation.py,sha256=chhn469wnRHEs5BMGNBYXwPYZc_7-urspTT8fnBd-xA,895
- ultralytics/nn/modules/block.py,sha256=PAm23KpRHDNlGtNWf1w8Ae0LdjII2H5vu0A4eeWx_XQ,41851
- ultralytics/nn/modules/conv.py,sha256=vOeHZ6Z4sc6-9PrDmRGT1hFkxSBbbWkQm2jRbGGjpqQ,12705
+ ultralytics/nn/modules/block.py,sha256=Rk9CT23Bpqpo3LYRuQePYML6HAvsM20p2QlFTCaYFH4,41851
+ ultralytics/nn/modules/conv.py,sha256=DPLZCRno_ZOjsuajAXIq-GbJdOh2jp1WayRXfDEd8z8,12724
  ultralytics/nn/modules/head.py,sha256=KCO-qarg2K7uJqQ7L5zVJ4-viiHqmu4bzbSgAw3L_nk,27815
  ultralytics/nn/modules/transformer.py,sha256=tGiK8NmPfswwW1rbF21r5ILUkkZQ6Nk4s8j16vFBmps,18069
  ultralytics/nn/modules/utils.py,sha256=a88cKl2wz1nMVSEBiajtvaCbDBQIkESWOKTZ_WAJy90,3195
@@ -185,22 +185,22 @@ ultralytics/solutions/analytics.py,sha256=C57pIghXeKN8hul8QOV7W9YDMpfFfSfPTBb-lE
  ultralytics/solutions/distance_calculation.py,sha256=KN3CC-dm2dTQylj79IrifCJT8ZhE7hc2EweH3KK31mE,5461
  ultralytics/solutions/heatmap.py,sha256=-1VtMCJRmpHnLqgna0i2HOBsxNoqFernzpKQnICngUM,5449
  ultralytics/solutions/object_counter.py,sha256=MuxQG4a22458WwciAB96m5AxVXwH98AIWAaf_kPali4,9613
- ultralytics/solutions/parking_management.py,sha256=1DsEE94eauqcnnFxUYI-BX9eA1GbJVNt7oncj1okYpI,11198
+ ultralytics/solutions/parking_management.py,sha256=Hh28FTuP_TaO7x5RadYm-JSVJuEu1M2SSgHqgdYYtr8,11198
  ultralytics/solutions/queue_management.py,sha256=D9TqwJSVrZQFxp_M8O62WfBAxkAuDWWnXe7FFmnp7_w,4881
  ultralytics/solutions/region_counter.py,sha256=w0c0Sz9XG6rwzr5nA6nb1zFW8IVkTQuatfZNBtOik68,4947
  ultralytics/solutions/solutions.py,sha256=HC5008BgQmWTw4aY8VgTEQioUzvuZxJebIk35E5HdcA,7275
  ultralytics/solutions/speed_estimation.py,sha256=A10DmuZlGkoZUyfHhZWcDRjj1-9GXiDhEjyBbAzfaDs,4936
  ultralytics/solutions/streamlit_inference.py,sha256=w4dnvSv2FOrpji9W1Ir86phka3OXc7jd_38-OCbQdZw,5701
  ultralytics/trackers/__init__.py,sha256=j72IgH2dZHQArMPK4YwcV5ieIw94fYvlGdQjB9cOQKw,227
- ultralytics/trackers/basetrack.py,sha256=dXnXW3cxxd7lPm20JJCNO2voCIrQ4vhbNI1g4YEgn-Y,4423
+ ultralytics/trackers/basetrack.py,sha256=kPOeAX2ihvANtQJk-zUsN0C7JjhlJbx0UhjaCFk_ovQ,4423
  ultralytics/trackers/bot_sort.py,sha256=766grVQExvonb087Wy-SB32TSwYYsTEM22yoWeQ_EEo,10494
  ultralytics/trackers/byte_tracker.py,sha256=jl3egXlItfqPfbmxsLebvA7eKZWa1Ghj2Qc9wNTtebQ,20818
  ultralytics/trackers/track.py,sha256=BfkdmdgTvoI8Raz6yuDQMrbCrWOGm9Lfu3aBTXYv2j8,3874
  ultralytics/trackers/utils/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
  ultralytics/trackers/utils/gmc.py,sha256=VcURuY041qGCeWUGMxHZBr10T16LtcMqyv7AmTfE1MY,14557
  ultralytics/trackers/utils/kalman_filter.py,sha256=cH9zD3fwkuezP97H9mw8cSBN7a8hHKx_Sx1j7t3oYGs,21349
- ultralytics/trackers/utils/matching.py,sha256=3Ie1WNNRZ4_q3365F03XD7Nr9juZB_08mw4yUKC3w74,7162
- ultralytics/utils/__init__.py,sha256=08pFkzKn1eR9xdIFhx8tx_8MO-gqXjt2n0HGwDeUlWE,49159
+ ultralytics/trackers/utils/matching.py,sha256=YCC9O1iwrWKD5k6WryklrttoQZcdUquSQi5cTWmp4I4,7107
+ ultralytics/utils/__init__.py,sha256=_KUqXbKcFgN11_ZLGrpQuPNOdSbIGhuv_IBGUPw9jX0,49203
  ultralytics/utils/autobatch.py,sha256=nt0nSNNhrQqvtaxeNBBYpU2OkZnI3ihNEAa3jF4pybo,4594
  ultralytics/utils/benchmarks.py,sha256=Ub--iTq2hL_oHkG2R3HXmZXQ6qcBC-P9MabUv60bMLE,25625
  ultralytics/utils/checks.py,sha256=KXQSeauhzecy9tSjyDVy8oXbTDkHSSB9lOTYrqRWpok,29582
@@ -209,11 +209,11 @@ ultralytics/utils/downloads.py,sha256=fh7I5toTSowAOXtmx5zIzCEDREfTFG45cLIHmsDmuY
  ultralytics/utils/errors.py,sha256=GqP_Jgj_n0paxn8OMhn3DTCgoNkB2WjUcUaqs-M6SQk,816
  ultralytics/utils/files.py,sha256=uiXQSVABJRoI5ImnM6ndEBIFbECfksmWNEldBg8GnSo,8224
  ultralytics/utils/instance.py,sha256=EnLp3hCihG5-32eGSMmjzspbxZsDvbqEOs-X0kcvxwQ,16252
- ultralytics/utils/loss.py,sha256=SW3FVFFp8Ki_LCT8wIdFbm6KmyPcQn3RmKNcvVAhMQI,34174
- ultralytics/utils/metrics.py,sha256=msPaXc244ndc0NPBhnNlHsKkVhdc-TMgFn5NATlZZVI,53918
- ultralytics/utils/ops.py,sha256=dsXNdyrYx_p6io6zezig9p84dxS7U-10vceHNVu2IL0,32888
+ ultralytics/utils/loss.py,sha256=jUCiUcxgF6jGxGdvIcupeMidLoF-gI7s1tcJoQCZbnk,34113
+ ultralytics/utils/metrics.py,sha256=toJlyA0W-xtChqAtIDiHISolxc_30NP33ezxWQ1rnPc,53804
+ ultralytics/utils/ops.py,sha256=L9DEpuJOdIiysZaypDy-w8r3VWg6nJChGnORBBJo4y8,33100
  ultralytics/utils/patches.py,sha256=J-iOwIRbfUs-inBZerhnXby5tUKjYcOIyvhLTS352JE,3270
- ultralytics/utils/plotting.py,sha256=TKtdbAOl6gZdFD2hlA5T4LNWfr2LUWbCC-cXkgL1JAU,61089
+ ultralytics/utils/plotting.py,sha256=6Iwh2dn6hDhaTk4hlZ14fRYKhqVnr7f1NNUw2Oq3PWk,61115
  ultralytics/utils/tal.py,sha256=thD_AEhVmhaZqmS5szZMvpKO-RKOeZwfX1BYAhdnA0o,18470
  ultralytics/utils/torch_utils.py,sha256=57y3iY2ke-E-v7MGMN2nPPAEwqEBsf0rjHEOfo9VPBc,32068
  ultralytics/utils/triton.py,sha256=gg1finxno_tY2Ge9PMhmu7PI9wvoFZoiicdT4Bhqv3w,3936
@@ -229,9 +229,9 @@ ultralytics/utils/callbacks/neptune.py,sha256=IbGQfEltamUKXJt93uSLQFn8c2rYh3DMTg
  ultralytics/utils/callbacks/raytune.py,sha256=Ck_yFzg7UZXiDWrLHaltjQybzVWSFDfzpdrx9ZYTRfI,700
  ultralytics/utils/callbacks/tensorboard.py,sha256=SHlE58Fb-sg-uZKtgy-ybIO3SAIfK55aj8kTYGA0Cyg,4167
  ultralytics/utils/callbacks/wb.py,sha256=sizfTa-xI9k2pnDSP_Q9pHZEFwcl__gSFM0AcneuRpY,7058
- ultralytics-8.3.36.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
- ultralytics-8.3.36.dist-info/METADATA,sha256=32AcyU2TCZfAUFhgWdjFWJN9FJSCDug98r1IhHOOXOM,35209
- ultralytics-8.3.36.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
- ultralytics-8.3.36.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
- ultralytics-8.3.36.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
- ultralytics-8.3.36.dist-info/RECORD,,
+ ultralytics-8.3.38.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ ultralytics-8.3.38.dist-info/METADATA,sha256=pMzt-gXnvYy-Am3XsD_H_io7DnC1HYF7nZ85sON6fRo,35201
+ ultralytics-8.3.38.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+ ultralytics-8.3.38.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ ultralytics-8.3.38.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ ultralytics-8.3.38.dist-info/RECORD,,