ultralytics 8.2.58__py3-none-any.whl → 8.2.60__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this version of ultralytics has been flagged as potentially problematic.

ultralytics/__init__.py CHANGED
@@ -1,6 +1,6 @@
  # Ultralytics YOLO 🚀, AGPL-3.0 license
 
- __version__ = "8.2.58"
+ __version__ = "8.2.60"
 
  import os
 
ultralytics/engine/exporter.py CHANGED
@@ -563,7 +563,7 @@ class Exporter:
          LOGGER.warning(f"{prefix} WARNING ⚠️ PNNX GitHub assets not found: {e}, using default {asset}")
          unzip_dir = safe_download(f"https://github.com/pnnx/pnnx/releases/download/{release}/{asset}", delete=True)
          if check_is_path_safe(Path.cwd(), unzip_dir):  # avoid path traversal security vulnerability
-             (unzip_dir / name).rename(pnnx)  # move binary to ROOT
+             shutil.move(src=unzip_dir / name, dst=pnnx)  # move binary to ROOT
          pnnx.chmod(0o777)  # set read, write, and execute permissions for everyone
          shutil.rmtree(unzip_dir)  # delete unzip dir
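A brief aside on why this swap matters: `Path.rename()` wraps `os.rename()`, which cannot move a file across filesystems, whereas `shutil.move()` falls back to copy-and-delete. A minimal sketch of the difference, with hypothetical paths rather than the exporter's real ones:

```python
import shutil
import tempfile
from pathlib import Path

# Hypothetical paths for illustration only; the real exporter moves the downloaded
# PNNX binary from its temporary unzip directory into the current working directory.
src = Path(tempfile.mkdtemp()) / "pnnx"
src.write_bytes(b"placeholder binary")
dst = Path.cwd() / "pnnx_demo"

try:
    src.rename(dst)  # os.rename(): raises OSError (EXDEV) if src and dst are on different filesystems
except OSError:
    shutil.move(src=src, dst=dst)  # copies then removes, so it also works across devices
```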
ultralytics/models/yolo/detect/val.py CHANGED
@@ -41,6 +41,11 @@ class DetectionValidator(BaseValidator):
          self.iouv = torch.linspace(0.5, 0.95, 10)  # IoU vector for mAP@0.5:0.95
          self.niou = self.iouv.numel()
          self.lb = []  # for autolabelling
+         if self.args.save_hybrid:
+             LOGGER.warning(
+                 "WARNING ⚠️ 'save_hybrid=True' will append ground truth to predictions for autolabelling.\n"
+                 "WARNING ⚠️ 'save_hybrid=True' will cause incorrect mAP.\n"
+             )
 
      def preprocess(self, batch):
          """Preprocesses batch of images for YOLO training."""
@@ -53,21 +58,21 @@ class DetectionValidator(BaseValidator):
              height, width = batch["img"].shape[2:]
              nb = len(batch["img"])
              bboxes = batch["bboxes"] * torch.tensor((width, height, width, height), device=self.device)
-             self.lb = (
-                 [
-                     torch.cat([batch["cls"][batch["batch_idx"] == i], bboxes[batch["batch_idx"] == i]], dim=-1)
-                     for i in range(nb)
-                 ]
-                 if self.args.save_hybrid
-                 else []
-             )  # for autolabelling
+             self.lb = [
+                 torch.cat([batch["cls"][batch["batch_idx"] == i], bboxes[batch["batch_idx"] == i]], dim=-1)
+                 for i in range(nb)
+             ]
 
          return batch
 
      def init_metrics(self, model):
          """Initialize evaluation metrics for YOLO."""
          val = self.data.get(self.args.split, "")  # validation path
-         self.is_coco = isinstance(val, str) and "coco" in val and val.endswith(f"{os.sep}val2017.txt")  # is COCO
+         self.is_coco = (
+             isinstance(val, str)
+             and "coco" in val
+             and (val.endswith(f"{os.sep}val2017.txt") or val.endswith(f"{os.sep}test-dev2017.txt"))
+         )  # is COCO
          self.is_lvis = isinstance(val, str) and "lvis" in val and not self.is_coco  # is LVIS
          self.class_map = converter.coco80_to_coco91_class() if self.is_coco else list(range(len(model.names)))
          self.args.save_json |= (self.is_coco or self.is_lvis) and not self.training  # run on final val if training COCO
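For context on the new warning and the `self.lb` block above: when `save_hybrid=True`, the validator fills `self.lb` with ground-truth boxes and passes them to NMS as a-priori labels, so ground truth gets mixed into the evaluated predictions and the reported mAP is inflated. A hedged sketch of how a user would reach this path (the weights and dataset YAML below are placeholders):

```python
from ultralytics import YOLO

model = YOLO("yolov8n.pt")  # placeholder weights
# Triggers the new warning; the saved labels are hybrid GT+prediction files and
# the mAP printed at the end of this run should not be trusted.
metrics = model.val(data="coco8.yaml", save_hybrid=True)
```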
@@ -159,8 +164,12 @@ class DetectionValidator(BaseValidator):
              if self.args.save_json:
                  self.pred_to_json(predn, batch["im_file"][si])
              if self.args.save_txt:
-                 file = self.save_dir / "labels" / f'{Path(batch["im_file"][si]).stem}.txt'
-                 self.save_one_txt(predn, self.args.save_conf, pbatch["ori_shape"], file)
+                 self.save_one_txt(
+                     predn,
+                     self.args.save_conf,
+                     pbatch["ori_shape"],
+                     self.save_dir / "labels" / f'{Path(batch["im_file"][si]).stem}.txt',
+                 )
 
      def finalize_metrics(self, *args, **kwargs):
          """Set final values for metrics speed and confusion matrix."""
@@ -260,12 +269,14 @@ class DetectionValidator(BaseValidator):
 
      def save_one_txt(self, predn, save_conf, shape, file):
          """Save YOLO detections to a txt file in normalized coordinates in a specific format."""
-         gn = torch.tensor(shape)[[1, 0, 1, 0]]  # normalization gain whwh
-         for *xyxy, conf, cls in predn.tolist():
-             xywh = (ops.xyxy2xywh(torch.tensor(xyxy).view(1, 4)) / gn).view(-1).tolist()  # normalized xywh
-             line = (cls, *xywh, conf) if save_conf else (cls, *xywh)  # label format
-             with open(file, "a") as f:
-                 f.write(("%g " * len(line)).rstrip() % line + "\n")
+         from ultralytics.engine.results import Results
+
+         Results(
+             np.zeros((shape[0], shape[1]), dtype=np.uint8),
+             path=None,
+             names=self.names,
+             boxes=predn[:, :6],
+         ).save_txt(file, save_conf=save_conf)
 
      def pred_to_json(self, predn, filename):
          """Serialize YOLO predictions to COCO json format."""
ultralytics/models/yolo/obb/val.py CHANGED
@@ -130,13 +130,19 @@ class OBBValidator(DetectionValidator):
 
      def save_one_txt(self, predn, save_conf, shape, file):
          """Save YOLO detections to a txt file in normalized coordinates in a specific format."""
-         gn = torch.tensor(shape)[[1, 0]]  # normalization gain whwh
-         for *xywh, conf, cls, angle in predn.tolist():
-             xywha = torch.tensor([*xywh, angle]).view(1, 5)
-             xyxyxyxy = (ops.xywhr2xyxyxyxy(xywha) / gn).view(-1).tolist()  # normalized xywh
-             line = (cls, *xyxyxyxy, conf) if save_conf else (cls, *xyxyxyxy)  # label format
-             with open(file, "a") as f:
-                 f.write(("%g " * len(line)).rstrip() % line + "\n")
+         import numpy as np
+
+         from ultralytics.engine.results import Results
+
+         rboxes = torch.cat([predn[:, :4], predn[:, -1:]], dim=-1)
+         # xywh, r, conf, cls
+         obb = torch.cat([rboxes, predn[:, 4:6]], dim=-1)
+         Results(
+             np.zeros((shape[0], shape[1]), dtype=np.uint8),
+             path=None,
+             names=self.names,
+             obb=obb,
+         ).save_txt(file, save_conf=save_conf)
 
      def eval_json(self, stats):
          """Evaluates YOLO output in JSON format and returns performance statistics."""
ultralytics/models/yolo/pose/val.py CHANGED
@@ -147,8 +147,14 @@ class PoseValidator(DetectionValidator):
              # Save
              if self.args.save_json:
                  self.pred_to_json(predn, batch["im_file"][si])
-             # if self.args.save_txt:
-             #     save_one_txt(predn, save_conf, shape, file=save_dir / 'labels' / f'{path.stem}.txt')
+             if self.args.save_txt:
+                 self.save_one_txt(
+                     predn,
+                     pred_kpts,
+                     self.args.save_conf,
+                     pbatch["ori_shape"],
+                     self.save_dir / "labels" / f'{Path(batch["im_file"][si]).stem}.txt',
+                 )
 
      def _process_batch(self, detections, gt_bboxes, gt_cls, pred_kpts=None, gt_kpts=None):
          """
@@ -217,6 +223,18 @@ class PoseValidator(DetectionValidator):
              on_plot=self.on_plot,
          )  # pred
 
+     def save_one_txt(self, predn, pred_kpts, save_conf, shape, file):
+         """Save YOLO detections to a txt file in normalized coordinates in a specific format."""
+         from ultralytics.engine.results import Results
+
+         Results(
+             np.zeros((shape[0], shape[1]), dtype=np.uint8),
+             path=None,
+             names=self.names,
+             boxes=predn[:, :6],
+             keypoints=pred_kpts,
+         ).save_txt(file, save_conf=save_conf)
+
      def pred_to_json(self, predn, filename):
          """Converts YOLO predictions to COCO JSON format."""
          stem = Path(filename).stem
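Across the detect, OBB, pose and (below) segment validators, the rewritten `save_one_txt` methods all delegate to `Results.save_txt` instead of hand-formatting lines. A hedged, self-contained sketch of that pattern outside a validator; the detections tensor and class names are made up for illustration:

```python
import numpy as np
import torch

from ultralytics.engine.results import Results

# Fake (x1, y1, x2, y2, conf, cls) detections on a 480x640 image, for illustration only.
predn = torch.tensor([[10.0, 20.0, 110.0, 220.0, 0.87, 0.0]])
shape = (480, 640)  # (height, width) of the original image

# A blank uint8 canvas of the original image size is enough for Results to
# normalize the boxes; no real pixel data is needed just to write the txt file.
Results(
    np.zeros((shape[0], shape[1]), dtype=np.uint8),
    path=None,
    names={0: "person"},
    boxes=predn[:, :6],
).save_txt("predictions.txt", save_conf=True)
```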
ultralytics/models/yolo/segment/val.py CHANGED
@@ -48,9 +48,8 @@ class SegmentationValidator(DetectionValidator):
          self.plot_masks = []
          if self.args.save_json:
              check_requirements("pycocotools>=2.0.6")
-             self.process = ops.process_mask_upsample  # more accurate
-         else:
-             self.process = ops.process_mask  # faster
+         # more accurate vs faster
+         self.process = ops.process_mask_native if self.args.save_json or self.args.save_txt else ops.process_mask
          self.stats = dict(tp_m=[], tp=[], conf=[], pred_cls=[], target_cls=[], target_img=[])
 
      def get_desc(self):
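On this replacement: the validator now chooses between two helpers that already exist in `ultralytics.utils.ops`. As the "more accurate vs faster" comment suggests, `process_mask_native` rescales the prototype masks to the original image size before cropping (slower, sharper, used when masks are exported to JSON/TXT), while `process_mask` stays at prototype resolution (faster, sufficient for plain metric computation). A small hedged sketch of the selection logic in isolation:

```python
from ultralytics.utils import ops

def choose_mask_processor(save_json: bool, save_txt: bool):
    """Mirror of the new one-liner: prefer the more accurate native path when
    masks will be exported (JSON/TXT), otherwise keep the faster default."""
    return ops.process_mask_native if (save_json or save_txt) else ops.process_mask

print(choose_mask_processor(save_json=True, save_txt=False).__name__)   # process_mask_native
print(choose_mask_processor(save_json=False, save_txt=False).__name__)  # process_mask
```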
@@ -148,14 +147,23 @@ class SegmentationValidator(DetectionValidator):
 
              # Save
              if self.args.save_json:
-                 pred_masks = ops.scale_image(
-                     pred_masks.permute(1, 2, 0).contiguous().cpu().numpy(),
+                 self.pred_to_json(
+                     predn,
+                     batch["im_file"][si],
+                     ops.scale_image(
+                         pred_masks.permute(1, 2, 0).contiguous().cpu().numpy(),
+                         pbatch["ori_shape"],
+                         ratio_pad=batch["ratio_pad"][si],
+                     ),
+                 )
+             if self.args.save_txt:
+                 self.save_one_txt(
+                     predn,
+                     pred_masks,
+                     self.args.save_conf,
                      pbatch["ori_shape"],
-                     ratio_pad=batch["ratio_pad"][si],
+                     self.save_dir / "labels" / f'{Path(batch["im_file"][si]).stem}.txt',
                  )
-             self.pred_to_json(predn, batch["im_file"][si], pred_masks)
-             # if self.args.save_txt:
-             #     save_one_txt(predn, save_conf, shape, file=save_dir / 'labels' / f'{path.stem}.txt')
 
      def finalize_metrics(self, *args, **kwargs):
          """Sets speed and confusion matrix for evaluation metrics."""
@@ -235,6 +243,18 @@ class SegmentationValidator(DetectionValidator):
          )  # pred
          self.plot_masks.clear()
 
+     def save_one_txt(self, predn, pred_masks, save_conf, shape, file):
+         """Save YOLO detections to a txt file in normalized coordinates in a specific format."""
+         from ultralytics.engine.results import Results
+
+         Results(
+             np.zeros((shape[0], shape[1]), dtype=np.uint8),
+             path=None,
+             names=self.names,
+             boxes=predn[:, :6],
+             masks=pred_masks,
+         ).save_txt(file, save_conf=save_conf)
+
      def pred_to_json(self, predn, filename, pred_masks):
          """
          Save one JSON result.
ultralytics/nn/modules/block.py CHANGED
@@ -855,7 +855,7 @@ class Attention(nn.Module):
          self.head_dim = dim // num_heads
          self.key_dim = int(self.head_dim * attn_ratio)
          self.scale = self.key_dim**-0.5
-         nh_kd = nh_kd = self.key_dim * num_heads
+         nh_kd = self.key_dim * num_heads
          h = dim + nh_kd * 2
          self.qkv = Conv(dim, h, 1, act=False)
          self.proj = Conv(dim, dim, 1, act=False)
ultralytics/solutions/streamlit_inference.py CHANGED
@@ -99,24 +99,26 @@ def inference():
 
      stop_button = st.button("Stop")  # Button to stop the inference
 
-     prev_time = 0
      while videocapture.isOpened():
          success, frame = videocapture.read()
          if not success:
              st.warning("Failed to read frame from webcam. Please make sure the webcam is connected properly.")
              break
 
-         curr_time = time.time()
-         fps = 1 / (curr_time - prev_time)
-         prev_time = curr_time
+         prev_time = time.time()
 
          # Store model predictions
-         if enable_trk:
+         if enable_trk == "Yes":
              results = model.track(frame, conf=conf, iou=iou, classes=selected_ind, persist=True)
          else:
              results = model(frame, conf=conf, iou=iou, classes=selected_ind)
          annotated_frame = results[0].plot()  # Add annotations on frame
 
+         # Calculate model FPS
+         curr_time = time.time()
+         fps = 1 / (curr_time - prev_time)
+         prev_time = curr_time
+
          # display frame
          org_frame.image(frame, channels="BGR")
          ann_frame.image(annotated_frame, channels="BGR")
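Two behaviour notes on this hunk, then a hedged sketch of the corrected timing pattern: the FPS counter now brackets only the model call rather than the whole loop iteration (which previously included Streamlit rendering and the first iteration's bogus `prev_time = 0`), and `enable_trk` is compared against the string "Yes" because the sidebar radio widget returns the selected option's label rather than a boolean. The function below is a stand-in, not the app's real model call:

```python
import time

def fake_model(frame):
    """Stand-in for model(frame); sleeps to simulate ~20 ms of inference latency."""
    time.sleep(0.02)
    return frame

frame = "placeholder-frame"
prev_time = time.time()        # start the clock immediately before inference
annotated = fake_model(frame)  # only the model call is timed
curr_time = time.time()
fps = 1 / (curr_time - prev_time)
print(f"model-only FPS: {fps:.1f}")
```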
ultralytics/utils/ops.py CHANGED
@@ -652,27 +652,6 @@ def crop_mask(masks, boxes):
      return masks * ((r >= x1) * (r < x2) * (c >= y1) * (c < y2))
 
 
- def process_mask_upsample(protos, masks_in, bboxes, shape):
-     """
-     Takes the output of the mask head, and applies the mask to the bounding boxes. This produces masks of higher quality
-     but is slower.
-
-     Args:
-         protos (torch.Tensor): [mask_dim, mask_h, mask_w]
-         masks_in (torch.Tensor): [n, mask_dim], n is number of masks after nms
-         bboxes (torch.Tensor): [n, 4], n is number of masks after nms
-         shape (tuple): the size of the input image (h,w)
-
-     Returns:
-         (torch.Tensor): The upsampled masks.
-     """
-     c, mh, mw = protos.shape  # CHW
-     masks = (masks_in @ protos.float().view(c, -1)).view(-1, mh, mw)
-     masks = F.interpolate(masks[None], shape, mode="bilinear", align_corners=False)[0]  # CHW
-     masks = crop_mask(masks, bboxes)  # CHW
-     return masks.gt_(0.0)
-
-
 def process_mask(protos, masks_in, bboxes, shape, upsample=False):
     """
     Apply masks to bounding boxes using the output of the mask head.
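`process_mask_upsample` is deleted outright here; within the library, its only remaining caller (the segmentation validator above) now uses `ops.process_mask_native` instead. If external code still imports the removed helper, its old body from this diff can be kept as a local shim; a sketch, assuming `torch` and `ultralytics` are installed:

```python
import torch
import torch.nn.functional as F

from ultralytics.utils.ops import crop_mask

def process_mask_upsample_shim(protos, masks_in, bboxes, shape):
    """Local copy of the removed helper, reconstructed from the deleted lines above."""
    c, mh, mw = protos.shape  # CHW
    masks = (masks_in @ protos.float().view(c, -1)).view(-1, mh, mw)
    masks = F.interpolate(masks[None], shape, mode="bilinear", align_corners=False)[0]  # upsample to image size
    masks = crop_mask(masks, bboxes)  # zero out everything outside each box
    return masks.gt_(0.0)  # binarize
```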
ultralytics-8.2.58.dist-info/METADATA → ultralytics-8.2.60.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ultralytics
- Version: 8.2.58
+ Version: 8.2.60
  Summary: Ultralytics YOLOv8 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
  Author: Glenn Jocher, Ayush Chaurasia, Jing Qiu
  Maintainer: Glenn Jocher, Ayush Chaurasia, Jing Qiu
ultralytics-8.2.58.dist-info/RECORD → ultralytics-8.2.60.dist-info/RECORD CHANGED
@@ -8,7 +8,7 @@ tests/test_exports.py,sha256=Uezf3OatpPHlo5qoPw-2kqkZxuMCF9L4XF2riD4vmII,8225
  tests/test_integrations.py,sha256=xglcfMPjfVh346PV8WTpk6tBxraCXEFJEQyyJMr5tyU,6064
  tests/test_python.py,sha256=qhtSQ7NDfBChsVUxeSwfUIkoKq0S1Z-Rd9_MP023Y5k,21794
  tests/test_solutions.py,sha256=EACnPXbeJe2aVTOKfqMk5jclKKCWCVgFEzjpR6y7Sh8,3304
- ultralytics/__init__.py,sha256=Cfjin2MEmuwjjw4wyXtKmTKRpM_6SD6i4baqR34duUs,694
+ ultralytics/__init__.py,sha256=BWfmMROCBEF419nKyVBMe6PuaVj7Z-EM2uJegbdGZdg,694
  ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
  ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
  ultralytics/cfg/__init__.py,sha256=-3FW9UuCjhvWw0OFWbiXHWMqujOvBX428-NgSMFG0sQ,26198
@@ -98,7 +98,7 @@ ultralytics/data/explorer/utils.py,sha256=EvvukQiQUTBrsZznmMnyEX2EqTuwZo_Geyc8yf
  ultralytics/data/explorer/gui/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
  ultralytics/data/explorer/gui/dash.py,sha256=CPlFIIhf53j_YVAqealsC3AbcztdPqZxfniQcBnlKK4,10042
  ultralytics/engine/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
- ultralytics/engine/exporter.py,sha256=yV5DKjz5DZ6BrW8mOC5Nb5eDcuCc93Ft-RQwJ21xVZs,58729
+ ultralytics/engine/exporter.py,sha256=mJqo3TbYuVcNA26rN5Fc57a1uVAqYfT1P3GSSE5k4rU,58741
  ultralytics/engine/model.py,sha256=OvQsoANg5oyN3k3K-ppa4KrIqPi96hvfGcjqd-TU5l0,39215
  ultralytics/engine/predictor.py,sha256=W58kDCFH2AfoFzpGbos3k8zUEVsLunBuM8sc2B64rPY,17449
  ultralytics/engine/results.py,sha256=5MevvBz0E-cpDf55FqweInlKdcQPb7sz0EgZSROJqw4,35817
@@ -148,19 +148,19 @@ ultralytics/models/yolo/classify/val.py,sha256=MXdtWrBYVpfFuPfFPOTLKa_wBdTIA4dBZ
  ultralytics/models/yolo/detect/__init__.py,sha256=JR8gZJWn7wMBbh-0j_073nxJVZTMFZVWTOG5Wnvk6w0,229
  ultralytics/models/yolo/detect/predict.py,sha256=_a9vH3DmKFY6eeztFTdj3nkfu_MKG6n7zb5rRKGjs9I,1510
  ultralytics/models/yolo/detect/train.py,sha256=8Ulq1SPNLrkOqXj0Yt5zNR1c_Xl_QnOjllCdqBHUMds,6353
- ultralytics/models/yolo/detect/val.py,sha256=BJpA37JA-RBCa5RsUtQlB7N69HVrFAmPGA9jKpboAf8,14981
+ ultralytics/models/yolo/detect/val.py,sha256=WaCGB_B_TTIbeR8ZxKoC2YJrPdIgFJ-fP8EI7SoE4NA,15128
  ultralytics/models/yolo/obb/__init__.py,sha256=txWbPGLY1_M7ZwlLQjrwGjTBOlsv9P3yk5ZEgysTinU,193
  ultralytics/models/yolo/obb/predict.py,sha256=prfDzhwuVHKF6CRwnFVBA-YFI5q7U7NEQwITGHmB2Ow,2037
  ultralytics/models/yolo/obb/train.py,sha256=tWpFtcasMwWq1A_9VdbEg5pIVHwuWwmeLOyj-S4_1sY,1473
- ultralytics/models/yolo/obb/val.py,sha256=YMFZ79aaW45LdPBrQwRACrxbOI9cH9M_C_ibwi9PeIs,9346
+ ultralytics/models/yolo/obb/val.py,sha256=fflxcpdAAYJBzao1TlEbNY0rWl-9irmCIdrXcAbvkQY,9303
  ultralytics/models/yolo/pose/__init__.py,sha256=OGvxN3LqJot2h8GX1csJ1KErsHnDKsm33Ce6ZBU9Lr4,199
  ultralytics/models/yolo/pose/predict.py,sha256=illk4qyZvybc_XMo9TKT54FIkizx91MYviE5c5OwBTQ,2404
  ultralytics/models/yolo/pose/train.py,sha256=ki8bkT8WfIFjTKf1ofeRDqeIqmk6A8a7AFog7nM-otM,2926
- ultralytics/models/yolo/pose/val.py,sha256=VEYKClcZSt_RcAArAHn_nohuh7fW5rxulra675RFgGM,11721
+ ultralytics/models/yolo/pose/val.py,sha256=QnPrSnlHHN7UVoZ6tgtRjuJjwOZY8l-MEYxuQPYvJ-4,12364
  ultralytics/models/yolo/segment/__init__.py,sha256=mSbKOE8BnHL7PL2nCOVG7dRM7CI6hJezFPPwZFjEmy8,247
  ultralytics/models/yolo/segment/predict.py,sha256=xtA0ZZyuh9WVpX7zZFdAeCkWnxhQ30ADEzSud_H6N7E,2491
  ultralytics/models/yolo/segment/train.py,sha256=aOQpDIptZfKSl9mFa6B-3W3QccMRlmBINBkI9K8-3sQ,2298
- ultralytics/models/yolo/segment/val.py,sha256=wH5H0NMjFzZeRcuzkspOqohhTsqBI00kmLkyGhpJA7o,13327
+ ultralytics/models/yolo/segment/val.py,sha256=kPnlAd5aA6kHsIPp5UCsGTy-ai5kyKx2QggVGCH_H6U,14034
  ultralytics/models/yolo/world/__init__.py,sha256=3VTH0q4NOt2EWRom15yCymvmvm0Etp2bmETJUhsVTBI,103
  ultralytics/models/yolo/world/train.py,sha256=acYN2-onL69LrL4av6_hY2r5AY0urC0WViDstn7npfI,3686
  ultralytics/models/yolo/world/train_world.py,sha256=IsnCEVt6DcM9lUskCKmIN-M8MM79xLpwTRqRoAHUnZ4,4857
@@ -168,7 +168,7 @@ ultralytics/nn/__init__.py,sha256=4BPLHY89xEM_al5uK0aOmFgiML6CMGEZbezxOvTjOEs,58
  ultralytics/nn/autobackend.py,sha256=vtCvcYTyF2l4KeG5N-PD8FhmPx9pca92mmGaHdQuUfE,31258
  ultralytics/nn/tasks.py,sha256=jGAauQZOOSXKsxAKad_HBNfLleOoTS7T9XSlOZN8v7Y,45856
  ultralytics/nn/modules/__init__.py,sha256=mARjWk83WPYF5phXhXfPbAu2ZohtdbHdi5zzoxyMubo,2553
- ultralytics/nn/modules/block.py,sha256=DIXowCZn_Luc5VgGQEGXi34fqeiz_bhaNT48zEzguDM,34491
+ ultralytics/nn/modules/block.py,sha256=tLNMDomPgsUc8yP7HKvtuaSAMJxPomRmO9WJoLb6hAY,34483
  ultralytics/nn/modules/conv.py,sha256=Ywe87IhuaS22mR2JJ9xjnW8Sb-m7WTjxuqIxV_Dv8lI,12722
  ultralytics/nn/modules/head.py,sha256=6VV6t2OJ_t9fCdhFxzcMcirp6lonv-xSm0o2yFghZZ0,26747
  ultralytics/nn/modules/transformer.py,sha256=AxD9uURpCl-EqvXe3DiG6JW-pBzB16G-AahLdZ7yayo,17909
@@ -182,7 +182,7 @@ ultralytics/solutions/object_counter.py,sha256=C80ET_-tIKv7pfshO8DFwimCieBHV4Ns7
  ultralytics/solutions/parking_management.py,sha256=E55v0c-AfKbDNfEMng2UJapktDnYJHcRKC6uAImg7kM,9928
  ultralytics/solutions/queue_management.py,sha256=CxFvHwSHq8OZ5aW7x2F10jcjkGAQ3LSJ5z69zusRVbs,6781
  ultralytics/solutions/speed_estimation.py,sha256=kjqMSHGTHMZaNgTKNKWULxnJQNsvhq4WMUphMVlBjsc,6768
- ultralytics/solutions/streamlit_inference.py,sha256=wmte67QJAtTlHoEqlJxncWIHEiENpNLv9qOMNVGEUXo,5508
+ ultralytics/solutions/streamlit_inference.py,sha256=d4LIpexPv31o8WQ5xXUvUlZmEwmKlJQD3PdrMIJ8ISY,5566
  ultralytics/trackers/__init__.py,sha256=j72IgH2dZHQArMPK4YwcV5ieIw94fYvlGdQjB9cOQKw,227
  ultralytics/trackers/basetrack.py,sha256=-vBDD-Q9lsxfTMK2w9kuqWGrYbRMmaBCCEbGGyR53gE,3675
  ultralytics/trackers/bot_sort.py,sha256=39AvhYVbT7izF3--rX_e6Lhgb5czTA23gw6AgnNcRds,8601
@@ -203,7 +203,7 @@ ultralytics/utils/files.py,sha256=TVfY0Wi5IsUc4YdsDzC0dAg-jAP5exYvwqB3VmXhDLY,67
  ultralytics/utils/instance.py,sha256=5daM5nkxBv9hr5QzyII8zmuFj24hHuNtcr4EMCHAtpY,15654
  ultralytics/utils/loss.py,sha256=tAAi_l0SAtbtqT8AQSBSCvEyv342-r04H2KcSF1Yk_w,33795
  ultralytics/utils/metrics.py,sha256=C7qFuZjwGqbsG4sggm_qfm8gVuBUwHg_Fhxj08b6NfU,53671
- ultralytics/utils/ops.py,sha256=Jlb0YBkN_SMVT2AjKPEjxgOtgnj7i7HTBh9FEwpoprU,33509
+ ultralytics/utils/ops.py,sha256=CQeMDVV4f9QWvYPNvNJu7GJAW2-XG93D7ee7yFY0vsI,32688
  ultralytics/utils/patches.py,sha256=SgMqeMsq2K6JoBJP1NplXMl9C6rK0JeJUChjBrJOneo,2750
  ultralytics/utils/plotting.py,sha256=5HRfiG2dklWZJheTxGTy0gFRk39utHcZbMJl7j2hnMI,55522
  ultralytics/utils/tal.py,sha256=xuIyryUjaaYHkHPG9GvBwh1xxN2Hq4y3hXOtuERehwY,16017
@@ -221,9 +221,9 @@ ultralytics/utils/callbacks/neptune.py,sha256=5Z3ua5YBTUS56FH8VQKQG1aaIo9fH8GEyz
  ultralytics/utils/callbacks/raytune.py,sha256=ODVYzy-CoM4Uge0zjkh3Hnh9nF2M0vhDrSenXnvcizw,705
  ultralytics/utils/callbacks/tensorboard.py,sha256=QEgOVhUqY9akOs5TJIwz1Rvn6l32xWLpOxlwEyWF0B8,4136
  ultralytics/utils/callbacks/wb.py,sha256=9-fjQIdLjr3b73DTE3rHO171KvbH1VweJ-bmbv-rqTw,6747
- ultralytics-8.2.58.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
- ultralytics-8.2.58.dist-info/METADATA,sha256=-4-9mqwsiCumLVi2LWv_F6QwJ9lZFJUdvyXbFyTSd08,41217
- ultralytics-8.2.58.dist-info/WHEEL,sha256=Z4pYXqR_rTB7OWNDYFOm1qRk0RX6GFP2o8LgvP453Hk,91
- ultralytics-8.2.58.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
- ultralytics-8.2.58.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
- ultralytics-8.2.58.dist-info/RECORD,,
+ ultralytics-8.2.60.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ ultralytics-8.2.60.dist-info/METADATA,sha256=qWBL0aATA3aVIF_4uXevFxxw1rdJmYCrobrr8_y16W4,41217
+ ultralytics-8.2.60.dist-info/WHEEL,sha256=-oYQCr74JF3a37z2nRlQays_SX2MqOANoqVjBBAP2yE,91
+ ultralytics-8.2.60.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ ultralytics-8.2.60.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ ultralytics-8.2.60.dist-info/RECORD,,
ultralytics-8.2.58.dist-info/WHEEL → ultralytics-8.2.60.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (70.3.0)
+ Generator: setuptools (71.0.3)
  Root-Is-Purelib: true
  Tag: py3-none-any
 