ultralytics 8.2.25__py3-none-any.whl → 8.2.26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

ultralytics/__init__.py CHANGED
@@ -1,6 +1,6 @@
 # Ultralytics YOLO 🚀, AGPL-3.0 license
 
-__version__ = "8.2.25"
+__version__ = "8.2.26"
 
 import os
 
ultralytics/engine/exporter.py CHANGED
@@ -384,9 +384,7 @@ class Exporter:
         """YOLOv8 ONNX export."""
         requirements = ["onnx>=1.12.0"]
         if self.args.simplify:
-            requirements += ["onnxsim>=0.4.33", "onnxruntime-gpu" if torch.cuda.is_available() else "onnxruntime"]
-            if ARM64:
-                check_requirements("cmake")  # 'cmake' is needed to build onnxsim on aarch64
+            requirements += ["cmake", "onnxsim>=0.4.33", "onnxruntime" + ("-gpu" if torch.cuda.is_available() else "")]
         check_requirements(requirements)
         import onnx  # noqa
 
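Note on the hunk above: the ARM64-only cmake check is folded into a single requirements list, and the onnxruntime flavour is now chosen by string concatenation. A minimal standalone sketch of the same selection logic, assuming only that torch is installed (the helper name below is illustrative, not part of the package):

    import torch

    def onnx_simplify_requirements():
        """Illustrative only: mirror the dependency selection used when simplify=True."""
        reqs = ["onnx>=1.12.0", "cmake", "onnxsim>=0.4.33"]
        # Pick the GPU build of onnxruntime only when CUDA is visible to torch.
        reqs.append("onnxruntime" + ("-gpu" if torch.cuda.is_available() else ""))
        return reqs

    print(onnx_simplify_requirements())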
@@ -815,11 +813,11 @@ class Exporter:
         version = ">=2.0.0"
         check_requirements(f"tensorflow{suffix}{version}")
         import tensorflow as tf  # noqa
-        if ARM64:
-            check_requirements("cmake")  # 'cmake' is needed to build onnxsim on aarch64
         check_requirements(
             (
-                "keras",
+                "cmake",  # 'cmake' is needed to build onnxsim on aarch64 and Conda runners
+                "keras",  # required by onnx2tf package
+                "tf_keras",  # required by onnx2tf package
                 "onnx>=1.12.0",
                 "onnx2tf>1.17.5,<=1.22.3",
                 "sng4onnx>=1.0.1",
ultralytics/models/yolo/detect/val.py CHANGED
@@ -32,6 +32,7 @@ class DetectionValidator(BaseValidator):
         """Initialize detection model with necessary variables and settings."""
         super().__init__(dataloader, save_dir, pbar, args, _callbacks)
         self.nt_per_class = None
+        self.nt_per_image = None
         self.is_coco = False
         self.is_lvis = False
         self.class_map = None
@@ -77,7 +78,7 @@ class DetectionValidator(BaseValidator):
         self.confusion_matrix = ConfusionMatrix(nc=self.nc, conf=self.args.conf)
         self.seen = 0
         self.jdict = []
-        self.stats = dict(tp=[], conf=[], pred_cls=[], target_cls=[])
+        self.stats = dict(tp=[], conf=[], pred_cls=[], target_cls=[], target_img=[])
 
     def get_desc(self):
         """Return a formatted string summarizing class metrics of YOLO model."""
@@ -130,6 +131,7 @@ class DetectionValidator(BaseValidator):
             cls, bbox = pbatch.pop("cls"), pbatch.pop("bbox")
             nl = len(cls)
             stat["target_cls"] = cls
+            stat["target_img"] = cls.unique()
             if npr == 0:
                 if nl:
                     for k in self.stats.keys():
@@ -168,11 +170,11 @@ class DetectionValidator(BaseValidator):
     def get_stats(self):
         """Returns metrics statistics and results dictionary."""
         stats = {k: torch.cat(v, 0).cpu().numpy() for k, v in self.stats.items()}  # to numpy
+        self.nt_per_class = np.bincount(stats["target_cls"].astype(int), minlength=self.nc)
+        self.nt_per_image = np.bincount(stats["target_img"].astype(int), minlength=self.nc)
+        stats.pop("target_img", None)
         if len(stats) and stats["tp"].any():
             self.metrics.process(**stats)
-            self.nt_per_class = np.bincount(
-                stats["target_cls"].astype(int), minlength=self.nc
-            )  # number of targets per class
         return self.metrics.results_dict
 
     def print_results(self):
@@ -185,7 +187,9 @@ class DetectionValidator(BaseValidator):
         # Print results per class
         if self.args.verbose and not self.training and self.nc > 1 and len(self.stats):
             for i, c in enumerate(self.metrics.ap_class_index):
-                LOGGER.info(pf % (self.names[c], self.seen, self.nt_per_class[c], *self.metrics.class_result(i)))
+                LOGGER.info(
+                    pf % (self.names[c], self.nt_per_image[c], self.nt_per_class[c], *self.metrics.class_result(i))
+                )
 
         if self.args.plots:
             for normalize in True, False:
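Note on the two hunks above: the validator now records cls.unique() per image as target_img, and get_stats() derives both per-class instance counts (nt_per_class) and per-class image counts (nt_per_image) with np.bincount, so the per-class printout reports images containing a class rather than total images seen. A small self-contained sketch of that counting, using toy data rather than anything from the package:

    import numpy as np
    import torch

    nc = 3  # number of classes
    # Ground-truth class ids for two images (toy data).
    per_image_cls = [torch.tensor([0, 0, 2]), torch.tensor([2, 2, 1])]

    target_cls = torch.cat(per_image_cls)                        # one entry per instance
    target_img = torch.cat([c.unique() for c in per_image_cls])  # one entry per class per image

    nt_per_class = np.bincount(target_cls.numpy().astype(int), minlength=nc)  # -> [2 1 3]
    nt_per_image = np.bincount(target_img.numpy().astype(int), minlength=nc)  # -> [1 1 2]
    print(nt_per_class, nt_per_image)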
ultralytics/models/yolo/pose/val.py CHANGED
@@ -81,7 +81,7 @@ class PoseValidator(DetectionValidator):
         is_pose = self.kpt_shape == [17, 3]
         nkpt = self.kpt_shape[0]
         self.sigma = OKS_SIGMA if is_pose else np.ones(nkpt) / nkpt
-        self.stats = dict(tp_p=[], tp=[], conf=[], pred_cls=[], target_cls=[])
+        self.stats = dict(tp_p=[], tp=[], conf=[], pred_cls=[], target_cls=[], target_img=[])
 
     def _prepare_batch(self, si, batch):
         """Prepares a batch for processing by converting keypoints to float and moving to device."""
@@ -118,6 +118,7 @@ class PoseValidator(DetectionValidator):
             cls, bbox = pbatch.pop("cls"), pbatch.pop("bbox")
             nl = len(cls)
             stat["target_cls"] = cls
+            stat["target_img"] = cls.unique()
             if npr == 0:
                 if nl:
                     for k in self.stats.keys():
ultralytics/models/yolo/segment/val.py CHANGED
@@ -51,7 +51,7 @@ class SegmentationValidator(DetectionValidator):
             self.process = ops.process_mask_upsample  # more accurate
         else:
             self.process = ops.process_mask  # faster
-        self.stats = dict(tp_m=[], tp=[], conf=[], pred_cls=[], target_cls=[])
+        self.stats = dict(tp_m=[], tp=[], conf=[], pred_cls=[], target_cls=[], target_img=[])
 
     def get_desc(self):
         """Return a formatted description of evaluation metrics."""
@@ -112,6 +112,7 @@ class SegmentationValidator(DetectionValidator):
             cls, bbox = pbatch.pop("cls"), pbatch.pop("bbox")
             nl = len(cls)
             stat["target_cls"] = cls
+            stat["target_img"] = cls.unique()
             if npr == 0:
                 if nl:
                     for k in self.stats.keys():
ultralytics/nn/tasks.py CHANGED
@@ -425,11 +425,11 @@ class ClassificationModel(BaseModel):
         elif isinstance(m, nn.Sequential):
             types = [type(x) for x in m]
             if nn.Linear in types:
-                i = types.index(nn.Linear)  # nn.Linear index
+                i = len(types) - 1 - types[::-1].index(nn.Linear)  # last nn.Linear index
                 if m[i].out_features != nc:
                     m[i] = nn.Linear(m[i].in_features, nc)
             elif nn.Conv2d in types:
-                i = types.index(nn.Conv2d)  # nn.Conv2d index
+                i = len(types) - 1 - types[::-1].index(nn.Conv2d)  # last nn.Conv2d index
                 if m[i].out_channels != nc:
                     m[i] = nn.Conv2d(m[i].in_channels, nc, m[i].kernel_size, m[i].stride, bias=m[i].bias is not None)
 
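Note on the hunk above: the first-match index is swapped for the last matching layer, so the final classification head of an nn.Sequential is reshaped instead of an earlier nn.Linear or nn.Conv2d. A quick illustration of the reversed-index idiom on a toy module (not package code):

    import torch.nn as nn

    m = nn.Sequential(nn.Linear(8, 16), nn.ReLU(), nn.Linear(16, 10))
    types = [type(x) for x in m]

    first = types.index(nn.Linear)                         # 0 -> would patch the wrong layer
    last = len(types) - 1 - types[::-1].index(nn.Linear)   # 2 -> the actual output head
    print(first, last)  # 0 2

    nc = 5
    if m[last].out_features != nc:
        m[last] = nn.Linear(m[last].in_features, nc)  # reshape only the final head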
ultralytics/solutions/analytics.py CHANGED
@@ -1,5 +1,6 @@
 # Ultralytics YOLO 🚀, AGPL-3.0 license
 
+import warnings
 from itertools import cycle
 
 import cv2
@@ -27,6 +28,7 @@ class Analytics:
         fontsize=13,
         view_img=False,
         save_img=True,
+        max_points=50,
     ):
         """
         Initialize the Analytics class with various chart types.
@@ -45,6 +47,7 @@ class Analytics:
             fontsize (int): Font size for chart text.
             view_img (bool): Whether to display the image.
             save_img (bool): Whether to save the image.
+            max_points (int): Specifies when to remove the oldest points in a graph for multiple lines.
         """
 
         self.bg_color = bg_color
@@ -53,12 +56,14 @@ class Analytics:
         self.save_img = save_img
         self.title = title
         self.writer = writer
+        self.max_points = max_points
 
         # Set figure size based on image shape
         figsize = (im0_shape[0] / 100, im0_shape[1] / 100)
 
         if type == "line":
             # Initialize line plot
+            self.lines = {}
             fig = Figure(facecolor=self.bg_color, figsize=figsize)
             self.canvas = FigureCanvas(fig)
             self.ax = fig.add_subplot(111, facecolor=self.bg_color)
@@ -112,9 +117,53 @@ class Analytics:
         self.ax.autoscale_view()
         self.canvas.draw()
         im0 = np.array(self.canvas.renderer.buffer_rgba())
-        im0 = cv2.cvtColor(im0[:, :, :3], cv2.COLOR_RGBA2BGR)
+        self.write_and_display_line(im0)
 
-        # Display and save the updated graph
+    def update_multiple_lines(self, counts_dict, labels_list, frame_number):
+        """
+        Update the line graph with multiple classes.
+
+        Args:
+            counts_dict (int): Dictionary include each class counts.
+            labels_list (int): list include each classes names.
+            frame_number (int): The current frame number.
+        """
+        warnings.warn("Display is not supported for multiple lines, output will be stored normally!")
+        for obj in labels_list:
+            if obj not in self.lines:
+                (line,) = self.ax.plot([], [], label=obj, marker="o", markersize=15)
+                self.lines[obj] = line
+
+            x_data = self.lines[obj].get_xdata()
+            y_data = self.lines[obj].get_ydata()
+
+            # Remove the initial point if the number of points exceeds max_points
+            if len(x_data) >= self.max_points:
+                x_data = np.delete(x_data, 0)
+                y_data = np.delete(y_data, 0)
+
+            x_data = np.append(x_data, float(frame_number))  # Ensure frame_number is converted to float
+            y_data = np.append(y_data, float(counts_dict.get(obj, 0)))  # Ensure total_count is converted to float
+            self.lines[obj].set_data(x_data, y_data)
+
+        self.ax.relim()
+        self.ax.autoscale_view()
+        self.ax.legend()
+        self.canvas.draw()
+
+        im0 = np.array(self.canvas.renderer.buffer_rgba())
+        self.view_img = False  # for multiple line view_img not supported yet, coming soon!
+        self.write_and_display_line(im0)
+
+    def write_and_display_line(self, im0):
+        """
+        Write and display the line graph
+        Args:
+            im0 (ndarray): Image for processing
+        """
+
+        # convert image to BGR format
+        im0 = cv2.cvtColor(im0[:, :, :3], cv2.COLOR_RGBA2BGR)
         cv2.imshow(self.title, im0) if self.view_img else None
         self.writer.write(im0) if self.save_img else None
 
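Note on the hunk above: update_multiple_lines() keeps one matplotlib Line2D per label and trims each series to max_points by dropping the oldest sample before appending the new one. A standalone sketch of that sliding-window update, using a hypothetical helper with no Analytics dependency:

    import numpy as np

    def append_with_limit(x_data, y_data, x, y, max_points=50):
        """Drop the oldest point once the series reaches max_points, then append (x, y)."""
        if len(x_data) >= max_points:
            x_data = np.delete(x_data, 0)
            y_data = np.delete(y_data, 0)
        return np.append(x_data, float(x)), np.append(y_data, float(y))

    x, y = np.array([]), np.array([])
    for frame in range(60):
        x, y = append_with_limit(x, y, frame, frame % 7, max_points=50)
    print(len(x))  # 50 -> only the most recent 50 frames are kept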
ultralytics/utils/checks.py CHANGED
@@ -33,6 +33,7 @@ from ultralytics.utils import (
     ROOT,
     TORCHVISION_VERSION,
     USER_CONFIG_DIR,
+    Retry,
     SimpleNamespace,
     ThreadingLocked,
     TryExcept,
@@ -381,6 +382,11 @@ def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=()
         except (AssertionError, metadata.PackageNotFoundError):
             pkgs.append(r)
 
+    @Retry(times=2, delay=1)
+    def attempt_install(packages, commands):
+        """Attempt pip install command with retries on failure."""
+        return subprocess.check_output(f"pip install --no-cache-dir {packages} {commands}", shell=True).decode()
+
     s = " ".join(f'"{x}"' for x in pkgs)  # console string
     if s:
         if install and AUTOINSTALL:  # check environment variable
@@ -389,7 +395,7 @@ def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=()
             try:
                 t = time.time()
                 assert ONLINE, "AutoUpdate skipped (offline)"
-                LOGGER.info(subprocess.check_output(f"pip install --no-cache-dir {s} {cmds}", shell=True).decode())
+                LOGGER.info(attempt_install(s, cmds))
                 dt = time.time() - t
                 LOGGER.info(
                     f"{prefix} AutoUpdate success ✅ {dt:.1f}s, installed {n} package{'s' * (n > 1)}: {pkgs}\n"
ultralytics-8.2.25.dist-info/METADATA → ultralytics-8.2.26.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ultralytics
-Version: 8.2.25
+Version: 8.2.26
 Summary: Ultralytics YOLOv8 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
 Author: Glenn Jocher, Ayush Chaurasia, Jing Qiu
 Maintainer: Glenn Jocher, Ayush Chaurasia, Jing Qiu
ultralytics-8.2.25.dist-info/RECORD → ultralytics-8.2.26.dist-info/RECORD RENAMED
@@ -7,7 +7,7 @@ tests/test_explorer.py,sha256=r1pWer2y290Y0DqsM-La7egfEY0497YCdC4rwq3URV4,2178
 tests/test_exports.py,sha256=TC4Ckp7OefOv4qS9NR2D1K7PQIf_P-vb_BelMmhqC48,7966
 tests/test_integrations.py,sha256=8Ru7GyKV8j44EEc8X9_E7q7aR4CTOIMPuSagXjSGUxw,5847
 tests/test_python.py,sha256=3qV963KPGGnYwSiEG5YcDf6g_ozo3NtQEjDDtH32rV4,20212
-ultralytics/__init__.py,sha256=LPJl-hE2E7lpDxTOSXHYyTV_K-w1XOP_Fn6Ez7-KJx4,694
+ultralytics/__init__.py,sha256=Wncc3o00fwuTHDNtKdox2p_ESH2FKRiR0VDemd1hJwM,694
 ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
 ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
 ultralytics/cfg/__init__.py,sha256=lR6jykSO_0cigsjrqSyFj_8JG_LvYi796viasyWhcfs,21358
@@ -88,7 +88,7 @@ ultralytics/data/explorer/utils.py,sha256=EvvukQiQUTBrsZznmMnyEX2EqTuwZo_Geyc8yf
 ultralytics/data/explorer/gui/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
 ultralytics/data/explorer/gui/dash.py,sha256=3mLrH0h-k_AthlgqVNXOHdlKoqjwNwFlnMYiMPAdL6Q,10059
 ultralytics/engine/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
-ultralytics/engine/exporter.py,sha256=IRGwdGG704QvU1VzOOxrV7QK9ptrTk8qU1hSHjdNTEI,58204
+ultralytics/engine/exporter.py,sha256=FbFO435RT1HIWFsbgdDnOmW0zdZLvl6ymTGSpgJjIpw,58169
 ultralytics/engine/model.py,sha256=IE6HE9VIzqO3DscxSLexub0LUR673eiPFrCPCt6ozEE,40103
 ultralytics/engine/predictor.py,sha256=wQRKdWGDTP5A6CS0gTC6U3RPDMhP3QkEzWSPm6eqCkU,17022
 ultralytics/engine/results.py,sha256=zRuEIrBtpoCQ3M6a_YscnyXrWSP-zpL3ACv0gTdrDaw,30987
@@ -138,7 +138,7 @@ ultralytics/models/yolo/classify/val.py,sha256=MXdtWrBYVpfFuPfFPOTLKa_wBdTIA4dBZ
 ultralytics/models/yolo/detect/__init__.py,sha256=JR8gZJWn7wMBbh-0j_073nxJVZTMFZVWTOG5Wnvk6w0,229
 ultralytics/models/yolo/detect/predict.py,sha256=_a9vH3DmKFY6eeztFTdj3nkfu_MKG6n7zb5rRKGjs9I,1510
 ultralytics/models/yolo/detect/train.py,sha256=8Ulq1SPNLrkOqXj0Yt5zNR1c_Xl_QnOjllCdqBHUMds,6353
-ultralytics/models/yolo/detect/val.py,sha256=2XFFH66HGN7ujLhtfIw9929Oq3y8rMdsN6bUNN-bcaM,14427
+ultralytics/models/yolo/detect/val.py,sha256=OmTQpPD7ffFVSRNoao7ULOrY8OYVaMxZjc93--kfI2E,14647
 ultralytics/models/yolo/obb/__init__.py,sha256=txWbPGLY1_M7ZwlLQjrwGjTBOlsv9P3yk5ZEgysTinU,193
 ultralytics/models/yolo/obb/predict.py,sha256=prfDzhwuVHKF6CRwnFVBA-YFI5q7U7NEQwITGHmB2Ow,2037
 ultralytics/models/yolo/obb/train.py,sha256=tWpFtcasMwWq1A_9VdbEg5pIVHwuWwmeLOyj-S4_1sY,1473
@@ -146,17 +146,17 @@ ultralytics/models/yolo/obb/val.py,sha256=tHoUDh-Pv95GEnQ73yzCAAxnTMNayv4yZg33hm
 ultralytics/models/yolo/pose/__init__.py,sha256=OGvxN3LqJot2h8GX1csJ1KErsHnDKsm33Ce6ZBU9Lr4,199
 ultralytics/models/yolo/pose/predict.py,sha256=illk4qyZvybc_XMo9TKT54FIkizx91MYviE5c5OwBTQ,2404
 ultralytics/models/yolo/pose/train.py,sha256=ki8bkT8WfIFjTKf1ofeRDqeIqmk6A8a7AFog7nM-otM,2926
-ultralytics/models/yolo/pose/val.py,sha256=w_VIKzGcj_0CRNObPqk0NnDOfRN-xl2C6uwpFOkJH3Q,10607
+ultralytics/models/yolo/pose/val.py,sha256=beoPPTWckvO7c1kWf2DbFjIN6IHcTV2hcB1rKvk0pwE,10668
 ultralytics/models/yolo/segment/__init__.py,sha256=mSbKOE8BnHL7PL2nCOVG7dRM7CI6hJezFPPwZFjEmy8,247
 ultralytics/models/yolo/segment/predict.py,sha256=xtA0ZZyuh9WVpX7zZFdAeCkWnxhQ30ADEzSud_H6N7E,2491
 ultralytics/models/yolo/segment/train.py,sha256=aOQpDIptZfKSl9mFa6B-3W3QccMRlmBINBkI9K8-3sQ,2298
-ultralytics/models/yolo/segment/val.py,sha256=njiF6RWddS-HOWxVvlk5PXRw6UOgEt_HEOZVPF7rruQ,11745
+ultralytics/models/yolo/segment/val.py,sha256=DxEpR0FaQePlOXb19-FO4G0Nl9rWf9smtAh9eH__2g0,11806
 ultralytics/models/yolo/world/__init__.py,sha256=3VTH0q4NOt2EWRom15yCymvmvm0Etp2bmETJUhsVTBI,103
 ultralytics/models/yolo/world/train.py,sha256=acYN2-onL69LrL4av6_hY2r5AY0urC0WViDstn7npfI,3686
 ultralytics/models/yolo/world/train_world.py,sha256=ICPsYNbuPkq_qf3FHl2YJ-q3g7ik0pI-zhMpLmHa5-4,4805
 ultralytics/nn/__init__.py,sha256=4BPLHY89xEM_al5uK0aOmFgiML6CMGEZbezxOvTjOEs,587
 ultralytics/nn/autobackend.py,sha256=6amaXnbDlvh0kTIbeHV3kIM6X7P1r0T3le1GPxIgkOs,30864
-ultralytics/nn/tasks.py,sha256=a3FSkIUErlE7qI506ye5vGggqzMxqXWDkIbbLD4AGyI,43623
+ultralytics/nn/tasks.py,sha256=JK-sKA0RWz612RpVfUI9zeevy4M7Fh6bysbana90wMs,43679
 ultralytics/nn/modules/__init__.py,sha256=KzLoyn2ldfReiQL8H8xsMC49Xvtb8Kv9ikE5Q3OBoAs,2326
 ultralytics/nn/modules/block.py,sha256=smIz3oNTDA7UKrAH5FfSMh08C12-avgWTeIkbgZIv18,25251
 ultralytics/nn/modules/conv.py,sha256=Ywe87IhuaS22mR2JJ9xjnW8Sb-m7WTjxuqIxV_Dv8lI,12722
@@ -165,7 +165,7 @@ ultralytics/nn/modules/transformer.py,sha256=AxD9uURpCl-EqvXe3DiG6JW-pBzB16G-Aah
 ultralytics/nn/modules/utils.py,sha256=779QnnKp9v8jv251ESduTXJ0ol8HkIOLbGQWwEGQjhU,3196
 ultralytics/solutions/__init__.py,sha256=S4m7p_rpg2pk9PdnqqD-6Sk--wDHxZSo7cUZjSwj_iQ,561
 ultralytics/solutions/ai_gym.py,sha256=HDzzvBVFqWgQw2IgtEx5Eo3tEKbFRY3gkiVqax-4j2w,4683
-ultralytics/solutions/analytics.py,sha256=l5FeVQAJYIKRAAWqT_lWPBbCHCuYGYdMn55UYVLmEf4,7094
+ultralytics/solutions/analytics.py,sha256=_gnK8xFjwUa0nyO7t9t6NAaBr86OFdLMIAxxDFHomoY,9062
 ultralytics/solutions/distance_calculation.py,sha256=pSIkyytHGRAaNzIrkkNkiOnSVWU1PYvURlCIV_jRORA,6505
 ultralytics/solutions/heatmap.py,sha256=AHXnmXhoQ95ph74zsdrvX_Lfy3wF0SsH0MIeTixE7Qg,10386
 ultralytics/solutions/object_counter.py,sha256=htcQGWJX1y-vXVV1yUiTDT3sm8ByItjSNfu2Rl2IEmk,10808
@@ -184,7 +184,7 @@ ultralytics/trackers/utils/matching.py,sha256=UxhSGa5pN6WoYwYSBAkkt-O7xMxUR47VuU
 ultralytics/utils/__init__.py,sha256=dlKr7P0h2Ez3Q-WLQ49p0jsjjWkKq3CRkhlCJLGKlMk,38620
 ultralytics/utils/autobatch.py,sha256=ygZ3f2ByIkcujB89ENcTnGWWnAQw5Pbg6nBuShg-5t4,3863
 ultralytics/utils/benchmarks.py,sha256=oCngvKzfZu4dFFd3U3ZcNR-BKM1kJLbWuR_egg_qSRw,23609
-ultralytics/utils/checks.py,sha256=XtUrZvw7_pUcSGAUCOtjqJ5KilZy6IXGHdBxG64WMV0,28172
+ultralytics/utils/checks.py,sha256=4OQkddqlxh6Lldvhr8YOpyqaLVCohgTvr0R15Uanzq4,28376
 ultralytics/utils/dist.py,sha256=3HeNbY2gp7vYhcvVhsrvTrQXpQmgT8tpmnzApf3eQRA,2267
 ultralytics/utils/downloads.py,sha256=cmO2Ev1DV1m_lYgQ2yGDG5xVRIBVS_z9nS_Frec_NeU,21496
 ultralytics/utils/errors.py,sha256=GqP_Jgj_n0paxn8OMhn3DTCgoNkB2WjUcUaqs-M6SQk,816
@@ -210,9 +210,9 @@ ultralytics/utils/callbacks/neptune.py,sha256=5Z3ua5YBTUS56FH8VQKQG1aaIo9fH8GEyz
 ultralytics/utils/callbacks/raytune.py,sha256=ODVYzy-CoM4Uge0zjkh3Hnh9nF2M0vhDrSenXnvcizw,705
 ultralytics/utils/callbacks/tensorboard.py,sha256=Z1veCVcn9THPhdplWuIzwlsW2yF7y-On9IZIk3khM0Y,4135
 ultralytics/utils/callbacks/wb.py,sha256=DViD0KeXH_i3eVT_CLR4bZFs1TMMUZBVBBYIS3aUfp0,6745
-ultralytics-8.2.25.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
-ultralytics-8.2.25.dist-info/METADATA,sha256=JQiMXANVtQaN0BvbDPxF1Lni9G8_GWW73gQaqi-4tWs,41200
-ultralytics-8.2.25.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-ultralytics-8.2.25.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
-ultralytics-8.2.25.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
-ultralytics-8.2.25.dist-info/RECORD,,
+ultralytics-8.2.26.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ultralytics-8.2.26.dist-info/METADATA,sha256=cWBae60M9_nsJ4ZUkX50l8a7GNojD-bZqkb4oQckp2M,41200
+ultralytics-8.2.26.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ultralytics-8.2.26.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ultralytics-8.2.26.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ultralytics-8.2.26.dist-info/RECORD,,