ultralytics 8.2.35__py3-none-any.whl → 8.2.37__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



ultralytics/__init__.py CHANGED
@@ -1,6 +1,6 @@
  # Ultralytics YOLO 🚀, AGPL-3.0 license

- __version__ = "8.2.35"
+ __version__ = "8.2.37"

  import os

ultralytics/cfg/models/v9/yolov9c-seg.yaml CHANGED
@@ -1,11 +1,11 @@
  # Ultralytics YOLO 🚀, AGPL-3.0 license
- # YOLOv9c-seg
+ # YOLOv9c-seg instance segmentation model. For Usage examples see https://docs.ultralytics.com/models/yolov9
  # 654 layers, 27897120 parameters, 159.4 GFLOPs

- # parameters
+ # Parameters
  nc: 80 # number of classes

- # gelan backbone
+ # GELAN backbone
  backbone:
  - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2
  - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4
ultralytics/cfg/models/v9/yolov9c.yaml CHANGED
@@ -1,11 +1,11 @@
  # Ultralytics YOLO 🚀, AGPL-3.0 license
- # YOLOv9c
+ # YOLOv9c object detection model. For Usage examples see https://docs.ultralytics.com/models/yolov9
  # 618 layers, 25590912 parameters, 104.0 GFLOPs

- # parameters
+ # Parameters
  nc: 80 # number of classes

- # gelan backbone
+ # GELAN backbone
  backbone:
  - [-1, 1, Conv, [64, 3, 2]] # 0-P1/2
  - [-1, 1, Conv, [128, 3, 2]] # 1-P2/4
ultralytics/cfg/models/v9/yolov9e-seg.yaml CHANGED
@@ -1,11 +1,11 @@
  # Ultralytics YOLO 🚀, AGPL-3.0 license
- # YOLOv9c-seg
+ # YOLOv9e-seg instance segmentation model. For Usage examples see https://docs.ultralytics.com/models/yolov9
  # 1261 layers, 60512800 parameters, 248.4 GFLOPs

- # parameters
+ # Parameters
  nc: 80 # number of classes

- # gelan backbone
+ # GELAN backbone
  backbone:
  - [-1, 1, nn.Identity, []]
  - [-1, 1, Conv, [64, 3, 2]] # 1-P1/2
@@ -40,7 +40,7 @@ backbone:
  - [-1, 1, RepNCSPELAN4, [1024, 512, 256, 2]] # 28
  - [-1, 1, SPPELAN, [512, 256]] # 29

- # gelan head
+ # GELAN head
  head:
  - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
  - [[-1, 25], 1, Concat, [1]] # cat backbone P4
ultralytics/cfg/models/v9/yolov9e.yaml CHANGED
@@ -1,11 +1,11 @@
  # Ultralytics YOLO 🚀, AGPL-3.0 license
- # YOLOv9e
+ # YOLOv9e object detection model. For Usage examples see https://docs.ultralytics.com/models/yolov9
  # 1225 layers, 58206592 parameters, 193.0 GFLOPs

- # parameters
+ # Parameters
  nc: 80 # number of classes

- # gelan backbone
+ # GELAN backbone
  backbone:
  - [-1, 1, nn.Identity, []]
  - [-1, 1, Conv, [64, 3, 2]] # 1-P1/2
@@ -40,7 +40,7 @@ backbone:
  - [-1, 1, RepNCSPELAN4, [1024, 512, 256, 2]] # 28
  - [-1, 1, SPPELAN, [512, 256]] # 29

- # gelan head
+ # GELAN head
  head:
  - [-1, 1, nn.Upsample, [None, 2, "nearest"]]
  - [[-1, 25], 1, Concat, [1]] # cat backbone P4
ultralytics/cfg/models/v9/yolov9m.yaml CHANGED
@@ -1,11 +1,11 @@
  # Ultralytics YOLO 🚀, AGPL-3.0 license
- # YOLOv9t
+ # YOLOv9m object detection model. For Usage examples see https://docs.ultralytics.com/models/yolov9
  # 603 layers, 20216160 parameters, 77.9 GFLOPs

- # parameters
+ # Parameters
  nc: 80 # number of classes

- # gelan backbone
+ # GELAN backbone
  backbone:
  - [-1, 1, Conv, [32, 3, 2]] # 0-P1/2
  - [-1, 1, Conv, [64, 3, 2]] # 1-P2/4
ultralytics/cfg/models/v9/yolov9s.yaml CHANGED
@@ -1,11 +1,11 @@
  # Ultralytics YOLO 🚀, AGPL-3.0 license
- # YOLOv9s
+ # YOLOv9s object detection model. For Usage examples see https://docs.ultralytics.com/models/yolov9
  # 917 layers, 7318368 parameters, 27.6 GFLOPs

- # parameters
+ # Parameters
  nc: 80 # number of classes

- # gelan backbone
+ # GELAN backbone
  backbone:
  - [-1, 1, Conv, [32, 3, 2]] # 0-P1/2
  - [-1, 1, Conv, [64, 3, 2]] # 1-P2/4
ultralytics/cfg/models/v9/yolov9t.yaml CHANGED
@@ -1,11 +1,11 @@
  # Ultralytics YOLO 🚀, AGPL-3.0 license
- # YOLOv9t
+ # YOLOv9t object detection model. For Usage examples see https://docs.ultralytics.com/models/yolov9
  # 917 layers, 2128720 parameters, 8.5 GFLOPs

- # parameters
+ # Parameters
  nc: 80 # number of classes

- # gelan backbone
+ # GELAN backbone
  backbone:
  - [-1, 1, Conv, [16, 3, 2]] # 0-P1/2
  - [-1, 1, Conv, [32, 3, 2]] # 1-P2/4
ultralytics/data/loaders.py CHANGED
@@ -362,7 +362,7 @@ class LoadImagesAndVideos:
  self.mode = "image"
  im0 = cv2.imread(path) # BGR
  if im0 is None:
- raise FileNotFoundError(f"Image Not Found {path}")
+ raise FileNotFoundError(f"Image Read Error {path}")
  paths.append(path)
  imgs.append(im0)
  info.append(f"image {self.count + 1}/{self.nf} {path}: ")
ultralytics/nn/modules/block.py CHANGED
@@ -672,18 +672,6 @@ class SPPELAN(nn.Module):
  return self.cv5(torch.cat(y, 1))


- class Silence(nn.Module):
- """Silence."""
-
- def __init__(self):
- """Initializes the Silence module."""
- super(Silence, self).__init__()
-
- def forward(self, x):
- """Forward pass through Silence layer."""
- return x
-
-
  class CBLinear(nn.Module):
  """CBLinear."""

ultralytics/nn/tasks.py CHANGED
@@ -279,6 +279,12 @@ class DetectionModel(BaseModel):
  """Initialize the YOLOv8 detection model with the given config and parameters."""
  super().__init__()
  self.yaml = cfg if isinstance(cfg, dict) else yaml_model_load(cfg) # cfg dict
+ if self.yaml["backbone"][0][2] == "Silence":
+ LOGGER.warning(
+ "WARNING ⚠️ YOLOv9 `Silence` module is deprecated in favor of nn.Identity. "
+ "Please delete local *.pt file and re-download the latest model checkpoint."
+ )
+ self.yaml["backbone"][0][2] = "nn.Identity"

  # Define model
  ch = self.yaml["ch"] = self.yaml.get("ch", ch) # input channels
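Context for the new check above: older YOLOv9 YAML configs and checkpoints reference a `Silence` pass-through layer that this release removes, so the loader rewrites the first backbone entry to `nn.Identity`, which behaves identically. A minimal sketch of that equivalence, with a hypothetical config dict standing in for `self.yaml`:

import torch
import torch.nn as nn

yaml_cfg = {"backbone": [[-1, 1, "Silence", []], [-1, 1, "Conv", [64, 3, 2]]]}  # hypothetical legacy config
if yaml_cfg["backbone"][0][2] == "Silence":
    yaml_cfg["backbone"][0][2] = "nn.Identity"  # drop-in replacement for the removed module

x = torch.randn(1, 3, 8, 8)
assert torch.equal(nn.Identity()(x), x)  # nn.Identity returns its input unchanged, as Silence did
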
@@ -669,7 +675,7 @@ class Ensemble(nn.ModuleList):


  @contextlib.contextmanager
- def temporary_modules(modules=None):
+ def temporary_modules(modules={}, attributes={}):
  """
  Context manager for temporarily adding or modifying modules in Python's module cache (`sys.modules`).

@@ -679,11 +685,13 @@ def temporary_modules(modules=None):

  Args:
  modules (dict, optional): A dictionary mapping old module paths to new module paths.
+ attributes (dict, optional): A dictionary mapping old module attributes to new module attributes.

  Example:
  ```python
- with temporary_modules({'old.module.path': 'new.module.path'}):
+ with temporary_modules({'old.module.path': 'new.module.path'}, {'old.module.attribute': 'new.module.attribute'}):
  import old.module.path # this will now import new.module.path
+ from old.module import attribute # this will now import new.module.attribute
  ```

  Note:
@@ -691,13 +699,21 @@ def temporary_modules(modules=None):
  Be aware that directly manipulating `sys.modules` can lead to unpredictable results, especially in larger
  applications or libraries. Use this function with caution.
  """
- if not modules:
- modules = {}

  import importlib
  import sys

  try:
+ # Set attributes in sys.modules under their old name
+ for old, new in attributes.items():
+ old_module, old_attr = old.rsplit(".", 1)
+ new_module, new_attr = new.rsplit(".", 1)
+ setattr(
+ importlib.import_module(old_module),
+ old_attr,
+ getattr(importlib.import_module(new_module), new_attr),
+ )
+
  # Set modules in sys.modules under their old name
  for old, new in modules.items():
  sys.modules[old] = importlib.import_module(new)
@@ -728,11 +744,14 @@ def torch_safe_load(weight):
  file = attempt_download_asset(weight) # search online if missing locally
  try:
  with temporary_modules(
- {
+ modules={
  "ultralytics.yolo.utils": "ultralytics.utils",
  "ultralytics.yolo.v8": "ultralytics.models.yolo",
  "ultralytics.yolo.data": "ultralytics.data",
- }
+ },
+ attributes={
+ "ultralytics.nn.modules.block.Silence": "torch.nn.Identity",
+ },
  ): # for legacy 8.0 Classify and Pose models
  ckpt = torch.load(file, map_location="cpu")

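The `attributes` mapping above is what lets checkpoints pickled against the removed `ultralytics.nn.modules.block.Silence` class still unpickle: before `torch.load` runs, the old attribute name is pointed at `torch.nn.Identity`. A self-contained sketch of the same aliasing idea (the helper name `alias_attribute` is illustrative, not an Ultralytics API):

import importlib
import torch.nn as nn

def alias_attribute(old: str, new: str) -> None:
    # Point the attribute named by `old` (e.g. "pkg.mod.OldClass") at the object named by `new`
    old_module, old_attr = old.rsplit(".", 1)
    new_module, new_attr = new.rsplit(".", 1)
    setattr(
        importlib.import_module(old_module),
        old_attr,
        getattr(importlib.import_module(new_module), new_attr),
    )

# Mirrors the mapping added in the hunk above; lookups of block.Silence now resolve to nn.Identity
alias_attribute("ultralytics.nn.modules.block.Silence", "torch.nn.Identity")
from ultralytics.nn.modules import block
assert block.Silence is nn.Identity
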
ultralytics/utils/callbacks/mlflow.py CHANGED
@@ -38,10 +38,12 @@ try:
  except (ImportError, AssertionError):
  mlflow = None

+
  def sanitize_dict(x):
  """Sanitize dictionary keys by removing parentheses and converting values to floats."""
  return {k.replace("(", "").replace(")", ""): float(v) for k, v in x.items()}

+
  def on_pretrain_routine_end(trainer):
  """
  Log training parameters to MLflow at the end of the pretraining routine.
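The blank lines added above are PEP 8 spacing only; `sanitize_dict` itself is unchanged. For reference, its effect on a typical metrics dict (key and value here are illustrative):

def sanitize_dict(x):
    """Sanitize dictionary keys by removing parentheses and converting values to floats."""
    return {k.replace("(", "").replace(")", ""): float(v) for k, v in x.items()}

assert sanitize_dict({"metrics/mAP50(B)": "0.52"}) == {"metrics/mAP50B": 0.52}
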
ultralytics/utils/plotting.py CHANGED
@@ -33,26 +33,26 @@ class Colors:
  def __init__(self):
  """Initialize colors as hex = matplotlib.colors.TABLEAU_COLORS.values()."""
  hexs = (
- "FF3838",
- "FF9D97",
- "FF701F",
- "FFB21D",
- "CFD231",
- "48F90A",
- "92CC17",
- "3DDB86",
- "1A9334",
- "00D4BB",
- "2C99A8",
- "00C2FF",
- "344593",
- "6473FF",
- "0018EC",
- "8438FF",
- "520085",
- "CB38FF",
- "FF95C8",
- "FF37C7",
+ "042AFF",
+ "0BDBEB",
+ "F3F3F3",
+ "00DFB7",
+ "111F68",
+ "FF6FDD",
+ "FF444F",
+ "CCED00",
+ "00F344",
+ "BD00FF",
+ "00B4FF",
+ "DD00BA",
+ "00FFFF",
+ "26C000",
+ "01FFB3",
+ "7D24FF",
+ "7B0068",
+ "FF1B6C",
+ "FC6D2F",
+ "A2FF0B",
  )
  self.palette = [self.hex2rgb(f"#{c}") for c in hexs]
  self.n = len(self.palette)
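Each hex string above is converted to an (R, G, B) tuple by the `hex2rgb` call on the line above; a minimal equivalent of that conversion (a sketch, not necessarily the package's exact implementation):

def hex2rgb(h: str) -> tuple:
    """Convert '#RRGGBB' into an (R, G, B) tuple of ints."""
    return tuple(int(h[1 + i : 1 + i + 2], 16) for i in (0, 2, 4))

assert hex2rgb("#042AFF") == (4, 42, 255)  # first entry of the new palette
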
@@ -158,9 +158,133 @@ class Annotator:

  self.limb_color = colors.pose_palette[[9, 9, 9, 9, 7, 7, 7, 0, 0, 0, 0, 0, 16, 16, 16, 16, 16, 16, 16]]
  self.kpt_color = colors.pose_palette[[16, 16, 16, 16, 16, 0, 0, 0, 0, 0, 0, 9, 9, 9, 9, 9, 9]]
+ self.dark_colors = {
+ (235, 219, 11),
+ (243, 243, 243),
+ (183, 223, 0),
+ (221, 111, 255),
+ (0, 237, 204),
+ (68, 243, 0),
+ (255, 255, 0),
+ (179, 255, 1),
+ (11, 255, 162),
+ }
+ self.light_colors = {
+ (255, 42, 4),
+ (79, 68, 255),
+ (255, 0, 189),
+ (255, 180, 0),
+ (186, 0, 221),
+ (0, 192, 38),
+ (255, 36, 125),
+ (104, 0, 123),
+ (108, 27, 255),
+ (47, 109, 252),
+ (104, 31, 17),
+ }
+
+ def get_txt_color(self, color=(128, 128, 128), txt_color=(255, 255, 255)):
+ """Assign text color based on background color."""
+ if color in self.dark_colors:
+ return 104, 31, 17
+ elif color in self.light_colors:
+ return 255, 255, 255
+ else:
+ return txt_color
+
+ def circle_label(self, box, label="", color=(128, 128, 128), txt_color=(255, 255, 255), margin=2):
+ """
+ Draws a label with a background rectangle centered within a given bounding box.
+
+ Args:
+ box (tuple): The bounding box coordinates (x1, y1, x2, y2).
+ label (str): The text label to be displayed.
+ color (tuple, optional): The background color of the rectangle (R, G, B).
+ txt_color (tuple, optional): The color of the text (R, G, B).
+ margin (int, optional): The margin between the text and the rectangle border.
+ """
+
+ # If label have more than 3 characters, skip other characters, due to circle size
+ if len(label) > 3:
+ print(
+ f"Length of label is {len(label)}, initial 3 label characters will be considered for circle annotation!"
+ )
+ label = label[:3]
+
+ # Calculate the center of the box
+ x_center, y_center = int((box[0] + box[2]) / 2), int((box[1] + box[3]) / 2)
+ # Get the text size
+ text_size = cv2.getTextSize(str(label), cv2.FONT_HERSHEY_SIMPLEX, self.sf - 0.15, self.tf)[0]
+ # Calculate the required radius to fit the text with the margin
+ required_radius = int(((text_size[0] ** 2 + text_size[1] ** 2) ** 0.5) / 2) + margin
+ # Draw the circle with the required radius
+ cv2.circle(self.im, (x_center, y_center), required_radius, color, -1)
+ # Calculate the position for the text
+ text_x = x_center - text_size[0] // 2
+ text_y = y_center + text_size[1] // 2
+ # Draw the text
+ cv2.putText(
+ self.im,
+ str(label),
+ (text_x, text_y),
+ cv2.FONT_HERSHEY_SIMPLEX,
+ self.sf - 0.15,
+ self.get_txt_color(color, txt_color),
+ self.tf,
+ lineType=cv2.LINE_AA,
+ )
+
+ def text_label(self, box, label="", color=(128, 128, 128), txt_color=(255, 255, 255), margin=5):
+ """
+ Draws a label with a background rectangle centered within a given bounding box.
+
+ Args:
+ box (tuple): The bounding box coordinates (x1, y1, x2, y2).
+ label (str): The text label to be displayed.
+ color (tuple, optional): The background color of the rectangle (R, G, B).
+ txt_color (tuple, optional): The color of the text (R, G, B).
+ margin (int, optional): The margin between the text and the rectangle border.
+ """
+
+ # Calculate the center of the bounding box
+ x_center, y_center = int((box[0] + box[2]) / 2), int((box[1] + box[3]) / 2)
+ # Get the size of the text
+ text_size = cv2.getTextSize(label, cv2.FONT_HERSHEY_SIMPLEX, self.sf - 0.1, self.tf)[0]
+ # Calculate the top-left corner of the text (to center it)
+ text_x = x_center - text_size[0] // 2
+ text_y = y_center + text_size[1] // 2
+ # Calculate the coordinates of the background rectangle
+ rect_x1 = text_x - margin
+ rect_y1 = text_y - text_size[1] - margin
+ rect_x2 = text_x + text_size[0] + margin
+ rect_y2 = text_y + margin
+ # Draw the background rectangle
+ cv2.rectangle(self.im, (rect_x1, rect_y1), (rect_x2, rect_y2), color, -1)
+ # Draw the text on top of the rectangle
+ cv2.putText(
+ self.im,
+ label,
+ (text_x, text_y),
+ cv2.FONT_HERSHEY_SIMPLEX,
+ self.sf - 0.1,
+ self.get_txt_color(color, txt_color),
+ self.tf,
+ lineType=cv2.LINE_AA,
+ )

  def box_label(self, box, label="", color=(128, 128, 128), txt_color=(255, 255, 255), rotated=False):
- """Add one xyxy box to image with label."""
+ """
+ Draws a bounding box to image with label.
+
+ Args:
+ box (tuple): The bounding box coordinates (x1, y1, x2, y2).
+ label (str): The text label to be displayed.
+ color (tuple, optional): The background color of the rectangle (R, G, B).
+ txt_color (tuple, optional): The color of the text (R, G, B).
+ rotated (bool, optional): Variable used to check if task is OBB
+ """
+
+ txt_color = self.get_txt_color(color, txt_color)
  if isinstance(box, torch.Tensor):
  box = box.tolist()
  if self.pil or not is_ascii(label):
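The new `circle_label` and `text_label` helpers are called like the existing `box_label`; a minimal usage sketch, assuming an `Annotator` constructed from a BGR numpy image as in the Ultralytics docs (all values below are illustrative):

import numpy as np
from ultralytics.utils.plotting import Annotator

im = np.zeros((480, 640, 3), dtype=np.uint8)            # blank BGR canvas
ann = Annotator(im, line_width=2, pil=False)             # cv2-backed annotator

box = (100, 100, 220, 200)                               # x1, y1, x2, y2
ann.circle_label(box, label="car", color=(255, 42, 4))   # circular badge centered in the box
ann.text_label(box, label="car", color=(104, 31, 17))    # text on a filled rectangle, centered in the box
annotated = ann.result()                                 # annotated image as a numpy array
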
@@ -215,6 +339,7 @@ class Annotator:
  alpha (float): Mask transparency: 0.0 fully transparent, 1.0 opaque
  retina_masks (bool): Whether to use high resolution masks or not. Defaults to False.
  """
+
  if self.pil:
  # Convert to numpy first
  self.im = np.asarray(self.im).copy()
@@ -254,6 +379,7 @@ class Annotator:
  Note:
  `kpt_line=True` currently only supports human pose plotting.
  """
+
  if self.pil:
  # Convert to numpy first
  self.im = np.asarray(self.im).copy()
@@ -349,6 +475,7 @@ class Annotator:
  Returns:
  angle (degree): Degree value of angle between three points
  """
+
  x_min, y_min, x_max, y_max = bbox
  width = x_max - x_min
  height = y_max - y_min
@@ -363,6 +490,7 @@ class Annotator:
  color (tuple): Region Color value
  thickness (int): Region area thickness value
  """
+
  cv2.polylines(self.im, [np.array(reg_pts, dtype=np.int32)], isClosed=True, color=color, thickness=thickness)

  def draw_centroid_and_tracks(self, track, color=(255, 0, 255), track_thickness=2):
@@ -374,6 +502,7 @@ class Annotator:
  color (tuple): tracks line color
  track_thickness (int): track line thickness value
  """
+
  points = np.hstack(track).astype(np.int32).reshape((-1, 1, 2))
  cv2.polylines(self.im, [points], isClosed=False, color=color, thickness=track_thickness)
  cv2.circle(self.im, (int(track[-1][0]), int(track[-1][1])), track_thickness * 2, color, -1)
@@ -486,6 +615,7 @@ class Annotator:
  Returns:
  angle (degree): Degree value of angle between three points
  """
+
  a, b, c = np.array(a), np.array(b), np.array(c)
  radians = np.arctan2(c[1] - b[1], c[0] - b[0]) - np.arctan2(a[1] - b[1], a[0] - b[0])
  angle = np.abs(radians * 180.0 / np.pi)
@@ -503,6 +633,7 @@ class Annotator:
  shape (tuple): imgsz for model inference
  radius (int): Keypoint radius value
  """
+
  if indices is None:
  indices = [2, 5, 7]
  for i, k in enumerate(keypoints):
@@ -599,6 +730,7 @@ class Annotator:
  det_label (str): Detection label text
  track_label (str): Tracking label text
  """
+
  cv2.polylines(self.im, [np.int32([mask])], isClosed=True, color=mask_color, thickness=2)

  label = f"Track ID: {track_label}" if track_label else det_label
@@ -668,6 +800,7 @@ class Annotator:
  color (tuple): object centroid and line color value
  pin_color (tuple): visioneye point color value
  """
+
  center_bbox = int((box[0] + box[2]) / 2), int((box[1] + box[3]) / 2)
  cv2.circle(self.im, center_point, self.tf * 2, pin_color, -1)
  cv2.circle(self.im, center_bbox, self.tf * 2, color, -1)
ultralytics-8.2.35.dist-info/METADATA → ultralytics-8.2.37.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ultralytics
- Version: 8.2.35
+ Version: 8.2.37
  Summary: Ultralytics YOLOv8 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
  Author: Glenn Jocher, Ayush Chaurasia, Jing Qiu
  Maintainer: Glenn Jocher, Ayush Chaurasia, Jing Qiu
@@ -93,13 +93,13 @@ Requires-Dist: dvclive >=2.12.0 ; extra == 'logging'
  <div>
  <a href="https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml"><img src="https://github.com/ultralytics/ultralytics/actions/workflows/ci.yaml/badge.svg" alt="Ultralytics CI"></a>
  <a href="https://codecov.io/github/ultralytics/ultralytics"><img src="https://codecov.io/github/ultralytics/ultralytics/branch/main/graph/badge.svg?token=HHW7IIVFVY" alt="Ultralytics Code Coverage"></a>
- <a href="https://zenodo.org/badge/latestdoi/264818686"><img src="https://zenodo.org/badge/264818686.svg" alt="YOLOv8 Citation"></a>
- <a href="https://hub.docker.com/r/ultralytics/ultralytics"><img src="https://img.shields.io/docker/pulls/ultralytics/ultralytics?logo=docker" alt="Docker Pulls"></a>
- <a href="https://ultralytics.com/discord"><img alt="Discord" src="https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue"></a>
+ <a href="https://zenodo.org/badge/latestdoi/264818686"><img src="https://zenodo.org/badge/264818686.svg" alt="Ultralytics YOLOv8 Citation"></a>
+ <a href="https://hub.docker.com/r/ultralytics/ultralytics"><img src="https://img.shields.io/docker/pulls/ultralytics/ultralytics?logo=docker" alt="Ultralytics Docker Pulls"></a>
+ <a href="https://ultralytics.com/discord"><img alt="Ultralytics Discord" src="https://img.shields.io/discord/1089800235347353640?logo=discord&logoColor=white&label=Discord&color=blue"></a>
  <br>
- <a href="https://console.paperspace.com/github/ultralytics/ultralytics"><img src="https://assets.paperspace.io/img/gradient-badge.svg" alt="Run on Gradient"></a>
- <a href="https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/tutorial.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a>
- <a href="https://www.kaggle.com/ultralytics/yolov8"><img src="https://kaggle.com/static/images/open-in-kaggle.svg" alt="Open In Kaggle"></a>
+ <a href="https://console.paperspace.com/github/ultralytics/ultralytics"><img src="https://assets.paperspace.io/img/gradient-badge.svg" alt="Run Ultralytics on Gradient"></a>
+ <a href="https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/tutorial.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open Ultralytics In Colab"></a>
+ <a href="https://www.kaggle.com/ultralytics/yolov8"><img src="https://kaggle.com/static/images/open-in-kaggle.svg" alt="Open Ultralytics In Kaggle"></a>
  </div>
  <br>

@@ -188,13 +188,13 @@ See YOLOv8 [Python Docs](https://docs.ultralytics.com/usage/python) for more exa

  Ultralytics provides interactive notebooks for YOLOv8, covering training, validation, tracking, and more. Each notebook is paired with a [YouTube](https://youtube.com/ultralytics?sub_confirmation=1) tutorial, making it easy to learn and implement advanced YOLOv8 features.

- | Docs | Notebook | YouTube |
- | --------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
- | <a href="https://docs.ultralytics.com/modes/">YOLOv8 Train, Val, Predict and Export Modes</a> | <a href="https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/tutorial.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a> | <a href="https://youtu.be/j8uQc0qB91s"><center><img width=30% src="https://raw.githubusercontent.com/ultralytics/assets/main/social/logo-social-youtube-rect.png" alt="Ultralytics Youtube Video"></center></a> |
- | <a href="https://docs.ultralytics.com/hub/quickstart/">Ultralytics HUB QuickStart</a> | <a href="https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/hub.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a> | <a href="https://youtu.be/lveF9iCMIzc"><center><img width=30% src="https://raw.githubusercontent.com/ultralytics/assets/main/social/logo-social-youtube-rect.png" alt="Ultralytics Youtube Video"></center></a> |
- | <a href="https://docs.ultralytics.com/modes/track/">YOLOv8 Multi-Object Tracking in Videos</a> | <a href="https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/object_tracking.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a> | <a href="https://youtu.be/hHyHmOtmEgs"><center><img width=30% src="https://raw.githubusercontent.com/ultralytics/assets/main/social/logo-social-youtube-rect.png" alt="Ultralytics Youtube Video"></center></a> |
- | <a href="https://docs.ultralytics.com/guides/object-counting/">YOLOv8 Object Counting in Videos</a> | <a href="https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/object_counting.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a> | <a href="https://youtu.be/Ag2e-5_NpS0"><center><img width=30% src="https://raw.githubusercontent.com/ultralytics/assets/main/social/logo-social-youtube-rect.png" alt="Ultralytics Youtube Video"></center></a> |
- | <a href="https://docs.ultralytics.com/guides/heatmaps/">YOLOv8 Heatmaps in Videos</a> | <a href="https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/heatmaps.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a> | <a href="https://youtu.be/4ezde5-nZZw"><center><img width=30% src="https://raw.githubusercontent.com/ultralytics/assets/main/social/logo-social-youtube-rect.png" alt="Ultralytics Youtube Video"></center></a> |
+ | Docs | Notebook | YouTube |
+ | ---------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | :-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
+ | <a href="https://docs.ultralytics.com/modes/">YOLOv8 Train, Val, Predict and Export Modes</a> | <a href="https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/tutorial.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a> | <a href="https://youtu.be/j8uQc0qB91s"><center><img width=30% src="https://raw.githubusercontent.com/ultralytics/assets/main/social/logo-social-youtube-rect.png" alt="Ultralytics Youtube Video"></center></a> |
+ | <a href="https://docs.ultralytics.com/hub/quickstart/">Ultralytics HUB QuickStart</a> | <a href="https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/hub.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a> | <a href="https://youtu.be/lveF9iCMIzc"><center><img width=30% src="https://raw.githubusercontent.com/ultralytics/assets/main/social/logo-social-youtube-rect.png" alt="Ultralytics Youtube Video"></center></a> |
+ | <a href="https://docs.ultralytics.com/modes/track/">YOLOv8 Multi-Object Tracking in Videos</a> | <a href="https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/object_tracking.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a> | <a href="https://youtu.be/hHyHmOtmEgs"><center><img width=30% src="https://raw.githubusercontent.com/ultralytics/assets/main/social/logo-social-youtube-rect.png" alt="Ultralytics Youtube Video"></center></a> |
+ | <a href="https://docs.ultralytics.com/guides/object-counting/">YOLOv8 Object Counting in Videos</a> | <a href="https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/object_counting.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a> | <a href="https://youtu.be/Ag2e-5_NpS0"><center><img width=30% src="https://raw.githubusercontent.com/ultralytics/assets/main/social/logo-social-youtube-rect.png" alt="Ultralytics Youtube Video"></center></a> |
+ | <a href="https://docs.ultralytics.com/guides/heatmaps/">YOLOv8 Heatmaps in Videos</a> | <a href="https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/examples/heatmaps.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a> | <a href="https://youtu.be/4ezde5-nZZw"><center><img width=30% src="https://raw.githubusercontent.com/ultralytics/assets/main/social/logo-social-youtube-rect.png" alt="Ultralytics Youtube Video"></center></a> |
  | <a href="https://docs.ultralytics.com/datasets/explorer/">Ultralytics Datasets Explorer with SQL and OpenAI Integration 🚀 New</a> | <a href="https://colab.research.google.com/github/ultralytics/ultralytics/blob/main/docs/en/datasets/explorer/explorer.ipynb"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"></a> | <a href="https://youtu.be/3VryynorQeo"><center><img width=30% src="https://raw.githubusercontent.com/ultralytics/assets/main/social/logo-social-youtube-rect.png" alt="Ultralytics Youtube Video"></center></a> |

  ## <div align="center">Models</div>
@@ -332,7 +332,7 @@ Our key integrations with leading AI platforms extend the functionality of Ultra
  <img src="https://github.com/ultralytics/assets/raw/main/partners/logo-neuralmagic.png" width="10%" alt="NeuralMagic logo"></a>
  </div>

- | Roboflow | ClearML ⭐ NEW | Comet ⭐ NEW | Neural Magic ⭐ NEW |
+ | Roboflow | ClearML ⭐ NEW | Comet ⭐ NEW | Neural Magic ⭐ NEW |
  | :--------------------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------: | :-------------------------------------------------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------: |
  | Label and export your custom datasets directly to YOLOv8 for training with [Roboflow](https://roboflow.com/?ref=ultralytics) | Automatically track, visualize and even remotely train YOLOv8 using [ClearML](https://clear.ml/) (open-source!) | Free forever, [Comet](https://bit.ly/yolov8-readme-comet) lets you save YOLOv8 models, resume training, and interactively visualize and debug predictions | Run YOLOv8 inference up to 6x faster with [Neural Magic DeepSparse](https://bit.ly/yolov5-neuralmagic) |

ultralytics-8.2.35.dist-info/RECORD → ultralytics-8.2.37.dist-info/RECORD CHANGED
@@ -7,7 +7,7 @@ tests/test_explorer.py,sha256=r1pWer2y290Y0DqsM-La7egfEY0497YCdC4rwq3URV4,2178
  tests/test_exports.py,sha256=qc4YOgsGixqYLO6IRNY16-v6z14R0dp5fdni1v222xw,8034
  tests/test_integrations.py,sha256=8Ru7GyKV8j44EEc8X9_E7q7aR4CTOIMPuSagXjSGUxw,5847
  tests/test_python.py,sha256=5cTM45P77LoOl-qixJ7TQmf66zw69adj01kNaaSxHqE,20265
- ultralytics/__init__.py,sha256=8W6USnulq-5lM79JviwPwZPJkWTGtGBW-gUMhtDKvOk,694
+ ultralytics/__init__.py,sha256=SZ2J0Bd3FrWlOh7a0GS_8EnhlKDLXT2cih66PzAHgfU,694
  ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
  ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
  ultralytics/cfg/__init__.py,sha256=JblkT6Ze9MZ8hSs8gkV8JPcEKNMm-YqRqM4x501Dn9g,21507
@@ -66,13 +66,13 @@ ultralytics/cfg/models/v8/yolov8-seg.yaml,sha256=fN85m_aDMCH4oTJ3z-ft98Pdh6dk0pZ
  ultralytics/cfg/models/v8/yolov8-world.yaml,sha256=RXTp_tgix8dbnVHprapxiK2aax7M2qIfmuR-aAve4sU,2019
  ultralytics/cfg/models/v8/yolov8-worldv2.yaml,sha256=fvGVUxvlBOjN6LUiiaiGsnjK5ZKjwYGWxgkJ49hGmMg,1956
  ultralytics/cfg/models/v8/yolov8.yaml,sha256=VjSe_V2Gn9ZpJrwTtz0A6_6IMp6UuugNiR7aEShR5rc,1889
- ultralytics/cfg/models/v9/yolov9c-seg.yaml,sha256=526Rv4rjzHT-Vkm1JIhe3E7FEQ5FOCVkKesVd1bsc6k,1251
- ultralytics/cfg/models/v9/yolov9c.yaml,sha256=eya4Dv8YUHcdFpQcqOPLA9f1tdvoNW12erOb5BqqQFY,1236
- ultralytics/cfg/models/v9/yolov9e-seg.yaml,sha256=BaleOWTpGuMSTMg4y2v_12e1RbZU_L4gM6FtlDcHAUQ,2182
- ultralytics/cfg/models/v9/yolov9e.yaml,sha256=vLIw0Y3jULtTd_ePxb2lXfZx9YidjCEO0q4JcJarn10,2166
- ultralytics/cfg/models/v9/yolov9m.yaml,sha256=CV_Y59Ou24eLgdpeMkXKR6l78id56hdLJdRwmsFDhWU,1221
- ultralytics/cfg/models/v9/yolov9s.yaml,sha256=bvfbEZsXy3qHPo7QR7ca64iXiM4ipL08Rllj4cNC8BM,1201
- ultralytics/cfg/models/v9/yolov9t.yaml,sha256=1Y0DFei9RYdisXgBHQjX-Eoec_AfClhTnE4Nj9l5FSM,1185
+ ultralytics/cfg/models/v9/yolov9c-seg.yaml,sha256=Bled9VT_X3AGGe9OATXGA8arq_USbGUc8pjsy684A7w,1346
+ ultralytics/cfg/models/v9/yolov9c.yaml,sha256=vPfYbdDDg2aDm2yVRfyjXNOgFryTUWFG60lcHQjLpuA,1326
+ ultralytics/cfg/models/v9/yolov9e-seg.yaml,sha256=ycMexB2qKdte-hko7SF9PY4qOng1moIoq-ssg4a3teA,2277
+ ultralytics/cfg/models/v9/yolov9e.yaml,sha256=dhaR47WxuLOrZWDCceS4bQG00sQdrMc8FQ5K3mKsnaU,2256
+ ultralytics/cfg/models/v9/yolov9m.yaml,sha256=l6CmivzNu44sRVmkQXk4-tXflbV1nWnk5MSc8su2vhs,1311
+ ultralytics/cfg/models/v9/yolov9s.yaml,sha256=lPWcu-6ub1kCBD6zIDFwthYZ3RvdJfODWKy3vEQWRjo,1291
+ ultralytics/cfg/models/v9/yolov9t.yaml,sha256=qL__kr6GoefpQWP4jV0jdzwTp46bdFUcqtPRnfDbkY8,1275
  ultralytics/cfg/trackers/botsort.yaml,sha256=YrPmj18p1UU40kJH5NRdL_4S8f7knggkk_q2KYnVudo,883
  ultralytics/cfg/trackers/bytetrack.yaml,sha256=QvHmtuwulK4X6j3T5VEqtCm0sbWWBUVmWPcCcM20qe0,688
  ultralytics/data/__init__.py,sha256=VGe-ATG7j35F4A4r8Jmzffjlhve4JAJPgRa5ahKTU18,616
@@ -82,7 +82,7 @@ ultralytics/data/base.py,sha256=C3teLnw97ZTbpJHT9P7yYWosAKocMzgJjRe1rxgfpls,1352
  ultralytics/data/build.py,sha256=AfMmz0sHIYmwry_90tEJFRk_kz0S3SolScVXqYHiT08,7261
  ultralytics/data/converter.py,sha256=NLDiV67RshbKQnMJUiQQF11boVzEqgi2Hz39nKVAI4U,17528
  ultralytics/data/dataset.py,sha256=NFaXyHRn64TyTEbtSkr7SkqWXK8bEJl6lZ6M1JwO3MY,22201
- ultralytics/data/loaders.py,sha256=b6XZVOHO_f5mCz3MFYTmXmL0Op6FQ-D5qJTReEgfCN0,23931
+ ultralytics/data/loaders.py,sha256=eqfgFwrQeCiqiZKfkmZ54SN0APVJDGhnlXTTFqeKFSU,23932
  ultralytics/data/split_dota.py,sha256=xiPScUhknxAyBgJ_J7g8SJdgjJdomSVVAosfZ51rGWA,10072
  ultralytics/data/utils.py,sha256=zqFg4xaWU--fastZmwvZ3DxGyJQ3i4tVNLuYnqS1xxs,31044
  ultralytics/data/explorer/__init__.py,sha256=-Y3m1ZedepOQUv_KW82zaGxvU_PSHcuwUTFqG9BhAr4,113
@@ -159,9 +159,9 @@ ultralytics/models/yolo/world/train.py,sha256=acYN2-onL69LrL4av6_hY2r5AY0urC0WVi
  ultralytics/models/yolo/world/train_world.py,sha256=n0XTAHYxufHU5OZ_QjpkHieKik-24z0LrYKzWYbCLvA,4798
  ultralytics/nn/__init__.py,sha256=4BPLHY89xEM_al5uK0aOmFgiML6CMGEZbezxOvTjOEs,587
  ultralytics/nn/autobackend.py,sha256=zsMF-GS12xtMBeQEkSoJ5cudEHyzMaRSQBuXcfuBNdo,31210
- ultralytics/nn/tasks.py,sha256=_mEgl8urgF6l9rAWtjRyalPiaSM52njwdUhtnBsGeV0,43869
+ ultralytics/nn/tasks.py,sha256=extgDOPk2wHFxjiyOMotM68AqeGzNrMwehEdi5lX0JE,44954
  ultralytics/nn/modules/__init__.py,sha256=JPj_TloK33DdxS8gvA8Pcet5ax1SgbRcb5mTTOS0DCI,2371
- ultralytics/nn/modules/block.py,sha256=T7XV7nykPsVL6y1JsFsK566d6kAGN1DICh25g3ooWjU,26033
+ ultralytics/nn/modules/block.py,sha256=3SfxkNMBKbjzAzNrt_CeGxpeBLkrdko7n07cDSIY6gg,25781
  ultralytics/nn/modules/conv.py,sha256=Ywe87IhuaS22mR2JJ9xjnW8Sb-m7WTjxuqIxV_Dv8lI,12722
  ultralytics/nn/modules/head.py,sha256=3N_4zW1UvhI1jCrIxIkNYxQDdiW6HxtxpaNAAudq6NU,22236
  ultralytics/nn/modules/transformer.py,sha256=AxD9uURpCl-EqvXe3DiG6JW-pBzB16G-AahLdZ7yayo,17909
@@ -197,7 +197,7 @@ ultralytics/utils/loss.py,sha256=ejXnPEIAzNEoNz2UjW0_fcdeUs9Hy-jPzUrJ3FiIIwE,327
  ultralytics/utils/metrics.py,sha256=XPD-xP0fchR8KgCuTcihV2-n0EK1cWi3-53BWN_pLuA,53518
  ultralytics/utils/ops.py,sha256=J9wbb9aTW9aaI5DJRqA72BZAX77cmVyCJdnGuwkDu-k,33089
  ultralytics/utils/patches.py,sha256=SgMqeMsq2K6JoBJP1NplXMl9C6rK0JeJUChjBrJOneo,2750
- ultralytics/utils/plotting.py,sha256=rM6FbEHD_TYtAjl_jrBztKgJYg4QSpWPW-P7demEEcw,48262
+ ultralytics/utils/plotting.py,sha256=I3YYLSsmj1BX8S5DphsedAm0RfisrPbeLpyuzsKXbqY,53288
  ultralytics/utils/tal.py,sha256=xuIyryUjaaYHkHPG9GvBwh1xxN2Hq4y3hXOtuERehwY,16017
  ultralytics/utils/torch_utils.py,sha256=G8gVzI3sOSVSHORi5a2u-iFhUCGGHn5_eKHaOaLfsOY,27047
  ultralytics/utils/triton.py,sha256=gg1finxno_tY2Ge9PMhmu7PI9wvoFZoiicdT4Bhqv3w,3936
@@ -208,14 +208,14 @@ ultralytics/utils/callbacks/clearml.py,sha256=M9Fi1OfdWqcm8uVkauuX3zJIYhNh6Tp7Jo
  ultralytics/utils/callbacks/comet.py,sha256=QR3-9f0L_W7nZWWg_OEN7t8La2JotapSS-CnNYVjCdk,13744
  ultralytics/utils/callbacks/dvc.py,sha256=WIClMsuvhiiyrwRv5BsZLxjsxYNJ3Y8Vq7zN0Bthtro,5045
  ultralytics/utils/callbacks/hub.py,sha256=IPNnCRlAEFA-Dt18JWTuHhaQpcAy3XGgxBD4JhO0jSs,3586
- ultralytics/utils/callbacks/mlflow.py,sha256=_1mbw2zg-IY2dg16GhqcMxm0pQdMzwn3PdElJ7kjxUc,5389
+ ultralytics/utils/callbacks/mlflow.py,sha256=_bUzHyPb0npne0WFlGzlGCy-X5sxGQhC_xA3dZbF08I,5391
  ultralytics/utils/callbacks/neptune.py,sha256=5Z3ua5YBTUS56FH8VQKQG1aaIo9fH8GEyzC5q7p4ipQ,3756
  ultralytics/utils/callbacks/raytune.py,sha256=ODVYzy-CoM4Uge0zjkh3Hnh9nF2M0vhDrSenXnvcizw,705
  ultralytics/utils/callbacks/tensorboard.py,sha256=QEgOVhUqY9akOs5TJIwz1Rvn6l32xWLpOxlwEyWF0B8,4136
  ultralytics/utils/callbacks/wb.py,sha256=9-fjQIdLjr3b73DTE3rHO171KvbH1VweJ-bmbv-rqTw,6747
- ultralytics-8.2.35.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
- ultralytics-8.2.35.dist-info/METADATA,sha256=bYE30qcGkZFsx0WsQU8f9g0hoB7-CnoopHig0mW5GvY,41240
- ultralytics-8.2.35.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- ultralytics-8.2.35.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
- ultralytics-8.2.35.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
- ultralytics-8.2.35.dist-info/RECORD,,
+ ultralytics-8.2.37.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ ultralytics-8.2.37.dist-info/METADATA,sha256=LHuqk6NTu__ZhHOS1G0EldVE8hSCUtsXdmGtp55pHRQ,41316
+ ultralytics-8.2.37.dist-info/WHEEL,sha256=cpQTJ5IWu9CdaPViMhC9YzF8gZuS5-vlfoFihTBC86A,91
+ ultralytics-8.2.37.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ ultralytics-8.2.37.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ ultralytics-8.2.37.dist-info/RECORD,,
ultralytics-8.2.35.dist-info/WHEEL → ultralytics-8.2.37.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: bdist_wheel (0.43.0)
+ Generator: setuptools (70.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any