ultralytics 8.2.93__py3-none-any.whl → 8.2.94__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ultralytics might be problematic.

tests/test_cli.py CHANGED
@@ -101,7 +101,7 @@ def test_mobilesam():
      model.predict(source, points=[900, 370], labels=[1])

      # Predict a segment based on a box prompt
-     model.predict(source, bboxes=[439, 437, 524, 709])
+     model.predict(source, bboxes=[439, 437, 524, 709], save=True)

      # Predict all
      # model(source)
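For context, the updated test now exercises `save=True` on the box-prompt call. A minimal standalone sketch of the same call, assuming the MobileSAM checkpoint and the bundled bus image are available locally:

```python
from ultralytics import SAM

# Segment inside a box prompt and write the annotated image to disk
# (save=True is the behavior the updated test now covers).
model = SAM("mobile_sam.pt")  # assumed local MobileSAM checkpoint
model.predict("ultralytics/assets/bus.jpg", bboxes=[439, 437, 524, 709], save=True)
```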
ultralytics/__init__.py CHANGED
@@ -1,6 +1,6 @@
  # Ultralytics YOLO 🚀, AGPL-3.0 license

- __version__ = "8.2.93"
+ __version__ = "8.2.94"


  import os
ultralytics/engine/trainer.py CHANGED
@@ -28,7 +28,6 @@ from ultralytics.utils import (
      DEFAULT_CFG,
      LOCAL_RANK,
      LOGGER,
-     MACOS,
      RANK,
      TQDM,
      __version__,
@@ -409,13 +408,17 @@ class BaseTrainer:
              break

          # Log
-         mem = f"{torch.cuda.memory_reserved() / 1E9 if torch.cuda.is_available() else 0:.3g}G"  # (GB)
-         loss_len = self.tloss.shape[0] if len(self.tloss.shape) else 1
-         losses = self.tloss if loss_len > 1 else torch.unsqueeze(self.tloss, 0)
          if RANK in {-1, 0}:
+             loss_length = self.tloss.shape[0] if len(self.tloss.shape) else 1
              pbar.set_description(
-                 ("%11s" * 2 + "%11.4g" * (2 + loss_len))
-                 % (f"{epoch + 1}/{self.epochs}", mem, *losses, batch["cls"].shape[0], batch["img"].shape[-1])
+                 ("%11s" * 2 + "%11.4g" * (2 + loss_length))
+                 % (
+                     f"{epoch + 1}/{self.epochs}",
+                     f"{self._get_memory():.3g}G",  # (GB) GPU memory util
+                     *(self.tloss if loss_length > 1 else torch.unsqueeze(self.tloss, 0)),  # losses
+                     batch["cls"].shape[0],  # batch size, i.e. 8
+                     batch["img"].shape[-1],  # imgsz, i.e. 640
+                 )
              )
              self.run_callbacks("on_batch_end")
              if self.args.plots and ni in self.plot_idx:
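For readers unfamiliar with the printf-style template above: its width grows with the number of loss terms, two string columns (epoch, memory) plus `2 + loss_length` numeric columns. A minimal sketch with hypothetical stand-in values for the trainer state:

```python
import torch

epoch, epochs = 0, 100  # hypothetical training progress
memory_gb = 3.21  # stand-in for what self._get_memory() would return
tloss = torch.tensor([1.234, 0.567, 0.890])  # e.g. box/cls/dfl losses
loss_length = tloss.shape[0] if len(tloss.shape) else 1

# Two 11-char string columns, then 2 + loss_length 11-char numeric columns.
desc = ("%11s" * 2 + "%11.4g" * (2 + loss_length)) % (
    f"{epoch + 1}/{epochs}",
    f"{memory_gb:.3g}G",
    *(tloss if loss_length > 1 else torch.unsqueeze(tloss, 0)),
    16,  # batch size
    640,  # image size
)
print(desc)  # seven right-aligned 11-character columns
```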
@@ -453,11 +456,7 @@ class BaseTrainer:
          self.scheduler.last_epoch = self.epoch  # do not move
          self.stop |= epoch >= self.epochs  # stop if exceeded epochs
          self.run_callbacks("on_fit_epoch_end")
-         gc.collect()
-         if MACOS:
-             torch.mps.empty_cache()  # clear unified memory at end of epoch, may help MPS' management of 'unlimited' virtual memory
-         else:
-             torch.cuda.empty_cache()  # clear GPU memory at end of epoch, may help reduce CUDA out of memory errors
+         self._clear_memory()

          # Early Stopping
          if RANK != -1:  # if DDP training
@@ -478,14 +477,29 @@ class BaseTrainer:
          if self.args.plots:
              self.plot_metrics()
          self.run_callbacks("on_train_end")
+         self._clear_memory()
+         self.run_callbacks("teardown")
+
+     def _get_memory(self):
+         """Get accelerator memory utilization in GB."""
+         if self.device.type == "mps":
+             memory = torch.mps.driver_allocated_memory()
+         elif self.device.type == "cpu":
+             memory = 0
+         else:
+             memory = torch.cuda.memory_reserved()
+         return memory / 1e9
+
+     def _clear_memory(self):
+         """Clear accelerator memory on different platforms."""
          gc.collect()
-         if MACOS:
+         if self.device.type == "mps":
              torch.mps.empty_cache()
+         elif self.device.type == "cpu":
+             return
          else:
              torch.cuda.empty_cache()

-         self.run_callbacks("teardown")
-
      def read_results_csv(self):
          """Read results.csv into a dict using pandas."""
          import pandas as pd  # scope for faster 'import ultralytics'
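The new helpers route memory reporting and cache clearing by `self.device.type` rather than the process-wide MACOS flag, so MPS, CPU, and CUDA each get the right treatment. A standalone sketch of the same pattern (hypothetical free functions, not the trainer methods themselves):

```python
import gc

import torch


def get_memory_gb(device: torch.device) -> float:
    """Report accelerator memory use in GB for the given device (0 on CPU)."""
    if device.type == "mps":
        return torch.mps.driver_allocated_memory() / 1e9  # Apple unified memory
    if device.type == "cpu":
        return 0.0
    return torch.cuda.memory_reserved() / 1e9  # CUDA reserved by the caching allocator


def clear_memory(device: torch.device) -> None:
    """Run the garbage collector, then empty the matching accelerator cache."""
    gc.collect()
    if device.type == "mps":
        torch.mps.empty_cache()
    elif device.type != "cpu":  # nothing to clear on CPU
        torch.cuda.empty_cache()
```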
ultralytics/models/sam/predict.py CHANGED
@@ -450,16 +450,18 @@ class Predictor(BasePredictor):
          results = []
          for masks, orig_img, img_path in zip([pred_masks], orig_imgs, self.batch[0]):
-             if pred_bboxes is not None:
-                 pred_bboxes = ops.scale_boxes(img.shape[2:], pred_bboxes.float(), orig_img.shape, padding=False)
-                 cls = torch.arange(len(pred_masks), dtype=torch.int32, device=pred_masks.device)
-                 pred_bboxes = torch.cat([pred_bboxes, pred_scores[:, None], cls[:, None]], dim=-1)
-
              if len(masks) == 0:
                  masks = None
              else:
                  masks = ops.scale_masks(masks[None].float(), orig_img.shape[:2], padding=False)[0]
                  masks = masks > self.model.mask_threshold  # to bool
+                 if pred_bboxes is not None:
+                     pred_bboxes = ops.scale_boxes(img.shape[2:], pred_bboxes.float(), orig_img.shape, padding=False)
+                 else:
+                     pred_bboxes = batched_mask_to_box(masks)
+                 # NOTE: SAM models do not return cls info. This `cls` here is just a placeholder for consistency.
+                 cls = torch.arange(len(pred_masks), dtype=torch.int32, device=pred_masks.device)
+                 pred_bboxes = torch.cat([pred_bboxes, pred_scores[:, None], cls[:, None]], dim=-1)
              results.append(Results(orig_img, path=img_path, names=names, masks=masks, boxes=pred_bboxes))
          # Reset segment-all mode.
          self.segment_all = False
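When no box prompt is supplied, the rewritten branch now derives boxes from the predicted masks via `batched_mask_to_box`. A simplified stand-in for that reduction, assuming boolean (N, H, W) masks:

```python
import torch


def masks_to_boxes(masks: torch.Tensor) -> torch.Tensor:
    """Compute xyxy boxes from a (N, H, W) boolean mask batch (simplified sketch)."""
    boxes = torch.zeros((masks.shape[0], 4), dtype=torch.float32, device=masks.device)
    for i, mask in enumerate(masks):
        ys, xs = torch.where(mask)  # row/column indices of foreground pixels
        if len(xs):  # empty masks keep an all-zero box
            boxes[i] = torch.stack([xs.min(), ys.min(), xs.max(), ys.max()]).float()
    return boxes


m = torch.zeros(1, 4, 4, dtype=torch.bool)
m[0, 1:3, 1:3] = True  # a 2x2 blob
print(masks_to_boxes(m))  # tensor([[1., 1., 2., 2.]])
```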
ultralytics/utils/plotting.py CHANGED
@@ -20,7 +20,7 @@ from ultralytics.utils.files import increment_path
  class Colors:
      """
-     Ultralytics default color palette https://ultralytics.com/.
+     Ultralytics color palette https://docs.ultralytics.com/reference/utils/plotting/#ultralytics.utils.plotting.Colors.

      This class provides methods to work with the Ultralytics color palette, including converting hex color codes to
      RGB values.
@@ -29,6 +29,60 @@ class Colors:
          palette (list of tuple): List of RGB color values.
          n (int): The number of colors in the palette.
          pose_palette (np.ndarray): A specific color palette array with dtype np.uint8.
+
+     ## Ultralytics Color Palette
+
+     | Index | Color | HEX | RGB |
+     |-------|-------------------------------------------------------------------|-----------|-------------------|
+     | 0 | <i class="fa-solid fa-square fa-2xl" style="color: #042aff;"></i> | `#042aff` | (4, 42, 255) |
+     | 1 | <i class="fa-solid fa-square fa-2xl" style="color: #0bdbeb;"></i> | `#0bdbeb` | (11, 219, 235) |
+     | 2 | <i class="fa-solid fa-square fa-2xl" style="color: #f3f3f3;"></i> | `#f3f3f3` | (243, 243, 243) |
+     | 3 | <i class="fa-solid fa-square fa-2xl" style="color: #00dfb7;"></i> | `#00dfb7` | (0, 223, 183) |
+     | 4 | <i class="fa-solid fa-square fa-2xl" style="color: #111f68;"></i> | `#111f68` | (17, 31, 104) |
+     | 5 | <i class="fa-solid fa-square fa-2xl" style="color: #ff6fdd;"></i> | `#ff6fdd` | (255, 111, 221) |
+     | 6 | <i class="fa-solid fa-square fa-2xl" style="color: #ff444f;"></i> | `#ff444f` | (255, 68, 79) |
+     | 7 | <i class="fa-solid fa-square fa-2xl" style="color: #cced00;"></i> | `#cced00` | (204, 237, 0) |
+     | 8 | <i class="fa-solid fa-square fa-2xl" style="color: #00f344;"></i> | `#00f344` | (0, 243, 68) |
+     | 9 | <i class="fa-solid fa-square fa-2xl" style="color: #bd00ff;"></i> | `#bd00ff` | (189, 0, 255) |
+     | 10 | <i class="fa-solid fa-square fa-2xl" style="color: #00b4ff;"></i> | `#00b4ff` | (0, 180, 255) |
+     | 11 | <i class="fa-solid fa-square fa-2xl" style="color: #dd00ba;"></i> | `#dd00ba` | (221, 0, 186) |
+     | 12 | <i class="fa-solid fa-square fa-2xl" style="color: #00ffff;"></i> | `#00ffff` | (0, 255, 255) |
+     | 13 | <i class="fa-solid fa-square fa-2xl" style="color: #26c000;"></i> | `#26c000` | (38, 192, 0) |
+     | 14 | <i class="fa-solid fa-square fa-2xl" style="color: #01ffb3;"></i> | `#01ffb3` | (1, 255, 179) |
+     | 15 | <i class="fa-solid fa-square fa-2xl" style="color: #7d24ff;"></i> | `#7d24ff` | (125, 36, 255) |
+     | 16 | <i class="fa-solid fa-square fa-2xl" style="color: #7b0068;"></i> | `#7b0068` | (123, 0, 104) |
+     | 17 | <i class="fa-solid fa-square fa-2xl" style="color: #ff1b6c;"></i> | `#ff1b6c` | (255, 27, 108) |
+     | 18 | <i class="fa-solid fa-square fa-2xl" style="color: #fc6d2f;"></i> | `#fc6d2f` | (252, 109, 47) |
+     | 19 | <i class="fa-solid fa-square fa-2xl" style="color: #a2ff0b;"></i> | `#a2ff0b` | (162, 255, 11) |
+
+     ## Pose Color Palette
+
+     | Index | Color | HEX | RGB |
+     |-------|-------------------------------------------------------------------|-----------|-------------------|
+     | 0 | <i class="fa-solid fa-square fa-2xl" style="color: #ff8000;"></i> | `#ff8000` | (255, 128, 0) |
+     | 1 | <i class="fa-solid fa-square fa-2xl" style="color: #ff9933;"></i> | `#ff9933` | (255, 153, 51) |
+     | 2 | <i class="fa-solid fa-square fa-2xl" style="color: #ffb266;"></i> | `#ffb266` | (255, 178, 102) |
+     | 3 | <i class="fa-solid fa-square fa-2xl" style="color: #e6e600;"></i> | `#e6e600` | (230, 230, 0) |
+     | 4 | <i class="fa-solid fa-square fa-2xl" style="color: #ff99ff;"></i> | `#ff99ff` | (255, 153, 255) |
+     | 5 | <i class="fa-solid fa-square fa-2xl" style="color: #99ccff;"></i> | `#99ccff` | (153, 204, 255) |
+     | 6 | <i class="fa-solid fa-square fa-2xl" style="color: #ff66ff;"></i> | `#ff66ff` | (255, 102, 255) |
+     | 7 | <i class="fa-solid fa-square fa-2xl" style="color: #ff33ff;"></i> | `#ff33ff` | (255, 51, 255) |
+     | 8 | <i class="fa-solid fa-square fa-2xl" style="color: #66b2ff;"></i> | `#66b2ff` | (102, 178, 255) |
+     | 9 | <i class="fa-solid fa-square fa-2xl" style="color: #3399ff;"></i> | `#3399ff` | (51, 153, 255) |
+     | 10 | <i class="fa-solid fa-square fa-2xl" style="color: #ff9999;"></i> | `#ff9999` | (255, 153, 153) |
+     | 11 | <i class="fa-solid fa-square fa-2xl" style="color: #ff6666;"></i> | `#ff6666` | (255, 102, 102) |
+     | 12 | <i class="fa-solid fa-square fa-2xl" style="color: #ff3333;"></i> | `#ff3333` | (255, 51, 51) |
+     | 13 | <i class="fa-solid fa-square fa-2xl" style="color: #99ff99;"></i> | `#99ff99` | (153, 255, 153) |
+     | 14 | <i class="fa-solid fa-square fa-2xl" style="color: #66ff66;"></i> | `#66ff66` | (102, 255, 102) |
+     | 15 | <i class="fa-solid fa-square fa-2xl" style="color: #33ff33;"></i> | `#33ff33` | (51, 255, 51) |
+     | 16 | <i class="fa-solid fa-square fa-2xl" style="color: #00ff00;"></i> | `#00ff00` | (0, 255, 0) |
+     | 17 | <i class="fa-solid fa-square fa-2xl" style="color: #0000ff;"></i> | `#0000ff` | (0, 0, 255) |
+     | 18 | <i class="fa-solid fa-square fa-2xl" style="color: #ff0000;"></i> | `#ff0000` | (255, 0, 0) |
+     | 19 | <i class="fa-solid fa-square fa-2xl" style="color: #ffffff;"></i> | `#ffffff` | (255, 255, 255) |
+
+     !!! note "Ultralytics Brand Colors"
+
+         For Ultralytics brand colors see [https://www.ultralytics.com/brand](https://www.ultralytics.com/brand). Please use the official Ultralytics colors for all marketing materials.
      """

      def __init__(self):
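The palette rows above map one-to-one onto hex codes that the class converts to RGB tuples; a minimal sketch of that conversion (a standalone equivalent of the class's hex-to-RGB step, not the class method itself):

```python
def hex2rgb(h: str) -> tuple:
    """Convert a '#rrggbb' hex color to an (r, g, b) tuple of ints."""
    h = h.lstrip("#")
    return tuple(int(h[i : i + 2], 16) for i in (0, 2, 4))


print(hex2rgb("#042aff"))  # (4, 42, 255), palette index 0 above
```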
{ultralytics-8.2.93.dist-info → ultralytics-8.2.94.dist-info}/METADATA RENAMED
@@ -1,14 +1,14 @@
  Metadata-Version: 2.1
  Name: ultralytics
- Version: 8.2.93
- Summary: Ultralytics YOLOv8 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
+ Version: 8.2.94
+ Summary: Ultralytics YOLO for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
  Author: Glenn Jocher, Ayush Chaurasia, Jing Qiu
  Maintainer: Glenn Jocher, Ayush Chaurasia, Jing Qiu
  License: AGPL-3.0
  Project-URL: Bug Reports, https://github.com/ultralytics/ultralytics/issues
  Project-URL: Funding, https://ultralytics.com
  Project-URL: Source, https://github.com/ultralytics/ultralytics/
- Keywords: machine-learning,deep-learning,computer-vision,ML,DL,AI,YOLO,YOLOv3,YOLOv5,YOLOv8,HUB,Ultralytics
+ Keywords: machine-learning,deep-learning,computer-vision,ML,DL,AI,YOLO,YOLOv3,YOLOv5,YOLOv8,YOLOv9,YOLOv10,HUB,Ultralytics
  Classifier: Development Status :: 4 - Beta
  Classifier: Intended Audience :: Developers
  Classifier: Intended Audience :: Education
@@ -132,7 +132,7 @@ To request an Enterprise License please complete the form at [Ultralytics Licens

  ## <div align="center">Documentation</div>

- See below for a quickstart installation and usage example, and see the [YOLOv8 Docs](https://docs.ultralytics.com) for full documentation on training, validation, prediction and deployment.
+ See below for a quickstart installation and usage example, and see the [YOLOv8 Docs](https://docs.ultralytics.com/) for full documentation on training, validation, prediction and deployment.

  <details open>
  <summary>Install</summary>
@@ -145,7 +145,7 @@ Pip install the ultralytics package including all [requirements](https://github.
  pip install ultralytics
  ```

- For alternative installation methods including [Conda](https://anaconda.org/conda-forge/ultralytics), [Docker](https://hub.docker.com/r/ultralytics/ultralytics), and Git, please refer to the [Quickstart Guide](https://docs.ultralytics.com/quickstart).
+ For alternative installation methods including [Conda](https://anaconda.org/conda-forge/ultralytics), [Docker](https://hub.docker.com/r/ultralytics/ultralytics), and Git, please refer to the [Quickstart Guide](https://docs.ultralytics.com/quickstart/).

  [![Conda Version](https://img.shields.io/conda/vn/conda-forge/ultralytics?logo=condaforge)](https://anaconda.org/conda-forge/ultralytics) [![Docker Image Version](https://img.shields.io/docker/v/ultralytics/ultralytics?sort=semver&logo=docker)](https://hub.docker.com/r/ultralytics/ultralytics)

@@ -162,7 +162,7 @@ YOLOv8 may be used directly in the Command Line Interface (CLI) with a `yolo` co
  yolo predict model=yolov8n.pt source='https://ultralytics.com/images/bus.jpg'
  ```

- `yolo` can be used for a variety of tasks and modes and accepts additional arguments, i.e. `imgsz=640`. See the YOLOv8 [CLI Docs](https://docs.ultralytics.com/usage/cli) for examples.
+ `yolo` can be used for a variety of tasks and modes and accepts additional arguments, i.e. `imgsz=640`. See the YOLOv8 [CLI Docs](https://docs.ultralytics.com/usage/cli/) for examples.

  ### Python

@@ -182,7 +182,7 @@ results = model("https://ultralytics.com/images/bus.jpg") # predict on an image
  path = model.export(format="onnx")  # export the model to ONNX format
  ```

- See YOLOv8 [Python Docs](https://docs.ultralytics.com/usage/python) for more examples.
+ See YOLOv8 [Python Docs](https://docs.ultralytics.com/usage/python/) for more examples.

  </details>

@@ -201,7 +201,7 @@ Ultralytics provides interactive notebooks for YOLOv8, covering training, valida

  ## <div align="center">Models</div>

- YOLOv8 [Detect](https://docs.ultralytics.com/tasks/detect), [Segment](https://docs.ultralytics.com/tasks/segment) and [Pose](https://docs.ultralytics.com/tasks/pose) models pretrained on the [COCO](https://docs.ultralytics.com/datasets/detect/coco) dataset are available here, as well as YOLOv8 [Classify](https://docs.ultralytics.com/tasks/classify) models pretrained on the [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet) dataset. [Track](https://docs.ultralytics.com/modes/track) mode is available for all Detect, Segment and Pose models.
+ YOLOv8 [Detect](https://docs.ultralytics.com/tasks/detect/), [Segment](https://docs.ultralytics.com/tasks/segment/) and [Pose](https://docs.ultralytics.com/tasks/pose/) models pretrained on the [COCO](https://docs.ultralytics.com/datasets/detect/coco/) dataset are available here, as well as YOLOv8 [Classify](https://docs.ultralytics.com/tasks/classify/) models pretrained on the [ImageNet](https://docs.ultralytics.com/datasets/classify/imagenet/) dataset. [Track](https://docs.ultralytics.com/modes/track/) mode is available for all Detect, Segment and Pose models.

  <img width="1024" src="https://raw.githubusercontent.com/ultralytics/assets/main/im/banner-tasks.png" alt="Ultralytics YOLO supported tasks">

@@ -312,7 +312,7 @@ See [Classification Docs](https://docs.ultralytics.com/tasks/classify/) for usag

  ## <div align="center">Integrations</div>

- Our key integrations with leading AI platforms extend the functionality of Ultralytics' offerings, enhancing tasks like dataset labeling, training, visualization, and model management. Discover how Ultralytics, in collaboration with [Roboflow](https://roboflow.com/?ref=ultralytics), ClearML, [Comet](https://bit.ly/yolov8-readme-comet), Neural Magic and [OpenVINO](https://docs.ultralytics.com/integrations/openvino), can optimize your AI workflow.
+ Our key integrations with leading AI platforms extend the functionality of Ultralytics' offerings, enhancing tasks like dataset labeling, training, visualization, and model management. Discover how Ultralytics, in collaboration with [Roboflow](https://roboflow.com/?ref=ultralytics), ClearML, [Comet](https://bit.ly/yolov8-readme-comet), Neural Magic and [OpenVINO](https://docs.ultralytics.com/integrations/openvino/), can optimize your AI workflow.

  <br>
  <a href="https://ultralytics.com/hub" target="_blank">
@@ -347,7 +347,7 @@ Experience seamless AI with [Ultralytics HUB](https://www.ultralytics.com/hub)

  ## <div align="center">Contribute</div>

- We love your input! YOLOv5 and YOLOv8 would not be possible without help from our community. Please see our [Contributing Guide](https://docs.ultralytics.com/help/contributing) to get started, and fill out our [Survey](https://www.ultralytics.com/survey?utm_source=github&utm_medium=social&utm_campaign=Survey) to send us feedback on your experience. Thank you 🙏 to all our contributors!
+ We love your input! YOLOv5 and YOLOv8 would not be possible without help from our community. Please see our [Contributing Guide](https://docs.ultralytics.com/help/contributing/) to get started, and fill out our [Survey](https://www.ultralytics.com/survey?utm_source=github&utm_medium=social&utm_campaign=Survey) to send us feedback on your experience. Thank you 🙏 to all our contributors!

  <!-- SVG image from https://opencollective.com/ultralytics/contributors.svg?width=990 -->

{ultralytics-8.2.93.dist-info → ultralytics-8.2.94.dist-info}/RECORD RENAMED
@@ -1,6 +1,6 @@
  tests/__init__.py,sha256=9evx3lOdKZeY1iWXvH-FkMkgf8jLucWICoabzeD6aYg,626
  tests/conftest.py,sha256=3ZtD4VlMKK5jVJwIPCrNAcG63vywJzdLq7U2AfYR2VI,2919
- tests/test_cli.py,sha256=as6cuNdDF2s_h3DxVXKmy45V3DXWB6y40xect93TKHc,4810
+ tests/test_cli.py,sha256=E4lMt49TGo12Lb5CgQfpk1bwyFUZuFxF0V9j_ykV7xM,4821
  tests/test_cuda.py,sha256=uD-ddNEcBMFQmQ9iE4fIGh0EIcGwEoDEUNVCEHicaWE,5133
  tests/test_engine.py,sha256=xW-UT9_9xZp-7-hSnbJgMw_ezTk6NqTOIiA59XZDmxA,4934
  tests/test_explorer.py,sha256=IMFvZ9uMoEXVC5FwdaVh0821wBgs7muVF6aw1F-auAI,2572
@@ -8,7 +8,7 @@ tests/test_exports.py,sha256=Uezf3OatpPHlo5qoPw-2kqkZxuMCF9L4XF2riD4vmII,8225
  tests/test_integrations.py,sha256=xglcfMPjfVh346PV8WTpk6tBxraCXEFJEQyyJMr5tyU,6064
  tests/test_python.py,sha256=08fg47DuJflumuUBto480-9VCqtEGAhQjNnQdcHs9_c,22242
  tests/test_solutions.py,sha256=p_2edhl96Ty3jwzSf02Q2m2mTu9skc0Z-eMcUuuXfLg,3300
- ultralytics/__init__.py,sha256=Q72ivw5La5ht_Ky1pTUaxg2X6CaMBSSncTAiqLD-wPM,695
+ ultralytics/__init__.py,sha256=O6sPQ_8fCEfwvfDZLx6eIevrmM_KAbE4h0FlV6oM2lg,695
  ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
  ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
  ultralytics/cfg/__init__.py,sha256=pkB7wk0pHOA3xzKzMbS-hA0iJoPOWVNnwZJh0LuWh-w,33089
@@ -102,7 +102,7 @@ ultralytics/engine/exporter.py,sha256=MtBFbJp3ifhn9sQXuQb7vxxOmtS_SOw7lnQhrq4H42
  ultralytics/engine/model.py,sha256=AB9tu7kJW-QiTAp0F_J8KQJ4FijsHXcYBTaVHb7aMrg,52281
  ultralytics/engine/predictor.py,sha256=MgMWHUJdRcVCaVmOyvdy2Gjk_EyRHv-ar0SSGxQe8F4,17471
  ultralytics/engine/results.py,sha256=x5Ptr5uGjEz63_N1DnfDg2ktNhLqT93oPyIPruuWp6w,70986
- ultralytics/engine/trainer.py,sha256=7QPWrOwfw2hUNzNKtvTnAM2ui8vdIEDbMn9JTLkmQ9o,36307
+ ultralytics/engine/trainer.py,sha256=GtuAXRElX7CJ-XAQy_XqmvzzsD25SrRlViRGADKec2c,36681
  ultralytics/engine/tuner.py,sha256=gPqDTHH7vRB2O3YyH26m1BjVKbXxuA2XAlPRzTKFZsc,11838
  ultralytics/engine/validator.py,sha256=yaUMb5efBvgFg8M24IFlmv3J-acbbSgtqLCk-mM07Wo,14623
  ultralytics/hub/__init__.py,sha256=AM_twjV9ouUmyxh3opoPgTqDpMOd8xIOHsAKdWS2L18,5663
@@ -129,7 +129,7 @@ ultralytics/models/sam/__init__.py,sha256=o4_D6y8YJlOXIK7Lwo9RHnIJJ9xoFNi4zK99QS
  ultralytics/models/sam/amg.py,sha256=GrmO_8YfIDt_QkPEMF_WFjPZkhwhf7iwx7ig8JgOUnE,8709
  ultralytics/models/sam/build.py,sha256=zNQbrgSHUgz1gyXQwLKGTpa6CSEjeaevcP3w1Z1l3mo,12233
  ultralytics/models/sam/model.py,sha256=2KFUp8SHiqOgwUjkdqdau0oduJwKQxm4N9GHWjdhUFo,7382
- ultralytics/models/sam/predict.py,sha256=4HOvBp27MvO8ef3gD64wVooNT1P5eMy3Bk8W7ysU57o,38352
+ ultralytics/models/sam/predict.py,sha256=unsoNrEx6pexKD28-HTpALa02PtNtE4e2ERdzs9qbYw,38556
  ultralytics/models/sam/modules/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
  ultralytics/models/sam/modules/blocks.py,sha256=Q-KwhFbdyZhl1tjG_kP2LcQkZbzoNt618i-NRrKNx2Y,45919
  ultralytics/models/sam/modules/decoders.py,sha256=mODsqnTN_CjE3H0Sh9cd8PfTnHANPjGB1bjqHxfezSg,25830
@@ -209,7 +209,7 @@ ultralytics/utils/loss.py,sha256=mDHGmF-gjggAUVhI1dkCm7TtfZHCwz25XKm4M2xJKLs,339
  ultralytics/utils/metrics.py,sha256=UgLGudWp57uXDMlMUJy4gsz6cfVjcq7tYmHeto3TqvM,53927
  ultralytics/utils/ops.py,sha256=dsXNdyrYx_p6io6zezig9p84dxS7U-10vceHNVu2IL0,32888
  ultralytics/utils/patches.py,sha256=Oo3DkP7MbXnNGvPfoFSocAkVvaPh9kwMT_9RQUfjVhI,3594
- ultralytics/utils/plotting.py,sha256=Tp1vjSrzbtQc1ILlT1Frw9YzvGtOHlf8bdLAvZg7TBU,56181
+ ultralytics/utils/plotting.py,sha256=bud5mAvFxQ2JD29dReaO4c7Z00k6jIaPJJCznIoyy2w,61543
  ultralytics/utils/tal.py,sha256=ECsu95xEqOItmxMDN4YTD3FsUiIsQNWy0pZC3TfvFfk,16877
  ultralytics/utils/torch_utils.py,sha256=NgZtDgjQkAVCAqCdFrFMSU9Fl_x3pYqaYa1mhAvOb_8,29312
  ultralytics/utils/triton.py,sha256=gg1finxno_tY2Ge9PMhmu7PI9wvoFZoiicdT4Bhqv3w,3936
@@ -225,9 +225,9 @@ ultralytics/utils/callbacks/neptune.py,sha256=5Z3ua5YBTUS56FH8VQKQG1aaIo9fH8GEyz
  ultralytics/utils/callbacks/raytune.py,sha256=ODVYzy-CoM4Uge0zjkh3Hnh9nF2M0vhDrSenXnvcizw,705
  ultralytics/utils/callbacks/tensorboard.py,sha256=0kn4IR10no99UCIheojWRujgybmUHSx5fPI6Vsq6l_g,4135
  ultralytics/utils/callbacks/wb.py,sha256=9-fjQIdLjr3b73DTE3rHO171KvbH1VweJ-bmbv-rqTw,6747
- ultralytics-8.2.93.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
- ultralytics-8.2.93.dist-info/METADATA,sha256=dZ7rJ_R_Hwgk3PxcoHgfxNG5ACT4HBbUPp5yCGNvl2I,41871
- ultralytics-8.2.93.dist-info/WHEEL,sha256=cVxcB9AmuTcXqmwrtPhNK88dr7IR_b6qagTj0UvIEbY,91
- ultralytics-8.2.93.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
- ultralytics-8.2.93.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
- ultralytics-8.2.93.dist-info/RECORD,,
+ ultralytics-8.2.94.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ ultralytics-8.2.94.dist-info/METADATA,sha256=tqncGrC7zri313tqgv5vjcUWYKeUYHhlWpPCXMtxIO0,41897
+ ultralytics-8.2.94.dist-info/WHEEL,sha256=5Mi1sN9lKoFv_gxcPtisEVrJZihrm_beibeg5R6xb4I,91
+ ultralytics-8.2.94.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ ultralytics-8.2.94.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ ultralytics-8.2.94.dist-info/RECORD,,
{ultralytics-8.2.93.dist-info → ultralytics-8.2.94.dist-info}/WHEEL RENAMED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (74.1.2)
+ Generator: setuptools (75.0.0)
  Root-Is-Purelib: true
  Tag: py3-none-any