ultralytics 8.3.131__py3-none-any.whl → 8.3.133__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. tests/test_cuda.py +16 -10
  2. ultralytics/__init__.py +1 -1
  3. ultralytics/cfg/datasets/HomeObjects-3K.yaml +33 -0
  4. ultralytics/data/dataset.py +5 -3
  5. ultralytics/engine/exporter.py +10 -2
  6. ultralytics/engine/model.py +1 -1
  7. ultralytics/engine/trainer.py +13 -10
  8. ultralytics/models/yolo/detect/train.py +1 -1
  9. ultralytics/models/yolo/pose/train.py +15 -0
  10. ultralytics/models/yolo/world/train_world.py +6 -5
  11. ultralytics/models/yolo/yoloe/predict.py +0 -1
  12. ultralytics/models/yolo/yoloe/train_seg.py +0 -1
  13. ultralytics/nn/modules/block.py +2 -2
  14. ultralytics/nn/tasks.py +15 -6
  15. ultralytics/solutions/heatmap.py +1 -2
  16. ultralytics/solutions/instance_segmentation.py +1 -0
  17. ultralytics/solutions/object_blurrer.py +0 -1
  18. ultralytics/solutions/object_counter.py +9 -4
  19. ultralytics/solutions/solutions.py +3 -6
  20. ultralytics/solutions/vision_eye.py +0 -1
  21. ultralytics/utils/__init__.py +2 -1
  22. ultralytics/utils/autodevice.py +0 -1
  23. ultralytics/utils/callbacks/neptune.py +0 -1
  24. ultralytics/utils/checks.py +17 -12
  25. ultralytics/utils/loss.py +1 -1
  26. ultralytics/utils/metrics.py +2 -2
  27. {ultralytics-8.3.131.dist-info → ultralytics-8.3.133.dist-info}/METADATA +1 -1
  28. {ultralytics-8.3.131.dist-info → ultralytics-8.3.133.dist-info}/RECORD +32 -31
  29. {ultralytics-8.3.131.dist-info → ultralytics-8.3.133.dist-info}/WHEEL +0 -0
  30. {ultralytics-8.3.131.dist-info → ultralytics-8.3.133.dist-info}/entry_points.txt +0 -0
  31. {ultralytics-8.3.131.dist-info → ultralytics-8.3.133.dist-info}/licenses/LICENSE +0 -0
  32. {ultralytics-8.3.131.dist-info → ultralytics-8.3.133.dist-info}/top_level.txt +0 -0
tests/test_cuda.py CHANGED
@@ -9,7 +9,7 @@ import torch
 from tests import CUDA_DEVICE_COUNT, CUDA_IS_AVAILABLE, MODEL, SOURCE
 from ultralytics import YOLO
 from ultralytics.cfg import TASK2DATA, TASK2MODEL, TASKS
-from ultralytics.utils import ASSETS, WEIGHTS_DIR
+from ultralytics.utils import ASSETS, IS_JETSON, WEIGHTS_DIR
 from ultralytics.utils.autodevice import GPUInfo
 from ultralytics.utils.checks import check_amp
 from ultralytics.utils.torch_utils import TORCH_1_13
@@ -17,11 +17,14 @@ from ultralytics.utils.torch_utils import TORCH_1_13
 # Try to find idle devices if CUDA is available
 DEVICES = []
 if CUDA_IS_AVAILABLE:
-    gpu_info = GPUInfo()
-    gpu_info.print_status()
-    idle_gpus = gpu_info.select_idle_gpu(count=2, min_memory_mb=2048)
-    if idle_gpus:
-        DEVICES = idle_gpus
+    if IS_JETSON:
+        DEVICES = [0]  # NVIDIA Jetson only has one GPU and does not fully support pynvml library
+    else:
+        gpu_info = GPUInfo()
+        gpu_info.print_status()
+        idle_gpus = gpu_info.select_idle_gpu(count=2, min_memory_mb=2048)
+        if idle_gpus:
+            DEVICES = idle_gpus


 def test_checks():
@@ -38,6 +41,7 @@ def test_amp():


 @pytest.mark.slow
+# @pytest.mark.skipif(IS_JETSON, reason="Temporary disable ONNX for Jetson")
 @pytest.mark.skipif(not DEVICES, reason="No CUDA devices available")
 @pytest.mark.parametrize(
     "task, dynamic, int8, half, batch, simplify, nms",
@@ -49,7 +53,7 @@ def test_amp():
         if not (
             (int8 and half)
             or (task == "classify" and nms)
-            or (task == "obb" and nms and not TORCH_1_13)
+            or (task == "obb" and nms and (not TORCH_1_13 or IS_JETSON))  # obb nms fails on NVIDIA Jetson
             or (simplify and dynamic)  # onnxslim is slow when dynamic=True
         )
     ],
@@ -110,9 +114,11 @@ def test_train():

     device = tuple(DEVICES) if len(DEVICES) > 1 else DEVICES[0]
     results = YOLO(MODEL).train(data="coco8.yaml", imgsz=64, epochs=1, device=device)  # requires imgsz>=64
-    visible = eval(os.environ["CUDA_VISIBLE_DEVICES"])
-    assert visible == device, f"Passed GPUs '{device}', but used GPUs '{visible}'"
-    assert results is (None if len(DEVICES) > 1 else not None)  # DDP returns None, single-GPU returns metrics
+    # NVIDIA Jetson only has one GPU and therefore skipping checks
+    if not IS_JETSON:
+        visible = eval(os.environ["CUDA_VISIBLE_DEVICES"])
+        assert visible == device, f"Passed GPUs '{device}', but used GPUs '{visible}'"
+        assert results is (None if len(DEVICES) > 1 else not None)  # DDP returns None, single-GPU returns metrics


 @pytest.mark.slow
ultralytics/__init__.py CHANGED
@@ -1,6 +1,6 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-__version__ = "8.3.131"
+__version__ = "8.3.133"

 import os

ultralytics/cfg/datasets/HomeObjects-3K.yaml ADDED
@@ -0,0 +1,33 @@
+# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+# HomeObjects-3K dataset by Ultralytics
+# Documentation: https://docs.ultralytics.com/datasets/detect/homeobjects-3k/
+# Example usage: yolo train data=HomeObjects-3K.yaml
+# parent
+# ├── ultralytics
+# └── datasets
+#     └── homeobjects-3K ← downloads here (390 MB)
+
+# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..]
+path: ../datasets/homeobjects-3K # dataset root dir
+train: train/images # train images (relative to 'path') 2285 images
+val: valid/images # val images (relative to 'path') 404 images
+test: # test images (relative to 'path')
+
+# Classes
+names:
+  0: bed
+  1: sofa
+  2: chair
+  3: table
+  4: lamp
+  5: tv
+  6: laptop
+  7: wardrobe
+  8: window
+  9: door
+  10: potted plant
+  11: photo frame
+
+# Download script/URL (optional)
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/homeobjects-3K.zip
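Usage sketch (not from the diff): the new config plugs into the normal training entry point. The yolo11n.pt checkpoint, epochs and imgsz below are illustrative choices, and the dataset is expected to auto-download from the `download:` URL on first use.

from ultralytics import YOLO

# Hedged sketch: fine-tune a small detector on the new HomeObjects-3K config.
model = YOLO("yolo11n.pt")
model.train(data="HomeObjects-3K.yaml", epochs=100, imgsz=640)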
ultralytics/data/dataset.py CHANGED
@@ -39,7 +39,7 @@ from .utils import (
     verify_image_label,
 )

-# Ultralytics dataset *.cache version, >= 1.0.0 for YOLOv8
+# Ultralytics dataset *.cache version, >= 1.0.0 for Ultralytics YOLO models
 DATASET_CACHE_VERSION = "1.0.3"


@@ -184,7 +184,9 @@ class YOLODataset(BaseDataset):
         [cache.pop(k) for k in ("hash", "version", "msgs")]  # remove items
         labels = cache["labels"]
         if not labels:
-            LOGGER.warning(f"No images found in {cache_path}, training may not work correctly. {HELP_URL}")
+            raise RuntimeError(
+                f"No valid images found in {cache_path}. Images with incorrectly formatted labels are ignored. {HELP_URL}"
+            )
         self.im_files = [lb["im_file"] for lb in labels]  # update im_files

         # Check if the dataset is all boxes or all segments
@@ -199,7 +201,7 @@ class YOLODataset(BaseDataset):
             for lb in labels:
                 lb["segments"] = []
         if len_cls == 0:
-            LOGGER.warning(f"No labels found in {cache_path}, training may not work correctly. {HELP_URL}")
+            LOGGER.warning(f"Labels are missing or empty in {cache_path}, training may not work correctly. {HELP_URL}")
         return labels

     def build_transforms(self, hyp=None):
ultralytics/engine/exporter.py CHANGED
@@ -89,6 +89,7 @@ from ultralytics.utils import (
     MACOS_VERSION,
     RKNN_CHIPS,
     ROOT,
+    SETTINGS,
     WINDOWS,
     YAML,
     callbacks,
@@ -106,7 +107,7 @@ from ultralytics.utils.downloads import attempt_download_asset, get_github_asset
 from ultralytics.utils.export import export_engine, export_onnx
 from ultralytics.utils.files import file_size, spaces_in_path
 from ultralytics.utils.ops import Profile, nms_rotated
-from ultralytics.utils.torch_utils import TORCH_1_13, get_latest_opset, select_device
+from ultralytics.utils.torch_utils import TORCH_1_13, get_cpu_info, get_latest_opset, select_device


 def export_formats():
@@ -344,7 +345,6 @@ class Exporter:
                 "See https://docs.ultralytics.com/models/yolo-world for details."
             )
             model.clip_model = None  # openvino int8 export error: https://github.com/ultralytics/ultralytics/pull/18445
-
         if self.args.int8 and not self.args.data:
             self.args.data = DEFAULT_CFG.data or TASK2DATA[getattr(model, "task", "detect")]  # assign default data
             LOGGER.warning(
@@ -352,6 +352,14 @@ class Exporter:
             )
         if tfjs and (ARM64 and LINUX):
             raise SystemError("TF.js exports are not currently supported on ARM64 Linux")
+        # Recommend OpenVINO if export and Intel CPU
+        if SETTINGS.get("openvino_msg"):
+            if "intel" in get_cpu_info().lower():
+                LOGGER.info(
+                    "💡 ProTip: Export to OpenVINO format for best performance on Intel CPUs."
+                    " Learn more at https://docs.ultralytics.com/integrations/openvino/"
+                )
+            SETTINGS["openvino_msg"] = False

         # Input
         im = torch.zeros(self.args.batch, model.yaml.get("channels", 3), *self.imgsz).to(self.device)
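The ProTip above only points at the existing OpenVINO export path. A minimal sketch of that path, assuming a yolo11n.pt checkpoint and the public bus.jpg sample image:

from ultralytics import YOLO

# Hedged sketch of the export flow the new Intel-CPU ProTip recommends.
model = YOLO("yolo11n.pt")
ov_dir = model.export(format="openvino")  # writes an *_openvino_model/ directory
YOLO(ov_dir)("https://ultralytics.com/images/bus.jpg")  # sanity-check the exported model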
ultralytics/engine/model.py CHANGED
@@ -529,7 +529,7 @@ class Model(torch.nn.Module):
             - For SAM-type models, 'prompts' can be passed as a keyword argument.
         """
         if source is None:
-            source = ASSETS
+            source = "https://ultralytics.com/images/boats.jpg" if self.task == "obb" else ASSETS
             LOGGER.warning(f"'source' is missing. Using 'source={source}'.")

         is_cli = (ARGV[0].endswith("yolo") or ARGV[0].endswith("ultralytics")) and any(
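A hedged sketch of the behaviour change above (model names are illustrative): only no-source predict calls on OBB models are affected.

from ultralytics import YOLO

YOLO("yolo11n-obb.pt").predict()  # warns that 'source' is missing and falls back to the boats.jpg URL
YOLO("yolo11n.pt").predict()      # other tasks still fall back to the bundled ASSETS images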
ultralytics/engine/trainer.py CHANGED
@@ -77,8 +77,6 @@ class BaseTrainer:
         amp (bool): Flag to enable AMP (Automatic Mixed Precision).
         scaler (amp.GradScaler): Gradient scaler for AMP.
         data (str): Path to data.
-        trainset (torch.utils.data.Dataset): Training dataset.
-        testset (torch.utils.data.Dataset): Testing dataset.
         ema (nn.Module): EMA (Exponential Moving Average) of the model.
         resume (bool): Resume training from a checkpoint.
         lf (nn.Module): Loss function.
@@ -136,7 +134,8 @@ class BaseTrainer:
         # Model and Dataset
         self.model = check_model_file_from_stem(self.args.model)  # add suffix, i.e. yolo11n -> yolo11n.pt
         with torch_distributed_zero_first(LOCAL_RANK):  # avoid auto-downloading dataset multiple times
-            self.trainset, self.testset = self.get_dataset()
+            self.data = self.get_dataset()
+
         self.ema = None

         # Optimization utils init
@@ -289,11 +288,16 @@ class BaseTrainer:

         # Dataloaders
         batch_size = self.batch_size // max(world_size, 1)
-        self.train_loader = self.get_dataloader(self.trainset, batch_size=batch_size, rank=LOCAL_RANK, mode="train")
+        self.train_loader = self.get_dataloader(
+            self.data["train"], batch_size=batch_size, rank=LOCAL_RANK, mode="train"
+        )
         if RANK in {-1, 0}:
             # Note: When training DOTA dataset, double batch size could get OOM on images with >2000 objects.
             self.test_loader = self.get_dataloader(
-                self.testset, batch_size=batch_size if self.args.task == "obb" else batch_size * 2, rank=-1, mode="val"
+                self.data.get("val") or self.data.get("test"),
+                batch_size=batch_size if self.args.task == "obb" else batch_size * 2,
+                rank=-1,
+                mode="val",
             )
             self.validator = self.get_validator()
             metric_keys = self.validator.metrics.keys + self.label_loss_items(prefix="val")
@@ -569,7 +573,7 @@ class BaseTrainer:
         Get train and validation datasets from data dictionary.

         Returns:
-            (tuple): A tuple containing the training and validation/test datasets.
+            (dict): A dictionary containing the training/validation/test dataset and category names.
         """
         try:
             if self.args.task == "classify":
@@ -585,12 +589,11 @@ class BaseTrainer:
             self.args.data = data["yaml_file"]  # for validating 'yolo train data=url.zip' usage
         except Exception as e:
             raise RuntimeError(emojis(f"Dataset '{clean_url(self.args.data)}' error ❌ {e}")) from e
-        self.data = data
         if self.args.single_cls:
             LOGGER.info("Overriding class names with single class.")
-            self.data["names"] = {0: "item"}
-            self.data["nc"] = 1
-        return data["train"], data.get("val") or data.get("test")
+            data["names"] = {0: "item"}
+            data["nc"] = 1
+        return data


     def setup_model(self):
ultralytics/models/yolo/detect/train.py CHANGED
@@ -212,6 +212,6 @@ class DetectionTrainer(BaseTrainer):
         Returns:
             (int): Optimal batch size.
         """
-        train_dataset = self.build_dataset(self.trainset, mode="train", batch=16)
+        train_dataset = self.build_dataset(self.data["train"], mode="train", batch=16)
         max_num_obj = max(len(label["cls"]) for label in train_dataset.labels) * 4  # 4 for mosaic augmentation
         return super().auto_batch(max_num_obj)
ultralytics/models/yolo/pose/train.py CHANGED
@@ -137,3 +137,18 @@ class PoseTrainer(yolo.detect.DetectionTrainer):
     def plot_metrics(self):
         """Plots training/val metrics."""
         plot_results(file=self.csv, pose=True, on_plot=self.on_plot)  # save results.png
+
+    def get_dataset(self):
+        """
+        Retrieves the dataset and ensures it contains the required `kpt_shape` key.
+
+        Returns:
+            (dict): A dictionary containing the training/validation/test dataset and category names.
+
+        Raises:
+            KeyError: If the `kpt_shape` key is not present in the dataset.
+        """
+        data = super().get_dataset()
+        if "kpt_shape" not in data:
+            raise KeyError(f"No `kpt_shape` in the {self.args.data}. See https://docs.ultralytics.com/datasets/pose/")
+        return data
ultralytics/models/yolo/world/train_world.py CHANGED
@@ -137,12 +137,13 @@ class WorldTrainerFromScratch(WorldTrainer):
             for g in grounding_data:
                 assert isinstance(g, dict), f"Grounding data should be provided in dict format, but got {type(g)}"
                 final_data[s] += grounding_data
+        data["val"] = data["val"][0]  # assign the first val dataset as currently only one validation set is supported
         # NOTE: to make training work properly, set `nc` and `names`
-        final_data["nc"] = data["val"][0]["nc"]
-        final_data["names"] = data["val"][0]["names"]
+        final_data["nc"] = data["val"]["nc"]
+        final_data["names"] = data["val"]["names"]

-        # NOTE: add path with lvis path
-        final_data["path"] = data["val"][0]["path"]
+        final_data["path"] = data["val"]["path"]
+        final_data["channels"] = data["val"]["channels"]
         self.data = final_data
         if self.args.single_cls:  # consistent with base trainer
             LOGGER.info("Overriding class names with single class.")
@@ -154,7 +155,7 @@ class WorldTrainerFromScratch(WorldTrainer):
                 d["names"] = {0: "object"}
                 d["nc"] = 1
             self.training_data[d["train"]] = d
-        return final_data["train"], final_data["val"][0]
+        return final_data

     def plot_training_labels(self):
         """Do not plot labels for YOLO-World training."""
ultralytics/models/yolo/yoloe/predict.py CHANGED
@@ -1,6 +1,5 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-
 import numpy as np
 import torch

ultralytics/models/yolo/yoloe/train_seg.py CHANGED
@@ -1,6 +1,5 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-
 from copy import copy, deepcopy

 from ultralytics.models.yolo.segment import SegmentationTrainer
ultralytics/nn/modules/block.py CHANGED
@@ -76,11 +76,11 @@ class DFL(nn.Module):


 class Proto(nn.Module):
-    """YOLOv8 mask Proto module for segmentation models."""
+    """Ultralytics YOLO models mask Proto module for segmentation models."""

     def __init__(self, c1, c_=256, c2=32):
         """
-        Initialize the YOLOv8 mask Proto module with specified number of protos and masks.
+        Initialize the Ultralytics YOLO models mask Proto module with specified number of protos and masks.

         Args:
             c1 (int): Input channels.
ultralytics/nn/tasks.py CHANGED
@@ -281,10 +281,19 @@ class BaseModel(torch.nn.Module):
         """
         model = weights["model"] if isinstance(weights, dict) else weights  # torchvision models are not dicts
         csd = model.float().state_dict()  # checkpoint state_dict as FP32
-        csd = intersect_dicts(csd, self.state_dict())  # intersect
-        self.load_state_dict(csd, strict=False)  # load
+        updated_csd = intersect_dicts(csd, self.state_dict())  # intersect
+        self.load_state_dict(updated_csd, strict=False)  # load
+        len_updated_csd = len(updated_csd)
+        first_conv = "model.0.conv.weight"
+        if first_conv not in updated_csd:  # mostly used to boost multi-channel training
+            c1, c2, h, w = self.state_dict()[first_conv].shape
+            cc1, cc2, ch, cw = csd[first_conv].shape
+            if ch == h and cw == w:
+                c1, c2 = min(c1, cc1), min(c2, cc2)
+                self.state_dict()[first_conv][:c1, :c2] = csd[first_conv][:c1, :c2]
+                len_updated_csd += 1
         if verbose:
-            LOGGER.info(f"Transferred {len(csd)}/{len(self.model.state_dict())} items from pretrained weights")
+            LOGGER.info(f"Transferred {len_updated_csd}/{len(self.model.state_dict())} items from pretrained weights")

     def loss(self, batch, preds=None):
         """
@@ -458,7 +467,7 @@ class SegmentationModel(DetectionModel):

     def __init__(self, cfg="yolo11n-seg.yaml", ch=3, nc=None, verbose=True):
         """
-        Initialize YOLOv8 segmentation model with given config and parameters.
+        Initialize Ultralytics YOLO segmentation model with given config and parameters.

         Args:
             cfg (str | dict): Model configuration file path or dictionary.
@@ -478,7 +487,7 @@ class PoseModel(DetectionModel):

     def __init__(self, cfg="yolo11n-pose.yaml", ch=3, nc=None, data_kpt_shape=(None, None), verbose=True):
         """
-        Initialize YOLOv8 Pose model.
+        Initialize Ultralytics YOLO Pose model.

         Args:
             cfg (str | dict): Model configuration file path or dictionary.
@@ -517,7 +526,7 @@ class ClassificationModel(BaseModel):

     def _from_yaml(self, cfg, ch, nc, verbose):
         """
-        Set YOLOv8 model configurations and define the model architecture.
+        Set Ultralytics YOLO model configurations and define the model architecture.

         Args:
             cfg (str | dict): Model configuration file path or dictionary.
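The new first-conv handling above copies only the overlapping channel slice when the checkpoint and the model disagree on channel counts. A standalone sketch of the idea with toy tensors (shapes are made up):

import torch

# A 3-channel pretrained first conv transferred into a model built with ch=4.
pretrained = torch.randn(16, 3, 3, 3)  # (out_ch, in_ch, kh, kw) from an RGB checkpoint
target = torch.zeros(16, 4, 3, 3)      # same layer in the multi-channel model

c1, c2 = min(target.shape[0], pretrained.shape[0]), min(target.shape[1], pretrained.shape[1])
if target.shape[2:] == pretrained.shape[2:]:  # spatial kernel size must match
    target[:c1, :c2] = pretrained[:c1, :c2]   # copy the overlapping out/in channel block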
ultralytics/solutions/heatmap.py CHANGED
@@ -100,12 +100,11 @@ class Heatmap(ObjectCounter):
             self.annotator.draw_region(reg_pts=self.region, color=(104, 0, 123), thickness=self.line_width * 2)
             self.store_tracking_history(track_id, box)  # Store track history
             self.store_classwise_counts(cls)  # Store classwise counts in dict
-            current_centroid = ((box[0] + box[2]) / 2, (box[1] + box[3]) / 2)
             # Get previous position if available
             prev_position = None
             if len(self.track_history[track_id]) > 1:
                 prev_position = self.track_history[track_id][-2]
-            self.count_objects(current_centroid, track_id, prev_position, cls)  # Perform object counting
+            self.count_objects(self.track_history[track_id][-1], track_id, prev_position, cls)  # object counting

         plot_im = self.annotator.result()
         if self.region is not None:
ultralytics/solutions/instance_segmentation.py CHANGED
@@ -62,6 +62,7 @@ class InstanceSegmentation(BaseSolution):
             >>> print(summary)
         """
         self.extract_tracks(im0)  # Extract tracks (bounding boxes, classes, and masks)
+        self.masks = getattr(self.tracks[0], "masks", None)

         # Iterate over detected classes, track IDs, and segmentation masks
         if self.masks is None:
ultralytics/solutions/object_blurrer.py CHANGED
@@ -1,6 +1,5 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-
 import cv2

 from ultralytics.solutions.solutions import BaseSolution, SolutionAnnotator, SolutionResults
ultralytics/solutions/object_counter.py CHANGED
@@ -174,6 +174,10 @@ class ObjectCounter(BaseSolution):
         self.extract_tracks(im0)  # Extract tracks
         self.annotator = SolutionAnnotator(im0, line_width=self.line_width)  # Initialize annotator

+        is_obb = getattr(self.tracks[0], "obb", None) is not None  # True if OBB results exist
+        if is_obb:
+            self.boxes = self.track_data.xyxyxyxy.reshape(-1, 4, 2).cpu()
+
         self.annotator.draw_region(
             reg_pts=self.region, color=(104, 0, 123), thickness=self.line_width * 2
         )  # Draw region
@@ -181,16 +185,17 @@ class ObjectCounter(BaseSolution):
         # Iterate over bounding boxes, track ids and classes index
         for box, track_id, cls, conf in zip(self.boxes, self.track_ids, self.clss, self.confs):
             # Draw bounding box and counting region
-            self.annotator.box_label(box, label=self.adjust_box_label(cls, conf, track_id), color=colors(cls, True))
-            self.store_tracking_history(track_id, box)  # Store track history
+            self.annotator.box_label(
+                box, label=self.adjust_box_label(cls, conf, track_id), color=colors(cls, True), rotated=is_obb
+            )
+            self.store_tracking_history(track_id, box, is_obb=is_obb)  # Store track history
             self.store_classwise_counts(cls)  # Store classwise counts in dict

-            current_centroid = ((box[0] + box[2]) / 2, (box[1] + box[3]) / 2)
             # Store previous position of track for object counting
             prev_position = None
             if len(self.track_history[track_id]) > 1:
                 prev_position = self.track_history[track_id][-2]
-            self.count_objects(current_centroid, track_id, prev_position, cls)  # Perform object counting
+            self.count_objects(self.track_history[track_id][-1], track_id, prev_position, cls)  # object counting

         plot_im = self.annotator.result()
         self.display_counts(plot_im)  # Display the counts on the frame
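The counting path now feeds count_objects() the last stored centroid, and store_tracking_history() (see the solutions.py hunk below) computes it for both box formats. A hedged illustration of the two centroid branches with toy tensors:

import torch

# Axis-aligned branch: box is [x1, y1, x2, y2]; strided slices average the x and y pairs.
xyxy = torch.tensor([10.0, 20.0, 50.0, 60.0])
aabb_centroid = (xyxy[:4:2].mean(), xyxy[1:4:2].mean())  # (30., 40.)

# OBB branch: box is the (4, 2) corner matrix produced by xyxyxyxy.reshape(-1, 4, 2);
# averaging over the corner dimension gives the rotated-box centre.
corners = torch.tensor([[10.0, 20.0], [50.0, 20.0], [50.0, 60.0], [10.0, 60.0]])
obb_centroid = tuple(corners.mean(dim=0))  # (30., 40.)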
ultralytics/solutions/solutions.py CHANGED
@@ -139,10 +139,6 @@ class BaseSolution:
         self.tracks = self.model.track(source=im0, persist=True, classes=self.classes, **self.track_add_args)
         self.track_data = self.tracks[0].obb or self.tracks[0].boxes  # Extract tracks for OBB or object detection

-        self.masks = (
-            self.tracks[0].masks if hasattr(self.tracks[0], "masks") and self.tracks[0].masks is not None else None
-        )
-
         if self.track_data and self.track_data.id is not None:
             self.boxes = self.track_data.xyxy.cpu()
             self.clss = self.track_data.cls.cpu().tolist()
@@ -152,7 +148,7 @@ class BaseSolution:
             self.LOGGER.warning("no tracks found!")
             self.boxes, self.clss, self.track_ids, self.confs = [], [], [], []

-    def store_tracking_history(self, track_id, box):
+    def store_tracking_history(self, track_id, box, is_obb=False):
         """
         Stores the tracking history of an object.

@@ -162,6 +158,7 @@ class BaseSolution:
         Args:
             track_id (int): The unique identifier for the tracked object.
             box (List[float]): The bounding box coordinates of the object in the format [x1, y1, x2, y2].
+            is_obb (bool): True if OBB model is used (applies to object counting only).

         Examples:
             >>> solution = BaseSolution()
@@ -169,7 +166,7 @@ class BaseSolution:
         """
         # Store tracking history
         self.track_line = self.track_history[track_id]
-        self.track_line.append(((box[0] + box[2]) / 2, (box[1] + box[3]) / 2))
+        self.track_line.append(tuple(box.mean(dim=0)) if is_obb else (box[:4:2].mean(), box[1:4:2].mean()))
         if len(self.track_line) > 30:
            self.track_line.pop(0)

ultralytics/solutions/vision_eye.py CHANGED
@@ -1,6 +1,5 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-
 from ultralytics.solutions.solutions import BaseSolution, SolutionAnnotator, SolutionResults
 from ultralytics.utils.plotting import colors

ultralytics/utils/__init__.py CHANGED
@@ -1312,7 +1312,8 @@ class SettingsManager(JSONDict):
             "raytune": True,  # Ray Tune integration
             "tensorboard": False,  # TensorBoard logging
             "wandb": False,  # Weights & Biases logging
-            "vscode_msg": True,  # VSCode messaging
+            "vscode_msg": True,  # VSCode message
+            "openvino_msg": True,  # OpenVINO export on Intel CPU message
         }

         self.help_msg = (
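Like the other *_msg flags, the new key can be toggled through the public settings object; a minimal sketch:

from ultralytics import settings

# Hedged sketch: opt out of the one-time OpenVINO ProTip shown during export on Intel CPUs.
settings.update({"openvino_msg": False})
print(settings["openvino_msg"])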
ultralytics/utils/autodevice.py CHANGED
@@ -1,6 +1,5 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-
 from ultralytics.utils import LOGGER
 from ultralytics.utils.checks import check_requirements

ultralytics/utils/callbacks/neptune.py CHANGED
@@ -1,6 +1,5 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

-
 from ultralytics.utils import LOGGER, SETTINGS, TESTS_RUNNING

 try:
ultralytics/utils/checks.py CHANGED
@@ -24,6 +24,7 @@ from ultralytics.utils import (
     AUTOINSTALL,
     IS_COLAB,
     IS_GIT_DIR,
+    IS_JETSON,
     IS_KAGGLE,
     IS_PIP_PACKAGE,
     LINUX,
@@ -343,7 +344,7 @@ def check_python(minimum: str = "3.8.0", hard: bool = True, verbose: bool = Fals
 @TryExcept()
 def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=(), install=True, cmds=""):
     """
-    Check if installed dependencies meet YOLOv8 requirements and attempt to auto-update if needed.
+    Check if installed dependencies meet Ultralytics YOLO models requirements and attempt to auto-update if needed.

     Args:
         requirements (Union[Path, str, List[str]]): Path to a requirements.txt file, a single package requirement as a
@@ -820,19 +821,23 @@ def cuda_device_count() -> int:
     Returns:
         (int): The number of NVIDIA GPUs available.
     """
-    try:
-        # Run the nvidia-smi command and capture its output
-        output = subprocess.check_output(
-            ["nvidia-smi", "--query-gpu=count", "--format=csv,noheader,nounits"], encoding="utf-8"
-        )
+    if IS_JETSON:
+        # NVIDIA Jetson does not fully support nvidia-smi and therefore use PyTorch instead
+        return torch.cuda.device_count()
+    else:
+        try:
+            # Run the nvidia-smi command and capture its output
+            output = subprocess.check_output(
+                ["nvidia-smi", "--query-gpu=count", "--format=csv,noheader,nounits"], encoding="utf-8"
+            )

-        # Take the first line and strip any leading/trailing white space
-        first_line = output.strip().split("\n")[0]
+            # Take the first line and strip any leading/trailing white space
+            first_line = output.strip().split("\n")[0]

-        return int(first_line)
-    except (subprocess.CalledProcessError, FileNotFoundError, ValueError):
-        # If the command fails, nvidia-smi is not found, or output is not an integer, assume no GPUs are available
-        return 0
+            return int(first_line)
+        except (subprocess.CalledProcessError, FileNotFoundError, ValueError):
+            # If the command fails, nvidia-smi is not found, or output is not an integer, assume no GPUs are available
+            return 0


 def cuda_is_available() -> bool:
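A brief usage note: cuda_device_count() keeps its signature and only swaps the backend on Jetson, so callers are unaffected.

from ultralytics.utils import IS_JETSON
from ultralytics.utils.checks import cuda_device_count

# On Jetson the count now comes from torch.cuda.device_count(); elsewhere nvidia-smi is still parsed.
print(f"Jetson={IS_JETSON}, visible NVIDIA GPUs={cuda_device_count()}")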
ultralytics/utils/loss.py CHANGED
@@ -674,7 +674,7 @@ class v8OBBLoss(v8DetectionLoss):
                 raise TypeError(
                     "ERROR ❌ OBB dataset incorrectly formatted or not a OBB dataset.\n"
                     "This error can occur when incorrectly training a 'OBB' model on a 'detect' dataset, "
-                    "i.e. 'yolo train model=yolo11n-obb.pt data=dota8.yaml'.\nVerify your dataset is a "
+                    "i.e. 'yolo train model=yolo11n-obb.pt data=coco8.yaml'.\nVerify your dataset is a "
                     "correctly formatted 'OBB' dataset using 'data=dota8.yaml' "
                     "as an example.\nSee https://docs.ultralytics.com/datasets/obb/ for help."
                 ) from e
ultralytics/utils/metrics.py CHANGED
@@ -665,7 +665,7 @@ def ap_per_class(

 class Metric(SimpleClass):
     """
-    Class for computing evaluation metrics for YOLOv8 model.
+    Class for computing evaluation metrics for Ultralytics YOLO models.

     Attributes:
         p (list): Precision for each class. Shape: (nc,).
@@ -788,7 +788,7 @@ class Metric(SimpleClass):
     def fitness(self):
         """Return model fitness as a weighted combination of metrics."""
         w = [0.0, 0.0, 0.1, 0.9]  # weights for [P, R, mAP@0.5, mAP@0.5:0.95]
-        return (np.array(self.mean_results()) * w).sum()
+        return (np.nan_to_num(np.array(self.mean_results())) * w).sum()

     def update(self, results):
         """
{ultralytics-8.3.131.dist-info → ultralytics-8.3.133.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ultralytics
-Version: 8.3.131
+Version: 8.3.133
 Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
{ultralytics-8.3.131.dist-info → ultralytics-8.3.133.dist-info}/RECORD RENAMED
@@ -1,13 +1,13 @@
 tests/__init__.py,sha256=xnMhv3O_DF1YrW4zk__ZywQzAaoTDjPKPoiI1Ktss1w,670
 tests/conftest.py,sha256=rsIAipRKfrVNoTaJ1LdpYue8AbcJ_fr3d3WIlM_6uXY,2982
 tests/test_cli.py,sha256=PtMFl5Lp_6ygBbYDJ1ndofz2k7ZYupMPEAiZw6aZVm8,5450
-tests/test_cuda.py,sha256=j07QZ92aeBhpw4s7zyCO18MOXrfEamsee20IWAa31JI,7739
+tests/test_cuda.py,sha256=eKwaqLxWTRRYNROnkH24Ch-HmxTRKQLSIxbMYFYq_p0,8123
 tests/test_engine.py,sha256=aGqZ8P7QO5C_nOa1b4FOyk92Ysdk5WiP-ST310Vyxys,4962
 tests/test_exports.py,sha256=UeeBloqYYGZNh520R3CR80XBxA9XFrNmbK9An6V6C4w,9838
 tests/test_integrations.py,sha256=dQteeRsRVuT_p5-T88-7jqT65Zm9iAXkyKg-KQ1_TQ8,6341
 tests/test_python.py,sha256=m3tV3atrc3DvXZ5S-_C1ief_pDo4KlLgudjc7rq26l0,25492
 tests/test_solutions.py,sha256=IFlqyOUCvGbLe_YZqWmNCe_afg4as0p-SfAv3j7VURI,6205
-ultralytics/__init__.py,sha256=cm7DlJknvdL-XQcwHBpWBxn2ZQaCZcJZ5NnDyL0Ezjs,730
+ultralytics/__init__.py,sha256=5KJcFLzyXLEENlwDYrbaJSUI5eiIL_K54mrNQvfpFhE,730
 ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
 ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
 ultralytics/cfg/__init__.py,sha256=We3ti0mvUQrGRmUPcufDGboW0YAO3nSRYuoWxGagk3M,39462
@@ -16,6 +16,7 @@ ultralytics/cfg/datasets/Argoverse.yaml,sha256=_xlEDIJ9XkUo0v_iNL7FW079BoSeZtKSu
 ultralytics/cfg/datasets/DOTAv1.5.yaml,sha256=SHND_CFkojxw5iQD5Mcgju2kCZIl0gW2ajuzv1cqoL0,1224
 ultralytics/cfg/datasets/DOTAv1.yaml,sha256=j_DvXVQzZ4dQmf8I7oPX4v9xO3WZXztxV4Xo9VhUTsM,1194
 ultralytics/cfg/datasets/GlobalWheat2020.yaml,sha256=TgPAhAnQAwviZcWRkuVTEww3u9VJ86rBlJvjj58ENu4,2157
+ultralytics/cfg/datasets/HomeObjects-3K.yaml,sha256=-7HrCmBkKVzfp5c7LCHg-nBZYMZ4j58QVHXz_4V6daQ,990
 ultralytics/cfg/datasets/ImageNet.yaml,sha256=6F1GXJg80iS8PJTcbAVbZX7Eb25NdJAAZ4UIS8mmrhk,42543
 ultralytics/cfg/datasets/Objects365.yaml,sha256=E0WmOVH22cKpgyWSiuLxmAMd35x2O--kS8VLW-ONoqU,9370
 ultralytics/cfg/datasets/SKU-110K.yaml,sha256=EmYFUdlxmF4SnijaifO3dHaP_uf95Vgz4FdckHeEVEM,2558
@@ -107,7 +108,7 @@ ultralytics/data/augment.py,sha256=7Md80H36S0X5RiSqCcwynSgGcRwMqnI4YbSw-rkYnlk,1
 ultralytics/data/base.py,sha256=bsASjxdkvojkFjas-JfFNSpBjo0GRAbYKDh64Y2hCH4,19015
 ultralytics/data/build.py,sha256=0nW3fjx-DceRIKJX786zP3cMAekUXHkuTGr5eVr9rSU,9769
 ultralytics/data/converter.py,sha256=znXH2XTdo0Q4NDHMny1ydVBvrxKn2kbbwI-X5bn1MlQ,26890
-ultralytics/data/dataset.py,sha256=hbsjhmZBO-T1_gkUAm128kKowdwsLNwnK2lhnzmxJB8,34826
+ultralytics/data/dataset.py,sha256=uc5OMkaQtWQHBd_KST_WXO6FEoeF4xUhKDDJBKkQ354,34916
 ultralytics/data/loaders.py,sha256=q1dlJ9hyLnf-gorutgFZLndP8ZNJDCmCcZzJZRDDLDw,28868
 ultralytics/data/split.py,sha256=6UFXcbVrzYVAPmFbl4FeZFJOkdbN3jQFepJxi_pD-I0,4748
 ultralytics/data/split_dota.py,sha256=ihG56YfNFZJDq1r7Zcgk8fKzde3gn21W0f67ub6nT68,11879
@@ -117,11 +118,11 @@ ultralytics/data/scripts/get_coco.sh,sha256=UuJpJeo3qQpTHVINeOpmP0NYmg8PhEFE3A8J
 ultralytics/data/scripts/get_coco128.sh,sha256=qmRQl_hOKrsdHrTrnyQuFIH01oDz3lfaz138OgGfLt8,650
 ultralytics/data/scripts/get_imagenet.sh,sha256=hr42H16bM47iT27rgS7MpEo-GeOZAYUQXgr0B2cwn48,1705
 ultralytics/engine/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
-ultralytics/engine/exporter.py,sha256=XDJboUBDGDrFsppwTVujoGilf5vTkO14KYMhMu5YZQ0,70333
-ultralytics/engine/model.py,sha256=37qGh6aqqPTUyMfpsvBQMaZ1Av7eJDe6mfRl9GvlfKg,52860
+ultralytics/engine/exporter.py,sha256=QI84hCFHFAbBX2evpPBxtcCLUjJEyEv40ASjqq64du4,70782
+ultralytics/engine/model.py,sha256=fWhPNWUQzjjWfTEXzTaqSSearV4THRkEa_fl4dDvzWw,52930
 ultralytics/engine/predictor.py,sha256=AwKpOGY2G-thNNiRw4Kf_MBLamq5tbRhXLNSMRArqFo,21803
 ultralytics/engine/results.py,sha256=-JPBn_YMyZv6HhdlyhjRIZCcMf41LTyWID7JrEP64rc,79632
-ultralytics/engine/trainer.py,sha256=aj41kXVeNfJOlMhSNrW_XwElQ5D0jtuX6ezJC2w8xa8,39046
+ultralytics/engine/trainer.py,sha256=c_iGyt6bwIf4aRUeVcVEuOKG9ZpixJsZUbI2eMqQXto,38951
 ultralytics/engine/tuner.py,sha256=zEW1UpLlZ6N4xbvS7MxICkshRlaFgLNfuADA0VfRpao,12629
 ultralytics/engine/validator.py,sha256=jfV81wuFDgrVVXEcPzgOpxAPrAZn-1LgpKwu9l_1-ts,17050
 ultralytics/hub/__init__.py,sha256=wDtAUKdfqob95tfFHgDJFXcsNSDSdoIQkJTm-CfIUTI,6616
@@ -169,7 +170,7 @@ ultralytics/models/yolo/classify/train.py,sha256=rv2CJv9fzvtHf2q4l5g0RsjplWKeLpz
 ultralytics/models/yolo/classify/val.py,sha256=xk-YwSQdl_oqyCBV0OOAOcXFL6CchebFOc36AkRSyjE,9992
 ultralytics/models/yolo/detect/__init__.py,sha256=GIRsLYR-kT4JJx7lh4ZZAFGBZj0aebokuU0A7JbjDVA,257
 ultralytics/models/yolo/detect/predict.py,sha256=DOjhYCHPFPPAwZLWWmNt0d7lGka8GFeriM0OA9PTEGU,5310
-ultralytics/models/yolo/detect/train.py,sha256=YOEmUZkfJBq6hNbB_P10k-uy4_2fUgdPfVWzO4y8Egs,9538
+ultralytics/models/yolo/detect/train.py,sha256=FHA2rQPbWFjceng4uVMU-k0kyOnvC5hbpv2VRnYuPSM,9543
 ultralytics/models/yolo/detect/val.py,sha256=7AB_wZi7aQ9_V1pZQSWk5qiJYS34fuO3P5aX7_3eeFE,18471
 ultralytics/models/yolo/obb/__init__.py,sha256=tQmpG8wVHsajWkZdmD6cjGohJ4ki64iSXQT8JY_dydo,221
 ultralytics/models/yolo/obb/predict.py,sha256=L40iamQgTY7VDn0WggG2jeJK8cVUo1qsNuFSbK67ry0,2974
@@ -177,7 +178,7 @@ ultralytics/models/yolo/obb/train.py,sha256=NBSpXCyIn2qxtaG7gvolUzXOB0mf3oEFIpQZ
 ultralytics/models/yolo/obb/val.py,sha256=dkXUh2JfffILVRkfXycQGImQQssUDgKMtfDRP7jUpV0,13981
 ultralytics/models/yolo/pose/__init__.py,sha256=63xmuHZLNzV8I76HhVXAq4f2W0KTk8Oi9eL-Y204LyQ,227
 ultralytics/models/yolo/pose/predict.py,sha256=sY-yMVl-hW8tGVSKt-5Pl1Bhdhj9exnmGIeb4n9wUDc,3836
-ultralytics/models/yolo/pose/train.py,sha256=QQo4Q5kpvPv7kfa4uWmg3mFFa__fvIQ0yklGpa6XL58,5942
+ultralytics/models/yolo/pose/train.py,sha256=dKa1Vzt4GoZ9yqdK6olqLEg-qhYaPUh29Qg62bHAVi8,6502
 ultralytics/models/yolo/pose/val.py,sha256=FWDOPjf1Ajumh8DU5VRqUKYEDB8PeAzWtdZvhaIYTRc,18303
 ultralytics/models/yolo/segment/__init__.py,sha256=3IThhZ1wlkY9FvmWm9cE-5-ZyE6F1FgzAtQ6jOOFzzw,275
 ultralytics/models/yolo/segment/predict.py,sha256=mIC3aHI7Jg4dU1k2UZnjVj4unE-5TWi_rh7P0AEyJmA,5410
@@ -185,19 +186,19 @@ ultralytics/models/yolo/segment/train.py,sha256=EIyIAjYp127Mb-DomyjPORaONu57OY_g
 ultralytics/models/yolo/segment/val.py,sha256=cXJM1JNuzDraU0SJQRIdzNxabd0bfcxiRE8wozHZChY,18415
 ultralytics/models/yolo/world/__init__.py,sha256=nlh8I6t8hMGz_vZg8QSlsUW1R-2eKvn9CGUoPPQEGhA,131
 ultralytics/models/yolo/world/train.py,sha256=HUJ0XiJIGx_FA9kqNYnSFsaKWMiZUDxgkpfGoBH6UNc,4896
-ultralytics/models/yolo/world/train_world.py,sha256=-o_-85zoczEvXZHWHJaVhXQ_hAIGTFtPlgSWJmUf5AU,8287
+ultralytics/models/yolo/world/train_world.py,sha256=DSa-t9jDbtwF43SJlvtESh1Ux7M77zo9f945eR2D-5w,8363
 ultralytics/models/yolo/yoloe/__init__.py,sha256=6SLytdJtwu37qewf7CobG7C7Wl1m-xtNdvCXEasfPDE,760
-ultralytics/models/yolo/yoloe/predict.py,sha256=pjvQ8TKlAe_KIFo70qiNdOrSTITU3pcJ4VE_k7uJjDk,6994
+ultralytics/models/yolo/yoloe/predict.py,sha256=N0oYcr_mdw8wyUAWprAwJhrA0r23BaTeYXEjw2e8_mI,6993
 ultralytics/models/yolo/yoloe/train.py,sha256=St3zw_XWRol9pODWU4lvKlJnWYr1lmWQNuhLFwWMge4,12989
-ultralytics/models/yolo/yoloe/train_seg.py,sha256=l0SOMQQd0Y_EBBHhTNekgrQsftqhYyK4oWTdCg1dLrE,4633
+ultralytics/models/yolo/yoloe/train_seg.py,sha256=BYFBd04k5WQaJPcFbCvVIbEf2IOQyW8_sGeoVT_74j0,4632
 ultralytics/models/yolo/yoloe/val.py,sha256=oA8cVT3pBXF6aPZy7ITq0mDcktRuIgks8tTtqMRISyY,8431
 ultralytics/nn/__init__.py,sha256=rjociYD9lo_K-d-1s6TbdWklPLjTcEHk7OIlRDJstIE,615
 ultralytics/nn/autobackend.py,sha256=X2cxCytBu9fmniy8uJ5aZb28IukQ-uxV1INXeS1lclA,39368
-ultralytics/nn/tasks.py,sha256=0rnM6Z01BUnRtUwCkTwVsPxZ_D3A5tNbBjd7aEoxxns,62943
+ultralytics/nn/tasks.py,sha256=BvNqt1Igk-DulR6jH9vI3LsiPBcui41t-s4xmBlTg3Y,63496
 ultralytics/nn/text_model.py,sha256=8_7SRejKZA4Pi-ha0gjcWrQDDCDMBhtwlg8pPMWgjDE,13145
 ultralytics/nn/modules/__init__.py,sha256=dXLtIk9rt944WfsTdpgEdWOg3HQEHdwQztuZ6WNJygs,3144
 ultralytics/nn/modules/activation.py,sha256=PvXZkA9AzEntR575JkFORdmtcRwATyy0lje-uHA5_8w,2210
-ultralytics/nn/modules/block.py,sha256=jGPMLa-FWYall7FmWvSLIduc2qu-A-lOcBjCaHqe4nk,66667
+ultralytics/nn/modules/block.py,sha256=yd6Ao9T2UJNAWc8oB1-CSxyF6-exqbFcN3hTWUZNU3M,66701
 ultralytics/nn/modules/conv.py,sha256=nxbfAxmvo6A9atuxY3LXTtzMXhihZapCSg1F5mI4sIA,21361
 ultralytics/nn/modules/head.py,sha256=FbFB-e44Zvxgzdfy0FqeGWUn0DDahmEZvD1W_N2olcM,38442
 ultralytics/nn/modules/transformer.py,sha256=tC80QKFaLtWZo0zVNTuORX4pOu6HVs2wS0vSM-3h5W4,28227
@@ -207,21 +208,21 @@ ultralytics/solutions/ai_gym.py,sha256=QRrZGMka83NY4B9gU3N2GxTaomo0WmTMNLxkNZTxo
 ultralytics/solutions/analytics.py,sha256=u-khRAViGupjq9mkuAFCl9G3yE8hXfXASfKZd_SQZ-8,12111
 ultralytics/solutions/config.py,sha256=TLxQuZjqW-vhbS2OFmTT188-31ukHg1XP7l-BeOmqbU,5427
 ultralytics/solutions/distance_calculation.py,sha256=E13siGlQTqaGCk0xULk5Q86PwxiBAL4XWp83kQPb0YE,5751
-ultralytics/solutions/heatmap.py,sha256=lXYptA_EbypipF7YJMjsxxBzLAgsroLcdqypvNAhduA,5569
-ultralytics/solutions/instance_segmentation.py,sha256=HxzFf752PwjAjZhrf8BzI-gEey_f9mjxTOqJsLHSIB8,3498
-ultralytics/solutions/object_blurrer.py,sha256=0oSDdziKBw4ZxEwD4nGNrOcNPFs3bAux39RIJ87vVUE,3947
-ultralytics/solutions/object_counter.py,sha256=7u8OkFye91R9tf1Ar19ttXhKcoB6ziyi0pZfbHaQJ5U,10044
+ultralytics/solutions/heatmap.py,sha256=_QzsWTL6S32J3pt8N1gcl-2DZeypNpn_iuhQzkYKxEo,5495
+ultralytics/solutions/instance_segmentation.py,sha256=IuAxxEkKrbTPHmD0jV3VEjNWpBc78o8exg00nE0ldeQ,3558
+ultralytics/solutions/object_blurrer.py,sha256=-wXOdqqZisVhxLutZz7JvZmdgVGmsN7Ymary0JHc2qo,3946
+ultralytics/solutions/object_counter.py,sha256=aYjNTeEr5TGAwoecICp14K1cSrob7O6iPEe72l4E6CM,10224
 ultralytics/solutions/object_cropper.py,sha256=L6QZC5as_cUT42TMzeyXmkHa7vBi2UpNFf_-Jc7C1G0,3316
 ultralytics/solutions/parking_management.py,sha256=BV-2lpSfgmK7fib3DnPSZ5rtLdy11c8pBQm-72iTetc,13289
 ultralytics/solutions/queue_management.py,sha256=p1-cuI_rs4ygtlBryXjE65NYG2bnZXhp3ylggFnWcRs,4344
 ultralytics/solutions/region_counter.py,sha256=Zn35YRXNzhBk27D9MLOHBYe2L1o6H2ey3mEwCXofB_E,5418
 ultralytics/solutions/security_alarm.py,sha256=cmUWvz7U9IAxlOr-QCIU_j95lc2c8eUx9wI04t1vDFU,6251
 ultralytics/solutions/similarity_search.py,sha256=WTYmHNHfFrRiJ6mrZhJvGPsjt3szQUiM6VRpw2eBRjA,7332
-ultralytics/solutions/solutions.py,sha256=aXU5p6zv8UPyaC8v51tsE9L_KzmnRCP4M9PP6pAYMXQ,32715
+ultralytics/solutions/solutions.py,sha256=1iZIj3Z5bs14WbVT8MIDXABfW-pBmfvQNdBJ6l21uVY,32696
 ultralytics/solutions/speed_estimation.py,sha256=r7S5nGIx8PTV-zC4zCI36lQD2DVy5cen5cTXItfQIHo,5318
 ultralytics/solutions/streamlit_inference.py,sha256=M0ppTFInqSPrdytZBLH8x-XoA7zFc7PaRQ51wHG9ppU,9846
 ultralytics/solutions/trackzone.py,sha256=mfklnZcVRqI3bbhPiHF2iSoV6INcd10wwwGP4tlK7L0,3854
-ultralytics/solutions/vision_eye.py,sha256=7YrMqZkR28LLNHWxX3Ye78GvPdXXuouQAmgMdGwRLQ4,2953
+ultralytics/solutions/vision_eye.py,sha256=LCb-2YPVvEks9e7xqZtNGftpAXNaZhEUb5yb3N0ni_U,2952
 ultralytics/solutions/templates/similarity-search.html,sha256=DPoAO-1H-KXNt_T8mGtSCsYUEi_5Nrx01p0cZfX-E8Q,3790
 ultralytics/trackers/__init__.py,sha256=Zlu_Ig5osn7hqch_g5Be_e4pwZUkeeTQiesJCi0pFGI,255
 ultralytics/trackers/basetrack.py,sha256=LYvWB5d7Woyrz_RlxaopjV07RQKH3sff_lZJfMcMxcA,4450
@@ -232,19 +233,19 @@ ultralytics/trackers/utils/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6D
 ultralytics/trackers/utils/gmc.py,sha256=dz3I5LbIv7h1__Xg7rGHecQFE32VFTe54tUnxb8F0Z8,14466
 ultralytics/trackers/utils/kalman_filter.py,sha256=A0CqOnnaKH6kr0XwuHzyHmIU6aJAjJYxF9jVlNBKZHo,21326
 ultralytics/trackers/utils/matching.py,sha256=7eIufSdeN7cXuFMjvcfvz0Ldq84m4YKZl5IGxBR8IIo,7169
-ultralytics/utils/__init__.py,sha256=YSBOQcgak2v6l03EHPjkpzH-ZtjVXrg2_4o0BF1cqDQ,52807
+ultralytics/utils/__init__.py,sha256=vac0M-Hx55QXl6Vod3QPjnLBlt87Hwxu1784RXPmeQA,52879
 ultralytics/utils/autobatch.py,sha256=kg05q2qKg74y_Uq2vvr01i3KhLfpVR7sT0IXBt3_kyI,4921
-ultralytics/utils/autodevice.py,sha256=OrLSk34UpW0I5ndxnkQEIWBxL--CvAON_W9Qw51zOGA,7233
+ultralytics/utils/autodevice.py,sha256=OKZfTbswg6SlsYGCGMqROkA-451CXGG47oeyC5Q1kFM,7232
 ultralytics/utils/benchmarks.py,sha256=lDNNnLeLUzmqKrqrqlCOiau-q7A-gcLooZP2dbxCu-U,30214
-ultralytics/utils/checks.py,sha256=QOVLJJ6FLYojefMEnWl8GD0u9-01idYF4NyXR3BX9sc,32854
+ultralytics/utils/checks.py,sha256=1wUunWTC9574gi7WWbyDrr_rCrqFJYxTcOCPXQQBhW4,33091
 ultralytics/utils/dist.py,sha256=aytW0JEkcA5ZTZucV92ot7Bn-apiej8aLk3QNWicjAc,4103
 ultralytics/utils/downloads.py,sha256=Rn8xDwn2bzgBqiYz3Xn0rm3MWjk4T-QUd2Ajlu1EpQ4,22312
 ultralytics/utils/errors.py,sha256=vY9h2evFSrHnZdHJVVrmm8Zzw4qVDLyo9DeYW5g0dFk,1573
 ultralytics/utils/export.py,sha256=XInnl9AQeik7EuR1492nzDvgDqaV43FlnM5CLamrgd4,8814
 ultralytics/utils/files.py,sha256=0K4O1cgqRiXaDw7EQK13TqA5SME_RrvfDVQSPetNr5w,8042
 ultralytics/utils/instance.py,sha256=UOEsXR9V-bXNRk6BTonASBEgeMqvzzAk4S7VdXZJUAM,18090
-ultralytics/utils/loss.py,sha256=zIDWS_0AOH-yEYLcsfmFRUkApPIZhu2ENsB0UwJYIuw,37607
-ultralytics/utils/metrics.py,sha256=L0d1nOqxuc_TuveiIchGclkahsUkXOpbYpwjQ8ZVzyw,53937
+ultralytics/utils/loss.py,sha256=Woc_rj7ptCyezHdylEygXMeSEgivYu_B9jJHD4UwxWE,37607
+ultralytics/utils/metrics.py,sha256=pWNq-66VqkMjj05Gqkm8ddoElDK72q_U9cl8y-aEN6k,53963
 ultralytics/utils/ops.py,sha256=YFwPrKlPcgEmgAWqnJVR0Ccx5NQgp5e3P-YYHwVSP0k,34779
 ultralytics/utils/patches.py,sha256=_dhIU_eDklQE-aWIjpyjPHl_wOwZoGuIUQnXgdSwk_A,5020
 ultralytics/utils/plotting.py,sha256=m9Hsbt6U073jAiztX6clpd9KzznW62oHxCWlBcm0T-s,46920
@@ -259,13 +260,13 @@ ultralytics/utils/callbacks/comet.py,sha256=_j8tKKxGlxDcw_Rx4Ow2PjZ3UpBHm9gLJlYS
 ultralytics/utils/callbacks/dvc.py,sha256=NywyiMqJfnK_UfJ_f1IK31puyIXZy0iVJQ4bB9uyu08,7532
 ultralytics/utils/callbacks/hub.py,sha256=dPSeSStRE1x-WYyqrUghCp_VtBxNZ5-Bmb4wW2KYV2Y,4073
 ultralytics/utils/callbacks/mlflow.py,sha256=rcjjN_QVg6XoL4Kbw8YqC28RDCQMs0LxfsXRpAc8BgY,5430
-ultralytics/utils/callbacks/neptune.py,sha256=JaI95Cj2kIjUhlEEOiDN0-Drc-fDelLhNI2gf1jHuvk,4665
+ultralytics/utils/callbacks/neptune.py,sha256=yYUgEgSv6L39sSev6vjwhAWU3DlPDsbSDVFoR24NYio,4664
 ultralytics/utils/callbacks/raytune.py,sha256=A8amUGpux7dYES-L1iSeMoMXBySGWCD1aUqT7vcG-pU,1284
 ultralytics/utils/callbacks/tensorboard.py,sha256=jgYnym3cUQFAgN1GzTyO7l3jINtfAh8zhrllDvnLuVQ,5339
 ultralytics/utils/callbacks/wb.py,sha256=iDRFXI4IIDm8R5OI89DMTmjs8aHLo1HRCLkOFKdaMG4,7507
-ultralytics-8.3.131.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
-ultralytics-8.3.131.dist-info/METADATA,sha256=TRTZLq12MKZGhBDGSW722ihsjK4oEg0e39ey9V0YuR8,37223
-ultralytics-8.3.131.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
-ultralytics-8.3.131.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
-ultralytics-8.3.131.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
-ultralytics-8.3.131.dist-info/RECORD,,
+ultralytics-8.3.133.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ultralytics-8.3.133.dist-info/METADATA,sha256=po51EqOXoP7a9l8ZhORSK5BE5RJ3iPeUvxPUqILhT5s,37223
+ultralytics-8.3.133.dist-info/WHEEL,sha256=DnLRTWE75wApRYVsjgc6wsVswC54sMSJhAEd4xhDpBk,91
+ultralytics-8.3.133.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ultralytics-8.3.133.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ultralytics-8.3.133.dist-info/RECORD,,