ultralytics 8.3.122__py3-none-any.whl → 8.3.124__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. tests/test_cuda.py +1 -1
  2. tests/test_python.py +3 -3
  3. ultralytics/__init__.py +1 -1
  4. ultralytics/cfg/__init__.py +3 -9
  5. ultralytics/data/base.py +1 -2
  6. ultralytics/data/loaders.py +3 -4
  7. ultralytics/data/utils.py +1 -2
  8. ultralytics/engine/exporter.py +6 -5
  9. ultralytics/engine/model.py +4 -1
  10. ultralytics/engine/results.py +4 -7
  11. ultralytics/models/sam/model.py +2 -1
  12. ultralytics/models/sam/modules/tiny_encoder.py +2 -3
  13. ultralytics/models/sam/predict.py +4 -1
  14. ultralytics/models/yolo/detect/predict.py +1 -1
  15. ultralytics/nn/autobackend.py +1 -1
  16. ultralytics/nn/tasks.py +5 -5
  17. ultralytics/solutions/analytics.py +2 -2
  18. ultralytics/solutions/config.py +104 -0
  19. ultralytics/solutions/heatmap.py +1 -1
  20. ultralytics/solutions/object_blurrer.py +1 -1
  21. ultralytics/solutions/object_cropper.py +2 -2
  22. ultralytics/solutions/parking_management.py +2 -2
  23. ultralytics/solutions/security_alarm.py +1 -1
  24. ultralytics/solutions/solutions.py +6 -9
  25. ultralytics/solutions/speed_estimation.py +4 -4
  26. ultralytics/solutions/trackzone.py +1 -1
  27. ultralytics/solutions/vision_eye.py +1 -1
  28. ultralytics/trackers/track.py +3 -3
  29. ultralytics/utils/__init__.py +1 -3
  30. ultralytics/utils/autobatch.py +2 -2
  31. ultralytics/utils/benchmarks.py +5 -5
  32. ultralytics/utils/checks.py +2 -1
  33. ultralytics/utils/dist.py +2 -1
  34. ultralytics/utils/downloads.py +6 -1
  35. ultralytics/utils/torch_utils.py +14 -9
  36. {ultralytics-8.3.122.dist-info → ultralytics-8.3.124.dist-info}/METADATA +1 -1
  37. {ultralytics-8.3.122.dist-info → ultralytics-8.3.124.dist-info}/RECORD +41 -41
  38. ultralytics/cfg/solutions/default.yaml +0 -24
  39. {ultralytics-8.3.122.dist-info → ultralytics-8.3.124.dist-info}/WHEEL +0 -0
  40. {ultralytics-8.3.122.dist-info → ultralytics-8.3.124.dist-info}/entry_points.txt +0 -0
  41. {ultralytics-8.3.122.dist-info → ultralytics-8.3.124.dist-info}/licenses/LICENSE +0 -0
  42. {ultralytics-8.3.122.dist-info → ultralytics-8.3.124.dist-info}/top_level.txt +0 -0
tests/test_cuda.py CHANGED
@@ -115,7 +115,7 @@ def test_utils_benchmarks():
 
 # Pre-export a dynamic engine model to use dynamic inference
 YOLO(MODEL).export(format="engine", imgsz=32, dynamic=True, batch=1)
- ProfileModels([MODEL], imgsz=32, half=False, min_time=1, num_timed_runs=3, num_warmup_runs=1).profile()
+ ProfileModels([MODEL], imgsz=32, half=False, min_time=1, num_timed_runs=3, num_warmup_runs=1).run()
 
 
 @pytest.mark.skipif(not CUDA_IS_AVAILABLE, reason="CUDA is not available")
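This test update reflects the 8.3.124 rename of ProfileModels.profile() to ProfileModels.run() (the same rename appears in tests/test_python.py below). A minimal usage sketch against the new method name; the model YAML and image size here are illustrative:

    from ultralytics.utils.benchmarks import ProfileModels
    results = ProfileModels(["yolo11n.yaml"], imgsz=640).run()  # returns a list of per-model benchmark dicts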
tests/test_python.py CHANGED
@@ -399,18 +399,18 @@ def test_utils_benchmarks():
 """Benchmark model performance using 'ProfileModels' from 'ultralytics.utils.benchmarks'."""
 from ultralytics.utils.benchmarks import ProfileModels
 
- ProfileModels(["yolo11n.yaml"], imgsz=32, min_time=1, num_timed_runs=3, num_warmup_runs=1).profile()
+ ProfileModels(["yolo11n.yaml"], imgsz=32, min_time=1, num_timed_runs=3, num_warmup_runs=1).run()
 
 
 def test_utils_torchutils():
 """Test Torch utility functions including profiling and FLOP calculations."""
 from ultralytics.nn.modules.conv import Conv
- from ultralytics.utils.torch_utils import get_flops_with_torch_profiler, profile, time_sync
+ from ultralytics.utils.torch_utils import get_flops_with_torch_profiler, profile_ops, time_sync
 
 x = torch.randn(1, 64, 20, 20)
 m = Conv(64, 64, k=1, s=2)
 
- profile(x, [m], n=3)
+ profile_ops(x, [m], n=3)
 get_flops_with_torch_profiler(m)
 time_sync()
 
ultralytics/__init__.py CHANGED
@@ -1,6 +1,6 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 
- __version__ = "8.3.122"
+ __version__ = "8.3.124"
 
 import os
 
ultralytics/cfg/__init__.py CHANGED
@@ -15,7 +15,6 @@ from ultralytics.utils import (
 DEFAULT_CFG,
 DEFAULT_CFG_DICT,
 DEFAULT_CFG_PATH,
- DEFAULT_SOL_DICT,
 IS_VSCODE,
 LOGGER,
 RANK,
@@ -650,7 +649,6 @@ def handle_yolo_solutions(args: List[str]) -> None:
 >>> handle_yolo_solutions(["inference", "model=yolo11n.pt"])
 
 Notes:
- - Default configurations are merged from DEFAULT_SOL_DICT and DEFAULT_CFG_DICT
 - Arguments can be provided in the format 'key=value' or as boolean flags
 - Available solutions are defined in SOLUTION_MAP with their respective classes and methods
 - If an invalid solution is provided, defaults to 'count' solution
@@ -662,13 +660,9 @@ def handle_yolo_solutions(args: List[str]) -> None:
 - The inference solution will be launched using the 'streamlit run' command.
 - The Streamlit app file is located in the Ultralytics package directory.
 """
- full_args_dict = {
- **DEFAULT_SOL_DICT,
- **DEFAULT_CFG_DICT,
- "blur_ratio": 0.5,
- "vision_point": (20, 20),
- "crop_dir": "cropped-detections",
- } # arguments dictionary
+ from ultralytics.solutions.config import SolutionConfig
+
+ full_args_dict = vars(SolutionConfig()) # arguments dictionary
 overrides = {}
 
 # check dictionary alignment
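With this change the CLI derives its full solutions argument dictionary from the new SolutionConfig dataclass instead of the removed cfg/solutions/default.yaml. A minimal sketch of the equivalent call (field values shown are the dataclass defaults):

    from ultralytics.solutions.config import SolutionConfig
    full_args_dict = vars(SolutionConfig())  # e.g. {"source": None, "model": None, "conf": 0.25, "iou": 0.7, ...}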
ultralytics/data/base.py CHANGED
@@ -11,7 +11,6 @@ from typing import Optional
 
 import cv2
 import numpy as np
- import psutil
 from torch.utils.data import Dataset
 
 from ultralytics.data.utils import FORMATS_HELP_MSG, HELP_URL, IMG_FORMATS, check_file_speeds
@@ -336,7 +335,7 @@ class BaseDataset(Dataset):
 ratio = self.imgsz / max(im.shape[0], im.shape[1]) # max(h, w) # ratio
 b += im.nbytes * ratio**2
 mem_required = b * self.ni / n * (1 + safety_margin) # GB required to cache dataset into RAM
- mem = psutil.virtual_memory()
+ mem = __import__("psutil").virtual_memory()
 if mem_required > mem.available:
 self.cache = None
 LOGGER.warning(
ultralytics/data/loaders.py CHANGED
@@ -4,14 +4,13 @@ import glob
 import math
 import os
 import time
+ import urllib
 from dataclasses import dataclass
 from pathlib import Path
 from threading import Thread
- from urllib.parse import urlparse
 
 import cv2
 import numpy as np
- import requests
 import torch
 from PIL import Image
 
@@ -111,7 +110,7 @@ class LoadStreams:
 for i, s in enumerate(sources): # index, source
 # Start thread to read frames from video stream
 st = f"{i + 1}/{n}: {s}... "
- if urlparse(s).hostname in {"www.youtube.com", "youtube.com", "youtu.be"}: # if source is YouTube video
+ if urllib.parse.urlparse(s).hostname in {"www.youtube.com", "youtube.com", "youtu.be"}: # YouTube video
 # YouTube format i.e. 'https://www.youtube.com/watch?v=Jsn8D3aC840' or 'https://youtu.be/Jsn8D3aC840'
 s = get_best_youtube_url(s)
 s = eval(s) if s.isnumeric() else s # i.e. s = '0' local webcam
@@ -589,7 +588,7 @@ def autocast_list(source):
 files = []
 for im in source:
 if isinstance(im, (str, Path)): # filename or uri
- files.append(Image.open(requests.get(im, stream=True).raw if str(im).startswith("http") else im))
+ files.append(Image.open(urllib.request.urlopen(im) if str(im).startswith("http") else im))
 elif isinstance(im, (Image.Image, np.ndarray)): # PIL or np Image
 files.append(im)
 else:
ultralytics/data/utils.py CHANGED
@@ -1,6 +1,5 @@
 # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 
- import hashlib
 import json
 import os
 import random
@@ -131,7 +130,7 @@ def get_hash(paths):
 size += os.stat(p).st_size
 except OSError:
 continue
- h = hashlib.sha256(str(size).encode()) # hash sizes
+ h = __import__("hashlib").sha256(str(size).encode()) # hash sizes
 h.update("".join(paths).encode()) # hash paths
 return h.hexdigest() # return hash
 
ultralytics/engine/exporter.py CHANGED
@@ -140,7 +140,7 @@ def export_formats():
 ["MNN", "mnn", ".mnn", True, True, ["batch", "half", "int8"]],
 ["NCNN", "ncnn", "_ncnn_model", True, True, ["batch", "half"]],
 ["IMX", "imx", "_imx_model", True, True, ["int8", "fraction"]],
- ["RKNN", "rknn", "_rknn_model", False, False, ["batch", "name"]],
+ ["RKNN", "rknn", "_rknn_model", False, False, ["batch", "name", "int8"]],
 ]
 return dict(zip(["Format", "Argument", "Suffix", "CPU", "GPU", "Arguments"], zip(*x)))
 
@@ -970,8 +970,9 @@ class Exporter:
 output_integer_quantized_tflite=self.args.int8,
 quant_type="per-tensor", # "per-tensor" (faster) or "per-channel" (slower but more accurate)
 custom_input_op_name_np_data_path=np_data,
- disable_group_convolution=True, # for end-to-end model compatibility
- enable_batchmatmul_unfold=True, # for end-to-end model compatibility
+ enable_batchmatmul_unfold=True, # fix lower no. of detected objects on GPU delegate
+ output_signaturedefs=True, # fix error with Attention block group convolution
+ optimization_for_gpu_delegate=True,
 )
 yaml_save(f / "metadata.yaml", self.metadata) # add metadata.yaml
 
@@ -1110,8 +1111,8 @@ class Exporter:
 rknn = RKNN(verbose=False)
 rknn.config(mean_values=[[0, 0, 0]], std_values=[[255, 255, 255]], target_platform=self.args.name)
 rknn.load_onnx(model=f)
- rknn.build(do_quantization=False) # TODO: Add quantization support
- f = f.replace(".onnx", f"-{self.args.name}.rknn")
+ rknn.build(do_quantization=self.args.int8)
+ f = f.replace(".onnx", f"-{self.args.name}-int8.rknn" if self.args.int8 else f"-{self.args.name}-fp16.rknn")
 rknn.export_rknn(f"{export_path / f}")
 yaml_save(export_path / "metadata.yaml", self.metadata)
 return export_path, None
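With int8 now wired through to rknn.build(), an RKNN export can be quantized directly. A hedged example of the updated call; the target platform passed via name is illustrative:

    from ultralytics import YOLO
    YOLO("yolo11n.pt").export(format="rknn", name="rk3588", int8=True)  # writes a -rk3588-int8.rknn file, or -fp16 without int8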
ultralytics/engine/model.py CHANGED
@@ -10,7 +10,6 @@ from PIL import Image
 
 from ultralytics.cfg import TASK2DATA, get_cfg, get_save_dir
 from ultralytics.engine.results import Results
- from ultralytics.hub import HUB_WEB_ROOT, HUBTrainingSession
 from ultralytics.nn.tasks import attempt_load_one_weight, guess_model_task, yaml_model_load
 from ultralytics.utils import (
 ARGV,
@@ -126,6 +125,8 @@ class Model(torch.nn.Module):
 
 # Check if Ultralytics HUB model from https://hub.ultralytics.com
 if self.is_hub_model(model):
+ from ultralytics.hub import HUBTrainingSession
+
 # Fetch model from HUB
 checks.check_requirements("hub-sdk>=0.0.12")
 session = HUBTrainingSession.create_session(model)
@@ -225,6 +226,8 @@ class Model(torch.nn.Module):
 >>> Model.is_hub_model("yolo11n.pt")
 False
 """
+ from ultralytics.hub import HUB_WEB_ROOT
+
 return model.startswith(f"{HUB_WEB_ROOT}/models/")
 
 def _new(self, cfg: str, task=None, model=None, verbose=False) -> None:
ultralytics/engine/results.py CHANGED
@@ -662,17 +662,14 @@ class Results(SimpleClass):
 - For classification tasks, it returns the top 5 class probabilities and their corresponding class names.
 - The returned string is comma-separated and ends with a comma and a space.
 """
- log_string = ""
 probs = self.probs
 if len(self) == 0:
- return log_string if probs is not None else f"{log_string}(no detections), "
+ return "" if probs is not None else "(no detections), "
 if probs is not None:
- log_string += f"{', '.join(f'{self.names[j]} {probs.data[j]:.2f}' for j in probs.top5)}, "
+ return f"{', '.join(f'{self.names[j]} {probs.data[j]:.2f}' for j in probs.top5)}, "
 if boxes := self.boxes:
- for c in boxes.cls.unique():
- n = (boxes.cls == c).sum() # detections per class
- log_string += f"{n} {self.names[int(c)]}{'s' * (n > 1)}, "
- return log_string
+ counts = boxes.cls.int().bincount()
+ return "".join(f"{n} {self.names[i]}{'s' * (n > 1)}, " for i, n in enumerate(counts) if n > 0)
 
 def save_txt(self, txt_file, save_conf=False):
 """
ultralytics/models/sam/model.py CHANGED
@@ -19,7 +19,6 @@ from pathlib import Path
 from ultralytics.engine.model import Model
 from ultralytics.utils.torch_utils import model_info
 
- from .build import build_sam
 from .predict import Predictor, SAM2Predictor
 
 
@@ -78,6 +77,8 @@ class SAM(Model):
 >>> sam = SAM("sam_b.pt")
 >>> sam._load("path/to/custom_weights.pt")
 """
+ from .build import build_sam # slow import
+
 self.model = build_sam(weights)
 
 def predict(self, source, stream=False, bboxes=None, points=None, labels=None, **kwargs):
ultralytics/models/sam/modules/tiny_encoder.py CHANGED
@@ -15,7 +15,6 @@ from typing import Tuple
 import torch
 import torch.nn as nn
 import torch.nn.functional as F
- import torch.utils.checkpoint as checkpoint
 
 from ultralytics.nn.modules import LayerNorm2d
 from ultralytics.utils.instance import to_2tuple
@@ -308,7 +307,7 @@ class ConvLayer(nn.Module):
 def forward(self, x):
 """Processes input through convolutional layers, applying MBConv blocks and optional downsampling."""
 for blk in self.blocks:
- x = checkpoint.checkpoint(blk, x) if self.use_checkpoint else blk(x)
+ x = torch.utils.checkpoint(blk, x) if self.use_checkpoint else blk(x) # warn: checkpoint is slow import
 return x if self.downsample is None else self.downsample(x)
 
 
@@ -751,7 +750,7 @@ class BasicLayer(nn.Module):
 def forward(self, x):
 """Processes input through TinyViT blocks and optional downsampling."""
 for blk in self.blocks:
- x = checkpoint.checkpoint(blk, x) if self.use_checkpoint else blk(x)
+ x = torch.utils.checkpoint(blk, x) if self.use_checkpoint else blk(x) # warn: checkpoint is slow import
 return x if self.downsample is None else self.downsample(x)
 
 def extra_repr(self) -> str:
ultralytics/models/sam/predict.py CHANGED
@@ -31,7 +31,6 @@ from .amg import (
 uncrop_boxes_xyxy,
 uncrop_masks,
 )
- from .build import build_sam
 
 
 class Predictor(BasePredictor):
@@ -439,6 +438,8 @@ class Predictor(BasePredictor):
 
 def get_model(self):
 """Retrieves or builds the Segment Anything Model (SAM) for image segmentation tasks."""
+ from .build import build_sam # slow import
+
 return build_sam(self.args.model)
 
 def postprocess(self, preds, img, orig_imgs):
@@ -658,6 +659,8 @@ class SAM2Predictor(Predictor):
 
 def get_model(self):
 """Retrieves and initializes the Segment Anything Model 2 (SAM2) for image segmentation tasks."""
+ from .build import build_sam # slow import
+
 return build_sam(self.args.model)
 
 def prompt_inference(
ultralytics/models/yolo/detect/predict.py CHANGED
@@ -90,7 +90,7 @@ class DetectionPredictor(BasePredictor):
 obj_feats = torch.cat(
 [x.permute(0, 2, 3, 1).reshape(x.shape[0], -1, s, x.shape[1] // s).mean(dim=-1) for x in feat_maps], dim=1
 ) # mean reduce all vectors to same length
- return [feats[idx] for feats, idx in zip(obj_feats, idxs)] # for each image in batch, indexed separately
+ return [feats[idx] if len(idx) else [] for feats, idx in zip(obj_feats, idxs)] # for each img in batch
 
 def construct_results(self, preds, img, orig_imgs):
 """
ultralytics/nn/autobackend.py CHANGED
@@ -725,7 +725,7 @@ class AutoBackend(nn.Module):
 else:
 im = im.cpu().numpy()
 if self.saved_model: # SavedModel
- y = self.model(im, training=False) if self.keras else self.model(im)
+ y = self.model(im, training=False) if self.keras else self.model.serving_default(im)
 if not isinstance(y, list):
 y = [y]
 elif self.pb: # GraphDef
ultralytics/nn/tasks.py CHANGED
@@ -92,11 +92,6 @@ from ultralytics.utils.torch_utils import (
 time_sync,
 )
 
- try:
- import thop
- except ImportError:
- thop = None # conda support without 'ultralytics-thop' installed
-
 
 class BaseModel(torch.nn.Module):
 """The BaseModel class serves as a base class for all the models in the Ultralytics YOLO family."""
@@ -183,6 +178,11 @@ class BaseModel(torch.nn.Module):
 x (torch.Tensor): The input data to the layer.
 dt (list): A list to store the computation time of the layer.
 """
+ try:
+ import thop
+ except ImportError:
+ thop = None # conda support without 'ultralytics-thop' installed
+
 c = m == self.model[-1] and isinstance(x, list) # is final layer list, copy input as inplace fix
 flops = thop.profile(m, inputs=[x.copy() if c else x], verbose=False)[0] / 1e9 * 2 if thop else 0 # GFLOPs
 t = time_sync()
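The same pattern recurs throughout this release: optional or heavy imports (thop, requests, socket, uuid, the SAM builder) move from module scope into the functions that use them, so importing ultralytics stays fast and a missing optional package only matters when the feature is actually called. A minimal sketch of the pattern, with an illustrative function name:

    def layer_gflops(m, xs):
        try:
            import thop  # optional dependency, imported only when profiling
        except ImportError:
            thop = None  # conda installs without 'ultralytics-thop'
        return thop.profile(m, inputs=xs, verbose=False)[0] / 1e9 * 2 if thop else 0.0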
ultralytics/solutions/analytics.py CHANGED
@@ -51,7 +51,7 @@ class Analytics(BaseSolution):
 """Initialize Analytics class with various chart types for visual data representation."""
 super().__init__(**kwargs)
 
- self.type = self.CFG["analytics_type"] # extract type of analytics
+ self.type = self.CFG["analytics_type"] # type of analytics i.e "line", "pie", "bar" or "area" charts.
 self.x_label = "Classes" if self.type in {"bar", "pie"} else "Frame#"
 self.y_label = "Total Counts"
 
@@ -61,7 +61,7 @@ class Analytics(BaseSolution):
 self.title = "Ultralytics Solutions" # window name
 self.max_points = 45 # maximum points to be drawn on window
 self.fontsize = 25 # text font size for display
- figsize = (12.8, 7.2) # Set output image size 1280 * 720
+ figsize = self.CFG["figsize"] # set output image size i.e (12.8, 7.2) -> w = 1280, h = 720
 self.color_cycle = cycle(["#DD00BA", "#042AFF", "#FF4447", "#7D24FF", "#BD00FF"])
 
 self.total_counts = 0 # count variable for storing total counts i.e. for line
ultralytics/solutions/config.py ADDED
@@ -0,0 +1,104 @@
+ # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+ from dataclasses import dataclass, field
+ from typing import List, Optional, Tuple
+
+ import cv2
+
+
+ @dataclass
+ class SolutionConfig:
+ """
+ Manages configuration parameters for Ultralytics Vision AI solutions.
+
+ The SolutionConfig class serves as a centralized configuration container for all the
+ Ultralytics solution modules: https://docs.ultralytics.com/solutions/#solutions.
+ It leverages Python `dataclass` for clear, type-safe, and maintainable parameter definitions.
+
+ Attributes:
+ source (Optional[str]): Path to the input source (video, RTSP, etc.). Only usable with Solutions CLI.
+ model (Optional[str]): Path to the Ultralytics YOLO model to be used for inference.
+ classes (Optional[List[int]]): List of class indices to filter detections.
+ show_conf (bool): Whether to show confidence scores on the visual output.
+ show_labels (bool): Whether to display class labels on visual output.
+ region (Optional[List[Tuple[int, int]]]): Polygonal region or line for object counting.
+ colormap (Optional[int]): OpenCV colormap constant for visual overlays (e.g., cv2.COLORMAP_JET).
+ show_in (bool): Whether to display count number for objects entering the region.
+ show_out (bool): Whether to display count number for objects leaving the region.
+ up_angle (float): Upper angle threshold used in pose-based workouts monitoring.
+ down_angle (int): Lower angle threshold used in pose-based workouts monitoring.
+ kpts (List[int]): Keypoint indices to monitor, e.g., for pose analytics.
+ analytics_type (str): Type of analytics to perform ("line", "area", "bar", "pie", etc.).
+ figsize (Optional[Tuple[int, int]]): Size of the matplotlib figure used for analytical plots (width, height).
+ blur_ratio (float): Ratio used to blur objects in the video frames (0.0 to 1.0).
+ vision_point (Tuple[int, int]): Reference point for directional tracking or perspective drawing.
+ crop_dir (str): Directory path to save cropped detection images.
+ json_file (str): Path to a JSON file containing data for parking areas.
+ line_width (int): Width for visual display i.e. bounding boxes, keypoints, counts.
+ records (int): Number of detection records to send email alerts.
+ fps (float): Frame rate (Frames Per Second) for speed estimation calculation.
+ max_hist (int): Maximum number of historical points or states stored per tracked object for speed estimation.
+ meter_per_pixel (float): Scale for real-world measurement, used in speed or distance calculations.
+ max_speed (int): Maximum speed limit (e.g., km/h or mph) used in visual alerts or constraints.
+ show (bool): Whether to display the visual output on screen.
+ iou (float): Intersection-over-Union threshold for detection filtering.
+ conf (float): Confidence threshold for keeping predictions.
+ device (Optional[str]): Device to run inference on (e.g., 'cpu', '0' for CUDA GPU).
+ max_det (int): Maximum number of detections allowed per video frame.
+ half (bool): Whether to use FP16 precision (requires a supported CUDA device).
+ tracker (str): Path to tracking configuration YAML file (e.g., 'botsort.yaml').
+ verbose (bool): Enable verbose logging output for debugging or diagnostics.
+
+ Methods:
+ update: Update the configuration with user-defined keyword arguments and raise error on invalid keys.
+
+ Examples:
+ >>> from ultralytics.solutions.config import SolutionConfig
+ >>> cfg = SolutionConfig(model="yolo11n.pt", region=[(0, 0), (100, 0), (100, 100), (0, 100)])
+ >>> cfg.update(show=False, conf=0.3)
+ >>> print(cfg.model)
+ """
+
+ source: Optional[str] = None
+ model: Optional[str] = None
+ classes: Optional[List[int]] = None
+ show_conf: bool = True
+ show_labels: bool = True
+ region: Optional[List[Tuple[int, int]]] = None
+ colormap: Optional[int] = cv2.COLORMAP_DEEPGREEN
+ show_in: bool = True
+ show_out: bool = True
+ up_angle: float = 145.0
+ down_angle: int = 90
+ kpts: List[int] = field(default_factory=lambda: [6, 8, 10])
+ analytics_type: str = "line"
+ figsize: Optional[Tuple[int, int]] = (12.8, 7.2)
+ blur_ratio: float = 0.5
+ vision_point: Tuple[int, int] = (20, 20)
+ crop_dir: str = "cropped-detections"
+ json_file: str = None
+ line_width: int = 2
+ records: int = 5
+ fps: float = 30.0
+ max_hist: int = 5
+ meter_per_pixel: float = 0.05
+ max_speed: int = 120
+ show: bool = False
+ iou: float = 0.7
+ conf: float = 0.25
+ device: Optional[str] = None
+ max_det: int = 300
+ half: bool = False
+ tracker: str = "botsort.yaml"
+ verbose: bool = True
+
+ def update(self, **kwargs):
+ """Update configuration parameters with new values provided as keyword arguments."""
+ for key, value in kwargs.items():
+ if hasattr(self, key):
+ setattr(self, key, value)
+ else:
+ raise ValueError(
+ f"❌ {key} is not a valid solution argument, available arguments here: https://docs.ultralytics.com/solutions/#solutions-arguments"
+ )
+ return self
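update() is also what turns typos into hard errors: unknown keys raise immediately rather than being silently merged as they were with the old YAML-based dictionaries. A small sketch of the behavior:

    from ultralytics.solutions.config import SolutionConfig
    cfg = SolutionConfig().update(conf=0.3)        # valid key, returns the updated config
    cfg = SolutionConfig().update(confidence=0.3)  # raises ValueError: not a valid solution argument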
ultralytics/solutions/heatmap.py CHANGED
@@ -45,7 +45,7 @@ class Heatmap(ObjectCounter):
 self.initialize_region()
 
 # Store colormap
- self.colormap = cv2.COLORMAP_PARULA if self.CFG["colormap"] is None else self.CFG["colormap"]
+ self.colormap = self.CFG["colormap"]
 self.heatmap = None
 
 def heatmap_effect(self, box):
ultralytics/solutions/object_blurrer.py CHANGED
@@ -41,7 +41,7 @@ class ObjectBlurrer(BaseSolution):
 blur_ratio (float): Intensity of the blur effect (0.1-1.0, default=0.5).
 """
 super().__init__(**kwargs)
- blur_ratio = kwargs.get("blur_ratio", 0.5)
+ blur_ratio = self.CFG["blur_ratio"]
 if blur_ratio < 0.1:
 LOGGER.warning("blur ratio cannot be less than 0.1, updating it to default value 0.5")
 blur_ratio = 0.5
ultralytics/solutions/object_cropper.py CHANGED
@@ -40,7 +40,7 @@ class ObjectCropper(BaseSolution):
 """
 super().__init__(**kwargs)
 
- self.crop_dir = kwargs.get("crop_dir", "cropped-detections") # Directory for storing cropped detections
+ self.crop_dir = self.CFG["crop_dir"] # Directory for storing cropped detections
 if not os.path.exists(self.crop_dir):
 os.mkdir(self.crop_dir) # Create directory if it does not exist
 if self.CFG["show"]:
@@ -49,7 +49,7 @@ class ObjectCropper(BaseSolution):
 )
 self.crop_idx = 0 # Initialize counter for total cropped objects
 self.iou = self.CFG["iou"]
- self.conf = self.CFG["conf"] if self.CFG["conf"] is not None else 0.25
+ self.conf = self.CFG["conf"]
 
 def process(self, im0):
 """
ultralytics/solutions/parking_management.py CHANGED
@@ -201,9 +201,9 @@ class ParkingManagement(BaseSolution):
 """Initialize the parking management system with a YOLO model and visualization settings."""
 super().__init__(**kwargs)
 
- self.json_file = self.CFG["json_file"] # Load JSON data
+ self.json_file = self.CFG["json_file"] # Load parking regions JSON data
 if self.json_file is None:
- LOGGER.warning("json_file argument missing. Parking region details required.")
+ LOGGER.warning("json_file argument missing. Parking region details required.")
 raise ValueError("❌ Json file path can not be empty")
 
 with open(self.json_file) as f:
ultralytics/solutions/security_alarm.py CHANGED
@@ -143,7 +143,7 @@ class SecurityAlarm(BaseSolution):
 annotator.box_label(box, label=self.names[cls], color=colors(cls, True))
 
 total_det = len(self.clss)
- if total_det > self.records and not self.email_sent: # Only send email if not sent before
+ if total_det >= self.records and not self.email_sent: # Only send email if not sent before
 self.send_email(im0, total_det)
 self.email_sent = True
 
ultralytics/solutions/solutions.py CHANGED
@@ -7,7 +7,8 @@ import cv2
 import numpy as np
 
 from ultralytics import YOLO
- from ultralytics.utils import ASSETS_URL, DEFAULT_CFG_DICT, DEFAULT_SOL_DICT, LOGGER
+ from ultralytics.solutions.config import SolutionConfig
+ from ultralytics.utils import ASSETS_URL, LOGGER
 from ultralytics.utils.checks import check_imshow, check_requirements
 from ultralytics.utils.plotting import Annotator
 
@@ -72,15 +73,11 @@ class BaseSolution:
 self.r_s = None
 
 self.LOGGER = LOGGER # Store logger object to be used in multiple solution classes
-
- # Load config and update with args
- DEFAULT_SOL_DICT.update(kwargs)
- DEFAULT_CFG_DICT.update(kwargs)
- self.CFG = {**DEFAULT_SOL_DICT, **DEFAULT_CFG_DICT}
- self.LOGGER.info(f"Ultralytics Solutions: ✅ {DEFAULT_SOL_DICT}")
+ self.CFG = vars(SolutionConfig().update(**kwargs))
+ self.LOGGER.info(f"Ultralytics Solutions: {self.CFG}")
 
 self.region = self.CFG["region"] # Store region data for other classes usage
- self.line_width = self.CFG["line_width"] if self.CFG["line_width"] not in (None, 0) else 2 # Store line_width
+ self.line_width = self.CFG["line_width"]
 
 # Load Model and store additional information (classes, show_conf, show_label)
 if self.CFG["model"] is None:
@@ -178,7 +175,7 @@ class BaseSolution:
 def initialize_region(self):
 """Initialize the counting region and line segment based on configuration settings."""
 if self.region is None:
- self.region = [(20, 400), (1080, 400), (1080, 360), (20, 360)]
+ self.region = [(10, 200), (540, 200), (540, 180), (10, 180)]
 self.r_s = (
 self.Polygon(self.region) if len(self.region) >= 3 else self.LineString(self.region)
 ) # region or line
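BaseSolution now reads every option from the single CFG dictionary built by SolutionConfig, which is why the kwargs.get defaults disappear from the individual solutions above and below. A hedged usage sketch (the solution class and values are illustrative):

    from ultralytics import solutions
    blurrer = solutions.ObjectBlurrer(model="yolo11n.pt", blur_ratio=0.7, show=False)  # kwargs validated by SolutionConfig.update
    # an unrecognized keyword now raises ValueError instead of being silently accepted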
ultralytics/solutions/speed_estimation.py CHANGED
@@ -44,15 +44,15 @@ class SpeedEstimator(BaseSolution):
 """
 super().__init__(**kwargs)
 
- self.fps = kwargs.get("fps", 30) # assumed video FPS
+ self.fps = self.CFG["fps"] # assumed video FPS
 self.frame_count = 0 # global frame count
 self.trk_frame_ids = {} # Track ID → first frame index
 self.spd = {} # Final speed per object (km/h), once locked
 self.trk_hist = {} # Track ID → deque of (time, position)
 self.locked_ids = set() # Track IDs whose speed has been finalized
- self.max_hist = kwargs.get("max_hist", 5) # Required frame history before computing speed
- self.meter_per_pixel = kwargs.get("meter_per_pixel", 0.05) # Scene scale, depends on camera details
- self.max_speed = kwargs.get("max_speed", 120) # max_speed adjustment
+ self.max_hist = self.CFG["max_hist"] # Required frame history before computing speed
+ self.meter_per_pixel = self.CFG["meter_per_pixel"] # Scene scale, depends on camera details
+ self.max_speed = self.CFG["max_speed"] # max_speed adjustment
 
 def process(self, im0):
 """
ultralytics/solutions/trackzone.py CHANGED
@@ -42,7 +42,7 @@ class TrackZone(BaseSolution):
 **kwargs (Any): Additional keyword arguments passed to the parent class.
 """
 super().__init__(**kwargs)
- default_region = [(150, 150), (1130, 150), (1130, 570), (150, 570)]
+ default_region = [(75, 75), (565, 75), (565, 285), (75, 285)]
 self.region = cv2.convexHull(np.array(self.region or default_region, dtype=np.int32))
 
 def process(self, im0):
ultralytics/solutions/vision_eye.py CHANGED
@@ -34,7 +34,7 @@ class VisionEye(BaseSolution):
 """
 super().__init__(**kwargs)
 # Set the vision point where the system will view objects and draw tracks
- self.vision_point = kwargs.get("vision_point", (30, 30))
+ self.vision_point = self.CFG["vision_point"]
 
 def process(self, im0):
 """
ultralytics/trackers/track.py CHANGED
@@ -58,10 +58,10 @@ def on_predict_start(predictor: object, persist: bool = False) -> None:
 predictor._feats = None
 
 # Register hook to extract input of Detect layer
- def capture_io(module, input, output):
- predictor._feats = input[0]
+ def pre_hook(module, input):
+ predictor._feats = [t.clone() for t in input[0]]
 
- predictor.model.model.model[-1].register_forward_hook(capture_io)
+ predictor.model.model.model[-1].register_forward_pre_hook(pre_hook)
 
 trackers = []
 for _ in range(predictor.dataset.bs):
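The tracker now captures the Detect head's inputs with a forward pre-hook (called before forward, receiving only the inputs) and clones the feature maps instead of reading them from a forward hook. A minimal standalone sketch of the mechanism on an arbitrary module:

    import torch
    import torch.nn as nn

    feats = []
    layer = nn.Linear(4, 2)
    handle = layer.register_forward_pre_hook(lambda module, args: feats.append(args[0].clone()))
    layer(torch.randn(1, 4))  # the hook stores a clone of the input before forward runs
    handle.remove()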
ultralytics/utils/__init__.py CHANGED
@@ -12,7 +12,6 @@ import subprocess
 import sys
 import threading
 import time
- import uuid
 import warnings
 from pathlib import Path
 from threading import Lock
@@ -41,7 +40,6 @@ ROOT = FILE.parents[1] # YOLO
 ASSETS = ROOT / "assets" # default images
 ASSETS_URL = "https://github.com/ultralytics/assets/releases/download/v0.0.0" # assets GitHub URL
 DEFAULT_CFG_PATH = ROOT / "cfg/default.yaml"
- DEFAULT_SOL_CFG_PATH = ROOT / "cfg/solutions/default.yaml" # Ultralytics solutions yaml path
 NUM_THREADS = min(8, max(1, os.cpu_count() - 1)) # number of YOLO multiprocessing threads
 AUTOINSTALL = str(os.getenv("YOLO_AUTOINSTALL", True)).lower() == "true" # global auto-install mode
 VERBOSE = str(os.getenv("YOLO_VERBOSE", True)).lower() == "true" # global verbose mode
@@ -545,7 +543,6 @@ def yaml_print(yaml_file: Union[str, Path, dict]) -> None:
 
 # Default configuration
 DEFAULT_CFG_DICT = yaml_load(DEFAULT_CFG_PATH)
- DEFAULT_SOL_DICT = yaml_load(DEFAULT_SOL_CFG_PATH) # Ultralytics solutions configuration
 for k, v in DEFAULT_CFG_DICT.items():
 if isinstance(v, str) and v.lower() == "none":
 DEFAULT_CFG_DICT[k] = None
@@ -1226,6 +1223,7 @@ class SettingsManager(JSONDict):
 def __init__(self, file=SETTINGS_FILE, version="0.0.6"):
 """Initializes the SettingsManager with default settings and loads user settings."""
 import hashlib
+ import uuid
 
 from ultralytics.utils.torch_utils import torch_distributed_zero_first
 
ultralytics/utils/autobatch.py CHANGED
@@ -8,7 +8,7 @@ import numpy as np
 import torch
 
 from ultralytics.utils import DEFAULT_CFG, LOGGER, colorstr
- from ultralytics.utils.torch_utils import autocast, profile
+ from ultralytics.utils.torch_utils import autocast, profile_ops
 
 
 def check_train_batch_size(model, imgsz=640, amp=True, batch=-1, max_num_obj=1):
@@ -74,7 +74,7 @@ def autobatch(model, imgsz=640, fraction=0.60, batch_size=DEFAULT_CFG.batch, max
 batch_sizes = [1, 2, 4, 8, 16] if t < 16 else [1, 2, 4, 8, 16, 32, 64]
 try:
 img = [torch.empty(b, 3, imgsz, imgsz) for b in batch_sizes]
- results = profile(img, model, n=1, device=device, max_num_obj=max_num_obj)
+ results = profile_ops(img, model, n=1, device=device, max_num_obj=max_num_obj)
 
 # Fit a solution
 xy = [
ultralytics/utils/benchmarks.py CHANGED
@@ -4,7 +4,7 @@ Benchmark a YOLO model formats for speed and accuracy.
 
 Usage:
 from ultralytics.utils.benchmarks import ProfileModels, benchmark
- ProfileModels(['yolo11n.yaml', 'yolov8s.yaml']).profile()
+ ProfileModels(['yolo11n.yaml', 'yolov8s.yaml']).run()
 benchmark(model='yolo11n.pt', imgsz=160)
 
 Format | `format=argument` | Model
@@ -378,7 +378,7 @@ class ProfileModels:
 Profile models and print results
 >>> from ultralytics.utils.benchmarks import ProfileModels
 >>> profiler = ProfileModels(["yolo11n.yaml", "yolov8s.yaml"], imgsz=640)
- >>> profiler.profile()
+ >>> profiler.run()
 """
 
 def __init__(
@@ -412,7 +412,7 @@ class ProfileModels:
 Initialize and profile models
 >>> from ultralytics.utils.benchmarks import ProfileModels
 >>> profiler = ProfileModels(["yolo11n.yaml", "yolov8s.yaml"], imgsz=640)
- >>> profiler.profile()
+ >>> profiler.run()
 """
 self.paths = paths
 self.num_timed_runs = num_timed_runs
@@ -423,7 +423,7 @@ class ProfileModels:
 self.trt = trt # run TensorRT profiling
 self.device = device or torch.device(0 if torch.cuda.is_available() else "cpu")
 
- def profile(self):
+ def run(self):
 """
 Profile YOLO models for speed and accuracy across various formats including ONNX and TensorRT.
 
@@ -434,7 +434,7 @@ class ProfileModels:
 Profile models and print results
 >>> from ultralytics.utils.benchmarks import ProfileModels
 >>> profiler = ProfileModels(["yolo11n.yaml", "yolov8s.yaml"])
- >>> results = profiler.profile()
+ >>> results = profiler.run()
 """
 files = self.get_files()
 
ultralytics/utils/checks.py CHANGED
@@ -16,7 +16,6 @@ from typing import Optional
 
 import cv2
 import numpy as np
- import requests
 import torch
 
 from ultralytics.utils import (
@@ -261,6 +260,8 @@ def check_latest_pypi_version(package_name="ultralytics"):
 Returns:
 (str): The latest version of the package.
 """
+ import requests # slow import
+
 try:
 requests.packages.urllib3.disable_warnings() # Disable the InsecureRequestWarning
 response = requests.get(f"https://pypi.org/pypi/{package_name}/json", timeout=3)
ultralytics/utils/dist.py CHANGED
@@ -2,7 +2,6 @@
 
 import os
 import shutil
- import socket
 import sys
 import tempfile
 
@@ -20,6 +19,8 @@ def find_free_network_port() -> int:
 Returns:
 (int): The available network port number.
 """
+ import socket
+
 with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
 s.bind(("127.0.0.1", 0))
 return s.getsockname()[1] # port
ultralytics/utils/downloads.py CHANGED
@@ -8,7 +8,6 @@ from multiprocessing.pool import ThreadPool
 from pathlib import Path
 from urllib import parse, request
 
- import requests
 import torch
 
 from ultralytics.utils import LOGGER, TQDM, checks, clean_url, emojis, is_online, url2file
@@ -203,6 +202,8 @@ def check_disk_space(url="https://ultralytics.com/assets/coco8.zip", path=Path.c
 Returns:
 (bool): True if there is sufficient disk space, False otherwise.
 """
+ import requests # slow import
+
 try:
 r = requests.head(url) # response
 assert r.status_code < 400, f"URL error for {url}: {r.status_code} {r.reason}" # check response
@@ -244,6 +245,8 @@ def get_google_drive_file_info(link):
 >>> link = "https://drive.google.com/file/d/1cqT-cJgANNrhIHCrEufUYhQ4RqiWG_lJ/view?usp=drive_link"
 >>> url, filename = get_google_drive_file_info(link)
 """
+ import requests # slow import
+
 file_id = link.split("/d/")[1].split("/view")[0]
 drive_url = f"https://drive.google.com/uc?export=download&id={file_id}"
 filename = None
@@ -388,6 +391,8 @@ def get_github_assets(repo="ultralytics/assets", version="latest", retry=False):
 Examples:
 >>> tag, assets = get_github_assets(repo="ultralytics/assets", version="latest")
 """
+ import requests # slow import
+
 if version != "latest":
 version = f"tags/{version}" # i.e. tags/v6.2
 url = f"https://api.github.com/repos/{repo}/releases/{version}"
ultralytics/utils/torch_utils.py CHANGED
@@ -30,11 +30,6 @@ from ultralytics.utils import (
 )
 from ultralytics.utils.checks import check_version
 
- try:
- import thop
- except ImportError:
- thop = None # conda support without 'ultralytics-thop' installed
-
 # Version checks (all default to version>=min_version)
 TORCH_1_9 = check_version(torch.__version__, "1.9.0")
 TORCH_1_13 = check_version(torch.__version__, "1.13.0")
@@ -378,7 +373,7 @@ def model_info_for_loggers(trainer):
 if trainer.args.profile: # profile ONNX and TensorRT times
 from ultralytics.utils.benchmarks import ProfileModels
 
- results = ProfileModels([trainer.last], device=trainer.device).profile()[0]
+ results = ProfileModels([trainer.last], device=trainer.device).run()[0]
 results.pop("model/name")
 else: # only return PyTorch times from most recent validation
 results = {
@@ -404,6 +399,11 @@ def get_flops(model, imgsz=640):
 Returns:
 (float): The model FLOPs in billions.
 """
+ try:
+ import thop
+ except ImportError:
+ thop = None # conda support without 'ultralytics-thop' installed
+
 if not thop:
 return 0.0 # if not installed return 0.0 GFLOPs
 
@@ -790,7 +790,7 @@ def cuda_memory_usage(device=None):
 yield cuda_info
 
 
- def profile(input, ops, n=10, device=None, max_num_obj=0):
+ def profile_ops(input, ops, n=10, device=None, max_num_obj=0):
 """
 Ultralytics speed, memory and FLOPs profiler.
 
@@ -805,12 +805,17 @@ def profile(input, ops, n=10, device=None, max_num_obj=0):
 (list): Profile results for each operation.
 
 Examples:
- >>> from ultralytics.utils.torch_utils import profile
+ >>> from ultralytics.utils.torch_utils import profile_ops
 >>> input = torch.randn(16, 3, 640, 640)
 >>> m1 = lambda x: x * torch.sigmoid(x)
 >>> m2 = nn.SiLU()
- >>> profile(input, [m1, m2], n=100) # profile over 100 iterations
+ >>> profile_ops(input, [m1, m2], n=100) # profile over 100 iterations
 """
+ try:
+ import thop
+ except ImportError:
+ thop = None # conda support without 'ultralytics-thop' installed
+
 results = []
 if not isinstance(device, torch.device):
 device = select_device(device)
{ultralytics-8.3.122.dist-info → ultralytics-8.3.124.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ultralytics
- Version: 8.3.122
+ Version: 8.3.124
 Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
{ultralytics-8.3.122.dist-info → ultralytics-8.3.124.dist-info}/RECORD CHANGED
@@ -1,16 +1,16 @@
 tests/__init__.py,sha256=xnMhv3O_DF1YrW4zk__ZywQzAaoTDjPKPoiI1Ktss1w,670
 tests/conftest.py,sha256=rsIAipRKfrVNoTaJ1LdpYue8AbcJ_fr3d3WIlM_6uXY,2982
 tests/test_cli.py,sha256=PtMFl5Lp_6ygBbYDJ1ndofz2k7ZYupMPEAiZw6aZVm8,5450
- tests/test_cuda.py,sha256=0uvTF4bY_Grsd_Xgtp7TdIEgMpUqKv8_kWA82NYDl_g,6260
+ tests/test_cuda.py,sha256=vCpPMAkEUQrQMVe4oMwGZQVOiuujEAkZ2zturNXFF-4,6256
 tests/test_engine.py,sha256=aGqZ8P7QO5C_nOa1b4FOyk92Ysdk5WiP-ST310Vyxys,4962
 tests/test_exports.py,sha256=dhZn86LdbapW15RthQF870LGxDjC1MUZhlGdBgPmgIQ,9716
 tests/test_integrations.py,sha256=dQteeRsRVuT_p5-T88-7jqT65Zm9iAXkyKg-KQ1_TQ8,6341
- tests/test_python.py,sha256=ok2xp7zwPOwcyl4yNawlx1uJ5HETn9eU-jyTPYzA0fI,25491
+ tests/test_python.py,sha256=NDIqkKt-awgjq45y29xopZLhX8kkknqYz81Wm7ixqXo,25495
 tests/test_solutions.py,sha256=BIvg9zW0a_ggEmrPKgB_Y0MncveH-eYuN5KlqdJ6nHs,5726
- ultralytics/__init__.py,sha256=StjzKKqUrYk4rkeEY2AnsNAR8keYsHJzELLBKmqs6Q4,730
+ ultralytics/__init__.py,sha256=NauKhjvgfctbHUxB1ptVJY2crHo1JEEDQLp-q3_O368,730
 ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
 ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
- ultralytics/cfg/__init__.py,sha256=ZXbvd-lyu0IIwVYAN6NH3KbQ5MLC5865Lh2c7IDkNSw,39675
+ ultralytics/cfg/__init__.py,sha256=rCuATbYdct_z6SO1ojFaKVGTEUkcO6ExZlkNdG79wl4,39483
 ultralytics/cfg/default.yaml,sha256=zSiCmQp_HRlh0gZe_AZSjNQNe1aNDoX2vcNUo5oJs2Q,8306
 ultralytics/cfg/datasets/Argoverse.yaml,sha256=_xlEDIJ9XkUo0v_iNL7FW079BoSeZtKSuLteKTtGbA8,3275
 ultralytics/cfg/datasets/DOTAv1.5.yaml,sha256=SHND_CFkojxw5iQD5Mcgju2kCZIl0gW2ajuzv1cqoL0,1224
@@ -99,29 +99,28 @@ ultralytics/cfg/models/v9/yolov9e.yaml,sha256=Olr2PlADpkD6N1TiVyAJEMzkrA7SbNul1n
 ultralytics/cfg/models/v9/yolov9m.yaml,sha256=WcKQ3xRsC1JMgA42Hx4xzr4FZmtE6B3wKvqhlQxkqw8,1411
 ultralytics/cfg/models/v9/yolov9s.yaml,sha256=j_v3JWaPtiuM8aKJt15Z_4HPRCoHWn_G6Z07t8CZyjk,1391
 ultralytics/cfg/models/v9/yolov9t.yaml,sha256=Q8GpSXE7fumhuJiQg4a2SkuS_UmnXqp-eoZxW_C0vEo,1375
- ultralytics/cfg/solutions/default.yaml,sha256=c-9thwI7y7VmIoIM6AW70Z0r825SToH2h7gSCsUoAak,1664
 ultralytics/cfg/trackers/botsort.yaml,sha256=TpRaK5kH_-QbjCQ7ekM4s_7j8I8ti3q8Hs7WDz4rEwA,1215
 ultralytics/cfg/trackers/bytetrack.yaml,sha256=6u-tiZlk16EqEwkNXaMrza6PAQmWj_ypgv26LGCtPDg,886
 ultralytics/data/__init__.py,sha256=nAXaL1puCc7z_NjzQNlJnhbVhT9Fla2u7Dsqo7q1dAc,644
 ultralytics/data/annotator.py,sha256=VEwb11FsEZm75qlEp8XDHFGKW0_rGsEaFDaBVd771Kw,2902
 ultralytics/data/augment.py,sha256=hAnd6yvlauJYk0Ek3_rTPc0RC8sTUfTk_GogMeH61MA,129231
- ultralytics/data/base.py,sha256=uMh_xzs6ci1hciDLpbVW2ZQr7js0o8jctE8KhL2T7Z4,19015
+ ultralytics/data/base.py,sha256=bsASjxdkvojkFjas-JfFNSpBjo0GRAbYKDh64Y2hCH4,19015
 ultralytics/data/build.py,sha256=FVIkgLGv5n1C7SRDrQiKOMDcI7V59WmEihKslzvEISg,9651
 ultralytics/data/converter.py,sha256=znXH2XTdo0Q4NDHMny1ydVBvrxKn2kbbwI-X5bn1MlQ,26890
 ultralytics/data/dataset.py,sha256=hbsjhmZBO-T1_gkUAm128kKowdwsLNwnK2lhnzmxJB8,34826
- ultralytics/data/loaders.py,sha256=o844tZlfZEhXop16t-hwaEQHhbfP3_bQMS0whF_NSos,28531
+ ultralytics/data/loaders.py,sha256=MRu9ylvwLfBxX2eH4wRNvk4rNyUEIHBb8c0QyDOX-8c,28488
 ultralytics/data/split.py,sha256=6LHB1z8woXurWjXfM-Zm2thRr1KXvzR18CFJA-SDUvE,4677
 ultralytics/data/split_dota.py,sha256=ihG56YfNFZJDq1r7Zcgk8fKzde3gn21W0f67ub6nT68,11879
- ultralytics/data/utils.py,sha256=HET4rbj4iUcjen0t8E_Qo_9S9RGPVQRYL-j0KI0qflI,35269
+ ultralytics/data/utils.py,sha256=pSQ5dycIInRuAUQI8HIuLrJvC1e1TSbbtdFEjFrXewg,35268
 ultralytics/data/scripts/download_weights.sh,sha256=0y8XtZxOru7dVThXDFUXLHBuICgOIqZNUwpyL4Rh6lg,595
 ultralytics/data/scripts/get_coco.sh,sha256=UuJpJeo3qQpTHVINeOpmP0NYmg8PhEFE3A8J3jKrnPw,1768
 ultralytics/data/scripts/get_coco128.sh,sha256=qmRQl_hOKrsdHrTrnyQuFIH01oDz3lfaz138OgGfLt8,650
 ultralytics/data/scripts/get_imagenet.sh,sha256=hr42H16bM47iT27rgS7MpEo-GeOZAYUQXgr0B2cwn48,1705
 ultralytics/engine/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
- ultralytics/engine/exporter.py,sha256=rFi7V-REi66DRLTWQzgx87NMfdp8eLfPZkYfSpS7Wfg,70137
- ultralytics/engine/model.py,sha256=wS1cwgv0iyhsslMAZYMGlYDWitDIRW96d7MxwW-Sw5o,52817
+ ultralytics/engine/exporter.py,sha256=56PU45SvNhYym9JiJctZXO5NkW-cwzTu5o7yIqx13Fc,70251
+ ultralytics/engine/model.py,sha256=D5femO5IfJYqxbbpEVoG85rT2HHlQDonH3SgkniR8x0,52866
 ultralytics/engine/predictor.py,sha256=YJ5l-0qIpr6JAJxowswtZ0IqmXBqVTvAA9vR40v0sCM,21752
- ultralytics/engine/results.py,sha256=MZkhI0CCOkBQPR-EzswymVqvqeyk35EkESGUQ_08r8k,79738
+ ultralytics/engine/results.py,sha256=-JPBn_YMyZv6HhdlyhjRIZCcMf41LTyWID7JrEP64rc,79632
 ultralytics/engine/trainer.py,sha256=fdB8H6brnnQAL-ZFP6nmNmKMze0_qy0OT3jJg1B5uhQ,38864
 ultralytics/engine/tuner.py,sha256=IyFKsh4Q4a1DsjfK02DdN9cufAiBDhdhIq7F7ddguys,12646
 ultralytics/engine/validator.py,sha256=jfV81wuFDgrVVXEcPzgOpxAPrAZn-1LgpKwu9l_1-ts,17050
@@ -148,15 +147,15 @@ ultralytics/models/rtdetr/val.py,sha256=4KsGuWOsik7JXpU8mUY6ts7_wWuPvcNSxiAGIiGS
 ultralytics/models/sam/__init__.py,sha256=iR7B06rAEni21eptg8n4rLOP0Z_qV9y9PL-L93n4_7s,266
 ultralytics/models/sam/amg.py,sha256=r_duG0DCeCyTYfhcVh-ti10FPMl4VGL4SKc8yvbQpNU,11050
 ultralytics/models/sam/build.py,sha256=Vhml3zBGDcRO-efauNdM0ZlKTV10ADAj_aT823lPJv8,12515
- ultralytics/models/sam/model.py,sha256=19zgkysRYJ-9hKSAv0pQk-G7dW0ndZz_VFW7-WzCIx0,7111
- ultralytics/models/sam/predict.py,sha256=hWZs3rP96F3bUTx93HqhQA1phCLdh_oQkqlMZuU95Cg,82376
+ ultralytics/models/sam/model.py,sha256=XWeFKNuSTuc7mgGnCQpSMgRVeLD7TedUiUtrTjiS8SY,7135
+ ultralytics/models/sam/predict.py,sha256=tT_-v2dJInrZaOse1V7q8PoHtUDsrNjhopn0FRlImtg,82453
 ultralytics/models/sam/modules/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
 ultralytics/models/sam/modules/blocks.py,sha256=Kj9bWyP1E96JPllJS8cJ2FSxPdkQChZdvogm3OPPF2E,45935
 ultralytics/models/sam/modules/decoders.py,sha256=4Ijtkl7g_UmLMNEGokt1C05T05MkUczFIRJIUX0gDDc,25654
 ultralytics/models/sam/modules/encoders.py,sha256=uXP-CMjtTRCGD2hkbDfXjKSrW0l6Lj_pyx3ZwztYZcw,37614
 ultralytics/models/sam/modules/memory_attention.py,sha256=2HWCr7GrXMRX_V3RTfz44i2W44owpStPZU8Jq2hM0gE,12964
 ultralytics/models/sam/modules/sam.py,sha256=PJxBIfJdJTe-NLWZZgmSWbnvHhyQjzr7gXNarjqBNJE,52628
- ultralytics/models/sam/modules/tiny_encoder.py,sha256=p6386bsmIwgZq1wfV7h6dcnI6955SBO2bBrp0HwjnYQ,40837
+ ultralytics/models/sam/modules/tiny_encoder.py,sha256=1TDefN-f6QEOEDRZGIrRZYI2T9iYf7f1l-Y6kOdr1O4,40865
 ultralytics/models/sam/modules/transformer.py,sha256=YRhoriZ-j37kxq19kArfv2DSOz2Jj9DAbs2mcOBVORw,14674
 ultralytics/models/sam/modules/utils.py,sha256=3PatFjbgO1uasMZXXLJw23CrjuYTW7BS9NM4aXom-zY,16294
 ultralytics/models/utils/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
@@ -169,7 +168,7 @@ ultralytics/models/yolo/classify/predict.py,sha256=JV9szginTQ9Lpob0FozhKMiEIu1vV
 ultralytics/models/yolo/classify/train.py,sha256=rv2CJv9fzvtHf2q4l5g0RsjplWKeLpz637kKqjtrLNY,9737
 ultralytics/models/yolo/classify/val.py,sha256=xk-YwSQdl_oqyCBV0OOAOcXFL6CchebFOc36AkRSyjE,9992
 ultralytics/models/yolo/detect/__init__.py,sha256=GIRsLYR-kT4JJx7lh4ZZAFGBZj0aebokuU0A7JbjDVA,257
- ultralytics/models/yolo/detect/predict.py,sha256=oAftDYhksUDjVfgKIsmmair1_ujwVY-yc-MHrl7r9Hw,5343
+ ultralytics/models/yolo/detect/predict.py,sha256=n1-WmzkvW3dHglI7XrxDr4i0nZ236h6Wh37TAWXpFfo,5341
 ultralytics/models/yolo/detect/train.py,sha256=YOEmUZkfJBq6hNbB_P10k-uy4_2fUgdPfVWzO4y8Egs,9538
 ultralytics/models/yolo/detect/val.py,sha256=7AB_wZi7aQ9_V1pZQSWk5qiJYS34fuO3P5aX7_3eeFE,18471
 ultralytics/models/yolo/obb/__init__.py,sha256=tQmpG8wVHsajWkZdmD6cjGohJ4ki64iSXQT8JY_dydo,221
@@ -193,8 +192,8 @@ ultralytics/models/yolo/yoloe/train.py,sha256=St3zw_XWRol9pODWU4lvKlJnWYr1lmWQNu
 ultralytics/models/yolo/yoloe/train_seg.py,sha256=l0SOMQQd0Y_EBBHhTNekgrQsftqhYyK4oWTdCg1dLrE,4633
 ultralytics/models/yolo/yoloe/val.py,sha256=oA8cVT3pBXF6aPZy7ITq0mDcktRuIgks8tTtqMRISyY,8431
 ultralytics/nn/__init__.py,sha256=rjociYD9lo_K-d-1s6TbdWklPLjTcEHk7OIlRDJstIE,615
- ultralytics/nn/autobackend.py,sha256=tnYxzboWGBgNvUYrz2zokPH1Bw__GD2ZQro1gO-ZIF8,39298
- ultralytics/nn/tasks.py,sha256=EwRC70qA3eP8Xp-gGP8OuN-q8LCGDrq1iRue7ncRSV4,62916
+ ultralytics/nn/autobackend.py,sha256=ng6CUi82BrV6qGVsif_U_1E4foL19N7isB7tQGECTGE,39314
+ ultralytics/nn/tasks.py,sha256=i2_1t63rNc5DRTvhA61tdUyVinalN4SmUtd11RN9KZ4,62948
 ultralytics/nn/text_model.py,sha256=8_7SRejKZA4Pi-ha0gjcWrQDDCDMBhtwlg8pPMWgjDE,13145
 ultralytics/nn/modules/__init__.py,sha256=dXLtIk9rt944WfsTdpgEdWOg3HQEHdwQztuZ6WNJygs,3144
 ultralytics/nn/modules/activation.py,sha256=PvXZkA9AzEntR575JkFORdmtcRwATyy0lje-uHA5_8w,2210
@@ -205,37 +204,38 @@ ultralytics/nn/modules/transformer.py,sha256=tC80QKFaLtWZo0zVNTuORX4pOu6HVs2wS0v
 ultralytics/nn/modules/utils.py,sha256=rn8yTObZGkQoqVzjbZWLaHiytppG4ffjMME4Lw60glM,6092
 ultralytics/solutions/__init__.py,sha256=pjNYva0qnw-4hf_tTLx_dgIfg24XrYLLp3kygPj95rs,1113
 ultralytics/solutions/ai_gym.py,sha256=QRrZGMka83NY4B9gU3N2GxTaomo0WmTMNLxkNZTxo9U,5763
- ultralytics/solutions/analytics.py,sha256=O8dXdDTpHPRlz2vAGMvef1NfWUXBvoYt2G_TQI_UjoQ,11983
+ ultralytics/solutions/analytics.py,sha256=dTwjnC6udHwLqkW86miBHCqjO5sj78C9ws-6EXDia8s,12052
+ ultralytics/solutions/config.py,sha256=ogXWpE0LhVXHz05M2ChrVu5usIxsRy2yxraHuSyc_V0,5330
 ultralytics/solutions/distance_calculation.py,sha256=E13siGlQTqaGCk0xULk5Q86PwxiBAL4XWp83kQPb0YE,5751
- ultralytics/solutions/heatmap.py,sha256=dagbZ0Vn4UdywNyiAypYW5v1uzOWf521QrkzmqyeCEc,5626
+ ultralytics/solutions/heatmap.py,sha256=lXYptA_EbypipF7YJMjsxxBzLAgsroLcdqypvNAhduA,5569
 ultralytics/solutions/instance_segmentation.py,sha256=HxzFf752PwjAjZhrf8BzI-gEey_f9mjxTOqJsLHSIB8,3498
- ultralytics/solutions/object_blurrer.py,sha256=OCLHCZul8cQOxK-HTV48rCWmgr_na8x9F9jf8FSAQgk,3954
+ ultralytics/solutions/object_blurrer.py,sha256=0oSDdziKBw4ZxEwD4nGNrOcNPFs3bAux39RIJ87vVUE,3947
 ultralytics/solutions/object_counter.py,sha256=7u8OkFye91R9tf1Ar19ttXhKcoB6ziyi0pZfbHaQJ5U,10044
- ultralytics/solutions/object_cropper.py,sha256=RNk_v_XRXm9Ye2TsKG5CPd3TDsRaiODWpy8MvYqkSLs,3382
- ultralytics/solutions/parking_management.py,sha256=SiVxRl44OxxYUXIzNOxOBqtaFJSRRpD_gTsNyvB1n5o,13277
+ ultralytics/solutions/object_cropper.py,sha256=L6QZC5as_cUT42TMzeyXmkHa7vBi2UpNFf_-Jc7C1G0,3316
+ ultralytics/solutions/parking_management.py,sha256=BV-2lpSfgmK7fib3DnPSZ5rtLdy11c8pBQm-72iTetc,13289
 ultralytics/solutions/queue_management.py,sha256=p1-cuI_rs4ygtlBryXjE65NYG2bnZXhp3ylggFnWcRs,4344
 ultralytics/solutions/region_counter.py,sha256=Zn35YRXNzhBk27D9MLOHBYe2L1o6H2ey3mEwCXofB_E,5418
- ultralytics/solutions/security_alarm.py,sha256=mbUtqoLgjAWz9k3pjMoEZY_PR-lhjiic1NK90FhEJkw,6250
- ultralytics/solutions/solutions.py,sha256=OZAmwmqCOK8SI5dpZFrzUkrPIUFGMcgPL5zV4ymzkzU,32688
- ultralytics/solutions/speed_estimation.py,sha256=dbHzj9NWrcuMXYbBJAZNcQ3D9zjKV8PsNkU6orOqf7Q,5344
+ ultralytics/solutions/security_alarm.py,sha256=cmUWvz7U9IAxlOr-QCIU_j95lc2c8eUx9wI04t1vDFU,6251
+ ultralytics/solutions/solutions.py,sha256=MV2sKr0mHVMh-dT2SmiYkYLFCdoNz-2VA0z4a7fWK_8,32503
+ ultralytics/solutions/speed_estimation.py,sha256=r7S5nGIx8PTV-zC4zCI36lQD2DVy5cen5cTXItfQIHo,5318
 ultralytics/solutions/streamlit_inference.py,sha256=M0ppTFInqSPrdytZBLH8x-XoA7zFc7PaRQ51wHG9ppU,9846
- ultralytics/solutions/trackzone.py,sha256=efko4U8zT8lyNLLo9zF543rTXHefeYthxf9GV3c2TiU,3860
- ultralytics/solutions/vision_eye.py,sha256=DHf3pQzNqP71oYx3QXflvcGsg4nEYJCD1SOdSOxiWBk,2965
+ ultralytics/solutions/trackzone.py,sha256=mfklnZcVRqI3bbhPiHF2iSoV6INcd10wwwGP4tlK7L0,3854
+ ultralytics/solutions/vision_eye.py,sha256=7YrMqZkR28LLNHWxX3Ye78GvPdXXuouQAmgMdGwRLQ4,2953
 ultralytics/trackers/__init__.py,sha256=Zlu_Ig5osn7hqch_g5Be_e4pwZUkeeTQiesJCi0pFGI,255
 ultralytics/trackers/basetrack.py,sha256=LYvWB5d7Woyrz_RlxaopjV07RQKH3sff_lZJfMcMxcA,4450
 ultralytics/trackers/bot_sort.py,sha256=rpaj7X8COT0Vi5GFR9z-CGSBgJ7gTfFx2wTSZFTnhco,11466
 ultralytics/trackers/byte_tracker.py,sha256=D7JQ_6V8OUMQryxTrAr010UXMSaboQnI7T1xppzHXYg,20921
- ultralytics/trackers/track.py,sha256=mu6L9RWAW8Nq0vJanX-hTTUST-OmLq49d8VV96-J9u8,4817
+ ultralytics/trackers/track.py,sha256=wuja3-xceuhaTEJyD2VqRBJUodPEM7-4iK47MkxshjM,4830
 ultralytics/trackers/utils/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
 ultralytics/trackers/utils/gmc.py,sha256=dz3I5LbIv7h1__Xg7rGHecQFE32VFTe54tUnxb8F0Z8,14466
 ultralytics/trackers/utils/kalman_filter.py,sha256=A0CqOnnaKH6kr0XwuHzyHmIU6aJAjJYxF9jVlNBKZHo,21326
 ultralytics/trackers/utils/matching.py,sha256=7eIufSdeN7cXuFMjvcfvz0Ldq84m4YKZl5IGxBR8IIo,7169
- ultralytics/utils/__init__.py,sha256=qV5nw3ED1NuSCoYwW3WpT6BTLeCnoH7KJgbPZU_3Sbo,50422
- ultralytics/utils/autobatch.py,sha256=VZTIKLWeFZFwBHJmbiCn3MaxoFp89hLR0DSCR_iLXJg,4913
- ultralytics/utils/benchmarks.py,sha256=aZse9tetEwjMy2GkdNWZ0WfCgjLfCM3_BkI1qNNQb_w,30377
- ultralytics/utils/checks.py,sha256=5bkna--ZH4FJDZtgef_K4xgjiKOZqCarTqIE4Z0vwJU,32628
- ultralytics/utils/dist.py,sha256=e-DK_YowV7D9rDGQyWR9Kaosxp2eWe2EogSWnnUMthc,4098
- ultralytics/utils/downloads.py,sha256=IvHng2-bApoyi-QMvesGwMmFNqEFiXPIKiiW16Q-U4M,22220
+ ultralytics/utils/__init__.py,sha256=8ymHkBKOfH44w3tkLWBjNP2bijWU_Z25wa6S8cl00k4,50246
+ ultralytics/utils/autobatch.py,sha256=kg05q2qKg74y_Uq2vvr01i3KhLfpVR7sT0IXBt3_kyI,4921
+ ultralytics/utils/benchmarks.py,sha256=GXcatQqAUCBg3lSmzR5ZEZDYWdPREtFapHP-S4wj7G4,30357
+ ultralytics/utils/checks.py,sha256=GO-QLkI3ZrjBj_lunIc3SGd3D8eHqIIxdKqHtCYF4MI,32648
+ ultralytics/utils/dist.py,sha256=aytW0JEkcA5ZTZucV92ot7Bn-apiej8aLk3QNWicjAc,4103
+ ultralytics/utils/downloads.py,sha256=Rn8xDwn2bzgBqiYz3Xn0rm3MWjk4T-QUd2Ajlu1EpQ4,22312
 ultralytics/utils/errors.py,sha256=vY9h2evFSrHnZdHJVVrmm8Zzw4qVDLyo9DeYW5g0dFk,1573
 ultralytics/utils/export.py,sha256=1MgT6rSuofvLRR-J01EQvfHylzyO_b5BDM13imypQzA,8814
 ultralytics/utils/files.py,sha256=0K4O1cgqRiXaDw7EQK13TqA5SME_RrvfDVQSPetNr5w,8042
@@ -246,7 +246,7 @@ ultralytics/utils/ops.py,sha256=YFwPrKlPcgEmgAWqnJVR0Ccx5NQgp5e3P-YYHwVSP0k,3477
 ultralytics/utils/patches.py,sha256=6rVT-l8WDp_Py3O-gZdv9t3PnrYRRkrX_lF3mZ1XS8c,4928
 ultralytics/utils/plotting.py,sha256=5QPK1y-gm4T1mK3sjfRZhIUJAyP05D1cJ7h9wHPTifU,46616
 ultralytics/utils/tal.py,sha256=P5nPoR9qNnFuDIda0fsn8WP6m1V8r7EbvXUuhNRFFTA,20805
- ultralytics/utils/torch_utils.py,sha256=KUt2qoud3O2bb_cWv1TDjZloNKuLbWk0XJU97wlEdU4,39028
+ ultralytics/utils/torch_utils.py,sha256=SOdT9asxyQ-MEJGZQIH_Va9jcbonjISeHOwiFg1gRYE,39180
 ultralytics/utils/triton.py,sha256=xK9Db_ZUVDnIK1u76S2G-6ulIBsLfj9HN_YOaSrnMuU,5304
 ultralytics/utils/tuner.py,sha256=0Bp7l5dWZe1RzdvAIa11wQoX6eoAaoNRcA-EAnpofbk,6755
 ultralytics/utils/callbacks/__init__.py,sha256=hzL63Rce6VkZhP4Lcim9LKjadixaQG86nKqPhk7IkS0,242
@@ -260,9 +260,9 @@ ultralytics/utils/callbacks/neptune.py,sha256=JaI95Cj2kIjUhlEEOiDN0-Drc-fDelLhNI
 ultralytics/utils/callbacks/raytune.py,sha256=A8amUGpux7dYES-L1iSeMoMXBySGWCD1aUqT7vcG-pU,1284
 ultralytics/utils/callbacks/tensorboard.py,sha256=jgYnym3cUQFAgN1GzTyO7l3jINtfAh8zhrllDvnLuVQ,5339
 ultralytics/utils/callbacks/wb.py,sha256=iDRFXI4IIDm8R5OI89DMTmjs8aHLo1HRCLkOFKdaMG4,7507
- ultralytics-8.3.122.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
- ultralytics-8.3.122.dist-info/METADATA,sha256=Ie20iTid09rrSu0IDpyAHcgvr6MlwsZqZ5ciNuwfWmM,37180
- ultralytics-8.3.122.dist-info/WHEEL,sha256=wXxTzcEDnjrTwFYjLPcsW_7_XihufBwmpiBeiXNBGEA,91
- ultralytics-8.3.122.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
- ultralytics-8.3.122.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
- ultralytics-8.3.122.dist-info/RECORD,,
+ ultralytics-8.3.124.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ ultralytics-8.3.124.dist-info/METADATA,sha256=2u_HB01E-HJkGn15DtHXlRt_yXP1KPmkVWHKjEVX8rY,37180
+ ultralytics-8.3.124.dist-info/WHEEL,sha256=wXxTzcEDnjrTwFYjLPcsW_7_XihufBwmpiBeiXNBGEA,91
+ ultralytics-8.3.124.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ ultralytics-8.3.124.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ ultralytics-8.3.124.dist-info/RECORD,,
ultralytics/cfg/solutions/default.yaml DELETED
@@ -1,24 +0,0 @@
- # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
-
- # Global configuration YAML with settings and arguments for Ultralytics Solutions
- # For documentation see https://docs.ultralytics.com/solutions/
-
- # Object counting settings --------------------------------------------------------------------------------------------
- region: # list[tuple[int, int]] object counting, queue or speed estimation region points.
- show_in: True # (bool) flag to display objects moving *into* the defined region
- show_out: True # (bool) flag to display objects moving *out of* the defined region
-
- # Heatmaps settings ----------------------------------------------------------------------------------------------------
- colormap: # (int | str) colormap for heatmap, Only OPENCV supported colormaps can be used.
-
- # Workouts monitoring settings -----------------------------------------------------------------------------------------
- up_angle: 145.0 # (float) Workouts up_angle for counts, 145.0 is default value.
- down_angle: 90 # (float) Workouts down_angle for counts, 90 is default value. Y
- kpts: [6, 8, 10] # (list[int]) keypoints for workouts monitoring, i.e. for push-ups kpts have values of [6, 8, 10].
-
- # Analytics settings ---------------------------------------------------------------------------------------------------
- analytics_type: "line" # (str) analytics type i.e "line", "pie", "bar" or "area" charts.
- json_file: # (str) parking system regions file path.
-
- # Security alarm system settings ---------------------------------------------------------------------------------------
- records: 5 # (int) Total detections count to send an email about security