ultralytics 8.3.40__py3-none-any.whl → 8.3.44__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, exactly as they appear in the public registries they were published to, and is provided for informational purposes only.
ultralytics/__init__.py CHANGED
@@ -1,6 +1,6 @@
  # Ultralytics YOLO 🚀, AGPL-3.0 license

- __version__ = "8.3.40"
+ __version__ = "8.3.44"

  import os

ultralytics/cfg/__init__.py CHANGED
@@ -77,7 +77,7 @@ SOLUTIONS_HELP_MSG = f"""

  yolo solutions SOLUTION ARGS

- Where SOLUTION (optional) is one of {list(SOLUTION_MAP.keys())}
+ Where SOLUTION (optional) is one of {list(SOLUTION_MAP.keys())[:-1]}
  ARGS (optional) are any number of custom 'arg=value' pairs like 'show_in=True' that override defaults
  at https://docs.ultralytics.com/usage/cfg

@@ -96,7 +96,7 @@ SOLUTIONS_HELP_MSG = f"""
  5. Generate analytical graphs
  yolo solutions analytics analytics_type="pie"

- 6. Track Objects Within Specific Zones
+ 6. Track objects within specific zones
  yolo solutions trackzone source="path/to/video/file.mp4" region=[(150, 150), (1130, 150), (1130, 570), (150, 570)]
  """
  CLI_HELP_MSG = f"""
@@ -125,7 +125,7 @@ CLI_HELP_MSG = f"""
  yolo streamlit-predict

  6. Ultralytics solutions usage
- yolo solutions count or in {list(SOLUTION_MAP.keys())} source="path/to/video/file.mp4"
+ yolo solutions count or in {list(SOLUTION_MAP.keys())[1:-1]} source="path/to/video/file.mp4"

  7. Run special commands:
  yolo help
ultralytics/engine/model.py CHANGED
@@ -136,6 +136,7 @@ class Model(nn.Module):
  # Check if Triton Server model
  elif self.is_triton_model(model):
  self.model_name = self.model = model
+ self.overrides["task"] = task or "detect" # set `task=detect` if not explicitly set
  return

  # Load or create new YOLO model
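The added override gives Triton-served models a default task of "detect". A minimal usage sketch (the server URL is hypothetical) showing how a Triton endpoint is passed to YOLO, with the task stated explicitly rather than relying on the new default:

    from ultralytics import YOLO

    # Hypothetical Triton Inference Server endpoint serving a YOLO model.
    model = YOLO("http://localhost:8000/yolo", task="detect")
    results = model("https://ultralytics.com/images/bus.jpg")  # inference runs through the Triton backend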
ultralytics/engine/predictor.py CHANGED
@@ -155,7 +155,7 @@ class BasePredictor:
  same_shapes = len({x.shape for x in im}) == 1
  letterbox = LetterBox(
  self.imgsz,
- auto=same_shapes and (self.model.pt or getattr(self.model, "dynamic", False)),
+ auto=same_shapes and (self.model.pt or (getattr(self.model, "dynamic", False) and not self.model.imx)),
  stride=self.model.stride,
  )
  return [letterbox(image=x) for x in im]
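For context on the `auto` flag being toggled here: with `auto=True` LetterBox pads only up to the nearest stride multiple, while `auto=False` pads all the way to the target shape. A small illustrative sketch (shapes chosen arbitrarily):

    import numpy as np
    from ultralytics.data.augment import LetterBox

    im = np.zeros((480, 640, 3), dtype=np.uint8)
    print(LetterBox(640, auto=True, stride=32)(image=im).shape)   # (480, 640, 3): minimal, stride-aligned padding
    print(LetterBox(640, auto=False, stride=32)(image=im).shape)  # (640, 640, 3): padded to the full square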
ultralytics/nn/autobackend.py CHANGED
@@ -96,7 +96,7 @@ class AutoBackend(nn.Module):
  Initialize the AutoBackend for inference.

  Args:
- weights (str): Path to the model weights file. Defaults to 'yolov8n.pt'.
+ weights (str | torch.nn.Module): Path to the model weights file or a module instance. Defaults to 'yolo11n.pt'.
  device (torch.device): Device to run the model on. Defaults to CPU.
  dnn (bool): Use OpenCV DNN module for ONNX inference. Defaults to False.
  data (str | Path | optional): Path to the additional data.yaml file containing class names. Optional.
@@ -462,6 +462,7 @@ class AutoBackend(nn.Module):
  from ultralytics.utils.triton import TritonRemoteModel

  model = TritonRemoteModel(w)
+ metadata = model.metadata

  # Any other format (unsupported)
  else:
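The `metadata = model.metadata` read pairs with the `TritonRemoteModel.metadata` attribute added further down in ultralytics/utils/triton.py, which evaluates an optional `metadata` entry from the model config's `parameters`. A minimal sketch of that lookup, using a hypothetical config dict in place of the one returned by the Triton client:

    # Hypothetical parsed model config; the real dict comes from the Triton client library.
    config = {"parameters": {"metadata": {"string_value": "{'task': 'detect', 'stride': 32}"}}}
    metadata = eval(config.get("parameters", {}).get("metadata", {}).get("string_value", "None"))
    print(metadata)  # {'task': 'detect', 'stride': 32}; evaluates to None when the entry is absent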
@@ -700,8 +701,7 @@ class AutoBackend(nn.Module):
  # print(type(x), len(x)) if isinstance(x, (list, tuple)) else print(type(x), x.shape) # debug shapes
  if isinstance(y, (list, tuple)):
  if len(self.names) == 999 and (self.task == "segment" or len(y) == 2): # segments and names not defined
- ip, ib = (0, 1) if len(y[0].shape) == 4 else (1, 0) # index of protos, boxes
- nc = y[ib].shape[1] - y[ip].shape[3] - 4 # y = (1, 160, 160, 32), (1, 116, 8400)
+ nc = y[0].shape[1] - y[1].shape[1] - 4 # y = (1, 32, 160, 160), (1, 116, 8400)
  self.names = {i: f"class{i}" for i in range(nc)}
  return self.from_numpy(y[0]) if len(y) == 1 else [self.from_numpy(x) for x in y]
  else:
ultralytics/solutions/heatmap.py CHANGED
@@ -27,12 +27,8 @@ class Heatmap(ObjectCounter):
  Examples:
  >>> from ultralytics.solutions import Heatmap
  >>> heatmap = Heatmap(model="yolov8n.pt", colormap=cv2.COLORMAP_JET)
- >>> results = heatmap("path/to/video.mp4")
- >>> for result in results:
- ... print(result.speed) # Print inference speed
- ... cv2.imshow("Heatmap", result.plot())
- ... if cv2.waitKey(1) & 0xFF == ord("q"):
- ... break
+ >>> frame = cv2.imread("frame.jpg")
+ >>> processed_frame = heatmap.generate_heatmap(frame)
  """

  def __init__(self, **kwargs):
ultralytics/solutions/queue_management.py CHANGED
@@ -27,10 +27,13 @@ class QueueManager(BaseSolution):
  display_output: Displays the processed output.

  Examples:
- >>> queue_manager = QueueManager(source="video.mp4", region=[100, 100, 200, 200, 300, 300])
- >>> for frame in video_stream:
- ... processed_frame = queue_manager.process_queue(frame)
- ... cv2.imshow("Queue Management", processed_frame)
+ >>> cap = cv2.VideoCapture("Path/to/video/file.mp4")
+ >>> queue_manager = QueueManager(region=[100, 100, 200, 200, 300, 300])
+ >>> while cap.isOpened():
+ >>> success, im0 = cap.read()
+ >>> if not success:
+ >>> break
+ >>> out = queue.process_queue(im0)
  """

  def __init__(self, **kwargs):
ultralytics/trackers/utils/matching.py CHANGED
@@ -13,7 +13,7 @@ try:
  except (ImportError, AssertionError, AttributeError):
  from ultralytics.utils.checks import check_requirements

- check_requirements("lapx>=0.5.2") # update to lap package from https://github.com/rathaROG/lapx
+ check_requirements("lap>=0.5.12") # https://github.com/gatagat/lap
  import lap

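The tracker dependency moves from the `lapx` fork back to the `lap` package. A minimal sketch of the solver call this module relies on (the cost values are made up):

    import numpy as np
    import lap

    # lapjv solves the (rectangular) linear assignment problem on a cost matrix;
    # pairs whose cost exceeds cost_limit stay unmatched and are returned as -1.
    cost = np.array([[0.1, 0.9], [0.8, 0.2], [0.6, 0.6]], dtype=np.float32)
    _, x, y = lap.lapjv(cost, extend_cost=True, cost_limit=0.5)
    print([(r, int(c)) for r, c in enumerate(x) if c >= 0])  # [(0, 0), (1, 1)]; row 2 stays unmatched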
ultralytics/utils/checks.py CHANGED
@@ -669,8 +669,22 @@ def check_amp(model):
  from ultralytics.utils.torch_utils import autocast

  device = next(model.parameters()).device # get model device
+ prefix = colorstr("AMP: ")
  if device.type in {"cpu", "mps"}:
  return False # AMP only used on CUDA devices
+ else:
+ # GPUs that have issues with AMP
+ pattern = re.compile(
+ r"(nvidia|geforce|quadro|tesla).*?(1660|1650|1630|t400|t550|t600|t1000|t1200|t2000|k40m)", re.IGNORECASE
+ )
+
+ gpu = torch.cuda.get_device_name(device)
+ if bool(pattern.search(gpu)):
+ LOGGER.warning(
+ f"{prefix}checks failed ❌. AMP training on {gpu} GPU may cause "
+ f"NaN losses or zero-mAP results, so AMP will be disabled during training."
+ )
+ return False

  def amp_allclose(m, im):
  """All close FP32 vs AMP results."""
@@ -683,7 +697,6 @@ def check_amp(model):
  return a.shape == b.shape and torch.allclose(a, b.float(), atol=0.5) # close to 0.5 absolute tolerance

  im = ASSETS / "bus.jpg" # image to check
- prefix = colorstr("AMP: ")
  LOGGER.info(f"{prefix}running Automatic Mixed Precision (AMP) checks...")
  warning_msg = "Setting 'amp=True'. If you experience zero-mAP or NaN losses you can disable AMP with amp=False."
  try:
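The new early exit disables AMP on GPUs known to produce NaN losses or zero mAP under mixed precision. A quick, self-contained check of the regex above (device names are illustrative):

    import re

    pattern = re.compile(
        r"(nvidia|geforce|quadro|tesla).*?(1660|1650|1630|t400|t550|t600|t1000|t1200|t2000|k40m)", re.IGNORECASE
    )
    print(bool(pattern.search("NVIDIA GeForce GTX 1660 Ti")))  # True  -> AMP disabled up front
    print(bool(pattern.search("NVIDIA GeForce RTX 3090")))     # False -> the full AMP check still runs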
ultralytics/utils/ops.py CHANGED
@@ -400,7 +400,7 @@ def xyxy2xywh(x):
  y (np.ndarray | torch.Tensor): The bounding box coordinates in (x, y, width, height) format.
  """
  assert x.shape[-1] == 4, f"input shape last dimension expected 4 but input shape is {x.shape}"
- y = torch.empty_like(x) if isinstance(x, torch.Tensor) else np.empty_like(x) # faster than clone/copy
+ y = empty_like(x) # faster than clone/copy
  y[..., 0] = (x[..., 0] + x[..., 2]) / 2 # x center
  y[..., 1] = (x[..., 1] + x[..., 3]) / 2 # y center
  y[..., 2] = x[..., 2] - x[..., 0] # width
@@ -420,7 +420,7 @@ def xywh2xyxy(x):
  y (np.ndarray | torch.Tensor): The bounding box coordinates in (x1, y1, x2, y2) format.
  """
  assert x.shape[-1] == 4, f"input shape last dimension expected 4 but input shape is {x.shape}"
- y = torch.empty_like(x) if isinstance(x, torch.Tensor) else np.empty_like(x) # faster than clone/copy
+ y = empty_like(x) # faster than clone/copy
  xy = x[..., :2] # centers
  wh = x[..., 2:] / 2 # half width-height
  y[..., :2] = xy - wh # top left xy
@@ -443,7 +443,7 @@ def xywhn2xyxy(x, w=640, h=640, padw=0, padh=0):
  x1,y1 is the top-left corner, x2,y2 is the bottom-right corner of the bounding box.
  """
  assert x.shape[-1] == 4, f"input shape last dimension expected 4 but input shape is {x.shape}"
- y = torch.empty_like(x) if isinstance(x, torch.Tensor) else np.empty_like(x) # faster than clone/copy
+ y = empty_like(x) # faster than clone/copy
  y[..., 0] = w * (x[..., 0] - x[..., 2] / 2) + padw # top left x
  y[..., 1] = h * (x[..., 1] - x[..., 3] / 2) + padh # top left y
  y[..., 2] = w * (x[..., 0] + x[..., 2] / 2) + padw # bottom right x
@@ -469,7 +469,7 @@ def xyxy2xywhn(x, w=640, h=640, clip=False, eps=0.0):
  if clip:
  x = clip_boxes(x, (h - eps, w - eps))
  assert x.shape[-1] == 4, f"input shape last dimension expected 4 but input shape is {x.shape}"
- y = torch.empty_like(x) if isinstance(x, torch.Tensor) else np.empty_like(x) # faster than clone/copy
+ y = empty_like(x) # faster than clone/copy
  y[..., 0] = ((x[..., 0] + x[..., 2]) / 2) / w # x center
  y[..., 1] = ((x[..., 1] + x[..., 3]) / 2) / h # y center
  y[..., 2] = (x[..., 2] - x[..., 0]) / w # width
@@ -625,8 +625,9 @@ def resample_segments(segments, n=1000):
  """
  for i, s in enumerate(segments):
  s = np.concatenate((s, s[0:1, :]), axis=0)
- x = np.linspace(0, len(s) - 1, n)
+ x = np.linspace(0, len(s) - 1, n - len(s) if len(s) < n else n)
  xp = np.arange(len(s))
+ x = np.insert(x, np.searchsorted(x, xp), xp) if len(s) < n else x
  segments[i] = (
  np.concatenate([np.interp(x, xp, s[:, i]) for i in range(2)], dtype=np.float32).reshape(2, -1).T
  ) # segment xy
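The reworked sampling keeps every original polygon vertex in the resampled segment: `n - len(s)` evenly spaced positions are generated, then the original positions `xp` are inserted back in sorted order. A small sketch of that mechanic (sizes chosen for illustration):

    import numpy as np

    s_len, n = 5, 12                              # 5 original points, target of ~12
    x = np.linspace(0, s_len - 1, n - s_len)      # evenly spaced sample positions
    xp = np.arange(s_len)                         # original vertex positions
    x = np.insert(x, np.searchsorted(x, xp), xp)  # splice the originals back in
    print(len(x), np.isin(xp, x).all())           # 12 True -> original vertices preserved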
@@ -837,3 +838,10 @@ def clean_str(s):
  (str): a string with special characters replaced by an underscore _
  """
  return re.sub(pattern="[|@#!¡·$€%&()=?¿^*;:,¨´><+]", repl="_", string=s)
+
+
+ def empty_like(x):
+ """Creates empty torch.Tensor or np.ndarray with same shape as input and float32 dtype."""
+ return (
+ torch.empty_like(x, dtype=torch.float32) if isinstance(x, torch.Tensor) else np.empty_like(x, dtype=np.float32)
+ )
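Because `empty_like` always allocates a float32 buffer, the box-conversion helpers above now return float results even for integer inputs. A minimal sketch of the effect (box values are arbitrary):

    import numpy as np
    import torch
    from ultralytics.utils.ops import xyxy2xywh

    boxes = np.array([[10, 20, 31, 60]])                      # integer corners
    print(xyxy2xywh(boxes).dtype)                              # float32: the 20.5 center is no longer truncated
    print(xyxy2xywh(torch.tensor([[10, 20, 31, 60]])).dtype)   # torch.float32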
ultralytics/utils/triton.py CHANGED
@@ -66,6 +66,7 @@ class TritonRemoteModel:
  self.np_input_formats = [type_map[x] for x in self.input_formats]
  self.input_names = [x["name"] for x in config["input"]]
  self.output_names = [x["name"] for x in config["output"]]
+ self.metadata = eval(config.get("parameters", {}).get("metadata", {}).get("string_value", "None"))

  def __call__(self, *inputs: np.ndarray) -> List[np.ndarray]:
  """
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: ultralytics
- Version: 8.3.40
+ Version: 8.3.44
  Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
  Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -7,10 +7,10 @@ tests/test_exports.py,sha256=1MvhcQ2qHdbJImHII-bFarcaIcm-kPlEK-OdFLxnj7o,8769
  tests/test_integrations.py,sha256=f5-QCUk1SU_-qn4mBCZwS3GN3tXEBIIXo4z2EhExbHw,6126
  tests/test_python.py,sha256=I1RRdCwLdrc3jX06huVxct8HX8ccQOmQgVpuEflRl0U,23560
  tests/test_solutions.py,sha256=HlDe-XOgBX0k1cLhRTAhhawMHk6p-5dg5xl2AIRjfdk,3790
- ultralytics/__init__.py,sha256=g043TDkiEqdLx6EEsZlBx5SW4RgiiHq5CUtD78wMHIo,681
+ ultralytics/__init__.py,sha256=prdKsbiqsMF5NpkMTyJcn1GHRLdsZknBHt6DcMrYPSY,681
  ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
  ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
- ultralytics/cfg/__init__.py,sha256=LgTvW_Rd_phZoLzC8p5UEh8o7pIjx9xc67I91Xh5llY,38910
+ ultralytics/cfg/__init__.py,sha256=etGrRb8t9r6R-n-00qFAmOZHXNriXEUe0zvEzCPi5oc,38921
  ultralytics/cfg/default.yaml,sha256=FcXbvTXXvMpssk9fSwdlnVTtyqfmlYE9gAcHsf0OMf8,8347
  ultralytics/cfg/datasets/Argoverse.yaml,sha256=FyeuJT5CHq_9d4hlfAf0kpZlnbUMO0S--UJ1yIqcdKk,3134
  ultralytics/cfg/datasets/DOTAv1.5.yaml,sha256=QVfp_Qp-4rukuicaB4qx86NxSHM8Mrzym8l_fIDo8gw,1195
@@ -101,8 +101,8 @@ ultralytics/data/split_dota.py,sha256=eFafJ7Vg52wj6KDCHFJAf1tKzyPD5YaPB8kM4VX5Ae
  ultralytics/data/utils.py,sha256=bmWEIrdogj4kssZQSJdSbIF8QsJU00lo-EY-Mgcqv4M,31073
  ultralytics/engine/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
  ultralytics/engine/exporter.py,sha256=137idYe5ct3KuJBpjjjNRUAb6Gx0PeETKm21GZm43Nk,66972
- ultralytics/engine/model.py,sha256=SDlZw6yvbNWHzbPN5VjJYx6qM1v1iZHVKAoa-PgJ8ig,53010
- ultralytics/engine/predictor.py,sha256=nO6lzxG75GXyQsUNEimLk5MLfcMwl8AkRAaoYMPwQug,17687
+ ultralytics/engine/model.py,sha256=-_vG3fyXbTpaftVktFU7A8lSd7pgc9lDMIjZSu6wI0E,53107
+ ultralytics/engine/predictor.py,sha256=o1RYMFH3_uVOMCIXXakpRYpNzoD-6Bdsxryt5fuBni0,17712
  ultralytics/engine/results.py,sha256=a1XFZRPwqgKDBOEAibHuT9nP2xefLiWVsMoBJbcr4iA,75058
  ultralytics/engine/trainer.py,sha256=Cd95QLJ3C4fncoOX1YgauLA9aWVYRd1G6x0Au2xX86k,37335
  ultralytics/engine/tuner.py,sha256=WBj8iw1K1TK0hvanlA-wkwmfqh1SI8jEe2dGwUINeTg,11838
@@ -170,7 +170,7 @@ ultralytics/models/yolo/world/__init__.py,sha256=3VTH0q4NOt2EWRom15yCymvmvm0Etp2
  ultralytics/models/yolo/world/train.py,sha256=gaDrAmLJpg9qDtmL5evA5HsV2yb4RTRSfk2EDYrHdRg,3686
  ultralytics/models/yolo/world/train_world.py,sha256=IsnCEVt6DcM9lUskCKmIN-M8MM79xLpwTRqRoAHUnZ4,4857
  ultralytics/nn/__init__.py,sha256=4BPLHY89xEM_al5uK0aOmFgiML6CMGEZbezxOvTjOEs,587
- ultralytics/nn/autobackend.py,sha256=Arke5BaRQmr4yQd-xr6Z8P7kbTBNLI-O0fsDPFLOXMw,35625
+ ultralytics/nn/autobackend.py,sha256=kzJW9i6imwsB6YQ6q3p_mExeb4fHqVQamtuXOEgWZBc,35606
  ultralytics/nn/tasks.py,sha256=pqRe1F1HOH8AjLZpFaZCGb5gSYsXH0eVnHITKDTFFhI,48527
  ultralytics/nn/modules/__init__.py,sha256=xhW2BennT9U_VaMXVpRu-bdLgp1BXt9L8mkIUBE3idU,2625
  ultralytics/nn/modules/activation.py,sha256=chhn469wnRHEs5BMGNBYXwPYZc_7-urspTT8fnBd-xA,895
@@ -183,10 +183,10 @@ ultralytics/solutions/__init__.py,sha256=lpTOauaJf7dFlymZB9lHiH_feDlS8Vlrp4TC7Gu
  ultralytics/solutions/ai_gym.py,sha256=Jv8ERJqcSjQeFh78zCAH2XnXoTIngCK7X_7XOQ6cPzs,5255
  ultralytics/solutions/analytics.py,sha256=C57pIghXeKN8hul8QOV7W9YDMpfFfSfPTBb-lE9HeAc,11535
  ultralytics/solutions/distance_calculation.py,sha256=KN3CC-dm2dTQylj79IrifCJT8ZhE7hc2EweH3KK31mE,5461
- ultralytics/solutions/heatmap.py,sha256=-1VtMCJRmpHnLqgna0i2HOBsxNoqFernzpKQnICngUM,5449
+ ultralytics/solutions/heatmap.py,sha256=JkqwYAkIIDOj4HL5fLmcxQO0yix6-X8tAceXON6-Yg0,5275
  ultralytics/solutions/object_counter.py,sha256=MuxQG4a22458WwciAB96m5AxVXwH98AIWAaf_kPali4,9613
  ultralytics/solutions/parking_management.py,sha256=Hh28FTuP_TaO7x5RadYm-JSVJuEu1M2SSgHqgdYYtr8,11198
- ultralytics/solutions/queue_management.py,sha256=D9TqwJSVrZQFxp_M8O62WfBAxkAuDWWnXe7FFmnp7_w,4881
+ ultralytics/solutions/queue_management.py,sha256=lIHBgdMSKmGGPrICY2HC01_Ofad-vu4AnaGAqH-DxMs,4931
  ultralytics/solutions/region_counter.py,sha256=w0c0Sz9XG6rwzr5nA6nb1zFW8IVkTQuatfZNBtOik68,4947
  ultralytics/solutions/solutions.py,sha256=BqkMDAq9A8kqL4TkjHLkMYXrJAdZPK-VAdNSObS1kNQ,7502
  ultralytics/solutions/speed_estimation.py,sha256=A10DmuZlGkoZUyfHhZWcDRjj1-9GXiDhEjyBbAzfaDs,4936
@@ -200,11 +200,11 @@ ultralytics/trackers/track.py,sha256=BfkdmdgTvoI8Raz6yuDQMrbCrWOGm9Lfu3aBTXYv2j8
  ultralytics/trackers/utils/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
  ultralytics/trackers/utils/gmc.py,sha256=VcURuY041qGCeWUGMxHZBr10T16LtcMqyv7AmTfE1MY,14557
  ultralytics/trackers/utils/kalman_filter.py,sha256=cH9zD3fwkuezP97H9mw8cSBN7a8hHKx_Sx1j7t3oYGs,21349
- ultralytics/trackers/utils/matching.py,sha256=YCC9O1iwrWKD5k6WryklrttoQZcdUquSQi5cTWmp4I4,7107
+ ultralytics/trackers/utils/matching.py,sha256=Y94cMwo9TLd-IWFqHKp8dHSDyguS1qtOeebBMalWnJQ,7078
  ultralytics/utils/__init__.py,sha256=_KUqXbKcFgN11_ZLGrpQuPNOdSbIGhuv_IBGUPw9jX0,49203
  ultralytics/utils/autobatch.py,sha256=nt0nSNNhrQqvtaxeNBBYpU2OkZnI3ihNEAa3jF4pybo,4594
  ultralytics/utils/benchmarks.py,sha256=Ub--iTq2hL_oHkG2R3HXmZXQ6qcBC-P9MabUv60bMLE,25625
- ultralytics/utils/checks.py,sha256=KXQSeauhzecy9tSjyDVy8oXbTDkHSSB9lOTYrqRWpok,29582
+ ultralytics/utils/checks.py,sha256=BZdD2JVpMvHJLVbgl048kBgVqk3LpHARb_BE5oOwYK8,30120
  ultralytics/utils/dist.py,sha256=NDFga-uKxkBX2zLxFHSene_cCiGQJoyOeCXcN9JIOIk,2358
  ultralytics/utils/downloads.py,sha256=fh7I5toTSowAOXtmx5zIzCEDREfTFG45cLIHmsDmuYw,21974
  ultralytics/utils/errors.py,sha256=GqP_Jgj_n0paxn8OMhn3DTCgoNkB2WjUcUaqs-M6SQk,816
@@ -212,12 +212,12 @@ ultralytics/utils/files.py,sha256=uiXQSVABJRoI5ImnM6ndEBIFbECfksmWNEldBg8GnSo,82
  ultralytics/utils/instance.py,sha256=EnLp3hCihG5-32eGSMmjzspbxZsDvbqEOs-X0kcvxwQ,16252
  ultralytics/utils/loss.py,sha256=jUCiUcxgF6jGxGdvIcupeMidLoF-gI7s1tcJoQCZbnk,34113
  ultralytics/utils/metrics.py,sha256=toJlyA0W-xtChqAtIDiHISolxc_30NP33ezxWQ1rnPc,53804
- ultralytics/utils/ops.py,sha256=ojw9AT7HI1_SgmYIFWrFM7QTs7zvf0QPsSLrMgAq2uI,33051
+ ultralytics/utils/ops.py,sha256=32Vg2cDwdqcMyb3XT9RGS2-YinIDRiE1-iS7H_0wssE,33174
  ultralytics/utils/patches.py,sha256=J-iOwIRbfUs-inBZerhnXby5tUKjYcOIyvhLTS352JE,3270
  ultralytics/utils/plotting.py,sha256=GmBkN7e1skJK2cZ2hzKBXQCb1gayWTrA9TLHw0q07UM,62948
  ultralytics/utils/tal.py,sha256=thD_AEhVmhaZqmS5szZMvpKO-RKOeZwfX1BYAhdnA0o,18470
  ultralytics/utils/torch_utils.py,sha256=ddWR82FkxSiFQqr_uzqxQvir-RACvCxsQbqphKSFTok,32084
- ultralytics/utils/triton.py,sha256=gg1finxno_tY2Ge9PMhmu7PI9wvoFZoiicdT4Bhqv3w,3936
+ ultralytics/utils/triton.py,sha256=HL_gjIwMoi-WD8gJLTmemBehIto8eRz3HdK8fcROLk0,4043
  ultralytics/utils/tuner.py,sha256=K09-z5k1E4ZriSKoWdwQrJ2PJ2fY1ez3-b2R6aKPTqM,6198
  ultralytics/utils/callbacks/__init__.py,sha256=YrWqC3BVVaTLob4iCPR6I36mUxIUOpPJW7B_LjT78Qw,214
  ultralytics/utils/callbacks/base.py,sha256=PHjQ6RITwC2dylCQTB0bdPgAsHjxVeuDb5N1NPTbHGc,5775
@@ -230,9 +230,9 @@ ultralytics/utils/callbacks/neptune.py,sha256=IbGQfEltamUKXJt93uSLQFn8c2rYh3DMTg
  ultralytics/utils/callbacks/raytune.py,sha256=Ck_yFzg7UZXiDWrLHaltjQybzVWSFDfzpdrx9ZYTRfI,700
  ultralytics/utils/callbacks/tensorboard.py,sha256=SHlE58Fb-sg-uZKtgy-ybIO3SAIfK55aj8kTYGA0Cyg,4167
  ultralytics/utils/callbacks/wb.py,sha256=sizfTa-xI9k2pnDSP_Q9pHZEFwcl__gSFM0AcneuRpY,7058
- ultralytics-8.3.40.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
- ultralytics-8.3.40.dist-info/METADATA,sha256=765LKLYZ8BHcGLWpKO5pQFPka_hilm8fl_96W_xvp2c,35332
- ultralytics-8.3.40.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
- ultralytics-8.3.40.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
- ultralytics-8.3.40.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
- ultralytics-8.3.40.dist-info/RECORD,,
+ ultralytics-8.3.44.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ ultralytics-8.3.44.dist-info/METADATA,sha256=EnGf_PYz7_95wT2dFkJ00pfuyQSgs9NDbEc9SJu-0Iw,35332
+ ultralytics-8.3.44.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+ ultralytics-8.3.44.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ ultralytics-8.3.44.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ ultralytics-8.3.44.dist-info/RECORD,,