ultralytics 8.3.43__py3-none-any.whl → 8.3.47__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ultralytics/__init__.py CHANGED
@@ -1,6 +1,6 @@
 # Ultralytics YOLO 🚀, AGPL-3.0 license
 
-__version__ = "8.3.43"
+__version__ = "8.3.47"
 
 import os
 
ultralytics/engine/exporter.py CHANGED
@@ -73,7 +73,7 @@ from ultralytics.data import build_dataloader
 from ultralytics.data.dataset import YOLODataset
 from ultralytics.data.utils import check_cls_dataset, check_det_dataset
 from ultralytics.nn.autobackend import check_class_names, default_class_names
-from ultralytics.nn.modules import C2f, Detect, RTDETRDecoder
+from ultralytics.nn.modules import C2f, Classify, Detect, RTDETRDecoder
 from ultralytics.nn.tasks import DetectionModel, SegmentationModel, WorldModel
 from ultralytics.utils import (
     ARM64,
@@ -287,6 +287,8 @@ class Exporter:
 
         model = FXModel(model)
         for m in model.modules():
+            if isinstance(m, Classify):
+                m.export = True
             if isinstance(m, (Detect, RTDETRDecoder)):  # includes all Detect subclasses like Segment, Pose, OBB
                 m.dynamic = self.args.dynamic
                 m.export = True
ultralytics/engine/model.py CHANGED
@@ -136,6 +136,7 @@ class Model(nn.Module):
         # Check if Triton Server model
         elif self.is_triton_model(model):
             self.model_name = self.model = model
+            self.overrides["task"] = task or "detect"  # set `task=detect` if not explicitly set
             return
 
         # Load or create new YOLO model
ultralytics/engine/predictor.py CHANGED
@@ -155,7 +155,7 @@ class BasePredictor:
         same_shapes = len({x.shape for x in im}) == 1
         letterbox = LetterBox(
             self.imgsz,
-            auto=same_shapes and (self.model.pt or getattr(self.model, "dynamic", False)),
+            auto=same_shapes and (self.model.pt or (getattr(self.model, "dynamic", False) and not self.model.imx)),
             stride=self.model.stride,
         )
         return [letterbox(image=x) for x in im]
ultralytics/models/sam/predict.py CHANGED
@@ -1105,7 +1105,7 @@ class SAM2VideoPredictor(SAM2Predictor):
         for obj_temp_output_dict in temp_output_dict_per_obj.values():
             temp_frame_inds.update(obj_temp_output_dict[storage_key].keys())
         consolidated_frame_inds[storage_key].update(temp_frame_inds)
-        # consolidate the temprary output across all objects on this frame
+        # consolidate the temporary output across all objects on this frame
         for frame_idx in temp_frame_inds:
             consolidated_out = self._consolidate_temp_output_across_obj(
                 frame_idx, is_cond=is_cond, run_mem_encoder=True
ultralytics/models/yolo/classify/predict.py CHANGED
@@ -53,7 +53,8 @@ class ClassificationPredictor(BasePredictor):
         if not isinstance(orig_imgs, list):  # input images are a torch.Tensor, not a list
             orig_imgs = ops.convert_torch2numpy_batch(orig_imgs)
 
+        preds = preds[0] if isinstance(preds, (list, tuple)) else preds
         return [
-            Results(orig_img, path=img_path, names=self.model.names, probs=pred.softmax(0))
+            Results(orig_img, path=img_path, names=self.model.names, probs=pred)
             for pred, orig_img, img_path in zip(preds, orig_imgs, self.batch[0])
         ]
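Note: the `.softmax(0)` call is gone from `postprocess` because the `Classify` head now applies softmax itself in eval mode and returns a `(probs, logits)` tuple (see the `ultralytics/nn/modules/head.py` hunks below). A minimal sketch of the new unwrapping, with illustrative tensors:

```python
import torch

# Illustrative eval-mode head output: (softmaxed probs, raw logits).
probs, logits = torch.tensor([[0.7, 0.2, 0.1]]), torch.tensor([[2.0, 0.75, 0.05]])
preds = (probs, logits)

# Mirrors the new postprocess line: keep the probabilities, drop the logits.
preds = preds[0] if isinstance(preds, (list, tuple)) else preds
assert torch.allclose(preds.sum(dim=1), torch.tensor(1.0))
```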
ultralytics/models/yolo/classify/val.py CHANGED
@@ -71,6 +71,10 @@ class ClassificationValidator(BaseValidator):
         self.metrics.confusion_matrix = self.confusion_matrix
         self.metrics.save_dir = self.save_dir
 
+    def postprocess(self, preds):
+        """Preprocesses the classification predictions."""
+        return preds[0] if isinstance(preds, (list, tuple)) else preds
+
     def get_stats(self):
         """Returns a dictionary of metrics obtained by processing targets and predictions."""
         self.metrics.process(self.targets, self.pred)
ultralytics/nn/autobackend.py CHANGED
@@ -96,7 +96,7 @@ class AutoBackend(nn.Module):
         Initialize the AutoBackend for inference.
 
         Args:
-            weights (str): Path to the model weights file. Defaults to 'yolov8n.pt'.
+            weights (str | torch.nn.Module): Path to the model weights file or a module instance. Defaults to 'yolo11n.pt'.
             device (torch.device): Device to run the model on. Defaults to CPU.
             dnn (bool): Use OpenCV DNN module for ONNX inference. Defaults to False.
             data (str | Path | optional): Path to the additional data.yaml file containing class names. Optional.
@@ -462,6 +462,7 @@ class AutoBackend(nn.Module):
             from ultralytics.utils.triton import TritonRemoteModel
 
             model = TritonRemoteModel(w)
+            metadata = model.metadata
 
         # Any other format (unsupported)
         else:
ultralytics/nn/modules/head.py CHANGED
@@ -282,6 +282,8 @@ class Pose(Detect):
 class Classify(nn.Module):
     """YOLO classification head, i.e. x(b,c1,20,20) to x(b,c2)."""
 
+    export = False  # export mode
+
     def __init__(self, c1, c2, k=1, s=1, p=None, g=1):
         """Initializes YOLO classification head to transform input tensor from (b,c1,20,20) to (b,c2) shape."""
         super().__init__()
@@ -296,7 +298,10 @@ class Classify(nn.Module):
         if isinstance(x, list):
             x = torch.cat(x, 1)
         x = self.linear(self.drop(self.pool(self.conv(x)).flatten(1)))
-        return x
+        if self.training:
+            return x
+        y = x.softmax(1)  # get final output
+        return y if self.export else (y, x)
 
 
 class WorldDetect(Detect):
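The new `Classify.forward` contract in one place: raw logits while training (consumed by `v8ClassificationLoss`), a `(probs, logits)` tuple in eval, and probabilities only when `export=True`. A quick sketch assuming this release is installed (layer sizes are illustrative):

```python
import torch
from ultralytics.nn.modules import Classify

head = Classify(c1=16, c2=3)  # tiny head for illustration
x = torch.randn(2, 16, 20, 20)

head.train()
logits = head(x)  # training: raw logits, shape (2, 3)

head.eval()
y, raw = head(x)  # eval: (softmaxed probs, raw logits)
assert torch.allclose(y, raw.softmax(1))

head.export = True
y = head(x)  # export: probabilities only, matching the exporter flag above
```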
ultralytics/trackers/utils/matching.py CHANGED
@@ -13,7 +13,7 @@ try:
 except (ImportError, AssertionError, AttributeError):
     from ultralytics.utils.checks import check_requirements
 
-    check_requirements("lapx>=0.5.2")  # update to lap package from https://github.com/rathaROG/lapx
+    check_requirements("lap>=0.5.12")  # https://github.com/gatagat/lap
     import lap
 
 
ultralytics/utils/checks.py CHANGED
@@ -669,8 +669,22 @@ def check_amp(model):
     from ultralytics.utils.torch_utils import autocast
 
     device = next(model.parameters()).device  # get model device
+    prefix = colorstr("AMP: ")
     if device.type in {"cpu", "mps"}:
         return False  # AMP only used on CUDA devices
+    else:
+        # GPUs that have issues with AMP
+        pattern = re.compile(
+            r"(nvidia|geforce|quadro|tesla).*?(1660|1650|1630|t400|t550|t600|t1000|t1200|t2000|k40m)", re.IGNORECASE
+        )
+
+        gpu = torch.cuda.get_device_name(device)
+        if bool(pattern.search(gpu)):
+            LOGGER.warning(
+                f"{prefix}checks failed ❌. AMP training on {gpu} GPU may cause "
+                f"NaN losses or zero-mAP results, so AMP will be disabled during training."
+            )
+            return False
 
     def amp_allclose(m, im):
         """All close FP32 vs AMP results."""
@@ -683,7 +697,6 @@ def check_amp(model):
         return a.shape == b.shape and torch.allclose(a, b.float(), atol=0.5)  # close to 0.5 absolute tolerance
 
     im = ASSETS / "bus.jpg"  # image to check
-    prefix = colorstr("AMP: ")
     LOGGER.info(f"{prefix}running Automatic Mixed Precision (AMP) checks...")
     warning_msg = "Setting 'amp=True'. If you experience zero-mAP or NaN losses you can disable AMP with amp=False."
     try:
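The deny-list is a single regex over `torch.cuda.get_device_name`; a quick check of what it catches (device-name strings here are illustrative):

```python
import re

pattern = re.compile(
    r"(nvidia|geforce|quadro|tesla).*?(1660|1650|1630|t400|t550|t600|t1000|t1200|t2000|k40m)", re.IGNORECASE
)

assert pattern.search("NVIDIA GeForce GTX 1660 Ti")   # AMP disabled
assert pattern.search("Tesla K40m")                   # AMP disabled
assert not pattern.search("NVIDIA GeForce RTX 3090")  # AMP checks proceed
```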
ultralytics/utils/loss.py CHANGED
@@ -604,6 +604,7 @@ class v8ClassificationLoss:
 
     def __call__(self, preds, batch):
         """Compute the classification loss between predictions and true labels."""
+        preds = preds[1] if isinstance(preds, (list, tuple)) else preds
         loss = F.cross_entropy(preds, batch["cls"], reduction="mean")
         loss_items = loss.detach()
         return loss, loss_items
ultralytics/utils/ops.py CHANGED
@@ -400,7 +400,7 @@ def xyxy2xywh(x):
         y (np.ndarray | torch.Tensor): The bounding box coordinates in (x, y, width, height) format.
     """
     assert x.shape[-1] == 4, f"input shape last dimension expected 4 but input shape is {x.shape}"
-    y = torch.empty_like(x) if isinstance(x, torch.Tensor) else np.empty_like(x)  # faster than clone/copy
+    y = empty_like(x)  # faster than clone/copy
     y[..., 0] = (x[..., 0] + x[..., 2]) / 2  # x center
     y[..., 1] = (x[..., 1] + x[..., 3]) / 2  # y center
     y[..., 2] = x[..., 2] - x[..., 0]  # width
@@ -420,7 +420,7 @@ def xywh2xyxy(x):
         y (np.ndarray | torch.Tensor): The bounding box coordinates in (x1, y1, x2, y2) format.
     """
     assert x.shape[-1] == 4, f"input shape last dimension expected 4 but input shape is {x.shape}"
-    y = torch.empty_like(x) if isinstance(x, torch.Tensor) else np.empty_like(x)  # faster than clone/copy
+    y = empty_like(x)  # faster than clone/copy
     xy = x[..., :2]  # centers
     wh = x[..., 2:] / 2  # half width-height
     y[..., :2] = xy - wh  # top left xy
@@ -443,7 +443,7 @@ def xywhn2xyxy(x, w=640, h=640, padw=0, padh=0):
         x1,y1 is the top-left corner, x2,y2 is the bottom-right corner of the bounding box.
     """
     assert x.shape[-1] == 4, f"input shape last dimension expected 4 but input shape is {x.shape}"
-    y = torch.empty_like(x) if isinstance(x, torch.Tensor) else np.empty_like(x)  # faster than clone/copy
+    y = empty_like(x)  # faster than clone/copy
     y[..., 0] = w * (x[..., 0] - x[..., 2] / 2) + padw  # top left x
     y[..., 1] = h * (x[..., 1] - x[..., 3] / 2) + padh  # top left y
     y[..., 2] = w * (x[..., 0] + x[..., 2] / 2) + padw  # bottom right x
@@ -469,7 +469,7 @@ def xyxy2xywhn(x, w=640, h=640, clip=False, eps=0.0):
     if clip:
         x = clip_boxes(x, (h - eps, w - eps))
     assert x.shape[-1] == 4, f"input shape last dimension expected 4 but input shape is {x.shape}"
-    y = torch.empty_like(x) if isinstance(x, torch.Tensor) else np.empty_like(x, dtype=float)  # faster than clone/copy
+    y = empty_like(x)  # faster than clone/copy
     y[..., 0] = ((x[..., 0] + x[..., 2]) / 2) / w  # x center
     y[..., 1] = ((x[..., 1] + x[..., 3]) / 2) / h  # y center
     y[..., 2] = (x[..., 2] - x[..., 0]) / w  # width
@@ -625,8 +625,9 @@ def resample_segments(segments, n=1000):
     """
     for i, s in enumerate(segments):
         s = np.concatenate((s, s[0:1, :]), axis=0)
-        x = np.linspace(0, len(s) - 1, n)
+        x = np.linspace(0, len(s) - 1, n - len(s) if len(s) < n else n)
         xp = np.arange(len(s))
+        x = np.insert(x, np.searchsorted(x, xp), xp) if len(s) < n else x
         segments[i] = (
             np.concatenate([np.interp(x, xp, s[:, i]) for i in range(2)], dtype=np.float32).reshape(2, -1).T
         )  # segment xy
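This `resample_segments` change keeps every original polygon vertex: when a segment has fewer than `n` points, only `n - len(s)` positions are interpolated and the original indices are spliced back in via `np.insert`/`np.searchsorted`. A small numeric check with illustrative sizes:

```python
import numpy as np

n, s_len = 10, 4                              # resample a 4-point closed segment to 10 points
x = np.linspace(0, s_len - 1, n - s_len)      # positions for the interpolated points only
xp = np.arange(s_len)                         # indices of the original vertices
x = np.insert(x, np.searchsorted(x, xp), xp)  # splice the originals back in, sorted
assert len(x) == n and np.isin(xp, x).all()   # all original vertices survive resampling
```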
@@ -837,3 +838,10 @@ def clean_str(s):
         (str): a string with special characters replaced by an underscore _
     """
     return re.sub(pattern="[|@#!¡·$€%&()=?¿^*;:,¨´><+]", repl="_", string=s)
+
+
+def empty_like(x):
+    """Creates empty torch.Tensor or np.ndarray with same shape as input and float32 dtype."""
+    return (
+        torch.empty_like(x, dtype=torch.float32) if isinstance(x, torch.Tensor) else np.empty_like(x, dtype=np.float32)
+    )
ultralytics/utils/triton.py CHANGED
@@ -66,6 +66,7 @@ class TritonRemoteModel:
         self.np_input_formats = [type_map[x] for x in self.input_formats]
         self.input_names = [x["name"] for x in config["input"]]
         self.output_names = [x["name"] for x in config["output"]]
+        self.metadata = eval(config.get("parameters", {}).get("metadata", {}).get("string_value", "None"))
 
     def __call__(self, *inputs: np.ndarray) -> List[np.ndarray]:
         """
ultralytics-8.3.47.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ultralytics
-Version: 8.3.43
+Version: 8.3.47
 Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
ultralytics-8.3.47.dist-info/RECORD CHANGED
@@ -7,7 +7,7 @@ tests/test_exports.py,sha256=1MvhcQ2qHdbJImHII-bFarcaIcm-kPlEK-OdFLxnj7o,8769
 tests/test_integrations.py,sha256=f5-QCUk1SU_-qn4mBCZwS3GN3tXEBIIXo4z2EhExbHw,6126
 tests/test_python.py,sha256=I1RRdCwLdrc3jX06huVxct8HX8ccQOmQgVpuEflRl0U,23560
 tests/test_solutions.py,sha256=HlDe-XOgBX0k1cLhRTAhhawMHk6p-5dg5xl2AIRjfdk,3790
-ultralytics/__init__.py,sha256=n8Bs8eFAkYMki3zCDHydZ4q5Lvk-LTwKojrDVGNeRSE,681
+ultralytics/__init__.py,sha256=6x0xcQ6F-4K3YpE4bbVt-q0jvqZx_FNT_r-_VWPZ614,681
 ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
 ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
 ultralytics/cfg/__init__.py,sha256=etGrRb8t9r6R-n-00qFAmOZHXNriXEUe0zvEzCPi5oc,38921
@@ -100,9 +100,9 @@ ultralytics/data/loaders.py,sha256=k1Vq7Rxv6tpsRsYuMdZeI3_f2BciAaZwhDQU8iHhVJM,2
 ultralytics/data/split_dota.py,sha256=eFafJ7Vg52wj6KDCHFJAf1tKzyPD5YaPB8kM4VX5Aeg,10688
 ultralytics/data/utils.py,sha256=bmWEIrdogj4kssZQSJdSbIF8QsJU00lo-EY-Mgcqv4M,31073
 ultralytics/engine/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
-ultralytics/engine/exporter.py,sha256=137idYe5ct3KuJBpjjjNRUAb6Gx0PeETKm21GZm43Nk,66972
-ultralytics/engine/model.py,sha256=SDlZw6yvbNWHzbPN5VjJYx6qM1v1iZHVKAoa-PgJ8ig,53010
-ultralytics/engine/predictor.py,sha256=nO6lzxG75GXyQsUNEimLk5MLfcMwl8AkRAaoYMPwQug,17687
+ultralytics/engine/exporter.py,sha256=EMrdP8Ra5VnmNEUjustWOvgrH1C4vgwz0L0frjZRSJY,67054
+ultralytics/engine/model.py,sha256=-_vG3fyXbTpaftVktFU7A8lSd7pgc9lDMIjZSu6wI0E,53107
+ultralytics/engine/predictor.py,sha256=o1RYMFH3_uVOMCIXXakpRYpNzoD-6Bdsxryt5fuBni0,17712
 ultralytics/engine/results.py,sha256=a1XFZRPwqgKDBOEAibHuT9nP2xefLiWVsMoBJbcr4iA,75058
 ultralytics/engine/trainer.py,sha256=Cd95QLJ3C4fncoOX1YgauLA9aWVYRd1G6x0Au2xX86k,37335
 ultralytics/engine/tuner.py,sha256=WBj8iw1K1TK0hvanlA-wkwmfqh1SI8jEe2dGwUINeTg,11838
@@ -131,7 +131,7 @@ ultralytics/models/sam/__init__.py,sha256=E4IHie-T0HYCklKW6-kqlW84GJJdD6rujf7W_S
 ultralytics/models/sam/amg.py,sha256=GrmO_8YfIDt_QkPEMF_WFjPZkhwhf7iwx7ig8JgOUnE,8709
 ultralytics/models/sam/build.py,sha256=ac7Pop5f51TVzGgfV6bbXSFDA9fBVxERUc_6WDQ-9Ys,12487
 ultralytics/models/sam/model.py,sha256=CE4ruw1Iwrp7-9aHGspQihQaTVsqagYrQLWmpXYodLw,7382
-ultralytics/models/sam/predict.py,sha256=fv9s1kYx8Er2ZsaMpmiB9Phz5l0mGdjCyqQpmM2CpcE,82535
+ultralytics/models/sam/predict.py,sha256=0BliE-_Khbj6wDT0-AG6WaN9TDfBfEHdJrGMBH9PY_Y,82536
 ultralytics/models/sam/modules/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
 ultralytics/models/sam/modules/blocks.py,sha256=Q-KwhFbdyZhl1tjG_kP2LcQkZbzoNt618i-NRrKNx2Y,45919
 ultralytics/models/sam/modules/decoders.py,sha256=mODsqnTN_CjE3H0Sh9cd8PfTnHANPjGB1bjqHxfezSg,25830
@@ -147,9 +147,9 @@ ultralytics/models/utils/ops.py,sha256=aPAPwWMLJLWq-I04wS_YrqJ_Vy_xBXtqQu6Aox15Y
 ultralytics/models/yolo/__init__.py,sha256=e1cZr9pbSbf3Ya2OvkTjGRwD_E2YZpe610xskBM8gEk,247
 ultralytics/models/yolo/model.py,sha256=E4TuJZZux0L_SG7sC0SDgxrmeBvuZRpxprPrCC26lvs,4233
 ultralytics/models/yolo/classify/__init__.py,sha256=t-4pUHmgI2gjhc-l3bqNEcEtKD1dO40nD4Vc6Y2xD6o,355
-ultralytics/models/yolo/classify/predict.py,sha256=ungApAXm_KkLMMlz4MQpmL5IFzAKX69wLYHSliSR7VA,2455
+ultralytics/models/yolo/classify/predict.py,sha256=sCBcCscSasy1bSf03gvuAVYhBtGSO0i9Qr_-956LFMU,2516
 ultralytics/models/yolo/classify/train.py,sha256=3aYzLDqX_03xR1xqlTn1TxA4t58cCIGI8RCtWheTrm0,6273
-ultralytics/models/yolo/classify/val.py,sha256=Tzizhp3ebzPvwJejrE8tb-TuXw4MdkEI9mOANV74eXQ,4909
+ultralytics/models/yolo/classify/val.py,sha256=YEmgxOLhSsUdQXAV9CetxX8cgVaqaxKZyUiZaX14_4Q,5074
 ultralytics/models/yolo/detect/__init__.py,sha256=JR8gZJWn7wMBbh-0j_073nxJVZTMFZVWTOG5Wnvk6w0,229
 ultralytics/models/yolo/detect/predict.py,sha256=-uZFLutxGYZX47RANcaxC-LFStRbv0nBv_8-ypadQoI,1471
 ultralytics/models/yolo/detect/train.py,sha256=LKCcQTAsXm3-TPK2zkE1YJhbAcS65qhY2-MSlj-kB4w,6710
@@ -170,13 +170,13 @@ ultralytics/models/yolo/world/__init__.py,sha256=3VTH0q4NOt2EWRom15yCymvmvm0Etp2
 ultralytics/models/yolo/world/train.py,sha256=gaDrAmLJpg9qDtmL5evA5HsV2yb4RTRSfk2EDYrHdRg,3686
 ultralytics/models/yolo/world/train_world.py,sha256=IsnCEVt6DcM9lUskCKmIN-M8MM79xLpwTRqRoAHUnZ4,4857
 ultralytics/nn/__init__.py,sha256=4BPLHY89xEM_al5uK0aOmFgiML6CMGEZbezxOvTjOEs,587
-ultralytics/nn/autobackend.py,sha256=x1TgOEELZ0Qc01QFgXSMlZ-JqTHFhFRF6v2mGaGg_a0,35529
+ultralytics/nn/autobackend.py,sha256=kzJW9i6imwsB6YQ6q3p_mExeb4fHqVQamtuXOEgWZBc,35606
 ultralytics/nn/tasks.py,sha256=pqRe1F1HOH8AjLZpFaZCGb5gSYsXH0eVnHITKDTFFhI,48527
 ultralytics/nn/modules/__init__.py,sha256=xhW2BennT9U_VaMXVpRu-bdLgp1BXt9L8mkIUBE3idU,2625
 ultralytics/nn/modules/activation.py,sha256=chhn469wnRHEs5BMGNBYXwPYZc_7-urspTT8fnBd-xA,895
 ultralytics/nn/modules/block.py,sha256=Rk9CT23Bpqpo3LYRuQePYML6HAvsM20p2QlFTCaYFH4,41851
 ultralytics/nn/modules/conv.py,sha256=DPLZCRno_ZOjsuajAXIq-GbJdOh2jp1WayRXfDEd8z8,12724
-ultralytics/nn/modules/head.py,sha256=Bg_WXtvO004fAKF7qExFreywWFrgQoc5Tc3fA9KVoL4,27780
+ultralytics/nn/modules/head.py,sha256=yZdDr71pWm-vB18XrNkbX35o3q4o4mhzrfJz6yVh9m4,27934
 ultralytics/nn/modules/transformer.py,sha256=tGiK8NmPfswwW1rbF21r5ILUkkZQ6Nk4s8j16vFBmps,18069
 ultralytics/nn/modules/utils.py,sha256=a88cKl2wz1nMVSEBiajtvaCbDBQIkESWOKTZ_WAJy90,3195
 ultralytics/solutions/__init__.py,sha256=lpTOauaJf7dFlymZB9lHiH_feDlS8Vlrp4TC7GuM8SU,761
@@ -200,24 +200,24 @@ ultralytics/trackers/track.py,sha256=BfkdmdgTvoI8Raz6yuDQMrbCrWOGm9Lfu3aBTXYv2j8
 ultralytics/trackers/utils/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
 ultralytics/trackers/utils/gmc.py,sha256=VcURuY041qGCeWUGMxHZBr10T16LtcMqyv7AmTfE1MY,14557
 ultralytics/trackers/utils/kalman_filter.py,sha256=cH9zD3fwkuezP97H9mw8cSBN7a8hHKx_Sx1j7t3oYGs,21349
-ultralytics/trackers/utils/matching.py,sha256=YCC9O1iwrWKD5k6WryklrttoQZcdUquSQi5cTWmp4I4,7107
+ultralytics/trackers/utils/matching.py,sha256=Y94cMwo9TLd-IWFqHKp8dHSDyguS1qtOeebBMalWnJQ,7078
 ultralytics/utils/__init__.py,sha256=_KUqXbKcFgN11_ZLGrpQuPNOdSbIGhuv_IBGUPw9jX0,49203
 ultralytics/utils/autobatch.py,sha256=nt0nSNNhrQqvtaxeNBBYpU2OkZnI3ihNEAa3jF4pybo,4594
 ultralytics/utils/benchmarks.py,sha256=Ub--iTq2hL_oHkG2R3HXmZXQ6qcBC-P9MabUv60bMLE,25625
-ultralytics/utils/checks.py,sha256=KXQSeauhzecy9tSjyDVy8oXbTDkHSSB9lOTYrqRWpok,29582
+ultralytics/utils/checks.py,sha256=BZdD2JVpMvHJLVbgl048kBgVqk3LpHARb_BE5oOwYK8,30120
 ultralytics/utils/dist.py,sha256=NDFga-uKxkBX2zLxFHSene_cCiGQJoyOeCXcN9JIOIk,2358
 ultralytics/utils/downloads.py,sha256=fh7I5toTSowAOXtmx5zIzCEDREfTFG45cLIHmsDmuYw,21974
 ultralytics/utils/errors.py,sha256=GqP_Jgj_n0paxn8OMhn3DTCgoNkB2WjUcUaqs-M6SQk,816
 ultralytics/utils/files.py,sha256=uiXQSVABJRoI5ImnM6ndEBIFbECfksmWNEldBg8GnSo,8224
 ultralytics/utils/instance.py,sha256=EnLp3hCihG5-32eGSMmjzspbxZsDvbqEOs-X0kcvxwQ,16252
-ultralytics/utils/loss.py,sha256=jUCiUcxgF6jGxGdvIcupeMidLoF-gI7s1tcJoQCZbnk,34113
+ultralytics/utils/loss.py,sha256=_d2L4lIemaeAHrGHqf9q-KI7yTgHKCbIcYAF7Y-farI,34185
 ultralytics/utils/metrics.py,sha256=toJlyA0W-xtChqAtIDiHISolxc_30NP33ezxWQ1rnPc,53804
-ultralytics/utils/ops.py,sha256=97qwzCipDB3AVqcn8rCao1ufZcpOM-kXBtOa2p_LT9o,33064
+ultralytics/utils/ops.py,sha256=32Vg2cDwdqcMyb3XT9RGS2-YinIDRiE1-iS7H_0wssE,33174
 ultralytics/utils/patches.py,sha256=J-iOwIRbfUs-inBZerhnXby5tUKjYcOIyvhLTS352JE,3270
 ultralytics/utils/plotting.py,sha256=GmBkN7e1skJK2cZ2hzKBXQCb1gayWTrA9TLHw0q07UM,62948
 ultralytics/utils/tal.py,sha256=thD_AEhVmhaZqmS5szZMvpKO-RKOeZwfX1BYAhdnA0o,18470
 ultralytics/utils/torch_utils.py,sha256=ddWR82FkxSiFQqr_uzqxQvir-RACvCxsQbqphKSFTok,32084
-ultralytics/utils/triton.py,sha256=gg1finxno_tY2Ge9PMhmu7PI9wvoFZoiicdT4Bhqv3w,3936
+ultralytics/utils/triton.py,sha256=HL_gjIwMoi-WD8gJLTmemBehIto8eRz3HdK8fcROLk0,4043
 ultralytics/utils/tuner.py,sha256=K09-z5k1E4ZriSKoWdwQrJ2PJ2fY1ez3-b2R6aKPTqM,6198
 ultralytics/utils/callbacks/__init__.py,sha256=YrWqC3BVVaTLob4iCPR6I36mUxIUOpPJW7B_LjT78Qw,214
 ultralytics/utils/callbacks/base.py,sha256=PHjQ6RITwC2dylCQTB0bdPgAsHjxVeuDb5N1NPTbHGc,5775
@@ -230,9 +230,9 @@ ultralytics/utils/callbacks/neptune.py,sha256=IbGQfEltamUKXJt93uSLQFn8c2rYh3DMTg
 ultralytics/utils/callbacks/raytune.py,sha256=Ck_yFzg7UZXiDWrLHaltjQybzVWSFDfzpdrx9ZYTRfI,700
 ultralytics/utils/callbacks/tensorboard.py,sha256=SHlE58Fb-sg-uZKtgy-ybIO3SAIfK55aj8kTYGA0Cyg,4167
 ultralytics/utils/callbacks/wb.py,sha256=sizfTa-xI9k2pnDSP_Q9pHZEFwcl__gSFM0AcneuRpY,7058
-ultralytics-8.3.43.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
-ultralytics-8.3.43.dist-info/METADATA,sha256=HrPTWW9i5rdFrpbTLW_e3coSiqoICIT5D3IL5STFFF4,35332
-ultralytics-8.3.43.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
-ultralytics-8.3.43.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
-ultralytics-8.3.43.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
-ultralytics-8.3.43.dist-info/RECORD,,
+ultralytics-8.3.47.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ultralytics-8.3.47.dist-info/METADATA,sha256=WBD3BU1M-ccsdrV8BGHu232NDD7ndQa6cWIFMtElOyk,35332
+ultralytics-8.3.47.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+ultralytics-8.3.47.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ultralytics-8.3.47.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ultralytics-8.3.47.dist-info/RECORD,,