ultralytics-8.3.55-py3-none-any.whl → ultralytics-8.3.56-py3-none-any.whl

tests/test_solutions.py CHANGED
@@ -14,46 +14,53 @@ POSE_VIDEO = "solution_ci_pose_demo.mp4"
 
 @pytest.mark.slow
 def test_major_solutions():
-    """Test the object counting, heatmap, speed estimation and queue management solution."""
+    """Test the object counting, heatmap, speed estimation, trackzone and queue management solution."""
     safe_download(url=f"{ASSETS_URL}/{DEMO_VIDEO}", dir=TMP)
     cap = cv2.VideoCapture(str(TMP / DEMO_VIDEO))
     assert cap.isOpened(), "Error reading video file"
     region_points = [(20, 400), (1080, 400), (1080, 360), (20, 360)]
     counter = solutions.ObjectCounter(region=region_points, model="yolo11n.pt", show=False)  # Test object counter
     heatmap = solutions.Heatmap(colormap=cv2.COLORMAP_PARULA, model="yolo11n.pt", show=False)  # Test heatmaps
+    heatmap_count = solutions.Heatmap(
+        colormap=cv2.COLORMAP_PARULA, model="yolo11n.pt", show=False, region=region_points
+    )  # Test heatmaps with object counting
     speed = solutions.SpeedEstimator(region=region_points, model="yolo11n.pt", show=False)  # Test queue manager
     queue = solutions.QueueManager(region=region_points, model="yolo11n.pt", show=False)  # Test speed estimation
     line_analytics = solutions.Analytics(analytics_type="line", model="yolo11n.pt", show=False)  # line analytics
     pie_analytics = solutions.Analytics(analytics_type="pie", model="yolo11n.pt", show=False)  # line analytics
     bar_analytics = solutions.Analytics(analytics_type="bar", model="yolo11n.pt", show=False)  # line analytics
     area_analytics = solutions.Analytics(analytics_type="area", model="yolo11n.pt", show=False)  # line analytics
+    trackzone = solutions.TrackZone(region=region_points, model="yolo11n.pt", show=False)  # Test trackzone
     frame_count = 0  # Required for analytics
     while cap.isOpened():
         success, im0 = cap.read()
         if not success:
             break
+        frame_count += 1
         original_im0 = im0.copy()
         _ = counter.count(original_im0.copy())
         _ = heatmap.generate_heatmap(original_im0.copy())
+        _ = heatmap_count.generate_heatmap(original_im0.copy())
         _ = speed.estimate_speed(original_im0.copy())
         _ = queue.process_queue(original_im0.copy())
         _ = line_analytics.process_data(original_im0.copy(), frame_count)
         _ = pie_analytics.process_data(original_im0.copy(), frame_count)
         _ = bar_analytics.process_data(original_im0.copy(), frame_count)
         _ = area_analytics.process_data(original_im0.copy(), frame_count)
+        _ = trackzone.trackzone(original_im0.copy())
     cap.release()
 
     # Test workouts monitoring
     safe_download(url=f"{ASSETS_URL}/{POSE_VIDEO}", dir=TMP)
-    cap1 = cv2.VideoCapture(str(TMP / POSE_VIDEO))
-    assert cap1.isOpened(), "Error reading video file"
-    gym = solutions.AIGym(line_width=2, kpts=[5, 11, 13], show=False)
-    while cap1.isOpened():
-        success, im0 = cap1.read()
+    cap = cv2.VideoCapture(str(TMP / POSE_VIDEO))
+    assert cap.isOpened(), "Error reading video file"
+    gym = solutions.AIGym(kpts=[5, 11, 13], show=False)
+    while cap.isOpened():
+        success, im0 = cap.read()
         if not success:
             break
         _ = gym.monitor(im0)
-    cap1.release()
+    cap.release()
 
 
 @pytest.mark.slow
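For context, the two solutions newly exercised above (TrackZone and a region-based Heatmap) can be driven the same way outside pytest. This is a minimal sketch mirroring the calls added to the test; the video path is a placeholder and the behavior comments are assumptions:

```python
# Minimal sketch mirroring the new test coverage: TrackZone plus a region-based Heatmap.
import cv2
from ultralytics import solutions

region_points = [(20, 400), (1080, 400), (1080, 360), (20, 360)]
trackzone = solutions.TrackZone(region=region_points, model="yolo11n.pt", show=False)
heatmap_count = solutions.Heatmap(
    colormap=cv2.COLORMAP_PARULA, model="yolo11n.pt", show=False, region=region_points
)

cap = cv2.VideoCapture("path/to/video.mp4")  # placeholder path
while cap.isOpened():
    success, im0 = cap.read()
    if not success:
        break
    _ = trackzone.trackzone(im0.copy())  # track objects only inside the region
    _ = heatmap_count.generate_heatmap(im0.copy())  # heatmap overlay plus region counts
cap.release()
```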
ultralytics/__init__.py CHANGED
@@ -1,6 +1,6 @@
 # Ultralytics YOLO 🚀, AGPL-3.0 license
 
-__version__ = "8.3.55"
+__version__ = "8.3.56"
 
 import os
 
ultralytics/cfg/__init__.py CHANGED
@@ -303,7 +303,7 @@ def get_cfg(cfg: Union[str, Path, Dict, SimpleNamespace] = DEFAULT_CFG_DICT, ove
         if k in cfg and isinstance(cfg[k], (int, float)):
             cfg[k] = str(cfg[k])
     if cfg.get("name") == "model":  # assign model to 'name' arg
-        cfg["name"] = cfg.get("model", "").split(".")[0]
+        cfg["name"] = str(cfg.get("model", "")).split(".")[0]
         LOGGER.warning(f"WARNING ⚠️ 'name=model' automatically updated to 'name={cfg['name']}'.")
 
     # Type and Value checks
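The added str() cast matters when overrides carry a non-string model value, for example a pathlib.Path. A rough illustration of the failure mode, not code from the package:

```python
# Illustration only: Path objects have no .split(), so the old code could raise AttributeError.
from pathlib import Path

model = Path("yolo11n.pt")
# Before: model.split(".") -> AttributeError for Path (or int) values.
name = str(model).split(".")[0]  # after the change: "yolo11n"
print(name)
```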
ultralytics/data/converter.py CHANGED
@@ -266,7 +266,7 @@ def convert_coco(
         # since LVIS val set contains images from COCO 2017 train in addition to the COCO 2017 val split.
         (fn / "train2017").mkdir(parents=True, exist_ok=True)
         (fn / "val2017").mkdir(parents=True, exist_ok=True)
-        with open(json_file) as f:
+        with open(json_file, encoding="utf-8") as f:
             data = json.load(f)
 
         # Create image dict
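Passing encoding="utf-8" keeps JSON parsing independent of the platform's default locale encoding, a common source of UnicodeDecodeError on Windows. The same pattern in isolation, as a sketch:

```python
# Sketch of the pattern adopted above: read annotation JSON explicitly as UTF-8.
import json


def load_json(path):
    with open(path, encoding="utf-8") as f:  # do not rely on the locale's default encoding
        return json.load(f)
```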
ultralytics/data/dataset.py CHANGED
@@ -323,7 +323,8 @@ class GroundingDataset(YOLODataset):
                 if box[2] <= 0 or box[3] <= 0:
                     continue
 
-                cat_name = " ".join([img["caption"][t[0] : t[1]] for t in ann["tokens_positive"]])
+                caption = img["caption"]
+                cat_name = " ".join([caption[t[0] : t[1]] for t in ann["tokens_positive"]])
                 if cat_name not in cat2id:
                     cat2id[cat_name] = len(cat2id)
                     texts.append([cat_name])
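Hoisting the caption lookup into a local variable avoids repeating the dict access for every span; the spans in tokens_positive are [start, end) character offsets into the caption. A toy illustration with made-up values:

```python
# Toy example (values are made up) of how tokens_positive spans slice a caption.
caption = "a man rides a red bicycle"
tokens_positive = [[2, 5], [14, 25]]  # [start, end) character offsets
cat_name = " ".join(caption[t[0] : t[1]] for t in tokens_positive)
print(cat_name)  # -> "man red bicycle"
```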
ultralytics/engine/exporter.py CHANGED
@@ -285,6 +285,7 @@ class Exporter:
                 "(torchscript, onnx, openvino, engine, coreml) formats. "
                 "See https://docs.ultralytics.com/models/yolo-world for details."
             )
+            model.clip_model = None  # openvino int8 export error: https://github.com/ultralytics/ultralytics/pull/18445
         if self.args.int8 and not self.args.data:
             self.args.data = DEFAULT_CFG.data or TASK2DATA[getattr(model, "task", "detect")]  # assign default data
             LOGGER.warning(
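Dropping the bundled CLIP text model before export works around the OpenVINO INT8 failure referenced in the linked PR. A hedged end-to-end sketch of the affected path (the checkpoint and dataset names are examples, not mandated by this change):

```python
# Hedged sketch: INT8 OpenVINO export of a YOLO-World model, the path touched above.
from ultralytics import YOLO

model = YOLO("yolov8s-worldv2.pt")
model.set_classes(["person", "bus"])  # bake a fixed vocabulary into the detection head
model.export(format="openvino", int8=True, data="coco8.yaml")  # INT8 needs calibration data
```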
@@ -602,7 +603,7 @@ class Exporter:
     @try_export
     def export_paddle(self, prefix=colorstr("PaddlePaddle:")):
         """YOLO Paddle export."""
-        check_requirements(("paddlepaddle", "x2paddle"))
+        check_requirements(("paddlepaddle-gpu" if torch.cuda.is_available() else "paddlepaddle", "x2paddle"))
         import x2paddle  # noqa
         from x2paddle.convert import pytorch2paddle  # noqa
 
@@ -949,7 +950,7 @@ class Exporter:
             "sng4onnx>=1.0.1",  # required by 'onnx2tf' package
             "onnx_graphsurgeon>=0.3.26",  # required by 'onnx2tf' package
             "onnx>=1.12.0",
-            "onnx2tf>1.17.5,<=1.22.3",
+            "onnx2tf>1.17.5,<=1.26.3",
             "onnxslim>=0.1.31",
             "tflite_support<=0.4.3" if IS_JETSON else "tflite_support",  # fix ImportError 'GLIBCXX_3.4.29'
             "flatbuffers>=23.5.26,<100",  # update old 'flatbuffers' included inside tensorflow package
@@ -1136,7 +1137,7 @@ class Exporter:
         if getattr(self.model, "end2end", False):
             raise ValueError("IMX export is not supported for end2end models.")
         if "C2f" not in self.model.__str__():
-            raise ValueError("IMX export is only supported for YOLOv8 detection models")
+            raise ValueError("IMX export is only supported for YOLOv8n detection models")
         check_requirements(("model-compression-toolkit==2.1.1", "sony-custom-layers==0.2.0", "tensorflow==2.12.0"))
         check_requirements("imx500-converter[pt]==3.14.3")  # Separate requirements for imx500-converter
 
ultralytics/models/sam/predict.py CHANGED
@@ -91,9 +91,9 @@ class Predictor(BasePredictor):
             _callbacks (Dict | None): Dictionary of callback functions to customize behavior.
 
         Examples:
-            >>> predictor = Predictor(cfg=DEFAULT_CFG)
-            >>> predictor = Predictor(overrides={"imgsz": 640})
-            >>> predictor = Predictor(_callbacks={"on_predict_start": custom_callback})
+            >>> predictor_example = Predictor(cfg=DEFAULT_CFG)
+            >>> predictor_example_with_imgsz = Predictor(overrides={"imgsz": 640})
+            >>> predictor_example_with_callback = Predictor(_callbacks={"on_predict_start": custom_callback})
         """
         if overrides is None:
             overrides = {}
@@ -215,7 +215,7 @@ class Predictor(BasePredictor):
             im (torch.Tensor): Preprocessed input image tensor with shape (N, C, H, W).
             bboxes (np.ndarray | List | None): Bounding boxes in XYXY format with shape (N, 4).
             points (np.ndarray | List | None): Points indicating object locations with shape (N, 2) or (N, num_points, 2), in pixels.
-            labels (np.ndarray | List | None): Point prompt labels with shape (N,) or (N, num_points). 1 for foreground, 0 for background.
+            labels (np.ndarray | List | None): Point prompt labels with shape (N) or (N, num_points). 1 for foreground, 0 for background.
             masks (np.ndarray | None): Low-res masks from previous predictions with shape (N, H, W). For SAM, H=W=256.
             multimask_output (bool): Flag to return multiple masks for ambiguous prompts.
 
@@ -260,7 +260,7 @@ class Predictor(BasePredictor):
             dst_shape (tuple): The target shape (height, width) for the prompts.
             bboxes (np.ndarray | List | None): Bounding boxes in XYXY format with shape (N, 4).
             points (np.ndarray | List | None): Points indicating object locations with shape (N, 2) or (N, num_points, 2), in pixels.
-            labels (np.ndarray | List | None): Point prompt labels with shape (N,) or (N, num_points). 1 for foreground, 0 for background.
+            labels (np.ndarray | List | None): Point prompt labels with shape (N) or (N, num_points). 1 for foreground, 0 for background.
             masks (List | np.ndarray, Optional): Masks for the objects, where each mask is a 2D array.
 
         Raises:
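Both docstring tweaks above describe the prompt shapes SAM's predictor expects. A hedged usage sketch with those shapes (the checkpoint name and image path are placeholders):

```python
# Hedged sketch: box and point prompts with the documented shapes.
import numpy as np
from ultralytics import SAM

model = SAM("sam2.1_b.pt")  # placeholder checkpoint
bboxes = np.array([[100, 100, 400, 400]])        # (N, 4) in XYXY pixel coordinates
points = np.array([[[250, 250]], [[600, 300]]])  # (N, num_points, 2) in pixels
labels = np.array([[1], [1]])                    # (N, num_points): 1 foreground, 0 background

results = model("path/to/image.jpg", bboxes=bboxes)                 # box prompt
results = model("path/to/image.jpg", points=points, labels=labels)  # point prompts
```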
@@ -853,8 +853,8 @@ class SAM2VideoPredictor(SAM2Predictor):
 
         Examples:
             >>> predictor = SAM2VideoPredictor(cfg=DEFAULT_CFG)
-            >>> predictor = SAM2VideoPredictor(overrides={"imgsz": 640})
-            >>> predictor = SAM2VideoPredictor(_callbacks={"on_predict_start": custom_callback})
+            >>> predictor_example_with_imgsz = SAM2VideoPredictor(overrides={"imgsz": 640})
+            >>> predictor_example_with_callback = SAM2VideoPredictor(_callbacks={"on_predict_start": custom_callback})
         """
         super().__init__(cfg, overrides, _callbacks)
         self.inference_state = {}
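For orientation, the class whose docstring examples are renamed here is normally driven through overrides that select a SAM 2 checkpoint. A hedged sketch along the lines of the published docs (checkpoint and video names are placeholders):

```python
# Hedged sketch: streaming video segmentation with SAM2VideoPredictor.
from ultralytics.models.sam import SAM2VideoPredictor

overrides = dict(conf=0.25, task="segment", mode="predict", imgsz=1024, model="sam2_b.pt")
predictor = SAM2VideoPredictor(overrides=overrides)
results = predictor(source="path/to/video.mp4", points=[920, 470], labels=[1])  # single foreground point
```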
ultralytics/nn/autobackend.py CHANGED
@@ -133,7 +133,7 @@ class AutoBackend(nn.Module):
 
         # Set device
         cuda = torch.cuda.is_available() and device.type != "cpu"  # use CUDA
-        if cuda and not any([nn_module, pt, jit, engine, onnx]):  # GPU dataloader formats
+        if cuda and not any([nn_module, pt, jit, engine, onnx, paddle]):  # GPU dataloader formats
             device = torch.device("cpu")
             cuda = False
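With paddle added to the list, a PaddlePaddle export is now treated as a GPU-capable dataloader format, so AutoBackend no longer forces the device back to CPU when CUDA is available; this pairs with the paddlepaddle-gpu requirement change in the exporter above. A hedged round-trip sketch (the exported directory name follows the exporter's usual convention and is an assumption here):

```python
# Hedged sketch: export to Paddle, then run the export through the same YOLO API.
from ultralytics import YOLO

YOLO("yolo11n.pt").export(format="paddle")   # installs paddlepaddle-gpu when CUDA is visible
paddle_model = YOLO("yolo11n_paddle_model/") # assumed export directory name
results = paddle_model("https://ultralytics.com/images/bus.jpg")
```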
 
ultralytics/utils/plotting.py CHANGED
@@ -1269,7 +1269,7 @@ def plt_color_scatter(v, f, bins=20, cmap="viridis", alpha=0.8, edgecolors="none
 
 def plot_tune_results(csv_file="tune_results.csv"):
     """
-    Plot the evolution results stored in an 'tune_results.csv' file. The function generates a scatter plot for each key
+    Plot the evolution results stored in a 'tune_results.csv' file. The function generates a scatter plot for each key
     in the CSV, color-coded based on fitness scores. The best-performing configurations are highlighted on the plots.
 
     Args:
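Only the article in the docstring changes here; for reference, the function is typically pointed at the CSV written during hyperparameter tuning, roughly as follows (a hedged usage sketch, not taken from the diff):

```python
# Hedged usage sketch of the function whose docstring is touched above.
from ultralytics.utils.plotting import plot_tune_results

plot_tune_results("tune_results.csv")  # scatter plots per hyperparameter, colored by fitness
```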
ultralytics-8.3.55.dist-info/METADATA → ultralytics-8.3.56.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ultralytics
-Version: 8.3.55
+Version: 8.3.56
 Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -57,7 +57,6 @@ Requires-Dist: coverage[toml]; extra == "dev"
 Requires-Dist: mkdocs>=1.6.0; extra == "dev"
 Requires-Dist: mkdocs-material>=9.5.9; extra == "dev"
 Requires-Dist: mkdocstrings[python]; extra == "dev"
-Requires-Dist: mkdocs-jupyter; extra == "dev"
 Requires-Dist: mkdocs-redirects; extra == "dev"
 Requires-Dist: mkdocs-ultralytics-plugin>=0.1.8; extra == "dev"
 Requires-Dist: mkdocs-macros-plugin>=1.0.5; extra == "dev"
ultralytics-8.3.55.dist-info/RECORD → ultralytics-8.3.56.dist-info/RECORD
@@ -6,11 +6,11 @@ tests/test_engine.py,sha256=dcEcJsMQh61rDSNv7l4TIAgybLpzjVwerv9JZC_KCM8,4934
 tests/test_exports.py,sha256=1MvhcQ2qHdbJImHII-bFarcaIcm-kPlEK-OdFLxnj7o,8769
 tests/test_integrations.py,sha256=f5-QCUk1SU_-qn4mBCZwS3GN3tXEBIIXo4z2EhExbHw,6126
 tests/test_python.py,sha256=S399TdcZcymRJIYrKlXPiROWg_izHL3TGhHgW15kcrA,23210
-tests/test_solutions.py,sha256=G-MRSeYZk6nPJzOJ7x3s9Kz7cMS2eHNow6637zsvz3E,3761
-ultralytics/__init__.py,sha256=Tsj0LL-94bTdQvFEb2g6cS4xifoXylcTZDh0uwu3PQI,681
+tests/test_solutions.py,sha256=O-GM6qBdew8BQmkpt8XLbyQJTcTdElz1yTBL1WOJsWw,4177
+ultralytics/__init__.py,sha256=clZvHAMufRM-Rh5yOmdU_XwloeuwE7XW3UKwmjaMp6k,681
 ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
 ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
-ultralytics/cfg/__init__.py,sha256=MEgNvQOfdbLTF07TzwgC7yIdHQQLJ6dE30FEurfRmJg,39026
+ultralytics/cfg/__init__.py,sha256=MJ52wv8-rQHvD8ZBJ4RA31npqgCtUtFYEG4sQ2kciFc,39031
 ultralytics/cfg/default.yaml,sha256=FcXbvTXXvMpssk9fSwdlnVTtyqfmlYE9gAcHsf0OMf8,8347
 ultralytics/cfg/datasets/Argoverse.yaml,sha256=FyeuJT5CHq_9d4hlfAf0kpZlnbUMO0S--UJ1yIqcdKk,3134
 ultralytics/cfg/datasets/DOTAv1.5.yaml,sha256=QVfp_Qp-4rukuicaB4qx86NxSHM8Mrzym8l_fIDo8gw,1195
@@ -95,13 +95,13 @@ ultralytics/data/annotator.py,sha256=JNmS6uELlEABrU5ViVJiPnjt44v-Us7j39Bwoug_73Y
 ultralytics/data/augment.py,sha256=UUgIv2e1qFSqjNGDX4Lgn8fH3o7kd5GCMTVUOzK1gUo,120497
 ultralytics/data/base.py,sha256=ZCIhAyFfxXVp5fVnYD8mwbksNALJTayBKIR5FKGV7ZM,15168
 ultralytics/data/build.py,sha256=AfMmz0sHIYmwry_90tEJFRk_kz0S3SolScVXqYHiT08,7261
-ultralytics/data/converter.py,sha256=RIfTXNrazwZqmTYOYoJtupDMtNzm8dxsrVp6q2m8gyg,24388
-ultralytics/data/dataset.py,sha256=HA6-0H-k7ioPr5xRqOEDUyWhycrg9z8hGLaplEmajA0,23182
+ultralytics/data/converter.py,sha256=JdYwN9eATLUZ7321DistDNo02E3RRTEU97jl6ikWVXk,24406
+ultralytics/data/dataset.py,sha256=6_6sHSjJYX7lVUzqBqVW_q_REXbjeoh6dHqAqH9krfA,23216
 ultralytics/data/loaders.py,sha256=k1Vq7Rxv6tpsRsYuMdZeI3_f2BciAaZwhDQU8iHhVJM,28506
 ultralytics/data/split_dota.py,sha256=eFafJ7Vg52wj6KDCHFJAf1tKzyPD5YaPB8kM4VX5Aeg,10688
 ultralytics/data/utils.py,sha256=bmWEIrdogj4kssZQSJdSbIF8QsJU00lo-EY-Mgcqv4M,31073
 ultralytics/engine/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
-ultralytics/engine/exporter.py,sha256=pN9BUQwvcz0DMktXB8Lzp3VOeY5JF5kKjEwtaa1ACLw,68596
+ultralytics/engine/exporter.py,sha256=3I7TIqeU3creMPJvmP7BVbOSrDHO9DI0pq__rFbQSVs,68771
 ultralytics/engine/model.py,sha256=3csd_Ml9M6CKxUKU7vRZadanNnJw96sNIx71qHVGdGQ,53082
 ultralytics/engine/predictor.py,sha256=o1RYMFH3_uVOMCIXXakpRYpNzoD-6Bdsxryt5fuBni0,17712
 ultralytics/engine/results.py,sha256=a1XFZRPwqgKDBOEAibHuT9nP2xefLiWVsMoBJbcr4iA,75058
@@ -132,7 +132,7 @@ ultralytics/models/sam/__init__.py,sha256=E4IHie-T0HYCklKW6-kqlW84GJJdD6rujf7W_S
 ultralytics/models/sam/amg.py,sha256=GrmO_8YfIDt_QkPEMF_WFjPZkhwhf7iwx7ig8JgOUnE,8709
 ultralytics/models/sam/build.py,sha256=ac7Pop5f51TVzGgfV6bbXSFDA9fBVxERUc_6WDQ-9Ys,12487
 ultralytics/models/sam/model.py,sha256=CE4ruw1Iwrp7-9aHGspQihQaTVsqagYrQLWmpXYodLw,7382
-ultralytics/models/sam/predict.py,sha256=-FBgCry1M-HhH_wa9EQfUyl2e6sPdGaONt8YP8oeo0M,82535
+ultralytics/models/sam/predict.py,sha256=jQEqZHh2v06qYZ04wHRl96GkbQ2zcCJQxZK_CeMTMNA,82623
 ultralytics/models/sam/modules/__init__.py,sha256=mHtJuK4hwF8cuV-VHDc7tp6u6D1gHz2Z7JI8grmQDTs,42
 ultralytics/models/sam/modules/blocks.py,sha256=Q-KwhFbdyZhl1tjG_kP2LcQkZbzoNt618i-NRrKNx2Y,45919
 ultralytics/models/sam/modules/decoders.py,sha256=mODsqnTN_CjE3H0Sh9cd8PfTnHANPjGB1bjqHxfezSg,25830
@@ -171,7 +171,7 @@ ultralytics/models/yolo/world/__init__.py,sha256=3VTH0q4NOt2EWRom15yCymvmvm0Etp2
 ultralytics/models/yolo/world/train.py,sha256=gaDrAmLJpg9qDtmL5evA5HsV2yb4RTRSfk2EDYrHdRg,3686
 ultralytics/models/yolo/world/train_world.py,sha256=IsnCEVt6DcM9lUskCKmIN-M8MM79xLpwTRqRoAHUnZ4,4857
 ultralytics/nn/__init__.py,sha256=4BPLHY89xEM_al5uK0aOmFgiML6CMGEZbezxOvTjOEs,587
-ultralytics/nn/autobackend.py,sha256=xdOM2rzRu4CPCtMhFrYcgE_EZ3cAjTxO0MquUm8rAtA,35548
+ultralytics/nn/autobackend.py,sha256=7WyyipeaAqKCFUAA7_y2jIOz2e90GxHrD7c7ARe4ZJI,35556
 ultralytics/nn/tasks.py,sha256=pqRe1F1HOH8AjLZpFaZCGb5gSYsXH0eVnHITKDTFFhI,48527
 ultralytics/nn/modules/__init__.py,sha256=xhW2BennT9U_VaMXVpRu-bdLgp1BXt9L8mkIUBE3idU,2625
 ultralytics/nn/modules/activation.py,sha256=chhn469wnRHEs5BMGNBYXwPYZc_7-urspTT8fnBd-xA,895
@@ -216,7 +216,7 @@ ultralytics/utils/loss.py,sha256=_d2L4lIemaeAHrGHqf9q-KI7yTgHKCbIcYAF7Y-farI,341
 ultralytics/utils/metrics.py,sha256=mKimIbiEoFT4J5PnOJegOZNkY0k9C6vv19o9HvExHd8,53778
 ultralytics/utils/ops.py,sha256=d5sLAvgqP36Pq_dMQE1DZFYhmIGUMrlrxh1czcuUfC4,33546
 ultralytics/utils/patches.py,sha256=J-iOwIRbfUs-inBZerhnXby5tUKjYcOIyvhLTS352JE,3270
-ultralytics/utils/plotting.py,sha256=LMytuIuIgtTlogGuRjwqnmBUAL-3_m8vMdXP4O8S9kg,62960
+ultralytics/utils/plotting.py,sha256=SudFfq9KOfprtpXsurfWEOeQqVsU0K3aVvcOGFcNB4A,62959
 ultralytics/utils/tal.py,sha256=thD_AEhVmhaZqmS5szZMvpKO-RKOeZwfX1BYAhdnA0o,18470
 ultralytics/utils/torch_utils.py,sha256=7qP0YhF5d8qCUD2XiOwXjCTOw8pje6HvX42J8oL3Ldw,33263
 ultralytics/utils/triton.py,sha256=HL_gjIwMoi-WD8gJLTmemBehIto8eRz3HdK8fcROLk0,4043
@@ -232,9 +232,9 @@ ultralytics/utils/callbacks/neptune.py,sha256=IbGQfEltamUKXJt93uSLQFn8c2rYh3DMTg
 ultralytics/utils/callbacks/raytune.py,sha256=Ck_yFzg7UZXiDWrLHaltjQybzVWSFDfzpdrx9ZYTRfI,700
 ultralytics/utils/callbacks/tensorboard.py,sha256=SHlE58Fb-sg-uZKtgy-ybIO3SAIfK55aj8kTYGA0Cyg,4167
 ultralytics/utils/callbacks/wb.py,sha256=sizfTa-xI9k2pnDSP_Q9pHZEFwcl__gSFM0AcneuRpY,7058
-ultralytics-8.3.55.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
-ultralytics-8.3.55.dist-info/METADATA,sha256=8aK1w4MGw79JrbAyT0sHPenTnaZOa_eqbq0RWesISCM,35332
-ultralytics-8.3.55.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
-ultralytics-8.3.55.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
-ultralytics-8.3.55.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
-ultralytics-8.3.55.dist-info/RECORD,,
+ultralytics-8.3.56.dist-info/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ultralytics-8.3.56.dist-info/METADATA,sha256=WgefhJBgLzKqorO1eg-DQCSced-fF2pa-ph_M2PfX2c,35286
+ultralytics-8.3.56.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+ultralytics-8.3.56.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ultralytics-8.3.56.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ultralytics-8.3.56.dist-info/RECORD,,