dgenerate-ultralytics-headless 8.3.246__py3-none-any.whl → 8.3.247__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dgenerate-ultralytics-headless
- Version: 8.3.246
+ Version: 8.3.247
  Summary: Automatically built Ultralytics package with python-opencv-headless dependency instead of python-opencv
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
  Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -1,4 +1,4 @@
- dgenerate_ultralytics_headless-8.3.246.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ dgenerate_ultralytics_headless-8.3.247.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
  tests/__init__.py,sha256=bCox_hLdGRFYGLb2kd722VdNP2zEXNYNuLLYtqZSrbw,804
  tests/conftest.py,sha256=mOy9lGpNp7lk1hHl6_pVE0f9cU-72gnkoSm4TO-CNZU,2318
  tests/test_cli.py,sha256=GhIFHi-_WIJpDgoGNRi0DnjbfwP1wHbklBMnkCM-P_4,5464
@@ -8,7 +8,7 @@ tests/test_exports.py,sha256=5G5EgDmars6d-N7TVnJdDFWId0IJs-yw03DvdQIjrNU,14246
  tests/test_integrations.py,sha256=6QgSh9n0J04RdUYz08VeVOnKmf4S5MDEQ0chzS7jo_c,6220
  tests/test_python.py,sha256=viMvRajIbDZdm64hRRg9i8qZ1sU9frwB69e56mxwEXk,29266
  tests/test_solutions.py,sha256=CIaphpmOXgz9AE9xcm1RWODKrwGfZLCc84IggGXArNM,14122
- ultralytics/__init__.py,sha256=zA1SrrkUbY0pB0fq-YkFi-qyresnypQAzaAZ6bowhcE,1302
+ ultralytics/__init__.py,sha256=pSBu41NHtqNCzmsdvqPp9FVTFBF4JPwbfXF07bjIYYE,1302
  ultralytics/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
  ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
  ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
@@ -112,7 +112,7 @@ ultralytics/data/annotator.py,sha256=kbfSPBesKEVK6ys3dilTdMh7rCKyp0xV7tGQeEDbpWI
  ultralytics/data/augment.py,sha256=ahqEe2ZBLeMZbK44Z-QihfCVCArOqtHjSSD-41_NlA8,127503
  ultralytics/data/base.py,sha256=pMs8yJOmAFPXdgfLCDtUemSvkPNDzxReP-fWzkNtonc,19723
  ultralytics/data/build.py,sha256=s-tkSZPf3OfQyfXPXB9XxdW_gIcU6Xy_u21ekSgTnRo,17205
- ultralytics/data/converter.py,sha256=_54Xw78TLRswJ9nUVCd2lfEP5riQ82rM0_g_Gad4PAI,31893
+ ultralytics/data/converter.py,sha256=1m345J7YUn7gtaChO7To4BWZm72pC8D8L2O0k99q0DE,31898
  ultralytics/data/dataset.py,sha256=L5QYgic_B1e1zffgRA5lqKDd5PQuMDg6PZVd-RTUA7E,36523
  ultralytics/data/loaders.py,sha256=BQbhgjiLCGcRBPkGVG9Hr1jeNfG1nuZD3jstiWb7zS8,31889
  ultralytics/data/split.py,sha256=HpR0ltf5oN1DpZstavFbBFC1YdpGPaATXxDOcAMwOqc,5101
@@ -126,10 +126,10 @@ ultralytics/engine/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QU
  ultralytics/engine/exporter.py,sha256=Ncf5GK5xAqSu0DH-6z5V53qZB7LstDJFTMF5a-7VQfs,72639
  ultralytics/engine/model.py,sha256=61ea1rB0wmL0CCaEr8p5gzneH0eL55OOMaTcFt8fR80,53079
  ultralytics/engine/predictor.py,sha256=neYmNDX27Vv3ggk9xqaKlH6XzB2vlFIghU5o7ZC0zFo,22838
- ultralytics/engine/results.py,sha256=LHX0AaVOv3CEjYjw8i4LThXqihxmahWCxpH20b4s9dM,68030
+ ultralytics/engine/results.py,sha256=DomI01voqR_i7v8LhDGb6jWCprWB4H6I436GSO2NMBY,68030
  ultralytics/engine/trainer.py,sha256=mqVrhL8xnJwwKJVjxDEiiwu0WH48Ne5dB4SXxlxyHh4,45479
  ultralytics/engine/tuner.py,sha256=qiozSxYC-Hk1TQgyftrYTKkqLrrwFzjjkT6mOYR3Vjc,21460
- ultralytics/engine/validator.py,sha256=DRoqyPYhH5rBEMLo-Y94CdiLtxQTYaJHP04fP9e-NJM,17528
+ ultralytics/engine/validator.py,sha256=2rqdVt4hB9ruMJq-L7PbaCNFwuERS7ZHdVSg91RM3wk,17761
  ultralytics/hub/__init__.py,sha256=Z0K_E00jzQh90b18q3IDChwVmTvyIYp6C00sCV-n2F8,6709
  ultralytics/hub/auth.py,sha256=ANzCeZA7lUzTWc_sFHbDuuyBh1jLl2sTpHkoUbIkFYE,6254
  ultralytics/hub/session.py,sha256=OzBXAL9R135gRDdfNYUqyiSrxOyaiMFCVYSZua99sF0,18364
@@ -164,7 +164,7 @@ ultralytics/models/sam/modules/memory_attention.py,sha256=jFVWVbgDS7VXPqOL1e3gAz
  ultralytics/models/sam/modules/sam.py,sha256=-KV-1PZK39DTdSpR5DI3E8I6gGVLja3tMv1MH7Au_eA,61654
  ultralytics/models/sam/modules/tiny_encoder.py,sha256=RJQTHjfUe2N3cm1EZHXObJlKqVn10EnYJFla1mnWU_8,42065
  ultralytics/models/sam/modules/transformer.py,sha256=NmTuyxS9PNsg66tKY9_Q2af4I09VW5s8IbfswyTT3ao,14892
- ultralytics/models/sam/modules/utils.py,sha256=Re09hcKe9LJpFzKHxpbwEmoMpPImnJaoNw7pe62_ui0,21129
+ ultralytics/models/sam/modules/utils.py,sha256=hE06t6cZf10AmPLPwGZbFrGheoOgGAGZ0GXRWlJH9pE,21125
  ultralytics/models/sam/sam3/__init__.py,sha256=aM4-KimnYgIFe-e5ctLT8e6k9PagvuvKFaHaagDZM7E,144
  ultralytics/models/sam/sam3/decoder.py,sha256=kXgPOjOh63ttJPFwMF90arK9AKZwPmhxOiexnPijiTE,22872
  ultralytics/models/sam/sam3/encoder.py,sha256=IFUIJkWrVW1MmkeA142Sxhgnx5Tssq2Bgi9T3iIppU4,21543
@@ -188,7 +188,7 @@ ultralytics/models/yolo/classify/val.py,sha256=gtoUJN5_-56EbiYp5Ur-shfdBNMJOqToW
  ultralytics/models/yolo/detect/__init__.py,sha256=GIRsLYR-kT4JJx7lh4ZZAFGBZj0aebokuU0A7JbjDVA,257
  ultralytics/models/yolo/detect/predict.py,sha256=Sct-UwkDe54ZmVtTYl0-fKgx_0BOlPBUsr4NodFd-eU,5385
  ultralytics/models/yolo/detect/train.py,sha256=-PHH6i767_XKCPsBeAOi7AxfHpoq451GfjY4TRMuo7c,10469
- ultralytics/models/yolo/detect/val.py,sha256=O8TkCHnEvuxV2Hyqw_CuVZMWzHWBjCM48fqtdf8T8dQ,22379
+ ultralytics/models/yolo/detect/val.py,sha256=-UTrVG3HturHHAY76BUegO2s5d9Xq_dEumebLiNkSVc,22351
  ultralytics/models/yolo/obb/__init__.py,sha256=tQmpG8wVHsajWkZdmD6cjGohJ4ki64iSXQT8JY_dydo,221
  ultralytics/models/yolo/obb/predict.py,sha256=vA_BueSJJJuyaAZPWE0xKk7KI_YPQCUOCqeZZLMTeXM,2600
  ultralytics/models/yolo/obb/train.py,sha256=qtBjwOHOq0oQ9mK0mOtnUrXAQ5UCUrntKq_Z0-oCBHo,3438
@@ -211,14 +211,14 @@ ultralytics/models/yolo/yoloe/train_seg.py,sha256=0hRByMXsEJA-J2B1wXDMVhiW9f9MOT
  ultralytics/models/yolo/yoloe/val.py,sha256=utUFWeFKRFWZrPr1y3A8ztbTwdoWMYqzlwBN7CQ0tCA,9418
  ultralytics/nn/__init__.py,sha256=538LZPUKKvc3JCMgiQ4VLGqRN2ZAaVLFcQbeNNHFkEA,545
  ultralytics/nn/autobackend.py,sha256=RkHTt8nBZaeupfshPpze8Wy7vw08FiJWctvzU3SEaro,44578
- ultralytics/nn/tasks.py,sha256=636MN27VvlupIaHPpV4r0J705RUdV-giNKjIeJbRkxI,70448
+ ultralytics/nn/tasks.py,sha256=nHhP3R8r17K_pHSfGXwDAPEwUyV0sbqzkSHjeZ2PRkg,70418
  ultralytics/nn/text_model.py,sha256=novnuosqXnW1NmlOzWOk7dEKuN6Vq40CTksr6hI3Knc,15109
  ultralytics/nn/modules/__init__.py,sha256=5Sg_28MDfKwdu14Ty_WCaiIXZyjBSQ-xCNCwnoz_w-w,3198
  ultralytics/nn/modules/activation.py,sha256=J6n-CJKFK0YbhwcRDqm9zEJM9pSAEycj5quQss_3x6E,2219
  ultralytics/nn/modules/block.py,sha256=YRALZHImSMdLpmF0qIf8uF3yENz0EK63SFp7gzylo5g,69885
  ultralytics/nn/modules/conv.py,sha256=9WUlBzHD-wLgz0riLyttzASLIqBtXPK6Jk5EdyIiGCM,21100
  ultralytics/nn/modules/head.py,sha256=V1zSWN-AOHPkciqvfruDA0LgBgSGyKc_CULNCNEAe8o,51875
- ultralytics/nn/modules/transformer.py,sha256=oasUhhIm03kY0QtWrpvSSLnQa9q3eW2ksx82MgpPmsE,31972
+ ultralytics/nn/modules/transformer.py,sha256=lAjTH-U8IkBp_1cXSOOFSus9tJf-s8WISKKcXPB84CM,31972
  ultralytics/nn/modules/utils.py,sha256=EyhENse_RESlXjLHAJWvV07_tq1MVMmfzXgPR1fiT9w,6066
  ultralytics/solutions/__init__.py,sha256=Jj7OcRiYjHH-e104H4xTgjjR5W6aPB4mBRndbaSPmgU,1209
  ultralytics/solutions/ai_gym.py,sha256=ItLE6HYMx6AEgiHEDG1HKDkippnrnycb-79S2g72AYA,5181
@@ -263,9 +263,9 @@ ultralytics/utils/events.py,sha256=6vqs_iSxoXIhQ804sOjApNZmXwNW9FUFtjaHPY8ta10,4
  ultralytics/utils/files.py,sha256=BdaRwEKqzle4glSj8n_jq6bDjTCAs_H1SN06ZOQ9qFU,8190
  ultralytics/utils/git.py,sha256=UdqeIiiEzg1qkerAZrg5YtTYPuJYwrpxW9N_6Pq6s8U,5501
  ultralytics/utils/instance.py,sha256=11mhefvTI9ftMqSirXuiViAi0Fxlo6v84qvNxfRNUoE,18862
- ultralytics/utils/logger.py,sha256=LRhi4d76BYiHmSzjZv8V1TpNRpUoufJQYC1QQUOqhtw,19045
+ ultralytics/utils/logger.py,sha256=T5iaNnaqbCvx_FZf1dhVkr5FVxyxb4vO17t4SJfCIhg,19132
  ultralytics/utils/loss.py,sha256=t-z7qkvqF8OtuRHrj2wmvClZV2CCumIRi9jnqkc9i_A,39573
- ultralytics/utils/metrics.py,sha256=sqqFPhqhAva30KU8i8OJB2Q-JwytCaZJVX99wpDH6gE,69197
+ ultralytics/utils/metrics.py,sha256=dpS9jSPf3dqozcrkiraKhYBI03U2t-_lt8pWNCijGww,69152
  ultralytics/utils/nms.py,sha256=zv1rOzMF6WU8Kdk41VzNf1H1EMt_vZHcbDFbg3mnN2o,14248
  ultralytics/utils/ops.py,sha256=nWvTLJSBeW_XrxCy5Ytxl7sZJHp2sRqyCv4mm8QwYnw,25797
  ultralytics/utils/patches.py,sha256=mD3slAMAhcezzP42_fOWmacNMU6zXB68Br4_EBCyIjs,7117
@@ -274,7 +274,7 @@ ultralytics/utils/tal.py,sha256=w7oi6fp0NmL6hHh-yvCCX1cBuuB4JuX7w1wiR4_SMZs,2067
  ultralytics/utils/torch_utils.py,sha256=zOPUQlorTiEPSkqlSEPyaQhpmzmgOIKF7f3xJb0UjdQ,40268
  ultralytics/utils/tqdm.py,sha256=4kL_nczykHu6VxRzRSbvUSJknrCZydoS_ZegZkFXpsg,16197
  ultralytics/utils/triton.py,sha256=BQu3CD3OlT76d1OtmnX5slQU37VC1kzRvEtfI2saIQA,5211
- ultralytics/utils/tuner.py,sha256=RY0SLmGsFDj7RgqAj-XXRDKZ3asWbdwakAAKWmDTQv4,6867
+ ultralytics/utils/tuner.py,sha256=NOh0CDAqD1IvTLB5UglIgSS5RXP7lmiyrWKU4uJ0I74,7355
  ultralytics/utils/callbacks/__init__.py,sha256=hzL63Rce6VkZhP4Lcim9LKjadixaQG86nKqPhk7IkS0,242
  ultralytics/utils/callbacks/base.py,sha256=floD31JHqHpiVabQiE76_hzC_j7KjtL4w_czkD1bLKc,6883
  ultralytics/utils/callbacks/clearml.py,sha256=LjfNe4mswceCOpEGVLxqGXjkl_XGbef4awdcp4502RU,5831
@@ -283,7 +283,7 @@ ultralytics/utils/callbacks/dvc.py,sha256=YT0Sa5P8Huj8Fn9jM2P6MYzUY3PIVxsa5BInVi
  ultralytics/utils/callbacks/hub.py,sha256=fVLqqr3ZM6hoYFlVMEeejfq1MWDrkWCskPFOG3HGILQ,4159
  ultralytics/utils/callbacks/mlflow.py,sha256=wCXjQgdufp9LYujqMzLZOmIOur6kvrApHNeo9dA7t_g,5323
  ultralytics/utils/callbacks/neptune.py,sha256=_vt3cMwDHCR-LyT3KtRikGpj6AG11oQ-skUUUUdZ74o,4391
- ultralytics/utils/callbacks/platform.py,sha256=_JpvGzhwb1gJLUOPKn7NSqt1I_RA11eKW6dhdjSifFA,10278
+ ultralytics/utils/callbacks/platform.py,sha256=L7P5ttko-QVkig2y3r-D8YxfOWb7lNAan4iuMXxQ_u4,11682
  ultralytics/utils/callbacks/raytune.py,sha256=Y0dFyNZVRuFovSh7nkgUIHTQL3xIXOACElgHuYbg_5I,1278
  ultralytics/utils/callbacks/tensorboard.py,sha256=PTJYvD2gqRUN8xw5VoTjvKnu2adukLfvhMlDgTnTiFU,4952
  ultralytics/utils/callbacks/wb.py,sha256=ghmL3gigOa-z_F54-TzMraKw9MAaYX-Wk4H8dLoRvX8,7705
@@ -291,8 +291,8 @@ ultralytics/utils/export/__init__.py,sha256=Cfh-PwVfTF_lwPp-Ss4wiX4z8Sm1XRPklsqd
  ultralytics/utils/export/engine.py,sha256=23-lC6dNsmz5vprSJzaN7UGNXrFlVedNcqhlOH_IXes,9956
  ultralytics/utils/export/imx.py,sha256=F3b334IZdwjF8PdP1s6QI3Ndd82_2e77clj8aGLzIDo,12856
  ultralytics/utils/export/tensorflow.py,sha256=igYzwbdblb9YgfV4Jgl5lMvynuVRcF51dAzI7j-BBI0,9966
- dgenerate_ultralytics_headless-8.3.246.dist-info/METADATA,sha256=LoBmAU4Ur_f8FTLn5Ch-ebFhBwMFGA15N39d8IoNYIA,38799
- dgenerate_ultralytics_headless-8.3.246.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- dgenerate_ultralytics_headless-8.3.246.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
- dgenerate_ultralytics_headless-8.3.246.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
- dgenerate_ultralytics_headless-8.3.246.dist-info/RECORD,,
+ dgenerate_ultralytics_headless-8.3.247.dist-info/METADATA,sha256=WiMRFlV9VZMKUYVg5f9qeHS0Hd3BPi775nB0eJOtEQc,38799
+ dgenerate_ultralytics_headless-8.3.247.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ dgenerate_ultralytics_headless-8.3.247.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ dgenerate_ultralytics_headless-8.3.247.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ dgenerate_ultralytics_headless-8.3.247.dist-info/RECORD,,
ultralytics/__init__.py CHANGED
@@ -1,6 +1,6 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
 
- __version__ = "8.3.246"
+ __version__ = "8.3.247"
 
  import importlib
  import os
ultralytics/data/converter.py CHANGED
@@ -353,7 +353,7 @@ def convert_segment_masks_to_yolo_seg(masks_dir: str, output_dir: str, classes:
      Args:
          masks_dir (str): The path to the directory where all mask images (png, jpg) are stored.
          output_dir (str): The path to the directory where the converted YOLO segmentation masks will be stored.
-         classes (int): Total classes in the dataset i.e. for COCO classes=80
+         classes (int): Total number of classes in the dataset, e.g., 80 for COCO.
 
      Examples:
          >>> from ultralytics.data.converter import convert_segment_masks_to_yolo_seg
ultralytics/engine/results.py CHANGED
@@ -954,8 +954,8 @@ class Boxes(BaseTensor):
          >>> boxes = Boxes(torch.tensor([[100, 50, 150, 100], [200, 150, 300, 250]]), orig_shape=(480, 640))
          >>> xywh = boxes.xywh
          >>> print(xywh)
-         tensor([[100.0000, 50.0000, 50.0000, 50.0000],
-                 [200.0000, 150.0000, 100.0000, 100.0000]])
+         tensor([[125.0000, 75.0000, 50.0000, 50.0000],
+                 [250.0000, 200.0000, 100.0000, 100.0000]])
          """
          return ops.xyxy2xywh(self.xyxy)
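Note on the corrected example output: the new values follow directly from the xyxy→xywh definition (box center plus width/height) that ops.xyxy2xywh implements. A minimal standalone check with plain torch, independent of the Boxes class:

    import torch

    xyxy = torch.tensor([[100.0, 50.0, 150.0, 100.0], [200.0, 150.0, 300.0, 250.0]])
    cx, cy = (xyxy[:, 0] + xyxy[:, 2]) / 2, (xyxy[:, 1] + xyxy[:, 3]) / 2  # box centers
    w, h = xyxy[:, 2] - xyxy[:, 0], xyxy[:, 3] - xyxy[:, 1]  # box widths/heights
    print(torch.stack([cx, cy, w, h], dim=1))
    # tensor([[125.,  75.,  50.,  50.],
    #         [250., 200., 100., 100.]])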
 
ultralytics/engine/validator.py CHANGED
@@ -364,7 +364,10 @@ class BaseValidator:
          return []
 
      def on_plot(self, name, data=None):
-         """Register plots for visualization."""
+         """Register plots for visualization, deduplicating by type."""
+         plot_type = data.get("type") if data else None
+         if plot_type and any((v.get("data") or {}).get("type") == plot_type for v in self.plots.values()):
+             return  # Skip duplicate plot types
          self.plots[Path(name)] = {"data": data, "timestamp": time.time()}
 
      def plot_val_samples(self, batch, ni):
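The added guard only skips a plot whose "type" is already registered; entries without data are still recorded. A small standalone mimic of that check (not the actual BaseValidator, just the same dict layout), for illustration:

    import time
    from pathlib import Path

    plots = {}

    def on_plot(name, data=None):
        plot_type = data.get("type") if data else None
        if plot_type and any((v.get("data") or {}).get("type") == plot_type for v in plots.values()):
            return  # duplicate "pr_curve" below is dropped
        plots[Path(name)] = {"data": data, "timestamp": time.time()}

    on_plot("PR_curve.png", {"type": "pr_curve", "x": [0, 1], "y": [1, 0]})
    on_plot("PR_curve_epoch2.png", {"type": "pr_curve", "x": [0, 1], "y": [1, 0.5]})
    on_plot("results.png")  # data=None entries are still registered
    print(len(plots))  # 2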
ultralytics/models/sam/modules/utils.py CHANGED
@@ -166,7 +166,7 @@ def reshape_for_broadcast(freqs_cis: torch.Tensor, x: torch.Tensor):
          AssertionError: If the shape of freqs_cis doesn't match the last two dimensions of x.
      """
      ndim = x.ndim
-     assert 0 <= 1 < ndim
+     assert ndim >= 2
      assert freqs_cis.shape == (x.shape[-2], x.shape[-1])
      shape = [d if i >= ndim - 2 else 1 for i, d in enumerate(x.shape)]
      return freqs_cis.view(*shape)
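With the clearer assertion, the requirements are simply x.ndim >= 2 and freqs_cis matching x's last two dimensions; the computed view keeps those two dimensions and sets every leading one to 1 so it broadcasts against x. A quick shape check with an assumed 4-D input:

    import torch

    x = torch.zeros(2, 3, 8, 16)         # any tensor with ndim >= 2
    freqs_cis = torch.zeros(8, 16)       # must match x.shape[-2:]
    ndim = x.ndim
    shape = [d if i >= ndim - 2 else 1 for i, d in enumerate(x.shape)]
    print(shape)                         # [1, 1, 8, 16]
    print(freqs_cis.view(*shape).shape)  # torch.Size([1, 1, 8, 16]) -> broadcasts against x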
ultralytics/models/yolo/detect/val.py CHANGED
@@ -347,14 +347,14 @@ class DetectionValidator(BaseValidator):
              ni (int): Batch index.
              max_det (Optional[int]): Maximum number of detections to plot.
          """
-         # TODO: optimize this
+         if not preds:
+             return
          for i, pred in enumerate(preds):
              pred["batch_idx"] = torch.ones_like(pred["conf"]) * i # add batch index to predictions
          keys = preds[0].keys()
          max_det = max_det or self.args.max_det
          batched_preds = {k: torch.cat([x[k][:max_det] for x in preds], dim=0) for k in keys}
-         # TODO: fix this
-         batched_preds["bboxes"][:, :4] = ops.xyxy2xywh(batched_preds["bboxes"][:, :4]) # convert to xywh format
+         batched_preds["bboxes"] = ops.xyxy2xywh(batched_preds["bboxes"]) # convert to xywh format
          plot_images(
              images=batch["img"],
              labels=batched_preds,
ultralytics/nn/modules/transformer.py CHANGED
@@ -305,16 +305,16 @@ class TransformerBlock(nn.Module):
          """Forward propagate the input through the transformer block.
 
          Args:
-             x (torch.Tensor): Input tensor with shape [b, c1, w, h].
+             x (torch.Tensor): Input tensor with shape [b, c1, h, w].
 
          Returns:
-             (torch.Tensor): Output tensor with shape [b, c2, w, h].
+             (torch.Tensor): Output tensor with shape [b, c2, h, w].
          """
          if self.conv is not None:
              x = self.conv(x)
-         b, _, w, h = x.shape
+         b, _, h, w = x.shape
          p = x.flatten(2).permute(2, 0, 1)
-         return self.tr(p + self.linear(p)).permute(1, 2, 0).reshape(b, self.c2, w, h)
+         return self.tr(p + self.linear(p)).permute(1, 2, 0).reshape(b, self.c2, h, w)
 
 
  class MLPBlock(nn.Module):
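The h/w swap matters because flatten(2) serializes the spatial grid row by row, so the reshape on the way back must also use (h, w) to restore the original layout. A sanity check of that round trip (attention layers omitted), using a deliberately non-square input:

    import torch

    b, c, h, w = 2, 4, 5, 7                        # non-square on purpose
    x = torch.randn(b, c, h, w)
    p = x.flatten(2).permute(2, 0, 1)              # (h*w, b, c) token sequence
    back = p.permute(1, 2, 0).reshape(b, c, h, w)  # undo the flatten
    print(torch.equal(back, x))                    # True; reshape(b, c, w, h) would scramble pixels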
ultralytics/nn/tasks.py CHANGED
@@ -407,7 +407,7 @@ class DetectionModel(BaseModel):
              self.model.train() # Set model back to training(default) mode
              m.bias_init() # only run once
          else:
-             self.stride = torch.Tensor([32]) # default stride for i.e. RTDETR
+             self.stride = torch.Tensor([32]) # default stride, e.g., RTDETR
 
          # Init weights, biases
          initialize_weights(self)
@@ -1589,7 +1589,7 @@ def parse_model(d, ch, verbose=True):
          n = n_ = max(round(n * depth), 1) if n > 1 else n # depth gain
          if m in base_modules:
              c1, c2 = ch[f], args[0]
-             if c2 != nc: # if c2 not equal to number of classes (i.e. for Classify() output)
+             if c2 != nc: # if c2 != nc (e.g., Classify() output)
                  c2 = make_divisible(min(c2, max_channels) * width, 8)
              if m is C2fAttn: # set 1) embed channels and 2) num heads
                  args[1] = make_divisible(min(args[1], max_channels // 2) * width, 8)
ultralytics/utils/callbacks/platform.py CHANGED
@@ -8,8 +8,9 @@ from concurrent.futures import ThreadPoolExecutor
  from pathlib import Path
  from time import time
 
- from ultralytics.utils import ENVIRONMENT, GIT, LOGGER, PYTHON_VERSION, RANK, SETTINGS, TESTS_RUNNING
+ from ultralytics.utils import ENVIRONMENT, GIT, LOGGER, PYTHON_VERSION, RANK, SETTINGS, TESTS_RUNNING, colorstr
 
+ PREFIX = colorstr("Platform: ")
  _last_upload = 0 # Rate limit model uploads
  _console_logger = None # Global console logger instance
  _system_logger = None # Cached system logger instance
@@ -31,6 +32,34 @@ except (AssertionError, ImportError):
  _api_key = None
 
 
+ def _interp_plot(plot, n=101):
+     """Interpolate plot curve data from 1000 to n points to reduce storage size."""
+     import numpy as np
+
+     if not plot.get("x") or not plot.get("y"):
+         return plot # No interpolation needed (e.g., confusion_matrix)
+
+     x, y = np.array(plot["x"]), np.array(plot["y"])
+     if len(x) <= n:
+         return plot # Already small enough
+
+     # New x values (101 points gives clean 0.01 increments: 0, 0.01, 0.02, ..., 1.0)
+     x_new = np.linspace(x[0], x[-1], n)
+
+     # Interpolate y values (handle both 1D and 2D arrays)
+     if y.ndim == 1:
+         y_new = np.interp(x_new, x, y)
+     else:
+         y_new = np.array([np.interp(x_new, x, yi) for yi in y])
+
+     # Also interpolate ap if present (for PR curves)
+     result = {**plot, "x": x_new.tolist(), "y": y_new.tolist()}
+     if "ap" in plot:
+         result["ap"] = plot["ap"] # Keep AP values as-is (per-class scalars)
+
+     return result
+
+
  def _send(event, data, project, name):
      """Send event to Platform endpoint."""
      try:
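The new _interp_plot helper is essentially a guarded np.interp downsample of curve data before upload; a rough sketch of the core step on synthetic data (values made up for illustration):

    import numpy as np

    x = np.linspace(0, 1, 1000)            # original dense curve axis
    y = np.sqrt(x)                         # some metric plotted against it
    x_new = np.linspace(x[0], x[-1], 101)  # 0, 0.01, ..., 1.0
    y_new = np.interp(x_new, x, y)
    print(x_new.shape, y_new.shape)        # (101,) (101,)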
@@ -75,7 +104,8 @@ def _upload_model(model_path, project, name):
              timeout=600, # 10 min timeout for large models
          ).raise_for_status()
 
-         LOGGER.info(f"Platform: Model uploaded to '{project}'")
+         # url = f"https://alpha.ultralytics.com/{project}/{name}"
+         # LOGGER.info(f"{PREFIX}Model uploaded to {url}")
          return data.get("gcsPath")
 
      except Exception as e:
@@ -150,7 +180,8 @@ def on_pretrain_routine_start(trainer):
      _last_upload = time()
 
      project, name = str(trainer.args.project), str(trainer.args.name or "train")
-     LOGGER.info(f"Platform: Streaming to project '{project}' as '{name}'")
+     url = f"https://alpha.ultralytics.com/{project}/{name}"
+     LOGGER.info(f"{PREFIX}Streaming to {url}")
 
      # Create callback to send console output to Platform
      def send_console_output(content, line_count, chunk_id):
@@ -272,11 +303,15 @@ def on_train_end(trainer):
      model_size = Path(trainer.best).stat().st_size
      model_path = _upload_model(trainer.best, project, name)
 
-     # Collect plots from trainer and validator
-     plots = [info["data"] for info in getattr(trainer, "plots", {}).values() if info.get("data")]
-     plots += [
-         info["data"] for info in getattr(getattr(trainer, "validator", None), "plots", {}).values() if info.get("data")
-     ]
+     # Collect plots from trainer and validator, deduplicating by type
+     plots_by_type = {}
+     for info in getattr(trainer, "plots", {}).values():
+         if info.get("data") and info["data"].get("type"):
+             plots_by_type[info["data"]["type"]] = info["data"]
+     for info in getattr(getattr(trainer, "validator", None), "plots", {}).values():
+         if info.get("data") and info["data"].get("type"):
+             plots_by_type.setdefault(info["data"]["type"], info["data"]) # Don't overwrite trainer plots
+     plots = [_interp_plot(p) for p in plots_by_type.values()] # Interpolate curves to reduce size
 
      # Get class names
      names = getattr(getattr(trainer, "validator", None), "names", None) or (trainer.data or {}).get("names")
@@ -298,7 +333,8 @@
          project,
          name,
      )
-     LOGGER.info(f"Platform: Training complete, results uploaded to '{project}' ({len(plots)} plots)")
+     url = f"https://alpha.ultralytics.com/{project}/{name}"
+     LOGGER.info(f"{PREFIX}View results at {url}")
 
 
  callbacks = (
ultralytics/utils/logger.py CHANGED
@@ -330,14 +330,19 @@ class SystemLogger:
 
      def _init_nvidia(self):
          """Initialize NVIDIA GPU monitoring with pynvml."""
+         if MACOS:
+             return False
+
          try:
-             assert not MACOS
              check_requirements("nvidia-ml-py>=12.0.0")
              self.pynvml = __import__("pynvml")
              self.pynvml.nvmlInit()
              return True
          except Exception as e:
-             LOGGER.warning(f"SystemLogger NVML init failed: {e}")
+             import torch
+
+             if torch.cuda.is_available():
+                 LOGGER.warning(f"SystemLogger NVML init failed: {e}")
              return False
 
      def get_metrics(self, rates=False):
ultralytics/utils/metrics.py CHANGED
@@ -568,7 +568,6 @@ class ConfusionMatrix(DataExportMixin):
          fig.savefig(plot_fname, dpi=250)
          plt.close(fig)
          if on_plot:
-             # Pass confusion matrix data for interactive plotting (raw counts only, normalization done on frontend)
              on_plot(plot_fname, {"type": "confusion_matrix", "matrix": self.matrix.tolist()})
 
      def print(self):
@@ -663,7 +662,8 @@ def plot_pr_curve(
      plt.close(fig)
      if on_plot:
          # Pass PR curve data for interactive plotting (class names stored at model level)
-         on_plot(save_dir, {"type": "pr_curve", "x": px.tolist(), "y": py.tolist(), "ap": ap.tolist()})
+         # Transpose py to match other curves: y[class][point] format
+         on_plot(save_dir, {"type": "pr_curve", "x": px.tolist(), "y": py.T.tolist(), "ap": ap.tolist()})
 
 
  @plt_settings()
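The transpose assumes py is stacked with one column per class, so .T yields the y[class][point] layout used by the other curves; a small illustration with hypothetical sizes:

    import numpy as np

    px = np.linspace(0, 1, 1000)      # x axis shared by all classes
    py = np.random.rand(1000, 3)      # hypothetical stack: 1000 points x 3 classes
    y = py.T.tolist()                 # 3 lists of 1000 points -> y[class][point]
    print(len(y), len(y[0]))          # 3 1000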
ultralytics/utils/tuner.py CHANGED
@@ -87,12 +87,23 @@
      # Put the model in ray store
      task = model.task
      model_in_store = ray.put(model)
+     base_name = train_args.get("name", "tune")
 
      def _tune(config):
          """Train the YOLO model with the specified hyperparameters and return results."""
          model_to_train = ray.get(model_in_store) # get the model from ray store for tuning
          model_to_train.reset_callbacks()
          config.update(train_args)
+
+         # Set trial-specific name for W&B logging
+         try:
+             trial_id = tune.get_trial_id() # Get current trial ID (e.g., "2c2fc_00000")
+             trial_suffix = trial_id.split("_")[-1] if "_" in trial_id else trial_id
+             config["name"] = f"{base_name}_{trial_suffix}"
+         except Exception:
+             # Not in Ray Tune context or error getting trial ID, use base name
+             config["name"] = base_name
+
          results = model_to_train.train(**config)
          return results.results_dict
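The trial-name logic is plain string handling on whatever tune.get_trial_id() returns; using the ID format quoted in the comment above:

    base_name = "tune"
    trial_id = "2c2fc_00000"  # example format from the comment above
    trial_suffix = trial_id.split("_")[-1] if "_" in trial_id else trial_id
    print(f"{base_name}_{trial_suffix}")  # tune_00000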