dgenerate-ultralytics-headless 8.3.225__py3-none-any.whl → 8.3.226__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: dgenerate-ultralytics-headless
- Version: 8.3.225
+ Version: 8.3.226
  Summary: Automatically built Ultralytics package with python-opencv-headless dependency instead of python-opencv
  Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
  Maintainer-email: Ultralytics <hello@ultralytics.com>
@@ -160,7 +160,7 @@ Request an Enterprise License for commercial use at [Ultralytics Licensing](http
  <img src="https://github.com/ultralytics/assets/raw/main/social/logo-transparent.png" width="2%" alt="space">
  <a href="https://twitter.com/ultralytics"><img src="https://github.com/ultralytics/assets/raw/main/social/logo-social-twitter.png" width="2%" alt="Ultralytics Twitter"></a>
  <img src="https://github.com/ultralytics/assets/raw/main/social/logo-transparent.png" width="2%" alt="space">
- <a href="https://youtube.com/ultralytics?sub_confirmation=1"><img src="https://github.com/ultralytics/assets/raw/main/social/logo-social-youtube.png" width="2%" alt="Ultralytics YouTube"></a>
+ <a href="https://www.youtube.com/ultralytics?sub_confirmation=1"><img src="https://github.com/ultralytics/assets/raw/main/social/logo-social-youtube.png" width="2%" alt="Ultralytics YouTube"></a>
  <img src="https://github.com/ultralytics/assets/raw/main/social/logo-transparent.png" width="2%" alt="space">
  <a href="https://www.tiktok.com/@ultralytics"><img src="https://github.com/ultralytics/assets/raw/main/social/logo-social-tiktok.png" width="2%" alt="Ultralytics TikTok"></a>
  <img src="https://github.com/ultralytics/assets/raw/main/social/logo-transparent.png" width="2%" alt="space">
@@ -396,7 +396,7 @@ For bug reports and feature requests related to Ultralytics software, please vis
  <img src="https://github.com/ultralytics/assets/raw/main/social/logo-transparent.png" width="3%" alt="space">
  <a href="https://twitter.com/ultralytics"><img src="https://github.com/ultralytics/assets/raw/main/social/logo-social-twitter.png" width="3%" alt="Ultralytics Twitter"></a>
  <img src="https://github.com/ultralytics/assets/raw/main/social/logo-transparent.png" width="3%" alt="space">
- <a href="https://youtube.com/ultralytics?sub_confirmation=1"><img src="https://github.com/ultralytics/assets/raw/main/social/logo-social-youtube.png" width="3%" alt="Ultralytics YouTube"></a>
+ <a href="https://www.youtube.com/ultralytics?sub_confirmation=1"><img src="https://github.com/ultralytics/assets/raw/main/social/logo-social-youtube.png" width="3%" alt="Ultralytics YouTube"></a>
  <img src="https://github.com/ultralytics/assets/raw/main/social/logo-transparent.png" width="3%" alt="space">
  <a href="https://www.tiktok.com/@ultralytics"><img src="https://github.com/ultralytics/assets/raw/main/social/logo-social-tiktok.png" width="3%" alt="Ultralytics TikTok"></a>
  <img src="https://github.com/ultralytics/assets/raw/main/social/logo-transparent.png" width="3%" alt="space">
@@ -1,18 +1,18 @@
- dgenerate_ultralytics_headless-8.3.225.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ dgenerate_ultralytics_headless-8.3.226.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
  tests/__init__.py,sha256=bCox_hLdGRFYGLb2kd722VdNP2zEXNYNuLLYtqZSrbw,804
  tests/conftest.py,sha256=mOy9lGpNp7lk1hHl6_pVE0f9cU-72gnkoSm4TO-CNZU,2318
  tests/test_cli.py,sha256=GhIFHi-_WIJpDgoGNRi0DnjbfwP1wHbklBMnkCM-P_4,5464
- tests/test_cuda.py,sha256=6zUSwu3xaYiO3RRNyDkNsuyeq47b1e9f6JNhPZVeDL4,8142
+ tests/test_cuda.py,sha256=d5Pcm-YBDhOPsnQxNtH_WIZQkwr_kYD0j1KUvX6nZOM,8253
  tests/test_engine.py,sha256=ER2DsHM0GfUG99AH1Q-Lpm4x36qxkfOzxmH6uYM75ds,5722
  tests/test_exports.py,sha256=OMLio2uUhyqo8D8qB5xUwmk7Po2rMeAACRc8WYoxbj4,13147
  tests/test_integrations.py,sha256=6QgSh9n0J04RdUYz08VeVOnKmf4S5MDEQ0chzS7jo_c,6220
- tests/test_python.py,sha256=YFeBN7Y3OuyTHSebnezxmPFNCuUj8ek-rhWb7NfiG3I,27730
+ tests/test_python.py,sha256=jhnN-Oie3euE3kfHzUqvnadkWOsQyvFmdmEcse9Rsto,29253
  tests/test_solutions.py,sha256=j_PZZ5tMR1Y5ararY-OTXZr1hYJ7vEVr8H3w4O1tbQs,14153
- ultralytics/__init__.py,sha256=qtzb2uGnup4OSdjyjmNzjoTIaRKxrLjMemA_67GBWuQ,1302
+ ultralytics/__init__.py,sha256=WY251vQ-GY1axMzoZx1zfHfVdJaGRKuXmkcDb_Sc6es,1302
  ultralytics/py.typed,sha256=la67KBlbjXN-_-DfGNcdOcjYumVpKG_Tkw-8n5dnGB4,8
  ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
  ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
- ultralytics/cfg/__init__.py,sha256=UWWIc3ApN9T36C421dnNlkL5yv3o5_JeiAeYyJt_tq0,39820
+ ultralytics/cfg/__init__.py,sha256=1sSIzMkJuPvRa3QXOj1NP1LuoTqHVWl85JPc4WTmpmU,40200
  ultralytics/cfg/default.yaml,sha256=KKENSHolDSto1HJVGjBvTXvz9ae-XMcYRzKrjU3QfZc,8912
  ultralytics/cfg/datasets/Argoverse.yaml,sha256=J4ItoUlE_EiYTmp1DFKYHfbqHkj8j4wUtRJQhaMIlBM,3275
  ultralytics/cfg/datasets/DOTAv1.5.yaml,sha256=VZ_KKFX0H2YvlFVJ8JHcLWYBZ2xiQ6Z-ROSTiKWpS7c,1211
@@ -109,12 +109,12 @@ ultralytics/cfg/trackers/botsort.yaml,sha256=tRxC-qT4Wz0mLn5x7ZEwrqgGKrmTDVY7gMg
  ultralytics/cfg/trackers/bytetrack.yaml,sha256=7LS1ObP5u7BUFcmeY6L2m3bRuPUktnpJspFKd_ElVWc,908
  ultralytics/data/__init__.py,sha256=ToR8zl0JhBHy42ZvV7zIwO_F3lbi5oNlGQNPK3dlddU,644
  ultralytics/data/annotator.py,sha256=kbfSPBesKEVK6ys3dilTdMh7rCKyp0xV7tGQeEDbpWI,2985
- ultralytics/data/augment.py,sha256=5Z_hG3X7rwL_MkVXS9n0nyDzQBKp0zunurDhKlx_Ytc,132275
+ ultralytics/data/augment.py,sha256=2yyeKIABTqgIf7_spUqGR846kaw40TDlll36CYz8Y1Q,133160
  ultralytics/data/base.py,sha256=2sJmh1VUCvxjfdvEAQldK9PLVsw-pDVjcyo8gCLlbuo,19575
  ultralytics/data/build.py,sha256=nv59cOR5oG2hUziR6KDo-pjbW0OmVLipnM2-OS4gpJU,17060
  ultralytics/data/converter.py,sha256=_54Xw78TLRswJ9nUVCd2lfEP5riQ82rM0_g_Gad4PAI,31893
  ultralytics/data/dataset.py,sha256=L5QYgic_B1e1zffgRA5lqKDd5PQuMDg6PZVd-RTUA7E,36523
- ultralytics/data/loaders.py,sha256=P3fooutMZyii5VYHEAxghFfDJLrYdLLwnubPnx_K7qQ,31652
+ ultralytics/data/loaders.py,sha256=d2FDVDFrD_wX58TLRhFav63B0v0jfbGbcgfJ2qprpZM,31651
  ultralytics/data/split.py,sha256=HpR0ltf5oN1DpZstavFbBFC1YdpGPaATXxDOcAMwOqc,5101
  ultralytics/data/split_dota.py,sha256=Qp9vGB2lzb5fQOrpNupKc8KN9ulqZoco9d4gRcx7JZk,12873
  ultralytics/data/utils.py,sha256=HGwqyLVw-_3Mx48UqVMEF4QBCMYEz7oLxGsrCMEqTqw,36836
@@ -123,12 +123,12 @@ ultralytics/data/scripts/get_coco.sh,sha256=UuJpJeo3qQpTHVINeOpmP0NYmg8PhEFE3A8J
  ultralytics/data/scripts/get_coco128.sh,sha256=qmRQl_hOKrsdHrTrnyQuFIH01oDz3lfaz138OgGfLt8,650
  ultralytics/data/scripts/get_imagenet.sh,sha256=hr42H16bM47iT27rgS7MpEo-GeOZAYUQXgr0B2cwn48,1705
  ultralytics/engine/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
- ultralytics/engine/exporter.py,sha256=kfg25_7NU8h4adaL7SChsM5n0e0-PhyFCE76GINmHUM,69805
- ultralytics/engine/model.py,sha256=fY7-LTiLcWQ0LTR075X88dLykTmHcVr2B4NmA6RZfgo,53079
- ultralytics/engine/predictor.py,sha256=OJJnB9ym056ZhHIbh8M3OfKHiXbJHF_GCVcQxAmvMOU,22719
+ ultralytics/engine/exporter.py,sha256=hqcZ0CZId8HrqHdvkXeiyP0Y6KtvLszcOomt07UPKIg,67856
+ ultralytics/engine/model.py,sha256=s-exI_DPWaMkyba8oK6_UP0VUz0MT_52B7--r6wYf84,53186
+ ultralytics/engine/predictor.py,sha256=eu0sVo3PTt4zKH6SntzdO1E8cgFj9PFOJrfQO6VNqCE,22698
  ultralytics/engine/results.py,sha256=j8MLEM4sgo1EDVTjkmLIag2MqfZbEBUMuzPJfYr7tWE,70906
- ultralytics/engine/trainer.py,sha256=8CNNNwpWOsYbbCZHRRvWm-tMeEmE-WFaf8docXqvaU0,44065
- ultralytics/engine/tuner.py,sha256=1p-QIgtMni7Lct5dULcibMpFMar-OMKmTxN_kGjq8KQ,21574
+ ultralytics/engine/trainer.py,sha256=xzsouV6UX259WT3n_in8GoXblmmlrzyYpD6fQt_zBm0,45214
+ ultralytics/engine/tuner.py,sha256=nWFTYjDYXsl7DVnaLVaS0pJRVQaGek7kPBlMmtXvzOI,21555
  ultralytics/engine/validator.py,sha256=mG9u7atDw7mkCmoB_JjA4pM9m41vF5U7hPLRpBg8QFA,17528
  ultralytics/hub/__init__.py,sha256=Z0K_E00jzQh90b18q3IDChwVmTvyIYp6C00sCV-n2F8,6709
  ultralytics/hub/auth.py,sha256=ANzCeZA7lUzTWc_sFHbDuuyBh1jLl2sTpHkoUbIkFYE,6254
@@ -137,7 +137,7 @@ ultralytics/hub/utils.py,sha256=jknll06yNaAxKyOqKliILJv1XOU39WJWOGG_DyFUh20,6353
  ultralytics/hub/google/__init__.py,sha256=r06Ld4TuZEBOqg4iagpeN-eMAkg43T2OTxOH4_7IfkM,8445
  ultralytics/models/__init__.py,sha256=ljus_u1CIuP99k9fu6sCtzIeFZ-TCE28NZ8kefZHFNY,309
  ultralytics/models/fastsam/__init__.py,sha256=Ku89Fy_X8ok3YPEUajjUZ5i4O08jdJMjJHt-3Z99Frk,231
- ultralytics/models/fastsam/model.py,sha256=IinjSMV1BWHt7rCWZcvJaHoIuTZT8XYJ7cz2LYRs2YE,3424
+ ultralytics/models/fastsam/model.py,sha256=eRGZ5q4DZ0MK-G9pcoBlJqde-L45nDqTYcFGFC5EbTs,3431
  ultralytics/models/fastsam/predict.py,sha256=ZIVgdBk_T-CtlAYpm08TSUtyOd2m-tapav7YYKALmT0,8623
  ultralytics/models/fastsam/utils.py,sha256=de9ieh4pBUuTNh5HTiNdRpWZhXAaSfNo3R1FNMt2GOE,879
  ultralytics/models/fastsam/val.py,sha256=SHWCc9tH07IRCYHMQu6cr44EiwOk4-Oz6c_kDaRNbps,2027
@@ -176,7 +176,7 @@ ultralytics/models/yolo/classify/val.py,sha256=ZQusqW7s8Qbb6CZLFtAcsExNN9csUOfwr
  ultralytics/models/yolo/detect/__init__.py,sha256=GIRsLYR-kT4JJx7lh4ZZAFGBZj0aebokuU0A7JbjDVA,257
  ultralytics/models/yolo/detect/predict.py,sha256=xzU-uAGRH5DWd2x20kLxBmmoj7kKNvT4x2VcL4Y4upw,5362
  ultralytics/models/yolo/detect/train.py,sha256=5xDl8M_DrK7S8txW4IoRcdtiVaz-LvoMMr6VTWYFtyU,10477
- ultralytics/models/yolo/detect/val.py,sha256=UzyxHX7Hg9m09S-v0dsV57tudK3N1_r__Ciaz6Fl3bQ,22391
+ ultralytics/models/yolo/detect/val.py,sha256=b4swS4fEGEFkNzXAUD8OKwS9o0tBg9kU0UGPlTlYndU,22384
  ultralytics/models/yolo/obb/__init__.py,sha256=tQmpG8wVHsajWkZdmD6cjGohJ4ki64iSXQT8JY_dydo,221
  ultralytics/models/yolo/obb/predict.py,sha256=nOq_zVjkHto8uqFJ4FkjaOvKIm0Liw3nsBCCb9Bzcrc,2865
  ultralytics/models/yolo/obb/train.py,sha256=qtBjwOHOq0oQ9mK0mOtnUrXAQ5UCUrntKq_Z0-oCBHo,3438
@@ -210,7 +210,7 @@ ultralytics/nn/modules/transformer.py,sha256=WAyTfdMFdD8YdSkURuYQP_eoc6i-IWWzkyH
  ultralytics/nn/modules/utils.py,sha256=tkUDhTXjmW-YMvTGvM4RFUVtzh5k2c33i3TWmzaWWtI,6067
  ultralytics/solutions/__init__.py,sha256=Jj7OcRiYjHH-e104H4xTgjjR5W6aPB4mBRndbaSPmgU,1209
  ultralytics/solutions/ai_gym.py,sha256=7ggUIkClVtvZG_nzoZCoZ_wlDfr-Da2U7ZhECaHe80I,5166
- ultralytics/solutions/analytics.py,sha256=wD54NU5ZUfXJW7oXlC1_FLmxi9FDDPLyNM3zf1oPLEA,12800
+ ultralytics/solutions/analytics.py,sha256=QFv_y-iCY7M0g4hlGghoqF9fffVXdxUrmPib1B-WXso,12863
  ultralytics/solutions/config.py,sha256=AwnmZbMwg44Nz-wTy99Xef509mG6xbIO_JAzodjbhSU,5391
  ultralytics/solutions/distance_calculation.py,sha256=_sdG2J7tomc4AmryX0jRie2mqEIWKUBBBXQKu5HqhuY,5901
  ultralytics/solutions/heatmap.py,sha256=DUyV5UFsOwZ8ArN4BtW8Vm3ps8_VZXc6VP0uiKyGDWY,5481
@@ -238,18 +238,18 @@ ultralytics/trackers/utils/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6D
  ultralytics/trackers/utils/gmc.py,sha256=pz1dmNSLTWjHh6PJn4WpqGSDuiKFu1NXdWQI-_TC9xY,13978
  ultralytics/trackers/utils/kalman_filter.py,sha256=_qTZD8_zLNSLu5NjVepzEhgNB7q7c1XIOV6TVmD4LKk,21597
  ultralytics/trackers/utils/matching.py,sha256=7lyDXEw6w5iEKeb9CARlAoPbvT35VnCc9hkjD6ZcIqs,7144
- ultralytics/utils/__init__.py,sha256=sTNIfChWh6UjNYIVYv6qOZsLG_R8qLbf1Zieq5EMzlE,53257
+ ultralytics/utils/__init__.py,sha256=WaEgRWwCVPZxoiiFis4QtWf54GxSfqjO5sps6k28e_Q,53233
  ultralytics/utils/autobatch.py,sha256=jiE4m_--H9UkXFDm_FqzcZk_hSTCGpS72XdVEKgZwAo,5114
  ultralytics/utils/autodevice.py,sha256=Ukj6OKoycI4psiRw0mzfSqiLhtZ0uQ3pR8tbSlk8JEU,8825
- ultralytics/utils/benchmarks.py,sha256=5EOwp7jWe2auaK3ilMQspXBFyD3q4_DMVOgNnHeJL50,32540
- ultralytics/utils/checks.py,sha256=sqt-JPlkel7oExYrT12WmaIHyUcZLDMlsW10k_JrJMM,36137
- ultralytics/utils/cpu.py,sha256=gLAhGFfojUmiRBF4_rSJwEO5oxavU_cQZyyJLFqiLXI,3629
+ ultralytics/utils/benchmarks.py,sha256=zDKrMJV-GDhFuqu0BaEZVAiPYcmxpCmJuC4vMStVTIg,32528
+ ultralytics/utils/checks.py,sha256=L-Swpu7CDEaf8ozipCIzw3zwRiN2js6TZPmm6NZFEBA,36212
+ ultralytics/utils/cpu.py,sha256=OksKOlX93AsbSsFuoYvLXRXgpkOibrZSwQyW6lipt4Q,3493
  ultralytics/utils/dist.py,sha256=hOuY1-unhQAY-uWiZw3LWw36d1mqJuYK75NdlwB4oKE,4131
  ultralytics/utils/downloads.py,sha256=pUzi3N6-L--aLUbyIv2lU3zYtL84eSD-Z-PycwPLwuA,22883
  ultralytics/utils/errors.py,sha256=ZF552GVUGFOpxHDgGY2Yp_hJldGRZ821lYmn9TzZOC0,1570
  ultralytics/utils/events.py,sha256=6vqs_iSxoXIhQ804sOjApNZmXwNW9FUFtjaHPY8ta10,4665
  ultralytics/utils/files.py,sha256=Zw3pQEe1vz7oGBIb1c_umaVGJDvWn0z5zcPTFYcAF94,8125
- ultralytics/utils/git.py,sha256=cGxHm8MQeDkcqhf1uLOGMXSBfWiAGXty4XSb5_oN4Ig,5498
+ ultralytics/utils/git.py,sha256=O12SgwSh1JHizR0H_pOAyMY_qRpSe5I4cc5nAoEQ-zE,5489
  ultralytics/utils/instance.py,sha256=11mhefvTI9ftMqSirXuiViAi0Fxlo6v84qvNxfRNUoE,18862
  ultralytics/utils/logger.py,sha256=kXiTowpfg4_SbH6eSlS4NqOvKN8zFJ19c50xU5prj8s,15090
  ultralytics/utils/loss.py,sha256=R1uC00IlXVHFWc8I8ngjtfRfuUj_sT_Zw59OlYKwmFY,39781
@@ -260,8 +260,8 @@ ultralytics/utils/patches.py,sha256=6WDGUokiND76iDbLeul_6Ny-bvvFcy6Bms5f9MkxhfQ,
  ultralytics/utils/plotting.py,sha256=FoGnXc52IvsVtlDvS8Ffee-SszwpepAvrYrusTn21Fs,48283
  ultralytics/utils/tal.py,sha256=w7oi6fp0NmL6hHh-yvCCX1cBuuB4JuX7w1wiR4_SMZs,20678
  ultralytics/utils/torch_utils.py,sha256=o6KMukW6g-mUYrVMPHb5qkcGbQIk8aMMnVrOrsJoL1Q,40220
- ultralytics/utils/tqdm.py,sha256=ifBAVM1IACZb4H6oo5FoHCV6j4vyOZz5q02a7vc6jag,16123
- ultralytics/utils/triton.py,sha256=qC6AbHH1zYBS5itrikrK_frwCriSfOU-vZV4FED5Ex4,5383
+ ultralytics/utils/tqdm.py,sha256=lYZNsunfjUX1WlCh0_bMu9OglK614dUtLO-7Db_uWIw,16029
+ ultralytics/utils/triton.py,sha256=2wZil1PfvOpaBymTzzP8Da6Aam-2MTLumO3uBmTE5FY,5406
  ultralytics/utils/tuner.py,sha256=rN8gFWnQOJFtrGlFcvOo0Eah9dEVFx0nFkpTGrlewZA,6861
  ultralytics/utils/callbacks/__init__.py,sha256=hzL63Rce6VkZhP4Lcim9LKjadixaQG86nKqPhk7IkS0,242
  ultralytics/utils/callbacks/base.py,sha256=floD31JHqHpiVabQiE76_hzC_j7KjtL4w_czkD1bLKc,6883
@@ -279,8 +279,8 @@ ultralytics/utils/export/__init__.py,sha256=Cfh-PwVfTF_lwPp-Ss4wiX4z8Sm1XRPklsqd
  ultralytics/utils/export/engine.py,sha256=23-lC6dNsmz5vprSJzaN7UGNXrFlVedNcqhlOH_IXes,9956
  ultralytics/utils/export/imx.py,sha256=9UPA4CwTPADzvJx9dOsh_8fQ-LMeqG7eI9EYIn5ojkc,11621
  ultralytics/utils/export/tensorflow.py,sha256=PyAp0_rXSUcXiqV2RY0H9b_-oFaZ7hZBiSM42X53t0Q,9374
- dgenerate_ultralytics_headless-8.3.225.dist-info/METADATA,sha256=rZ5Ae7uxtG0mmCjf9TFuZd2TM_8xsxi9KPpPWBMdLSc,38764
- dgenerate_ultralytics_headless-8.3.225.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- dgenerate_ultralytics_headless-8.3.225.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
- dgenerate_ultralytics_headless-8.3.225.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
- dgenerate_ultralytics_headless-8.3.225.dist-info/RECORD,,
+ dgenerate_ultralytics_headless-8.3.226.dist-info/METADATA,sha256=MRE6VknAO-0hUfZ4ZIZflU_5E188hGc7BNl_LMPRyAo,38772
+ dgenerate_ultralytics_headless-8.3.226.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ dgenerate_ultralytics_headless-8.3.226.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ dgenerate_ultralytics_headless-8.3.226.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ dgenerate_ultralytics_headless-8.3.226.dist-info/RECORD,,
tests/test_cuda.py CHANGED
@@ -118,6 +118,7 @@ def test_train():
  results = YOLO(MODEL).train(
  data="coco8.yaml", imgsz=64, epochs=1, device=device, batch=15
  ) # requires imgsz>=64
+ results = YOLO(MODEL).train(data="coco8.yaml", imgsz=64, epochs=1, device=device, batch=15, val=False)
  visible = eval(os.environ["CUDA_VISIBLE_DEVICES"])
  assert visible == device, f"Passed GPUs '{device}', but used GPUs '{visible}'"
  assert (
tests/test_python.py CHANGED
@@ -385,7 +385,46 @@ def test_cfg_init():
  check_dict_alignment({"a": 1}, {"b": 2})
  copy_default_cfg()
  (Path.cwd() / DEFAULT_CFG_PATH.name.replace(".yaml", "_copy.yaml")).unlink(missing_ok=False)
- [smart_value(x) for x in {"none", "true", "false"}]
+
+ # Test smart_value() with comprehensive cases
+ # Test None conversion
+ assert smart_value("none") is None
+ assert smart_value("None") is None
+ assert smart_value("NONE") is None
+
+ # Test boolean conversion
+ assert smart_value("true") is True
+ assert smart_value("True") is True
+ assert smart_value("TRUE") is True
+ assert smart_value("false") is False
+ assert smart_value("False") is False
+ assert smart_value("FALSE") is False
+
+ # Test numeric conversion (ast.literal_eval)
+ assert smart_value("42") == 42
+ assert smart_value("-42") == -42
+ assert smart_value("3.14") == 3.14
+ assert smart_value("-3.14") == -3.14
+ assert smart_value("1e-3") == 0.001
+
+ # Test list/tuple conversion (ast.literal_eval)
+ assert smart_value("[1, 2, 3]") == [1, 2, 3]
+ assert smart_value("(1, 2, 3)") == (1, 2, 3)
+ assert smart_value("[640, 640]") == [640, 640]
+
+ # Test dict conversion (ast.literal_eval)
+ assert smart_value("{'a': 1, 'b': 2}") == {"a": 1, "b": 2}
+
+ # Test string fallback (when ast.literal_eval fails)
+ assert smart_value("some_string") == "some_string"
+ assert smart_value("path/to/file") == "path/to/file"
+ assert smart_value("hello world") == "hello world"
+
+ # Test that code injection is prevented (ast.literal_eval safety)
+ # These should return strings, not execute code
+ assert smart_value("__import__('os').system('ls')") == "__import__('os').system('ls')"
+ assert smart_value("eval('1+1')") == "eval('1+1')"
+ assert smart_value("exec('x=1')") == "exec('x=1')"


  def test_utils_init():
@@ -721,7 +760,7 @@ def test_grayscale(task: str, model: str, data: str, tmp_path) -> None:
  grayscale_data = tmp_path / f"{Path(data).stem}-grayscale.yaml"
  data = check_det_dataset(data)
  data["channels"] = 1 # add additional channels key for grayscale
- YAML.save(grayscale_data, data)
+ YAML.save(data=data, file=grayscale_data)
  # remove npy files in train/val splits if exists, might be created by previous tests
  for split in {"train", "val"}:
  for npy_file in (Path(data["path"]) / data[split]).glob("*.npy"):
ultralytics/__init__.py CHANGED
@@ -1,6 +1,6 @@
  # Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

- __version__ = "8.3.225"
+ __version__ = "8.3.226"

  import importlib
  import os
ultralytics/cfg/__init__.py CHANGED
@@ -2,6 +2,7 @@

  from __future__ import annotations

+ import ast
  import shutil
  import subprocess
  import sys
@@ -459,7 +460,9 @@ def _handle_deprecation(custom: dict) -> dict:
  return custom


- def check_dict_alignment(base: dict, custom: dict, e: Exception | None = None) -> None:
+ def check_dict_alignment(
+ base: dict, custom: dict, e: Exception | None = None, allowed_custom_keys: set | None = None
+ ) -> None:
  """Check alignment between custom and base configuration dictionaries, handling deprecated keys and providing error
  messages for mismatched keys.

@@ -467,6 +470,7 @@ def check_dict_alignment(base: dict, custom: dict, e: Exception | None = None) -
  base (dict): The base configuration dictionary containing valid keys.
  custom (dict): The custom configuration dictionary to be checked for alignment.
  e (Exception | None): Optional error instance passed by the calling function.
+ allowed_custom_keys (set | None): Optional set of additional keys that are allowed in the custom dictionary.

  Raises:
  SystemExit: If mismatched keys are found between the custom and base dictionaries.
@@ -486,7 +490,10 @@ def check_dict_alignment(base: dict, custom: dict, e: Exception | None = None) -
  """
  custom = _handle_deprecation(custom)
  base_keys, custom_keys = (frozenset(x.keys()) for x in (base, custom))
- if mismatched := [k for k in custom_keys if k not in base_keys]:
+ # Allow 'augmentations' as a valid custom parameter for custom Albumentations transforms
+ if allowed_custom_keys is None:
+ allowed_custom_keys = {"augmentations"}
+ if mismatched := [k for k in custom_keys if k not in base_keys and k not in allowed_custom_keys]:
  from difflib import get_close_matches

  string = ""
@@ -797,7 +804,7 @@ def smart_value(v: str) -> Any:

  Notes:
  - The function uses a case-insensitive comparison for boolean and None values.
- - For other types, it attempts to use Python's eval() function, which can be unsafe if used on untrusted input.
+ - For other types, it attempts to use Python's ast.literal_eval() function for safe evaluation.
  - If no conversion is possible, the original string is returned.
  """
  v_lower = v.lower()
@@ -809,7 +816,7 @@ def smart_value(v: str) -> Any:
  return False
  else:
  try:
- return eval(v)
+ return ast.literal_eval(v)
  except Exception:
  return v

ultralytics/data/augment.py CHANGED
@@ -1845,7 +1845,7 @@ class Albumentations:
  - Spatial transforms are handled differently and require special processing for bounding boxes.
  """

- def __init__(self, p: float = 1.0) -> None:
+ def __init__(self, p: float = 1.0, transforms: list | None = None) -> None:
  """Initialize the Albumentations transform object for YOLO bbox formatted parameters.

  This class applies various image augmentations using the Albumentations library, including Blur, Median Blur,
@@ -1854,6 +1854,7 @@ class Albumentations:

  Args:
  p (float): Probability of applying the augmentations. Must be between 0 and 1.
+ transforms (list, optional): List of custom Albumentations transforms. If None, uses default transforms.

  Attributes:
  p (float): Probability of applying the augmentations.
@@ -1870,6 +1871,11 @@ class Albumentations:
  >>> augmented_image = augmented["image"]
  >>> augmented_bboxes = augmented["bboxes"]

+ >>> # Custom transforms example
+ >>> import albumentations as A
+ >>> custom_transforms = [A.Blur(p=0.01), A.CLAHE(p=0.01)]
+ >>> transform = Albumentations(p=1.0, transforms=custom_transforms)
+
  Notes:
  - Requires Albumentations version 1.0.3 or higher.
  - Spatial transforms are handled differently to ensure bbox compatibility.
@@ -1931,16 +1937,20 @@ class Albumentations:
  "XYMasking",
  } # from https://albumentations.ai/docs/getting_started/transforms_and_targets/#spatial-level-transforms

- # Transforms
- T = [
- A.Blur(p=0.01),
- A.MedianBlur(p=0.01),
- A.ToGray(p=0.01),
- A.CLAHE(p=0.01),
- A.RandomBrightnessContrast(p=0.0),
- A.RandomGamma(p=0.0),
- A.ImageCompression(quality_range=(75, 100), p=0.0),
- ]
+ # Transforms, use custom transforms if provided, otherwise use defaults
+ T = (
+ [
+ A.Blur(p=0.01),
+ A.MedianBlur(p=0.01),
+ A.ToGray(p=0.01),
+ A.CLAHE(p=0.01),
+ A.RandomBrightnessContrast(p=0.0),
+ A.RandomGamma(p=0.0),
+ A.ImageCompression(quality_range=(75, 100), p=0.0),
+ ]
+ if transforms is None
+ else transforms
+ )

  # Compose transforms
  self.contains_spatial = any(transform.__class__.__name__ in spatial_transforms for transform in T)
@@ -2482,6 +2492,12 @@ def v8_transforms(dataset, imgsz: int, hyp: IterableSimpleNamespace, stretch: bo
  >>> hyp = IterableSimpleNamespace(mosaic=1.0, copy_paste=0.5, degrees=10.0, translate=0.2, scale=0.9)
  >>> transforms = v8_transforms(dataset, imgsz=640, hyp=hyp)
  >>> augmented_data = transforms(dataset[0])
+
+ >>> # With custom albumentations
+ >>> import albumentations as A
+ >>> augmentations = [A.Blur(p=0.01), A.CLAHE(p=0.01)]
+ >>> hyp.augmentations = augmentations
+ >>> transforms = v8_transforms(dataset, imgsz=640, hyp=hyp)
  """
  mosaic = Mosaic(dataset, imgsz=imgsz, p=hyp.mosaic)
  affine = RandomPerspective(
@@ -2519,7 +2535,7 @@ def v8_transforms(dataset, imgsz: int, hyp: IterableSimpleNamespace, stretch: bo
  pre_transform,
  MixUp(dataset, pre_transform=pre_transform, p=hyp.mixup),
  CutMix(dataset, pre_transform=pre_transform, p=hyp.cutmix),
- Albumentations(p=1.0),
+ Albumentations(p=1.0, transforms=getattr(hyp, "augmentations", None)),
  RandomHSV(hgain=hyp.hsv_h, sgain=hyp.hsv_s, vgain=hyp.hsv_v),
  RandomFlip(direction="vertical", p=hyp.flipud, flip_idx=flip_idx),
  RandomFlip(direction="horizontal", p=hyp.fliplr, flip_idx=flip_idx),
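For context, the new `transforms` hook shown above accepts a user-supplied list of Albumentations transforms and falls back to the built-in defaults when left as `None`. A minimal sketch, assuming the `albumentations` package is installed and using only the names introduced in this diff:

```python
# Sketch of the new hook added to ultralytics/data/augment.py in 8.3.226.
# Assumes albumentations is available; argument names follow the diff above.
import albumentations as A
from ultralytics.data.augment import Albumentations

custom_transforms = [A.Blur(p=0.01), A.CLAHE(p=0.01)]  # user-supplied Albumentations transforms
albu = Albumentations(p=1.0, transforms=custom_transforms)  # transforms=None keeps the default list
```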
ultralytics/data/loaders.py CHANGED
@@ -123,7 +123,7 @@ class LoadStreams:
  if urllib.parse.urlparse(s).hostname in {"www.youtube.com", "youtube.com", "youtu.be"}: # YouTube video
  # YouTube format i.e. 'https://www.youtube.com/watch?v=Jsn8D3aC840' or 'https://youtu.be/Jsn8D3aC840'
  s = get_best_youtube_url(s)
- s = eval(s) if s.isnumeric() else s # i.e. s = '0' local webcam
+ s = int(s) if s.isnumeric() else s # i.e. s = '0' local webcam
  if s == 0 and (IS_COLAB or IS_KAGGLE):
  raise NotImplementedError(
  "'source=0' webcam not supported in Colab and Kaggle notebooks. "
ultralytics/engine/exporter.py CHANGED
@@ -90,7 +90,6 @@ from ultralytics.utils import (
  MACOS,
  MACOS_VERSION,
  RKNN_CHIPS,
- ROOT,
  SETTINGS,
  TORCH_VERSION,
  WINDOWS,
@@ -101,13 +100,11 @@ from ultralytics.utils import (
  )
  from ultralytics.utils.checks import (
  check_imgsz,
- check_is_path_safe,
  check_requirements,
  check_version,
  is_intel,
  is_sudo_available,
  )
- from ultralytics.utils.downloads import get_github_assets, safe_download
  from ultralytics.utils.export import (
  keras2pb,
  onnx2engine,
@@ -165,12 +162,12 @@ def export_formats():

  def best_onnx_opset(onnx, cuda=False) -> int:
  """Return max ONNX opset for this torch version with ONNX fallback."""
- version = ".".join(TORCH_VERSION.split(".")[:2])
  if TORCH_2_4: # _constants.ONNX_MAX_OPSET first defined in torch 1.13
  opset = torch.onnx.utils._constants.ONNX_MAX_OPSET - 1 # use second-latest version for safety
  if cuda:
  opset -= 2 # fix CUDA ONNXRuntime NMS squeeze op errors
  else:
+ version = ".".join(TORCH_VERSION.split(".")[:2])
  opset = {
  "1.8": 12,
  "1.9": 12,
@@ -532,7 +529,7 @@ class Exporter:
  f[0] = self.export_torchscript()
  if engine: # TensorRT required before ONNX
  f[1] = self.export_engine(dla=dla)
- if onnx or ncnn: # ONNX
+ if onnx: # ONNX
  f[2] = self.export_onnx()
  if xml: # OpenVINO
  f[3] = self.export_openvino()
@@ -822,65 +819,31 @@ class Exporter:
  def export_ncnn(self, prefix=colorstr("NCNN:")):
  """Export YOLO model to NCNN format using PNNX https://github.com/pnnx/pnnx."""
  check_requirements("ncnn", cmds="--no-deps") # no deps to avoid installing opencv-python
+ check_requirements("pnnx")
  import ncnn
+ import pnnx

- LOGGER.info(f"\n{prefix} starting export with NCNN {ncnn.__version__}...")
+ LOGGER.info(f"\n{prefix} starting export with NCNN {ncnn.__version__} and PNNX {pnnx.__version__}...")
  f = Path(str(self.file).replace(self.file.suffix, f"_ncnn_model{os.sep}"))
- f_onnx = self.file.with_suffix(".onnx")

- name = Path("pnnx.exe" if WINDOWS else "pnnx") # PNNX filename
- pnnx = name if name.is_file() else (ROOT / name)
- if not pnnx.is_file():
- LOGGER.warning(
- f"{prefix} PNNX not found. Attempting to download binary file from "
- "https://github.com/pnnx/pnnx/.\nNote PNNX Binary file must be placed in current working directory "
- f"or in {ROOT}. See PNNX repo for full installation instructions."
- )
- system = "macos" if MACOS else "windows" if WINDOWS else "linux-aarch64" if ARM64 else "linux"
- try:
- release, assets = get_github_assets(repo="pnnx/pnnx")
- asset = next(x for x in assets if f"{system}.zip" in x)
- assert isinstance(asset, str), "Unable to retrieve PNNX repo assets" # i.e. pnnx-20250930-macos.zip
- LOGGER.info(f"{prefix} successfully found latest PNNX asset file {asset}")
- except Exception as e:
- release = "20250930"
- asset = f"pnnx-{release}-{system}.zip"
- LOGGER.warning(f"{prefix} PNNX GitHub assets not found: {e}, using default {asset}")
- unzip_dir = safe_download(f"https://github.com/pnnx/pnnx/releases/download/{release}/{asset}", delete=True)
- if check_is_path_safe(Path.cwd(), unzip_dir): # avoid path traversal security vulnerability
- shutil.move(src=unzip_dir / name, dst=pnnx) # move binary to ROOT
- pnnx.chmod(0o777) # set read, write, and execute permissions for everyone
- shutil.rmtree(unzip_dir) # delete unzip dir
-
- ncnn_args = [
- f"ncnnparam={f / 'model.ncnn.param'}",
- f"ncnnbin={f / 'model.ncnn.bin'}",
- f"ncnnpy={f / 'model_ncnn.py'}",
- ]
-
- pnnx_args = [
- f"pnnxparam={f / 'model.pnnx.param'}",
- f"pnnxbin={f / 'model.pnnx.bin'}",
- f"pnnxpy={f / 'model_pnnx.py'}",
- f"pnnxonnx={f / 'model.pnnx.onnx'}",
- ]
-
- cmd = [
- str(pnnx),
- str(f_onnx),
- *ncnn_args,
- *pnnx_args,
- f"fp16={int(self.args.half)}",
- f"device={self.device.type}",
- f'inputshape="{[self.args.batch, 3, *self.imgsz]}"',
- ]
+ ncnn_args = dict(
+ ncnnparam=(f / "model.ncnn.param").as_posix(),
+ ncnnbin=(f / "model.ncnn.bin").as_posix(),
+ ncnnpy=(f / "model_ncnn.py").as_posix(),
+ )
+
+ pnnx_args = dict(
+ ptpath=(f / "model.pt").as_posix(),
+ pnnxparam=(f / "model.pnnx.param").as_posix(),
+ pnnxbin=(f / "model.pnnx.bin").as_posix(),
+ pnnxpy=(f / "model_pnnx.py").as_posix(),
+ pnnxonnx=(f / "model.pnnx.onnx").as_posix(),
+ )
+
  f.mkdir(exist_ok=True) # make ncnn_model directory
- LOGGER.info(f"{prefix} running '{' '.join(cmd)}'")
- subprocess.run(cmd, check=True)
+ pnnx.export(self.model, inputs=self.im, **ncnn_args, **pnnx_args, fp16=self.args.half, device=self.device.type)

- # Remove debug files
- pnnx_files = [x.rsplit("=", 1)[-1] for x in pnnx_args]
- for f_debug in ("debug.bin", "debug.param", "debug2.bin", "debug2.param", *pnnx_files):
+ for f_debug in ("debug.bin", "debug.param", "debug2.bin", "debug2.param", *pnnx_args.values()):
  Path(f_debug).unlink(missing_ok=True)

  YAML.save(f / "metadata.yaml", self.metadata) # add metadata.yaml
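The user-facing export call is unchanged by this rewrite; what changes, per the hunks above, is that NCNN export now goes through the `pnnx` Python package (`pnnx.export(...)`) instead of locating or downloading a standalone PNNX binary, and no longer routes through an intermediate ONNX file. A hedged sketch of the typical call:

```python
# NCNN export sketch; in 8.3.226 the exporter installs the pnnx package via
# check_requirements("pnnx") and calls pnnx.export() internally instead of running a PNNX binary.
from ultralytics import YOLO

model = YOLO("yolo11n.pt")
model.export(format="ncnn", imgsz=640)  # writes a *_ncnn_model/ directory containing model.ncnn.param and model.ncnn.bin
```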
ultralytics/engine/model.py CHANGED
@@ -740,6 +740,7 @@ class Model(torch.nn.Module):
  - optimizer (str): Optimizer to use for training.
  - lr0 (float): Initial learning rate.
  - patience (int): Epochs to wait for no observable improvement for early stopping of training.
+ - augmentations (list[Callable]): List of augmentation functions to apply during training.

  Returns:
  (dict | None): Training metrics if available and training is successful; otherwise, None.
ultralytics/engine/predictor.py CHANGED
@@ -260,13 +260,12 @@ class BasePredictor:
  channels=getattr(self.model, "ch", 3),
  )
  self.source_type = self.dataset.source_type
- long_sequence = (
+ if (
  self.source_type.stream
  or self.source_type.screenshot
  or len(self.dataset) > 1000 # many images
  or any(getattr(self.dataset, "video_flag", [False]))
- )
- if long_sequence:
+ ): # long sequence
  import torchvision # noqa (import here triggers torchvision NMS use in nms.py)

  if not getattr(self, "stream", True): # videos
ultralytics/engine/trainer.py CHANGED
@@ -138,7 +138,12 @@ class BaseTrainer:
  if RANK in {-1, 0}:
  self.wdir.mkdir(parents=True, exist_ok=True) # make dir
  self.args.save_dir = str(self.save_dir)
- YAML.save(self.save_dir / "args.yaml", vars(self.args)) # save run args
+ # Save run args, serializing augmentations as reprs for resume compatibility
+ args_dict = vars(self.args).copy()
+ if args_dict.get("augmentations") is not None:
+ # Serialize Albumentations transforms as their repr strings for checkpoint compatibility
+ args_dict["augmentations"] = [repr(t) for t in args_dict["augmentations"]]
+ YAML.save(self.save_dir / "args.yaml", args_dict) # save run args
  self.last, self.best = self.wdir / "last.pt", self.wdir / "best.pt" # checkpoint paths
  self.save_period = self.args.save_period

@@ -464,10 +469,10 @@ class BaseTrainer:

  self.run_callbacks("on_train_epoch_end")
  if RANK in {-1, 0}:
- final_epoch = epoch + 1 >= self.epochs
  self.ema.update_attr(self.model, include=["yaml", "nc", "args", "names", "stride", "class_weights"])

  # Validation
+ final_epoch = epoch + 1 >= self.epochs
  if self.args.val or final_epoch or self.stopper.possible_stop or self.stop:
  self._clear_memory(threshold=0.5) # prevent VRAM spike
  self.metrics, self.fitness = self.validate()
@@ -626,7 +631,7 @@ class BaseTrainer:
  try:
  if self.args.task == "classify":
  data = check_cls_dataset(self.args.data)
- elif self.args.data.rsplit(".", 1)[-1] == "ndjson":
+ elif str(self.args.data).rsplit(".", 1)[-1] == "ndjson":
  # Convert NDJSON to YOLO format
  import asyncio

@@ -635,7 +640,7 @@ class BaseTrainer:
  yaml_path = asyncio.run(convert_ndjson_to_yolo(self.args.data))
  self.args.data = str(yaml_path)
  data = check_det_dataset(self.args.data)
- elif self.args.data.rsplit(".", 1)[-1] in {"yaml", "yml"} or self.args.task in {
+ elif str(self.args.data).rsplit(".", 1)[-1] in {"yaml", "yml"} or self.args.task in {
  "detect",
  "segment",
  "pose",
@@ -755,9 +760,9 @@ class BaseTrainer:
  n = len(metrics) + 2 # number of cols
  t = time.time() - self.train_time_start
  self.csv.parent.mkdir(parents=True, exist_ok=True) # ensure parent directory exists
- s = "" if self.csv.exists() else (("%s," * n % tuple(["epoch", "time", *keys])).rstrip(",") + "\n") # header
+ s = "" if self.csv.exists() else ("%s," * n % ("epoch", "time", *keys)).rstrip(",") + "\n"
  with open(self.csv, "a", encoding="utf-8") as f:
- f.write(s + ("%.6g," * n % tuple([self.epoch + 1, t, *vals])).rstrip(",") + "\n")
+ f.write(s + ("%.6g," * n % (self.epoch + 1, t, *vals)).rstrip(",") + "\n")

  def plot_metrics(self):
  """Plot metrics from a CSV file."""
@@ -806,10 +811,21 @@ class BaseTrainer:
  "batch",
  "device",
  "close_mosaic",
+ "augmentations",
  ): # allow arg updates to reduce memory or update device on resume
  if k in overrides:
  setattr(self.args, k, overrides[k])

+ # Handle augmentations parameter for resume: check if user provided custom augmentations
+ if ckpt_args.get("augmentations") is not None:
+ # Augmentations were saved in checkpoint as reprs but can't be restored automatically
+ LOGGER.warning(
+ "Custom Albumentations transforms were used in the original training run but are not "
+ "being restored. To preserve custom augmentations when resuming, you need to pass the "
+ "'augmentations' parameter again to get expected results. Example: \n"
+ f"model.train(resume=True, augmentations={ckpt_args['augmentations']})"
+ )
+

  except Exception as e:
  raise FileNotFoundError(
ultralytics/engine/tuner.py CHANGED
@@ -295,8 +295,7 @@ class Tuner:

  # Try MongoDB first if available
  if self.mongodb:
- results = self._get_mongodb_results(n)
- if results:
+ if results := self._get_mongodb_results(n):
  # MongoDB already sorted by fitness DESC, so results[0] is best
  x = np.array([[r["fitness"]] + [r["hyperparameters"][k] for k in self.space.keys()] for r in results])
  elif self.collection.name in self.collection.database.list_collection_names(): # Tuner started elsewhere
ultralytics/models/fastsam/model.py CHANGED
@@ -35,7 +35,7 @@ class FastSAM(Model):
  >>> results = model.predict("image.jpg", bboxes=[[100, 100, 200, 200]])
  """

- def __init__(self, model: str = "FastSAM-x.pt"):
+ def __init__(self, model: str | Path = "FastSAM-x.pt"):
  """Initialize the FastSAM model with the specified pre-trained weights."""
  if str(model) == "FastSAM.pt":
  model = "FastSAM-x.pt"
ultralytics/models/yolo/detect/val.py CHANGED
@@ -226,7 +226,7 @@ class DetectionValidator(BaseValidator):
  dist.gather_object(self.metrics.stats, gathered_stats, dst=0)
  merged_stats = {key: [] for key in self.metrics.stats.keys()}
  for stats_dict in gathered_stats:
- for key in merged_stats.keys():
+ for key in merged_stats:
  merged_stats[key].extend(stats_dict[key])
  gathered_jdict = [None] * dist.get_world_size()
  dist.gather_object(self.jdict, gathered_jdict, dst=0)
ultralytics/solutions/analytics.py CHANGED
@@ -9,6 +9,7 @@ import cv2
  import numpy as np

  from ultralytics.solutions.solutions import BaseSolution, SolutionResults # Import a parent class
+ from ultralytics.utils import plt_settings


  class Analytics(BaseSolution):
@@ -46,6 +47,7 @@ class Analytics(BaseSolution):
  >>> cv2.imshow("Analytics", results.plot_im)
  """

+ @plt_settings()
  def __init__(self, **kwargs: Any) -> None:
  """Initialize Analytics class with various chart types for visual data representation."""
  super().__init__(**kwargs)
ultralytics/utils/__init__.py CHANGED
@@ -722,14 +722,14 @@ def is_jetson(jetpack=None) -> bool:
  Returns:
  (bool): True if running on an NVIDIA Jetson device, False otherwise.
  """
- if jetson := ("tegra" in DEVICE_MODEL):
- if jetpack:
- try:
- content = open("/etc/nv_tegra_release").read()
- version_map = {4: "R32", 5: "R35", 6: "R36"} # JetPack to L4T major version mapping
- return jetpack in version_map and version_map[jetpack] in content
- except Exception:
- return False
+ jetson = "tegra" in DEVICE_MODEL
+ if jetson and jetpack:
+ try:
+ content = open("/etc/nv_tegra_release").read()
+ version_map = {4: "R32", 5: "R35", 6: "R36"} # JetPack to L4T major version mapping
+ return jetpack in version_map and version_map[jetpack] in content
+ except Exception:
+ return False
  return jetson

ultralytics/utils/benchmarks.py CHANGED
@@ -604,7 +604,7 @@ class ProfileModels:
  sess_options.intra_op_num_threads = 8 # Limit the number of threads
  sess = ort.InferenceSession(onnx_file, sess_options, providers=["CPUExecutionProvider"])

- input_data_dict = dict()
+ input_data_dict = {}
  for input_tensor in sess.get_inputs():
  input_type = input_tensor.type
  if self.check_dynamic(input_tensor.shape):
@@ -632,7 +632,7 @@ class ProfileModels:

  input_data = np.random.rand(*input_shape).astype(input_dtype)
  input_name = input_tensor.name
- input_data_dict.update({input_name: input_data})
+ input_data_dict[input_name] = input_data

  output_name = sess.get_outputs()[0].name

ultralytics/utils/checks.py CHANGED
@@ -2,6 +2,7 @@

  from __future__ import annotations

+ import ast
  import functools
  import glob
  import inspect
@@ -135,7 +136,7 @@ def check_imgsz(imgsz, stride=32, min_dim=1, max_dim=2, floor=0):
  elif isinstance(imgsz, (list, tuple)):
  imgsz = list(imgsz)
  elif isinstance(imgsz, str): # i.e. '640' or '[640,640]'
- imgsz = [int(imgsz)] if imgsz.isnumeric() else eval(imgsz)
+ imgsz = [int(imgsz)] if imgsz.isnumeric() else ast.literal_eval(imgsz)
  else:
  raise TypeError(
  f"'imgsz={imgsz}' is of invalid type {type(imgsz).__name__}. "
@@ -412,17 +413,19 @@ def check_requirements(requirements=ROOT.parent / "requirements.txt", exclude=()
  f"--index-strategy=unsafe-best-match --break-system-packages --prerelease=allow"
  )
  try:
- return subprocess.check_output(base, shell=True, stderr=subprocess.PIPE, text=True)
+ return subprocess.check_output(base, shell=True, stderr=subprocess.STDOUT, text=True)
  except subprocess.CalledProcessError as e:
- if e.stderr and "No virtual environment found" in e.stderr:
+ if e.output and "No virtual environment found" in e.output:
  return subprocess.check_output(
  base.replace("uv pip install", "uv pip install --system"),
  shell=True,
- stderr=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
  text=True,
  )
  raise
- return subprocess.check_output(f"pip install --no-cache-dir {packages} {commands}", shell=True, text=True)
+ return subprocess.check_output(
+ f"pip install --no-cache-dir {packages} {commands}", shell=True, stderr=subprocess.STDOUT, text=True
+ )

  s = " ".join(f'"{x}"' for x in pkgs) # console string
  if s:
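A recurring theme in this release is replacing `eval()` with `ast.literal_eval()` (the cfg `smart_value` and `check_imgsz` changes above, plus the stream-source and Triton metadata parsing elsewhere in this diff). A short standard-library sketch of why that matters:

```python
# ast.literal_eval() accepts Python literals but refuses to execute code,
# which is why it replaces eval() across this release.
import ast

print(ast.literal_eval("[640, 640]"))                    # [640, 640]
print(ast.literal_eval("{'batch': 16, 'half': True}"))   # {'batch': 16, 'half': True}
try:
    ast.literal_eval("__import__('os').system('ls')")    # rejected: function calls are not literals
except (ValueError, SyntaxError) as e:
    print(f"rejected: {e}")
```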
ultralytics/utils/cpu.py CHANGED
@@ -70,13 +70,9 @@ class CPUInfo:
  """Normalize and prettify a raw CPU descriptor string."""
  s = re.sub(r"\s+", " ", s.strip())
  s = s.replace("(TM)", "").replace("(tm)", "").replace("(R)", "").replace("(r)", "").strip()
- # Normalize common Intel pattern to 'Model Freq'
- m = re.search(r"(Intel.*?i\d[\w-]*) CPU @ ([\d.]+GHz)", s, re.I)
- if m:
+ if m := re.search(r"(Intel.*?i\d[\w-]*) CPU @ ([\d.]+GHz)", s, re.I):
  return f"{m.group(1)} {m.group(2)}"
- # Normalize common AMD Ryzen pattern to 'Model Freq'
- m = re.search(r"(AMD.*?Ryzen.*?[\w-]*) CPU @ ([\d.]+GHz)", s, re.I)
- if m:
+ if m := re.search(r"(AMD.*?Ryzen.*?[\w-]*) CPU @ ([\d.]+GHz)", s, re.I):
  return f"{m.group(1)} {m.group(2)}"
  return s

ultralytics/utils/git.py CHANGED
@@ -75,8 +75,7 @@ class GitRepo:
  def _ref_commit(self, ref: str) -> str | None:
  """Commit for ref (handles packed-refs)."""
  rf = self.gitdir / ref
- s = self._read(rf)
- if s:
+ if s := self._read(rf):
  return s
  pf = self.gitdir / "packed-refs"
  b = pf.read_bytes().splitlines() if pf.exists() else []
ultralytics/utils/tqdm.py CHANGED
@@ -148,7 +148,7 @@ class TQDM:
  self.start_t = time.time()
  self.last_rate = 0.0
  self.closed = False
- self.is_bytes = unit_scale and unit in ("B", "bytes")
+ self.is_bytes = unit_scale and unit in {"B", "bytes"}
  self.scales = (
  [(1073741824, "GB/s"), (1048576, "MB/s"), (1024, "KB/s")]
  if self.is_bytes
@@ -248,10 +248,8 @@ class TQDM:
  percent = (self.n / self.total) * 100
  n_str = self._format_num(self.n)
  t_str = self._format_num(self.total)
- if self.is_bytes:
- # Collapse suffix only when identical (e.g. "5.4/5.4MB")
- if n_str[-2] == t_str[-2]:
- n_str = n_str.rstrip("KMGTPB") # Remove unit suffix from current if different than total
+ if self.is_bytes and n_str[-2] == t_str[-2]: # Collapse suffix only when identical (e.g. "5.4/5.4MB")
+ n_str = n_str.rstrip("KMGTPB")
  else:
  percent = 0.0
  n_str, t_str = self._format_num(self.n), "?"
ultralytics/utils/triton.py CHANGED
@@ -2,6 +2,7 @@

  from __future__ import annotations

+ import ast
  from urllib.parse import urlsplit

  import numpy as np
@@ -83,7 +84,7 @@ class TritonRemoteModel:
  self.np_input_formats = [type_map[x] for x in self.input_formats]
  self.input_names = [x["name"] for x in config["input"]]
  self.output_names = [x["name"] for x in config["output"]]
- self.metadata = eval(config.get("parameters", {}).get("metadata", {}).get("string_value", "None"))
+ self.metadata = ast.literal_eval(config.get("parameters", {}).get("metadata", {}).get("string_value", "None"))

  def __call__(self, *inputs: np.ndarray) -> list[np.ndarray]:
  """Call the model with the given inputs and return inference results.