ultralytics 8.3.108__py3-none-any.whl → 8.3.110__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ultralytics/__init__.py +1 -1
- ultralytics/data/base.py +2 -1
- ultralytics/data/dataset.py +2 -0
- ultralytics/data/scripts/download_weights.sh +18 -0
- ultralytics/data/scripts/get_coco.sh +61 -0
- ultralytics/data/scripts/get_coco128.sh +18 -0
- ultralytics/data/scripts/get_imagenet.sh +52 -0
- ultralytics/data/utils.py +81 -1
- ultralytics/engine/exporter.py +3 -3
- ultralytics/engine/trainer.py +2 -2
- ultralytics/models/rtdetr/val.py +1 -3
- ultralytics/nn/autobackend.py +1 -1
- ultralytics/nn/modules/block.py +1 -1
- ultralytics/nn/modules/conv.py +1 -1
- ultralytics/solutions/parking_management.py +1 -1
- ultralytics/utils/autobatch.py +3 -3
- ultralytics/utils/checks.py +1 -0
- ultralytics/utils/dist.py +1 -1
- ultralytics/utils/metrics.py +1 -1
- {ultralytics-8.3.108.dist-info → ultralytics-8.3.110.dist-info}/METADATA +1 -1
- {ultralytics-8.3.108.dist-info → ultralytics-8.3.110.dist-info}/RECORD +25 -21
- {ultralytics-8.3.108.dist-info → ultralytics-8.3.110.dist-info}/WHEEL +0 -0
- {ultralytics-8.3.108.dist-info → ultralytics-8.3.110.dist-info}/entry_points.txt +0 -0
- {ultralytics-8.3.108.dist-info → ultralytics-8.3.110.dist-info}/licenses/LICENSE +0 -0
- {ultralytics-8.3.108.dist-info → ultralytics-8.3.110.dist-info}/top_level.txt +0 -0
ultralytics/__init__.py
CHANGED
ultralytics/data/base.py
CHANGED
@@ -14,7 +14,7 @@ import numpy as np
 import psutil
 from torch.utils.data import Dataset
 
-from ultralytics.data.utils import FORMATS_HELP_MSG, HELP_URL, IMG_FORMATS
+from ultralytics.data.utils import FORMATS_HELP_MSG, HELP_URL, IMG_FORMATS, check_file_speeds
 from ultralytics.utils import DEFAULT_CFG, LOCAL_RANK, LOGGER, NUM_THREADS, TQDM
 
 
@@ -172,6 +172,7 @@ class BaseDataset(Dataset):
             raise FileNotFoundError(f"{self.prefix}Error loading data from {img_path}\n{HELP_URL}") from e
         if self.fraction < 1:
             im_files = im_files[: round(len(im_files) * self.fraction)]  # retain a fraction of the dataset
+        check_file_speeds(im_files, prefix=self.prefix)  # check image read speeds
         return im_files
 
     def update_labels(self, include_class: Optional[list]):
ultralytics/data/dataset.py
CHANGED
@@ -31,6 +31,7 @@ from .converter import merge_multi_segment
 from .utils import (
     HELP_URL,
     LOGGER,
+    check_file_speeds,
     get_hash,
     img2label_paths,
     load_dataset_cache_file,
@@ -794,6 +795,7 @@ class ClassificationDataset:
         path = Path(self.root).with_suffix(".cache")  # *.cache file path
 
         try:
+            check_file_speeds([file for (file, _) in self.samples[:5]], prefix=self.prefix)  # check image read speeds
             cache = load_dataset_cache_file(path)  # attempt to load a *.cache file
             assert cache["version"] == DATASET_CACHE_VERSION  # matches current version
             assert cache["hash"] == get_hash([x[0] for x in self.samples])  # identical hash
ultralytics/data/scripts/download_weights.sh
ADDED
@@ -0,0 +1,18 @@
+#!/bin/bash
+# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+# Download latest models from https://github.com/ultralytics/assets/releases
+# Example usage: bash ultralytics/data/scripts/download_weights.sh
+# parent
+# └── weights
+#     ├── yolov8n.pt  ← downloads here
+#     ├── yolov8s.pt
+#     └── ...
+
+python << EOF
+from ultralytics.utils.downloads import attempt_download_asset
+
+assets = [f"yolov8{size}{suffix}.pt" for size in "nsmlx" for suffix in ("", "-cls", "-seg", "-pose")]
+for x in assets:
+    attempt_download_asset(f"weights/{x}")
+EOF
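For reference, the helper invoked by the heredoc above can also be called on its own from Python to fetch a single checkpoint; a minimal sketch (the chosen asset name is just an example):

from ultralytics.utils.downloads import attempt_download_asset

# Fetches "yolov8n.pt" from the ultralytics/assets GitHub release if it is not already present locally.
attempt_download_asset("weights/yolov8n.pt")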
ultralytics/data/scripts/get_coco.sh
ADDED
@@ -0,0 +1,61 @@
+#!/bin/bash
+# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+# Download COCO 2017 dataset https://cocodataset.org
+# Example usage: bash data/scripts/get_coco.sh
+# parent
+# ├── ultralytics
+# └── datasets
+#     └── coco  ← downloads here
+
+# Arguments (optional) Usage: bash data/scripts/get_coco.sh --train --val --test --segments
+if [ "$#" -gt 0 ]; then
+  for opt in "$@"; do
+    case "${opt}" in
+    --train) train=true ;;
+    --val) val=true ;;
+    --test) test=true ;;
+    --segments) segments=true ;;
+    --sama) sama=true ;;
+    esac
+  done
+else
+  train=true
+  val=true
+  test=false
+  segments=false
+  sama=false
+fi
+
+# Download/unzip labels
+d='../datasets' # unzip directory
+url=https://github.com/ultralytics/assets/releases/download/v0.0.0/
+if [ "$segments" == "true" ]; then
+  f='coco2017labels-segments.zip' # 169 MB
+elif [ "$sama" == "true" ]; then
+  f='coco2017labels-segments-sama.zip' # 199 MB https://www.sama.com/sama-coco-dataset/
+else
+  f='coco2017labels.zip' # 46 MB
+fi
+echo 'Downloading' $url$f ' ...'
+curl -L $url$f -o $f -# && unzip -q $f -d $d && rm $f &
+
+# Download/unzip images
+d='../datasets/coco/images' # unzip directory
+url=http://images.cocodataset.org/zips/
+if [ "$train" == "true" ]; then
+  f='train2017.zip' # 19G, 118k images
+  echo 'Downloading' $url$f '...'
+  curl -L $url$f -o $f -# && unzip -q $f -d $d && rm $f &
+fi
+if [ "$val" == "true" ]; then
+  f='val2017.zip' # 1G, 5k images
+  echo 'Downloading' $url$f '...'
+  curl -L $url$f -o $f -# && unzip -q $f -d $d && rm $f &
+fi
+if [ "$test" == "true" ]; then
+  f='test2017.zip' # 7G, 41k images (optional)
+  echo 'Downloading' $url$f '...'
+  curl -L $url$f -o $f -# && unzip -q $f -d $d && rm $f &
+fi
+wait # finish background tasks
ultralytics/data/scripts/get_coco128.sh
ADDED
@@ -0,0 +1,18 @@
+#!/bin/bash
+# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+# Download COCO128 dataset https://www.kaggle.com/ultralytics/coco128 (first 128 images from COCO train2017)
+# Example usage: bash data/scripts/get_coco128.sh
+# parent
+# ├── ultralytics
+# └── datasets
+#     └── coco128  ← downloads here
+
+# Download/unzip images and labels
+d='../datasets' # unzip directory
+url=https://github.com/ultralytics/assets/releases/download/v0.0.0/
+f='coco128.zip' # or 'coco128-segments.zip', 68 MB
+echo 'Downloading' $url$f ' ...'
+curl -L $url$f -o $f -# && unzip -q $f -d $d && rm $f &
+
+wait # finish background tasks
ultralytics/data/scripts/get_imagenet.sh
ADDED
@@ -0,0 +1,52 @@
+#!/bin/bash
+# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+# Download ILSVRC2012 ImageNet dataset https://image-net.org
+# Example usage: bash data/scripts/get_imagenet.sh
+# parent
+# ├── ultralytics
+# └── datasets
+#     └── imagenet  ← downloads here
+
+# Arguments (optional) Usage: bash data/scripts/get_imagenet.sh --train --val
+if [ "$#" -gt 0 ]; then
+  for opt in "$@"; do
+    case "${opt}" in
+    --train) train=true ;;
+    --val) val=true ;;
+    esac
+  done
+else
+  train=true
+  val=true
+fi
+
+# Make dir
+d='../datasets/imagenet' # unzip directory
+mkdir -p $d && cd $d
+
+# Download/unzip train
+if [ "$train" == "true" ]; then
+  wget https://image-net.org/data/ILSVRC/2012/ILSVRC2012_img_train.tar # download 138G, 1281167 images
+  mkdir train && mv ILSVRC2012_img_train.tar train/ && cd train
+  tar -xf ILSVRC2012_img_train.tar && rm -f ILSVRC2012_img_train.tar
+  find . -name "*.tar" | while read NAME; do
+    mkdir -p "${NAME%.tar}"
+    tar -xf "${NAME}" -C "${NAME%.tar}"
+    rm -f "${NAME}"
+  done
+  cd ..
+fi
+
+# Download/unzip val
+if [ "$val" == "true" ]; then
+  wget https://image-net.org/data/ILSVRC/2012/ILSVRC2012_img_val.tar # download 6.3G, 50000 images
+  mkdir val && mv ILSVRC2012_img_val.tar val/ && cd val && tar -xf ILSVRC2012_img_val.tar
+  wget -qO- https://raw.githubusercontent.com/soumith/imagenetloader.torch/master/valprep.sh | bash # move into subdirs
+fi
+
+# Delete corrupted image (optional: PNG under JPEG name that may cause dataloaders to fail)
+# rm train/n04266014/n04266014_10835.JPEG
+
+# TFRecords (optional)
+# wget https://raw.githubusercontent.com/tensorflow/models/master/research/slim/datasets/imagenet_lsvrc_2015_synsets.txt
ultralytics/data/utils.py
CHANGED
@@ -47,9 +47,89 @@ def img2label_paths(img_paths):
     return [sb.join(x.rsplit(sa, 1)).rsplit(".", 1)[0] + ".txt" for x in img_paths]
 
 
+def check_file_speeds(files, threshold_ms=10, max_files=5, prefix=""):
+    """
+    Check dataset file access speed and provide performance feedback.
+
+    This function tests the access speed of dataset files by measuring ping (stat call) time and read speed.
+    It samples up to 5 files from the provided list and warns if access times exceed the threshold.
+
+    Args:
+        files (list): List of file paths to check for access speed.
+        threshold_ms (float, optional): Threshold in milliseconds for ping time warnings.
+        max_files (int, optional): The maximum number of files to check.
+        prefix (str, optional): Prefix string to add to log messages.
+
+    Examples:
+        >>> from pathlib import Path
+        >>> image_files = list(Path("dataset/images").glob("*.jpg"))
+        >>> check_file_speeds(image_files, threshold_ms=15)
+    """
+    if not files or len(files) == 0:
+        LOGGER.warning(f"{prefix}WARNING ⚠️ Image speed checks: No files to check")
+        return
+
+    # Sample files (max 5)
+    files = random.sample(files, min(max_files, len(files)))
+
+    # Test ping (stat time)
+    ping_times = []
+    file_sizes = []
+    read_speeds = []
+
+    for f in files:
+        try:
+            # Measure ping (stat call)
+            start = time.perf_counter()
+            file_size = os.stat(f).st_size
+            ping_times.append((time.perf_counter() - start) * 1000)  # ms
+            file_sizes.append(file_size)
+
+            # Measure read speed
+            start = time.perf_counter()
+            with open(f, "rb") as file_obj:
+                _ = file_obj.read()
+            read_time = time.perf_counter() - start
+            if read_time > 0:  # Avoid division by zero
+                read_speeds.append(file_size / (1 << 20) / read_time)  # MB/s
+        except Exception:
+            pass
+
+    if not ping_times:
+        LOGGER.warning(f"{prefix}WARNING ⚠️ Image speed checks: failed to access files")
+        return
+
+    # Calculate stats with uncertainties
+    avg_ping = np.mean(ping_times)
+    std_ping = np.std(ping_times, ddof=1) if len(ping_times) > 1 else 0
+    size_msg = f", size: {np.mean(file_sizes) / (1 << 10):.1f} KB"
+    ping_msg = f"ping: {avg_ping:.1f}±{std_ping:.1f} ms"
+
+    if read_speeds:
+        avg_speed = np.mean(read_speeds)
+        std_speed = np.std(read_speeds, ddof=1) if len(read_speeds) > 1 else 0
+        speed_msg = f", read: {avg_speed:.1f}±{std_speed:.1f} MB/s"
+    else:
+        speed_msg = ""
+
+    if avg_ping < threshold_ms:
+        LOGGER.info(f"{prefix}Fast image access ✅ ({ping_msg}{speed_msg}{size_msg})")
+    else:
+        LOGGER.warning(
+            f"{prefix}WARNING ⚠️ Slow image access detected ({ping_msg}{speed_msg}{size_msg}). "
+            f"Use local storage instead of remote/mounted storage for better performance. "
+            f"See https://docs.ultralytics.com/guides/model-training-tips/"
+        )
+
+
 def get_hash(paths):
     """Returns a single hash value of a list of paths (files or dirs)."""
-    size =
+    size = 0
+    for p in paths:
+        try:
+            size += os.stat(p).st_size
+        except OSError:
+            continue
     h = hashlib.sha256(str(size).encode())  # hash sizes
     h.update("".join(paths).encode())  # hash paths
     return h.hexdigest()  # return hash
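The new check_file_speeds helper can also be run on its own against any list of image paths; a minimal sketch (the dataset directory below is hypothetical):

from pathlib import Path

from ultralytics.data.utils import check_file_speeds

# Hypothetical image folder; any list of readable file paths works.
image_files = [str(p) for p in Path("datasets/coco128/images/train2017").glob("*.jpg")]

# Samples up to max_files paths, times os.stat() and a full read per file,
# then logs either "Fast image access" or a slow-storage warning.
check_file_speeds(image_files, threshold_ms=10, max_files=5, prefix="train: ")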
ultralytics/engine/exporter.py
CHANGED
@@ -813,7 +813,7 @@ class Exporter:
         scale = 1 / 255
         classifier_config = None
         if self.model.task == "classify":
-            classifier_config = ct.ClassifierConfig(list(self.model.names.values()))
+            classifier_config = ct.ClassifierConfig(list(self.model.names.values()))
             model = self.model
         elif self.model.task == "detect":
             model = IOSDetectModel(self.model, self.im) if self.args.nms else self.model
@@ -1000,7 +1000,7 @@ class Exporter:
 
     @try_export
     def export_pb(self, keras_model, prefix=colorstr("TensorFlow GraphDef:")):
-        """YOLO TensorFlow GraphDef *.pb export https://github.com/leimao/
+        """YOLO TensorFlow GraphDef *.pb export https://github.com/leimao/Frozen-Graph-TensorFlow."""
         import tensorflow as tf  # noqa
         from tensorflow.python.framework.convert_to_constants import convert_variables_to_constants_v2  # noqa
 
@@ -1281,7 +1281,7 @@ class Exporter:
         return f, None
 
     def _add_tflite_metadata(self, file):
-        """Add metadata to *.tflite models per https://
+        """Add metadata to *.tflite models per https://ai.google.dev/edge/litert/models/metadata."""
         import flatbuffers
 
         try:
ultralytics/engine/trainer.py
CHANGED
@@ -457,8 +457,8 @@ class BaseTrainer:
             self.scheduler.last_epoch = self.epoch  # do not move
             self.stop |= epoch >= self.epochs  # stop if exceeded epochs
             self.run_callbacks("on_fit_epoch_end")
-            if self._get_memory(fraction=True) > 0.
-                self._clear_memory()  # clear if memory utilization >
+            if self._get_memory(fraction=True) > 0.5:
+                self._clear_memory()  # clear if memory utilization > 50%
 
             # Early Stopping
             if RANK != -1:  # if DDP training
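The trainer's own _get_memory()/_clear_memory() helpers are not shown in this diff; as a rough sketch of what a fraction-based check like the new 50% threshold can look like on a single CUDA device (assuming allocator-reserved memory as the utilization metric):

import gc

import torch


def cuda_memory_fraction(device: int = 0) -> float:
    """Fraction of the device's total memory currently reserved by the CUDA caching allocator."""
    if not torch.cuda.is_available():
        return 0.0
    total = torch.cuda.get_device_properties(device).total_memory
    return torch.cuda.memory_reserved(device) / total


# Mirror of the new threshold: free cached blocks once utilization passes 50%.
if cuda_memory_fraction() > 0.5:
    gc.collect()
    torch.cuda.empty_cache()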
ultralytics/models/rtdetr/val.py
CHANGED
@@ -143,12 +143,10 @@ class RTDETRValidator(DetectionValidator):
         for i, bbox in enumerate(bboxes):  # (300, 4)
             bbox = ops.xywh2xyxy(bbox)
             score, cls = scores[i].max(-1)  # (300, )
-            # Do not need threshold for evaluation as only got 300 boxes here
-            # idx = score > self.args.conf
             pred = torch.cat([bbox, score[..., None], cls[..., None]], dim=-1)  # filter
             # Sort by confidence to correctly get internal metrics
             pred = pred[score.argsort(descending=True)]
-            outputs[i] = pred
+            outputs[i] = pred[score > self.args.conf]
 
         return outputs
 
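As a toy illustration of the net effect of that last change (sort by confidence, then keep only boxes above the threshold); the tensor values are invented and the real validator operates on its own decoded outputs:

import torch

conf = 0.25  # example threshold; the validator reads this from self.args.conf

# Invented (N, 6) predictions in [x1, y1, x2, y2, score, cls] layout
pred = torch.tensor(
    [
        [10.0, 10.0, 50.0, 50.0, 0.90, 0.0],
        [12.0, 14.0, 40.0, 60.0, 0.05, 2.0],
        [30.0, 20.0, 80.0, 90.0, 0.60, 1.0],
    ]
)
pred = pred[pred[:, 4].argsort(descending=True)]  # highest confidence first
print(pred[pred[:, 4] > conf])  # low-confidence rows are dropped from the reported output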
ultralytics/nn/autobackend.py
CHANGED
@@ -397,7 +397,7 @@ class AutoBackend(nn.Module):
                 pass
 
         # TFLite or TFLite Edge TPU
-        elif tflite or edgetpu:  # https://
+        elif tflite or edgetpu:  # https://ai.google.dev/edge/litert/microcontrollers/python
             try:  # https://coral.ai/docs/edgetpu/tflite-python/#update-existing-tf-lite-code-for-the-edge-tpu
                 from tflite_runtime.interpreter import Interpreter, load_delegate
             except ImportError:
ultralytics/nn/modules/block.py
CHANGED
@@ -422,7 +422,7 @@ class C3Ghost(C3):
 
 
 class GhostBottleneck(nn.Module):
-    """Ghost Bottleneck https://github.com/huawei-noah/
+    """Ghost Bottleneck https://github.com/huawei-noah/Efficient-AI-Backbones."""
 
     def __init__(self, c1, c2, k=3, s=1):
         """
ultralytics/nn/modules/conv.py
CHANGED
@@ -337,7 +337,7 @@ class GhostConv(nn.Module):
         cv2 (Conv): Cheap operation convolution.
 
     References:
-        https://github.com/huawei-noah/
+        https://github.com/huawei-noah/Efficient-AI-Backbones
     """
 
     def __init__(self, c1, c2, k=1, s=1, g=1, act=True):
ultralytics/solutions/parking_management.py
CHANGED
@@ -58,7 +58,7 @@ class ParkingPtsSelection:
             "Linux": "sudo apt install python3-tk (Debian/Ubuntu) | sudo dnf install python3-tkinter (Fedora) | "
             "sudo pacman -S tk (Arch)",
             "Windows": "reinstall Python and enable the checkbox `tcl/tk and IDLE` on **Optional Features** during installation",
-            "Darwin": "reinstall Python from https://www.python.org/downloads/
+            "Darwin": "reinstall Python from https://www.python.org/downloads/macos/ or `brew install python-tk`",
         }.get(platform.system(), "Unknown OS. Check your Python installation.")
 
         LOGGER.warning(f"WARNING ⚠️ Tkinter is not configured or supported. Potential fix: {install_cmd}")
ultralytics/utils/autobatch.py
CHANGED
@@ -86,8 +86,8 @@ def autobatch(model, imgsz=640, fraction=0.60, batch_size=DEFAULT_CFG.batch, max
             and (i == 0 or not results[i - 1] or y[2] > results[i - 1][2])  # first item or increasing memory
         ]
         fit_x, fit_y = zip(*xy) if xy else ([], [])
-        p = np.polyfit(
-        b = int(round(
+        p = np.polyfit(fit_x, fit_y, deg=1)  # first-degree polynomial fit in log space
+        b = int((round(f * fraction) - p[1]) / p[0])  # y intercept (optimal batch size)
         if None in results:  # some sizes failed
             i = results.index(None)  # first fail index
             if b >= batch_sizes[i]:  # y intercept above failure point
@@ -96,7 +96,7 @@ def autobatch(model, imgsz=640, fraction=0.60, batch_size=DEFAULT_CFG.batch, max
             LOGGER.info(f"{prefix}WARNING ⚠️ batch={b} outside safe range, using default batch-size {batch_size}.")
             b = batch_size
 
-        fraction = (np.
+        fraction = (np.polyval(p, b) + r + a) / t  # predicted fraction
         LOGGER.info(f"{prefix}Using batch-size {b} for {d} {t * fraction:.2f}G/{t:.2f}G ({fraction * 100:.0f}%) ✅")
         return b
     except Exception as e:
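For intuition, the completed lines above invert a first-degree fit to find the batch size whose predicted memory hits the target fraction; a self-contained numerical sketch with invented measurements:

import numpy as np

# Invented profiling results: batch sizes tried and the memory (GB) they reserved
fit_x = [1, 2, 4, 8]
fit_y = [1.1, 1.9, 3.6, 7.0]

p = np.polyfit(fit_x, fit_y, deg=1)  # memory ≈ p[0] * batch + p[1]

f, fraction = 16.0, 0.60  # total device memory (GB) and target utilization
b = int((round(f * fraction) - p[1]) / p[0])  # solve p[0] * b + p[1] = target for b
print(b, np.polyval(p, b))  # e.g. ~11 and the memory the fit predicts for that batch size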
ultralytics/utils/checks.py
CHANGED
@@ -890,5 +890,6 @@ check_torchvision()  # check torch-torchvision compatibility
 
 # Define constants
 IS_PYTHON_MINIMUM_3_10 = check_python("3.10", hard=False)
+IS_PYTHON_3_11 = PYTHON_VERSION.startswith("3.11")
 IS_PYTHON_3_12 = PYTHON_VERSION.startswith("3.12")
 IS_PYTHON_3_13 = PYTHON_VERSION.startswith("3.13")
ultralytics/utils/dist.py
CHANGED
@@ -87,7 +87,7 @@ def generate_ddp_command(world_size, trainer):
         cmd (List[str]): The command to execute for distributed training.
         file (str): Path to the temporary file created for DDP training.
     """
-    import __main__  # noqa local import to avoid https://github.com/Lightning-AI/lightning/issues/15218
+    import __main__  # noqa local import to avoid https://github.com/Lightning-AI/pytorch-lightning/issues/15218
 
     if not trainer.resume:
         shutil.rmtree(trainer.save_dir)  # remove the save_dir
ultralytics/utils/metrics.py
CHANGED
@@ -52,7 +52,7 @@ def bbox_ioa(box1, box2, iou=False, eps=1e-7):
 def box_iou(box1, box2, eps=1e-7):
     """
     Calculate intersection-over-union (IoU) of boxes. Both sets of boxes are expected to be in (x1, y1, x2, y2) format.
-    Based on https://github.com/pytorch/vision/blob/
+    Based on https://github.com/pytorch/vision/blob/main/torchvision/ops/boxes.py.
 
     Args:
         box1 (torch.Tensor): A tensor of shape (N, 4) representing N bounding boxes.
{ultralytics-8.3.108.dist-info → ultralytics-8.3.110.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ultralytics
-Version: 8.3.
+Version: 8.3.110
 Summary: Ultralytics YOLO 🚀 for SOTA object detection, multi-object tracking, instance segmentation, pose estimation and image classification.
 Author-email: Glenn Jocher <glenn.jocher@ultralytics.com>, Jing Qiu <jing.qiu@ultralytics.com>
 Maintainer-email: Ultralytics <hello@ultralytics.com>
{ultralytics-8.3.108.dist-info → ultralytics-8.3.110.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-ultralytics/__init__.py,sha256=
+ultralytics/__init__.py,sha256=d49pdkawY70527DKA-stvgRvkhGPDCMMglotCnaHB04,730
 ultralytics/assets/bus.jpg,sha256=wCAZxJecGR63Od3ZRERe9Aja1Weayrb9Ug751DS_vGM,137419
 ultralytics/assets/zidane.jpg,sha256=Ftc4aeMmen1O0A3o6GCDO9FlfBslLpTAw0gnetx7bts,50427
 ultralytics/cfg/__init__.py,sha256=HZdpo0m_8NynZLmTie2dDx-OEZH7WoM8YtALjB7lKgM,39838
@@ -94,19 +94,23 @@ ultralytics/cfg/trackers/bytetrack.yaml,sha256=6u-tiZlk16EqEwkNXaMrza6PAQmWj_ypg
 ultralytics/data/__init__.py,sha256=nAXaL1puCc7z_NjzQNlJnhbVhT9Fla2u7Dsqo7q1dAc,644
 ultralytics/data/annotator.py,sha256=VEwb11FsEZm75qlEp8XDHFGKW0_rGsEaFDaBVd771Kw,2902
 ultralytics/data/augment.py,sha256=JsliREHEOQzjipY8iLF1TNP0nuAfix3DKV4AoB4R4fM,124738
-ultralytics/data/base.py,sha256=
+ultralytics/data/base.py,sha256=G1S1koste1rCrSzPu4fG6lAwjWflX8Dl8_Q6Bx3IXQc,18551
 ultralytics/data/build.py,sha256=56pavLie6PDFEVYChMxnGQGtGsxozYZRpFqC70DRGls,9650
 ultralytics/data/converter.py,sha256=eaRqru-MZR8VEP-pL8EFSrH8dC6EkqVF4oEb551FXUw,24657
-ultralytics/data/dataset.py,sha256=
+ultralytics/data/dataset.py,sha256=sxFqIfrCtjTDNjxd6iaRmEMykPNgaljW_1ARELqtPpg,34835
 ultralytics/data/loaders.py,sha256=_Gyp_BfGTZwsFdn4UnolXxdU_sAYZLIrv0L2TRI9R5g,28627
 ultralytics/data/split_dota.py,sha256=p8eVGht9tABSVbf9vwvxA_AQYEva3IGHePKlMeNrn64,11872
-ultralytics/data/utils.py,sha256=
+ultralytics/data/utils.py,sha256=FI4CBjSy44HUULCt-Xrsc0nQBLjsUabuqernkQebtMU,36185
+ultralytics/data/scripts/download_weights.sh,sha256=0y8XtZxOru7dVThXDFUXLHBuICgOIqZNUwpyL4Rh6lg,595
+ultralytics/data/scripts/get_coco.sh,sha256=UuJpJeo3qQpTHVINeOpmP0NYmg8PhEFE3A8J3jKrnPw,1768
+ultralytics/data/scripts/get_coco128.sh,sha256=qmRQl_hOKrsdHrTrnyQuFIH01oDz3lfaz138OgGfLt8,650
+ultralytics/data/scripts/get_imagenet.sh,sha256=hr42H16bM47iT27rgS7MpEo-GeOZAYUQXgr0B2cwn48,1705
 ultralytics/engine/__init__.py,sha256=lm6MckFYCPTbqIoX7w0s_daxdjNeBeKW6DXppv1-QUM,70
-ultralytics/engine/exporter.py,sha256=
+ultralytics/engine/exporter.py,sha256=tOfxhc5Zo5otugqu2cvplRqMUHEhivmNlK0fc23qwx4,73919
 ultralytics/engine/model.py,sha256=YgQKYZrPENSTvLENspg-bXI9FinzzWARfb0U-C9vH-M,52916
 ultralytics/engine/predictor.py,sha256=hXDF7d03rtVzoEQBW1tMN665-TALIyM1q7kXARJlmKM,21630
 ultralytics/engine/results.py,sha256=H3pFJhUjYKvVyOUqqZjfIn8vnCpl81aYNOnregMrBoQ,79716
-ultralytics/engine/trainer.py,sha256=
+ultralytics/engine/trainer.py,sha256=CdCkH0ky8cqqVQHZQf4rQ_f5wKz98sYwY6Z83uLDrwY,38904
 ultralytics/engine/tuner.py,sha256=CW6Ys4NV6SVScXA5GQO5DeSIJWys9e_mqUg26b6NYu4,12598
 ultralytics/engine/validator.py,sha256=Xijg74RHn43ANjQJaBJ4zZkWd0MMPUH2TzfmydAMbzk,16974
 ultralytics/hub/__init__.py,sha256=wDtAUKdfqob95tfFHgDJFXcsNSDSdoIQkJTm-CfIUTI,6616
@@ -128,7 +132,7 @@ ultralytics/models/rtdetr/__init__.py,sha256=_jEHmOjI_QP_nT3XJXLgYHQ6bXG4EL8Gnvn
 ultralytics/models/rtdetr/model.py,sha256=zx9UKpReYCRL7Is2DXIX9ZcJE25KE_fPZ-NYx5vF6E4,2119
 ultralytics/models/rtdetr/predict.py,sha256=5VNvyULxegg_NfGo7ugfIKHrtKhpaspJZdagU1haQmo,3942
 ultralytics/models/rtdetr/train.py,sha256=YONMv5RjLuO29Ab_tuHtgrlBfsicCGQeAvYDVeL02bs,4144
-ultralytics/models/rtdetr/val.py,sha256=
+ultralytics/models/rtdetr/val.py,sha256=MfX3drVsGOqbK0au-ZroDNfeYXmFCSembfElFmuFGuI,7301
 ultralytics/models/sam/__init__.py,sha256=iR7B06rAEni21eptg8n4rLOP0Z_qV9y9PL-L93n4_7s,266
 ultralytics/models/sam/amg.py,sha256=r_duG0DCeCyTYfhcVh-ti10FPMl4VGL4SKc8yvbQpNU,11050
 ultralytics/models/sam/build.py,sha256=Vhml3zBGDcRO-efauNdM0ZlKTV10ADAj_aT823lPJv8,12515
@@ -177,13 +181,13 @@ ultralytics/models/yolo/yoloe/train.py,sha256=7JxJkMN9bkUGsO-RojFG2Q3yfdKhb-TXlB
 ultralytics/models/yolo/yoloe/train_seg.py,sha256=JguKB1ez8Rf7XBu_D_mWHMLJto7y7Kr2m0Tq2NwDtwU,5269
 ultralytics/models/yolo/yoloe/val.py,sha256=utdt8wZvvW9OPxO5rx8KsFlkLG0FXj0YMD7Jhyk54D8,8440
 ultralytics/nn/__init__.py,sha256=rjociYD9lo_K-d-1s6TbdWklPLjTcEHk7OIlRDJstIE,615
-ultralytics/nn/autobackend.py,sha256=
+ultralytics/nn/autobackend.py,sha256=_ww4j-KnNwRPQZmtsArg0_CXVbc4U2WZj5C4nmnKGQc,38949
 ultralytics/nn/tasks.py,sha256=r9CoXW9owNK5UWH2ufM5cyG3DB5TEEIX-JmhTSECCN8,62991
 ultralytics/nn/text_model.py,sha256=H6OiLe0FOyZY4pd7-ixRTxaBgx3lOc2GmGTmrFnoJd0,10136
 ultralytics/nn/modules/__init__.py,sha256=dXLtIk9rt944WfsTdpgEdWOg3HQEHdwQztuZ6WNJygs,3144
 ultralytics/nn/modules/activation.py,sha256=PvXZkA9AzEntR575JkFORdmtcRwATyy0lje-uHA5_8w,2210
-ultralytics/nn/modules/block.py,sha256=
-ultralytics/nn/modules/conv.py,sha256=
+ultralytics/nn/modules/block.py,sha256=hKjIM2UmJnfZeGKd92sPDv9TjwCbaW95ctxI_PEsPaY,66655
+ultralytics/nn/modules/conv.py,sha256=WeiLrtWYdfrhQPgDEKbimJmQMgzaOgFG87y6-jaeg_o,21459
 ultralytics/nn/modules/head.py,sha256=_b0O_IFino6NS25Lyk11UCtUb7q0VrZ_5Tyy-UhvI8A,38255
 ultralytics/nn/modules/transformer.py,sha256=tC80QKFaLtWZo0zVNTuORX4pOu6HVs2wS0vSM-3h5W4,28227
 ultralytics/nn/modules/utils.py,sha256=rn8yTObZGkQoqVzjbZWLaHiytppG4ffjMME4Lw60glM,6092
@@ -196,7 +200,7 @@ ultralytics/solutions/instance_segmentation.py,sha256=q8vXQmnoqbiExq3CVYMybkdJ7X
 ultralytics/solutions/object_blurrer.py,sha256=9Qzs8M3YI--FoWvihMytFdtnhin6gQ0l_uy6CsdoF9U,3896
 ultralytics/solutions/object_counter.py,sha256=QXBRBEv_a0uiOYYzsNdu0VAH62rg97v1EiSHy60O1q4,9999
 ultralytics/solutions/object_cropper.py,sha256=AlIM-RnqFRogAY8JilE0KnbzFMulaIYJGPpn1nFRL5w,3386
-ultralytics/solutions/parking_management.py,sha256=
+ultralytics/solutions/parking_management.py,sha256=brxU2NdEdU_j-Y_6TPElJPDqNYZoNZ1HnsQk-qbhKCE,13292
 ultralytics/solutions/queue_management.py,sha256=cUzAMMeWijowkdiuaSUZRr0S3I5MTHkCQOLjOqS0JN0,4299
 ultralytics/solutions/region_counter.py,sha256=LKZuykgmnevKKzYifyeHQwQroF7tJJIPI6HVXi5mb9M,5299
 ultralytics/solutions/security_alarm.py,sha256=KLP1R5qAFcmMliHfsuYNS_k-E1vGbOccLrzbmcpp4xQ,6254
@@ -215,17 +219,17 @@ ultralytics/trackers/utils/gmc.py,sha256=NnLxtgZIKdO5-C_J0xqeob1iRXgpubyJOgbIEeJ
 ultralytics/trackers/utils/kalman_filter.py,sha256=A0CqOnnaKH6kr0XwuHzyHmIU6aJAjJYxF9jVlNBKZHo,21326
 ultralytics/trackers/utils/matching.py,sha256=7eIufSdeN7cXuFMjvcfvz0Ldq84m4YKZl5IGxBR8IIo,7169
 ultralytics/utils/__init__.py,sha256=-OY2ZAJdN7XLPSG1dpnWWv63ZqmhzAxrio2dMGXuyEg,50254
-ultralytics/utils/autobatch.py,sha256=
+ultralytics/utils/autobatch.py,sha256=0QSSYfzZIcHbbE5udrhRofJiJru20YaO7I1D8nhJHhc,4950
 ultralytics/utils/benchmarks.py,sha256=7xJ7I0XqLXE-51_OCETKdfMKpk1zUkMTq0kCbdMsMks,30359
-ultralytics/utils/checks.py,sha256=
-ultralytics/utils/dist.py,sha256=
+ultralytics/utils/checks.py,sha256=J2ebkGG1QBbYIrBjwlfECiJtDJzqFkAg_Nn9pdRsW_c,32728
+ultralytics/utils/dist.py,sha256=e-DK_YowV7D9rDGQyWR9Kaosxp2eWe2EogSWnnUMthc,4098
 ultralytics/utils/downloads.py,sha256=4P1JIc04tTd_oz3-AHlhRSGaVtnSQPg_gYlh__U27-4,22169
 ultralytics/utils/errors.py,sha256=vY9h2evFSrHnZdHJVVrmm8Zzw4qVDLyo9DeYW5g0dFk,1573
 ultralytics/utils/export.py,sha256=o_Ln8fkF_XE4fXjnWJ66_O5mx5U_k30Fm8WLk7QjAdQ,8832
 ultralytics/utils/files.py,sha256=0K4O1cgqRiXaDw7EQK13TqA5SME_RrvfDVQSPetNr5w,8042
 ultralytics/utils/instance.py,sha256=UOEsXR9V-bXNRk6BTonASBEgeMqvzzAk4S7VdXZJUAM,18090
 ultralytics/utils/loss.py,sha256=us3lwmSlIwEzoMztNjpet7Kb1r1-sMGyESykqgYPDVo,36945
-ultralytics/utils/metrics.py,sha256=
+ultralytics/utils/metrics.py,sha256=Lyt2jFS16rmOFbXqfZBnd0VkpYBHoaLdb7XfBjEvlII,53784
 ultralytics/utils/ops.py,sha256=Ag69Hvy8HxKLvewrtfQRseveboc_RGzlMYmO1B2U1Lk,34215
 ultralytics/utils/patches.py,sha256=auTWwYBieowiwH7ww1FgR67JSPkKr_7-PGA1SCYXB4A,4569
 ultralytics/utils/plotting.py,sha256=wAg_z9ik6Wi3XZCfKO2K6TWV1G0TcLEkjxxz2H42CX8,46703
@@ -244,9 +248,9 @@ ultralytics/utils/callbacks/neptune.py,sha256=XXnnKQ-MoLIexl8y2Vb0i-cCLyePE0n5BU
 ultralytics/utils/callbacks/raytune.py,sha256=A8amUGpux7dYES-L1iSeMoMXBySGWCD1aUqT7vcG-pU,1284
 ultralytics/utils/callbacks/tensorboard.py,sha256=7eUX21_Ym7i6iN4euZzrqglphyl5xak1yl_-wfFshbg,5502
 ultralytics/utils/callbacks/wb.py,sha256=iDRFXI4IIDm8R5OI89DMTmjs8aHLo1HRCLkOFKdaMG4,7507
-ultralytics-8.3.
-ultralytics-8.3.
-ultralytics-8.3.
-ultralytics-8.3.
-ultralytics-8.3.
-ultralytics-8.3.
+ultralytics-8.3.110.dist-info/licenses/LICENSE,sha256=DZak_2itbUtvHzD3E7GNUYSRK6jdOJ-GqncQ2weavLA,34523
+ultralytics-8.3.110.dist-info/METADATA,sha256=8IVymarmcNdBGE1wTtKQ2I3_eHnd872PQJ-gUyB-dWI,37354
+ultralytics-8.3.110.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+ultralytics-8.3.110.dist-info/entry_points.txt,sha256=YM_wiKyTe9yRrsEfqvYolNO5ngwfoL4-NwgKzc8_7sI,93
+ultralytics-8.3.110.dist-info/top_level.txt,sha256=XP49TwiMw4QGsvTLSYiJhz1xF_k7ev5mQ8jJXaXi45Q,12
+ultralytics-8.3.110.dist-info/RECORD,,
{ultralytics-8.3.108.dist-info → ultralytics-8.3.110.dist-info}/WHEEL
File without changes
{ultralytics-8.3.108.dist-info → ultralytics-8.3.110.dist-info}/entry_points.txt
File without changes
{ultralytics-8.3.108.dist-info → ultralytics-8.3.110.dist-info}/licenses/LICENSE
File without changes
{ultralytics-8.3.108.dist-info → ultralytics-8.3.110.dist-info}/top_level.txt
File without changes