dgenerate-ultralytics-headless 8.3.253__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dgenerate_ultralytics_headless-8.3.253.dist-info/METADATA +405 -0
- dgenerate_ultralytics_headless-8.3.253.dist-info/RECORD +299 -0
- dgenerate_ultralytics_headless-8.3.253.dist-info/WHEEL +5 -0
- dgenerate_ultralytics_headless-8.3.253.dist-info/entry_points.txt +3 -0
- dgenerate_ultralytics_headless-8.3.253.dist-info/licenses/LICENSE +661 -0
- dgenerate_ultralytics_headless-8.3.253.dist-info/top_level.txt +1 -0
- tests/__init__.py +23 -0
- tests/conftest.py +59 -0
- tests/test_cli.py +131 -0
- tests/test_cuda.py +216 -0
- tests/test_engine.py +157 -0
- tests/test_exports.py +309 -0
- tests/test_integrations.py +151 -0
- tests/test_python.py +777 -0
- tests/test_solutions.py +371 -0
- ultralytics/__init__.py +48 -0
- ultralytics/assets/bus.jpg +0 -0
- ultralytics/assets/zidane.jpg +0 -0
- ultralytics/cfg/__init__.py +1028 -0
- ultralytics/cfg/datasets/Argoverse.yaml +78 -0
- ultralytics/cfg/datasets/DOTAv1.5.yaml +37 -0
- ultralytics/cfg/datasets/DOTAv1.yaml +36 -0
- ultralytics/cfg/datasets/GlobalWheat2020.yaml +68 -0
- ultralytics/cfg/datasets/HomeObjects-3K.yaml +32 -0
- ultralytics/cfg/datasets/ImageNet.yaml +2025 -0
- ultralytics/cfg/datasets/Objects365.yaml +447 -0
- ultralytics/cfg/datasets/SKU-110K.yaml +58 -0
- ultralytics/cfg/datasets/TT100K.yaml +346 -0
- ultralytics/cfg/datasets/VOC.yaml +102 -0
- ultralytics/cfg/datasets/VisDrone.yaml +87 -0
- ultralytics/cfg/datasets/african-wildlife.yaml +25 -0
- ultralytics/cfg/datasets/brain-tumor.yaml +22 -0
- ultralytics/cfg/datasets/carparts-seg.yaml +44 -0
- ultralytics/cfg/datasets/coco-pose.yaml +64 -0
- ultralytics/cfg/datasets/coco.yaml +118 -0
- ultralytics/cfg/datasets/coco128-seg.yaml +101 -0
- ultralytics/cfg/datasets/coco128.yaml +101 -0
- ultralytics/cfg/datasets/coco8-grayscale.yaml +103 -0
- ultralytics/cfg/datasets/coco8-multispectral.yaml +104 -0
- ultralytics/cfg/datasets/coco8-pose.yaml +47 -0
- ultralytics/cfg/datasets/coco8-seg.yaml +101 -0
- ultralytics/cfg/datasets/coco8.yaml +101 -0
- ultralytics/cfg/datasets/construction-ppe.yaml +32 -0
- ultralytics/cfg/datasets/crack-seg.yaml +22 -0
- ultralytics/cfg/datasets/dog-pose.yaml +52 -0
- ultralytics/cfg/datasets/dota8-multispectral.yaml +38 -0
- ultralytics/cfg/datasets/dota8.yaml +35 -0
- ultralytics/cfg/datasets/hand-keypoints.yaml +50 -0
- ultralytics/cfg/datasets/kitti.yaml +27 -0
- ultralytics/cfg/datasets/lvis.yaml +1240 -0
- ultralytics/cfg/datasets/medical-pills.yaml +21 -0
- ultralytics/cfg/datasets/open-images-v7.yaml +663 -0
- ultralytics/cfg/datasets/package-seg.yaml +22 -0
- ultralytics/cfg/datasets/signature.yaml +21 -0
- ultralytics/cfg/datasets/tiger-pose.yaml +41 -0
- ultralytics/cfg/datasets/xView.yaml +155 -0
- ultralytics/cfg/default.yaml +130 -0
- ultralytics/cfg/models/11/yolo11-cls-resnet18.yaml +17 -0
- ultralytics/cfg/models/11/yolo11-cls.yaml +33 -0
- ultralytics/cfg/models/11/yolo11-obb.yaml +50 -0
- ultralytics/cfg/models/11/yolo11-pose.yaml +51 -0
- ultralytics/cfg/models/11/yolo11-seg.yaml +50 -0
- ultralytics/cfg/models/11/yolo11.yaml +50 -0
- ultralytics/cfg/models/11/yoloe-11-seg.yaml +48 -0
- ultralytics/cfg/models/11/yoloe-11.yaml +48 -0
- ultralytics/cfg/models/12/yolo12-cls.yaml +32 -0
- ultralytics/cfg/models/12/yolo12-obb.yaml +48 -0
- ultralytics/cfg/models/12/yolo12-pose.yaml +49 -0
- ultralytics/cfg/models/12/yolo12-seg.yaml +48 -0
- ultralytics/cfg/models/12/yolo12.yaml +48 -0
- ultralytics/cfg/models/rt-detr/rtdetr-l.yaml +53 -0
- ultralytics/cfg/models/rt-detr/rtdetr-resnet101.yaml +45 -0
- ultralytics/cfg/models/rt-detr/rtdetr-resnet50.yaml +45 -0
- ultralytics/cfg/models/rt-detr/rtdetr-x.yaml +57 -0
- ultralytics/cfg/models/v10/yolov10b.yaml +45 -0
- ultralytics/cfg/models/v10/yolov10l.yaml +45 -0
- ultralytics/cfg/models/v10/yolov10m.yaml +45 -0
- ultralytics/cfg/models/v10/yolov10n.yaml +45 -0
- ultralytics/cfg/models/v10/yolov10s.yaml +45 -0
- ultralytics/cfg/models/v10/yolov10x.yaml +45 -0
- ultralytics/cfg/models/v3/yolov3-spp.yaml +49 -0
- ultralytics/cfg/models/v3/yolov3-tiny.yaml +40 -0
- ultralytics/cfg/models/v3/yolov3.yaml +49 -0
- ultralytics/cfg/models/v5/yolov5-p6.yaml +62 -0
- ultralytics/cfg/models/v5/yolov5.yaml +51 -0
- ultralytics/cfg/models/v6/yolov6.yaml +56 -0
- ultralytics/cfg/models/v8/yoloe-v8-seg.yaml +48 -0
- ultralytics/cfg/models/v8/yoloe-v8.yaml +48 -0
- ultralytics/cfg/models/v8/yolov8-cls-resnet101.yaml +28 -0
- ultralytics/cfg/models/v8/yolov8-cls-resnet50.yaml +28 -0
- ultralytics/cfg/models/v8/yolov8-cls.yaml +32 -0
- ultralytics/cfg/models/v8/yolov8-ghost-p2.yaml +58 -0
- ultralytics/cfg/models/v8/yolov8-ghost-p6.yaml +60 -0
- ultralytics/cfg/models/v8/yolov8-ghost.yaml +50 -0
- ultralytics/cfg/models/v8/yolov8-obb.yaml +49 -0
- ultralytics/cfg/models/v8/yolov8-p2.yaml +57 -0
- ultralytics/cfg/models/v8/yolov8-p6.yaml +59 -0
- ultralytics/cfg/models/v8/yolov8-pose-p6.yaml +60 -0
- ultralytics/cfg/models/v8/yolov8-pose.yaml +50 -0
- ultralytics/cfg/models/v8/yolov8-rtdetr.yaml +49 -0
- ultralytics/cfg/models/v8/yolov8-seg-p6.yaml +59 -0
- ultralytics/cfg/models/v8/yolov8-seg.yaml +49 -0
- ultralytics/cfg/models/v8/yolov8-world.yaml +51 -0
- ultralytics/cfg/models/v8/yolov8-worldv2.yaml +49 -0
- ultralytics/cfg/models/v8/yolov8.yaml +49 -0
- ultralytics/cfg/models/v9/yolov9c-seg.yaml +41 -0
- ultralytics/cfg/models/v9/yolov9c.yaml +41 -0
- ultralytics/cfg/models/v9/yolov9e-seg.yaml +64 -0
- ultralytics/cfg/models/v9/yolov9e.yaml +64 -0
- ultralytics/cfg/models/v9/yolov9m.yaml +41 -0
- ultralytics/cfg/models/v9/yolov9s.yaml +41 -0
- ultralytics/cfg/models/v9/yolov9t.yaml +41 -0
- ultralytics/cfg/trackers/botsort.yaml +21 -0
- ultralytics/cfg/trackers/bytetrack.yaml +12 -0
- ultralytics/data/__init__.py +26 -0
- ultralytics/data/annotator.py +66 -0
- ultralytics/data/augment.py +2801 -0
- ultralytics/data/base.py +435 -0
- ultralytics/data/build.py +437 -0
- ultralytics/data/converter.py +855 -0
- ultralytics/data/dataset.py +834 -0
- ultralytics/data/loaders.py +704 -0
- ultralytics/data/scripts/download_weights.sh +18 -0
- ultralytics/data/scripts/get_coco.sh +61 -0
- ultralytics/data/scripts/get_coco128.sh +18 -0
- ultralytics/data/scripts/get_imagenet.sh +52 -0
- ultralytics/data/split.py +138 -0
- ultralytics/data/split_dota.py +344 -0
- ultralytics/data/utils.py +798 -0
- ultralytics/engine/__init__.py +1 -0
- ultralytics/engine/exporter.py +1580 -0
- ultralytics/engine/model.py +1125 -0
- ultralytics/engine/predictor.py +508 -0
- ultralytics/engine/results.py +1522 -0
- ultralytics/engine/trainer.py +977 -0
- ultralytics/engine/tuner.py +449 -0
- ultralytics/engine/validator.py +387 -0
- ultralytics/hub/__init__.py +166 -0
- ultralytics/hub/auth.py +151 -0
- ultralytics/hub/google/__init__.py +174 -0
- ultralytics/hub/session.py +422 -0
- ultralytics/hub/utils.py +162 -0
- ultralytics/models/__init__.py +9 -0
- ultralytics/models/fastsam/__init__.py +7 -0
- ultralytics/models/fastsam/model.py +79 -0
- ultralytics/models/fastsam/predict.py +169 -0
- ultralytics/models/fastsam/utils.py +23 -0
- ultralytics/models/fastsam/val.py +38 -0
- ultralytics/models/nas/__init__.py +7 -0
- ultralytics/models/nas/model.py +98 -0
- ultralytics/models/nas/predict.py +56 -0
- ultralytics/models/nas/val.py +38 -0
- ultralytics/models/rtdetr/__init__.py +7 -0
- ultralytics/models/rtdetr/model.py +63 -0
- ultralytics/models/rtdetr/predict.py +88 -0
- ultralytics/models/rtdetr/train.py +89 -0
- ultralytics/models/rtdetr/val.py +216 -0
- ultralytics/models/sam/__init__.py +25 -0
- ultralytics/models/sam/amg.py +275 -0
- ultralytics/models/sam/build.py +365 -0
- ultralytics/models/sam/build_sam3.py +377 -0
- ultralytics/models/sam/model.py +169 -0
- ultralytics/models/sam/modules/__init__.py +1 -0
- ultralytics/models/sam/modules/blocks.py +1067 -0
- ultralytics/models/sam/modules/decoders.py +495 -0
- ultralytics/models/sam/modules/encoders.py +794 -0
- ultralytics/models/sam/modules/memory_attention.py +298 -0
- ultralytics/models/sam/modules/sam.py +1160 -0
- ultralytics/models/sam/modules/tiny_encoder.py +979 -0
- ultralytics/models/sam/modules/transformer.py +344 -0
- ultralytics/models/sam/modules/utils.py +512 -0
- ultralytics/models/sam/predict.py +3940 -0
- ultralytics/models/sam/sam3/__init__.py +3 -0
- ultralytics/models/sam/sam3/decoder.py +546 -0
- ultralytics/models/sam/sam3/encoder.py +529 -0
- ultralytics/models/sam/sam3/geometry_encoders.py +415 -0
- ultralytics/models/sam/sam3/maskformer_segmentation.py +286 -0
- ultralytics/models/sam/sam3/model_misc.py +199 -0
- ultralytics/models/sam/sam3/necks.py +129 -0
- ultralytics/models/sam/sam3/sam3_image.py +339 -0
- ultralytics/models/sam/sam3/text_encoder_ve.py +307 -0
- ultralytics/models/sam/sam3/vitdet.py +547 -0
- ultralytics/models/sam/sam3/vl_combiner.py +160 -0
- ultralytics/models/utils/__init__.py +1 -0
- ultralytics/models/utils/loss.py +466 -0
- ultralytics/models/utils/ops.py +315 -0
- ultralytics/models/yolo/__init__.py +7 -0
- ultralytics/models/yolo/classify/__init__.py +7 -0
- ultralytics/models/yolo/classify/predict.py +90 -0
- ultralytics/models/yolo/classify/train.py +202 -0
- ultralytics/models/yolo/classify/val.py +216 -0
- ultralytics/models/yolo/detect/__init__.py +7 -0
- ultralytics/models/yolo/detect/predict.py +122 -0
- ultralytics/models/yolo/detect/train.py +227 -0
- ultralytics/models/yolo/detect/val.py +507 -0
- ultralytics/models/yolo/model.py +430 -0
- ultralytics/models/yolo/obb/__init__.py +7 -0
- ultralytics/models/yolo/obb/predict.py +56 -0
- ultralytics/models/yolo/obb/train.py +79 -0
- ultralytics/models/yolo/obb/val.py +302 -0
- ultralytics/models/yolo/pose/__init__.py +7 -0
- ultralytics/models/yolo/pose/predict.py +65 -0
- ultralytics/models/yolo/pose/train.py +110 -0
- ultralytics/models/yolo/pose/val.py +248 -0
- ultralytics/models/yolo/segment/__init__.py +7 -0
- ultralytics/models/yolo/segment/predict.py +109 -0
- ultralytics/models/yolo/segment/train.py +69 -0
- ultralytics/models/yolo/segment/val.py +307 -0
- ultralytics/models/yolo/world/__init__.py +5 -0
- ultralytics/models/yolo/world/train.py +173 -0
- ultralytics/models/yolo/world/train_world.py +178 -0
- ultralytics/models/yolo/yoloe/__init__.py +22 -0
- ultralytics/models/yolo/yoloe/predict.py +162 -0
- ultralytics/models/yolo/yoloe/train.py +287 -0
- ultralytics/models/yolo/yoloe/train_seg.py +122 -0
- ultralytics/models/yolo/yoloe/val.py +206 -0
- ultralytics/nn/__init__.py +27 -0
- ultralytics/nn/autobackend.py +964 -0
- ultralytics/nn/modules/__init__.py +182 -0
- ultralytics/nn/modules/activation.py +54 -0
- ultralytics/nn/modules/block.py +1947 -0
- ultralytics/nn/modules/conv.py +669 -0
- ultralytics/nn/modules/head.py +1183 -0
- ultralytics/nn/modules/transformer.py +793 -0
- ultralytics/nn/modules/utils.py +159 -0
- ultralytics/nn/tasks.py +1768 -0
- ultralytics/nn/text_model.py +356 -0
- ultralytics/py.typed +1 -0
- ultralytics/solutions/__init__.py +41 -0
- ultralytics/solutions/ai_gym.py +108 -0
- ultralytics/solutions/analytics.py +264 -0
- ultralytics/solutions/config.py +107 -0
- ultralytics/solutions/distance_calculation.py +123 -0
- ultralytics/solutions/heatmap.py +125 -0
- ultralytics/solutions/instance_segmentation.py +86 -0
- ultralytics/solutions/object_blurrer.py +89 -0
- ultralytics/solutions/object_counter.py +190 -0
- ultralytics/solutions/object_cropper.py +87 -0
- ultralytics/solutions/parking_management.py +280 -0
- ultralytics/solutions/queue_management.py +93 -0
- ultralytics/solutions/region_counter.py +133 -0
- ultralytics/solutions/security_alarm.py +151 -0
- ultralytics/solutions/similarity_search.py +219 -0
- ultralytics/solutions/solutions.py +828 -0
- ultralytics/solutions/speed_estimation.py +114 -0
- ultralytics/solutions/streamlit_inference.py +260 -0
- ultralytics/solutions/templates/similarity-search.html +156 -0
- ultralytics/solutions/trackzone.py +88 -0
- ultralytics/solutions/vision_eye.py +67 -0
- ultralytics/trackers/__init__.py +7 -0
- ultralytics/trackers/basetrack.py +115 -0
- ultralytics/trackers/bot_sort.py +257 -0
- ultralytics/trackers/byte_tracker.py +469 -0
- ultralytics/trackers/track.py +116 -0
- ultralytics/trackers/utils/__init__.py +1 -0
- ultralytics/trackers/utils/gmc.py +339 -0
- ultralytics/trackers/utils/kalman_filter.py +482 -0
- ultralytics/trackers/utils/matching.py +154 -0
- ultralytics/utils/__init__.py +1450 -0
- ultralytics/utils/autobatch.py +118 -0
- ultralytics/utils/autodevice.py +205 -0
- ultralytics/utils/benchmarks.py +728 -0
- ultralytics/utils/callbacks/__init__.py +5 -0
- ultralytics/utils/callbacks/base.py +233 -0
- ultralytics/utils/callbacks/clearml.py +146 -0
- ultralytics/utils/callbacks/comet.py +625 -0
- ultralytics/utils/callbacks/dvc.py +197 -0
- ultralytics/utils/callbacks/hub.py +110 -0
- ultralytics/utils/callbacks/mlflow.py +134 -0
- ultralytics/utils/callbacks/neptune.py +126 -0
- ultralytics/utils/callbacks/platform.py +453 -0
- ultralytics/utils/callbacks/raytune.py +42 -0
- ultralytics/utils/callbacks/tensorboard.py +123 -0
- ultralytics/utils/callbacks/wb.py +188 -0
- ultralytics/utils/checks.py +1020 -0
- ultralytics/utils/cpu.py +85 -0
- ultralytics/utils/dist.py +123 -0
- ultralytics/utils/downloads.py +529 -0
- ultralytics/utils/errors.py +35 -0
- ultralytics/utils/events.py +113 -0
- ultralytics/utils/export/__init__.py +7 -0
- ultralytics/utils/export/engine.py +237 -0
- ultralytics/utils/export/imx.py +325 -0
- ultralytics/utils/export/tensorflow.py +231 -0
- ultralytics/utils/files.py +219 -0
- ultralytics/utils/git.py +137 -0
- ultralytics/utils/instance.py +484 -0
- ultralytics/utils/logger.py +506 -0
- ultralytics/utils/loss.py +849 -0
- ultralytics/utils/metrics.py +1563 -0
- ultralytics/utils/nms.py +337 -0
- ultralytics/utils/ops.py +664 -0
- ultralytics/utils/patches.py +201 -0
- ultralytics/utils/plotting.py +1047 -0
- ultralytics/utils/tal.py +404 -0
- ultralytics/utils/torch_utils.py +984 -0
- ultralytics/utils/tqdm.py +443 -0
- ultralytics/utils/triton.py +112 -0
- ultralytics/utils/tuner.py +168 -0
ultralytics/cfg/datasets/TT100K.yaml
@@ -0,0 +1,346 @@
+# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+# Tsinghua-Tencent 100K (TT100K) dataset https://cg.cs.tsinghua.edu.cn/traffic-sign/ by Tsinghua University
+# Documentation: https://cg.cs.tsinghua.edu.cn/traffic-sign/tutorial.html
+# Paper: Traffic-Sign Detection and Classification in the Wild (CVPR 2016)
+# License: CC BY-NC 2.0 license for non-commercial use only
+# Example usage: yolo train data=TT100K.yaml
+# parent
+# ├── ultralytics
+# └── datasets
+#     └── TT100K ← downloads here (~18 GB)
+
+# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..]
+path: TT100K # dataset root dir
+train: images/train # train images (relative to 'path') 6105 images
+val: images/val # val images (relative to 'path') 7641 images (original 'other' split)
+test: images/test # test images (relative to 'path') 3071 images
+
+# Classes (221 traffic sign categories, 45 with sufficient training instances)
+names:
+  0: pl5
+  1: pl10
+  2: pl15
+  3: pl20
+  4: pl25
+  5: pl30
+  6: pl40
+  7: pl50
+  8: pl60
+  9: pl70
+  10: pl80
+  11: pl90
+  12: pl100
+  13: pl110
+  14: pl120
+  15: pm5
+  16: pm10
+  17: pm13
+  18: pm15
+  19: pm20
+  20: pm25
+  21: pm30
+  22: pm35
+  23: pm40
+  24: pm46
+  25: pm50
+  26: pm55
+  27: pm8
+  28: pn
+  29: pne
+  30: ph4
+  31: ph4.5
+  32: ph5
+  33: ps
+  34: pg
+  35: ph1.5
+  36: ph2
+  37: ph2.1
+  38: ph2.2
+  39: ph2.4
+  40: ph2.5
+  41: ph2.8
+  42: ph2.9
+  43: ph3
+  44: ph3.2
+  45: ph3.5
+  46: ph3.8
+  47: ph4.2
+  48: ph4.3
+  49: ph4.8
+  50: ph5.3
+  51: ph5.5
+  52: pb
+  53: pr10
+  54: pr100
+  55: pr20
+  56: pr30
+  57: pr40
+  58: pr45
+  59: pr50
+  60: pr60
+  61: pr70
+  62: pr80
+  63: pr90
+  64: p1
+  65: p2
+  66: p3
+  67: p4
+  68: p5
+  69: p6
+  70: p7
+  71: p8
+  72: p9
+  73: p10
+  74: p11
+  75: p12
+  76: p13
+  77: p14
+  78: p15
+  79: p16
+  80: p17
+  81: p18
+  82: p19
+  83: p20
+  84: p21
+  85: p22
+  86: p23
+  87: p24
+  88: p25
+  89: p26
+  90: p27
+  91: p28
+  92: pa8
+  93: pa10
+  94: pa12
+  95: pa13
+  96: pa14
+  97: pb5
+  98: pc
+  99: pg
+  100: ph1
+  101: ph1.3
+  102: ph1.5
+  103: ph2
+  104: ph3
+  105: ph4
+  106: ph5
+  107: pi
+  108: pl0
+  109: pl4
+  110: pl5
+  111: pl8
+  112: pl10
+  113: pl15
+  114: pl20
+  115: pl25
+  116: pl30
+  117: pl35
+  118: pl40
+  119: pl50
+  120: pl60
+  121: pl65
+  122: pl70
+  123: pl80
+  124: pl90
+  125: pl100
+  126: pl110
+  127: pl120
+  128: pm2
+  129: pm8
+  130: pm10
+  131: pm13
+  132: pm15
+  133: pm20
+  134: pm25
+  135: pm30
+  136: pm35
+  137: pm40
+  138: pm46
+  139: pm50
+  140: pm55
+  141: pn
+  142: pne
+  143: po
+  144: pr10
+  145: pr100
+  146: pr20
+  147: pr30
+  148: pr40
+  149: pr45
+  150: pr50
+  151: pr60
+  152: pr70
+  153: pr80
+  154: ps
+  155: w1
+  156: w2
+  157: w3
+  158: w5
+  159: w8
+  160: w10
+  161: w12
+  162: w13
+  163: w16
+  164: w18
+  165: w20
+  166: w21
+  167: w22
+  168: w24
+  169: w28
+  170: w30
+  171: w31
+  172: w32
+  173: w34
+  174: w35
+  175: w37
+  176: w38
+  177: w41
+  178: w42
+  179: w43
+  180: w44
+  181: w45
+  182: w46
+  183: w47
+  184: w48
+  185: w49
+  186: w50
+  187: w51
+  188: w52
+  189: w53
+  190: w54
+  191: w55
+  192: w56
+  193: w57
+  194: w58
+  195: w59
+  196: w60
+  197: w62
+  198: w63
+  199: w66
+  200: i1
+  201: i2
+  202: i3
+  203: i4
+  204: i5
+  205: i6
+  206: i7
+  207: i8
+  208: i9
+  209: i10
+  210: i11
+  211: i12
+  212: i13
+  213: i14
+  214: i15
+  215: il60
+  216: il80
+  217: il100
+  218: il110
+  219: io
+  220: ip
+
+# Download script/URL (optional) ---------------------------------------------------------------------------------------
+download: |
+  import json
+  import shutil
+  from pathlib import Path
+
+  from PIL import Image
+
+  from ultralytics.utils import TQDM
+  from ultralytics.utils.downloads import download
+
+
+  def tt100k2yolo(dir):
+      """Convert TT100K annotations to YOLO format with images/{split} and labels/{split} structure."""
+      data_dir = dir / "data"
+      anno_file = data_dir / "annotations.json"
+
+      print("Loading annotations...")
+      with open(anno_file, encoding="utf-8") as f:
+          data = json.load(f)
+
+      # Build class name to index mapping from yaml
+      names = yaml["names"]
+      class_to_idx = {v: k for k, v in names.items()}
+
+      # Create directories
+      for split in ["train", "val", "test"]:
+          (dir / "images" / split).mkdir(parents=True, exist_ok=True)
+          (dir / "labels" / split).mkdir(parents=True, exist_ok=True)
+
+      print("Converting annotations to YOLO format...")
+      skipped = 0
+      for img_id, img_data in TQDM(data["imgs"].items(), desc="Processing"):
+          img_path_str = img_data["path"]
+          if "train" in img_path_str:
+              split = "train"
+          elif "test" in img_path_str:
+              split = "test"
+          else:
+              split = "val"
+
+          # Source and destination paths
+          src_img = data_dir / img_path_str
+          if not src_img.exists():
+              continue
+
+          dst_img = dir / "images" / split / src_img.name
+
+          # Get image dimensions
+          try:
+              with Image.open(src_img) as img:
+                  img_width, img_height = img.size
+          except Exception as e:
+              print(f"Error reading {src_img}: {e}")
+              continue
+
+          # Copy image to destination
+          shutil.copy2(src_img, dst_img)
+
+          # Convert annotations
+          label_file = dir / "labels" / split / f"{src_img.stem}.txt"
+          lines = []
+
+          for obj in img_data.get("objects", []):
+              category = obj["category"]
+              if category not in class_to_idx:
+                  skipped += 1
+                  continue
+
+              bbox = obj["bbox"]
+              xmin, ymin = bbox["xmin"], bbox["ymin"]
+              xmax, ymax = bbox["xmax"], bbox["ymax"]
+
+              # Convert to YOLO format (normalized center coordinates and dimensions)
+              x_center = ((xmin + xmax) / 2.0) / img_width
+              y_center = ((ymin + ymax) / 2.0) / img_height
+              width = (xmax - xmin) / img_width
+              height = (ymax - ymin) / img_height
+
+              # Clip to valid range
+              x_center = max(0, min(1, x_center))
+              y_center = max(0, min(1, y_center))
+              width = max(0, min(1, width))
+              height = max(0, min(1, height))
+
+              cls_idx = class_to_idx[category]
+              lines.append(f"{cls_idx} {x_center:.6f} {y_center:.6f} {width:.6f} {height:.6f}\n")
+
+          # Write label file
+          if lines:
+              label_file.write_text("".join(lines), encoding="utf-8")
+
+      if skipped:
+          print(f"Skipped {skipped} annotations with unknown categories")
+      print("Conversion complete!")
+
+
+  # Download
+  dir = Path(yaml["path"]) # dataset root dir
+  urls = ["https://cg.cs.tsinghua.edu.cn/traffic-sign/data_model_code/data.zip"]
+  download(urls, dir=dir, curl=True, threads=1)
+
+  # Convert
+  tt100k2yolo(dir)
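The TT100K config above is consumed like any other Ultralytics dataset YAML: the first time a trainer requests the dataset, the `download:` block runs, fetches data.zip (~18 GB), and converts the annotations with tt100k2yolo(). A minimal sketch of the equivalent Python API call to the CLI line in the header (`yolo train data=TT100K.yaml`); the checkpoint name and hyperparameters below are illustrative, not taken from the package:

    # Minimal sketch, assuming this wheel exposes the standard ultralytics Python API.
    from ultralytics import YOLO

    model = YOLO("yolo11n.pt")  # illustrative pretrained detection checkpoint
    # First use of TT100K.yaml triggers the download: block above, then trains on the converted labels
    results = model.train(data="TT100K.yaml", epochs=100, imgsz=640)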
ultralytics/cfg/datasets/VOC.yaml
@@ -0,0 +1,102 @@
+# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+# PASCAL VOC dataset http://host.robots.ox.ac.uk/pascal/VOC by University of Oxford
+# Documentation: https://docs.ultralytics.com/datasets/detect/voc/
+# Example usage: yolo train data=VOC.yaml
+# parent
+# ├── ultralytics
+# └── datasets
+#     └── VOC ← downloads here (2.8 GB)
+
+# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..]
+path: VOC
+train: # train images (relative to 'path') 16551 images
+  - images/train2012
+  - images/train2007
+  - images/val2012
+  - images/val2007
+val: # val images (relative to 'path') 4952 images
+  - images/test2007
+test: # test images (optional)
+  - images/test2007
+
+# Classes
+names:
+  0: aeroplane
+  1: bicycle
+  2: bird
+  3: boat
+  4: bottle
+  5: bus
+  6: car
+  7: cat
+  8: chair
+  9: cow
+  10: diningtable
+  11: dog
+  12: horse
+  13: motorbike
+  14: person
+  15: pottedplant
+  16: sheep
+  17: sofa
+  18: train
+  19: tvmonitor
+
+# Download script/URL (optional) ---------------------------------------------------------------------------------------
+download: |
+  import xml.etree.ElementTree as ET
+  from pathlib import Path
+
+  from ultralytics.utils.downloads import download
+  from ultralytics.utils import ASSETS_URL, TQDM
+
+  def convert_label(path, lb_path, year, image_id):
+      """Converts XML annotations from VOC format to YOLO format by extracting bounding boxes and class IDs."""
+
+      def convert_box(size, box):
+          dw, dh = 1.0 / size[0], 1.0 / size[1]
+          x, y, w, h = (box[0] + box[1]) / 2.0 - 1, (box[2] + box[3]) / 2.0 - 1, box[1] - box[0], box[3] - box[2]
+          return x * dw, y * dh, w * dw, h * dh
+
+      with open(path / f"VOC{year}/Annotations/{image_id}.xml") as in_file, open(lb_path, "w", encoding="utf-8") as out_file:
+          tree = ET.parse(in_file)
+          root = tree.getroot()
+          size = root.find("size")
+          w = int(size.find("width").text)
+          h = int(size.find("height").text)
+
+          names = list(yaml["names"].values()) # names list
+          for obj in root.iter("object"):
+              cls = obj.find("name").text
+              if cls in names and int(obj.find("difficult").text) != 1:
+                  xmlbox = obj.find("bndbox")
+                  bb = convert_box((w, h), [float(xmlbox.find(x).text) for x in ("xmin", "xmax", "ymin", "ymax")])
+                  cls_id = names.index(cls) # class id
+                  out_file.write(" ".join(str(a) for a in (cls_id, *bb)) + "\n")
+
+
+  # Download
+  dir = Path(yaml["path"]) # dataset root dir
+  urls = [
+      f"{ASSETS_URL}/VOCtrainval_06-Nov-2007.zip", # 446MB, 5012 images
+      f"{ASSETS_URL}/VOCtest_06-Nov-2007.zip", # 438MB, 4953 images
+      f"{ASSETS_URL}/VOCtrainval_11-May-2012.zip", # 1.95GB, 17126 images
+  ]
+  download(urls, dir=dir / "images", threads=3, exist_ok=True) # download and unzip over existing (required)
+
+  # Convert
+  path = dir / "images/VOCdevkit"
+  for year, image_set in ("2012", "train"), ("2012", "val"), ("2007", "train"), ("2007", "val"), ("2007", "test"):
+      imgs_path = dir / "images" / f"{image_set}{year}"
+      lbs_path = dir / "labels" / f"{image_set}{year}"
+      imgs_path.mkdir(exist_ok=True, parents=True)
+      lbs_path.mkdir(exist_ok=True, parents=True)
+
+      with open(path / f"VOC{year}/ImageSets/Main/{image_set}.txt") as f:
+          image_ids = f.read().strip().split()
+      for id in TQDM(image_ids, desc=f"{image_set}{year}"):
+          f = path / f"VOC{year}/JPEGImages/{id}.jpg" # old img path
+          lb_path = (lbs_path / f.name).with_suffix(".txt") # new label path
+          f.rename(imgs_path / f.name) # move image
+          convert_label(path, lb_path, year, id) # convert labels to YOLO format
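In the VOC script above, `convert_box` maps a VOC box given as (xmin, xmax, ymin, ymax) in pixels to the normalized (x_center, y_center, width, height) used in YOLO label files; the `- 1` terms appear to compensate for VOC's 1-based pixel coordinates. A small standalone worked example with a hypothetical 640x480 image:

    # Standalone copy of convert_box from the VOC download script, plus one worked example.
    def convert_box(size, box):
        dw, dh = 1.0 / size[0], 1.0 / size[1]
        x, y, w, h = (box[0] + box[1]) / 2.0 - 1, (box[2] + box[3]) / 2.0 - 1, box[1] - box[0], box[3] - box[2]
        return x * dw, y * dh, w * dw, h * dh

    # Hypothetical 640x480 image, box spanning x 100-300 and y 200-400 (VOC order: xmin, xmax, ymin, ymax)
    print(convert_box((640, 480), [100.0, 300.0, 200.0, 400.0]))
    # -> (0.3109375, 0.6229166666666667, 0.3125, 0.4166666666666667)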
ultralytics/cfg/datasets/VisDrone.yaml
@@ -0,0 +1,87 @@
+# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+# VisDrone2019-DET dataset https://github.com/VisDrone/VisDrone-Dataset by Tianjin University
+# Documentation: https://docs.ultralytics.com/datasets/detect/visdrone/
+# Example usage: yolo train data=VisDrone.yaml
+# parent
+# ├── ultralytics
+# └── datasets
+#     └── VisDrone ← downloads here (2.3 GB)
+
+# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..]
+path: VisDrone # dataset root dir
+train: images/train # train images (relative to 'path') 6471 images
+val: images/val # val images (relative to 'path') 548 images
+test: images/test # test-dev images (optional) 1610 images
+
+# Classes
+names:
+  0: pedestrian
+  1: people
+  2: bicycle
+  3: car
+  4: van
+  5: truck
+  6: tricycle
+  7: awning-tricycle
+  8: bus
+  9: motor
+
+# Download script/URL (optional) ---------------------------------------------------------------------------------------
+download: |
+  import os
+  from pathlib import Path
+  import shutil
+
+  from ultralytics.utils.downloads import download
+  from ultralytics.utils import ASSETS_URL, TQDM
+
+
+  def visdrone2yolo(dir, split, source_name=None):
+      """Convert VisDrone annotations to YOLO format with images/{split} and labels/{split} structure."""
+      from PIL import Image
+
+      source_dir = dir / (source_name or f"VisDrone2019-DET-{split}")
+      images_dir = dir / "images" / split
+      labels_dir = dir / "labels" / split
+      labels_dir.mkdir(parents=True, exist_ok=True)
+
+      # Move images to new structure
+      if (source_images_dir := source_dir / "images").exists():
+          images_dir.mkdir(parents=True, exist_ok=True)
+          for img in source_images_dir.glob("*.jpg"):
+              img.rename(images_dir / img.name)
+
+      for f in TQDM((source_dir / "annotations").glob("*.txt"), desc=f"Converting {split}"):
+          img_size = Image.open(images_dir / f.with_suffix(".jpg").name).size
+          dw, dh = 1.0 / img_size[0], 1.0 / img_size[1]
+          lines = []
+
+          with open(f, encoding="utf-8") as file:
+              for row in [x.split(",") for x in file.read().strip().splitlines()]:
+                  if row[4] != "0": # Skip ignored regions
+                      x, y, w, h = map(int, row[:4])
+                      cls = int(row[5]) - 1
+                      # Convert to YOLO format
+                      x_center, y_center = (x + w / 2) * dw, (y + h / 2) * dh
+                      w_norm, h_norm = w * dw, h * dh
+                      lines.append(f"{cls} {x_center:.6f} {y_center:.6f} {w_norm:.6f} {h_norm:.6f}\n")
+
+          (labels_dir / f.name).write_text("".join(lines), encoding="utf-8")
+
+
+  # Download (ignores test-challenge split)
+  dir = Path(yaml["path"]) # dataset root dir
+  urls = [
+      f"{ASSETS_URL}/VisDrone2019-DET-train.zip",
+      f"{ASSETS_URL}/VisDrone2019-DET-val.zip",
+      f"{ASSETS_URL}/VisDrone2019-DET-test-dev.zip",
+      # f"{ASSETS_URL}/VisDrone2019-DET-test-challenge.zip",
+  ]
+  download(urls, dir=dir, threads=4)
+
+  # Convert
+  splits = {"VisDrone2019-DET-train": "train", "VisDrone2019-DET-val": "val", "VisDrone2019-DET-test-dev": "test"}
+  for folder, split in splits.items():
+      visdrone2yolo(dir, split, folder) # convert VisDrone annotations to YOLO labels
+      shutil.rmtree(dir / folder) # cleanup original directory
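Each VisDrone annotation file holds one comma-separated row per object; the loop above keeps rows whose fifth field (`row[4]`) is non-zero, dropping ignored regions, and shifts the 1-based VisDrone category in the sixth field to a 0-based class index. A small sketch of the per-row math on hypothetical rows, with the field order assumed from the VisDrone toolkit description (bbox_left, bbox_top, bbox_width, bbox_height, score, category, truncation, occlusion):

    # Sketch of the per-row conversion in visdrone2yolo above; rows and image size are illustrative.
    rows = ["684,8,273,116,0,0,0,0", "780,238,24,56,1,1,0,0"]
    img_w, img_h = 1400, 788  # hypothetical image size
    dw, dh = 1.0 / img_w, 1.0 / img_h
    for row in (r.split(",") for r in rows):
        if row[4] != "0":  # skip ignored regions, as in the script
            x, y, w, h = map(int, row[:4])
            cls = int(row[5]) - 1  # 1-based VisDrone category -> 0-based class (1 = pedestrian -> 0)
            print(f"{cls} {(x + w / 2) * dw:.6f} {(y + h / 2) * dh:.6f} {w * dw:.6f} {h * dh:.6f}")
    # prints: 0 0.565714 0.337563 0.017143 0.071066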
ultralytics/cfg/datasets/african-wildlife.yaml
@@ -0,0 +1,25 @@
+# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+# African Wildlife dataset by Ultralytics
+# Documentation: https://docs.ultralytics.com/datasets/detect/african-wildlife/
+# Example usage: yolo train data=african-wildlife.yaml
+# parent
+# ├── ultralytics
+# └── datasets
+#     └── african-wildlife ← downloads here (100 MB)
+
+# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..]
+path: african-wildlife # dataset root dir
+train: images/train # train images (relative to 'path') 1052 images
+val: images/val # val images (relative to 'path') 225 images
+test: images/test # test images (relative to 'path') 227 images
+
+# Classes
+names:
+  0: buffalo
+  1: elephant
+  2: rhino
+  3: zebra
+
+# Download script/URL (optional)
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/african-wildlife.zip
ultralytics/cfg/datasets/brain-tumor.yaml
@@ -0,0 +1,22 @@
+# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+# Brain-tumor dataset by Ultralytics
+# Documentation: https://docs.ultralytics.com/datasets/detect/brain-tumor/
+# Example usage: yolo train data=brain-tumor.yaml
+# parent
+# ├── ultralytics
+# └── datasets
+#     └── brain-tumor ← downloads here (4.21 MB)
+
+# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..]
+path: brain-tumor # dataset root dir
+train: images/train # train images (relative to 'path') 893 images
+val: images/val # val images (relative to 'path') 223 images
+
+# Classes
+names:
+  0: negative
+  1: positive
+
+# Download script/URL (optional)
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/brain-tumor.zip
ultralytics/cfg/datasets/carparts-seg.yaml
@@ -0,0 +1,44 @@
+# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+# Carparts-seg dataset by Ultralytics
+# Documentation: https://docs.ultralytics.com/datasets/segment/carparts-seg/
+# Example usage: yolo train data=carparts-seg.yaml
+# parent
+# ├── ultralytics
+# └── datasets
+#     └── carparts-seg ← downloads here (133 MB)
+
+# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..]
+path: carparts-seg # dataset root dir
+train: images/train # train images (relative to 'path') 3516 images
+val: images/val # val images (relative to 'path') 276 images
+test: images/test # test images (relative to 'path') 401 images
+
+# Classes
+names:
+  0: back_bumper
+  1: back_door
+  2: back_glass
+  3: back_left_door
+  4: back_left_light
+  5: back_light
+  6: back_right_door
+  7: back_right_light
+  8: front_bumper
+  9: front_door
+  10: front_glass
+  11: front_left_door
+  12: front_left_light
+  13: front_light
+  14: front_right_door
+  15: front_right_light
+  16: hood
+  17: left_mirror
+  18: object
+  19: right_mirror
+  20: tailgate
+  21: trunk
+  22: wheel
+
+# Download script/URL (optional)
+download: https://github.com/ultralytics/assets/releases/download/v0.0.0/carparts-seg.zip
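carparts-seg uses a plain-URL `download:` entry (the archive is fetched and unpacked into the datasets directory on first use) and, being a segmentation dataset, pairs with the `-seg` model configs listed at the top of this diff (yolov8-seg.yaml, yolo11-seg.yaml, ...). A minimal sketch, assuming the standard ultralytics API; the checkpoint name and epoch count are illustrative:

    # Minimal sketch for a segmentation dataset (illustrative checkpoint and settings).
    from ultralytics import YOLO
    from ultralytics.utils import ASSETS  # resolves to the packaged ultralytics/assets directory

    model = YOLO("yolo11n-seg.pt")  # illustrative pretrained segmentation checkpoint
    model.train(data="carparts-seg.yaml", epochs=50)  # carparts-seg.zip is downloaded on first use
    results = model(ASSETS / "bus.jpg")  # run inference; predicted masks are in results[0].masks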
ultralytics/cfg/datasets/coco-pose.yaml
@@ -0,0 +1,64 @@
+# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license
+
+# COCO 2017 Keypoints dataset https://cocodataset.org by Microsoft
+# Documentation: https://docs.ultralytics.com/datasets/pose/coco/
+# Example usage: yolo train data=coco-pose.yaml
+# parent
+# ├── ultralytics
+# └── datasets
+#     └── coco-pose ← downloads here (20.1 GB)
+
+# Train/val/test sets as 1) dir: path/to/imgs, 2) file: path/to/imgs.txt, or 3) list: [path/to/imgs1, path/to/imgs2, ..]
+path: coco-pose # dataset root dir
+train: train2017.txt # train images (relative to 'path') 56599 images
+val: val2017.txt # val images (relative to 'path') 2346 images
+test: test-dev2017.txt # 20288 of 40670 images, submit to https://codalab.lisn.upsaclay.fr/competitions/7403
+
+# Keypoints
+kpt_shape: [17, 3] # number of keypoints, number of dims (2 for x,y or 3 for x,y,visible)
+flip_idx: [0, 2, 1, 4, 3, 6, 5, 8, 7, 10, 9, 12, 11, 14, 13, 16, 15]
+
+# Classes
+names:
+  0: person
+
+# Keypoint names per class
+kpt_names:
+  0:
+    - nose
+    - left_eye
+    - right_eye
+    - left_ear
+    - right_ear
+    - left_shoulder
+    - right_shoulder
+    - left_elbow
+    - right_elbow
+    - left_wrist
+    - right_wrist
+    - left_hip
+    - right_hip
+    - left_knee
+    - right_knee
+    - left_ankle
+    - right_ankle
+
+# Download script/URL (optional)
+download: |
+  from pathlib import Path
+
+  from ultralytics.utils import ASSETS_URL
+  from ultralytics.utils.downloads import download
+
+  # Download labels
+  dir = Path(yaml["path"]) # dataset root dir
+
+  urls = [f"{ASSETS_URL}/coco2017labels-pose.zip"]
+  download(urls, dir=dir.parent)
+  # Download data
+  urls = [
+      "http://images.cocodataset.org/zips/train2017.zip", # 19G, 118k images
+      "http://images.cocodataset.org/zips/val2017.zip", # 1G, 5k images
+      "http://images.cocodataset.org/zips/test2017.zip", # 7G, 41k images (optional)
+  ]
+  download(urls, dir=dir / "images", threads=3)
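In coco-pose.yaml, `kpt_shape: [17, 3]` declares 17 keypoints with (x, y, visibility) per point, and `flip_idx` gives, for each keypoint slot, the source keypoint it should take when an image is mirrored horizontally (left/right joints swap, the nose at index 0 maps to itself). A small sketch of how such a mapping is applied to a normalized keypoint array; this illustrates the idea rather than reproducing the library's augmentation code, and the keypoint values are hypothetical:

    # Sketch: left/right keypoint remapping for a horizontal flip (hypothetical data).
    import numpy as np

    flip_idx = [0, 2, 1, 4, 3, 6, 5, 8, 7, 10, 9, 12, 11, 14, 13, 16, 15]
    kpts = np.random.rand(17, 3)  # hypothetical (x, y, visibility) keypoints normalized to [0, 1]

    flipped = kpts[flip_idx].copy()      # e.g. the left_eye slot (1) takes the right_eye values (2) and vice versa
    flipped[:, 0] = 1.0 - flipped[:, 0]  # mirror the x coordinates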