bplusplus 1.1.0__py3-none-any.whl → 1.2.1__py3-none-any.whl
This diff shows the differences between publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
Potentially problematic release: this version of bplusplus might be problematic.
- bplusplus/__init__.py +4 -2
- bplusplus/collect.py +72 -3
- bplusplus/hierarchical/test.py +670 -0
- bplusplus/hierarchical/train.py +676 -0
- bplusplus/prepare.py +236 -71
- bplusplus/resnet/test.py +473 -0
- bplusplus/resnet/train.py +329 -0
- bplusplus-1.2.1.dist-info/METADATA +252 -0
- bplusplus-1.2.1.dist-info/RECORD +12 -0
- bplusplus/yolov5detect/__init__.py +0 -1
- bplusplus/yolov5detect/detect.py +0 -444
- bplusplus/yolov5detect/export.py +0 -1530
- bplusplus/yolov5detect/insect.yaml +0 -8
- bplusplus/yolov5detect/models/__init__.py +0 -0
- bplusplus/yolov5detect/models/common.py +0 -1109
- bplusplus/yolov5detect/models/experimental.py +0 -130
- bplusplus/yolov5detect/models/hub/anchors.yaml +0 -56
- bplusplus/yolov5detect/models/hub/yolov3-spp.yaml +0 -52
- bplusplus/yolov5detect/models/hub/yolov3-tiny.yaml +0 -42
- bplusplus/yolov5detect/models/hub/yolov3.yaml +0 -52
- bplusplus/yolov5detect/models/hub/yolov5-bifpn.yaml +0 -49
- bplusplus/yolov5detect/models/hub/yolov5-fpn.yaml +0 -43
- bplusplus/yolov5detect/models/hub/yolov5-p2.yaml +0 -55
- bplusplus/yolov5detect/models/hub/yolov5-p34.yaml +0 -42
- bplusplus/yolov5detect/models/hub/yolov5-p6.yaml +0 -57
- bplusplus/yolov5detect/models/hub/yolov5-p7.yaml +0 -68
- bplusplus/yolov5detect/models/hub/yolov5-panet.yaml +0 -49
- bplusplus/yolov5detect/models/hub/yolov5l6.yaml +0 -61
- bplusplus/yolov5detect/models/hub/yolov5m6.yaml +0 -61
- bplusplus/yolov5detect/models/hub/yolov5n6.yaml +0 -61
- bplusplus/yolov5detect/models/hub/yolov5s-LeakyReLU.yaml +0 -50
- bplusplus/yolov5detect/models/hub/yolov5s-ghost.yaml +0 -49
- bplusplus/yolov5detect/models/hub/yolov5s-transformer.yaml +0 -49
- bplusplus/yolov5detect/models/hub/yolov5s6.yaml +0 -61
- bplusplus/yolov5detect/models/hub/yolov5x6.yaml +0 -61
- bplusplus/yolov5detect/models/segment/yolov5l-seg.yaml +0 -49
- bplusplus/yolov5detect/models/segment/yolov5m-seg.yaml +0 -49
- bplusplus/yolov5detect/models/segment/yolov5n-seg.yaml +0 -49
- bplusplus/yolov5detect/models/segment/yolov5s-seg.yaml +0 -49
- bplusplus/yolov5detect/models/segment/yolov5x-seg.yaml +0 -49
- bplusplus/yolov5detect/models/tf.py +0 -797
- bplusplus/yolov5detect/models/yolo.py +0 -495
- bplusplus/yolov5detect/models/yolov5l.yaml +0 -49
- bplusplus/yolov5detect/models/yolov5m.yaml +0 -49
- bplusplus/yolov5detect/models/yolov5n.yaml +0 -49
- bplusplus/yolov5detect/models/yolov5s.yaml +0 -49
- bplusplus/yolov5detect/models/yolov5x.yaml +0 -49
- bplusplus/yolov5detect/utils/__init__.py +0 -97
- bplusplus/yolov5detect/utils/activations.py +0 -134
- bplusplus/yolov5detect/utils/augmentations.py +0 -448
- bplusplus/yolov5detect/utils/autoanchor.py +0 -175
- bplusplus/yolov5detect/utils/autobatch.py +0 -70
- bplusplus/yolov5detect/utils/aws/__init__.py +0 -0
- bplusplus/yolov5detect/utils/aws/mime.sh +0 -26
- bplusplus/yolov5detect/utils/aws/resume.py +0 -41
- bplusplus/yolov5detect/utils/aws/userdata.sh +0 -27
- bplusplus/yolov5detect/utils/callbacks.py +0 -72
- bplusplus/yolov5detect/utils/dataloaders.py +0 -1385
- bplusplus/yolov5detect/utils/docker/Dockerfile +0 -73
- bplusplus/yolov5detect/utils/docker/Dockerfile-arm64 +0 -40
- bplusplus/yolov5detect/utils/docker/Dockerfile-cpu +0 -42
- bplusplus/yolov5detect/utils/downloads.py +0 -136
- bplusplus/yolov5detect/utils/flask_rest_api/README.md +0 -70
- bplusplus/yolov5detect/utils/flask_rest_api/example_request.py +0 -17
- bplusplus/yolov5detect/utils/flask_rest_api/restapi.py +0 -49
- bplusplus/yolov5detect/utils/general.py +0 -1294
- bplusplus/yolov5detect/utils/google_app_engine/Dockerfile +0 -25
- bplusplus/yolov5detect/utils/google_app_engine/additional_requirements.txt +0 -6
- bplusplus/yolov5detect/utils/google_app_engine/app.yaml +0 -16
- bplusplus/yolov5detect/utils/loggers/__init__.py +0 -476
- bplusplus/yolov5detect/utils/loggers/clearml/README.md +0 -222
- bplusplus/yolov5detect/utils/loggers/clearml/__init__.py +0 -0
- bplusplus/yolov5detect/utils/loggers/clearml/clearml_utils.py +0 -230
- bplusplus/yolov5detect/utils/loggers/clearml/hpo.py +0 -90
- bplusplus/yolov5detect/utils/loggers/comet/README.md +0 -250
- bplusplus/yolov5detect/utils/loggers/comet/__init__.py +0 -551
- bplusplus/yolov5detect/utils/loggers/comet/comet_utils.py +0 -151
- bplusplus/yolov5detect/utils/loggers/comet/hpo.py +0 -126
- bplusplus/yolov5detect/utils/loggers/comet/optimizer_config.json +0 -135
- bplusplus/yolov5detect/utils/loggers/wandb/__init__.py +0 -0
- bplusplus/yolov5detect/utils/loggers/wandb/wandb_utils.py +0 -210
- bplusplus/yolov5detect/utils/loss.py +0 -259
- bplusplus/yolov5detect/utils/metrics.py +0 -381
- bplusplus/yolov5detect/utils/plots.py +0 -517
- bplusplus/yolov5detect/utils/segment/__init__.py +0 -0
- bplusplus/yolov5detect/utils/segment/augmentations.py +0 -100
- bplusplus/yolov5detect/utils/segment/dataloaders.py +0 -366
- bplusplus/yolov5detect/utils/segment/general.py +0 -160
- bplusplus/yolov5detect/utils/segment/loss.py +0 -198
- bplusplus/yolov5detect/utils/segment/metrics.py +0 -225
- bplusplus/yolov5detect/utils/segment/plots.py +0 -152
- bplusplus/yolov5detect/utils/torch_utils.py +0 -482
- bplusplus/yolov5detect/utils/triton.py +0 -90
- bplusplus-1.1.0.dist-info/METADATA +0 -179
- bplusplus-1.1.0.dist-info/RECORD +0 -92
- {bplusplus-1.1.0.dist-info → bplusplus-1.2.1.dist-info}/LICENSE +0 -0
- {bplusplus-1.1.0.dist-info → bplusplus-1.2.1.dist-info}/WHEEL +0 -0
bplusplus/__init__.py
CHANGED
@@ -1,5 +1,7 @@
 from .collect import Group, collect
 from .train_validate import train, validate
 from .prepare import prepare
-from .
-
+from .resnet.train import train_resnet
+from .resnet.test import test_resnet
+from .hierarchical.train import train_multitask
+from .hierarchical.test import test_multitask
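
The change above widens the package's top-level API. As a rough sketch (names taken only from the imports in this diff; nothing about their signatures beyond collect's is implied, and the truncated "from ." line in 1.1.0 is left as shown), the following should now be importable directly from the package:

# Re-exports declared in bplusplus/__init__.py as of 1.2.1 (per the diff above)
from bplusplus import Group, collect             # image collection from GBIF occurrence data
from bplusplus import train, validate, prepare   # existing pipeline entry points
from bplusplus import train_resnet, test_resnet           # new in 1.2.1
from bplusplus import train_multitask, test_multitask     # new in 1.2.1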
bplusplus/collect.py
CHANGED
@@ -1,11 +1,13 @@
 import os
 import random
+import threading
 from enum import Enum
-from typing import Any, Optional
+from typing import Any, Dict, List, Optional
 
 import pygbif
 import requests
 import validators
+from tqdm import tqdm
 
 
 #this lists currently supported groupings, more can be added with proper testing
@@ -13,10 +15,28 @@ class Group(str, Enum):
     scientificName="scientificName"
 
 #TODO add back support for fetching from dataset (or csvs)
-def collect(group_by_key: Group, search_parameters: dict[str, Any], images_per_group: int, output_directory: str):
+def collect(group_by_key: Group, search_parameters: dict[str, Any], images_per_group: int, output_directory: str, num_threads: int):
 
     groups: list[str] = search_parameters[group_by_key.value]
 
+    # check if user wants to parallelize the process
+    if num_threads > 1:
+        __threaded_collect(
+            images_per_group=images_per_group,
+            output_directory=output_directory,
+            num_threads=num_threads,
+            groups=groups)
+    else:
+        __single_collect(
+            search_parameters=search_parameters,
+            images_per_group=images_per_group,
+            output_directory=output_directory,
+            group_by_key=group_by_key,
+            groups=groups,
+        )
+
+def __single_collect(group_by_key: Group, search_parameters: dict[str, Any], images_per_group: int, output_directory: str, groups: list[str]):
+
     #TODO throw error if groups is not a str list
 
     __create_folders(
@@ -37,7 +57,7 @@ def collect(group_by_key: Group, search_parameters: dict[str, Any], images_per_g
         sampled_occurrences = random.sample(occurrences, min(images_per_group, len(occurrences)))
 
         print(f"Downloading {len(sampled_occurrences)} images into the {group} folder...")
-        for occurrence in sampled_occurrences:
+        for occurrence in tqdm(sampled_occurrences, desc=f"Downloading images for {group}", unit="image"):
             # image_url = occurrence.image_url.replace("original", "large") # hack to get max 1024px image
 
             __down_image(
@@ -49,6 +69,38 @@ def collect(group_by_key: Group, search_parameters: dict[str, Any], images_per_g
 
     print("Finished collecting images.")
 
+# threaded_collect: paralellize the collection of images
+def __threaded_collect(images_per_group: int, output_directory: str, num_threads: int, groups: list[str]):
+    # Handle edge case where num_threads is greater than number of groups
+    if num_threads >= len(groups):
+        num_threads = len(groups)
+
+    # Divide the species list into num_threads parts
+    chunk_size = len(groups) // num_threads
+    species_chunks = [
+        groups[i:i + chunk_size] for i in range(0, len(groups), chunk_size)
+    ]
+
+    # Ensure we have exactly num_threads chunks (the last chunk might be larger if len(species_list) % num_threads != 0)
+    while len(species_chunks) < num_threads:
+        species_chunks.append([])
+
+    threads = []
+    for i, chunk in enumerate(species_chunks):
+        thread = threading.Thread(
+            target=__collect_subset,
+            args=(chunk, images_per_group, output_directory, i)
+        )
+        threads.append(thread)
+        thread.start()
+
+    # Wait for all threads to complete
+    for thread in threads:
+        thread.join()
+
+    print("All collection threads have finished.")
+
+
 def _fetch_occurrences(group_key: str, group_value: str, parameters: dict[str, Any], totalLimit: int) -> list[dict[str, Any]]:
     parameters[group_key] = group_value
     return __next_batch(
@@ -98,6 +150,23 @@ def __create_folders(names: list[str], directory: str):
         # Create a folder using the group name
         os.makedirs(folder_name, exist_ok=True)
 
+def __collect_subset(species_subset: List[str], images_per_group: int, output_directory: str, thread_id: int):
+    search_subset: Dict[str, Any] = {
+        "scientificName": species_subset
+    }
+
+    print(f"Thread {thread_id} starting collection for {len(species_subset)} species.")
+
+    __single_collect(
+        search_parameters=search_subset,
+        images_per_group=images_per_group,
+        output_directory=output_directory,
+        group_by_key=Group.scientificName,
+        groups=species_subset
+    )
+
+    print(f"Thread {thread_id} finished collection.")
+
 
 
 
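
For reference, a minimal usage sketch of the updated collect() signature shown above. The species names, image count, and output path are hypothetical placeholders; passing num_threads > 1 routes into the new threaded collection path, while num_threads=1 keeps the single-threaded behaviour:

from bplusplus import Group, collect

species = ["Apis mellifera", "Bombus terrestris"]  # hypothetical example species

collect(
    group_by_key=Group.scientificName,
    search_parameters={"scientificName": species},  # keyed by group_by_key.value
    images_per_group=100,        # sample at most 100 occurrence images per species
    output_directory="data",     # hypothetical output folder
    num_threads=2,               # >1 splits the species list across threads
)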