megadetector 5.0.29-py3-none-any.whl → 10.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of megadetector might be problematic.
- megadetector/classification/efficientnet/model.py +8 -8
- megadetector/classification/efficientnet/utils.py +6 -5
- megadetector/classification/prepare_classification_script_mc.py +3 -3
- megadetector/data_management/annotations/annotation_constants.py +0 -1
- megadetector/data_management/camtrap_dp_to_coco.py +34 -1
- megadetector/data_management/cct_json_utils.py +2 -2
- megadetector/data_management/coco_to_yolo.py +22 -5
- megadetector/data_management/databases/add_width_and_height_to_db.py +85 -12
- megadetector/data_management/databases/combine_coco_camera_traps_files.py +2 -2
- megadetector/data_management/databases/integrity_check_json_db.py +29 -15
- megadetector/data_management/generate_crops_from_cct.py +50 -1
- megadetector/data_management/labelme_to_coco.py +4 -2
- megadetector/data_management/labelme_to_yolo.py +82 -2
- megadetector/data_management/lila/generate_lila_per_image_labels.py +276 -18
- megadetector/data_management/lila/get_lila_annotation_counts.py +5 -3
- megadetector/data_management/lila/lila_common.py +3 -0
- megadetector/data_management/lila/test_lila_metadata_urls.py +15 -5
- megadetector/data_management/mewc_to_md.py +5 -0
- megadetector/data_management/ocr_tools.py +4 -3
- megadetector/data_management/read_exif.py +20 -5
- megadetector/data_management/remap_coco_categories.py +66 -4
- megadetector/data_management/remove_exif.py +50 -1
- megadetector/data_management/rename_images.py +3 -3
- megadetector/data_management/resize_coco_dataset.py +563 -95
- megadetector/data_management/yolo_output_to_md_output.py +131 -2
- megadetector/data_management/yolo_to_coco.py +140 -5
- megadetector/detection/change_detection.py +4 -3
- megadetector/detection/pytorch_detector.py +60 -22
- megadetector/detection/run_detector.py +225 -25
- megadetector/detection/run_detector_batch.py +42 -16
- megadetector/detection/run_inference_with_yolov5_val.py +12 -2
- megadetector/detection/run_tiled_inference.py +1 -0
- megadetector/detection/video_utils.py +53 -24
- megadetector/postprocessing/add_max_conf.py +4 -0
- megadetector/postprocessing/categorize_detections_by_size.py +1 -1
- megadetector/postprocessing/classification_postprocessing.py +55 -20
- megadetector/postprocessing/combine_batch_outputs.py +3 -2
- megadetector/postprocessing/compare_batch_results.py +64 -10
- megadetector/postprocessing/convert_output_format.py +12 -8
- megadetector/postprocessing/create_crop_folder.py +137 -10
- megadetector/postprocessing/load_api_results.py +26 -8
- megadetector/postprocessing/md_to_coco.py +4 -4
- megadetector/postprocessing/md_to_labelme.py +18 -7
- megadetector/postprocessing/merge_detections.py +5 -0
- megadetector/postprocessing/postprocess_batch_results.py +6 -3
- megadetector/postprocessing/remap_detection_categories.py +55 -2
- megadetector/postprocessing/render_detection_confusion_matrix.py +9 -6
- megadetector/postprocessing/repeat_detection_elimination/repeat_detections_core.py +2 -2
- megadetector/taxonomy_mapping/map_new_lila_datasets.py +3 -4
- megadetector/taxonomy_mapping/prepare_lila_taxonomy_release.py +40 -19
- megadetector/taxonomy_mapping/preview_lila_taxonomy.py +1 -1
- megadetector/taxonomy_mapping/species_lookup.py +123 -41
- megadetector/utils/ct_utils.py +133 -113
- megadetector/utils/md_tests.py +93 -13
- megadetector/utils/path_utils.py +137 -107
- megadetector/utils/split_locations_into_train_val.py +2 -2
- megadetector/utils/string_utils.py +7 -7
- megadetector/utils/url_utils.py +81 -58
- megadetector/utils/wi_utils.py +46 -17
- megadetector/visualization/plot_utils.py +13 -9
- megadetector/visualization/render_images_with_thumbnails.py +2 -1
- megadetector/visualization/visualization_utils.py +94 -46
- megadetector/visualization/visualize_db.py +36 -9
- megadetector/visualization/visualize_detector_output.py +4 -4
- {megadetector-5.0.29.dist-info → megadetector-10.0.0.dist-info}/METADATA +135 -135
- megadetector-10.0.0.dist-info/RECORD +139 -0
- {megadetector-5.0.29.dist-info → megadetector-10.0.0.dist-info}/licenses/LICENSE +0 -0
- {megadetector-5.0.29.dist-info → megadetector-10.0.0.dist-info}/top_level.txt +0 -0
- megadetector/api/batch_processing/api_core/__init__.py +0 -0
- megadetector/api/batch_processing/api_core/batch_service/__init__.py +0 -0
- megadetector/api/batch_processing/api_core/batch_service/score.py +0 -438
- megadetector/api/batch_processing/api_core/server.py +0 -294
- megadetector/api/batch_processing/api_core/server_api_config.py +0 -97
- megadetector/api/batch_processing/api_core/server_app_config.py +0 -55
- megadetector/api/batch_processing/api_core/server_batch_job_manager.py +0 -220
- megadetector/api/batch_processing/api_core/server_job_status_table.py +0 -149
- megadetector/api/batch_processing/api_core/server_orchestration.py +0 -360
- megadetector/api/batch_processing/api_core/server_utils.py +0 -88
- megadetector/api/batch_processing/api_core_support/__init__.py +0 -0
- megadetector/api/batch_processing/api_core_support/aggregate_results_manually.py +0 -46
- megadetector/api/batch_processing/api_support/__init__.py +0 -0
- megadetector/api/batch_processing/api_support/summarize_daily_activity.py +0 -152
- megadetector/api/batch_processing/data_preparation/__init__.py +0 -0
- megadetector/api/synchronous/__init__.py +0 -0
- megadetector/api/synchronous/api_core/animal_detection_api/__init__.py +0 -0
- megadetector/api/synchronous/api_core/animal_detection_api/api_backend.py +0 -151
- megadetector/api/synchronous/api_core/animal_detection_api/api_frontend.py +0 -263
- megadetector/api/synchronous/api_core/animal_detection_api/config.py +0 -35
- megadetector/api/synchronous/api_core/tests/__init__.py +0 -0
- megadetector/api/synchronous/api_core/tests/load_test.py +0 -109
- megadetector/utils/azure_utils.py +0 -178
- megadetector/utils/sas_blob_utils.py +0 -513
- megadetector-5.0.29.dist-info/RECORD +0 -163
- /megadetector/{api/batch_processing/__init__.py → __init__.py} +0 -0
- {megadetector-5.0.29.dist-info → megadetector-10.0.0.dist-info}/WHEEL +0 -0
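The headline structural change in this release is the removal of the entire megadetector.api package (the batch-processing and synchronous web services), along with its Azure helpers megadetector/utils/azure_utils.py and megadetector/utils/sas_blob_utils.py. Before upgrading, it may be worth checking whether existing code still imports any of the removed modules; a minimal sketch (the module names come from the file list above, the check itself is illustrative, not part of the package):

import importlib.util

# A few of the modules deleted between 5.0.29 and 10.0.0 (see the file list above)
removed_modules = [
    'megadetector.utils.azure_utils',
    'megadetector.utils.sas_blob_utils',
    'megadetector.api.batch_processing.api_core.server',
]

for name in removed_modules:
    try:
        # find_spec() reports whether a module exists without importing it
        spec = importlib.util.find_spec(name)
    except ModuleNotFoundError:
        # raised when a parent package (e.g. megadetector.api) is itself gone
        spec = None
    print('{}: {}'.format(name, 'present' if spec is not None else 'removed'))

Two of the removed files are shown in full below.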
megadetector/api/synchronous/api_core/tests/load_test.py (deleted)

@@ -1,109 +0,0 @@
-import os
-import json
-import io
-import random
-import requests
-
-from PIL import Image
-from multiprocessing import Pool
-from datetime import datetime
-from requests_toolbelt import MultipartEncoder
-from requests_toolbelt.multipart import decoder
-
-
-ip_address = '100.100.200.200'
-port = 5050
-
-base_url = 'http://{}:{}/v1/camera-trap/sync/'.format(ip_address, port)
-
-
-def call_api(args):
-    start = datetime.now()
-
-    index, url, params, data, headers = args['index'], args['url'], args['params'], args['data'], args['headers']
-    print('calling api: {} starttime: {}'.format(index, start))
-
-    response = requests.post(url, params=params, data=data, headers=headers)
-    elapsed_time = datetime.now() - start
-    print('\napi {} status code: {}, elapsed time in seconds {}'.format(index, response.status_code, elapsed_time.total_seconds()))
-
-    get_detections(response)
-    return response
-
-def get_detections(response):
-    results = decoder.MultipartDecoder.from_response(response)
-    text_results = {}
-    images = {}
-    for part in results.parts:
-        # part is a BodyPart object with b'Content-Type' and b'Content-Disposition'; the latter includes 'name' and 'filename' info
-        headers = {}
-        for k, v in part.headers.items():
-            headers[k.decode(part.encoding)] = v.decode(part.encoding)
-
-        if headers.get('Content-Type', None) == 'application/json':
-            text_result = json.loads(part.content.decode())
-
-            print(text_result)
-
-
-def load_test(num_requests, params, max_images=1):
-    requests = []
-
-    # read the images anew for each request
-    index = 0
-    for i in range(num_requests):
-        index += 1
-        files = {}
-        sample_input_dir = '../../../api/synchronous/sample_input/test_images'
-
-        image_files = os.listdir(sample_input_dir)
-        random.shuffle(image_files)
-
-        num_images = 0
-        for i, image_name in enumerate(image_files):
-            if not image_name.lower().endswith('.jpg'):
-                continue
-
-            if num_images >= max_images:
-                break
-            else:
-                num_images += 1
-
-            img_path = os.path.join(sample_input_dir, image_name)
-            with open(img_path, 'rb') as f:
-                content = f.read()
-            files[image_name] = (image_name, content, 'image/jpeg')
-
-        m = MultipartEncoder(fields=files)
-        args = {
-            'index': index,
-            'url': base_url + 'detect',
-            'params': params,
-            'data': m,
-            'headers': {'Content-Type': m.content_type}
-        }
-        requests.append(args)
-
-    print('starting', num_requests, 'threads...')
-    # images are read and included in each request by the time we call the API in map()
-    with Pool(num_requests) as pool:
-        results = pool.map(call_api, requests)
-
-    return results
-
-
-if __name__ == "__main__":
-    params = {
-        'min_confidence': 0.05,
-        'min_rendering_confidence': 0.2,
-        'render': True
-    }
-
-    num_requests = 10
-    max_images = 1
-
-    start = datetime.now()
-    responses = load_test(num_requests, params, max_images=max_images)
-    end = datetime.now()
-    total_time = end - start
-    print('Total time for {} requests: {}'.format(num_requests, total_time))
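The deleted load test built multipart request bodies by hand with requests_toolbelt and fanned them out across a process pool. For reference, the same POST pattern can be written with requests' built-in files= support, which constructs the multipart/form-data body itself; a minimal single-request sketch, assuming a local test.jpg and the same (now-removed) synchronous /detect endpoint, with placeholder host and port:

import requests

# Placeholder endpoint; the synchronous API this targeted was removed in 10.0.0
url = 'http://100.100.200.200:5050/v1/camera-trap/sync/detect'
params = {'min_confidence': 0.05, 'render': False}

with open('test.jpg', 'rb') as f:
    # requests builds the multipart body itself, so no MultipartEncoder is needed
    response = requests.post(
        url, params=params,
        files={'test.jpg': ('test.jpg', f, 'image/jpeg')})

print(response.status_code)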
megadetector/utils/azure_utils.py (deleted)

@@ -1,178 +0,0 @@
-"""
-
-azure_utils.py
-
-Miscellaneous Azure Blob Storage utilities
-
-Requires azure-storage-blob>=12.4.0
-
-"""
-
-#%% Imports
-
-import json
-
-from typing import Any, Iterable, Optional, Union
-from azure.storage.blob import BlobPrefix, ContainerClient  # type: ignore
-
-from megadetector.utils import path_utils
-from megadetector.utils import sas_blob_utils
-
-
-#%% Functions
-
-def walk_container(container_client: ContainerClient,
-                   max_depth: int = -1,
-                   prefix: str = '',
-                   store_folders: bool = True,
-                   store_blobs: bool = True,
-                   debug_max_items: int = -1) -> tuple[list[str], list[str]]:
-    """
-    Recursively walk folders in an Azure Blob Storage container.
-
-    Based on:
-    https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/storage/azure-storage-blob/samples/blob_samples_walk_blob_hierarchy.py
-    """
-
-    depth = 1
-
-    def walk_blob_hierarchy(prefix: str,
-                            folders: Optional[list[str]] = None,
-                            blobs: Optional[list[str]] = None
-                            ) -> tuple[list[str], list[str]]:
-        if folders is None:
-            folders = []
-        if blobs is None:
-            blobs = []
-
-        nonlocal depth
-
-        if 0 < max_depth < depth:
-            return folders, blobs
-
-        for item in container_client.walk_blobs(name_starts_with=prefix):
-            short_name = item.name[len(prefix):]
-            if isinstance(item, BlobPrefix):
-                # print('F: ' + prefix + short_name)
-                if store_folders:
-                    folders.append(prefix + short_name)
-                depth += 1
-                walk_blob_hierarchy(item.name, folders=folders, blobs=blobs)
-                if (debug_max_items > 0
-                        and len(folders) + len(blobs) > debug_max_items):
-                    return folders, blobs
-                depth -= 1
-            else:
-                if store_blobs:
-                    blobs.append(prefix + short_name)
-
-        return folders, blobs
-
-    folders, blobs = walk_blob_hierarchy(prefix=prefix)
-
-    assert all(s.endswith('/') for s in folders)
-    folders = [s.strip('/') for s in folders]
-
-    return folders, blobs
-
-
-def list_top_level_blob_folders(container_client: ContainerClient) -> list[str]:
-    """
-    List all top-level folders in a container.
-    """
-
-    top_level_folders, _ = walk_container(
-        container_client, max_depth=1, store_blobs=False)
-    return top_level_folders
-
-
-def concatenate_json_lists(input_files: Iterable[str],
-                           output_file: Optional[str] = None
-                           ) -> list[Any]:
-    """
-    Given a list of JSON files that contain lists (typically string
-    filenames), concatenates the lists into a single list and optionally
-    writes out this list to a new output JSON file.
-    """
-
-    output_list = []
-    for fn in input_files:
-        with open(fn, 'r') as f:
-            file_list = json.load(f)
-        output_list.extend(file_list)
-    if output_file is not None:
-        with open(output_file, 'w') as f:
-            json.dump(output_list, f, indent=1)
-    return output_list
-
-
-def upload_file_to_blob(account_name: str,
-                        container_name: str,
-                        local_path: str,
-                        blob_name: str,
-                        sas_token: str,
-                        overwrite: bool = False) -> str:
-    """
-    Uploads a local file to Azure Blob Storage and returns the uploaded
-    blob URI with SAS token.
-    """
-
-    container_uri = sas_blob_utils.build_azure_storage_uri(
-        account=account_name, container=container_name, sas_token=sas_token)
-    with open(local_path, 'rb') as data:
-        return sas_blob_utils.upload_blob(
-            container_uri=container_uri, blob_name=blob_name, data=data,
-            overwrite=overwrite)
-
-
-def enumerate_blobs_to_file(
-        output_file: str,
-        account_name: str,
-        container_name: str,
-        sas_token: Optional[str] = None,
-        blob_prefix: Optional[str] = None,
-        blob_suffix: Optional[Union[str, tuple[str]]] = None,
-        rsearch: Optional[str] = None,
-        limit: Optional[int] = None,
-        verbose: Optional[bool] = True
-        ) -> list[str]:
-    """
-    Enumerates blobs in a container, and writes the blob names to an output
-    file.
-
-    Args:
-        output_file: str, path to save the list of files in the container.
-            If it ends in '.json', writes a JSON string; otherwise writes a
-            newline-delimited list. Can be None, in which case this is just a
-            convenient wrapper for blob enumeration.
-        account_name: str, Azure Storage account name
-        container_name: str, Azure Blob Storage container name
-        sas_token: optional str, container SAS token; a leading ? will be removed if present
-        blob_prefix: optional str, returned results will only contain blob names
-            with this prefix
-        blob_suffix: optional str or tuple of str, returned results will only
-            contain blob names with this/these suffix(es). The blob names will
-            be lowercased first before comparing with the suffix(es).
-        rsearch: optional str, returned results will only contain blob names
-            that match this regex. Can also be a list of regexes, in which case
-            blobs matching *any* of the regexes will be returned.
-        limit: int, maximum number of blob names to list;
-            if None, returns all blob names
-
-    Returns: list of str, sorted blob names, of length limit or shorter.
-    """
-
-    if sas_token is not None and len(sas_token) > 9 and sas_token[0] == '?':
-        sas_token = sas_token[1:]
-
-    container_uri = sas_blob_utils.build_azure_storage_uri(
-        account=account_name, container=container_name, sas_token=sas_token)
-
-    matched_blobs = sas_blob_utils.list_blobs_in_container(
-        container_uri=container_uri, blob_prefix=blob_prefix,
-        blob_suffix=blob_suffix, rsearch=rsearch, limit=limit, verbose=verbose)
-
-    if output_file is not None:
-        path_utils.write_list_to_file(output_file, matched_blobs)
-
-    return matched_blobs
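Of the helpers deleted here, list_top_level_blob_folders() is the one most likely to need a local replacement, since it wrapped functionality that azure-storage-blob (>=12) still exposes directly: walk_blobs() with a '/' delimiter yields BlobPrefix items for virtual folders, exactly as the deleted walk_container() relied on. A minimal sketch, with placeholder account URL, container name, and credential:

from azure.storage.blob import BlobPrefix, ContainerClient

# Placeholder connection details
client = ContainerClient(
    account_url='https://myaccount.blob.core.windows.net',
    container_name='mycontainer',
    credential='<sas-token>')

# walk_blobs() returns BlobPrefix items for virtual folders when a
# delimiter is used; top-level prefixes end with '/'
top_level_folders = [item.name.strip('/')
                     for item in client.walk_blobs(delimiter='/')
                     if isinstance(item, BlobPrefix)]
print(top_level_folders)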