megadetector 5.0.5-py3-none-any.whl → 5.0.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of megadetector might be problematic.

Files changed (132)
  1. api/batch_processing/data_preparation/manage_local_batch.py +302 -263
  2. api/batch_processing/data_preparation/manage_video_batch.py +81 -2
  3. api/batch_processing/postprocessing/add_max_conf.py +1 -0
  4. api/batch_processing/postprocessing/categorize_detections_by_size.py +50 -19
  5. api/batch_processing/postprocessing/compare_batch_results.py +110 -60
  6. api/batch_processing/postprocessing/load_api_results.py +56 -70
  7. api/batch_processing/postprocessing/md_to_coco.py +1 -1
  8. api/batch_processing/postprocessing/md_to_labelme.py +2 -1
  9. api/batch_processing/postprocessing/postprocess_batch_results.py +240 -81
  10. api/batch_processing/postprocessing/render_detection_confusion_matrix.py +625 -0
  11. api/batch_processing/postprocessing/repeat_detection_elimination/find_repeat_detections.py +71 -23
  12. api/batch_processing/postprocessing/repeat_detection_elimination/remove_repeat_detections.py +1 -1
  13. api/batch_processing/postprocessing/repeat_detection_elimination/repeat_detections_core.py +227 -75
  14. api/batch_processing/postprocessing/subset_json_detector_output.py +132 -5
  15. api/batch_processing/postprocessing/top_folders_to_bottom.py +1 -1
  16. api/synchronous/api_core/animal_detection_api/detection/run_detector_batch.py +2 -2
  17. classification/prepare_classification_script.py +191 -191
  18. data_management/coco_to_yolo.py +68 -45
  19. data_management/databases/integrity_check_json_db.py +7 -5
  20. data_management/generate_crops_from_cct.py +3 -3
  21. data_management/get_image_sizes.py +8 -6
  22. data_management/importers/add_timestamps_to_icct.py +79 -0
  23. data_management/importers/animl_results_to_md_results.py +160 -0
  24. data_management/importers/auckland_doc_test_to_json.py +4 -4
  25. data_management/importers/auckland_doc_to_json.py +1 -1
  26. data_management/importers/awc_to_json.py +5 -5
  27. data_management/importers/bellevue_to_json.py +5 -5
  28. data_management/importers/carrizo_shrubfree_2018.py +5 -5
  29. data_management/importers/carrizo_trail_cam_2017.py +5 -5
  30. data_management/importers/cct_field_adjustments.py +2 -3
  31. data_management/importers/channel_islands_to_cct.py +4 -4
  32. data_management/importers/ena24_to_json.py +5 -5
  33. data_management/importers/helena_to_cct.py +10 -10
  34. data_management/importers/idaho-camera-traps.py +12 -12
  35. data_management/importers/idfg_iwildcam_lila_prep.py +8 -8
  36. data_management/importers/jb_csv_to_json.py +4 -4
  37. data_management/importers/missouri_to_json.py +1 -1
  38. data_management/importers/noaa_seals_2019.py +1 -1
  39. data_management/importers/pc_to_json.py +5 -5
  40. data_management/importers/prepare-noaa-fish-data-for-lila.py +4 -4
  41. data_management/importers/prepare_zsl_imerit.py +5 -5
  42. data_management/importers/rspb_to_json.py +4 -4
  43. data_management/importers/save_the_elephants_survey_A.py +5 -5
  44. data_management/importers/save_the_elephants_survey_B.py +6 -6
  45. data_management/importers/snapshot_safari_importer.py +9 -9
  46. data_management/importers/snapshot_serengeti_lila.py +9 -9
  47. data_management/importers/timelapse_csv_set_to_json.py +5 -7
  48. data_management/importers/ubc_to_json.py +4 -4
  49. data_management/importers/umn_to_json.py +4 -4
  50. data_management/importers/wellington_to_json.py +1 -1
  51. data_management/importers/wi_to_json.py +2 -2
  52. data_management/importers/zamba_results_to_md_results.py +181 -0
  53. data_management/labelme_to_coco.py +35 -7
  54. data_management/labelme_to_yolo.py +229 -0
  55. data_management/lila/add_locations_to_island_camera_traps.py +1 -1
  56. data_management/lila/add_locations_to_nacti.py +147 -0
  57. data_management/lila/create_lila_blank_set.py +474 -0
  58. data_management/lila/create_lila_test_set.py +2 -1
  59. data_management/lila/create_links_to_md_results_files.py +106 -0
  60. data_management/lila/download_lila_subset.py +46 -21
  61. data_management/lila/generate_lila_per_image_labels.py +23 -14
  62. data_management/lila/get_lila_annotation_counts.py +17 -11
  63. data_management/lila/lila_common.py +14 -11
  64. data_management/lila/test_lila_metadata_urls.py +116 -0
  65. data_management/ocr_tools.py +829 -0
  66. data_management/resize_coco_dataset.py +13 -11
  67. data_management/yolo_output_to_md_output.py +84 -12
  68. data_management/yolo_to_coco.py +38 -20
  69. detection/process_video.py +36 -14
  70. detection/pytorch_detector.py +23 -8
  71. detection/run_detector.py +76 -19
  72. detection/run_detector_batch.py +178 -63
  73. detection/run_inference_with_yolov5_val.py +326 -57
  74. detection/run_tiled_inference.py +153 -43
  75. detection/video_utils.py +34 -8
  76. md_utils/ct_utils.py +172 -1
  77. md_utils/md_tests.py +372 -51
  78. md_utils/path_utils.py +167 -39
  79. md_utils/process_utils.py +26 -7
  80. md_utils/split_locations_into_train_val.py +215 -0
  81. md_utils/string_utils.py +10 -0
  82. md_utils/url_utils.py +0 -2
  83. md_utils/write_html_image_list.py +9 -26
  84. md_visualization/plot_utils.py +12 -8
  85. md_visualization/visualization_utils.py +106 -7
  86. md_visualization/visualize_db.py +16 -8
  87. md_visualization/visualize_detector_output.py +208 -97
  88. {megadetector-5.0.5.dist-info → megadetector-5.0.7.dist-info}/METADATA +3 -6
  89. {megadetector-5.0.5.dist-info → megadetector-5.0.7.dist-info}/RECORD +98 -121
  90. {megadetector-5.0.5.dist-info → megadetector-5.0.7.dist-info}/WHEEL +1 -1
  91. taxonomy_mapping/map_lila_taxonomy_to_wi_taxonomy.py +1 -1
  92. taxonomy_mapping/map_new_lila_datasets.py +43 -39
  93. taxonomy_mapping/prepare_lila_taxonomy_release.py +5 -2
  94. taxonomy_mapping/preview_lila_taxonomy.py +27 -27
  95. taxonomy_mapping/species_lookup.py +33 -13
  96. taxonomy_mapping/taxonomy_csv_checker.py +7 -5
  97. api/synchronous/api_core/yolov5/detect.py +0 -252
  98. api/synchronous/api_core/yolov5/export.py +0 -607
  99. api/synchronous/api_core/yolov5/hubconf.py +0 -146
  100. api/synchronous/api_core/yolov5/models/__init__.py +0 -0
  101. api/synchronous/api_core/yolov5/models/common.py +0 -738
  102. api/synchronous/api_core/yolov5/models/experimental.py +0 -104
  103. api/synchronous/api_core/yolov5/models/tf.py +0 -574
  104. api/synchronous/api_core/yolov5/models/yolo.py +0 -338
  105. api/synchronous/api_core/yolov5/train.py +0 -670
  106. api/synchronous/api_core/yolov5/utils/__init__.py +0 -36
  107. api/synchronous/api_core/yolov5/utils/activations.py +0 -103
  108. api/synchronous/api_core/yolov5/utils/augmentations.py +0 -284
  109. api/synchronous/api_core/yolov5/utils/autoanchor.py +0 -170
  110. api/synchronous/api_core/yolov5/utils/autobatch.py +0 -66
  111. api/synchronous/api_core/yolov5/utils/aws/__init__.py +0 -0
  112. api/synchronous/api_core/yolov5/utils/aws/resume.py +0 -40
  113. api/synchronous/api_core/yolov5/utils/benchmarks.py +0 -148
  114. api/synchronous/api_core/yolov5/utils/callbacks.py +0 -71
  115. api/synchronous/api_core/yolov5/utils/dataloaders.py +0 -1087
  116. api/synchronous/api_core/yolov5/utils/downloads.py +0 -178
  117. api/synchronous/api_core/yolov5/utils/flask_rest_api/example_request.py +0 -19
  118. api/synchronous/api_core/yolov5/utils/flask_rest_api/restapi.py +0 -46
  119. api/synchronous/api_core/yolov5/utils/general.py +0 -1018
  120. api/synchronous/api_core/yolov5/utils/loggers/__init__.py +0 -187
  121. api/synchronous/api_core/yolov5/utils/loggers/wandb/__init__.py +0 -0
  122. api/synchronous/api_core/yolov5/utils/loggers/wandb/log_dataset.py +0 -27
  123. api/synchronous/api_core/yolov5/utils/loggers/wandb/sweep.py +0 -41
  124. api/synchronous/api_core/yolov5/utils/loggers/wandb/wandb_utils.py +0 -577
  125. api/synchronous/api_core/yolov5/utils/loss.py +0 -234
  126. api/synchronous/api_core/yolov5/utils/metrics.py +0 -355
  127. api/synchronous/api_core/yolov5/utils/plots.py +0 -489
  128. api/synchronous/api_core/yolov5/utils/torch_utils.py +0 -314
  129. api/synchronous/api_core/yolov5/val.py +0 -394
  130. md_utils/matlab_porting_tools.py +0 -97
  131. {megadetector-5.0.5.dist-info → megadetector-5.0.7.dist-info}/LICENSE +0 -0
  132. {megadetector-5.0.5.dist-info → megadetector-5.0.7.dist-info}/top_level.txt +0 -0
api/synchronous/api_core/yolov5/utils/downloads.py (deleted)
@@ -1,178 +0,0 @@
- # YOLOv5 🚀 by Ultralytics, GPL-3.0 license
- """
- Download utils
- """
-
- import logging
- import os
- import platform
- import subprocess
- import time
- import urllib
- from pathlib import Path
- from zipfile import ZipFile
-
- import requests
- import torch
-
-
- def is_url(url):
-     # Check if online file exists
-     try:
-         r = urllib.request.urlopen(url)  # response
-         return r.getcode() == 200
-     except urllib.request.HTTPError:
-         return False
-
-
- def gsutil_getsize(url=''):
-     # gs://bucket/file size https://cloud.google.com/storage/docs/gsutil/commands/du
-     s = subprocess.check_output(f'gsutil du {url}', shell=True).decode('utf-8')
-     return eval(s.split(' ')[0]) if len(s) else 0  # bytes
-
-
- def safe_download(file, url, url2=None, min_bytes=1E0, error_msg=''):
-     # Attempts to download file from url or url2, checks and removes incomplete downloads < min_bytes
-     from utils.general import LOGGER
-
-     file = Path(file)
-     assert_msg = f"Downloaded file '{file}' does not exist or size is < min_bytes={min_bytes}"
-     try:  # url1
-         LOGGER.info(f'Downloading {url} to {file}...')
-         torch.hub.download_url_to_file(url, str(file), progress=LOGGER.level <= logging.INFO)
-         assert file.exists() and file.stat().st_size > min_bytes, assert_msg  # check
-     except Exception as e:  # url2
-         file.unlink(missing_ok=True)  # remove partial downloads
-         LOGGER.info(f'ERROR: {e}\nRe-attempting {url2 or url} to {file}...')
-         os.system(f"curl -L '{url2 or url}' -o '{file}' --retry 3 -C -")  # curl download, retry and resume on fail
-     finally:
-         if not file.exists() or file.stat().st_size < min_bytes:  # check
-             file.unlink(missing_ok=True)  # remove partial downloads
-             LOGGER.info(f"ERROR: {assert_msg}\n{error_msg}")
-         LOGGER.info('')
-
-
- def attempt_download(file, repo='ultralytics/yolov5', release='v6.1'):
-     # Attempt file download from GitHub release assets if not found locally. release = 'latest', 'v6.1', etc.
-     from utils.general import LOGGER
-
-     def github_assets(repository, version='latest'):
-         # Return GitHub repo tag (i.e. 'v6.1') and assets (i.e. ['yolov5s.pt', 'yolov5m.pt', ...])
-         if version != 'latest':
-             version = f'tags/{version}'  # i.e. tags/v6.1
-         response = requests.get(f'https://api.github.com/repos/{repository}/releases/{version}').json()  # github api
-         return response['tag_name'], [x['name'] for x in response['assets']]  # tag, assets
-
-     file = Path(str(file).strip().replace("'", ''))
-     if not file.exists():
-         # URL specified
-         name = Path(urllib.parse.unquote(str(file))).name  # decode '%2F' to '/' etc.
-         if str(file).startswith(('http:/', 'https:/')):  # download
-             url = str(file).replace(':/', '://')  # Pathlib turns :// -> :/
-             file = name.split('?')[0]  # parse authentication https://url.com/file.txt?auth...
-             if Path(file).is_file():
-                 LOGGER.info(f'Found {url} locally at {file}')  # file already exists
-             else:
-                 safe_download(file=file, url=url, min_bytes=1E5)
-             return file
-
-         # GitHub assets
-         assets = [
-             'yolov5n.pt', 'yolov5s.pt', 'yolov5m.pt', 'yolov5l.pt', 'yolov5x.pt', 'yolov5n6.pt', 'yolov5s6.pt',
-             'yolov5m6.pt', 'yolov5l6.pt', 'yolov5x6.pt']
-         try:
-             tag, assets = github_assets(repo, release)
-         except Exception:
-             try:
-                 tag, assets = github_assets(repo)  # latest release
-             except Exception:
-                 try:
-                     tag = subprocess.check_output('git tag', shell=True, stderr=subprocess.STDOUT).decode().split()[-1]
-                 except Exception:
-                     tag = release
-
-         file.parent.mkdir(parents=True, exist_ok=True)  # make parent dir (if required)
-         if name in assets:
-             url3 = 'https://drive.google.com/drive/folders/1EFQTEUeXWSFww0luse2jB9M1QNZQGwNl'  # backup gdrive mirror
-             safe_download(
-                 file,
-                 url=f'https://github.com/{repo}/releases/download/{tag}/{name}',
-                 url2=f'https://storage.googleapis.com/{repo}/{tag}/{name}',  # backup url (optional)
-                 min_bytes=1E5,
-                 error_msg=f'{file} missing, try downloading from https://github.com/{repo}/releases/{tag} or {url3}')
-
-     return str(file)
-
-
- def gdrive_download(id='16TiPfZj7htmTyhntwcZyEEAejOUxuT6m', file='tmp.zip'):
-     # Downloads a file from Google Drive. from yolov5.utils.downloads import *; gdrive_download()
-     t = time.time()
-     file = Path(file)
-     cookie = Path('cookie')  # gdrive cookie
-     print(f'Downloading https://drive.google.com/uc?export=download&id={id} as {file}... ', end='')
-     file.unlink(missing_ok=True)  # remove existing file
-     cookie.unlink(missing_ok=True)  # remove existing cookie
-
-     # Attempt file download
-     out = "NUL" if platform.system() == "Windows" else "/dev/null"
-     os.system(f'curl -c ./cookie -s -L "drive.google.com/uc?export=download&id={id}" > {out}')
-     if os.path.exists('cookie'):  # large file
-         s = f'curl -Lb ./cookie "drive.google.com/uc?export=download&confirm={get_token()}&id={id}" -o {file}'
-     else:  # small file
-         s = f'curl -s -L -o {file} "drive.google.com/uc?export=download&id={id}"'
-     r = os.system(s)  # execute, capture return
-     cookie.unlink(missing_ok=True)  # remove existing cookie
-
-     # Error check
-     if r != 0:
-         file.unlink(missing_ok=True)  # remove partial
-         print('Download error ')  # raise Exception('Download error')
-         return r
-
-     # Unzip if archive
-     if file.suffix == '.zip':
-         print('unzipping... ', end='')
-         ZipFile(file).extractall(path=file.parent)  # unzip
-         file.unlink()  # remove zip
-
-     print(f'Done ({time.time() - t:.1f}s)')
-     return r
-
-
- def get_token(cookie="./cookie"):
-     with open(cookie) as f:
-         for line in f:
-             if "download" in line:
-                 return line.split()[-1]
-     return ""
-
-
- # Google utils: https://cloud.google.com/storage/docs/reference/libraries ----------------------------------------------
- #
- #
- # def upload_blob(bucket_name, source_file_name, destination_blob_name):
- #     # Uploads a file to a bucket
- #     # https://cloud.google.com/storage/docs/uploading-objects#storage-upload-object-python
- #
- #     storage_client = storage.Client()
- #     bucket = storage_client.get_bucket(bucket_name)
- #     blob = bucket.blob(destination_blob_name)
- #
- #     blob.upload_from_filename(source_file_name)
- #
- #     print('File {} uploaded to {}.'.format(
- #         source_file_name,
- #         destination_blob_name))
- #
- #
- # def download_blob(bucket_name, source_blob_name, destination_file_name):
- #     # Uploads a blob from a bucket
- #     storage_client = storage.Client()
- #     bucket = storage_client.get_bucket(bucket_name)
- #     blob = bucket.blob(source_blob_name)
- #
- #     blob.download_to_filename(destination_file_name)
- #
- #     print('Blob {} downloaded to {}.'.format(
- #         source_blob_name,
- #         destination_file_name))
api/synchronous/api_core/yolov5/utils/flask_rest_api/example_request.py (deleted)
@@ -1,19 +0,0 @@
- # YOLOv5 🚀 by Ultralytics, GPL-3.0 license
- """
- Perform test request
- """
-
- import pprint
-
- import requests
-
- DETECTION_URL = "http://localhost:5000/v1/object-detection/yolov5s"
- IMAGE = "zidane.jpg"
-
- # Read image
- with open(IMAGE, "rb") as f:
-     image_data = f.read()
-
- response = requests.post(DETECTION_URL, files={"image": image_data}).json()
-
- pprint.pprint(response)
api/synchronous/api_core/yolov5/utils/flask_rest_api/restapi.py (deleted)
@@ -1,46 +0,0 @@
- # YOLOv5 🚀 by Ultralytics, GPL-3.0 license
- """
- Run a Flask REST API exposing a YOLOv5s model
- """
-
- import argparse
- import io
-
- import torch
- from flask import Flask, request
- from PIL import Image
-
- app = Flask(__name__)
-
- DETECTION_URL = "/v1/object-detection/yolov5s"
-
-
- @app.route(DETECTION_URL, methods=["POST"])
- def predict():
-     if request.method != "POST":
-         return
-
-     if request.files.get("image"):
-         # Method 1
-         # with request.files["image"] as f:
-         #     im = Image.open(io.BytesIO(f.read()))
-
-         # Method 2
-         im_file = request.files["image"]
-         im_bytes = im_file.read()
-         im = Image.open(io.BytesIO(im_bytes))
-
-         results = model(im, size=640)  # reduce size=320 for faster inference
-         return results.pandas().xyxy[0].to_json(orient="records")
-
-
- if __name__ == "__main__":
-     parser = argparse.ArgumentParser(description="Flask API exposing YOLOv5 model")
-     parser.add_argument("--port", default=5000, type=int, help="port number")
-     opt = parser.parse_args()
-
-     # Fix known issue urllib.error.HTTPError 403: rate limit exceeded https://github.com/ultralytics/yolov5/pull/7210
-     torch.hub._validate_not_a_forked_repo = lambda a, b, c: True
-
-     model = torch.hub.load("ultralytics/yolov5", "yolov5s", force_reload=True)  # force_reload to recache
-     app.run(host="0.0.0.0", port=opt.port)  # debug=True causes Restarting with stat