megadetector 10.0.7__py3-none-any.whl → 10.0.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- megadetector/data_management/cct_json_utils.py +16 -6
- megadetector/data_management/databases/subset_json_db.py +57 -2
- megadetector/detection/pytorch_detector.py +32 -15
- megadetector/detection/run_detector.py +1 -2
- megadetector/detection/run_detector_batch.py +30 -15
- megadetector/detection/run_inference_with_yolov5_val.py +3 -1
- megadetector/detection/run_tiled_inference.py +61 -17
- megadetector/detection/video_utils.py +23 -7
- megadetector/postprocessing/classification_postprocessing.py +5 -1
- megadetector/postprocessing/compare_batch_results.py +48 -28
- megadetector/postprocessing/convert_output_format.py +81 -87
- megadetector/postprocessing/postprocess_batch_results.py +1 -1
- megadetector/postprocessing/subset_json_detector_output.py +83 -0
- megadetector/utils/directory_listing.py +19 -13
- megadetector/utils/path_utils.py +58 -8
- megadetector/utils/url_utils.py +91 -1
- megadetector/utils/wi_taxonomy_utils.py +26 -26
- megadetector/visualization/visualize_video_output.py +16 -6
- {megadetector-10.0.7.dist-info → megadetector-10.0.9.dist-info}/METADATA +1 -1
- {megadetector-10.0.7.dist-info → megadetector-10.0.9.dist-info}/RECORD +23 -23
- {megadetector-10.0.7.dist-info → megadetector-10.0.9.dist-info}/WHEEL +0 -0
- {megadetector-10.0.7.dist-info → megadetector-10.0.9.dist-info}/licenses/LICENSE +0 -0
- {megadetector-10.0.7.dist-info → megadetector-10.0.9.dist-info}/top_level.txt +0 -0
megadetector/utils/directory_listing.py
CHANGED

@@ -21,7 +21,7 @@ from megadetector.utils.path_utils import is_image_file
 
 #%% Directory enumeration functions
 
-def create_plain_index(root, dirs, files, dirname=None):
+def _create_plain_index(root, dirs, files, dirname=None):
     """
     Creates the fairly plain HTML folder index including a preview of a single image file,
     if any is present.
@@ -40,6 +40,7 @@ def create_plain_index(root, dirs, files, dirname=None):
 
     if dirname is None:
         dirname = root or '/'
+    dirname = dirname.replace('\\','/')
 
     html = "<!DOCTYPE html>\n"
     html += "<html lang='en'><head>"
@@ -104,13 +105,14 @@ def create_plain_index(root, dirs, files, dirname=None):
     html += "</body></html>\n"
     return html
 
-# ...def create_plain_index(...)
+# ...def _create_plain_index(...)
 
 
-def traverse_and_create_index(dir,
-
-
-
+def create_html_index(dir,
+                      overwrite=False,
+                      template_fun=_create_plain_index,
+                      basepath=None,
+                      recursive=True):
     """
     Recursively traverses the local directory [dir] and generates a index
     file for each folder using [template_fun] to generate the HTML output.
@@ -118,12 +120,13 @@ def traverse_and_create_index(dir,
 
     Args:
         dir (str): directory to process
-
+        overwrite (bool, optional): whether to over-write existing index file
         template_fun (func, optional): function taking three arguments (string,
             list of string, list of string) representing the current root, the list of folders,
             and the list of files. Should return the HTML source of the index file.
         basepath (str, optional): if not None, the name used for each subfolder in [dir]
            in the output files will be relative to [basepath]
+        recursive (bool, optional): recurse into subfolders
     """
 
     print('Traversing {}'.format(dir))
@@ -141,7 +144,7 @@ def traverse_and_create_index(dir,
         # Output is written to file *root*/index.html
         output_file = os.path.join(root, "index.html")
 
-        if not
+        if (not overwrite) and os.path.isfile(output_file):
             print('Skipping {}, file exists'.format(output_file))
             continue
 
@@ -157,7 +160,10 @@ def traverse_and_create_index(dir,
         with open(output_file, 'wt') as fi:
             fi.write(html)
 
-
+        if not recursive:
+            break
+
+# ...def create_html_index(...)
 
 
 #%% Command-line driver
@@ -171,7 +177,7 @@ def main(): # noqa
     parser.add_argument("--basepath", type=str,
                         help='Folder names will be printed relative to basepath, if specified',
                         default=None)
-    parser.add_argument("--
+    parser.add_argument("--overwrite", action='store_true', default=False,
                         help='If set, the script will overwrite existing index.html files.')
 
     if len(sys.argv[1:]) == 0:
@@ -182,9 +188,9 @@ def main(): # noqa
 
     assert os.path.isdir(args.directory), "{} is not a valid directory".format(args.directory)
 
-
-
-
+    create_html_index(args.directory,
+                      overwrite=args.overwrite,
+                      basepath=args.basepath)
 
 if __name__ == '__main__':
     main()
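
Note: a minimal usage sketch (not part of the package) for the renamed create_html_index entry point shown above; the folder path is a placeholder.

    from megadetector.utils.directory_listing import create_html_index

    # Generate index.html only for the top-level folder; leave recursive=True
    # (the default) to index every subfolder as well
    create_html_index('/data/camera-trap-previews',
                      overwrite=True,
                      recursive=False)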
megadetector/utils/path_utils.py
CHANGED
@@ -528,7 +528,8 @@ def find_images(dirname,
 def clean_filename(filename,
                    allow_list=VALID_FILENAME_CHARS,
                    char_limit=CHAR_LIMIT,
-                   force_lower=False):
+                   force_lower=False,
+                   remove_trailing_leading_whitespace=True):
     r"""
     Removes non-ASCII and other invalid filename characters (on any
     reasonable OS) from a filename, then optionally trims to a maximum length.
@@ -544,11 +545,27 @@ def clean_filename(filename,
         char_limit (int, optional): maximum allowable filename length, if None will skip this
             step
         force_lower (bool, optional): convert the resulting filename to lowercase
-
+        remove_trailing_leading_whitespace (bool, optional): remove trailing and
+            leading whitespace from each component of a path, e.g. does not allow
+            a/b/c /d.jpg
     Returns:
         str: cleaned version of [filename]
     """
 
+    if remove_trailing_leading_whitespace:
+
+        # Best effort to preserve the original separator
+        separator = '/'
+        if '\\' in filename:
+            separator = '\\'
+
+        filename = filename.replace('\\','/')
+        components = filename.split('/')
+        clean_components = [c.strip() for c in components]
+        filename = separator.join(clean_components)
+        if separator == '\\':
+            filename = filename.replace('/','\\')
+
     # keep only valid ascii chars
     cleaned_filename = (unicodedata.normalize('NFKD', filename)
                         .encode('ASCII', 'ignore').decode())
@@ -565,7 +582,8 @@ def clean_filename(filename,
 def clean_path(pathname,
                allow_list=VALID_PATH_CHARS,
                char_limit=CHAR_LIMIT,
-               force_lower=False
+               force_lower=False,
+               remove_trailing_leading_whitespace=True):
     """
     Removes non-ASCII and other invalid path characters (on any reasonable
     OS) from a path, then optionally trims to a maximum length.
@@ -576,13 +594,20 @@ def clean_path(pathname,
         char_limit (int, optional): maximum allowable filename length, if None will skip this
             step
         force_lower (bool, optional): convert the resulting filename to lowercase
+        remove_trailing_leading_whitespace (bool, optional): remove trailing and
+            leading whitespace from each component of a path, e.g. does not allow
+            a/b/c /d.jpg
 
     Returns:
         str: cleaned version of [filename]
     """
 
-    return clean_filename(pathname,
-
+    return clean_filename(pathname,
+                          allow_list=allow_list,
+                          char_limit=char_limit,
+                          force_lower=force_lower,
+                          remove_trailing_leading_whitespace=\
+                          remove_trailing_leading_whitespace)
 
 
 def flatten_path(pathname,separator_chars=SEPARATOR_CHARS,separator_char_replacement='~'):
@@ -1553,6 +1578,7 @@ class TestPathUtils:
         """
 
         self.test_dir = make_test_folder(subfolder='megadetector/path_utils_tests')
+        print('Using temporary folder {} for path utils testing'.format(self.test_dir))
         os.makedirs(self.test_dir, exist_ok=True)
 
 
@@ -1776,7 +1802,11 @@ class TestPathUtils:
             ])
         folders_non_recursive_abs = folder_list(folder_list_dir, recursive=False,
                                                 return_relative_paths=False)
-        assert sorted(folders_non_recursive_abs) == expected_folders_non_recursive_abs
+        assert sorted(folders_non_recursive_abs) == expected_folders_non_recursive_abs, \
+            'Non-recursive folder list failured, expected:\n\n{}\n\nFound:\n\n{}'.format(
+                str(expected_folders_non_recursive_abs),
+                str(folders_non_recursive_abs)
+            )
 
         # Test non-recursive, relative paths
         expected_folders_non_recursive_rel = sorted(['subdir1', 'subdir2'])
@@ -2114,7 +2144,17 @@ class TestPathUtils:
         assert clean_filename("test*file?.txt", char_limit=10) == "testfile.t"
         assert clean_filename("TestFile.TXT", force_lower=True) == "testfile.txt"
         assert clean_filename("file:with<illegal>chars.txt") == "filewithillegalchars.txt"
-
+
+        s = " accented_name_éà.txt"
+
+        assert clean_filename(s,
+                              remove_trailing_leading_whitespace=False) == " accented_name_ea.txt", \
+            'clean_filename with remove_trailing_leading_whitespace=False: {}'.format(
+                clean_filename(s, remove_trailing_leading_whitespace=False))
+
+        assert clean_filename(s, remove_trailing_leading_whitespace=True) == "accented_name_ea.txt", \
+            'clean_filename with remove_trailing_leading_whitespace=False: {}'.format(
+                clean_filename(s, remove_trailing_leading_whitespace=True))
 
         # Separators are not allowed by default in clean_filename
         assert clean_filename("path/to/file.txt") == "pathtofile.txt"
@@ -2444,7 +2484,13 @@ class TestPathUtils:
         un_tar_dir = os.path.join(self.test_dir, "un_tar_contents")
         os.makedirs(un_tar_dir, exist_ok=True)
         with tarfile.open(output_tar_path, 'r:gz') as tf:
-
+            # The "filter" option was added as of Python 3.12, and *not* specifying
+            # filter=None will change behavior as of Python 3.14. We want the unmodified
+            # behavior, but we want to support Python <3.12, so we do a version check.
+            if sys.version_info >= (3, 12):
+                tf.extractall(path=un_tar_dir, filter=None)
+            else:
+                tf.extractall(path=un_tar_dir)
 
         expected_untarred_file1 = os.path.join(un_tar_dir, os.path.relpath(file1_path, self.test_dir))
         expected_untarred_file2 = os.path.join(un_tar_dir, os.path.relpath(file2_path, self.test_dir))
@@ -2618,7 +2664,9 @@ def test_path_utils():
 
     test_instance = TestPathUtils()
    test_instance.set_up()
+
     try:
+
         test_instance.test_is_image_file()
         test_instance.test_find_image_strings()
         test_instance.test_find_images()
@@ -2643,5 +2691,7 @@ def test_path_utils():
         test_instance.test_add_files_to_single_tar_file()
         test_instance.test_parallel_zip_individual_files_and_folders()
         test_instance.test_compute_file_hash()
+
     finally:
+
         test_instance.tear_down()
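
Note: a short illustration (not part of the package) of the new remove_trailing_leading_whitespace option; expected outputs follow the new test assertions above.

    from megadetector.utils.path_utils import clean_filename, clean_path

    clean_filename(" accented_name_éà.txt")
    # 'accented_name_ea.txt' (leading/trailing whitespace now stripped by default)

    clean_filename(" accented_name_éà.txt", remove_trailing_leading_whitespace=False)
    # ' accented_name_ea.txt' (previous behavior)

    clean_path("a/b/c /d.jpg")
    # strips the trailing space from the "c " component before cleaning the path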
megadetector/utils/url_utils.py
CHANGED
@@ -2,7 +2,7 @@
 
 url_utils.py
 
-Frequently-used functions for downloading or
+Frequently-used functions for downloading, manipulating, or serving URLs
 
 """
 
@@ -16,6 +16,9 @@ import urllib.error
 import requests
 import shutil
 import pytest
+import socketserver
+import threading
+import http.server
 
 from functools import partial
 from tqdm import tqdm
@@ -453,6 +456,93 @@ def get_url_sizes(urls,n_workers=1,pool_type='thread',timeout=None,verbose=False
     return url_to_size
 
 
+#%% Singleton HTTP server
+
+class QuietHTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
+    """
+    SimpleHTTPRequestHandler sublcass that suppresses console printouts
+    """
+    def __init__(self, *args, directory=None, **kwargs):
+        super().__init__(*args, directory=directory, **kwargs)
+
+    def log_message(self, format, *args): # noqa
+        pass
+
+
+class SingletonHTTPServer:
+    """
+    HTTP server that runs on a local port, serving a particular local folder. Runs as a
+    singleton, so starting a server in a new folder closes the previous server. I use this
+    primarily to serve MD/SpeciesNet previews from manage_local_batch, which can exceed
+    the 260-character filename length limitation imposed by browser on Windows, so really the
+    point here is just to remove characters from the URL.
+    """
+
+    _server = None
+    _thread = None
+
+    @classmethod
+    def start_server(cls, directory, port=8000, host='localhost'):
+        """
+        Start or restart the HTTP server with a specific directory
+
+        Args:
+            directory (str): the root folder served by the server
+            port (int, optional): the port on which to create the server
+            host (str, optional): the host on which to listen, typically
+                either "localhost" (default) or "0.0.0.0"
+
+        Returns:
+            str: URL to the running host
+        """
+
+        # Stop the existing server instance if necessary
+        cls.stop_server()
+
+        # Create new server
+        handler = partial(QuietHTTPRequestHandler, directory=directory)
+        cls._server = socketserver.TCPServer((host, port), handler)
+
+        # Start server in daemon thread (dies when parent process dies)
+        cls._thread = threading.Thread(target=cls._server.serve_forever)
+        cls._thread.daemon = True
+        cls._thread.start()
+
+        print(f"Serving {directory} at http://{host}:{port}")
+        return f"http://{host}:{port}"
+
+
+    @classmethod
+    def stop_server(cls):
+        """
+        Stop the current server (if one is running)
+        """
+
+        if cls._server:
+            cls._server.shutdown()
+            cls._server.server_close()
+            cls._server = None
+        if cls._thread:
+            cls._thread.join(timeout=1)
+            cls._thread = None
+
+
+    @classmethod
+    def is_running(cls):
+        """
+        Check whether the server is currently running.
+
+        Returns:
+            bool: True if the server is running
+        """
+
+        return (cls._server is not None) and \
+               (cls._thread is not None) and \
+               (cls._thread.is_alive())
+
+# ...class SingletonHTTPServer
+
+
 #%% Tests
 
 # Constants for tests
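
Note: a minimal usage sketch (not part of the package) for the new SingletonHTTPServer above; the preview folder is a placeholder.

    from megadetector.utils.url_utils import SingletonHTTPServer

    base_url = SingletonHTTPServer.start_server('/data/md-previews', port=8000)
    assert SingletonHTTPServer.is_running()
    # ...browse base_url; starting a server on another folder stops this one...
    SingletonHTTPServer.stop_server()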
megadetector/utils/wi_taxonomy_utils.py
CHANGED

@@ -10,8 +10,6 @@ Functions related to working with the SpeciesNet / Wildlife Insights taxonomy.
 
 import os
 import json
-import tempfile
-import uuid
 
 import pandas as pd
 
@@ -399,11 +397,12 @@ def generate_whole_image_detections_for_classifications(classifications_json_fil
 
 
 def generate_md_results_from_predictions_json(predictions_json_file,
-                                              md_results_file,
+                                              md_results_file=None,
                                               base_folder=None,
                                               max_decimals=5,
                                               convert_human_to_person=True,
-                                              convert_homo_species_to_human=True
+                                              convert_homo_species_to_human=True,
+                                              verbose=False):
     """
     Generate an MD-formatted .json file from a predictions.json file, generated by the
     SpeciesNet ensemble. Typically, MD results files use relative paths, and predictions.json
@@ -424,7 +423,7 @@ def generate_md_results_from_predictions_json(predictions_json_file,
 
     Args:
         predictions_json_file (str): path to a predictions.json file, or a dict
-        md_results_file (str): path to which we should write an MD-formatted .json file
+        md_results_file (str, optional): path to which we should write an MD-formatted .json file
         base_folder (str, optional): leading string to remove from each path in the
             predictions.json file
         max_decimals (int, optional): number of decimal places to which we should round
@@ -435,6 +434,10 @@ def generate_md_results_from_predictions_json(predictions_json_file,
         convert_homo_species_to_human (bool, optional): the ensemble often rolls human predictions
             up to "homo species", which isn't wrong, but looks odd. This forces these back to
             "homo sapiens".
+        verbose (bool, optional): enable additional debug output
+
+    Returns:
+        dict: results in MD format
     """
 
     # Read predictions file
@@ -558,7 +561,9 @@ def generate_md_results_from_predictions_json(predictions_json_file,
         # but a non-blank prediction. For now, create a fake detection to handle this prediction.
         if len(im_out['detections']) == 0:
 
-
+            if verbose:
+                print('Warning: creating fake detection for non-blank whole-image classification' + \
+                      ' in {}'.format(im_in['file']))
             det_out = {}
             all_unknown_detections.append(det_out)
 
@@ -640,12 +645,15 @@ def generate_md_results_from_predictions_json(predictions_json_file,
     output_dict['classification_category_descriptions'] = classification_category_descriptions
     output_dict['images'] = images_out
 
-
-
+    if md_results_file is not None:
+        with open(md_results_file,'w') as f:
+            json.dump(output_dict,f,indent=1)
 
-
-
-
+        validation_options = ValidateBatchResultsOptions()
+        validation_options.raise_errors = True
+        _ = validate_batch_results(md_results_file, options=validation_options)
+
+    return output_dict
 
 # ...def generate_md_results_from_predictions_json(...)
 
@@ -885,24 +893,16 @@ def load_md_or_speciesnet_file(fn,verbose=True):
     with open(fn,'r') as f:
         detector_output = json.load(f)
 
-    #
+    # If this is a SpeicesNet file, convert to MD format
     if 'predictions' in detector_output:
+
         if verbose:
             print('This appears to be a SpeciesNet output file, converting to MD format')
-
-
-
-
-
-            md_results_file=temp_results_file,
-            base_folder=None)
-        with open(temp_results_file,'r') as f:
-            detector_output = json.load(f)
-        try:
-            os.remove(temp_results_file)
-        except Exception:
-            if verbose:
-                print('Warning: error removing temporary .json {}'.format(temp_results_file))
+        detector_output = generate_md_results_from_predictions_json(predictions_json_file=fn,
+                                                                    md_results_file=None,
+                                                                    base_folder=None)
+
+    # ...if this is a SpeciesNet file
 
     assert 'images' in detector_output, \
         'Detector output file should be a json file with an "images" field.'
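
Note: with md_results_file now optional, the conversion above can be used entirely in memory; a brief sketch (not part of the package), where predictions.json is a placeholder SpeciesNet ensemble output file.

    from megadetector.utils.wi_taxonomy_utils import generate_md_results_from_predictions_json

    md_results = generate_md_results_from_predictions_json('predictions.json',
                                                           md_results_file=None,
                                                           verbose=True)
    print(len(md_results['images']))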
megadetector/visualization/visualize_video_output.py
CHANGED

@@ -67,6 +67,10 @@ class VideoVisualizationOptions:
         #: Skip frames before first and after last above-threshold detection
         self.trim_to_detections = False
 
+        #: By default, output videos use the same extension as input videos,
+        #: use this to force a particular extension
+        self.output_extension = None
+
 # ...class VideoVisualizationOptions
 
 
@@ -287,10 +291,16 @@ def _process_video(video_entry,
         result['error'] = 'Video not found: {}'.format(input_video_path)
         return result
 
-
-
-
-
+    output_fn_relative = video_entry['file']
+
+    if options.output_extension is not None:
+        ext = options.output_extension
+        if not ext.startswith('.'):
+            ext = '.' + ext
+        output_fn_relative = os.path.splitext(output_fn_relative)[0] + ext
+
+    output_fn_abs = os.path.join(out_dir, output_fn_relative)
+    os.makedirs(os.path.dirname(output_fn_abs), exist_ok=True)
 
     # Get frames to process
     frames_to_process = _get_frames_to_process(video_entry,
@@ -392,10 +402,10 @@ def _process_video(video_entry,
 
     # Create VideoWriter
     fourcc = cv2.VideoWriter_fourcc(*options.fourcc)
-    video_writer = cv2.VideoWriter(
+    video_writer = cv2.VideoWriter(output_fn_abs, fourcc, output_framerate, (width, height))
 
     if not video_writer.isOpened():
-        result['error'] = 'Failed to open video writer for {}'.format(
+        result['error'] = 'Failed to open video writer for {}'.format(output_fn_abs)
        return result
 
     # Write frames
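
Note: a brief sketch (not part of the package) of the new output_extension option above; only the options object is shown, not the full rendering call, and the constructor is assumed to take no required arguments.

    from megadetector.visualization.visualize_video_output import VideoVisualizationOptions

    options = VideoVisualizationOptions()
    # Force .mp4 output regardless of the input video's extension; per the diff
    # above, a leading "." is added automatically if omitted
    options.output_extension = 'mp4'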
{megadetector-10.0.7.dist-info → megadetector-10.0.9.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: megadetector
-Version: 10.0.7
+Version: 10.0.9
 Summary: MegaDetector is an AI model that helps conservation folks spend less time doing boring things with camera trap images.
 Author-email: Your friendly neighborhood MegaDetector team <cameratraps@lila.science>
 Maintainer-email: Your friendly neighborhood MegaDetector team <cameratraps@lila.science>
{megadetector-10.0.7.dist-info → megadetector-10.0.9.dist-info}/RECORD
CHANGED

@@ -32,7 +32,7 @@ megadetector/classification/efficientnet/utils.py,sha256=76SQdh0zK7CFcwTW4kiechC
 megadetector/data_management/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 megadetector/data_management/animl_to_md.py,sha256=Z6PDJxeM_5dMZJBM3j0mxDPW2_1bNkXx0M3_qq42_Ig,4416
 megadetector/data_management/camtrap_dp_to_coco.py,sha256=HoCGMzZTEvnudnAjbOr-mCizXHmc8mMNSUChy_Q9PkI,9673
-megadetector/data_management/cct_json_utils.py,sha256=
+megadetector/data_management/cct_json_utils.py,sha256=Azyuwok6-g5YGVAdBzv3-eJIlplXCoTcjGWu6zy9bQ0,19917
 megadetector/data_management/cct_to_md.py,sha256=e1fYevSz0m65n5H16uB6uwzNiXiwxjdB2ka5p68R4d0,5120
 megadetector/data_management/cct_to_wi.py,sha256=wcBOmurXY5I-hiqV6SmRSGUAeYaKHEU1LgCZjqVmCyw,9561
 megadetector/data_management/coco_to_labelme.py,sha256=uYJ60XoZfHUEfLzj-EjLyeNM590skNnMp-IThWwNISo,8683
@@ -59,7 +59,7 @@ megadetector/data_management/databases/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JC
 megadetector/data_management/databases/add_width_and_height_to_db.py,sha256=EYfFGPkXyFz6ZGQfXjCK3pNXLBg0hu73wiACdEEB0E0,2964
 megadetector/data_management/databases/combine_coco_camera_traps_files.py,sha256=Au7akR2KZHm_l8-MGGRGf0CQy1az_JVgZW5Yz1_XQeQ,6609
 megadetector/data_management/databases/integrity_check_json_db.py,sha256=kxGCHpBADXT_LHVLUENGvmby-orvVYIsK6fdgqhABBI,17386
-megadetector/data_management/databases/subset_json_db.py,sha256=
+megadetector/data_management/databases/subset_json_db.py,sha256=AQF-12vnZJ73JrWvjUd2ME666MmQDiCOvO0e7zGjPpg,6243
 megadetector/data_management/lila/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 megadetector/data_management/lila/create_lila_blank_set.py,sha256=tApCL4XN1Vzl6YvcdI6SY4TZbHeCyHLzNYweFfX0sy0,19490
 megadetector/data_management/lila/create_lila_test_set.py,sha256=UWJPKrwNW-UVeGrMUrFvmIt2UHVyuFiPzRFKkbEbk5A,5014
@@ -73,21 +73,21 @@ megadetector/data_management/lila/test_lila_metadata_urls.py,sha256=ThU78Ks5V3rF
 megadetector/detection/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 megadetector/detection/change_detection.py,sha256=Ne3GajbH_0KPBU8ruHp4Rkr0uKd5oKAMQ3CQTRKRHgQ,28659
 megadetector/detection/process_video.py,sha256=kuQHrpOC3LQo9ecqJPpzkds9fZVnoLmrfJw_yh-oxi8,17890
-megadetector/detection/pytorch_detector.py,sha256
-megadetector/detection/run_detector.py,sha256=
-megadetector/detection/run_detector_batch.py,sha256=
-megadetector/detection/run_inference_with_yolov5_val.py,sha256=
+megadetector/detection/pytorch_detector.py,sha256=-TvtDcX2Hh_CgBEz7Eg2NzyEts8DjOgY0mE_fle6zkM,60705
+megadetector/detection/run_detector.py,sha256=JWTIYsk5aCgW9PBCGnAECe31JwKHhkfp6zKsSDqfrsA,46831
+megadetector/detection/run_detector_batch.py,sha256=Ah-LSsA73Io-GH0BpI8qopGRI5eUKWhhIR19lC6s41A,90602
+megadetector/detection/run_inference_with_yolov5_val.py,sha256=dJXh3BwKOQQ4OA-Mq_heEb7AfBAk7qKUAagnIGuFtaU,53689
 megadetector/detection/run_md_and_speciesnet.py,sha256=Dp_SpJZp0pX9jzFtxM6zPCyBNq49uyQpMDAdNDLVorM,50280
-megadetector/detection/run_tiled_inference.py,sha256=
+megadetector/detection/run_tiled_inference.py,sha256=v_wL4uZfYdswJdYXBoGci62UvVprBD9OHKkrw2g-G5M,41081
 megadetector/detection/tf_detector.py,sha256=3b2MiqgMw8KBDzHQliUSDXWrmKpa9iZnfe6EgYpMcYo,8398
-megadetector/detection/video_utils.py,sha256=
+megadetector/detection/video_utils.py,sha256=M7yje6XeOnR_QwDyuG1o6bwTKvRysoA2NiOK2MSi98E,53943
 megadetector/postprocessing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 megadetector/postprocessing/add_max_conf.py,sha256=9MYtsH2mwkiaZb7Qcor5J_HskfAj7d9srp8G_Qldpk0,1722
 megadetector/postprocessing/categorize_detections_by_size.py,sha256=DpZpRNFlyeOfWuOc6ICuENgIWDCEtiErJ_frBZp9lYM,5382
-megadetector/postprocessing/classification_postprocessing.py,sha256=
+megadetector/postprocessing/classification_postprocessing.py,sha256=OoPVr34vXyLykB42SplcSKo9cj7dgf8Yju_DCDhd6_k,68574
 megadetector/postprocessing/combine_batch_outputs.py,sha256=BEP8cVa0sMIPg7tkWQc_8vOEPnbmWjOsQdVJHe61uz8,8468
-megadetector/postprocessing/compare_batch_results.py,sha256=
-megadetector/postprocessing/convert_output_format.py,sha256=
+megadetector/postprocessing/compare_batch_results.py,sha256=QbdegGZkgVLZdO5Vjm3aTAQS5VzP9_tX0PKwCSkHKhw,85009
+megadetector/postprocessing/convert_output_format.py,sha256=3KLO6NqddofgIEYjV8_iZIf0iXaplFN2AroUq5i4R7k,14472
 megadetector/postprocessing/create_crop_folder.py,sha256=T37HnvBEakikXY3n3Bgk5boFo_0-Z5aKnkEWXv-Ki4s,23166
 megadetector/postprocessing/detector_calibration.py,sha256=UFjJ8D6tMghatLRj3CyrtJ7vrPIJkULMNsYMIj98j2M,20495
 megadetector/postprocessing/generate_csv_report.py,sha256=KIGT8zFZev-cl4YOCq2BqnodBWsZG-7CZaWuep_211U,19169
@@ -96,11 +96,11 @@ megadetector/postprocessing/md_to_coco.py,sha256=CkN1ky4A2uZj_gUu8rmyaaxyOH00k6J
 megadetector/postprocessing/md_to_labelme.py,sha256=r-EGyXVrSSyN6N6wqQ6pwKeV-fCNzb50ZkJqaDqjrvM,11935
 megadetector/postprocessing/md_to_wi.py,sha256=8IHtkMNKRMIcvE2jsKNfKtdH94JTkzrByyo5uBXHOSA,1220
 megadetector/postprocessing/merge_detections.py,sha256=hvb4TJ6u1PyWOVQai9wZk72li1GpjmBxbpfUcV3qqXY,15749
-megadetector/postprocessing/postprocess_batch_results.py,sha256=
+megadetector/postprocessing/postprocess_batch_results.py,sha256=M6dTnMEPm6iaLYBwZl4qvAbMGPvWIcD7-HhVty43lTw,84610
 megadetector/postprocessing/remap_detection_categories.py,sha256=BE6Ce-PGBEx1FyG3XwbYp2D5sh5xUlVf6fonaMuPMAg,7927
 megadetector/postprocessing/render_detection_confusion_matrix.py,sha256=oNvDTh5td5ynELNnhz4XaLP2HiwLuojkJlob15TpgcY,26365
 megadetector/postprocessing/separate_detections_into_folders.py,sha256=Yvpkl_MsWbGoo4zvQHrXHkATRJaYdYligItfg9bvuV8,32262
-megadetector/postprocessing/subset_json_detector_output.py,sha256=
+megadetector/postprocessing/subset_json_detector_output.py,sha256=HI-TD0rEzIttg7U8ap8HWHJnbXXjY2vS74axnmLnOGU,35041
 megadetector/postprocessing/top_folders_to_bottom.py,sha256=zYrqMHjUZG8urh2CYphfs91ZQ620uqe-TL8jVYy8KVw,6049
 megadetector/postprocessing/validate_batch_results.py,sha256=9nr7LeKMdki9Y821ag2bZFQCxuq0OqINDH7cPXyVcY8,12059
 megadetector/postprocessing/repeat_detection_elimination/find_repeat_detections.py,sha256=XgVeyga8iSC01MAjXxb2rn-CgJTYHqC_gfxxEoSn4aw,9420
@@ -121,17 +121,17 @@ megadetector/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuF
 megadetector/tests/test_nms_synthetic.py,sha256=oY6xmT1sLSSN7weQJ8TPTaZgAiSiZ6s43EffUhwLWIw,14707
 megadetector/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 megadetector/utils/ct_utils.py,sha256=IiZV8dWtJamveINv_joATMgMPHeDkZ8l82jDEQcLgQg,60502
-megadetector/utils/directory_listing.py,sha256=
+megadetector/utils/directory_listing.py,sha256=0-VMuQWo6rETIKERqfX6Zn7pRp_GJ4JiFiWvsw9PQcU,6500
 megadetector/utils/extract_frames_from_video.py,sha256=vjSVgxtb5z2syHCVYWc2KdNUpc-O6yY8nkbj_wqsIvY,12255
 megadetector/utils/gpu_test.py,sha256=5zUfAVeSjH8I08eCqayFmMxL-0mix8SjJJTe5ORABvU,3544
 megadetector/utils/md_tests.py,sha256=Iup4KjyIpLUpZ4TzzwEyGK61rg6aH7NrEQsdQ-ov51I,80300
-megadetector/utils/path_utils.py,sha256=
+megadetector/utils/path_utils.py,sha256=tV8eh77m_uS8YYpOQZO8GUKR6l5sZrSSIkApqgi_DmY,101030
 megadetector/utils/process_utils.py,sha256=gQcpH9WYvGPUs0FhtJ5_Xvl6JsvoGz8_mnDQk0PbTRM,5673
 megadetector/utils/split_locations_into_train_val.py,sha256=fd_6pj1aWY6hybwaXvBn9kBcOHjI90U-OsTmEAGpeu8,10297
 megadetector/utils/string_utils.py,sha256=r2Maw3zbzk3EyaZcNkdqr96yP_8m4ey6v0WxlemEY9U,6155
-megadetector/utils/url_utils.py,sha256=
+megadetector/utils/url_utils.py,sha256=PzqN-VquAZFBRin2ZaYi5U2WCsMYSwvM0X-NN45Fdh4,28448
 megadetector/utils/wi_platform_utils.py,sha256=8CGpiox_aL6RVZKfJqPVwpW4_6Cjku0HIajJPcmeNpE,32019
-megadetector/utils/wi_taxonomy_utils.py,sha256=
+megadetector/utils/wi_taxonomy_utils.py,sha256=o4AvY5gZXfk69pPckdGxgIPhqsH2-hJQucavSRsUnoc,66513
 megadetector/utils/write_html_image_list.py,sha256=6Tbe5wyUxoBYJgH9yVrxxKCeWF2BVre_wQMEOQJ-ZIU,9068
 megadetector/visualization/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 megadetector/visualization/plot_utils.py,sha256=uDDlOhdaJ3V8sGj2kS9b0cgszKc8WCq2_ofl6TW_XUs,10727
@@ -139,9 +139,9 @@ megadetector/visualization/render_images_with_thumbnails.py,sha256=-XX4PG4wnrFjF
 megadetector/visualization/visualization_utils.py,sha256=E5uvysS3F1S_yiPFxZty3U2f6cjuE8zG6XWggYOu-5o,75921
 megadetector/visualization/visualize_db.py,sha256=8YDWSR0eMehXYdPtak9z8UUw35xV7hu-0eCuzgSLjWc,25558
 megadetector/visualization/visualize_detector_output.py,sha256=HpWh7ugwo51YBHsFi40iAp9G-uRAMMjgsm8H_uBolBs,20295
-megadetector/visualization/visualize_video_output.py,sha256=
-megadetector-10.0.
-megadetector-10.0.
-megadetector-10.0.
-megadetector-10.0.
-megadetector-10.0.
+megadetector/visualization/visualize_video_output.py,sha256=ibMGB5ynMwNXmaMlY8h8tURb-Lyvuxs1EB08x_jvev0,20606
+megadetector-10.0.9.dist-info/licenses/LICENSE,sha256=RMa3qq-7Cyk7DdtqRj_bP1oInGFgjyHn9-PZ3PcrqIs,1100
+megadetector-10.0.9.dist-info/METADATA,sha256=s8q_fi96c5kt67xihApa5_Lko4voT5lH0swUawbUgzE,6486
+megadetector-10.0.9.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+megadetector-10.0.9.dist-info/top_level.txt,sha256=wf9DXa8EwiOSZ4G5IPjakSxBPxTDjhYYnqWRfR-zS4M,13
+megadetector-10.0.9.dist-info/RECORD,,
{megadetector-10.0.7.dist-info → megadetector-10.0.9.dist-info}/WHEEL
File without changes

{megadetector-10.0.7.dist-info → megadetector-10.0.9.dist-info}/licenses/LICENSE
File without changes

{megadetector-10.0.7.dist-info → megadetector-10.0.9.dist-info}/top_level.txt
File without changes