megadetector 5.0.15__py3-none-any.whl → 5.0.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of megadetector might be problematic. Click here for more details.

Files changed (34) hide show
  1. megadetector/data_management/importers/import_desert_lion_conservation_camera_traps.py +387 -0
  2. megadetector/data_management/importers/snapshot_safari_importer_reprise.py +28 -16
  3. megadetector/data_management/lila/generate_lila_per_image_labels.py +3 -3
  4. megadetector/data_management/lila/test_lila_metadata_urls.py +2 -2
  5. megadetector/data_management/remove_exif.py +61 -36
  6. megadetector/data_management/yolo_to_coco.py +25 -6
  7. megadetector/detection/process_video.py +270 -127
  8. megadetector/detection/pytorch_detector.py +13 -11
  9. megadetector/detection/run_detector.py +9 -2
  10. megadetector/detection/run_detector_batch.py +8 -1
  11. megadetector/detection/run_inference_with_yolov5_val.py +58 -10
  12. megadetector/detection/tf_detector.py +8 -2
  13. megadetector/detection/video_utils.py +214 -18
  14. megadetector/postprocessing/md_to_coco.py +31 -9
  15. megadetector/postprocessing/postprocess_batch_results.py +23 -7
  16. megadetector/postprocessing/repeat_detection_elimination/repeat_detections_core.py +5 -2
  17. megadetector/postprocessing/subset_json_detector_output.py +22 -12
  18. megadetector/taxonomy_mapping/map_new_lila_datasets.py +3 -3
  19. megadetector/taxonomy_mapping/prepare_lila_taxonomy_release.py +2 -1
  20. megadetector/taxonomy_mapping/preview_lila_taxonomy.py +1 -1
  21. megadetector/taxonomy_mapping/simple_image_download.py +5 -0
  22. megadetector/taxonomy_mapping/species_lookup.py +1 -1
  23. megadetector/utils/ct_utils.py +48 -0
  24. megadetector/utils/md_tests.py +231 -56
  25. megadetector/utils/path_utils.py +2 -2
  26. megadetector/utils/torch_test.py +32 -0
  27. megadetector/utils/url_utils.py +101 -4
  28. megadetector/visualization/visualization_utils.py +21 -6
  29. megadetector/visualization/visualize_db.py +16 -0
  30. {megadetector-5.0.15.dist-info → megadetector-5.0.17.dist-info}/LICENSE +0 -0
  31. {megadetector-5.0.15.dist-info → megadetector-5.0.17.dist-info}/METADATA +5 -7
  32. {megadetector-5.0.15.dist-info → megadetector-5.0.17.dist-info}/RECORD +34 -32
  33. {megadetector-5.0.15.dist-info → megadetector-5.0.17.dist-info}/WHEEL +1 -1
  34. {megadetector-5.0.15.dist-info → megadetector-5.0.17.dist-info}/top_level.txt +0 -0
@@ -75,7 +75,8 @@ def download_url(url,
75
75
  destination_filename=None,
76
76
  progress_updater=None,
77
77
  force_download=False,
78
- verbose=True):
78
+ verbose=True,
79
+ escape_spaces=True):
79
80
  """
80
81
  Downloads a URL to a file. If no file is specified, creates a temporary file,
81
82
  making a best effort to avoid filename collisions.
@@ -92,6 +93,7 @@ def download_url(url,
92
93
  force_download (bool, optional): download this file even if [destination_filename]
93
94
  exists.
94
95
  verbose (bool, optional): enable additional debug console output
96
+ escape_spaces (bool, optional): replace ' ' with '%20'
95
97
 
96
98
  Returns:
97
99
  str: the filename to which [url] was downloaded, the same as [destination_filename]
@@ -107,6 +109,7 @@ def download_url(url,
107
109
  url_no_sas = url.split('?')[0]
108
110
 
109
111
  if destination_filename is None:
112
+
110
113
  target_folder = get_temp_folder()
111
114
  url_without_sas = url.split('?', 1)[0]
112
115
 
@@ -119,6 +122,9 @@ def download_url(url,
119
122
  destination_filename = \
120
123
  os.path.join(target_folder,url_as_filename)
121
124
 
125
+ if escape_spaces:
126
+ url = url.replace(' ','%20')
127
+
122
128
  if (not force_download) and (os.path.isfile(destination_filename)):
123
129
  if verbose:
124
130
  print('Bypassing download of already-downloaded file {}'.format(os.path.basename(url_no_sas)))
@@ -135,6 +141,8 @@ def download_url(url,
135
141
 
136
142
  return destination_filename
137
143
 
144
+ # ...def download_url(...)
145
+
138
146
 
139
147
  def download_relative_filename(url, output_base, verbose=False):
140
148
  """
@@ -162,6 +170,8 @@ def download_relative_filename(url, output_base, verbose=False):
162
170
  destination_filename = os.path.join(output_base,relative_filename)
163
171
  return download_url(url, destination_filename, verbose=verbose)
164
172
 
173
+ # ...def download_relative_filename(...)
174
+
165
175
 
166
176
  def _do_parallelized_download(download_info,overwrite=False,verbose=False):
167
177
  """
@@ -191,6 +201,8 @@ def _do_parallelized_download(download_info,overwrite=False,verbose=False):
191
201
  result['status'] = 'success'
192
202
  return result
193
203
 
204
+ # ...def _do_parallelized_download(...)
205
+
194
206
 
195
207
  def parallel_download_urls(url_to_target_file,verbose=False,overwrite=False,
196
208
  n_workers=20,pool_type='thread'):
@@ -251,8 +263,10 @@ def parallel_download_urls(url_to_target_file,verbose=False,overwrite=False,
251
263
 
252
264
  return results
253
265
 
254
-
255
- def test_url(url, error_on_failure=True, timeout=None):
266
+ # ...def parallel_download_urls(...)
267
+
268
+
269
+ def test_url(url,error_on_failure=True,timeout=None):
256
270
  """
257
271
  Tests the availability of [url], returning an http status code.
258
272
 
@@ -275,7 +289,7 @@ def test_url(url, error_on_failure=True, timeout=None):
275
289
  return r.status_code
276
290
 
277
291
 
278
- def test_urls(urls, error_on_failure=True, n_workers=1, pool_type='thread', timeout=None):
292
+ def test_urls(urls,error_on_failure=True,n_workers=1,pool_type='thread',timeout=None):
279
293
  """
280
294
  Verify that URLs are available (i.e., returns status 200). By default,
281
295
  errors if any URL is unavailable.
@@ -321,3 +335,86 @@ def test_urls(urls, error_on_failure=True, n_workers=1, pool_type='thread', time
321
335
  urls), total=len(urls)))
322
336
 
323
337
  return status_codes
338
+
339
+ # ...def test_urls(...)
340
+
341
+
342
def get_url_size(url,verbose=False,timeout=None):
    """
    Get the size of the file pointed to by a URL, based on the Content-Length property. If the
    URL is not available, or the Content-Length property is not available, or the Content-Length
    property is not an integer, returns None.

    Args:
        url (str): the url to test
        verbose (bool, optional): enable additional debug output
        timeout (int, optional): timeout in seconds to wait before considering this
            access attempt to be a failure; see requests.head() for precise documentation

    Returns:
        int: the file size in bytes, or None if it can't be retrieved
    """

    try:
        r = urllib.request.Request(url,method='HEAD')
        # Use a context manager so the connection is always closed; the previous
        # implementation leaked the response object on every call.
        with urllib.request.urlopen(r, timeout=timeout) as f:
            if f.status != 200:
                if verbose:
                    print('Status {} retrieving file size for {}'.format(f.status,url))
                return None
            size_bytes = int(f.headers['Content-Length'])
            return size_bytes
    except Exception as e:
        # Any failure (unreachable URL, missing/non-integer Content-Length, timeout)
        # maps to a None return by design; this is a best-effort query.
        if verbose:
            print('Error retrieving file size for {}:\n{}'.format(url,str(e)))
        return None

# ...def get_url_size(...)
373
+
374
+
375
def get_url_sizes(urls,n_workers=1,pool_type='thread',timeout=None,verbose=False):
    """
    Retrieve file sizes for the URLs specified by [urls]. Returns None for any URLs
    that we can't access, or URLs for which the Content-Length property is not set.

    Args:
        urls (list): list of URLs for which we should retrieve sizes
        n_workers (int, optional): number of concurrent workers, set to <=1 to disable
            parallelization
        pool_type (str, optional): worker type to use; should be 'thread' or 'process'
        timeout (int, optional): timeout in seconds to wait before considering this
            access attempt to be a failure; see requests.head() for precise documentation
        verbose (bool, optional): print additional debug information

    Returns:
        dict: maps urls to file sizes, which will be None for URLs for which we were unable
        to retrieve a valid size.
    """

    url_to_size = {}

    if n_workers <= 1:

        for url in tqdm(urls):
            url_to_size[url] = get_url_size(url,verbose=verbose,timeout=timeout)

    else:

        if pool_type == 'thread':
            pool = ThreadPool(n_workers)
        else:
            assert pool_type == 'process', 'Unsupported pool type {}'.format(pool_type)
            pool = Pool(n_workers)

        print('Starting a {} pool with {} workers'.format(pool_type,n_workers))

        # Make sure worker threads/processes are released even if a download
        # errors out; the previous implementation never closed the pool.
        try:
            file_sizes = list(tqdm(pool.imap(
                partial(get_url_size,verbose=verbose,timeout=timeout),
                urls), total=len(urls)))
        finally:
            pool.close()
            pool.join()

        for i_url,url in enumerate(urls):
            url_to_size[url] = file_sizes[i_url]

    return url_to_size

# ...def get_url_sizes(...)
@@ -536,6 +536,9 @@ def render_detection_bounding_boxes(detections,
536
536
 
537
537
  x1, y1, w_box, h_box = detection['bbox']
538
538
  display_boxes.append([y1, x1, y1 + h_box, x1 + w_box])
539
+
540
+ # The class index to use for coloring this box, which may be based on the detection
541
+ # category or on the most confident classification category.
539
542
  clss = detection['category']
540
543
 
541
544
  # {} is the default, which means "show labels with no mapping", so don't use "if label_map" here
@@ -558,22 +561,30 @@ def render_detection_bounding_boxes(detections,
558
561
  assert len(displayed_label) == 1
559
562
  displayed_label[0] += ' ' + custom_string
560
563
 
561
- if 'classifications' in detection:
564
+ if ('classifications' in detection) and len(detection['classifications']) > 0:
562
565
 
563
- # To avoid duplicate colors with detection-only visualization, offset
564
- # the classification class index by the number of detection classes
565
- clss = annotation_constants.NUM_DETECTOR_CATEGORIES + int(detection['classifications'][0][0])
566
566
  classifications = detection['classifications']
567
+
567
568
  if len(classifications) > max_classifications:
568
569
  classifications = classifications[0:max_classifications]
569
570
 
571
+ max_classification_category = 0
572
+ max_classification_conf = -100
573
+
570
574
  for classification in classifications:
571
575
 
572
576
  classification_conf = classification[1]
573
- if classification_conf is not None and \
574
- classification_conf < classification_confidence_threshold:
577
+ if classification_conf is None or \
578
+ classification_conf < classification_confidence_threshold:
575
579
  continue
580
+
576
581
  class_key = classification[0]
582
+
583
+ # Is this the most confident classification for this detection?
584
+ if classification_conf > max_classification_conf:
585
+ max_classification_conf = classification_conf
586
+ max_classification_category = int(class_key)
587
+
577
588
  if (classification_label_map is not None) and (class_key in classification_label_map):
578
589
  class_name = classification_label_map[class_key]
579
590
  else:
@@ -585,6 +596,10 @@ def render_detection_bounding_boxes(detections,
585
596
 
586
597
  # ...for each classification
587
598
 
599
+ # To avoid duplicate colors with detection-only visualization, offset
600
+ # the classification class index by the number of detection classes
601
+ clss = annotation_constants.NUM_DETECTOR_CATEGORIES + max_classification_category
602
+
588
603
  # ...if we have classification results
589
604
 
590
605
  display_strs.append(displayed_label)
@@ -122,6 +122,14 @@ class DbVizOptions:
122
122
 
123
123
  #: Enable additionald debug console output
124
124
  self.verbose = False
125
+
126
+ #: COCO files used for evaluation may contain confidence scores, this
127
+ #: determines the field name used for confidence scores
128
+ self.confidence_field_name = 'score'
129
+
130
+ #: Optionally apply a confidence threshold; this requires that [confidence_field_name]
131
+ #: be present in all detections.
132
+ self.confidence_threshold = None
125
133
 
126
134
 
127
135
  #%% Helper functions
@@ -294,6 +302,14 @@ def visualize_db(db_path, output_dir, image_base_dir, options=None):
294
302
  # iAnn = 0; anno = annos_i.iloc[iAnn]
295
303
  for iAnn,anno in annos_i.iterrows():
296
304
 
305
+ if options.confidence_threshold is not None:
306
+ assert options.confidence_field_name in anno, \
307
+ 'Error: confidence thresholding requested, ' + \
308
+ 'but at least one annotation does not have the {} field'.format(
309
+ options.confidence_field_name)
310
+ if anno[options.confidence_field_name] < options.confidence_threshold:
311
+ continue
312
+
297
313
  if 'sequence_level_annotation' in anno:
298
314
  bSequenceLevelAnnotation = anno['sequence_level_annotation']
299
315
  if bSequenceLevelAnnotation:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: megadetector
3
- Version: 5.0.15
3
+ Version: 5.0.17
4
4
  Summary: MegaDetector is an AI model that helps conservation folks spend less time doing boring things with camera trap images.
5
5
  Author-email: Your friendly neighborhood MegaDetector team <cameratraps@lila.science>
6
6
  Maintainer-email: Your friendly neighborhood MegaDetector team <cameratraps@lila.science>
@@ -54,25 +54,23 @@ Requires-Dist: ultralytics-yolov5 ==0.1.1
54
54
 
55
55
  This package is a pip-installable version of the support/inference code for [MegaDetector](https://github.com/agentmorris/MegaDetector/?tab=readme-ov-file#megadetector), an object detection model that helps conservation biologists spend less time doing boring things with camera trap images. Complete documentation for this Python package is available at [megadetector.readthedocs.io](https://megadetector.readthedocs.io).
56
56
 
57
- If you aren't looking for the Python package specificaly, and you just want to learn more about what MegaDetector is all about, head over to the [MegaDetector repo](https://github.com/agentmorris/MegaDetector/?tab=readme-ov-file#megadetector).
57
+ If you aren't looking for the Python package specifically, and you just want to learn more about what MegaDetector is all about, head over to the [MegaDetector repo](https://github.com/agentmorris/MegaDetector/?tab=readme-ov-file#megadetector).
58
58
 
59
59
 
60
- ## Reasons you probably aren't looking for this package
60
+ ## Reasons you might not be looking for this package
61
61
 
62
62
  ### If you are an ecologist...
63
63
 
64
- If you are an ecologist looking to use MegaDetector to help you get through your camera trap images, you probably don't want this package. We recommend starting with our "[Getting started with MegaDetector](https://github.com/agentmorris/MegaDetector/blob/main/getting-started.md)" page, then digging in to the [MegaDetector User Guide](https://github.com/agentmorris/MegaDetector/blob/main/megadetector.md), which will walk you through the process of using MegaDetector. That journey will <i>not</i> involve this Python package.
64
+ If you are an ecologist looking to use MegaDetector to help you get through your camera trap images, you probably don't want this package, or at least you probably don't want to start at this page. We recommend starting with our "[Getting started with MegaDetector](https://github.com/agentmorris/MegaDetector/blob/main/getting-started.md)" page, then digging in to the [MegaDetector User Guide](https://github.com/agentmorris/MegaDetector/blob/main/megadetector.md), which will walk you through the process of using MegaDetector.
65
65
 
66
66
  ### If you are a computer-vision-y type...
67
67
 
68
- If you are a computer-vision-y person looking to run or fine-tune MegaDetector programmatically, you still probably don't want this package. MegaDetector is just a fine-tuned version of [YOLOv5](https://github.com/ultralytics/yolov5), and the [ultralytics](https://github.com/ultralytics/ultralytics/) package (from the developers of YOLOv5) has a zillion bells and whistles for both inference and fine-tuning that this package doesn't.
68
+ If you are a computer-vision-y person looking to run or fine-tune MegaDetector programmatically, you probably don't want this package. MegaDetector is just a fine-tuned version of [YOLOv5](https://github.com/ultralytics/yolov5), and the [ultralytics](https://github.com/ultralytics/ultralytics/) package (from the developers of YOLOv5) has a zillion bells and whistles for both inference and fine-tuning that this package doesn't.
69
69
 
70
70
  ## Reasons you might want to use this package
71
71
 
72
72
  If you want to programmatically interact with the postprocessing tools from the MegaDetector repo, or programmatically run MegaDetector in a way that produces [Timelapse](https://saul.cpsc.ucalgary.ca/timelapse)-friendly output (i.e., output in the standard [MegaDetector output format](https://github.com/agentmorris/MegaDetector/tree/main/megadetector/api/batch_processing#megadetector-batch-output-format)), this package might be for you.
73
73
 
74
- Although even if that describes you, you <i>still</i> might be better off cloning the MegaDetector repo. Pip-installability requires that some dependencies be newer than what was available at the time MDv5 was trained, so results are <i>very slightly</i> different than results produced in the "official" environment. These differences <i>probably</i> don't matter much, but they have not been formally characterized.
75
-
76
74
  ## If I haven't talked you out of using this package...
77
75
 
78
76
  To install:
@@ -65,12 +65,12 @@ megadetector/data_management/labelme_to_yolo.py,sha256=dRePSOwU_jiCr0EakDQCz1Ct-
65
65
  megadetector/data_management/ocr_tools.py,sha256=T9ClY3B-blnK3-UF1vpVdageknYsykm_6FAfqn0kliU,32529
66
66
  megadetector/data_management/read_exif.py,sha256=-q0NqJ3VZSBovD_d6de-s3UR2NuKF6gSw2etfvVuRO4,27866
67
67
  megadetector/data_management/remap_coco_categories.py,sha256=xXWv0QhTjkUfc9RKtAZanK77HMSq_21mFg_34KFD6hw,2903
68
- megadetector/data_management/remove_exif.py,sha256=9YwMUliszhVzkkUcotpRKA-a3h5WdQF1taQ594Bgm60,1666
68
+ megadetector/data_management/remove_exif.py,sha256=vIWnJfw1i9JgyQKUDGEzzqkHro4ndykIPFWhtkm6RAU,2502
69
69
  megadetector/data_management/rename_images.py,sha256=AG3YIxXEYdGmK4G-rv0_XZIylPqOZpS6gfEkydF6oDg,6918
70
70
  megadetector/data_management/resize_coco_dataset.py,sha256=AaiV7efIcNnqsXsnQckmHq2G__7ZQHBV_jN6rhZfMjo,6810
71
71
  megadetector/data_management/wi_download_csv_to_coco.py,sha256=ilnJZhNZK-FGUR-AfUSWjIDUk9Gytgxw7IOK_N8WKLE,8350
72
72
  megadetector/data_management/yolo_output_to_md_output.py,sha256=VZtatLoryeh2pbh1fRAJe-ao7vtoNn6ACyRbAk-2Mlg,17561
73
- megadetector/data_management/yolo_to_coco.py,sha256=G9XiB9D8PWaCq_kc61pKe2GkkuKwdJ7K7zsbGShb_jw,25176
73
+ megadetector/data_management/yolo_to_coco.py,sha256=TzAagQ2ATbB_tn1oZxrHCWsrFGO_OhfZmi-3X45WdDU,26180
74
74
  megadetector/data_management/annotations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
75
75
  megadetector/data_management/annotations/annotation_constants.py,sha256=1597MpAr_HdidIHoDFj4RgUO3K5e2Xm2bGafGeonR2k,953
76
76
  megadetector/data_management/databases/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -95,6 +95,7 @@ megadetector/data_management/importers/filenames_to_json.py,sha256=Jc_FydTiZWsB6
95
95
  megadetector/data_management/importers/helena_to_cct.py,sha256=IVTXXxDDxtbvYZaABCmnYWi2ZJ_1xpAXQG1TjOhRuVE,8712
96
96
  megadetector/data_management/importers/idaho-camera-traps.py,sha256=9BpMwygyN8OLimGsHIodNrikVgSK9SGkZJ0c10GxT-0,54112
97
97
  megadetector/data_management/importers/idfg_iwildcam_lila_prep.py,sha256=ql0fnO-IZuyT4611n8oYlTMDibhiDLDES1za1o6BEck,8194
98
+ megadetector/data_management/importers/import_desert_lion_conservation_camera_traps.py,sha256=eILnUvSOR7upfewX_44cM8d73E9UQQxKYTkPUfIPMrY,12985
98
99
  megadetector/data_management/importers/jb_csv_to_json.py,sha256=IPoXwdz2OhrjMyK1Yv98PVmAD4VBZ9prSuXhx1xLfcg,3726
99
100
  megadetector/data_management/importers/mcgill_to_json.py,sha256=dfSxU1hHimyGT6Zt64XFrW63GWGsdKpqRrp5PE--xUw,6702
100
101
  megadetector/data_management/importers/missouri_to_json.py,sha256=C0ia3eCEZujVUKE2gmQc6ScsK8kXWM7m0ibeKgHfXNo,14848
@@ -108,7 +109,7 @@ megadetector/data_management/importers/rspb_to_json.py,sha256=y03v1d1un9mI3HZRCZ
108
109
  megadetector/data_management/importers/save_the_elephants_survey_A.py,sha256=lugw8m5Nh2Fhs-FYo9L0mDL3_29nAweLxEul6GekdkI,10669
109
110
  megadetector/data_management/importers/save_the_elephants_survey_B.py,sha256=SWClXENsIePwifP8eJeRsj3kh3Bztl6Kzc_BdqNZvFw,11172
110
111
  megadetector/data_management/importers/snapshot_safari_importer.py,sha256=dQ1GmpHcrQCQF9YZ0UaLTvc_3aOZEDqWGcxzYQeq4ho,23605
111
- megadetector/data_management/importers/snapshot_safari_importer_reprise.py,sha256=cv2zOWmhvXPRM-ZFrzmYFjq0Y2fwo7PUN_UJ_T2aryo,22333
112
+ megadetector/data_management/importers/snapshot_safari_importer_reprise.py,sha256=f2WXC22fzbKaQl2888bfUlzap4oDhRG3ysZOUMBrcw0,22549
112
113
  megadetector/data_management/importers/snapshot_serengeti_lila.py,sha256=-aYq_5IxhpcR6oxFYYVv98WVnGAr0mnVkbX-oJCPd8M,33865
113
114
  megadetector/data_management/importers/sulross_get_exif.py,sha256=Bt1tGYtr5CllxCe2BL8uI3SfPu3e1SSqijnOz--iRqQ,2071
114
115
  megadetector/data_management/importers/timelapse_csv_set_to_json.py,sha256=B9VbBltf3IdPBI2O1Cmg8wODhlIML4MQpjdhTFD4GP4,15916
@@ -129,20 +130,20 @@ megadetector/data_management/lila/create_lila_blank_set.py,sha256=SBwpM0-pycW37T
129
130
  megadetector/data_management/lila/create_lila_test_set.py,sha256=DjivKgsFJlO1IHezXrwAGpiCAhLVmvPnv2nJYpv1ABU,4835
130
131
  megadetector/data_management/lila/create_links_to_md_results_files.py,sha256=MvaPBAgdwoxaNrRaKZ8mGaOCky1BYXlrT08tPG9BrpM,3803
131
132
  megadetector/data_management/lila/download_lila_subset.py,sha256=rh09kphSCVPlUGuYY-CkSyd8dy0pBUdth6uHkZ84sEo,5345
132
- megadetector/data_management/lila/generate_lila_per_image_labels.py,sha256=awfBLjVgwP39a2nySMZSAzcoAMHcblzYGlQVt2jP45E,18075
133
+ megadetector/data_management/lila/generate_lila_per_image_labels.py,sha256=K54-JS7s88HsugtaXo56P22PiPsGEdHYB2AaGMBhvIY,18135
133
134
  megadetector/data_management/lila/get_lila_annotation_counts.py,sha256=aOkjemasOqf1Uixu-yhaFKYyKILYRZQZi4GBW4sbtic,5602
134
135
  megadetector/data_management/lila/get_lila_image_counts.py,sha256=UxXS5RDnSA_WbxE92qN-N7p-qR-jbyTsTZ7duLo06us,3620
135
136
  megadetector/data_management/lila/lila_common.py,sha256=IEnGoyRgcqbek1qJ1gFE83p1Pg_5kaMS-nQI25lRWIs,10132
136
- megadetector/data_management/lila/test_lila_metadata_urls.py,sha256=2zKNjgqC3kxdFfyvQC3KTlpc9lf2iMzecHQBf--r_Tk,4438
137
+ megadetector/data_management/lila/test_lila_metadata_urls.py,sha256=jqN7UID16fu78BK_2sygb4s9BBeVCpSZT3_oL2GYxxY,4438
137
138
  megadetector/detection/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
138
- megadetector/detection/process_video.py,sha256=xV24TWlQ0ImN21DCPkLs7y3oUOP7cjmyrFD0PFzzhsY,42713
139
- megadetector/detection/pytorch_detector.py,sha256=p70kAX5pqU4SO4GjYJmzbTPV4tKUp5WRapOs7vgSKes,13885
140
- megadetector/detection/run_detector.py,sha256=biXbeS8aNDlidilxjzhZ-p4_wr2ID-rpsRklbNEd7ME,30094
141
- megadetector/detection/run_detector_batch.py,sha256=nplmaxOcEe_vnS3VrUd49uZoCQNRmJQmbSMpi8uhh8k,57064
142
- megadetector/detection/run_inference_with_yolov5_val.py,sha256=u9i1ndwl_k0DsiAWYQcYrrrB9D9Wt56_k6iGTAetUaM,46786
139
+ megadetector/detection/process_video.py,sha256=yVjkm5SxHH-R5SazMQmF-XXRhrhPdE2z4X4bGkfeN-k,50019
140
+ megadetector/detection/pytorch_detector.py,sha256=StOnaspDBkMeePiTyq5ZEcFUDBEddq36nigHXbF-zAQ,14029
141
+ megadetector/detection/run_detector.py,sha256=vEfq3jJTseD0sIM9MaIhbeEVqP6JoLXOC2cl8Dhehxs,30553
142
+ megadetector/detection/run_detector_batch.py,sha256=d0fayCVXzKxa1tCiw6D8kmDqcwOAIuvrgw_Zfw0eRjE,57304
143
+ megadetector/detection/run_inference_with_yolov5_val.py,sha256=yjNm130qntOyJ4jbetdt5xDHWnSmBXRydyxB2I56XjM,49099
143
144
  megadetector/detection/run_tiled_inference.py,sha256=vw0713eNuMiEOjHfweQl58zPHNxPOMdFWZ8bTDLhlMY,37883
144
- megadetector/detection/tf_detector.py,sha256=-vcBuYRRLKumUj6imcDYgCgClGji0a21uMjoUAtY3yw,8104
145
- megadetector/detection/video_utils.py,sha256=DNEW10EgWN5ZiJg9uRpxxfJLuBX9Ts2l0eJ66F9kmmw,32474
145
+ megadetector/detection/tf_detector.py,sha256=5V94a0gR6WmGPacKm59hl1eYEZI8cG04frF4EvHrmzU,8285
146
+ megadetector/detection/video_utils.py,sha256=MzD8aUgdyAD9xK9w2fK0lvZtDhjpeHDkftvRXk3YJVA,41399
146
147
  megadetector/detection/detector_training/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
147
148
  megadetector/detection/detector_training/model_main_tf2.py,sha256=YwNsZ7hkIFaEuwKU0rHG_VyqiR_0E01BbdlD0Yx4Smo,4936
148
149
  megadetector/postprocessing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -153,49 +154,50 @@ megadetector/postprocessing/combine_api_outputs.py,sha256=xCJHEKca8YW-mupEr0yNNw
153
154
  megadetector/postprocessing/compare_batch_results.py,sha256=7O5c6-JsIDpuIGobks_R9j8MPuiZQRnEtNnJQsJqICM,38918
154
155
  megadetector/postprocessing/convert_output_format.py,sha256=HwThfK76UPEAGa3KQbJ_tMKIrUvJ3JhKoQVWJt9dPBk,15447
155
156
  megadetector/postprocessing/load_api_results.py,sha256=FqcaiPMuqTojZOV3Jn14pJESpuwjWGbZtcvJuVXUaDM,6861
156
- megadetector/postprocessing/md_to_coco.py,sha256=t8zHN3QmwxuvcQKxLd_yMSjwncxy7YEoq2EGr0kwBDs,11049
157
+ megadetector/postprocessing/md_to_coco.py,sha256=x3sUnOLd2lVfdG2zRN7k-oUvx6rvRD7DWmWJymPc108,12359
157
158
  megadetector/postprocessing/md_to_labelme.py,sha256=hejMKVxaz_xdtsGDPTQkeWuis7gzT-VOrL2Qf8ym1x0,11703
158
159
  megadetector/postprocessing/merge_detections.py,sha256=AEMgMivhph1vph_t_Qv85d9iHynT2nvq7otN4KGrDLU,17776
159
- megadetector/postprocessing/postprocess_batch_results.py,sha256=ucFW2WsuoxIgEC62CrgOLCOTO3LxIZ-LPCYRJ9xjais,78178
160
+ megadetector/postprocessing/postprocess_batch_results.py,sha256=xa1FCQnzo1B6Inq8EWqS_In5xDu3qNzES_YdZ0INKr0,78978
160
161
  megadetector/postprocessing/remap_detection_categories.py,sha256=d9IYTa0i_KbbrarJc_mczABmdwypscl5-KpK8Hx_z8o,6640
161
162
  megadetector/postprocessing/render_detection_confusion_matrix.py,sha256=_wsk4W0PbNiqmFuHy-EA0Z07B1tQLMsdCTPatnHAdZw,27382
162
163
  megadetector/postprocessing/separate_detections_into_folders.py,sha256=k42gxnL8hbBiV0e2T-jmFrhxzIxnhi57Nx9cDSSL5s0,31218
163
- megadetector/postprocessing/subset_json_detector_output.py,sha256=TIXIWEv0nh4cXvhMLcM_ZryM5ly1NOTkWopM2RjEpqQ,26822
164
+ megadetector/postprocessing/subset_json_detector_output.py,sha256=PDgb6cnsFm9d4E7_sMVIguLIU7s79uFQa2CRCxAO0F4,27064
164
165
  megadetector/postprocessing/top_folders_to_bottom.py,sha256=Dqk-KZXiRlIYlmLZmk6aUapmaaLJUKOf8wK1kxt9W6A,6283
165
166
  megadetector/postprocessing/repeat_detection_elimination/find_repeat_detections.py,sha256=e4Y9CyMyd-bLN3il8tu76vI0nVYHZlhZr6vcL0J4zQ0,9832
166
167
  megadetector/postprocessing/repeat_detection_elimination/remove_repeat_detections.py,sha256=tARPxuY0OyQgpKU2XqiQPko3f-hHnWuISB8ZlZgXwxI,2819
167
- megadetector/postprocessing/repeat_detection_elimination/repeat_detections_core.py,sha256=_RX0Gtb8YQPYdfQDGIvg1RvyqsdyanmEg1pqVmheHlg,67776
168
+ megadetector/postprocessing/repeat_detection_elimination/repeat_detections_core.py,sha256=vEmWLSSv0_rxDwhjz_S9YaKZ_LM2tADTz2JYb_zUCnc,67923
168
169
  megadetector/taxonomy_mapping/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
169
170
  megadetector/taxonomy_mapping/map_lila_taxonomy_to_wi_taxonomy.py,sha256=6D_YHTeWTs6O8S9ABog2t9-wfQSh9dW2k9XTqXUZKfo,17927
170
- megadetector/taxonomy_mapping/map_new_lila_datasets.py,sha256=M-hRnQuqh5QhW-7LmTvYRex1Y2izQFSgEzb92gqqx1M,4062
171
- megadetector/taxonomy_mapping/prepare_lila_taxonomy_release.py,sha256=N9TUgg3_2u4hc5OBRydvEpweC3RIJ9ry5bXoi1BXLAY,4676
172
- megadetector/taxonomy_mapping/preview_lila_taxonomy.py,sha256=3e-e3rurissksHgRCrc-_oLJSy2KCxcvwtAQM7L2E1U,19549
171
+ megadetector/taxonomy_mapping/map_new_lila_datasets.py,sha256=FSJ6ygpADtlYLf5Bhp9kMb5km2-MH0mmM_ccyStxo34,4054
172
+ megadetector/taxonomy_mapping/prepare_lila_taxonomy_release.py,sha256=sRCTgaY84FiGoTtK5LOHL5dhpSrEk9zZGkUR1w9FNm4,4694
173
+ megadetector/taxonomy_mapping/preview_lila_taxonomy.py,sha256=qCOyhrgddFZOYBCamfIDKdMMQuIMdGhSrd7ovLz1Yuo,19549
173
174
  megadetector/taxonomy_mapping/retrieve_sample_image.py,sha256=4cfWsLRwS_EwAmQr2p5tA_W6glBK71tSjPfaHxUZQWs,1979
174
- megadetector/taxonomy_mapping/simple_image_download.py,sha256=_1dEGn4356mdQAy9yzkH5DntPO7-nQyYo2zm08ODpJc,6852
175
- megadetector/taxonomy_mapping/species_lookup.py,sha256=B5arfF1OVICtTokVOtJcN8W2SxGmq46AO0SfA11Upt8,28291
175
+ megadetector/taxonomy_mapping/simple_image_download.py,sha256=wLhyMSocX_JhDGA6yLbEfpysz8MMI8YFJWaxyA-GZ9c,6932
176
+ megadetector/taxonomy_mapping/species_lookup.py,sha256=HZ7fyhap9CNdhdmq-id8dMnIa9TPMA3557rsamAkWkU,28329
176
177
  megadetector/taxonomy_mapping/taxonomy_csv_checker.py,sha256=A_zPwzY-ERz6xawxgk2Tpfsycl-1sDcjUiuaXXBppi8,4850
177
178
  megadetector/taxonomy_mapping/taxonomy_graph.py,sha256=ayrTFseVaIMbtMXhnjWCkZdxI5SAVe_BUtnanGewQpU,12263
178
179
  megadetector/taxonomy_mapping/validate_lila_category_mappings.py,sha256=1qyZr23bvZSVUYLQnO1XAtIZ4jdpARA5dxt8euKVyOA,2527
179
180
  megadetector/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
180
181
  megadetector/utils/azure_utils.py,sha256=0BdnkG2hW-X0yFpsJqmBhOd2wysz_LvhuyImPJMVPJs,6271
181
- megadetector/utils/ct_utils.py,sha256=RTMc0UszYuW9QpMo-qetaWder1mFWIzkMLL2UM6PYdY,17960
182
+ megadetector/utils/ct_utils.py,sha256=1LXAjnzeeFeQqp59cWn3Nxt5OQk3t2DfO5wQ30flA5E,19441
182
183
  megadetector/utils/directory_listing.py,sha256=r4rg2xA4O9ZVxVtzPZzXIXa0DOEukAJMTTNcNSiQcuM,9668
183
- megadetector/utils/md_tests.py,sha256=mtjxQ_dQYBgo8OOVta5JrPhuDMT1ZNh0S-OQXRocBEM,51236
184
- megadetector/utils/path_utils.py,sha256=Uj_aNvA_P0buq-3ebQLZz-6to8mNO5JyBhD7n1-pUoU,37149
184
+ megadetector/utils/md_tests.py,sha256=n_5PkeUComn8pbvN-sLS4XdNsNPnvz8jk6DhrCcm9PU,58225
185
+ megadetector/utils/path_utils.py,sha256=o68jfPDaLj3NizipVCQEnmB5GfPHpMOLUmQWamYM4w0,37165
185
186
  megadetector/utils/process_utils.py,sha256=2SdFVxqob-YUW2BTjUEavNuRH3jA4V05fbKMtrVSd3c,5635
186
187
  megadetector/utils/sas_blob_utils.py,sha256=k76EcMmJc_otrEHcfV2fxAC6fNhxU88FxM3ddSYrsKU,16917
187
188
  megadetector/utils/split_locations_into_train_val.py,sha256=jvaDu1xKB51L3Xq2nXQo0XtXRjNRf8RglBApl1g6gHo,10101
188
189
  megadetector/utils/string_utils.py,sha256=ZQapJodzvTDyQhjZgMoMl3-9bqnKAUlORpws8Db9AkA,2050
189
- megadetector/utils/url_utils.py,sha256=uJRsSMxA1zMd997dX3V3wqFnKMTcSiOaE_atXUTRRVI,11476
190
+ megadetector/utils/torch_test.py,sha256=aEYE-1vGt5PujD0bHAVRTJiLrKFlGWpS8zeYhqEYZLY,853
191
+ megadetector/utils/url_utils.py,sha256=yybWwJ-vl2A6Fci66i-xt_dl3Uqh72Ylnb8XOT2Grog,14835
190
192
  megadetector/utils/write_html_image_list.py,sha256=apzoWkgZWG-ybCT4k92PlS4-guN_sNBSMMMbj7Cfm1k,8638
191
193
  megadetector/visualization/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
192
194
  megadetector/visualization/plot_utils.py,sha256=lOfU3uPrcuHZagV_1SN8erT8PujIepocgw6KZ17Ej6c,10671
193
195
  megadetector/visualization/render_images_with_thumbnails.py,sha256=kgJYW8BsqRO4C7T3sqItdBuSkZ64I1vOtIWAsVG4XBI,10589
194
- megadetector/visualization/visualization_utils.py,sha256=jWiXlLpmWh_CH2vApZURclOC7fdip1aKWQ66wuNabyA,62369
195
- megadetector/visualization/visualize_db.py,sha256=3FhOtn3GHvNsomwSpsSEzYe58lF9B4Ob3MEi_xplrdo,21256
196
+ megadetector/visualization/visualization_utils.py,sha256=J53VsI8aQmzzBBeu-msm8c-qC6pm_HCMkMKYvnylqjo,63083
197
+ megadetector/visualization/visualize_db.py,sha256=x9jScwG-3V-mZGy5cB1s85KWbiAIfvgVUcLqUplHxGA,22110
196
198
  megadetector/visualization/visualize_detector_output.py,sha256=LY8QgDWpWlXVLZJUskvT29CdkNvIlEsFTk4DC_lS6pk,17052
197
- megadetector-5.0.15.dist-info/LICENSE,sha256=RMa3qq-7Cyk7DdtqRj_bP1oInGFgjyHn9-PZ3PcrqIs,1100
198
- megadetector-5.0.15.dist-info/METADATA,sha256=E3ZvrTIEHvAVmogrd9wXYmYgvHBo9YsFmIb6-I9uEb0,7893
199
- megadetector-5.0.15.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
200
- megadetector-5.0.15.dist-info/top_level.txt,sha256=wf9DXa8EwiOSZ4G5IPjakSxBPxTDjhYYnqWRfR-zS4M,13
201
- megadetector-5.0.15.dist-info/RECORD,,
199
+ megadetector-5.0.17.dist-info/LICENSE,sha256=RMa3qq-7Cyk7DdtqRj_bP1oInGFgjyHn9-PZ3PcrqIs,1100
200
+ megadetector-5.0.17.dist-info/METADATA,sha256=gGtsuo3rHyxfYxhvxEDnP9RZfFKL2_jDvuIR51KlwqQ,7460
201
+ megadetector-5.0.17.dist-info/WHEEL,sha256=cVxcB9AmuTcXqmwrtPhNK88dr7IR_b6qagTj0UvIEbY,91
202
+ megadetector-5.0.17.dist-info/top_level.txt,sha256=wf9DXa8EwiOSZ4G5IPjakSxBPxTDjhYYnqWRfR-zS4M,13
203
+ megadetector-5.0.17.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: bdist_wheel (0.43.0)
2
+ Generator: setuptools (74.1.2)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5