megadetector-5.0.11-py3-none-any.whl → megadetector-5.0.13-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of megadetector has been flagged as potentially problematic.
- megadetector/api/__init__.py +0 -0
- megadetector/api/batch_processing/__init__.py +0 -0
- megadetector/api/batch_processing/api_core/__init__.py +0 -0
- megadetector/api/batch_processing/api_core/batch_service/__init__.py +0 -0
- megadetector/api/batch_processing/api_core/batch_service/score.py +439 -0
- megadetector/api/batch_processing/api_core/server.py +294 -0
- megadetector/api/batch_processing/api_core/server_api_config.py +97 -0
- megadetector/api/batch_processing/api_core/server_app_config.py +55 -0
- megadetector/api/batch_processing/api_core/server_batch_job_manager.py +220 -0
- megadetector/api/batch_processing/api_core/server_job_status_table.py +149 -0
- megadetector/api/batch_processing/api_core/server_orchestration.py +360 -0
- megadetector/api/batch_processing/api_core/server_utils.py +88 -0
- megadetector/api/batch_processing/api_core_support/__init__.py +0 -0
- megadetector/api/batch_processing/api_core_support/aggregate_results_manually.py +46 -0
- megadetector/api/batch_processing/api_support/__init__.py +0 -0
- megadetector/api/batch_processing/api_support/summarize_daily_activity.py +152 -0
- megadetector/api/batch_processing/data_preparation/__init__.py +0 -0
- megadetector/api/batch_processing/integration/digiKam/setup.py +6 -0
- megadetector/api/batch_processing/integration/digiKam/xmp_integration.py +465 -0
- megadetector/api/batch_processing/integration/eMammal/test_scripts/config_template.py +5 -0
- megadetector/api/batch_processing/integration/eMammal/test_scripts/push_annotations_to_emammal.py +125 -0
- megadetector/api/batch_processing/integration/eMammal/test_scripts/select_images_for_testing.py +55 -0
- megadetector/api/synchronous/__init__.py +0 -0
- megadetector/api/synchronous/api_core/animal_detection_api/__init__.py +0 -0
- megadetector/api/synchronous/api_core/animal_detection_api/api_backend.py +152 -0
- megadetector/api/synchronous/api_core/animal_detection_api/api_frontend.py +263 -0
- megadetector/api/synchronous/api_core/animal_detection_api/config.py +35 -0
- megadetector/api/synchronous/api_core/tests/__init__.py +0 -0
- megadetector/api/synchronous/api_core/tests/load_test.py +110 -0
- megadetector/classification/__init__.py +0 -0
- megadetector/classification/aggregate_classifier_probs.py +108 -0
- megadetector/classification/analyze_failed_images.py +227 -0
- megadetector/classification/cache_batchapi_outputs.py +198 -0
- megadetector/classification/create_classification_dataset.py +627 -0
- megadetector/classification/crop_detections.py +516 -0
- megadetector/classification/csv_to_json.py +226 -0
- megadetector/classification/detect_and_crop.py +855 -0
- megadetector/classification/efficientnet/__init__.py +9 -0
- megadetector/classification/efficientnet/model.py +415 -0
- megadetector/classification/efficientnet/utils.py +607 -0
- megadetector/classification/evaluate_model.py +520 -0
- megadetector/classification/identify_mislabeled_candidates.py +152 -0
- megadetector/classification/json_to_azcopy_list.py +63 -0
- megadetector/classification/json_validator.py +699 -0
- megadetector/classification/map_classification_categories.py +276 -0
- megadetector/classification/merge_classification_detection_output.py +506 -0
- megadetector/classification/prepare_classification_script.py +194 -0
- megadetector/classification/prepare_classification_script_mc.py +228 -0
- megadetector/classification/run_classifier.py +287 -0
- megadetector/classification/save_mislabeled.py +110 -0
- megadetector/classification/train_classifier.py +827 -0
- megadetector/classification/train_classifier_tf.py +725 -0
- megadetector/classification/train_utils.py +323 -0
- megadetector/data_management/__init__.py +0 -0
- megadetector/data_management/annotations/__init__.py +0 -0
- megadetector/data_management/annotations/annotation_constants.py +34 -0
- megadetector/data_management/camtrap_dp_to_coco.py +237 -0
- megadetector/data_management/cct_json_utils.py +404 -0
- megadetector/data_management/cct_to_md.py +176 -0
- megadetector/data_management/cct_to_wi.py +289 -0
- megadetector/data_management/coco_to_labelme.py +283 -0
- megadetector/data_management/coco_to_yolo.py +662 -0
- megadetector/data_management/databases/__init__.py +0 -0
- megadetector/data_management/databases/add_width_and_height_to_db.py +33 -0
- megadetector/data_management/databases/combine_coco_camera_traps_files.py +206 -0
- megadetector/data_management/databases/integrity_check_json_db.py +493 -0
- megadetector/data_management/databases/subset_json_db.py +115 -0
- megadetector/data_management/generate_crops_from_cct.py +149 -0
- megadetector/data_management/get_image_sizes.py +189 -0
- megadetector/data_management/importers/add_nacti_sizes.py +52 -0
- megadetector/data_management/importers/add_timestamps_to_icct.py +79 -0
- megadetector/data_management/importers/animl_results_to_md_results.py +158 -0
- megadetector/data_management/importers/auckland_doc_test_to_json.py +373 -0
- megadetector/data_management/importers/auckland_doc_to_json.py +201 -0
- megadetector/data_management/importers/awc_to_json.py +191 -0
- megadetector/data_management/importers/bellevue_to_json.py +273 -0
- megadetector/data_management/importers/cacophony-thermal-importer.py +793 -0
- megadetector/data_management/importers/carrizo_shrubfree_2018.py +269 -0
- megadetector/data_management/importers/carrizo_trail_cam_2017.py +289 -0
- megadetector/data_management/importers/cct_field_adjustments.py +58 -0
- megadetector/data_management/importers/channel_islands_to_cct.py +913 -0
- megadetector/data_management/importers/eMammal/copy_and_unzip_emammal.py +180 -0
- megadetector/data_management/importers/eMammal/eMammal_helpers.py +249 -0
- megadetector/data_management/importers/eMammal/make_eMammal_json.py +223 -0
- megadetector/data_management/importers/ena24_to_json.py +276 -0
- megadetector/data_management/importers/filenames_to_json.py +386 -0
- megadetector/data_management/importers/helena_to_cct.py +283 -0
- megadetector/data_management/importers/idaho-camera-traps.py +1407 -0
- megadetector/data_management/importers/idfg_iwildcam_lila_prep.py +294 -0
- megadetector/data_management/importers/jb_csv_to_json.py +150 -0
- megadetector/data_management/importers/mcgill_to_json.py +250 -0
- megadetector/data_management/importers/missouri_to_json.py +490 -0
- megadetector/data_management/importers/nacti_fieldname_adjustments.py +79 -0
- megadetector/data_management/importers/noaa_seals_2019.py +181 -0
- megadetector/data_management/importers/pc_to_json.py +365 -0
- megadetector/data_management/importers/plot_wni_giraffes.py +123 -0
- megadetector/data_management/importers/prepare-noaa-fish-data-for-lila.py +359 -0
- megadetector/data_management/importers/prepare_zsl_imerit.py +131 -0
- megadetector/data_management/importers/rspb_to_json.py +356 -0
- megadetector/data_management/importers/save_the_elephants_survey_A.py +320 -0
- megadetector/data_management/importers/save_the_elephants_survey_B.py +329 -0
- megadetector/data_management/importers/snapshot_safari_importer.py +758 -0
- megadetector/data_management/importers/snapshot_safari_importer_reprise.py +665 -0
- megadetector/data_management/importers/snapshot_serengeti_lila.py +1067 -0
- megadetector/data_management/importers/snapshotserengeti/make_full_SS_json.py +150 -0
- megadetector/data_management/importers/snapshotserengeti/make_per_season_SS_json.py +153 -0
- megadetector/data_management/importers/sulross_get_exif.py +65 -0
- megadetector/data_management/importers/timelapse_csv_set_to_json.py +490 -0
- megadetector/data_management/importers/ubc_to_json.py +399 -0
- megadetector/data_management/importers/umn_to_json.py +507 -0
- megadetector/data_management/importers/wellington_to_json.py +263 -0
- megadetector/data_management/importers/wi_to_json.py +442 -0
- megadetector/data_management/importers/zamba_results_to_md_results.py +181 -0
- megadetector/data_management/labelme_to_coco.py +547 -0
- megadetector/data_management/labelme_to_yolo.py +272 -0
- megadetector/data_management/lila/__init__.py +0 -0
- megadetector/data_management/lila/add_locations_to_island_camera_traps.py +97 -0
- megadetector/data_management/lila/add_locations_to_nacti.py +147 -0
- megadetector/data_management/lila/create_lila_blank_set.py +558 -0
- megadetector/data_management/lila/create_lila_test_set.py +152 -0
- megadetector/data_management/lila/create_links_to_md_results_files.py +106 -0
- megadetector/data_management/lila/download_lila_subset.py +178 -0
- megadetector/data_management/lila/generate_lila_per_image_labels.py +516 -0
- megadetector/data_management/lila/get_lila_annotation_counts.py +170 -0
- megadetector/data_management/lila/get_lila_image_counts.py +112 -0
- megadetector/data_management/lila/lila_common.py +300 -0
- megadetector/data_management/lila/test_lila_metadata_urls.py +132 -0
- megadetector/data_management/ocr_tools.py +870 -0
- megadetector/data_management/read_exif.py +809 -0
- megadetector/data_management/remap_coco_categories.py +84 -0
- megadetector/data_management/remove_exif.py +66 -0
- megadetector/data_management/rename_images.py +187 -0
- megadetector/data_management/resize_coco_dataset.py +189 -0
- megadetector/data_management/wi_download_csv_to_coco.py +247 -0
- megadetector/data_management/yolo_output_to_md_output.py +446 -0
- megadetector/data_management/yolo_to_coco.py +676 -0
- megadetector/detection/__init__.py +0 -0
- megadetector/detection/detector_training/__init__.py +0 -0
- megadetector/detection/detector_training/model_main_tf2.py +114 -0
- megadetector/detection/process_video.py +846 -0
- megadetector/detection/pytorch_detector.py +355 -0
- megadetector/detection/run_detector.py +779 -0
- megadetector/detection/run_detector_batch.py +1219 -0
- megadetector/detection/run_inference_with_yolov5_val.py +1087 -0
- megadetector/detection/run_tiled_inference.py +934 -0
- megadetector/detection/tf_detector.py +192 -0
- megadetector/detection/video_utils.py +698 -0
- megadetector/postprocessing/__init__.py +0 -0
- megadetector/postprocessing/add_max_conf.py +64 -0
- megadetector/postprocessing/categorize_detections_by_size.py +165 -0
- megadetector/postprocessing/classification_postprocessing.py +716 -0
- megadetector/postprocessing/combine_api_outputs.py +249 -0
- megadetector/postprocessing/compare_batch_results.py +966 -0
- megadetector/postprocessing/convert_output_format.py +396 -0
- megadetector/postprocessing/load_api_results.py +195 -0
- megadetector/postprocessing/md_to_coco.py +310 -0
- megadetector/postprocessing/md_to_labelme.py +330 -0
- megadetector/postprocessing/merge_detections.py +412 -0
- megadetector/postprocessing/postprocess_batch_results.py +1908 -0
- megadetector/postprocessing/remap_detection_categories.py +170 -0
- megadetector/postprocessing/render_detection_confusion_matrix.py +660 -0
- megadetector/postprocessing/repeat_detection_elimination/find_repeat_detections.py +211 -0
- megadetector/postprocessing/repeat_detection_elimination/remove_repeat_detections.py +83 -0
- megadetector/postprocessing/repeat_detection_elimination/repeat_detections_core.py +1635 -0
- megadetector/postprocessing/separate_detections_into_folders.py +730 -0
- megadetector/postprocessing/subset_json_detector_output.py +700 -0
- megadetector/postprocessing/top_folders_to_bottom.py +223 -0
- megadetector/taxonomy_mapping/__init__.py +0 -0
- megadetector/taxonomy_mapping/map_lila_taxonomy_to_wi_taxonomy.py +491 -0
- megadetector/taxonomy_mapping/map_new_lila_datasets.py +150 -0
- megadetector/taxonomy_mapping/prepare_lila_taxonomy_release.py +142 -0
- megadetector/taxonomy_mapping/preview_lila_taxonomy.py +588 -0
- megadetector/taxonomy_mapping/retrieve_sample_image.py +71 -0
- megadetector/taxonomy_mapping/simple_image_download.py +219 -0
- megadetector/taxonomy_mapping/species_lookup.py +834 -0
- megadetector/taxonomy_mapping/taxonomy_csv_checker.py +159 -0
- megadetector/taxonomy_mapping/taxonomy_graph.py +346 -0
- megadetector/taxonomy_mapping/validate_lila_category_mappings.py +83 -0
- megadetector/utils/__init__.py +0 -0
- megadetector/utils/azure_utils.py +178 -0
- megadetector/utils/ct_utils.py +613 -0
- megadetector/utils/directory_listing.py +246 -0
- megadetector/utils/md_tests.py +1164 -0
- megadetector/utils/path_utils.py +1045 -0
- megadetector/utils/process_utils.py +160 -0
- megadetector/utils/sas_blob_utils.py +509 -0
- megadetector/utils/split_locations_into_train_val.py +228 -0
- megadetector/utils/string_utils.py +92 -0
- megadetector/utils/url_utils.py +323 -0
- megadetector/utils/write_html_image_list.py +225 -0
- megadetector/visualization/__init__.py +0 -0
- megadetector/visualization/plot_utils.py +293 -0
- megadetector/visualization/render_images_with_thumbnails.py +275 -0
- megadetector/visualization/visualization_utils.py +1536 -0
- megadetector/visualization/visualize_db.py +552 -0
- megadetector/visualization/visualize_detector_output.py +405 -0
- {megadetector-5.0.11.dist-info → megadetector-5.0.13.dist-info}/LICENSE +0 -0
- {megadetector-5.0.11.dist-info → megadetector-5.0.13.dist-info}/METADATA +2 -2
- megadetector-5.0.13.dist-info/RECORD +201 -0
- megadetector-5.0.13.dist-info/top_level.txt +1 -0
- megadetector-5.0.11.dist-info/RECORD +0 -5
- megadetector-5.0.11.dist-info/top_level.txt +0 -1
- {megadetector-5.0.11.dist-info → megadetector-5.0.13.dist-info}/WHEEL +0 -0
megadetector/taxonomy_mapping/simple_image_download.py

@@ -0,0 +1,219 @@
+"""
+
+simple_image_download.py
+
+Web image downloader, used in preview_lila_taxonomy.py
+
+Slightly modified from:
+
+https://github.com/RiddlerQ/simple_image_download
+
+"""
+
+#%% Imports
+
+import os
+import urllib
+import requests
+import magic
+import random
+
+from urllib.parse import quote
+
+
+#%% Constants
+
+BASE_URL = 'https://www.google.com/search?q='
+GOOGLE_PICTURE_ID = '''&biw=1536&bih=674&tbm=isch&sxsrf=ACYBGNSXXpS6YmAKUiLKKBs6xWb4uUY5gA:1581168823770&source=lnms&sa=X&ved=0ahUKEwioj8jwiMLnAhW9AhAIHbXTBMMQ_AUI3QUoAQ'''
+HEADERS = {
+    'User-Agent':
+        "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36"
+}
+SCANNER_COUNTER = None
+
+
+#%% Support functions
+
+def generate_search_url(keywords):
+    keywords_to_search = [str(item).strip() for item in keywords.split(',')][0].split()
+    keywords_count = len(keywords_to_search)
+    return keywords_to_search, keywords_count
+
+
+def generate_urls(search):
+    """
+    Generate Google search URLs for all tokens in the list [search]
+    """
+
+    return [(BASE_URL+quote(word)+GOOGLE_PICTURE_ID) for word in search]
+
+
+def check_webpage(url):
+    checked_url = None
+    try:
+        request = requests.get(url, allow_redirects=True, timeout=10)
+        if 'html' not in str(request.content):
+            checked_url = request
+    except Exception as err:
+        print(err)
+    return checked_url
+
+
+def scan_webpage(webpage, extensions, timer):
+    """
+    Scan for pictures to download based on keywords
+    """
+
+    global SCANNER_COUNTER
+    scanner = webpage.find
+    found = False
+    counter = 0
+    while counter < timer:
+        new_line = scanner('"https://', SCANNER_COUNTER + 1)  # How Many New lines
+        SCANNER_COUNTER = scanner('"', new_line + 1)  # Ends of line
+        buffer = scanner('\\', new_line + 1, SCANNER_COUNTER)
+        if buffer != -1:
+            object_raw = webpage[new_line + 1:buffer]
+        else:
+            object_raw = webpage[new_line + 1:SCANNER_COUNTER]
+        if any(extension in object_raw for extension in extensions):
+            found = True
+            break
+        counter += 1
+    if found:
+        object_ready = check_webpage(object_raw)
+        return object_ready
+
+
+#%% Main class
+
+class Downloader:
+    """
+    Main Downloader
+    ::param extension:iterable of Files extensions
+    """
+    def __init__(self, extensions=None):
+        if extensions:
+            self._extensions = set(*[extensions])
+        else:
+            self._extensions = {'.jpg', '.png', '.ico', '.gif', '.jpeg'}
+        self._directory = "simple_images/"
+        self.get_dirs = set()
+        self._cached_urls = {}
+
+    @property
+    def directory(self):
+        return self._directory
+
+    @directory.setter
+    def directory(self, value):
+        self._directory = value
+
+    @property
+    def cached_urls(self):
+        return self._cached_urls
+
+    @property
+    def extensions(self):
+        return self._extensions
+
+    @extensions.setter
+    def extensions(self, value):
+        self._extensions = set([value])
+
+
+    def get_urls(self):
+        return [self._cached_urls[url][1].url
+                for url in self._cached_urls]
+
+    def _download_page(self, url):
+        req = urllib.request.Request(url, headers=HEADERS)
+        resp = urllib.request.urlopen(req)
+        resp_data = str(resp.read())
+        return resp_data
+
+    def search_urls(self, keywords, limit=1, verbose=False, cache=True, timer=None):
+        cache_out = {}
+        search, count = generate_search_url(keywords)
+        urls_ = generate_urls(search)
+        timer = timer if timer else 1000
+        # max_progressbar = count * (list(range(limit+1))[-1]+1)
+
+        # bar = progressbar.ProgressBar(maxval=max_progressbar,
+        #     widgets=[progressbar.Bar('=', '[', ']'), ' ', progressbar.Percentage()]).start()
+        i = 0
+        while i < count:
+            global SCANNER_COUNTER
+            SCANNER_COUNTER = -1
+            url = urls_[i]
+            path = self.generate_dir(search[i])
+            raw_html = self._download_page(url)  # Download the entire page from the google Picture search
+            for _ in range(limit+1):
+                webpage_url = scan_webpage(raw_html, self._extensions, timer)
+                if webpage_url:
+                    file_name = Downloader.gen_fn(webpage_url, search[i])
+                    cache_out[file_name] = [path, webpage_url]
+                else:
+                    pass
+                # bar.update(bar.currval + 1)
+            i += 1
+        # bar.finish()
+        if verbose:
+            for url in cache_out:
+                print(url)
+        if cache:
+            self._cached_urls = cache_out
+        if not cache_out:
+            print('==='*15 + ' < ' + 'NO PICTURES FOUND' + ' > ' + '==='*15)
+        return cache_out
+
+    def download(self, keywords=None, limit=1, verbose=False, cache=True, download_cache=False,
+                 timer=None):
+        if not download_cache:
+            content = self.search_urls(keywords, limit, verbose, cache, timer)
+        else:
+            content = self._cached_urls
+            if not content:
+                print('Downloader has not URLs saved in Memory yet, run Downloader.search_urls to find pics first')
+        paths = []
+        for name, (path, url) in content.items():
+            fullpath = os.path.join(path, name)
+            paths.append(fullpath)
+            with open(fullpath, 'wb') as file:
+                file.write(url.content)
+            if verbose:
+                print(f'File Name={name}, Downloaded from {url.url}')
+        return paths

+    def _create_directories(self, name):
+        dir_path = os.path.join(self._directory, name)
+        try:
+            if not os.path.exists(dir_path):
+                os.makedirs(dir_path)
+        except OSError:
+            raise
+        self.get_dirs.update([name])
+        return
+
+    def generate_dir(self, dir_name):
+        """Generate Path and Directory, also check if Directory exists or not """
+        dir_name = dir_name.replace(" ", "_")
+        if dir_name in self.get_dirs:
+            pass
+        else:
+            self._create_directories(dir_name)
+        return os.path.join(self._directory,dir_name)
+
+    @staticmethod
+    def gen_fn(check, name):
+        """Create a file name string and generate a random identifiers otherwise won't import same pic twice"""
+        id = str(hex(random.randrange(1000)))
+        mime = magic.Magic(mime=True)
+        file_type = mime.from_buffer(check.content)
+        file_extension = f'.{file_type.split("/")[1]}'
+        file_name = str(name) + "_" + id[2:] + file_extension
+        return file_name
+
+    def flush_cache(self):
+        """Clear the Downloader instance cache"""
+        self._cached_urls = set()