megadetector 5.0.9__py3-none-any.whl → 5.0.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of megadetector has been flagged as potentially problematic; see the package registry's advisory page for more details.
- {megadetector-5.0.9.dist-info → megadetector-5.0.11.dist-info}/LICENSE +0 -0
- {megadetector-5.0.9.dist-info → megadetector-5.0.11.dist-info}/METADATA +12 -11
- megadetector-5.0.11.dist-info/RECORD +5 -0
- megadetector-5.0.11.dist-info/top_level.txt +1 -0
- api/__init__.py +0 -0
- api/batch_processing/__init__.py +0 -0
- api/batch_processing/api_core/__init__.py +0 -0
- api/batch_processing/api_core/batch_service/__init__.py +0 -0
- api/batch_processing/api_core/batch_service/score.py +0 -439
- api/batch_processing/api_core/server.py +0 -294
- api/batch_processing/api_core/server_api_config.py +0 -98
- api/batch_processing/api_core/server_app_config.py +0 -55
- api/batch_processing/api_core/server_batch_job_manager.py +0 -220
- api/batch_processing/api_core/server_job_status_table.py +0 -152
- api/batch_processing/api_core/server_orchestration.py +0 -360
- api/batch_processing/api_core/server_utils.py +0 -92
- api/batch_processing/api_core_support/__init__.py +0 -0
- api/batch_processing/api_core_support/aggregate_results_manually.py +0 -46
- api/batch_processing/api_support/__init__.py +0 -0
- api/batch_processing/api_support/summarize_daily_activity.py +0 -152
- api/batch_processing/data_preparation/__init__.py +0 -0
- api/batch_processing/data_preparation/manage_local_batch.py +0 -2391
- api/batch_processing/data_preparation/manage_video_batch.py +0 -327
- api/batch_processing/integration/digiKam/setup.py +0 -6
- api/batch_processing/integration/digiKam/xmp_integration.py +0 -465
- api/batch_processing/integration/eMammal/test_scripts/config_template.py +0 -5
- api/batch_processing/integration/eMammal/test_scripts/push_annotations_to_emammal.py +0 -126
- api/batch_processing/integration/eMammal/test_scripts/select_images_for_testing.py +0 -55
- api/batch_processing/postprocessing/__init__.py +0 -0
- api/batch_processing/postprocessing/add_max_conf.py +0 -64
- api/batch_processing/postprocessing/categorize_detections_by_size.py +0 -163
- api/batch_processing/postprocessing/combine_api_outputs.py +0 -249
- api/batch_processing/postprocessing/compare_batch_results.py +0 -958
- api/batch_processing/postprocessing/convert_output_format.py +0 -397
- api/batch_processing/postprocessing/load_api_results.py +0 -195
- api/batch_processing/postprocessing/md_to_coco.py +0 -310
- api/batch_processing/postprocessing/md_to_labelme.py +0 -330
- api/batch_processing/postprocessing/merge_detections.py +0 -401
- api/batch_processing/postprocessing/postprocess_batch_results.py +0 -1904
- api/batch_processing/postprocessing/remap_detection_categories.py +0 -170
- api/batch_processing/postprocessing/render_detection_confusion_matrix.py +0 -661
- api/batch_processing/postprocessing/repeat_detection_elimination/find_repeat_detections.py +0 -211
- api/batch_processing/postprocessing/repeat_detection_elimination/remove_repeat_detections.py +0 -82
- api/batch_processing/postprocessing/repeat_detection_elimination/repeat_detections_core.py +0 -1631
- api/batch_processing/postprocessing/separate_detections_into_folders.py +0 -731
- api/batch_processing/postprocessing/subset_json_detector_output.py +0 -696
- api/batch_processing/postprocessing/top_folders_to_bottom.py +0 -223
- api/synchronous/__init__.py +0 -0
- api/synchronous/api_core/animal_detection_api/__init__.py +0 -0
- api/synchronous/api_core/animal_detection_api/api_backend.py +0 -152
- api/synchronous/api_core/animal_detection_api/api_frontend.py +0 -266
- api/synchronous/api_core/animal_detection_api/config.py +0 -35
- api/synchronous/api_core/animal_detection_api/data_management/annotations/annotation_constants.py +0 -47
- api/synchronous/api_core/animal_detection_api/detection/detector_training/copy_checkpoints.py +0 -43
- api/synchronous/api_core/animal_detection_api/detection/detector_training/model_main_tf2.py +0 -114
- api/synchronous/api_core/animal_detection_api/detection/process_video.py +0 -543
- api/synchronous/api_core/animal_detection_api/detection/pytorch_detector.py +0 -304
- api/synchronous/api_core/animal_detection_api/detection/run_detector.py +0 -627
- api/synchronous/api_core/animal_detection_api/detection/run_detector_batch.py +0 -1029
- api/synchronous/api_core/animal_detection_api/detection/run_inference_with_yolov5_val.py +0 -581
- api/synchronous/api_core/animal_detection_api/detection/run_tiled_inference.py +0 -754
- api/synchronous/api_core/animal_detection_api/detection/tf_detector.py +0 -165
- api/synchronous/api_core/animal_detection_api/detection/video_utils.py +0 -495
- api/synchronous/api_core/animal_detection_api/md_utils/azure_utils.py +0 -174
- api/synchronous/api_core/animal_detection_api/md_utils/ct_utils.py +0 -262
- api/synchronous/api_core/animal_detection_api/md_utils/directory_listing.py +0 -251
- api/synchronous/api_core/animal_detection_api/md_utils/matlab_porting_tools.py +0 -97
- api/synchronous/api_core/animal_detection_api/md_utils/path_utils.py +0 -416
- api/synchronous/api_core/animal_detection_api/md_utils/process_utils.py +0 -110
- api/synchronous/api_core/animal_detection_api/md_utils/sas_blob_utils.py +0 -509
- api/synchronous/api_core/animal_detection_api/md_utils/string_utils.py +0 -59
- api/synchronous/api_core/animal_detection_api/md_utils/url_utils.py +0 -144
- api/synchronous/api_core/animal_detection_api/md_utils/write_html_image_list.py +0 -226
- api/synchronous/api_core/animal_detection_api/md_visualization/visualization_utils.py +0 -841
- api/synchronous/api_core/tests/__init__.py +0 -0
- api/synchronous/api_core/tests/load_test.py +0 -110
- classification/__init__.py +0 -0
- classification/aggregate_classifier_probs.py +0 -108
- classification/analyze_failed_images.py +0 -227
- classification/cache_batchapi_outputs.py +0 -198
- classification/create_classification_dataset.py +0 -627
- classification/crop_detections.py +0 -516
- classification/csv_to_json.py +0 -226
- classification/detect_and_crop.py +0 -855
- classification/efficientnet/__init__.py +0 -9
- classification/efficientnet/model.py +0 -415
- classification/efficientnet/utils.py +0 -610
- classification/evaluate_model.py +0 -520
- classification/identify_mislabeled_candidates.py +0 -152
- classification/json_to_azcopy_list.py +0 -63
- classification/json_validator.py +0 -695
- classification/map_classification_categories.py +0 -276
- classification/merge_classification_detection_output.py +0 -506
- classification/prepare_classification_script.py +0 -194
- classification/prepare_classification_script_mc.py +0 -228
- classification/run_classifier.py +0 -286
- classification/save_mislabeled.py +0 -110
- classification/train_classifier.py +0 -825
- classification/train_classifier_tf.py +0 -724
- classification/train_utils.py +0 -322
- data_management/__init__.py +0 -0
- data_management/annotations/__init__.py +0 -0
- data_management/annotations/annotation_constants.py +0 -34
- data_management/camtrap_dp_to_coco.py +0 -238
- data_management/cct_json_utils.py +0 -395
- data_management/cct_to_md.py +0 -176
- data_management/cct_to_wi.py +0 -289
- data_management/coco_to_labelme.py +0 -272
- data_management/coco_to_yolo.py +0 -662
- data_management/databases/__init__.py +0 -0
- data_management/databases/add_width_and_height_to_db.py +0 -33
- data_management/databases/combine_coco_camera_traps_files.py +0 -206
- data_management/databases/integrity_check_json_db.py +0 -477
- data_management/databases/subset_json_db.py +0 -115
- data_management/generate_crops_from_cct.py +0 -149
- data_management/get_image_sizes.py +0 -188
- data_management/importers/add_nacti_sizes.py +0 -52
- data_management/importers/add_timestamps_to_icct.py +0 -79
- data_management/importers/animl_results_to_md_results.py +0 -158
- data_management/importers/auckland_doc_test_to_json.py +0 -372
- data_management/importers/auckland_doc_to_json.py +0 -200
- data_management/importers/awc_to_json.py +0 -189
- data_management/importers/bellevue_to_json.py +0 -273
- data_management/importers/cacophony-thermal-importer.py +0 -796
- data_management/importers/carrizo_shrubfree_2018.py +0 -268
- data_management/importers/carrizo_trail_cam_2017.py +0 -287
- data_management/importers/cct_field_adjustments.py +0 -57
- data_management/importers/channel_islands_to_cct.py +0 -913
- data_management/importers/eMammal/copy_and_unzip_emammal.py +0 -180
- data_management/importers/eMammal/eMammal_helpers.py +0 -249
- data_management/importers/eMammal/make_eMammal_json.py +0 -223
- data_management/importers/ena24_to_json.py +0 -275
- data_management/importers/filenames_to_json.py +0 -385
- data_management/importers/helena_to_cct.py +0 -282
- data_management/importers/idaho-camera-traps.py +0 -1407
- data_management/importers/idfg_iwildcam_lila_prep.py +0 -294
- data_management/importers/jb_csv_to_json.py +0 -150
- data_management/importers/mcgill_to_json.py +0 -250
- data_management/importers/missouri_to_json.py +0 -489
- data_management/importers/nacti_fieldname_adjustments.py +0 -79
- data_management/importers/noaa_seals_2019.py +0 -181
- data_management/importers/pc_to_json.py +0 -365
- data_management/importers/plot_wni_giraffes.py +0 -123
- data_management/importers/prepare-noaa-fish-data-for-lila.py +0 -359
- data_management/importers/prepare_zsl_imerit.py +0 -131
- data_management/importers/rspb_to_json.py +0 -356
- data_management/importers/save_the_elephants_survey_A.py +0 -320
- data_management/importers/save_the_elephants_survey_B.py +0 -332
- data_management/importers/snapshot_safari_importer.py +0 -758
- data_management/importers/snapshot_safari_importer_reprise.py +0 -665
- data_management/importers/snapshot_serengeti_lila.py +0 -1067
- data_management/importers/snapshotserengeti/make_full_SS_json.py +0 -150
- data_management/importers/snapshotserengeti/make_per_season_SS_json.py +0 -153
- data_management/importers/sulross_get_exif.py +0 -65
- data_management/importers/timelapse_csv_set_to_json.py +0 -490
- data_management/importers/ubc_to_json.py +0 -399
- data_management/importers/umn_to_json.py +0 -507
- data_management/importers/wellington_to_json.py +0 -263
- data_management/importers/wi_to_json.py +0 -441
- data_management/importers/zamba_results_to_md_results.py +0 -181
- data_management/labelme_to_coco.py +0 -548
- data_management/labelme_to_yolo.py +0 -272
- data_management/lila/__init__.py +0 -0
- data_management/lila/add_locations_to_island_camera_traps.py +0 -97
- data_management/lila/add_locations_to_nacti.py +0 -147
- data_management/lila/create_lila_blank_set.py +0 -557
- data_management/lila/create_lila_test_set.py +0 -151
- data_management/lila/create_links_to_md_results_files.py +0 -106
- data_management/lila/download_lila_subset.py +0 -177
- data_management/lila/generate_lila_per_image_labels.py +0 -515
- data_management/lila/get_lila_annotation_counts.py +0 -170
- data_management/lila/get_lila_image_counts.py +0 -111
- data_management/lila/lila_common.py +0 -300
- data_management/lila/test_lila_metadata_urls.py +0 -132
- data_management/ocr_tools.py +0 -874
- data_management/read_exif.py +0 -681
- data_management/remap_coco_categories.py +0 -84
- data_management/remove_exif.py +0 -66
- data_management/resize_coco_dataset.py +0 -189
- data_management/wi_download_csv_to_coco.py +0 -246
- data_management/yolo_output_to_md_output.py +0 -441
- data_management/yolo_to_coco.py +0 -676
- detection/__init__.py +0 -0
- detection/detector_training/__init__.py +0 -0
- detection/detector_training/model_main_tf2.py +0 -114
- detection/process_video.py +0 -703
- detection/pytorch_detector.py +0 -337
- detection/run_detector.py +0 -779
- detection/run_detector_batch.py +0 -1219
- detection/run_inference_with_yolov5_val.py +0 -917
- detection/run_tiled_inference.py +0 -935
- detection/tf_detector.py +0 -188
- detection/video_utils.py +0 -606
- docs/source/conf.py +0 -43
- md_utils/__init__.py +0 -0
- md_utils/azure_utils.py +0 -174
- md_utils/ct_utils.py +0 -612
- md_utils/directory_listing.py +0 -246
- md_utils/md_tests.py +0 -968
- md_utils/path_utils.py +0 -1044
- md_utils/process_utils.py +0 -157
- md_utils/sas_blob_utils.py +0 -509
- md_utils/split_locations_into_train_val.py +0 -228
- md_utils/string_utils.py +0 -92
- md_utils/url_utils.py +0 -323
- md_utils/write_html_image_list.py +0 -225
- md_visualization/__init__.py +0 -0
- md_visualization/plot_utils.py +0 -293
- md_visualization/render_images_with_thumbnails.py +0 -275
- md_visualization/visualization_utils.py +0 -1537
- md_visualization/visualize_db.py +0 -551
- md_visualization/visualize_detector_output.py +0 -406
- megadetector-5.0.9.dist-info/RECORD +0 -224
- megadetector-5.0.9.dist-info/top_level.txt +0 -8
- taxonomy_mapping/__init__.py +0 -0
- taxonomy_mapping/map_lila_taxonomy_to_wi_taxonomy.py +0 -491
- taxonomy_mapping/map_new_lila_datasets.py +0 -154
- taxonomy_mapping/prepare_lila_taxonomy_release.py +0 -142
- taxonomy_mapping/preview_lila_taxonomy.py +0 -591
- taxonomy_mapping/retrieve_sample_image.py +0 -71
- taxonomy_mapping/simple_image_download.py +0 -218
- taxonomy_mapping/species_lookup.py +0 -834
- taxonomy_mapping/taxonomy_csv_checker.py +0 -159
- taxonomy_mapping/taxonomy_graph.py +0 -346
- taxonomy_mapping/validate_lila_category_mappings.py +0 -83
- {megadetector-5.0.9.dist-info → megadetector-5.0.11.dist-info}/WHEEL +0 -0
|
@@ -1,509 +0,0 @@
|
|
|
1
|
-
########
|
|
2
|
-
#
|
|
3
|
-
# sas_blob_utils.py
|
|
4
|
-
#
|
|
5
|
-
# This module contains helper functions for dealing with Shared Access Signatures
|
|
6
|
-
# (SAS) tokens for Azure Blob Storage.
|
|
7
|
-
#
|
|
8
|
-
# The default Azure Storage SAS URI format is:
|
|
9
|
-
#
|
|
10
|
-
# https://<account>.blob.core.windows.net/<container>/<blob>?<sas_token>
|
|
11
|
-
#
|
|
12
|
-
# This module assumes azure-storage-blob version 12.5.
|
|
13
|
-
#
|
|
14
|
-
# Documentation for Azure Blob Storage:
|
|
15
|
-
# docs.microsoft.com/en-us/azure/developer/python/sdk/storage/storage-blob-readme
|
|
16
|
-
#
|
|
17
|
-
# Documentation for SAS:
|
|
18
|
-
# docs.microsoft.com/en-us/azure/storage/common/storage-sas-overview
|
|
19
|
-
#
|
|
20
|
-
########
|
|
21
|
-
|
|
22
|
-
#%% Imports
|
|
23
|
-
|
|
24
|
-
from datetime import datetime, timedelta
|
|
25
|
-
import io
|
|
26
|
-
import re
|
|
27
|
-
from typing import (Any, AnyStr, Dict, IO, Iterable, List, Optional, Set, Tuple, Union)
|
|
28
|
-
from urllib import parse
|
|
29
|
-
import uuid
|
|
30
|
-
|
|
31
|
-
from tqdm import tqdm
|
|
32
|
-
|
|
33
|
-
from azure.storage.blob import (
|
|
34
|
-
BlobClient,
|
|
35
|
-
BlobProperties,
|
|
36
|
-
ContainerClient,
|
|
37
|
-
ContainerSasPermissions,
|
|
38
|
-
generate_container_sas)
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
#%% URI management
|
|
42
|
-
|
|
43
|
-
def build_azure_storage_uri(
        account: str,
        container: Optional[str] = None,
        blob: Optional[str] = None,
        sas_token: Optional[str] = None,
        account_url_template: str = 'https://{account}.blob.core.windows.net'
) -> str:
    """
    Assemble an Azure storage URI from its individual components.

    Args:
        account: str, name of Azure Storage account
        container: optional str, name of Azure Blob Storage container
        blob: optional str, name of blob, not URL-escaped;
            if blob is given, container must also be given
        sas_token: optional str, Shared Access Signature (SAS); a single
            leading '?' is stripped if present
        account_url_template: str, Python 3 string formatting template
            containing an '{account}' placeholder; defaults to the standard
            Azure Storage URL format.  Override this when using the Azurite
            Azure Storage emulator.

    Returns: str, Azure storage URI
    """

    result = account_url_template.format(account=account)
    if container is not None:
        result = f'{result}/{container}'
    if blob is not None:
        # A blob reference only makes sense inside a container
        assert container is not None
        result = f'{result}/{parse.quote(blob)}'
    if sas_token is not None:
        if sas_token[0] == '?':
            sas_token = sas_token[1:]
        result = f'{result}?{sas_token}'
    return result
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
def _get_resource_reference(prefix: str) -> str:
|
|
81
|
-
return '{}{}'.format(prefix, str(uuid.uuid4()).replace('-', ''))
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
def get_client_from_uri(container_uri: str) -> ContainerClient:
    """
    Gets a ContainerClient for the given container URI.

    Args:
        container_uri: str, URI to a blob storage container, may include
            a SAS token

    Returns: azure.storage.blob.ContainerClient for the container
    """

    return ContainerClient.from_container_url(container_uri)
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
def get_account_from_uri(sas_uri: str) -> str:
    """
    Extract the storage account name from an Azure Blob Storage URI.

    Assumes that sas_uri points to an Azure Blob Storage account hosted at
    a default Azure URI.  Does not work for locally-emulated Azure Storage
    or Azure Storage hosted at custom endpoints.
    """

    # Hostname looks like "<account>.blob.core.windows.net"; the account
    # name is everything before the first dot.
    hostname = parse.urlsplit(sas_uri).netloc
    account_name, _, _ = hostname.partition('.')
    return account_name
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
def is_container_uri(sas_uri: str) -> bool:
    """
    Returns True if the signed resource field ("sr") of the URI's SAS token
    is a container ("c") or a directory ("d"), and False otherwise
    (including when no "sr" field is present).
    """

    # parse_qs maps each query parameter name to a list of its values
    query = parse.parse_qs(parse.urlsplit(sas_uri).query)
    signed_resource = query.get('sr', [])
    return ('c' in signed_resource) or ('d' in signed_resource)
|
|
118
|
-
|
|
119
|
-
def is_blob_uri(sas_uri: str) -> bool:
    """
    Returns True if the signed resource field ("sr") of the URI's SAS token
    is a blob ("b"), and False otherwise (including when no "sr" field is
    present).
    """

    # parse_qs maps each query parameter name to a list of its values
    query = parse.parse_qs(parse.urlsplit(sas_uri).query)
    return 'b' in query.get('sr', [])
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
def get_container_from_uri(sas_uri: str, unquote: bool = True) -> str:
    """
    Gets the container name from an Azure Blob Storage URI.

    Assumes that sas_uri points to an Azure Blob Storage account hosted at
    a default Azure URI.  Does not work for locally-emulated Azure Storage
    or Azure Storage hosted at custom endpoints.

    Args:
        sas_uri: str, Azure blob storage URI, may include SAS token
        unquote: bool, whether to replace any %xx escapes by their
            single-character equivalent, default True

    Returns: str, container name

    Raises: ValueError, if sas_uri does not include a container
    """

    # The container is the first path segment after the hostname
    path = parse.urlsplit(sas_uri).path.lstrip('/')
    container, _, _ = path.partition('/')
    if not container:
        raise ValueError('Given sas_uri does not include a container.')
    return parse.unquote(container) if unquote else container
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
def get_blob_from_uri(sas_uri: str, unquote: bool = True) -> str:
    """
    Gets the blob name (the path from the container root) from an Azure
    Blob Storage URI pointing to an individual blob.

    Args:
        sas_uri: str, Azure blob storage URI, may include SAS token
        unquote: bool, whether to replace any %xx escapes by their
            single-character equivalent, default True

    Returns: str, blob name (path to the blob from the root container)

    Raises: ValueError, if sas_uri does not include a blob name
    """

    # Everything after the first path segment (the container) is the blob name
    path = parse.urlsplit(sas_uri).path.lstrip('/')
    _, separator, blob_name = path.partition('/')
    if not separator or not blob_name:
        raise ValueError('Given sas_uri does not include a blob name')
    return parse.unquote(blob_name) if unquote else blob_name
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
def get_sas_token_from_uri(sas_uri: str) -> Optional[str]:
    """
    Gets the query part of a SAS URI, which contains the token's
    permissions, access times and signature.

    Args:
        sas_uri: str, Azure blob storage URI, may include SAS token

    Returns: str, query part of the SAS token (without leading '?'),
        or None if the URI has no query string.
    """

    query = parse.urlsplit(sas_uri).query
    # An empty query string means there is no SAS token
    return query if query else None
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
def get_resource_type_from_uri(sas_uri: str) -> Optional[str]:
    """
    Gets the resource type pointed to by this SAS token.

    Args:
        sas_uri: str, Azure blob storage URI with SAS token

    Returns: A string (either 'blob' or 'container') or None.
    """

    # "sr" is the signed-resource field; 'b' wins over 'c' when both appear
    signed_resource = parse.parse_qs(parse.urlsplit(sas_uri).query).get('sr', [])
    if 'b' in signed_resource:
        return 'blob'
    if 'c' in signed_resource:
        return 'container'
    return None
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
def get_endpoint_suffix(sas_uri: str) -> str:
    """
    Gets the endpoint at which the blob storage account is served.

    Type annotations added for consistency with the rest of this module.

    Args:
        sas_uri: str, Azure blob storage URI with SAS token

    Returns: A string, usually 'core.windows.net' or 'core.chinacloudapi.cn',
        to use for the `endpoint` argument in various blob storage SDK
        functions.

    Raises: IndexError, if the URI's hostname does not contain '.blob.'
    """

    url_parts = parse.urlsplit(sas_uri)
    # Hostname looks like "<account>.blob.<endpoint_suffix>"; the trailing
    # split('/') is defensive in case a path fragment leaks into netloc.
    suffix = url_parts.netloc.split('.blob.')[1].split('/')[0]
    return suffix
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
def get_permissions_from_uri(sas_uri: str) -> Set[str]:
    """
    Gets the permissions granted by this SAS token.

    Args:
        sas_uri: str, Azure blob storage URI with SAS token

    Returns: A set containing some of 'read', 'write', 'delete' and 'list'.
        An empty set is returned if no permission is specified in sas_uri.
    """

    # "sp" is the signed-permissions field; each character is one permission
    query = parse.parse_qs(parse.urlsplit(sas_uri).query)
    if 'sp' not in query:
        return set()
    permission_chars = query['sp'][0]
    flag_names = {'r': 'read', 'w': 'write', 'd': 'delete', 'l': 'list'}
    return {name for flag, name in flag_names.items() if flag in permission_chars}
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
def get_all_query_parts(sas_uri: str) -> Dict[str, Any]:
    """
    Parses the query string of a SAS URI into a dict mapping each parameter
    name to the list of its values.
    """

    return parse.parse_qs(parse.urlsplit(sas_uri).query)
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
#%% Blob
|
|
279
|
-
|
|
280
|
-
def check_blob_exists(sas_uri: str, blob_name: Optional[str] = None) -> bool:
    """
    Checks whether a given URI points to an actual blob.

    Assumes that sas_uri points to an Azure Blob Storage account hosted at
    a default Azure URI.  Does not work for locally-emulated Azure Storage
    or Azure Storage hosted at custom endpoints; in those cases, create a
    BlobClient using the default constructor instead of from_blob_url(),
    and use the BlobClient.exists() method directly.

    Args:
        sas_uri: str, URI to a container or a blob; treated as a container
            URI when blob_name is given, otherwise as a blob URI
        blob_name: optional str, name of blob, not URL-escaped; must be
            given if sas_uri is a URI to a container

    Returns: bool, whether sas_uri points to an existing blob
    """

    # When a blob name is supplied, sas_uri is a container URI and we must
    # first build the full blob URI from it
    blob_uri = sas_uri if blob_name is None else build_blob_uri(
        container_uri=sas_uri, blob_name=blob_name)

    with BlobClient.from_blob_url(blob_uri) as client:
        return client.exists()
|
|
306
|
-
|
|
307
|
-
|
|
308
|
-
def upload_blob(container_uri: str, blob_name: str,
                data: Union[Iterable[AnyStr], IO[AnyStr]],
                overwrite: bool = False) -> str:
    """
    Uploads data as a new blob with the given name.

    Args:
        container_uri: str, URI to a container, may include SAS token
        blob_name: str, name of the blob to upload
        data: str, bytes, or IO stream; str data is assumed to be utf-8
        overwrite: bool, whether to overwrite an existing blob (if any)

    Returns: str, URL to the blob; includes a SAS token if container_uri
        had one
    """

    account_url, container_name, sas_token = split_container_uri(container_uri)
    client = BlobClient(account_url=account_url,
                        container_name=container_name,
                        blob_name=blob_name,
                        credential=sas_token)
    with client:
        client.upload_blob(data, overwrite=overwrite)
        return client.url
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
def download_blob_to_stream(sas_uri: str) -> Tuple[io.BytesIO, BlobProperties]:
    """
    Downloads a blob into an in-memory IO stream.

    Args:
        sas_uri: str, URI to a blob

    Returns:
        stream: io.BytesIO, positioned at the start; remember to close it
            when finished
        properties: BlobProperties for the downloaded blob

    Raises: azure.core.exceptions.ResourceNotFoundError, if sas_uri points
        to a non-existent blob
    """

    stream = io.BytesIO()
    with BlobClient.from_blob_url(sas_uri) as client:
        client.download_blob().readinto(stream)
        # Rewind so callers can read from the beginning
        stream.seek(0)
        properties = client.get_blob_properties()
    return stream, properties
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
def build_blob_uri(container_uri: str, blob_name: str) -> str:
    """
    Builds a blob URI from a container URI and a blob name.

    Args:
        container_uri: str, URI to blob storage container, in the form
            <account_url>/<container>?<sas_token>
        blob_name: str, name of blob, not URL-escaped

    Returns: str, blob URI <account_url>/<container>/<blob_name>?<sas_token>,
        where <blob_name> is URL-escaped
    """

    account_url, container, sas_token = split_container_uri(container_uri)

    escaped_name = parse.quote(blob_name)
    pieces = [account_url, '/', container, '/', escaped_name]
    if sas_token is not None:
        pieces += ['?', sas_token]
    return ''.join(pieces)
|
|
372
|
-
|
|
373
|
-
|
|
374
|
-
#%% Container
|
|
375
|
-
|
|
376
|
-
def list_blobs_in_container(
        container_uri: str,
        blob_prefix: Optional[str] = None,
        blob_suffix: Optional[Union[str, Tuple[str]]] = None,
        rsearch: Optional[str] = None,
        limit: Optional[int] = None,
        verbose: Optional[bool] = True
) -> List[str]:
    """
    Get a sorted list of blob names in this container.

    Args:
        container_uri: str, URI to a container, may include SAS token
        blob_prefix: optional str, returned results will only contain blob names
            to with this prefix
        blob_suffix: optional str or tuple of str, returned results will only
            contain blob names with this/these suffix(es). The blob names will
            be lowercased first before comparing with the suffix(es).
        rsearch: optional str, returned results will only contain blob names
            that match this regex. Can also be a list of regexes, in which case
            blobs matching *any* of the regex's will be returned.
        limit: int, maximum # of blob names to list
            if None, then returns all blob names
        verbose: optional bool, whether to print progress (via tqdm) and
            a summary line, default True

    Returns:
        sorted list of blob names, of length limit or shorter.

    Raises:
        ValueError, if container_uri carries a SAS token that is not scoped
        to a container, or if blob_prefix/blob_suffix have invalid types.
    """

    if verbose:
        print('Listing blobs')

    # A SAS token scoped to a blob cannot be used to enumerate a container
    if (get_sas_token_from_uri(container_uri) is not None
            and get_resource_type_from_uri(container_uri) != 'container'):
        raise ValueError('The SAS token provided is not for a container.')

    if blob_prefix is not None and not isinstance(blob_prefix, str):
        raise ValueError('blob_prefix must be a str.')

    if (blob_suffix is not None
            and not isinstance(blob_suffix, str)
            and not isinstance(blob_suffix, tuple)):
        raise ValueError('blob_suffix must be a str or a tuple of strings')

    list_blobs = []
    with get_client_from_uri(container_uri) as container_client:
        # Prefix filtering happens server-side; suffix/regex filtering is
        # applied client-side below
        generator = container_client.list_blobs(
            name_starts_with=blob_prefix)

        if blob_suffix is None and rsearch is None:
            # Fast path: no client-side filtering needed
            list_blobs = [blob.name for blob in tqdm(generator,disable=(not verbose))]
            i = len(list_blobs)
        else:
            # i counts all blobs enumerated, matching or not (for reporting)
            i = 0
            for blob in tqdm(generator,disable=(not verbose)):
                i += 1
                # str.endswith accepts a tuple of suffixes directly
                suffix_ok = (blob_suffix is None
                             or blob.name.lower().endswith(blob_suffix))
                regex_ok = False
                if rsearch is None:
                    regex_ok = True
                else:
                    if not isinstance(rsearch, list):
                        rsearch = [rsearch]
                    # Check whether this blob name matches *any* of our regex's
                    for expr in rsearch:
                        if re.search(expr, blob.name) is not None:
                            regex_ok = True
                            break
                if suffix_ok and regex_ok:
                    list_blobs.append(blob.name)
                    # Stop enumerating as soon as we've hit the limit
                    if limit is not None and len(list_blobs) == limit:
                        break

    if verbose:
        print(f'Enumerated {len(list_blobs)} matching blobs out of {i} total')

    return sorted(list_blobs)  # sort for determinism
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
def generate_writable_container_sas(account_name: str,
                                    account_key: str,
                                    container_name: str,
                                    access_duration_hrs: float,
                                    account_url: Optional[str] = None
                                    ) -> str:
    """
    Creates a container and returns a SAS URI with read/write/list
    permissions.

    Args:
        account_name: str, name of blob storage account
        account_key: str, account SAS token or account shared access key
        container_name: str, name of container to create, must not match an
            existing container in the given storage account
        access_duration_hrs: float, validity period of the SAS token, in hours
        account_url: str, optional, defaults to default Azure Storage URL

    Returns: str, URL to newly created container

    Raises: azure.core.exceptions.ResourceExistsError, if container already
        exists
    """

    # Local import: datetime.utcnow() is deprecated as of Python 3.12, so we
    # build the expiry from a timezone-aware datetime instead.
    from datetime import datetime, timedelta, timezone

    if account_url is None:
        account_url = build_azure_storage_uri(account=account_name)

    # Create the container; raises ResourceExistsError if it's already there
    with ContainerClient(account_url=account_url,
                         container_name=container_name,
                         credential=account_key) as container_client:
        container_client.create_container()

    permissions = ContainerSasPermissions(read=True, write=True, list=True)
    container_sas_token = generate_container_sas(
        account_name=account_name,
        container_name=container_name,
        account_key=account_key,
        permission=permissions,
        expiry=datetime.now(timezone.utc) + timedelta(hours=access_duration_hrs))

    return f'{account_url}/{container_name}?{container_sas_token}'
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
def split_container_uri(container_uri: str) -> Tuple[str, str, Optional[str]]:
    """
    Splits a blob storage container URI into its component parts.

    Args:
        container_uri: str, URI to blob storage container
            <account_url>/<container>?<sas_token>

    Returns: account_url, container_name, sas_token
    """

    # Everything before the '?' is <account_url>/<container>
    account_and_container = container_uri.partition('?')[0]

    # The last path component is the container name
    account_url, container_name = account_and_container.rsplit('/', maxsplit=1)

    # None if the URI carries no SAS token
    sas_token = get_sas_token_from_uri(container_uri)

    return account_url, container_name, sas_token
|
|
@@ -1,59 +0,0 @@
|
|
|
1
|
-
########
|
|
2
|
-
#
|
|
3
|
-
# string_utils.py
|
|
4
|
-
#
|
|
5
|
-
# Miscellaneous string utilities
|
|
6
|
-
#
|
|
7
|
-
########
|
|
8
|
-
|
|
9
|
-
import re
|
|
10
|
-
|
|
11
|
-
def is_float(s):
    """
    Checks whether a string represents a valid float
    """

    try:
        float(s)
    except ValueError:
        return False
    else:
        return True
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
def human_readable_to_bytes(size):
    """
    Given a human-readable byte string (e.g. 2G, 10GB, 30MB, 20KB),
    return the number of bytes. Will return 0 if the argument has
    unexpected form.

    Args:
        size (str): human-readable size string

    Returns:
        int or float: number of bytes, or 0 for unparseable input

    https://gist.github.com/beugley/ccd69945346759eb6142272a6d69b4e0
    """

    # Remove all whitespace, e.g. "10 GB" --> "10GB"
    size = re.sub(r'\s+', '', size)

    # A trailing 'B' is optional, e.g. "10GB" and "10G" are equivalent
    if size.endswith('B'):
        size = size[:-1]

    # The empty string is "unexpected form" per the contract above;
    # previously this case raised an IndexError.
    if not size:
        return 0

    # A bare integer byte count, e.g. "1024"
    if size.isdigit():
        return int(size)

    # A bare float byte count, e.g. "2.5"
    try:
        return float(size)
    except ValueError:
        pass

    # A number followed by a single-character unit, e.g. "10.5G"
    value_str = size[:-1]
    unit = size[-1]
    unit_multipliers = {'K': 1024,
                        'M': 1024 * 1024,
                        'G': 1024 * 1024 * 1024,
                        'T': 1024 * 1024 * 1024 * 1024}

    try:
        value = float(value_str)
    except ValueError:
        # Non-numeric prefix, e.g. "zzz"
        return 0

    if unit not in unit_multipliers:
        # Unrecognized unit character
        return 0

    return value * unit_multipliers[unit]
|
|
@@ -1,144 +0,0 @@
|
|
|
1
|
-
########
|
|
2
|
-
#
|
|
3
|
-
# url_utils.py
|
|
4
|
-
#
|
|
5
|
-
# Frequently-used functions for downloading or manipulating URLs
|
|
6
|
-
#
|
|
7
|
-
########
|
|
8
|
-
|
|
9
|
-
#%% Imports and constants
|
|
10
|
-
|
|
11
|
-
import os
|
|
12
|
-
import re
|
|
13
|
-
import urllib
|
|
14
|
-
import tempfile
|
|
15
|
-
import requests
|
|
16
|
-
|
|
17
|
-
from tqdm import tqdm
|
|
18
|
-
from urllib.parse import urlparse
|
|
19
|
-
|
|
20
|
-
url_utils_temp_dir = None
|
|
21
|
-
max_path_len = 255
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
#%% Download functions
|
|
25
|
-
|
|
26
|
-
class DownloadProgressBar():
    """
    Progress-bar callback suitable for passing to urllib.request.urlretrieve.

    https://stackoverflow.com/questions/37748105/how-to-use-progressbar-module-with-urlretrieve
    """

    def __init__(self):
        # The bar is created lazily on the first __call__
        self.pbar = None

    def __call__(self, block_num, block_size, total_size):
        if self.pbar is None:
            # This is a pretty random import I'd rather not depend on outside of the
            # rare case where it's used, so importing locally
            # pip install progressbar2
            import progressbar
            self.pbar = progressbar.ProgressBar(max_value=total_size)
            self.pbar.start()

        downloaded = block_num * block_size
        if downloaded >= total_size:
            self.pbar.finish()
        else:
            self.pbar.update(downloaded)
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
def get_temp_folder(preferred_name='url_utils'):
    """
    Returns a module-wide temporary folder, creating it on first use.
    """

    global url_utils_temp_dir

    # Lazily create the folder the first time this is called
    if url_utils_temp_dir is None:
        candidate_dir = os.path.join(tempfile.gettempdir(), preferred_name)
        os.makedirs(candidate_dir, exist_ok=True)
        url_utils_temp_dir = candidate_dir

    return url_utils_temp_dir
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
def download_url(url, destination_filename=None, progress_updater=None,
                 force_download=False, verbose=True):
    """
    Download a URL to a file. If no file is specified, creates a temporary file,
    with a semi-best-effort to avoid filename collisions.

    Prints some diagnostic information and makes sure to omit SAS tokens from printouts.

    Args:
        url (str): URL to download
        destination_filename (str, optional): target filename; when None, a
            name derived from the URL in a temporary folder is used
        progress_updater: can be None, a bool, or a specific callback; True
            selects the default DownloadProgressBar
        force_download (bool): re-download even if the target file exists
        verbose (bool): enable additional debug output

    Returns:
        str: the destination filename
    """

    if progress_updater is not None and isinstance(progress_updater, bool):
        progress_updater = DownloadProgressBar() if progress_updater else None

    # Used for printouts only, so we never echo SAS tokens
    url_no_sas = url.split('?')[0]

    if destination_filename is None:

        target_folder = get_temp_folder()

        # This does not guarantee uniqueness, hence "semi-best-effort".
        # Re-uses url_no_sas rather than re-splitting the URL.
        url_as_filename = re.sub(r'\W+', '', url_no_sas)

        # Measure against the folder we're actually writing to (previously
        # this read the module-level temp dir global directly)
        n_folder_chars = len(target_folder)
        if len(url_as_filename) + n_folder_chars > max_path_len:
            print('Warning: truncating filename target to {} characters'.format(max_path_len))
            url_as_filename = url_as_filename[-1*(max_path_len-n_folder_chars):]
        destination_filename = os.path.join(target_folder, url_as_filename)

    if (not force_download) and (os.path.isfile(destination_filename)):
        if verbose:
            print('Bypassing download of already-downloaded file {}'.format(
                os.path.basename(url_no_sas)))
    else:
        if verbose:
            print('Downloading file {} to {}'.format(
                os.path.basename(url_no_sas), destination_filename), end='')
        target_dir = os.path.dirname(destination_filename)
        os.makedirs(target_dir, exist_ok=True)
        urllib.request.urlretrieve(url, destination_filename, progress_updater)
        assert os.path.isfile(destination_filename)
        n_bytes = os.path.getsize(destination_filename)
        if verbose:
            print('...done, {} bytes.'.format(n_bytes))

    return destination_filename
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
def download_relative_filename(url, output_base, verbose=False):
    """
    Download a URL to output_base, preserving relative path

    Args:
        url (str): URL to download
        output_base (str): base folder to which the URL's path is appended
        verbose (bool): enable additional debug output

    Returns:
        str: the local destination filename

    Raises:
        ValueError: if the URL's path is not absolute
    """

    p = urlparse(url)

    # Validate explicitly rather than via 'assert', which is stripped
    # when Python runs with -O
    if not p.path.startswith('/'):
        raise ValueError('Expected an absolute path in URL {}'.format(url))

    # Remove the leading '/'
    relative_filename = p.path[1:]
    destination_filename = os.path.join(output_base, relative_filename)
    download_url(url, destination_filename, verbose=verbose)
    return destination_filename
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
def test_urls(urls, error_on_failure=True):
    """
    Verify that a list of URLs is available (returns status 200). By default,
    errors if any URL is unavailable. If error_on_failure is False, returns
    status codes for each URL.

    TODO: trivially parallelizable.
    """

    status_codes = []

    for url in tqdm(urls):

        response = requests.get(url)
        code = response.status_code

        if error_on_failure and code != 200:
            raise ValueError('Could not access {}: error {}'.format(url, code))

        status_codes.append(code)

    return status_codes
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|