cloud-files 4.23.0__py3-none-any.whl → 4.24.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {cloud_files-4.23.0.dist-info → cloud_files-4.24.1.dist-info}/METADATA +1 -1
- {cloud_files-4.23.0.dist-info → cloud_files-4.24.1.dist-info}/RECORD +10 -10
- cloud_files-4.24.1.dist-info/pbr.json +1 -0
- cloudfiles/cloudfiles.py +56 -14
- cloudfiles/interfaces.py +47 -17
- cloud_files-4.23.0.dist-info/pbr.json +0 -1
- {cloud_files-4.23.0.dist-info → cloud_files-4.24.1.dist-info}/AUTHORS +0 -0
- {cloud_files-4.23.0.dist-info → cloud_files-4.24.1.dist-info}/LICENSE +0 -0
- {cloud_files-4.23.0.dist-info → cloud_files-4.24.1.dist-info}/WHEEL +0 -0
- {cloud_files-4.23.0.dist-info → cloud_files-4.24.1.dist-info}/entry_points.txt +0 -0
- {cloud_files-4.23.0.dist-info → cloud_files-4.24.1.dist-info}/top_level.txt +0 -0

{cloud_files-4.23.0.dist-info → cloud_files-4.24.1.dist-info}/RECORD
CHANGED

@@ -1,11 +1,11 @@
 cloudfiles/__init__.py,sha256=pLB4CcV2l3Jgv_ni1520Np1pfzFj8Cpr87vNxFT3rNI,493
 cloudfiles/buckets.py,sha256=eRAYdDfvVpNyJyK5ryDRMwgNJUeEuFBJ6doWU2JkAcA,74
-cloudfiles/cloudfiles.py,sha256=
+cloudfiles/cloudfiles.py,sha256=YUf_-7DS8-2mCKOWT6mMKxl1glxMTEGomhpCnebtsy8,44801
 cloudfiles/compression.py,sha256=pqYdpu5vfFv-094BpfZ2pgRjVu7ESM9pAZC09P6E8bY,6150
 cloudfiles/connectionpools.py,sha256=aL8RiSjRepECfgAFmJcz80aJFKbou7hsbuEgugDKwB8,4814
 cloudfiles/exceptions.py,sha256=H2IcMlZoy2Bsn-6wCPwyLDjg66LZCyxtcf3s_p21FDw,770
 cloudfiles/gcs.py,sha256=_njJ7TpqwrHCjPHRGkBN5alCrCWKM2m9qdy5DhxMZ7U,3718
-cloudfiles/interfaces.py,sha256=
+cloudfiles/interfaces.py,sha256=LAjmO40mhg_foEjdzdP0YxUxpk9Ut8C7puortTqSGXI,34539
 cloudfiles/lib.py,sha256=fEqL5APu_WQhl2yxqQbwE7msHdu7U8pstAJw6LgoKO0,5142
 cloudfiles/paths.py,sha256=tqR9XnRdAKopTJqSM6V5xrMo1xfmBdl9b5DpBLZnoB0,9998
 cloudfiles/resumable_tools.py,sha256=pK-VcoPjQ2BjGjvlvH4dDCBf6lNsqHG-weiBgxVFbzA,5838
@@ -16,11 +16,11 @@ cloudfiles/typing.py,sha256=f3ZYkNfN9poxhGu5j-P0KCxjCCqSn9HAg5KiIPkjnCg,416
 cloudfiles_cli/LICENSE,sha256=Jna4xYE8CCQmaxjr5Fs-wmUBnIQJ1DGcNn9MMjbkprk,1538
 cloudfiles_cli/__init__.py,sha256=Wftt3R3F21QsHtWqx49ODuqT9zcSr0em7wk48kcH0WM,29
 cloudfiles_cli/cloudfiles_cli.py,sha256=eETIOK4QyztQcpA4ZRny21SobLtcrPDlzZ_JaKBmmmA,28449
-cloud_files-4.
-cloud_files-4.
-cloud_files-4.
-cloud_files-4.
-cloud_files-4.
-cloud_files-4.
-cloud_files-4.
-cloud_files-4.
+cloud_files-4.24.1.dist-info/AUTHORS,sha256=7E2vC894bbLPO_kvUuEB2LFZZbIxZn23HabxH7x0Hgo,266
+cloud_files-4.24.1.dist-info/LICENSE,sha256=Jna4xYE8CCQmaxjr5Fs-wmUBnIQJ1DGcNn9MMjbkprk,1538
+cloud_files-4.24.1.dist-info/METADATA,sha256=neXuIXtjmY7tLeSTTNH95jTyw_rHsnrYfum-AFEboR0,26804
+cloud_files-4.24.1.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92
+cloud_files-4.24.1.dist-info/entry_points.txt,sha256=xlirb1FVhn1mbcv4IoyMEGumDqKOA4VMVd3drsRQxIg,51
+cloud_files-4.24.1.dist-info/pbr.json,sha256=whK0oWj9vjQb1lRu9_jrdIBm1TA0onqx-o5fzWihcBo,46
+cloud_files-4.24.1.dist-info/top_level.txt,sha256=xPyrST3okJbsmdCF5IC2gYAVxg_aD5AYVTnNo8UuoZU,26
+cloud_files-4.24.1.dist-info/RECORD,,

cloud_files-4.24.1.dist-info/pbr.json
ADDED

@@ -0,0 +1 @@
+{"git_version": "560597e", "is_release": true}

cloudfiles/cloudfiles.py
CHANGED
@@ -951,6 +951,7 @@ class CloudFiles:
     block_size:int = 64,
     reencode:Optional[str] = None,
     content_type:Optional[str] = None,
+    allow_missing:bool = False,
   ) -> None:
     """
     Transfer all files from this CloudFiles storage
@@ -992,7 +993,11 @@
       green=self.green, num_threads=self.num_threads,
     )
 
-    return cf_dest.transfer_from(
+    return cf_dest.transfer_from(
+      self, paths, block_size,
+      reencode, content_type,
+      allow_missing,
+    )
 
   def transfer_from(
     self,
@@ -1001,6 +1006,7 @@
     block_size:int = 64,
     reencode:Optional[str] = None,
     content_type:Optional[str] = None,
+    allow_missing:bool = False,
   ) -> None:
     """
     Transfer all files from the source CloudFiles storage
@@ -1053,7 +1059,10 @@
       and self.protocol == "file"
       and reencode is None
     ):
-      self.__transfer_file_to_file(
+      self.__transfer_file_to_file(
+        cf_src, self, paths, total,
+        pbar, block_size, allow_missing
+      )
     elif (
       cf_src.protocol == "file"
       and self.protocol != "file"
@@ -1061,7 +1070,8 @@
     ):
       self.__transfer_file_to_remote(
         cf_src, self, paths, total,
-        pbar, block_size, content_type
+        pbar, block_size, content_type,
+        allow_missing,
       )
     elif (
       (
@@ -1076,19 +1086,22 @@
     ):
       self.__transfer_cloud_internal(
         cf_src, self, paths,
-        total, pbar, block_size
+        total, pbar, block_size,
+        allow_missing,
       )
     else:
       self.__transfer_general(
         cf_src, self, paths, total,
         pbar, block_size,
-        reencode, content_type
+        reencode, content_type,
+        allow_missing,
       )
 
   def __transfer_general(
     self, cf_src, cf_dest, paths,
     total, pbar, block_size,
-    reencode, content_type
+    reencode, content_type,
+    allow_missing
   ):
     """
     Downloads the file into RAM, transforms
@@ -1107,7 +1120,13 @@
     if reencode is not None:
       downloaded = compression.transcode(downloaded, reencode, in_place=True)
     def renameiter():
+      nonlocal allow_missing
       for item in downloaded:
+        if item["content"] is None:
+          if allow_missing:
+            item["content"] = b""
+          else:
+            raise FileNotFoundError(f"{item['path']}")
         if (
           item["tags"] is not None
           and "dest_path" in item["tags"]
@@ -1126,7 +1145,7 @@
 
   def __transfer_file_to_file(
     self, cf_src, cf_dest, paths,
-    total, pbar, block_size
+    total, pbar, block_size, allow_missing
   ):
     """
     shutil.copyfile, starting in Python 3.8, uses
@@ -1148,12 +1167,21 @@
       if dest_ext_compress != dest_ext:
         dest += dest_ext_compress
 
-
+      try:
+        shutil.copyfile(src, dest) # avoids user space
+      except FileNotFoundError:
+        if allow_missing:
+          with open(dest, "wb") as f:
+            f.write(b'')
+        else:
+          raise
+
       pbar.update(1)
 
   def __transfer_file_to_remote(
     self, cf_src, cf_dest, paths,
-    total, pbar, block_size, content_type
+    total, pbar, block_size, content_type,
+    allow_missing
   ):
     """
     Provide file handles instead of slurped binaries
@@ -1174,19 +1202,29 @@
       handle_path, encoding = FileInterface.get_encoded_file_path(
         os.path.join(srcdir, src_path)
       )
+      try:
+        handle = open(handle_path, "rb")
+      except FileNotFoundError:
+        if allow_missing:
+          handle = b''
+        else:
+          raise
+
       to_upload.append({
         "path": dest_path,
-        "content":
+        "content": handle,
         "compress": encoding,
       })
     cf_dest.puts(to_upload, raw=True, progress=False, content_type=content_type)
     for item in to_upload:
-      item["content"]
+      handle = item["content"]
+      if hasattr(handle, "close"):
+        handle.close()
     pbar.update(len(block_paths))
 
   def __transfer_cloud_internal(
     self, cf_src, cf_dest, paths,
-    total, pbar, block_size
+    total, pbar, block_size, allow_missing
   ):
     """
     For performing internal transfers in gs or s3.
@@ -1206,8 +1244,12 @@
       dest_key = key
 
       dest_key = posixpath.join(cf_dest._path.path, dest_key)
-      conn.copy_file(src_key, cf_dest._path.bucket, dest_key)
-
+      found = conn.copy_file(src_key, cf_dest._path.bucket, dest_key)
+
+      if found == False and not allow_missing:
+        raise FileNotFoundError(src_key)
+
+      return int(found)
 
     results = schedule_jobs(
       fns=( partial(thunk_copy, path) for path in paths ),
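
Taken together, the cloudfiles.py changes thread a new `allow_missing` flag through `transfer_to`, `transfer_from`, and the private transfer helpers: when a source object is absent, it is written out as an empty file at the destination instead of aborting the transfer with `FileNotFoundError`. A minimal usage sketch; the bucket and directory paths below are placeholders, not part of this diff:

```python
from cloudfiles import CloudFiles

# Placeholder source location for illustration only.
cf = CloudFiles("gs://example-bucket/dataset")

# Default behavior: a missing source object raises FileNotFoundError.
# With allow_missing=True (new in 4.24), it becomes an empty destination file.
cf.transfer_to(
  "file:///tmp/dataset-copy",
  paths=["info", "chunk_0", "chunk_1"],
  allow_missing=True,
)
```
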
cloudfiles/interfaces.py
CHANGED
@@ -22,7 +22,7 @@ import fasteners
 
 from .compression import COMPRESSION_TYPES
 from .connectionpools import S3ConnectionPool, GCloudBucketPool, MemoryPool, MEMORY_DATA
-from .exceptions import MD5IntegrityError
+from .exceptions import MD5IntegrityError, CompressionError
 from .lib import mkdir, sip, md5, validate_s3_multipart_etag
 from .secrets import http_credentials, CLOUD_FILES_DIR, CLOUD_FILES_LOCK_DIR
 
@@ -82,6 +82,21 @@ retry = tenacity.retry(
   wait=tenacity.wait_random_exponential(0.5, 60.0),
 )
 
+def retry_if_not(exception_type):
+  if type(exception_type) != list:
+    exception_type = [ exception_type ]
+
+  conditions = tenacity.retry_if_not_exception_type(exception_type[0])
+  for et in exception_type[1:]:
+    conditions = conditions | tenacity.retry_if_not_exception_type(et)
+
+  return tenacity.retry(
+    retry=conditions,
+    reraise=True,
+    stop=tenacity.stop_after_attempt(7),
+    wait=tenacity.wait_random_exponential(0.5, 60.0),
+  )
+
 class StorageInterface(object):
   exists_batch_size = 1
   delete_batch_size = 1
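
The new `retry_if_not` helper builds a tenacity decorator that keeps the usual retry policy (7 attempts, random exponential backoff) but re-raises the named exception types immediately rather than retrying them. A rough standalone equivalent for a single exception type; the function and exception class below are illustrative stand-ins, not the package's code:

```python
import tenacity

class CompressionError(Exception):
  """Stand-in for cloudfiles.exceptions.CompressionError."""

# Roughly what retry_if_not(CompressionError) produces: transient failures
# are retried up to 7 times with random exponential backoff, while a
# CompressionError propagates immediately instead of being retried.
@tenacity.retry(
  retry=tenacity.retry_if_not_exception_type(CompressionError),
  reraise=True,
  stop=tenacity.stop_after_attempt(7),
  wait=tenacity.wait_random_exponential(0.5, 60.0),
)
def flaky_upload():
  ...
```
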
@@ -528,7 +543,7 @@ class GoogleCloudStorageInterface(StorageInterface):
   def get_path_to_file(self, file_path):
     return posixpath.join(self._path.path, file_path)
 
-  @
+  @retry_if_not(CompressionError)
   def put_file(self, file_path, content, content_type,
     compress, cache_control=None, storage_class=None):
     key = self.get_path_to_file(file_path)
@@ -545,7 +560,7 @@ class GoogleCloudStorageInterface(StorageInterface):
     elif compress in ("bzip2", "bz2"):
       blob.content_encoding = "bz2"
     elif compress:
-      raise
+      raise CompressionError("Compression type {} not supported.".format(compress))
 
     if cache_control:
       blob.cache_control = cache_control
@@ -562,11 +577,17 @@ class GoogleCloudStorageInterface(StorageInterface):
     with GCS_BUCKET_POOL_LOCK:
       pool = GC_POOL[GCloudBucketPoolParams(dest_bucket, self._request_payer)]
     dest_bucket = pool.get_connection(self._secrets, None)
-    self._bucket.copy_blob(
-      source_blob, dest_bucket, dest_key
-    )
 
-
+    try:
+      self._bucket.copy_blob(
+        source_blob, dest_bucket, dest_key
+      )
+    except google.api_core.exceptions.NotFound:
+      return False
+
+    return True
+
+  @retry_if_not(google.cloud.exceptions.NotFound)
   def get_file(self, file_path, start=None, end=None, part_size=None):
     key = self.get_path_to_file(file_path)
     blob = self._bucket.blob( key )
@@ -590,7 +611,7 @@ class GoogleCloudStorageInterface(StorageInterface):
 
     return (content, blob.content_encoding, hash_value, hash_type)
 
-  @
+  @retry_if_not(google.cloud.exceptions.NotFound)
   def head(self, file_path):
     key = self.get_path_to_file(file_path)
     blob = self._bucket.get_blob(key)
@@ -609,7 +630,7 @@ class GoogleCloudStorageInterface(StorageInterface):
       "Component-Count": blob.component_count,
     }
 
-  @
+  @retry_if_not(google.cloud.exceptions.NotFound)
   def size(self, file_path):
     key = self.get_path_to_file(file_path)
     blob = self._bucket.get_blob(key)
@@ -617,7 +638,7 @@ class GoogleCloudStorageInterface(StorageInterface):
       return blob.size
     return None
 
-  @
+  @retry_if_not(google.cloud.exceptions.NotFound)
   def exists(self, file_path):
     key = self.get_path_to_file(file_path)
     blob = self._bucket.blob(key)
@@ -724,9 +745,9 @@ class HttpInterface(StorageInterface):
   @retry
   def head(self, file_path):
     key = self.get_path_to_file(file_path)
-
-
-
+    with self.session.head(key) as resp:
+      resp.raise_for_status()
+      return resp.headers
 
   @retry
   def get_file(self, file_path, start=None, end=None, part_size=None):
@@ -741,6 +762,7 @@ class HttpInterface(StorageInterface):
     resp = self.session.get(key)
     if resp.status_code in (404, 403):
       return (None, None, None, None)
+    resp.close()
     resp.raise_for_status()
 
     # Don't check MD5 for http because the etag can come in many
@@ -758,9 +780,8 @@ class HttpInterface(StorageInterface):
   @retry
   def exists(self, file_path):
     key = self.get_path_to_file(file_path)
-
-
-    return resp.ok
+    with self.session.get(key, stream=True) as resp:
+      return resp.ok
 
   def files_exist(self, file_paths):
     return {path: self.exists(path) for path in file_paths}
@@ -783,6 +804,7 @@ class HttpInterface(StorageInterface):
       params={ "prefix": prefix, "pageToken": token },
     )
     results.raise_for_status()
+    results.close()
     return results.json()
 
     token = None
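
A common thread in the HttpInterface changes is that every `requests` response is now explicitly released, either with `close()` or a `with` block, so pooled connections are not left dangling. A generic sketch of the pattern, independent of this package (the helper function below is hypothetical):

```python
import requests

session = requests.Session()

def url_exists(url: str) -> bool:
  # stream=True defers downloading the body; the context manager returns
  # the connection to the pool as soon as the status is known.
  with session.get(url, stream=True) as resp:
    return resp.ok
```
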
@@ -907,7 +929,15 @@ class S3Interface(StorageInterface):
       'Bucket': self._path.bucket,
       'Key': key,
     }
-
+    try:
+      dest_bucket.copy(CopySource=copy_source, Bucket=dest_bucket_name, Key=dest_key)
+    except botocore.exceptions.ClientError as err:
+      if err.response['Error']['Code'] in ('NoSuchKey', '404'):
+        return False
+      else:
+        raise
+
+    return True
 
   @retry
   def get_file(self, file_path, start=None, end=None, part_size=None):
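
With this release, both the GCS and S3 copy paths report a missing source object by returning False instead of letting the client exception escape, and `CloudFiles.__transfer_cloud_internal` converts that False into a `FileNotFoundError` unless `allow_missing` is set. A standalone sketch of the S3 side of that contract using plain boto3; bucket and key names are placeholders, not the package's code:

```python
import boto3
import botocore.exceptions

s3 = boto3.resource("s3")

def copy_object(src_bucket: str, src_key: str, dst_bucket: str, dst_key: str) -> bool:
  """Return True if the object was copied, False if the source key is missing."""
  try:
    s3.Bucket(dst_bucket).copy(
      CopySource={"Bucket": src_bucket, "Key": src_key},
      Key=dst_key,
    )
  except botocore.exceptions.ClientError as err:
    if err.response["Error"]["Code"] in ("NoSuchKey", "404"):
      return False
    raise
  return True
```
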
cloud_files-4.23.0.dist-info/pbr.json
DELETED

@@ -1 +0,0 @@
-{"git_version": "a123f94", "is_release": true}

Files without changes: the dist-info AUTHORS, LICENSE, WHEEL, entry_points.txt, and top_level.txt are identical in both versions; only the directory name changed from cloud_files-4.23.0.dist-info to cloud_files-4.24.1.dist-info.