cloud-files 5.0.0-py3-none-any.whl → 5.0.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: cloud-files
- Version: 5.0.0
+ Version: 5.0.1
  Summary: Fast access to cloud storage and local FS.
  Home-page: https://github.com/seung-lab/cloud-files/
  Author: William Silversmith
@@ -4,8 +4,8 @@ cloudfiles/compression.py,sha256=pqYdpu5vfFv-094BpfZ2pgRjVu7ESM9pAZC09P6E8bY,615
  cloudfiles/connectionpools.py,sha256=aL8RiSjRepECfgAFmJcz80aJFKbou7hsbuEgugDKwB8,4814
  cloudfiles/exceptions.py,sha256=N0oGQNG-St6RvnT8e5p_yC_E61q2kgAe2scwAL0F49c,843
  cloudfiles/gcs.py,sha256=unqu5KxGKaPq6N4QeHSpCDdtnK1BzPOAerTZ8FLt2_4,3820
- cloudfiles/interfaces.py,sha256=x6iRoxUWe8LFs8OAqI4DPOXu1nnJqI896h3t0IuVuNM,42024
- cloudfiles/lib.py,sha256=YOoaEkKtkXc9FdpNnC4FbZJVG1ujbyoxN07WKdUOJcs,5200
+ cloudfiles/interfaces.py,sha256=rc6jblcdxfvsPAyiOrosY_kTn7idHftS026prG84x2M,43389
+ cloudfiles/lib.py,sha256=9YFGMCy0mi23baqtsr-f5Gd1ewAGvD7x3UVNt772Z9E,5317
  cloudfiles/paths.py,sha256=VFgqrOqI7lTZ2EE7Ktg6I343TVXYEhn8GHTfJZZ9giE,11435
  cloudfiles/resumable_tools.py,sha256=NyuSoGh1SaP5akrHCpd9kgy2-JruEWrHW9lvJxV7jpE,6711
  cloudfiles/scheduler.py,sha256=DqDANmOpB3NdzFgJDNMMibRIkCrXQqIh2XGL8GWoc9c,3668
@@ -15,11 +15,11 @@ cloudfiles/typing.py,sha256=f3ZYkNfN9poxhGu5j-P0KCxjCCqSn9HAg5KiIPkjnCg,416
  cloudfiles_cli/LICENSE,sha256=Jna4xYE8CCQmaxjr5Fs-wmUBnIQJ1DGcNn9MMjbkprk,1538
  cloudfiles_cli/__init__.py,sha256=Wftt3R3F21QsHtWqx49ODuqT9zcSr0em7wk48kcH0WM,29
  cloudfiles_cli/cloudfiles_cli.py,sha256=Mwoeo0xXFaGx76f1pjU0H-qCGYoscykv_kMfWRxlnxg,34647
- cloud_files-5.0.0.dist-info/AUTHORS,sha256=BFVmobgAhaVFI5fqbuqAY5XmBQxe09ZZAsAOTy87hKQ,318
- cloud_files-5.0.0.dist-info/LICENSE,sha256=Jna4xYE8CCQmaxjr5Fs-wmUBnIQJ1DGcNn9MMjbkprk,1538
- cloud_files-5.0.0.dist-info/METADATA,sha256=ypcRq07V2r_YR5V0NxTF5iAvOEPoPq3z7ZTBSV_pGEY,27047
- cloud_files-5.0.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- cloud_files-5.0.0.dist-info/entry_points.txt,sha256=xlirb1FVhn1mbcv4IoyMEGumDqKOA4VMVd3drsRQxIg,51
- cloud_files-5.0.0.dist-info/pbr.json,sha256=9c_Y5OB-aqslSyCLAkm_Gge6KKpyRGKGU4UCZi0TOV0,46
- cloud_files-5.0.0.dist-info/top_level.txt,sha256=xPyrST3okJbsmdCF5IC2gYAVxg_aD5AYVTnNo8UuoZU,26
- cloud_files-5.0.0.dist-info/RECORD,,
+ cloud_files-5.0.1.dist-info/AUTHORS,sha256=BFVmobgAhaVFI5fqbuqAY5XmBQxe09ZZAsAOTy87hKQ,318
+ cloud_files-5.0.1.dist-info/LICENSE,sha256=Jna4xYE8CCQmaxjr5Fs-wmUBnIQJ1DGcNn9MMjbkprk,1538
+ cloud_files-5.0.1.dist-info/METADATA,sha256=ZlbLvxexsgn0F_bjYRWmCEqy6d6tw22EBAWSe3iSs40,27047
+ cloud_files-5.0.1.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ cloud_files-5.0.1.dist-info/entry_points.txt,sha256=xlirb1FVhn1mbcv4IoyMEGumDqKOA4VMVd3drsRQxIg,51
+ cloud_files-5.0.1.dist-info/pbr.json,sha256=fsVPsBKZeRiSu8S18zaNE_nxJYIgmKStJaGJAQiz8lM,46
+ cloud_files-5.0.1.dist-info/top_level.txt,sha256=xPyrST3okJbsmdCF5IC2gYAVxg_aD5AYVTnNo8UuoZU,26
+ cloud_files-5.0.1.dist-info/RECORD,,
@@ -0,0 +1 @@
+ {"git_version": "4c96852", "is_release": true}
cloudfiles/interfaces.py CHANGED
@@ -22,7 +22,7 @@ import fasteners
  from .compression import COMPRESSION_TYPES
  from .connectionpools import S3ConnectionPool, GCloudBucketPool, MemoryPool, MEMORY_DATA
  from .exceptions import MD5IntegrityError, CompressionError, AuthorizationError
- from .lib import mkdir, sip, md5, validate_s3_multipart_etag
+ from .lib import mkdir, sip, md5, encode_crc32c_b64, validate_s3_multipart_etag
  from .secrets import (
    http_credentials,
    cave_credentials,
@@ -494,7 +494,33 @@ class MemoryInterface(StorageInterface):
      return True

    def head(self, file_path):
-     raise NotImplementedError()
+     path = self.get_path_to_file(file_path)
+
+     data = None
+     encoding = ''
+
+     with EXT_TEST_SEQUENCE_LOCK:
+       for ext, enc in EXT_TEST_SEQUENCE:
+         pathext = path + ext
+         if pathext in self._data:
+           data = self._data[pathext]
+           encoding = enc
+           break
+
+     return {
+       "Cache-Control": None,
+       "Content-Length": len(data),
+       "Content-Type": None,
+       "ETag": None,
+       "Last-Modified": None,
+       "Content-Md5": None,
+       "Content-Encoding": encoding,
+       "Content-Disposition": None,
+       "Content-Language": None,
+       "Storage-Class": None,
+       "Request-Charged": None,
+       "Parts-Count": None,
+     }

    def size(self, file_path):
      path = self.get_path_to_file(file_path)
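
For orientation: the hunk above gives the in-memory backend a working head() that returns the same header-style dict as the other interfaces, instead of raising NotImplementedError. A minimal usage sketch, assuming the mem:// protocol routes to MemoryInterface and that CloudFiles still exposes put()/head() as in earlier releases (not verified against 5.0.1):

    # Hedged usage sketch; the bucket name is a placeholder and the API surface
    # (CloudFiles, mem://, put, head) is assumed from earlier cloud-files releases.
    from cloudfiles import CloudFiles

    cf = CloudFiles("mem://example-bucket")
    cf.put("hello.txt", b"hello world")

    # As of 5.0.1, head() returns a dict of header-like fields rather than raising.
    info = cf.head("hello.txt")
    print(info["Content-Length"], info["Content-Encoding"])
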
@@ -1112,7 +1138,7 @@ class S3Interface(StorageInterface):
      elif compress in ("xz", "lzma"):
        attrs['ContentEncoding'] = 'xz'
      elif compress in ("bzip2", "bz2"):
-       attrs['ContentEncoding'] = 'bz2'
+       attrs['ContentEncoding'] = 'bzip2'
      elif compress:
        raise ValueError("Compression type {} not supported.".format(compress))

@@ -1136,23 +1162,39 @@

      if multipart:
        self._conn.upload_fileobj(content, self._path.bucket, key, ExtraArgs=attrs)
+       # upload_fileobj will add 'aws-chunked' to the ContentEncoding,
+       # which after it finishes uploading is useless and messes up our
+       # software. Therefore, edit the metadata and replace it (but this incurs
+       # 2x class-A...)
+       self._conn.copy_object(
+         Bucket=self._path.bucket,
+         Key=key,
+         CopySource={'Bucket': self._path.bucket, 'Key': key},
+         MetadataDirective="REPLACE",
+         **attrs
+       )
      else:
        attrs['Bucket'] = self._path.bucket
        attrs['Body'] = content
        attrs['Key'] = key
-       attrs['ContentMD5'] = md5(content)
+       attrs["ChecksumCRC32C"] = str(encode_crc32c_b64(content))
        self._conn.put_object(**attrs)

    @retry
    def copy_file(self, src_path, dest_bucket_name, dest_key):
      key = self.get_path_to_file(src_path)
-     dest_bucket = self._get_bucket(dest_bucket_name)
+     s3client = self._get_bucket(dest_bucket_name)
      copy_source = {
        'Bucket': self._path.bucket,
        'Key': key,
      }
      try:
-       dest_bucket.copy(CopySource=copy_source, Bucket=dest_bucket_name, Key=dest_key)
+       s3client.copy_object(
+         CopySource=copy_source,
+         Bucket=dest_bucket_name,
+         Key=dest_key,
+         MetadataDirective='COPY' # Ensure metadata like Content-Encoding is copied
+       )
      except botocore.exceptions.ClientError as err:
        if err.response['Error']['Code'] in ('NoSuchKey', '404'):
          return False
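
For context on the ChecksumCRC32C change above: newer boto3/S3 releases accept a base64-encoded CRC32C on put_object and verify it server-side, which is what allows the ContentMD5 header to be replaced. A standalone sketch under those assumptions (placeholder bucket and key; requires the third-party crc32c package and a boto3 version with additional-checksum support):

    # Hedged sketch of a CRC32C-verified S3 upload with plain boto3 (not cloud-files code).
    import base64
    import struct

    import boto3
    import crc32c  # third-party "crc32c" package

    def crc32c_b64(data: bytes) -> str:
      # S3 expects base64 of the 4 big-endian bytes of the checksum.
      return base64.b64encode(struct.pack("!I", crc32c.crc32c(data))).decode("ascii")

    body = b"example payload"
    s3 = boto3.client("s3")
    s3.put_object(
      Bucket="example-bucket",  # placeholder
      Key="example/key.bin",    # placeholder
      Body=body,
      ChecksumCRC32C=crc32c_b64(body),  # S3 rejects the PUT if this checksum mismatches
    )
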
@@ -1231,8 +1273,9 @@

      mkdir(os.path.dirname(dest))

+     encoding = resp.get("Content-Encoding", "") or ""
      encoding = ",".join([
-       enc for enc in resp.get("Content-Encoding", "").split(",")
+       enc for enc in encoding.split(",")
        if enc != "aws-chunked"
      ])
      ext = FileInterface.get_extension(encoding)
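
The added or "" guard above handles responses whose Content-Encoding is present but None, a case the old .get(..., "") default (which only covers a missing key) did not, so the aws-chunked filter no longer trips over it. A tiny behavior sketch of the same logic, using an illustrative helper name:

    # Behavior sketch of the filter above; strip_aws_chunked is an illustrative name,
    # not a cloud-files function.
    def strip_aws_chunked(content_encoding):
      encoding = content_encoding or ""
      return ",".join(enc for enc in encoding.split(",") if enc != "aws-chunked")

    assert strip_aws_chunked(None) == ""
    assert strip_aws_chunked("gzip,aws-chunked") == "gzip"
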
@@ -1263,6 +1306,11 @@
        Key=self.get_path_to_file(file_path),
        **self._additional_attrs,
      )
+
+     encoding = response.get("ContentEncoding", None)
+     if encoding == '':
+       encoding = None
+
      return {
        "Cache-Control": response.get("CacheControl", None),
        "Content-Length": response.get("ContentLength", None),
@@ -1270,7 +1318,7 @@
        "ETag": response.get("ETag", None),
        "Last-Modified": response.get("LastModified", None),
        "Content-Md5": response["ResponseMetadata"]["HTTPHeaders"].get("content-md5", None),
-       "Content-Encoding": response.get("ContentEncoding", None),
+       "Content-Encoding": encoding,
        "Content-Disposition": response.get("ContentDisposition", None),
        "Content-Language": response.get("ContentLanguage", None),
        "Storage-Class": response.get("StorageClass", None),
cloudfiles/lib.py CHANGED
@@ -153,6 +153,11 @@ def decode_crc32c_b64(b64digest):
    # !I means network order (big endian) and unsigned int
    return struct.unpack("!I", base64.b64decode(b64digest))[0]

+ def encode_crc32c_b64(binary):
+   val = crc32c(binary)
+   val = val.to_bytes(4, 'big')
+   return base64.b64encode(val)
+
  def crc32c(binary):
    """
    Computes the crc32c of a binary string
@@ -1 +0,0 @@
- {"git_version": "ee08b4f", "is_release": true}