azure-storage-blob 12.21.0b1__py3-none-any.whl → 12.22.0__py3-none-any.whl
This diff compares the contents of two package versions publicly released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in that registry.
- azure/storage/blob/__init__.py +19 -18
- azure/storage/blob/_blob_client.py +470 -1555
- azure/storage/blob/_blob_client_helpers.py +1242 -0
- azure/storage/blob/_blob_service_client.py +93 -112
- azure/storage/blob/_blob_service_client_helpers.py +27 -0
- azure/storage/blob/_container_client.py +169 -376
- azure/storage/blob/_container_client_helpers.py +261 -0
- azure/storage/blob/_deserialize.py +68 -44
- azure/storage/blob/_download.py +375 -241
- azure/storage/blob/_encryption.py +14 -7
- azure/storage/blob/_generated/py.typed +1 -0
- azure/storage/blob/_lease.py +52 -63
- azure/storage/blob/_list_blobs_helper.py +129 -135
- azure/storage/blob/_models.py +480 -277
- azure/storage/blob/_quick_query_helper.py +30 -31
- azure/storage/blob/_serialize.py +38 -56
- azure/storage/blob/_shared/avro/datafile.py +1 -1
- azure/storage/blob/_shared/avro/datafile_async.py +1 -1
- azure/storage/blob/_shared/base_client.py +1 -1
- azure/storage/blob/_shared/base_client_async.py +1 -1
- azure/storage/blob/_shared/policies.py +8 -6
- azure/storage/blob/_shared/policies_async.py +3 -1
- azure/storage/blob/_shared/response_handlers.py +6 -2
- azure/storage/blob/_shared/shared_access_signature.py +2 -2
- azure/storage/blob/_shared/uploads.py +1 -1
- azure/storage/blob/_shared/uploads_async.py +1 -1
- azure/storage/blob/_shared_access_signature.py +70 -53
- azure/storage/blob/_upload_helpers.py +75 -68
- azure/storage/blob/_version.py +1 -1
- azure/storage/blob/aio/__init__.py +19 -11
- azure/storage/blob/aio/_blob_client_async.py +554 -301
- azure/storage/blob/aio/_blob_service_client_async.py +148 -97
- azure/storage/blob/aio/_container_client_async.py +282 -139
- azure/storage/blob/aio/_download_async.py +408 -283
- azure/storage/blob/aio/_lease_async.py +61 -60
- azure/storage/blob/aio/_list_blobs_helper.py +94 -96
- azure/storage/blob/aio/_models.py +60 -38
- azure/storage/blob/aio/_upload_helpers.py +75 -66
- {azure_storage_blob-12.21.0b1.dist-info → azure_storage_blob-12.22.0.dist-info}/METADATA +7 -7
- {azure_storage_blob-12.21.0b1.dist-info → azure_storage_blob-12.22.0.dist-info}/RECORD +43 -39
- {azure_storage_blob-12.21.0b1.dist-info → azure_storage_blob-12.22.0.dist-info}/WHEEL +1 -1
- {azure_storage_blob-12.21.0b1.dist-info → azure_storage_blob-12.22.0.dist-info}/LICENSE +0 -0
- {azure_storage_blob-12.21.0b1.dist-info → azure_storage_blob-12.22.0.dist-info}/top_level.txt +0 -0
azure/storage/blob/_quick_query_helper.py
CHANGED
@@ -5,37 +5,39 @@
 # --------------------------------------------------------------------------
 
 from io import BytesIO
-from typing import
+from typing import Any, Dict, Generator, IO, Iterable, Optional, Type, Union, TYPE_CHECKING
 
-from ._shared.avro.datafile import DataFileReader
 from ._shared.avro.avro_io import DatumReader
+from ._shared.avro.datafile import DataFileReader
+
+if TYPE_CHECKING:
+    from ._models import BlobQueryError
 
 
 class BlobQueryReader(object):  # pylint: disable=too-many-instance-attributes
-    """A streaming object to read query results.
-
-    :ivar str name:
-        The name of the blob being quered.
-    :ivar str container:
-        The name of the container where the blob is.
-    :ivar dict response_headers:
-        The response_headers of the quick query request.
-    :ivar bytes record_delimiter:
-        The delimiter used to separate lines, or records with the data. The `records`
-        method will return these lines via a generator.
-    """
+    """A streaming object to read query results."""
+
+    name: str
+    """The name of the blob being quered."""
+    container: str
+    """The name of the container where the blob is."""
+    response_headers: Dict[str, Any]
+    """The response_headers of the quick query request."""
+    record_delimiter: str
+    """The delimiter used to separate lines, or records with the data. The `records`
+    method will return these lines via a generator."""
 
     def __init__(
         self,
-        name=None,
-        container=None,
-        errors=None,
-        record_delimiter='\n',
-        encoding=None,
-        headers=None,
-        response=None,
-        error_cls=None,
-    ):
+        name: str = None,  # type: ignore [assignment]
+        container: str = None,  # type: ignore [assignment]
+        errors: Any = None,
+        record_delimiter: str = '\n',
+        encoding: Optional[str] = None,
+        headers: Dict[str, Any] = None,  # type: ignore [assignment]
+        response: Any = None,
+        error_cls: Type["BlobQueryError"] = None,  # type: ignore [assignment]
+    ) -> None:
         self.name = name
         self.container = container
         self.response_headers = headers
@@ -51,7 +53,7 @@ class BlobQueryReader(object):  # pylint: disable=too-many-instance-attributes
     def __len__(self):
         return self._size
 
-    def _process_record(self, result):
+    def _process_record(self, result: Dict[str, Any]) -> Optional[bytes]:
         self._size = result.get('totalBytes', self._size)
         self._bytes_processed = result.get('bytesScanned', self._bytes_processed)
         if 'data' in result:
@@ -67,7 +69,7 @@ class BlobQueryReader(object):  # pylint: disable=too-many-instance-attributes
             self._errors(error)
         return None
 
-    def _iter_stream(self):
+    def _iter_stream(self) -> Generator[bytes, None, None]:
         if self._first_result is not None:
             yield self._first_result
         for next_result in self._parsed_results:
@@ -75,8 +77,7 @@ class BlobQueryReader(object):  # pylint: disable=too-many-instance-attributes
             if processed_result is not None:
                 yield processed_result
 
-    def readall(self):
-        # type: () -> Union[bytes, str]
+    def readall(self) -> Union[bytes, str]:
         """Return all query results.
 
         This operation is blocking until all data is downloaded.
@@ -93,8 +94,7 @@ class BlobQueryReader(object):  # pylint: disable=too-many-instance-attributes
             return data.decode(self._encoding)
         return data
 
-    def readinto(self, stream):
-        # type: (IO) -> None
+    def readinto(self, stream: IO) -> None:
         """Download the query result to a stream.
 
         :param IO stream:
@@ -105,8 +105,7 @@ class BlobQueryReader(object):  # pylint: disable=too-many-instance-attributes
         for record in self._iter_stream():
             stream.write(record)
 
-    def records(self):
-        # type: () -> Iterable[Union[bytes, str]]
+    def records(self) -> Iterable[Union[bytes, str]]:
         """Returns a record generator for the query result.
 
         Records will be returned line by line.
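For reference, the now fully annotated reader above is obtained through `BlobClient.query_blob`. A minimal consumption sketch, assuming a placeholder connection string, container, blob, and query expression (none of these come from the diff itself):

```python
# Minimal sketch of how a BlobQueryReader is typically obtained and consumed.
# The connection string, container/blob names, and query text are placeholders.
from azure.storage.blob import BlobClient

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="reports", blob_name="sales.csv"
)

reader = blob.query_blob("SELECT * from BlobStorage")  # returns a BlobQueryReader

data = reader.readall()  # -> Union[bytes, str]: blocks until all data is downloaded

# records() -> Iterable[Union[bytes, str]]: yields one record per record_delimiter
for record in blob.query_blob("SELECT * from BlobStorage").records():
    print(record)
```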
azure/storage/blob/_serialize.py
CHANGED
@@ -3,9 +3,7 @@
 # Licensed under the MIT License. See License.txt in the project root for
 # license information.
 # --------------------------------------------------------------------------
-from typing import (
-    Any, Dict, Optional, Tuple, Union,
-    TYPE_CHECKING)
+from typing import Any, cast, Dict, Optional, Tuple, Union, TYPE_CHECKING
 
 try:
     from urllib.parse import quote
@@ -14,23 +12,22 @@ except ImportError:
 
 from azure.core import MatchConditions
 
-from ._models import (
-    ContainerEncryptionScope,
-    DelimitedJsonDialect)
 from ._generated.models import (
-
-
-
+    ArrowConfiguration,
+    BlobTag,
+    BlobTags,
     ContainerCpkScopeInfo,
-
-    QuerySerialization,
+    CpkScopeInfo,
     DelimitedTextConfiguration,
     JsonTextConfiguration,
-
+    LeaseAccessConditions,
+    ModifiedAccessConditions,
+    QueryFormat,
     QueryFormatType,
-
-
+    QuerySerialization,
+    SourceModifiedAccessConditions
 )
+from ._models import ContainerEncryptionScope, DelimitedJsonDialect
 
 if TYPE_CHECKING:
     from ._lease import BlobLeaseClient
@@ -62,8 +59,11 @@ _SUPPORTED_API_VERSIONS = [
 ]
 
 
-def _get_match_headers(kwargs, match_param, etag_param):
-
+def _get_match_headers(
+    kwargs: Dict[str, Any],
+    match_param: str,
+    etag_param: str
+) -> Tuple[Optional[str], Optional[Any]]:
     if_match = None
     if_none_match = None
     match_condition = kwargs.pop(match_param, None)
@@ -87,8 +87,7 @@ def _get_match_headers(kwargs, match_param, etag_param):
     return if_match, if_none_match
 
 
-def get_access_conditions(lease):
-    # type: (Optional[Union[BlobLeaseClient, str]]) -> Union[LeaseAccessConditions, None]
+def get_access_conditions(lease: Optional[Union["BlobLeaseClient", str]]) -> Optional[LeaseAccessConditions]:
     try:
         lease_id = lease.id  # type: ignore
     except AttributeError:
@@ -96,8 +95,7 @@ def get_access_conditions(lease):
     return LeaseAccessConditions(lease_id=lease_id) if lease_id else None
 
 
-def get_modify_conditions(kwargs):
-    # type: (Dict[str, Any]) -> ModifiedAccessConditions
+def get_modify_conditions(kwargs: Dict[str, Any]) -> ModifiedAccessConditions:
     if_match, if_none_match = _get_match_headers(kwargs, 'match_condition', 'etag')
     return ModifiedAccessConditions(
         if_modified_since=kwargs.pop('if_modified_since', None),
@@ -108,8 +106,7 @@ def get_modify_conditions(kwargs):
     )
 
 
-def get_source_conditions(kwargs):
-    # type: (Dict[str, Any]) -> SourceModifiedAccessConditions
+def get_source_conditions(kwargs: Dict[str, Any]) -> SourceModifiedAccessConditions:
     if_match, if_none_match = _get_match_headers(kwargs, 'source_match_condition', 'source_etag')
     return SourceModifiedAccessConditions(
         source_if_modified_since=kwargs.pop('source_if_modified_since', None),
@@ -120,15 +117,13 @@ def get_source_conditions(kwargs):
     )
 
 
-def get_cpk_scope_info(kwargs):
-    # type: (Dict[str, Any]) -> CpkScopeInfo
+def get_cpk_scope_info(kwargs: Dict[str, Any]) -> Optional[CpkScopeInfo]:
     if 'encryption_scope' in kwargs:
         return CpkScopeInfo(encryption_scope=kwargs.pop('encryption_scope'))
     return None
 
 
-def get_container_cpk_scope_info(kwargs):
-    # type: (Dict[str, Any]) -> ContainerCpkScopeInfo
+def get_container_cpk_scope_info(kwargs: Dict[str, Any]) -> Optional[ContainerCpkScopeInfo]:
     encryption_scope = kwargs.pop('container_encryption_scope', None)
     if encryption_scope:
         if isinstance(encryption_scope, ContainerEncryptionScope):
@@ -145,22 +140,19 @@ def get_container_cpk_scope_info(kwargs):
     return None
 
 
-def get_api_version(kwargs):
-    # type: (Dict[str, Any]) -> str
+def get_api_version(kwargs: Dict[str, Any]) -> str:
     api_version = kwargs.get('api_version', None)
     if api_version and api_version not in _SUPPORTED_API_VERSIONS:
         versions = '\n'.join(_SUPPORTED_API_VERSIONS)
         raise ValueError(f"Unsupported API version '{api_version}'. Please select from:\n{versions}")
     return api_version or _SUPPORTED_API_VERSIONS[-1]
 
-def get_version_id(self_vid, kwargs):
-    # type: (Optional[str], Dict[str, Any]) -> Optional[str]
+def get_version_id(self_vid: Optional[str], kwargs: Dict[str, Any]) -> Optional[str]:
     if 'version_id' in kwargs:
-        return kwargs.pop('version_id')
+        return cast(str, kwargs.pop('version_id'))
     return self_vid
 
-def serialize_blob_tags_header(tags=None):
-    # type: (Optional[Dict[str, str]]) -> str
+def serialize_blob_tags_header(tags: Optional[Dict[str, str]] = None) -> Optional[str]:
     if tags is None:
         return None
 
@@ -178,33 +170,27 @@ def serialize_blob_tags_header(tags=None):
     return ''.join(components)
 
 
-def serialize_blob_tags(tags=None):
-    # type: (Optional[Dict[str, str]]) -> Union[BlobTags, None]
+def serialize_blob_tags(tags: Optional[Dict[str, str]] = None) -> BlobTags:
     tag_list = []
     if tags:
         tag_list = [BlobTag(key=k, value=v) for k, v in tags.items()]
     return BlobTags(blob_tag_set=tag_list)
 
 
-def serialize_query_format(formater):
+def serialize_query_format(formater: Union[str, DelimitedJsonDialect]) -> Optional[QuerySerialization]:
     if formater == "ParquetDialect":
-        qq_format = QueryFormat(
-            type=QueryFormatType.PARQUET,
-            parquet_text_configuration=' '
-        )
+        qq_format = QueryFormat(type=QueryFormatType.PARQUET, parquet_text_configuration=' ')  #type: ignore [arg-type]
     elif isinstance(formater, DelimitedJsonDialect):
-
-
-        )
-        qq_format = QueryFormat(
-            type=QueryFormatType.json,
-            json_text_configuration=serialization_settings)
+        json_serialization_settings = JsonTextConfiguration(record_separator=formater.delimiter)
+        qq_format = QueryFormat(type=QueryFormatType.JSON, json_text_configuration=json_serialization_settings)
     elif hasattr(formater, 'quotechar'):  # This supports a csv.Dialect as well
         try:
-            headers = formater.has_header
+            headers = formater.has_header  # type: ignore
         except AttributeError:
             headers = False
-
+        if isinstance(formater, str):
+            raise ValueError("Unknown string value provided. Accepted values: ParquetDialect")
+        csv_serialization_settings = DelimitedTextConfiguration(
             column_separator=formater.delimiter,
             field_quote=formater.quotechar,
             record_separator=formater.lineterminator,
@@ -212,16 +198,12 @@ def serialize_query_format(formater):
             headers_present=headers
         )
         qq_format = QueryFormat(
-            type=QueryFormatType.
-            delimited_text_configuration=
+            type=QueryFormatType.DELIMITED,
+            delimited_text_configuration=csv_serialization_settings
        )
     elif isinstance(formater, list):
-
-
-        )
-        qq_format = QueryFormat(
-            type=QueryFormatType.arrow,
-            arrow_configuration=serialization_settings)
+        arrow_serialization_settings = ArrowConfiguration(schema=formater)
+        qq_format = QueryFormat(type=QueryFormatType.arrow, arrow_configuration=arrow_serialization_settings)
     elif not formater:
         return None
     else:
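The `serialize_query_format` branches above are driven by the dialect objects a caller passes to `BlobClient.query_blob`. A hedged sketch of that public path (the connection string, container/blob names, and query expression are placeholder assumptions):

```python
# Sketch of the public call path that feeds serialize_query_format; the values
# in angle brackets, the container/blob names, and the query are placeholders.
from azure.storage.blob import BlobClient, DelimitedJsonDialect, DelimitedTextDialect

blob = BlobClient.from_connection_string(
    "<connection-string>", container_name="reports", blob_name="sales.csv"
)

input_format = DelimitedTextDialect(        # mapped to QueryFormatType.DELIMITED
    delimiter=",", quotechar='"', lineterminator="\n", escapechar="", has_header=True
)
output_format = DelimitedJsonDialect(delimiter="\n")  # mapped to QueryFormatType.JSON

reader = blob.query_blob(
    "SELECT _2 from BlobStorage",
    blob_format=input_format,
    output_format=output_format,
)
print(reader.readall())
```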
azure/storage/blob/_shared/avro/datafile.py
CHANGED
@@ -185,7 +185,7 @@ class DataFileReader(object):  # pylint: disable=too-many-instance-attributes
 
         # check magic number
         if header.get('magic') != MAGIC:
-            fail_msg = f"Not an Avro data file: {header.get('magic')} doesn't match {MAGIC}."
+            fail_msg = f"Not an Avro data file: {header.get('magic')} doesn't match {MAGIC!r}."
             raise schema.AvroException(fail_msg)
 
         # set metadata
azure/storage/blob/_shared/avro/datafile_async.py
CHANGED
@@ -146,7 +146,7 @@ class AsyncDataFileReader(object):  # pylint: disable=too-many-instance-attributes
 
         # check magic number
         if header.get('magic') != MAGIC:
-            fail_msg = f"Not an Avro data file: {header.get('magic')} doesn't match {MAGIC}."
+            fail_msg = f"Not an Avro data file: {header.get('magic')} doesn't match {MAGIC!r}."
             raise schema.AvroException(fail_msg)
 
         # set metadata
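The `!r` conversion is the whole change in both Avro readers: formatting a `bytes` value with a plain replacement field goes through `str()`, which triggers `BytesWarning` when Python runs with `-b`/`-bb`, while the explicit `repr()` conversion does not. A quick illustration (the `MAGIC` value shown is the usual Avro header magic, used here only as an assumed example):

```python
# Why {MAGIC!r} instead of {MAGIC}: both render the same text, but the plain
# field calls str() on bytes, which warns (or raises) under `python -b`/`-bb`.
MAGIC = b'Obj\x01'  # Avro file-header magic, shown as an assumed example value

print(f"doesn't match {MAGIC!r}.")  # doesn't match b'Obj\x01'.
print(f"doesn't match {MAGIC}.")    # same output, but emits BytesWarning with -b/-bb
```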
azure/storage/blob/_shared/base_client.py
CHANGED
@@ -331,7 +331,7 @@ class StorageAccountHostsMixin(object):  # pylint: disable=too-many-instance-attributes
                     )
                     raise error
                 return iter(parts)
-            return parts
+            return parts  # type: ignore [no-any-return]
         except HttpResponseError as error:
             process_storage_error(error)
 
azure/storage/blob/_shared/policies.py
CHANGED
@@ -14,10 +14,10 @@ from io import SEEK_SET, UnsupportedOperation
 from time import time
 from typing import Any, Dict, Optional, TYPE_CHECKING
 from urllib.parse import (
-
-
-
-
+    parse_qsl,
+    urlencode,
+    urlparse,
+    urlunparse,
 )
 from wsgiref.handlers import format_date_time
 
@@ -28,10 +28,10 @@ from azure.core.pipeline.policies import (
     HTTPPolicy,
     NetworkTraceLoggingPolicy,
     RequestHistory,
-    SansIOHTTPPolicy
+    SansIOHTTPPolicy
 )
 
-from .authentication import StorageHttpChallenge
+from .authentication import AzureSigningError, StorageHttpChallenge
 from .constants import DEFAULT_OAUTH_SCOPE
 from .models import LocationMode
 
@@ -542,6 +542,8 @@ class StorageRetryPolicy(HTTPPolicy):
                         continue
                 break
             except AzureError as err:
+                if isinstance(err, AzureSigningError):
+                    raise
                 retries_remaining = self.increment(
                     retry_settings, request=request.http_request, error=err)
                 if retries_remaining:
azure/storage/blob/_shared/policies_async.py
CHANGED
@@ -13,7 +13,7 @@ from typing import Any, Dict, TYPE_CHECKING
 from azure.core.exceptions import AzureError
 from azure.core.pipeline.policies import AsyncBearerTokenCredentialPolicy, AsyncHTTPPolicy
 
-from .authentication import StorageHttpChallenge
+from .authentication import AzureSigningError, StorageHttpChallenge
 from .constants import DEFAULT_OAUTH_SCOPE
 from .policies import is_retry, StorageRetryPolicy
 
@@ -127,6 +127,8 @@ class AsyncStorageRetryPolicy(StorageRetryPolicy):
                         continue
                 break
             except AzureError as err:
+                if isinstance(err, AzureSigningError):
+                    raise
                 retries_remaining = self.increment(
                     retry_settings, request=request.http_request, error=err)
                 if retries_remaining:
azure/storage/blob/_shared/response_handlers.py
CHANGED
@@ -17,7 +17,8 @@ from azure.core.exceptions import (
 )
 from azure.core.pipeline.policies import ContentDecodePolicy
 
-from .
+from .authentication import AzureSigningError
+from .models import get_enum_value, StorageErrorCode, UserDelegationKey
 from .parser import _to_utc_datetime
 
 
@@ -81,9 +82,12 @@ def return_raw_deserialized(response, *_):
     return response.http_response.location_mode, response.context[ContentDecodePolicy.CONTEXT_NAME]
 
 
-def process_storage_error(storage_error) -> NoReturn:  # type: ignore [misc] # pylint:disable=too-many-statements
+def process_storage_error(storage_error) -> NoReturn:  # type: ignore [misc] # pylint:disable=too-many-statements, too-many-branches
     raise_error = HttpResponseError
     serialized = False
+    if isinstance(storage_error, AzureSigningError):
+        storage_error.message = storage_error.message + \
+            '. This is likely due to an invalid shared key. Please check your shared key and try again.'
     if not storage_error.response or storage_error.response.status_code in [200, 204]:
         raise storage_error
     # If it is one of those three then it has been serialized prior by the generated layer.
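Combined with the retry-policy hunks above, the intent is that a request signed with a bad shared key fails on the first attempt with a clearer message instead of being retried. A hedged sketch of the caller-side effect (the account URL and credential string are placeholders, and the exact exception surface can vary by operation):

```python
# Sketch only: AzureSigningError derives from ClientAuthenticationError, so an
# invalid account key should surface immediately rather than after retries.
# The account URL and credential string below are placeholders.
from azure.core.exceptions import ClientAuthenticationError
from azure.storage.blob import BlobServiceClient

service = BlobServiceClient(
    account_url="https://<account>.blob.core.windows.net",
    credential="<not-a-valid-base64-account-key>",
)

try:
    service.get_service_properties()
except ClientAuthenticationError as exc:
    # With the change above, the message should now hint that the shared key
    # itself is likely invalid rather than reporting exhausted retries.
    print(exc.message)
```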
azure/storage/blob/_shared/shared_access_signature.py
CHANGED
@@ -108,7 +108,7 @@ class SharedAccessSignature(object):
         self.x_ms_version = x_ms_version
 
     def generate_account(self, services, resource_types, permission, expiry, start=None,
-                         ip=None, protocol=None, **kwargs):
+                         ip=None, protocol=None, **kwargs) -> str:
         '''
         Generates a shared access signature for the account.
         Use the returned signature with the sas_token parameter of the service
@@ -230,5 +230,5 @@ class _SharedAccessHelper(object):
         self._add_query(QueryStringConstants.SIGNED_SIGNATURE,
                         sign_string(account_key, string_to_sign))
 
-    def get_token(self):
+    def get_token(self) -> str:
         return '&'.join([f'{n}={url_quote(v)}' for n, v in self.query_dict.items() if v is not None])
azure/storage/blob/_shared/uploads.py
CHANGED
@@ -12,7 +12,7 @@ from threading import Lock
 
 from azure.core.tracing.common import with_current_context
 
-from .
+from .import encode_base64, url_quote
 from .request_handlers import get_length
 from .response_handlers import return_response_headers
 
azure/storage/blob/_shared/uploads_async.py
CHANGED
@@ -13,7 +13,7 @@ from itertools import islice
 from math import ceil
 from typing import AsyncGenerator, Union
 
-from .
+from .import encode_base64, url_quote
 from .request_handlers import get_length
 from .response_handlers import return_response_headers
 from .uploads import SubStream, IterStreamer  # pylint: disable=unused-import
azure/storage/blob/_shared_access_signature.py
CHANGED
@@ -5,25 +5,17 @@
 # --------------------------------------------------------------------------
 # pylint: disable=docstring-keyword-should-match-keyword-only
 
-from typing import (
-    Union, Optional, Any, TYPE_CHECKING
-)
+from typing import Union, Optional, Any, TYPE_CHECKING
 from urllib.parse import parse_qs
 
 from ._shared import sign_string, url_quote
 from ._shared.constants import X_MS_VERSION
 from ._shared.models import Services, UserDelegationKey
-from ._shared.shared_access_signature import SharedAccessSignature, _SharedAccessHelper
-    QueryStringConstants
+from ._shared.shared_access_signature import QueryStringConstants, SharedAccessSignature, _SharedAccessHelper
 
 if TYPE_CHECKING:
     from datetime import datetime
-    from ..blob import (
-        ResourceTypes,
-        AccountSasPermissions,
-        ContainerSasPermissions,
-        BlobSasPermissions
-    )
+    from ..blob import AccountSasPermissions, BlobSasPermissions, ContainerSasPermissions, ResourceTypes
 
 
 class BlobQueryStringConstants(object):
@@ -38,13 +30,17 @@ class BlobSharedAccessSignature(SharedAccessSignature):
     generate_*_shared_access_signature method directly.
     '''
 
-    def __init__(
+    def __init__(
+        self, account_name: str,
+        account_key: Optional[str] = None,
+        user_delegation_key: Optional[UserDelegationKey] = None
+    ) -> None:
         '''
         :param str account_name:
             The storage account name used to generate the shared access signatures.
-        :param str account_key:
+        :param Optional[str] account_key:
             The access key to generate the shares access signatures.
-        :param ~azure.storage.blob.models.UserDelegationKey user_delegation_key:
+        :param Optional[~azure.storage.blob.models.UserDelegationKey] user_delegation_key:
             Instead of an account key, the user could pass in a user delegation key.
             A user delegation key can be obtained from the service by authenticating with an AAD identity;
             this can be accomplished by calling get_user_delegation_key on any Blob service object.
@@ -52,11 +48,24 @@ class BlobSharedAccessSignature(SharedAccessSignature):
         super(BlobSharedAccessSignature, self).__init__(account_name, account_key, x_ms_version=X_MS_VERSION)
         self.user_delegation_key = user_delegation_key
 
-    def generate_blob(
-
-
-
-
+    def generate_blob(
+        self, container_name: str,
+        blob_name: str,
+        snapshot: Optional[str] = None,
+        version_id: Optional[str] = None,
+        permission: Optional[Union["BlobSasPermissions", str]] = None,
+        expiry: Optional[Union["datetime", str]] = None,
+        start: Optional[Union["datetime", str]] = None,
+        policy_id: Optional[str] = None,
+        ip: Optional[str] = None,
+        protocol: Optional[str] = None,
+        cache_control: Optional[str] = None,
+        content_disposition: Optional[str] = None,
+        content_encoding: Optional[str] = None,
+        content_language: Optional[str] = None,
+        content_type: Optional[str] = None,
+        **kwargs: Any
+    ) -> str:
         '''
         Generates a shared access signature for the blob or one of its snapshots.
         Use the returned signature with the sas_token parameter of any BlobService.
@@ -66,7 +75,7 @@ class BlobSharedAccessSignature(SharedAccessSignature):
         :param str blob_name:
             Name of blob.
         :param str snapshot:
-            The snapshot parameter is an opaque
+            The snapshot parameter is an opaque datetime value that,
             when present, specifies the blob snapshot to grant permission.
         :param str version_id:
             An optional blob version ID. This parameter is only applicable for versioning-enabled
@@ -148,11 +157,21 @@ class BlobSharedAccessSignature(SharedAccessSignature):
 
         return sas.get_token()
 
-    def generate_container(
-
-
-
-
+    def generate_container(
+        self, container_name: str,
+        permission: Optional[Union["ContainerSasPermissions", str]] = None,
+        expiry: Optional[Union["datetime", str]] = None,
+        start: Optional[Union["datetime", str]] = None,
+        policy_id: Optional[str] = None,
+        ip: Optional[str] = None,
+        protocol: Optional[str] = None,
+        cache_control: Optional[str] = None,
+        content_disposition: Optional[str] = None,
+        content_encoding: Optional[str] = None,
+        content_language: Optional[str] = None,
+        content_type: Optional[str] = None,
+        **kwargs: Any
+    ) -> str:
         '''
         Generates a shared access signature for the container.
         Use the returned signature with the sas_token parameter of any BlobService.
@@ -298,7 +317,7 @@ class _BlobSharedAccessHelper(_SharedAccessHelper):
                         sign_string(account_key if user_delegation_key is None else user_delegation_key.value,
                                     string_to_sign))
 
-    def get_token(self):
+    def get_token(self) -> str:
         # a conscious decision was made to exclude the timestamp in the generated token
         # this is to avoid having two snapshot ids in the query parameters when the user appends the snapshot timestamp
         exclude = [BlobQueryStringConstants.SIGNED_TIMESTAMP]
@@ -378,22 +397,21 @@ def generate_account_sas(
         start=start,
         ip=ip,
         **kwargs
-    )
+    )
 
 
 def generate_container_sas(
-
-
-
-
-
-
-
-
-
-
-
-    # type: (...) -> Any
+    account_name: str,
+    container_name: str,
+    account_key: Optional[str] = None,
+    user_delegation_key: Optional[UserDelegationKey] = None,
+    permission: Optional[Union["ContainerSasPermissions", str]] = None,
+    expiry: Optional[Union["datetime", str]] = None,
+    start: Optional[Union["datetime", str]] = None,
+    policy_id: Optional[str] = None,
+    ip: Optional[str] = None,
+    **kwargs: Any
+) -> str:
     """Generates a shared access signature for a container.
 
     Use the returned signature with the credential parameter of any BlobServiceClient,
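With the parameters now written out explicitly, a typical call reads as in the sketch below (the account name, key, and container name are placeholders; `generate_blob_sas`, whose signature follows, is used the same way with a blob name added):

```python
# Hedged sketch of calling generate_container_sas with the now-explicit
# signature; account name, key, and container name are placeholder values.
from datetime import datetime, timedelta, timezone

from azure.storage.blob import ContainerClient, ContainerSasPermissions, generate_container_sas

sas_token = generate_container_sas(
    account_name="<account>",
    container_name="reports",
    account_key="<account-key>",
    permission=ContainerSasPermissions(read=True, list=True),
    expiry=datetime.now(timezone.utc) + timedelta(hours=1),
)  # -> str

container = ContainerClient(
    account_url="https://<account>.blob.core.windows.net",
    container_name="reports",
    credential=sas_token,
)
```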
@@ -502,20 +520,19 @@
 
 
 def generate_blob_sas(
-
-
-
-
-
-
-
-
-
-
-
-
-
-    # type: (...) -> Any
+    account_name: str,
+    container_name: str,
+    blob_name: str,
+    snapshot: Optional[str] = None,
+    account_key: Optional[str] = None,
+    user_delegation_key: Optional[UserDelegationKey] = None,
+    permission: Optional[Union["BlobSasPermissions", str]] = None,
+    expiry: Optional[Union["datetime", str]] = None,
+    start: Optional[Union["datetime", str]] = None,
+    policy_id: Optional[str] = None,
+    ip: Optional[str] = None,
+    **kwargs: Any
+) -> str:
     """Generates a shared access signature for a blob.
 
     Use the returned signature with the credential parameter of any BlobServiceClient,