azureml-core 1.55.0.post2__py3-none-any.whl → 1.57.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- azureml/_base_sdk_common/_version.py +1 -1
- azureml/_base_sdk_common/common.py +3 -0
- azureml/_base_sdk_common/workspace/models/rest_client_enums.py +1 -0
- azureml/_file_utils/file_utils.py +21 -20
- azureml/_model_management/_util.py +3 -0
- azureml/_project/_compute_target_commands.py +1 -1
- azureml/_project/azureml_base_images.json +7 -7
- azureml/_project/azureml_sdk_scope.txt +45 -12
- azureml/_project/file_utilities.py +2 -0
- azureml/_project/ignore_file.py +2 -0
- azureml/_project/project_info.py +3 -0
- azureml/_project/project_manager.py +9 -0
- azureml/_restclient/artifacts_client.py +1 -1
- azureml/_restclient/models/rest_client_enums.py +1 -0
- azureml/_vendor/azure_storage/blob/_encryption.py +1 -2
- azureml/_vendor/azure_storage/blob/_shared/policies.py +20 -20
- azureml/_vendor/azure_storage/fileshare/_shared/policies.py +20 -20
- azureml/_workspace/_utils.py +3 -1
- azureml/core/authentication.py +1 -4
- azureml/core/compute/computeinstance.py +54 -0
- azureml/core/conda_dependencies.py +4 -3
- azureml/core/datastore.py +23 -23
- azureml/core/model.py +5 -8
- azureml/core/runconfig.py +20 -1
- azureml/core/webservice/aks.py +0 -4
- azureml/core/webservice/local.py +0 -4
- azureml/core/webservice/webservice.py +0 -4
- azureml/data/abstract_dataset.py +19 -7
- azureml/data/constants.py +1 -0
- azureml/data/context_managers.py +1 -1
- azureml/data/dataset_factory.py +1 -1
- azureml/data/datastore_client.py +6 -0
- azureml/data/file_dataset.py +1 -2
- {azureml_core-1.55.0.post2.dist-info → azureml_core-1.57.0.dist-info}/METADATA +33 -34
- {azureml_core-1.55.0.post2.dist-info → azureml_core-1.57.0.dist-info}/RECORD +39 -39
- {azureml_core-1.55.0.post2.dist-info → azureml_core-1.57.0.dist-info}/WHEEL +1 -1
- {azureml_core-1.55.0.post2.dist-info → azureml_core-1.57.0.dist-info}/LICENSE.txt +0 -0
- {azureml_core-1.55.0.post2.dist-info → azureml_core-1.57.0.dist-info}/entry_points.txt +0 -0
- {azureml_core-1.55.0.post2.dist-info → azureml_core-1.57.0.dist-info}/top_level.txt +0 -0
azureml/_base_sdk_common/_version.py
CHANGED
@@ -1 +1 @@
-ver = "1.55.0.post2"
+ver = "1.57.0"
azureml/_base_sdk_common/common.py
CHANGED
@@ -133,6 +133,7 @@ def get_run_config_dir_path_if_exists(project_path):
 def get_run_config_dir_path(project_path):
     # Try to look for the old aml_config directory first
     # If that does not exist default to use the new .azureml
+    project_path = os.path.normpath(project_path)
     run_config_dir_path = os.path.join(project_path, AML_CONFIG_DIR)
     if not os.path.exists(run_config_dir_path):
         run_config_dir_path = os.path.join(project_path, AZUREML_DIR)
@@ -142,6 +143,7 @@ def get_run_config_dir_path(project_path):
 def get_run_config_dir_name(project_path):
     # Try to look for the old aml_config directory first
     # If that does not exist default to use the new .azureml
+    project_path = os.path.normpath(project_path)
     run_config_dir_path = os.path.join(project_path, AML_CONFIG_DIR)
     run_config_dir_name = AML_CONFIG_DIR
     if not os.path.exists(run_config_dir_path):
@@ -156,6 +158,7 @@ def get_config_file_name(project_config_path):
     :return: Either project.json or config.json
     :rtype: str
     """
+    project_config_path = os.path.normpath(project_config_path)
     legacy_config_file_path = os.path.join(project_config_path, LEGACY_PROJECT_FILENAME)
     if os.path.exists(legacy_config_file_path):
         return LEGACY_PROJECT_FILENAME
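Note: a large share of the changes in this release thread os.path.normpath through path handling before filesystem access. For reference, a quick standard-library sketch of what normpath does (the example path is illustrative):

    import os

    # os.path.normpath collapses redundant separators and "."/".." components;
    # on Windows it additionally converts forward slashes to backslashes.
    print(os.path.normpath("project//aml_config/../.azureml/"))
    # -> 'project/.azureml' on POSIX, 'project\.azureml' on Windows
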
azureml/_file_utils/file_utils.py
CHANGED
@@ -24,11 +24,11 @@ from azure.common import AzureMissingResourceHttpError
 module_logger = logging.getLogger(__name__)
 
 
-def _validate_content_match(server_md5, computed_md5):
-    _ERROR_MD5_MISMATCH = \
-        'MD5 mismatch. Expected value is \'{0}\', computed value is \'{1}\'.'
-    if server_md5 != computed_md5:
-        raise AzureMLException(_ERROR_MD5_MISMATCH.format(server_md5, computed_md5))
+def _validate_content_match(server_256, computed_sha256):
+    _ERROR_sha256_MISMATCH = \
+        'sha256 mismatch. Expected value is \'{0}\', computed value is \'{1}\'.'
+    if server_256 != computed_sha256:
+        raise AzureMLException(_ERROR_sha256_MISMATCH.format(server_256, computed_sha256))
 
 
 def normalize_path_and_join(path, name):
@@ -98,6 +98,7 @@ def makedirs_for_file_path(file_path):
     """
     :param file_path: relative or absolute path to a file
     """
+    file_path = os.path.normpath(file_path)
     parent_path = os.path.join(file_path, os.path.pardir)
     parent_path = os.path.normpath(parent_path)
     if not os.path.exists(parent_path):
@@ -205,7 +206,7 @@ def download_file(source_uri, path=None, max_retries=5, stream=True, protocol="h
     if path is None:
         module_logger.debug('Output file path is {}, the file was not downloaded.'.format(path))
         return
-
+    path = os.path.normpath(path)
     # download using BlobClient
     if is_source_uri_matches_storage_blob(source_uri):
         sas_token, account_name, endpoint_suffix, container_name, blob_name = get_block_blob_service_credentials(
@@ -246,14 +247,14 @@ def download_file(source_uri, path=None, max_retries=5, stream=True, protocol="h
     # download using requests.Session
     def _handle_response(response):
         makedirs_for_file_path(path)
-        md5_hash = hashlib.md5()
+        sha256_hash = hashlib.sha256()
         with open(path, 'wb') as write_to_file:
             for chunk in response.iter_content(chunk_size=1024):
                 if chunk:
-                    md5_hash.update(chunk)
+                    sha256_hash.update(chunk)
                     write_to_file.write(chunk)
         if _validate_check_sum:
-            _validate_content(md5_hash, response)
+            _validate_content(sha256_hash, response)
 
     _request_file_with_retry(source_uri, _handle_response, max_retries, stream, session)
 
@@ -318,37 +319,37 @@ def download_file_stream(source_uri, encoding="utf-8", download_to_bytes=False,
     # download using requests.Session
     def _handle_response(response):
         bytes_str = bytes()
-        md5_hash = hashlib.md5()
+        sha256_hash = hashlib.sha256()
         if response.status_code != 200:
             response.raise_for_status()
         for chunk in response.iter_content(chunk_size=1024):
             if chunk:
-                md5_hash.update(chunk)
+                sha256_hash.update(chunk)
                 bytes_str += chunk
         if _validate_check_sum:
-            _validate_content(md5_hash, response)
+            _validate_content(sha256_hash, response)
         return bytes_str if download_to_bytes else bytes_str.decode(encoding)
 
     return _request_file_with_retry(source_uri, _handle_response, max_retries, stream, session)
 
 
-def _validate_content(md5_hash, response):
+def _validate_content(sha256_hash, response):
     """
-    Validate the content of response with md5_hash
+    Validate the content of response with sha256_hash
 
-    :param md5_hash:
-    :type md5_hash: _Hash
+    :param sha256_hash:
+    :type sha256_hash: _Hash
     :param response: the response object
     :type response: requests.Response
     :return: None
     :rtype: None
     """
-    if 'content-md5' in response.headers:
-        _validate_content_match(response.headers['content-md5'],
-                                base64.b64encode(md5_hash.digest()).decode('utf-8'))
+    if 'content-sha256' in response.headers:
+        _validate_content_match(response.headers['content-sha256'],
+                                base64.b64encode(sha256_hash.digest()).decode('utf-8'))
     else:
         module_logger.debug(
-            "validate_check_sum flag is set to true but content-md5 not found on respose header")
+            "validate_check_sum flag is set to true but content-sha256 not found on respose header")
 
 
 def get_directory_size(path, size_limit=None, include_function=None, exclude_function=None):
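Note: the download helpers above replace MD5-based content validation with SHA-256. A minimal sketch of the pattern, assuming the content-sha256 header shown in the diff; the URL and function name are illustrative:

    import base64
    import hashlib

    import requests

    def download_with_sha256_check(url, dest_path):
        sha256_hash = hashlib.sha256()
        with requests.get(url, stream=True) as response:
            response.raise_for_status()
            with open(dest_path, 'wb') as out:
                # Hash each streamed chunk as it is written to disk.
                for chunk in response.iter_content(chunk_size=1024):
                    if chunk:
                        sha256_hash.update(chunk)
                        out.write(chunk)
            server_value = response.headers.get('content-sha256')
            if server_value is not None:
                # The server sends the checksum base64-encoded.
                computed = base64.b64encode(sha256_hash.digest()).decode('utf-8')
                if server_value != computed:
                    raise ValueError('sha256 mismatch: expected {}, computed {}'.format(
                        server_value, computed))
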
azureml/_model_management/_util.py
CHANGED
@@ -150,6 +150,7 @@ def upload_dependency(workspace, dependency, create_tar=False, arcname=None, sho
     """
     from azureml._restclient.artifacts_client import ArtifactsClient
     artifact_client = ArtifactsClient(workspace.service_context)
+    dependency = os.path.normpath(dependency)
     if dependency.startswith('http') or dependency.startswith('wasb'):
         return dependency, urlparse(dependency).path.split('/')[-1]
     if not os.path.exists(dependency):
@@ -204,6 +205,7 @@ def wrap_execution_script(execution_script, schema_file, dependencies, log_aml_d
     :return: str path to wrapped execution script
     """
     new_script_loc = tempfile.mkstemp(suffix='.py')[1]
+    execution_script = os.path.normpath(execution_script)
     dependencies.append(execution_script)
     if not os.path.exists(execution_script):
         raise WebserviceException('Path to execution script {} does not exist.'.format(execution_script),
@@ -878,6 +880,7 @@ def cleanup_docker_image(docker_client, image_id):
 
 
 def validate_path_exists_or_throw(member, name, extra_message=''):
+    member = os.path.normpath(member)
     if not os.path.exists(member):
         raise WebserviceException("{0} {1} doesn't exist. {2}".format(name, member, extra_message),
                                   logger=module_logger)
azureml/_project/_compute_target_commands.py
CHANGED
@@ -244,7 +244,7 @@ def _execute_ssh_command(address, port, username, command_to_run, password=None,
 
     ssh = paramiko.SSHClient()
 
-    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+    ssh.set_missing_host_key_policy(paramiko.RejectPolicy())
 
     ssh.connect(address, port, username, pkey=private_key, password=password)
 
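Note: RejectPolicy refuses to connect to hosts whose keys are not already known, where the previous policy accepted unknown keys silently. A hedged sketch of what a caller now has to do before connecting (host name and username are illustrative):

    import paramiko

    ssh = paramiko.SSHClient()
    # With RejectPolicy, an unknown host key raises SSHException instead of
    # being silently accepted, so known hosts must be loaded up front.
    ssh.load_system_host_keys()  # e.g. ~/.ssh/known_hosts
    ssh.set_missing_host_key_policy(paramiko.RejectPolicy())
    ssh.connect('compute.example.com', port=22, username='azureuser')
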
azureml/_project/azureml_base_images.json
CHANGED
@@ -1,9 +1,9 @@
 {
-    "mcr.microsoft.com/azureml/openmpi4.1.0-cuda11.1-cudnn8-ubuntu20.04": "
-    "mcr.microsoft.com/azureml/openmpi4.1.0-cuda11.2-cudnn8-ubuntu20.04": "
-    "mcr.microsoft.com/azureml/openmpi4.1.0-cuda11.3-cudnn8-ubuntu20.04": "
-    "mcr.microsoft.com/azureml/openmpi4.1.0-cuda11.6-cudnn8-ubuntu20.04": "
-    "mcr.microsoft.com/azureml/openmpi4.1.0-cuda11.8-cudnn8-ubuntu22.04": "
-    "mcr.microsoft.com/azureml/openmpi4.1.0-ubuntu20.04": "
-    "mcr.microsoft.com/azureml/openmpi4.1.0-ubuntu22.04": "
+    "mcr.microsoft.com/azureml/openmpi4.1.0-cuda11.1-cudnn8-ubuntu20.04": "20240709.v1",
+    "mcr.microsoft.com/azureml/openmpi4.1.0-cuda11.2-cudnn8-ubuntu20.04": "20240709.v1",
+    "mcr.microsoft.com/azureml/openmpi4.1.0-cuda11.3-cudnn8-ubuntu20.04": "20240709.v1",
+    "mcr.microsoft.com/azureml/openmpi4.1.0-cuda11.6-cudnn8-ubuntu20.04": "20240709.v1",
+    "mcr.microsoft.com/azureml/openmpi4.1.0-cuda11.8-cudnn8-ubuntu22.04": "20240709.v1",
+    "mcr.microsoft.com/azureml/openmpi4.1.0-ubuntu20.04": "20240709.v1",
+    "mcr.microsoft.com/azureml/openmpi4.1.0-ubuntu22.04": "20240709.v1"
 }
azureml/_project/azureml_sdk_scope.txt
CHANGED
@@ -1,12 +1,45 @@
-
-
-
-
-
-
-
-
-
-
-
-
+azureml-core
+azureml-train
+azureml-sdk
+azureml-defaults
+azureml-train-core
+azureml-automl-core
+azureml-automl-runtime
+azureml-training-tabular
+azureml-train-automl
+azureml-train-automl-client
+azureml-train-automl-runtime
+azureml-contrib-automl-dnn-forecasting
+azureml-automl-dnn-vision
+azureml-automl-dnn-nlp
+azureml-contrib-automl-pipeline-steps
+azureml-train-restclients-hyperdrive
+azureml-telemetry
+azureml-tensorboard
+azureml-contrib-notebook
+azureml-explain-model
+azureml-interpret
+azureml-contrib-server
+azureml-contrib-services
+azureml-contrib-iot
+azureml-contrib-run
+azureml-datadrift
+azureml-widgets
+azureml-pipeline
+azureml-pipeline-core
+azureml-pipeline-steps
+azureml-contrib-pipeline-steps
+azureml-cli-common
+azureml-opendatasets
+azureml-accel-models
+azureml-mlflow
+azureml-contrib-functions
+azureml-contrib-dataset
+azureml-contrib-reinforcementlearning
+azureml-contrib-mir
+azureml-contrib-fairness
+azureml-contrib-aisc
+azureml-dataset-runtime
+azureml-synapse
+azureml-responsibleai
+azureml-automl-common-tools
azureml/_project/file_utilities.py
CHANGED
@@ -18,6 +18,7 @@ def create_directory(path, set_hidden=False):
 
     :rtype None
     """
+    path = os.path.normpath(path)
     if os.path.exists(path):
         return
     os.makedirs(path)
@@ -34,6 +35,7 @@ def make_file_or_directory_hidden(path):
 
     :rtype str
     """
+    path = os.path.normpath(path)
     if os.name == 'nt':
         ctypes.windll.kernel32.SetFileAttributesW(path, 0x02)
     else:
azureml/_project/ignore_file.py
CHANGED
@@ -52,6 +52,7 @@ class IgnoreFile(object):
 
         :rtype: None
         """
+        self._path = os.path.normpath(self._path)
         return self._path and os.path.exists(self._path)
 
     def create_if_not_exists(self, patterns_to_exclude=default_patterns):
@@ -61,6 +62,7 @@ class IgnoreFile(object):
 
         :rtype: None
        """
         if not self.exists():
+            self._path = os.path.normpath(self._path)
             with open(self._path, 'w') as fo:
                 fo.write('\n'.join(patterns_to_exclude) + '\n')
 
azureml/_project/project_info.py
CHANGED
@@ -33,6 +33,7 @@ def add(project_id, scope, project_path, is_config_file_path=False):
     config_file_name = get_config_file_name(config_directory)
     project_file_path = os.path.join(config_directory, config_file_name)
     # We overwriting if project.json exists.
+    project_file_path = os.path.normpath(project_file_path)
     with open(project_file_path, "w") as fo:
         info = ProjectInfo(project_id, scope)
         fo.write(json.dumps(info.__dict__))
@@ -76,7 +77,9 @@ def get(project_path, no_recursive_check=False):
     for config_path in [AML_CONFIG_DIR, AZUREML_DIR]:
         for files_to_look in [LEGACY_PROJECT_FILENAME, CONFIG_FILENAME]:
             config_file_path = os.path.join(project_path, config_path, files_to_look)
+            config_file_path = os.path.normpath(config_file_path)
             if os.path.exists(config_file_path):
+                config_file_path = os.path.normpath(config_file_path)
                 with open(config_file_path) as info_json:
                     config_json = json.load(info_json)
                     # If Scope is not there, then this is an old workspace config.json config file
azureml/_project/project_manager.py
CHANGED
@@ -82,6 +82,7 @@ def _update_requirements_binding(repo_path, config_dir_to_use):
     conda_dependencies_path = os.path.join(repo_path, config_dir_to_use, _conda_dependencies_file_name)
 
     lines = []
+    conda_dependencies_path = os.path.normpath(conda_dependencies_path)
     with open(conda_dependencies_path, "r") as infile:
         for line in infile:
             if requirements_version:
@@ -90,6 +91,7 @@ def _update_requirements_binding(repo_path, config_dir_to_use):
                 line = line.replace(default_index, requirements_index)
 
             lines.append(line)
+    conda_dependencies_path = os.path.normpath(conda_dependencies_path)
     with open(conda_dependencies_path, 'w') as outfile:
         for line in lines:
             outfile.write(line)
@@ -105,8 +107,10 @@ def attach_project(project_id, project_path, scope, compute_target_dict):
     :rtype: None
     """
     from azureml._base_sdk_common.common import get_run_config_dir_name
+    project_path = os.path.normpath(project_path)
     is_existing_dir = os.path.isdir(project_path)
     if not is_existing_dir:
+        project_path = os.path.normpath(project_path)
         # We creating all intermediate dirs too.
         os.makedirs(os.path.abspath(project_path))
 
@@ -157,15 +161,20 @@ def _copy_default_files(path, default_fileset):
     this_dir, this_filename = os.path.split(__file__)
     default_files_path = os.path.join(this_dir, default_fileset)
 
+    path = os.path.normpath(path)
     if not os.path.exists(path):
+        path = os.path.normpath(path)
         os.mkdir(path)
     for filename in os.listdir(default_files_path):
         orig_path = os.path.join(default_files_path, filename)
         new_path = os.path.join(path, filename)
         if os.path.isdir(orig_path):
+            new_path = os.path.normpath(new_path)
             shutil.copytree(orig_path, new_path)
         else:
+            new_path = os.path.normpath(new_path)
             if not os.path.exists(new_path):
+                new_path = os.path.normpath(new_path)
                 shutil.copy(orig_path, new_path)
 
 
azureml/_restclient/artifacts_client.py
CHANGED
@@ -101,8 +101,8 @@ class ArtifactsClient(WorkspaceClient):
 
     def upload_artifact_from_path(self, path, *args, **kwargs):
         """upload a local file to a new artifact"""
-        path = os.path.normpath(path)
         path = os.path.abspath(path)
+        path = os.path.normpath(path)
         with open(path, "rb") as stream:
             return self.upload_artifact_from_stream(stream, *args, **kwargs)
 
azureml/_vendor/azure_storage/blob/_encryption.py
CHANGED
@@ -251,8 +251,7 @@ class GCMBlobEncryptionStream:
     :param bytes data: The data to encrypt.
     """
     # Each region MUST use a different nonce
-    nonce = self.nonce_counter.to_bytes(_GCM_NONCE_LENGTH, 'big')
-    self.nonce_counter += 1
+    nonce = os.urandom(12)
 
     aesgcm = AESGCM(self.content_encryption_key)
 
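Note: the vendored blob encryption stream now draws each 12-byte GCM nonce from os.urandom rather than an incrementing counter. A self-contained sketch of the AES-GCM pattern involved, using the cryptography package the vendored code already builds on (key and payload are illustrative):

    import os
    from cryptography.hazmat.primitives.ciphers.aead import AESGCM

    key = AESGCM.generate_key(bit_length=256)
    aesgcm = AESGCM(key)

    nonce = os.urandom(12)  # 96-bit nonce; must be unique per (key, message)
    ciphertext = aesgcm.encrypt(nonce, b"region payload", None)
    plaintext = aesgcm.decrypt(nonce, ciphertext, None)
    assert plaintext == b"region payload"
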
azureml/_vendor/azure_storage/blob/_shared/policies.py
CHANGED
@@ -101,11 +101,11 @@ def is_retry(response, mode):  # pylint: disable=too-many-return-statements
         if status in [501, 505]:
             return False
         return True
-    # retry if invalid content md5
-    if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'):
-        computed_md5 = response.http_request.headers.get('content-md5', None) or \
-            encode_base64(StorageContentValidation.get_content_md5(response.http_response.body()))
-        if response.http_response.headers['content-md5'] != computed_md5:
+    # retry if invalid content sha256
+    if response.context.get('validate_content', False) and response.http_response.headers.get('content-sha256'):
+        computed_sha256 = response.http_request.headers.get('content-sha256', None) or \
+            encode_base64(StorageContentValidation.get_content_sha256(response.http_response.body()))
+        if response.http_response.headers['content-sha256'] != computed_sha256:
             return True
     return False
 
@@ -343,17 +343,17 @@ class StorageContentValidation(SansIOHTTPPolicy):
 
     This will overwrite any headers already defined in the request.
     """
-    header_name = 'Content-MD5'
+    header_name = 'Content-sha256'
 
     def __init__(self, **kwargs):  # pylint: disable=unused-argument
         super(StorageContentValidation, self).__init__()
 
     @staticmethod
-    def get_content_md5(data):
+    def get_content_sha256(data):
         data = data or b""
-        md5 = hashlib.md5()  # nosec
+        sha256 = hashlib.sha256()  # nosec
         if isinstance(data, bytes):
-            md5.update(data)
+            sha256.update(data)
         elif hasattr(data, 'read'):
             pos = 0
             try:
@@ -361,7 +361,7 @@ class StorageContentValidation(SansIOHTTPPolicy):
             except:  # pylint: disable=bare-except
                 pass
             for chunk in iter(lambda: data.read(4096), b""):
-                md5.update(chunk)
+                sha256.update(chunk)
             try:
                 data.seek(pos, SEEK_SET)
             except (AttributeError, IOError):
@@ -369,25 +369,25 @@ class StorageContentValidation(SansIOHTTPPolicy):
         else:
             raise ValueError("Data should be bytes or a seekable file-like object.")
 
-        return md5.digest()
+        return sha256.digest()
 
     def on_request(self, request):
         # type: (PipelineRequest, Any) -> None
         validate_content = request.context.options.pop('validate_content', False)
         if validate_content and request.http_request.method != 'GET':
-            computed_md5 = encode_base64(StorageContentValidation.get_content_md5(request.http_request.data))
-            request.http_request.headers[self.header_name] = computed_md5
-            request.context['validate_content_md5'] = computed_md5
+            computed_sha256 = encode_base64(StorageContentValidation.get_content_sha256(request.http_request.data))
+            request.http_request.headers[self.header_name] = computed_sha256
+            request.context['validate_content_sha256'] = computed_sha256
         request.context['validate_content'] = validate_content
 
     def on_response(self, request, response):
-        if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'):
-            computed_md5 = request.context.get('validate_content_md5') or \
-                encode_base64(StorageContentValidation.get_content_md5(response.http_response.body()))
-            if response.http_response.headers['content-md5'] != computed_md5:
+        if response.context.get('validate_content', False) and response.http_response.headers.get('content-sha256'):
+            computed_sha256 = request.context.get('validate_content_sha256') or \
+                encode_base64(StorageContentValidation.get_content_sha256(response.http_response.body()))
+            if response.http_response.headers['content-sha256'] != computed_sha256:
                 raise AzureError(
-                    'MD5 mismatch. Expected value is \'{0}\', computed value is \'{1}\'.'.format(
-                        response.http_response.headers['content-md5'], computed_md5),
+                    'sha256 mismatch. Expected value is \'{0}\', computed value is \'{1}\'.'.format(
+                        response.http_response.headers['content-sha256'], computed_sha256),
                     response=response.http_response
                 )
 
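Note: both sides of StorageContentValidation now use the same base64-encoded SHA-256 transactional checksum. For reference, a standalone sketch of the helper's two input modes, mirroring get_content_sha256 above: hash bytes directly, or stream a seekable file-like object and restore its position (the function name here is illustrative):

    import hashlib
    from io import SEEK_SET, BytesIO

    def content_sha256(data):
        data = data or b""
        digest = hashlib.sha256()
        if isinstance(data, bytes):
            digest.update(data)
        elif hasattr(data, 'read'):
            pos = data.tell()
            for chunk in iter(lambda: data.read(4096), b""):
                digest.update(chunk)
            data.seek(pos, SEEK_SET)  # restore position so the body can still be sent
        else:
            raise ValueError("Data should be bytes or a seekable file-like object.")
        return digest.digest()

    assert content_sha256(b"abc") == content_sha256(BytesIO(b"abc"))
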
azureml/_vendor/azure_storage/fileshare/_shared/policies.py
CHANGED
@@ -97,11 +97,11 @@ def is_retry(response, mode):  # pylint: disable=too-many-return-statements
         if status in [501, 505]:
             return False
         return True
-    # retry if invalid content md5
-    if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'):
-        computed_md5 = response.http_request.headers.get('content-md5', None) or \
-            encode_base64(StorageContentValidation.get_content_md5(response.http_response.body()))
-        if response.http_response.headers['content-md5'] != computed_md5:
+    # retry if invalid content sha256
+    if response.context.get('validate_content', False) and response.http_response.headers.get('content-sha256'):
+        computed_sha256 = response.http_request.headers.get('content-sha256', None) or \
+            encode_base64(StorageContentValidation.get_content_sha256(response.http_response.body()))
+        if response.http_response.headers['content-sha256'] != computed_sha256:
             return True
     return False
 
@@ -329,17 +329,17 @@ class StorageContentValidation(SansIOHTTPPolicy):
 
     This will overwrite any headers already defined in the request.
     """
-    header_name = 'Content-MD5'
+    header_name = 'Content-sha256'
 
     def __init__(self, **kwargs):  # pylint: disable=unused-argument
         super(StorageContentValidation, self).__init__()
 
     @staticmethod
-    def get_content_md5(data):
+    def get_content_sha256(data):
         data = data or b""
-        md5 = hashlib.md5()  # nosec
+        sha256 = hashlib.sha256()  # nosec
         if isinstance(data, bytes):
-            md5.update(data)
+            sha256.update(data)
         elif hasattr(data, 'read'):
             pos = 0
             try:
@@ -347,7 +347,7 @@ class StorageContentValidation(SansIOHTTPPolicy):
             except:  # pylint: disable=bare-except
                 pass
             for chunk in iter(lambda: data.read(4096), b""):
-                md5.update(chunk)
+                sha256.update(chunk)
             try:
                 data.seek(pos, SEEK_SET)
             except (AttributeError, IOError):
@@ -355,25 +355,25 @@ class StorageContentValidation(SansIOHTTPPolicy):
         else:
             raise ValueError("Data should be bytes or a seekable file-like object.")
 
-        return md5.digest()
+        return sha256.digest()
 
     def on_request(self, request):
         # type: (PipelineRequest, Any) -> None
         validate_content = request.context.options.pop('validate_content', False)
         if validate_content and request.http_request.method != 'GET':
-            computed_md5 = encode_base64(StorageContentValidation.get_content_md5(request.http_request.data))
-            request.http_request.headers[self.header_name] = computed_md5
-            request.context['validate_content_md5'] = computed_md5
+            computed_sha256 = encode_base64(StorageContentValidation.get_content_sha256(request.http_request.data))
+            request.http_request.headers[self.header_name] = computed_sha256
+            request.context['validate_content_sha256'] = computed_sha256
         request.context['validate_content'] = validate_content
 
     def on_response(self, request, response):
-        if response.context.get('validate_content', False) and response.http_response.headers.get('content-md5'):
-            computed_md5 = request.context.get('validate_content_md5') or \
-                encode_base64(StorageContentValidation.get_content_md5(response.http_response.body()))
-            if response.http_response.headers['content-md5'] != computed_md5:
+        if response.context.get('validate_content', False) and response.http_response.headers.get('content-sha256'):
+            computed_sha256 = request.context.get('validate_content_sha256') or \
+                encode_base64(StorageContentValidation.get_content_sha256(response.http_response.body()))
+            if response.http_response.headers['content-sha256'] != computed_sha256:
                 raise AzureError(
-                    'MD5 mismatch. Expected value is \'{0}\', computed value is \'{1}\'.'.format(
-                        response.http_response.headers['content-md5'], computed_md5),
+                    'sha256 mismatch. Expected value is \'{0}\', computed value is \'{1}\'.'.format(
+                        response.http_response.headers['content-sha256'], computed_sha256),
                     response=response.http_response
                 )
 
azureml/_workspace/_utils.py
CHANGED
@@ -52,7 +52,9 @@ def get_application_insights_region(workspace_region):
         "southindia": "centralindia",
         "polandcentral": "northeurope",
         "italynorth": "westeurope",
-        "chinaeast3": "chinaeast2"
+        "chinaeast3": "chinaeast2",
+        "spaincentral": "francecentral",
+        "israelcentral": "westeurope"
     }.get(workspace_region, workspace_region)
 
 
azureml/core/authentication.py
CHANGED
@@ -1566,10 +1566,7 @@ class AzureMLTokenAuthentication(AbstractAuthentication):
     @staticmethod
     def _get_token(token, should_encrypt=False):
         password = os.environ.get("AZUREML_RUN_TOKEN_PASS")
-
-        m = hashlib.sha256()
-        m.update(random_string.encode())
-        salt = m.digest()
+        salt = os.urandom(16)
         kdf = PBKDF2HMAC(
             algorithm=hashes.SHA256(),
             length=32,
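Note: the token-encryption helper now derives its salt from os.urandom instead of hashing a fixed string. A minimal PBKDF2 sketch with the same primitives named in the diff (the password value and iteration count are illustrative; a random salt must be kept alongside the ciphertext if the key has to be re-derived later):

    import base64
    import os
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

    password = b"example-run-token-pass"
    salt = os.urandom(16)  # unpredictable per-derivation salt
    kdf = PBKDF2HMAC(
        algorithm=hashes.SHA256(),
        length=32,
        salt=salt,
        iterations=480000,
    )
    key = base64.urlsafe_b64encode(kdf.derive(password))  # Fernet-compatible key
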
azureml/core/compute/computeinstance.py
CHANGED
@@ -365,6 +365,60 @@ class ComputeInstance(ComputeTarget):
             vnet_name, subnet_name, tags, description, assigned_user_object_id, assigned_user_tenant_id)
         return config
 
+    def update_sso_settings(self, value):
+        """Update single sign-on settings of the compute instance.
+
+        :param value: The value of sso settings
+        :type value: bool
+        :return: Whether the update was successful or not
+        :rtype: bool
+        """
+        return self._update_sso_settings(self.workspace, self.name, value)
+
+    @staticmethod
+    def _update_sso_settings(workspace, compute_name, value):
+        """Update single sign-on settings.
+
+        :param workspace: The workspace.
+        :type workspace: azureml.core.Workspace
+        :param compute_name: The compute name.
+        :type compute_name: string
+        :param value: The value of sso settings
+        :type value: bool
+        :return: Whether the update was successful or not
+        :rtype: bool
+        """
+        if not workspace:
+            return False
+
+        enable_sso_fmt = '{}/subscriptions/{}/resourcegroups/{}/providers/' \
+                         'Microsoft.MachineLearningServices/workspaces/{}/computes/{}/enableSso'
+        arm_endpoint = ComputeTarget._get_resource_manager_endpoint(workspace)
+        endpoint = enable_sso_fmt.format(arm_endpoint,
+                                         workspace.subscription_id,
+                                         workspace.resource_group,
+                                         workspace.name,
+                                         compute_name)
+        headers = workspace._auth.get_authentication_header()
+        ComputeTarget._add_request_tracking_headers(headers)
+        params = {'api-version': MLC_WORKSPACE_API_VERSION}
+        compute_update_payload = {'EnableSSO': value}
+
+        try:
+            resp = ClientBase._execute_func(get_requests_session().post, endpoint,
+                                            params=params, headers=headers, json=compute_update_payload)
+            resp.raise_for_status()
+        except requests.exceptions.HTTPError:
+            raise ComputeTargetException('Single sign-on settings update request failed:\n'
+                                         'Response Code: {}\n'
+                                         'Headers: {}\n'
+                                         'Content: {}'.format(resp.status_code, resp.headers, resp.content))
+
+        if resp.status_code != 200:
+            return False
+
+        return True
+
     @staticmethod
     def _build_create_payload(config, location, subscription_id):
         """Construct the payload needed to create an ComputeInstance.