azureml-core 1.55.0.post2__py3-none-any.whl → 1.57.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. azureml/_base_sdk_common/_version.py +1 -1
  2. azureml/_base_sdk_common/common.py +3 -0
  3. azureml/_base_sdk_common/workspace/models/rest_client_enums.py +1 -0
  4. azureml/_file_utils/file_utils.py +21 -20
  5. azureml/_model_management/_util.py +3 -0
  6. azureml/_project/_compute_target_commands.py +1 -1
  7. azureml/_project/azureml_base_images.json +7 -7
  8. azureml/_project/azureml_sdk_scope.txt +45 -12
  9. azureml/_project/file_utilities.py +2 -0
  10. azureml/_project/ignore_file.py +2 -0
  11. azureml/_project/project_info.py +3 -0
  12. azureml/_project/project_manager.py +9 -0
  13. azureml/_restclient/artifacts_client.py +1 -1
  14. azureml/_restclient/models/rest_client_enums.py +1 -0
  15. azureml/_vendor/azure_storage/blob/_encryption.py +1 -2
  16. azureml/_vendor/azure_storage/blob/_shared/policies.py +20 -20
  17. azureml/_vendor/azure_storage/fileshare/_shared/policies.py +20 -20
  18. azureml/_workspace/_utils.py +3 -1
  19. azureml/core/authentication.py +1 -4
  20. azureml/core/compute/computeinstance.py +54 -0
  21. azureml/core/conda_dependencies.py +4 -3
  22. azureml/core/datastore.py +23 -23
  23. azureml/core/model.py +5 -8
  24. azureml/core/runconfig.py +20 -1
  25. azureml/core/webservice/aks.py +0 -4
  26. azureml/core/webservice/local.py +0 -4
  27. azureml/core/webservice/webservice.py +0 -4
  28. azureml/data/abstract_dataset.py +19 -7
  29. azureml/data/constants.py +1 -0
  30. azureml/data/context_managers.py +1 -1
  31. azureml/data/dataset_factory.py +1 -1
  32. azureml/data/datastore_client.py +6 -0
  33. azureml/data/file_dataset.py +1 -2
  34. {azureml_core-1.55.0.post2.dist-info → azureml_core-1.57.0.dist-info}/METADATA +33 -34
  35. {azureml_core-1.55.0.post2.dist-info → azureml_core-1.57.0.dist-info}/RECORD +39 -39
  36. {azureml_core-1.55.0.post2.dist-info → azureml_core-1.57.0.dist-info}/WHEEL +1 -1
  37. {azureml_core-1.55.0.post2.dist-info → azureml_core-1.57.0.dist-info}/LICENSE.txt +0 -0
  38. {azureml_core-1.55.0.post2.dist-info → azureml_core-1.57.0.dist-info}/entry_points.txt +0 -0
  39. {azureml_core-1.55.0.post2.dist-info → azureml_core-1.57.0.dist-info}/top_level.txt +0 -0
azureml/core/conda_dependencies.py CHANGED
@@ -29,7 +29,7 @@ PIP = 'pip'
  PYTHON_PREFIX = 'python'
  VERSION_REGEX = re.compile(r'(\d+)\.(\d+)(\.(\d+))?([ab](\d+))?$')
  CNTK_DEFAULT_VERSION = '2.7'
- PYTHON_DEFAULT_VERSION = '3.8.13'
+ PYTHON_DEFAULT_VERSION = '3.9.12'
  LINUX_PLATFORM = 'linux'
  WINDOWS_PLATFORM = 'win32'
  TENSORFLOW_DEFAULT_VERSION = '2.2.0'
@@ -125,10 +125,11 @@ class CondaDependencies(object):

      def __init__(self, conda_dependencies_file_path=None, _underlying_structure=None):
          """Initialize a new object to manage dependencies."""
-         if conda_dependencies_file_path:
+         if conda_dependencies_file_path is not None:
+             conda_dependencies_file_path = os.path.normpath(conda_dependencies_file_path)
              with open(conda_dependencies_file_path, "r") as input:
                  self._conda_dependencies = ruamelyaml.round_trip_load(input)
-         elif _underlying_structure:
+         elif _underlying_structure is not None:
              self._conda_dependencies = _underlying_structure
          else:
              with resource_stream(
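
In 1.57.0 the constructor switches from a truthiness check to an explicit `is not None` test and normalizes the supplied path. A minimal standalone sketch (plain Python, not the SDK code) of what that change means for callers:

    import os

    def resolve(path=None):
        # `if path:` would also skip this branch for an empty string;
        # `is not None` only skips it when the argument was genuinely omitted.
        if path is not None:
            # normpath collapses redundant separators and "." segments,
            # e.g. "env//./deps.yml" -> "env/deps.yml" (OS-specific separators).
            return os.path.normpath(path)
        return "<built-in default dependencies>"

    print(resolve())                                 # <built-in default dependencies>
    print(resolve("env//./conda_dependencies.yml"))  # env/conda_dependencies.yml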
azureml/core/datastore.py CHANGED
@@ -54,29 +54,29 @@ class Datastore(object):

      .. code-block:: python

-         from azureml.exceptions import UserErrorException
-
-         blob_datastore_name='MyBlobDatastore'
-         account_name=os.getenv("BLOB_ACCOUNTNAME_62", "<my-account-name>") # Storage account name
-         container_name=os.getenv("BLOB_CONTAINER_62", "<my-container-name>") # Name of Azure blob container
-         account_key=os.getenv("BLOB_ACCOUNT_KEY_62", "<my-account-key>") # Storage account key
-
-         try:
-             blob_datastore = Datastore.get(ws, blob_datastore_name)
-             print("Found Blob Datastore with name: %s" % blob_datastore_name)
-         except UserErrorException:
-             blob_datastore = Datastore.register_azure_blob_container(
-                 workspace=ws,
-                 datastore_name=blob_datastore_name,
-                 account_name=account_name, # Storage account name
-                 container_name=container_name, # Name of Azure blob container
-                 account_key=account_key) # Storage account key
-             print("Registered blob datastore with name: %s" % blob_datastore_name)
-
-         blob_data_ref = DataReference(
-             datastore=blob_datastore,
-             data_reference_name="blob_test_data",
-             path_on_datastore="testdata")
+         # from azureml.exceptions import UserErrorException
+         #
+         # blob_datastore_name='MyBlobDatastore'
+         # account_name=os.getenv("BLOB_ACCOUNTNAME_62", "<my-account-name>") # Storage account name
+         # container_name=os.getenv("BLOB_CONTAINER_62", "<my-container-name>") # Name of Azure blob container
+         # account_key=os.getenv("BLOB_ACCOUNT_KEY_62", "<my-account-key>") # Storage account key
+         #
+         # try:
+         #     blob_datastore = Datastore.get(ws, blob_datastore_name)
+         #     print("Found Blob Datastore with name: %s" % blob_datastore_name)
+         # except UserErrorException:
+         #     blob_datastore = Datastore.register_azure_blob_container(
+         #         workspace=ws,
+         #         datastore_name=blob_datastore_name,
+         #         account_name=account_name, # Storage account name
+         #         container_name=container_name, # Name of Azure blob container
+         #         account_key=account_key) # Storage account key
+         #     print("Registered blob datastore with name: %s" % blob_datastore_name)
+         #
+         # blob_data_ref = DataReference(
+         #     datastore=blob_datastore,
+         #     data_reference_name="blob_test_data",
+         #     path_on_datastore="testdata")

      Full sample is available from
      https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/machine-learning-pipelines/intro-to-pipelines/aml-pipelines-data-transfer.ipynb
azureml/core/model.py CHANGED
@@ -150,10 +150,6 @@ class Model(object):
              description="Ridge regression model to predict diabetes",
              workspace=ws)

-     Full sample is available from
-     https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/deploy-to-local/register-model-deploy-local-advanced.ipynb
-
-
      The following sample shows how to register a model specifying framework, input and output
      datasets, and resource configuration.

@@ -526,10 +522,6 @@ class Model(object):
              description="Ridge regression model to predict diabetes",
              workspace=ws)

-     Full sample is available from
-     https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/deploy-to-local/register-model-deploy-local-advanced.ipynb
-
-
      If you have a model that was produced as a result of an experiment run, you can register it
      from a run object directly without downloading it to a local file first. In order to do that use
      the :func:`azureml.core.run.Run.register_model` method as documented in the :class:`azureml.core.run.Run`
@@ -599,6 +591,7 @@ class Model(object):
          # Artifact ID components.
          origin = 'LocalUpload'
          container = '{}-{}'.format(datetime.now().strftime('%y%m%dT%H%M%S'), str(uuid.uuid4())[:8])
+         model_path = os.path.normpath(model_path)
          model_base_name = os.path.basename(os.path.abspath(model_path))

          file_names, artifact_names = Model._collect_model_artifact_paths(model_path, child_paths)
@@ -639,12 +632,14 @@ class Model(object):
          :type child_paths: builtin.list[str]
          :raises: azureml.exceptions.WebserviceException
          """
+         model_path = os.path.normpath(model_path)
          if not os.path.exists(model_path):
              raise WebserviceException('Error, provided model path "{}" cannot be found'.format(model_path),
                                        logger=module_logger)

          if child_paths:
              for path in child_paths:
+                 model_path = os.path.normpath(model_path)
                  if not os.path.exists(os.path.join(model_path, path)):
                      raise WebserviceException('Error, provided child path "{}" cannot be found'.format(path),
                                                logger=module_logger)
@@ -658,6 +653,7 @@ class Model(object):
          :param child_paths:
          :type child_paths: builtin.list[str]
          """
+         model_path = os.path.normpath(model_path)
          model_parent_path = os.path.dirname(os.path.abspath(model_path))

          if child_paths:
@@ -944,6 +940,7 @@ class Model(object):
          return sas_to_relative_download_path

      def _download_model_files(self, sas_to_relative_download_path, target_dir, exist_ok):
+         target_dir = os.path.normpath(target_dir)
          for sas, path in sas_to_relative_download_path.items():
              target_path = os.path.join(target_dir, path)
              if not exist_ok and os.path.exists(target_path):
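
All of the model.py edits follow one pattern: a user-supplied path is normalized before it is joined, checked for existence, or uploaded. A short illustration of the effect, independent of the SDK:

    import os

    model_path = "models/./sklearn//model.pkl"
    normalized = os.path.normpath(model_path)
    print(normalized)                                     # models/sklearn/model.pkl (backslashes on Windows)
    print(os.path.basename(os.path.abspath(normalized)))  # model.pkl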
azureml/core/runconfig.py CHANGED
@@ -1456,6 +1456,7 @@ class RunConfiguration(_AbstractRunConfigElement):
          else:
              # A user might have specified the file location to save.
              parent_dir = os.path.dirname(path)
+             parent_dir = os.path.normpath(parent_dir)
              if os.path.exists(parent_dir) and os.path.isdir(parent_dir):
                  project_dir_case = False
              else:
@@ -1514,6 +1515,7 @@ class RunConfiguration(_AbstractRunConfigElement):
              path = os.getcwd()

          project_dir_case = True
+         path = os.path.normpath(path)
          if os.path.isfile(path):
              full_runconfig_path = path
              project_dir_case = False
@@ -1527,7 +1529,9 @@ class RunConfiguration(_AbstractRunConfigElement):
          run_config_dir_name = get_run_config_dir_name(path) + "/"
          full_runconfig_path = os.path.join(path, run_config_dir_name + name)

+         full_runconfig_path = os.path.normpath(full_runconfig_path)
          if os.path.isfile(full_runconfig_path):
+             full_runconfig_path = os.path.normpath(full_runconfig_path)
              return RunConfiguration._load_from_path(full_runconfig_path=full_runconfig_path,
                                                      path=path,
                                                      name=name,
@@ -1535,10 +1539,12 @@ class RunConfiguration(_AbstractRunConfigElement):

          # Appending .runconfig suffix for backcompat case.
          full_runconfig_path = full_runconfig_path + RUNCONFIGURATION_EXTENSION
+         full_runconfig_path = os.path.normpath(full_runconfig_path)
          if not os.path.isfile(full_runconfig_path) and name:
              # check for file not in .azureml or aml_config directory
              full_runconfig_path = os.path.join(path, name + RUNCONFIGURATION_EXTENSION)

+         full_runconfig_path = os.path.normpath(full_runconfig_path)
          if os.path.isfile(full_runconfig_path):
              # Setting name=name_with_ext, so that any subsequent save happens
              # on the name.runconfig file instead of name
@@ -1564,6 +1570,7 @@ class RunConfiguration(_AbstractRunConfigElement):
          :return: The run configuration object.
          :rtype: RunConfiguration
          """
+         full_runconfig_path = os.path.normpath(full_runconfig_path)
          with open(full_runconfig_path, "r") as run_config:
              # Loads with all the comments intact.
              commented_map_dict = ruamelyaml.round_trip_load(run_config)
@@ -1587,12 +1594,17 @@ class RunConfiguration(_AbstractRunConfigElement):
          """
          file_found = False
          legacy_full_file_path = os.path.join(path, AML_CONFIG_DIR, name + RUNCONFIGURATION_EXTENSION)
+         legacy_full_file_path = os.path.normpath(legacy_full_file_path)
          full_file_path = os.path.join(path, AZUREML_DIR, name + RUNCONFIGURATION_EXTENSION)
+         legacy_full_file_path = os.path.normpath(legacy_full_file_path)
          if os.path.isfile(legacy_full_file_path):
              file_found = True
+             legacy_full_file_path = os.path.normpath(legacy_full_file_path)
              os.remove(legacy_full_file_path)
+         full_file_path = os.path.normpath(full_file_path)
          if os.path.isfile(full_file_path):
              file_found = True
+             full_file_path = os.path.normpath(full_file_path)
              os.remove(full_file_path)

          if file_found == False:
@@ -1698,10 +1710,12 @@ class RunConfiguration(_AbstractRunConfigElement):
                  run_config_dir_path,
                  run_config_object.target + COMPUTECONTEXT_EXTENSION)

+             compute_target_path = os.path.normpath(compute_target_path)
              if not os.path.isfile(compute_target_path):
                  raise UserErrorException("Compute target = {} doesn't exist at {}".format(
                      run_config_object.target, compute_target_path))

+             compute_target_path = os.path.normpath(compute_target_path)
              with open(compute_target_path, "r") as compute_target_file:
                  compute_target_dict = ruamelyaml.round_trip_load(compute_target_file)
                  if "baseDockerImage" in compute_target_dict:
@@ -1763,11 +1777,13 @@ class RunConfiguration(_AbstractRunConfigElement):
              # Reading spark dependencies file.
              spark_dependencies_path = os.path.join(
                  path, spark_dependencies_file)
+             spark_dependencies_path = os.path.normpath(spark_dependencies_path)

              if not os.path.isfile(spark_dependencies_path):
                  raise UserErrorException("Spark dependencies file = {} doesn't exist at {}".format(
                      spark_dependencies_file, spark_dependencies_path))

+             spark_dependencies_path = os.path.normpath(spark_dependencies_path)
              with open(spark_dependencies_path, "r") as spark_file:
                  if use_commented_map:
                      spark_file_dict = ruamelyaml.round_trip_load(spark_file)
@@ -1824,12 +1840,14 @@ class RunConfiguration(_AbstractRunConfigElement):
              _yaml_set_comment_before_after_key_with_error(
                  commented_map_dict, "environment", "The file path that contains the environment configuration.")

+             full_runconfig_path = os.path.normpath(full_runconfig_path)
              with open(full_runconfig_path, 'w') as outfile:
                  ruamelyaml.round_trip_dump(commented_map_dict, outfile)
-
+             full_env_path = os.path.normpath(full_env_path)
              with open(full_env_path, 'w') as outfile:
                  ruamelyaml.round_trip_dump(environment_commented_map, outfile)
          else:
+             full_runconfig_path = os.path.normpath(full_runconfig_path)
              with open(full_runconfig_path, 'w') as outfile:
                  ruamelyaml.round_trip_dump(commented_map_dict, outfile)

@@ -2009,6 +2027,7 @@ class RunConfiguration(_AbstractRunConfigElement):
              # environment is specified as a file reference.
              environment_path = os.path.join(dir_to_load,
                                              commented_map_or_dict["environment"])
+             environment_path = os.path.normpath(environment_path)
              with open(environment_path, "r") as environment_config:
                  # Replacing string path with the actual environment serialized dictionary.
                  commented_map_or_dict["environment"] = ruamelyaml.round_trip_load(environment_config)
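
The runconfig.py changes only normalize paths immediately before the existing isfile/open calls; the public load/save flow is unchanged. A usage sketch, assuming a project directory that already contains .azureml/myconfig.runconfig (the file name is illustrative):

    from azureml.core.runconfig import RunConfiguration

    # Resolves .azureml/myconfig.runconfig (or the legacy aml_config directory);
    # the added normpath calls only canonicalize the paths opened along the way.
    run_config = RunConfiguration.load(path=".", name="myconfig")
    run_config.save(path=".", name="myconfig")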
azureml/core/webservice/aks.py CHANGED
@@ -70,10 +70,6 @@ class AksWebservice(Webservice):
          # # Enable token auth and disable (key) auth on the webservice
          # aks_config = AksWebservice.deploy_configuration(token_auth_enabled=True, auth_enabled=False)

-     Full sample is available from
-     https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/production-deploy-to-aks/production-deploy-to-aks.ipynb
-
-
      There are a number of ways to deploy a model as a webservice, including with the:

      * ``deploy`` method of the :class:`azureml.core.model.Model` for models already registered in the workspace.
azureml/core/webservice/local.py CHANGED
@@ -95,10 +95,6 @@ class LocalWebservice(Webservice):

          local_service.wait_for_deployment()

-     Full sample is available from
-     https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/deploy-to-local/register-model-deploy-local.ipynb
-
-
      :param workspace: The workspace object containing any Model objects that will be retrieved.
      :type workspace: azureml.core.Workspace
      :param name: The name of the Webservice object to retrieve.
azureml/core/webservice/webservice.py CHANGED
@@ -84,10 +84,6 @@ class Webservice(ABC):
          # # Enable token auth and disable (key) auth on the webservice
          # aks_config = AksWebservice.deploy_configuration(token_auth_enabled=True, auth_enabled=False)

-     Full sample is available from
-     https://github.com/Azure/MachineLearningNotebooks/blob/master/how-to-use-azureml/deployment/production-deploy-to-aks/production-deploy-to-aks.ipynb
-
-
      The following sample shows how to find an existing :class:`azureml.core.webservice.AciWebservice` in a
      workspace and delete it if it exists so the name can be reused.

azureml/data/abstract_dataset.py CHANGED
@@ -655,7 +655,7 @@ class AbstractDataset(object):

      @staticmethod
      @track(_get_logger, activity_type=_INTERNAL_API)
-     def _load(path: str, workspace):
+     def _load(path: str, workspace, dataType=None):
          AbstractDataset._validate_args(path, workspace)

          from azureml.dataprep.api.mltable._mltable_helper import _download_mltable_yaml, _is_tabular,\
@@ -674,11 +674,21 @@ class AbstractDataset(object):
              # path is legacy dataset name, optional_version is the version number
              return AbstractDataset._get_by_name(workspace, path, optional_version)
          elif path_type is _PathType.cloud:
-             try:
-                 local_path = _download_mltable_yaml(path)
-             except MLtable_UserErrorException as exc:
-                 _get_logger().warning('Failed to download mltable yaml with error {}'.format(exc))
-                 raise UserErrorException("Failed to download mltable yaml with error: {}".format(exc))
+             if dataType == "UriFolder" or dataType == "UriFile":
+                 path_type = "folder" if dataType == "UriFolder" else "file"
+                 from azureml.dataprep.rslex import PyRsDataflow
+                 dataflow = dataprep().EnginelessDataflow(PyRsDataflow.from_paths([{path_type: path}]))
+                 from azureml.data.file_dataset import FileDataset
+                 return FileDataset._create(definition=dataflow)
+             elif dataType == "MLTable" or dataType is None:
+                 # assume MLTable if no explicit dataType is provided
+                 try:
+                     local_path = _download_mltable_yaml(path)
+                 except MLtable_UserErrorException as exc:
+                     _get_logger().warning('Failed to download mltable yaml with error {}'.format(exc))
+                     raise UserErrorException("Failed to download mltable yaml with error: {}".format(exc))
+             else:
+                 raise UserErrorException("Asset type {} is not supported by V1 SDK, pleae use V2 SDK".format(dataType))

          import os.path
          local_yaml_path = "{}/MLTable".format(local_path.rstrip("/"))
@@ -913,7 +923,9 @@ class AbstractDataset(object):

          if v2_success:
              # 'dataUri' points to the location of the data dir containing the MLTable file
-             return AbstractDataset._load(path=v2_result.json()['dataVersion']['dataUri'], workspace=workspace)
+             path = v2_result.json()['dataVersion']['dataUri']
+             type = v2_result.json()['dataVersion']['dataType']
+             return AbstractDataset._load(path=path, workspace=workspace, dataType=type)
          else:
              _get_logger().warning('Tried to retrieve v2 data asset but could not find v2 data asset'
                                    'registered with name "{}"{} in the workspace.'
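
In short, _get_by_name now forwards the v2 asset's dataType so that _load can wrap UriFile/UriFolder assets directly instead of always expecting an MLTable. A simplified, self-contained sketch of that dispatch (the real dataflow construction is elided):

    def load_sketch(path, data_type=None):
        # UriFolder / UriFile assets are wrapped directly as a FileDataset over the URI.
        if data_type in ("UriFolder", "UriFile"):
            return "FileDataset({})".format(path)
        # MLTable, or no dataType at all (the pre-1.57 behaviour), still downloads the MLTable yaml.
        if data_type == "MLTable" or data_type is None:
            return "Dataset built from MLTable yaml at {}".format(path)
        # Anything else is a v2-only asset type.
        raise ValueError("Asset type {} is not supported by the v1 SDK".format(data_type))

    print(load_sketch("azureml://datastores/workspaceblobstore/paths/data/", "UriFolder"))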
azureml/data/constants.py CHANGED
@@ -14,6 +14,7 @@ AZURE_DATA_LAKE_GEN2 = "AzureDataLakeGen2"
  HDFS = "Hdfs"
  CUSTOM = "Custom"
  GIT = "Git"
+ ONE_LAKE = "OneLake"

  ACCOUNT_KEY = "AccountKey"
  SAS = "Sas"
azureml/data/context_managers.py CHANGED
@@ -1003,7 +1003,7 @@ class DatasetContextManager(_CommonContextManager):
          import hashlib

          path_elems = os.path.normpath(path).split(os.path.sep)
-         hashed_path_elems = list(map(lambda p: hashlib.md5(bytes(p, encoding='utf-8')).hexdigest(), path_elems))
+         hashed_path_elems = list(map(lambda p: hashlib.sha256(bytes(p, encoding='utf-8')).hexdigest(), path_elems))
          return os.path.join(*hashed_path_elems)


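The context_managers.py change swaps the per-element hash from MD5 to SHA-256; the shape of the hashed path is otherwise unchanged. The hashing step can be reproduced standalone:

    import hashlib
    import os

    path = "workspaceblobstore/some folder/file.csv"
    path_elems = os.path.normpath(path).split(os.path.sep)
    hashed = [hashlib.sha256(p.encode("utf-8")).hexdigest() for p in path_elems]
    print(os.path.join(*hashed))   # one 64-hex-character component per original path element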
azureml/data/dataset_factory.py CHANGED
@@ -364,7 +364,7 @@ class TabularDatasetFactory:
          # merge with set_column_types
          if set_column_types is not None:
              for column in set_column_types.keys():
-                 if column_types_builder.conversion_candidates[column]:
+                 if column in column_types_builder.conversion_candidates:
                      del column_types_builder.conversion_candidates[column]

          dataflow = column_types_builder.to_dataflow()
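
The dataset_factory.py fix replaces a direct index into conversion_candidates with a membership test, so a column named in set_column_types but absent from the inferred candidates no longer raises KeyError. Minimal illustration with stand-in data:

    conversion_candidates = {"age": "int64"}             # stand-in for the builder's inferred candidates
    set_column_types = {"age": None, "zipcode": None}    # user-requested overrides

    for column in set_column_types:
        # old code: `if conversion_candidates[column]:` -> KeyError for "zipcode"
        if column in conversion_candidates:
            del conversion_candidates[column]

    print(conversion_candidates)   # {}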
azureml/data/datastore_client.py CHANGED
@@ -1092,6 +1092,12 @@ class _DatastoreClient:
          if datastore_type == constants.GIT:
              # TODO return GitDatastore for git datastore
              return AbstractDatastore(ws, datastore.name, constants.GIT)
+         if datastore_type == constants.ONE_LAKE:
+             # operations using OneLake datastore types are only supported in v2 sdk
+             # but we're returning an AbstractDatastore to not break datastore list scenarios
+             module_logger.debug(("Datastore {} is a OneLake datastore type. Datastore operations for OneLake types "
+                                  "are only supported in the v2 sdk.").format(datastore.name))
+             return AbstractDatastore(ws, datastore.name, constants.ONE_LAKE)
          raise TypeError("Unsupported Datastore Type: {}".format(datastore.data_store_type))

      @staticmethod
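
With the new ONE_LAKE constant and the extra branch in _DatastoreClient, a workspace containing a OneLake datastore can now be listed without the TypeError an unknown type used to raise; the entry comes back as a plain AbstractDatastore. A hedged usage sketch (assumes a workspace config.json is available):

    from azureml.core import Workspace

    ws = Workspace.from_config()
    for name, datastore in ws.datastores.items():
        # OneLake entries report datastore_type "OneLake"; their data operations remain v2-SDK-only.
        print(name, datastore.datastore_type)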
azureml/data/file_dataset.py CHANGED
@@ -587,13 +587,12 @@ def _get_and_validate_download_list_local(download_records, download_list, targe
              errors.append((resource_identifier, value.errorCode))
          else:
              raise AzureMLException(f'Unexpected error during file download: {value}')
-     actual_download_list = []

      if errors:
          from .dataset_error_handling import _download_error_handler
          # this will throw UserErrorException or AzureMLException based on set of errors encountered
          _download_error_handler(errors, _get_logger())
-     return actual_download_list
+     return downloaded_files


  def _log_and_print_warning(message):
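
The file_dataset.py fix makes the local-download helper return the files it actually downloaded instead of a placeholder list that was always empty. Presumably this is what callers of FileDataset.download observe; a hedged usage sketch (assumes file_dataset is an existing azureml.data.FileDataset):

    local_paths = file_dataset.download(target_path="./data", overwrite=True)
    print(len(local_paths), "files downloaded")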
{azureml_core-1.55.0.post2.dist-info → azureml_core-1.57.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: azureml-core
- Version: 1.55.0.post2
+ Version: 1.57.0
  Summary: Azure Machine Learning core packages, modules, and classes
  Home-page: https://docs.microsoft.com/python/api/overview/azure/ml/?view=azure-ml-py
  Author: Microsoft Corp
@@ -10,47 +10,46 @@ Classifier: Intended Audience :: Developers
  Classifier: Intended Audience :: System Administrators
  Classifier: Programming Language :: Python
  Classifier: Programming Language :: Python :: 3
- Classifier: Programming Language :: Python :: 3.7
  Classifier: Programming Language :: Python :: 3.8
  Classifier: Programming Language :: Python :: 3.9
  Classifier: Programming Language :: Python :: 3.10
- Requires-Python: >=3.7,< 4.0
+ Requires-Python: >=3.8,< 4.0
  Description-Content-Type: text/x-rst
  License-File: LICENSE.txt
  Requires-Dist: pytz
  Requires-Dist: backports.tempfile
- Requires-Dist: pathspec <1.0.0
- Requires-Dist: requests[socks] <3.0.0,>=2.19.1
- Requires-Dist: msal <2.0.0,>=1.15.0
- Requires-Dist: msal-extensions <=2.0.0,>=0.3.0
- Requires-Dist: knack <0.12.0
- Requires-Dist: azure-core <2.0.0
+ Requires-Dist: pathspec<1.0.0
+ Requires-Dist: requests[socks]<3.0.0,>=2.19.1
+ Requires-Dist: msal<2.0.0,>=1.15.0
+ Requires-Dist: msal-extensions<=2.0.0,>=0.3.0
+ Requires-Dist: knack<0.12.0
+ Requires-Dist: azure-core<2.0.0
  Requires-Dist: pkginfo
- Requires-Dist: argcomplete <4
- Requires-Dist: humanfriendly <11.0,>=4.7
- Requires-Dist: paramiko <4.0.0,>=2.0.8
- Requires-Dist: azure-mgmt-resource <=24.0.0,>=15.0.0
- Requires-Dist: azure-mgmt-containerregistry <11,>=8.2.0
- Requires-Dist: azure-mgmt-storage <=22.0.0,>=16.0.0
- Requires-Dist: azure-mgmt-keyvault <11.0.0,>=0.40.0
- Requires-Dist: azure-mgmt-authorization <5,>=0.40.0
- Requires-Dist: azure-mgmt-network <=25.2.0
- Requires-Dist: azure-graphrbac <1.0.0,>=0.40.0
- Requires-Dist: azure-common <2.0.0,>=1.1.12
- Requires-Dist: msrest <=0.7.1,>=0.5.1
- Requires-Dist: msrestazure <=0.6.4,>=0.4.33
- Requires-Dist: urllib3 <3.0.0,>1.26.17
- Requires-Dist: packaging <=24.0,>=20.0
- Requires-Dist: python-dateutil <3.0.0,>=2.7.3
- Requires-Dist: ndg-httpsclient <=0.5.1
- Requires-Dist: SecretStorage <4.0.0
- Requires-Dist: jsonpickle <4.0.0
- Requires-Dist: contextlib2 <22.0.0
- Requires-Dist: docker <8.0.0
- Requires-Dist: PyJWT <3.0.0
- Requires-Dist: adal <=1.2.7,>=1.2.0
- Requires-Dist: pyopenssl <25.0.0
- Requires-Dist: jmespath <2.0.0
+ Requires-Dist: argcomplete<4
+ Requires-Dist: humanfriendly<11.0,>=4.7
+ Requires-Dist: paramiko<4.0.0,>=2.0.8
+ Requires-Dist: azure-mgmt-resource<=24.0.0,>=15.0.0
+ Requires-Dist: azure-mgmt-containerregistry<11,>=8.2.0
+ Requires-Dist: azure-mgmt-storage<=22.0.0,>=16.0.0
+ Requires-Dist: azure-mgmt-keyvault<11.0.0,>=0.40.0
+ Requires-Dist: azure-mgmt-authorization<5,>=0.40.0
+ Requires-Dist: azure-mgmt-network<=26.0.0
+ Requires-Dist: azure-graphrbac<1.0.0,>=0.40.0
+ Requires-Dist: azure-common<2.0.0,>=1.1.12
+ Requires-Dist: msrest<=0.7.1,>=0.5.1
+ Requires-Dist: msrestazure<=0.7,>=0.4.33
+ Requires-Dist: urllib3<3.0.0,>1.26.17
+ Requires-Dist: packaging<=25.0,>=20.0
+ Requires-Dist: python-dateutil<3.0.0,>=2.7.3
+ Requires-Dist: ndg-httpsclient<=0.5.1
+ Requires-Dist: SecretStorage<4.0.0
+ Requires-Dist: jsonpickle<4.0.0
+ Requires-Dist: contextlib2<22.0.0
+ Requires-Dist: docker<8.0.0
+ Requires-Dist: PyJWT<3.0.0
+ Requires-Dist: adal<=1.2.7,>=1.2.0
+ Requires-Dist: pyopenssl<25.0.0
+ Requires-Dist: jmespath<2.0.0


  The azureml-core provides core packages, modules, and classes for Azure Machine Learning and includes the following: