azure-quantum 3.2.0__py3-none-any.whl → 3.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,153 +0,0 @@
- # ------------------------------------
- # Copyright (c) Microsoft Corporation.
- # Licensed under the MIT License.
- # ------------------------------------
- import logging
- import re
- from typing import Optional
- import urllib3
- from azure.core.credentials import AccessToken
- from azure.identity import (
-     AzurePowerShellCredential,
-     EnvironmentCredential,
-     ManagedIdentityCredential,
-     AzureCliCredential,
-     VisualStudioCodeCredential,
-     InteractiveBrowserCredential,
-     DeviceCodeCredential,
-     _internal as AzureIdentityInternals,
- )
- from ._chained import _ChainedTokenCredential
- from ._token import _TokenFileCredential
- from azure.quantum._constants import ConnectionConstants
-
- _LOGGER = logging.getLogger(__name__)
- WWW_AUTHENTICATE_REGEX = re.compile(
-     r"""
-     ^
-     Bearer\sauthorization_uri="
-     https://(?P<authority>[^/]*)/
-     (?P<tenant_id>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})
-     "
-     """,
-     re.VERBOSE | re.IGNORECASE)
- WWW_AUTHENTICATE_HEADER_NAME = "WWW-Authenticate"
-
-
- class _DefaultAzureCredential(_ChainedTokenCredential):
-     """
-     Based on Azure.Identity.DefaultAzureCredential from:
-     https://github.com/Azure/azure-sdk-for-python/blob/master/sdk/identity/azure-identity/azure/identity/_credentials/default.py
-
-     The three key differences are:
-     1) Inherit from _ChainedTokenCredential, which has
-        more aggressive error handling than ChainedTokenCredential.
-     2) Instantiate the internal credentials lazily, on the first call to get_token,
-        so that we can obtain the tenant_id if it was not passed by the user (we don't
-        want to do that in the constructor).
-        We automatically identify the user's tenant_id for a given subscription
-        so that users with MSA accounts don't need to pass it.
-        This is a mitigation for bug https://github.com/Azure/azure-sdk-for-python/issues/18975
-        We need the following parameters to enable auto-detection of the tenant_id:
-        - subscription_id
-        - arm_endpoint (defaults to the production URL "https://management.azure.com/")
-     3) Add a custom TokenFileCredential as the first method to attempt,
-        which will look for a local access token.
-     """
-     def __init__(
-         self,
-         arm_endpoint: str,
-         subscription_id: str,
-         client_id: Optional[str] = None,
-         tenant_id: Optional[str] = None,
-         authority: Optional[str] = None,
-     ):
-         if arm_endpoint is None:
-             raise ValueError("arm_endpoint is a mandatory parameter")
-         if subscription_id is None:
-             raise ValueError("subscription_id is a mandatory parameter")
-
-         self.authority = self._authority_or_default(
-             authority=authority,
-             arm_endpoint=arm_endpoint)
-         self.tenant_id = tenant_id
-         self.subscription_id = subscription_id
-         self.arm_endpoint = arm_endpoint
-         self.client_id = client_id
-         # credentials will be created lazily on the first call to get_token
-         super(_DefaultAzureCredential, self).__init__()
-
-     def _authority_or_default(self, authority: str, arm_endpoint: str):
-         if authority:
-             return AzureIdentityInternals.normalize_authority(authority)
-         if arm_endpoint == ConnectionConstants.ARM_DOGFOOD_ENDPOINT:
-             return ConnectionConstants.DOGFOOD_AUTHORITY
-         return ConnectionConstants.AUTHORITY
-
-     def _initialize_credentials(self):
-         self._discover_tenant_id_(
-             arm_endpoint=self.arm_endpoint,
-             subscription_id=self.subscription_id)
-         credentials = []
-         credentials.append(_TokenFileCredential())
-         credentials.append(EnvironmentCredential())
-         if self.client_id:
-             credentials.append(ManagedIdentityCredential(client_id=self.client_id))
-         if self.authority and self.tenant_id:
-             credentials.append(VisualStudioCodeCredential(authority=self.authority, tenant_id=self.tenant_id))
-             credentials.append(AzureCliCredential(tenant_id=self.tenant_id))
-             credentials.append(AzurePowerShellCredential(tenant_id=self.tenant_id))
-             credentials.append(InteractiveBrowserCredential(authority=self.authority, tenant_id=self.tenant_id))
-             if self.client_id:
-                 credentials.append(DeviceCodeCredential(authority=self.authority, client_id=self.client_id, tenant_id=self.tenant_id))
-         self.credentials = credentials
-
-     def get_token(self, *scopes: str, **kwargs) -> AccessToken:
-         """
-         Request an access token for `scopes`.
-         This method is called automatically by Azure SDK clients.
-
-         :param str scopes: desired scopes for the access token.
-             This method requires at least one scope.
-
-         :raises ~azure.core.exceptions.ClientAuthenticationError: authentication failed.
-             The exception has a `message` attribute listing each authentication
-             attempt and its error message.
-         """
-         # lazy-initialize the credentials
-         if self.credentials is None or len(self.credentials) == 0:
-             self._initialize_credentials()
-
-         return super(_DefaultAzureCredential, self).get_token(*scopes, **kwargs)
-
-     def _discover_tenant_id_(self, arm_endpoint: str, subscription_id: str):
-         """
-         If the tenant_id was not given, try to obtain it
-         by calling the management endpoint for the subscription_id,
-         or by applying default values.
-         """
-         if self.tenant_id:
-             return
-
-         try:
-             url = (
-                 f"{arm_endpoint.rstrip('/')}/subscriptions/"
-                 + f"{subscription_id}?api-version=2018-01-01"
-                 + "&discover-tenant-id"  # used by the test recording infrastructure
-             )
-             http = urllib3.PoolManager()
-             response = http.request(
-                 method="GET",
-                 url=url,
-             )
-             if WWW_AUTHENTICATE_HEADER_NAME in response.headers:
-                 www_authenticate = response.headers[WWW_AUTHENTICATE_HEADER_NAME]
-                 match = re.search(WWW_AUTHENTICATE_REGEX, www_authenticate)
-                 if match:
-                     self.tenant_id = match.group("tenant_id")
-         # pylint: disable=broad-exception-caught
-         except Exception as ex:
-             _LOGGER.error(ex)
-
-         # apply default values
-         self.tenant_id = self.tenant_id or ConnectionConstants.MSA_TENANT_ID
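For context on the removed credential module above: `_discover_tenant_id_` issues an anonymous GET against the ARM endpoint and extracts the tenant from the `WWW-Authenticate` challenge header using `WWW_AUTHENTICATE_REGEX`. A minimal, standalone sketch of that parsing step is shown below; the header value and the GUID are made-up placeholders, not values taken from the package.

```python
import re

# Same pattern as WWW_AUTHENTICATE_REGEX in the removed module, reproduced for illustration.
WWW_AUTHENTICATE_REGEX = re.compile(
    r"""
    ^
    Bearer\sauthorization_uri="
    https://(?P<authority>[^/]*)/
    (?P<tenant_id>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})
    "
    """,
    re.VERBOSE | re.IGNORECASE)

# Hypothetical WWW-Authenticate value returned by an unauthenticated ARM call;
# the authority and tenant GUID below are placeholders.
www_authenticate = (
    'Bearer authorization_uri="https://login.microsoftonline.com/'
    '72f988bf-0000-0000-0000-2d7cd011db47"'
)

match = re.search(WWW_AUTHENTICATE_REGEX, www_authenticate)
if match:
    print(match.group("authority"))   # login.microsoftonline.com
    print(match.group("tenant_id"))   # 72f988bf-0000-0000-0000-2d7cd011db47
```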
@@ -1,83 +0,0 @@
- ##
- # Copyright (c) Microsoft Corporation. All rights reserved.
- # Licensed under the MIT License.
- ##
- import json
- from json.decoder import JSONDecodeError
- import logging
- import os
- import time
-
- from azure.identity import CredentialUnavailableError
- from azure.core.credentials import AccessToken
- from azure.quantum._constants import EnvironmentVariables
-
- _LOGGER = logging.getLogger(__name__)
-
-
- class _TokenFileCredential(object):
-     """
-     Implements a custom TokenCredential to use a local file as
-     the source for an Azure Quantum token.
-
-     It will only use the local file if the AZURE_QUANTUM_TOKEN_FILE
-     environment variable is set and references an existing JSON file
-     that contains the access_token and an expires_on timestamp in milliseconds.
-
-     If the environment variable is not set, the file does not exist,
-     or the token is invalid in any way (expired, for example),
-     then the credential will raise CredentialUnavailableError,
-     so that _ChainedTokenCredential can fall back to other methods.
-     """
-     def __init__(self):
-         self.token_file = os.environ.get(EnvironmentVariables.QUANTUM_TOKEN_FILE)
-         if self.token_file:
-             _LOGGER.debug("Using provided token file location: %s", self.token_file)
-         else:
-             _LOGGER.debug("No token file location provided for %s environment variable.",
-                           EnvironmentVariables.QUANTUM_TOKEN_FILE)
-
-     def get_token(self, *scopes: str, **kwargs) -> AccessToken:  # pylint:disable=unused-argument
-         """Request an access token for `scopes`.
-         This method is called automatically by Azure SDK clients.
-         This method only returns tokens for the https://quantum.microsoft.com/.default scope.
-
-         :param str scopes: desired scopes for the access token.
-
-         :raises ~azure.identity.CredentialUnavailableError:
-             when failing to get the token.
-             The exception has a `message` attribute with the error message.
-         """
-         if not self.token_file:
-             raise CredentialUnavailableError(message="Token file location not set.")
-
-         if not os.path.isfile(self.token_file):
-             raise CredentialUnavailableError(
-                 message=f"Token file at {self.token_file} does not exist.")
-
-         try:
-             token = self._parse_token_file(self.token_file)
-         except JSONDecodeError as exception:
-             raise CredentialUnavailableError(
-                 message="Failed to parse token file: Invalid JSON.") from exception
-         except KeyError as exception:
-             raise CredentialUnavailableError(
-                 message="Failed to parse token file: Missing expected value: "
-                 + str(exception)) from exception
-         except Exception as exception:
-             raise CredentialUnavailableError(
-                 message="Failed to parse token file: " + str(exception)) from exception
-
-         if token.expires_on <= time.time():
-             raise CredentialUnavailableError(
-                 message=f"Token already expired at {time.asctime(time.gmtime(token.expires_on))}")
-
-         return token
-
-     def _parse_token_file(self, path) -> AccessToken:
-         with open(path, mode="r", encoding="utf-8") as file:
-             data = json.load(file)
-             # Convert ms to seconds, since Python's time.time() only handles epoch time in seconds
-             expires_on = int(data["expires_on"]) / 1000
-             token = AccessToken(data["access_token"], expires_on)
-         return token
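The removed `_TokenFileCredential` above documents its expected file format only in prose. A hypothetical sketch of a matching token file and of exercising the credential follows; the import path and the placeholder token value are assumptions, not taken from the package.

```python
import json
import os
import tempfile
import time

from azure.quantum._authentication import _TokenFileCredential  # assumed import path

# Hypothetical token file matching the documented format: an "access_token"
# plus an "expires_on" timestamp expressed in milliseconds.
token_payload = {
    "access_token": "<placeholder-jwt>",             # not a real token
    "expires_on": int((time.time() + 3600) * 1000),  # expires in one hour
}

token_file = os.path.join(tempfile.mkdtemp(), "token.json")
with open(token_file, mode="w", encoding="utf-8") as file:
    json.dump(token_payload, file)

# The credential reads this environment variable at construction time.
os.environ["AZURE_QUANTUM_TOKEN_FILE"] = token_file

credential = _TokenFileCredential()
token = credential.get_token("https://quantum.microsoft.com/.default")
print(token.expires_on)  # epoch seconds, converted from the file's milliseconds
```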
@@ -1 +0,0 @@
- """Defines classes for interacting with Microsoft Elements services"""
@@ -1,6 +0,0 @@
- """Defines classes for interacting with Microsoft Elements DFT service"""
-
- from .target import MicrosoftElementsDft
- from .job import MicrosoftElementsDftJob
-
- __all__ = ["MicrosoftElementsDft", "MicrosoftElementsDftJob"]
@@ -1,171 +0,0 @@
- import collections.abc
- import logging
- from typing import Any, Dict, Union, Optional
- from azure.quantum.job import JobFailedWithResultsError
- from azure.quantum.job.base_job import BaseJob, ContentType
- from azure.quantum.job.job import Job, DEFAULT_TIMEOUT
- from azure.quantum._client.models import JobDetails
- from azure.quantum.workspace import Workspace
-
- logger = logging.getLogger(__name__)
-
- class MicrosoftElementsDftJob(Job):
-     """
-     A dedicated job class for jobs from the microsoft.dft target.
-     """
-
-     def __init__(self, workspace, job_details: JobDetails, **kwargs):
-         """Azure Quantum Job that is submitted to a given Workspace.
-
-         :param workspace: Workspace instance to submit job to
-         :type workspace: Workspace
-         :param job_details: Job details model,
-             contains Job ID, name and other details
-         :type job_details: JobDetails
-         """
-         super().__init__(workspace, job_details, **kwargs)
-
-
-     def get_results(self, timeout_secs: float = DEFAULT_TIMEOUT) -> Dict[str, Any]:
-         """Get job results by downloading the results blob from the
-         storage container linked via the workspace.
-
-         :param timeout_secs: Timeout in seconds, defaults to 300
-         :type timeout_secs: float
-         :raises: :class:`RuntimeError` if job execution failed.
-         :raises: :class:`azure.quantum.job.JobFailedWithResultsError` if job execution failed,
-             but failure results could still be retrieved.
-         :return: Results dictionary.
-         """
-
-         try:
-             job_results = super().get_results(timeout_secs)
-             return job_results
-         except JobFailedWithResultsError as e:
-             failure_results = e.get_failure_results()
-             if MicrosoftElementsDftJob._is_dft_failure_results(failure_results):
-                 error = failure_results["results"][0]["error"]
-                 message = f'{e.get_message()} Error type: {error["error_type"]}. Message: {error["error_message"]}'
-                 raise JobFailedWithResultsError(message, failure_results) from None
-
-
-     @classmethod
-     def _allow_failure_results(cls) -> bool:
-         """
-         Allow downloading job results even if the Job status is "Failed".
-         """
-         return True
-
-
-     @staticmethod
-     def _is_dft_failure_results(failure_results: Union[Dict[str, Any], str]) -> bool:
-         return isinstance(failure_results, dict) \
-             and "results" in failure_results \
-             and isinstance(failure_results["results"], collections.abc.Sequence) \
-             and len(failure_results["results"]) > 0 \
-             and isinstance(failure_results["results"][0], dict) \
-             and "error" in failure_results["results"][0] \
-             and isinstance(failure_results["results"][0]["error"], dict) \
-             and "error_type" in failure_results["results"][0]["error"] \
-             and "error_message" in failure_results["results"][0]["error"]
-
-     @classmethod
-     def from_input_data_container(
-         cls,
-         workspace: "Workspace",
-         name: str,
-         target: str,
-         input_data: bytes,
-         batch_input_blobs: Dict[str, bytes],
-         content_type: ContentType = ContentType.json,
-         blob_name: str = "inputData",
-         encoding: str = "",
-         job_id: str = None,
-         container_name: str = None,
-         provider_id: str = None,
-         input_data_format: str = None,
-         output_data_format: str = None,
-         input_params: Dict[str, Any] = None,
-         session_id: Optional[str] = None,
-         **kwargs
-     ) -> "BaseJob":
-         """Create a new Azure Quantum job based on a list of input_data.
-
-         :param workspace: Azure Quantum workspace to submit the input_data to
-         :type workspace: Workspace
-         :param name: Name of the job
-         :type name: str
-         :param target: Azure Quantum target
-         :type target: str
-         :param input_data: Raw input data (JSON table of contents) to submit
-         :type input_data: bytes
-         :param batch_input_blobs: Dict of QcSchema data where the key is the blob name to store it in the container
-         :type batch_input_blobs: Dict
-         :param blob_name: Blob name to store the main input_data under, defaults to "inputData"
-         :type blob_name: str
-         :param content_type: Content type, e.g. "application/json"
-         :type content_type: ContentType
-         :param encoding: input_data encoding, e.g. "gzip", defaults to empty string
-         :type encoding: str
-         :param job_id: Job ID, defaults to None
-         :type job_id: str
-         :param container_name: Container name, defaults to None
-         :type container_name: str
-         :param provider_id: Provider ID, defaults to None
-         :type provider_id: str
-         :param input_data_format: Input data format, defaults to None
-         :type input_data_format: str
-         :param output_data_format: Output data format, defaults to None
-         :type output_data_format: str
-         :param input_params: Input parameters, defaults to None
-         :type input_params: Dict[str, Any]
-         :param session_id: Session ID, defaults to None
-         :type session_id: Optional[str]
-         :return: Azure Quantum Job
-         :rtype: Job
-         """
-         # Generate job ID if not specified
-         if job_id is None:
-             job_id = cls.create_job_id()
-
-         # Create container if it does not yet exist
-         container_uri = workspace.get_container_uri(
-             job_id=job_id,
-             container_name=container_name
-         )
-         logger.debug(f"Container URI: {container_uri}")
-
-         # Upload the main input data
-         input_data_uri = cls.upload_input_data(
-             container_uri=container_uri,
-             input_data=input_data,
-             content_type=content_type,
-             blob_name=blob_name,
-             encoding=encoding,
-         )
-
-         # Upload the batch input blobs to the container
-         for blob_name, input_data_item in batch_input_blobs.items():
-             cls.upload_input_data(
-                 container_uri=container_uri,
-                 input_data=input_data_item,
-                 content_type=content_type,
-                 blob_name=blob_name,
-                 encoding=encoding,
-             )
-
-         # Create and submit job
-         return cls.from_storage_uri(
-             workspace=workspace,
-             job_id=job_id,
-             target=target,
-             input_data_uri=input_data_uri,
-             container_uri=container_uri,
-             name=name,
-             input_data_format=input_data_format,
-             output_data_format=output_data_format,
-             provider_id=provider_id,
-             input_params=input_params,
-             session_id=session_id,
-             **kwargs
-         )
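To close, a hedged usage sketch of the removed `from_input_data_container` classmethod, based only on the signature and docstring above. The workspace identifiers, data-format strings, table-of-contents layout, and import path are placeholders and assumptions rather than values documented by the package.

```python
import json

from azure.quantum import Workspace
from azure.quantum.target.microsoft.elements.dft import MicrosoftElementsDftJob  # assumed import path

# Hypothetical workspace; all resource identifiers are placeholders.
workspace = Workspace(
    subscription_id="<subscription-id>",
    resource_group="<resource-group>",
    name="<workspace-name>",
    location="<location>",
)

# The main input blob acts as the table of contents, while batch_input_blobs
# holds the per-molecule QcSchema payloads keyed by blob name.
toc = json.dumps({"input_files": ["molecule-0.json"]}).encode("utf-8")  # assumed layout
batch_blobs = {
    "molecule-0.json": json.dumps({"schema_name": "qcschema_input"}).encode("utf-8"),
}

job = MicrosoftElementsDftJob.from_input_data_container(
    workspace=workspace,
    name="dft-batch-example",
    target="microsoft.dft",
    input_data=toc,
    batch_input_blobs=batch_blobs,
    input_data_format="<input-data-format>",    # assumed; not documented in this diff
    output_data_format="<output-data-format>",  # assumed; not documented in this diff
)
results = job.get_results()
```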