azure-quantum 0.30.0__py3-none-any.whl → 1.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- azure/quantum/_client/_version.py +1 -1
- azure/quantum/cirq/service.py +7 -0
- azure/quantum/cirq/targets/quantinuum.py +1 -1
- azure/quantum/job/job.py +15 -1
- azure/quantum/qiskit/backends/backend.py +130 -35
- azure/quantum/qiskit/backends/ionq.py +65 -5
- azure/quantum/qiskit/backends/qci.py +35 -2
- azure/quantum/qiskit/backends/quantinuum.py +25 -4
- azure/quantum/qiskit/backends/rigetti.py +8 -1
- azure/quantum/qiskit/job.py +7 -16
- azure/quantum/qiskit/provider.py +18 -2
- azure/quantum/target/ionq.py +37 -12
- azure/quantum/target/microsoft/elements/dft/target.py +13 -1
- azure/quantum/target/microsoft/target.py +36 -9
- azure/quantum/target/params.py +1 -1
- azure/quantum/target/pasqal/target.py +16 -2
- azure/quantum/target/quantinuum.py +34 -9
- azure/quantum/target/rigetti/target.py +15 -2
- azure/quantum/target/solvers.py +7 -1
- azure/quantum/target/target.py +82 -0
- azure/quantum/target/target_factory.py +0 -2
- azure/quantum/version.py +1 -1
- azure/quantum/workspace.py +11 -8
- {azure_quantum-0.30.0.dist-info → azure_quantum-1.0.0.dist-info}/METADATA +3 -5
- {azure_quantum-0.30.0.dist-info → azure_quantum-1.0.0.dist-info}/RECORD +27 -55
- azure/quantum/_client/aio/__init__.py +0 -23
- azure/quantum/_client/aio/_client.py +0 -124
- azure/quantum/_client/aio/_configuration.py +0 -89
- azure/quantum/_client/aio/_patch.py +0 -20
- azure/quantum/_client/aio/operations/__init__.py +0 -29
- azure/quantum/_client/aio/operations/_operations.py +0 -1291
- azure/quantum/_client/aio/operations/_patch.py +0 -20
- azure/quantum/aio/__init__.py +0 -14
- azure/quantum/aio/_authentication/__init__.py +0 -9
- azure/quantum/aio/_authentication/_chained.py +0 -94
- azure/quantum/aio/_authentication/_default.py +0 -212
- azure/quantum/aio/_authentication/_token.py +0 -81
- azure/quantum/aio/job/__init__.py +0 -1
- azure/quantum/aio/job/base_job.py +0 -326
- azure/quantum/aio/job/job.py +0 -104
- azure/quantum/aio/optimization/__init__.py +0 -11
- azure/quantum/aio/optimization/online_problem.py +0 -17
- azure/quantum/aio/optimization/problem.py +0 -102
- azure/quantum/aio/optimization/streaming_problem.py +0 -280
- azure/quantum/aio/storage.py +0 -390
- azure/quantum/aio/target/__init__.py +0 -19
- azure/quantum/aio/target/ionq.py +0 -47
- azure/quantum/aio/target/quantinuum.py +0 -47
- azure/quantum/aio/target/solvers.py +0 -96
- azure/quantum/aio/target/target.py +0 -68
- azure/quantum/aio/target/target_factory.py +0 -72
- azure/quantum/aio/target/toshiba.py +0 -6
- azure/quantum/aio/workspace.py +0 -337
- {azure_quantum-0.30.0.dist-info → azure_quantum-1.0.0.dist-info}/WHEEL +0 -0
- {azure_quantum-0.30.0.dist-info → azure_quantum-1.0.0.dist-info}/top_level.txt +0 -0
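The headline change in 1.0.0 is the removal of the experimental asynchronous surface: the generated `azure/quantum/_client/aio` client and the entire `azure.quantum.aio` package (workspace, storage, jobs, optimization problems, and targets) are deleted, while the synchronous modules gain functionality. For readers migrating off the async API, the sketch below shows the synchronous flow that remains. It is a minimal example, not taken from this diff; the workspace identifiers, target name, and circuit payload are placeholders.

```python
# Minimal synchronous submission sketch (assumes the azure-quantum 1.0 public
# API: Workspace, Workspace.get_targets, Target.submit, Job.get_results).
# All identifiers below are placeholders.
from azure.quantum import Workspace

workspace = Workspace(
    resource_id="/subscriptions/<sub-id>/resourceGroups/<rg>/providers/"
                "Microsoft.Quantum/Workspaces/<workspace-name>",
    location="eastus",
)

target = workspace.get_targets("ionq.simulator")
job = target.submit(
    input_data={"qubits": 1, "circuit": [{"gate": "h", "target": 0}]},
    name="sample-job",
)
job.wait_until_completed()   # blocking poll; replaces the removed awaitable variant
print(job.get_results())
```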
azure/quantum/aio/job/base_job.py
DELETED

```diff
@@ -1,326 +0,0 @@
-##
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-##
-import logging
-
-from urllib.parse import urlparse
-from typing import Any, Dict, Optional, TYPE_CHECKING
-from urllib.parse import urlparse
-from azure.storage.blob import BlobClient
-
-from azure.quantum.aio.storage import upload_blob, download_blob, ContainerClient
-from azure.quantum._client.models import JobDetails
-from azure.quantum.job.job import BaseJob as SyncBaseJob, ContentType
-
-
-if TYPE_CHECKING:
-    from azure.quantum.aio.workspace import Workspace
-
-
-logger = logging.getLogger(__name__)
-
-DEFAULT_TIMEOUT = 300 # Default timeout for waiting for job to complete
-
-
-class BaseJob(SyncBaseJob):
-    # Optionally override these to create a Provider-specific Job subclass
-    """
-    Base job class with methods to create a job from raw blob data,
-    upload blob data and download results.
-    """
-    @classmethod
-    async def from_input_data(
-        cls,
-        workspace: "Workspace",
-        name: str,
-        target: str,
-        input_data: bytes,
-        content_type: str,
-        blob_name: str = "inputData",
-        encoding: str = "",
-        job_id: str = None,
-        container_name: str = None,
-        provider_id: str = None,
-        input_data_format: str = None,
-        output_data_format: str = None,
-        input_params: Dict[str, Any] = None,
-        **kwargs
-    ) -> "BaseJob":
-        """Create a new Azure Quantum job based on a raw input_data payload.
-
-        :param workspace: Azure Quantum workspace to submit the input_data to
-        :type workspace: "Workspace"
-        :param name: Name of the job
-        :type name: str
-        :param target: Azure Quantum target
-        :type target: str
-        :param input_data: Raw input data to submit
-        :type input_data: bytes
-        :param blob_name: Input data blob name, defaults to "inputData"
-        :type blob_name: str
-        :param content_type: Content type, e.g. "application/json"
-        :type content_type: str
-        :param encoding: input_data encoding, e.g. "gzip", defaults to empty string
-        :type encoding: str
-        :param job_id: Job ID, defaults to None
-        :type job_id: str, optional
-        :param container_name: Container name, defaults to None
-        :type container_name: str
-        :param provider_id: Provider ID, defaults to None
-        :type provider_id: str, optional
-        :param input_data_format: Input data format, defaults to None
-        :type input_data_format: str, optional
-        :param output_data_format: Output data format, defaults to None
-        :type output_data_format: str, optional
-        :param input_params: Input parameters, defaults to None
-        :type input_params: Dict[str, Any], optional
-        :param input_params: Input params for job
-        :type input_params: Dict[str, Any]
-        :return: Azure Quantum Job
-        :rtype: Job
-        """
-        # Generate job ID if not specified
-        if job_id is None:
-            job_id = cls.create_job_id()
-
-        # Create container if it does not yet exist
-        container_uri = await workspace.get_container_uri(
-            job_id=job_id,
-            container_name=container_name
-        )
-        logger.debug(f"Container URI: {container_uri}")
-
-        # Upload data to container
-        input_data_uri = await cls.upload_input_data(
-            container_uri=container_uri,
-            input_data=input_data,
-            content_type=content_type,
-            blob_name=blob_name,
-            encoding=encoding
-        )
-
-        # Create and submit job
-        return await cls.from_storage_uri(
-            workspace=workspace,
-            job_id=job_id,
-            target=target,
-            input_data_uri=input_data_uri,
-            container_uri=container_uri,
-            name=name,
-            input_data_format=input_data_format,
-            output_data_format=output_data_format,
-            provider_id=provider_id,
-            input_params=input_params,
-            **kwargs
-        )
-
-    @classmethod
-    async def from_storage_uri(
-        cls,
-        workspace: "Workspace",
-        name: str,
-        target: str,
-        input_data_uri: str,
-        provider_id: str,
-        input_data_format: str,
-        output_data_format: str,
-        container_uri: str = None,
-        job_id: str = None,
-        input_params: Dict[str, Any] = None,
-        submit_job: bool = True,
-        **kwargs
-    ) -> "BaseJob":
-        """Create new Job from URI if input data is already uploaded
-        to blob storage
-
-        :param workspace: Azure Quantum workspace to submit the blob to
-        :type workspace: "Workspace"
-        :param name: Job name
-        :type name: str
-        :param target: Azure Quantum target
-        :type target: str
-        :param input_data_uri: Input data URI
-        :type input_data_uri: str
-        :param provider_id: Provider ID
-        :type provider_id: str, optional
-        :param input_data_format: Input data format
-        :type input_data_format: str, optional
-        :param output_data_format: Output data format
-        :type output_data_format: str, optional
-        :param container_uri: Container URI, defaults to None
-        :type container_uri: str
-        :param job_id: Pre-generated job ID, defaults to None
-        :type job_id: str
-        :param input_params: Input parameters, defaults to None
-        :type input_params: Dict[str, Any], optional
-        :param submit_job: If job should be submitted to the service, defaults to True
-        :type submit_job: bool
-        :return: Job instance
-        :rtype: Job
-        """
-        # Generate job_id, input_params, data formats and provider ID if not specified
-        if job_id is None:
-            job_id = cls.create_job_id()
-        if input_params is None:
-            input_params = {}
-
-        # Create container for output data if not specified
-        if container_uri is None:
-            container_uri = await workspace.get_container_uri(job_id=job_id)
-
-        # Create job details and return Job
-        details = JobDetails(
-            id=job_id,
-            name=name,
-            container_uri=container_uri,
-            input_data_format=input_data_format,
-            output_data_format=output_data_format,
-            input_data_uri=input_data_uri,
-            provider_id=provider_id,
-            target=target,
-            input_params=input_params,
-            **kwargs
-        )
-        job = cls(workspace, details, **kwargs)
-
-        logger.info(
-            f"Submitting problem '{name}'. \
-                Using payload from: '{job.details.input_data_uri}'"
-        )
-
-        if submit_job:
-            logger.debug(f"==> submitting: {job.details}")
-            await job.submit()
-
-        return job
-
-    @staticmethod
-    async def upload_input_data(
-        container_uri: str,
-        input_data: bytes,
-        content_type: Optional[ContentType] = ContentType.json,
-        blob_name: str = "inputData",
-        encoding: str = "",
-        return_sas_token: bool = False
-    ) -> str:
-        """Upload input data file
-
-        :param container_uri: Container URI
-        :type container_uri: str
-        :param input_data: Input data in binary format
-        :type input_data: bytes
-        :param content_type: Content type, e.g. "application/json"
-        :type content_type: str
-        :param blob_name: Blob name, defaults to "inputData"
-        :type blob_name: str, optional
-        :param encoding: Encoding, e.g. "gzip", defaults to ""
-        :type encoding: str, optional
-        :param return_sas_token: Flag to return SAS token as part of URI, defaults to False
-        :type return_sas_token: bool, optional
-        :return: Uploaded data URI
-        :rtype: str
-        """
-        container_client = ContainerClient.from_container_url(
-            container_uri
-        )
-
-        uploaded_blob_uri = await upload_blob(
-            container_client,
-            blob_name,
-            content_type,
-            encoding,
-            input_data,
-            return_sas_token=return_sas_token
-        )
-
-        await container_client.close()
-        return uploaded_blob_uri
-
-    async def download_data(self, blob_uri: str) -> dict:
-        """Download file from blob uri
-
-        :param blob_uri: Blob URI
-        :type blob_uri: str
-        :return: Payload from blob
-        :rtype: dict
-        """
-        url = urlparse(blob_uri)
-        if url.query.find("se=") == -1:
-            # blob_uri does not contains SAS token,
-            # get sas url from service
-            blob_client = BlobClient.from_blob_url(
-                blob_uri
-            )
-            blob_uri = await self.workspace._get_linked_storage_sas_uri(
-                blob_client.container_name, blob_client.blob_name
-            )
-            payload = await download_blob(blob_uri)
-            await blob_client.close()
-        else:
-            # blob_uri contains SAS token, use it
-            payload = await download_blob(blob_uri)
-
-        return payload
-
-    async def upload_attachment(
-        self,
-        blob_name: str,
-        data: bytes,
-        container_uri: str = None,
-        **kwargs
-    ) -> str:
-        """Uploads an attachment to the job's container file. Attachment's are identified by name.
-        Uploading to an existing attachment overrides its previous content.
-
-        :param name: Attachment name
-        :type name: str
-        :param data: Attachment data in binary format
-        :type input_data: bytes
-        :param container_uri: Container URI, defaults to the job's linked container.
-        :type container_uri: str, Optional
-
-        :return: Uploaded data URI
-        :rtype: str
-        """
-
-        # Use Job's default container if not specified
-        if container_uri is None:
-            container_uri = await self.workspace.get_container_uri(job_id=self.id)
-
-        uploaded_blob_uri = await self.upload_input_data(
-            container_uri = container_uri,
-            blob_name = blob_name,
-            input_data = data,
-            **kwargs
-        )
-        return uploaded_blob_uri
-
-    async def download_attachment(
-        self,
-        name: str,
-        container_uri: str = None
-    ):
-        """ Downloads an attachment from job's container in Azure Storage. Attachments are blobs of data
-        created as part of the Job's execution, or they can be created by uploading directly from Python
-        using the upload_attachment method.
-
-        :param name: Attachment name
-        :type name: str
-        :param container_uri: Container URI, defaults to the job's linked container.
-        :type container_uri: str, Optional
-
-        :return: Attachment data
-        :rtype: bytes
-        """
-        # Use Job's default container if not specified
-        if container_uri is None:
-            container_uri = await self.workspace.get_container_uri(job_id=self.id)
-
-        container_client = ContainerClient.from_container_url(container_uri)
-        blob_client = container_client.get_blob_client(name)
-        response = await (await blob_client.download_blob()).readall()
-        await blob_client.close()
-
-        return response
-
```
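The async `BaseJob` above was a thin awaitable wrapper over the synchronous `BaseJob` it imports from `azure.quantum.job.job`: generate a job ID, resolve a container URI, upload the input blob, then build `JobDetails` and submit. That synchronous base class keeps the equivalent classmethods, so the removed helper maps onto a blocking call with the same parameters. The example below is a hypothetical equivalent, assuming the sync signature mirrors the async one shown above; the target, provider, data-format strings, and payload are placeholders.

```python
# Hypothetical synchronous counterpart of the removed async from_input_data.
# Parameter names follow the async signature in the diff above; the concrete
# values (target, provider_id, formats, payload) are placeholders.
from azure.quantum import Workspace
from azure.quantum.job.job import Job

workspace = Workspace(resource_id="<resource-id>", location="<location>")

job = Job.from_input_data(
    workspace=workspace,
    name="sample-job",
    target="ionq.simulator",
    input_data=b'{"qubits": 1, "circuit": []}',
    content_type="application/json",
    input_data_format="ionq.circuit.v1",
    output_data_format="ionq.quantum-results.v1",
    provider_id="ionq",
)
results = job.get_results()   # downloads the output blob once the job succeeds
```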
azure/quantum/aio/job/job.py
DELETED

```diff
@@ -1,104 +0,0 @@
-##
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-##
-
-import asyncio
-import logging
-import json
-import time
-
-from azure.quantum.aio.job.base_job import BaseJob, DEFAULT_TIMEOUT
-from azure.quantum.job.job import Job as SyncJob
-from azure.quantum.job.filtered_job import FilteredJob
-
-__all__ = ["Job"]
-
-logger = logging.getLogger(__name__)
-
-_log = logging.getLogger(__name__)
-
-
-class Job(BaseJob, SyncJob, FilteredJob):
-    """Azure Quantum Job that is submitted to a given Workspace.
-
-    :param workspace: Workspace instance to submit job to
-    :type workspace: Workspace
-    :param job_details: Job details model,
-        contains Job ID, name and other details
-    :type job_details: JobDetails
-    """
-    async def submit(self):
-        """Submit a job to Azure Quantum."""
-        _log.debug(f"Submitting job with ID {self.id}")
-        job = await self.workspace.submit_job(self)
-        self.details = job.details
-
-    async def refresh(self):
-        """Refreshes the Job's details by querying the workspace."""
-        self.details = (await self.workspace.get_job(self.id)).details
-
-    async def wait_until_completed(
-        self,
-        max_poll_wait_secs=30,
-        timeout_secs=None,
-        print_progress=True
-    ) -> None:
-        """Keeps refreshing the Job's details
-        until it reaches a finished status.
-
-        :param max_poll_wait_secs: Maximum poll wait time, defaults to 30
-        :type max_poll_wait_secs: int, optional
-        :param timeout_secs: Timeout in seconds, defaults to None
-        :type timeout_secs: int, optional
-        :param print_progress: Print "." to stdout to display progress
-        :type print_progress: bool, optional
-        :raises TimeoutError: If the total poll time exceeds timeout, raise
-        """
-        await self.refresh()
-        poll_wait = SyncJob._default_poll_wait
-        start_time = time.time()
-        while not self.has_completed():
-            if timeout_secs is not None and (time.time() - start_time) >= timeout_secs:
-                raise TimeoutError(f"The wait time has exceeded {timeout_secs} seconds.")
-
-            logger.debug(
-                f"Waiting for job {self.id},"
-                + f"it is in status '{self.details.status}'"
-            )
-            if print_progress:
-                print(".", end="", flush=True)
-            await asyncio.sleep(poll_wait)
-            await self.refresh()
-            poll_wait = (
-                max_poll_wait_secs
-                if poll_wait >= max_poll_wait_secs
-                else poll_wait * 1.5
-            )
-
-    async def get_results(self, timeout_secs: float = DEFAULT_TIMEOUT) -> dict:
-        """Get job results by downloading the results blob from the
-        storage container linked via the workspace.
-
-        :param timeout_secs: Timeout in seconds, defaults to 300
-        :type timeout_secs: int
-        :raises RuntimeError: [description]
-        :return: [description]
-        :rtype: dict
-        """
-        if self.results is not None:
-            return self.results
-
-        if not self.has_completed():
-            await self.wait_until_completed(timeout_secs=timeout_secs)
-
-        if not self.details.status == "Succeeded":
-            raise RuntimeError(
-                f'{"Cannot retrieve results as job execution failed"}'
-                + f"(status: {self.details.status}."
-                + f"error: {self.details.error_data})"
-            )
-
-        payload = await self.download_data(self.details.output_data_uri)
-        results = json.loads(payload.decode("utf8"))
-        return results
```
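The polling loop in the removed `wait_until_completed` (and in its synchronous counterpart) backs off geometrically: the wait starts at `SyncJob._default_poll_wait`, is multiplied by 1.5 after each refresh, is capped at `max_poll_wait_secs`, and a `TimeoutError` is raised once the elapsed time exceeds `timeout_secs`. The stand-alone sketch below reproduces just that schedule; the 1-second starting value is an assumption standing in for `_default_poll_wait`.

```python
# Back-off schedule used by wait_until_completed, in isolation: multiply the
# wait by 1.5 each round, cap it at max_poll_wait_secs, stop at timeout_secs.
from typing import List

def poll_schedule(max_poll_wait_secs: float = 30.0,
                  timeout_secs: float = 300.0,
                  initial_wait: float = 1.0) -> List[float]:
    waits: List[float] = []
    elapsed, wait = 0.0, initial_wait
    while elapsed < timeout_secs:
        waits.append(wait)
        elapsed += wait
        wait = max_poll_wait_secs if wait >= max_poll_wait_secs else wait * 1.5
    return waits

print(poll_schedule()[:6])   # [1.0, 1.5, 2.25, 3.375, 5.0625, 7.59375]
```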
azure/quantum/aio/optimization/__init__.py
DELETED

```diff
@@ -1,11 +0,0 @@
-# coding=utf-8
-##
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-##
-
-from .problem import *
-from .online_problem import *
-from .streaming_problem import *
-from .online_problem import *
-from azure.quantum.aio.target import Solver
```
azure/quantum/aio/optimization/online_problem.py
DELETED

```diff
@@ -1,17 +0,0 @@
-import logging
-from typing import TYPE_CHECKING
-from azure.quantum.aio.optimization import Problem
-from azure.quantum.optimization import OnlineProblem as SyncOnlineProblem
-
-logger = logging.getLogger(__name__)
-
-__all__ = ["OnlineProblem"]
-
-if TYPE_CHECKING:
-    from azure.quantum.aio.workspace import Workspace
-
-
-class OnlineProblem(SyncOnlineProblem):
-    async def download(self, workspace: "Workspace") -> Problem:
-        logger.info("The problem will be downloaded to the client")
-        return await Problem.download(self, workspace)
```
azure/quantum/aio/optimization/problem.py
DELETED

```diff
@@ -1,102 +0,0 @@
-##
-# Copyright (c) Microsoft Corporation. All rights reserved.
-# Licensed under the MIT License.
-##
-
-from __future__ import annotations
-import logging
-from typing import TYPE_CHECKING
-
-from azure.quantum.aio.storage import (
-    ContainerClient,
-    download_blob,
-    BlobClient
-)
-from azure.quantum.aio.job.job import Job
-from azure.quantum.optimization import Problem as SyncProblem
-from azure.quantum.optimization import ProblemType
-
-logger = logging.getLogger(__name__)
-
-__all__ = ["Problem", "ProblemType"]
-
-if TYPE_CHECKING:
-    from azure.quantum.aio.workspace import Workspace
-
-
-class Problem(SyncProblem):
-    """Problem to submit to the service.
-
-    :param name: Problem name
-    :type name: str
-    :param terms: Problem terms, depending on solver.
-        Defaults to None
-    :type terms: Optional[List[Term]], optional
-    :param init_config: Optional configuration details, depending on solver.
-        Defaults to None
-    :type init_config: Optional[Dict[str,int]], optional
-    :param problem_type: Problem type (ProblemType.pubo or
-        ProblemType.ising), defaults to ProblemType.ising
-    :type problem_type: ProblemType, optional
-    """
-    async def upload(
-        self,
-        workspace: "Workspace",
-        container_name: str = "optimization-problems",
-        blob_name: str = "inputData",
-        container_uri: str = None
-    ):
-        """Uploads an optimization problem instance to
-        the cloud storage linked with the Workspace.
-
-        :param workspace: interaction terms of the problem.
-        :type workspace: Workspace
-        :param container_name: Container name, defaults to "optimization-problems"
-        :type container_name: str, optional
-        :param blob_name: Blob name, defaults to None
-        :type blob_name: str, optional
-        :param container_uri: Optional container URI
-        :type container_uri: str
-        :return: uri of the uploaded problem
-        :rtype: str
-        """
-        blob_params = [workspace, container_name, blob_name]
-        if self.uploaded_blob_uri and self.uploaded_blob_params == blob_params:
-            return self.uploaded_blob_uri
-
-        if blob_name is None:
-            blob_name = self._blob_name()
-
-        encoding = "gzip"
-        blob = self.to_blob()
-        if container_uri is None:
-            container_uri = await workspace.get_container_uri(
-                container_name=container_name
-            )
-        input_data_uri = await Job.upload_input_data(
-            input_data=blob,
-            blob_name=blob_name,
-            container_uri=container_uri,
-            encoding=encoding,
-            content_type="application/json"
-        )
-        self.uploaded_blob_params = blob_params
-        self.uploaded_blob_uri = input_data_uri
-        return input_data_uri
-
-    async def download(self, workspace: "Workspace"):
-        """Downloads the uploaded problem as an instance of `Problem`"""
-        if not self.uploaded_blob_uri:
-            raise Exception(
-                "Problem may not be downloaded before it is uploaded"
-            )
-        blob_client = BlobClient.from_blob_url(self.uploaded_blob_uri)
-        container_client = ContainerClient.from_container_url(
-            await workspace._get_linked_storage_sas_uri(
-                blob_client.container_name
-            )
-        )
-        blob_name = blob_client.blob_name
-        blob = container_client.get_blob_client(blob_name)
-        contents = await download_blob(blob.url)
-        return Problem.deserialize(contents, self.name)
```
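A design note on the removed `Problem.upload`: it memoizes the upload, returning the cached `uploaded_blob_uri` whenever the `(workspace, container_name, blob_name)` triple matches the previous call, and gzip-encodes the serialized problem before uploading. The fragment below isolates that caching rule; `uploader` is a placeholder callable standing in for the SDK's blob-upload helper, not an azure-quantum API.

```python
# Isolated illustration of the upload-caching rule from the removed
# Problem.upload: only re-upload when the destination triple changes.
from typing import Any, Callable, List, Optional

class CachedUploader:
    def __init__(self) -> None:
        self.uploaded_blob_params: Optional[List[Any]] = None
        self.uploaded_blob_uri: Optional[str] = None

    def upload(self, workspace: Any, container_name: str, blob_name: str,
               uploader: Callable[[], str]) -> str:
        blob_params = [workspace, container_name, blob_name]
        if self.uploaded_blob_uri and self.uploaded_blob_params == blob_params:
            return self.uploaded_blob_uri      # same destination: reuse cached URI
        uri = uploader()                       # placeholder for the real blob upload
        self.uploaded_blob_params = blob_params
        self.uploaded_blob_uri = uri
        return uri
```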