azure-quantum 0.29.2__py3-none-any.whl → 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. azure/quantum/_client/_version.py +1 -1
  2. azure/quantum/cirq/service.py +7 -0
  3. azure/quantum/cirq/targets/quantinuum.py +1 -1
  4. azure/quantum/job/__init__.py +1 -0
  5. azure/quantum/job/base_job.py +41 -15
  6. azure/quantum/job/job.py +35 -1
  7. azure/quantum/job/job_failed_with_results_error.py +41 -0
  8. azure/quantum/qiskit/backends/backend.py +130 -35
  9. azure/quantum/qiskit/backends/ionq.py +65 -5
  10. azure/quantum/qiskit/backends/qci.py +35 -2
  11. azure/quantum/qiskit/backends/quantinuum.py +25 -4
  12. azure/quantum/qiskit/backends/rigetti.py +8 -1
  13. azure/quantum/qiskit/job.py +7 -16
  14. azure/quantum/qiskit/provider.py +18 -2
  15. azure/quantum/storage.py +2 -1
  16. azure/quantum/target/__init__.py +1 -0
  17. azure/quantum/target/ionq.py +37 -12
  18. azure/quantum/target/microsoft/elements/dft/__init__.py +4 -0
  19. azure/quantum/target/microsoft/elements/dft/job.py +46 -0
  20. azure/quantum/target/microsoft/elements/dft/target.py +66 -0
  21. azure/quantum/target/microsoft/target.py +36 -9
  22. azure/quantum/target/params.py +1 -1
  23. azure/quantum/target/pasqal/target.py +16 -2
  24. azure/quantum/target/quantinuum.py +34 -9
  25. azure/quantum/target/rigetti/target.py +21 -3
  26. azure/quantum/target/solvers.py +7 -1
  27. azure/quantum/target/target.py +82 -0
  28. azure/quantum/target/target_factory.py +0 -2
  29. azure/quantum/version.py +1 -1
  30. azure/quantum/workspace.py +11 -8
  31. {azure_quantum-0.29.2.dist-info → azure_quantum-1.0.0.dist-info}/METADATA +3 -5
  32. azure_quantum-1.0.0.dist-info/RECORD +86 -0
  33. azure/quantum/_client/aio/__init__.py +0 -23
  34. azure/quantum/_client/aio/_client.py +0 -124
  35. azure/quantum/_client/aio/_configuration.py +0 -89
  36. azure/quantum/_client/aio/_patch.py +0 -20
  37. azure/quantum/_client/aio/operations/__init__.py +0 -29
  38. azure/quantum/_client/aio/operations/_operations.py +0 -1291
  39. azure/quantum/_client/aio/operations/_patch.py +0 -20
  40. azure/quantum/aio/__init__.py +0 -14
  41. azure/quantum/aio/_authentication/__init__.py +0 -9
  42. azure/quantum/aio/_authentication/_chained.py +0 -94
  43. azure/quantum/aio/_authentication/_default.py +0 -212
  44. azure/quantum/aio/_authentication/_token.py +0 -81
  45. azure/quantum/aio/job/__init__.py +0 -1
  46. azure/quantum/aio/job/base_job.py +0 -326
  47. azure/quantum/aio/job/job.py +0 -104
  48. azure/quantum/aio/optimization/__init__.py +0 -11
  49. azure/quantum/aio/optimization/online_problem.py +0 -17
  50. azure/quantum/aio/optimization/problem.py +0 -102
  51. azure/quantum/aio/optimization/streaming_problem.py +0 -280
  52. azure/quantum/aio/storage.py +0 -390
  53. azure/quantum/aio/target/__init__.py +0 -19
  54. azure/quantum/aio/target/ionq.py +0 -47
  55. azure/quantum/aio/target/quantinuum.py +0 -47
  56. azure/quantum/aio/target/solvers.py +0 -96
  57. azure/quantum/aio/target/target.py +0 -68
  58. azure/quantum/aio/target/target_factory.py +0 -72
  59. azure/quantum/aio/target/toshiba.py +0 -6
  60. azure/quantum/aio/workspace.py +0 -337
  61. azure_quantum-0.29.2.dist-info/RECORD +0 -110
  62. {azure_quantum-0.29.2.dist-info → azure_quantum-1.0.0.dist-info}/WHEEL +0 -0
  63. {azure_quantum-0.29.2.dist-info → azure_quantum-1.0.0.dist-info}/top_level.txt +0 -0
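
The headline change in 1.0.0 is the removal of the entire asynchronous API surface: every module under azure.quantum._client.aio and azure.quantum.aio (files 33-60 above) is deleted, while the Microsoft Elements DFT target (files 18-20) is new. As a rough migration sketch, code that previously awaited the async client moves to the synchronous Workspace; the constructor arguments and job id below are placeholders, not values taken from this diff.

# Hypothetical migration target: the synchronous client that remains in 1.0.0.
from azure.quantum import Workspace

workspace = Workspace(
    subscription_id="<subscription-id>",
    resource_group="<resource-group>",
    name="<workspace-name>",
    location="<location>",
)

job = workspace.get_job("<job-id>")  # plain call; no `await` in 1.0.0
print(job.details.status)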
azure/quantum/aio/optimization/streaming_problem.py
@@ -1,280 +0,0 @@
- ##
- # Copyright (c) Microsoft Corporation. All rights reserved.
- # Licensed under the MIT License.
- ##
-
- import logging
- import sys
- import gzip
- import io
-
- from asyncio import sleep
-
- from typing import TYPE_CHECKING, Dict, List, Optional, Union
- from azure.quantum.optimization import Term
- from azure.quantum.aio.optimization import Problem
- from azure.quantum.aio.storage import (
-     StreamedBlob,
-     ContainerClient,
-     BlobClient,
-     download_blob,
- )
- from azure.quantum.optimization.streaming_problem import StreamingProblem as SyncStreamingProblem
- from azure.quantum.optimization.streaming_problem import JsonStreamingProblemUploader as SyncJsonStreamingProblemUploader
- from azure.quantum.aio.storage import StreamedBlobState
- from azure.quantum.aio.optimization.problem import ProblemType
-
- from asyncio import create_task
- from queue import Empty
-
- if TYPE_CHECKING:
-     from azure.quantum.aio.workspace import Workspace
-
-
- logger = logging.getLogger(__name__)
-
- __all__ = ["StreamingProblem"]
-
-
- class StreamingProblem(SyncStreamingProblem):
-     """Problem to be streamed to the service.
-
-     Streaming problems are uploaded on the fly as terms are added,
-     meaning that the whole problem representation is not kept in memory. This
-     is very useful when constructing large problems.
-
-     :param workspace: Workspace to upload problem to
-     :type workspace: Workspace
-     :param name: Problem name
-     :type name: str
-     :param terms: Problem terms, depending on solver. Defaults to None
-     :type terms: Optional[List[Term]], optional
-     :param init_config: Optional configuration details,
-         depending on solver. Defaults to None
-     :type init_config: Optional[Dict[str,int]], optional
-     :param problem_type: Problem type (ProblemType.pubo or
-         ProblemType.ising), defaults to ProblemType.ising
-     :type problem_type: ProblemType, optional
-     """
-     @classmethod
-     async def create(
-         cls,
-         workspace: "Workspace",
-         name: str = "Optimization Problem",
-         terms: Optional[List[Term]] = None,
-         init_config: Optional[Dict[str, int]] = None,
-         problem_type: "ProblemType" = ProblemType.ising,
-         metadata: Dict[str, str] = {},
-         **kw,
-     ):
-         problem = cls(
-             workspace,
-             name,
-             None, # terms need to be added manually later so it can be awaited
-             init_config,
-             problem_type,
-             metadata,
-             **kw
-         )
-         if terms is not None and len(terms) > 0:
-             await problem.add_terms(terms.copy())
-         return problem
-
-     async def add_term(self, c: Union[int, float], indices: List[int]):
-         """Adds a single term to the `Problem`
-         representation and queues it to be uploaded
-
-         :param c: The cost or weight of this term
-         :type c: int, float
-         :param indices: The variable indices that are in this term
-         :type indices: List[int]
-         """
-         await self.add_terms([Term(indices=indices, c=c)])
-
-     async def _get_upload_coords(self):
-         blob_name = self.id
-         if self.upload_to_url:
-             blob_client = BlobClient.from_blob_url(self.upload_to_url)
-             container_client = ContainerClient.from_container_url(
-                 await self.workspace._get_linked_storage_sas_uri(
-                     blob_client.container_name
-                 )
-             )
-             blob_name = blob_client.blob_name
-             await blob_client.close()
-         elif not self.workspace.storage:
-             # No storage account is passed, use the linked one
-             container_uri = await self.workspace._get_linked_storage_sas_uri(self.id)
-             container_client = ContainerClient.from_container_url(
-                 container_uri
-             )
-         else:
-             # Use the specified storage account
-             container_client = ContainerClient.from_connection_string(
-                 self.workspace.storage, self.id
-             )
-
-         return {"blob_name": blob_name, "container_client": container_client}
-
-     async def add_terms(self, terms: List[Term]):
-         """Adds a list of terms to the `Problem`
-         representation and queues them to be uploaded
-
-         :param terms: The list of terms to add to the problem
-         """
-         if self.uploaded_uri is not None:
-             raise Exception("Cannot add terms after problem has been uploaded")
-
-         if terms is not None:
-             if self.uploader is None:
-                 upload_coords = await self._get_upload_coords()
-                 self.uploader = JsonStreamingProblemUploader(
-                     problem=self,
-                     container=upload_coords["container_client"],
-                     name=upload_coords["blob_name"],
-                     upload_size_threshold=self.upload_size_threshold,
-                     upload_term_threshold=self.upload_terms_threshold,
-                 )
-
-             max_coupling = -sys.float_info.max
-             min_coupling = sys.float_info.max
-             for term in terms:
-                 if isinstance(term, Term):
-                     n = len(term.ids)
-                     max_coupling = max(max_coupling, n)
-                     min_coupling = min(min_coupling, n)
-                     self.__n_couplers += n
-                     self.stats["num_terms"] += 1
-                 else:
-                     raise Exception(
-                         "Unsupported statistics in streamingproblem for TermBase subclass {}.".format(type(term))
-                     )
-
-             self.stats["avg_coupling"] = (
-                 self.__n_couplers / self.stats["num_terms"]
-             )
-             if self.stats["max_coupling"] < max_coupling:
-                 self.stats["max_coupling"] = max_coupling
-             if self.stats["min_coupling"] > min_coupling:
-                 self.stats["min_coupling"] = min_coupling
-
-             await self.uploader.upload(terms)
-
-     async def download(self):
-         """Downloads the uploaded problem as an instance of `Problem`"""
-         if not self.uploaded_uri:
-             raise Exception(
-                 "StreamingProblem may not be downloaded before it is uploaded"
-             )
-
-         coords = await self._get_upload_coords()
-         blob = coords["container_client"].get_blob_client(coords["blob_name"])
-         contents = await download_blob(blob.url)
-         return Problem.deserialize(contents, self.name)
-
-     async def upload(
-         self,
-         workspace=None
-     ):
-         """Uploads an optimization problem instance
-         to the cloud storage linked with the Workspace.
-
-         :return: uri of the uploaded problem
-         """
-         if self.uploader is None:
-             raise RuntimeError("You must add terms before uploading")
-
-         if self.uploaded_uri:
-             raise RuntimeError("Problem has already been uploaded.")
-
-         if workspace and self.workspace != workspace:
-             raise RuntimeError("Workspace must match workspace provided in constructor.")
-
-         if not self.uploaded_uri:
-             self.uploader.blob_properties = {
-                 k: str(v) for k, v in {**self.stats, **self.metadata}.items()
-             }
-             await self.uploader.finish_upload()
-             blob = self.uploader.blob
-             self.uploaded_uri = blob.getUri(self.workspace.storage)
-             self.uploader = None
-             self.terms_queue = None
-
-         return self.uploaded_uri
-
-
- class JsonStreamingProblemUploader(SyncJsonStreamingProblemUploader):
-     """Helper class for uploading json problem files in chunks.
-
-     :param problem: Back-ref to the problem being uploaded
-     :param container: Reference to the container
-         client in which to store the problem
-     :param name: Name of the problem (added to blob metadata)
-     :param upload_size_threshold: Chunking threshold (in bytes).
-         Once the internal buffer reaches this size, the chunk will be uploaded.
-     :param upload_term_threshold: Chunking threshold (in terms).
-         Once this many terms are ready to be uploaded, the chunk will be uploaded.
-     :param blob_properties: Properties to set on the blob.
-     """
-     def __init__(
-         self,
-         problem: StreamingProblem,
-         container: ContainerClient,
-         name: str,
-         upload_size_threshold: int,
-         upload_term_threshold: int,
-         blob_properties: Dict[str, str] = None,
-     ):
-         self.problem = problem
-         self.started_upload = False
-         self.blob = StreamedBlob(
-             container,
-             name,
-             "application/json",
-             self._get_content_type(),
-         )
-         self.compressedStream = io.BytesIO()
-         self.compressor = (
-             gzip.GzipFile(mode="wb", fileobj=self.compressedStream)
-         )
-         self.uploaded_terms = 0
-         self.blob_properties = blob_properties
-         self.__thread = None
-         self.__queue_wait_timeout = 1
-         self.__upload_terms_threshold = upload_term_threshold
-         self.__upload_size_threshold = upload_size_threshold
-         self.__read_pos = 0
-
-     def is_done(self):
-         """True if the thread uploader has completed"""
-         return self.blob.state == StreamedBlobState.committed
-
-     async def _upload_start(self, terms):
-         self.started_upload = True
-         await self._upload_chunk(
-             f'{{"cost_function":{{"version":"{self._get_version()}",'
-             + f'"type":"{self._scrub(self.problem.problem_type.name)}",'
-             + self._get_initial_config_string()
-             + '"terms":['
-             + self._get_terms_string(terms)
-         )
-
-     async def upload(self, terms):
-         if not self.started_upload:
-             await self._upload_start(terms)
-         else:
-             await self._upload_chunk(self._get_terms_string(terms))
-
-     async def _upload_chunk(self, chunk: str, is_final: bool = False):
-         compressed = self._maybe_compress_bits(chunk.encode(), is_final)
-         if compressed is None:
-             return
-         if len(compressed) > 0:
-             await self.blob.upload_data(compressed)
-
-     async def finish_upload(self):
-         if not self.started_upload:
-             await self._upload_start([])
-
-         await self._upload_chunk(f'{"]}}"}', True)
-         await self.blob.commit(metadata=self.blob_properties)
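
For reference, the removed module was typically driven as below: a sketch reconstructed from the deleted code above, valid only against azure-quantum < 1.0.0. The workspace argument is a placeholder; StreamingProblem, Term, create, add_terms, and upload all come from the diff.

import asyncio

from azure.quantum.aio.workspace import Workspace
from azure.quantum.aio.optimization.streaming_problem import StreamingProblem
from azure.quantum.optimization import Term

async def main():
    # Placeholder workspace setup; real arguments depend on the Azure account.
    workspace = Workspace(resource_id="<workspace-resource-id>")
    # Terms are added after construction so each chunked upload can be awaited.
    problem = await StreamingProblem.create(workspace, name="large-problem")
    await problem.add_terms([Term(indices=[0, 1], c=1), Term(indices=[1, 2], c=-2)])
    print(await problem.upload())  # URI of the streamed problem blob

asyncio.run(main())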
azure/quantum/aio/storage.py
@@ -1,390 +0,0 @@
- ##
- # Copyright (c) Microsoft Corporation. All rights reserved.
- # Licensed under the MIT License.
- ##
- import logging
- from typing import Any, Dict
- from azure.core import exceptions
- from azure.storage.blob.aio import (
-     BlobServiceClient,
-     ContainerClient,
-     BlobClient,
-     BlobType,
- )
- from azure.storage.blob import (
-     BlobSasPermissions,
-     ContentSettings,
-     generate_blob_sas,
-     generate_container_sas,
- )
- from datetime import datetime, timedelta
- from enum import Enum
-
- logger = logging.getLogger(__name__)
-
-
- async def create_container(
-     connection_string: str, container_name: str
- ) -> ContainerClient:
-     """
-     Creates and initialize a container; returns the client needed to access it.
-     """
-     blob_service_client = BlobServiceClient.from_connection_string(
-         connection_string
-     )
-     logger.info(
-         f'{"Initializing storage client for account:"}'
-         + f"{blob_service_client.account_name}"
-     )
-
-     container_client = blob_service_client.get_container_client(container_name)
-     await create_container_using_client(container_client)
-     return container_client
-
-
- async def create_container_using_client(container_client: ContainerClient):
-     """
-     Creates and initializes a container.
-     """
-     container_exists = await container_client.exists()
-     if not container_exists:
-         logger.debug(
-             f'{" - uploading to **new** container:"}'
-             f"{container_client.container_name}"
-         )
-         await container_client.create_container()
-
-
- async def get_container_uri(connection_string: str, container_name: str) -> str:
-     """
-     Creates and initialize a container;
-     returns a URI with a SAS read/write token to access it.
-     """
-     container = await create_container(connection_string, container_name)
-     logger.info(
-         f'{"Creating SAS token for container"}'
-         + f"'{container_name}' on account: '{container.account_name}'"
-     )
-
-     sas_token = generate_container_sas(
-         container.account_name,
-         container.container_name,
-         account_key=container.credential.account_key,
-         permission=BlobSasPermissions(
-             read=True, add=True, write=True, create=True
-         ),
-         expiry=datetime.utcnow() + timedelta(days=14),
-     )
-
-     uri = container.url + "?" + sas_token
-     logger.debug(f" - container url: '{uri}'.")
-     return uri
-
-
- async def upload_blob(
-     container: ContainerClient,
-     blob_name: str,
-     content_type: str,
-     content_encoding: str,
-     data: Any,
-     return_sas_token: bool = True,
- ) -> str:
-     """
-     Uploads the given data to a blob record.
-     If a blob with the given name already exist, it throws an error.
-
-     Returns a uri with a SAS token to access the newly created blob.
-     """
-     await create_container_using_client(container)
-     logger.info(
-         f"Uploading blob '{blob_name}'"
-         + f"to container '{container.container_name}'"
-         + f"on account: '{container.account_name}'"
-     )
-
-     content_settings = ContentSettings(
-         content_type=content_type, content_encoding=content_encoding
-     )
-     blob = container.get_blob_client(blob_name)
-     await blob.upload_blob(data, content_settings=content_settings)
-     logger.debug(f" - blob '{blob_name}' uploaded. generating sas token.")
-
-     if return_sas_token:
-         uri = get_blob_uri_with_sas_token(blob)
-     else:
-         uri = remove_sas_token(blob.url)
-
-     logger.debug(f" - blob access url: '{uri}'.")
-     await blob.close()
-
-     return uri
-
-
- async def append_blob(
-     container: ContainerClient,
-     blob_name: str,
-     content_type: str,
-     content_encoding: str,
-     data: Any,
-     return_sas_token: bool = True,
-     metadata: Dict[str, str] = None,
- ) -> str:
-     """
-     Uploads the given data to a blob record.
-     If a blob with the given name already exist, it throws an error.
-
-     Returns a uri with a SAS token to access the newly created blob.
-     """
-     await create_container_using_client(container)
-     logger.info(
-         f"Appending data to blob '{blob_name}'"
-         + f"in container '{container.container_name}'"
-         + f"on account: '{container.account_name}'"
-     )
-
-     content_settings = ContentSettings(
-         content_type=content_type, content_encoding=content_encoding
-     )
-     blob = container.get_blob_client(blob_name)
-     try:
-         props = await blob.get_blob_properties()
-         if props.blob_type != BlobType.AppendBlob:
-             raise Exception("blob must be an append blob")
-     except exceptions.ResourceNotFoundError:
-         props = await blob.create_append_blob(
-             content_settings=content_settings, metadata=metadata
-         )
-
-     await blob.append_block(data, len(data))
-     logger.debug(f" - blob '{blob_name}' appended. generating sas token.")
-
-     if return_sas_token:
-         uri = get_blob_uri_with_sas_token(blob)
-     else:
-         uri = remove_sas_token(blob.url)
-
-     logger.debug(f" - blob access url: '{uri}'.")
-     await blob.close()
-
-     return uri
-
-
- def get_blob_uri_with_sas_token(blob: BlobClient):
-     """Returns a URI for the given blob that contains a SAS Token"""
-     sas_token = generate_blob_sas(
-         blob.account_name,
-         blob.container_name,
-         blob.blob_name,
-         account_key=blob.credential.account_key,
-         permission=BlobSasPermissions(read=True),
-         expiry=datetime.utcnow() + timedelta(days=14),
-     )
-
-     return blob.url + "?" + sas_token
-
-
- async def download_blob(blob_url: str) -> Any:
-     """
-     Downloads the given blob from the container.
-     """
-     blob_client = BlobClient.from_blob_url(blob_url)
-     logger.info(
-         f"Downloading blob '{blob_client.blob_name}'"
-         + f"from container '{blob_client.container_name}'"
-         + f"on account: '{blob_client.account_name}'"
-     )
-
-     response = await (await blob_client.download_blob()).readall()
-     logger.debug(response)
-     await blob_client.close()
-
-     return response
-
-
- async def download_blob_properties(blob_url: str) -> Dict[str, str]:
-     """Downloads the blob properties from Azure for the given blob URI"""
-     blob_client = BlobClient.from_blob_url(blob_url)
-     logger.info(
-         f"Downloading blob properties '{blob_client.blob_name}'"
-         + f"from container '{blob_client.container_name}'"
-         + f"on account: '{blob_client.account_name}'"
-     )
-
-     response = await blob_client.get_blob_properties()
-     logger.debug(response)
-
-     return response
-
-
- async def download_blob_metadata(blob_url: str) -> Dict[str, str]:
-     """Downloads the blob metadata from the
-     blob properties in Azure for the given blob URI"""
-     return (await download_blob_properties(blob_url)).metadata
-
-
- async def set_blob_metadata(blob_url: str, metadata: Dict[str, str]):
-     """Sets the provided dictionary as the metadata on the Azure blob"""
-     blob_client = BlobClient.from_blob_url(blob_url)
-     logger.info(
-         f"Setting blob properties '{blob_client.blob_name}'"
-         + f"from container '{blob_client.container_name}' on account:"
-         + f"'{blob_client.account_name}'"
-     )
-     return await blob_client.set_blob_metadata(metadata=metadata)
-
-
- def remove_sas_token(sas_uri: str) -> str:
-     """Removes the SAS Token from the given URI if it contains one"""
-     index = sas_uri.find("?")
-     if index != -1:
-         sas_uri = sas_uri[0:index]
-
-     return sas_uri
-
-
- async def init_blob_for_streaming_upload(
-     container: ContainerClient,
-     blob_name: str,
-     content_type: str,
-     content_encoding: str,
-     data: Any,
-     return_sas_token: bool = True,
- ) -> str:
-     """
-     Uploads the given data to a blob record.
-     If a blob with the given name already exist, it throws an error.
-
-     Returns a uri with a SAS token to access the newly created blob.
-     """
-     await create_container_using_client(container)
-     logger.info(
-         f"Streaming blob '{blob_name}'"
-         + f"to container '{container.container_name}' on account:"
-         + f"'{container.account_name}'"
-     )
-
-     content_settings = ContentSettings(
-         content_type=content_type, content_encoding=content_encoding
-     )
-     blob = container.get_blob_client(blob_name)
-     await blob.stage_block()
-     await blob.commit_block_list()
-     await blob.upload_blob(data, content_settings=content_settings)
-     logger.debug(f" - blob '{blob_name}' uploaded. generating sas token.")
-
-     if return_sas_token:
-         sas_token = generate_blob_sas(
-             blob.account_name,
-             blob.container_name,
-             blob.blob_name,
-             account_key=blob.credential.account_key,
-             permission=BlobSasPermissions(read=True),
-             expiry=datetime.utcnow() + timedelta(days=14),
-         )
-
-         uri = blob.url + "?" + sas_token
-     else:
-         uri = remove_sas_token(blob.url)
-
-     logger.debug(f" - blob access url: '{uri}'.")
-
-     return uri
-
-
- class StreamedBlobState(str, Enum):
-     not_initialized = 0
-     uploading = 1
-     committed = 2
-
-
- class StreamedBlob:
-     """Class that provides a state machine for writing
-     blobs using the Azure Block Blob API
-
-     Internally implements a state machine for uploading blob data.
-     To use, start calling `upload_data()`
-     to add data blocks. Each call to `upload_data()`
-     will synchronously upload an individual block to Azure.
-     Once all blocks have been added, call `commit()`
-     to commit the blocks and make the blob available/readable.
-
-     :param container: The container client that the blob will be uploaded to
-     :param blob_name: The name of the blob
-         (including optional path) within the blob container
-     :param content_type: The HTTP content type to apply to the blob metadata
-     :param content_encoding: The HTTP
-         content encoding to apply to the blob metadata
-     """
-
-     def __init__(
-         self,
-         container: ContainerClient,
-         blob_name: str,
-         content_type: str,
-         content_encoding: str,
-     ):
-         self.container = container
-         self.blob_name = blob_name
-         self.content_settings = ContentSettings(
-             content_type=content_type, content_encoding=content_encoding
-         )
-         self.state = StreamedBlobState.not_initialized
-         self.blob = container.get_blob_client(blob_name)
-         self.blocks = []
-
-     async def upload_data(self, data):
-         """Synchronously uploads a block to the given block blob in Azure
-
-         :param data: The data to be uploaded as a block.
-         :type data: Union[Iterable[AnyStr], IO[AnyStr]]
-         """
-         if self.state == StreamedBlobState.not_initialized:
-             await create_container_using_client(self.container)
-             logger.info(
-                 f"Streaming blob '{self.blob_name}' to container"
-                 + f"'{self.container.container_name}'"
-                 + f"on account: '{self.container.account_name}'"
-             )
-             self.initialized = True
-
-         self.state = StreamedBlobState.uploading
-         id = self._get_next_block_id()
-         logger.debug(f"Uploading block '{id}' to {self.blob_name}")
-         await self.blob.stage_block(id, data, length=len(data))
-         self.blocks.append(id)
-
-     async def commit(self, metadata: Dict[str, str] = None):
-         """Synchronously commits all previously
-         uploaded blobs to the block blob
-
-         :param metadata: Optional dictionary of
-             metadata to be applied to the block blob
-         """
-         if self.state == StreamedBlobState.not_initialized:
-             raise Exception("StreamedBlob cannot commit before uploading data")
-         elif self.state == StreamedBlobState.committed:
-             raise Exception("StreamedBlob is already committed")
-
-         logger.debug(f"Committing {len(self.blocks)} blocks {self.blob_name}")
-         await self.blob.commit_block_list(
-             self.blocks,
-             content_settings=self.content_settings,
-             metadata=metadata,
-         )
-         self.state = StreamedBlobState.committed
-         await self.container.close()
-         await self.blob.close()
-         logger.debug(f"Committed {self.blob_name}")
-
-     def getUri(self, with_sas_token: bool = False):
-         """Gets the full Azure Storage URI for the
-         uploaded blob after it has been committed"""
-         if self.state != StreamedBlobState.committed:
-             raise Exception("Can only retrieve sas token for committed blob")
-         if with_sas_token:
-             return get_blob_uri_with_sas_token(self.blob)
-
-         return remove_sas_token(self.blob.url)
-
-     def _get_next_block_id(self):
-         return f"{len(self.blocks):10}"
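
The StreamedBlob class above is a thin state machine over the standard block-blob protocol: each chunk is staged with stage_block and the blob only becomes readable once commit_block_list runs. Below is a minimal synchronous sketch of the same pattern with the public azure-storage-blob API; the blob URL is a placeholder, and BlobBlock wraps the ids for the commit call.

from azure.storage.blob import BlobBlock, BlobClient

blob = BlobClient.from_blob_url("<blob-url-with-sas>")

committed = []
for i, chunk in enumerate([b'{"terms":[', b"...", b"]}"]):
    block_id = f"{i:10}"  # fixed-width ids, mirroring _get_next_block_id above
    blob.stage_block(block_id, chunk, length=len(chunk))  # staged, not yet visible
    committed.append(BlobBlock(block_id=block_id))

blob.commit_block_list(committed)  # blob becomes readable only after this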
azure/quantum/aio/target/__init__.py
@@ -1,19 +0,0 @@
- ##
- # Copyright (c) Microsoft Corporation. All rights reserved.
- # Licensed under the MIT License.
- ##
- from azure.quantum.aio.target.target import Target
- from azure.quantum.aio.target.solvers import Solver
- from .toshiba import (
-     SimulatedBifurcationMachine
- )
- from .ionq import IonQ
- from .quantinuum import Quantinuum
-
- # Default targets to use when there is no target class
- # associated with a given target ID
- DEFAULT_TARGETS = {
-     "ionq": IonQ,
-     "quantinuum": Quantinuum,
-     "toshiba": Solver
- }
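
The deleted DEFAULT_TARGETS table was a fallback from provider id to target class for targets without a dedicated class. A small sketch of that lookup pattern using the synchronous target classes that remain in 1.0.0; resolve_target_class is illustrative, not a library function, and the sketch assumes azure.quantum.target still re-exports Target, IonQ, and Quantinuum as in prior releases.

from azure.quantum.target import IonQ, Quantinuum, Target

# Mirrors the removed mapping (the toshiba entry is omitted in this sketch).
DEFAULT_TARGETS = {
    "ionq": IonQ,
    "quantinuum": Quantinuum,
}

def resolve_target_class(provider_id: str) -> type:
    # Fall back to the generic Target when a provider has no dedicated class.
    return DEFAULT_TARGETS.get(provider_id, Target)

assert resolve_target_class("ionq") is IonQ
assert resolve_target_class("unknown-provider") is Target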