azure-quantum 1.0.1__py3-none-any.whl → 1.1.0__py3-none-any.whl

This diff reflects the changes between publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.
@@ -1,387 +0,0 @@
1
- ##
2
- # Copyright (c) Microsoft Corporation. All rights reserved.
3
- # Licensed under the MIT License.
4
- ##
5
-
6
- import logging
7
- import uuid
8
- import io
9
- import gzip
10
- import json
11
- import threading
12
- import sys
13
-
14
- from typing import List, Union, Dict, Optional
15
- from azure.quantum import Workspace
16
- from azure.quantum.optimization import Term, Problem, ProblemType
17
- from azure.quantum.storage import (
18
- StreamedBlob,
19
- ContainerClient,
20
- BlobClient,
21
- download_blob,
22
- )
23
- from queue import Queue, Empty
24
-
25
- logger = logging.getLogger(__name__)
26
-
27
- __all__ = ["StreamingProblem"]
28
-
29
-
30
- class StreamingProblem(object):
31
- """Problem to be streamed to the service.
32
-
33
- Streaming problems are uploaded on the fly as terms are added,
34
- meaning that the whole problem representation is not kept in memory. This
35
- is very useful when constructing large problems.
36
-
37
- :param workspace: Workspace to upload problem to
38
- :type workspace: Workspace
39
- :param name: Problem name
40
- :type name: str
41
- :param terms: Problem terms, depending on solver. Defaults to None
42
- :type terms: Optional[List[Term]], optional
43
- :param init_config: Optional configuration details,
44
- depending on solver. Defaults to None
45
- :type init_config: Optional[Dict[str,int]], optional
46
- :param problem_type: Problem type (ProblemType.pubo or
47
- ProblemType.ising), defaults to ProblemType.ising
48
- :type problem_type: ProblemType, optional
49
- """
50
-
51
- def __init__(
52
- self,
53
- workspace: Workspace,
54
- name: str = "Optimization Problem",
55
- terms: Optional[List[Term]] = None,
56
- init_config: Optional[Dict[str, int]] = None,
57
- problem_type: ProblemType = ProblemType.ising,
58
- metadata: Dict[str, str] = {},
59
- **kw,
60
- ):
61
- super(StreamingProblem, self).__init__(**kw)
62
- self.name = name
63
- self._id = str(uuid.uuid1())
64
- self.workspace = workspace
65
- self.problem_type = problem_type
66
- self.init_config = init_config
67
- self.terms_queue = Queue()
68
- self.uploaded_uri = None
69
- self.upload_to_url = None
70
- self.uploader = None
71
- self.__n_couplers = 0
72
- self.stats = {
73
- "type": problem_type.name,
74
- "max_coupling": 0,
75
- "avg_coupling": 0,
76
- "min_coupling": sys.maxsize,
77
- "num_terms": 0,
78
- }
79
- self.upload_size_threshold = 10e6
80
- self.upload_terms_threshold = 1000
81
- self.metadata = metadata
82
- if terms is not None and len(terms) > 0:
83
- self.add_terms(terms.copy())
84
-
85
- @property
86
- def id(self):
87
- return self._id
88
-
89
- def add_term(self, c: Union[int, float], indices: List[int]):
90
- """Adds a single monomial term to the `Problem`
91
- representation and queues it to be uploaded
92
-
93
- :param c: The cost or weight of this term
94
- :type c: int, float
95
- :param indices: The variable indices that are in this term
96
- :type indices: List[int]
97
- """
98
- self.add_terms([Term(indices=indices, c=c)])
99
-
100
- def _get_upload_coords(self):
101
- blob_name = self.id
102
- if self.upload_to_url:
103
- blob_client = BlobClient.from_blob_url(self.upload_to_url)
104
- container_client = ContainerClient.from_container_url(
105
- self.workspace._get_linked_storage_sas_uri(
106
- blob_client.container_name
107
- )
108
- )
109
- blob_name = blob_client.blob_name
110
- elif not self.workspace.storage:
111
- # No storage account is passed, use the linked one
112
- container_uri = self.workspace._get_linked_storage_sas_uri(self.id)
113
- container_client = ContainerClient.from_container_url(
114
- container_uri
115
- )
116
- else:
117
- # Use the specified storage account
118
- container_client = ContainerClient.from_connection_string(
119
- self.workspace.storage, self.id
120
- )
121
-
122
- return {"blob_name": blob_name, "container_client": container_client}
123
-
124
- def add_terms(
125
- self,
126
- terms: List[Term]
127
- ):
128
- """Adds a list of terms to the `Problem`
129
- representation and queues them to be uploaded. Only monomial `Term` objects are supported.
130
-
131
- :param terms: The list of terms to add to the problem
132
- """
133
- if self.uploaded_uri is not None:
134
- raise Exception("Cannot add terms after problem has been uploaded")
135
-
136
- if terms is not None:
137
- if self.uploader is None:
138
- upload_coords = self._get_upload_coords()
139
- self.uploader = JsonStreamingProblemUploader(
140
- problem=self,
141
- container=upload_coords["container_client"],
142
- name=upload_coords["blob_name"],
143
- upload_size_threshold=self.upload_size_threshold,
144
- upload_term_threshold=self.upload_terms_threshold,
145
- )
146
- self.uploader.start()
147
- elif self.uploader.is_done():
148
- raise Exception(
149
- "Cannot add terms after problem has been uploaded"
150
- )
151
-
152
- max_coupling = -sys.float_info.max
153
- min_coupling = sys.float_info.max
154
- for term in terms:
155
- if isinstance(term, Term):
156
- n = len(term.ids)
157
- max_coupling = max(max_coupling, n)
158
- min_coupling = min(min_coupling, n)
159
- self.__n_couplers += n
160
- self.stats["num_terms"] += 1
161
- else:
162
- raise Exception(
163
- "Unsupported statistics in streamingproblem for TermBase subclass {}.".format(type(term))
164
- )
165
-
166
- self.stats["avg_coupling"] = (
167
- self.__n_couplers / self.stats["num_terms"]
168
- )
169
- if self.stats["max_coupling"] < max_coupling:
170
- self.stats["max_coupling"] = max_coupling
171
- if self.stats["min_coupling"] > min_coupling:
172
- self.stats["min_coupling"] = min_coupling
173
- self.terms_queue.put(terms)
174
-
175
- def download(self):
176
- """Downloads the uploaded problem as an instance of `Problem`"""
177
- if not self.uploaded_uri:
178
- raise Exception(
179
- "StreamingProblem may not be downloaded before it is uploaded"
180
- )
181
-
182
- coords = self._get_upload_coords()
183
- blob = coords["container_client"].get_blob_client(coords["blob_name"])
184
- contents = download_blob(blob.url)
185
- return Problem.deserialize(contents, self.name)
186
-
187
- def upload(
188
- self,
189
- workspace,
190
- container_name: str = "optimization-problems",
191
- blob_name: str = None,
192
- ):
193
- """Uploads an optimization problem instance
194
- to the cloud storage linked with the Workspace.
195
-
196
- :param workspace: The Workspace whose linked storage receives the problem.
197
- :return: uri of the uploaded problem
198
- """
199
- if not self.uploaded_uri:
200
- self.uploader.blob_properties = {
201
- k: str(v) for k, v in {**self.stats, **self.metadata}.items()
202
- }
203
- self.terms_queue.put(None)
204
- blob = self.uploader.join()
205
- self.uploaded_uri = blob.getUri(bool(self.workspace.storage))
206
- self.uploader = None
207
- self.terms_queue = None
208
-
209
- return self.uploaded_uri
210
-
211
-
212
- class JsonStreamingProblemUploader:
213
- """Helper class for uploading json problem files in chunks.
214
-
215
- :param problem: Back-ref to the problem being uploaded
216
- :param container: Reference to the container
217
- client in which to store the problem
218
- :param name: Name of the problem (added to blob metadata)
219
- :param upload_size_threshold: Chunking threshold (in bytes).
220
- Once the internal buffer reaches this size, the chunk will be uploaded.
221
- :param upload_term_threshold: Chunking threshold (in terms).
222
- Once this many terms are ready to be uploaded, the chunk will be uploaded.
223
- :param blob_properties: Properties to set on the blob.
224
- """
225
-
226
- def __init__(
227
- self,
228
- problem: StreamingProblem,
229
- container: ContainerClient,
230
- name: str,
231
- upload_size_threshold: int,
232
- upload_term_threshold: int,
233
- blob_properties: Dict[str, str] = None,
234
- ):
235
- self.problem = problem
236
- self.started_upload = False
237
- self.blob = StreamedBlob(
238
- container,
239
- name,
240
- "application/json",
241
- self._get_content_type(),
242
- )
243
- self.compressedStream = io.BytesIO()
244
- self.compressor = (
245
- gzip.GzipFile(mode="wb", fileobj=self.compressedStream)
246
- )
247
- self.uploaded_terms = 0
248
- self.blob_properties = blob_properties
249
- self.__thread = None
250
- self.__queue_wait_timeout = 1
251
- self.__upload_terms_threshold = upload_term_threshold
252
- self.__upload_size_threshold = upload_size_threshold
253
- self.__read_pos = 0
254
-
255
- def _get_content_type(self):
256
- return "gzip"
257
-
258
- def start(self):
259
- """Starts the problem uploader in another thread"""
260
- if self.__thread is not None:
261
- raise Exception(
262
- "JsonStreamingProblemUploader thread already started"
263
- )
264
-
265
- self.__thread = threading.Thread(target=self._run_queue)
266
- self.__thread.start()
267
-
268
- def join(self, timeout: float = None) -> StreamedBlob:
269
- """Joins the problem uploader thread -
270
- returning when it completes or when `timeout` is hit
271
-
272
- :param timeout: The time to wait for the thread to complete.
273
- If omitted, the method will wait until the thread completes
274
- """
275
- if self.__thread is None:
276
- raise Exception("JsonStreamingProblemUploader has not started")
277
-
278
- self.__thread.join(timeout=timeout)
279
- return self.blob
280
-
281
- def is_done(self):
282
- """True if the thread uploader has completed"""
283
- return not self.__thread.is_alive()
284
-
285
- def _run_queue(self):
286
- continue_processing = True
287
- terms = []
288
- while continue_processing:
289
- try:
290
- new_terms = self.problem.terms_queue.get(
291
- block=True, timeout=self.__queue_wait_timeout
292
- )
293
- if new_terms is None:
294
- continue_processing = False
295
- else:
296
- terms = terms + new_terms
297
- if len(terms) < self.__upload_terms_threshold:
298
- continue
299
- except Empty:
300
- pass
301
- except Exception as e:
302
- raise e
303
-
304
- if len(terms) > 0:
305
- self._upload_next(terms)
306
- terms = []
307
-
308
- self._finish_upload()
309
-
310
- def _upload_start(self, terms):
311
- self.started_upload = True
312
- self._upload_chunk(
313
- f'{{"cost_function":{{"version":"{self._get_version()}",'
314
- + f'"type":"{self._scrub(self.problem.problem_type.name)}",'
315
- + self._get_initial_config_string()
316
- + '"terms":['
317
- + self._get_terms_string(terms)
318
- )
319
-
320
- def _get_initial_config_string(self):
321
- if self.problem.init_config:
322
- return (
323
- f'{"initial_configuration":}'
324
- + json.dumps(self.problem.init_config)
325
- + ","
326
- )
327
- return ""
328
-
329
- def _get_version(self):
330
- return "1.1" if self.problem.init_config else "1.0"
331
-
332
- def _get_terms_string(self, terms):
333
- result = ("," if self.uploaded_terms > 0 else "") + ",".join(
334
- [json.dumps(term.to_dict()) for term in terms]
335
- )
336
- self.uploaded_terms += len(terms)
337
- return result
338
-
339
- def _scrub(self, s):
340
- if '"' in s:
341
- raise "string should not contain a literal double quote '\"'"
342
-
343
- return s
344
-
345
- def _upload_next(self, terms):
346
- if not self.started_upload:
347
- self._upload_start(terms)
348
- else:
349
- self._upload_chunk(self._get_terms_string(terms))
350
-
351
- def _maybe_compress_bits(self, chunk: bytes, is_final: bool):
352
- if self.compressor is None:
353
- return chunk
354
-
355
- if self.__read_pos > 0:
356
- self.compressedStream.truncate(0)
357
- self.compressedStream.seek(0)
358
-
359
- self.compressor.write(chunk)
360
- if is_final:
361
- self.compressor.flush()
362
- self.compressor.close()
363
- elif (
364
- self.compressedStream.getbuffer().nbytes
365
- < self.__upload_size_threshold
366
- ):
367
- self.__read_pos = 0
368
- return
369
-
370
- self.compressedStream.seek(0)
371
- compressed = self.compressedStream.read(-1)
372
- self.__read_pos = 0 if compressed is None else len(compressed)
373
- return compressed
374
-
375
- def _upload_chunk(self, chunk: str, is_final: bool = False):
376
- compressed = self._maybe_compress_bits(chunk.encode(), is_final)
377
- if compressed is None:
378
- return
379
- if len(compressed) > 0:
380
- self.blob.upload_data(compressed)
381
-
382
- def _finish_upload(self):
383
- if not self.started_upload:
384
- self._upload_start([])
385
-
386
- self._upload_chunk("]}}", True)
387
- self.blob.commit(metadata=self.blob_properties)
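
For reference, the uploader above streams the problem as a single gzip-compressed JSON document, assembled chunk by chunk by `_upload_start`, `_get_terms_string`, and `_finish_upload`. The sketch below reconstructs the payload shape implied by that code; the values are illustrative only.

```python
import json

# "version" is "1.1" when an initial configuration is supplied, else "1.0";
# "initial_configuration" is omitted entirely when init_config is not set.
payload = {
    "cost_function": {
        "version": "1.1",
        "type": "ising",
        "initial_configuration": {"0": 1, "1": -1},
        "terms": [
            {"c": 2.0, "ids": [0, 1]},   # Term.to_dict() output
            {"c": -1.0, "ids": [1]},
        ],
    }
}
print(json.dumps(payload))
```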
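As a usage note, the removed StreamingProblem API was driven roughly as sketched below. This is a minimal illustration reconstructed from the 1.0.1 code above, not an official sample; the workspace identifiers are placeholders, and it assumes the workspace has a linked storage account.

```python
from azure.quantum import Workspace
from azure.quantum.optimization import ProblemType, StreamingProblem, Term

# Placeholder identifiers - substitute a real Azure Quantum workspace.
workspace = Workspace(
    subscription_id="<subscription-id>",
    resource_group="<resource-group>",
    name="<workspace-name>",
    location="<location>",
)

problem = StreamingProblem(
    workspace,
    name="large-ising-problem",
    problem_type=ProblemType.ising,
)

# Terms are queued and uploaded in the background as they are added,
# so the full problem never has to be held in memory.
for i in range(100_000):
    problem.add_terms([Term(indices=[i, i + 1], c=1.0)])

# Flushes the remaining queued terms and returns the blob URI of the payload.
uri = problem.upload(workspace)
```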
@@ -1,203 +0,0 @@
1
- ##
2
- # Copyright (c) Microsoft Corporation. All rights reserved.
3
- # Licensed under the MIT License.
4
- ##
5
-
6
- from __future__ import annotations
7
- import numpy as np
8
- from typing import List, Dict, Union, Optional
9
- from enum import Enum
10
- from abc import ABC
11
-
12
- __all__ = ["TermBase", "Term"]
13
-
14
- try:
15
- import numpy.typing as npt
16
-
17
- WArray = Union[int, float, npt.ArrayLike]
18
-
19
- def _convert_if_numpy_type(param: WArray):
20
- # Attempt first a conversion to a supported
21
- # type if parameter is a numpy float/int.
22
- numpy_integer_types = [
23
- np.byte,
24
- np.ubyte,
25
- np.short,
26
- np.ushort,
27
- np.intc,
28
- np.uintc,
29
- np.int_,
30
- np.uint,
31
- np.longlong,
32
- np.ulonglong,
33
- np.int8,
34
- np.uint8,
35
- np.int16,
36
- np.uint16,
37
- np.int32,
38
- np.uint32,
39
- np.int64,
40
- np.uint64,
41
- ]
42
-
43
- numpy_float_types = [
44
- np.float16,
45
- np.float32,
46
- np.float64,
47
- np.float_,
48
- np.half,
49
- np.single,
50
- np.double,
51
- ]
52
-
53
- if hasattr(param, "__iter__"):
54
- # Handle scalar-like arrays, if specified.
55
- param = param[0]
56
-
57
- if (
58
- hasattr(param, "dtype")
59
- and param.dtype in numpy_integer_types + numpy_float_types
60
- ):
61
- return param.item()
62
- else:
63
- return param
64
-
65
-
66
- except ImportError:
67
- npt = None
68
- WArray = Union[int, float]
69
-
70
- def _convert_if_numpy_type(param: WArray):
71
- return param
72
-
73
-
74
- class TermBase(ABC):
75
- """
76
- Term base class; this class is not directly initialized
77
- """
78
- def __init__(
79
- self,
80
- c: Optional[WArray] = None,
81
- ):
82
- if c is not None:
83
- # Current intended specification of term.
84
- coeff = c
85
- else:
86
- raise RuntimeError("Cost should be provided for each term.")
87
-
88
- coeff = _convert_if_numpy_type(coeff)
89
- if type(coeff) != int and type(coeff) != float:
90
- raise RuntimeError(
91
- "c must be a float or int value, \
92
- or a NumPy value that can be converted to those."
93
- )
94
- self.c = coeff
95
-
96
- def to_dict(self):
97
- return self.__dict__
98
-
99
- @classmethod
100
- def from_dict(cls, obj):
101
- return cls(c=obj.get("c"))
102
-
103
- def evaluate(self, *args, **kwargs) -> float:
104
- """Given a variable configuration, evaluate the value of the term.
105
- :param configuration:
106
- The dictionary of variable ids to their assigned value
107
- """
108
- return self.c
109
-
110
- def reduce_by_variable_state(self, *args, **kwargs) -> Optional[TermBase]:
111
- """Given some fixed variable states,
112
- transform the existing term into new term.
113
- Returns None if the new term is effectively 0
114
- :param fixed_variables:
115
- The dictionary of variable ids and their fixed state
116
- """
117
- return TermBase(c=self.c) if self.c != 0 else None
118
-
119
- def __repr__(self):
120
- return str(self.__dict__)
121
-
122
- def __eq__(self, other):
123
- if isinstance(other, self.__class__):
124
- return self.__dict__ == other.__dict__
125
- else:
126
- return False
127
-
128
- class Term(TermBase):
129
- """
130
- Class describing a single (monomial) term.
131
- """
132
- def __init__(
133
- self,
134
- indices: List[int] = None,
135
- w: Optional[WArray] = None,
136
- c: Optional[WArray] = None,
137
- ):
138
- if w is not None:
139
- # Legacy support if 'w' is used to specify
140
- # term instead of the expected 'c'.
141
- coeff = w
142
- parameter_name_used = "w"
143
- elif c is not None:
144
- coeff = c
145
- parameter_name_used = "c"
146
- else:
147
- raise RuntimeError("Cost should be provided for each term.")
148
- coeff = _convert_if_numpy_type(coeff)
149
- if type(coeff) != int and type(coeff) != float:
150
- raise RuntimeError(
151
- f"{parameter_name_used} must be a float or int value, \
152
- or a NumPy value that can be converted to those."
153
- )
154
- TermBase.__init__(self, c=coeff)
155
- self.ids = indices
156
-
157
- @classmethod
158
- def from_dict(cls, obj: dict):
159
- return cls(indices=obj.get("ids"), c=obj.get("c"))
160
-
161
- def evaluate(self, configuration: Dict[int, int]) -> float:
162
- """Given a variable configuration, evaluate the value of the term.
163
- :param configuration:
164
- The dictionary of variable ids to their assigned value
165
- """
166
- try:
167
- multiplier = (
168
- np.prod([configuration[i] for i in self.ids])
169
- if len(self.ids) > 0
170
- else 1.0
171
- )
172
- except KeyError:
173
- print(
174
- "Error - variable id found in term {0}, \
175
- but not found in the supplied configuration.".format(
176
- self.ids
177
- )
178
- )
179
- raise
180
-
181
- return multiplier * self.c
182
-
183
- def reduce_by_variable_state(
184
- self, fixed_variables: Dict[int, int]
185
- ) -> Optional[Term]:
186
- """Given some fixed variable states,
187
- transform the existing term into new term.
188
- Returns None if the new term is effectively 0
189
- :param fixed_variables:
190
- The dictionary of variable ids and their fixed state
191
- """
192
- new_ids = []
193
- new_c = self.c
194
-
195
- for i in self.ids:
196
- if i not in fixed_variables:
197
- new_ids.append(i)
198
- else:
199
- new_c *= fixed_variables[i]
200
- if new_c == 0:
201
- return None
202
-
203
- return Term(indices=new_ids, c=new_c)
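
A short sketch of how the removed Term helpers behave, derived from the code above (illustrative only): `evaluate` multiplies the coefficient by the product of the assigned variable values, and `reduce_by_variable_state` folds fixed variables into the coefficient, returning None when the term collapses to zero.

```python
from azure.quantum.optimization import Term

t = Term(indices=[0, 1], c=2.0)

# 1 * -1 * 2.0 = -2.0
print(t.evaluate({0: 1, 1: -1}))

# Fixing variable 0 to 1 leaves a term over the remaining variable: c=2.0, ids=[1]
print(t.reduce_by_variable_state({0: 1}))

# Fixing a variable to 0 makes the term vanish, so None is returned.
print(t.reduce_by_variable_state({0: 0}))
```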
@@ -1,10 +0,0 @@
1
- # coding=utf-8
2
- ##
3
- # Copyright (c) Microsoft Corporation. All rights reserved.
4
- # Licensed under the MIT License.
5
- ##
6
- import warnings
7
- warnings.warn("The azure.quantum.optimization.toshiba namespace will be deprecated. \
8
- Please use azure.quantum.target.toshiba instead.")
9
-
10
- from .solvers import *
@@ -1,12 +0,0 @@
1
- # coding=utf-8
2
- ##
3
- # Copyright (c) Microsoft Corporation. All rights reserved.
4
- # Licensed under the MIT License.
5
- ##
6
- import warnings
7
- warnings.warn("The azure.quantum.optimization.toshiba.solvers namespace will be deprecated. \
8
- Please use azure.quantum.target.toshiba instead.")
9
-
10
- from azure.quantum.target.toshiba.solvers import (
11
- SimulatedBifurcationMachine
12
- )
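
The deprecation warnings above point at the new namespace; the sketch below shows the import change they imply. It assumes the solver remains exposed from azure.quantum.target.toshiba.solvers, which is the path the removed shim itself imported from.

```python
# Before (deprecated shim removed in this release):
# from azure.quantum.optimization.toshiba import SimulatedBifurcationMachine

# After, per the warning text and the shim's own import:
from azure.quantum.target.toshiba.solvers import SimulatedBifurcationMachine
```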