qadence 1.5.0__py3-none-any.whl → 1.5.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
qadence/backend.py CHANGED
@@ -26,7 +26,6 @@ from qadence.mitigations import Mitigations
 from qadence.noise import Noise
 from qadence.parameters import stringify
 from qadence.types import ArrayLike, BackendName, DiffMode, Endianness, Engine, ParamDictType
-from qadence.utils import validate_values_and_state
 
 logger = get_logger(__file__)
 
@@ -259,29 +258,6 @@ class Backend(ABC):
         """
         raise NotImplementedError
 
-    @abstractmethod
-    def _run(
-        self,
-        circuit: ConvertedCircuit,
-        param_values: dict[str, ArrayLike] = {},
-        state: ArrayLike | None = None,
-        endianness: Endianness = Endianness.BIG,
-    ) -> ArrayLike:
-        """Run a circuit and return the resulting wave function.
-
-        Arguments:
-            circuit: A converted circuit as returned by `backend.circuit`.
-            param_values: _**Already embedded**_ parameters of the circuit. See
-                [`embedding`][qadence.blocks.embedding.embedding] for more info.
-            state: Initial state.
-            endianness: Endianness of the resulting wavefunction.
-
-        Returns:
-            A list of Counter objects where each key represents a bitstring
-            and its value the number of times it has been sampled from the given wave function.
-        """
-        raise NotImplementedError
-
     def run(
         self,
         circuit: ConvertedCircuit,
@@ -304,8 +280,7 @@ class Backend(ABC):
             A list of Counter objects where each key represents a bitstring
             and its value the number of times it has been sampled from the given wave function.
         """
-        validate_values_and_state(state, circuit.abstract.n_qubits, param_values)
-        return self._run(circuit, param_values, state, endianness, *args, **kwargs)
+        raise NotImplementedError
 
     @abstractmethod
     def run_dm(
@@ -88,7 +88,7 @@ class Backend(BackendInterface):
         ).squeeze(0)
         return ConvertedObservable(native=native, abstract=obs, original=obs)
 
-    def _run(
+    def run(
         self,
         circuit: ConvertedCircuit,
         param_values: dict[str, Tensor] = {},
@@ -66,7 +66,7 @@ class Backend(BackendInterface):
         hq_obs = convert_observable(block, n_qubits=n_qubits, config=self.config)
         return ConvertedObservable(native=hq_obs, abstract=block, original=observable)
 
-    def _run(
+    def run(
         self,
         circuit: ConvertedCircuit,
         param_values: ParamDictType = {},
@@ -200,7 +200,7 @@ class Backend(BackendInterface):
 
         return circuit.native.build(**numpy_param_values)
 
-    def _run(
+    def run(
         self,
         circuit: ConvertedCircuit,
         param_values: dict[str, Tensor] = {},
@@ -80,7 +80,7 @@ class Backend(BackendInterface):
         (native,) = convert_observable(block, n_qubits=n_qubits, config=self.config)
         return ConvertedObservable(native=native, abstract=block, original=observable)
 
-    def _run(
+    def run(
         self,
         circuit: ConvertedCircuit,
         param_values: dict[str, Tensor] = {},
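
A hedged usage sketch (separate from the diff): the public call is still `backend.run`, which after these renames each backend implements directly. This assumes qadence 1.5.1 with the pyqtorch backend installed and the standard low-level backend API (`backend_factory`, `convert`, `embedding_fn`).

```python
import torch
from qadence import QuantumCircuit, RX, backend_factory

# Build a one-qubit circuit with a single variational parameter.
circuit = QuantumCircuit(1, RX(0, "theta"))

# Convert it for the pyqtorch backend and embed parameter values.
backend = backend_factory("pyqtorch")
conv = backend.convert(circuit)
embedded = conv.embedding_fn(conv.params, {"theta": torch.tensor([0.5])})

# `run` is now implemented by the backend itself rather than a `_run` hook.
wavefunction = backend.run(conv.circuit, embedded)
print(wavefunction.shape)
```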
@@ -289,11 +289,23 @@ class TransformedModule(torch.nn.Module):
     def to(self, *args: Any, **kwargs: Any) -> TransformedModule:
         try:
             self.model = self.model.to(*args, **kwargs)
-            self._input_scaling = self._input_scaling.to(*args, **kwargs)
-            self._input_shifting = self._input_shifting.to(*args, **kwargs)
-            self._output_scaling = self._output_scaling.to(*args, **kwargs)
-            self._output_shifting = self._output_shifting.to(*args, **kwargs)
-
+            if isinstance(self.model, QuantumModel):
+                device = self.model._circuit.native.device
+                dtype = (
+                    torch.float64
+                    if self.model._circuit.native.dtype == torch.cdouble
+                    else torch.float32
+                )
+
+                self._input_scaling = self._input_scaling.to(device=device, dtype=dtype)
+                self._input_shifting = self._input_shifting.to(device=device, dtype=dtype)
+                self._output_scaling = self._output_scaling.to(device=device, dtype=dtype)
+                self._output_shifting = self._output_shifting.to(device=device, dtype=dtype)
+            elif isinstance(self.model, torch.nn.Module):
+                self._input_scaling = self._input_scaling.to(*args, **kwargs)
+                self._input_shifting = self._input_shifting.to(*args, **kwargs)
+                self._output_scaling = self._output_scaling.to(*args, **kwargs)
+                self._output_shifting = self._output_shifting.to(*args, **kwargs)
             logger.debug(f"Moved {self} to {args}, {kwargs}.")
         except Exception as e:
             logger.warning(f"Unable to move {self} to {args}, {kwargs} due to {e}.")
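
A minimal sketch (plain PyTorch, separate from the diff) of the buffer-handling rule added above: when the wrapped model is a `QuantumModel`, the scaling/shifting buffers follow the native circuit's device, and a complex circuit dtype is mapped to the matching real dtype.

```python
import torch


def matching_real_dtype(circuit_dtype: torch.dtype) -> torch.dtype:
    # Same rule as the hunk above: complex128 circuits pair their real-valued
    # buffers with float64; everything else falls back to float32.
    return torch.float64 if circuit_dtype == torch.cdouble else torch.float32


buffer = torch.zeros(3)       # stands in for an input/output scaling buffer
device = torch.device("cpu")  # stands in for self.model._circuit.native.device
dtype = matching_real_dtype(torch.cdouble)

print(buffer.to(device=device, dtype=dtype).dtype)  # torch.float64
```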
@@ -3,6 +3,7 @@ from __future__ import annotations
 from typing import Callable, Union
 
 from rich.progress import BarColumn, Progress, TaskProgressColumn, TextColumn, TimeRemainingColumn
+from torch import complex128, float32, float64
 from torch import device as torch_device
 from torch import dtype as torch_dtype
 from torch.nn import DataParallel, Module
@@ -110,17 +111,17 @@ def train(
     train_with_grad(model, data, optimizer, config, loss_fn=loss_fn)
     ```
     """
+    # load available checkpoint
+    init_iter = 0
+    if config.folder:
+        model, optimizer, init_iter = load_checkpoint(config.folder, model, optimizer)
+        logger.debug(f"Loaded model and optimizer from {config.folder}")
 
     # Move model to device before optimizer is loaded
     if isinstance(model, DataParallel):
         model = model.module.to(device=device, dtype=dtype)
     else:
         model = model.to(device=device, dtype=dtype)
-    # load available checkpoint
-    init_iter = 0
-    if config.folder:
-        model, optimizer, init_iter = load_checkpoint(config.folder, model, optimizer)
-        logger.debug(f"Loaded model and optimizer from {config.folder}")
     # initialize tensorboard
     writer = SummaryWriter(config.folder, purge_step=init_iter)
 
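
A small illustrative sketch (separate from the diff; `load_checkpoint_stub` is a stand-in for the `load_checkpoint` used above) of the reordering in this hunk: the checkpoint is restored first, and only then is the model moved to the requested device and dtype.

```python
import torch


def load_checkpoint_stub(folder, model, optimizer):
    # Stand-in for the real checkpoint loader; returns (model, optimizer, iteration).
    return model, optimizer, 0


model = torch.nn.Linear(2, 1)
optimizer = torch.optim.Adam(model.parameters())

# 1.5.1 order: restore the checkpoint first ...
model, optimizer, init_iter = load_checkpoint_stub("./checkpoints", model, optimizer)
# ... then move the restored model to the target device/dtype.
model = model.to(device="cpu", dtype=torch.float32)
print(init_iter, next(model.parameters()).dtype)
```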
@@ -131,7 +132,9 @@ def train(
         TaskProgressColumn(),
         TimeRemainingColumn(elapsed_when_finished=True),
     )
-
+    data_dtype = None
+    if dtype:
+        data_dtype = float64 if dtype == complex128 else float32
    with progress:
         dl_iter = iter(dataloader) if dataloader is not None else None
 
@@ -143,7 +146,12 @@ def train(
             # which do not have classical input data (e.g. chemistry)
             if dataloader is None:
                 loss, metrics = optimize_step(
-                    model=model, optimizer=optimizer, loss_fn=loss_fn, xs=None, device=device
+                    model=model,
+                    optimizer=optimizer,
+                    loss_fn=loss_fn,
+                    xs=None,
+                    device=device,
+                    dtype=data_dtype,
                 )
                 loss = loss.item()
 
@@ -154,6 +162,7 @@ def train(
                     loss_fn=loss_fn,
                     xs=next(dl_iter),  # type: ignore[arg-type]
                     device=device,
+                    dtype=data_dtype,
                 )
 
             else:
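
A hedged sketch of how the new `dtype` argument could be used; this is an assumption for illustration, not the qadence implementation (the diff only shows the keyword being passed through). `data_dtype` follows the mapping added earlier in this file: `float64` for `complex128` models, `float32` otherwise.

```python
import torch


def optimize_step_sketch(model, optimizer, loss_fn, xs, device=None, dtype=None):
    # Hypothetical helper: cast the incoming batch so classical data matches the
    # real dtype paired with the model's (possibly complex) parameters.
    if xs is not None and dtype is not None:
        xs = xs.to(device=device, dtype=dtype)
    optimizer.zero_grad()
    loss, metrics = loss_fn(model, xs)
    loss.backward()
    optimizer.step()
    return loss, metrics


# Tiny usage demo with a purely classical stand-in model and loss.
model = torch.nn.Linear(2, 1)
opt = torch.optim.SGD(model.parameters(), lr=0.1)
loss_fn = lambda m, x: (m(x).pow(2).mean(), {})
optimize_step_sketch(model, opt, loss_fn, torch.randn(4, 2), device="cpu", dtype=torch.float32)
```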
@@ -342,9 +342,10 @@ class QuantumModel(nn.Module):
         return self.backend.assign_parameters(self._circuit, params)
 
     def to(self, *args: Any, **kwargs: Any) -> QuantumModel:
+        from pyqtorch import QuantumCircuit as PyQCircuit
+
         try:
-            if isinstance(self._circuit.native, torch.nn.Module):
-                # Backends which are not torch-based cannot be moved to 'device'
+            if isinstance(self._circuit.native, PyQCircuit):
                 self._circuit.native = self._circuit.native.to(*args, **kwargs)
                 if self._observable is not None:
                     if isinstance(self._observable, ConvertedObservable):
@@ -359,6 +360,8 @@ class QuantumModel(nn.Module):
                     else torch.float32,
                 )
                 logger.debug(f"Moved {self} to {args}, {kwargs}.")
+            else:
+                logger.debug("QuantumModel.to only supports pyqtorch.QuantumCircuits.")
         except Exception as e:
             logger.warning(f"Unable to move {self} to {args}, {kwargs} due to {e}.")
         return self
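
A hedged usage sketch (assumes qadence 1.5.1 with its default pyqtorch backend; constructor names follow the public qadence API): with the change above, `.to` moves the native circuit only when it is a pyqtorch `QuantumCircuit`, and otherwise just logs a debug message and leaves everything in place.

```python
import torch
from qadence import QuantumCircuit, QuantumModel, RX, Z

circuit = QuantumCircuit(1, RX(0, "theta"))
model = QuantumModel(circuit, observable=Z(0))  # pyqtorch is the default backend

# Moves the native pyqtorch circuit, observable and parameters; a non-pyqtorch
# backend would take the new `else` branch above instead.
model = model.to(dtype=torch.complex128)
print(model._circuit.native.dtype)  # expected: torch.complex128 (torch.cdouble)
```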
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: qadence
-Version: 1.5.0
+Version: 1.5.1
 Summary: Pasqal interface for circuit-based quantum computing SDKs
 Author-email: Aleksander Wennersteen <aleksander.wennersteen@pasqal.com>, Gert-Jan Both <gert-jan.both@pasqal.com>, Niklas Heim <niklas.heim@pasqal.com>, Mario Dagrada <mario.dagrada@pasqal.com>, Vincent Elfving <vincent.elfving@pasqal.com>, Dominik Seitz <dominik.seitz@pasqal.com>, Roland Guichard <roland.guichard@pasqal.com>, "Joao P. Moutinho" <joao.moutinho@pasqal.com>, Vytautas Abramavicius <vytautas.abramavicius@pasqal.com>, Gergana Velikova <gergana.velikova@pasqal.com>
 License: Apache 2.0
@@ -1,5 +1,5 @@
 qadence/__init__.py,sha256=-UKQQ_dYiaa7viishl2baAbxS82eS6dAoCnq_CLSmao,1708
-qadence/backend.py,sha256=5sfXUTRts_13v6lajq1Wvy3u_eOCOeIYnTiaZV_NGFQ,15493
+qadence/backend.py,sha256=qxTCLfSqjtFAxlu3QgNj_npx_xbY9P2f3SP0mFkL-e8,14410
 qadence/circuit.py,sha256=EGBPRRWlK-mcXaaAhJnp-hxVWQ8NxngGKbvhPqrEeKM,6892
 qadence/decompose.py,sha256=_L0hI3SbYErXEDp-aXFeNk0JR9ffJ_JD_EnRJbJKT20,5230
 qadence/divergences.py,sha256=JhpELhWSnuDvQxa9hJp_DE3EQg2Ban-Ta0mHZ_fVrHg,1832
@@ -31,15 +31,15 @@ qadence/backends/gpsr.py,sha256=227h5KPI_KStrwfP5zuwkzOqviRZmqa7ijIIhhawwPM,4341
 qadence/backends/jax_utils.py,sha256=VfKhqCKknHDWZO21UFipWH_Lkiq175Z5GkP49gWjbyw,5038
 qadence/backends/utils.py,sha256=hnV9AXztMvAPcO8mv9UhdGMbS9albiMQBxlYPgLrD68,6490
 qadence/backends/braket/__init__.py,sha256=eruyDZKMqkh1LE7eJ980vcrLJbia35uUX6krAP78clI,121
-qadence/backends/braket/backend.py,sha256=PLW-NCNp_RqAQy149ASEOOvwPrLsFGjCKWuEmTnS5Qg,8730
+qadence/backends/braket/backend.py,sha256=XRrrkdylsH8GejbtY8fSJMmX2X7xWmZmEZPxcqWWM5E,8729
 qadence/backends/braket/config.py,sha256=b9aIdma0DRwC_3A6xUSLdXMCZe6z6kDcAgkp6MxcXIk,603
 qadence/backends/braket/convert_ops.py,sha256=DVXV7sT9sX_yGOgPKclD9KIGgmbBRuDy_e39i1Z8I1s,3417
 qadence/backends/horqrux/__init__.py,sha256=0OdVy6cq0oQggV48LO1WXdaZuSkDkz7OYNEPIkNAmfk,140
-qadence/backends/horqrux/backend.py,sha256=qdThkUXbhDrsD1lAio2SFsxdYATxLVXoNsuXNcD3ZBU,9330
+qadence/backends/horqrux/backend.py,sha256=ZOkkklcqqM0T5CTwfSpNAAcW_a0l922h48gj6kPNw4I,9329
 qadence/backends/horqrux/config.py,sha256=fPWFag1hmRhqj0T-fJOx5x8_C5UEZUXpdUnpOgX0Jpc,901
 qadence/backends/horqrux/convert_ops.py,sha256=nzfYF0yjB7zwaHCEXWZUUYDfz38Yi22xF2zDRFaOwR0,8564
 qadence/backends/pulser/__init__.py,sha256=capQ-eHqwtOeLf4mWsI0BIseAHhiLGie5cFD4-iVhUo,116
-qadence/backends/pulser/backend.py,sha256=3p9cB5QN60nzqhWvdLZol0t66PugaWqvWHdDEXgGRwM,13868
+qadence/backends/pulser/backend.py,sha256=ZxGg9zLyGTg3gJAZXTL7b96PHvhmN5D4yOAAdnVgLu4,13867
 qadence/backends/pulser/channels.py,sha256=ZF0yEXUFHAmi3IdeXjzdTNGR5NzaRRFTiUpUGVg2sO4,329
 qadence/backends/pulser/cloud.py,sha256=0uUluvbFV9sOuCPraE-9uiVtC3Q8QaDY1IJMDi8grDM,2057
 qadence/backends/pulser/config.py,sha256=1qu_GhGTGcCpFoKctGt_IhKOKWiMcJIL2vHTFJg9I3E,3122
@@ -48,7 +48,7 @@ qadence/backends/pulser/devices.py,sha256=DermLZNfmCB3SqteKVW4uhg4jp6ya1G6ptnXbB
 qadence/backends/pulser/pulses.py,sha256=DopdEZ8eeWK7wZxqJTBhqY0w5bEXu6fVK7rnZOb50ns,11893
 qadence/backends/pulser/waveforms.py,sha256=0uz95b7rUaUUtN0tuHBZmJ0H6UBmfHST_59ozwsRCzg,2227
 qadence/backends/pyqtorch/__init__.py,sha256=0OdVy6cq0oQggV48LO1WXdaZuSkDkz7OYNEPIkNAmfk,140
-qadence/backends/pyqtorch/backend.py,sha256=hNo9nKxfJ3rFDEPjZOhfmbKUUVsNzTl0aRh9sgeKlqc,9763
+qadence/backends/pyqtorch/backend.py,sha256=eaC-yV-Ckgq6YCq1UrrOh6Ug_vFHmUR43RQBiXfcv1Q,9762
 qadence/backends/pyqtorch/config.py,sha256=f5BjWehCqm9do2OahNWrv2w55y3orkw0Wj2f6flwRaU,1907
 qadence/backends/pyqtorch/convert_ops.py,sha256=By_p1-Oem8MhHYP8jx5qdut9lhDWN0xc4B9YaP0MSxA,17512
 qadence/blocks/__init__.py,sha256=H6jEA_CptkE-eoB4UfSbUiDszbxxhZwECV_TgoZWXoU,960
@@ -105,18 +105,18 @@ qadence/mitigations/readout.py,sha256=HPfYmdjRlieUdOBMZTghFK4DRWfveM4KkDkEI0bMI0
 qadence/ml_tools/__init__.py,sha256=_H5A_BWZRZVGoJszb9s8XRJnLnJxUNfYjuT9HT2yASo,786
 qadence/ml_tools/config.py,sha256=X8dHyjq4D9-ITjs7UQo0vjJTcHkpbZC0gChH5eEN2G8,2356
 qadence/ml_tools/data.py,sha256=8ZUFjhQSp94w7icX7RzM2J39Yo7P_T-AgjcThBc8miI,4283
-qadence/ml_tools/models.py,sha256=biUWnqXKmGMJfT-QObJ-cQ6770YIfOTz-o28EKPkM3Q,11736
+qadence/ml_tools/models.py,sha256=-9XOmMRXQDI5fAjlrqlSGI7vCV3DKJVmRdngu98QroM,12476
 qadence/ml_tools/optimize_step.py,sha256=ATXWmAqybJVK3QmAaDqVXB5mxjTo2MIi_e0a5WSPFpc,1800
 qadence/ml_tools/parameters.py,sha256=gew2Kq_5-RgRpaTvs8eauVhgo0sTqqDQEV6WHFEiLGM,1301
 qadence/ml_tools/printing.py,sha256=kwwD9yLVqezaqWX5OAsXr8GLdJUnGrY-t5SnoKHtl9g,707
 qadence/ml_tools/saveload.py,sha256=Xi3o2bMsYueFPxrU6AXgDB0MHSev8gKLVhdqecPDBt8,4663
 qadence/ml_tools/tensors.py,sha256=xZ9ZRzOqEaMgLUGWQf1najDmL6iLuN1ojCGVFs1Tm94,1337
-qadence/ml_tools/train_grad.py,sha256=b_FxOkK3QoLAOwSowjkkMIjlBEDjoLcjeo3Vk_RHhkc,7399
+qadence/ml_tools/train_grad.py,sha256=zNzkgK73OtIllc8JLTqaM8P9m233BGa116HelsQBQqU,7727
 qadence/ml_tools/train_no_grad.py,sha256=erwus-pUOg8q6WgoQsDW6MeH80wlRPBh69W1ZMHKoL8,4714
 qadence/ml_tools/utils.py,sha256=_GZSN5Flk1nRFutkXih397Q3cWKdX0UP8c9CRXpUL7c,1654
 qadence/models/__init__.py,sha256=0nZzAC2TGr8Yuf40-R7m2cSsr_BlNq_GsMOwaOYZLqM,193
 qadence/models/qnn.py,sha256=gc_iC1GG6WJbeLaln9jy4yYp9fY0p8fkpKkKJpXJ3ck,10397
-qadence/models/quantum_model.py,sha256=ucExUBPBYWrC7mLF4SQbvHqjdXmkEi7-wIdXqpfaGew,14107
+qadence/models/quantum_model.py,sha256=SetO2TPd9pe2QcNCcfdHKtGM1Rj-bhCTOsaExq7smnY,14186
 qadence/noise/__init__.py,sha256=r0nR8uEZeB1M9pI2UisjWq0bjw50fPFfVGzIMev923g,147
 qadence/noise/protocols.py,sha256=-aZ06JvMnpxCeT5v5lI_RNPOLbb9Ju1Pi1AB6uAXxVE,1653
 qadence/noise/readout.py,sha256=BqBIZbPXWqZaKi6EpBSpXXQ9NhQXdQ-YL6ZmwbSjgfE,6736
@@ -134,7 +134,7 @@ qadence/transpile/digitalize.py,sha256=iWRwYAYQsD2INHj0HNbGJriv_3fRCuBW1nDBrwtKS
 qadence/transpile/flatten.py,sha256=EdhSG5WyF56nbnxINNLqrHgY84MRM1YFjT3fR4aph5Q,3427
 qadence/transpile/invert.py,sha256=KAefHTG2AWr39aengVhXrzCtJPhrZC-ZnL6vYvmbnY0,4867
 qadence/transpile/transpile.py,sha256=6MRRkk1OS279L1fwUQjazA6qlfpbd-T_EJMKT8hAhOU,2721
-qadence-1.5.0.dist-info/METADATA,sha256=NgAwZRUkuLfgnTuo46gGAW1Pj8EmPCAvvPr3FbPi1Ww,8997
-qadence-1.5.0.dist-info/WHEEL,sha256=uNdcs2TADwSd5pVaP0Z_kcjcvvTUklh2S7bxZMF8Uj0,87
-qadence-1.5.0.dist-info/licenses/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
-qadence-1.5.0.dist-info/RECORD,,
+qadence-1.5.1.dist-info/METADATA,sha256=yM1L8J2RKy1j3WW8YnA7wZ2k8U3sIiN7wedQq__IIKM,8997
+qadence-1.5.1.dist-info/WHEEL,sha256=as-1oFTWSeWBgyzh0O_qF439xqBe6AbBgt4MfYe5zwY,87
+qadence-1.5.1.dist-info/licenses/LICENSE,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+qadence-1.5.1.dist-info/RECORD,,
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: hatchling 1.22.4
+Generator: hatchling 1.22.5
 Root-Is-Purelib: true
 Tag: py3-none-any