tequila-basic 1.9.8-py3-none-any.whl → 1.9.10-py3-none-any.whl
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- tequila/__init__.py +29 -14
- tequila/apps/__init__.py +14 -5
- tequila/apps/_unary_state_prep_impl.py +145 -112
- tequila/apps/adapt/__init__.py +9 -1
- tequila/apps/adapt/adapt.py +154 -113
- tequila/apps/krylov/__init__.py +1 -1
- tequila/apps/krylov/krylov.py +23 -21
- tequila/apps/robustness/helpers.py +10 -6
- tequila/apps/robustness/interval.py +238 -156
- tequila/apps/unary_state_prep.py +29 -23
- tequila/autograd_imports.py +8 -5
- tequila/circuit/__init__.py +2 -1
- tequila/circuit/_gates_impl.py +135 -67
- tequila/circuit/circuit.py +177 -88
- tequila/circuit/compiler.py +114 -105
- tequila/circuit/gates.py +288 -120
- tequila/circuit/gradient.py +35 -23
- tequila/circuit/noise.py +83 -74
- tequila/circuit/postselection.py +120 -0
- tequila/circuit/pyzx.py +10 -6
- tequila/circuit/qasm.py +201 -83
- tequila/circuit/qpic.py +63 -61
- tequila/grouping/binary_rep.py +148 -146
- tequila/grouping/binary_utils.py +84 -75
- tequila/grouping/compile_groups.py +334 -230
- tequila/grouping/ev_utils.py +77 -41
- tequila/grouping/fermionic_functions.py +383 -308
- tequila/grouping/fermionic_methods.py +170 -123
- tequila/grouping/overlapping_methods.py +69 -52
- tequila/hamiltonian/paulis.py +12 -13
- tequila/hamiltonian/paulistring.py +1 -1
- tequila/hamiltonian/qubit_hamiltonian.py +45 -35
- tequila/ml/__init__.py +1 -0
- tequila/ml/interface_torch.py +19 -16
- tequila/ml/ml_api.py +11 -10
- tequila/ml/utils_ml.py +12 -11
- tequila/objective/__init__.py +8 -3
- tequila/objective/braket.py +55 -47
- tequila/objective/objective.py +91 -56
- tequila/objective/qtensor.py +36 -27
- tequila/optimizers/__init__.py +31 -23
- tequila/optimizers/_containers.py +11 -7
- tequila/optimizers/optimizer_base.py +111 -83
- tequila/optimizers/optimizer_gd.py +258 -231
- tequila/optimizers/optimizer_gpyopt.py +56 -42
- tequila/optimizers/optimizer_scipy.py +157 -112
- tequila/quantumchemistry/__init__.py +66 -38
- tequila/quantumchemistry/chemistry_tools.py +394 -203
- tequila/quantumchemistry/encodings.py +121 -13
- tequila/quantumchemistry/madness_interface.py +170 -96
- tequila/quantumchemistry/orbital_optimizer.py +86 -40
- tequila/quantumchemistry/psi4_interface.py +166 -97
- tequila/quantumchemistry/pyscf_interface.py +70 -23
- tequila/quantumchemistry/qc_base.py +866 -414
- tequila/simulators/__init__.py +0 -3
- tequila/simulators/simulator_api.py +258 -106
- tequila/simulators/simulator_aqt.py +102 -0
- tequila/simulators/simulator_base.py +156 -55
- tequila/simulators/simulator_cirq.py +58 -42
- tequila/simulators/simulator_cudaq.py +600 -0
- tequila/simulators/simulator_ddsim.py +390 -0
- tequila/simulators/simulator_mqp.py +30 -0
- tequila/simulators/simulator_pyquil.py +190 -171
- tequila/simulators/simulator_qibo.py +95 -87
- tequila/simulators/simulator_qiskit.py +124 -114
- tequila/simulators/simulator_qlm.py +52 -26
- tequila/simulators/simulator_qulacs.py +85 -59
- tequila/simulators/simulator_spex.py +464 -0
- tequila/simulators/simulator_symbolic.py +6 -5
- tequila/simulators/test_spex_simulator.py +208 -0
- tequila/tools/convenience.py +4 -4
- tequila/tools/qng.py +72 -64
- tequila/tools/random_generators.py +38 -34
- tequila/utils/bitstrings.py +13 -7
- tequila/utils/exceptions.py +19 -5
- tequila/utils/joined_transformation.py +8 -10
- tequila/utils/keymap.py +0 -5
- tequila/utils/misc.py +6 -4
- tequila/version.py +1 -1
- tequila/wavefunction/qubit_wavefunction.py +52 -30
- {tequila_basic-1.9.8.dist-info → tequila_basic-1.9.10.dist-info}/METADATA +23 -17
- tequila_basic-1.9.10.dist-info/RECORD +93 -0
- {tequila_basic-1.9.8.dist-info → tequila_basic-1.9.10.dist-info}/WHEEL +1 -1
- tequila_basic-1.9.8.dist-info/RECORD +0 -86
- {tequila_basic-1.9.8.dist-info → tequila_basic-1.9.10.dist-info/licenses}/LICENSE +0 -0
- {tequila_basic-1.9.8.dist-info → tequila_basic-1.9.10.dist-info}/top_level.txt +0 -0
tequila/optimizers/optimizer_gpyopt.py

```diff
@@ -65,12 +65,11 @@ class OptimizerGPyOpt(Optimizer):
 
     @classmethod
     def available_methods(cls):
-        return [
-
-    def __init__(self, maxiter=100, backend=None,
-                 samples=None, noise=None, device=None,
-                 save_history=True, silent=False):
+        return ["gpyopt-lbfgs", "gpyopt-direct", "gpyopt-cma"]
 
+    def __init__(
+        self, maxiter=100, backend=None, samples=None, noise=None, device=None, save_history=True, silent=False
+    ):
         """
 
         Parameters
@@ -94,8 +93,15 @@ class OptimizerGPyOpt(Optimizer):
         silent: bool: Default = False:
             suppresses printouts if true.
         """
-        super().__init__(
-
+        super().__init__(
+            backend=backend,
+            maxiter=maxiter,
+            samples=samples,
+            save_history=save_history,
+            device=device,
+            noise=noise,
+            silent=silent,
+        )
 
     def get_domain(self, objective: Objective, passive_angles: dict = None) -> typing.List[typing.Dict]:
         """
@@ -125,7 +131,7 @@ class OptimizerGPyOpt(Optimizer):
         for i, thing in enumerate(op):
             if thing in passive_angles.keys():
                 op.remove(thing)
-        return [{
+        return [{"name": v, "type": "continuous", "domain": (0, 2 * np.pi)} for v in op]
 
     def get_object(self, func, domain, method) -> GPyOpt.methods.BayesianOptimization:
         """
@@ -160,10 +166,12 @@ class OptimizerGPyOpt(Optimizer):
         -------
         callable.
         """
-        return lambda arr: objective(
-
-
-
+        return lambda arr: objective(
+            backend=self.backend,
+            variables=array_to_objective_dict(objective, arr, passive_angles),
+            samples=self.samples,
+            noise=self.noise,
+        )
 
     def redictify(self, arr, objective, passive_angles=None) -> typing.Dict:
         """
@@ -192,11 +200,15 @@ class OptimizerGPyOpt(Optimizer):
                 back[k] = v
         return back
 
-    def __call__(
-
-
-
-
+    def __call__(
+        self,
+        objective: Objective,
+        initial_values: typing.Dict[Variable, numbers.Real] = None,
+        variables: typing.List[typing.Hashable] = None,
+        method: str = "lbfgs",
+        *args,
+        **kwargs,
+    ) -> GPyOptResults:
         """
         perform optimization of an objective via GPyOpt.
 
@@ -235,7 +247,7 @@ class OptimizerGPyOpt(Optimizer):
         f = self.construct_function(O, passive_angles)
         opt = self.get_object(f, dom, method)
 
-        method_options = {"max_iter": self.maxiter, "verbosity": not self.silent, "eps": 1.
+        method_options = {"max_iter": self.maxiter, "verbosity": not self.silent, "eps": 1.0e-4}
 
         if "method_options" in kwargs:
             tmp = {**method_options, **kwargs["method_options"]}
@@ -244,23 +256,28 @@ class OptimizerGPyOpt(Optimizer):
         if self.save_history:
             self.history.energies = opt.get_evaluations()[1].flatten()
             self.history.angles = [self.redictify(v, objective, passive_angles) for v in opt.get_evaluations()[0]]
-        return GPyOptResults(
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        return GPyOptResults(
+            energy=opt.fx_opt,
+            variables=self.redictify(opt.x_opt, objective, passive_angles),
+            history=self.history,
+            gpyopt_instance=opt,
+        )
+
+
+def minimize(
+    objective: Objective,
+    maxiter: int,
+    variables: typing.List = None,
+    initial_values: typing.Dict = None,
+    samples: int = None,
+    backend: str = None,
+    noise=None,
+    device: str = None,
+    method: str = "lbfgs",
+    silent: bool = False,
+    *args,
+    **kwargs,
+) -> GPyOptResults:
     """
     Minimize an objective using GPyOpt.
     Parameters
@@ -291,10 +308,7 @@ def minimize(objective: Objective,
        the results of an optimization.
    """
 
-    optimizer = OptimizerGPyOpt(
-
-
-    return optimizer(objective=objective, initial_values=initial_values,
-                     variables=variables,
-                     method=method
-                     )
+    optimizer = OptimizerGPyOpt(
+        samples=samples, backend=backend, maxiter=maxiter, device=device, noise=noise, silent=silent
+    )
+    return optimizer(objective=objective, initial_values=initial_values, variables=variables, method=method)
```
tequila/optimizers/optimizer_scipy.py

```diff
@@ -1,4 +1,7 @@
-import scipy
+import scipy
+import numpy
+import typing
+import numbers
 from tequila.objective import Objective
 from tequila.objective.objective import assign_variable, Variable, format_variable_dictionary, format_variable_list
 from .optimizer_base import Optimizer, OptimizerResults
@@ -9,17 +12,18 @@ from tequila.tools.qng import get_qng_combos
 
 from dataclasses import dataclass
 
+
 class TequilaScipyException(TequilaException):
     """ """
+
     pass
 
+
 @dataclass
 class SciPyResults(OptimizerResults):
-
     scipy_result: scipy.optimize.OptimizeResult = None
 
 
-
 class OptimizerSciPy(Optimizer):
     """
     Class wrapping over the scipy optimizer for use by Tequila.
@@ -39,8 +43,9 @@ class OptimizerSciPy(Optimizer):
     silent:
         if False, the optimizer prints out all evaluated energies
     """
-
-
+
+    gradient_free_methods = ["NELDER-MEAD", "COBYLA", "POWELL", "SLSQP"]
+    gradient_based_methods = ["L-BFGS-B", "BFGS", "CG", "TNC"]
     hessian_based_methods = ["TRUST-KRYLOV", "NEWTON-CG", "DOGLEG", "TRUST-NCG", "TRUST-EXACT", "TRUST-CONSTR"]
 
     @classmethod
@@ -48,12 +53,15 @@ class OptimizerSciPy(Optimizer):
         """:return: All tested available methods"""
         return cls.gradient_free_methods + cls.gradient_based_methods + cls.hessian_based_methods
 
-    def __init__(
-
-
-
-
-
+    def __init__(
+        self,
+        method: str = "L-BFGS-B",
+        tol: numbers.Real = None,
+        method_options=None,
+        method_bounds=None,
+        method_constraints=None,
+        **kwargs,
+    ):
         """
         Parameters
         ----------
@@ -83,28 +91,30 @@ class OptimizerSciPy(Optimizer):
         self.method_bounds = method_bounds
 
         if method_options is None:
-            self.method_options = {
+            self.method_options = {"maxiter": self.maxiter}
         else:
             self.method_options = method_options
-            if
-                self.method_options[
+            if "maxiter" not in method_options:
+                self.method_options["maxiter"] = self.maxiter
 
-        self.method_options[
+        self.method_options["disp"] = self.print_level > 0
 
         if method_constraints is None:
             self.method_constraints = ()
         else:
             self.method_constraints = method_constraints
 
-    def __call__(
-
-
-
-
-
-
-
-
+    def __call__(
+        self,
+        objective: Objective,
+        variables: typing.List[Variable] = None,
+        initial_values: typing.Dict[Variable, numbers.Real] = None,
+        gradient: typing.Dict[Variable, Objective] = None,
+        hessian: typing.Dict[typing.Tuple[Variable, Variable], Objective] = None,
+        reset_history: bool = True,
+        *args,
+        **kwargs,
+    ) -> SciPyResults:
         """
         Perform optimization using scipy optimizers.
 
@@ -157,16 +167,18 @@ class OptimizerSciPy(Optimizer):
                 bounds[k] = v
             infostring += "{:15} : {}\n".format("bounds", self.method_bounds)
             names, bounds = zip(*bounds.items())
-            assert
+            assert names == param_keys  # make sure the bounds are not shuffled
 
         # do the compilation here to avoid costly recompilation during the optimization
         compiled_objective = self.compile_objective(objective=objective, *args, **kwargs)
-        E = _EvalContainer(
-
-
-
-
-
+        E = _EvalContainer(
+            objective=compiled_objective,
+            param_keys=param_keys,
+            samples=self.samples,
+            passive_angles=passive_angles,
+            save_history=self.save_history,
+            print_level=self.print_level,
+        )
 
         compile_gradient = self.method in (self.gradient_based_methods + self.hessian_based_methods)
         compile_hessian = self.method in self.hessian_based_methods
@@ -176,13 +188,18 @@ class OptimizerSciPy(Optimizer):
         # detect if numerical gradients shall be used
         # switch off compiling if so
         if isinstance(gradient, str):
-            if gradient.lower() ==
+            if gradient.lower() == "qng":
                 compile_gradient = False
                 if compile_hessian:
-                    raise TequilaException(
-
-                combos = get_qng_combos(
-
+                    raise TequilaException("Sorry, QNG and hessian not yet tested together.")
+
+                combos = get_qng_combos(
+                    objective,
+                    initial_values=initial_values,
+                    backend=self.backend,
+                    samples=self.samples,
+                    noise=self.noise,
+                )
                 dE = _QngContainer(combos=combos, param_keys=param_keys, passive_angles=passive_angles)
                 infostring += "{:15} : QNG {}\n".format("gradient", dE)
             else:
@@ -195,15 +212,21 @@ class OptimizerSciPy(Optimizer):
             infostring += "{:15} : scipy numerical {}\n".format("gradient", dE)
             infostring += "{:15} : scipy numerical {}\n".format("hessian", ddE)
 
-        if isinstance(gradient,dict) and "method" in gradient:
-            if gradient[
-                func = gradient[
+        if isinstance(gradient, dict) and "method" in gradient:
+            if gradient["method"] == "qng":
+                func = gradient["function"]
                 compile_gradient = False
                 if compile_hessian:
-                    raise TequilaException(
-
-                combos = get_qng_combos(
-
+                    raise TequilaException("Sorry, QNG and hessian not yet tested together.")
+
+                combos = get_qng_combos(
+                    objective,
+                    func=func,
+                    initial_values=initial_values,
+                    backend=self.backend,
+                    samples=self.samples,
+                    noise=self.noise,
+                )
                 dE = _QngContainer(combos=combos, param_keys=param_keys, passive_angles=passive_angles)
                 infostring += "{:15} : QNG {}\n".format("gradient", dE)
 
@@ -212,28 +235,33 @@ class OptimizerSciPy(Optimizer):
             compile_hessian = False
 
         if compile_gradient:
-            grad_obj, comp_grad_obj = self.compile_gradient(
+            grad_obj, comp_grad_obj = self.compile_gradient(
+                objective=objective, variables=variables, gradient=gradient, *args, **kwargs
+            )
             expvals = sum([o.count_expectationvalues() for o in comp_grad_obj.values()])
             infostring += "{:15} : {} expectationvalues\n".format("gradient", expvals)
-            dE = _GradContainer(
-
-
-
-
-
+            dE = _GradContainer(
+                objective=comp_grad_obj,
+                param_keys=param_keys,
+                samples=self.samples,
+                passive_angles=passive_angles,
+                save_history=self.save_history,
+                print_level=self.print_level,
+            )
         if compile_hessian:
-            hess_obj, comp_hess_obj = self.compile_hessian(
-
-
-                comp_grad_obj=comp_grad_obj, *args, **kwargs)
+            hess_obj, comp_hess_obj = self.compile_hessian(
+                variables=variables, hessian=hessian, grad_obj=grad_obj, comp_grad_obj=comp_grad_obj, *args, **kwargs
+            )
             expvals = sum([o.count_expectationvalues() for o in comp_hess_obj.values()])
             infostring += "{:15} : {} expectationvalues\n".format("hessian", expvals)
-            ddE = _HessContainer(
-
-
-
-
-
+            ddE = _HessContainer(
+                objective=comp_hess_obj,
+                param_keys=param_keys,
+                samples=self.samples,
+                passive_angles=passive_angles,
+                save_history=self.save_history,
+                print_level=self.print_level,
+            )
         if self.print_level > 0:
             print(self)
             print(infostring)
@@ -242,6 +270,7 @@ class OptimizerSciPy(Optimizer):
         Es = []
 
         optimizer_instance = self
+
        class SciPyCallback:
            energies = []
            gradients = []
@@ -257,17 +286,23 @@ class OptimizerSciPy(Optimizer):
                if ddE is not None and not isinstance(ddE, str):
                    self.hessians.append(ddE.history[-1])
                self.real_iterations += 1
-                if
-                    optimizer_instance.kwargs[
+                if "callback" in optimizer_instance.kwargs:
+                    optimizer_instance.kwargs["callback"](E.history_angles[-1])
 
        callback = SciPyCallback()
-        res = scipy.optimize.minimize(
-
-
-
-
-
-
+        res = scipy.optimize.minimize(
+            E,
+            x0=param_values,
+            jac=dE,
+            hess=ddE,
+            args=(Es,),
+            method=self.method,
+            tol=self.tol,
+            bounds=bounds,
+            constraints=self.method_constraints,
+            options=self.method_options,
+            callback=callback,
+        )
 
        # failsafe since callback is not implemented everywhere
        if callback.real_iterations == 0:
@@ -293,10 +328,12 @@ class OptimizerSciPy(Optimizer):
        # some scipy methods always give back the last value and not the minimum (e.g. cobyla)
        ea = sorted(zip(E.history, E.history_angles), key=lambda x: x[0])
        E_final = ea[0][0]
-        angles_final = ea[0][1]
+        angles_final = ea[0][1]  # dict((param_keys[i], res.x[i]) for i in range(len(param_keys)))
        angles_final = {**angles_final, **passive_angles}
 
-        return SciPyResults(
+        return SciPyResults(
+            energy=E_final, history=self.history, variables=format_variable_dictionary(angles_final), scipy_result=res
+        )
 
 
 def available_methods(energy=True, gradient=True, hessian=True) -> typing.List[str]:
@@ -313,7 +350,7 @@ def available_methods(energy=True, gradient=True, hessian=True) -> typing.List[str]:
     Returns
     -------
     Available methods of the scipy optimizer, a list of strings.
-
+
     """
     methods = []
     if energy:
@@ -325,26 +362,28 @@ def available_methods(energy=True, gradient=True, hessian=True) -> typing.List[str]:
     return methods
 
 
-def minimize(
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+def minimize(
+    objective: Objective,
+    gradient: typing.Union[str, typing.Dict[Variable, Objective]] = None,
+    hessian: typing.Union[str, typing.Dict[typing.Tuple[Variable, Variable], Objective]] = None,
+    initial_values: typing.Dict[typing.Hashable, numbers.Real] = None,
+    variables: typing.List[typing.Hashable] = None,
+    samples: int = None,
+    maxiter: int = 100,
+    backend: str = None,
+    backend_options: dict = None,
+    noise: NoiseModel = None,
+    device: str = None,
+    method: str = "BFGS",
+    tol: float = 1.0e-3,
+    method_options: dict = None,
+    method_bounds: typing.Dict[typing.Hashable, numbers.Real] = None,
+    method_constraints=None,
+    silent: bool = False,
+    save_history: bool = True,
+    *args,
+    **kwargs,
+) -> SciPyResults:
    """
 
    Parameters
@@ -406,23 +445,29 @@ def minimize(objective: Objective,
 
    # set defaults
 
-    optimizer = OptimizerSciPy(
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    optimizer = OptimizerSciPy(
+        save_history=save_history,
+        maxiter=maxiter,
+        method=method,
+        method_options=method_options,
+        method_bounds=method_bounds,
+        method_constraints=method_constraints,
+        silent=silent,
+        backend=backend,
+        backend_options=backend_options,
+        device=device,
+        samples=samples,
+        noise=noise,
+        tol=tol,
+        *args,
+        **kwargs,
+    )
+    return optimizer(
+        objective=objective,
+        gradient=gradient,
+        hessian=hessian,
+        initial_values=initial_values,
+        variables=variables,
+        *args,
+        **kwargs,
+    )
```