superquantx 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. superquantx/__init__.py +321 -0
  2. superquantx/algorithms/__init__.py +55 -0
  3. superquantx/algorithms/base_algorithm.py +413 -0
  4. superquantx/algorithms/hybrid_classifier.py +628 -0
  5. superquantx/algorithms/qaoa.py +406 -0
  6. superquantx/algorithms/quantum_agents.py +1006 -0
  7. superquantx/algorithms/quantum_kmeans.py +575 -0
  8. superquantx/algorithms/quantum_nn.py +544 -0
  9. superquantx/algorithms/quantum_pca.py +499 -0
  10. superquantx/algorithms/quantum_svm.py +346 -0
  11. superquantx/algorithms/vqe.py +553 -0
  12. superquantx/algorithms.py +863 -0
  13. superquantx/backends/__init__.py +265 -0
  14. superquantx/backends/base_backend.py +321 -0
  15. superquantx/backends/braket_backend.py +420 -0
  16. superquantx/backends/cirq_backend.py +466 -0
  17. superquantx/backends/ocean_backend.py +491 -0
  18. superquantx/backends/pennylane_backend.py +419 -0
  19. superquantx/backends/qiskit_backend.py +451 -0
  20. superquantx/backends/simulator_backend.py +455 -0
  21. superquantx/backends/tket_backend.py +519 -0
  22. superquantx/circuits.py +447 -0
  23. superquantx/cli/__init__.py +28 -0
  24. superquantx/cli/commands.py +528 -0
  25. superquantx/cli/main.py +254 -0
  26. superquantx/client.py +298 -0
  27. superquantx/config.py +326 -0
  28. superquantx/exceptions.py +287 -0
  29. superquantx/gates.py +588 -0
  30. superquantx/logging_config.py +347 -0
  31. superquantx/measurements.py +702 -0
  32. superquantx/ml.py +936 -0
  33. superquantx/noise.py +760 -0
  34. superquantx/utils/__init__.py +83 -0
  35. superquantx/utils/benchmarking.py +523 -0
  36. superquantx/utils/classical_utils.py +575 -0
  37. superquantx/utils/feature_mapping.py +467 -0
  38. superquantx/utils/optimization.py +410 -0
  39. superquantx/utils/quantum_utils.py +456 -0
  40. superquantx/utils/visualization.py +654 -0
  41. superquantx/version.py +33 -0
  42. superquantx-0.1.0.dist-info/METADATA +365 -0
  43. superquantx-0.1.0.dist-info/RECORD +46 -0
  44. superquantx-0.1.0.dist-info/WHEEL +4 -0
  45. superquantx-0.1.0.dist-info/entry_points.txt +2 -0
  46. superquantx-0.1.0.dist-info/licenses/LICENSE +21 -0
superquantx/algorithms/qaoa.py (new file)
@@ -0,0 +1,406 @@
+"""Quantum Approximate Optimization Algorithm (QAOA) implementation.
+
+This module provides a QAOA implementation for solving combinatorial optimization
+problems using quantum circuits with parameterized gates.
+"""
+
+import logging
+from typing import Any, Callable, Dict, Optional, Tuple, Union
+
+import numpy as np
+from scipy.optimize import minimize
+
+from .base_algorithm import OptimizationQuantumAlgorithm
+
+
+logger = logging.getLogger(__name__)
+
+class QAOA(OptimizationQuantumAlgorithm):
+    """Quantum Approximate Optimization Algorithm for combinatorial optimization.
+
+    QAOA is a hybrid quantum-classical algorithm that alternates between
+    quantum evolution and classical parameter optimization to find approximate
+    solutions to combinatorial optimization problems.
+
+    The algorithm works by:
+    1. Preparing an initial superposition state
+    2. Applying alternating problem and mixer Hamiltonians
+    3. Measuring the quantum state
+    4. Classically optimizing the parameters
+
+    Args:
+        backend: Quantum backend for circuit execution
+        p: Number of QAOA layers (depth)
+        problem_hamiltonian: Problem Hamiltonian function
+        mixer_hamiltonian: Mixer Hamiltonian function
+        initial_state: Initial quantum state preparation
+        optimizer: Classical optimizer ('COBYLA', 'L-BFGS-B', etc.)
+        shots: Number of measurement shots
+        maxiter: Maximum optimization iterations
+        **kwargs: Additional parameters
+
+    Example:
+        >>> # Define Max-Cut problem
+        >>> def problem_ham(gamma, graph):
+        ...     return create_maxcut_hamiltonian(gamma, graph)
+        >>> qaoa = QAOA(backend='pennylane', p=2, problem_hamiltonian=problem_ham)
+        >>> result = qaoa.optimize(graph_data)
+
+    """
+
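For readers skimming the diff, here is a minimal usage sketch of the class added above, following the docstring's Example. The `'pennylane'` backend string, `create_maxcut_hamiltonian`, and the inherited `optimize()` entry point are assumptions taken from that docstring, not APIs confirmed elsewhere in this file.

    import numpy as np
    from superquantx.algorithms.qaoa import QAOA

    # Toy 4-node Max-Cut instance encoded as an adjacency matrix (a 4-cycle)
    graph = np.array([[0, 1, 1, 0],
                      [1, 0, 0, 1],
                      [1, 0, 0, 1],
                      [0, 1, 1, 0]])

    qaoa = QAOA(backend='pennylane', p=2, shots=2048)  # backend string assumed, per docstring
    qaoa.fit(graph)                  # stores the problem instance, infers n_qubits = 4
    result = qaoa.optimize(graph)    # hypothetical base-class entry point shown in the docstring
    solution = qaoa.predict(graph)   # most frequent measured bit string, e.g. array([0, 1, 1, 0])
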
+    def __init__(
+        self,
+        backend: Union[str, Any],
+        p: int = 1,
+        problem_hamiltonian: Optional[Callable] = None,
+        mixer_hamiltonian: Optional[Callable] = None,
+        initial_state: str = 'uniform_superposition',
+        optimizer: str = 'COBYLA',
+        shots: int = 1024,
+        maxiter: int = 1000,
+        **kwargs
+    ) -> None:
+        super().__init__(backend=backend, shots=shots, **kwargs)
+
+        self.p = p
+        self.problem_hamiltonian = problem_hamiltonian
+        self.mixer_hamiltonian = mixer_hamiltonian or self._default_mixer
+        self.initial_state = initial_state
+        self.optimizer = optimizer
+        self.maxiter = maxiter
+
+        # QAOA-specific parameters
+        self.n_qubits = None
+        self.problem_instance = None
+        self.circuit = None
+
+        # Parameter bounds
+        self.gamma_bounds = (0, 2*np.pi)
+        self.beta_bounds = (0, np.pi)
+
+        logger.info(f"Initialized QAOA with p={p}, optimizer={optimizer}")
+
+    def _default_mixer(self, beta: float) -> Any:
+        """Default X-mixer Hamiltonian."""
+        if hasattr(self.backend, 'create_mixer_hamiltonian'):
+            return self.backend.create_mixer_hamiltonian(beta, self.n_qubits)
+        else:
+            return self._fallback_mixer(beta)
+
+    def _fallback_mixer(self, beta: float) -> Any:
+        """Fallback mixer implementation."""
+        logger.warning("Using fallback mixer implementation")
+        return None
+
+    def _create_initial_state(self) -> Any:
+        """Create initial quantum state."""
+        if self.initial_state == 'uniform_superposition':
+            if hasattr(self.backend, 'create_uniform_superposition'):
+                return self.backend.create_uniform_superposition(self.n_qubits)
+            else:
+                return self._fallback_initial_state()
+        else:
+            return self.initial_state
+
+    def _fallback_initial_state(self) -> Any:
+        """Fallback initial state preparation."""
+        logger.warning("Using fallback initial state")
+        return None
+
+    def _create_qaoa_circuit(self, params: np.ndarray) -> Any:
+        """Create QAOA circuit with given parameters.
+
+        Args:
+            params: Array of [gamma_1, ..., gamma_p, beta_1, ..., beta_p] (all gammas first, then all betas)
+
+        Returns:
+            Quantum circuit
+
+        """
+        if len(params) != 2 * self.p:
+            raise ValueError(f"Expected {2*self.p} parameters, got {len(params)}")
+
+        gammas = params[:self.p]
+        betas = params[self.p:]
+
+        try:
+            if hasattr(self.backend, 'create_qaoa_circuit'):
+                return self.backend.create_qaoa_circuit(
+                    n_qubits=self.n_qubits,
+                    gammas=gammas,
+                    betas=betas,
+                    problem_hamiltonian=self.problem_hamiltonian,
+                    mixer_hamiltonian=self.mixer_hamiltonian,
+                    initial_state=self._create_initial_state(),
+                    problem_instance=self.problem_instance
+                )
+            else:
+                return self._fallback_circuit(gammas, betas)
+        except Exception as e:
+            logger.error(f"Failed to create QAOA circuit: {e}")
+            return self._fallback_circuit(gammas, betas)
+
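A short illustration of the parameter layout `_create_qaoa_circuit` expects (all gammas first, then all betas, matching `_generate_initial_params` further down); the numeric values are arbitrary:

    p = 2
    params = np.array([0.8, 1.1, 0.3, 0.5])  # [gamma_1, gamma_2, beta_1, beta_2]
    gammas, betas = params[:p], params[p:]   # gammas = [0.8, 1.1], betas = [0.3, 0.5]
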
+    def _fallback_circuit(self, gammas: np.ndarray, betas: np.ndarray) -> Any:
+        """Fallback circuit implementation."""
+        logger.warning("Using fallback QAOA circuit")
+        return None
+
+    def _objective_function(self, params: np.ndarray) -> float:
+        """QAOA objective function to minimize.
+
+        Args:
+            params: Circuit parameters
+
+        Returns:
+            Negative expectation value (for minimization)
+
+        """
+        try:
+            # Create circuit with current parameters
+            circuit = self._create_qaoa_circuit(params)
+
+            # Execute circuit and get measurement results
+            if hasattr(self.backend, 'execute_qaoa'):
+                expectation = self.backend.execute_qaoa(
+                    circuit,
+                    self.problem_hamiltonian,
+                    self.problem_instance,
+                    shots=self.shots
+                )
+            else:
+                expectation = self._fallback_execution(circuit)
+
+            # Store optimization history
+            self.optimization_history_.append({
+                'params': params.copy(),
+                'cost': expectation,
+                'iteration': len(self.optimization_history_)
+            })
+
+            return -expectation  # Negative for minimization
+
+        except Exception as e:
+            logger.error(f"Error in objective function: {e}")
+            return float('inf')
+
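The backend hook `execute_qaoa` is expected to return a scalar expectation value of the problem Hamiltonian. As a rough sketch of what that means for Max-Cut (not the actual backend code, which is not part of this file), the expectation can be estimated as the mean cut value over measured bit strings:

    import numpy as np

    def maxcut_value(adjacency: np.ndarray, bits: np.ndarray) -> float:
        """Weighted number of edges cut by a 0/1 assignment."""
        z = 1 - 2 * bits  # map {0, 1} -> {+1, -1}
        return 0.25 * float(np.sum(adjacency * (1 - np.outer(z, z))))

    def estimate_expectation(adjacency: np.ndarray, samples: np.ndarray) -> float:
        """Mean cut value over an array of sampled bit strings (shots x n_qubits)."""
        return float(np.mean([maxcut_value(adjacency, s) for s in samples]))
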
+    def _fallback_execution(self, circuit: Any) -> float:
+        """Fallback circuit execution."""
+        logger.warning("Using fallback circuit execution")
+        return np.random.random()  # Placeholder
+
+    def fit(self, X: np.ndarray, y: Optional[np.ndarray] = None, **kwargs) -> 'QAOA':
+        """Fit QAOA to problem instance.
+
+        Args:
+            X: Problem instance data (e.g., adjacency matrix for Max-Cut)
+            y: Not used in QAOA
+            **kwargs: Additional parameters
+
+        Returns:
+            Self for method chaining
+
+        """
+        logger.info(f"Fitting QAOA to problem instance of shape {X.shape}")
+
+        self.problem_instance = X
+        self.n_qubits = self._infer_qubits(X)
+
+        # Reset optimization history
+        self.optimization_history_ = []
+
+        self.is_fitted = True
+        return self
+
+    def _infer_qubits(self, problem_instance: np.ndarray) -> int:
+        """Infer number of qubits from problem instance."""
+        if len(problem_instance.shape) == 2:
+            # Assume square matrix (e.g., graph adjacency matrix)
+            return problem_instance.shape[0]
+        else:
+            # Assume 1D problem encoding
+            return int(np.ceil(np.log2(len(problem_instance))))
+
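Concretely, the two branches of `_infer_qubits` behave as follows:

    adjacency = np.zeros((4, 4))
    adjacency.shape[0]                    # 2-D branch -> 4 qubits (one per graph node)

    encoding = np.zeros(8)
    int(np.ceil(np.log2(len(encoding))))  # 1-D branch -> 3 qubits
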
+    def predict(self, X: np.ndarray, **kwargs) -> np.ndarray:
+        """Get optimal solution from QAOA results.
+
+        Args:
+            X: Problem instance (not used if same as training)
+            **kwargs: Additional parameters
+
+        Returns:
+            Optimal bit string solution
+
+        """
+        if not self.is_fitted or getattr(self, 'optimal_params_', None) is None:
+            raise ValueError("QAOA must be fitted and optimized before prediction")
+
+        # Create circuit with optimal parameters
+        circuit = self._create_qaoa_circuit(self.optimal_params_)
+
+        # Sample from the optimized quantum state
+        if hasattr(self.backend, 'sample_circuit'):
+            samples = self.backend.sample_circuit(circuit, shots=self.shots)
+            # Return most frequent bit string
+            unique, counts = np.unique(samples, axis=0, return_counts=True)
+            best_solution = unique[np.argmax(counts)]
+        else:
+            # Fallback: return random solution
+            best_solution = np.random.randint(0, 2, self.n_qubits)
+
+        return best_solution
+
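The bit string returned by `predict` can be read directly as a graph partition for Max-Cut; the concrete values below are illustrative:

    solution = qaoa.predict(graph)            # e.g. array([0, 1, 1, 0])
    side_a = np.where(solution == 0)[0]       # nodes assigned 0
    side_b = np.where(solution == 1)[0]       # nodes assigned 1
    cut_size = maxcut_value(graph, solution)  # using the helper sketched earlier
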
+    def _run_optimization(self, objective_function, initial_params: Optional[np.ndarray] = None, **kwargs):
+        """Run QAOA optimization.
+
+        Args:
+            objective_function: Function to optimize (ignored, uses internal)
+            initial_params: Initial parameter guess
+            **kwargs: Additional optimization parameters
+
+        Returns:
+            Optimization result
+
+        """
+        if not self.is_fitted:
+            raise ValueError("QAOA must be fitted before optimization")
+
+        # Use provided initial parameters or generate random ones
+        if initial_params is None:
+            initial_params = self._generate_initial_params()
+
+        logger.info(f"Starting QAOA optimization with {len(initial_params)} parameters")
+
+        # Set up parameter bounds
+        bounds = []
+        for _ in range(self.p):
+            bounds.append(self.gamma_bounds)  # gamma bounds
+        for _ in range(self.p):
+            bounds.append(self.beta_bounds)  # beta bounds
+
+        # Run classical optimization
+        try:
+            result = minimize(
+                fun=self._objective_function,
+                x0=initial_params,
+                method=self.optimizer,
+                bounds=bounds,
+                options={
+                    'maxiter': self.maxiter,
+                    'disp': True
+                }
+            )
+
+            self.optimal_params_ = result.x
+            self.optimal_value_ = -result.fun  # Convert back from minimization
+
+            logger.info(f"QAOA optimization completed. Best value: {self.optimal_value_:.6f}")
+
+            return {
+                'optimal_params': self.optimal_params_,
+                'optimal_value': self.optimal_value_,
+                'success': result.success,
+                'message': result.message,
+                'n_iterations': result.nfev,
+            }
+
+        except Exception as e:
+            logger.error(f"QAOA optimization failed: {e}")
+            raise
+
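For reference, the dictionary returned by `_run_optimization` has this shape; the values below are illustrative, not real results:

    result = {
        'optimal_params': np.array([0.71, 1.02, 0.33, 0.48]),  # [gammas..., betas...] for p=2
        'optimal_value': 3.4,    # best (un-negated) expectation found
        'success': True,
        'message': 'Optimization terminated successfully.',
        'n_iterations': 118,     # scipy's nfev, i.e. objective evaluations
    }
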
+    def _generate_initial_params(self) -> np.ndarray:
+        """Generate random initial parameters."""
+        gammas = np.random.uniform(*self.gamma_bounds, self.p)
+        betas = np.random.uniform(*self.beta_bounds, self.p)
+        return np.concatenate([gammas, betas])
+
+    def get_optimization_landscape(self, param_range: Tuple[float, float], resolution: int = 50) -> Dict[str, Any]:
+        """Compute optimization landscape for visualization.
+
+        Args:
+            param_range: Range of parameters to explore
+            resolution: Number of points per dimension
+
+        Returns:
+            Dictionary with landscape data
+
+        """
+        if self.p != 1:
+            logger.warning("Landscape visualization only supported for p=1")
+            return {}
+
+        gamma_range = np.linspace(*param_range, resolution)
+        beta_range = np.linspace(*param_range, resolution)
+
+        landscape = np.zeros((resolution, resolution))
+
+        for i, gamma in enumerate(gamma_range):
+            for j, beta in enumerate(beta_range):
+                params = np.array([gamma, beta])
+                landscape[i, j] = -self._objective_function(params)
+
+        return {
+            'gamma_range': gamma_range,
+            'beta_range': beta_range,
+            'landscape': landscape,
+            'optimal_params': self.optimal_params_ if hasattr(self, 'optimal_params_') else None
+        }
+
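A possible way to visualize the returned landscape for a p=1 instance; matplotlib is an assumption here, not a dependency declared in this file:

    import matplotlib.pyplot as plt

    qaoa_p1 = QAOA(backend='pennylane', p=1)  # landscape scan is only supported for p=1
    qaoa_p1.fit(graph)
    land = qaoa_p1.get_optimization_landscape(param_range=(0.0, np.pi), resolution=25)

    # rows of 'landscape' are indexed by gamma, columns by beta
    plt.pcolormesh(land['beta_range'], land['gamma_range'], land['landscape'], shading='auto')
    plt.xlabel('beta')
    plt.ylabel('gamma')
    plt.colorbar(label='expectation value')
    plt.show()
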
+    def analyze_solution_quality(self, true_optimum: Optional[float] = None) -> Dict[str, Any]:
+        """Analyze quality of QAOA solution.
+
+        Args:
+            true_optimum: Known optimal value for comparison
+
+        Returns:
+            Analysis results
+
+        """
+        if getattr(self, 'optimal_value_', None) is None:
+            raise ValueError("No optimal solution available")
+
+        analysis = {
+            'qaoa_value': self.optimal_value_,
+            'n_layers': self.p,
+            'n_parameters': 2 * self.p,
+            'optimization_iterations': len(self.optimization_history_),
+        }
+
+        if true_optimum is not None:
+            approximation_ratio = self.optimal_value_ / true_optimum
+            analysis.update({
+                'true_optimum': true_optimum,
+                'approximation_ratio': approximation_ratio,
+                'relative_error': abs(1 - approximation_ratio),
+            })
+
+        # Analyze convergence (stored 'cost' entries are un-negated expectation values)
+        if len(self.optimization_history_) > 1:
+            costs = [entry['cost'] for entry in self.optimization_history_]
+            analysis.update({
+                'convergence_achieved': costs[-1] == max(costs),
+                'improvement_over_random': self.optimal_value_ - np.mean(costs[:5]) if len(costs) >= 5 else 0,
+                'final_cost_variance': np.var(costs[-10:]) if len(costs) >= 10 else 0,
+            })
+
+        return analysis
+
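Example of reading the analysis when the true optimum is known; for the toy 4-cycle sketched earlier the maximum cut is 4, and the field interpretations follow the code above:

    analysis = qaoa.analyze_solution_quality(true_optimum=4.0)
    analysis['approximation_ratio']   # qaoa_value / true_optimum; 1.0 would mean the optimum was reached
    analysis['convergence_achieved']  # True if the final recorded expectation is the best one seen
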
+    def get_params(self, deep: bool = True) -> Dict[str, Any]:
+        """Get QAOA parameters."""
+        params = super().get_params(deep)
+        params.update({
+            'p': self.p,
+            'optimizer': self.optimizer,
+            'initial_state': self.initial_state,
+            'maxiter': self.maxiter,
+            'gamma_bounds': self.gamma_bounds,
+            'beta_bounds': self.beta_bounds,
+        })
+        return params
+
+    def set_params(self, **params) -> 'QAOA':
+        """Set QAOA parameters."""
+        if self.is_fitted and any(key in params for key in ['p', 'problem_hamiltonian', 'mixer_hamiltonian']):
+            logger.warning("Changing core parameters requires refitting the model")
+            self.is_fitted = False
+
+        return super().set_params(**params)