qadence 1.7.8__py3-none-any.whl → 1.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. qadence/__init__.py +1 -1
  2. qadence/analog/device.py +1 -1
  3. qadence/analog/parse_analog.py +1 -2
  4. qadence/backend.py +3 -3
  5. qadence/backends/gpsr.py +8 -2
  6. qadence/backends/horqrux/backend.py +3 -3
  7. qadence/backends/pulser/backend.py +21 -38
  8. qadence/backends/pulser/convert_ops.py +2 -2
  9. qadence/backends/pyqtorch/backend.py +85 -10
  10. qadence/backends/pyqtorch/config.py +10 -3
  11. qadence/backends/pyqtorch/convert_ops.py +245 -233
  12. qadence/backends/utils.py +9 -1
  13. qadence/blocks/abstract.py +1 -1
  14. qadence/blocks/embedding.py +21 -11
  15. qadence/blocks/matrix.py +3 -1
  16. qadence/blocks/primitive.py +37 -11
  17. qadence/circuit.py +1 -1
  18. qadence/constructors/__init__.py +2 -1
  19. qadence/constructors/ansatze.py +176 -0
  20. qadence/engines/differentiable_backend.py +3 -3
  21. qadence/engines/jax/differentiable_backend.py +2 -2
  22. qadence/engines/jax/differentiable_expectation.py +2 -2
  23. qadence/engines/torch/differentiable_backend.py +2 -2
  24. qadence/engines/torch/differentiable_expectation.py +2 -2
  25. qadence/execution.py +14 -16
  26. qadence/extensions.py +1 -1
  27. qadence/log_config.yaml +10 -0
  28. qadence/measurements/shadow.py +101 -133
  29. qadence/measurements/tomography.py +2 -2
  30. qadence/measurements/utils.py +4 -4
  31. qadence/mitigations/analog_zne.py +8 -7
  32. qadence/mitigations/protocols.py +2 -2
  33. qadence/mitigations/readout.py +14 -5
  34. qadence/ml_tools/__init__.py +4 -8
  35. qadence/ml_tools/callbacks/__init__.py +30 -0
  36. qadence/ml_tools/callbacks/callback.py +451 -0
  37. qadence/ml_tools/callbacks/callbackmanager.py +214 -0
  38. qadence/ml_tools/{saveload.py → callbacks/saveload.py} +11 -11
  39. qadence/ml_tools/callbacks/writer_registry.py +430 -0
  40. qadence/ml_tools/config.py +132 -258
  41. qadence/ml_tools/constructors.py +2 -2
  42. qadence/ml_tools/data.py +7 -3
  43. qadence/ml_tools/loss/__init__.py +10 -0
  44. qadence/ml_tools/loss/loss.py +87 -0
  45. qadence/ml_tools/models.py +7 -7
  46. qadence/ml_tools/optimize_step.py +45 -10
  47. qadence/ml_tools/stages.py +46 -0
  48. qadence/ml_tools/train_utils/__init__.py +7 -0
  49. qadence/ml_tools/train_utils/base_trainer.py +548 -0
  50. qadence/ml_tools/train_utils/config_manager.py +184 -0
  51. qadence/ml_tools/trainer.py +692 -0
  52. qadence/model.py +6 -6
  53. qadence/noise/__init__.py +2 -2
  54. qadence/noise/protocols.py +188 -36
  55. qadence/operations/control_ops.py +37 -22
  56. qadence/operations/ham_evo.py +88 -26
  57. qadence/operations/parametric.py +32 -10
  58. qadence/operations/primitive.py +61 -29
  59. qadence/overlap.py +0 -6
  60. qadence/parameters.py +3 -2
  61. qadence/transpile/__init__.py +2 -1
  62. qadence/transpile/noise.py +53 -0
  63. qadence/types.py +39 -3
  64. {qadence-1.7.8.dist-info → qadence-1.9.0.dist-info}/METADATA +5 -9
  65. {qadence-1.7.8.dist-info → qadence-1.9.0.dist-info}/RECORD +67 -63
  66. {qadence-1.7.8.dist-info → qadence-1.9.0.dist-info}/WHEEL +1 -1
  67. qadence/backends/braket/__init__.py +0 -4
  68. qadence/backends/braket/backend.py +0 -234
  69. qadence/backends/braket/config.py +0 -22
  70. qadence/backends/braket/convert_ops.py +0 -116
  71. qadence/ml_tools/printing.py +0 -153
  72. qadence/ml_tools/train_grad.py +0 -395
  73. qadence/ml_tools/train_no_grad.py +0 -199
  74. qadence/noise/readout.py +0 -218
  75. {qadence-1.7.8.dist-info → qadence-1.9.0.dist-info}/licenses/LICENSE +0 -0
qadence/ml_tools/train_utils/config_manager.py
@@ -0,0 +1,184 @@
+ from __future__ import annotations
+
+ import datetime
+ import os
+ from logging import getLogger
+ from pathlib import Path
+
+ from torch import Tensor
+
+ from qadence.ml_tools.config import TrainConfig
+ from qadence.types import ExperimentTrackingTool
+
+ logger = getLogger("ml_tools")
+
+
+ class ConfigManager:
+     """A class to manage and initialize the configuration.
+
+     Used for a machine learning training run with TrainConfig.
+
+     Attributes:
+         config (TrainConfig): The training configuration object
+             containing parameters and settings.
+     """
+
+     optimization_type: str = "with_grad"
+
+     def __init__(self, config: TrainConfig):
+         """
+         Initialize the ConfigManager with a given training configuration.
+
+         Args:
+             config (TrainConfig): The training configuration object.
+         """
+         self.config: TrainConfig = config
+
+     def initialize_config(self) -> None:
+         """
+         Initialize the configuration.
+
+         Sets up the folder structure, handles hyperparameters,
+         derives additional parameters, and logs warnings.
+         """
+         self._initialize_folder()
+         self._handle_hyperparams()
+         self._setup_additional_configuration()
+         self._log_warnings()
+
+     def _initialize_folder(self) -> None:
+         """
+         Initialize the folder structure for logging.
+
+         Creates a log folder if a folder path is specified
+         in the configuration.
+         The config has three relevant parameters:
+         - root_folder: the root folder for logging
+         - _subfolders: list of subfolders inside `root_folder` used for logging
+         - log_folder: the folder currently used for logging.
+         """
+         self.config.log_folder = self._createlog_folder(self.config.root_folder)
+
+     def _createlog_folder(self, root_folder: str | Path) -> Path:
+         """
+         Create a log folder in the specified root folder, adding subfolders if required.
+
+         Args:
+             root_folder (str | Path): The root folder where the log folder will be created.
+
+         Returns:
+             Path: The path to the created log folder.
+         """
+         self._added_new_subfolder: bool = False
+         root_folder_path = Path(root_folder)
+         root_folder_path.mkdir(parents=True, exist_ok=True)
+
+         if self.config.create_subfolder_per_run:
+             self._add_subfolder()
+             log_folder = root_folder_path / self.config._subfolders[-1]
+         else:
+             if self.config._subfolders:
+                 if self.config.log_folder == root_folder_path / self.config._subfolders[-1]:
+                     log_folder = root_folder_path / self.config._subfolders[-1]
+                 else:
+                     log_folder = Path(self.config.log_folder)
+             else:
+                 if self.config.log_folder == Path("./"):
+                     self._add_subfolder()
+                     log_folder = root_folder_path / self.config._subfolders[-1]
+                 else:
+                     log_folder = Path(self.config.log_folder)
+
+         log_folder.mkdir(parents=True, exist_ok=True)
+         return Path(log_folder)
+
+     def _add_subfolder(self) -> None:
+         """
+         Add a unique subfolder name to the configuration for logging.
+
+         The subfolder name includes a run ID, timestamp, and process ID in hexadecimal format.
+         """
+         timestamp = datetime.datetime.now().strftime("%Y%m%dT%H%M%S")
+         pid_hex = hex(os.getpid())[2:]
+         run_id = len(self.config._subfolders) + 1
+         subfolder_name = f"{run_id}_{timestamp}_{pid_hex}"
+         self.config._subfolders.append(str(subfolder_name))
+         self._added_new_subfolder = True
+
+     def _handle_hyperparams(self) -> None:
+         """
+         Handle and filter hyperparameters based on the selected tracking tool.
+
+         Removes incompatible hyperparameters when using TensorBoard.
+         """
+         # tensorboard only allows for certain types as hyperparameters
+
+         if (
+             self.config.hyperparams
+             and self.config.tracking_tool == ExperimentTrackingTool.TENSORBOARD
+         ):
+             self._filter_tb_hyperparams()
+
+     def _filter_tb_hyperparams(self) -> None:
+         """
+         Filter out hyperparameters that cannot be logged by TensorBoard.
+
+         Logs a warning for the removed hyperparameters.
+         """
+
+         # tensorboard only allows for certain types as hyperparameters
+         tb_allowed_hyperparams_types: tuple = (int, float, str, bool, Tensor)
+         keys_to_remove = [
+             key
+             for key, value in self.config.hyperparams.items()
+             if not isinstance(value, tb_allowed_hyperparams_types)
+         ]
+         if keys_to_remove:
+             logger.warning(
+                 f"Tensorboard cannot log the following hyperparameters: {keys_to_remove}."
+             )
+             for key in keys_to_remove:
+                 self.config.hyperparams.pop(key)
+
+     def _setup_additional_configuration(self) -> None:
+         """
+         Derive additional parameters for the training configuration.
+
+         Sets the stopping criterion if it is not already defined.
+         """
+         if self.config.trainstop_criterion is None:
+             self.config.trainstop_criterion = lambda x: x <= self.config.max_iter
+
+     def _log_warnings(self) -> None:
+         """
+         Log warnings for incompatible configurations.
+
+         Covers tracking tools and plotting functions.
+         """
+         if (
+             self.config.plotting_functions
+             and self.config.tracking_tool != ExperimentTrackingTool.MLFLOW
+         ):
+             logger.warning("In-training plots are only available with mlflow tracking.")
+         if (
+             not self.config.plotting_functions
+             and self.config.tracking_tool == ExperimentTrackingTool.MLFLOW
+         ):
+             logger.warning("Tracking with mlflow, but no plotting functions provided.")
+         if self.config.plot_every and not self.config.plotting_functions:
+             logger.warning(
+                 "`plot_every` is only available when `plotting_functions` are provided. "
+                 "No plots will be saved."
+             )
+         if self.config.checkpoint_best_only and not self.config.validation_criterion:
+             logger.warning(
+                 "`checkpoint_best_only` is only available when `validation_criterion` is provided. "
+                 "No checkpoints will be saved."
+             )
+         if self.config.log_folder != Path("./") and self.config.root_folder != Path("./qml_logs"):
+             logger.warning("Both `log_folder` and `root_folder` provided by the user.")
+         if self.config.log_folder != Path("./") and self.config.create_subfolder_per_run:
+             logger.warning(
+                 "`log_folder` is invalid when `create_subfolder_per_run` = True. "
+                 "`root_folder` (default qml_logs) will be used to save logs."
+             )
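
For orientation, here is a minimal usage sketch of the new ConfigManager. It uses only names visible in the diff above (ConfigManager, initialize_config, and the TrainConfig attributes root_folder, create_subfolder_per_run, max_iter, log_folder); the exact TrainConfig constructor signature is not shown in this diff, so the keyword arguments below are assumptions rather than a verified 1.9.0 API.

# Hedged usage sketch: only the ConfigManager calls come from the diff above;
# the TrainConfig keyword arguments are assumed from the attributes it reads.
from qadence.ml_tools.config import TrainConfig
from qadence.ml_tools.train_utils.config_manager import ConfigManager

config = TrainConfig(
    root_folder="./qml_logs",       # assumed keyword; read as config.root_folder above
    create_subfolder_per_run=True,  # assumed keyword; enables the timestamped subfolder logic
    max_iter=100,                   # assumed keyword; used for the default stopping criterion
)

manager = ConfigManager(config)
manager.initialize_config()  # creates the log folder, filters hyperparams, sets the stop criterion

# After initialization, config.log_folder points at something like
# qml_logs/1_<timestamp>_<pid-hex>.
print(config.log_folder)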