labmate 0.10.4__py3-none-any.whl → 0.10.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- labmate/__config__.py +1 -1
- labmate/acquisition/__init__.py +3 -2
- labmate/acquisition/acquisition_data.py +18 -0
- labmate/acquisition/acquisition_loop.py +11 -18
- labmate/acquisition/acquisition_manager.py +101 -41
- labmate/acquisition/analysis_data.py +15 -25
- labmate/acquisition/analysis_loop.py +9 -7
- labmate/acquisition/backend.py +24 -0
- labmate/acquisition/config_file.py +5 -3
- labmate/acquisition/custom_lint.py +2 -3
- labmate/acquisition_notebook/__init__.py +2 -2
- labmate/acquisition_notebook/acquisition_analysis_manager.py +30 -45
- labmate/acquisition_notebook/display_widget.py +9 -9
- labmate/attrdict/attrdict_class.py +4 -10
- labmate/display/__init__.py +2 -3
- labmate/display/buttons.py +1 -0
- labmate/display/links.py +2 -1
- labmate/display/main.py +3 -2
- labmate/display/platform_utils/__init__.py +3 -1
- labmate/display/platform_utils/windows_utils.py +3 -9
- labmate/parsing/__init__.py +3 -5
- labmate/parsing/parsed_value.py +30 -10
- labmate/parsing/saving.py +2 -6
- labmate/utils/async_utils.py +1 -0
- labmate/utils/autoreload.py +2 -0
- labmate/utils/file_read.py +12 -13
- labmate/utils/lint.py +9 -11
- labmate/utils/random_utils.py +1 -0
- labmate/utils/title_parsing.py +4 -14
- {labmate-0.10.4.dist-info → labmate-0.10.6.dist-info}/METADATA +13 -2
- labmate-0.10.6.dist-info/RECORD +41 -0
- {labmate-0.10.4.dist-info → labmate-0.10.6.dist-info}/WHEEL +1 -1
- labmate-0.10.4.dist-info/RECORD +0 -40
- {labmate-0.10.4.dist-info → labmate-0.10.6.dist-info/licenses}/LICENCE +0 -0
- {labmate-0.10.4.dist-info → labmate-0.10.6.dist-info}/top_level.txt +0 -0
labmate/__config__.py
CHANGED
labmate/acquisition/__init__.py
CHANGED
@@ -1,6 +1,7 @@
 # flake8: noqa: F401
-from .acquisition_manager import AcquisitionManager
-from .acquisition_loop import AcquisitionLoop
 from .acquisition_data import NotebookAcquisitionData
+from .acquisition_loop import AcquisitionLoop
+from .acquisition_manager import AcquisitionManager
 from .analysis_data import AnalysisData, FigureProtocol
 from .analysis_loop import AnalysisLoop
+from .backend import AcquisitionBackend
labmate/acquisition/acquisition_data.py
CHANGED
@@ -179,3 +179,21 @@ class NotebookAcquisitionData(DH5):
         if step is None:
             step = self.current_step
         self._cells[step] = cell
+
+    def backend_save(self) -> None:
+        """Hook for acquisition backends to serialise custom payloads.
+
+        Custom backends can override this hook to expose data that should be
+        included when persisting to a remote store.
+        """
+
+        pass
+
+    def backend_load(self) -> None:
+        """Hook for acquisition backends to hydrate from remote state.
+
+        Custom backends can override this hook to restore local state after a
+        backend has fetched remote payloads.
+        """
+
+        pass
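Note: the following is not part of the diff. It is a minimal sketch of how the new backend_save/backend_load hooks on NotebookAcquisitionData could be overridden; the SidecarAcquisitionData class, the sidecar-file naming, and the assumption that the DH5-based object exposes a filepath attribute are illustrative only.

import json

from labmate.acquisition import NotebookAcquisitionData


class SidecarAcquisitionData(NotebookAcquisitionData):
    """Hypothetical subclass that mirrors a small marker into a sidecar JSON file."""

    def backend_save(self) -> None:
        # Serialise a small custom payload next to the main data file.
        with open(str(self.filepath) + ".backend.json", "w", encoding="utf-8") as file:
            json.dump({"source": str(self.filepath)}, file)

    def backend_load(self) -> None:
        # Restore local state from the sidecar payload if a backend fetched one.
        try:
            with open(str(self.filepath) + ".backend.json", "r", encoding="utf-8") as file:
                json.load(file)
        except FileNotFoundError:
            pass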
labmate/acquisition/acquisition_loop.py
CHANGED
@@ -96,9 +96,7 @@ class AcquisitionLoop(DH5):
     ) -> Iterator:
         """Return np.arange(start, stop, step) given a start, stop and step."""

-    def __call__(
-        self, *args, iterable: Optional[Iterable] = None, **kwds
-    ) -> Optional[Iterator]:
+    def __call__(self, *args, iterable: Optional[Iterable] = None, **kwds) -> Optional[Iterator]:
         """Append_data or arange.

         If kwds are provided then is same as calling append_data(kwds),
@@ -115,10 +113,11 @@ class AcquisitionLoop(DH5):
             return None

         if iterable is None:
-
-
-
-
+            iterable = (
+                np.arange(*args)
+                if isinstance(args[0], (int, float, np.int_, np.floating))  # type: ignore
+                else args[0]
+            )

         if iterable is None:
             raise ValueError("You should provide an iterable")
@@ -166,8 +165,8 @@ class AcquisitionLoop(DH5):
         if len(key_shape) < len(self[key].shape):
             raise ValueError(
                 f"Object {key} hasn't the same shape as before. Now it's"
-                f" {key_shape[len(shape):]},"
-                f" but before it was {self[key].shape[len(shape):]}."
+                f" {key_shape[len(shape) :]},"
+                f" but before it was {self[key].shape[len(shape) :]}."
             )
         if len(key_shape) > len(self[key].shape):
             raise ValueError(
@@ -178,9 +177,7 @@ class AcquisitionLoop(DH5):
         self[key] = SyncNp(
             np.pad(
                 self[key],
-                pad_width=tuple(
-                    (0, i - j) for i, j in zip(key_shape, self[key].shape)
-                ),
+                pad_width=tuple((0, i - j) for i, j in zip(key_shape, self[key].shape)),
             )
         )
         self[key][iteration] = value
@@ -201,9 +198,7 @@ class AcquisitionLoop(DH5):
     ):
         if length is None:
             if not hasattr(iterable, "__len__"):
-                raise TypeError(
-                    "Iterable should has __len__ method or length should be provided"
-                )
+                raise TypeError("Iterable should has __len__ method or length should be provided")
             length = len(iterable)  # type: ignore

         def loop_iter(array, length):
@@ -283,9 +278,7 @@ class AcquisitionLoop(DH5):
         """
         if key is None:
             if not self._save_indexes:
-                raise ValueError(
-                    "As indexes are not saved with the Loop, key should be provided."
-                )
+                raise ValueError("As indexes are not saved with the Loop, key should be provided.")
             key = f"__index_{self._level}__"

         iteration = tuple(self._iteration[: self._level])
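Note: the following is not part of the diff. The reformatted __call__ above builds the loop iterable with a single conditional expression: np.arange(*args) when the first positional argument is numeric, otherwise args[0] is taken as the iterable itself. A self-contained sketch of that dispatch (build_iterable is a hypothetical helper, not a labmate function):

from typing import Iterable

import numpy as np


def build_iterable(*args) -> Iterable:
    # Numeric first argument: behave like np.arange(start, stop, step).
    # Otherwise: the first argument is already the iterable to loop over.
    return (
        np.arange(*args)
        if isinstance(args[0], (int, float, np.int_, np.floating))
        else args[0]
    )


print(list(build_iterable(0, 5)))        # five values, 0 through 4, produced by np.arange
print(list(build_iterable(["a", "b"])))  # the list itself, passed through unchanged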
labmate/acquisition/acquisition_manager.py
CHANGED
@@ -1,5 +1,16 @@
 import os
-from
+from concurrent.futures import Future, ThreadPoolExecutor
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Dict,
+    Iterable,
+    List,
+    NamedTuple,
+    Optional,
+    Tuple,
+    Union,
+)

 from dh5 import jsn
 from dh5.path import Path
@@ -10,6 +21,10 @@ from ..utils.file_read import read_file, read_files
 from .acquisition_data import NotebookAcquisitionData


+if TYPE_CHECKING:
+    from .backend import AcquisitionBackend
+
+
 class AcquisitionTmpData(NamedTuple):
     """Temporary data that stores inside temp.json."""

@@ -40,13 +55,16 @@ class AcquisitionManager:

     cell: Optional[str] = None

+    _backend: Optional[Tuple["AcquisitionBackend", ...]] = None
+
     def __init__(
         self,
-        data_directory: Optional[Union[str, Path]] = None,
+        data_directory: Optional[Union[str, Path, Any]] = None,
         *,
         config_files: Optional[List[str]] = None,
         save_files: Optional[bool] = None,
         save_on_edit: Optional[bool] = None,
+        backend: Optional[Union["AcquisitionBackend", Iterable["AcquisitionBackend"]]] = None,
     ):
         if save_files is not None:
             self._save_files = save_files
@@ -58,12 +76,24 @@ class AcquisitionManager:
         self._acquisition_tmp_data = None
         self._once_saved = False

+        self._backend = (
+            tuple(backend)
+            if isinstance(backend, Iterable)
+            else (backend,)
+            if backend is not None
+            else None
+        )
+
         self.config_files = []
         self.config_files_eval = {}
         self._configs_last_modified = []

         if data_directory is not None:
-            self.data_directory =
+            self.data_directory = (
+                Path(str(data_directory))
+                if not isinstance(data_directory, Path)
+                else data_directory
+            )
         elif "ACQUISITION_DIR" in os.environ:
             self.data_directory = Path(os.environ["ACQUISITION_DIR"])
         else:
@@ -108,13 +138,9 @@ class AcquisitionManager:
             configs: dict of configurations files to save
             directory: directory where the data is stored
         """
-        acquisition_tmp_data = self._acquisition_tmp_data or self.get_temp_data(
-            self.temp_file_path
-        )
+        acquisition_tmp_data = self._acquisition_tmp_data or self.get_temp_data(self.temp_file_path)
         if acquisition_tmp_data is None:
-            raise ValueError(
-                "You should create a new acquisition. It will create temp.json file."
-            )
+            raise ValueError("You should create a new acquisition. It will create temp.json file.")
         return acquisition_tmp_data

     @acquisition_tmp_data.setter
@@ -136,12 +162,10 @@ class AcquisitionManager:
             filename = [str(filename)]

         self.config_files = list(filename)
-        self._config_files_names_to_path = {
-            os.path.basename(file): file for file in self.config_files
-        }
+        self._config_files_names_to_path = {Path(file).name: file for file in self.config_files}

         for config_file in self.config_files:
-            if not
+            if not Path(config_file).exists():
                 raise ValueError(f"Configuration file at {config_file} does not exist")

         return self
@@ -151,7 +175,7 @@ class AcquisitionManager:
             raise ValueError(
                 "Configuration file should be specified before with set_config_file function"
             )
-        self.config_files_eval[
+        self.config_files_eval[Path(file).name] = module

     def set_init_analyse_file(self, filename: Union[str, Path]) -> None:
         if not isinstance(filename, Path):
@@ -168,22 +192,18 @@ class AcquisitionManager:
         if not experiment_path.exists():
             experiment_path.makedirs()
         # Copy init_analyses code to the experiment directory if it does not exist
-        if self._init_code and not
-            with open(
-                experiment_path / "init_analyse.py", "w", encoding="utf-8"
-            ) as file:
+        if self._init_code and not (experiment_path / "init_analyse.py").exists():
+            with open(experiment_path / "init_analyse.py", "w", encoding="utf-8") as file:
                 file.write(self._init_code)

-        filepath_original = filepath =
-            experiment_path / f"{dic.time_stamp}__{dic.experiment_name}"
-        )
+        filepath_original = filepath = experiment_path / f"{dic.time_stamp}__{dic.experiment_name}"

         # If ignore existence is True, no check is required
         if ignore_existence:
             return filepath
         # If the file already exists, add a suffix to the name
         index = 1
-        while
+        while (filepath + ".h5").exists():
             filepath = filepath_original + f"__{index}"
             index += 1

@@ -191,12 +211,50 @@ class AcquisitionManager:

     @staticmethod
     def get_temp_data(path: Path) -> Optional[AcquisitionTmpData]:
-        if not
+        if not Path(path).exists():
             return None
         return AcquisitionTmpData(**jsn.read(path))

     def _get_configs_last_modified(self) -> List[float]:
-        return [
+        return [Path(file).stat().st_mtime for file in self.config_files]
+
+    def _schedule_backend_save(self, acquisition: NotebookAcquisitionData) -> Optional[Future]:
+        if self._backend is None:
+            return None
+
+        executor = ThreadPoolExecutor(max_workers=1)
+
+        def save_snapshot():
+            if self._backend is None:
+                return
+            for backend in self._backend:
+                backend.save_snapshot(acquisition)
+
+        def shutdown_executor(future: Future):
+            executor.shutdown(wait=False)
+
+        future = executor.submit(save_snapshot)
+        future.add_done_callback(shutdown_executor)
+        return future
+
+    def _schedule_backend_load(self, acquisition: NotebookAcquisitionData) -> Optional[Future]:
+        if self._backend is None:
+            return None
+
+        executor = ThreadPoolExecutor(max_workers=1)
+
+        def load_snapshot():
+            if self._backend is None:
+                return
+            for backend in self._backend:
+                backend.load_snapshot(acquisition)
+
+        def shutdown_executor(future: Future):
+            executor.shutdown(wait=False)
+
+        future = executor.submit(load_snapshot)
+        future.add_done_callback(shutdown_executor)
+        return future

     def new_acquisition(
         self, name: str, cell: Optional[str] = None, save_on_edit: Optional[bool] = None
@@ -209,9 +267,7 @@ class AcquisitionManager:
         self._configs_last_modified = self._get_configs_last_modified()

         if self.config_files_eval:
-            configs = append_values_from_modules_to_files(
-                configs, self.config_files_eval
-            )
+            configs = append_values_from_modules_to_files(configs, self.config_files_eval)

         dic = AcquisitionTmpData(
             experiment_name=name,
@@ -222,9 +278,7 @@ class AcquisitionManager:

         self.acquisition_tmp_data = dic

-        self._current_acquisition = self.get_acquisition(
-            replace=True, save_on_edit=save_on_edit
-        )
+        self._current_acquisition = self.get_acquisition(replace=True, save_on_edit=save_on_edit)

         return self.current_acquisition

@@ -238,9 +292,7 @@ class AcquisitionManager:
         configs = read_files(self.config_files)

         if self.config_files_eval:
-            configs = append_values_from_modules_to_files(
-                configs, self.config_files_eval
-            )
+            configs = append_values_from_modules_to_files(configs, self.config_files_eval)

         if name is None:
             name = self.current_experiment_name + "_item"
@@ -256,7 +308,7 @@ class AcquisitionManager:
         configs = configs if configs else None
         save_on_edit = save_on_edit if save_on_edit is not None else self._save_on_edit

-
+        acquisition = NotebookAcquisitionData(
             filepath=str(filepath),
             configs=configs,
             cell=cell or self.cell,
@@ -264,6 +316,10 @@ class AcquisitionManager:
             save_on_edit=save_on_edit,
             save_files=self._save_files,
         )
+        # TODO: chech if this gives the expected behaviour
+        self._schedule_backend_load(acquisition)
+
+        return acquisition

     @property
     def current_acquisition(self) -> NotebookAcquisitionData:
@@ -284,24 +340,20 @@ class AcquisitionManager:

     @property
     def current_experiment_name(self) -> str:
-        return
-            self.acquisition_tmp_data.experiment_name
-        )  # self.current_acquisition.name
+        return self.acquisition_tmp_data.experiment_name  # self.current_acquisition.name

     def get_acquisition(
         self, replace: Optional[bool] = False, save_on_edit: Optional[bool] = None
     ) -> NotebookAcquisitionData:
         acquisition_tmp_data = self.acquisition_tmp_data
-        filepath = self.create_path_from_tmp_data(
-            acquisition_tmp_data, ignore_existence=True
-        )
+        filepath = self.create_path_from_tmp_data(acquisition_tmp_data, ignore_existence=True)
         configs = acquisition_tmp_data.configs
         configs = configs if configs else None
         cell = self.cell

         save_on_edit = save_on_edit if save_on_edit is not None else self._save_on_edit

-
+        acquisition = NotebookAcquisitionData(
             filepath=str(filepath),
             configs=configs,
             cell=cell,
@@ -311,6 +363,10 @@ class AcquisitionManager:
             experiment_name=acquisition_tmp_data.experiment_name,
         )

+        self._schedule_backend_load(acquisition)
+
+        return acquisition
+
     def save_acquisition(self, update_: bool = True, /, **kwds) -> "AcquisitionManager":
         acq_data = self.current_acquisition
         if acq_data is None:
@@ -329,4 +385,8 @@ class AcquisitionManager:
         if acq_data.save_on_edit is False:
             acq_data.save()
         self._once_saved = True
+        self._schedule_backend_save(acq_data)
         return self
+
+    def close(self) -> None:
+        pass
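Note: the following is not part of the diff. Both _schedule_backend_save and _schedule_backend_load above use the same pattern: a throwaway single-worker ThreadPoolExecutor that runs one job and shuts itself down from a done callback, so no pool is left open. A standalone sketch of that pattern (run_in_background is a hypothetical name, not part of labmate):

from concurrent.futures import Future, ThreadPoolExecutor


def run_in_background(job) -> Future:
    """Submit `job` to a one-shot executor that cleans itself up when the job ends."""
    executor = ThreadPoolExecutor(max_workers=1)

    def shutdown_executor(future: Future) -> None:
        # Runs once `job` finishes; release the worker thread without blocking.
        executor.shutdown(wait=False)

    future = executor.submit(job)
    future.add_done_callback(shutdown_executor)
    return future


# Fire-and-forget, or block on the result when needed:
future = run_in_background(lambda: sum(range(1_000)))
print(future.result())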
labmate/acquisition/analysis_data.py
CHANGED
@@ -12,6 +12,7 @@ from ..logger import logger
 from .analysis_loop import AnalysisLoop
 from .config_file import ConfigFile

+
 _T = TypeVar("_T", bound="AnalysisData")


@@ -115,7 +116,7 @@ class AnalysisData(DH5):
         self._save_files = save_files
         self._save_fig_inside_h5 = save_fig_inside_h5

-        self._default_config_files: Tuple[str, ...] =
+        self._default_config_files: Tuple[str, ...] = ()
         if "info" in self and "default_config_files" in self["info"]:
             self._default_config_files = tuple(self["info"]["default_config_files"])

@@ -158,9 +159,9 @@ class AnalysisData(DH5):
             logger.warning("Analysis cell is not set. Nothing to save")
             return self

-        self.unlock_data(cell_name_key).update({cell_name_key: code}).lock_data(
-            cell_name_key
-        )
+        self.unlock_data(cell_name_key).update({cell_name_key: code}).lock_data(cell_name_key).save(
+            [cell_name_key]
+        )

         if self._save_files:
             assert self.filepath, "You must set self.filepath before saving"
@@ -198,18 +199,14 @@ class AnalysisData(DH5):
             "inside_h5", False
         ):
             try:
-                raise NotImplementedError(
-                    "save_fig_inside_h5 is not implemented for the moment."
-                )
+                raise NotImplementedError("save_fig_inside_h5 is not implemented for the moment.")
                 # import pltsave

                 # data = pltsave.dumps(fig).to_json()
                 # self[f"figures/{fig_name}"] = data
                 # self.save([f"figures/{fig_name}"])
             except Exception as error:
-                logger.exception(
-                    "Failed to save the figure inside h5 file due to %s", error
-                )
+                logger.exception("Failed to save the figure inside h5 file due to %s", error)
         if tight_layout and hasattr(fig, "tight_layout"):
             fig.tight_layout()  # type: ignore
         if metadata is None:
@@ -261,14 +258,12 @@ class AnalysisData(DH5):
         name = str(name)
         if not name.isnumeric() and name[0] != "_":
             name = "_" + name
-        if
+        if Path(name).suffix == "":  # check if name has no extension
             name = f"{name}.{extensions}"

         return "FIG" + name

-    def parse_config(
-        self, config_files: Optional[Tuple[str, ...]] = None
-    ) -> "ConfigFile":
+    def parse_config(self, config_files: Optional[Tuple[str, ...]] = None) -> "ConfigFile":
         """Parse config files. If `config_files` are not provided takes `default_config_files`."""

         config_files = config_files or self._default_config_files
@@ -303,9 +298,7 @@ class AnalysisData(DH5):
             if key_value == "filename" or key_value == "file" or key_value == "f":
                 filename = os.path.split(self.filepath)[-1]
                 keys_with_values.append(
-                    utils.title_parsing.ValueForPrint(
-                        key_value, filename, key_units, key_format
-                    )
+                    utils.title_parsing.ValueForPrint(key_value, filename, key_units, key_format)
                 )
             elif key_value in self:
                 keys_with_values.append(
@@ -358,9 +351,7 @@ class AnalysisData(DH5):
             )

             if config_file_name in self._parsed_configs:
-                self._parsed_configs[original_config_name] = self._parsed_configs[
-                    config_file_name
-                ]
+                self._parsed_configs[original_config_name] = self._parsed_configs[config_file_name]
                 return self._parsed_configs[config_file_name]

         else:
@@ -393,16 +384,14 @@ class AnalysisData(DH5):
         code: Optional[dict] = self.get("analysis_cells")
         if code is None:
             raise ValueError(
-
+                "There is no field 'analysis_cells' inside the data file. "
                 f"Possible keys are {tuple(self.keys())}."
             )

         # if isinstance(code, bytes):
         #     code = code.decode()
         if name not in code:
-            raise KeyError(
-                f"Cannot get cell '{name}'. Possible cells are: {tuple(code.keys())}"
-            )
+            raise KeyError(f"Cannot get cell '{name}'. Possible cells are: {tuple(code.keys())}")

         code_str: str = code[name]
         if update_code:
@@ -426,7 +415,8 @@ class AnalysisData(DH5):
         return figures

         # raise NotImplementedError(
-        #     "Not implemented for the moment. If you want to open an old figure.
+        #     "Not implemented for the moment. If you want to open an old figure.
+        #     Use open_old_figs function" )

     def pull(self, force_pull: bool = False):
         self._reset_attrs()
labmate/acquisition/analysis_loop.py
CHANGED
@@ -2,7 +2,7 @@
 It has mainly __iter__ method and __getitem__ method for slicing.
 """

-from typing import Any,
+from typing import Any, List, Optional, Sequence, Tuple, Union

 from dh5 import DH5

@@ -39,16 +39,14 @@ class AnalysisLoop(DH5):

     """

-    def __init__(
-        self, data: Optional[dict] = None, loop_shape: Optional[List[int]] = None
-    ):
+    def __init__(self, data: Optional[dict] = None, loop_shape: Optional[List[int]] = None):
         """Initialize an AnalysisLoop object.

         Args:
             data (Optional[dict]): A dictionary containing data to initialize the object with.
-            loop_shape (
+            loop_shape (list[int] | None): A list of ints representing the shape of the analysis loop.
             If not provided, the shape is retrieved from the object's '__loop_shape__' attribute.
-        """
+        """  # noqa: E501
         super().__init__(data=data)
         if loop_shape is None:
             loop_shape = self.get("__loop_shape__")
@@ -86,7 +84,11 @@ class AnalysisLoop(DH5):
                 child_kwds[key] = value[index]

             val = child_kwds[key]
-            if
+            if (
+                isinstance(val, (Sequence))
+                and len(val) == 1
+                and not isinstance(val[0], (Sequence))
+            ):  # type: ignore
                 child_kwds[key] = val[0]  # type: ignore

         if len(self._loop_shape) > 1:
labmate/acquisition/backend.py
ADDED
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+
+if TYPE_CHECKING:  # pragma: no cover - imported for typing only
+    from .acquisition_data import NotebookAcquisitionData
+
+
+class AcquisitionBackend:
+    """Lightweight backend interface used by :class:`AcquisitionManager`."""
+
+    def save_snapshot(self, acquisition: "NotebookAcquisitionData") -> None:
+        """Persist a snapshot of the acquisition remotely."""
+
+        pass
+
+    def load_snapshot(self, acquisition: "NotebookAcquisitionData") -> None:
+        """Hydrate the acquisition from a remote snapshot."""
+
+        pass
+
+
+__all__ = ["AcquisitionBackend"]
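Note: the following is not part of the diff. It is a minimal sketch of how the new AcquisitionBackend interface plugs into AcquisitionManager through the backend= parameter introduced above; PrintingBackend, the data-directory path, and the use of a filepath attribute on the acquisition object are assumptions for illustration only.

from labmate.acquisition import (
    AcquisitionBackend,
    AcquisitionManager,
    NotebookAcquisitionData,
)


class PrintingBackend(AcquisitionBackend):
    """Hypothetical backend that only reports what it would sync."""

    def save_snapshot(self, acquisition: NotebookAcquisitionData) -> None:
        print(f"would push {acquisition.filepath} to a remote store")

    def load_snapshot(self, acquisition: NotebookAcquisitionData) -> None:
        print(f"would pull remote state for {acquisition.filepath}")


# A single backend or any iterable of backends can be passed.
manager = AcquisitionManager("path/to/data", backend=PrintingBackend())
manager.new_acquisition("demo")  # get_acquisition() schedules load_snapshot in a worker thread
manager.save_acquisition()       # save_acquisition() schedules save_snapshot after the local save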
labmate/acquisition/config_file.py
CHANGED
@@ -1,5 +1,7 @@
 """ConfigFile class."""
+
 from typing import Any
+
 from .. import attrdict


@@ -43,8 +45,8 @@ class ConfigFile(attrdict.AttrDict):
             raise ValueError("Content is not defined")

         module = type(attrdict)("config_module")
-        cc = compile(self.content, "<string>", "exec")
-        eval(cc, module.__dict__)
+        cc = compile(self.content, "<string>", "exec")
+        eval(cc, module.__dict__)
         return module

     def eval_key(self, key) -> Any:
@@ -59,5 +61,5 @@ class ConfigFile(attrdict.AttrDict):
         """
         val = self.get(key)
         if val and val.value:
-            return eval(val.value)
+            return eval(val.value)
         return None
labmate/acquisition/custom_lint.py
CHANGED
@@ -5,6 +5,7 @@ from typing import Any, Dict

 from ..utils.lint import get_all_args_from_call

+
 SPECIAL_FUNCTIONS_LIST = []


@@ -25,9 +26,7 @@ def on_call_functions(node: ast.Call, db: Dict[str, Any]):
     h = hash(tuple(get_all_args_from_call(node)))
     data = db.setdefault("save_fig", [])
     if h in data:
-        errors.append(
-            "save_fig with the save arguments is used more then ones."
-        )
+        errors.append("save_fig with the save arguments is used more then ones.")

     data.append(h)
     return errors
labmate/acquisition_notebook/__init__.py
CHANGED
@@ -1,7 +1,7 @@
 # flake8: noqa: F401
+from ..acquisition.acquisition_loop import AcquisitionLoop
+from ..acquisition.analysis_data import AnalysisData
 from .acquisition_analysis_manager import (
     AcquisitionAnalysisManager,
     AcquisitionAnalysisManagerDataOnly,
 )
-from ..acquisition.acquisition_loop import AcquisitionLoop
-from ..acquisition.analysis_data import AnalysisData