sqil-core 0.1.0__py3-none-any.whl → 1.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. sqil_core/__init__.py +1 -0
  2. sqil_core/config_log.py +42 -0
  3. sqil_core/experiment/__init__.py +11 -0
  4. sqil_core/experiment/_analysis.py +125 -0
  5. sqil_core/experiment/_events.py +25 -0
  6. sqil_core/experiment/_experiment.py +553 -0
  7. sqil_core/experiment/data/plottr.py +778 -0
  8. sqil_core/experiment/helpers/_function_override_handler.py +111 -0
  9. sqil_core/experiment/helpers/_labone_wrappers.py +12 -0
  10. sqil_core/experiment/instruments/__init__.py +2 -0
  11. sqil_core/experiment/instruments/_instrument.py +190 -0
  12. sqil_core/experiment/instruments/drivers/SignalCore_SC5511A.py +515 -0
  13. sqil_core/experiment/instruments/local_oscillator.py +205 -0
  14. sqil_core/experiment/instruments/server.py +175 -0
  15. sqil_core/experiment/instruments/setup.yaml +21 -0
  16. sqil_core/experiment/instruments/zurich_instruments.py +55 -0
  17. sqil_core/fit/__init__.py +23 -0
  18. sqil_core/fit/_core.py +179 -31
  19. sqil_core/fit/_fit.py +544 -94
  20. sqil_core/fit/_guess.py +304 -0
  21. sqil_core/fit/_models.py +50 -1
  22. sqil_core/fit/_quality.py +266 -0
  23. sqil_core/resonator/__init__.py +2 -0
  24. sqil_core/resonator/_resonator.py +256 -74
  25. sqil_core/utils/__init__.py +40 -13
  26. sqil_core/utils/_analysis.py +226 -0
  27. sqil_core/utils/_const.py +83 -18
  28. sqil_core/utils/_formatter.py +127 -55
  29. sqil_core/utils/_plot.py +272 -6
  30. sqil_core/utils/_read.py +178 -95
  31. sqil_core/utils/_utils.py +147 -0
  32. {sqil_core-0.1.0.dist-info → sqil_core-1.1.0.dist-info}/METADATA +9 -1
  33. sqil_core-1.1.0.dist-info/RECORD +36 -0
  34. {sqil_core-0.1.0.dist-info → sqil_core-1.1.0.dist-info}/WHEEL +1 -1
  35. sqil_core-0.1.0.dist-info/RECORD +0 -19
  36. {sqil_core-0.1.0.dist-info → sqil_core-1.1.0.dist-info}/entry_points.txt +0 -0
sqil_core/experiment/data/plottr.py
@@ -0,0 +1,778 @@
+ """plottr.data.datadict_storage
+
+ Provides file-storage tools for the DataDict class.
+
+ .. note::
+     Any function in this module that interacts with a ddh5 file will create a lock file while it is using the file.
+     The lock file has the following format: ~<file_name>.lock. The lock file gets deleted even if the program
+     crashes. If the process is stopped abruptly, however, we cannot guarantee that the lock file will be deleted.
+ """
+
+ """Added by Taketo
+ I changed the DDH5Writer method `data_folder`.
+ Each measurement folder is now named by run number, experiment name, and time.
+ """
+ import datetime
+ import json
+ import logging
+ import os
+ import shutil
+ import time
+ import uuid
+ from enum import Enum
+ from pathlib import Path
+ from types import TracebackType
+ from typing import Any, Collection, Dict, Optional, Type, Union
+
+ import h5py
+ import numpy as np
+ from plottr import QtCore, QtGui, QtWidgets, Signal, Slot
+ from plottr.data.datadict import DataDict, DataDictBase, is_meta_key
+ from plottr.node import Node, NodeWidget, emitGuiUpdate, updateGuiFromNode, updateOption
+ from qcodes.utils import NumpyJSONEncoder
+
+ __author__ = "Wolfgang Pfaff"
+ __license__ = "MIT"
+
+ DATAFILEXT = "ddh5"
+ TIMESTRFORMAT = "%Y-%m-%d %H:%M:%S"
+
+ logger = logging.getLogger(__name__)
+
+ # FIXME: need correct handling of dtypes and list/array conversion
+
+
+ class AppendMode(Enum):
+     """How/Whether to append data to existing data."""
+
+     #: Data that is additional compared to already existing data is appended.
+     new = 0
+     #: All data is appended to existing data.
+     all = 1
+     #: Data is overwritten.
+     none = 2
+
+
+ # tools for working on hdf5 objects
+
+
+ def h5ify(obj: Any) -> Any:
+     """
+     Convert an object into something that we can assign to an HDF5 attribute.
+
+     Performs the following conversions:
+         - list/array of strings -> numpy chararray of unicode type
+
+     :param obj: Input object.
+     :return: Object, converted if necessary.
+     """
+     if isinstance(obj, list):
+         all_string = True
+         for elt in obj:
+             if not isinstance(elt, str):
+                 all_string = False
+                 break
+         if not all_string:
+             obj = np.array(obj)
+
+     if type(obj) == np.ndarray and obj.dtype.kind == "U":
+         return np.char.encode(obj, encoding="utf8")
+
+     return obj
+
+
+ def deh5ify(obj: Any) -> Any:
+     """Convert slightly mangled types back to more handy ones.
+
+     :param obj: Input object.
+     :return: Object
+     """
+     if type(obj) == bytes:
+         return obj.decode()
+
+     if type(obj) == np.ndarray and obj.dtype.kind == "S":
+         return np.char.decode(obj)
+
+     return obj
+
+
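Reviewer note: a minimal round-trip sketch for the two helpers above (the array values are made up):

```python
import numpy as np

arr = np.array(["alpha", "beta"])  # unicode dtype '<U5'
stored = h5ify(arr)                # encoded to bytes ('<S5'), safe as an HDF5 attribute
restored = deh5ify(stored)         # decoded back to unicode
assert list(restored) == ["alpha", "beta"]
```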
+ def set_attr(h5obj: Any, name: str, val: Any) -> None:
+     """Set attribute `name` of object `h5obj` to `val`.
+
+     Use :func:`h5ify` to convert the object, then try to set the attribute
+     to the returned value. If that does not succeed due to an HDF5 typing
+     restriction, set the attribute to the string representation of the value.
+     """
+     try:
+         h5obj.attrs[name] = h5ify(val)
+     except TypeError:
+         newval = str(val)
+         h5obj.attrs[name] = h5ify(newval)
+
+
+ def add_cur_time_attr(
+     h5obj: Any, name: str = "creation", prefix: str = "__", suffix: str = "__"
+ ) -> None:
+     """Add current time information to the given HDF5 object, following the format of:
+     ``<prefix><name>_time_sec<suffix>``.
+
+     :param h5obj: The HDF5 object.
+     :param name: The name of the attribute.
+     :param prefix: Prefix of the attribute.
+     :param suffix: Suffix of the attribute.
+     """
+
+     t = time.localtime()
+     tsec = time.mktime(t)
+     tstr = time.strftime(TIMESTRFORMAT, t)
+
+     set_attr(h5obj, prefix + name + "_time_sec" + suffix, tsec)
+     set_attr(h5obj, prefix + name + "_time_str" + suffix, tstr)
+
+
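Reviewer note: a short sketch of the two attribute helpers; the file name and attribute values are hypothetical:

```python
import h5py

with h5py.File("example.ddh5", "w") as f:
    grp = f.create_group("data")
    set_attr(grp, "sample", "qubit_A")       # stored directly as a string
    set_attr(grp, "config", {"power": -20})  # h5py raises TypeError for dicts,
                                             # so the str(...) fallback is stored
    add_cur_time_attr(grp)                   # adds __creation_time_sec__ / __creation_time_str__
```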
+ # elementary reading/writing
+
+
+ def _data_file_path(file: Union[str, Path], init_directory: bool = False) -> Path:
+     """Get the full filepath of the data file.
+     If `init_directory` is True, then create the parent directory."""
+
+     path = Path(file)
+
+     if path.suffix != f".{DATAFILEXT}":
+         path = Path(path.parent, path.stem + f".{DATAFILEXT}")
+     if init_directory:
+         path.parent.mkdir(parents=True, exist_ok=True)
+     return path
+
+
+ def datadict_to_hdf5(
+     datadict: DataDict,
+     path: Union[str, Path],
+     groupname: str = "data",
+     append_mode: AppendMode = AppendMode.new,
+     file_timeout: Optional[float] = None,
+ ) -> None:
+     """Write a DataDict to DDH5.
+
+     Note: Metadata is only written during the initial writing of the dataset.
+     If we're appending to existing datasets, we're not setting metadata anymore.
+
+     :param datadict: Datadict to write to disk.
+     :param path: Path of the file (extension may be omitted).
+     :param groupname: Name of the top level group to store the data in.
+     :param append_mode:
+         - `AppendMode.none` : Delete and re-create group.
+         - `AppendMode.new` : Append rows in the datadict that exceed
+           the number of existing rows in the dataset already stored.
+           Note: we're not checking for content, only length!
+         - `AppendMode.all` : Append all data in datadict to file data sets.
+     :param file_timeout: How long the function will wait for the ddh5 file to unlock. Only relevant if you are
+         writing to a file that already exists and some other program is trying to read it at the same time.
+         If None, uses the default value from :class:`FileOpener`.
+     """
+     filepath = _data_file_path(path, True)
+     if not filepath.exists():
+         append_mode = AppendMode.none
+
+     with FileOpener(filepath, "a", file_timeout) as f:
+         if append_mode is AppendMode.none:
+             init_file(f, groupname)
+         assert groupname in f
+         grp = f[groupname]
+
+         # add top-level metadata.
+         for k, v in datadict.meta_items(clean_keys=False):
+             set_attr(grp, k, v)
+
+         for k, v in datadict.data_items():
+             data = v["values"]
+             shp = data.shape
+             nrows = shp[0]
+
+             # create a new dataset, add axes and unit metadata
+             if k not in grp:
+                 maxshp = tuple([None] + list(shp[1:]))
+                 ds = grp.create_dataset(k, maxshape=maxshp, data=data)
+
+                 # add metadata
+                 add_cur_time_attr(ds)
+
+                 if v.get("axes", []):
+                     set_attr(ds, "axes", v["axes"])
+                 if v.get("unit", "") != "":
+                     set_attr(ds, "unit", v["unit"])
+
+                 for kk, vv in datadict.meta_items(k, clean_keys=False):
+                     set_attr(ds, kk, vv)
+                 ds.flush()
+
+             # if the dataset already exists, append data according to
+             # the chosen append mode.
+             else:
+                 ds = grp[k]
+                 dslen = ds.shape[0]
+
+                 if append_mode == AppendMode.new:
+                     newshp = tuple([nrows] + list(shp[1:]))
+                     ds.resize(newshp)
+                     ds[dslen:] = data[dslen:]
+                 elif append_mode == AppendMode.all:
+                     newshp = tuple([dslen + nrows] + list(shp[1:]))
+                     ds.resize(newshp)
+                     ds[dslen:] = data[:]
+                 ds.flush()
+
+
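Reviewer note: a minimal write/append sketch; the field names and path are made up:

```python
import numpy as np
from plottr.data.datadict import DataDict

dd = DataDict(x=dict(unit="s"), y=dict(axes=["x"], unit="V"))
dd.add_data(x=np.arange(5), y=np.random.rand(5))
datadict_to_hdf5(dd, "run1/data")  # creates run1/data.ddh5

dd.add_data(x=np.arange(5, 10), y=np.random.rand(5))
datadict_to_hdf5(dd, "run1/data")  # default AppendMode.new: only the new rows are written
```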
+ def init_file(f: h5py.File, groupname: str = "data") -> None:
+
+     if groupname in f:
+         del f[groupname]
+         f.flush()
+     grp = f.create_group(groupname)
+     add_cur_time_attr(grp)
+     f.flush()
+
+
+ def datadict_from_hdf5(
+     path: Union[str, Path],
+     groupname: str = "data",
+     startidx: Union[int, None] = None,
+     stopidx: Union[int, None] = None,
+     structure_only: bool = False,
+     ignore_unequal_lengths: bool = True,
+     file_timeout: Optional[float] = None,
+ ) -> DataDict:
+     """Load a DataDict from file.
+
+     :param path: Full filepath (extension may be omitted).
+     :param groupname: Name of hdf5 group.
+     :param startidx: Start row.
+     :param stopidx: End row + 1.
+     :param structure_only: If `True`, don't load the data values.
+     :param ignore_unequal_lengths: If `True`, don't fail when the rows have
+         unequal length; will return the longest consistent DataDict possible.
+     :param file_timeout: How long the function will wait for the ddh5 file to unlock. If None, uses the default
+         value from :class:`FileOpener`.
+     :return: Validated DataDict.
+     """
+     filepath = _data_file_path(path)
+     if not filepath.exists():
+         raise ValueError("Specified file does not exist.")
+
+     if startidx is None:
+         startidx = 0
+
+     res = {}
+     with FileOpener(filepath, "r", file_timeout) as f:
+         if groupname not in f:
+             raise ValueError("Group does not exist.")
+
+         grp = f[groupname]
+         keys = list(grp.keys())
+         lens = [len(grp[k][:]) for k in keys]
+
+         if len(set(lens)) > 1:
+             if not ignore_unequal_lengths:
+                 raise RuntimeError("Unequal lengths in the datasets.")
+
+             if stopidx is None or stopidx > min(lens):
+                 stopidx = min(lens)
+         else:
+             if stopidx is None or stopidx > lens[0]:
+                 stopidx = lens[0]
+
+         for attr in grp.attrs:
+             if is_meta_key(attr):
+                 res[attr] = deh5ify(grp.attrs[attr])
+
+         for k in keys:
+             ds = grp[k]
+             entry: Dict[str, Union[Collection[Any], np.ndarray]] = dict(
+                 values=np.array([]),
+             )
+
+             if "axes" in ds.attrs:
+                 entry["axes"] = deh5ify(ds.attrs["axes"]).tolist()
+             else:
+                 entry["axes"] = []
+
+             if "unit" in ds.attrs:
+                 entry["unit"] = deh5ify(ds.attrs["unit"])
+
+             if not structure_only:
+                 entry["values"] = ds[startidx:stopidx]
+
+             entry["__shape__"] = ds[:].shape
+
+             # and now the metadata
+             for attr in ds.attrs:
+                 if is_meta_key(attr):
+                     entry[attr] = deh5ify(ds.attrs[attr])
+
+             res[k] = entry
+
+     dd = DataDict(**res)
+     dd.validate()
+     return dd
+
+
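Reviewer note: a read-back sketch matching the hypothetical file from the writer example above:

```python
# Structure only: axes/units/shape are read, values are not loaded.
structure = datadict_from_hdf5("run1/data", structure_only=True)
print(structure.axes("y"))         # ['x']

# Load a row window; stopidx is exclusive (end row + 1).
chunk = datadict_from_hdf5("run1/data", startidx=0, stopidx=5)
print(chunk.data_vals("y").shape)  # (5,)
```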
+ def all_datadicts_from_hdf5(
+     path: Union[str, Path], file_timeout: Optional[float] = None, **kwargs: Any
+ ) -> Dict[str, Any]:
+     """
+     Loads all the DataDicts contained in a single HDF5 file. Returns a dictionary with the group names as keys and
+     the DataDicts as the values of those keys.
+
+     :param path: The path of the HDF5 file.
+     :param file_timeout: How long the function will wait for the ddh5 file to unlock. If None, uses the default
+         value from :class:`FileOpener`.
+     :return: Dictionary with group names as keys, and the DataDicts inside them as values.
+     """
+     filepath = _data_file_path(path)
+     if not os.path.exists(filepath):
+         raise ValueError("Specified file does not exist.")
+
+     ret = {}
+     with FileOpener(filepath, "r", file_timeout) as f:
+         keys = [k for k in f.keys()]
+     for k in keys:
+         ret[k] = datadict_from_hdf5(
+             path=path, groupname=k, file_timeout=file_timeout, **kwargs
+         )
+     return ret
+
+
+ # File access with locking
+
+
+ class FileOpener:
+     """
+     Context manager for opening files. Creates its own lock file to signal to other programs that the file is
+     in use. The lock file has the following format: "~<file_name>.lock".
+
+     :param path: The file path.
+     :param mode: The file opening mode. Only the following modes are supported: 'r', 'w', 'w-', 'a'. Defaults to 'r'.
+     :param timeout: Time, in seconds, the context manager waits for the file to unlock. Defaults to 30.
+     :param test_delay: Time between checks, i.e., how long the FileOpener waits before testing again whether
+         the file has been unlocked.
+     """
+
+     def __init__(
+         self,
+         path: Union[Path, str],
+         mode: str = "r",
+         timeout: Optional[float] = None,
+         test_delay: float = 0.1,
+     ):
+         self.path = Path(path)
+         self.lock_path = self.path.parent.joinpath("~" + str(self.path.stem) + ".lock")
+         if mode not in ["r", "w", "w-", "a"]:
+             raise ValueError("Only 'r', 'w', 'w-', 'a' modes are supported.")
+         self.mode = mode
+         self.default_timeout = 30.0
+         if timeout is None:
+             self.timeout = self.default_timeout
+         else:
+             self.timeout = timeout
+         self.test_delay = test_delay
+
+         self.file: Optional[h5py.File] = None
+
+     def __enter__(self) -> h5py.File:
+         self.file = self.open_when_unlocked()
+         return self.file
+
+     def __exit__(
+         self,
+         exc_type: Optional[Type[BaseException]],
+         exc_value: Optional[BaseException],
+         exc_traceback: Optional[TracebackType],
+     ) -> None:
+         try:
+             assert self.file is not None
+             self.file.close()
+         finally:
+             if self.lock_path.is_file():
+                 self.lock_path.unlink()
+
+     def open_when_unlocked(self) -> h5py.File:
+         t0 = time.time()
+         while True:
+             if not self.lock_path.is_file():
+                 try:
+                     self.lock_path.touch(exist_ok=False)
+                 # This happens if some other process beat this one and created the file beforehand
+                 except FileExistsError:
+                     continue
+
+                 while True:
+                     try:
+                         f = h5py.File(str(self.path), self.mode)
+                         return f
+                     except (OSError, PermissionError, RuntimeError):
+                         pass
+                     time.sleep(
+                         self.test_delay
+                     )  # don't overwhelm the FS by very fast repeated calls.
+                     if time.time() - t0 > self.timeout:
+                         raise RuntimeError("Timed out waiting for the file to unlock.")
+
+             time.sleep(
+                 self.test_delay
+             )  # don't overwhelm the FS by very fast repeated calls.
+             if time.time() - t0 > self.timeout:
+                 raise RuntimeError("Lock file remained for longer than the timeout.")
+
+
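Reviewer note: direct use of the lock-aware opener (path and timeout are hypothetical):

```python
# Waits up to 5 s for ~data.lock to disappear, opens read-only,
# and removes its own lock file again when the context exits.
with FileOpener("run1/data.ddh5", "r", timeout=5.0) as f:
    print(list(f.keys()))  # e.g. ['data']
```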
+ # Node for monitoring #
+
+
+ class DDH5LoaderWidget(NodeWidget):
+
+     def __init__(self, node: Node):
+         super().__init__(node=node)
+         assert self.node is not None
+
+         self.fileinput = QtWidgets.QLineEdit()
+         self.groupinput = QtWidgets.QLineEdit("data")
+         self.reload = QtWidgets.QPushButton("Reload")
+
+         self.optSetters = {
+             "filepath": self.fileinput.setText,
+             "groupname": self.groupinput.setText,
+         }
+         self.optGetters = {
+             "filepath": self.fileinput.text,
+             "groupname": self.groupinput.text,
+         }
+
+         flayout = QtWidgets.QFormLayout()
+         flayout.addRow("File path:", self.fileinput)
+         flayout.addRow("Group:", self.groupinput)
+
+         vlayout = QtWidgets.QVBoxLayout()
+         vlayout.addLayout(flayout)
+         vlayout.addWidget(self.reload)
+
+         self.setLayout(vlayout)
+
+         self.fileinput.textEdited.connect(lambda x: self.signalOption("filepath"))
+         self.groupinput.textEdited.connect(lambda x: self.signalOption("groupname"))
+         self.reload.pressed.connect(self.node.update)
+
+
+ class DDH5Loader(Node):
+
+     nodeName = "DDH5Loader"
+     uiClass = DDH5LoaderWidget
+     useUi = True
+
+     setProcessOptions = Signal(str, str)
+
+     def __init__(self, name: str):
+         self._filepath: Optional[str] = None
+         self._groupname: str = "data"
+
+         super().__init__(name)
+
+         self.nLoadedRecords = 0
+
+         self.loadingThread = QtCore.QThread()
+         self.loadingWorker = _Loader(self.filepath, self.groupname)
+         self.loadingWorker.moveToThread(self.loadingThread)
+         self.loadingThread.started.connect(self.loadingWorker.loadData)
+         self.loadingWorker.dataLoaded.connect(self.onThreadComplete)
+         self.loadingWorker.dataLoaded.connect(lambda x: self.loadingThread.quit())
+         self.setProcessOptions.connect(self.loadingWorker.setPathAndGroup)
+
+     @property
+     def filepath(self) -> Optional[str]:
+         return self._filepath
+
+     @filepath.setter
+     @updateOption("filepath")
+     def filepath(self, val: str) -> None:
+         self._filepath = val
+
+     @property
+     def groupname(self) -> str:
+         return self._groupname
+
+     @groupname.setter
+     @updateOption("groupname")
+     def groupname(self, val: str) -> None:
+         self._groupname = val
+
+     # Data processing #
+
+     def process(
+         self, dataIn: Optional[DataDictBase] = None
+     ) -> Optional[Dict[str, Any]]:
+
+         # TODO: maybe needs an optional way to read only new data from file? -- can make that an option
+
+         # this is the flow when process is called due to some trigger
+         if self._filepath is None or self._groupname is None:
+             return None
+         if not os.path.exists(self._filepath):
+             return None
+
+         if not self.loadingThread.isRunning():
+             self.loadingWorker.setPathAndGroup(self.filepath, self.groupname)
+             self.loadingThread.start()
+         return None
+
+     @Slot(object)
+     def onThreadComplete(self, data: Optional[DataDict]) -> None:
+         if data is None:
+             return None
+
+         title = f"{self.filepath}"
+         data.add_meta("title", title)
+         nrecords = data.nrecords()
+         assert nrecords is not None
+         self.nLoadedRecords = nrecords
+         self.setOutput(dataOut=data)
+
+         # this makes sure that we analyze the data and emit signals for changes
+         super().process(dataIn=data)
+
+
+ class _Loader(QtCore.QObject):
+
+     nRetries = 5
+     retryDelay = 0.01
+
+     dataLoaded = Signal(object)
+
+     def __init__(self, filepath: Optional[str], groupname: Optional[str]) -> None:
+         super().__init__()
+         self.filepath = filepath
+         self.groupname = groupname
+
+     def setPathAndGroup(
+         self, filepath: Optional[str], groupname: Optional[str]
+     ) -> None:
+         self.filepath = filepath
+         self.groupname = groupname
+
+     def loadData(self) -> bool:
+         if self.filepath is None or self.groupname is None:
+             self.dataLoaded.emit(None)
+             return True
+
+         data = datadict_from_hdf5(self.filepath, groupname=self.groupname)
+         self.dataLoaded.emit(data)
+         return True
+
+
+ class DDH5Writer(object):
+     """Context manager for writing data to DDH5.
+     Based on typical needs in taking data in an experimental physics lab.
+
+     Creates a lock file when writing data.
+
+     :param basedir: The root directory in which data is stored.
+         :meth:`.create_file_structure` creates the structure inside this root and
+         determines the file name of the data. The default structure implemented here is
+         ``<root>/YYYY-MM-DD/YYYY-mm-ddTHHMMSS_<ID>-<name>/<filename>.ddh5``,
+         where <ID> is a short identifier string and <name> is the value of parameter `name`.
+         To change this, re-implement :meth:`.data_folder` and/or
+         :meth:`.create_file_structure`.
+     :param datadict: Initial data object. Must contain at least the structure of the
+         data to be able to use :meth:`add_data` to add data.
+     :param groupname: Name of the top-level group in the file container. An existing
+         group of that name will be deleted.
+     :param name: Name of this dataset. Used in path/file creation and added as metadata.
+     :param filename: Filename to use. Defaults to 'data.ddh5'.
+     :param file_timeout: How long the function will wait for the ddh5 file to unlock. If None, uses the default
+         value from :class:`FileOpener`.
+     """
+
+     # TODO: need an operation mode for not keeping data in memory.
+     # TODO: a mode for working with pre-allocated data
+
+     def __init__(
+         self,
+         datadict: DataDict,
+         basedir: Union[str, Path] = ".",
+         groupname: str = "data",
+         name: Optional[str] = None,
+         filename: str = "data",
+         filepath: Optional[Union[str, Path]] = None,
+         file_timeout: Optional[float] = None,
+     ):
+         """Constructor for :class:`.DDH5Writer`"""
+
+         self.basedir = Path(basedir)
+         self.datadict = datadict
+
+         if name is None:
+             name = ""
+         self.name = name
+
+         self.groupname = groupname
+         self.filename = Path(filename)
+
+         self.filepath: Optional[Path] = None
+         if filepath is not None:
+             self.filepath = Path(filepath)
+
+         self.datadict.add_meta("dataset.name", name)
+         self.file_timeout = file_timeout
+         self.uuid = uuid.uuid1()
+
+     def __enter__(self) -> "DDH5Writer":
+         if self.filepath is None:
+             self.filepath = _data_file_path(self.data_file_path(), True)
+         logger.info(f"Data location: {self.filepath}")
+
+         nrecords: Optional[int] = self.datadict.nrecords()
+         if nrecords is not None and nrecords > 0:
+             datadict_to_hdf5(
+                 self.datadict,
+                 str(self.filepath),
+                 groupname=self.groupname,
+                 append_mode=AppendMode.none,
+                 file_timeout=self.file_timeout,
+             )
+         return self
+
+     def __exit__(
+         self,
+         exc_type: Optional[Type[BaseException]],
+         exc_value: Optional[BaseException],
+         exc_traceback: Optional[TracebackType],
+     ) -> None:
+         assert self.filepath is not None
+         with FileOpener(self.filepath, "a", timeout=self.file_timeout) as f:
+             add_cur_time_attr(f.require_group(self.groupname), name="close")
+         if exc_type is None:
+             # exiting because the measurement is complete
+             self.add_tag("__complete__")
+         else:
+             # exiting because of an exception
+             self.add_tag("__interrupted__")
+
+     def data_folder(self) -> Path:
+         """Return the folder, relative to the data root path, in which data will
+         be saved.
+
+         Default format:
+         ``<basedir>/YYYY-MM-DD/YYYY-mm-ddTHHMMSS_<ID>-<name>``.
+         In this implementation we use the first 8 characters of a UUID as ID.
+
+         :returns: The folder path.
+         """
+         """
+         Comment by Taketo:
+         I changed the naming discipline. The folder name format is now:
+         ``<basedir>/YYYY-MM-DD/<run_num>-<name>_<timestamp>``.
+         """
+         # ID = str(self.uuid).split('-')[0]
+         # parent = f"{datetime.datetime.now().replace(microsecond=0).isoformat().replace(':', '')}_{ID}"
+         # if self.name:
+         #     parent += f'-{self.name}'
+         # path = Path(time.strftime("%Y-%m-%d"), parent)
+
+         # make the utils folder if it doesn't exist
+         os.makedirs(f"{self.basedir}/utils", exist_ok=True)
+         # if setting.json exists, read the json file into a python dictionary
+         if os.path.isfile(f"{self.basedir}/utils/setting.json"):
+             with open(f"{self.basedir}/utils/setting.json", "r") as f:
+                 d = json.load(f)
+         else:  # if setting.json doesn't exist, make an empty python dictionary
+             d = {}
+
+         # update "run_num"
+         if "run_num" in d:
+             d["run_num"] += 1
+         else:
+             d["run_num"] = 0
+         run_num = str(d["run_num"])
+         with open(f"{self.basedir}/utils/setting.json", "w") as f:
+             json.dump(d, f, indent=4)
+
+         if self.name:
+             parent = f"{run_num.zfill(5)}-{self.name}_{datetime.datetime.now().replace(microsecond=0).isoformat().replace(':', '')}"
+         else:
+             parent = f"{run_num.zfill(5)}_{datetime.datetime.now().replace(microsecond=0).isoformat().replace(':', '')}"
+         path = Path(time.strftime("%Y-%m-%d"), parent)
+         return path
+
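Reviewer note: under this scheme a run lands in a folder like `<basedir>/2025-01-15/00042-rabi_2025-01-15T093012` (hypothetical date, run number, and name), with the run counter persisted in `<basedir>/utils/setting.json`.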
+     def data_file_path(self) -> Path:
+         """Determine the filepath of the data file.
+
+         :returns: The filepath of the data file.
+         """
+         data_folder_path = Path(self.basedir, self.data_folder())
+         appendix = ""
+         idx = 2
+         while data_folder_path.exists():
+             appendix = f"-{idx}"
+             data_folder_path = Path(self.basedir, str(self.data_folder()) + appendix)
+             idx += 1
+
+         return Path(data_folder_path, self.filename)
+
+     def add_data(self, **kwargs: Any) -> None:
+         """Add data to the file (and the internal `DataDict`).
+
+         Requires one keyword argument per data field in the `DataDict`, with
+         the key being the name, and the value the data to add. It is required that
+         all added data has the same number of 'rows', i.e., the outermost dimension
+         has to match for data to be inserted faithfully.
+         If some data is scalar and other data is not, then the data should be reshaped
+         to (1,) for the scalar data, and (1, ...) for the rest; in other words,
+         an outer dimension with length 1 is added for all.
+         """
+         self.datadict.add_data(**kwargs)
+         nrecords = self.datadict.nrecords()
+         if nrecords is not None and nrecords > 0:
+             datadict_to_hdf5(
+                 self.datadict,
+                 str(self.filepath),
+                 groupname=self.groupname,
+                 file_timeout=self.file_timeout,
+             )
+
+             assert self.filepath is not None
+             with FileOpener(self.filepath, "a", timeout=self.file_timeout) as f:
+                 add_cur_time_attr(f, name="last_change")
+                 add_cur_time_attr(f[self.groupname], name="last_change")
+
+     # convenience methods for saving things in the same directory as the ddh5 file
+
+     def add_tag(self, tags: Union[str, Collection[str]]) -> None:
+         assert self.filepath is not None
+         if isinstance(tags, str):
+             tags = [tags]
+         for tag in tags:
+             open(self.filepath.parent / f"{tag}.tag", "x").close()
+
+     def backup_file(self, paths: Union[str, Collection[str]]) -> None:
+         assert self.filepath is not None
+         if isinstance(paths, str):
+             paths = [paths]
+         for path in paths:
+             shutil.copy(path, self.filepath.parent)
+
+     def save_text(self, name: str, text: str) -> None:
+         assert self.filepath is not None
+         with open(self.filepath.parent / name, "x", encoding="utf-8") as f:
+             f.write(text)
+
+     def save_dict(self, name: str, d: dict) -> None:
+         assert self.filepath is not None
+         with open(self.filepath.parent / name, "x") as f:
+             json.dump(d, f, indent=4, ensure_ascii=False, cls=NumpyJSONEncoder)
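Reviewer note: an end-to-end sketch of the writer as patched in this diff; basedir, name, and data values are hypothetical:

```python
import numpy as np
from plottr.data.datadict import DataDict

dd = DataDict(x=dict(unit="s"), y=dict(axes=["x"], unit="V"))

# Data lands in <basedir>/YYYY-MM-DD/<run_num>-rabi_<timestamp>/data.ddh5;
# a __complete__.tag file is created on clean exit.
with DDH5Writer(dd, basedir="./data", name="rabi") as writer:
    for x in np.linspace(0.0, 1.0, 11):
        writer.add_data(x=x, y=float(np.cos(x)))
    writer.save_dict("meta.json", {"power_dBm": -20})
```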