brukerapi 0.1.10__py3-none-any.whl → 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
brukerapi/dataset.py CHANGED
@@ -1,14 +1,29 @@
- from .exceptions import *
- from .schemas import *
- from .data import *
-
- from pathlib import Path
+ import datetime
  import json
- import numpy as np
  import os
  import os.path
+ import re
+ from copy import deepcopy
+ from pathlib import Path
+
+ import numpy as np
  import yaml
- import datetime
+
+ from .data import DataRandomAccess
+ from .exceptions import (
+     DataNotLoaded,
+     DatasetTypeMissmatch,
+     FilterEvalFalse,
+     IncompleteDataset,
+     NotADatasetDir,
+     ParametersNotLoaded,
+     PropertyConditionNotMet,
+     SchemeNotLoaded,
+     TrajNotLoaded,
+     UnsuportedDatasetType,
+ )
+ from .jcampdx import JCAMPDX
+ from .schemas import Schema2dseq, SchemaFid, SchemaRawdata, SchemaSer, SchemaTraj

  LOAD_STAGES = {
      "empty": 0,
@@ -19,52 +34,37 @@ LOAD_STAGES = {

  # Dict of default dataset states
  DEFAULT_STATES = {
-     'fid': {
-         "parameter_files" : ['acqp', 'method'],
-         "property_files": [
-             Path(__file__).parents[0] / 'config/properties_fid_core.json',
-             Path(__file__).parents[0] / 'config/properties_fid_custom.json'
-         ],
-         "load": LOAD_STAGES['all'],
-         "mmap": False
+     "fid": {
+         "parameter_files": ["acqp", "method"],
+         "property_files": [Path(__file__).parents[0] / "config/properties_fid_core.json", Path(__file__).parents[0] / "config/properties_fid_custom.json"],
+         "load": LOAD_STAGES["all"],
+         "mmap": False,
      },
-     '2dseq': {
-         "parameter_files": ['visu_pars'],
-         "property_files": [
-             Path(__file__).parents[0] / 'config/properties_2dseq_core.json',
-             Path(__file__).parents[0] / 'config/properties_2dseq_custom.json'
-         ],
-         "load": LOAD_STAGES['all'],
+     "2dseq": {
+         "parameter_files": ["visu_pars"],
+         "property_files": [Path(__file__).parents[0] / "config/properties_2dseq_core.json", Path(__file__).parents[0] / "config/properties_2dseq_custom.json"],
+         "load": LOAD_STAGES["all"],
          "scale": True,
-         "mmap": False
+         "mmap": False,
      },
-     'traj': {
-         "parameter_files": ['acqp', 'method'],
-         "property_files": [
-             Path(__file__).parents[0] / 'config/properties_traj_core.json',
-             Path(__file__).parents[0] / 'config/properties_traj_custom.json'
-         ],
-         "load": LOAD_STAGES['all'],
-         "mmap": False
+     "traj": {
+         "parameter_files": ["acqp", "method"],
+         "property_files": [Path(__file__).parents[0] / "config/properties_traj_core.json", Path(__file__).parents[0] / "config/properties_traj_custom.json"],
+         "load": LOAD_STAGES["all"],
+         "mmap": False,
      },
-     'ser': {
-         "parameter_files": ['acqp', 'method'],
-         "property_files": [
-             Path(__file__).parents[0] / 'config/properties_ser_core.json',
-             Path(__file__).parents[0] / 'config/properties_ser_custom.json'
-         ],
-         "load": LOAD_STAGES['all'],
-         "mmap": False
+     "ser": {
+         "parameter_files": ["acqp", "method"],
+         "property_files": [Path(__file__).parents[0] / "config/properties_ser_core.json", Path(__file__).parents[0] / "config/properties_ser_custom.json"],
+         "load": LOAD_STAGES["all"],
+         "mmap": False,
+     },
+     "rawdata": {
+         "parameter_files": ["acqp", "method"],
+         "property_files": [Path(__file__).parents[0] / "config/properties_rawdata_core.json", Path(__file__).parents[0] / "config/properties_rawdata_custom.json"],
+         "load": LOAD_STAGES["all"],
+         "mmap": False,
      },
-     'rawdata': {
-         "parameter_files": ['acqp', 'method'],
-         "property_files": [
-             Path(__file__).parents[0] / 'config/properties_rawdata_core.json',
-             Path(__file__).parents[0] / 'config/properties_rawdata_custom.json'
-         ],
-         "load": LOAD_STAGES['all'],
-         "mmap": False
-     }
  }

  RELATIVE_PATHS = {
@@ -75,7 +75,7 @@ RELATIVE_PATHS = {
          "reco": "./pdata/1/reco",
          "visu_pars": "./pdata/1/visu_pars",
          "AdjStatePerScan": "./AdjStatePerScan",
-         "AdjStatePerStudy": "../AdjStatePerStudy"
+         "AdjStatePerStudy": "../AdjStatePerStudy",
      },
      "2dseq": {
          "method": "../../method",
@@ -93,7 +93,7 @@ RELATIVE_PATHS = {
          "reco": "./pdata/1/reco",
          "visu_pars": "./pdata/1/visu_pars",
          "AdjStatePerScan": "./AdjStatePerScan",
-         "AdjStatePerStudy": "../AdjStatePerStudy"
+         "AdjStatePerStudy": "../AdjStatePerStudy",
      },
      "rawdata": {
          "method": "./method",
@@ -102,8 +102,8 @@ RELATIVE_PATHS = {
          "reco": "./pdata/1/reco",
          "visu_pars": "./pdata/1/visu_pars",
          "AdjStatePerScan": "./AdjStatePerScan",
-         "AdjStatePerStudy": "../AdjStatePerStudy"
-     }
+         "AdjStatePerStudy": "../AdjStatePerStudy",
+     },
  }


@@ -145,7 +145,7 @@ class Dataset:

          from bruker.dataset import Dataset

-         dataset = Dataset('path/2dseq')
+         dataset = Dataset("path/2dseq")

      """

@@ -162,22 +162,23 @@ class Dataset:
          """
          self.path = Path(path)

-         if not self.path.exists() and state.get('load') is not LOAD_STAGES['empty']:
+         if not self.path.exists() and state.get("load") is not LOAD_STAGES["empty"]:
              raise FileNotFoundError(self.path)

          # directory constructor
-         if self.path.is_dir():
+         if self.path.is_dir() and state.get("load"):
              content = os.listdir(self.path)
-             if 'fid' in content:
-                 self.path = self.path / 'fid'
-             elif '2dseq' in content:
-                 self.path = self.path / '2dseq'
+             if "fid" in content:
+                 self.path = self.path / "fid"
+             elif "2dseq" in content:
+                 self.path = self.path / "2dseq"
              else:
                  raise NotADatasetDir(self.path)

          self.type = self.path.stem
          self.subtype = self.path.suffix
-         if self.subtype: self.subtype = self.subtype[1:] # remove the dot from the suffix
+         if self.subtype:
+             self.subtype = self.subtype[1:]  # remove the dot from the suffix
          self._properties = []

          # set
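
Note: with the added `state.get("load")` guard, a path pointing at a scan directory is resolved to the contained fid or 2dseq file only when loading is requested; a directory holding neither raises NotADatasetDir. A minimal sketch of the intended call, with a hypothetical ParaVision scan directory:

    from brukerapi.dataset import Dataset

    # /data/study/3 is assumed to contain an fid file
    dataset = Dataset("/data/study/3")  # self.path resolves to /data/study/3/fid
    print(dataset.type)                 # "fid"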
@@ -189,15 +190,14 @@ class Dataset:
          # load data if the load kwarg is true
          self.load()

-
      def __enter__(self):
-         self._state['load'] = LOAD_STAGES['all']
+         self._state["load"] = LOAD_STAGES["all"]
          self.load()
          return self

      def __exit__(self, exc_type, exc_val, exc_tb):
          self.unload()
-         self._state['load'] = LOAD_STAGES['empty']
+         self._state["load"] = LOAD_STAGES["empty"]

      def __str__(self):
          """
@@ -214,6 +214,9 @@ class Dataset:

          raise KeyError(item)

+     def __contains__(self, item):
+         return any(item in parameter_file for parameter_file in self._parameters.values())
+
      def __call__(self, **kwargs):
          self._set_state(kwargs)
          return self
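
Note: the new __contains__ delegates to every loaded JCAMP-DX file, so membership tests no longer require knowing which file defines a parameter. Sketch (parameter name and path hypothetical):

    from brukerapi.dataset import Dataset

    dataset = Dataset("/data/study/3/fid")

    if "PVM_EchoTime" in dataset:  # searched in both acqp and method
        print(dataset["PVM_EchoTime"].value)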
@@ -221,11 +224,11 @@ class Dataset:
      def _set_state(self, passed):
          result = deepcopy(DEFAULT_STATES[self.type])

-         if 'parameter_files' in passed.keys():
-             passed['parameter_files'] = result['parameter_files'] + passed['parameter_files']
+         if "parameter_files" in passed:
+             passed["parameter_files"] = result["parameter_files"] + passed["parameter_files"]

-         if 'property_files' in passed.keys():
-             passed['property_files'] = result['property_files'] + passed['property_files']
+         if "property_files" in passed:
+             passed["property_files"] = result["property_files"] + passed["property_files"]

          result.update(passed)
          self._state = result
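
Note: user-supplied parameter_files and property_files are appended to the per-type defaults rather than replacing them, after which the remaining keys are overwritten. The merge semantics, reproduced standalone:

    from copy import deepcopy

    default = {"parameter_files": ["acqp", "method"], "mmap": False}
    passed = {"parameter_files": ["visu_pars"], "mmap": True}

    result = deepcopy(default)
    passed["parameter_files"] = result["parameter_files"] + passed["parameter_files"]
    result.update(passed)

    print(result["parameter_files"])  # ['acqp', 'method', 'visu_pars']
    print(result["mmap"])             # True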
@@ -241,13 +244,12 @@ class Dataset:
          """

          # Check whether dataset file is supported
-         if self.type not in DEFAULT_STATES.keys():
+         if self.type not in DEFAULT_STATES:
              raise UnsuportedDatasetType(self.type)

          # Check whether all necessary JCAMP-DX files are present
-         if self._state.get('load') >= LOAD_STAGES['parameters']:
-             if not (set(DEFAULT_STATES[self.type]['parameter_files']) <= set(os.listdir(str(self.path.parent)))):
-                 raise IncompleteDataset
+         if self._state.get("load") >= LOAD_STAGES["parameters"] and not (set(DEFAULT_STATES[self.type]["parameter_files"]) <= set(os.listdir(str(self.path.parent)))):
+             raise IncompleteDataset

      def load(self):
          """
@@ -255,15 +257,18 @@ class Dataset:
          traj is loaded as well.
          """

-         if self._state['load'] is LOAD_STAGES['empty']: return
+         if self._state["load"] is LOAD_STAGES["empty"]:
+             return

          self.load_parameters()

-         if self._state['load'] is LOAD_STAGES['parameters']: return
+         if self._state["load"] is LOAD_STAGES["parameters"]:
+             return

          self.load_properties()

-         if self._state['load'] is LOAD_STAGES['properties']: return
+         if self._state["load"] is LOAD_STAGES["properties"]:
+             return

          self.load_schema()
          self.load_data()
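
Note: load() now steps through the stages with an early return per stage. The guard pattern, reproduced standalone (only "empty": 0 is visible in this diff; the other stage values are assumed):

    LOAD_STAGES = {"empty": 0, "parameters": 1, "properties": 2, "all": 3}

    def load(stage):
        if stage is LOAD_STAGES["empty"]:
            return
        print("parameters loaded")
        if stage is LOAD_STAGES["parameters"]:
            return
        print("properties loaded")
        if stage is LOAD_STAGES["properties"]:
            return
        print("schema and data loaded")

    load(LOAD_STAGES["parameters"])  # prints only "parameters loaded"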
@@ -286,7 +291,8 @@ class Dataset:

      def load_parameters(self):
          """
-         Load all parameters essential for reading of given dataset type. For instance, type `fid` data set loads acqp and method file, from parent directory in which the fid file is contained.
+         Load all parameters essential for reading of given dataset type.
+         For instance, type `fid` data set loads acqp and method file, from parent directory in which the fid file is contained.
          """
          self._read_parameters()

@@ -306,14 +312,14 @@ class Dataset:

          from bruker.dataset import Dataset

-         dataset = Dataset('.../2dseq')
-         dataset.add_parameter_file('method')
-         dataset['PVM_DwDir'].value
+         dataset = Dataset(".../2dseq")
+         dataset.add_parameter_file("method")
+         dataset["PVM_DwDir"].value

          """
          path = self.path.parent / RELATIVE_PATHS[self.type][file]

-         if not hasattr(self, '_parameters') or self._parameters is None:
+         if not hasattr(self, "_parameters") or self._parameters is None:
              self._parameters = {path.name: JCAMPDX(path)}
          else:
              self._parameters[path.name] = JCAMPDX(path)
@@ -324,17 +330,16 @@ class Dataset:

          :return:
          """
-         parameter_files = self._state['parameter_files']
+         parameter_files = self._state["parameter_files"]
          for file in parameter_files:
              try:
                  self.add_parameter_file(file)
              except FileNotFoundError as e:
                  # if jcampdx file is required but not found raise Error
-                 if file in DEFAULT_STATES[self.type]['parameter_files']:
+                 if file in DEFAULT_STATES[self.type]["parameter_files"]:
                      raise e
                  # if jcampdx file is not found, but not required, pass
-                 else:
-                     pass
+                 pass

      def _write_parameters(self, parent):
          for type_, jcampdx in self._parameters.items():
@@ -363,22 +368,22 @@ class Dataset:

          from bruker.dataset import Dataset

-         dataset = Dataset('.../fid')
-         dataset.add_parameter_file('AdjStatePerScan')
+         dataset = Dataset(".../fid")
+         dataset.add_parameter_file("AdjStatePerScan")
          dataset.load_properties()
          dataset.date

          """
-         for file in self._state['property_files']:
+         for file in self._state["property_files"]:
              self.add_property_file(file)

-         self._state['load_properties'] = True
+         self._state["load_properties"] = True

      def unload_properties(self):
          for property in self._properties:
-             delattr(self,property)
+             delattr(self, property)
          self._properties = []
-         self._state['load_properties'] = False
+         self._state["load_properties"] = False

      def reload_properties(self):
          self.unload_properties()
@@ -390,7 +395,7 @@ class Dataset:
          self._add_property(property)

      def _add_property(self, property):
-         """ Add property to the dataset and schema
+         """Add property to the dataset and schema

          * Evaluate the condition for a given command if these are fulfilled, the next step follows, otherwise,
          the next command is processed.
@@ -402,13 +407,15 @@ class Dataset:
          """
          for desc in property[1]:
              try:
-                 self._eval_conditions(desc['conditions'])
+                 self._eval_conditions(desc["conditions"])
                  try:
-                     value = self._make_element(desc['cmd'])
+                     value = self._make_element(desc["cmd"])
                      self.__setattr__(property[0], value)

-                     if not hasattr(self, '_properties'):
-                         self._properties = [property[0],]
+                     if not hasattr(self, "_properties"):
+                         self._properties = [
+                             property[0],
+                         ]
                      else:
                          self._properties.append(property[0])

@@ -429,13 +436,14 @@ class Dataset:
          """
          if isinstance(cmd, str):
              return eval(self._sub_parameters(cmd))
-         elif isinstance(cmd, int) or isinstance(cmd, float):
+         if isinstance(cmd, (int, float)):
              return cmd
-         elif isinstance(cmd, list):
+         if isinstance(cmd, list):
              element = []
              for cmd_ in cmd:
                  element.append(self._make_element(cmd_))
              return element
+         return None

      def _eval_conditions(self, conditions):
          """
@@ -453,22 +461,21 @@ class Dataset:
                  if isinstance(condition, str):
                      if not self._make_element(condition):
                          raise PropertyConditionNotMet
-                 elif isinstance(condition, list):
-                     if not self._make_element(condition[0]) in condition[1]:
-                         raise PropertyConditionNotMet
+                 elif isinstance(condition, list) and self._make_element(condition[0]) not in condition[1]:
+                     raise PropertyConditionNotMet
              except KeyError:
-                 raise PropertyConditionNotMet
+                 raise PropertyConditionNotMet from KeyError

      def _sub_parameters(self, recipe):
          # entries with property e.g. VisuFGOrderDesc.nested to self._dataset['VisuFGOrderDesc'].nested
-         for match in re.finditer(r'#[a-zA-Z0-9_]+\.[a-zA-Z]+', recipe):
-             m = re.match('#[a-zA-Z0-9_]+', match.group())
-             recipe = recipe.replace(m.group(),"self['{}']".format(m.group()[1:]))
+         for match in re.finditer(r"#[a-zA-Z0-9_]+\.[a-zA-Z]+", recipe):
+             m = re.match("#[a-zA-Z0-9_]+", match.group())
+             recipe = recipe.replace(m.group(), f"self['{m.group()[1:]}']")
          # entries without property e.g. VisuFGOrderDesc to self._dataset['VisuFGOrderDesc'].value
-         for match in re.finditer('@[a-zA-Z0-9_]+', recipe):
-             recipe = recipe.replace(match.group(),"self.{}".format(match.group()[1:]))
-         for match in re.finditer('#[a-zA-Z0-9_]+', recipe):
-             recipe = recipe.replace(match.group(),"self['{}'].value".format(match.group()[1:]))
+         for match in re.finditer("@[a-zA-Z0-9_]+", recipe):
+             recipe = recipe.replace(match.group(), f"self.{match.group()[1:]}")
+         for match in re.finditer("#[a-zA-Z0-9_]+", recipe):
+             recipe = recipe.replace(match.group(), f"self['{match.group()[1:]}'].value")
          return recipe

      """
@@ -479,15 +486,15 @@ class Dataset:
          """
          Load the schema for given data set.
          """
-         if self.type == 'fid':
+         if self.type == "fid":
              self._schema = SchemaFid(self)
-         elif self.type == '2dseq':
+         elif self.type == "2dseq":
              self._schema = Schema2dseq(self)
-         elif self.type == 'rawdata':
+         elif self.type == "rawdata":
              self._schema = SchemaRawdata(self)
-         elif self.type == 'ser':
+         elif self.type == "ser":
              self._schema = SchemaSer(self)
-         elif self.type == 'traj':
+         elif self.type == "traj":
              self._schema = SchemaTraj(self)

      def unload_schema(self):
@@ -511,7 +518,7 @@ class Dataset:

          **called in the class constructor.**
          """
-         if self._state['mmap']:
+         if self._state["mmap"]:
              self._data = DataRandomAccess(self)
          else:
              self._data = self._read_data()
@@ -539,12 +546,12 @@ class Dataset:
          1D ndarray containing the full data vector
          """
          # TODO debug with this
-         # try:
-         #     assert os.stat(str(path)).st_size == np.prod(shape) * dtype.itemsize
-         # except AssertionError:
-         #     raise ValueError('Dimension missmatch')
+         try:
+             assert os.stat(str(path)).st_size == np.prod(shape) * dtype.itemsize
+         except AssertionError:
+             raise ValueError("Dimension mismatch") from AssertionError

-         return np.array(np.memmap(path, dtype=dtype, shape=shape, order='F')[:])
+         return np.array(np.memmap(path, dtype=dtype, shape=shape, order="F")[:])

      def _write_data(self, path):
          data = self.data.copy()
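
Note: the sanity check that was previously commented out is now enforced: the byte size of the binary file must equal the product of the storage shape and the dtype's item size. The arithmetic behind the assertion, standalone:

    import numpy as np

    shape = (128, 128, 15)     # hypothetical storage layout
    dtype = np.dtype("int32")  # 4 bytes per element

    expected = np.prod(shape) * dtype.itemsize
    print(expected)            # 983040 bytes expected on disk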
@@ -552,7 +559,7 @@ class Dataset:
          self._write_binary_file(path, data, self.shape_storage, self.numpy_dtype)

      def _write_binary_file(self, path, data, storage_layout, dtype):
-         fp = np.memmap(path, mode='w+', dtype=dtype, shape=storage_layout, order='F')
+         fp = np.memmap(path, mode="w+", dtype=dtype, shape=storage_layout, order="F")
          fp[:] = data

      """
@@ -560,11 +567,10 @@ class Dataset:
      """

      def load_traj(self, **kwargs):
-         if Path(self.path.parent / 'traj').exists() and self.type != 'traj':
-             self._traj = Dataset(self.path.parent / 'traj', load=False, random_access=self.random_access)
+         if Path(self.path.parent / "traj").exists() and self.type != "traj":
+             self._traj = Dataset(self.path.parent / "traj", load=False, random_access=self.random_access)
              self._traj._parameters = self.parameters
-             self._traj._schema = SchemaTraj(self._traj, meta=self.schema._meta, sub_params=self.schema._sub_params,
-                                             fid=self)
+             self._traj._schema = SchemaTraj(self._traj, meta=self.schema._meta, sub_params=self.schema._sub_params, fid=self)
              self._traj.load_data()
          else:
              self._traj = None
@@ -575,6 +581,7 @@ class Dataset:
      """
      EXPORT INTERFACE
      """
+
      def write(self, path, **kwargs):
          """
          Write the Dataset instance to the disk. This consists of writing the binary data file {fid, rawdata, 2dseq,
@@ -587,13 +594,13 @@ class Dataset:

          path = Path(path)

-         if path.name != self.type:
+         if path.name.split(".")[0] != self.type:
              raise DatasetTypeMissmatch

          parent = path.parent

          if not parent.exists():
-             os.mkdir(parent)
+             os.makedirs(parent, exist_ok=True)

          self._write_parameters(parent)
          self._write_data(path)
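
Note: the write() type check now compares only the file-name part before the first dot, so subtyped names such as rawdata.job0 pass, and missing parent directories are created recursively. Sketch, paths hypothetical:

    from brukerapi.dataset import Dataset

    dataset = Dataset("/data/study/3/rawdata.job0")

    # "rawdata.job0".split(".")[0] == "rawdata" == dataset.type, so no
    # DatasetTypeMissmatch; /out/study/3 is created if it does not exist
    dataset.write("/out/study/3/rawdata.job0")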
@@ -611,16 +618,16 @@ class Dataset:
          """

          if path is None:
-             path = self.path.parent / self.id + '.json'
+             path = self.path.parent / self.id + ".json"
          elif path.is_dir():
-             path = Path(path) / self.id + '.json'
+             path = Path(path) / self.id + ".json"

          if verbose:
-             print("bruker report: {} -> {}".format(str(self.path), str(path)))
+             print(f"bruker report: {self.path!s} -> {path!s}")

-         if path.suffix == '.json':
+         if path.suffix == ".json":
              self.to_json(path, props=props)
-         elif path.suffix == '.yml':
+         elif path.suffix == ".yml":
              self.to_yaml(path, props=props)

      def to_json(self, path=None, props=None):
@@ -631,10 +638,11 @@ class Dataset:
          :param names: *list* names of properties to be exported
          """
          if path:
-             with open(path, 'w') as json_file:
-                 json.dump(self.to_dict(props=props), json_file, indent=4)
+             with open(path, "w") as json_file:
+                 json.dump(self.to_dict(props=props), json_file, indent=4)
          else:
              return json.dumps(self.to_dict(props=props), indent=4)
+         return None

      def to_yaml(self, path=None, props=None):
          """
@@ -644,10 +652,11 @@ class Dataset:
          :param names: *list* names of properties to be exported
          """
          if path:
-             with open(path, 'w') as yaml_file:
-                 yaml.dump(self.to_dict(props=props), yaml_file, default_flow_style=False)
+             with open(path, "w") as yaml_file:
+                 yaml.dump(self.to_dict(props=props), yaml_file, default_flow_style=False)
          else:
              return yaml.dump(self.to_dict(props=props), default_flow_style=False)
+         return None

      def to_dict(self, props=None):
          """
@@ -661,8 +670,7 @@ class Dataset:
              props = list(vars(self).keys())

          # list of Dataset properties to be excluded from the export
-         reserved = ['_parameters', 'path', '_data', '_traj', '_state', '_schema', 'random_access', 'study_id',
-                     'exp_id', 'proc_id', 'subj_id', '_properties']
+         reserved = ["_parameters", "path", "_data", "_traj", "_state", "_schema", "random_access", "study_id", "exp_id", "proc_id", "subj_id", "_properties"]
          props = list(set(props) - set(reserved))

          properties = {}
@@ -680,24 +688,21 @@ class Dataset:
          """
          if isinstance(var, Path):
              return str(var)
-         elif isinstance(var, np.integer) or isinstance(var, np.int32):
+         if isinstance(var, (np.integer, np.int32)):
              return int(var)
-         elif isinstance(var, np.floating):
+         if isinstance(var, np.floating):
              return float(var)
-         elif isinstance(var, np.ndarray):
+         if isinstance(var, np.ndarray):
              return var.tolist()
-         elif isinstance(var, np.dtype):
+         if isinstance(var, np.dtype):
              return var.name
-         elif isinstance(var, list):
+         if isinstance(var, list):
              return [self._encode_property(var_) for var_ in var]
-         elif isinstance(var, tuple):
+         if isinstance(var, tuple):
              return self._encode_property(list(var))
-         elif isinstance(var, datetime.datetime):
-             return str(var)
-         elif isinstance(var, str):
+         if isinstance(var, (datetime.datetime, str)):
              return str(var)
-         else:
-             return var
+         return var

      def query(self, query):
          if isinstance(query, str):
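
Note: _encode_property now returns from each branch directly and maps numpy and pathlib values onto JSON-serializable builtins. The individual coercions it relies on, standalone:

    import numpy as np

    print(int(np.int32(7)))           # 7
    print(float(np.float64(0.5)))     # 0.5
    print(np.zeros((2, 2)).tolist())  # [[0.0, 0.0], [0.0, 0.0]]
    print(np.dtype("int32").name)     # int32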
@@ -708,11 +713,12 @@ class Dataset:
                  if not eval(self._sub_parameters(q)):
                      raise FilterEvalFalse
              except (KeyError, AttributeError) as e:
-                 raise FilterEvalFalse
+                 raise FilterEvalFalse from e

      """
      PROPERTIES
      """
+
      @property
      def data(self):
          """Data array.
@@ -721,11 +727,10 @@ class Dataset:
          """
          if self._data is not None:
              return self._data
-         else:
-             raise DataNotLoaded
+         raise DataNotLoaded

      @data.setter
-     def data(self,value):
+     def data(self, value):
          self._data = value

      @property
@@ -736,15 +741,13 @@ class Dataset:
          """
          if self._traj is not None:
              return self._traj.data
-         else:
-             raise TrajNotLoaded
+         raise TrajNotLoaded

      @property
      def parameters(self):
          if self._parameters is not None:
              return self._parameters
-         else:
-             raise ParametersNotLoaded
+         raise ParametersNotLoaded

      @parameters.setter
      def parameters(self, value):
@@ -754,8 +757,7 @@ class Dataset:
      def schema(self):
          if self._schema is not None:
              return self._schema
-         else:
-             raise SchemeNotLoaded
+         raise SchemeNotLoaded

      @property
      def dim(self):
@@ -772,4 +774,3 @@ class Dataset:
          :type: *tuple*
          """
          return self.data.shape
-