tomwer 1.3.4-py3-none-any.whl → 1.3.12-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. tomwer/core/process/control/datalistener/datalistener.py +11 -11
  2. tomwer/core/process/control/datawatcher/edfdwprocess.py +0 -9
  3. tomwer/core/process/reconstruction/axis/axis.py +3 -3
  4. tomwer/core/process/reconstruction/darkref/darkrefscopy.py +3 -2
  5. tomwer/core/process/reconstruction/nabu/nabucommon.py +3 -4
  6. tomwer/core/process/reconstruction/nabu/nabuscores.py +1 -0
  7. tomwer/core/process/reconstruction/nabu/nabuslices.py +6 -52
  8. tomwer/core/process/reconstruction/nabu/nabuvolume.py +2 -5
  9. tomwer/core/process/reconstruction/nabu/utils.py +6 -0
  10. tomwer/core/process/reconstruction/saaxis/saaxis.py +2 -0
  11. tomwer/core/process/reconstruction/sadeltabeta/sadeltabeta.py +2 -0
  12. tomwer/core/process/task.py +4 -2
  13. tomwer/core/process/test/test_data_transfer.py +4 -3
  14. tomwer/core/scan/blissscan.py +3 -3
  15. tomwer/core/scan/nxtomoscan.py +2 -2
  16. tomwer/core/scan/scanbase.py +5 -6
  17. tomwer/gui/control/datawatcher/datawatcher.py +1 -24
  18. tomwer/gui/control/reducedarkflatselector.py +2 -2
  19. tomwer/gui/edit/dkrfpatch.py +4 -4
  20. tomwer/gui/edit/nxtomowarmer.py +3 -2
  21. tomwer/gui/imagefromfile.py +2 -2
  22. tomwer/gui/qfolderdialog.py +4 -0
  23. tomwer/gui/reconstruction/axis/radioaxis.py +3 -0
  24. tomwer/gui/reconstruction/nabu/nabuconfig/preprocessing.py +11 -10
  25. tomwer/gui/reconstruction/saaxis/saaxis.py +2 -2
  26. tomwer/gui/visualization/volumeviewer.py +10 -4
  27. tomwer/io/utils/h5pyutils.py +3 -7
  28. tomwer/io/utils/utils.py +3 -3
  29. tomwer/synctools/stacks/reconstruction/castvolume.py +20 -5
  30. tomwer/version.py +1 -1
  31. tomwer-1.3.12-py3.11-nspkg.pth +1 -0
  32. {tomwer-1.3.4.dist-info → tomwer-1.3.12.dist-info}/METADATA +1 -1
  33. {tomwer-1.3.4.dist-info → tomwer-1.3.12.dist-info}/RECORD +38 -38
  34. {tomwer-1.3.4.dist-info → tomwer-1.3.12.dist-info}/WHEEL +1 -1
  35. tomwer-1.3.4-py3.11-nspkg.pth +0 -1
  36. {tomwer-1.3.4.dist-info → tomwer-1.3.12.dist-info}/LICENSE +0 -0
  37. {tomwer-1.3.4.dist-info → tomwer-1.3.12.dist-info}/entry_points.txt +0 -0
  38. {tomwer-1.3.4.dist-info → tomwer-1.3.12.dist-info}/namespace_packages.txt +0 -0
  39. {tomwer-1.3.4.dist-info → tomwer-1.3.12.dist-info}/top_level.txt +0 -0
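
The change that recurs across most of these files is a migration of read-only HDF5 access: context managers built on tomoscan.io.HDF5File(path, "r", swmr=...) are replaced by silx's generic open(), imported as open_hdf5. Below is a minimal sketch of that pattern, assuming a hypothetical helper name and arguments (it is not code taken from the package); the actual hunks follow.

    # Before (pattern removed in 1.3.12): explicit h5py handle with a SWMR flag
    #     from tomoscan.io import HDF5File
    #     with HDF5File(bliss_file, "r", swmr=...) as h5f:
    #         ...

    # After: silx's open() returns a read-only, h5py-like object
    from silx.io.utils import open as open_hdf5

    def end_time_is_written(bliss_file: str, entry: str) -> bool:
        # hypothetical helper mirroring the datalistener.py hunks below
        with open_hdf5(bliss_file) as h5f:
            end_scan_path = "/".join((entry, "end_time"))
            return end_scan_path in h5f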

tomwer/core/process/control/datalistener/datalistener.py
@@ -37,7 +37,7 @@ import typing
  from ewokscore.task import Task as EwoksTask
  from nxtomomill import converter as nxtomomill_converter
  from silx.io.utils import h5py_read_dataset
- from tomoscan.io import HDF5File
+ from silx.io.utils import open as open_hdf5

  import tomwer.version
  from tomwer.core import settings
@@ -229,7 +229,7 @@ class DataListener(BaseProcessInfo):
  """Return the list of scans dir for this bliss_file / entry"""

  def get_scan_indexes():
- with HDF5File(bliss_file, "r", swmr=DataListener.SWMR_MODE) as h5f:
+ with open_hdf5(bliss_file) as h5f:
  entry_node = h5f[entry]
  if _SCAN_NUMBER_PATH in entry_node:
  return h5py_read_dataset(entry_node[_SCAN_NUMBER_PATH])
@@ -254,7 +254,7 @@ class DataListener(BaseProcessInfo):
  """Return the proposal file of the experimentation if registred by the
  data listener"""
  if entry is None:
- with HDF5File(process_file, "r", swmr=DataListener.SWMR_MODE) as h5f:
+ with open_hdf5(process_file) as h5f:
  entries = BaseProcessInfo._get_process_nodes(
  root_node=h5f, process=DataListener
  )
@@ -269,7 +269,7 @@ class DataListener(BaseProcessInfo):
  entry = list(entries.keys())[0]
  _logger.info("take %s as default entry" % entry)

- with HDF5File(process_file, "r", swmr=DataListener.SWMR_MODE) as h5f:
+ with open_hdf5(process_file) as h5f:
  dl_nodes = BaseProcessInfo._get_process_nodes(
  root_node=h5f[entry], process=DataListener
  )
@@ -311,7 +311,7 @@ class DataListener(BaseProcessInfo):
  """Return the proposal file of the experimentation if registred by the
  data listener"""
  if entry is None:
- with HDF5File(process_file, "r", swmr=DataListener.SWMR_MODE) as h5f:
+ with open_hdf5(process_file) as h5f:
  entries = BaseProcessInfo._get_process_nodes(
  root_node=h5f, process=DataListener
  )
@@ -326,7 +326,7 @@ class DataListener(BaseProcessInfo):
  entry = list(entries.keys())[0]
  _logger.info("take %s as default entry" % entry)

- with HDF5File(process_file, "r", swmr=DataListener.SWMR_MODE) as h5f:
+ with open_hdf5(process_file) as h5f:
  dl_nodes = BaseProcessInfo._get_process_nodes(
  root_node=h5f[entry], process=DataListener
  )
@@ -360,7 +360,7 @@ class DataListener(BaseProcessInfo):
  """Return the proposal file of the experimentation if registred by the
  data listener"""
  if entry is None:
- with HDF5File(process_file, "r", swmr=DataListener.SWMR_MODE) as h5f:
+ with open_hdf5(process_file) as h5f:
  entries = BaseProcessInfo._get_process_nodes(
  root_node=h5f, process=DataListener
  )
@@ -375,7 +375,7 @@ class DataListener(BaseProcessInfo):
  entry = list(entries.keys())[0]
  _logger.info("take %s as default entry" % entry)

- with HDF5File(process_file, "r", swmr=DataListener.SWMR_MODE) as h5f:
+ with open_hdf5(process_file) as h5f:
  dl_nodes = BaseProcessInfo._get_process_nodes(
  root_node=h5f[entry], process=DataListener
  )
@@ -410,7 +410,7 @@ class DataListener(BaseProcessInfo):
  specific sequence data
  """
  if entry is None:
- with HDF5File(process_file, "r", swmr=DataListener.SWMR_MODE) as h5f:
+ with open_hdf5(process_file) as h5f:
  entries = BaseProcessInfo._get_process_nodes(
  root_node=h5f, process=DataListener
  )
@@ -425,7 +425,7 @@ class DataListener(BaseProcessInfo):
  entry = list(entries.keys())[0]
  _logger.info("take %s as default entry" % entry)

- with HDF5File(process_file, "r", swmr=DataListener.SWMR_MODE) as h5f:
+ with open_hdf5(process_file) as h5f:
  dl_nodes = BaseProcessInfo._get_process_nodes(
  root_node=h5f[entry], process=DataListener
  )
@@ -503,7 +503,7 @@ class DataListener(BaseProcessInfo):
  # write
  def sequence_is_finished():
  try:
- with HDF5File(bliss_file, "r", swmr=DataListener.SWMR_MODE) as h5f:
+ with open_hdf5(bliss_file) as h5f:
  end_scan_path = "/".join((entry, "end_time"))
  return end_scan_path in h5f
  except Exception:

tomwer/core/process/control/datawatcher/edfdwprocess.py
@@ -50,15 +50,6 @@ except ImportError:
  has_rsync = True


- def get_info_val(lines, key):
- r = range(len(lines))
- key = key + "="
- for i in r:
- if key in lines[i]:
- val = float(lines[i].split("=")[1])
- return val
-
-
  class _DataWatcherEDFProcess(_DataWatcherProcess):
  """
  Base class for edf acquisition observation

tomwer/core/process/reconstruction/axis/axis.py
@@ -76,7 +76,7 @@ else:
  has_composite_cor_finder = True
  from silx.io.url import DataUrl
  from silx.io.utils import h5py_read_dataset
- from tomoscan.io import HDF5File, get_swmr_mode
+ from silx.io.utils import open as open_hdf5

  _logger = logging.getLogger(__name__)
  if not has_composite_cor_finder:
@@ -1164,7 +1164,7 @@ class AxisTask(
  :return:
  """
  if entry is None:
- with HDF5File(process_file, "r", swmr=get_swmr_mode()) as h5f:
+ with open_hdf5(process_file) as h5f:
  entries = AxisTask._get_process_nodes(root_node=h5f, process=AxisTask)
  if len(entries) == 0:
  _logger.info("unable to find a Axis process in %s" % process_file)
@@ -1175,7 +1175,7 @@ class AxisTask(
  entry = list(entries.keys())[0]
  _logger.info("take %s as default entry" % entry)

- with HDF5File(process_file, "r", swmr=get_swmr_mode()) as h5f:
+ with open_hdf5(process_file) as h5f:
  axis_nodes = AxisTask._get_process_nodes(
  root_node=h5f[entry], process=AxisTask
  )

tomwer/core/process/reconstruction/darkref/darkrefscopy.py
@@ -39,6 +39,7 @@ from typing import Union

  from silx.io.dictdump import dicttoh5, h5todict
  from silx.io.url import DataUrl
+ from silx.io.utils import open as open_hdf5
  from tomoscan.esrf.scan.utils import (
  copy_h5_dict_darks_to,
  copy_h5_dict_flats_to,
@@ -297,7 +298,7 @@ class DarkRefsCopy(DarkRefsTask):
  if not os.path.exists(self._save_file):
  return False
  else:
- with HDF5File(self._save_file, mode="r") as h5f:
+ with open_hdf5(self._save_file) as h5f:
  return self._flats_url.data_path() in h5f

  def has_dark_stored(self) -> bool:
@@ -309,7 +310,7 @@
  if not os.path.exists(self._save_file):
  return False
  else:
- with HDF5File(self._save_file, mode="r") as h5f:
+ with open_hdf5(self._save_file) as h5f:
  return self._darks_url.data_path() in h5f

  def contains_dark(self, scan: TomwerScanBase) -> bool:

tomwer/core/process/reconstruction/nabu/nabucommon.py
@@ -43,9 +43,10 @@ import sys

  import numpy
  from silx.io.url import DataUrl
+ from silx.io.utils import open as open_hdf5
  from sluurp.executor import submit as submit_to_slurm_cluster
  from sluurp.job import SBatchScriptJob
- from tomoscan.io import HDF5File, get_swmr_mode
+ from tomoscan.io import HDF5File
  from tomoscan.normalization import Method as INormMethod
  from tomoscan.identifier import VolumeIdentifier

@@ -581,9 +582,7 @@ class _NabuBaseReconstructor:
  # if an url exists insure we can access it
  dataset_url = DataUrl(path=dataset_url)
  if os.path.exists(dataset_url.file_path()):
- with HDF5File(
- dataset_url.file_path(), mode="r", swmr=get_swmr_mode()
- ) as h5f:
+ with open_hdf5(dataset_url.file_path()) as h5f:
  if dataset_url.data_path() not in h5f:
  dataset_url = None
  else:

tomwer/core/process/reconstruction/nabu/nabuscores.py
@@ -211,6 +211,7 @@ def run_nabu_multicor(
  recons_urls = {}
  std_outs = []
  std_errs = []
+ future_tomo_obj = None

  success = result.success
  if isinstance(result, ResultsWithStd):

tomwer/core/process/reconstruction/nabu/nabuslices.py
@@ -35,6 +35,7 @@ import os
  import gc
  from tomwer.io.utils import format_stderr_stdout
  from silx.utils.deprecation import deprecated, deprecated_warning
+ from silx.io.utils import open as open_hdf5

  from processview.core.manager.manager import ProcessManager, DatasetState

@@ -84,7 +85,6 @@ from processview.core.superviseprocess import SuperviseProcess
  from silx.io.dictdump import h5todict
  from silx.io.utils import h5py_read_dataset
  from silx.utils.enum import Enum as _Enum
- from tomoscan.io import HDF5File, get_swmr_mode

  from tomwer.core.process.task import Task
  from tomwer.core.scan.edfscan import EDFTomoScan
@@ -173,14 +173,6 @@ def run_slices_reconstruction(

  # handle special cases like several db...
  nabu_configurations = interpret_tomwer_configuration(config, scan=scan)
- if "phase" in config and "delta_beta" in config["phase"]:
- db_list = utils.retrieve_lst_of_value_from_str(
- config["phase"]["delta_beta"], type_=float
- )
- else:
- db_list = ()
- ask_sinogram_registration = len(db_list) > 0
- ask_sinogram_load = False
  output_urls = []
  stderrs = []
  stdouts = []
@@ -199,8 +191,6 @@
  scan=scan,
  slice_index=slice_index,
  dry_run=dry_run,
- ask_sinogram_registration=ask_sinogram_registration,
- ask_sinogram_load=ask_sinogram_load,
  instanciate_class_only=instanciate_classes_only,
  axis=config.get("reconstruction", {}).get("slice_plane", "XY"),
  )
@@ -238,9 +228,6 @@
  final_configs.append(result.config)
  all_succeed = all_succeed and result.success

- if ask_sinogram_registration:
- ask_sinogram_load = True
- ask_sinogram_registration = False
  if advancement is not None:
  advancement.increaseAdvancement(1)

@@ -494,7 +481,7 @@ class NabuSlicesTask(
  :rtype:dict
  """
  if entry is None:
- with HDF5File(process_file, "r", swmr=get_swmr_mode()) as h5f:
+ with open_hdf5(process_file) as h5f:
  entries = NabuSlicesTask._get_process_nodes(
  root_node=h5f, process=NabuSlicesTask
  )
@@ -510,7 +497,7 @@
  configuration_path = None
  res = {}

- with HDF5File(process_file, "r", swmr=get_swmr_mode()) as h5f:
+ with open_hdf5(process_file) as h5f:
  nabu_nodes = NabuSlicesTask._get_process_nodes(
  root_node=h5f[entry], process=NabuSlicesTask
  )
@@ -640,8 +627,6 @@ class SingleSliceRunner(_NabuBaseReconstructor):
  slice_index: Union[int, str, None],
  axis: NabuPlane,
  target: Target,
- ask_sinogram_registration: bool,
- ask_sinogram_load: bool,
  cluster_config: Optional[dict],
  process_name: str,
  add_to_latest_reconstructions: bool = True,
@@ -658,8 +643,6 @@ class SingleSliceRunner(_NabuBaseReconstructor):
  if not isinstance(config, dict):
  raise TypeError(f"config is expected to be a dictionary not {type(dict)}")
  self._config = config
- self._ask_sinogram_registration = ask_sinogram_registration
- self._ask_sinogram_load = ask_sinogram_load
  self._add_to_latest_reconstructions = add_to_latest_reconstructions

  @property
@@ -670,14 +653,6 @@ class SingleSliceRunner(_NabuBaseReconstructor):
  def config(self):
  return self._config

- @property
- def ask_sinogram_load(self):
- return self._ask_sinogram_load
-
- @property
- def ask_sinogram_registration(self):
- return self._ask_sinogram_registration
-
  @property
  def add_to_latest_reconstructions(self):
  return self._add_to_latest_reconstructions
@@ -721,25 +696,8 @@ class SingleSliceRunner(_NabuBaseReconstructor):
  # force overwrite results
  if "output" not in config:
  config["output"] = {}
- # handle nabu sinogram step
- if self.scan.process_file is not None:
- steps_file_basename, _ = os.path.splitext(self.scan.process_file)
- steps_file_basename = "_".join(
- ("steps_file_basename", "nabu", "sinogram", "save", "step")
- )
- steps_file_basename = steps_file_basename + ".hdf5"
- steps_file = os.path.join(
- os.path.dirname(self.scan.process_file), steps_file_basename
- )
- else:
- steps_file = ""

  config["output"].update({"overwrite_results": 1})
- config["pipeline"] = {
- "save_steps": "sinogram" if self.ask_sinogram_registration else "",
- "resume_from_step": "sinogram" if self.ask_sinogram_load else "",
- "steps_file": steps_file,
- }

  config, cfg_folder = self._treateOutputSliceConfig(config)
  # the policy is to save nabu .cfg file at the same location as the
@@ -845,7 +803,7 @@ class SingleSliceRunner(_NabuBaseReconstructor):
  basename, _ = os.path.splitext(scan.master_file)
  basename = os.path.basename(basename)
  try:
- with HDF5File(scan.master_file, mode="r", swmr=get_swmr_mode()) as h5f:
+ with open_hdf5(scan.master_file) as h5f:
  if len(h5f.keys()) > 1:
  # if there is more than one entry in the file append the entry name to the file basename
  basename = "_".join((basename, scan.entry.lstrip("/")))
@@ -918,8 +876,6 @@ def run_single_slice_reconstruction(
  nabu_config,
  dry_run,
  slice_index: Union[int, str, None],
- ask_sinogram_registration: bool,
- ask_sinogram_load: bool,
  process_id: Optional[int] = None,
  cluster_config: Optional[dict] = None,
  add_to_latest_reconstructions=True,
@@ -938,8 +894,6 @@
  :param local:
  :param stdout: file to redirect stdout
  :param stderr: file to redirect stderr
- :param bool ask_sinogram_registration: should we ask nabu to register sinogram
- :param bool ask_sinogram_load: should we ask nabu to load sinogram
  :param bool add_to_latest_reconstructions: if true add reconstructed slice to the latest reconstruction.
  We wan't to avoid this treatment for saaxis and sadeltebeta for example
  :param bool instanciate_class_only: if we don't want to run the SingleSliceRunner but only return them. Use case: we want to keep a hand on processing and it can be cancelled
@@ -970,8 +924,6 @@
  slice_index=slice_index,
  axis=axis,
  target=target,
- ask_sinogram_registration=ask_sinogram_registration,
- ask_sinogram_load=ask_sinogram_load,
  cluster_config=cluster_config,
  add_to_latest_reconstructions=add_to_latest_reconstructions,
  process_name=process_name,
@@ -1010,6 +962,8 @@ class NabuSliceMode(_Enum):
  n_slice = scan.dim_2 or 2048
  elif axis in (NabuPlane.YZ, NabuPlane.XZ):
  n_slice = scan.dim_1 or 2048
+ else:
+ raise NotImplementedError(f"unknow axis {axis}")
  res.append(n_slice // 2)
  else:
  raise ValueError(

tomwer/core/process/reconstruction/nabu/nabuvolume.py
@@ -43,10 +43,9 @@ from processview.core.manager.manager import ProcessManager, DatasetState
  from processview.core.superviseprocess import SuperviseProcess

  from silx.io.utils import h5py_read_dataset
+ from silx.io.utils import open as open_hdf5
  from tomwer.core.utils.deprecation import deprecated_warning

- from tomoscan.io import HDF5File, get_swmr_mode
-
  from tomwer.core.cluster.cluster import SlurmClusterConfiguration
  from tomwer.core.futureobject import FutureTomwerObject
  from tomwer.core.process.reconstruction.nabu.plane import NabuPlane
@@ -354,9 +353,7 @@ class VolumeRunner(_NabuBaseReconstructor):
  basename = os.path.basename(basename)
  try:
  # if there is more than one entry in the file append the entry name to the file basename
- with HDF5File(
- self.scan.master_file, mode="r", swmr=get_swmr_mode()
- ) as h5f:
+ with open_hdf5(self.scan.master_file) as h5f:
  if len(h5f.keys()) > 1:
  basename = "_".join((basename, self.scan.entry.strip("/")))
  except Exception:

tomwer/core/process/reconstruction/nabu/utils.py
@@ -171,6 +171,8 @@ def get_recons_volume_identifier(
  entry = scan.entry
  elif isinstance(scan, EDFTomoScan):
  entry = "entry"
+ else:
+ raise NotImplementedError(f"unrecognized scan type ({type(scan)})")

  volumes = (
  HDF5Volume(
@@ -461,6 +463,10 @@ def nabu_std_err_has_error(errs: typing.Optional[bytes]):
  in line
  or "return SourceModule(" in line
  or "CUBLAS" in line
+ or "Not supported for EDF"
+ in line # debatable but very disturbing from the gui side... anyway EDF days are coming to an end
+ or "PerformanceWarning" in line
+ or "jitify._init_module()" in line
  )

  if errs is None:

tomwer/core/process/reconstruction/saaxis/saaxis.py
@@ -536,6 +536,8 @@ class SAAxisTask(
  mess = "sa-axis computation succeeded"
  else:
  cors_res = {}
+ state = DatasetState.FAILED
+ mess = "couldn't find 'compute_scores'"

  if self._cancelled:
  state = DatasetState.CANCELLED

tomwer/core/process/reconstruction/sadeltabeta/sadeltabeta.py
@@ -341,6 +341,8 @@ class SADeltaBetaTask(
  mess = "sa-delta-beta computation succeeded"
  else:
  dbs_res = {}
+ state = DatasetState.FAILED
+ mess = "couldn't find 'compute_scores'"

  if self._cancelled:
  state = DatasetState.CANCELLED

tomwer/core/process/task.py
@@ -46,7 +46,8 @@ from ewokscore.task import Task as _EwoksTask
  from ewokscore.taskwithprogress import TaskWithProgress as _EwoksTaskWithProgress
  from silx.io.dictdump import dicttoh5, h5todict
  from silx.io.utils import h5py_read_dataset
- from tomoscan.io import HDF5File, get_swmr_mode
+ from silx.io.utils import open as open_hdf5
+ from tomoscan.io import HDF5File
  from tomwer.core.utils.locker import FileLockerManager


@@ -376,12 +377,13 @@ class BaseProcessInfo:
  :rtype: list
  """
  # retrieve process to load
- with HDF5File(process_file, mode="r", swmr=get_swmr_mode()) as h5f:
+ with open_hdf5(process_file) as h5f:
  if entry is None:
  if len(h5f.keys()) > 0:
  root = h5f[list(h5f.keys())[0]]
  else:
  _logger.warning("no process find")
+ return []
  else:
  root = h5f[entry]


tomwer/core/process/test/test_data_transfer.py
@@ -42,7 +42,8 @@ from nxtomomill.converter import from_h5_to_nx
  from nxtomomill.io.config.hdf5config import TomoHDF5Config
  from nxtomo.nxobject.nxdetector import ImageKey

- from tomoscan.io import HDF5File, get_swmr_mode
+ from silx.io.utils import open as open_hdf5
+ from tomoscan.io import HDF5File
  from tomoscan.validator import is_valid_for_reconstruction

  from tomwer.core.process.control.datalistener import DataListener
@@ -335,9 +336,9 @@ class NXTomoDataTransferBase(unittest.TestCase):
  dst_scan = process.outputs.data
  assert dst_scan.master_file != self.nexus_file_path
  assert is_valid_for_reconstruction(dst_scan, check_values=True)
- with HDF5File(src_scan.master_file, mode="r", swmr=get_swmr_mode()) as h5s_src:
+ with open_hdf5(src_scan.master_file) as h5s_src:
  src_dataset = h5s_src["entry0000/detector/data"][...]
- with HDF5File(dst_scan.master_file, mode="r", swmr=get_swmr_mode()) as h5s_dst:
+ with open_hdf5(dst_scan.master_file) as h5s_dst:
  dst_dataset = h5s_dst["entry0000/detector/data"][...]
  assert numpy.array_equal(src_dataset, dst_dataset)


tomwer/core/scan/blissscan.py
@@ -54,7 +54,7 @@ import logging
  from typing import Optional

  from silx.io.utils import h5py_read_dataset
- from tomoscan.io import HDF5File, get_swmr_mode
+ from silx.io.utils import open as open_hdf5

  _logger = logging.getLogger(__name__)

@@ -203,7 +203,7 @@ class BlissScan:
  return True
  return False

- with HDF5File(file_path, mode="r", swmr=get_swmr_mode()) as h5s:
+ with open_hdf5(file_path) as h5s:
  if not isinstance(h5s, h5py.Group) or not isinstance(
  h5s[entry], h5py.Group
  ):
@@ -227,7 +227,7 @@ class BlissScan:
  return tuple()
  else:
  res = []
- with HDF5File(file_path, mode="r", swmr=get_swmr_mode()) as h5s:
+ with open_hdf5(file_path) as h5s:
  for entry in h5s:
  if BlissScan.is_bliss_valid_entry(file_path=file_path, entry=entry):
  res.append(entry)

tomwer/core/scan/nxtomoscan.py
@@ -40,12 +40,12 @@ from urllib.parse import urlparse

  import h5py
  from processview.core.dataset import DatasetIdentifier
+ from silx.io.utils import open as open_hdf5
  from tomoscan.esrf.identifier.hdf5Identifier import (
  NXtomoScanIdentifier as _NXtomoScanIdentifier,
  )
  from tomoscan.esrf.identifier.url_utils import UrlSettings, split_path, split_query
  from tomoscan.esrf.scan.nxtomoscan import NXtomoScan as _tsNXtomoScan
- from tomoscan.io import HDF5File, get_swmr_mode
  from nxtomo.nxobject.nxdetector import ImageKey

  from tomwer.utils import docstring
@@ -424,7 +424,7 @@ class NXtomoScan(_tsNXtomoScan, TomwerScanBase):
  return tuple()
  else:
  res = []
- with HDF5File(file_path, mode="r", swmr=get_swmr_mode()) as h5s:
+ with open_hdf5(file_path) as h5s:
  for entry_name, node in h5s.items():
  if isinstance(node, h5py.Group):
  if NXtomoScan.entry_is_nx_tomo(node):

tomwer/core/scan/scanbase.py
@@ -40,8 +40,9 @@ import functools
  import numpy
  from silx.io.url import DataUrl
  from silx.utils.enum import Enum as _Enum
+ from silx.io.utils import open as open_hdf5
+
  from tomoscan.identifier import VolumeIdentifier
- from tomoscan.io import HDF5File, get_swmr_mode
  from tomoscan.normalization import IntensityNormalization
  from tomoscan.volumebase import VolumeBase
  from tomoscan.identifier import BaseIdentifier
@@ -131,7 +132,7 @@ class TomwerScanBase(TomwerObject):
  and self.process_file is not None
  and os.path.exists(self.process_file)
  ):
- with HDF5File(self.process_file, mode="r", swmr=get_swmr_mode()) as h5s:
+ with open_hdf5(self.process_file) as h5s:
  self._process_index = len(h5s.items())
  else:
  self._process_index = 0
@@ -756,7 +757,7 @@ class TomwerScanBase(TomwerObject):
  from tomwer.core.process.task import Task

  if os.path.exists(self.process_file):
- with HDF5File(self.process_file, mode="r", swmr=get_swmr_mode()) as h5s:
+ with open_hdf5(self.process_file) as h5s:
  if not hasattr(self, "entry"):
  entry = "entry"
  else:
@@ -864,9 +865,7 @@ def _get_reconstructed_single_file_volume(

  if check_url is True:
  try:
- with HDF5File(
- os.path.abspath(file_), "r", swmr=get_swmr_mode()
- ) as h5f:
+ with open_hdf5(os.path.abspath(file_)) as h5f:
  if entry not in h5f:
  logger.info("{volume} does not exists")
  return None

tomwer/gui/control/datawatcher/datawatcher.py
@@ -148,7 +148,6 @@ class DataWatcherWidget(_DataWatcher, qt.QMainWindow):

  # set initial path to observe
  self.setFolderObserved(self._getInitPath())
- self._initStatusView()

  # hide all windows by default
  for widget in (
@@ -475,20 +474,10 @@ class DataWatcherWidget(_DataWatcher, qt.QMainWindow):
  self.observationThread.sigScanReady.disconnect(self._signalScanReady)
  self.obsThIsConnected = False

- def _initStatusView(self):
- """
- The status view need a thread to update the animated icon when scanning
- """
- self.__threadAnimation = QWaiterThread(0.1)
- self.__threadAnimation.finished.connect(self._updateAnimatedIcon)
-
  def _updateStatusView(self):
  """Update the processing state"""
  if self.currentStatus in self._animatedStates:
- if not self.__threadAnimation.isRunning():
- self.__threadAnimation.start()
- elif self.__threadAnimation is not None:
- self.__threadAnimation.wait(4000)
+ pass
  elif self.currentStatus == "acquisition ended":
  self._setStateIcon(silxicons.getQIcon("selected"))
  elif self.currentStatus == "failure":
@@ -506,18 +495,6 @@ class DataWatcherWidget(_DataWatcher, qt.QMainWindow):
  else:
  self._stateLabel.setPixmap(icon.pixmap(30, state=qt.QIcon.On))

- def _updateAnimatedIcon(self):
- """Simple function which manage the waiting icon"""
- if self.currentStatus in self._animatedStates:
- icon = self.animated_icon.currentIcon()
- if icon is None:
- icon = qt.QIcon()
- self.animated_icon._updateState()
- self._setStateIcon(icon)
-
- # get ready for the next animation
- self.__threadAnimation.start()
-
  def _signalScanReady(self, scan):
  if type(scan) is str:
  try:

tomwer/gui/control/reducedarkflatselector.py
@@ -8,11 +8,11 @@ from silx.gui import qt
  from silx.gui.plot import Plot2D
  from silx.io.dictdump import h5todict
  from silx.io.url import DataUrl
+ from silx.io.utils import open as open_hdf5
  from silx.gui.dialog.DataFileDialog import DataFileDialog

  from tomoscan.esrf.scan.utils import cwd_context
  from tomoscan.framereducer.target import REDUCER_TARGET
- from tomoscan.io import HDF5File, get_swmr_mode

  from tomwer.io.utils import get_default_directory

@@ -249,7 +249,7 @@ class ReduceDarkFlatSelectorTableWidget(qt.QWidget):
  if not os.path.exists(file_path):
  _logger.error(f"file doesn't exists ({file_path})")

- with HDF5File(file_path, mode="r", swmr=get_swmr_mode()) as h5f:
+ with open_hdf5(file_path) as h5f:
  entries = tuple(h5f.keys())

  res = []