tomwer 1.4.0rc6-py3-none-any.whl → 1.4.1-py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (58)
  1. orangecontrib/tomwer/tutorials/simple_volume_to_slurm_reconstruction.ows +2 -2
  2. orangecontrib/tomwer/widgets/reconstruction/AxisOW.py +19 -47
  3. orangecontrib/tomwer/widgets/reconstruction/NabuOW.py +8 -3
  4. orangecontrib/tomwer/widgets/reconstruction/NabuVolumeOW.py +4 -6
  5. orangecontrib/tomwer/widgets/reconstruction/SAAxisOW.py +8 -4
  6. orangecontrib/tomwer/widgets/reconstruction/SADeltaBetaOW.py +0 -4
  7. orangecontrib/tomwer/widgets/reconstruction/SinoNormOW.py +1 -13
  8. tomwer/app/axis.py +0 -1
  9. tomwer/app/intensitynormalization.py +0 -14
  10. tomwer/app/multicor.py +1 -33
  11. tomwer/app/multipag.py +1 -31
  12. tomwer/app/nabuapp.py +0 -1
  13. tomwer/app/patchrawdarkflat.py +0 -3
  14. tomwer/app/reducedarkflat.py +0 -1
  15. tomwer/core/process/control/datalistener/datalistener.py +0 -232
  16. tomwer/core/process/control/datawatcher/datawatcher.py +0 -5
  17. tomwer/core/process/control/scantransfer.py +3 -60
  18. tomwer/core/process/edit/darkflatpatch.py +0 -8
  19. tomwer/core/process/edit/imagekeyeditor.py +2 -19
  20. tomwer/core/process/reconstruction/axis/axis.py +0 -13
  21. tomwer/core/process/reconstruction/darkref/darkrefs.py +0 -59
  22. tomwer/core/process/reconstruction/nabu/nabuslices.py +0 -88
  23. tomwer/core/process/reconstruction/nabu/nabuvolume.py +0 -10
  24. tomwer/core/process/reconstruction/params_cache.py +36 -0
  25. tomwer/core/process/reconstruction/saaxis/saaxis.py +80 -88
  26. tomwer/core/process/reconstruction/sadeltabeta/sadeltabeta.py +78 -86
  27. tomwer/core/process/reconstruction/tests/test_params_cache.py +37 -0
  28. tomwer/core/process/script/python.py +0 -19
  29. tomwer/core/process/task.py +0 -290
  30. tomwer/core/process/tests/test_dark_and_flat.py +0 -6
  31. tomwer/core/process/tests/test_data_transfer.py +0 -1
  32. tomwer/core/process/tests/test_data_watcher.py +6 -23
  33. tomwer/core/scan/edfscan.py +0 -11
  34. tomwer/core/scan/nxtomoscan.py +0 -12
  35. tomwer/core/scan/scanbase.py +0 -81
  36. tomwer/gui/reconstruction/axis/CalculationWidget.py +3 -5
  37. tomwer/gui/reconstruction/tests/test_saaxis.py +2 -2
  38. tomwer/gui/reconstruction/tests/test_sadeltabeta.py +2 -2
  39. tomwer/gui/stitching/config/axisparams.py +2 -0
  40. tomwer/synctools/stacks/reconstruction/axis.py +0 -18
  41. tomwer/synctools/tests/test_foldertransfer.py +2 -19
  42. tomwer/tests/orangecontrib/tomwer/widgets/reconstruction/tests/test_i_norm.py +0 -10
  43. tomwer/tests/orangecontrib/tomwer/widgets/reconstruction/tests/test_sa_delta_beta.py +103 -153
  44. tomwer/tests/orangecontrib/tomwer/widgets/reconstruction/tests/test_saaxis.py +117 -152
  45. tomwer/tests/orangecontrib/tomwer/widgets/{tests/test_darkref.py → test_darkref.py} +0 -9
  46. tomwer/tests/orangecontrib/tomwer/widgets/{tests/test_foldertransfert.py → test_foldertransfert.py} +1 -1
  47. tomwer/tests/test_ewoks/test_workflows.py +0 -4
  48. tomwer/version.py +3 -3
  49. {tomwer-1.4.0rc6.dist-info → tomwer-1.4.1.dist-info}/METADATA +1 -2
  50. {tomwer-1.4.0rc6.dist-info → tomwer-1.4.1.dist-info}/RECORD +55 -56
  51. tomwer/core/scan/tests/test_process_registration.py +0 -64
  52. tomwer/core/utils/Singleton.py +0 -36
  53. tomwer/core/utils/locker.py +0 -58
  54. /tomwer/tests/orangecontrib/tomwer/widgets/{tests/test_conditions.py → test_conditions.py} +0 -0
  55. {tomwer-1.4.0rc6.dist-info → tomwer-1.4.1.dist-info}/LICENSE +0 -0
  56. {tomwer-1.4.0rc6.dist-info → tomwer-1.4.1.dist-info}/WHEEL +0 -0
  57. {tomwer-1.4.0rc6.dist-info → tomwer-1.4.1.dist-info}/entry_points.txt +0 -0
  58. {tomwer-1.4.0rc6.dist-info → tomwer-1.4.1.dist-info}/top_level.txt +0 -0

tomwer/core/process/control/datalistener/datalistener.py
@@ -9,7 +9,6 @@ import time
 
  from ewokscore.task import Task as EwoksTask
  from nxtomomill import converter as nxtomomill_converter
- from silx.io.utils import h5py_read_dataset
  from silx.io.utils import open as open_hdf5
 
  import tomwer.version
@@ -161,241 +160,10 @@ class DataListener(BaseProcessInfo):
  master_file, entry_master_file = mf_emf
  if master_file is not None and entry_master_file is not None:
  scan = NXtomoScan(scan=master_file, entry=entry_master_file)
-
- try:
- # register process.
- self.register_process(
- process_file=scan.process_file,
- entry=scan.entry,
- results={"output_file": scan.master_file, "entry": scan.entry},
- configuration={
- "sample_file": sample_file,
- "entry": entry,
- "file_proposal": proposal_file,
- "master_sample_file": master_sample_file,
- },
- process_index=scan.pop_process_index(),
- overwrite=True,
- )
- except Exception as e:
- _logger.error(e)
-
  self._signal_scan_ready(scan)
  scans.append(scan)
  return tuple(scans)
 
- @staticmethod
- def get_proposal_file(process_file, entry):
- """Return the proposal file of the experimentation if registered by the
- data listener"""
- if entry is None:
- with open_hdf5(process_file) as h5f:
- entries = BaseProcessInfo._get_process_nodes(
- root_node=h5f, process=DataListener
- )
- if len(entries) == 0:
- _logger.info(
- "unable to find a DarkRef process in %s" % process_file
- )
- return None
- elif len(entries) > 1:
- raise ValueError("several entry found, entry should be " "specify")
- else:
- entry = list(entries.keys())[0]
- _logger.info("take %s as default entry" % entry)
-
- with open_hdf5(process_file) as h5f:
- dl_nodes = BaseProcessInfo._get_process_nodes(
- root_node=h5f[entry], process=DataListener
- )
- index_to_path = {}
- for key, index in dl_nodes.items():
- index_to_path[index] = key
-
- if len(dl_nodes) == 0:
- return None
- # take the last processed dark ref
- last_process_index = sorted(dl_nodes.values())[-1]
- last_process_dl = index_to_path[last_process_index]
- if (len(index_to_path)) > 1:
- _logger.debug(
- "several processing found for data listener,"
- "take the last one: %s" % last_process_dl
- )
-
- if "configuration" in h5f[last_process_dl].keys():
- results_node = h5f[last_process_dl]["configuration"]
- if "file_proposal" in results_node.keys():
- try:
- fp = h5py_read_dataset(
- results_node["file_proposal"]
- ) # .decode("UTF-8")
- except AttributeError:
- return None
- else:
- return fp
- else:
- return None
- return None
-
- # TODO the 3 static functions get_proposal_file, get_sample_file and
- # get_source_scans are sharing a lot of source code and should
- # be 'concatenate'
- @staticmethod
- def get_sample_file(process_file, entry):
- """Return the proposal file of the experimentation if registred by the
- data listener"""
- if entry is None:
- with open_hdf5(process_file) as h5f:
- entries = BaseProcessInfo._get_process_nodes(
- root_node=h5f, process=DataListener
- )
- if len(entries) == 0:
- _logger.info(
- "unable to find a DarkRef process in %s" % process_file
- )
- return None
- elif len(entries) > 1:
- raise ValueError("several entry found, entry should be " "specify")
- else:
- entry = list(entries.keys())[0]
- _logger.info("take %s as default entry" % entry)
-
- with open_hdf5(process_file) as h5f:
- dl_nodes = BaseProcessInfo._get_process_nodes(
- root_node=h5f[entry], process=DataListener
- )
- index_to_path = {}
- for key, value in dl_nodes.items():
- index_to_path[key] = key
-
- if len(dl_nodes) == 0:
- return None
- # take the last processed dark ref
- last_process_index = sorted(dl_nodes.keys())[-1]
- last_process_dl = index_to_path[last_process_index]
- if (len(index_to_path)) > 1:
- _logger.debug(
- "several processing found for data listener,"
- "take the last one: %s" % last_process_dl
- )
-
- if "configuration" in h5f[last_process_dl].keys():
- results_node = h5f[last_process_dl]["configuration"]
- if "sample_file" in results_node.keys():
- return h5py_read_dataset(
- results_node["sample_file"]
- ) # .decode("UTF-8")
- else:
- return None
- return None
-
- @staticmethod
- def get_master_sample_file(process_file, entry):
- """Return the proposal file of the experimentation if registered by the
- data listener"""
- if entry is None:
- with open_hdf5(process_file) as h5f:
- entries = BaseProcessInfo._get_process_nodes(
- root_node=h5f, process=DataListener
- )
- if len(entries) == 0:
- _logger.info(
- "unable to find a DarkRef process in %s" % process_file
- )
- return None
- elif len(entries) > 1:
- raise ValueError("several entry found, entry should be " "specify")
- else:
- entry = list(entries.keys())[0]
- _logger.info("take %s as default entry" % entry)
-
- with open_hdf5(process_file) as h5f:
- dl_nodes = BaseProcessInfo._get_process_nodes(
- root_node=h5f[entry], process=DataListener
- )
- index_to_path = {}
- for key, value in dl_nodes.items():
- index_to_path[key] = key
-
- if len(dl_nodes) == 0:
- return None
- # take the last processed dark ref
- last_process_index = sorted(dl_nodes.keys())[-1]
- last_process_dl = index_to_path[last_process_index]
- if (len(index_to_path)) > 1:
- _logger.debug(
- "several processing found for data listener,"
- "take the last one: %s" % last_process_dl
- )
-
- if "configuration" in h5f[last_process_dl].keys():
- results_node = h5f[last_process_dl]["configuration"]
- if "master_sample_file" in results_node.keys():
- return h5py_read_dataset(
- results_node["master_sample_file"]
- ) ##.decode("UTF-8")
- else:
- return None
- return None
-
- @staticmethod
- def get_source_scans(process_file, entry):
- """Return the list of 'bliss scan directory' created for holding this
- specific sequence data
- """
- if entry is None:
- with open_hdf5(process_file) as h5f:
- entries = BaseProcessInfo._get_process_nodes(
- root_node=h5f, process=DataListener
- )
- if len(entries) == 0:
- _logger.info(
- "unable to find a DarkRef process in %s" % process_file
- )
- return None
- elif len(entries) > 1:
- raise ValueError("several entry found, entry should be " "specify")
- else:
- entry = list(entries.keys())[0]
- _logger.info("take %s as default entry" % entry)
-
- with open_hdf5(process_file) as h5f:
- dl_nodes = BaseProcessInfo._get_process_nodes(
- root_node=h5f[entry], process=DataListener
- )
- index_to_path = {}
- for key, index in dl_nodes.items():
- index_to_path[index] = key
-
- if len(dl_nodes) == 0:
- return {}
- # take the last processed dark ref
- last_process_index = sorted(dl_nodes.values())[-1]
- last_process_dl = index_to_path[last_process_index]
- if (len(index_to_path)) > 1:
- _logger.debug(
- "several processing found for data listener,"
- "take the last one: %s" % last_process_dl
- )
-
- if "configuration" in h5f[last_process_dl].keys():
- results_node = h5f[last_process_dl]["configuration"]
- if "source_scans" in results_node.keys():
- tmp_res = h5py_read_dataset(results_node["source_scans"])
-
- def conv(my_str):
- if hasattr(my_str, "decode"):
- return my_str.decode("UTF-8")
- else:
- return my_str
-
- res = [conv(mystr) for mystr in tmp_res]
- return res
- else:
- return None
- return None
-
  def convert(self, bliss_file: str, entry: str) -> tuple:
  """
 

tomwer/core/process/control/datawatcher/datawatcher.py
@@ -409,11 +409,6 @@ class DataWatcher(_DataWatcher):
  def _signalScanReady(self, scan):
  assert isinstance(scan, TomwerScanBase)
  super()._signalScanReady(scan)
- if self._serialize_output_data:
- value = scan.to_dict()
- else:
- value = scan
- self.register_output(key="data", value=value)
  self.scan_found_event.set()
 
  def _launchObservation(self):

tomwer/core/process/control/scantransfer.py
@@ -209,7 +209,6 @@ class ScanTransferTask(
  callback_parameters=(scan, output_scan),
  rights=777,
  )
- self.register_output(key="data", value=outputdir)
  return output_scan
 
  def _get_hdf5_dst_scan(self, bliss_scan_folder_path):
@@ -248,7 +247,9 @@ class ScanTransferTask(
 
  def _process_hdf5_scan(self, scan) -> TomwerScanBase:
  assert isinstance(scan, NXtomoScan)
- from tomwer.core.process.control.datalistener import DataListener
+ logger.warning(
+ "scan transfer for HDF5 is a prototype. Please check transfer is properly executed."
+ )
 
  files_sources = []
  files_dest = []
@@ -259,53 +260,6 @@ class ScanTransferTask(
  lambda file_: os.path.abspath(file_) == os.path.abspath(scan.master_file),
  associated_files,
  )
- source_scans = [os.path.dirname(file_) for file_ in associated_files]
- # manage scan folder
- if source_scans is not None:
- for source_scan in source_scans:
- # files_sources.append(source_scan)
- # files_dest.append(
- # self._get_hdf5_dst_scan(bliss_scan_folder_path=source_scan)
- # )
- # delete_opt.append(True)
- # scan folder is no more managed. Now this is done by browsing VDS directly
- pass
-
- # manage proposal file
- if os.path.exists(scan.process_file):
- proposal_file = DataListener.get_proposal_file(
- process_file=scan.process_file, entry=scan.entry
- )
- else:
- proposal_file = None
- if proposal_file is not None:
- files_sources.append(proposal_file)
- files_dest.append(self._get_hdf5_proposal_file_dst(proposal_file))
- delete_opt.append(False)
-
- # manage sample file
- if os.path.exists(scan.process_file):
- sample_file = DataListener.get_sample_file(
- process_file=scan.process_file, entry=scan.entry
- )
- else:
- sample_file = None
- if sample_file is not None:
- files_sources.append(sample_file)
- files_dest.append(self._get_hdf5_sample_file_or_nx_dst(sample_file))
- delete_opt.append(False)
-
- # manage saving file
- if os.path.exists(scan.process_file):
- mst_sample_file = DataListener.get_master_sample_file(
- process_file=scan.process_file, entry=scan.entry
- )
- else:
- mst_sample_file = None
- if mst_sample_file is not None:
- files_sources.append(mst_sample_file)
- files_dest.append(self._get_master_sample_file_dst(mst_sample_file))
- delete_opt.append(False)
 
  # manage .nx file
  if os.path.exists(scan.master_file):
@@ -507,17 +461,6 @@ class ScanTransferTask(
  def __noticeTransfertSuccess(self, input_scan, output_scan):
  self._signalCopySucceed()
 
- results = {"input_scan": str(input_scan), "output_scan": str(output_scan)}
- entry = "entry"
- if isinstance(output_scan, NXtomoScan):
- entry = output_scan.entry
- self.register_process(
- process_file=output_scan.process_file,
- entry=entry,
- results=results,
- process_index=output_scan.pop_process_index(),
- configuration=None,
- )
  logger.processSucceed(
  f"transfer succeed of {input_scan} to {output_scan}",
  extra={

tomwer/core/process/edit/darkflatpatch.py
@@ -69,14 +69,6 @@ class DarkFlatPatchTask(
  if isinstance(value, DataUrl):
  config[key] = value.path()
 
- self.register_process(
- process_file=scan.process_file,
- entry=scan.entry,
- configuration=config,
- results={},
- process_index=scan.pop_process_index(),
- overwrite=True,
- )
  if self.get_input_value("serialize_output_data", True):
  self.outputs.data = scan.to_dict()
  else:

tomwer/core/process/edit/imagekeyeditor.py
@@ -115,14 +115,7 @@ class ImageKeyEditorTask(
  value = config["modifications"][key]
  config["modifications"][str(key)] = value
  config["modifications"] = new_modif
- self.register_process(
- process_file=scan.process_file,
- entry=scan.entry,
- configuration=config,
- results={},
- process_index=scan.pop_process_index(),
- overwrite=True,
- )
+
  if self.get_input_value("serialize_output_data", True):
  self.outputs.data = scan.to_dict()
  else:
@@ -175,17 +168,7 @@ class ImageKeyUpgraderTask(
 
  # apply modification using tomoscan
  change_image_key_control(scan=scan, config=configuration)
- self.register_process(
- process_file=scan.process_file,
- entry=scan.entry,
- configuration={
- ImageKey.from_value(key).value: ImageKey.from_value(value).value
- for key, value in operations.items()
- },
- results={},
- process_index=scan.pop_process_index(),
- overwrite=True,
- )
+
  if self.get_input_value("serialize_output_data", True):
  self.outputs.data = scan.to_dict()
  else:

tomwer/core/process/reconstruction/axis/axis.py
@@ -251,19 +251,6 @@ class AxisTask(
 
  def _process_end(self, scan, cor, error=None):
  assert isinstance(scan, TomwerScanBase)
- if scan.process_file is not None:
- entry = "entry"
- if isinstance(scan, NXtomoScan):
- entry = scan.entry
- self.register_process(
- process_file=scan.process_file,
- entry=entry,
- results={"center_of_rotation": cor if cor is not None else "-"},
- configuration=self._axis_params.to_dict(),
- process_index=scan.pop_process_index(),
- overwrite=True,
- )
-
  try:
  extra = {
  logconfig.DOC_TITLE: self._scheme_title,

tomwer/core/process/reconstruction/darkref/darkrefs.py
@@ -20,7 +20,6 @@ import tomwer.version
  from tomwer.core import settings
  from tomwer.core.utils.lbsram import is_low_on_memory
  from tomwer.core.process.task import Task
- from tomwer.core.scan.nxtomoscan import NXtomoScan
  from tomwer.core.scan.scanbase import TomwerScanBase
  from tomwer.core.scan.scanfactory import ScanFactory
  from tomwer.core.utils.scanutils import data_identifier_to_scan
@@ -200,47 +199,6 @@ class DarkRefsTask(
  else:
  self._recons_params.load_from_dict(properties)
 
- @staticmethod
- def get_darks_frm_process_file(
- process_file, entry=None, as_url: bool = False
- ) -> dict | None:
- """
-
- :param process_file: path to the process file
- :param entry: entry to read in the process file if more than one
- :param as_url: if true then an url will be used instead of a
- numpy.array
- :return: dictionary with index in the sequence as key and numpy array
- as value (or url if as_url set to True)
- """
- deprecated_warning(
- name="tomwer.core.process.reconstruction.darkref.DarkRefsTask.get_darks_frm_process_file",
- type_="function",
- reason="removed. Reduced dark / flat are expected to be stored on didicated files and not in tomwer_processes anymore",
- since_version="1.2",
- )
- return None
-
- @staticmethod
- def get_flats_frm_process_file(
- process_file, entry=None, as_url: bool = False
- ) -> dict | None:
- """
-
- :param process_file:
- :param entry: entry to read in the process file if more than one
- :param as_url: if true then an url will be used instead of a
- numpy.array
- :return:
- """
- deprecated_warning(
- name="tomwer.core.process.reconstruction.darkref.DarkRefsTask.get_flats_frm_process_file",
- type_="function",
- reason="removed. Reduced dark / flat are expected to be stored on didicated files and not in tomwer_processes anymore",
- since_version="1.2",
- )
- return None
-
  def run(self):
  if isinstance(self.inputs.data, str):
  try:
@@ -364,23 +322,6 @@ class DarkRefsTask(
 
  scan.save_reduced_flats(f_flats, flats_infos=o_flats_infos, overwrite=True)
 
- if len(results) > 0:
- # if some processing to be registered
- if scan.process_file is not None and not (
- isinstance(scan, NXtomoScan) and not self.__new_hdf5_entry_created
- ):
- entry = "entry"
- if hasattr(scan, "entry"):
- entry = scan.entry
- self.register_process(
- process_file=scan.process_file,
- entry=entry,
- configuration=self.recons_params.to_dict(),
- results=results,
- interpretations=interpretations,
- process_index=scan.pop_process_index(),
- overwrite=True,
- )
  logger.processSucceed(f"Dark and flat reduction succeeded for {scan}")
  self.notify_to_state_to_managed(
  dataset=scan, state=DatasetState.SUCCEED, details=None

tomwer/core/process/reconstruction/nabu/nabuslices.py
@@ -54,8 +54,6 @@ from nabu.pipeline.fullfield.nabu_config import (
  nabu_config as nabu_fullfield_default_config,
  )
  from processview.core.superviseprocess import SuperviseProcess
- from silx.io.dictdump import h5todict
- from silx.io.utils import h5py_read_dataset
  from silx.utils.enum import Enum as _Enum
 
  from tomwer.core.process.task import Task
@@ -63,7 +61,6 @@ from tomwer.core.scan.edfscan import EDFTomoScan
  from tomwer.core.scan.nxtomoscan import NXtomoScan
  from tomwer.core.scan.scanbase import TomwerScanBase
  from tomwer.core.scan.scanfactory import ScanFactory
- from tomwer.io.utils.h5pyutils import EntryReader
  from tomwer.utils import docstring
 
  from . import settings as nabu_settings
@@ -276,10 +273,6 @@ class NabuSlicesTask(
  raise ValueError(f"input type of {scan}: {type(scan)} is not managed" "")
  assert isinstance(configuration, dict), "configuration is expected to be a dict"
 
- entry = "entry"
- if isinstance(scan, NXtomoScan):
- entry = scan.entry
-
  output_urls = []
  stderrs = []
  stdouts = []
@@ -362,7 +355,6 @@ class NabuSlicesTask(
  VolumeFactory.from_identifier_to_vol_urls(rec_identifier)
  )
 
- process_index = scan.pop_process_index()
  # update processes information / registration
  gc.collect()
 
@@ -395,15 +387,6 @@ class NabuSlicesTask(
  details=details,
  )
 
- # register result
- self.register_process(
- process_file=scan.process_file,
- entry=entry,
- configuration=configuration,
- results={},
- process_index=process_index,
- overwrite=True,
- )
  if self.get_input_value("serialize_output_data", True):
  self.outputs.data = scan.to_dict()
  else:
@@ -433,69 +416,6 @@ class NabuSlicesTask(
  def dry_run(self):
  return self._dry_run
 
- @staticmethod
- def get_process_frm_process_file(process_file: str, entry: str) -> dict:
- """
- Read informations regarding the nabu process save in the tomwer_process.h5 file
-
- :return: dictionary with the contain of the nabu process
- """
- if entry is None:
- with open_hdf5(process_file) as h5f:
- entries = NabuSlicesTask._get_process_nodes(
- root_node=h5f, process=NabuSlicesTask
- )
- if len(entries) == 0:
- _logger.info("unable to find a Axis process in %s" % process_file)
- return None
- elif len(entries) > 1:
- raise ValueError("several entry found, entry should be " "specify")
- else:
- entry = list(entries.keys())[0]
- _logger.info("take %s as default entry" % entry)
-
- configuration_path = None
- res = {}
-
- with open_hdf5(process_file) as h5f:
- nabu_nodes = NabuSlicesTask._get_process_nodes(
- root_node=h5f[entry], process=NabuSlicesTask
- )
- index_to_path = {}
- for key, index in nabu_nodes.items():
- index_to_path[index] = key
-
- if len(nabu_nodes) == 0:
- return None
- # take the last processed dark ref
- last_process_index = sorted(list(nabu_nodes.values()))[-1]
- last_process_dark = index_to_path[last_process_index]
- if (len(index_to_path)) > 1:
- _logger.debug(
- "several processing found for dark-ref, "
- "take the last one: %s" % last_process_dark
- )
-
- for key_name in (
- "class_instance",
- "date",
- "program",
- "sequence_index",
- "version",
- ):
- if key_name in h5f[last_process_dark]:
- res[key_name] = h5py_read_dataset(h5f[last_process_dark][key_name])
- if "configuration" in h5f[last_process_dark]:
- configuration_path = "/".join(
- (h5f[last_process_dark].name, "configuration")
- )
-
- if configuration_path is not None:
- res["configuration"] = h5todict(
- h5file=process_file, path=configuration_path
- )
- return res
-
  def cancel(self):
  """
  stop current processing
@@ -504,14 +424,6 @@ class NabuSlicesTask(
  if self._current_processing is not None:
  self._current_processing.cancel()
 
- @staticmethod
- def retrieve_last_relative_cor(scan):
- with EntryReader(scan.process_file_url) as h5f:
- latest_nabu_node = Task.get_most_recent_process(h5f, NabuSlicesTask)
- path = "configuration/reconstruction/rotation_axis_position"
- if latest_nabu_node is not None and path in latest_nabu_node:
- return h5py_read_dataset(latest_nabu_node[path])
-
 
  def interpret_tomwer_configuration(config: dict, scan: TomwerScanBase | None) -> tuple:
  """

tomwer/core/process/reconstruction/nabu/nabuvolume.py
@@ -14,7 +14,6 @@ from nabu.pipeline.fullfield.nabu_config import (
  from processview.core.manager.manager import ProcessManager, DatasetState
  from processview.core.superviseprocess import SuperviseProcess
 
- from silx.io.utils import h5py_read_dataset
  from silx.io.utils import open as open_hdf5
  from tomwer.core.utils.deprecation import deprecated_warning
 
@@ -30,7 +29,6 @@ from tomwer.core.scan.scanfactory import ScanFactory
  from tomwer.core.utils.dictutils import concatenate_dict
  from tomwer.core.utils.scanutils import data_identifier_to_scan
  from tomwer.core.volume.volumefactory import VolumeFactory
- from tomwer.io.utils.h5pyutils import EntryReader
  from tomwer.utils import docstring
  from tomwer.io.utils import format_stderr_stdout
  from tomwer.core.process.reconstruction.output import (
@@ -548,14 +546,6 @@ class NabuVolumeTask(
  def dry_run(self):
  return self._dry_run
 
- @staticmethod
- def retrieve_last_relative_cor(scan):
- with EntryReader(scan.process_file_url) as h5f:
- latest_nabu_node = Task.get_most_recent_process(h5f, NabuVolumeTask)
- path = "configuration/reconstruction/rotation_axis_position"
- if latest_nabu_node is not None and path in latest_nabu_node:
- return h5py_read_dataset(latest_nabu_node[path])
-
  def cancel(self):
  """
  stop current processing