mxcubecore 1.382.0__py3-none-any.whl → 1.383.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,12 +4,14 @@ import shutil
  from collections import defaultdict
  from datetime import datetime, timedelta
  from pathlib import Path
- from typing import List, Optional
+ from typing import TYPE_CHECKING, List, Optional
  from zoneinfo import ZoneInfo

  import requests
  from pyicat_plus.client.main import IcatClient
- from pyicat_plus.client.models.session import Session as ICATSession
+
+ if TYPE_CHECKING:
+ from pyicat_plus.client.models.session import Session as ICATSession

  from mxcubecore import HardwareRepository as HWR
  from mxcubecore.BaseHardwareObjects import HardwareObject
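The ICATSession import is now needed only for a type annotation, so it moves under a typing.TYPE_CHECKING guard and is no longer imported at runtime. A minimal standalone sketch of the pattern, using an arbitrary stdlib module rather than pyicat_plus:

    from __future__ import annotations  # annotations stay unevaluated at runtime

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # seen only by static type checkers (mypy, pyright); never executed
        from decimal import Decimal

    def as_float(value: Decimal) -> float:
        # the annotation is not evaluated at runtime, so no import is required
        return float(value)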
@@ -43,6 +45,7 @@ class ICATLIMS(AbstractLims):
  self.url = self.get_property("ws_root")
  self.ingesters = self.get_property("queue_urls")
  self.investigations = []
+ self.samples = []

  # Initialize ICAT client
  self.icatClient = IcatClient(
@@ -62,21 +65,21 @@ class ICATLIMS(AbstractLims):
  password: str,
  session_manager: Optional[LimsSessionManager],
  ) -> LimsSessionManager:
- logger.debug("authenticate %s" % (user_name))
+ msg = f"authenticate {user_name}"
+ logger.debug(msg)

  self.icat_session: ICATSession = self.icatClient.do_log_in(password)

  if self.icatClient is None or self.icatClient is None:
- logger.exception(
- "Error initializing icatClient. icatClient=%s" % (self.url)
- )
+ msg = "Error initializing icatClient: "
+ msg += f"icatClient={self.url}"
+ logger.error(msg)
  raise RuntimeError("Could not initialize icatClient")

  # Connected to metadata icatClient
- logger.debug(
- "Connected succesfully to icatClient. fullName=%s url=%s"
- % (self.icat_session["fullName"], self.url)
- )
+ msg = "Connected succesfully to icatClient: "
+ msg += f"fullName={self.icat_session['fullName']}, url={self.url}"
+ logger.debug(msg)

  # Retrieving user's investigations
  sessions = self.to_sessions(self.__get_all_investigations())
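A recurring change in this release is that %-formatted logging calls are rewritten to build the message in a local msg variable with f-strings and then hand it to the logger, which keeps individual lines short. A trivial illustration of the pattern (the user name is a placeholder):

    import logging

    logger = logging.getLogger(__name__)
    user_name = "mx0000"  # placeholder

    msg = f"authenticate {user_name}"
    logger.debug(msg)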
@@ -85,7 +88,8 @@ class ICATLIMS(AbstractLims):
  msg = f"No sessions available for user {user_name}"
  raise RuntimeError(msg)

- logger.debug("Successfully retrieved %s sessions" % (len(sessions)))
+ msg = f"Successfully retrieved {len(sessions)} sessions"
+ logger.debug(msg)

  # This is done because ICATLims can be used standalone or from ESRFLims
  if session_manager is not None:
@@ -95,39 +99,37 @@ class ICATLIMS(AbstractLims):
  # access to that session
  if self.session_manager.active_session:
  session_found = False
-
+ session_id = self.session_manager.active_session.session_id
  for session in sessions:
- if session.session_id == self.session_manager.active_session.session_id:
+ if session.session_id == session_id:
  session_found = True
  break

  if not session_found:
- raise RuntimeError(
- "Current session in-use (with id %s) not avaialble to user %s"
- % (self.session_manager.active_session.session_id, user_name)
- )
+ msg = f"Current session in-use (with id {session_id}) "
+ msg += f"not avaialble for user {user_name}"
+ raise RuntimeError(msg)
+
  return self.session_manager, self.icat_session["name"], sessions

  def is_user_login_type(self) -> bool:
  return True

  def get_proposals_by_user(self, user_name):
- logger.debug("get_proposals_by_user %s" % user_name)
+ msg = f"get_proposals_by_user {user_name}\n"
+ msg += f"[ICATCLient] Read {len(self.lims_rest.investigations)} investigations"
+ logger.debug(msg)

- logger.debug(
- "[ICATCLient] Read %s investigations" % len(self.lims_rest.investigations)
- )
  return self.lims_rest.to_sessions(self.lims_rest.investigations)

- def _get_loaded_pucks(self, parcels):
- """
- Retrieves all pucks from the parcels that have a defined 'sampleChangerLocation'.
-
- A puck is considered "loaded" if it contains the key 'sampleChangerLocation'.
- Iterates through all parcels and collects such pucks.
-
+ def _get_loaded_pucks(self, parcels) -> list:
+ """Retrieve all pucks from the parcels that have a defined
+ 'sampleChangerLocation'.
+ A puck is considered "loaded" if it contains the key
+ 'sampleChangerLocation'.
+ Iterates through all parcels and collects such pucks.
  Returns:
- list: A list of pucks (dicts) that have 'sampleChangerLocation' defined.
+ A list of pucks (dicts) that have 'sampleChangerLocation' defined.
  """
  loaded_pucks = []

@@ -143,21 +145,20 @@ class ICATLIMS(AbstractLims):

  return loaded_pucks

- def get_samples(self, lims_name):
- """
- Retrieves and processes sample information from LIMS based on the provided name.
-
- This method:
- - Retrieves parcel data (containers like UniPucks or SpinePucks).
- - Retrieves sample sheet data.
- - Identifies and processes only loaded pucks (those with a 'sampleChangerLocation').
- - Converts each sample in the pucks into internal queue samples using `__to_sample`.
-
+ def get_samples(self, lims_name: str) -> list:
+ """Retrieve and process sample information from LIMS based on the
+ provided name:
+ - Retrieves parcel data (containers like UniPucks or SpinePucks).
+ - Retrieves sample sheet data.
+ - Identifies and processes only loaded pucks
+ (those with a 'sampleChangerLocation').
+ - Converts each sample in the pucks into internal queue samples
+ using `__to_sample`.
  Args:
- lims_name (str): The LIMS name or identifier used to fetch sample-related data.
+ The LIMS name or identifier used to fetch sample-related data.

  Returns:
- list: A list of processed sample objects ready for queuing.
+ A list of processed sample objects ready for queuing.
  """

  self.samples = []
@@ -181,37 +182,33 @@ class ICATLIMS(AbstractLims):

  # Filter for loaded pucks
  self.loaded_pucks = self._get_loaded_pucks(self.parcels)
- logger.debug("[ICATClient] %d loaded pucks found", len(self.loaded_pucks))
+ msg = f"[ICATClient] {len(self.loaded_pucks)} loaded pucks found"
+ logger.debug(msg)

  # Extract and process samples from loaded pucks
  for puck in self.loaded_pucks:
  tracking_samples = puck.get("content", [])
  puck_name = puck.get("name", "Unnamed")
  location = puck.get("sampleChangerLocation", "Unknown")
-
- logger.debug(
- "[ICATClient] Found puck '%s' at position '%s' containing %d samples",
- puck_name,
- location,
- len(tracking_samples),
- )
-
+ msg = f"[ICATClient] Found puck {puck_name} at position "
+ msg += f"{location}, containing {len(tracking_samples)} samples"
+ logger.debug(msg)
  for tracking_sample in tracking_samples:
  sample = self.__to_sample(tracking_sample, puck, self.sample_sheets)
  self.samples.append(sample)
- logger.debug("[ICATClient] Total %d samples read", len(self.samples))
-
- return self.samples
  except RuntimeError:
  logger.exception("[ICATClient] Error retrieving samples: %s")
-
+ else:
+ msg = f"[ICATClient] Total {len(self.samples)} samples read"
+ logger.debug(msg)
+ return self.samples
  return []

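get_samples now uses try/except/else: the success-path logging and the return of self.samples run only when no exception was raised, and the empty list remains the fallback. A small standalone sketch of that control flow (fetch is a stand-in callable, not mxcubecore API):

    import logging

    logger = logging.getLogger(__name__)

    def read_items(fetch) -> list:
        """Return the fetched items, or an empty list if fetching fails."""
        try:
            items = fetch()
        except RuntimeError:
            logger.exception("Error retrieving items")
        else:
            # runs only when the try block did not raise
            logger.debug("Total %d items read", len(items))
            return items
        return []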
  def get_sample_sheet_by_id(
  self, samples: List[SampleSheet], sample_id: int
  ) -> Optional[SampleSheet]:
  """
- Retrieves a sample sheet by its unique ID.
+ Retrieve a sample sheet by its unique ID.

  Args:
  samples (List[SampleSheet]): A list of Sample objects.
@@ -240,22 +237,21 @@ class ICATLIMS(AbstractLims):

  # Enrich the processing_plan
  for item in processing_plan:
- if item["key"] == "pipelines":
+ if item["key"] == "Advanced_processing":
  for pipeline in item["value"]:
- # Match reference to filename
+ # Update reference
  ref = pipeline.get("reference")
  if ref in file_path_lookup:
- pipeline["reference_path"] = file_path_lookup[ref]
- # Match search_models to groupName
- group = pipeline.get("search_models")
- if group in group_paths:
- pipeline["search_models_path"] = group_paths[group]
- for item in processing_plan:
- if item["key"] == "search_models":
- # Match reference to filename
- models = item.get("value")
- for model in models:
- model["path"] = group_paths[model["pdb_group"]]
+ pipeline["reference"] = {"filepath": file_path_lookup[ref]}
+
+ # Update search_models
+ if "search_models" in pipeline:
+ models = json.loads(pipeline["search_models"])
+ for model in models:
+ group = model.get("pdb_group")
+ if group in group_paths:
+ model["file_paths"] = group_paths[group]
+ pipeline["search_models"] = models

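The processing-plan enrichment changed shape: pipelines now live under the "Advanced_processing" key, the reference string is replaced by a {"filepath": ...} mapping, and search_models arrives as a JSON string that is decoded before the pdb_group paths are attached. A rough standalone sketch with invented sample data:

    import json

    file_path_lookup = {"ref-1.mtz": "/data/visit/ref-1.mtz"}
    group_paths = {"groupA": ["/data/visit/modelA1.pdb", "/data/visit/modelA2.pdb"]}

    processing_plan = [
        {
            "key": "Advanced_processing",
            "value": [
                {
                    "reference": "ref-1.mtz",
                    "search_models": json.dumps([{"pdb_group": "groupA"}]),
                }
            ],
        }
    ]

    for item in processing_plan:
        if item["key"] == "Advanced_processing":
            for pipeline in item["value"]:
                ref = pipeline.get("reference")
                if ref in file_path_lookup:
                    pipeline["reference"] = {"filepath": file_path_lookup[ref]}
                if "search_models" in pipeline:
                    models = json.loads(pipeline["search_models"])
                    for model in models:
                        group = model.get("pdb_group")
                        if group in group_paths:
                            model["file_paths"] = group_paths[group]
                    pipeline["search_models"] = models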
  def _safe_json_loads(self, json_str):
  try:
@@ -267,13 +263,12 @@ class ICATLIMS(AbstractLims):
  self, tracking_sample: dict, puck: dict, sample_sheets: List[SampleSheet]
  ) -> dict:
  """
- Converts a tracking sample and associated metadata into the internal sample data structure.
-
- This method:
+ Convert a tracking sample and associated metadata into the internal
+ sample data structure.
  - Extracts relevant sample metadata.
  - Resolves protein acronym from the sample sheet if available.
  - Maps experiment plan details into a diffraction plan dictionary.
- - Assembles all relevant fields into a final structured sample dictionary.
+ - Assembles all relevant fields into a structured sample dictionary.

  Args:
  tracking_sample (dict): The raw sample data from tracking.
@@ -294,12 +289,10 @@ class ICATLIMS(AbstractLims):
  # identifier that points to the sample tracking
  tracking_sample_id = tracking_sample.get("_id")

- logger.debug(
- "[ICATClient] Sample ids sample_id=%s sample_sheet_id=%s tracking_sample_id=%s",
- sample_id,
- sample_sheet_id,
- tracking_sample_id,
- )
+ msg = f"[ICATClient] Sample ids sample_id={sample_id} "
+ msg += f"sample_sheet_id={sample_sheet_id} "
+ msg += f"tracking_sample_id={tracking_sample_id}"
+ logger.debug(msg)

  sample_location = tracking_sample.get("sampleContainerPosition")
  puck_location = str(puck.get("sampleChangerLocation", "Unknown"))
@@ -336,17 +329,18 @@ class ICATLIMS(AbstractLims):
  destination_folder = HWR.beamline.session.get_full_path("", "")[
  0
  ]
- logger.debug(
- "Download restource. sample_sheet_id=%s destination_folder=%s"
- % (sample_sheet_id, destination_folder)
- )
+ msg = "Download restource: "
+ msg += f"sample_sheet_id={sample_sheet_id} "
+ msg += f"destination_folder={destination_folder}"
+ logger.debug(msg)
  downloads = self._download_resources(
  sample_sheet_id,
  sample_information.resources,
  destination_folder,
  sample_name,
  )
- logger.debug("downloaded %s resources" % len(downloads))
+ msg = f"downloaded {len(downloads)} resources"
+ logger.debug(msg)
  if len(downloads) > 0:
  try:
  self.__add_download_path_to_processing_plan(
@@ -362,7 +356,8 @@ class ICATLIMS(AbstractLims):
  }

  except RuntimeError as e:
- logger.warning("error getting sample information %s " % e)
+ msg = f"error getting sample information {e}"
+ logger.warning(msg)

  comments = tracking_sample.get("comments")

@@ -511,38 +506,32 @@ class ICATLIMS(AbstractLims):
  self.icatClient.reschedule_investigation(session.session_id)

  def get_session_by_id(self, sid: str):
- logger.debug(
- "get_session_by_id investigationId=%s investigations=%s",
- sid,
- str(len(self.investigations)),
- )
+ msg = f"get_session_by_id investigationId={sid} "
+ msg += f"investigations={len(self.investigations)}"
+ logger.debug(msg)
+
  investigation_list = list(filter(lambda p: p["id"] == sid, self.investigations))
  if len(investigation_list) == 1:
  self.investigation = investigation_list[0]
  return self.__to_session(investigation_list[0])
- logger.warn(
- "No investigation found. get_session_by_id investigationId=%s investigations=%s",
- sid,
- str(len(self.investigations)),
- )
+
+ logger.warning("No investigation found")
  return None

  def __get_all_investigations(self):
  """Returns all investigations by user. An investigation corresponds to
  one experimental session. It returns an empty array in case of error"""
+ self.investigations = []
  try:
- self.investigations = []
- logger.debug(
- "__get_all_investigations before=%s after=%s beamline=%s isInstrumentScientist=%s isAdministrator=%s compatible_beamlines=%s"
- % (
- self.before_offset_days,
- self.after_offset_days,
- self.override_beamline_name,
- self.icat_session["isInstrumentScientist"],
- self.icat_session["isAdministrator"],
- self.compatible_beamlines,
- )
+ msg = f"__get_all_investigations before={self.before_offset_days} "
+ msg += f"after={self.after_offset_days} "
+ msg += f"beamline={self.override_beamline_name} "
+ msg += (
+ f"isInstrumentScientist={self.icat_session['isInstrumentScientist']} "
  )
+ msg += f"isAdministrator={self.icat_session['isAdministrator']} "
+ msg += f"compatible_beamlines={self.compatible_beamlines}"
+ logger.debug(msg)

  if self.icat_session is not None and (
  self.icat_session["isAdministrator"]
@@ -558,7 +547,8 @@ class ICATLIMS(AbstractLims):
  )
  elif self.only_staff_session_selection:
  if self.session_manager.active_session is None:
561
- # If no session selected and only staff is allowed then print warning an return no investigations
550
+ # print warning an return no investigations
551
+ # if no session selected and only staff is allowed
  logger.warning(
  "No session selected. Only staff can select a session"
  )
@@ -576,14 +566,14 @@ class ICATLIMS(AbstractLims):
  end_date=datetime.today()
  + timedelta(days=float(self.after_offset_days)),
  )
- logger.debug(
- "__get_all_investigations retrieved %s investigations"
- % len(self.investigations)
- )
- return self.investigations
  except Exception:
  self.investigations = []
  logger.exception("Failed on __get_all_investigations")
+ else:
+ msg = "__get_all_investigations retrieved "
+ msg += f"{len(self.investigations)} investigations"
+ logger.debug(msg)
+
  return self.investigations

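The investigation query covers a time window around today built from the configured day offsets; the end of the window is today plus after_offset_days, and presumably the start is shifted back by before_offset_days (only the end_date computation is visible in this hunk). A small sketch of such a window (offset values invented, given as strings since they typically come from configuration):

    from datetime import datetime, timedelta

    before_offset_days = "1"
    after_offset_days = "2"

    start_date = datetime.today() - timedelta(days=float(before_offset_days))
    end_date = datetime.today() + timedelta(days=float(after_offset_days))
    print(start_date.date(), "->", end_date.date())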
  def __get_proposal_number_by_investigation(self, investigation):
@@ -690,9 +680,7 @@ class ICATLIMS(AbstractLims):
  data_portal_URL=self._get_data_portal_url(investigation),
  user_portal_URL=self._get_user_portal_url(investigation),
  logbook_URL=self._get_logbook_url(investigation),
- is_rescheduled=(
- True if "__actualEndDate" in investigation["parameters"] else False
- ),
+ is_rescheduled=bool("__actualEndDate" in investigation["parameters"]),
  volume=self.__get_investigation_parameter_by_name(
  investigation, "__volume"
  ),
@@ -716,34 +704,35 @@ class ICATLIMS(AbstractLims):
  def get_parcels(self):
  """Returns the parcels associated to an investigation"""
  try:
- logger.debug(
- "Retrieving parcels by investigation_id %s "
- % (self.session_manager.active_session.session_id)
- )
- parcels = self.icatClient.get_parcels_by(
- self.session_manager.active_session.session_id
- )
- logger.debug("Successfully retrieved %s parcels" % (len(parcels)))
- return parcels
+ session_id = self.session_manager.active_session.session_id
+ msg = f"Retrieving parcels by investigation_id {session_id}"
+ logger.debug(msg)
+ parcels = self.icatClient.get_parcels_by(session_id)
  except Exception:
  logger.exception("Failed on get_parcels_by_investigation_id")
+ else:
+ msg = f"Successfully retrieved {len(parcels)} parcels"
+ logger.debug(msg)
+ return parcels
+
  return []

  def get_samples_sheets(self) -> List[SampleSheet]:
  """Returns the samples sheets associated to an investigation"""
  try:
- logger.debug(
- "Retrieving samples by investigation_id %s "
- % (self.session_manager.active_session.session_id)
- )
+ msg = "Retrieving samples by investigation_id "
+ msg += f"{self.session_manager.active_session.session_id}"
+ logger.debug(msg)
  samples = self.icatClient.get_samples_by(
  self.session_manager.active_session.session_id
  )
- logger.debug("Successfully retrieved %s samples" % (len(samples)))
- # Convert to object
- return [SampleSheet.parse_obj(sample) for sample in samples]
  except Exception:
  logger.exception("Failed on get_samples_by_investigation_id")
+ else:
+ msg = f"Successfully retrieved {len(samples)} samples"
+ logger.debug(msg)
+ # Convert to object
+ return [SampleSheet.parse_obj(sample) for sample in samples]
  return []

  def echo(self):
@@ -786,9 +775,9 @@ class ICATLIMS(AbstractLims):
  None,
  )

- def _get_sample_position(self):
- """
- Returns the position of the puck in the samples changer and the position f the sample within the puck
+ def _get_sample_position(self) -> tuple:
+ """Return the position of the puck in the sample changer
+ and the position of the sample within the puck.
  """
  try:
  queue_entry = HWR.beamline.queue_manager.get_current_entry()
@@ -802,14 +791,11 @@ class ICATLIMS(AbstractLims):
  (puck, sample_position) = location

  position = None
- try:
- if cell is not None and puck is not None:
- position = int(cell * 3) + int(puck)
- except Exception:
- logger.exception()
- return position, sample_position
+ if None not in (cell, puck):
+ position = int(cell * 3) + int(puck)
  except Exception:
- logger.exception()
+ logger.exception("Cannot retrieve sample position")
+ return position, sample_position

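_get_sample_position now derives the flat puck position as int(cell * 3) + int(puck), guarded by a simple None check instead of a nested try/except; the factor of 3 presumably reflects three pucks per sample-changer cell. A standalone sketch of that arithmetic with invented values:

    def flat_puck_position(cell, puck):
        """Map a (cell, puck) pair to a single puck index (3 pucks per cell assumed)."""
        if None in (cell, puck):
            return None
        return int(cell * 3) + int(puck)

    print(flat_puck_position(2, 1))     # -> 7
    print(flat_puck_position(None, 1))  # -> None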
  def store_beamline_setup(self, session_id: str, bl_config_dict: dict):
  pass
@@ -817,12 +803,76 @@ class ICATLIMS(AbstractLims):
  def store_image(self, image_dict: dict):
  pass

- def store_energy_scan(self, energyscan_dict: dict):
- pass
+ def store_common_data(self, datacollection_dict: dict) -> dict:
+ """Fill in a dictionary with the metadata common to all
+ data collection techniques.
+ Args:
+ datacollection_dict(dict): dictionary from the data collection.
+ """
+ sample_id = datacollection_dict.get("blSampleId")
+ msg = f"SampleId is: {sample_id}"
+ logging.getLogger("HWR").debug(msg)
+ try:
+ sample = HWR.beamline.lims.find_sample_by_sample_id(sample_id)
+ sample_name = sample.get("sampleName")
+ except (AttributeError, TypeError):
+ sample_name = "unknown"
+ msg = f"Sample {sample_id} not found"
+ logging.getLogger("HWR").debug(msg)
+
+ start_time = datacollection_dict.get("startTime", "")
+ end_time = datacollection_dict.get("endTime", "")

- def store_xfe_spectrum(self, collection_parameters: dict):
- status = {"xfeFluorescenceSpectrumId": -1}
  try:
+ dt_aware = datetime.strptime(start_time, "%Y-%m-%d %H:%M:%S").replace(
+ tzinfo=ZoneInfo("Europe/Paris")
+ )
+ dt_aware_end = datetime.strptime(end_time, "%Y-%m-%d %H:%M:%S").replace(
+ tzinfo=ZoneInfo("Europe/Paris")
+ )
+
+ start_time = dt_aware.isoformat(timespec="microseconds")
+ end_time = dt_aware_end.isoformat(timespec="microseconds")
+ except TypeError:
+ logging.getLogger("HWR").exception("Cannot parse start and end time")
+ bsx, bsy, shape, _ = HWR.beamline.beam.get_value()
+ flux_end = datacollection_dict.get("flux_end") or HWR.beamline.flux.get_value()
+
+ transmission = (
+ datacollection_dict.get("transmission")
+ or HWR.beamline.transmission.get_value()
+ )
+
+ energy = datacollection_dict.get("energy") or HWR.beamline.energy.get_value()
+ wavelength = (
+ datacollection_dict.get("wavelength")
+ or HWR.beamline.energy.get_wavelength()
+ )
+
+ machine_info = HWR.beamline.machine_info.get_value()
+
+ return {
+ "sampleId": sample_id,
+ "Sample_name": sample_name,
+ "startDate": start_time,
+ "endDate": end_time,
+ "MX_beamShape": shape.value,
+ "MX_beamSizeAtSampleX": bsx,
+ "MX_beamSizeAtSampleY": bsy,
+ "MX_xBeam": bsx,
+ "MX_yBeam": bsy,
+ "MX_flux": datacollection_dict.get("flux"),
+ "MX_fluxEnd": flux_end,
+ "MX_transmission": transmission,
+ "InstrumentMonochromator_wavelength": wavelength,
+ "InstrumentMonochromator_energy": energy,
+ "InstrumentSource_current": machine_info.get("current"),
+ "InstrumentSource_mode": machine_info.get("fill_mode"),
+ }
+
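store_common_data turns the startTime/endTime strings into timezone-aware ISO 8601 timestamps before they go into the metadata. A minimal sketch of that conversion, assuming the same "Europe/Paris" zone and timestamp format as the code above:

    from datetime import datetime
    from zoneinfo import ZoneInfo

    raw = "2024-05-13 09:30:00"  # invented example value
    aware = datetime.strptime(raw, "%Y-%m-%d %H:%M:%S").replace(
        tzinfo=ZoneInfo("Europe/Paris")
    )
    print(aware.isoformat(timespec="microseconds"))
    # 2024-05-13T09:30:00.000000+02:00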
+ def store_energy_scan(self, energyscan_dict: dict):
+ try:
+ metadata = self.store_common_data(energyscan_dict)
  try:
  beamline = self._get_scheduled_beamline()
  msg = f"Dataset Beamline={beamline} "
@@ -835,49 +885,68 @@ class ICATLIMS(AbstractLims):
  _session = HWR.beamline.session
  proposal = f"{_session.proposal_code}{_session.proposal_number}"

- try:
- dt_aware = datetime.strptime(
- collection_parameters.get("startTime"),
- "%Y-%m-%d %H:%M:%S",
- ).replace(tzinfo=ZoneInfo("Europe/Paris"))
- dt_aware_end = datetime.strptime(
- collection_parameters.get("endTime"),
- "%Y-%m-%d %H:%M:%S",
- ).replace(tzinfo=ZoneInfo("Europe/Paris"))
-
- start_time = dt_aware.isoformat(timespec="microseconds")
- end_time = dt_aware_end.isoformat(timespec="microseconds")
- except TypeError:
- logging.getLogger("HWR").exception("Failed to parse start and end time")
+ directory = Path(energyscan_dict["scanFileFullPath"]).parent
+
+ metadata.update(
+ {
+ "scanType": "energy_scan",
+ "MX_directory": str(directory),
+ "MX_exposureTime": energyscan_dict.get("exposureTime"),
+ "InstrumentDetector01_model": energyscan_dict.get(
+ "fluorescenceDetector"
+ ),
+ "MX_element": energyscan_dict.get("element"),
+ "MX_edgeEnergy": energyscan_dict.get("edgeEnergy"),
+ "MX_startEnergy": energyscan_dict.get("startEnergy"),
+ "MX_endEnergy": energyscan_dict.get("endEnergy"),
+ "MX_peakEnergy": energyscan_dict.get("endEnergy"),
+ "MX_inflectioEnergy": energyscan_dict.get("inflectioEnergy"),
+ "MX_remoteEnergy": energyscan_dict.get("remoteEnergy"),
+ "MX_peakFPrime": energyscan_dict.get("peakFPrime"),
+ "MX_peakFDoublePrime": energyscan_dict.get("peakFDoublePrime"),
+ "MX_inflectionFPrime": energyscan_dict.get("inflectionFPrime"),
+ "MX_inflectionFDoublePrime": energyscan_dict.get(
+ "inflectionFDoublePrime"
+ ),
+ "MX_comments": energyscan_dict.get("comments"),
+ }
+ )

- directory = Path(collection_parameters["filename"]).parent
+ self.icatClient.store_dataset(
+ beamline=beamline,
+ proposal=proposal,
+ dataset=str(directory.name),
+ path=str(directory),
+ metadata=metadata,
+ )
+ except Exception:
+ logging.getLogger("ispyb_client").exception()
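store_energy_scan (and store_xfe_spectrum below) now start from the shared store_common_data dictionary and only add scan-specific keys with dict.update before handing everything to icatClient.store_dataset. A schematic sketch of that layering with invented values; common_metadata stands in for store_common_data:

    def common_metadata() -> dict:
        # stand-in for ICATLIMS.store_common_data()
        return {
            "sampleId": 123,
            "Sample_name": "lyso1",
            "startDate": "2024-05-13T09:30:00.000000+02:00",
        }

    metadata = common_metadata()
    metadata.update(
        {
            "scanType": "energy_scan",
            "MX_directory": "/data/visit/energyscan_01",
            "MX_exposureTime": 0.1,
        }
    )
    # the merged dictionary is what ends up in store_dataset(metadata=...)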

- msg = f"SampleId is: {collection_parameters.get('blSampleId')}"
- logging.getLogger("HWR").debug(msg)
+ def store_xfe_spectrum(self, xfespectrum_dict: dict):
+ status = {"xfeFluorescenceSpectrumId": -1}
+ try:
+ metadata = self.store_common_data(xfespectrum_dict)
  try:
- sample = HWR.beamline.lims.find_sample_by_sample_id(
- collection_parameters.get("blSampleId")
+ beamline = self._get_scheduled_beamline()
+ msg = f"Dataset Beamline={beamline} "
+ msg += f"Current Beamline={HWR.beamline.session.beamline_name}"
+ logging.getLogger("HWR").info(msg)
+ except Exception:
+ logging.getLogger("HWR").exception(
+ "Failed to get _get_scheduled_beamline",
  )
- sample_name = sample["sampleName"]
- except (AttributeError, TypeError):
- sample_name = "unknown"
- msg = f"Sample not found {collection_parameters.get('blSampleId')}"
- logging.getLogger("HWR").debug(msg)
-
- metadata = {
- "sampleId": collection_parameters.get("blSampleId"),
- "MX_beamSizeAtSampleX": collection_parameters.get("beamSizeHorizontal"),
- "MX_beamSizeAtSampleY": collection_parameters.get("beamSizeVertical"),
- "MX_directory": str(directory),
- "MX_exposureTime": collection_parameters.get("exposureTime"),
- "MX_flux": collection_parameters.get("flux"),
- "scanType": "xrf",
- "MX_transmission": collection_parameters.get("beamTransmission"),
- "Sample_name": sample_name,
- "InstrumentMonochromator_energy": collection_parameters.get("energy"),
- "startDate": start_time,
- "endDate": end_time,
- }
+ _session = HWR.beamline.session
+ proposal = f"{_session.proposal_code}{_session.proposal_number}"
+
+ directory = Path(xfespectrum_dict["filename"]).parent
+
+ metadata.update(
+ {
+ "scanType": "xrf",
+ "MX_directory": str(directory),
+ "MX_exposureTime": xfespectrum_dict.get("exposureTime"),
+ }
+ )

  self.icatClient.store_dataset(
  beamline=beamline,
@@ -894,12 +963,14 @@ class ICATLIMS(AbstractLims):
  def store_workflow(self, workflow_dict: dict):
  pass

- def store_data_collection(self, mx_collection, bl_config=None):
- # stores the dictionary with the information about the beamline to be sent when a dataset is produced
- self.beamline_config = bl_config
+ def store_data_collection(self, datacollection_dict, beamline_config_dict=None):
+ """Store the dictionary with the information about the beamline
+ to be sent when a dataset is produced.
+ """
+ self.beamline_config = beamline_config_dict

- def update_data_collection(self, mx_collection):
- pass
+ def update_data_collection(self, datacollection_dict: dict):
+ """Update data collection."""

  def _get_oscillation_end(self, oscillation_sequence):
  return float(oscillation_sequence["start"]) + (
@@ -950,14 +1021,15 @@ class ICATLIMS(AbstractLims):
  self, sample_id, resources, output_folder: str, sample_name: str
  ) -> List[Download]:
  """
- Downloads resources related to a given sample and saves them to the specified directory.
+ Download resources related to a given sample and save them to the
+ specified directory.

- Parameters:
- sample (str): Sample identifier.
+ Args:
+ sample_id (str): Sample identifier.
  output_folder (str): Directory where storefiles will be saved.

  Returns:
- dict: A dictionary containing the paths of the downloaded files.
+ List containing the paths of the downloaded files.
  """
  downloaded_files: List[Download] = []
  for resource in resources:
@@ -968,7 +1040,7 @@ class ICATLIMS(AbstractLims):
  resource_folder.mkdir(
  parents=True,
  exist_ok=True,
- ) # Ensure the folder exists
+ ) # Make sure the folder exists

  try:
  token = self.icat_session["sessionId"]
@@ -997,106 +1069,92 @@ class ICATLIMS(AbstractLims):

  return downloaded_files

- def finalize_data_collection(self, collection_parameters):
+ def finalize_data_collection(self, datacollection_dict):
  logger.info("Storing datacollection in ICAT")

+ metadata = self.store_common_data(datacollection_dict)
+
  try:
- fileinfo = collection_parameters["fileinfo"]
+ fileinfo = datacollection_dict["fileinfo"]
  directory = Path(fileinfo["directory"])
  dataset_name = directory.name
  # Determine the scan type
- if dataset_name.endswith("mesh"):
- scan_type = "mesh"
- elif dataset_name.endswith("line"):
- scan_type = "line"
- elif dataset_name.endswith("characterisation"):
- scan_type = "characterisation"
- elif dataset_name.endswith("datacollection"):
- scan_type = "datacollection"
- else:
- scan_type = collection_parameters["experiment_type"]
+ scan_types = ["mesh", "line", "characterisation", "datacollection"]
+ scan_type = datacollection_dict["experiment_type"]
+ for nam in scan_types:
+ if dataset_name.endswith(nam):
+ scan_type = nam

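The chain of if/elif suffix checks for the scan type is replaced by a loop over candidate suffixes, with the collection's experiment_type as the fallback. A standalone sketch of the same selection (dataset names invented):

    scan_types = ["mesh", "line", "characterisation", "datacollection"]

    def detect_scan_type(dataset_name: str, default: str) -> str:
        scan_type = default
        for name in scan_types:
            if dataset_name.endswith(name):
                scan_type = name
        return scan_type

    print(detect_scan_type("lyso1_1_mesh", "OSC"))   # -> mesh
    print(detect_scan_type("lyso1_1_run01", "OSC"))  # -> OSC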
- workflow_params = collection_parameters.get("workflow_parameters", {})
+ workflow_params = datacollection_dict.get("workflow_parameters", {})
  workflow_type = workflow_params.get("workflow_type")

- if workflow_type is None:
- if not directory.name.startswith("run"):
- dataset_name = fileinfo["prefix"]
-
- try:
- dt_aware = datetime.strptime(
- collection_parameters.get("collection_start_time"),
- "%Y-%m-%d %H:%M:%S",
- ).replace(tzinfo=ZoneInfo("Europe/Paris"))
- start_time = dt_aware.isoformat(timespec="microseconds")
- end_time = datetime.now(ZoneInfo("Europe/Paris")).isoformat()
- except RuntimeError:
- logger.warning("Failed to parse start and end time")
+ if workflow_type is None and not directory.name.startswith("run"):
+ dataset_name = fileinfo["prefix"]

- if collection_parameters["sample_reference"]["acronym"]:
+ if datacollection_dict["sample_reference"]["acronym"]:
  sample_name = (
- collection_parameters["sample_reference"]["acronym"]
+ datacollection_dict["sample_reference"]["acronym"]
  + "-"
- + collection_parameters["sample_reference"]["sample_name"]
+ + datacollection_dict["sample_reference"]["sample_name"]
  )
  else:
- sample_name = collection_parameters["sample_reference"][
+ sample_name = datacollection_dict["sample_reference"][
  "sample_name"
  ].replace(":", "-")

  logger.info(f"LIMS sample name {sample_name}")
- oscillation_sequence = collection_parameters["oscillation_sequence"][0]
+ oscillation_sequence = datacollection_dict["oscillation_sequence"][0]

  beamline = HWR.beamline.session.beamline_name.lower()
  distance = HWR.beamline.detector.distance.get_value()
- proposal = f"{HWR.beamline.session.proposal_code}{HWR.beamline.session.proposal_number}"
- metadata = {
- "MX_beamShape": collection_parameters.get("beamShape"),
- "sampleId": collection_parameters.get("blSampleId"),
- "MX_beamSizeAtSampleX": collection_parameters.get("beamSizeAtSampleX"),
- "MX_beamSizeAtSampleY": collection_parameters.get("beamSizeAtSampleY"),
- "MX_dataCollectionId": collection_parameters.get("collection_id"),
- "MX_detectorDistance": distance,
- "MX_directory": str(directory),
- "MX_exposureTime": oscillation_sequence["exposure_time"],
- "MX_flux": collection_parameters.get("flux"),
- "MX_fluxEnd": collection_parameters.get("flux_end"),
- "MX_positionName": collection_parameters.get("position_name"),
- "MX_numberOfImages": oscillation_sequence["number_of_images"],
- "MX_oscillationRange": oscillation_sequence["range"],
- "MX_axis_start": oscillation_sequence["start"],
- "MX_oscillationOverlap": oscillation_sequence["overlap"],
- "MX_resolution": collection_parameters.get("resolution"),
- "MX_resolution_at_corner": collection_parameters.get(
- "resolutionAtCorner"
- ),
- "scanType": scan_type,
- "MX_startImageNumber": oscillation_sequence["start_image_number"],
- "MX_template": fileinfo["template"],
- "MX_transmission": collection_parameters.get("transmission"),
- "MX_xBeam": collection_parameters.get("xBeam"),
- "MX_yBeam": collection_parameters.get("yBeam"),
- "Sample_name": sample_name,
- "InstrumentMonochromator_wavelength": collection_parameters.get(
- "wavelength"
- ),
- "InstrumentMonochromator_energy": collection_parameters.get("energy"),
- "Workflow_name": workflow_params.get("workflow_name"),
- "Workflow_type": workflow_params.get("workflow_type"),
- "Workflow_id": workflow_params.get("workflow_uid"),
- "MX_kappa_settings_id": workflow_params.get(
- "workflow_kappa_settings_id"
- ),
- "MX_characterisation_id": workflow_params.get(
- "workflow_characterisation_id"
- ),
- "MX_position_id": workflow_params.get("workflow_position_id"),
- "group_by": workflow_params.get("workflow_group_by"),
- "startDate": start_time,
- "endDate": end_time, # strftime("%Y-%m-%d %H:%M:%S"),
- }
+ proposal = f"{HWR.beamline.session.proposal_code}"
+ proposal += f"{HWR.beamline.session.proposal_number}"
+
+ try:
+ dt_aware = datetime.strptime(
+ datacollection_dict.get("collection_start_time"),
+ "%Y-%m-%d %H:%M:%S",
+ ).replace(tzinfo=ZoneInfo("Europe/Paris"))
+ start_time = dt_aware.isoformat(timespec="microseconds")
+ except RuntimeError:
+ logger.warning("Failed to parse start and end time")
+
+ metadata.update(
+ {
+ "MX_dataCollectionId": datacollection_dict.get("collection_id"),
+ "MX_detectorDistance": distance,
+ "MX_directory": str(directory),
+ "MX_exposureTime": oscillation_sequence["exposure_time"],
+ "MX_positionName": datacollection_dict.get("position_name"),
+ "MX_numberOfImages": oscillation_sequence["number_of_images"],
+ "MX_oscillationRange": oscillation_sequence["range"],
+ "MX_axis_start": oscillation_sequence["start"],
+ "MX_oscillationOverlap": oscillation_sequence["overlap"],
+ "MX_resolution": datacollection_dict.get("resolution"),
+ "MX_resolution_at_corner": datacollection_dict.get(
+ "resolutionAtCorner"
+ ),
+ "scanType": scan_type,
+ "MX_startImageNumber": oscillation_sequence["start_image_number"],
+ "MX_template": fileinfo["template"],
+ "Sample_name": sample_name,
+ "Workflow_name": workflow_params.get("workflow_name"),
+ "Workflow_type": workflow_params.get("workflow_type"),
+ "Workflow_id": workflow_params.get("workflow_uid"),
+ "MX_kappa_settings_id": workflow_params.get(
+ "workflow_kappa_settings_id"
+ ),
+ "MX_characterisation_id": workflow_params.get(
+ "workflow_characterisation_id"
+ ),
+ "MX_position_id": workflow_params.get("workflow_position_id"),
+ "group_by": workflow_params.get("workflow_group_by"),
+ "startDate": start_time,
+ }
+ )

- # This forces the ingester to associate the dataset to the experiment by ID
+ # This forces the ingester to associate the dataset
+ # to the experiment by ID
  if self.session_manager.active_session.session_id:
  metadata["investigationId"] = (
  self.session_manager.active_session.session_id
@@ -1109,7 +1167,7 @@ class ICATLIMS(AbstractLims):
  metadata["SampleTrackingContainer_position"] = sample_position
  # Find sample by sampleId
  sample = HWR.beamline.lims.find_sample_by_sample_id(
- collection_parameters.get("blSampleId")
+ datacollection_dict.get("blSampleId")
  )

  try:
@@ -1117,22 +1175,13 @@ class ICATLIMS(AbstractLims):
  except Exception:
  logger.exception("Failed to read get_active_lims.")

- try:
- metadata["InstrumentSource_current"] = (
- HWR.beamline.machine_info.get_value().get("current")
- )
- metadata["InstrumentSource_mode"] = (
- HWR.beamline.machine_info.get_value().get("fill_mode")
- )
- except Exception as e:
- logger.warning("Failed to read machine_info metadata.%s", e)
-
  try:
  if sample is not None:
  metadata["SampleProtein_acronym"] = sample.get("proteinAcronym")
  metadata["SampleTrackingContainer_id"] = sample.get(
  "containerCode"
- ) # containerCode instead of sampletrackingcontainer_id for ISPyB's compatiblity
+ ) # containerCode instead of sampletrackingcontainer_id
+ # for ISPyB's compatibility
  metadata["SampleTrackingParcel_id"] = sample.get(
  "SampleTrackingParcel_id"
  )
@@ -1164,7 +1213,7 @@ class ICATLIMS(AbstractLims):
  logger.warning("Failed to get MX_axis_end")

  icat_metadata_path = Path(directory) / "metadata.json"
- with open(icat_metadata_path, "w") as f:
+ with Path(icat_metadata_path).open("w") as f:
  # We add the processing and experiment plan only in the metadata.json
  # it will not work thought pyicat-plus
  merged = metadata.copy()
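The metadata.json sidecar is now written through pathlib's Path.open rather than the open builtin. A small sketch of writing such a file, assuming the merged dictionary is JSON-serialisable (path and keys are placeholders):

    import json
    from pathlib import Path

    directory = Path("/tmp/example_dataset")
    directory.mkdir(parents=True, exist_ok=True)

    merged = {"scanType": "datacollection", "Sample_name": "lyso1"}

    icat_metadata_path = directory / "metadata.json"
    with icat_metadata_path.open("w") as f:
        json.dump(merged, f, indent=2)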
@@ -1183,25 +1232,26 @@ class ICATLIMS(AbstractLims):
  gallery_path.mkdir(mode=0o755, exist_ok=True)
  for snapshot_index in range(1, 5):
  key = f"xtalSnapshotFullPath{snapshot_index}"
- if key in collection_parameters:
- snapshot_path = Path(collection_parameters[key])
+ if key in datacollection_dict:
+ snapshot_path = Path(datacollection_dict[key])
  if snapshot_path.exists():
- logger.debug(
- f"Copying snapshot index {snapshot_index} to gallery"
- )
+ msg = f"Copying snapshot index {snapshot_index} to gallery"
+ logger.debug(msg)
  shutil.copy(snapshot_path, gallery_path)
  except RuntimeError as e:
  logger.warning("Failed to create gallery. %s", e)

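Crystal snapshots are copied into a gallery folder next to the dataset, and only paths that actually exist are copied. A standalone sketch of the same idea (keys and paths are illustrative; parents=True is added here so the sketch runs on its own):

    import shutil
    from pathlib import Path

    datacollection_dict = {"xtalSnapshotFullPath1": "/tmp/snapshots/snap1.jpeg"}

    gallery_path = Path("/tmp/example_dataset/gallery")
    gallery_path.mkdir(mode=0o755, parents=True, exist_ok=True)

    for snapshot_index in range(1, 5):
        key = f"xtalSnapshotFullPath{snapshot_index}"
        if key in datacollection_dict:
            snapshot_path = Path(datacollection_dict[key])
            if snapshot_path.exists():
                shutil.copy(snapshot_path, gallery_path)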
  try:
  beamline = self._get_scheduled_beamline()
- logger.info(
- f"Dataset Beamline={beamline} Current Beamline={HWR.beamline.session.beamline_name}"
- )
- except RuntimeError as e:
- logger.warning("Failed to get _get_scheduled_beamline. %s", e)
+ msg = f"Dataset Beamline={beamline} "
+ msg += f"Current Beamline={HWR.beamline.session.beamline_name}"
+ logger.info(msg)
+ except RuntimeError as err:
+ msg = f"Failed to get _get_scheduled_beamline {err}"
+ logger.warning(msg)

- # __actualInstrument is a dataset parameter that indicates where the dataset has been actually collected
+ # __actualInstrument is a dataset parameter that indicates
+ # where the dataset has been actually collected
  # only filled when it does not match the scheduled beamline
  try:
  if (
@@ -1223,10 +1273,10 @@ class ICATLIMS(AbstractLims):
  except Exception as e:
  logger.warning("Failed uploading to ICAT. %s", e)

- def _get_scheduled_beamline(self):
- """
- This returns the beamline where the session has been scheduled (in case of a different beamline)
- otherwise it returns the name of the beamline as set in the properties
+ def _get_scheduled_beamline(self) -> str:
+ """Return the name of the beamline as set in the properties or the
+ name of the beamline where the session has been scheduled
+ (in case of a different beamline)
  """
  active_session = self.session_manager.active_session

@@ -1234,7 +1284,8 @@ class ICATLIMS(AbstractLims):
  return HWR.beamline.session.beamline_name.lower()

  beamline = str(active_session.beamline_name.lower())
- logger.info(f"Session have been moved to another beamline: {beamline}")
+ msg = f"Session have been moved to another beamline: {beamline}"
+ logger.info(msg)
  return beamline

  def update_bl_sample(self, bl_sample: str):