mx-bluesky 1.5.10__py3-none-any.whl → 1.5.12__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
Files changed (121)
  1. mx_bluesky/_version.py +2 -2
  2. mx_bluesky/beamlines/aithre_lasershaping/experiment_plans/__init__.py +0 -0
  3. mx_bluesky/beamlines/aithre_lasershaping/experiment_plans/robot_load_plan.py +198 -0
  4. mx_bluesky/beamlines/aithre_lasershaping/goniometer_controls.py +2 -2
  5. mx_bluesky/beamlines/aithre_lasershaping/parameters/__init__.py +0 -0
  6. mx_bluesky/beamlines/aithre_lasershaping/parameters/constants.py +17 -0
  7. mx_bluesky/beamlines/aithre_lasershaping/parameters/robot_load_parameters.py +13 -0
  8. mx_bluesky/beamlines/aithre_lasershaping/pin_tip_centring.py +31 -0
  9. mx_bluesky/beamlines/aithre_lasershaping/robot_load.py +80 -0
  10. mx_bluesky/beamlines/i02_1/parameters/gridscan.py +1 -1
  11. mx_bluesky/beamlines/i04/__init__.py +6 -2
  12. mx_bluesky/beamlines/i04/callbacks/murko_callback.py +27 -12
  13. mx_bluesky/beamlines/i04/experiment_plans/i04_grid_detect_then_xray_centre_plan.py +94 -20
  14. mx_bluesky/beamlines/i04/external_interaction/__init__.py +0 -0
  15. mx_bluesky/beamlines/i04/external_interaction/config_server.py +15 -0
  16. mx_bluesky/beamlines/i04/oav_centering_plans/__init__.py +0 -0
  17. mx_bluesky/beamlines/i04/oav_centering_plans/oav_imaging.py +115 -0
  18. mx_bluesky/beamlines/i04/parameters/__init__.py +0 -0
  19. mx_bluesky/beamlines/i04/parameters/constants.py +21 -0
  20. mx_bluesky/beamlines/i04/redis_to_murko_forwarder.py +24 -1
  21. mx_bluesky/beamlines/i04/thawing_plan.py +149 -154
  22. mx_bluesky/beamlines/i24/jungfrau_commissioning/experiment_plans/do_darks.py +55 -10
  23. mx_bluesky/beamlines/i24/jungfrau_commissioning/plan_stubs/do_external_acquisition.py +1 -1
  24. mx_bluesky/beamlines/i24/jungfrau_commissioning/plan_stubs/plan_utils.py +1 -1
  25. mx_bluesky/beamlines/i24/serial/__init__.py +7 -5
  26. mx_bluesky/beamlines/i24/serial/dcid.py +6 -7
  27. mx_bluesky/beamlines/i24/serial/extruder/{i24ssx_Extruder_Collect_py3v2.py → i24ssx_extruder_collect_py3v2.py} +70 -37
  28. mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/CustomChip_py3v1.edl +11 -11
  29. mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/DetStage.edl +3 -3
  30. mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/DiamondChipI24-py3v1.edl +142 -142
  31. mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/MappingLite-oxford_py3v1.edl +135 -135
  32. mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/PMAC_Command.edl +8 -8
  33. mx_bluesky/beamlines/i24/serial/fixed_target/FT-gui-edm/pumpprobe-py3v1.edl +13 -13
  34. mx_bluesky/beamlines/i24/serial/fixed_target/{i24ssx_Chip_Collect_py3v1.py → i24ssx_chip_collect_py3v1.py} +12 -9
  35. mx_bluesky/beamlines/i24/serial/fixed_target/{i24ssx_Chip_Manager_py3v1.py → i24ssx_chip_manager_py3v1.py} +81 -78
  36. mx_bluesky/beamlines/i24/serial/fixed_target/{i24ssx_Chip_StartUp_py3v1.py → i24ssx_chip_startup_py3v1.py} +3 -3
  37. mx_bluesky/beamlines/i24/serial/fixed_target/i24ssx_moveonclick.py +33 -33
  38. mx_bluesky/beamlines/i24/serial/log.py +11 -11
  39. mx_bluesky/beamlines/i24/serial/parameters/fixed_target/cs/cs_maker.json +3 -3
  40. mx_bluesky/beamlines/i24/serial/parameters/utils.py +5 -5
  41. mx_bluesky/beamlines/i24/serial/setup_beamline/ca.py +0 -12
  42. mx_bluesky/beamlines/i24/serial/setup_beamline/pv.py +122 -334
  43. mx_bluesky/beamlines/i24/serial/setup_beamline/pv_abstract.py +5 -5
  44. mx_bluesky/beamlines/i24/serial/setup_beamline/setup_beamline.py +30 -251
  45. mx_bluesky/beamlines/i24/serial/setup_beamline/setup_detector.py +3 -3
  46. mx_bluesky/beamlines/i24/serial/setup_beamline/setup_zebra_plans.py +4 -4
  47. mx_bluesky/beamlines/i24/serial/web_gui_plans/general_plans.py +103 -16
  48. mx_bluesky/beamlines/i24/serial/web_gui_plans/oav_plans.py +64 -0
  49. mx_bluesky/beamlines/i24/serial/write_nexus.py +4 -4
  50. mx_bluesky/common/device_setup_plans/gonio.py +28 -0
  51. mx_bluesky/common/device_setup_plans/manipulate_sample.py +8 -1
  52. mx_bluesky/common/device_setup_plans/robot_load_unload.py +1 -1
  53. mx_bluesky/common/device_setup_plans/setup_oav.py +8 -0
  54. mx_bluesky/common/device_setup_plans/setup_zebra_and_shutter.py +0 -5
  55. mx_bluesky/common/device_setup_plans/xbpm_feedback.py +8 -1
  56. mx_bluesky/common/experiment_plans/beamstop_check.py +229 -0
  57. mx_bluesky/common/experiment_plans/common_flyscan_xray_centre_plan.py +8 -6
  58. mx_bluesky/common/experiment_plans/common_grid_detect_then_xray_centre_plan.py +2 -2
  59. mx_bluesky/common/experiment_plans/inner_plans/do_fgs.py +1 -1
  60. mx_bluesky/common/experiment_plans/inner_plans/read_hardware.py +7 -4
  61. mx_bluesky/common/experiment_plans/inner_plans/write_sample_status.py +2 -2
  62. mx_bluesky/common/experiment_plans/oav_snapshot_plan.py +1 -2
  63. mx_bluesky/{hyperion → common}/experiment_plans/pin_tip_centring_plan.py +23 -24
  64. mx_bluesky/common/external_interaction/callbacks/common/grid_detection_callback.py +5 -0
  65. mx_bluesky/common/external_interaction/callbacks/common/ispyb_callback_base.py +13 -15
  66. mx_bluesky/common/external_interaction/callbacks/common/ispyb_mapping.py +3 -5
  67. mx_bluesky/common/external_interaction/callbacks/common/plan_reactive_callback.py +1 -1
  68. mx_bluesky/common/external_interaction/callbacks/common/zocalo_callback.py +2 -2
  69. mx_bluesky/common/external_interaction/callbacks/sample_handling/sample_handling_callback.py +3 -3
  70. mx_bluesky/common/external_interaction/callbacks/xray_centre/ispyb_callback.py +12 -10
  71. mx_bluesky/common/external_interaction/callbacks/xray_centre/nexus_callback.py +2 -2
  72. mx_bluesky/common/external_interaction/config_server.py +4 -4
  73. mx_bluesky/common/external_interaction/ispyb/data_model.py +11 -4
  74. mx_bluesky/common/external_interaction/ispyb/exp_eye_store.py +163 -4
  75. mx_bluesky/common/external_interaction/ispyb/ispyb_store.py +76 -167
  76. mx_bluesky/common/external_interaction/ispyb/ispyb_utils.py +0 -14
  77. mx_bluesky/common/external_interaction/nexus/nexus_utils.py +2 -2
  78. mx_bluesky/common/external_interaction/nexus/write_nexus.py +3 -3
  79. mx_bluesky/common/parameters/components.py +1 -0
  80. mx_bluesky/common/parameters/constants.py +4 -3
  81. mx_bluesky/common/parameters/device_composites.py +4 -2
  82. mx_bluesky/common/parameters/gridscan.py +2 -2
  83. mx_bluesky/common/utils/exceptions.py +24 -7
  84. mx_bluesky/common/utils/log.py +13 -4
  85. mx_bluesky/common/utils/tracing.py +5 -5
  86. mx_bluesky/common/utils/utils.py +56 -8
  87. mx_bluesky/hyperion/__main__.py +6 -16
  88. mx_bluesky/hyperion/baton_handler.py +38 -14
  89. mx_bluesky/hyperion/device_setup_plans/utils.py +1 -1
  90. mx_bluesky/hyperion/experiment_plans/experiment_registry.py +1 -1
  91. mx_bluesky/hyperion/experiment_plans/hyperion_flyscan_xray_centre_plan.py +15 -13
  92. mx_bluesky/hyperion/experiment_plans/load_centre_collect_full_plan.py +2 -2
  93. mx_bluesky/hyperion/experiment_plans/optimise_attenuation_plan.py +9 -9
  94. mx_bluesky/hyperion/experiment_plans/pin_centre_then_xray_centre_plan.py +7 -8
  95. mx_bluesky/hyperion/experiment_plans/robot_load_and_change_energy.py +3 -10
  96. mx_bluesky/hyperion/experiment_plans/robot_load_then_centre_plan.py +4 -2
  97. mx_bluesky/hyperion/experiment_plans/rotation_scan_plan.py +10 -4
  98. mx_bluesky/hyperion/experiment_plans/set_energy_plan.py +2 -2
  99. mx_bluesky/hyperion/experiment_plans/udc_default_state.py +160 -0
  100. mx_bluesky/hyperion/external_interaction/agamemnon.py +3 -3
  101. mx_bluesky/hyperion/external_interaction/callbacks/__main__.py +2 -2
  102. mx_bluesky/hyperion/external_interaction/callbacks/rotation/ispyb_callback.py +3 -3
  103. mx_bluesky/hyperion/external_interaction/callbacks/rotation/ispyb_mapping.py +1 -0
  104. mx_bluesky/hyperion/external_interaction/callbacks/rotation/nexus_callback.py +3 -6
  105. mx_bluesky/hyperion/external_interaction/config_server.py +5 -5
  106. mx_bluesky/hyperion/parameters/constants.py +11 -4
  107. mx_bluesky/hyperion/parameters/device_composites.py +2 -2
  108. mx_bluesky/hyperion/parameters/gridscan.py +4 -4
  109. mx_bluesky/hyperion/parameters/robot_load.py +1 -9
  110. mx_bluesky/hyperion/plan_runner.py +6 -6
  111. mx_bluesky/hyperion/runner.py +10 -8
  112. mx_bluesky/jupyter_example.ipynb +3 -3
  113. {mx_bluesky-1.5.10.dist-info → mx_bluesky-1.5.12.dist-info}/METADATA +9 -7
  114. {mx_bluesky-1.5.10.dist-info → mx_bluesky-1.5.12.dist-info}/RECORD +118 -104
  115. mx_bluesky/common/experiment_plans/inner_plans/udc_default_state.py +0 -65
  116. mx_bluesky/common/external_interaction/callbacks/common/logging_callback.py +0 -29
  117. mx_bluesky/hyperion/device_setup_plans/smargon.py +0 -25
  118. {mx_bluesky-1.5.10.dist-info → mx_bluesky-1.5.12.dist-info}/WHEEL +0 -0
  119. {mx_bluesky-1.5.10.dist-info → mx_bluesky-1.5.12.dist-info}/entry_points.txt +0 -0
  120. {mx_bluesky-1.5.10.dist-info → mx_bluesky-1.5.12.dist-info}/licenses/LICENSE +0 -0
  121. {mx_bluesky-1.5.10.dist-info → mx_bluesky-1.5.12.dist-info}/top_level.txt +0 -0
--- a/mx_bluesky/common/external_interaction/ispyb/ispyb_store.py
+++ b/mx_bluesky/common/external_interaction/ispyb/ispyb_store.py
@@ -1,36 +1,28 @@
 from __future__ import annotations
 
 from collections.abc import Sequence
-from dataclasses import asdict
 from typing import TYPE_CHECKING
 
-import ispyb
-import ispyb.sqlalchemy
-import numpy as np
-from ispyb.connector.mysqlsp.main import ISPyBMySQLSPConnector as Connector
-from ispyb.sp.mxacquisition import MXAcquisition
-from ispyb.strictordereddict import StrictOrderedDict
 from pydantic import BaseModel
 
+from mx_bluesky.common.external_interaction.callbacks.common.ispyb_mapping import (
+    get_proposal_and_session_from_visit_string,
+)
 from mx_bluesky.common.external_interaction.ispyb.data_model import (
-    DataCollectionGridInfo,
     DataCollectionGroupInfo,
     DataCollectionInfo,
     ScanDataInfo,
 )
+from mx_bluesky.common.external_interaction.ispyb.exp_eye_store import ExpeyeInteraction
 from mx_bluesky.common.external_interaction.ispyb.ispyb_utils import (
     get_current_time_string,
-    get_session_id_from_visit,
 )
+from mx_bluesky.common.utils.exceptions import ISPyBDepositionNotMadeError
 from mx_bluesky.common.utils.log import ISPYB_ZOCALO_CALLBACK_LOGGER
-from mx_bluesky.common.utils.tracing import TRACER
 
 if TYPE_CHECKING:
     pass
 
-I03_EIGER_DETECTOR = 78
-EIGER_FILE_SUFFIX = "h5"
-
 
 class IspybIds(BaseModel):
     data_collection_ids: tuple[int, ...] = ()
@@ -41,6 +33,7 @@ class IspybIds(BaseModel):
 class StoreInIspyb:
     def __init__(self, ispyb_config: str) -> None:
         self.ISPYB_CONFIG_PATH: str = ispyb_config
+        self._expeye = ExpeyeInteraction()
 
     def begin_deposition(
         self,
@@ -76,45 +69,41 @@ class StoreInIspyb:
         data_collection_group_info: DataCollectionGroupInfo | None,
         scan_data_infos,
     ) -> IspybIds:
-        with ispyb.open(self.ISPYB_CONFIG_PATH) as conn:
-            assert conn, "Failed to connect to ISPyB"
-            if data_collection_group_info:
-                ispyb_ids.data_collection_group_id = (
-                    self._store_data_collection_group_table(
-                        conn,
-                        data_collection_group_info,
-                        ispyb_ids.data_collection_group_id,
-                    )
-                )
-            else:
-                assert ispyb_ids.data_collection_group_id, (
-                    "Attempt to update data collection without a data collection group ID"
+        if data_collection_group_info:
+            ispyb_ids.data_collection_group_id = (
+                self._store_data_collection_group_table(
+                    data_collection_group_info, ispyb_ids.data_collection_group_id
                 )
+            )
+        else:
+            assert ispyb_ids.data_collection_group_id, (
+                "Attempt to update data collection without a data collection group ID"
+            )
 
-            grid_ids = list(ispyb_ids.grid_ids)
-            data_collection_ids_out = list(ispyb_ids.data_collection_ids)
-            for scan_data_info in scan_data_infos:
-                data_collection_id = scan_data_info.data_collection_id
-                if (
-                    scan_data_info.data_collection_info
-                    and not scan_data_info.data_collection_info.parent_id
-                ):
-                    scan_data_info.data_collection_info.parent_id = (
-                        ispyb_ids.data_collection_group_id
-                    )
-
-                new_data_collection_id, grid_id = self._store_single_scan_data(
-                    conn, scan_data_info, data_collection_id
+        grid_ids = list(ispyb_ids.grid_ids)
+        data_collection_ids_out = list(ispyb_ids.data_collection_ids)
+        for scan_data_info in scan_data_infos:
+            data_collection_id = scan_data_info.data_collection_id
+            if (
+                scan_data_info.data_collection_info
+                and not scan_data_info.data_collection_info.parent_id
+            ):
+                scan_data_info.data_collection_info.parent_id = (
+                    ispyb_ids.data_collection_group_id
                 )
-                if not data_collection_id:
-                    data_collection_ids_out.append(new_data_collection_id)
-                if grid_id:
-                    grid_ids.append(grid_id)
-            ispyb_ids = IspybIds(
-                data_collection_ids=tuple(data_collection_ids_out),
-                grid_ids=tuple(grid_ids),
-                data_collection_group_id=ispyb_ids.data_collection_group_id,
+
+            new_data_collection_id, grid_id = self._store_single_scan_data(
+                scan_data_info, data_collection_id
             )
+            if not data_collection_id:
+                data_collection_ids_out.append(new_data_collection_id)
+            if grid_id:
+                grid_ids.append(grid_id)
+        ispyb_ids = IspybIds(
+            data_collection_ids=tuple(data_collection_ids_out),
+            grid_ids=tuple(grid_ids),
+            data_collection_group_id=ispyb_ids.data_collection_group_id,
+        )
         return ispyb_ids
 
     def end_deposition(self, ispyb_ids: IspybIds, success: str, reason: str):
@@ -135,24 +124,19 @@ class StoreInIspyb:
                 run_status = "DataCollection Successful"
             current_time = get_current_time_string()
             self._update_scan_with_end_time_and_status(
-                current_time,
-                run_status,
-                reason,
-                id_,
-                ispyb_ids.data_collection_group_id,
+                current_time, run_status, reason, id_
             )
 
     def append_to_comment(
         self, data_collection_id: int, comment: str, delimiter: str = " "
     ) -> None:
         try:
-            with ispyb.open(self.ISPYB_CONFIG_PATH) as conn:
-                assert conn is not None, "Failed to connect to ISPyB!"
-                mx_acquisition: MXAcquisition = conn.mx_acquisition
-                mx_acquisition.update_data_collection_append_comments(
-                    data_collection_id, comment, delimiter
-                )
-        except ispyb.ReadWriteError as e:
+            self._expeye.update_data_collection(
+                data_collection_id,
+                DataCollectionInfo(comments=delimiter + comment),
+                True,
+            )
+        except ISPyBDepositionNotMadeError as e:
             ISPYB_ZOCALO_CALLBACK_LOGGER.warning(
                 f"Unable to log comment, comment probably exceeded column length: {comment}",
                 exc_info=e,
@@ -163,143 +147,68 @@ class StoreInIspyb:
         dcg_info: DataCollectionGroupInfo,
         data_collection_group_id: int | None = None,
     ) -> None:
-        with ispyb.open(self.ISPYB_CONFIG_PATH) as conn:
-            assert conn is not None, "Failed to connect to ISPyB!"
-            self._store_data_collection_group_table(
-                conn,
-                dcg_info,
-                data_collection_group_id,
-            )
+        self._store_data_collection_group_table(dcg_info, data_collection_group_id)
 
     def _update_scan_with_end_time_and_status(
-        self,
-        end_time: str,
-        run_status: str,
-        reason: str,
-        data_collection_id: int,
-        data_collection_group_id: int,
+        self, end_time: str, run_status: str, reason: str, data_collection_id: int
     ) -> None:
-        if reason is not None and reason != "":
+        if reason != "":
             self.append_to_comment(data_collection_id, f"{run_status} reason: {reason}")
 
-        with ispyb.open(self.ISPYB_CONFIG_PATH) as conn:
-            assert conn is not None, "Failed to connect to ISPyB!"
-
-            mx_acquisition: MXAcquisition = conn.mx_acquisition
-
-            params = mx_acquisition.get_data_collection_params()
-            params["id"] = data_collection_id
-            params["parentid"] = data_collection_group_id
-            params["endtime"] = end_time
-            params["run_status"] = run_status
-            mx_acquisition.upsert_data_collection(list(params.values()))
-
-    def _store_position_table(
-        self, conn: Connector, dc_pos_info, data_collection_id
-    ) -> int:
-        mx_acquisition: MXAcquisition = conn.mx_acquisition
-
-        params = mx_acquisition.get_dc_position_params()
-        params["id"] = data_collection_id
-        params |= asdict(dc_pos_info)
-
-        return mx_acquisition.update_dc_position(list(params.values()))
+        info = DataCollectionInfo(end_time=end_time, run_status=run_status)
+        self._expeye.update_data_collection(data_collection_id, info)
 
     def _store_data_collection_group_table(
         self,
-        conn: Connector,
         dcg_info: DataCollectionGroupInfo,
         data_collection_group_id: int | None = None,
     ) -> int:
-        mx_acquisition: MXAcquisition = conn.mx_acquisition
-
-        params = mx_acquisition.get_data_collection_group_params()
         if data_collection_group_id:
-            params["id"] = data_collection_group_id
-        params["parent_id"] = get_session_id_from_visit(conn, dcg_info.visit_string)
-        params |= {k: v for k, v in asdict(dcg_info).items() if k != "visit_string"}
-
-        return self._upsert_data_collection_group(conn, params)
+            self._expeye.update_data_group(data_collection_group_id, dcg_info)
+            return data_collection_group_id
+        else:
+            proposal, session = get_proposal_and_session_from_visit_string(
+                dcg_info.visit_string
+            )
+            return self._expeye.create_data_group(proposal, session, dcg_info)
 
     def _store_data_collection_table(
-        self, conn, data_collection_id, data_collection_info
-    ):
+        self, data_collection_id, data_collection_info: DataCollectionInfo
+    ) -> int:
         if data_collection_id and data_collection_info.comments:
             self.append_to_comment(
                 data_collection_id, data_collection_info.comments, " "
             )
             data_collection_info.comments = None
 
-        params = self._fill_common_data_collection_params(
-            conn, data_collection_id, data_collection_info
-        )
-
-        return self._upsert_data_collection(conn, params)
+        if data_collection_id:
+            self._expeye.update_data_collection(
+                data_collection_id, data_collection_info
+            )
+            return data_collection_id
+        else:
+            assert data_collection_info.parent_id, (
+                "Data Collection must have a Data Collection Group"
+            )
+            return self._expeye.create_data_collection(
+                data_collection_info.parent_id, data_collection_info
+            )
 
     def _store_single_scan_data(
-        self, conn, scan_data_info, data_collection_id=None
+        self, scan_data_info, data_collection_id=None
     ) -> tuple[int, int | None]:
         data_collection_id = self._store_data_collection_table(
-            conn, data_collection_id, scan_data_info.data_collection_info
+            data_collection_id, scan_data_info.data_collection_info
         )
 
         if scan_data_info.data_collection_position_info:
-            self._store_position_table(
-                conn,
-                scan_data_info.data_collection_position_info,
-                data_collection_id,
+            self._expeye.create_position(
+                data_collection_id, scan_data_info.data_collection_position_info
             )
 
         grid_id = None
         if scan_data_info.data_collection_grid_info:
-            grid_id = self._store_grid_info_table(
-                conn,
-                data_collection_id,
-                scan_data_info.data_collection_grid_info,
+            grid_id = self._expeye.create_grid(
+                data_collection_id, scan_data_info.data_collection_grid_info
             )
         return data_collection_id, grid_id
-
-    def _store_grid_info_table(
-        self,
-        conn: Connector,
-        ispyb_data_collection_id: int,
-        dc_grid_info: DataCollectionGridInfo,
-    ) -> int:
-        mx_acquisition: MXAcquisition = conn.mx_acquisition
-        params = mx_acquisition.get_dc_grid_params()
-        params |= dc_grid_info.as_dict()
-        params["parentid"] = ispyb_data_collection_id
-        return mx_acquisition.upsert_dc_grid(list(params.values()))
-
-    def _fill_common_data_collection_params(
-        self, conn, data_collection_id, data_collection_info: DataCollectionInfo
-    ) -> StrictOrderedDict:
-        mx_acquisition: MXAcquisition = conn.mx_acquisition
-        params = mx_acquisition.get_data_collection_params()
-
-        if data_collection_id:
-            params["id"] = data_collection_id
-        if data_collection_info.visit_string:
-            # This is only needed for populating the DataCollectionGroup
-            params["visit_id"] = get_session_id_from_visit(
-                conn, data_collection_info.visit_string
-            )
-        params |= {
-            k: v.item() if isinstance(v, np.generic) else v  # Convert to native types
-            for k, v in asdict(data_collection_info).items()
-            if k != "visit_string"
-        }
-
-        return params
-
-    @staticmethod
-    @TRACER.start_as_current_span("_upsert_data_collection_group")
-    def _upsert_data_collection_group(
-        conn: Connector, params: StrictOrderedDict
-    ) -> int:
-        return conn.mx_acquisition.upsert_data_collection_group(list(params.values()))
-
-    @staticmethod
-    @TRACER.start_as_current_span("_upsert_data_collection")
-    def _upsert_data_collection(conn: Connector, params: StrictOrderedDict) -> int:
-        return conn.mx_acquisition.upsert_data_collection(list(params.values()))
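Note on the hunks above: StoreInIspyb no longer opens a direct ispyb stored-procedure connection; every table write is delegated to the ExpeyeInteraction instance created in __init__ (exp_eye_store.py gains +163 −4 lines in this release). The sketch below is illustrative only, not taken from the package; the config path, data collection ID and comment text are made up. It shows the public surface callers keep using while the Expeye calls happen inside the class.

# Illustrative sketch of exercising the reworked class (values are hypothetical).
from mx_bluesky.common.external_interaction.ispyb.ispyb_store import StoreInIspyb

store = StoreInIspyb("/path/to/ispyb-config.cfg")  # hypothetical config path
# Appends to the DataCollection comment via ExpeyeInteraction.update_data_collection;
# a failed deposition now surfaces as ISPyBDepositionNotMadeError and is logged as a warning.
store.append_to_comment(data_collection_id=1234567, comment="Zocalo processing triggered")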
--- a/mx_bluesky/common/external_interaction/ispyb/ispyb_utils.py
+++ b/mx_bluesky/common/external_interaction/ispyb/ispyb_utils.py
@@ -1,12 +1,6 @@
-from __future__ import annotations
-
 import datetime
 import os
 
-from ispyb import NoResult
-from ispyb.connector.mysqlsp.main import ISPyBMySQLSPConnector as Connector
-from ispyb.sp.core import Core
-
 
 def get_ispyb_config() -> str:
     ispyb_config = os.environ.get("ISPYB_CONFIG_PATH")
@@ -14,14 +8,6 @@ def get_ispyb_config() -> str:
     return ispyb_config
 
 
-def get_session_id_from_visit(conn: Connector, visit: str):
-    try:
-        core: Core = conn.core
-        return core.retrieve_visit_id(visit)
-    except NoResult as e:
-        raise NoResult(f"No session ID found in ispyb for visit {visit}") from e
-
-
 def get_current_time_string():
     now = datetime.datetime.now()
     return now.strftime("%Y-%m-%d %H:%M:%S")
--- a/mx_bluesky/common/external_interaction/nexus/nexus_utils.py
+++ b/mx_bluesky/common/external_interaction/nexus/nexus_utils.py
@@ -11,7 +11,7 @@ from nexgen.nxs_utils.axes import TransformationType
 from numpy.typing import DTypeLike
 
 from mx_bluesky.common.utils.log import NEXUS_LOGGER
-from mx_bluesky.common.utils.utils import convert_eV_to_angstrom
+from mx_bluesky.common.utils.utils import convert_ev_to_angstrom
 
 
 class AxisDirection(Enum):
@@ -158,6 +158,6 @@ def create_beam_and_attenuator_parameters(
         tuple[Beam, Attenuator]: Descriptions of the beam and attenuator for nexgen.
     """
     return (
-        Beam(wavelength=convert_eV_to_angstrom(energy_kev * 1000), flux=flux),
+        Beam(wavelength=convert_ev_to_angstrom(energy_kev * 1000), flux=flux),
         Attenuator(transmission=transmission_fraction),
     )
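For context on the renamed convert_ev_to_angstrom (previously convert_eV_to_angstrom): it converts a photon energy in eV to a wavelength in angstroms via lambda = hc/E, with hc ≈ 12398.42 eV·Å. Below is a minimal standalone sketch of that arithmetic only; it is not the library implementation, and the exact constant used inside mx_bluesky.common.utils.utils is not shown in this diff.

# Standalone sketch of the energy-to-wavelength conversion (not the library code).
HC_EV_ANGSTROM = 12398.42  # Planck constant x speed of light, in eV * angstrom


def ev_to_angstrom(energy_ev: float) -> float:
    """Wavelength in angstroms for a photon of the given energy in eV."""
    return HC_EV_ANGSTROM / energy_ev


print(ev_to_angstrom(12_700))  # ~0.976 A for a 12.7 keV beam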
--- a/mx_bluesky/common/external_interaction/nexus/write_nexus.py
+++ b/mx_bluesky/common/external_interaction/nexus/write_nexus.py
@@ -87,7 +87,7 @@ class NexusWriter:
             vds_shape = self.data_shape
 
         for filename in [self.nexus_file, self.master_file]:
-            NXmx_Writer = NXmxFileWriter(
+            nxmx_writer = NXmxFileWriter(
                 filename,
                 self.goniometer,
                 self.detector,
@@ -96,12 +96,12 @@ class NexusWriter:
                 self.attenuator,
                 self.full_num_of_images,
             )
-            NXmx_Writer.write(
+            nxmx_writer.write(
                 image_filename=f"{self.data_filename}",
                 start_time=start_time,
                 est_end_time=est_end_time,
             )
-            NXmx_Writer.write_vds(
+            nxmx_writer.write_vds(
                 vds_offset=self.start_index, vds_shape=vds_shape, vds_dtype=bit_depth
             )
 
--- a/mx_bluesky/common/parameters/components.py
+++ b/mx_bluesky/common/parameters/components.py
@@ -171,6 +171,7 @@ class DiffractionExperiment(
     ispyb_experiment_type: IspybExperimentType
     storage_directory: str
    use_roi_mode: bool = Field(default=GridscanParamConstants.USE_ROI)
+    snapshot_directory: Path = None  # type:ignore # filled in on validation
 
     @model_validator(mode="before")
     @classmethod
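The new snapshot_directory field is annotated as filled in on validation. The diff does not show the validator body, so the snippet below is only a generic illustration of the pydantic v2 pattern being relied on (a mode="before" validator defaulting one field from other inputs); the model name and the "snapshots" subdirectory are invented for the example.

# Generic illustration of the pattern, not mx-bluesky's actual validator.
from pathlib import Path

from pydantic import BaseModel, model_validator


class ExampleParams(BaseModel):
    storage_directory: str
    snapshot_directory: Path = None  # type: ignore # filled in on validation

    @model_validator(mode="before")
    @classmethod
    def _default_snapshot_directory(cls, values: dict) -> dict:
        # If the caller did not supply snapshot_directory, derive it before validation.
        values.setdefault(
            "snapshot_directory", Path(values["storage_directory"]) / "snapshots"
        )
        return values


params = ExampleParams(storage_directory="/tmp/visit")
print(params.snapshot_directory)  # /tmp/visit/snapshots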
--- a/mx_bluesky/common/parameters/constants.py
+++ b/mx_bluesky/common/parameters/constants.py
@@ -40,6 +40,8 @@ def _get_oav_config_json_path():
         return "tests/test_data/test_OAVCentring.json"
     elif BEAMLINE == "i03":
         return f"/dls_sw/{BEAMLINE}/software/daq_configuration/json/OAVCentring_hyperion.json"
+    elif BEAMLINE == "aithre":
+        return "/dls/science/groups/i23/aithre/daq_configuration/json/OAVCentring_aithre.json"
     else:
         return f"/dls_sw/{BEAMLINE}/software/daq_configuration/json/OAVCentring.json"
 
@@ -85,7 +87,6 @@ class HardwareConstants:
     OAV_REFRESH_DELAY = 0.3
     PANDA_FGS_RUN_UP_DEFAULT = 0.17
     CRYOJET_MARGIN_MM = 0.2
-    THAWING_TIME = 40
     TIP_OFFSET_UM = 0
 
     # Value quoted in https://www.dectris.com/en/detectors/x-ray-detectors/eiger2/eiger2-for-synchrotrons/eiger2-x/,
@@ -163,9 +164,9 @@ class Status(Enum):
 
 
 @dataclass
-class FeatureSetting: ...  # List of features and their default values. Subclasses must also be a pydantic dataclass
+class FeatureSettings: ...  # List of features and their default values. Subclasses must also be a pydantic dataclass
 
 
-class FeatureSettingources(
+class FeatureSettingSources(
     StrEnum
 ): ...  # List of features and the name of that property in domain.properties
--- a/mx_bluesky/common/parameters/device_composites.py
+++ b/mx_bluesky/common/parameters/device_composites.py
@@ -4,6 +4,7 @@ from dodal.devices.aperturescatterguard import (
 )
 from dodal.devices.attenuator.attenuator import BinaryFilterAttenuator
 from dodal.devices.backlight import Backlight
+from dodal.devices.beamsize.beamsize import BeamsizeBase
 from dodal.devices.common_dcm import DoubleCrystalMonochromator
 from dodal.devices.detector.detector_motion import DetectorMotion
 from dodal.devices.eiger import EigerDetector
@@ -18,7 +19,7 @@ from dodal.devices.robot import BartRobot
 from dodal.devices.s4_slit_gaps import S4SlitGaps
 from dodal.devices.smargon import Smargon
 from dodal.devices.synchrotron import Synchrotron
-from dodal.devices.undulator import Undulator
+from dodal.devices.undulator import UndulatorInKeV
 from dodal.devices.xbpm_feedback import XBPMFeedback
 from dodal.devices.zebra.zebra import Zebra
 from dodal.devices.zebra.zebra_controlled_shutter import ZebraShutter
@@ -51,6 +52,7 @@ class GridDetectThenXRayCentreComposite(FlyScanEssentialDevices):
     attenuator: BinaryFilterAttenuator
     backlight: Backlight
     beamstop: Beamstop
+    beamsize: BeamsizeBase
     dcm: DoubleCrystalMonochromator
     detector_motion: DetectorMotion
     zebra_fast_grid_scan: ZebraFastGridScanThreeD
@@ -58,7 +60,7 @@ class GridDetectThenXRayCentreComposite(FlyScanEssentialDevices):
     oav: OAV
     pin_tip_detection: PinTipDetection
     s4_slit_gaps: S4SlitGaps
-    undulator: Undulator
+    undulator: UndulatorInKeV
     xbpm_feedback: XBPMFeedback
     zebra: Zebra
     robot: BartRobot
--- a/mx_bluesky/common/parameters/gridscan.py
+++ b/mx_bluesky/common/parameters/gridscan.py
@@ -109,7 +109,7 @@ class SpecifiedGrid(GridCommon, XyzStarts, WithScan, Generic[GridScanParamType])
 
     @property
     @abstractmethod
-    def FGS_params(self) -> GridScanParamType: ...
+    def fast_gridscan_params(self) -> GridScanParamType: ...
 
     def do_set_stub_offsets(self, value: bool):
         self._set_stub_offsets = value
@@ -167,7 +167,7 @@ class SpecifiedThreeDGridScan(
     grid2_omega_deg: float = Field(default=GridscanParamConstants.OMEGA_2)
 
     @property
-    def FGS_params(self) -> ZebraGridScanParamsThreeD:
+    def fast_gridscan_params(self) -> ZebraGridScanParamsThreeD:
         return ZebraGridScanParamsThreeD(
             x_steps=self.x_steps,
             y_steps=self.y_steps,
--- a/mx_bluesky/common/utils/exceptions.py
+++ b/mx_bluesky/common/utils/exceptions.py
@@ -7,20 +7,37 @@ from bluesky.preprocessors import contingency_wrapper
 from bluesky.utils import Msg
 
 
-class WarningException(Exception):
+class WarningError(
+    Exception
+):  # see https://github.com/DiamondLightSource/mx-bluesky/issues/1394 on naming
     """An exception used when we want to warn GDA of a
     problem but continue with UDC anyway"""
 
     pass
 
 
-class ISPyBDepositionNotMade(Exception):
+class BeamlineCheckFailureError(Exception):
+    """
+    An error which is raised during a beamline check to indicate that the check did
+    not pass.
+    """
+
+    ...
+
+
+class ISPyBDepositionNotMadeError(Exception):
     """Raised when the ISPyB or Zocalo callbacks can't access ISPyB deposition numbers."""
 
     pass
 
 
-class SampleException(WarningException):
+class BeamlineStateError(Exception):
+    """Exception raised when the beamline is in the incorrect state"""
+
+    pass
+
+
+class SampleError(WarningError):
     """An exception which identifies an issue relating to the sample."""
 
     def __str__(self):
@@ -36,7 +53,7 @@ class SampleException(WarningException):
 T = TypeVar("T")
 
 
-class CrystalNotFoundException(SampleException):
+class CrystalNotFoundError(SampleError):
     """Raised if grid detection completed normally but no crystal was found."""
 
     def __init__(self, *args):
@@ -49,7 +66,7 @@ def catch_exception_and_warn(
     *args,
     **kwargs,
 ) -> Generator[Msg, None, T]:
-    """A plan wrapper to catch a specific exception and instead raise a WarningException,
+    """A plan wrapper to catch a specific exception and instead raise a WarningError,
     so that UDC is not halted
 
     Example usage:
@@ -58,12 +75,12 @@ def catch_exception_and_warn(
     ...
     yield from catch_exception_and_warn(ExceptionA, plan_which_can_raise_exception_a, **args, **kwargs)'
 
-    This will catch ExceptionA raised by the plan and instead raise a WarningException
+    This will catch ExceptionA raised by the plan and instead raise a WarningError
     """
 
     def warn_if_exception_matches(exception: Exception):
         if isinstance(exception, exception_to_catch):
-            raise SampleException(str(exception)) from exception
+            raise SampleError(str(exception)) from exception
         yield from null()
 
     return (
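The renames above (WarningException → WarningError, SampleException → SampleError, CrystalNotFoundException → CrystalNotFoundError, ISPyBDepositionNotMade → ISPyBDepositionNotMadeError) follow the usage pattern already shown in the docstring. Below is a hedged sketch of calling the wrapper with the new names; the device exception and the two plans are hypothetical, only catch_exception_and_warn and SampleError come from this diff.

# Hedged sketch of the renamed helpers in use; hypothetical plans and exception.
from bluesky.plan_stubs import null

from mx_bluesky.common.utils.exceptions import catch_exception_and_warn


class PinTipNotFoundError(Exception):
    """Hypothetical device-level error raised by a centring plan."""


def find_pin_tip():
    # Hypothetical plan that can fail with PinTipNotFoundError.
    yield from null()
    raise PinTipNotFoundError("no pin tip visible")


def centre_sample():
    # Any PinTipNotFoundError raised inside find_pin_tip is re-raised as SampleError
    # (a WarningError subclass), so UDC logs a warning and moves on instead of halting.
    yield from catch_exception_and_warn(PinTipNotFoundError, find_pin_tip)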
--- a/mx_bluesky/common/utils/log.py
+++ b/mx_bluesky/common/utils/log.py
@@ -1,3 +1,4 @@
+import json
 import logging
 from logging.handlers import TimedRotatingFileHandler
 from os import environ
@@ -10,11 +11,11 @@ from dodal.log import (
     integrate_bluesky_and_ophyd_logging,
     set_up_all_logging_handlers,
 )
-from dodal.log import LOGGER as dodal_logger
+from dodal.log import LOGGER as DODAL_LOGGER
 
 LOGGER = logging.getLogger("MX-Bluesky")
 LOGGER.setLevel("DEBUG")
-LOGGER.parent = dodal_logger
+LOGGER.parent = DODAL_LOGGER
 
 ISPYB_ZOCALO_CALLBACK_LOGGER = logging.getLogger("ISPyB and Zocalo callbacks")
 ISPYB_ZOCALO_CALLBACK_LOGGER.setLevel(logging.DEBUG)
@@ -27,6 +28,14 @@ ALL_LOGGERS = [LOGGER, ISPYB_ZOCALO_CALLBACK_LOGGER, NEXUS_LOGGER]
 __logger_handlers: DodalLogHandlers | None = None
 
 
+def format_doc_for_log(doc):
+    class _BestEffortEncoder(json.JSONEncoder):
+        def default(self, o):
+            return repr(o)
+
+    return json.dumps(doc, indent=2, cls=_BestEffortEncoder)
+
+
 class ExperimentMetadataTagFilter(logging.Filter):
     """When an instance of this custom filter is added to a logging handler, dc_group_id
     and run_id will be tagged in that handlers' log messages."""
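The hunk above adds format_doc_for_log, which pretty-prints a bluesky document as indented JSON and falls back to repr() for values that are not JSON serialisable. A small usage sketch (the document contents below are made up):

# Usage sketch; the start document is invented for illustration.
from mx_bluesky.common.utils.log import LOGGER, format_doc_for_log

start_doc = {
    "uid": "1f2e3d4c",
    "plan_name": "rotation_scan",
    "detector": object(),  # non-serialisable: rendered via repr() by the encoder
}
LOGGER.debug(f"Received start document:\n{format_doc_for_log(start_doc)}")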
@@ -68,7 +77,7 @@ def do_default_logging_setup(
     and bluesky and ophyd-async are optionally included."""
     logging_path, debug_logging_path = _get_logging_dirs(dev_mode)
     handlers = set_up_all_logging_handlers(
-        dodal_logger,
+        DODAL_LOGGER,
         logging_path,
         file_name,
         dev_mode,
@@ -78,7 +87,7 @@ def do_default_logging_setup(
     )
 
     if integrate_all_logs:
-        integrate_bluesky_and_ophyd_logging(dodal_logger)
+        integrate_bluesky_and_ophyd_logging(DODAL_LOGGER)
     handlers["graylog_handler"].addFilter(tag_filter)
 
 
--- a/mx_bluesky/common/utils/tracing.py
+++ b/mx_bluesky/common/utils/tracing.py
@@ -11,18 +11,18 @@ from opentelemetry.sdk.trace.export import BatchSpanProcessor
 def setup_tracing(service_name: str = "Hyperion"):
     resource = Resource(attributes={SERVICE_NAME: service_name})
 
-    traceProvider = TracerProvider(resource=resource)
+    trace_provider = TracerProvider(resource=resource)
     processor = BatchSpanProcessor(
         OTLPSpanExporter(endpoint="http://0.0.0.0:4318/v1/traces")
     )
-    traceProvider.add_span_processor(processor)
-    trace.set_tracer_provider(traceProvider)
+    trace_provider.add_span_processor(processor)
+    trace.set_tracer_provider(trace_provider)
 
     reader = PeriodicExportingMetricReader(
         OTLPMetricExporter(endpoint="http://0.0.0.0:4318/v1/metrics")
     )
-    meterProvider = MeterProvider(resource=resource, metric_readers=[reader])
-    metrics.set_meter_provider(meterProvider)
+    meter_provider = MeterProvider(resource=resource, metric_readers=[reader])
+    metrics.set_meter_provider(meter_provider)
 
 
 TRACER = trace.get_tracer(__name__)
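The snake_case renames in setup_tracing do not change behaviour: an OTLP span exporter and metric reader are still installed on the global providers, and the module-level TRACER is still created with trace.get_tracer(__name__). A hedged usage sketch follows; the span name is made up.

# Usage sketch; the span name is invented for illustration.
from mx_bluesky.common.utils.tracing import TRACER, setup_tracing

setup_tracing(service_name="Hyperion")  # installs OTLP exporters targeting 0.0.0.0:4318

with TRACER.start_as_current_span("store_ispyb_deposition"):
    ...  # work done here is exported as a trace span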