roc-film 1.13.5__py3-none-any.whl → 1.14.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
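Since both versions are ordinary wheels (zip archives) published to the registry, a diff such as the one below can be reproduced locally with the Python standard library. The following is a minimal, standalone sketch and not part of roc-film; the local wheel filenames are assumptions (fetch them first, e.g. with pip download roc-film==1.13.5 --no-deps, and the same for 1.14.0):

# Standalone sketch (not part of the package): reproduce this diff from the two wheels.
import difflib
import zipfile

OLD_WHL = "roc_film-1.13.5-py3-none-any.whl"   # assumed local filename
NEW_WHL = "roc_film-1.14.0-py3-none-any.whl"   # assumed local filename
MEMBER = "roc/film/tasks/l0_to_l1_sbm.py"


def read_lines(whl_path, member):
    # A wheel is a zip archive: read one member and split it into lines.
    with zipfile.ZipFile(whl_path) as whl:
        return whl.read(member).decode("utf-8", errors="replace").splitlines()


diff = difflib.unified_diff(
    read_lines(OLD_WHL, MEMBER),
    read_lines(NEW_WHL, MEMBER),
    fromfile=f"1.13.5/{MEMBER}",
    tofile=f"1.14.0/{MEMBER}",
    lineterm="",
)
print("\n".join(diff))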
Files changed (52)
  1. roc/__init__.py +2 -1
  2. roc/film/__init__.py +2 -2
  3. roc/film/commands.py +372 -323
  4. roc/film/config/__init__.py +0 -1
  5. roc/film/constants.py +101 -65
  6. roc/film/descriptor.json +127 -96
  7. roc/film/exceptions.py +28 -27
  8. roc/film/tasks/__init__.py +16 -16
  9. roc/film/tasks/cat_solo_hk.py +86 -74
  10. roc/film/tasks/cdf_postpro.py +438 -309
  11. roc/film/tasks/check_dds.py +39 -45
  12. roc/film/tasks/db_to_anc_bia_sweep_table.py +381 -0
  13. roc/film/tasks/dds_to_l0.py +232 -180
  14. roc/film/tasks/export_solo_coord.py +147 -0
  15. roc/film/tasks/file_handler.py +91 -75
  16. roc/film/tasks/l0_to_hk.py +117 -103
  17. roc/film/tasks/l0_to_l1_bia_current.py +38 -30
  18. roc/film/tasks/l0_to_l1_bia_sweep.py +417 -329
  19. roc/film/tasks/l0_to_l1_sbm.py +250 -208
  20. roc/film/tasks/l0_to_l1_surv.py +185 -130
  21. roc/film/tasks/make_daily_tm.py +40 -37
  22. roc/film/tasks/merge_tcreport.py +77 -71
  23. roc/film/tasks/merge_tmraw.py +101 -88
  24. roc/film/tasks/parse_dds_xml.py +21 -20
  25. roc/film/tasks/set_l0_utc.py +51 -49
  26. roc/film/tests/cdf_compare.py +565 -0
  27. roc/film/tests/hdf5_compare.py +84 -62
  28. roc/film/tests/test_dds_to_l0.py +93 -51
  29. roc/film/tests/test_dds_to_tc.py +8 -11
  30. roc/film/tests/test_dds_to_tm.py +8 -10
  31. roc/film/tests/test_film.py +161 -116
  32. roc/film/tests/test_l0_to_hk.py +64 -36
  33. roc/film/tests/test_l0_to_l1_bia.py +10 -14
  34. roc/film/tests/test_l0_to_l1_sbm.py +14 -19
  35. roc/film/tests/test_l0_to_l1_surv.py +68 -41
  36. roc/film/tests/test_metadata.py +21 -20
  37. roc/film/tests/tests.py +743 -396
  38. roc/film/tools/__init__.py +5 -5
  39. roc/film/tools/dataset_tasks.py +34 -2
  40. roc/film/tools/file_helpers.py +390 -269
  41. roc/film/tools/l0.py +402 -324
  42. roc/film/tools/metadata.py +147 -127
  43. roc/film/tools/skeleton.py +12 -17
  44. roc/film/tools/tools.py +109 -92
  45. roc/film/tools/xlsx2skt.py +161 -139
  46. {roc_film-1.13.5.dist-info → roc_film-1.14.0.dist-info}/LICENSE +127 -125
  47. roc_film-1.14.0.dist-info/METADATA +60 -0
  48. roc_film-1.14.0.dist-info/RECORD +50 -0
  49. {roc_film-1.13.5.dist-info → roc_film-1.14.0.dist-info}/WHEEL +1 -1
  50. roc/film/tasks/l0_to_anc_bia_sweep_table.py +0 -348
  51. roc_film-1.13.5.dist-info/METADATA +0 -120
  52. roc_film-1.13.5.dist-info/RECORD +0 -48
--- a/roc/film/tasks/l0_to_l1_sbm.py
+++ b/roc/film/tasks/l0_to_l1_sbm.py
@@ -2,6 +2,7 @@
 # -*- coding: utf-8 -*-
 
 """Module to create the RPW L1 SBM1/SBM2 CDF files."""
+
 import os
 from datetime import timedelta
 import uuid
@@ -16,146 +17,142 @@ from roc.film.tools.l0 import L0
 from roc.rpl.time import Time
 from roc.rpl.packet_parser import raw_to_eng
 
-from roc.film.tools.file_helpers import get_l0_files, l0_to_trange_cdf, get_output_dir, is_output_dir, get_l0_trange
+from roc.film.tools.file_helpers import (
+    get_l0_files,
+    l0_to_trange_cdf,
+    get_output_dir,
+    is_output_dir,
+    get_l0_trange,
+)
 
 from roc.film.constants import TIME_JSON_STRFORMAT
 
-__all__ = ['L0ToL1Sbm']
+__all__ = ["L0ToL1Sbm"]
 
 # SBM1 QF TF ID
-TF_PA_DPU_0038 = 'CIWP0028TM'
+TF_PA_DPU_0038 = "CIWP0028TM"
 # SBM2 QF TF ID
-TF_PA_DPU_0039 = 'CIWP0029TM'
+TF_PA_DPU_0039 = "CIWP0029TM"
 
 
 class L0ToL1Sbm(Task):
     """
     Task to generate l1 sbm CDF from l0 file(s)
     """
-    plugin_name = 'roc.film'
-    name = 'l0_to_l1_sbm'
 
-    def add_targets(self):
+    plugin_name = "roc.film"
+    name = "l0_to_l1_sbm"
 
-        self.add_input(target_class=FileTarget,
-                       identifier='l0_file',
-                       filepath=get_l0_files,
-                       many=True)
+    def add_targets(self):
+        self.add_input(
+            target_class=FileTarget,
+            identifier="l0_file",
+            filepath=get_l0_files,
+            many=True,
+        )
 
-        self.add_output(target_class=FileTarget,
-                        identifier='l1_sbm1_rswf')
+        self.add_output(target_class=FileTarget, identifier="l1_sbm1_rswf")
 
-        self.add_output(target_class=FileTarget,
-                        identifier='l1_sbm2_tswf')
+        self.add_output(target_class=FileTarget, identifier="l1_sbm2_tswf")
 
-        self.add_output(target_class=FileTarget,
-                        identifier='l1_sbm1_cwf')
+        self.add_output(target_class=FileTarget, identifier="l1_sbm1_cwf")
 
-        self.add_output(target_class=FileTarget,
-                        identifier='l1_sbm1_bp1')
+        self.add_output(target_class=FileTarget, identifier="l1_sbm1_bp1")
 
-        self.add_output(target_class=FileTarget,
-                        identifier='l1_sbm1_bp2')
+        self.add_output(target_class=FileTarget, identifier="l1_sbm1_bp2")
 
-        self.add_output(target_class=FileTarget,
-                        identifier='l1_sbm2_cwf')
+        self.add_output(target_class=FileTarget, identifier="l1_sbm2_cwf")
 
-        self.add_output(target_class=FileTarget,
-                        identifier='l1_sbm2_bp1')
+        self.add_output(target_class=FileTarget, identifier="l1_sbm2_bp1")
 
-        self.add_output(target_class=FileTarget,
-                        identifier='l1_sbm2_bp2')
+        self.add_output(target_class=FileTarget, identifier="l1_sbm2_bp2")
 
     def setup_inputs(self):
-
         # Get products directory (folder where final output files will be
        # moved)
-        self.products_dir = self.pipeline.get('products_dir',
-                                              default=[None], args=True)[0]
+        self.products_dir = self.pipeline.get(
+            "products_dir", default=[None], args=True
+        )[0]
 
         # Get output dir
         self.output_dir = get_output_dir(self.pipeline)
-        if not is_output_dir(self.output_dir,
-                             products_dir=self.products_dir):
-            logger.info(f'Making {self.output_dir}')
+        if not is_output_dir(self.output_dir, products_dir=self.products_dir):
+            logger.info(f"Making {self.output_dir}")
             os.makedirs(self.output_dir)
         else:
-            logger.debug(f'Output files will be '
-                         f'saved into folder {self.output_dir}')
+            logger.debug(f"Output files will be saved into folder {self.output_dir}")
 
         # Get (optional) arguments for SPICE
-        predictive = self.pipeline.get('predictive', default=False, args=True)
-        kernel_date = self.pipeline.get('kernel_date', default=None, args=True)
-        no_spice = self.pipeline.get('no_spice', default=False, args=True)
+        predictive = self.pipeline.get("predictive", default=False, args=True)
+        kernel_date = self.pipeline.get("kernel_date", default=None, args=True)
+        no_spice = self.pipeline.get("no_spice", default=False, args=True)
 
         # Get/create Time singleton
-        self.time = Time(predictive=predictive,
-                         kernel_date=kernel_date,
-                         no_spice=no_spice)
+        self.time = Time(
+            predictive=predictive, kernel_date=kernel_date, no_spice=no_spice
+        )
 
         # Get list of input l0 file(s)
-        self.l0_file_list = self.inputs['l0_file'].filepath
+        self.l0_file_list = self.inputs["l0_file"].filepath
 
         # Get or create failed_files list from pipeline properties
-        self.failed_files = self.pipeline.get(
-            'failed_files', default=[], create=True)
+        self.failed_files = self.pipeline.get("failed_files", default=[], create=True)
 
         # Get or create processed_files list from pipeline properties
         self.processed_files = self.pipeline.get(
-            'processed_files', default=[], create=True)
+            "processed_files", default=[], create=True
+        )
 
         # Get overwrite argument
-        self.overwrite = self.pipeline.get(
-            'overwrite', default=False, args=True)
+        self.overwrite = self.pipeline.get("overwrite", default=False, args=True)
 
         # Get force optional keyword
-        self.force = self.pipeline.get('force', default=False, args=True)
+        self.force = self.pipeline.get("force", default=False, args=True)
 
         # Get --cdag keyword
-        self.is_cdag = self.pipeline.get('cdag', default=False, args=True)
+        self.is_cdag = self.pipeline.get("cdag", default=False, args=True)
 
         # Get --no-sbm1/2 ans --as-is keywords
-        self.no_sbm1 = self.pipeline.get('no_sbm1', default=False, args=True)
-        self.no_sbm2 = self.pipeline.get('no_sbm2', default=False, args=True)
-        self.manual = self.pipeline.get('manual', default=False, args=True)
+        self.no_sbm1 = self.pipeline.get("no_sbm1", default=False, args=True)
+        self.no_sbm2 = self.pipeline.get("no_sbm2", default=False, args=True)
+        self.manual = self.pipeline.get("manual", default=False, args=True)
 
         # Define output file start time
-        self.start_time = self.pipeline.get('start_time', default=[None])[0]
-        logger.debug(f'start_time value is {self.start_time}')
+        self.start_time = self.pipeline.get("start_time", default=[None])[0]
+        logger.debug(f"start_time value is {self.start_time}")
 
         # Define output file end time
-        self.end_time = self.pipeline.get('end_time', default=[None])[0]
-        logger.debug(f'end_time value is {self.end_time}')
+        self.end_time = self.pipeline.get("end_time", default=[None])[0]
+        logger.debug(f"end_time value is {self.end_time}")
 
         # Define SBM type (only used with --as-is optional keyword)
-        self.sbm_type = self.pipeline.get('sbm_type', default=[None])[0]
-        logger.debug(f'sbm_type value is {self.sbm_type}')
+        self.sbm_type = self.pipeline.get("sbm_type", default=[None])[0]
+        logger.debug(f"sbm_type value is {self.sbm_type}")
 
         return True
 
     def run(self):
-
         # Define task job ID (long and short)
         self.job_uuid = str(uuid.uuid4())
-        self.job_id = f'L0ToL1Sbm-{self.job_uuid[:8]}'
-        logger.info(f'Task {self.job_id} is starting')
+        self.job_id = self.job_uuid[:8]
+        logger.info(f"Task job {self.job_id} is starting")
         try:
             self.setup_inputs()
-        except:
-            logger.exception(
-                f'Initializing inputs has failed for {self.job_id}!')
+        except Exception:
+            logger.exception(f"Initializing inputs has failed for job {self.job_id}!")
             try:
-                os.makedirs(os.path.join(self.output_dir, 'failed'))
-            except:
-                logger.error(f'output_dir argument is not defined for {self.job_id}!')
+                os.makedirs(os.path.join(self.output_dir, "failed"))
+            except Exception:
+                logger.error(
+                    f"output_dir argument is not defined for job {self.job_id}!"
+                )
             self.pipeline.exit()
             return
 
         if self.manual:
             # If 'manual' option is passed, then try to
             # process straightly the SBM packet data found in the input L0 files
-            logger.info(
-                f'Try to process data manually [{self.job_id}]')
+            logger.info(f"Try to process data manually [{self.job_id}]")
 
             # Define time range to process
             if not self.start_time and not self.end_time:
@@ -168,72 +165,89 @@ class L0ToL1Sbm(Task):
                 end_time = self.end_time
 
             if not self.sbm_type:
-                logger.warning('--sbm-type argument value is not passed!')
-            elif self.sbm_type and self.sbm_type not in [1,2]:
-                logger.error('--sbm-type argument value is not valid: must be 1 or 2!')
+                logger.warning("--sbm-type argument value is not passed!")
+            elif self.sbm_type and self.sbm_type not in [1, 2]:
+                logger.error("--sbm-type argument value is not valid: must be 1 or 2!")
                 self.pipeline.exit()
 
-            sbm_list = [{'start_time': start_time,
-                         'end_time': end_time,
-                         # We do not know DT?_SBM? parameters here, so assume
-                         # that SBM event occurrence time is in the center of the window
-                         'sbm_time': start_time + 0.5 * (end_time - start_time),
-                         'sbm_l0': self.l0_file_list,
-                         'sbm_type': self.sbm_type}]
+            sbm_list = [
+                {
+                    "start_time": start_time,
+                    "end_time": end_time,
+                    # We do not know DT?_SBM? parameters here, so assume
+                    # that SBM event occurrence time is in the center of the window
+                    "sbm_time": start_time + 0.5 * (end_time - start_time),
+                    "sbm_l0": self.l0_file_list,
+                    "sbm_type": self.sbm_type,
+                }
+            ]
             sbm_num = 1
         else:
-
             # Build list of sbm events by looking for
             # TM_DPU_EVENT_PR_DPU_SBM1 or TM_DPU_EVENT_PR_DPU_SBM2 packets in input L0 files
-            logger.info(f'Building list of SBM events to process... [{self.job_id}]')
-            sbm_list = self._build_sbm_list(self.l0_file_list,
-                                            start_time=self.start_time,
-                                            end_time=self.end_time,
-                                            no_sbm1=self.no_sbm1,
-                                            no_sbm2=self.no_sbm2)
+            logger.info(f"Building list of SBM events to process... [{self.job_id}]")
+            sbm_list = self._build_sbm_list(
+                self.l0_file_list,
+                start_time=self.start_time,
+                end_time=self.end_time,
+                no_sbm1=self.no_sbm1,
+                no_sbm2=self.no_sbm2,
+            )
 
             sbm_num = len(sbm_list)
             if sbm_num == 0:
-                logger.info('No SBM detection event found in input L0 files [{self.job_id}]')
+                logger.info(
+                    "No SBM detection event found in input L0 files [{self.job_id}]"
+                )
                 return
             else:
-                logger.info(f'{sbm_num} SBM events to process [{self.job_id}]')
+                logger.info(f"{sbm_num} SBM events to process [{self.job_id}]")
 
         # Initialize loop variables
         l1_cdf_path = None
 
         # Loop over each SBM event in the list
        for i, current_sbm in enumerate(sbm_list):
-
             # Get info of current sbm event
-            sbm_start_time = current_sbm['start_time']
-            sbm_end_time = current_sbm['end_time']
-            sbm_time = current_sbm['sbm_time']
-            sbm_l0_list = current_sbm['sbm_l0']
-            sbm_type = current_sbm.get('sbm_type', 'UNKNOWN')
-            sbm_qf = current_sbm.get('sbm_qf', 'UNKNOWN')
-            sbm_algo = current_sbm.get('sbm_algo', 'UNKNOWN')
-            sbm_duration = current_sbm.get('sbm_duration', 'UNKNOWN')
-            if sbm_type != 'UNKNOWN':
-                logger.info(f'Processing SBM{sbm_type} event detected '
-                            f"between {current_sbm['start_time']} and "
-                            f"{current_sbm['end_time']}... ({sbm_num - i} events remaining) [{self.job_id}]")
+            sbm_start_time = current_sbm["start_time"]
+            sbm_end_time = current_sbm["end_time"]
+            sbm_time = current_sbm["sbm_time"]
+            sbm_obt = current_sbm["sbm_obt"]
+            sbm_l0_list = current_sbm["sbm_l0"]
+            sbm_type = current_sbm.get("sbm_type", "UNKNOWN")
+            sbm_qf = current_sbm.get("sbm_qf", "UNKNOWN")
+            sbm_algo = current_sbm.get("sbm_algo", "UNKNOWN")
+            sbm_duration = current_sbm.get("sbm_duration", "UNKNOWN")
+            if sbm_type != "UNKNOWN":
+                logger.info(
+                    f"Processing SBM{sbm_type} event detected "
+                    f"between {current_sbm['start_time']} and "
+                    f"{current_sbm['end_time']}... ({sbm_num - i} events remaining) [{self.job_id}]"
+                )
 
             try:
                 # Generate L1 CDF from L0 files
-                l1_cdf_path = l0_to_trange_cdf(self, 'l0_to_l1_sbm',
-                                               sbm_l0_list, self.output_dir,
-                                               start_time=sbm_start_time,
-                                               end_time=sbm_end_time,
-                                               failed_files=self.failed_files,
-                                               processed_files=self.processed_files,
-                                               is_cdag=self.is_cdag,
-                                               overwrite=self.overwrite)
-            except:
-                logger.exception(f'L1 SBM CDF production has failed! [{self.job_id}]')
+                l1_cdf_path = l0_to_trange_cdf(
+                    self,
+                    "l0_to_l1_sbm",
+                    sbm_l0_list,
+                    self.output_dir,
+                    time_instance=self.time,
+                    start_time=sbm_start_time,
+                    end_time=sbm_end_time,
+                    failed_files=self.failed_files,
+                    processed_files=self.processed_files,
+                    is_cdag=self.is_cdag,
+                    overwrite=self.overwrite,
+                )
+            except Exception as e:
+                logger.exception(
+                    f"L1 SBM CDF production has failed! [{self.job_id}]:\n{e}"
+                )
             if not l1_cdf_path:
-                l1_cdf_path = [os.path.join(
-                    self.output_dir, '.l0_to_l1_sbm_failed')]
+                l1_cdf_path = [
+                    os.path.join(self.output_dir, ".l0_to_l1_sbm_failed")
+                ]
                 if l1_cdf_path[0] not in self.failed_files:
                     self.failed_files.append(l1_cdf_path[0])
                 return
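A note on the manual branch shown above: with --manual the task has no DT?_SBM? durations to work with, so the event time is simply placed at the centre of the requested window. A standalone illustration of that arithmetic, with hypothetical dates (not package code):

# Centre of a [start_time, end_time] window, as used for the manual-mode "sbm_time".
from datetime import datetime

start_time = datetime(2024, 1, 1, 0, 0, 0)   # hypothetical --start-time
end_time = datetime(2024, 1, 1, 2, 0, 0)     # hypothetical --end-time

# datetime + float * timedelta is valid Python 3, so this lands halfway through.
sbm_time = start_time + 0.5 * (end_time - start_time)
print(sbm_time)  # 2024-01-01 01:00:00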
@@ -241,41 +255,43 @@ class L0ToL1Sbm(Task):
             if l1_cdf_path and os.path.isfile(l1_cdf_path[0]):
                 # Open CDF and add some extra information about SBM event
                 # parameters
-                logger.info(f'Filling {l1_cdf_path[0]} file with SBM event parameters... [{self.job_id}]')
+                logger.info(
+                    f"Filling {l1_cdf_path[0]} file with SBM event parameters... [{self.job_id}]"
+                )
                 cdf = None
                 try:
-                    cdf = CDF(l1_cdf_path[0])
-                    cdf.readonly(False)
-
-                    # Add QF, detection time, duration and algo type
-                    # as g. attributes
-                    # TODO - Add also in CDF skeletons
-                    cdf.attrs['SBM_QUALITY_FACTOR'] = str(sbm_qf)
-                    cdf.attrs['SBM_DURATION'] = str(sbm_duration)
-                    cdf.attrs['SBM_ALGO_TYPE'] = str(sbm_algo)
-                    cdf.attrs['SBM_TIME'] = sbm_time.strftime(
-                        TIME_JSON_STRFORMAT)
-
-                except:
+                    with CDF(l1_cdf_path[0]) as cdf:
+                        cdf.readonly(False)
+
+                        # Add QF, detection time, duration and algo type
+                        # as g. attributes
+                        # TODO - Add also in CDF skeletons
+                        cdf.attrs["SBM_QUALITY_FACTOR"] = str(sbm_qf)
+                        cdf.attrs["SBM_DURATION"] = str(sbm_duration)
+                        cdf.attrs["SBM_ALGO_TYPE"] = str(sbm_algo)
+                        cdf.attrs["SBM_TIME"] = sbm_time.strftime(TIME_JSON_STRFORMAT)
+                        cdf.attrs["SBM_OBT"] = f"{sbm_obt[0]}:{sbm_obt[1]}"
+                except Exception as e:
                     logger.exception(
-                        'Filling L1 SBM CDF with SBM parameter has failed! [{self.job_id}]')
+                        f"Filling L1 SBM CDF with SBM parameter has failed! [{self.job_id}]:\n{e}"
+                    )
                     if l1_cdf_path[0] not in self.failed_files:
                         self.failed_files.append(l1_cdf_path[0])
-
                 else:
-                    logger.info(f'{l1_cdf_path[0]} filled with SBM parameters [{self.job_id}]')
+                    logger.info(
+                        f"{l1_cdf_path[0]} filled with SBM parameters [{self.job_id}]"
+                    )
                     if l1_cdf_path[0] not in self.processed_files:
                         self.processed_files.append(l1_cdf_path[0])
-                finally:
-                    if cdf:
-                        cdf.close()
-
-    def _build_sbm_list(self, l0_file_list,
-                        start_time=None,
-                        end_time=None,
-                        no_sbm1=False,
-                        no_sbm2=False,
-                        ):
+
+    @staticmethod
+    def _build_sbm_list(
+        l0_file_list,
+        start_time=None,
+        end_time=None,
+        no_sbm1=False,
+        no_sbm2=False,
+    ):
         """
         Build list of SBM events to process
         from an input set of l0 files.
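The hunk above replaces the explicit open/close of the CDF with a context manager and writes one additional global attribute, SBM_OBT, holding the on-board time pair next to the existing SBM_TIME (UTC). A minimal sketch of reading these attributes back, assuming the CDF class imported by this module is spacepy.pycdf.CDF (the import is outside this hunk) and using a hypothetical product filename:

# Standalone sketch: read back the SBM global attributes written above.
from spacepy import pycdf

cdf_path = "solo_L1_rpw-sbm1-rswf-cdag_example.cdf"  # hypothetical product file
with pycdf.CDF(cdf_path) as cdf:
    for key in ("SBM_QUALITY_FACTOR", "SBM_DURATION", "SBM_ALGO_TYPE",
                "SBM_TIME", "SBM_OBT"):
        # cdf.attrs behaves like a mapping of global attributes
        print(key, cdf.attrs.get(key))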
@@ -285,7 +301,7 @@ class L0ToL1Sbm(Task):
         :param end_time: Filter data by end_time
         :param no_sbm1: If True, do not include SBM1 event in the output list
         :param no_sbm2: If True, do not include SBM1 event in the output list
-        :return: list of sbm events to process
+        :return: sbm events to process
         """
 
         # Initialize output list
@@ -293,91 +309,109 @@ class L0ToL1Sbm(Task):
 
         expected_packet_list = []
         if not no_sbm1:
-            expected_packet_list.append('TM_DPU_EVENT_PR_DPU_SBM1')
+            expected_packet_list.append("TM_DPU_EVENT_PR_DPU_SBM1")
         if not no_sbm2:
-            expected_packet_list.append('TM_DPU_EVENT_PR_DPU_SBM2')
+            expected_packet_list.append("TM_DPU_EVENT_PR_DPU_SBM2")
 
         if not expected_packet_list:
-            logger.warning('--no-sbm1 and --no-sbm2 keywords '
-                           'should not be passed together!')
+            logger.warning(
+                "--no-sbm1 and --no-sbm2 keywords should not be passed together!"
+            )
         else:
             # Extract wanted packets and re-order by increasing time
-            sbm_packet_list = L0.l0_to_raw(l0_file_list,
-                                           expected_packet_list=expected_packet_list,
-                                           start_time=start_time,
-                                           end_time=end_time,
-                                           increasing_time=True,
-                                           )['packet_list']
+            sbm_packet_list = L0.l0_to_raw(
+                l0_file_list,
+                expected_packet_list=expected_packet_list,
+                start_time=start_time,
+                end_time=end_time,
+                increasing_time=True,
+            )["packet_list"]
             if sbm_packet_list:
-
                 for current_packet in sbm_packet_list:
+                    # current_time = current_packet['utc_time']
+                    current_name = current_packet["palisade_id"]
+                    current_data = current_packet["data"]
 
-                    current_time = current_packet['utc_time']
-                    current_name = current_packet['palisade_id']
-                    current_data = current_packet['data']
-
-                    current_idb_source = current_packet['idb_source']
-                    current_idb_version = current_packet['idb_version']
+                    current_idb_source = current_packet["idb_source"]
+                    current_idb_version = current_packet["idb_version"]
 
                     # Get SBM event parameters
                     current_sbm_type = int(current_name[-1])
+
+                    # Get SBM detection time (Onboard time in CCSDS CUC format)
+                    current_sbm_obt = current_data[
+                        f"HK_RPW_S20_SBM{current_sbm_type}_TIME_D"
+                    ][:2].reshape([1, 2])[0]
+
                     # Get SBM detection time (UTC)
                     current_sbm_time = Time().obt_to_utc(
-                        current_data[f'HK_RPW_S20_SBM{current_sbm_type}_TIME_D'][:2].reshape([1, 2]),
-                        to_datetime=True)[0]
+                        current_sbm_obt, to_datetime=True
+                    )[0]
 
                     # Get algo
-                    current_sbm_algo = current_data[f'SY_DPU_SBM{current_sbm_type}_ALGO']
+                    current_sbm_algo = current_data[
+                        f"SY_DPU_SBM{current_sbm_type}_ALGO"
+                    ]
 
                     # Get SBM duration
                     # (see SSS or DAS User manual for details)
                     if current_sbm_type == 1:
-                        current_sbm_dt1_sbm1 = current_data[
-                            'SY_DPU_SBM1_DT1_SBM1_D']
-                        current_sbm_dt2_sbm1 = current_data[
-                            'SY_DPU_SBM1_DT2_SBM1_D']
-                        current_sbm_dt3_sbm1 = current_data[
-                            'SY_DPU_SBM1_DT3_SBM1_D']
-                        current_sbm_qf = sbm1_qf_eng(current_data['HK_RPW_S20_SBM1_QF_D'],
-                                                     idb_source=current_idb_source,
-                                                     idb_version=current_idb_version)
+                        current_sbm_dt1_sbm1 = current_data["SY_DPU_SBM1_DT1_SBM1_D"]
+                        current_sbm_dt2_sbm1 = current_data["SY_DPU_SBM1_DT2_SBM1_D"]
+                        current_sbm_dt3_sbm1 = current_data["SY_DPU_SBM1_DT3_SBM1_D"]
+                        current_sbm_qf = sbm1_qf_eng(
+                            current_data["HK_RPW_S20_SBM1_QF_D"],
+                            idb_source=current_idb_source,
+                            idb_version=current_idb_version,
+                        )
                         logger.debug(
-                            f'Current SBM1 event parameters: [{current_sbm_dt1_sbm1}, {current_sbm_dt2_sbm1}, {current_sbm_dt3_sbm1}, {current_sbm_qf}]')
+                            f"Current SBM1 event parameters: [{current_sbm_dt1_sbm1}, {current_sbm_dt2_sbm1}, {current_sbm_dt3_sbm1}, {current_sbm_qf}]"
+                        )
                         # Set SBM1 duration
                         current_sbm_duration = int(current_sbm_dt2_sbm1)
                         # Get SBM1 start/end time (UTC)
                         if current_sbm_dt2_sbm1 < 2 * current_sbm_dt1_sbm1:
-                            current_sbm_end = current_sbm_time + \
-                                timedelta(seconds=int(
-                                    current_sbm_dt1_sbm1))
-                            current_sbm_start = current_sbm_end - \
-                                timedelta(seconds=int(current_sbm_duration))
+                            current_sbm_end = current_sbm_time + timedelta(
+                                seconds=int(current_sbm_dt1_sbm1)
+                            )
+                            current_sbm_start = current_sbm_end - timedelta(
+                                seconds=int(current_sbm_duration)
+                            )
                         elif current_sbm_dt2_sbm1 > 2 * current_sbm_dt1_sbm1:
-                            current_sbm_end = current_sbm_time + \
-                                timedelta(
-                                    seconds=int(current_sbm_dt1_sbm1 + current_sbm_dt3_sbm1))
-                            current_sbm_start = current_sbm_end - \
-                                timedelta(seconds=current_sbm_duration)
+                            current_sbm_end = current_sbm_time + timedelta(
+                                seconds=int(current_sbm_dt1_sbm1 + current_sbm_dt3_sbm1)
+                            )
+                            current_sbm_start = current_sbm_end - timedelta(
+                                seconds=current_sbm_duration
+                            )
                         else:
                             current_sbm_start = current_sbm_time - timedelta(
-                                seconds=(current_sbm_duration / 2))
+                                seconds=(current_sbm_duration / 2)
+                            )
                             current_sbm_end = current_sbm_time + timedelta(
-                                seconds=(current_sbm_duration / 2))
+                                seconds=(current_sbm_duration / 2)
+                            )
 
                     elif current_sbm_type == 2:
-                        current_sbm_duration = current_data[
-                            'HK_DPU_SBM2_DT_SBM2']
-                        current_sbm_qf = sbm2_qf_eng(current_data['HK_RPW_S20_SBM2_QF_D'],
-                                                     idb_source=current_idb_source,
-                                                     idb_version=current_idb_version)
+                        current_sbm_duration = current_data["HK_DPU_SBM2_DT_SBM2"]
+                        current_sbm_qf = sbm2_qf_eng(
+                            current_data["HK_RPW_S20_SBM2_QF_D"],
+                            idb_source=current_idb_source,
+                            idb_version=current_idb_version,
+                        )
                         # Get SBM2 start/end time (UTC)
                         current_sbm_start = current_sbm_time
                         current_sbm_end = current_sbm_time + timedelta(
-                            seconds=(int(current_sbm_duration) + 1))
+                            seconds=(int(current_sbm_duration) + 1)
+                        )
 
-                        logger.debug(f'Current SBM2 event parameters: [{current_sbm_duration}, {current_sbm_qf}]')
+                        logger.debug(
+                            f"Current SBM2 event parameters: [{current_sbm_duration}, {current_sbm_qf}]"
+                        )
                     else:
-                        logger.error(f'Wrong SBM type: {current_sbm_type}! (should be 1 or 2)')
+                        logger.error(
+                            f"Wrong SBM type: {current_sbm_type}! (should be 1 or 2)"
+                        )
                         continue
 
                     # Extend start_time/end_time by 1 minutes
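Two details of the hunk above are easy to miss: the detection time is now kept as the raw on-board time pair (current_sbm_obt) before conversion to UTC, and the SBM1 start/end window depends on how DT2 compares with 2 * DT1. The standalone sketch below restates both with hypothetical values; it is an illustration, not package code:

# Standalone sketch with hypothetical values.
from datetime import datetime, timedelta

import numpy as np

# 1) OBT handling: the first two elements of HK_RPW_S20_SBM?_TIME_D are kept
#    (conventionally the coarse and fine CCSDS CUC counters).
hk_time_d = np.array([699822345, 43210, 0])      # hypothetical packet field
sbm_obt = hk_time_d[:2].reshape([1, 2])[0]       # same slicing as in the task
print(f"{sbm_obt[0]}:{sbm_obt[1]}")              # "699822345:43210", the SBM_OBT format


# 2) SBM1 window: DT2 is taken as the event duration; window placement follows
#    the three branches shown in the hunk above.
def sbm1_window(sbm_time, dt1, dt2, dt3):
    duration = int(dt2)
    if dt2 < 2 * dt1:
        end = sbm_time + timedelta(seconds=int(dt1))
        start = end - timedelta(seconds=duration)
    elif dt2 > 2 * dt1:
        end = sbm_time + timedelta(seconds=int(dt1 + dt3))
        start = end - timedelta(seconds=duration)
    else:
        start = sbm_time - timedelta(seconds=duration / 2)
        end = sbm_time + timedelta(seconds=duration / 2)
    return start, end


print(sbm1_window(datetime(2024, 1, 1, 12, 0, 0), dt1=300, dt2=900, dt3=300))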
@@ -389,25 +423,30 @@
                     current_sbm_l0 = L0.filter_l0_files(
                         l0_file_list,
                         start_time=current_sbm_start,
-                        end_time=current_sbm_end)
+                        end_time=current_sbm_end,
+                    )
 
                     # add current event to the list of events to return
                     sbm_event_list.append(
-                        {'start_time': current_sbm_start,
-                         'end_time': current_sbm_end,
-                         'sbm_time': current_sbm_time,
-                         'sbm_type': current_sbm_type,
-                         'sbm_duration': current_sbm_duration,
-                         'sbm_algo': current_sbm_algo,
-                         'sbm_qf': current_sbm_qf,
-                         'sbm_l0': current_sbm_l0}
+                        {
+                            "start_time": current_sbm_start,
+                            "end_time": current_sbm_end,
+                            "sbm_time": current_sbm_time,
+                            "sbm_obt": current_sbm_obt,
+                            "sbm_type": current_sbm_type,
+                            "sbm_duration": current_sbm_duration,
+                            "sbm_algo": current_sbm_algo,
+                            "sbm_qf": current_sbm_qf,
+                            "sbm_l0": current_sbm_l0,
+                        }
                     )
 
         return sbm_event_list
 
 
-def sbm1_qf_eng(raw_values, tf_srdb_id=TF_PA_DPU_0038,
-                idb_source='MIB', idb_version=None):
+def sbm1_qf_eng(
+    raw_values, tf_srdb_id=TF_PA_DPU_0038, idb_source="MIB", idb_version=None
+):
     """
     Retrieve engineering values of the SBM1 event quality factor
 
@@ -417,12 +456,14 @@ def sbm1_qf_eng(raw_values, tf_srdb_id=TF_PA_DPU_0038,
     :param idb_version:
     :return: engineering values of SBM1 QF
     """
-    return raw_to_eng(raw_values, tf_srdb_id,
-                      idb_source=idb_source, idb_version=idb_version)
+    return raw_to_eng(
+        raw_values, tf_srdb_id, idb_source=idb_source, idb_version=idb_version
+    )
 
 
-def sbm2_qf_eng(raw_values, tf_srdb_id=TF_PA_DPU_0039,
-                idb_source='MIB', idb_version=None):
+def sbm2_qf_eng(
+    raw_values, tf_srdb_id=TF_PA_DPU_0039, idb_source="MIB", idb_version=None
+):
     """
     Retrieve engineering values of the SBM2 event quality factor
 
@@ -432,5 +473,6 @@ def sbm2_qf_eng(raw_values, tf_srdb_id=TF_PA_DPU_0039,
     :param idb_version:
     :return: engineering values of SBM1 QF
     """
-    return raw_to_eng(raw_values, tf_srdb_id,
-                      idb_source=idb_source, idb_version=idb_version)
+    return raw_to_eng(
+        raw_values, tf_srdb_id, idb_source=idb_source, idb_version=idb_version
+    )