roc-film 1.13.5-py3-none-any.whl → 1.14.0-py3-none-any.whl

This diff compares the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (52)
  1. roc/__init__.py +2 -1
  2. roc/film/__init__.py +2 -2
  3. roc/film/commands.py +372 -323
  4. roc/film/config/__init__.py +0 -1
  5. roc/film/constants.py +101 -65
  6. roc/film/descriptor.json +127 -96
  7. roc/film/exceptions.py +28 -27
  8. roc/film/tasks/__init__.py +16 -16
  9. roc/film/tasks/cat_solo_hk.py +86 -74
  10. roc/film/tasks/cdf_postpro.py +438 -309
  11. roc/film/tasks/check_dds.py +39 -45
  12. roc/film/tasks/db_to_anc_bia_sweep_table.py +381 -0
  13. roc/film/tasks/dds_to_l0.py +232 -180
  14. roc/film/tasks/export_solo_coord.py +147 -0
  15. roc/film/tasks/file_handler.py +91 -75
  16. roc/film/tasks/l0_to_hk.py +117 -103
  17. roc/film/tasks/l0_to_l1_bia_current.py +38 -30
  18. roc/film/tasks/l0_to_l1_bia_sweep.py +417 -329
  19. roc/film/tasks/l0_to_l1_sbm.py +250 -208
  20. roc/film/tasks/l0_to_l1_surv.py +185 -130
  21. roc/film/tasks/make_daily_tm.py +40 -37
  22. roc/film/tasks/merge_tcreport.py +77 -71
  23. roc/film/tasks/merge_tmraw.py +101 -88
  24. roc/film/tasks/parse_dds_xml.py +21 -20
  25. roc/film/tasks/set_l0_utc.py +51 -49
  26. roc/film/tests/cdf_compare.py +565 -0
  27. roc/film/tests/hdf5_compare.py +84 -62
  28. roc/film/tests/test_dds_to_l0.py +93 -51
  29. roc/film/tests/test_dds_to_tc.py +8 -11
  30. roc/film/tests/test_dds_to_tm.py +8 -10
  31. roc/film/tests/test_film.py +161 -116
  32. roc/film/tests/test_l0_to_hk.py +64 -36
  33. roc/film/tests/test_l0_to_l1_bia.py +10 -14
  34. roc/film/tests/test_l0_to_l1_sbm.py +14 -19
  35. roc/film/tests/test_l0_to_l1_surv.py +68 -41
  36. roc/film/tests/test_metadata.py +21 -20
  37. roc/film/tests/tests.py +743 -396
  38. roc/film/tools/__init__.py +5 -5
  39. roc/film/tools/dataset_tasks.py +34 -2
  40. roc/film/tools/file_helpers.py +390 -269
  41. roc/film/tools/l0.py +402 -324
  42. roc/film/tools/metadata.py +147 -127
  43. roc/film/tools/skeleton.py +12 -17
  44. roc/film/tools/tools.py +109 -92
  45. roc/film/tools/xlsx2skt.py +161 -139
  46. {roc_film-1.13.5.dist-info → roc_film-1.14.0.dist-info}/LICENSE +127 -125
  47. roc_film-1.14.0.dist-info/METADATA +60 -0
  48. roc_film-1.14.0.dist-info/RECORD +50 -0
  49. {roc_film-1.13.5.dist-info → roc_film-1.14.0.dist-info}/WHEEL +1 -1
  50. roc/film/tasks/l0_to_anc_bia_sweep_table.py +0 -348
  51. roc_film-1.13.5.dist-info/METADATA +0 -120
  52. roc_film-1.13.5.dist-info/RECORD +0 -48
roc/film/tasks/dds_to_l0.py
@@ -14,8 +14,13 @@ from poppy.core.db.connector import Connector
 
  from roc.film.tools.file_helpers import generate_filepath, get_output_dir, is_output_dir
  from roc.film.tools.metadata import init_l0_meta, get_spice_kernels
- from roc.film.constants import TIME_ISO_STRFORMAT, CHUNK_SIZE, SCOS_HEADER_BYTES, \
- DATA_VERSION, TC_ACK_ALLOWED_STATUS
+ from roc.film.constants import (
+ TIME_ISO_STRFORMAT,
+ CHUNK_SIZE,
+ SCOS_HEADER_BYTES,
+ DATA_VERSION,
+ TC_ACK_ALLOWED_STATUS,
+ )
 
  from roc.film.exceptions import L0ProdFailure
 
@@ -30,7 +35,7 @@ from roc.dingo.models.packet import InvalidPacketLog
  from roc.dingo.tools import get_or_create_in_db
  from roc.dingo.constants import PIPELINE_DATABASE
 
- __all__ = ['DdsToL0']
+ __all__ = ["DdsToL0"]
 
 
  class DdsToL0(Task):
@@ -38,30 +43,33 @@ class DdsToL0(Task):
  Make the RPW L0 data file from RPW DDS files.
  The L0 writing is perform by chunk of packets.
  """
- plugin_name = 'roc.film'
- name = 'dds_to_l0'
+
+ plugin_name = "roc.film"
+ name = "dds_to_l0"
 
  def add_targets(self):
- self.add_input(target_class=FileTarget,
- identifier='dds_tmraw_xml',
- many=True,
- filepath=self.get_dds_tmraw_xml())
- self.add_input(target_class=FileTarget,
- identifier='dds_tcreport_xml',
- many=True,
- filepath=self.get_dds_tcreport_xml())
- self.add_output(target_class=FileTarget,
- identifier='l0_file')
+ self.add_input(
+ target_class=FileTarget,
+ identifier="dds_tmraw_xml",
+ many=True,
+ filepath=self.get_dds_tmraw_xml(),
+ )
+ self.add_input(
+ target_class=FileTarget,
+ identifier="dds_tcreport_xml",
+ many=True,
+ filepath=self.get_dds_tcreport_xml(),
+ )
+ self.add_output(target_class=FileTarget, identifier="l0_file")
 
  def get_dds_tmraw_xml(self):
- return self.pipeline.get('dds_tmraw_xml', default=[])
+ return self.pipeline.get("dds_tmraw_xml", default=[])
 
  def get_dds_tcreport_xml(self):
- return self.pipeline.get('dds_tcreport_xml', default=[])
+ return self.pipeline.get("dds_tcreport_xml", default=[])
 
  @Connector.if_connected(PIPELINE_DATABASE)
  def setup_inputs(self):
-
  # Import external Tasks, classes and methods (if any)
  from roc.rpl.packet_parser import PacketParser, palisade_metadata
  from roc.rpl.time import Time
@@ -71,20 +79,23 @@ class DdsToL0(Task):
 
  # Pass input arguments for the Time instance
  self.time_instance.kernel_date = self.pipeline.get(
- 'kernel_date', default=None, args=True)
+ "kernel_date", default=None, args=True
+ )
  self.time_instance.predictive = self.pipeline.get(
- 'predictive', default=True, args=True)
+ "predictive", default=True, args=True
+ )
  self.time_instance.no_spice = self.pipeline.get(
- 'no_spice', default=False, args=True)
+ "no_spice", default=False, args=True
+ )
 
  # Load SPICE kernels
  if not self.time_instance.spice:
- logger.error('Cannot load SPICE kernels for the current task!')
+ logger.error("Cannot load SPICE kernels for the current task!")
  return False
 
  # Get IDB inputs
- self.idb_version = self.pipeline.get('idb_version', default=[None])[0]
- self.idb_source = self.pipeline.get('idb_source', default=[None])[0]
+ self.idb_version = self.pipeline.get("idb_version", default=[None])[0]
+ self.idb_source = self.pipeline.get("idb_source", default=[None])[0]
 
  # Initialize PacketParser instance
  self.PacketParser = PacketParser
@@ -93,22 +104,22 @@ class DdsToL0(Task):
  self.dds_file_list = []
 
  # Get input DDS TmRaw files
- tmraw_files = self.inputs['dds_tmraw_xml'].filepath
+ tmraw_files = self.inputs["dds_tmraw_xml"].filepath
  if tmraw_files:
  tmraw_file_num = len(tmraw_files)
- logger.info(f'{tmraw_file_num} DDS TmRaw XML files to process')
+ logger.info(f"{tmraw_file_num} DDS TmRaw XML files to process")
  self.dds_file_list.extend(tmraw_files)
  else:
- logger.info('No DDS TmRaw XML file to process')
+ logger.info("No DDS TmRaw XML file to process")
 
  # Get input DDS TcReport files
- tcreport_files = self.inputs['dds_tcreport_xml'].filepath
+ tcreport_files = self.inputs["dds_tcreport_xml"].filepath
  if tcreport_files:
  tcreport_files_num = len(tcreport_files)
- logger.info(f'{tcreport_files_num} DDS TcReport XML files to process')
+ logger.info(f"{tcreport_files_num} DDS TcReport XML files to process")
  self.dds_file_list.extend(tcreport_files)
  else:
- logger.info('No DDS TcReport XML file to process')
+ logger.info("No DDS TcReport XML file to process")
 
  # If no DDS file, exit
  self.dds_file_num = len(self.dds_file_list)
@@ -116,63 +127,67 @@ class DdsToL0(Task):
  return False
 
  # Get chunk size
- self.chunk_size = self.pipeline.get('chunk', default=CHUNK_SIZE)
+ self.chunk_size = self.pipeline.get("chunk", default=CHUNK_SIZE)
 
  # Get products directory (folder where final output files will be
  # moved)
- self.products_dir = self.pipeline.get('products_dir',
- default=[None], args=True)[0]
+ self.products_dir = self.pipeline.get(
+ "products_dir", default=[None], args=True
+ )[0]
 
  # Get force optional keyword
- self.force = self.pipeline.get('force', default=False, args=True)
+ self.force = self.pipeline.get("force", default=False, args=True)
 
  # Get/create list of well processed DDS files
  self.processed_dds_files = self.pipeline.get(
- 'processed_dds_files', default=[], create=True)
+ "processed_dds_files", default=[], create=True
+ )
  # Get/create list of failed DDS files
  self.failed_dds_files = self.pipeline.get(
- 'failed_dds_files', default=[], create=True)
+ "failed_dds_files", default=[], create=True
+ )
 
  # Get/create list of well processed L0 files
  self.processed_files = self.pipeline.get(
- 'processed_files', default=[], create=True)
+ "processed_files", default=[], create=True
+ )
  # Get/create list of failed DDS files
- self.failed_files = self.pipeline.get(
- 'failed_files', default=[], create=True)
+ self.failed_files = self.pipeline.get("failed_files", default=[], create=True)
 
  # get data_version keyword (if passed)
  self.data_version = valid_data_version(
- self.pipeline.get('data_version', default=[DATA_VERSION])[0])
+ self.pipeline.get("data_version", default=[DATA_VERSION])[0]
+ )
 
  # Get scos header size to remove
  self.scos_header = self.pipeline.get(
- 'scos_header', default=[SCOS_HEADER_BYTES])[0]
+ "scos_header", default=[SCOS_HEADER_BYTES]
+ )[0]
 
- if self.idb_source and self.idb_source == 'PALISADE':
+ if self.idb_source and self.idb_source == "PALISADE":
  palisade_version = self.idb_version
  else:
  palisade_version = None
 
  # Get output dir
  self.output_dir = get_output_dir(self.pipeline)
- if not is_output_dir(self.output_dir,
- products_dir=self.products_dir):
- logger.debug(f'Making {self.output_dir}')
+ if not is_output_dir(self.output_dir, products_dir=self.products_dir):
+ logger.debug(f"Making {self.output_dir}")
  os.makedirs(self.output_dir)
  else:
- logger.info(f'Output files will be '
- f'saved into existing folder {self.output_dir}')
+ logger.info(
+ f"Output files will be saved into existing folder {self.output_dir}"
+ )
 
  # Get palisade metadata
- self.palisade_metadata = palisade_metadata(
- palisade_version=palisade_version)
+ self.palisade_metadata = palisade_metadata(palisade_version=palisade_version)
 
  # Get start_time/end_time
- self.start_time = self.pipeline.get('start_time', default=[None])[0]
- self.end_time = self.pipeline.get('end_time', default=[None])[0]
+ self.start_time = self.pipeline.get("start_time", default=[None])[0]
+ self.end_time = self.pipeline.get("end_time", default=[None])[0]
 
  # Get --cdag keyword
- self.is_cdag = self.pipeline.get('cdag', default=False, create=True)
+ self.is_cdag = self.pipeline.get("cdag", default=False, create=True)
 
  # get a database session
  self.session = Connector.manager[PIPELINE_DATABASE].session
@@ -180,61 +195,64 @@ class DdsToL0(Task):
  return True
 
  def run(self):
-
  # Define task job ID (long and short)
  self.job_uuid = str(uuid.uuid4())
- self.job_id = f'DdsToL0-{self.job_uuid[:8]}'
- logger.info(f'Task {self.job_id} is starting')
+ self.job_id = f"DdsToL0-{self.job_uuid[:8]}"
+ logger.info(f"Task {self.job_id} is starting")
  try:
  # Initialize task inputs
  self.setup_inputs()
  except Exception:
- logger.exception(f'Initializing inputs has failed for task {self.job_id}!')
+ logger.exception(f"Initializing inputs has failed for task {self.job_id}!")
  self.pipeline.exit()
  return
 
  # get L0 metadata
- logger.debug(f'Building output L0 file path... [{self.job_id}]')
- extra_attrs = {'Data_version': self.data_version}
- metadata = init_l0_meta(self,
- extra_attrs=extra_attrs)
+ logger.debug(f"Building output L0 file path... [{self.job_id}]")
+ extra_attrs = {"Data_version": self.data_version}
+ metadata = init_l0_meta(self, extra_attrs=extra_attrs)
 
  # Generate output filepath
- l0_file = generate_filepath(
- self, metadata, '.h5', is_cdag=self.is_cdag)
- logger.info(f'Packet data will be saved into {l0_file} [{self.job_id}]')
+ l0_file = generate_filepath(self, metadata, ".h5", is_cdag=self.is_cdag)
+ logger.info(f"Packet data will be saved into {l0_file} [{self.job_id}]")
 
  # Add some metadata
- metadata['Generation_date'] = datetime.utcnow().isoformat()
- metadata['File_ID'] = str(uuid.uuid4())
+ metadata["Generation_date"] = datetime.utcnow().isoformat()
+ metadata["File_ID"] = str(uuid.uuid4())
 
  if self.start_time:
- metadata['TIME_MIN'] = self.start_time.strftime(TIME_ISO_STRFORMAT)
+ metadata["TIME_MIN"] = self.start_time.strftime(TIME_ISO_STRFORMAT)
 
  if self.end_time:
- metadata['TIME_MAX'] = self.end_time.strftime(TIME_ISO_STRFORMAT)
+ metadata["TIME_MAX"] = self.end_time.strftime(TIME_ISO_STRFORMAT)
 
  # Add SPICE SCLK kernel as an entry
  # of the "Kernels" g. attr
- sclk_file = get_spice_kernels(time_instance=self.time_instance,
- pattern='solo_ANC_soc-sclk')
+ sclk_file = get_spice_kernels(
+ time_instance=self.time_instance, pattern="solo_ANC_soc-sclk"
+ )
  if sclk_file:
- metadata['SPICE_KERNELS'] = sclk_file[-1]
+ metadata["SPICE_KERNELS"] = sclk_file[-1]
  else:
- logger.warning('No SPICE SCLK kernel '
- f'saved for {l0_file} [{self.job_id}]')
+ logger.warning(
+ f"No SPICE SCLK kernel saved for {l0_file} [{self.job_id}]"
+ )
 
  # Get total number of packets
- logger.info(f'Getting total number of packets to process... [{self.job_id}]')
- dds_packet_num_list = [count_packets(
- dds_file) for dds_file in self.dds_file_list]
+ logger.info(f"Getting total number of packets to process... [{self.job_id}]")
+ dds_packet_num_list = [
+ count_packets(dds_file) for dds_file in self.dds_file_list
+ ]
  dds_total_packet_num = sum(dds_packet_num_list)
 
  logger.info(
- f'{dds_total_packet_num} packets in the {self.dds_file_num} input DDS files [{self.job_id}]')
+ f"{dds_total_packet_num} packets in the {self.dds_file_num} input DDS files [{self.job_id}]"
+ )
 
  if dds_total_packet_num == 0:
- logger.info(f'No packet to process, exit {DdsToL0.name} task [{self.job_id}]')
+ logger.info(
+ f"No packet to process, exit {DdsToL0.name} task [{self.job_id}]"
+ )
  return
 
  # Initialize some loop variables
@@ -242,13 +260,12 @@ class DdsToL0(Task):
 
  # Start loop over dds XML file
  for i, dds_file in enumerate(self.dds_file_list):
-
  # (Re)initialize loop variables
  dds_data = None
  packet_parser = None
 
  # Parse input RPW TM/TC DDS format file
- logger.info(f'Parsing {dds_file}...')
+ logger.info(f"Parsing {dds_file}...")
 
  try:
  dds_data = self._parse_dds(dds_file)
@@ -260,76 +277,88 @@ class DdsToL0(Task):
 
  packet_num = len(dds_data)
  if packet_num == 0:
- logger.info(f'No DDS TM/TC packet found in {dds_file} [{self.job_id}]')
+ logger.info(
+ f"No DDS TM/TC packet found in {dds_file} [{self.job_id}]"
+ )
  continue
 
  # Parse TM/TC packets (identify packets and extract parameter
  # data)
  try:
- logger.info(f'Extracting {dds_packet_num_list[i]} packets from {dds_file}... [{self.job_id}]')
+ logger.info(
+ f"Extracting {dds_packet_num_list[i]} packets from {dds_file}... [{self.job_id}]"
+ )
  packet_parser = self._parse_packet(dds_data)
  except Exception:
- logger.exception(f'Parsing current packet list has failed! [{self.job_id}]')
+ logger.exception(
+ f"Parsing current packet list has failed! [{self.job_id}]"
+ )
  continue
 
  # Get only valid packets
  valid_packets = self.PacketParser.packet_status(
- packet_parser.parsed_packets,
- status=VALID_PACKET)
+ packet_parser.parsed_packets, status=VALID_PACKET
+ )
  n_valid = len(valid_packets)
 
  # Get only invalid packets
  invalid_packets = self.PacketParser.packet_status(
- packet_parser.parsed_packets,
- status=VALID_PACKET, invert=True)
+ packet_parser.parsed_packets, status=VALID_PACKET, invert=True
+ )
  n_invalid = len(invalid_packets)
 
  if n_invalid > 0:
- logger.error(f'{n_invalid} invalid TM/TC packets found in {dds_file}! [{self.job_id}]')
+ logger.error(
+ f"{n_invalid} invalid TM/TC packets found in {dds_file}! [{self.job_id}]"
+ )
  try:
  self.invalid_to_db(invalid_packets)
  except Exception:
- logger.exception(f'Invalid packets cannot be inserted in the database! [{self.job_id}]')
+ logger.exception(
+ f"Invalid packets cannot be inserted in the database! [{self.job_id}]"
+ )
  raise L0ProdFailure
 
  # Check if valid packets are found
  if n_valid == 0:
- logger.info(f'No valid TM/TC packet found in {dds_file} [{self.job_id}]')
+ logger.info(
+ f"No valid TM/TC packet found in {dds_file} [{self.job_id}]"
+ )
  continue
  else:
- logger.info(f'{n_valid} valid TM/TC packets found in {dds_file} [{self.job_id}]')
+ logger.info(
+ f"{n_valid} valid TM/TC packets found in {dds_file} [{self.job_id}]"
+ )
 
  # Write metadata and packets into the L0 file
- L0().to_hdf5(l0_file,
- packet_parser=packet_parser,
- metadata=metadata)
+ L0().to_hdf5(l0_file, packet_parser=packet_parser, metadata=metadata)
 
  except L0ProdFailure:
- logger.exception('L0ProdFailure')
+ logger.exception("L0ProdFailure")
  self.failed_files.append(l0_file)
  break
  except Exception:
- logger.exception(f'Error when parsing {dds_file}! [{self.job_id}]')
+ logger.exception(f"Error when parsing {dds_file}! [{self.job_id}]")
  self.failed_dds_files.append(dds_file)
  else:
  self.processed_dds_files.append(dds_file)
 
  if os.path.isfile(l0_file) and l0_file not in self.failed_files:
-
  # Add final parent list as L0 root attribute
  # (Done at the end to make sure to
  # have also input DDS files with no packet in the parent list)
- metadata['Parents'] = ','.join(parent_list)
- L0().to_hdf5(l0_file,
- metadata=metadata)
+ metadata["Parents"] = ",".join(parent_list)
+ L0().to_hdf5(l0_file, metadata=metadata)
 
  # Sort packet datasets in L0 by ascending UTC Time
- logger.info(f'Sorting {l0_file} file by ascending packet creation time (UTC) [{self.job_id}]')
+ logger.info(
+ f"Sorting {l0_file} file by ascending packet creation time (UTC) [{self.job_id}]"
+ )
  L0.order_by_utc(l0_file, unique=True, update_time_minmax=True)
 
  # Set output target 'l0_file' filepath
  self.processed_files.append(l0_file)
- self.outputs['l0_file'].filepath = l0_file
+ self.outputs["l0_file"].filepath = l0_file
 
  def _parse_dds(self, dds_file):
  """
@@ -342,16 +371,18 @@ class DdsToL0(Task):
 
  output_list = []
 
- dds_data = xml_to_dict(dds_file)['ns2:ResponsePart']['Response']
+ dds_data = xml_to_dict(dds_file)["ns2:ResponsePart"]["Response"]
 
- if 'PktRawResponse' in dds_data:
- dds_data = dds_data['PktRawResponse']['PktRawResponseElement']
- dds_type = 'TM'
- elif 'PktTcReportResponse' in dds_data:
- dds_data = dds_data['PktTcReportResponse']['PktTcReportList']['PktTcReportListElement']
- dds_type = 'TC'
+ if "PktRawResponse" in dds_data:
+ dds_data = dds_data["PktRawResponse"]["PktRawResponseElement"]
+ dds_type = "TM"
+ elif "PktTcReportResponse" in dds_data:
+ dds_data = dds_data["PktTcReportResponse"]["PktTcReportList"][
+ "PktTcReportListElement"
+ ]
+ dds_type = "TC"
  else:
- logger.warning(f'Invalid input dds file {dds_file}')
+ logger.warning(f"Invalid input dds file {dds_file}")
  return output_list
 
  # Make sure that returned dds_data is a list
@@ -360,13 +391,15 @@ class DdsToL0(Task):
  if not isinstance(dds_data, list):
  dds_data = [dds_data]
 
- output_list = [self._build_packet_dict(current_packet, dds_type)
- for current_packet in dds_data]
+ output_list = [
+ self._build_packet_dict(current_packet, dds_type)
+ for current_packet in dds_data
+ ]
 
  # Remove wrong packets
- output_list = [current_packet
- for current_packet in output_list
- if current_packet]
+ output_list = [
+ current_packet for current_packet in output_list if current_packet
+ ]
 
  return output_list
 
@@ -386,88 +419,91 @@ class DdsToL0(Task):
  file_type = dds_file_type.upper()
 
  if not isinstance(packet, dict):
- logger.error(f'Problem with packet: {packet}')
+ logger.error(f"Problem with packet: {packet}")
  return {}
 
- if file_type == 'TC':
-
+ if file_type == "TC":
  # Get packet SRDB id
- srdb_id = packet.get('CommandName', None)
+ srdb_id = packet.get("CommandName", None)
  if srdb_id is None:
- logger.error('CommandName not defined!')
+ logger.error("CommandName not defined!")
  return {}
 
  # Get corresponding PALISADE ID
  try:
- palisade_id = self.palisade_metadata[srdb_id]['palisade_id']
+ palisade_id = self.palisade_metadata[srdb_id]["palisade_id"]
  except Exception:
- logger.error(f'palisade_id not found for {srdb_id}')
+ logger.error(f"palisade_id not found for {srdb_id}")
  return {}
 
  # Get corresponding packet category
  try:
- packet_category = self.palisade_metadata[srdb_id]['packet_category']
+ packet_category = self.palisade_metadata[srdb_id]["packet_category"]
  except Exception:
- logger.error(f'packet_category not found for {srdb_id}')
+ logger.error(f"packet_category not found for {srdb_id}")
  return {}
 
  try:
- utc_time = datetime.strptime(packet.get(
- 'ExecutionTime'), TIME_ISO_STRFORMAT)
+ utc_time = datetime.strptime(
+ packet.get("ExecutionTime"), TIME_ISO_STRFORMAT
+ )
  except Exception:
  utc_time = INVALID_UTC_DATETIME
 
  # Get ack execution completion status
  # If Playback (routine) ...
- ack_exe_state = packet.get('ExecCompPBState', 'UNKNOWN')
+ ack_exe_state = packet.get("ExecCompPBState", "UNKNOWN")
  if ack_exe_state not in TC_ACK_ALLOWED_STATUS:
  # If realtime downlink (e.g., commissioning) ...
- ack_exe_state = packet.get('ExecCompState', 'UNKNOWN')
+ ack_exe_state = packet.get("ExecCompState", "UNKNOWN")
 
  # Get ack acceptation completion status
  # If Playback (routine) ...
- ack_acc_state = packet.get('OnBoardAccPBState', 'UNKNOWN')
+ ack_acc_state = packet.get("OnBoardAccPBState", "UNKNOWN")
  if ack_acc_state not in TC_ACK_ALLOWED_STATUS:
  # If realtime downlink (e.g., commissioning) ...
- ack_acc_state = packet.get('OnBoardAccState', 'UNKNOWN')
+ ack_acc_state = packet.get("OnBoardAccState", "UNKNOWN")
 
  try:
- unique_id = 'UNKNOWN'
- for i, field in enumerate(packet['CustomField']):
- if field['FieldName'] == 'uniqueID':
- unique_id = packet['CustomField'][i]['Value']
+ unique_id = "UNKNOWN"
+ for i, field in enumerate(packet["CustomField"]):
+ if field["FieldName"] == "uniqueID":
+ unique_id = packet["CustomField"][i]["Value"]
  break
  except Exception:
- unique_id = 'UNKNOWN'
+ unique_id = "UNKNOWN"
 
  # Only keep "PASSED" and "FAILED" exe status in L0
  if ack_exe_state in TC_ACK_ALLOWED_STATUS:
  # Build dictionary for the current packet
- packet_dict = {'binary': packet.get('RawBodyData', None),
- 'srdb_id': srdb_id,
- 'palisade_id': palisade_id,
- 'descr': packet.get('Description', None),
- 'category': packet_category,
- 'type': 'TC',
- 'utc_time': utc_time,
- 'ack_exe_state': ack_exe_state,
- 'ack_acc_state': ack_acc_state,
- 'sequence_name': packet.get('SequenceName', None),
- 'unique_id': unique_id,
- 'release_state': packet.get('ReleaseState', 'UNKNOWN'),
- 'release_time': packet.get('ReleaseTime', 'UNKNOWN'),
- 'ground_state': packet.get('GroundState', 'UNKNOWN'),
- 'uplink_state': packet.get('UplinkState', 'UNKNOWN'),
- 'uplink_time': packet.get('UplinkTime', 'UNKNOWN'),
- 'onboard_state': packet.get('OnBoardState', 'UNKNOWN'),
- }
- elif file_type == 'TM':
- packet_dict = {'type': 'TM',
- 'srdb_id': None,
- 'palisade_id': None,
- 'binary': packet['Packet']}
+ packet_dict = {
+ "binary": packet.get("RawBodyData", None),
+ "srdb_id": srdb_id,
+ "palisade_id": palisade_id,
+ "descr": packet.get("Description", None),
+ "category": packet_category,
+ "type": "TC",
+ "utc_time": utc_time,
+ "ack_exe_state": ack_exe_state,
+ "ack_acc_state": ack_acc_state,
+ "sequence_name": packet.get("SequenceName", None),
+ "unique_id": unique_id,
+ "release_state": packet.get("ReleaseState", "UNKNOWN"),
+ "release_time": packet.get("ReleaseTime", "UNKNOWN"),
+ "ground_state": packet.get("GroundState", "UNKNOWN"),
+ "uplink_state": packet.get("UplinkState", "UNKNOWN"),
+ "uplink_time": packet.get("UplinkTime", "UNKNOWN"),
+ "onboard_state": packet.get("OnBoardState", "UNKNOWN"),
+ }
+ elif file_type == "TM":
+ packet_dict = {
+ "type": "TM",
+ "srdb_id": None,
+ "palisade_id": None,
+ "binary": packet["Packet"],
+ }
  else:
- logger.warning(f'Unknown dds file type: {file_type}')
+ logger.warning(f"Unknown dds file type: {file_type}")
  packet_dict = {}
 
  return packet_dict
@@ -493,10 +529,12 @@ class DdsToL0(Task):
  parser.extract_error.connect(self.exception)
 
  # Analyse input RPW TM/TC packets
- parser.parse_packets(packet_list,
- start_time=self.start_time,
- end_time=self.end_time,
- valid_only=False)
+ parser.parse_packets(
+ packet_list,
+ start_time=self.start_time,
+ end_time=self.end_time,
+ valid_only=False,
+ )
 
  return parser
 
@@ -511,40 +549,54 @@ class DdsToL0(Task):
  for current_packet in invalid_packets:
  new_entry = dict()
  # Compute specific SHA255 for invalid packet
- new_entry['sha'] = self.PacketParser.get_packet_sha(current_packet)
+ new_entry["sha"] = self.PacketParser.get_packet_sha(current_packet)
 
  # Get palisade_id, srdb_id, apid and utc_time (if known)
- new_entry['palisade_id'] = current_packet.get('palisade_id', None)
- new_entry['srdb_id'] = current_packet.get('srdb_id', None)
- new_entry['apid'] = current_packet.get('apid', None)
- new_entry['utc_time'] = current_packet.get('utc_time', None)
+ new_entry["palisade_id"] = current_packet.get("palisade_id", None)
+ new_entry["srdb_id"] = current_packet.get("srdb_id", None)
+ new_entry["apid"] = current_packet.get("apid", None)
+ new_entry["utc_time"] = current_packet.get("utc_time", None)
 
  # Get status and comment
- new_entry['status'] = current_packet['status']
- new_entry['comment'] = current_packet['comment']
+ new_entry["status"] = current_packet["status"]
+ new_entry["comment"] = current_packet["comment"]
 
  # Store packet data
- new_entry['data'] = {key: val
- for key, val in current_packet.items()
- if key not in new_entry.keys()}
+ new_entry["data"] = {
+ key: val
+ for key, val in current_packet.items()
+ if key not in new_entry.keys()
+ }
 
  # Modify header and data_header content (to be writable in JSONB format)
- new_entry['data']['header'] = str(new_entry['data']['header'].to_dict())
- new_entry['data']['data_header'] = str(new_entry['data']['data_header'].to_dict())
+ new_entry["data"]["header"] = str(new_entry["data"]["header"].to_dict())
+ new_entry["data"]["data_header"] = str(
+ new_entry["data"]["data_header"].to_dict()
+ )
 
  # Set insertion time
- new_entry['insert_time'] = datetime.today()
+ new_entry["insert_time"] = datetime.today()
 
  # Insert new entry
- job, done, created = get_or_create_in_db(self.session, InvalidPacketLog, new_entry,
- kwargs={'sha': new_entry['sha']})
+ job, done, created = get_or_create_in_db(
+ self.session,
+ InvalidPacketLog,
+ new_entry,
+ kwargs={"sha": new_entry["sha"]},
+ )
  if done:
  if created:
- logger.info(f'New entry in database for invalid packet {current_packet}')
+ logger.info(
+ f"New entry in database for invalid packet {current_packet}"
+ )
  else:
- logger.info(f'An entry already exists in database for invalid packet {current_packet}')
+ logger.info(
+ f"An entry already exists in database for invalid packet {current_packet}"
+ )
  else:
- logger.error(f'Cannot insert new entry in database for invalid packet {current_packet}!')
+ logger.error(
+ f"Cannot insert new entry in database for invalid packet {current_packet}!"
+ )
  failed_insertion.append(current_packet)
 
  return failed_insertion