roc-film 1.13.4-py3-none-any.whl → 1.14.0-py3-none-any.whl

Files changed (52)
  1. roc/__init__.py +2 -1
  2. roc/film/__init__.py +2 -2
  3. roc/film/commands.py +372 -323
  4. roc/film/config/__init__.py +0 -1
  5. roc/film/constants.py +101 -65
  6. roc/film/descriptor.json +126 -95
  7. roc/film/exceptions.py +28 -27
  8. roc/film/tasks/__init__.py +16 -16
  9. roc/film/tasks/cat_solo_hk.py +86 -74
  10. roc/film/tasks/cdf_postpro.py +438 -309
  11. roc/film/tasks/check_dds.py +39 -45
  12. roc/film/tasks/db_to_anc_bia_sweep_table.py +381 -0
  13. roc/film/tasks/dds_to_l0.py +232 -180
  14. roc/film/tasks/export_solo_coord.py +147 -0
  15. roc/film/tasks/file_handler.py +91 -75
  16. roc/film/tasks/l0_to_hk.py +117 -103
  17. roc/film/tasks/l0_to_l1_bia_current.py +38 -30
  18. roc/film/tasks/l0_to_l1_bia_sweep.py +417 -329
  19. roc/film/tasks/l0_to_l1_sbm.py +250 -208
  20. roc/film/tasks/l0_to_l1_surv.py +185 -130
  21. roc/film/tasks/make_daily_tm.py +40 -37
  22. roc/film/tasks/merge_tcreport.py +77 -71
  23. roc/film/tasks/merge_tmraw.py +102 -89
  24. roc/film/tasks/parse_dds_xml.py +21 -20
  25. roc/film/tasks/set_l0_utc.py +51 -49
  26. roc/film/tests/cdf_compare.py +565 -0
  27. roc/film/tests/hdf5_compare.py +84 -62
  28. roc/film/tests/test_dds_to_l0.py +93 -51
  29. roc/film/tests/test_dds_to_tc.py +8 -11
  30. roc/film/tests/test_dds_to_tm.py +8 -10
  31. roc/film/tests/test_film.py +161 -116
  32. roc/film/tests/test_l0_to_hk.py +64 -36
  33. roc/film/tests/test_l0_to_l1_bia.py +10 -14
  34. roc/film/tests/test_l0_to_l1_sbm.py +14 -19
  35. roc/film/tests/test_l0_to_l1_surv.py +68 -41
  36. roc/film/tests/test_metadata.py +21 -20
  37. roc/film/tests/tests.py +743 -396
  38. roc/film/tools/__init__.py +5 -5
  39. roc/film/tools/dataset_tasks.py +34 -2
  40. roc/film/tools/file_helpers.py +390 -269
  41. roc/film/tools/l0.py +402 -324
  42. roc/film/tools/metadata.py +147 -127
  43. roc/film/tools/skeleton.py +12 -17
  44. roc/film/tools/tools.py +109 -92
  45. roc/film/tools/xlsx2skt.py +161 -139
  46. {roc_film-1.13.4.dist-info → roc_film-1.14.0.dist-info}/LICENSE +127 -125
  47. roc_film-1.14.0.dist-info/METADATA +60 -0
  48. roc_film-1.14.0.dist-info/RECORD +50 -0
  49. {roc_film-1.13.4.dist-info → roc_film-1.14.0.dist-info}/WHEEL +1 -1
  50. roc/film/tasks/l0_to_anc_bia_sweep_table.py +0 -348
  51. roc_film-1.13.4.dist-info/METADATA +0 -120
  52. roc_film-1.13.4.dist-info/RECORD +0 -48
roc/film/tasks/merge_tcreport.py

@@ -15,7 +15,13 @@ from poppy.core.logger import logger
 from poppy.core.target import PyObjectTarget, FileTarget
 from poppy.core.task import Task
 
-from roc.film import DATA_VERSION, TIME_DAILY_STRFORMAT, TCREPORT_PREFIX_BASENAME, ARCHIVE_DAILY_DIR, TIME_ISO_STRFORMAT
+from roc.film import (
+    DATA_VERSION,
+    TIME_DAILY_STRFORMAT,
+    TCREPORT_PREFIX_BASENAME,
+    ARCHIVE_DAILY_DIR,
+    TIME_ISO_STRFORMAT,
+)
 from roc.film.tools.file_helpers import get_output_dir
 from roc.film.tools import valid_data_version, sort_dict_list, get_latest_file
 
@@ -25,58 +31,60 @@ class MergeTcReport(Task):
     Task to merge input set of DDS TcReport XML Elements
     into daily XML files.
     """
-    plugin_name = 'roc.film'
-    name = 'merge_tcreport'
+
+    plugin_name = "roc.film"
+    name = "merge_tcreport"
 
     def add_targets(self):
-        self.add_input(identifier='dds_data',
-                       target_class=PyObjectTarget)
-        self.add_output(identifier='tcreport_daily_xml', many=True,
-                        target_class=FileTarget)
+        self.add_input(identifier="dds_data", target_class=PyObjectTarget)
+        self.add_output(
+            identifier="tcreport_daily_xml", many=True, target_class=FileTarget
+        )
 
     def setup_inputs(self):
-
         # Get data_version input keyword (can be used to force version of
         # output file)
         self.data_version = valid_data_version(
-            self.pipeline.get('data_version', default=[DATA_VERSION])[0])
+            self.pipeline.get("data_version", default=[DATA_VERSION])[0]
+        )
 
         # Get/create list of well processed L0 files
         self.processed_files = self.pipeline.get(
-            'processed_files', default=[], create=True)
+            "processed_files", default=[], create=True
+        )
         # Get/create list of failed DDS files
-        self.failed_files = self.pipeline.get(
-            'failed_files', default=[], create=True)
+        self.failed_files = self.pipeline.get("failed_files", default=[], create=True)
 
         # Get/create list of already processed tcreport
         self.processed_tcreport = self.pipeline.get(
-            'processed_tcreport', default=[], create=True)
+            "processed_tcreport", default=[], create=True
+        )
 
         # Get/create TcReport failed
         self.failed_tcreport = self.pipeline.get(
-            'failed_tcreport', default=[], create=True)
+            "failed_tcreport", default=[], create=True
+        )
 
         # Get local archive path
-        self.archive_path = self.pipeline.get(
-            'archive_path', default=[None])[0]
+        self.archive_path = self.pipeline.get("archive_path", default=[None])[0]
 
         # Get list of dates to process
-        self.filter_date = self.pipeline.get('filter_date', default=[])
+        self.filter_date = self.pipeline.get("filter_date", default=[])
         if self.filter_date:
-            self.filter_date = [filter_date.date()
-                                for filter_date in self.filter_date]
+            self.filter_date = [filter_date.date() for filter_date in self.filter_date]
 
         # If output directory not found, create it
        self.output_dir = get_output_dir(self.pipeline)
         if not os.path.isdir(self.output_dir):
-            logger.debug(f'Making {self.output_dir}...')
+            logger.debug(f"Making {self.output_dir}...")
             os.makedirs(self.output_dir)
 
         # Get input TcReport data
         try:
-            dds_data = self.inputs['dds_data'].value
-        except:
+            dds_data = self.inputs["dds_data"].value
+        except Exception as e:
             logger.exception("Cannot retrieve input 'dds_data' input!")
+            logger.debug(e)
             return False
         else:
             self.tcreport_data = self._get_tcreport_elements(dds_data)
@@ -84,8 +92,7 @@ class MergeTcReport(Task):
         return True
 
     def run(self):
-
-        logger.info('Running MergeTcReport Task...')
+        logger.info("Running MergeTcReport Task...")
 
         # get/initialize inputs
         if not self.setup_inputs():
@@ -100,20 +107,24 @@ class MergeTcReport(Task):
         # Loop over each day in the outputs
         output_files = []
         for current_date, output_data in self.tcreport_data.items():
-
             if self.filter_date and current_date not in self.filter_date:
-                logger.info(f'Skipping current date {current_date}')
+                logger.info(f"Skipping current date {current_date}")
                 continue
 
             # Check if existing data and new data are the same
             current_existing_data = existing_packet_data[current_date]
-            if not [True for current_data in output_data
-                    if current_data not in current_existing_data]:
-                logger.info(f'No new tcreport element for {current_date}')
+            if not [
+                True
+                for current_data in output_data
+                if current_data not in current_existing_data
+            ]:
+                logger.info(f"No new tcreport element for {current_date}")
                 continue
             else:
-                logger.debug(f'Creating a new daily TcReport XML file in {self.output_dir} '
-                             f'for {current_date}')
+                logger.debug(
+                    f"Creating a new daily TcReport XML file in {self.output_dir} "
+                    f"for {current_date}"
+                )
 
             # If existing data list is not empty ...
             new_data = output_data.copy()
@@ -122,41 +133,39 @@ class MergeTcReport(Task):
                 new_data.extend(current_existing_data)
 
             # Make sure it has unique elements
-            new_data = [i for n, i in enumerate(new_data)
-                        if i not in new_data[n + 1:]]
+            new_data = [i for n, i in enumerate(new_data) if i not in new_data[n + 1 :]]
 
             # Make sure new data is time sorted
-            new_data = sort_dict_list(new_data, 'ExecutionTime')
+            new_data = sort_dict_list(new_data, "ExecutionTime")
 
             # define format of data version
-            data_version = f'V{int(self.data_version):02d}'
+            data_version = f"V{int(self.data_version):02d}"
 
             # Build output TcReport file basename
             packet_date_str = current_date.strftime(TIME_DAILY_STRFORMAT)
-            file_basename = '_'.join([
-                TCREPORT_PREFIX_BASENAME,
-                packet_date_str]
-            )
+            file_basename = "_".join([TCREPORT_PREFIX_BASENAME, packet_date_str])
 
             # Build full new output file basename
-            file_basename = '_'.join([file_basename, data_version]) + '.xml'
+            file_basename = "_".join([file_basename, data_version]) + ".xml"
 
             # Build output file path
             output_target_path = os.path.join(self.output_dir, file_basename)
 
             # Write output file
-            logger.info(f'Writing {len(output_data)} TcReport elements '
-                        f'into {output_target_path}')
-            if make_tcreport_xml(new_data, output_target_path,
-                                 overwrite=True,
-                                 logger=logger):
+            logger.info(
+                f"Writing {len(output_data)} TcReport elements "
+                f"into {output_target_path}"
+            )
+            if make_tcreport_xml(
+                new_data, output_target_path, overwrite=True, logger=logger
+            ):
                 self.processed_files.append(output_target_path)
                 output_files.append(output_target_path)
             else:
-                logger.error(f'Writing {output_target_path} has failed!')
+                logger.error(f"Writing {output_target_path} has failed!")
                 self.failed_files.append(output_target_path)
 
-        self.outputs['tcreport_daily_xml'].filepath = output_files
+        self.outputs["tcreport_daily_xml"].filepath = output_files
 
     def _get_existing_data(self, packet_date):
         """
@@ -174,44 +183,39 @@ class MergeTcReport(Task):
         # Build list of directories where to check for existing TM files
         dir_list = [self.output_dir]
         if self.archive_path:
-            dir_list.append(os.path.join(self.archive_path,
-                                         packet_date.strftime(ARCHIVE_DAILY_DIR)))
+            dir_list.append(
+                os.path.join(self.archive_path, packet_date.strftime(ARCHIVE_DAILY_DIR))
+            )
 
         # Loop over directories where to check for existing file
         # (start to check in output directory then in archive dir if provided)
         latest_existing_file = None
         for current_dir in dir_list:
-            latest_existing_file = self._check_existence(
-                current_dir, packet_date)
+            latest_existing_file = self._check_existence(current_dir, packet_date)
             if latest_existing_file:
                 break
 
         # Then, if latest existing file was found, parse it to retrieve data
         if latest_existing_file:
-            logger.info(f'Loading existing TcReport data from {latest_existing_file}...')
-            output_data = self._get_tcreport_elements(
-                xml_to_dict(latest_existing_file))
+            logger.info(
+                f"Loading existing TcReport data from {latest_existing_file}..."
+            )
+            output_data = self._get_tcreport_elements(xml_to_dict(latest_existing_file))
         else:
-            logger.info(f'No existing TcReport data file found '
-                        f' for {packet_date}')
+            logger.info(f"No existing TcReport data file found for {packet_date}")
 
         return output_data
 
     def _check_existence(self, dir_path, packet_date):
-
         # Convert input packet date into string
         packet_date_str = packet_date.strftime(TIME_DAILY_STRFORMAT)
 
         # Build output TmRaw file basename
-        file_basename = '_'.join([
-            TCREPORT_PREFIX_BASENAME,
-            packet_date_str]
-        )
+        file_basename = "_".join([TCREPORT_PREFIX_BASENAME, packet_date_str])
 
-        existing_files = glob(os.path.join(
-            dir_path, file_basename + '_V??.xml'))
+        existing_files = glob(os.path.join(dir_path, file_basename + "_V??.xml"))
         if existing_files:
-            logger.debug(f'{len(existing_files)} already exists for {packet_date}')
+            logger.debug(f"{len(existing_files)} already exists for {packet_date}")
             # If files found then get latest version
             latest_existing_file = get_latest_file(existing_files)
         else:
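`_check_existence` globs for any `_V??.xml` version of the daily file and delegates the choice of the most recent one to `get_latest_file`. A rough stand-in for that helper, assuming only the two-digit version suffix shown in the glob pattern above (not the plugin's actual implementation):

```python
import os
import re
from glob import glob


def latest_versioned_file(dir_path: str, file_basename: str):
    """Return the existing daily file with the highest _Vxx suffix, or None."""
    candidates = glob(os.path.join(dir_path, file_basename + "_V??.xml"))
    if not candidates:
        return None
    # Rank candidates by the two-digit version number embedded in the filename
    return max(candidates, key=lambda p: int(re.search(r"_V(\d{2})\.xml$", p).group(1)))
```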
@@ -228,9 +232,9 @@ class MergeTcReport(Task):
         :return: list of TcReport XML elements
         """
 
-        output_tcreport_list = tcreport_xml_dict['ns2:ResponsePart'] \
-            ['Response']['PktTcReportResponse'] \
-            ['PktTcReportList']['PktTcReportListElement']
+        output_tcreport_list = tcreport_xml_dict["ns2:ResponsePart"]["Response"][
+            "PktTcReportResponse"
+        ]["PktTcReportList"]["PktTcReportListElement"]
 
         # Make sure that returned output_tcreport_list is a list
         # (If only one PktTcReportListElement is found in the XML
@@ -242,10 +246,12 @@ class MergeTcReport(Task):
         output_tcreport_dict = collections.defaultdict(list)
         for current_tcreport in output_tcreport_list:
             try:
-                key = datetime.strptime(current_tcreport['ExecutionTime'],
-                                        TIME_ISO_STRFORMAT).date()
-            except:
-                logger.warning(f'Cannot get ExecutionTime for {current_tcreport}')
+                key = datetime.strptime(
+                    current_tcreport["ExecutionTime"], TIME_ISO_STRFORMAT
+                ).date()
+            except Exception as e:
+                logger.warning(f"Cannot get ExecutionTime for {current_tcreport}")
+                logger.debug(e)
                 if current_tcreport not in self.failed_tcreport:
                     self.failed_tcreport.append(current_tcreport)
             else:
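`_get_tcreport_elements` keys each TcReport element by the date part of its `ExecutionTime` field and collects them into a `defaultdict(list)`. A minimal sketch of that grouping, assuming an ISO-like value for `TIME_ISO_STRFORMAT` (the real format string is a `roc.film` constant):

```python
import collections
from datetime import datetime

# Assumed format -- the real TIME_ISO_STRFORMAT is defined in roc.film
TIME_ISO_STRFORMAT = "%Y-%m-%dT%H:%M:%S.%fZ"


def group_by_execution_date(tcreport_elements):
    """Group TcReport elements into one list per ExecutionTime date."""
    grouped = collections.defaultdict(list)
    for element in tcreport_elements:
        try:
            key = datetime.strptime(element["ExecutionTime"], TIME_ISO_STRFORMAT).date()
        except (KeyError, ValueError):
            # The task routes unparsable elements to failed_tcreport instead
            continue
        grouped[key].append(element)
    return grouped
```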
roc/film/tasks/merge_tmraw.py

@@ -9,51 +9,56 @@ import collections
 import os
 from glob import glob
 
-from edds_process.response import \
-    xml_to_dict, remove_scos_header
+from edds_process.response import xml_to_dict, remove_scos_header
 
 from poppy.core.task import Task
 from poppy.core.logger import logger
 from poppy.core.target import FileTarget, PyObjectTarget
 
-from roc.film import TM_PACKET_CATEG, ARCHIVE_DAILY_DIR, TIME_DAILY_STRFORMAT, TMRAW_PREFIX_BASENAME
+from roc.film import (
+    TM_PACKET_CATEG,
+    ARCHIVE_DAILY_DIR,
+    TIME_DAILY_STRFORMAT,
+    TMRAW_PREFIX_BASENAME,
+)
 from roc.film.tools.file_helpers import get_output_dir
 from roc.film.tools import get_latest_file
 
 from roc.rpl.time import Time
 from roc.rpl.packet_structure.data import Data
 
+
 class MergeTmRaw(Task):
     """
     Task to merge input set of DDS TmRaw Packet XML Elements
     into daily XML files.
     """
-    plugin_name = 'roc.film'
-    name = 'merge_tmraw'
+
+    plugin_name = "roc.film"
+    name = "merge_tmraw"
 
     def add_targets(self):
-        self.add_input(identifier='dds_xml',
-                       target_class=FileTarget)
-        self.add_input(identifier='dds_data',
-                       target_class=PyObjectTarget)
+        self.add_input(identifier="dds_xml", target_class=FileTarget)
+        self.add_input(identifier="dds_data", target_class=PyObjectTarget)
 
     def get_dds_xml(self):
-        return self.pipeline.get('dds_xml', default=[])
+        return self.pipeline.get("dds_xml", default=[])
 
     def setup_inputs(self):
-
         # Get/create list of failed DDS files
         self.failed_dds_files = self.pipeline.get(
-            'failed_dds_files', default=[], create=True)
+            "failed_dds_files", default=[], create=True
+        )
 
         # Get input DDS XML file
         dds_file = None
         try:
-            dds_file = self.inputs['dds_xml'].filepath
+            dds_file = self.inputs["dds_xml"].filepath
             if not os.path.isfile(dds_file):
                 raise FileNotFoundError
-        except:
+        except Exception as e:
             logger.exception(f"Cannot load input DDS XML file '{dds_file}'")
+            logger.debug(e)
             self.failed_dds_files.append(dds_file)
             return False
         else:
@@ -62,34 +67,33 @@ class MergeTmRaw(Task):
         # Get packet category from input DDS filename
         # (Convention should be "RPW_<packet_category>_*.xml"
         try:
-            self.packet_category = os.path.basename(
-                self.dds_file).split('_')[1].lower()
-        except:
-            logger.exception(f'Packet category cannot be extracted from DDS filename!')
+            self.packet_category = os.path.basename(self.dds_file).split("_")[1].lower()
+        except Exception as e:
+            logger.exception("Packet category cannot be extracted from DDS filename!")
+            logger.debug(e)
             self.failed_dds_files.append(self.dds_file)
             return False
 
         # If output directory not found, create it
         self.output_dir = get_output_dir(self.pipeline)
         if not os.path.isdir(self.output_dir):
-            logger.debug(f'Making {self.output_dir}...')
+            logger.debug(f"Making {self.output_dir}...")
             os.makedirs(self.output_dir)
 
         # Get local archive path
-        self.archive_path = self.pipeline.get(
-            'archive_path', default=[None])[0]
+        self.archive_path = self.pipeline.get("archive_path", default=[None])[0]
 
         # Get list of dates to process
-        self.filter_date = self.pipeline.get('filter_date', default=[])
+        self.filter_date = self.pipeline.get("filter_date", default=[])
         if self.filter_date:
-            self.filter_date = [filter_date.date()
-                                for filter_date in self.filter_date]
+            self.filter_date = [filter_date.date() for filter_date in self.filter_date]
 
         # Get input TmRaw data
         try:
-            dds_data = self.inputs['dds_data'].value
-        except:
+            dds_data = self.inputs["dds_data"].value
+        except Exception as e:
             logger.exception("Cannot retrieve input 'dds_data' input!")
+            logger.debug(e)
             self.failed_dds_files.append(self.dds_file)
             return False
         else:
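The hunk above compacts the packet-category extraction: the category is simply the second underscore-separated token of the DDS file basename, per the `RPW_<packet_category>_*.xml` convention noted in the comment. A small illustration with a hypothetical filename:

```python
import os


def packet_category_from_dds_filename(dds_file: str) -> str:
    """Second '_'-separated token of the basename, lower-cased."""
    return os.path.basename(dds_file).split("_")[1].lower()


# Hypothetical DDS filename following the RPW_<packet_category>_*.xml convention
print(packet_category_from_dds_filename("/data/dds/RPW_TM_20230601T000000.xml"))  # tm
```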
@@ -98,25 +102,26 @@ class MergeTmRaw(Task):
 
         # Get packet cache
         self.packet_cache = self.pipeline.get(
-            'packet_cache', default={tm_cat: {}
-                                     for tm_cat in TM_PACKET_CATEG},
-            create=True)
+            "packet_cache",
+            default={tm_cat: {} for tm_cat in TM_PACKET_CATEG},
+            create=True,
+        )
 
         # And flag to indicated if there are new packets
         self.has_new_packet = self.pipeline.get(
-            'has_new_packet', default={tm_cat: {}
-                                       for tm_cat in TM_PACKET_CATEG},
-            create=True)
+            "has_new_packet",
+            default={tm_cat: {} for tm_cat in TM_PACKET_CATEG},
+            create=True,
+        )
 
         return True
 
     def run(self):
-
-        logger.debug('Running MergeTmRaw Task...')
+        logger.debug("Running MergeTmRaw Task...")
 
         # get/initialize inputs
         if not self.setup_inputs():
-            logger.warning('Missing inputs for MergeTmRaw Task!')
+            logger.warning("Missing inputs for MergeTmRaw Task!")
             return
 
         # Check/Get existing data for previous TmRaw daily files (if any)
@@ -127,9 +132,8 @@ class MergeTmRaw(Task):
 
         # Loop over each day in the outputs
         for current_date, output_data in self.tmraw_data.items():
-
             if self.filter_date and current_date not in self.filter_date:
-                logger.info(f'Skipping current date {current_date}')
+                logger.info(f"Skipping current date {current_date}")
                 continue
 
             # Check if existing data and new data are the same
@@ -138,13 +142,14 @@ class MergeTmRaw(Task):
             # If current_date.year <= 2000, output_data contains not synchronized packets
             # in this case, always write packets into an output file
             new_data = list(set(output_data) - set(current_existing_data))
-            if (current_date.year > 2000 and
-                    not new_data):
-                logger.info(f'No new packet for {current_date} in {self.dds_file}')
+            if current_date.year > 2000 and not new_data:
+                logger.info(f"No new packet for {current_date} in {self.dds_file}")
                 self.has_new_packet[self.packet_category][current_date] = False
             else:
-                logger.info(f'{len(new_data)} new TmRaw Packet elements found '
-                            f'for {current_date} in {self.dds_file}...')
+                logger.info(
+                    f"{len(new_data)} new TmRaw Packet elements found "
+                    f"for {current_date} in {self.dds_file}..."
+                )
 
             # If existing data list is not empty ...
             if current_existing_data:
@@ -162,11 +167,11 @@ class MergeTmRaw(Task):
 
             # Store new packets for given category and date in the packet_cache
             if current_existing_data:
-                self.packet_cache[self.packet_category][
-                    current_date] = current_existing_data
+                self.packet_cache[self.packet_category][current_date] = (
+                    current_existing_data
+                )
 
-    def _get_packet_time(self, packet_data,
-                         scos_header=True):
+    def _get_packet_time(self, packet_data, scos_header=True):
         """
         Extract CUC time from current packet binary.
 
@@ -182,15 +187,15 @@ class MergeTmRaw(Task):
             else:
                 packet_raw_data = packet_data
             packet_bytes_data = bytearray.fromhex(packet_raw_data)
-            packet_data = Data(bytes(packet_bytes_data),
-                               len(packet_bytes_data)
-                               )
+            packet_data = Data(bytes(packet_bytes_data), len(packet_bytes_data))
             data_field_header = packet_data.extract_tm_header()
             packet_time = Time.cuc_to_datetime(data_field_header.time[:2])[0]
-        except:
-            logger.exception(f'Packet CUC time cannot be retrieved '
-                             f'from Packet element {packet_data}, skip it '
-                             )
+        except Exception as e:
+            logger.exception(
+                f"Packet CUC time cannot be retrieved "
+                f"from Packet element {packet_data}, skip it "
+            )
+            logger.debug(e)
             if packet_data not in self.failed_tmraw:
                 self.failed_tmraw.append(packet_data)
             return None, None
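`_get_packet_time` decodes the packet's on-board CUC time from its hex representation. A condensed sketch of the same call sequence for the no-SCOS-header case, using the `roc.rpl` helpers exactly as they appear in the hunk above (error handling and failed-packet bookkeeping omitted):

```python
from roc.rpl.packet_structure.data import Data
from roc.rpl.time import Time


def packet_cuc_datetime(packet_hex: str):
    """Convert a raw TM packet hex string to the datetime of its CUC time field."""
    packet_bytes = bytearray.fromhex(packet_hex)
    data = Data(bytes(packet_bytes), len(packet_bytes))
    header = data.extract_tm_header()
    # cuc_to_datetime returns a sequence; the first element is the datetime
    return Time.cuc_to_datetime(header.time[:2])[0]
```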
@@ -209,9 +214,11 @@ class MergeTmRaw(Task):
 
         # First check if there are already packets store in packet_cache
         if packet_date in self.packet_cache[self.packet_category]:
-            logger.info(f'Retrieving existing {self.packet_category} '
-                        f'data for {packet_date} from pipeline cache '
-                        f'({len(self.packet_cache[self.packet_category][packet_date])} packets) ...')
+            logger.info(
+                f"Retrieving existing {self.packet_category} "
+                f"data for {packet_date} from pipeline cache "
+                f"({len(self.packet_cache[self.packet_category][packet_date])} packets) ..."
+            )
             return self.packet_cache[self.packet_category]
 
         # If not initialize output data
@@ -221,43 +228,43 @@ class MergeTmRaw(Task):
         # Build list of directories where to check for existing TM files
         dir_list = [self.output_dir]
         if self.archive_path:
-            dir_list.append(os.path.join(self.archive_path,
-                                         packet_date.strftime(ARCHIVE_DAILY_DIR)))
+            dir_list.append(
+                os.path.join(self.archive_path, packet_date.strftime(ARCHIVE_DAILY_DIR))
+            )
 
         # Loop over directories where to check for existing file
         # (start to check in output directory then in archive dir if provided)
         for current_dir in dir_list:
-            latest_existing_file = self._check_existence(
-                current_dir, packet_date)
+            latest_existing_file = self._check_existence(current_dir, packet_date)
             if latest_existing_file:
                 break
 
         # Then, if latest existing file was found, parse it to retrieve data
         if latest_existing_file:
-            logger.info(f'Loading existing TmRaw data from {latest_existing_file}...')
-            output_data = self._get_tmraw_packets(xml_to_dict(latest_existing_file),
-                                                  scos_header=False)
+            logger.info(f"Loading existing TmRaw data from {latest_existing_file}...")
+            output_data = self._get_tmraw_packets(
+                xml_to_dict(latest_existing_file), scos_header=False
+            )
         else:
-            logger.info(f'No existing {self.packet_category} TmRaw data file found '
-                        f' for {packet_date}')
+            logger.info(
+                f"No existing {self.packet_category} TmRaw data file found "
+                f" for {packet_date}"
+            )
 
         return output_data
 
     def _check_existence(self, dir_path, packet_date):
-
         # Convert input packet date into string
         packet_date_str = packet_date.strftime(TIME_DAILY_STRFORMAT)
 
         # Build output TmRaw file basename
-        file_basename = '_'.join([
-            TMRAW_PREFIX_BASENAME + '-' + self.packet_category,
-            packet_date_str]
+        file_basename = "_".join(
+            [TMRAW_PREFIX_BASENAME + "-" + self.packet_category, packet_date_str]
         )
 
-        existing_files = glob(os.path.join(
-            dir_path, file_basename + '_V??.xml'))
+        existing_files = glob(os.path.join(dir_path, file_basename + "_V??.xml"))
         if existing_files:
-            logger.debug(f'{len(existing_files)} already exists for {packet_date}')
+            logger.debug(f"{len(existing_files)} already exists for {packet_date}")
             # If files found then get latest version
             latest_existing_file = get_latest_file(existing_files)
         else:
@@ -265,8 +272,7 @@ class MergeTmRaw(Task):
 
         return latest_existing_file
 
-    def _get_tmraw_packets(self, tmraw_xml_dict,
-                           scos_header=True):
+    def _get_tmraw_packets(self, tmraw_xml_dict, scos_header=True):
         """
         Extract TmRaw Packet Element list from a dictionary
         of a input TmRaw XML file
@@ -279,32 +285,39 @@ class MergeTmRaw(Task):
 
         # Get/create list of already processed tmraw packets
         self.processed_tmraw = self.pipeline.get(
-            'processed_tmraw', default=[], create=True)
+            "processed_tmraw", default=[], create=True
+        )
 
         # Get/create list of failed tmraw packets
-        self.failed_tmraw = self.pipeline.get(
-            'failed_tmraw', default=[], create=True)
+        self.failed_tmraw = self.pipeline.get("failed_tmraw", default=[], create=True)
 
-        output_packet_list = tmraw_xml_dict['ns2:ResponsePart']\
-            ['Response']['PktRawResponse'] \
-            ['PktRawResponseElement']
+        output_packet_list = tmraw_xml_dict["ns2:ResponsePart"]["Response"][
+            "PktRawResponse"
+        ]["PktRawResponseElement"]
 
         # Make sure that returned output_tmraw_list is a list
         # (If only one Packet element is found in the XML
         # the xml_to_dict method returns a collections.OrderedDict() instance).
-        if isinstance(output_packet_list, collections.OrderedDict):
+        if not isinstance(output_packet_list, list):
             output_packet_list = [output_packet_list]
 
         # Add packet date and time to output list
         # Make sure to take unique packets
-        logger.debug(f'Extracting packet CUC time from '
-                     f'{len(output_packet_list)} TmRaw Packet elements...')
-
-        output_packet_list = list(set([
-            self._get_packet_time(current_packet['Packet'],
-                                  scos_header=scos_header)
-            for current_packet in output_packet_list
-        ]))
+        logger.debug(
+            f"Extracting packet CUC time from "
+            f"{len(output_packet_list)} TmRaw Packet elements..."
+        )
+
+        output_packet_list = list(
+            set(
+                [
+                    self._get_packet_time(
+                        current_packet["Packet"], scos_header=scos_header
+                    )
+                    for current_packet in output_packet_list
+                ]
+            )
+        )
 
         # return output as a dictionary of packet date keywords
         output_packet_dict = collections.defaultdict(list)