roc-film: roc_film-1.13.5-py3-none-any.whl → roc_film-1.14.0-py3-none-any.whl

Files changed (52)
  1. roc/__init__.py +2 -1
  2. roc/film/__init__.py +2 -2
  3. roc/film/commands.py +372 -323
  4. roc/film/config/__init__.py +0 -1
  5. roc/film/constants.py +101 -65
  6. roc/film/descriptor.json +127 -96
  7. roc/film/exceptions.py +28 -27
  8. roc/film/tasks/__init__.py +16 -16
  9. roc/film/tasks/cat_solo_hk.py +86 -74
  10. roc/film/tasks/cdf_postpro.py +438 -309
  11. roc/film/tasks/check_dds.py +39 -45
  12. roc/film/tasks/db_to_anc_bia_sweep_table.py +381 -0
  13. roc/film/tasks/dds_to_l0.py +232 -180
  14. roc/film/tasks/export_solo_coord.py +147 -0
  15. roc/film/tasks/file_handler.py +91 -75
  16. roc/film/tasks/l0_to_hk.py +117 -103
  17. roc/film/tasks/l0_to_l1_bia_current.py +38 -30
  18. roc/film/tasks/l0_to_l1_bia_sweep.py +417 -329
  19. roc/film/tasks/l0_to_l1_sbm.py +250 -208
  20. roc/film/tasks/l0_to_l1_surv.py +185 -130
  21. roc/film/tasks/make_daily_tm.py +40 -37
  22. roc/film/tasks/merge_tcreport.py +77 -71
  23. roc/film/tasks/merge_tmraw.py +101 -88
  24. roc/film/tasks/parse_dds_xml.py +21 -20
  25. roc/film/tasks/set_l0_utc.py +51 -49
  26. roc/film/tests/cdf_compare.py +565 -0
  27. roc/film/tests/hdf5_compare.py +84 -62
  28. roc/film/tests/test_dds_to_l0.py +93 -51
  29. roc/film/tests/test_dds_to_tc.py +8 -11
  30. roc/film/tests/test_dds_to_tm.py +8 -10
  31. roc/film/tests/test_film.py +161 -116
  32. roc/film/tests/test_l0_to_hk.py +64 -36
  33. roc/film/tests/test_l0_to_l1_bia.py +10 -14
  34. roc/film/tests/test_l0_to_l1_sbm.py +14 -19
  35. roc/film/tests/test_l0_to_l1_surv.py +68 -41
  36. roc/film/tests/test_metadata.py +21 -20
  37. roc/film/tests/tests.py +743 -396
  38. roc/film/tools/__init__.py +5 -5
  39. roc/film/tools/dataset_tasks.py +34 -2
  40. roc/film/tools/file_helpers.py +390 -269
  41. roc/film/tools/l0.py +402 -324
  42. roc/film/tools/metadata.py +147 -127
  43. roc/film/tools/skeleton.py +12 -17
  44. roc/film/tools/tools.py +109 -92
  45. roc/film/tools/xlsx2skt.py +161 -139
  46. {roc_film-1.13.5.dist-info → roc_film-1.14.0.dist-info}/LICENSE +127 -125
  47. roc_film-1.14.0.dist-info/METADATA +60 -0
  48. roc_film-1.14.0.dist-info/RECORD +50 -0
  49. {roc_film-1.13.5.dist-info → roc_film-1.14.0.dist-info}/WHEEL +1 -1
  50. roc/film/tasks/l0_to_anc_bia_sweep_table.py +0 -348
  51. roc_film-1.13.5.dist-info/METADATA +0 -120
  52. roc_film-1.13.5.dist-info/RECORD +0 -48
roc/film/exceptions.py CHANGED
@@ -7,23 +7,25 @@ Exceptions definition for FILM plugin.
 
 from poppy.core.logger import logger
 
-__all__ = ['FilmException',
-           'MetadataException',
-           'UnknownPipeline',
-           'AncReportProdError',
-           'LoadDataSetError',
-           'NoData',
-           'NoEpochFoundError',
-           'L0ProdFailure',
-           'L1BiaProdError',
-           'L1SurvProdFailure',
-           'L1PostProError',
-           'HkProdFailure',
-           'L1SbmProdError',
-           'AncBiaProdError',
-           'HandlingFileError',
-           'InvalidDataVersion',
-           'EmptyInput']
+__all__ = [
+    "FilmException",
+    "MetadataException",
+    "UnknownPipeline",
+    "AncReportProdError",
+    "LoadDataSetError",
+    "NoData",
+    "NoEpochFoundError",
+    "L0ProdFailure",
+    "L1BiaProdError",
+    "L1SurvProdFailure",
+    "L1PostProError",
+    "HkProdFailure",
+    "L1SbmProdError",
+    "AncBiaProdError",
+    "HandlingFileError",
+    "InvalidDataVersion",
+    "EmptyInput",
+]
 
 
 class FilmException(Exception):
@@ -145,8 +147,7 @@ class NoData(Exception):
     Exception raised when no output data processed
     """
 
-    def __init__(self, message=None, ll=logger.error,
-                 *args, **kwargs):
+    def __init__(self, message=None, ll=logger.error, *args, **kwargs):
         super(NoData, self).__init__(*args, **kwargs)
         if message is not None:
             ll(message)
@@ -167,8 +168,8 @@ class L1SbmProdError(Exception):
         logger.error(message)
         self.message = message
 
-        # logger_level = 'warning'
-        # use_traceback = True
+    # logger_level = 'warning'
+    # use_traceback = True
 
     pass
 
@@ -181,8 +182,8 @@ class L1BiaProdError(Exception):
         logger.error(message)
         self.message = message
 
-        # logger_level = 'warning'
-        # use_traceback = True
+    # logger_level = 'warning'
+    # use_traceback = True
 
     pass
 
@@ -195,8 +196,8 @@ class AncBiaProdError(Exception):
         logger.error(message)
         self.message = message
 
-        # logger_level = 'warning'
-        # use_traceback = True
+    # logger_level = 'warning'
+    # use_traceback = True
 
     pass
 
@@ -209,7 +210,7 @@ class InvalidDataVersion(Exception):
         logger.error(message)
         self.message = message
 
-        # logger_level = 'warning'
-        # use_traceback = True
+    # logger_level = 'warning'
+    # use_traceback = True
 
     pass
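The NoData hunk above shows the pattern used throughout this module: the exception logs its own message at construction time, and the ll argument lets the caller pick the log level. A minimal standalone sketch, with the stdlib logging module standing in for poppy.core.logger:

```python
# Minimal sketch of the log-on-construction pattern used by NoData,
# with stdlib logging standing in for poppy.core.logger.
import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class NoData(Exception):
    def __init__(self, message=None, ll=logger.error, *args, **kwargs):
        super().__init__(*args, **kwargs)
        if message is not None:
            ll(message)  # log with the level chosen by the caller


# Passing ll=logger.warning emits a WARNING record instead of ERROR,
# without changing the exception type being raised.
try:
    raise NoData("No output data processed", ll=logger.warning)
except NoData:
    pass  # the record was already emitted by __init__
```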
roc/film/tasks/__init__.py CHANGED
@@ -1,19 +1,19 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 
-from roc.film.tasks.check_dds import *
-from roc.film.tasks.cat_solo_hk import *
-from roc.film.tasks.make_daily_tm import *
-from roc.film.tasks.merge_tmraw import *
-from roc.film.tasks.merge_tcreport import *
-from roc.film.tasks.parse_dds_xml import *
-from roc.film.tasks.dds_to_l0 import *
-from roc.film.tasks.set_l0_utc import *
-from roc.film.tasks.l0_to_hk import *
-from roc.film.tasks.l0_to_l1_surv import *
-from roc.film.tasks.l0_to_l1_sbm import *
-from roc.film.tasks.l0_to_l1_bia_sweep import *
-from roc.film.tasks.l0_to_anc_bia_sweep_table import *
-from roc.film.tasks.l0_to_l1_bia_current import *
-from roc.film.tasks.cdf_postpro import *
-from roc.film.tasks.file_handler import *
+from roc.film.tasks.check_dds import *  # noqa: F403
+from roc.film.tasks.cat_solo_hk import *  # noqa: F403
+from roc.film.tasks.make_daily_tm import *  # noqa: F403
+from roc.film.tasks.merge_tmraw import *  # noqa: F403
+from roc.film.tasks.merge_tcreport import *  # noqa: F403
+from roc.film.tasks.parse_dds_xml import *  # noqa: F403
+from roc.film.tasks.dds_to_l0 import *  # noqa: F403
+from roc.film.tasks.set_l0_utc import *  # noqa: F403
+from roc.film.tasks.l0_to_hk import *  # noqa: F403
+from roc.film.tasks.l0_to_l1_surv import *  # noqa: F403
+from roc.film.tasks.l0_to_l1_sbm import *  # noqa: F403
+from roc.film.tasks.l0_to_l1_bia_sweep import *  # noqa: F403
+from roc.film.tasks.db_to_anc_bia_sweep_table import *  # noqa: F403
+from roc.film.tasks.l0_to_l1_bia_current import *  # noqa: F403
+from roc.film.tasks.cdf_postpro import *  # noqa: F403
+from roc.film.tasks.file_handler import *  # noqa: F403
roc/film/tasks/cat_solo_hk.py CHANGED
@@ -15,12 +15,18 @@ from poppy.core.logger import logger
 from poppy.core.target import FileTarget
 from poppy.core.task import Task
 
-from roc.film import DATA_VERSION, TIME_DAILY_STRFORMAT, TIME_ISO_STRFORMAT, ARCHIVE_DAILY_DIR
+from roc.film import (
+    DATA_VERSION,
+    TIME_DAILY_STRFORMAT,
+    TIME_ISO_STRFORMAT,
+    ARCHIVE_DAILY_DIR,
+)
 from roc.film.constants import SOLOHK_PREFIX_BASENAME
 from roc.film.tools.file_helpers import get_output_dir
 from roc.film.tools import valid_data_version, sort_dict_list, get_latest_file
 
-__all__ = ['CatSoloHk']
+__all__ = ["CatSoloHk"]
+
 
 class CatSoloHk(Task):
     """
@@ -29,68 +35,72 @@ class CatSoloHk(Task):
     ParamSampleListElement XML elements are loaded from a
     set of input SOLO EDDS Param XML files
     """
-    plugin_name = 'roc.film'
-    name = 'cat_solo_hk'
+
+    plugin_name = "roc.film"
+    name = "cat_solo_hk"
 
     def add_targets(self):
-        self.add_input(identifier='dds_xml', many=True,
-                       target_class=FileTarget,
-                       filepath=self.get_dds_xml())
-        self.add_output(identifier='solohk_daily_xml', many=True,
-                        target_class=FileTarget)
+        self.add_input(
+            identifier="dds_xml",
+            many=True,
+            target_class=FileTarget,
+            filepath=self.get_dds_xml(),
+        )
+        self.add_output(
+            identifier="solohk_daily_xml", many=True, target_class=FileTarget
+        )
 
     def get_dds_xml(self):
-        return self.pipeline.get('dds_files', default=[])
+        return self.pipeline.get("dds_files", default=[])
 
     def setup_inputs(self):
-
         # Get data_version input keyword (can be used to force version of
         # output file)
         self.data_version = valid_data_version(
-            self.pipeline.get('data_version', default=[DATA_VERSION])[0])
+            self.pipeline.get("data_version", default=[DATA_VERSION])[0]
+        )
 
         # Get input list of DDS XML files
-        self.dds_file_list = self.inputs['dds_xml'].filepath
+        self.dds_file_list = self.inputs["dds_xml"].filepath
         self.dds_file_num = len(self.dds_file_list)
         if self.dds_file_num == 0:
-            logger.warning('No input DDS XML file passed as input argument!')
+            logger.warning("No input DDS XML file passed as input argument!")
             return False
 
         # Get/create list of well processed L0 files
         self.processed_files = self.pipeline.get(
-            'processed_files', default=[], create=True)
+            "processed_files", default=[], create=True
+        )
         # Get/create list of failed DDS files
-        self.failed_files = self.pipeline.get(
-            'failed_files', default=[], create=True)
+        self.failed_files = self.pipeline.get("failed_files", default=[], create=True)
 
         # Get/create list of already processed DDS
-        self.processed_dds_files = self.pipeline.get('processed_dds_files',
-                                                     default=[], create=True)
+        self.processed_dds_files = self.pipeline.get(
+            "processed_dds_files", default=[], create=True
+        )
 
         # Get/create list of failed DDS
-        self.failed_dds_files = self.pipeline.get('failed_dds_files',
-                                                  default=[], create=True)
+        self.failed_dds_files = self.pipeline.get(
+            "failed_dds_files", default=[], create=True
+        )
 
         # Get local archive path
-        self.archive_path = self.pipeline.get(
-            'archive_path', default=[None])[0]
+        self.archive_path = self.pipeline.get("archive_path", default=[None])[0]
 
         # Get list of dates to process
-        self.filter_date = self.pipeline.get('filter_date', default=[])
+        self.filter_date = self.pipeline.get("filter_date", default=[])
         if self.filter_date:
-            self.filter_date = [filter_date.date()
-                                for filter_date in self.filter_date]
+            self.filter_date = [filter_date.date() for filter_date in self.filter_date]
 
         # If output directory not found, create it
        self.output_dir = get_output_dir(self.pipeline)
         if not os.path.isdir(self.output_dir):
-            logger.debug(f'Making {self.output_dir}...')
+            logger.debug(f"Making {self.output_dir}...")
             os.makedirs(self.output_dir)
 
         return True
 
     def run(self):
-
         # Initialize task inputs
         if not self.setup_inputs():
             self.pipeline.exit()
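Throughout setup_inputs(), pipeline properties are read with self.pipeline.get(key, default=[...]) and unwrapped with a trailing [0]. A toy illustration of that access pattern follows; this is not poppy's actual API, just a stand-in showing why scalar settings arrive wrapped in one-element lists (presumably one entry per command-line occurrence):

```python
# Toy stand-in for the pipeline property store (NOT poppy's real API):
# properties are list-valued, so scalar settings use a one-element
# default and are unwrapped with [0].
class ToyPipeline:
    def __init__(self, properties):
        self._properties = properties

    def get(self, key, default=None, create=False):
        # create=True registers the default so later tasks share the same list
        if create and key not in self._properties:
            self._properties[key] = default
        return self._properties.get(key, default)


pipeline = ToyPipeline({"data_version": ["02"]})
data_version = pipeline.get("data_version", default=["01"])[0]  # -> "02"
archive_path = pipeline.get("archive_path", default=[None])[0]  # -> None
failed_files = pipeline.get("failed_files", default=[], create=True)  # shared list
```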
@@ -102,15 +112,15 @@ class CatSoloHk(Task):
 
         # Loop over list of input files
         for input_dds_file in self.dds_file_list:
-
             # Retrieve list of SOLO HK PARAMS inside the XML
             try:
                 xml_param_list = CatSoloHk.parse_dds_param_xml(input_dds_file)
             except FileNotFoundError:
-                logger.warning(f'Input file {input_dds_file} not found!')
+                logger.warning(f"Input file {input_dds_file} not found!")
                 continue
-            except:
-                logger.exception(f'Cannot parse {input_dds_file}')
+            except Exception as e:
+                logger.exception(f"Cannot parse {input_dds_file}")
+                logger.debug(e)
                 if input_dds_file not in self.failed_dds_files:
                     self.failed_dds_files.append(input_dds_file)
                 continue
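The change from a bare except: to except Exception as e in this hunk is more than style: a bare except also traps SystemExit and KeyboardInterrupt, so a Ctrl-C during the file loop could be silently recorded as a parse failure. A self-contained illustration of the corrected pattern:

```python
# Illustration of the corrected error handling: catching Exception lets
# SystemExit/KeyboardInterrupt propagate, while still recording failures.
import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)


def parse_dds(path):
    raise ValueError(f"bad XML in {path}")  # simulate a parse error


failed_dds_files = []
for input_dds_file in ["a.xml", "b.xml"]:
    try:
        parse_dds(input_dds_file)
    except Exception as e:  # Ctrl-C would NOT be swallowed here
        logger.exception(f"Cannot parse {input_dds_file}")
        logger.debug(e)
        if input_dds_file not in failed_dds_files:
            failed_dds_files.append(input_dds_file)
        continue
```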
@@ -124,70 +134,71 @@ class CatSoloHk(Task):
         # Loop over days in solo_hk_dict
         output_files = []
         for current_date, current_param_list in solo_hk_dict.items():
-
             # Check if output file already exists for the current
             # day
             existing_data = self._get_existing_data(current_date)
             existing_num = len(existing_data)
-            logger.info(f'{existing_num} Solo HK elements already '
-                        f'found for {current_date}')
+            logger.info(
+                f"{existing_num} Solo HK elements already found for {current_date}"
+            )
 
             # Append new elements in the list of existing data
             new_data = existing_data.copy()
-            [new_data.append(current_data)
-             for current_data in current_param_list
-             if current_data not in existing_data]
+            [
+                new_data.append(current_data)
+                for current_data in current_param_list
+                if current_data not in existing_data
+            ]
 
             # If no difference with existing data, no need to save new output
             # file
             new_data_num = len(new_data)
             if existing_num == new_data_num:
-                logger.info(f'No new SOLO HK element for {current_date}')
+                logger.info(f"No new SOLO HK element for {current_date}")
                 continue
             else:
-                logger.debug(f'Creating a new SOLO HK daily file in {self.output_dir} '
-                             f'for {current_date}')
+                logger.debug(
+                    f"Creating a new SOLO HK daily file in {self.output_dir} "
+                    f"for {current_date}"
+                )
 
             # Always make sure that elements are unique and sorted
             # by ascending timestamp
             # Make sure it has unique elements
-            new_data = [i for n, i in enumerate(new_data)
-                        if i not in new_data[n + 1:]]
+            new_data = [i for n, i in enumerate(new_data) if i not in new_data[n + 1 :]]
 
             # Make sure new data is time sorted
-            new_data = sort_dict_list(new_data, 'TimeStampAsciiA')
+            new_data = sort_dict_list(new_data, "TimeStampAsciiA")
 
             # define format of data version
-            data_version = f'V{int(self.data_version):02d}'
+            data_version = f"V{int(self.data_version):02d}"
 
             # Convert input date into string
             date_str = current_date.strftime(TIME_DAILY_STRFORMAT)
 
             # Build output solo hk daily file basename
-            file_basename = '_'.join([
-                SOLOHK_PREFIX_BASENAME,
-                date_str]
-            )
+            file_basename = "_".join([SOLOHK_PREFIX_BASENAME, date_str])
 
             # Build full new output file basename
-            file_basename = '_'.join([file_basename, data_version]) + '.xml'
+            file_basename = "_".join([file_basename, data_version]) + ".xml"
 
             # Build output file path
             output_target_path = os.path.join(self.output_dir, file_basename)
 
             # Write output file
-            logger.info(f'Writing {len(new_data)} Solo HK elements '
-                        f'into {output_target_path}')
-            if make_param_xml(new_data, output_target_path,
-                              overwrite=True,
-                              logger=logger):
+            logger.info(
+                f"Writing {len(new_data)} Solo HK elements into {output_target_path}"
+            )
+            if make_param_xml(
+                new_data, output_target_path, overwrite=True, logger=logger
+            ):
                 self.processed_files.append(output_target_path)
                 output_files.append(output_target_path)
             else:
-                logger.error(f'Writing {output_target_path} has failed!')
+                logger.error(f"Writing {output_target_path} has failed!")
                 self.failed_files.append(output_target_path)
 
-        self.outputs['solohk_daily_xml'].filepath = output_files
+        self.outputs["solohk_daily_xml"].filepath = output_files
 
     @staticmethod
     def parse_dds_param_xml(xml_file):
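A note on the de-duplication one-liner reformatted above: the elements are dicts, which are unhashable, so set() is not an option; the comprehension does an O(n^2) scan and keeps the last occurrence of each duplicate. The resulting order does not matter here, since sort_dict_list re-sorts the list by timestamp immediately afterwards. A standalone check of the idiom:

```python
# Standalone check of the de-duplication idiom: keeps the LAST
# occurrence of each duplicate (dicts are unhashable, so no set()).
new_data = [{"a": 1}, {"b": 2}, {"a": 1}]
unique = [i for n, i in enumerate(new_data) if i not in new_data[n + 1 :]]
assert unique == [{"b": 2}, {"a": 1}]
```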
@@ -201,9 +212,9 @@ class CatSoloHk(Task):
         ParamSampleListElement = []
 
         if os.path.isfile(xml_file):
-            ParamSampleListElement = xml_to_dict(xml_file)['ns2:ResponsePart'] \
-                ['Response']['ParamResponse'] \
-                ['ParamSampleList']['ParamSampleListElement']
+            ParamSampleListElement = xml_to_dict(xml_file)["ns2:ResponsePart"][
+                "Response"
+            ]["ParamResponse"]["ParamSampleList"]["ParamSampleListElement"]
 
         # Make sure that returned output is a list
         # (If only one PktTcReportListElement is found in the XML
@@ -228,7 +239,6 @@ class CatSoloHk(Task):
 
         # Loop over ParamSampleListElement in the input list
         for current_param in param_list:
-
             # Get element timestamp
             current_timestamp = self._extract_timestamp(current_param)
             # Get date
@@ -245,8 +255,9 @@
         :return: ParamSampleListElement TimeStampAsciiA as datetime object
         """
 
-        return datetime.strptime(param_element['TimeStampAsciiA'],
-                                 TIME_ISO_STRFORMAT[:-1])
+        return datetime.strptime(
+            param_element["TimeStampAsciiA"], TIME_ISO_STRFORMAT[:-1]
+        )
 
     def _get_existing_data(self, date):
         """
@@ -266,25 +277,26 @@
 
         # Get SOLO HK daily file local archive directory path
         if self.archive_path:
-            solo_hk_data_dir = os.path.join(self.archive_path,
-                                            date.strftime(ARCHIVE_DAILY_DIR))
+            solo_hk_data_dir = os.path.join(
+                self.archive_path, date.strftime(ARCHIVE_DAILY_DIR)
+            )
         else:
             # Otherwise get output directory
             solo_hk_data_dir = self.output_dir
 
         # Build SOLO HK daily file basename
-        file_basename = '_'.join([
-            SOLOHK_PREFIX_BASENAME,
-            date_str]
-        )
+        file_basename = "_".join([SOLOHK_PREFIX_BASENAME, date_str])
 
         # Check if daily file(s) already exists in the target directory
-        logger.debug(f'Checking for SOLO HK daily file existence '
-                     f'on {solo_hk_data_dir} for {date} ...')
-        existing_files = glob(os.path.join(
-            solo_hk_data_dir, file_basename + '_V??.xml'))
+        logger.debug(
+            f"Checking for SOLO HK daily file existence "
+            f"on {solo_hk_data_dir} for {date} ..."
+        )
+        existing_files = glob(
+            os.path.join(solo_hk_data_dir, file_basename + "_V??.xml")
+        )
         if existing_files:
-            logger.debug(f'{len(existing_files)} files already exist for {date}')
+            logger.debug(f"{len(existing_files)} files already exist for {date}")
             # If files found then get latest version
             latest_existing_file = get_latest_file(existing_files)
         else:
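The hunk above globs for file_basename + '_V??.xml' and hands the matches to get_latest_file. The real helper lives in roc.film.tools and is not shown in this diff; the following hypothetical equivalent is included only to make the version-selection behavior concrete:

```python
# Hypothetical stand-in for get_latest_file (the real helper lives in
# roc.film.tools): pick the candidate with the highest _Vnn suffix.
import re


def latest_by_version(files):
    def version(path):
        m = re.search(r"_V(\d{2})\.xml$", path)
        return int(m.group(1)) if m else -1

    return max(files, key=version)


files = [
    "solo_HK_platform_20230301_V01.xml",  # illustrative names only
    "solo_HK_platform_20230301_V02.xml",
]
assert latest_by_version(files).endswith("_V02.xml")
```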