roc-film 1.13.5__py3-none-any.whl → 1.14.0__py3-none-any.whl

Files changed (52)
  1. roc/__init__.py +2 -1
  2. roc/film/__init__.py +2 -2
  3. roc/film/commands.py +372 -323
  4. roc/film/config/__init__.py +0 -1
  5. roc/film/constants.py +101 -65
  6. roc/film/descriptor.json +127 -96
  7. roc/film/exceptions.py +28 -27
  8. roc/film/tasks/__init__.py +16 -16
  9. roc/film/tasks/cat_solo_hk.py +86 -74
  10. roc/film/tasks/cdf_postpro.py +438 -309
  11. roc/film/tasks/check_dds.py +39 -45
  12. roc/film/tasks/db_to_anc_bia_sweep_table.py +381 -0
  13. roc/film/tasks/dds_to_l0.py +232 -180
  14. roc/film/tasks/export_solo_coord.py +147 -0
  15. roc/film/tasks/file_handler.py +91 -75
  16. roc/film/tasks/l0_to_hk.py +117 -103
  17. roc/film/tasks/l0_to_l1_bia_current.py +38 -30
  18. roc/film/tasks/l0_to_l1_bia_sweep.py +417 -329
  19. roc/film/tasks/l0_to_l1_sbm.py +250 -208
  20. roc/film/tasks/l0_to_l1_surv.py +185 -130
  21. roc/film/tasks/make_daily_tm.py +40 -37
  22. roc/film/tasks/merge_tcreport.py +77 -71
  23. roc/film/tasks/merge_tmraw.py +101 -88
  24. roc/film/tasks/parse_dds_xml.py +21 -20
  25. roc/film/tasks/set_l0_utc.py +51 -49
  26. roc/film/tests/cdf_compare.py +565 -0
  27. roc/film/tests/hdf5_compare.py +84 -62
  28. roc/film/tests/test_dds_to_l0.py +93 -51
  29. roc/film/tests/test_dds_to_tc.py +8 -11
  30. roc/film/tests/test_dds_to_tm.py +8 -10
  31. roc/film/tests/test_film.py +161 -116
  32. roc/film/tests/test_l0_to_hk.py +64 -36
  33. roc/film/tests/test_l0_to_l1_bia.py +10 -14
  34. roc/film/tests/test_l0_to_l1_sbm.py +14 -19
  35. roc/film/tests/test_l0_to_l1_surv.py +68 -41
  36. roc/film/tests/test_metadata.py +21 -20
  37. roc/film/tests/tests.py +743 -396
  38. roc/film/tools/__init__.py +5 -5
  39. roc/film/tools/dataset_tasks.py +34 -2
  40. roc/film/tools/file_helpers.py +390 -269
  41. roc/film/tools/l0.py +402 -324
  42. roc/film/tools/metadata.py +147 -127
  43. roc/film/tools/skeleton.py +12 -17
  44. roc/film/tools/tools.py +109 -92
  45. roc/film/tools/xlsx2skt.py +161 -139
  46. {roc_film-1.13.5.dist-info → roc_film-1.14.0.dist-info}/LICENSE +127 -125
  47. roc_film-1.14.0.dist-info/METADATA +60 -0
  48. roc_film-1.14.0.dist-info/RECORD +50 -0
  49. {roc_film-1.13.5.dist-info → roc_film-1.14.0.dist-info}/WHEEL +1 -1
  50. roc/film/tasks/l0_to_anc_bia_sweep_table.py +0 -348
  51. roc_film-1.13.5.dist-info/METADATA +0 -120
  52. roc_film-1.13.5.dist-info/RECORD +0 -48
@@ -11,20 +11,25 @@ from poppy.pop.plugins import Plugin
 
 from roc.film.exceptions import UnknownPipeline, InvalidDataVersion
 from roc.film.tools import valid_data_version
-from roc.film.constants import PLUGIN, TIME_DAILY_STRFORMAT, \
-    CDF_TRANGE_STRFORMAT, UNKNOWN_IDB, DATA_VERSION
-
-__all__ = ['init_l0_meta',
-           'init_cdf_global',
-           'get_data_version',
-           'set_logical_file_id',
-           'get_logical_file_id',
-           'get_spice_kernels'
-           ]
-
-
-def init_cdf_global(l0_attrs, task, master_path,
-                    overwrite=None):
+from roc.film.constants import (
+    PLUGIN,
+    TIME_DAILY_STRFORMAT,
+    CDF_TRANGE_STRFORMAT,
+    UNKNOWN_IDB,
+    DATA_VERSION,
+)
+
+__all__ = [
+    "init_l0_meta",
+    "init_cdf_global",
+    "get_data_version",
+    "set_logical_file_id",
+    "get_logical_file_id",
+    "get_spice_kernels",
+]
+
+
+def init_cdf_global(l0_attrs, task, master_path, overwrite=None):
     """
     Define global attributes data to save into the CDF from the content of the L0 file and task.
     See roc.film.tasks.l0.init_l0_meta for the list of specific L0 meta
@@ -42,79 +47,80 @@ def init_cdf_global(l0_attrs, task, master_path,
     # by import master CDF global attributes
     meta = dict(CDF(master_path).attrs)
 
-    pipeline_id = Configuration.manager['descriptor']['pipeline.identifier'].upper()
-    pipeline_version = Configuration.manager['descriptor']['pipeline.release.version']
+    pipeline_id = Configuration.manager["descriptor"]["pipeline.identifier"].upper()
+    pipeline_version = Configuration.manager["descriptor"]["pipeline.release.version"]
 
-    if pipeline_id == 'RGTS':
+    if pipeline_id == "RGTS":
         # Specific to RGTS
-        meta['Pipeline_name'] = pipeline_id + '>ROC Ground Test SGSE'
+        pipeline_name = pipeline_id + ">ROC Ground Test SGSE"
 
         try:
-            meta['Test_name'] = l0_attrs['Test_name'].encode('utf-8')
-            meta['Test_uuid'] = l0_attrs['Test_uuid']
-            meta['Test_description'] = l0_attrs[
-                'Test_description'].encode('utf-8')
-            meta['Test_creation_date'] = l0_attrs['Test_creation_date']
-            meta['Test_launched_date'] = l0_attrs['Test_launched_date']
-            meta['Test_terminated_date'] = l0_attrs['Test_terminated_date']
-            meta['Test_log_file'] = l0_attrs['Test_log_file']
+            meta["Test_name"] = l0_attrs["Test_name"].encode("utf-8")
+            meta["Test_uuid"] = l0_attrs["Test_uuid"]
+            meta["Test_description"] = l0_attrs["Test_description"].encode("utf-8")
+            meta["Test_creation_date"] = l0_attrs["Test_creation_date"]
+            meta["Test_launched_date"] = l0_attrs["Test_launched_date"]
+            meta["Test_terminated_date"] = l0_attrs["Test_terminated_date"]
+            meta["Test_log_file"] = l0_attrs["Test_log_file"]
+
+            if "Free_field" in l0_attrs and len(l0_attrs["Free_field"].strip()) > 0:
+                meta["Free_field"] = l0_attrs["Free_field"]
+
+            # provider in the good format
+            meta["Provider"] = l0_attrs["Provider"]
 
             # ID of the test for the ROC internal use
-            meta['Test_id'] = l0_attrs['Test_id']
+            meta["Test_id"] = l0_attrs["Test_id"]
         except Exception:
             logger.warning('No "Test_*" attribute found for the input l0')
 
-    elif pipeline_id == 'RODP':
-        # TODO - Complete specific global attributes for RODP
-        meta['Pipeline_name'] = pipeline_id + '>RPW Operation and Data Pipeline'
-        pass
+    elif pipeline_id == "RODP":
+        pipeline_name = pipeline_id + ">RPW Operation and Data Pipeline"
     else:
-        raise UnknownPipeline(f'UNKNOWN PIPELINE TYPE:'
-                              f' {pipeline_id}, ABORTING!')
+        raise UnknownPipeline(f"UNKNOWN PIPELINE TYPE: {pipeline_id}, ABORTING!")
 
     # Common global attributes
     try:
         # Perform some verifications on metadata
-        if str(meta['Pipeline_name']) != str(l0_attrs['Pipeline_name']):
-            logger.warning('Pipeline_name is inconsistent '
-                           f"between the pipeline ({meta['Pipeline_name']})"
-                           f"and the input L0 file ({l0_attrs['Pipeline_name']})!")
-
-        meta['Pipeline_version'] = pipeline_version
-        meta['Parents'] = ['CDF>' + l0_attrs['Logical_file_id']]
-        meta['Parent_version'] = valid_data_version(l0_attrs['Data_version'])
-        meta['Free_field'] = l0_attrs['Free_field']
-        meta['Software_version'] = Plugin.manager[PLUGIN].version
+        if pipeline_name != str(l0_attrs["Pipeline_name"]):
+            logger.warning(
+                "Pipeline_name is inconsistent "
+                f"between the pipeline ({meta['Pipeline_name']})"
+                f"and the input L0 file ({l0_attrs['Pipeline_name']})!"
+            )
+
+        meta["Pipeline_name"] = pipeline_name
+        meta["Pipeline_version"] = pipeline_version
+        meta["Parents"] = ["CDF>" + l0_attrs["Logical_file_id"]]
+        # meta['Parent_version'] = valid_data_version(l0_attrs['Data_version'])
+        meta["Software_version"] = Plugin.manager[PLUGIN].version
 
         # Use for building filename
-        meta['Datetime'] = l0_attrs['Datetime']
-
-        # provider in the good format
-        meta['Provider'] = l0_attrs['Provider']
+        meta["Datetime"] = l0_attrs["Datetime"]
 
         # Get file naming convention
-        meta['File_naming_convention'] = l0_attrs['File_naming_convention']
+        meta["File_naming_convention"] = l0_attrs["File_naming_convention"]
     except Exception:
-        logger.error('Missing attributes in l0 file!')
+        logger.error("Missing attributes in l0 file!")
 
     # the name of the software (plugin) that generated the file, from the
     # descriptor information
-    meta['Software_name'] = PLUGIN
+    meta["Software_name"] = PLUGIN
 
     # Initialize Validate (0 = no validation)
-    meta['Validate'] = '0'
+    meta["Validate"] = "0"
 
     # Initialize data_version to "01"
-    meta['Data_version'] = get_data_version(task)
+    meta["Data_version"] = get_data_version(task)
 
     # If overwrite keyword, then replace g.attrs value
     if overwrite:
         for key, val in overwrite.items():
             meta[key] = val
-            logger.debug(f'{key} g.attribute value set to {val}')
+            logger.debug(f"{key} g.attribute value set to {val}")
 
     # Initialize logical_file_id
-    meta['Logical_file_id'] = set_logical_file_id(meta)
+    meta["Logical_file_id"] = set_logical_file_id(meta)
 
     return meta
 
@@ -127,13 +133,16 @@ def get_idb_version(task, **kwargs):
     :return: string with idb_version
     """
 
-    idb_version = task.pipeline.get('idb_version',
-                                    default=kwargs.get('idb_version', UNKNOWN_IDB))
+    idb_version = task.pipeline.get(
+        "idb_version", default=kwargs.get("idb_version", UNKNOWN_IDB)
+    )
     try:
-        idb_version = task.inputs['raw_data'].value.packet_parser.idb_version
+        idb_version = task.inputs["raw_data"].value.packet_parser.idb_version
     except Exception:
-        logger.debug('No IDB version found in the input raw_data:\n'
-                     f'attempting to retrieve value from pipeline properties: {idb_version}')
+        logger.debug(
+            "No IDB version found in the input raw_data:\n"
+            f"attempting to retrieve value from pipeline properties: {idb_version}"
+        )
 
     return idb_version
 
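Both get_idb_version above and get_idb_source below resolve the IDB identifier the same way: a value parsed from the raw_data input wins, otherwise the pipeline property is used, falling back to the keyword argument and finally to the UNKNOWN_IDB constant. A minimal standalone sketch of that precedence chain (pipeline_value, kwargs_value and raw_data are hypothetical stand-ins for task.pipeline, **kwargs and task.inputs['raw_data'].value):

    # Sketch of the fallback chain used by get_idb_version/get_idb_source.
    UNKNOWN_IDB = "UNKNOWN"

    def resolve_idb_version(pipeline_value=None, kwargs_value=None, raw_data=None):
        # Pipeline property first, else the keyword default, else UNKNOWN_IDB
        idb_version = pipeline_value or kwargs_value or UNKNOWN_IDB
        try:
            # A parsed raw_data input always takes precedence
            idb_version = raw_data.packet_parser.idb_version
        except AttributeError:
            pass  # no raw_data (or no parser attached): keep the fallback value
        return idb_version

    assert resolve_idb_version() == "UNKNOWN"
    assert resolve_idb_version(kwargs_value="4.3.1") == "4.3.1"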
@@ -146,19 +155,21 @@ def get_idb_source(task, **kwargs):
     :return: string with idb_source
     """
 
-    idb_source = task.pipeline.get('idb_source',
-                                   default=kwargs.get('idb_source', UNKNOWN_IDB))
+    idb_source = task.pipeline.get(
+        "idb_source", default=kwargs.get("idb_source", UNKNOWN_IDB)
+    )
     try:
-        idb_source = task.inputs['raw_data'].value.packet_parser.idb_source
+        idb_source = task.inputs["raw_data"].value.packet_parser.idb_source
     except Exception:
-        logger.debug('No IDB source found in the input raw_data:\n'
-                     f'attempting to retrieve value from pipeline properties: {idb_source}')
+        logger.debug(
+            "No IDB source found in the input raw_data:\n"
+            f"attempting to retrieve value from pipeline properties: {idb_source}"
+        )
 
     return idb_source
 
 
-def init_l0_meta(task,
-                 extra_attrs={}):
+def init_l0_meta(task, extra_attrs={}):
     """
     Initialize RPW L0 metadata
 
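A side note on the reformatted signature above: extra_attrs={} is a mutable default argument. It stays safe here because the shared default dict is only read from and popped while empty, but the conventional defensive idiom is worth recalling; a minimal sketch (the standard alternative, not what the plugin does in 1.14.0):

    # Standard alternative to a mutable default argument (illustration only;
    # init_l0_meta itself keeps extra_attrs={}).
    def init_meta_sketch(extra_attrs=None):
        if extra_attrs is None:
            extra_attrs = {}  # a fresh dict per call, never shared between calls
        return extra_attrs.pop("Datetime", None)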
@@ -172,100 +183,110 @@ def init_l0_meta(task,
 
     # Retrieve required values from the pipeline properties
     # Get pipeline ID ("RGTS" or "RODP")
-    pipeline_id = task.pipeline.properties.configuration['environment.ROC_PIP_NAME'].upper()
+    pipeline_id = task.pipeline.properties.configuration[
+        "environment.ROC_PIP_NAME"
+    ].upper()
 
     # Get input RawData value
     try:
-        raw_data = task.inputs['raw_data'].value
+        raw_data = task.inputs["raw_data"].value
     except Exception:
         raw_data = None
 
     # Get metadata specific to ROC-SGSE
-    if pipeline_id == 'RGTS':
-
-        meta['Pipeline_name'] = pipeline_id + '>ROC Ground Test SGSE'
+    if pipeline_id == "RGTS":
+        meta["Pipeline_name"] = pipeline_id + ">ROC Ground Test SGSE"
 
         try:
             # Get the 7 first characters of the test log SHA
             test_sha = raw_data.sha1
             test_short_sha = raw_data.short_sha1
 
-            meta['Test_name'] = raw_data.name
-            meta['Test_uuid'] = raw_data.uuid
-            meta['Test_description'] = raw_data.description
-            meta['Test_creation_date'] = str(raw_data.creation_date)
-            meta['Test_launched_date'] = str(raw_data.date)
-            meta['Test_terminated_date'] = str(raw_data.terminated_date)
-            meta['Test_log_file'] = osp.basename(raw_data.file_path)
-            meta['Test_id'] = test_short_sha + '>' + test_sha
-
-            meta['Free_field'] = '-'.join([task.pipeline.provider[:3].lower(),
-                                           test_short_sha])
-            meta['Datetime'] = '-'.join([raw_data.time_min.strftime(CDF_TRANGE_STRFORMAT),
-                                         raw_data.time_max.strftime(CDF_TRANGE_STRFORMAT)])
+            meta["Test_name"] = raw_data.name
+            meta["Test_uuid"] = raw_data.uuid
+            meta["Test_description"] = raw_data.description
+            meta["Test_creation_date"] = str(raw_data.creation_date)
+            meta["Test_launched_date"] = str(raw_data.date)
+            meta["Test_terminated_date"] = str(raw_data.terminated_date)
+            meta["Test_log_file"] = osp.basename(raw_data.file_path)
+            meta["Test_id"] = test_short_sha + ">" + test_sha
+
+            meta["Free_field"] = "-".join(
+                [task.pipeline.provider[:3].lower(), test_short_sha]
+            )
+            meta["Datetime"] = "-".join(
+                [
+                    raw_data.time_min.strftime(CDF_TRANGE_STRFORMAT),
+                    raw_data.time_max.strftime(CDF_TRANGE_STRFORMAT),
+                ]
+            )
         except Exception:
-            logger.warning('No input test log found!')
-            meta['Free_field'] = ''
-            meta['Datetime'] = datetime.now().strftime(TIME_DAILY_STRFORMAT)
+            logger.warning("No input test log found!")
+            meta["Free_field"] = ""
+            meta["Datetime"] = datetime.now().strftime(TIME_DAILY_STRFORMAT)
 
-        meta['File_naming_convention'] = '<Source_name>_<LEVEL>_<Descriptor>_' \
-                                         '<Datetime>_V<Data_version>_' \
-                                         '<Free_field>'
+        meta["File_naming_convention"] = (
+            "<Source_name>_<LEVEL>_<Descriptor>_<Datetime>_V<Data_version>_<Free_field>"
+        )
 
-    elif pipeline_id == 'RODP':
+    elif pipeline_id == "RODP":
         # Get metadata specific to RODP
         # TODO - Complete metadata for RPW L0
 
-        meta['File_naming_convention'] = '<Source_name>_<LEVEL>_<Descriptor>_' \
-                                         '<Datetime>_V<Data_version>'
+        meta["File_naming_convention"] = (
+            "<Source_name>_<LEVEL>_<Descriptor>_<Datetime>_V<Data_version>"
        )
 
-        meta['Pipeline_name'] = pipeline_id + '>RPW Operation and Data Pipeline'
-        meta['Free_field'] = ''
+        meta["Pipeline_name"] = pipeline_id + ">RPW Operation and Data Pipeline"
+        meta["Free_field"] = ""
 
         # Define Datetime value
-        datetime_attr = extra_attrs.pop('Datetime', None)
+        datetime_attr = extra_attrs.pop("Datetime", None)
         if datetime_attr is None:
-            if raw_data is not None and hasattr(raw_data, 'datetime') and raw_data.datetime is not None:
+            if (
+                raw_data is not None
+                and hasattr(raw_data, "datetime")
+                and raw_data.datetime is not None
+            ):
                 datetime_attr = raw_data.datetime.strftime(TIME_DAILY_STRFORMAT)
             else:
-                datetime_attr = task.pipeline.get('datetime')
+                datetime_attr = task.pipeline.get("datetime")
                 if datetime_attr is None:
-                    logger.warning('Unknown Datetime attribute value')
+                    logger.warning("Unknown Datetime attribute value")
                 else:
                     datetime_attr = datetime_attr.strftime(TIME_DAILY_STRFORMAT)
 
-        meta['Datetime'] = datetime_attr
+        meta["Datetime"] = datetime_attr
 
     else:
-        raise UnknownPipeline(f'UNKNOWN PIPELINE TYPE:'
-                              f' {pipeline_id}, ABORTING!')
+        raise UnknownPipeline(f"UNKNOWN PIPELINE TYPE: {pipeline_id}, ABORTING!")
 
     # Common metadata
-    meta['Project'] = 'SOLO>Solar Orbiter'
-    meta['Source_name'] = 'SOLO>Solar Orbiter'
-    meta['Software_name'] = PLUGIN
-    meta['Software_version'] = Plugin.manager[PLUGIN].version
-    meta['Dataset_ID'] = 'SOLO_L0_RPW'
-    meta['Descriptor'] = 'RPW>Radio and Plasma Waves instrument'
-    meta['LEVEL'] = 'L0>Level 0 data processing'
-    meta['Provider'] = '>'.join(
+    meta["Project"] = "SOLO>Solar Orbiter"
+    meta["Source_name"] = "SOLO>Solar Orbiter"
+    meta["Software_name"] = PLUGIN
+    meta["Software_version"] = Plugin.manager[PLUGIN].version
+    meta["Dataset_ID"] = "SOLO_L0_RPW"
+    meta["Descriptor"] = "RPW>Radio and Plasma Waves instrument"
+    meta["LEVEL"] = "L0>Level 0 data processing"
+    meta["Provider"] = ">".join(
         [
             task.pipeline.provider[:3].upper(),
             task.pipeline.provider,
         ]
     )
 
-    meta['Pipeline_version'] = Configuration.manager['descriptor'][
-        'pipeline.release.version'
+    meta["Pipeline_version"] = Configuration.manager["descriptor"][
+        "pipeline.release.version"
     ]
 
     # Initialize data_version
-    data_version = extra_attrs.pop('Data_version', None)
+    data_version = extra_attrs.pop("Data_version", None)
     if data_version is None:
         # Initialize data_Version to "01"
-        meta['Data_version'] = get_data_version(task)
+        meta["Data_version"] = get_data_version(task)
     else:
-        meta['Data_version'] = data_version
+        meta["Data_version"] = data_version
 
     # Add extra attributes (if any)
     if extra_attrs:
@@ -273,7 +294,7 @@ def init_l0_meta(task,
             meta[key] = val
 
     # Initialize logical_file_id
-    meta['Logical_file_id'] = set_logical_file_id(meta)
+    meta["Logical_file_id"] = set_logical_file_id(meta)
 
     return meta
 
@@ -286,7 +307,7 @@ def get_data_version(task):
     :return: string containing Data_version value
     """
 
-    data_version = task.pipeline.get('data_version', default=None, args=True)
+    data_version = task.pipeline.get("data_version", default=None, args=True)
     if data_version is None:
         data_version = DATA_VERSION
     else:
@@ -296,7 +317,7 @@ def get_data_version(task):
     try:
         return valid_data_version(data_version)
     except Exception:
-        raise InvalidDataVersion(f'Input data version is invalid: {data_version}')
+        raise InvalidDataVersion(f"Input data version is invalid: {data_version}")
 
 
 def set_logical_file_id(metadata):
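valid_data_version itself is not part of this diff; judging from the surrounding comments ('Initialize data_version to "01"'), it plausibly normalizes the version to a two-digit, zero-padded string and raises on anything else. A hypothetical stand-in, for illustration only:

    # Hypothetical re-implementation of valid_data_version, inferred from the
    # comments in this diff; the real function lives in roc.film.tools.
    def valid_data_version_sketch(data_version):
        version = int(str(data_version).lstrip("Vv"))  # accept 2, '2' or 'V02'
        if not 0 <= version <= 99:
            raise ValueError(f"Input data version is invalid: {data_version}")
        return "{0:02d}".format(version)

    assert valid_data_version_sketch("2") == "02"
    assert valid_data_version_sketch("V13") == "13"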
@@ -308,16 +329,16 @@ def set_logical_file_id(metadata):
     """
 
     # Get expected fields in the file_naming_convention
-    logical_file_id = str(metadata['File_naming_convention'])
-    for field in re.findall(r'<([A-Za-z0-9_\-]+)>', logical_file_id):
+    logical_file_id = str(metadata["File_naming_convention"])
+    for field in re.findall(r"<([A-Za-z0-9_\-]+)>", logical_file_id):
         # Extract value from metadata
-        value = str(metadata[field]).split('>')[0]
-        if field == 'Datetime' or field == 'LEVEL':
+        value = str(metadata[field]).split(">")[0]
+        if field == "Datetime" or field == "LEVEL":
             value = value.upper()
         else:
             value = value.lower()
 
-        logical_file_id = logical_file_id.replace('<' + field + '>', value)
+        logical_file_id = logical_file_id.replace("<" + field + ">", value)
 
     return logical_file_id
 
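Applied to the attributes that init_l0_meta fills in, the loop above expands the RODP naming convention into a concrete file id. A worked example (the Datetime and Data_version values are invented):

    import re

    # Attribute values as set by init_l0_meta; Datetime/Data_version invented.
    meta = {
        "File_naming_convention": "<Source_name>_<LEVEL>_<Descriptor>_<Datetime>_V<Data_version>",
        "Source_name": "SOLO>Solar Orbiter",
        "LEVEL": "L0>Level 0 data processing",
        "Descriptor": "RPW>Radio and Plasma Waves instrument",
        "Datetime": "20230401",
        "Data_version": "01",
    }

    logical_file_id = str(meta["File_naming_convention"])
    for field in re.findall(r"<([A-Za-z0-9_\-]+)>", logical_file_id):
        # Keep only the short token before '>', upper-case Datetime/LEVEL
        value = str(meta[field]).split(">")[0]
        value = value.upper() if field in ("Datetime", "LEVEL") else value.lower()
        logical_file_id = logical_file_id.replace("<" + field + ">", value)

    print(logical_file_id)  # solo_L0_rpw_20230401_V01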
@@ -334,20 +355,19 @@ def get_logical_file_id(filename):
     return osp.basename(osp.splitext(filename)[0])
 
 
-def get_spice_kernels(time_instance=None,
-                      pattern=None):
+def get_spice_kernels(time_instance=None, pattern=None):
     # If time_instance not passed as input argument,
     # then initialize it from Time class (singleton)
     if time_instance is None:
         from roc.rpl.time import Time
+
         time_instance = Time()
 
     # get all loaded kernels
     loaded_kernels = time_instance.spice.kall()
 
     if pattern is not None:
-        loaded_kernels = [kfile for kfile in loaded_kernels.keys()
-                          if pattern in kfile]
+        loaded_kernels = [kfile for kfile in loaded_kernels.keys() if pattern in kfile]
     else:
         loaded_kernels = list(loaded_kernels.keys())
 
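Called without arguments, get_spice_kernels lists every kernel currently loaded by the shared Time singleton; with pattern it keeps only names containing that substring. The filter in isolation (kernel names invented for the example):

    # Substring filter applied by get_spice_kernels, shown standalone.
    loaded_kernels = {"solo_ANC_soc-sclk_20230401_V01.tsc": None, "naif0012.tls": None}
    pattern = "solo"
    print([kfile for kfile in loaded_kernels.keys() if pattern in kfile])
    # ['solo_ANC_soc-sclk_20230401_V01.tsc']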
@@ -19,12 +19,12 @@ from roc.film.tools import DESCRIPTOR
 __all__ = ["IDBToExcel"]
 
 
-
 class IDBToExcel(object):
     """
     Class used to convert the IDB in XML format to the excel format needed by
     the converter from excel to CDF skeleton format by Xavier.
     """
+
     def __init__(self, args):
         """
         Store arguments from the command line.
@@ -94,8 +94,8 @@ class IDBToExcel(object):
 
         # save the excel file in the output directory
         outfile = osp.join(
-            self.args.directory,
-            data_descr["template"].replace(".cdf", ".xlsx"))
+            self.args.directory, data_descr["template"].replace(".cdf", ".xlsx")
+        )
         logger.info("Saving {0}".format(outfile))
         workbook.save(outfile)
 
@@ -119,8 +119,7 @@ class IDBToExcel(object):
 
         if version is None:
             raise ValueError(
-                "Skeleton version is not set in the HK skeleton " +
-                "template."
+                "Skeleton version is not set in the HK skeleton " + "template."
             )
 
         # remove surrounds
@@ -130,8 +129,8 @@ class IDBToExcel(object):
         if re.match("^[0-9]{2}$", version) is None:
             raise ValueError(
                 (
-                    "Bad format for the skeleton version in the HK " +
-                    "template with value {0}"
+                    "Bad format for the skeleton version in the HK "
+                    + "template with value {0}"
                 ).format(version)
             )
 
@@ -164,13 +163,11 @@ class IDBToExcel(object):
 
         # logical source
         elif name == "Logical_source":
-            row[3].value = "_".join([items[0].lower(),
-                                     "_HK_",
-                                     items[2].lower()])
+            row[3].value = "_".join([items[0].lower(), "_HK_", items[2].lower()])
 
         # descriptor
         elif name == "Descriptor":
-            row[3].value = (items[2].upper() + ">" + items[2].upper())
+            row[3].value = items[2].upper() + ">" + items[2].upper()
 
         elif name == "MODS":
             row[3].value = dt.datetime.utcnow().isoformat()
@@ -178,8 +175,8 @@ class IDBToExcel(object):
         # TEXT field
         elif name == "TEXT":
             row[3].value = (
-                "This file contains RPW {0} housekeeping." +
-                "Parameters are returned in the TM_{0}_HK packets."
+                "This file contains RPW {0} housekeeping."
+                + "Parameters are returned in the TM_{0}_HK packets."
             ).format(items[2].split("-")[1])
 
     def base_workbook(self):
@@ -217,7 +214,7 @@ class IDBToExcel(object):
         for row in rows:
             for cell in row:
                 cell.value = None
-        #sheet._garbage_collect()
+        # sheet._garbage_collect()
 
         return wb, attributes
 
@@ -278,9 +275,7 @@ class IDBToExcel(object):
         attributes["LABLAXIS"]["value"] = parameter.definition.name
         attributes["SRDB_PARAM_ID"]["value"] = str(parameter)
         if self.idb_parser.is_enumeration(parameter):
-            attributes["SRDB_ENUM_ID"][
-                "value"
-            ] = self.idb_parser.enumeration_srdb(
+            attributes["SRDB_ENUM_ID"]["value"] = self.idb_parser.enumeration_srdb(
                 parameter.definition.data_type,
                 parameter.type,
             )