roc-film 1.13.4-py3-none-any.whl → 1.14.0-py3-none-any.whl

Files changed (52)
  1. roc/__init__.py +2 -1
  2. roc/film/__init__.py +2 -2
  3. roc/film/commands.py +372 -323
  4. roc/film/config/__init__.py +0 -1
  5. roc/film/constants.py +101 -65
  6. roc/film/descriptor.json +126 -95
  7. roc/film/exceptions.py +28 -27
  8. roc/film/tasks/__init__.py +16 -16
  9. roc/film/tasks/cat_solo_hk.py +86 -74
  10. roc/film/tasks/cdf_postpro.py +438 -309
  11. roc/film/tasks/check_dds.py +39 -45
  12. roc/film/tasks/db_to_anc_bia_sweep_table.py +381 -0
  13. roc/film/tasks/dds_to_l0.py +232 -180
  14. roc/film/tasks/export_solo_coord.py +147 -0
  15. roc/film/tasks/file_handler.py +91 -75
  16. roc/film/tasks/l0_to_hk.py +117 -103
  17. roc/film/tasks/l0_to_l1_bia_current.py +38 -30
  18. roc/film/tasks/l0_to_l1_bia_sweep.py +417 -329
  19. roc/film/tasks/l0_to_l1_sbm.py +250 -208
  20. roc/film/tasks/l0_to_l1_surv.py +185 -130
  21. roc/film/tasks/make_daily_tm.py +40 -37
  22. roc/film/tasks/merge_tcreport.py +77 -71
  23. roc/film/tasks/merge_tmraw.py +102 -89
  24. roc/film/tasks/parse_dds_xml.py +21 -20
  25. roc/film/tasks/set_l0_utc.py +51 -49
  26. roc/film/tests/cdf_compare.py +565 -0
  27. roc/film/tests/hdf5_compare.py +84 -62
  28. roc/film/tests/test_dds_to_l0.py +93 -51
  29. roc/film/tests/test_dds_to_tc.py +8 -11
  30. roc/film/tests/test_dds_to_tm.py +8 -10
  31. roc/film/tests/test_film.py +161 -116
  32. roc/film/tests/test_l0_to_hk.py +64 -36
  33. roc/film/tests/test_l0_to_l1_bia.py +10 -14
  34. roc/film/tests/test_l0_to_l1_sbm.py +14 -19
  35. roc/film/tests/test_l0_to_l1_surv.py +68 -41
  36. roc/film/tests/test_metadata.py +21 -20
  37. roc/film/tests/tests.py +743 -396
  38. roc/film/tools/__init__.py +5 -5
  39. roc/film/tools/dataset_tasks.py +34 -2
  40. roc/film/tools/file_helpers.py +390 -269
  41. roc/film/tools/l0.py +402 -324
  42. roc/film/tools/metadata.py +147 -127
  43. roc/film/tools/skeleton.py +12 -17
  44. roc/film/tools/tools.py +109 -92
  45. roc/film/tools/xlsx2skt.py +161 -139
  46. {roc_film-1.13.4.dist-info → roc_film-1.14.0.dist-info}/LICENSE +127 -125
  47. roc_film-1.14.0.dist-info/METADATA +60 -0
  48. roc_film-1.14.0.dist-info/RECORD +50 -0
  49. {roc_film-1.13.4.dist-info → roc_film-1.14.0.dist-info}/WHEEL +1 -1
  50. roc/film/tasks/l0_to_anc_bia_sweep_table.py +0 -348
  51. roc_film-1.13.4.dist-info/METADATA +0 -120
  52. roc_film-1.13.4.dist-info/RECORD +0 -48
roc/film/commands.py CHANGED
@@ -13,15 +13,12 @@ from poppy.core.logger import logger
 
 from roc.film.tools.file_helpers import get_output_dir, get_products_dir, is_output_dir
 from roc.film.tools import IDBToExcel
-from roc.film.tools import paths, \
-    valid_time, \
-    valid_single_file, \
-    setup_lock, valid_date
+from roc.film.tools import paths, valid_time, valid_single_file, setup_lock, valid_date
 
 from roc.film.tasks import L0ToHk
 from roc.film.tasks import L0ToL1Surv
 from roc.film.tasks import L0ToL1Sbm
-from roc.film.tasks.l0_to_anc_bia_sweep_table import L0ToAncBiaSweepTable
+from roc.film.tasks.db_to_anc_bia_sweep_table import DbToAncBiaSweepTable
 from roc.film.tasks.l0_to_l1_bia_current import L0ToL1BiaCurrent
 from roc.film.tasks import L0ToL1BiaSweep
 from roc.film.tasks import MoveFailedFiles, MoveToProdDir
@@ -35,17 +32,24 @@ from roc.film.tasks.make_daily_tm import MakeDailyTm
 from roc.film.tasks.merge_tcreport import MergeTcReport
 from roc.film.tasks.merge_tmraw import MergeTmRaw
 from roc.film.tasks.parse_dds_xml import ParseDdsXml
-from roc.film.constants import SCOS_HEADER_BYTES, TEMP_DIR, CDFEXPORT_PATH, CDF_POST_PRO_OPTS_ARGS
+from roc.film.tasks.export_solo_coord import ExportSoloHeeCoord
+from roc.film.constants import (
+    SCOS_HEADER_BYTES,
+    TEMP_DIR,
+    CDFCONVERT_PATH,
+    CDF_POST_PRO_OPTS_ARGS,
+)
 
 
 class FilmCommands(Command):
     """
     Manage the commands relative to the FILM plugin.
     """
-    __command__ = 'film'
-    __command_name__ = 'film'
-    __parent__ = 'master'
-    __parent_arguments__ = ['base']
+
+    __command__ = "film"
+    __command_name__ = "film"
+    __parent__ = "master"
+    __parent_arguments__ = ["base"]
     __help__ = """
     Commands relative to the FILM plugin, responsible for generating and
     storing data products files from the ROC pipeline.
@@ -64,8 +68,8 @@ class FilmCommands(Command):
         # To indicate that output file production is in progress
         # lock file is automatically deleted at the end.
         parser.add_argument(
-            '--lock-file',
-            help='Name of the lock temporary file.',
+            "--lock-file",
+            help="Name of the lock temporary file.",
             default=None,
             nargs=1,
         )
@@ -74,34 +78,34 @@ class FilmCommands(Command):
        # Output files will be moved into this directory at the end of the run
        # (If not passed, output files will stay in the output directory)
        parser.add_argument(
-            '--products-dir',
+            "--products-dir",
            type=str,
-            help='Path of the directory where output file(s) folder'
-                 ' must be moved at the end of the process',
+            help="Path of the directory where output file(s) folder"
+            " must be moved at the end of the process",
            default=None,
            nargs=1,
        )
 
        # specify the IDB version to use
        parser.add_argument(
-            '--idb-version',
-            help='IDB version to use.',
+            "--idb-version",
+            help="IDB version to use.",
            default=None,
            nargs=1,
        )
 
        # specify the IDB source to use
        parser.add_argument(
-            '--idb-source',
-            help='IDB source to use (MIB, SRDB or PALISADE).',
+            "--idb-source",
+            help="IDB source to use (MIB, SRDB or PALISADE).",
            default=None,
            nargs=1,
        )
 
        # Get path of the master binary CDF directory
        parser.add_argument(
-            '-m',
-            '--master-cdf-dir',
+            "-m",
+            "--master-cdf-dir",
            help="""
            The absolute path to the directory where the master binary CDF are stored.
            If not provided, try to check the value of
@@ -115,17 +119,17 @@ class FilmCommands(Command):
 
        # Specify the value of the Data_version attribute (and filename)
        parser.add_argument(
-            '-v',
-            '--data-version',
-            help='Define the Data_version attribute value for output CDF.',
+            "-v",
+            "--data-version",
+            help="Define the Data_version attribute value for output CDF.",
            default=None,
-            nargs=1
+            nargs=1,
        )
 
        parser.add_argument(
-            '-s',
-            '--start-time',
-            help='Data file production start time. '
+            "-s",
+            "--start-time",
+            help="Data file production start time. "
            "Expected datetime format is 'YYYY-MM-DDThh:mm:ss'.",
            type=valid_time,
            default=None,
@@ -133,10 +137,10 @@ class FilmCommands(Command):
        )
 
        parser.add_argument(
-            '-e',
-            '--end-time',
-            help='Data file production end time. '
-                 "Expected datetime format is 'YYYY-MM-DDThh:mm:ss'.",
+            "-e",
+            "--end-time",
+            help="Data file production end time. "
+            "Expected datetime format is 'YYYY-MM-DDThh:mm:ss'.",
            type=valid_time,
            default=None,
            nargs=1,
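
Note: `--start-time` and `--end-time` pass `valid_time` as the argparse `type` callable, so malformed datetimes are rejected at parse time. The real validator lives in `roc.film.tools` and is not shown in this diff; a minimal sketch of the pattern, assuming the 'YYYY-MM-DDThh:mm:ss' format quoted in the help text:

    # Sketch only: roc.film.tools.valid_time is not shown in this diff,
    # so this is an illustrative stand-in, not the plugin's actual code.
    import argparse
    from datetime import datetime

    TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"

    def valid_time_sketch(value: str) -> datetime:
        # Convert the CLI string, raising the error type argparse expects.
        try:
            return datetime.strptime(value, TIME_FORMAT)
        except ValueError:
            raise argparse.ArgumentTypeError(
                f"Invalid datetime {value!r}, expected format {TIME_FORMAT!r}"
            )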
@@ -144,28 +148,28 @@ class FilmCommands(Command):
 
        # Give the month to process (will replace start_time/end_time values)
        parser.add_argument(
-            '--monthly',
-            action='store_true',
+            "--monthly",
+            action="store_true",
            default=False,
-            help='Generate output monthly files'
+            help="Generate output monthly files",
        )
 
        # Remove SCOS2000 header in the binary packet
        parser.add_argument(
-            '--scos-header',
+            "--scos-header",
            nargs=1,
            type=int,
            default=[SCOS_HEADER_BYTES],
-            help='Length (in bytes) of SCOS2000 header to be removed'
-                 ' from the TM packet in the DDS file.'
-                 f' (Default value is {SCOS_HEADER_BYTES} bytes.)',
+            help="Length (in bytes) of SCOS2000 header to be removed"
+            " from the TM packet in the DDS file."
+            f" (Default value is {SCOS_HEADER_BYTES} bytes.)",
        )
 
        # Do no process/write invalid packet(s)
        parser.add_argument(
-            '--no-invalid-packet',
-            action='store_true',
-            help='Do not keep invalid packet(s).',
+            "--no-invalid-packet",
+            action="store_true",
+            help="Do not keep invalid packet(s).",
        )
 
        # If True, tag any output file with "-cdag" suffix in the descriptor field
@@ -173,24 +177,31 @@ class FilmCommands(Command):
        # Indicating that it is a preliminary files to be distributed to the
        # Calibration Data Access Group (CDAG) only
        parser.add_argument(
-            '--cdag',
-            action='store_true',
+            "--cdag",
+            action="store_true",
            help='If True, add the "cdag" suffix to the descriptor field of L1 CDF filename.',
            default=False,
        )
 
        # Do not use NAIF SPICE toolkit to compute time/ancillary data
        parser.add_argument(
-            '--no-spice',
-            action='store_true',
-            help='Do not use NAIF SPICE toolkit to compute time/ancillary data.',
+            "--no-spice",
+            action="store_true",
+            help="Do not use NAIF SPICE toolkit to compute time/ancillary data.",
+        )
+
+        # Do not move output files in the final target directory ("products")
+        parser.add_argument(
+            "--no-move",
+            action="store_true",
+            help="Do not move output files in the final target directory.",
        )
 
        # Force data file creation
        parser.add_argument(
-            '--force',
-            action='store_true',
-            help='Force data file creation.',
+            "--force",
+            action="store_true",
+            help="Force data file creation.",
        )
 
 
@@ -203,30 +214,30 @@ class ClassifyTmRawCommand(Command):
    by default new version of the daily file is created and
    only new DDS packets have been inserted.
    """
-    __command__ = 'film_classify_tmraw'
-    __command_name__ = 'classify_tmraw'
-    __parent__ = 'film'
-    __parent_arguments__ = ['base']
+
+    __command__ = "film_classify_tmraw"
+    __command_name__ = "classify_tmraw"
+    __parent__ = "film"
+    __parent_arguments__ = ["base"]
    __help__ = """
    Command to classify input SolO DDS TmRaw files as daily file(s).
    """
 
    def add_arguments(self, parser):
-
        # path of input DDS TmRaw response file(s)
        parser.add_argument(
-            '--dds-files',
+            "--dds-files",
            help="""
            List of input SolO DDS TmRaw response XML file(s) to classify.
            """,
            type=str,
-            nargs='+',
+            nargs="+",
            required=True,
        )
 
        parser.add_argument(
-            '--processed-dds-dir',
-            help=f"""
+            "--processed-dds-dir",
+            help="""
            Directory where processed DDS file(s) must be moved at the end.
            """,
            type=str,
@@ -235,8 +246,8 @@ class ClassifyTmRawCommand(Command):
        )
 
        parser.add_argument(
-            '--failed-dds-dir',
-            help=f"""
+            "--failed-dds-dir",
+            help="""
            Directory where failed DDS file(s) must be moved at the end.
            """,
            type=str,
@@ -245,28 +256,28 @@ class ClassifyTmRawCommand(Command):
        )
 
        parser.add_argument(
-            '--archive-path',
+            "--archive-path",
            type=str,
            default=None,
-            help='Root path of the archive local directory.'
-                 'If defined, the pipeline first check if daily file(s) already exist(s) in the archive',
+            help="Root path of the archive local directory."
+            "If defined, the pipeline first check if daily file(s) already exist(s) in the archive",
            nargs=1,
        )
 
        # Clear input DDS files
        parser.add_argument(
-            '--clear-dds',
-            action='store_true',
-            help='If passed, then remove input list of processed/failed Dds.',
+            "--clear-dds",
+            action="store_true",
+            help="If passed, then remove input list of processed/failed Dds.",
            default=False,
        )
 
        parser.add_argument(
-            '--filter-date',
+            "--filter-date",
            type=valid_date,
            default=[],
-            help='List of date(s) to process (format is YYYYMMDD)',
-            nargs='+',
+            help="List of date(s) to process (format is YYYYMMDD)",
+            nargs="+",
        )
 
    def setup_tasks(self, pipeline):
@@ -283,9 +294,16 @@ class ClassifyTmRawCommand(Command):
        # create the tasks and their dependencies :
        # load an input DDS TmRaw, then extract time,
        # then generate or update daily files
-        pipeline | start | loop_start | loop_end | MakeDailyTm() | \
-            MoveFailedFiles() | \
-            CopyFailedDds() | end
+        (
+            pipeline
+            | start
+            | loop_start
+            | loop_end
+            | MakeDailyTm()
+            | MoveFailedFiles()
+            | CopyFailedDds()
+            | end
+        )
 
        # define the start points of the pipeline
        pipeline.start = start
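
Note: the task chains are now wrapped in parentheses, one task per `|`, which is the Black-friendly way to split a long operator chain without backslash continuations. The `|` operator is overloaded by the pipeline framework to register the right-hand task as downstream of the left-hand one. A generic sketch of the mechanism (illustrative only, assuming nothing about poppy.core's real internals):

    # Why "a | b | c" can build a dependency chain: __or__ returns the
    # right-hand node, so the next "| ..." continues from there.
    class Node:
        def __init__(self, name):
            self.name = name
            self.children = []

        def __or__(self, other):
            # Register "other" as downstream of self.
            self.children.append(other)
            return other

    a, b, c = Node("a"), Node("b"), Node("c")
    (a | b | c)  # parentheses allow one node per line in real code
    assert a.children == [b] and b.children == [c]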
@@ -306,30 +324,30 @@ class ClassifyTcReportCommand(Command):
    by default new version of the daily file is created and
    only new DDS packets have been inserted.
    """
-    __command__ = 'film_classify_tcreport'
-    __command_name__ = 'classify_tcreport'
-    __parent__ = 'film'
-    __parent_arguments__ = ['base']
+
+    __command__ = "film_classify_tcreport"
+    __command_name__ = "classify_tcreport"
+    __parent__ = "film"
+    __parent_arguments__ = ["base"]
    __help__ = """
    Command to classify input SolO DDS TcReport files as daily file(s).
    """
 
    def add_arguments(self, parser):
-
        # List of input DDS TcReport response files
        parser.add_argument(
-            '--dds-files',
+            "--dds-files",
            help="""
            List of SolO DDS TcReport response XML file(s) to classify.
            """,
            type=str,
-            nargs='+',
+            nargs="+",
            required=True,
        )
 
        parser.add_argument(
-            '--processed-dds-dir',
-            help=f"""
+            "--processed-dds-dir",
+            help="""
            Directory where processed DDS file(s) must be moved at the end.
            """,
            type=str,
@@ -338,8 +356,8 @@ class ClassifyTcReportCommand(Command):
        )
 
        parser.add_argument(
-            '--failed-dds-dir',
-            help=f"""
+            "--failed-dds-dir",
+            help="""
            Directory where failed DDS file(s) must be moved at the end.
            """,
            type=str,
@@ -348,28 +366,28 @@ class ClassifyTcReportCommand(Command):
        )
 
        parser.add_argument(
-            '--archive-path',
+            "--archive-path",
            type=str,
            default=None,
            nargs=1,
-            help='Root path of the archive local directory.'
-                 'If defined, the pipeline first check if daily file(s) already exist(s) in the archive',
+            help="Root path of the archive local directory."
+            "If defined, the pipeline first check if daily file(s) already exist(s) in the archive",
        )
 
        # Clear input DDS files
        parser.add_argument(
-            '--clear-dds',
-            action='store_true',
-            help='If passed, then remove input list of processed/failed Dds.',
+            "--clear-dds",
+            action="store_true",
+            help="If passed, then remove input list of processed/failed Dds.",
            default=False,
        )
 
        parser.add_argument(
-            '--filter-date',
+            "--filter-date",
            type=valid_date,
            default=[],
-            help='List of date(s) to process (format is YYYYMMDD)',
-            nargs='+',
+            help="List of date(s) to process (format is YYYYMMDD)",
+            nargs="+",
        )
 
    def setup_tasks(self, pipeline):
@@ -385,9 +403,15 @@ class ClassifyTcReportCommand(Command):
        # create the tasks and their dependencies :
        # load an input DDS TcReport, then extract time,
        # then generate or update daily files
-        pipeline | start | loop_start | MergeTcReport() \
-            | CopyProcessedDds() | loop_end | \
-            MoveFailedFiles()
+        (
+            pipeline
+            | start
+            | loop_start
+            | MergeTcReport()
+            | CopyProcessedDds()
+            | loop_end
+            | MoveFailedFiles()
+        )
 
        # define the start points of the pipeline
        pipeline.start = start
@@ -403,38 +427,38 @@ class ProcessSoloHkCommand(Command):
    """
    Command to process input set of SolO DDS Param file(s) containing
    Solar Orbiter HK data.
-    Input data are save into daily XML files when parameters are sorted
+    Input data are saved into daily XML files when parameters are sorted
    by ascending times
 
    If output files already found for a given date in the local archive,
    by default new version of the daily file is created and
    only new Param elements have been inserted.
    """
-    __command__ = 'film_process_solohk'
-    __command_name__ = 'process_solohk'
-    __parent__ = 'film'
-    __parent_arguments__ = ['base']
+
+    __command__ = "film_process_solohk"
+    __command_name__ = "process_solohk"
+    __parent__ = "film"
+    __parent_arguments__ = ["base"]
    __help__ = """
    Command to make daily XML file(s) from an
    input set of SolO DDS Solo HK param files
    """
 
    def add_arguments(self, parser):
-
        # path of input DDS TmRaw response file(s)
        parser.add_argument(
-            '--dds-files',
+            "--dds-files",
            help="""
            List of input SolO DDS Param response XML file(s) to process.
            """,
            type=str,
-            nargs='+',
+            nargs="+",
            required=True,
        )
 
        parser.add_argument(
-            '--processed-dds-dir',
-            help=f"""
+            "--processed-dds-dir",
+            help="""
            Directory where processed DDS file(s) must be moved at the end.
            """,
            type=str,
@@ -443,8 +467,8 @@ class ProcessSoloHkCommand(Command):
        )
 
        parser.add_argument(
-            '--failed-dds-dir',
-            help=f"""
+            "--failed-dds-dir",
+            help="""
            Directory where failed DDS file(s) must be moved at the end.
            """,
            type=str,
@@ -453,28 +477,28 @@ class ProcessSoloHkCommand(Command):
        )
 
        parser.add_argument(
-            '--archive-path',
+            "--archive-path",
            type=str,
            default=None,
-            help='Root path of the archive local directory.'
-                 'If defined, the pipeline first check if daily file(s) already exist(s) in the archive',
+            help="Root path of the archive local directory."
+            "If defined, the pipeline first check if daily file(s) already exist(s) in the archive",
            nargs=1,
        )
 
        # Clear input DDS files
        parser.add_argument(
-            '--clear-dds',
-            action='store_true',
-            help='If passed, then remove input list of processed/failed Dds.',
+            "--clear-dds",
+            action="store_true",
+            help="If passed, then remove input list of processed/failed Dds.",
            default=False,
        )
 
        parser.add_argument(
-            '--filter-date',
+            "--filter-date",
            type=valid_date,
            default=[],
-            help='List of date(s) to process (format is YYYYMMDD)',
-            nargs='+',
+            help="List of date(s) to process (format is YYYYMMDD)",
+            nargs="+",
        )
 
    def setup_tasks(self, pipeline):
@@ -489,9 +513,7 @@ class ProcessSoloHkCommand(Command):
        # create the tasks and their dependencies :
        # load an input DDS SOLO HK Param, then extract time,
        # then generate or update daily files
-        pipeline | start | CopyProcessedDds() \
-            | CopyFailedDds() \
-            | end
+        pipeline | start | CopyProcessedDds() | CopyFailedDds() | end
 
        # define the start points of the pipeline
        pipeline.start = start
@@ -503,23 +525,23 @@ class DdsToLOCommand(Command):
    Command to produce a RPW L0 file for a given day
    from an input set of MOC DDS response XML file(s).
    """
-    __command__ = 'film_dds_to_l0'
-    __command_name__ = 'dds_to_l0'
-    __parent__ = 'film'
-    __parent_arguments__ = ['base']
+
+    __command__ = "film_dds_to_l0"
+    __command_name__ = "dds_to_l0"
+    __parent__ = "film"
+    __parent_arguments__ = ["base"]
    __help__ = """
    Command to generate a RPW L0 XML daily file
    from an input set of MOC DDS response XML files.
    """
 
    def add_arguments(self, parser):
-
        # add lstable argument
        # LSTableMixin.add_arguments(parser)
 
        #
        parser.add_argument(
-            'datetime',
+            "datetime",
            help="""
            Date for which L0 file must produced.
            """,
@@ -528,29 +550,29 @@ class DdsToLOCommand(Command):
        )
 
        # path to input DDS TmRaw response file(s)
        parser.add_argument(
-            '--dds-tmraw-xml',
+            "--dds-tmraw-xml",
            help="""
            Input DDS TmRaw response XML file(s) to convert.
            """,
-            nargs='*',
+            nargs="*",
            type=str,
            default=[],
        )
 
        # path to input DDS TcReport XML response file(s)
        parser.add_argument(
-            '--dds-tcreport-xml',
+            "--dds-tcreport-xml",
            help="""
            Input DDS TcReport response XML file(s) (to add TC in the output file).
            """,
-            nargs='*',
+            nargs="*",
            type=str,
            default=[],
        )
 
        parser.add_argument(
-            '--chunk',
-            help=f"""
+            "--chunk",
+            help="""
            Number of DDS packets to write in the L0 in one shot.
            """,
            type=int,
@@ -563,13 +585,15 @@ class DdsToLOCommand(Command):
        """
 
        # Check if output dir already exists
-        force = pipeline.get('force', default=False)
+        force = pipeline.get("force", default=False)
        output_dir = get_output_dir(pipeline)
        products_dir = get_products_dir(pipeline)
-        if (is_output_dir(output_dir, products_dir=products_dir) and
-                not force):
+        if is_output_dir(output_dir, products_dir=products_dir) and not force:
            # if yes exit
-            logger.info(f'Input request has been already processed. (Use --force keyword to force execution)')
+            logger.info(
+                "Input request has been already processed. "
+                "(Use --force keyword to force execution)"
+            )
            pipeline.exit()
 
        start = DdsToL0()
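
Note: the force/is_output_dir guard reformatted above is duplicated almost verbatim in every setup_tasks() of this module. A possible follow-up would be to factor it into a shared helper; a sketch under that assumption (exit_if_already_processed is hypothetical, not part of the codebase), built only from calls that appear in this diff:

    # Hypothetical refactoring sketch of the repeated guard block.
    from poppy.core.logger import logger

    from roc.film.tools.file_helpers import (
        get_output_dir,
        get_products_dir,
        is_output_dir,
    )

    def exit_if_already_processed(pipeline):
        # Stop the pipeline when outputs already exist and --force is not set.
        force = pipeline.get("force", default=False)
        output_dir = get_output_dir(pipeline)
        products_dir = get_products_dir(pipeline)
        if is_output_dir(output_dir, products_dir=products_dir) and not force:
            logger.info(
                "Input request has been already processed. "
                "(Use --force keyword to force execution)"
            )
            pipeline.exit()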
@@ -579,15 +603,15 @@ class DdsToLOCommand(Command):
        pipeline.start = start
 
        # Check mandatory arguments
-        for arg in ['idb_source', 'idb_version']:
+        for arg in ["idb_source", "idb_version"]:
            if not pipeline.get(arg, default=None, args=True):
-                raise MissingArgument(f'{arg} input argument not defined, aborting!')
+                raise MissingArgument(f"{arg} input argument not defined, aborting!")
 
-        # Setup the lock file
+        # Set up the lock file
        setup_lock(pipeline)
 
        # Force setting of start_time/end_time value for the input datetime
-        if pipeline.get('datetime', args=True):
+        if pipeline.get("datetime", args=True):
            pipeline.properties.start_time = None
            pipeline.properties.end_time = None
 
@@ -596,10 +620,11 @@ class SetL0UtcCommand(Command):
    """
    Command to set the UTC times of the input L0 file using SPICE kernels.
    """
-    __command__ = 'film_set_l0_utc'
-    __command_name__ = 'set_l0_utc'
-    __parent__ = 'film'
-    __parent_arguments__ = ['base']
+
+    __command__ = "film_set_l0_utc"
+    __command_name__ = "set_l0_utc"
+    __parent__ = "film"
+    __parent_arguments__ = ["base"]
    __help__ = """
    Command to set the packet UTC times of
    the input L0 file using SPICE kernels.
@@ -607,9 +632,8 @@ class SetL0UtcCommand(Command):
    """
 
    def add_arguments(self, parser):
-
        parser.add_argument(
-            'l0_file',
+            "l0_file",
            help="""
            L0 file to update.
            """,
@@ -618,7 +642,7 @@ class SetL0UtcCommand(Command):
        )
 
        parser.add_argument(
-            '--kernel-date',
+            "--kernel-date",
            help="""
            Date of the SPICE kernels to use.
            """,
@@ -627,15 +651,13 @@ class SetL0UtcCommand(Command):
        )
 
    def setup_tasks(self, pipeline):
-
        start = SetL0Utc()
-        pipeline | start | \
-            MoveFailedFiles() | MoveToProdDir()
+        pipeline | start | MoveFailedFiles() | MoveToProdDir()
 
        # define the start points of the pipeline
        pipeline.start = start
 
-        # Setup the lock file
+        # Set up the lock file
        setup_lock(pipeline)
 
 
@@ -643,10 +665,11 @@ class L0ToHkCommand(Command):
    """
    Command to generate RPW HK "digest" CDF files from a given L0 file.
    """
-    __command__ = 'film_l0_to_hk'
-    __command_name__ = 'l0_to_hk'
-    __parent__ = 'film'
-    __parent_arguments__ = ['base']
+
+    __command__ = "film_l0_to_hk"
+    __command_name__ = "l0_to_hk"
+    __parent__ = "film"
+    __parent_arguments__ = ["base"]
    __help__ = """
    Command to generate RPW HK CDF files from
    a given L0 file.
@@ -655,7 +678,7 @@ class L0ToHkCommand(Command):
    def add_arguments(self, parser):
        # path to XML file of the IDB
        parser.add_argument(
-            'l0_file',
+            "l0_file",
            help="""
            The L0 file to parse.
            """,
@@ -665,12 +688,12 @@ class L0ToHkCommand(Command):
 
        # Specify the list of dataset for which files must be generated
        parser.add_argument(
-            '-d',
-            '--dataset',
-            help='List of RPW HK dataset(s) for which files must be produced.'
-                 'If not defined, then produce all file(s).',
+            "-d",
+            "--dataset",
+            help="List of RPW HK dataset(s) for which files must be produced."
+            "If not defined, then produce all file(s).",
            type=str,
-            nargs='+',
+            nargs="+",
            default=[None],
        )
 
@@ -680,13 +703,15 @@ class L0ToHkCommand(Command):
        """
 
        # Check if output dir already exists
-        force = pipeline.get('force', default=False)
+        force = pipeline.get("force", default=False)
        output_dir = get_output_dir(pipeline)
        products_dir = get_products_dir(pipeline)
-        if (is_output_dir(output_dir, products_dir=products_dir) and
-                not force):
+        if is_output_dir(output_dir, products_dir=products_dir) and not force:
            # if yes exit
-            logger.info(f'Input request has been already processed. (Use --force keyword to force execution)')
+            logger.info(
+                "Input request has been already processed. "
+                "(Use --force keyword to force execution)"
+            )
            pipeline.exit()
 
        # the task
@@ -696,7 +721,7 @@ class L0ToHkCommand(Command):
        pipeline | task | MoveFailedFiles() | MoveToProdDir()
        pipeline.start = task
 
-        # Setup the lock file
+        # Set up the lock file
        setup_lock(pipeline)
 
 
@@ -704,10 +729,11 @@ class L0ToL1SurvCommand(Command):
    """
    Command to generate RPW L1 survey data CDF files from a given L0 file.
    """
-    __command__ = 'film_l0_to_l1_surv'
-    __command_name__ = 'l0_to_l1_surv'
-    __parent__ = 'film'
-    __parent_arguments__ = ['base']
+
+    __command__ = "film_l0_to_l1_surv"
+    __command_name__ = "l0_to_l1_surv"
+    __parent__ = "film"
+    __parent_arguments__ = ["base"]
    __help__ = """
    Command to generate RPW L1 survey data files from
    a given L0 file.
@@ -716,22 +742,22 @@ class L0ToL1SurvCommand(Command):
    def add_arguments(self, parser):
        # path to XML file of the IDB
        parser.add_argument(
-            'l0_file',
+            "l0_file",
            help="""
            The L0 file to parse.
            """,
            type=valid_single_file,
-            nargs=1
+            nargs=1,
        )
 
        # Specify the list of dataset for which files must be generated
        parser.add_argument(
-            '-d',
-            '--dataset',
-            help='List of RPW L1 dataset(s) for which files must be produced.'
-                 'If not defined, then produce all file(s).',
+            "-d",
+            "--dataset",
+            help="List of RPW L1 dataset(s) for which files must be produced."
+            "If not defined, then produce all file(s).",
            type=str,
-            nargs='+',
+            nargs="+",
            default=[None],
        )
 
@@ -741,13 +767,15 @@ class L0ToL1SurvCommand(Command):
        """
 
        # Check if output dir already exists
-        force = pipeline.get('force', default=False)
+        force = pipeline.get("force", default=False)
        output_dir = get_output_dir(pipeline)
        products_dir = get_products_dir(pipeline)
-        if (is_output_dir(output_dir, products_dir=products_dir) and
-                not force):
+        if is_output_dir(output_dir, products_dir=products_dir) and not force:
            # if yes exit
-            logger.info(f'Input request has been already processed. (Use --force keyword to force execution)')
+            logger.info(
+                "Input request has been already processed. "
+                "(Use --force keyword to force execution)"
+            )
            pipeline.exit()
 
        # the task
@@ -765,10 +793,11 @@ class L0ToL1SbmCommand(Command):
    """
    Command to generate RPW L1 SBM1/SBM2 data CDF files from a given set of L0 files.
    """
-    __command__ = 'film_l0_to_l1_sbm'
-    __command_name__ = 'l0_to_l1_sbm'
-    __parent__ = 'film'
-    __parent_arguments__ = ['base']
+
+    __command__ = "film_l0_to_l1_sbm"
+    __command_name__ = "l0_to_l1_sbm"
+    __parent__ = "film"
+    __parent_arguments__ = ["base"]
    __help__ = """
    Command to generate RPW L1 SBM1/SBM2 data files from
    a given set of L0 file(s).
@@ -777,60 +806,60 @@ class L0ToL1SbmCommand(Command):
    def add_arguments(self, parser):
        # path to RPW L0 file(s) to process
        parser.add_argument(
-            'l0_files',
+            "l0_files",
            help="""
            The L0 file(s) to process.
            """,
            type=str,
-            nargs='+',
+            nargs="+",
        )
 
        # Specify the list of dataset for which files must be generated
        parser.add_argument(
-            '-d',
-            '--dataset',
-            help='List of RPW dataset(s) for which files must be produced.'
-                 'If not defined, then produce all file(s).',
+            "-d",
+            "--dataset",
+            help="List of RPW dataset(s) for which files must be produced."
+            "If not defined, then produce all file(s).",
            type=str,
-            nargs='+',
+            nargs="+",
            default=[None],
        )
 
        # No process SBM1
        parser.add_argument(
-            '--no-sbm1',
-            action='store_true',
-            help='If passed, then do no process SBM1 data.',
+            "--no-sbm1",
+            action="store_true",
+            help="If passed, then do no process SBM1 data.",
            default=False,
        )
 
        # No process SBM2
        parser.add_argument(
-            '--no-sbm2',
-            action='store_true',
-            help='If passed, then do no process SBM2 data.',
+            "--no-sbm2",
+            action="store_true",
+            help="If passed, then do no process SBM2 data.",
            default=False,
        )
 
        # Process any SBM TM packets found in input L0 files, without using
        # TM_DPU_EVENT_PR_DPU_SBM1 or TM_DPU_EVENT_PR_DPU_SBM1 packets information
        parser.add_argument(
-            '--manual',
-            action='store_true',
-            help='Process any SBM TM packets found in input L0 files '
-                 '(i.e., without using TM_DPU_EVENT_PR_DPU_SBM1 or TM_DPU_EVENT_PR_DPU_SBM1 packets). '
-                 'Use this option with --start-time, --end-time and --sbm-type keywords to process SBM science packets'
-                 ' dumped by TC.',
+            "--manual",
+            action="store_true",
+            help="Process any SBM TM packets found in input L0 files "
+            "(i.e., without using TM_DPU_EVENT_PR_DPU_SBM1 or TM_DPU_EVENT_PR_DPU_SBM1 packets). "
+            "Use this option with --start-time, --end-time and --sbm-type keywords to process SBM science packets"
+            " dumped by TC.",
            default=False,
        )
 
        #
        parser.add_argument(
-            '--sbm-type',
+            "--sbm-type",
            nargs=1,
            type=int,
            default=[None],
-            help='Indicate the type of SBM event (1=SBM1 or 2=SBM2) processed when --manual option is passed.',
+            help="Indicate the type of SBM event (1=SBM1 or 2=SBM2) processed when --manual option is passed.",
        )
 
    def setup_tasks(self, pipeline):
@@ -839,13 +868,15 @@ class L0ToL1SbmCommand(Command):
        """
 
        # Check if output dir already exists
-        force = pipeline.get('force', default=False)
+        force = pipeline.get("force", default=False)
        output_dir = get_output_dir(pipeline)
        products_dir = get_products_dir(pipeline)
-        if (is_output_dir(output_dir, products_dir=products_dir) and
-                not force):
+        if is_output_dir(output_dir, products_dir=products_dir) and not force:
            # if yes exit
-            logger.info(f'Input request has been already processed. (Use --force keyword to force execution)')
+            logger.info(
+                "Input request has been already processed. "
+                "(Use --force keyword to force execution)"
+            )
            pipeline.exit()
 
        # the task
@@ -859,57 +890,38 @@ class L0ToL1SbmCommand(Command):
        setup_lock(pipeline)
 
 
-class L0ToAncBiaSweepTableCommand(Command):
+class DbToAncBiaSweepTableCommand(Command):
    """
-    Command to run the pipeline to generate ancillary Bias Sweep table file
+    Command to run the pipeline to generate csv file
+    containing Bias Sweep table in the roc database
    """
-    __command__ = 'film_l0_to_anc_bia_sweep_table'
-    __command_name__ = 'l0_to_anc_bia_sweep_table'
-    __parent__ = 'film'
-    __parent_arguments__ = ['base']
+
+    __command__ = "film_db_to_anc_bia_sweep_table"
+    __command_name__ = "db_to_anc_bia_sweep_table"
+    __parent__ = "film"
+    __parent_arguments__ = ["base"]
    __help__ = """
-    Command to generate ancillary Bias sweep table report file.
+    Command to generate Bias sweep table report csv file.
    """
 
    def add_arguments(self, parser):
-        # path to input L0 files
-        parser.add_argument(
-            '--l0-files',
-            help="""
-            List of input l0 files used to make output ANC Bias sweep table file.
-            """,
-            type=str,
-            nargs='+',
-            required=True,
-        )
-
-        # Existing anc_bia_sweep_table csv file containing previous sweep table data
-        # (can be used to ensure the continuity of sweep table mapping)
-        parser.add_argument(
-            '--sweep-tables',
-            help="""
-            Existing anc_bia_sweep_table file containing previous sweep table data.
-            (can be used to ensure the continuity of sweep table mapping
-            or check if new data must be saved)
-            """,
-            type=str,
-            nargs=1
-        )
+        pass
 
    def setup_tasks(self, pipeline):
-
        # Check if output dir already exists
-        force = pipeline.get('force', default=False)
+        force = pipeline.get("force", default=False)
        output_dir = get_output_dir(pipeline)
        products_dir = get_products_dir(pipeline)
-        if (is_output_dir(output_dir, products_dir=products_dir) and
-                not force):
+        if is_output_dir(output_dir, products_dir=products_dir) and not force:
            # if yes exit
-            logger.info(f'Input request has been already processed. (Use --force keyword to force execution)')
+            logger.info(
+                "Output directory already exists. "
+                "(Use --force keyword to force execution)"
+            )
            pipeline.exit()
 
        # starting task
-        start = L0ToAncBiaSweepTable()
+        start = DbToAncBiaSweepTable()
 
        # create the tasks and their dependencies
        pipeline | start | MoveFailedFiles() | MoveToProdDir()
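
Note: with the switch from L0ToAncBiaSweepTableCommand to DbToAncBiaSweepTableCommand, add_arguments() is reduced to a pass: the sweep-table content is now read from the ROC database instead of being rebuilt from --l0-files and a previous --sweep-tables CSV. The invocation therefore reduces to the bare subcommand. A hedged sketch, where <pipeline-cli> is a placeholder for whatever executable launches the ROC pipeline and the 1.13.4 arguments are illustrative:

    # 1.13.4 (removed): table built from input L0 files
    <pipeline-cli> film l0_to_anc_bia_sweep_table --l0-files <l0_file ...> --sweep-tables <previous_table.csv>
    # 1.14.0: table content is read from the roc database
    <pipeline-cli> film db_to_anc_bia_sweep_table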
@@ -925,47 +937,39 @@ class L0ToL1BiaSweepCommand(Command):
    """
    Command to run the pipeline to generate Bias Sweep L1 CDF.
    """
-    __command__ = 'film_l0_to_l1_bia_sweep'
-    __command_name__ = 'l0_to_l1_bia_sweep'
-    __parent__ = 'film'
-    __parent_arguments__ = ['base']
+
+    __command__ = "film_l0_to_l1_bia_sweep"
+    __command_name__ = "l0_to_l1_bia_sweep"
+    __parent__ = "film"
+    __parent_arguments__ = ["base"]
    __help__ = """
    Command to generate Bias sweep L1 CDF.
    """
 
    def add_arguments(self, parser):
-
-        parser.add_argument(
-            '--sweep-tables',
-            help="""
-            List of CSV file(s) containing Bias sweep table data.
-            """,
-            type=str,
-            nargs='+',
-            required=True,
-        )
-
        # path to input L0 files
        parser.add_argument(
-            '--l0-files',
+            "-l0",
+            "--l0-files",
            help="""
            List of input l0 files used to make output L1 Bias sweep CDF.
            """,
            type=str,
-            nargs='+',
+            nargs="+",
            required=True,
        )
 
    def setup_tasks(self, pipeline):
-
        # Check if output dir already exists
-        force = pipeline.get('force', default=False)
+        force = pipeline.get("force", default=False)
        output_dir = get_output_dir(pipeline)
        products_dir = get_products_dir(pipeline)
-        if (is_output_dir(output_dir, products_dir=products_dir) and
-                not force):
+        if is_output_dir(output_dir, products_dir=products_dir) and not force:
            # if yes exit
-            logger.info(f'Input request has been already processed. (Use --force keyword to force execution)')
+            logger.info(
+                "Input request has been already processed. "
+                "(Use --force keyword to force execution)"
+            )
            pipeline.exit()
 
        # starting task
@@ -977,7 +981,7 @@ class L0ToL1BiaSweepCommand(Command):
        # define the start points of the pipeline
        pipeline.start = start
 
-        # Setup the lock file
+        # Set up the lock file
        setup_lock(pipeline)
 
 
@@ -985,10 +989,11 @@ class L0ToL1BiaCurrentCommand(Command):
    """
    Command to run the pipeline to generate L1 Bias current CDF file
    """
-    __command__ = 'film_l0_to_l1_bia_current'
-    __command_name__ = 'l0_to_l1_bia_current'
-    __parent__ = 'film'
-    __parent_arguments__ = ['base']
+
+    __command__ = "film_l0_to_l1_bia_current"
+    __command_name__ = "l0_to_l1_bia_current"
+    __parent__ = "film"
+    __parent_arguments__ = ["base"]
    __help__ = """
    Command to generate L1 Bias current CDF file.
    """
@@ -996,25 +1001,26 @@ class L0ToL1BiaCurrentCommand(Command):
    def add_arguments(self, parser):
        # path to input L0 files
        parser.add_argument(
-            '--l0-files',
+            "--l0-files",
            help="""
            List of input l0 files used to make output L1 Bias current CDF.
            """,
            type=str,
-            nargs='+',
+            nargs="+",
            required=True,
        )
 
    def setup_tasks(self, pipeline):
-
        # Check if output dir already exists
-        force = pipeline.get('force', default=False)
+        force = pipeline.get("force", default=False)
        output_dir = get_output_dir(pipeline)
        products_dir = get_products_dir(pipeline)
-        if (is_output_dir(output_dir, products_dir=products_dir) and
-                not force):
+        if is_output_dir(output_dir, products_dir=products_dir) and not force:
            # if yes exit
-            logger.info(f'Input request has been already processed. (Use --force keyword to force execution)')
+            logger.info(
+                "Input request has been already processed. "
+                "(Use --force keyword to force execution)"
+            )
            pipeline.exit()
 
        # starting task
@@ -1034,10 +1040,11 @@ class HkSktToXlsxCommand(Command):
    """
    Command to generate the skeleton files for HK parameters from the IDB.
    """
-    __command__ = 'hk_skt_to_xlsx'
-    __command_name__ = 'hk_skt_to_xlsx'
-    __parent__ = 'film'
-    __parent_arguments__ = ['base']
+
+    __command__ = "hk_skt_to_xlsx"
+    __command_name__ = "hk_skt_to_xlsx"
+    __parent__ = "film"
+    __parent_arguments__ = ["base"]
    __help__ = """
    Command to generate the Excel files used to generate CDF skeleton for
    HK parameters.
@@ -1047,20 +1054,20 @@ class HkSktToXlsxCommand(Command):
        # to read the path to the directory where to store the HK CDF skeletons
        # in Excel format
        parser.add_argument(
-            '-d',
-            '--directory',
+            "-d",
+            "--directory",
            help="""
            The absolute path to the directory where to
            save the HK CDF skeletons in Excel format.
            """,
            type=str,
-            default=osp.join(TEMP_DIR, 'hk_xls')
+            default=osp.join(TEMP_DIR, "hk_xls"),
        )
 
        # path to XML file of the IDB
        parser.add_argument(
-            '-i',
-            '--idb',
+            "-i",
+            "--idb",
            help="""
            Path to the RPW IDB main directory.
            """,
@@ -1069,8 +1076,8 @@ class HkSktToXlsxCommand(Command):
 
        # path to the mapping of parameters and packets to the SRDB
        parser.add_argument(
-            '-m',
-            '--mapping',
+            "-m",
+            "--mapping",
            help="""
            Path to the XML file containing the mapping of parameters and
            packets to the SRDB.
@@ -1080,19 +1087,19 @@ class HkSktToXlsxCommand(Command):
 
        # path to the configuration file for the generation
        parser.add_argument(
-            '-s',
-            '--skeleton-configuration',
+            "-s",
+            "--skeleton-configuration",
            help="""
            Path to the JSON configuration file of the skeleton command, for
            packets selection and structure.
            """,
            type=str,
-            default=paths.from_config('hk_metadef.json'),
+            default=paths.from_config("hk_metadef.json"),
        )
 
        # path to the HK CDF Excel template file
        parser.add_argument(
-            'hk_template_file',
+            "hk_template_file",
            help="""
            Path to the HK CDF Excel template file.
            """,
@@ -1113,75 +1120,75 @@ class HkSktToXlsxCommand(Command):
 
 class CdfPostProCommand(Command):
    """
-    Command to run the pipeline to generate ancillary Bias Sweep table file
+    Command to run the pipeline to run post-processings on a list of input RPW CDF files.
    """
-    __command__ = 'film_cdf_post_pro'
-    __command_name__ = 'cdf_post_pro'
-    __parent__ = 'film'
-    __parent_arguments__ = ['base']
+
+    __command__ = "film_cdf_postpro"
+    __command_name__ = "cdf_postpro"
+    __parent__ = "film"
+    __parent_arguments__ = ["base"]
    __help__ = """
-    Command to perform post-processing in the set of input RPW CDF files.
+    Command to perform post-processings on a list of input RPW CDF files.
    """
 
    def add_arguments(self, parser):
-
        parser.add_argument(
-            '--cdf-files',
+            "--cdf-files",
            help="""
            List of input RPW CDF files to post-process.
            """,
            type=str,
-            nargs='+',
+            nargs="+",
            required=True,
        )
 
        parser.add_argument(
-            '--rpw-obs-json',
+            "--rpw-obs-json",
            help="""
-            List of RPW SoopKitchen export JSON files.
+            List of RPW SoopKitchen export JSON files. Pattern can also be passed.
            """,
            type=str,
-            nargs='+',
+            nargs="+",
        )
 
        parser.add_argument(
-            '--rpw-ior-xml',
+            "--rpw-ior-xml",
            help="""
-            List of RPW IOR XML files (ZIP archive can be also passed).
+            List of RPW IOR XML files. Pattern or ZIP archive can be also passed.
            """,
            type=str,
-            nargs='+',
+            nargs="+",
        )
 
        parser.add_argument(
-            '--options',
+            "--options",
            help=f"""
            List of post-processing jobs to run.
            Available options are: {CDF_POST_PRO_OPTS_ARGS} .
            """,
            type=str,
-            nargs='+',
+            nargs="+",
            required=True,
        )
 
        parser.add_argument(
-            '--update-json',
-            help='JSON file containing updates to be performed '
-                 'on input CDF files. '
-                 '(Only works with "update_cdf" option)',
+            "--update-json",
+            help="JSON file containing updates to be performed "
+            "on input CDF files. "
+            '(Only works with "update_cdf" option)',
            type=str,
            nargs=1,
            default=[None],
        )
 
        parser.add_argument(
-            '--cdfexport',
+            "--cdfconvert",
            help="""
-            Path to the cdfexport executable.
+            Path to the cdfconvert executable.
            """,
            type=str,
            nargs=1,
-            default=[CDFEXPORT_PATH],
+            default=[CDFCONVERT_PATH],
        )
 
    def setup_tasks(self, pipeline):
@@ -1190,13 +1197,15 @@ class CdfPostProCommand(Command):
        """
 
        # Check if output dir already exists
-        force = pipeline.get('force', default=False)
+        force = pipeline.get("force", default=False)
        output_dir = get_output_dir(pipeline)
        products_dir = get_products_dir(pipeline)
-        if (is_output_dir(output_dir, products_dir=products_dir) and
-                not force):
+        if is_output_dir(output_dir, products_dir=products_dir) and not force:
            # if yes exit
-            logger.info(f'Input request has been already processed. (Use --force keyword to force execution)')
+            logger.info(
+                f"Output directory already exists ({products_dir}) \n"
+                "(Use --force keyword to force execution)"
+            )
            pipeline.exit()
 
        # starting task
@@ -1210,3 +1219,43 @@ class CdfPostProCommand(Command):
 
        # Setup the lock file
        setup_lock(pipeline)
+
+
+class ExportSoloHeeCoordCommand(Command):
+    """
+    Command to generate CSV file containing SolO HEE coordinates
+    with (distance in AU, longitude in deg, latitude in deg)
+    """
+
+    __command__ = "film_export_solo_hee_coord"
+    __command_name__ = "export_solo_hee_coord"
+    __parent__ = "film"
+    __parent_arguments__ = ["base"]
+    __help__ = """
+    Command to generate CSV file containing SolO HEE coordinates.
+    """
+
+    def add_arguments(self, parser):
+        parser.add_argument(
+            "--output-csv",
+            help="""
+            Path of the output CSV file containing SolO HEE coordinates.
+            """,
+            type=str,
+            nargs=1,
+            default=[None],
+        )
+
+    def setup_tasks(self, pipeline):
+        """
+        Execute the RPW CDF post-processing.
+        """
+
+        # starting task
+        start = ExportSoloHeeCoord()
+
+        # create the tasks workflow and their dependencies
+        pipeline | start
+
+        # define the start points of the pipeline
+        pipeline.start = start
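
Note: ExportSoloHeeCoordCommand is wired as a single-task workflow (pipeline | start, with no MoveFailedFiles()/MoveToProdDir() steps), and --output-csv defaults to [None], so the ExportSoloHeeCoord task presumably falls back to its own default output path when no file is given. Assuming the parent/child command layout declared above, with <pipeline-cli> again a placeholder for the pipeline executable, an invocation would look like:

    <pipeline-cli> film export_solo_hee_coord --output-csv solo_hee_coord.csv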