roc-film 1.13.5__py3-none-any.whl → 1.14.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. roc/__init__.py +2 -1
  2. roc/film/__init__.py +2 -2
  3. roc/film/commands.py +372 -323
  4. roc/film/config/__init__.py +0 -1
  5. roc/film/constants.py +101 -65
  6. roc/film/descriptor.json +127 -96
  7. roc/film/exceptions.py +28 -27
  8. roc/film/tasks/__init__.py +16 -16
  9. roc/film/tasks/cat_solo_hk.py +86 -74
  10. roc/film/tasks/cdf_postpro.py +438 -309
  11. roc/film/tasks/check_dds.py +39 -45
  12. roc/film/tasks/db_to_anc_bia_sweep_table.py +381 -0
  13. roc/film/tasks/dds_to_l0.py +232 -180
  14. roc/film/tasks/export_solo_coord.py +147 -0
  15. roc/film/tasks/file_handler.py +124 -70
  16. roc/film/tasks/l0_to_hk.py +117 -103
  17. roc/film/tasks/l0_to_l1_bia_current.py +44 -30
  18. roc/film/tasks/l0_to_l1_bia_sweep.py +417 -329
  19. roc/film/tasks/l0_to_l1_sbm.py +250 -208
  20. roc/film/tasks/l0_to_l1_surv.py +185 -130
  21. roc/film/tasks/make_daily_tm.py +40 -37
  22. roc/film/tasks/merge_tcreport.py +77 -71
  23. roc/film/tasks/merge_tmraw.py +101 -88
  24. roc/film/tasks/parse_dds_xml.py +21 -20
  25. roc/film/tasks/set_l0_utc.py +51 -49
  26. roc/film/tests/cdf_compare.py +565 -0
  27. roc/film/tests/hdf5_compare.py +84 -62
  28. roc/film/tests/test_dds_to_l0.py +93 -51
  29. roc/film/tests/test_dds_to_tc.py +8 -11
  30. roc/film/tests/test_dds_to_tm.py +8 -10
  31. roc/film/tests/test_film.py +161 -116
  32. roc/film/tests/test_l0_to_hk.py +64 -36
  33. roc/film/tests/test_l0_to_l1_bia.py +10 -14
  34. roc/film/tests/test_l0_to_l1_sbm.py +14 -19
  35. roc/film/tests/test_l0_to_l1_surv.py +68 -41
  36. roc/film/tests/test_metadata.py +21 -20
  37. roc/film/tests/tests.py +743 -396
  38. roc/film/tools/__init__.py +5 -5
  39. roc/film/tools/dataset_tasks.py +34 -2
  40. roc/film/tools/file_helpers.py +402 -271
  41. roc/film/tools/l0.py +402 -324
  42. roc/film/tools/metadata.py +147 -127
  43. roc/film/tools/skeleton.py +12 -17
  44. roc/film/tools/tools.py +109 -92
  45. roc/film/tools/xlsx2skt.py +161 -139
  46. {roc_film-1.13.5.dist-info → roc_film-1.14.1.dist-info}/LICENSE +127 -125
  47. roc_film-1.14.1.dist-info/METADATA +60 -0
  48. roc_film-1.14.1.dist-info/RECORD +50 -0
  49. {roc_film-1.13.5.dist-info → roc_film-1.14.1.dist-info}/WHEEL +1 -1
  50. roc/film/tasks/l0_to_anc_bia_sweep_table.py +0 -348
  51. roc_film-1.13.5.dist-info/METADATA +0 -120
  52. roc_film-1.13.5.dist-info/RECORD +0 -48
roc/film/tasks/export_solo_coord.py
@@ -0,0 +1,147 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+
+ """
+ Contains task to generate CSV file containing SolO S/C distance to Sun (in AU) for each date.
+ This file is needed by MUSIC FAUST app to define the occurrence rate of the Bias sweeps.
+ """
+
+ import os
+ import uuid
+ from datetime import datetime, timedelta
+ from glob import glob
+ import csv
+
+ from poppy.core.logger import logger
+ from poppy.core.task import Task
+
+ from roc.rpl.time.spice import SpiceHarvester
+
+ from roc.film.tools.file_helpers import get_output_dir
+ from roc.film.constants import CP_START_TIME, NAIF_SPICE_ID, TIME_DAILY_STRFORMAT
+
+ __all__ = ["ExportSoloHeeCoord"]
+
+
+ class ExportSoloHeeCoord(Task):
+     """
+     Task to export SolO HEE coordinates in a CSV file
+     with (distance in AU, longitude in deg, latitude in deg).
+     """
+
+     plugin_name = "roc.film"
+     name = "export_solo_hee_coord"
+
+     def add_targets(self):
+         pass
+
+     def setup_inputs(self):
+         # Get/create list of well processed DDS files
+         self.processed_files = self.pipeline.get(
+             "processed_files", default=[], create=True
+         )
+
+         # Get/create list of failed DDS files
+         self.failed_files = self.pipeline.get("failed_files", default=[], create=True)
+
+         # If output directory not found, create it
+         self.output_dir = get_output_dir(self.pipeline)
+         if not os.path.isdir(self.output_dir):
+             logger.debug(f"Making {self.output_dir}...")
+             os.makedirs(self.output_dir)
+
+         # Get path of SPICE kernels
+         self.kernel_path = SpiceHarvester.spice_kernel_path()
+
+         # Load SPICE kernels (only meta kernels for the moment)
+         self.spice = SpiceHarvester.load_spice_kernels(
+             self.kernel_path, only_mk=True, predictive=False, flown=False
+         )
+
+         # Function to convert from radians to degrees
+         self.dpr = self.spice._spiceypy.dpr()
+
+         # Define output file start time
+         self.start_time = self.pipeline.get("start_time", default=[None])[0]
+         logger.debug(f"start_time value is {self.start_time}")
+
+         # Define output file end time
+         self.end_time = self.pipeline.get("end_time", default=[None])[0]
+         logger.debug(f"end_time value is {self.end_time}")
+
+         # Generating list of days for which distance will be computed
+         if self.start_time is None:
+             self.start_time = CP_START_TIME
+
+         if self.end_time is None:
+             self.end_time = datetime.today() + timedelta(days=90)
+
+         # Get output_csv input argument
+         self.output_csv = self.pipeline.get("output_csv", default=[None])[0]
+         if self.output_csv is None:
+             # If not passed, then try to generate automatically the output CSV filename
+             basename = f"solo_ANC_solo-hee-coord_{self.start_time.strftime(TIME_DAILY_STRFORMAT)}T{self.end_time.strftime(TIME_DAILY_STRFORMAT)}"
+             pattern = os.path.join(self.output_dir, basename + "*.csv")
+             existing_files = list(glob(pattern))
+             data_version = f"{len(existing_files) + 1:02d}"
+             self.output_csv = os.path.join(
+                 self.output_dir, basename + f"_V{data_version}.csv"
+             )
+
+     def run(self):
+         # Define task job ID (long and short)
+         self.job_id = str(uuid.uuid4())
+         self.job_sid = self.job_id[:8]
+         logger.info(f"[{self.job_sid}]\t Task started")
+         try:
+             self.setup_inputs()
+         except Exception:
+             logger.exception(f"[{self.job_sid}]\t Initializing inputs has failed!")
+             self.pipeline.exit()
+             return
+
+         logger.info(f"Creating {self.output_csv} ...")
+         with open(self.output_csv, "w", newline="") as csvfile:
+             fieldnames = ["DATE", "R_AU", "HEE_LON_DEG", "HEE_LAT_DEG"]
+             writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
+             writer.writeheader()
+
+             # Compute SolO S/C HEE coordinates [r(AU), long(deg), lat(deg)]
+             # for each day of the mission
+             # and write results in the CSV file
+             current_date = self.start_time.date()
+             while current_date <= self.end_time.date():
+                 # Convert time to string then ephemeris time
+                 time_str = current_date.strftime("%Y %B %d") + " 12:00:00"
+                 et = self.spice._spiceypy.str2et(time_str)
+
+                 # Now we need to compute the actual distance between
+                 # the Sun and Solar Orbiter. The above spkezp call gives us
+                 # the apparent distance, so we need to adjust our
+                 # aberration correction appropriately.
+                 [solo_hee_pos, ltime] = self.spice._spiceypy.spkezp(
+                     NAIF_SPICE_ID["SUN"],
+                     et,
+                     "SOLO_HEE",
+                     "NONE",
+                     NAIF_SPICE_ID["SOLAR_ORBITER"],
+                 )
+                 # Convert SOLO HEE coordinates to [radius, longitude, latitude]
+                 [r, lon, lat] = self.spice._spiceypy.reclat(solo_hee_pos)
+                 lat = -lat * self.dpr
+                 lon = 180.0 + (lon * self.dpr) if lon <= 0 else (lon * self.dpr) - 180.0
+
+                 # Convert radius to AU using convrt.
+                 r_au = self.spice._spiceypy.convrt(r, "KM", "AU")
+                 # print(time_str, r_au, lon, lat)
+
+                 row_to_write = {
+                     "DATE": current_date.strftime(TIME_DAILY_STRFORMAT),
+                     "R_AU": r_au,
+                     "HEE_LON_DEG": lon,
+                     "HEE_LAT_DEG": lat,
+                 }
+                 writer.writerow(row_to_write)
+                 logger.debug(f"New line {row_to_write} in {self.output_csv}")
+
+                 current_date += timedelta(days=1)
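The coordinate computation in the new task reduces to a handful of SPICE calls. The following standalone sketch reproduces it with spiceypy directly, outside the POPPy pipeline; the meta-kernel path is illustrative, and the NAIF IDs used here (10 for the Sun, -144 for Solar Orbiter) are assumed to match the plugin's NAIF_SPICE_ID mapping rather than taken from this diff.

# Minimal sketch, assuming spiceypy is installed and a Solar Orbiter meta-kernel
# is available locally (the kernel path below is hypothetical).
import spiceypy

spiceypy.furnsh("kernels/mk/solo_ANC_soc-flown-mk.tm")  # illustrative path

et = spiceypy.str2et("2023 March 15 12:00:00")

# Position of the Sun as seen from Solar Orbiter, in the SOLO_HEE frame,
# with no aberration correction ("NONE"), as in the task above.
sun_id = 10      # assumed NAIF ID for the Sun
solo_id = -144   # assumed NAIF ID for Solar Orbiter
pos, _ = spiceypy.spkezp(sun_id, et, "SOLO_HEE", "NONE", solo_id)

# Rectangular -> (radius, longitude, latitude), then degrees and AU.
# The 180 deg shift and latitude sign flip mirror the adjustment in the task:
# they turn the Sun-as-seen-from-SolO direction into SolO's own heliocentric
# longitude/latitude.
r, lon, lat = spiceypy.reclat(pos)
dpr = spiceypy.dpr()
lat_deg = -lat * dpr
lon_deg = 180.0 + (lon * dpr) if lon <= 0 else (lon * dpr) - 180.0
r_au = spiceypy.convrt(r, "KM", "AU")

print(r_au, lon_deg, lat_deg)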
roc/film/tasks/file_handler.py
@@ -1,16 +1,16 @@
  #!/usr/bin/env python3
  # -*- coding: utf-8 -*-

- """ Tasks for file handling in FILM plugin."""
+ """Tasks for file handling in FILM plugin."""

  import os
  import shutil
+ import uuid

  from poppy.core.logger import logger
  from poppy.core.task import Task

- __all__ = ['MoveToProdDir', 'MoveFailedFiles',
-            'CopyFailedDds', 'CopyProcessedDds']
+ __all__ = ["MoveToProdDir", "MoveFailedFiles", "CopyFailedDds", "CopyProcessedDds"]

  from roc.film.tools.tools import safe_move

@@ -20,22 +20,27 @@ from roc.film.tools.file_helpers import get_output_dir, get_products_dir
  class MoveToProdDir(Task):
      """Task to move output files folder to
      final products directory."""
-     plugin_name = 'roc.film'
-     name = 'move_to_products_dir'

-     def run(self):
+     plugin_name = "roc.film"
+     name = "move_to_products_dir"

+     def run(self):
          # TODO - add a lock file mechanism but at the task level
          # (useful here to make sure that the
          # a folder in the products_dir is not moved/removed while
          # the pipeline is still working on it
          # Add a LockFile class instance to the Task class in Poppy ?

+         self.job_uuid = str(uuid.uuid4())
+         self.job_id = self.job_uuid[:8]
+         logger.debug(f"Task {self.job_id} is starting")
+
          # See if --no-move keyword is defined
-         no_move = self.pipeline.get('no_move', default=False, args=True)
+         no_move = self.pipeline.get("no_move", default=False, args=True)
          if no_move:
              logger.info(
-                 'Skip current task "move_to_products_dir": --no-move is True')
+                 f'--no-move is passed: skip current task "move_to_products_dir"\t[{self.job_id}]'
+             )
              return

          # Retrieve pipeline output file directory
@@ -46,49 +51,67 @@ class MoveToProdDir(Task):
          products_dir = get_products_dir(self.pipeline)

          # Ignore possible lock file in the output directory
-         ignore_patterns = '*.lock'
+         ignore_patterns = "*.lock"

          if not products_dir:
              logger.info(
-                 'Skip current task "move_to_products_dir": products_dir argument not defined')
+                 f'products_dir argument not defined: Skip current task "move_to_products_dir"\t[{self.job_id}]'
+             )
          else:
              output_dirbasename = os.path.basename(output_dir)
              target_dir = os.path.join(products_dir, output_dirbasename)
-             logger.info(f'Moving {output_dir} into {products_dir}')
-             if safe_move(output_dir, target_dir,
-                          ignore_patterns=ignore_patterns):
-                 logger.info(f'{output_dir} moved into {products_dir}')
+             logger.info(f"Moving {output_dir} into {products_dir}")
+             if safe_move(output_dir, target_dir, ignore_patterns=ignore_patterns):
+                 logger.info(f"{output_dir} moved into {products_dir}")
+
+         logger.debug(f"Task {self.job_id} completed")


  class MoveFailedFiles(Task):
      """Move any failed files found
      into a 'failed' subdirectory."""
-     plugin_name = 'roc.film'
-     name = 'move_failed_files'

-     def run(self):
+     plugin_name = "roc.film"
+     name = "move_failed_files"

+     def setup_inputs(self):
          # Retrieve list of failed files
-         failed_file_list = self.pipeline.get('failed_files', default=[])
-         failed_file_count = len(failed_file_list)
+         self.failed_file_list = self.pipeline.get("failed_files", default=[])
+         self.failed_file_count = len(self.failed_file_list)

          # Retrieve output directory
-         output_dir = get_output_dir(self.pipeline)
+         self.output_dir = get_output_dir(self.pipeline)

-         if failed_file_count == 0:
-             logger.debug('No failed file(s) to move')
-         else:
+         # Retrieve failed_dir
+         self.failed_dir = self.pipeline.get("failed_dir", default=[])
+
+     def run(self):
+         # Define task job ID (long and short)
+         self.job_uuid = str(uuid.uuid4())
+         self.job_id = self.job_uuid[:8]
+         logger.debug(f"Task {self.job_id} is starting")
+         try:
+             self.setup_inputs()
+         except Exception:
+             logger.exception(f"Initializing inputs has failed for {self.job_id}!")
+             self.pipeline.exit()
+             return

+         if self.failed_file_count == 0:
+             logger.debug("No failed file(s) to move\t[{self.job_id}")
+         else:
              # Loop over failed files list
-             for failed_file in failed_file_list:
+             for i, failed_file in enumerate(self.failed_file_list):
+                 if not self.failed_dir:
+                     failed_dir = os.path.join(os.path.dirname(failed_file), "failed")
+                 else:
+                     failed_dir = self.failed_dir

                  # Make failed subdir if not exists
-                 failed_dir = os.path.join(output_dir, 'failed')
                  os.makedirs(failed_dir, exist_ok=True)

                  # if failed item is a file
                  if os.path.isfile(failed_file):
-
                      # Get failed file basename
                      failed_basename = os.path.basename(failed_file)

@@ -98,7 +121,11 @@ class MoveFailedFiles(Task):
                      # perform a safe move (i.e., copy, check and delete) into
                      # failed dir
                      if safe_move(failed_file, target_filepath):
-                         logger.info(f'{failed_file} moved into {failed_dir}')
+                         logger.warning(
+                             f"{failed_file} moved into {failed_dir}\t[{self.job_id}]"
+                         )
+
+         logger.debug(f"Task {self.job_id} completed")


  class CopyProcessedDds(Task):
@@ -106,49 +133,56 @@ class CopyProcessedDds(Task):
      Task to copy processed DDs files into a dedicated directory.
      """

-     plugin_name = 'roc.film'
-     name = 'copy_processed_dds'
+     plugin_name = "roc.film"
+     name = "copy_processed_dds"

      def run(self):
+         # Define task job ID (long and short)
+         self.job_uuid = str(uuid.uuid4())
+         self.job_id = self.job_uuid[:8]
+         logger.debug(f"Task {self.job_id} is starting")

          # Get processed file target directory
-         processed_dir = self.pipeline.get('processed_dds_dir',
-                                           default=[None], args=True)[0]
+         processed_dir = self.pipeline.get(
+             "processed_dds_dir", default=[None], args=True
+         )[0]

          # skip task if processed_dir is None
          if processed_dir is None:
              logger.info(
-                 'Skip task copy_processed_dds: No processed_dds_dir argument defined')
+                 f"No processed_dds_dir argument defined: skip task copy_processed_dds\t[{self.job_id}]"
+             )
              return
          elif not os.path.isdir(processed_dir):
-             logger.debug(f'Creating {processed_dir}...')
+             logger.debug(f"Creating {processed_dir}...\t[{self.job_id}]")
              os.makedirs(processed_dir)
          else:
-             logger.debug(f'process_dir set to {processed_dir}')
+             logger.debug(f"process_dir set to {processed_dir}\t[{self.job_id}]")

          # If processed_files list not defined in the pipeline properties,
          # initialize it
-         processed_file_list = self.pipeline.get(
-             'processed_dds_files', default=[])
+         processed_file_list = self.pipeline.get("processed_dds_files", default=[])
          processed_files_count = len(processed_file_list)
          # Skip task if no processed files
          if processed_files_count == 0:
              logger.info(
-                 'Skip task copy_processed_dds: No processed file to move')
+                 f"No processed file to move: skip task copy_processed_dds\t[{self.job_id}]"
+             )
              return

          # Get clear-dds keyword
-         clear_dds = self.pipeline.get('clear_dds', default=False)
+         clear_dds = self.pipeline.get("clear_dds", default=False)

          # Get list of failed files too
-         failed_file_list = self.pipeline.get('failed_dds_files', default=[])
+         failed_file_list = self.pipeline.get("failed_dds_files", default=[])

          # Loop over processed files to copy
          for processed_file in processed_file_list.copy():
-
              # Check first that processed file is not in failed list
              if processed_file in failed_file_list:
-                 logger.warning(f'{processed_file} found in the failed file list!')
+                 logger.warning(
+                     f"{processed_file} found in the failed file list!\t[{self.job_id}]"
+                 )
                  continue

              # Build target filepath
@@ -156,13 +190,20 @@ class CopyProcessedDds(Task):
              target_filepath = os.path.join(processed_dir, basename)

              # copy file
-             logger.debug(f'Copying {processed_file} into {processed_dir}')
+             logger.debug(
+                 f"Copying {processed_file} into {processed_dir}\t[{self.job_id}]"
+             )
              try:
                  shutil.copyfile(processed_file, target_filepath)
-             except:
-                 logger.exception(f'Copying {processed_file} into {processed_dir} has failed!')
+             except Exception as e:
+                 logger.exception(
+                     f"Copying {processed_file} into {processed_dir} has failed!\t[{self.job_id}]"
+                 )
+                 logger.debug(e)
              else:
-                 logger.info(f'{processed_file} copied into {target_filepath}')
+                 logger.info(
+                     f"{processed_file} copied into {target_filepath}\t[{self.job_id}]"
+                 )

              # Remove current file from the list in pipeline properties
              processed_file_list.remove(processed_file)
@@ -170,59 +211,70 @@ class CopyProcessedDds(Task):
              # if clear-dds keyword is passed, then remove processed Dds
              if clear_dds:
                  os.remove(processed_file)
-                 logger.debug(f'{processed_file} deleted')
+                 logger.debug(f"{processed_file} deleted\t[{self.job_id}]")


  class CopyFailedDds(Task):
      """
-     Task to copy failed DDs files into a dedicated directory.
-     """
-     plugin_name = 'roc.film'
-     name = 'copy_failed_dds'
+     Task to copy failed DDs files into a dedicated directory.
+     """
+
+     plugin_name = "roc.film"
+     name = "copy_failed_dds"

      def run(self):
+         # Define task job ID (long and short)
+         self.job_uuid = str(uuid.uuid4())
+         self.job_id = self.job_uuid[:8]
+         logger.debug(f"Task {self.job_id} is starting")

          # Get failed file target directory
-         failed_dir = self.pipeline.get('failed_dds_dir',
-                                        default=[None], args=True)[0]
+         failed_dir = self.pipeline.get("failed_dds_dir", default=[None], args=True)[0]
          # skip task if failed_dir is None
          if failed_dir is None:
              logger.info(
-                 'Skip task copy_failed_dds: No failed_dds_dir argument defined')
+                 f"No failed_dds_dir argument defined: skip task copy_failed_dds\t[{self.job_id}]"
+             )
              return
          elif not os.path.isdir(failed_dir):
-             logger.debug(f'Creating {failed_dir}...')
+             logger.debug(f"Creating {failed_dir}...\t[{self.job_id}]")
              os.makedirs(failed_dir)
          else:
-             logger.debug(f'failed_dir set to {failed_dir}')
+             logger.debug(f"failed_dir set to {failed_dir}\t[{self.job_id}]")

          # If failed_files list not defined in the pipeline properties,
          # initialize it
-         failed_file_list = self.pipeline.get('failed_dds_files', default=[])
+         failed_file_list = self.pipeline.get("failed_dds_files", default=[])
          failed_files_count = len(failed_file_list)
          # Skip task if no failed dds files
          if failed_files_count == 0:
-             logger.info('Skip task copy_failed_dds: No failed file to move')
+             logger.info(
+                 f"No failed file to move: skip task copy_failed_dds\t[{self.job_id}]"
+             )
              return

          # Get clear-dds keyword
-         clear_dds = self.pipeline.get('clear_dds', default=False)
+         clear_dds = self.pipeline.get("clear_dds", default=False)

          # Loop over failed files to copy
          for failed_file in failed_file_list.copy():
-
              # Build target filepath
              basename = os.path.basename(failed_file)
              target_filepath = os.path.join(failed_dir, basename)

              # copy file
-             logger.debug(f'Copying {failed_file} into {failed_dir}')
+             logger.debug(f"Copying {failed_file} into {failed_dir}\t[{self.job_id}]")
              try:
                  shutil.copyfile(failed_file, target_filepath)
-             except:
-                 logger.exception(f'Copying {failed_file} into {failed_dir} has failed!')
+             except Exception as e:
+                 logger.exception(
+                     f"Copying {failed_file} into {failed_dir} has failed!\t[{self.job_id}]"
+                 )
+                 logger.debug(e)
              else:
-                 logger.info(f'{failed_file} copied into {target_filepath}')
+                 logger.info(
+                     f"{failed_file} copied into {target_filepath}\t[{self.job_id}]"
+                 )

              # Remove current file from the list in pipeline properties
              failed_file_list.remove(failed_file)
@@ -230,19 +282,21 @@ class CopyFailedDds(Task):
              # if clear-dds keyword is passed, then remove processed Dds
              if clear_dds:
                  os.remove(failed_file)
-                 logger.debug(f'{failed_file} deleted')
+                 logger.debug(f"{failed_file} deleted\t[{self.job_id}]")

          # Get failed tmraw list
-         failed_tmraw_list = self.pipeline.get('failed_tmraw', default=[])
+         failed_tmraw_list = self.pipeline.get("failed_tmraw", default=[])
          failed_tmraw_count = len(failed_tmraw_list)
          # Skip task if no failed tmraw
          if failed_tmraw_count == 0:
-             logger.debug('No failed tmraw to write')
+             logger.debug("No failed tmraw to write\t[{self.job_id}]")
              return
          else:
              # Else save list of failed tmraw into text file
-             tmraw_failed_file = os.path.join(failed_dir, 'tmraw_failed.log')
-             with open(tmraw_failed_file, 'a') as fw:
+             tmraw_failed_file = os.path.join(failed_dir, "tmraw_failed.log")
+             with open(tmraw_failed_file, "a") as fw:
                  fw.writelines(failed_tmraw_list)
-                 logger.info(f'{failed_tmraw_count} failed TmRaw entries '
-                             f'saved into {tmraw_failed_file}')
+                 logger.info(
+                     f"{failed_tmraw_count} failed TmRaw entries "
+                     f"saved into {tmraw_failed_file}\t[{self.job_id}]"
+                 )
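Each of the reworked file-handling tasks above adds the same job-ID bookkeeping to its run() method: a full UUID kept for traceability plus an 8-character short ID appended to log messages. The following is a minimal standalone sketch of that convention only; the logger and task class are stand-ins for the poppy.core objects used by the plugin, not the plugin's actual API.

# Hedged sketch of the per-task job-ID tagging pattern (stand-alone stand-ins).
import logging
import uuid

logger = logging.getLogger("demo")
logging.basicConfig(level=logging.DEBUG)

class DemoTask:
    def run(self):
        # Long UUID for traceability, short 8-char prefix for readable log lines
        self.job_uuid = str(uuid.uuid4())
        self.job_id = self.job_uuid[:8]
        logger.debug("Task %s is starting", self.job_id)
        # ... task body would go here ...
        logger.debug("Task %s completed", self.job_id)

DemoTask().run()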