roc-film 1.13.4-py3-none-any.whl → 1.14.0-py3-none-any.whl

Files changed (52)
  1. roc/__init__.py +2 -1
  2. roc/film/__init__.py +2 -2
  3. roc/film/commands.py +372 -323
  4. roc/film/config/__init__.py +0 -1
  5. roc/film/constants.py +101 -65
  6. roc/film/descriptor.json +126 -95
  7. roc/film/exceptions.py +28 -27
  8. roc/film/tasks/__init__.py +16 -16
  9. roc/film/tasks/cat_solo_hk.py +86 -74
  10. roc/film/tasks/cdf_postpro.py +438 -309
  11. roc/film/tasks/check_dds.py +39 -45
  12. roc/film/tasks/db_to_anc_bia_sweep_table.py +381 -0
  13. roc/film/tasks/dds_to_l0.py +232 -180
  14. roc/film/tasks/export_solo_coord.py +147 -0
  15. roc/film/tasks/file_handler.py +91 -75
  16. roc/film/tasks/l0_to_hk.py +117 -103
  17. roc/film/tasks/l0_to_l1_bia_current.py +38 -30
  18. roc/film/tasks/l0_to_l1_bia_sweep.py +417 -329
  19. roc/film/tasks/l0_to_l1_sbm.py +250 -208
  20. roc/film/tasks/l0_to_l1_surv.py +185 -130
  21. roc/film/tasks/make_daily_tm.py +40 -37
  22. roc/film/tasks/merge_tcreport.py +77 -71
  23. roc/film/tasks/merge_tmraw.py +102 -89
  24. roc/film/tasks/parse_dds_xml.py +21 -20
  25. roc/film/tasks/set_l0_utc.py +51 -49
  26. roc/film/tests/cdf_compare.py +565 -0
  27. roc/film/tests/hdf5_compare.py +84 -62
  28. roc/film/tests/test_dds_to_l0.py +93 -51
  29. roc/film/tests/test_dds_to_tc.py +8 -11
  30. roc/film/tests/test_dds_to_tm.py +8 -10
  31. roc/film/tests/test_film.py +161 -116
  32. roc/film/tests/test_l0_to_hk.py +64 -36
  33. roc/film/tests/test_l0_to_l1_bia.py +10 -14
  34. roc/film/tests/test_l0_to_l1_sbm.py +14 -19
  35. roc/film/tests/test_l0_to_l1_surv.py +68 -41
  36. roc/film/tests/test_metadata.py +21 -20
  37. roc/film/tests/tests.py +743 -396
  38. roc/film/tools/__init__.py +5 -5
  39. roc/film/tools/dataset_tasks.py +34 -2
  40. roc/film/tools/file_helpers.py +390 -269
  41. roc/film/tools/l0.py +402 -324
  42. roc/film/tools/metadata.py +147 -127
  43. roc/film/tools/skeleton.py +12 -17
  44. roc/film/tools/tools.py +109 -92
  45. roc/film/tools/xlsx2skt.py +161 -139
  46. {roc_film-1.13.4.dist-info → roc_film-1.14.0.dist-info}/LICENSE +127 -125
  47. roc_film-1.14.0.dist-info/METADATA +60 -0
  48. roc_film-1.14.0.dist-info/RECORD +50 -0
  49. {roc_film-1.13.4.dist-info → roc_film-1.14.0.dist-info}/WHEEL +1 -1
  50. roc/film/tasks/l0_to_anc_bia_sweep_table.py +0 -348
  51. roc_film-1.13.4.dist-info/METADATA +0 -120
  52. roc_film-1.13.4.dist-info/RECORD +0 -48
roc/film/tasks/export_solo_coord.py (new file)
@@ -0,0 +1,147 @@
+ #!/usr/bin/env python3
+ # -*- coding: utf-8 -*-
+
+ """
+ Contains task to generate CSV file containing SolO S/C distance to Sun (in AU) for each date.
+ This file is needed by MUSIC FAUST app to define the occurrence rate of the Bias sweeps.
+ """
+
+ import os
+ import uuid
+ from datetime import datetime, timedelta
+ from glob import glob
+ import csv
+
+ from poppy.core.logger import logger
+ from poppy.core.task import Task
+
+ from roc.rpl.time.spice import SpiceHarvester
+
+ from roc.film.tools.file_helpers import get_output_dir
+ from roc.film.constants import CP_START_TIME, NAIF_SPICE_ID, TIME_DAILY_STRFORMAT
+
+ __all__ = ["ExportSoloHeeCoord"]
+
+
+ class ExportSoloHeeCoord(Task):
+     """
+     Task to export SolO HEE coordinates in a CSV file
+     with (distance in AU, longitude in deg, latitude in deg).
+     """
+
+     plugin_name = "roc.film"
+     name = "export_solo_hee_coord"
+
+     def add_targets(self):
+         pass
+
+     def setup_inputs(self):
+         # Get/create list of well processed DDS files
+         self.processed_files = self.pipeline.get(
+             "processed_files", default=[], create=True
+         )
+
+         # Get/create list of failed DDS files
+         self.failed_files = self.pipeline.get("failed_files", default=[], create=True)
+
+         # If output directory not found, create it
+         self.output_dir = get_output_dir(self.pipeline)
+         if not os.path.isdir(self.output_dir):
+             logger.debug(f"Making {self.output_dir}...")
+             os.makedirs(self.output_dir)
+
+         # Get path of SPICE kernels
+         self.kernel_path = SpiceHarvester.spice_kernel_path()
+
+         # Load SPICE kernels (only meta kernels for the moment)
+         self.spice = SpiceHarvester.load_spice_kernels(
+             self.kernel_path, only_mk=True, predictive=False, flown=False
+         )
+
+         # Function to convert from radians to degrees
+         self.dpr = self.spice._spiceypy.dpr()
+
+         # Define output file start time
+         self.start_time = self.pipeline.get("start_time", default=[None])[0]
+         logger.debug(f"start_time value is {self.start_time}")
+
+         # Define output file end time
+         self.end_time = self.pipeline.get("end_time", default=[None])[0]
+         logger.debug(f"end_time value is {self.end_time}")
+
+         # Generating list of days for which distance will be computed
+         if self.start_time is None:
+             self.start_time = CP_START_TIME
+
+         if self.end_time is None:
+             self.end_time = datetime.today() + timedelta(days=90)
+
+         # Get output_csv input argument
+         self.output_csv = self.pipeline.get("output_csv", default=[None])[0]
+         if self.output_csv is None:
+             # If not passed, then try to generate automatically the output CSV filename
+             basename = f"solo_ANC_solo-hee-coord_{self.start_time.strftime(TIME_DAILY_STRFORMAT)}T{self.end_time.strftime(TIME_DAILY_STRFORMAT)}"
+             pattern = os.path.join(self.output_dir, basename + "*.csv")
+             existing_files = list(glob(pattern))
+             data_version = f"{len(existing_files) + 1:02d}"
+             self.output_csv = os.path.join(
+                 self.output_dir, basename + f"_V{data_version}.csv"
+             )
+
+     def run(self):
+         # Define task job ID (long and short)
+         self.job_id = str(uuid.uuid4())
+         self.job_sid = self.job_id[:8]
+         logger.info(f"[{self.job_sid}]\t Task started")
+         try:
+             self.setup_inputs()
+         except Exception:
+             logger.exception(f"[{self.job_sid}]\t Initializing inputs has failed!")
+             self.pipeline.exit()
+             return
+
+         logger.info(f"Creating {self.output_csv} ...")
+         with open(self.output_csv, "w", newline="") as csvfile:
+             fieldnames = ["DATE", "R_AU", "HEE_LON_DEG", "HEE_LAT_DEG"]
+             writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
+             writer.writeheader()
+
+             # Compute SolO S/C HEE coordinates [r(AU), long(deg), lat(deg)]
+             # for each day of the mission
+             # and write results in the CSV file
+             current_date = self.start_time.date()
+             while current_date <= self.end_time.date():
+                 # Convert time to string then ephemeris time
+                 time_str = current_date.strftime("%Y %B %d") + " 12:00:00"
+                 et = self.spice._spiceypy.str2et(time_str)
+
+                 # Now we need to compute the actual distance between
+                 # the Sun and Solar Orbiter. The above spkezp call gives us
+                 # the apparent distance, so we need to adjust our
+                 # aberration correction appropriately.
+                 [solo_hee_pos, ltime] = self.spice._spiceypy.spkezp(
+                     NAIF_SPICE_ID["SUN"],
+                     et,
+                     "SOLO_HEE",
+                     "NONE",
+                     NAIF_SPICE_ID["SOLAR_ORBITER"],
+                 )
+                 # Convert SOLO HEE coordinates to [radius, longitude, latitude]
+                 [r, lon, lat] = self.spice._spiceypy.reclat(solo_hee_pos)
+                 lat = -lat * self.dpr
+                 lon = 180.0 + (lon * self.dpr) if lon <= 0 else (lon * self.dpr) - 180.0
+
+                 # Convert radius to AU using convrt.
+                 r_au = self.spice._spiceypy.convrt(r, "KM", "AU")
+                 # print(time_str, r_au, lon, lat)
+
+                 row_to_write = {
+                     "DATE": current_date.strftime(TIME_DAILY_STRFORMAT),
+                     "R_AU": r_au,
+                     "HEE_LON_DEG": lon,
+                     "HEE_LAT_DEG": lat,
+                 }
+                 writer.writerow(row_to_write)
+                 logger.debug(f"New line {row_to_write} in {self.output_csv}")
+
+                 current_date += timedelta(days=1)
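For readers without a SPICE kernel set at hand, the following standalone sketch mirrors the coordinate math that ExportSoloHeeCoord delegates to spiceypy (reclat, convrt and the dpr degree factor), including the sign flip and 180-degree longitude shift applied above. The position vector and the helper name hee_from_position are hypothetical; the real task obtains the vector from spkezp.

# Standalone sketch (not part of the plugin): same r/lon/lat math as the task,
# using a made-up position vector instead of a spkezp call.
import math

AU_KM = 149_597_870.7  # kilometres per astronomical unit

def hee_from_position(pos_km):
    """Return (distance in AU, longitude in deg, latitude in deg) for a Sun
    position vector expressed in km in the SOLO_HEE frame."""
    x, y, z = pos_km
    r = math.sqrt(x * x + y * y + z * z)      # radius, as spiceypy.reclat returns it
    lon = math.degrees(math.atan2(y, x))      # reclat longitude, in (-180, 180]
    lat = math.degrees(math.asin(z / r))      # reclat latitude
    lat = -lat                                # same sign flip as the task
    lon = 180.0 + lon if lon <= 0 else lon - 180.0  # same 180 deg shift as the task
    return r / AU_KM, lon, lat                # equivalent of convrt(r, "KM", "AU")

# Example with a hypothetical vector roughly 1 AU along +X:
print(hee_from_position((1.49e8, 5.0e6, -2.0e6)))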
roc/film/tasks/file_handler.py
@@ -1,16 +1,16 @@
  #!/usr/bin/env python3
  # -*- coding: utf-8 -*-

- """ Tasks for file handling in FILM plugin."""
+ """Tasks for file handling in FILM plugin."""

  import os
  import shutil
+ import uuid

  from poppy.core.logger import logger
  from poppy.core.task import Task

- __all__ = ['MoveToProdDir', 'MoveFailedFiles',
-            'CopyFailedDds', 'CopyProcessedDds']
+ __all__ = ["MoveToProdDir", "MoveFailedFiles", "CopyFailedDds", "CopyProcessedDds"]

  from roc.film.tools.tools import safe_move

@@ -20,11 +20,11 @@ from roc.film.tools.file_helpers import get_output_dir, get_products_dir
  class MoveToProdDir(Task):
      """Task to move output files folder to
      final products directory."""
-     plugin_name = 'roc.film'
-     name = 'move_to_products_dir'

-     def run(self):
+     plugin_name = "roc.film"
+     name = "move_to_products_dir"

+     def run(self):
          # TODO - add a lock file mechanism but at the task level
          # (useful here to make sure that the
          # a folder in the products_dir is not moved/removed while
@@ -32,10 +32,9 @@ class MoveToProdDir(Task):
          # Add a LockFile class instance to the Task class in Poppy ?

          # See if --no-move keyword is defined
-         no_move = self.pipeline.get('no_move', default=False, args=True)
+         no_move = self.pipeline.get("no_move", default=False, args=True)
          if no_move:
-             logger.info(
-                 'Skip current task "move_to_products_dir": --no-move is True')
+             logger.info('Skip current task "move_to_products_dir": --no-move is True')
              return

          # Retrieve pipeline output file directory
@@ -46,49 +45,65 @@ class MoveToProdDir(Task):
          products_dir = get_products_dir(self.pipeline)

          # Ignore possible lock file in the output directory
-         ignore_patterns = '*.lock'
+         ignore_patterns = "*.lock"

          if not products_dir:
              logger.info(
-                 'Skip current task "move_to_products_dir": products_dir argument not defined')
+                 'Skip current task "move_to_products_dir": products_dir argument not defined'
+             )
          else:
              output_dirbasename = os.path.basename(output_dir)
              target_dir = os.path.join(products_dir, output_dirbasename)
-             logger.info(f'Moving {output_dir} into {products_dir}')
-             if safe_move(output_dir, target_dir,
-                          ignore_patterns=ignore_patterns):
-                 logger.info(f'{output_dir} moved into {products_dir}')
+             logger.info(f"Moving {output_dir} into {products_dir}")
+             if safe_move(output_dir, target_dir, ignore_patterns=ignore_patterns):
+                 logger.info(f"{output_dir} moved into {products_dir}")


  class MoveFailedFiles(Task):
      """Move any failed files found
      into a 'failed' subdirectory."""
-     plugin_name = 'roc.film'
-     name = 'move_failed_files'

-     def run(self):
+     plugin_name = "roc.film"
+     name = "move_failed_files"

+     def setup_inputs(self):
          # Retrieve list of failed files
-         failed_file_list = self.pipeline.get('failed_files', default=[])
-         failed_file_count = len(failed_file_list)
+         self.failed_file_list = self.pipeline.get("failed_files", default=[])
+         self.failed_file_count = len(self.failed_file_list)

          # Retrieve output directory
-         output_dir = get_output_dir(self.pipeline)
+         self.output_dir = get_output_dir(self.pipeline)

-         if failed_file_count == 0:
-             logger.debug('No failed file(s) to move')
-         else:
+         # Retrieve failed_dir
+         self.failed_dir = self.pipeline.get("failed_dir", default=[])

+     def run(self):
+         # Define task job ID (long and short)
+         self.job_uuid = str(uuid.uuid4())
+         self.job_id = self.job_uuid[:8]
+         logger.info(f"Task {self.job_id} is starting")
+         try:
+             self.setup_inputs()
+         except Exception:
+             logger.exception(f"Initializing inputs has failed for {self.job_id}!")
+             self.pipeline.exit()
+             return
+
+         if self.failed_file_count == 0:
+             logger.debug("No failed file(s) to move")
+         else:
              # Loop over failed files list
-             for failed_file in failed_file_list:
+             for i, failed_file in enumerate(self.failed_file_list):
+                 if not self.failed_dir:
+                     failed_dir = os.path.join(os.path.dirname(failed_file), "failed")
+                 else:
+                     failed_dir = self.failed_dir

                  # Make failed subdir if not exists
-                 failed_dir = os.path.join(output_dir, 'failed')
                  os.makedirs(failed_dir, exist_ok=True)

                  # if failed item is a file
                  if os.path.isfile(failed_file):
-
                      # Get failed file basename
                      failed_basename = os.path.basename(failed_file)

@@ -98,7 +113,7 @@ class MoveFailedFiles(Task):
                      # perform a safe move (i.e., copy, check and delete) into
                      # failed dir
                      if safe_move(failed_file, target_filepath):
-                         logger.info(f'{failed_file} moved into {failed_dir}')
+                         logger.info(f"{failed_file} moved into {failed_dir}")


  class CopyProcessedDds(Task):
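The MoveFailedFiles changes above replace the fixed output_dir/failed destination with an optional failed_dir pipeline argument. A minimal sketch of the new resolution logic (the helper name and example path are illustrative, not part of the plugin):

import os

def resolve_failed_dir(failed_file, failed_dir=None):
    # An explicit failed_dir wins; otherwise a "failed" folder is created
    # next to the failed file itself (the new 1.14.0 fallback).
    if not failed_dir:
        failed_dir = os.path.join(os.path.dirname(failed_file), "failed")
    os.makedirs(failed_dir, exist_ok=True)
    return failed_dir

# e.g. resolve_failed_dir("/tmp/output/some_product.cdf") -> "/tmp/output/failed"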
@@ -106,49 +121,47 @@ class CopyProcessedDds(Task):
      Task to copy processed DDs files into a dedicated directory.
      """

-     plugin_name = 'roc.film'
-     name = 'copy_processed_dds'
+     plugin_name = "roc.film"
+     name = "copy_processed_dds"

      def run(self):
-
          # Get processed file target directory
-         processed_dir = self.pipeline.get('processed_dds_dir',
-                                           default=[None], args=True)[0]
+         processed_dir = self.pipeline.get(
+             "processed_dds_dir", default=[None], args=True
+         )[0]

          # skip task if processed_dir is None
          if processed_dir is None:
              logger.info(
-                 'Skip task copy_processed_dds: No processed_dds_dir argument defined')
+                 "Skip task copy_processed_dds: No processed_dds_dir argument defined"
+             )
              return
          elif not os.path.isdir(processed_dir):
-             logger.debug(f'Creating {processed_dir}...')
+             logger.debug(f"Creating {processed_dir}...")
              os.makedirs(processed_dir)
          else:
-             logger.debug(f'process_dir set to {processed_dir}')
+             logger.debug(f"process_dir set to {processed_dir}")

          # If processed_files list not defined in the pipeline properties,
          # initialize it
-         processed_file_list = self.pipeline.get(
-             'processed_dds_files', default=[])
+         processed_file_list = self.pipeline.get("processed_dds_files", default=[])
          processed_files_count = len(processed_file_list)
          # Skip task if no processed files
          if processed_files_count == 0:
-             logger.info(
-                 'Skip task copy_processed_dds: No processed file to move')
+             logger.info("Skip task copy_processed_dds: No processed file to move")
              return

          # Get clear-dds keyword
-         clear_dds = self.pipeline.get('clear_dds', default=False)
+         clear_dds = self.pipeline.get("clear_dds", default=False)

          # Get list of failed files too
-         failed_file_list = self.pipeline.get('failed_dds_files', default=[])
+         failed_file_list = self.pipeline.get("failed_dds_files", default=[])

          # Loop over processed files to copy
          for processed_file in processed_file_list.copy():
-
              # Check first that processed file is not in failed list
              if processed_file in failed_file_list:
-                 logger.warning(f'{processed_file} found in the failed file list!')
+                 logger.warning(f"{processed_file} found in the failed file list!")
                  continue

              # Build target filepath
@@ -156,13 +169,16 @@ class CopyProcessedDds(Task):
              target_filepath = os.path.join(processed_dir, basename)

              # copy file
-             logger.debug(f'Copying {processed_file} into {processed_dir}')
+             logger.debug(f"Copying {processed_file} into {processed_dir}")
              try:
                  shutil.copyfile(processed_file, target_filepath)
-             except:
-                 logger.exception(f'Copying {processed_file} into {processed_dir} has failed!')
+             except Exception as e:
+                 logger.exception(
+                     f"Copying {processed_file} into {processed_dir} has failed!"
+                 )
+                 logger.debug(e)
              else:
-                 logger.info(f'{processed_file} copied into {target_filepath}')
+                 logger.info(f"{processed_file} copied into {target_filepath}")

              # Remove current file from the list in pipeline properties
              processed_file_list.remove(processed_file)
@@ -170,59 +186,57 @@ class CopyProcessedDds(Task):
              # if clear-dds keyword is passed, then remove processed Dds
              if clear_dds:
                  os.remove(processed_file)
-                 logger.debug(f'{processed_file} deleted')
+                 logger.debug(f"{processed_file} deleted")


  class CopyFailedDds(Task):
      """
-     Task to copy failed DDs files into a dedicated directory.
-     """
-     plugin_name = 'roc.film'
-     name = 'copy_failed_dds'
+     Task to copy failed DDs files into a dedicated directory.
+     """

-     def run(self):
+     plugin_name = "roc.film"
+     name = "copy_failed_dds"

+     def run(self):
          # Get failed file target directory
-         failed_dir = self.pipeline.get('failed_dds_dir',
-                                        default=[None], args=True)[0]
+         failed_dir = self.pipeline.get("failed_dds_dir", default=[None], args=True)[0]
          # skip task if failed_dir is None
          if failed_dir is None:
-             logger.info(
-                 'Skip task copy_failed_dds: No failed_dds_dir argument defined')
+             logger.info("Skip task copy_failed_dds: No failed_dds_dir argument defined")
              return
          elif not os.path.isdir(failed_dir):
-             logger.debug(f'Creating {failed_dir}...')
+             logger.debug(f"Creating {failed_dir}...")
              os.makedirs(failed_dir)
          else:
-             logger.debug(f'failed_dir set to {failed_dir}')
+             logger.debug(f"failed_dir set to {failed_dir}")

          # If failed_files list not defined in the pipeline properties,
          # initialize it
-         failed_file_list = self.pipeline.get('failed_dds_files', default=[])
+         failed_file_list = self.pipeline.get("failed_dds_files", default=[])
          failed_files_count = len(failed_file_list)
          # Skip task if no failed dds files
          if failed_files_count == 0:
-             logger.info('Skip task copy_failed_dds: No failed file to move')
+             logger.info("Skip task copy_failed_dds: No failed file to move")
              return

          # Get clear-dds keyword
-         clear_dds = self.pipeline.get('clear_dds', default=False)
+         clear_dds = self.pipeline.get("clear_dds", default=False)

          # Loop over failed files to copy
          for failed_file in failed_file_list.copy():
-
              # Build target filepath
              basename = os.path.basename(failed_file)
              target_filepath = os.path.join(failed_dir, basename)

              # copy file
-             logger.debug(f'Copying {failed_file} into {failed_dir}')
+             logger.debug(f"Copying {failed_file} into {failed_dir}")
              try:
                  shutil.copyfile(failed_file, target_filepath)
-             except:
-                 logger.exception(f'Copying {failed_file} into {failed_dir} has failed!')
+             except Exception as e:
+                 logger.exception(f"Copying {failed_file} into {failed_dir} has failed!")
+                 logger.debug(e)
              else:
-                 logger.info(f'{failed_file} copied into {target_filepath}')
+                 logger.info(f"{failed_file} copied into {target_filepath}")

              # Remove current file from the list in pipeline properties
              failed_file_list.remove(failed_file)
@@ -230,19 +244,21 @@ class CopyFailedDds(Task):
              # if clear-dds keyword is passed, then remove processed Dds
              if clear_dds:
                  os.remove(failed_file)
-                 logger.debug(f'{failed_file} deleted')
+                 logger.debug(f"{failed_file} deleted")

          # Get failed tmraw list
-         failed_tmraw_list = self.pipeline.get('failed_tmraw', default=[])
+         failed_tmraw_list = self.pipeline.get("failed_tmraw", default=[])
          failed_tmraw_count = len(failed_tmraw_list)
          # Skip task if no failed tmraw
          if failed_tmraw_count == 0:
-             logger.debug('No failed tmraw to write')
+             logger.debug("No failed tmraw to write")
              return
          else:
              # Else save list of failed tmraw into text file
-             tmraw_failed_file = os.path.join(failed_dir, 'tmraw_failed.log')
-             with open(tmraw_failed_file, 'a') as fw:
+             tmraw_failed_file = os.path.join(failed_dir, "tmraw_failed.log")
+             with open(tmraw_failed_file, "a") as fw:
                  fw.writelines(failed_tmraw_list)
-             logger.info(f'{failed_tmraw_count} failed TmRaw entries '
-                         f'saved into {tmraw_failed_file}')
+             logger.info(
+                 f"{failed_tmraw_count} failed TmRaw entries "
+                 f"saved into {tmraw_failed_file}"
+             )
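CopyProcessedDds and CopyFailedDds share the same copy-then-dequeue pattern: iterate over a copy() of the pipeline list so entries can be removed as they succeed, and optionally delete the source when the clear-dds keyword is set. A condensed, self-contained sketch of that pattern (the helper name and arguments are illustrative, not plugin API):

import os
import shutil

def copy_and_dequeue(file_list, target_dir, clear_source=False):
    os.makedirs(target_dir, exist_ok=True)
    # Iterate over a copy() so file_list itself can be mutated inside the loop.
    for path in file_list.copy():
        target = os.path.join(target_dir, os.path.basename(path))
        try:
            shutil.copyfile(path, target)
        except Exception:
            # Leave the entry in file_list so the failure stays visible downstream.
            continue
        else:
            file_list.remove(path)
            if clear_source:
                os.remove(path)  # plays the role of the clear-dds keyword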