roc-film 1.14.3__py3-none-any.whl → 1.14.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
roc/film/constants.py CHANGED
@@ -49,6 +49,7 @@ __all__ = [
     "TIME_WAIT_SEC",
     "SQL_LIMIT",
     "BIA_SWEEP_TABLE_PACKETS",
+    "QB_EVENT_LOG_LIST",
 ]
 
 # root directory of the module
@@ -168,3 +169,16 @@ SQL_LIMIT = 1000000000
 
 # Names of TC handling Bias sweep table
 BIA_SWEEP_TABLE_PACKETS = ["TC_DPU_LOAD_BIAS_SWEEP", "TC_DPU_CLEAR_BIAS_SWEEP"]
+
+# List of events to query for setting quality_bitmask in cdf_postpro task
+QB_EVENT_LOG_LIST = [
+    "BIA_SWEEP_ANT1",
+    "BIA_SWEEP_ANT2",
+    "BIA_SWEEP_ANT3",
+    "EMC_MAND_QUIET",
+    "EMC_PREF_NOISY",
+    "TCM",
+    "SLEW",
+    "WOL",
+    "ROLL",
+]
roc/film/descriptor.json CHANGED
@@ -6,10 +6,10 @@
     "description": "RPW FILe Maker (FILM): Plugin to make RPW L0, L1 and HK data files"
   },
   "release": {
-    "version": "1.14.3",
-    "date": "2025-03-12",
+    "version": "1.14.4",
+    "date": "2025-03-13",
     "author": "Xavier Bonnin <xavier.bonnin@obspm.fr>, ROC Team <roc.support@sympa.obspm.fr>",
-    "modification": "Extend time range for event_log query in cdf_postpro set_bitmask",
+    "modification": "filtering event requested from event_log in cdf_postpro _set_bitmask",
     "url": "https://gitlab.obspm.fr/ROC/Pipelines/Plugins/FILM"
   },
   "tasks": [
roc/film/tasks/cdf_postpro.py CHANGED
@@ -7,6 +7,7 @@ import json
 import os
 import shutil
 from datetime import datetime, timedelta
+import itertools
 
 from sqlalchemy import and_
 import numpy as np
@@ -29,13 +30,18 @@ from roc.dingo.constants import PIPELINE_DATABASE
 from roc.film import (
     INPUT_DATETIME_STRFTIME,
 )
-from roc.film.tools.file_helpers import is_output_dir, get_output_dir
+from roc.film.tools.file_helpers import (
+    is_output_dir,
+    get_output_dir,
+    get_files_datetime,
+)
 from roc.film.tools import glob_list
 from roc.film.constants import (
     CDFCONVERT_PATH,
     TIMEOUT,
     CDF_POST_PRO_OPTS_ARGS,
     TIME_DAILY_STRFORMAT,
+    QB_EVENT_LOG_LIST,
 )
 from roc.film.exceptions import L1PostProError
 
@@ -69,7 +75,7 @@ class CdfPostPro(Task):
     @Connector.if_connected(PIPELINE_DATABASE)
     def setup_inputs(self):
         try:
-            self.cdf_file_list = glob_list(self.inputs["cdf_file"].filepath)
+            self.cdf_file_list = sorted(glob_list(self.inputs["cdf_file"].filepath))
         except Exception:
             raise ValueError('No input target "cdf_file" passed')
 
@@ -153,6 +159,11 @@ class CdfPostPro(Task):
             no_spice=self.no_spice,
         )
 
+        # Get time range of the input files
+        files_datetimes = get_files_datetime(self.cdf_file_list)
+        files_datetimes = sorted(list(itertools.chain.from_iterable(files_datetimes)))
+        self.time_range = [files_datetimes[0], files_datetimes[-1]]
+
         # get a database session
         self.session = Connector.manager[PIPELINE_DATABASE].session
 
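The new `setup_inputs` code above flattens the per-file datetime lists into one global time range. A minimal sketch of that flattening, with made-up datetimes (not taken from the package):

```python
# get_files_datetime() returns one list of datetimes per file
# (one element for daily files, two for files carrying a time range).
import itertools
from datetime import datetime

files_datetimes = [
    [datetime(2025, 3, 1)],                                              # daily file
    [datetime(2025, 3, 2, 0, 0, 0), datetime(2025, 3, 2, 23, 59, 59)],  # ranged file
]
flat = sorted(itertools.chain.from_iterable(files_datetimes))
time_range = [flat[0], flat[-1]]  # earliest start, latest end over all input files
```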
@@ -169,9 +180,8 @@ class CdfPostPro(Task):
         logger.info(f"Task job {self.job_id} is starting")
         try:
             self.setup_inputs()
-        except Exception as e:
-            logger.error(f"Initializing inputs has failed for {self.job_id}!")
-            logger.debug(e)
+        except:  # noqa: E722
+            logger.exception(f"Initializing inputs has failed for {self.job_id}!")
             self.pipeline.exit()
             return
 
@@ -180,16 +190,17 @@ class CdfPostPro(Task):
             f"{len(self.cdf_file_list)} input CDF files "
             f"to post-process\t[{self.job_id}]"
         )
+        logger.debug(f"Covered time range is {self.time_range}\t[{self.job_id}]")
         for current_file in self.cdf_file_list:
             if self.overwrite:
                 # If overwrite is set, then update current file
-                logger.warning(f"{current_file} will be overwritten\t[{self.job_id}]")
+                logger.info(f"{current_file} will be overwritten\t[{self.job_id}]")
                 self.current_file = current_file
             else:
                 # Otherwise create a copy of the input CDF in the output
                 # directory, then update the copy
                 logger.info(
-                    f"Making a copy of {current_file} in {self.output_dir}\t[{self.job_id}]"
+                    f"Working with a copy of {current_file} in {self.output_dir}\t[{self.job_id}]"
                 )
                 self.current_file = os.path.join(
                     self.output_dir, os.path.basename(current_file)
@@ -272,10 +283,10 @@ class CdfPostPro(Task):
                 logger.error(
                     f"cdfconvert calling has expired: \n {e}\t[{self.job_id}]"
                 )
-            except Exception:
+            except:  # noqa: E722
                 logger.error("cdfconvert calling has failed!\t[{self.job_id}]")
 
-        except Exception:
+        except:  # noqa: E722
             logger.exception(
                 f"Post-processing {self.current_file} has failed\t[{self.job_id}]"
             )
@@ -582,8 +593,9 @@ class CdfPostPro(Task):
         logger.debug(f"Querying event_log table...\t[{self.job_id}]")
         model = EventLog
         filters = [
-            model.start_time >= self.time_min - timedelta(hours=1),
-            model.end_time <= self.time_max + timedelta(hours=1),
+            model.start_time >= self.time_range[0] - timedelta(days=1),
+            model.end_time <= self.time_range[1] + timedelta(days=1),
+            model.label.in_(QB_EVENT_LOG_LIST),
         ]
         self.event_log = query_db(
             self.session,
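For readers unfamiliar with SQLAlchemy filter lists, here is a self-contained sketch of the widened query. The `EventLog` model below is a throwaway stand-in; the real model lives in roc.dingo and is not shown in this diff.

```python
# Hedged sketch: pad the covered time range by one day on each side and keep
# only the labels of interest (a subset of QB_EVENT_LOG_LIST shown here).
from datetime import datetime, timedelta
from sqlalchemy import Column, DateTime, Integer, String, select
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class EventLog(Base):
    __tablename__ = "event_log"
    id = Column(Integer, primary_key=True)
    label = Column(String)
    start_time = Column(DateTime)
    end_time = Column(DateTime)

time_range = [datetime(2025, 3, 1), datetime(2025, 3, 2)]
stmt = select(EventLog).where(
    EventLog.start_time >= time_range[0] - timedelta(days=1),
    EventLog.end_time <= time_range[1] + timedelta(days=1),
    EventLog.label.in_(["TCM", "SLEW", "WOL"]),
)
```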
@@ -604,20 +616,18 @@ class CdfPostPro(Task):
         # Loop over events to fill quality_bitmask
         for i, row in self.event_log.iterrows():
             # Filter events
-            if row["label"] not in [
-                "BIA_SWEEP_ANT1",
-                "BIA_SWEEP_ANT2",
-                "BIA_SWEEP_ANT3",
-                "EMC_MAND_QUIET",
-                "EMC_PREF_NOISY",
-                "TCM",
-                "SLEW",
-                "WOL",
-                "ROLL",
-            ]:
+            if row["label"] not in QB_EVENT_LOG_LIST:
+                logger.exception(
+                    f"{row['label']} is not in the list {QB_EVENT_LOG_LIST}"
+                )
+                raise ValueError
+
+            # Check that CDF time range covering the event
+            if row["start_time"] > self.time_max or row["end_time"] < self.time_min:
+                # If not, skip next steps
                 continue
 
-            # Get time range covering the event
+            # Get Epoch indices associated to the time range covering the event
             w = (row["start_time"] <= epoch) & (row["end_time"] >= epoch)
             if not any(w):
                 continue
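The `w` mask above selects Epoch samples falling inside an event window. A self-contained sketch of that pattern, with illustrative times and bit positions (the plugin's actual bit assignments are not shown in this diff):

```python
# Map an event window onto Epoch samples with a boolean mask, then OR a bit
# into the quality bitmask.
import numpy as np

epoch = np.arange("2025-03-01T00", "2025-03-02T00", dtype="datetime64[h]")
quality_bitmask = np.zeros(epoch.size, dtype=np.uint16)

start_time = np.datetime64("2025-03-01T06")
end_time = np.datetime64("2025-03-01T09")

w = (start_time <= epoch) & (end_time >= epoch)  # samples inside the event
if w.any():
    quality_bitmask[w] |= 1 << 2  # hypothetical bit for this event type
```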
roc/film/tools/file_helpers.py CHANGED
@@ -6,6 +6,8 @@ from glob import glob
 from datetime import datetime, timedelta
 import uuid
 import calendar
+from typing import List, Union
+from pathlib import Path
 
 import h5py
 import numpy as np
@@ -33,7 +35,7 @@ from roc.film.exceptions import (
     LoadDataSetError,
     NoData,
 )
-from roc.film.tools import valid_data_version, get_datasets
+from roc.film.tools import valid_data_version, get_datasets, extract_datetime
 from roc.film.constants import (
     TIME_ISO_STRFORMAT,
     CDF_TRANGE_STRFORMAT,
@@ -55,6 +57,7 @@ __all__ = [
     "get_output_dir",
     "get_master_cdf_dir",
     "is_output_dir",
+    "get_files_datetime",
 ]
 
 
@@ -176,7 +179,7 @@ def generate_filepath(
 def get_l0_file(pipeline) -> str:
     try:
         return pipeline.args.l0_file[0]
-    except Exception:
+    except:  # noqa: E722
         # If not defined as input argument, then assume that it is already
         # defined as target input
         pass
@@ -188,7 +191,7 @@ def get_l0_files(pipeline: Pipeline) -> list:
         if not isinstance(l0_files, list):
             l0_files = [l0_files]
         return l0_files
-    except Exception:
+    except:  # noqa: E722
         # If not defined as input argument, then assume that it is already
         # defined as target input
         pass
@@ -214,6 +217,31 @@ def put_cdf_global(cdf: CDF, metadata: dict) -> bool:
     return True
 
 
+def get_files_datetime(
+    list_of_files: Union[List[str], List[Path]],
+) -> List[List[datetime]]:
+    """
+    Get the timerange of a list of input files.
+    Names of the files must be compliant with SolO file naming conventions.
+
+    :param list_of_files: Files to process
+    :return: timeranges of input files
+    """
+    # Initialize output
+    file_datetimes = []
+    for current_file in list_of_files:
+        # Extract date and time of input files and convert it into datetime object
+        file_date = Path(current_file).stem.split("_")[3]
+        if "T" in file_date:
+            str_tformat = "%Y%m%dT%H%M%S"
+        else:
+            str_tformat = "%Y%m%d"
+
+        file_datetimes.append(extract_datetime(file_date, str_tformat=str_tformat))
+
+    return file_datetimes
+
+
 def get_master_cdf_dir(task: type[Task]) -> str:
     """
     Try to load the master_dir directory from :
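A hypothetical call to the new helper, assuming SolO-style names where the fourth underscore-separated field carries either a date or a time range (the file names below are made up):

```python
from roc.film.tools.file_helpers import get_files_datetime

files = [
    "solo_L1_rpw-bia-current_20250301_V01.cdf",                      # daily
    "solo_L1_rpw-tnr-surv_20250302T000000-20250302T235959_V02.cdf",  # ranged
]
print(get_files_datetime(files))
# [[datetime(2025, 3, 1, 0, 0)],
#  [datetime(2025, 3, 2, 0, 0), datetime(2025, 3, 2, 23, 59, 59)]]
```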
roc/film/tools/tools.py CHANGED
@@ -9,6 +9,8 @@ import shutil
 import argparse
 from datetime import datetime
 import glob
+from typing import List, Union
+from pathlib import Path
 
 import numpy as np
 
@@ -99,41 +101,41 @@ def raise_error(message, exception=FilmException):
     raise exception(message)
 
 
-def valid_time(t, format=INPUT_DATETIME_STRFTIME):
+def valid_time(t: str, str_format: str = INPUT_DATETIME_STRFTIME) -> datetime:
     """
     Validate input datetime string format.
 
     :param t: input datetime string
-    :param format: expected datetime string format
+    :param str_format: expected datetime string format
     :return: datetime object with input datetime info
     """
     if t:
         try:
-            return datetime.strptime(t, format)
+            return datetime.strptime(t, str_format)
         except ValueError:
             raise_error(
                 f"Not a valid datetime: '{t}'.", exception=argparse.ArgumentTypeError
             )
 
 
-def valid_date(t, format=TIME_DAILY_STRFORMAT):
+def valid_date(t: str, str_format: str = TIME_DAILY_STRFORMAT) -> datetime:
     """
     Validate input date string format.
 
     :param t: input datetime string
-    :param format: expected datetime string format
+    :param str_format: expected datetime string format
     :return: datetime object with input datetime info
     """
     if t:
         try:
-            return datetime.strptime(t, format)
+            return datetime.strptime(t, str_format)
         except ValueError:
             raise_error(
                 f"Not a valid date: '{t}'.", exception=argparse.ArgumentTypeError
             )
 
 
-def valid_data_version(data_version):
+def valid_data_version(data_version: Union[str, int]) -> str:
    """
    Make sure to have a valid data version.
 
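These validators are the usual argparse `type=` callables. A hedged sketch of how they are typically wired up (the plugin's real CLI definition is not part of this diff, and TIME_DAILY_STRFORMAT is assumed here to be a daily %Y%m%d format):

```python
import argparse

parser = argparse.ArgumentParser()
# Each validator returns a parsed value on success and raises
# argparse.ArgumentTypeError (via raise_error) on bad input.
parser.add_argument("--date", type=valid_date)
parser.add_argument("--data-version", type=valid_data_version)

args = parser.parse_args(["--date", "20250301", "--data-version", "1"])
print(args.date)          # datetime.datetime(2025, 3, 1, 0, 0)
print(args.data_version)  # normalized version string
```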
@@ -149,51 +151,60 @@ def valid_data_version(data_version):
     raise_error(f"Input value for --data-version is not valid! ({data_version})")
 
 
-def valid_single_file(file):
+def valid_single_file(file_to_validate: Union[str, list]) -> Union[str, Path]:
     """
     Make sure to have a valid single file.
 
-    :param file: 1-element list or string containing the path to the file
-    :return:
+    :param file_to_validate: 1-element list or string containing the path to the file
+    :return: Path of the input file if it is valid
     """
     try:
-        if isinstance(file, list):
-            file = file[0]
-        if os.path.isfile(file):
-            return file
+        if isinstance(file_to_validate, list):
+            file_to_validate = file_to_validate[0]
+        if os.path.isfile(str(file_to_validate)):
+            return file_to_validate
         else:
             raise FileNotFoundError
     except FileNotFoundError:
-        raise_error(f"Input file not found! ({file})", exception=FileNotFoundError)
+        raise_error(
+            f"Input file not found! ({file_to_validate})", exception=FileNotFoundError
+        )
     except ValueError:
-        raise_error(f"Input file is not valid! ({file})", exception=ValueError)
+        raise_error(
+            f"Input file is not valid! ({file_to_validate})", exception=ValueError
+        )
     except Exception as e:
-        raise_error(f"Problem with input file! ({file})", exception=e)
+        raise_error(f"Problem with input file! ({file_to_validate})", exception=e)
 
 
-def valid_dir(dir):
+def valid_dir(dir_to_validate: Union[str, Path]) -> Union[str, Path]:
     """
     Make sure to have a valid input directory.
 
-    :param dir: 1-element list or string containing the path to the directory
-    :return:
+    :param dir_to_validate: 1-element list or string containing the path to the directory
+    :return:Path of the input directory if it is valid
     """
     try:
-        if isinstance(dir, list):
-            dir = dir[0]
-        if os.path.isdir(dir):
-            return dir
+        if isinstance(dir_to_validate, list):
+            dir_to_validate = dir_to_validate[0]
+        if os.path.isdir(dir_to_validate):
+            return dir_to_validate
         else:
             raise IsADirectoryError
     except IsADirectoryError:
-        raise_error(f"Input directory not found! ({dir})", exception=IsADirectoryError)
+        raise_error(
+            f"Input directory not found! ({dir_to_validate})",
+            exception=IsADirectoryError,
+        )
     except ValueError:
-        raise_error(f"Input directory is not valid! ({dir})", exception=ValueError)
+        raise_error(
+            f"Input directory is not valid! ({dir_to_validate})", exception=ValueError
+        )
     except Exception as e:
-        raise_error(f"Problem with input directory! ({dir})", exception=e)
+        raise_error(f"Problem with input directory! ({dir_to_validate})", exception=e)
 
 
-def unique_dates(utc_times):
+def unique_dates(utc_times: list) -> List[datetime.date]:
     """
     Get list of unique dates from input list of utc_time
 
@@ -210,29 +221,29 @@ def unique_dates(utc_times):
     return dates
 
 
-def extract_datetime(str_datetime):
+def extract_datetime(str_datetime: str, str_tformat: str = "%Y%m%d") -> List[datetime]:
     """
     Extract Datetime attribute value.
 
-    :param str_datetime: String containing Datetime attribute value (can be time range 'YYYYMMDDThh:mm:ss-YYYYMMDDThh:mm:ss' or daily 'YYYYMMDD' format)
-    :return: 2-elements list containing Datetime start/end time (if input Datetime has a daily format, return the day twice).
+    :param str_datetime: String containing Datetime attribute value (can be time range or daily format)
+    :param str_tformat: expected datetime string format
+    :return: Datetime start/end times. If input Datetime has a daily format, return 1-element list.
     """
+    out_datetime = None
 
     str_datetime_list = str_datetime.split("-")
+
     if len(str_datetime_list) == 1:
-        out_datetime = [datetime.strptime(str_datetime, "%Y%m%d")] * 2
+        out_datetime = [datetime.strptime(str_datetime, str_tformat)]
     elif len(str_datetime_list) == 2:
-        out_datetime = [
-            datetime.strptime(dt, "%Y%m%dT%H%M%S") for dt in str_datetime_list
-        ]
+        out_datetime = [datetime.strptime(dt, str_tformat) for dt in str_datetime_list]
     else:
-        logger.error(f"Wrong input datetime format: {str_datetime}")
-        return None
+        logger.error(f"Wrong file name format: {str_datetime}")
 
     return out_datetime
 
 
-def get_latest_file(file_list):
+def get_latest_file(file_list: list) -> Union[Path, str]:
     """
     Get the latest version file from an input list of files.
     Input files must be formatted using ROC standards
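The behavior change in `extract_datetime` is visible from the docstring: a daily string now yields a one-element list instead of a duplicated day, and the parsing format is configurable. A quick illustration mirroring the diff above:

```python
from roc.film.tools import extract_datetime

# Daily value -> one-element list (previously the day was returned twice)
extract_datetime("20250301")
# -> [datetime(2025, 3, 1, 0, 0)]

# Dash-separated range -> two-element list, parsed with str_tformat
extract_datetime("20250301T060000-20250301T090000", str_tformat="%Y%m%dT%H%M%S")
# -> [datetime(2025, 3, 1, 6, 0), datetime(2025, 3, 1, 9, 0)]
```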
@@ -312,7 +323,7 @@ def get_datasets(task, task_name):
     return dataset_list
 
 
-def unique_dict_list(list_of_dict):
+def unique_dict_list(list_of_dict: List[dict]) -> List[dict]:
     """
     Unify an input list of dict.
 
@@ -322,7 +333,7 @@
     return [i for n, i in enumerate(list_of_dict) if i not in list_of_dict[n + 1 :]]
 
 
-def sort_dict_list(list_of_dict, key):
+def sort_dict_list(list_of_dict: List[dict], key: object) -> List[dict]:
     """
     Sort a list of dictionaries
     using a given keyword in the dictionaries.
@@ -334,9 +345,9 @@
     return sorted(list_of_dict, key=lambda i: i[key])
 
 
-def sort_indices(list_to_sort):
+def sort_indices(list_to_sort: list) -> list:
     """
-    Return sorted indices of a input list
+    Return sorted indices of an input list
 
     :param list_to_sort: list for which sorted indices must be returned
     :return: list of sorted indices
@@ -345,7 +356,7 @@
     return sorted(range(len(list_to_sort)), key=lambda k: list_to_sort[k])
 
 
-def safe_move(src, dst, ignore_patterns=[]):
+def safe_move(src: str, dst: str, ignore_patterns: Union[list, None] = None) -> bool:
     """
     Perform a safe move of a file or directory.
 
@@ -354,6 +365,8 @@ def safe_move(src, dst, ignore_patterns=[]):
     :param ignore_patterns: string containing the file patterns to ignore (for copytree only)
     :return: True if the move has succeeded, False otherwise
     """
+    if not ignore_patterns:
+        ignore_patterns = []
 
     # Initialize output
     is_copied = False
@@ -366,7 +379,7 @@ def safe_move(src, dst, ignore_patterns=[]):
             shutil.copytree(
                 src,
                 dst,
-                ignore=shutil.ignore_patterns(ignore_patterns),
+                ignore=shutil.ignore_patterns(*ignore_patterns),
                 dirs_exist_ok=True,
             )
         except Exception:
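The one-character fix above matters because `shutil.ignore_patterns` takes `*patterns`: passing the list unexpanded handed it a single list object instead of individual glob strings. A quick demonstration of the corrected call:

```python
import shutil

patterns = ["*.tmp", "*.log"]
ignore = shutil.ignore_patterns(*patterns)  # each pattern matched individually

# The returned callable mimics what copytree invokes for each directory:
print(ignore("some_dir", ["a.tmp", "b.log", "c.cdf"]))
# -> {'a.tmp', 'b.log'}
```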
@@ -436,40 +449,19 @@ def sort_cdf_by_epoch(cdf, descending=False, zvar_list=[]):
 
 
 def extract_file_fields(
-    rpw_file,
-    get_source=False,
-    get_level=False,
-    get_descriptor=False,
-    get_datetime=False,
-    get_version=False,
-):
+    rpw_file: Union[Path, str],
+) -> list:
     """
     Extract RPW file fields (assuming SolO file naming standards)
 
     :param rpw_file: RPW file to split
-    :param get_source: return only source field
-    :param get_level: return only level field
-    :param get_descriptor: return only descriptor field
-    :param get_datetime: return only datetime field
-    :param get_version: return only data version field
-    :return: list of file fields (or a scalar with expected field)
+    :return: list of file fields
     """
-    fields = os.path.splitext(os.path.basename(rpw_file))[0].split("_")
+    fields = Path(rpw_file).stem.split("_")
 
     if len(fields) < 5:
-        logger.warning(f'Cannot extract file fields: invalid input file "{rpw_file}"!')
-        return None
-
-    if get_source:
-        fields = fields[0]
-    elif get_level:
-        fields = fields[1]
-    elif get_descriptor:
-        fields = fields[2]
-    elif get_datetime:
-        fields = fields[3]
-    elif get_version:
-        fields = fields[4]
+        logger.error(f'Cannot extract file fields: invalid input file "{rpw_file}"!')
+        raise ValueError
 
     return fields
 
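With the `get_*` flags removed, callers now index the returned field list directly. A hypothetical example (made-up file name):

```python
fields = extract_file_fields("solo_L1_rpw-bia-current_20250301_V01.cdf")
# fields == ['solo', 'L1', 'rpw-bia-current', '20250301', 'V01']
level, file_datetime, version = fields[1], fields[3], fields[4]
```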
@@ -513,7 +505,8 @@ class Map(dict):
 
 def glob_list(list_of_files: list) -> list:
     """
-    Perform glob.glob on a list of input files.
+    Apply glob.glob() method on a list of input files.
+    It permits to expand possible regex in the list of files.
 
     :param list_of_files: List of input files (strings)
     :type:list
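For context, a functionally equivalent sketch of what `glob_list` does (the plugin's actual implementation is not shown in this diff; note the docstring says "regex" but these are glob patterns):

```python
from glob import glob

def glob_list_sketch(list_of_files: list) -> list:
    # Expand each glob pattern and flatten the results into one list
    return [f for pattern in list_of_files for f in glob(str(pattern))]

# e.g. glob_list_sketch(["/data/rpw/*.cdf"]) -> all CDF files under /data/rpw
```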
roc_film-1.14.4.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: roc-film
-Version: 1.14.3
+Version: 1.14.4
 Summary: RPW FILe Maker (FILM): Plugin to make RPW L0, L1 and HK data files
 License: CECILL-2.1
 Author: Xavier Bonnin
roc_film-1.14.4.dist-info/RECORD CHANGED
@@ -2,12 +2,12 @@ roc/__init__.py,sha256=0LOVV4ExL9DrAzmNJj6c6395-nQTPM6v6S1hJnBAf3E,89
 roc/film/__init__.py,sha256=XBPsnRsgcUjJnutv_tkgNViw2TqnA5ozO-bNO2HY-mg,143
 roc/film/commands.py,sha256=xzyARalgWc6mPfN5nUvDtqTzaovP2_Xbi6wCDsS4tVY,37019
 roc/film/config/__init__.py,sha256=Yh8cG85CwN7OIAndtBs213LZn5NwEjPjFYKpMnagJuc,68
-roc/film/constants.py,sha256=zibC4vjLfg3fpH3BP7AGW4j48vf-AaXn--Afx5gmGH4,3998
-roc/film/descriptor.json,sha256=sA4R1oLl_TfhL06xNfmZJfkm7RcZ6Zt4nnidDRr41nQ,33532
+roc/film/constants.py,sha256=wDM0-UbKxNvs03uoabi0oHhOHNNNp8oPnBeRMkjXefc,4278
+roc/film/descriptor.json,sha256=VmGeJd-03Ydi3ph2DQtua0T_ZE8802G-pYLeOkBKYwE,33536
 roc/film/exceptions.py,sha256=kGWP9VR_DjDdyZYC5gDixtvV7xAv7qEx5srzS4_8TlU,5177
 roc/film/tasks/__init__.py,sha256=GNuQAi7E9nOApSSUUg4DEKlY0OlOzVaJexFKK0h9A2c,961
 roc/film/tasks/cat_solo_hk.py,sha256=PX3HkyJCgFHrw_wwa44r_tWcXQ7jCbd6oiZGS8KRXfI,11227
-roc/film/tasks/cdf_postpro.py,sha256=HZJQg1NLsnuCV2ClnbCgEIMKzR4_gyBfr8FKUYw4RFU,25363
+roc/film/tasks/cdf_postpro.py,sha256=55zrk4hVZCWBpIRIlq7s6W8qWAbPzQmmdemqhfN-jI8,25948
 roc/film/tasks/check_dds.py,sha256=pb0YZGu_cqPL2-Vi5aUaXAFJy86R2rOLVVaXnTLArug,3734
 roc/film/tasks/db_to_anc_bia_sweep_table.py,sha256=o-rJpj1VzS9azAT_EiWL9fxaZQt2qaArKtGiLMHv3S4,12781
 roc/film/tasks/dds_to_l0.py,sha256=nhsBESTlkEqiKAVVKek2oXe5neLFet1nTVsTiGTm1zY,22120
@@ -38,13 +38,13 @@ roc/film/tests/test_metadata.py,sha256=1nl5or5RPMsIgMqtvYz28qdZXVgjYw6olphMm6y8W
 roc/film/tests/tests.py,sha256=boEPC42mZYQfinYE3jyol-7O34ttU5p_Ei0a_9YUHM4,44273
 roc/film/tools/__init__.py,sha256=dkYmLgy3b_B1T3ZZ6s_rv7NyDjLF0yOJPerjLEKAFlA,303
 roc/film/tools/dataset_tasks.py,sha256=CI9UIYchLwXfcjJoD8PsaFIcNX8akAsXz4RQ4hqhJeU,2825
-roc/film/tools/file_helpers.py,sha256=KtcQxwIBfawj6HnrmvV91YjdvXm4tRfKG4ORVOhccds,30983
+roc/film/tools/file_helpers.py,sha256=NJlinZzTORa-zteQ3qHufu6DK1wSWBkg9U2XzP2_vgE,31876
 roc/film/tools/l0.py,sha256=a5xxk3BYxtwOeZp36AoDPtF07VZpXfyKrpIYcC83u6w,46657
 roc/film/tools/metadata.py,sha256=gYFoo_VinBFoJOVIBwDfSqq0WvYTdDaS3NjEDtYREUM,12409
 roc/film/tools/skeleton.py,sha256=aTe6VWvy5Y79FuA3aKieTQ91m26PEvJ3MnzC4ZZqMvc,10765
-roc/film/tools/tools.py,sha256=q2eN494T8irkNUn0d6_oQs2W_ZJj1OxJu_ViORGOa3g,17945
+roc/film/tools/tools.py,sha256=b273WwDvxdowH9d8BqJX9REI3TpI7lzFsTviO23hzv4,18252
 roc/film/tools/xlsx2skt.py,sha256=sHmFLGuL1pT1N4V4Nk8i7yoHM2lveXphuvUUN28MvJs,19599
-roc_film-1.14.3.dist-info/LICENSE,sha256=TqI0k3vHsKpSR-Q2aQ0euTJIdbx1kOzeUL79OONRkKU,21778
-roc_film-1.14.3.dist-info/METADATA,sha256=sjsTi2NAdNWTVp9LlImwsP379OVm_FLGJxECDh2VRng,1805
-roc_film-1.14.3.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
-roc_film-1.14.3.dist-info/RECORD,,
+roc_film-1.14.4.dist-info/LICENSE,sha256=TqI0k3vHsKpSR-Q2aQ0euTJIdbx1kOzeUL79OONRkKU,21778
+roc_film-1.14.4.dist-info/METADATA,sha256=ykXd1Tj0Os1MltsqoJ45Q2c7_MDTahDyVPpBYbxLDiA,1805
+roc_film-1.14.4.dist-info/WHEEL,sha256=XbeZDeTWKc1w7CSIyre5aMDU_-PohRwTQceYnisIYYY,88
+roc_film-1.14.4.dist-info/RECORD,,