roc-film 1.13.4__py3-none-any.whl → 1.14.0__py3-none-any.whl

Files changed (52)
  1. roc/__init__.py +2 -1
  2. roc/film/__init__.py +2 -2
  3. roc/film/commands.py +372 -323
  4. roc/film/config/__init__.py +0 -1
  5. roc/film/constants.py +101 -65
  6. roc/film/descriptor.json +126 -95
  7. roc/film/exceptions.py +28 -27
  8. roc/film/tasks/__init__.py +16 -16
  9. roc/film/tasks/cat_solo_hk.py +86 -74
  10. roc/film/tasks/cdf_postpro.py +438 -309
  11. roc/film/tasks/check_dds.py +39 -45
  12. roc/film/tasks/db_to_anc_bia_sweep_table.py +381 -0
  13. roc/film/tasks/dds_to_l0.py +232 -180
  14. roc/film/tasks/export_solo_coord.py +147 -0
  15. roc/film/tasks/file_handler.py +91 -75
  16. roc/film/tasks/l0_to_hk.py +117 -103
  17. roc/film/tasks/l0_to_l1_bia_current.py +38 -30
  18. roc/film/tasks/l0_to_l1_bia_sweep.py +417 -329
  19. roc/film/tasks/l0_to_l1_sbm.py +250 -208
  20. roc/film/tasks/l0_to_l1_surv.py +185 -130
  21. roc/film/tasks/make_daily_tm.py +40 -37
  22. roc/film/tasks/merge_tcreport.py +77 -71
  23. roc/film/tasks/merge_tmraw.py +102 -89
  24. roc/film/tasks/parse_dds_xml.py +21 -20
  25. roc/film/tasks/set_l0_utc.py +51 -49
  26. roc/film/tests/cdf_compare.py +565 -0
  27. roc/film/tests/hdf5_compare.py +84 -62
  28. roc/film/tests/test_dds_to_l0.py +93 -51
  29. roc/film/tests/test_dds_to_tc.py +8 -11
  30. roc/film/tests/test_dds_to_tm.py +8 -10
  31. roc/film/tests/test_film.py +161 -116
  32. roc/film/tests/test_l0_to_hk.py +64 -36
  33. roc/film/tests/test_l0_to_l1_bia.py +10 -14
  34. roc/film/tests/test_l0_to_l1_sbm.py +14 -19
  35. roc/film/tests/test_l0_to_l1_surv.py +68 -41
  36. roc/film/tests/test_metadata.py +21 -20
  37. roc/film/tests/tests.py +743 -396
  38. roc/film/tools/__init__.py +5 -5
  39. roc/film/tools/dataset_tasks.py +34 -2
  40. roc/film/tools/file_helpers.py +390 -269
  41. roc/film/tools/l0.py +402 -324
  42. roc/film/tools/metadata.py +147 -127
  43. roc/film/tools/skeleton.py +12 -17
  44. roc/film/tools/tools.py +109 -92
  45. roc/film/tools/xlsx2skt.py +161 -139
  46. {roc_film-1.13.4.dist-info → roc_film-1.14.0.dist-info}/LICENSE +127 -125
  47. roc_film-1.14.0.dist-info/METADATA +60 -0
  48. roc_film-1.14.0.dist-info/RECORD +50 -0
  49. {roc_film-1.13.4.dist-info → roc_film-1.14.0.dist-info}/WHEEL +1 -1
  50. roc/film/tasks/l0_to_anc_bia_sweep_table.py +0 -348
  51. roc_film-1.13.4.dist-info/METADATA +0 -120
  52. roc_film-1.13.4.dist-info/RECORD +0 -48
roc/film/tests/hdf5_compare.py
@@ -11,6 +11,7 @@ import sys
 import h5py
 import numpy

+
 class Hdf5Reader:
     def __init__(self, logger=logging):
         self.logger = logger
@@ -22,7 +23,7 @@ class Hdf5Reader:
         :return: HDF5 file
         """
         try:
-            hdf5_file = h5py.File(filepath, 'r')
+            hdf5_file = h5py.File(filepath, "r")
             return hdf5_file
         except IOError:
             self.logger.error("Unable to open file '%s'" % filepath)
@@ -46,7 +47,7 @@ class Hdf5Reader:
         :return: Summary of the group
         """

-        return {'attr': self.read_attributes(hval), 'htype': 'group'}
+        return {"attr": self.read_attributes(hval), "htype": "group"}

     def read_data(self, hval):
         """
@@ -55,10 +56,12 @@
         :return: Summary of the dataset
         """
         data_value = hval[()]  # hval.value has been deprecated
-        return {'attr': self.read_attributes(hval),
-                'value': data_value,
-                'htype': 'dataset',
-                'dtype': type(data_value)}
+        return {
+            "attr": self.read_attributes(hval),
+            "value": data_value,
+            "htype": "dataset",
+            "dtype": type(data_value),
+        }

     #
     def evaluate_group(self, path, grp):
@@ -76,13 +79,20 @@ class Hdf5Reader:
             elif isinstance(v, h5py.Group):
                 result[k] = self.read_group(v)
             else:
-                raise Exception('Unknown h5py type: %s (%s -- %s)' % (type(v), path, k))
+                raise Exception("Unknown h5py type: %s (%s -- %s)" % (type(v), path, k))
         return result


 class Hdf5Diff(Hdf5Reader):
-    def __init__(self, filepath1, filepath2, group_path1, group_path2,
-                 exclude_attr=[], logger=logging):
+    def __init__(
+        self,
+        filepath1,
+        filepath2,
+        group_path1,
+        group_path2,
+        exclude_attr=[],
+        logger=logging,
+    ):
         # set up the logger
         super().__init__(logger)

@@ -100,103 +110,108 @@ class Hdf5Diff(Hdf5Reader):

     def compare_dataset(self, path, name, dataset1, dataset2):
         # compare dtypes
-        if dataset1[name]['dtype'] != dataset2[name]['dtype']:
-            d1 = dataset1[name]['dtype']
-            d2 = dataset2[name]['dtype']
-            self.register_diff(path, 'DIFF_DTYPE', (d1, d2), )
+        if dataset1[name]["dtype"] != dataset2[name]["dtype"]:
+            d1 = dataset1[name]["dtype"]
+            d2 = dataset2[name]["dtype"]
+            self.register_diff(
+                path,
+                "DIFF_DTYPE",
+                (d1, d2),
+            )

         # compare values
-        v1 = dataset1[name]['value']
-        v2 = dataset2[name]['value']
+        v1 = dataset1[name]["value"]
+        v2 = dataset2[name]["value"]
         try:
             if not numpy.allclose(v1, v2, equal_nan=True):
                 raise ValueError
         except ValueError:
-            self.register_diff(path + name, 'DIFF_DATA', (v1, v2))
+            self.register_diff(path + name, "DIFF_DATA", (v1, v2))
         except TypeError:
             # Try to compare row by row (only if same length)
             if v1.shape != v2.shape:
-                self.register_diff(path + name, 'DIFF_DATA', (v1, v2))
+                self.register_diff(path + name, "DIFF_DATA", (v1, v2))
             else:
                 for i in range(v1.shape[0]):
                     if len(numpy.array(v1[i]).shape) > 0:
                         if any(v1[i] != v2[i]):
-                            self.register_diff(path + name, 'DIFF_DATA', (v1, v2))
+                            self.register_diff(path + name, "DIFF_DATA", (v1, v2))
                             break
                     else:
                         if v1[i] != v2[i]:
-                            self.register_diff(path + name, 'DIFF_DATA', (v1, v2))
+                            self.register_diff(path + name, "DIFF_DATA", (v1, v2))
                             break
-        except:
-            raise Exception(f'Datasets {path+name} cannot be compared')
+        except Exception as e:
+            self.logger.exception(f"Datasets {path + name} cannot be compared:\n{e}")
+            raise
         else:
             pass

         # compare attributes
-        for k in dataset1[name]['attr']:
+        for k in dataset1[name]["attr"]:
             if self.exclude_attr and k in self.exclude_attr:
                 continue
-            if k not in dataset2[name]['attr']:
-                self.register_diff(path + name, 'DIFF_UNIQ_ATTR', (k,), file_id=1)
+            if k not in dataset2[name]["attr"]:
+                self.register_diff(path + name, "DIFF_UNIQ_ATTR", (k,), file_id=1)

-        for k in dataset2[name]['attr']:
+        for k in dataset2[name]["attr"]:
             if self.exclude_attr and k in self.exclude_attr:
                 continue
-            if k not in dataset1[name]['attr']:
-                self.register_diff(path + name, 'DIFF_UNIQ_ATTR', (k,), file_id=2)
+            if k not in dataset1[name]["attr"]:
+                self.register_diff(path + name, "DIFF_UNIQ_ATTR", (k,), file_id=2)

-        for k in dataset1[name]['attr']:
+        for k in dataset1[name]["attr"]:
             if self.exclude_attr and k in self.exclude_attr:
                 continue
-            if k in dataset2[name]['attr']:
-                v1 = dataset1[name]['attr'][k]
-                v2 = dataset2[name]['attr'][k]
+            if k in dataset2[name]["attr"]:
+                v1 = dataset1[name]["attr"][k]
+                v2 = dataset2[name]["attr"][k]
                 if v1 != v2:
-                    self.register_diff(path + name, 'DIFF_ATTR_DTYPE', (k, v1, v2))
+                    self.register_diff(path + name, "DIFF_ATTR_DTYPE", (k, v1, v2))

     def compare_group(self, path, name, desc1, desc2):
-
         # compare attributes
-        for k in desc1[name]['attr']:
+        for k in desc1[name]["attr"]:
             if self.exclude_attr and k in self.exclude_attr:
                 continue
-            if k not in desc2[name]['attr']:
-                self.register_diff(path + name, 'DIFF_UNIQ_ATTR', (k,), file_id=1)
-        for k in desc2[name]['attr']:
+            if k not in desc2[name]["attr"]:
+                self.register_diff(path + name, "DIFF_UNIQ_ATTR", (k,), file_id=1)
+        for k in desc2[name]["attr"]:
             if self.exclude_attr and k in self.exclude_attr:
                 continue
-            if k not in desc1[name]['attr']:
-                self.register_diff(path + name, 'DIFF_UNIQ_ATTR', (k,), file_id=2)
+            if k not in desc1[name]["attr"]:
+                self.register_diff(path + name, "DIFF_UNIQ_ATTR", (k,), file_id=2)

     def register_diff(self, path, error_code, values, file_id=None):
-
         error_messages = {
-            'DIFF_OBJECTS': '[{path}] Different element types: {values[0]} and {values[1]}',
-            'DIFF_UNIQUE': '[{path}] Element {values[0]} only in file {file_id}',
-            'DIFF_UNIQ_ATTR': '[{path}] Attribute {values[0]} only in file {file_id}',
-            'DIFF_ATTR_DTYPE': '[{path}] Attribute {values[0]} has different type: {values[1]} and {values[2]}',
-            'DIFF_DATA': '[{path}] Different data: {values[0]} and {values[1]}',
-            'DIFF_DTYPE': '[{path}] Different dtypes: {values[0]} and {values[1]}',
+            "DIFF_OBJECTS": "[{path}] Different element types: {values[0]} and {values[1]}",
+            "DIFF_UNIQUE": "[{path}] Element {values[0]} only in file {file_id}",
+            "DIFF_UNIQ_ATTR": "[{path}] Attribute {values[0]} only in file {file_id}",
+            "DIFF_ATTR_DTYPE": "[{path}] Attribute {values[0]} has different type: {values[1]} and {values[2]}",
+            "DIFF_DATA": "[{path}] Different data: {values[0]} and {values[1]}",
+            "DIFF_DTYPE": "[{path}] Different dtypes: {values[0]} and {values[1]}",
         }

         error_message = error_messages.get(error_code, None)

         if error_message is None:
-            raise Exception('Unknown error_code %s' % (error_code,))
+            raise Exception("Unknown error_code %s" % (error_code,))

-        content = {'path': path, 'values': values, 'error_code': error_code}
+        content = {"path": path, "values": values, "error_code": error_code}

         if file_id is not None:
-            content['file_id'] = file_id
+            content["file_id"] = file_id

         # store the error
         self.diff_list.append(content)

         # and send logs
-        self.logger.info(error_message.format(path=path, values=values, file_id=file_id))
+        self.logger.info(
+            error_message.format(path=path, values=values, file_id=file_id)
+        )

-    def diff_groups(self, grp1, grp2, path='/'):
-        self.logger.debug('Examining ' + path)
+    def diff_groups(self, grp1, grp2, path="/"):
+        self.logger.debug("Examining " + path)

         # get the groups content
         desc1 = self.evaluate_group(path, grp1)
@@ -210,37 +225,44 @@ class Hdf5Diff(Hdf5Reader):
             if k in desc2:
                 common.append(k)
             else:
-                self.register_diff(path, 'DIFF_UNIQUE', (k,), file_id=1)
+                self.register_diff(path, "DIFF_UNIQUE", (k,), file_id=1)

         # get the keys specific to the group 2
         for k in desc2:
             if k not in desc1:
-                self.register_diff(path, 'DIFF_UNIQUE', (k,), file_id=2)
+                self.register_diff(path, "DIFF_UNIQUE", (k,), file_id=2)

         # loop over common keys
         for name in common:
-            self.logger.debug('\t' + name)
+            self.logger.debug("\t" + name)

             # compare types
-            h1 = desc1[name]['htype']
-            h2 = desc2[name]['htype']
+            h1 = desc1[name]["htype"]
+            h2 = desc2[name]["htype"]
             if h1 != h2:
-                self.register_diff(path, 'DIFF_OBJECTS', (h1, h2), )
+                self.register_diff(
+                    path,
+                    "DIFF_OBJECTS",
+                    (h1, h2),
+                )
                 # different hdf5 types -- don't try to compare further
                 continue

             # call the appropriate method(s) depending on the node type
-            if desc1[name]['htype'] == 'dataset':
+            if desc1[name]["htype"] == "dataset":
                 # handle dataset
                 self.compare_dataset(path, name, desc1, desc2)
-            elif desc1[name]['htype'] == 'group':
+            elif desc1[name]["htype"] == "group":
                 # handle groups
                 self.compare_group(path, name, desc1, desc2)
                 # recurse into subgroup
-                self.diff_groups(grp1[name], grp2[name], path=path + name + '/')
+                self.diff_groups(grp1[name], grp2[name], path=path + name + "/")
             else:
                 # handle unrecognized hdf5 objects
-                self.logger.warning("Element is not a recognized type (%s) and isn't being evaluated" % h1)
+                self.logger.warning(
+                    "Element is not a recognized type (%s) and isn't being evaluated"
+                    % h1
+                )
                 continue

     def diff_files(self):
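
The Hdf5Diff reformatting above does not change its public interface. A minimal, hypothetical usage sketch, assuming the class is importable from roc.film.tests.hdf5_compare (its location in the files-changed list); the file names and excluded attributes are placeholders:

# Hypothetical usage sketch of Hdf5Diff; constructor signature taken from the diff above.
# The import path is assumed from the module location; HDF5 file names are placeholders.
from roc.film.tests.hdf5_compare import Hdf5Diff

differences = Hdf5Diff(
    "produced/solo_L0_rpw-cdag_20220501_V01.h5",  # file 1
    "expected/solo_L0_rpw-cdag_20220501_V01.h5",  # file 2
    "/",                                          # group to compare in file 1
    "/",                                          # group to compare in file 2
    exclude_attr=["File_ID", "Generation_date"],  # attributes to skip
).diff_files()

# diff_files() is expected to return the accumulated list of diff dicts
# ({"path": ..., "error_code": ..., "values": ...}); an empty list means no differences.
assert differences == []
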
roc/film/tests/test_dds_to_l0.py
@@ -7,7 +7,6 @@ Test tm_to_l0 command of the roc.film plugin.

 import filecmp
 import os
-import tempfile
 from pathlib import Path
 from pprint import pformat

@@ -45,16 +44,16 @@ class TestDdsToL0(CommandTestCase):
         # clear the files produced during test
         shutil.rmtree(self.output_dir_path)

-    #@pytest.mark.skip()
+    # @pytest.mark.skip()
     @pytest.mark.parametrize(
-        'date_to_test,idb_source,idb_version',
-        [('20220501', 'MIB', '20200131')],
+        "date_to_test,idb_source,idb_version",
+        [("20220501", "MIB", "20200131")],
     )
     def test_dds_to_l0(self, date_to_test, idb_source, idb_version):
         from poppy.core.conf import Settings

         # Name of the command to test
-        cmd = 'dds_to_l0'
+        cmd = "dds_to_l0"

         # Retrieve data for current test
         test_data_path = FilmTest().get_test_data(cmd, extract=True)
@@ -64,89 +63,132 @@ class TestDdsToL0(CommandTestCase):
         # Initialize inputs and expected outputs
         input_dir_path, inputs = FilmTest.get_inputs(test_data_dir)
         if not inputs:
-            raise FileNotFoundError(f'No input found in {test_data_dir}!')
-        expected_output_dir_path, expected_outputs = FilmTest.get_expected_outputs(test_data_dir)
+            raise FileNotFoundError(f"No input found in {test_data_dir}!")
+        expected_output_dir_path, expected_outputs = FilmTest.get_expected_outputs(
+            test_data_dir
+        )
         if not expected_outputs:
-            raise FileNotFoundError(f'No expected output found in {test_data_dir}!')
+            raise FileNotFoundError(f"No expected output found in {test_data_dir}!")

         # Initialize directory where files produced during test will be saved
-        output_dir_path = os.path.join(test_data_dir, 'output')
+        output_dir_path = os.path.join(test_data_dir, "output")
         self.output_dir_path = output_dir_path

         # Check that SPICE kernels are present in ./data/spice_kernels folder
         spice_kernels_dir = FilmTest.get_spice_kernel_dir()
         if not os.path.isdir(spice_kernels_dir):
-            raise FileNotFoundError(f'No SPICE kernel set found in {spice_kernels_dir}!')
+            raise FileNotFoundError(
+                f"No SPICE kernel set found in {spice_kernels_dir}!"
+            )

         # length in scos header bytes to remove from input binary TM packets
-        scos_header_byte_length = '0'
+        scos_header_byte_length = "0"

         # Split inputs between two lists containing RPW TM and TC files respectively
-        tm_inputs = ' '.join([
-            os.path.join(input_dir_path, input_file)
-            for input_file in inputs
-            if os.path.basename(input_file).startswith('solo_TM_')
-        ])
-        tc_inputs = ' '.join([
-            os.path.join(input_dir_path, input_file)
-            for input_file in inputs
-            if os.path.basename(input_file).startswith('solo_TC_')
-        ])
+        tm_inputs = " ".join(
+            [
+                os.path.join(input_dir_path, input_file)
+                for input_file in inputs
+                if os.path.basename(input_file).startswith("solo_TM_")
+            ]
+        )
+        tc_inputs = " ".join(
+            [
+                os.path.join(input_dir_path, input_file)
+                for input_file in inputs
+                if os.path.basename(input_file).startswith("solo_TC_")
+            ]
+        )

         # initialize the main command
         # Make sure that start-time/end_time keyword values are consistent with input DDS files
-        command_to_test = ' '.join(['pop', 'film',
-                                    '--force',
-                                    '--idb-version', idb_version,
-                                    '--idb-source', idb_source,
-                                    '--scos-header', scos_header_byte_length,
-                                    '--cdag',
-                                    cmd, date_to_test,
-                                    '--dds-tmraw-xml', tm_inputs,
-                                    '--dds-tcreport-xml', tc_inputs,
-                                    '--output-dir', output_dir_path,
-                                    '-ll', 'INFO'])
+        command_to_test = " ".join(
+            [
+                "pop",
+                "film",
+                "--force",
+                "--idb-version",
+                idb_version,
+                "--idb-source",
+                idb_source,
+                "--scos-header",
+                scos_header_byte_length,
+                "--cdag",
+                cmd,
+                date_to_test,
+                "--dds-tmraw-xml",
+                tm_inputs,
+                "--dds-tcreport-xml",
+                tc_inputs,
+                "--output-dir",
+                output_dir_path,
+                "-ll",
+                "INFO",
+            ]
+        )

         # define the required plugins
-        plugin_list = ['poppy.pop', 'roc.idb', 'roc.rpl', 'roc.rap', 'roc.dingo', 'roc.film']
+        plugin_list = [
+            "poppy.pop",
+            "roc.idb",
+            "roc.rpl",
+            "roc.rap",
+            "roc.dingo",
+            "roc.film",
+        ]
         #
         # run the command
         # force the value of the plugin list
-        with mock.patch.object(Settings, 'configure',
-                               autospec=True,
-                               side_effect=self.mock_configure_settings(dictionary={'PLUGINS': plugin_list})):
+        with mock.patch.object(
+            Settings,
+            "configure",
+            autospec=True,
+            side_effect=self.mock_configure_settings(
+                dictionary={"PLUGINS": plugin_list}
+            ),
+        ):
             # Load IDB in the database
             FilmTest.load_idb(self, idb_version=idb_version)

             # Run the command to test
-            logger.info(f'Running {command_to_test}')
+            logger.info(f"Running {command_to_test}")
             self.run_command(shlex.split(command_to_test))

             # compare directory content
-            dirs_cmp = filecmp.dircmp(output_dir_path,
-                                      expected_output_dir_path)
+            dirs_cmp = filecmp.dircmp(output_dir_path, expected_output_dir_path)

             dirs_cmp.report()
             # ensure that we have the same files in both directories
             assert (len(dirs_cmp.left_only) == 0) and (len(dirs_cmp.right_only) == 0)

             # ensure the name of the two expected and generated L0 files is the same
-            produced_l0_path = os.path.join(output_dir_path, f'solo_L0_rpw-cdag_{date_to_test}_V01.h5')
-            expected_l0_path = os.path.join(expected_output_dir_path, f'solo_L0_rpw-cdag_{date_to_test}_V01.h5')
+            produced_l0_path = os.path.join(
+                output_dir_path, f"solo_L0_rpw-cdag_{date_to_test}_V01.h5"
+            )
+            expected_l0_path = os.path.join(
+                expected_output_dir_path, f"solo_L0_rpw-cdag_{date_to_test}_V01.h5"
+            )
             assert Path(produced_l0_path).name == Path(expected_l0_path).name

             # Compare content of the two L0 files
             attributes_to_ignore = [
-                'File_ID',
-                'Generation_date',
-                'Pipeline_version',
-                'Pipeline_name',
-                'Software_version',
-                'IDB_version'
-            ]
-            result = Hdf5Diff(produced_l0_path, expected_l0_path, '/', '/',
-                              exclude_attr=attributes_to_ignore).diff_files()
+                "File_ID",
+                "Generation_date",
+                "Pipeline_version",
+                "Pipeline_name",
+                "Software_version",
+                "IDB_version",
+            ]
+            result = Hdf5Diff(
+                produced_l0_path,
+                expected_l0_path,
+                "/",
+                "/",
+                exclude_attr=attributes_to_ignore,
+            ).diff_files()
             if result:
-                logger.error(f'Differences between expected output and the command output: {pformat(result)}')
+                logger.error(
+                    f"Differences between expected output and the command output: {pformat(result)}"
+                )

             assert result == []
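
The command assembly in the hunk above is only re-wrapped by the formatter; the joined string handed to shlex.split() is unchanged. A hypothetical sketch of the equivalent call, using the parametrized values from this diff and placeholder DDS XML paths:

# Sketch of the command string assembled by test_dds_to_l0 (idb version/source,
# scos header length and date taken from the parametrize case above;
# the DDS XML input paths and output directory are placeholders).
import shlex

command_to_test = " ".join(
    [
        "pop", "film",
        "--force",
        "--idb-version", "20200131",
        "--idb-source", "MIB",
        "--scos-header", "0",
        "--cdag",
        "dds_to_l0", "20220501",
        "--dds-tmraw-xml", "input/solo_TM_example.xml",
        "--dds-tcreport-xml", "input/solo_TC_example.xml",
        "--output-dir", "output",
        "-ll", "INFO",
    ]
)
# The test then runs: self.run_command(shlex.split(command_to_test))
print(shlex.split(command_to_test))
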
roc/film/tests/test_dds_to_tc.py
@@ -5,22 +5,16 @@
 Test dds_to_tc command of the roc.film plugin.
 """

-import filecmp
-import os
 import tempfile
-from pprint import pformat

 import pytest
 import shutil
-import unittest.mock as mock

-from poppy.core.logger import logger
 from poppy.core.test import CommandTestCase
 from roc.film.tests.test_film import FilmTest


 class TestDdsToTc(CommandTestCase):
-
     film = FilmTest()

     def setup_method(self, method):
@@ -44,12 +38,15 @@ class TestDdsToTc(CommandTestCase):
         # clear the downloaded files
         shutil.rmtree(self.tmp_dir_path)

-    @pytest.mark.skip(reason='Not working')
-    @pytest.mark.parametrize('idb_source,idb_version', [
-        ('MIB', '20200131'),
-    ])
+    @pytest.mark.skip(reason="Not working")
+    @pytest.mark.parametrize(
+        "idb_source,idb_version",
+        [
+            ("MIB", "20200131"),
+        ],
+    )
     def test_classify_tcreport(self, idb_source, idb_version):
-        from poppy.core.conf import Settings
+        pass

         # input_dir_path, inputs = self.get_inputs('classify_tcreport')
         # expected_output_dir_path, expected_outputs = self.get_expected_outputs('classify_tcreport')
roc/film/tests/test_dds_to_tm.py
@@ -5,16 +5,11 @@
 Test dds_to_tm command of the roc.film plugin.
 """

-import filecmp
-import os
 import tempfile
-from pprint import pformat

 import pytest
 import shutil
-import unittest.mock as mock

-from poppy.core.logger import logger
 from poppy.core.test import CommandTestCase
 from roc.film.tests.test_film import FilmTest

@@ -43,12 +38,15 @@ class TestDdsToTm(CommandTestCase):
         # clear the downloaded files
         shutil.rmtree(self.tmp_dir_path)

-    @pytest.mark.skip(reason='Not working')
-    @pytest.mark.parametrize('idb_source,idb_version', [
-        ('MIB', '20200131'),
-    ])
+    @pytest.mark.skip(reason="Not working")
+    @pytest.mark.parametrize(
+        "idb_source,idb_version",
+        [
+            ("MIB", "20200131"),
+        ],
+    )
     def test_classify_tmraw(self, idb_source, idb_version):
-        from poppy.core.conf import Settings
+        pass

         # input_dir_path, inputs = self.get_inputs('classify_tmraw')
         # expected_output_dir_path, expected_outputs = self.get_expected_outputs('classify_tmraw')