roc-film 1.13.4__py3-none-any.whl → 1.14.0__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- roc/__init__.py +2 -1
- roc/film/__init__.py +2 -2
- roc/film/commands.py +372 -323
- roc/film/config/__init__.py +0 -1
- roc/film/constants.py +101 -65
- roc/film/descriptor.json +126 -95
- roc/film/exceptions.py +28 -27
- roc/film/tasks/__init__.py +16 -16
- roc/film/tasks/cat_solo_hk.py +86 -74
- roc/film/tasks/cdf_postpro.py +438 -309
- roc/film/tasks/check_dds.py +39 -45
- roc/film/tasks/db_to_anc_bia_sweep_table.py +381 -0
- roc/film/tasks/dds_to_l0.py +232 -180
- roc/film/tasks/export_solo_coord.py +147 -0
- roc/film/tasks/file_handler.py +91 -75
- roc/film/tasks/l0_to_hk.py +117 -103
- roc/film/tasks/l0_to_l1_bia_current.py +38 -30
- roc/film/tasks/l0_to_l1_bia_sweep.py +417 -329
- roc/film/tasks/l0_to_l1_sbm.py +250 -208
- roc/film/tasks/l0_to_l1_surv.py +185 -130
- roc/film/tasks/make_daily_tm.py +40 -37
- roc/film/tasks/merge_tcreport.py +77 -71
- roc/film/tasks/merge_tmraw.py +102 -89
- roc/film/tasks/parse_dds_xml.py +21 -20
- roc/film/tasks/set_l0_utc.py +51 -49
- roc/film/tests/cdf_compare.py +565 -0
- roc/film/tests/hdf5_compare.py +84 -62
- roc/film/tests/test_dds_to_l0.py +93 -51
- roc/film/tests/test_dds_to_tc.py +8 -11
- roc/film/tests/test_dds_to_tm.py +8 -10
- roc/film/tests/test_film.py +161 -116
- roc/film/tests/test_l0_to_hk.py +64 -36
- roc/film/tests/test_l0_to_l1_bia.py +10 -14
- roc/film/tests/test_l0_to_l1_sbm.py +14 -19
- roc/film/tests/test_l0_to_l1_surv.py +68 -41
- roc/film/tests/test_metadata.py +21 -20
- roc/film/tests/tests.py +743 -396
- roc/film/tools/__init__.py +5 -5
- roc/film/tools/dataset_tasks.py +34 -2
- roc/film/tools/file_helpers.py +390 -269
- roc/film/tools/l0.py +402 -324
- roc/film/tools/metadata.py +147 -127
- roc/film/tools/skeleton.py +12 -17
- roc/film/tools/tools.py +109 -92
- roc/film/tools/xlsx2skt.py +161 -139
- {roc_film-1.13.4.dist-info → roc_film-1.14.0.dist-info}/LICENSE +127 -125
- roc_film-1.14.0.dist-info/METADATA +60 -0
- roc_film-1.14.0.dist-info/RECORD +50 -0
- {roc_film-1.13.4.dist-info → roc_film-1.14.0.dist-info}/WHEEL +1 -1
- roc/film/tasks/l0_to_anc_bia_sweep_table.py +0 -348
- roc_film-1.13.4.dist-info/METADATA +0 -120
- roc_film-1.13.4.dist-info/RECORD +0 -48
roc/film/tasks/l0_to_anc_bia_sweep_table.py
DELETED
@@ -1,348 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-"""Contains task to create the RPW ANC Bias sweep table CDF files."""
-
-import csv
-import os
-import uuid
-from datetime import datetime
-
-import numpy as np
-from poppy.core.logger import logger
-from poppy.core import TargetFileNotSaved
-from poppy.core.generic.cache import CachedProperty
-from poppy.core.target import FileTarget
-from poppy.core.task import Task
-
-from roc.film import TIME_DAILY_STRFORMAT, TIME_ISO_STRFORMAT
-from roc.film.constants import BIA_SWEEP_TABLE_NR
-from roc.film.tools import get_datasets, unique_dict_list, sort_dict_list
-from roc.film.tools.file_helpers import get_l0_files, get_output_dir, is_output_dir, get_l0_trange, generate_filepath
-from roc.film.tools.l0 import L0
-from roc.film.tools.metadata import set_logical_file_id
-
-from roc.rap.tasks.bia.current import raw_to_na
-
-__all__ = ['L0ToAncBiaSweepTable']
-
-class L0ToAncBiaSweepTable(Task):
-    """
-    Task to generate ANC bias sweep table file from l0 file(s).
-
-    For more information about the Bias sweeping, see section 'BIAS sweeping' of
-    the RPW DAS User Manual (RPW-SYS-MEB-DPS-NTT-000859-LES)
-
-    """
-    plugin_name = 'roc.film'
-    name = 'l0_to_anc_bia_sweep_table'
-
-    csv_fieldnames = ['TC_EXE_UTC_TIME',
-                      'BIA_SWEEP_TABLE_CUR',
-                      'EEPROM_LOADING',
-                      'TC_NAME',
-                      'TC_EXE_STATE',
-                      ]
-
-    def add_targets(self):
-
-        self.add_input(target_class=FileTarget,
-                       identifier='l0_files',
-                       many=True,
-                       filepath=get_l0_files)
-
-        self.add_output(target_class=FileTarget,
-                        identifier='anc_bia_sweep_table')
-
-    def setup_inputs(self):
-
-        # Get products directory (folder where final output files will be
-        # moved)
-        self.products_dir = self.pipeline.get('products_dir',
-                                              default=[None], args=True)[0]
-
-        # Get output dir
-        self.output_dir = get_output_dir(self.pipeline)
-        if not is_output_dir(self.output_dir,
-                             products_dir=self.products_dir):
-            logger.info(f'Making {self.output_dir}')
-            os.makedirs(self.output_dir)
-        else:
-            logger.debug(f'Output files will be '
-                         f'saved into folder {self.output_dir}')
-
-        # Get or create failed_files list from pipeline properties
-        self.failed_files = self.pipeline.get(
-            'failed_files', default=[], create=True)
-
-        # Get or create processed_files list from pipeline properties
-        self.processed_files = self.pipeline.get(
-            'processed_files', default=[], create=True)
-
-        # Get or create ignored_target list from pipeline properties
-        self.ignored_target = self.pipeline.get(
-            'ignored_target', default=[], create=True)
-
-        # Get overwrite argument
-        self.overwrite = self.pipeline.get(
-            'overwrite', default=False, args=True)
-
-        # Get list of input l0 file(s)
-        self.l0_file_list = self.inputs['l0_files'].filepath
-
-        # Get force optional keyword
-        self.force = self.pipeline.get('force', default=False, args=True)
-
-        # Get L0 files time_min/time_max
-        l0_time_min, l0_time_max = get_l0_trange(self.l0_file_list)
-
-        # Define output file start time
-        self.start_time = self.pipeline.get(
-            'start_time', default=[min(l0_time_min)])[0]
-        logger.debug(f'start_time value is {self.start_time}')
-
-        # Define output file end time
-        self.end_time = self.pipeline.get(
-            'end_time', default=[max(l0_time_max)])[0]
-        logger.debug(f'end_time value is {self.end_time}')
-
-        # Retrieve output dataset to produce for the task (it should be one)
-        self.dataset = get_datasets(self, self.name)[0]
-        logger.debug(f'Produce file(s) for the following dataset: {self.dataset["name"]}')
-
-        # Get existing data (if any)
-        self.existing_file = self.pipeline.get('sweep_tables',
-                                               args=True, default=[None])[0]
-        if self.existing_file:
-            self.existing_data = L0ToAncBiaSweepTable.parse_bia_sweep_table_file(
-                self.existing_file)
-        else:
-            self.existing_data = []
-
-        return True
-
-    @CachedProperty
-    def output_filepath(self):
-
-        # Build output filename using metadata
-        filename_items = {}
-        filename_items[
-            'File_naming_convention'] = '<Source_name>_<LEVEL>_<Descriptor>_<Datetime>_V<Data_version>'
-        filename_items['Source_name'] = 'SOLO>Solar Orbiter'
-        filename_items[
-            'Descriptor'] = 'RPW-BIA-SWEEP-TABLE>RPW Bias sweep table report'
-        filename_items['LEVEL'] = 'ANC>Ancillary data'
-        filename_items['Data_version'] = self.dataset['version']
-
-        filename_items['Datetime'] = self.start_time.strftime(
-            TIME_DAILY_STRFORMAT) + '-' + self.end_time.strftime(TIME_DAILY_STRFORMAT)
-        filename_items['Logical_file_id'] = set_logical_file_id(filename_items)
-        return generate_filepath(self, filename_items, 'csv',
-                                 output_dir=self.output_dir,
-                                 overwrite=self.overwrite)
-
-    def run(self):
-
-        # Define task job ID (long and short)
-        self.job_uuid = str(uuid.uuid4())
-        self.job_id = f'L0ToAncBiaSweepTable-{self.job_uuid[:8]}'
-        logger.info(f'Task {self.job_id} is starting')
-        try:
-            self.setup_inputs()
-        except:
-            logger.exception(
-                f'Initializing inputs has failed for {self.job_id}!')
-            try:
-                os.makedirs(os.path.join(self.output_dir, 'failed'))
-            except:
-                logger.error('output_dir argument is not defined!')
-            self.pipeline.exit()
-            return
-
-        logger.info(f'Loading data from {len(self.l0_file_list)} L0 files '
-                    f'between {self.start_time} and {self.end_time}...')
-
-        # First retrieve sweep table data from TC load/clear in l0 files
-        # List of TC utc times, tc names, tc ack states and parameters
-        tc_load_sweep_list = L0.l0_to_packet_list(self.l0_file_list,
-                                                  include=self.dataset[
-                                                      'descr']['packet'],
-                                                  start_time=self.start_time,
-                                                  end_time=self.end_time,
-                                                  ascending=True,
-                                                  )
-
-        tc_load_sweep_num = len(tc_load_sweep_list)
-        if tc_load_sweep_num == 0:
-            logger.warning('No sweep table TC found in the input L0 files')
-            return
-        else:
-            logger.info(f'{tc_load_sweep_num} sweep table TCs found')
-
-        # csv header fieldnames length
-        field_num = len(self.csv_fieldnames)
-
-        # Initialize the sweep table array with NaN values
-        # (Assume here that the table is empty at the beginning)
-        sweep_table = np.empty(BIA_SWEEP_TABLE_NR, dtype=np.float32)
-        sweep_table[:] = np.nan
-
-        # Loop over tc load/clear sweep list
-        has_new_data = False
-        for tc_load_sweep in tc_load_sweep_list:
-
-            # Get elements from current tc load/clear sweep packet
-            tc_time = tc_load_sweep['utc_time']
-            tc_name = tc_load_sweep['palisade_id']
-            tc_state = tc_load_sweep['tc_exe_state']
-            tc_idb_version = tc_load_sweep['idb_version']
-            tc_idb_source = tc_load_sweep['idb_source']
-
-            eeprom_loading = '0'
-            if tc_state != 'PASSED':
-                # If failed command, the current sweep table stays unchanged
-                logger.info(f'{tc_name} on {tc_time} was failed, skip it')
-            elif tc_name == 'TC_DPU_CLEAR_BIAS_SWEEP':
-                # if valid clear table command is found, then
-                # reset the sweep table with NaN values
-                sweep_table[:] = np.nan
-            elif tc_name == 'TC_DPU_LOAD_BIAS_SWEEP':
-                # If valid load table command is executed,
-                # then update the current sweep table values
-
-                # Get first index of the elements to change in the sweep table
-                first_idx = tc_load_sweep['CP_DPU_BIA_SWEEP_FIRST_IDX']
-                # Get number of elements to change in the sweep table
-                step_nr = tc_load_sweep['CP_DPU_BIA_SWEEP_STEP_NR']
-                # Get current values in physical units (nA)
-                # of elements to change in the sweep table
-                step_cur = raw_to_na(tc_load_sweep['CP_DPU_BIA_SWEEP_STEP_CUR'],
-                                     idb_source=tc_idb_source,
-                                     idb_version=tc_idb_version)
-                eeprom_loading = str(tc_load_sweep['CP_DPU_BIA_SWEEP_EEPROM'])
-
-                # Update the sweep table with new current values
-                sweep_table[first_idx:first_idx + step_nr] = step_cur
-
-            # Write data into output csv file
-            row = [''] * field_num
-            row[0] = tc_time
-            row[1] = np.copy(sweep_table)
-            row[2] = eeprom_loading
-            row[3] = tc_name
-            row[4] = tc_state  # Only write the TC execution state
-
-            # Add row into existing data list
-            row_dict = dict(zip(self.csv_fieldnames, row))
-            if row_dict not in self.existing_data:
-                self.existing_data.append(row_dict)
-                has_new_data = True
-            else:
-                logger.debug(f'({row_dict}) already found in {self.existing_file}')
-
-        if not has_new_data:
-            logger.info(
-                'No new data loaded: no need to generate a new output file')
-            self.pipeline.exit()
-            return
-
-        # Make sure to have unique values in the list
-        sorted_data = unique_dict_list(self.existing_data)
-
-        # Re-order rows by ascending time values
-        logger.debug('Re-ordering sweep table data by ascending times...')
-        sorted_data = sort_dict_list(sorted_data, 'TC_EXE_UTC_TIME')
-
-        # Write output CSV file
-        output_filepath = self.output_filepath
-        logger.info(f'Writing {output_filepath}...')
-        try:
-            with open(output_filepath, 'w', newline='') as csvfile:
-                writer = csv.DictWriter(csvfile, fieldnames=self.csv_fieldnames,
-                                        delimiter=',')
-                writer.writeheader()
-                for current_row in sorted_data:
-                    current_row['TC_EXE_UTC_TIME'] = current_row['TC_EXE_UTC_TIME'].strftime(
-                        TIME_ISO_STRFORMAT)  # Write UTC time in ISO format
-                    current_row['BIA_SWEEP_TABLE_CUR'] = ';'.join([str(element)
-                                                                   for element in current_row[
-                                                                       'BIA_SWEEP_TABLE_CUR']])  # Write sweep table values using ';' delimiter
-                    writer.writerow(current_row)
-
-        except:
-            if output_filepath not in self.failed_files:
-                self.failed_files.append(output_filepath)
-            raise TargetFileNotSaved(
-                'Anc Bias sweep table csv file production has failed!')
-
-        if not os.path.isfile(output_filepath):
-            if output_filepath not in self.failed_files:
-                self.failed_files.append(output_filepath)
-            raise FileNotFoundError(f'{output_filepath} not found')
-        else:
-            logger.info(f'{output_filepath} saved')
-            if output_filepath not in self.processed_files:
-                self.processed_files.append(output_filepath)
-
-        self.outputs['anc_bia_sweep_table'] = output_filepath
-
-    @staticmethod
-    def parse_bia_sweep_table_file(sweep_table_file):
-        """
-        Parse an input bia sweep table CSV file
-
-        :param sweep_table_file: File to parse
-        :return: list of sweep tables
-        """
-
-        # Initialize output list
-        sweep_table_list = []
-
-        if not os.path.isfile(sweep_table_file):
-            logger.error(f'{sweep_table_file} not found!')
-        else:
-            # Read file and store in output list
-            with open(sweep_table_file, 'r', newline='') as csv_file:
-                reader = csv.DictReader(csv_file)
-
-                # Loop over rows
-                for row in reader:
-                    row['TC_EXE_UTC_TIME'] = datetime.strptime(
-                        row['TC_EXE_UTC_TIME'], TIME_ISO_STRFORMAT)
-                    row['BIA_SWEEP_TABLE_CUR'] = row[
-                        'BIA_SWEEP_TABLE_CUR'].split(';')
-                    sweep_table_list.append(row)
-
-        return sweep_table_list
-
-    @staticmethod
-    def get_latest_sweep_table(current_time, sweep_table_list):
-        """
-        Get the latest sweep table for a given datetime
-
-        :param current_time: Time for which sweep table must be returned (datetime object)
-        :param sweep_table_list: list of sweep tables
-        :return: row of the sweep table list
-        """
-
-        # Initialize output
-        output_table = {}
-
-        # Get size of input table list
-        sweep_table_num = len(sweep_table_list)
-
-        # Loop over time of sweep tables
-        i = 0
-        table_time = sweep_table_list[0]['TC_EXE_UTC_TIME']
-        while True:
-            # Only get sweep table for passed TC
-            if (sweep_table_list[i]['TC_EXE_STATE'] == 'PASSED'):
-                output_table = sweep_table_list[i]
-                table_time = sweep_table_list[i]['TC_EXE_UTC_TIME']
-            i += 1
-            if i >= sweep_table_num:
-                break
-            if current_time <= sweep_table_list[i]['TC_EXE_UTC_TIME']:
-                break
-
-        return output_table
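Note: this task module is removed in 1.14.0 (the file list above shows a new roc/film/tasks/db_to_anc_bia_sweep_table.py taking its place). For reference, the removed class exposed two static helpers for reading back a sweep-table CSV; a minimal usage sketch against version 1.13.4 is given below (the CSV file name and the timestamp are hypothetical, chosen only to match the naming convention built in output_filepath):

```
from datetime import datetime

from roc.film.tasks.l0_to_anc_bia_sweep_table import L0ToAncBiaSweepTable

# Parse a previously generated ANC Bias sweep table CSV file
# (hypothetical file name, for illustration only)
table_list = L0ToAncBiaSweepTable.parse_bia_sweep_table_file(
    'solo_ANC_rpw-bia-sweep-table_20200101-20200131_V01.csv')

# Retrieve the sweep table row applicable at a given UTC time
# (only rows with TC_EXE_STATE == 'PASSED' are considered)
latest = L0ToAncBiaSweepTable.get_latest_sweep_table(
    datetime(2020, 1, 15), table_list)
print(latest.get('BIA_SWEEP_TABLE_CUR'))
```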
roc_film-1.13.4.dist-info/METADATA
DELETED
@@ -1,120 +0,0 @@
-Metadata-Version: 2.1
-Name: roc-film
-Version: 1.13.4
-Summary: RPW FILe Maker (FILM): Plugin to make RPW L0, L1 and HK data files
-Home-page: https://gitlab.obspm.fr/ROC/Pipelines/Plugins/FILM
-License: CECILL-C
-Author: Xavier Bonnin
-Author-email: xavier.bonnin@obspm.fr
-Requires-Python: >=3.8,<4
-Classifier: License :: CeCILL-C Free Software License Agreement (CECILL-C)
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Requires-Dist: edds_process (>=0.8.2)
-Requires-Dist: h5py (>=3.7,<4.0)
-Requires-Dist: jinja2 (>=3.0,<4.0)
-Requires-Dist: maser-tools (>=0.1.3)
-Requires-Dist: numpy (!=1.19.5)
-Requires-Dist: pandas (>=1.3,<2.0)
-Requires-Dist: poppy-core
-Requires-Dist: poppy-pop
-Requires-Dist: roc-dingo (>=1.0,<2.0)
-Requires-Dist: roc-idb (>=1.0,<2.0)
-Requires-Dist: roc-rap (>=1.0,<2.0)
-Requires-Dist: roc-rpl (>=1.0,<2.0)
-Requires-Dist: spacepy (>=0.4,<0.5)
-Requires-Dist: sqlalchemy (>=1.4,<2.0)
-Requires-Dist: xmltodict (>=0.13,<0.14)
-Project-URL: Repository, https://gitlab.obspm.fr/ROC/Pipelines/Plugins/FILM
-Description-Content-Type: text/markdown
-
-# FILM PLUGIN README
-
-[](https://gitlab.obspm.fr/ROC/Pipelines/Plugins/FILM/pipelines)
-
-This directory contains the source files of the Rpw FILe Maker (FILM), a plugin of the ROC pipelines dedicated to the RPW L0, L1 and HK files production.
-
-FILM has been developed with the [POPPY framework](https://poppy-framework.readthedocs.io/en/latest/).
-
-## Quickstart
-
-### Installation with pip
-
-To install the plugin using pip:
-
-```
-pip install roc-film
-```
-
-### Installation from the repository
-
-First, retrieve the `FILM` repository from the ROC gitlab server:
-
-```
-git clone https://gitlab.obspm.fr/ROC/Pipelines/Plugins/FILM.git
-```
-
-You will need a personal access token to reach the package registry in the ROC Gitlab server.
-
-Then, install the package (here using (poetry)[https://python-poetry.org/]):
-
-```
-poetry install"
-```
-
-NOTES:
-
-- It is also possible to clone the repository using SSH
-- To install poetry: `pip install poetry`
-
-## Usage
-
-The roc-film plugin is designed to be run in a POPPy-built pipeline.
-Nevertheless, it is still possible to import some classes and methods in Python files.
-
-### How to release a new version of the plugin?
-
-1. Checkout to the git *develop* branch (and make pull to be sure to work from the latest commit in the gitlab server)
-
-2. First update metadata (version, dependencies, etc.) in the plugin *pyproject.toml* file.
-
-3. Then make sure the *descriptor.json* and *poetry.lock* files are also up-to-date.
-
-To update the *descriptor.json* file, run the command:
-
-python bump_descriptor.py -m <modification_message>
-
-To update the *poetry.lock* file, enter:
-
-poetry lock
-
-N.B. *poetry* Python package must be installed (see https://python-poetry.org/).
-
-4. Commit the changes in the *develop* branch. Make sure to commit with a comprehensive enough message.
-5. Checkout to the *master* branch and merge the *develop* branch into *master*
-6. Create a new git tag `X.Y.Z` for the new version of the plugin (must be the same version than in the *pyproject.toml* file)
-7. Push both the *master* branch and the tag to the gitlab server
-8. Do a rebase of *develop* onto the *master* branch
-9. Push the up-to-date *develop* branch to the gitlab server
-
-N.B. This procedure only concerns the version release. It is assumed that any other changes in the code have been already validated previously.
-
-## CONTACT
-
-* Xavier BONNIN xavier.bonnin@obspm.fr (author, maintainer)
-* Florence HENRY florence.henry@obspm.fr (maintainer)
-
-
-License
--------
-
-This project is licensed under CeCILL-C.
-
-Acknowledgments
----------------
-
-* Solar Orbiter / RPW Operation Centre (ROC) team
-
roc_film-1.13.4.dist-info/RECORD
DELETED
@@ -1,48 +0,0 @@
-roc/__init__.py,sha256=jv2YF__bseklT3OWEzlqJ5qE24c4aWd5F4r0TTjOrWQ,65
-roc/film/__init__.py,sha256=jUW739rZC4yDLTfWhspDjrNBYdPUhBvI8fjS_fLYls8,114
-roc/film/commands.py,sha256=zG40YQKNrXR64ggOKTXUHyiV2zfaAg0WS01CDZU-IPc,36766
-roc/film/config/__init__.py,sha256=GGK4JAwslXPWoB6cqM9L9MdH06Oadq09WyKHTY-Poaw,69
-roc/film/constants.py,sha256=oF-DhusBcMaDjJ5mrp_Jb4AG1JH_ivQng5VlbmuDRHE,3545
-roc/film/descriptor.json,sha256=TU5jcWvCyrx6GApqnWtitE12UmTe41lhF4doYZ3icDk,29929
-roc/film/exceptions.py,sha256=HJ0Mwf1aFv6ZqlL46kl_Mcw4SAbh34idJAN5KeXM6Js,5267
-roc/film/tasks/__init__.py,sha256=2gjcefCsrXrkp_Rb216h8nj0AsFVF_56qg51WR2vp34,737
-roc/film/tasks/cat_solo_hk.py,sha256=HSxn14LLVVgd54zzCRrGnAAZ4HU-h3CFMnoxrgr6YvA,11328
-roc/film/tasks/cdf_postpro.py,sha256=aoP6q9acyl8IOtC-j1ngG-DlysuZ4FTn1E6PLxli5-g,29343
-roc/film/tasks/check_dds.py,sha256=zKvHF2q73BP7xLhhUnTgJpSLi197smrV8L2_h_nlbwo,3953
-roc/film/tasks/dds_to_l0.py,sha256=hdf5FkbrjDyyd2LKtiapojUZNr83zK6K3AxWiC6rA_I,21880
-roc/film/tasks/file_handler.py,sha256=-j4k-AScStO0-leNPRQ1mt40g1EU_7nHeKVfD_n9TFI,9018
-roc/film/tasks/l0_to_anc_bia_sweep_table.py,sha256=FmcOSmz7pJF5Ml7O9TY9F6x-agYs4cWLb0DpMsOx4Tg,14015
-roc/film/tasks/l0_to_hk.py,sha256=AIpcFhoWWehTwmOzuRO8Q-_ldlXwWFWr1dGOThwLjRU,10853
-roc/film/tasks/l0_to_l1_bia_current.py,sha256=z1h8Dz9HNzVIkw9r0HX1-VQMSVdcl0n9PKln2fIpaTE,2397
-roc/film/tasks/l0_to_l1_bia_sweep.py,sha256=tA-WYKQvRbZII8MeIJKuwbrXW-Y_R0AwhWXHE0clA-s,33792
-roc/film/tasks/l0_to_l1_sbm.py,sha256=riQpWlgV7s4na1JbPRcE3earUyiZkEA6jV0EzWU8RNI,19087
-roc/film/tasks/l0_to_l1_surv.py,sha256=n0h5Mizo5mzYWW6Q1AdmMrF8TiOq9t2Glin9ai4ANeM,13886
-roc/film/tasks/make_daily_tm.py,sha256=tLeNnvCVkB7DqU2_Sw6J8qetofVufBPu860Z0MOhOI8,4407
-roc/film/tasks/merge_tcreport.py,sha256=4O14qqkSAQZRP0jCjgyGzovgWQr87cxoJEVAA189j8E,9770
-roc/film/tasks/merge_tmraw.py,sha256=45WTIYb4NnCIORP5fQm8lYi9BW87f46MqVxgg5MpYAw,12354
-roc/film/tasks/parse_dds_xml.py,sha256=KWNC_bxzABdiz9V_TpNXm8_ktCicN8W-Ec2EZW1NN8s,2106
-roc/film/tasks/set_l0_utc.py,sha256=kcZAM58FseL-IHTEpqckxlrY9FDXsJ3ccYBzCPrwxCQ,6147
-roc/film/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-roc/film/tests/hdf5_compare.py,sha256=5fT_RQUGxAQt55fn83eUH-PYhQyVZ5gaZtzaBY1RbAA,8806
-roc/film/tests/test_dds_to_l0.py,sha256=BKxF3P6A39PBfMhlbCJ9jsLqaoHRc_A6Wl_kvkqinYY,5798
-roc/film/tests/test_dds_to_tc.py,sha256=q2YvyeAogLaJ5LgsC11URlzqY9Bf4UhlXVySugYrTOQ,3820
-roc/film/tests/test_dds_to_tm.py,sha256=PzQG1eRAR44OFyBeGE5jJeszM7es-A7PNGgyQ1vn_4g,3807
-roc/film/tests/test_film.py,sha256=EakUk6j1AOxGxVw3pG7wsXt52kPrpRzsK5UpNBAIrHM,14738
-roc/film/tests/test_l0_to_hk.py,sha256=3Sg2AnGihl4Yr63rFXhQcxKHs1BMHVwk4h7QtcGR6Dg,4797
-roc/film/tests/test_l0_to_l1_bia.py,sha256=JnS9FdItutl2eCedRYuaZhZIul-9_qcib8a2H7UA2jA,13133
-roc/film/tests/test_l0_to_l1_sbm.py,sha256=pbmqSPyVJYz2dZ7chhIb6Dmx3u3rwUfKORJJLiAO5Ug,4530
-roc/film/tests/test_l0_to_l1_surv.py,sha256=v6CDwO45BphlE5EU2_4AYtiiG4H70eJ6Hn2KpODTDqw,4984
-roc/film/tests/test_metadata.py,sha256=nN5gGPNxpxKb_ikpKNVwSr4J9ndwo8yAhmlDXjzelTo,2557
-roc/film/tests/tests.py,sha256=AQQ37OMbHCX2NACki57OG2s6PxE-Pa0AZN7zK_dcrx8,40694
-roc/film/tools/__init__.py,sha256=avGYqYYazACAjEZWgSopEJ85h9x6U1C9CwC5_deoVQM,233
-roc/film/tools/dataset_tasks.py,sha256=EQjoFtKpA3pUGKN5WljlRcfLkLQDgCkiYLrZhHVwY4Y,1891
-roc/film/tools/file_helpers.py,sha256=hNO0eBV6XWdiejKCi-fchRmdU9qj79xkkY7dcOCluEk,27741
-roc/film/tools/l0.py,sha256=TI69yuO9C9BpNAUNH-v9QWQ48zUtVGQ4ZnLjb1t7HqU,45912
-roc/film/tools/metadata.py,sha256=3NoW2gNID7_K8RSqIf8pMtDMLBUU-88r9miwrxJqN_4,12540
-roc/film/tools/skeleton.py,sha256=T4JN6W7GXhycHpqLmKE5gGlZm_vNwuTYMCV4HS3ik2k,10921
-roc/film/tools/tools.py,sha256=FMBmgk_IF4obJPBBXb_eGdWMVajW7hw7x0XHiBhyZJ4,18132
-roc/film/tools/xlsx2skt.py,sha256=LJcVDtEuGY55RvtKm92AZb8vV2fpugaXU3Ae-LP1uRU,19691
-roc_film-1.13.4.dist-info/LICENSE,sha256=dukgTUfTdh4BFRSdIM5OWE6H4wfVR6m7A4wbLEilMTk,21863
-roc_film-1.13.4.dist-info/METADATA,sha256=hk91ZVIjuLBlbeaOr6i_XziotBU_5pEtW6wt8QXnyuQ,3978
-roc_film-1.13.4.dist-info/WHEEL,sha256=vVCvjcmxuUltf8cYhJ0sJMRDLr1XsPuxEId8YDzbyCY,88
-roc_film-1.13.4.dist-info/RECORD,,