geospacelab 0.10.2__py3-none-any.whl → 0.10.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (23)
  1. geospacelab/__init__.py +1 -1
  2. geospacelab/datahub/sources/madrigal/isr/millstonehill/basic/__init__.py +2 -2
  3. geospacelab/datahub/sources/madrigal/isr/millstonehill/basic/loader.py +203 -8
  4. geospacelab/datahub/sources/madrigal/isr/millstonehill/basic/variable_config.py +1 -1
  5. geospacelab/datahub/sources/madrigal/isr/pfisr/fitted/__init__.py +5 -7
  6. geospacelab/datahub/sources/madrigal/isr/pfisr/fitted/loader.py +6 -6
  7. geospacelab/datahub/sources/madrigal/isr/pfisr/vi/__init__.py +4 -6
  8. geospacelab/datahub/sources/madrigal/isr/risr_n/__init__.py +0 -0
  9. geospacelab/datahub/sources/madrigal/isr/risr_n/downloader.py +155 -0
  10. geospacelab/datahub/sources/madrigal/isr/risr_n/fitted/__init__.py +422 -0
  11. geospacelab/datahub/sources/madrigal/isr/risr_n/fitted/downloader.py +73 -0
  12. geospacelab/datahub/sources/madrigal/isr/risr_n/fitted/loader.py +225 -0
  13. geospacelab/datahub/sources/madrigal/isr/risr_n/fitted/variable_config.py +278 -0
  14. geospacelab/datahub/sources/madrigal/isr/risr_n/vi/__init__.py +343 -0
  15. geospacelab/datahub/sources/madrigal/isr/risr_n/vi/downloader.py +62 -0
  16. geospacelab/datahub/sources/madrigal/isr/risr_n/vi/loader.py +108 -0
  17. geospacelab/datahub/sources/madrigal/isr/risr_n/vi/variable_config.py +671 -0
  18. geospacelab/visualization/mpl/panels.py +6 -3
  19. {geospacelab-0.10.2.dist-info → geospacelab-0.10.3.dist-info}/METADATA +1 -2
  20. {geospacelab-0.10.2.dist-info → geospacelab-0.10.3.dist-info}/RECORD +23 -13
  21. {geospacelab-0.10.2.dist-info → geospacelab-0.10.3.dist-info}/WHEEL +0 -0
  22. {geospacelab-0.10.2.dist-info → geospacelab-0.10.3.dist-info}/licenses/LICENSE +0 -0
  23. {geospacelab-0.10.2.dist-info → geospacelab-0.10.3.dist-info}/top_level.txt +0 -0
geospacelab/__init__.py CHANGED
@@ -6,7 +6,7 @@ __author__ = "Lei Cai"
6
6
  __copyright__ = "Copyright 2021, GeospaceLAB"
7
7
  __credits__ = ["Lei Cai"]
8
8
  __license__ = "BSD-3-Clause License"
9
- __version__ = "0.10.2"
9
+ __version__ = "0.10.3"
10
10
  __maintainer__ = "Lei Cai"
11
11
  __email__ = "lei.cai@oulu.fi"
12
12
  __status__ = "Developing"
@@ -43,7 +43,7 @@ default_dataset_attrs = {
43
43
  }
44
44
 
45
45
  default_variable_names = [
46
- 'DATETIME', 'AZ', 'AZ1', 'AZ2', 'EL', 'EL1', 'EL2', 'PULSE_LENGTH',
46
+ 'DATETIME', 'DATETIME_1', 'DATETIME_2', 'AZ', 'AZ1', 'AZ2', 'EL', 'EL1', 'EL2', 'PULSE_LENGTH',
47
47
  'T_SYS', 'POWER_NORM', 'P_Tx', 'MODE_TYPE', 'POWER_LENGTH_F',
48
48
  'LAG_SPACING', 'IPP', 'f_Tx', 'v_PHASE_Tx', 'v_PHASE_Tx_err',
49
49
  'SCAN_TYPE', 'CYCN', 'POSN', 'RANGE_RES', 'RANGE',
@@ -52,7 +52,7 @@ default_variable_names = [
52
52
  'n_e_err', 'T_i', 'T_i_err', 'T_r', 'T_r_err', 'T_e', 'T_e_err',
53
53
  'nu_i', 'nu_i_err', 'v_i_los', 'v_i_los_err', 'comp_H_p',
54
54
  'comp_H_p_err', 'comp_mix', 'comp_mix_err', 'v_DOP_los', 'v_DOP_los_err',
55
- 'HEIGHT'
55
+ 'HEIGHT',
56
56
  ]
57
57
 
58
58
  # default_data_search_recursive = True
@@ -29,6 +29,9 @@ antenna_code_dict = {
29
29
  'misa': 31,
30
30
  }
31
31
 
32
+ antenna_code_dict_r = {v: k for k, v in antenna_code_dict.items()}
33
+ pulse_code_dict_r = {v: k for k, v in pulse_code_dict.items()}
34
+
32
35
  var_name_dict = {
33
36
  'AZ1': 'az1',
34
37
  'AZ2': 'az2',
@@ -98,7 +101,200 @@ class Loader:
98
101
 
99
102
  self.done = False
100
103
  if load_data:
101
- self.load()
104
+ self.load_from_table_layout()
105
+
106
+
107
def load_from_table_layout(self):
    """Load ISR variables from the Madrigal HDF5 'Table Layout' dataset.

    Reads the flat record table, identifies the experiment mode
    (antenna, pulse code, pulse length) to keep via ``validate_mode``,
    grids the per-record values onto a (time, gate) array, and derives
    composite quantities (O+ composition, mean AZ/EL, timestamps, T_e).
    Results are stored in ``self.variables``; mode info in ``self.metadata``.

    NOTE(review): reconstructed from a diff dump with lost indentation —
    the placement of the post-gridding statements relative to the ``with``
    block is inferred (only in-memory numpy arrays are touched there, so
    behavior is unaffected). Confirm against the released file.
    """
    variables = {}

    with h5py.File(self.file_path, 'r') as fh5:
        # [::] forces a full read into a numpy structured array.
        table = fh5['Data']['Table Layout'][::]
        table_dtype = table.dtype

        # Get variable names
        table_var_names = list(table_dtype.fields.keys())
        # Get table data (transpose the record rows into per-field columns)
        table_data = list(zip(*table))
        table_vars = {k: np.array(table_data[table_var_names.index(k)]) for k in table_var_names}
        # Check modes: each record is tagged by instrument code ('kinst',
        # presumably the antenna id), pulse length ('pl', converted from
        # seconds to microseconds) and mode type ('mdtyp') — TODO confirm
        # against the Madrigal parameter definitions.
        antenna_codes = table_vars['kinst'] = table_vars['kinst'].astype(np.int32)
        pulse_lengths = table_vars['pl'] = table_vars['pl'] * 1e6
        pulse_codes = table_vars['mdtyp'] = table_vars['mdtyp'].astype(np.int32)
        modes_ = list(zip(antenna_codes, pulse_codes, pulse_lengths))
        # Object array so each element stays a (antenna, code, length) tuple.
        modes = np.empty(len(modes_), dtype=object)
        modes[:] = modes_
        modes_unique = self.get_modes(modes)

        # Pick the single mode matching the user's antenna/pulse settings.
        mode = self.validate_mode(modes_unique)

        inds_mode = np.where((antenna_codes==mode[0]) & (pulse_codes==mode[1]) & (pulse_lengths==mode[2]) )[0]

        self.metadata['antenna'] = antenna_code_dict_r[mode[0]]
        self.metadata['pulse_code'] = pulse_code_dict_r[mode[1]]
        self.metadata['pulse_length'] = mode[2]

        # Grid data: reshape the flat records into (num_times, num_gates).
        ut_unix_1 = table_vars['ut1_unix'][inds_mode]
        ut_unix_1_unique = np.unique(ut_unix_1)
        num_ut = len(ut_unix_1_unique)
        vars_fh5 = {}
        if len(ut_unix_1) % num_ut == 0:
            # Regular grid: every time step has the same number of gates.
            num_gate = int(len(ut_unix_1) / num_ut)

            for k, v in table_vars.items():
                vars_fh5[k] = v[inds_mode].reshape((num_ut, num_gate))
        else:
            # Irregular grid: collect per-time gate indices and pad with NaN.
            gate_inds = []
            gate_nums = []
            for t in ut_unix_1_unique:
                ii = np.where(ut_unix_1==t)[0]
                ran = table_vars['range'][inds_mode][ii]
                diff_ran = np.diff(ran)
                if any(diff_ran < 0): # duplicated ranges
                    # Keep only the first monotonically increasing range sweep.
                    iii = range(np.where(diff_ran<0)[0][0]+1)
                    ii = ii[iii]
                gate_inds.append(ii)

                gate_nums.append(len(gate_inds[-1]))
            max_gate_num = np.max(gate_nums)

            for k, v in table_vars.items():
                vars_fh5[k] = np.empty((num_ut, max_gate_num))
                vars_fh5[k][::] = np.nan
                for i, inds in enumerate(gate_inds):
                    vars_fh5[k][i, 0:len(inds)] = v[inds_mode][inds]

        # Assign data: '_record_layout' flags (value 1) presumably mark
        # scalar (per-record) parameters vs. gate-resolved ones — TODO confirm.
        records = fh5['Metadata']['_record_layout'][0]
        rec_var_names = np.array(list(records.dtype.fields.keys()))
        rec_vars = {str(rec_var_names[i]): int(records[i]) for i in range(len(rec_var_names))}
        for var_name, var_name_fh5 in var_name_dict.items():
            if var_name_fh5 not in vars_fh5.keys():
                mylog.StreamLogger.warning(f"The requested variable {var_name_fh5} does not exist in the data file!")
                variables[var_name] = None
                continue
            if rec_vars[var_name_fh5] == 1:
                # Scalar parameter: keep only the first gate, as a column vector.
                variables[var_name] = vars_fh5[var_name_fh5][:, 0][:, np.newaxis]
            else:
                variables[var_name] = vars_fh5[var_name_fh5]

    # O+ fraction is the remainder after the mixed and H+ compositions.
    variables['comp_O_p'] = 1. - variables['comp_mix'] - variables['comp_H_p']
    variables['comp_O_p_err'] = np.sqrt(variables['comp_mix_err']**2 + variables['comp_H_p_err']**2)

    # need to be check when AZ close to 0.
    variables['AZ1'] = variables['AZ1'] % 360.
    variables['AZ2'] = variables['AZ2'] % 360.
    variables['AZ'] = (variables['AZ1'] + variables['AZ2']) / 2
    diff_az = np.abs(variables['AZ1'] - variables['AZ2'])
    # When the scan crosses 0/360 deg, average on the wrapped circle instead.
    variables['AZ'] = np.where(diff_az<180, variables['AZ'], ((variables['AZ1'] + variables['AZ2'] + 360) / 2) % 360)
    variables['EL'] = (variables['EL1'] + variables['EL2']) / 2

    # Record start/end timestamps; DATETIME is the interval midpoint.
    variables['DATETIME_1'] = dttool.convert_unix_time_to_datetime_cftime(vars_fh5['ut1_unix'][:, 0])[:, np.newaxis]
    variables['DATETIME_2'] = dttool.convert_unix_time_to_datetime_cftime(vars_fh5['ut2_unix'][:, 0])[:, np.newaxis]
    variables['DATETIME'] = variables['DATETIME_1'] + (variables['DATETIME_2'] - variables['DATETIME_1']) / 2
    # T_e from T_i and the temperature ratio T_r; errors combined in quadrature.
    variables['T_e'] = variables['T_i'] * variables['T_r']
    variables['T_e_err'] = variables['T_e'] * np.sqrt((variables['T_i_err'] / variables['T_i']) ** 2
                                                      + (variables['T_r_err'] / variables['T_r']) ** 2)
    self.variables = variables

    return
201
+
202
def validate_mode(self, modes_unique):
    """Select the single experiment mode matching the requested settings.

    Filters ``modes_unique`` — tuples of (antenna_code, pulse_code,
    pulse_length) — successively by ``self.antenna``, ``self.pulse_code``
    and ``self.pulse_length``.

    :param modes_unique: iterable of unique mode tuples (see ``get_modes``).
    :return: the matching mode tuple, or None (after logging the available
        modes) when the selection is missing, ambiguous, or inconsistent.
    """
    # Stage 1: filter by antenna.
    modes_antenna_matched = []
    try:
        for m in modes_unique:
            if m[0] == antenna_code_dict[self.antenna]:
                modes_antenna_matched.append(m)
        if len(modes_antenna_matched) == 0:
            raise KeyError
        elif len(modes_antenna_matched) == 1:
            # Single candidate: verify it against the optional pulse-code
            # and pulse-length constraints before accepting it.
            if str(self.pulse_code):
                if pulse_code_dict[self.pulse_code] != modes_antenna_matched[0][1]:
                    raise ValueError
            if self.pulse_length > 0:
                if self.pulse_length != modes_antenna_matched[0][2]:
                    raise ValueError
            return modes_antenna_matched[0]
        # Multiple candidates: fall through to the pulse-code stage.
    except Exception:
        # Broad catch kept intentionally: an unknown antenna name raises
        # KeyError from the dict lookup above; mismatches raise ValueError.
        if str(self.antenna):
            mylog.StreamLogger.error("Antenna {} is not found!".format(self.antenna.upper()))
        else:
            mylog.StreamLogger.error("Antenna must be specified!")
        self.list_modes()
        return None

    # Stage 2: filter by pulse code.
    modes_pulse_code_matched = []
    try:
        for m in modes_antenna_matched:
            if m[1] == pulse_code_dict[self.pulse_code]:
                modes_pulse_code_matched.append(m)
        if len(modes_pulse_code_matched) == 0:
            raise KeyError
        elif len(modes_pulse_code_matched) == 1:
            if self.pulse_length > 0:
                # BUGFIX: validate the pulse length of the pulse-code-matched
                # mode; the original checked modes_antenna_matched[0], which
                # is not necessarily the same mode.
                if self.pulse_length != modes_pulse_code_matched[0][2]:
                    raise ValueError
            return modes_pulse_code_matched[0]
        # Multiple candidates: fall through to the pulse-length stage.
    except Exception:
        if str(self.pulse_code):
            mylog.StreamLogger.error("Pulse code {} is not found!".format(self.pulse_code.upper()))
        else:
            mylog.StreamLogger.error("Pulse code must be specified!")
        self.list_modes()
        return None

    # Stage 3: filter by pulse length.
    modes_pulse_length_matched = []
    try:
        for m in modes_pulse_code_matched:
            if m[2] == self.pulse_length:
                modes_pulse_length_matched.append(m)
        if len(modes_pulse_length_matched) == 0:
            raise ValueError
        elif len(modes_pulse_length_matched) == 1:
            return modes_pulse_length_matched[0]
        else:
            mylog.StreamLogger.error("Multiple modes found!")
            raise ValueError
    except Exception:
        if self.pulse_length > 0:
            # BUGFIX: report the pulse length itself; the original formatted
            # self.pulse_code.upper(), which names the wrong quantity and
            # raises AttributeError for numeric pulse codes.
            mylog.StreamLogger.error("Pulse length {} is not found!".format(self.pulse_length))
        else:
            mylog.StreamLogger.error("Pulse length must be specified!")
        self.list_modes()
        return None
    # Every path above returns; the original trailing `return mode`
    # (always None) was unreachable and has been removed.
273
+
274
def list_modes(self):
    """Log an enumerated, human-readable summary of all experiment modes
    recorded in ``self.metadata['MODES']``."""
    mylog.simpleinfo.info("List of the experiment modes:")

    for index, mode_info in enumerate(self.metadata['MODES']):
        parts = ['{}, '.format(index)]
        parts.extend("{}: {}, ".format(key, value) for key, value in mode_info.items())
        mylog.simpleinfo.info(''.join(parts))
    return
283
def get_modes(self, modes):
    """Identify the unique experiment modes and record them in the metadata.

    :param modes: object array of (antenna_code, pulse_code, pulse_length)
        tuples, one per record.
    :return: the unique mode tuples (as returned by ``np.unique``).
    """
    unique_modes = np.unique(modes)

    self.metadata['NUM_MODES'] = len(unique_modes)
    mode_entries = []
    for antenna_id, pulse_code_id, pulse_length in unique_modes:
        mode_entries.append({
            'ANTENNA_ID': antenna_id,
            'ANTENNA': antenna_code_dict_r[antenna_id],
            'PULSE_CODE_ID': pulse_code_id,
            'PULSE_CODE': pulse_code_dict_r[pulse_code_id],
            'PULSE_LENGTH': pulse_length
        })
    self.metadata['MODES'] = mode_entries
    return unique_modes
102
298
 
103
299
  def load(self):
104
300
  variables = {}
@@ -182,8 +378,12 @@ class Loader:
182
378
  variables['comp_O_p_err'] = np.sqrt(variables['comp_mix_err']**2 + variables['comp_H_p_err']**2)
183
379
 
184
380
  # need to be check when AZ close to 0.
185
- variables['AZ'] = variables['AZ1'] % 360.
186
- variables['EL'] = variables['EL1']
381
+ variables['AZ1'] = variables['AZ1'] % 360.
382
+ variables['AZ2'] = variables['AZ2'] % 360.
383
+ variables['AZ'] = (variables['AZ1'] + variables['AZ2']) / 2
384
+ diff_az = np.abs(variables['AZ1'] - variables['AZ2'])
385
+ variables['AZ'] = np.where(diff_az<180, variables['AZ'], ((variables['AZ1'] + variables['AZ2'] + 360) / 2) % 360)
386
+ variables['EL'] = (variables['EL1'] + variables['EL2']) / 2
187
387
 
188
388
  variables['RANGE'] = np.tile(vars_fh5['range'], [variables['n_e'].shape[0], 1])
189
389
  variables['DATETIME'] = dttool.convert_unix_time_to_datetime_cftime(vars_fh5['timestamps'])
@@ -194,8 +394,3 @@ class Loader:
194
394
  self.variables = variables
195
395
  self.metadata = metadata
196
396
 
197
-
198
- if __name__ == "__main__":
199
- import pathlib
200
- fp = pathlib.Path("/home/lei/afys-data/Madrigal/Millstone_ISR/2016/20160314/Millstone_ISR_combined_20160314.005.hdf5")
201
- Loader(file_path=fp)
@@ -87,7 +87,7 @@ axis[1].data = "@d.HEIGHT.value"
87
87
  axis[2].data = "@v.value"
88
88
  axis[1].label = 'h'
89
89
  axis[1].unit = 'km'
90
- axis[2].lim = [8e9, 9e11]
90
+ axis[2].lim = [8e9, 3e12]
91
91
  axis[2].scale = 'log'
92
92
  axis[2].label = '@v.label'
93
93
  axis[2].unit = '@v.unit_label'
@@ -232,9 +232,8 @@ class Dataset(datahub.DatasetSourced):
232
232
  search_pattern = "*EID-*/"
233
233
  exp_dirs = list(initial_file_dir.glob(search_pattern))
234
234
 
235
- if not list(exp_dirs):
236
- done = False
237
- continue
235
+ if not list(exp_dirs) and self.allow_download:
236
+ self.download_data()
238
237
 
239
238
  def dir_parser(dirs):
240
239
  dirs_out = []
@@ -252,9 +251,8 @@ class Dataset(datahub.DatasetSourced):
252
251
  return dirs_out
253
252
  file_dirs = dir_parser(exp_dirs)
254
253
 
255
- if not list(file_dirs):
256
- done=False
257
- continue
254
+ if not list(file_dirs) and self.allow_download:
255
+ self.download_data()
258
256
 
259
257
  for fd in file_dirs:
260
258
  if isinstance(self.exp_name_pattern, list):
@@ -281,7 +279,7 @@ class Dataset(datahub.DatasetSourced):
281
279
  initial_file_dir=fd, search_pattern=search_pattern,
282
280
  allow_multiple_files=True, recursive=recursive)
283
281
 
284
- # Validate file paths
282
+ # Validate file paths
285
283
 
286
284
  if not done and self.allow_download:
287
285
  done = self.download_data()
@@ -141,6 +141,12 @@ class Loader:
141
141
 
142
142
  return metadata
143
143
 
144
def load_from_table_layout(self):
    """Placeholder for loading PFISR data from the Madrigal 'Table Layout' group.

    NOTE(review): this method is an unfinished stub — it only builds empty
    containers and implicitly returns None without reading the file.
    TODO: implement (mirroring the Millstone Hill loader) or remove.
    """
    variables = {}
    metadata = {}
144
150
  def load(self):
145
151
  variables = {}
146
152
  metadata = {}
@@ -217,9 +223,3 @@ class Loader:
217
223
  self.beam_el = np.nanmedian(variables['EL'])
218
224
 
219
225
  self.metadata = metadata
220
-
221
-
222
- if __name__ == "__main__":
223
- import pathlib
224
- fp = pathlib.Path("/home/lei/Downloads/pfa140102.002.hdf5")
225
- Loader(file_path=fp, beam_az=290.5, beam_el=70.)
@@ -159,9 +159,8 @@ class Dataset(datahub.DatasetSourced):
159
159
  search_pattern = "*EID-*/"
160
160
  exp_dirs = list(initial_file_dir.glob(search_pattern))
161
161
 
162
- if not list(exp_dirs):
163
- done = False
164
- continue
162
+ if not list(exp_dirs) and self.allow_download:
163
+ self.download_data()
165
164
 
166
165
  def dir_parser(dirs):
167
166
  dirs_out = []
@@ -180,9 +179,8 @@ class Dataset(datahub.DatasetSourced):
180
179
 
181
180
  file_dirs = dir_parser(exp_dirs)
182
181
 
183
- if not list(file_dirs):
184
- done = False
185
- continue
182
+ if not list(file_dirs) and self.allow_download:
183
+ self.download_data()
186
184
 
187
185
  for fd in file_dirs:
188
186
  if isinstance(self.exp_name_pattern, list):
@@ -0,0 +1,155 @@
1
+ # Licensed under the BSD 3-Clause License
2
+ # Copyright (C) 2021 GeospaceLab (geospacelab)
3
+ # Author: Lei Cai, Space Physics and Astronomy, University of Oulu
4
+
5
+ __author__ = "Lei Cai"
6
+ __copyright__ = "Copyright 2021, GeospaceLab"
7
+ __license__ = "BSD-3-Clause License"
8
+ __email__ = "lei.cai@oulu.fi"
9
+ __docformat__ = "reStructureText"
10
+
11
+ from cProfile import label
12
+ import datetime
13
+ from dateutil.relativedelta import relativedelta
14
+ from dateutil.parser import parse as dtparse
15
+ import numpy as np
16
+ import re
17
+ import requests
18
+ import bs4
19
+ import os
20
+ import pathlib
21
+
22
+ from geospacelab.config import prf
23
+ import geospacelab.datahub.sources.madrigal as madrigal
24
+ import geospacelab.toolbox.utilities.pylogging as mylog
25
+ import geospacelab.toolbox.utilities.pydatetime as dttool
26
+ from geospacelab.datahub.sources.madrigal.downloader import Downloader as DownloaderBase
27
+
28
+
29
def test():
    """Smoke-test the RISR-N Downloader for two known experiment IDs.

    Constructing the Downloader presumably triggers the download directly
    (``direct_download`` defaults to True — confirm against DownloaderBase).
    Requires network access to the AMISR Madrigal server.
    """
    dt_fr = datetime.datetime(2016, 3, 14)
    dt_to = datetime.datetime(2016, 3, 14, 23)
    # Unused locals removed: `eid` was never read and the Downloader
    # instance was never used after construction.
    Downloader(
        dt_fr, dt_to, include_exp_ids=[100213152, 100213190],
        data_product='vi',
        include_file_name_patterns=[['ran']],
        include_file_type_patterns=[['velocity']],
        exclude_file_type_patterns=['.*uncorrected.*', '.*power.*'],
        force_download=True,
        dry_run=False)
41
+
42
+
43
class Downloader(DownloaderBase):
    """Downloader for RISR-N (Resolute Bay ISR, north face) data files from
    the AMISR Madrigal database.

    Queries the experiment list for instrument code 91, filters experiments
    and files by the supplied name/type patterns, and saves each remote HDF5
    file under a local directory named after the experiment ID, time span,
    and experiment name.
    """

    def __init__(
            self, dt_fr: datetime.datetime, dt_to: datetime.datetime,
            data_product=None,
            include_exp_name_patterns: list = None,
            exclude_exp_name_patterns: list = None,
            include_exp_ids: list = None,
            exclude_exp_ids: list = None,
            include_file_name_patterns: list = None,
            exclude_file_name_patterns: list = None,
            include_file_type_patterns=None,
            exclude_file_type_patterns=None,
            data_file_root_dir: str = None,
            direct_download=True,
            force_download=False,
            dry_run: bool = False,
            madrigal_url: str = "https://data.amisr.com/madrigal",
            user_fullname: str = madrigal.default_user_fullname,
            user_email: str = madrigal.default_user_email,
            user_affiliation: str = madrigal.default_user_affiliation):
        """Set up the base downloader for RISR-N over whole-day boundaries.

        :param dt_fr: start of the requested time window (expanded to the
            start of its day). BUGFIX: annotation corrected from ``datetime``
            (the module) to ``datetime.datetime``.
        :param data_product: short product tag used in local file names
            (e.g. 'vi'); non-string values are treated as ''.
        :param exclude_exp_ids: experiment IDs to skip. BUGFIX: the original
            used a mutable default ``[]`` shared across all calls; ``None``
            is now normalized to a fresh list per call (backward compatible).
        """
        exclude_exp_ids = [] if exclude_exp_ids is None else exclude_exp_ids

        icodes = [91, ]  # Madrigal instrument code for RISR-N
        self.data_product = data_product if isinstance(data_product, str) else ''

        # Expand the requested window to whole days.
        dt_fr = dttool.get_start_of_the_day(dt_fr)
        dt_to = dttool.get_end_of_the_day(dt_to)

        super().__init__(
            dt_fr=dt_fr, dt_to=dt_to, icodes=icodes,
            include_exp_name_patterns=include_exp_name_patterns,
            exclude_exp_name_patterns=exclude_exp_name_patterns,
            include_exp_ids=include_exp_ids,
            exclude_exp_ids=exclude_exp_ids,
            include_file_name_patterns=include_file_name_patterns,
            exclude_file_name_patterns=exclude_file_name_patterns,
            include_file_type_patterns=include_file_type_patterns,
            exclude_file_type_patterns=exclude_file_type_patterns,
            data_file_root_dir=data_file_root_dir,
            force_download=force_download, direct_download=direct_download, dry_run=dry_run,
            madrigal_url=madrigal_url,
            user_fullname=user_fullname, user_email=user_email, user_affiliation=user_affiliation)

    def download(self, **kwargs):
        """Fetch the matching RISR-N files and return their local paths.

        Builds the experiment list, filters the online files, derives a
        local directory/file name per file, and delegates the transfer of
        each missing file to the base class.

        :return: list of pathlib.Path objects for the files transferred in
            this call (already-present files are skipped and not listed).
        """
        exps, database = self.get_exp_list(
            dt_fr=self.dt_fr,
            dt_to=self.dt_to,
            include_exp_name_patterns=self.include_exp_name_patterns,
            exclude_exp_name_patterns=self.exclude_exp_name_patterns,
            include_exp_ids=self.include_exp_ids,
            exclude_exp_ids=self.exclude_exp_ids,
            icodes=self.icodes,
            madrigal_url=self.madrigal_url,
            display=True)
        self.exp_list = list(exps)
        self.database = database

        exps, exps_error = self.get_online_file_list(
            exp_list=self.exp_list, database=database,
            include_file_name_patterns=self.include_file_name_patterns,
            exclude_file_name_patterns=self.exclude_file_name_patterns,
            include_file_type_patterns=self.include_file_type_patterns,
            exclude_file_type_patterns=self.exclude_file_type_patterns,
            display=True
        )
        self.exp_list_error = list(exps_error)

        file_paths = []
        for exp in exps:
            dt_fr_exp = datetime.datetime(
                exp.startyear, exp.startmonth, exp.startday, exp.starthour, exp.startmin, exp.startsec
            )
            dt_to_exp = datetime.datetime(
                exp.endyear, exp.endmonth, exp.endday, exp.endhour, exp.endmin, exp.endsec
            )
            for file in list(exp.files):

                file_path_remote = pathlib.Path(file.name)
                file_name_remote = file_path_remote.name

                # Extract the experiment day (e.g. '14mar16') from the DOI.
                # NOTE(review): re.search may return None for an unexpected
                # DOI format, which would raise AttributeError here —
                # consider guarding.
                res = re.search(r'/([\d]{2}[a-z]{3}[\d]{2})', file.doi)
                dtstr = res.groups()[0]
                # Capitalize the month letter so strptime's %b matches.
                dtstr = dtstr[0:2] + dtstr[2].upper() + dtstr[3:]
                thisday = datetime.datetime.strptime(dtstr, "%d%b%y")

                # Build a compact experiment name from at most 5 word tokens.
                # (raw string added to silence the invalid-escape warning)
                exp_name_patterns = re.findall(r'[\w.]+', exp.name)
                if len(exp_name_patterns) > 5:
                    exp_name_patterns = exp_name_patterns[:5]
                exp_name_patterns[0] = exp_name_patterns[0].replace('.risrn', '')
                exp_name = '_'.join(exp_name_patterns).lower()
                file_dir_local = self.data_file_root_dir / thisday.strftime("%Y") / \
                    ('RISR-N_EID-' + str(exp.id) + '_'
                     + dt_fr_exp.strftime("%Y%m%d%H%M%S") + '_'
                     + dt_to_exp.strftime("%Y%m%d%H%M%S") + '_' + exp_name)
                file_dir_local.mkdir(parents=True, exist_ok=True)
                file_name_local = 'RISR-N_' + thisday.strftime('%Y%m%d') + '_' + \
                    self.data_product.replace(' ', '_') + \
                    '.' + '.'.join(file_name_remote.split('.')[1:])
                file_path_local = file_dir_local / file_name_local
                if file_path_local.is_file():
                    mylog.simpleinfo.info("The file has been downloaded: {}.".format(file_path_local))
                    self.done = True
                    continue
                super().download(
                    file_path_remote=file.name, file_path_local=file_path_local,
                    file_format='hdf5')
                file_paths.append(file_path_local)
        return file_paths
152
+
153
+
154
+ if __name__ == "__main__":
155
+ test()