geospacelab 0.10.2__py3-none-any.whl → 0.10.4__py3-none-any.whl

This diff shows the content changes between two package versions that have been publicly released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (35)
  1. geospacelab/__init__.py +1 -1
  2. geospacelab/config/__init__.py +3 -2
  3. geospacelab/config/__mpl__.py +98 -0
  4. geospacelab/config/_preferences.py +3 -0
  5. geospacelab/datahub/sources/cdaweb/dmsp/ssusi/edr_aur/__init__.py +9 -2
  6. geospacelab/datahub/sources/cdaweb/dmsp/ssusi/edr_aur/downloader.py +2 -2
  7. geospacelab/datahub/sources/madrigal/downloader.py +15 -5
  8. geospacelab/datahub/sources/madrigal/gnss/tecmap/variable_config.py +3 -2
  9. geospacelab/datahub/sources/madrigal/isr/eiscat/__init__.py +6 -5
  10. geospacelab/datahub/sources/madrigal/isr/millstonehill/basic/__init__.py +2 -2
  11. geospacelab/datahub/sources/madrigal/isr/millstonehill/basic/loader.py +203 -8
  12. geospacelab/datahub/sources/madrigal/isr/millstonehill/basic/variable_config.py +1 -1
  13. geospacelab/datahub/sources/madrigal/isr/pfisr/fitted/__init__.py +49 -13
  14. geospacelab/datahub/sources/madrigal/isr/pfisr/fitted/loader.py +28 -9
  15. geospacelab/datahub/sources/madrigal/isr/pfisr/vi/__init__.py +4 -6
  16. geospacelab/datahub/sources/madrigal/isr/risr_n/__init__.py +0 -0
  17. geospacelab/datahub/sources/madrigal/isr/risr_n/downloader.py +155 -0
  18. geospacelab/datahub/sources/madrigal/isr/risr_n/fitted/__init__.py +422 -0
  19. geospacelab/datahub/sources/madrigal/isr/risr_n/fitted/downloader.py +73 -0
  20. geospacelab/datahub/sources/madrigal/isr/risr_n/fitted/loader.py +225 -0
  21. geospacelab/datahub/sources/madrigal/isr/risr_n/fitted/variable_config.py +278 -0
  22. geospacelab/datahub/sources/madrigal/isr/risr_n/vi/__init__.py +343 -0
  23. geospacelab/datahub/sources/madrigal/isr/risr_n/vi/downloader.py +62 -0
  24. geospacelab/datahub/sources/madrigal/isr/risr_n/vi/loader.py +108 -0
  25. geospacelab/datahub/sources/madrigal/isr/risr_n/vi/variable_config.py +671 -0
  26. geospacelab/observatory/orbit/utilities.py +14 -3
  27. geospacelab/visualization/mpl/__base__.py +1 -97
  28. geospacelab/visualization/mpl/colormaps.py +10 -0
  29. geospacelab/visualization/mpl/geomap/geopanels.py +11 -1
  30. geospacelab/visualization/mpl/panels.py +6 -3
  31. {geospacelab-0.10.2.dist-info → geospacelab-0.10.4.dist-info}/METADATA +1 -2
  32. {geospacelab-0.10.2.dist-info → geospacelab-0.10.4.dist-info}/RECORD +35 -24
  33. {geospacelab-0.10.2.dist-info → geospacelab-0.10.4.dist-info}/WHEEL +0 -0
  34. {geospacelab-0.10.2.dist-info → geospacelab-0.10.4.dist-info}/licenses/LICENSE +0 -0
  35. {geospacelab-0.10.2.dist-info → geospacelab-0.10.4.dist-info}/top_level.txt +0 -0
geospacelab/__init__.py CHANGED
@@ -6,7 +6,7 @@ __author__ = "Lei Cai"
  __copyright__ = "Copyright 2021, GeospaceLAB"
  __credits__ = ["Lei Cai"]
  __license__ = "BSD-3-Clause License"
- __version__ = "0.10.2"
+ __version__ = "0.10.4"
  __maintainer__ = "Lei Cai"
  __email__ = "lei.cai@oulu.fi"
  __status__ = "Developing"
geospacelab/config/__init__.py CHANGED
@@ -9,9 +9,8 @@ __email__ = "lei.cai@oulu.fi"
  __docformat__ = "reStructureText"


- from geospacelab.config._preferences import Preferences
+ from geospacelab.config._preferences import prf, pref

- prf = pref = Preferences()

  try:
  opt = pref.user_config['visualization']
@@ -20,3 +19,5 @@ except KeyError:
  uc['visualization'] = dict()
  uc['visualization']['mpl'] = dict()
  pref.set_user_config(user_config=uc, set_as_default=True)
+
+ from geospacelab.config.__mpl__ import mpl, plt
geospacelab/config/__mpl__.py ADDED
@@ -0,0 +1,98 @@
+ import matplotlib as mpl
+ import matplotlib.pyplot as plt
+
+ from geospacelab.config._preferences import pref
+
+
+ from cycler import cycler
+
+ try:
+ mpl_style = pref.user_config['visualization']['mpl']['style']
+ except KeyError:
+ uc = pref.user_config
+ uc['visualization']['mpl']['style'] = 'light'
+ pref.set_user_config(user_config=uc, set_as_default=True)
+
+
+ # plt.rcParams['font.serif'] = 'Ubuntu'
+ # plt.rcParams['font.monospace'] = 'Ubuntu Mono'
+ plt.rcParams['font.size'] = 10
+ plt.rcParams['axes.labelsize'] = 10
+ plt.rcParams['axes.labelweight'] = 'book'
+ plt.rcParams['axes.titlesize'] = 10
+ plt.rcParams['xtick.labelsize'] = 10
+ plt.rcParams['ytick.labelsize'] = 10
+ plt.rcParams['legend.fontsize'] = 10
+ plt.rcParams['figure.titlesize'] = 12
+
+ # plt.style.use('https://github.com/dhaitz/matplotlib-stylesheets/raw/master/pacoty.mplstyle')
+ mpl_style = pref.user_config['visualization']['mpl']['style']
+
+ if mpl_style == 'light':
+ plt.rcParams['axes.facecolor'] = '#FCFCFC'
+ plt.rcParams['text.color'] = 'k'
+ default_cycler = (cycler(color=['tab:blue', 'tab:red', 'tab:green', 'tab:purple', 'tab:orange', 'tab:brown', 'tab:pink', 'tab:gray', 'tab:olive', 'tab:cyan']))
+ default_cycler = (cycler(color=['#1f77b4DD', '#ff7f0eDD', '#2ca02cDD', '#d62728DD', '#9467bdDD', '#8c564bDD', '#e377c2DD', '#7f7f7fDD', '#bcbd22DD', '#17becfDD']))
+ # colors = [
+ # (0.8980392156862745, 0.5254901960784314, 0.023529411764705882),
+ # (0.36470588235294116, 0.4117647058823529, 0.6941176470588235),
+ # (0.3215686274509804, 0.7372549019607844, 0.6392156862745098),
+ # (0.6, 0.788235294117647, 0.27058823529411763),
+ # (0.8, 0.3803921568627451, 0.6901960784313725),
+ # (0.1411764705882353, 0.4745098039215686, 0.4235294117647059),
+ # (0.8549019607843137, 0.6470588235294118, 0.10588235294117647),
+ # (0.1843137254901961, 0.5411764705882353, 0.7686274509803922),
+ # (0.4627450980392157, 0.3058823529411765, 0.6235294117647059),
+ # (0.9294117647058824, 0.39215686274509803, 0.35294117647058826),
+ # ]
+ colors = [
+ (0.36470588235294116, 0.4117647058823529, 0.6941176470588235),
+ (0.9294117647058824, 0.39215686274509803, 0.35294117647058826),
+ (0.3215686274509804, 0.7372549019607844, 0.6392156862745098),
+ (0.8980392156862745, 0.5254901960784314, 0.023529411764705882),
+ (0.6, 0.788235294117647, 0.27058823529411763),
+ (0.8, 0.3803921568627451, 0.6901960784313725),
+ (0.1411764705882353, 0.4745098039215686, 0.4235294117647059),
+ (0.8549019607843137, 0.6470588235294118, 0.10588235294117647),
+ (0.1843137254901961, 0.5411764705882353, 0.7686274509803922),
+ (0.4627450980392157, 0.3058823529411765, 0.6235294117647059),
+
+ ]
+ default_cycler = (cycler(color=colors))
+ plt.rc('axes', prop_cycle=default_cycler)
+ elif mpl_style == 'dark':
+ plt.rcParams['figure.facecolor'] = '#0C1C23'
+ plt.rcParams['savefig.facecolor'] = '#0C1C23'
+
+ plt.rcParams['axes.facecolor'] = '#FFFFFF20'
+ plt.rcParams['axes.edgecolor'] = '#FFFFFF3D'
+ plt.rcParams['axes.labelcolor'] = '#FFFFFFD9'
+
+ plt.rcParams['xtick.color'] = '#FFFFFFD9'
+ plt.rcParams['ytick.color'] = '#FFFFFFD9'
+ plt.rcParams['text.color'] = 'white'
+
+ plt.rcParams['grid.color'] = '#FFFFFF'
+ plt.rcParams['legend.facecolor'] = plt.rcParams['axes.facecolor']
+ plt.rcParams['legend.edgecolor'] = '#FFFFFFD9'
+
+ # seaborn dark:['#001c7f', '#b1400d', '#12711c', '#8c0800', '#591e71', '#592f0d', '#a23582', '#3c3c3c', '#b8850a', '#006374']
+ # seaborn pastel '#a1c9f4', '#ffb482', '#8de5a1', '#ff9f9b', '#d0bbff', '#debb9b', '#fab0e4', '#cfcfcf', '#fffea3', '#b9f2f0'
+ default_cycler = (cycler(color=['#F5EE33', '#33FF99', 'r', '#9467bd', '#08C7FE', '#FE66BB', ]))
+ colors = [
+ (0.1843137254901961, 0.5411764705882353, 0.7686274509803922),
+ (0.9294117647058824, 0.39215686274509803, 0.35294117647058826),
+ (0.3215686274509804, 0.7372549019607844, 0.6392156862745098),
+ (0.8980392156862745, 0.5254901960784314, 0.023529411764705882),
+ (0.6, 0.788235294117647, 0.27058823529411763),
+ (0.8, 0.3803921568627451, 0.6901960784313725),
+ (0.1411764705882353, 0.4745098039215686, 0.4235294117647059),
+ (0.8549019607843137, 0.6470588235294118, 0.10588235294117647),
+ (0.36470588235294116, 0.4117647058823529, 0.6941176470588235),
+ (0.4627450980392157, 0.3058823529411765, 0.6235294117647059),
+ ]
+ default_cycler = (cycler(color=colors))
+ # default_cycler = (cycler(color=palettable.cartocolors.qualitative.Safe_10.mpl_colors))
+ plt.rc('axes', prop_cycle=default_cycler)
+ else:
+ plt.style.use(mpl_style)
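Note: the new config/__mpl__.py applies a Matplotlib style ('light' by default, 'dark', or any name accepted by plt.style.use) based on the 'style' key under user_config['visualization']['mpl']. Below is a minimal sketch of switching the style from user code; it assumes only the keys shown above and that set_as_default=True persists the choice to the user configuration, so the new style is picked up the next time geospacelab is imported.

# Sketch: persist the 'dark' style in the user configuration.
from geospacelab.config import pref

uc = pref.user_config
uc['visualization']['mpl']['style'] = 'dark'   # 'light', 'dark', or any plt.style name
pref.set_user_config(user_config=uc, set_as_default=True)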
geospacelab/config/_preferences.py CHANGED
@@ -126,3 +126,6 @@ data_root_dir = ""
  user_config_dict = toml.load(config_file_path)
  self.user_config = user_config_dict
  return user_config_dict
+
+
+ prf = pref = Preferences()
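The Preferences instance is now created once in _preferences.py instead of in config/__init__.py, so both import paths hand out the same singleton. A small sketch illustrating that, using only the names shown in the diff:

# Sketch: prf and pref are two names for one shared Preferences instance.
from geospacelab.config import prf, pref
from geospacelab.config._preferences import pref as pref_direct

assert prf is pref is pref_direct
print(type(pref).__name__)   # Preferences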
geospacelab/datahub/sources/cdaweb/dmsp/ssusi/edr_aur/__init__.py CHANGED
@@ -34,6 +34,7 @@ default_dataset_attrs = {

  default_variable_names = [
  'DATETIME', 'STARTING_TIME', 'STOPPING_TIME',
+ 'ORBIT_ID',
  'GRID_MLAT', 'GRID_MLON', 'GRID_MLT', 'GRID_UT',
  'GRID_AUR_1216', 'GRID_AUR_1304', 'GRID_AUR_1356', 'GRID_AUR_LBHS', 'GRID_AUR_LBHL',
  ]
@@ -93,6 +94,8 @@ class Dataset(datahub.DatasetSourced):
  default_variable_names,
  configured_variables=var_config.configured_variables
  )
+ if self.orbit_id is None:
+ self.orbit_id = []
  for file_path in self.data_file_paths:
  try:
  load_obj = self.loader(file_path, file_type=self.product.lower(), pole=self.pole)
@@ -105,14 +108,17 @@ class Dataset(datahub.DatasetSourced):
  continue
  if var_name in ['DATETIME', 'STARTING_TIME', 'STOPPING_TIME']:
  value = np.array([load_obj.variables[var_name]])[np.newaxis, :]
+ elif var_name == 'ORBIT_ID':
+ value = np.array([load_obj.metadata['ORBIT_ID']], dtype=str)
  else:
  value = np.empty((1, ), dtype=object)
  value[0] = load_obj.variables[var_name]
  # value = load_obj.variables[var_name][np.newaxis, ::]
  self._variables[var_name].join(value)

- self.orbit_id = load_obj.metadata['ORBIT_ID']
- # self.select_beams(field_aligned=True)
+ self.orbit_id = self['ORBIT_ID'].value
+ if len(self.orbit_id):
+ self.orbit_id = self.orbit_id[0]
  if self.time_clip:
  self.time_filter_by_range()

@@ -178,6 +184,7 @@ class Dataset(datahub.DatasetSourced):
  download_obj = self.downloader(
  dt_fr, dt_to,
  orbit_id=self.orbit_id, sat_id=self.sat_id,
+ data_file_root_dir=self.data_root_dir
  )
  return download_obj.done

geospacelab/datahub/sources/cdaweb/dmsp/ssusi/edr_aur/downloader.py CHANGED
@@ -22,7 +22,7 @@ class Downloader(DownloaderBase):
  ):
  product = 'EDR_AUR'
  if data_file_root_dir is None:
- data_file_root_dir = prf.datahub_data_root_dir / 'CDAWeb' / 'DMSP' / 'SSUSI' /product / sat_id.upper()
+ data_file_root_dir = prf.datahub_data_root_dir / 'CDAWeb' / 'DMSP' / 'SSUSI' / product
  self.sat_id = sat_id
  self.orbit_id = orbit_id
  self.source_subdirs = ['dmsp', 'dmsp'+self.sat_id.lower(), 'ssusi', 'data', 'edr-aurora']
@@ -78,7 +78,7 @@ class Downloader(DownloaderBase):
  year = int(sy)
  this_day = dttool.convert_doy_to_datetime(year, int(sdoy))
  if file_dir is None:
- file_dir = self.data_file_root_dir / sy / this_day.strftime("%Y%m%d")
+ file_dir = self.data_file_root_dir / self.sat_id.upper() / sy / this_day.strftime("%Y%m%d")
  super().save_file_from_http(url, file_dir=file_dir)


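With this change the satellite ID is no longer part of data_file_root_dir; it is appended per download together with the year and day, so files end up under .../SSUSI/EDR_AUR/<SAT_ID>/<YYYY>/<YYYYMMDD>/. A sketch of the resulting layout with pathlib (the root directory and values are hypothetical stand-ins):

# Sketch of the resulting directory layout (hypothetical root and values).
import datetime
import pathlib

root = pathlib.Path('~/geospacelab/data').expanduser()   # stand-in for prf.datahub_data_root_dir
product, sat_id = 'EDR_AUR', 'f18'
day = datetime.datetime(2015, 9, 8)

data_file_root_dir = root / 'CDAWeb' / 'DMSP' / 'SSUSI' / product          # sat_id no longer included here
file_dir = data_file_root_dir / sat_id.upper() / str(day.year) / day.strftime("%Y%m%d")
print(file_dir)   # .../CDAWeb/DMSP/SSUSI/EDR_AUR/F18/2015/20150908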
geospacelab/datahub/sources/madrigal/downloader.py CHANGED
@@ -331,7 +331,7 @@ class Downloader(object):
  exclude_exp_name_patterns=None,
  include_exp_ids=None,
  exclude_exp_ids=None,
- icodes=None, madrigal_url=None, display=True,
+ icodes=None, madrigal_url=None, display=True, level=0,
  ):

  def try_to_get_database(max=3, interval=30):
@@ -383,8 +383,9 @@ class Downloader(object):

  exps_new = []
  another_madrigal_url = ''
+
  for exp in exps:
- if exp.id == -1:
+ if exp.id == -1 and level==0:
  if another_madrigal_url != exp.madrigalUrl:
  mylog.StreamLogger.warning(
  f'Another Madrigal site detected: {exp.madrigalUrl}!'
@@ -400,12 +401,21 @@ class Downloader(object):

  exps = np.array(exps_new, dtype=object)

- if not list(exps):
+ if not list(exps) and not str(another_madrigal_url):
  raise ValueError('Cannot find available experiments from the current database! Check the input values!')
- elif str(another_madrigal_url):
+ elif str(another_madrigal_url) and level==0:
  mylog.StreamLogger.warning(
- 'Some data are located in another Madrigal site and will not be processed!'
+ 'Some data are located in another Madrigal site: {}.'.format(another_madrigal_url)
  )
+
+ exps, database = Downloader.get_exp_list(
+ dt_fr=dt_fr, dt_to=dt_to,
+ include_exp_name_patterns=include_exp_name_patterns,
+ exclude_exp_name_patterns=exclude_exp_name_patterns,
+ include_exp_ids=include_exp_ids,
+ exclude_exp_ids=exclude_exp_ids,
+ icodes=icodes, madrigal_url=another_madrigal_url, display=display, level=level+1
+ )
  else:
  pass

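Instead of skipping experiments hosted on another Madrigal site, get_exp_list now re-queries that site once, and the new level argument keeps the lookup from hopping further. A minimal, self-contained sketch of the same level-guarded pattern (query_site and the site table are toy stand-ins, not geospacelab APIs):

# Minimal sketch of the level-guarded, one-hop follow-up query.
SITES = {
    'http://primary.example':   (['exp_A', 'exp_B'], 'http://secondary.example'),
    'http://secondary.example': (['exp_C'], None),
}

def query_site(url):
    # Toy stand-in: returns (experiments, url of another site or None).
    return SITES[url]

def list_experiments(url, level=0):
    exps, other_site = query_site(url)
    if other_site and level == 0:
        # Follow the secondary site exactly once; level=1 blocks further hops.
        exps = exps + list_experiments(other_site, level=level + 1)
    return exps

print(list_experiments('http://primary.example'))   # ['exp_A', 'exp_B', 'exp_C']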
geospacelab/datahub/sources/madrigal/gnss/tecmap/variable_config.py CHANGED
@@ -45,7 +45,7 @@ var = Var(name=var_name, ndim=3, variable_type='scalar', visual=visual)
  var.fullname = 'GNSS TEC MAP'
  var.label = r'TEC'
  var.group = r'TEC'
- var.unit = 'TECu'
+ var.unit = 'TECU'
  var.depends = {0: depend_0, 1: depend_1}
  # set plot attrs
  plot_config = var.visual.plot_config
@@ -53,6 +53,7 @@ plot_config.config(**default_plot_config)
  plot_config.style = '2P'
  # set axis attrs
  axis = var.visual.axis
-
+ axis[2].label = '@v.label'
+ axis[2].unit = '@v.unit'

  configured_variables[var_name] = var
geospacelab/datahub/sources/madrigal/isr/eiscat/__init__.py CHANGED
@@ -134,16 +134,17 @@ class Dataset(datahub.DatasetSourced):
  self.affiliation = load_obj.metadata['affiliation']
  self.metadata = load_obj.metadata

- inds_cmb = np.argsort(self['DATETIME'].flatten())
- if any(np.diff(np.array(inds_cmb))<0):
- for var_name in self.keys():
- self[var_name].value = self[var_name].value[inds_cmb, :]
-
  if self.add_AACGM or self.add_APEX:
  self.calc_lat_lon()
  # self.select_beams(field_aligned=True)
  if self.time_clip:
  self.time_filter_by_range()
+
+ inds_cmb = np.argsort(self['DATETIME'].flatten())
+ if any(np.diff(np.array(inds_cmb)) < 0):
+ for var_name in self.keys():
+ self[var_name].value = self[var_name].value[inds_cmb, :]
+
  if self.status_control:
  self.status_mask()
  if self.residual_control:
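The chronological re-sort of the combined EISCAT records is now applied after time clipping rather than before it. The operation itself is a plain argsort-based reindex over the leading (time) axis; a small self-contained illustration:

# Self-contained illustration of the argsort-based re-ordering applied per variable.
import numpy as np

times = np.array([3.0, 1.0, 2.0])              # e.g. DATETIME after joining several files
values = np.array([[30.], [10.], [20.]])       # any variable sharing the same leading axis

order = np.argsort(times)
if any(np.diff(order) < 0):                    # only reindex when records are out of order
    times = times[order]
    values = values[order, :]

print(times)             # [1. 2. 3.]
print(values.ravel())    # [10. 20. 30.]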
geospacelab/datahub/sources/madrigal/isr/millstonehill/basic/__init__.py CHANGED
@@ -43,7 +43,7 @@ default_dataset_attrs = {
  }

  default_variable_names = [
- 'DATETIME', 'AZ', 'AZ1', 'AZ2', 'EL', 'EL1', 'EL2', 'PULSE_LENGTH',
+ 'DATETIME', 'DATETIME_1', 'DATETIME_2', 'AZ', 'AZ1', 'AZ2', 'EL', 'EL1', 'EL2', 'PULSE_LENGTH',
  'T_SYS', 'POWER_NORM', 'P_Tx', 'MODE_TYPE', 'POWER_LENGTH_F',
  'LAG_SPACING', 'IPP', 'f_Tx', 'v_PHASE_Tx', 'v_PHASE_Tx_err',
  'SCAN_TYPE', 'CYCN', 'POSN', 'RANGE_RES', 'RANGE',
@@ -52,7 +52,7 @@ default_variable_names = [
  'n_e_err', 'T_i', 'T_i_err', 'T_r', 'T_r_err', 'T_e', 'T_e_err',
  'nu_i', 'nu_i_err', 'v_i_los', 'v_i_los_err', 'comp_H_p',
  'comp_H_p_err', 'comp_mix', 'comp_mix_err', 'v_DOP_los', 'v_DOP_los_err',
- 'HEIGHT'
+ 'HEIGHT',
  ]

  # default_data_search_recursive = True
geospacelab/datahub/sources/madrigal/isr/millstonehill/basic/loader.py CHANGED
@@ -29,6 +29,9 @@ antenna_code_dict = {
  'misa': 31,
  }

+ antenna_code_dict_r = {v: k for k, v in antenna_code_dict.items()}
+ pulse_code_dict_r = {v: k for k, v in pulse_code_dict.items()}
+
  var_name_dict = {
  'AZ1': 'az1',
  'AZ2': 'az2',
@@ -98,7 +101,200 @@ class Loader:

  self.done = False
  if load_data:
- self.load()
+ self.load_from_table_layout()
+
+
+ def load_from_table_layout(self):
+ variables = {}
+
+ with h5py.File(self.file_path, 'r') as fh5:
+ table = fh5['Data']['Table Layout'][::]
+ table_dtype = table.dtype
+
+ # Get variable names
+ table_var_names = list(table_dtype.fields.keys())
+ # Get table data
+ table_data = list(zip(*table))
+ table_vars = {k: np.array(table_data[table_var_names.index(k)]) for k in table_var_names}
+ # Check modes
+ antenna_codes = table_vars['kinst'] = table_vars['kinst'].astype(np.int32)
+ pulse_lengths = table_vars['pl'] = table_vars['pl'] * 1e6
+ pulse_codes = table_vars['mdtyp'] = table_vars['mdtyp'].astype(np.int32)
+ modes_ = list(zip(antenna_codes, pulse_codes, pulse_lengths))
+ modes = np.empty(len(modes_), dtype=object)
+ modes[:] = modes_
+ modes_unique = self.get_modes(modes)
+
+ mode = self.validate_mode(modes_unique)
+
+ inds_mode = np.where((antenna_codes==mode[0]) & (pulse_codes==mode[1]) & (pulse_lengths==mode[2]) )[0]
+
+ self.metadata['antenna'] = antenna_code_dict_r[mode[0]]
+ self.metadata['pulse_code'] = pulse_code_dict_r[mode[1]]
+ self.metadata['pulse_length'] = mode[2]
+
+ # Grid data
+ ut_unix_1 = table_vars['ut1_unix'][inds_mode]
+ ut_unix_1_unique = np.unique(ut_unix_1)
+ num_ut = len(ut_unix_1_unique)
+ vars_fh5 = {}
+ if len(ut_unix_1) % num_ut == 0:
+ num_gate = int(len(ut_unix_1) / num_ut)
+
+ for k, v in table_vars.items():
+ vars_fh5[k] = v[inds_mode].reshape((num_ut, num_gate))
+ else:
+ gate_inds = []
+ gate_nums = []
+ for t in ut_unix_1_unique:
+ ii = np.where(ut_unix_1==t)[0]
+ ran = table_vars['range'][inds_mode][ii]
+ diff_ran = np.diff(ran)
+ if any(diff_ran < 0): # duplicated ranges
+ iii = range(np.where(diff_ran<0)[0][0]+1)
+ ii = ii[iii]
+ gate_inds.append(ii)
+
+ gate_nums.append(len(gate_inds[-1]))
+ max_gate_num = np.max(gate_nums)
+
+ for k, v in table_vars.items():
+ vars_fh5[k] = np.empty((num_ut, max_gate_num))
+ vars_fh5[k][::] = np.nan
+ for i, inds in enumerate(gate_inds):
+ vars_fh5[k][i, 0:len(inds)] = v[inds_mode][inds]
+
+ # Assign data
+ records = fh5['Metadata']['_record_layout'][0]
+ rec_var_names = np.array(list(records.dtype.fields.keys()))
+ rec_vars = {str(rec_var_names[i]): int(records[i]) for i in range(len(rec_var_names))}
+ for var_name, var_name_fh5 in var_name_dict.items():
+ if var_name_fh5 not in vars_fh5.keys():
+ mylog.StreamLogger.warning(f"The requested variable {var_name_fh5} does not exist in the data file!")
+ variables[var_name] = None
+ continue
+ if rec_vars[var_name_fh5] == 1:
+ variables[var_name] = vars_fh5[var_name_fh5][:, 0][:, np.newaxis]
+ else:
+ variables[var_name] = vars_fh5[var_name_fh5]
+
+ variables['comp_O_p'] = 1. - variables['comp_mix'] - variables['comp_H_p']
+ variables['comp_O_p_err'] = np.sqrt(variables['comp_mix_err']**2 + variables['comp_H_p_err']**2)
+
+ # need to be check when AZ close to 0.
+ variables['AZ1'] = variables['AZ1'] % 360.
+ variables['AZ2'] = variables['AZ2'] % 360.
+ variables['AZ'] = (variables['AZ1'] + variables['AZ2']) / 2
+ diff_az = np.abs(variables['AZ1'] - variables['AZ2'])
+ variables['AZ'] = np.where(diff_az<180, variables['AZ'], ((variables['AZ1'] + variables['AZ2'] + 360) / 2) % 360)
+ variables['EL'] = (variables['EL1'] + variables['EL2']) / 2
+
+ variables['DATETIME_1'] = dttool.convert_unix_time_to_datetime_cftime(vars_fh5['ut1_unix'][:, 0])[:, np.newaxis]
+ variables['DATETIME_2'] = dttool.convert_unix_time_to_datetime_cftime(vars_fh5['ut2_unix'][:, 0])[:, np.newaxis]
+ variables['DATETIME'] = variables['DATETIME_1'] + (variables['DATETIME_2'] - variables['DATETIME_1']) / 2
+ variables['T_e'] = variables['T_i'] * variables['T_r']
+ variables['T_e_err'] = variables['T_e'] * np.sqrt((variables['T_i_err'] / variables['T_i']) ** 2
+ + (variables['T_r_err'] / variables['T_r']) ** 2)
+ self.variables = variables
+
+ return
+
+ def validate_mode(self, modes_unique):
+ mode = None
+
+ # Check antenna
+ modes_antenna_matched = []
+ try:
+ for m in modes_unique:
+ if m[0] == antenna_code_dict[self.antenna]:
+ modes_antenna_matched.append(m)
+ if len(modes_antenna_matched) == 0:
+ raise KeyError
+ elif len(modes_antenna_matched) == 1:
+ if str(self.pulse_code):
+ if pulse_code_dict[self.pulse_code] != modes_antenna_matched[0][1]:
+ raise ValueError
+ if self.pulse_length > 0:
+ if self.pulse_length != modes_antenna_matched[0][2]:
+ raise ValueError
+ return modes_antenna_matched[0]
+ else:
+ pass
+ except Exception as e:
+ if str(self.antenna):
+ mylog.StreamLogger.error("Antenna {} is not found!".format(self.antenna.upper()))
+ else:
+ mylog.StreamLogger.error("Antenna must be specified!")
+ self.list_modes()
+ return None
+
+ modes_pulse_code_matched = []
+ try:
+ for m in modes_antenna_matched:
+ if m[1] == pulse_code_dict[self.pulse_code]:
+ modes_pulse_code_matched.append(m)
+ if len(modes_pulse_code_matched) == 0:
+ raise KeyError
+ elif len(modes_pulse_code_matched) == 1:
+ if self.pulse_length > 0:
+ if self.pulse_length != modes_antenna_matched[0][2]:
+ raise ValueError
+ return modes_pulse_code_matched[0]
+ else:
+ pass
+ except Exception as e:
+ if str(self.pulse_code):
+ mylog.StreamLogger.error("Pulse code {} is not found!".format(self.pulse_code.upper()))
+ else:
+ mylog.StreamLogger.error("Pulse code must be specified!")
+ self.list_modes()
+ return None
+
+ modes_pulse_length_matched = []
+ try:
+ for m in modes_pulse_code_matched:
+ if m[2] == self.pulse_length:
+ modes_pulse_length_matched.append(m)
+ if len(modes_pulse_length_matched) == 0:
+ raise ValueError
+ elif len(modes_pulse_length_matched) == 1:
+ return modes_pulse_length_matched[0]
+ else:
+ mylog.StreamLogger.error("Multiple modes found!")
+ raise ValueError
+ except Exception as e:
+ if self.pulse_length > 0:
+ mylog.StreamLogger.error("Pulse length {} is not found!".format(self.pulse_code.upper()))
+ else:
+ mylog.StreamLogger.error("Pulse length must be specified!")
+ self.list_modes()
+ return None
+ return mode
+
+ def list_modes(self):
+ mylog.simpleinfo.info("List of the experiment modes:")
+
+ for i, m in enumerate(self.metadata['MODES']):
+ s = '{}, '.format(i)
+ for k, v in m.items():
+ s = s + "{}: {}, ".format(k, v)
+ mylog.simpleinfo.info(s)
+ return
+ def get_modes(self, modes):
+
+ modes_unique = np.unique(modes)
+
+ self.metadata['MODES'] = []
+ num_modes = self.metadata['NUM_MODES'] = len(modes_unique)
+ for i in range(num_modes):
+ self.metadata['MODES'].append({
+ 'ANTENNA_ID': modes_unique[i][0],
+ 'ANTENNA': antenna_code_dict_r[modes_unique[i][0]],
+ 'PULSE_CODE_ID': modes_unique[i][1],
+ 'PULSE_CODE': pulse_code_dict_r[modes_unique[i][1]],
+ 'PULSE_LENGTH': modes_unique[i][2]
+ })
+ return modes_unique

  def load(self):
  variables = {}
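The new load_from_table_layout() reads the Madrigal "Table Layout" dataset, a flat HDF5 compound array with one row per (time, range-gate) sample, and regroups it into 2-D arrays for the selected experiment mode. For reference, the per-column extraction can also be written with NumPy's structured-array indexing; a sketch assuming the group, dataset, and field names shown above (the file path is hypothetical):

# Sketch: pull named columns out of the Madrigal "Table Layout" compound dataset.
import h5py
import numpy as np

with h5py.File('mlh_example.hdf5', 'r') as fh5:                    # hypothetical file
    table = fh5['Data']['Table Layout'][()]                        # numpy structured array

columns = {name: np.asarray(table[name]) for name in table.dtype.names}
antenna_codes = columns['kinst'].astype(np.int32)
pulse_lengths = columns['pl'] * 1e6                                # scaled by 1e6, as in the loader
pulse_codes = columns['mdtyp'].astype(np.int32)
modes = set(zip(antenna_codes, pulse_codes, pulse_lengths))
print(sorted(modes))   # one (antenna, pulse code, pulse length) tuple per experiment mode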
@@ -182,8 +378,12 @@ class Loader:
  variables['comp_O_p_err'] = np.sqrt(variables['comp_mix_err']**2 + variables['comp_H_p_err']**2)

  # need to be check when AZ close to 0.
- variables['AZ'] = variables['AZ1'] % 360.
- variables['EL'] = variables['EL1']
+ variables['AZ1'] = variables['AZ1'] % 360.
+ variables['AZ2'] = variables['AZ2'] % 360.
+ variables['AZ'] = (variables['AZ1'] + variables['AZ2']) / 2
+ diff_az = np.abs(variables['AZ1'] - variables['AZ2'])
+ variables['AZ'] = np.where(diff_az<180, variables['AZ'], ((variables['AZ1'] + variables['AZ2'] + 360) / 2) % 360)
+ variables['EL'] = (variables['EL1'] + variables['EL2']) / 2

  variables['RANGE'] = np.tile(vars_fh5['range'], [variables['n_e'].shape[0], 1])
  variables['DATETIME'] = dttool.convert_unix_time_to_datetime_cftime(vars_fh5['timestamps'])
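Both load_from_table_layout() and load() now derive the pointing direction from the start/end angles (AZ1/AZ2, EL1/EL2) rather than from AZ1/EL1 alone, and the azimuth average is corrected when the pair straddles the 0/360 boundary (the in-code comment notes this still needs checking near 0 degrees). A self-contained rendering of that wrap-aware average:

# Self-contained rendering of the wrap-aware azimuth average used above.
import numpy as np

def mean_azimuth(az1, az2):
    az1 = np.asarray(az1) % 360.
    az2 = np.asarray(az2) % 360.
    plain = (az1 + az2) / 2
    wrapped = ((az1 + az2 + 360) / 2) % 360          # used when the pair straddles 0/360
    return np.where(np.abs(az1 - az2) < 180, plain, wrapped)

print(mean_azimuth([10., 350.], [30., 10.]))         # [20.  0.] -- second pair crosses north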
@@ -194,8 +394,3 @@ class Loader:
  self.variables = variables
  self.metadata = metadata

-
- if __name__ == "__main__":
- import pathlib
- fp = pathlib.Path("/home/lei/afys-data/Madrigal/Millstone_ISR/2016/20160314/Millstone_ISR_combined_20160314.005.hdf5")
- Loader(file_path=fp)
geospacelab/datahub/sources/madrigal/isr/millstonehill/basic/variable_config.py CHANGED
@@ -87,7 +87,7 @@ axis[1].data = "@d.HEIGHT.value"
  axis[2].data = "@v.value"
  axis[1].label = 'h'
  axis[1].unit = 'km'
- axis[2].lim = [8e9, 9e11]
+ axis[2].lim = [8e9, 3e12]
  axis[2].scale = 'log'
  axis[2].label = '@v.label'
  axis[2].unit = '@v.unit_label'