geospacelab 0.11.4__py3-none-any.whl → 0.12.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Files changed (63)
  1. geospacelab/__init__.py +1 -1
  2. geospacelab/datahub/sources/cdaweb/dmsp/ssusi/downloader.py +103 -0
  3. geospacelab/datahub/sources/cdaweb/dmsp/ssusi/edr_aur/__init__.py +17 -7
  4. geospacelab/datahub/sources/cdaweb/dmsp/ssusi/edr_aur/downloader.py +13 -62
  5. geospacelab/datahub/sources/cdaweb/dmsp/ssusi/sdr_disk/__init__.py +317 -0
  6. geospacelab/datahub/sources/cdaweb/dmsp/ssusi/sdr_disk/downloader.py +44 -0
  7. geospacelab/datahub/sources/cdaweb/dmsp/ssusi/sdr_disk/loader.py +198 -0
  8. geospacelab/datahub/sources/cdaweb/dmsp/ssusi/sdr_disk/variable_config.py +149 -0
  9. geospacelab/datahub/sources/cdaweb/downloader.py +396 -97
  10. geospacelab/datahub/sources/cdaweb/downloader_backup.py +93 -0
  11. geospacelab/datahub/sources/cdaweb/omni/__init__.py +26 -14
  12. geospacelab/datahub/sources/cdaweb/omni/downloader.py +97 -84
  13. geospacelab/datahub/sources/esa_eo/swarm/advanced/efi_tct02/__init__.py +1 -1
  14. geospacelab/datahub/sources/esa_eo/swarm/advanced/efi_tct02/downloader.py +1 -1
  15. geospacelab/datahub/sources/gfz/hpo/__init__.py +1 -1
  16. geospacelab/datahub/sources/gfz/hpo/variable_config.py +3 -1
  17. geospacelab/datahub/sources/madrigal/isr/pfisr/fitted/loader.py +1 -1
  18. geospacelab/datahub/sources/madrigal/satellites/dmsp/downloader.py +2 -1
  19. geospacelab/datahub/sources/tud/champ/dns_acc/__init__.py +24 -7
  20. geospacelab/datahub/sources/tud/champ/dns_acc/downloader.py +29 -36
  21. geospacelab/datahub/sources/tud/champ/dns_acc/loader.py +28 -2
  22. geospacelab/datahub/sources/tud/champ/wnd_acc/__init__.py +68 -10
  23. geospacelab/datahub/sources/tud/champ/wnd_acc/downloader.py +29 -36
  24. geospacelab/datahub/sources/tud/champ/wnd_acc/loader.py +36 -7
  25. geospacelab/datahub/sources/tud/champ/wnd_acc/variable_config.py +3 -3
  26. geospacelab/datahub/sources/tud/downloader.py +288 -113
  27. geospacelab/datahub/sources/tud/goce/dns_acc/__init__.py +354 -0
  28. geospacelab/datahub/sources/tud/goce/dns_acc/downloader.py +42 -0
  29. geospacelab/datahub/sources/tud/goce/dns_acc/loader.py +66 -0
  30. geospacelab/datahub/sources/tud/goce/dns_acc/variable_config.py +139 -0
  31. geospacelab/datahub/sources/tud/goce/dns_wnd_acc/__init__.py +3 -3
  32. geospacelab/datahub/sources/tud/goce/dns_wnd_acc_v01/__init__.py +339 -0
  33. geospacelab/datahub/sources/tud/goce/dns_wnd_acc_v01/downloader.py +42 -0
  34. geospacelab/datahub/sources/tud/goce/dns_wnd_acc_v01/loader.py +84 -0
  35. geospacelab/datahub/sources/tud/goce/dns_wnd_acc_v01/variable_config.py +212 -0
  36. geospacelab/datahub/sources/tud/goce/wnd_acc/__init__.py +339 -0
  37. geospacelab/datahub/sources/tud/goce/wnd_acc/downloader.py +42 -0
  38. geospacelab/datahub/sources/tud/goce/wnd_acc/loader.py +65 -0
  39. geospacelab/datahub/sources/tud/goce/wnd_acc/variable_config.py +188 -0
  40. geospacelab/datahub/sources/tud/grace/dns_acc/__init__.py +6 -3
  41. geospacelab/datahub/sources/tud/grace/dns_acc/downloader.py +29 -37
  42. geospacelab/datahub/sources/tud/grace/wnd_acc/__init__.py +21 -4
  43. geospacelab/datahub/sources/tud/grace/wnd_acc/downloader.py +29 -39
  44. geospacelab/datahub/sources/tud/grace/wnd_acc/loader.py +5 -1
  45. geospacelab/datahub/sources/tud/grace/wnd_acc/variable_config.py +74 -0
  46. geospacelab/datahub/sources/tud/grace_fo/dns_acc/__init__.py +6 -3
  47. geospacelab/datahub/sources/tud/grace_fo/dns_acc/downloader.py +35 -40
  48. geospacelab/datahub/sources/tud/grace_fo/wnd_acc/__init__.py +20 -4
  49. geospacelab/datahub/sources/tud/grace_fo/wnd_acc/downloader.py +29 -44
  50. geospacelab/datahub/sources/tud/grace_fo/wnd_acc/loader.py +4 -0
  51. geospacelab/datahub/sources/tud/grace_fo/wnd_acc/variable_config.py +73 -0
  52. geospacelab/datahub/sources/tud/swarm/dns_acc/__init__.py +27 -5
  53. geospacelab/datahub/sources/tud/swarm/dns_acc/downloader.py +29 -38
  54. geospacelab/datahub/sources/tud/swarm/dns_pod/__init__.py +24 -5
  55. geospacelab/datahub/sources/tud/swarm/dns_pod/downloader.py +29 -38
  56. geospacelab/datahub/sources/tud/swarm/dns_pod/loader.py +3 -0
  57. geospacelab/datahub/sources/wdc/asysym/downloader.py +2 -2
  58. geospacelab/visualization/mpl/panels.py +7 -3
  59. {geospacelab-0.11.4.dist-info → geospacelab-0.12.0.dist-info}/METADATA +1 -1
  60. {geospacelab-0.11.4.dist-info → geospacelab-0.12.0.dist-info}/RECORD +63 -45
  61. {geospacelab-0.11.4.dist-info → geospacelab-0.12.0.dist-info}/WHEEL +1 -1
  62. {geospacelab-0.11.4.dist-info → geospacelab-0.12.0.dist-info}/licenses/LICENSE +0 -0
  63. {geospacelab-0.11.4.dist-info → geospacelab-0.12.0.dist-info}/top_level.txt +0 -0
geospacelab/__init__.py CHANGED
@@ -6,7 +6,7 @@ __author__ = "Lei Cai"
  __copyright__ = "Copyright 2021, GeospaceLAB"
  __credits__ = ["Lei Cai"]
  __license__ = "BSD-3-Clause License"
- __version__ = "0.11.4"
+ __version__ = "0.12.0"
  __maintainer__ = "Lei Cai"
  __email__ = "lei.cai@oulu.fi"
  __status__ = "Developing"
geospacelab/datahub/sources/cdaweb/dmsp/ssusi/downloader.py ADDED
@@ -0,0 +1,103 @@
+ import datetime
+ import pathlib
+ import copy
+
+ import geospacelab.toolbox.utilities.pydatetime as dttool
+ import geospacelab.toolbox.utilities.pylogging as mylog
+ from geospacelab.config import prf
+ from geospacelab.datahub.sources.cdaweb.downloader import CDAWebHTTPDownloader as DownloaderBase
+
+
+ class Downloader(DownloaderBase):
+
+     def __init__(
+             self,
+             dt_fr=None, dt_to=None,
+             sat_id=None,
+             orbit_id=None,
+             product='EDR_AUR',
+             direct_download=True,
+             force_download=False,
+             dry_run=False,
+             root_dir_local = None,
+     ):
+
+         self.sat_id = sat_id
+         self.orbit_id = orbit_id
+         self.product = product
+         if root_dir_local is None:
+             root_dir_local = prf.datahub_data_root_dir / 'CDAWeb' / 'DMSP' / 'SSUSI' / self.product.upper
+         else:
+             root_dir_local = pathlib.Path(root_dir_local)
+         root_dir_remote = '/'.join([
+             self.root_dir_remote, 'dmsp', 'dmsp'+self.sat_id.lower(), 'ssusi', 'data', product_dict[self.product]
+         ])
+         super().__init__(
+             dt_fr, dt_to,
+             root_dir_local=root_dir_local,
+             root_dir_remote=root_dir_remote,
+             direct_download=direct_download,
+             force_download=force_download,
+             dry_run=dry_run,
+         )
+
+
+     def search_from_http(self, *args, **kwargs):
+
+         dt_fr_1 = self.dt_fr - datetime.timedelta(hours=3)
+         dt_to_1 = self.dt_to + datetime.timedelta(hours=3)
+         diff_days = dttool.get_diff_days(dt_fr_1, dt_to_1)
+         dt0 = dttool.get_start_of_the_day(dt_fr_1)
+         file_paths_remote = []
+         for nd in range(diff_days + 1):
+             this_day = dt0 + datetime.timedelta(days=nd)
+             doy = dttool.get_doy(this_day)
+             sdoy = '{:03d}'.format(doy)
+             subdirs = [str(this_day.year), sdoy]
+
+             if self.orbit_id is None:
+                 file_name_patterns = [
+                     'dmsp' + self.sat_id.lower(),
+                     'ssusi',
+                     product_dict[self.product],
+                     this_day.strftime("%Y") + sdoy + 'T',
+                     '.nc'
+                 ]
+             else:
+                 file_name_patterns = [
+                     'dmsp' + self.sat_id.lower(),
+                     'ssusi',
+                     product_dict[self.product],
+                     'REV',
+                     self.orbit_id,
+                     '.nc'
+                 ]
+             paths = super().search_from_http(subdirs=subdirs, file_name_patterns=file_name_patterns)
+             file_paths_remote.extend(paths)
+         return file_paths_remote
+
+     def save_files_from_http(self, file_paths_local=None, root_dir_remote=None):
+         if file_paths_local is None:
+             file_paths_local = []
+             for fp_remote in self.file_paths_remote:
+                 sy = fp_remote.split('/')[-3]
+                 sdoy = fp_remote.split('/')[-2]
+                 year = int(sy)
+                 this_day = dttool.convert_doy_to_datetime(year, int(sdoy))
+                 file_dir_local = self.root_dir_local / self.sat_id.upper() / sy / this_day.strftime("%Y%m%d")
+                 file_paths_local.append(file_dir_local / fp_remote.split('/')[-1])
+         return super().save_files_from_http(file_paths_local, root_dir_remote)
+
+
+ product_dict = {
+     'EDR_AUR': 'edr-aurora',
+     'EDR_DAY_LIMB': 'edr-day-limb',
+     'EDR_IONO': 'edr-iono',
+     'EDR_NIGHT_LIMB': 'edr-night-limb',
+     'L1B': 'l1b',
+     'SDR_DISK': 'sdr-disk',
+     'SDR_LIMB': 'sdr-limb',
+     'SDR2_DISK': 'sdr2-disk',
+     'SPECT_L1B': 'spect-l1b',
+ }
+
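For reference, the new shared SSUSI downloader can be driven on its own with any of the product keys in product_dict above; a minimal sketch is shown below (the argument values are illustrative, and the exact semantics of direct_download, force_download and dry_run come from the CDAWebHTTPDownloader base class rather than from this diff):

    import datetime
    from geospacelab.datahub.sources.cdaweb.dmsp.ssusi.downloader import Downloader

    downloader = Downloader(
        dt_fr=datetime.datetime(2011, 1, 6),
        dt_to=datetime.datetime(2011, 1, 6, 12),
        sat_id='F17',          # DMSP satellite ID, lower-cased for the remote path
        product='SDR_DISK',    # any key of product_dict, mapped to 'sdr-disk' on the server
        orbit_id=None,         # None searches all orbits within the interval (padded by 3 hours)
        force_download=False,
        dry_run=False,
    )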
geospacelab/datahub/sources/cdaweb/dmsp/ssusi/edr_aur/__init__.py CHANGED
@@ -53,8 +53,10 @@ class Dataset(datahub.DatasetSourced):
          self.database = kwargs.pop('database', 'CDAWeb')
          self.facility = kwargs.pop('facility', 'DMSP')
          self.instrument = kwargs.pop('instrument', 'SSUSI')
-         self.product = kwargs.pop('product', 'EDR-EUR')
+         self.product = kwargs.pop('product', 'EDR_AUR')
          self.allow_download = kwargs.pop('allow_download', True)
+         self.force_download = kwargs.pop('force_download', False)
+         self.download_dry_run = kwargs.pop('download_dry_run', False)

          self.sat_id = kwargs.pop('sat_id', '')
          self.orbit_id = kwargs.pop('orbit_id', None)
@@ -149,14 +151,20 @@ class Dataset(datahub.DatasetSourced):

              if self.orbit_id is not None:
                  file_patterns.extend(['REV', self.orbit_id])
-                 multiple_files = False
-             else:
-                 multiple_files = True
+
              # remove empty str
              file_patterns = [pattern for pattern in file_patterns if str(pattern)]

              search_pattern = '*' + '*'.join(file_patterns) + '*'

+             if self.orbit_id is not None:
+                 multiple_files = False
+             else:
+                 fp_log = initial_file_dir / (self.product.upper() + '.full.log')
+                 if not fp_log.is_file():
+                     self.download_data(dt_fr=thisday, dt_to=thisday)
+                 multiple_files = True
+
              done = super().search_data_files(
                  initial_file_dir=initial_file_dir,
                  search_pattern=search_pattern,
@@ -167,7 +175,7 @@ class Dataset(datahub.DatasetSourced):

              # Validate file paths

-             if not done and self.allow_download:
+             if (not done and self.allow_download) or (self.force_download):
                  done = self.download_data(dt_fr=thisday, dt_to=thisday)
                  if done:
                      done = super().search_data_files(
@@ -186,9 +194,11 @@ class Dataset(datahub.DatasetSourced):
          download_obj = self.downloader(
              dt_fr, dt_to,
              orbit_id=self.orbit_id, sat_id=self.sat_id,
-             data_file_root_dir=self.data_root_dir
+             root_dir_local=self.data_root_dir,
+             force_download=self.force_download,
+             dry_run=self.download_dry_run,
          )
-         return download_obj.done
+         return any(download_obj.done)

      @property
      def database(self):
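The new force_download and download_dry_run keywords are popped in __init__ alongside the existing dataset attributes and forwarded to the downloader; a minimal sketch of how they might be passed when the EDR_AUR dataset is created directly (the time-range keywords are assumed to be handled by the DatasetSourced base class):

    import datetime
    from geospacelab.datahub.sources.cdaweb.dmsp.ssusi.edr_aur import Dataset

    ds_aur = Dataset(
        dt_fr=datetime.datetime(2011, 1, 6),      # assumed: consumed by the DatasetSourced base class
        dt_to=datetime.datetime(2011, 1, 6, 12),
        sat_id='F17',
        orbit_id='21523',
        force_download=True,      # re-download even if matching local files exist
        download_dry_run=False,   # forwarded to the downloader as dry_run
    )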
geospacelab/datahub/sources/cdaweb/dmsp/ssusi/edr_aur/downloader.py CHANGED
@@ -5,7 +5,7 @@ import copy
  import geospacelab.toolbox.utilities.pydatetime as dttool
  import geospacelab.toolbox.utilities.pylogging as mylog
  from geospacelab.config import prf
- from geospacelab.datahub.sources.cdaweb.downloader import Downloader as DownloaderBase
+ from geospacelab.datahub.sources.cdaweb.dmsp.ssusi.downloader import Downloader as DownloaderBase


  class Downloader(DownloaderBase):
@@ -17,70 +17,20 @@ class Downloader(DownloaderBase):
              orbit_id=None,
              direct_download=True,
              force_download=False,
-             data_file_root_dir = None,
              dry_run=False,
+             root_dir_local = None,
      ):
-         product = 'EDR_AUR'
-         if data_file_root_dir is None:
-             data_file_root_dir = prf.datahub_data_root_dir / 'CDAWeb' / 'DMSP' / 'SSUSI' / product
-         self.sat_id = sat_id
-         self.orbit_id = orbit_id
-         self.source_subdirs = ['dmsp', 'dmsp'+self.sat_id.lower(), 'ssusi', 'data', 'edr-aurora']
-
+
          super().__init__(
              dt_fr, dt_to,
-             data_file_root_dir=data_file_root_dir,
-             direct_download=direct_download,force_download=force_download,dry_run=dry_run
-         )
-
-
-     def search_from_http(self, file_name_patterns=None, allow_multiple_files=True):
-
-         dt_fr_1 = self.dt_fr - datetime.timedelta(hours=3)
-         dt_to_1 = self.dt_to + datetime.timedelta(hours=3)
-         diff_days = dttool.get_diff_days(dt_fr_1, dt_to_1)
-         dt0 = dttool.get_start_of_the_day(dt_fr_1)
-         source_file_paths = []
-         for nd in range(diff_days + 1):
-             this_day = dt0 + datetime.timedelta(days=nd)
-             doy = dttool.get_doy(this_day)
-             sdoy = '{:03d}'.format(doy)
-             subdirs = copy.deepcopy(self.source_subdirs)
-             subdirs.extend(
-                 [str(this_day.year), sdoy]
-             )
-
-             if self.orbit_id is None:
-                 file_name_patterns = [
-                     'dmsp' + self.sat_id.lower(),
-                     'ssusi',
-                     'edr-aurora',
-                     this_day.strftime("%Y") + sdoy + 'T',
-                     '.nc'
-                 ]
-             else:
-                 file_name_patterns = [
-                     'dmsp' + self.sat_id.lower(),
-                     'ssusi',
-                     'edr-aurora',
-                     'REV',
-                     self.orbit_id,
-                     '.nc'
-                 ]
-             paths = super().search_from_http(subdirs=subdirs, file_name_patterns=file_name_patterns)
-             source_file_paths.extend(paths)
-         return source_file_paths
-
-     def save_file_from_http(self, url, file_dir=None, file_name=None):
-
-         sy = url.split('/')[-3]
-         sdoy = url.split('/')[-2]
-         year = int(sy)
-         this_day = dttool.convert_doy_to_datetime(year, int(sdoy))
-         if file_dir is None:
-             file_dir = self.data_file_root_dir / self.sat_id.upper() / sy / this_day.strftime("%Y%m%d")
-         super().save_file_from_http(url, file_dir=file_dir)
-
+             sat_id=sat_id,
+             orbit_id=orbit_id,
+             product='EDR_AUR',
+             root_dir_local=root_dir_local,
+             direct_download=direct_download,
+             force_download=force_download,
+             dry_run=dry_run,
+         )


  if __name__ == "__main__":
@@ -88,6 +38,7 @@ if __name__ == "__main__":
          dt_fr = datetime.datetime(2011, 1, 6),
          dt_to = datetime.datetime(2011, 1, 6, 12),
          sat_id='F17',
-         orbit_id='21523',
+         orbit_id=None, #'21523',
+         force_download=True,
          dry_run=False,
      )
geospacelab/datahub/sources/cdaweb/dmsp/ssusi/sdr_disk/__init__.py ADDED
@@ -0,0 +1,317 @@
+ # Licensed under the BSD 3-Clause License
+ # Copyright (C) 2021 GeospaceLab (geospacelab)
+ # Author: Lei Cai, Space Physics and Astronomy, University of Oulu
+
+ import datetime
+
+ import numpy as np
+
+ import geospacelab.datahub as datahub
+ from geospacelab.datahub import DatabaseModel, FacilityModel, InstrumentModel, ProductModel
+ from geospacelab.datahub.sources.jhuapl import jhuapl_database
+ from geospacelab.config import prf
+ import geospacelab.toolbox.utilities.pydatetime as dttool
+ import geospacelab.toolbox.utilities.pybasic as basic
+ import geospacelab.toolbox.utilities.pylogging as mylog
+ from geospacelab.datahub.sources.cdaweb.dmsp.ssusi.sdr_disk.loader import Loader as default_Loader
+ from geospacelab.datahub.sources.cdaweb.dmsp.ssusi.sdr_disk.downloader import Downloader as default_Downloader
+ import geospacelab.datahub.sources.cdaweb.dmsp.ssusi.sdr_disk.variable_config as var_config
+
+
+ default_dataset_attrs = {
+     'database': jhuapl_database,
+     'facility': 'DMSP',
+     'instrument': 'SSUSI',
+     'product': 'SDR_DISK',
+     'data_file_ext': 'nc',
+     'data_root_dir': prf.datahub_data_root_dir / 'CDAWeb' / 'DMSP' / 'SSUSI' / 'SDR_DISK',
+     'allow_load': True,
+     'allow_download': True,
+     'data_search_recursive': False,
+     'label_fields': ['database', 'facility', 'instrument', 'product'],
+     'time_clip': False,
+ }
+
+ default_variable_names = [
+     'STARTING_TIME', 'STOPPING_TIME', 'DATETIME',
+     'FILE_VERSION', 'DATA_PRODUCT_VERSION', 'SOFTWARE_VERSION_NUMBER', 'CALIBRATION_PERIOD_VERSION', 'EMISSION_SPECTRA',
+     'SC_DATETIME', 'SC_ORBIT_ID', 'SC_GEO_LAT', 'SC_GEO_LON', 'SC_GEO_ALT',
+     'DISK_GEO_LAT', 'DISK_GEO_LON', 'DISK_GEO_ALT', 'DISK_SZA', 'DISK_SAA',
+     'ACROSS_PIXEL_SIZE', 'ALONG_PIXEL_SIZE', 'EFFECTIVE_LOOK_ANGLE', 'EXPOSURE', 'SAA_COUNT', 'DARK_COUNT_CORRECTION',
+     'SCATTER_LIGHT_1216_CORRECTION', 'SCATTER_LIGHT_1304_CORRECTION', 'OVERLAP_1304_1356_CORRECTION', 'LONG_WAVE_SCATTER_CORRECTION',
+     'RED_LEAK_CORRECTION', 'DQI',
+     'DISK_COUNTS_1216', 'DISK_COUNTS_1304', 'DISK_COUNTS_1356', 'DISK_COUNTS_LBHS', 'DISK_COUNTS_LBHL',
+     'DISK_R_1216', 'DISK_R_1304', 'DISK_R_1356', 'DISK_R_LBHS', 'DISK_R_LBHL',
+     'DISK_R_RECT_1216', 'DISK_R_RECT_1304', 'DISK_R_RECT_1356', 'DISK_R_RECT_LBHS', 'DISK_R_RECT_LBHL',
+     'DISK_R_1216_ERROR', 'DISK_R_1304_ERROR', 'DISK_R_1356_ERROR', 'DISK_R_LBHS_ERROR', 'DISK_R_LBHL_ERROR',
+     'DISK_R_RECT_1216_ERROR', 'DISK_R_RECT_1304_ERROR', 'DISK_R_RECT_1356_ERROR', 'DISK_R_RECT_LBHS_ERROR', 'DISK_R_RECT_LBHL_ERROR',
+     'DISK_DECOMP_1216_ERROR', 'DISK_DECOMP_1304_ERROR', 'DISK_DECOMP_1356_ERROR', 'DISK_DECOMP_LBHS_ERROR', 'DISK_DECOMP_LBHL_ERROR',
+     'DISK_CALIB_1216_ERROR', 'DISK_CALIB_1304_ERROR', 'DISK_CALIB_1356_ERROR', 'DISK_CALIB_LBHS_ERROR', 'DISK_CALIB_LBHL_ERROR',
+     'DQI_1216', 'DQI_1304', 'DQI_1356', 'DQI_LBHS', 'DQI_LBHL'
+ ]
+
+ # default_data_search_recursive = True
+
+ default_attrs_required = ['sat_id', 'orbit_id', 'pp_type', 'pole']
+
+
+ class Dataset(datahub.DatasetSourced):
+     def __init__(self, **kwargs):
+         kwargs = basic.dict_set_default(kwargs, **default_dataset_attrs)
+
+         super().__init__(**kwargs)
+
+         self.database = kwargs.pop('database', 'JHUAPL')
+         self.facility = kwargs.pop('facility', 'DMSP')
+         self.instrument = kwargs.pop('instrument', 'SSUSI')
+         self.product = kwargs.pop('product', 'SDR_DISK')
+         self.allow_download = kwargs.pop('allow_download', True)
+         self.force_download = kwargs.pop('force_download', False)
+         self.download_dry_run = kwargs.pop('download_dry_run', False)
+
+         self.sat_id = kwargs.pop('sat_id', '')
+         self.orbit_id = kwargs.pop('orbit_id', None)
+         self.pole = kwargs.pop('pole', '')
+         self.pp_type = kwargs.pop('pp_type', '')
+
+         self.metadata = None
+
+         allow_load = kwargs.pop('allow_load', False)
+
+         # self.config(**kwargs)
+
+         if self.loader is None:
+             self.loader = default_Loader
+
+         if self.downloader is None:
+             self.downloader = default_Downloader
+
+         self._validate_attrs()
+
+         if allow_load:
+             self.load_data()
+
+     def _validate_attrs(self):
+         for attr_name in default_attrs_required:
+             attr = getattr(self, attr_name)
+             if not str(attr):
+                 mylog.StreamLogger.warning("The parameter {} is required before loading data!".format(attr_name))
+             if attr_name == 'orbit_id':
+                 if attr is None or attr == '':
+                     mylog.StreamLogger.warning("For a fast process, it's better to specify the orbit id.")
+
+     def label(self, **kwargs):
+         label = super().label()
+         return label
+
+     def load_data(self, **kwargs):
+         self.check_data_files(**kwargs)
+
+         self._set_default_variables(
+             default_variable_names,
+             configured_variables=var_config.configured_variables
+         )
+         for file_path in self.data_file_paths:
+             load_obj = self.loader(file_path, file_type=self.product.lower(), pole=self.pole, pp_type=self.pp_type)
+
+             for var_name in self._variables.keys():
+                 if var_name == 'EMISSION_SPECTRA':
+                     self._variables[var_name].value = load_obj.variables[var_name]
+                     continue
+                 if var_name in ['DATETIME', 'STARTING_TIME', 'STOPPING_TIME',
+                                 'FILE_VERSION', 'DATA_PRODUCT_VERSION', 'SOFTWARE_VERSION_NUMBER',
+                                 'CALIBRATION_PERIOD_VERSION']:
+                     value = np.array([load_obj.variables[var_name]])[np.newaxis, :]
+                 else:
+                     value = np.empty((1, ), dtype=object)
+                     value[0] = load_obj.variables[var_name]
+                 # value = np.array([[load_obj.variables[var_name]]], dtype=object)
+                 self._variables[var_name].join(value)
+
+         # self.orbit_id = load_obj.metadata['ORBIT_ID']
+         # self.select_beams(field_aligned=True)
+         if self.time_clip:
+             self.time_filter_by_range()
+
+     def get_time_ind(self, ut, time_res=20*60, var_datetime_name='DATETIME', edge_cutoff=False, **kwargs):
+         ind = super().get_time_ind(ut, time_res=time_res, var_datetime_name=var_datetime_name, edge_cutoff=edge_cutoff, **kwargs)
+         return ind
+
+     def regriddata(self, disk_data=None, disk_geo_lat=None, disk_geo_lon=None, *, across_res=20., interp_method='linear'):
+         from scipy.interpolate import interp1d, griddata
+
+         data_pts = disk_data.flatten()
+         ind_valid = np.where(np.isfinite(data_pts))[0]
+         xd = range(disk_data.shape[0])
+         yd = range(disk_data.shape[1])
+
+         # create the new grids
+         ps_across = self['ACROSS_PIXEL_SIZE'].value[0]
+         xx = range(disk_data.shape[0])
+         yy = []
+         for ind in range(disk_data.shape[1]-1):
+             ps1 = ps_across[ind]
+             ps2 = ps_across[ind+1]
+             n_insert = int(np.round((ps1 + ps2) / 2 / across_res) - 1)
+             yy.extend(np.linspace(ind, ind+1-0.001, n_insert + 2)[0: -1])
+         yy.extend([ind+1])
+         # grid_x, grid_y = np.meshgrid(xx, yy, indexing='ij')
+
+         factor = np.pi / 180.
+         sin_glat = np.sin(disk_geo_lat * factor)
+         itpf_sin = interp1d(yd, sin_glat, kind='cubic', bounds_error=False, fill_value='extrapolate')
+         sin_glat_i = itpf_sin(yy)
+         sin_glat_i[sin_glat_i>1.] = 1.
+         sin_glat_i[sin_glat_i<-1.] = -1.
+
+         cos_glat = np.cos(disk_geo_lat * factor)
+         itpf_cos = interp1d(yd, cos_glat, kind='cubic', bounds_error=False, fill_value='extrapolate')
+         cos_glat_i = itpf_cos(yy)
+         cos_glat_i[cos_glat_i>1.] = 1.
+         cos_glat_i[cos_glat_i<-1.] = -1.
+
+         rad = np.sign(sin_glat_i) * (np.pi / 2 - np.arcsin(cos_glat_i))
+         grid_lat = rad / factor
+
+         sin_glon = np.sin(disk_geo_lon * factor)
+         itpf_sin = interp1d(yd, sin_glon, kind='cubic', bounds_error=False, fill_value='extrapolate')
+         sin_glon_i = itpf_sin(yy)
+         sin_glon_i[sin_glon_i>1.] = 1.
+         sin_glon_i[sin_glon_i<-1.] = -1.
+
+         cos_glon = np.cos(disk_geo_lon * factor)
+         itpf_cos = interp1d(yd, cos_glon, kind='cubic', bounds_error=False, fill_value='extrapolate')
+         cos_glon_i = itpf_cos(yy)
+         cos_glon_i[cos_glon_i>1.] = 1.
+         cos_glon_i[cos_glon_i<-1.] = -1.
+
+         rad = np.sign(sin_glon_i) * (np.pi / 2 - np.arcsin(cos_glon_i))
+         grid_lon = rad / factor
+
+         itpf_data = interp1d(yd, disk_data, kind='linear', bounds_error=False, fill_value='extrapolate')
+         grid_data = itpf_data(yy)
+         return grid_lat, grid_lon, grid_data
+
+
+     def search_data_files(self, **kwargs):
+         dt_fr = self.dt_fr
+         if self.dt_to.hour > 22:
+             dt_to = self.dt_to + datetime.timedelta(days=1)
+         else:
+             dt_to = self.dt_to
+         diff_days = dttool.get_diff_days(dt_fr, dt_to)
+         dt0 = dttool.get_start_of_the_day(dt_fr)
+         for i in range(diff_days + 1):
+             thisday = dt0 + datetime.timedelta(days=i)
+             initial_file_dir = kwargs.pop('initial_file_dir', None)
+
+             if initial_file_dir is None:
+                 initial_file_dir = self.data_root_dir / self.sat_id.upper() / str(thisday.year) /thisday.strftime("%Y%m%d")
+
+             file_patterns = [
+                 'dmsp' + self.sat_id.lower(),
+                 'sdr-disk',
+                 thisday.strftime("%Y%j") + 'T',
+             ]
+
+             if self.orbit_id is not None:
+                 file_patterns.extend(['REV', self.orbit_id])
+
+             # remove empty str
+             file_patterns = [pattern for pattern in file_patterns if str(pattern)]
+
+             search_pattern = '*' + '*'.join(file_patterns) + '*'
+
+             if self.orbit_id is not None:
+                 multiple_files = False
+             else:
+                 fp_log = initial_file_dir / (self.product.upper() + '.full.log')
+                 if not fp_log.is_file():
+                     self.download_data(dt_fr=thisday, dt_to=thisday)
+                 multiple_files = True
+             done = super().search_data_files(
+                 initial_file_dir=initial_file_dir,
+                 search_pattern=search_pattern,
+                 allow_multiple_files=multiple_files,
+             )
+             if done and self.orbit_id is not None:
+                 return True
+
+             # Validate file paths
+
+             if (not done and self.allow_download) or (self.force_download):
+                 done = self.download_data(dt_fr=thisday, dt_to=thisday)
+                 if done:
+                     done = super().search_data_files(
+                         initial_file_dir=initial_file_dir,
+                         search_pattern=search_pattern,
+                         allow_multiple_files=multiple_files
+                     )
+
+         return done
+
+     def download_data(self, dt_fr=None, dt_to=None):
+         if dt_fr is None:
+             dt_fr = self.dt_fr
+         if dt_to is None:
+             dt_to = self.dt_to
+         download_obj = self.downloader(
+             dt_fr, dt_to,
+             orbit_id=self.orbit_id, sat_id=self.sat_id,
+             root_dir_local=self.data_root_dir,
+             force_download=self.force_download,
+             dry_run=self.download_dry_run,
+         )
+         return any(download_obj.done)
+
+     @property
+     def database(self):
+         return self._database
+
+     @database.setter
+     def database(self, value):
+         if isinstance(value, str):
+             self._database = DatabaseModel(value)
+         elif issubclass(value.__class__, DatabaseModel):
+             self._database = value
+         else:
+             raise TypeError
+
+     @property
+     def product(self):
+         return self._product
+
+     @product.setter
+     def product(self, value):
+         if isinstance(value, str):
+             self._product = ProductModel(value)
+         elif issubclass(value.__class__, ProductModel):
+             self._product = value
+         else:
+             raise TypeError
+
+     @property
+     def facility(self):
+         return self._facility
+
+     @facility.setter
+     def facility(self, value):
+         if isinstance(value, str):
+             self._facility = FacilityModel(value)
+         elif issubclass(value.__class__, FacilityModel):
+             self._facility = value
+         else:
+             raise TypeError
+
+     @property
+     def instrument(self):
+         return self._instrument
+
+     @instrument.setter
+     def instrument(self, value):
+         if isinstance(value, str):
+             self._instrument = InstrumentModel(value)
+         elif issubclass(value.__class__, InstrumentModel):
+             self._instrument = value
+         else:
+             raise TypeError
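In typical GeospaceLAB workflows this dataset would be docked through a DataHub rather than instantiated directly; below is a minimal sketch, assuming the module is registered under keys mirroring its package path (the datasource_contents list and the per-scan variable indexing are assumptions, not taken from this diff):

    import datetime
    import geospacelab.datahub as datahub

    dh = datahub.DataHub(datetime.datetime(2011, 1, 6, 8), datetime.datetime(2011, 1, 6, 12))
    ds_disk = dh.dock(
        datasource_contents=['cdaweb', 'dmsp', 'ssusi', 'sdr_disk'],   # assumed registry keys
        sat_id='F17', orbit_id='21523', pole='N', pp_type='',
    )

    # Regrid one scan of the LBHS radiance onto a finer cross-track grid
    # (across_res in the units of ACROSS_PIXEL_SIZE, assumed km):
    glat, glon, lbhs = ds_disk.regriddata(
        disk_data=ds_disk['DISK_R_LBHS'].value[0],
        disk_geo_lat=ds_disk['DISK_GEO_LAT'].value[0],
        disk_geo_lon=ds_disk['DISK_GEO_LON'].value[0],
        across_res=20.,
    )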
geospacelab/datahub/sources/cdaweb/dmsp/ssusi/sdr_disk/downloader.py ADDED
@@ -0,0 +1,44 @@
+ import datetime
+ import pathlib
+ import copy
+
+ import geospacelab.toolbox.utilities.pydatetime as dttool
+ import geospacelab.toolbox.utilities.pylogging as mylog
+ from geospacelab.config import prf
+ from geospacelab.datahub.sources.cdaweb.dmsp.ssusi.downloader import Downloader as DownloaderBase
+
+
+ class Downloader(DownloaderBase):
+
+     def __init__(
+             self,
+             dt_fr=None, dt_to=None,
+             sat_id=None,
+             orbit_id=None,
+             direct_download=True,
+             force_download=False,
+             dry_run=False,
+             root_dir_local = None,
+     ):
+
+         super().__init__(
+             dt_fr, dt_to,
+             sat_id=sat_id,
+             orbit_id=orbit_id,
+             product='SDR_DISK',
+             root_dir_local=root_dir_local,
+             direct_download=direct_download,
+             force_download=force_download,
+             dry_run=dry_run,
+         )
+
+
+ if __name__ == "__main__":
+     downloader = Downloader(
+         dt_fr = datetime.datetime(2011, 1, 6),
+         dt_to = datetime.datetime(2011, 1, 6, 12),
+         sat_id='F17',
+         orbit_id='21523',
+         force_download=True,
+         dry_run=False,
+     )