pypromice 1.5.3__py3-none-any.whl → 1.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pypromice might be problematic. Click here for more details.

Files changed (67)
  1. pypromice/__init__.py +2 -0
  2. pypromice/{qc → core/qc}/github_data_issues.py +22 -13
  3. pypromice/{qc → core/qc}/percentiles/compute_thresholds.py +2 -2
  4. pypromice/{qc → core/qc}/persistence.py +22 -29
  5. pypromice/{process → core/qc}/value_clipping.py +3 -3
  6. pypromice/core/resampling.py +142 -0
  7. pypromice/core/variables/__init__.py +1 -0
  8. pypromice/core/variables/air_temperature.py +64 -0
  9. pypromice/core/variables/gps.py +221 -0
  10. pypromice/core/variables/humidity.py +111 -0
  11. pypromice/core/variables/precipitation.py +108 -0
  12. pypromice/core/variables/pressure_transducer_depth.py +79 -0
  13. pypromice/core/variables/radiation.py +422 -0
  14. pypromice/core/variables/station_boom_height.py +75 -0
  15. pypromice/core/variables/station_pose.py +375 -0
  16. pypromice/io/bufr/__init__.py +0 -0
  17. pypromice/{postprocess → io/bufr}/bufr_to_csv.py +1 -1
  18. pypromice/{postprocess → io/bufr}/create_bufr_files.py +2 -2
  19. pypromice/{postprocess → io/bufr}/get_bufr.py +6 -6
  20. pypromice/{postprocess → io/bufr}/real_time_utilities.py +3 -3
  21. pypromice/io/ingest/__init__.py +0 -0
  22. pypromice/{utilities → io/ingest}/git.py +1 -3
  23. pypromice/io/ingest/l0.py +294 -0
  24. pypromice/io/ingest/l0_repository.py +103 -0
  25. pypromice/io/ingest/toa5.py +87 -0
  26. pypromice/{process → io}/write.py +1 -1
  27. pypromice/pipeline/L0toL1.py +291 -0
  28. pypromice/pipeline/L1toL2.py +233 -0
  29. pypromice/{process → pipeline}/L2toL3.py +113 -118
  30. pypromice/pipeline/__init__.py +4 -0
  31. pypromice/{process → pipeline}/aws.py +10 -82
  32. pypromice/{process → pipeline}/get_l2.py +2 -2
  33. pypromice/{process → pipeline}/get_l2tol3.py +19 -22
  34. pypromice/{process → pipeline}/join_l2.py +31 -32
  35. pypromice/{process → pipeline}/join_l3.py +16 -14
  36. pypromice/{process → pipeline}/resample.py +75 -51
  37. pypromice/{process → pipeline}/utilities.py +0 -22
  38. pypromice/resources/file_attributes.csv +4 -4
  39. pypromice/resources/variable_aliases_GC-Net.csv +2 -2
  40. pypromice/resources/variables.csv +27 -24
  41. {pypromice-1.5.3.dist-info → pypromice-1.7.0.dist-info}/METADATA +1 -2
  42. pypromice-1.7.0.dist-info/RECORD +65 -0
  43. pypromice-1.7.0.dist-info/entry_points.txt +12 -0
  44. pypromice/get/__init__.py +0 -1
  45. pypromice/get/get.py +0 -211
  46. pypromice/get/get_promice_data.py +0 -56
  47. pypromice/process/L0toL1.py +0 -564
  48. pypromice/process/L1toL2.py +0 -824
  49. pypromice/process/__init__.py +0 -4
  50. pypromice/process/load.py +0 -161
  51. pypromice-1.5.3.dist-info/RECORD +0 -54
  52. pypromice-1.5.3.dist-info/entry_points.txt +0 -13
  53. /pypromice/{postprocess → core}/__init__.py +0 -0
  54. /pypromice/{utilities → core}/dependency_graph.py +0 -0
  55. /pypromice/{qc → core/qc}/__init__.py +0 -0
  56. /pypromice/{qc → core/qc}/percentiles/__init__.py +0 -0
  57. /pypromice/{qc → core/qc}/percentiles/outlier_detector.py +0 -0
  58. /pypromice/{qc → core/qc}/percentiles/thresholds.csv +0 -0
  59. /pypromice/{process → core/variables}/wind.py +0 -0
  60. /pypromice/{utilities → io}/__init__.py +0 -0
  61. /pypromice/{postprocess → io/bufr}/bufr_utilities.py +0 -0
  62. /pypromice/{postprocess → io/bufr}/positions_seed.csv +0 -0
  63. /pypromice/{station_configuration.py → io/bufr/station_configuration.py} +0 -0
  64. /pypromice/{postprocess → io}/make_metadata_csv.py +0 -0
  65. {pypromice-1.5.3.dist-info → pypromice-1.7.0.dist-info}/WHEEL +0 -0
  66. {pypromice-1.5.3.dist-info → pypromice-1.7.0.dist-info}/licenses/LICENSE.txt +0 -0
  67. {pypromice-1.5.3.dist-info → pypromice-1.7.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,65 @@
1
+ pypromice/__init__.py,sha256=X2LaniNJv4iVYqRtmf2jyGMQLaj59bIgkhlWhT6LCgQ,74
2
+ pypromice/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
3
+ pypromice/core/dependency_graph.py,sha256=bqoXasC8pg5ipjBd6rqDhfHwIq11t2_cFlNT72ncw4w,3135
4
+ pypromice/core/resampling.py,sha256=KyYk6HWFqJo60CPiX-gBd0uwLx5iPrYJBMQ3Sqb9yFg,4645
5
+ pypromice/core/qc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
+ pypromice/core/qc/github_data_issues.py,sha256=mTnJArv89p8vSzJdWFRkA4kBAYnFcR-I-Xz4QzlrifA,13857
7
+ pypromice/core/qc/persistence.py,sha256=Y9CmAAPHNqEjdsZW4LEAVm7cnMEBK-zwEAD_UBDASVw,6466
8
+ pypromice/core/qc/value_clipping.py,sha256=KGLLN54-QeD4TQ-Dd-NeooYG3kdEC6SPhz4LT2ZsRi4,1533
9
+ pypromice/core/qc/percentiles/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
+ pypromice/core/qc/percentiles/compute_thresholds.py,sha256=jHk1hAvm168ViORIwDOZHm4MAxQTtvseo_43-P3mtoY,6096
11
+ pypromice/core/qc/percentiles/outlier_detector.py,sha256=5_458aMm9MAubfYv7oIz1Lp9ME6Sn1xiSiAQmIe-riY,3370
12
+ pypromice/core/qc/percentiles/thresholds.csv,sha256=KTQcYsg2VCZmR-Rf1Zzx1Jn-ZmR5yPPWWXYZ0Z03PDQ,9516
13
+ pypromice/core/variables/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
14
+ pypromice/core/variables/air_temperature.py,sha256=TS_0kGC98pbSe7DlAl6W_E5AzGslgRNlIiskyIjA07c,2025
15
+ pypromice/core/variables/gps.py,sha256=YCS1OeQ8f2Rel32kiZ_UH-kNlRLlaakKKMPjaCAvO-I,7615
16
+ pypromice/core/variables/humidity.py,sha256=TfQfGbQ0gyj_A9zRChGei47oZYMRAR13eIzjkQ3kfD4,3775
17
+ pypromice/core/variables/precipitation.py,sha256=iH7m1sJDwbstZuAutO2HCRVo3RqaSuDG33YERBtvQag,3467
18
+ pypromice/core/variables/pressure_transducer_depth.py,sha256=SadI_8oRKmMI2dnsYfYDJD7AnE6-i1bv0MOmO5xms8E,2418
19
+ pypromice/core/variables/radiation.py,sha256=11iGl2cq5WGJxnPwTAy3H4Y5bU-QXTaMaEife1BPNA0,14318
20
+ pypromice/core/variables/station_boom_height.py,sha256=5uGalC-uVEdJFMPQ-cuayP5kpFu8mvou10w38X7kd8Y,2281
21
+ pypromice/core/variables/station_pose.py,sha256=3aVb8hywsFBCDorgHPW_nNdf_pyyil6ziJsP3UBhje4,13112
22
+ pypromice/core/variables/wind.py,sha256=-dpaBOtqGyPDIU4O1HbbWRzlLNRC2a50OdnZhIaWdeI,1701
23
+ pypromice/io/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
24
+ pypromice/io/make_metadata_csv.py,sha256=lUw8fZC6dzG10IePZ4d7eZIrTbquHYCDuppYP4r_dF0,9776
25
+ pypromice/io/write.py,sha256=D-u7WuSlD18B9Y-_qB_AuuXRNoUkDsRPhXibUnVJzJo,15819
26
+ pypromice/io/bufr/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
27
+ pypromice/io/bufr/bufr_to_csv.py,sha256=YhvtK3Rbya8ulpjeCaMyPQ_luNOmRqR_3XkzuyIvaWg,503
28
+ pypromice/io/bufr/bufr_utilities.py,sha256=O1vnpKipUC6nMPmSrPvWECJocjdsB9FAb2ILXD8b8fs,21264
29
+ pypromice/io/bufr/create_bufr_files.py,sha256=hN5UzBuj1kRk_kgwWiE0H9JaYTC3G5DjvenZGCJs6CY,5661
30
+ pypromice/io/bufr/get_bufr.py,sha256=Yy3w9U-s29Xkfs6zNsyKTV9HHCEA_Mc31jRw3A4_BHM,16705
31
+ pypromice/io/bufr/positions_seed.csv,sha256=0kVCQ8UfEALdeXNYCddmwxpseRqLRudbFStqp_bZRBw,224
32
+ pypromice/io/bufr/real_time_utilities.py,sha256=HfLPDW6rS0D2CXr4yJeyRxZxHUm0yA337vYRbb8_l7A,9490
33
+ pypromice/io/bufr/station_configuration.py,sha256=h0ap21-dCeU9kboktgYeziGvcni4OoJBi82fStV3se0,4009
34
+ pypromice/io/ingest/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
35
+ pypromice/io/ingest/git.py,sha256=GcMdsZT2Dr6lTHDtlTLH5ueCUMMHytktgeOy6hX9YDw,1774
36
+ pypromice/io/ingest/l0.py,sha256=1Dmz3wpMcK3nSmf2ENMrPm-aoTqTgsng5gkr4CdWyq4,9011
37
+ pypromice/io/ingest/l0_repository.py,sha256=jmlQMSETj9aBA1iOC9sPl4-ia9UfsRHTWiR0VisZt90,3552
38
+ pypromice/io/ingest/toa5.py,sha256=GiDCUMJpvbTRs0PzU1wz6uPkYkih9XjDVPcGbl__RtU,2857
39
+ pypromice/pipeline/L0toL1.py,sha256=g_pH9sVPJMfYKdFgJpT7WW5rqJntJPKsxpw9N_nKs-o,13816
40
+ pypromice/pipeline/L1toL2.py,sha256=lNSC3QOQf-ZZpFlCW6PFeRIkWtGWlDMBXWzNBU2yKAc,10158
41
+ pypromice/pipeline/L2toL3.py,sha256=yEvjIaheTdT136HEMJXmqAuReYgnx0pF6VhG23VFj00,61650
42
+ pypromice/pipeline/__init__.py,sha256=yqYenngaSNFtpnAsfc953142P84ocq3ykvlsCIbsw3g,151
43
+ pypromice/pipeline/aws.py,sha256=5Xv7XVf5PvcSAnEu_jPpjDWV4U0p_UvdpOzCtmH0ICU,5092
44
+ pypromice/pipeline/get_l2.py,sha256=VV4iV3P34HFSOi3jr0IQrNXjBMgCx5GpF0uwNbT84Ck,3072
45
+ pypromice/pipeline/get_l2tol3.py,sha256=KIsScgatZbccz7ypZHBGiibbresJ9HWq9Sv4Ys2LcHs,4497
46
+ pypromice/pipeline/join_l2.py,sha256=AQL9wVqRFbdB6bevuOArg9knZCv-jDVwypMETjuDqBM,4561
47
+ pypromice/pipeline/join_l3.py,sha256=Wd5slBCo1XgzExOjPu9IRLWnk7NJMTwDcckhithvw0E,20316
48
+ pypromice/pipeline/resample.py,sha256=tq6GvnmbQJsFvdJxZF2DrwTTmeNRVig_LHZQboKWreU,8818
49
+ pypromice/pipeline/utilities.py,sha256=m-BaHWMKBGzTHa09w-49yqWRulXex5TTScg7IZu8fSY,1248
50
+ pypromice/resources/__init__.py,sha256=MpKmvV11R4tcqmyvJpXQt-_It3oRI0WEIQNbMST--4w,907
51
+ pypromice/resources/file_attributes.csv,sha256=ISKR-Ax12CT9tQD38ByNyvWPLLpMszpWXwviPyhuUaI,7018
52
+ pypromice/resources/variable_aliases_GC-Net.csv,sha256=mIAnBy2XvnGGy4_HODHlbZCvk0jq3v9gAncMwyDmyqI,1001
53
+ pypromice/resources/variables.csv,sha256=hcd0WcY9vaG6MhqnCUNjbFxXNg7p5d_ZIyaebLKy0s8,14185
54
+ pypromice/tx/__init__.py,sha256=-62bhHWJGfzFh5JwHcLqRj2jcGzmqzYOLWByhO706YY,30
55
+ pypromice/tx/get_l0tx.py,sha256=b34-96KGshTyTN2tBFaAIBl7oZZzbRB_JR7sXtDNfXA,6957
56
+ pypromice/tx/get_msg.py,sha256=OGS60OHjy4Wf8JExTfOdK-9xhjFdjhuChxoTSPe_MjI,3417
57
+ pypromice/tx/payload_formats.csv,sha256=A46-XcYdpe9-gzmADylP2UVizLi_UphF-BPT5u3Lyn8,7903
58
+ pypromice/tx/payload_types.csv,sha256=C1-xCmHytAqqAzgzPwBLWqabzWu6s6tKAd8AjVd935s,457
59
+ pypromice/tx/tx.py,sha256=asbgXVI5vurKM-WVACTfpKRt-70wtzVvSbvjvYufajI,34416
60
+ pypromice-1.7.0.dist-info/licenses/LICENSE.txt,sha256=gXf5dRMhNSbfLPYYTY_5hsZ1r7UU1OaKQEAQUhuIBkM,18092
61
+ pypromice-1.7.0.dist-info/METADATA,sha256=sU7zLwdi_aCLLUUUagNsQgIFVEwae5acab3osEHJDGM,4958
62
+ pypromice-1.7.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
63
+ pypromice-1.7.0.dist-info/entry_points.txt,sha256=aU2SG5A0wI2n4HE9fotG-y5yeIqJ0G1-8UrEgr1GUQk,535
64
+ pypromice-1.7.0.dist-info/top_level.txt,sha256=cBdfwgSbWDQq3a07nKRjrfmLC7jdaYXs98GG58HpTks,10
65
+ pypromice-1.7.0.dist-info/RECORD,,
@@ -0,0 +1,12 @@
1
+ [console_scripts]
2
+ bufr_to_csv = pypromice.io.bufr.bufr_to_csv:main
3
+ create_bufr_files = pypromice.io.bufr.create_bufr_files:main
4
+ get_bufr = pypromice.io.bufr.get_bufr:main
5
+ get_l0tx = pypromice.tx.get_l0tx:get_l0tx
6
+ get_l2 = pypromice.pipeline.get_l2:main
7
+ get_l2tol3 = pypromice.pipeline.get_l2tol3:main
8
+ get_msg = pypromice.tx.get_msg:get_msg
9
+ get_watsontx = pypromice.tx.get_watsontx:get_watsontx
10
+ join_l2 = pypromice.pipeline.join_l2:main
11
+ join_l3 = pypromice.pipeline.join_l3:main
12
+ make_metadata_csv = pypromice.io.make_metadata_csv:main
pypromice/get/__init__.py DELETED
@@ -1 +0,0 @@
1
- from pypromice.get.get import *
pypromice/get/get.py DELETED
@@ -1,211 +0,0 @@
1
- #!/usr/bin/env python3
2
- # -*- coding: utf-8 -*-
3
- """
4
- AWS data retrieval module
5
- """
6
- from pyDataverse.api import NativeApi
7
- import pandas as pd
8
- import xarray as xr
9
- import unittest, pkg_resources
10
- from datetime import datetime
11
- import warnings, os
12
-
13
- def aws_names():
14
- '''Return PROMICE and GC-Net AWS names that can be used in get.aws_data()
15
- fetching'''
16
- lookup = lookup_table(['doi:10.22008/FK2/IW73UU'])
17
- print(f'Available dataset keywords: {list(lookup.keys())}')
18
- return list(lookup.keys())
19
-
20
- def aws_data(aws_name):
21
- '''Return PROMICE and GC-Net AWS L3 v3 hourly observations
22
-
23
- Returns
24
- -------
25
- df : pandas.DataFrame
26
- AWS observations dataframe
27
- '''
28
- lookup = lookup_table(['doi:10.22008/FK2/IW73UU'])
29
- assert aws_name.lower() in list(lookup.keys())
30
- data = pd.read_csv(lookup[aws_name], index_col=0, parse_dates=True)
31
- return data
32
-
33
- def watson_discharge(t='hour'):
34
- '''Return PROMICE hourly Watson river discharge
35
-
36
- Parameters
37
- ----------
38
- t : str
39
- Temporal resolution of the data - "hour", "day" or "year"
40
-
41
- Returns
42
- -------
43
- df : pandas.DataFrame
44
- Watson river discharge dataframe
45
- '''
46
- lookup = lookup_table(['doi:10.22008/FK2/XEHYCM'])
47
-
48
- dict_keys = lookup.keys()
49
-
50
- if 'year' in t.lower():
51
-
52
- key = [k for k in dict_keys if 'year' in k]
53
-
54
- if not key:
55
- warnings.warn('The yearly Watson River Discharge file does not exist, or has changed name, on GEUS Dataverse DOI, ' + \
56
- 'please check the dataset, and the naming of the txt files on Dataverse')
57
-
58
- if len(key) > 1:
59
- warnings.warn('Warning, there exist multiple yearly txt files on dataverse, please check ' + \
60
- 'if the correct txt file is used')
61
-
62
- link = lookup[key[0]]
63
- df = pd.read_csv(link, sep="\s+", skiprows=9, index_col=0)
64
-
65
- elif 'daily' in t.lower() or 'day' in t.lower():
66
-
67
- key = [k for k in dict_keys if 'daily' in k]
68
-
69
- if not key:
70
- warnings.warn('The daily Watson River Discharge file does not exist, or has changed name, on GEUS Dataverse DOI, ' + \
71
- 'please check the dataset, and the naming of the txt files on Dataverse')
72
-
73
- if len(key) > 1:
74
- warnings.warn('Warning, there exist multiple daily txt files on dataverse, please check ' + \
75
- 'if the correct txt file is used')
76
-
77
- link = lookup[key[0]]
78
-
79
- df = pd.read_csv(link, sep="\s+", parse_dates=[[0,1,2]])\
80
- .rename({"WaterFluxDiversOnly(m3/s)" : "divers",
81
- "Uncertainty(m3/s)" : "divers_err",
82
- "WaterFluxDivers&Temperature(m3/s)" : "divers_t",
83
- "Uncertainty(m3/s).1" : "divers_t_err",
84
- "WaterFluxCumulative(km3)" : "Q",
85
- "Uncertainty(km3)" : "err"},
86
- axis='columns')
87
- df['time'] = df.iloc[:,0]
88
- df = df.set_index('time')
89
- df.drop(columns=df.columns[0:1], axis=1, inplace=True)
90
-
91
- else:
92
-
93
- key = [k for k in dict_keys if 'hourly' in k]
94
-
95
- if not key:
96
- warnings.warn('The hourly Watson River Discharge file does not exist, or has changed name, on GEUS Dataverse DOI, ' + \
97
- 'please check the dataset, and the naming of the txt files on Dataverse')
98
-
99
- if len(key) > 1:
100
- warnings.warn('Warning, there exist multiple Houlry txt files on dataverse, please check ' + \
101
- 'if the correct txt file is used')
102
-
103
- link = lookup[key[0]]
104
-
105
- df = pd.read_csv(link, sep="\s+", parse_dates=[[0,1,2,3]])\
106
- .rename({"WaterFluxDiversOnly(m3/s)" : "divers",
107
- "Uncertainty(m3/s)" : "divers_err",
108
- "WaterFluxDivers&Temperature(m3/s)" : "divers_t",
109
- "Uncertainty(m3/s).1" : "divers_t_err",
110
- "WaterFluxCumulative(km3)" : "Q",
111
- "Uncertainty(km3)" : "err"},
112
- axis='columns')
113
- df = _getDFdatetime(df, list(df.iloc[:,0]))
114
-
115
- return df
116
-
117
- def lookup_table(base_dois,
118
- server='https://dataverse.geus.dk'):
119
- '''Fetch dictionary of data files and download URLs from a DOI entry in the
120
- GEUS Dataverse
121
-
122
- Parameters
123
- ----------
124
- base_dois : list
125
- List of DOIs to search
126
- server : str, optional
127
- DOI server. The default is "https://dataverse.geus.dk"
128
- '''
129
- # Prime API
130
- dataverse_server = server.strip("/")
131
- api = NativeApi(dataverse_server)
132
-
133
- # Look through DOI entries
134
- lookup_list = {}
135
- for d in base_dois:
136
- dataset = api.get_dataset(d)
137
-
138
- # Get file names and DOIs
139
- f_list = dataset.json()['data']['latestVersion']['files']
140
- for f in f_list:
141
- fname = f['dataFile']['filename'].lower()
142
- if '.csv' in fname or '.txt' in fname:
143
- link = _getURL(f['dataFile']['persistentId'])
144
- lookup_list[fname] = link
145
- return lookup_list
146
-
147
- def _getURL(persistentId,
148
- base_link='https://dataverse.geus.dk/api/access/datafile/:persistentId?persistentId='):
149
- '''Return URL download link from persistentId attached to DOI'''
150
- return base_link+persistentId
151
-
152
-
153
- def _getDFdatetime(df, dt_str, dt_format='%Y %m %d %H'):
154
- '''Format dataframe with datetime (year, month, day, hour) index column
155
-
156
- Parameters
157
- ----------
158
- df : pandas.DataFrame
159
- Input DataFrame
160
- dt_str : list
161
- List of datetime strings to format and add
162
- dt_format : str
163
- Datetime string format. Default is "%Y %m %d %H".
164
-
165
- Returns
166
- -------
167
- df : pandas.DataFrame
168
- DataFrame with added datetime as index
169
- '''
170
- dates = [datetime.strptime(str(d), dt_format) for d in dt_str]
171
- df['time'] = dates
172
- df = df.set_index('time')
173
- df.drop(columns=df.columns[0], axis=1, inplace=True)
174
- return df
175
-
176
- #------------------------------------------------------------------------------
177
-
178
- class TestGet(unittest.TestCase):
179
- def testURL(self):
180
- '''Test URL retrieval'''
181
- l = lookup_table(['doi:10.22008/FK2/IW73UU'])
182
- self.assertTrue('10.22008/FK2' in list(l.values())[0])
183
-
184
- # def testAWSname(self):
185
- # '''Test AWS names retrieval'''
186
- # n = aws_names()
187
- # self.assertIsInstance(n, list)
188
- # self.assertTrue('nuk_k_hour.csv' in n)
189
-
190
- # def testAWScsv(self):
191
- # '''Test AWS data retrieval'''
192
- # kan_b = aws_data('kan_b_hour.csv')
193
- # self.assertIsInstance(kan_b, pd.DataFrame)
194
-
195
- # def testWatsonHour(self):
196
- # '''Test Wason River discharge hourly data retrieval'''
197
- # wh = watson_discharge()
198
- # self.assertTrue(wh['Q']['2021-10-27 23:00:00']==5.48)
199
-
200
- # def testWatsonDaily(self):
201
- # '''Test Wason River discharge daily data retrieval'''
202
- # wd = watson_discharge(t='day')
203
- # self.assertTrue(wd['Q']['2009-09-04 00:00:00']==4.72)
204
-
205
- def testGetCLI(self):
206
- '''Test get_promice_data'''
207
- exit_status = os.system('get_promice_data -h')
208
- self.assertEqual(exit_status, 0)
209
-
210
- if __name__ == "__main__":
211
- unittest.main()
@@ -1,56 +0,0 @@
1
- #!/usr/bin/env python
2
- from argparse import ArgumentParser
3
- import os, unittest
4
- from pypromice.get.get import aws_data
5
-
6
-
7
- def parse_arguments_data():
8
- parser = ArgumentParser(description="PROMICE and GC-Net dataset fetcher")
9
- parser.add_argument('-n', '--awsname', default=None, type=str, required=True,
10
- help='AWS name')
11
- parser.add_argument('-f', '--format', default='csv', type=str, required=False,
12
- help='File format to save data as')
13
- parser.add_argument('-o', '--outpath', default=os.getcwd(), type=str, required=False,
14
- help='Directory where file will be written to')
15
- args = parser.parse_args()
16
- return args
17
-
18
-
19
- def get_promice_data():
20
- '''Command line driver for fetching PROMICE and GC-Net datasets'''
21
-
22
- args = parse_arguments_data()
23
-
24
- # Construct AWS dataset name
25
- # n = aws_names()
26
- # assert(args.awsname in n)
27
-
28
- # Check file format type
29
- f = args.format.lower()
30
- assert(args.format in ['csv', 'nc', '.csv', '.nc'])
31
-
32
- # Construct output file path
33
- assert(os.path.exists(args.outpath))
34
-
35
- # Remove pre-existing files of same name
36
- if os.path.isfile(f):
37
- os.remove(f)
38
-
39
- # Fetch data
40
- print(f'Fetching {args.awsname.lower()}...')
41
- data = aws_data(args.awsname.lower())
42
-
43
- # Save to file
44
- if f in 'csv':
45
- outfile = os.path.join(args.outpath, args.awsname.lower())
46
- if outfile is not None:
47
- data.to_csv(outfile)
48
- elif f in 'nc':
49
- data.to_netcdf(outfile, mode='w', format='NETCDF4', compute=True)
50
- if outfile is not None:
51
- outfile = os.path.join(args.outpath, args.awsname.lower().split('.csv')[0]+'.nc')
52
-
53
- print(f'File saved to {outfile}')
54
-
55
- if __name__ == "__main__":
56
- get_promice_data()