weatherdb 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- docker/Dockerfile +30 -0
- docker/docker-compose.yaml +58 -0
- docker/docker-compose_test.yaml +24 -0
- docker/start-docker-test.sh +6 -0
- docs/requirements.txt +10 -0
- docs/source/Changelog.md +2 -0
- docs/source/License.rst +7 -0
- docs/source/Methode.md +161 -0
- docs/source/_static/custom.css +8 -0
- docs/source/_static/favicon.ico +0 -0
- docs/source/_static/logo.png +0 -0
- docs/source/api/api.rst +15 -0
- docs/source/api/cli.rst +8 -0
- docs/source/api/weatherDB.broker.rst +10 -0
- docs/source/api/weatherDB.config.rst +7 -0
- docs/source/api/weatherDB.db.rst +23 -0
- docs/source/api/weatherDB.rst +22 -0
- docs/source/api/weatherDB.station.rst +56 -0
- docs/source/api/weatherDB.stations.rst +46 -0
- docs/source/api/weatherDB.utils.rst +22 -0
- docs/source/conf.py +137 -0
- docs/source/index.rst +33 -0
- docs/source/setup/Configuration.md +127 -0
- docs/source/setup/Hosting.md +9 -0
- docs/source/setup/Install.md +49 -0
- docs/source/setup/Quickstart.md +183 -0
- docs/source/setup/setup.rst +12 -0
- weatherdb/__init__.py +24 -0
- weatherdb/_version.py +1 -0
- weatherdb/alembic/README.md +8 -0
- weatherdb/alembic/alembic.ini +80 -0
- weatherdb/alembic/config.py +9 -0
- weatherdb/alembic/env.py +100 -0
- weatherdb/alembic/script.py.mako +26 -0
- weatherdb/alembic/versions/V1.0.0_initial_database_creation.py +898 -0
- weatherdb/alembic/versions/V1.0.2_more_charachters_for_settings+term_station_ma_raster.py +88 -0
- weatherdb/alembic/versions/V1.0.5_fix-ma-raster-values.py +152 -0
- weatherdb/alembic/versions/V1.0.6_update-views.py +22 -0
- weatherdb/broker.py +667 -0
- weatherdb/cli.py +214 -0
- weatherdb/config/ConfigParser.py +663 -0
- weatherdb/config/__init__.py +5 -0
- weatherdb/config/config_default.ini +162 -0
- weatherdb/db/__init__.py +3 -0
- weatherdb/db/connections.py +374 -0
- weatherdb/db/fixtures/RichterParameters.json +34 -0
- weatherdb/db/models.py +402 -0
- weatherdb/db/queries/get_quotient.py +155 -0
- weatherdb/db/views.py +165 -0
- weatherdb/station/GroupStation.py +710 -0
- weatherdb/station/StationBases.py +3108 -0
- weatherdb/station/StationET.py +111 -0
- weatherdb/station/StationP.py +807 -0
- weatherdb/station/StationPD.py +98 -0
- weatherdb/station/StationT.py +164 -0
- weatherdb/station/__init__.py +13 -0
- weatherdb/station/constants.py +21 -0
- weatherdb/stations/GroupStations.py +519 -0
- weatherdb/stations/StationsBase.py +1021 -0
- weatherdb/stations/StationsBaseTET.py +30 -0
- weatherdb/stations/StationsET.py +17 -0
- weatherdb/stations/StationsP.py +128 -0
- weatherdb/stations/StationsPD.py +24 -0
- weatherdb/stations/StationsT.py +21 -0
- weatherdb/stations/__init__.py +11 -0
- weatherdb/utils/TimestampPeriod.py +369 -0
- weatherdb/utils/__init__.py +3 -0
- weatherdb/utils/dwd.py +350 -0
- weatherdb/utils/geometry.py +69 -0
- weatherdb/utils/get_data.py +285 -0
- weatherdb/utils/logging.py +126 -0
- weatherdb-1.1.0.dist-info/LICENSE +674 -0
- weatherdb-1.1.0.dist-info/METADATA +765 -0
- weatherdb-1.1.0.dist-info/RECORD +77 -0
- weatherdb-1.1.0.dist-info/WHEEL +5 -0
- weatherdb-1.1.0.dist-info/entry_points.txt +2 -0
- weatherdb-1.1.0.dist-info/top_level.txt +3 -0
@@ -0,0 +1,519 @@
|
|
1
|
+
# libraries
|
2
|
+
import warnings
|
3
|
+
import pandas as pd
|
4
|
+
import geopandas as gpd
|
5
|
+
import logging
|
6
|
+
import datetime
|
7
|
+
import socket
|
8
|
+
import zipfile
|
9
|
+
from pathlib import Path
|
10
|
+
from sqlalchemy import text as sqltxt
|
11
|
+
|
12
|
+
from ..db.connections import db_engine
|
13
|
+
from ..utils import TimestampPeriod
|
14
|
+
from ..station import StationP, GroupStation
|
15
|
+
from .StationsP import StationsP
|
16
|
+
from .StationsT import StationsT
|
17
|
+
from .StationsET import StationsET
|
18
|
+
from .StationsBase import StationsBase
|
19
|
+
|
20
|
+
# set settings
# ############
# public API of this module
__all__ = ["GroupStations"]
# module-level logger, named after the package path
log = logging.getLogger(__name__)
|
24
|
+
|
25
|
+
# class definition
|
26
|
+
##################
|
27
|
+
|
28
|
+
class GroupStations(object):
    """A class to group all possible parameters of all the stations.
    """
    # station.StationP class; precipitation is the reference parameter
    # used to determine the set of valid station ids
    _StationP = StationP
    # station.GroupStation class used to build single multi-parameter stations
    _GroupStation = GroupStation

    def __init__(self):
        # collection object for all precipitation stations
        self.stationsP = StationsP()
|
36
|
+
|
37
|
+
def get_valid_stids(self):
|
38
|
+
if not hasattr(self, "_valid_stids"):
|
39
|
+
sql ="""SELECT station_id FROM meta_p"""
|
40
|
+
with db_engine.connect() as con:
|
41
|
+
res = con.execute(sqltxt(sql))
|
42
|
+
self._valid_stids = [el[0] for el in res.all()]
|
43
|
+
return self._valid_stids
|
44
|
+
|
45
|
+
def _check_paras(self, paras):
|
46
|
+
if isinstance(paras, str) and paras != "all":
|
47
|
+
paras = [paras,]
|
48
|
+
|
49
|
+
valid_paras=["n", "t", "et"]
|
50
|
+
if isinstance(paras, str) and (paras == "all"):
|
51
|
+
return valid_paras
|
52
|
+
else:
|
53
|
+
paras_new = []
|
54
|
+
for para in paras:
|
55
|
+
if para in valid_paras:
|
56
|
+
paras_new.append(para)
|
57
|
+
else:
|
58
|
+
raise ValueError(
|
59
|
+
f"The parameter {para} you asked for is not a valid parameter. Please enter one of {valid_paras}")
|
60
|
+
return paras_new
|
61
|
+
|
62
|
+
    def _check_period(self, period, stids, kinds, nas_allowed=True):
        """Clip the asked period to the maximal period available for the stations.

        Parameters
        ----------
        period : TimestampPeriod like object
            The asked period. An empty period means "use the maximal one".
        stids : list of int
            The station IDs whose data availability is checked.
        kinds : str or list of str
            The data kinds to look at when computing the filled period.
        nas_allowed : bool, optional
            If True the stations' periods are combined with an "outer" union
            (widest span), otherwise with "inner" (common overlap).
            The default is True.

        Returns
        -------
        TimestampPeriod
            The checked and possibly clipped period.
        """
        # get max_period of stations
        max_period = None
        for stid in stids:
            max_period_i = self._GroupStation(stid).get_max_period(
                kinds=kinds, nas_allowed=nas_allowed)
            if max_period is not None:
                max_period = max_period.union(
                    max_period_i,
                    how="outer" if nas_allowed else "inner"
                )
            else:
                max_period=max_period_i
        # NOTE(review): if stids is empty, max_period stays None and the
        # period.inside(...) call below would fail — presumably stids is
        # always checked/non-empty by the caller; confirm.

        if not isinstance(period, TimestampPeriod):
            period = TimestampPeriod(*period)
        if period.is_empty():
            # empty request -> hand back the maximal available period
            return max_period
        else:
            if not period.inside(max_period):
                # clip to the available data and inform the user
                period = period.union(max_period, how="inner")
                warnings.warn("The asked period is too large. Only {min_tstp} - {max_tstp} is returned".format(
                    **period.get_sql_format_dict(format="%Y-%m-%d %H:%M")))
            return period
|
86
|
+
|
87
|
+
def _check_stids(self, stids):
|
88
|
+
"""Check if the given stids are valid Station IDs.
|
89
|
+
|
90
|
+
It checks against the Precipitation stations.
|
91
|
+
"""
|
92
|
+
if isinstance(stids, str) and (stids == "all"):
|
93
|
+
return self.get_valid_stids()
|
94
|
+
else:
|
95
|
+
valid_stids = self.get_valid_stids()
|
96
|
+
mask_stids_valid = [stid in valid_stids for stid in stids]
|
97
|
+
if all(mask_stids_valid):
|
98
|
+
return stids
|
99
|
+
else:
|
100
|
+
raise ValueError(
|
101
|
+
"There is no station defined in the database for the IDs: {stids}".format(
|
102
|
+
stids=", ".join(
|
103
|
+
[str(stid) for stid, valid in zip(stids, mask_stids_valid)
|
104
|
+
if not valid])))
|
105
|
+
|
106
|
+
@staticmethod
|
107
|
+
def _check_dir(dir):
|
108
|
+
"""Checks if a directors is valid and empty.
|
109
|
+
|
110
|
+
If not existing the directory is created.
|
111
|
+
|
112
|
+
Parameters
|
113
|
+
----------
|
114
|
+
dir : pathlib object or zipfile.ZipFile
|
115
|
+
The directory to check.
|
116
|
+
|
117
|
+
Raises
|
118
|
+
------
|
119
|
+
ValueError
|
120
|
+
If the directory is not empty.
|
121
|
+
ValueError
|
122
|
+
If the directory is not valid. E.G. it is a file path.
|
123
|
+
"""
|
124
|
+
# check types
|
125
|
+
if isinstance(dir, str):
|
126
|
+
dir = Path(dir)
|
127
|
+
|
128
|
+
# check directory
|
129
|
+
if isinstance(dir, zipfile.ZipFile):
|
130
|
+
return dir
|
131
|
+
elif isinstance(dir, Path):
|
132
|
+
if dir.is_dir():
|
133
|
+
if len(list(dir.iterdir())) > 0:
|
134
|
+
raise ValueError(
|
135
|
+
"The given directory '{dir}' is not empty.".format(
|
136
|
+
dir=str(dir)))
|
137
|
+
elif dir.suffix == "":
|
138
|
+
dir.mkdir()
|
139
|
+
elif dir.suffix == ".zip":
|
140
|
+
if not dir.parent.is_dir():
|
141
|
+
raise ValueError(
|
142
|
+
"The given parent directory '{dir}' of the zipfile is not a directory.".format(
|
143
|
+
dir=dir.parents))
|
144
|
+
else:
|
145
|
+
raise ValueError(
|
146
|
+
"The given directory '{dir}' is not a directory.".format(
|
147
|
+
dir=dir))
|
148
|
+
else:
|
149
|
+
raise ValueError(
|
150
|
+
"The given directory '{dir}' is not a directory or zipfile.".format(
|
151
|
+
dir=dir))
|
152
|
+
|
153
|
+
return dir
|
154
|
+
|
155
|
+
    @classmethod
    def get_meta_explanation(cls, infos="all"):
        """Get the explanations of the available meta fields.

        Parameters
        ----------
        infos : list or string, optional
            The infos you wish to get an explanation for.
            If "all" then all the available information get returned.
            The default is "all"

        Returns
        -------
        pd.Series
            a pandas Series with the information names as index and the explanation as values.
        """
        # delegate to the single-station implementation
        return cls._GroupStation.get_meta_explanation(infos=infos)
|
172
|
+
|
173
|
+
def get_meta(self, paras="all", stids="all", **kwargs):
|
174
|
+
"""Get the meta Dataframe from the Database.
|
175
|
+
|
176
|
+
Parameters
|
177
|
+
----------
|
178
|
+
paras : list or str, optional
|
179
|
+
The parameters for which to get the information.
|
180
|
+
If "all" then all the available parameters are requested.
|
181
|
+
The default is "all".
|
182
|
+
stids: string or list of int, optional
|
183
|
+
The Stations to return the meta information for.
|
184
|
+
Can either be "all", for all possible stations
|
185
|
+
or a list with the Station IDs.
|
186
|
+
The default is "all".
|
187
|
+
**kwargs: dict, optional
|
188
|
+
The keyword arguments are passed to the station.GroupStation().get_meta method.
|
189
|
+
From there it is passed to the single station get_meta method.
|
190
|
+
Can be e.g. "infos"
|
191
|
+
|
192
|
+
Returns
|
193
|
+
-------
|
194
|
+
dict of pandas.DataFrame or geopandas.GeoDataFrame
|
195
|
+
or pandas.DataFrame or geopandas.GeoDataFrame
|
196
|
+
The meta DataFrame.
|
197
|
+
If several parameters are asked for, then a dict with an entry per parameter is returned.
|
198
|
+
|
199
|
+
Raises
|
200
|
+
------
|
201
|
+
ValueError
|
202
|
+
If the given stids (Station_IDs) are not all valid.
|
203
|
+
ValueError
|
204
|
+
If the given paras are not all valid.
|
205
|
+
"""
|
206
|
+
paras = self._check_paras(paras)
|
207
|
+
stats = self.get_para_stations(paras=paras)
|
208
|
+
|
209
|
+
for stat in stats:
|
210
|
+
meta_para = stat.get_meta(stids=stids, **kwargs)
|
211
|
+
meta_para["parameter"] = stat._para
|
212
|
+
if "meta_all" not in locals():
|
213
|
+
meta_all = meta_para
|
214
|
+
else:
|
215
|
+
meta_all = pd.concat([meta_all, meta_para], axis=0)
|
216
|
+
if isinstance(meta_para, gpd.GeoDataFrame):
|
217
|
+
meta_all = gpd.GeoDataFrame(meta_all, crs=meta_para.crs)
|
218
|
+
|
219
|
+
if len(paras)==1:
|
220
|
+
return meta_all.drop("parameter", axis=1)
|
221
|
+
else:
|
222
|
+
return meta_all.reset_index().set_index(["station_id", "parameter"]).sort_index()
|
223
|
+
|
224
|
+
def get_para_stations(self, paras="all"):
|
225
|
+
"""Get a list with all the multi parameter stations as stations.Station*Parameter*-objects.
|
226
|
+
|
227
|
+
Parameters
|
228
|
+
----------
|
229
|
+
paras : list or str, optional
|
230
|
+
The parameters for which to get the objects.
|
231
|
+
If "all" then all the available parameters are requested.
|
232
|
+
The default is "all".
|
233
|
+
|
234
|
+
Returns
|
235
|
+
-------
|
236
|
+
Station-object
|
237
|
+
returns a list with the corresponding station objects.
|
238
|
+
|
239
|
+
Raises
|
240
|
+
------
|
241
|
+
ValueError
|
242
|
+
If the given stids (Station_IDs) are not all valid.
|
243
|
+
"""
|
244
|
+
paras = self._check_paras(paras)
|
245
|
+
if not hasattr(self, "stations"):
|
246
|
+
self.stations = [self.stationsP(), StationsT(), StationsET()]
|
247
|
+
return [stats for stats in self.stations if stats._para in paras]
|
248
|
+
|
249
|
+
def get_group_stations(self, stids="all", **kwargs):
|
250
|
+
"""Get a list with all the stations as station.GroupStation-objects.
|
251
|
+
|
252
|
+
Parameters
|
253
|
+
----------
|
254
|
+
stids: string or list of int, optional
|
255
|
+
The Stations to return.
|
256
|
+
Can either be "all", for all possible stations
|
257
|
+
or a list with the Station IDs.
|
258
|
+
The default is "all".
|
259
|
+
**kwargs: optional
|
260
|
+
The keyword arguments are handed to the creation of the single GroupStation objects.
|
261
|
+
Can be e.g. "error_if_missing".
|
262
|
+
|
263
|
+
Returns
|
264
|
+
-------
|
265
|
+
Station-object
|
266
|
+
returns a list with the corresponding station objects.
|
267
|
+
|
268
|
+
Raises
|
269
|
+
------
|
270
|
+
ValueError
|
271
|
+
If the given stids (Station_IDs) are not all valid.
|
272
|
+
"""
|
273
|
+
if "error_if_missing" not in kwargs:
|
274
|
+
kwargs.update({"error_if_missing": False})
|
275
|
+
kwargs.update({"_skip_meta_check":True})
|
276
|
+
valid_stids = self.get_valid_stids()
|
277
|
+
|
278
|
+
if isinstance(stids, str) and (stids == "all"):
|
279
|
+
stations = [
|
280
|
+
self._GroupStation(stid, **kwargs)
|
281
|
+
for stid in valid_stids]
|
282
|
+
else:
|
283
|
+
stids = list(stids)
|
284
|
+
stations = [self._GroupStation(stid, **kwargs)
|
285
|
+
for stid in valid_stids if stid in stids]
|
286
|
+
stations_ids = [stat.id for stat in stations]
|
287
|
+
if len(stations) != len(stids):
|
288
|
+
raise ValueError(
|
289
|
+
"It was not possible to create a {para_long} Station with the following IDs: {stids}".format(
|
290
|
+
para_long=self._para_long,
|
291
|
+
stids = ", ".join([stid for stid in stids if stid in stations_ids])
|
292
|
+
))
|
293
|
+
|
294
|
+
return stations
|
295
|
+
|
296
|
+
    def create_ts(self, dir, period=(None, None), kinds="best",
                  stids="all", agg_to="10 min", r_r0=None, split_date=False,
                  nas_allowed=True, add_na_share=False,
                  add_t_min=False, add_t_max=False,
                  **kwargs):
        """Download and create the weather tables as csv files.

        Parameters
        ----------
        dir : path-like object
            The directory where to save the tables.
            If the directory is a ZipFile, then the output will get zipped into this.
        period : TimestampPeriod like object, optional
            The period for which to get the timeseries.
            If (None, None) is entered, then the maximal possible period is computed.
            The default is (None, None)
        kinds : str or list of str
            The data kind to look for filled period.
            Must be a column in the timeseries DB.
            Must be one of "raw", "qc", "filled", "adj".
            If "best" is given, then depending on the parameter of the station the best kind is selected.
            For Precipitation this is "corr" and for the other this is "filled".
            For the precipitation also "qn" and "corr" are valid.
        stids : string or list of int, optional
            The Stations for which to compute.
            Can either be "all", for all possible stations
            or a list with the Station IDs.
            The default is "all".
        agg_to : str, optional
            To what aggregation level should the timeseries get aggregated to.
            The minimum aggregation for Temperatur and ET is daily and for the precipitation it is 10 minutes.
            If a smaller aggregation is selected the minimum possible aggregation for the respective parameter is returned.
            So if 10 minutes is selected, than precipitation is returned in 10 minuets and T and ET as daily.
            The default is "10 min".
        r_r0 : int or float or None or pd.Series or list, optional
            Should the ET timeserie contain a column with R/R0.
            If None, then no column is added.
            If int, then a R/R0 column is appended with this number as standard value.
            If list of int or floats, then the list should have the same length as the ET-timeserie and is appended to the Timeserie.
            If pd.Series, then the index should be a timestamp index. The series is then joined to the ET timeserie.
            The default is None.
        split_date : bool, optional
            Should the timestamp get splitted into parts, so one column for year, one for month etc.?
            If False the timestamp is saved in one column as string.
        nas_allowed : bool, optional
            Should NAs be allowed?
            If True, then the maximum possible period is returned, even if there are NAs in the timeserie.
            If False, then the minimal filled period is returned.
            The default is True.
        add_na_share : bool, optional
            Should one or several columns be added to the Dataframe with the share of NAs in the data.
            This is especially important, when the stations data get aggregated, because the aggregation doesn't make sense if there are a lot of NAs in the original data.
            If True, one column per asked kind is added with the respective share of NAs, if the aggregation step is not the smallest.
            The "kind"_na_share column is in percentage.
            The default is False.
        add_t_min : bool, optional
            Should the minimal temperature value get added?
            The default is False.
        add_t_max : bool, optional
            Should the maximal temperature value get added?
            The default is False.
        **kwargs:
            additional parameters for GroupStation.create_ts

        Raises
        ------
        ValueError
            If no timeseries could get extracted for the given settings.
        """
        # remember the start to measure the total duration below
        start_time = datetime.datetime.now()
        # check directory and stids
        # NOTE(review): _check_dir may return a zipfile.ZipFile, which has
        # no .suffix attribute used below — presumably only path-like dirs
        # reach this method; confirm.
        dir = self._check_dir(dir)
        stids = self._check_stids(stids)

        # check period
        period = self._check_period(
            period=period, stids=stids, kinds=kinds,
            nas_allowed=nas_allowed)
        if period.is_empty():
            raise ValueError("For the given settings, no timeseries could get extracted from the database.\nMaybe try to change the nas_allowed parameter to True, to see, where the problem comes from.")

        # create GroupStation instances
        gstats = self.get_group_stations(stids=stids)
        pbar = StationsBase._get_progressbar(
            max_value=len(gstats),
            name="create TS")
        pbar.update(0)

        if dir.suffix == ".zip":
            # write every station's files into one zip archive
            with zipfile.ZipFile(
                    dir, "w",
                    compression=zipfile.ZIP_DEFLATED,
                    compresslevel=5) as zf:
                for stat in gstats:
                    stat.create_ts(
                        dir=zf,
                        period=period,
                        kinds=kinds,
                        agg_to=agg_to,
                        r_r0=r_r0,
                        split_date=split_date,
                        nas_allowed=nas_allowed,
                        add_na_share=add_na_share,
                        add_t_min=add_t_min,
                        add_t_max=add_t_max,
                        _skip_period_check=True,
                        **kwargs)
                    pbar.variables["last_station"] = stat.id
                    pbar.update(pbar.value + 1)
        else:
            # one sub-directory per station
            for stat in gstats:
                stat.create_ts(
                    dir=dir.joinpath(str(stat.id)),
                    period=period,
                    kinds=kinds,
                    agg_to=agg_to,
                    r_r0=r_r0,
                    split_date=split_date,
                    nas_allowed=nas_allowed,
                    add_na_share=add_na_share,
                    add_t_min=add_t_min,
                    add_t_max=add_t_max,
                    **kwargs)
                pbar.variables["last_station"] = stat.id
                pbar.update(pbar.value + 1)

        # get size of output file
        if dir.suffix == ".zip":
            out_size = dir.stat().st_size
        else:
            out_size = sum(
                f.stat().st_size for f in dir.glob('**/*') if f.is_file())

        # save needed time to db
        # (statistics table; only writable with superuser rights)
        if db_engine.is_superuser:
            sql_save_time = """
                INSERT INTO needed_download_time(timestamp, quantity, aggregate, timespan, zip, pc, duration, output_size)
                VALUES (now(), '{quantity}', '{agg_to}', '{timespan}', '{zip}', '{pc}', '{duration}', '{out_size}');
            """.format(
                quantity=len(stids),
                agg_to=agg_to,
                timespan=str(period.get_interval()),
                duration=str(datetime.datetime.now() - start_time),
                zip="true" if dir.suffix ==".zip" else "false",
                pc=socket.gethostname(),
                out_size=out_size)
            with db_engine.connect() as con:
                con.execute(sqltxt(sql_save_time))
                con.commit()

        # create log message
        log.debug(
            "The timeseries tables for {quantity} stations got created in {dir}".format(
                quantity=len(stids), dir=dir))
|
445
|
+
|
446
|
+
def create_roger_ts(self, dir, period=(None, None), stids="all",
|
447
|
+
kind="best", r_r0=1,
|
448
|
+
add_t_min=False, add_t_max=False,
|
449
|
+
do_toolbox_format=False, **kwargs):
|
450
|
+
"""Create the timeserie files for roger as csv.
|
451
|
+
|
452
|
+
This is only a wrapper function for create_ts with some standard settings.
|
453
|
+
|
454
|
+
Parameters
|
455
|
+
----------
|
456
|
+
dir : pathlib like object or zipfile.ZipFile
|
457
|
+
The directory or Zipfile to store the timeseries in.
|
458
|
+
If a zipfile is given a folder with the stations ID is added to the filepath.
|
459
|
+
period : TimestampPeriod like object, optional
|
460
|
+
The period for which to get the timeseries.
|
461
|
+
If (None, None) is entered, then the maximal possible period is computed.
|
462
|
+
The default is (None, None)
|
463
|
+
stids: string or list of int, optional
|
464
|
+
The Stations for which to compute.
|
465
|
+
Can either be "all", for all possible stations
|
466
|
+
or a list with the Station IDs.
|
467
|
+
The default is "all".
|
468
|
+
kind : str
|
469
|
+
The data kind to look for filled period.
|
470
|
+
Must be a column in the timeseries DB.
|
471
|
+
Must be one of "raw", "qc", "filled", "adj".
|
472
|
+
If "best" is given, then depending on the parameter of the station the best kind is selected.
|
473
|
+
For Precipitation this is "corr" and for the other this is "filled".
|
474
|
+
For the precipitation also "qn" and "corr" are valid.
|
475
|
+
r_r0 : int or float or None or pd.Series or list, optional
|
476
|
+
Should the ET timeserie contain a column with R_R0.
|
477
|
+
If None, then no column is added.
|
478
|
+
If int, then a R/R0 column is appended with this number as standard value.
|
479
|
+
If list of int or floats, then the list should have the same length as the ET-timeserie and is appended to the Timeserie.
|
480
|
+
If pd.Series, then the index should be a timestamp index. The series is then joined to the ET timeserie.
|
481
|
+
The default is 1.
|
482
|
+
add_t_min : bool, optional
|
483
|
+
Should the minimal temperature value get added?
|
484
|
+
The default is False.
|
485
|
+
add_t_max : bool, optional
|
486
|
+
Should the maximal temperature value get added?
|
487
|
+
The default is False.
|
488
|
+
do_toolbox_format : bool, optional
|
489
|
+
Should the timeseries be saved in the RoGeR toolbox format? (have a look at the RoGeR examples in https://github.com/Hydrology-IFH/roger)
|
490
|
+
The default is False.
|
491
|
+
**kwargs:
|
492
|
+
additional parameters for GroupStation.create_ts
|
493
|
+
|
494
|
+
Raises
|
495
|
+
------
|
496
|
+
Warning
|
497
|
+
If there are NAs in the timeseries or the period got changed.
|
498
|
+
"""
|
499
|
+
if do_toolbox_format:
|
500
|
+
return self.create_ts(
|
501
|
+
dir=dir, period=period, kinds=kind,
|
502
|
+
agg_to="10 min", r_r0=r_r0, stids=stids,
|
503
|
+
split_date=True, nas_allowed=False,
|
504
|
+
add_t_min=add_t_min, add_t_max=add_t_max,
|
505
|
+
file_names={"N":"PREC.txt", "T":"TA.txt", "ET":"PET.txt"},
|
506
|
+
col_names={"N":"PREC", "ET":"PET",
|
507
|
+
"T":"TA", "T_min":"TA_min", "T_max":"TA_max",
|
508
|
+
"Jahr":"YYYY", "Monat":"MM", "Tag":"DD",
|
509
|
+
"Stunde":"hh", "Minute":"mm"},
|
510
|
+
add_meta=False,
|
511
|
+
keep_date_parts=True,
|
512
|
+
**kwargs)
|
513
|
+
else:
|
514
|
+
return self.create_ts(
|
515
|
+
dir=dir, period=period, kinds=kind,
|
516
|
+
agg_to="10 min", r_r0=r_r0, stids=stids,
|
517
|
+
split_date=True, nas_allowed=False,
|
518
|
+
add_t_min=add_t_min, add_t_max=add_t_max,
|
519
|
+
**kwargs)
|