pastastore-1.7.0-py3-none-any.whl → pastastore-1.7.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pastastore/__init__.py +1 -1
- pastastore/extensions/__init__.py +14 -0
- pastastore/extensions/accessor.py +15 -0
- pastastore/extensions/hpd.py +593 -0
- pastastore/version.py +1 -1
- {pastastore-1.7.0.dist-info → pastastore-1.7.1.dist-info}/METADATA +1 -1
- pastastore-1.7.1.dist-info/RECORD +18 -0
- pastastore-1.7.0.dist-info/RECORD +0 -15
- {pastastore-1.7.0.dist-info → pastastore-1.7.1.dist-info}/LICENSE +0 -0
- {pastastore-1.7.0.dist-info → pastastore-1.7.1.dist-info}/WHEEL +0 -0
- {pastastore-1.7.0.dist-info → pastastore-1.7.1.dist-info}/top_level.txt +0 -0
pastastore/__init__.py
CHANGED
pastastore/extensions/__init__.py
ADDED
@@ -0,0 +1,14 @@
+# ruff: noqa: D104 F401
+from pastastore.extensions.accessor import (
+    register_pastastore_accessor as register_pastastore_accessor,
+)
+
+
+def activate_hydropandas_extension():
+    """Register HydroPandas extension in the PastaStore class."""
+    from pastastore.extensions.hpd import HydroPandasExtension as _
+
+    print(
+        "Registered HydroPandas extension in PastaStore class, "
+        "e.g. `pstore.hpd.download_bro_gmw()`."
+    )
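The helper above only needs to be called once per session: importing HydroPandasExtension triggers its registration, after which every PastaStore instance exposes an `hpd` attribute. The sketch below (editorial illustration, not part of the diff) shows how that is intended to be used; the connector setup, extent coordinates and tmin are assumptions for the example.

import pastastore as pst
from pastastore.extensions import activate_hydropandas_extension

# one-time activation: importing HydroPandasExtension registers the "hpd" accessor
activate_hydropandas_extension()

# any PastaStore instance now carries the accessor
conn = pst.DictConnector("example")  # assumed in-memory connector setup
pstore = pst.PastaStore(conn)

# method name taken from the printed hint; extent/tmin are example values
pstore.hpd.download_bro_gmw(
    extent=[117850, 118180, 439550, 439900],  # example extent in RD coordinates
    tmin="2020-01-01",
)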
pastastore/extensions/accessor.py
ADDED
@@ -0,0 +1,15 @@
+# ruff: noqa: D100
+from pastas.extensions.accessor import _register_accessor
+
+
+def register_pastastore_accessor(name: str):
+    """Register an extension in the PastaStore class.
+
+    Parameters
+    ----------
+    name : str
+        name of the extension to register
+    """
+    from pastastore.store import PastaStore
+
+    return _register_accessor(name, PastaStore)
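`register_pastastore_accessor` is a thin wrapper around pastas' `_register_accessor` and can be used by third-party code to attach custom accessors in the same way hpd.py does below. A hypothetical sketch, assuming a made-up accessor name "myext" and method:

from pastastore.extensions.accessor import register_pastastore_accessor


@register_pastastore_accessor("myext")  # hypothetical accessor name
class MyExtension:
    """Example extension, available as `pstore.myext` after import."""

    def __init__(self, store):
        self._store = store  # the PastaStore instance the accessor is bound to

    def n_oseries(self) -> int:
        # any method can use the wrapped store, e.g. its oseries metadata table
        return self._store.oseries.index.size

This decorator-plus-`__init__(self, store)` pattern is exactly how the `hpd` accessor in hpd.py is wired up.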
pastastore/extensions/hpd.py
ADDED
@@ -0,0 +1,593 @@
+"""HydroPandas extension for PastaStore.
+
+Features:
+
+- Add `hpd.Obs` and `hpd.ObsCollection` to PastaStore.
+- Download and store meteorological data from KNMI or groundwater observations from BRO.
+- Update currently stored (KNMI or BRO) time series from last observation to tmax.
+"""
+
+import logging
+from typing import List, Optional, Union
+
+import hydropandas as hpd
+import numpy as np
+from hydropandas.io.knmi import _check_latest_measurement_date_de_bilt, get_stations
+from pandas import DataFrame, Series, Timedelta, Timestamp
+from pastas.timeseries_utils import timestep_weighted_resample
+from tqdm.auto import tqdm
+
+from pastastore.extensions.accessor import register_pastastore_accessor
+
+logger = logging.getLogger("hydropandas_extension")
+
+
+TimeType = Optional[Union[str, Timestamp]]
+
+
+@register_pastastore_accessor("hpd")
+class HydroPandasExtension:
+    """HydroPandas extension for PastaStore.
+
+    Parameters
+    ----------
+    store: pastastore.store.PastaStore
+        PastaStore object to extend with HydroPandas functionality
+    """
+
+    def __init__(self, store):
+        """Initialize HydroPandasExtension.
+
+        Parameters
+        ----------
+        store : pastastore.store.PastaStore
+            PastaStore object to extend with HydroPandas functionality
+        """
+        self._store = store
+
+    def add_obscollection(
+        self,
+        libname: str,
+        oc: hpd.ObsCollection,
+        kind: Optional[str] = None,
+        data_column: Optional[str] = None,
+        unit_multiplier: float = 1.0,
+        update: bool = False,
+        normalize_datetime_index: bool = False,
+    ):
+        """Add an ObsCollection to the PastaStore.
+
+        Parameters
+        ----------
+        libname : str
+            Name of the library to add the ObsCollection to ["oseries", "stresses"].
+        oc : hpd.ObsCollection
+            ObsCollection to add to the store.
+        kind : str, optional
+            kind identifier for observations, by default None. Required for adding
+            stresses.
+        data_column : str, optional
+            name of column containing observation values, by default None.
+        unit_multiplier : float, optional
+            multiply unit by this value before saving it in the store
+        update : bool, optional
+            if True, update currently stored time series with new data
+        normalize_datetime_index : bool, optional
+            if True, normalize the datetime so stress value at midnight represents
+            the daily total, by default False.
+        """
+        for name, row in oc.iterrows():
+            obs = row["obs"]
+            # metadata = row.drop("obs").to_dict()
+            self.add_observation(
+                libname,
+                obs,
+                name=name,
+                kind=kind,
+                data_column=data_column,
+                unit_multiplier=unit_multiplier,
+                update=update,
+                normalize_datetime_index=normalize_datetime_index,
+            )
+
+    def add_observation(
+        self,
+        libname: str,
+        obs: hpd.Obs,
+        name: Optional[str] = None,
+        kind: Optional[str] = None,
+        data_column: Optional[str] = None,
+        unit_multiplier: float = 1.0,
+        update: bool = False,
+        normalize_datetime_index: bool = False,
+    ):
+        """Add a hydropandas observation series to the PastaStore.
+
+        Parameters
+        ----------
+        libname : str
+            Name of the library to add the observation to ["oseries", "stresses"].
+        obs : hpd.Obs
+            hydropandas observation series to add to the store.
+        name : str, optional
+            Name of the observation, by default None. If None, the name of the
+            observation is used.
+        kind : str, optional
+            kind identifier for observations, by default None. Required for adding
+            stresses.
+        data_column : str, optional
+            name of column containing observation values, by default None.
+        unit_multiplier : float, optional
+            multiply unit by this value before saving it in the store
+        update : bool, optional
+            if True, update currently stored time series with new data
+        normalize_datetime_index : bool, optional
+            if True, normalize the datetime so stress value at midnight represents
+            the daily total, by default False.
+        """
+        # if data_column is not None, use data_column
+        if data_column is not None:
+            if not obs.empty:
+                o = obs[[data_column]]
+            else:
+                o = Series()
+        elif isinstance(obs, Series):
+            o = obs
+        # else raise error
+        elif isinstance(obs, DataFrame) and (obs.columns.size > 1):
+            raise ValueError("No data_column specified and obs has multiple columns.")
+        else:
+            raise TypeError("obs must be a Series or DataFrame with a single column.")
+
+        # break if obs is empty
+        if o.empty:
+            logger.info("Observation '%s' is empty, not adding to store.", name)
+            return
+
+        if normalize_datetime_index and o.index.size > 1:
+            o = self._normalize_datetime_index(o).iloc[1:]  # remove first nan
+        elif normalize_datetime_index and o.index.size <= 1:
+            raise ValueError(
+                "Must have minimum of 2 observations for timestep_weighted_resample."
+            )
+
+        # gather metadata from obs object
+        metadata = {key: getattr(obs, key) for key in obs._metadata}
+
+        # convert np dtypes to builtins
+        for k, v in metadata.items():
+            if isinstance(v, np.integer):
+                metadata[k] = int(v)
+            elif isinstance(v, np.floating):
+                metadata[k] = float(v)
+
+        metadata.pop("name", None)
+        metadata.pop("meta", None)
+        unit = metadata.get("unit", None)
+        if unit == "m" and unit_multiplier == 1e3:
+            metadata["unit"] = "mm"
+        elif unit_multiplier != 1.0:
+            metadata["unit"] = f"{unit_multiplier:e}*{unit}"
+
+        source = metadata.get("source", "")
+        if len(source) > 0:
+            source = f"{source} "
+
+        if update:
+            action_msg = "updated in"
+        else:
+            action_msg = "added to"
+
+        if libname == "oseries":
+            self._store.upsert_oseries(o.squeeze(axis=1), name, metadata=metadata)
+            logger.info(
+                "%sobservation '%s' %s oseries library.", source, name, action_msg
+            )
+        elif libname == "stresses":
+            if kind is None:
+                raise ValueError("`kind` must be specified for stresses!")
+            self._store.upsert_stress(
+                (o * unit_multiplier).squeeze(axis=1), name, kind, metadata=metadata
+            )
+            logger.info(
+                "%sstress '%s' (kind='%s') %s stresses library.",
+                source,
+                name,
+                kind,
+                action_msg,
+            )
+        else:
+            raise ValueError("libname must be 'oseries' or 'stresses'.")
+
+    def download_knmi_precipitation(
+        self,
+        stns: Optional[list[int]] = None,
+        meteo_var: str = "RD",
+        tmin: TimeType = None,
+        tmax: TimeType = None,
+        unit_multiplier: float = 1e3,
+        fill_missing_obs: bool = True,
+        normalize_datetime_index: bool = True,
+        **kwargs,
+    ):
+        """Download precipitation data from KNMI and store in PastaStore.
+
+        Parameters
+        ----------
+        stns : list of int/str, optional
+            list of station numbers to download data for, by default None
+        meteo_var : str, optional
+            variable to download, by default "RD", valid options are ["RD", "RH"].
+        tmin : TimeType, optional
+            start time, by default None
+        tmax : TimeType, optional
+            end time, by default None
+        unit_multiplier : float, optional
+            multiply unit by this value before saving it in the store,
+            by default 1e3 to convert m to mm
+        """
+        self.download_knmi_meteo(
+            meteo_var=meteo_var,
+            kind="prec",
+            stns=stns,
+            tmin=tmin,
+            tmax=tmax,
+            unit_multiplier=unit_multiplier,
+            fill_missing_obs=fill_missing_obs,
+            normalize_datetime_index=normalize_datetime_index,
+            **kwargs,
+        )
+
+    def download_knmi_evaporation(
+        self,
+        stns: Optional[list[int]] = None,
+        meteo_var: str = "EV24",
+        tmin: TimeType = None,
+        tmax: TimeType = None,
+        unit_multiplier: float = 1e3,
+        fill_missing_obs: bool = True,
+        normalize_datetime_index: bool = True,
+        **kwargs,
+    ):
+        """Download evaporation data from KNMI and store in PastaStore.
+
+        Parameters
+        ----------
+        stns : list of int/str, optional
+            list of station numbers to download data for, by default None
+        meteo_var : str, optional
+            variable to download, by default "EV24"
+        tmin : TimeType, optional
+            start time, by default None
+        tmax : TimeType, optional
+            end time, by default None
+        unit_multiplier : float, optional
+            multiply unit by this value before saving it in the store,
+            by default 1e3 to convert m to mm
+        fill_missing_obs : bool, optional
+            if True, fill missing observations by getting observations from nearest
+            station with data.
+        normalize_datetime_index : bool, optional
+            if True, normalize the datetime so stress value at midnight represents
+            the daily total, by default True.
+        """
+        self.download_knmi_meteo(
+            meteo_var=meteo_var,
+            kind="evap",
+            stns=stns,
+            tmin=tmin,
+            tmax=tmax,
+            unit_multiplier=unit_multiplier,
+            fill_missing_obs=fill_missing_obs,
+            normalize_datetime_index=normalize_datetime_index,
+            **kwargs,
+        )
+
+    def download_knmi_meteo(
+        self,
+        meteo_var: str,
+        kind: str,
+        stns: Optional[list[int]] = None,
+        tmin: TimeType = None,
+        tmax: TimeType = None,
+        unit_multiplier: float = 1.0,
+        normalize_datetime_index: bool = True,
+        fill_missing_obs: bool = True,
+        **kwargs,
+    ):
+        """Download meteorological data from KNMI and store in PastaStore.
+
+        Parameters
+        ----------
+        meteo_var : str, optional
+            variable to download, by default "RH", valid options are
+            e.g. ["RD", "RH", "EV24", "T", "Q"].
+        kind : str
+            kind identifier for observations, usually "prec" or "evap".
+        stns : list of int/str, optional
+            list of station numbers to download data for, by default None
+        tmin : TimeType, optional
+            start time, by default None
+        tmax : TimeType, optional
+            end time, by default None
+        unit_multiplier : float, optional
+            multiply unit by this value before saving it in the store,
+            by default 1.0 (no conversion)
+        fill_missing_obs : bool, optional
+            if True, fill missing observations by getting observations from nearest
+            station with data.
+        normalize_datetime_index : bool, optional
+            if True, normalize the datetime so stress value at midnight represents
+            the daily total, by default True.
+        """
+        # get tmin/tmax if not specified
+        tmintmax = self._store.get_tmin_tmax("oseries")
+        if tmin is None:
+            tmin = tmintmax.loc[:, "tmin"].min() - Timedelta(days=10 * 365)
+        if tmax is None:
+            tmax = tmintmax.loc[:, "tmax"].max()
+
+        if stns is None:
+            locations = self._store.oseries.loc[:, ["x", "y"]]
+        else:
+            locations = None
+
+        # download data
+        knmi = hpd.read_knmi(
+            locations=locations,
+            stns=stns,
+            meteo_vars=[meteo_var],
+            starts=tmin,
+            ends=tmax,
+            fill_missing_obs=fill_missing_obs,
+            **kwargs,
+        )
+
+        # add to store
+        self.add_obscollection(
+            libname="stresses",
+            oc=knmi,
+            kind=kind,
+            data_column=meteo_var,
+            unit_multiplier=unit_multiplier,
+            update=False,
+            normalize_datetime_index=normalize_datetime_index,
+        )
+
+    def update_knmi_meteo(
+        self,
+        names: Optional[List[str]] = None,
+        tmin: TimeType = None,
+        tmax: TimeType = None,
+        fill_missing_obs: bool = True,
+        normalize_datetime_index: bool = True,
+        raise_on_error: bool = False,
+        **kwargs,
+    ):
+        """Update meteorological data from KNMI in PastaStore.
+
+        Parameters
+        ----------
+        names : list of str, optional
+            list of names of observations to update, by default None
+        tmin : TimeType, optional
+            start time, by default None, which uses current last observation timestamp
+            as tmin
+        tmax : TimeType, optional
+            end time, by default None, which defaults to today
+        fill_missing_obs : bool, optional
+            if True, fill missing observations by getting observations from nearest
+            station with data.
+        normalize_datetime_index : bool, optional
+            if True, normalize the datetime so stress value at midnight represents
+            the daily total, by default True.
+        raise_on_error : bool, optional
+            if True, raise error if an error occurs, by default False
+        **kwargs : dict, optional
+            Additional keyword arguments to pass to `hpd.read_knmi()`
+        """
+        if names is None:
+            names = self._store.stresses.loc[
+                self._store.stresses["source"] == "KNMI"
+            ].index.tolist()
+
+        tmintmax = self._store.get_tmin_tmax("stresses", names=names)
+
+        if tmax is not None:
+            if tmintmax["tmax"].min() >= Timestamp(tmax):
+                logger.info(f"All KNMI stresses are up to date till {tmax}.")
+                return
+
+        try:
+            maxtmax_rd = _check_latest_measurement_date_de_bilt("RD")
+            maxtmax_ev24 = _check_latest_measurement_date_de_bilt("EV24")
+        except Exception as e:
+            # otherwise use maxtmax 28 days (4 weeks) prior to today
+            logger.warning(
+                "Could not check latest measurement date in De Bilt: %s" % str(e)
+            )
+            maxtmax_rd = maxtmax_ev24 = Timestamp.today() - Timedelta(days=28)
+            logger.info(
+                "Using 28 days (4 weeks) prior to today as maxtmax: %s."
+                % str(maxtmax_rd)
+            )
+
+        for name in tqdm(names, desc="Updating KNMI meteo stresses"):
+            meteo_var = self._store.stresses.loc[name, "meteo_var"]
+            if meteo_var == "RD":
+                maxtmax = maxtmax_rd
+            elif meteo_var == "EV24":
+                maxtmax = maxtmax_ev24
+            else:
+                maxtmax = maxtmax_rd
+
+            # 1 day extra to ensure computation of daily totals using
+            # timestep_weighted_resample
+            if tmin is None:
+                itmin = tmintmax.loc[name, "tmax"] - Timedelta(days=1)
+            else:
+                itmin = tmin - Timedelta(days=1)
+
+            # ensure at least 2 observations
+            if itmin >= (maxtmax - Timedelta(days=1)):
+                logger.debug("KNMI %s is already up to date." % name)
+                continue
+
+            if tmax is None:
+                itmax = maxtmax
+            else:
+                itmax = Timestamp(tmax)
+
+            # fix for duplicate station entry in metadata:
+            stress_station = (
+                self._store.stresses.at[name, "station"]
+                if "station" in self._store.stresses.columns
+                else None
+            )
+            if stress_station is not None and not isinstance(
+                stress_station, (int, np.integer)
+            ):
+                stress_station = stress_station.squeeze().unique().item()
+
+            unit = self._store.stresses.loc[name, "unit"]
+            kind = self._store.stresses.loc[name, "kind"]
+            if stress_station is not None:
+                stn = stress_station
+            else:
+                stns = get_stations(meteo_var)
+                stn_name = name.split("_")[-1].lower()
+                mask = stns["name"].str.lower().str.replace(" ", "-") == stn_name
+                if not mask.any():
+                    logger.warning(
+                        "Station '%s' not found in list of KNMI %s stations."
+                        % (stn_name, meteo_var)
+                    )
+                    continue
+                stn = stns.loc[mask].index[0]
+
+            if unit == "mm":
+                unit_multiplier = 1e3
+            else:
+                unit_multiplier = 1.0
+
+            logger.debug("Updating KNMI %s from %s to %s" % (name, itmin, itmax))
+            knmi = hpd.read_knmi(
+                stns=[stn],
+                meteo_vars=[meteo_var],
+                starts=itmin,
+                ends=itmax,
+                fill_missing_obs=fill_missing_obs,
+                **kwargs,
+            )
+            obs = knmi["obs"].iloc[0]
+
+            try:
+                self.add_observation(
+                    "stresses",
+                    obs,
+                    name=name,
+                    kind=kind,
+                    data_column=meteo_var,
+                    unit_multiplier=unit_multiplier,
+                    update=True,
+                    normalize_datetime_index=normalize_datetime_index,
+                )
+            except ValueError as e:
+                logger.error("Error updating KNMI %s: %s" % (name, str(e)))
+                if raise_on_error:
+                    raise e
+
+    @staticmethod
+    def _normalize_datetime_index(obs):
+        """Normalize observation datetime index (i.e. set observation time to midnight).
+
+        Parameters
+        ----------
+        obs : pandas.Series
+            observation series to normalize
+
+        Returns
+        -------
+        hpd.Obs
+            observation series with normalized datetime index
+        """
+        if isinstance(obs, hpd.Obs):
+            metadata = {k: getattr(obs, k) for k in obs._metadata}
+        else:
+            metadata = {}
+        return obs.__class__(
+            timestep_weighted_resample(
+                obs,
+                obs.index.normalize(),
+            ).rename(obs.name),
+            **metadata,
+        )
+
+    def download_bro_gmw(
+        self,
+        extent: Optional[List[float]] = None,
+        tmin: TimeType = None,
+        tmax: TimeType = None,
+        update: bool = False,
+        **kwargs,
+    ):
+        """Download groundwater monitoring well observations from BRO.
+
+        Parameters
+        ----------
+        extent: tuple, optional
+            Extent of the area to download observations from.
+        tmin: pandas.Timestamp, optional
+            Start date of the observations to download.
+        tmax: pandas.Timestamp, optional
+            End date of the observations to download.
+        **kwargs: dict, optional
+            Additional keyword arguments to pass to `hpd.read_bro()`
+        """
+        bro = hpd.read_bro(
+            extent=extent,
+            tmin=tmin,
+            tmax=tmax,
+            **kwargs,
+        )
+        self.add_obscollection("oseries", bro, data_column="values", update=update)
+
+    def update_bro_gmw(
+        self,
+        names: Optional[List[str]] = None,
+        tmin: TimeType = None,
+        tmax: TimeType = None,
+        **kwargs,
+    ):
+        """Update groundwater monitoring well observations from BRO.
+
+        Parameters
+        ----------
+        names : list of str, optional
+            list of names of observations to update, by default None which updates all
+            stored oseries.
+        tmin : TimeType, optional
+            start time, by default None, which uses current last observation timestamp
+            as tmin
+        tmax : TimeType, optional
+            end time, by default None, which defaults to today
+        **kwargs : dict, optional
+            Additional keyword arguments to pass to `hpd.GroundwaterObs.from_bro()`
+        """
+        if names is None:
+            names = self._store.oseries.index.to_list()
+
+        tmintmax = self._store.get_tmin_tmax("oseries")
+
+        for obsnam in tqdm(names, desc="Updating BRO oseries"):
+            bro_id, tube_number = obsnam.split("_")
+
+            if tmin is None:
+                _, tmin = tmintmax.loc[obsnam]  # tmin is stored tmax
+
+            obs = hpd.GroundwaterObs.from_bro(
+                bro_id, int(tube_number), tmin=tmin, tmax=tmax, **kwargs
+            )
+            self.add_observation(
+                "oseries", obs, name=obsnam, data_column="values", update=True
+            )
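Taken together, the new extension supports a download-once, update-later workflow. The sketch below (editorial illustration, not part of the diff) strings the hpd.py methods together; it assumes `pstore` is a PastaStore with the extension activated and oseries (including x/y coordinates) already stored, and the extent values are example coordinates.

# download precipitation (RD) and evaporation (EV24) for all oseries locations;
# unit_multiplier=1e3 converts KNMI's metres to millimetres before storing
pstore.hpd.download_knmi_precipitation(meteo_var="RD", unit_multiplier=1e3)
pstore.hpd.download_knmi_evaporation(meteo_var="EV24", unit_multiplier=1e3)

# download groundwater heads from BRO within an extent (example coordinates)
pstore.hpd.download_bro_gmw(extent=[117850, 118180, 439550, 439900])

# later: extend the stored series from their last timestamp to the newest data
pstore.hpd.update_knmi_meteo()  # checks De Bilt for the latest available date
pstore.hpd.update_bro_gmw()     # tmin defaults to each series' stored tmax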
pastastore/version.py
CHANGED
@@ -9,7 +9,7 @@ PASTAS_VERSION = parse_version(ps.__version__)
 PASTAS_LEQ_022 = PASTAS_VERSION <= parse_version("0.22.0")
 PASTAS_GEQ_150 = PASTAS_VERSION >= parse_version("1.5.0")
 
-__version__ = "1.7.0"
+__version__ = "1.7.1"
 
 
 def show_versions(optional=False) -> None:
{pastastore-1.7.0.dist-info → pastastore-1.7.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pastastore
-Version: 1.7.0
+Version: 1.7.1
 Summary: Tools for managing Pastas time series models.
 Author: D.A. Brakenhoff
 Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
pastastore-1.7.1.dist-info/RECORD
ADDED
@@ -0,0 +1,18 @@
+pastastore/__init__.py,sha256=wpGHiNYYS2mAJIx6HjFPajUubBU4A6C0XP_L68OS_LY,281
+pastastore/base.py,sha256=LLSg8CaKQDoRV0aOWQCSXTYQkBDH9t-xkhgLxiPxQ9Y,67333
+pastastore/connectors.py,sha256=YK3I_Jb2uNwzBQvN2VwZvmTRfPeUETW-4ddcFSWkHVw,16820
+pastastore/datasets.py,sha256=FHVfmKqb8beEs9NONsWrCoJY37BmlvFLSEQ1VAFmE8A,6415
+pastastore/plotting.py,sha256=t6gEeHVGzrwvM6q1l8V3OkklpU75O2Y4h6nKEHRWdjo,46416
+pastastore/store.py,sha256=xbv1prv6QqYj8M-2c77CT0ZQejjmNSldpuqu_M4WxoU,60906
+pastastore/styling.py,sha256=4xAY0FmhKrvmAGIuoMM7Uucww_X4KAxTpEoHlsxMldc,2280
+pastastore/util.py,sha256=iXHoGHfK6VDbUpufNsnzdV71oBVp-koZUD4VJj6MOwo,28250
+pastastore/version.py,sha256=nDiQD4EnDPyhOloSR17RfiDAMPuUxG1wLT1hQr1pU0s,1205
+pastastore/yaml_interface.py,sha256=MddELxWe8_aqJRMUydOCbjoU1-ZodzxFKYnAaqJ5SqA,29947
+pastastore/extensions/__init__.py,sha256=lCN9xfX1qefUzUbE2FQ12c6NjLbf5HoNo-D8cGb5CTw,461
+pastastore/extensions/accessor.py,sha256=kftQM6dqMDoySbyTKcvmkjC5gJRp465KA18G4NVXUO0,367
+pastastore/extensions/hpd.py,sha256=E7YdmndaagXjVcRxHuFr1YDrIoEUf-R78YQagHUtNvo,21097
+pastastore-1.7.1.dist-info/LICENSE,sha256=DtHftfUEm99KzgwLr3rQUTg8H3kAS0Z-p5WWJgLf_OY,1082
+pastastore-1.7.1.dist-info/METADATA,sha256=E1nuAqgM1KnHIIRNUjxduDvd7k8vsR1xQcY5nvR0iGA,8021
+pastastore-1.7.1.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+pastastore-1.7.1.dist-info/top_level.txt,sha256=QKfonr1KJZN46MFsj8eGRBw9Mg-jO-HFvgE2orVX7Sk,11
+pastastore-1.7.1.dist-info/RECORD,,
pastastore-1.7.0.dist-info/RECORD
REMOVED
@@ -1,15 +0,0 @@
-pastastore/__init__.py,sha256=l6zRpDO0j6MIrfdljCTbkF70bt-GFlPseBd4IlmaC-o,269
-pastastore/base.py,sha256=LLSg8CaKQDoRV0aOWQCSXTYQkBDH9t-xkhgLxiPxQ9Y,67333
-pastastore/connectors.py,sha256=YK3I_Jb2uNwzBQvN2VwZvmTRfPeUETW-4ddcFSWkHVw,16820
-pastastore/datasets.py,sha256=FHVfmKqb8beEs9NONsWrCoJY37BmlvFLSEQ1VAFmE8A,6415
-pastastore/plotting.py,sha256=t6gEeHVGzrwvM6q1l8V3OkklpU75O2Y4h6nKEHRWdjo,46416
-pastastore/store.py,sha256=xbv1prv6QqYj8M-2c77CT0ZQejjmNSldpuqu_M4WxoU,60906
-pastastore/styling.py,sha256=4xAY0FmhKrvmAGIuoMM7Uucww_X4KAxTpEoHlsxMldc,2280
-pastastore/util.py,sha256=iXHoGHfK6VDbUpufNsnzdV71oBVp-koZUD4VJj6MOwo,28250
-pastastore/version.py,sha256=lo0Pof9TIeRN4t2EblhGJbu04oz9H0QT_FafAlkwfDE,1205
-pastastore/yaml_interface.py,sha256=MddELxWe8_aqJRMUydOCbjoU1-ZodzxFKYnAaqJ5SqA,29947
-pastastore-1.7.0.dist-info/LICENSE,sha256=DtHftfUEm99KzgwLr3rQUTg8H3kAS0Z-p5WWJgLf_OY,1082
-pastastore-1.7.0.dist-info/METADATA,sha256=GX4vWuy91h3-vjiJxy0oHph1_wPCqeHJ8tTFK3lBiZU,8021
-pastastore-1.7.0.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
-pastastore-1.7.0.dist-info/top_level.txt,sha256=QKfonr1KJZN46MFsj8eGRBw9Mg-jO-HFvgE2orVX7Sk,11
-pastastore-1.7.0.dist-info/RECORD,,
{pastastore-1.7.0.dist-info → pastastore-1.7.1.dist-info}/LICENSE
File without changes
{pastastore-1.7.0.dist-info → pastastore-1.7.1.dist-info}/WHEEL
File without changes
{pastastore-1.7.0.dist-info → pastastore-1.7.1.dist-info}/top_level.txt
File without changes