pastastore 1.2.2__tar.gz → 1.3.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pastastore-1.2.2 → pastastore-1.3.0}/PKG-INFO +33 -2
- pastastore-1.3.0/pastastore/__init__.py +11 -0
- {pastastore-1.2.2 → pastastore-1.3.0}/pastastore/base.py +21 -10
- {pastastore-1.2.2 → pastastore-1.3.0}/pastastore/connectors.py +2 -2
- {pastastore-1.2.2 → pastastore-1.3.0}/pastastore/plotting.py +21 -12
- {pastastore-1.2.2 → pastastore-1.3.0}/pastastore/store.py +157 -16
- {pastastore-1.2.2 → pastastore-1.3.0}/pastastore/util.py +24 -15
- {pastastore-1.2.2 → pastastore-1.3.0}/pastastore/version.py +1 -1
- {pastastore-1.2.2 → pastastore-1.3.0}/pastastore/yaml_interface.py +3 -3
- {pastastore-1.2.2 → pastastore-1.3.0}/pastastore.egg-info/PKG-INFO +33 -2
- {pastastore-1.2.2 → pastastore-1.3.0}/tests/test_001_import.py +1 -1
- {pastastore-1.2.2 → pastastore-1.3.0}/tests/test_002_connectors.py +4 -4
- {pastastore-1.2.2 → pastastore-1.3.0}/tests/test_003_pastastore.py +22 -4
- {pastastore-1.2.2 → pastastore-1.3.0}/tests/test_004_yaml.py +0 -1
- pastastore-1.2.2/pastastore/__init__.py +0 -10
- {pastastore-1.2.2 → pastastore-1.3.0}/LICENSE +0 -0
- {pastastore-1.2.2 → pastastore-1.3.0}/pastastore/datasets.py +0 -0
- {pastastore-1.2.2 → pastastore-1.3.0}/pastastore.egg-info/SOURCES.txt +0 -0
- {pastastore-1.2.2 → pastastore-1.3.0}/pastastore.egg-info/dependency_links.txt +0 -0
- {pastastore-1.2.2 → pastastore-1.3.0}/pastastore.egg-info/requires.txt +0 -0
- {pastastore-1.2.2 → pastastore-1.3.0}/pastastore.egg-info/top_level.txt +0 -0
- {pastastore-1.2.2 → pastastore-1.3.0}/pyproject.toml +0 -0
- {pastastore-1.2.2 → pastastore-1.3.0}/readme.md +0 -0
- {pastastore-1.2.2 → pastastore-1.3.0}/setup.cfg +0 -0
- {pastastore-1.2.2 → pastastore-1.3.0}/tests/test_005_maps_plots.py +0 -0
- {pastastore-1.2.2 → pastastore-1.3.0}/tests/test_006_benchmark.py +0 -0
```diff
--- pastastore-1.2.2/PKG-INFO
+++ pastastore-1.3.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pastastore
-Version: 1.2.2
+Version: 1.3.0
 Summary: Tools for managing Pastas time series models.
 Author: D.A. Brakenhoff
 Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
@@ -49,15 +49,46 @@ Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Topic :: Scientific/Engineering :: Hydrology
 Requires-Python: >=3.7
 Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: pastas>=0.13
+Requires-Dist: tqdm>=4.36
+Requires-Dist: pyyaml
 Provides-Extra: full
+Requires-Dist: pastastore[arcticdb,optional]; extra == "full"
 Provides-Extra: lint
+Requires-Dist: black; extra == "lint"
+Requires-Dist: flake8; extra == "lint"
+Requires-Dist: isort; extra == "lint"
 Provides-Extra: optional
+Requires-Dist: contextily; extra == "optional"
+Requires-Dist: pyproj; extra == "optional"
+Requires-Dist: adjustText; extra == "optional"
 Provides-Extra: test
+Requires-Dist: pastastore[arcticdb,lint,optional]; extra == "test"
+Requires-Dist: hydropandas[full]; extra == "test"
+Requires-Dist: coverage; extra == "test"
+Requires-Dist: codecov; extra == "test"
+Requires-Dist: pytest; extra == "test"
+Requires-Dist: pytest-cov; extra == "test"
+Requires-Dist: pytest-dependency; extra == "test"
+Requires-Dist: pytest-benchmark; extra == "test"
+Requires-Dist: codacy-coverage; extra == "test"
+Requires-Dist: lxml; extra == "test"
 Provides-Extra: pystore
+Requires-Dist: fsspec>=0.3.3; extra == "pystore"
+Requires-Dist: python-snappy; extra == "pystore"
+Requires-Dist: dask[dataframe]; extra == "pystore"
 Provides-Extra: arctic
+Requires-Dist: arctic; extra == "arctic"
 Provides-Extra: arcticdb
+Requires-Dist: arcticdb; extra == "arcticdb"
 Provides-Extra: docs
-
+Requires-Dist: pastastore[optional]; extra == "docs"
+Requires-Dist: sphinx_rtd_theme; extra == "docs"
+Requires-Dist: Ipython; extra == "docs"
+Requires-Dist: ipykernel; extra == "docs"
+Requires-Dist: nbsphinx; extra == "docs"
+Requires-Dist: nbsphinx_link; extra == "docs"
 
 
 [](https://pastastore.readthedocs.io/en/latest/?badge=latest)
```
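The new Requires-Dist entries above can be checked at runtime with the standard library; a minimal sketch using `importlib.metadata` (Python 3.8+, assuming pastastore 1.3.0 is installed):

```python
from importlib.metadata import requires, version

print(version("pastastore"))  # expected to print "1.3.0" for this release
for req in requires("pastastore"):
    # prints the Requires-Dist lines from PKG-INFO, e.g. pastas>=0.13, tqdm>=4.36, pyyaml
    print(req)
```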
```diff
--- /dev/null
+++ pastastore-1.3.0/pastastore/__init__.py
@@ -0,0 +1,11 @@
+# ruff: noqa: F401
+from pastastore import connectors, util
+from pastastore.connectors import (
+    ArcticConnector,
+    ArcticDBConnector,
+    DictConnector,
+    PasConnector,
+    PystoreConnector,
+)
+from pastastore.store import PastaStore
+from pastastore.version import __version__
```
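The new `pastastore/__init__.py` re-exports the connectors, `PastaStore`, and `__version__` at package level; a minimal usage sketch based only on the imports shown above (the store name is arbitrary):

```python
import pastastore as pst

print(pst.__version__)                  # version string from pastastore.version
conn = pst.DictConnector("example_db")  # in-memory connector, nothing written to disk
store = pst.PastaStore(conn)            # connectors and PastaStore are now top-level names
```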
```diff
--- pastastore-1.2.2/pastastore/base.py
+++ pastastore-1.3.0/pastastore/base.py
@@ -11,10 +11,10 @@ import pastas as ps
 from numpy import isin
 from packaging.version import parse as parse_version
 from pastas.io.pas import PastasEncoder
-from tqdm import tqdm
+from tqdm.auto import tqdm
 
-from .util import ItemInLibraryException, _custom_warning, validate_names
-from .version import PASTAS_LEQ_022
+from pastastore.util import ItemInLibraryException, _custom_warning, validate_names
+from pastastore.version import PASTAS_LEQ_022
 
 FrameorSeriesUnion = Union[pd.DataFrame, pd.Series]
 warnings.showwarning = _custom_warning
@@ -934,13 +934,24 @@ class BaseConnector(ABC):
         )
         if ui.lower() != "y":
             return
-
-
-
-
-
-
-
+
+        if libname == "models":
+            # also delete linked modelnames linked to oseries
+            libs = ["models", "oseries_models"]
+        else:
+            libs = [libname]
+
+        # delete items and clear caches
+        for libname in libs:
+            names = self._parse_names(None, libname)
+            for name in (
+                tqdm(names, desc=f"Deleting items from {libname}")
+                if progressbar
+                else names
+            ):
+                self._del_item(libname, name)
+            self._clear_cache(libname)
+        print(f"Emptied library {libname} in {self.name}: " f"{self.__class__}")
 
     def _iter_series(self, libname: str, names: Optional[List[str]] = None):
         """Internal method iterate over time series in library.
```
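The rewritten deletion loop combines two details visible above: `tqdm.auto.tqdm`, which picks a notebook- or console-appropriate progress bar, and wrapping the iterable only when `progressbar` is True. A standalone sketch of that pattern; the item names and the delete step are placeholders, not pastastore API:

```python
from tqdm.auto import tqdm


def delete_all(names, progressbar=True):
    """Iterate over names with an optional progress bar (illustrative only)."""
    iterator = tqdm(names, desc="Deleting items") if progressbar else names
    for name in iterator:
        print(f"deleting {name}")  # stand-in for the per-item delete call


delete_all(["oseries1", "oseries2"], progressbar=False)
```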
```diff
--- pastastore-1.2.2/pastastore/connectors.py
+++ pastastore-1.3.0/pastastore/connectors.py
@@ -8,8 +8,8 @@ from typing import Dict, Optional, Union
 import pandas as pd
 from pastas.io.pas import PastasEncoder, pastas_hook
 
-from .base import BaseConnector, ConnectorUtil, ModelAccessor
-from .util import _custom_warning
+from pastastore.base import BaseConnector, ConnectorUtil, ModelAccessor
+from pastastore.util import _custom_warning
 
 FrameorSeriesUnion = Union[pd.DataFrame, pd.Series]
 warnings.showwarning = _custom_warning
```
```diff
--- pastastore-1.2.2/pastastore/plotting.py
+++ pastastore-1.3.0/pastastore/plotting.py
@@ -14,9 +14,6 @@ follows::
     ax = pstore.maps.oseries()
     pstore.maps.add_background_map(ax)  # for adding a background map
 """
-
-from collections.abc import Iterable
-
 import matplotlib.pyplot as plt
 import numpy as np
 import pandas as pd
@@ -96,15 +93,11 @@ class Plots:
 
         if ax is None:
             if split:
-
+                _, axes = plt.subplots(len(names), 1, sharex=True, figsize=figsize)
             else:
-
+                _, axes = plt.subplots(1, 1, figsize=figsize)
         else:
             axes = ax
-        if isinstance(axes, Iterable):
-            fig = axes[0].figure
-        else:
-            fig = axes.figure
 
         tsdict = self.pstore.conn._get_series(
             libname, names, progressbar=progressbar, squeeze=False
@@ -397,20 +390,31 @@ class Plots:
            linewidth=0,
            rasterized=True,
        )
+
        # make a colorbar in an ax on the
        # right side, then set the current axes to ax again
        cb = fig.colorbar(pc, ax=ax, cax=cax, extend="both")
        cb.set_ticks(bounds)
        cb.ax.set_yticklabels(labels)
        cb.ax.minorticks_off()
+
        if set_yticks:
-            ax.set_yticks(np.arange(0.5, len(series) + 0.5))
+            ax.set_yticks(np.arange(0.5, len(series) + 0.5), minor=False)
+            ax.set_yticks(np.arange(0, len(series) + 1), minor=True)
            if names is None:
                names = [s.name for s in series]
            ax.set_yticklabels(names)
+
+            for tick in ax.yaxis.get_major_ticks():  # don't show major ytick marker
+                tick.tick1line.set_visible(False)
+
+            ax.grid(True, which="minor", axis="y")
+            ax.grid(True, which="major", axis="x")
+
        else:
            ax.set_ylabel("Timeseries (-)")
-
+            ax.grid(True, which="both")
+            ax.grid(True, which="both")
 
        return ax
 
```
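The y-axis change above labels the row centres with major ticks while placing minor ticks on the row edges, so the minor grid draws horizontal separators between series. A generic matplotlib sketch of that trick with made-up data (not pastastore code):

```python
import matplotlib.pyplot as plt
import numpy as np

names = ["series A", "series B", "series C"]
fig, ax = plt.subplots()
ax.pcolormesh(np.random.rand(len(names), 25))  # stand-in for the data-availability grid
ax.set_yticks(np.arange(0.5, len(names) + 0.5), minor=False)  # row centres: label positions
ax.set_yticks(np.arange(0, len(names) + 1), minor=True)       # row edges: separator positions
ax.set_yticklabels(names)
for tick in ax.yaxis.get_major_ticks():  # hide the major tick marks themselves
    tick.tick1line.set_visible(False)
ax.grid(True, which="minor", axis="y")
ax.grid(True, which="major", axis="x")
plt.show()
```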
```diff
--- pastastore-1.2.2/pastastore/plotting.py
+++ pastastore-1.3.0/pastastore/plotting.py
@@ -712,6 +716,7 @@ class Maps:
     def modelstat(
         self,
         statistic,
+        modelnames=None,
         label=True,
         adjust=False,
         cmap="viridis",
@@ -728,6 +733,8 @@
         ----------
         statistic: str
             name of the statistic, e.g. "evp" or "aic"
+        modelnames : list of str, optional
+            list of modelnames to include
         label: bool, optional
             label points, by default True
         adjust: bool, optional
@@ -757,7 +764,9 @@
         --------
         self.add_background_map
         """
-        statsdf = self.pstore.get_statistics(
+        statsdf = self.pstore.get_statistics(
+            [statistic], modelnames=modelnames, progressbar=False
+        ).to_frame()
 
         statsdf["oseries"] = [
             self.pstore.get_models(m, return_dict=True)["oseries"]["name"]
```
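With the new `modelnames` argument, `Maps.modelstat` can plot a statistic for a subset of models only. A hedged usage sketch in the style of the module docstring above; it assumes a populated `PastaStore` named `pstore` with solved models, that the model names below exist, and that the method returns the matplotlib Axes like the other map helpers:

```python
# plot the "evp" statistic for two selected models instead of all stored models
ax = pstore.maps.modelstat("evp", modelnames=["oseries1", "oseries2"])
pstore.maps.add_background_map(ax)  # optional background map, as in the module docstring
```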
```diff
--- pastastore-1.2.2/pastastore/store.py
+++ pastastore-1.3.0/pastastore/store.py
@@ -8,11 +8,13 @@ import pandas as pd
 import pastas as ps
 from packaging.version import parse as parse_version
 from pastas.io.pas import pastas_hook
-from tqdm import tqdm
+from tqdm.auto import tqdm
 
-from .
-from .
-from .
+from pastastore.base import BaseConnector
+from pastastore.connectors import DictConnector
+from pastastore.plotting import Maps, Plots
+from pastastore.util import _custom_warning
+from pastastore.yaml_interface import PastastoreYAML
 
 FrameorSeriesUnion = Union[pd.DataFrame, pd.Series]
 warnings.showwarning = _custom_warning
@@ -38,14 +40,19 @@ class PastaStore:
        name of the PastaStore, by default takes the name of the Connector object
    """
 
-    def __init__(
+    def __init__(
+        self,
+        connector: Optional[BaseConnector] = None,
+        name: Optional[str] = None,
+    ):
        """Initialize PastaStore for managing pastas time series and models.
 
        Parameters
        ----------
-        connector : Connector object
-            object that provides the
-
+        connector : Connector object, optional
+            object that provides the connection to the database. Default is None, which
+            will create a DictConnector. This default Connector does not store data on
+            disk.
        name : str, optional
            name of the PastaStore, if not provided uses the Connector name
        """
@@ -53,6 +60,8 @@ class PastaStore:
            raise DeprecationWarning(
                "PastaStore expects the connector as the first argument since v1.1!"
            )
+        if connector is None:
+            connector = DictConnector("pastas_db")
        self.conn = connector
        self.name = name if name is not None else self.conn.name
        self._register_connector_methods()
```
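With the change above, `PastaStore()` can be constructed without a connector; a `DictConnector` named "pastas_db" is then created, which keeps everything in memory. A minimal sketch of both forms:

```python
import pastastore as pst

store = pst.PastaStore()  # 1.3.0: falls back to DictConnector("pastas_db"), in-memory only

# the explicit, pre-1.3.0 style still works; swap in a persistent connector to store on disk
store_explicit = pst.PastaStore(pst.DictConnector("pastas_db"))
```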
```diff
--- pastastore-1.2.2/pastastore/store.py
+++ pastastore-1.3.0/pastastore/store.py
@@ -300,6 +309,81 @@
            data = pd.concat([data, series], axis=0)
        return data
 
+    def get_signatures(
+        self,
+        signatures=None,
+        names=None,
+        libname="oseries",
+        progressbar=False,
+        ignore_errors=False,
+    ):
+        """Get groundwater signatures. NaN-values are returned when the
+        signature could not be computed.
+
+        Parameters
+        ----------
+        signatures : list of str, optional
+            list of groundwater signatures to compute, if None all groundwater
+            signatures in ps.stats.signatures.__all__ are used, by default None
+        names : str, list of str, or None, optional
+            names of the time series, by default None which
+            uses all the time series in the library
+        libname : str
+            name of the library containing the time series
+            ('oseries' or 'stresses'), by default "oseries"
+        progressbar : bool, optional
+            show progressbar, by default False
+        ignore_errors : bool, optional
+            ignore errors when True, i.e. when non-existent timeseries is
+            encountered in names, by default False
+
+        Returns
+        -------
+        signatures_df : pandas.DataFrame
+            DataFrame containing the signatures (columns) per time series (rows)
+        """
+        names = self.conn._parse_names(names, libname=libname)
+
+        if signatures is None:
+            signatures = ps.stats.signatures.__all__.copy()
+
+        # create dataframe for results
+        signatures_df = pd.DataFrame(index=names, columns=signatures, data=np.nan)
+
+        # loop through oseries names
+        desc = "Get groundwater signatures"
+        for name in tqdm(names, desc=desc) if progressbar else names:
+            try:
+                if libname == "oseries":
+                    s = self.conn.get_oseries(name)
+                else:
+                    s = self.conn.get_stresses(name)
+            except Exception as e:
+                if ignore_errors:
+                    signatures_df.loc[name, :] = np.nan
+                    continue
+                else:
+                    raise e
+
+            try:
+                i_signatures = ps.stats.signatures.summary(s.squeeze(), signatures)
+            except Exception as e:
+                if ignore_errors:
+                    i_signatures = []
+                    for signature in signatures:
+                        try:
+                            sign_val = getattr(ps.stats.signatures, signature)(
+                                s.squeeze()
+                            )
+                        except Exception as _:
+                            sign_val = np.nan
+                        i_signatures.append(sign_val)
+                else:
+                    raise e
+            signatures_df.loc[name, signatures] = i_signatures
+
+        return signatures_df
+
    def get_tmin_tmax(self, libname, names=None, progressbar=False):
        """Get tmin and tmax for time series.
 
```
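A usage sketch for the new `get_signatures` method; it assumes a populated `PastaStore` named `pstore` with oseries already added:

```python
import pastas as ps

# all signatures from ps.stats.signatures.__all__ for every stored oseries
sigs = pstore.get_signatures(progressbar=True)

# a subset of signatures, skipping series that raise errors instead of failing
selected = ps.stats.signatures.__all__[:2]
subset = pstore.get_signatures(signatures=selected, ignore_errors=True)
print(subset.head())  # rows: series names, columns: signature values (NaN if not computable)
```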
```diff
--- pastastore-1.2.2/pastastore/store.py
+++ pastastore-1.3.0/pastastore/store.py
@@ -334,6 +418,23 @@
                tmintmax.loc[n, "tmax"] = s.last_valid_index()
        return tmintmax
 
+    def get_extent(self, libname, names=None, buffer=0.0):
+        names = self.conn._parse_names(names, libname=libname)
+        if libname in ["oseries", "stresses"]:
+            df = getattr(self, libname)
+        elif libname == "models":
+            df = self.oseries
+        else:
+            raise ValueError(f"Cannot get extent for library '{libname}'.")
+
+        extent = [
+            df.loc[names, "x"].min() - buffer,
+            df.loc[names, "x"].max() + buffer,
+            df.loc[names, "y"].min() - buffer,
+            df.loc[names, "y"].max() + buffer,
+        ]
+        return extent
+
    def get_parameters(
        self,
        parameters: Optional[List[str]] = None,
```
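The new `get_extent` helper returns `[xmin, xmax, ymin, ymax]` computed from the x and y columns of the oseries or stresses metadata (models fall back to the oseries coordinates). A usage sketch, assuming `pstore` holds metadata with x/y coordinates in metres:

```python
# bounding box of all observation series, padded by 250 m on every side
xmin, xmax, ymin, ymax = pstore.get_extent("oseries", buffer=250.0)

# extent of the models library, derived from the linked oseries coordinates
extent = pstore.get_extent("models")
```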
```diff
--- pastastore-1.2.2/pastastore/store.py
+++ pastastore-1.3.0/pastastore/store.py
@@ -428,13 +529,13 @@
 
        modelnames = self.conn._parse_names(modelnames, libname="models")
 
-        # create dataframe for results
-        s = pd.DataFrame(index=modelnames, columns=statistics, data=np.nan)
-
        # if statistics is str
        if isinstance(statistics, str):
            statistics = [statistics]
 
+        # create dataframe for results
+        s = pd.DataFrame(index=modelnames, columns=statistics, data=np.nan)
+
        # loop through model names
        desc = "Get model statistics"
        for mlname in tqdm(modelnames, desc=desc) if progressbar else modelnames:
@@ -836,7 +937,7 @@
    def from_zip(
        cls,
        fname: str,
-        conn,
+        conn: Optional[BaseConnector] = None,
        storename: Optional[str] = None,
        progressbar: bool = True,
    ):
@@ -846,8 +947,9 @@
        ----------
        fname : str
            pathname of zipfile
-        conn : Connector object
-            connector for storing loaded data
+        conn : Connector object, optional
+            connector for storing loaded data, default is None which creates a
+            DictConnector. This Connector does not store data on disk.
        storename : str, optional
            name of the PastaStore, by default None, which
            defaults to the name of the Connector.
@@ -861,6 +963,9 @@
        """
        from zipfile import ZipFile
 
+        if conn is None:
+            conn = DictConnector("pastas_db")
+
        with ZipFile(fname, "r") as archive:
            namelist = [
                fi for fi in archive.namelist() if not fi.endswith("_meta.json")
@@ -868,7 +973,7 @@
            for f in tqdm(namelist, desc="Reading zip") if progressbar else namelist:
                libname, fjson = os.path.split(f)
                if libname in ["stresses", "oseries"]:
-                    s = pd.read_json(archive.open(f), orient="columns")
+                    s = pd.read_json(archive.open(f), dtype=float, orient="columns")
                    if not isinstance(s.index, pd.DatetimeIndex):
                        s.index = pd.to_datetime(s.index, unit="ms")
                    s = s.sort_index()
```
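Because `conn` is now optional, `PastaStore.from_zip` can load an exported archive straight into an in-memory DictConnector; pass a persistent connector explicitly if the data should be stored on disk. A sketch with a hypothetical file name, assuming the classmethod returns the loaded PastaStore:

```python
import pastastore as pst

# reads oseries, stresses and models from the archive into a DictConnector named "pastas_db"
pstore = pst.PastaStore.from_zip("pastastore_backup.zip")
print(pstore.oseries.head())  # metadata of the loaded observation series
```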
```diff
--- pastastore-1.2.2/pastastore/store.py
+++ pastastore-1.3.0/pastastore/store.py
@@ -983,9 +1088,45 @@
                    structure.loc[mlnam, pnam] = 1
                    structure.loc[mlnam, enam] = 1
                elif "stress" in sm:
-
+                    smstress = sm["stress"]
+                    if isinstance(smstress, dict):
+                        smstress = [smstress]
+                    for s in smstress:
                        structure.loc[mlnam, s["name"]] = 1
        if dropna:
            return structure.dropna(how="all", axis=1)
        else:
            return structure
+
+    def apply(self, libname, func, names=None, progressbar=True):
+        """Apply function to items in library.
+
+        Supported libraries are oseries, stresses, and models.
+
+        Parameters
+        ----------
+        libname : str
+            library name, supports "oseries", "stresses" and "models"
+        func : callable
+            function that accepts items from one of the supported libraries as input
+        names : str, list of str, optional
+            apply function to these names, by default None which loops over all stored
+            items in library
+        progressbar : bool, optional
+            show progressbar, by default True
+
+        Returns
+        -------
+        dict
+            dict of results of func, with names as keys and results as values
+        """
+        names = self.conn._parse_names(names, libname)
+        result = {}
+        if libname not in ("oseries", "stresses", "models"):
+            raise ValueError(
+                "'libname' must be one of ['oseries', 'stresses', 'models']!"
+            )
+        getter = getattr(self.conn, f"get_{libname}")
+        for n in tqdm(names) if progressbar else names:
+            result[n] = func(getter(n))
+        return result
```
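A usage sketch for the new `apply` method, mirroring the test added further below; the parameter name is illustrative and depends on the stress models present in each pastas Model:

```python
def recharge_gain(ml):
    # func receives each stored item, here a pastas Model from the "models" library
    return ml.parameters.loc["recharge_A", "optimal"]

gains = pstore.apply("models", recharge_gain, progressbar=False)
# gains is a dict: {model name: value returned by recharge_gain}
```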
```diff
--- pastastore-1.2.2/pastastore/util.py
+++ pastastore-1.3.0/pastastore/util.py
@@ -6,9 +6,9 @@ import pandas as pd
 from numpy.lib._iotools import NameValidator
 from pandas.testing import assert_series_equal
 from pastas.stats.tests import runs_test, stoffer_toloi
-from tqdm import tqdm
+from tqdm.auto import tqdm
 
-from .version import PASTAS_LEQ_022
+from pastastore.version import PASTAS_LEQ_022
 
 
 def _custom_warning(message, category=UserWarning, filename="", lineno=-1, *args):
@@ -704,6 +704,8 @@ def frontiers_checks(
            )
        else:
            tmem = ml.get_response_tmax(sm_name)
+            if tmem is None:  # no rfunc in stressmodel
+                tmem = 0
        check_tmem_passed = tmem < len_oseries_calib / 2
        checks.loc[f"calib_period > 2*t_mem_95%: {sm_name}", :] = (
            tmem,
@@ -736,23 +738,30 @@
                    "(unit head)/(unit well stress)",
                    check_gain_passed,
                )
+                continue
+            elif sm._name == "LinearTrend":
+                gain = ml.parameters.loc[f"{sm_name}_a", "optimal"]
+                gain_std = ml.parameters.loc[f"{sm_name}_a", "stderr"]
+            elif sm._name == "StepModel":
+                gain = ml.parameters.loc[f"{sm_name}_d", "optimal"]
+                gain_std = ml.parameters.loc[f"{sm_name}_d", "stderr"]
            else:
                gain = ml.parameters.loc[f"{sm_name}_A", "optimal"]
                gain_std = ml.parameters.loc[f"{sm_name}_A", "stderr"]
-
-
-
-
-
-
-
+
+            if gain_std is None:
+                gain_std = np.nan
+                check_gain_passed = pd.NA
+            elif np.isnan(gain_std):
+                check_gain_passed = pd.NA
+            else:
                check_gain_passed = np.abs(gain) > 2 * gain_std
-
-
-
-
-
-
+            checks.loc[f"gain > 2*std: {sm_name}", :] = (
+                gain,
+                2 * gain_std,
+                "(unit head)/(unit well stress)",
+                check_gain_passed,
+            )
 
    # Check 5 - Parameter Bounds
    if check5_parambounds:
```
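The reworked gain check above treats a missing or NaN standard error as "not assessable" (pd.NA) instead of comparing against it. A standalone sketch of that decision logic in plain NumPy/pandas, separate from the pastastore function itself:

```python
import numpy as np
import pandas as pd


def gain_significant(gain, gain_std):
    """Return True/False, or pd.NA when no standard error is available."""
    if gain_std is None or np.isnan(gain_std):
        return pd.NA  # significance cannot be judged without a standard error
    return bool(np.abs(gain) > 2 * gain_std)


print(gain_significant(-3.2, 0.5))   # True: |gain| exceeds twice its standard error
print(gain_significant(-3.2, None))  # <NA>
```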
```diff
--- pastastore-1.2.2/pastastore/yaml_interface.py
+++ pastastore-1.3.0/pastastore/yaml_interface.py
@@ -2,14 +2,14 @@ import datetime
 import logging
 import os
 from copy import deepcopy
-from typing import Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional, Union
 
 import numpy as np
 import pandas as pd
 import pastas as ps
 import yaml
 
-from .version import PASTAS_LEQ_022
+from pastastore.version import PASTAS_LEQ_022
 
 ps.logger.setLevel("ERROR")
 
@@ -17,7 +17,7 @@ logging.basicConfig(level="INFO")
 logger = logging.getLogger(__name__)
 
 
-def _convert_dict_dtypes_for_yaml(d: Dict):
+def _convert_dict_dtypes_for_yaml(d: Dict[str, Any]):
     """Internal method to convert dictionary values for storing in YAML format.
 
     Parameters
```
The diff for pastastore.egg-info/PKG-INFO (+33 -2) is identical to the PKG-INFO diff shown above: the version is bumped from 1.2.2 to 1.3.0 and the same Requires-Dist entries are added for the core dependencies and the full, lint, optional, test, pystore, arctic, arcticdb, and docs extras.
```diff
--- pastastore-1.2.2/tests/test_002_connectors.py
+++ pastastore-1.3.0/tests/test_002_connectors.py
@@ -14,7 +14,7 @@ ps.set_log_level("ERROR")
 
 
 def test_get_library(conn):
-
+    _ = conn._get_library("oseries")
 
 
 def test_add_get_series(request, conn):
@@ -206,13 +206,13 @@ def test_add_stress(conn):
 @pytest.mark.dependency()
 def test_get_oseries(request, conn):
     depends(request, [f"test_add_oseries[{conn.type}]"])
-
+    _ = conn.get_oseries("oseries1")
 
 
 @pytest.mark.dependency()
 def test_get_oseries_and_metadata(request, conn):
     depends(request, [f"test_add_oseries[{conn.type}]"])
-
+    _ = conn.get_oseries("oseries1", return_metadata=True)
 
 
 @pytest.mark.dependency()
@@ -225,7 +225,7 @@ def test_get_stress(request, conn):
 @pytest.mark.dependency()
 def test_get_stress_and_metadata(request, conn):
     depends(request, [f"test_add_stress[{conn.type}]"])
-    s,
+    s, _ = conn.get_stresses("prec", return_metadata=True)
     s.name = "prec"
 
 
```
```diff
--- pastastore-1.2.2/tests/test_003_pastastore.py
+++ pastastore-1.3.0/tests/test_003_pastastore.py
@@ -39,7 +39,7 @@ def test_search(pstore):
 
 @pytest.mark.dependency()
 def test_create_model(pstore):
-
+    _ = pstore.create_model("oseries1")
 
 
 @pytest.mark.dependency()
@@ -139,7 +139,7 @@ def test_get_model(request, pstore):
             f"test_store_model_missing_series[{pstore.type}]",
         ],
     )
-
+    _ = pstore.conn.get_models("oseries1")
 
 
 @pytest.mark.dependency()
@@ -158,7 +158,7 @@ def test_del_model(request, pstore):
 
 @pytest.mark.dependency()
 def test_create_models(pstore):
-
+    _ = pstore.create_models_bulk(
         ["oseries1", "oseries2"], store=True, progressbar=False
     )
     _ = pstore.conn.models
@@ -172,6 +172,13 @@ def test_get_parameters(request, pstore):
     assert p.isna().sum().sum() == 0
 
 
+@pytest.mark.dependency()
+def test_get_signatures(request, pstore):
+    depends(request, [f"test_create_models[{pstore.type}]"])
+    s = pstore.get_signatures(progressbar=False)
+    assert s.shape[1] == len(ps.stats.signatures.__all__)
+
+
 @pytest.mark.dependency()
 def test_iter_models(request, pstore):
     depends(request, [f"test_create_models[{pstore.type}]"])
@@ -181,13 +188,24 @@ def test_iter_models(request, pstore):
 @pytest.mark.dependency()
 def test_solve_models_and_get_stats(request, pstore):
     depends(request, [f"test_create_models[{pstore.type}]"])
-
+    _ = pstore.solve_models(
         ignore_solve_errors=False, progressbar=False, store_result=True
     )
     stats = pstore.get_statistics(["evp", "aic"], progressbar=False)
     assert stats.index.size == 2
 
 
+@pytest.mark.dependency()
+def test_apply(request, pstore):
+    depends(request, [f"test_solve_models_and_get_stats[{pstore.type}]"])
+
+    def func(ml):
+        return ml.parameters.loc["recharge_A", "optimal"]
+
+    result = pstore.apply("models", func)
+    assert len(result) == 2
+
+
 @pytest.mark.dependency()
 def test_save_and_load_model(request, pstore):
     ml = pstore.create_model("oseries2")
```

The remaining files in the listing above (LICENSE, pastastore/datasets.py, the pastastore.egg-info text files, pyproject.toml, readme.md, setup.cfg, tests/test_005_maps_plots.py, and tests/test_006_benchmark.py) are unchanged between 1.2.2 and 1.3.0.