pastastore-1.2.2-py3-none-any.whl → pastastore-1.4.0-py3-none-any.whl
This diff compares the contents of publicly released package versions from one of the supported registries and is provided for informational purposes only.
- pastastore/__init__.py +5 -4
- pastastore/base.py +21 -10
- pastastore/connectors.py +2 -2
- pastastore/plotting.py +129 -27
- pastastore/store.py +193 -18
- pastastore/styling.py +66 -0
- pastastore/util.py +38 -25
- pastastore/version.py +1 -1
- pastastore/yaml_interface.py +3 -3
- {pastastore-1.2.2.dist-info → pastastore-1.4.0.dist-info}/METADATA +5 -4
- pastastore-1.4.0.dist-info/RECORD +15 -0
- {pastastore-1.2.2.dist-info → pastastore-1.4.0.dist-info}/WHEEL +1 -1
- pastastore-1.2.2.dist-info/RECORD +0 -14
- {pastastore-1.2.2.dist-info → pastastore-1.4.0.dist-info}/LICENSE +0 -0
- {pastastore-1.2.2.dist-info → pastastore-1.4.0.dist-info}/top_level.txt +0 -0
pastastore/__init__.py
CHANGED
@@ -1,10 +1,11 @@
-
-from
+# ruff: noqa: F401
+from pastastore import connectors, styling, util
+from pastastore.connectors import (
     ArcticConnector,
     ArcticDBConnector,
     DictConnector,
     PasConnector,
     PystoreConnector,
 )
-from .store import PastaStore
-from .version import __version__
+from pastastore.store import PastaStore
+from pastastore.version import __version__
pastastore/base.py
CHANGED
@@ -11,10 +11,10 @@ import pastas as ps
 from numpy import isin
 from packaging.version import parse as parse_version
 from pastas.io.pas import PastasEncoder
-from tqdm import tqdm
+from tqdm.auto import tqdm
 
-from .util import ItemInLibraryException, _custom_warning, validate_names
-from .version import PASTAS_LEQ_022
+from pastastore.util import ItemInLibraryException, _custom_warning, validate_names
+from pastastore.version import PASTAS_LEQ_022
 
 FrameorSeriesUnion = Union[pd.DataFrame, pd.Series]
 warnings.showwarning = _custom_warning
@@ -934,13 +934,24 @@ class BaseConnector(ABC):
         )
         if ui.lower() != "y":
             return
-
-
-
-
-
-
-
+
+        if libname == "models":
+            # also delete linked modelnames linked to oseries
+            libs = ["models", "oseries_models"]
+        else:
+            libs = [libname]
+
+        # delete items and clear caches
+        for libname in libs:
+            names = self._parse_names(None, libname)
+            for name in (
+                tqdm(names, desc=f"Deleting items from {libname}")
+                if progressbar
+                else names
+            ):
+                self._del_item(libname, name)
+            self._clear_cache(libname)
+        print(f"Emptied library {libname} in {self.name}: " f"{self.__class__}")
 
     def _iter_series(self, libname: str, names: Optional[List[str]] = None):
         """Internal method iterate over time series in library.
pastastore/connectors.py
CHANGED
@@ -8,8 +8,8 @@ from typing import Dict, Optional, Union
 import pandas as pd
 from pastas.io.pas import PastasEncoder, pastas_hook
 
-from .base import BaseConnector, ConnectorUtil, ModelAccessor
-from .util import _custom_warning
+from pastastore.base import BaseConnector, ConnectorUtil, ModelAccessor
+from pastastore.util import _custom_warning
 
 FrameorSeriesUnion = Union[pd.DataFrame, pd.Series]
 warnings.showwarning = _custom_warning
pastastore/plotting.py
CHANGED
@@ -15,8 +15,6 @@ follows::
     pstore.maps.add_background_map(ax) # for adding a background map
 """
 
-from collections.abc import Iterable
-
 import matplotlib.pyplot as plt
 import numpy as np
 import pandas as pd
@@ -52,6 +50,9 @@ class Plots:
         split=False,
         figsize=(10, 5),
         progressbar=True,
+        show_legend=True,
+        labelfunc=None,
+        legend_kwargs=None,
         **kwargs,
     ):
         """Internal method to plot time series from pastastore.
@@ -74,6 +75,13 @@ class Plots:
         progressbar : bool, optional
            show progressbar when loading time series from store,
            by default True
+        show_legend : bool, optional
+            show legend, default is True.
+        labelfunc : callable, optional
+            function to create custom labels, function should take name of time series
+            as input
+        legend_kwargs : dict, optional
+            additional arguments to pass to legend
 
         Returns
         -------
@@ -96,15 +104,11 @@ class Plots:
 
         if ax is None:
             if split:
-
+                _, axes = plt.subplots(len(names), 1, sharex=True, figsize=figsize)
             else:
-
+                _, axes = plt.subplots(1, 1, figsize=figsize)
         else:
             axes = ax
-        if isinstance(axes, Iterable):
-            fig = axes[0].figure
-        else:
-            fig = axes.figure
 
         tsdict = self.pstore.conn._get_series(
             libname, names, progressbar=progressbar, squeeze=False
@@ -116,16 +120,33 @@ class Plots:
                 iax = axes
             else:
                 iax = ax
+            if labelfunc is not None:
+                n = labelfunc(n)
             iax.plot(ts.index, ts.squeeze(), label=n, **kwargs)
-
+
+            if split and show_legend:
                 iax.legend(loc="best", fontsize="x-small")
 
-        if not split:
-
+        if not split and show_legend:
+            if legend_kwargs is None:
+                legend_kwargs = {}
+            ncol = legend_kwargs.pop("ncol", 7)
+            fontsize = legend_kwargs.pop("fontsize", "x-small")
+            axes.legend(loc=(0, 1), frameon=False, ncol=ncol, fontsize=fontsize)
 
         return axes
 
-    def oseries(
+    def oseries(
+        self,
+        names=None,
+        ax=None,
+        split=False,
+        figsize=(10, 5),
+        show_legend=True,
+        labelfunc=None,
+        legend_kwargs=None,
+        **kwargs,
+    ):
         """Plot oseries.
 
         Parameters
@@ -141,6 +162,13 @@ class Plots:
             A maximum of 20 time series is supported when split=True.
         figsize : tuple, optional
             figure size, by default (10, 5)
+        show_legend : bool, optional
+            show legend, default is True.
+        labelfunc : callable, optional
+            function to create custom labels, function should take name of time series
+            as input
+        legend_kwargs : dict, optional
+            additional arguments to pass to legend
 
         Returns
         -------
@@ -153,6 +181,9 @@ class Plots:
             ax=ax,
             split=split,
             figsize=figsize,
+            show_legend=show_legend,
+            labelfunc=labelfunc,
+            legend_kwargs=legend_kwargs,
             **kwargs,
         )
 
@@ -163,6 +194,9 @@ class Plots:
         ax=None,
         split=False,
         figsize=(10, 5),
+        show_legend=True,
+        labelfunc=None,
+        legend_kwargs=None,
         **kwargs,
     ):
         """Plot stresses.
@@ -183,6 +217,13 @@ class Plots:
            A maximum of 20 time series is supported when split=True.
         figsize : tuple, optional
            figure size, by default (10, 5)
+        show_legend : bool, optional
+            show legend, default is True.
+        labelfunc : callable, optional
+            function to create custom labels, function should take name of time series
+            as input
+        legend_kwargs : dict, optional
+            additional arguments to pass to legend
 
         Returns
         -------
@@ -203,6 +244,9 @@ class Plots:
             ax=ax,
             split=split,
             figsize=figsize,
+            show_legend=show_legend,
+            labelfunc=labelfunc,
+            legend_kwargs=legend_kwargs,
             **kwargs,
         )
 
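The hunks above route three new keyword arguments, show_legend, labelfunc and legend_kwargs, from oseries() and stresses() through to the internal _timeseries() method; note that only the ncol and fontsize keys of legend_kwargs are picked up when split=False. A minimal usage sketch, assuming an existing PastaStore called pstore and hypothetical series names:

    # relabel the series and shrink the legend to 3 columns
    ax = pstore.plots.oseries(
        names=["B27D0001_1", "B27D0002_1"],          # hypothetical oseries names
        labelfunc=lambda name: name.split("_")[0],   # derive a custom label per series
        legend_kwargs={"ncol": 3, "fontsize": "small"},
    )

    # or hide the legend altogether
    ax = pstore.plots.oseries(show_legend=False)
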
@@ -397,20 +441,31 @@ class Plots:
             linewidth=0,
             rasterized=True,
         )
+
         # make a colorbar in an ax on the
         # right side, then set the current axes to ax again
         cb = fig.colorbar(pc, ax=ax, cax=cax, extend="both")
         cb.set_ticks(bounds)
         cb.ax.set_yticklabels(labels)
         cb.ax.minorticks_off()
+
         if set_yticks:
-            ax.set_yticks(np.arange(0.5, len(series) + 0.5))
+            ax.set_yticks(np.arange(0.5, len(series) + 0.5), minor=False)
+            ax.set_yticks(np.arange(0, len(series) + 1), minor=True)
             if names is None:
                 names = [s.name for s in series]
             ax.set_yticklabels(names)
+
+            for tick in ax.yaxis.get_major_ticks():  # don't show major ytick marker
+                tick.tick1line.set_visible(False)
+
+            ax.grid(True, which="minor", axis="y")
+            ax.grid(True, which="major", axis="x")
+
         else:
             ax.set_ylabel("Timeseries (-)")
-
+            ax.grid(True, which="both")
+            ax.grid(True, which="both")
 
         return ax
 
@@ -507,6 +562,22 @@ class Plots:
 
         return ax
 
+    def compare_models(self, modelnames, ax=None, **kwargs):
+        models = self.pstore.get_models(modelnames)
+        names = []
+        onames = [iml.oseries.name for iml in models]
+        if len(np.unique(onames)) == 1:
+            for modelname in modelnames:
+                if onames[0] in modelname:
+                    names.append(modelname.replace(onames[0], ""))
+                else:
+                    names.append(modelname)
+        else:
+            names = modelnames
+        cm = ps.CompareModels(models, names=names)
+        cm.plot(**kwargs)
+        return cm
+
 
 class Maps:
     """Map Class for PastaStore.
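The new Plots.compare_models() wraps pastas.CompareModels: the selected models are loaded from the store and, when they all simulate the same oseries, that oseries name is stripped from the model names used in the comparison figure. A rough sketch, with hypothetical model names and an existing pstore:

    # compare two calibrated models of the same oseries
    cm = pstore.plots.compare_models(
        ["B27D0001_recharge", "B27D0001_recharge_well"]  # hypothetical model names
    )
    # the returned object is the pastas.CompareModels instance used for plotting
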
@@ -535,10 +606,12 @@ class Maps:
         self,
         names=None,
         kind=None,
+        extent=None,
         labels=True,
         adjust=False,
         figsize=(10, 8),
         backgroundmap=False,
+        label_kwargs=None,
         **kwargs,
     ):
         """Plot stresses locations on map.
@@ -550,6 +623,8 @@
         kind: str, optional
             if passed, only plot stresses of a specific kind, default is None
             which plots all stresses.
+        extent : list of float, optional
+            plot only stresses within extent [xmin, xmax, ymin, ymax]
         labels: bool, optional
             label models, by default True
         adjust: bool, optional
@@ -561,6 +636,8 @@
         backgroundmap: bool, optional
             if True, add background map (default CRS is EPSG:28992) with default tiles
             by OpenStreetMap.Mapnik. Default option is False.
+        label_kwargs: dict, optional
+            dictionary with keyword arguments to pass to add_labels method
 
         Returns
         -------
@@ -571,10 +648,10 @@
         --------
         self.add_background_map
         """
-
-
-
-
+        names = self.pstore.conn._parse_names(names, "stresses")
+        if extent is not None:
+            names = self.pstore.within(extent, names=names, libname="stresses")
+        df = self.pstore.stresses.loc[names]
 
         if kind is not None:
             if isinstance(kind, str):
@@ -599,7 +676,9 @@
         else:
             ax = r
         if labels:
-
+            if label_kwargs is None:
+                label_kwargs = {}
+            self.add_labels(stresses, ax, adjust=adjust, **label_kwargs)
 
         if backgroundmap:
             self.add_background_map(ax)
@@ -609,10 +688,12 @@
     def oseries(
         self,
         names=None,
+        extent=None,
         labels=True,
         adjust=False,
         figsize=(10, 8),
         backgroundmap=False,
+        label_kwargs=None,
         **kwargs,
     ):
         """Plot oseries locations on map.
@@ -621,8 +702,11 @@
         ----------
         names: list, optional
             oseries names, by default None which plots all oseries locations
-
-
+        extent : list of float, optional
+            plot only oseries within extent [xmin, xmax, ymin, ymax]
+        labels: bool or str, optional
+            label models, by default True, if passed as "grouped", only the first
+            label for each x,y-location is shown.
         adjust: bool, optional
             automated smart label placement using adjustText, by default False
         figsize: tuple, optional
@@ -630,6 +714,8 @@
         backgroundmap: bool, optional
             if True, add background map (default CRS is EPSG:28992) with default tiles
             by OpenStreetMap.Mapnik. Default option is False.
+        label_kwargs: dict, optional
+            dictionary with keyword arguments to pass to add_labels method
 
         Returns
         -------
@@ -642,6 +728,8 @@
         """
 
         names = self.pstore.conn._parse_names(names, "oseries")
+        if extent is not None:
+            names = self.pstore.within(extent, names=names)
         oseries = self.pstore.oseries.loc[names]
         mask0 = (oseries["x"] != 0.0) | (oseries["y"] != 0.0)
         r = self._plotmap_dataframe(oseries.loc[mask0], figsize=figsize, **kwargs)
@@ -650,7 +738,12 @@
         else:
             ax = r
         if labels:
-
+            if label_kwargs is None:
+                label_kwargs = {}
+            if labels == "grouped":
+                gr = oseries.sort_index().reset_index().groupby(["x", "y"])
+                oseries = oseries.loc[gr["index"].first().tolist()]
+            self.add_labels(oseries, ax, adjust=adjust, **label_kwargs)
 
         if backgroundmap:
             self.add_background_map(ax)
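Maps.stresses() and Maps.oseries() gained an extent argument (locations are filtered through the new PastaStore.within() helper, added further down in this diff) and a label_kwargs dict forwarded to add_labels(); Maps.oseries() additionally accepts labels="grouped" to label each x,y-location only once. A hedged sketch with made-up coordinates and kind:

    # [xmin, xmax, ymin, ymax], hypothetical EPSG:28992 coordinates
    extent = [118_000, 122_000, 439_000, 442_000]

    # plot only the oseries inside the extent, one label per location
    ax = pstore.maps.oseries(extent=extent, labels="grouped", label_kwargs={"fontsize": 8})

    # same extent filter for stresses of one (hypothetical) kind
    ax = pstore.maps.stresses(kind="prec", extent=extent, backgroundmap=True)
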
@@ -712,6 +805,7 @@
     def modelstat(
         self,
         statistic,
+        modelnames=None,
         label=True,
         adjust=False,
         cmap="viridis",
@@ -728,6 +822,8 @@
         ----------
         statistic: str
             name of the statistic, e.g. "evp" or "aic"
+        modelnames : list of str, optional
+            list of modelnames to include
         label: bool, optional
             label points, by default True
         adjust: bool, optional
@@ -757,7 +853,9 @@
         --------
         self.add_background_map
         """
-        statsdf = self.pstore.get_statistics(
+        statsdf = self.pstore.get_statistics(
+            [statistic], modelnames=modelnames, progressbar=False
+        ).to_frame()
 
         statsdf["oseries"] = [
             self.pstore.get_models(m, return_dict=True)["oseries"]["name"]
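Maps.modelstat() can now be limited to a subset of models via modelnames; the statistic is collected with get_statistics(..., progressbar=False) behind the scenes. For example (model names hypothetical):

    # map the explained variance percentage for two selected models only
    ax = pstore.maps.modelstat("evp", modelnames=["B27D0001_recharge", "B27D0002_recharge"])
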
@@ -1249,7 +1347,7 @@
         ctx.add_basemap(ax, source=providers[map_provider], crs=proj.srs, **kwargs)
 
     @staticmethod
-    def add_labels(df, ax, adjust=False, **kwargs):
+    def add_labels(df, ax, adjust=False, objects=None, **kwargs):
         """Add labels to points on plot.
 
         Uses dataframe index to label points.
@@ -1262,11 +1360,12 @@
             axes object to label points on
         adjust: bool
             automated smart label placement using adjustText
+        objects : list of matplotlib objects
+            use to avoid labels overlapping markers
         **kwargs:
-            keyword arguments to ax.annotate
+            keyword arguments to ax.annotate or adjusttext
         """
         stroke = [patheffects.withStroke(linewidth=3, foreground="w")]
-
         fontsize = kwargs.pop("fontsize", 10)
 
         if adjust:
@@ -1286,7 +1385,9 @@
 
             adjust_text(
                 texts,
-
+                objects=objects,
+                force_text=(0.05, 0.10),
+                **kwargs,
                 **{
                     "arrowprops": {
                         "arrowstyle": "-",
@@ -1309,4 +1410,5 @@
                     textcoords=textcoords,
                     xytext=xytext,
                     **{"path_effects": stroke},
+                    **kwargs,
                 )
pastastore/store.py
CHANGED
@@ -8,11 +8,13 @@ import pandas as pd
 import pastas as ps
 from packaging.version import parse as parse_version
 from pastas.io.pas import pastas_hook
-from tqdm import tqdm
+from tqdm.auto import tqdm
 
-from .
-from .
-from .
+from pastastore.base import BaseConnector
+from pastastore.connectors import DictConnector
+from pastastore.plotting import Maps, Plots
+from pastastore.util import _custom_warning
+from pastastore.yaml_interface import PastastoreYAML
 
 FrameorSeriesUnion = Union[pd.DataFrame, pd.Series]
 warnings.showwarning = _custom_warning
@@ -38,14 +40,19 @@ class PastaStore:
         name of the PastaStore, by default takes the name of the Connector object
     """
 
-    def __init__(
+    def __init__(
+        self,
+        connector: Optional[BaseConnector] = None,
+        name: Optional[str] = None,
+    ):
         """Initialize PastaStore for managing pastas time series and models.
 
         Parameters
         ----------
-        connector : Connector object
-            object that provides the
-
+        connector : Connector object, optional
+            object that provides the connection to the database. Default is None, which
+            will create a DictConnector. This default Connector does not store data on
+            disk.
         name : str, optional
             name of the PastaStore, if not provided uses the Connector name
         """
@@ -53,6 +60,8 @@ class PastaStore:
             raise DeprecationWarning(
                 "PastaStore expects the connector as the first argument since v1.1!"
             )
+        if connector is None:
+            connector = DictConnector("pastas_db")
         self.conn = connector
         self.name = name if name is not None else self.conn.name
         self._register_connector_methods()
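Because the connector argument is now optional, a PastaStore can be created without any arguments; data then lives in an in-memory DictConnector named "pastas_db" and is not written to disk. A quick sketch (the series and metadata are made up, and add_oseries is assumed to be one of the connector methods registered on the store):

    import pandas as pd
    import pastastore as pst

    pstore = pst.PastaStore()  # no connector -> in-memory DictConnector("pastas_db")

    # add a hypothetical observation series; nothing is persisted to disk
    oseries = pd.Series(
        [1.0, 1.1, 1.05],
        index=pd.date_range("2020-01-01", periods=3, freq="D"),
        name="obs1",
    )
    pstore.add_oseries(oseries, "obs1", metadata={"x": 100.0, "y": 200.0})
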
@@ -300,6 +309,81 @@ class PastaStore:
             data = pd.concat([data, series], axis=0)
         return data
 
+    def get_signatures(
+        self,
+        signatures=None,
+        names=None,
+        libname="oseries",
+        progressbar=False,
+        ignore_errors=False,
+    ):
+        """Get groundwater signatures. NaN-values are returned when the
+        signature could not be computed.
+
+        Parameters
+        ----------
+        signatures : list of str, optional
+            list of groundwater signatures to compute, if None all groundwater
+            signatures in ps.stats.signatures.__all__ are used, by default None
+        names : str, list of str, or None, optional
+            names of the time series, by default None which
+            uses all the time series in the library
+        libname : str
+            name of the library containing the time series
+            ('oseries' or 'stresses'), by default "oseries"
+        progressbar : bool, optional
+            show progressbar, by default False
+        ignore_errors : bool, optional
+            ignore errors when True, i.e. when non-existent timeseries is
+            encountered in names, by default False
+
+        Returns
+        -------
+        signatures_df : pandas.DataFrame
+            DataFrame containing the signatures (columns) per time series (rows)
+        """
+        names = self.conn._parse_names(names, libname=libname)
+
+        if signatures is None:
+            signatures = ps.stats.signatures.__all__.copy()
+
+        # create dataframe for results
+        signatures_df = pd.DataFrame(index=names, columns=signatures, data=np.nan)
+
+        # loop through oseries names
+        desc = "Get groundwater signatures"
+        for name in tqdm(names, desc=desc) if progressbar else names:
+            try:
+                if libname == "oseries":
+                    s = self.conn.get_oseries(name)
+                else:
+                    s = self.conn.get_stresses(name)
+            except Exception as e:
+                if ignore_errors:
+                    signatures_df.loc[name, :] = np.nan
+                    continue
+                else:
+                    raise e
+
+            try:
+                i_signatures = ps.stats.signatures.summary(s.squeeze(), signatures)
+            except Exception as e:
+                if ignore_errors:
+                    i_signatures = []
+                    for signature in signatures:
+                        try:
+                            sign_val = getattr(ps.stats.signatures, signature)(
+                                s.squeeze()
+                            )
+                        except Exception as _:
+                            sign_val = np.nan
+                        i_signatures.append(sign_val)
+                else:
+                    raise e
+            signatures_df.loc[name, signatures] = i_signatures.squeeze()
+
+        return signatures_df
+
     def get_tmin_tmax(self, libname, names=None, progressbar=False):
         """Get tmin and tmax for time series.
 
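A short sketch of the new get_signatures() method: with signatures=None every signature listed in pastas.stats.signatures.__all__ is computed, and ignore_errors=True fills failures with NaN instead of raising (series names are hypothetical):

    import pastas as ps

    # all available signatures for every stored oseries
    sigs = pstore.get_signatures(progressbar=True)

    # a subset of signatures for a few series, skipping series that fail
    subset = ps.stats.signatures.__all__[:3]
    sigs = pstore.get_signatures(signatures=subset, names=["obs1", "obs2"], ignore_errors=True)
    # -> DataFrame with one row per series and one column per signature
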
@@ -334,6 +418,23 @@ class PastaStore:
                 tmintmax.loc[n, "tmax"] = s.last_valid_index()
         return tmintmax
 
+    def get_extent(self, libname, names=None, buffer=0.0):
+        names = self.conn._parse_names(names, libname=libname)
+        if libname in ["oseries", "stresses"]:
+            df = getattr(self, libname)
+        elif libname == "models":
+            df = self.oseries
+        else:
+            raise ValueError(f"Cannot get extent for library '{libname}'.")
+
+        extent = [
+            df.loc[names, "x"].min() - buffer,
+            df.loc[names, "x"].max() + buffer,
+            df.loc[names, "y"].min() - buffer,
+            df.loc[names, "y"].max() + buffer,
+        ]
+        return extent
+
     def get_parameters(
         self,
         parameters: Optional[List[str]] = None,
@@ -428,13 +529,13 @@
 
         modelnames = self.conn._parse_names(modelnames, libname="models")
 
-        # create dataframe for results
-        s = pd.DataFrame(index=modelnames, columns=statistics, data=np.nan)
-
         # if statistics is str
         if isinstance(statistics, str):
             statistics = [statistics]
 
+        # create dataframe for results
+        s = pd.DataFrame(index=modelnames, columns=statistics, data=np.nan)
+
         # loop through model names
         desc = "Get model statistics"
         for mlname in tqdm(modelnames, desc=desc) if progressbar else modelnames:
@@ -836,7 +937,7 @@
     def from_zip(
         cls,
         fname: str,
-        conn,
+        conn: Optional[BaseConnector] = None,
         storename: Optional[str] = None,
         progressbar: bool = True,
     ):
@@ -846,8 +947,9 @@
         ----------
         fname : str
             pathname of zipfile
-        conn : Connector object
-            connector for storing loaded data
+        conn : Connector object, optional
+            connector for storing loaded data, default is None which creates a
+            DictConnector. This Connector does not store data on disk.
         storename : str, optional
             name of the PastaStore, by default None, which
             defaults to the name of the Connector.
@@ -861,6 +963,9 @@
         """
         from zipfile import ZipFile
 
+        if conn is None:
+            conn = DictConnector("pastas_db")
+
         with ZipFile(fname, "r") as archive:
             namelist = [
                 fi for fi in archive.namelist() if not fi.endswith("_meta.json")
@@ -868,7 +973,7 @@
             for f in tqdm(namelist, desc="Reading zip") if progressbar else namelist:
                 libname, fjson = os.path.split(f)
                 if libname in ["stresses", "oseries"]:
-                    s = pd.read_json(archive.open(f), orient="columns")
+                    s = pd.read_json(archive.open(f), dtype=float, orient="columns")
                     if not isinstance(s.index, pd.DatetimeIndex):
                         s.index = pd.to_datetime(s.index, unit="ms")
                     s = s.sort_index()
@@ -886,8 +991,9 @@
         libname: str,
         s: Optional[Union[list, str]] = None,
         case_sensitive: bool = True,
+        sort=True,
     ):
-        """Search for names of time series or models starting with s
+        """Search for names of time series or models starting with `s`.
 
         Parameters
         ----------
@@ -897,6 +1003,8 @@
             find names with part of this string or strings in list
         case_sensitive : bool, optional
             whether search should be case sensitive, by default True
+        sort : bool, optional
+            sort list of names
 
         Returns
         -------
@@ -926,7 +1034,8 @@
             else:
                 m = np.append(m, [n for n in lib_names if sub.lower() in n.lower()])
         matches = list(np.unique(m))
-
+        if sort:
+            matches.sort()
         return matches
 
     def get_model_timeseries_names(
@@ -983,9 +1092,75 @@
                 structure.loc[mlnam, pnam] = 1
                 structure.loc[mlnam, enam] = 1
             elif "stress" in sm:
-
+                smstress = sm["stress"]
+                if isinstance(smstress, dict):
+                    smstress = [smstress]
+                for s in smstress:
                     structure.loc[mlnam, s["name"]] = 1
         if dropna:
             return structure.dropna(how="all", axis=1)
         else:
             return structure
+
+    def apply(self, libname, func, names=None, progressbar=True):
+        """Apply function to items in library.
+
+        Supported libraries are oseries, stresses, and models.
+
+        Parameters
+        ----------
+        libname : str
+            library name, supports "oseries", "stresses" and "models"
+        func : callable
+            function that accepts items from one of the supported libraries as input
+        names : str, list of str, optional
+            apply function to these names, by default None which loops over all stored
+            items in library
+        progressbar : bool, optional
+            show progressbar, by default True
+
+        Returns
+        -------
+        dict
+            dict of results of func, with names as keys and results as values
+        """
+        names = self.conn._parse_names(names, libname)
+        result = {}
+        if libname not in ("oseries", "stresses", "models"):
+            raise ValueError(
+                "'libname' must be one of ['oseries', 'stresses', 'models']!"
+            )
+        getter = getattr(self.conn, f"get_{libname}")
+        for n in (
+            tqdm(names, desc=f"Applying {func.__name__}") if progressbar else names
+        ):
+            result[n] = func(getter(n))
+        return result
+
+    def within(self, extent, names=None, libname="oseries"):
+        xmin, xmax, ymin, ymax = extent
+        names = self.conn._parse_names(names, libname)
+        if libname == "oseries":
+            df = self.oseries.loc[names]
+        elif libname == "stresses":
+            df = self.stresses.loc[names]
+        elif libname == "models":
+            onames = np.unique(
+                [
+                    self.get_models(modelname, return_dict=True)["oseries"]["name"]
+                    for modelname in names
+                ]
+            )
+            df = self.oseries.loc[onames]
+        else:
+            raise ValueError(
+                "libname must be one of ['oseries', 'stresses', 'models']"
+                f", got '{libname}'"
+            )
+        mask = (
+            (df["x"] <= xmax)
+            & (df["x"] >= xmin)
+            & (df["y"] >= ymin)
+            & (df["y"] <= ymax)
+        )
+        return df.loc[mask].index.tolist()
pastastore/styling.py
ADDED
@@ -0,0 +1,66 @@
+import matplotlib as mpl
+import matplotlib.pyplot as plt
+import numpy as np
+
+
+def float_styler(val, norm, cmap=None):
+    """Style float values in DataFrame.
+
+    Parameters
+    ----------
+    val : float
+        value in cell
+    norm : matplotlib.colors.Normalize
+        normalizer to map values to range(0, 1)
+    cmap : colormap, optional
+        colormap to use, by default None, which uses RdYlBu
+
+    Returns
+    -------
+    str
+        css value pairs for styling dataframe
+
+    Usage
+    -----
+    Given some dataframe
+
+    >>> df.map(float_styler, subset=["some column"], norm=norm, cmap=cmap)
+
+    """
+    if cmap is None:
+        cmap = plt.get_cmap("RdYlBu")
+    bg = cmap(norm(val))
+    color = mpl.colors.rgb2hex(bg)
+    c = "White" if np.mean(bg[:3]) < 0.4 else "Black"
+    return f"background-color: {color}; color: {c}"
+
+
+def boolean_styler(b):
+    """Style boolean values in DataFrame.
+
+    Parameters
+    ----------
+    b : bool
+        value in cell
+
+    Returns
+    -------
+    str
+        css value pairs for styling dataframe
+
+    Usage
+    -----
+    Given some dataframe
+
+    >>> df.map(boolean_styler, subset=["some column"])
+    """
+    if b:
+        return (
+            f"background-color: {mpl.colors.rgb2hex((231/255, 255/255, 239/255))}; "
+            "color: darkgreen"
+        )
+    else:
+        return (
+            f"background-color: {mpl.colors.rgb2hex((255/255, 238/255, 238/255))}; "
+            "color: darkred"
+        )
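The new styling helpers return CSS strings, so they are intended for the pandas Styler API (Styler.applymap, or Styler.map on newer pandas versions). A hedged sketch with a made-up DataFrame:

    import matplotlib as mpl
    import pandas as pd

    from pastastore.styling import boolean_styler, float_styler

    df = pd.DataFrame({"evp": [85.0, 42.0], "check_passed": [True, False]})  # made-up data

    # map evp values onto the RdYlBu colormap between 0 and 100
    norm = mpl.colors.Normalize(vmin=0.0, vmax=100.0)
    styled = (
        df.style.applymap(float_styler, subset=["evp"], norm=norm)
        .applymap(boolean_styler, subset=["check_passed"])
    )
    styled  # renders the colored table in a notebook
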
pastastore/util.py
CHANGED
@@ -6,9 +6,9 @@ import pandas as pd
 from numpy.lib._iotools import NameValidator
 from pandas.testing import assert_series_equal
 from pastas.stats.tests import runs_test, stoffer_toloi
-from tqdm import tqdm
+from tqdm.auto import tqdm
 
-from .version import PASTAS_LEQ_022
+from pastastore.version import PASTAS_LEQ_022
 
 
 def _custom_warning(message, category=UserWarning, filename="", lineno=-1, *args):
@@ -335,9 +335,11 @@ def compare_models(ml1, ml2, stats=None, detailed_comparison=False):
         try:
             assert_series_equal(
                 oso,
-
-
-
+                (
+                    ml.oseries.series_original
+                    if PASTAS_LEQ_022
+                    else ml.oseries._series_original
+                ),
             )
             compare_oso = True
         except (ValueError, AssertionError):
@@ -378,9 +380,9 @@ def compare_models(ml1, ml2, stats=None, detailed_comparison=False):
         for ts in stresses:
             df.loc[f"- time series: '{ts.name}'"] = ts.name
             for tsk in ts.settings.keys():
-                df.loc[
-
-
+                df.loc[f" - {ts.name} settings: {tsk}", f"model {i}"] = (
+                    ts.settings[tsk]
+                )
 
         if i == 0:
             if PASTAS_LEQ_022:
@@ -403,9 +405,11 @@ def compare_models(ml1, ml2, stats=None, detailed_comparison=False):
             try:
                 assert_series_equal(
                     so1[counter],
-
-
-
+                    (
+                        ts.series_original
+                        if PASTAS_LEQ_022
+                        else ts._series_original
+                    ),
                 )
                 compare_so1 = True
             except (ValueError, AssertionError):
@@ -703,7 +707,9 @@ def frontiers_checks(
                     check_tmem_passed,
                 )
             else:
-                tmem = ml.get_response_tmax(sm_name)
+                tmem = ml.get_response_tmax(sm_name, cutoff=check3_cutoff)
+                if tmem is None:  # no rfunc in stressmodel
+                    tmem = 0
                 check_tmem_passed = tmem < len_oseries_calib / 2
                 checks.loc[f"calib_period > 2*t_mem_95%: {sm_name}", :] = (
                     tmem,
@@ -736,23 +742,30 @@ def frontiers_checks(
                     "(unit head)/(unit well stress)",
                     check_gain_passed,
                 )
+                continue
+            elif sm._name == "LinearTrend":
+                gain = ml.parameters.loc[f"{sm_name}_a", "optimal"]
+                gain_std = ml.parameters.loc[f"{sm_name}_a", "stderr"]
+            elif sm._name == "StepModel":
+                gain = ml.parameters.loc[f"{sm_name}_d", "optimal"]
+                gain_std = ml.parameters.loc[f"{sm_name}_d", "stderr"]
             else:
                 gain = ml.parameters.loc[f"{sm_name}_A", "optimal"]
                 gain_std = ml.parameters.loc[f"{sm_name}_A", "stderr"]
-
-
-
-
-
-
-
+
+            if gain_std is None:
+                gain_std = np.nan
+                check_gain_passed = pd.NA
+            elif np.isnan(gain_std):
+                check_gain_passed = pd.NA
+            else:
                 check_gain_passed = np.abs(gain) > 2 * gain_std
-
-
-
-
-
-
+            checks.loc[f"gain > 2*std: {sm_name}", :] = (
+                gain,
+                2 * gain_std,
+                "(unit head)/(unit well stress)",
+                check_gain_passed,
+            )
 
             # Check 5 - Parameter Bounds
             if check5_parambounds:
pastastore/version.py
CHANGED
pastastore/yaml_interface.py
CHANGED
@@ -2,14 +2,14 @@ import datetime
 import logging
 import os
 from copy import deepcopy
-from typing import Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional, Union
 
 import numpy as np
 import pandas as pd
 import pastas as ps
 import yaml
 
-from .version import PASTAS_LEQ_022
+from pastastore.version import PASTAS_LEQ_022
 
 ps.logger.setLevel("ERROR")
 
@@ -17,7 +17,7 @@ logging.basicConfig(level="INFO")
 logger = logging.getLogger(__name__)
 
 
-def _convert_dict_dtypes_for_yaml(d: Dict):
+def _convert_dict_dtypes_for_yaml(d: Dict[str, Any]):
     """Internal method to convert dictionary values for storing in YAML format.
 
     Parameters
{pastastore-1.2.2.dist-info → pastastore-1.4.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pastastore
-Version: 1.
+Version: 1.4.0
 Summary: Tools for managing Pastas time series models.
 Author: D.A. Brakenhoff
 Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
@@ -50,13 +50,14 @@ Classifier: Topic :: Scientific/Engineering :: Hydrology
 Requires-Python: >=3.7
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: pastas
-Requires-Dist: tqdm
+Requires-Dist: pastas >=0.13
+Requires-Dist: tqdm >=4.36
 Requires-Dist: pyyaml
 Provides-Extra: arctic
 Requires-Dist: arctic ; extra == 'arctic'
 Provides-Extra: arcticdb
 Requires-Dist: arcticdb ; extra == 'arcticdb'
+Requires-Dist: protobuf ~=4.0 ; extra == 'arcticdb'
 Provides-Extra: docs
 Requires-Dist: pastastore[optional] ; extra == 'docs'
 Requires-Dist: sphinx-rtd-theme ; extra == 'docs'
@@ -75,7 +76,7 @@ Requires-Dist: contextily ; extra == 'optional'
 Requires-Dist: pyproj ; extra == 'optional'
 Requires-Dist: adjustText ; extra == 'optional'
 Provides-Extra: pystore
-Requires-Dist: fsspec
+Requires-Dist: fsspec >=0.3.3 ; extra == 'pystore'
 Requires-Dist: python-snappy ; extra == 'pystore'
 Requires-Dist: dask[dataframe] ; extra == 'pystore'
 Provides-Extra: test
pastastore-1.4.0.dist-info/RECORD
ADDED

@@ -0,0 +1,15 @@
+pastastore/__init__.py,sha256=LmaXFyfECBeaUeAEGeYxX-SU8g7tFUJ2ojyXoj3xz4o,292
+pastastore/base.py,sha256=nPIIiDHJMDUlvPaE2dftjPRxAM2kFtC8kHXvmB9Ayw0,62631
+pastastore/connectors.py,sha256=QH6jvnhIIivn1Is9ThBeMAEcJBVnfcK46pJClGMdiBA,28747
+pastastore/datasets.py,sha256=f-92WOh2ROCxOVvKGHXjnzd66Q7hO_-BtXRfMKwg1KU,6640
+pastastore/plotting.py,sha256=DCtbl81t23Zrk5l7QqxJVwv8wqvinBqlPbaTy5Q81sg,45849
+pastastore/store.py,sha256=9gjB5vq8XhfpProUjox_YKUssHvgQYAMMIf8epnAghA,40682
+pastastore/styling.py,sha256=u2rRf1Gqq5vs2HG7E4S4wEkEczyTuvCQaoYcvrh91lk,1465
+pastastore/util.py,sha256=CuQsLE3Z7egnel55LejVLXmOswwabGJiQtm164K7gxE,30830
+pastastore/version.py,sha256=hBqb0IH800rddVduM2_nP9zwh8BNg2nMxgX4FHGuiQQ,203
+pastastore/yaml_interface.py,sha256=yNpyEHtIapx44Thv2PCm7Sc_W-reXDHjT3uRC2gDAzw,29515
+pastastore-1.4.0.dist-info/LICENSE,sha256=DtHftfUEm99KzgwLr3rQUTg8H3kAS0Z-p5WWJgLf_OY,1082
+pastastore-1.4.0.dist-info/METADATA,sha256=6ZcnAKj_NUmsvdWFpU5qsNVFO7LghNPc_PpeJhX-Ij8,7896
+pastastore-1.4.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+pastastore-1.4.0.dist-info/top_level.txt,sha256=QKfonr1KJZN46MFsj8eGRBw9Mg-jO-HFvgE2orVX7Sk,11
+pastastore-1.4.0.dist-info/RECORD,,
pastastore-1.2.2.dist-info/RECORD
DELETED

@@ -1,14 +0,0 @@
-pastastore/__init__.py,sha256=1janwX-BqYJobFi0dvVNW-zTnCUba_jxkBcT2pRj4fg,225
-pastastore/base.py,sha256=CHg7mKkdaQGTvQdVnbaPK6WslKZjkYhMMs84nrmMxvs,62290
-pastastore/connectors.py,sha256=riiSPX_N6Tcmn-_jQgDbw32hkq1iGLLHcH0U9wueMG8,28727
-pastastore/datasets.py,sha256=f-92WOh2ROCxOVvKGHXjnzd66Q7hO_-BtXRfMKwg1KU,6640
-pastastore/plotting.py,sha256=IZuri_U0b4hgvCgXCTNJv00bPK_KvD8DOPZvI-ZZNMI,41758
-pastastore/store.py,sha256=gqr1qi9sdUlmSQfkn3CC-ZKmqOro7bUw0-E-VlZRads,34126
-pastastore/util.py,sha256=lYlKrToHYh4rZHCMmVL2fp28GGAVPt7goEOb0HrmSfM,30268
-pastastore/version.py,sha256=kBMPTkZdkocSOnb2tjCjb04Zpe_3zTlbqK7OODZQYfA,203
-pastastore/yaml_interface.py,sha256=9nep4ALV9IomAeVuaafsPzlGDcmi6gL0rNHAjHiPbMc,29490
-pastastore-1.2.2.dist-info/LICENSE,sha256=DtHftfUEm99KzgwLr3rQUTg8H3kAS0Z-p5WWJgLf_OY,1082
-pastastore-1.2.2.dist-info/METADATA,sha256=kKIoYj0Nx8OD4wl9oaN8vofVWc96yJltsFnCkVETO90,7850
-pastastore-1.2.2.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
-pastastore-1.2.2.dist-info/top_level.txt,sha256=QKfonr1KJZN46MFsj8eGRBw9Mg-jO-HFvgE2orVX7Sk,11
-pastastore-1.2.2.dist-info/RECORD,,
{pastastore-1.2.2.dist-info → pastastore-1.4.0.dist-info}/LICENSE
File without changes

{pastastore-1.2.2.dist-info → pastastore-1.4.0.dist-info}/top_level.txt
File without changes