pastastore 1.4.0__py3-none-any.whl → 1.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pastastore/__init__.py +2 -4
- pastastore/base.py +204 -58
- pastastore/connectors.py +51 -30
- pastastore/datasets.py +3 -2
- pastastore/plotting.py +27 -14
- pastastore/store.py +163 -31
- pastastore/styling.py +2 -1
- pastastore/util.py +34 -6
- pastastore/version.py +32 -1
- pastastore/yaml_interface.py +33 -25
- {pastastore-1.4.0.dist-info → pastastore-1.5.0.dist-info}/METADATA +12 -11
- pastastore-1.5.0.dist-info/RECORD +15 -0
- {pastastore-1.4.0.dist-info → pastastore-1.5.0.dist-info}/WHEEL +1 -1
- pastastore-1.4.0.dist-info/RECORD +0 -15
- {pastastore-1.4.0.dist-info → pastastore-1.5.0.dist-info}/LICENSE +0 -0
- {pastastore-1.4.0.dist-info → pastastore-1.5.0.dist-info}/top_level.txt +0 -0
pastastore/store.py
CHANGED
@@ -1,7 +1,9 @@
+"""Module containing the PastaStore object for managing time series and models."""
+
 import json
 import os
 import warnings
-from typing import List, Optional, Tuple, Union
+from typing import List, Literal, Optional, Tuple, Union
 
 import numpy as np
 import pandas as pd
@@ -14,6 +16,7 @@ from pastastore.base import BaseConnector
 from pastastore.connectors import DictConnector
 from pastastore.plotting import Maps, Plots
 from pastastore.util import _custom_warning
+from pastastore.version import PASTAS_GEQ_150
 from pastastore.yaml_interface import PastastoreYAML
 
 FrameorSeriesUnion = Union[pd.DataFrame, pd.Series]
@@ -72,7 +75,7 @@ class PastaStore:
         self.yaml = PastastoreYAML(self)
 
     def _register_connector_methods(self):
-        """
+        """Register connector methods (internal method)."""
         methods = [
             func
             for func in dir(self.conn)
@@ -83,26 +86,70 @@ class PastaStore:
 
     @property
     def oseries(self):
+        """
+        Returns the oseries metadata as dataframe.
+
+        Returns
+        -------
+        oseries
+            oseries metadata as dataframe
+        """
         return self.conn.oseries
 
     @property
     def stresses(self):
+        """
+        Returns the stresses metadata as dataframe.
+
+        Returns
+        -------
+        stresses
+            stresses metadata as dataframe
+        """
         return self.conn.stresses
 
     @property
     def models(self):
+        """Return list of model names.
+
+        Returns
+        -------
+        list
+            list of model names
+        """
         return self.conn.models
 
     @property
     def oseries_names(self):
+        """Return list of oseries names.
+
+        Returns
+        -------
+        list
+            list of oseries names
+        """
         return self.conn.oseries_names
 
     @property
     def stresses_names(self):
+        """Return list of streses names.
+
+        Returns
+        -------
+        list
+            list of stresses names
+        """
         return self.conn.stresses_names
 
     @property
     def model_names(self):
+        """Return list of model names.
+
+        Returns
+        -------
+        list
+            list of model names
+        """
         return self.conn.model_names
 
     @property
@@ -111,22 +158,57 @@ class PastaStore:
 
     @property
     def n_oseries(self):
+        """Return number of oseries.
+
+        Returns
+        -------
+        int
+            number of oseries
+        """
         return self.conn.n_oseries
 
     @property
     def n_stresses(self):
+        """Return number of stresses.
+
+        Returns
+        -------
+        int
+            number of stresses
+        """
         return self.conn.n_stresses
 
     @property
     def n_models(self):
+        """Return number of models.
+
+        Returns
+        -------
+        int
+            number of models
+        """
         return self.conn.n_models
 
     @property
     def oseries_models(self):
+        """Return dictionary of models per oseries.
+
+        Returns
+        -------
+        dict
+            dictionary containing list of models (values) for each oseries (keys).
+        """
         return self.conn.oseries_models
 
     @property
     def oseries_with_models(self):
+        """Return list of oseries for which models are contained in the database.
+
+        Returns
+        -------
+        list
+            list of oseries names for which models are contained in the database.
+        """
         return self.conn.oseries_with_models
 
     def __repr__(self):
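The docstrings added above describe existing read-only properties. For reference, a minimal usage sketch, assuming a PasConnector-backed store; the database name, path and printed attributes are illustrative:

    import pastastore as pst

    # connector + store (PasConnector writes plain .pas/JSON files to disk)
    conn = pst.PasConnector("mydb", "./pastas_db")
    pstore = pst.PastaStore(conn)

    print(pstore.oseries)         # DataFrame with oseries metadata
    print(pstore.stresses_names)  # list of stress names
    print(pstore.n_models)        # number of stored models
    print(pstore.oseries_models)  # dict: oseries name -> list of model names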
@@ -136,7 +218,7 @@ class PastaStore:
     def get_oseries_distances(
         self, names: Optional[Union[list, str]] = None
     ) -> FrameorSeriesUnion:
-        """
+        """Get the distances in meters between the oseries.
 
         Parameters
         ----------
@@ -175,7 +257,7 @@ class PastaStore:
         n: int = 1,
         maxdist: Optional[float] = None,
     ) -> FrameorSeriesUnion:
-        """
+        """Get the nearest (n) oseries.
 
         Parameters
         ----------
@@ -191,7 +273,6 @@ class PastaStore:
         oseries:
             list with the names of the oseries.
         """
-
        distances = self.get_oseries_distances(names)
        if maxdist is not None:
            distances = distances.where(distances <= maxdist, np.nan)
@@ -214,8 +295,7 @@ class PastaStore:
         stresses: Optional[Union[list, str]] = None,
         kind: Optional[Union[str, List[str]]] = None,
     ) -> FrameorSeriesUnion:
-        """
-        stresses.
+        """Get the distances in meters between the oseries and stresses.
 
         Parameters
         ----------
@@ -274,7 +354,7 @@ class PastaStore:
         n: int = 1,
         maxdist: Optional[float] = None,
     ) -> FrameorSeriesUnion:
-        """
+        """Get the nearest (n) stresses of a specific kind.
 
         Parameters
         ----------
@@ -295,7 +375,6 @@ class PastaStore:
         stresses:
             list with the names of the stresses.
         """
-
         distances = self.get_distances(oseries, stresses, kind)
         if maxdist is not None:
             distances = distances.where(distances <= maxdist, np.nan)
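For context, a short sketch of the distance helpers whose summary lines are completed above; the oseries name is illustrative and the store is assumed to have x/y metadata for every series:

    # pairwise distances (in meters) between all oseries
    dist = pstore.get_oseries_distances()

    # nearest precipitation stress to one oseries, ignoring anything beyond 10 km
    nearest_prec = pstore.get_nearest_stresses("head_01", kind="prec", n=1, maxdist=10_000)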
@@ -317,8 +396,9 @@ class PastaStore:
         progressbar=False,
         ignore_errors=False,
     ):
-        """Get groundwater signatures.
-
+        """Get groundwater signatures.
+
+        NaN-values are returned when the signature cannot be computed.
 
         Parameters
         ----------
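A hedged sketch of calling this method; only the two keyword arguments visible in this hunk are used, and it is assumed that omitting names computes signatures for every oseries in the store:

    # groundwater signatures for all oseries; signatures that cannot be
    # computed are returned as NaN instead of raising
    signatures = pstore.get_signatures(progressbar=True, ignore_errors=True)
    print(signatures.head())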
@@ -384,7 +464,12 @@ class PastaStore:
 
         return signatures_df
 
-    def get_tmin_tmax(
+    def get_tmin_tmax(
+        self,
+        libname: Literal["oseries", "stresses", "models"],
+        names: Union[str, List[str], None] = None,
+        progressbar: bool = False,
+    ):
         """Get tmin and tmax for time series.
 
         Parameters
@@ -403,22 +488,48 @@ class PastaStore:
         tmintmax : pd.dataframe
             Dataframe containing tmin and tmax per time series
         """
-
         names = self.conn._parse_names(names, libname=libname)
         tmintmax = pd.DataFrame(
             index=names, columns=["tmin", "tmax"], dtype="datetime64[ns]"
         )
         desc = f"Get tmin/tmax {libname}"
         for n in tqdm(names, desc=desc) if progressbar else names:
-            if libname == "
-
+            if libname == "models":
+                mld = self.conn.get_models(
+                    n,
+                    return_dict=True,
+                )
+                tmintmax.loc[n, "tmin"] = mld["settings"]["tmin"]
+                tmintmax.loc[n, "tmax"] = mld["settings"]["tmax"]
             else:
-                s =
-
-
+                s = (
+                    self.conn.get_oseries(n)
+                    if libname == "oseries"
+                    else self.conn.get_stresses(n)
+                )
+                tmintmax.loc[n, "tmin"] = s.first_valid_index()
+                tmintmax.loc[n, "tmax"] = s.last_valid_index()
+
         return tmintmax
 
     def get_extent(self, libname, names=None, buffer=0.0):
+        """Get extent [xmin, xmax, ymin, ymax] from library.
+
+        Parameters
+        ----------
+        libname : str
+            name of the library containing the time series
+            ('oseries', 'stresses', 'models')
+        names : str, list of str, or None, optional
+            list of names to include for computing the extent
+        buffer : float, optional
+            add this distance to the extent, by default 0.0
+
+        Returns
+        -------
+        extent : list
+            extent [xmin, xmax, ymin, ymax]
+        """
         names = self.conn._parse_names(names, libname=libname)
         if libname in ["oseries", "stresses"]:
             df = getattr(self, libname)
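A brief usage sketch of the reworked get_tmin_tmax and the newly documented get_extent; the buffer value is illustrative:

    # tmin/tmax per oseries, read from the series themselves
    tmintmax = pstore.get_tmin_tmax("oseries", progressbar=True)

    # for models, tmin/tmax is now taken from the stored model settings
    model_tmintmax = pstore.get_tmin_tmax("models")

    # bounding box of all oseries, padded by 250 m
    xmin, xmax, ymin, ymax = pstore.get_extent("oseries", buffer=250.0)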
@@ -443,8 +554,10 @@ class PastaStore:
         progressbar: Optional[bool] = False,
         ignore_errors: Optional[bool] = False,
     ) -> FrameorSeriesUnion:
-        """Get model parameters.
-
+        """Get model parameters.
+
+        NaN-values are returned when the parameters are not present in the model or the
+        model is not optimized.
 
         Parameters
         ----------
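A hedged sketch of this method; only the keyword arguments visible in the hunk are used, and the store is assumed to contain solved models:

    # optimal parameter values per model; parameters missing from a model or
    # belonging to unoptimized models come back as NaN
    params = pstore.get_parameters(progressbar=True, ignore_errors=True)
    print(params.head())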
@@ -526,7 +639,6 @@ class PastaStore:
         -------
         s : pandas.DataFrame
         """
-
         modelnames = self.conn._parse_names(modelnames, libname="models")
 
         # if statistics is str
@@ -558,6 +670,7 @@ class PastaStore:
         name: str,
         modelname: str = None,
         add_recharge: bool = True,
+        add_ar_noisemodel: bool = False,
         recharge_name: str = "recharge",
     ) -> ps.Model:
         """Create a pastas Model.
@@ -572,6 +685,8 @@ class PastaStore:
             add recharge to the model by looking for the closest
             precipitation and evaporation time series in the stresses
             library, by default True
+        add_ar1_noisemodel : bool, optional
+            add AR(1) noise model to the model, by default False
         recharge_name : str
             name of the RechargeModel
 
@@ -598,6 +713,8 @@ class PastaStore:
             ml = ps.Model(ts, name=modelname, metadata=meta)
             if add_recharge:
                 self.add_recharge(ml, recharge_name=recharge_name)
+            if add_ar_noisemodel and PASTAS_GEQ_150:
+                ml.add_noisemodel(ps.ArNoiseModel())
             return ml
         else:
             raise ValueError("Empty time series!")
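The new add_ar_noisemodel option is only applied when Pastas 1.5.0 or newer is installed (the PASTAS_GEQ_150 flag imported at the top of this module). A minimal sketch; the oseries name is illustrative and solving/storing the model is shown only as a typical follow-up:

    ml = pstore.create_model("head_01", add_recharge=True, add_ar_noisemodel=True)
    ml.solve()                            # fit the model with pastas
    pstore.add_model(ml, overwrite=True)  # store the solved model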
@@ -709,9 +826,9 @@ class PastaStore:
         for var in ("prec", "evap"):
             try:
                 name = self.get_nearest_stresses(ml.oseries.name, kind=var).iloc[0, 0]
-            except AttributeError:
+            except AttributeError as e:
                 msg = "No precipitation or evaporation time series found!"
-                raise Exception(msg)
+                raise Exception(msg) from e
             if isinstance(name, float):
                 if np.isnan(name):
                     raise ValueError(
@@ -828,8 +945,8 @@ class PastaStore:
         """
         try:
             from art_tools import pastas_get_model_results
-        except Exception:
-            raise ModuleNotFoundError("You need 'art_tools' to use this method!")
+        except Exception as e:
+            raise ModuleNotFoundError("You need 'art_tools' to use this method!") from e
 
         if mls is None:
             mls = self.conn.models
@@ -870,7 +987,7 @@ class PastaStore:
                 "File already exists! " "Use 'overwrite=True' to " "force writing file."
             )
         elif os.path.exists(fname):
-            warnings.warn(f"Overwriting file '{os.path.basename(fname)}'")
+            warnings.warn(f"Overwriting file '{os.path.basename(fname)}'", stacklevel=1)
 
         with ZipFile(fname, "w", compression=ZIP_DEFLATED) as archive:
             # oseries
@@ -1011,13 +1128,12 @@ class PastaStore:
         matches : list
             list of names that match search result
         """
-
         if libname == "models":
-            lib_names =
+            lib_names = self.model_names
         elif libname == "stresses":
-            lib_names =
+            lib_names = self.stresses_names
         elif libname == "oseries":
-            lib_names =
+            lib_names = self.oseries_names
         else:
             raise ValueError("Provide valid libname: 'models', 'stresses' or 'oseries'")
 
@@ -1064,7 +1180,6 @@ class PastaStore:
             indicating whether a stress is contained within a time series
             model.
         """
-
         model_names = self.conn._parse_names(modelnames, libname="models")
         structure = pd.DataFrame(
             index=model_names, columns=["oseries"] + self.stresses_names
@@ -1138,6 +1253,23 @@ class PastaStore:
         return result
 
     def within(self, extent, names=None, libname="oseries"):
+        """Get names of items within extent.
+
+        Parameters
+        ----------
+        extent : list
+            list with [xmin, xmax, ymin, ymax]
+        names : str, list of str, optional
+            list of names to include, by default None
+        libname : str, optional
+            name of library, must be one of ('oseries', 'stresses', 'models'), by
+            default "oseries"
+
+        Returns
+        -------
+        list
+            list of items within extent
+        """
         xmin, xmax, ymin, ymax = extent
         names = self.conn._parse_names(names, libname)
         if libname == "oseries":
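A short sketch of the newly documented within(); the extent coordinates are illustrative:

    # names of oseries located inside a rectangular area of interest
    aoi = [120_000, 130_000, 440_000, 450_000]  # [xmin, xmax, ymin, ymax]
    selection = pstore.within(aoi, libname="oseries")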
pastastore/styling.py
CHANGED
@@ -1,3 +1,5 @@
+"""Module containing dataframe styling functions."""
+
 import matplotlib as mpl
 import matplotlib.pyplot as plt
 import numpy as np
@@ -25,7 +27,6 @@ def float_styler(val, norm, cmap=None):
     Given some dataframe
 
     >>> df.map(float_styler, subset=["some column"], norm=norm, cmap=cmap)
-
     """
     if cmap is None:
         cmap = plt.get_cmap("RdYlBu")
pastastore/util.py
CHANGED
@@ -1,4 +1,7 @@
+"""Useful utilities for pastastore."""
+
 import os
+import warnings
 from typing import Dict, List, Optional, Union
 
 import numpy as np
@@ -16,9 +19,12 @@ def _custom_warning(message, category=UserWarning, filename="", lineno=-1, *args
 
 
 class ItemInLibraryException(Exception):
+    """Exception when item is already in library."""
+
     pass
 
 
+# TODO: remove in future version
 def delete_pystore_connector(
     conn=None,
     path: Optional[str] = None,
@@ -39,6 +45,12 @@ def delete_pystore_connector(
         list of library names to delete, by default None which deletes
         all libraries
     """
+    warnings.warn(
+        "This function is deprecated. We recommend to migrate to a different "
+        "Connector, e.g. `pst.PasConnector`.",
+        DeprecationWarning,
+        stacklevel=1,
+    )
    import pystore
 
    if conn is not None:
@@ -60,6 +72,7 @@ def delete_pystore_connector(
             print(f" - deleted: {lib}")
 
 
+# TODO: remove in future version
 def delete_arctic_connector(
     conn=None,
     connstr: Optional[str] = None,
@@ -80,6 +93,12 @@ def delete_arctic_connector(
         list of library names to delete, by default None which deletes
         all libraries
     """
+    warnings.warn(
+        "This function is deprecated. We recommend to migrate to a different "
+        "Connector, e.g. `pst.ArcticDBConnector`.",
+        DeprecationWarning,
+        stacklevel=1,
+    )
    import arctic
 
    if conn is not None:
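Both delete helpers above are now deprecated in favour of the maintained connectors. A hedged sketch of setting up the recommended alternatives; the names, path and URI are illustrative, and the ArcticDBConnector keyword is an assumption, not taken from this diff:

    import pastastore as pst

    # file-based connector (no external database required)
    conn = pst.PasConnector("mydb", "./pastas_db")

    # or an ArcticDB-backed connector (uri keyword assumed)
    # conn = pst.ArcticDBConnector("mydb", uri="lmdb://./arcticdb")

    pstore = pst.PastaStore(conn)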
@@ -172,6 +191,7 @@ def delete_arcticdb_connector(
 
 
 def delete_dict_connector(conn, libraries: Optional[List[str]] = None) -> None:
+    """Delete DictConnector object."""
     print(f"Deleting DictConnector: '{conn.name}' ... ", end="")
     if libraries is None:
         del conn
@@ -185,6 +205,7 @@ def delete_dict_connector(conn, libraries: Optional[List[str]] = None) -> None:
 
 
 def delete_pas_connector(conn, libraries: Optional[List[str]] = None) -> None:
+    """Delete PasConnector object."""
     import shutil
 
     print(f"Deleting PasConnector database: '{conn.name}' ... ", end="")
@@ -222,10 +243,12 @@ def delete_pastastore(pstore, libraries: Optional[List[str]] = None) -> None:
     TypeError
         when Connector type is not recognized
     """
+    # TODO: remove in future version
     if pstore.conn.conn_type == "pystore":
         delete_pystore_connector(conn=pstore.conn, libraries=libraries)
     elif pstore.conn.conn_type == "dict":
         delete_dict_connector(pstore)
+    # TODO: remove in future version
     elif pstore.conn.conn_type == "arctic":
         delete_arctic_connector(conn=pstore.conn, libraries=libraries)
     elif pstore.conn.conn_type == "arcticdb":
@@ -303,7 +326,6 @@ def compare_models(ml1, ml2, stats=None, detailed_comparison=False):
        returns True if models are equivalent when detailed_comparison=True
        else returns DataFrame containing comparison details.
     """
-
     df = pd.DataFrame(columns=["model 0", "model 1"])
     so1 = []  # for storing series_original
     sv1 = []  # for storing series_validated
@@ -552,6 +574,7 @@ def frontiers_checks(
     check4_gain: bool = True,
     check5_parambounds: bool = False,
     csv_dir: Optional[str] = None,
+    progressbar: bool = False,
 ) -> pd.DataFrame:  # pragma: no cover
     """Check models in a PastaStore to see if they pass reliability criteria.
 
@@ -597,6 +620,8 @@ def frontiers_checks(
     csv_dir : string, optional
         directory to store CSV file with overview of checks for every
         model, by default None which will not store results
+    progressbar : bool, optional
+        show progressbar, by default False
 
     Returns
     -------
@@ -612,7 +637,6 @@ def frontiers_checks(
     Application of Time Series Analysis to Estimate Drawdown From Multiple Well
     Fields. Front. Earth Sci., 14 June 2022 doi:10.3389/feart.2022.907609
     """
-
     df = pd.DataFrame(columns=["all_checks_passed"])
 
     if modelnames is not None:
@@ -629,7 +653,9 @@ def frontiers_checks(
     else:
         models = pstore.model_names
 
-    for mlnam in
+    for mlnam in (
+        tqdm(models, desc="Running model diagnostics") if progressbar else models
+    ):
         ml = pstore.get_models(mlnam)
 
         if ml.parameters["optimal"].hasnans:
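A hedged sketch of the new progressbar option; the first positional argument is assumed to be the PastaStore instance, as suggested by the pstore references inside the function body:

    from pastastore.util import frontiers_checks

    checks = frontiers_checks(pstore, progressbar=True)
    print(checks["all_checks_passed"].value_counts())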
@@ -734,7 +760,10 @@ def frontiers_checks(
             else:
                 check_gain_passed = np.abs(gain) > 2 * gain_std
             checks.loc[
-
+                (
+                    f"gain > 2*std: {sm_name}-{iw:02g}"
+                    f" ({sm.distances.index[iw]})"
+                ),
                 :,
             ] = (
                 gain,
@@ -829,7 +858,6 @@ def frontiers_aic_select(
     Multiple Well Fields. Front. Earth Sci., 14 June 2022
     doi:10.3389/feart.2022.907609
     """
-
     if modelnames is None and oseries is None:
         modelnames = pstore.model_names
     elif modelnames is None and oseries is not None:
@@ -853,7 +881,7 @@ def frontiers_aic_select(
     # with lowest AIC per location
     collect = []
     gr = df.join(aic).groupby("oseries")
-    for
+    for _, idf in gr:
         idf.index.name = "modelname"
         idf = (
             idf.sort_values("aic").reset_index().set_index(["oseries", "modelname"])
pastastore/version.py
CHANGED
@@ -1,7 +1,38 @@
+# ruff: noqa: D100
+from importlib import import_module, metadata
+from platform import python_version
+
 import pastas as ps
 from packaging.version import parse as parse_version
 
 PASTAS_VERSION = parse_version(ps.__version__)
 PASTAS_LEQ_022 = PASTAS_VERSION <= parse_version("0.22.0")
+PASTAS_GEQ_150 = PASTAS_VERSION >= parse_version("1.5.0")
+
+__version__ = "1.5.0"
+
+
+def show_versions(optional=False) -> None:
+    """Print the version of dependencies.
+
+    Parameters
+    ----------
+    optional : bool, optional
+        Print the version of optional dependencies, by default False
+    """
+    msg = (
+        f"Python version : {python_version()}\n"
+        f"Pandas version : {metadata.version('pandas')}\n"
+        f"Matplotlib version : {metadata.version('matplotlib')}\n"
+        f"Pastas version : {metadata.version('pastas')}\n"
+        f"PyYAML version : {metadata.version('pyyaml')}\n"
+    )
+    if optional:
+        msg += "\nArcticDB version : "
+        try:
+            import_module("arcticdb")
+            msg += f"{metadata.version('arctidb')}"
+        except ImportError:
+            msg += "Not Installed"
 
-
+    print(msg)
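The rewritten version module exposes both a Pastas version flag and a show_versions helper. A minimal sketch of using them directly from the module, mirroring the check create_model performs before adding the AR(1) noise model:

    from pastastore.version import PASTAS_GEQ_150, show_versions

    show_versions(optional=True)   # prints core (and ArcticDB) dependency versions

    if PASTAS_GEQ_150:
        print("pastas >= 1.5.0: AR(1) noise model available via create_model")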