pastastore 1.4.0__py3-none-any.whl → 1.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pastastore/__init__.py +2 -4
- pastastore/base.py +206 -59
- pastastore/connectors.py +23 -420
- pastastore/datasets.py +5 -10
- pastastore/plotting.py +27 -14
- pastastore/store.py +522 -73
- pastastore/styling.py +2 -1
- pastastore/util.py +22 -108
- pastastore/version.py +33 -1
- pastastore/yaml_interface.py +33 -25
- {pastastore-1.4.0.dist-info → pastastore-1.6.0.dist-info}/METADATA +15 -14
- pastastore-1.6.0.dist-info/RECORD +15 -0
- {pastastore-1.4.0.dist-info → pastastore-1.6.0.dist-info}/WHEEL +1 -1
- pastastore-1.4.0.dist-info/RECORD +0 -15
- {pastastore-1.4.0.dist-info → pastastore-1.6.0.dist-info}/LICENSE +0 -0
- {pastastore-1.4.0.dist-info → pastastore-1.6.0.dist-info}/top_level.txt +0 -0
pastastore/styling.py
CHANGED
@@ -1,3 +1,5 @@
+"""Module containing dataframe styling functions."""
+
 import matplotlib as mpl
 import matplotlib.pyplot as plt
 import numpy as np
@@ -25,7 +27,6 @@ def float_styler(val, norm, cmap=None):
     Given some dataframe
 
     >>> df.map(float_styler, subset=["some column"], norm=norm, cmap=cmap)
-
     """
     if cmap is None:
         cmap = plt.get_cmap("RdYlBu")
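For illustration only (not part of the diff): a minimal sketch of how the documented float_styler pattern might be used, assuming it is applied through pandas' Styler.map (Styler.applymap on pandas older than 2.1). The DataFrame, column name and normalization range below are made up.

import matplotlib as mpl
import pandas as pd

from pastastore.styling import float_styler

# toy data, purely illustrative
df = pd.DataFrame({"some column": [0.1, 0.5, 0.9]})
norm = mpl.colors.Normalize(vmin=0.0, vmax=1.0)

# colour the cells in "some column" according to their value
styled = df.style.map(float_styler, subset=["some column"], norm=norm)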
pastastore/util.py
CHANGED
@@ -1,3 +1,5 @@
+"""Useful utilities for pastastore."""
+
 import os
 from typing import Dict, List, Optional, Union
 
@@ -16,98 +18,9 @@ def _custom_warning(message, category=UserWarning, filename="", lineno=-1, *args
 
 
 class ItemInLibraryException(Exception):
-    pass
-
-
-def delete_pystore_connector(
-    conn=None,
-    path: Optional[str] = None,
-    name: Optional[str] = None,
-    libraries: Optional[List[str]] = None,
-) -> None:  # pragma: no cover
-    """Delete libraries from pystore.
-
-    Parameters
-    ----------
-    conn : PystoreConnector, optional
-        PystoreConnector object
-    path : str, optional
-        path to pystore
-    name : str, optional
-        name of the pystore
-    libraries : Optional[List[str]], optional
-        list of library names to delete, by default None which deletes
-        all libraries
-    """
-    import pystore
-
-    if conn is not None:
-        name = conn.name
-        path = conn.path
-    elif name is None or path is None:
-        raise ValueError("Please provide 'name' and 'path' OR 'conn'!")
-
-    print(f"Deleting PystoreConnector database: '{name}' ...", end="")
-    pystore.set_path(path)
-    if libraries is None:
-        pystore.delete_store(name)
-        print(" Done!")
-    else:
-        store = pystore.store(name)
-        for lib in libraries:
-            print()
-            store.delete_collection(lib)
-            print(f" - deleted: {lib}")
-
-
-def delete_arctic_connector(
-    conn=None,
-    connstr: Optional[str] = None,
-    name: Optional[str] = None,
-    libraries: Optional[List[str]] = None,
-) -> None:  # pragma: no cover
-    """Delete libraries from arctic database.
-
-    Parameters
-    ----------
-    conn : pastastore.ArcticConnector
-        ArcticConnector object
-    connstr : str, optional
-        connection string to the database
-    name : str, optional
-        name of the database
-    libraries : Optional[List[str]], optional
-        list of library names to delete, by default None which deletes
-        all libraries
-    """
-    import arctic
-
-    if conn is not None:
-        name = conn.name
-        connstr = conn.connstr
-    elif name is None or connstr is None:
-        raise ValueError("Provide 'name' and 'connstr' OR 'conn'!")
-
-    arc = arctic.Arctic(connstr)
-
-    print(f"Deleting ArcticConnector database: '{name}' ... ", end="")
-    # get library names
-    if libraries is None:
-        libs = []
-        for ilib in arc.list_libraries():
-            if ilib.split(".")[0] == name:
-                libs.append(ilib)
-    elif name is not None:
-        libs = [name + "." + ilib for ilib in libraries]
-    else:
-        raise ValueError("Provide 'name' and 'connstr' OR 'conn'!")
+    """Exception when item is already in library."""
 
-    for lib in libs:
-        arc.delete_library(lib)
-    if libraries is not None:
-        print()
-        print(f" - deleted: {lib}")
-    print("Done!")
+    pass
 
 
 def delete_arcticdb_connector(
@@ -148,17 +61,15 @@ def delete_arcticdb_connector(
         libs = []
         for ilib in arc.list_libraries():
             if ilib.split(".")[0] == name:
-
-                libs.append(ilib.replace(".", "/"))
+                libs.append(ilib)
     elif name is not None:
-
-        libs = [name + "/" + ilib for ilib in libraries]
+        libs = [name + "." + ilib for ilib in libraries]
     else:
         raise ValueError("Provide 'name' and 'uri' OR 'conn'!")
 
     for lib in libs:
-
-        shutil.rmtree(os.path.join(conn.uri.split("//")[-1], lib))
+        arc.delete_library(lib)
+        # shutil.rmtree(os.path.join(conn.uri.split("//")[-1], lib))
 
     if libraries is not None:
         print()
@@ -172,6 +83,7 @@ def delete_arcticdb_connector(
 
 
 def delete_dict_connector(conn, libraries: Optional[List[str]] = None) -> None:
+    """Delete DictConnector object."""
    print(f"Deleting DictConnector: '{conn.name}' ... ", end="")
    if libraries is None:
        del conn
@@ -185,6 +97,7 @@ def delete_dict_connector(conn, libraries: Optional[List[str]] = None) -> None:
 
 
 def delete_pas_connector(conn, libraries: Optional[List[str]] = None) -> None:
+    """Delete PasConnector object."""
    import shutil
 
    print(f"Deleting PasConnector database: '{conn.name}' ... ", end="")
@@ -222,12 +135,8 @@ def delete_pastastore(pstore, libraries: Optional[List[str]] = None) -> None:
     TypeError
         when Connector type is not recognized
     """
-    if pstore.conn.conn_type == "pystore":
-        delete_pystore_connector(conn=pstore.conn, libraries=libraries)
-    elif pstore.conn.conn_type == "dict":
+    if pstore.conn.conn_type == "dict":
         delete_dict_connector(pstore)
-    elif pstore.conn.conn_type == "arctic":
-        delete_arctic_connector(conn=pstore.conn, libraries=libraries)
     elif pstore.conn.conn_type == "arcticdb":
         delete_arcticdb_connector(conn=pstore.conn, libraries=libraries)
     elif pstore.conn.conn_type == "pas":
@@ -303,7 +212,6 @@ def compare_models(ml1, ml2, stats=None, detailed_comparison=False):
         returns True if models are equivalent when detailed_comparison=True
         else returns DataFrame containing comparison details.
     """
-
     df = pd.DataFrame(columns=["model 0", "model 1"])
     so1 = []  # for storing series_original
     sv1 = []  # for storing series_validated
@@ -552,6 +460,7 @@ def frontiers_checks(
     check4_gain: bool = True,
     check5_parambounds: bool = False,
     csv_dir: Optional[str] = None,
+    progressbar: bool = False,
 ) -> pd.DataFrame:  # pragma: no cover
     """Check models in a PastaStore to see if they pass reliability criteria.
 
@@ -597,6 +506,8 @@ def frontiers_checks(
     csv_dir : string, optional
         directory to store CSV file with overview of checks for every
         model, by default None which will not store results
+    progressbar : bool, optional
+        show progressbar, by default False
 
     Returns
     -------
@@ -612,7 +523,6 @@ def frontiers_checks(
     Application of Time Series Analysis to Estimate Drawdown From Multiple Well
     Fields. Front. Earth Sci., 14 June 2022 doi:10.3389/feart.2022.907609
     """
-
     df = pd.DataFrame(columns=["all_checks_passed"])
 
     if modelnames is not None:
@@ -629,7 +539,9 @@ def frontiers_checks(
     else:
         models = pstore.model_names
 
-    for mlnam in
+    for mlnam in (
+        tqdm(models, desc="Running model diagnostics") if progressbar else models
+    ):
         ml = pstore.get_models(mlnam)
 
         if ml.parameters["optimal"].hasnans:
@@ -734,7 +646,10 @@ def frontiers_checks(
             else:
                 check_gain_passed = np.abs(gain) > 2 * gain_std
             checks.loc[
-                f"gain > 2*std: {sm_name}-{iw:02g} ({sm.distances.index[iw]})",
+                (
+                    f"gain > 2*std: {sm_name}-{iw:02g}"
+                    f" ({sm.distances.index[iw]})"
+                ),
                 :,
             ] = (
                 gain,
@@ -829,7 +744,6 @@ def frontiers_aic_select(
     Multiple Well Fields. Front. Earth Sci., 14 June 2022
     doi:10.3389/feart.2022.907609
     """
-
     if modelnames is None and oseries is None:
         modelnames = pstore.model_names
     elif modelnames is None and oseries is not None:
@@ -853,7 +767,7 @@ def frontiers_aic_select(
     # with lowest AIC per location
     collect = []
     gr = df.join(aic).groupby("oseries")
-    for
+    for _, idf in gr:
         idf.index.name = "modelname"
         idf = (
             idf.sort_values("aic").reset_index().set_index(["oseries", "modelname"])
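For illustration only (not part of the diff): the pystore and arctic deletion helpers are gone, and frontiers_checks gained a progressbar keyword. A minimal sketch of calling the checks with the new option; the connector name and path are made up, and the store is assumed to already contain solved models.

import pastastore as pst
from pastastore.util import frontiers_checks

# hypothetical store; any connector holding solved models would do
conn = pst.PasConnector("my_db", "./pastas_db")
pstore = pst.PastaStore(conn)

# run the Frontiers reliability checks with the new progress bar
checks = frontiers_checks(pstore, progressbar=True)
print(checks["all_checks_passed"].sum(), "model(s) passed all checks")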
pastastore/version.py
CHANGED
@@ -1,7 +1,39 @@
+# ruff: noqa: D100
+from importlib import import_module, metadata
+from platform import python_version
+
 import pastas as ps
 from packaging.version import parse as parse_version
 
 PASTAS_VERSION = parse_version(ps.__version__)
 PASTAS_LEQ_022 = PASTAS_VERSION <= parse_version("0.22.0")
+PASTAS_GEQ_150 = PASTAS_VERSION >= parse_version("1.5.0")
+
+__version__ = "1.6.0"
+
+
+def show_versions(optional=False) -> None:
+    """Print the version of dependencies.
+
+    Parameters
+    ----------
+    optional : bool, optional
+        Print the version of optional dependencies, by default False
+    """
+    msg = (
+        f"Pastastore version : {__version__}\n\n"
+        f"Python version : {python_version()}\n"
+        f"Pandas version : {metadata.version('pandas')}\n"
+        f"Matplotlib version : {metadata.version('matplotlib')}\n"
+        f"Pastas version : {metadata.version('pastas')}\n"
+        f"PyYAML version : {metadata.version('pyyaml')}\n"
+    )
+    if optional:
+        msg += "\nArcticDB version : "
+        try:
+            import_module("arcticdb")
+            msg += f"{metadata.version('arctidb')}"
+        except ImportError:
+            msg += "Not Installed"
 
-__version__ = "1.4.0"
+    print(msg)
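For illustration only (not part of the diff): the new show_versions helper can be called directly from pastastore.version; a minimal sketch.

from pastastore.version import show_versions

# prints the pastastore, Python, pandas, matplotlib, pastas and PyYAML versions;
# pass optional=True to also report on ArcticDB
show_versions()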
pastastore/yaml_interface.py
CHANGED
@@ -1,3 +1,5 @@
+"""Module containing YAML interface for Pastas models using PastaStore."""
+
 import datetime
 import logging
 import os
@@ -11,14 +13,11 @@ import yaml
 
 from pastastore.version import PASTAS_LEQ_022
 
-ps.logger.setLevel("ERROR")
-
-logging.basicConfig(level="INFO")
 logger = logging.getLogger(__name__)
 
 
 def _convert_dict_dtypes_for_yaml(d: Dict[str, Any]):
-    """
+    """Convert dictionary values for storing in YAML format (internal function).
 
     Parameters
     ----------
@@ -95,7 +94,6 @@ def reduce_to_minimal_dict(d, keys=None):
        ["name", "oseries", "settings", "tmin", "tmax", "noise",
        "stressmodels", "rfunc", "stress", "prec", "evap", "stressmodel"]
     """
-
     if keys is None:
         keys = [
             "name",
@@ -170,7 +168,7 @@ class PastastoreYAML:
     """
 
     def __init__(self, pstore):
-        """
+        """Create for PastasstoreYAML class.
 
         Parameters
         ----------
@@ -181,7 +179,7 @@ class PastastoreYAML:
         self.pstore = pstore
 
     def _parse_rechargemodel_dict(self, d: Dict, onam: Optional[str] = None) -> Dict:
-        """
+        """Parse RechargeModel dictionary (internal method).
 
        Note: supports 'nearest' as input to 'prec' and 'evap',
        which will automatically select nearest stress with kind="prec" or
@@ -208,7 +206,7 @@ class PastastoreYAML:
        if isinstance(prec_val, dict):
            pnam = prec_val["name"]
            p = self.pstore.get_stresses(pnam)
-            prec_val["series"] = p
+            prec_val["series"] = p.squeeze()
            prec = prec_val
        elif prec_val.startswith("nearest"):
            if onam is None:
@@ -224,7 +222,7 @@ class PastastoreYAML:
                "name": pnam,
                "settings": "prec",
                "metadata": pmeta,
-                "series": p,
+                "series": p.squeeze(),
            }
        elif isinstance(prec_val, str):
            pnam = d["prec"]
@@ -233,7 +231,7 @@ class PastastoreYAML:
                "name": pnam,
                "settings": "prec",
                "metadata": pmeta,
-                "series": p,
+                "series": p.squeeze(),
            }
        else:
            raise NotImplementedError(f"Could not parse prec value: '{prec_val}'")
@@ -244,7 +242,7 @@ class PastastoreYAML:
        if isinstance(evap_val, dict):
            enam = evap_val["name"]
            e = self.pstore.get_stresses(enam)
-            evap_val["series"] = e
+            evap_val["series"] = e.squeeze()
            evap = evap_val
        elif evap_val.startswith("nearest"):
            if onam is None:
@@ -260,7 +258,7 @@ class PastastoreYAML:
                "name": enam,
                "settings": "evap",
                "metadata": emeta,
-                "series": e,
+                "series": e.squeeze(),
            }
        elif isinstance(evap_val, str):
            enam = d["evap"]
@@ -269,7 +267,7 @@ class PastastoreYAML:
                "name": enam,
                "settings": "evap",
                "metadata": emeta,
-                "series": e,
+                "series": e.squeeze(),
            }
        else:
            raise NotImplementedError(f"Could not parse evap value: '{evap_val}'")
@@ -310,12 +308,12 @@ class PastastoreYAML:
        onam = d["oseries"]
        if isinstance(onam, str):
            o = self.pstore.get_oseries(onam)
-            d["oseries"] = o
+            d["oseries"] = o.squeeze()
 
        return d
 
    def _parse_stressmodel_dict(self, d: Dict, onam: Optional[str] = None) -> Dict:
-        """
+        """Parse StressModel dictionary (internal method).
 
        Note: supports 'nearest' or 'nearest <kind>' as input to 'stress',
        which will automatically select nearest stress with kind=<kind>.
@@ -337,7 +335,6 @@ class PastastoreYAML:
        containing stresses obtained from PastaStore, and setting
        defaults if they were not already provided.
        """
-
        # get stress
        snam = d.pop("stress")
 
@@ -361,7 +358,7 @@ class PastastoreYAML:
                "name": snam,
                "settings": d.pop("settings", None),
                "metadata": smeta,
-                "series": s,
+                "series": s.squeeze(),
            }
            d["stress"] = [s] if PASTAS_LEQ_022 else s
 
@@ -380,7 +377,7 @@ class PastastoreYAML:
        return d
 
    def _parse_wellmodel_dict(self, d: Dict, onam: Optional[str] = None) -> Dict:
-        """
+        """Parse WellModel dictionary (internal method).
 
        Note: supports 'nearest' or 'nearest <number> <kind>' as input to
        'stress', which will automatically select nearest or <number> of
@@ -402,7 +399,6 @@ class PastastoreYAML:
        containing stresses obtained from PastaStore, and setting
        defaults if they were not already provided.
        """
-
        # parse stress
        snames = d.pop("stress")
 
@@ -415,12 +411,12 @@ class PastastoreYAML:
        elif len(snames.split()) == 2:
            try:
                n = int(snames.split()[1])
-            except ValueError:
+            except ValueError as e:
                raise ValueError(
                    f"Could not parse: '{snames}'! "
                    "When using option 'nearest' for WellModel, "
                    "use 'nearest <n>' or 'nearest <n> <kind>'!"
-                )
+                ) from e
            kind = "well"
        elif len(snames.split()) == 1:
            n = 1
@@ -444,7 +440,7 @@ class PastastoreYAML:
                "name": snam,
                "settings": "well",
                "metadata": smeta,
-                "series": s,
+                "series": s.squeeze(),
            }
            slist.append(sdict)
        d["stress"] = slist
@@ -479,6 +475,20 @@ class PastastoreYAML:
        return d
 
    def construct_mldict(self, mlyml: dict, mlnam: str) -> dict:
+        """Create Pastas.Model dictionary from YAML dictionary.
+
+        Parameters
+        ----------
+        mlyml : dict
+            YAML dictionary
+        mlnam : str
+            model name
+
+        Returns
+        -------
+        dict
+            dictionary of pastas.Model that can be read by Pastas
+        """
        # get oseries + metadata
        if isinstance(mlyml["oseries"], dict):
            onam = str(mlyml["oseries"]["name"])
@@ -490,7 +500,7 @@ class PastastoreYAML:
        o, ometa = self.pstore.get_oseries(onam, return_metadata=True)
 
        # create model to obtain default model settings
-        ml = ps.Model(o, name=mlnam, metadata=ometa)
+        ml = ps.Model(o.squeeze(), name=mlnam, metadata=ometa)
        mldict = ml.to_dict(series=True)
 
        # update with stored model settings
@@ -608,7 +618,6 @@ class PastastoreYAML:
        NotImplementedError
            if unsupported stressmodel is encountered
        """
-
        with open(fyaml, "r") as f:
            yml = yaml.load(f, Loader=yaml.CFullLoader)
 
@@ -654,7 +663,6 @@ class PastastoreYAML:
            the time series are actually the nearest ones! Only used
            when minimal_yaml=True. Default is False.
        """
-
        onames = self.pstore.conn._parse_names(oseries, "oseries")
 
        for onam in onames:
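For illustration only (not part of the diff): the recurring change in this file is that series obtained through get_oseries/get_stresses are now passed through .squeeze() before being handed to Pastas. A minimal pandas sketch of that conversion; the example frame is made up.

import pandas as pd

# a single-column DataFrame, as a connector might return it
df = pd.DataFrame(
    {"head": [1.2, 1.3, 1.1]},
    index=pd.date_range("2024-01-01", periods=3, freq="D"),
)

series = df.squeeze()          # single-column DataFrame -> pandas.Series
print(type(series).__name__)   # Series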
{pastastore-1.4.0.dist-info → pastastore-1.6.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pastastore
-Version: 1.4.0
+Version: 1.6.0
 Summary: Tools for managing Pastas time series models.
 Author: D.A. Brakenhoff
 Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
@@ -40,11 +40,10 @@ Classifier: Operating System :: Unix
 Classifier: Operating System :: MacOS
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Topic :: Scientific/Engineering :: Hydrology
 Requires-Python: >=3.7
@@ -53,11 +52,8 @@ License-File: LICENSE
 Requires-Dist: pastas >=0.13
 Requires-Dist: tqdm >=4.36
 Requires-Dist: pyyaml
-Provides-Extra: arctic
-Requires-Dist: arctic ; extra == 'arctic'
 Provides-Extra: arcticdb
 Requires-Dist: arcticdb ; extra == 'arcticdb'
-Requires-Dist: protobuf ~=4.0 ; extra == 'arcticdb'
 Provides-Extra: docs
 Requires-Dist: pastastore[optional] ; extra == 'docs'
 Requires-Dist: sphinx-rtd-theme ; extra == 'docs'
@@ -65,20 +61,16 @@ Requires-Dist: Ipython ; extra == 'docs'
 Requires-Dist: ipykernel ; extra == 'docs'
 Requires-Dist: nbsphinx ; extra == 'docs'
 Requires-Dist: nbsphinx-link ; extra == 'docs'
+Provides-Extra: extensions
+Requires-Dist: hydropandas ; extra == 'extensions'
 Provides-Extra: full
 Requires-Dist: pastastore[arcticdb,optional] ; extra == 'full'
 Provides-Extra: lint
-Requires-Dist: black ; extra == 'lint'
-Requires-Dist: flake8 ; extra == 'lint'
-Requires-Dist: isort ; extra == 'lint'
+Requires-Dist: ruff ; extra == 'lint'
 Provides-Extra: optional
 Requires-Dist: contextily ; extra == 'optional'
 Requires-Dist: pyproj ; extra == 'optional'
 Requires-Dist: adjustText ; extra == 'optional'
-Provides-Extra: pystore
-Requires-Dist: fsspec >=0.3.3 ; extra == 'pystore'
-Requires-Dist: python-snappy ; extra == 'pystore'
-Requires-Dist: dask[dataframe] ; extra == 'pystore'
 Provides-Extra: test
 Requires-Dist: pastastore[arcticdb,lint,optional] ; extra == 'test'
 Requires-Dist: hydropandas[full] ; extra == 'test'
@@ -89,7 +81,16 @@ Requires-Dist: pytest-cov ; extra == 'test'
 Requires-Dist: pytest-dependency ; extra == 'test'
 Requires-Dist: pytest-benchmark ; extra == 'test'
 Requires-Dist: codacy-coverage ; extra == 'test'
-
+Provides-Extra: test_py312
+Requires-Dist: pastastore[lint,optional] ; extra == 'test_py312'
+Requires-Dist: hydropandas[full] ; extra == 'test_py312'
+Requires-Dist: coverage ; extra == 'test_py312'
+Requires-Dist: codecov ; extra == 'test_py312'
+Requires-Dist: pytest ; extra == 'test_py312'
+Requires-Dist: pytest-cov ; extra == 'test_py312'
+Requires-Dist: pytest-dependency ; extra == 'test_py312'
+Requires-Dist: pytest-benchmark ; extra == 'test_py312'
+Requires-Dist: codacy-coverage ; extra == 'test_py312'
 
 ![pastastore]
 [](https://pastastore.readthedocs.io/en/latest/?badge=latest)
pastastore-1.6.0.dist-info/RECORD
ADDED
@@ -0,0 +1,15 @@
+pastastore/__init__.py,sha256=l6zRpDO0j6MIrfdljCTbkF70bt-GFlPseBd4IlmaC-o,269
+pastastore/base.py,sha256=n7hPrkaLjR6_8S0XRHxvviqBWvULx3W_faQcoA9HZ9I,67166
+pastastore/connectors.py,sha256=YK3I_Jb2uNwzBQvN2VwZvmTRfPeUETW-4ddcFSWkHVw,16820
+pastastore/datasets.py,sha256=FHVfmKqb8beEs9NONsWrCoJY37BmlvFLSEQ1VAFmE8A,6415
+pastastore/plotting.py,sha256=t6gEeHVGzrwvM6q1l8V3OkklpU75O2Y4h6nKEHRWdjo,46416
+pastastore/store.py,sha256=istLgbTVXvNWqTkZQtEcxWEweouh-M6HWUAXybKrESw,58286
+pastastore/styling.py,sha256=ioaH10ELV8CFvJA-xAKFbnBklTd6FB1TZV8sqvZrEcw,1518
+pastastore/util.py,sha256=iXHoGHfK6VDbUpufNsnzdV71oBVp-koZUD4VJj6MOwo,28250
+pastastore/version.py,sha256=p4YdipfRBvajfHzz2s7TjR_IpDOit_K_Lr2e7pnKhLU,1205
+pastastore/yaml_interface.py,sha256=MddELxWe8_aqJRMUydOCbjoU1-ZodzxFKYnAaqJ5SqA,29947
+pastastore-1.6.0.dist-info/LICENSE,sha256=DtHftfUEm99KzgwLr3rQUTg8H3kAS0Z-p5WWJgLf_OY,1082
+pastastore-1.6.0.dist-info/METADATA,sha256=cFimKbNDGJdY-iOFcDfpo4ger907HniL0kWFeSUvTqg,8021
+pastastore-1.6.0.dist-info/WHEEL,sha256=HiCZjzuy6Dw0hdX5R3LCFPDmFS4BWl8H-8W39XfmgX4,91
+pastastore-1.6.0.dist-info/top_level.txt,sha256=QKfonr1KJZN46MFsj8eGRBw9Mg-jO-HFvgE2orVX7Sk,11
+pastastore-1.6.0.dist-info/RECORD,,
pastastore-1.4.0.dist-info/RECORD
DELETED
@@ -1,15 +0,0 @@
-pastastore/__init__.py,sha256=LmaXFyfECBeaUeAEGeYxX-SU8g7tFUJ2ojyXoj3xz4o,292
-pastastore/base.py,sha256=nPIIiDHJMDUlvPaE2dftjPRxAM2kFtC8kHXvmB9Ayw0,62631
-pastastore/connectors.py,sha256=QH6jvnhIIivn1Is9ThBeMAEcJBVnfcK46pJClGMdiBA,28747
-pastastore/datasets.py,sha256=f-92WOh2ROCxOVvKGHXjnzd66Q7hO_-BtXRfMKwg1KU,6640
-pastastore/plotting.py,sha256=DCtbl81t23Zrk5l7QqxJVwv8wqvinBqlPbaTy5Q81sg,45849
-pastastore/store.py,sha256=9gjB5vq8XhfpProUjox_YKUssHvgQYAMMIf8epnAghA,40682
-pastastore/styling.py,sha256=u2rRf1Gqq5vs2HG7E4S4wEkEczyTuvCQaoYcvrh91lk,1465
-pastastore/util.py,sha256=CuQsLE3Z7egnel55LejVLXmOswwabGJiQtm164K7gxE,30830
-pastastore/version.py,sha256=hBqb0IH800rddVduM2_nP9zwh8BNg2nMxgX4FHGuiQQ,203
-pastastore/yaml_interface.py,sha256=yNpyEHtIapx44Thv2PCm7Sc_W-reXDHjT3uRC2gDAzw,29515
-pastastore-1.4.0.dist-info/LICENSE,sha256=DtHftfUEm99KzgwLr3rQUTg8H3kAS0Z-p5WWJgLf_OY,1082
-pastastore-1.4.0.dist-info/METADATA,sha256=6ZcnAKj_NUmsvdWFpU5qsNVFO7LghNPc_PpeJhX-Ij8,7896
-pastastore-1.4.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-pastastore-1.4.0.dist-info/top_level.txt,sha256=QKfonr1KJZN46MFsj8eGRBw9Mg-jO-HFvgE2orVX7Sk,11
-pastastore-1.4.0.dist-info/RECORD,,
{pastastore-1.4.0.dist-info → pastastore-1.6.0.dist-info}/LICENSE
File without changes
{pastastore-1.4.0.dist-info → pastastore-1.6.0.dist-info}/top_level.txt
File without changes