pastastore 1.8.0__py3-none-any.whl → 1.10.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to a supported public registry. It is provided for informational purposes only.
- pastastore/base.py +65 -11
- pastastore/connectors.py +43 -2
- pastastore/extensions/hpd.py +13 -6
- pastastore/plotting.py +289 -35
- pastastore/store.py +108 -54
- pastastore/styling.py +4 -4
- pastastore/util.py +14 -10
- pastastore/version.py +1 -1
- pastastore/yaml_interface.py +35 -8
- {pastastore-1.8.0.dist-info → pastastore-1.10.0.dist-info}/LICENSE +1 -1
- {pastastore-1.8.0.dist-info → pastastore-1.10.0.dist-info}/METADATA +16 -25
- pastastore-1.10.0.dist-info/RECORD +28 -0
- {pastastore-1.8.0.dist-info → pastastore-1.10.0.dist-info}/WHEEL +1 -1
- tests/conftest.py +1 -5
- tests/test_003_pastastore.py +26 -9
- tests/test_006_benchmark.py +0 -3
- pastastore-1.8.0.dist-info/RECORD +0 -28
- {pastastore-1.8.0.dist-info → pastastore-1.10.0.dist-info}/top_level.txt +0 -0
pastastore/store.py
CHANGED
@@ -15,7 +15,7 @@ from pastas.io.pas import pastas_hook
 from tqdm.auto import tqdm

 from pastastore.base import BaseConnector
-from pastastore.connectors import DictConnector
+from pastastore.connectors import ArcticDBConnector, DictConnector, PasConnector
 from pastastore.plotting import Maps, Plots
 from pastastore.util import _custom_warning
 from pastastore.version import PASTAS_GEQ_150, PASTAS_LEQ_022
@@ -79,6 +79,24 @@ class PastaStore:
         self.plots = Plots(self)
         self.yaml = PastastoreYAML(self)

+    @classmethod
+    def from_pastastore_config_file(cls, fname):
+        """Create a PastaStore from a pastastore config file."""
+        with open(fname, "r") as f:
+            cfg = json.load(f)
+
+        conn_type = cfg.pop("connector_type")
+        if conn_type == "pas":
+            conn = PasConnector(**cfg)
+        elif conn_type == "arcticdb":
+            conn = ArcticDBConnector(**cfg)
+        else:
+            raise ValueError(
+                f"Cannot load connector type: '{conn_type}'. "
+                "This is only supported for PasConnector and ArcticDBConnector."
+            )
+        return cls(conn)
+
     @property
     def empty(self) -> bool:
         """Check if the PastaStore is empty."""
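The new `from_pastastore_config_file` classmethod rebuilds a store from the JSON config file written for a connector (a dict holding `connector_type` plus the connector's keyword arguments). A minimal sketch of how this could be used, assuming a `PasConnector` database whose `<name>.pastastore` config file sits in the database directory (the name and path here are illustrative, not taken from the diff):

```python
import pastastore as pst

# Create a store backed by a PasConnector; its configuration is persisted in a
# "<name>.pastastore" JSON file (location assumed for this example).
conn = pst.PasConnector(name="pastas_db", path="./pastas_db")
pstore = pst.PastaStore(conn)

# Later, rebuild the same store from that config file. Only PasConnector and
# ArcticDBConnector are supported, as the ValueError above indicates.
pstore = pst.PastaStore.from_pastastore_config_file("./pastas_db/pastas_db.pastastore")
```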
@@ -120,12 +138,43 @@ class PastaStore:

     @property
     def models(self):
-        """Return
+        """Return the ModelAccessor object.
+
+        The ModelAccessor object allows dictionary-like assignment and access to models.
+        In addition it provides some useful utilities for working with stored models
+        in the database.
+
+        Examples
+        --------
+        Get a model by name::
+
+        >>> model = pstore.models["my_model"]
+
+        Store a model in the database::
+
+        >>> pstore.models["my_model_v2"] = model
+
+        Get model metadata dataframe::
+
+        >>> pstore.models.metadata
+
+        Number of models::
+
+        >>> len(pstore.models)
+
+        Random model::
+
+        >>> model = pstore.models.random()
+
+        Iterate over stored models::
+
+        >>> for ml in pstore.models:
+        >>>     ml.solve()

         Returns
         -------
-
-
+        ModelAccessor
+            ModelAccessor object
         """
         return self.conn.models

@@ -562,7 +611,7 @@ class PastaStore:
         modelnames: Optional[List[str]] = None,
         param_value: Optional[str] = "optimal",
         progressbar: Optional[bool] = False,
-        ignore_errors: Optional[bool] =
+        ignore_errors: Optional[bool] = True,
     ) -> FrameorSeriesUnion:
         """Get model parameters.

@@ -584,7 +633,7 @@ class PastaStore:
            show progressbar, default is False
         ignore_errors : bool, optional
            ignore errors when True, i.e. when non-existent model is
-           encountered in modelnames, by default
+           encountered in modelnames, by default True

         Returns
         -------
@@ -613,7 +662,10 @@ class PastaStore:
            pindex = parameters

            for c in pindex:
-
+                if c in mldict["parameters"].index:
+                    p.loc[mlname, c] = mldict["parameters"].loc[c, param_value]
+                else:
+                    p.loc[mlname, c] = np.nan

         p = p.squeeze()
         return p.astype(float)
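With `ignore_errors` now defaulting to True and missing parameters filled with `np.nan`, collecting parameters across a heterogeneous set of models no longer raises. A hedged sketch, continuing with the `pstore` object from the example above:

```python
# Optimal parameter values for all stored models; a parameter that a given
# model does not have simply shows up as NaN in its row instead of raising.
params = pstore.get_parameters(progressbar=False)
print(params.isna().sum())  # how many models lack each parameter
```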
@@ -757,6 +809,7 @@ class PastaStore:
         solve: bool = False,
         store_models: bool = True,
         ignore_errors: bool = False,
+        suffix: Optional[str] = None,
         progressbar: bool = True,
         **kwargs,
     ) -> Union[Tuple[dict, dict], dict]:
@@ -777,6 +830,8 @@ class PastaStore:
            store the models in the database.
         ignore_errors : bool, optional
            ignore errors while creating models, by default False
+        suffix : str, optional
+           add suffix to oseries name to create model name, by default None
         progressbar : bool, optional
            show progressbar, by default True

@@ -797,7 +852,13 @@ class PastaStore:
         desc = "Bulk creation models"
         for o in tqdm(oseries, desc=desc) if progressbar else oseries:
             try:
-
+                if suffix is not None:
+                    modelname = f"{o}{suffix}"
+                else:
+                    modelname = o
+                iml = self.create_model(
+                    o, modelname=modelname, add_recharge=add_recharge
+                )
             except Exception as e:
                 if ignore_errors:
                     errors[o] = e
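The new `suffix` argument builds each model name as `f"{oseries_name}{suffix}"` during bulk creation. The hunk headers do not show the method name, so assuming this is `create_models_bulk`, usage could look like this (the suffix value is illustrative); the `Union[Tuple[dict, dict], dict]` return annotation suggests the collected errors are returned alongside the models when errors are tolerated:

```python
# One model per observation series, named "<oseries>_v2" so models created
# earlier without a suffix are not overwritten.
result = pstore.create_models_bulk(
    add_recharge=True,
    suffix="_v2",
    ignore_errors=True,
    progressbar=True,
)
```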
@@ -1101,13 +1162,19 @@ class PastaStore:

         # special for WellModels
         if stressmodel._name == "WellModel":
-
+            if isinstance(stresses["stress"], list):
+                names = [s.squeeze().name for s in stresses["stress"]]
+            else:
+                names = [stresses["stress"].squeeze().name]
+                stresses["stress"] = [stresses["stress"]]  # ugly fix for WellModel
            # check oseries is provided
            if oseries is None:
                raise ValueError("WellModel requires 'oseries' to compute distances!")
            # compute distances and add to kwargs
            distances = (
-                self.get_distances(oseries=oseries, stresses=names)
+                self.get_distances(oseries=oseries, stresses=names)
+                .T.squeeze(axis=1)
+                .values
            )
            kwargs["distances"] = distances
            # set settings to well
@@ -1306,56 +1373,40 @@ class PastaStore:
        ):
            solve_model(ml_name=ml_name)

-    def
-
-        mls: Optional[Union[ps.Model, list, str]] = None,
-        progressbar: bool = True,
-    ):  # pragma: no cover
-        """Get pastas model results.
+    def check_models(self, checklist=None, modelnames=None):
+        """Check models against checklist.

         Parameters
         ----------
-
-
-
-
-
+        checklist : dict, optional
+            dictionary containing model check methods, by default None which
+            uses the ps.checks.checks_brakenhoff_2022 checklist. This includes:
+              - fit metric R² >= 0.6
+              - runs test for autocorrelation
+              - t95 response < half length calibration period
+              - |model parameters| < 1.96 * σ (std deviation)
+              - model parameters are not on bounds
+        modelnames : list of str, optional
+            list of modelnames to perform checks on, by default None

         Returns
         -------
-
-
-            for each model
-
-        Raises
-        ------
-        ModuleNotFoundError
-            if the art_tools module is not available
+        pd.DataFrame
+            DataFrame containing pass True/False for each check for each model
         """
-
-
-
-
-
-        if mls is None:
-            mls = self.conn.models
-        elif isinstance(mls, ps.Model):
-            mls = [mls.name]
-
-        results_list = []
-        desc = "Get model results"
-        for mlname in tqdm(mls, desc=desc) if progressbar else mls:
-            try:
-                iml = self.conn.get_models(mlname)
-            except Exception as e:
-                print("{1}: '{0}' could not be parsed!".format(mlname, e))
-                continue
-            iresults = pastas_get_model_results(
-                iml, par_selection="all", stats=("evp",), stderrors=True
-            )
-            results_list.append(iresults)
+        if checklist is None:
+            checklist = ps.check.checks_brakenhoff_2022
+
+        names = self.conn._parse_names(modelnames, libname="models")

-
+        check_dfs = []
+        for n in names:
+            cdf = ps.check.checklist(self.models[n], checklist, report=False)["pass"]
+            cdf.name = n
+            check_dfs.append(cdf)
+        chkdf = pd.concat(check_dfs, axis=1)
+        chkdf.columns.name = "models"
+        return chkdf

     def to_zip(self, fname: str, overwrite=False, progressbar: bool = True):
         """Write data to zipfile.
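`check_models` wraps `ps.check.checklist` and returns a table of pass/fail flags, one column per model. A short sketch with illustrative model names, again using the `pstore` object from above:

```python
import pastas as ps

# Check every stored model against the default checklist
# (ps.check.checks_brakenhoff_2022): fit, autocorrelation, response time,
# parameter uncertainty and parameter bounds.
chkdf = pstore.check_models()

# Restrict the checks to specific models and pass the checklist explicitly.
chkdf = pstore.check_models(
    modelnames=["gw_well_1", "gw_well_2"],
    checklist=ps.check.checks_brakenhoff_2022,
)
print(chkdf.T)  # rows become models, columns the individual checks
```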
@@ -1373,7 +1424,7 @@ class PastaStore:

         if os.path.exists(fname) and not overwrite:
             raise FileExistsError(
-                "File already exists!
+                "File already exists! Use 'overwrite=True' to force writing file."
             )
         elif os.path.exists(fname):
             warnings.warn(f"Overwriting file '{os.path.basename(fname)}'", stacklevel=1)
@@ -1548,7 +1599,10 @@ class PastaStore:
                "models": self.model_names,
            }
         else:
-            raise ValueError(
+            raise ValueError(
+                "Provide valid libname: 'models', 'stresses', 'oseries' or None"
+                " to seach within all libraries."
+            )

         result = {}
         for lib, names in lib_names.items():
pastastore/styling.py
CHANGED
@@ -57,12 +57,12 @@ def boolean_styler(b):
     """
     if b:
         return (
-            f"background-color: {rgb2hex((231/255, 255/255, 239/255))}; "
+            f"background-color: {rgb2hex((231 / 255, 255 / 255, 239 / 255))}; "
            "color: darkgreen"
         )
     else:
         return (
-            f"background-color: {rgb2hex((255/255, 238/255, 238/255))}; "
+            f"background-color: {rgb2hex((255 / 255, 238 / 255, 238 / 255))}; "
            "color: darkred"
         )

@@ -90,11 +90,11 @@ def boolean_row_styler(row, column):
     """
     if row[column]:
         return (
-            f"background-color: {rgb2hex((231/255, 255/255, 239/255))}; "
+            f"background-color: {rgb2hex((231 / 255, 255 / 255, 239 / 255))}; "
            "color: darkgreen",
         ) * row.size
     else:
         return (
-            f"background-color: {rgb2hex((255/255, 238/255, 238/255))}; "
+            f"background-color: {rgb2hex((255 / 255, 238 / 255, 238 / 255))}; "
            "color: darkred",
         ) * row.size
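These are cosmetic whitespace changes only. For context, `boolean_styler` and `boolean_row_styler` return CSS strings meant for the pandas `Styler`; a hedged sketch of how they could be applied (assumes a pandas version with `Styler.map`, and the DataFrame is made up):

```python
import pandas as pd
from pastastore.styling import boolean_styler, boolean_row_styler

df = pd.DataFrame({"check_passed": [True, False], "stat": [0.8, 0.4]})

# Color individual boolean cells green (pass) or red (fail).
styled_cells = df[["check_passed"]].style.map(boolean_styler)

# Or color entire rows based on one boolean column.
styled_rows = df.style.apply(boolean_row_styler, column="check_passed", axis=1)
```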
pastastore/util.py
CHANGED
@@ -1,6 +1,7 @@
 """Useful utilities for pastastore."""

 import os
+import shutil
 from typing import Dict, List, Optional, Union

 import numpy as np
@@ -43,8 +44,6 @@ def delete_arcticdb_connector(
        list of library names to delete, by default None which deletes
        all libraries
     """
-    import shutil
-
     import arcticdb

     if conn is not None:
@@ -75,9 +74,17 @@ def delete_arcticdb_connector(
            print()
        print(f" - deleted: {lib}")

-
+    # delete .pastastore file if entire pastastore is deleted
+    remaining_libs = [
+        ilib for ilib in arc.list_libraries() if ilib.split(".")[0] == name
+    ]
+    if remaining_libs == 0:
+        os.unlink(os.path.join(uri.split("//")[-1], f"{name}.pastastore"))
+
+    # check if any remaining libraries in lmdb dir, if none, delete entire folder
+    remaining = arc.list_libraries()
     if len(remaining) == 0:
-        shutil.rmtree(os.path.join(conn.uri.split("//")[-1]
+        shutil.rmtree(os.path.join(conn.uri.split("//")[-1]))

     print("Done!")

@@ -98,8 +105,6 @@ def delete_dict_connector(conn, libraries: Optional[List[str]] = None) -> None:

 def delete_pas_connector(conn, libraries: Optional[List[str]] = None) -> None:
     """Delete PasConnector object."""
-    import shutil
-
     print(f"Deleting PasConnector database: '{conn.name}' ... ", end="")
     if libraries is None:
         shutil.rmtree(conn.path)
@@ -143,7 +148,7 @@ def delete_pastastore(pstore, libraries: Optional[List[str]] = None) -> None:
         delete_pas_connector(conn=pstore.conn, libraries=libraries)
     else:
         raise TypeError(
-            "Unrecognized pastastore Connector type:
+            f"Unrecognized pastastore Connector type: {pstore.conn.conn_type}"
         )

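The deletion helpers now import `shutil` at module level, remove the `<name>.pastastore` config file once all of a store's libraries are gone, and delete the database folder itself when it is empty. The top-level helper dispatches on connector type; a hedged sketch, reusing the `pstore` object from earlier:

```python
from pastastore.util import delete_pastastore

# Removes the store's libraries via the matching delete_*_connector helper;
# for file-based connectors this also cleans up the database directory.
delete_pastastore(pstore)
```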
@@ -545,7 +550,7 @@ def frontiers_checks(
        ml = pstore.get_models(mlnam)

        if ml.parameters["optimal"].hasnans:
-            print(f"Warning! Skipping model '{mlnam}' because
+            print(f"Warning! Skipping model '{mlnam}' because it is not solved!")
            continue

        checks = pd.DataFrame(columns=["stat", "threshold", "units", "check_passed"])
@@ -752,8 +757,7 @@ def frontiers_aic_select(
        modelnames += pstore.oseries_models[o]
    elif oseries is not None:
        print(
-            "Warning! Both 'modelnames' and 'oseries' provided, "
-            "using only 'modelnames'"
+            "Warning! Both 'modelnames' and 'oseries' provided, using only 'modelnames'"
        )

    # Dataframe of models with corresponding oseries
pastastore/version.py
CHANGED
@@ -9,7 +9,7 @@ PASTAS_VERSION = parse_version(ps.__version__)
 PASTAS_LEQ_022 = PASTAS_VERSION <= parse_version("0.22.0")
 PASTAS_GEQ_150 = PASTAS_VERSION >= parse_version("1.5.0")

-__version__ = "1.8.0"
+__version__ = "1.10.0"


 def show_versions(optional=False) -> None:
pastastore/yaml_interface.py
CHANGED
@@ -3,6 +3,8 @@
 import datetime
 import logging
 import os
+import tempfile
+from contextlib import contextmanager
 from copy import deepcopy
 from typing import Any, Dict, List, Optional, Union

@@ -124,6 +126,18 @@ def reduce_to_minimal_dict(d, keys=None):
            reduce_to_minimal_dict(v, keys=keys)


+@contextmanager
+def temporary_yaml_from_str(yaml):
+    """Temporary yaml file that is deleted after usage."""
+    temp = tempfile.NamedTemporaryFile(delete=False)
+    temp.write(yaml.encode("utf-8"))
+    temp.close()
+    try:
+        yield temp.name
+    finally:
+        os.unlink(temp.name)
+
+
 class PastastoreYAML:
     """Class for reading/writing Pastas models in YAML format.

@@ -427,7 +441,7 @@ class PastastoreYAML:
                    .values
                )
                logger.info(
-                    f" | using {n} nearest stress(es) with kind='{kind}':
+                    f" | using {n} nearest stress(es) with kind='{kind}': {snames}"
                )
            else:
                snames = [snames]
@@ -533,7 +547,10 @@ class PastastoreYAML:
            if (
                smnam.lower() in ["rch", "rech", "recharge", "rechargemodel"]
            ) and not smtyp:
-                logger.info(
+                logger.info(
+                    "| no StressModel type provided, using 'RechargeModel' based on "
+                    "stressmodel name."
+                )
                # check if stressmodel dictionary is empty, create (nearly
                # empty) dict so defaults are used
                if smyml is None:
@@ -547,14 +564,14 @@ class PastastoreYAML:
            # cannot make any assumptions for non-RechargeModels
            if smyml is None:
                raise ValueError(
-                    "Insufficient information
+                    f"Insufficient information for stressmodel '{name}'!"
                )
            # get stressmodel type, with default StressModel
            if classkey in smyml:
                smtyp = smyml[classkey]
            else:
                logger.info(
-                    "| no stressmodel class type provided,
+                    "| no stressmodel class type provided, using 'StressModel'"
                )
                smtyp = "StressModel"

@@ -574,7 +591,7 @@ class PastastoreYAML:
                sm = self._parse_wellmodel_dict(smyml, onam=onam)
            else:
                raise NotImplementedError(
-                    "PastaStore.yaml interface does
+                    f"PastaStore.yaml interface does not (yet) support '{smtyp}'!"
                )

            # add to list
@@ -604,7 +621,7 @@ class PastastoreYAML:
        Parameters
        ----------
        fyaml : str
-            path to file
+            YAML as str or path to file

        Returns
        -------
@@ -618,8 +635,18 @@ class PastastoreYAML:
        NotImplementedError
            if unsupported stressmodel is encountered
        """
-
-
+        if "\n" in fyaml or "\r" in fyaml:
+            with temporary_yaml_from_str(fyaml) as fyaml:
+                with open(fyaml, "r") as f:
+                    yml = yaml.load(f, Loader=yaml.CFullLoader)
+        elif os.path.exists(fyaml):
+            with open(fyaml, "r") as f:
+                yml = yaml.load(f, Loader=yaml.CFullLoader)
+        else:
+            raise ValueError(
+                "Could not read YAML file! Check if input is valid YAML "
+                "or valid path to YAML file."
+            )

        models = []

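Besides a path, the loader now accepts a YAML document passed directly as a string: anything containing a newline is written to a temporary file via `temporary_yaml_from_str` and parsed from there. Assuming the method shown above is `pstore.yaml.load`, and with illustrative series names, this could look like:

```python
yaml_str = """
my_model:
  oseries: head_well_1
  stressmodels:
    recharge:
      prec: prec_station_1
      evap: evap_station_1
"""

# A multi-line YAML string is detected by its newlines and loaded through a
# temporary file; passing a path to a .yaml file still works as before.
models = pstore.yaml.load(yaml_str)
```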
{pastastore-1.8.0.dist-info → pastastore-1.10.0.dist-info}/METADATA
CHANGED

@@ -1,12 +1,12 @@
-Metadata-Version: 2.
+Metadata-Version: 2.2
 Name: pastastore
-Version: 1.8.0
+Version: 1.10.0
 Summary: Tools for managing Pastas time series models.
 Author: D.A. Brakenhoff
 Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
 License: The MIT License (MIT)

-Copyright (c) 2020 D.A. Brakenhoff
+Copyright (c) 2020-2025 D.A. Brakenhoff

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
@@ -40,13 +40,13 @@ Classifier: Operating System :: Unix
 Classifier: Operating System :: MacOS
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Topic :: Scientific/Engineering :: Hydrology
-Requires-Python: >=3.
+Requires-Python: >=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: pastas>=0.13
@@ -65,26 +65,17 @@ Provides-Extra: arcticdb
 Requires-Dist: arcticdb; extra == "arcticdb"
 Provides-Extra: lint
 Requires-Dist: ruff; extra == "lint"
+Provides-Extra: pytest
+Requires-Dist: coverage; extra == "pytest"
+Requires-Dist: codecov; extra == "pytest"
+Requires-Dist: pytest; extra == "pytest"
+Requires-Dist: pytest-cov; extra == "pytest"
+Requires-Dist: pytest-dependency; extra == "pytest"
+Requires-Dist: pytest-benchmark; extra == "pytest"
+Requires-Dist: codacy-coverage; extra == "pytest"
 Provides-Extra: test
-Requires-Dist: pastastore[arcticdb,lint,optional]; extra == "test"
+Requires-Dist: pastastore[arcticdb,lint,optional,pytest]; extra == "test"
 Requires-Dist: hydropandas[full]; extra == "test"
-Requires-Dist: coverage; extra == "test"
-Requires-Dist: codecov; extra == "test"
-Requires-Dist: pytest; extra == "test"
-Requires-Dist: pytest-cov; extra == "test"
-Requires-Dist: pytest-dependency; extra == "test"
-Requires-Dist: pytest-benchmark; extra == "test"
-Requires-Dist: codacy-coverage; extra == "test"
-Provides-Extra: test-py312
-Requires-Dist: pastastore[lint,optional]; extra == "test-py312"
-Requires-Dist: hydropandas[full]; extra == "test-py312"
-Requires-Dist: coverage; extra == "test-py312"
-Requires-Dist: codecov; extra == "test-py312"
-Requires-Dist: pytest; extra == "test-py312"
-Requires-Dist: pytest-cov; extra == "test-py312"
-Requires-Dist: pytest-dependency; extra == "test-py312"
-Requires-Dist: pytest-benchmark; extra == "test-py312"
-Requires-Dist: codacy-coverage; extra == "test-py312"
 Provides-Extra: docs
 Requires-Dist: pastastore[optional]; extra == "docs"
 Requires-Dist: sphinx_rtd_theme; extra == "docs"
@@ -102,7 +93,7 @@ Requires-Dist: nbsphinx_link; extra == "docs"
 # pastastore

 This module stores
-[Pastas](https://pastas.readthedocs.io/
+[Pastas](https://pastas.readthedocs.io/latest/) time series and models in a
 database.

 Storing time series and models in a database allows the user to manage time
@@ -187,4 +178,4 @@ pstore.to_zip("my_backup.zip")
 ```

 For more elaborate examples, refer to the
-[Notebooks](https://pastastore.readthedocs.io/
+[Notebooks](https://pastastore.readthedocs.io/latest/examples.html#example-notebooks).
pastastore-1.10.0.dist-info/RECORD
ADDED

@@ -0,0 +1,28 @@
+docs/conf.py,sha256=XcZUTmn9fGDhhu8k3mpaLu435SpIRNpABADCCTJJuag,6291
+pastastore/__init__.py,sha256=cWwG9-YeiI4aOU0CDBGKbQgmKmmkcPd64YwPq2rRGt0,416
+pastastore/base.py,sha256=B7sPe1eEpXFSeQsgrPXc5Mvp8Xkbhe_TxML6Zlp19Lk,48172
+pastastore/connectors.py,sha256=6D2j1AUMQhHZNUhCD0tKoxf77FlQM5fTdH2m_c8KAnY,50183
+pastastore/datasets.py,sha256=FHVfmKqb8beEs9NONsWrCoJY37BmlvFLSEQ1VAFmE8A,6415
+pastastore/plotting.py,sha256=ygKXdi42sPLaehze4EjU8kRE2Dk46wVxSkB9RJ2Re84,54535
+pastastore/store.py,sha256=KOs0L4AICRFRaQRdnnq2o-oadmX1CDkcg_kDtC8Tal0,67703
+pastastore/styling.py,sha256=0IEp_r-SpcaslShAZvZV6iuEhTG_YzNq-ad8krib3U0,2304
+pastastore/util.py,sha256=31dzHaK6xdFHGDkYh49qGBq1dGel2m9r7i797S3WUpQ,28505
+pastastore/version.py,sha256=EyZTJILqRXkQwkj1Pipjq7kjKw-VsZMCFcFt78vCEK0,1206
+pastastore/yaml_interface.py,sha256=n6zjQ7ENrUvxszb6zE-jPLa-XVsoEOTJHQmRV1_fFt0,30818
+pastastore/extensions/__init__.py,sha256=lCN9xfX1qefUzUbE2FQ12c6NjLbf5HoNo-D8cGb5CTw,461
+pastastore/extensions/accessor.py,sha256=kftQM6dqMDoySbyTKcvmkjC5gJRp465KA18G4NVXUO0,367
+pastastore/extensions/hpd.py,sha256=VHMhGZaSIHTZNDYuyxGqWtDqlCSbucq44oT8sZRsu0E,27749
+tests/conftest.py,sha256=TB0ZUH1m45gvQd_EZO7iudvhFw4JA-8rTJ71GT6Nf1w,5061
+tests/test_001_import.py,sha256=g8AaJzWZ088A4B30_w-MrDfAVeeg8m78l--j7Onsklc,208
+tests/test_002_connectors.py,sha256=k9etSRuSFVOrSEtZyxqsCF9GwIg0T7VdDJ2SjSe6i_s,7742
+tests/test_003_pastastore.py,sha256=nhcUJHC2KiF9KREP_2uj_T2skKooUk13T1EVtkbwQnM,10051
+tests/test_004_yaml.py,sha256=3hMNjb9s0S2rbmpyEjW6FDRAxfUZS_U1qoPl4wB-cCo,4440
+tests/test_005_maps_plots.py,sha256=L0ppGf-cudsrdxteWy3qsV4We96DW4bCBE7c6jEm6aM,1866
+tests/test_006_benchmark.py,sha256=VZG0bY7uz8DkfIZTgRCzkEDG8rguBEt_-mdGSMQLN2w,4930
+tests/test_007_hpdextension.py,sha256=1QNUahq3hzqxjKbzsjofi9Yuyqe_oDGL0vWp6iouYe4,3004
+tests/test_008_stressmodels.py,sha256=733fyCvuzjKcaLjvSMt5dTTLp-T4alzNJAToSxTIUug,4003
+pastastore-1.10.0.dist-info/LICENSE,sha256=MB_6p4kXDCUsYNjslcMByBu6i7wMNRKPC36JnhzpN4o,1087
+pastastore-1.10.0.dist-info/METADATA,sha256=xfHn3JQgATCb34Bfn3xImDDhEFGCo5x6SnK6xDQwTbQ,7579
+pastastore-1.10.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+pastastore-1.10.0.dist-info/top_level.txt,sha256=1bgyMk1p23f04RK83Jju2_YAQBwyoQD_fInxoPB4YRw,22
+pastastore-1.10.0.dist-info/RECORD,,
tests/conftest.py
CHANGED
@@ -1,18 +1,14 @@
 # ruff: noqa: D100 D103
 import importlib
 from importlib import metadata
-from platform import python_version

 import pandas as pd
 import pastas as ps
 import pytest
-from packaging.version import parse as parse_version

 import pastastore as pst

-
-
-params = ["dict", "pas", "arcticdb"] if not IS_PY312 else ["dict", "pas"]
+params = ["dict", "pas", "arcticdb"]


def initialize_project(conn):
|