pastastore 1.10.2__py3-none-any.whl → 1.11.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pastastore/__init__.py +5 -1
- pastastore/base.py +739 -270
- pastastore/connectors.py +217 -800
- pastastore/datasets.py +23 -29
- pastastore/extensions/__init__.py +7 -3
- pastastore/extensions/hpd.py +39 -17
- pastastore/plotting.py +71 -38
- pastastore/store.py +191 -184
- pastastore/typing.py +12 -0
- pastastore/util.py +321 -88
- pastastore/validator.py +474 -0
- pastastore/version.py +1 -2
- pastastore/yaml_interface.py +37 -39
- {pastastore-1.10.2.dist-info → pastastore-1.11.0.dist-info}/METADATA +14 -8
- pastastore-1.11.0.dist-info/RECORD +30 -0
- tests/conftest.py +4 -9
- tests/test_001_import.py +2 -1
- tests/test_002_connectors.py +40 -3
- tests/test_003_pastastore.py +57 -28
- tests/test_005_maps_plots.py +12 -0
- tests/test_006_benchmark.py +1 -1
- tests/test_007_hpdextension.py +46 -8
- pastastore-1.10.2.dist-info/RECORD +0 -28
- {pastastore-1.10.2.dist-info → pastastore-1.11.0.dist-info}/WHEEL +0 -0
- {pastastore-1.10.2.dist-info → pastastore-1.11.0.dist-info}/licenses/LICENSE +0 -0
- {pastastore-1.10.2.dist-info → pastastore-1.11.0.dist-info}/top_level.txt +0 -0
pastastore/yaml_interface.py
CHANGED
@@ -6,6 +6,7 @@ import os
 import tempfile
 from contextlib import contextmanager
 from copy import deepcopy
+from pathlib import Path
 from typing import Any, Dict, List, Optional, Union

 import numpy as np
@@ -13,8 +14,6 @@ import pandas as pd
 import pastas as ps
 import yaml

-from pastastore.version import PASTAS_LEQ_022
-
 logger = logging.getLogger(__name__)


@@ -38,9 +37,9 @@ def _convert_dict_dtypes_for_yaml(d: Dict[str, Any]):
         elif isinstance(v, datetime.datetime):
             d[k] = pd.to_datetime(v).strftime("%Y-%m-%d %H:%M:%S")
         elif isinstance(v, pd.Timedelta):
-            d[k] = v.to_timedelta64()
+            d[k] = str(v.to_timedelta64())
         elif isinstance(v, datetime.timedelta):
-            d[k] = pd.to_timedelta(v).to_timedelta64()
+            d[k] = str(pd.to_timedelta(v).to_timedelta64())
         elif isinstance(v, np.int64):
             d[k] = int(v)
         elif isinstance(v, np.float64):
@@ -109,7 +108,7 @@ def reduce_to_minimal_dict(d, keys=None):
             "stress",
             "prec",
             "evap",
-            "
+            "class",
         ]

     # also keep stressmodels by adding names to keys list
@@ -133,7 +132,7 @@ def temporary_yaml_from_str(yaml):
    temp.write(yaml.encode("utf-8"))
    temp.close()
    try:
-        yield temp.name
+        yield Path(temp.name)
    finally:
        os.unlink(temp.name)

@@ -230,7 +229,7 @@ class PastastoreYAML:
        else:
            kind = "prec"
            pnam = self.pstore.get_nearest_stresses(onam, kind=kind).iloc[0, 0]
-            logger.info(
+            logger.info(" | using nearest stress with kind='%s': '%s'", kind, pnam)
            p, pmeta = self.pstore.get_stresses(pnam, return_metadata=True)
            prec = {
                "name": pnam,
@@ -266,7 +265,7 @@ class PastastoreYAML:
        else:
            kind = "evap"
            enam = self.pstore.get_nearest_stresses(onam, kind=kind).iloc[0, 0]
-            logger.info(
+            logger.info(" | using nearest stress with kind='%s': '%s'", kind, enam)
            e, emeta = self.pstore.get_stresses(enam, return_metadata=True)
            evap = {
                "name": enam,
@@ -291,11 +290,11 @@ class PastastoreYAML:
        if "rfunc" not in d:
            logger.info(" | no 'rfunc' provided, using 'Exponential'")
        # for pastas >= 0.23.0, convert rfunc value to dictionary with 'class' key
-        elif not isinstance(d["rfunc"], dict)
+        elif not isinstance(d["rfunc"], dict):
            d["rfunc"] = {"class": d["rfunc"]}

        # stressmodel
-        classkey = "
+        classkey = "class"
        if classkey not in d:
            d[classkey] = "RechargeModel"

@@ -303,7 +302,7 @@ class PastastoreYAML:
        if ("recharge" not in d) and (d[classkey] == "RechargeModel"):
            logger.info(" | no 'recharge' type provided, using 'Linear'")
        # if pastas >= 0.23.0, recharge value must be dict with class key
-        elif not isinstance(d["recharge"], dict)
+        elif not isinstance(d["recharge"], dict):
            d["recharge"] = {"class": d["recharge"]}

        # tarsomodel logic
@@ -314,7 +313,8 @@ class PastastoreYAML:
        if ((dmin is None) or (dmax is None)) and (oseries is None):
            logger.info(
                " | no 'dmin/dmax' or 'oseries' provided,"
-
+                " filling in 'oseries': '%s'",
+                onam,
            )
            d["oseries"] = onam

@@ -365,7 +365,7 @@ class PastastoreYAML:
            snam = self.pstore.get_nearest_oseries(onam).iloc[0, 0]
        else:
            snam = self.pstore.get_nearest_stresses(onam, kind=kind).iloc[0, 0]
-        logger.info(
+        logger.info(" | using nearest stress with kind='%s': %s", kind, snam)

        s, smeta = self.pstore.get_stresses(snam, return_metadata=True)
        s = {
@@ -374,7 +374,7 @@ class PastastoreYAML:
            "metadata": smeta,
            "series": s.squeeze(),
        }
-        d["stress"] =
+        d["stress"] = s

        # use stress name if not provided
        if "name" not in d:
@@ -383,9 +383,9 @@ class PastastoreYAML:
        # rfunc
        if "rfunc" not in d:
            logger.info(" | no 'rfunc' provided, using 'Gamma'")
-            d["rfunc"] =
+            d["rfunc"] = {"class": "Gamma"}
        # for pastas >= 0.23.0, convert rfunc value to dictionary with 'class' key
-        elif not isinstance(d["rfunc"], dict)
+        elif not isinstance(d["rfunc"], dict):
            d["rfunc"] = {"class": d["rfunc"]}

        return d
@@ -441,7 +441,10 @@ class PastastoreYAML:
                .values
            )
            logger.info(
-
+                " | using %d nearest stress(es) with kind='%s': %s",
+                n,
+                kind,
+                snames,
            )
        else:
            snames = [snames]
@@ -472,11 +475,9 @@ class PastastoreYAML:
        # rfunc
        if "rfunc" not in d:
            logger.info(" | no 'rfunc' provided, using 'HantushWellModel'")
-            d["rfunc"] =
-                "HantushWellModel" if PASTAS_LEQ_022 else {"class": "HantushWellModel"}
-            )
+            d["rfunc"] = {"class": "HantushWellModel"}
        # for pastas >= 0.23.0, convert rfunc value to dictionary with 'class' key
-        elif not isinstance(d["rfunc"], dict)
+        elif not isinstance(d["rfunc"], dict):
            d["rfunc"] = {"class": d["rfunc"]}

        if "up" not in d:
@@ -510,7 +511,7 @@ class PastastoreYAML:
        else:
            onam = str(mlyml.pop("oseries"))

-        logger.info(
+        logger.info("Building model '%s' for oseries '%s'", mlnam, onam)
        o, ometa = self.pstore.get_oseries(onam, return_metadata=True)

        # create model to obtain default model settings
@@ -528,14 +529,11 @@ class PastastoreYAML:
                name = smyml.get("name", smnam)
            else:
                name = smnam
-            logger.info(
+            logger.info("| parsing stressmodel: '%s'", name)

            # check whether smtyp is defined
-            classkey = "
+            classkey = "class"
            if smyml is not None:
-                if PASTAS_LEQ_022:
-                    if "class" in smyml:
-                        smyml["stressmodel"] = smyml.pop("class")
                if classkey in smyml:
                    smtyp = True
                else:
@@ -637,10 +635,10 @@ class PastastoreYAML:
        """
        if "\n" in fyaml or "\r" in fyaml:
            with temporary_yaml_from_str(fyaml) as fyaml:
-                with open(
+                with fyaml.open("r", encoding="utf-8") as f:
                    yml = yaml.load(f, Loader=yaml.CFullLoader)
-        elif
-            with open(
+        elif Path(fyaml).exists():
+            with Path(fyaml).open("r", encoding="utf-8") as f:
                yml = yaml.load(f, Loader=yaml.CFullLoader)
        else:
            raise ValueError(
@@ -655,8 +653,8 @@ class PastastoreYAML:

            mldict = self.construct_mldict(mlyml, mlnam)

-            #
-            ml = ps.io.base._load_model(mldict)
+            # Use pastas' internal _load_model - required for model reconstruction
+            ml = ps.io.base._load_model(mldict)  # noqa: SLF001
            models.append(ml)

        return models
@@ -664,7 +662,7 @@ class PastastoreYAML:
    def export_stored_models_per_oseries(
        self,
        oseries: Optional[Union[List[str], str]] = None,
-        outdir: Optional[str] = ".",
+        outdir: Optional[Path | str] = ".",
        minimal_yaml: Optional[bool] = False,
        use_nearest: Optional[bool] = False,
    ):
@@ -690,7 +688,7 @@ class PastastoreYAML:
            the time series are actually the nearest ones! Only used
            when minimal_yaml=True. Default is False.
        """
-        onames = self.pstore.conn.
+        onames = self.pstore.conn.parse_names(oseries, "oseries")

        for onam in onames:
            try:
@@ -716,7 +714,7 @@ class PastastoreYAML:
                name = d.pop("name")
                model_dicts[name] = d

-            with
+            with (Path(outdir) / f"{onam}.yaml").open("w", encoding="utf-8") as f:
                yaml.dump(model_dicts, f, Dumper=yaml.CDumper)

    def export_models(
@@ -758,7 +756,7 @@ class PastastoreYAML:
            filename for YAML file, only used if `split=False`
        """
        if models is None:
-            modelnames = self.pstore.conn.
+            modelnames = self.pstore.conn.parse_names(modelnames, "models")
            model_list = self.pstore.get_models(
                modelnames, return_dict=True, squeeze=False
            )
@@ -783,13 +781,13 @@ class PastastoreYAML:
            name = d.pop("name")
            model_dicts[name] = d

-        with
+        with (Path(outdir) / filename).open("w", encoding="utf-8") as f:
            yaml.dump(model_dicts, f, Dumper=yaml.CDumper)

    @staticmethod
    def export_model(
        ml: Union[ps.Model, dict],
-        outdir: Optional[str] = ".",
+        outdir: Optional[Path | str] = ".",
        minimal_yaml: Optional[bool] = False,
        use_nearest: Optional[bool] = False,
    ):
@@ -816,7 +814,7 @@ class PastastoreYAML:
            name = ml["name"]
        else:
            name = ml.name
-        with
+        with (Path(outdir) / f"{name}.yaml").open("w", encoding="utf-8") as f:
            if isinstance(ml, ps.Model):
                mldict = deepcopy(ml.to_dict(series=False))
            elif isinstance(ml, dict):
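Note on the `pd.Timedelta` change in `_convert_dict_dtypes_for_yaml` above: the standard YAML representers have no rule for `numpy.timedelta64`, so the converter now stringifies the value before dumping. A minimal standalone sketch of the limitation (not pastastore code; `yaml.safe_dump` and the key name are used here only to demonstrate the representer behaviour):

```python
import pandas as pd
import yaml

value = pd.Timedelta(days=10).to_timedelta64()  # numpy.timedelta64 scalar

try:
    yaml.safe_dump({"time_offset": value})
except yaml.representer.RepresenterError as err:
    # SafeDumper has no representer registered for numpy scalar types.
    print("cannot dump raw timedelta64:", err)

# Converting to str first yields a plain scalar that any YAML dumper accepts.
print(yaml.safe_dump({"time_offset": str(value)}))
```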
{pastastore-1.10.2.dist-info → pastastore-1.11.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pastastore
-Version: 1.10.2
+Version: 1.11.0
 Summary: Tools for managing Pastas time series models.
 Author: D.A. Brakenhoff
 Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
@@ -52,6 +52,7 @@ License-File: LICENSE
 Requires-Dist: pastas>=0.13
 Requires-Dist: tqdm>=4.36
 Requires-Dist: pyyaml
+Requires-Dist: colorama
 Provides-Extra: full
 Requires-Dist: pastastore[arcticdb,optional]; extra == "full"
 Requires-Dist: hydropandas; extra == "full"
@@ -63,7 +64,6 @@ Requires-Dist: pyproj; extra == "optional"
 Requires-Dist: adjustText; extra == "optional"
 Provides-Extra: arcticdb
 Requires-Dist: arcticdb; extra == "arcticdb"
-Requires-Dist: protobuf<6,>=3.5.0.post1; extra == "arcticdb"
 Provides-Extra: lint
 Requires-Dist: ruff; extra == "lint"
 Provides-Extra: pytest
@@ -74,9 +74,6 @@ Requires-Dist: pytest-cov; extra == "pytest"
 Requires-Dist: pytest-dependency; extra == "pytest"
 Requires-Dist: pytest-benchmark; extra == "pytest"
 Requires-Dist: codacy-coverage; extra == "pytest"
-Provides-Extra: test
-Requires-Dist: pastastore[arcticdb,lint,optional,pytest]; extra == "test"
-Requires-Dist: hydropandas[full]; extra == "test"
 Provides-Extra: docs
 Requires-Dist: pastastore[optional]; extra == "docs"
 Requires-Dist: sphinx_rtd_theme; extra == "docs"
@@ -84,6 +81,11 @@ Requires-Dist: Ipython; extra == "docs"
 Requires-Dist: ipykernel; extra == "docs"
 Requires-Dist: nbsphinx; extra == "docs"
 Requires-Dist: nbsphinx_link; extra == "docs"
+Provides-Extra: test
+Requires-Dist: pastastore[arcticdb,optional,pytest]; extra == "test"
+Requires-Dist: hydropandas[full]; extra == "test"
+Provides-Extra: dev
+Requires-Dist: pastastore[docs,lint,test]; extra == "dev"
 Dynamic: license-file

 
@@ -106,8 +108,12 @@ left off without having to reload everything.

 Install the module with `pip install pastastore`.

-For
-
+For development, clone the repository and install all development, testing, and
+documentation dependencies with:
+
+```sh
+pip install -e .[dev]
+```

 For plotting background maps, the `contextily` and `pyproj` packages are
 required. For a full install, including optional dependencies for plotting and
@@ -180,4 +186,4 @@ pstore.to_zip("my_backup.zip")
 ```

 For more elaborate examples, refer to the
-[Notebooks](https://pastastore.readthedocs.io/latest/examples.html#example-notebooks).
+[Notebooks](https://pastastore.readthedocs.io/en/latest/examples.html#example-notebooks).
pastastore-1.11.0.dist-info/RECORD
ADDED
@@ -0,0 +1,30 @@
+docs/conf.py,sha256=XcZUTmn9fGDhhu8k3mpaLu435SpIRNpABADCCTJJuag,6291
+pastastore/__init__.py,sha256=It_5KcVu7tMdo0QL9DXUNWtMKUDSLNfjzcuomkRdnUE,547
+pastastore/base.py,sha256=1dh2Zg9p1_qvFTvXm0NQWvCjtbn4GxR_3VI8pioC0ro,66447
+pastastore/connectors.py,sha256=6tOVsNKSoVK1PPa87IXzL4oKndKXRXKKjvo0iSx_Cqc,29866
+pastastore/datasets.py,sha256=qLucdtbn4v4DmQkj9QeYytiRlGzcn28bq3zeV4KOUi0,6198
+pastastore/plotting.py,sha256=Zvesn7yY_z4LXdTLNT5TDui1YJ1wcm_yTvgqw6h2iQg,55697
+pastastore/store.py,sha256=yYCDSnyYJRdQgho5_9smaCXAl9YVPBls7nn1KVszkrA,69484
+pastastore/styling.py,sha256=0IEp_r-SpcaslShAZvZV6iuEhTG_YzNq-ad8krib3U0,2304
+pastastore/typing.py,sha256=wsn50wH_LV3J6WXNpAUcRnSJppmGqYsX6kGCg49uGzY,376
+pastastore/util.py,sha256=XA_a7DGwqh7dkC_axdUkZvIv6GOv9bLuD8JuU-iEklI,35552
+pastastore/validator.py,sha256=DTlvDhMvzVpt3029l-6oRHdhGGCAx_HuseMb42ZLxjo,18336
+pastastore/version.py,sha256=EoknNjhECKV7q5OOsr1xudTsLXdOOYUUN92J1FZ6gZ0,1147
+pastastore/yaml_interface.py,sha256=x05edO_N-F9ia-nn5LlKzGdGATFHaGs9rwQAGvuDrnM,30577
+pastastore/extensions/__init__.py,sha256=pHZQha6yhq3fwsoDWvXW-lYEbUUmlfCcHMrYoK_1Hxs,505
+pastastore/extensions/accessor.py,sha256=kftQM6dqMDoySbyTKcvmkjC5gJRp465KA18G4NVXUO0,367
+pastastore/extensions/hpd.py,sha256=bTX3UebPKAVZikIgDfHh6xrK7sHVAT2EbiImh1e7_OE,29728
+pastastore-1.11.0.dist-info/licenses/LICENSE,sha256=MB_6p4kXDCUsYNjslcMByBu6i7wMNRKPC36JnhzpN4o,1087
+tests/conftest.py,sha256=paJy1re7l_HQEnnqQLbc3cL4Q_3CKTtJSY3DPeHEvkA,4916
+tests/test_001_import.py,sha256=o31MWpMNxngnytJeEnPz_0UTcT-XbG1JqggvrWLvSUY,238
+tests/test_002_connectors.py,sha256=o5uXrylGUiHvOAaQ-IqhM8LJ4WC1ktHBecmZ0TUreqg,9442
+tests/test_003_pastastore.py,sha256=HpwIoVu4FQ1oAkPxm7hhVCqqDtJ4bsd3IyYHa896mnI,11337
+tests/test_004_yaml.py,sha256=3hMNjb9s0S2rbmpyEjW6FDRAxfUZS_U1qoPl4wB-cCo,4440
+tests/test_005_maps_plots.py,sha256=HVfkTQhtpONl8N8Uv39nJMrkLYOmMYy9T2AjpzYmUp4,2152
+tests/test_006_benchmark.py,sha256=hEk2kpYKpghI7RDQaSuWkSzkF9lNphm8VgBxouVtWAU,4940
+tests/test_007_hpdextension.py,sha256=QKXct8sBKIypAYmI7URypYPaAtCJgM464u7D3yJMNow,4304
+tests/test_008_stressmodels.py,sha256=733fyCvuzjKcaLjvSMt5dTTLp-T4alzNJAToSxTIUug,4003
+pastastore-1.11.0.dist-info/METADATA,sha256=zjJNGAYXBLWnUIm0pm-8jEH4BP9z0lZPf5hvKkmhbic,7717
+pastastore-1.11.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+pastastore-1.11.0.dist-info/top_level.txt,sha256=1bgyMk1p23f04RK83Jju2_YAQBwyoQD_fInxoPB4YRw,22
+pastastore-1.11.0.dist-info/RECORD,,
tests/conftest.py
CHANGED
@@ -44,14 +44,10 @@ def initialize_project(conn):

     # well 1
     s = pd.read_csv("./tests/data/well_month_end.csv", index_col=0, parse_dates=True)
-
-        s
-
-
-        ).bfill()
-    except AttributeError:
-        # pastas<=0.22.0
-        pass
+    s = ps.ts.timestep_weighted_resample(
+        s,
+        pd.date_range(s.index[0] - pd.offsets.MonthBegin(), s.index[-1], freq="D"),
+    ).bfill()
     pstore.add_stress(s, "well1", kind="well", metadata={"x": 164691, "y": 423579})
     # add second well
     pstore.add_stress(
@@ -104,7 +100,6 @@ def delete_arcticdb_test_db():
     name = "test_project"
     connector = pst.ArcticDBConnector(name, connstr)
     pst.util.delete_arcticdb_connector(connector)
-    print("ArcticDBConnector 'test_project' deleted.")


 _has_pkg_cache = {}
tests/test_001_import.py
CHANGED
tests/test_002_connectors.py
CHANGED
@@ -73,7 +73,7 @@ def test_add_get_dataframe(request, conn):
     o1.index.name = "test_idx"
     conn.add_oseries(o1, "test_df", metadata=None)
     o2 = conn.get_oseries("test_df")
-    #
+    # PasConnector does not preserve DataFrames after load, so convert if needed.
     if conn.conn_type == "pas":
         o2 = o2.to_frame()
     try:
@@ -93,10 +93,29 @@ def test_add_pastas_timeseries(request, conn):
     ts = ps.timeseries.TimeSeries(o1, metadata={"x": 100000.0, "y": 400000.0})
     try:
         conn.add_oseries(ts, "test_pastas_ts", metadata=None)
-    except
+    except TypeError:
         pass


+def test_add_series_illegal_filename(request, conn):
+    o1 = pd.Series(
+        index=pd.date_range("2000", periods=10, freq="D"),
+        data=0.0,
+    )
+    o1.name = r"test\series/illegal_chars"
+    conn.add_oseries(o1, o1.name, metadata=None)
+    o2 = conn.get_oseries("testseriesillegal_chars")
+    try:
+        assert isinstance(o2, pd.Series)
+        assert o1.equals(o2)
+    finally:
+        conn.del_oseries("testseriesillegal_chars")
+
+    if conn.conn_type == "pas":
+        with pytest.raises(ValueError, match="cannot end with '_meta'"):
+            conn.add_oseries(o1, "illegal_meta", metadata=None)
+
+
 def test_update_series(request, conn):
     o1 = pd.DataFrame(
         data=1.0,
@@ -281,10 +300,28 @@ def test_empty_library(request, conn):

 @pytest.mark.dependency
 def test_delete(request, conn):
-    #
+    # No need to delete dictconnector (in memory)
     if conn.conn_type == "arcticdb":
         pst.util.delete_arcticdb_connector(conn, libraries=["oseries"])
         pst.util.delete_arcticdb_connector(conn)
     elif conn.conn_type == "pas":
         pst.util.delete_pas_connector(conn, libraries=["oseries"])
         pst.util.delete_pas_connector(conn)
+
+
+def test_new_connector_in_occupied_dir():
+    conn1 = pst.PasConnector("my_db", "./tests/data/pas")
+    with pytest.raises(
+        ValueError, match=f"Directory '{conn1.name}/' in use by another connector type!"
+    ):
+        pst.ArcticDBConnector("my_db", "lmdb://./tests/data/pas")
+
+    pst.util.delete_pas_connector(conn1)
+
+    conn1 = pst.ArcticDBConnector("my_db", "lmdb://./tests/data/arcticdb")
+    with pytest.raises(
+        ValueError, match=f"Directory '{conn1.name}/' in use by another connector type!"
+    ):
+        pst.PasConnector("my_db", "./tests/data/arcticdb")
+
+    pst.util.delete_arcticdb_connector(conn1)
tests/test_003_pastastore.py
CHANGED
@@ -1,5 +1,5 @@
 # ruff: noqa: D100 D103
-import
+from pathlib import Path

 import numpy as np
 import pandas as pd
@@ -10,6 +10,7 @@ from packaging.version import parse
 from pytest_dependency import depends

 import pastastore as pst
+from pastastore.util import SeriesUsedByModel


 @pytest.mark.dependency
@@ -30,7 +31,7 @@ def test_get_tmintmax(pstore):
     assert sttt.at["evap2", "tmax"] == pd.Timestamp("2016-11-22")
     ml = pstore.create_model("oseries1")
     ml.solve(report=False)
-    pstore.
+    pstore.add_model(ml)
     mltt = pstore.get_tmin_tmax("models")
     assert mltt.at["oseries1", "tmax"] == pd.Timestamp("2015-06-28")
     pstore.del_model("oseries1")
@@ -58,6 +59,8 @@ def test_properties(pstore):
     _ = pstore.oseries
     _ = pstore.stresses
     _ = pstore.models
+    _ = pstore.oseries_models
+    _ = pstore.stresses_models

     try:
         assert pstore.n_oseries == pstore.conn.n_oseries
@@ -71,19 +74,43 @@ def test_properties(pstore):
 def test_store_model(request, pstore):
     depends(request, [f"test_create_model[{pstore.type}]"])
     ml = pstore.create_model("oseries1")
-    pstore.
+    pstore.add_model(ml)
+
+
+@pytest.mark.dependency
+def test_del_oseries_used_by_model(request, pstore):
+    depends(request, [f"test_store_model[{pstore.type}]"])
+    oseries, ometa = pstore.get_oseries("oseries1", return_metadata=True)
+    with pytest.raises(SeriesUsedByModel):
+        pstore.del_oseries("oseries1")
+    pstore.del_oseries("oseries1", force=True)
+    pstore.add_oseries(oseries, "oseries1", metadata=ometa)
+    pstore.validator.set_protect_series_in_models(False)
+    pstore.del_oseries("oseries1")
+    pstore.add_oseries(oseries, "oseries1", metadata=ometa)
+    pstore.validator.set_protect_series_in_models(True)
+
+
+@pytest.mark.dependency
+def test_del_stress_used_by_model(request, pstore):
+    depends(request, [f"test_store_model[{pstore.type}]"])
+    stress, smeta = pstore.get_stress("prec1", return_metadata=True)
+    with pytest.raises(SeriesUsedByModel):
+        pstore.del_stress("prec1")
+    pstore.del_stress("prec1", force=True)
+    pstore.add_stress(stress, "prec1", kind="prec", metadata=smeta)
+    pstore.validator.set_protect_series_in_models(False)
+    pstore.del_stress("prec1")
+    pstore.add_stress(stress, "prec1", kind="prec", metadata=smeta)
+    pstore.validator.set_protect_series_in_models(True)


 @pytest.mark.dependency
 def test_model_accessor(request, pstore):
     depends(request, [f"test_store_model[{pstore.type}]"])
-    # repr
     pstore.models.__repr__()
-    # getter
     ml = pstore.models["oseries1"]
-    # setter
     pstore.models["oseries1_2"] = ml
-    # iter
     mnames = [ml.name for ml in pstore.models]
     try:
         assert len(mnames) == 2
@@ -96,19 +123,15 @@ def test_model_accessor(request, pstore):
 @pytest.mark.dependency
 def test_oseries_model_accessor(request, pstore):
     depends(request, [f"test_store_model[{pstore.type}]"])
-    # repr
     pstore.oseries_models.__repr__()
-    # get model names
     ml = pstore.models["oseries1"]
     ml_list1 = pstore.oseries_models["oseries1"]
     assert len(ml_list1) == 1

-    # add model
     pstore.models["oseries1_2"] = ml
     ml_list2 = pstore.oseries_models["oseries1"]
     assert len(ml_list2) == 2

-    # delete model
     pstore.del_models("oseries1_2")
     ml_list3 = pstore.oseries_models["oseries1"]
     assert len(ml_list3) == 1
@@ -145,7 +168,7 @@ def test_get_model(request, pstore):
             f"test_store_model_missing_series[{pstore.type}]",
         ],
     )
-    _ = pstore.
+    _ = pstore.get_models("oseries1")


 @pytest.mark.dependency
@@ -159,7 +182,7 @@ def test_del_model(request, pstore):
             f"test_get_model[{pstore.type}]",
         ],
     )
-    pstore.
+    pstore.del_models("oseries1")


 @pytest.mark.dependency
@@ -167,7 +190,7 @@ def test_create_models(pstore):
     _ = pstore.create_models_bulk(
         ["oseries1", "oseries2"], store=True, progressbar=False
     )
-    _ = pstore.
+    _ = pstore.models
     assert pstore.n_models == 2


@@ -183,7 +206,7 @@ def test_get_parameters(request, pstore):
 def test_get_signatures(request, pstore):
     depends(request, [f"test_create_models[{pstore.type}]"])
     s = pstore.get_signatures(progressbar=False)
-    assert s.shape[
+    assert s.shape[0] == len(ps.stats.signatures.__all__)


 @pytest.mark.dependency
@@ -202,6 +225,13 @@ def test_solve_models_and_get_stats(request, pstore):
     assert stats.index.size == 2


+@pytest.mark.dependency
+def test_check_models(request, pstore):
+    depends(request, [f"test_solve_models_and_get_stats[{pstore.type}]"])
+    if parse(ps.__version__) >= parse("1.8.0"):
+        _ = pstore.check_models(style_output=True)
+
+
 @pytest.mark.dependency
 def test_solve_models_parallel(request, pstore):
     depends(request, [f"test_create_models[{pstore.type}]"])
@@ -232,7 +262,7 @@ def test_save_and_load_model(request, pstore):


 def test_update_ts_settings(request, pstore):
-    pstore.set_check_model_series_values(False)
+    pstore.validator.set_check_model_series_values(False)

     o = pstore.get_oseries("oseries2")
     ml = ps.Model(o.loc[:"2013"], name="ml_oseries2")
@@ -258,14 +288,7 @@ def test_update_ts_settings(request, pstore):
     assert ml2.stressmodels["recharge"].evap.settings["tmax"] == tmax
     assert ml2.stressmodels["prec"].stress[0].settings["tmax"] == p2.index[-1]
     pstore.del_models("ml_oseries2")
-    pstore.set_check_model_series_values(True)
-
-
-# @pytest.mark.dependency()
-# def test_model_results(request, pstore):
-#     depends(request, [f"test_create_models[{pstore.type}]",
-#         f"test_solve_models[{pstore.type}]"])
-#     pstore.model_results(["oseries1", "oseries2"], progressbar=False)
+    pstore.validator.set_check_model_series_values(True)


 def test_oseries_distances(pstore):
@@ -291,15 +314,17 @@ def test_to_from_zip(pstore):
         store = pst.PastaStore.from_zip(zipname, conn)
         assert not store.oseries.empty
     finally:
-
+        Path(zipname).unlink()


 def test_load_pastastore_from_config_file(pstore):
     if pstore.type == "pas" or pstore.type == "arcticdb":
         path = (
-            pstore.conn.path
+            pstore.conn.path
+            if pstore.type == "pas"
+            else Path(pstore.conn.uri.split("://")[-1]) / pstore.conn.name
         )
-        fname =
+        fname = path / f"{pstore.conn.name}.pastastore"
         pstore2 = pst.PastaStore.from_pastastore_config_file(fname)
         assert not pstore2.empty

@@ -331,8 +356,12 @@ def test_meta_with_name(pstore):

 @pytest.mark.dependency
 def test_models_metadata(request, pstore):
-    # depends(request, [f"test_create_models[{pstore.type}]"])
     pstore.create_models_bulk(["oseries1", "oseries2"], store=True, progressbar=False)
     df = pstore.models.metadata
     assert df.index.size == 2
     assert (df["n_stressmodels"] == 1).all()
+
+
+def test_pstore_validator_settings(pstore):
+    _ = pstore.validator.settings
+    _ = pstore.conn.validation_settings