pastastore 1.6.1__tar.gz → 1.7.0__tar.gz
This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
- {pastastore-1.6.1 → pastastore-1.7.0}/PKG-INFO +1 -1
- {pastastore-1.6.1 → pastastore-1.7.0}/pastastore/base.py +5 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/pastastore/store.py +103 -34
- {pastastore-1.6.1 → pastastore-1.7.0}/pastastore/styling.py +39 -6
- {pastastore-1.6.1 → pastastore-1.7.0}/pastastore/version.py +1 -1
- {pastastore-1.6.1 → pastastore-1.7.0}/pastastore.egg-info/PKG-INFO +1 -1
- {pastastore-1.6.1 → pastastore-1.7.0}/pyproject.toml +1 -1
- {pastastore-1.6.1 → pastastore-1.7.0}/tests/test_003_pastastore.py +6 -1
- {pastastore-1.6.1 → pastastore-1.7.0}/LICENSE +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/pastastore/__init__.py +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/pastastore/connectors.py +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/pastastore/datasets.py +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/pastastore/plotting.py +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/pastastore/util.py +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/pastastore/yaml_interface.py +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/pastastore.egg-info/SOURCES.txt +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/pastastore.egg-info/dependency_links.txt +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/pastastore.egg-info/requires.txt +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/pastastore.egg-info/top_level.txt +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/readme.md +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/setup.cfg +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/tests/test_001_import.py +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/tests/test_002_connectors.py +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/tests/test_004_yaml.py +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/tests/test_005_maps_plots.py +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/tests/test_006_benchmark.py +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/tests/test_007_hpdextension.py +0 -0
- {pastastore-1.6.1 → pastastore-1.7.0}/tests/test_008_stressmodels.py +0 -0
{pastastore-1.6.1 → pastastore-1.7.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pastastore
-Version: 1.6.1
+Version: 1.7.0
 Summary: Tools for managing Pastas time series models.
 Author: D.A. Brakenhoff
 Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
{pastastore-1.6.1 → pastastore-1.7.0}/pastastore/base.py
@@ -56,6 +56,11 @@ class BaseConnector(ABC):
             f"{self.n_models} models"
         )
 
+    @property
+    def empty(self):
+        """Check if the database is empty."""
+        return not any([self.n_oseries > 0, self.n_stresses > 0, self.n_models > 0])
+
     @abstractmethod
     def _get_library(self, libname: str):
         """Get library handle.
{pastastore-1.6.1 → pastastore-1.7.0}/pastastore/store.py
@@ -4,6 +4,8 @@ import json
 import logging
 import os
 import warnings
+from concurrent.futures import ProcessPoolExecutor
+from functools import partial
 from typing import Dict, List, Literal, Optional, Tuple, Union
 
 import numpy as np
@@ -12,6 +14,7 @@ import pastas as ps
 from packaging.version import parse as parse_version
 from pastas.io.pas import pastas_hook
 from tqdm.auto import tqdm
+from tqdm.contrib.concurrent import process_map
 
 from pastastore.base import BaseConnector
 from pastastore.connectors import DictConnector
@@ -78,6 +81,11 @@ class PastaStore:
         self.plots = Plots(self)
         self.yaml = PastastoreYAML(self)
 
+    @property
+    def empty(self) -> bool:
+        """Check if the PastaStore is empty."""
+        return self.conn.empty
+
     def _register_connector_methods(self):
         """Register connector methods (internal method)."""
         methods = [
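Both the connector base class and the PastaStore now expose an `empty` property. A minimal usage sketch, assuming an in-memory DictConnector; the database name and the constructor call are illustrative and may differ slightly between pastastore versions::

    import pastastore as pst

    # a fresh in-memory database; "my_db" is just an example name
    conn = pst.DictConnector("my_db")
    pstore = pst.PastaStore(conn)

    # True as long as no oseries, stresses or models have been added
    print(pstore.empty)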
@@ -1175,18 +1183,19 @@ class PastaStore:
 
     def solve_models(
         self,
+        modelnames: Union[List[str], str, None] = None,
         report: bool = False,
         ignore_solve_errors: bool = False,
-        store_result: bool = True,
         progressbar: bool = True,
+        parallel: bool = False,
+        max_workers: Optional[int] = None,
         **kwargs,
     ) -> None:
         """Solves the models in the store.
 
         Parameters
         ----------
+        modelnames : list of str, optional
             list of model names, if None all models in the pastastore
             are solved.
         report : boolean, optional
@@ -1196,43 +1205,103 @@ class PastaStore:
             if True, errors emerging from the solve method are ignored,
             default is False which will raise an exception when a model
             cannot be optimized
-        store_result : bool, optional
-            if True save optimized models, default is True
         progressbar : bool, optional
-            show progressbar, default is True
+            show progressbar, default is True.
+        parallel: bool, optional
+            if True, solve models in parallel using ProcessPoolExecutor
+        max_workers: int, optional
+            maximum number of workers to use in parallel solving, default is
+            None which will use the number of cores available on the machine
+        **kwargs : dictionary
             arguments are passed to the solve method.
+
+        Notes
+        -----
+        Users should be aware that parallel solving is platform dependent
+        and may not always work. The current implementation works well for Linux users.
+        For Windows users, parallel solving does not work when called directly from
+        Jupyter Notebooks or IPython. To use parallel solving on Windows, the following
+        code should be used in a Python file::
+
+            from multiprocessing import freeze_support
+
+            if __name__ == "__main__":
+                freeze_support()
+                pstore.solve_models(parallel=True)
         """
-        if mls
-            mls = [mls.name]
+        if "mls" in kwargs:
+            modelnames = kwargs.pop("mls")
+            logger.warning("Argument `mls` is deprecated, use `modelnames` instead.")
 
-        for ml_name in tqdm(mls, desc=desc) if progressbar else mls:
-            ml = self.conn.get_models(ml_name)
+        modelnames = self.conn._parse_names(modelnames, libname="models")
 
+        solve_model = partial(
+            self._solve_model,
+            report=report,
+            ignore_solve_errors=ignore_solve_errors,
+            **kwargs,
+        )
+        if self.conn.conn_type != "pas":
+            parallel = False
+            logger.error(
+                "Parallel solving only supported for PasConnector databases."
+                "Setting parallel to `False`"
+            )
 
+        if parallel and progressbar:
+            process_map(solve_model, modelnames, max_workers=max_workers)
+        elif parallel and not progressbar:
+            with ProcessPoolExecutor(max_workers=max_workers) as executor:
+                executor.map(solve_model, modelnames)
+        else:
+            for ml_name in (
+                tqdm(modelnames, desc="Solving models") if progressbar else modelnames
+            ):
+                solve_model(ml_name=ml_name)
+
+    def _solve_model(
+        self,
+        ml_name: str,
+        report: bool = False,
+        ignore_solve_errors: bool = False,
+        **kwargs,
+    ) -> None:
+        """Solve a model in the store (internal method).
+
+        ml_name : list of str, optional
+            name of a model in the pastastore
+        report : boolean, optional
+            determines if a report is printed when the model is solved,
+            default is False
+        ignore_solve_errors : boolean, optional
+            if True, errors emerging from the solve method are ignored,
+            default is False which will raise an exception when a model
+            cannot be optimized
+        **kwargs : dictionary
+            arguments are passed to the solve method.
+        """
+        ml = self.conn.get_models(ml_name)
+        m_kwargs = {}
+        for key, value in kwargs.items():
+            if isinstance(value, pd.Series):
+                m_kwargs[key] = value.loc[ml.name]
+            else:
+                m_kwargs[key] = value
+        # Convert timestamps
+        for tstamp in ["tmin", "tmax"]:
+            if tstamp in m_kwargs:
+                m_kwargs[tstamp] = pd.Timestamp(m_kwargs[tstamp])
+
+        try:
+            ml.solve(report=report, **m_kwargs)
+        except Exception as e:
+            if ignore_solve_errors:
+                warning = "Solve error ignored for '%s': %s " % (ml.name, e)
+                logger.warning(warning)
+            else:
+                raise e
+
+        self.conn.add_model(ml, overwrite=True)
 
     def model_results(
         self,
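To illustrate the new `parallel` and `max_workers` arguments, a short sketch of a parallel solve call, assuming a PasConnector-based store already bound to `pstore`; the model names are placeholders, and on Windows the call must sit inside an `if __name__ == "__main__":` guard as noted in the docstring above::

    # solve two named models on 4 worker processes; modelnames=None solves all models
    pstore.solve_models(
        modelnames=["model_A", "model_B"],
        parallel=True,
        max_workers=4,
        ignore_solve_errors=True,
        report=False,
    )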
{pastastore-1.6.1 → pastastore-1.7.0}/pastastore/styling.py
@@ -1,8 +1,8 @@
 """Module containing dataframe styling functions."""
 
-import matplotlib as mpl
 import matplotlib.pyplot as plt
 import numpy as np
+from matplotlib.colors import rgb2hex
 
 
 def float_styler(val, norm, cmap=None):
@@ -26,12 +26,12 @@ def float_styler(val, norm, cmap=None):
     -----
     Given some dataframe
 
-    >>> df.map(float_styler, subset=["some column"], norm=norm, cmap=cmap)
+    >>> df.style.map(float_styler, subset=["some column"], norm=norm, cmap=cmap)
     """
     if cmap is None:
         cmap = plt.get_cmap("RdYlBu")
     bg = cmap(norm(val))
-    color =
+    color = rgb2hex(bg)
     c = "White" if np.mean(bg[:3]) < 0.4 else "Black"
     return f"background-color: {color}; color: {c}"
 
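A brief sketch of applying `float_styler` through the pandas Styler API, following the updated docstring example; the dataframe, column name and normalization range are made up::

    import pandas as pd
    from matplotlib.colors import Normalize

    from pastastore.styling import float_styler

    df = pd.DataFrame({"evp": [12.3, 55.0, 87.6]})  # example data
    norm = Normalize(vmin=0.0, vmax=100.0)  # maps values onto the colormap
    # Styler.map requires pandas >= 2.1 (formerly Styler.applymap)
    styled = df.style.map(float_styler, subset=["evp"], norm=norm)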
@@ -53,15 +53,48 @@ def boolean_styler(b):
     -----
     Given some dataframe
 
-    >>> df.map(boolean_styler, subset=["some column"])
+    >>> df.style.map(boolean_styler, subset=["some column"])
     """
     if b:
         return (
-            f"background-color: {
+            f"background-color: {rgb2hex((231/255, 255/255, 239/255))}; "
             "color: darkgreen"
         )
     else:
         return (
-            f"background-color: {
+            f"background-color: {rgb2hex((255/255, 238/255, 238/255))}; "
             "color: darkred"
         )
+
+
+def boolean_row_styler(row, column):
+    """Styler function to color rows based on the value in column.
+
+    Parameters
+    ----------
+    row : pd.Series
+        row in dataframe
+    column : str
+        column name to get boolean value for styling
+
+    Returns
+    -------
+    str
+        css for styling dataframe row
+
+    Usage
+    -----
+    Given some dataframe
+
+    >>> df.style.apply(boolean_row_styler, column="boolean_column", axis=1)
+    """
+    if row[column]:
+        return (
+            f"background-color: {rgb2hex((231/255, 255/255, 239/255))}; "
+            "color: darkgreen",
+        ) * row.size
+    else:
+        return (
+            f"background-color: {rgb2hex((255/255, 238/255, 238/255))}; "
+            "color: darkred",
+        ) * row.size
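Similarly, a sketch of the boolean stylers on a made-up dataframe; `boolean_styler` colours individual cells, while the new `boolean_row_styler` colours whole rows based on a single column::

    import pandas as pd

    from pastastore.styling import boolean_row_styler, boolean_styler

    df = pd.DataFrame({"model": ["ml1", "ml2"], "solved": [True, False]})  # example data

    # colour only the cells in the "solved" column
    cells = df.style.map(boolean_styler, subset=["solved"])

    # colour entire rows green/red depending on the "solved" column
    rows = df.style.apply(boolean_row_styler, column="solved", axis=1)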
{pastastore-1.6.1 → pastastore-1.7.0}/pastastore/version.py
@@ -9,7 +9,7 @@ PASTAS_VERSION = parse_version(ps.__version__)
 PASTAS_LEQ_022 = PASTAS_VERSION <= parse_version("0.22.0")
 PASTAS_GEQ_150 = PASTAS_VERSION >= parse_version("1.5.0")
 
-__version__ = "1.6.1"
+__version__ = "1.7.0"
 
 
 def show_versions(optional=False) -> None:
{pastastore-1.6.1 → pastastore-1.7.0}/pastastore.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pastastore
-Version: 1.6.1
+Version: 1.7.0
 Summary: Tools for managing Pastas time series models.
 Author: D.A. Brakenhoff
 Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
{pastastore-1.6.1 → pastastore-1.7.0}/tests/test_003_pastastore.py
@@ -195,13 +195,18 @@ def test_iter_models(request, pstore):
 def test_solve_models_and_get_stats(request, pstore):
     depends(request, [f"test_create_models[{pstore.type}]"])
     _ = pstore.solve_models(
-        ignore_solve_errors=False, progressbar=False,
+        ignore_solve_errors=False, progressbar=False, parallel=False
     )
     stats = pstore.get_statistics(["evp", "aic"], progressbar=False)
     assert stats.index.size == 2
 
 
 @pytest.mark.dependency
+def test_solve_models_parallel(request, pstore):
+    depends(request, [f"test_create_models[{pstore.type}]"])
+    _ = pstore.solve_models(ignore_solve_errors=False, progressbar=False, parallel=True)
+
+
 def test_apply(request, pstore):
     depends(request, [f"test_solve_models_and_get_stats[{pstore.type}]"])
 
All other files listed above are unchanged between 1.6.1 and 1.7.0.