pastastore 1.10.2-py3-none-any.whl → 1.12.0-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in the public registry. It is provided for informational purposes only.
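For context, a diff like the one rendered below can be reproduced locally from the two wheels with nothing but the Python standard library. The sketch below is illustrative only (it is not the tool that generated this page); the wheel filenames are the two versions compared here and are assumed to be downloaded into the working directory.

# Illustrative sketch: diff one file between two wheels (a wheel is a zip archive).
import difflib
import zipfile

OLD = "pastastore-1.10.2-py3-none-any.whl"  # assumed local path
NEW = "pastastore-1.12.0-py3-none-any.whl"  # assumed local path

def read_member(wheel_path: str, member: str) -> list[str]:
    """Return the decoded lines of one file inside a wheel."""
    with zipfile.ZipFile(wheel_path) as zf:
        return zf.read(member).decode("utf-8", errors="replace").splitlines(keepends=True)

old_lines = read_member(OLD, "pastastore/base.py")
new_lines = read_member(NEW, "pastastore/base.py")

# unified_diff yields the same kind of +/- hunks shown below for pastastore/base.py
for line in difflib.unified_diff(
    old_lines, new_lines,
    fromfile="1.10.2/pastastore/base.py",
    tofile="1.12.0/pastastore/base.py",
):
    print(line, end="")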
- docs/conf.py +10 -97
- pastastore/__init__.py +5 -1
- pastastore/base.py +875 -272
- pastastore/connectors.py +359 -816
- pastastore/datasets.py +23 -33
- pastastore/extensions/__init__.py +7 -3
- pastastore/extensions/hpd.py +39 -17
- pastastore/plotting.py +71 -38
- pastastore/store.py +205 -186
- pastastore/styling.py +4 -2
- pastastore/typing.py +12 -0
- pastastore/util.py +322 -88
- pastastore/validator.py +524 -0
- pastastore/version.py +2 -3
- pastastore/yaml_interface.py +37 -39
- {pastastore-1.10.2.dist-info → pastastore-1.12.0.dist-info}/METADATA +17 -11
- pastastore-1.12.0.dist-info/RECORD +31 -0
- {pastastore-1.10.2.dist-info → pastastore-1.12.0.dist-info}/WHEEL +1 -1
- tests/conftest.py +156 -59
- tests/test_001_import.py +2 -1
- tests/test_002_connectors.py +40 -3
- tests/test_003_pastastore.py +60 -29
- tests/test_005_maps_plots.py +12 -0
- tests/test_006_benchmark.py +1 -1
- tests/test_007_hpdextension.py +46 -8
- tests/test_009_parallel.py +393 -0
- pastastore-1.10.2.dist-info/RECORD +0 -28
- {pastastore-1.10.2.dist-info → pastastore-1.12.0.dist-info}/licenses/LICENSE +0 -0
- {pastastore-1.10.2.dist-info → pastastore-1.12.0.dist-info}/top_level.txt +0 -0
pastastore/base.py
CHANGED
|
@@ -2,25 +2,236 @@
|
|
|
2
2
|
"""Base classes for PastaStore Connectors."""
|
|
3
3
|
|
|
4
4
|
import functools
|
|
5
|
+
import logging
|
|
5
6
|
import warnings
|
|
6
7
|
|
|
7
8
|
# import weakref
|
|
8
9
|
from abc import ABC, abstractmethod
|
|
10
|
+
from collections.abc import Iterable
|
|
9
11
|
from itertools import chain
|
|
10
|
-
from
|
|
12
|
+
from random import choice
|
|
13
|
+
|
|
14
|
+
# import weakref
|
|
15
|
+
from typing import Callable, Dict, List, Optional, Union
|
|
11
16
|
|
|
12
17
|
import pandas as pd
|
|
13
18
|
import pastas as ps
|
|
19
|
+
from packaging.version import parse as parse_version
|
|
14
20
|
from tqdm.auto import tqdm
|
|
15
21
|
|
|
16
|
-
from pastastore.
|
|
17
|
-
from pastastore.
|
|
22
|
+
from pastastore.typing import AllLibs, FrameOrSeriesUnion, TimeSeriesLibs
|
|
23
|
+
from pastastore.util import (
|
|
24
|
+
ItemInLibraryException,
|
|
25
|
+
SeriesUsedByModel,
|
|
26
|
+
_custom_warning,
|
|
27
|
+
validate_names,
|
|
28
|
+
)
|
|
29
|
+
from pastastore.validator import Validator
|
|
30
|
+
from pastastore.version import PASTAS_GEQ_150
|
|
18
31
|
|
|
19
|
-
FrameorSeriesUnion = Union[pd.DataFrame, pd.Series]
|
|
20
32
|
warnings.showwarning = _custom_warning
|
|
21
33
|
|
|
34
|
+
logger = logging.getLogger(__name__)
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class ConnectorUtil:
|
|
38
|
+
"""Mix-in class for utility methods used by BaseConnector subclasses.
|
|
39
|
+
|
|
40
|
+
This class contains internal methods for parsing names, handling metadata,
|
|
41
|
+
and parsing model dictionaries. It is designed to be mixed into BaseConnector
|
|
42
|
+
subclasses and assumes the presence of certain attributes and methods from
|
|
43
|
+
BaseConnector (e.g., oseries_names, stresses_names, get_oseries, get_stresses).
|
|
44
|
+
|
|
45
|
+
Note
|
|
46
|
+
----
|
|
47
|
+
This class should not be instantiated directly. It is intended to be used
|
|
48
|
+
as a mixin with BaseConnector subclasses only.
|
|
49
|
+
"""
|
|
50
|
+
|
|
51
|
+
def _parse_names(
|
|
52
|
+
self,
|
|
53
|
+
names: list[str] | str | None = None,
|
|
54
|
+
libname: AllLibs = "oseries",
|
|
55
|
+
) -> list:
|
|
56
|
+
"""Parse names kwarg, returns iterable with name(s) (internal method).
|
|
57
|
+
|
|
58
|
+
Parameters
|
|
59
|
+
----------
|
|
60
|
+
names : Union[list, str], optional
|
|
61
|
+
str or list of str or None or 'all' (last two options
|
|
62
|
+
retrieves all names)
|
|
63
|
+
libname : str, optional
|
|
64
|
+
name of library, default is 'oseries'
|
|
65
|
+
|
|
66
|
+
Returns
|
|
67
|
+
-------
|
|
68
|
+
list
|
|
69
|
+
list of names
|
|
70
|
+
"""
|
|
71
|
+
if not isinstance(names, str) and isinstance(names, Iterable):
|
|
72
|
+
return names
|
|
73
|
+
elif isinstance(names, str) and names != "all":
|
|
74
|
+
return [names]
|
|
75
|
+
elif names is None or names == "all":
|
|
76
|
+
if libname == "oseries":
|
|
77
|
+
return self.oseries_names
|
|
78
|
+
elif libname == "stresses":
|
|
79
|
+
return self.stresses_names
|
|
80
|
+
elif libname == "models":
|
|
81
|
+
return self.model_names
|
|
82
|
+
elif libname == "oseries_models":
|
|
83
|
+
return self.oseries_with_models
|
|
84
|
+
elif libname == "stresses_models":
|
|
85
|
+
return self.stresses_with_models
|
|
86
|
+
else:
|
|
87
|
+
raise ValueError(f"No library '{libname}'!")
|
|
88
|
+
else:
|
|
89
|
+
raise NotImplementedError(f"Cannot parse 'names': {names}")
|
|
90
|
+
|
|
91
|
+
@staticmethod
|
|
92
|
+
def _meta_list_to_frame(metalist: list, names: list):
|
|
93
|
+
"""Convert list of metadata dictionaries to DataFrame.
|
|
22
94
|
|
|
23
|
-
|
|
95
|
+
Parameters
|
|
96
|
+
----------
|
|
97
|
+
metalist : list
|
|
98
|
+
list of metadata dictionaries
|
|
99
|
+
names : list
|
|
100
|
+
list of names corresponding to data in metalist
|
|
101
|
+
|
|
102
|
+
Returns
|
|
103
|
+
-------
|
|
104
|
+
pandas.DataFrame
|
|
105
|
+
DataFrame containing overview of metadata
|
|
106
|
+
"""
|
|
107
|
+
# convert to dataframe
|
|
108
|
+
if len(metalist) > 1:
|
|
109
|
+
meta = pd.DataFrame(metalist)
|
|
110
|
+
if len({"x", "y"}.difference(meta.columns)) == 0:
|
|
111
|
+
meta["x"] = meta["x"].astype(float)
|
|
112
|
+
meta["y"] = meta["y"].astype(float)
|
|
113
|
+
elif len(metalist) == 1:
|
|
114
|
+
meta = pd.DataFrame(metalist)
|
|
115
|
+
elif len(metalist) == 0:
|
|
116
|
+
meta = pd.DataFrame()
|
|
117
|
+
|
|
118
|
+
meta.index = names
|
|
119
|
+
meta.index.name = "name"
|
|
120
|
+
return meta
|
|
121
|
+
|
|
122
|
+
def _parse_model_dict(self, mdict: dict, update_ts_settings: bool = False):
|
|
123
|
+
"""Parse dictionary describing pastas models (internal method).
|
|
124
|
+
|
|
125
|
+
Parameters
|
|
126
|
+
----------
|
|
127
|
+
mdict : dict
|
|
128
|
+
dictionary describing pastas.Model
|
|
129
|
+
update_ts_settings : bool, optional
|
|
130
|
+
update stored tmin and tmax in time series settings
|
|
131
|
+
based on time series loaded from store.
|
|
132
|
+
|
|
133
|
+
Returns
|
|
134
|
+
-------
|
|
135
|
+
ml : pastas.Model
|
|
136
|
+
time series analysis model
|
|
137
|
+
"""
|
|
138
|
+
PASFILE_LEQ_022 = parse_version(
|
|
139
|
+
mdict["file_info"]["pastas_version"]
|
|
140
|
+
) <= parse_version("0.22.0")
|
|
141
|
+
|
|
142
|
+
# oseries
|
|
143
|
+
if "series" not in mdict["oseries"]:
|
|
144
|
+
name = str(mdict["oseries"]["name"])
|
|
145
|
+
if name not in self.oseries.index:
|
|
146
|
+
msg = f"oseries '{name}' not present in library"
|
|
147
|
+
raise LookupError(msg)
|
|
148
|
+
mdict["oseries"]["series"] = self.get_oseries(name).squeeze()
|
|
149
|
+
# update tmin/tmax from time series
|
|
150
|
+
if update_ts_settings:
|
|
151
|
+
mdict["oseries"]["settings"]["tmin"] = mdict["oseries"]["series"].index[
|
|
152
|
+
0
|
|
153
|
+
]
|
|
154
|
+
mdict["oseries"]["settings"]["tmax"] = mdict["oseries"]["series"].index[
|
|
155
|
+
-1
|
|
156
|
+
]
|
|
157
|
+
|
|
158
|
+
# StressModel, WellModel
|
|
159
|
+
for ts in mdict["stressmodels"].values():
|
|
160
|
+
if "stress" in ts.keys():
|
|
161
|
+
# WellModel
|
|
162
|
+
classkey = "stressmodel" if PASFILE_LEQ_022 else "class"
|
|
163
|
+
if ts[classkey] == "WellModel":
|
|
164
|
+
for stress in ts["stress"]:
|
|
165
|
+
if "series" not in stress:
|
|
166
|
+
name = str(stress["name"])
|
|
167
|
+
if self._item_exists("stresses", name):
|
|
168
|
+
stress["series"] = self.get_stresses(name).squeeze()
|
|
169
|
+
# update tmin/tmax from time series
|
|
170
|
+
if update_ts_settings:
|
|
171
|
+
stress["settings"]["tmin"] = stress["series"].index[
|
|
172
|
+
0
|
|
173
|
+
]
|
|
174
|
+
stress["settings"]["tmax"] = stress["series"].index[
|
|
175
|
+
-1
|
|
176
|
+
]
|
|
177
|
+
# StressModel
|
|
178
|
+
else:
|
|
179
|
+
for stress in ts["stress"] if PASFILE_LEQ_022 else [ts["stress"]]:
|
|
180
|
+
if "series" not in stress:
|
|
181
|
+
name = str(stress["name"])
|
|
182
|
+
if self._item_exists("stresses", name):
|
|
183
|
+
stress["series"] = self.get_stresses(name).squeeze()
|
|
184
|
+
# update tmin/tmax from time series
|
|
185
|
+
if update_ts_settings:
|
|
186
|
+
stress["settings"]["tmin"] = stress["series"].index[
|
|
187
|
+
0
|
|
188
|
+
]
|
|
189
|
+
stress["settings"]["tmax"] = stress["series"].index[
|
|
190
|
+
-1
|
|
191
|
+
]
|
|
192
|
+
|
|
193
|
+
# RechargeModel, TarsoModel
|
|
194
|
+
if ("prec" in ts.keys()) and ("evap" in ts.keys()):
|
|
195
|
+
for stress in [ts["prec"], ts["evap"]]:
|
|
196
|
+
if "series" not in stress:
|
|
197
|
+
name = str(stress["name"])
|
|
198
|
+
if self._item_exists("stresses", name):
|
|
199
|
+
stress["series"] = self.get_stresses(name).squeeze()
|
|
200
|
+
# update tmin/tmax from time series
|
|
201
|
+
if update_ts_settings:
|
|
202
|
+
stress["settings"]["tmin"] = stress["series"].index[0]
|
|
203
|
+
stress["settings"]["tmax"] = stress["series"].index[-1]
|
|
204
|
+
else:
|
|
205
|
+
msg = "stress '{name}' not present in library"
|
|
206
|
+
raise KeyError(msg)
|
|
207
|
+
|
|
208
|
+
# hack for pcov w dtype object (when filled with NaNs on store?)
|
|
209
|
+
if "fit" in mdict:
|
|
210
|
+
if "pcov" in mdict["fit"]:
|
|
211
|
+
pcov = mdict["fit"]["pcov"]
|
|
212
|
+
if pcov.dtypes.apply(lambda dtyp: isinstance(dtyp, object)).any():
|
|
213
|
+
mdict["fit"]["pcov"] = pcov.astype(float)
|
|
214
|
+
|
|
215
|
+
# check pastas version vs pas-file version
|
|
216
|
+
file_version = mdict["file_info"]["pastas_version"]
|
|
217
|
+
|
|
218
|
+
# check file version and pastas version
|
|
219
|
+
# if file<0.23 and pastas>=1.0 --> error
|
|
220
|
+
PASTAS_GT_023 = parse_version(ps.__version__) > parse_version("0.23.1")
|
|
221
|
+
if PASFILE_LEQ_022 and PASTAS_GT_023:
|
|
222
|
+
raise UserWarning(
|
|
223
|
+
f"This file was created with Pastas v{file_version} "
|
|
224
|
+
f"and cannot be loaded with Pastas v{ps.__version__} Please load and "
|
|
225
|
+
"save the file with Pastas 0.23 first to update the file "
|
|
226
|
+
"format."
|
|
227
|
+
)
|
|
228
|
+
|
|
229
|
+
# Use pastas' internal _load_model - required for model reconstruction
|
|
230
|
+
ml = ps.io.base._load_model(mdict) # noqa: SLF001
|
|
231
|
+
return ml
|
|
232
|
+
|
|
233
|
+
|
|
234
|
+
class BaseConnector(ABC, ConnectorUtil):
|
|
24
235
|
"""Base Connector class.
|
|
25
236
|
|
|
26
237
|
Class holds base logic for dealing with time series and Pastas Models. Create your
|
|
@@ -33,18 +244,35 @@ class BaseConnector(ABC):
|
|
|
33
244
|
"stresses",
|
|
34
245
|
"models",
|
|
35
246
|
"oseries_models",
|
|
247
|
+
"stresses_models",
|
|
36
248
|
]
|
|
37
249
|
|
|
38
|
-
|
|
39
|
-
|
|
250
|
+
_conn_type: Optional[str] = None
|
|
251
|
+
_validator: Optional[Validator] = None
|
|
252
|
+
name = None
|
|
253
|
+
_added_models = [] # internal list of added models used for updating links
|
|
40
254
|
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
USE_PASTAS_VALIDATE_SERIES = False if PASTAS_LEQ_022 else True
|
|
255
|
+
def __getstate__(self):
|
|
256
|
+
"""Replace Manager proxies with simple values for pickling.
|
|
44
257
|
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
258
|
+
Manager proxies cannot be pickled, so we convert them to simple booleans.
|
|
259
|
+
This allows connectors to be pickled for multiprocessing.
|
|
260
|
+
"""
|
|
261
|
+
state = self.__dict__.copy()
|
|
262
|
+
# Replace unpicklable Manager proxies with their values
|
|
263
|
+
state["_oseries_links_need_update"] = self._oseries_links_need_update.value
|
|
264
|
+
state["_stresses_links_need_update"] = self._stresses_links_need_update.value
|
|
265
|
+
return state
|
|
266
|
+
|
|
267
|
+
def __setstate__(self, state):
|
|
268
|
+
"""Replace Manager proxies with simple booleans after unpickling.
|
|
269
|
+
|
|
270
|
+
After unpickling, use simple booleans instead of recreating Manager.
|
|
271
|
+
This works because worker processes don't need shared memory - they
|
|
272
|
+
work on independent copies of the connector.
|
|
273
|
+
"""
|
|
274
|
+
self.__dict__.update(state)
|
|
275
|
+
# Flags are already simple booleans from __getstate__
|
|
48
276
|
|
|
49
277
|
def __repr__(self):
|
|
50
278
|
"""Representation string of the object."""
|
|
@@ -55,13 +283,34 @@ class BaseConnector(ABC):
|
|
|
55
283
|
f"{self.n_models} models"
|
|
56
284
|
)
|
|
57
285
|
|
|
286
|
+
@property
|
|
287
|
+
def validation_settings(self):
|
|
288
|
+
"""Return current connector settings as dictionary."""
|
|
289
|
+
return self.validator.settings
|
|
290
|
+
|
|
58
291
|
@property
|
|
59
292
|
def empty(self):
|
|
60
293
|
"""Check if the database is empty."""
|
|
61
294
|
return not any([self.n_oseries > 0, self.n_stresses > 0, self.n_models > 0])
|
|
62
295
|
|
|
296
|
+
@property
|
|
297
|
+
def validator(self) -> Validator:
|
|
298
|
+
"""Get the Validator instance for this connector."""
|
|
299
|
+
if self._validator is None:
|
|
300
|
+
raise AttributeError("Validator not set for this connector.")
|
|
301
|
+
return self._validator
|
|
302
|
+
|
|
303
|
+
@property
|
|
304
|
+
def conn_type(self) -> str:
|
|
305
|
+
"""Get the connector type."""
|
|
306
|
+
if self._conn_type is None:
|
|
307
|
+
raise AttributeError(
|
|
308
|
+
"Connector class must set a connector type in `conn_type` attribute."
|
|
309
|
+
)
|
|
310
|
+
return self._conn_type
|
|
311
|
+
|
|
63
312
|
@abstractmethod
|
|
64
|
-
def _get_library(self, libname:
|
|
313
|
+
def _get_library(self, libname: AllLibs):
|
|
65
314
|
"""Get library handle.
|
|
66
315
|
|
|
67
316
|
Must be overridden by subclass.
|
|
@@ -80,11 +329,10 @@ class BaseConnector(ABC):
|
|
|
80
329
|
@abstractmethod
|
|
81
330
|
def _add_item(
|
|
82
331
|
self,
|
|
83
|
-
libname:
|
|
84
|
-
item: Union[
|
|
332
|
+
libname: AllLibs,
|
|
333
|
+
item: Union[FrameOrSeriesUnion, Dict],
|
|
85
334
|
name: str,
|
|
86
335
|
metadata: Optional[Dict] = None,
|
|
87
|
-
overwrite: bool = False,
|
|
88
336
|
) -> None:
|
|
89
337
|
"""Add item for both time series and pastas.Models (internal method).
|
|
90
338
|
|
|
@@ -100,10 +348,17 @@ class BaseConnector(ABC):
|
|
|
100
348
|
name of the item
|
|
101
349
|
metadata : dict, optional
|
|
102
350
|
dictionary containing metadata, by default None
|
|
351
|
+
|
|
352
|
+
Note
|
|
353
|
+
----
|
|
354
|
+
Metadata storage can vary by connector:
|
|
355
|
+
- ArcticDB: Native metadata support via write()
|
|
356
|
+
- DictConnector: Stored as tuple (metadata, item)
|
|
357
|
+
- PasConnector: Separate {name}_meta.pas JSON file
|
|
103
358
|
"""
|
|
104
359
|
|
|
105
360
|
@abstractmethod
|
|
106
|
-
def _get_item(self, libname:
|
|
361
|
+
def _get_item(self, libname: AllLibs, name: str) -> Union[FrameOrSeriesUnion, Dict]:
|
|
107
362
|
"""Get item (series or pastas.Models) (internal method).
|
|
108
363
|
|
|
109
364
|
Must be overridden by subclass.
|
|
@@ -122,7 +377,7 @@ class BaseConnector(ABC):
|
|
|
122
377
|
"""
|
|
123
378
|
|
|
124
379
|
@abstractmethod
|
|
125
|
-
def _del_item(self, libname:
|
|
380
|
+
def _del_item(self, libname: AllLibs, name: str, force: bool = False) -> None:
|
|
126
381
|
"""Delete items (series or models) (internal method).
|
|
127
382
|
|
|
128
383
|
Must be overridden by subclass.
|
|
@@ -136,7 +391,7 @@ class BaseConnector(ABC):
|
|
|
136
391
|
"""
|
|
137
392
|
|
|
138
393
|
@abstractmethod
|
|
139
|
-
def _get_metadata(self, libname:
|
|
394
|
+
def _get_metadata(self, libname: TimeSeriesLibs, name: str) -> Dict:
|
|
140
395
|
"""Get metadata (internal method).
|
|
141
396
|
|
|
142
397
|
Must be overridden by subclass.
|
|
@@ -154,35 +409,62 @@ class BaseConnector(ABC):
|
|
|
154
409
|
dictionary containing metadata
|
|
155
410
|
"""
|
|
156
411
|
|
|
157
|
-
@property
|
|
158
412
|
@abstractmethod
|
|
413
|
+
def _list_symbols(self, libname: AllLibs) -> List[str]:
|
|
414
|
+
"""Return list of symbol names in library."""
|
|
415
|
+
|
|
416
|
+
@abstractmethod
|
|
417
|
+
def _item_exists(self, libname: AllLibs, name: str) -> bool:
|
|
418
|
+
"""Return True if item present in library, else False."""
|
|
419
|
+
|
|
420
|
+
@property
|
|
159
421
|
def oseries_names(self):
|
|
160
422
|
"""List of oseries names.
|
|
161
423
|
|
|
162
424
|
Property must be overridden by subclass.
|
|
163
425
|
"""
|
|
426
|
+
return self._list_symbols("oseries")
|
|
164
427
|
|
|
165
428
|
@property
|
|
166
|
-
@abstractmethod
|
|
167
429
|
def stresses_names(self):
|
|
168
430
|
"""List of stresses names.
|
|
169
431
|
|
|
170
432
|
Property must be overridden by subclass.
|
|
171
433
|
"""
|
|
434
|
+
return self._list_symbols("stresses")
|
|
172
435
|
|
|
173
436
|
@property
|
|
174
|
-
@abstractmethod
|
|
175
437
|
def model_names(self):
|
|
176
438
|
"""List of model names.
|
|
177
439
|
|
|
178
440
|
Property must be overridden by subclass.
|
|
179
441
|
"""
|
|
442
|
+
return self._modelnames_cache
|
|
443
|
+
|
|
444
|
+
@property
|
|
445
|
+
def oseries_with_models(self):
|
|
446
|
+
"""List of oseries used in models.
|
|
447
|
+
|
|
448
|
+
Property must be overridden by subclass.
|
|
449
|
+
"""
|
|
450
|
+
self._trigger_links_update_if_needed()
|
|
451
|
+
return self._list_symbols("oseries_models")
|
|
452
|
+
|
|
453
|
+
@property
|
|
454
|
+
def stresses_with_models(self):
|
|
455
|
+
"""List of stresses used in models.
|
|
456
|
+
|
|
457
|
+
Property must be overridden by subclass.
|
|
458
|
+
"""
|
|
459
|
+
self._trigger_links_update_if_needed()
|
|
460
|
+
return self._list_symbols("stresses_models")
|
|
180
461
|
|
|
181
462
|
@abstractmethod
|
|
182
463
|
def _parallel(
|
|
183
464
|
self,
|
|
184
465
|
func: Callable,
|
|
185
466
|
names: List[str],
|
|
467
|
+
kwargs: Optional[Dict] = None,
|
|
186
468
|
progressbar: Optional[bool] = True,
|
|
187
469
|
max_workers: Optional[int] = None,
|
|
188
470
|
chunksize: Optional[int] = None,
|
|
@@ -198,6 +480,8 @@ class BaseConnector(ABC):
|
|
|
198
480
|
function to apply in parallel
|
|
199
481
|
names : list
|
|
200
482
|
list of names to apply function to
|
|
483
|
+
kwargs : dict
|
|
484
|
+
additional keyword arguments to pass to function
|
|
201
485
|
progressbar : bool, optional
|
|
202
486
|
show progressbar, by default True
|
|
203
487
|
max_workers : int, optional
|
|
@@ -208,72 +492,122 @@ class BaseConnector(ABC):
|
|
|
208
492
|
description for progressbar, by default ""
|
|
209
493
|
"""
|
|
210
494
|
|
|
211
|
-
def
|
|
212
|
-
|
|
495
|
+
def parse_names(
|
|
496
|
+
self,
|
|
497
|
+
names: list[str] | str | None = None,
|
|
498
|
+
libname: AllLibs = "oseries",
|
|
499
|
+
) -> list:
|
|
500
|
+
"""Parse names argument and return list of names.
|
|
213
501
|
|
|
214
|
-
|
|
215
|
-
way). When turned on, the model time series
|
|
216
|
-
(ml.oseries._series_original, and stressmodel.stress._series_original)
|
|
217
|
-
values are checked against the stored copies in the database. If these
|
|
218
|
-
do not match, an error is raised, and the model is not added to the
|
|
219
|
-
database. This guarantees the stored model will be identical after
|
|
220
|
-
loading from the database. This check is somewhat computationally
|
|
221
|
-
expensive, which is why it can be turned on or off.
|
|
502
|
+
Public method that exposes name parsing functionality.
|
|
222
503
|
|
|
223
504
|
Parameters
|
|
224
505
|
----------
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
506
|
+
names : Union[list, str], optional
|
|
507
|
+
str or list of str or None or 'all' (last two options
|
|
508
|
+
retrieves all names)
|
|
509
|
+
libname : str, optional
|
|
510
|
+
name of library, default is 'oseries'
|
|
511
|
+
|
|
512
|
+
Returns
|
|
513
|
+
-------
|
|
514
|
+
list
|
|
515
|
+
list of names
|
|
228
516
|
"""
|
|
229
|
-
self.
|
|
230
|
-
print(f"Model time series checking set to: {b}.")
|
|
517
|
+
return self._parse_names(names, libname)
|
|
231
518
|
|
|
232
|
-
|
|
233
|
-
|
|
519
|
+
@property # type: ignore
|
|
520
|
+
@functools.lru_cache()
|
|
521
|
+
def oseries(self):
|
|
522
|
+
"""Dataframe with overview of oseries."""
|
|
523
|
+
return self.get_metadata("oseries", self.oseries_names)
|
|
234
524
|
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
building the models. This in turn will mean that storing the models
|
|
241
|
-
will not work as the stored time series copy is checked against the
|
|
242
|
-
time series in the model to check if they are equal.
|
|
525
|
+
@property # type: ignore
|
|
526
|
+
@functools.lru_cache()
|
|
527
|
+
def stresses(self):
|
|
528
|
+
"""Dataframe with overview of stresses."""
|
|
529
|
+
return self.get_metadata("stresses", self.stresses_names)
|
|
243
530
|
|
|
244
|
-
|
|
531
|
+
@property # type: ignore
|
|
532
|
+
@functools.lru_cache()
|
|
533
|
+
def _modelnames_cache(self):
|
|
534
|
+
"""List of model names."""
|
|
535
|
+
return self._list_symbols("models")
|
|
245
536
|
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
b : bool
|
|
249
|
-
boolean indicating whether option should be turned on (True) or
|
|
250
|
-
off (False). Option is on by default.
|
|
537
|
+
@property
|
|
538
|
+
def n_oseries(self):
|
|
251
539
|
"""
|
|
252
|
-
|
|
253
|
-
print(f"Model time series checking set to: {b}.")
|
|
540
|
+
Returns the number of oseries.
|
|
254
541
|
|
|
255
|
-
|
|
256
|
-
|
|
542
|
+
Returns
|
|
543
|
+
-------
|
|
544
|
+
int
|
|
545
|
+
The number of oseries names.
|
|
546
|
+
"""
|
|
547
|
+
return len(self.oseries_names)
|
|
257
548
|
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
549
|
+
@property
|
|
550
|
+
def n_stresses(self):
|
|
551
|
+
"""
|
|
552
|
+
Returns the number of stresses.
|
|
262
553
|
|
|
263
554
|
Returns
|
|
264
555
|
-------
|
|
265
|
-
|
|
266
|
-
|
|
556
|
+
int
|
|
557
|
+
The number of stresses.
|
|
267
558
|
"""
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
559
|
+
return len(self.stresses_names)
|
|
560
|
+
|
|
561
|
+
@property
|
|
562
|
+
def n_models(self):
|
|
563
|
+
"""
|
|
564
|
+
Returns the number of models in the store.
|
|
565
|
+
|
|
566
|
+
Returns
|
|
567
|
+
-------
|
|
568
|
+
int
|
|
569
|
+
The number of models in the store.
|
|
570
|
+
"""
|
|
571
|
+
return len(self.model_names)
|
|
572
|
+
|
|
573
|
+
@property # type: ignore
|
|
574
|
+
@functools.lru_cache()
|
|
575
|
+
def oseries_models(self):
|
|
576
|
+
"""List of model names per oseries.
|
|
577
|
+
|
|
578
|
+
Returns
|
|
579
|
+
-------
|
|
580
|
+
d : dict
|
|
581
|
+
dictionary with oseries names as keys and list of model names as
|
|
582
|
+
values
|
|
583
|
+
"""
|
|
584
|
+
self._trigger_links_update_if_needed()
|
|
585
|
+
d = {}
|
|
586
|
+
for onam in self.oseries_with_models:
|
|
587
|
+
d[onam] = self._get_item("oseries_models", onam)
|
|
588
|
+
return d
|
|
589
|
+
|
|
590
|
+
@property # type: ignore
|
|
591
|
+
@functools.lru_cache()
|
|
592
|
+
def stresses_models(self):
|
|
593
|
+
"""List of model names per stress.
|
|
594
|
+
|
|
595
|
+
Returns
|
|
596
|
+
-------
|
|
597
|
+
d : dict
|
|
598
|
+
dictionary with stress names as keys and list of model names as
|
|
599
|
+
values
|
|
600
|
+
"""
|
|
601
|
+
self._trigger_links_update_if_needed()
|
|
602
|
+
d = {}
|
|
603
|
+
for stress_name in self.stresses_with_models:
|
|
604
|
+
d[stress_name] = self._get_item("stresses_models", stress_name)
|
|
605
|
+
return d
|
|
272
606
|
|
|
273
607
|
def _add_series(
|
|
274
608
|
self,
|
|
275
|
-
libname:
|
|
276
|
-
series:
|
|
609
|
+
libname: TimeSeriesLibs,
|
|
610
|
+
series: FrameOrSeriesUnion,
|
|
277
611
|
name: str,
|
|
278
612
|
metadata: Optional[dict] = None,
|
|
279
613
|
validate: Optional[bool] = None,
|
|
@@ -305,9 +639,11 @@ class BaseConnector(ABC):
|
|
|
305
639
|
"""
|
|
306
640
|
if not isinstance(name, str):
|
|
307
641
|
name = str(name)
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
642
|
+
if metadata:
|
|
643
|
+
self.validator.validate_metadata(metadata)
|
|
644
|
+
self.validator.validate_input_series(series)
|
|
645
|
+
series = self.validator.set_series_name(series, name)
|
|
646
|
+
if self.validator.pastas_validation_status(validate):
|
|
311
647
|
if libname == "oseries":
|
|
312
648
|
if PASTAS_GEQ_150 and not ps.validate_oseries(series):
|
|
313
649
|
raise ValueError(
|
|
@@ -326,10 +662,16 @@ class BaseConnector(ABC):
|
|
|
326
662
|
ps.validate_stress(series)
|
|
327
663
|
in_store = getattr(self, f"{libname}_names")
|
|
328
664
|
if name not in in_store or overwrite:
|
|
329
|
-
self._add_item(
|
|
330
|
-
libname, series, name, metadata=metadata, overwrite=overwrite
|
|
331
|
-
)
|
|
665
|
+
self._add_item(libname, series, name, metadata=metadata)
|
|
332
666
|
self._clear_cache(libname)
|
|
667
|
+
elif (libname == "oseries" and self._item_exists("oseries_models", name)) or (
|
|
668
|
+
libname == "stresses" and self._item_exists("stresses_model", name)
|
|
669
|
+
):
|
|
670
|
+
raise SeriesUsedByModel(
|
|
671
|
+
f"Time series with name '{name}' is used by a model! "
|
|
672
|
+
"Use overwrite=True to replace existing time series. "
|
|
673
|
+
"Note that this may modify the model!"
|
|
674
|
+
)
|
|
333
675
|
else:
|
|
334
676
|
raise ItemInLibraryException(
|
|
335
677
|
f"Time series with name '{name}' already in '{libname}' library! "
|
|
@@ -338,11 +680,12 @@ class BaseConnector(ABC):
|
|
|
338
680
|
|
|
339
681
|
def _update_series(
|
|
340
682
|
self,
|
|
341
|
-
libname:
|
|
342
|
-
series:
|
|
683
|
+
libname: TimeSeriesLibs,
|
|
684
|
+
series: FrameOrSeriesUnion,
|
|
343
685
|
name: str,
|
|
344
686
|
metadata: Optional[dict] = None,
|
|
345
687
|
validate: Optional[bool] = None,
|
|
688
|
+
force: bool = False,
|
|
346
689
|
) -> None:
|
|
347
690
|
"""Update time series (internal method).
|
|
348
691
|
|
|
@@ -360,11 +703,16 @@ class BaseConnector(ABC):
|
|
|
360
703
|
validate: bool, optional
|
|
361
704
|
use pastas to validate series, default is None, which will use the
|
|
362
705
|
USE_PASTAS_VALIDATE_SERIES value (default is True).
|
|
706
|
+
force : bool, optional
|
|
707
|
+
force update even if time series is used in a model, by default False
|
|
708
|
+
|
|
363
709
|
"""
|
|
364
710
|
if libname not in ["oseries", "stresses"]:
|
|
365
711
|
raise ValueError("Library must be 'oseries' or 'stresses'!")
|
|
366
|
-
|
|
367
|
-
|
|
712
|
+
if not force:
|
|
713
|
+
self.validator.check_series_in_models(libname, name)
|
|
714
|
+
self.validator.validate_input_series(series)
|
|
715
|
+
series = self.validator.set_series_name(series, name)
|
|
368
716
|
stored = self._get_series(libname, name, progressbar=False)
|
|
369
717
|
if self.conn_type == "pas" and not isinstance(series, type(stored)):
|
|
370
718
|
if isinstance(series, pd.DataFrame):
|
|
@@ -389,11 +737,12 @@ class BaseConnector(ABC):
|
|
|
389
737
|
|
|
390
738
|
def _upsert_series(
|
|
391
739
|
self,
|
|
392
|
-
libname:
|
|
393
|
-
series:
|
|
740
|
+
libname: TimeSeriesLibs,
|
|
741
|
+
series: FrameOrSeriesUnion,
|
|
394
742
|
name: str,
|
|
395
743
|
metadata: Optional[dict] = None,
|
|
396
744
|
validate: Optional[bool] = None,
|
|
745
|
+
force: bool = False,
|
|
397
746
|
) -> None:
|
|
398
747
|
"""Update or insert series depending on whether it exists in store.
|
|
399
748
|
|
|
@@ -410,19 +759,23 @@ class BaseConnector(ABC):
|
|
|
410
759
|
validate : bool, optional
|
|
411
760
|
use pastas to validate series, default is None, which will use the
|
|
412
761
|
USE_PASTAS_VALIDATE_SERIES value (default is True).
|
|
762
|
+
force : bool, optional
|
|
763
|
+
force update even if time series is used in a model, by default False
|
|
413
764
|
"""
|
|
414
765
|
if libname not in ["oseries", "stresses"]:
|
|
415
766
|
raise ValueError("Library must be 'oseries' or 'stresses'!")
|
|
416
767
|
if name in getattr(self, f"{libname}_names"):
|
|
417
768
|
self._update_series(
|
|
418
|
-
libname, series, name, metadata=metadata, validate=validate
|
|
769
|
+
libname, series, name, metadata=metadata, validate=validate, force=force
|
|
419
770
|
)
|
|
420
771
|
else:
|
|
421
772
|
self._add_series(
|
|
422
773
|
libname, series, name, metadata=metadata, validate=validate
|
|
423
774
|
)
|
|
424
775
|
|
|
425
|
-
def update_metadata(
|
|
776
|
+
def update_metadata(
|
|
777
|
+
self, libname: TimeSeriesLibs, name: str, metadata: dict
|
|
778
|
+
) -> None:
|
|
426
779
|
"""Update metadata.
|
|
427
780
|
|
|
428
781
|
Note: also retrieves and stores time series as updating only metadata
|
|
@@ -449,7 +802,7 @@ class BaseConnector(ABC):
|
|
|
449
802
|
|
|
450
803
|
def add_oseries(
|
|
451
804
|
self,
|
|
452
|
-
series:
|
|
805
|
+
series: FrameOrSeriesUnion,
|
|
453
806
|
name: str,
|
|
454
807
|
metadata: Optional[dict] = None,
|
|
455
808
|
validate: Optional[bool] = None,
|
|
@@ -472,7 +825,6 @@ class BaseConnector(ABC):
|
|
|
472
825
|
overwrite existing dataset with the same name,
|
|
473
826
|
by default False
|
|
474
827
|
"""
|
|
475
|
-
series, metadata = self._parse_series_input(series, metadata)
|
|
476
828
|
self._add_series(
|
|
477
829
|
"oseries",
|
|
478
830
|
series,
|
|
@@ -484,7 +836,7 @@ class BaseConnector(ABC):
|
|
|
484
836
|
|
|
485
837
|
def add_stress(
|
|
486
838
|
self,
|
|
487
|
-
series:
|
|
839
|
+
series: FrameOrSeriesUnion,
|
|
488
840
|
name: str,
|
|
489
841
|
kind: str,
|
|
490
842
|
metadata: Optional[dict] = None,
|
|
@@ -511,7 +863,6 @@ class BaseConnector(ABC):
|
|
|
511
863
|
overwrite existing dataset with the same name,
|
|
512
864
|
by default False
|
|
513
865
|
"""
|
|
514
|
-
series, metadata = self._parse_series_input(series, metadata)
|
|
515
866
|
if metadata is None:
|
|
516
867
|
metadata = {}
|
|
517
868
|
metadata["kind"] = kind
|
|
@@ -563,58 +914,98 @@ class BaseConnector(ABC):
|
|
|
563
914
|
raise TypeError("Expected pastas.Model or dict!")
|
|
564
915
|
if not isinstance(name, str):
|
|
565
916
|
name = str(name)
|
|
566
|
-
if
|
|
917
|
+
if not self._item_exists("models", name) or overwrite:
|
|
567
918
|
# check if stressmodels supported
|
|
568
|
-
self.
|
|
569
|
-
# check
|
|
570
|
-
self.
|
|
571
|
-
self.
|
|
572
|
-
self.
|
|
919
|
+
self.validator.check_stressmodels_supported(ml)
|
|
920
|
+
# check oseries and stresses names and if they exist in store
|
|
921
|
+
self.validator.check_model_series_names_duplicates(ml)
|
|
922
|
+
self.validator.check_oseries_in_store(ml)
|
|
923
|
+
self.validator.check_stresses_in_store(ml)
|
|
573
924
|
# write model to store
|
|
574
|
-
self._add_item(
|
|
575
|
-
|
|
576
|
-
|
|
925
|
+
self._add_item("models", mldict, name, metadata=metadata)
|
|
926
|
+
self._clear_cache("_modelnames_cache")
|
|
927
|
+
# avoid updating links so parallel operations do not simultaneously
|
|
928
|
+
# access the same object. Indicate that these links need updating and
|
|
929
|
+
# clear existing caches. Handle both Manager proxies and booleans
|
|
930
|
+
if hasattr(self._oseries_links_need_update, "value"):
|
|
931
|
+
self._oseries_links_need_update.value = True
|
|
932
|
+
self._stresses_links_need_update.value = True
|
|
933
|
+
# this won't update main instance in parallel
|
|
934
|
+
self._added_models.append(name)
|
|
935
|
+
else:
|
|
936
|
+
self._oseries_links_need_update = True
|
|
937
|
+
self._stresses_links_need_update = True
|
|
938
|
+
self._added_models.append(name)
|
|
939
|
+
self._clear_cache("oseries_models")
|
|
940
|
+
self._clear_cache("stresses_models")
|
|
577
941
|
else:
|
|
578
942
|
raise ItemInLibraryException(
|
|
579
943
|
f"Model with name '{name}' already in 'models' library! "
|
|
580
944
|
"Use overwrite=True to replace existing model."
|
|
581
945
|
)
|
|
582
|
-
self._clear_cache("_modelnames_cache")
|
|
583
|
-
self._add_oseries_model_links(str(mldict["oseries"]["name"]), name)
|
|
584
946
|
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
|
|
947
|
+
def _update_series(
|
|
948
|
+
self,
|
|
949
|
+
libname: str,
|
|
950
|
+
series: FrameOrSeriesUnion,
|
|
951
|
+
name: str,
|
|
952
|
+
metadata: Optional[dict] = None,
|
|
953
|
+
validate: Optional[bool] = None,
|
|
954
|
+
force: bool = False,
|
|
955
|
+
) -> None:
|
|
956
|
+
"""Update time series (internal method).
|
|
591
957
|
|
|
592
958
|
Parameters
|
|
593
959
|
----------
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
|
|
960
|
+
libname : str
|
|
961
|
+
name of library
|
|
962
|
+
series : FrameorSeriesUnion
|
|
963
|
+
time series containing update values
|
|
964
|
+
name : str
|
|
965
|
+
name of the time series to update
|
|
966
|
+
metadata : Optional[dict], optional
|
|
967
|
+
optionally provide metadata dictionary which will also update
|
|
968
|
+
the current stored metadata dictionary, by default None
|
|
969
|
+
validate: bool, optional
|
|
970
|
+
use pastas to validate series, default is None, which will use the
|
|
971
|
+
USE_PASTAS_VALIDATE_SERIES value (default is True).
|
|
972
|
+
force : bool, optional
|
|
973
|
+
force update even if time series is used in a model, by default False
|
|
604
974
|
"""
|
|
605
|
-
if
|
|
606
|
-
raise
|
|
607
|
-
|
|
608
|
-
)
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
975
|
+
if libname not in ["oseries", "stresses"]:
|
|
976
|
+
raise ValueError("Library must be 'oseries' or 'stresses'!")
|
|
977
|
+
if not force:
|
|
978
|
+
self.validator.check_series_in_models(libname, name)
|
|
979
|
+
self.validator.validate_input_series(series)
|
|
980
|
+
series = self.validator.set_series_name(series, name)
|
|
981
|
+
stored = self._get_series(libname, name, progressbar=False)
|
|
982
|
+
if self.conn_type == "pas" and not isinstance(series, type(stored)):
|
|
983
|
+
if isinstance(series, pd.DataFrame):
|
|
984
|
+
stored = stored.to_frame()
|
|
985
|
+
# get union of index
|
|
986
|
+
idx_union = stored.index.union(series.index)
|
|
987
|
+
# update series with new values
|
|
988
|
+
update = stored.reindex(idx_union)
|
|
989
|
+
update.update(series)
|
|
990
|
+
# metadata
|
|
991
|
+
update_meta = self._get_metadata(libname, name)
|
|
992
|
+
if metadata is not None:
|
|
993
|
+
update_meta.update(metadata)
|
|
994
|
+
self._add_series(
|
|
995
|
+
libname,
|
|
996
|
+
update,
|
|
997
|
+
name,
|
|
998
|
+
metadata=update_meta,
|
|
999
|
+
validate=validate,
|
|
1000
|
+
overwrite=True,
|
|
1001
|
+
)
|
|
612
1002
|
|
|
613
1003
|
def update_oseries(
|
|
614
1004
|
self,
|
|
615
|
-
series:
|
|
1005
|
+
series: FrameOrSeriesUnion,
|
|
616
1006
|
name: str,
|
|
617
1007
|
metadata: Optional[dict] = None,
|
|
1008
|
+
force: bool = False,
|
|
618
1009
|
) -> None:
|
|
619
1010
|
"""Update oseries values.
|
|
620
1011
|
|
|
@@ -627,61 +1018,67 @@ class BaseConnector(ABC):
|
|
|
627
1018
|
metadata : Optional[dict], optional
|
|
628
1019
|
optionally provide metadata, which will update
|
|
629
1020
|
the stored metadata dictionary, by default None
|
|
1021
|
+
force : bool, optional
|
|
1022
|
+
force update even if time series is used in a model, by default False
|
|
630
1023
|
"""
|
|
631
|
-
series, metadata
|
|
632
|
-
self._update_series("oseries", series, name, metadata=metadata)
|
|
1024
|
+
self._update_series("oseries", series, name, metadata=metadata, force=force)
|
|
633
1025
|
|
|
634
|
-
def
|
|
1026
|
+
def update_stress(
|
|
635
1027
|
self,
|
|
636
|
-
series:
|
|
1028
|
+
series: FrameOrSeriesUnion,
|
|
637
1029
|
name: str,
|
|
638
1030
|
metadata: Optional[dict] = None,
|
|
1031
|
+
force: bool = False,
|
|
639
1032
|
) -> None:
|
|
640
|
-
"""Update
|
|
1033
|
+
"""Update stresses values.
|
|
1034
|
+
|
|
1035
|
+
Note: the 'kind' attribute of a stress cannot be updated! To update
|
|
1036
|
+
the 'kind' delete and add the stress again.
|
|
641
1037
|
|
|
642
1038
|
Parameters
|
|
643
1039
|
----------
|
|
644
1040
|
series : FrameorSeriesUnion
|
|
645
|
-
time series to update
|
|
1041
|
+
time series to update stored stress with
|
|
646
1042
|
name : str
|
|
647
|
-
name of the
|
|
1043
|
+
name of the stress to update
|
|
648
1044
|
metadata : Optional[dict], optional
|
|
649
1045
|
optionally provide metadata, which will update
|
|
650
|
-
the stored metadata dictionary
|
|
1046
|
+
the stored metadata dictionary, by default None
|
|
1047
|
+
force : bool, optional
|
|
1048
|
+
force update even if time series is used in a model, by default False
|
|
651
1049
|
"""
|
|
652
|
-
series, metadata
|
|
653
|
-
self._upsert_series("oseries", series, name, metadata=metadata)
|
|
1050
|
+
self._update_series("stresses", series, name, metadata=metadata, force=force)
|
|
654
1051
|
|
|
655
|
-
def
|
|
1052
|
+
def upsert_oseries(
|
|
656
1053
|
self,
|
|
657
|
-
series:
|
|
1054
|
+
series: FrameOrSeriesUnion,
|
|
658
1055
|
name: str,
|
|
659
1056
|
metadata: Optional[dict] = None,
|
|
1057
|
+
force: bool = False,
|
|
660
1058
|
) -> None:
|
|
661
|
-
"""Update
|
|
662
|
-
|
|
663
|
-
Note: the 'kind' attribute of a stress cannot be updated! To update
|
|
664
|
-
the 'kind' delete and add the stress again.
|
|
1059
|
+
"""Update or insert oseries values depending on whether it exists.
|
|
665
1060
|
|
|
666
1061
|
Parameters
|
|
667
1062
|
----------
|
|
668
1063
|
series : FrameorSeriesUnion
|
|
669
|
-
time series to update
|
|
1064
|
+
time series to update/insert
|
|
670
1065
|
name : str
|
|
671
|
-
name of the
|
|
1066
|
+
name of the oseries
|
|
672
1067
|
metadata : Optional[dict], optional
|
|
673
1068
|
optionally provide metadata, which will update
|
|
674
|
-
the stored metadata dictionary, by default None
|
|
1069
|
+
the stored metadata dictionary if it exists, by default None
|
|
1070
|
+
force : bool, optional
|
|
1071
|
+
force update even if time series is used in a model, by default False
|
|
675
1072
|
"""
|
|
676
|
-
series, metadata
|
|
677
|
-
self._update_series("stresses", series, name, metadata=metadata)
|
|
1073
|
+
self._upsert_series("oseries", series, name, metadata=metadata, force=force)
|
|
678
1074
|
|
|
679
1075
|
def upsert_stress(
|
|
680
1076
|
self,
|
|
681
|
-
series:
|
|
1077
|
+
series: FrameOrSeriesUnion,
|
|
682
1078
|
name: str,
|
|
683
1079
|
kind: str,
|
|
684
1080
|
metadata: Optional[dict] = None,
|
|
1081
|
+
force: bool = False,
|
|
685
1082
|
) -> None:
|
|
686
1083
|
"""Update or insert stress values depending on whether it exists.
|
|
687
1084
|
|
|
@@ -694,12 +1091,16 @@ class BaseConnector(ABC):
|
|
|
694
1091
|
metadata : Optional[dict], optional
|
|
695
1092
|
optionally provide metadata, which will update
|
|
696
1093
|
the stored metadata dictionary if it exists, by default None
|
|
1094
|
+
kind : str
|
|
1095
|
+
category to identify type of stress, this label is added to the
|
|
1096
|
+
metadata dictionary.
|
|
1097
|
+
force : bool, optional
|
|
1098
|
+
force update even if time series is used in a model, by default False
|
|
697
1099
|
"""
|
|
698
|
-
series, metadata = self._parse_series_input(series, metadata)
|
|
699
1100
|
if metadata is None:
|
|
700
1101
|
metadata = {}
|
|
701
1102
|
metadata["kind"] = kind
|
|
702
|
-
self._upsert_series("stresses", series, name, metadata=metadata)
|
|
1103
|
+
self._upsert_series("stresses", series, name, metadata=metadata, force=force)
|
|
703
1104
|
|
|
704
1105
|
def del_models(self, names: Union[list, str], verbose: bool = True) -> None:
|
|
705
1106
|
"""Delete model(s) from the database.
|
|
@@ -716,10 +1117,15 @@ class BaseConnector(ABC):
|
|
|
716
1117
|
mldict = self.get_models(n, return_dict=True)
|
|
717
1118
|
oname = mldict["oseries"]["name"]
|
|
718
1119
|
self._del_item("models", n)
|
|
719
|
-
|
|
1120
|
+
# delete reference to added model if present
|
|
1121
|
+
if n in self._added_models:
|
|
1122
|
+
self._added_models.remove(n)
|
|
1123
|
+
else:
|
|
1124
|
+
self._del_oseries_model_link(oname, n)
|
|
1125
|
+
self._del_stress_model_link(self._get_model_stress_names(mldict), n)
|
|
720
1126
|
self._clear_cache("_modelnames_cache")
|
|
721
1127
|
if verbose:
|
|
722
|
-
|
|
1128
|
+
logger.info("Deleted %d model(s) from database.", len(names))
|
|
723
1129
|
|
|
724
1130
|
def del_model(self, names: Union[list, str], verbose: bool = True) -> None:
|
|
725
1131
|
"""Delete model(s) from the database.
|
|
@@ -736,7 +1142,11 @@ class BaseConnector(ABC):
|
|
|
736
1142
|
self.del_models(names=names, verbose=verbose)
|
|
737
1143
|
|
|
738
1144
|
def del_oseries(
|
|
739
|
-
self,
|
|
1145
|
+
self,
|
|
1146
|
+
names: Union[list, str],
|
|
1147
|
+
remove_models: bool = False,
|
|
1148
|
+
force: bool = False,
|
|
1149
|
+
verbose: bool = True,
|
|
740
1150
|
):
|
|
741
1151
|
"""Delete oseries from the database.
|
|
742
1152
|
|
|
@@ -746,38 +1156,60 @@ class BaseConnector(ABC):
|
|
|
746
1156
|
name(s) of the oseries to delete
|
|
747
1157
|
remove_models : bool, optional
|
|
748
1158
|
also delete models for deleted oseries, default is False
|
|
1159
|
+
force : bool, optional
|
|
1160
|
+
force deletion of oseries that are used in models, by default False
|
|
749
1161
|
verbose : bool, optional
|
|
750
1162
|
print information about deleted oseries, by default True
|
|
751
1163
|
"""
|
|
752
1164
|
names = self._parse_names(names, libname="oseries")
|
|
753
1165
|
for n in names:
|
|
754
|
-
self._del_item("oseries", n)
|
|
1166
|
+
self._del_item("oseries", n, force=force)
|
|
755
1167
|
self._clear_cache("oseries")
|
|
756
1168
|
if verbose:
|
|
757
|
-
|
|
1169
|
+
logger.info("Deleted %d oseries from database.", len(names))
|
|
758
1170
|
# remove associated models from database
|
|
759
1171
|
if remove_models:
|
|
760
1172
|
modelnames = list(
|
|
761
1173
|
chain.from_iterable([self.oseries_models.get(n, []) for n in names])
|
|
762
1174
|
)
|
|
763
1175
|
self.del_models(modelnames, verbose=verbose)
|
|
1176
|
+
if verbose:
|
|
1177
|
+
logger.info("Deleted %d model(s) from database.", len(modelnames))
|
|
764
1178
|
|
|
765
|
-
def del_stress(
|
|
1179
|
+
def del_stress(
|
|
1180
|
+
self,
|
|
1181
|
+
names: Union[list, str],
|
|
1182
|
+
remove_models: bool = False,
|
|
1183
|
+
force: bool = False,
|
|
1184
|
+
verbose: bool = True,
|
|
1185
|
+
):
|
|
766
1186
|
"""Delete stress from the database.
|
|
767
1187
|
|
|
768
1188
|
Parameters
|
|
769
1189
|
----------
|
|
770
1190
|
names : str or list of str
|
|
771
1191
|
name(s) of the stress to delete
|
|
1192
|
+
remove_models : bool, optional
|
|
1193
|
+
also delete models for deleted stresses, default is False
|
|
1194
|
+
force : bool, optional
|
|
1195
|
+
force deletion of stresses that are used in models, by default False
|
|
772
1196
|
verbose : bool, optional
|
|
773
1197
|
print information about deleted stresses, by default True
|
|
774
1198
|
"""
|
|
775
1199
|
names = self._parse_names(names, libname="stresses")
|
|
776
1200
|
for n in names:
|
|
777
|
-
self._del_item("stresses", n)
|
|
1201
|
+
self._del_item("stresses", n, force=force)
|
|
778
1202
|
self._clear_cache("stresses")
|
|
779
1203
|
if verbose:
|
|
780
|
-
|
|
1204
|
+
logger.info("Deleted %d stress(es) from database.", len(names))
|
|
1205
|
+
# remove associated models from database
|
|
1206
|
+
if remove_models:
|
|
1207
|
+
modelnames = list(
|
|
1208
|
+
chain.from_iterable([self.stresses_models.get(n, []) for n in names])
|
|
1209
|
+
)
|
|
1210
|
+
self.del_models(modelnames, verbose=verbose)
|
|
1211
|
+
if verbose:
|
|
1212
|
+
logger.info("Deleted %d model(s) from database.", len(modelnames))
|
|
781
1213
|
|
|
782
1214
|
def _get_series(
|
|
783
1215
|
self,
|
|
@@ -785,7 +1217,7 @@ class BaseConnector(ABC):
|
|
|
785
1217
|
names: Union[list, str],
|
|
786
1218
|
progressbar: bool = True,
|
|
787
1219
|
squeeze: bool = True,
|
|
788
|
-
) ->
|
|
1220
|
+
) -> FrameOrSeriesUnion:
|
|
789
1221
|
"""Get time series (internal method).
|
|
790
1222
|
|
|
791
1223
|
Parameters
|
|
@@ -809,6 +1241,7 @@ class BaseConnector(ABC):
|
|
|
809
1241
|
ts = {}
|
|
810
1242
|
names = self._parse_names(names, libname=libname)
|
|
811
1243
|
desc = f"Get {libname}"
|
|
1244
|
+
n = None
|
|
812
1245
|
for n in tqdm(names, desc=desc) if progressbar else names:
|
|
813
1246
|
ts[n] = self._get_item(libname, n)
|
|
814
1247
|
# return frame if len == 1
|
|
@@ -865,7 +1298,7 @@ class BaseConnector(ABC):
|
|
|
865
1298
|
return_metadata: bool = False,
|
|
866
1299
|
progressbar: bool = False,
|
|
867
1300
|
squeeze: bool = True,
|
|
868
|
-
) -> Union[Union[
|
|
1301
|
+
) -> Union[Union[FrameOrSeriesUnion, Dict], Optional[Union[Dict, List]]]:
|
|
869
1302
|
"""Get oseries from database.
|
|
870
1303
|
|
|
871
1304
|
Parameters
|
|
@@ -910,7 +1343,7 @@ class BaseConnector(ABC):
|
|
|
910
1343
|
return_metadata: bool = False,
|
|
911
1344
|
progressbar: bool = False,
|
|
912
1345
|
squeeze: bool = True,
|
|
913
|
-
) -> Union[Union[
|
|
1346
|
+
) -> Union[Union[FrameOrSeriesUnion, Dict], Optional[Union[Dict, List]]]:
|
|
914
1347
|
"""Get stresses from database.
|
|
915
1348
|
|
|
916
1349
|
Parameters
|
|
@@ -955,7 +1388,7 @@ class BaseConnector(ABC):
|
|
|
955
1388
|
return_metadata: bool = False,
|
|
956
1389
|
progressbar: bool = False,
|
|
957
1390
|
squeeze: bool = True,
|
|
958
|
-
) -> Union[Union[
|
|
1391
|
+
) -> Union[Union[FrameOrSeriesUnion, Dict], Optional[Union[Dict, List]]]:
|
|
959
1392
|
"""Get stresses from database.
|
|
960
1393
|
|
|
961
1394
|
Alias for `get_stresses()`
|
|
@@ -1078,7 +1511,7 @@ class BaseConnector(ABC):
|
|
|
1078
1511
|
)
|
|
1079
1512
|
|
|
1080
1513
|
def empty_library(
|
|
1081
|
-
self, libname:
|
|
1514
|
+
self, libname: AllLibs, prompt: bool = True, progressbar: bool = True
|
|
1082
1515
|
):
|
|
1083
1516
|
"""Empty library of all its contents.
|
|
1084
1517
|
|
|
@@ -1101,8 +1534,8 @@ class BaseConnector(ABC):
|
|
|
1101
1534
|
return
|
|
1102
1535
|
|
|
1103
1536
|
if libname == "models":
|
|
1104
|
-
# also delete linked modelnames linked to oseries
|
|
1105
|
-
libs = ["models", "oseries_models"]
|
|
1537
|
+
# also delete linked modelnames linked to oseries and stresses
|
|
1538
|
+
libs = ["models", "oseries_models", "stresses_models"]
|
|
1106
1539
|
else:
|
|
1107
1540
|
libs = [libname]
|
|
1108
1541
|
|
|
@@ -1114,11 +1547,13 @@ class BaseConnector(ABC):
|
|
|
1114
1547
|
if progressbar
|
|
1115
1548
|
else names
|
|
1116
1549
|
):
|
|
1117
|
-
self._del_item(libname, name)
|
|
1550
|
+
self._del_item(libname, name, force=True)
|
|
1118
1551
|
self._clear_cache(libname)
|
|
1119
|
-
|
|
1552
|
+
logger.info(
|
|
1553
|
+
"Emptied library %s in %s: %s", libname, self.name, self.__class__
|
|
1554
|
+
)
|
|
1120
1555
|
|
|
1121
|
-
def _iter_series(self, libname:
|
|
1556
|
+
def _iter_series(self, libname: TimeSeriesLibs, names: Optional[List[str]] = None):
|
|
1122
1557
|
"""Iterate over time series in library (internal method).
|
|
1123
1558
|
|
|
1124
1559
|
Parameters
|
|
@@ -1196,33 +1631,76 @@ class BaseConnector(ABC):
|
|
|
1196
1631
|
for mlnam in modelnames:
|
|
1197
1632
|
yield self.get_models(mlnam, return_dict=return_dict, progressbar=False)
|
|
1198
1633
|
|
|
1199
|
-
def _add_oseries_model_links(
|
|
1634
|
+
def _add_oseries_model_links(
|
|
1635
|
+
self,
|
|
1636
|
+
oseries_name: str,
|
|
1637
|
+
model_names: Union[str, List[str]],
|
|
1638
|
+
_clear_cache: bool = True,
|
|
1639
|
+
):
|
|
1200
1640
|
"""Add model name to stored list of models per oseries.
|
|
1201
1641
|
|
|
1202
1642
|
Parameters
|
|
1203
1643
|
----------
|
|
1204
|
-
|
|
1644
|
+
oseries_name : str
|
|
1205
1645
|
name of oseries
|
|
1206
|
-
|
|
1646
|
+
model_names : Union[str, List[str]]
|
|
1207
1647
|
model name or list of model names for an oseries with name
|
|
1208
|
-
|
|
1648
|
+
oseries_name.
|
|
1649
|
+
_clear_cache : bool, optional
|
|
1650
|
+
whether to clear the cache after adding, by default True.
|
|
1651
|
+
Set to False during bulk operations to improve performance.
|
|
1209
1652
|
"""
|
|
1210
1653
|
# get stored list of model names
|
|
1211
|
-
if
|
|
1212
|
-
modellist = self._get_item("oseries_models",
|
|
1654
|
+
if self._item_exists("oseries_models", oseries_name):
|
|
1655
|
+
modellist = self._get_item("oseries_models", oseries_name)
|
|
1213
1656
|
else:
|
|
1214
1657
|
# else empty list
|
|
1215
1658
|
modellist = []
|
|
1216
1659
|
# if one model name, make list for loop
|
|
1217
|
-
if isinstance(
|
|
1218
|
-
|
|
1660
|
+
if isinstance(model_names, str):
|
|
1661
|
+
model_names = [model_names]
|
|
1219
1662
|
# loop over model names
|
|
1220
|
-
for iml in
|
|
1663
|
+
for iml in model_names:
|
|
1221
1664
|
# if not present, add to list
|
|
1222
1665
|
if iml not in modellist:
|
|
1223
1666
|
modellist.append(iml)
|
|
1224
|
-
self._add_item("oseries_models", modellist,
|
|
1225
|
-
|
|
1667
|
+
self._add_item("oseries_models", modellist, oseries_name)
|
|
1668
|
+
if _clear_cache:
|
|
1669
|
+
self._clear_cache("oseries_models")
|
|
1670
|
+
|
|
1671
|
+
def _add_stresses_model_links(
|
|
1672
|
+
self, stress_names, model_names, _clear_cache: bool = True
|
|
1673
|
+
):
|
|
1674
|
+
"""Add model name to stored list of models per stress.
|
|
1675
|
+
|
|
1676
|
+
Parameters
|
|
1677
|
+
----------
|
|
1678
|
+
stress_names : list of str
|
|
1679
|
+
names of stresses
|
|
1680
|
+
model_names : Union[str, List[str]]
|
|
1681
|
+
model name or list of model names for a stress with name
|
|
1682
|
+
_clear_cache : bool, optional
|
|
1683
|
+
whether to clear the cache after adding, by default True.
|
|
1684
|
+
Set to False during bulk operations to improve performance.
|
|
1685
|
+
"""
|
|
1686
|
+
# if one model name, make list for loop
|
|
1687
|
+
if isinstance(model_names, str):
|
|
1688
|
+
model_names = [model_names]
|
|
1689
|
+
for snam in stress_names:
|
|
1690
|
+
# get stored list of model names
|
|
1691
|
+
if self._item_exists("stresses_models", str(snam)):
|
|
1692
|
+
modellist = self._get_item("stresses_models", snam)
|
|
1693
|
+
else:
|
|
1694
|
+
# else empty list
|
|
1695
|
+
modellist = []
|
|
1696
|
+
# loop over model names
|
|
1697
|
+
for iml in model_names:
|
|
1698
|
+
# if not present, add to list
|
|
1699
|
+
if iml not in modellist:
|
|
1700
|
+
modellist.append(iml)
|
|
1701
|
+
self._add_item("stresses_models", modellist, snam)
|
|
1702
|
+
if _clear_cache:
|
|
1703
|
+
self._clear_cache("stresses_models")
|
|
1226
1704
|
|
|
1227
1705
|
def _del_oseries_model_link(self, onam, mlnam):
|
|
1228
1706
|
"""Delete model name from stored list of models per oseries.
|
|
@@ -1239,128 +1717,255 @@ class BaseConnector(ABC):
|
|
|
1239
1717
|
if len(modellist) == 0:
|
|
1240
1718
|
self._del_item("oseries_models", onam)
|
|
1241
1719
|
else:
|
|
1242
|
-
self._add_item("oseries_models", modellist, onam
|
|
1720
|
+
self._add_item("oseries_models", modellist, onam)
|
|
1243
1721
|
self._clear_cache("oseries_models")
|
|
1244
1722
|
|
|
1245
|
-
def
|
|
1246
|
-
"""
|
|
1723
|
+
def _del_stress_model_link(self, stress_names, model_name):
|
|
1724
|
+
"""Delete model name from stored list of models per stress.
|
|
1725
|
+
|
|
1726
|
+
Parameters
|
|
1727
|
+
----------
|
|
1728
|
+
stress_names : list of str
|
|
1729
|
+
List of stress names for which to remove the model link.
|
|
1730
|
+
model_name : str
|
|
1731
|
+
Name of the model to remove from the stress links.
|
|
1732
|
+
"""
|
|
1733
|
+
for stress_name in stress_names:
|
|
1734
|
+
modellist = self._get_item("stresses_models", stress_name)
|
|
1735
|
+
modellist.remove(model_name)
|
|
1736
|
+
if len(modellist) == 0:
|
|
1737
|
+
self._del_item("stresses_models", stress_name)
|
|
1738
|
+
else:
|
|
1739
|
+
self._add_item("stresses_models", modellist, stress_name)
|
|
1740
|
+
self._clear_cache("stresses_models")
|
|
1247
1741
|
|
|
1248
|
-
|
|
1249
|
-
|
|
1250
|
-
|
|
1251
|
-
|
|
1742
|
+
def _update_time_series_model_links(
|
|
1743
|
+
self,
|
|
1744
|
+
libraries: list[str] = None,
|
|
1745
|
+
modelnames: Optional[List[str]] = None,
|
|
1746
|
+
recompute: bool = True,
|
|
1747
|
+
progressbar: bool = False,
|
|
1748
|
+
):
|
|
1749
|
+
"""Add all model names to reverse lookup time series dictionaries.
|
|
1750
|
+
|
|
1751
|
+
Used for old PastaStore versions, where relationship between time series and
|
|
1752
|
+
models was not stored. If there are any models in the database and if the
|
|
1753
|
+
oseries_models or stresses_models libraries are empty, loop through all models
|
|
1754
|
+
to determine which time series are used in each model.
|
|
1755
|
+
|
|
1756
|
+
Parameters
|
|
1757
|
+
----------
|
|
1758
|
+
libraries : list of str, optional
|
|
1759
|
+
list of time series libraries to update model links for,
|
|
1760
|
+
by default None which will update both 'oseries' and 'stresses'
|
|
1761
|
+
modelnames : Optional[List[str]], optional
|
|
1762
|
+
list of model names to update links for, by default None
|
|
1763
|
+
recompute : bool, optional
|
|
1764
|
+
Indicate operation is an update/recompute of existing links,
|
|
1765
|
+
by default False
|
|
1766
|
+
progressbar : bool, optional
|
|
1767
|
+
show progressbar, by default True
|
|
1252
1768
|
"""
|
|
1253
|
-
# get oseries_models
|
|
1254
|
-
# add all model links.
|
|
1769
|
+
# get oseries_models and stresses_models libraries,
|
|
1770
|
+
# if empty add all time series -> model links.
|
|
1771
|
+
if libraries is None:
|
|
1772
|
+
libraries = ["oseries", "stresses"]
|
|
1255
1773
|
if self.n_models > 0:
|
|
1256
|
-
|
|
1257
|
-
|
|
1258
|
-
|
|
1259
|
-
|
|
1260
|
-
|
|
1261
|
-
|
|
1774
|
+
logger.debug("Updating time series -> models links in store.")
|
|
1775
|
+
links = self._get_time_series_model_links(
|
|
1776
|
+
modelnames=modelnames, recompute=recompute, progressbar=progressbar
|
|
1777
|
+
)
|
|
1778
|
+
for k in libraries:
|
|
1779
|
+
if recompute:
|
|
1780
|
+
desc = f"Updating {k}-models links"
|
|
1781
|
+
else:
|
|
1782
|
+
desc = f"Storing {k}-models links"
|
|
1783
|
+
for name, model_links in tqdm(
|
|
1784
|
+
links[k].items(),
|
|
1785
|
+
desc=desc,
|
|
1786
|
+
total=len(links[k]),
|
|
1787
|
+
disable=not progressbar,
|
|
1262
1788
|
):
|
|
1263
|
-
|
|
1789
|
+
if k == "oseries":
|
|
1790
|
+
self._add_oseries_model_links(
|
|
1791
|
+
name, model_links, _clear_cache=False
|
|
1792
|
+
)
|
|
1793
|
+
elif k == "stresses":
|
|
1794
|
+
self._add_stresses_model_links(
|
|
1795
|
+
[name], model_links, _clear_cache=False
|
|
1796
|
+
)
|
|
1797
|
+
# Clear caches after all updates are complete
|
|
1798
|
+
if "oseries" in libraries:
|
|
1799
|
+
self._clear_cache("oseries_models")
|
|
1800
|
+
if "stresses" in libraries:
|
|
1801
|
+
self._clear_cache("stresses_models")
|
|
1802
|
+
|
|
1803
|
+
def _trigger_links_update_if_needed(
|
|
1804
|
+
self, modelnames: Optional[list[str]] = None, progressbar: bool = False
|
|
1805
|
+
):
|
|
1806
|
+
# Check if time series-> model links need updating
|
|
1807
|
+
# Handle both Manager proxies (main) and booleans (worker after pickle)
|
|
1808
|
+
needs_update = (
|
|
1809
|
+
self._oseries_links_need_update.value
|
|
1810
|
+
if hasattr(self._oseries_links_need_update, "value")
|
|
1811
|
+
else self._oseries_links_need_update
|
|
1812
|
+
)
|
|
1813
|
+
if needs_update:
|
|
1814
|
+
self._clear_cache("_modelnames_cache")
|
|
1815
|
+
# Set BOTH flags to False BEFORE updating to prevent recursion
|
|
1816
|
+
# (update always recomputes both oseries and stresses links)
|
|
1817
|
+
if hasattr(self._oseries_links_need_update, "value"):
|
|
1818
|
+
self._oseries_links_need_update.value = False
|
|
1819
|
+
self._stresses_links_need_update.value = False
|
|
1820
|
+
else:
|
|
1821
|
+
self._oseries_links_need_update = False
|
|
1822
|
+
self._stresses_links_need_update = False
|
|
1823
|
+
modelnames = self._added_models
|
|
1824
|
+
if modelnames is None or len(modelnames) > 0:
|
|
1825
|
+
self._update_time_series_model_links(
|
|
1826
|
+
modelnames=modelnames, recompute=True, progressbar=progressbar
|
|
1827
|
+
)
|
|
1828
|
+
self._added_models = [] # reset list of added models
|
|
1829
|
+
else:
|
|
1830
|
+
self._added_models = [] # reset list of added models

-    def
-
+    def _get_time_series_model_links(
+        self,
+        modelnames: Optional[list[str]] = None,
+        recompute: bool = False,
+        progressbar: bool = True,
+    ) -> dict:
+        """Get model names per oseries and stresses time series in a dictionary.

        Returns
        -------
        links : dict
-            dictionary with oseries
-
+            dictionary with 'oseries' and 'stresses' as keys containing
+            dictionaries with time series names as keys and lists of model
+            names as values.
        """
-
+        oseries_links = {}
+        stresses_links = {}
        for mldict in tqdm(
-            self.iter_models(return_dict=True),
+            self.iter_models(modelnames=modelnames, return_dict=True),
            total=self.n_models,
-            desc="Get models per
+            desc=f"{'Recompute' if recompute else 'Get'} models per time series",
+            disable=not progressbar,
        ):
-            onam = mldict["oseries"]["name"]
            mlnam = mldict["name"]
-
-
+            # oseries
+            onam = mldict["oseries"]["name"]
+            if onam in oseries_links:
+                oseries_links[onam].append(mlnam)
            else:
-
-
-
-
-
-
-
-
-
-
-    @property  # type: ignore
-    @functools.lru_cache()
-    def oseries(self):
-        """Dataframe with overview of oseries."""
-        return self.get_metadata("oseries", self.oseries_names)
-
-    @property  # type: ignore
-    @functools.lru_cache()
-    def stresses(self):
-        """Dataframe with overview of stresses."""
-        return self.get_metadata("stresses", self.stresses_names)
-
-    @property  # type: ignore
-    @functools.lru_cache()
-    def _modelnames_cache(self):
-        """List of model names."""
-        return self.model_names
-
-    @property
-    def n_oseries(self):
-        """
-        Returns the number of oseries.
+                oseries_links[onam] = [mlnam]
+            # stresses
+            stress_names = self._get_model_stress_names(mldict)
+            for snam in stress_names:
+                if snam in stresses_links:
+                    stresses_links[snam].append(mlnam)
+                else:
+                    stresses_links[snam] = [mlnam]
+        return {"oseries": oseries_links, "stresses": stresses_links}

-
-
-        int
-            The number of oseries names.
-        """
-        return len(self.oseries_names)
+    def _get_model_stress_names(self, ml: ps.Model | dict) -> List[str]:
+        """Get list of stress names used in model.

-
-
-
-
+        Parameters
+        ----------
+        ml : pastas.Model or dict
+            model to get stress names from

        Returns
        -------
-
-
+        list of str
+            list of stress names used in model
        """
-
+        stresses = []
+        if isinstance(ml, dict):
+            for sm in ml["stressmodels"].values():
+                class_key = "class"
+                if sm[class_key] == "RechargeModel":
+                    stresses.append(sm["prec"]["name"])
+                    stresses.append(sm["evap"]["name"])
+                    if sm["temp"] is not None:
+                        stresses.append(sm["temp"]["name"])
+                elif "stress" in sm:
+                    smstress = sm["stress"]
+                    if isinstance(smstress, dict):
+                        smstress = [smstress]
+                    for s in smstress:
+                        stresses.append(s["name"])
+        else:
+            for sm in ml.stressmodels.values():
+                # Check class name using type instead of protected _name attribute
+                if type(sm).__name__ == "RechargeModel":
+                    stresses.append(sm.prec.name)
+                    stresses.append(sm.evap.name)
+                    if sm.temp is not None:
+                        stresses.append(sm.temp.name)
+                elif hasattr(sm, "stress"):
+                    smstress = sm.stress
+                    if not isinstance(smstress, list):
+                        smstress = [smstress]
+                    for s in smstress:
+                        stresses.append(s.name)
+        return list(set(stresses))
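
The dict branch above walks `ml["stressmodels"]` and collects stress names either from the `prec`/`evap`/`temp` entries of a `RechargeModel` or from the `stress` entry of other stress models. A self-contained sketch of that traversal on a hypothetical model dictionary (the helper function and the example values are illustrative, not pastastore API):

    def stress_names_from_dict(mldict: dict) -> list[str]:
        # Same traversal as the dict branch of _get_model_stress_names above.
        names = []
        for sm in mldict["stressmodels"].values():
            if sm["class"] == "RechargeModel":
                names.append(sm["prec"]["name"])
                names.append(sm["evap"]["name"])
                if sm["temp"] is not None:
                    names.append(sm["temp"]["name"])
            elif "stress" in sm:
                stress = sm["stress"]
                for s in stress if isinstance(stress, list) else [stress]:
                    names.append(s["name"])
        return sorted(set(names))

    # Hypothetical, minimal model dictionary with a recharge and a well stress model.
    example = {
        "stressmodels": {
            "recharge": {
                "class": "RechargeModel",
                "prec": {"name": "prec_station"},
                "evap": {"name": "evap_station"},
                "temp": None,
            },
            "well": {"class": "WellModel", "stress": [{"name": "well_extraction"}]},
        }
    }
    print(stress_names_from_dict(example))
    # ['evap_station', 'prec_station', 'well_extraction']
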
+
+    def get_model_time_series_names(
+        self,
+        modelnames: Optional[Union[list, str]] = None,
+        dropna: bool = True,
+        progressbar: bool = True,
+    ) -> FrameOrSeriesUnion:
+        """Get time series names contained in model.

-
-
-
-
+        Parameters
+        ----------
+        modelnames : Optional[Union[list, str]], optional
+            list or name of models to get time series names for,
+            by default None which will use all modelnames
+        dropna : bool, optional
+            drop stresses from table if stress is not included in any
+            model, by default True
+        progressbar : bool, optional
+            show progressbar, by default True

        Returns
        -------
-
-
+        structure : pandas.DataFrame
+            returns DataFrame with oseries name per model, and a flag
+            indicating whether a stress is contained within a time series
+            model.
        """
-
+        model_names = self._parse_names(modelnames, libname="models")
+        structure = pd.DataFrame(
+            index=model_names, columns=["oseries"] + self.stresses_names
+        )
+        structure.index.name = "model"

-
-
-
-
+        for mlnam in (
+            tqdm(model_names, desc="Get model time series names")
+            if progressbar
+            else model_names
+        ):
+            mldict = self.get_models(mlnam, return_dict=True)
+            stresses_names = self._get_model_stress_names(mldict)
+            # oseries
+            structure.loc[mlnam, "oseries"] = mldict["oseries"]["name"]
+            # stresses
+            structure.loc[mlnam, stresses_names] = 1
+        if dropna:
+            return structure.dropna(how="all", axis=1)
+        else:
+            return structure
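
In use, `get_model_time_series_names` returns one row per model with the linked oseries name and a 1 in every stress column that model uses; with `dropna=True` stresses not used by any model are dropped. A sketch of a call and a possible result, assuming a connector `conn` that holds the two hypothetical models from the example further above:

    # Hypothetical usage; `conn` is any connector with models stored.
    overview = conn.get_model_time_series_names(progressbar=False)
    print(overview)
    # possible result (illustrative):
    #                 oseries  prec_station  evap_station
    # model
    # model_well1  head_well1             1             1
    # model_well2  head_well2             1             1
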

-
-
-
-
-
-
-        d = {}
-        for onam in self.oseries_with_models:
-            d[onam] = self._get_item("oseries_models", onam)
-        return d
+    @staticmethod
+    def _clear_cache(libname: AllLibs) -> None:
+        """Clear cached property."""
+        if libname == "models":
+            libname = "_modelnames_cache"
+        getattr(BaseConnector, libname).fget.cache_clear()


class ModelAccessor:

@@ -1412,7 +2017,7 @@ class ModelAccessor:
        """Representation contains the number of models and the list of model names."""
        return (
            f"<{self.__class__.__name__}> {len(self)} model(s): \n"
-            + self.conn.
+            + self.conn.model_names.__repr__()
        )

    def __getitem__(self, name: str):

@@ -1463,9 +2068,7 @@ class ModelAccessor:
        pastas.Model
            A random model object from the connection.
        """
-
-
-        return self.conn.get_models(choice(self.conn._modelnames_cache))
+        return self.conn.get_models(choice(self.conn.model_names))
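
The changed line draws a random name from the public `model_names` list instead of the cached `_modelnames_cache` property. A sketch mirroring what it does, assuming `conn` is any connector with at least one stored model (usage is illustrative):

    # Hypothetical usage mirroring the changed line above.
    from random import choice
    ml = conn.get_models(choice(conn.model_names))  # load one randomly chosen model
    print(ml.name)
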

    @property
    def metadata(self):