pastastore 1.8.0__py3-none-any.whl → 1.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pastastore/base.py CHANGED
@@ -332,7 +332,8 @@ class BaseConnector(ABC):
332
332
  self._clear_cache(libname)
333
333
  else:
334
334
  raise ItemInLibraryException(
335
- f"Item with name '{name}' already" f" in '{libname}' library!"
335
+ f"Time series with name '{name}' already in '{libname}' library! "
336
+ "Use overwrite=True to replace existing time series."
336
337
  )
337
338
 
338
339
  def _update_series(
@@ -575,7 +576,8 @@ class BaseConnector(ABC):
575
576
  )
576
577
  else:
577
578
  raise ItemInLibraryException(
578
- f"Model with name '{name}' " "already in 'models' library!"
579
+ f"Model with name '{name}' already in 'models' library! "
580
+ "Use overwrite=True to replace existing model."
579
581
  )
580
582
  self._clear_cache("_modelnames_cache")
581
583
  self._add_oseries_model_links(str(mldict["oseries"]["name"]), name)
@@ -1093,8 +1095,7 @@ class BaseConnector(ABC):
1093
1095
  """
1094
1096
  if prompt:
1095
1097
  ui = input(
1096
- f"Do you want to empty '{libname}'"
1097
- " library of all its contents? [y/N] "
1098
+ f"Do you want to empty '{libname}' library of all its contents? [y/N] "
1098
1099
  )
1099
1100
  if ui.lower() != "y":
1100
1101
  return
@@ -1115,7 +1116,7 @@ class BaseConnector(ABC):
1115
1116
  ):
1116
1117
  self._del_item(libname, name)
1117
1118
  self._clear_cache(libname)
1118
- print(f"Emptied library {libname} in {self.name}: " f"{self.__class__}")
1119
+ print(f"Emptied library {libname} in {self.name}: {self.__class__}")
1119
1120
 
1120
1121
  def _iter_series(self, libname: str, names: Optional[List[str]] = None):
1121
1122
  """Iterate over time series in library (internal method).
@@ -1365,10 +1366,36 @@ class BaseConnector(ABC):
1365
1366
  class ModelAccessor:
1366
1367
  """Object for managing access to stored models.
1367
1368
 
1368
- Provides dict-like access to models (i.e. PastaStore.models["model1"]), or allows
1369
- adding models to the PastaStore using dict-like assignment (i.e.
1370
- PastaStore.models["model1"] = ml), and it can serve as an iterator (i.e. [ml for ml
1371
- in pstore.models]).
1369
+ The ModelAccessor object allows dictionary-like assignment and access to models.
1370
+ In addition it provides some useful utilities for working with stored models
1371
+ in the database.
1372
+
1373
+ Examples
1374
+ --------
1375
+ Get a model by name::
1376
+
1377
+ >>> model = pstore.models["my_model"]
1378
+
1379
+ Store a model in the database::
1380
+
1381
+ >>> pstore.models["my_model_v2"] = model
1382
+
1383
+ Get model metadata dataframe::
1384
+
1385
+ >>> pstore.models.metadata
1386
+
1387
+ Number of models::
1388
+
1389
+ >>> len(pstore.models)
1390
+
1391
+ Random model::
1392
+
1393
+ >>> model = pstore.models.random()
1394
+
1395
+ Iterate over stored models::
1396
+
1397
+ >>> for ml in pstore.models:
1398
+ >>> ml.solve()
1372
1399
  """
1373
1400
 
1374
1401
  def __init__(self, conn):
@@ -1382,8 +1409,11 @@ class ModelAccessor:
1382
1409
  self.conn = conn
1383
1410
 
1384
1411
def __repr__(self):
    """Return string with the model count followed by the list of model names."""
    count_info = f"<{self.__class__.__name__}> {len(self)} model(s): \n"
    names_repr = self.conn._modelnames_cache.__repr__()
    return count_info + names_repr
1387
1417
 
1388
1418
  def __getitem__(self, name: str):
1389
1419
  """Get model from store with model name as key.
@@ -1436,3 +1466,27 @@ class ModelAccessor:
1436
1466
  from random import choice
1437
1467
 
1438
1468
  return self.conn.get_models(choice(self.conn._modelnames_cache))
1469
+
1470
@property
def metadata(self):
    """Dataframe with overview of models metadata.

    Returns a DataFrame indexed by (oseries, modelname) holding the oseries
    metadata joined with per-model information: the number of stressmodels,
    the stressmodel names, and one column per model settings entry.
    """
    # NOTE: cannot be cached as this dataframe is not a property of the connector
    # I'm not sure how to clear this cache when models are added/removed.
    # Build the (oseries, modelname) MultiIndex from the oseries -> models mapping.
    idx = pd.MultiIndex.from_tuples(
        ((k, i) for k, v in self.conn.oseries_models.items() for i in v),
        names=["oseries", "modelname"],
    )
    modeldf = pd.DataFrame(index=idx)
    # Join oseries metadata on the first index level; the array-valued `on`
    # argument makes pandas add a helper column "key_0", dropped right after.
    modeldf = modeldf.join(
        self.conn.oseries, on=modeldf.index.get_level_values(0)
    ).drop("key_0", axis=1)
    modeldf["n_stressmodels"] = 0
    # Fill per-model columns; loads each model dict from the store, so this
    # can be slow for stores containing many models.
    for onam, mlnam in modeldf.index:
        mldict = self.conn.get_models(mlnam, return_dict=True)
        modeldf.loc[(onam, mlnam), "n_stressmodels"] = len(mldict["stressmodels"])
        modeldf.loc[(onam, mlnam), "stressmodel_names"] = ",".join(
            list(mldict["stressmodels"].keys())
        )
        # Copy each model settings entry into its own column.
        for setting in mldict["settings"].keys():
            modeldf.loc[(onam, mlnam), setting] = mldict["settings"][setting]
    return modeldf
pastastore/connectors.py CHANGED
@@ -370,8 +370,7 @@ class ConnectorUtil:
370
370
  raise TypeError("Expected pastas.Model or dict!")
371
371
  if name not in self.oseries.index:
372
372
  msg = (
373
- f"Cannot add model because oseries '{name}' "
374
- "is not contained in store."
373
+ f"Cannot add model because oseries '{name}' is not contained in store."
375
374
  )
376
375
  raise LookupError(msg)
377
376
  # expensive check
@@ -764,6 +763,7 @@ class ArcticDBConnector(BaseConnector, ConnectorUtil):
764
763
  """
765
764
  try:
766
765
  import arcticdb
766
+
767
767
  except ModuleNotFoundError as e:
768
768
  print("Please install arcticdb with `pip install arcticdb`!")
769
769
  raise e
@@ -777,6 +777,9 @@ class ArcticDBConnector(BaseConnector, ConnectorUtil):
777
777
  # for older versions of PastaStore, if oseries_models library is empty
778
778
  # populate oseries - models database
779
779
  self._update_all_oseries_model_links()
780
+ # write pstore file to store database info that can be used to load pstore
781
+ if "lmdb" in self.uri:
782
+ self.write_pstore_config_file()
780
783
 
781
784
  def _initialize(self, verbose: bool = True) -> None:
782
785
  """Initialize the libraries (internal method)."""
@@ -792,6 +795,29 @@ class ArcticDBConnector(BaseConnector, ConnectorUtil):
792
795
  )
793
796
  self.libs[libname] = self._get_library(libname)
794
797
 
798
def write_pstore_config_file(self, path: "str | None" = None) -> None:
    """Write pastastore configuration file storing database info.

    The resulting ``<name>.pastastore`` file can be used to reconnect to
    this database later (e.g. via ``PastaStore.from_pastastore_config_file``).

    Parameters
    ----------
    path : str, optional
        directory in which to write the config file. If not provided and the
        database is a local LMDB database, the file is written inside the
        database directory (derived from the URI). For databases that are not
        local (e.g. cloud-hosted), a path must be provided explicitly.

    Raises
    ------
    ValueError
        if no path is provided and the database is not a local LMDB database.
    """
    # NOTE: method is not private as theoretically an ArcticDB
    # database could also be hosted in the cloud, in which case,
    # writing this config in the folder holding the database
    # is no longer possible. For those situations, the user can
    # write this config file and specify the path it should be
    # written to.
    config = {
        "connector_type": self.conn_type,
        "name": self.name,
        "uri": self.uri,
    }
    if path is None:
        if "lmdb" not in self.uri:
            raise ValueError("Please provide a path to write the pastastore file!")
        # local LMDB database: store the config next to the database files
        path = self.uri.split("://")[1]

    fname = os.path.join(path, f"{self.name}.pastastore")
    with open(fname, "w", encoding="utf-8") as f:
        json.dump(config, f)
820
+
795
821
  def _library_name(self, libname: str) -> str:
796
822
  """Get full library name according to ArcticDB (internal method)."""
797
823
  return ".".join([self.name, libname])
@@ -1165,6 +1191,7 @@ class PasConnector(BaseConnector, ConnectorUtil):
1165
1191
  whether to print message when database is initialized, by default True
1166
1192
  """
1167
1193
  self.name = name
1194
+ self.parentdir = path
1168
1195
  self.path = os.path.abspath(os.path.join(path, self.name))
1169
1196
  self.relpath = os.path.relpath(self.path)
1170
1197
  self._initialize(verbose=verbose)
@@ -1172,6 +1199,8 @@ class PasConnector(BaseConnector, ConnectorUtil):
1172
1199
  # for older versions of PastaStore, if oseries_models library is empty
1173
1200
  # populate oseries_models library
1174
1201
  self._update_all_oseries_model_links()
1202
+ # write pstore file to store database info that can be used to load pstore
1203
+ self._write_pstore_config_file()
1175
1204
 
1176
1205
  def _initialize(self, verbose: bool = True) -> None:
1177
1206
  """Initialize the libraries (internal method)."""
@@ -1189,6 +1218,18 @@ class PasConnector(BaseConnector, ConnectorUtil):
1189
1218
  )
1190
1219
  setattr(self, f"lib_{val}", os.path.join(self.path, val))
1191
1220
 
1221
+ def _write_pstore_config_file(self):
1222
+ """Write pstore configuration file to store database info."""
1223
+ config = {
1224
+ "connector_type": self.conn_type,
1225
+ "name": self.name,
1226
+ "path": self.parentdir,
1227
+ }
1228
+ with open(
1229
+ os.path.join(self.path, f"{self.name}.pastastore"), "w", encoding="utf-8"
1230
+ ) as f:
1231
+ json.dump(config, f)
1232
+
1192
1233
  def _get_library(self, libname: str):
1193
1234
  """Get path to directory holding data.
1194
1235
 
@@ -259,7 +259,7 @@ class HydroPandasExtension:
259
259
  meteo_var: str = "RD",
260
260
  tmin: TimeType = None,
261
261
  tmax: TimeType = None,
262
- unit_multiplier: float = 1e-3,
262
+ unit_multiplier: float = 1e3,
263
263
  fill_missing_obs: bool = True,
264
264
  normalize_datetime_index: bool = True,
265
265
  **kwargs,
@@ -298,7 +298,7 @@ class HydroPandasExtension:
298
298
  meteo_var: str = "EV24",
299
299
  tmin: TimeType = None,
300
300
  tmax: TimeType = None,
301
- unit_multiplier: float = 1e-3,
301
+ unit_multiplier: float = 1e3,
302
302
  fill_missing_obs: bool = True,
303
303
  normalize_datetime_index: bool = True,
304
304
  **kwargs,
@@ -409,7 +409,7 @@ class HydroPandasExtension:
409
409
  meteo_var: str = "RD",
410
410
  tmin: Optional[TimeType] = None,
411
411
  tmax: Optional[TimeType] = None,
412
- unit_multiplier: float = 1e-3,
412
+ unit_multiplier: float = 1e3,
413
413
  normalize_datetime_index: bool = True,
414
414
  fill_missing_obs: bool = True,
415
415
  **kwargs,
@@ -428,7 +428,7 @@ class HydroPandasExtension:
428
428
  end time
429
429
  unit_multiplier : float, optional
430
430
  multiply unit by this value before saving it in the store,
431
- by default 1.0 (no conversion)
431
+ by default 1e3 (converting m to mm)
432
432
  fill_missing_obs : bool, optional
433
433
  if True, fill missing observations by getting observations from nearest
434
434
  station with data.
@@ -454,7 +454,7 @@ class HydroPandasExtension:
454
454
  meteo_var: str = "EV24",
455
455
  tmin: Optional[TimeType] = None,
456
456
  tmax: Optional[TimeType] = None,
457
- unit_multiplier: float = 1e-3,
457
+ unit_multiplier: float = 1e3,
458
458
  normalize_datetime_index: bool = True,
459
459
  fill_missing_obs: bool = True,
460
460
  **kwargs,
@@ -474,7 +474,7 @@ class HydroPandasExtension:
474
474
  end time
475
475
  unit_multiplier : float, optional
476
476
  multiply unit by this value before saving it in the store,
477
- by default 1.0 (no conversion)
477
+ by default 1e3 (converting m to mm)
478
478
  fill_missing_obs : bool, optional
479
479
  if True, fill missing observations by getting observations from nearest
480
480
  station with data.
pastastore/plotting.py CHANGED
@@ -98,8 +98,7 @@ class Plots:
98
98
 
99
99
  if len(names) > 20 and split:
100
100
  raise ValueError(
101
- "More than 20 time series leads to too many "
102
- "subplots, set split=False."
101
+ "More than 20 time series leads to too many subplots, set split=False."
103
102
  )
104
103
 
105
104
  if ax is None:
@@ -542,7 +541,7 @@ class Plots:
542
541
 
543
542
  if label is None:
544
543
  if extend:
545
- label = f"No. Models = {len(statsdf)-1}"
544
+ label = f"No. Models = {len(statsdf) - 1}"
546
545
  else:
547
546
  label = f"No. Models = {len(statsdf)}"
548
547
 
@@ -1053,7 +1052,7 @@ class Maps:
1053
1052
  yi = imeta.pop("y", np.nan)
1054
1053
  else:
1055
1054
  raise ValueError(
1056
- "metadata_source must be either " "'model' or 'store'!"
1055
+ "metadata_source must be either 'model' or 'store'!"
1057
1056
  )
1058
1057
  if np.isnan(xi) or np.isnan(yi):
1059
1058
  print(f"No x,y-data for {istress.name}!")
@@ -1062,7 +1061,7 @@ class Maps:
1062
1061
  print(f"x,y-data is 0.0 for {istress.name}, not plotting!")
1063
1062
  continue
1064
1063
 
1065
- stresses.loc[istress.name, :] = (xi, yi, name, f"C{count%10}")
1064
+ stresses.loc[istress.name, :] = (xi, yi, name, f"C{count % 10}")
1066
1065
  count += 1
1067
1066
 
1068
1067
  # create figure
@@ -1083,7 +1082,7 @@ class Maps:
1083
1082
  xm = float(ometa.pop("x", np.nan))
1084
1083
  ym = float(ometa.pop("y", np.nan))
1085
1084
  else:
1086
- raise ValueError("metadata_source must be either " "'model' or 'store'!")
1085
+ raise ValueError("metadata_source must be either 'model' or 'store'!")
1087
1086
 
1088
1087
  po = ax.scatter(xm, ym, s=osize, marker="o", label=oserieslabel, color="k")
1089
1088
  legend_list = [po]
@@ -1250,14 +1249,14 @@ class Maps:
1250
1249
  if np.isin(st.loc[s, "kind"], kinds):
1251
1250
  (c,) = np.where(skind == st.loc[s, "kind"])
1252
1251
  if color_lines:
1253
- color = f"C{c[0]+1}"
1252
+ color = f"C{c[0] + 1}"
1254
1253
  else:
1255
1254
  color = "k"
1256
1255
  segments.append(
1257
1256
  [[os["x"], os["y"]], [st.loc[s, "x"], st.loc[s, "y"]]]
1258
1257
  )
1259
1258
  segment_colors.append(color)
1260
- scatter_colors.append(f"C{c[0]+1}")
1259
+ scatter_colors.append(f"C{c[0] + 1}")
1261
1260
 
1262
1261
  stused = np.append(stused, s)
1263
1262
 
@@ -1294,7 +1293,7 @@ class Maps:
1294
1293
  [],
1295
1294
  marker="o",
1296
1295
  color="w",
1297
- markerfacecolor=f"C{c[0]+1}",
1296
+ markerfacecolor=f"C{c[0] + 1}",
1298
1297
  label=kind,
1299
1298
  markersize=10,
1300
1299
  )
pastastore/store.py CHANGED
@@ -15,7 +15,7 @@ from pastas.io.pas import pastas_hook
15
15
  from tqdm.auto import tqdm
16
16
 
17
17
  from pastastore.base import BaseConnector
18
- from pastastore.connectors import DictConnector
18
+ from pastastore.connectors import ArcticDBConnector, DictConnector, PasConnector
19
19
  from pastastore.plotting import Maps, Plots
20
20
  from pastastore.util import _custom_warning
21
21
  from pastastore.version import PASTAS_GEQ_150, PASTAS_LEQ_022
@@ -79,6 +79,24 @@ class PastaStore:
79
79
  self.plots = Plots(self)
80
80
  self.yaml = PastastoreYAML(self)
81
81
 
82
@classmethod
def from_pastastore_config_file(cls, fname):
    """Create a PastaStore from a pastastore config file.

    Parameters
    ----------
    fname : str
        path to a ``.pastastore`` config file, as written by
        PasConnector or ArcticDBConnector.

    Returns
    -------
    PastaStore
        PastaStore connected to the database described in the config file.

    Raises
    ------
    ValueError
        if the config file specifies an unsupported connector type.
    """
    # config files are written with utf-8; read them back the same way
    with open(fname, "r", encoding="utf-8") as f:
        cfg = json.load(f)

    # remaining entries of cfg are passed as kwargs to the connector
    conn_type = cfg.pop("connector_type")
    if conn_type == "pas":
        conn = PasConnector(**cfg)
    elif conn_type == "arcticdb":
        conn = ArcticDBConnector(**cfg)
    else:
        raise ValueError(
            f"Cannot load connector type: '{conn_type}'. "
            "This is only supported for PasConnector and ArcticDBConnector."
        )
    return cls(conn)
99
+
82
100
  @property
83
101
  def empty(self) -> bool:
84
102
  """Check if the PastaStore is empty."""
@@ -120,12 +138,43 @@ class PastaStore:
120
138
 
121
139
@property
def models(self):
    """Return the ModelAccessor object.

    The ModelAccessor object allows dictionary-like assignment and access to models.
    In addition it provides some useful utilities for working with stored models
    in the database.

    Examples
    --------
    Get a model by name::

        >>> model = pstore.models["my_model"]

    Store a model in the database::

        >>> pstore.models["my_model_v2"] = model

    Get model metadata dataframe::

        >>> pstore.models.metadata

    Number of models::

        >>> len(pstore.models)

    Random model::

        >>> model = pstore.models.random()

    Iterate over stored models::

        >>> for ml in pstore.models:
        ...     ml.solve()

    Returns
    -------
    ModelAccessor
        ModelAccessor object
    """
    return self.conn.models
131
180
 
@@ -1373,7 +1422,7 @@ class PastaStore:
1373
1422
 
1374
1423
  if os.path.exists(fname) and not overwrite:
1375
1424
  raise FileExistsError(
1376
- "File already exists! " "Use 'overwrite=True' to " "force writing file."
1425
+ "File already exists! Use 'overwrite=True' to force writing file."
1377
1426
  )
1378
1427
  elif os.path.exists(fname):
1379
1428
  warnings.warn(f"Overwriting file '{os.path.basename(fname)}'", stacklevel=1)
@@ -1548,7 +1597,10 @@ class PastaStore:
1548
1597
  "models": self.model_names,
1549
1598
  }
1550
1599
  else:
1551
- raise ValueError("Provide valid libname: 'models', 'stresses' or 'oseries'")
1600
+ raise ValueError(
1601
+ "Provide valid libname: 'models', 'stresses', 'oseries' or None"
1602
+ " to seach within all libraries."
1603
+ )
1552
1604
 
1553
1605
  result = {}
1554
1606
  for lib, names in lib_names.items():
pastastore/styling.py CHANGED
@@ -57,12 +57,12 @@ def boolean_styler(b):
57
57
  """
58
58
  if b:
59
59
  return (
60
- f"background-color: {rgb2hex((231/255, 255/255, 239/255))}; "
60
+ f"background-color: {rgb2hex((231 / 255, 255 / 255, 239 / 255))}; "
61
61
  "color: darkgreen"
62
62
  )
63
63
  else:
64
64
  return (
65
- f"background-color: {rgb2hex((255/255, 238/255, 238/255))}; "
65
+ f"background-color: {rgb2hex((255 / 255, 238 / 255, 238 / 255))}; "
66
66
  "color: darkred"
67
67
  )
68
68
 
@@ -90,11 +90,11 @@ def boolean_row_styler(row, column):
90
90
  """
91
91
  if row[column]:
92
92
  return (
93
- f"background-color: {rgb2hex((231/255, 255/255, 239/255))}; "
93
+ f"background-color: {rgb2hex((231 / 255, 255 / 255, 239 / 255))}; "
94
94
  "color: darkgreen",
95
95
  ) * row.size
96
96
  else:
97
97
  return (
98
- f"background-color: {rgb2hex((255/255, 238/255, 238/255))}; "
98
+ f"background-color: {rgb2hex((255 / 255, 238 / 255, 238 / 255))}; "
99
99
  "color: darkred",
100
100
  ) * row.size
pastastore/util.py CHANGED
@@ -1,6 +1,7 @@
1
1
  """Useful utilities for pastastore."""
2
2
 
3
3
  import os
4
+ import shutil
4
5
  from typing import Dict, List, Optional, Union
5
6
 
6
7
  import numpy as np
@@ -43,8 +44,6 @@ def delete_arcticdb_connector(
43
44
  list of library names to delete, by default None which deletes
44
45
  all libraries
45
46
  """
46
- import shutil
47
-
48
47
  import arcticdb
49
48
 
50
49
  if conn is not None:
@@ -75,9 +74,17 @@ def delete_arcticdb_connector(
75
74
  print()
76
75
  print(f" - deleted: {lib}")
77
76
 
78
- remaining = [ilib for ilib in arc.list_libraries() if ilib.split(".") == name]
77
+ # delete .pastastore file if entire pastastore is deleted
78
+ remaining_libs = [
79
+ ilib for ilib in arc.list_libraries() if ilib.split(".")[0] == name
80
+ ]
81
+ if remaining_libs == 0:
82
+ os.unlink(os.path.join(uri.split("//")[-1], f"{name}.pastastore"))
83
+
84
+ # check if any remaining libraries in lmdb dir, if none, delete entire folder
85
+ remaining = arc.list_libraries()
79
86
  if len(remaining) == 0:
80
- shutil.rmtree(os.path.join(conn.uri.split("//")[-1], name))
87
+ shutil.rmtree(os.path.join(conn.uri.split("//")[-1]))
81
88
 
82
89
  print("Done!")
83
90
 
@@ -98,8 +105,6 @@ def delete_dict_connector(conn, libraries: Optional[List[str]] = None) -> None:
98
105
 
99
106
  def delete_pas_connector(conn, libraries: Optional[List[str]] = None) -> None:
100
107
  """Delete PasConnector object."""
101
- import shutil
102
-
103
108
  print(f"Deleting PasConnector database: '{conn.name}' ... ", end="")
104
109
  if libraries is None:
105
110
  shutil.rmtree(conn.path)
@@ -143,7 +148,7 @@ def delete_pastastore(pstore, libraries: Optional[List[str]] = None) -> None:
143
148
  delete_pas_connector(conn=pstore.conn, libraries=libraries)
144
149
  else:
145
150
  raise TypeError(
146
- "Unrecognized pastastore Connector type: " f"{pstore.conn.conn_type}"
151
+ f"Unrecognized pastastore Connector type: {pstore.conn.conn_type}"
147
152
  )
148
153
 
149
154
 
@@ -545,7 +550,7 @@ def frontiers_checks(
545
550
  ml = pstore.get_models(mlnam)
546
551
 
547
552
  if ml.parameters["optimal"].hasnans:
548
- print(f"Warning! Skipping model '{mlnam}' because " "it is not solved!")
553
+ print(f"Warning! Skipping model '{mlnam}' because it is not solved!")
549
554
  continue
550
555
 
551
556
  checks = pd.DataFrame(columns=["stat", "threshold", "units", "check_passed"])
@@ -752,8 +757,7 @@ def frontiers_aic_select(
752
757
  modelnames += pstore.oseries_models[o]
753
758
  elif oseries is not None:
754
759
  print(
755
- "Warning! Both 'modelnames' and 'oseries' provided, "
756
- "using only 'modelnames'"
760
+ "Warning! Both 'modelnames' and 'oseries' provided, using only 'modelnames'"
757
761
  )
758
762
 
759
763
  # Dataframe of models with corresponding oseries
pastastore/version.py CHANGED
@@ -9,7 +9,7 @@ PASTAS_VERSION = parse_version(ps.__version__)
9
9
  PASTAS_LEQ_022 = PASTAS_VERSION <= parse_version("0.22.0")
10
10
  PASTAS_GEQ_150 = PASTAS_VERSION >= parse_version("1.5.0")
11
11
 
12
- __version__ = "1.8.0"
12
+ __version__ = "1.9.0"
13
13
 
14
14
 
15
15
  def show_versions(optional=False) -> None:
@@ -3,6 +3,8 @@
3
3
  import datetime
4
4
  import logging
5
5
  import os
6
+ import tempfile
7
+ from contextlib import contextmanager
6
8
  from copy import deepcopy
7
9
  from typing import Any, Dict, List, Optional, Union
8
10
 
@@ -124,6 +126,18 @@ def reduce_to_minimal_dict(d, keys=None):
124
126
  reduce_to_minimal_dict(v, keys=keys)
125
127
 
126
128
 
129
@contextmanager
def temporary_yaml_from_str(yaml):
    """Context manager yielding the path of a temporary file holding *yaml*.

    The string is written (utf-8 encoded) to a fresh temporary file on entry;
    the file is removed again when the context exits.
    """
    fd, fpath = tempfile.mkstemp()
    with os.fdopen(fd, "wb") as fobj:
        fobj.write(yaml.encode("utf-8"))
    try:
        yield fpath
    finally:
        os.unlink(fpath)
+ os.unlink(temp.name)
139
+
140
+
127
141
  class PastastoreYAML:
128
142
  """Class for reading/writing Pastas models in YAML format.
129
143
 
@@ -427,7 +441,7 @@ class PastastoreYAML:
427
441
  .values
428
442
  )
429
443
  logger.info(
430
- f" | using {n} nearest stress(es) with kind='{kind}': " f"{snames}"
444
+ f" | using {n} nearest stress(es) with kind='{kind}': {snames}"
431
445
  )
432
446
  else:
433
447
  snames = [snames]
@@ -533,7 +547,10 @@ class PastastoreYAML:
533
547
  if (
534
548
  smnam.lower() in ["rch", "rech", "recharge", "rechargemodel"]
535
549
  ) and not smtyp:
536
- logger.info("| assuming RechargeModel based on stressmodel name.")
550
+ logger.info(
551
+ "| no StressModel type provided, using 'RechargeModel' based on "
552
+ "stressmodel name."
553
+ )
537
554
  # check if stressmodel dictionary is empty, create (nearly
538
555
  # empty) dict so defaults are used
539
556
  if smyml is None:
@@ -547,14 +564,14 @@ class PastastoreYAML:
547
564
  # cannot make any assumptions for non-RechargeModels
548
565
  if smyml is None:
549
566
  raise ValueError(
550
- "Insufficient information " f"for stressmodel '{name}'!"
567
+ f"Insufficient information for stressmodel '{name}'!"
551
568
  )
552
569
  # get stressmodel type, with default StressModel
553
570
  if classkey in smyml:
554
571
  smtyp = smyml[classkey]
555
572
  else:
556
573
  logger.info(
557
- "| no stressmodel class type provided, " "using 'StressModel'"
574
+ "| no stressmodel class type provided, using 'StressModel'"
558
575
  )
559
576
  smtyp = "StressModel"
560
577
 
@@ -574,7 +591,7 @@ class PastastoreYAML:
574
591
  sm = self._parse_wellmodel_dict(smyml, onam=onam)
575
592
  else:
576
593
  raise NotImplementedError(
577
- "PastaStore.yaml interface does " f"not (yet) support '{smtyp}'!"
594
+ f"PastaStore.yaml interface does not (yet) support '{smtyp}'!"
578
595
  )
579
596
 
580
597
  # add to list
@@ -604,7 +621,7 @@ class PastastoreYAML:
604
621
  Parameters
605
622
  ----------
606
623
  fyaml : str
607
- path to file
624
+ YAML as str or path to file
608
625
 
609
626
  Returns
610
627
  -------
@@ -618,8 +635,18 @@ class PastastoreYAML:
618
635
  NotImplementedError
619
636
  if unsupported stressmodel is encountered
620
637
  """
621
- with open(fyaml, "r") as f:
622
- yml = yaml.load(f, Loader=yaml.CFullLoader)
638
+ if "\n" in fyaml or "\r" in fyaml:
639
+ with temporary_yaml_from_str(fyaml) as fyaml:
640
+ with open(fyaml, "r") as f:
641
+ yml = yaml.load(f, Loader=yaml.CFullLoader)
642
+ elif os.path.exists(fyaml):
643
+ with open(fyaml, "r") as f:
644
+ yml = yaml.load(f, Loader=yaml.CFullLoader)
645
+ else:
646
+ raise ValueError(
647
+ "Could not read YAML file! Check if input is valid YAML "
648
+ "or valid path to YAML file."
649
+ )
623
650
 
624
651
  models = []
625
652
 
@@ -1,6 +1,6 @@
1
1
  The MIT License (MIT)
2
2
 
3
- Copyright (c) 2020 D.A. Brakenhoff
3
+ Copyright (c) 2020-2025 D.A. Brakenhoff
4
4
 
5
5
  Permission is hereby granted, free of charge, to any person obtaining a copy
6
6
  of this software and associated documentation files (the "Software"), to deal
@@ -1,12 +1,12 @@
1
- Metadata-Version: 2.1
1
+ Metadata-Version: 2.2
2
2
  Name: pastastore
3
- Version: 1.8.0
3
+ Version: 1.9.0
4
4
  Summary: Tools for managing Pastas time series models.
5
5
  Author: D.A. Brakenhoff
6
6
  Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
7
7
  License: The MIT License (MIT)
8
8
 
9
- Copyright (c) 2020 D.A. Brakenhoff
9
+ Copyright (c) 2020-2025 D.A. Brakenhoff
10
10
 
11
11
  Permission is hereby granted, free of charge, to any person obtaining a copy
12
12
  of this software and associated documentation files (the "Software"), to deal
@@ -40,13 +40,13 @@ Classifier: Operating System :: Unix
40
40
  Classifier: Operating System :: MacOS
41
41
  Classifier: Programming Language :: Python
42
42
  Classifier: Programming Language :: Python :: 3
43
- Classifier: Programming Language :: Python :: 3.9
44
43
  Classifier: Programming Language :: Python :: 3.10
45
44
  Classifier: Programming Language :: Python :: 3.11
46
45
  Classifier: Programming Language :: Python :: 3.12
46
+ Classifier: Programming Language :: Python :: 3.13
47
47
  Classifier: Programming Language :: Python :: 3 :: Only
48
48
  Classifier: Topic :: Scientific/Engineering :: Hydrology
49
- Requires-Python: >=3.7
49
+ Requires-Python: >=3.10
50
50
  Description-Content-Type: text/markdown
51
51
  License-File: LICENSE
52
52
  Requires-Dist: pastas>=0.13
@@ -65,26 +65,17 @@ Provides-Extra: arcticdb
65
65
  Requires-Dist: arcticdb; extra == "arcticdb"
66
66
  Provides-Extra: lint
67
67
  Requires-Dist: ruff; extra == "lint"
68
+ Provides-Extra: pytest
69
+ Requires-Dist: coverage; extra == "pytest"
70
+ Requires-Dist: codecov; extra == "pytest"
71
+ Requires-Dist: pytest; extra == "pytest"
72
+ Requires-Dist: pytest-cov; extra == "pytest"
73
+ Requires-Dist: pytest-dependency; extra == "pytest"
74
+ Requires-Dist: pytest-benchmark; extra == "pytest"
75
+ Requires-Dist: codacy-coverage; extra == "pytest"
68
76
  Provides-Extra: test
69
- Requires-Dist: pastastore[arcticdb,lint,optional]; extra == "test"
77
+ Requires-Dist: pastastore[arcticdb,lint,optional,pytest]; extra == "test"
70
78
  Requires-Dist: hydropandas[full]; extra == "test"
71
- Requires-Dist: coverage; extra == "test"
72
- Requires-Dist: codecov; extra == "test"
73
- Requires-Dist: pytest; extra == "test"
74
- Requires-Dist: pytest-cov; extra == "test"
75
- Requires-Dist: pytest-dependency; extra == "test"
76
- Requires-Dist: pytest-benchmark; extra == "test"
77
- Requires-Dist: codacy-coverage; extra == "test"
78
- Provides-Extra: test-py312
79
- Requires-Dist: pastastore[lint,optional]; extra == "test-py312"
80
- Requires-Dist: hydropandas[full]; extra == "test-py312"
81
- Requires-Dist: coverage; extra == "test-py312"
82
- Requires-Dist: codecov; extra == "test-py312"
83
- Requires-Dist: pytest; extra == "test-py312"
84
- Requires-Dist: pytest-cov; extra == "test-py312"
85
- Requires-Dist: pytest-dependency; extra == "test-py312"
86
- Requires-Dist: pytest-benchmark; extra == "test-py312"
87
- Requires-Dist: codacy-coverage; extra == "test-py312"
88
79
  Provides-Extra: docs
89
80
  Requires-Dist: pastastore[optional]; extra == "docs"
90
81
  Requires-Dist: sphinx_rtd_theme; extra == "docs"
@@ -102,7 +93,7 @@ Requires-Dist: nbsphinx_link; extra == "docs"
102
93
  # pastastore
103
94
 
104
95
  This module stores
105
- [Pastas](https://pastas.readthedocs.io/en/latest/) time series and models in a
96
+ [Pastas](https://pastas.readthedocs.io/latest/) time series and models in a
106
97
  database.
107
98
 
108
99
  Storing time series and models in a database allows the user to manage time
@@ -187,4 +178,4 @@ pstore.to_zip("my_backup.zip")
187
178
  ```
188
179
 
189
180
  For more elaborate examples, refer to the
190
- [Notebooks](https://pastastore.readthedocs.io/en/latest/examples.html#example-notebooks).
181
+ [Notebooks](https://pastastore.readthedocs.io/latest/examples.html#example-notebooks).
@@ -0,0 +1,28 @@
1
+ docs/conf.py,sha256=XcZUTmn9fGDhhu8k3mpaLu435SpIRNpABADCCTJJuag,6291
2
+ pastastore/__init__.py,sha256=cWwG9-YeiI4aOU0CDBGKbQgmKmmkcPd64YwPq2rRGt0,416
3
+ pastastore/base.py,sha256=B7sPe1eEpXFSeQsgrPXc5Mvp8Xkbhe_TxML6Zlp19Lk,48172
4
+ pastastore/connectors.py,sha256=MWekEj3CDspgEHKAm4Ml4kV-wHKPBlFgiVmq4ZPVlVM,50166
5
+ pastastore/datasets.py,sha256=FHVfmKqb8beEs9NONsWrCoJY37BmlvFLSEQ1VAFmE8A,6415
6
+ pastastore/plotting.py,sha256=y_20sAxhLelXLWs-aHHankICAMT-m1p3cIg68sIQO8A,46401
7
+ pastastore/store.py,sha256=yPg2jGWCbx3JhKQd75orVMoIiReWpHtkYDBlSa-kPDM,67303
8
+ pastastore/styling.py,sha256=0IEp_r-SpcaslShAZvZV6iuEhTG_YzNq-ad8krib3U0,2304
9
+ pastastore/util.py,sha256=31dzHaK6xdFHGDkYh49qGBq1dGel2m9r7i797S3WUpQ,28505
10
+ pastastore/version.py,sha256=JLSkXbkBpYWqHRJtx-UJKAiORL1Kn48xQAfAYq9PNik,1205
11
+ pastastore/yaml_interface.py,sha256=n6zjQ7ENrUvxszb6zE-jPLa-XVsoEOTJHQmRV1_fFt0,30818
12
+ pastastore/extensions/__init__.py,sha256=lCN9xfX1qefUzUbE2FQ12c6NjLbf5HoNo-D8cGb5CTw,461
13
+ pastastore/extensions/accessor.py,sha256=kftQM6dqMDoySbyTKcvmkjC5gJRp465KA18G4NVXUO0,367
14
+ pastastore/extensions/hpd.py,sha256=NAB9_24ClohVjZWN5erFgkcadhzdZqXOQUIz4aCycBY,27472
15
+ tests/conftest.py,sha256=TB0ZUH1m45gvQd_EZO7iudvhFw4JA-8rTJ71GT6Nf1w,5061
16
+ tests/test_001_import.py,sha256=g8AaJzWZ088A4B30_w-MrDfAVeeg8m78l--j7Onsklc,208
17
+ tests/test_002_connectors.py,sha256=k9etSRuSFVOrSEtZyxqsCF9GwIg0T7VdDJ2SjSe6i_s,7742
18
+ tests/test_003_pastastore.py,sha256=nhcUJHC2KiF9KREP_2uj_T2skKooUk13T1EVtkbwQnM,10051
19
+ tests/test_004_yaml.py,sha256=3hMNjb9s0S2rbmpyEjW6FDRAxfUZS_U1qoPl4wB-cCo,4440
20
+ tests/test_005_maps_plots.py,sha256=L0ppGf-cudsrdxteWy3qsV4We96DW4bCBE7c6jEm6aM,1866
21
+ tests/test_006_benchmark.py,sha256=VZG0bY7uz8DkfIZTgRCzkEDG8rguBEt_-mdGSMQLN2w,4930
22
+ tests/test_007_hpdextension.py,sha256=1QNUahq3hzqxjKbzsjofi9Yuyqe_oDGL0vWp6iouYe4,3004
23
+ tests/test_008_stressmodels.py,sha256=733fyCvuzjKcaLjvSMt5dTTLp-T4alzNJAToSxTIUug,4003
24
+ pastastore-1.9.0.dist-info/LICENSE,sha256=MB_6p4kXDCUsYNjslcMByBu6i7wMNRKPC36JnhzpN4o,1087
25
+ pastastore-1.9.0.dist-info/METADATA,sha256=XJLzfcZ8CKYUqQ8vJwZXrDq0fZdj1tLGVtsouX5EiSQ,7578
26
+ pastastore-1.9.0.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
27
+ pastastore-1.9.0.dist-info/top_level.txt,sha256=1bgyMk1p23f04RK83Jju2_YAQBwyoQD_fInxoPB4YRw,22
28
+ pastastore-1.9.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (75.6.0)
2
+ Generator: setuptools (75.8.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
tests/conftest.py CHANGED
@@ -1,18 +1,14 @@
1
1
  # ruff: noqa: D100 D103
2
2
  import importlib
3
3
  from importlib import metadata
4
- from platform import python_version
5
4
 
6
5
  import pandas as pd
7
6
  import pastas as ps
8
7
  import pytest
9
- from packaging.version import parse as parse_version
10
8
 
11
9
  import pastastore as pst
12
10
 
13
- IS_PY312 = parse_version(python_version()) >= parse_version("3.12.0")
14
-
15
- params = ["dict", "pas", "arcticdb"] if not IS_PY312 else ["dict", "pas"]
11
+ params = ["dict", "pas", "arcticdb"]
16
12
 
17
13
 
18
14
  def initialize_project(conn):
@@ -168,6 +168,7 @@ def test_create_models(pstore):
168
168
  ["oseries1", "oseries2"], store=True, progressbar=False
169
169
  )
170
170
  _ = pstore.conn.models
171
+ assert pstore.n_models == 2
171
172
 
172
173
 
173
174
  @pytest.mark.dependency
@@ -252,15 +253,12 @@ def test_update_ts_settings(request, pstore):
252
253
 
253
254
  ml2 = pstore.get_models(ml.name, update_ts_settings=True)
254
255
 
255
- try:
256
- assert ml2.oseries.settings["tmax"] == o.index[-1]
257
- assert ml2.stressmodels["recharge"].prec.settings["tmax"] == tmax
258
- assert ml2.stressmodels["recharge"].evap.settings["tmax"] == tmax
259
- assert ml2.stressmodels["prec"].stress[0].settings["tmax"] == p2.index[-1]
260
- except AssertionError:
261
- pstore.del_models("ml_oseries2")
262
- pstore.set_check_model_series_values(True)
263
- raise
256
+ assert ml2.oseries.settings["tmax"] == o.index[-1]
257
+ assert ml2.stressmodels["recharge"].prec.settings["tmax"] == tmax
258
+ assert ml2.stressmodels["recharge"].evap.settings["tmax"] == tmax
259
+ assert ml2.stressmodels["prec"].stress[0].settings["tmax"] == p2.index[-1]
260
+ pstore.del_models("ml_oseries2")
261
+ pstore.set_check_model_series_values(True)
264
262
 
265
263
 
266
264
  # @pytest.mark.dependency()
@@ -296,6 +294,16 @@ def test_to_from_zip(pstore):
296
294
  os.remove(zipname)
297
295
 
298
296
 
297
+ def test_load_pastastore_from_config_file(pstore):
298
+ if pstore.type == "pas" or pstore.type == "arcticdb":
299
+ path = (
300
+ pstore.conn.path if pstore.type == "pas" else pstore.conn.uri.split("//")[1]
301
+ )
302
+ fname = os.path.join(path, f"{pstore.conn.name}.pastastore")
303
+ pstore2 = pst.PastaStore.from_pastastore_config_file(fname)
304
+ assert not pstore2.empty
305
+
306
+
299
307
  def test_example_pastastore():
300
308
  from pastastore.datasets import example_pastastore
301
309
 
@@ -319,3 +327,12 @@ def test_meta_with_name(pstore):
319
327
  pstore.add_stress(s, "what_i_want", kind="special", metadata=smeta)
320
328
  assert "what_i_want" in pstore.stresses.index, "This is not right."
321
329
  pstore.del_stress("what_i_want")
330
+
331
+
332
+ @pytest.mark.dependency
333
+ def test_models_metadata(request, pstore):
334
+ # depends(request, [f"test_create_models[{pstore.type}]"])
335
+ pstore.create_models_bulk(["oseries1", "oseries2"], store=True, progressbar=False)
336
+ df = pstore.models.metadata
337
+ assert df.index.size == 2
338
+ assert (df["n_stressmodels"] == 1).all()
@@ -176,6 +176,3 @@ def test_benchmark_read_model_arcticdb(benchmark):
176
176
  conn = pst.ArcticDBConnector("test", uri)
177
177
  _ = benchmark(read_model, conn=conn)
178
178
  pst.util.delete_arcticdb_connector(conn=conn)
179
- import shutil
180
-
181
- shutil.rmtree("./arctic_db/")
@@ -1,28 +0,0 @@
1
- docs/conf.py,sha256=XcZUTmn9fGDhhu8k3mpaLu435SpIRNpABADCCTJJuag,6291
2
- pastastore/__init__.py,sha256=cWwG9-YeiI4aOU0CDBGKbQgmKmmkcPd64YwPq2rRGt0,416
3
- pastastore/base.py,sha256=hOvkgACew4fpLWumLfHA6PrxLDH1EvqdWjEvFbmxJR0,46436
4
- pastastore/connectors.py,sha256=QlaFcVEM_ZtOgLd3M2yZfgogcw0zNM08pVbnyRS3Mr8,48454
5
- pastastore/datasets.py,sha256=FHVfmKqb8beEs9NONsWrCoJY37BmlvFLSEQ1VAFmE8A,6415
6
- pastastore/plotting.py,sha256=t6gEeHVGzrwvM6q1l8V3OkklpU75O2Y4h6nKEHRWdjo,46416
7
- pastastore/store.py,sha256=HZtof9gIFSW2nQpc5FHMPb1RTjgo-QzIJxueywd4jDA,65840
8
- pastastore/styling.py,sha256=4xAY0FmhKrvmAGIuoMM7Uucww_X4KAxTpEoHlsxMldc,2280
9
- pastastore/util.py,sha256=KCUFV4GkocWaRpG57CdxzfkTXyTEpPjnxsKehYPVN7U,28249
10
- pastastore/version.py,sha256=vuqYDMX5ua14OQP04vqXH_0A_Ra2XErriiNPxgZEc5w,1205
11
- pastastore/yaml_interface.py,sha256=MddELxWe8_aqJRMUydOCbjoU1-ZodzxFKYnAaqJ5SqA,29947
12
- pastastore/extensions/__init__.py,sha256=lCN9xfX1qefUzUbE2FQ12c6NjLbf5HoNo-D8cGb5CTw,461
13
- pastastore/extensions/accessor.py,sha256=kftQM6dqMDoySbyTKcvmkjC5gJRp465KA18G4NVXUO0,367
14
- pastastore/extensions/hpd.py,sha256=fcXWb3BlWlogCbg7a2Gmha8P_eCh6zSGTyRlYp3mjXA,27466
15
- tests/conftest.py,sha256=u097z7LGAnviuzXPzvER9oPjsZWqdij1CJLnW_sPY8E,5258
16
- tests/test_001_import.py,sha256=g8AaJzWZ088A4B30_w-MrDfAVeeg8m78l--j7Onsklc,208
17
- tests/test_002_connectors.py,sha256=k9etSRuSFVOrSEtZyxqsCF9GwIg0T7VdDJ2SjSe6i_s,7742
18
- tests/test_003_pastastore.py,sha256=2nC0pU478iRbYKnVVSjh5F6PA_7SvFROwD6SABL2YSE,9370
19
- tests/test_004_yaml.py,sha256=3hMNjb9s0S2rbmpyEjW6FDRAxfUZS_U1qoPl4wB-cCo,4440
20
- tests/test_005_maps_plots.py,sha256=L0ppGf-cudsrdxteWy3qsV4We96DW4bCBE7c6jEm6aM,1866
21
- tests/test_006_benchmark.py,sha256=yuExF35qqxhw04uYMH3OIOlGr71c4AJSJDMjGD8GefY,4983
22
- tests/test_007_hpdextension.py,sha256=1QNUahq3hzqxjKbzsjofi9Yuyqe_oDGL0vWp6iouYe4,3004
23
- tests/test_008_stressmodels.py,sha256=733fyCvuzjKcaLjvSMt5dTTLp-T4alzNJAToSxTIUug,4003
24
- pastastore-1.8.0.dist-info/LICENSE,sha256=DtHftfUEm99KzgwLr3rQUTg8H3kAS0Z-p5WWJgLf_OY,1082
25
- pastastore-1.8.0.dist-info/METADATA,sha256=nWFELMkHg9yfp76Ib1xlnPdrp7RZh-9KyhW3CO3rOVY,8032
26
- pastastore-1.8.0.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
27
- pastastore-1.8.0.dist-info/top_level.txt,sha256=1bgyMk1p23f04RK83Jju2_YAQBwyoQD_fInxoPB4YRw,22
28
- pastastore-1.8.0.dist-info/RECORD,,