pastastore-1.8.0-py3-none-any.whl → pastastore-1.10.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pastastore/base.py CHANGED
@@ -332,7 +332,8 @@ class BaseConnector(ABC):
  self._clear_cache(libname)
  else:
  raise ItemInLibraryException(
- f"Item with name '{name}' already" f" in '{libname}' library!"
+ f"Time series with name '{name}' already in '{libname}' library! "
+ "Use overwrite=True to replace existing time series."
  )

  def _update_series(
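The reworded exception now tells the user how to replace an existing entry. A minimal sketch of that workflow; the `DictConnector`/`add_oseries` names are the public pastastore API as I understand it, and the exact `PastaStore` constructor signature may differ between versions::

    import pandas as pd
    import pastastore as pst

    pstore = pst.PastaStore(pst.DictConnector("example"))
    series = pd.Series(
        [1.0, 1.1, 1.2],
        index=pd.date_range("2024-01-01", periods=3, freq="D"),
        name="obs1",
    )
    pstore.add_oseries(series, "obs1", metadata={"x": 0.0, "y": 0.0})
    # adding the same name again raises ItemInLibraryException with the message above
    pstore.add_oseries(series, "obs1", metadata={"x": 0.0, "y": 0.0}, overwrite=True)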
@@ -575,7 +576,8 @@ class BaseConnector(ABC):
  )
  else:
  raise ItemInLibraryException(
- f"Model with name '{name}' " "already in 'models' library!"
+ f"Model with name '{name}' already in 'models' library! "
+ "Use overwrite=True to replace existing model."
  )
  self._clear_cache("_modelnames_cache")
  self._add_oseries_model_links(str(mldict["oseries"]["name"]), name)
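The same pattern applies to models; a sketch assuming `create_model` and `add_model` keep their usual pastastore signatures::

    ml = pstore.create_model("obs1", add_recharge=False)
    pstore.add_model(ml)                  # stored under ml.name
    pstore.add_model(ml, overwrite=True)  # replaces the stored model instead of raising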
@@ -1093,8 +1095,7 @@ class BaseConnector(ABC):
  """
  if prompt:
  ui = input(
- f"Do you want to empty '{libname}'"
- " library of all its contents? [y/N] "
+ f"Do you want to empty '{libname}' library of all its contents? [y/N] "
  )
  if ui.lower() != "y":
  return
@@ -1115,7 +1116,7 @@ class BaseConnector(ABC):
  ):
  self._del_item(libname, name)
  self._clear_cache(libname)
- print(f"Emptied library {libname} in {self.name}: " f"{self.__class__}")
+ print(f"Emptied library {libname} in {self.name}: {self.__class__}")

  def _iter_series(self, libname: str, names: Optional[List[str]] = None):
  """Iterate over time series in library (internal method).
@@ -1365,10 +1366,36 @@ class BaseConnector(ABC):
  class ModelAccessor:
  """Object for managing access to stored models.

- Provides dict-like access to models (i.e. PastaStore.models["model1"]), or allows
- adding models to the PastaStore using dict-like assignment (i.e.
- PastaStore.models["model1"] = ml), and it can serve as an iterator (i.e. [ml for ml
- in pstore.models]).
+ The ModelAccessor object allows dictionary-like assignment and access to models.
+ In addition it provides some useful utilities for working with stored models
+ in the database.
+
+ Examples
+ --------
+ Get a model by name::
+
+ >>> model = pstore.models["my_model"]
+
+ Store a model in the database::
+
+ >>> pstore.models["my_model_v2"] = model
+
+ Get model metadata dataframe::
+
+ >>> pstore.models.metadata
+
+ Number of models::
+
+ >>> len(pstore.models)
+
+ Random model::
+
+ >>> model = pstore.models.random()
+
+ Iterate over stored models::
+
+ >>> for ml in pstore.models:
+ >>> ml.solve()
  """

  def __init__(self, conn):
@@ -1382,8 +1409,11 @@ class ModelAccessor:
  self.conn = conn

  def __repr__(self):
- """Representation of the object is a list of modelnames."""
- return self.conn._modelnames_cache.__repr__()
+ """Representation contains the number of models and the list of model names."""
+ return (
+ f"<{self.__class__.__name__}> {len(self)} model(s): \n"
+ + self.conn._modelnames_cache.__repr__()
+ )

  def __getitem__(self, name: str):
  """Get model from store with model name as key.
@@ -1436,3 +1466,27 @@ class ModelAccessor:
  from random import choice

  return self.conn.get_models(choice(self.conn._modelnames_cache))
+
+ @property
+ def metadata(self):
+ """Dataframe with overview of models metadata."""
+ # NOTE: cannot be cached as this dataframe is not a property of the connector
+ # I'm not sure how to clear this cache when models are added/removed.
+ idx = pd.MultiIndex.from_tuples(
+ ((k, i) for k, v in self.conn.oseries_models.items() for i in v),
+ names=["oseries", "modelname"],
+ )
+ modeldf = pd.DataFrame(index=idx)
+ modeldf = modeldf.join(
+ self.conn.oseries, on=modeldf.index.get_level_values(0)
+ ).drop("key_0", axis=1)
+ modeldf["n_stressmodels"] = 0
+ for onam, mlnam in modeldf.index:
+ mldict = self.conn.get_models(mlnam, return_dict=True)
+ modeldf.loc[(onam, mlnam), "n_stressmodels"] = len(mldict["stressmodels"])
+ modeldf.loc[(onam, mlnam), "stressmodel_names"] = ",".join(
+ list(mldict["stressmodels"].keys())
+ )
+ for setting in mldict["settings"].keys():
+ modeldf.loc[(onam, mlnam), setting] = mldict["settings"][setting]
+ return modeldf
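A short sketch of how the new `models.metadata` overview might be used; the columns follow directly from the implementation above (oseries metadata, `n_stressmodels`, `stressmodel_names` and the model settings)::

    mdf = pstore.models.metadata
    # MultiIndex (oseries, modelname); e.g. list models with more than one stress model
    print(mdf[mdf["n_stressmodels"] > 1].index.get_level_values("modelname").tolist())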
pastastore/connectors.py CHANGED
@@ -370,8 +370,7 @@ class ConnectorUtil:
  raise TypeError("Expected pastas.Model or dict!")
  if name not in self.oseries.index:
  msg = (
- f"Cannot add model because oseries '{name}' "
- "is not contained in store."
+ f"Cannot add model because oseries '{name}' is not contained in store."
  )
  raise LookupError(msg)
  # expensive check
@@ -764,6 +763,7 @@ class ArcticDBConnector(BaseConnector, ConnectorUtil):
  """
  try:
  import arcticdb
+
  except ModuleNotFoundError as e:
  print("Please install arcticdb with `pip install arcticdb`!")
  raise e
@@ -777,6 +777,9 @@ class ArcticDBConnector(BaseConnector, ConnectorUtil):
  # for older versions of PastaStore, if oseries_models library is empty
  # populate oseries - models database
  self._update_all_oseries_model_links()
+ # write pstore file to store database info that can be used to load pstore
+ if "lmdb" in self.uri:
+ self.write_pstore_config_file()

  def _initialize(self, verbose: bool = True) -> None:
  """Initialize the libraries (internal method)."""
@@ -792,6 +795,29 @@ class ArcticDBConnector(BaseConnector, ConnectorUtil):
  )
  self.libs[libname] = self._get_library(libname)

+ def write_pstore_config_file(self, path: str = None) -> None:
+ """Write pstore configuration file to store database info."""
+ # NOTE: method is not private as theoretically an ArcticDB
+ # database could also be hosted in the cloud, in which case,
+ # writing this config in the folder holding the database
+ # is no longer possible. For those situations, the user can
+ # write this config file and specify the path it should be
+ # written to.
+ config = {
+ "connector_type": self.conn_type,
+ "name": self.name,
+ "uri": self.uri,
+ }
+ if path is None and "lmdb" in self.uri:
+ path = self.uri.split("://")[1]
+ elif path is None and "lmdb" not in self.uri:
+ raise ValueError("Please provide a path to write the pastastore file!")
+
+ with open(
+ os.path.join(path, f"{self.name}.pastastore"), "w", encoding="utf-8"
+ ) as f:
+ json.dump(config, f)
+
  def _library_name(self, libname: str) -> str:
  """Get full library name according to ArcticDB (internal method)."""
  return ".".join([self.name, libname])
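A sketch of how the new method might be called. For LMDB URIs the config file is written automatically on init (see the `__init__` hunk above); for non-LMDB (e.g. cloud-hosted) databases an explicit path is required. The URI and paths below are hypothetical::

    conn = pst.ArcticDBConnector("mydb", uri="lmdb://./arctic_db")
    # written automatically on init for lmdb URIs; write it elsewhere if desired:
    conn.write_pstore_config_file(path="./backups")  # -> ./backups/mydb.pastastore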
@@ -1165,6 +1191,7 @@ class PasConnector(BaseConnector, ConnectorUtil):
  whether to print message when database is initialized, by default True
  """
  self.name = name
+ self.parentdir = path
  self.path = os.path.abspath(os.path.join(path, self.name))
  self.relpath = os.path.relpath(self.path)
  self._initialize(verbose=verbose)
@@ -1172,6 +1199,8 @@ class PasConnector(BaseConnector, ConnectorUtil):
  # for older versions of PastaStore, if oseries_models library is empty
  # populate oseries_models library
  self._update_all_oseries_model_links()
+ # write pstore file to store database info that can be used to load pstore
+ self._write_pstore_config_file()

  def _initialize(self, verbose: bool = True) -> None:
  """Initialize the libraries (internal method)."""
@@ -1189,6 +1218,18 @@ class PasConnector(BaseConnector, ConnectorUtil):
  )
  setattr(self, f"lib_{val}", os.path.join(self.path, val))

+ def _write_pstore_config_file(self):
+ """Write pstore configuration file to store database info."""
+ config = {
+ "connector_type": self.conn_type,
+ "name": self.name,
+ "path": os.path.abspath(self.parentdir),
+ }
+ with open(
+ os.path.join(self.path, f"{self.name}.pastastore"), "w", encoding="utf-8"
+ ) as f:
+ json.dump(config, f)
+
  def _get_library(self, libname: str):
  """Get path to directory holding data.

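The resulting `<name>.pastastore` file is plain JSON holding just enough information to reconnect. A manual sketch (hypothetical paths; the `connector_type` value for `PasConnector` is assumed to be `"pas"`)::

    import json

    import pastastore as pst

    with open("./mydb/mydb.pastastore", encoding="utf-8") as f:
        cfg = json.load(f)
    # cfg looks like {"connector_type": "pas", "name": "mydb", "path": "/abs/parent/dir"}
    conn = pst.PasConnector(cfg["name"], cfg["path"])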
@@ -45,6 +45,13 @@ class HydroPandasExtension:
  """
  self._store = store

+ def __repr__(self):
+ """Return string representation of HydroPandasExtension."""
+ methods = "".join(
+ [f"\n - {meth}" for meth in dir(self) if not meth.startswith("_")]
+ )
+ return "HydroPandasExtension, available methods:" + methods
+
  def add_obscollection(
  self,
  libname: str,
@@ -259,7 +266,7 @@ class HydroPandasExtension:
  meteo_var: str = "RD",
  tmin: TimeType = None,
  tmax: TimeType = None,
- unit_multiplier: float = 1e-3,
+ unit_multiplier: float = 1e3,
  fill_missing_obs: bool = True,
  normalize_datetime_index: bool = True,
  **kwargs,
@@ -298,7 +305,7 @@ class HydroPandasExtension:
  meteo_var: str = "EV24",
  tmin: TimeType = None,
  tmax: TimeType = None,
- unit_multiplier: float = 1e-3,
+ unit_multiplier: float = 1e3,
  fill_missing_obs: bool = True,
  normalize_datetime_index: bool = True,
  **kwargs,
@@ -409,7 +416,7 @@ class HydroPandasExtension:
  meteo_var: str = "RD",
  tmin: Optional[TimeType] = None,
  tmax: Optional[TimeType] = None,
- unit_multiplier: float = 1e-3,
+ unit_multiplier: float = 1e3,
  normalize_datetime_index: bool = True,
  fill_missing_obs: bool = True,
  **kwargs,
@@ -428,7 +435,7 @@ class HydroPandasExtension:
  end time
  unit_multiplier : float, optional
  multiply unit by this value before saving it in the store,
- by default 1.0 (no conversion)
+ by default 1e3 (converting m to mm)
  fill_missing_obs : bool, optional
  if True, fill missing observations by getting observations from nearest
  station with data.
@@ -454,7 +461,7 @@ class HydroPandasExtension:
  meteo_var: str = "EV24",
  tmin: Optional[TimeType] = None,
  tmax: Optional[TimeType] = None,
- unit_multiplier: float = 1e-3,
+ unit_multiplier: float = 1e3,
  normalize_datetime_index: bool = True,
  fill_missing_obs: bool = True,
  **kwargs,
@@ -474,7 +481,7 @@ class HydroPandasExtension:
  end time
  unit_multiplier : float, optional
  multiply unit by this value before saving it in the store,
- by default 1.0 (no conversion)
+ by default 1e3 (converting m to mm)
  fill_missing_obs : bool, optional
  if True, fill missing observations by getting observations from nearest
  station with data.
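Per the updated docstrings, the new default reflects that the downloaded KNMI precipitation and evaporation series are in meters, while the store conventionally keeps them in millimeters; `unit_multiplier=1e3` converts the series before it is saved. A trivial sketch of that conversion with synthetic values::

    import pandas as pd

    knmi_m = pd.Series(
        [0.0012, 0.0, 0.0045],
        index=pd.date_range("2024-01-01", periods=3, freq="D"),
    )
    stored_mm = knmi_m * 1e3  # what ends up in the store with unit_multiplier=1e3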