pastastore 1.5.0__py3-none-any.whl → 1.6.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pastastore/base.py CHANGED
@@ -1471,6 +1471,7 @@ class ConnectorUtil:
1471
1471
  else:
1472
1472
  msg = "stress '{}' not present in library".format(name)
1473
1473
  raise KeyError(msg)
1474
+
1474
1475
  # hack for pcov w dtype object (when filled with NaNs on store?)
1475
1476
  if "fit" in mdict:
1476
1477
  if "pcov" in mdict["fit"]:
@@ -1534,7 +1535,7 @@ class ConnectorUtil:
1534
1535
  if isinstance(series, pd.Series):
1535
1536
  series.name = name
1536
1537
  # empty string on index name causes trouble when reading
1537
- # data from Arctic VersionStores
1538
+ # data from ArcticDB: TODO: check if still an issue?
1538
1539
  if series.index.name == "":
1539
1540
  series.index.name = None
1540
1541
 
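Below, a minimal standalone sketch of the workaround the updated comment refers to: an empty-string index name (a leftover from the old Arctic VersionStores) is reset to None so the series reads back cleanly. The series itself is illustrative.

```python
import pandas as pd

# illustrative series; an empty index name can appear when reading data back
series = pd.Series(
    [1.2, 1.3], index=pd.date_range("2024-01-01", periods=2), name="oseries1"
)
series.index.name = ""

# the workaround applied in ConnectorUtil above: reset empty index names to None
if series.index.name == "":
    series.index.name = None
```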
pastastore/connectors.py CHANGED
@@ -4,7 +4,6 @@ import json
4
4
  import os
5
5
  import warnings
6
6
  from copy import deepcopy
7
- from importlib import import_module
8
7
  from typing import Dict, Optional, Union
9
8
 
10
9
  import pandas as pd
@@ -17,195 +16,6 @@ FrameorSeriesUnion = Union[pd.DataFrame, pd.Series]
17
16
  warnings.showwarning = _custom_warning
18
17
 
19
18
 
20
- class ArcticConnector(BaseConnector, ConnectorUtil): # pragma: no cover
21
- """ArcticConnector object that connects to a running MongoDB database via Arctic."""
22
-
23
- conn_type = "arctic"
24
-
25
- def __init__(self, name: str, connstr: str):
26
- """Create an ArcticConnector object that connects to a MongoDB database.
27
-
28
- Parameters
29
- ----------
30
- name : str
31
- name of the database
32
- connstr : str
33
- connection string (e.g. 'mongodb://localhost:27017/')
34
- """
35
- warnings.warn(
36
- "ArcticConnector is deprecated. Please use a different "
37
- "connector, e.g. `pst.ArcticDBConnector`.",
38
- DeprecationWarning,
39
- stacklevel=1,
40
- )
41
- try:
42
- import arctic
43
- except ModuleNotFoundError as e:
44
- print(
45
- "Please install arctic (also requires "
46
- "a MongoDB instance running somewhere, e.g. "
47
- "MongoDB Community: \n"
48
- "https://docs.mongodb.com/manual/administration"
49
- "/install-community/)!"
50
- )
51
- raise e
52
- self.connstr = connstr
53
- self.name = name
54
-
55
- self.libs: dict = {}
56
- self.arc = arctic.Arctic(connstr)
57
- self._initialize()
58
- self.models = ModelAccessor(self)
59
- # for older versions of PastaStore, if oseries_models library is empty
60
- # populate oseries - models database
61
- self._update_all_oseries_model_links()
62
-
63
- def _initialize(self) -> None:
64
- """Initialize the libraries (internal method)."""
65
- for libname in self._default_library_names:
66
- if self._library_name(libname) not in self.arc.list_libraries():
67
- self.arc.initialize_library(self._library_name(libname))
68
- else:
69
- print(
70
- f"ArcticConnector: library "
71
- f"'{self._library_name(libname)}'"
72
- " already exists. Linking to existing library."
73
- )
74
- self.libs[libname] = self._get_library(libname)
75
-
76
- def _library_name(self, libname: str) -> str:
77
- """Get full library name according to Arctic (internal method)."""
78
- return ".".join([self.name, libname])
79
-
80
- def _get_library(self, libname: str):
81
- """Get Arctic library handle.
82
-
83
- Parameters
84
- ----------
85
- libname : str
86
- name of the library
87
-
88
- Returns
89
- -------
90
- lib : arctic.Library handle
91
- handle to the library
92
- """
93
- # get library handle
94
- lib = self.arc.get_library(self._library_name(libname))
95
- return lib
96
-
97
- def _add_item(
98
- self,
99
- libname: str,
100
- item: Union[FrameorSeriesUnion, Dict],
101
- name: str,
102
- metadata: Optional[Dict] = None,
103
- **_,
104
- ) -> None:
105
- """Add item to library (time series or model) (internal method).
106
-
107
- Parameters
108
- ----------
109
- libname : str
110
- name of the library
111
- item : Union[FrameorSeriesUnion, Dict]
112
- item to add, either time series or pastas.Model as dictionary
113
- name : str
114
- name of the item
115
- metadata : Optional[Dict], optional
116
- dictionary containing metadata, by default None
117
- """
118
- lib = self._get_library(libname)
119
- lib.write(name, item, metadata=metadata)
120
-
121
- def _get_item(self, libname: str, name: str) -> Union[FrameorSeriesUnion, Dict]:
122
- """Retrieve item from library (internal method).
123
-
124
- Parameters
125
- ----------
126
- libname : str
127
- name of the library
128
- name : str
129
- name of the item
130
-
131
- Returns
132
- -------
133
- item : Union[FrameorSeriesUnion, Dict]
134
- time series or model dictionary
135
- """
136
- lib = self._get_library(libname)
137
- return lib.read(name).data
138
-
139
- def _del_item(self, libname: str, name: str) -> None:
140
- """Delete items (series or models) (internal method).
141
-
142
- Parameters
143
- ----------
144
- libname : str
145
- name of library to delete item from
146
- name : str
147
- name of item to delete
148
- """
149
- lib = self._get_library(libname)
150
- lib.delete(name)
151
-
152
- def _get_metadata(self, libname: str, name: str) -> dict:
153
- """Retrieve metadata for an item (internal method).
154
-
155
- Parameters
156
- ----------
157
- libname : str
158
- name of the library
159
- name : str
160
- name of the item
161
-
162
- Returns
163
- -------
164
- dict
165
- dictionary containing metadata
166
- """
167
- lib = self._get_library(libname)
168
- return lib.read_metadata(name).metadata
169
-
170
- @property
171
- def oseries_names(self):
172
- """List of oseries names.
173
-
174
- Returns
175
- -------
176
- list
177
- list of oseries in library
178
- """
179
- return self._get_library("oseries").list_symbols()
180
-
181
- @property
182
- def stresses_names(self):
183
- """List of stresses names.
184
-
185
- Returns
186
- -------
187
- list
188
- list of stresses in library
189
- """
190
- return self._get_library("stresses").list_symbols()
191
-
192
- @property
193
- def model_names(self):
194
- """List of model names.
195
-
196
- Returns
197
- -------
198
- list
199
- list of models in library
200
- """
201
- return self._get_library("models").list_symbols()
202
-
203
- @property
204
- def oseries_with_models(self):
205
- """List of oseries with models."""
206
- return self._get_library("oseries_models").list_symbols()
207
-
208
-
209
19
  class ArcticDBConnector(BaseConnector, ConnectorUtil):
210
20
  """ArcticDBConnector object using ArcticDB to store data."""
211
21
 
@@ -388,234 +198,6 @@ class ArcticDBConnector(BaseConnector, ConnectorUtil):
388
198
  return self._get_library("oseries_models").list_symbols()
389
199
 
390
200
 
391
- class PystoreConnector(BaseConnector, ConnectorUtil): # pragma: no cover
392
- """PystoreConnector object using pystore as database backend."""
393
-
394
- conn_type = "pystore"
395
-
396
- def __init__(self, name: str, path: str):
397
- """Create a PystoreConnector object that points to a Pystore.
398
-
399
- Parameters
400
- ----------
401
- name : str
402
- name of the store
403
- path : str
404
- path to the pystore directory
405
- """
406
- warnings.warn(
407
- "PystoreConnector is deprecated. Please use a different "
408
- "connector, e.g. `pst.PasConnector`.",
409
- DeprecationWarning,
410
- stacklevel=1,
411
- )
412
- try:
413
- import pystore
414
- except ModuleNotFoundError as e:
415
- print(
416
- "Install pystore, follow instructions at "
417
- "https://github.com/ranaroussi/pystore#dependencies"
418
- )
419
- raise e
420
- self.name = name
421
- self.path = path
422
- pystore.set_path(self.path)
423
- self.store = pystore.store(self.name)
424
- self.libs: dict = {}
425
- self._initialize()
426
- self.models = ModelAccessor(self)
427
- # for older versions of PastaStore, if oseries_models library is empty
428
- # populate oseries - models database
429
- self._update_all_oseries_model_links()
430
-
431
- def _initialize(self) -> None:
432
- """Initialize the libraries (stores) (internal method)."""
433
- for libname in self._default_library_names:
434
- if libname in self.store.list_collections():
435
- print(
436
- f"PystoreConnector: library '{self.path}/{libname}' "
437
- "already exists. Linking to existing library."
438
- )
439
- lib = self.store.collection(libname)
440
- self.libs[libname] = lib
441
-
442
- def _get_library(self, libname: str):
443
- """Get Pystore library handle.
444
-
445
- Parameters
446
- ----------
447
- libname : str
448
- name of the library
449
-
450
- Returns
451
- -------
452
- Pystore.Collection handle
453
- handle to the library
454
- """
455
- # get library handle
456
- lib = self.store.collection(libname)
457
- return lib
458
-
459
- def _add_item(
460
- self,
461
- libname: str,
462
- item: Union[FrameorSeriesUnion, Dict],
463
- name: str,
464
- metadata: Optional[Dict] = None,
465
- overwrite: bool = False,
466
- ) -> None:
467
- """Add item to library (time series or model) (internal method).
468
-
469
- Parameters
470
- ----------
471
- libname : str
472
- name of the library
473
- item : Union[FrameorSeriesUnion, Dict]
474
- item to add, either time series or pastas.Model as dictionary
475
- name : str
476
- name of the item
477
- metadata : Optional[Dict], optional
478
- dictionary containing metadata, by default None
479
- overwrite : bool, optional
480
- overwrite item if it already exists, by default False.
481
- """
482
- # convert to DataFrame because pystore doesn't accept pandas.Series
483
- # (maybe has an easy fix, but converting w to_frame for now)
484
- if isinstance(item, pd.Series):
485
- s = item.to_frame(name=name)
486
- is_type = "series"
487
- elif isinstance(item, dict):
488
- s = pd.DataFrame() # empty DataFrame as placeholder
489
- jsondict = json.loads(json.dumps(item, cls=PastasEncoder, indent=4))
490
- metadata = jsondict # model dict is stored in metadata
491
- is_type = "series"
492
- elif isinstance(item, list):
493
- s = pd.Series(item).to_frame(name="modelnames")
494
- is_type = "list"
495
- elif isinstance(item, pd.DataFrame):
496
- s = item
497
- is_type = "dataframe"
498
-
499
- # store info about input type to ensure same type is returned
500
- if metadata is None:
501
- metadata = {"_is_type": is_type}
502
- else:
503
- metadata["_is_type"] = is_type
504
-
505
- lib = self._get_library(libname)
506
- lib.write(name, s, metadata=metadata, overwrite=overwrite)
507
-
508
- def _get_item(self, libname: str, name: str) -> Union[FrameorSeriesUnion, Dict]:
509
- """Retrieve item from pystore library (internal method).
510
-
511
- Parameters
512
- ----------
513
- libname : str
514
- name of the library
515
- name : str
516
- name of the item
517
-
518
- Returns
519
- -------
520
- item : Union[FrameorSeriesUnion, Dict]
521
- time series or model dictionary
522
- """
523
- load_mod = import_module("pastas.io.pas") # type: ignore
524
- lib = self._get_library(libname)
525
- # hack for storing models, stored as metadata
526
- if libname == "models":
527
- jsonpath = lib._item_path(name).joinpath("metadata.json")
528
- s = load_mod.load(jsonpath) # type: ignore
529
- else:
530
- # read series and convert to pandas
531
- item = lib.item(name)
532
- s = item.to_pandas()
533
- # remove _is_type key and return correct type
534
- is_type = item.metadata.pop("_is_type")
535
- if is_type == "series":
536
- s = s.squeeze()
537
- elif is_type == "list":
538
- s = s["modelnames"].tolist()
539
- return s
540
-
541
- def _del_item(self, libname: str, name: str) -> None:
542
- """Delete data from the store (internal method).
543
-
544
- Parameters
545
- ----------
546
- libname : str
547
- name of the library
548
- name : str
549
- name of the item to delete
550
- """
551
- lib = self._get_library(libname)
552
- lib.delete_item(name)
553
- self._clear_cache(libname)
554
-
555
- def _get_metadata(self, libname: str, name: str) -> dict:
556
- """Read metadata from pystore (internal method).
557
-
558
- Parameters
559
- ----------
560
- libname : str
561
- name of the library the series are in ("oseries" or "stresses")
562
- name : str
563
- name of item to load metadata for
564
-
565
- Returns
566
- -------
567
- imeta : dict
568
- dictionary containing metadata
569
- """
570
- from pystore.utils import read_metadata
571
-
572
- lib = self._get_library(libname)
573
- imeta = read_metadata(lib._item_path(name))
574
- if "name" not in imeta.keys():
575
- imeta["name"] = name
576
- if "_is_type" in imeta.keys():
577
- imeta.pop("_is_type")
578
- return imeta
579
-
580
- @property
581
- def oseries_names(self):
582
- """List of oseries names.
583
-
584
- Returns
585
- -------
586
- list
587
- list of oseries in library
588
- """
589
- return list(self._get_library("oseries").list_items())
590
-
591
- @property
592
- def stresses_names(self):
593
- """List of stresses names.
594
-
595
- Returns
596
- -------
597
- list
598
- list of stresses in library
599
- """
600
- return list(self._get_library("stresses").list_items())
601
-
602
- @property
603
- def model_names(self):
604
- """List of model names.
605
-
606
- Returns
607
- -------
608
- list
609
- list of models in library
610
- """
611
- return list(self._get_library("models").list_items())
612
-
613
- @property
614
- def oseries_with_models(self):
615
- """List of oseries with models."""
616
- return list(self._get_library("oseries_models").list_items())
617
-
618
-
619
201
  class DictConnector(BaseConnector, ConnectorUtil):
620
202
  """DictConnector object that stores timeseries and models in dictionaries."""
621
203
 
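With the deprecated ArcticConnector and PystoreConnector removed above, the remaining backends are ArcticDBConnector, PasConnector and DictConnector. A hedged migration sketch; the database name and lmdb path are illustrative.

```python
import pastastore as pst

# ArcticDB-backed store: a name plus an arcticdb URI replaces the old
# MongoDB connection string used by the removed ArcticConnector
conn = pst.ArcticDBConnector("my_db", "lmdb://./arctic_db")
pstore = pst.PastaStore(conn)

# alternatives kept in 1.6.0 (path is illustrative):
# conn = pst.DictConnector("my_db")                 # in-memory
# conn = pst.PasConnector("my_db", "./pastas_db")   # .pas (JSON) files on disk
```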
pastastore/datasets.py CHANGED
@@ -176,8 +176,7 @@ def _default_connector(conntype: str):
176
176
  Parameters
177
177
  ----------
178
178
  conntype : str
179
- name of connector (DictConnector, PasConnector,
180
- ArcticConnector, ArcticDBConnector or PystoreConnector)
179
+ name of connector (DictConnector, PasConnector, ArcticDBConnector)
181
180
 
182
181
  Returns
183
182
  -------
@@ -185,14 +184,9 @@ def _default_connector(conntype: str):
185
184
  default Connector based on type.
186
185
  """
187
186
  Conn = getattr(pst, conntype)
188
- if Conn.conn_type == "arctic":
189
- connstr = "mongodb://localhost:27017/"
190
- conn = Conn("my_db", connstr)
191
- elif Conn.conn_type == "arcticdb":
187
+ if Conn.conn_type == "arcticdb":
192
188
  uri = "lmdb://./arctic_db"
193
189
  conn = Conn("my_db", uri)
194
- elif Conn.conn_type == "pystore":
195
- conn = Conn("my_db", "./pystore_db")
196
190
  elif Conn.conn_type == "dict":
197
191
  conn = Conn("my_db")
198
192
  elif Conn.conn_type == "pas":
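A hedged sketch of what the trimmed-down `_default_connector` dispatch above now builds for each remaining `conn_type`; the `pas` branch is truncated in this hunk, so its path argument is assumed.

```python
import pastastore as pst

# conn_type == "arcticdb": local lmdb-backed ArcticDB store
conn = pst.ArcticDBConnector("my_db", "lmdb://./arctic_db")

# conn_type == "dict": in-memory store
conn = pst.DictConnector("my_db")

# conn_type == "pas": assumed to be a PasConnector pointing at a local directory
# conn = pst.PasConnector("my_db", "./pastas_db")
```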
pastastore/store.py CHANGED
@@ -1,9 +1,10 @@
1
1
  """Module containing the PastaStore object for managing time series and models."""
2
2
 
3
3
  import json
4
+ import logging
4
5
  import os
5
6
  import warnings
6
- from typing import List, Literal, Optional, Tuple, Union
7
+ from typing import Dict, List, Literal, Optional, Tuple, Union
7
8
 
8
9
  import numpy as np
9
10
  import pandas as pd
@@ -16,12 +17,14 @@ from pastastore.base import BaseConnector
16
17
  from pastastore.connectors import DictConnector
17
18
  from pastastore.plotting import Maps, Plots
18
19
  from pastastore.util import _custom_warning
19
- from pastastore.version import PASTAS_GEQ_150
20
+ from pastastore.version import PASTAS_GEQ_150, PASTAS_LEQ_022
20
21
  from pastastore.yaml_interface import PastastoreYAML
21
22
 
22
23
  FrameorSeriesUnion = Union[pd.DataFrame, pd.Series]
23
24
  warnings.showwarning = _custom_warning
24
25
 
26
+ logger = logging.getLogger(__name__)
27
+
25
28
 
26
29
  class PastaStore:
27
30
  """PastaStore object for managing pastas time series and models.
@@ -30,9 +33,8 @@ class PastaStore:
30
33
  the database. Different Connectors are available, e.g.:
31
34
 
32
35
  - PasConnector for storing all data as .pas (JSON) files on disk (recommended)
33
- - DictConenctor for storing all data in dictionaries (in-memory)
34
- - ArcticConnector for saving data to MongoDB using the Arctic module
35
- - PystoreConnector for saving data to disk using the Pystore module
36
+ - ArcticDBConnector for saving data on disk using the arcticdb package
37
+ - DictConnector for storing all data in dictionaries (in-memory)
36
38
 
37
39
  Parameters
38
40
  ----------
@@ -43,6 +45,8 @@ class PastaStore:
43
45
  name of the PastaStore, by default takes the name of the Connector object
44
46
  """
45
47
 
48
+ _accessors = set()
49
+
46
50
  def __init__(
47
51
  self,
48
52
  connector: Optional[BaseConnector] = None,
@@ -668,7 +672,7 @@ class PastaStore:
668
672
  def create_model(
669
673
  self,
670
674
  name: str,
671
- modelname: str = None,
675
+ modelname: Optional[str] = None,
672
676
  add_recharge: bool = True,
673
677
  add_ar_noisemodel: bool = False,
674
678
  recharge_name: str = "recharge",
@@ -685,7 +689,7 @@ class PastaStore:
685
689
  add recharge to the model by looking for the closest
686
690
  precipitation and evaporation time series in the stresses
687
691
  library, by default True
688
- add_ar1_noisemodel : bool, optional
692
+ add_ar_noisemodel : bool, optional
689
693
  add AR(1) noise model to the model, by default False
690
694
  recharge_name : str
691
695
  name of the RechargeModel
@@ -706,7 +710,7 @@ class PastaStore:
706
710
  meta = self.conn.get_metadata("oseries", name, as_frame=False)
707
711
  ts = self.conn.get_oseries(name)
708
712
 
709
- # convert to Timeseries and create model
713
+ # convert to time series and create model
710
714
  if not ts.dropna().empty:
711
715
  if modelname is None:
712
716
  modelname = name
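A short usage sketch of `create_model` with the corrected `add_ar_noisemodel` argument documented above; the store contents (an oseries "head_stn" plus prec/evap stresses with x/y metadata) and the PasConnector path are assumptions.

```python
import pastastore as pst

pstore = pst.PastaStore(pst.PasConnector("my_db", "./pastas_db"))

# build a model for an existing oseries; the nearest prec/evap stresses are
# added as recharge, plus an AR(1) noise model (requires a recent pastas)
ml = pstore.create_model("head_stn", add_recharge=True, add_ar_noisemodel=True)

# store the result via the connector (signature as used elsewhere in this diff)
pstore.conn.add_model(ml, overwrite=True)
```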
@@ -811,50 +815,363 @@ class PastaStore:
811
815
  recharge_name : str
812
816
  name of the RechargeModel
813
817
  """
814
- # get nearest prec and evap stns
815
- if "prec" not in self.stresses.kind.values:
816
- raise ValueError(
817
- "No stresses with kind='prec' found in store. "
818
- "add_recharge() requires stresses with kind='prec'!"
819
- )
820
- if "evap" not in self.stresses.kind.values:
821
- raise ValueError(
822
- "No stresses with kind='evap' found in store. "
823
- "add_recharge() requires stresses with kind='evap'!"
824
- )
825
- names = []
826
- for var in ("prec", "evap"):
827
- try:
828
- name = self.get_nearest_stresses(ml.oseries.name, kind=var).iloc[0, 0]
829
- except AttributeError as e:
830
- msg = "No precipitation or evaporation time series found!"
831
- raise Exception(msg) from e
832
- if isinstance(name, float):
833
- if np.isnan(name):
818
+ if recharge is None:
819
+ recharge = ps.rch.Linear()
820
+ if rfunc is None:
821
+ rfunc = ps.Exponential
822
+
823
+ self.add_stressmodel(
824
+ ml,
825
+ stresses={"prec": "nearest", "evap": "nearest"},
826
+ rfunc=rfunc,
827
+ stressmodel=ps.RechargeModel,
828
+ stressmodel_name=recharge_name,
829
+ recharge=recharge,
830
+ )
831
+
832
+ def _parse_stresses(
833
+ self,
834
+ stresses: Union[str, List[str], Dict[str, str]],
835
+ kind: Optional[str],
836
+ stressmodel,
837
+ oseries: Optional[str] = None,
838
+ ):
839
+ # parse stresses for RechargeModel, allow list of len 2 or 3 and
840
+ # set correct kwarg names
841
+ if stressmodel._name == "RechargeModel":
842
+ if isinstance(stresses, list):
843
+ if len(stresses) == 2:
844
+ stresses = {
845
+ "prec": stresses[0],
846
+ "evap": stresses[1],
847
+ }
848
+ elif len(stresses) == 3:
849
+ stresses = {
850
+ "prec": stresses[0],
851
+ "evap": stresses[1],
852
+ "temp": stresses[2],
853
+ }
854
+ else:
834
855
  raise ValueError(
835
- f"Unable to find nearest '{var}' stress! "
836
- "Check x and y coordinates."
856
+ "RechargeModel requires 2 or 3 stress names, "
857
+ f"got: {len(stresses)}!"
837
858
  )
859
+ # if stresses is list, create dictionary normally
860
+ elif isinstance(stresses, list):
861
+ stresses = {"stress": stresses}
862
+ # if stresses is str, make it a list of len 1
863
+ elif isinstance(stresses, str):
864
+ stresses = {"stress": [stresses]}
865
+
866
+ # check if stresses is a dictionary, else raise TypeError
867
+ if not isinstance(stresses, dict):
868
+ raise TypeError("stresses must be a list, string or dictionary!")
869
+
870
+ # if no kind specified, set to well for WellModel
871
+ if stressmodel._name == "WellModel":
872
+ if kind is None:
873
+ kind = "well"
874
+
875
+ # store a copy of the user input for kind
876
+ if isinstance(kind, list):
877
+ _kind = kind.copy()
878
+ else:
879
+ _kind = kind
880
+
881
+ # create empty list for gathering metadata
882
+ metadata = []
883
+ # loop over stresses keys/values
884
+ for i, (k, v) in enumerate(stresses.items()):
885
+ # if entry in dictionary is str, make it list of len 1
886
+ if isinstance(v, str):
887
+ v = [v]
888
+ # parse value
889
+ if isinstance(v, list):
890
+ for item in v:
891
+ names = [] # empty list for names
892
+ # parse nearest
893
+ if item.startswith("nearest"):
894
+ # check oseries defined if nearest option is used
895
+ if not oseries:
896
+ raise ValueError(
897
+ "Getting nearest stress(es) requires oseries name!"
898
+ )
899
+ try:
900
+ if len(item.split()) == 3: # nearest <n> <kind>
901
+ n = int(item.split()[1])
902
+ kind = item.split()[2]
903
+ elif len(item.split()) == 2: # nearest <n> | <kind>
904
+ try:
905
+ n = int(item.split()[1]) # try converting to <n>
906
+ except ValueError:
907
+ n = 1
908
+ kind = item.split()[1] # interpret as <kind>
909
+ else: # nearest
910
+ n = 1
911
+ # if RechargeModel, we can infer kind
912
+ if (
913
+ _kind is None
914
+ and stressmodel._name == "RechargeModel"
915
+ ):
916
+ kind = k
917
+ elif _kind is None: # catch no kind with bare nearest
918
+ raise ValueError(
919
+ "Bare 'nearest' found but no kind specified."
920
+ )
921
+ elif isinstance(_kind, list):
922
+ kind = _kind[i] # if multiple kind, select i-th
923
+ except Exception as e:
924
+ # raise if nearest parsing failed
925
+ raise ValueError(
926
+ f"Could not parse stresses: '{item}'! "
927
+ "When using option 'nearest', use 'nearest' and specify"
928
+ " kind, or 'nearest <kind>' or 'nearest <n> <kind>'!"
929
+ ) from e
930
+ # check if kind exists at all
931
+ if kind not in self.stresses.kind.values:
932
+ raise ValueError(
933
+ f"Could not find stresses with kind='{kind}'!"
934
+ )
935
+ # get stress names of <n> nearest <kind> stresses
936
+ inames = self.get_nearest_stresses(
937
+ oseries, kind=kind, n=n
938
+ ).iloc[0]
939
+ # check if any NaNs in result
940
+ if inames.isna().any():
941
+ nkind = (self.stresses.kind == kind).sum()
942
+ raise ValueError(
943
+ f"Could not find {n} nearest stress(es) for '{kind}'! "
944
+ f"There are only {nkind} '{kind}' stresses."
945
+ )
946
+ # append names
947
+ names += inames.tolist()
948
+ else:
949
+ # assume name is name of stress
950
+ names.append(item)
951
+ # get stresses and metadata
952
+ stress_series, imeta = self.get_stresses(
953
+ names, return_metadata=True, squeeze=True
954
+ )
955
+ # replace stress name(s) with time series
956
+ if len(names) > 1:
957
+ stresses[k] = list(stress_series.values())
958
+ else:
959
+ stresses[k] = stress_series
960
+ # gather metadata
961
+ if isinstance(imeta, list):
962
+ metadata += imeta
963
+ else:
964
+ metadata.append(imeta)
965
+
966
+ return stresses, metadata
967
+
968
+ def get_stressmodel(
969
+ self,
970
+ stresses: Union[str, List[str], Dict[str, str]],
971
+ stressmodel=ps.StressModel,
972
+ stressmodel_name: Optional[str] = None,
973
+ rfunc=ps.Exponential,
974
+ rfunc_kwargs: Optional[dict] = None,
975
+ kind: Optional[Union[List[str], str]] = None,
976
+ oseries: Optional[str] = None,
977
+ **kwargs,
978
+ ):
979
+ """Get a Pastas stressmodel from stresses time series in Pastastore.
980
+
981
+ Supports "nearest" selection. Any stress name can be replaced by
982
+ "nearest [<n>] <kind>" where <n> is optional and represents the number of
983
+ nearest stresses and <kind> represents the kind of stress to
984
+ consider. <kind> can also be specified directly with the `kind` kwarg.
985
+
986
+ Note: the 'nearest' option requires the oseries name to be provided.
987
+ Additionally, 'x' and 'y' metadata must be stored for oseries and stresses.
988
+
989
+ Parameters
990
+ ----------
991
+ stresses : str, list of str, or dict
992
+ name(s) of the time series to use for the stressmodel, or dictionary
993
+ with key(s) and value(s) as time series name(s). Options include:
994
+ - name of stress: `"prec_stn"`
995
+ - list of stress names: `["prec_stn", "evap_stn"]`
996
+ - dict for RechargeModel: `{"prec": "prec_stn", "evap": "evap_stn"}`
997
+ - dict for StressModel: `{"stress": "well1"}`
998
+ - nearest, specifying kind: `"nearest well"`
999
+ - nearest specifying number and kind: `"nearest 2 well"`
1000
+ stressmodel : str or class
1001
+ stressmodel class to use, by default ps.StressModel
1002
+ stressmodel_name : str, optional
1003
+ name of the stressmodel, by default None, which uses the stress name,
1004
+ if there is 1 stress otherwise the name of the stressmodel type. For
1005
+ RechargeModels, the name defaults to 'recharge'.
1006
+ rfunc : str or class
1007
+ response function class to use, by default ps.Exponential
1008
+ rfunc_kwargs : dict, optional
1009
+ keyword arguments to pass to the response function, by default None
1010
+ kind : str or list of str, optional
1011
+ specify kind of stress(es) to use, by default None, useful in combination
1012
+ with 'nearest' option for defining stresses
1013
+ oseries : str, optional
1014
+ name of the oseries to use for the stressmodel, by default None, used when
1015
+ 'nearest' option is used for defining stresses.
1016
+ **kwargs
1017
+ additional keyword arguments to pass to the stressmodel
1018
+
1019
+ Returns
1020
+ -------
1021
+ stressmodel : pastas.StressModel
1022
+ pastas StressModel that can be added to pastas Model.
1023
+ """
1024
+ # get stressmodel class, if str was provided
1025
+ if isinstance(stressmodel, str):
1026
+ stressmodel = getattr(ps, stressmodel)
1027
+
1028
+ # parse stresses names to get time series and metadata
1029
+ stresses, metadata = self._parse_stresses(
1030
+ stresses=stresses, stressmodel=stressmodel, kind=kind, oseries=oseries
1031
+ )
1032
+
1033
+ # get stressmodel name if not provided
1034
+ if stressmodel_name is None:
1035
+ if stressmodel._name == "RechargeModel":
1036
+ stressmodel_name = "recharge"
1037
+ elif len(metadata) == 1:
1038
+ stressmodel_name = stresses["stress"].squeeze().name
838
1039
  else:
839
- names.append(name)
840
- if len(names) == 0:
841
- msg = "No precipitation or evaporation time series found!"
842
- raise Exception(msg)
843
-
844
- # get data
845
- tsdict = self.conn.get_stresses(names)
846
- metadata = self.conn.get_metadata("stresses", names, as_frame=False)
847
- # add recharge to model
848
- rch = ps.RechargeModel(
849
- tsdict[names[0]],
850
- tsdict[names[1]],
1040
+ stressmodel_name = stressmodel._name
1041
+
1042
+ # check if metadata is list of len 1 and unpack
1043
+ if isinstance(metadata, list) and len(metadata) == 1:
1044
+ metadata = metadata[0]
1045
+
1046
+ # get stressmodel time series settings
1047
+ if kind and "settings" not in kwargs:
1048
+ # try using kind to get predefined settings options
1049
+ if isinstance(kind, str):
1050
+ kwargs["settings"] = ps.rcParams["timeseries"].get(kind, None)
1051
+ else:
1052
+ kwargs["settings"] = [
1053
+ ps.rcParams["timeseries"].get(ikind, None) for ikind in kind
1054
+ ]
1055
+ elif kind is None and "settings" not in kwargs:
1056
+ # try using kind stored in metadata to get predefined settings options
1057
+ if isinstance(metadata, list):
1058
+ kwargs["settings"] = [
1059
+ ps.rcParams["timeseries"].get(imeta.get("kind", None), None)
1060
+ for imeta in metadata
1061
+ ]
1062
+ elif isinstance(metadata, dict):
1063
+ kwargs["settings"] = ps.rcParams["timeseries"].get(
1064
+ metadata.get("kind", None), None
1065
+ )
1066
+
1067
+ # get rfunc class if str was provided
1068
+ if isinstance(rfunc, str):
1069
+ rfunc = getattr(ps, rfunc)
1070
+
1071
+ # create empty rfunc_kwargs if not provided
1072
+ if rfunc_kwargs is None:
1073
+ rfunc_kwargs = {}
1074
+
1075
+ # special for WellModels
1076
+ if stressmodel._name == "WellModel":
1077
+ names = [s.squeeze().name for s in stresses["stress"]]
1078
+ # check oseries is provided
1079
+ if oseries is None:
1080
+ raise ValueError("WellModel requires 'oseries' to compute distances!")
1081
+ # compute distances and add to kwargs
1082
+ distances = (
1083
+ self.get_distances(oseries=oseries, stresses=names).T.squeeze().values
1084
+ )
1085
+ kwargs["distances"] = distances
1086
+ # set settings to well
1087
+ if "settings" not in kwargs:
1088
+ kwargs["settings"] = "well"
1089
+ # override rfunc and set to HantushWellModel
1090
+ rfunc = ps.HantushWellModel
1091
+
1092
+ # do not add metadata for pastas 0.22 and WellModel
1093
+ if not PASTAS_LEQ_022 and (stressmodel._name != "WellModel"):
1094
+ kwargs["metadata"] = metadata
1095
+
1096
+ return stressmodel(
1097
+ **stresses,
1098
+ rfunc=rfunc(**rfunc_kwargs),
1099
+ name=stressmodel_name,
1100
+ **kwargs,
1101
+ )
1102
+
1103
+ def add_stressmodel(
1104
+ self,
1105
+ ml: Union[ps.Model, str],
1106
+ stresses: Union[str, List[str], Dict[str, str]],
1107
+ stressmodel=ps.StressModel,
1108
+ stressmodel_name: Optional[str] = None,
1109
+ rfunc=ps.Exponential,
1110
+ rfunc_kwargs: Optional[dict] = None,
1111
+ kind: Optional[Union[List[str], str]] = None,
1112
+ **kwargs,
1113
+ ):
1114
+ """Add a pastas StressModel from stresses time series in Pastastore.
1115
+
1116
+ Supports "nearest" selection. Any stress name can be replaced by
1117
+ "nearest [<n>] <kind>" where <n> is optional and represents the number of
1118
+ nearest stresses and <kind> and represents the kind of stress to
1119
+ consider. <kind> can also be specified directly with the `kind` kwarg.
1120
+
1121
+ Note: the 'nearest' option requires the oseries name to be provided.
1122
+ Additionally, 'x' and 'y' metadata must be stored for oseries and stresses.
1123
+
1124
+ Parameters
1125
+ ----------
1126
+ ml : pastas.Model or str
1127
+ pastas.Model object to add StressModel to, if passed as string,
1128
+ model is loaded from store, the stressmodel is added and then written
1129
+ back to the store.
1130
+ stresses : str, list of str, or dict
1131
+ name(s) of the time series to use for the stressmodel, or dictionary
1132
+ with key(s) and value(s) as time series name(s). Options include:
1133
+ - name of stress: `"prec_stn"`
1134
+ - list of stress names: `["prec_stn", "evap_stn"]`
1135
+ - dict for RechargeModel: `{"prec": "prec_stn", "evap": "evap_stn"}`
1136
+ - dict for StressModel: `{"stress": "well1"}`
1137
+ - nearest, specifying kind: `"nearest well"`
1138
+ - nearest specifying number and kind: `"nearest 2 well"`
1139
+ stressmodel : str or class
1140
+ stressmodel class to use, by default ps.StressModel
1141
+ stressmodel_name : str, optional
1142
+ name of the stressmodel, by default None, which uses the stress name,
1143
+ if there is 1 stress otherwise the name of the stressmodel type. For
1144
+ RechargeModels, the name defaults to 'recharge'.
1145
+ rfunc : str or class
1146
+ response function class to use, by default ps.Exponential
1147
+ rfunc_kwargs : dict, optional
1148
+ keyword arguments to pass to the response function, by default None
1149
+ kind : str or list of str, optional
1150
+ specify kind of stress(es) to use, by default None, useful in combination
1151
+ with 'nearest' option for defining stresses
1152
+ **kwargs
1153
+ additional keyword arguments to pass to the stressmodel
1154
+ """
1155
+ sm = self.get_stressmodel(
1156
+ stresses=stresses,
1157
+ stressmodel=stressmodel,
1158
+ stressmodel_name=stressmodel_name,
851
1159
  rfunc=rfunc,
852
- name=recharge_name,
853
- recharge=recharge,
854
- settings=("prec", "evap"),
855
- metadata=metadata,
1160
+ rfunc_kwargs=rfunc_kwargs,
1161
+ kind=kind,
1162
+ oseries=ml if isinstance(ml, str) else ml.oseries.name,
1163
+ **kwargs,
856
1164
  )
857
- ml.add_stressmodel(rch)
1165
+ if isinstance(ml, str):
1166
+ ml = self.get_model(ml)
1167
+ ml.add_stressmodel(sm)
1168
+ self.conn.add_model(ml, overwrite=True)
1169
+ logger.info(
1170
+ f"Stressmodel '{sm.name}' added to model '{ml.name}' "
1171
+ "and stored in database."
1172
+ )
1173
+ else:
1174
+ ml.add_stressmodel(sm)
858
1175
 
859
1176
  def solve_models(
860
1177
  self,
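Most of the store.py changes above introduce `get_stressmodel`/`add_stressmodel` with the "nearest" selection syntax and rewrite `add_recharge` on top of them. A hedged usage sketch; the store contents (oseries "head_stn", stresses of kind "prec", "evap" and "well" with x/y metadata) and the connector path are assumptions.

```python
import pastas as ps
import pastastore as pst

pstore = pst.PastaStore(pst.PasConnector("my_db", "./pastas_db"))
ml = pstore.create_model("head_stn", add_recharge=False)

# recharge from the nearest precipitation and evaporation stresses
pstore.add_stressmodel(
    ml,
    stresses={"prec": "nearest", "evap": "nearest"},
    stressmodel=ps.RechargeModel,
)

# well influence from the 2 nearest stresses of kind "well"; rfunc is switched
# to HantushWellModel and distances are computed internally, per the code above
pstore.add_stressmodel(ml, stresses="nearest 2 well", stressmodel=ps.WellModel)

pstore.conn.add_model(ml, overwrite=True)

# passing the model name instead of the Model object loads it from the store,
# adds the stressmodel and writes the updated model back automatically
pstore.add_stressmodel("head_stn", stresses="nearest prec", stressmodel="StressModel")
```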
pastastore/util.py CHANGED
@@ -1,7 +1,6 @@
1
1
  """Useful utilities for pastastore."""
2
2
 
3
3
  import os
4
- import warnings
5
4
  from typing import Dict, List, Optional, Union
6
5
 
7
6
  import numpy as np
@@ -24,111 +23,6 @@ class ItemInLibraryException(Exception):
24
23
  pass
25
24
 
26
25
 
27
- # TODO: remove in future version
28
- def delete_pystore_connector(
29
- conn=None,
30
- path: Optional[str] = None,
31
- name: Optional[str] = None,
32
- libraries: Optional[List[str]] = None,
33
- ) -> None: # pragma: no cover
34
- """Delete libraries from pystore.
35
-
36
- Parameters
37
- ----------
38
- conn : PystoreConnector, optional
39
- PystoreConnector object
40
- path : str, optional
41
- path to pystore
42
- name : str, optional
43
- name of the pystore
44
- libraries : Optional[List[str]], optional
45
- list of library names to delete, by default None which deletes
46
- all libraries
47
- """
48
- warnings.warn(
49
- "This function is deprecated. We recommend to migrate to a different "
50
- "Connector, e.g. `pst.PasConnector`.",
51
- DeprecationWarning,
52
- stacklevel=1,
53
- )
54
- import pystore
55
-
56
- if conn is not None:
57
- name = conn.name
58
- path = conn.path
59
- elif name is None or path is None:
60
- raise ValueError("Please provide 'name' and 'path' OR 'conn'!")
61
-
62
- print(f"Deleting PystoreConnector database: '{name}' ...", end="")
63
- pystore.set_path(path)
64
- if libraries is None:
65
- pystore.delete_store(name)
66
- print(" Done!")
67
- else:
68
- store = pystore.store(name)
69
- for lib in libraries:
70
- print()
71
- store.delete_collection(lib)
72
- print(f" - deleted: {lib}")
73
-
74
-
75
- # TODO: remove in future version
76
- def delete_arctic_connector(
77
- conn=None,
78
- connstr: Optional[str] = None,
79
- name: Optional[str] = None,
80
- libraries: Optional[List[str]] = None,
81
- ) -> None: # pragma: no cover
82
- """Delete libraries from arctic database.
83
-
84
- Parameters
85
- ----------
86
- conn : pastastore.ArcticConnector
87
- ArcticConnector object
88
- connstr : str, optional
89
- connection string to the database
90
- name : str, optional
91
- name of the database
92
- libraries : Optional[List[str]], optional
93
- list of library names to delete, by default None which deletes
94
- all libraries
95
- """
96
- warnings.warn(
97
- "This function is deprecated. We recommend to migrate to a different "
98
- "Connector, e.g. `pst.ArcticDBConnector`.",
99
- DeprecationWarning,
100
- stacklevel=1,
101
- )
102
- import arctic
103
-
104
- if conn is not None:
105
- name = conn.name
106
- connstr = conn.connstr
107
- elif name is None or connstr is None:
108
- raise ValueError("Provide 'name' and 'connstr' OR 'conn'!")
109
-
110
- arc = arctic.Arctic(connstr)
111
-
112
- print(f"Deleting ArcticConnector database: '{name}' ... ", end="")
113
- # get library names
114
- if libraries is None:
115
- libs = []
116
- for ilib in arc.list_libraries():
117
- if ilib.split(".")[0] == name:
118
- libs.append(ilib)
119
- elif name is not None:
120
- libs = [name + "." + ilib for ilib in libraries]
121
- else:
122
- raise ValueError("Provide 'name' and 'connstr' OR 'conn'!")
123
-
124
- for lib in libs:
125
- arc.delete_library(lib)
126
- if libraries is not None:
127
- print()
128
- print(f" - deleted: {lib}")
129
- print("Done!")
130
-
131
-
132
26
  def delete_arcticdb_connector(
133
27
  conn=None,
134
28
  uri: Optional[str] = None,
@@ -167,17 +61,15 @@ def delete_arcticdb_connector(
167
61
  libs = []
168
62
  for ilib in arc.list_libraries():
169
63
  if ilib.split(".")[0] == name:
170
- # TODO: remove replace when arcticdb is able to delete
171
- libs.append(ilib.replace(".", "/"))
64
+ libs.append(ilib)
172
65
  elif name is not None:
173
- # TODO: replace / with . when arcticdb is able to delete
174
- libs = [name + "/" + ilib for ilib in libraries]
66
+ libs = [name + "." + ilib for ilib in libraries]
175
67
  else:
176
68
  raise ValueError("Provide 'name' and 'uri' OR 'conn'!")
177
69
 
178
70
  for lib in libs:
179
- # arc.delete_library(lib) # TODO: not working at the moment.
180
- shutil.rmtree(os.path.join(conn.uri.split("//")[-1], lib))
71
+ arc.delete_library(lib)
72
+ # shutil.rmtree(os.path.join(conn.uri.split("//")[-1], lib))
181
73
 
182
74
  if libraries is not None:
183
75
  print()
@@ -243,14 +135,8 @@ def delete_pastastore(pstore, libraries: Optional[List[str]] = None) -> None:
243
135
  TypeError
244
136
  when Connector type is not recognized
245
137
  """
246
- # TODO: remove in future version
247
- if pstore.conn.conn_type == "pystore":
248
- delete_pystore_connector(conn=pstore.conn, libraries=libraries)
249
- elif pstore.conn.conn_type == "dict":
138
+ if pstore.conn.conn_type == "dict":
250
139
  delete_dict_connector(pstore)
251
- # TODO: remove in future version
252
- elif pstore.conn.conn_type == "arctic":
253
- delete_arctic_connector(conn=pstore.conn, libraries=libraries)
254
140
  elif pstore.conn.conn_type == "arcticdb":
255
141
  delete_arcticdb_connector(conn=pstore.conn, libraries=libraries)
256
142
  elif pstore.conn.conn_type == "pas":
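A hedged sketch of removing an ArcticDB-backed database with the updated `delete_arcticdb_connector` above, which now calls arcticdb's own `delete_library` instead of deleting directories from disk; the name, URI and library names are illustrative.

```python
import pastastore as pst
from pastastore.util import delete_arcticdb_connector

conn = pst.ArcticDBConnector("my_db", "lmdb://./arctic_db")

# delete every library belonging to this store
delete_arcticdb_connector(conn=conn)

# or delete selected libraries by providing 'name' and 'uri' explicitly
# delete_arcticdb_connector(name="my_db", uri="lmdb://./arctic_db", libraries=["models"])
```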
pastastore/version.py CHANGED
@@ -9,7 +9,7 @@ PASTAS_VERSION = parse_version(ps.__version__)
9
9
  PASTAS_LEQ_022 = PASTAS_VERSION <= parse_version("0.22.0")
10
10
  PASTAS_GEQ_150 = PASTAS_VERSION >= parse_version("1.5.0")
11
11
 
12
- __version__ = "1.5.0"
12
+ __version__ = "1.6.0"
13
13
 
14
14
 
15
15
  def show_versions(optional=False) -> None:
@@ -21,6 +21,7 @@ def show_versions(optional=False) -> None:
21
21
  Print the version of optional dependencies, by default False
22
22
  """
23
23
  msg = (
24
+ f"Pastastore version : {__version__}\n\n"
24
25
  f"Python version : {python_version()}\n"
25
26
  f"Pandas version : {metadata.version('pandas')}\n"
26
27
  f"Matplotlib version : {metadata.version('matplotlib')}\n"
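`show_versions` now reports the pastastore version on its first line; a quick check (output illustrative):

```python
from pastastore.version import show_versions

show_versions()
# Pastastore version : 1.6.0
#
# Python version : ...
# Pandas version : ...
# Matplotlib version : ...
```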
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: pastastore
3
- Version: 1.5.0
3
+ Version: 1.6.0
4
4
  Summary: Tools for managing Pastas time series models.
5
5
  Author: D.A. Brakenhoff
6
6
  Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
@@ -61,12 +61,12 @@ Requires-Dist: Ipython ; extra == 'docs'
61
61
  Requires-Dist: ipykernel ; extra == 'docs'
62
62
  Requires-Dist: nbsphinx ; extra == 'docs'
63
63
  Requires-Dist: nbsphinx-link ; extra == 'docs'
64
+ Provides-Extra: extensions
65
+ Requires-Dist: hydropandas ; extra == 'extensions'
64
66
  Provides-Extra: full
65
67
  Requires-Dist: pastastore[arcticdb,optional] ; extra == 'full'
66
68
  Provides-Extra: lint
67
- Requires-Dist: black ; extra == 'lint'
68
- Requires-Dist: flake8 ; extra == 'lint'
69
- Requires-Dist: isort ; extra == 'lint'
69
+ Requires-Dist: ruff ; extra == 'lint'
70
70
  Provides-Extra: optional
71
71
  Requires-Dist: contextily ; extra == 'optional'
72
72
  Requires-Dist: pyproj ; extra == 'optional'
@@ -0,0 +1,15 @@
1
+ pastastore/__init__.py,sha256=l6zRpDO0j6MIrfdljCTbkF70bt-GFlPseBd4IlmaC-o,269
2
+ pastastore/base.py,sha256=n7hPrkaLjR6_8S0XRHxvviqBWvULx3W_faQcoA9HZ9I,67166
3
+ pastastore/connectors.py,sha256=YK3I_Jb2uNwzBQvN2VwZvmTRfPeUETW-4ddcFSWkHVw,16820
4
+ pastastore/datasets.py,sha256=FHVfmKqb8beEs9NONsWrCoJY37BmlvFLSEQ1VAFmE8A,6415
5
+ pastastore/plotting.py,sha256=t6gEeHVGzrwvM6q1l8V3OkklpU75O2Y4h6nKEHRWdjo,46416
6
+ pastastore/store.py,sha256=istLgbTVXvNWqTkZQtEcxWEweouh-M6HWUAXybKrESw,58286
7
+ pastastore/styling.py,sha256=ioaH10ELV8CFvJA-xAKFbnBklTd6FB1TZV8sqvZrEcw,1518
8
+ pastastore/util.py,sha256=iXHoGHfK6VDbUpufNsnzdV71oBVp-koZUD4VJj6MOwo,28250
9
+ pastastore/version.py,sha256=p4YdipfRBvajfHzz2s7TjR_IpDOit_K_Lr2e7pnKhLU,1205
10
+ pastastore/yaml_interface.py,sha256=MddELxWe8_aqJRMUydOCbjoU1-ZodzxFKYnAaqJ5SqA,29947
11
+ pastastore-1.6.0.dist-info/LICENSE,sha256=DtHftfUEm99KzgwLr3rQUTg8H3kAS0Z-p5WWJgLf_OY,1082
12
+ pastastore-1.6.0.dist-info/METADATA,sha256=cFimKbNDGJdY-iOFcDfpo4ger907HniL0kWFeSUvTqg,8021
13
+ pastastore-1.6.0.dist-info/WHEEL,sha256=HiCZjzuy6Dw0hdX5R3LCFPDmFS4BWl8H-8W39XfmgX4,91
14
+ pastastore-1.6.0.dist-info/top_level.txt,sha256=QKfonr1KJZN46MFsj8eGRBw9Mg-jO-HFvgE2orVX7Sk,11
15
+ pastastore-1.6.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (72.1.0)
2
+ Generator: setuptools (72.2.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5
 
@@ -1,15 +0,0 @@
1
- pastastore/__init__.py,sha256=l6zRpDO0j6MIrfdljCTbkF70bt-GFlPseBd4IlmaC-o,269
2
- pastastore/base.py,sha256=tUWIenFjiIeQHkA2CYBxmwMy7KXXq5JZ5jZCphRjuj8,67145
3
- pastastore/connectors.py,sha256=-Rsw8Uf7wcos9XsQMDPgysU2acT-kqt25WFeNnpf5K0,29588
4
- pastastore/datasets.py,sha256=VTjlekM3UryGpslSdMLAaT-QIAVJfaZql0dka2AbMyc,6665
5
- pastastore/plotting.py,sha256=t6gEeHVGzrwvM6q1l8V3OkklpU75O2Y4h6nKEHRWdjo,46416
6
- pastastore/store.py,sha256=1HEkr84vmCiB07Yj8iOMybFRY_l93C4J1e2suGd92SU,44171
7
- pastastore/styling.py,sha256=ioaH10ELV8CFvJA-xAKFbnBklTd6FB1TZV8sqvZrEcw,1518
8
- pastastore/util.py,sha256=4P1cQ_euEo3-YopKGReQ18EznTxBVfDpXFiEczpn_Bw,31811
9
- pastastore/version.py,sha256=xF0vR3dPK9WQScTFt_osbszvZt9GKJQTBvrT8nRgswc,1155
10
- pastastore/yaml_interface.py,sha256=MddELxWe8_aqJRMUydOCbjoU1-ZodzxFKYnAaqJ5SqA,29947
11
- pastastore-1.5.0.dist-info/LICENSE,sha256=DtHftfUEm99KzgwLr3rQUTg8H3kAS0Z-p5WWJgLf_OY,1082
12
- pastastore-1.5.0.dist-info/METADATA,sha256=_JrozH-4rPhQdxsVuzzHhmW3rWd_T_9oC8H-b68B_7s,8023
13
- pastastore-1.5.0.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
14
- pastastore-1.5.0.dist-info/top_level.txt,sha256=QKfonr1KJZN46MFsj8eGRBw9Mg-jO-HFvgE2orVX7Sk,11
15
- pastastore-1.5.0.dist-info/RECORD,,