pastastore 1.5.0__tar.gz → 1.6.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. {pastastore-1.5.0 → pastastore-1.6.0}/PKG-INFO +7 -7
  2. {pastastore-1.5.0 → pastastore-1.6.0}/pastastore/base.py +2 -1
  3. {pastastore-1.5.0 → pastastore-1.6.0}/pastastore/connectors.py +0 -418
  4. {pastastore-1.5.0 → pastastore-1.6.0}/pastastore/datasets.py +2 -8
  5. {pastastore-1.5.0 → pastastore-1.6.0}/pastastore/store.py +364 -47
  6. {pastastore-1.5.0 → pastastore-1.6.0}/pastastore/util.py +5 -119
  7. {pastastore-1.5.0 → pastastore-1.6.0}/pastastore/version.py +2 -1
  8. {pastastore-1.5.0 → pastastore-1.6.0}/pastastore.egg-info/PKG-INFO +7 -7
  9. {pastastore-1.5.0 → pastastore-1.6.0}/pastastore.egg-info/SOURCES.txt +3 -1
  10. {pastastore-1.5.0 → pastastore-1.6.0}/pastastore.egg-info/requires.txt +4 -3
  11. {pastastore-1.5.0 → pastastore-1.6.0}/pyproject.toml +7 -5
  12. {pastastore-1.5.0 → pastastore-1.6.0}/tests/test_002_connectors.py +16 -19
  13. {pastastore-1.5.0 → pastastore-1.6.0}/tests/test_003_pastastore.py +21 -25
  14. {pastastore-1.5.0 → pastastore-1.6.0}/tests/test_004_yaml.py +7 -7
  15. {pastastore-1.5.0 → pastastore-1.6.0}/tests/test_005_maps_plots.py +6 -6
  16. pastastore-1.6.0/tests/test_007_hpdextension.py +66 -0
  17. pastastore-1.6.0/tests/test_008_stressmodels.py +128 -0
  18. {pastastore-1.5.0 → pastastore-1.6.0}/LICENSE +0 -0
  19. {pastastore-1.5.0 → pastastore-1.6.0}/pastastore/__init__.py +0 -0
  20. {pastastore-1.5.0 → pastastore-1.6.0}/pastastore/plotting.py +0 -0
  21. {pastastore-1.5.0 → pastastore-1.6.0}/pastastore/styling.py +0 -0
  22. {pastastore-1.5.0 → pastastore-1.6.0}/pastastore/yaml_interface.py +0 -0
  23. {pastastore-1.5.0 → pastastore-1.6.0}/pastastore.egg-info/dependency_links.txt +0 -0
  24. {pastastore-1.5.0 → pastastore-1.6.0}/pastastore.egg-info/top_level.txt +0 -0
  25. {pastastore-1.5.0 → pastastore-1.6.0}/readme.md +0 -0
  26. {pastastore-1.5.0 → pastastore-1.6.0}/setup.cfg +0 -0
  27. {pastastore-1.5.0 → pastastore-1.6.0}/tests/test_001_import.py +0 -0
  28. {pastastore-1.5.0 → pastastore-1.6.0}/tests/test_006_benchmark.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: pastastore
- Version: 1.5.0
+ Version: 1.6.0
  Summary: Tools for managing Pastas time series models.
  Author: D.A. Brakenhoff
  Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
@@ -54,14 +54,16 @@ Requires-Dist: tqdm>=4.36
  Requires-Dist: pyyaml
  Provides-Extra: full
  Requires-Dist: pastastore[arcticdb,optional]; extra == "full"
- Provides-Extra: lint
- Requires-Dist: black; extra == "lint"
- Requires-Dist: flake8; extra == "lint"
- Requires-Dist: isort; extra == "lint"
+ Provides-Extra: extensions
+ Requires-Dist: hydropandas; extra == "extensions"
  Provides-Extra: optional
  Requires-Dist: contextily; extra == "optional"
  Requires-Dist: pyproj; extra == "optional"
  Requires-Dist: adjustText; extra == "optional"
+ Provides-Extra: arcticdb
+ Requires-Dist: arcticdb; extra == "arcticdb"
+ Provides-Extra: lint
+ Requires-Dist: ruff; extra == "lint"
  Provides-Extra: test
  Requires-Dist: pastastore[arcticdb,lint,optional]; extra == "test"
  Requires-Dist: hydropandas[full]; extra == "test"
@@ -82,8 +84,6 @@ Requires-Dist: pytest-cov; extra == "test-py312"
  Requires-Dist: pytest-dependency; extra == "test-py312"
  Requires-Dist: pytest-benchmark; extra == "test-py312"
  Requires-Dist: codacy-coverage; extra == "test-py312"
- Provides-Extra: arcticdb
- Requires-Dist: arcticdb; extra == "arcticdb"
  Provides-Extra: docs
  Requires-Dist: pastastore[optional]; extra == "docs"
  Requires-Dist: sphinx_rtd_theme; extra == "docs"
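
Note on the metadata changes above: the lint extra now installs ruff in place of black, flake8 and isort, and a new extensions extra pulls in hydropandas (exercised by the new tests/test_007_hpdextension.py). A minimal, hedged sketch of guarding that optional dependency in downstream code; the install spelling pip install pastastore[extensions] is assumed from standard extras syntax:

    # Hedged sketch: hydropandas is only available when the "extensions" extra
    # is installed (assumed: pip install pastastore[extensions]).
    try:
        import hydropandas as hpd
    except ImportError:  # extra not installed; extension features unavailable
        hpd = None
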
@@ -1471,6 +1471,7 @@ class ConnectorUtil:
  else:
  msg = "stress '{}' not present in library".format(name)
  raise KeyError(msg)
+
  # hack for pcov w dtype object (when filled with NaNs on store?)
  if "fit" in mdict:
  if "pcov" in mdict["fit"]:
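
The comment in the hunk above refers to a workaround for pcov being read back with dtype object after storage. A hedged illustration of that kind of coercion; the actual fix in ConnectorUtil is not shown in this hunk, and the astype(float) call below is an assumption:

    import numpy as np
    import pandas as pd

    # Illustrative model dictionary with an object-dtype pcov, as hinted at above.
    mdict = {"fit": {"pcov": pd.DataFrame([[np.nan, 0.1], [0.1, np.nan]], dtype=object)}}
    if "fit" in mdict and "pcov" in mdict["fit"]:
        pcov = mdict["fit"]["pcov"]
        if (pcov.dtypes == object).any():
            mdict["fit"]["pcov"] = pcov.astype(float)  # coerce back to numeric
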
@@ -1534,7 +1535,7 @@ class ConnectorUtil:
  if isinstance(series, pd.Series):
  series.name = name
  # empty string on index name causes trouble when reading
- # data from Arctic VersionStores
+ # data from ArcticDB: TODO: check if still an issue?
  if series.index.name == "":
  series.index.name = None
@@ -4,7 +4,6 @@ import json
  import os
  import warnings
  from copy import deepcopy
- from importlib import import_module
  from typing import Dict, Optional, Union

  import pandas as pd
@@ -17,195 +16,6 @@ FrameorSeriesUnion = Union[pd.DataFrame, pd.Series]
  warnings.showwarning = _custom_warning


- class ArcticConnector(BaseConnector, ConnectorUtil): # pragma: no cover
- """ArcticConnector object that connects to a running MongoDB database via Arctic."""
-
- conn_type = "arctic"
-
- def __init__(self, name: str, connstr: str):
- """Create an ArcticConnector object that connects to a MongoDB database.
-
- Parameters
- ----------
- name : str
- name of the database
- connstr : str
- connection string (e.g. 'mongodb://localhost:27017/')
- """
- warnings.warn(
- "ArcticConnector is deprecated. Please use a different "
- "connector, e.g. `pst.ArcticDBConnector`.",
- DeprecationWarning,
- stacklevel=1,
- )
- try:
- import arctic
- except ModuleNotFoundError as e:
- print(
- "Please install arctic (also requires "
- "a MongoDB instance running somewhere, e.g. "
- "MongoDB Community: \n"
- "https://docs.mongodb.com/manual/administration"
- "/install-community/)!"
- )
- raise e
- self.connstr = connstr
- self.name = name
-
- self.libs: dict = {}
- self.arc = arctic.Arctic(connstr)
- self._initialize()
- self.models = ModelAccessor(self)
- # for older versions of PastaStore, if oseries_models library is empty
- # populate oseries - models database
- self._update_all_oseries_model_links()
-
- def _initialize(self) -> None:
- """Initialize the libraries (internal method)."""
- for libname in self._default_library_names:
- if self._library_name(libname) not in self.arc.list_libraries():
- self.arc.initialize_library(self._library_name(libname))
- else:
- print(
- f"ArcticConnector: library "
- f"'{self._library_name(libname)}'"
- " already exists. Linking to existing library."
- )
- self.libs[libname] = self._get_library(libname)
-
- def _library_name(self, libname: str) -> str:
- """Get full library name according to Arctic (internal method)."""
- return ".".join([self.name, libname])
-
- def _get_library(self, libname: str):
- """Get Arctic library handle.
-
- Parameters
- ----------
- libname : str
- name of the library
-
- Returns
- -------
- lib : arctic.Library handle
- handle to the library
- """
- # get library handle
- lib = self.arc.get_library(self._library_name(libname))
- return lib
-
- def _add_item(
- self,
- libname: str,
- item: Union[FrameorSeriesUnion, Dict],
- name: str,
- metadata: Optional[Dict] = None,
- **_,
- ) -> None:
- """Add item to library (time series or model) (internal method).
-
- Parameters
- ----------
- libname : str
- name of the library
- item : Union[FrameorSeriesUnion, Dict]
- item to add, either time series or pastas.Model as dictionary
- name : str
- name of the item
- metadata : Optional[Dict], optional
- dictionary containing metadata, by default None
- """
- lib = self._get_library(libname)
- lib.write(name, item, metadata=metadata)
-
- def _get_item(self, libname: str, name: str) -> Union[FrameorSeriesUnion, Dict]:
- """Retrieve item from library (internal method).
-
- Parameters
- ----------
- libname : str
- name of the library
- name : str
- name of the item
-
- Returns
- -------
- item : Union[FrameorSeriesUnion, Dict]
- time series or model dictionary
- """
- lib = self._get_library(libname)
- return lib.read(name).data
-
- def _del_item(self, libname: str, name: str) -> None:
- """Delete items (series or models) (internal method).
-
- Parameters
- ----------
- libname : str
- name of library to delete item from
- name : str
- name of item to delete
- """
- lib = self._get_library(libname)
- lib.delete(name)
-
- def _get_metadata(self, libname: str, name: str) -> dict:
- """Retrieve metadata for an item (internal method).
-
- Parameters
- ----------
- libname : str
- name of the library
- name : str
- name of the item
-
- Returns
- -------
- dict
- dictionary containing metadata
- """
- lib = self._get_library(libname)
- return lib.read_metadata(name).metadata
-
- @property
- def oseries_names(self):
- """List of oseries names.
-
- Returns
- -------
- list
- list of oseries in library
- """
- return self._get_library("oseries").list_symbols()
-
- @property
- def stresses_names(self):
- """List of stresses names.
-
- Returns
- -------
- list
- list of stresses in library
- """
- return self._get_library("stresses").list_symbols()
-
- @property
- def model_names(self):
- """List of model names.
-
- Returns
- -------
- list
- list of models in library
- """
- return self._get_library("models").list_symbols()
-
- @property
- def oseries_with_models(self):
- """List of oseries with models."""
- return self._get_library("oseries_models").list_symbols()
-
-
  class ArcticDBConnector(BaseConnector, ConnectorUtil):
  """ArcticDBConnector object using ArcticDB to store data."""
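
The removed ArcticConnector's deprecation warning pointed users at ArcticDBConnector, which remains (context lines above). A minimal, hedged migration sketch; the "my_db" name and local LMDB uri mirror _default_connector further down in this diff, and wrapping the connector in pst.PastaStore is an assumption about typical usage:

    import pastastore as pst

    # Hedged sketch: ArcticDBConnector as the replacement for the removed
    # ArcticConnector, using a local LMDB directory instead of a MongoDB instance.
    conn = pst.ArcticDBConnector("my_db", "lmdb://./arctic_db")
    store = pst.PastaStore(conn)  # assumed: PastaStore wraps a connector
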
@@ -388,234 +198,6 @@ class ArcticDBConnector(BaseConnector, ConnectorUtil):
  return self._get_library("oseries_models").list_symbols()


- class PystoreConnector(BaseConnector, ConnectorUtil): # pragma: no cover
- """PystoreConnector object using pystore as database backend."""
-
- conn_type = "pystore"
-
- def __init__(self, name: str, path: str):
- """Create a PystoreConnector object that points to a Pystore.
-
- Parameters
- ----------
- name : str
- name of the store
- path : str
- path to the pystore directory
- """
- warnings.warn(
- "PystoreConnector is deprecated. Please use a different "
- "connector, e.g. `pst.PasConnector`.",
- DeprecationWarning,
- stacklevel=1,
- )
- try:
- import pystore
- except ModuleNotFoundError as e:
- print(
- "Install pystore, follow instructions at "
- "https://github.com/ranaroussi/pystore#dependencies"
- )
- raise e
- self.name = name
- self.path = path
- pystore.set_path(self.path)
- self.store = pystore.store(self.name)
- self.libs: dict = {}
- self._initialize()
- self.models = ModelAccessor(self)
- # for older versions of PastaStore, if oseries_models library is empty
- # populate oseries - models database
- self._update_all_oseries_model_links()
-
- def _initialize(self) -> None:
- """Initialize the libraries (stores) (internal method)."""
- for libname in self._default_library_names:
- if libname in self.store.list_collections():
- print(
- f"PystoreConnector: library '{self.path}/{libname}' "
- "already exists. Linking to existing library."
- )
- lib = self.store.collection(libname)
- self.libs[libname] = lib
-
- def _get_library(self, libname: str):
- """Get Pystore library handle.
-
- Parameters
- ----------
- libname : str
- name of the library
-
- Returns
- -------
- Pystore.Collection handle
- handle to the library
- """
- # get library handle
- lib = self.store.collection(libname)
- return lib
-
- def _add_item(
- self,
- libname: str,
- item: Union[FrameorSeriesUnion, Dict],
- name: str,
- metadata: Optional[Dict] = None,
- overwrite: bool = False,
- ) -> None:
- """Add item to library (time series or model) (internal method).
-
- Parameters
- ----------
- libname : str
- name of the library
- item : Union[FrameorSeriesUnion, Dict]
- item to add, either time series or pastas.Model as dictionary
- name : str
- name of the item
- metadata : Optional[Dict], optional
- dictionary containing metadata, by default None
- overwrite : bool, optional
- overwrite item if it already exists, by default False.
- """
- # convert to DataFrame because pystore doesn't accept pandas.Series
- # (maybe has an easy fix, but converting w to_frame for now)
- if isinstance(item, pd.Series):
- s = item.to_frame(name=name)
- is_type = "series"
- elif isinstance(item, dict):
- s = pd.DataFrame() # empty DataFrame as placeholder
- jsondict = json.loads(json.dumps(item, cls=PastasEncoder, indent=4))
- metadata = jsondict # model dict is stored in metadata
- is_type = "series"
- elif isinstance(item, list):
- s = pd.Series(item).to_frame(name="modelnames")
- is_type = "list"
- elif isinstance(item, pd.DataFrame):
- s = item
- is_type = "dataframe"
-
- # store info about input type to ensure same type is returned
- if metadata is None:
- metadata = {"_is_type": is_type}
- else:
- metadata["_is_type"] = is_type
-
- lib = self._get_library(libname)
- lib.write(name, s, metadata=metadata, overwrite=overwrite)
-
- def _get_item(self, libname: str, name: str) -> Union[FrameorSeriesUnion, Dict]:
- """Retrieve item from pystore library (internal method).
-
- Parameters
- ----------
- libname : str
- name of the library
- name : str
- name of the item
-
- Returns
- -------
- item : Union[FrameorSeriesUnion, Dict]
- time series or model dictionary
- """
- load_mod = import_module("pastas.io.pas") # type: ignore
- lib = self._get_library(libname)
- # hack for storing models, stored as metadata
- if libname == "models":
- jsonpath = lib._item_path(name).joinpath("metadata.json")
- s = load_mod.load(jsonpath) # type: ignore
- else:
- # read series and convert to pandas
- item = lib.item(name)
- s = item.to_pandas()
- # remove _is_type key and return correct type
- is_type = item.metadata.pop("_is_type")
- if is_type == "series":
- s = s.squeeze()
- elif is_type == "list":
- s = s["modelnames"].tolist()
- return s
-
- def _del_item(self, libname: str, name: str) -> None:
- """Delete data from the store (internal method).
-
- Parameters
- ----------
- libname : str
- name of the library
- name : str
- name of the item to delete
- """
- lib = self._get_library(libname)
- lib.delete_item(name)
- self._clear_cache(libname)
-
- def _get_metadata(self, libname: str, name: str) -> dict:
- """Read metadata from pystore (internal method).
-
- Parameters
- ----------
- libname : str
- name of the library the series are in ("oseries" or "stresses")
- name : str
- name of item to load metadata for
-
- Returns
- -------
- imeta : dict
- dictionary containing metadata
- """
- from pystore.utils import read_metadata
-
- lib = self._get_library(libname)
- imeta = read_metadata(lib._item_path(name))
- if "name" not in imeta.keys():
- imeta["name"] = name
- if "_is_type" in imeta.keys():
- imeta.pop("_is_type")
- return imeta
-
- @property
- def oseries_names(self):
- """List of oseries names.
-
- Returns
- -------
- list
- list of oseries in library
- """
- return list(self._get_library("oseries").list_items())
-
- @property
- def stresses_names(self):
- """List of stresses names.
-
- Returns
- -------
- list
- list of stresses in library
- """
- return list(self._get_library("stresses").list_items())
-
- @property
- def model_names(self):
- """List of model names.
-
- Returns
- -------
- list
- list of models in library
- """
- return list(self._get_library("models").list_items())
-
- @property
- def oseries_with_models(self):
- """List of oseries with models."""
- return list(self._get_library("oseries_models").list_items())
-
-
  class DictConnector(BaseConnector, ConnectorUtil):
  """DictConnector object that stores timeseries and models in dictionaries."""
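
Similarly, the removed PystoreConnector's deprecation warning recommended PasConnector. A minimal, hedged sketch, assuming PasConnector takes a database name and a directory in which to keep its .pas files:

    import pastastore as pst

    # Hedged sketch: file-based PasConnector as the replacement for the
    # removed PystoreConnector; "./pastas_db" is an illustrative path.
    conn = pst.PasConnector("my_db", "./pastas_db")
    store = pst.PastaStore(conn)  # assumed: PastaStore wraps a connector
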
@@ -176,8 +176,7 @@ def _default_connector(conntype: str):
  Parameters
  ----------
  conntype : str
- name of connector (DictConnector, PasConnector,
- ArcticConnector, ArcticDBConnector or PystoreConnector)
+ name of connector (DictConnector, PasConnector, ArcticDBConnector)

  Returns
  -------
@@ -185,14 +184,9 @@ def _default_connector(conntype: str):
  default Connector based on type.
  """
  Conn = getattr(pst, conntype)
- if Conn.conn_type == "arctic":
- connstr = "mongodb://localhost:27017/"
- conn = Conn("my_db", connstr)
- elif Conn.conn_type == "arcticdb":
+ if Conn.conn_type == "arcticdb":
  uri = "lmdb://./arctic_db"
  conn = Conn("my_db", uri)
- elif Conn.conn_type == "pystore":
- conn = Conn("my_db", "./pystore_db")
  elif Conn.conn_type == "dict":
  conn = Conn("my_db")
  elif Conn.conn_type == "pas":