pastastore 1.2.2__tar.gz → 1.4.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27)
  1. {pastastore-1.2.2 → pastastore-1.4.0}/PKG-INFO +34 -2
  2. pastastore-1.4.0/pastastore/__init__.py +11 -0
  3. {pastastore-1.2.2 → pastastore-1.4.0}/pastastore/base.py +21 -10
  4. {pastastore-1.2.2 → pastastore-1.4.0}/pastastore/connectors.py +2 -2
  5. {pastastore-1.2.2 → pastastore-1.4.0}/pastastore/plotting.py +129 -27
  6. {pastastore-1.2.2 → pastastore-1.4.0}/pastastore/store.py +193 -18
  7. pastastore-1.4.0/pastastore/styling.py +66 -0
  8. {pastastore-1.2.2 → pastastore-1.4.0}/pastastore/util.py +38 -25
  9. {pastastore-1.2.2 → pastastore-1.4.0}/pastastore/version.py +1 -1
  10. {pastastore-1.2.2 → pastastore-1.4.0}/pastastore/yaml_interface.py +3 -3
  11. {pastastore-1.2.2 → pastastore-1.4.0}/pastastore.egg-info/PKG-INFO +34 -2
  12. {pastastore-1.2.2 → pastastore-1.4.0}/pastastore.egg-info/SOURCES.txt +1 -0
  13. {pastastore-1.2.2 → pastastore-1.4.0}/pastastore.egg-info/requires.txt +1 -0
  14. {pastastore-1.2.2 → pastastore-1.4.0}/pyproject.toml +5 -2
  15. {pastastore-1.2.2 → pastastore-1.4.0}/tests/test_001_import.py +1 -1
  16. {pastastore-1.2.2 → pastastore-1.4.0}/tests/test_002_connectors.py +4 -4
  17. {pastastore-1.2.2 → pastastore-1.4.0}/tests/test_003_pastastore.py +23 -8
  18. {pastastore-1.2.2 → pastastore-1.4.0}/tests/test_004_yaml.py +0 -1
  19. {pastastore-1.2.2 → pastastore-1.4.0}/tests/test_006_benchmark.py +1 -1
  20. pastastore-1.2.2/pastastore/__init__.py +0 -10
  21. {pastastore-1.2.2 → pastastore-1.4.0}/LICENSE +0 -0
  22. {pastastore-1.2.2 → pastastore-1.4.0}/pastastore/datasets.py +0 -0
  23. {pastastore-1.2.2 → pastastore-1.4.0}/pastastore.egg-info/dependency_links.txt +0 -0
  24. {pastastore-1.2.2 → pastastore-1.4.0}/pastastore.egg-info/top_level.txt +0 -0
  25. {pastastore-1.2.2 → pastastore-1.4.0}/readme.md +0 -0
  26. {pastastore-1.2.2 → pastastore-1.4.0}/setup.cfg +0 -0
  27. {pastastore-1.2.2 → pastastore-1.4.0}/tests/test_005_maps_plots.py +0 -0
{pastastore-1.2.2 → pastastore-1.4.0}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: pastastore
- Version: 1.2.2
+ Version: 1.4.0
  Summary: Tools for managing Pastas time series models.
  Author: D.A. Brakenhoff
  Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
@@ -49,15 +49,47 @@ Classifier: Programming Language :: Python :: 3 :: Only
  Classifier: Topic :: Scientific/Engineering :: Hydrology
  Requires-Python: >=3.7
  Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: pastas>=0.13
+ Requires-Dist: tqdm>=4.36
+ Requires-Dist: pyyaml
  Provides-Extra: full
+ Requires-Dist: pastastore[arcticdb,optional]; extra == "full"
  Provides-Extra: lint
+ Requires-Dist: black; extra == "lint"
+ Requires-Dist: flake8; extra == "lint"
+ Requires-Dist: isort; extra == "lint"
  Provides-Extra: optional
+ Requires-Dist: contextily; extra == "optional"
+ Requires-Dist: pyproj; extra == "optional"
+ Requires-Dist: adjustText; extra == "optional"
  Provides-Extra: test
+ Requires-Dist: pastastore[arcticdb,lint,optional]; extra == "test"
+ Requires-Dist: hydropandas[full]; extra == "test"
+ Requires-Dist: coverage; extra == "test"
+ Requires-Dist: codecov; extra == "test"
+ Requires-Dist: pytest; extra == "test"
+ Requires-Dist: pytest-cov; extra == "test"
+ Requires-Dist: pytest-dependency; extra == "test"
+ Requires-Dist: pytest-benchmark; extra == "test"
+ Requires-Dist: codacy-coverage; extra == "test"
+ Requires-Dist: lxml; extra == "test"
  Provides-Extra: pystore
+ Requires-Dist: fsspec>=0.3.3; extra == "pystore"
+ Requires-Dist: python-snappy; extra == "pystore"
+ Requires-Dist: dask[dataframe]; extra == "pystore"
  Provides-Extra: arctic
+ Requires-Dist: arctic; extra == "arctic"
  Provides-Extra: arcticdb
+ Requires-Dist: arcticdb; extra == "arcticdb"
+ Requires-Dist: protobuf~=4.0; extra == "arcticdb"
  Provides-Extra: docs
- License-File: LICENSE
+ Requires-Dist: pastastore[optional]; extra == "docs"
+ Requires-Dist: sphinx_rtd_theme; extra == "docs"
+ Requires-Dist: Ipython; extra == "docs"
+ Requires-Dist: ipykernel; extra == "docs"
+ Requires-Dist: nbsphinx; extra == "docs"
+ Requires-Dist: nbsphinx_link; extra == "docs"

  ![pastastore](https://github.com/pastas/pastastore/workflows/pastastore/badge.svg)
  [![Documentation Status](https://readthedocs.org/projects/pastastore/badge/?version=latest)](https://pastastore.readthedocs.io/en/latest/?badge=latest)
pastastore-1.4.0/pastastore/__init__.py (new file)

@@ -0,0 +1,11 @@
+ # ruff: noqa: F401
+ from pastastore import connectors, styling, util
+ from pastastore.connectors import (
+     ArcticConnector,
+     ArcticDBConnector,
+     DictConnector,
+     PasConnector,
+     PystoreConnector,
+ )
+ from pastastore.store import PastaStore
+ from pastastore.version import __version__
{pastastore-1.2.2 → pastastore-1.4.0}/pastastore/base.py

@@ -11,10 +11,10 @@ import pastas as ps
  from numpy import isin
  from packaging.version import parse as parse_version
  from pastas.io.pas import PastasEncoder
- from tqdm import tqdm
+ from tqdm.auto import tqdm

- from .util import ItemInLibraryException, _custom_warning, validate_names
- from .version import PASTAS_LEQ_022
+ from pastastore.util import ItemInLibraryException, _custom_warning, validate_names
+ from pastastore.version import PASTAS_LEQ_022

  FrameorSeriesUnion = Union[pd.DataFrame, pd.Series]
  warnings.showwarning = _custom_warning
@@ -934,13 +934,24 @@ class BaseConnector(ABC):
          )
          if ui.lower() != "y":
              return
-         names = self._parse_names(None, libname)
-         for name in (
-             tqdm(names, desc=f"Deleting items from {libname}") if progressbar else names
-         ):
-             self._del_item(libname, name)
-         self._clear_cache(libname)
-         print(f"Emptied library {libname} in {self.name}: " f"{self.__class__}")
+
+         if libname == "models":
+             # also delete modelnames linked to oseries
+             libs = ["models", "oseries_models"]
+         else:
+             libs = [libname]
+
+         # delete items and clear caches
+         for libname in libs:
+             names = self._parse_names(None, libname)
+             for name in (
+                 tqdm(names, desc=f"Deleting items from {libname}")
+                 if progressbar
+                 else names
+             ):
+                 self._del_item(libname, name)
+             self._clear_cache(libname)
+             print(f"Emptied library {libname} in {self.name}: " f"{self.__class__}")

      def _iter_series(self, libname: str, names: Optional[List[str]] = None):
          """Internal method to iterate over time series in library.
{pastastore-1.2.2 → pastastore-1.4.0}/pastastore/connectors.py

@@ -8,8 +8,8 @@ from typing import Dict, Optional, Union
  import pandas as pd
  from pastas.io.pas import PastasEncoder, pastas_hook

- from .base import BaseConnector, ConnectorUtil, ModelAccessor
- from .util import _custom_warning
+ from pastastore.base import BaseConnector, ConnectorUtil, ModelAccessor
+ from pastastore.util import _custom_warning

  FrameorSeriesUnion = Union[pd.DataFrame, pd.Series]
  warnings.showwarning = _custom_warning
{pastastore-1.2.2 → pastastore-1.4.0}/pastastore/plotting.py

@@ -15,8 +15,6 @@ follows::
      pstore.maps.add_background_map(ax)  # for adding a background map
  """

- from collections.abc import Iterable
-
  import matplotlib.pyplot as plt
  import numpy as np
  import pandas as pd
@@ -52,6 +50,9 @@ class Plots:
          split=False,
          figsize=(10, 5),
          progressbar=True,
+         show_legend=True,
+         labelfunc=None,
+         legend_kwargs=None,
          **kwargs,
      ):
          """Internal method to plot time series from pastastore.
@@ -74,6 +75,13 @@ class Plots:
          progressbar : bool, optional
              show progressbar when loading time series from store,
              by default True
+         show_legend : bool, optional
+             show legend, default is True.
+         labelfunc : callable, optional
+             function to create custom labels, function should take name of time series
+             as input
+         legend_kwargs : dict, optional
+             additional arguments to pass to legend

          Returns
          -------
@@ -96,15 +104,11 @@ class Plots:

          if ax is None:
              if split:
-                 fig, axes = plt.subplots(len(names), 1, sharex=True, figsize=figsize)
+                 _, axes = plt.subplots(len(names), 1, sharex=True, figsize=figsize)
              else:
-                 fig, axes = plt.subplots(1, 1, figsize=figsize)
+                 _, axes = plt.subplots(1, 1, figsize=figsize)
          else:
              axes = ax
-             if isinstance(axes, Iterable):
-                 fig = axes[0].figure
-             else:
-                 fig = axes.figure

          tsdict = self.pstore.conn._get_series(
              libname, names, progressbar=progressbar, squeeze=False
@@ -116,16 +120,33 @@ class Plots:
                  iax = axes
              else:
                  iax = ax
+             if labelfunc is not None:
+                 n = labelfunc(n)
              iax.plot(ts.index, ts.squeeze(), label=n, **kwargs)
-             if split:
+
+             if split and show_legend:
                  iax.legend(loc="best", fontsize="x-small")

-         if not split:
-             axes.legend(loc=(0, 1), frameon=False, ncol=7, fontsize="x-small")
+         if not split and show_legend:
+             if legend_kwargs is None:
+                 legend_kwargs = {}
+             ncol = legend_kwargs.pop("ncol", 7)
+             fontsize = legend_kwargs.pop("fontsize", "x-small")
+             axes.legend(loc=(0, 1), frameon=False, ncol=ncol, fontsize=fontsize)

          return axes

-     def oseries(self, names=None, ax=None, split=False, figsize=(10, 5), **kwargs):
+     def oseries(
+         self,
+         names=None,
+         ax=None,
+         split=False,
+         figsize=(10, 5),
+         show_legend=True,
+         labelfunc=None,
+         legend_kwargs=None,
+         **kwargs,
+     ):
          """Plot oseries.

          Parameters
@@ -141,6 +162,13 @@ class Plots:
              A maximum of 20 time series is supported when split=True.
          figsize : tuple, optional
              figure size, by default (10, 5)
+         show_legend : bool, optional
+             show legend, default is True.
+         labelfunc : callable, optional
+             function to create custom labels, function should take name of time series
+             as input
+         legend_kwargs : dict, optional
+             additional arguments to pass to legend

          Returns
          -------
@@ -153,6 +181,9 @@ class Plots:
              ax=ax,
              split=split,
              figsize=figsize,
+             show_legend=show_legend,
+             labelfunc=labelfunc,
+             legend_kwargs=legend_kwargs,
              **kwargs,
          )

@@ -163,6 +194,9 @@ class Plots:
          ax=None,
          split=False,
          figsize=(10, 5),
+         show_legend=True,
+         labelfunc=None,
+         legend_kwargs=None,
          **kwargs,
      ):
          """Plot stresses.
@@ -183,6 +217,13 @@ class Plots:
              A maximum of 20 time series is supported when split=True.
          figsize : tuple, optional
              figure size, by default (10, 5)
+         show_legend : bool, optional
+             show legend, default is True.
+         labelfunc : callable, optional
+             function to create custom labels, function should take name of time series
+             as input
+         legend_kwargs : dict, optional
+             additional arguments to pass to legend

          Returns
          -------
@@ -203,6 +244,9 @@ class Plots:
              ax=ax,
              split=split,
              figsize=figsize,
+             show_legend=show_legend,
+             labelfunc=labelfunc,
+             legend_kwargs=legend_kwargs,
              **kwargs,
          )
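A quick sketch of how the new legend options might be used (the store contents below are hypothetical). Note that in the non-split case only "ncol" and "fontsize" are popped from legend_kwargs in this version; other keys are not forwarded:

    import pastastore as pst

    pstore = pst.PastaStore(pst.DictConnector("example"))
    # ... add a few oseries ...

    # custom labels and a narrower legend
    ax = pstore.plots.oseries(
        labelfunc=lambda name: name.upper(),
        legend_kwargs={"ncol": 3, "fontsize": "small"},
    )

    # or hide the legend altogether
    ax = pstore.plots.oseries(show_legend=False)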
@@ -397,20 +441,31 @@ class Plots:
              linewidth=0,
              rasterized=True,
          )
+
          # make a colorbar in an ax on the
          # right side, then set the current axes to ax again
          cb = fig.colorbar(pc, ax=ax, cax=cax, extend="both")
          cb.set_ticks(bounds)
          cb.ax.set_yticklabels(labels)
          cb.ax.minorticks_off()
+
          if set_yticks:
-             ax.set_yticks(np.arange(0.5, len(series) + 0.5))
+             ax.set_yticks(np.arange(0.5, len(series) + 0.5), minor=False)
+             ax.set_yticks(np.arange(0, len(series) + 1), minor=True)
              if names is None:
                  names = [s.name for s in series]
              ax.set_yticklabels(names)
+
+             for tick in ax.yaxis.get_major_ticks():  # don't show major ytick marker
+                 tick.tick1line.set_visible(False)
+
+             ax.grid(True, which="minor", axis="y")
+             ax.grid(True, which="major", axis="x")
+
          else:
              ax.set_ylabel("Timeseries (-)")
-             ax.grid(True)
+             ax.grid(True, which="both")
+             ax.grid(True, which="both")

          return ax
@@ -507,6 +562,22 @@ class Plots:

          return ax

+     def compare_models(self, modelnames, ax=None, **kwargs):
+         models = self.pstore.get_models(modelnames)
+         names = []
+         onames = [iml.oseries.name for iml in models]
+         if len(np.unique(onames)) == 1:
+             for modelname in modelnames:
+                 if onames[0] in modelname:
+                     names.append(modelname.replace(onames[0], ""))
+                 else:
+                     names.append(modelname)
+         else:
+             names = modelnames
+         cm = ps.CompareModels(models, names=names)
+         cm.plot(**kwargs)
+         return cm
+

  class Maps:
      """Map Class for PastaStore.
@@ -535,10 +606,12 @@ class Maps:
          self,
          names=None,
          kind=None,
+         extent=None,
          labels=True,
          adjust=False,
          figsize=(10, 8),
          backgroundmap=False,
+         label_kwargs=None,
          **kwargs,
      ):
          """Plot stresses locations on map.
@@ -550,6 +623,8 @@ class Maps:
          kind: str, optional
              if passed, only plot stresses of a specific kind, default is None
              which plots all stresses.
+         extent : list of float, optional
+             plot only stresses within extent [xmin, xmax, ymin, ymax]
          labels: bool, optional
              label models, by default True
          adjust: bool, optional
@@ -561,6 +636,8 @@ class Maps:
          backgroundmap: bool, optional
              if True, add background map (default CRS is EPSG:28992) with default tiles
              by OpenStreetMap.Mapnik. Default option is False.
+         label_kwargs: dict, optional
+             dictionary with keyword arguments to pass to add_labels method

          Returns
          -------
@@ -571,10 +648,10 @@ class Maps:
          --------
          self.add_background_map
          """
-         if names is not None:
-             df = self.pstore.stresses.loc[names]
-         else:
-             df = self.pstore.stresses
+         names = self.pstore.conn._parse_names(names, "stresses")
+         if extent is not None:
+             names = self.pstore.within(extent, names=names, libname="stresses")
+         df = self.pstore.stresses.loc[names]

          if kind is not None:
              if isinstance(kind, str):
@@ -599,7 +676,9 @@ class Maps:
          else:
              ax = r
          if labels:
-             self.add_labels(stresses, ax, adjust=adjust)
+             if label_kwargs is None:
+                 label_kwargs = {}
+             self.add_labels(stresses, ax, adjust=adjust, **label_kwargs)

          if backgroundmap:
              self.add_background_map(ax)
609
688
  def oseries(
610
689
  self,
611
690
  names=None,
691
+ extent=None,
612
692
  labels=True,
613
693
  adjust=False,
614
694
  figsize=(10, 8),
615
695
  backgroundmap=False,
696
+ label_kwargs=None,
616
697
  **kwargs,
617
698
  ):
618
699
  """Plot oseries locations on map.
@@ -621,8 +702,11 @@ class Maps:
621
702
  ----------
622
703
  names: list, optional
623
704
  oseries names, by default None which plots all oseries locations
624
- labels: bool, optional
625
- label models, by default True
705
+ extent : list of float, optional
706
+ plot only oseries within extent [xmin, xmax, ymin, ymax]
707
+ labels: bool or str, optional
708
+ label models, by default True, if passed as "grouped", only the first
709
+ label for each x,y-location is shown.
626
710
  adjust: bool, optional
627
711
  automated smart label placement using adjustText, by default False
628
712
  figsize: tuple, optional
@@ -630,6 +714,8 @@ class Maps:
630
714
  backgroundmap: bool, optional
631
715
  if True, add background map (default CRS is EPSG:28992) with default tiles
632
716
  by OpenStreetMap.Mapnik. Default option is False.
717
+ label_kwargs: dict, optional
718
+ dictionary with keyword arguments to pass to add_labels method
633
719
 
634
720
  Returns
635
721
  -------
@@ -642,6 +728,8 @@ class Maps:
642
728
  """
643
729
 
644
730
  names = self.pstore.conn._parse_names(names, "oseries")
731
+ if extent is not None:
732
+ names = self.pstore.within(extent, names=names)
645
733
  oseries = self.pstore.oseries.loc[names]
646
734
  mask0 = (oseries["x"] != 0.0) | (oseries["y"] != 0.0)
647
735
  r = self._plotmap_dataframe(oseries.loc[mask0], figsize=figsize, **kwargs)
@@ -650,7 +738,12 @@ class Maps:
650
738
  else:
651
739
  ax = r
652
740
  if labels:
653
- self.add_labels(oseries, ax, adjust=adjust)
741
+ if label_kwargs is None:
742
+ label_kwargs = {}
743
+ if labels == "grouped":
744
+ gr = oseries.sort_index().reset_index().groupby(["x", "y"])
745
+ oseries = oseries.loc[gr["index"].first().tolist()]
746
+ self.add_labels(oseries, ax, adjust=adjust, **label_kwargs)
654
747
 
655
748
  if backgroundmap:
656
749
  self.add_background_map(ax)
@@ -712,6 +805,7 @@ class Maps:
712
805
  def modelstat(
713
806
  self,
714
807
  statistic,
808
+ modelnames=None,
715
809
  label=True,
716
810
  adjust=False,
717
811
  cmap="viridis",
@@ -728,6 +822,8 @@ class Maps:
728
822
  ----------
729
823
  statistic: str
730
824
  name of the statistic, e.g. "evp" or "aic"
825
+ modelnames : list of str, optional
826
+ list of modelnames to include
731
827
  label: bool, optional
732
828
  label points, by default True
733
829
  adjust: bool, optional
@@ -757,7 +853,9 @@ class Maps:
757
853
  --------
758
854
  self.add_background_map
759
855
  """
760
- statsdf = self.pstore.get_statistics([statistic], progressbar=False).to_frame()
856
+ statsdf = self.pstore.get_statistics(
857
+ [statistic], modelnames=modelnames, progressbar=False
858
+ ).to_frame()
761
859
 
762
860
  statsdf["oseries"] = [
763
861
  self.pstore.get_models(m, return_dict=True)["oseries"]["name"]
@@ -1249,7 +1347,7 @@ class Maps:
1249
1347
  ctx.add_basemap(ax, source=providers[map_provider], crs=proj.srs, **kwargs)
1250
1348
 
1251
1349
  @staticmethod
1252
- def add_labels(df, ax, adjust=False, **kwargs):
1350
+ def add_labels(df, ax, adjust=False, objects=None, **kwargs):
1253
1351
  """Add labels to points on plot.
1254
1352
 
1255
1353
  Uses dataframe index to label points.
@@ -1262,11 +1360,12 @@ class Maps:
1262
1360
  axes object to label points on
1263
1361
  adjust: bool
1264
1362
  automated smart label placement using adjustText
1363
+ objects : list of matplotlib objects
1364
+ use to avoid labels overlapping markers
1265
1365
  **kwargs:
1266
- keyword arguments to ax.annotate
1366
+ keyword arguments to ax.annotate or adjusttext
1267
1367
  """
1268
1368
  stroke = [patheffects.withStroke(linewidth=3, foreground="w")]
1269
-
1270
1369
  fontsize = kwargs.pop("fontsize", 10)
1271
1370
 
1272
1371
  if adjust:
@@ -1286,7 +1385,9 @@ class Maps:
1286
1385
 
1287
1386
  adjust_text(
1288
1387
  texts,
1289
- force_text=0.05,
1388
+ objects=objects,
1389
+ force_text=(0.05, 0.10),
1390
+ **kwargs,
1290
1391
  **{
1291
1392
  "arrowprops": {
1292
1393
  "arrowstyle": "-",
@@ -1309,4 +1410,5 @@ class Maps:
1309
1410
  textcoords=textcoords,
1310
1411
  xytext=xytext,
1311
1412
  **{"path_effects": stroke},
1413
+ **kwargs,
1312
1414
  )
@@ -8,11 +8,13 @@ import pandas as pd
8
8
  import pastas as ps
9
9
  from packaging.version import parse as parse_version
10
10
  from pastas.io.pas import pastas_hook
11
- from tqdm import tqdm
11
+ from tqdm.auto import tqdm
12
12
 
13
- from .plotting import Maps, Plots
14
- from .util import _custom_warning
15
- from .yaml_interface import PastastoreYAML
13
+ from pastastore.base import BaseConnector
14
+ from pastastore.connectors import DictConnector
15
+ from pastastore.plotting import Maps, Plots
16
+ from pastastore.util import _custom_warning
17
+ from pastastore.yaml_interface import PastastoreYAML
16
18
 
17
19
  FrameorSeriesUnion = Union[pd.DataFrame, pd.Series]
18
20
  warnings.showwarning = _custom_warning
@@ -38,14 +40,19 @@ class PastaStore:
38
40
  name of the PastaStore, by default takes the name of the Connector object
39
41
  """
40
42
 
41
- def __init__(self, connector, name: str = None):
43
+ def __init__(
44
+ self,
45
+ connector: Optional[BaseConnector] = None,
46
+ name: Optional[str] = None,
47
+ ):
42
48
  """Initialize PastaStore for managing pastas time series and models.
43
49
 
44
50
  Parameters
45
51
  ----------
46
- connector : Connector object
47
- object that provides the interface to the
48
- database
52
+ connector : Connector object, optional
53
+ object that provides the connection to the database. Default is None, which
54
+ will create a DictConnector. This default Connector does not store data on
55
+ disk.
49
56
  name : str, optional
50
57
  name of the PastaStore, if not provided uses the Connector name
51
58
  """
@@ -53,6 +60,8 @@ class PastaStore:
53
60
  raise DeprecationWarning(
54
61
  "PastaStore expects the connector as the first argument since v1.1!"
55
62
  )
63
+ if connector is None:
64
+ connector = DictConnector("pastas_db")
56
65
  self.conn = connector
57
66
  self.name = name if name is not None else self.conn.name
58
67
  self._register_connector_methods()
@@ -300,6 +309,81 @@ class PastaStore:
300
309
  data = pd.concat([data, series], axis=0)
301
310
  return data
302
311
 
312
+ def get_signatures(
313
+ self,
314
+ signatures=None,
315
+ names=None,
316
+ libname="oseries",
317
+ progressbar=False,
318
+ ignore_errors=False,
319
+ ):
320
+ """Get groundwater signatures. NaN-values are returned when the
321
+ signature could not be computed.
322
+
323
+ Parameters
324
+ ----------
325
+ signatures : list of str, optional
326
+ list of groundwater signatures to compute, if None all groundwater
327
+ signatures in ps.stats.signatures.__all__ are used, by default None
328
+ names : str, list of str, or None, optional
329
+ names of the time series, by default None which
330
+ uses all the time series in the library
331
+ libname : str
332
+ name of the library containing the time series
333
+ ('oseries' or 'stresses'), by default "oseries"
334
+ progressbar : bool, optional
335
+ show progressbar, by default False
336
+ ignore_errors : bool, optional
337
+ ignore errors when True, i.e. when non-existent timeseries is
338
+ encountered in names, by default False
339
+
340
+ Returns
341
+ -------
342
+ signatures_df : pandas.DataFrame
343
+ DataFrame containing the signatures (columns) per time series (rows)
344
+ """
345
+ names = self.conn._parse_names(names, libname=libname)
346
+
347
+ if signatures is None:
348
+ signatures = ps.stats.signatures.__all__.copy()
349
+
350
+ # create dataframe for results
351
+ signatures_df = pd.DataFrame(index=names, columns=signatures, data=np.nan)
352
+
353
+ # loop through oseries names
354
+ desc = "Get groundwater signatures"
355
+ for name in tqdm(names, desc=desc) if progressbar else names:
356
+ try:
357
+ if libname == "oseries":
358
+ s = self.conn.get_oseries(name)
359
+ else:
360
+ s = self.conn.get_stresses(name)
361
+ except Exception as e:
362
+ if ignore_errors:
363
+ signatures_df.loc[name, :] = np.nan
364
+ continue
365
+ else:
366
+ raise e
367
+
368
+ try:
369
+ i_signatures = ps.stats.signatures.summary(s.squeeze(), signatures)
370
+ except Exception as e:
371
+ if ignore_errors:
372
+ i_signatures = []
373
+ for signature in signatures:
374
+ try:
375
+ sign_val = getattr(ps.stats.signatures, signature)(
376
+ s.squeeze()
377
+ )
378
+ except Exception as _:
379
+ sign_val = np.nan
380
+ i_signatures.append(sign_val)
381
+ else:
382
+ raise e
383
+ signatures_df.loc[name, signatures] = i_signatures.squeeze()
384
+
385
+ return signatures_df
386
+
303
387
  def get_tmin_tmax(self, libname, names=None, progressbar=False):
304
388
  """Get tmin and tmax for time series.
305
389
 
@@ -334,6 +418,23 @@ class PastaStore:
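A minimal sketch of the new method (the series name below is hypothetical; with signatures=None every signature in ps.stats.signatures.__all__ is computed):

    # one row per oseries, one column per signature
    sigs = pstore.get_signatures(progressbar=False)

    # restrict to specific series, returning NaN where computation fails
    sigs = pstore.get_signatures(names=["oseries1"], ignore_errors=True)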
334
418
  tmintmax.loc[n, "tmax"] = s.last_valid_index()
335
419
  return tmintmax
336
420
 
421
+ def get_extent(self, libname, names=None, buffer=0.0):
422
+ names = self.conn._parse_names(names, libname=libname)
423
+ if libname in ["oseries", "stresses"]:
424
+ df = getattr(self, libname)
425
+ elif libname == "models":
426
+ df = self.oseries
427
+ else:
428
+ raise ValueError(f"Cannot get extent for library '{libname}'.")
429
+
430
+ extent = [
431
+ df.loc[names, "x"].min() - buffer,
432
+ df.loc[names, "x"].max() + buffer,
433
+ df.loc[names, "y"].min() - buffer,
434
+ df.loc[names, "y"].max() + buffer,
435
+ ]
436
+ return extent
437
+
337
438
  def get_parameters(
338
439
  self,
339
440
  parameters: Optional[List[str]] = None,
@@ -428,13 +529,13 @@ class PastaStore:
428
529
 
429
530
  modelnames = self.conn._parse_names(modelnames, libname="models")
430
531
 
431
- # create dataframe for results
432
- s = pd.DataFrame(index=modelnames, columns=statistics, data=np.nan)
433
-
434
532
  # if statistics is str
435
533
  if isinstance(statistics, str):
436
534
  statistics = [statistics]
437
535
 
536
+ # create dataframe for results
537
+ s = pd.DataFrame(index=modelnames, columns=statistics, data=np.nan)
538
+
438
539
  # loop through model names
439
540
  desc = "Get model statistics"
440
541
  for mlname in tqdm(modelnames, desc=desc) if progressbar else modelnames:
@@ -836,7 +937,7 @@ class PastaStore:
836
937
  def from_zip(
837
938
  cls,
838
939
  fname: str,
839
- conn,
940
+ conn: Optional[BaseConnector] = None,
840
941
  storename: Optional[str] = None,
841
942
  progressbar: bool = True,
842
943
  ):
@@ -846,8 +947,9 @@ class PastaStore:
846
947
  ----------
847
948
  fname : str
848
949
  pathname of zipfile
849
- conn : Connector object
850
- connector for storing loaded data
950
+ conn : Connector object, optional
951
+ connector for storing loaded data, default is None which creates a
952
+ DictConnector. This Connector does not store data on disk.
851
953
  storename : str, optional
852
954
  name of the PastaStore, by default None, which
853
955
  defaults to the name of the Connector.
@@ -861,6 +963,9 @@ class PastaStore:
861
963
  """
862
964
  from zipfile import ZipFile
863
965
 
966
+ if conn is None:
967
+ conn = DictConnector("pastas_db")
968
+
864
969
  with ZipFile(fname, "r") as archive:
865
970
  namelist = [
866
971
  fi for fi in archive.namelist() if not fi.endswith("_meta.json")
@@ -868,7 +973,7 @@ class PastaStore:
868
973
  for f in tqdm(namelist, desc="Reading zip") if progressbar else namelist:
869
974
  libname, fjson = os.path.split(f)
870
975
  if libname in ["stresses", "oseries"]:
871
- s = pd.read_json(archive.open(f), orient="columns")
976
+ s = pd.read_json(archive.open(f), dtype=float, orient="columns")
872
977
  if not isinstance(s.index, pd.DatetimeIndex):
873
978
  s.index = pd.to_datetime(s.index, unit="ms")
874
979
  s = s.sort_index()
@@ -886,8 +991,9 @@ class PastaStore:
886
991
  libname: str,
887
992
  s: Optional[Union[list, str]] = None,
888
993
  case_sensitive: bool = True,
994
+ sort=True,
889
995
  ):
890
- """Search for names of time series or models starting with s.
996
+ """Search for names of time series or models starting with `s`.
891
997
 
892
998
  Parameters
893
999
  ----------
@@ -897,6 +1003,8 @@ class PastaStore:
897
1003
  find names with part of this string or strings in list
898
1004
  case_sensitive : bool, optional
899
1005
  whether search should be case sensitive, by default True
1006
+ sort : bool, optional
1007
+ sort list of names
900
1008
 
901
1009
  Returns
902
1010
  -------
@@ -926,7 +1034,8 @@ class PastaStore:
926
1034
  else:
927
1035
  m = np.append(m, [n for n in lib_names if sub.lower() in n.lower()])
928
1036
  matches = list(np.unique(m))
929
-
1037
+ if sort:
1038
+ matches.sort()
930
1039
  return matches
931
1040
 
932
1041
  def get_model_timeseries_names(
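For example (the search string below is hypothetical):

    # case-insensitive match, returned as a sorted list by default
    names = pstore.search("oseries", s="well", case_sensitive=False)
    # pass sort=False to skip the final sort
    names = pstore.search("oseries", s="well", case_sensitive=False, sort=False)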
@@ -983,9 +1092,75 @@ class PastaStore:
                      structure.loc[mlnam, pnam] = 1
                      structure.loc[mlnam, enam] = 1
                  elif "stress" in sm:
-                     for s in sm["stress"]:
+                     smstress = sm["stress"]
+                     if isinstance(smstress, dict):
+                         smstress = [smstress]
+                     for s in smstress:
                          structure.loc[mlnam, s["name"]] = 1
          if dropna:
              return structure.dropna(how="all", axis=1)
          else:
              return structure
+
+     def apply(self, libname, func, names=None, progressbar=True):
+         """Apply function to items in library.
+
+         Supported libraries are oseries, stresses, and models.
+
+         Parameters
+         ----------
+         libname : str
+             library name, supports "oseries", "stresses" and "models"
+         func : callable
+             function that accepts items from one of the supported libraries as input
+         names : str, list of str, optional
+             apply function to these names, by default None which loops over all stored
+             items in library
+         progressbar : bool, optional
+             show progressbar, by default True
+
+         Returns
+         -------
+         dict
+             dict of results of func, with names as keys and results as values
+         """
+         names = self.conn._parse_names(names, libname)
+         result = {}
+         if libname not in ("oseries", "stresses", "models"):
+             raise ValueError(
+                 "'libname' must be one of ['oseries', 'stresses', 'models']!"
+             )
+         getter = getattr(self.conn, f"get_{libname}")
+         for n in (
+             tqdm(names, desc=f"Applying {func.__name__}") if progressbar else names
+         ):
+             result[n] = func(getter(n))
+         return result
+
+     def within(self, extent, names=None, libname="oseries"):
+         xmin, xmax, ymin, ymax = extent
+         names = self.conn._parse_names(names, libname)
+         if libname == "oseries":
+             df = self.oseries.loc[names]
+         elif libname == "stresses":
+             df = self.stresses.loc[names]
+         elif libname == "models":
+             onames = np.unique(
+                 [
+                     self.get_models(modelname, return_dict=True)["oseries"]["name"]
+                     for modelname in names
+                 ]
+             )
+             df = self.oseries.loc[onames]
+         else:
+             raise ValueError(
+                 "libname must be one of ['oseries', 'stresses', 'models']"
+                 f", got '{libname}'"
+             )
+         mask = (
+             (df["x"] <= xmax)
+             & (df["x"] >= xmin)
+             & (df["y"] >= ymin)
+             & (df["y"] <= ymax)
+         )
+         return df.loc[mask].index.tolist()
1
+ import matplotlib as mpl
2
+ import matplotlib.pyplot as plt
3
+ import numpy as np
4
+
5
+
6
+ def float_styler(val, norm, cmap=None):
7
+ """Style float values in DataFrame.
8
+
9
+ Parameters
10
+ ----------
11
+ val : float
12
+ value in cell
13
+ norm : matplotlib.colors.Normalize
14
+ normalizer to map values to range(0, 1)
15
+ cmap : colormap, optional
16
+ colormap to use, by default None, which uses RdYlBu
17
+
18
+ Returns
19
+ -------
20
+ str
21
+ css value pairs for styling dataframe
22
+
23
+ Usage
24
+ -----
25
+ Given some dataframe
26
+
27
+ >>> df.map(float_styler, subset=["some column"], norm=norm, cmap=cmap)
28
+
29
+ """
30
+ if cmap is None:
31
+ cmap = plt.get_cmap("RdYlBu")
32
+ bg = cmap(norm(val))
33
+ color = mpl.colors.rgb2hex(bg)
34
+ c = "White" if np.mean(bg[:3]) < 0.4 else "Black"
35
+ return f"background-color: {color}; color: {c}"
36
+
37
+
38
+ def boolean_styler(b):
39
+ """Style boolean values in DataFrame.
40
+
41
+ Parameters
42
+ ----------
43
+ b : bool
44
+ value in cell
45
+
46
+ Returns
47
+ -------
48
+ str
49
+ css value pairs for styling dataframe
50
+
51
+ Usage
52
+ -----
53
+ Given some dataframe
54
+
55
+ >>> df.map(boolean_styler, subset=["some column"])
56
+ """
57
+ if b:
58
+ return (
59
+ f"background-color: {mpl.colors.rgb2hex((231/255, 255/255, 239/255))}; "
60
+ "color: darkgreen"
61
+ )
62
+ else:
63
+ return (
64
+ f"background-color: {mpl.colors.rgb2hex((255/255, 238/255, 238/255))}; "
65
+ "color: darkred"
66
+ )
@@ -6,9 +6,9 @@ import pandas as pd
6
6
  from numpy.lib._iotools import NameValidator
7
7
  from pandas.testing import assert_series_equal
8
8
  from pastas.stats.tests import runs_test, stoffer_toloi
9
- from tqdm import tqdm
9
+ from tqdm.auto import tqdm
10
10
 
11
- from .version import PASTAS_LEQ_022
11
+ from pastastore.version import PASTAS_LEQ_022
12
12
 
13
13
 
14
14
  def _custom_warning(message, category=UserWarning, filename="", lineno=-1, *args):
@@ -335,9 +335,11 @@ def compare_models(ml1, ml2, stats=None, detailed_comparison=False):
335
335
  try:
336
336
  assert_series_equal(
337
337
  oso,
338
- ml.oseries.series_original
339
- if PASTAS_LEQ_022
340
- else ml.oseries._series_original,
338
+ (
339
+ ml.oseries.series_original
340
+ if PASTAS_LEQ_022
341
+ else ml.oseries._series_original
342
+ ),
341
343
  )
342
344
  compare_oso = True
343
345
  except (ValueError, AssertionError):
@@ -378,9 +380,9 @@ def compare_models(ml1, ml2, stats=None, detailed_comparison=False):
378
380
  for ts in stresses:
379
381
  df.loc[f"- time series: '{ts.name}'"] = ts.name
380
382
  for tsk in ts.settings.keys():
381
- df.loc[
382
- f" - {ts.name} settings: {tsk}", f"model {i}"
383
- ] = ts.settings[tsk]
383
+ df.loc[f" - {ts.name} settings: {tsk}", f"model {i}"] = (
384
+ ts.settings[tsk]
385
+ )
384
386
 
385
387
  if i == 0:
386
388
  if PASTAS_LEQ_022:
@@ -403,9 +405,11 @@ def compare_models(ml1, ml2, stats=None, detailed_comparison=False):
403
405
  try:
404
406
  assert_series_equal(
405
407
  so1[counter],
406
- ts.series_original
407
- if PASTAS_LEQ_022
408
- else ts._series_original,
408
+ (
409
+ ts.series_original
410
+ if PASTAS_LEQ_022
411
+ else ts._series_original
412
+ ),
409
413
  )
410
414
  compare_so1 = True
411
415
  except (ValueError, AssertionError):
@@ -703,7 +707,9 @@ def frontiers_checks(
703
707
  check_tmem_passed,
704
708
  )
705
709
  else:
706
- tmem = ml.get_response_tmax(sm_name)
710
+ tmem = ml.get_response_tmax(sm_name, cutoff=check3_cutoff)
711
+ if tmem is None: # no rfunc in stressmodel
712
+ tmem = 0
707
713
  check_tmem_passed = tmem < len_oseries_calib / 2
708
714
  checks.loc[f"calib_period > 2*t_mem_95%: {sm_name}", :] = (
709
715
  tmem,
@@ -736,23 +742,30 @@ def frontiers_checks(
736
742
  "(unit head)/(unit well stress)",
737
743
  check_gain_passed,
738
744
  )
745
+ continue
746
+ elif sm._name == "LinearTrend":
747
+ gain = ml.parameters.loc[f"{sm_name}_a", "optimal"]
748
+ gain_std = ml.parameters.loc[f"{sm_name}_a", "stderr"]
749
+ elif sm._name == "StepModel":
750
+ gain = ml.parameters.loc[f"{sm_name}_d", "optimal"]
751
+ gain_std = ml.parameters.loc[f"{sm_name}_d", "stderr"]
739
752
  else:
740
753
  gain = ml.parameters.loc[f"{sm_name}_A", "optimal"]
741
754
  gain_std = ml.parameters.loc[f"{sm_name}_A", "stderr"]
742
- if gain_std is None:
743
- gain_std = np.nan
744
- check_gain_passed = pd.NA
745
- elif np.isnan(gain_std):
746
- check_gain_passed = pd.NA
747
- else:
748
- check_gain_passed = np.abs(gain) > 2 * gain_std
755
+
756
+ if gain_std is None:
757
+ gain_std = np.nan
758
+ check_gain_passed = pd.NA
759
+ elif np.isnan(gain_std):
760
+ check_gain_passed = pd.NA
761
+ else:
749
762
  check_gain_passed = np.abs(gain) > 2 * gain_std
750
- checks.loc[f"gain > 2*std: {sm_name}", :] = (
751
- gain,
752
- 2 * gain_std,
753
- "(unit head)/(unit well stress)",
754
- check_gain_passed,
755
- )
763
+ checks.loc[f"gain > 2*std: {sm_name}", :] = (
764
+ gain,
765
+ 2 * gain_std,
766
+ "(unit head)/(unit well stress)",
767
+ check_gain_passed,
768
+ )
756
769
 
757
770
  # Check 5 - Parameter Bounds
758
771
  if check5_parambounds:
{pastastore-1.2.2 → pastastore-1.4.0}/pastastore/version.py

@@ -4,4 +4,4 @@ from packaging.version import parse as parse_version
  PASTAS_VERSION = parse_version(ps.__version__)
  PASTAS_LEQ_022 = PASTAS_VERSION <= parse_version("0.22.0")

- __version__ = "1.2.2"
+ __version__ = "1.4.0"
{pastastore-1.2.2 → pastastore-1.4.0}/pastastore/yaml_interface.py

@@ -2,14 +2,14 @@ import datetime
  import logging
  import os
  from copy import deepcopy
- from typing import Dict, List, Optional, Union
+ from typing import Any, Dict, List, Optional, Union

  import numpy as np
  import pandas as pd
  import pastas as ps
  import yaml

- from .version import PASTAS_LEQ_022
+ from pastastore.version import PASTAS_LEQ_022

  ps.logger.setLevel("ERROR")

@@ -17,7 +17,7 @@ logging.basicConfig(level="INFO")
  logger = logging.getLogger(__name__)


- def _convert_dict_dtypes_for_yaml(d: Dict):
+ def _convert_dict_dtypes_for_yaml(d: Dict[str, Any]):
      """Internal method to convert dictionary values for storing in YAML format.

      Parameters
{pastastore-1.2.2 → pastastore-1.4.0}/pastastore.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: pastastore
- Version: 1.2.2
+ Version: 1.4.0
  Summary: Tools for managing Pastas time series models.
  Author: D.A. Brakenhoff
  Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
@@ -49,15 +49,47 @@ Classifier: Programming Language :: Python :: 3 :: Only
  Classifier: Topic :: Scientific/Engineering :: Hydrology
  Requires-Python: >=3.7
  Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: pastas>=0.13
+ Requires-Dist: tqdm>=4.36
+ Requires-Dist: pyyaml
  Provides-Extra: full
+ Requires-Dist: pastastore[arcticdb,optional]; extra == "full"
  Provides-Extra: lint
+ Requires-Dist: black; extra == "lint"
+ Requires-Dist: flake8; extra == "lint"
+ Requires-Dist: isort; extra == "lint"
  Provides-Extra: optional
+ Requires-Dist: contextily; extra == "optional"
+ Requires-Dist: pyproj; extra == "optional"
+ Requires-Dist: adjustText; extra == "optional"
  Provides-Extra: test
+ Requires-Dist: pastastore[arcticdb,lint,optional]; extra == "test"
+ Requires-Dist: hydropandas[full]; extra == "test"
+ Requires-Dist: coverage; extra == "test"
+ Requires-Dist: codecov; extra == "test"
+ Requires-Dist: pytest; extra == "test"
+ Requires-Dist: pytest-cov; extra == "test"
+ Requires-Dist: pytest-dependency; extra == "test"
+ Requires-Dist: pytest-benchmark; extra == "test"
+ Requires-Dist: codacy-coverage; extra == "test"
+ Requires-Dist: lxml; extra == "test"
  Provides-Extra: pystore
+ Requires-Dist: fsspec>=0.3.3; extra == "pystore"
+ Requires-Dist: python-snappy; extra == "pystore"
+ Requires-Dist: dask[dataframe]; extra == "pystore"
  Provides-Extra: arctic
+ Requires-Dist: arctic; extra == "arctic"
  Provides-Extra: arcticdb
+ Requires-Dist: arcticdb; extra == "arcticdb"
+ Requires-Dist: protobuf~=4.0; extra == "arcticdb"
  Provides-Extra: docs
- License-File: LICENSE
+ Requires-Dist: pastastore[optional]; extra == "docs"
+ Requires-Dist: sphinx_rtd_theme; extra == "docs"
+ Requires-Dist: Ipython; extra == "docs"
+ Requires-Dist: ipykernel; extra == "docs"
+ Requires-Dist: nbsphinx; extra == "docs"
+ Requires-Dist: nbsphinx_link; extra == "docs"

  ![pastastore](https://github.com/pastas/pastastore/workflows/pastastore/badge.svg)
  [![Documentation Status](https://readthedocs.org/projects/pastastore/badge/?version=latest)](https://pastastore.readthedocs.io/en/latest/?badge=latest)
{pastastore-1.2.2 → pastastore-1.4.0}/pastastore.egg-info/SOURCES.txt

@@ -7,6 +7,7 @@ pastastore/connectors.py
  pastastore/datasets.py
  pastastore/plotting.py
  pastastore/store.py
+ pastastore/styling.py
  pastastore/util.py
  pastastore/version.py
  pastastore/yaml_interface.py
{pastastore-1.2.2 → pastastore-1.4.0}/pastastore.egg-info/requires.txt

@@ -7,6 +7,7 @@ arctic

  [arcticdb]
  arcticdb
+ protobuf~=4.0

  [docs]
  pastastore[optional]
{pastastore-1.2.2 → pastastore-1.4.0}/pyproject.toml

@@ -58,13 +58,16 @@ test = [
      "pytest-dependency",
      "pytest-benchmark",
      "codacy-coverage",
-     "lxml", # temporary fix: for hydropandas 0.8.0
+     "lxml",  # temporary fix: for hydropandas 0.8.0
  ]
  pystore = ["fsspec>=0.3.3", "python-snappy", "dask[dataframe]"]
  arctic = [
      "arctic",  # will not work as releases not uploaded to PyPI
  ]
- arcticdb = ["arcticdb"]
+ arcticdb = [
+     "arcticdb",
+     "protobuf~=4.0",
+ ]  # TODO: temporarily set protobuf to version 4
  docs = [
      "pastastore[optional]",
      "sphinx_rtd_theme",
{pastastore-1.2.2 → pastastore-1.4.0}/tests/test_001_import.py

@@ -4,4 +4,4 @@ import warnings
  def test_import():
      with warnings.catch_warnings():
          warnings.simplefilter(action="ignore", category=FutureWarning)
-         import pastastore
+         import pastastore  # noqa: F401
{pastastore-1.2.2 → pastastore-1.4.0}/tests/test_002_connectors.py

@@ -14,7 +14,7 @@ ps.set_log_level("ERROR")


  def test_get_library(conn):
-     olib = conn._get_library("oseries")
+     _ = conn._get_library("oseries")


  def test_add_get_series(request, conn):
@@ -206,13 +206,13 @@ def test_add_stress(conn):
  @pytest.mark.dependency()
  def test_get_oseries(request, conn):
      depends(request, [f"test_add_oseries[{conn.type}]"])
-     o = conn.get_oseries("oseries1")
+     _ = conn.get_oseries("oseries1")


  @pytest.mark.dependency()
  def test_get_oseries_and_metadata(request, conn):
      depends(request, [f"test_add_oseries[{conn.type}]"])
-     o, m = conn.get_oseries("oseries1", return_metadata=True)
+     _ = conn.get_oseries("oseries1", return_metadata=True)


  @pytest.mark.dependency()
@@ -225,7 +225,7 @@ def test_get_stress(request, conn):
  @pytest.mark.dependency()
  def test_get_stress_and_metadata(request, conn):
      depends(request, [f"test_add_stress[{conn.type}]"])
-     s, m = conn.get_stresses("prec", return_metadata=True)
+     s, _ = conn.get_stresses("prec", return_metadata=True)
      s.name = "prec"
{pastastore-1.2.2 → pastastore-1.4.0}/tests/test_003_pastastore.py

@@ -1,5 +1,4 @@
  import os
- import warnings

  import numpy as np
  import pandas as pd
@@ -9,9 +8,7 @@ from numpy import allclose
  from packaging.version import parse
  from pytest_dependency import depends

- with warnings.catch_warnings():
-     warnings.simplefilter(action="ignore", category=FutureWarning)
-     import pastastore as pst
+ import pastastore as pst


  @pytest.mark.dependency()
@@ -39,7 +36,7 @@ def test_search(pstore):

  @pytest.mark.dependency()
  def test_create_model(pstore):
-     ml = pstore.create_model("oseries1")
+     _ = pstore.create_model("oseries1")


  @pytest.mark.dependency()
@@ -139,7 +136,7 @@ def test_get_model(request, pstore):
              f"test_store_model_missing_series[{pstore.type}]",
          ],
      )
-     ml = pstore.conn.get_models("oseries1")
+     _ = pstore.conn.get_models("oseries1")


@@ -158,7 +155,7 @@ def test_del_model(request, pstore):

  @pytest.mark.dependency()
  def test_create_models(pstore):
-     mls = pstore.create_models_bulk(
+     _ = pstore.create_models_bulk(
          ["oseries1", "oseries2"], store=True, progressbar=False
      )
      _ = pstore.conn.models
@@ -172,6 +169,13 @@ def test_get_parameters(request, pstore):
      assert p.isna().sum().sum() == 0


+ @pytest.mark.dependency()
+ def test_get_signatures(request, pstore):
+     depends(request, [f"test_create_models[{pstore.type}]"])
+     s = pstore.get_signatures(progressbar=False)
+     assert s.shape[1] == len(ps.stats.signatures.__all__)
+
+
  @pytest.mark.dependency()
  def test_iter_models(request, pstore):
      depends(request, [f"test_create_models[{pstore.type}]"])
@@ -181,13 +185,24 @@ def test_iter_models(request, pstore):
  @pytest.mark.dependency()
  def test_solve_models_and_get_stats(request, pstore):
      depends(request, [f"test_create_models[{pstore.type}]"])
-     mls = pstore.solve_models(
+     _ = pstore.solve_models(
          ignore_solve_errors=False, progressbar=False, store_result=True
      )
      stats = pstore.get_statistics(["evp", "aic"], progressbar=False)
      assert stats.index.size == 2


+ @pytest.mark.dependency()
+ def test_apply(request, pstore):
+     depends(request, [f"test_solve_models_and_get_stats[{pstore.type}]"])
+
+     def func(ml):
+         return ml.parameters.loc["recharge_A", "optimal"]
+
+     result = pstore.apply("models", func)
+     assert len(result) == 2
+
+
  @pytest.mark.dependency()
  def test_save_and_load_model(request, pstore):
      ml = pstore.create_model("oseries2")
{pastastore-1.2.2 → pastastore-1.4.0}/tests/test_004_yaml.py

@@ -6,7 +6,6 @@ import pytest
  from pytest_dependency import depends

  import pastastore as pst
- from pastastore.version import PASTAS_LEQ_022


  @contextmanager
{pastastore-1.2.2 → pastastore-1.4.0}/tests/test_006_benchmark.py

@@ -9,7 +9,7 @@ import pastastore as pst

  # data
  data = np.random.random_sample(int(1e5))
- s = pd.Series(index=pd.date_range("1970", periods=1e5, freq="H"), data=data)
+ s = pd.Series(index=pd.date_range("1970", periods=int(1e5), freq="h"), data=data)
  metadata = {"x": 100000.0, "y": 300000.0}
pastastore-1.2.2/pastastore/__init__.py (removed)

@@ -1,10 +0,0 @@
- from . import connectors, util
- from .connectors import (
-     ArcticConnector,
-     ArcticDBConnector,
-     DictConnector,
-     PasConnector,
-     PystoreConnector,
- )
- from .store import PastaStore
- from .version import __version__