pastastore 1.7.0__py3-none-any.whl → 1.7.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pastastore/version.py CHANGED
@@ -9,7 +9,7 @@ PASTAS_VERSION = parse_version(ps.__version__)
9
9
  PASTAS_LEQ_022 = PASTAS_VERSION <= parse_version("0.22.0")
10
10
  PASTAS_GEQ_150 = PASTAS_VERSION >= parse_version("1.5.0")
11
11
 
12
- __version__ = "1.7.0"
12
+ __version__ = "1.7.2"
13
13
 
14
14
 
15
15
  def show_versions(optional=False) -> None:
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: pastastore
3
- Version: 1.7.0
3
+ Version: 1.7.2
4
4
  Summary: Tools for managing Pastas time series models.
5
5
  Author: D.A. Brakenhoff
6
6
  Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
@@ -0,0 +1,28 @@
1
+ docs/conf.py,sha256=XcZUTmn9fGDhhu8k3mpaLu435SpIRNpABADCCTJJuag,6291
2
+ pastastore/__init__.py,sha256=cWwG9-YeiI4aOU0CDBGKbQgmKmmkcPd64YwPq2rRGt0,416
3
+ pastastore/base.py,sha256=gngnJOL4b4TNgPysRsxdyeg726ZNV2anQviNr5nnrqs,69101
4
+ pastastore/connectors.py,sha256=YK3I_Jb2uNwzBQvN2VwZvmTRfPeUETW-4ddcFSWkHVw,16820
5
+ pastastore/datasets.py,sha256=FHVfmKqb8beEs9NONsWrCoJY37BmlvFLSEQ1VAFmE8A,6415
6
+ pastastore/plotting.py,sha256=t6gEeHVGzrwvM6q1l8V3OkklpU75O2Y4h6nKEHRWdjo,46416
7
+ pastastore/store.py,sha256=xbv1prv6QqYj8M-2c77CT0ZQejjmNSldpuqu_M4WxoU,60906
8
+ pastastore/styling.py,sha256=4xAY0FmhKrvmAGIuoMM7Uucww_X4KAxTpEoHlsxMldc,2280
9
+ pastastore/util.py,sha256=iXHoGHfK6VDbUpufNsnzdV71oBVp-koZUD4VJj6MOwo,28250
10
+ pastastore/version.py,sha256=CNSMFFOsy1MPe400qfRtqb8M-LE8iUmm7u0QSZZUOfE,1205
11
+ pastastore/yaml_interface.py,sha256=MddELxWe8_aqJRMUydOCbjoU1-ZodzxFKYnAaqJ5SqA,29947
12
+ pastastore/extensions/__init__.py,sha256=lCN9xfX1qefUzUbE2FQ12c6NjLbf5HoNo-D8cGb5CTw,461
13
+ pastastore/extensions/accessor.py,sha256=kftQM6dqMDoySbyTKcvmkjC5gJRp465KA18G4NVXUO0,367
14
+ pastastore/extensions/hpd.py,sha256=w8_13Y_1bzgxCT8RJ2L6D_eTpYGpniSbxA4CoUw_CgI,27464
15
+ tests/conftest.py,sha256=u097z7LGAnviuzXPzvER9oPjsZWqdij1CJLnW_sPY8E,5258
16
+ tests/test_001_import.py,sha256=g8AaJzWZ088A4B30_w-MrDfAVeeg8m78l--j7Onsklc,208
17
+ tests/test_002_connectors.py,sha256=k9etSRuSFVOrSEtZyxqsCF9GwIg0T7VdDJ2SjSe6i_s,7742
18
+ tests/test_003_pastastore.py,sha256=uVGqM0RwYqnt2dxlj3i3YO7lG0LAfeg8gSG2QLm4lTI,9320
19
+ tests/test_004_yaml.py,sha256=3hMNjb9s0S2rbmpyEjW6FDRAxfUZS_U1qoPl4wB-cCo,4440
20
+ tests/test_005_maps_plots.py,sha256=L0ppGf-cudsrdxteWy3qsV4We96DW4bCBE7c6jEm6aM,1866
21
+ tests/test_006_benchmark.py,sha256=yuExF35qqxhw04uYMH3OIOlGr71c4AJSJDMjGD8GefY,4983
22
+ tests/test_007_hpdextension.py,sha256=ZYrJ16hNE2Cy6gucMW_0-NoCmlimsPQ5oNA5H67HGBg,2937
23
+ tests/test_008_stressmodels.py,sha256=733fyCvuzjKcaLjvSMt5dTTLp-T4alzNJAToSxTIUug,4003
24
+ pastastore-1.7.2.dist-info/LICENSE,sha256=DtHftfUEm99KzgwLr3rQUTg8H3kAS0Z-p5WWJgLf_OY,1082
25
+ pastastore-1.7.2.dist-info/METADATA,sha256=j_lnxfuDY6aLfSjm71PVA79hUCn18AuIxGycHUhXAcc,8021
26
+ pastastore-1.7.2.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
27
+ pastastore-1.7.2.dist-info/top_level.txt,sha256=1bgyMk1p23f04RK83Jju2_YAQBwyoQD_fInxoPB4YRw,22
28
+ pastastore-1.7.2.dist-info/RECORD,,
@@ -1 +1,3 @@
1
+ docs
1
2
  pastastore
3
+ tests
tests/conftest.py ADDED
@@ -0,0 +1,169 @@
1
+ # ruff: noqa: D100 D103
2
+ import importlib
3
+ from importlib import metadata
4
+ from platform import python_version
5
+
6
+ import pandas as pd
7
+ import pastas as ps
8
+ import pytest
9
+ from packaging.version import parse as parse_version
10
+
11
+ import pastastore as pst
12
+
13
# True when running on Python 3.12 or newer.
IS_PY312 = parse_version(python_version()) >= parse_version("3.12.0")

# Connector backends to parametrize tests over; "arcticdb" is excluded on
# 3.12+ — presumably because arcticdb lacked 3.12 support at release time
# (NOTE(review): confirm this is still necessary).
params = ["dict", "pas", "arcticdb"] if not IS_PY312 else ["dict", "pas"]
16
+
17
+
18
def initialize_project(conn):
    """Build a PastaStore populated with test observation series and stresses.

    Parameters
    ----------
    conn : Connector
        pastastore connector to wrap in a PastaStore.

    Returns
    -------
    pst.PastaStore
        Store named "test_project" holding 3 oseries, 2 precipitation,
        2 evaporation and 2 well stresses read from ./tests/data.
    """
    pstore = pst.PastaStore(conn, "test_project")

    # oseries 1
    o = pd.read_csv("./tests/data/obs.csv", index_col=0, parse_dates=True)
    pstore.add_oseries(o, "oseries1", metadata={"x": 165000, "y": 424000})

    # oseries 2
    o = pd.read_csv("./tests/data/head_nb1.csv", index_col=0, parse_dates=True)
    pstore.add_oseries(o, "oseries2", metadata={"x": 164000, "y": 423000})

    # oseries 3
    o = pd.read_csv("./tests/data/gw_obs.csv", index_col=0, parse_dates=True)
    pstore.add_oseries(o, "oseries3", metadata={"x": 165554, "y": 422685})

    # prec 1
    s = pd.read_csv("./tests/data/rain.csv", index_col=0, parse_dates=True)
    pstore.add_stress(s, "prec1", kind="prec", metadata={"x": 165050, "y": 424050})

    # prec 2
    s = pd.read_csv("./tests/data/rain_nb1.csv", index_col=0, parse_dates=True)
    pstore.add_stress(s, "prec2", kind="prec", metadata={"x": 164010, "y": 423000})

    # evap 1
    s = pd.read_csv("./tests/data/evap.csv", index_col=0, parse_dates=True)
    pstore.add_stress(s, "evap1", kind="evap", metadata={"x": 164500, "y": 424000})

    # evap 2
    s = pd.read_csv("./tests/data/evap_nb1.csv", index_col=0, parse_dates=True)
    pstore.add_stress(s, "evap2", kind="evap", metadata={"x": 164000, "y": 423030})

    # well 1: resample month-end values to a daily index
    s = pd.read_csv("./tests/data/well_month_end.csv", index_col=0, parse_dates=True)
    try:
        s = ps.ts.timestep_weighted_resample(
            s,
            pd.date_range(s.index[0] - pd.offsets.MonthBegin(), s.index[-1], freq="D"),
        ).bfill()
    except AttributeError:
        # ps.ts.timestep_weighted_resample not available for pastas<=0.22.0
        pass
    pstore.add_stress(s, "well1", kind="well", metadata={"x": 164691, "y": 423579})
    # add second well, offset 200 m from well1.
    # FIX: original wrote 423579_200 — an underscore-separated integer
    # literal equal to 423579200 — where 423579 + 200 was clearly intended
    # (mirroring "x": 164691 + 200).
    pstore.add_stress(
        s + 10, "well2", kind="well", metadata={"x": 164691 + 200, "y": 423579 + 200}
    )

    return pstore
66
+
67
+
68
@pytest.fixture(scope="module", params=params)
def conn(request):
    """Fixture that yields connection object."""
    name = f"test_{request.param}"
    # pick the connector implementation matching the fixture parameter
    if request.param == "arcticdb":
        connector = pst.ArcticDBConnector(name, "lmdb://./arctic_db/")
    elif request.param == "dict":
        connector = pst.DictConnector(name)
    elif request.param == "pas":
        connector = pst.PasConnector(name, "./tests/data")
    else:
        raise ValueError("Unrecognized parameter!")
    # record the parameter so dependency-marked tests can reference it by name
    connector.type = request.param
    return connector
84
+
85
+
86
@pytest.fixture(scope="module", params=params)
def pstore(request):
    """Yield a populated PastaStore for each backend; delete it on teardown."""
    name = "test_project"
    if request.param == "arcticdb":
        connector = pst.ArcticDBConnector(name, "lmdb://./arctic_db/")
    elif request.param == "dict":
        connector = pst.DictConnector(name)
    elif request.param == "pas":
        connector = pst.PasConnector(name, "./tests/data/pas")
    else:
        raise ValueError("Unrecognized parameter!")
    store = initialize_project(connector)
    store.type = request.param  # added here for defining test dependencies
    yield store
    # teardown: remove the store's backing storage after the module finishes
    pst.util.delete_pastastore(store)
104
+
105
+
106
def delete_arcticdb_test_db():
    """Remove the on-disk ArcticDB test database used by the fixtures."""
    connector = pst.ArcticDBConnector("test_project", "lmdb://./arctic_db/")
    pst.util.delete_arcticdb_connector(connector)
    print("ArcticDBConnector 'test_project' deleted.")
112
+
113
+
114
# Cache of previous has_pkg() results, keyed by package name.
_has_pkg_cache = {}


def has_pkg(pkg: str, strict: bool = True) -> bool:
    """
    Determine if the given Python package is installed.

    Parameters
    ----------
    pkg : str
        Name of the package to check.
    strict : bool
        If False, only check if package metadata is available.
        If True, try to import the package (all dependencies must be present).

    Returns
    -------
    bool
        True if the package is installed, otherwise False.

    Notes
    -----
    Originally written by Mike Toews (mwtoews@gmail.com) for FloPy.
    """

    def try_import():
        try:  # import name, e.g. "import shapefile"
            importlib.import_module(pkg)
            return True
        except ModuleNotFoundError:
            return False

    def try_metadata() -> bool:
        try:  # package name, e.g. pyshp
            metadata.distribution(pkg)
            return True
        except metadata.PackageNotFoundError:
            return False

    found = False
    if not strict:
        # FIX: use the cached *value*. The original tested
        # `pkg in _has_pkg_cache`, which treated a cached False
        # (package known to be missing) as "found".
        found = _has_pkg_cache.get(pkg, False) or try_metadata()
    if not found:
        found = try_import()
    _has_pkg_cache[pkg] = found

    return _has_pkg_cache[pkg]
161
+
162
+
163
def requires_pkg(*pkgs):
    """Return a pytest skipif marker that skips when any package is missing.

    Parameters
    ----------
    *pkgs : str
        Names of the packages required by the test.

    Returns
    -------
    pytest.MarkDecorator
        skipif marker; truthy `missing` set triggers the skip.
    """
    missing = {pkg for pkg in pkgs if not has_pkg(pkg, strict=True)}
    return pytest.mark.skipif(
        missing,
        reason=f"missing package{'s' if len(missing) != 1 else ''}: "
        # FIX: sort the set so the skip reason is deterministic across runs
        + ", ".join(sorted(missing)),
    )
@@ -0,0 +1,8 @@
1
+ # ruff: noqa: D100 D103
2
+ import warnings
3
+
4
+
5
def test_import():
    """pastastore imports cleanly with dependency FutureWarnings suppressed."""
    with warnings.catch_warnings():
        warnings.simplefilter(action="ignore", category=FutureWarning)
        import pastastore  # noqa: F401
@@ -0,0 +1,277 @@
1
+ # ruff: noqa: D100 D103
2
+ import warnings
3
+
4
+ import numpy as np
5
+ import pandas as pd
6
+ import pastas as ps
7
+ import pytest
8
+ from pytest_dependency import depends
9
+
10
# Importing pastastore can emit FutureWarnings from its dependencies;
# silence them so collection output stays clean.
with warnings.catch_warnings():
    warnings.simplefilter(action="ignore", category=FutureWarning)
    import pastastore as pst

# Keep pastas quiet during the test run.
ps.set_log_level("ERROR")
15
+
16
+
17
def test_get_library(conn):
    """Connector exposes a handle to its internal 'oseries' library."""
    _ = conn._get_library("oseries")
19
+
20
+
21
def test_add_get_series(request, conn):
    """A pd.Series round-trips through the connector unchanged."""
    idx = pd.date_range("2000", periods=10, freq="D")
    series = pd.Series(data=0.0, index=idx, name="test_series")
    conn.add_oseries(series, "test_series", metadata=None)
    stored = conn.get_oseries("test_series")
    try:
        assert isinstance(stored, pd.Series)
        assert series.equals(stored)
        assert series.dtype == stored.dtype
    finally:
        # clean up so later tests start from an empty oseries library
        conn.del_oseries("test_series")
35
+
36
+
37
def test_add_get_series_wnans(request, conn):
    """A series containing NaNs survives a store/load round trip."""
    series = pd.Series(
        data=1.0,
        index=pd.date_range("2000", periods=10, freq="D"),
        dtype=np.float64,
        name="test_series_nans",
    )
    series.iloc[-3:] = np.nan  # trailing NaNs must be preserved, not dropped
    conn.add_oseries(series, "test_series_nans", metadata=None)
    stored = conn.get_oseries("test_series_nans")
    try:
        assert isinstance(stored, pd.Series)
        assert series.equals(stored)
    finally:
        conn.del_oseries("test_series_nans")
52
+
53
+
54
def test_add_get_dataframe(request, conn):
    """A single-column DataFrame round-trips through the connector."""
    frame = pd.DataFrame(
        data=1.0,
        columns=["test_df"],
        index=pd.date_range("2000", periods=10, freq="D"),
    )
    frame.index.name = "test_idx"
    conn.add_oseries(frame, "test_df", metadata=None)
    stored = conn.get_oseries("test_df")
    # little hack as PasConnector does preserve DataFrames after load...
    if conn.conn_type == "pas":
        stored = stored.to_frame()
    try:
        assert isinstance(stored, pd.DataFrame)
        assert frame.equals(stored)
    finally:
        conn.del_oseries("test_df")
71
+
72
+
73
def test_add_pastas_timeseries(request, conn):
    """Adding a pastas TimeSeries object triggers the deprecation path.

    NOTE(review): ``ps.timeseries.TimeSeries`` only exists on older pastas
    versions; on newer pastas the constructor call itself would raise an
    uncaught AttributeError — confirm intended version coverage.
    """
    o1 = pd.DataFrame(
        data=1.0,
        columns=["test_df"],
        index=pd.date_range("2000", periods=10, freq="D"),
    )
    o1.index.name = "test_idx"
    ts = ps.timeseries.TimeSeries(o1, metadata={"x": 100000.0, "y": 400000.0})
    try:
        conn.add_oseries(ts, "test_pastas_ts", metadata=None)
    except DeprecationWarning:
        # add_oseries is expected to reject TimeSeries with a DeprecationWarning
        pass
85
+
86
+
87
def test_update_series(request, conn):
    """update_oseries overwrites the overlapping tail and appends new dates."""
    base = pd.DataFrame(
        data=1.0,
        columns=["test_df"],
        index=pd.date_range("2000", periods=10, freq="D"),
    )
    base.index.name = "test_idx"
    conn.add_oseries(base, "test_df", metadata={"x": 100000.0})

    update = pd.DataFrame(
        data=2.0,
        columns=["test_df"],
        index=pd.date_range("2000-01-10", periods=2, freq="D"),
    )
    update.index.name = "test_idx"
    conn.update_oseries(update, "test_df", metadata={"x": 200000.0, "y": 400000})

    merged = conn.get_oseries("test_df")
    try:
        # last two values come from the update; one new date was appended
        assert (merged.iloc[-2:] == 2.0).all().all()
        assert merged.index.size == 11
    finally:
        conn.del_oseries("test_df")
108
+
109
+
110
def test_upsert_oseries(request, conn):
    """upsert_oseries inserts a new series, then updates it on a second call."""
    first = pd.DataFrame(
        data=1.0,
        columns=["test_df"],
        index=pd.date_range("2000", periods=10, freq="D"),
    )
    first.index.name = "test_idx"
    conn.upsert_oseries(first, "test_df", metadata={"x": 100000.0})

    second = pd.DataFrame(
        data=2.0,
        columns=["test_df"],
        index=pd.date_range("2000-01-05", periods=10, freq="D"),
    )
    second.index.name = "test_idx"
    conn.upsert_oseries(second, "test_df", metadata={"x": 200000.0, "y": 400000})

    merged = conn.get_oseries("test_df")
    try:
        # overlapping and new dates take the upserted values
        assert (merged.iloc[-10:] == 2.0).all().all()
        assert merged.index.size == 14
    finally:
        conn.del_oseries("test_df")
131
+
132
+
133
def test_upsert_stress(request, conn):
    """upsert_stress updates both stress values and the 'kind' metadata."""
    first = pd.DataFrame(
        data=1.0,
        columns=["test_df"],
        index=pd.date_range("2000", periods=10, freq="D"),
    )
    first.index.name = "test_idx"
    conn.upsert_stress(first, "test_df", kind="useless", metadata={"x": 100000.0})

    second = pd.DataFrame(
        data=2.0,
        columns=["test_df"],
        index=pd.date_range("2000-01-05", periods=10, freq="D"),
    )
    second.index.name = "test_idx"
    conn.upsert_stress(
        second,
        "test_df",
        kind="not useless",
        metadata={"x": 200000.0, "y": 400000},
    )

    merged = conn.get_stresses("test_df")
    try:
        assert (merged.iloc[-10:] == 2.0).all().all()
        assert merged.index.size == 14
        # the second upsert must also overwrite the stress kind
        assert conn.stresses.loc["test_df", "kind"] == "not useless"
    finally:
        conn.del_stress("test_df")
160
+
161
+
162
def test_update_metadata(request, conn):
    """update_metadata merges new keys into the stored oseries metadata."""
    frame = pd.DataFrame(
        data=1.1,
        columns=["test_df"],
        index=pd.date_range("2000", periods=10, freq="D"),
        dtype=float,
    )
    frame.index.name = "test_idx"
    conn.add_oseries(frame, "test_df", metadata={"x": 100000.0})
    conn.update_metadata("oseries", "test_df", {"x": 200000.0, "y": 400000.0})
    meta = conn._get_metadata("oseries", "test_df")
    try:
        assert isinstance(meta, dict)
        assert meta["x"] == 200000.0
        assert meta["y"] == 400000.0
    finally:
        conn.del_oseries("test_df")
179
+
180
+
181
@pytest.mark.dependency
def test_add_oseries(conn):
    """Add 'oseries1' from csv; later dependency-marked tests rely on it."""
    o = pd.read_csv("./tests/data/obs.csv", index_col=0, parse_dates=True)
    conn.add_oseries(
        o,
        "oseries1",
        metadata={"name": "oseries1", "x": 100000, "y": 400000},
        overwrite=True,  # allow rerun within the same module-scoped connector
    )
190
+
191
+
192
@pytest.mark.dependency
def test_add_stress(conn):
    """Add precipitation stress 'prec'; dependency for the stress tests."""
    s = pd.read_csv("./tests/data/rain.csv", index_col=0, parse_dates=True)
    conn.add_stress(
        s,
        "prec",
        kind="prec",
        metadata={"kind": "prec", "x": 100001, "y": 400001},
    )
201
+
202
+
203
@pytest.mark.dependency
def test_get_oseries(request, conn):
    """Retrieve the oseries stored by test_add_oseries."""
    # skip unless test_add_oseries passed for this connector parameter
    depends(request, [f"test_add_oseries[{conn.type}]"])
    _ = conn.get_oseries("oseries1")
207
+
208
+
209
@pytest.mark.dependency
def test_get_oseries_and_metadata(request, conn):
    """Retrieve the stored oseries together with its metadata."""
    depends(request, [f"test_add_oseries[{conn.type}]"])
    _ = conn.get_oseries("oseries1", return_metadata=True)
213
+
214
+
215
@pytest.mark.dependency
def test_get_stress(request, conn):
    """Retrieve the stress stored by test_add_stress."""
    depends(request, [f"test_add_stress[{conn.type}]"])
    s = conn.get_stresses("prec")
    s.name = "prec"
220
+
221
+
222
@pytest.mark.dependency
def test_get_stress_and_metadata(request, conn):
    """Retrieve the stored stress together with its metadata."""
    depends(request, [f"test_add_stress[{conn.type}]"])
    s, _ = conn.get_stresses("prec", return_metadata=True)
    s.name = "prec"
227
+
228
+
229
@pytest.mark.dependency
def test_oseries_prop(request, conn):
    """The connector's .oseries overview property is accessible."""
    depends(request, [f"test_add_oseries[{conn.type}]"])
    _ = conn.oseries
233
+
234
+
235
@pytest.mark.dependency
def test_stresses_prop(request, conn):
    """The connector's .stresses overview property is accessible."""
    depends(request, [f"test_add_stress[{conn.type}]"])
    _ = conn.stresses
239
+
240
+
241
def test_repr(conn):
    """Taking the repr of a connector must not raise."""
    repr(conn)
243
+
244
+
245
@pytest.mark.dependency
def test_del_oseries(request, conn):
    """Delete 'oseries1' that was added earlier in the module."""
    depends(request, [f"test_add_oseries[{conn.type}]"])
    conn.del_oseries("oseries1")
249
+
250
+
251
@pytest.mark.dependency
def test_del_stress(request, conn):
    """Delete the 'prec' stress that was added earlier in the module."""
    depends(request, [f"test_add_stress[{conn.type}]"])
    conn.del_stress("prec")
255
+
256
+
257
@pytest.mark.dependency
def test_empty_library(request, conn):
    """empty_library removes every series from the oseries library."""
    series = pd.Series(
        data=1.0,
        index=pd.date_range("2000", periods=10, freq="D"),
        dtype=np.float64,
        name="test_series",
    )
    conn.add_oseries(series, "test_series", metadata=None)
    conn.empty_library("oseries", prompt=False, progressbar=False)
267
+
268
+
269
@pytest.mark.dependency
def test_delete(request, conn):
    """Tear down the connector's backing storage at the end of the module."""
    # no need to delete dictconnector (in memory)
    if conn.conn_type == "arcticdb":
        # delete a single named library first, then the remaining ones
        pst.util.delete_arcticdb_connector(conn, libraries=["oseries"])
        pst.util.delete_arcticdb_connector(conn)
    elif conn.conn_type == "pas":
        pst.util.delete_pas_connector(conn, libraries=["oseries"])
        pst.util.delete_pas_connector(conn)