pastastore 1.10.2-py3-none-any.whl → 1.12.0-py3-none-any.whl

This diff compares the contents of two publicly available package versions as released to their public registry. It is provided for informational purposes only and reflects the changes between the two versions as published.
@@ -73,7 +73,7 @@ def test_add_get_dataframe(request, conn):
     o1.index.name = "test_idx"
     conn.add_oseries(o1, "test_df", metadata=None)
     o2 = conn.get_oseries("test_df")
-    # little hack as PasConnector does preserve DataFrames after load...
+    # PasConnector does not preserve DataFrames after load, so convert if needed.
     if conn.conn_type == "pas":
         o2 = o2.to_frame()
     try:
@@ -93,10 +93,29 @@ def test_add_pastas_timeseries(request, conn):
     ts = ps.timeseries.TimeSeries(o1, metadata={"x": 100000.0, "y": 400000.0})
     try:
         conn.add_oseries(ts, "test_pastas_ts", metadata=None)
-    except DeprecationWarning:
+    except TypeError:
         pass


+def test_add_series_illegal_filename(request, conn):
+    o1 = pd.Series(
+        index=pd.date_range("2000", periods=10, freq="D"),
+        data=0.0,
+    )
+    o1.name = r"test\series/illegal_chars"
+    conn.add_oseries(o1, o1.name, metadata=None)
+    o2 = conn.get_oseries("testseriesillegal_chars")
+    try:
+        assert isinstance(o2, pd.Series)
+        assert o1.equals(o2)
+    finally:
+        conn.del_oseries("testseriesillegal_chars")
+
+    if conn.conn_type == "pas":
+        with pytest.raises(ValueError, match="cannot end with '_meta'"):
+            conn.add_oseries(o1, "illegal_meta", metadata=None)
+
+
 def test_update_series(request, conn):
     o1 = pd.DataFrame(
         data=1.0,
@@ -281,10 +300,28 @@ def test_empty_library(request, conn):

 @pytest.mark.dependency
 def test_delete(request, conn):
-    # no need to delete dictconnector (in memory)
+    # No need to delete dictconnector (in memory)
     if conn.conn_type == "arcticdb":
         pst.util.delete_arcticdb_connector(conn, libraries=["oseries"])
         pst.util.delete_arcticdb_connector(conn)
     elif conn.conn_type == "pas":
         pst.util.delete_pas_connector(conn, libraries=["oseries"])
         pst.util.delete_pas_connector(conn)
+
+
+def test_new_connector_in_occupied_dir():
+    conn1 = pst.PasConnector("my_db", "./tests/data/pas")
+    with pytest.raises(
+        ValueError, match=f"Directory '{conn1.name}/' in use by another connector type!"
+    ):
+        pst.ArcticDBConnector("my_db", "lmdb://./tests/data/pas")
+
+    pst.util.delete_pas_connector(conn1)
+
+    conn1 = pst.ArcticDBConnector("my_db", "lmdb://./tests/data/arcticdb")
+    with pytest.raises(
+        ValueError, match=f"Directory '{conn1.name}/' in use by another connector type!"
+    ):
+        pst.PasConnector("my_db", "./tests/data/arcticdb")
+
+    pst.util.delete_arcticdb_connector(conn1)
@@ -1,5 +1,5 @@
 # ruff: noqa: D100 D103
-import os
+from pathlib import Path

 import numpy as np
 import pandas as pd
@@ -10,6 +10,7 @@ from packaging.version import parse
 from pytest_dependency import depends

 import pastastore as pst
+from pastastore.util import SeriesUsedByModel


 @pytest.mark.dependency
@@ -30,7 +31,7 @@ def test_get_tmintmax(pstore):
     assert sttt.at["evap2", "tmax"] == pd.Timestamp("2016-11-22")
     ml = pstore.create_model("oseries1")
     ml.solve(report=False)
-    pstore.conn.add_model(ml)
+    pstore.add_model(ml)
     mltt = pstore.get_tmin_tmax("models")
     assert mltt.at["oseries1", "tmax"] == pd.Timestamp("2015-06-28")
     pstore.del_model("oseries1")
@@ -38,9 +39,11 @@

 @pytest.mark.dependency
 def test_search(pstore):
-    results = pstore.search("oseries", "OSER", case_sensitive=False)
+    results = pstore.search("OSER", libname="oseries", case_sensitive=False)
     assert len(results) == 3
     assert len(set(results) - {"oseries1", "oseries2", "oseries3"}) == 0
+    results = pstore.search("oser", libname=None, case_sensitive=True)
+    assert len(results["oseries"]) == 3


 @pytest.mark.dependency
@@ -58,6 +61,8 @@ def test_properties(pstore):
     _ = pstore.oseries
     _ = pstore.stresses
     _ = pstore.models
+    _ = pstore.oseries_models
+    _ = pstore.stresses_models

     try:
         assert pstore.n_oseries == pstore.conn.n_oseries
@@ -71,19 +76,43 @@ def test_properties(pstore):
 def test_store_model(request, pstore):
     depends(request, [f"test_create_model[{pstore.type}]"])
     ml = pstore.create_model("oseries1")
-    pstore.conn.add_model(ml)
+    pstore.add_model(ml)
+
+
+@pytest.mark.dependency
+def test_del_oseries_used_by_model(request, pstore):
+    depends(request, [f"test_store_model[{pstore.type}]"])
+    oseries, ometa = pstore.get_oseries("oseries1", return_metadata=True)
+    with pytest.raises(SeriesUsedByModel):
+        pstore.del_oseries("oseries1")
+    pstore.del_oseries("oseries1", force=True)
+    pstore.add_oseries(oseries, "oseries1", metadata=ometa)
+    pstore.validator.set_protect_series_in_models(False)
+    pstore.del_oseries("oseries1")
+    pstore.add_oseries(oseries, "oseries1", metadata=ometa)
+    pstore.validator.set_protect_series_in_models(True)
+
+
+@pytest.mark.dependency
+def test_del_stress_used_by_model(request, pstore):
+    depends(request, [f"test_store_model[{pstore.type}]"])
+    stress, smeta = pstore.get_stress("prec1", return_metadata=True)
+    with pytest.raises(SeriesUsedByModel):
+        pstore.del_stress("prec1")
+    pstore.del_stress("prec1", force=True)
+    pstore.add_stress(stress, "prec1", kind="prec", metadata=smeta)
+    pstore.validator.set_protect_series_in_models(False)
+    pstore.del_stress("prec1")
+    pstore.add_stress(stress, "prec1", kind="prec", metadata=smeta)
+    pstore.validator.set_protect_series_in_models(True)


 @pytest.mark.dependency
 def test_model_accessor(request, pstore):
     depends(request, [f"test_store_model[{pstore.type}]"])
-    # repr
     pstore.models.__repr__()
-    # getter
     ml = pstore.models["oseries1"]
-    # setter
     pstore.models["oseries1_2"] = ml
-    # iter
     mnames = [ml.name for ml in pstore.models]
     try:
         assert len(mnames) == 2
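Note on the tests added in the hunk above: deleting an oseries or stress that is still referenced by a stored model now raises SeriesUsedByModel, unless the deletion is forced or the protection is switched off via the validator. A minimal sketch of that workflow, assuming the default in-memory store and protection enabled by default; the series name, dummy data, and the add_recharge=False model are illustrative and not taken from this diff:

    import pandas as pd
    import pastastore as pst
    from pastastore.util import SeriesUsedByModel

    pstore = pst.PastaStore()  # no connector given: in-memory store, as in the extension tests below
    head = pd.Series(
        data=0.0,
        index=pd.date_range("2020-01-01", periods=100, freq="D"),
        name="oseries1",
    )
    pstore.add_oseries(head, "oseries1", metadata={"x": 0.0, "y": 0.0})
    ml = pstore.create_model("oseries1", add_recharge=False)  # model without stresses
    pstore.add_model(ml)

    try:
        pstore.del_oseries("oseries1")  # refused: a stored model still references this series
    except SeriesUsedByModel:
        pstore.del_oseries("oseries1", force=True)  # explicit override, as in the tests
    # alternatively, disable the protection store-wide via the validator:
    pstore.validator.set_protect_series_in_models(False)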
@@ -96,19 +125,15 @@ def test_model_accessor(request, pstore):
 @pytest.mark.dependency
 def test_oseries_model_accessor(request, pstore):
     depends(request, [f"test_store_model[{pstore.type}]"])
-    # repr
     pstore.oseries_models.__repr__()
-    # get model names
     ml = pstore.models["oseries1"]
     ml_list1 = pstore.oseries_models["oseries1"]
     assert len(ml_list1) == 1

-    # add model
     pstore.models["oseries1_2"] = ml
     ml_list2 = pstore.oseries_models["oseries1"]
     assert len(ml_list2) == 2

-    # delete model
     pstore.del_models("oseries1_2")
     ml_list3 = pstore.oseries_models["oseries1"]
     assert len(ml_list3) == 1
@@ -145,7 +170,7 @@ def test_get_model(request, pstore):
             f"test_store_model_missing_series[{pstore.type}]",
         ],
     )
-    _ = pstore.conn.get_models("oseries1")
+    _ = pstore.get_models("oseries1")


 @pytest.mark.dependency
@@ -159,7 +184,7 @@ def test_del_model(request, pstore):
             f"test_get_model[{pstore.type}]",
         ],
     )
-    pstore.conn.del_models("oseries1")
+    pstore.del_models("oseries1")


 @pytest.mark.dependency
@@ -167,7 +192,7 @@ def test_create_models(pstore):
     _ = pstore.create_models_bulk(
         ["oseries1", "oseries2"], store=True, progressbar=False
     )
-    _ = pstore.conn.models
+    _ = pstore.models
     assert pstore.n_models == 2

@@ -183,7 +208,7 @@ def test_get_parameters(request, pstore):
 def test_get_signatures(request, pstore):
     depends(request, [f"test_create_models[{pstore.type}]"])
     s = pstore.get_signatures(progressbar=False)
-    assert s.shape[1] == len(ps.stats.signatures.__all__)
+    assert s.shape[0] == len(ps.stats.signatures.__all__)


 @pytest.mark.dependency
@@ -202,6 +227,13 @@ def test_solve_models_and_get_stats(request, pstore):
     assert stats.index.size == 2


+@pytest.mark.dependency
+def test_check_models(request, pstore):
+    depends(request, [f"test_solve_models_and_get_stats[{pstore.type}]"])
+    if parse(ps.__version__) >= parse("1.8.0"):
+        _ = pstore.check_models(style_output=True)
+
+
 @pytest.mark.dependency
 def test_solve_models_parallel(request, pstore):
     depends(request, [f"test_create_models[{pstore.type}]"])
@@ -232,7 +264,7 @@ def test_save_and_load_model(request, pstore):


 def test_update_ts_settings(request, pstore):
-    pstore.set_check_model_series_values(False)
+    pstore.validator.set_check_model_series_values(False)

     o = pstore.get_oseries("oseries2")
     ml = ps.Model(o.loc[:"2013"], name="ml_oseries2")
@@ -258,14 +290,7 @@ def test_update_ts_settings(request, pstore):
     assert ml2.stressmodels["recharge"].evap.settings["tmax"] == tmax
     assert ml2.stressmodels["prec"].stress[0].settings["tmax"] == p2.index[-1]
     pstore.del_models("ml_oseries2")
-    pstore.set_check_model_series_values(True)
-
-
-# @pytest.mark.dependency()
-# def test_model_results(request, pstore):
-#     depends(request, [f"test_create_models[{pstore.type}]",
-#                       f"test_solve_models[{pstore.type}]"])
-#     pstore.model_results(["oseries1", "oseries2"], progressbar=False)
+    pstore.validator.set_check_model_series_values(True)


 def test_oseries_distances(pstore):
@@ -291,15 +316,17 @@ def test_to_from_zip(pstore):
         store = pst.PastaStore.from_zip(zipname, conn)
         assert not store.oseries.empty
     finally:
-        os.remove(zipname)
+        Path(zipname).unlink()


 def test_load_pastastore_from_config_file(pstore):
     if pstore.type == "pas" or pstore.type == "arcticdb":
         path = (
-            pstore.conn.path if pstore.type == "pas" else pstore.conn.uri.split("//")[1]
+            pstore.conn.path
+            if pstore.type == "pas"
+            else Path(pstore.conn.uri.split("://")[-1]) / pstore.conn.name
         )
-        fname = os.path.join(path, f"{pstore.conn.name}.pastastore")
+        fname = path / f"{pstore.conn.name}.pastastore"
         pstore2 = pst.PastaStore.from_pastastore_config_file(fname)
         assert not pstore2.empty

@@ -331,8 +358,12 @@ def test_meta_with_name(pstore):

 @pytest.mark.dependency
 def test_models_metadata(request, pstore):
-    # depends(request, [f"test_create_models[{pstore.type}]"])
     pstore.create_models_bulk(["oseries1", "oseries2"], store=True, progressbar=False)
     df = pstore.models.metadata
     assert df.index.size == 2
     assert (df["n_stressmodels"] == 1).all()
+
+
+def test_pstore_validator_settings(pstore):
+    _ = pstore.validator.settings
+    _ = pstore.conn.validation_settings
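Note on the validator-related changes in this file: the toggles formerly called on the store or connector (e.g. set_check_model_series_values) now live on a validator object, and the underlying settings are exposed on both the store and the connector. A short sketch limited to the calls that actually appear in this diff; what the settings object contains is not shown here and the in-memory store is assumed:

    import pastastore as pst

    pstore = pst.PastaStore()  # in-memory store, as in the sketch above

    _ = pstore.validator.settings        # current validation settings
    _ = pstore.conn.validation_settings  # connector-level view used in test_pstore_validator_settings

    # toggles exercised by the updated tests:
    pstore.validator.set_check_model_series_values(False)  # skip comparing model series to stored series
    pstore.validator.set_protect_series_in_models(True)    # guard series referenced by stored models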
@@ -63,6 +63,18 @@ def test_map_models(request, pstore):
     plt.close(ax.figure)


+@pytest.mark.dependency
+def test_map_signatures(request, pstore):
+    ax = pstore.maps.signature("mean_annual_maximum")
+    plt.close(ax.figure)
+
+
+@pytest.mark.dependency
+def test_map_modelparam(request, pstore):
+    ax = pstore.maps.modelparam("recharge_A")
+    plt.close(ax.figure)
+
+
 @pytest.mark.dependency
 def test_map_model(request, pstore):
     depends(request, [f"test_map_models[{pstore.type}]"])
@@ -128,7 +128,7 @@ def test_benchmark_write_model_arcticdb(benchmark):


 def write_model_nocheckts(conn, ml):
-    conn.set_check_model_series_values(False)
+    conn.validator.set_check_model_series_values(False)
     conn.add_model(ml, overwrite=True)

@@ -1,10 +1,31 @@
 # ruff: noqa: D100 D103
 import pytest
 from pandas import Timestamp
+from pandas.testing import assert_series_equal
+from pastas.timeseries_utils import timestep_weighted_resample

 import pastastore as pst


+def create_test_zip():
+    from pastastore.extensions import activate_hydropandas_extension
+
+    activate_hydropandas_extension()
+    pstore = pst.PastaStore()
+    pstore.hpd.download_bro_gmw(
+        extent=(117_850, 118_180, 439_550, 439_900),
+        tmin="2022-01-01",
+        tmax="2022-01-02",
+    )
+    pstore.hpd.download_knmi_precipitation(
+        stns=[260], meteo_var="RH", tmin="2022-01-01", tmax="2022-01-31"
+    )
+    pstore.hpd.download_knmi_evaporation(
+        stns=[260], tmin="2022-01-01", tmax="2022-01-31"
+    )
+    pstore.to_zip("tests/data/test_hpd_update.zip", overwrite=True)
+
+
 @pytest.mark.pastas150
 def test_hpd_download_from_bro():
     from pastastore.extensions import activate_hydropandas_extension
@@ -12,12 +33,14 @@ def test_hpd_download_from_bro():
     activate_hydropandas_extension()
     pstore = pst.PastaStore()
     pstore.hpd.download_bro_gmw(
-        extent=(117850, 118180, 439550, 439900), tmin="2022-01-01", tmax="2022-01-02"
+        extent=(117_850, 118_180, 439_550, 439_900),
+        tmin="2022-01-01",
+        tmax="2022-01-02",
     )
     assert pstore.n_oseries == 3


-@pytest.mark.xfail(reason="KNMI is being flaky, so allow this test to xfail/xpass.")
+# @pytest.mark.xfail(reason="KNMI is being flaky, so allow this test to xfail/xpass.")
 @pytest.mark.pastas150
 def test_hpd_download_precipitation_from_knmi():
     from pastastore.extensions import activate_hydropandas_extension
@@ -30,7 +53,7 @@ def test_hpd_download_precipitation_from_knmi():
     assert pstore.n_stresses == 1


-@pytest.mark.xfail(reason="KNMI is being flaky, so allow this test to xfail/xpass.")
+# @pytest.mark.xfail(reason="KNMI is being flaky, so allow this test to xfail/xpass.")
 @pytest.mark.pastas150
 def test_hpd_download_evaporation_from_knmi():
     from pastastore.extensions import activate_hydropandas_extension
@@ -56,9 +79,11 @@ def test_update_oseries():
     assert tmintmax.loc["GMW000000036327_1", "tmax"] >= Timestamp("2022-02-27")


-@pytest.mark.xfail(reason="KNMI is being flaky, so allow this test to xfail/xpass.")
+# @pytest.mark.xfail(reason="KNMI is being flaky, so allow this test to xfail/xpass.")
 @pytest.mark.pastas150
 def test_update_stresses():
+    import hydropandas as hpd
+
     from pastastore.extensions import activate_hydropandas_extension

     activate_hydropandas_extension()
@@ -66,10 +91,23 @@ def test_update_stresses():
     pstore = pst.PastaStore.from_zip("tests/data/test_hpd_update.zip")
     pstore.hpd.update_knmi_meteo(tmax="2022-02-28", normalize_datetime_index=True)
     tmintmax = pstore.get_tmin_tmax("stresses")
-    assert (tmintmax["tmax"] >= Timestamp("2024-02-27")).all()
+    assert (tmintmax["tmax"] >= Timestamp("2022-02-27")).all()
+
+    # check if result is equal to hydropandas result after resampling
+    oc = hpd.read_knmi(
+        stns=[260], meteo_vars=["RH", "EV24"], starts="2022-01-01", ends="2022-02-28"
+    )
+    for i in range(2):
+        o = oc.obs.iloc[i].squeeze("columns") * 1e3
+        resampled_result = (timestep_weighted_resample(o, o.index.normalize())).dropna()
+        assert_series_equal(
+            pstore.get_stress(pstore.stresses_names[i]).squeeze(),
+            resampled_result,
+            check_names=False,
+        )


-@pytest.mark.xfail(reason="KNMI is being flaky, so allow this test to xfail/xpass.")
+# @pytest.mark.xfail(reason="KNMI is being flaky, so allow this test to xfail/xpass.")
 @pytest.mark.pastas150
 def test_nearest_stresses():
     from pastastore.extensions import activate_hydropandas_extension
@@ -80,8 +118,8 @@ def test_nearest_stresses():
     pstore.hpd.download_nearest_knmi_precipitation(
         "GMW000000036319_1", tmin="2024-01-01", tmax="2024-01-31"
     )
-    assert "RD_GROOT-AMMERS" in pstore.stresses_names
+    assert "RD_GROOT-AMMERS_434" in pstore.stresses_names
     pstore.hpd.download_nearest_knmi_evaporation(
         "GMW000000036319_1", tmin="2024-01-01", tmax="2024-01-31"
     )
-    assert "EV24_CABAUW-MAST" in pstore.stresses_names
+    assert "EV24_CABAUW-MAST_348" in pstore.stresses_names