pastastore 1.7.1__py3-none-any.whl → 1.7.2__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
docs/conf.py ADDED
@@ -0,0 +1,215 @@
+ # ruff: noqa: D100
+ # -*- coding: utf-8 -*-
+ #
+ # Configuration file for the Sphinx documentation builder.
+ #
+ # This file does only contain a selection of the most common options. For a
+ # full list see the documentation:
+ # http://www.sphinx-doc.org/en/master/config
+
+ # -- Path setup --------------------------------------------------------------
+
+ # If extensions (or modules to document with autodoc) are in another directory,
+ # add these directories to sys.path here. If the directory is relative to the
+ # documentation root, use os.path.abspath to make it absolute, like shown here.
+ #
+ import os
+ import sys
+
+ sys.path.insert(0, os.path.abspath("."))
+ from pastastore import __version__  # noqa: E402
+
+ # -- Project information -----------------------------------------------------
+
+ project = "pastastore"
+ copyright = "2020, D.A. Brakenhoff"
+ author = "D.A. Brakenhoff"
+
+ # The short X.Y version
+ version = __version__
+ # The full version, including alpha/beta/rc tags
+ release = __version__
+
+
+ # -- General configuration ---------------------------------------------------
+
+ # If your documentation needs a minimal Sphinx version, state it here.
+ #
+ # needs_sphinx = '1.0'
+
+ # Add any Sphinx extension module names here, as strings. They can be
+ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+ # ones.
+ extensions = [
+     "sphinx.ext.autodoc",
+     "sphinx.ext.autosummary",
+     "sphinx.ext.napoleon",
+     "sphinx.ext.doctest",
+     "sphinx.ext.intersphinx",
+     "sphinx.ext.todo",
+     "sphinx.ext.coverage",
+     "sphinx.ext.mathjax",
+     "sphinx.ext.ifconfig",
+     "sphinx.ext.viewcode",
+     "IPython.sphinxext.ipython_console_highlighting",  # lowercase didn't work
+     "sphinx.ext.autosectionlabel",
+     "nbsphinx",
+     "nbsphinx_link",
+ ]
+
+ # Add any paths that contain templates here, relative to this directory.
+ templates_path = ["_templates"]
+
+ # The suffix(es) of source filenames.
+ # You can specify multiple suffix as a list of string:
+ #
+ # source_suffix = ['.rst', '.md']
+ source_suffix = ".rst"
+
+ # The master toctree document.
+ master_doc = "index"
+
+ # The language for content autogenerated by Sphinx. Refer to documentation
+ # for a list of supported languages.
+ #
+ # This is also used if you do content translation via gettext catalogs.
+ # Usually you set "language" from the command line for these cases.
+ language = "en"
+
+ # List of patterns, relative to source directory, that match files and
+ # directories to ignore when looking for source files.
+ # This pattern also affects html_static_path and html_extra_path.
+ exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
+
+ # The name of the Pygments (syntax highlighting) style to use.
+ pygments_style = None
+
+
+ # -- Options for HTML output -------------------------------------------------
+
+ # The theme to use for HTML and HTML Help pages. See the documentation for
+ # a list of builtin themes.
+ #
+ html_theme = "sphinx_rtd_theme"
+
+ # Theme options are theme-specific and customize the look and feel of a theme
+ # further. For a list of options available for each theme, see the
+ # documentation.
+ #
+ html_theme_options = {
+     "display_version": True,
+     "prev_next_buttons_location": "bottom",
+     # 'style_external_links': False,
+     # 'vcs_pageview_mode': '',
+     # 'style_nav_header_background': 'white',
+     # Toc options
+     "collapse_navigation": False,
+     "sticky_navigation": False,
+     "navigation_depth": 4,
+     "includehidden": True,
+     "titles_only": False,
+     "github_url": "https://github.com/pastas/pastastore",
+ }
+
+ # Add any paths that contain custom static files (such as style sheets) here,
+ # relative to this directory. They are copied after the builtin static files,
+ # so a file named "default.css" will overwrite the builtin "default.css".
+ html_static_path = ["_static"]
+
+ # Custom sidebar templates, must be a dictionary that maps document names
+ # to template names.
+ #
+ # The default sidebars (for documents that don't match any pattern) are
+ # defined by theme itself. Builtin themes are using these templates by
+ # default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
+ # 'searchbox.html']``.
+ #
+ # html_sidebars = {}
+
+ # -- Options for HTMLHelp output ---------------------------------------------
+
+ # Output file base name for HTML help builder.
+ htmlhelp_basename = "pastastoredoc"
+
+
+ # -- Options for LaTeX output ------------------------------------------------
+
+ latex_elements = {
+     # The paper size ('letterpaper' or 'a4paper').
+     #
+     # 'papersize': 'letterpaper',
+     # The font size ('10pt', '11pt' or '12pt').
+     #
+     # 'pointsize': '10pt',
+     # Additional stuff for the LaTeX preamble.
+     #
+     "preamble": r"""\makeatletter
+ \def\UTFviii@defined#1{%
+   \ifx#1\relax
+     -%
+   \else\expandafter
+     #1%
+   \fi
+ }
+
+ \makeatother""",
+     # Latex figure (float) alignment
+     #
+     # 'figure_align': 'htbp',
+ }
+
+ # Grouping the document tree into LaTeX files. List of tuples
+ # (source start file, target name, title,
+ #  author, documentclass [howto, manual, or own class]).
+ latex_documents = [
+     (
+         master_doc,
+         "pastastore.tex",
+         "pastastore Documentation",
+         "D.A. Brakenhoff",
+         "manual",
+     ),
+ ]
+
+
+ # -- Options for manual page output ------------------------------------------
+
+ # One entry per manual page. List of tuples
+ # (source start file, name, description, authors, manual section).
+ man_pages = [(master_doc, "pastastore", "pastastore Documentation", [author], 1)]
+
+
+ # -- Options for Texinfo output ----------------------------------------------
+
+ # Grouping the document tree into Texinfo files. List of tuples
+ # (source start file, target name, title, author,
+ #  dir menu entry, description, category)
+ texinfo_documents = [
+     (
+         master_doc,
+         "pastastore",
+         "pastastore Documentation",
+         author,
+         "pastastore",
+         "Tools for managing time series and Pastas models",
+         "Miscellaneous",
+     ),
+ ]
+
+
+ # -- Options for Epub output -------------------------------------------------
+
+ # Bibliographic Dublin Core info.
+ epub_title = project
+
+ # The unique identifier of the text. This can be a ISBN number
+ # or the project homepage.
+ #
+ # epub_identifier = ''
+
+ # A unique identification for the text.
+ #
+ # epub_uid = ''
+
+ # A list of files that should not be packed into the epub file.
+ epub_exclude_files = ["search.html"]
pastastore/__init__.py CHANGED
@@ -1,5 +1,5 @@
  # ruff: noqa: F401 D104
- from pastastore import connectors, extensions, styling, util
+ from pastastore import connectors, styling, util
  from pastastore.connectors import (
      ArcticDBConnector,
      DictConnector,
@@ -7,3 +7,8 @@ from pastastore.connectors import (
  )
  from pastastore.store import PastaStore
  from pastastore.version import __version__, show_versions
+
+ try:
+     from pastastore import extensions
+ except ModuleNotFoundError:
+     print("Could not import extensions module. Update pastas to >=1.3.0!")
pastastore/base.py CHANGED
@@ -15,6 +15,7 @@ import pandas as pd
  import pastas as ps
  from numpy import isin
  from packaging.version import parse as parse_version
+ from pandas.testing import assert_series_equal
  from pastas.io.pas import PastasEncoder
  from tqdm.auto import tqdm
 
@@ -30,7 +31,7 @@ class BaseConnector(ABC):
 
      Class holds base logic for dealing with time series and Pastas Models. Create your
      own Connector to a data source by writing a a class that inherits from this
-     BaseConnector. Your class has to override each abstractmethod and abstractproperty.
+     BaseConnector. Your class has to override each abstractmethod and property.
      """
 
      _default_library_names = [
@@ -47,6 +48,10 @@ class BaseConnector(ABC):
      # True for pastas>=0.23.0 and False for pastas<=0.22.0
      USE_PASTAS_VALIDATE_SERIES = False if PASTAS_LEQ_022 else True
 
+     # set series equality comparison settings (using assert_series_equal)
+     SERIES_EQUALITY_ABSOLUTE_TOLERANCE = 1e-10
+     SERIES_EQUALITY_RELATIVE_TOLERANCE = 0.0
+
      def __repr__(self):
          """Representation string of the object."""
          return (
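These class attributes feed the assert_series_equal comparison introduced further down in this diff. They can be overridden on a connector when stored and model series are expected to differ by small floating-point noise; a minimal sketch with hypothetical, looser tolerances:

    import pastastore as pst

    conn = pst.DictConnector("db")
    # hypothetical: accept absolute differences up to 1e-8 between stored
    # series and the series attached to a model being added
    conn.SERIES_EQUALITY_ABSOLUTE_TOLERANCE = 1e-8
    conn.SERIES_EQUALITY_RELATIVE_TOLERANCE = 1e-5
    pstore = pst.PastaStore(conn)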
@@ -670,22 +675,27 @@ class BaseConnector(ABC):
          metadata["kind"] = kind
          self._upsert_series("stresses", series, name, metadata=metadata)
 
-     def del_models(self, names: Union[list, str]) -> None:
+     def del_models(self, names: Union[list, str], verbose: bool = True) -> None:
          """Delete model(s) from the database.
 
          Parameters
          ----------
          names : str or list of str
              name(s) of the model to delete
+         verbose : bool, optional
+             print information about deleted models, by default True
          """
-         for n in self._parse_names(names, libname="models"):
+         names = self._parse_names(names, libname="models")
+         for n in names:
              mldict = self.get_models(n, return_dict=True)
              oname = mldict["oseries"]["name"]
              self._del_item("models", n)
              self._del_oseries_model_link(oname, n)
          self._clear_cache("_modelnames_cache")
+         if verbose:
+             print(f"Deleted {len(names)} model(s) from database.")
 
-     def del_model(self, names: Union[list, str]) -> None:
+     def del_model(self, names: Union[list, str], verbose: bool = True) -> None:
          """Delete model(s) from the database.
 
          Alias for del_models().
@@ -694,10 +704,14 @@ class BaseConnector(ABC):
          ----------
          names : str or list of str
              name(s) of the model to delete
+         verbose : bool, optional
+             print information about deleted models, by default True
          """
-         self.del_models(names=names)
+         self.del_models(names=names, verbose=verbose)
 
-     def del_oseries(self, names: Union[list, str], remove_models: bool = False):
+     def del_oseries(
+         self, names: Union[list, str], remove_models: bool = False, verbose: bool = True
+     ):
          """Delete oseries from the database.
 
          Parameters
@@ -706,29 +720,38 @@ class BaseConnector(ABC):
              name(s) of the oseries to delete
          remove_models : bool, optional
              also delete models for deleted oseries, default is False
+         verbose : bool, optional
+             print information about deleted oseries, by default True
          """
          names = self._parse_names(names, libname="oseries")
          for n in names:
              self._del_item("oseries", n)
          self._clear_cache("oseries")
+         if verbose:
+             print(f"Deleted {len(names)} oseries from database.")
          # remove associated models from database
          if remove_models:
              modelnames = list(
                  chain.from_iterable([self.oseries_models.get(n, []) for n in names])
              )
-             self.del_models(modelnames)
+             self.del_models(modelnames, verbose=verbose)
 
-     def del_stress(self, names: Union[list, str]):
+     def del_stress(self, names: Union[list, str], verbose: bool = True):
          """Delete stress from the database.
 
          Parameters
          ----------
          names : str or list of str
              name(s) of the stress to delete
+         verbose : bool, optional
+             print information about deleted stresses, by default True
          """
-         for n in self._parse_names(names, libname="stresses"):
+         names = self._parse_names(names, libname="stresses")
+         for n in names:
              self._del_item("stresses", n)
          self._clear_cache("stresses")
+         if verbose:
+             print(f"Deleted {len(names)} stress(es) from database.")
 
      def _get_series(
          self,
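The new verbose flag defaults to True, so these delete methods now print a one-line summary; pass verbose=False to keep the previous silent behaviour. A short sketch with hypothetical item names:

    pstore.del_models(["model_a", "model_b"])  # prints: Deleted 2 model(s) from database.
    pstore.del_oseries("well_1", remove_models=True, verbose=False)  # silent
    pstore.del_stress("prec_stn260", verbose=False)  # silent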
@@ -1665,11 +1688,18 @@ class ConnectorUtil:
              so = ml.oseries.series_original
          else:
              so = ml.oseries._series_original
-         if not so.dropna().equals(s_org):
+         try:
+             assert_series_equal(
+                 so.dropna(),
+                 s_org,
+                 atol=self.SERIES_EQUALITY_ABSOLUTE_TOLERANCE,
+                 rtol=self.SERIES_EQUALITY_RELATIVE_TOLERANCE,
+             )
+         except AssertionError as e:
              raise ValueError(
                  f"Cannot add model because model oseries '{name}'"
-                 " is different from stored oseries!"
-             )
+                 " is different from stored oseries! See stacktrace for differences."
+             ) from e
 
      def _check_stresses_in_store(self, ml: Union[ps.Model, dict]):
          """Check if stresses time series are contained in PastaStore (internal method).
@@ -1699,11 +1729,19 @@ class ConnectorUtil:
                  so = s.series_original
              else:
                  so = s._series_original
-             if not so.equals(s_org):
+             try:
+                 assert_series_equal(
+                     so,
+                     s_org,
+                     atol=self.SERIES_EQUALITY_ABSOLUTE_TOLERANCE,
+                     rtol=self.SERIES_EQUALITY_RELATIVE_TOLERANCE,
+                 )
+             except AssertionError as e:
                  raise ValueError(
                      f"Cannot add model because model stress "
-                     f"'{s.name}' is different from stored stress!"
-                 )
+                     f"'{s.name}' is different from stored stress! "
+                     "See stacktrace for differences."
+                 ) from e
      elif isinstance(ml, dict):
          for sm in ml["stressmodels"].values():
              classkey = "stressmodel" if PASTAS_LEQ_022 else "class"
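Replacing the exact Series.equals check with pandas' assert_series_equal makes the comparison tolerant of tiny floating-point differences (e.g. from serialization round-trips), and chaining the AssertionError onto the raised ValueError exposes exactly which values differ. A self-contained sketch of the comparison the connector now performs:

    import pandas as pd
    from pandas.testing import assert_series_equal

    idx = pd.date_range("2020-01-01", periods=3, freq="D")
    stored = pd.Series([1.0, 2.0, 3.0], index=idx)
    model = stored + 1e-12  # tiny numerical noise

    # the old check was exact and flags this as a mismatch
    assert not model.equals(stored)

    # the new check passes: differences fall within atol=1e-10, rtol=0.0
    assert_series_equal(model, stored, atol=1e-10, rtol=0.0)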
@@ -164,10 +164,10 @@ class HydroPandasExtension:
          metadata.pop("name", None)
          metadata.pop("meta", None)
          unit = metadata.get("unit", None)
-         if unit == "m" and unit_multiplier == 1e3:
+         if unit == "m" and np.allclose(unit_multiplier, 1e-3):
              metadata["unit"] = "mm"
          elif unit_multiplier != 1.0:
-             metadata["unit"] = f"{unit_multiplier:e}*{unit}"
+             metadata["unit"] = f"{unit_multiplier:.1e}*{unit}"
 
          source = metadata.get("source", "")
          if len(source) > 0:
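Two fixes here: the metre-to-millimetre branch now tests against 1e-3 with np.allclose (the old == 1e3 comparison could not match the 1e-3 multiplier the download methods pass by default), and the fallback label uses a compact .1e format. A sketch mirroring the branch above (the unit_label helper is hypothetical, not the package's API):

    import numpy as np

    def unit_label(unit: str, unit_multiplier: float) -> str:
        if unit == "m" and np.allclose(unit_multiplier, 1e-3):
            return "mm"
        elif unit_multiplier != 1.0:
            return f"{unit_multiplier:.1e}*{unit}"
        return unit

    print(unit_label("m", 1e-3))    # mm
    print(unit_label("m", 0.25))    # 2.5e-01*m
    print(unit_label("m3/d", 1.0))  # m3/d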
@@ -199,6 +199,60 @@ class HydroPandasExtension:
          else:
              raise ValueError("libname must be 'oseries' or 'stresses'.")
 
+     def _get_tmin_tmax(self, tmin, tmax, oseries=None):
+         """Get tmin and tmax from store if not specified.
+
+         Parameters
+         ----------
+         tmin : TimeType
+             start time
+         tmax : TimeType
+             end time
+         oseries : str, optional
+             name of the observation series to get tmin/tmax for, by default None
+
+         Returns
+         -------
+         tmin, tmax : TimeType, TimeType
+             tmin and tmax
+         """
+         # get tmin/tmax if not specified
+         if tmin is None or tmax is None:
+             tmintmax = self._store.get_tmin_tmax(
+                 "oseries", names=[oseries] if oseries else None
+             )
+             if tmin is None:
+                 tmin = tmintmax.loc[:, "tmin"].min() - Timedelta(days=10 * 365)
+             if tmax is None:
+                 tmax = tmintmax.loc[:, "tmax"].max()
+         return tmin, tmax
+
+     @staticmethod
+     def _normalize_datetime_index(obs):
+         """Normalize observation datetime index (i.e. set observation time to midnight).
+
+         Parameters
+         ----------
+         obs : pandas.Series
+             observation series to normalize
+
+         Returns
+         -------
+         hpd.Obs
+             observation series with normalized datetime index
+         """
+         if isinstance(obs, hpd.Obs):
+             metadata = {k: getattr(obs, k) for k in obs._metadata}
+         else:
+             metadata = {}
+         return obs.__class__(
+             timestep_weighted_resample(
+                 obs,
+                 obs.index.normalize(),
+             ).rename(obs.name),
+             **metadata,
+         )
+
      def download_knmi_precipitation(
          self,
          stns: Optional[list[int]] = None,
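_get_tmin_tmax centralizes the time-window logic previously inlined in each download method, while _normalize_datetime_index is unchanged, merely moved up from lower in the file (see the removal hunk at the end of this diff). When no window is given, the helper spans the stored oseries and pads tmin by roughly ten years to cover model warm-up. A hedged sketch, assuming the extension is reachable as pstore.hpd and a hypothetical well name:

    # window covering every oseries in the store, tmin padded ~10 years back
    tmin, tmax = pstore.hpd._get_tmin_tmax(None, None)

    # window for a single observation well
    tmin, tmax = pstore.hpd._get_tmin_tmax(None, None, oseries="well_1")

    # explicitly passed values are returned unchanged
    tmin, tmax = pstore.hpd._get_tmin_tmax("2000-01-01", "2020-01-01")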
@@ -303,7 +357,7 @@ class HydroPandasExtension:
              variable to download, by default "RH", valid options are
              e.g. ["RD", "RH", "EV24", "T", "Q"].
          kind : str
-             kind identifier for observations, usually "prec" or "evap".
+             kind identifier for observations in pastastore, usually "prec" or "evap".
          stns : list of int/str, optional
              list of station numbers to download data for, by default None
          tmin : TimeType, optional
@@ -320,12 +374,7 @@ class HydroPandasExtension:
              if True, normalize the datetime so stress value at midnight represents
              the daily total, by default True.
          """
-         # get tmin/tmax if not specified
-         tmintmax = self._store.get_tmin_tmax("oseries")
-         if tmin is None:
-             tmin = tmintmax.loc[:, "tmin"].min() - Timedelta(days=10 * 365)
-         if tmax is None:
-             tmax = tmintmax.loc[:, "tmax"].max()
+         tmin, tmax = self._get_tmin_tmax(tmin, tmax)
 
          if stns is None:
              locations = self._store.oseries.loc[:, ["x", "y"]]
@@ -354,6 +403,155 @@ class HydroPandasExtension:
              normalize_datetime_index=normalize_datetime_index,
          )
 
+     def download_nearest_knmi_precipitation(
+         self,
+         oseries: str,
+         meteo_var: str = "RD",
+         tmin: Optional[TimeType] = None,
+         tmax: Optional[TimeType] = None,
+         unit_multiplier: float = 1e-3,
+         normalize_datetime_index: bool = True,
+         fill_missing_obs: bool = True,
+         **kwargs,
+     ):
+         """Download precipitation time series data from nearest KNMI station.
+
+         Parameters
+         ----------
+         oseries : str
+             download nearest precipitation information for this observation well
+         meteo_var : str, optional
+             variable to download, by default "RD", valid options are ["RD", "RH"].
+         tmin : TimeType
+             start time
+         tmax : TimeType
+             end time
+         unit_multiplier : float, optional
+             multiply unit by this value before saving it in the store,
+             by default 1.0 (no conversion)
+         fill_missing_obs : bool, optional
+             if True, fill missing observations by getting observations from nearest
+             station with data.
+         fill_missing_obs : bool, optional
+             if True, fill missing observations by getting observations from nearest
+             station with data.
+         """
+         self.download_nearest_knmi_meteo(
+             oseries=oseries,
+             meteo_var=meteo_var,
+             kind="prec",
+             tmin=tmin,
+             tmax=tmax,
+             unit_multiplier=unit_multiplier,
+             normalize_datetime_index=normalize_datetime_index,
+             fill_missing_obs=fill_missing_obs,
+             **kwargs,
+         )
+
+     def download_nearest_knmi_evaporation(
+         self,
+         oseries: str,
+         meteo_var: str = "EV24",
+         tmin: Optional[TimeType] = None,
+         tmax: Optional[TimeType] = None,
+         unit_multiplier: float = 1e-3,
+         normalize_datetime_index: bool = True,
+         fill_missing_obs: bool = True,
+         **kwargs,
+     ):
+         """Download evaporation time series data from nearest KNMI station.
+
+         Parameters
+         ----------
+         oseries : str
+             download nearest evaporation information for this observation well
+         meteo_var : str, optional
+             variable to download, by default "EV24", valid options are:
+             ["EV24", "penman", "hargreaves", "makkink"].
+         tmin : TimeType
+             start time
+         tmax : TimeType
+             end time
+         unit_multiplier : float, optional
+             multiply unit by this value before saving it in the store,
+             by default 1.0 (no conversion)
+         fill_missing_obs : bool, optional
+             if True, fill missing observations by getting observations from nearest
+             station with data.
+         fill_missing_obs : bool, optional
+             if True, fill missing observations by getting observations from nearest
+             station with data.
+         """
+         self.download_nearest_knmi_meteo(
+             oseries=oseries,
+             meteo_var=meteo_var,
+             kind="evap",
+             tmin=tmin,
+             tmax=tmax,
+             unit_multiplier=unit_multiplier,
+             normalize_datetime_index=normalize_datetime_index,
+             fill_missing_obs=fill_missing_obs,
+             **kwargs,
+         )
+
+     def download_nearest_knmi_meteo(
+         self,
+         oseries: str,
+         meteo_var: str,
+         kind: str,
+         tmin: Optional[TimeType] = None,
+         tmax: Optional[TimeType] = None,
+         unit_multiplier: float = 1.0,
+         normalize_datetime_index: bool = True,
+         fill_missing_obs: bool = True,
+         **kwargs,
+     ):
+         """Download meteorological data from nearest KNMI station.
+
+         Parameters
+         ----------
+         oseries : str
+             download nearest meteorological information for this observation well
+         meteo_var : str
+             meteorological variable to download, e.g. "RD", "RH", "EV24", "T", "Q"
+         kind : str
+             kind identifier for observations in pastastore, usually "prec" or "evap".
+         tmin : TimeType
+             start time
+         tmax : TimeType
+             end time
+         unit_multiplier : float, optional
+             multiply unit by this value before saving it in the store,
+             by default 1.0 (no conversion)
+         fill_missing_obs : bool, optional
+             if True, fill missing observations by getting observations from nearest
+             station with data.
+         fill_missing_obs : bool, optional
+             if True, fill missing observations by getting observations from nearest
+             station with data.
+         """
+         xy = self._store.oseries.loc[[oseries], ["x", "y"]].to_numpy()
+         # download data
+         tmin, tmax = self._get_tmin_tmax(tmin, tmax, oseries=oseries)
+         knmi = hpd.read_knmi(
+             xy=xy,
+             meteo_vars=[meteo_var],
+             starts=tmin,
+             ends=tmax,
+             fill_missing_obs=fill_missing_obs,
+             **kwargs,
+         )
+         # add to store
+         self.add_obscollection(
+             libname="stresses",
+             oc=knmi,
+             kind=kind,
+             data_column=meteo_var,
+             unit_multiplier=unit_multiplier,
+             update=False,
+             normalize_datetime_index=normalize_datetime_index,
+         )
+
      def update_knmi_meteo(
          self,
          names: Optional[List[str]] = None,
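The three new methods form a small hierarchy: the precipitation and evaporation variants are thin wrappers that fix kind and a sensible default meteo_var, while download_nearest_knmi_meteo looks up the well's x/y coordinates, resolves the time window via _get_tmin_tmax, fetches data with hpd.read_knmi, and stores the result as a stress. A hedged usage sketch (accessor and well name assumed as above):

    # precipitation (RD gauge data) from the station nearest to "well_1"
    pstore.hpd.download_nearest_knmi_precipitation("well_1")

    # Makkink evaporation from the nearest meteo station
    pstore.hpd.download_nearest_knmi_evaporation("well_1", meteo_var="makkink")

    # or the generic form, choosing variable and kind explicitly
    pstore.hpd.download_nearest_knmi_meteo(
        "well_1", meteo_var="EV24", kind="evap", unit_multiplier=1e-3
    )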
@@ -386,6 +584,17 @@ class HydroPandasExtension:
          **kwargs : dict, optional
              Additional keyword arguments to pass to `hpd.read_knmi()`
          """
+         if "source" not in self._store.stresses.columns:
+             msg = (
+                 "Cannot update KNMI stresses! "
+                 "KNMI stresses cannot be identified if 'source' column is not defined."
+             )
+             logger.error(msg)
+             if raise_on_error:
+                 raise ValueError(msg)
+             else:
+                 return
+
          if names is None:
              names = self._store.stresses.loc[
                  self._store.stresses["source"] == "KNMI"
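Without a 'source' column in the stresses metadata there is no way to tell which stresses came from KNMI, so the method now fails fast instead of silently doing nothing useful. A behaviour sketch (same assumed pstore.hpd accessor):

    # raise_on_error=True: the guard raises immediately
    try:
        pstore.hpd.update_knmi_meteo(raise_on_error=True)
    except ValueError as e:
        print(e)  # Cannot update KNMI stresses! ...

    # raise_on_error=False: the error is logged and the method returns
    pstore.hpd.update_knmi_meteo(raise_on_error=False)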
@@ -497,32 +706,6 @@ class HydroPandasExtension:
              if raise_on_error:
                  raise e
 
-     @staticmethod
-     def _normalize_datetime_index(obs):
-         """Normalize observation datetime index (i.e. set observation time to midnight).
-
-         Parameters
-         ----------
-         obs : pandas.Series
-             observation series to normalize
-
-         Returns
-         -------
-         hpd.Obs
-             observation series with normalized datetime index
-         """
-         if isinstance(obs, hpd.Obs):
-             metadata = {k: getattr(obs, k) for k in obs._metadata}
-         else:
-             metadata = {}
-         return obs.__class__(
-             timestep_weighted_resample(
-                 obs,
-                 obs.index.normalize(),
-             ).rename(obs.name),
-             **metadata,
-         )
-
      def download_bro_gmw(
          self,
          extent: Optional[List[float]] = None,