pastastore 1.7.1__tar.gz → 1.7.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pastastore-1.7.1 → pastastore-1.7.2}/PKG-INFO +1 -1
- pastastore-1.7.2/docs/conf.py +215 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore/__init__.py +6 -1
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore/base.py +53 -15
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore/extensions/hpd.py +218 -35
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore/version.py +1 -1
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore.egg-info/PKG-INFO +1 -1
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore.egg-info/SOURCES.txt +2 -0
- pastastore-1.7.2/pastastore.egg-info/top_level.txt +5 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/pyproject.toml +3 -0
- pastastore-1.7.2/tests/conftest.py +169 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/tests/test_007_hpdextension.py +16 -0
- pastastore-1.7.1/pastastore.egg-info/top_level.txt +0 -1
- {pastastore-1.7.1 → pastastore-1.7.2}/LICENSE +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore/connectors.py +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore/datasets.py +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore/extensions/__init__.py +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore/extensions/accessor.py +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore/plotting.py +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore/store.py +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore/styling.py +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore/util.py +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore/yaml_interface.py +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore.egg-info/dependency_links.txt +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/pastastore.egg-info/requires.txt +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/readme.md +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/setup.cfg +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/tests/test_001_import.py +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/tests/test_002_connectors.py +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/tests/test_003_pastastore.py +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/tests/test_004_yaml.py +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/tests/test_005_maps_plots.py +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/tests/test_006_benchmark.py +0 -0
- {pastastore-1.7.1 → pastastore-1.7.2}/tests/test_008_stressmodels.py +0 -0
{pastastore-1.7.1 → pastastore-1.7.2}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pastastore
-Version: 1.7.1
+Version: 1.7.2
 Summary: Tools for managing Pastas time series models.
 Author: D.A. Brakenhoff
 Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
pastastore-1.7.2/docs/conf.py (new file)
@@ -0,0 +1,215 @@
+# ruff: noqa: D100
+# -*- coding: utf-8 -*-
+#
+# Configuration file for the Sphinx documentation builder.
+#
+# This file does only contain a selection of the most common options. For a
+# full list see the documentation:
+# http://www.sphinx-doc.org/en/master/config
+
+# -- Path setup --------------------------------------------------------------
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#
+import os
+import sys
+
+sys.path.insert(0, os.path.abspath("."))
+from pastastore import __version__  # noqa: E402
+
+# -- Project information -----------------------------------------------------
+
+project = "pastastore"
+copyright = "2020, D.A. Brakenhoff"
+author = "D.A. Brakenhoff"
+
+# The short X.Y version
+version = __version__
+# The full version, including alpha/beta/rc tags
+release = __version__
+
+
+# -- General configuration ---------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#
+# needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+    "sphinx.ext.autodoc",
+    "sphinx.ext.autosummary",
+    "sphinx.ext.napoleon",
+    "sphinx.ext.doctest",
+    "sphinx.ext.intersphinx",
+    "sphinx.ext.todo",
+    "sphinx.ext.coverage",
+    "sphinx.ext.mathjax",
+    "sphinx.ext.ifconfig",
+    "sphinx.ext.viewcode",
+    "IPython.sphinxext.ipython_console_highlighting",  # lowercase didn't work
+    "sphinx.ext.autosectionlabel",
+    "nbsphinx",
+    "nbsphinx_link",
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ["_templates"]
+
+# The suffix(es) of source filenames.
+# You can specify multiple suffix as a list of string:
+#
+# source_suffix = ['.rst', '.md']
+source_suffix = ".rst"
+
+# The master toctree document.
+master_doc = "index"
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#
+# This is also used if you do content translation via gettext catalogs.
+# Usually you set "language" from the command line for these cases.
+language = "en"
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This pattern also affects html_static_path and html_extra_path.
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = None
+
+
+# -- Options for HTML output -------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#
+html_theme = "sphinx_rtd_theme"
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+#
+html_theme_options = {
+    "display_version": True,
+    "prev_next_buttons_location": "bottom",
+    # 'style_external_links': False,
+    # 'vcs_pageview_mode': '',
+    # 'style_nav_header_background': 'white',
+    # Toc options
+    "collapse_navigation": False,
+    "sticky_navigation": False,
+    "navigation_depth": 4,
+    "includehidden": True,
+    "titles_only": False,
+    "github_url": "https://github.com/pastas/pastastore",
+}
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ["_static"]
+
+# Custom sidebar templates, must be a dictionary that maps document names
+# to template names.
+#
+# The default sidebars (for documents that don't match any pattern) are
+# defined by theme itself. Builtin themes are using these templates by
+# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
+# 'searchbox.html']``.
+#
+# html_sidebars = {}
+
+# -- Options for HTMLHelp output ---------------------------------------------
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = "pastastoredoc"
+
+
+# -- Options for LaTeX output ------------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+    #
+    # 'papersize': 'letterpaper',
+    # The font size ('10pt', '11pt' or '12pt').
+    #
+    # 'pointsize': '10pt',
+    # Additional stuff for the LaTeX preamble.
+    #
+    "preamble": r"""\makeatletter
+\def\UTFviii@defined#1{%
+  \ifx#1\relax
+    -%
+  \else\expandafter
+    #1%
+  \fi
+}
+
+\makeatother""",
+    # Latex figure (float) alignment
+    #
+    # 'figure_align': 'htbp',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+#  author, documentclass [howto, manual, or own class]).
+latex_documents = [
+    (
+        master_doc,
+        "pastastore.tex",
+        "pastastore Documentation",
+        "D.A. Brakenhoff",
+        "manual",
+    ),
+]
+
+
+# -- Options for manual page output ------------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [(master_doc, "pastastore", "pastastore Documentation", [author], 1)]
+
+
+# -- Options for Texinfo output ----------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+#  dir menu entry, description, category)
+texinfo_documents = [
+    (
+        master_doc,
+        "pastastore",
+        "pastastore Documentation",
+        author,
+        "pastastore",
+        "Tools for managing time series and Pastas models",
+        "Miscellaneous",
+    ),
+]
+
+
+# -- Options for Epub output -------------------------------------------------
+
+# Bibliographic Dublin Core info.
+epub_title = project
+
+# The unique identifier of the text. This can be a ISBN number
+# or the project homepage.
+#
+# epub_identifier = ''
+
+# A unique identification for the text.
+#
+# epub_uid = ''
+
+# A list of files that should not be packed into the epub file.
+epub_exclude_files = ["search.html"]
{pastastore-1.7.1 → pastastore-1.7.2}/pastastore/__init__.py
@@ -1,5 +1,5 @@
 # ruff: noqa: F401 D104
-from pastastore import connectors, util
+from pastastore import connectors, styling, util
 from pastastore.connectors import (
     ArcticDBConnector,
     DictConnector,
@@ -7,3 +7,8 @@ from pastastore.connectors import (
 )
 from pastastore.store import PastaStore
 from pastastore.version import __version__, show_versions
+
+try:
+    from pastastore import extensions
+except ModuleNotFoundError:
+    print("Could not import extensions module. Update pastas to >=1.3.0!")
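Note: the guarded import above means the extensions module only loads when the installed pastas supports it. A minimal sketch of activating the hydropandas extension after this change (assuming pastas >= 1.3.0 and hydropandas are installed; activate_hydropandas_extension also appears in the tests further down):

    import pastastore as pst
    from pastastore.extensions import activate_hydropandas_extension

    activate_hydropandas_extension()  # registers the .hpd accessor on PastaStore
    pstore = pst.PastaStore(pst.DictConnector("demo"), "demo")  # pstore.hpd now available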
{pastastore-1.7.1 → pastastore-1.7.2}/pastastore/base.py
@@ -15,6 +15,7 @@ import pandas as pd
 import pastas as ps
 from numpy import isin
 from packaging.version import parse as parse_version
+from pandas.testing import assert_series_equal
 from pastas.io.pas import PastasEncoder
 from tqdm.auto import tqdm
 
@@ -30,7 +31,7 @@ class BaseConnector(ABC):
 
     Class holds base logic for dealing with time series and Pastas Models. Create your
     own Connector to a data source by writing a a class that inherits from this
-    BaseConnector. Your class has to override each abstractmethod and
+    BaseConnector. Your class has to override each abstractmethod and property.
     """
 
     _default_library_names = [
@@ -47,6 +48,10 @@ class BaseConnector(ABC):
     # True for pastas>=0.23.0 and False for pastas<=0.22.0
     USE_PASTAS_VALIDATE_SERIES = False if PASTAS_LEQ_022 else True
 
+    # set series equality comparison settings (using assert_series_equal)
+    SERIES_EQUALITY_ABSOLUTE_TOLERANCE = 1e-10
+    SERIES_EQUALITY_RELATIVE_TOLERANCE = 0.0
+
     def __repr__(self):
         """Representation string of the object."""
         return (
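Note: the two new class attributes above set the default tolerances used by assert_series_equal when checking whether a model's time series match what is stored. A minimal sketch of loosening them on a connector instance (connector name and path are hypothetical):

    import pastastore as pst

    conn = pst.PasConnector("my_db", "./pastas_db")  # hypothetical name/path
    # accept differences up to 1e-4 between stored and model series
    conn.SERIES_EQUALITY_ABSOLUTE_TOLERANCE = 1e-4
    conn.SERIES_EQUALITY_RELATIVE_TOLERANCE = 0.0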
@@ -670,22 +675,27 @@ class BaseConnector(ABC):
         metadata["kind"] = kind
         self._upsert_series("stresses", series, name, metadata=metadata)
 
-    def del_models(self, names: Union[list, str]) -> None:
+    def del_models(self, names: Union[list, str], verbose: bool = True) -> None:
         """Delete model(s) from the database.
 
         Parameters
         ----------
         names : str or list of str
             name(s) of the model to delete
+        verbose : bool, optional
+            print information about deleted models, by default True
         """
-        for n in self._parse_names(names, libname="models"):
+        names = self._parse_names(names, libname="models")
+        for n in names:
             mldict = self.get_models(n, return_dict=True)
             oname = mldict["oseries"]["name"]
             self._del_item("models", n)
             self._del_oseries_model_link(oname, n)
             self._clear_cache("_modelnames_cache")
+        if verbose:
+            print(f"Deleted {len(names)} model(s) from database.")
 
-    def del_model(self, names: Union[list, str]) -> None:
+    def del_model(self, names: Union[list, str], verbose: bool = True) -> None:
         """Delete model(s) from the database.
 
         Alias for del_models().
@@ -694,10 +704,14 @@ class BaseConnector(ABC):
         ----------
         names : str or list of str
             name(s) of the model to delete
+        verbose : bool, optional
+            print information about deleted models, by default True
         """
-        self.del_models(names=names)
+        self.del_models(names=names, verbose=verbose)
 
-    def del_oseries(self, names: Union[list, str], remove_models: bool = False):
+    def del_oseries(
+        self, names: Union[list, str], remove_models: bool = False, verbose: bool = True
+    ):
         """Delete oseries from the database.
 
         Parameters
@@ -706,29 +720,38 @@ class BaseConnector(ABC):
             name(s) of the oseries to delete
         remove_models : bool, optional
             also delete models for deleted oseries, default is False
+        verbose : bool, optional
+            print information about deleted oseries, by default True
         """
         names = self._parse_names(names, libname="oseries")
         for n in names:
             self._del_item("oseries", n)
         self._clear_cache("oseries")
+        if verbose:
+            print(f"Deleted {len(names)} oseries from database.")
         # remove associated models from database
         if remove_models:
             modelnames = list(
                 chain.from_iterable([self.oseries_models.get(n, []) for n in names])
             )
-            self.del_models(modelnames)
+            self.del_models(modelnames, verbose=verbose)
 
-    def del_stress(self, names: Union[list, str]):
+    def del_stress(self, names: Union[list, str], verbose: bool = True):
         """Delete stress from the database.
 
         Parameters
         ----------
         names : str or list of str
             name(s) of the stress to delete
+        verbose : bool, optional
+            print information about deleted stresses, by default True
         """
-        for n in self._parse_names(names, libname="stresses"):
+        names = self._parse_names(names, libname="stresses")
+        for n in names:
             self._del_item("stresses", n)
         self._clear_cache("stresses")
+        if verbose:
+            print(f"Deleted {len(names)} stress(es) from database.")
 
     def _get_series(
         self,
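Note: all del_* methods above gained a verbose flag that defaults to the old printing behavior. A short usage sketch against any connector (model and stress names are hypothetical):

    conn.del_models("model1")                # prints: Deleted 1 model(s) from database.
    conn.del_stress("prec1", verbose=False)  # deletes silently
    conn.del_oseries("oseries1", remove_models=True, verbose=False)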
@@ -1665,11 +1688,18 @@ class ConnectorUtil:
             so = ml.oseries.series_original
         else:
             so = ml.oseries._series_original
-        if not so.dropna().equals(s_org):
+        try:
+            assert_series_equal(
+                so.dropna(),
+                s_org,
+                atol=self.SERIES_EQUALITY_ABSOLUTE_TOLERANCE,
+                rtol=self.SERIES_EQUALITY_RELATIVE_TOLERANCE,
+            )
+        except AssertionError as e:
             raise ValueError(
                 f"Cannot add model because model oseries '{name}'"
-                " is different from stored oseries!"
-            )
+                " is different from stored oseries! See stacktrace for differences."
+            ) from e
 
     def _check_stresses_in_store(self, ml: Union[ps.Model, dict]):
         """Check if stresses time series are contained in PastaStore (internal method).
@@ -1699,11 +1729,19 @@ class ConnectorUtil:
                     so = s.series_original
                 else:
                     so = s._series_original
-                if not so.equals(s_org):
+                try:
+                    assert_series_equal(
+                        so,
+                        s_org,
+                        atol=self.SERIES_EQUALITY_ABSOLUTE_TOLERANCE,
+                        rtol=self.SERIES_EQUALITY_RELATIVE_TOLERANCE,
+                    )
+                except AssertionError as e:
                     raise ValueError(
                         f"Cannot add model because model stress "
-                        f"'{s.name}' is different from stored stress!"
-                    )
+                        f"'{s.name}' is different from stored stress! "
+                        "See stacktrace for differences."
+                    ) from e
         elif isinstance(ml, dict):
             for sm in ml["stressmodels"].values():
                 classkey = "stressmodel" if PASTAS_LEQ_022 else "class"
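Note: replacing the exact equality check with pandas' assert_series_equal means float round-off no longer blocks adding a model, and the raised ValueError now chains the detailed AssertionError. A self-contained sketch of the comparison now applied:

    import pandas as pd
    from pandas.testing import assert_series_equal

    stored = pd.Series([1.0, 2.0], index=pd.date_range("2024-01-01", periods=2))
    model = stored + 1e-12  # round-off sized difference
    # passes with the new defaults (atol=1e-10, rtol=0.0); an exact check would fail
    assert_series_equal(model, stored, atol=1e-10, rtol=0.0)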
{pastastore-1.7.1 → pastastore-1.7.2}/pastastore/extensions/hpd.py
@@ -164,10 +164,10 @@ class HydroPandasExtension:
        metadata.pop("name", None)
        metadata.pop("meta", None)
        unit = metadata.get("unit", None)
-       if unit == "m" and unit_multiplier == 1e-3:
+       if unit == "m" and np.allclose(unit_multiplier, 1e-3):
            metadata["unit"] = "mm"
        elif unit_multiplier != 1.0:
-           metadata["unit"] = f"{unit_multiplier}*{unit}"
+           metadata["unit"] = f"{unit_multiplier:.1e}*{unit}"
 
        source = metadata.get("source", "")
        if len(source) > 0:
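Note: the fixed unit handling labels a 1e-3 multiplier on meters as "mm" and gives any other multiplier an explicit scientific-notation label. A standalone sketch mirroring the new logic:

    import numpy as np

    def unit_label(unit: str, unit_multiplier: float) -> str:
        # mirrors the metadata labeling in hpd.py after the fix
        if unit == "m" and np.allclose(unit_multiplier, 1e-3):
            return "mm"
        elif unit_multiplier != 1.0:
            return f"{unit_multiplier:.1e}*{unit}"
        return unit

    print(unit_label("m", 1e-3))  # mm
    print(unit_label("m", 0.5))   # 5.0e-01*m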
@@ -199,6 +199,60 @@ class HydroPandasExtension:
         else:
             raise ValueError("libname must be 'oseries' or 'stresses'.")
 
+    def _get_tmin_tmax(self, tmin, tmax, oseries=None):
+        """Get tmin and tmax from store if not specified.
+
+        Parameters
+        ----------
+        tmin : TimeType
+            start time
+        tmax : TimeType
+            end time
+        oseries : str, optional
+            name of the observation series to get tmin/tmax for, by default None
+
+        Returns
+        -------
+        tmin, tmax : TimeType, TimeType
+            tmin and tmax
+        """
+        # get tmin/tmax if not specified
+        if tmin is None or tmax is None:
+            tmintmax = self._store.get_tmin_tmax(
+                "oseries", names=[oseries] if oseries else None
+            )
+        if tmin is None:
+            tmin = tmintmax.loc[:, "tmin"].min() - Timedelta(days=10 * 365)
+        if tmax is None:
+            tmax = tmintmax.loc[:, "tmax"].max()
+        return tmin, tmax
+
+    @staticmethod
+    def _normalize_datetime_index(obs):
+        """Normalize observation datetime index (i.e. set observation time to midnight).
+
+        Parameters
+        ----------
+        obs : pandas.Series
+            observation series to normalize
+
+        Returns
+        -------
+        hpd.Obs
+            observation series with normalized datetime index
+        """
+        if isinstance(obs, hpd.Obs):
+            metadata = {k: getattr(obs, k) for k in obs._metadata}
+        else:
+            metadata = {}
+        return obs.__class__(
+            timestep_weighted_resample(
+                obs,
+                obs.index.normalize(),
+            ).rename(obs.name),
+            **metadata,
+        )
+
     def download_knmi_precipitation(
         self,
         stns: Optional[list[int]] = None,
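Note: _normalize_datetime_index (moved here from further down in hpd.py) shifts each observation to midnight with timestep_weighted_resample, so a daily total is stamped on the day it belongs to. A small sketch of the index part of that operation (illustrative values):

    import pandas as pd

    idx = pd.DatetimeIndex(["2024-01-01 08:00", "2024-01-02 08:00"])
    prec = pd.Series([1.2, 3.4], index=idx, name="prec")
    print(prec.index.normalize())
    # DatetimeIndex(['2024-01-01', '2024-01-02'], dtype='datetime64[ns]', freq=None)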
@@ -303,7 +357,7 @@ class HydroPandasExtension:
             variable to download, by default "RH", valid options are
             e.g. ["RD", "RH", "EV24", "T", "Q"].
         kind : str
-            kind identifier for observations, usually "prec" or "evap".
+            kind identifier for observations in pastastore, usually "prec" or "evap".
         stns : list of int/str, optional
             list of station numbers to download data for, by default None
         tmin : TimeType, optional
@@ -320,12 +374,7 @@ class HydroPandasExtension:
             if True, normalize the datetime so stress value at midnight represents
             the daily total, by default True.
         """
-
-        tmintmax = self._store.get_tmin_tmax("oseries")
-        if tmin is None:
-            tmin = tmintmax.loc[:, "tmin"].min() - Timedelta(days=10 * 365)
-        if tmax is None:
-            tmax = tmintmax.loc[:, "tmax"].max()
+        tmin, tmax = self._get_tmin_tmax(tmin, tmax)
 
         if stns is None:
             locations = self._store.oseries.loc[:, ["x", "y"]]
@@ -354,6 +403,155 @@ class HydroPandasExtension:
             normalize_datetime_index=normalize_datetime_index,
         )
 
+    def download_nearest_knmi_precipitation(
+        self,
+        oseries: str,
+        meteo_var: str = "RD",
+        tmin: Optional[TimeType] = None,
+        tmax: Optional[TimeType] = None,
+        unit_multiplier: float = 1e-3,
+        normalize_datetime_index: bool = True,
+        fill_missing_obs: bool = True,
+        **kwargs,
+    ):
+        """Download precipitation time series data from nearest KNMI station.
+
+        Parameters
+        ----------
+        oseries : str
+            download nearest precipitation information for this observation well
+        meteo_var : str, optional
+            variable to download, by default "RD", valid options are ["RD", "RH"].
+        tmin : TimeType
+            start time
+        tmax : TimeType
+            end time
+        unit_multiplier : float, optional
+            multiply unit by this value before saving it in the store,
+            by default 1.0 (no conversion)
+        fill_missing_obs : bool, optional
+            if True, fill missing observations by getting observations from nearest
+            station with data.
+        fill_missing_obs : bool, optional
+            if True, fill missing observations by getting observations from nearest
+            station with data.
+        """
+        self.download_nearest_knmi_meteo(
+            oseries=oseries,
+            meteo_var=meteo_var,
+            kind="prec",
+            tmin=tmin,
+            tmax=tmax,
+            unit_multiplier=unit_multiplier,
+            normalize_datetime_index=normalize_datetime_index,
+            fill_missing_obs=fill_missing_obs,
+            **kwargs,
+        )
+
+    def download_nearest_knmi_evaporation(
+        self,
+        oseries: str,
+        meteo_var: str = "EV24",
+        tmin: Optional[TimeType] = None,
+        tmax: Optional[TimeType] = None,
+        unit_multiplier: float = 1e-3,
+        normalize_datetime_index: bool = True,
+        fill_missing_obs: bool = True,
+        **kwargs,
+    ):
+        """Download evaporation time series data from nearest KNMI station.
+
+        Parameters
+        ----------
+        oseries : str
+            download nearest evaporation information for this observation well
+        meteo_var : str, optional
+            variable to download, by default "EV24", valid options are:
+            ["EV24", "penman", "hargreaves", "makkink"].
+        tmin : TimeType
+            start time
+        tmax : TimeType
+            end time
+        unit_multiplier : float, optional
+            multiply unit by this value before saving it in the store,
+            by default 1.0 (no conversion)
+        fill_missing_obs : bool, optional
+            if True, fill missing observations by getting observations from nearest
+            station with data.
+        fill_missing_obs : bool, optional
+            if True, fill missing observations by getting observations from nearest
+            station with data.
+        """
+        self.download_nearest_knmi_meteo(
+            oseries=oseries,
+            meteo_var=meteo_var,
+            kind="evap",
+            tmin=tmin,
+            tmax=tmax,
+            unit_multiplier=unit_multiplier,
+            normalize_datetime_index=normalize_datetime_index,
+            fill_missing_obs=fill_missing_obs,
+            **kwargs,
+        )
+
+    def download_nearest_knmi_meteo(
+        self,
+        oseries: str,
+        meteo_var: str,
+        kind: str,
+        tmin: Optional[TimeType] = None,
+        tmax: Optional[TimeType] = None,
+        unit_multiplier: float = 1.0,
+        normalize_datetime_index: bool = True,
+        fill_missing_obs: bool = True,
+        **kwargs,
+    ):
+        """Download meteorological data from nearest KNMI station.
+
+        Parameters
+        ----------
+        oseries : str
+            download nearest meteorological information for this observation well
+        meteo_var : str
+            meteorological variable to download, e.g. "RD", "RH", "EV24", "T", "Q"
+        kind : str
+            kind identifier for observations in pastastore, usually "prec" or "evap".
+        tmin : TimeType
+            start time
+        tmax : TimeType
+            end time
+        unit_multiplier : float, optional
+            multiply unit by this value before saving it in the store,
+            by default 1.0 (no conversion)
+        fill_missing_obs : bool, optional
+            if True, fill missing observations by getting observations from nearest
+            station with data.
+        fill_missing_obs : bool, optional
+            if True, fill missing observations by getting observations from nearest
+            station with data.
+        """
+        xy = self._store.oseries.loc[[oseries], ["x", "y"]].to_numpy()
+        # download data
+        tmin, tmax = self._get_tmin_tmax(tmin, tmax, oseries=oseries)
+        knmi = hpd.read_knmi(
+            xy=xy,
+            meteo_vars=[meteo_var],
+            starts=tmin,
+            ends=tmax,
+            fill_missing_obs=fill_missing_obs,
+            **kwargs,
+        )
+        # add to store
+        self.add_obscollection(
+            libname="stresses",
+            oc=knmi,
+            kind=kind,
+            data_column=meteo_var,
+            unit_multiplier=unit_multiplier,
+            update=False,
+            normalize_datetime_index=normalize_datetime_index,
+        )
+
     def update_knmi_meteo(
         self,
         names: Optional[List[str]] = None,
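Note: the three new methods above wire hpd.read_knmi to the store: the two thin wrappers pick kind="prec" or kind="evap" and delegate to download_nearest_knmi_meteo, which looks up the well's x/y coordinates, resolves tmin/tmax, and saves the result as stresses. Usage, mirroring test_nearest_stresses added further down:

    from pastastore.extensions import activate_hydropandas_extension

    activate_hydropandas_extension()
    # fetch data from the KNMI station nearest to this observation well
    pstore.hpd.download_nearest_knmi_precipitation("GMW000000036319_1", tmin="2024-01-01")
    pstore.hpd.download_nearest_knmi_evaporation("GMW000000036319_1", tmin="2024-01-01")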
@@ -386,6 +584,17 @@ class HydroPandasExtension:
         **kwargs : dict, optional
             Additional keyword arguments to pass to `hpd.read_knmi()`
         """
+        if "source" not in self._store.stresses.columns:
+            msg = (
+                "Cannot update KNMI stresses! "
+                "KNMI stresses cannot be identified if 'source' column is not defined."
+            )
+            logger.error(msg)
+            if raise_on_error:
+                raise ValueError(msg)
+            else:
+                return
+
         if names is None:
             names = self._store.stresses.loc[
                 self._store.stresses["source"] == "KNMI"
@@ -497,32 +706,6 @@ class HydroPandasExtension:
             if raise_on_error:
                 raise e
 
-    @staticmethod
-    def _normalize_datetime_index(obs):
-        """Normalize observation datetime index (i.e. set observation time to midnight).
-
-        Parameters
-        ----------
-        obs : pandas.Series
-            observation series to normalize
-
-        Returns
-        -------
-        hpd.Obs
-            observation series with normalized datetime index
-        """
-        if isinstance(obs, hpd.Obs):
-            metadata = {k: getattr(obs, k) for k in obs._metadata}
-        else:
-            metadata = {}
-        return obs.__class__(
-            timestep_weighted_resample(
-                obs,
-                obs.index.normalize(),
-            ).rename(obs.name),
-            **metadata,
-        )
-
     def download_bro_gmw(
         self,
         extent: Optional[List[float]] = None,
{pastastore-1.7.1 → pastastore-1.7.2}/pastastore/version.py
@@ -9,7 +9,7 @@ PASTAS_VERSION = parse_version(ps.__version__)
 PASTAS_LEQ_022 = PASTAS_VERSION <= parse_version("0.22.0")
 PASTAS_GEQ_150 = PASTAS_VERSION >= parse_version("1.5.0")
 
-__version__ = "1.7.1"
+__version__ = "1.7.2"
 
 
 def show_versions(optional=False) -> None:
{pastastore-1.7.1 → pastastore-1.7.2}/pastastore.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pastastore
-Version: 1.7.1
+Version: 1.7.2
 Summary: Tools for managing Pastas time series models.
 Author: D.A. Brakenhoff
 Maintainer-email: "D.A. Brakenhoff" <d.brakenhoff@artesia-water.nl>, "R. Calje" <r.calje@artesia-water.nl>, "M.A. Vonk" <m.vonk@artesia-water.nl>
{pastastore-1.7.1 → pastastore-1.7.2}/pastastore.egg-info/SOURCES.txt
@@ -1,6 +1,7 @@
 LICENSE
 pyproject.toml
 readme.md
+docs/conf.py
 pastastore/__init__.py
 pastastore/base.py
 pastastore/connectors.py
@@ -19,6 +20,7 @@ pastastore.egg-info/top_level.txt
 pastastore/extensions/__init__.py
 pastastore/extensions/accessor.py
 pastastore/extensions/hpd.py
+tests/conftest.py
 tests/test_001_import.py
 tests/test_002_connectors.py
 tests/test_003_pastastore.py
pastastore-1.7.2/tests/conftest.py (new file)
@@ -0,0 +1,169 @@
+# ruff: noqa: D100 D103
+import importlib
+from importlib import metadata
+from platform import python_version
+
+import pandas as pd
+import pastas as ps
+import pytest
+from packaging.version import parse as parse_version
+
+import pastastore as pst
+
+IS_PY312 = parse_version(python_version()) >= parse_version("3.12.0")
+
+params = ["dict", "pas", "arcticdb"] if not IS_PY312 else ["dict", "pas"]
+
+
+def initialize_project(conn):
+    pstore = pst.PastaStore(conn, "test_project")
+
+    # oseries 1
+    o = pd.read_csv("./tests/data/obs.csv", index_col=0, parse_dates=True)
+    pstore.add_oseries(o, "oseries1", metadata={"x": 165000, "y": 424000})
+
+    # oseries 2
+    o = pd.read_csv("./tests/data/head_nb1.csv", index_col=0, parse_dates=True)
+    pstore.add_oseries(o, "oseries2", metadata={"x": 164000, "y": 423000})
+
+    # oseries 3
+    o = pd.read_csv("./tests/data/gw_obs.csv", index_col=0, parse_dates=True)
+    pstore.add_oseries(o, "oseries3", metadata={"x": 165554, "y": 422685})
+
+    # prec 1
+    s = pd.read_csv("./tests/data/rain.csv", index_col=0, parse_dates=True)
+    pstore.add_stress(s, "prec1", kind="prec", metadata={"x": 165050, "y": 424050})
+
+    # prec 2
+    s = pd.read_csv("./tests/data/rain_nb1.csv", index_col=0, parse_dates=True)
+    pstore.add_stress(s, "prec2", kind="prec", metadata={"x": 164010, "y": 423000})
+
+    # evap 1
+    s = pd.read_csv("./tests/data/evap.csv", index_col=0, parse_dates=True)
+    pstore.add_stress(s, "evap1", kind="evap", metadata={"x": 164500, "y": 424000})
+
+    # evap 2
+    s = pd.read_csv("./tests/data/evap_nb1.csv", index_col=0, parse_dates=True)
+    pstore.add_stress(s, "evap2", kind="evap", metadata={"x": 164000, "y": 423030})
+
+    # well 1
+    s = pd.read_csv("./tests/data/well_month_end.csv", index_col=0, parse_dates=True)
+    try:
+        s = ps.ts.timestep_weighted_resample(
+            s,
+            pd.date_range(s.index[0] - pd.offsets.MonthBegin(), s.index[-1], freq="D"),
+        ).bfill()
+    except AttributeError:
+        # pastas<=0.22.0
+        pass
+    pstore.add_stress(s, "well1", kind="well", metadata={"x": 164691, "y": 423579})
+    # add second well
+    pstore.add_stress(
+        s + 10, "well2", kind="well", metadata={"x": 164691 + 200, "y": 423579_200}
+    )
+
+    return pstore
+
+
+@pytest.fixture(scope="module", params=params)
+def conn(request):
+    """Fixture that yields connection object."""
+    name = f"test_{request.param}"
+    # connect to dbase
+    if request.param == "arcticdb":
+        uri = "lmdb://./arctic_db/"
+        conn = pst.ArcticDBConnector(name, uri)
+    elif request.param == "dict":
+        conn = pst.DictConnector(name)
+    elif request.param == "pas":
+        conn = pst.PasConnector(name, "./tests/data")
+    else:
+        raise ValueError("Unrecognized parameter!")
+    conn.type = request.param  # added here for defining test dependencies
+    return conn
+
+
+@pytest.fixture(scope="module", params=params)
+def pstore(request):
+    if request.param == "arcticdb":
+        name = "test_project"
+        uri = "lmdb://./arctic_db/"
+        connector = pst.ArcticDBConnector(name, uri)
+    elif request.param == "dict":
+        name = "test_project"
+        connector = pst.DictConnector(name)
+    elif request.param == "pas":
+        name = "test_project"
+        connector = pst.PasConnector(name, "./tests/data/pas")
+    else:
+        raise ValueError("Unrecognized parameter!")
+    pstore = initialize_project(connector)
+    pstore.type = request.param  # added here for defining test dependencies
+    yield pstore
+    pst.util.delete_pastastore(pstore)
+
+
+def delete_arcticdb_test_db():
+    connstr = "lmdb://./arctic_db/"
+    name = "test_project"
+    connector = pst.ArcticDBConnector(name, connstr)
+    pst.util.delete_arcticdb_connector(connector)
+    print("ArcticDBConnector 'test_project' deleted.")
+
+
+_has_pkg_cache = {}
+
+
+def has_pkg(pkg: str, strict: bool = True) -> bool:
+    """
+    Determine if the given Python package is installed.
+
+    Parameters
+    ----------
+    pkg : str
+        Name of the package to check.
+    strict : bool
+        If False, only check if package metadata is available.
+        If True, try to import the package (all dependencies must be present).
+
+    Returns
+    -------
+    bool
+        True if the package is installed, otherwise False.
+
+    Notes
+    -----
+    Originally written by Mike Toews (mwtoews@gmail.com) for FloPy.
+    """
+
+    def try_import():
+        try:  # import name, e.g. "import shapefile"
+            importlib.import_module(pkg)
+            return True
+        except ModuleNotFoundError:
+            return False
+
+    def try_metadata() -> bool:
+        try:  # package name, e.g. pyshp
+            metadata.distribution(pkg)
+            return True
+        except metadata.PackageNotFoundError:
+            return False
+
+    found = False
+    if not strict:
+        found = pkg in _has_pkg_cache or try_metadata()
+    if not found:
+        found = try_import()
+    _has_pkg_cache[pkg] = found
+
+    return _has_pkg_cache[pkg]
+
+
+def requires_pkg(*pkgs):
+    missing = {pkg for pkg in pkgs if not has_pkg(pkg, strict=True)}
+    return pytest.mark.skipif(
+        missing,
+        reason=f"missing package{'s' if len(missing) != 1 else ''}: "
+        + ", ".join(missing),
+    )
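Note: conftest.py centralizes the connector/pstore fixtures and the has_pkg/requires_pkg helpers borrowed from FloPy. A hypothetical test module could gate a test on an optional dependency like this:

    from conftest import requires_pkg  # hypothetical import inside the test suite

    @requires_pkg("hydropandas")
    def test_with_hydropandas():
        import hydropandas  # only runs when hydropandas imports cleanly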
{pastastore-1.7.1 → pastastore-1.7.2}/tests/test_007_hpdextension.py
@@ -67,3 +67,19 @@ def test_update_stresses():
     pstore.hpd.update_knmi_meteo(tmax="2024-01-31", normalize_datetime_index=False)
     tmintmax = pstore.get_tmin_tmax("stresses")
     assert (tmintmax["tmax"] >= Timestamp("2024-01-31")).all()
+
+
+@pytest.mark.xfail(reason="KNMI is being flaky, so allow this test to xfail/xpass.")
+@pytest.mark.pastas150
+def test_nearest_stresses():
+    from pastastore.extensions import activate_hydropandas_extension
+
+    activate_hydropandas_extension()
+
+    pstore = pst.PastaStore.from_zip("tests/data/test_hpd_update.zip")
+    pstore.hpd.download_nearest_knmi_precipitation(
+        "GMW000000036319_1", tmin="2024-01-01"
+    )
+    assert "RD_GROOT-AMMERS" in pstore.stresses_names
+    pstore.hpd.download_nearest_knmi_evaporation("GMW000000036319_1", tmin="2024-01-01")
+    assert "EV24_CABAUW-MAST" in pstore.stresses_names
pastastore-1.7.1/pastastore.egg-info/top_level.txt (removed)
@@ -1 +0,0 @@
-pastastore