snowpat 0.3.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
snowpat-0.3.1/PKG-INFO ADDED
@@ -0,0 +1,181 @@
1
+ Metadata-Version: 2.1
2
+ Name: snowpat
3
+ Version: 0.3.1
4
+ Summary:
5
+ Author: leibersp
6
+ Author-email: patrick.leibersperger@slf.ch
7
+ Requires-Python: >=3.9,<4.0
8
+ Classifier: Programming Language :: Python :: 3
9
+ Classifier: Programming Language :: Python :: 3.9
10
+ Classifier: Programming Language :: Python :: 3.10
11
+ Classifier: Programming Language :: Python :: 3.11
12
+ Classifier: Programming Language :: Python :: 3.12
13
+ Requires-Dist: h5py (>=3.5,<4.0)
14
+ Requires-Dist: matplotlib (>=3.8.3,<4.0.0)
15
+ Requires-Dist: numpy (>=1.26,<2.0)
16
+ Requires-Dist: pandas (>=2,<3)
17
+ Description-Content-Type: text/markdown
18
+
19
+ # SNOWPAT
20
+
21
+ This is a toolbox for handling file formats most often used at SLF.
22
+
23
+ There are three submodules:
24
+ pysmet: Used to read and write SMET files
25
+ snowpackreader: Used to read SNOWPACK output files (.pro) and handle profiles easily (soon also with visualization)
26
+ SnowLense: Plotting Framework for files that can be read with this module (SMET not yet available) ([see Documentation](https://snowpat-patrick-leibersperger-c7ec84b7d2c6ab235482777a3905915fe.gitlab-pages.wsl.ch/indexplot/))
27
+
28
+ ## News
29
+
30
+ 2024-03-08: Plotting of Snow Profiles is available with SnowLense module
31
+
32
+ 2024-03-01: A simple merge function is now available to join two SMET Files: merge(SMETFile, override) and mergeFromFile(filename, override)
33
+
34
+ ## Installation
35
+
36
+ Installation via pip and poetry is supported.
37
+
38
+ The easiest way is to install directly from git (needs git to be installed):
39
+
40
+ ```bash
41
+ pip install [--user] git+https://gitlabext.wsl.ch/patrick.leibersperger/snowpat.git
42
+ ```
43
+
44
+ the --user option might be needed if you do not have admin rights.
45
+
46
+ ### Upgrade
47
+
48
+ If you already have an installation of Snowpat that is out of date, run:
49
+
50
+ ```bash
51
+ pip install [--user] --upgrade git+https://gitlabext.wsl.ch/patrick.leibersperger/snowpat.git
52
+ ```
53
+
54
+ ### Manually
55
+
56
+ Download the folder, and from the main directory run:
57
+
58
+ ```bash
59
+ poetry install
60
+ ```
61
+
62
+ or:
63
+
64
+ ```bash
65
+ pip install [--user] .
66
+ ```
67
+
68
+ ## Documentation
69
+
70
+ The main documentation can be found under the respective module names, i.e. pySMET and SMET, as well as snowpackreader
71
+
72
+ Extensive Documentation is available [online](http://snowpat-patrick-leibersperger-c7ec84b7d2c6ab235482777a3905915fe.gitlab-pages.wsl.ch/), it uses http, so you might get a privacy error in your browser;
73
+ Or prebuilt in artifacts.zip, which can be found in [job artifacts](https://gitlabext.wsl.ch/patrick.leibersperger/snowpat/-/artifacts): under (Number) files download the folder.
74
+
75
+ If you download the zip folder just open index.html in your browser.
76
+
77
+ Or you can build the docs yourself.
78
+
79
+ ### MkDocs
80
+
81
+ To create the docs with MkDocs:
82
+ Install via
83
+
84
+ ```bash
85
+ pip install mkdocs
86
+ ```
87
+
88
+ run:
89
+
90
+ ```bash
91
+ mkdocs serve
92
+ ```
93
+
94
+ from the main directory and follow the link shown (localhost)
95
+
96
+
97
+ ## License
98
+
99
+ This project is licensed under the terms of the GNU-GPL-3.0 license.
100
+
101
+ ## Examples
102
+
103
+ Please see the Documentation for more Examples and information on the full capabilities
104
+
105
+ ```python
106
+ from snowpat import pysmet as smet
107
+ from snowpat import snowpackreader as spr
108
+ ```
109
+
110
+ ### Examples pySMET
111
+
112
+ ```python
113
+ from snowpat import pysmet as smet
114
+
115
+ file = smet.read(filename)
116
+ data_pandas = file.data
117
+ data_numpy = file.toNumpy()
118
+ # meta_data only contains the mandatory SMET metadata
119
+ station_id = file.meta_data.station_id
120
+ lon = file.meta_data.location.longitude
121
+
122
+ # optional_meta_data according to the file format can be accessed like this:
123
+ timezone = file.optional_meta_data.tz
124
+
125
+ # acdd metadata (anything preceded with acdd_ or known acdd attributes are stored in acdd metadata)
126
+ acdd_creator_name = file.acdd_meta_data.get_attribute("creator_name")
127
+
128
+ # everything else is in other metadata
129
+ value = file.other_metadata["key"]
130
+
131
+ # changing metadata
132
+ file.meta_data.station_ID = "WFJ"
133
+ file.acdd_meta_data.set_attribute("creator_name", "SomeName")
134
+
135
+ # and for writing to an output again (if no output filename is provided, the given filename is used with an out flag):
136
+ file.write(out_filename)
137
+
138
+ # a summary is also available with
139
+ file.info()
140
+
141
+ #UNTESTED:
142
+ # it is also possible to merge two SMET files, as long as they are compatible (metadata and fields)
143
+ other_file = smet.read(other_filename)
144
+ file.merge(other_file)
145
+
146
+ # or
147
+ list_of_files_to_merge = [filename1, filename2,filename3,...]
148
+ merged_file = smet.merge_files(list_of_files_to_merge)
149
+ ```
150
+
151
+ ### Examples snowpackreader
152
+
153
+ ```python
154
+ from snowpat import snowpackreader as spr
155
+ pro = spr.readPRO("test.pro")
156
+
157
+ # print a summary of the file
158
+ pro.info()
159
+
160
+ # all available dates
161
+ dates = pro.get_all_dates()
162
+
163
+ # will only return data above the ground after this
164
+ pro.discard_below_ground(True)
165
+ # get a Snowpack object (internal data class for Profiles) on a specific date
166
+ profile = pro.get_profile_on(dates[0])
167
+ # convert it to a dataframe with minimum stability and surface hoar as metadata
168
+ # column names will be data codes, except for "0500"= height (layer boundaries)-> 2 columns: layer middle and layer thickness
169
+ profile.toDf().head()
170
+ wl = profile.weak_layer # or profile.get_param["0534"]
171
+ sh = profile.surface_hoar # or profile.get_param["0514"]
172
+
173
+ # There is help, to deal with the DataCodes:
174
+ # per default, the names are as in the .pro Header (without units)
175
+ pro.update_name_of_code("0503", "Snow Density")
176
+ density_code = pro.name_to_code("Snow Density")
177
+
178
+
179
+ ```
180
+
181
+ Logo was created with: hotpot.ai/art-generator
@@ -0,0 +1,163 @@
1
+ # SNOWPAT
2
+
3
+ This is a toolbox for handling file formats most often used at SLF.
4
+
5
+ There are three submodules:
6
+ pysmet: Used to read and write SMET files
7
+ snowpackreader: Used to read SNOWPACK output files (.pro) and handle profiles easily (soon also with visualization)
8
+ SnowLense: Plotting Framework for files that can be read with this module (SMET not yet available) ([see Documentation](https://snowpat-patrick-leibersperger-c7ec84b7d2c6ab235482777a3905915fe.gitlab-pages.wsl.ch/indexplot/))
9
+
10
+ ## News
11
+
12
+ 2024-03-08: Plotting of Snow Profiles is available with SnowLense module
13
+
14
+ 2024-03-01: A simple merge function is now available to join two SMET Files: merge(SMETFile, override) and mergeFromFile(filename, override)
15
+
16
+ ## Installation
17
+
18
+ Installation via pip and poetry is supported.
19
+
20
+ The easiest way is to install directly from git (needs git to be installed):
21
+
22
+ ```bash
23
+ pip install [--user] git+https://gitlabext.wsl.ch/patrick.leibersperger/snowpat.git
24
+ ```
25
+
26
+ the --user option might be needed if you do not have admin rights.
27
+
28
+ ### Upgrade
29
+
30
+ If you already have an installation of Snowpat that is out of date, run:
31
+
32
+ ```bash
33
+ pip install [--user] --upgrade git+https://gitlabext.wsl.ch/patrick.leibersperger/snowpat.git
34
+ ```
35
+
36
+ ### Manually
37
+
38
+ Download the folder, and from the main directory run:
39
+
40
+ ```bash
41
+ poetry install
42
+ ```
43
+
44
+ or:
45
+
46
+ ```bash
47
+ pip install [--user] .
48
+ ```
49
+
50
+ ## Documentation
51
+
52
+ The main documentation can be found under the respective module names, i.e. pySMET and SMET, as well as snowpackreader
53
+
54
+ Extensive Documentation is available [online](http://snowpat-patrick-leibersperger-c7ec84b7d2c6ab235482777a3905915fe.gitlab-pages.wsl.ch/), it uses http, so you might get a privacy error in your browser;
55
+ Or prebuilt in artifacts.zip, which can be found in [job artifacts](https://gitlabext.wsl.ch/patrick.leibersperger/snowpat/-/artifacts): under (Number) files download the folder.
56
+
57
+ If you download the zip folder just open index.html in your browser.
58
+
59
+ Or you can build the docs yourself.
60
+
61
+ ### MkDocs
62
+
63
+ To create the docs with MkDocs:
64
+ Install via
65
+
66
+ ```bash
67
+ pip install mkdocs
68
+ ```
69
+
70
+ run:
71
+
72
+ ```bash
73
+ mkdocs serve
74
+ ```
75
+
76
+ from the main directory and follow the link shown (localhost)
77
+
78
+
79
+ ## License
80
+
81
+ This project is licensed under the terms of the GNU-GPL-3.0 license.
82
+
83
+ ## Examples
84
+
85
+ Please see the Documentation for more Examples and information on the full capabilities
86
+
87
+ ```python
88
+ from snowpat import pysmet as smet
89
+ from snowpat import snowpackreader as spr
90
+ ```
91
+
92
+ ### Examples pySMET
93
+
94
+ ```python
95
+ from snowpat import pysmet as smet
96
+
97
+ file = smet.read(filename)
98
+ data_pandas = file.data
99
+ data_numpy = file.toNumpy()
100
+ # meta_data only contains the mandatory SMET metadata
101
+ station_id = file.meta_data.station_id
102
+ lon = file.meta_data.location.longitude
103
+
104
+ # optional_meta_data according to the file format can be accessed like this:
105
+ timezone = file.optional_meta_data.tz
106
+
107
+ # acdd metadata (anything preceded with acdd_ or known acdd attributes are stored in acdd metadata)
108
+ acdd_creator_name = file.acdd_meta_data.get_attribute("creator_name")
109
+
110
+ # everything else is in other metadata
111
+ value = file.other_metadata["key"]
112
+
113
+ # changing metadata
114
+ file.meta_data.station_ID = "WFJ"
115
+ file.acdd_meta_data.set_attribute("creator_name", "SomeName")
116
+
117
+ # and for writing to an output again (if no output filename is provided, the given filename is used with an out flag):
118
+ file.write(out_filename)
119
+
120
+ # a summary is also available with
121
+ file.info()
122
+
123
+ #UNTESTED:
124
+ # it is also possible to merge two SMET files, as long as they are compatible (metadata and fields)
125
+ other_file = smet.read(other_filename)
126
+ file.merge(other_file)
127
+
128
+ # or
129
+ list_of_files_to_merge = [filename1, filename2,filename3,...]
130
+ merged_file = smet.merge_files(list_of_files_to_merge)
131
+ ```
132
+
133
+ ### Examples snowpackreader
134
+
135
+ ```python
136
+ from snowpat import snowpackreader as spr
137
+ pro = spr.readPRO("test.pro")
138
+
139
+ # print a summary of the file
140
+ pro.info()
141
+
142
+ # all available dates
143
+ dates = pro.get_all_dates()
144
+
145
+ # will only return data above the ground after this
146
+ pro.discard_below_ground(True)
147
+ # get a Snowpack object (internal data class for Profiles) on a specific date
148
+ profile = pro.get_profile_on(dates[0])
149
+ # convert it to a dataframe with minimum stability and surface hoar as metadata
150
+ # column names will be data codes, except for "0500"= height (layer boundaries)-> 2 columns: layer middle and layer thickness
151
+ profile.toDf().head()
152
+ wl = profile.weak_layer # or profile.get_param["0534"]
153
+ sh = profile.surface_hoar # or profile.get_param["0514"]
154
+
155
+ # There is help, to deal with the DataCodes:
156
+ # per default, the names are as in the .pro Header (without units)
157
+ pro.update_name_of_code("0503", "Snow Density")
158
+ density_code = pro.name_to_code("Snow Density")
159
+
160
+
161
+ ```
162
+
163
+ Logo was created with: hotpot.ai/art-generator
@@ -0,0 +1,23 @@
1
+ [tool.poetry]
2
+ name = "snowpat"
3
+ version = "0.3.1"
4
+ description = ""
5
+ authors = ["leibersp <patrick.leibersperger@slf.ch>"]
6
+ readme = "README.md"
7
+
8
+ [tool.poetry.dependencies]
9
+ python = "^3.9"
10
+ pandas = "^2"
11
+ h5py = "^3.5"
12
+ numpy = "^1.26"
13
+ matplotlib = "^3.8.3"
14
+
15
+
16
+ [tool.poetry.group.dev.dependencies]
17
+ ipykernel = "^6.29.2"
18
+
19
+ [build-system]
20
+ requires = ["poetry-core"]
21
+ build-backend = "poetry.core.masonry.api"
22
+
23
+ [tool.poetry_bumpversion.file."snowpat/__init__.py"]
@@ -0,0 +1,152 @@
1
+ from enum import Enum
2
+ from datetime import datetime
3
+ from typing import Union, Dict, Callable, get_origin, get_args
4
+ from matplotlib.colors import Colormap, Normalize
5
+ from numpy import ndarray
6
+ from collections.abc import Callable as CallableType
7
+ import matplotlib.pyplot as plt
8
+ import numpy as np
9
+
10
class PlotType(Enum):
    """Kind of input being plotted by the SnowLense module."""

    PRO = 1      # full .pro timeseries
    SMET = 2     # SMET timeseries
    PROFILE = 3  # a single snow profile
14
+
15
+ from typing import get_origin, get_args, Union, Callable
16
+
17
def show_figure(fig: plt.Figure):
    """Attach *fig* to a fresh canvas manager so it can be shown with ``plt.show``.

    Parameters:
        fig (matplotlib.figure.Figure): The figure to be displayed.
    """
    # Create a throwaway figure purely to obtain a canvas manager, then
    # re-point that manager's canvas at the figure we actually want to show.
    placeholder = plt.figure()
    manager = placeholder.canvas.manager
    manager.canvas.figure = fig
    fig.set_canvas(manager.canvas)
29
+
30
+
31
def _type_to_string(t):
    """Render a typing annotation as a human-readable description."""
    origin = get_origin(t)
    if origin is None:
        # A plain class, e.g. int -> "int"
        return t.__name__
    if origin is Union:
        alternatives = ", or ".join(_type_to_string(option) for option in get_args(t))
        return f"Either: {alternatives}"
    if origin is CallableType:
        # get_args(Callable[[A, B], R]) == ([A, B], R): everything but the
        # last element describes the parameters, the last is the return type.
        *param_specs, ret = get_args(t)
        params = ", ".join(_type_to_string(p) for p in param_specs[0])
        return f"function({params}) -> {_type_to_string(ret)}"
    # Any other parameterized generic, e.g. dict[str, int]
    inner = ", ".join(_type_to_string(arg) for arg in get_args(t))
    return f"{origin.__name__}[{inner}]"
51
+
52
def _is_of_type(value, needed_type) -> bool:
    """Runtime check whether *value* matches the (possibly generic) annotation *needed_type*.

    Supports plain classes, Union, Callable, list[...] and dict[...]; any
    other parameterized generic is checked against its origin class only.

    Parameters:
        value: The object to check.
        needed_type: A class or typing annotation.

    Returns:
        bool: True if *value* conforms to *needed_type*.
    """
    n_origin = get_origin(needed_type)
    if n_origin is None:
        return isinstance(value, needed_type)
    elif n_origin is Union:
        return any(_is_of_type(value, arg) for arg in get_args(needed_type))
    elif n_origin is CallableType:
        return _check_function(value)
    elif n_origin is list:
        if not isinstance(value, list):
            return False
        if not value:
            # An empty list vacuously matches any element type.
            return True
        return all(_is_of_type(val, get_args(needed_type)[0]) for val in value)
    elif n_origin is dict:
        if not isinstance(value, dict):
            return False
        if not value:
            return True
        key_type, dval_type = get_args(needed_type)
        return all(_is_of_type(key, key_type) and _is_of_type(val, dval_type)
                   for key, val in value.items())
    else:
        # BUG FIX: isinstance() raises TypeError for subscripted generics
        # (e.g. tuple[int, ...]), so check against the bare origin class
        # instead of the annotation itself.
        return isinstance(value, n_origin)
76
+
77
def _check_function(func) -> bool:
    """Validate a user-supplied data-editing callback.

    The callback must accept ``(var_code: str, data: np.ndarray)`` and
    return a numpy array with the same shape as the input.

    Parameters:
        func: Candidate callable to probe with a small dummy input.

    Returns:
        bool: True if *func* behaves as required, False otherwise.
    """
    if not callable(func):
        return False
    code = "0501"
    data = np.array([0, 1, 2, 3, 4])
    try:
        res = func(code, data)
        if not isinstance(res, np.ndarray):
            print("The data editing function needs to return a numpy array")
            return False
        if res.shape != data.shape:
            print("the data editing function needs to return an array of the same shape as the input")
            return False
    except Exception:
        # BUG FIX: a bare ``except:`` would also swallow KeyboardInterrupt /
        # SystemExit; only ordinary errors mean "not a valid callback".
        return False
    return True
93
+
94
# Options understood by every plot type, mapped to their expected type,
# plus matching one-line help strings.
GENERAL_KWARGS = {
    "outfile": str,
}
GENERAL_HELP_TEXT = {
    "outfile": "The path to save the plot to",
}

# SMET plotting currently accepts no extra options.
SMET_KWARGS = {}
SMET_HELP_TEXT = {}

# Options for single-profile plots.
PROFILE_KWARGS = {
    "ind_mfcrust": bool,
    "standardized_limits": bool,
}
PROFILE_HELP_TEXT = {
    "ind_mfcrust": "If True, the MF crust will be indicated",
    "standardized_limits": "If True, the limits of the plot will be set as in niviz, otherwise depending on data",
}
112
+
113
# Options for full-timeseries SNOWPACK plots, mapped to their expected types.
SNOWPACK_KWARGS = {
    # time axis
    "start": datetime,
    "stop": datetime,
    "resolution": str,
    "num_ticks": int,
    # colors
    "cmap": Union[Colormap, Dict[str, Colormap], Dict[str, str]],
    "norm": Union[Normalize, Dict[str, Normalize]],
    "cbar_label": Union[str, Dict[str, str], type(None)],
    # layout
    "n_cols": int,
    "title": str,
    "vmin": Union[int, Dict[str, int]],
    "vmax": Union[int, Dict[str, int]],
    # data handling and toggles
    "adjust_data": Callable[[str, ndarray], ndarray],
    "single_ticks": bool,
    "set_ylabel": bool,
    "colorbar": bool,
    "ind_mfcrust": bool,
    "profile_on": datetime,
}

# One-line help string for each entry in SNOWPACK_KWARGS.
SNOWPACK_HELP_TEXT = {
    "start": "The start time of the plot",
    "stop": "The stop time of the plot",
    "resolution": "The resolution of dates of the plot",
    "num_ticks": "The number of ticks on the x-axis, if not set, the ticks will be formatted automatically",
    "cmap": "The colormap to use for the plot, either a single colormap or a dictionary with colormaps for each variable",
    "norm": "The normalization to use for the plot, either a single normalization or a dictionary with normalizations for each variable",
    "cbar_label": "The label of the colorbar, either a single label or a dictionary with labels for each variable",
    "n_cols": "The subplots to be plotted side by side",
    "title": "The title of the plot",
    "vmin": "The minimum value of the colorbar, either a single value or a dictionary with values for each variable",
    "vmax": "The maximum value of the colorbar, either a single value or a dictionary with values for each variable",
    "adjust_data": "A function to adjust the data before plotting: f(var_code, ndarray) -> ndarray, needs to handle parameter for a single profile",
    "single_ticks": "If True, ticks will only be plotted per profile will be plotted",
    "set_ylabel": "If True, the y-label will be set",
    "colorbar": "If True, a colorbar will be plotted",
    "ind_mfcrust": "If True, the MF crust will be indicated",
    "profile_on": "The date of the profile to be plotted"
}
152
+
@@ -0,0 +1,6 @@
1
+ from .snowlens import plot, help
2
+ from .plot_snowpack import SnowpackPlotter
3
+ from .plotting import plotProfile
4
+ from .Utils import show_figure
5
+
6
# Public API of the SnowLense package.
__all__ = ["plot", "help", "SnowpackPlotter", "show_figure", "plotProfile"]
@@ -0,0 +1,174 @@
1
+ import matplotlib.colors as mcolors
2
+ import matplotlib.cm as cm
3
+ import matplotlib.pyplot as plt
4
+ import matplotlib.ticker as ticker
5
+ import matplotlib.dates as mdates
6
+
7
+ from dataclasses import dataclass
8
+
9
+ import numpy as np
10
+ import datetime
11
+
12
+ from typing import List, Dict, Optional, Callable, Union
13
+
14
+
15
# Fixed palette used by the grain-type ("0513") ListedColormap; order matters.
# NOTE(review): 'lightblue' appears twice -- presumably intentional (two
# classes sharing a color); confirm against the grain-type class table.
grain_type_color = [
    'greenyellow', 'darkgreen', 'pink', 'lightblue', 'blue',
    'magenta', 'red', 'cyan', 'lightblue',
]
16
+
17
def getColorMap(var_code) -> mcolors.Colormap:
    """Return the default colormap for a SNOWPACK variable code.

    Parameters:
        var_code (str): SNOWPACK data code, e.g. "0513".

    Returns:
        mcolors.Colormap: a ListedColormap of the grain-type palette for
        "0513"; otherwise a named matplotlib colormap ("coolwarm" for
        "0503", "Greys" for "0502", "viridis" for everything else).
    """
    if var_code == "0513":
        # Grain type is categorical: one fixed color per class.
        return mcolors.ListedColormap(grain_type_color)
    named = {"0503": "coolwarm", "0502": "Greys"}
    # BUG FIX: cm.get_cmap() was deprecated in matplotlib 3.7 and removed in
    # 3.9; plt.get_cmap() is the stable accessor with identical behavior.
    # ("0535" and "0506" fall through to the "viridis" default, as before.)
    return plt.get_cmap(named.get(var_code, "viridis"))
36
+
37
def getNorm(var_code) -> mcolors.Normalize:
    """Return the default color normalization for a SNOWPACK variable code."""
    if var_code == "0513":
        # Grain type: boundaries centered between the 9 integer class values.
        return mcolors.BoundaryNorm([0.5, 1.5, 2.5, 3.5, 4.5, 5.5, 6.5, 7.5, 8.5, 9.5], 9)
    linear_limits = {
        "0503": (-20, 0),
        "0506": (0, 5),
        "0502": (0, 600),
        "0535": (0, 60),
    }
    if var_code in linear_limits:
        vmin, vmax = linear_limits[var_code]
        return mcolors.Normalize(vmin=vmin, vmax=vmax)
    # Unknown codes get a logarithmic scale over [0.1, 1].
    return mcolors.LogNorm(vmin=10 ** -1, vmax=1)
51
+
52
def adjustData(var_code: str, data: np.ndarray) -> np.ndarray:
    """Return a copy of *data* with NaNs replaced by -999 and code-specific conversions applied.

    Parameters:
        var_code (str): SNOWPACK data code.
        data (np.ndarray): Raw profile values; left unmodified.

    Returns:
        np.ndarray: The adjusted values.
    """
    adjusted = np.where(np.isnan(data), -999, data.copy())
    if var_code == "0513":
        # Grain type: 772 is mapped to a sentinel, then only the leading
        # digit of each code is kept; missing values are restored to -999
        # and the sentinel becomes the plotting marker 7.2.
        adjusted = np.where(adjusted == 772, -100, adjusted)
        adjusted = np.floor_divide(adjusted, 100).astype(int)
        adjusted = np.where(adjusted == -10, -999, adjusted)
        adjusted = np.where(adjusted == -1, 7.2, adjusted)
    if var_code == "0535":  # TODO: is this needed?
        ice_density = 917
        adjusted = 6 / (ice_density * adjusted / 1000)
    return adjusted
64
+
65
@dataclass
class PlotHelper:
    """Container for per-plot options with per-variable fallback resolution.

    Resolves colormaps, normalizations and labels for a SNOWPACK variable
    code, preferring the explicit per-variable dict, then the global
    setting, then the module-level defaults (getColorMap / getNorm).
    """
    set_ylabel: bool = True       # if True, the y-label will be set
    single_ticks: bool = True     # if True, ticks are plotted only once per profile
    colorbar: bool = True         # if True, a colorbar will be plotted
    set_cbar_label: bool = True   # if True, label the colorbar
    ind_mfcrust: bool = True      # if True, the MF crust will be indicated
    n_cols: int = 1               # subplots side by side
    start: Optional[datetime.datetime] = None
    stop: Optional[datetime.datetime] = None
    resolution: Optional[str] = None
    num_ticks: Optional[int] = None
    cmap: Optional[mcolors.Colormap] = None
    norm: Optional[mcolors.Normalize] = None
    cmap_dict: Optional[Dict[str, mcolors.Colormap]] = None
    norm_dict: Optional[Dict[str, mcolors.Normalize]] = None
    cbar_label: Optional[Union[str, Dict[str, str]]] = None
    title: Optional[str] = None
    subtitle: Optional[Dict[str, str]] = None
    adjust_data: Optional[Callable[[str, np.ndarray], np.ndarray]] = None

    def getCbarLabel(self, var_code: str):
        """Return ``(label, is_available)`` for the colorbar of *var_code*."""
        if isinstance(self.cbar_label, str):
            # BUG FIX: a single string label applies to every variable;
            # previously a plain-str cbar_label hit the dict lookup below
            # and raised TypeError (only KeyError was caught).
            return self.cbar_label, True
        cbar_label = None
        is_available = False
        if self.cbar_label:
            try:
                cbar_label = self.cbar_label[var_code]
                is_available = True
            except KeyError:
                pass
        return cbar_label, is_available

    def getSubtitle(self, var_code: str):
        """Return ``(subtitle, is_available)`` for *var_code*."""
        subtitle = None
        is_available = False
        if self.subtitle:
            try:
                subtitle = self.subtitle[var_code]
                is_available = True
            except KeyError:
                pass
        return subtitle, is_available

    def getNorm(self, var_code) -> mcolors.Normalize:
        """Resolve the normalization for *var_code* (dict > global > default)."""
        if self.norm_dict:
            try:
                return self.norm_dict[var_code]
            except KeyError:
                return getNorm(var_code)
        if self.norm:
            # BUG FIX: was ``self._plot_info.norm``, which always raised
            # AttributeError -- PlotHelper has no ``_plot_info`` attribute.
            return self.norm
        return getNorm(var_code)

    def getCmap(self, var_code) -> mcolors.Colormap:
        """Resolve the colormap for *var_code* (dict > global > default)."""
        if self.cmap_dict:
            try:
                cmap = self.cmap_dict[var_code]
            except KeyError:
                cmap = getColorMap(var_code)
        elif self.cmap:
            cmap = self.cmap
        else:
            cmap = getColorMap(var_code)
        if isinstance(cmap, str):
            # Users may pass a colormap name instead of a Colormap instance.
            cmap = cm.get_cmap(cmap)
        return cmap

    def handleData(self, var_code: str, data: np.ndarray) -> np.ndarray:
        """Apply the user's adjust_data hook, or the default adjustData.

        For codes "0513" and "0503" the default conversion is applied as a
        fallback when the user's hook returned the data unchanged.
        """
        if self.adjust_data:
            res = self.adjust_data(var_code, data)
            if var_code == "0513" or var_code == "0503":
                if np.array_equal(res, data):
                    res = adjustData(var_code, data)
        else:
            res = adjustData(var_code, data)
        return res
144
+
145
class CustomFormatter(mdates.DateFormatter):
    # Tick formatter that prints the year format on the first tick and on
    # year changes, the month format on month changes, and an empty string
    # for repeated months.
    # NOTE(review): stateful -- relies on ticks being formatted left to
    # right in one pass, with pos == 0 resetting prev_date each draw.
    def __init__(self, month_fmt, year_fmt):
        # month_fmt / year_fmt: strftime patterns (e.g. "%b" and "%Y") --
        # presumably; confirm against callers.
        self.month_fmt = month_fmt
        self.year_fmt = year_fmt
        super().__init__(month_fmt)

    def __call__(self, x, pos=0):
        # Convert the current x value to a date
        current_date = mdates.num2date(x)

        # If this is the first date, there's no previous date to compare to
        if pos == 0:
            self.prev_date = current_date
            return current_date.strftime(self.year_fmt)

        # If the current date and the previous one have the same year, return the month format
        if current_date.year == self.prev_date.year:
            if current_date.month == self.prev_date.month:
                return ''
            else:
                self.prev_date = current_date
                return current_date.strftime(self.month_fmt)

        # Otherwise, update the previous date and return the year format
        self.prev_date = current_date
        return current_date.strftime(self.year_fmt)
171
+
172
def _add_crust_lines(ax: plt.Axes, x: list, h: np.ndarray, mesh: np.ndarray):
    """Hatch the cells of *mesh* flagged as melt-freeze crust (value 7.2)."""
    # Mask everything that is not a crust cell, then overlay a fully
    # transparent hatched layer so the underlying colors stay visible.
    crust_only = np.ma.masked_where(mesh != 7.2, mesh)
    ax.pcolor(x, h, crust_only.transpose(), hatch='///', alpha=0.)