sdf-xarray 0.2.0-cp312-cp312-win_amd64.whl → 0.5.0-cp312-cp312-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lib/SDFC_14.4.7/sdfc.lib +0 -0
- sdf_xarray/__init__.py +496 -57
- sdf_xarray/_version.py +22 -4
- sdf_xarray/dataset_accessor.py +124 -0
- sdf_xarray/download.py +88 -0
- sdf_xarray/plotting.py +484 -101
- sdf_xarray/sdf_interface.cp312-win_amd64.pyd +0 -0
- sdf_xarray/sdf_interface.pyx +5 -3
- sdf_xarray-0.5.0.dist-info/METADATA +150 -0
- {sdf_xarray-0.2.0.dist-info → sdf_xarray-0.5.0.dist-info}/RECORD +13 -11
- {sdf_xarray-0.2.0.dist-info → sdf_xarray-0.5.0.dist-info}/WHEEL +1 -1
- {sdf_xarray-0.2.0.dist-info → sdf_xarray-0.5.0.dist-info}/entry_points.txt +3 -0
- {sdf_xarray-0.2.0.dist-info → sdf_xarray-0.5.0.dist-info}/licenses/LICENCE +1 -1
- sdf_xarray-0.2.0.dist-info/METADATA +0 -190
sdf_xarray/_version.py
CHANGED
@@ -1,16 +1,34 @@
-# file generated by
+# file generated by setuptools-scm
 # don't change, don't track in version control
+
+__all__ = [
+    "__version__",
+    "__version_tuple__",
+    "version",
+    "version_tuple",
+    "__commit_id__",
+    "commit_id",
+]
+
 TYPE_CHECKING = False
 if TYPE_CHECKING:
-    from typing import Tuple, Union
+    from typing import Tuple
+    from typing import Union
+
     VERSION_TUPLE = Tuple[Union[int, str], ...]
+    COMMIT_ID = Union[str, None]
 else:
     VERSION_TUPLE = object
+    COMMIT_ID = object
 
 version: str
 __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
+commit_id: COMMIT_ID
+__commit_id__: COMMIT_ID
+
+__version__ = version = '0.5.0'
+__version_tuple__ = version_tuple = (0, 5, 0)
 
-__version__ = version = '0.2.0'
-__version_tuple__ = version_tuple = (0, 2, 0)
+__commit_id__ = commit_id = 'g8a1775409'
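The rewritten version file now records the VCS commit alongside the release number. A minimal sketch of reading these fields from an installed copy of sdf-xarray 0.5.0 (the module path sdf_xarray._version and the attribute names are taken from the diff above; the printed values are illustrative):

from sdf_xarray import _version

# Version metadata written by setuptools-scm at build time.
print(_version.__version__)        # '0.5.0'
print(_version.__version_tuple__)  # (0, 5, 0)
print(_version.__commit_id__)      # 'g8a1775409'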
sdf_xarray/dataset_accessor.py
ADDED
@@ -0,0 +1,124 @@
+from __future__ import annotations
+
+from types import MethodType
+from typing import TYPE_CHECKING
+
+import xarray as xr
+
+from .plotting import animate_multiple, show
+
+if TYPE_CHECKING:
+    from matplotlib.animation import FuncAnimation
+
+
+@xr.register_dataset_accessor("epoch")
+class EpochAccessor:
+    def __init__(self, xarray_obj: xr.Dataset):
+        # The xarray object is the Dataset, which we store as self._ds
+        self._ds = xarray_obj
+
+    def rescale_coords(
+        self,
+        multiplier: float,
+        unit_label: str,
+        coord_names: str | list[str],
+    ) -> xr.Dataset:
+        """
+        Rescales specified X and Y coordinates in the Dataset by a given multiplier
+        and updates the unit label attribute.
+
+        Parameters
+        ----------
+        multiplier : float
+            The factor by which to multiply the coordinate values (e.g., 1e6 for meters to microns).
+        unit_label : str
+            The new unit label for the coordinates (e.g., "µm").
+        coord_names : str or list of str
+            The name(s) of the coordinate variable(s) to rescale.
+            If a string, only that coordinate is rescaled.
+            If a list, all listed coordinates are rescaled.
+
+        Returns
+        -------
+        xr.Dataset
+            A new Dataset with the updated and rescaled coordinates.
+
+        Examples
+        --------
+        # Convert X, Y, and Z from meters to microns
+        >>> ds_in_microns = ds.epoch.rescale_coords(1e6, "µm", coord_names=["X_Grid", "Y_Grid", "Z_Grid"])
+
+        # Convert only X to millimeters
+        >>> ds_in_mm = ds.epoch.rescale_coords(1000, "mm", coord_names="X_Grid")
+        """
+
+        ds = self._ds
+        new_coords = {}
+
+        if isinstance(coord_names, str):
+            # Convert single string to a list
+            coords_to_process = [coord_names]
+        elif isinstance(coord_names, list):
+            # Use the provided list
+            coords_to_process = coord_names
+        else:
+            coords_to_process = list(coord_names)
+
+        for coord_name in coords_to_process:
+            if coord_name not in ds.coords:
+                raise ValueError(
+                    f"Coordinate '{coord_name}' not found in the Dataset. Cannot rescale."
+                )
+
+            coord_original = ds[coord_name]
+
+            coord_rescaled = coord_original * multiplier
+            coord_rescaled.attrs = coord_original.attrs.copy()
+            coord_rescaled.attrs["units"] = unit_label
+
+            new_coords[coord_name] = coord_rescaled
+
+        return ds.assign_coords(new_coords)
+
+    def animate_multiple(
+        self,
+        *variables: str | xr.DataArray,
+        datasets_kwargs: list[dict] | None = None,
+        **kwargs,
+    ) -> FuncAnimation:
+        """
+        Animate multiple Dataset variables on the same axes.
+
+        Parameters
+        ----------
+        variables
+            The variables to animate.
+        datasets_kwargs
+            Per-dataset keyword arguments passed to plotting.
+        kwargs
+            Common keyword arguments forwarded to animation.
+
+        Examples
+        --------
+        >>> anim = ds.epoch.animate_multiple(
+            ds["Derived_Number_Density_Electron"],
+            ds["Derived_Number_Density_Ion"],
+            datasets_kwargs=[{"label": "Electron"}, {"label": "Ion"}],
+            ylabel="Derived Number Density [1/m$^3$]"
+        )
+        >>> anim.save("animation.gif")
+        >>> # Or in a jupyter notebook:
+        >>> anim.show()
+        """
+
+        dataarrays = [
+            self._obj[var] if isinstance(var, str) else var for var in variables
+        ]
+        anim = animate_multiple(
+            *dataarrays,
+            datasets_kwargs=datasets_kwargs,
+            **kwargs,
+        )
+        anim.show = MethodType(show, anim)
+
+        return anim
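For context, a minimal usage sketch of the new epoch accessor. It assumes sdf-xarray 0.5.0 is installed and, instead of opening a real SDF file, builds a toy Dataset by hand; the variable name and data values are placeholders modelled on the docstring examples above, and only the coordinate name "X_Grid" and the accessor itself come from the diff:

import numpy as np
import xarray as xr

import sdf_xarray.dataset_accessor  # noqa: F401  # importing the module registers the "epoch" accessor

# Toy Dataset with a spatial coordinate in metres, standing in for real SDF output.
ds = xr.Dataset(
    {"Electric_Field_Ex": ("X_Grid", np.zeros(8))},
    coords={"X_Grid": ("X_Grid", np.linspace(0.0, 1e-5, 8), {"units": "m"})},
)

# Rescale the X coordinate from metres to microns and relabel its units.
ds_um = ds.epoch.rescale_coords(1e6, "µm", coord_names="X_Grid")
print(ds_um["X_Grid"].attrs["units"])  # µm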
sdf_xarray/download.py
ADDED
@@ -0,0 +1,88 @@
+from pathlib import Path
+from shutil import move
+from typing import TYPE_CHECKING, Literal, TypeAlias
+
+if TYPE_CHECKING:
+    import pooch  # noqa: F401
+
+DatasetName: TypeAlias = Literal[
+    "test_array_no_grids",
+    "test_dist_fn",
+    "test_files_1D",
+    "test_files_2D_moving_window",
+    "test_files_3D",
+    "test_mismatched_files",
+    "test_two_probes_2D",
+    "tutorial_dataset_1d",
+    "tutorial_dataset_2d",
+    "tutorial_dataset_2d_moving_window",
+    "tutorial_dataset_3d",
+]
+
+
+def fetch_dataset(
+    dataset_name: DatasetName, save_path: Path | str | None = None
+) -> Path:
+    """
+    Downloads the specified dataset from its Zenodo URL. If it is already
+    downloaded, then the path to the cached, unzipped directory is returned.
+
+    Parameters
+    ---------
+    dataset_name
+        The name of the dataset to download
+    save_path
+        The directory to save the dataset to (defaults to the cache folder ``"sdf_datasets"``.
+        See `pooch.os_cache` for details on how the cache works)
+
+    Returns
+    -------
+    Path
+        The path to the directory containing the unzipped dataset files
+
+    Examples
+    --------
+    >>> # Assuming the dataset has not been downloaded yet
+    >>> path = fetch_dataset("tutorial_dataset_1d")
+    Downloading file 'tutorial_dataset_1d.zip' ...
+    Unzipping contents of '.../sdf_datasets/tutorial_dataset_1d.zip' to '.../sdf_datasets/tutorial_dataset_1d'
+    >>> path
+    '.../sdf_datasets/tutorial_dataset_1d'
+    """
+    import pooch  # noqa: PLC0415
+
+    logger = pooch.get_logger()
+    datasets = pooch.create(
+        path=pooch.os_cache("sdf_datasets"),
+        base_url="https://zenodo.org/records/17991042/files",
+        registry={
+            "test_array_no_grids.zip": "md5:583c85ed8c31d0e34e7766b6d9f2d6da",
+            "test_dist_fn.zip": "md5:a582ff5e8c59bad62fe4897f65fc7a11",
+            "test_files_1D.zip": "md5:42e53b229556c174c538c5481c4d596a",
+            "test_files_2D_moving_window.zip": "md5:3744483bbf416936ad6df8847c54dad1",
+            "test_files_3D.zip": "md5:a679e71281bab1d373dc4980e6da1a7c",
+            "test_mismatched_files.zip": "md5:710fdc94666edf7777523e8fc9dd1bd4",
+            "test_two_probes_2D.zip": "md5:0f2a4fefe84a15292d066b3320d4d533",
+            "tutorial_dataset_1d.zip": "md5:7fad744d8b8b2b84bba5c0e705fdef7b",
+            "tutorial_dataset_2d.zip": "md5:b7f35c05703a48eb5128049cdd106ffa",
+            "tutorial_dataset_2d_moving_window.zip": "md5:a795f40d18df69263842055de4559501",
+            "tutorial_dataset_3d.zip": "md5:d9254648867016292440fdb028f717f7",
+        },
+        retry_if_failed=10,
+    )
+
+    datasets.fetch(
+        f"{dataset_name}.zip", processor=pooch.Unzip(extract_dir="."), progressbar=True
+    )
+    cache_path = Path(datasets.path) / dataset_name
+
+    if save_path is not None:
+        save_path = Path(save_path)
+        logger.info(
+            "Moving contents of '%s' to '%s'",
+            cache_path,
+            save_path / dataset_name,
+        )
+        return move(cache_path, save_path / dataset_name)
+
+    return cache_path
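A short sketch of fetching one of the bundled datasets with the new downloader (assumes pooch is installed and network access is available; the dataset name comes from the registry above, while the assumption that the unzipped directory contains .sdf files is based on the package's purpose rather than the diff):

from pathlib import Path

from sdf_xarray.download import fetch_dataset

# First call downloads and unzips into the pooch cache; later calls reuse the cached copy.
data_dir = Path(fetch_dataset("tutorial_dataset_1d"))
print(sorted(p.name for p in data_dir.glob("*.sdf")))

# Pass save_path to move the unzipped files out of the cache into a chosen directory instead:
# local_dir = fetch_dataset("tutorial_dataset_1d", save_path="data")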