sdf_xarray-0.2.5-cp310-cp310-win_amd64.whl → sdf_xarray-0.4.0-cp310-cp310-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lib/SDFC_14.4.7/sdfc.lib CHANGED
Binary file
sdf_xarray/__init__.py CHANGED
@@ -1,13 +1,17 @@
+import contextlib
 import os
 import re
 from collections import Counter, defaultdict
 from collections.abc import Callable, Iterable
+from importlib.metadata import version
 from itertools import product
+from os import PathLike as os_PathLike
 from pathlib import Path
 from typing import ClassVar
 
 import numpy as np
 import xarray as xr
+from packaging.version import Version
 from xarray.backends import AbstractDataStore, BackendArray, BackendEntrypoint
 from xarray.backends.file_manager import CachingFileManager
 from xarray.backends.locks import ensure_lock
@@ -15,12 +19,25 @@ from xarray.core import indexing
 from xarray.core.utils import close_on_error, try_read_magic_number_from_path
 from xarray.core.variable import Variable
 
-# NOTE: Do not delete this line, otherwise the "epoch" accessor will not be
-# imported when the user imports sdf_xarray
+# NOTE: Do not delete these lines, otherwise the "epoch" dataset and dataarray
+# accessors will not be imported when the user imports sdf_xarray
+import sdf_xarray.dataset_accessor
+import sdf_xarray.download
 import sdf_xarray.plotting  # noqa: F401
 
+# NOTE: This attempts to initialise the "pint" accessor if the user
+# has installed the package
+with contextlib.suppress(ImportError):
+    import pint_xarray  # noqa: F401
+
 from .sdf_interface import Constant, SDFFile  # type: ignore # noqa: PGH003
 
+# TODO: Remove this once the new kwarg options are fully implemented
+if Version(version("xarray")) >= Version("2025.8.0"):
+    xr.set_options(use_new_combine_kwarg_defaults=True)
+
+PathLike = str | os_PathLike
+
 
 def _rename_with_underscore(name: str) -> str:
     """A lot of the variable names have spaces, forward slashes and dashes in them, which
@@ -51,14 +68,69 @@ def _process_latex_name(variable_name: str) -> str:
     return variable_name
 
 
-def combine_datasets(path_glob: Iterable | str, **kwargs) -> xr.Dataset:
-    """Combine all datasets using a single time dimension"""
+def _resolve_glob(path_glob: PathLike | Iterable[PathLike]):
+    """
+    Normalise input path_glob into a sorted list of absolute, resolved Path objects.
+    """
+
+    try:
+        p = Path(path_glob)
+        paths = list(p.parent.glob(p.name)) if p.name == "*.sdf" else list(p)
+    except TypeError:
+        paths = list({Path(p) for p in path_glob})
+
+    paths = sorted(p.resolve() for p in paths)
+    if not paths:
+        raise FileNotFoundError(f"No files matched pattern or input: {path_glob!r}")
+    return paths
+
+
+def purge_unselected_data_vars(ds: xr.Dataset, data_vars: list[str]) -> xr.Dataset:
+    """
+    If the user has exclusively requested that only certain variables be
+    loaded in, then we purge all other variables and dimensions
+    """
+    existing_data_vars = set(ds.data_vars.keys())
+    vars_to_keep = set(data_vars) & existing_data_vars
+    vars_to_drop = existing_data_vars - vars_to_keep
+    ds = ds.drop_vars(vars_to_drop)
+
+    existing_dims = set(ds.sizes)
+    dims_to_keep = set()
+    for var in vars_to_keep:
+        dims_to_keep.update(ds[var].coords._names)
+        dims_to_keep.update(ds[var].dims)
+
+    coords_to_drop = existing_dims - dims_to_keep
+    return ds.drop_dims(coords_to_drop)
+
+
+def combine_datasets(
+    path_glob: Iterable | str, data_vars: list[str], **kwargs
+) -> xr.Dataset:
+    """
+    Combine all datasets using a single time dimension, optionally extracting
+    data from only the listed data_vars
+    """
+
+    if data_vars is not None:
+        return xr.open_mfdataset(
+            path_glob,
+            join="outer",
+            coords="different",
+            compat="no_conflicts",
+            combine="nested",
+            concat_dim="time",
+            preprocess=SDFPreprocess(data_vars=data_vars),
+            **kwargs,
+        )
 
     return xr.open_mfdataset(
         path_glob,
-        data_vars="minimal",
-        coords="minimal",
-        compat="override",
+        data_vars="all",
+        coords="different",
+        compat="no_conflicts",
+        join="outer",
         preprocess=SDFPreprocess(),
         **kwargs,
     )
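
A hedged usage sketch of the selective-loading path added above (the `run/*.sdf` glob and the variable name are illustrative, not taken from the package):

```python
# Sketch: load only one variable from a run directory via the data_vars
# filtering added above. Glob and variable name are illustrative.
from sdf_xarray import open_mfdataset

ds = open_mfdataset("run/*.sdf", data_vars=["Electric_Field_Ex"])
# Per file, purge_unselected_data_vars() drops every other variable and any
# dimension only they required, before concatenation over "time".
print(ds.data_vars)
```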
@@ -69,6 +141,8 @@ def open_mfdataset(
     *,
     separate_times: bool = False,
     keep_particles: bool = False,
+    probe_names: list[str] | None = None,
+    data_vars: list[str] | None = None,
 ) -> xr.Dataset:
     """Open a set of EPOCH SDF files as one `xarray.Dataset`
 
@@ -98,20 +172,36 @@
         different output frequencies
     keep_particles :
         If ``True``, also load particle data (this may use a lot of memory!)
+    probe_names :
+        List of EPOCH probe names
+    data_vars :
+        List of data vars to load in (if not specified, loads in all variables)
     """
 
-    # TODO: This is not very robust, look at how xarray.open_mfdataset does it
-    if isinstance(path_glob, str):
-        path_glob = Path().glob(path_glob)
-
-    # Coerce to list because we might need to use the sequence multiple times
-    path_glob = sorted(list(path_glob))  # noqa: C414
+    path_glob = _resolve_glob(path_glob)
 
     if not separate_times:
-        return combine_datasets(path_glob, keep_particles=keep_particles)
+        return combine_datasets(
+            path_glob,
+            data_vars=data_vars,
+            keep_particles=keep_particles,
+            probe_names=probe_names,
+        )
+
+    _, var_times_map = make_time_dims(path_glob)
 
-    time_dims, var_times_map = make_time_dims(path_glob)
-    all_dfs = [xr.open_dataset(f, keep_particles=keep_particles) for f in path_glob]
+    all_dfs = []
+    for f in path_glob:
+        ds = xr.open_dataset(f, keep_particles=keep_particles, probe_names=probe_names)
+
+        # If the data_vars are specified then only load them in and disregard the rest.
+        # If there are no remaining data variables then skip adding the dataset to the list
+        if data_vars is not None:
+            ds = purge_unselected_data_vars(ds, data_vars)
+            if not ds.data_vars:
+                continue
+
+        all_dfs.append(ds)
 
     for df in all_dfs:
         for da in df:
@@ -128,7 +218,11 @@
         )
 
     return xr.combine_by_coords(
-        all_dfs, data_vars="minimal", combine_attrs="drop_conflicts"
+        all_dfs,
+        coords="different",
+        combine_attrs="drop_conflicts",
+        join="outer",
+        compat="no_conflicts",
     )
 
 
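For contrast, a hedged sketch of the `separate_times` path (the glob is illustrative):

```python
# Sketch: variables written at different output frequencies are kept on
# separate time dimensions instead of one NaN-padded "time" axis.
from sdf_xarray import open_mfdataset

ds = open_mfdataset("run/*.sdf", separate_times=True)
print(ds.dims)  # several time-like dimensions rather than a single "time"
```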
@@ -211,14 +305,23 @@ class SDFDataStore(AbstractDataStore):
         "drop_variables",
         "keep_particles",
         "lock",
+        "probe_names",
     )
 
-    def __init__(self, manager, drop_variables=None, keep_particles=False, lock=None):
+    def __init__(
+        self,
+        manager,
+        drop_variables=None,
+        keep_particles=False,
+        lock=None,
+        probe_names=None,
+    ):
         self._manager = manager
         self._filename = self.ds.filename
         self.drop_variables = drop_variables
         self.keep_particles = keep_particles
         self.lock = ensure_lock(lock)
+        self.probe_names = probe_names
 
     @classmethod
     def open(
@@ -227,6 +330,7 @@ class SDFDataStore(AbstractDataStore):
         lock=None,
         drop_variables=None,
         keep_particles=False,
+        probe_names=None,
     ):
         if isinstance(filename, os.PathLike):
             filename = os.fspath(filename)
@@ -237,6 +341,7 @@ class SDFDataStore(AbstractDataStore):
             lock=lock,
             drop_variables=drop_variables,
             keep_particles=keep_particles,
+            probe_names=probe_names,
         )
 
     def _acquire(self, needs_lock=True):
@@ -347,7 +452,28 @@ class SDFDataStore(AbstractDataStore):
 
         if value.is_point_data:
             # Point (particle) variables are 1D
-            var_coords = (f"ID_{_process_grid_name(key, _grid_species_name)}",)
+
+            # Particle data does not maintain a fixed dimension size
+            # throughout the simulation. An example of a particle name comes
+            # in the form of `Particles/Px/Ion_H`, which is then modified
+            # using `_process_grid_name()` into `Ion_H`. This is fine, as the
+            # other components of the momentum (`Py`, `Pz`) will have the same
+            # size since they represent the same bunch of particles.
+
+            # Probes, however, have names in the form of `Electron_Front_Probe/Px`,
+            # which are changed to just `Px`. This is fine when there is only one
+            # probe in the system, but when there are multiple they will have
+            # conflicting sizes, so we can't keep the names as simply `Px`;
+            # instead we set their dimension to the full name `Electron_Front_Probe_Px`.
+            is_probe_name_match = self.probe_names is not None and any(
+                name in key for name in self.probe_names
+            )
+            name_processor = (
+                _rename_with_underscore
+                if is_probe_name_match
+                else _grid_species_name
+            )
+            var_coords = (f"ID_{_process_grid_name(key, name_processor)}",)
         else:
             # These are DataArrays
 
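A hedged illustration of the naming rules described in the comments above (the probe and species names are hypothetical examples):

```python
# Sketch: how point-data dimension names come out under the rules described
# above. The probe and species names are hypothetical.
from sdf_xarray import open_mfdataset

ds = open_mfdataset(
    "run/*.sdf",
    keep_particles=True,
    probe_names=["Electron_Front_Probe"],
)
# Plain particle data:  "Particles/Px/Ion_H"       -> dimension "ID_Ion_H"
# Probe data:           "Electron_Front_Probe/Px"  -> dimension
#   "ID_Electron_Front_Probe_Px", so two probes of different sizes don't collide
```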
@@ -414,6 +540,7 @@ class SDFEntrypoint(BackendEntrypoint):
         *,
         drop_variables=None,
         keep_particles=False,
+        probe_names=None,
     ):
         if isinstance(filename_or_obj, Path):
             # sdf library takes a filename only
@@ -424,6 +551,7 @@ class SDFEntrypoint(BackendEntrypoint):
             filename_or_obj,
             drop_variables=drop_variables,
             keep_particles=keep_particles,
+            probe_names=probe_names,
         )
         with close_on_error(store):
             return store.load()
@@ -432,6 +560,7 @@ class SDFEntrypoint(BackendEntrypoint):
         "filename_or_obj",
         "drop_variables",
         "keep_particles",
+        "probe_names",
     ]
 
     def guess_can_open(self, filename_or_obj):
@@ -447,10 +576,43 @@
 
 
 class SDFPreprocess:
-    """Preprocess SDF files for xarray ensuring matching job ids and sets time dimension"""
+    """Preprocess SDF files for xarray, ensuring matching job IDs and setting
+    the time dimension.
 
-    def __init__(self):
+    This class is used as a 'preprocess' function within ``xr.open_mfdataset``. It
+    performs three main duties on each individual file's Dataset:
+
+    1. Checks for a **matching job ID** across all files to ensure dataset consistency.
+    2. **Filters** the Dataset to keep only the variables specified in `data_vars`
+       and their required coordinates.
+    3. **Expands dimensions** to include a single 'time' coordinate, preparing the
+       Dataset for concatenation.
+
+    EPOCH can output variables at different intervals, so some SDF files
+    may not contain the requested variable. We combine this data into one
+    dataset by concatenating across the time dimension.
+
+    The combination is performed using ``join="outer"`` (in the calling
+    ``open_mfdataset`` function), meaning that the final combined dataset will
+    contain the variable across the entire time span, with NaNs filling the
+    time steps where the variable was absent in the individual file.
+
+    With large SDF files, this filtering method saves on memory consumption
+    compared to loading all variables from all files before concatenation.
+
+    Parameters
+    ----------
+    data_vars :
+        A list of data variables to load in (if not specified, loads
+        in all variables)
+    """
+
+    def __init__(
+        self,
+        data_vars: list[str] | None = None,
+    ):
         self.job_id: int | None = None
+        self.data_vars = data_vars
 
     def __call__(self, ds: xr.Dataset) -> xr.Dataset:
         if self.job_id is None:
@@ -461,17 +623,23 @@ class SDFPreprocess:
                 f"Mismatching job ids (got {ds.attrs['jobid1']}, expected {self.job_id})"
             )
 
-        ds = ds.expand_dims(time=[ds.attrs["time"]])
+        # If the user has exclusively requested that only certain variables be
+        # loaded in, then we purge all other variables and coordinates
+        if self.data_vars:
+            ds = purge_unselected_data_vars(ds, self.data_vars)
+
+        time_val = ds.attrs.get("time", np.nan)
+        ds = ds.expand_dims(time=[time_val])
         ds = ds.assign_coords(
             time=(
                 "time",
-                [ds.attrs["time"]],
+                [time_val],
                 {"units": "s", "long_name": "Time", "full_name": "time"},
            )
        )
         # Particles' spatial coordinates also evolve in time
         for coord, value in ds.coords.items():
             if value.attrs.get("point_data", False):
-                ds.coords[coord] = value.expand_dims(time=[ds.attrs["time"]])
+                ds.coords[coord] = value.expand_dims(time=[time_val])
 
         return ds
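
A hedged sketch of using `SDFPreprocess` directly with `xarray.open_mfdataset`, mirroring the combine kwargs that `combine_datasets` passes above (the glob and variable name are illustrative):

```python
# Sketch: SDFPreprocess as a preprocess callable, with the same combine
# kwargs used by combine_datasets above. Names are illustrative.
import xarray as xr
from sdf_xarray import SDFPreprocess

ds = xr.open_mfdataset(
    "run/*.sdf",
    combine="nested",
    concat_dim="time",
    join="outer",
    coords="different",
    compat="no_conflicts",
    preprocess=SDFPreprocess(data_vars=["Electric_Field_Ex"]),
)
```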
sdf_xarray/_version.py CHANGED
@@ -1,7 +1,14 @@
 # file generated by setuptools-scm
 # don't change, don't track in version control
 
-__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+__all__ = [
+    "__version__",
+    "__version_tuple__",
+    "version",
+    "version_tuple",
+    "__commit_id__",
+    "commit_id",
+]
 
 TYPE_CHECKING = False
 if TYPE_CHECKING:
@@ -9,13 +16,19 @@ if TYPE_CHECKING:
     from typing import Union
 
     VERSION_TUPLE = Tuple[Union[int, str], ...]
+    COMMIT_ID = Union[str, None]
 else:
     VERSION_TUPLE = object
+    COMMIT_ID = object
 
 version: str
 __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
+commit_id: COMMIT_ID
+__commit_id__: COMMIT_ID
 
-__version__ = version = '0.2.5'
-__version_tuple__ = version_tuple = (0, 2, 5)
+__version__ = version = '0.4.0'
+__version_tuple__ = version_tuple = (0, 4, 0)
+
+__commit_id__ = commit_id = 'gc5cdb3bf9'
sdf_xarray/dataset_accessor.py ADDED
@@ -0,0 +1,71 @@
+import xarray as xr
+
+
+@xr.register_dataset_accessor("epoch")
+class EpochAccessor:
+    def __init__(self, xarray_obj: xr.Dataset):
+        # The xarray object is the Dataset, which we store as self._ds
+        self._ds = xarray_obj
+
+    def rescale_coords(
+        self,
+        multiplier: float,
+        unit_label: str,
+        coord_names: str | list[str],
+    ) -> xr.Dataset:
+        """
+        Rescales the specified coordinates in the Dataset by a given multiplier
+        and updates the unit label attribute.
+
+        Parameters
+        ----------
+        multiplier : float
+            The factor by which to multiply the coordinate values (e.g., 1e6 for meters to microns).
+        unit_label : str
+            The new unit label for the coordinates (e.g., "µm").
+        coord_names : str or list of str
+            The name(s) of the coordinate variable(s) to rescale.
+            If a string, only that coordinate is rescaled.
+            If a list, all listed coordinates are rescaled.
+
+        Returns
+        -------
+        xr.Dataset
+            A new Dataset with the updated and rescaled coordinates.
+
+        Examples
+        --------
+        # Convert X, Y, and Z from meters to microns
+        >>> ds_in_microns = ds.epoch.rescale_coords(1e6, "µm", coord_names=["X_Grid", "Y_Grid", "Z_Grid"])
+
+        # Convert only X to millimeters
+        >>> ds_in_mm = ds.epoch.rescale_coords(1000, "mm", coord_names="X_Grid")
+        """
+
+        ds = self._ds
+        new_coords = {}
+
+        if isinstance(coord_names, str):
+            # Convert a single string to a list
+            coords_to_process = [coord_names]
+        elif isinstance(coord_names, list):
+            # Use the provided list
+            coords_to_process = coord_names
+        else:
+            coords_to_process = list(coord_names)
+
+        for coord_name in coords_to_process:
+            if coord_name not in ds.coords:
+                raise ValueError(
+                    f"Coordinate '{coord_name}' not found in the Dataset. Cannot rescale."
+                )
+
+            coord_original = ds[coord_name]
+
+            coord_rescaled = coord_original * multiplier
+            coord_rescaled.attrs = coord_original.attrs.copy()
+            coord_rescaled.attrs["units"] = unit_label
+
+            new_coords[coord_name] = coord_rescaled
+
+        return ds.assign_coords(new_coords)
sdf_xarray/download.py ADDED
@@ -0,0 +1,87 @@
+from pathlib import Path
+from shutil import move
+from typing import TYPE_CHECKING, Literal, TypeAlias
+
+if TYPE_CHECKING:
+    import pooch  # noqa: F401
+
+DatasetName: TypeAlias = Literal[
+    "test_array_no_grids",
+    "test_dist_fn",
+    "test_files_1D",
+    "test_files_2D_moving_window",
+    "test_files_3D",
+    "test_mismatched_files",
+    "test_two_probes_2D",
+    "tutorial_dataset_1d",
+    "tutorial_dataset_2d",
+    "tutorial_dataset_2d_moving_window",
+    "tutorial_dataset_3d",
+]
+
+
+def fetch_dataset(
+    dataset_name: DatasetName, save_path: Path | str | None = None
+) -> Path:
+    """
+    Downloads the specified dataset from its Zenodo URL. If it is already
+    downloaded, then the path to the cached, unzipped directory is returned.
+
+    Parameters
+    ----------
+    dataset_name
+        The name of the dataset to download
+    save_path
+        The directory to save the dataset to (defaults to the cache folder ``"sdf_datasets"``.
+        See `pooch.os_cache` for details on how the cache works)
+
+    Returns
+    -------
+    Path
+        The path to the directory containing the unzipped dataset files
+
+    Examples
+    --------
+    >>> # Assuming the dataset has not been downloaded yet
+    >>> path = fetch_dataset("tutorial_dataset_1d")
+    Downloading file 'tutorial_dataset_1d.zip' ...
+    Unzipping contents of '.../sdf_datasets/tutorial_dataset_1d.zip' to '.../sdf_datasets/tutorial_dataset_1d'
+    >>> path
+    '.../sdf_datasets/tutorial_dataset_1d'
+    """
+    import pooch  # noqa: PLC0415
+
+    logger = pooch.get_logger()
+    datasets = pooch.create(
+        path=pooch.os_cache("sdf_datasets"),
+        base_url="doi:10.5281/zenodo.17618510",
+        registry={
+            "test_array_no_grids.zip": "md5:583c85ed8c31d0e34e7766b6d9f2d6da",
+            "test_dist_fn.zip": "md5:a582ff5e8c59bad62fe4897f65fc7a11",
+            "test_files_1D.zip": "md5:42e53b229556c174c538c5481c4d596a",
+            "test_files_2D_moving_window.zip": "md5:3744483bbf416936ad6df8847c54dad1",
+            "test_files_3D.zip": "md5:a679e71281bab1d373dc4980e6da1a7c",
+            "test_mismatched_files.zip": "md5:710fdc94666edf7777523e8fc9dd1bd4",
+            "test_two_probes_2D.zip": "md5:0f2a4fefe84a15292d066b3320d4d533",
+            "tutorial_dataset_1d.zip": "md5:7fad744d8b8b2b84bba5c0e705fdef7b",
+            "tutorial_dataset_2d.zip": "md5:1945ecdbc1ac1798164f83ea2b3d1b31",
+            "tutorial_dataset_2d_moving_window.zip": "md5:a795f40d18df69263842055de4559501",
+            "tutorial_dataset_3d.zip": "md5:d9254648867016292440fdb028f717f7",
+        },
+    )
+
+    datasets.fetch(
+        f"{dataset_name}.zip", processor=pooch.Unzip(extract_dir="."), progressbar=True
+    )
+    cache_path = Path(datasets.path) / dataset_name
+
+    if save_path is not None:
+        save_path = Path(save_path)
+        logger.info(
+            "Moving contents of '%s' to '%s'",
+            cache_path,
+            save_path / dataset_name,
+        )
+        return move(cache_path, save_path / dataset_name)
+
+    return cache_path
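
A hedged sketch pairing `fetch_dataset` with `open_mfdataset` (requires the optional `pooch` package; the dataset name comes from the registry above):

```python
# Sketch: download a tutorial dataset (requires pooch) and open it.
from sdf_xarray import open_mfdataset
from sdf_xarray.download import fetch_dataset

path = fetch_dataset("tutorial_dataset_1d")
ds = open_mfdataset(f"{path}/*.sdf")
print(ds)
```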
sdf_xarray/plotting.py CHANGED
@@ -9,37 +9,69 @@ if TYPE_CHECKING:
     import matplotlib.pyplot as plt
     from matplotlib.animation import FuncAnimation
 
+from types import MethodType
+
 
 def get_frame_title(
     data: xr.DataArray,
     frame: int,
     display_sdf_name: bool = False,
     title_custom: str | None = None,
+    t: str = "time",
 ) -> str:
-    """Generate the title for a frame"""
+    """Generate the title for a frame
+
+    Parameters
+    ----------
+    data
+        DataArray containing the target data
+    frame
+        Frame number
+    display_sdf_name
+        Display the sdf file name in the animation title
+    title_custom
+        Custom title to add to the plot
+    t
+        Time coordinate
+    """
+
     # Adds custom text to the start of the title, if specified
     title_custom = "" if title_custom is None else f"{title_custom}, "
-    # Adds the time and associated units to the title
-    time = data["time"][frame].to_numpy()
+    # Adds the time axis and associated units to the title
+    t_axis_value = data[t][frame].values
 
-    time_units = data["time"].attrs.get("units", False)
-    time_units_formatted = f" [{time_units}]" if time_units else ""
-    title_time = f"time = {time:.2e}{time_units_formatted}"
+    t_axis_units = data[t].attrs.get("units", False)
+    t_axis_units_formatted = f" [{t_axis_units}]" if t_axis_units else ""
+    title_t_axis = f"{data[t].long_name} = {t_axis_value:.2e}{t_axis_units_formatted}"
 
     # Adds sdf name to the title, if specified
     title_sdf = f", {frame:04d}.sdf" if display_sdf_name else ""
-    return f"{title_custom}{title_time}{title_sdf}"
+    return f"{title_custom}{title_t_axis}{title_sdf}"
 
 
 def calculate_window_boundaries(
-    data: xr.DataArray, xlim: tuple[float, float] | False = False
+    data: xr.DataArray,
+    xlim: tuple[float, float] | None = None,
+    x_axis_name: str = "X_Grid_mid",
+    t: str = "time",
 ) -> np.ndarray:
     """Calculate the boundaries of a moving window frame. If the user specifies xlim, this will
     be used as the initial boundaries and the window will move along accordingly.
+
+    Parameters
+    ----------
+    data
+        DataArray containing the target data
+    xlim
+        x limits
+    x_axis_name
+        Name of coordinate to assign to the x-axis
+    t
+        Time coordinate
     """
-    x_grid = data["X_Grid_mid"].values
+    x_grid = data[x_axis_name].values
     x_half_cell = (x_grid[1] - x_grid[0]) / 2
-    N_frames = data["time"].size
+    N_frames = data[t].size
 
     # Find the window boundaries by finding the first and last non-NaN values in the 0th lineout
     # along the x-axis.
@@ -56,7 +88,7 @@
         window_boundaries[i, 1] = x_grid_non_nan[-1] + x_half_cell
 
     # User's choice for initial window edge supersedes the one calculated
-    if xlim:
+    if xlim is not None:
         window_boundaries = window_boundaries + xlim - window_boundaries[0]
     return window_boundaries
 
@@ -68,6 +100,15 @@
 ) -> tuple[float, float]:
     """Remove all NaN values from the target data to calculate the global minimum and maximum of the data.
     User-defined percentiles can remove extreme outliers.
+
+    Parameters
+    ----------
+    data
+        DataArray containing the target data
+    min_percentile
+        Minimum percentile of the data
+    max_percentile
+        Maximum percentile of the data
     """
 
     # Removes NaN values, needed for moving windows
@@ -86,74 +127,103 @@ def animate(
     max_percentile: float = 100,
     title: str | None = None,
     display_sdf_name: bool = False,
+    t: str | None = None,
     ax: plt.Axes | None = None,
     **kwargs,
 ) -> FuncAnimation:
-    """Generate an animation
+    """Generate an animation using an xarray.DataArray
 
     Parameters
     ----------
     data
-        The dataarray containing the target data
+        DataArray containing the target data
     fps
-        Frames per second for the animation (default: 10)
+        Frames per second for the animation
     min_percentile
-        Minimum percentile of the data (default: 0)
+        Minimum percentile of the data
     max_percentile
-        Maximum percentile of the data (default: 100)
+        Maximum percentile of the data
     title
-        Custom title to add to the plot.
+        Custom title to add to the plot
     display_sdf_name
         Display the sdf file name in the animation title
+    t
+        Coordinate to animate over. If `None`, uses "time" when present,
+        otherwise the last dimension of the data
     ax
-        Matplotlib axes on which to plot.
+        Matplotlib axes on which to plot
     kwargs
-        Keyword arguments to be passed to matplotlib.
+        Keyword arguments to be passed to matplotlib
 
     Examples
     --------
-    >>> dataset["Derived_Number_Density_Electron"].epoch.animate()
+    >>> ds["Derived_Number_Density_Electron"].epoch.animate()
     """
-    import matplotlib.pyplot as plt
-    from matplotlib.animation import FuncAnimation
+    import matplotlib.pyplot as plt  # noqa: PLC0415
+    from matplotlib.animation import FuncAnimation  # noqa: PLC0415
 
     kwargs_original = kwargs.copy()
 
+    # Create plot if no ax is provided
     if ax is None:
-        _, ax = plt.subplots()
+        fig, ax = plt.subplots()
+        # Prevents figure from prematurely displaying in a Jupyter notebook
+        plt.close(fig)
+
+    # Sets the animation coordinate (t) for iteration. If "time" is in the
+    # coords then it is used as t; otherwise we fall back to the last
+    # coordinate passed in. By default coordinates come from xarray in the
+    # form x, y, z, so to keep x and y on their respective axes we animate
+    # over the final coordinate, which in this example is z
+    coord_names = list(data.dims)
+    if t is None:
+        t = "time" if "time" in coord_names else coord_names[-1]
+    coord_names.remove(t)
+
+    N_frames = data[t].size
 
-    N_frames = data["time"].size
-    global_min, global_max = compute_global_limits(data, min_percentile, max_percentile)
-
-    # Initialise plot and set y-limits for 1D data
     if data.ndim == 2:
-        kwargs.setdefault("x", "X_Grid_mid")
-        plot = data.isel(time=0).plot(ax=ax, **kwargs)
-        ax.set_title(get_frame_title(data, 0, display_sdf_name, title))
+        kwargs.setdefault("x", coord_names[0])
+        plot = data.isel({t: 0}).plot(ax=ax, **kwargs)
+        ax.set_title(get_frame_title(data, 0, display_sdf_name, title, t))
+        global_min, global_max = compute_global_limits(
+            data, min_percentile, max_percentile
+        )
         ax.set_ylim(global_min, global_max)
 
-    # Initialise plot and set colour bar for 2D data
     if data.ndim == 3:
-        kwargs["norm"] = plt.Normalize(vmin=global_min, vmax=global_max)
+        if "norm" not in kwargs:
+            global_min, global_max = compute_global_limits(
+                data, min_percentile, max_percentile
+            )
+            kwargs["norm"] = plt.Normalize(vmin=global_min, vmax=global_max)
         kwargs["add_colorbar"] = False
-        # Set default x and y coordinates for 2D data if not provided
-        kwargs.setdefault("x", "X_Grid_mid")
-        kwargs.setdefault("y", "Y_Grid_mid")
+        # Set default x and y coordinates for 3D data if not provided
+        kwargs.setdefault("x", coord_names[0])
+        kwargs.setdefault("y", coord_names[1])
+
+        # Finds the time step with the minimum data value
+        # This is needed so that the animation can use the correct colour bar
+        argmin_time = np.unravel_index(data.argmin(), data.shape)[0]
 
-        # Initialize the plot with the first timestep
-        plot = data.isel(time=0).plot(ax=ax, **kwargs)
-        ax.set_title(get_frame_title(data, 0, display_sdf_name, title))
+        # Initialize the plot; the final output will still start at the first time step
+        plot = data.isel({t: argmin_time}).plot(ax=ax, **kwargs)
+        ax.set_title(get_frame_title(data, 0, display_sdf_name, title, t))
+        kwargs["cmap"] = plot.cmap
 
     # Add colorbar
     if kwargs_original.get("add_colorbar", True):
         long_name = data.attrs.get("long_name")
         units = data.attrs.get("units")
-        plt.colorbar(plot, ax=ax, label=f"{long_name} [${units}$]")
+        fig = plot.get_figure()
+        fig.colorbar(plot, ax=ax, label=f"{long_name} [{units}]")
 
     # Check if there is a moving window by finding NaNs in the data
     move_window = np.isnan(np.sum(data.values))
     if move_window:
-        window_boundaries = calculate_window_boundaries(data, kwargs.get("xlim", False))
+        window_boundaries = calculate_window_boundaries(
+            data, kwargs.get("xlim"), kwargs["x"]
+        )
 
     def update(frame):
         # Set the xlim for each frame in the case of a moving window
@@ -163,12 +233,12 @@
         # Update plot for the new frame
         ax.clear()
 
-        data.isel(time=frame).plot(ax=ax, **kwargs)
-        ax.set_title(get_frame_title(data, frame, display_sdf_name, title))
+        plot = data.isel({t: frame}).plot(ax=ax, **kwargs)
+        ax.set_title(get_frame_title(data, frame, display_sdf_name, title, t))
 
-        # Update y-limits for 1D data
         if data.ndim == 2:
             ax.set_ylim(global_min, global_max)
+        return plot
 
     return FuncAnimation(
         ax.get_figure(),
@@ -179,6 +249,19 @@
     )
 
 
+def show(anim):
+    """Shows the FuncAnimation in a Jupyter notebook.
+
+    Parameters
+    ----------
+    anim
+        `matplotlib.animation.FuncAnimation`
+    """
+    from IPython.display import HTML  # noqa: PLC0415
+
+    return HTML(anim.to_jshtml())
+
+
 @xr.register_dataarray_accessor("epoch")
 class EpochAccessor:
     def __init__(self, xarray_obj):
@@ -190,16 +273,21 @@ class EpochAccessor:
         Parameters
         ----------
         args
-            Positional arguments passed to :func:`generate_animation`.
+            Positional arguments passed to :func:`animate`.
         kwargs
-            Keyword arguments passed to :func:`generate_animation`.
+            Keyword arguments passed to :func:`animate`.
 
         Examples
         --------
-        >>> import xarray as xr
-        >>> from sdf_xarray import SDFPreprocess
-        >>> ds = xr.open_mfdataset("*.sdf", preprocess=SDFPreprocess())
-        >>> ani = ds["Electric_Field_Ey"].epoch.animate()
-        >>> ani.save("myfile.mp4")
+        >>> anim = ds["Electric_Field_Ey"].epoch.animate()
+        >>> anim.save("myfile.mp4")
+        >>> # Or in a Jupyter notebook:
+        >>> anim.show()
         """
-        return animate(self._obj, *args, **kwargs)
+
+        # Add anim.show() functionality
+        # anim.show() will display the animation in a Jupyter notebook
+        anim = animate(self._obj, *args, **kwargs)
+        anim.show = MethodType(show, anim)
+
+        return anim
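
A hedged usage sketch of the updated accessor (file and variable names are illustrative):

```python
# Sketch: animate a field and either save it or show it inline.
# File and variable names are illustrative.
from sdf_xarray import open_mfdataset

ds = open_mfdataset("run/*.sdf")
anim = ds["Electric_Field_Ey"].epoch.animate(fps=20, display_sdf_name=True)
anim.save("ey.mp4")  # write to disk
# anim.show()        # or display inline in a Jupyter notebook
```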
sdf_xarray/sdf_interface.pyx CHANGED
@@ -110,12 +110,12 @@ _CONSTANT_UNITS_RE = re.compile(r"(?P<name>.*) \((?P<units>.*)\)$")
 cdef class Constant:
     _id: str
     name: str
-    data: int | str | float
+    data: int | str | float | bool
     units: str | None
 
     @staticmethod
     cdef Constant from_block(str name, csdf.sdf_block_t* block):
-        data: int | str | float | double
+        data: int | str | float | double | bool
 
         if block.datatype == csdf.SDF_DATATYPE_REAL4:
             data = (<float*>block.const_value)[0]
@@ -125,6 +125,8 @@ cdef class Constant:
             data = (<csdf.int32_t*>block.const_value)[0]
         if block.datatype == csdf.SDF_DATATYPE_INTEGER8:
             data = (<csdf.int64_t*>block.const_value)[0]
+        if block.datatype == csdf.SDF_DATATYPE_LOGICAL:
+            data = (<bint*>block.const_value)[0]
 
         # There's no metadata with e.g. units, but there's a
         # convention to put one in brackets at the end of the name,
sdf_xarray-0.2.5.dist-info/METADATA → sdf_xarray-0.4.0.dist-info/METADATA RENAMED
@@ -1,70 +1,24 @@
-Metadata-Version: 2.2
+Metadata-Version: 2.4
 Name: sdf-xarray
-Version: 0.2.5
+Version: 0.4.0
 Summary: Provides a backend for xarray to read SDF files as created by the EPOCH plasma PIC code.
-Author-Email: Peter Hill <peter.hill@york.ac.uk>, Joel Adams <joel.adams@york.ac.uk>, Shaun Doherty <shaun.doherty@york.ac.uk>
-License: Copyright 2024, Peter Hill, Joel Adams, epochpic team
-
-        Redistribution and use in source and binary forms, with or without
-        modification, are permitted provided that the following conditions are
-        met:
-
-        1. Redistributions of source code must retain the above copyright
-        notice, this list of conditions and the following disclaimer.
-
-        2. Redistributions in binary form must reproduce the above copyright
-        notice, this list of conditions and the following disclaimer in the
-        documentation and/or other materials provided with the distribution.
-
-        3. Neither the name of the copyright holder nor the names of its
-        contributors may be used to endorse or promote products derived from
-        this software without specific prior written permission.
-
-        THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-        “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-        LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-        A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-        HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-        SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-        LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-        DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-        THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-        (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-        OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
+Author-Email: Peter Hill <peter.hill@york.ac.uk>, Joel Adams <joel.adams@york.ac.uk>, Shaun Doherty <shaun.doherty@york.ac.uk>, Chris Herdman <chris.herdman@york.ac.uk>, Liam Pattinson <liam.pattinson@york.ac.uk>
+License-Expression: BSD-3-Clause
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Science/Research
+Classifier: Topic :: Scientific/Engineering
+Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
-Classifier: Intended Audience :: Science/Research
-Classifier: Topic :: Scientific/Engineering
-Classifier: Operating System :: OS Independent
-Requires-Python: >=3.10
+Classifier: Programming Language :: Python :: 3.14
+Requires-Python: <3.15,>=3.10
 Requires-Dist: numpy>=2.0.0
 Requires-Dist: xarray>=2024.1.0
 Requires-Dist: dask>=2024.7.1
-Provides-Extra: docs
-Requires-Dist: sphinx>=5.3; extra == "docs"
-Requires-Dist: sphinx_autodoc_typehints>=1.19; extra == "docs"
-Requires-Dist: sphinx-book-theme>=0.4.0rc1; extra == "docs"
-Requires-Dist: sphinx-argparse-cli>=1.10.0; extra == "docs"
-Requires-Dist: sphinx-inline-tabs; extra == "docs"
-Requires-Dist: pickleshare; extra == "docs"
-Requires-Dist: ipython; extra == "docs"
-Requires-Dist: matplotlib; extra == "docs"
-Requires-Dist: pint; extra == "docs"
-Requires-Dist: pint-xarray; extra == "docs"
-Requires-Dist: myst-parser; extra == "docs"
-Provides-Extra: test
-Requires-Dist: pytest>=3.3.0; extra == "test"
-Requires-Dist: dask[complete]; extra == "test"
-Requires-Dist: matplotlib; extra == "test"
-Provides-Extra: lint
-Requires-Dist: ruff; extra == "lint"
-Provides-Extra: build
-Requires-Dist: cibuildwheel[uv]; extra == "build"
 Provides-Extra: jupyter
 Requires-Dist: dask[diagnostics]; extra == "jupyter"
 Requires-Dist: ipykernel>=6.29.5; extra == "jupyter"
@@ -90,23 +44,19 @@ Part of [BEAM](#broad-epoch-analysis-modules-beam) (Broad EPOCH Analysis Modules)
 
 ## Installation
 
+> [!IMPORTANT]
+> To install this package, make sure you are using one of the Python versions listed above.
+
 Install from PyPI with:
 
 ```bash
 pip install sdf-xarray
 ```
 
-> [!NOTE]
-> For use within Jupyter notebooks, run this additional command after installation:
->
-> ```bash
-> pip install "sdf-xarray[jupyter]"
-> ```
-
-or from a local checkout:
+or download this code locally:
 
 ```bash
-git clone https://github.com/epochpic/sdf-xarray.git
+git clone --recursive https://github.com/epochpic/sdf-xarray.git
 cd sdf-xarray
 pip install .
 ```
@@ -115,6 +65,9 @@ We recommend switching to [uv](https://docs.astral.sh/uv/) to manage packages.
 
 ## Usage
 
+Below are some simple examples to get you started. Please see the full
+documentation at <https://sdf-xarray.readthedocs.io>.
+
 ### Single file loading
 
 ```python
@@ -135,15 +88,22 @@ print(df["Electric_Field_Ex"])
 
 ### Multi-file loading
 
-To open a whole simulation at once, pass `preprocess=sdf_xarray.SDFPreprocess()`
-to `xarray.open_mfdataset`:
+You can open all the SDF files for a given simulation by calling the `open_mfdataset`
+function from `sdf_xarray`. This will additionally add a time dimension using the `"time"`
+value stored in each file's attributes.
+
+> [!IMPORTANT]
+> If your simulation has multiple `output` blocks so that not all variables are
+> output at every time step, then at the time steps where those variables are not
+> present they will have a value of NaN. To clean your dataset by removing
+> these NaN values, we suggest using the `xarray.DataArray.dropna` function or
+> loading sparse data along separate time dimensions using `separate_times=True`.
 
 ```python
-import xarray as xr
-from sdf_xarray import SDFPreprocess
+from sdf_xarray import open_mfdataset
 
-with xr.open_mfdataset("*.sdf", preprocess=SDFPreprocess()) as ds:
-    print(ds)
+ds = open_mfdataset("*.sdf")
+print(ds)
 
 # Dimensions:
 # time: 301, X_Grid_mid: 128, ...
@@ -153,15 +113,6 @@ with xr.open_mfdataset("*.sdf", preprocess=SDFPreprocess()) as ds:
 # Attributes: (22) ...
 ```
 
-`SDFPreprocess` checks that all the files are from the same simulation, as
-ensures there's a `time` dimension so the files are correctly concatenated.
-
-If your simulation has multiple `output` blocks so that not all variables are
-output at every time step, then those variables will have `NaN` values at the
-corresponding time points.
-
-For more in depth documentation please visit: <https://sdf-xarray.readthedocs.io/>
-
 ## Citing
 
 If sdf-xarray contributes to a project that leads to publication, please acknowledge this by citing sdf-xarray. This can be done by clicking the "cite this repository" button located near the top right of this page.
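
Following up on the README's NaN note above, a hedged sketch of the suggested clean-up (the variable name is illustrative):

```python
# Sketch: drop the NaN-padded time steps described in the README note.
# The variable name is illustrative.
from sdf_xarray import open_mfdataset

ds = open_mfdataset("*.sdf")
ey = ds["Electric_Field_Ey"].dropna(dim="time")  # keep only steps where Ey was written
```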
sdf_xarray-0.2.5.dist-info/RECORD → sdf_xarray-0.4.0.dist-info/RECORD RENAMED
@@ -6,19 +6,21 @@ include/SDFC_14.4.7/sdf_list_type.h,sha256=Quu8v0-SEsQuJpGtEZnm09tAyXqWNitx0sXl5
 include/SDFC_14.4.7/sdf_vector_type.h,sha256=dbKjhzRRsvhzrnTwVjtVlvnuisEnRMKY-vvdm94ok_Q,1595
 include/SDFC_14.4.7/stack_allocator.h,sha256=L7U9vmGiVSw3VQLIv9EzTaVq7JbFxs9aNonKStTkUSg,1335
 include/SDFC_14.4.7/uthash.h,sha256=rIyy_-ylY6S_7WaZCCC3VtvXaC9q37rFyA0f1U9xc4w,63030
-lib/SDFC_14.4.7/sdfc.lib,sha256=No_BVr7T1qROlbEiwg-jQL4JARLE-tW-C0uPc9wX-_A,350410
+lib/SDFC_14.4.7/sdfc.lib,sha256=FzSUfDKmu1bg4FtlzubnyO4SyrKmDTwNBQJVss7Gu5Y,350158
 lib/SDFC_14.4.7/SDFCConfig.cmake,sha256=IOA1eusC-KvUK4LNTEiOAmEdaPH1ZvNvbYPgiG1oZio,802
 lib/SDFC_14.4.7/SDFCConfigVersion.cmake,sha256=pN7Qqyf04s3izw7PYQ0XK6imvmhaVegSdR_nEl3Ok_o,2830
 lib/SDFC_14.4.7/SDFCTargets-release.cmake,sha256=G4zdx5PyjePigeD_a6rmZAxbk7L8Nf0klUnV78Lm2fI,828
 lib/SDFC_14.4.7/SDFCTargets.cmake,sha256=OVt1Gm8n7Ew4fiTmA9yHoef3vIIGwsXUZfqeG9p9Bys,4152
-sdf_xarray/__init__.py,sha256=MgATg9E6Jyo1fW3_li3lIEm-yxTHgJbNfZf5qeAvuyY,18073
-sdf_xarray/_version.py,sha256=XjF4m8_cMkXrbRU7Rim3aBSkgXL9DCvwDPeFFTVyWd8,532
+sdf_xarray/__init__.py,sha256=OAihj1CQVqoVvf-eEmwIttedjyplcoiTOfXpDafNqT8,24229
+sdf_xarray/_version.py,sha256=q0PPvfDga48CLepNF0cz12bk-1nMuF8tgn-8a0yG_w4,746
 sdf_xarray/csdf.pxd,sha256=ADPjAuHsodAvdOz96Z_XlFF7VL3KmVaXcTifWDP3rK0,4205
-sdf_xarray/plotting.py,sha256=ze1paC1Uw42WOWspdqkyNsUviDt-Z7AwQlPyO7JB90o,7007
-sdf_xarray/sdf_interface.cp310-win_amd64.pyd,sha256=XnLyq22rQxnDiE8Qrw1FeCEHxS6gOoL7U_rpwScnwjU,351232
-sdf_xarray/sdf_interface.pyx,sha256=PFC6upg14OZBqiGInLgBoxztIIKBk-HOh3WC9Ro4YUw,11975
-sdf_xarray-0.2.5.dist-info/METADATA,sha256=ybk80dZjGpoDSonOcmjyIO5HdCP4zUJRbqWjuxBn68c,9129
-sdf_xarray-0.2.5.dist-info/WHEEL,sha256=lOk88j15txMfymgGB6f9CZllTiUmb3GEt--Zg_z5WJg,106
-sdf_xarray-0.2.5.dist-info/entry_points.txt,sha256=gP7BIQpXNg6vIf7S7p-Rw_EJZTC1X50BsVTkK7dA7g0,57
-sdf_xarray-0.2.5.dist-info/licenses/LICENCE,sha256=aHWuyELjtzIL1jTXFHTbI3tr9vyVyhnw3I9_QYPdEX8,1515
-sdf_xarray-0.2.5.dist-info/RECORD,,
+sdf_xarray/dataset_accessor.py,sha256=IhROgmqgdD5SvuMbpZz-G6WoTre06-SOYgXVQSb7VAY,2450
+sdf_xarray/download.py,sha256=yT_z5q8KuGKe3yha_t7JW39IZjzdN2wczqRl8FIhgRA,3123
+sdf_xarray/plotting.py,sha256=FNaptcnHzvwY462JyfXSy9tMKNtuerKNn9A0sD8vbe4,9550
+sdf_xarray/sdf_interface.cp310-win_amd64.pyd,sha256=f1W3SDaA5jGniDeOY_-QkThgc_M64npCw16VNcQMN7s,340480
+sdf_xarray/sdf_interface.pyx,sha256=j0BpaltExUI_T-DMQvWaavKSrq9vqHkrmkJMJfCwDsA,12096
+sdf_xarray-0.4.0.dist-info/METADATA,sha256=9EoUJ22i_txDaZAX6ZIgnaQdogVv88M-L2VFIZDHofQ,6921
+sdf_xarray-0.4.0.dist-info/WHEEL,sha256=hrGeChGtn46HBGmzasO9QQDSLelRN-tUarBSv4gFcsI,106
+sdf_xarray-0.4.0.dist-info/entry_points.txt,sha256=gP7BIQpXNg6vIf7S7p-Rw_EJZTC1X50BsVTkK7dA7g0,57
+sdf_xarray-0.4.0.dist-info/licenses/LICENCE,sha256=aHWuyELjtzIL1jTXFHTbI3tr9vyVyhnw3I9_QYPdEX8,1515
+sdf_xarray-0.4.0.dist-info/RECORD,,
sdf_xarray-0.2.5.dist-info/WHEEL → sdf_xarray-0.4.0.dist-info/WHEEL RENAMED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: scikit-build-core 0.11.2
+Generator: scikit-build-core 0.11.6
 Root-Is-Purelib: false
 Tag: cp310-cp310-win_amd64
 