sdf-xarray 0.2.1__cp313-cp313-win_amd64.whl → 0.3.2__cp313-cp313-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lib/SDFC_14.4.7/sdfc.lib CHANGED
Binary file
sdf_xarray/__init__.py CHANGED
@@ -1,13 +1,17 @@
+import contextlib
 import os
 import re
 from collections import Counter, defaultdict
 from collections.abc import Callable, Iterable
+from importlib.metadata import version
 from itertools import product
+from os import PathLike as os_PathLike
 from pathlib import Path
 from typing import ClassVar

 import numpy as np
 import xarray as xr
+from packaging.version import Version
 from xarray.backends import AbstractDataStore, BackendArray, BackendEntrypoint
 from xarray.backends.file_manager import CachingFileManager
 from xarray.backends.locks import ensure_lock
@@ -15,8 +19,24 @@ from xarray.core import indexing
 from xarray.core.utils import close_on_error, try_read_magic_number_from_path
 from xarray.core.variable import Variable

+# NOTE: Do not delete these lines, otherwise the "epoch" dataset and dataarray
+# accessors will not be imported when the user imports sdf_xarray
+import sdf_xarray.dataset_accessor
+import sdf_xarray.plotting  # noqa: F401
+
+# NOTE: This attempts to initialise with the "pint" accessor if the user
+# has installed the package
+with contextlib.suppress(ImportError):
+    import pint_xarray  # noqa: F401
+
 from .sdf_interface import Constant, SDFFile  # type: ignore  # noqa: PGH003

+# TODO Remove this once the new kwarg options are fully implemented
+if Version(version("xarray")) >= Version("2025.8.0"):
+    xr.set_options(use_new_combine_kwarg_defaults=True)
+
+PathLike = str | os_PathLike
+

 def _rename_with_underscore(name: str) -> str:
     """A lot of the variable names have spaces, forward slashes and dashes in them, which
@@ -47,14 +67,69 @@ def _process_latex_name(variable_name: str) -> str:
     return variable_name


-def combine_datasets(path_glob: Iterable | str, **kwargs) -> xr.Dataset:
-    """Combine all datasets using a single time dimension"""
+def _resolve_glob(path_glob: PathLike | Iterable[PathLike]):
+    """
+    Normalise input path_glob into a sorted list of absolute, resolved Path objects.
+    """
+
+    try:
+        p = Path(path_glob)
+        paths = list(p.parent.glob(p.name)) if p.name == "*.sdf" else list(p)
+    except TypeError:
+        paths = list({Path(p) for p in path_glob})
+
+    paths = sorted(p.resolve() for p in paths)
+    if not paths:
+        raise FileNotFoundError(f"No files matched pattern or input: {path_glob!r}")
+    return paths
+
+
+def purge_unselected_data_vars(ds: xr.Dataset, data_vars: list[str]) -> xr.Dataset:
+    """
+    If the user has exclusively requested only certain variables be
+    loaded in then we purge all other variables and dimensions
+    """
+    existing_data_vars = set(ds.data_vars.keys())
+    vars_to_keep = set(data_vars) & existing_data_vars
+    vars_to_drop = existing_data_vars - vars_to_keep
+    ds = ds.drop_vars(vars_to_drop)
+
+    existing_dims = set(ds.sizes)
+    dims_to_keep = set()
+    for var in vars_to_keep:
+        dims_to_keep.update(ds[var].coords._names)
+        dims_to_keep.update(ds[var].dims)
+
+    coords_to_drop = existing_dims - dims_to_keep
+    return ds.drop_dims(coords_to_drop)
+
+
+def combine_datasets(
+    path_glob: Iterable | str, data_vars: list[str], **kwargs
+) -> xr.Dataset:
+    """
+    Combine all datasets using a single time dimension, optionally extract
+    data from only the listed data_vars
+    """
+
+    if data_vars is not None:
+        return xr.open_mfdataset(
+            path_glob,
+            join="outer",
+            coords="different",
+            compat="no_conflicts",
+            combine="nested",
+            concat_dim="time",
+            preprocess=SDFPreprocess(data_vars=data_vars),
+            **kwargs,
+        )

     return xr.open_mfdataset(
         path_glob,
-        data_vars="minimal",
-        coords="minimal",
-        compat="override",
+        data_vars="all",
+        coords="different",
+        compat="no_conflicts",
+        join="outer",
         preprocess=SDFPreprocess(),
         **kwargs,
     )
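A hedged sketch of calling the updated `combine_datasets` (the glob and variable name are hypothetical; extra keywords such as `keep_particles` pass through `**kwargs` to the backend):

```python
from sdf_xarray import combine_datasets

# Load only one variable across all matching files, concatenated over time
ds = combine_datasets("run1/*.sdf", data_vars=["Electric_Field_Ex"])
```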
@@ -65,6 +140,8 @@ def open_mfdataset(
     *,
     separate_times: bool = False,
     keep_particles: bool = False,
+    probe_names: list[str] | None = None,
+    data_vars: list[str] | None = None,
 ) -> xr.Dataset:
     """Open a set of EPOCH SDF files as one `xarray.Dataset`

@@ -94,20 +171,36 @@
         different output frequencies
     keep_particles :
         If ``True``, also load particle data (this may use a lot of memory!)
+    probe_names :
+        List of EPOCH probe names
+    data_vars :
+        List of data vars to load in (If not specified loads in all variables)
     """

-    # TODO: This is not very robust, look at how xarray.open_mfdataset does it
-    if isinstance(path_glob, str):
-        path_glob = Path().glob(path_glob)
-
-    # Coerce to list because we might need to use the sequence multiple times
-    path_glob = sorted(list(path_glob))  # noqa: C414
+    path_glob = _resolve_glob(path_glob)

     if not separate_times:
-        return combine_datasets(path_glob, keep_particles=keep_particles)
+        return combine_datasets(
+            path_glob,
+            data_vars=data_vars,
+            keep_particles=keep_particles,
+            probe_names=probe_names,
+        )
+
+    _, var_times_map = make_time_dims(path_glob)
+
+    all_dfs = []
+    for f in path_glob:
+        ds = xr.open_dataset(f, keep_particles=keep_particles, probe_names=probe_names)
+
+        # If the data_vars are specified then only load them in and disregard the rest.
+        # If there are no remaining data variables then skip adding the dataset to list
+        if data_vars is not None:
+            ds = purge_unselected_data_vars(ds, data_vars)
+            if not ds.data_vars:
+                continue

-    time_dims, var_times_map = make_time_dims(path_glob)
-    all_dfs = [xr.open_dataset(f, keep_particles=keep_particles) for f in path_glob]
+        all_dfs.append(ds)

     for df in all_dfs:
         for da in df:
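For illustration, the two new keywords in use — a minimal sketch, assuming a hypothetical run directory, variable name, and probe name:

```python
import sdf_xarray

ds = sdf_xarray.open_mfdataset(
    "run1/*.sdf",
    data_vars=["Electric_Field_Ex"],       # keep just this variable
    probe_names=["Electron_Front_Probe"],  # give probe dims their full names
)
```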
@@ -124,7 +217,11 @@
         )

     return xr.combine_by_coords(
-        all_dfs, data_vars="minimal", combine_attrs="drop_conflicts"
+        all_dfs,
+        coords="different",
+        combine_attrs="drop_conflicts",
+        join="outer",
+        compat="no_conflicts",
     )

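To see what the `join="outer"`/`compat="no_conflicts"` combination does, here is a small self-contained illustration (synthetic datasets, not from the package's own tests):

```python
import xarray as xr

# "n_e" exists only at t=0 and "T_e" only at t=1; with join="outer" the
# combined result spans both times, NaN-filling where a variable is absent
a = xr.Dataset({"n_e": ("time", [1.0])}, coords={"time": [0.0]})
b = xr.Dataset({"T_e": ("time", [5.0])}, coords={"time": [1.0]})
combined = xr.combine_by_coords([a, b], join="outer", compat="no_conflicts")
```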
@@ -207,14 +304,23 @@ class SDFDataStore(AbstractDataStore):
         "drop_variables",
         "keep_particles",
         "lock",
+        "probe_names",
     )

-    def __init__(self, manager, drop_variables=None, keep_particles=False, lock=None):
+    def __init__(
+        self,
+        manager,
+        drop_variables=None,
+        keep_particles=False,
+        lock=None,
+        probe_names=None,
+    ):
         self._manager = manager
         self._filename = self.ds.filename
         self.drop_variables = drop_variables
         self.keep_particles = keep_particles
         self.lock = ensure_lock(lock)
+        self.probe_names = probe_names

     @classmethod
     def open(
@@ -223,6 +329,7 @@ class SDFDataStore(AbstractDataStore):
         lock=None,
         drop_variables=None,
         keep_particles=False,
+        probe_names=None,
     ):
         if isinstance(filename, os.PathLike):
             filename = os.fspath(filename)
@@ -233,6 +340,7 @@
             lock=lock,
             drop_variables=drop_variables,
             keep_particles=keep_particles,
+            probe_names=probe_names,
         )

     def _acquire(self, needs_lock=True):
@@ -249,9 +357,18 @@
     def load(self):  # noqa: PLR0912, PLR0915
         # Drop any requested variables
         if self.drop_variables:
+            # Build a mapping from underscored names to real variable names
+            name_map = {_rename_with_underscore(var): var for var in self.ds.variables}
+
             for variable in self.drop_variables:
-                # TODO: nicer error handling
-                self.ds.variables.pop(variable)
+                key = _rename_with_underscore(variable)
+                original_name = name_map.get(key)
+
+                if original_name is None:
+                    raise KeyError(
+                        f"Variable '{variable}' not found (interpreted as '{key}')."
+                    )
+                self.ds.variables.pop(original_name)

         # These two dicts are global metadata about the run or file
         attrs = {**self.ds.header, **self.ds.run_info}
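With the underscore mapping above, either naming style should now resolve when dropping variables — a hedged sketch (file and variable names hypothetical):

```python
import xarray as xr

# Both spellings map to the same underlying SDF variable
ds = xr.open_dataset("0000.sdf", drop_variables=["Electric Field/Ex"])
ds = xr.open_dataset("0000.sdf", drop_variables=["Electric_Field_Ex"])
```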
@@ -306,6 +423,8 @@
             # Had some problems with these variables, so just ignore them for now
             if "cpu" in key.lower():
                 continue
+            if "boundary" in key.lower():
+                continue
             if "output file" in key.lower():
                 continue

@@ -332,7 +451,28 @@

             if value.is_point_data:
                 # Point (particle) variables are 1D
-                var_coords = (f"ID_{_process_grid_name(key, _grid_species_name)}",)
+
+                # Particle data does not maintain a fixed dimension size
+                # throughout the simulation. An example of a particle name comes
+                # in the form of `Particles/Px/Ion_H` which is then modified
+                # using `_process_grid_name()` into `Ion_H`. This is fine as the
+                # other components of the momentum (`Py`, `Pz`) will have the same
+                # size as they represent the same bunch of particles.
+
+                # Probes however have names in the form of `Electron_Front_Probe/Px`
+                # which are changed to just `Px`; this is fine when there is only one
+                # probe in the system but when there are multiple they will have
+                # conflicting sizes so we can't keep the names as simply `Px` so we
+                # instead set their dimension as the full name `Electron_Front_Probe_Px`.
+                is_probe_name_match = self.probe_names is not None and any(
+                    name in key for name in self.probe_names
+                )
+                name_processor = (
+                    _rename_with_underscore
+                    if is_probe_name_match
+                    else _grid_species_name
+                )
+                var_coords = (f"ID_{_process_grid_name(key, name_processor)}",)
             else:
                 # These are DataArrays

@@ -399,6 +539,7 @@ class SDFEntrypoint(BackendEntrypoint):
         *,
         drop_variables=None,
         keep_particles=False,
+        probe_names=None,
     ):
         if isinstance(filename_or_obj, Path):
             # sdf library takes a filename only
@@ -409,6 +550,7 @@
             filename_or_obj,
             drop_variables=drop_variables,
             keep_particles=keep_particles,
+            probe_names=probe_names,
         )
         with close_on_error(store):
             return store.load()
@@ -417,6 +559,7 @@
         "filename_or_obj",
         "drop_variables",
         "keep_particles",
+        "probe_names",
     ]

     def guess_can_open(self, filename_or_obj):
@@ -428,14 +571,47 @@

     description = "Use .sdf files in Xarray"

-    url = "https://epochpic.github.io/documentation/visualising_output/python.html"
+    url = "https://epochpic.github.io/documentation/visualising_output/python_beam.html"


 class SDFPreprocess:
-    """Preprocess SDF files for xarray ensuring matching job ids and sets time dimension"""
+    """Preprocess SDF files for xarray ensuring matching job ids and sets
+    time dimension.
+
+    This class is used as a 'preprocess' function within ``xr.open_mfdataset``. It
+    performs three main duties on each individual file's Dataset:
+
+    1. Checks for a **matching job ID** across all files to ensure dataset consistency.
+    2. **Filters** the Dataset to keep only the variables specified in `data_vars`
+       and their required coordinates.
+    3. **Expands dimensions** to include a single 'time' coordinate, preparing the
+       Dataset for concatenation.
+
+    EPOCH can output variables at different intervals, so some SDF files
+    may not contain the requested variable. We combine this data into one
+    dataset by concatenating across the time dimension.

-    def __init__(self):
+    The combination is performed using ``join="outer"`` (in the calling ``open_mfdataset`` function),
+    meaning that the final combined dataset will contain the variable across the
+    entire time span, with NaNs filling the time steps where the variable was absent in
+    the individual file.
+
+    With large SDF files, this filtering method will save on memory consumption when
+    compared to loading all variables from all files before concatenation.
+
+    Parameters
+    ----------
+    data_vars :
+        A list of data variables to load in (If not specified loads
+        in all variables)
+    """
+
+    def __init__(
+        self,
+        data_vars: list[str] | None = None,
+    ):
         self.job_id: int | None = None
+        self.data_vars = data_vars

     def __call__(self, ds: xr.Dataset) -> xr.Dataset:
         if self.job_id is None:
@@ -446,17 +622,23 @@ class SDFPreprocess:
                 f"Mismatching job ids (got {ds.attrs['jobid1']}, expected {self.job_id})"
             )

-        ds = ds.expand_dims(time=[ds.attrs["time"]])
+        # If the user has exclusively requested only certain variables be
+        # loaded in then we purge all other variables and coordinates
+        if self.data_vars:
+            ds = purge_unselected_data_vars(ds, self.data_vars)
+
+        time_val = ds.attrs.get("time", np.nan)
+        ds = ds.expand_dims(time=[time_val])
         ds = ds.assign_coords(
             time=(
                 "time",
-                [ds.attrs["time"]],
+                [time_val],
                 {"units": "s", "long_name": "Time", "full_name": "time"},
             )
         )
         # Particles' spartial coordinates also evolve in time
         for coord, value in ds.coords.items():
             if value.attrs.get("point_data", False):
-                ds.coords[coord] = value.expand_dims(time=[ds.attrs["time"]])
+                ds.coords[coord] = value.expand_dims(time=[time_val])

         return ds
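A usage sketch of the preprocess hook documented above, mirroring the combine keywords used by `combine_datasets` (the glob and variable name are hypothetical):

```python
import xarray as xr
from sdf_xarray import SDFPreprocess

ds = xr.open_mfdataset(
    "run1/*.sdf",
    combine="nested",
    concat_dim="time",
    join="outer",
    compat="no_conflicts",
    preprocess=SDFPreprocess(data_vars=["Derived_Number_Density_Electron"]),
)
```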
sdf_xarray/_version.py CHANGED
@@ -1,7 +1,14 @@
 # file generated by setuptools-scm
 # don't change, don't track in version control

-__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+__all__ = [
+    "__version__",
+    "__version_tuple__",
+    "version",
+    "version_tuple",
+    "__commit_id__",
+    "commit_id",
+]

 TYPE_CHECKING = False
 if TYPE_CHECKING:
@@ -9,13 +16,19 @@ if TYPE_CHECKING:
     from typing import Union

     VERSION_TUPLE = Tuple[Union[int, str], ...]
+    COMMIT_ID = Union[str, None]
 else:
     VERSION_TUPLE = object
+    COMMIT_ID = object

 version: str
 __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
+commit_id: COMMIT_ID
+__commit_id__: COMMIT_ID

-__version__ = version = '0.2.1'
-__version_tuple__ = version_tuple = (0, 2, 1)
+__version__ = version = '0.3.2'
+__version_tuple__ = version_tuple = (0, 3, 2)
+
+__commit_id__ = commit_id = 'g331520e50'
sdf_xarray/dataset_accessor.py ADDED
@@ -0,0 +1,73 @@
+from typing import Union
+
+import xarray as xr
+
+
+@xr.register_dataset_accessor("epoch")
+class EpochAccessor:
+    def __init__(self, xarray_obj: xr.Dataset):
+        # The xarray object is the Dataset, which we store as self._ds
+        self._ds = xarray_obj
+
+    def rescale_coords(
+        self,
+        multiplier: float,
+        unit_label: str,
+        coord_names: Union[str, list[str]],
+    ) -> xr.Dataset:
+        """
+        Rescales specified X and Y coordinates in the Dataset by a given multiplier
+        and updates the unit label attribute.
+
+        Parameters
+        ----------
+        multiplier : float
+            The factor by which to multiply the coordinate values (e.g., 1e6 for meters to microns).
+        unit_label : str
+            The new unit label for the coordinates (e.g., "µm").
+        coord_names : str or list of str
+            The name(s) of the coordinate variable(s) to rescale.
+            If a string, only that coordinate is rescaled.
+            If a list, all listed coordinates are rescaled.
+
+        Returns
+        -------
+        xr.Dataset
+            A new Dataset with the updated and rescaled coordinates.
+
+        Examples
+        --------
+        # Convert X, Y, and Z from meters to microns
+        >>> ds_in_microns = ds.epoch.rescale_coords(1e6, "µm", coord_names=["X_Grid", "Y_Grid", "Z_Grid"])
+
+        # Convert only X to millimeters
+        >>> ds_in_mm = ds.epoch.rescale_coords(1000, "mm", coord_names="X_Grid")
+        """
+
+        ds = self._ds
+        new_coords = {}
+
+        if isinstance(coord_names, str):
+            # Convert single string to a list
+            coords_to_process = [coord_names]
+        elif isinstance(coord_names, list):
+            # Use the provided list
+            coords_to_process = coord_names
+        else:
+            coords_to_process = list(coord_names)
+
+        for coord_name in coords_to_process:
+            if coord_name not in ds.coords:
+                raise ValueError(
+                    f"Coordinate '{coord_name}' not found in the Dataset. Cannot rescale."
+                )
+
+            coord_original = ds[coord_name]
+
+            coord_rescaled = coord_original * multiplier
+            coord_rescaled.attrs = coord_original.attrs.copy()
+            coord_rescaled.attrs["units"] = unit_label
+
+            new_coords[coord_name] = coord_rescaled
+
+        return ds.assign_coords(new_coords)
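One design note on the accessor above: `rescale_coords` returns a new Dataset via `assign_coords` rather than mutating in place, so calls can be chained. A minimal sketch (file and coordinate names are hypothetical):

```python
import xarray as xr
import sdf_xarray  # noqa: F401  # importing registers the "epoch" accessor

ds = xr.open_dataset("0010.sdf")  # hypothetical SDF file
ds_um = ds.epoch.rescale_coords(1e6, "µm", coord_names="X_Grid_mid")
```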
sdf_xarray/plotting.py CHANGED
@@ -10,60 +10,82 @@ if TYPE_CHECKING:
     from matplotlib.animation import FuncAnimation


-def get_frame_title(data: xr.DataArray, frame: int, display_sdf_name: bool) -> str:
+def get_frame_title(
+    data: xr.DataArray,
+    frame: int,
+    display_sdf_name: bool = False,
+    title_custom: str | None = None,
+) -> str:
     """Generate the title for a frame"""
-    sdf_name = f", {frame:04d}.sdf" if display_sdf_name else ""
+    # Adds custom text to the start of the title, if specified
+    title_custom = "" if title_custom is None else f"{title_custom}, "
+    # Adds the time and associated units to the title
     time = data["time"][frame].to_numpy()
-    return f"t = {time:.2e}s{sdf_name}"

+    time_units = data["time"].attrs.get("units", False)
+    time_units_formatted = f" [{time_units}]" if time_units else ""
+    title_time = f"time = {time:.2e}{time_units_formatted}"

-def calculate_window_velocity_and_edges(
-    data: xr.DataArray, x_axis_coord: str
-) -> tuple[float, tuple[float, float], np.ndarray]:
-    """Calculate the moving window's velocity and initial edges.
+    # Adds sdf name to the title, if specifed
+    title_sdf = f", {frame:04d}.sdf" if display_sdf_name else ""
+    return f"{title_custom}{title_time}{title_sdf}"

-    1. Finds a lineout of the target atribute in the x coordinate of the first frame
-    2. Removes the NaN values to isolate the simulation window
-    3. Produces the index size of the window, indexed at zero
-    4. Uses distance moved and final time of the simulation to calculate velocity and initial xlims
-    """
-    time_since_start = data["time"].values - data["time"].values[0]
-    initial_window_edge = (0, 0)
-    target_lineout = data.values[0, :, 0]
-    target_lineout_window = target_lineout[~np.isnan(target_lineout)]
-    x_grid = data[x_axis_coord].values
-    window_size_index = target_lineout_window.size - 1

-    velocity_window = (x_grid[-1] - x_grid[window_size_index]) / time_since_start[-1]
-    initial_window_edge = (x_grid[0], x_grid[window_size_index])
-    return velocity_window, initial_window_edge, time_since_start
+def calculate_window_boundaries(
+    data: xr.DataArray, xlim: tuple[float, float] | False = False
+) -> np.ndarray:
+    """Calculate the bounderies a moving window frame. If the user specifies xlim, this will
+    be used as the initial bounderies and the window will move along acordingly.
+    """
+    x_grid = data["X_Grid_mid"].values
+    x_half_cell = (x_grid[1] - x_grid[0]) / 2
+    N_frames = data["time"].size

+    # Find the window bounderies by finding the first and last non-NaN values in the 0th lineout
+    # along the x-axis.
+    window_boundaries = np.zeros((N_frames, 2))
+    for i in range(N_frames):
+        # Check if data is 1D
+        if data.ndim == 2:
+            target_lineout = data[i].values
+        # Check if data is 2D
+        if data.ndim == 3:
+            target_lineout = data[i, :, 0].values
+        x_grid_non_nan = x_grid[~np.isnan(target_lineout)]
+        window_boundaries[i, 0] = x_grid_non_nan[0] - x_half_cell
+        window_boundaries[i, 1] = x_grid_non_nan[-1] + x_half_cell

-def compute_global_limits(data: xr.DataArray) -> tuple[float, float]:
-    """Remove all NaN values from the target data to calculate the 1st and 99th percentiles,
-    excluding extreme outliers.
-    """
-    values_no_nan = data.values[~np.isnan(data.values)]
-    global_min = np.percentile(values_no_nan, 1)
-    global_max = np.percentile(values_no_nan, 99)
-    return global_min, global_max
+    # User's choice for initial window edge supercides the one calculated
+    if xlim:
+        window_boundaries = window_boundaries + xlim - window_boundaries[0]
+    return window_boundaries


-def is_1d(data: xr.DataArray) -> bool:
-    """Check if the data is 1D."""
-    return len(data.shape) == 2
+def compute_global_limits(
+    data: xr.DataArray,
+    min_percentile: float = 0,
+    max_percentile: float = 100,
+) -> tuple[float, float]:
+    """Remove all NaN values from the target data to calculate the global minimum and maximum of the data.
+    User defined percentiles can remove extreme outliers.
+    """

+    # Removes NaN values, needed for moving windows
+    values_no_nan = data.values[~np.isnan(data.values)]

-def is_2d(data: xr.DataArray) -> bool:
-    """Check if the data is 2D or 3D."""
-    return len(data.shape) == 3
+    # Finds the global minimum and maximum of the plot, based on the percentile of the data
+    global_min = np.percentile(values_no_nan, min_percentile)
+    global_max = np.percentile(values_no_nan, max_percentile)
+    return global_min, global_max


-def generate_animation(
+def animate(
     data: xr.DataArray,
+    fps: float = 10,
+    min_percentile: float = 0,
+    max_percentile: float = 100,
+    title: str | None = None,
     display_sdf_name: bool = False,
-    fps: int = 10,
-    move_window: bool = False,
     ax: plt.Axes | None = None,
     **kwargs,
 ) -> FuncAnimation:
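To make `calculate_window_boundaries` concrete, a minimal sketch on synthetic data, assuming the `time`/`X_Grid_mid` naming the function expects (all values made up):

```python
import numpy as np
import xarray as xr
from sdf_xarray.plotting import calculate_window_boundaries

# (time, x) array with a NaN-padded window sliding one cell per frame
x = np.linspace(0.0, 1.0, 11)
vals = np.full((3, 11), np.nan)
for t in range(3):
    vals[t, t : t + 5] = 1.0
da = xr.DataArray(
    vals,
    dims=("time", "X_Grid_mid"),
    coords={"time": [0.0, 1.0, 2.0], "X_Grid_mid": x},
)
bounds = calculate_window_boundaries(da)  # shape (3, 2): [left, right] per frame
```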
@@ -71,17 +93,18 @@ def generate_animation(

    Parameters
    ---------
-    dataset
-        The dataset containing the simulation data
-    target_attribute
-        The attribute to plot for each timestep
-    display_sdf_name
-        Display the sdf file name in the animation title
+    data
+        The dataarray containing the target data
     fps
         Frames per second for the animation (default: 10)
-    move_window
-        If the simulation has a moving window, the animation will move along
-        with it (default: False)
+    min_percentile
+        Minimum percentile of the data (default: 0)
+    max_percentile
+        Maximum percentile of the data (default: 100)
+    title
+        Custom title to add to the plot.
+    display_sdf_name
+        Display the sdf file name in the animation title
     ax
         Matplotlib axes on which to plot.
     kwargs
@@ -89,18 +112,28 @@

    Examples
    --------
-    >>> generate_animation(dataset["Derived_Number_Density_Electron"])
+    >>> dataset["Derived_Number_Density_Electron"].epoch.animate()
    """
-    import matplotlib.pyplot as plt
-    from matplotlib.animation import FuncAnimation
+    import matplotlib.pyplot as plt  # noqa: PLC0415
+    from matplotlib.animation import FuncAnimation  # noqa: PLC0415
+
+    kwargs_original = kwargs.copy()

    if ax is None:
        _, ax = plt.subplots()

    N_frames = data["time"].size
-    global_min, global_max = compute_global_limits(data)
+    global_min, global_max = compute_global_limits(data, min_percentile, max_percentile)

-    if is_2d(data):
+    # Initialise plot and set y-limits for 1D data
+    if data.ndim == 2:
+        kwargs.setdefault("x", "X_Grid_mid")
+        plot = data.isel(time=0).plot(ax=ax, **kwargs)
+        ax.set_title(get_frame_title(data, 0, display_sdf_name, title))
+        ax.set_ylim(global_min, global_max)
+
+    # Initilise plot and set colour bar for 2D data
+    if data.ndim == 3:
        kwargs["norm"] = plt.Normalize(vmin=global_min, vmax=global_max)
        kwargs["add_colorbar"] = False
        # Set default x and y coordinates for 2D data if not provided
@@ -109,44 +142,32 @@

        # Initialize the plot with the first timestep
        plot = data.isel(time=0).plot(ax=ax, **kwargs)
-        ax.set_title(get_frame_title(data, 0, display_sdf_name))
+        ax.set_title(get_frame_title(data, 0, display_sdf_name, title))

        # Add colorbar
-        long_name = data.attrs.get("long_name")
-        units = data.attrs.get("units")
-        plt.colorbar(plot, ax=ax, label=f"{long_name} [${units}$]")
-
-    # Initialise plo and set y-limits for 1D data
-    if is_1d(data):
-        plot = data.isel(time=0).plot(ax=ax, **kwargs)
-        ax.set_title(get_frame_title(data, 0, display_sdf_name))
-        ax.set_ylim(global_min, global_max)
+        if kwargs_original.get("add_colorbar", True):
+            long_name = data.attrs.get("long_name")
+            units = data.attrs.get("units")
+            plt.colorbar(plot, ax=ax, label=f"{long_name} [${units}$]")

+    # check if there is a moving window by finding NaNs in the data
+    move_window = np.isnan(np.sum(data.values))
    if move_window:
-        window_velocity, window_initial_edge, time_since_start = (
-            calculate_window_velocity_and_edges(data, kwargs["x"])
-        )
-
-        # User's choice for initial window edge supercides the one calculated
-        if "xlim" in kwargs:
-            window_initial_edge = kwargs["xlim"]
+        window_boundaries = calculate_window_boundaries(data, kwargs.get("xlim", False))

    def update(frame):
        # Set the xlim for each frame in the case of a moving window
        if move_window:
-            kwargs["xlim"] = (
-                window_initial_edge[0] + window_velocity * time_since_start[frame],
-                window_initial_edge[1] * 0.99
-                + window_velocity * time_since_start[frame],
-            )
+            kwargs["xlim"] = window_boundaries[frame]

        # Update plot for the new frame
        ax.clear()
+
        data.isel(time=frame).plot(ax=ax, **kwargs)
-        ax.set_title(get_frame_title(data, frame, display_sdf_name))
+        ax.set_title(get_frame_title(data, frame, display_sdf_name, title))

-        # # Update y-limits for 1D data
-        if is_1d(data):
+        # Update y-limits for 1D data
+        if data.ndim == 2:
            ax.set_ylim(global_min, global_max)

    return FuncAnimation(
@@ -181,4 +202,4 @@ class EpochAccessor:
        >>> ani = ds["Electric_Field_Ey"].epoch.animate()
        >>> ani.save("myfile.mp4")
        """
-        return generate_animation(self._obj, *args, **kwargs)
+        return animate(self._obj, *args, **kwargs)
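A hedged sketch of the renamed accessor entry point (variable and output names hypothetical):

```python
ani = ds["Derived_Number_Density_Electron"].epoch.animate(
    fps=15,
    min_percentile=1,
    max_percentile=99,
    title="Electron density",
)
ani.save("density.mp4")
```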
sdf_xarray/sdf_interface.pyx CHANGED
@@ -39,7 +39,7 @@ cdef class Block:

 @dataclasses.dataclass
 cdef class Variable(Block):
-    units: tuple[str] | None
+    units: str | None
     mult: float | None
     grid: str | None
     grid_mid: str | None
sdf_xarray-0.2.1.dist-info/METADATA → sdf_xarray-0.3.2.dist-info/METADATA RENAMED
@@ -1,42 +1,23 @@
-Metadata-Version: 2.2
+Metadata-Version: 2.4
 Name: sdf-xarray
-Version: 0.2.1
+Version: 0.3.2
 Summary: Provides a backend for xarray to read SDF files as created by the EPOCH plasma PIC code.
-Author-Email: Peter Hill <peter.hill@york.ac.uk>, Joel Adams <joel.adams@york.ac.uk>, Shaun Doherty <shaun.doherty@york.ac.uk>
-License: Copyright 2024, Peter Hill, Joel Adams, epochpic team
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-1. Redistributions of source code must retain the above copyright
-notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
-notice, this list of conditions and the following disclaimer in the
-documentation and/or other materials provided with the distribution.
-
-3. Neither the name of the copyright holder nor the names of its
-contributors may be used to endorse or promote products derived from
-this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-“AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-Requires-Python: >=3.10
+Author-Email: Peter Hill <peter.hill@york.ac.uk>, Joel Adams <joel.adams@york.ac.uk>, Shaun Doherty <shaun.doherty@york.ac.uk>, Chris Herdman <chris.herdman@york.ac.uk>
+License-Expression: BSD-3-Clause
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Science/Research
+Classifier: Topic :: Scientific/Engineering
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Requires-Python: <3.14,>=3.10
 Requires-Dist: numpy>=2.0.0
 Requires-Dist: xarray>=2024.1.0
 Requires-Dist: dask>=2024.7.1
-Requires-Dist: cython>=3.0
 Provides-Extra: docs
 Requires-Dist: sphinx>=5.3; extra == "docs"
 Requires-Dist: sphinx_autodoc_typehints>=1.19; extra == "docs"
@@ -52,6 +33,7 @@ Requires-Dist: myst-parser; extra == "docs"
52
33
  Provides-Extra: test
53
34
  Requires-Dist: pytest>=3.3.0; extra == "test"
54
35
  Requires-Dist: dask[complete]; extra == "test"
36
+ Requires-Dist: matplotlib; extra == "test"
55
37
  Provides-Extra: lint
56
38
  Requires-Dist: ruff; extra == "lint"
57
39
  Provides-Extra: build
@@ -66,14 +48,22 @@ Description-Content-Type: text/markdown

 # sdf-xarray

-![PyPI](https://img.shields.io/pypi/v/sdf-xarray?color=blue)
+![Dynamic TOML Badge](https://img.shields.io/badge/dynamic/toml?url=https%3A%2F%2Fraw.githubusercontent.com%2Fepochpic%2Fsdf-xarray%2Frefs%2Fheads%2Fmain%2Fpyproject.toml&query=%24.project.requires-python&label=python&logo=python)
+[![Available on PyPI](https://img.shields.io/pypi/v/sdf-xarray?color=blue&logo=pypi)](https://pypi.org/project/sdf-xarray/)
+[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.15351323.svg)](https://doi.org/10.5281/zenodo.15351323)
 ![Build/Publish](https://github.com/epochpic/sdf-xarray/actions/workflows/build_publish.yml/badge.svg)
 ![Tests](https://github.com/epochpic/sdf-xarray/actions/workflows/tests.yml/badge.svg)
+[![Read the Docs](https://img.shields.io/readthedocs/sdf-xarray?logo=readthedocs&link=https%3A%2F%2Fsdf-xarray.readthedocs.io%2F)](https://sdf-xarray.readthedocs.io)
+[![Formatted with black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/python/black)
+

 sdf-xarray provides a backend for [xarray](https://xarray.dev) to read SDF files as created by
 [EPOCH](https://epochpic.github.io) using the [SDF-C](https://github.com/epochpic/SDF_C) library.
 Part of [BEAM](#broad-epoch-analysis-modules-beam) (Broad EPOCH Analysis Modules).

+> [!IMPORTANT]
+> To install this package make sure you are using one of the Python versions listed above.
+
 ## Installation

 Install from PyPI with:
sdf_xarray-0.2.1.dist-info/RECORD → sdf_xarray-0.3.2.dist-info/RECORD RENAMED
@@ -6,19 +6,20 @@ include/SDFC_14.4.7/sdf_list_type.h,sha256=Quu8v0-SEsQuJpGtEZnm09tAyXqWNitx0sXl5
 include/SDFC_14.4.7/sdf_vector_type.h,sha256=dbKjhzRRsvhzrnTwVjtVlvnuisEnRMKY-vvdm94ok_Q,1595
 include/SDFC_14.4.7/stack_allocator.h,sha256=L7U9vmGiVSw3VQLIv9EzTaVq7JbFxs9aNonKStTkUSg,1335
 include/SDFC_14.4.7/uthash.h,sha256=rIyy_-ylY6S_7WaZCCC3VtvXaC9q37rFyA0f1U9xc4w,63030
-lib/SDFC_14.4.7/sdfc.lib,sha256=FFlajqQZVDNaOzVyuWTv2XcFS7DHOiwDut_EhSSKyi0,350698
+lib/SDFC_14.4.7/sdfc.lib,sha256=bsw4ROxRHCvBtvwXUBfOyhycWZyUPTNrejvZ-qIHPKQ,350158
 lib/SDFC_14.4.7/SDFCConfig.cmake,sha256=IOA1eusC-KvUK4LNTEiOAmEdaPH1ZvNvbYPgiG1oZio,802
 lib/SDFC_14.4.7/SDFCConfigVersion.cmake,sha256=pN7Qqyf04s3izw7PYQ0XK6imvmhaVegSdR_nEl3Ok_o,2830
 lib/SDFC_14.4.7/SDFCTargets-release.cmake,sha256=G4zdx5PyjePigeD_a6rmZAxbk7L8Nf0klUnV78Lm2fI,828
 lib/SDFC_14.4.7/SDFCTargets.cmake,sha256=OVt1Gm8n7Ew4fiTmA9yHoef3vIIGwsXUZfqeG9p9Bys,4152
-sdf_xarray/__init__.py,sha256=BL9yrZKdmN9u8gUHFsfdlMbSPPtGkqkUKldKhs6XKXI,17407
-sdf_xarray/_version.py,sha256=cTPlZaUCc20I4ZWsDjY35UftpFNRgfDaDBgkWxfIQmg,532
+sdf_xarray/__init__.py,sha256=obgAD4Aecvvpd8GkxLIAiIagSaY0bFVP2Q397N48_5g,24201
+sdf_xarray/_version.py,sha256=bmLiJYnZTISDv_NDGANk6QDMSY0XTk0CwXXKhbOvW3Y,746
 sdf_xarray/csdf.pxd,sha256=ADPjAuHsodAvdOz96Z_XlFF7VL3KmVaXcTifWDP3rK0,4205
-sdf_xarray/plotting.py,sha256=pCKZ01sAT0VLEhnWrX6LN5OSJnzcIkRA3joN7f62giM,6238
-sdf_xarray/sdf_interface.cp313-win_amd64.pyd,sha256=KQqvS-YMfNROpaQs-kt9ivhmcio2V3oP1I9djAMxo1U,378368
-sdf_xarray/sdf_interface.pyx,sha256=3XRFlrC1e_HkJrU8-i3fMz8DlyUxZgt9wTx_QkGE_TQ,11982
-sdf_xarray-0.2.1.dist-info/METADATA,sha256=gfR0XGC6e0b1WFgTdVMf1nfL4AsICdVCDQCOsjZC2kY,7989
-sdf_xarray-0.2.1.dist-info/WHEEL,sha256=xwjTh-mFvAONxbW8av7jDNts8DLqzqCA-klxw1kaHPA,106
-sdf_xarray-0.2.1.dist-info/entry_points.txt,sha256=gP7BIQpXNg6vIf7S7p-Rw_EJZTC1X50BsVTkK7dA7g0,57
-sdf_xarray-0.2.1.dist-info/licenses/LICENCE,sha256=aHWuyELjtzIL1jTXFHTbI3tr9vyVyhnw3I9_QYPdEX8,1515
-sdf_xarray-0.2.1.dist-info/RECORD,,
+sdf_xarray/dataset_accessor.py,sha256=TvnVMBefnT1d94Bkllhd-__O3ittzpaVjZKfze-3WQ4,2484
+sdf_xarray/plotting.py,sha256=PnbEspR4XkA5SHkpoFKA2G7BYj5J3mVgR1TEeGol6Vw,7041
+sdf_xarray/sdf_interface.cp313-win_amd64.pyd,sha256=VLbOR703pDE3NdsnV-_8qQyLKelRmW2taohOVqeWG68,359936
+sdf_xarray/sdf_interface.pyx,sha256=PFC6upg14OZBqiGInLgBoxztIIKBk-HOh3WC9Ro4YUw,11975
+sdf_xarray-0.3.2.dist-info/METADATA,sha256=xvADFsOdsd5EzaZbVYGOUgmEMe4RzrTDF9IbyijadqE,7624
+sdf_xarray-0.3.2.dist-info/WHEEL,sha256=vkL3wTIkhjZa3RmEXX20hldNp6Q8qtwRjrXW6K5sw_Q,106
+sdf_xarray-0.3.2.dist-info/entry_points.txt,sha256=gP7BIQpXNg6vIf7S7p-Rw_EJZTC1X50BsVTkK7dA7g0,57
+sdf_xarray-0.3.2.dist-info/licenses/LICENCE,sha256=aHWuyELjtzIL1jTXFHTbI3tr9vyVyhnw3I9_QYPdEX8,1515
+sdf_xarray-0.3.2.dist-info/RECORD,,
sdf_xarray-0.2.1.dist-info/WHEEL → sdf_xarray-0.3.2.dist-info/WHEEL RENAMED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: scikit-build-core 0.11.1
+Generator: scikit-build-core 0.11.6
 Root-Is-Purelib: false
 Tag: cp313-cp313-win_amd64
