sdf-xarray 0.1.1__cp312-cp312-win_amd64.whl → 0.3.2__cp312-cp312-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,13 +1,13 @@
1
1
  # Generated by CMake
2
2
 
3
3
  if("${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}" LESS 2.8)
4
- message(FATAL_ERROR "CMake >= 2.8.0 required")
4
+ message(FATAL_ERROR "CMake >= 2.8.3 required")
5
5
  endif()
6
6
  if(CMAKE_VERSION VERSION_LESS "2.8.3")
7
7
  message(FATAL_ERROR "CMake >= 2.8.3 required")
8
8
  endif()
9
9
  cmake_policy(PUSH)
10
- cmake_policy(VERSION 2.8.3...3.28)
10
+ cmake_policy(VERSION 2.8.3...3.29)
11
11
  #----------------------------------------------------------------
12
12
  # Generated CMake target import file.
13
13
  #----------------------------------------------------------------
lib/SDFC_14.4.7/sdfc.lib CHANGED
Binary file
sdf_xarray/__init__.py CHANGED
@@ -1,10 +1,17 @@
1
+ import contextlib
1
2
  import os
2
- import pathlib
3
+ import re
3
4
  from collections import Counter, defaultdict
4
- from typing import Iterable
5
+ from collections.abc import Callable, Iterable
6
+ from importlib.metadata import version
7
+ from itertools import product
8
+ from os import PathLike as os_PathLike
9
+ from pathlib import Path
10
+ from typing import ClassVar
5
11
 
6
12
  import numpy as np
7
13
  import xarray as xr
14
+ from packaging.version import Version
8
15
  from xarray.backends import AbstractDataStore, BackendArray, BackendEntrypoint
9
16
  from xarray.backends.file_manager import CachingFileManager
10
17
  from xarray.backends.locks import ensure_lock
@@ -12,7 +19,23 @@ from xarray.core import indexing
12
19
  from xarray.core.utils import close_on_error, try_read_magic_number_from_path
13
20
  from xarray.core.variable import Variable
14
21
 
15
- from .sdf_interface import Constant, SDFFile
22
+ # NOTE: Do not delete these lines, otherwise the "epoch" dataset and dataarray
23
+ # accessors will not be imported when the user imports sdf_xarray
24
+ import sdf_xarray.dataset_accessor
25
+ import sdf_xarray.plotting # noqa: F401
26
+
27
+ # NOTE: This attempts to initialise with the "pint" accessor if the user
28
+ # has installed the package
29
+ with contextlib.suppress(ImportError):
30
+ import pint_xarray # noqa: F401
31
+
32
+ from .sdf_interface import Constant, SDFFile # type: ignore # noqa: PGH003
33
+
34
+ # TODO Remove this once the new kwarg options are fully implemented
35
+ if Version(version("xarray")) >= Version("2025.8.0"):
36
+ xr.set_options(use_new_combine_kwarg_defaults=True)
37
+
38
+ PathLike = str | os_PathLike
16
39
 
17
40
 
18
41
  def _rename_with_underscore(name: str) -> str:
@@ -21,24 +44,104 @@ def _rename_with_underscore(name: str) -> str:
21
44
  return name.replace("/", "_").replace(" ", "_").replace("-", "_")
22
45
 
23
46
 
24
- def combine_datasets(path_glob: Iterable | str, **kwargs) -> xr.Dataset:
25
- """Combine all datasets using a single time dimension"""
47
+ def _process_latex_name(variable_name: str) -> str:
48
+ """Converts variable names to LaTeX format where possible
49
+ using the following rules:
50
+ - E -> $E_x$
51
+ - E -> $E_y$
52
+ - E -> $E_z$
53
+
54
+ This repeats for B, J and P. It only changes the variable
55
+ name if there are spaces around the affix (prefix + suffix)
56
+ or if there is no trailing space. This is to avoid changing variable
57
+ names that may contain these affixes as part of the variable name itself.
58
+ """
59
+ prefixes = ["E", "B", "J", "P"]
60
+ suffixes = ["x", "y", "z"]
61
+ for prefix, suffix in product(prefixes, suffixes):
62
+ # Match affix with preceding space and trailing space or end of string
63
+ affix_pattern = rf"\b{prefix}{suffix}\b"
64
+ # Insert LaTeX format while preserving spaces
65
+ replacement = rf"${prefix}_{suffix}$"
66
+ variable_name = re.sub(affix_pattern, replacement, variable_name)
67
+ return variable_name
68
+
69
+
70
+ def _resolve_glob(path_glob: PathLike | Iterable[PathLike]):
71
+ """
72
+ Normalise input path_glob into a sorted list of absolute, resolved Path objects.
73
+ """
74
+
75
+ try:
76
+ p = Path(path_glob)
77
+ paths = list(p.parent.glob(p.name)) if p.name == "*.sdf" else list(p)
78
+ except TypeError:
79
+ paths = list({Path(p) for p in path_glob})
80
+
81
+ paths = sorted(p.resolve() for p in paths)
82
+ if not paths:
83
+ raise FileNotFoundError(f"No files matched pattern or input: {path_glob!r}")
84
+ return paths
85
+
86
+
87
+ def purge_unselected_data_vars(ds: xr.Dataset, data_vars: list[str]) -> xr.Dataset:
88
+ """
89
+ If the user has exclusively requested only certain variables be
90
+ loaded in then we purge all other variables and dimensions
91
+ """
92
+ existing_data_vars = set(ds.data_vars.keys())
93
+ vars_to_keep = set(data_vars) & existing_data_vars
94
+ vars_to_drop = existing_data_vars - vars_to_keep
95
+ ds = ds.drop_vars(vars_to_drop)
96
+
97
+ existing_dims = set(ds.sizes)
98
+ dims_to_keep = set()
99
+ for var in vars_to_keep:
100
+ dims_to_keep.update(ds[var].coords._names)
101
+ dims_to_keep.update(ds[var].dims)
102
+
103
+ coords_to_drop = existing_dims - dims_to_keep
104
+ return ds.drop_dims(coords_to_drop)
105
+
106
+
107
+ def combine_datasets(
108
+ path_glob: Iterable | str, data_vars: list[str], **kwargs
109
+ ) -> xr.Dataset:
110
+ """
111
+ Combine all datasets using a single time dimension, optionally extract
112
+ data from only the listed data_vars
113
+ """
114
+
115
+ if data_vars is not None:
116
+ return xr.open_mfdataset(
117
+ path_glob,
118
+ join="outer",
119
+ coords="different",
120
+ compat="no_conflicts",
121
+ combine="nested",
122
+ concat_dim="time",
123
+ preprocess=SDFPreprocess(data_vars=data_vars),
124
+ **kwargs,
125
+ )
26
126
 
27
127
  return xr.open_mfdataset(
28
128
  path_glob,
29
- data_vars="minimal",
30
- coords="minimal",
31
- compat="override",
129
+ data_vars="all",
130
+ coords="different",
131
+ compat="no_conflicts",
132
+ join="outer",
32
133
  preprocess=SDFPreprocess(),
33
134
  **kwargs,
34
135
  )
35
136
 
36
137
 
37
138
  def open_mfdataset(
38
- path_glob: Iterable | str | pathlib.Path | pathlib.Path.glob,
139
+ path_glob: Iterable | str | Path | Callable[..., Iterable[Path]],
39
140
  *,
40
141
  separate_times: bool = False,
41
142
  keep_particles: bool = False,
143
+ probe_names: list[str] | None = None,
144
+ data_vars: list[str] | None = None,
42
145
  ) -> xr.Dataset:
43
146
  """Open a set of EPOCH SDF files as one `xarray.Dataset`
44
147
 
@@ -68,20 +171,36 @@ def open_mfdataset(
68
171
  different output frequencies
69
172
  keep_particles :
70
173
  If ``True``, also load particle data (this may use a lot of memory!)
174
+ probe_names :
175
+ List of EPOCH probe names
176
+ data_vars :
177
+ List of data vars to load in (If not specified loads in all variables)
71
178
  """
72
179
 
73
- # TODO: This is not very robust, look at how xarray.open_mfdataset does it
74
- if isinstance(path_glob, str):
75
- path_glob = pathlib.Path().glob(path_glob)
76
-
77
- # Coerce to list because we might need to use the sequence multiple times
78
- path_glob = sorted(list(path_glob))
180
+ path_glob = _resolve_glob(path_glob)
79
181
 
80
182
  if not separate_times:
81
- return combine_datasets(path_glob, keep_particles=keep_particles)
183
+ return combine_datasets(
184
+ path_glob,
185
+ data_vars=data_vars,
186
+ keep_particles=keep_particles,
187
+ probe_names=probe_names,
188
+ )
82
189
 
83
- time_dims, var_times_map = make_time_dims(path_glob)
84
- all_dfs = [xr.open_dataset(f, keep_particles=keep_particles) for f in path_glob]
190
+ _, var_times_map = make_time_dims(path_glob)
191
+
192
+ all_dfs = []
193
+ for f in path_glob:
194
+ ds = xr.open_dataset(f, keep_particles=keep_particles, probe_names=probe_names)
195
+
196
+ # If the data_vars are specified then only load them in and disregard the rest.
197
+ # If there are no remaining data variables then skip adding the dataset to list
198
+ if data_vars is not None:
199
+ ds = purge_unselected_data_vars(ds, data_vars)
200
+ if not ds.data_vars:
201
+ continue
202
+
203
+ all_dfs.append(ds)
85
204
 
86
205
  for df in all_dfs:
87
206
  for da in df:
@@ -98,7 +217,11 @@ def open_mfdataset(
98
217
  )
99
218
 
100
219
  return xr.combine_by_coords(
101
- all_dfs, data_vars="minimal", combine_attrs="drop_conflicts"
220
+ all_dfs,
221
+ coords="different",
222
+ combine_attrs="drop_conflicts",
223
+ join="outer",
224
+ compat="no_conflicts",
102
225
  )
103
226
 
104
227
 
@@ -119,14 +242,12 @@ def make_time_dims(path_glob):
119
242
  )
120
243
 
121
244
  # Count the unique set of lists of times
122
- times_count = Counter((tuple(v) for v in vars_count.values()))
245
+ times_count = Counter(tuple(v) for v in vars_count.values())
123
246
 
124
247
  # Give each set of times a unique name
125
248
  time_dims = {}
126
- count = 0
127
- for t in times_count:
249
+ for count, t in enumerate(times_count):
128
250
  time_dims[f"time{count}"] = t
129
- count += 1
130
251
 
131
252
  # Map each variable to the name of its time dimension
132
253
  var_times_map = {}
@@ -178,19 +299,28 @@ class SDFDataStore(AbstractDataStore):
178
299
  """Store for reading and writing data via the SDF library."""
179
300
 
180
301
  __slots__ = (
181
- "lock",
182
- "drop_variables",
183
- "keep_particles",
184
302
  "_filename",
185
303
  "_manager",
304
+ "drop_variables",
305
+ "keep_particles",
306
+ "lock",
307
+ "probe_names",
186
308
  )
187
309
 
188
- def __init__(self, manager, drop_variables=None, keep_particles=False, lock=None):
310
+ def __init__(
311
+ self,
312
+ manager,
313
+ drop_variables=None,
314
+ keep_particles=False,
315
+ lock=None,
316
+ probe_names=None,
317
+ ):
189
318
  self._manager = manager
190
319
  self._filename = self.ds.filename
191
320
  self.drop_variables = drop_variables
192
321
  self.keep_particles = keep_particles
193
322
  self.lock = ensure_lock(lock)
323
+ self.probe_names = probe_names
194
324
 
195
325
  @classmethod
196
326
  def open(
@@ -199,6 +329,7 @@ class SDFDataStore(AbstractDataStore):
199
329
  lock=None,
200
330
  drop_variables=None,
201
331
  keep_particles=False,
332
+ probe_names=None,
202
333
  ):
203
334
  if isinstance(filename, os.PathLike):
204
335
  filename = os.fspath(filename)
@@ -209,6 +340,7 @@ class SDFDataStore(AbstractDataStore):
209
340
  lock=lock,
210
341
  drop_variables=drop_variables,
211
342
  keep_particles=keep_particles,
343
+ probe_names=probe_names,
212
344
  )
213
345
 
214
346
  def _acquire(self, needs_lock=True):
@@ -222,12 +354,21 @@ class SDFDataStore(AbstractDataStore):
222
354
  def acquire_context(self, needs_lock=True):
223
355
  return self._manager.acquire_context(needs_lock)
224
356
 
225
- def load(self):
357
+ def load(self): # noqa: PLR0912, PLR0915
226
358
  # Drop any requested variables
227
359
  if self.drop_variables:
360
+ # Build a mapping from underscored names to real variable names
361
+ name_map = {_rename_with_underscore(var): var for var in self.ds.variables}
362
+
228
363
  for variable in self.drop_variables:
229
- # TODO: nicer error handling
230
- self.ds.variables.pop(variable)
364
+ key = _rename_with_underscore(variable)
365
+ original_name = name_map.get(key)
366
+
367
+ if original_name is None:
368
+ raise KeyError(
369
+ f"Variable '{variable}' not found (interpreted as '{key}')."
370
+ )
371
+ self.ds.variables.pop(original_name)
231
372
 
232
373
  # These two dicts are global metadata about the run or file
233
374
  attrs = {**self.ds.header, **self.ds.run_info}
@@ -247,8 +388,7 @@ class SDFDataStore(AbstractDataStore):
247
388
  def _process_grid_name(grid_name: str, transform_func) -> str:
248
389
  """Apply the given transformation function and then rename with underscores."""
249
390
  transformed_name = transform_func(grid_name)
250
- renamed_name = _rename_with_underscore(transformed_name)
251
- return renamed_name
391
+ return _rename_with_underscore(transformed_name)
252
392
 
253
393
  for key, value in self.ds.grids.items():
254
394
  if "cpu" in key.lower():
@@ -271,7 +411,7 @@ class SDFDataStore(AbstractDataStore):
271
411
  dim_name,
272
412
  coord,
273
413
  {
274
- "long_name": label,
414
+ "long_name": label.replace("_", " "),
275
415
  "units": unit,
276
416
  "point_data": value.is_point_data,
277
417
  "full_name": value.name,
@@ -283,6 +423,8 @@ class SDFDataStore(AbstractDataStore):
283
423
  # Had some problems with these variables, so just ignore them for now
284
424
  if "cpu" in key.lower():
285
425
  continue
426
+ if "boundary" in key.lower():
427
+ continue
286
428
  if "output file" in key.lower():
287
429
  continue
288
430
 
@@ -290,11 +432,6 @@ class SDFDataStore(AbstractDataStore):
290
432
  continue
291
433
 
292
434
  if isinstance(value, Constant) or value.grid is None:
293
- data_attrs = {}
294
- data_attrs["full_name"] = key
295
- if value.units is not None:
296
- data_attrs["units"] = value.units
297
-
298
435
  # We don't have a grid, either because it's just a
299
436
  # scalar, or because it's an array over something
300
437
  # else. We have no more information, so just make up
@@ -303,12 +440,39 @@ class SDFDataStore(AbstractDataStore):
303
440
  dims = [f"dim_{key}_{n}" for n, _ in enumerate(shape)]
304
441
  base_name = _rename_with_underscore(key)
305
442
 
443
+ data_attrs = {}
444
+ data_attrs["full_name"] = key
445
+ data_attrs["long_name"] = base_name.replace("_", " ")
446
+ if value.units is not None:
447
+ data_attrs["units"] = value.units
448
+
306
449
  data_vars[base_name] = Variable(dims, value.data, attrs=data_attrs)
307
450
  continue
308
451
 
309
452
  if value.is_point_data:
310
453
  # Point (particle) variables are 1D
311
- var_coords = (f"ID_{_process_grid_name(key, _grid_species_name)}",)
454
+
455
+ # Particle data does not maintain a fixed dimension size
456
+ # throughout the simulation. An example of a particle name comes
457
+ # in the form of `Particles/Px/Ion_H` which is then modified
458
+ # using `_process_grid_name()` into `Ion_H`. This is fine as the
459
+ # other components of the momentum (`Py`, `Pz`) will have the same
460
+ # size as they represent the same bunch of particles.
461
+
462
+ # Probes however have names in the form of `Electron_Front_Probe/Px`
463
+ # which are changed to just `Px`; this is fine when there is only one
464
+ # probe in the system but when there are multiple they will have
465
+ # conflicting sizes so we can't keep the names as simply `Px` so we
466
+ # instead set their dimension as the full name `Electron_Front_Probe_Px`.
467
+ is_probe_name_match = self.probe_names is not None and any(
468
+ name in key for name in self.probe_names
469
+ )
470
+ name_processor = (
471
+ _rename_with_underscore
472
+ if is_probe_name_match
473
+ else _grid_species_name
474
+ )
475
+ var_coords = (f"ID_{_process_grid_name(key, name_processor)}",)
312
476
  else:
313
477
  # These are DataArrays
314
478
 
@@ -341,13 +505,15 @@ class SDFDataStore(AbstractDataStore):
341
505
  ]
342
506
 
343
507
  # TODO: error handling here? other attributes?
508
+ base_name = _rename_with_underscore(key)
509
+ long_name = _process_latex_name(base_name.replace("_", " "))
344
510
  data_attrs = {
345
511
  "units": value.units,
346
512
  "point_data": value.is_point_data,
347
513
  "full_name": key,
514
+ "long_name": long_name,
348
515
  }
349
516
  lazy_data = indexing.LazilyIndexedArray(SDFBackendArray(key, self))
350
- base_name = _rename_with_underscore(key)
351
517
  data_vars[base_name] = Variable(var_coords, lazy_data, data_attrs)
352
518
 
353
519
  # TODO: might need to decode if mult is set?
@@ -373,8 +539,9 @@ class SDFEntrypoint(BackendEntrypoint):
373
539
  *,
374
540
  drop_variables=None,
375
541
  keep_particles=False,
542
+ probe_names=None,
376
543
  ):
377
- if isinstance(filename_or_obj, pathlib.Path):
544
+ if isinstance(filename_or_obj, Path):
378
545
  # sdf library takes a filename only
379
546
  # TODO: work out if we need to deal with file handles
380
547
  filename_or_obj = str(filename_or_obj)
@@ -383,33 +550,68 @@ class SDFEntrypoint(BackendEntrypoint):
383
550
  filename_or_obj,
384
551
  drop_variables=drop_variables,
385
552
  keep_particles=keep_particles,
553
+ probe_names=probe_names,
386
554
  )
387
555
  with close_on_error(store):
388
556
  return store.load()
389
557
 
390
- open_dataset_parameters = ["filename_or_obj", "drop_variables", "keep_particles"]
558
+ open_dataset_parameters: ClassVar[list[str]] = [
559
+ "filename_or_obj",
560
+ "drop_variables",
561
+ "keep_particles",
562
+ "probe_names",
563
+ ]
391
564
 
392
565
  def guess_can_open(self, filename_or_obj):
393
566
  magic_number = try_read_magic_number_from_path(filename_or_obj)
394
567
  if magic_number is not None:
395
568
  return magic_number.startswith(b"SDF1")
396
569
 
397
- try:
398
- _, ext = os.path.splitext(filename_or_obj)
399
- except TypeError:
400
- return False
401
- return ext in {".sdf", ".SDF"}
570
+ return Path(filename_or_obj).suffix in {".sdf", ".SDF"}
402
571
 
403
572
  description = "Use .sdf files in Xarray"
404
573
 
405
- url = "https://epochpic.github.io/documentation/visualising_output/python.html"
574
+ url = "https://epochpic.github.io/documentation/visualising_output/python_beam.html"
406
575
 
407
576
 
408
577
  class SDFPreprocess:
409
- """Preprocess SDF files for xarray ensuring matching job ids and sets time dimension"""
578
+ """Preprocess SDF files for xarray ensuring matching job ids and sets
579
+ time dimension.
580
+
581
+ This class is used as a 'preprocess' function within ``xr.open_mfdataset``. It
582
+ performs three main duties on each individual file's Dataset:
583
+
584
+ 1. Checks for a **matching job ID** across all files to ensure dataset consistency.
585
+ 2. **Filters** the Dataset to keep only the variables specified in `data_vars`
586
+ and their required coordinates.
587
+ 3. **Expands dimensions** to include a single 'time' coordinate, preparing the
588
+ Dataset for concatenation.
589
+
590
+ EPOCH can output variables at different intervals, so some SDF files
591
+ may not contain the requested variable. We combine this data into one
592
+ dataset by concatenating across the time dimension.
593
+
594
+ The combination is performed using ``join="outer"`` (in the calling ``open_mfdataset`` function),
595
+ meaning that the final combined dataset will contain the variable across the
596
+ entire time span, with NaNs filling the time steps where the variable was absent in
597
+ the individual file.
598
+
599
+ With large SDF files, this filtering method will save on memory consumption when
600
+ compared to loading all variables from all files before concatenation.
410
601
 
411
- def __init__(self):
602
+ Parameters
603
+ ----------
604
+ data_vars :
605
+ A list of data variables to load in (If not specified loads
606
+ in all variables)
607
+ """
608
+
609
+ def __init__(
610
+ self,
611
+ data_vars: list[str] | None = None,
612
+ ):
412
613
  self.job_id: int | None = None
614
+ self.data_vars = data_vars
413
615
 
414
616
  def __call__(self, ds: xr.Dataset) -> xr.Dataset:
415
617
  if self.job_id is None:
@@ -420,11 +622,23 @@ class SDFPreprocess:
420
622
  f"Mismatching job ids (got {ds.attrs['jobid1']}, expected {self.job_id})"
421
623
  )
422
624
 
423
- ds = ds.expand_dims(time=[ds.attrs["time"]])
424
-
625
+ # If the user has exclusively requested only certain variables be
626
+ # loaded in then we purge all other variables and coordinates
627
+ if self.data_vars:
628
+ ds = purge_unselected_data_vars(ds, self.data_vars)
629
+
630
+ time_val = ds.attrs.get("time", np.nan)
631
+ ds = ds.expand_dims(time=[time_val])
632
+ ds = ds.assign_coords(
633
+ time=(
634
+ "time",
635
+ [time_val],
636
+ {"units": "s", "long_name": "Time", "full_name": "time"},
637
+ )
638
+ )
425
639
  # Particles' spartial coordinates also evolve in time
426
640
  for coord, value in ds.coords.items():
427
641
  if value.attrs.get("point_data", False):
428
- ds.coords[coord] = value.expand_dims(time=[ds.attrs["time"]])
642
+ ds.coords[coord] = value.expand_dims(time=[time_val])
429
643
 
430
644
  return ds
sdf_xarray/_version.py CHANGED
@@ -1,16 +1,34 @@
1
- # file generated by setuptools_scm
1
+ # file generated by setuptools-scm
2
2
  # don't change, don't track in version control
3
+
4
+ __all__ = [
5
+ "__version__",
6
+ "__version_tuple__",
7
+ "version",
8
+ "version_tuple",
9
+ "__commit_id__",
10
+ "commit_id",
11
+ ]
12
+
3
13
  TYPE_CHECKING = False
4
14
  if TYPE_CHECKING:
5
- from typing import Tuple, Union
15
+ from typing import Tuple
16
+ from typing import Union
17
+
6
18
  VERSION_TUPLE = Tuple[Union[int, str], ...]
19
+ COMMIT_ID = Union[str, None]
7
20
  else:
8
21
  VERSION_TUPLE = object
22
+ COMMIT_ID = object
9
23
 
10
24
  version: str
11
25
  __version__: str
12
26
  __version_tuple__: VERSION_TUPLE
13
27
  version_tuple: VERSION_TUPLE
28
+ commit_id: COMMIT_ID
29
+ __commit_id__: COMMIT_ID
30
+
31
+ __version__ = version = '0.3.2'
32
+ __version_tuple__ = version_tuple = (0, 3, 2)
14
33
 
15
- __version__ = version = '0.1.1'
16
- __version_tuple__ = version_tuple = (0, 1, 1)
34
+ __commit_id__ = commit_id = 'g331520e50'
@@ -0,0 +1,73 @@
1
+ from typing import Union
2
+
3
+ import xarray as xr
4
+
5
+
6
+ @xr.register_dataset_accessor("epoch")
7
+ class EpochAccessor:
8
+ def __init__(self, xarray_obj: xr.Dataset):
9
+ # The xarray object is the Dataset, which we store as self._ds
10
+ self._ds = xarray_obj
11
+
12
+ def rescale_coords(
13
+ self,
14
+ multiplier: float,
15
+ unit_label: str,
16
+ coord_names: Union[str, list[str]],
17
+ ) -> xr.Dataset:
18
+ """
19
+ Rescales specified X and Y coordinates in the Dataset by a given multiplier
20
+ and updates the unit label attribute.
21
+
22
+ Parameters
23
+ ----------
24
+ multiplier : float
25
+ The factor by which to multiply the coordinate values (e.g., 1e6 for meters to microns).
26
+ unit_label : str
27
+ The new unit label for the coordinates (e.g., "µm").
28
+ coord_names : str or list of str
29
+ The name(s) of the coordinate variable(s) to rescale.
30
+ If a string, only that coordinate is rescaled.
31
+ If a list, all listed coordinates are rescaled.
32
+
33
+ Returns
34
+ -------
35
+ xr.Dataset
36
+ A new Dataset with the updated and rescaled coordinates.
37
+
38
+ Examples
39
+ --------
40
+ # Convert X, Y, and Z from meters to microns
41
+ >>> ds_in_microns = ds.epoch.rescale_coords(1e6, "µm", coord_names=["X_Grid", "Y_Grid", "Z_Grid"])
42
+
43
+ # Convert only X to millimeters
44
+ >>> ds_in_mm = ds.epoch.rescale_coords(1000, "mm", coord_names="X_Grid")
45
+ """
46
+
47
+ ds = self._ds
48
+ new_coords = {}
49
+
50
+ if isinstance(coord_names, str):
51
+ # Convert single string to a list
52
+ coords_to_process = [coord_names]
53
+ elif isinstance(coord_names, list):
54
+ # Use the provided list
55
+ coords_to_process = coord_names
56
+ else:
57
+ coords_to_process = list(coord_names)
58
+
59
+ for coord_name in coords_to_process:
60
+ if coord_name not in ds.coords:
61
+ raise ValueError(
62
+ f"Coordinate '{coord_name}' not found in the Dataset. Cannot rescale."
63
+ )
64
+
65
+ coord_original = ds[coord_name]
66
+
67
+ coord_rescaled = coord_original * multiplier
68
+ coord_rescaled.attrs = coord_original.attrs.copy()
69
+ coord_rescaled.attrs["units"] = unit_label
70
+
71
+ new_coords[coord_name] = coord_rescaled
72
+
73
+ return ds.assign_coords(new_coords)
sdf_xarray/plotting.py ADDED
@@ -0,0 +1,205 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import TYPE_CHECKING
4
+
5
+ import numpy as np
6
+ import xarray as xr
7
+
8
+ if TYPE_CHECKING:
9
+ import matplotlib.pyplot as plt
10
+ from matplotlib.animation import FuncAnimation
11
+
12
+
13
+ def get_frame_title(
14
+ data: xr.DataArray,
15
+ frame: int,
16
+ display_sdf_name: bool = False,
17
+ title_custom: str | None = None,
18
+ ) -> str:
19
+ """Generate the title for a frame"""
20
+ # Adds custom text to the start of the title, if specified
21
+ title_custom = "" if title_custom is None else f"{title_custom}, "
22
+ # Adds the time and associated units to the title
23
+ time = data["time"][frame].to_numpy()
24
+
25
+ time_units = data["time"].attrs.get("units", False)
26
+ time_units_formatted = f" [{time_units}]" if time_units else ""
27
+ title_time = f"time = {time:.2e}{time_units_formatted}"
28
+
29
+ # Adds sdf name to the title, if specifed
30
+ title_sdf = f", {frame:04d}.sdf" if display_sdf_name else ""
31
+ return f"{title_custom}{title_time}{title_sdf}"
32
+
33
+
34
+ def calculate_window_boundaries(
35
+ data: xr.DataArray, xlim: tuple[float, float] | False = False
36
+ ) -> np.ndarray:
37
+ """Calculate the bounderies a moving window frame. If the user specifies xlim, this will
38
+ be used as the initial bounderies and the window will move along acordingly.
39
+ """
40
+ x_grid = data["X_Grid_mid"].values
41
+ x_half_cell = (x_grid[1] - x_grid[0]) / 2
42
+ N_frames = data["time"].size
43
+
44
+ # Find the window bounderies by finding the first and last non-NaN values in the 0th lineout
45
+ # along the x-axis.
46
+ window_boundaries = np.zeros((N_frames, 2))
47
+ for i in range(N_frames):
48
+ # Check if data is 1D
49
+ if data.ndim == 2:
50
+ target_lineout = data[i].values
51
+ # Check if data is 2D
52
+ if data.ndim == 3:
53
+ target_lineout = data[i, :, 0].values
54
+ x_grid_non_nan = x_grid[~np.isnan(target_lineout)]
55
+ window_boundaries[i, 0] = x_grid_non_nan[0] - x_half_cell
56
+ window_boundaries[i, 1] = x_grid_non_nan[-1] + x_half_cell
57
+
58
+ # User's choice for initial window edge supercides the one calculated
59
+ if xlim:
60
+ window_boundaries = window_boundaries + xlim - window_boundaries[0]
61
+ return window_boundaries
62
+
63
+
64
+ def compute_global_limits(
65
+ data: xr.DataArray,
66
+ min_percentile: float = 0,
67
+ max_percentile: float = 100,
68
+ ) -> tuple[float, float]:
69
+ """Remove all NaN values from the target data to calculate the global minimum and maximum of the data.
70
+ User defined percentiles can remove extreme outliers.
71
+ """
72
+
73
+ # Removes NaN values, needed for moving windows
74
+ values_no_nan = data.values[~np.isnan(data.values)]
75
+
76
+ # Finds the global minimum and maximum of the plot, based on the percentile of the data
77
+ global_min = np.percentile(values_no_nan, min_percentile)
78
+ global_max = np.percentile(values_no_nan, max_percentile)
79
+ return global_min, global_max
80
+
81
+
82
+ def animate(
83
+ data: xr.DataArray,
84
+ fps: float = 10,
85
+ min_percentile: float = 0,
86
+ max_percentile: float = 100,
87
+ title: str | None = None,
88
+ display_sdf_name: bool = False,
89
+ ax: plt.Axes | None = None,
90
+ **kwargs,
91
+ ) -> FuncAnimation:
92
+ """Generate an animation
93
+
94
+ Parameters
95
+ ---------
96
+ data
97
+ The dataarray containing the target data
98
+ fps
99
+ Frames per second for the animation (default: 10)
100
+ min_percentile
101
+ Minimum percentile of the data (default: 0)
102
+ max_percentile
103
+ Maximum percentile of the data (default: 100)
104
+ title
105
+ Custom title to add to the plot.
106
+ display_sdf_name
107
+ Display the sdf file name in the animation title
108
+ ax
109
+ Matplotlib axes on which to plot.
110
+ kwargs
111
+ Keyword arguments to be passed to matplotlib.
112
+
113
+ Examples
114
+ --------
115
+ >>> dataset["Derived_Number_Density_Electron"].epoch.animate()
116
+ """
117
+ import matplotlib.pyplot as plt # noqa: PLC0415
118
+ from matplotlib.animation import FuncAnimation # noqa: PLC0415
119
+
120
+ kwargs_original = kwargs.copy()
121
+
122
+ if ax is None:
123
+ _, ax = plt.subplots()
124
+
125
+ N_frames = data["time"].size
126
+ global_min, global_max = compute_global_limits(data, min_percentile, max_percentile)
127
+
128
+ # Initialise plot and set y-limits for 1D data
129
+ if data.ndim == 2:
130
+ kwargs.setdefault("x", "X_Grid_mid")
131
+ plot = data.isel(time=0).plot(ax=ax, **kwargs)
132
+ ax.set_title(get_frame_title(data, 0, display_sdf_name, title))
133
+ ax.set_ylim(global_min, global_max)
134
+
135
+ # Initilise plot and set colour bar for 2D data
136
+ if data.ndim == 3:
137
+ kwargs["norm"] = plt.Normalize(vmin=global_min, vmax=global_max)
138
+ kwargs["add_colorbar"] = False
139
+ # Set default x and y coordinates for 2D data if not provided
140
+ kwargs.setdefault("x", "X_Grid_mid")
141
+ kwargs.setdefault("y", "Y_Grid_mid")
142
+
143
+ # Initialize the plot with the first timestep
144
+ plot = data.isel(time=0).plot(ax=ax, **kwargs)
145
+ ax.set_title(get_frame_title(data, 0, display_sdf_name, title))
146
+
147
+ # Add colorbar
148
+ if kwargs_original.get("add_colorbar", True):
149
+ long_name = data.attrs.get("long_name")
150
+ units = data.attrs.get("units")
151
+ plt.colorbar(plot, ax=ax, label=f"{long_name} [${units}$]")
152
+
153
+ # check if there is a moving window by finding NaNs in the data
154
+ move_window = np.isnan(np.sum(data.values))
155
+ if move_window:
156
+ window_boundaries = calculate_window_boundaries(data, kwargs.get("xlim", False))
157
+
158
+ def update(frame):
159
+ # Set the xlim for each frame in the case of a moving window
160
+ if move_window:
161
+ kwargs["xlim"] = window_boundaries[frame]
162
+
163
+ # Update plot for the new frame
164
+ ax.clear()
165
+
166
+ data.isel(time=frame).plot(ax=ax, **kwargs)
167
+ ax.set_title(get_frame_title(data, frame, display_sdf_name, title))
168
+
169
+ # Update y-limits for 1D data
170
+ if data.ndim == 2:
171
+ ax.set_ylim(global_min, global_max)
172
+
173
+ return FuncAnimation(
174
+ ax.get_figure(),
175
+ update,
176
+ frames=range(N_frames),
177
+ interval=1000 / fps,
178
+ repeat=True,
179
+ )
180
+
181
+
182
+ @xr.register_dataarray_accessor("epoch")
183
+ class EpochAccessor:
184
+ def __init__(self, xarray_obj):
185
+ self._obj = xarray_obj
186
+
187
+ def animate(self, *args, **kwargs) -> FuncAnimation:
188
+ """Generate animations of Epoch data.
189
+
190
+ Parameters
191
+ ----------
192
+ args
193
+ Positional arguments passed to :func:`generate_animation`.
194
+ kwargs
195
+ Keyword arguments passed to :func:`generate_animation`.
196
+
197
+ Examples
198
+ --------
199
+ >>> import xarray as xr
200
+ >>> from sdf_xarray import SDFPreprocess
201
+ >>> ds = xr.open_mfdataset("*.sdf", preprocess=SDFPreprocess())
202
+ >>> ani = ds["Electric_Field_Ey"].epoch.animate()
203
+ >>> ani.save("myfile.mp4")
204
+ """
205
+ return animate(self._obj, *args, **kwargs)
@@ -39,7 +39,7 @@ cdef class Block:
39
39
 
40
40
  @dataclasses.dataclass
41
41
  cdef class Variable(Block):
42
- units: tuple[str] | None
42
+ units: str | None
43
43
  mult: float | None
44
44
  grid: str | None
45
45
  grid_mid: str | None
@@ -0,0 +1,176 @@
1
+ Metadata-Version: 2.4
2
+ Name: sdf-xarray
3
+ Version: 0.3.2
4
+ Summary: Provides a backend for xarray to read SDF files as created by the EPOCH plasma PIC code.
5
+ Author-Email: Peter Hill <peter.hill@york.ac.uk>, Joel Adams <joel.adams@york.ac.uk>, Shaun Doherty <shaun.doherty@york.ac.uk>, Chris Herdman <chris.herdman@york.ac.uk>
6
+ License-Expression: BSD-3-Clause
7
+ Classifier: Development Status :: 5 - Production/Stable
8
+ Classifier: Intended Audience :: Science/Research
9
+ Classifier: Topic :: Scientific/Engineering
10
+ Classifier: Operating System :: OS Independent
11
+ Classifier: Programming Language :: Python
12
+ Classifier: Programming Language :: Python :: 3
13
+ Classifier: Programming Language :: Python :: 3.10
14
+ Classifier: Programming Language :: Python :: 3.11
15
+ Classifier: Programming Language :: Python :: 3.12
16
+ Classifier: Programming Language :: Python :: 3.13
17
+ Requires-Python: <3.14,>=3.10
18
+ Requires-Dist: numpy>=2.0.0
19
+ Requires-Dist: xarray>=2024.1.0
20
+ Requires-Dist: dask>=2024.7.1
21
+ Provides-Extra: docs
22
+ Requires-Dist: sphinx>=5.3; extra == "docs"
23
+ Requires-Dist: sphinx_autodoc_typehints>=1.19; extra == "docs"
24
+ Requires-Dist: sphinx-book-theme>=0.4.0rc1; extra == "docs"
25
+ Requires-Dist: sphinx-argparse-cli>=1.10.0; extra == "docs"
26
+ Requires-Dist: sphinx-inline-tabs; extra == "docs"
27
+ Requires-Dist: pickleshare; extra == "docs"
28
+ Requires-Dist: ipython; extra == "docs"
29
+ Requires-Dist: matplotlib; extra == "docs"
30
+ Requires-Dist: pint; extra == "docs"
31
+ Requires-Dist: pint-xarray; extra == "docs"
32
+ Requires-Dist: myst-parser; extra == "docs"
33
+ Provides-Extra: test
34
+ Requires-Dist: pytest>=3.3.0; extra == "test"
35
+ Requires-Dist: dask[complete]; extra == "test"
36
+ Requires-Dist: matplotlib; extra == "test"
37
+ Provides-Extra: lint
38
+ Requires-Dist: ruff; extra == "lint"
39
+ Provides-Extra: build
40
+ Requires-Dist: cibuildwheel[uv]; extra == "build"
41
+ Provides-Extra: jupyter
42
+ Requires-Dist: dask[diagnostics]; extra == "jupyter"
43
+ Requires-Dist: ipykernel>=6.29.5; extra == "jupyter"
44
+ Provides-Extra: pint
45
+ Requires-Dist: pint; extra == "pint"
46
+ Requires-Dist: pint-xarray; extra == "pint"
47
+ Description-Content-Type: text/markdown
48
+
49
+ # sdf-xarray
50
+
51
+ ![Dynamic TOML Badge](https://img.shields.io/badge/dynamic/toml?url=https%3A%2F%2Fraw.githubusercontent.com%2Fepochpic%2Fsdf-xarray%2Frefs%2Fheads%2Fmain%2Fpyproject.toml&query=%24.project.requires-python&label=python&logo=python)
52
+ [![Available on PyPI](https://img.shields.io/pypi/v/sdf-xarray?color=blue&logo=pypi)](https://pypi.org/project/sdf-xarray/)
53
+ [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.15351323.svg)](https://doi.org/10.5281/zenodo.15351323)
54
+ ![Build/Publish](https://github.com/epochpic/sdf-xarray/actions/workflows/build_publish.yml/badge.svg)
55
+ ![Tests](https://github.com/epochpic/sdf-xarray/actions/workflows/tests.yml/badge.svg)
56
+ [![Read the Docs](https://img.shields.io/readthedocs/sdf-xarray?logo=readthedocs&link=https%3A%2F%2Fsdf-xarray.readthedocs.io%2F)](https://sdf-xarray.readthedocs.io)
57
+ [![Formatted with black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/python/black)
58
+
59
+
60
+ sdf-xarray provides a backend for [xarray](https://xarray.dev) to read SDF files as created by
61
+ [EPOCH](https://epochpic.github.io) using the [SDF-C](https://github.com/epochpic/SDF_C) library.
62
+ Part of [BEAM](#broad-epoch-analysis-modules-beam) (Broad EPOCH Analysis Modules).
63
+
64
+ > [!IMPORTANT]
65
+ > To install this package make sure you are using one of the Python versions listed above.
66
+
67
+ ## Installation
68
+
69
+ Install from PyPI with:
70
+
71
+ ```bash
72
+ pip install sdf-xarray
73
+ ```
74
+
75
+ > [!NOTE]
76
+ > For use within jupyter notebooks, run this additional command after installation:
77
+ >
78
+ > ```bash
79
+ > pip install "sdf-xarray[jupyter]"
80
+ > ```
81
+
82
+ or from a local checkout:
83
+
84
+ ```bash
85
+ git clone https://github.com/epochpic/sdf-xarray.git
86
+ cd sdf-xarray
87
+ pip install .
88
+ ```
89
+
90
+ We recommend switching to [uv](https://docs.astral.sh/uv/) to manage packages.
91
+
92
+ ## Usage
93
+
94
+ ### Single file loading
95
+
96
+ ```python
97
+ import xarray as xr
98
+
99
+ df = xr.open_dataset("0010.sdf")
100
+
101
+ print(df["Electric_Field_Ex"])
102
+
103
+ # <xarray.DataArray 'Electric_Field_Ex' (X_x_px_deltaf_electron_beam: 16)> Size: 128B
104
+ # [16 values with dtype=float64]
105
+ # Coordinates:
106
+ # * X_x_px_deltaf_electron_beam (X_x_px_deltaf_electron_beam) float64 128B 1...
107
+ # Attributes:
108
+ # units: V/m
109
+ # full_name: "Electric Field/Ex"
110
+ ```
111
+
112
+ ### Multi-file loading
113
+
114
+ To open a whole simulation at once, pass `preprocess=sdf_xarray.SDFPreprocess()`
115
+ to `xarray.open_mfdataset`:
116
+
117
+ ```python
118
+ import xarray as xr
119
+ from sdf_xarray import SDFPreprocess
120
+
121
+ with xr.open_mfdataset("*.sdf", preprocess=SDFPreprocess()) as ds:
122
+ print(ds)
123
+
124
+ # Dimensions:
125
+ # time: 301, X_Grid_mid: 128, ...
126
+ # Coordinates: (9) ...
127
+ # Data variables: (18) ...
128
+ # Indexes: (9) ...
129
+ # Attributes: (22) ...
130
+ ```
131
+
132
+ `SDFPreprocess` checks that all the files are from the same simulation, and
133
+ ensures there's a `time` dimension so the files are correctly concatenated.
134
+
135
+ If your simulation has multiple `output` blocks so that not all variables are
136
+ output at every time step, then those variables will have `NaN` values at the
137
+ corresponding time points.
138
+
139
+ For more in depth documentation please visit: <https://sdf-xarray.readthedocs.io/>
140
+
141
+ ## Citing
142
+
143
+ If sdf-xarray contributes to a project that leads to publication, please acknowledge this by citing sdf-xarray. This can be done by clicking the "cite this repository" button located near the top right of this page.
144
+
145
+ ## Contributing
146
+
147
+ We welcome contributions to the BEAM ecosystem! Whether it's reporting issues, suggesting features, or submitting pull requests, your input helps improve these tools for the community.
148
+
149
+ ### How to Contribute
150
+
151
+ There are many ways to get involved:
152
+ - **Report bugs**: Found something not working as expected? Open an issue with as much detail as possible.
153
+ - **Request a feature**: Got an idea for a new feature or enhancement? Open a feature request on [GitHub Issues](https://github.com/epochpic/sdf-xarray/issues)!
154
+ - **Improve the documentation**: We aim to keep our docs clear and helpful—if something's missing or unclear, feel free to suggest edits.
155
+ - **Submit code changes**: Bug fixes, refactoring, or new features are welcome.
156
+
157
+
158
+ All code is automatically linted, formatted, and tested via GitHub Actions.
159
+
160
+ To run checks locally before opening a pull request, see [CONTRIBUTING.md](CONTRIBUTING.md) or [readthedocs documentation](https://sdf-xarray.readthedocs.io/en/latest/contributing.html)
161
+
162
+ ## Broad EPOCH Analysis Modules (BEAM)
163
+
164
+ ![BEAM logo](./BEAM.png)
165
+
166
+ **BEAM** is a collection of independent yet complementary open-source tools for analysing EPOCH simulations, designed to be modular so researchers can adopt only the components they require without being constrained by a rigid framework. In line with the **FAIR principles — Findable**, **Accessible**, **Interoperable**, and **Reusable** — each package is openly published with clear documentation and versioning (Findable), distributed via public repositories (Accessible), designed to follow common standards for data structures and interfaces (Interoperable), and includes licensing and metadata to support long-term use and adaptation (Reusable). The packages are as follows:
167
+
168
+ - [sdf-xarray](https://github.com/epochpic/sdf-xarray): Reading and processing SDF files and converting them to [xarray](https://docs.xarray.dev/en/stable/).
169
+ - [epydeck](https://github.com/epochpic/epydeck): Input deck reader and writer.
170
+ - [epyscan](https://github.com/epochpic/epyscan): Create campaigns over a given parameter space using various sampling methods.
171
+
172
+ ## PlasmaFAIR
173
+
174
+ ![PlasmaFAIR logo](PlasmaFAIR.svg)
175
+
176
+ Originally developed by [PlasmaFAIR](https://plasmafair.github.io), EPSRC Grant EP/V051822/1
@@ -6,18 +6,20 @@ include/SDFC_14.4.7/sdf_list_type.h,sha256=Quu8v0-SEsQuJpGtEZnm09tAyXqWNitx0sXl5
6
6
  include/SDFC_14.4.7/sdf_vector_type.h,sha256=dbKjhzRRsvhzrnTwVjtVlvnuisEnRMKY-vvdm94ok_Q,1595
7
7
  include/SDFC_14.4.7/stack_allocator.h,sha256=L7U9vmGiVSw3VQLIv9EzTaVq7JbFxs9aNonKStTkUSg,1335
8
8
  include/SDFC_14.4.7/uthash.h,sha256=rIyy_-ylY6S_7WaZCCC3VtvXaC9q37rFyA0f1U9xc4w,63030
9
- lib/SDFC_14.4.7/sdfc.lib,sha256=R6r98UjN1rB5A3lEsEvHZBWEMJZuKYv65-DOMjcfQZg,350320
9
+ lib/SDFC_14.4.7/sdfc.lib,sha256=VyuxkhB3q8QOeICxMhp3a7jpi7GXvHRmIwFKCSHSyrA,350158
10
10
  lib/SDFC_14.4.7/SDFCConfig.cmake,sha256=IOA1eusC-KvUK4LNTEiOAmEdaPH1ZvNvbYPgiG1oZio,802
11
11
  lib/SDFC_14.4.7/SDFCConfigVersion.cmake,sha256=pN7Qqyf04s3izw7PYQ0XK6imvmhaVegSdR_nEl3Ok_o,2830
12
12
  lib/SDFC_14.4.7/SDFCTargets-release.cmake,sha256=G4zdx5PyjePigeD_a6rmZAxbk7L8Nf0klUnV78Lm2fI,828
13
- lib/SDFC_14.4.7/SDFCTargets.cmake,sha256=mDv-06UXeV_otJxYv6kfZgiuFf9JR8cLNGCqj8bKaiI,4152
14
- sdf_xarray/__init__.py,sha256=npsy7MSw1Ak2NKnmAo99cBEUjUROUef6aH25teRxZRY,15976
15
- sdf_xarray/_version.py,sha256=Q5fzuLr4LVBz5FTk0Ll4Ll1i97odpmL7Ku_bYhCouQQ,427
13
+ lib/SDFC_14.4.7/SDFCTargets.cmake,sha256=OVt1Gm8n7Ew4fiTmA9yHoef3vIIGwsXUZfqeG9p9Bys,4152
14
+ sdf_xarray/__init__.py,sha256=obgAD4Aecvvpd8GkxLIAiIagSaY0bFVP2Q397N48_5g,24201
15
+ sdf_xarray/_version.py,sha256=bmLiJYnZTISDv_NDGANk6QDMSY0XTk0CwXXKhbOvW3Y,746
16
16
  sdf_xarray/csdf.pxd,sha256=ADPjAuHsodAvdOz96Z_XlFF7VL3KmVaXcTifWDP3rK0,4205
17
- sdf_xarray/sdf_interface.cp312-win_amd64.pyd,sha256=y34BwJHyO9G2uC758ZQTA8kubEVb6-lo5ZEDO-dQ2Ak,379392
18
- sdf_xarray/sdf_interface.pyx,sha256=3XRFlrC1e_HkJrU8-i3fMz8DlyUxZgt9wTx_QkGE_TQ,11982
19
- sdf_xarray-0.1.1.dist-info/METADATA,sha256=VisLbeYBFnDBqkCn3leCpU8m_ajmbBSzV8sklCZ3kpw,5925
20
- sdf_xarray-0.1.1.dist-info/WHEEL,sha256=GgB_RydHGtp7zP9kXrVRu7kuGtdM7WtO3JhH95Vv87o,106
21
- sdf_xarray-0.1.1.dist-info/entry_points.txt,sha256=gP7BIQpXNg6vIf7S7p-Rw_EJZTC1X50BsVTkK7dA7g0,57
22
- sdf_xarray-0.1.1.dist-info/licenses/LICENCE,sha256=dsqtZx65gUc4vyNA4JKHTelIFuzWf-HVNi0h1c-lXNI,1517
23
- sdf_xarray-0.1.1.dist-info/RECORD,,
17
+ sdf_xarray/dataset_accessor.py,sha256=TvnVMBefnT1d94Bkllhd-__O3ittzpaVjZKfze-3WQ4,2484
18
+ sdf_xarray/plotting.py,sha256=PnbEspR4XkA5SHkpoFKA2G7BYj5J3mVgR1TEeGol6Vw,7041
19
+ sdf_xarray/sdf_interface.cp312-win_amd64.pyd,sha256=08xrwtYkgFqyN5GCr4sV5QP3g0mOozAPMg1DCVAqZm8,360960
20
+ sdf_xarray/sdf_interface.pyx,sha256=PFC6upg14OZBqiGInLgBoxztIIKBk-HOh3WC9Ro4YUw,11975
21
+ sdf_xarray-0.3.2.dist-info/METADATA,sha256=xvADFsOdsd5EzaZbVYGOUgmEMe4RzrTDF9IbyijadqE,7624
22
+ sdf_xarray-0.3.2.dist-info/WHEEL,sha256=chqeLhPBtPdrOoreR34YMcofSk3yWDQhkrsDJ2n48LU,106
23
+ sdf_xarray-0.3.2.dist-info/entry_points.txt,sha256=gP7BIQpXNg6vIf7S7p-Rw_EJZTC1X50BsVTkK7dA7g0,57
24
+ sdf_xarray-0.3.2.dist-info/licenses/LICENCE,sha256=aHWuyELjtzIL1jTXFHTbI3tr9vyVyhnw3I9_QYPdEX8,1515
25
+ sdf_xarray-0.3.2.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: scikit-build-core 0.10.7
2
+ Generator: scikit-build-core 0.11.6
3
3
  Root-Is-Purelib: false
4
4
  Tag: cp312-cp312-win_amd64
5
5
 
@@ -1,4 +1,4 @@
1
- Copyright 2024, Peter Hill, Joel Adams, PlasmaFAIR team
1
+ Copyright 2024, Peter Hill, Joel Adams, epochpic team
2
2
 
3
3
  Redistribution and use in source and binary forms, with or without
4
4
  modification, are permitted provided that the following conditions are
@@ -1,179 +0,0 @@
1
- Metadata-Version: 2.1
2
- Name: sdf-xarray
3
- Version: 0.1.1
4
- Author-Email: Peter Hill <peter.hill@york.ac.uk>, Joel Adams <joel.adams@york.ac.uk>
5
- License: Copyright 2024, Peter Hill, Joel Adams, PlasmaFAIR team
6
-
7
- Redistribution and use in source and binary forms, with or without
8
- modification, are permitted provided that the following conditions are
9
- met:
10
-
11
- 1. Redistributions of source code must retain the above copyright
12
- notice, this list of conditions and the following disclaimer.
13
-
14
- 2. Redistributions in binary form must reproduce the above copyright
15
- notice, this list of conditions and the following disclaimer in the
16
- documentation and/or other materials provided with the distribution.
17
-
18
- 3. Neither the name of the copyright holder nor the names of its
19
- contributors may be used to endorse or promote products derived from
20
- this software without specific prior written permission.
21
-
22
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
23
- “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
24
- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
25
- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
26
- HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
27
- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
28
- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
29
- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
30
- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
31
- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
32
- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
33
- Requires-Python: >=3.10
34
- Requires-Dist: numpy>=2.0.0
35
- Requires-Dist: xarray>=2024.1.0
36
- Requires-Dist: dask>=2024.7.1
37
- Requires-Dist: cython>=3.0
38
- Requires-Dist: sphinx>=5.3; extra == "docs"
39
- Requires-Dist: sphinx_autodoc_typehints>=1.19; extra == "docs"
40
- Requires-Dist: sphinx-book-theme>=0.4.0rc1; extra == "docs"
41
- Requires-Dist: sphinx-argparse-cli>=1.10.0; extra == "docs"
42
- Requires-Dist: sphinx-inline-tabs; extra == "docs"
43
- Requires-Dist: pytest>=3.3.0; extra == "test"
44
- Requires-Dist: dask[complete]; extra == "test"
45
- Requires-Dist: ruff; extra == "lint"
46
- Requires-Dist: cibuildwheel[uv]; extra == "build"
47
- Provides-Extra: docs
48
- Provides-Extra: test
49
- Provides-Extra: lint
50
- Provides-Extra: build
51
- Description-Content-Type: text/markdown
52
-
53
- # sdf-xarray
54
-
55
- `sdf-xarray` provides a backend for [xarray](https://xarray.dev) to
56
- read SDF files as created by the [EPOCH](https://epochpic.github.io)
57
- plasma PIC code.
58
-
59
- `sdf-xarray` uses the [SDF-C](https://github.com/Warwick-Plasma/SDF_C) library.
60
-
61
- > [!IMPORTANT]
62
- > All variable names now use snake_case to align with Epoch’s `sdf_helper`
63
- > conventions. For example, `Electric Field/Ex` has been updated to
64
- > `Electric_Field_Ex`.
65
-
66
- ## Installation
67
-
68
- Install from PyPI with:
69
-
70
- ```bash
71
- pip install sdf-xarray
72
- ```
73
-
74
- or from a local checkout:
75
-
76
- ```bash
77
- git clone https://github.com/PlasmaFAIR/sdf-xarray.git
78
- cd sdf-xarray
79
- pip install .
80
- ```
81
-
82
- We recommend switching to [uv](https://docs.astral.sh/uv/) to manage packages.
83
-
84
- ## Usage
85
-
86
- `sdf-xarray` is a backend for xarray, and so is usable directly from
87
- xarray:
88
-
89
- ### Single file loading
90
-
91
- ```python
92
- import xarray as xr
93
-
94
- df = xr.open_dataset("0010.sdf")
95
-
96
- print(df["Electric_Field_Ex"])
97
-
98
- # <xarray.DataArray 'Electric_Field_Ex' (X_x_px_deltaf_electron_beam: 16)> Size: 128B
99
- # [16 values with dtype=float64]
100
- # Coordinates:
101
- # * X_x_px_deltaf_electron_beam (X_x_px_deltaf_electron_beam) float64 128B 1...
102
- # Attributes:
103
- # units: V/m
104
- # full_name: "Electric Field/Ex"
105
- ```
106
-
107
- ### Multi file loading
108
-
109
- To open a whole simulation at once, pass `preprocess=sdf_xarray.SDFPreprocess()`
110
- to `xarray.open_mfdataset`:
111
-
112
- ```python
113
- import xarray as xr
114
- from sdf_xarray import SDFPreprocess
115
-
116
- with xr.open_mfdataset("*.sdf", preprocess=SDFPreprocess()) as ds:
117
- print(ds)
118
-
119
- # Dimensions:
120
- # time: 301, X_Grid_mid: 128, ...
121
- # Coordinates: (9) ...
122
- # Data variables: (18) ...
123
- # Indexes: (9) ...
124
- # Attributes: (22) ...
125
- ```
126
-
127
- `SDFPreprocess` checks that all the files are from the same simulation, as
128
- ensures there's a `time` dimension so the files are correctly concatenated.
129
-
130
- If your simulation has multiple `output` blocks so that not all variables are
131
- output at every time step, then those variables will have `NaN` values at the
132
- corresponding time points.
133
-
134
- Alternatively, we can create a separate time dimensions for each `output` block
135
- (essentially) using `sdf_xarray.open_mfdataset` with `separate_times=True`:
136
-
137
- ```python
138
- from sdf_xarray import open_mfdataset
139
-
140
- with open_mfdataset("*.sdf", separate_times=True) as ds:
141
- print(ds)
142
-
143
- # Dimensions:
144
- # time0: 301, time1: 31, time2: 61, X_Grid_mid: 128, ...
145
- # Coordinates: (12) ...
146
- # Data variables: (18) ...
147
- # Indexes: (9) ...
148
- # Attributes: (22) ...
149
- ```
150
-
151
- This is better for memory consumption, at the cost of perhaps slightly less
152
- friendly comparisons between variables on different time coordinates.
153
-
154
- ### Reading particle data
155
-
156
- By default, particle data isn't kept as it takes up a lot of space. Pass
157
- `keep_particles=True` as a keyword argument to `open_dataset` (for single files)
158
- or `open_mfdataset` (for multiple files):
159
-
160
- ```python
161
- df = xr.open_dataset("0010.sdf", keep_particles=True)
162
- ```
163
-
164
- ### Loading SDF files directly
165
-
166
- For debugging, sometimes it's useful to see the raw SDF files:
167
-
168
- ```python
169
- from sdf_xarray import SDFFile
170
-
171
- with SDFFile("0010.sdf") as sdf_file:
172
- print(sdf_file.variables["Electric Field/Ex"])
173
-
174
- # Variable(_id='ex', name='Electric Field/Ex', dtype=dtype('float64'), ...
175
-
176
- print(sdf_file.variables["Electric Field/Ex"].data)
177
-
178
- # [ 0.00000000e+00 0.00000000e+00 0.00000000e+00 ... -4.44992788e+12 1.91704994e+13 0.00000000e+00]
179
- ```