sdf-xarray 0.2.5__tar.gz → 0.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (119)
  1. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/.github/workflows/build_publish.yml +1 -1
  2. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/CONTRIBUTING.md +7 -1
  3. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/PKG-INFO +11 -35
  4. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/README.md +3 -0
  5. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/conf.py +2 -2
  6. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/getting_started.rst +28 -9
  7. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/index.rst +1 -0
  8. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/key_functionality.rst +33 -7
  9. sdf_xarray-0.3.0/docs/known_issues.rst +9 -0
  10. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/unit_conversion.rst +2 -3
  11. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/pyproject.toml +7 -10
  12. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/src/sdf_xarray/__init__.py +85 -16
  13. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/src/sdf_xarray/_version.py +16 -3
  14. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/src/sdf_xarray/plotting.py +2 -2
  15. sdf_xarray-0.3.0/tests/example_two_probes_2D/0000.sdf +0 -0
  16. sdf_xarray-0.3.0/tests/example_two_probes_2D/0001.sdf +0 -0
  17. sdf_xarray-0.3.0/tests/example_two_probes_2D/0002.sdf +0 -0
  18. sdf_xarray-0.3.0/tests/example_two_probes_2D/input.deck +188 -0
  19. sdf_xarray-0.3.0/tests/test_basic.py +456 -0
  20. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/test_epoch_accessor.py +69 -19
  21. sdf_xarray-0.3.0/uv.lock +2605 -0
  22. sdf_xarray-0.2.5/tests/test_basic.py +0 -228
  23. sdf_xarray-0.2.5/uv.lock +0 -2164
  24. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/.github/workflows/black.yml +0 -0
  25. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/.github/workflows/lint.yml +0 -0
  26. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/.github/workflows/tests.yml +0 -0
  27. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/.gitignore +0 -0
  28. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/.gitmodules +0 -0
  29. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/.readthedocs.yaml +0 -0
  30. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/BEAM.png +0 -0
  31. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/CITATION.cff +0 -0
  32. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/CMakeLists.txt +0 -0
  33. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/LICENCE +0 -0
  34. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/PlasmaFAIR.svg +0 -0
  35. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/.gitignore +0 -0
  36. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/_templates/custom-class-template.rst +0 -0
  37. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/_templates/custom-module-template.rst +0 -0
  38. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/api.rst +0 -0
  39. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/contributing.rst +0 -0
  40. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/make.bat +0 -0
  41. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0000.sdf +0 -0
  42. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0001.sdf +0 -0
  43. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0002.sdf +0 -0
  44. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0003.sdf +0 -0
  45. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0004.sdf +0 -0
  46. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0005.sdf +0 -0
  47. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0006.sdf +0 -0
  48. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0007.sdf +0 -0
  49. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0008.sdf +0 -0
  50. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0009.sdf +0 -0
  51. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0010.sdf +0 -0
  52. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0011.sdf +0 -0
  53. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0012.sdf +0 -0
  54. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0013.sdf +0 -0
  55. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0014.sdf +0 -0
  56. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0015.sdf +0 -0
  57. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0016.sdf +0 -0
  58. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0017.sdf +0 -0
  59. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0018.sdf +0 -0
  60. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0019.sdf +0 -0
  61. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0020.sdf +0 -0
  62. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0021.sdf +0 -0
  63. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0022.sdf +0 -0
  64. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0023.sdf +0 -0
  65. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0024.sdf +0 -0
  66. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0025.sdf +0 -0
  67. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0026.sdf +0 -0
  68. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0027.sdf +0 -0
  69. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0028.sdf +0 -0
  70. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0029.sdf +0 -0
  71. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0030.sdf +0 -0
  72. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0031.sdf +0 -0
  73. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0032.sdf +0 -0
  74. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0033.sdf +0 -0
  75. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0034.sdf +0 -0
  76. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0035.sdf +0 -0
  77. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0036.sdf +0 -0
  78. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0037.sdf +0 -0
  79. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0038.sdf +0 -0
  80. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0039.sdf +0 -0
  81. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/0040.sdf +0 -0
  82. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/deck.status +0 -0
  83. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/epoch1d.dat +0 -0
  84. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/input.deck +0 -0
  85. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/normal.visit +0 -0
  86. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/docs/tutorial_dataset_1d/restart.visit +0 -0
  87. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/src/sdf_xarray/csdf.pxd +0 -0
  88. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/src/sdf_xarray/sdf_interface.pyx +0 -0
  89. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_array_no_grids/0000.sdf +0 -0
  90. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_array_no_grids/0001.sdf +0 -0
  91. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_array_no_grids/README.md +0 -0
  92. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_array_no_grids/input.deck +0 -0
  93. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_dist_fn/0000.sdf +0 -0
  94. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_dist_fn/0001.sdf +0 -0
  95. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_dist_fn/0002.sdf +0 -0
  96. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_dist_fn/input.deck +0 -0
  97. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_1D/0000.sdf +0 -0
  98. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_1D/0001.sdf +0 -0
  99. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_1D/0002.sdf +0 -0
  100. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_1D/0003.sdf +0 -0
  101. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_1D/0004.sdf +0 -0
  102. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_1D/0005.sdf +0 -0
  103. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_1D/0006.sdf +0 -0
  104. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_1D/0007.sdf +0 -0
  105. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_1D/0008.sdf +0 -0
  106. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_1D/0009.sdf +0 -0
  107. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_1D/0010.sdf +0 -0
  108. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_1D/README.md +0 -0
  109. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_1D/input.deck +0 -0
  110. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_2D_moving_window/0000.sdf +0 -0
  111. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_2D_moving_window/0001.sdf +0 -0
  112. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_2D_moving_window/0002.sdf +0 -0
  113. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_2D_moving_window/0003.sdf +0 -0
  114. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_2D_moving_window/0004.sdf +0 -0
  115. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_files_2D_moving_window/input.deck +0 -0
  116. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_mismatched_files/0000.sdf +0 -0
  117. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_mismatched_files/0001.sdf +0 -0
  118. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/example_mismatched_files/0002.sdf +0 -0
  119. {sdf_xarray-0.2.5 → sdf_xarray-0.3.0}/tests/test_cython.py +0 -0
.github/workflows/build_publish.yml

@@ -32,7 +32,7 @@ jobs:
  uv sync --python 3.12 --extra test --extra build --frozen
 
  - name: Build
- uses: pypa/cibuildwheel@v2.21.3
+ uses: pypa/cibuildwheel@v3.1.3
  env:
  CIBW_ARCHS_LINUX: auto
  CIBW_ARCHS_MACOS: x86_64 arm64
CONTRIBUTING.md

@@ -33,7 +33,7 @@ To run these tools locally, install the optional dependencies and run:
 
  ```bash
  pip install "sdf-xarray[lint]"
- ruff check
+ ruff check src tests
  ```
 
  ### Running and Adding Tests
@@ -68,6 +68,12 @@ pip install "sdf-xarray[docs]"
  cd docs
  make html
  ```
+
+ The documentation can be updated by changing any of the `*.rst` files located
+ in the main `docs` directory. The existing documentation hopefully includes most
+ of the snippets you'd need to write or update it, however if you are stuck
+ please don't hesitate to reach out.
+
  Every time you make changes to the documentation or add a new page, you must
  re-run the `make html` command to regenerate the HTML files.
 
PKG-INFO

@@ -1,47 +1,20 @@
- Metadata-Version: 2.2
+ Metadata-Version: 2.4
  Name: sdf-xarray
- Version: 0.2.5
+ Version: 0.3.0
  Summary: Provides a backend for xarray to read SDF files as created by the EPOCH plasma PIC code.
  Author-Email: Peter Hill <peter.hill@york.ac.uk>, Joel Adams <joel.adams@york.ac.uk>, Shaun Doherty <shaun.doherty@york.ac.uk>
- License: Copyright 2024, Peter Hill, Joel Adams, epochpic team
-
- Redistribution and use in source and binary forms, with or without
- modification, are permitted provided that the following conditions are
- met:
-
- 1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
- 2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
- 3. Neither the name of the copyright holder nor the names of its
- contributors may be used to endorse or promote products derived from
- this software without specific prior written permission.
-
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
- HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
+ License-Expression: BSD-3-Clause
+ Classifier: Development Status :: 5 - Production/Stable
+ Classifier: Intended Audience :: Science/Research
+ Classifier: Topic :: Scientific/Engineering
+ Classifier: Operating System :: OS Independent
  Classifier: Programming Language :: Python
  Classifier: Programming Language :: Python :: 3
  Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
- Classifier: Intended Audience :: Science/Research
- Classifier: Topic :: Scientific/Engineering
- Classifier: Operating System :: OS Independent
- Requires-Python: >=3.10
+ Requires-Python: <3.14,>=3.10
  Requires-Dist: numpy>=2.0.0
  Requires-Dist: xarray>=2024.1.0
  Requires-Dist: dask>=2024.7.1
@@ -88,6 +61,9 @@ sdf-xarray provides a backend for [xarray](https://xarray.dev) to read SDF files
  [EPOCH](https://epochpic.github.io) using the [SDF-C](https://github.com/epochpic/SDF_C) library.
  Part of [BEAM](#broad-epoch-analysis-modules-beam) (Broad EPOCH Analysis Modules).
 
+ > [!IMPORTANT]
+ > To install this package make sure you are using one of the Python versions listed above.
+
  ## Installation
 
  Install from PyPI with:
README.md

@@ -13,6 +13,9 @@ sdf-xarray provides a backend for [xarray](https://xarray.dev) to read SDF files
  [EPOCH](https://epochpic.github.io) using the [SDF-C](https://github.com/epochpic/SDF_C) library.
  Part of [BEAM](#broad-epoch-analysis-modules-beam) (Broad EPOCH Analysis Modules).
 
+ > [!IMPORTANT]
+ > To install this package make sure you are using one of the Python versions listed above.
+
  ## Installation
 
  Install from PyPI with:
docs/conf.py

@@ -8,9 +8,9 @@ from importlib.metadata import version as get_version
  from pathlib import Path
 
  with suppress(ImportError):
- import matplotlib
+ import matplotlib as mpl
 
- matplotlib.use("Agg")
+ mpl.use("Agg")
  # -- Project information -----------------------------------------------------
  # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
 
docs/getting_started.rst

@@ -7,6 +7,15 @@
  Installation
  ------------
 
+ .. |python_versions_pypi| image:: https://img.shields.io/pypi/pyversions/sdf-xarray.svg
+ :alt: Supported Python versions
+ :target: https://pypi.org/project/sdf-xarray/
+
+ .. important::
+
+ To install this package, ensure that you are using one of the supported Python
+ versions: |python_versions_pypi|
+
  Install sdf-xarray from PyPI with:
 
  .. code-block:: bash
@@ -28,7 +37,7 @@ Usage
  `xarray`. There are several ways to load SDF files:
 
  - To load a single file, use :func:`xarray.open_dataset`.
- - To load multiple files, use :func:`xarray.open_mfdataset` or :func:`sdf_xarray.open_mfdataset`.
+ - To load multiple files, use :func:`xarray.open_mfdataset` or :func:`sdf_xarray.open_mfdataset` (Recommended).
  - To access the raw contents of a single SDF file, use :func:`sdf_xarray.sdf_interface.SDFFile`.
 
  .. note::
@@ -42,21 +51,32 @@ Basic usage:
  .. ipython:: python
 
  import xarray as xr
+ import sdf_xarray as sdfxr
  with xr.open_dataset("tutorial_dataset_1d/0010.sdf") as df:
  print(df["Electric_Field_Ex"])
 
  Multi file loading
  ~~~~~~~~~~~~~~~~~~
 
- To open a whole simulation at once, pass
- ``preprocess=sdf_xarray.SDFPreprocess()`` to `xarray.open_mfdataset`:
+ To open a whole simulation's files at once use the :func:`sdf_xarray.open_mfdataset` function:
+
+ .. ipython:: python
+
+ sdfxr.open_mfdataset("tutorial_dataset_1d/*.sdf")
+
+ You can alternatively open the dataset using the xarray's :func:`xarray.open_mfdataset`
+ along with the ``preprocess=sdfxr.SDFPreprocess()``:
 
  .. ipython:: python
 
- from sdf_xarray import SDFPreprocess
- xr.open_mfdataset("tutorial_dataset_1d/*.sdf", preprocess=SDFPreprocess())
+ xr.open_mfdataset(
+ "tutorial_dataset_1d/*.sdf",
+ join="outer",
+ compat="no_conflicts",
+ preprocess=sdfxr.SDFPreprocess()
+ )
 
- `SDFPreprocess` checks that all the files are from the same simulation, and
+ :class:`sdf_xarray.SDFPreprocess` checks that all the files are from the same simulation, and
  ensures there's a ``time`` dimension so the files are correctly concatenated.
 
  If your simulation has multiple ``output`` blocks so that not all variables are
@@ -64,12 +84,11 @@ output at every time step, then those variables will have ``NaN`` values at the
  corresponding time points.
 
  Alternatively, we can create a separate time dimensions for each ``output``
- block using `sdf_xarray.open_mfdataset` with ``separate_times=True``:
+ block using :func:`sdf_xarray.open_mfdataset` with ``separate_times=True``:
 
  .. ipython:: python
 
- from sdf_xarray import open_mfdataset
- open_mfdataset("tutorial_dataset_1d/*.sdf", separate_times=True)
+ sdfxr.open_mfdataset("tutorial_dataset_1d/*.sdf", separate_times=True)
 
  This is better for memory consumption, at the cost of perhaps slightly less
  friendly comparisons between variables on different time coordinates.
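A minimal standalone sketch of the recommended multi-file workflow the updated docs describe, for readers following along outside the ipython sessions; the glob and variable name come from the tutorial dataset used above.

```python
# Sketch only: combine a whole simulation along a single "time" dimension.
import sdf_xarray as sdfxr

ds = sdfxr.open_mfdataset("tutorial_dataset_1d/*.sdf")

# Variables behave like ordinary xarray DataArrays, so normal selection works.
print(ds["Electric_Field_Ex"].isel(time=0))
```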
docs/index.rst

@@ -15,6 +15,7 @@ plasma PIC code.
  Getting Started <getting_started>
  Key Functionality <key_functionality>
  Unit Conversion <unit_conversion>
+ Known Issues <known_issues>
  Contributing <contributing>
 
  .. toctree::
docs/key_functionality.rst

@@ -6,17 +6,17 @@ Key Functionality
 
  .. ipython:: python
 
+ import xarray as xr
+ import sdf_xarray as sdfxr
  import matplotlib.pyplot as plt
  from IPython.display import display, HTML
- import xarray as xr
- from sdf_xarray import SDFFile, SDFPreprocess
 
  Loading SDF Files
  -----------------
  There are several ways to load SDF files:
 
  - To load a single file, use :func:`xarray.open_dataset`.
- - To load multiple files, use :func:`xarray.open_mfdataset` or :func:`sdf_xarray.open_mfdataset`.
+ - To load multiple files, use :func:`sdf_xarray.open_mfdataset` or :func:`xarray.open_mfdataset`.
  - To access the raw contents of a single SDF file, use :func:`sdf_xarray.sdf_interface.SDFFile`.
 
  .. note::
@@ -34,23 +34,49 @@ Loading a Single Raw SDF File
 
  .. ipython:: python
 
- with SDFFile("tutorial_dataset_1d/0010.sdf") as sdf_file:
+ with sdfxr.SDFFile("tutorial_dataset_1d/0010.sdf") as sdf_file:
  print(sdf_file.variables)
 
  Loading all SDF Files for a Simulation
  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
- When loading in all the files we have do some processing of the data
+ Multiple files can be loaded using one of two methods. The first of which
+ is by using the :func:`sdf_xarray.open_mfdataset`
+
+ .. ipython:: python
+
+ sdfxr.open_mfdataset("tutorial_dataset_1d/*.sdf")
+
+ Alternatively files can be loaded using :func:`xarray.open_mfdataset`
+ however when loading in all the files we have do some processing of the data
  so that we can correctly align it along the time dimension; This is
  done via the ``preprocess`` parameter.
 
  .. ipython:: python
 
- xr.open_mfdataset("tutorial_dataset_1d/*.sdf", preprocess=SDFPreprocess())
+ xr.open_mfdataset(
+ "tutorial_dataset_1d/*.sdf",
+ join="outer",
+ compat="no_conflicts",
+ preprocess=sdfxr.SDFPreprocess())
 
  Reading particle data
  ~~~~~~~~~~~~~~~~~~~~~
 
+ .. warning::
+ It is **not recommended** to use :func:`xarray.open_mfdataset` or
+ :func:`sdf_xarray.open_mfdataset` to load particle data from multiple
+ SDF outputs. The number of particles often varies between outputs,
+ which can lead to inconsistent array shapes that these functions
+ cannot handle. Instead, consider loading each file individually and
+ then concatenating them manually.
+
+ .. note::
+ When loading multiple probes from a single SDF file, you **must** use the
+ ``probe_names`` parameter to assign a unique name to each. For example,
+ use ``probe_names=["Front_Electron_Probe", "Back_Electron_Probe"]``.
+ Failing to do so will result in dimension name conflicts.
+
  By default, particle data isn't kept as it takes up a lot of space.
  Pass ``keep_particles=True`` as a keyword argument to
  :func:`xarray.open_dataset` (for single files) or :func:`xarray.open_mfdataset` (for
@@ -79,7 +105,7 @@ looking at when you call ``.values``
 
  .. ipython:: python
 
- ds = xr.open_mfdataset("tutorial_dataset_1d/*.sdf", preprocess=SDFPreprocess())
+ ds = sdfxr.open_mfdataset("tutorial_dataset_1d/*.sdf")
 
  ds["Electric_Field_Ex"]
 
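The new warning and note above suggest loading particle data one file at a time and naming probes explicitly. A hedged sketch of that per-file workflow; the file names are placeholders and the probe names follow the note's example.

```python
# Sketch only: open each output separately so differing particle counts never
# have to be aligned by open_mfdataset.
import xarray as xr

files = ["0000.sdf", "0001.sdf", "0002.sdf"]
probes = ["Front_Electron_Probe", "Back_Electron_Probe"]

datasets = [
    xr.open_dataset(f, keep_particles=True, probe_names=probes) for f in files
]

# Inspect (or manually concatenate) the per-file datasets as needed.
for ds in datasets:
    print(ds.sizes)
```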
docs/known_issues.rst (new file)

@@ -0,0 +1,9 @@
+ .. _sec-known-issues:
+
+ ============
+ Known Issues
+ ============
+
+ There are a couple of known 'quirks' in sdf-xarray:
+
+ - `Issue #57 <https://github.com/epochpic/sdf-xarray/issues/57>`_ Loading multiple SDF files with `open_mfdataset` can lead to out-of-memory errors. The issue is believed to stem from how the underlying `xarray` library handles coordinates, causing it to infer an excessively large array shape that requests far more memory than is needed. Due to the significant architectural changes required for a fix, the maintainers do not plan to resolve this. The recommended solution is to load the files individually or in smaller batches.
docs/unit_conversion.rst

@@ -38,8 +38,7 @@ import, the ``xarray.Dataset.pint`` accessor will not be initialised.
 
  .. ipython:: python
 
- import xarray as xr
- from sdf_xarray import SDFPreprocess
+ from sdf_xarray import open_mfdataset
  import pint_xarray
 
  In the following example we will extract the time-resolved total particle
@@ -72,7 +71,7 @@ be removed.
 
  .. ipython:: python
 
- with xr.open_mfdataset("tutorial_dataset_1d/*.sdf", preprocess=SDFPreprocess()) as ds:
+ with open_mfdataset("tutorial_dataset_1d/*.sdf") as ds:
  total_particle_energy = ds["Total_Particle_Energy_Electron"]
 
  total_particle_energy
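To illustrate where the ``pint_xarray`` import matters, a short hedged sketch continuing the snippet above; ``pint.quantify()`` and ``pint.to()`` are standard pint-xarray accessors rather than sdf-xarray API, and ``"kJ"`` is only an example target unit.

```python
# Sketch only: attach pint units from the "units" attribute, then convert.
import pint_xarray  # noqa: F401  (registers the .pint accessor)

from sdf_xarray import open_mfdataset

with open_mfdataset("tutorial_dataset_1d/*.sdf") as ds:
    total = ds["Total_Particle_Energy_Electron"].load()

total = total.pint.quantify()
print(total.pint.to("kJ"))
```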
pyproject.toml

@@ -10,30 +10,27 @@ build-backend = "scikit_build_core.build"
  [project]
  name = "sdf-xarray"
  dynamic = ["version"]
- license = { file = "LICENCE" }
+ license = "BSD-3-Clause"
  readme = "README.md"
  authors = [
  { name = "Peter Hill", email = "peter.hill@york.ac.uk" },
  { name = "Joel Adams", email = "joel.adams@york.ac.uk" },
  { name = "Shaun Doherty", email = "shaun.doherty@york.ac.uk" },
  ]
- requires-python = ">=3.10"
- dependencies = [
- "numpy>=2.0.0",
- "xarray>=2024.1.0",
- "dask>=2024.7.1",
- ]
+ requires-python = ">=3.10,<3.14"
+ dependencies = ["numpy>=2.0.0", "xarray>=2024.1.0", "dask>=2024.7.1"]
  description = "Provides a backend for xarray to read SDF files as created by the EPOCH plasma PIC code."
  classifiers = [
+ "Development Status :: 5 - Production/Stable",
+ "Intended Audience :: Science/Research",
+ "Topic :: Scientific/Engineering",
+ "Operating System :: OS Independent",
  "Programming Language :: Python",
  "Programming Language :: Python :: 3",
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
  "Programming Language :: Python :: 3.12",
  "Programming Language :: Python :: 3.13",
- "Intended Audience :: Science/Research",
- "Topic :: Scientific/Engineering",
- "Operating System :: OS Independent",
  ]
 
  [project.optional-dependencies]
src/sdf_xarray/__init__.py

@@ -2,12 +2,15 @@ import os
  import re
  from collections import Counter, defaultdict
  from collections.abc import Callable, Iterable
+ from importlib.metadata import version
  from itertools import product
+ from os import PathLike as os_PathLike
  from pathlib import Path
  from typing import ClassVar
 
  import numpy as np
  import xarray as xr
+ from packaging.version import Version
  from xarray.backends import AbstractDataStore, BackendArray, BackendEntrypoint
  from xarray.backends.file_manager import CachingFileManager
  from xarray.backends.locks import ensure_lock
@@ -21,6 +24,12 @@ import sdf_xarray.plotting # noqa: F401
 
  from .sdf_interface import Constant, SDFFile # type: ignore # noqa: PGH003
 
+ # TODO Remove this once the new kwarg options are fully implemented
+ if Version(version("xarray")) >= Version("2025.8.0"):
+ xr.set_options(use_new_combine_kwarg_defaults=True)
+
+ PathLike = str | os_PathLike
+
 
  def _rename_with_underscore(name: str) -> str:
  """A lot of the variable names have spaces, forward slashes and dashes in them, which
@@ -51,14 +60,32 @@ def _process_latex_name(variable_name: str) -> str:
  return variable_name
 
 
+ def _resolve_glob(path_glob: PathLike | Iterable[PathLike]):
+ """
+ Normalise input path_glob into a sorted list of absolute, resolved Path objects.
+ """
+
+ try:
+ p = Path(path_glob)
+ paths = list(p.parent.glob(p.name)) if p.name == "*.sdf" else list(p)
+ except TypeError:
+ paths = list({Path(p) for p in path_glob})
+
+ paths = sorted(p.resolve() for p in paths)
+ if not paths:
+ raise FileNotFoundError(f"No files matched pattern or input: {path_glob!r}")
+ return paths
+
+
  def combine_datasets(path_glob: Iterable | str, **kwargs) -> xr.Dataset:
  """Combine all datasets using a single time dimension"""
 
  return xr.open_mfdataset(
  path_glob,
- data_vars="minimal",
- coords="minimal",
- compat="override",
+ data_vars="all",
+ coords="different",
+ compat="no_conflicts",
+ join="outer",
  preprocess=SDFPreprocess(),
  **kwargs,
  )
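The new ``_resolve_glob`` helper means the public ``open_mfdataset`` accepts either a glob string or any iterable of paths. A hedged sketch of the two input forms it is written to accept; the directory name is illustrative.

```python
# Sketch only: the input forms normalised by _resolve_glob above.
from pathlib import Path

import sdf_xarray as sdfxr

# A glob string whose final component is exactly "*.sdf" is expanded
# relative to its parent directory.
ds_a = sdfxr.open_mfdataset("run_1d/*.sdf")

# Any iterable of str/PathLike paths is deduplicated, resolved and sorted.
files = [Path("run_1d/0000.sdf"), Path("run_1d/0001.sdf")]
ds_b = sdfxr.open_mfdataset(files)

# Either way, an empty match raises FileNotFoundError rather than silently
# returning an empty dataset.
```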
@@ -69,6 +96,7 @@ def open_mfdataset(
  *,
  separate_times: bool = False,
  keep_particles: bool = False,
+ probe_names: list[str] | None = None,
  ) -> xr.Dataset:
  """Open a set of EPOCH SDF files as one `xarray.Dataset`
 
@@ -98,20 +126,21 @@
  different output frequencies
  keep_particles :
  If ``True``, also load particle data (this may use a lot of memory!)
+ probe_names :
+ List of EPOCH probe names
  """
 
- # TODO: This is not very robust, look at how xarray.open_mfdataset does it
- if isinstance(path_glob, str):
- path_glob = Path().glob(path_glob)
-
- # Coerce to list because we might need to use the sequence multiple times
- path_glob = sorted(list(path_glob)) # noqa: C414
-
+ path_glob = _resolve_glob(path_glob)
  if not separate_times:
- return combine_datasets(path_glob, keep_particles=keep_particles)
+ return combine_datasets(
+ path_glob, keep_particles=keep_particles, probe_names=probe_names
+ )
 
- time_dims, var_times_map = make_time_dims(path_glob)
- all_dfs = [xr.open_dataset(f, keep_particles=keep_particles) for f in path_glob]
+ _, var_times_map = make_time_dims(path_glob)
+ all_dfs = [
+ xr.open_dataset(f, keep_particles=keep_particles, probe_names=probe_names)
+ for f in path_glob
+ ]
 
  for df in all_dfs:
  for da in df:
@@ -128,7 +157,12 @@
  )
 
  return xr.combine_by_coords(
- all_dfs, data_vars="minimal", combine_attrs="drop_conflicts"
+ all_dfs,
+ data_vars="all",
+ coords="different",
+ combine_attrs="drop_conflicts",
+ join="outer",
+ compat="no_conflicts",
  )
 
 
@@ -211,14 +245,23 @@ class SDFDataStore(AbstractDataStore):
  "drop_variables",
  "keep_particles",
  "lock",
+ "probe_names",
  )
 
- def __init__(self, manager, drop_variables=None, keep_particles=False, lock=None):
+ def __init__(
+ self,
+ manager,
+ drop_variables=None,
+ keep_particles=False,
+ lock=None,
+ probe_names=None,
+ ):
  self._manager = manager
  self._filename = self.ds.filename
  self.drop_variables = drop_variables
  self.keep_particles = keep_particles
  self.lock = ensure_lock(lock)
+ self.probe_names = probe_names
 
  @classmethod
  def open(
@@ -227,6 +270,7 @@ class SDFDataStore(AbstractDataStore):
  lock=None,
  drop_variables=None,
  keep_particles=False,
+ probe_names=None,
  ):
  if isinstance(filename, os.PathLike):
  filename = os.fspath(filename)
@@ -237,6 +281,7 @@
  lock=lock,
  drop_variables=drop_variables,
  keep_particles=keep_particles,
+ probe_names=probe_names,
  )
 
  def _acquire(self, needs_lock=True):
@@ -347,7 +392,28 @@ class SDFDataStore(AbstractDataStore):
 
  if value.is_point_data:
  # Point (particle) variables are 1D
- var_coords = (f"ID_{_process_grid_name(key, _grid_species_name)}",)
+
+ # Particle data does not maintain a fixed dimension size
+ # throughout the simulation. An example of a particle name comes
+ # in the form of `Particles/Px/Ion_H` which is then modified
+ # using `_process_grid_name()` into `Ion_H`. This is fine as the
+ # other components of the momentum (`Py`, `Pz`) will have the same
+ # size as they represent the same bunch of particles.
+
+ # Probes however have names in the form of `Electron_Front_Probe/Px`
+ # which are changed to just `Px`; this is fine when there is only one
+ # probe in the system but when there are multiple they will have
+ # conflicting sizes so we can't keep the names as simply `Px` so we
+ # instead set their dimension as the full name `Electron_Front_Probe_Px`.
+ is_probe_name_match = self.probe_names is not None and any(
+ name in key for name in self.probe_names
+ )
+ name_processor = (
+ _rename_with_underscore
+ if is_probe_name_match
+ else _grid_species_name
+ )
+ var_coords = (f"ID_{_process_grid_name(key, name_processor)}",)
  else:
  # These are DataArrays
 
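Based on the comment in the hunk above, supplying ``probe_names`` keeps each probe's full underscored name in its point-data dimension. A rough sketch of what that might look like; the variable and dimension names here are inferred from that comment, not guaranteed for every input deck.

```python
# Sketch only: expected dimension naming when probe_names is passed.
import xarray as xr

ds = xr.open_dataset(
    "0000.sdf",
    keep_particles=True,
    probe_names=["Electron_Front_Probe", "Electron_Back_Probe"],
)

# Each probe's components should get distinct dimensions, e.g. something like
# ("ID_Electron_Front_Probe_Px",) rather than a conflicting shared ("ID_Px",).
print(ds["Electron_Front_Probe_Px"].dims)
```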
@@ -414,6 +480,7 @@ class SDFEntrypoint(BackendEntrypoint):
  *,
  drop_variables=None,
  keep_particles=False,
+ probe_names=None,
  ):
  if isinstance(filename_or_obj, Path):
  # sdf library takes a filename only
@@ -424,6 +491,7 @@ class SDFEntrypoint(BackendEntrypoint):
  filename_or_obj,
  drop_variables=drop_variables,
  keep_particles=keep_particles,
+ probe_names=probe_names,
  )
  with close_on_error(store):
  return store.load()
@@ -432,6 +500,7 @@
  "filename_or_obj",
  "drop_variables",
  "keep_particles",
+ "probe_names",
  ]
 
  def guess_can_open(self, filename_or_obj):
src/sdf_xarray/_version.py

@@ -1,7 +1,14 @@
  # file generated by setuptools-scm
  # don't change, don't track in version control
 
- __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+ __all__ = [
+ "__version__",
+ "__version_tuple__",
+ "version",
+ "version_tuple",
+ "__commit_id__",
+ "commit_id",
+ ]
 
  TYPE_CHECKING = False
  if TYPE_CHECKING:
@@ -9,13 +16,19 @@ if TYPE_CHECKING:
  from typing import Union
 
  VERSION_TUPLE = Tuple[Union[int, str], ...]
+ COMMIT_ID = Union[str, None]
  else:
  VERSION_TUPLE = object
+ COMMIT_ID = object
 
  version: str
  __version__: str
  __version_tuple__: VERSION_TUPLE
  version_tuple: VERSION_TUPLE
+ commit_id: COMMIT_ID
+ __commit_id__: COMMIT_ID
 
- __version__ = version = '0.2.5'
- __version_tuple__ = version_tuple = (0, 2, 5)
+ __version__ = version = '0.3.0'
+ __version_tuple__ = version_tuple = (0, 3, 0)
+
+ __commit_id__ = commit_id = 'gcca942b3f'
src/sdf_xarray/plotting.py

@@ -114,8 +114,8 @@ def animate(
  --------
  >>> dataset["Derived_Number_Density_Electron"].epoch.animate()
  """
- import matplotlib.pyplot as plt
- from matplotlib.animation import FuncAnimation
+ import matplotlib.pyplot as plt # noqa: PLC0415
+ from matplotlib.animation import FuncAnimation # noqa: PLC0415
 
  kwargs_original = kwargs.copy()