sdf-xarray 0.3.2__tar.gz → 0.5.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (138) hide show
  1. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/.github/workflows/build_publish.yml +8 -1
  2. sdf_xarray-0.5.0/.github/workflows/lint.yml +30 -0
  3. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/.github/workflows/tests.yml +9 -3
  4. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/.gitignore +51 -0
  5. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/.readthedocs.yaml +10 -8
  6. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/CITATION.cff +4 -0
  7. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/CONTRIBUTING.md +56 -19
  8. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/PKG-INFO +26 -52
  9. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/README.md +22 -28
  10. sdf_xarray-0.5.0/docs/_static/force_render_dark_xarray_objects.css +7 -0
  11. sdf_xarray-0.5.0/docs/animation.rst +261 -0
  12. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/docs/conf.py +36 -12
  13. sdf_xarray-0.5.0/docs/getting_started.rst +106 -0
  14. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/docs/index.rst +1 -0
  15. sdf_xarray-0.5.0/docs/key_functionality.rst +312 -0
  16. sdf_xarray-0.5.0/docs/known_issues.rst +9 -0
  17. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/docs/unit_conversion.rst +67 -77
  18. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/pyproject.toml +31 -9
  19. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/src/sdf_xarray/__init__.py +274 -13
  20. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/src/sdf_xarray/_version.py +3 -3
  21. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/src/sdf_xarray/dataset_accessor.py +53 -2
  22. sdf_xarray-0.5.0/src/sdf_xarray/download.py +88 -0
  23. sdf_xarray-0.5.0/src/sdf_xarray/plotting.py +567 -0
  24. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/src/sdf_xarray/sdf_interface.pyx +4 -2
  25. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/tests/test_cython.py +14 -16
  26. sdf_xarray-0.3.2/tests/test_basic.py → sdf_xarray-0.5.0/tests/test_dataset.py +59 -56
  27. sdf_xarray-0.5.0/tests/test_datatree.py +313 -0
  28. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/tests/test_epoch_dataarray_accessor.py +29 -23
  29. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/tests/test_epoch_dataset_accessor.py +107 -9
  30. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/uv.lock +829 -673
  31. sdf_xarray-0.3.2/.github/workflows/black.yml +0 -40
  32. sdf_xarray-0.3.2/.github/workflows/lint.yml +0 -17
  33. sdf_xarray-0.3.2/docs/getting_started.rst +0 -116
  34. sdf_xarray-0.3.2/docs/key_functionality.rst +0 -181
  35. sdf_xarray-0.3.2/docs/known_issues.rst +0 -9
  36. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0000.sdf +0 -0
  37. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0001.sdf +0 -0
  38. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0002.sdf +0 -0
  39. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0003.sdf +0 -0
  40. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0004.sdf +0 -0
  41. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0005.sdf +0 -0
  42. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0006.sdf +0 -0
  43. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0007.sdf +0 -0
  44. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0008.sdf +0 -0
  45. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0009.sdf +0 -0
  46. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0010.sdf +0 -0
  47. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0011.sdf +0 -0
  48. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0012.sdf +0 -0
  49. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0013.sdf +0 -0
  50. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0014.sdf +0 -0
  51. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0015.sdf +0 -0
  52. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0016.sdf +0 -0
  53. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0017.sdf +0 -0
  54. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0018.sdf +0 -0
  55. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0019.sdf +0 -0
  56. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0020.sdf +0 -0
  57. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0021.sdf +0 -0
  58. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0022.sdf +0 -0
  59. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0023.sdf +0 -0
  60. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0024.sdf +0 -0
  61. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0025.sdf +0 -0
  62. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0026.sdf +0 -0
  63. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0027.sdf +0 -0
  64. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0028.sdf +0 -0
  65. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0029.sdf +0 -0
  66. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0030.sdf +0 -0
  67. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0031.sdf +0 -0
  68. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0032.sdf +0 -0
  69. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0033.sdf +0 -0
  70. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0034.sdf +0 -0
  71. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0035.sdf +0 -0
  72. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0036.sdf +0 -0
  73. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0037.sdf +0 -0
  74. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0038.sdf +0 -0
  75. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0039.sdf +0 -0
  76. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/0040.sdf +0 -0
  77. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/deck.status +0 -343
  78. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/epoch1d.dat +0 -45
  79. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/input.deck +0 -142
  80. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/normal.visit +0 -41
  81. sdf_xarray-0.3.2/docs/tutorial_dataset_1d/restart.visit +0 -1
  82. sdf_xarray-0.3.2/docs/tutorial_dataset_2d/0000.sdf +0 -0
  83. sdf_xarray-0.3.2/docs/tutorial_dataset_2d/0001.sdf +0 -0
  84. sdf_xarray-0.3.2/docs/tutorial_dataset_2d/0002.sdf +0 -0
  85. sdf_xarray-0.3.2/docs/tutorial_dataset_2d/0003.sdf +0 -0
  86. sdf_xarray-0.3.2/docs/tutorial_dataset_2d/0004.sdf +0 -0
  87. sdf_xarray-0.3.2/docs/tutorial_dataset_2d/0005.sdf +0 -0
  88. sdf_xarray-0.3.2/docs/tutorial_dataset_2d/input.deck +0 -65
  89. sdf_xarray-0.3.2/src/sdf_xarray/plotting.py +0 -205
  90. sdf_xarray-0.3.2/tests/example_array_no_grids/0000.sdf +0 -0
  91. sdf_xarray-0.3.2/tests/example_array_no_grids/0001.sdf +0 -0
  92. sdf_xarray-0.3.2/tests/example_array_no_grids/README.md +0 -9
  93. sdf_xarray-0.3.2/tests/example_array_no_grids/input.deck +0 -119
  94. sdf_xarray-0.3.2/tests/example_dist_fn/0000.sdf +0 -0
  95. sdf_xarray-0.3.2/tests/example_dist_fn/0001.sdf +0 -0
  96. sdf_xarray-0.3.2/tests/example_dist_fn/0002.sdf +0 -0
  97. sdf_xarray-0.3.2/tests/example_dist_fn/input.deck +0 -154
  98. sdf_xarray-0.3.2/tests/example_files_1D/0000.sdf +0 -0
  99. sdf_xarray-0.3.2/tests/example_files_1D/0001.sdf +0 -0
  100. sdf_xarray-0.3.2/tests/example_files_1D/0002.sdf +0 -0
  101. sdf_xarray-0.3.2/tests/example_files_1D/0003.sdf +0 -0
  102. sdf_xarray-0.3.2/tests/example_files_1D/0004.sdf +0 -0
  103. sdf_xarray-0.3.2/tests/example_files_1D/0005.sdf +0 -0
  104. sdf_xarray-0.3.2/tests/example_files_1D/0006.sdf +0 -0
  105. sdf_xarray-0.3.2/tests/example_files_1D/0007.sdf +0 -0
  106. sdf_xarray-0.3.2/tests/example_files_1D/0008.sdf +0 -0
  107. sdf_xarray-0.3.2/tests/example_files_1D/0009.sdf +0 -0
  108. sdf_xarray-0.3.2/tests/example_files_1D/0010.sdf +0 -0
  109. sdf_xarray-0.3.2/tests/example_files_1D/README.md +0 -9
  110. sdf_xarray-0.3.2/tests/example_files_1D/input.deck +0 -161
  111. sdf_xarray-0.3.2/tests/example_files_2D_moving_window/0000.sdf +0 -0
  112. sdf_xarray-0.3.2/tests/example_files_2D_moving_window/0001.sdf +0 -0
  113. sdf_xarray-0.3.2/tests/example_files_2D_moving_window/0002.sdf +0 -0
  114. sdf_xarray-0.3.2/tests/example_files_2D_moving_window/0003.sdf +0 -0
  115. sdf_xarray-0.3.2/tests/example_files_2D_moving_window/0004.sdf +0 -0
  116. sdf_xarray-0.3.2/tests/example_files_2D_moving_window/input.deck +0 -63
  117. sdf_xarray-0.3.2/tests/example_files_3D/0000.sdf +0 -0
  118. sdf_xarray-0.3.2/tests/example_files_3D/0001.sdf +0 -0
  119. sdf_xarray-0.3.2/tests/example_files_3D/input.deck +0 -54
  120. sdf_xarray-0.3.2/tests/example_mismatched_files/0000.sdf +0 -0
  121. sdf_xarray-0.3.2/tests/example_mismatched_files/0001.sdf +0 -0
  122. sdf_xarray-0.3.2/tests/example_mismatched_files/0002.sdf +0 -0
  123. sdf_xarray-0.3.2/tests/example_two_probes_2D/0000.sdf +0 -0
  124. sdf_xarray-0.3.2/tests/example_two_probes_2D/0001.sdf +0 -0
  125. sdf_xarray-0.3.2/tests/example_two_probes_2D/0002.sdf +0 -0
  126. sdf_xarray-0.3.2/tests/example_two_probes_2D/input.deck +0 -188
  127. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/.gitmodules +0 -0
  128. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/BEAM.png +0 -0
  129. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/CMakeLists.txt +0 -0
  130. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/LICENCE +0 -0
  131. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/PlasmaFAIR.svg +0 -0
  132. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/docs/.gitignore +0 -0
  133. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/docs/_templates/custom-class-template.rst +0 -0
  134. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/docs/_templates/custom-module-template.rst +0 -0
  135. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/docs/api.rst +0 -0
  136. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/docs/contributing.rst +0 -0
  137. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/docs/make.bat +0 -0
  138. {sdf_xarray-0.3.2 → sdf_xarray-0.5.0}/src/sdf_xarray/csdf.pxd +0 -0
@@ -19,6 +19,13 @@ jobs:
19
19
  fetch-depth: 0
20
20
  submodules: "recursive"
21
21
 
22
+ - name: Cache Zenodo datasets
23
+ uses: actions/cache@v4
24
+ with:
25
+ enableCrossOsArchive: true
26
+ path: ~/.cache/sdf_datasets
27
+ key: sdf-datasets-17991042
28
+
22
29
  - name: Setup uv
23
30
  id: setup-uv
24
31
  uses: astral-sh/setup-uv@v3
@@ -29,7 +36,7 @@ jobs:
29
36
  - name: Set up Python 3.12 and install dependencies
30
37
  run: |
31
38
  uv python install 3.12
32
- uv sync --python 3.12 --extra test --extra build --frozen
39
+ uv sync --python 3.12 --frozen
33
40
 
34
41
  - name: Build
35
42
  uses: pypa/cibuildwheel@v3.1.3
@@ -0,0 +1,30 @@
1
+ name: lint
2
+
3
+ on:
4
+ push:
5
+ paths:
6
+ - '**.py'
7
+ pull_request:
8
+ paths:
9
+ - '**.py'
10
+
11
+ jobs:
12
+ lint:
13
+ runs-on: ubuntu-latest
14
+ steps:
15
+ - uses: actions/checkout@v4
16
+ with:
17
+ fetch-depth: 0
18
+
19
+ # Providing 'args' prevents the action from running 'ruff check .'
20
+ # immediately after installing ruff.
21
+ - name: Install Ruff
22
+ uses: astral-sh/ruff-action@v3
23
+ with:
24
+ args: "--version"
25
+
26
+ - name: Check
27
+ run: ruff check --output-format=github src tests
28
+
29
+ - name: Format
30
+ run: ruff format --check --diff src tests
@@ -8,7 +8,7 @@ jobs:
8
8
  strategy:
9
9
  fail-fast: false
10
10
  matrix:
11
- python-version: ["3.10", "3.11", "3.12", "3.13"]
11
+ python-version: ["3.11", "3.12", "3.13", "3.14"]
12
12
 
13
13
  steps:
14
14
  - uses: actions/checkout@v4
@@ -18,7 +18,7 @@ jobs:
18
18
 
19
19
  - name: Setup uv
20
20
  id: setup-uv
21
- uses: astral-sh/setup-uv@v3
21
+ uses: astral-sh/setup-uv@v7
22
22
  with:
23
23
  enable-cache: true
24
24
 
@@ -28,7 +28,13 @@ jobs:
28
28
  - name: Set up Python ${{ matrix.python-version }} and install dependencies
29
29
  run: |
30
30
  uv python install ${{ matrix.python-version }}
31
- uv sync --python ${{ matrix.python-version }} --extra test --extra build --frozen
31
+ uv sync --python ${{ matrix.python-version }} --frozen
32
+
33
+ - name: Cache Zenodo datasets
34
+ uses: actions/cache@v4
35
+ with:
36
+ path: ~/.cache/sdf_datasets
37
+ key: sdf-datasets-17991042
32
38
 
33
39
  - name: Test with pytest
34
40
  run: uv run pytest
@@ -1,4 +1,6 @@
1
1
  # -*- mode: gitignore; -*-
2
+ # https://github.com/github/gitignore/blob/main/Global/Emacs.gitignore
3
+ # --------------------------------------------------------------------
2
4
  *~
3
5
  \#*\#
4
6
  /.emacs.desktop
@@ -47,6 +49,9 @@ flycheck_*.el
47
49
  # network security
48
50
  /network-security.data
49
51
 
52
+ # https://github.com/github/gitignore/blob/main/Global/Linux.gitignore
53
+ # --------------------------------------------------------------------
54
+
50
55
  *~
51
56
 
52
57
  # temporary files which can be created if a process still has a handle open of a deleted file
@@ -60,6 +65,10 @@ flycheck_*.el
60
65
 
61
66
  # .nfs files are created when an open file is removed but is still being accessed
62
67
  .nfs*
68
+
69
+ # https://github.com/github/gitignore/blob/main/Python.gitignore
70
+ # --------------------------------------------------------------
71
+
63
72
  # Byte-compiled / optimized / DLL files
64
73
  __pycache__/
65
74
  *.py[cod]
@@ -222,6 +231,37 @@ cython_debug/
222
231
  # and can be added to the global gitignore or merged into this file. For a more nuclear
223
232
  # option (not recommended) you can uncomment the following to ignore the entire idea folder.
224
233
  #.idea/
234
+
235
+ # Abstra
236
+ # Abstra is an AI-powered process automation framework.
237
+ # Ignore directories containing user credentials, local state, and settings.
238
+ # Learn more at https://abstra.io/docs
239
+ .abstra/
240
+
241
+ # Visual Studio Code
242
+ # Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
243
+ # that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
244
+ # and can be added to the global gitignore or merged into this file. However, if you prefer,
245
+ # you could uncomment the following to ignore the entire vscode folder
246
+ .vscode/
247
+
248
+ # Ruff stuff:
249
+ .ruff_cache/
250
+
251
+ # PyPI configuration file
252
+ .pypirc
253
+
254
+ # Marimo
255
+ marimo/_static/
256
+ marimo/_lsp/
257
+ __marimo__/
258
+
259
+ # Streamlit
260
+ .streamlit/secrets.toml
261
+
262
+ # https://github.com/github/gitignore/blob/main/CMake.gitignore
263
+ # ------------------------------------------------------------
264
+
225
265
  CMakeLists.txt.user
226
266
  CMakeCache.txt
227
267
  CMakeFiles
@@ -233,6 +273,11 @@ install_manifest.txt
233
273
  compile_commands.json
234
274
  CTestTestfile.cmake
235
275
  _deps
276
+ CMakeUserPresets.json
277
+
278
+ # https://github.com/github/gitignore/blob/main/C.gitignore
279
+ # ---------------------------------------------------------
280
+
236
281
  # Prerequisites
237
282
  *.d
238
283
 
@@ -286,5 +331,11 @@ Module.symvers
286
331
  Mkfile.old
287
332
  dkms.conf
288
333
 
334
+ # sdf-xarray specific ignores
335
+ # ---------------------------
336
+
289
337
  # Generated version file
290
338
  src/sdf_xarray/_version.py
339
+
340
+ # Downloaded doc tutorial datasets
341
+ docs/tutorial_*
@@ -16,6 +16,16 @@ build:
16
16
  # rust: "1.55"
17
17
  # golang: "1.17"
18
18
 
19
+ jobs:
20
+ pre_create_environment:
21
+ - asdf plugin add uv
22
+ - asdf install uv latest
23
+ - asdf global uv latest
24
+ create_environment:
25
+ - uv venv "${READTHEDOCS_VIRTUALENV_PATH}"
26
+ install:
27
+ - UV_PROJECT_ENVIRONMENT="${READTHEDOCS_VIRTUALENV_PATH}" uv sync --frozen --group docs
28
+
19
29
  # Build documentation in the docs/ directory with Sphinx
20
30
  sphinx:
21
31
  configuration: docs/conf.py
@@ -23,11 +33,3 @@ sphinx:
23
33
  # Optionally build your docs in additional formats such as PDF
24
34
  # formats:
25
35
  # - pdf
26
-
27
- # Optionally declare the Python requirements required to build your docs
28
- python:
29
- install:
30
- - method: pip
31
- path: .
32
- extra_requirements:
33
- - docs
@@ -20,5 +20,9 @@ authors:
20
20
  given-names: Chris
21
21
  orcid: 'https://orcid.org/0000-0002-5159-0130'
22
22
  affiliation: University of York
23
+ - family-names: Pattinson
24
+ given-names: Liam
25
+ orcid: 'https://orcid.org/0000-0001-8604-6904'
26
+ affiliation: University of York
23
27
  doi: 10.5281/zenodo.15351323
24
28
  date-released: '2024-07-25'
@@ -4,7 +4,7 @@ We welcome contributions to the BEAM ecosystem! Whether it's reporting issues,
4
4
  suggesting features, improving the documentation, or submitting pull requests,
5
5
  your input helps improve these tools for the community.
6
6
 
7
- ## How to Contribute
7
+ ## How to contribute
8
8
 
9
9
  There are many ways to get involved:
10
10
 
@@ -20,35 +20,69 @@ There are many ways to get involved:
20
20
 
21
21
  ## Code
22
22
 
23
+ ```bash
24
+ git clone --recursive https://github.com/epochpic/sdf-xarray.git
25
+ cd sdf-xarray
26
+ pip install .
27
+ ```
28
+
23
29
  ### Style
24
30
 
25
- We follow [PEP 8](https://peps.python.org/pep-0008/) and use the
26
- following tools:
31
+ We use [Ruff](https://docs.astral.sh/ruff/) to maintain code quality and
32
+ formatting. This can be installed locally via the `lint` dependency group:
27
33
 
28
- - [ruff](https://github.com/astral-sh/ruff) for linting
29
- - [black](https://black.readthedocs.io/en/stable/) for formatting
30
- - [isort](https://pycqa.github.io/isort/) for sorting imports
34
+ ```bash
35
+ pip install --group lint
36
+ ```
31
37
 
32
- To run these tools locally, install the optional dependencies and run:
38
+ Ruff can then be run with:
33
39
 
34
40
  ```bash
35
- pip install "sdf-xarray[lint]"
36
41
  ruff check src tests
37
42
  ```
38
43
 
39
- ### Running and Adding Tests
44
+ Alternatively, `uv` users can do this in one step with `uv run`:
45
+
46
+ ```bash
47
+ uv run ruff check src tests
48
+ ```
49
+
50
+ Many of the issues raised by Ruff can be fixed automatically:
51
+
52
+ ```bash
53
+ ruff check --fix src tests
54
+ ```
55
+
56
+ Ruff may also be used to format the code to a style similar to that enforced by
57
+ [Black](https://black.readthedocs.io/en/stable/), which (almost) matches the
58
+ [PEP-8 standard](https://peps.python.org/pep-0008/):
59
+
60
+ ```bash
61
+ ruff format src tests
62
+ ```
63
+
64
+ ### Running and adding tests
40
65
 
41
66
  We use [pytest](https://docs.pytest.org/en/stable/) to run tests.
42
67
  All new functionality should include relevant tests, placed in the `tests/`
43
68
  directory and following the existing structure.
44
69
 
70
+ When running the tests for the first time you will need an internet connection
71
+ in order to download the datasets.
72
+
45
73
  Before submitting code changes, ensure that all tests pass:
46
74
 
47
75
  ```bash
48
- pip install "sdf-xarray[test]"
76
+ pip install --group test
49
77
  pytest
50
78
  ```
51
79
 
80
+ Alternatively, `uv` users can use:
81
+
82
+ ```bash
83
+ uv run pytest
84
+ ```
85
+
52
86
  ## Documentation
53
87
 
54
88
  ### Style
@@ -59,12 +93,15 @@ When contributing to the documentation:
59
93
  - Follow the format of existing `.rst` files.
60
94
  - Link to external functions or tools when possible.
61
95
 
62
- ### Compiling and Adding Documentation
96
+ ### Compiling and adding documentation
97
+
98
+ When compiling the documentation for the first time you will need an internet
99
+ connection in order to download the datasets.
63
100
 
64
101
  To build the documentation locally, first install the required packages:
65
102
 
66
103
  ```bash
67
- pip install "sdf-xarray[docs]"
104
+ pip install --group docs
68
105
  cd docs
69
106
  make html
70
107
  ```
@@ -77,9 +114,9 @@ please don't hesitate to reach out.
77
114
  Every time you make changes to the documentation or add a new page, you must
78
115
  re-run the `make html` command to regenerate the HTML files.
79
116
 
80
- ### Previewing Documentation
117
+ ### Previewing documentation
81
118
 
82
- #### Using VS Code Extensions
119
+ #### Using VS Code extensions
83
120
 
84
121
  Once the html web pages have been made you can review them installing the
85
122
  [Live Server](https://marketplace.visualstudio.com/items/?itemName=ritwickdey.LiveServer)
@@ -87,22 +124,22 @@ VS Code extension. Navigate to the `_build/html` folder, right-click the
87
124
  `index.html`, and select **"Open with Live Server"**. This
88
125
  will open a live preview of the documentation in your web browser.
89
126
 
90
- #### Using a Simple Python Server
127
+ #### Using a simple Python server
91
128
 
92
129
  Alternatively, if you're not using VS Code, you can start a simple local server with Python:
93
130
 
94
131
  ```bash
95
- python -m http.server -d _build/htm
132
+ python -m http.server -d _build/html
96
133
  ```
97
134
 
98
135
  Then open http://localhost:8000 in your browser to view the documentation.
99
136
 
100
- ## Continuous Integration
137
+ ## Continuous integration
101
138
 
102
139
  All pull requests are automatically checked using GitHub Actions for:
103
140
 
104
- - Linting (`ruff`)
105
- - Formatting (`black` and `isort`)
141
+ - Linting and formatting (`ruff`)
106
142
  - Testing (`pytest`)
143
+ - Cross-platform building (`cibuildwheel`)
107
144
 
108
145
  These checks must pass before a pull request can be merged.
@@ -1,8 +1,8 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: sdf-xarray
3
- Version: 0.3.2
3
+ Version: 0.5.0
4
4
  Summary: Provides a backend for xarray to read SDF files as created by the EPOCH plasma PIC code.
5
- Author-Email: Peter Hill <peter.hill@york.ac.uk>, Joel Adams <joel.adams@york.ac.uk>, Shaun Doherty <shaun.doherty@york.ac.uk>, Chris Herdman <chris.herdman@york.ac.uk>
5
+ Author-Email: Peter Hill <peter.hill@york.ac.uk>, Joel Adams <joel.adams@york.ac.uk>, Shaun Doherty <shaun.doherty@york.ac.uk>, Chris Herdman <chris.herdman@york.ac.uk>, Liam Pattinson <liam.pattinson@york.ac.uk>
6
6
  License-Expression: BSD-3-Clause
7
7
  Classifier: Development Status :: 5 - Production/Stable
8
8
  Classifier: Intended Audience :: Science/Research
@@ -10,34 +10,14 @@ Classifier: Topic :: Scientific/Engineering
10
10
  Classifier: Operating System :: OS Independent
11
11
  Classifier: Programming Language :: Python
12
12
  Classifier: Programming Language :: Python :: 3
13
- Classifier: Programming Language :: Python :: 3.10
14
13
  Classifier: Programming Language :: Python :: 3.11
15
14
  Classifier: Programming Language :: Python :: 3.12
16
15
  Classifier: Programming Language :: Python :: 3.13
17
- Requires-Python: <3.14,>=3.10
16
+ Classifier: Programming Language :: Python :: 3.14
17
+ Requires-Python: <3.15,>=3.11
18
18
  Requires-Dist: numpy>=2.0.0
19
19
  Requires-Dist: xarray>=2024.1.0
20
20
  Requires-Dist: dask>=2024.7.1
21
- Provides-Extra: docs
22
- Requires-Dist: sphinx>=5.3; extra == "docs"
23
- Requires-Dist: sphinx_autodoc_typehints>=1.19; extra == "docs"
24
- Requires-Dist: sphinx-book-theme>=0.4.0rc1; extra == "docs"
25
- Requires-Dist: sphinx-argparse-cli>=1.10.0; extra == "docs"
26
- Requires-Dist: sphinx-inline-tabs; extra == "docs"
27
- Requires-Dist: pickleshare; extra == "docs"
28
- Requires-Dist: ipython; extra == "docs"
29
- Requires-Dist: matplotlib; extra == "docs"
30
- Requires-Dist: pint; extra == "docs"
31
- Requires-Dist: pint-xarray; extra == "docs"
32
- Requires-Dist: myst-parser; extra == "docs"
33
- Provides-Extra: test
34
- Requires-Dist: pytest>=3.3.0; extra == "test"
35
- Requires-Dist: dask[complete]; extra == "test"
36
- Requires-Dist: matplotlib; extra == "test"
37
- Provides-Extra: lint
38
- Requires-Dist: ruff; extra == "lint"
39
- Provides-Extra: build
40
- Requires-Dist: cibuildwheel[uv]; extra == "build"
41
21
  Provides-Extra: jupyter
42
22
  Requires-Dist: dask[diagnostics]; extra == "jupyter"
43
23
  Requires-Dist: ipykernel>=6.29.5; extra == "jupyter"
@@ -54,35 +34,28 @@ Description-Content-Type: text/markdown
54
34
  ![Build/Publish](https://github.com/epochpic/sdf-xarray/actions/workflows/build_publish.yml/badge.svg)
55
35
  ![Tests](https://github.com/epochpic/sdf-xarray/actions/workflows/tests.yml/badge.svg)
56
36
  [![Read the Docs](https://img.shields.io/readthedocs/sdf-xarray?logo=readthedocs&link=https%3A%2F%2Fsdf-xarray.readthedocs.io%2F)](https://sdf-xarray.readthedocs.io)
57
- [![Formatted with black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/python/black)
37
+ [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
58
38
 
59
39
 
60
40
  sdf-xarray provides a backend for [xarray](https://xarray.dev) to read SDF files as created by
61
41
  [EPOCH](https://epochpic.github.io) using the [SDF-C](https://github.com/epochpic/SDF_C) library.
62
42
  Part of [BEAM](#broad-epoch-analysis-modules-beam) (Broad EPOCH Analysis Modules).
63
43
 
44
+ ## Installation
45
+
64
46
  > [!IMPORTANT]
65
47
  > To install this package make sure you are using one of the Python versions listed above.
66
48
 
67
- ## Installation
68
-
69
49
  Install from PyPI with:
70
50
 
71
51
  ```bash
72
52
  pip install sdf-xarray
73
53
  ```
74
54
 
75
- > [!NOTE]
76
- > For use within jupyter notebooks, run this additional command after installation:
77
- >
78
- > ```bash
79
- > pip install "sdf-xarray[jupyter]"
80
- > ```
81
-
82
- or from a local checkout:
55
+ or download this code locally:
83
56
 
84
57
  ```bash
85
- git clone https://github.com/epochpic/sdf-xarray.git
58
+ git clone --recursive https://github.com/epochpic/sdf-xarray.git
86
59
  cd sdf-xarray
87
60
  pip install .
88
61
  ```
@@ -91,6 +64,9 @@ We recommend switching to [uv](https://docs.astral.sh/uv/) to manage packages.
91
64
 
92
65
  ## Usage
93
66
 
67
+ Below are some simple examples to get you started. Please read the full
68
+ documentation at <https://sdf-xarray.readthedocs.io>.
69
+
94
70
  ### Single file loading
95
71
 
96
72
  ```python
@@ -111,15 +87,22 @@ print(df["Electric_Field_Ex"])
111
87
 
112
88
  ### Multi-file loading
113
89
 
114
- To open a whole simulation at once, pass `preprocess=sdf_xarray.SDFPreprocess()`
115
- to `xarray.open_mfdataset`:
90
+ You can open all the SDF files for a given simulation by calling the `open_mfdataset`
91
+ function from `sdf_xarray`. This will additionally add a time dimension using the `"time"`
92
+ value stored in each file's attributes.
93
+
94
+ > [!IMPORTANT]
95
+ > If your simulation has multiple `output` blocks so that not all variables are
96
+ > output at every time step, then at the timesteps where those variables are not
97
+ > present they will have have a value of nan. To clean your dataset by removing
98
+ > these nan values we suggest using the `xarray.DataArray.dropna` function or
99
+ > loading sparse data along separate time dimensions using `separate_times=True`.
116
100
 
117
101
  ```python
118
- import xarray as xr
119
- from sdf_xarray import SDFPreprocess
102
+ from sdf_xarray import open_mfdataset
120
103
 
121
- with xr.open_mfdataset("*.sdf", preprocess=SDFPreprocess()) as ds:
122
- print(ds)
104
+ ds = open_mfdataset("*.sdf")
105
+ print(ds)
123
106
 
124
107
  # Dimensions:
125
108
  # time: 301, X_Grid_mid: 128, ...
@@ -129,15 +112,6 @@ with xr.open_mfdataset("*.sdf", preprocess=SDFPreprocess()) as ds:
129
112
  # Attributes: (22) ...
130
113
  ```
131
114
 
132
- `SDFPreprocess` checks that all the files are from the same simulation, as
133
- ensures there's a `time` dimension so the files are correctly concatenated.
134
-
135
- If your simulation has multiple `output` blocks so that not all variables are
136
- output at every time step, then those variables will have `NaN` values at the
137
- corresponding time points.
138
-
139
- For more in depth documentation please visit: <https://sdf-xarray.readthedocs.io/>
140
-
141
115
  ## Citing
142
116
 
143
117
  If sdf-xarray contributes to a project that leads to publication, please acknowledge this by citing sdf-xarray. This can be done by clicking the "cite this repository" button located near the top right of this page.
@@ -173,4 +147,4 @@ To run checks locally before opening a pull request, see [CONTRIBUTING.md](CONTR
173
147
 
174
148
  ![PlasmaFAIR logo](PlasmaFAIR.svg)
175
149
 
176
- Originally developed by [PlasmaFAIR](https://plasmafair.github.io), EPSRC Grant EP/V051822/1
150
+ Originally developed by [PlasmaFAIR](https://plasmafair.github.io), EPSRC Grant EP/V051822/1
@@ -6,35 +6,28 @@
6
6
  ![Build/Publish](https://github.com/epochpic/sdf-xarray/actions/workflows/build_publish.yml/badge.svg)
7
7
  ![Tests](https://github.com/epochpic/sdf-xarray/actions/workflows/tests.yml/badge.svg)
8
8
  [![Read the Docs](https://img.shields.io/readthedocs/sdf-xarray?logo=readthedocs&link=https%3A%2F%2Fsdf-xarray.readthedocs.io%2F)](https://sdf-xarray.readthedocs.io)
9
- [![Formatted with black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/python/black)
9
+ [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
10
10
 
11
11
 
12
12
  sdf-xarray provides a backend for [xarray](https://xarray.dev) to read SDF files as created by
13
13
  [EPOCH](https://epochpic.github.io) using the [SDF-C](https://github.com/epochpic/SDF_C) library.
14
14
  Part of [BEAM](#broad-epoch-analysis-modules-beam) (Broad EPOCH Analysis Modules).
15
15
 
16
+ ## Installation
17
+
16
18
  > [!IMPORTANT]
17
19
  > To install this package make sure you are using one of the Python versions listed above.
18
20
 
19
- ## Installation
20
-
21
21
  Install from PyPI with:
22
22
 
23
23
  ```bash
24
24
  pip install sdf-xarray
25
25
  ```
26
26
 
27
- > [!NOTE]
28
- > For use within jupyter notebooks, run this additional command after installation:
29
- >
30
- > ```bash
31
- > pip install "sdf-xarray[jupyter]"
32
- > ```
33
-
34
- or from a local checkout:
27
+ or download this code locally:
35
28
 
36
29
  ```bash
37
- git clone https://github.com/epochpic/sdf-xarray.git
30
+ git clone --recursive https://github.com/epochpic/sdf-xarray.git
38
31
  cd sdf-xarray
39
32
  pip install .
40
33
  ```
@@ -43,6 +36,9 @@ We recommend switching to [uv](https://docs.astral.sh/uv/) to manage packages.
43
36
 
44
37
  ## Usage
45
38
 
39
+ Below are some simple examples to get you started. Please read the full
40
+ documentation at <https://sdf-xarray.readthedocs.io>.
41
+
46
42
  ### Single file loading
47
43
 
48
44
  ```python
@@ -63,15 +59,22 @@ print(df["Electric_Field_Ex"])
63
59
 
64
60
  ### Multi-file loading
65
61
 
66
- To open a whole simulation at once, pass `preprocess=sdf_xarray.SDFPreprocess()`
67
- to `xarray.open_mfdataset`:
62
+ You can open all the SDF files for a given simulation by calling the `open_mfdataset`
63
+ function from `sdf_xarray`. This will additionally add a time dimension using the `"time"`
64
+ value stored in each file's attributes.
65
+
66
+ > [!IMPORTANT]
67
+ > If your simulation has multiple `output` blocks so that not all variables are
68
+ > output at every time step, then at the timesteps where those variables are not
69
+ > present they will have a value of NaN. To clean your dataset by removing
70
+ > these nan values we suggest using the `xarray.DataArray.dropna` function or
71
+ > loading sparse data along separate time dimensions using `separate_times=True`.
68
72
 
69
73
  ```python
70
- import xarray as xr
71
- from sdf_xarray import SDFPreprocess
74
+ from sdf_xarray import open_mfdataset
72
75
 
73
- with xr.open_mfdataset("*.sdf", preprocess=SDFPreprocess()) as ds:
74
- print(ds)
76
+ ds = open_mfdataset("*.sdf")
77
+ print(ds)
75
78
 
76
79
  # Dimensions:
77
80
  # time: 301, X_Grid_mid: 128, ...
@@ -81,15 +84,6 @@ with xr.open_mfdataset("*.sdf", preprocess=SDFPreprocess()) as ds:
81
84
  # Attributes: (22) ...
82
85
  ```
83
86
 
84
- `SDFPreprocess` checks that all the files are from the same simulation, as
85
- ensures there's a `time` dimension so the files are correctly concatenated.
86
-
87
- If your simulation has multiple `output` blocks so that not all variables are
88
- output at every time step, then those variables will have `NaN` values at the
89
- corresponding time points.
90
-
91
- For more in depth documentation please visit: <https://sdf-xarray.readthedocs.io/>
92
-
93
87
  ## Citing
94
88
 
95
89
  If sdf-xarray contributes to a project that leads to publication, please acknowledge this by citing sdf-xarray. This can be done by clicking the "cite this repository" button located near the top right of this page.
@@ -125,4 +119,4 @@ To run checks locally before opening a pull request, see [CONTRIBUTING.md](CONTR
125
119
 
126
120
  ![PlasmaFAIR logo](PlasmaFAIR.svg)
127
121
 
128
- Originally developed by [PlasmaFAIR](https://plasmafair.github.io), EPSRC Grant EP/V051822/1
122
+ Originally developed by [PlasmaFAIR](https://plasmafair.github.io), EPSRC Grant EP/V051822/1
@@ -0,0 +1,7 @@
1
+ /* workaround Pydata Sphinx theme using light colors for widget cell outputs in dark-mode */
2
+ /* works for many widgets but not for Xarray html reprs */
3
+ /* https://github.com/pydata/pydata-sphinx-theme/issues/2189 */
4
+ html[data-theme="dark"] div.cell_output .text_html:has(div.xr-wrap) {
5
+ background-color: var(--pst-color-on-background) !important;
6
+ color: var(--pst-color-text-base) !important;
7
+ }