osiris-utils 1.1.10__py3-none-any.whl → 1.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- benchmarks/benchmark_hdf5_io.py +46 -0
- benchmarks/benchmark_load_all.py +54 -0
- docs/source/api/decks.rst +48 -0
- docs/source/api/postprocess.rst +66 -2
- docs/source/api/sim_diag.rst +1 -1
- docs/source/api/utilities.rst +1 -1
- docs/source/conf.py +2 -1
- docs/source/examples/example_Derivatives.md +78 -0
- docs/source/examples/example_FFT.md +152 -0
- docs/source/examples/example_InputDeck.md +148 -0
- docs/source/examples/example_Simulation_Diagnostic.md +213 -0
- docs/source/examples/quick_start.md +51 -0
- docs/source/examples.rst +14 -0
- docs/source/index.rst +8 -0
- examples/edited-deck.1d +1 -1
- examples/example_Derivatives.ipynb +24 -36
- examples/example_FFT.ipynb +44 -23
- examples/example_InputDeck.ipynb +24 -277
- examples/example_Simulation_Diagnostic.ipynb +27 -17
- examples/quick_start.ipynb +17 -1
- osiris_utils/__init__.py +10 -6
- osiris_utils/cli/__init__.py +6 -0
- osiris_utils/cli/__main__.py +85 -0
- osiris_utils/cli/export.py +199 -0
- osiris_utils/cli/info.py +156 -0
- osiris_utils/cli/plot.py +189 -0
- osiris_utils/cli/validate.py +247 -0
- osiris_utils/data/__init__.py +15 -0
- osiris_utils/data/data.py +41 -171
- osiris_utils/data/diagnostic.py +285 -274
- osiris_utils/data/simulation.py +20 -13
- osiris_utils/decks/__init__.py +4 -0
- osiris_utils/decks/decks.py +83 -8
- osiris_utils/decks/species.py +12 -9
- osiris_utils/postprocessing/__init__.py +28 -0
- osiris_utils/postprocessing/derivative.py +317 -106
- osiris_utils/postprocessing/fft.py +135 -24
- osiris_utils/postprocessing/field_centering.py +28 -14
- osiris_utils/postprocessing/heatflux_correction.py +39 -18
- osiris_utils/postprocessing/mft.py +10 -2
- osiris_utils/postprocessing/postprocess.py +8 -5
- osiris_utils/postprocessing/pressure_correction.py +29 -17
- osiris_utils/utils.py +26 -17
- osiris_utils/vis/__init__.py +3 -0
- osiris_utils/vis/plot3d.py +148 -0
- {osiris_utils-1.1.10.dist-info → osiris_utils-1.2.0.dist-info}/METADATA +55 -7
- {osiris_utils-1.1.10.dist-info → osiris_utils-1.2.0.dist-info}/RECORD +51 -34
- {osiris_utils-1.1.10.dist-info → osiris_utils-1.2.0.dist-info}/WHEEL +1 -1
- osiris_utils-1.2.0.dist-info/entry_points.txt +2 -0
- {osiris_utils-1.1.10.dist-info → osiris_utils-1.2.0.dist-info}/top_level.txt +1 -0
- osiris_utils/postprocessing/mft_for_gridfile.py +0 -55
- {osiris_utils-1.1.10.dist-info → osiris_utils-1.2.0.dist-info}/licenses/LICENSE.txt +0 -0
osiris_utils/cli/validate.py
ADDED
@@ -0,0 +1,247 @@
+"""Validate command - check OSIRIS file and simulation integrity."""
+
+import argparse
+import sys
+from pathlib import Path
+
+import h5py
+import numpy as np
+
+import osiris_utils as ou
+
+
+def register_parser(subparsers) -> None:
+    """Register the 'validate' subcommand parser."""
+    parser = subparsers.add_parser(
+        "validate",
+        help="Validate OSIRIS files and simulations",
+        description="Check integrity of OSIRIS data files and simulation structure",
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        epilog="""
+Examples:
+  osiris validate path/to/simulation    # Check entire simulation
+  osiris validate file.h5               # Check single file
+  osiris validate sim --check-missing   # Check for missing timesteps
+        """,
+    )
+
+    parser.add_argument(
+        "path",
+        type=str,
+        help="Path to OSIRIS simulation directory or HDF5 file",
+    )
+
+    parser.add_argument(
+        "--check-missing",
+        action="store_true",
+        help="Check for missing timesteps in diagnostics",
+    )
+
+    parser.add_argument(
+        "--strict",
+        action="store_true",
+        help="Fail on warnings, not just errors",
+    )
+
+    parser.set_defaults(func=run)
+
+
+def run(args: argparse.Namespace) -> int:
+    """Execute the validate command."""
+    path = Path(args.path)
+
+    if not path.exists():
+        print(f"Error: Path '{path}' does not exist", file=sys.stderr)
+        return 1
+
+    errors = 0
+    warnings = 0
+
+    try:
+        if path.is_file():
+            e, w = validate_file(path)
+            errors += e
+            warnings += w
+        elif path.is_dir():
+            e, w = validate_simulation(path, args.check_missing)
+            errors += e
+            warnings += w
+        else:
+            print(f"Error: '{path}' is not a file or directory", file=sys.stderr)
+            return 1
+
+        # Summary
+        separator = '=' * 60
+        print(f"\n{separator}")
+        print("Validation Summary:")
+        print(f"  Errors:   {errors}")
+        print(f"  Warnings: {warnings}")
+
+        if errors > 0:
+            print("\nValidation FAILED")
+            return 1
+        elif warnings > 0 and args.strict:
+            print("\nValidation FAILED (strict mode)")
+            return 1
+        else:
+            print("\nValidation PASSED")
+            return 0
+
+    except Exception as e:
+        print(f"Error during validation: {e}", file=sys.stderr)
+        return 1
+
+
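The subcommand module only exposes `register_parser()` and `run()`; the actual dispatch lives in `osiris_utils/cli/__main__.py`, which is added in this release but not shown here. A minimal sketch of how such a dispatcher could wire the pieces together with plain argparse; everything except `register_parser`/`run` is illustrative, not the package's actual code:

    import argparse
    import sys

    from osiris_utils.cli import validate  # the module shown in this diff

    def main(argv=None) -> int:
        # Hypothetical top-level parser; the real __main__.py may differ.
        parser = argparse.ArgumentParser(prog="osiris")
        subparsers = parser.add_subparsers(dest="command", required=True)
        validate.register_parser(subparsers)  # adds the "validate" subcommand
        args = parser.parse_args(argv)
        return args.func(args)                # set_defaults(func=run) dispatches here

    if __name__ == "__main__":
        sys.exit(main())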
+def validate_file(filepath: Path) -> tuple[int, int]:
+    """
+    Validate a single HDF5 file.
+
+    Returns
+    -------
+    tuple[int, int]
+        (error_count, warning_count)
+    """
+    errors = 0
+    warnings = 0
+
+    print(f"Validating file: {filepath}")
+
+    # Check if file can be opened
+    try:
+        with h5py.File(filepath, 'r') as f:
+            print("File is a valid HDF5 file")
+
+            # Check for required datasets
+            if 'AXIS' not in f:
+                print("Warning: Missing AXIS dataset")
+                warnings += 1
+
+        # Try to load with osiris_utils
+        try:
+            data = ou.OsirisGridFile(str(filepath))
+            print("File loads successfully with osiris_utils")
+
+            # Check data integrity
+            if data.data.size == 0:
+                print("Error: Data array is empty")
+                errors += 1
+            else:
+                print(f"Data array is non-empty (shape: {data.data.shape})")
+
+            # Check for NaN or Inf
+            if not np.isfinite(data.data).all():
+                print("Warning: Data contains NaN or Inf values")
+                warnings += 1
+            else:
+                print("Data contains only finite values")
+
+        except Exception as e:
+            print(f"Error: Cannot load with osiris_utils: {e}")
+            errors += 1
+
+    except OSError as e:
+        print(f"Error: Cannot open file: {e}")
+        errors += 1
+
+    return errors, warnings
+
+
+def validate_simulation(simpath: Path, check_missing: bool = False) -> tuple[int, int]:
+    """
+    Validate an entire simulation directory.
+
+    Returns
+    -------
+    tuple[int, int]
+        (error_count, warning_count)
+    """
+    errors = 0
+    warnings = 0
+
+    print(f"Validating simulation: {simpath}")
+
+    # Check for input deck
+    input_deck = None
+    for candidate in ["os-stdin", "input.deck", "deck.in"]:
+        deck_path = simpath / candidate
+        if deck_path.exists():
+            input_deck = deck_path
+            break
+
+    if input_deck is None:
+        print("Error: No input deck found")
+        errors += 1
+        return errors, warnings
+    else:
+        print(f"Found input deck: {input_deck.name}")
+
+    # Try to load simulation
+    try:
+        sim = ou.Simulation(str(input_deck))
+        print("Simulation loads successfully")
+        print(f"Found {len(sim.species)} species: {', '.join(sim.species)}")
+    except Exception as e:
+        print(f"Error: Cannot load simulation: {e}")
+        errors += 1
+        return errors, warnings
+
+    # Check MS directory
+    ms_path = simpath / "MS"
+    if not ms_path.exists():
+        print("Warning: MS directory not found")
+        warnings += 1
+        return errors, warnings
+    else:
+        print("Found MS directory")
+
+    # Check diagnostic directories
+    diag_types = ["FLD", "DENSITY", "CURRENT", "PHA"]
+    for diag_type in diag_types:
+        diag_path = ms_path / diag_type
+        if diag_path.exists():
+            e, w = validate_diagnostic_dir(diag_path, check_missing)
+            errors += e
+            warnings += w
+
+    return errors, warnings
+
+
+def validate_diagnostic_dir(diag_path: Path, check_missing: bool) -> tuple[int, int]:
+    """Validate a diagnostic directory."""
+    errors = 0
+    warnings = 0
+
+    print(f"\n  Checking {diag_path.name}:")
+
+    # Recursively find all h5 files
+    for subdir in diag_path.rglob("*"):
+        if subdir.is_dir():
+            h5_files = sorted(list(subdir.glob("*.h5")))
+            if h5_files:
+                rel_path = subdir.relative_to(diag_path)
+                print(f"    {rel_path}: {len(h5_files)} files")
+
+                if check_missing:
+                    # Check for sequential numbering
+                    actual_indices = set()
+
+                    for f in h5_files:
+                        # Extract iteration number from filename
+                        try:
+                            # Typical format: name-123456.h5
+                            iter_str = f.stem.split('-')[-1]
+                            actual_indices.add(int(iter_str))
+                        except (ValueError, IndexError):
+                            pass
+
+                    if len(actual_indices) > 0:
+                        min_idx = min(actual_indices)
+                        max_idx = max(actual_indices)
+                        expected_sequential = set(range(min_idx, max_idx + 1))
+                        missing = expected_sequential - actual_indices
+
+                        if missing:
+                            print(f"Warning: Missing iterations: {sorted(missing)[:10]}{'...' if len(missing) > 10 else ''}")
+                            warnings += 1
+
+    return errors, warnings
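The `--check-missing` logic above treats the numeric suffix of each `*.h5` file as its dump index and reports any gap in the resulting integer sequence. The same check is easy to reuse outside the CLI; a standalone sketch (the helper name is ours, not part of the package):

    from pathlib import Path

    def find_missing_dumps(directory: str, pattern: str = "*.h5") -> list[int]:
        """Return dump indices absent from an otherwise consecutive series."""
        indices = set()
        for f in Path(directory).glob(pattern):
            tail = f.stem.split("-")[-1]  # e.g. "e1-000042" -> "000042"
            if tail.isdigit():
                indices.add(int(tail))
        if not indices:
            return []
        return sorted(set(range(min(indices), max(indices) + 1)) - indices)

    # e.g. find_missing_dumps("MS/FLD/e1") -> [17, 18] if those dumps were lost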
osiris_utils/data/__init__.py
CHANGED
@@ -0,0 +1,15 @@
+from .data import OsirisData, OsirisGridFile, OsirisHIST, OsirisRawFile, OsirisTrackFile
+from .diagnostic import Diagnostic, which_quantities
+from .simulation import Simulation, Species_Handler
+
+__all__ = [
+    "OsirisData",
+    "OsirisGridFile",
+    "OsirisRawFile",
+    "OsirisHIST",
+    "OsirisTrackFile",
+    "Diagnostic",
+    "which_quantities",
+    "Simulation",
+    "Species_Handler",
+]
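With the new re-exports in place, the data classes can be imported directly from `osiris_utils.data` (and, given the parallel changes to the top-level `osiris_utils/__init__.py`, most likely from the package root as well). A short usage sketch with a hypothetical file path:

    from osiris_utils.data import OsirisGridFile, Simulation

    grid = OsirisGridFile("MS/FLD/e1/e1-000001.h5")  # hypothetical OSIRIS output
    print(grid.data.shape, grid.dx)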
osiris_utils/data/data.py
CHANGED
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from typing import Literal
 
 import h5py
@@ -6,6 +8,14 @@ import pandas as pd
 
 from osiris_utils.utils import create_file_tags
 
+__all__ = [
+    "OsirisData",
+    "OsirisGridFile",
+    "OsirisRawFile",
+    "OsirisHIST",
+    "OsirisTrackFile",
+]
+
 
 class OsirisData:
     """
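The new `from __future__ import annotations` line makes every annotation in the module lazily evaluated, which is what lets a later hunk annotate `data_slice: slice | None` without requiring Python 3.10's runtime union support. A self-contained illustration (not package code):

    from __future__ import annotations  # annotations are stored as strings

    def load(filename: str, data_slice: slice | None = None) -> tuple:
        # Without the future import, `slice | None` raises TypeError at function
        # definition time on Python 3.7-3.9; with it, it is never evaluated.
        return (filename, data_slice)

    print(load.__annotations__["data_slice"])  # -> 'slice | None'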
@@ -78,7 +88,7 @@ class OsirisData:
 
     def _open_file_hdf5(self, filename):
         """
-        Open the OSIRIS output file
+        Open the OSIRIS output file with optimized cache settings.
 
         Parameters
         ----------
@@ -88,11 +98,23 @@ class OsirisData:
         if self._verbose:
             print(f"Opening file > {filename}")
 
-        if filename.endswith(".h5"):
-            self._file = h5py.File(filename, "r")
-        else:
+        if not filename.endswith(".h5"):
             raise ValueError("The file should be an HDF5 file with the extension .h5")
 
+        # Optimize HDF5 chunk cache for better performance
+        # Increase cache from default 1MB to 10MB for large file access
+        propfaid = h5py.h5p.create(h5py.h5p.FILE_ACCESS)
+        propfaid.set_cache(
+            0,  # Meta cache elements (0 = use default)
+            10485760,  # 10MB chunk cache (default is 1MB)
+            0.75,  # Chunk cache preemption policy (0.75 = aggressive caching)
+            0,  # Hash table size (0 = use default)
+        )
+
+        # Open file with optimized settings
+        fid = h5py.h5f.open(filename.encode(), flags=h5py.h5f.ACC_RDONLY, fapl=propfaid)
+        self._file = h5py.File(fid)
+
     def _open_hist_file(self, filename):
         self._df = pd.read_csv(filename, sep=r"\s+", comment="!", header=0, engine="python")
 
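The new open path drops to h5py's low-level API so the raw-data chunk cache can be enlarged before the file handle is created. For comparison, h5py 2.9+ also exposes these cache knobs directly on the high-level constructor, so a read-only open with the same intent (10 MB cache, 0.75 preemption) can be sketched without the low-level calls; the file and dataset names below are assumptions:

    import h5py

    with h5py.File(
        "e1-000001.h5",                # hypothetical OSIRIS output file
        "r",
        rdcc_nbytes=10 * 1024 * 1024,  # raw-data chunk cache size in bytes
        rdcc_w0=0.75,                  # chunk preemption policy
    ) as f:
        print(f["e1"].shape)           # dataset name assumed for illustration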
@@ -162,11 +184,9 @@ class OsirisGridFile(OsirisData):
         Field units (LaTeX formatted)
     label : str
         Field label/name (LaTeX formatted, e.g., r'$E_x$')
-    FFTdata : np.ndarray
-        Fourier-transformed data (available after calling FFT())
     """
 
-    def __init__(self, filename):
+    def __init__(self, filename, data_slice: slice | None = None):
         super().__init__(filename)
 
         variable_key = self._get_variable_key(self._file)
@@ -175,7 +195,7 @@ class OsirisGridFile(OsirisData):
         self._label = self._file.attrs["LABEL"][0].decode("utf-8")
         self._FFTdata = None
 
-        data = np.array(self._file[variable_key][:])
+        data = np.array(self._file[variable_key][:]) if data_slice is None else np.array(self._file[variable_key][data_slice])
 
         axis = list(self._file["AXIS"].keys())
         if len(axis) == 1:
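Because `data_slice` is applied directly to the h5py dataset read, only the requested hyperslab is loaded into memory. A usage sketch (the path is hypothetical; `np.s_` is just a convenient way to build the slice object):

    import numpy as np
    import osiris_utils as ou

    # Load only the first 128 cells along the first axis instead of the full array.
    partial = ou.OsirisGridFile("MS/FLD/e1/e1-000050.h5", data_slice=np.s_[:128])
    print(partial.data.shape)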
@@ -193,8 +213,11 @@ class OsirisGridFile(OsirisData):
 
         # There's an issue when the dimension is 3 and we want to plot a 2D phasespace. I believe this
         # is a problem for all cases where the dim != dim_of_phasespace
-
-
+        try:
+            self._x = [np.arange(self.grid[i, 0], self.grid[i, 1], self.dx[i]) for i in range(self.dim)]
+        except Exception as e:
+            print(f"Error occurred while creating spatial coordinates: {e}")
+            self._x = [np.arange(self.grid[i, 0], self.grid[i, 1], self.dx[i]) for i in range(1)]
 
         self._axis = []
         for ax in axis:
@@ -203,7 +226,8 @@ class OsirisGridFile(OsirisData):
                 "units": self._file["AXIS/" + ax].attrs["UNITS"][0].decode("utf-8"),
                 "long_name": self._file["AXIS/" + ax].attrs["LONG_NAME"][0].decode("utf-8"),
                 "type": self._file["AXIS/" + ax].attrs["TYPE"][0].decode("utf-8"),
-                "plot_label": rf'${self._file["AXIS/" + ax].attrs["LONG_NAME"][0].decode("utf-8")}$
+                "plot_label": rf'${self._file["AXIS/" + ax].attrs["LONG_NAME"][0].decode("utf-8")}$'
+                + rf'$[{self._file["AXIS/" + ax].attrs["UNITS"][0].decode("utf-8")}]$',
             }
             self._axis.append(axis_data)
 
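The rebuilt `self._x` list holds one coordinate array per dimension, running from each axis's lower to upper grid bound in steps of the cell size. The same construction in isolation, for a made-up 2D grid:

    import numpy as np

    grid = np.array([[0.0, 10.0], [0.0, 5.0]])  # [[x_min, x_max], [y_min, y_max]]
    dx = np.array([0.1, 0.05])                  # cell sizes per axis

    x = [np.arange(grid[i, 0], grid[i, 1], dx[i]) for i in range(grid.shape[0])]
    print(x[0].shape, x[1].shape)               # (100,) (100,)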
@@ -226,154 +250,6 @@ class OsirisGridFile(OsirisData):
     def _get_variable_key(self, f: h5py.File) -> str:
         return next(k for k in f.keys() if k not in {"AXIS", "SIMULATION"})
 
-    def _yeeToCellCenter1d(self, boundary):
-        """
-        Converts 1d EM fields from a staggered Yee mesh to a grid with field values centered on the Center of the cell
-        """
-
-        if self.name.lower() in ["b2", "b3", "e1"]:
-            if boundary == "periodic":
-                return 0.5 * (np.roll(self.data, shift=1) + self.data)
-            else:
-                return 0.5 * (self.data[1:] + self.data[:-1])
-        elif self.name.lower() in ["b1", "e2", "e3"]:
-            if boundary == "periodic":
-                return self.data
-            else:
-                return self.data[1:]
-        else:
-            raise TypeError(f"This method expects magnetic or electric field grid data but received '{self.name}' instead")
-
-    def _yeeToCellCenter2d(self, boundary):
-        """
-        Converts 2d EM fields from a staggered Yee mesh to a grid with field values centered on the Center of the cell
-        """
-
-        if self.name.lower() in ["e1", "b2"]:
-            if boundary == "periodic":
-                return 0.5 * (np.roll(self.data, shift=1, axis=0) + self.data)
-            else:
-                return 0.5 * (self.data[1:, 1:] + self.data[:-1, 1:])
-        elif self.name.lower() in ["e2", "b1"]:
-            if boundary == "periodic":
-                return 0.5 * (np.roll(self.data, shift=1, axis=1) + self.data)
-            else:
-                return 0.5 * (self.data[1:, 1:] + self.data[1:, :-1])
-        elif self.name.lower() in ["b3"]:
-            if boundary == "periodic":
-                return 0.5 * (
-                    np.roll(
-                        (0.5 * (np.roll(self.data, shift=1, axis=0) + self.data)),
-                        shift=1,
-                        axis=1,
-                    )
-                    + (0.5 * (np.roll(self.data, shift=1, axis=0) + self.data))
-                )
-            else:
-                return 0.25 * (self.data[1:, 1:] + self.data[:-1, 1:] + self.data[1:, :-1] + self.data[:-1, :-1])
-        elif self.name.lower() in ["e3"]:
-            if boundary == "periodic":
-                return self.data
-            else:
-                return self.data[1:, 1:]
-        else:
-            raise TypeError(f"This method expects magnetic or electric field grid data but received '{self.name}' instead")
-
-    def _yeeToCellCenter3d(self, boundary):
-        """
-        Converts 3d EM fields from a staggered Yee mesh to a grid with field values centered on the Center of the cell
-        """
-        if self.name.lower() == "b1":
-            if boundary == "periodic":
-                return 0.5 * (
-                    0.5
-                    * np.roll(
-                        (np.roll(self.data, shift=1, axis=1) + self.data),
-                        shift=1,
-                        axis=2,
-                    )
-                    + 0.5 * (np.roll(self.data, shift=1, axis=1) + self.data)
-                )
-            else:
-                return 0.25 * (self.data[1:, 1:, 1:] + self.data[1:, :-1, 1:] + self.data[1:, 1:, :-1] + self.data[1:, :-1, :-1])
-        elif self.name.lower() == "b2":
-            if boundary == "periodic":
-                return 0.5 * (
-                    0.5
-                    * np.roll(
-                        (np.roll(self.data, shift=1, axis=0) + self.data),
-                        shift=1,
-                        axis=2,
-                    )
-                    + 0.5 * (np.roll(self.data, shift=1, axis=0) + self.data)
-                )
-            else:
-                return 0.25 * (self.data[1:, 1:, 1:] + self.data[:-1, 1:, 1:] + self.data[1:, 1:, :-1] + self.data[:-1, 1:, :-1])
-        elif self.name.lower() == "b3":
-            if boundary == "periodic":
-                return 0.5 * (
-                    0.5
-                    * np.roll(
-                        (np.roll(self.data, shift=1, axis=0) + self.data),
-                        shift=1,
-                        axis=1,
-                    )
-                    + 0.5 * (np.roll(self.data, shift=1, axis=0) + self.data)
-                )
-            else:
-                return 0.25 * (self.data[1:, 1:, 1:] + self.data[:-1, 1:, 1:] + self.data[1:, :-1, 1:] + self.data[:-1, :-1, 1:])
-        elif self.name.lower() == "e1":
-            if boundary == "periodic":
-                return 0.5 * (np.roll(self.data, shift=1, axis=0) + self.data)
-            else:
-                return 0.5 * (self.data[1:, 1:, 1:] + self.data[:-1, 1:, 1:])
-        elif self.name.lower() == "e2":
-            if boundary == "periodic":
-                return 0.5 * (np.roll(self.data, shift=1, axis=1) + self.data)
-            else:
-                return 0.5 * (self.data[1:, 1:, 1:] + self.data[1:, :-1, 1:])
-        elif self.name.lower() == "e3":
-            if boundary == "periodic":
-                return 0.5 * (np.roll(self.data, shift=1, axis=2) + self.data)
-            else:
-                return 0.5 * (self.data[1:, 1:, 1:] + self.data[1:, 1:, :-1])
-        else:
-            raise TypeError(f"This method expects magnetic or electric field grid data but received '{self.name}' instead")
-
-    def yeeToCellCenter(self, boundary: Literal["periodic", "default"] = "default"):
-        """'
-        Converts EM fields from a staggered Yee mesh to a grid with field values centered on the center of the cell.'
-        Can be used for 1D, 2D and 3D simulations.'
-        Creates a new attribute `data_centered` with the centered data.'
-        """
-
-        if boundary not in ("periodic", "default"):
-            raise ValueError(f"Invalid boundary: {boundary}, choose 'periodic' or 'default' instead.")
-
-        cases = {"b1", "b2", "b3", "e1", "e2", "e3"}
-        if self.name not in cases:
-            raise TypeError(f"This method expects magnetic or electric field grid data but received '{self.name}' instead")
-
-        if self.dim == 1:
-            self.data_centered = self._yeeToCellCenter1d(boundary)
-            return self.data_centered
-        elif self.dim == 2:
-            self.data_centered = self._yeeToCellCenter2d(boundary)
-            return self.data_centered
-        elif self.dim == 3:
-            self.data_centered = self._yeeToCellCenter3d(boundary)
-            return self.data_centered
-        else:
-            raise ValueError(f"Dimension {self.dim} is not supported")
-
-    def FFT(self, axis=(0,)):
-        """
-        Computes the Fast Fourier Transform of the data along the specified axis and shifts the zero frequency to the center.
-        Transforms the data to the frequency domain. A(x, y, z) -> A(kx, ky, kz)
-        """
-        datafft = np.fft.fftn(self.data, axes=axis)
-        self._FFTdata = np.fft.fftshift(datafft, axes=axis)
-
     # Getters
     @property
     def grid(self):
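Both the Yee-centering helpers and the in-class `FFT` are gone from `OsirisGridFile`; the changes to `postprocessing/field_centering.py` and `postprocessing/fft.py` suggest that functionality now lives in the postprocessing layer, though those APIs are not shown in this excerpt. The removed `FFT` method itself was the standard fftn-plus-fftshift pattern, which is easy to reproduce on the raw array if needed:

    import numpy as np

    def fft_shifted(data: np.ndarray, axes: tuple[int, ...] = (0,)) -> np.ndarray:
        # Equivalent of the removed OsirisGridFile.FFT(axis=(0,)): transform along
        # the chosen axes and shift the zero-frequency component to the centre.
        return np.fft.fftshift(np.fft.fftn(data, axes=axes), axes=axes)

    field = np.random.rand(64, 64)                # stand-in for grid.data
    print(fft_shifted(field, axes=(0, 1)).shape)  # (64, 64), complex-valued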
@@ -407,12 +283,6 @@ class OsirisGridFile(OsirisData):
     def label(self):
         return self._label
 
-    @property
-    def FFTdata(self):
-        if self._FFTdata is None:
-            raise ValueError("The FFT of the data has not been computed yet. Compute it using the FFT method.")
-        return self._FFTdata
-
     # Setters
     @data.setter
     def data(self, data):
@@ -501,8 +371,8 @@ class OsirisRawFile(OsirisData):
         self._quants = [byte.decode("utf-8") for byte in self._file.attrs["QUANTS"][:]]
         units_list = [byte.decode("utf-8") for byte in self._file.attrs["UNITS"][:]]
         labels_list = [byte.decode("utf-8") for byte in self._file.attrs["LABELS"][:]]
-        self._units = dict(zip(self._quants, units_list))
-        self._labels = dict(zip(self._quants, labels_list))
+        self._units = dict(zip(self._quants, units_list, strict=False))
+        self._labels = dict(zip(self._quants, labels_list, strict=False))
 
         self._data = {}
         self._axis = {}
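Spelling out `strict=False` keeps `zip`'s existing truncate-on-mismatch behaviour while satisfying linters that flag bare `zip` calls on Python 3.10+. The difference in isolation:

    quants = ["x1", "x2", "p1"]
    units = ["c/\\omega_p", "c/\\omega_p"]  # one entry short, for illustration

    print(dict(zip(quants, units, strict=False)))  # keeps the first two pairs
    # dict(zip(quants, units, strict=True)) would raise ValueError instead.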
@@ -598,8 +468,8 @@
 
 
 class OsirisHIST(OsirisData):
-    """
-    Class to read the data from an OSIRIS HIST file.
+    """
+    Class to read the data from an OSIRIS HIST file.
 
     Input
     -----
@@ -669,8 +539,8 @@ class OsirisTrackFile(OsirisData):
         self._quants = [byte.decode("utf-8") for byte in self._file.attrs["QUANTS"][1:]]
         units_list = [byte.decode("utf-8") for byte in self._file.attrs["UNITS"][1:]]
         labels_list = [byte.decode("utf-8") for byte in self._file.attrs["LABELS"][1:]]
-        self._units = dict(zip(self._quants, units_list))
-        self._labels = dict(zip(self._quants, labels_list))
+        self._units = dict(zip(self._quants, units_list, strict=False))
+        self._labels = dict(zip(self._quants, labels_list, strict=False))
 
         self._num_particles = self._file.attrs["NTRACKS"][0]
 