osiris-utils 1.1.3__py3-none-any.whl → 1.1.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,692 @@
+ import numpy as np
+ import pandas as pd
+ import h5py
+ from osiris_utils.utils import create_file_tags
+ from typing import Literal
+
+ class OsirisData():
+     """
+     Base class for handling OSIRIS simulation data files (HDF5 and HIST formats).
+
+     This class provides common functionality for reading and managing basic attributes
+     from OSIRIS output files. It serves as the parent class for specialized data handlers.
+
+     Parameters
+     ----------
+     filename : str
+         Path to the data file. Supported formats:
+         - HDF5 files (.h5 extension)
+         - HIST files (ending with _ene)
+
+     Attributes
+     ----------
+     dt : float
+         Time step of the simulation [simulation units]
+     dim : int
+         Number of dimensions in the simulation (1, 2, or 3)
+     time : list[float, str]
+         Current simulation time and units as [value, unit_string]
+     iter : int
+         Current iteration number
+     name : str
+         Name identifier of the data field
+     type : str
+         Type of data (e.g., 'grid', 'particles')
+     verbose : bool
+         Verbosity flag controlling diagnostic messages (default: False)
+     """
+
+     def __init__(self, filename):
+         self._filename = str(filename)
+         # self._file = None
+
+         self._verbose = False
+
+         if self._filename.endswith('.h5'):
+             self._open_file_hdf5(self._filename)
+             self._load_basic_attributes(self._file)
+         elif self._filename.endswith('_ene'):
+             self._open_hist_file(self._filename)
+         else:
+             raise ValueError('The file should be an HDF5 file with the extension .h5, or a HIST file ending with _ene.')
+
+     def _load_basic_attributes(self, f: h5py.File) -> None:
+         '''Load common attributes from HDF5 file'''
+         self._dt = float(f['SIMULATION'].attrs['DT'][0])
+         self._dim = int(f['SIMULATION'].attrs['NDIMS'][0])
+         self._time = [float(f.attrs['TIME'][0]), f.attrs['TIME UNITS'][0].decode('utf-8')]
+         self._iter = int(f.attrs['ITER'][0])
+         self._name = f.attrs['NAME'][0].decode('utf-8')
+         self._type = f.attrs['TYPE'][0].decode('utf-8')
+
+     def verbose(self, verbose: bool = True):
+         '''
+         Set the verbosity of the class.
+
+         Parameters
+         ----------
+         verbose : bool, optional
+             If True, the class prints diagnostic messages. Defaults to True when this
+             method is called; verbosity is off (False) until then.
+         '''
+         self._verbose = verbose
+
+     def _open_file_hdf5(self, filename):
+         '''
+         Open an OSIRIS HDF5 output file.
+
+         Parameters
+         ----------
+         filename : str
+             The path to the HDF5 file.
+         '''
+         if self._verbose:
+             print(f'Opening file > {filename}')
+
+         if filename.endswith('.h5'):
+             self._file = h5py.File(filename, 'r')
+         else:
+             raise ValueError('The file should be an HDF5 file with the extension .h5')
+
+     def _open_hist_file(self, filename):
+         self._df = pd.read_csv(filename, sep=r'\s+', comment='!', header=0, engine='python')
+
+     def _close_file(self):
+         '''
+         Close the HDF5 file.
+         '''
+         if self._verbose:
+             print('Closing file')
+         if self._file:
+             self._file.close()
+
+     @property
+     def dt(self):
+         return self._dt
+     @property
+     def dim(self):
+         return self._dim
+     @property
+     def time(self):
+         return self._time
+     @property
+     def iter(self):
+         return self._iter
+     @property
+     def name(self):
+         return self._name
+     @property
+     def type(self):
+         return self._type
+
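For orientation, a minimal usage sketch of the shared attributes and the verbosity toggle (illustrative only, not part of the packaged source; the file path is a placeholder, and in practice one of the subclasses below is instantiated):

import osiris_utils as ou

fld = ou.OsirisGridFile('MS/FLD/e1/e1-000010.h5')   # any OsirisData subclass reads these attributes
fld.verbose()                                       # enable diagnostic prints for subsequent file operations
print(fld.dt, fld.dim)                              # simulation time step and dimensionality
print(fld.time)                                     # [value, units]
print(fld.iter, fld.name, fld.type)                 # iteration, field name, data type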
+ class OsirisGridFile(OsirisData):
+     """
+     Handles structured grid data from OSIRIS HDF5 simulations, including electromagnetic fields.
+
+     Parameters
+     ----------
+     filename : str
+         Path to OSIRIS HDF5 grid file (.h5 extension)
+
+     Attributes
+     ----------
+     grid : np.ndarray
+         Grid boundaries as ((x1_min, x1_max), (x2_min, x2_max), ...)
+     nx : tuple
+         Number of grid points per dimension (nx1, nx2, nx3)
+     dx : np.ndarray
+         Grid spacing per dimension (dx1, dx2, dx3)
+     x : list[np.ndarray]
+         Spatial coordinate arrays for each dimension
+     axis : list[dict]
+         Axis metadata with keys:
+         - 'name': Axis identifier (e.g., 'x1')
+         - 'units': Physical units (LaTeX formatted)
+         - 'long_name': Descriptive name (LaTeX formatted)
+         - 'type': Axis type (e.g., 'SPATIAL')
+         - 'plot_label': Combined label for plotting
+     data : np.ndarray
+         Raw field data array (shape depends on simulation dimensions)
+     units : str
+         Field units (LaTeX formatted)
+     label : str
+         Field label/name (LaTeX formatted, e.g., r'$E_x$')
+     FFTdata : np.ndarray
+         Fourier-transformed data (available after calling FFT())
+     """
+
+     def __init__(self, filename):
+         super().__init__(filename)
+
+         variable_key = self._get_variable_key(self._file)
+
+         self._units = self._file.attrs['UNITS'][0].decode('utf-8')
+         self._label = self._file.attrs['LABEL'][0].decode('utf-8')
+         self._FFTdata = None
+
+         data = np.array(self._file[variable_key][:])
+
+         axis = list(self._file['AXIS'].keys())
+         if len(axis) == 1:
+             self._grid = self._file['AXIS/' + axis[0]][()]
+             self._nx = len(data)
+             self._dx = (self.grid[1] - self.grid[0]) / self.nx
+             self._x = np.arange(self.grid[0], self.grid[1], self.dx)
+         else:
+             grid = []
+             for ax in axis:
+                 grid.append(self._file['AXIS/' + ax][()])
+             self._grid = np.array(grid)
+             self._nx = self._file[variable_key][()].transpose().shape
+             self._dx = (self.grid[:, 1] - self.grid[:, 0]) / self.nx
+             self._x = [np.arange(self.grid[i, 0], self.grid[i, 1], self.dx[i]) for i in range(self.dim)]
+
+         self._axis = []
+         for ax in axis:
+             axis_data = {
+                 'name': self._file['AXIS/' + ax].attrs['NAME'][0].decode('utf-8'),
+                 'units': self._file['AXIS/' + ax].attrs['UNITS'][0].decode('utf-8'),
+                 'long_name': self._file['AXIS/' + ax].attrs['LONG_NAME'][0].decode('utf-8'),
+                 'type': self._file['AXIS/' + ax].attrs['TYPE'][0].decode('utf-8'),
+                 'plot_label': rf'${self._file["AXIS/" + ax].attrs["LONG_NAME"][0].decode("utf-8")}$ $[{self._file["AXIS/" + ax].attrs["UNITS"][0].decode("utf-8")}]$',
+             }
+             self._axis.append(axis_data)
+
+         self._data = np.ascontiguousarray(data.T)
+
+         self._close_file()
+
+     def _load_basic_attributes(self, f: h5py.File) -> None:
+         '''Load common attributes from HDF5 file'''
+         self._dt = float(f['SIMULATION'].attrs['DT'][0])
+         self._dim = int(f['SIMULATION'].attrs['NDIMS'][0])
+         self._time = [float(f.attrs['TIME'][0]), f.attrs['TIME UNITS'][0].decode('utf-8')]
+         self._iter = int(f.attrs['ITER'][0])
+         self._name = f.attrs['NAME'][0].decode('utf-8')
+         self._type = f.attrs['TYPE'][0].decode('utf-8')
+
+     def _get_variable_key(self, f: h5py.File) -> str:
+         return next(k for k in f.keys() if k not in {'AXIS', 'SIMULATION'})
+
+     def _yeeToCellCorner1d(self, boundary):
+         '''
+         Converts 1D EM fields from a staggered Yee mesh to a grid with field values centered
+         on the corner of the cell (the corner of cell [1] has coordinates [1]).
+         '''
+         if self.name.lower() in ['b2', 'b3', 'e1']:
+             if boundary == 'periodic':
+                 return 0.5 * (np.roll(self.data, shift=1) + self.data)
+             else:
+                 return 0.5 * (self.data[1:] + self.data[:-1])
+         elif self.name.lower() in ['b1', 'e2', 'e3']:
+             if boundary == 'periodic':
+                 return self.data
+             else:
+                 return self.data[1:]
+         else:
+             raise TypeError(f'This method expects magnetic or electric field grid data but received \'{self.name}\' instead')
+
+     def _yeeToCellCorner2d(self, boundary):
+         '''
+         Converts 2D EM fields from a staggered Yee mesh to a grid with field values centered
+         on the corner of the cell (the corner of cell [1,1] has coordinates [1,1]).
+         '''
+         if self.name.lower() in ['e1', 'b2']:
+             if boundary == 'periodic':
+                 return 0.5 * (np.roll(self.data, shift=1, axis=0) + self.data)
+             else:
+                 return 0.5 * (self.data[1:, 1:] + self.data[:-1, 1:])
+         elif self.name.lower() in ['e2', 'b1']:
+             if boundary == 'periodic':
+                 return 0.5 * (np.roll(self.data, shift=1, axis=1) + self.data)
+             else:
+                 return 0.5 * (self.data[1:, 1:] + self.data[1:, :-1])
+         elif self.name.lower() in ['b3']:
+             if boundary == 'periodic':
+                 centered_x1 = 0.5 * (np.roll(self.data, shift=1, axis=0) + self.data)
+                 return 0.5 * (np.roll(centered_x1, shift=1, axis=1) + centered_x1)
+             else:
+                 return 0.25 * (self.data[1:, 1:] + self.data[:-1, 1:] + self.data[1:, :-1] + self.data[:-1, :-1])
+         elif self.name.lower() in ['e3']:
+             if boundary == 'periodic':
+                 return self.data
+             else:
+                 return self.data[1:, 1:]
+         else:
+             raise TypeError(f'This method expects magnetic or electric field grid data but received \'{self.name}\' instead')
+
+     def _yeeToCellCorner3d(self, boundary):
+         '''
+         Converts 3D EM fields from a staggered Yee mesh to a grid with field values centered
+         on the corner of the cell (the corner of cell [1,1,1] has coordinates [1,1,1]).
+         '''
+         if boundary == 'periodic':
+             raise ValueError('Centering field from 3D simulations considering periodic boundary conditions is not implemented yet')
+         if self.name.lower() == 'b1':
+             return 0.25 * (self.data[1:, 1:, 1:] + self.data[1:, :-1, 1:] + self.data[1:, 1:, :-1] + self.data[1:, :-1, :-1])
+         elif self.name.lower() == 'b2':
+             return 0.25 * (self.data[1:, 1:, 1:] + self.data[:-1, 1:, 1:] + self.data[1:, 1:, :-1] + self.data[:-1, 1:, :-1])
+         elif self.name.lower() == 'b3':
+             return 0.25 * (self.data[1:, 1:, 1:] + self.data[:-1, 1:, 1:] + self.data[1:, :-1, 1:] + self.data[:-1, :-1, 1:])
+         elif self.name.lower() == 'e1':
+             return 0.5 * (self.data[1:, 1:, 1:] + self.data[:-1, 1:, 1:])
+         elif self.name.lower() == 'e2':
+             return 0.5 * (self.data[1:, 1:, 1:] + self.data[1:, :-1, 1:])
+         elif self.name.lower() == 'e3':
+             return 0.5 * (self.data[1:, 1:, 1:] + self.data[1:, 1:, :-1])
+         else:
+             raise TypeError(f'This method expects magnetic or electric field grid data but received \'{self.name}\' instead')
+
+     def yeeToCellCorner(self, boundary=None):
+         '''
+         Converts EM fields from a staggered Yee mesh to a grid with field values centered on the corner of the cell.
+         Can be used for 1D, 2D and 3D simulations.
+         Creates a new attribute `data_centered` with the centered data.
+         '''
+         cases = {'b1', 'b2', 'b3', 'e1', 'e2', 'e3'}
+         if self.name.lower() not in cases:
+             raise TypeError(f'This method expects magnetic or electric field grid data but received \'{self.name}\' instead')
+
+         if self.dim == 1:
+             self.data_centered = self._yeeToCellCorner1d(boundary)
+             return self.data_centered
+         elif self.dim == 2:
+             self.data_centered = self._yeeToCellCorner2d(boundary)
+             return self.data_centered
+         elif self.dim == 3:
+             self.data_centered = self._yeeToCellCorner3d(boundary)
+             return self.data_centered
+         else:
+             raise ValueError(f'Dimension {self.dim} is not supported')
+
+     def FFT(self, axis=(0, )):
+         '''
+         Computes the Fast Fourier Transform of the data along the specified axis and shifts the zero frequency to the center.
+         Transforms the data to the frequency domain. A(x, y, z) -> A(kx, ky, kz)
+         '''
+         datafft = np.fft.fftn(self.data, axes=axis)
+         self._FFTdata = np.fft.fftshift(datafft, axes=axis)
+
+     # Getters
+     @property
+     def grid(self):
+         return self._grid
+     @property
+     def nx(self):
+         return self._nx
+     @property
+     def dx(self):
+         return self._dx
+     @property
+     def x(self):
+         return self._x
+     @property
+     def axis(self):
+         return self._axis
+     @property
+     def data(self):
+         return self._data
+     @property
+     def units(self):
+         return self._units
+     @property
+     def label(self):
+         return self._label
+     @property
+     def FFTdata(self):
+         if self._FFTdata is None:
+             raise ValueError('The FFT of the data has not been computed yet. Compute it using the FFT method.')
+         return self._FFTdata
+
+     # Setters
+     @data.setter
+     def data(self, data):
+         self._data = data
+
+     def __str__(self):
+         return (
+             f'{self.name}\n'
+             f'Time: [{self.time[0]} {self.time[1]}], dt = {self.dt}\n'
+             f'Iteration: {self.iter}\n'
+             f'Grid: {self.grid}\n'
+             f'dx: {self.dx}\n'
+             f'Dimensions: {self.dim}D'
+         )
+
+     def __array__(self):
+         return np.asarray(self.data)
+
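As a point of reference, a short usage sketch for the grid reader (illustrative only, not part of the packaged source; the file path is a placeholder and assumes an E1 field diagnostic from a 2D run):

import numpy as np
import osiris_utils as ou

e1 = ou.OsirisGridFile('MS/FLD/e1/e1-000010.h5')
print(e1.label, e1.units)          # LaTeX-formatted label and units
print(e1.data.shape, e1.dx)        # field array and grid spacing

# Center the staggered Yee-mesh field on cell corners (periodic boundaries here)
centered = e1.yeeToCellCorner(boundary='periodic')

# Fourier transform along both axes; the result is stored in e1.FFTdata
e1.FFT(axis=(0, 1))
spectrum = np.abs(e1.FFTdata)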
+ class OsirisRawFile(OsirisData):
+     '''
+     Class to read the raw data from an OSIRIS HDF5 file.
+
+     Parameters
+     ----------
+     filename : str
+         Path to OSIRIS HDF5 raw file (.h5 extension)
+
+     Attributes
+     ----------
+     axis : dict[str, dict[str, str]]
+         Dictionary where each key is a dataset name, and each value is another dictionary containing:
+         - 'name' (str): Short name of the quantity (e.g., 'x1', 'ene')
+         - 'units' (str): Units (LaTeX formatted, e.g., 'c/\\omega_p', 'm_e c^2')
+         - 'long_name' (str): Descriptive name (LaTeX formatted, e.g., 'x_1', 'En2')
+     data : dict[str, np.ndarray]
+         Dataset values indexed by dataset name (quants).
+     dim : int
+         Number of spatial dimensions.
+     dt : float
+         Time step between iterations.
+     grid : np.ndarray
+         Grid boundaries as ((x1_min, x1_max), (x2_min, x2_max), ...)
+     iter : int
+         Iteration number corresponding to the data.
+     name : str
+         Name of the species.
+     time : list[float, str]
+         Simulation time and its units (e.g., [12.5, '1/\\omega_p']).
+     type : str
+         Type of data (e.g., 'particles' for raw files).
+     labels : dict[str, str]
+         Field labels/names indexed by quantity (LaTeX formatted, e.g., 'x_1')
+     quants : list[str]
+         Field names of the data
+     units : dict[str, str]
+         Units of each field of the data, indexed by quantity (LaTeX formatted, e.g., 'c/\\omega_p')
+
+     Example
+     -------
+     >>> import osiris_utils as ou
+     >>> raw = ou.OsirisRawFile("path/to/raw/file.h5")
+     >>> print(raw.data.keys())
+     >>> # Access x1 position of first 10 particles
+     >>> print(raw.data["x1"][0:10])
+     >>> # Write beautiful labels and units
+     >>> print("${} = $".format(raw.labels["x1"]) + "$[{}]$".format(raw.units["x1"]))
+     '''
+
+     def __init__(self, filename):
+         super().__init__(filename)
+
+         self._grid = np.array([self._file['SIMULATION'].attrs['XMIN'], self._file['SIMULATION'].attrs['XMAX']]).T
+
+         self._quants = [byte.decode('utf-8') for byte in self._file.attrs['QUANTS'][:]]
+         units_list = [byte.decode('utf-8') for byte in self._file.attrs['UNITS'][:]]
+         labels_list = [byte.decode('utf-8') for byte in self._file.attrs['LABELS'][:]]
+         self._units = dict(zip(self._quants, units_list))
+         self._labels = dict(zip(self._quants, labels_list))
+
+         self._data = {}
+         self._axis = {}
+         for key in self._file.keys():
+             if key == 'SIMULATION':
+                 continue
+
+             self._data[key] = np.array(self._file[key][()])
+
+             idx = np.where(self._file.attrs['QUANTS'] == str(key).encode('utf-8'))
+             axis_data = {
+                 'name': self._file.attrs['QUANTS'][idx][0].decode('utf-8'),
+                 'units': self._file.attrs['UNITS'][idx][0].decode('utf-8'),
+                 'long_name': self._file.attrs['LABELS'][idx][0].decode('utf-8'),
+             }
+             self._axis[key] = axis_data
+
+     def raw_to_file_tags(self, filename, type: Literal["all", "random"] = "all", n_tags=10, mask=None):
+         """
+         Write a file_tags file from raw data.
+         This file is used to choose particles for the OSIRIS track diagnostic.
+
+         Parameters
+         ----------
+         filename : str
+             Path to the output file where tags will be stored.
+         type : {'all', 'random'}, optional
+             Selection mode for tags:
+             - 'all': Includes all available tags.
+             - 'random': Randomly selects `n_tags` tags.
+         n_tags : int, optional
+             Number of tags to randomly select when `type` is 'random'. Default is 10.
+         mask : np.ndarray, optional
+             Boolean mask array applied to filter valid tags before selection.
+
+         Returns
+         -------
+         None. Writes a file_tags file at `filename` to be used for the OSIRIS track diagnostic.
+
+         Notes
+         -----
+         The first element of the tag of a particle that is already being tracked is negative,
+         so the absolute value is taken when generating the file.
+         """
+         if mask is not None:
+             # Apply mask to select certain tags
+             if not isinstance(mask, np.ndarray) or mask.dtype != bool or mask.shape[0] != self.data["tag"].shape[0]:
+                 raise ValueError("Mask must be a boolean NumPy array of the same length as 'tag'.")
+             filtered_indices = np.where(mask)[0]
+             filtered_tags = self.data["tag"][filtered_indices]
+         else:
+             filtered_tags = self.data["tag"]
+
+         if type == "all":
+             tags = filtered_tags
+         elif type == "random":
+             if len(filtered_tags) < n_tags:
+                 raise ValueError("Not enough tags to sample from.")
+             random_indices = np.random.choice(len(filtered_tags), size=n_tags, replace=False)
+             tags = filtered_tags[random_indices]
+         else:
+             raise TypeError(f"Invalid type: {type}")
+
+         create_file_tags(filename, tags)
+         print("Tag_file created: ", filename)
+
+     # Getters
+     @property
+     def grid(self):
+         return self._grid
+     @property
+     def data(self):
+         return self._data
+     @property
+     def units(self):
+         return self._units
+     @property
+     def labels(self):
+         return self._labels
+     @property
+     def quants(self):
+         return self._quants
+     @property
+     def axis(self):
+         return self._axis
+
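A minimal sketch of selecting particles for tracking with raw_to_file_tags (illustrative only, not part of the packaged source; the file path is a placeholder and it assumes the raw diagnostic includes 'ene' and 'tag' datasets):

import osiris_utils as ou

raw = ou.OsirisRawFile('MS/RAW/electrons/RAW-electrons-000010.h5')

# Keep only high-energy particles, then write 50 randomly chosen tags for the track diagnostic
mask = raw.data['ene'] > 1.0
raw.raw_to_file_tags('electrons.tags', type='random', n_tags=50, mask=mask)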
+ class OsirisHIST(OsirisData):
+     '''
+     Class to read the data from an OSIRIS HIST file.
+
+     Parameters
+     ----------
+     filename : str
+         The path to the HIST file (ending with _ene).
+
+     Attributes
+     ----------
+     df : pandas.DataFrame
+         The HIST data in a pandas DataFrame.
+     '''
+     def __init__(self, filename):
+         super().__init__(filename)
+
+     @property
+     def df(self):
+         return self._df
+
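A quick sketch of reading an energy-history file (illustrative only, not part of the packaged source; the path is a placeholder and the available columns depend on the simulation):

import osiris_utils as ou

hist = ou.OsirisHIST('HIST/par01_ene')
print(hist.df.columns)   # available energy-history columns
print(hist.df.head())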
+ class OsirisTrackFile(OsirisData):
+     """
+     Handles structured track data from OSIRIS HDF5 simulations.
+
+     Parameters
+     ----------
+     filename : str
+         Path to OSIRIS HDF5 track file (.h5 extension)
+
+     Attributes
+     ----------
+     data : numpy.ndarray of shape (num_particles, num_time_iters),
+            dtype = [(field_name, float) for field_name in field_names]
+         A structured numpy array with the track data.
+         Accessed as data[particles, time_iters][quant]
+     grid : np.ndarray
+         Grid boundaries as ((x1_min, x1_max), (x2_min, x2_max), ...)
+     labels : dict[str, str]
+         Field labels/names indexed by quantity (LaTeX formatted, e.g., 'x_1')
+     num_particles : int
+         Number of particles tracked; they are accessed from 0 to num_particles-1
+     num_time_iters : int
+         Number of time iterations; they are accessed from 0 to num_time_iters-1
+     quants : list[str]
+         Field names of the data
+     units : dict[str, str]
+         Units of each field of the data, indexed by quantity (LaTeX formatted, e.g., 'c/\\omega_p')
+
+     Example
+     -------
+     >>> import osiris_utils as ou
+     >>> track = ou.OsirisTrackFile("path/to/track_file.h5")
+     >>> print(track.data[0:10, :]["x1"])  # Access x1 position of first 10 particles over all time steps
+     """
+
+     def __init__(self, filename):
+         super().__init__(filename)
+
+         self._grid = np.array([self._file['SIMULATION'].attrs['XMIN'], self._file['SIMULATION'].attrs['XMAX']]).T
+
+         self._quants = [byte.decode('utf-8') for byte in self._file.attrs['QUANTS'][1:]]
+         units_list = [byte.decode('utf-8') for byte in self._file.attrs['UNITS'][1:]]
+         labels_list = [byte.decode('utf-8') for byte in self._file.attrs['LABELS'][1:]]
+         self._units = dict(zip(self._quants, units_list))
+         self._labels = dict(zip(self._quants, labels_list))
+
+         self._num_particles = self._file.attrs['NTRACKS'][0]
+
+         unordered_data = self._file['data'][:]
+         itermap = self._file['itermap'][:]
+
+         idxs = get_track_indexes(itermap, self._num_particles)
+         self._data = reorder_track_data(unordered_data, idxs, self._quants)
+         self._time = self._data[0][:]["t"]
+         self._num_time_iters = self._time.shape[0]
+         self._close_file()
+
+     def _load_basic_attributes(self, f: h5py.File) -> None:
+         '''Load common attributes from HDF5 file'''
+         self._dt = float(f['SIMULATION'].attrs['DT'][0])
+         self._dim = int(f['SIMULATION'].attrs['NDIMS'][0])
+         self._time = None
+         self._iter = None
+         self._name = f.attrs['NAME'][0].decode('utf-8')
+         self._type = f.attrs['TYPE'][0].decode('utf-8')
+
+     # Getters
+     @property
+     def grid(self):
+         return self._grid
+     @property
+     def data(self):
+         return self._data
+     @property
+     def units(self):
+         return self._units
+     @property
+     def labels(self):
+         return self._labels
+     @property
+     def quants(self):
+         return self._quants
+     @property
+     def num_particles(self):
+         return self._num_particles
+     @property
+     def num_time_iters(self):
+         return self._num_time_iters
+
+     # Setters
+     @data.setter
+     def data(self, data):
+         self._data = data
+
+     def __str__(self):
+         return (
+             f'{self.name}\n'
+             f'Iteration: {self.iter}\n'
+             f'Grid: {self.grid}\n'
+             f'Dimensions: {self.dim}D'
+         )
+
+     def __array__(self):
+         return np.asarray(self.data)
+
+ def reorder_track_data(unordered_data, indexes, field_names):
+     '''
+     Reorder data from an HDF5 track file so that it can be accessed more intuitively.
+
+     Parameters
+     ----------
+     unordered_data : np.array
+         The data from an HDF5 OSIRIS track file
+     indexes : list[list[int]]
+         Output of get_track_indexes(), list with the indexes associated with each particle
+     field_names : list[str]
+         Names for the quantities on the output file.
+         Recommended: field_names = [byte.decode('utf-8') for byte in file.attrs['QUANTS'][1:]]
+
+     Returns
+     -------
+     data_sorted : numpy.ndarray of shape (num_particles, num_time_iter),
+                   dtype = [(field_name, float) for field_name in field_names]
+         A structured numpy array where data is reordered according to indexes.
+     '''
+     # Initialize the sorted data structure
+     num_particles = len(indexes)
+     num_time_iter = len(indexes[0])
+     data_sorted = np.empty((num_particles, num_time_iter), dtype=[(name, float) for name in field_names])
+
+     # Fill the sorted data based on the indexes
+     for particle in range(num_particles):
+         for time_iter in range(num_time_iter):
+             index = indexes[particle][time_iter]
+             if len(unordered_data[index]) != len(field_names):
+                 raise ValueError(f"Data at index {index} has {len(unordered_data[index])} elements, "
+                                  f"but {len(field_names)} are expected.")
+             data_sorted[particle, time_iter] = tuple(unordered_data[index])
+
+     return data_sorted
+
+ def get_track_indexes(itermap, num_particles):
+     '''
+     Returns the indexes for each particle to read track data directly from the HDF5 file
+     (before it is ordered).
+
+     Parameters
+     ----------
+     itermap : np.array
+         Itermap from an HDF5 OSIRIS track file
+     num_particles : int
+         Number of particles tracked, recommended: file.attrs['NTRACKS'][0]
+
+     Returns
+     -------
+     indexes : list[list[int]]
+         Returns a list with the indexes associated with each particle,
+         shape (num_particles, num_time_iters)
+     '''
+     itermapshape = itermap.shape
+     track_indices = np.zeros(num_particles)
+
+     data_index = 0
+     indexes = [[] for _ in range(num_particles)]
+     for i in range(itermapshape[0]):
+         part_number, npoints, nstart = itermap[i, :]
+
+         indexes[part_number - 1].extend(list(range(data_index, data_index + npoints)))
+
+         data_index += npoints
+         track_indices[part_number - 1] += npoints
+
+     return indexes
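
A minimal sketch of how these two helpers fit together when reading a track file by hand, mirroring what OsirisTrackFile.__init__ does (illustrative only, not part of the packaged source; it assumes the helpers are imported from this module, the standard 'data'/'itermap' datasets and 'QUANTS'/'NTRACKS' attributes of an OSIRIS track file, and an 'ene' quantity):

import h5py

with h5py.File('MS/TRACKS/electrons-tracks.h5', 'r') as f:
    quants = [b.decode('utf-8') for b in f.attrs['QUANTS'][1:]]   # skip the leading 'tag' entry
    n_tracks = f.attrs['NTRACKS'][0]
    raw = f['data'][:]
    itermap = f['itermap'][:]

indexes = get_track_indexes(itermap, n_tracks)
tracks = reorder_track_data(raw, indexes, quants)
print(tracks.shape)          # (num_particles, num_time_iters)
print(tracks[0, :]['ene'])   # energy history of the first tracked particle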