osiris-utils: osiris_utils-1.1.4-py3-none-any.whl → osiris_utils-1.1.6-py3-none-any.whl
This diff represents the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
- osiris_utils/__init__.py +8 -2
- osiris_utils/data/data.py +316 -42
- osiris_utils/data/diagnostic.py +691 -233
- osiris_utils/data/simulation.py +30 -17
- osiris_utils/postprocessing/derivative.py +29 -49
- osiris_utils/postprocessing/fft.py +8 -14
- osiris_utils/postprocessing/field_centering.py +168 -0
- osiris_utils/postprocessing/heatflux_correction.py +193 -0
- osiris_utils/postprocessing/mft.py +14 -28
- osiris_utils/postprocessing/pressure_correction.py +171 -0
- osiris_utils/utils.py +140 -1
- {osiris_utils-1.1.4.dist-info → osiris_utils-1.1.6.dist-info}/METADATA +1 -1
- osiris_utils-1.1.6.dist-info/RECORD +25 -0
- osiris_utils-1.1.4.dist-info/RECORD +0 -22
- {osiris_utils-1.1.4.dist-info → osiris_utils-1.1.6.dist-info}/WHEEL +0 -0
- {osiris_utils-1.1.4.dist-info → osiris_utils-1.1.6.dist-info}/licenses/LICENSE.txt +0 -0
- {osiris_utils-1.1.4.dist-info → osiris_utils-1.1.6.dist-info}/top_level.txt +0 -0
osiris_utils/postprocessing/heatflux_correction.py ADDED

@@ -0,0 +1,193 @@
+from ..utils import *
+from ..data.simulation import Simulation
+from .postprocess import PostProcess
+from ..data.diagnostic import Diagnostic
+
+from .pressure_correction import *
+
+OSIRIS_H = ["q1", "q2", "q3"]
+
+class HeatfluxCorrection_Simulation(PostProcess):
+    def __init__(self, simulation):
+        super().__init__(f"HeatfluxCorrection Simulation")
+        """
+        Class to correct pressure tensor components by subtracting Reynolds stress.
+
+        Parameters
+        ----------
+        sim : Simulation
+            The simulation object.
+        heatflux : str
+            The heatflux component to center.
+        """
+        if not isinstance(simulation, Simulation):
+            raise ValueError("Simulation must be a Simulation object.")
+        self._simulation = simulation
+        self._heatflux_corrected = {}
+        self._species_handler = {}
+
+    def __getitem__(self, key):
+        if key in self._simulation._species:
+            if key not in self._species_handler:
+                self._species_handler[key] = HeatfluxCorrection_Species_Handler(self._simulation[key], self._simulation)
+            return self._species_handler[key]
+        if key not in OSIRIS_H:
+            raise ValueError(f"Invalid heatflux component {key}. Supported: {OSIRIS_H}.")
+        if key not in self._heatflux_corrected:
+            print("Weird that it got here - heatflux is always species dependent on OSIRIS")
+            self._heatflux_corrected[key] = HeatfluxCorrection_Diagnostic(self._simulation[key], self._simulation)
+        return self._heatflux_corrected[key]
+
+
+    def delete_all(self):
+        self._heatflux_corrected = {}
+
+    def delete(self, key):
+        if key in self._heatflux_corrected:
+            del self._heatflux_corrected[key]
+        else:
+            print(f"Heatflux {key} not found in simulation")
+
+    def process(self, diagnostic):
+        """Apply heatflux correction to a diagnostic"""
+        return HeatfluxCorrection_Diagnostic(diagnostic, self._simulation)
+
+class HeatfluxCorrection_Diagnostic(Diagnostic):
+    def __init__(self, diagnostic, vfl_i, Pjj_list, vfl_j_list, Pji_list):
+
+        """
+        Class to correct the pressure in the simulation.
+
+        Parameters
+        ----------
+        diagnostic : Diagnostic
+            The diagnostic object.
+        """
+        if hasattr(diagnostic, '_species'):
+            super().__init__(simulation_folder=diagnostic._simulation_folder if hasattr(diagnostic, '_simulation_folder') else None,
+                             species=diagnostic._species)
+        else:
+            super().__init__(None)
+
+        self.postprocess_name = "HFL_CORR"
+
+        if diagnostic._name not in OSIRIS_H:
+            raise ValueError(f"Invalid heatflux component {diagnostic._name}. Supported: {OSIRIS_H}")
+
+        self._diag = diagnostic
+
+        # The density and velocities are now passed as arguments (so it can doesn't depend on the simulation)
+        self._vfl_i = vfl_i
+        self._Pjj_list = Pjj_list
+        self._vfl_j_list = vfl_j_list
+        self._Pji_list = Pji_list
+
+        for attr in ['_dt', '_dx', '_ndump', '_axis', '_nx', '_x', '_grid', '_dim', '_maxiter', '_type']:
+            if hasattr(diagnostic, attr):
+                setattr(self, attr, getattr(diagnostic, attr))
+
+        self._original_name = diagnostic._name
+        self._name = diagnostic._name + "_corrected"
+
+        self._data = None
+        self._all_loaded = False
+
+    def load_all(self):
+        if self._data is not None:
+            return self._data
+
+        if not hasattr(self._diag, '_data') or self._diag._data is None:
+            self._diag.load_all()
+
+        print(f"Loading {self._species._name} {self._original_name} diagnostic")
+
+        self._vfl_i.load_all()
+
+
+        for vfl_j in self._vfl_j_list:
+            vfl_j.load_all()
+        for Pji in self._Pji_list:
+            Pji.load_all()
+        for Pjj in self._Pjj_list:
+            Pjj.load_all()
+
+        q = self._diag.data
+        vfl_i = self._vfl_i.data
+
+        trace_P = sum(Pjj.data for Pjj in self._Pjj_list)
+
+        # Sum over j: vfl_j * Pji
+        vfl_dot_Pji = sum(vfl_j.data * Pji.data for vfl_j, Pji in zip(self._vfl_j_list, self._Pji_list))
+
+        self._data = 2 * q - 0.5 * vfl_i * trace_P - vfl_dot_Pji
+        self._all_loaded = True
+
+
+        return self._data
+
+    def __getitem__(self, index):
+        """Get data at a specific index"""
+        if self._all_loaded and self._data is not None:
+            return self._data[index]
+
+        if isinstance(index, int):
+            return next(self._data_generator(index))
+        elif isinstance(index, slice):
+            start = 0 if index.start is None else index.start
+            step = 1 if index.step is None else index.step
+            stop = self._diag._maxiter if index.stop is None else index.stop
+            return np.array([next(self._data_generator(i)) for i in range(start, stop, step)])
+        else:
+            raise ValueError("Invalid index type. Use int or slice.")
+
+    def _data_generator(self, index):
+        q = self._diag[index]
+        vfl_i = self._vfl_i[index]
+        trace_P = sum(Pjj[index] for Pjj in self._Pjj_list)
+        vfl_dot_Pji = sum(vfl_j[index] * Pji[index] for vfl_j, Pji in zip(self._vfl_j_list, self._Pji_list))
+        yield 2 * q - 0.5 * vfl_i * trace_P - vfl_dot_Pji
+
+class HeatfluxCorrection_Species_Handler:
+    """
+    Class to handle heatflux correction for a species.
+    Acts as a wrapper for the HeatfluxCorrection_Diagnostic class.
+
+    Not intended to be used directly, but through the HeatfluxCorrection_Simulation class.
+
+    Parameters
+    ----------
+    species_handler : Species_Handler
+        The species handler object.
+    simulation : Simulation
+        The simulation object.
+    """
+    def __init__(self, species_handler, simulation):
+        self._species_handler = species_handler
+        self._simulation = simulation
+        self._heatflux_corrected = {}
+
+    def __getitem__(self, key):
+        if key not in self._heatflux_corrected:
+            diag = self._species_handler[key]
+
+            # Velocities alwayes depend on the species so this can be done here
+
+            i = int(key[-1]) # Get i from 'q1', 'q2', etc.
+
+            vfl_i = self._species_handler[f"vfl{i}"]
+
+            # Load trace(P): sum over Pjj
+            Pjj_list = [self._species_handler[f"P{j}{j}"] for j in range(1, diag._dim + 1)]
+
+            # Compute quantities for vfl_j * P_{ji}
+            vfl_j_list = [self._species_handler[f"vfl{j}"] for j in range(1, diag._dim + 1)]
+            Pji_list = [PressureCorrection_Simulation(self._simulation)[diag._species._name][f"P{j}{i}"] for j in range(1, diag._dim + 1)]
+
+            self._heatflux_corrected[key] = HeatfluxCorrection_Diagnostic(
+                diag,
+                vfl_i,
+                Pjj_list,
+                vfl_j_list,
+                Pji_list
+            )
+        return self._heatflux_corrected[key]
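For orientation, a minimal usage sketch of the new heat-flux correction follows, pieced together from the `__getitem__` chain and `load_all()` shown above. The simulation path and the species name `electrons` are placeholders, and the `Simulation('electrons', 'path/to/simulation')` call is borrowed from the doctests removed from `mft.py` below; treat this as an illustration, not documented API.

```python
# Illustrative sketch only -- paths, species name and constructor call are assumptions.
from osiris_utils.data.simulation import Simulation
from osiris_utils.postprocessing.heatflux_correction import HeatfluxCorrection_Simulation

sim = Simulation('electrons', 'path/to/simulation')

hfc = HeatfluxCorrection_Simulation(sim)
q1_corr = hfc['electrons']['q1']   # species handler -> HeatfluxCorrection_Diagnostic for q1
q1_corr.load_all()                 # evaluates 2*q - 0.5*vfl_1*tr(P) - sum_j vfl_j*P_j1
single_dump = q1_corr[0]           # or index lazily through _data_generator
```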
osiris_utils/postprocessing/mft.py CHANGED

@@ -15,11 +15,6 @@ class MeanFieldTheory_Simulation(PostProcess):
     mft_axis : int
         The axis to compute the mean field theory.

-    Example
-    -------
-    >>> sim = Simulation('electrons', 'path/to/simulation')
-    >>> mft = MeanFieldTheory(sim, 1)
-    >>> mft_e1 = mft['e1']
     """

     def __init__(self, simulation, mft_axis=None):
@@ -65,19 +60,14 @@ class MFT_Diagnostic(Diagnostic):
     mft_axis : int
         The axis to compute mean field theory along.

-
-    -------
-    >>> sim = Simulation('electrons', 'path/to/simulation')
-    >>> diag = sim['e1']
-    >>> mft = MFT_Diagnostic(diag, 1)
-    >>> avg = mft['avg']
-    >>> delta = mft['delta']
+
     """

     def __init__(self, diagnostic, mft_axis):
         # Initialize using parent's __init__ with the same species
         if hasattr(diagnostic, '_species'):
-            super().__init__(
+            super().__init__(simulation_folder=diagnostic._simulation_folder if hasattr(diagnostic, '_simulation_folder') else None,
+                             species=diagnostic._species)
         else:
             super().__init__(None)

@@ -91,7 +81,7 @@ class MFT_Diagnostic(Diagnostic):
         self._components = {}

         # Copy all relevant attributes from diagnostic
-        for attr in ['_dt', '_dx', '_ndump', '_axis', '_nx', '_x', '_grid', '_dim', '_maxiter']:
+        for attr in ['_dt', '_dx', '_ndump', '_axis', '_nx', '_x', '_grid', '_dim', '_maxiter', '_tunits', '_type']:
             if hasattr(diagnostic, attr):
                 setattr(self, attr, getattr(diagnostic, attr))

@@ -152,23 +142,21 @@ class MFT_Diagnostic_Average(Diagnostic):
     mft_axis : int
         The axis to compute the mean field theory.

-    Example
-    -------
-    >>> sim = Simulation('electrons', 'path/to/simulation')
-    >>> diag = sim['e1']
-    >>> avg = MFT_Diagnostic_Average(diag, 1)
     """

     def __init__(self, diagnostic, mft_axis):
         # Initialize with the same species as the diagnostic
         if hasattr(diagnostic, '_species'):
-            super().__init__(
+            super().__init__(simulation_folder=diagnostic._simulation_folder if hasattr(diagnostic, '_simulation_folder') else None,
+                             species=diagnostic._species)
         else:
             super().__init__(None)

         if mft_axis is None:
             raise ValueError("Mean field theory axis must be specified.")

+        self.postprocess_name = f"MFT_AVG"
+
         self._name = f"MFT_avg[{diagnostic._name}, {mft_axis}]"
         self._diag = diagnostic
         self._mft_axis = mft_axis
@@ -176,7 +164,7 @@ class MFT_Diagnostic_Average(Diagnostic):
         self._all_loaded = False

         # Copy all relevant attributes from diagnostic
-        for attr in ['_dt', '_dx', '_ndump', '_axis', '_nx', '_x', '_grid', '_dim', '_maxiter']:
+        for attr in ['_dt', '_dx', '_ndump', '_axis', '_nx', '_x', '_grid', '_dim', '_maxiter', '_type']:
             if hasattr(diagnostic, attr):
                 setattr(self, attr, getattr(diagnostic, attr))

@@ -237,23 +225,21 @@ class MFT_Diagnostic_Fluctuations(Diagnostic):
     mft_axis : int
         The axis to compute the mean field theory.

-    Example
-    -------
-    >>> sim = Simulation('electrons', 'path/to/simulation')
-    >>> diag = sim['e1']
-    >>> delta = MFT_Diagnostic_Fluctuations(diag, 1)
     """

     def __init__(self, diagnostic, mft_axis):
         # Initialize with the same species as the diagnostic
         if hasattr(diagnostic, '_species'):
-            super().__init__(
+            super().__init__(simulation_folder=diagnostic._simulation_folder if hasattr(diagnostic, '_simulation_folder') else None,
+                             species=diagnostic._species)
         else:
             super().__init__(None)

         if mft_axis is None:
             raise ValueError("Mean field theory axis must be specified.")

+        self.postprocess_name = f"MFT_FLT"
+
         self._name = f"MFT_delta[{diagnostic._name}, {mft_axis}]"
         self._diag = diagnostic
         self._mft_axis = mft_axis
@@ -261,7 +247,7 @@ class MFT_Diagnostic_Fluctuations(Diagnostic):
         self._all_loaded = False

         # Copy all relevant attributes from diagnostic
-        for attr in ['_dt', '_dx', '_ndump', '_axis', '_nx', '_x', '_grid', '_dim', '_maxiter']:
+        for attr in ['_dt', '_dx', '_ndump', '_axis', '_nx', '_x', '_grid', '_dim', '_maxiter', '_type']:
             if hasattr(diagnostic, attr):
                 setattr(self, attr, getattr(diagnostic, attr))

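The doctests deleted above were the only inline usage notes in `mft.py`; the call pattern they documented is reproduced below. The path is a placeholder, and the `'avg'`/`'delta'` keys are assumed to still be exposed by `MFT_Diagnostic` as in the removed example.

```python
# Reconstructed from the docstring examples removed in this diff; path is a placeholder.
from osiris_utils.data.simulation import Simulation
from osiris_utils.postprocessing.mft import MeanFieldTheory_Simulation

sim = Simulation('electrons', 'path/to/simulation')
mft = MeanFieldTheory_Simulation(sim, 1)   # the removed doctest wrote MeanFieldTheory(sim, 1)
mft_e1 = mft['e1']                         # MFT_Diagnostic wrapper around e1
avg = mft_e1['avg']                        # mean-field component
delta = mft_e1['delta']                    # fluctuating component
```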
osiris_utils/postprocessing/pressure_correction.py ADDED

@@ -0,0 +1,171 @@
+from ..utils import *
+from ..data.simulation import Simulation
+from .postprocess import PostProcess
+from ..data.diagnostic import Diagnostic
+
+OSIRIS_P = ["P11", "P12", "P13", "P21", "P22", "P23", "P31", "P32", "P33"]
+
+class PressureCorrection_Simulation(PostProcess):
+    def __init__(self, simulation):
+        super().__init__(f"PressureCorrection Simulation")
+        """
+        Class to correct pressure tensor components by subtracting Reynolds stress.
+
+        Parameters
+        ----------
+        sim : Simulation
+            The simulation object.
+        pressure : str
+            The pressure component to center.
+        """
+        if not isinstance(simulation, Simulation):
+            raise ValueError("Simulation must be a Simulation object.")
+        self._simulation = simulation
+        self._pressure_corrected = {}
+        self._species_handler = {}
+
+    def __getitem__(self, key):
+        if key in self._simulation._species:
+            if key not in self._species_handler:
+                self._species_handler[key] = PressureCorrection_Species_Handler(self._simulation[key])
+            return self._species_handler[key]
+        if key not in OSIRIS_P:
+            raise ValueError(f"Invalid pressure component {key}. Supported: {OSIRIS_P}.")
+        if key not in self._pressure_corrected:
+            print("Weird that it got here - pressure is always species dependent on OSIRIS")
+            self._pressure_corrected[key] = PressureCorrection_Diagnostic(self._simulation[key], self._simulation)
+        return self._pressure_corrected[key]
+
+
+    def delete_all(self):
+        self._pressure_corrected = {}
+
+    def delete(self, key):
+        if key in self._pressure_corrected:
+            del self._pressure_corrected[key]
+        else:
+            print(f"Pressure {key} not found in simulation")
+
+    def process(self, diagnostic):
+        """Apply pressure correction to a diagnostic"""
+        return PressureCorrection_Diagnostic(diagnostic, self._simulation)
+
+class PressureCorrection_Diagnostic(Diagnostic):
+    def __init__(self, diagnostic, n, ufl_j, vfl_k):
+
+        """
+        Class to correct the pressure in the simulation.
+
+        Parameters
+        ----------
+        diagnostic : Diagnostic
+            The diagnostic object.
+        """
+        if hasattr(diagnostic, '_species'):
+            super().__init__(simulation_folder=diagnostic._simulation_folder if hasattr(diagnostic, '_simulation_folder') else None,
+                             species=diagnostic._species)
+        else:
+            super().__init__(None)
+
+        self.postprocess_name = "P_CORR"
+
+        if diagnostic._name not in OSIRIS_P:
+            raise ValueError(f"Invalid pressure component {diagnostic._name}. Supported: {OSIRIS_P}")
+
+        self._diag = diagnostic
+
+        # The density and velocities are now passed as arguments (so it can doesn't depend on the simulation)
+        self._n = n
+        self._ufl_j = ufl_j
+        self._vfl_k = vfl_k
+
+        for attr in ['_dt', '_dx', '_ndump', '_axis', '_nx', '_x', '_grid', '_dim', '_maxiter', '_type']:
+            if hasattr(diagnostic, attr):
+                setattr(self, attr, getattr(diagnostic, attr))
+
+        self._original_name = diagnostic._name
+        self._name = diagnostic._name + "_corrected"
+
+        self._data = None
+        self._all_loaded = False
+
+    def load_all(self):
+        if self._data is not None:
+            return self._data
+
+        if not hasattr(self._diag, '_data') or self._diag._data is None:
+            self._diag.load_all()
+
+        print(f"Loading {self._species._name} {self._original_name} diagnostic")
+        self._n.load_all()
+        self._ufl_j.load_all()
+        self._vfl_k.load_all()
+
+        # Then access the data
+        n = self._n.data
+        u = self._ufl_j.data
+        v = self._vfl_k.data
+
+        self._data = self._diag.data - n * v * u
+        self._all_loaded = True
+
+        # Unload the data to save memory
+        # self._n.unload()
+        # self._ufl_j.unload()
+        # self._vfl_k.unload()
+
+        return self._data
+
+    def __getitem__(self, index):
+        """Get data at a specific index"""
+        if self._all_loaded and self._data is not None:
+            return self._data[index]
+
+        if isinstance(index, int):
+            return next(self._data_generator(index))
+        elif isinstance(index, slice):
+            start = 0 if index.start is None else index.start
+            step = 1 if index.step is None else index.step
+            stop = self._diag._maxiter if index.stop is None else index.stop
+            return np.array([next(self._data_generator(i)) for i in range(start, stop, step)])
+        else:
+            raise ValueError("Invalid index type. Use int or slice.")
+
+    def _data_generator(self, index):
+        yield self._diag[index] - self._n[index] * self._vfl_k[index] * self._ufl_j[index]
+
+class PressureCorrection_Species_Handler:
+    """
+    Class to handle pressure correction for a species.
+    Acts as a wrapper for the PressureCorrection_Diagnostic class.
+
+    Not intended to be used directly, but through the PressureCorrection_Simulation class.
+
+    Parameters
+    ----------
+    species_handler : Species_Handler
+        The species handler object.
+    type : str
+        The type of derivative to compute. Options are: 't', 'x1', 'x2', 'x3', 'xx', 'xt' and 'tx'.
+    axis : int or tuple
+        The axis to compute the derivative. Only used for 'xx', 'xt' and 'tx' types.
+    """
+    def __init__(self, species_handler):
+        self._species_handler = species_handler
+        self._pressure_corrected = {}
+
+    def __getitem__(self, key):
+        if key not in self._pressure_corrected:
+            diag = self._species_handler[key]
+
+            # Density and velocities alwayes depend on the species so this can be done here
+
+            n = self._species_handler["n"]
+            self._j, self._k = key[-2], key[-1]
+            try:
+                ufl = self._species_handler[f"ufl{self._j}"]
+            except:
+                ufl = self._species_handler[f"vfl{self._j}"]
+            vfl = self._species_handler[f"vfl{self._k}"]
+            self._pressure_corrected[key] = PressureCorrection_Diagnostic(diag, n, ufl, vfl)
+        return self._pressure_corrected[key]
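Analogously to the heat-flux module, here is a hedged sketch of driving the pressure correction, which subtracts the Reynolds-stress term n * u_j * v_k from a tensor component P_jk (see `load_all()` above). The path and species name are placeholders.

```python
# Illustrative sketch only -- paths and species name are assumptions.
from osiris_utils.data.simulation import Simulation
from osiris_utils.postprocessing.pressure_correction import PressureCorrection_Simulation

sim = Simulation('electrons', 'path/to/simulation')

pc = PressureCorrection_Simulation(sim)
P12_corr = pc['electrons']['P12']   # handler picks n, ufl1 (falling back to vfl1) and vfl2
P12_corr.load_all()                 # computes P12 - n * vfl2 * ufl1
snapshot = P12_corr[10]             # lazy per-dump access through _data_generator
```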
osiris_utils/utils.py CHANGED

@@ -4,6 +4,7 @@ import matplotlib.pyplot as plt
 import matplotlib.animation as animation
 import scipy
 import pandas as pd
+from datetime import datetime

 def courant2D(dx, dy):
     '''
@@ -141,4 +142,142 @@ def read_data(filename, option='numpy'):
         Dim: 2D.
         The data.
     '''
-    return np.loadtxt(filename) if option == 'numpy' else pd.read_csv(filename).values
+    return np.loadtxt(filename) if option == 'numpy' else pd.read_csv(filename).values
+
+
+def convert_tracks(filename_in):
+    '''
+    Converts a new OSIRIS track file aka IDL-formatted aka tracks-2 to an older format that is more human-readable.
+    In the old format, each particle is stored in a separate folder, with datasets for each quantity.
+    The function reads data from the input file, processes it, and writes it to a new file with the suffix "-v2".
+
+    code from https://github.com/GoLP-IST/RaDi-x/blob/main/tools/convert_idl_tracks_py3.py
+
+    Parameters
+    ----------
+    filename_in : str
+        The path to the trackfile.
+
+    Returns
+    -------
+    The output file will be in the same folder as the input file with the same name with \"v2\" added
+
+    '''
+
+    try:
+        file_in = h5py.File(filename_in, 'r')
+    except IOError:
+        print('cannot open ' + filename_in)
+        exit()
+
+    # read data from file
+    data = file_in['data'][:]
+    itermap = file_in['itermap'][:]
+    ntracks = file_in.attrs['NTRACKS'][0]
+    niter = file_in.attrs['NITER'][0]
+    quants = file_in.attrs['QUANTS'][:]
+    file_in_attr_keys = file_in.attrs.keys()
+    sim_attr_keys = file_in['SIMULATION'].attrs.keys()
+    nquants = len(quants)
+
+    # construct file out for new format
+    filename_out = filename_in[:-3] + '-v2' + filename_in[-3:]
+    file_out = h5py.File(filename_out,'w')
+
+    # copy attrs from file_in
+    for item in file_in_attr_keys:
+        file_out.attrs[item] = file_in.attrs[item]
+    for item in sim_attr_keys:
+        file_out.attrs[item] = file_in['SIMULATION'].attrs[item]
+
+    # first pass -- find total size of each track
+    #----------------------------------------#
+    sizes = np.zeros(ntracks)
+
+    itermapshape = itermap.shape
+    for i in range(itermapshape[0]):
+        part_number,npoints,nstart = itermap[i,:]
+        sizes[part_number-1] += npoints
+
+    # initialize ordered data buffer
+    #----------------------------------------#
+    ordered_data = []
+    for i in range(ntracks):
+        ordered_data.append(np.zeros((int(sizes[i]),nquants)))
+    #----------------------------------------#
+
+
+
+    # assign tracks to ordered data from file_in data
+    #----------------------------------------#
+    track_indices = np.zeros(ntracks)
+    data_index = 0
+
+    for i in range(itermapshape[0]):
+        part_number,npoints,nstart = itermap[i,:]
+        track_index = int(track_indices[part_number-1])
+
+
+        ordered_data[part_number-1][track_index : track_index + npoints,0] \
+            = nstart + np.arange(npoints) * niter
+
+        ordered_data[part_number-1][track_index : track_index + npoints,1:] \
+            = data[data_index:data_index + npoints ,:]
+
+        data_index += npoints
+        track_indices[part_number-1] += npoints
+
+    #----------------------------------------#
+
+    # write to file out
+    for i in range(ntracks):
+        group = file_out.create_group(str(i+1))
+        for j in range(nquants):
+            if(j==0):
+                group.create_dataset(quants[j], data=np.array(ordered_data[i][:, j], dtype=int))
+            else:
+                group.create_dataset(quants[j], data=ordered_data[i][:, j])
+
+    file_out.close()
+    file_in.close()
+    print("Track file converted to the old, more readable format: ", filename_out)
+    return filename_out
+
+def create_file_tags(filename, tags_array):
+    '''
+    Function to write a file_tags file from a (number_of_tags, 2) NumPy array of tags.
+    this file is used to choose particles for the OSIRIS track diagnostic.
+
+    Parameters
+    ----------
+    filename : str
+        Path to the output file where tags will be stored.
+    tags_array: np.ndarray
+        shape (number_of_tags, 2), containing particle tags
+
+    Returns
+    -------
+    file_tags file with path \"filename\" to be used for the OSIRIS track diagnostic.
+
+    Notes
+    ------
+    The first element of the tag of a particle that is already being tracked is negative,
+    so we apply the absolute function when generating the file
+
+    '''
+
+    # In case the particles chosen were already being tracked
+    tags_array[:, 0] = np.abs(tags_array[:, 0])
+    tags_array = tags_array[np.lexsort((tags_array[:, 1], tags_array[:, 0]))]
+    num_tags = tags_array.shape[0]
+
+    with open(filename, 'w') as file:
+        file.write("! particle tag list\n")
+        file.write(f"! generated on {datetime.now().strftime('%a %b %d %H:%M:%S %Y')}\n")
+        file.write("! number of tags\n")
+        file.write(f" {num_tags}\n")
+        file.write("! particle tag list\n")
+
+        for i in range(num_tags):
+            file.write(f" {tags_array[i, 0]:<6}{tags_array[i, 1]:>6}\n")
+    return filename
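The two helpers added to `utils.py` are standalone file utilities. A short sketch of calling them follows; the file names are hypothetical and `tags` is a made-up array, but the output matches the `file.write` calls and the `-v2` suffix logic shown above.

```python
import numpy as np
from osiris_utils.utils import convert_tracks, create_file_tags

# Hypothetical inputs -- substitute your own track file and tag selection.
tags = np.array([[1, 5234],
                 [-2, 117],   # negative first element: particle already being tracked
                 [1, 88]])
create_file_tags('electrons.tags', tags)           # writes the "! particle tag list" text file

convert_tracks('MS/TRACKS/electrons-tracks.h5')    # writes MS/TRACKS/electrons-tracks-v2.h5
```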
{osiris_utils-1.1.4.dist-info → osiris_utils-1.1.6.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: osiris_utils
-Version: 1.1.4
+Version: 1.1.6
 Summary: Utilities to manipulate and visualize OSIRIS framework output data
 Author: ['João Pedro Ferreira Biu', 'João Cândido', 'Diogo Carvalho']
 Author-email: ['joaopedrofbiu@tecnico.ulisboa.pt']
osiris_utils-1.1.6.dist-info/RECORD ADDED

@@ -0,0 +1,25 @@
+osiris_utils/__init__.py,sha256=vnmri235UkoAxKZMf9111dMb94a4z05XAkszbGSyKgo,1191
+osiris_utils/utils.py,sha256=zg-5pOVOieVHXYeNWtCq921Q4PmXVArLInJMd8JURpY,8137
+osiris_utils/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+osiris_utils/data/data.py,sha256=u-0qcPXyTK187j8kGHNgTBgJKX90_YA8jUYno87q9n0,25619
+osiris_utils/data/diagnostic.py,sha256=d9xfTTMFq4p5kVirw94Leq2Jak_gGMjw6PHG2PmRUtE,49179
+osiris_utils/data/simulation.py,sha256=bvJ6UHIzUNUWlWqZbeg60i-Dscl7x6MRZXGfsy8xkWE,6878
+osiris_utils/decks/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+osiris_utils/decks/decks.py,sha256=Ug1_fSfZL9iJwn7dnTvHM0k1Zc4kGpI427QvO0V2pwg,10173
+osiris_utils/decks/species.py,sha256=uZA1oMSltofc2m1s3rK__wjxXfTAQ_3wB2aKRG_PmKo,1066
+osiris_utils/gui/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+osiris_utils/gui/gui.py,sha256=s4not0GzHI3CrA4uQVnjt9jFavkVAeGbQ6rJpM4jir4,10538
+osiris_utils/postprocessing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+osiris_utils/postprocessing/derivative.py,sha256=p1jhy0Ey8w-AYLVBVGgjpYw19cmLPYV_gdRMHtNSR7w,9602
+osiris_utils/postprocessing/fft.py,sha256=Hif7nQN9RcnpnD_MKxvK5K6DgmINSjgmgYcktIrKdo8,9037
+osiris_utils/postprocessing/field_centering.py,sha256=1i6Fue9PhuED2VxBUq1ggHO3MTSlXjhkFvqh1pb_-Y0,6788
+osiris_utils/postprocessing/heatflux_correction.py,sha256=5_XaDGdB5xP8gDJF9MVIlHN5tggbYHWBgetQvEmAnKQ,7055
+osiris_utils/postprocessing/mft.py,sha256=Ryx0DW8Mb7f0ikf2HfM_T_m2WqrCgcEkMJNZBLZ5qAY,12399
+osiris_utils/postprocessing/mft_for_gridfile.py,sha256=ZaLgwsND9nbD-v-JfCZYxh7-dz3RA-IILwBjZnZGCiE,1522
+osiris_utils/postprocessing/postprocess.py,sha256=f2ZXLnAvLpsLFd-Pygb_sYxwkU4zn1OoF_byVKYAPII,1115
+osiris_utils/postprocessing/pressure_correction.py,sha256=9ZK6BTaGAa8W1ltpXsQf6aTH-xfYJ38076gJToxUFgQ,6345
+osiris_utils-1.1.6.dist-info/licenses/LICENSE.txt,sha256=Cawy2v7wKc7n8yL8guFu-cH9sQw9r1gll1pEFPFAB-Q,1084
+osiris_utils-1.1.6.dist-info/METADATA,sha256=hLLfQtRh4ZEoqt8-LlDHiRg7hU6dWyx65guAXVDu6Fc,3070
+osiris_utils-1.1.6.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+osiris_utils-1.1.6.dist-info/top_level.txt,sha256=mM-_dX5fjzIKB7te655PhZOrPACVY-bJmiASCqW1eOA,13
+osiris_utils-1.1.6.dist-info/RECORD,,