osiris-utils 1.1.4__py3-none-any.whl → 1.1.6__py3-none-any.whl
This diff compares the contents of two publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.
- osiris_utils/__init__.py +8 -2
- osiris_utils/data/data.py +316 -42
- osiris_utils/data/diagnostic.py +691 -233
- osiris_utils/data/simulation.py +30 -17
- osiris_utils/postprocessing/derivative.py +29 -49
- osiris_utils/postprocessing/fft.py +8 -14
- osiris_utils/postprocessing/field_centering.py +168 -0
- osiris_utils/postprocessing/heatflux_correction.py +193 -0
- osiris_utils/postprocessing/mft.py +14 -28
- osiris_utils/postprocessing/pressure_correction.py +171 -0
- osiris_utils/utils.py +140 -1
- {osiris_utils-1.1.4.dist-info → osiris_utils-1.1.6.dist-info}/METADATA +1 -1
- osiris_utils-1.1.6.dist-info/RECORD +25 -0
- osiris_utils-1.1.4.dist-info/RECORD +0 -22
- {osiris_utils-1.1.4.dist-info → osiris_utils-1.1.6.dist-info}/WHEEL +0 -0
- {osiris_utils-1.1.4.dist-info → osiris_utils-1.1.6.dist-info}/licenses/LICENSE.txt +0 -0
- {osiris_utils-1.1.4.dist-info → osiris_utils-1.1.6.dist-info}/top_level.txt +0 -0
osiris_utils/data/diagnostic.py
CHANGED
@@ -4,10 +4,14 @@ of just a single file. This is what this file is for - deal with ''folders'' of
 
 Took some inspiration from Diogo and Madox's work.
 
-This would be awsome to compute time derivatives.
+This would be awsome to compute time derivatives.
 """
+
+from torch import isin
 import numpy as np
 import os
+import glob
+import h5py
 
 from .data import OsirisGridFile
 import tqdm
@@ -16,15 +20,17 @@ import warnings
 from typing import Literal
 from ..decks.decks import InputDeckIO, deval
 
+
 def get_dimension_from_deck(deck: InputDeckIO) -> int:
     for dim in range(1, 4):
         try:
-            deck.get_param(section=
+            deck.get_param(section="grid", param=f"nx_p(1:{dim})")
             return dim
         except:
             continue
-
-    raise Exception(
+
+    raise Exception("Error parsing grid dimension")
+
 
 OSIRIS_DENSITY = ["n"]
 OSIRIS_SPECIE_REPORTS = ["charge", "q1", "q2", "q3", "j1", "j2", "j3"]
@@ -48,9 +54,48 @@ OSIRIS_SPECIE_REP_UDIST = [
     "T23",
     "T33",
 ]
-OSIRIS_FLD = [
-
-
+OSIRIS_FLD = [
+    "e1",
+    "e2",
+    "e3",
+    "b1",
+    "b2",
+    "b3",
+    "part_e1",
+    "part_e2",
+    "epart_3",
+    "part_b1",
+    "part_b2",
+    "part_b3",
+    "ext_e1",
+    "ext_e2",
+    "ext_e3",
+    "ext_b1",
+    "ext_b2",
+    "ext_b3",
+]
+OSIRIS_PHA = [
+    "p1x1",
+    "p1x2",
+    "p1x3",
+    "p2x1",
+    "p2x2",
+    "p2x3",
+    "p3x1",
+    "p3x2",
+    "p3x3",
+    "gammax1",
+    "gammax2",
+    "gammax3",
+] # there may be more that I don't know
+OSIRIS_ALL = (
+    OSIRIS_DENSITY
+    + OSIRIS_SPECIE_REPORTS
+    + OSIRIS_SPECIE_REP_UDIST
+    + OSIRIS_FLD
+    + OSIRIS_PHA
+)
+
 
 def which_quantities():
     print("Available quantities:")
@@ -134,20 +179,9 @@ class Diagnostic:
         Plot a 3D scatter plot of the diagnostic data.
     time(index)
         Get the time for a given index.
-
-    Examples
-    --------
-    >>> sim = Simulation("electrons", "path/to/simulation")
-    >>> sim.get_quantity("charge")
-    >>> sim.load_all()
-    >>> print(sim.data.shape)
-    (100, 100, 100)
-
-    >>> sim = Simulation("electrons", "path/to/simulation")
-    >>> sim.get_quantity("charge")
-    >>> sim[0]
-    array with the data for the first timestep
+
     """
+
     def __init__(self, simulation_folder=None, species=None, input_deck=None):
         self._species = species if species else None
 
@@ -164,11 +198,13 @@ class Diagnostic:
         self._ndump = None
         self._maxiter = None
         self._tunits = None
-
+
         if simulation_folder:
             self._simulation_folder = simulation_folder
             if not os.path.isdir(simulation_folder):
-                raise FileNotFoundError(
+                raise FileNotFoundError(
+                    f"Simulation folder {simulation_folder} not found."
+                )
         else:
             self._simulation_folder = None
 
@@ -180,7 +216,7 @@ class Diagnostic:
 
         self._all_loaded = False
         self._quantity = None
-
+
     def get_quantity(self, quantity):
         """
         Get the data for a given quantity.
@@ -193,7 +229,9 @@ class Diagnostic:
         self._quantity = quantity
 
         if self._quantity not in OSIRIS_ALL:
-            raise ValueError(
+            raise ValueError(
+                f"Invalid quantity {self._quantity}. Use which_quantities() to see the available quantities."
+            )
         if self._quantity in OSIRIS_SPECIE_REP_UDIST:
             if self._species is None:
                 raise ValueError("Species not set.")
@@ -213,45 +251,56 @@ class Diagnostic:
                 raise ValueError("Species not set.")
             self._get_density(self._species.name, "charge")
         else:
-            raise ValueError(
+            raise ValueError(
+                f"Invalid quantity {self._quantity}. Or it's not implemented yet (this may happen for phase space quantities)."
+            )
 
     def _get_moment(self, species, moment):
         if self._simulation_folder is None:
-            raise ValueError(
+            raise ValueError(
+                "Simulation folder not set. If you're using CustomDiagnostic, this method is not available."
+            )
         self._path = f"{self._simulation_folder}/MS/UDIST/{species}/{moment}/"
-        self._file_template =
-        self._maxiter = len(
+        self._file_template = glob.glob(f"{self._path}/*.h5")[0][:-9]
+        self._maxiter = len(glob.glob(f"{self._path}/*.h5"))
         self._load_attributes(self._file_template, self._input_deck)
-
+
     def _get_field(self, field):
         if self._simulation_folder is None:
-            raise ValueError(
+            raise ValueError(
+                "Simulation folder not set. If you're using CustomDiagnostic, this method is not available."
+            )
         self._path = f"{self._simulation_folder}/MS/FLD/{field}/"
-        self._file_template =
-        self._maxiter = len(
+        self._file_template = glob.glob(f"{self._path}/*.h5")[0][:-9]
+        self._maxiter = len(glob.glob(f"{self._path}/*.h5"))
         self._load_attributes(self._file_template, self._input_deck)
-
+
     def _get_density(self, species, quantity):
         if self._simulation_folder is None:
-            raise ValueError(
+            raise ValueError(
+                "Simulation folder not set. If you're using CustomDiagnostic, this method is not available."
+            )
         self._path = f"{self._simulation_folder}/MS/DENSITY/{species}/{quantity}/"
-        self._file_template =
-        self._maxiter = len(
+        self._file_template = glob.glob(f"{self._path}/*.h5")[0][:-9]
+        self._maxiter = len(glob.glob(f"{self._path}/*.h5"))
         self._load_attributes(self._file_template, self._input_deck)
 
     def _get_phase_space(self, species, type):
         if self._simulation_folder is None:
-            raise ValueError(
+            raise ValueError(
+                "Simulation folder not set. If you're using CustomDiagnostic, this method is not available."
+            )
         self._path = f"{self._simulation_folder}/MS/PHA/{type}/{species}/"
-        self._file_template =
-        self._maxiter = len(
+        self._file_template = glob.glob(f"{self._path}/*.h5")[0][:-9]
+        self._maxiter = len(glob.glob(f"{self._path}/*.h5"))
         self._load_attributes(self._file_template, self._input_deck)
 
-    def _load_attributes(
+    def _load_attributes(
+        self, file_template, input_deck
+    ): # this will be replaced by reading the input deck
         # This can go wrong! NDUMP
         # if input_deck is not None:
         # self._dt = float(input_deck["time_step"][0]["dt"])
-        # self._ndump = int(input_deck["time_step"][0]["ndump"])
         # self._dim = get_dimension_from_deck(input_deck)
         # self._nx = np.array(list(map(int, input_deck["grid"][0][f"nx_p(1:{self._dim})"].split(','))))
         # xmin = [deval(input_deck["space"][0][f"xmin(1:{self._dim})"].split(',')[i]) for i in range(self._dim)]
@@ -260,30 +309,46 @@ class Diagnostic:
         # self._dx = (self._grid[:,1] - self._grid[:,0])/self._nx
         # self._x = [np.arange(self._grid[i,0], self._grid[i,1], self._dx[i]) for i in range(self._dim)]
 
+        self._ndump = int(input_deck["time_step"][0]["ndump"])
+
         try:
-
-
-            self.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            # Try files 000001, 000002, etc. until one is found
+            found_file = False
+            for file_num in range(1, self._maxiter + 1):
+                path_file = os.path.join(file_template + f"{file_num:06d}.h5")
+                if os.path.exists(path_file):
+                    dump = OsirisGridFile(path_file)
+                    self._dx = dump.dx
+                    self._nx = dump.nx
+                    self._x = dump.x
+                    self._dt = dump.dt
+                    self._grid = dump.grid
+                    self._axis = dump.axis
+                    self._units = dump.units
+                    self._name = dump.name
+                    self._label = dump.label
+                    self._dim = dump.dim
+                    # self._iter = dump.iter
+                    self._tunits = dump.time[1]
+                    self._type = dump.type
+                    found_file = True
+                    break
+
+            if not found_file:
+                warnings.warn(f"No valid data files found in {self._path} to read metadata from.")
+        except Exception as e:
+            warnings.warn(f"Error loading diagnostic attributes: {str(e)}. Please verify it there's any file in the folder.")
+
     def _data_generator(self, index):
         if self._simulation_folder is None:
             raise ValueError("Simulation folder not set.")
-        file = os.path.join(self.
+        file = os.path.join(self._file_template + f"{index:06d}.h5")
         data_object = OsirisGridFile(file)
-        yield
+        yield (
+            data_object.data
+            if self._quantity not in OSIRIS_DENSITY
+            else self._species.rqm * data_object.data
+        )
 
     def load_all(self):
         """
@@ -298,39 +363,47 @@ class Diagnostic:
         if self._all_loaded and self._data is not None:
             print("Data already loaded.")
             return self._data
-
+
         # If this is a derived diagnostic without files
-        if self
+        if hasattr(self, "postprocess_name") or hasattr(self, "created_diagnostic_name"):
            # If it has a data generator but no direct files
            try:
-                print(
+                print(
+                    "This appears to be a derived diagnostic. Loading data from generators..."
+                )
                # Get the maximum size from the diagnostic attributes
-                if hasattr(self,
+                if hasattr(self, "_maxiter") and self._maxiter is not None:
                    size = self._maxiter
                else:
                    # Try to infer from a related diagnostic
-                    if hasattr(self,
+                    if hasattr(self, "_diag") and hasattr(self._diag, "_maxiter"):
                        size = self._diag._maxiter
                    else:
                        # Default to a reasonable number if we can't determine
                        size = 100
-                        print(
-
+                        print(
+                            f"Warning: Could not determine timestep count, using {size}."
+                        )
+
                # Load data for all timesteps using the generator - this may take a while
-                self._data = np.stack(
+                self._data = np.stack(
+                    [self[i] for i in tqdm.tqdm(range(size), desc="Loading data")]
+                )
                self._all_loaded = True
                return self._data
-
+
            except Exception as e:
                raise ValueError(f"Could not load derived diagnostic data: {str(e)}")
-
+
         # Original implementation for file-based diagnostics
         print("Loading all data from files. This may take a while.")
-        size = len(sorted(
-        self._data = np.stack(
+        size = len(sorted(glob.glob(f"{self._path}/*.h5")))
+        self._data = np.stack(
+            [self[i] for i in tqdm.tqdm(range(size), desc="Loading data")]
+        )
         self._all_loaded = True
         return self._data
-
+
     def unload(self):
         """
         Unload data from memory. This is useful to free memory when the data is not needed anymore.
@@ -352,108 +425,147 @@ class Diagnostic:
         # For derived diagnostics with cached data
         if self._all_loaded and self._data is not None:
             return self._data[index]
-
+
         # For standard diagnostics with files
         if isinstance(index, int):
-            if self._simulation_folder is not None and hasattr(self,
+            if self._simulation_folder is not None and hasattr(self, "_data_generator"):
                 return next(self._data_generator(index))
-
+
             # For derived diagnostics with custom generators
-            if hasattr(self,
+            if hasattr(self, "_data_generator") and callable(self._data_generator):
                 return next(self._data_generator(index))
-
+
         elif isinstance(index, slice):
             start = 0 if index.start is None else index.start
             step = 1 if index.step is None else index.step
 
             if index.stop is None:
-                if hasattr(self,
+                if hasattr(self, "_maxiter") and self._maxiter is not None:
                     stop = self._maxiter
-                elif self._simulation_folder is not None and hasattr(self,
-                    stop = len(sorted(
+                elif self._simulation_folder is not None and hasattr(self, "_path"):
+                    stop = len(sorted(glob.glob(f"{self._path}/*.h5")))
                 else:
                     stop = 100 # Default if we can't determine
-                    print(
+                    print(
+                        f"Warning: Could not determine iteration count for iteration, using {stop}."
+                    )
             else:
                 stop = index.stop
 
             indices = range(start, stop, step)
-            if self._simulation_folder is not None and hasattr(self,
+            if self._simulation_folder is not None and hasattr(self, "_data_generator"):
                 return np.stack([next(self._data_generator(i)) for i in indices])
-            elif hasattr(self,
+            elif hasattr(self, "_data_generator") and callable(self._data_generator):
                 return np.stack([next(self._data_generator(i)) for i in indices])
 
         # If we get here, we don't know how to get data for this index
-        raise ValueError(
-
+        raise ValueError(
+            f"Cannot retrieve data for this diagnostic at index {index}. No data loaded and no generator available."
+        )
+
     def __iter__(self):
         # If this is a file-based diagnostic
         if self._simulation_folder is not None:
-            for i in range(len(sorted(
+            for i in range(len(sorted(glob.glob(f"{self._path}/*.h5")))):
                 yield next(self._data_generator(i))
-
+
         # If this is a derived diagnostic and data is already loaded
         elif self._all_loaded and self._data is not None:
             for i in range(self._data.shape[0]):
                 yield self._data[i]
-
+
         # If this is a derived diagnostic with custom generator but no loaded data
-        elif hasattr(self,
+        elif hasattr(self, "_data_generator") and callable(self._data_generator):
             # Determine how many iterations to go through
             max_iter = self._maxiter
             if max_iter is None:
-                if hasattr(self,
+                if hasattr(self, "_diag") and hasattr(self._diag, "_maxiter"):
                     max_iter = self._diag._maxiter
                 else:
                     max_iter = 100 # Default if we can't determine
-                    print(
-
+                    print(
+                        f"Warning: Could not determine iteration count for iteration, using {max_iter}."
+                    )
+
             for i in range(max_iter):
                 yield next(self._data_generator(i))
-
+
         # If we don't know how to handle this
         else:
-            raise ValueError(
+            raise ValueError(
+                "Cannot iterate over this diagnostic. No data loaded and no generator available."
+            )
 
     def __add__(self, other):
         if isinstance(other, (int, float, np.ndarray)):
             result = Diagnostic(species=self._species)
-
-            for attr in [
+
+            for attr in [
+                "_dx",
+                "_nx",
+                "_x",
+                "_dt",
+                "_grid",
+                "_axis",
+                "_dim",
+                "_ndump",
+                "_maxiter",
+                "_tunits",
+                "_type",
+                "_simulation_folder",
+            ]:
                 if hasattr(self, attr):
                     setattr(result, attr, getattr(self, attr))
-
+
             # Make sure _maxiter is set even for derived diagnostics
-            if not hasattr(result,
-                if hasattr(self,
+            if not hasattr(result, "_maxiter") or result._maxiter is None:
+                if hasattr(self, "_maxiter") and self._maxiter is not None:
                     result._maxiter = self._maxiter
 
             # result._name = self._name + " + " + str(other) if isinstance(other, (int, float)) else self._name + " + np.ndarray"
-
+
             if self._all_loaded:
                 result._data = self._data + other
                 result._all_loaded = True
             else:
+
                 def gen_scalar_add(original_gen, scalar):
                     for val in original_gen:
                         yield val + scalar
-
+
                 original_generator = self._data_generator
-                result._data_generator = lambda index: gen_scalar_add(
-
+                result._data_generator = lambda index: gen_scalar_add(
+                    original_generator(index), other
+                )
+
+                result.created_diagnostic_name = "MISC"
+
             return result
 
         elif isinstance(other, Diagnostic):
             result = Diagnostic(species=self._species)
 
-            for attr in [
+            for attr in [
+                "_dx",
+                "_nx",
+                "_x",
+                "_dt",
+                "_grid",
+                "_axis",
+                "_dim",
+                "_ndump",
+                "_maxiter",
+                "_tunits",
+                "_type",
+                "_simulation_folder",
+            ]:
                 if hasattr(self, attr):
                     setattr(result, attr, getattr(self, attr))
-
-            if not hasattr(result,
-                if hasattr(self,
+
+            if not hasattr(result, "_maxiter") or result._maxiter is None:
+                if hasattr(self, "_maxiter") and self._maxiter is not None:
                     result._maxiter = self._maxiter
-
+
             # result._name = self._name + " + " + str(other._name)
 
             if self._all_loaded:
@@ -461,26 +573,44 @@ class Diagnostic:
                 result._data = self._data + other._data
                 result._all_loaded = True
             else:
+
                 def gen_diag_add(original_gen1, original_gen2):
                     for val1, val2 in zip(original_gen1, original_gen2):
                         yield val1 + val2
-
+
                 original_generator = self._data_generator
                 other_generator = other._data_generator
-                result._data_generator = lambda index: gen_diag_add(
+                result._data_generator = lambda index: gen_diag_add(
+                    original_generator(index), other_generator(index)
+                )
+
+                result.created_diagnostic_name = "MISC"
 
             return result
-
+
     def __sub__(self, other):
         if isinstance(other, (int, float, np.ndarray)):
             result = Diagnostic(species=self._species)
-
-            for attr in [
+
+            for attr in [
+                "_dx",
+                "_nx",
+                "_x",
+                "_dt",
+                "_grid",
+                "_axis",
+                "_dim",
+                "_ndump",
+                "_maxiter",
+                "_tunits",
+                "_type",
+                "_simulation_folder",
+            ]:
                 if hasattr(self, attr):
                     setattr(result, attr, getattr(self, attr))
-
-            if not hasattr(result,
-                if hasattr(self,
+
+            if not hasattr(result, "_maxiter") or result._maxiter is None:
+                if hasattr(self, "_maxiter") and self._maxiter is not None:
                     result._maxiter = self._maxiter
 
             # result._name = self._name + " - " + str(other) if isinstance(other, (int, float)) else self._name + " - np.ndarray"
@@ -489,28 +619,45 @@ class Diagnostic:
                 result._data = self._data - other
                 result._all_loaded = True
             else:
+
                 def gen_scalar_sub(original_gen, scalar):
                     for val in original_gen:
                         yield val - scalar
-
+
                 original_generator = self._data_generator
-                result._data_generator = lambda index: gen_scalar_sub(
-
+                result._data_generator = lambda index: gen_scalar_sub(
+                    original_generator(index), other
+                )
+
+                result.created_diagnostic_name = "MISC"
+
             return result
 
         elif isinstance(other, Diagnostic):
-
-
+
             result = Diagnostic(species=self._species)
 
-            for attr in [
+            for attr in [
+                "_dx",
+                "_nx",
+                "_x",
+                "_dt",
+                "_grid",
+                "_axis",
+                "_dim",
+                "_ndump",
+                "_maxiter",
+                "_tunits",
+                "_type",
+                "_simulation_folder",
+            ]:
                 if hasattr(self, attr):
                     setattr(result, attr, getattr(self, attr))
-
-            if not hasattr(result,
-                if hasattr(self,
+
+            if not hasattr(result, "_maxiter") or result._maxiter is None:
+                if hasattr(self, "_maxiter") and self._maxiter is not None:
                     result._maxiter = self._maxiter
-
+
             # result._name = self._name + " - " + str(other._name)
 
             if self._all_loaded:
@@ -518,54 +665,90 @@ class Diagnostic:
                 result._data = self._data - other._data
                 result._all_loaded = True
             else:
+
                 def gen_diag_sub(original_gen1, original_gen2):
                     for val1, val2 in zip(original_gen1, original_gen2):
                         yield val1 - val2
-
+
                 original_generator = self._data_generator
                 other_generator = other._data_generator
-                result._data_generator = lambda index: gen_diag_sub(
+                result._data_generator = lambda index: gen_diag_sub(
+                    original_generator(index), other_generator(index)
+                )
+
+                result.created_diagnostic_name = "MISC"
 
             return result
-
+
     def __mul__(self, other):
         if isinstance(other, (int, float, np.ndarray)):
             result = Diagnostic(species=self._species)
-
-            for attr in [
+
+            for attr in [
+                "_dx",
+                "_nx",
+                "_x",
+                "_dt",
+                "_grid",
+                "_axis",
+                "_dim",
+                "_ndump",
+                "_maxiter",
+                "_tunits",
+                "_type",
+                "_simulation_folder",
+            ]:
                 if hasattr(self, attr):
                     setattr(result, attr, getattr(self, attr))
-
-            if not hasattr(result,
-                if hasattr(self,
+
+            if not hasattr(result, "_maxiter") or result._maxiter is None:
+                if hasattr(self, "_maxiter") and self._maxiter is not None:
                     result._maxiter = self._maxiter
 
             # result._name = self._name + " * " + str(other) if isinstance(other, (int, float)) else self._name + " * np.ndarray"
-
+
             if self._all_loaded:
                 result._data = self._data * other
                 result._all_loaded = True
             else:
+
                 def gen_scalar_mul(original_gen, scalar):
                     for val in original_gen:
                         yield val * scalar
-
+
                 original_generator = self._data_generator
-                result._data_generator = lambda index: gen_scalar_mul(
+                result._data_generator = lambda index: gen_scalar_mul(
+                    original_generator(index), other
+                )
 
+                result.created_diagnostic_name = "MISC"
+
             return result
 
         elif isinstance(other, Diagnostic):
             result = Diagnostic(species=self._species)
 
-            for attr in [
+            for attr in [
+                "_dx",
+                "_nx",
+                "_x",
+                "_dt",
+                "_grid",
+                "_axis",
+                "_dim",
+                "_ndump",
+                "_maxiter",
+                "_tunits",
+                "_type",
+                "_simulation_folder",
+            ]:
                 if hasattr(self, attr):
                     setattr(result, attr, getattr(self, attr))
-
-            if not hasattr(result,
-                if hasattr(self,
+
+            if not hasattr(result, "_maxiter") or result._maxiter is None:
+                if hasattr(self, "_maxiter") and self._maxiter is not None:
                     result._maxiter = self._maxiter
-
+
             # result._name = self._name + " * " + str(other._name)
 
             if self._all_loaded:
@@ -573,55 +756,91 @@ class Diagnostic:
                 result._data = self._data * other._data
                 result._all_loaded = True
             else:
+
                 def gen_diag_mul(original_gen1, original_gen2):
                     for val1, val2 in zip(original_gen1, original_gen2):
                         yield val1 * val2
-
+
                 original_generator = self._data_generator
                 other_generator = other._data_generator
-                result._data_generator = lambda index: gen_diag_mul(
+                result._data_generator = lambda index: gen_diag_mul(
+                    original_generator(index), other_generator(index)
+                )
+
+                result.created_diagnostic_name = "MISC"
 
             return result
-
+
     def __truediv__(self, other):
         if isinstance(other, (int, float, np.ndarray)):
             result = Diagnostic(species=self._species)
-
-            for attr in [
+
+            for attr in [
+                "_dx",
+                "_nx",
+                "_x",
+                "_dt",
+                "_grid",
+                "_axis",
+                "_dim",
+                "_ndump",
+                "_maxiter",
+                "_tunits",
+                "_type",
+                "_simulation_folder",
+            ]:
                 if hasattr(self, attr):
                     setattr(result, attr, getattr(self, attr))
-
-            if not hasattr(result,
-                if hasattr(self,
+
+            if not hasattr(result, "_maxiter") or result._maxiter is None:
+                if hasattr(self, "_maxiter") and self._maxiter is not None:
                     result._maxiter = self._maxiter
 
             # result._name = self._name + " / " + str(other) if isinstance(other, (int, float)) else self._name + " / np.ndarray"
-
+
             if self._all_loaded:
                 result._data = self._data / other
                 result._all_loaded = True
             else:
+
                 def gen_scalar_div(original_gen, scalar):
                     for val in original_gen:
                         yield val / scalar
-
+
                 original_generator = self._data_generator
-                result._data_generator = lambda index: gen_scalar_div(
+                result._data_generator = lambda index: gen_scalar_div(
+                    original_generator(index), other
+                )
 
+                result.created_diagnostic_name = "MISC"
+
             return result
 
         elif isinstance(other, Diagnostic):
-
+
             result = Diagnostic(species=self._species)
 
-            for attr in [
+            for attr in [
+                "_dx",
+                "_nx",
+                "_x",
+                "_dt",
+                "_grid",
+                "_axis",
+                "_dim",
+                "_ndump",
+                "_maxiter",
+                "_tunits",
+                "_type",
+                "_simulation_folder",
+            ]:
                 if hasattr(self, attr):
                     setattr(result, attr, getattr(self, attr))
-
-            if not hasattr(result,
-                if hasattr(self,
+
+            if not hasattr(result, "_maxiter") or result._maxiter is None:
+                if hasattr(self, "_maxiter") and self._maxiter is not None:
                     result._maxiter = self._maxiter
-
+
             # result._name = self._name + " / " + str(other._name)
 
             if self._all_loaded:
@@ -629,115 +848,293 @@ class Diagnostic:
                 result._data = self._data / other._data
                 result._all_loaded = True
             else:
+
                 def gen_diag_div(original_gen1, original_gen2):
                     for val1, val2 in zip(original_gen1, original_gen2):
                         yield val1 / val2
-
+
                 original_generator = self._data_generator
                 other_generator = other._data_generator
-                result._data_generator = lambda index: gen_diag_div(
+                result._data_generator = lambda index: gen_diag_div(
+                    original_generator(index), other_generator(index)
+                )
+
+                result.created_diagnostic_name = "MISC"
 
             return result
-
+
     def __pow__(self, other):
-
+        # power by scalar
         if isinstance(other, (int, float)):
             result = Diagnostic(species=self._species)
 
-            for attr in [
+            for attr in [
+                "_dx",
+                "_nx",
+                "_x",
+                "_dt",
+                "_grid",
+                "_axis",
+                "_dim",
+                "_ndump",
+                "_maxiter",
+                "_tunits",
+                "_type",
+                "_simulation_folder",
+            ]:
                 if hasattr(self, attr):
                     setattr(result, attr, getattr(self, attr))
 
-            if not hasattr(result,
-                if hasattr(self,
+            if not hasattr(result, "_maxiter") or result._maxiter is None:
+                if hasattr(self, "_maxiter") and self._maxiter is not None:
                     result._maxiter = self._maxiter
 
             # result._name = self._name + " ^(" + str(other) + ")"
             # result._label = self._label + rf"$ ^{other}$"
 
             if self._all_loaded:
-                result._data = self._data
+                result._data = self._data**other
                 result._all_loaded = True
             else:
+
                 def gen_scalar_pow(original_gen, scalar):
                     for val in original_gen:
-                        yield val
+                        yield val**scalar
 
                 original_generator = self._data_generator
-                result._data_generator = lambda index: gen_scalar_pow(
+                result._data_generator = lambda index: gen_scalar_pow(
+                    original_generator(index), other
+                )
+
+                result.created_diagnostic_name = "MISC"
 
             return result
-
+
         # power by another diagnostic
         elif isinstance(other, Diagnostic):
-            raise ValueError(
+            raise ValueError(
+                "Power by another diagnostic is not supported. Why would you do that?"
+            )
 
     def __radd__(self, other):
         return self + other
-
-    def __rsub__(
-
-
+
+    def __rsub__(
+        self, other
+    ): # I don't know if this is correct because I'm not sure if the order of the subtraction is correct
+        return -self + other
+
     def __rmul__(self, other):
         return self * other
-
-    def __rtruediv__(self, other):
+
+    def __rtruediv__(self, other): # division is not commutative
         if isinstance(other, (int, float, np.ndarray)):
             result = Diagnostic(species=self._species)
-
-            for attr in [
+
+            for attr in [
+                "_dx",
+                "_nx",
+                "_x",
+                "_dt",
+                "_grid",
+                "_axis",
+                "_dim",
+                "_ndump",
+                "_maxiter",
+                "_tunits",
+                "_type",
+                "_simulation_folder",
+            ]:
                 if hasattr(self, attr):
                     setattr(result, attr, getattr(self, attr))
-
-            if not hasattr(result,
-                if hasattr(self,
+
+            if not hasattr(result, "_maxiter") or result._maxiter is None:
+                if hasattr(self, "_maxiter") and self._maxiter is not None:
                     result._maxiter = self._maxiter
 
             # result._name = str(other) + " / " + self._name if isinstance(other, (int, float)) else "np.ndarray / " + self._name
-
+
             if self._all_loaded:
                 result._data = other / self._data
                 result._all_loaded = True
             else:
+
                 def gen_scalar_rdiv(scalar, original_gen):
                     for val in original_gen:
                         yield scalar / val
-
+
                 original_generator = self._data_generator
-                result._data_generator = lambda index: gen_scalar_rdiv(
+                result._data_generator = lambda index: gen_scalar_rdiv(
+                    other, original_generator(index)
+                )
 
+                result.created_diagnostic_name = "MISC"
+
             return result
-
+
         elif isinstance(other, Diagnostic):
-
+
             result = Diagnostic(species=self._species)
 
-            for attr in [
+            for attr in [
+                "_dx",
+                "_nx",
+                "_x",
+                "_dt",
+                "_grid",
+                "_axis",
+                "_dim",
+                "_ndump",
+                "_maxiter",
+                "_tunits",
+                "_type",
+                "_simulation_folder",
+            ]:
                 if hasattr(self, attr):
                     setattr(result, attr, getattr(self, attr))
-
-            if not hasattr(result,
-                if hasattr(self,
+
+            if not hasattr(result, "_maxiter") or result._maxiter is None:
+                if hasattr(self, "_maxiter") and self._maxiter is not None:
                     result._maxiter = self._maxiter
-
+
             # result._name = str(other._name) + " / " + self._name
 
             if self._all_loaded:
                 other.load_all()
-                result._data =
+                result._data = other._data / self._data
                 result._all_loaded = True
             else:
+
                 def gen_diag_div(original_gen1, original_gen2):
                     for val1, val2 in zip(original_gen1, original_gen2):
-                        yield
-
+                        yield val2 / val1
+
                 original_generator = self._data_generator
                 other_generator = other._data_generator
-                result._data_generator = lambda index: gen_diag_div(
+                result._data_generator = lambda index: gen_diag_div(
+                    original_generator(index), other_generator(index)
+                )
+
+                result.created_diagnostic_name = "MISC"
 
             return result
 
-    def
+    def to_h5(self, savename=None, index=None, all=False, verbose=False, path=None):
+        """
+        Save the diagnostic data to HDF5 files.
+
+        Parameters
+        ----------
+        savename : str, optional
+            The name of the HDF5 file. If None, uses the diagnostic name.
+        index : int, or list of ints, optional
+            The index or indices of the data to save.
+        all : bool, optional
+            If True, save all data. Default is False.
+        verbose : bool, optional
+            If True, print messages about the saving process.
+        path : str, optional
+            The path to save the HDF5 files. If None, uses the default save path (in simulation folder).
+        """
+        if folder is None:
+            folder = self._simulation_folder
+            self._save_path = folder + f"/MS/MISC/{self._default_save}/{savename}"
+        else:
+            self._save_path = folder
+        # Check if is has attribute created_diagnostic_name or postprocess_name
+        if savename is None:
+            print(f"No savename provided. Using {self._name}.")
+            savename = self._name
+
+        if hasattr(self, "created_diagnostic_name"):
+            self._default_save = self.created_diagnostic_name
+        elif hasattr(self, "postprocess_name"):
+            self._default_save = self.postprocess_name
+        else:
+            self._default_save = "DIR_" + self._name
+
+        if not os.path.exists(self._save_path):
+            os.makedirs(self._save_path)
+            if verbose:
+                print(f"Created folder {self._save_path}")
+
+        if verbose:
+            print(f"Save Path: {self._save_path}")
+
+        def savefile(filename, i):
+            with h5py.File(filename, 'w') as f:
+                # Create SIMULATION group with attributes
+                sim_group = f.create_group("SIMULATION")
+                sim_group.attrs.create("DT", [self._dt])
+                sim_group.attrs.create("NDIMS", [self._dim])
+
+                # Set file attributes
+                f.attrs.create("TIME", [self.time(i)[0]])
+                f.attrs.create("TIME UNITS", [np.bytes_(self.time(i)[1].encode()) if self.time(i)[1] else np.bytes_(b"")])
+                f.attrs.create("ITER", [self._ndump * i])
+                f.attrs.create("NAME", [np.bytes_(self._name.encode())])
+                f.attrs.create("TYPE", [np.bytes_(self._type.encode())])
+                f.attrs.create("UNITS", [np.bytes_(self._units.encode()) if self._units else np.bytes_(b"")])
+                f.attrs.create("LABEL", [np.bytes_(self._label.encode()) if self._label else np.bytes_(b"")])
+
+                # Create dataset with data (transposed to match convention)
+                f.create_dataset(savename, data=self[i].T)
+
+                # Create AXIS group
+                axis_group = f.create_group("AXIS")
+
+                # Create axis datasets
+                axis_names = ["AXIS1", "AXIS2", "AXIS3"][:self._dim]
+                axis_shortnames = [self._axis[i]["name"] for i in range(self._dim)]
+                axis_longnames = [self._axis[i]["long_name"] for i in range(self._dim)]
+                axis_units = [self._axis[i]["units"] for i in range(self._dim)]
+
+                for i, axis_name in enumerate(axis_names):
+                    # Create axis dataset
+                    axis_dataset = axis_group.create_dataset(axis_name, data=np.array(self._grid[i]))
+
+                    # Set axis attributes
+                    axis_dataset.attrs.create("NAME", [np.bytes_(axis_shortnames[i].encode())])
+                    axis_dataset.attrs.create("UNITS", [np.bytes_(axis_units[i].encode())])
+                    axis_dataset.attrs.create("LONG_NAME", [np.bytes_(axis_longnames[i].encode())])
+                    axis_dataset.attrs.create("TYPE", [np.bytes_("linear".encode())])
+
+            if verbose:
+                print(f"File created: {filename}")
+
+        print(f"The savename of the diagnostic is {savename}. Files will be saves as {savename}-000001.h5, {savename}-000002.h5, etc.")
+
+        print(f"If you desire a different name, please set it with the 'name' method (setter).")
+
+        if self._name is None:
+            raise ValueError("Diagnostic name is not set. Cannot save to HDF5.")
+        if not os.path.exists(folder):
+            print(f"Creating folder {folder}...")
+            os.makedirs(folder)
+        if not os.path.isdir(folder):
+            raise ValueError(f"{folder} is not a directory.")
+
+        if all == False:
+            if isinstance(index, int):
+                filename = self._save_path + f"/{savename}-{index:06d}.h5"
+                savefile(filename, index)
+            elif isinstance(index, list) or isinstance(index, tuple):
+                for i in index:
+                    filename = self._save_path + f"/{savename}-{i:06d}.h5"
+                    savefile(filename, i)
+        elif all == True:
+            for i in range(self._maxiter):
+                filename = self._save_path + f"/{savename}-{i:06d}.h5"
+                savefile(filename, i)
+        else:
+            raise ValueError("index should be an int, slice, or list of ints, or all should be True")
+
+    def plot_3d(
+        self,
+        idx,
+        scale_type: Literal["zero_centered", "pos", "neg", "default"] = "default",
+        boundaries: np.ndarray = None,
+    ):
         """
         Plots a 3D scatter plot of the diagnostic data (grid data).
 
@@ -768,23 +1165,24 @@ class Diagnostic:
         plt.show()
         """
 
-
         if self._dim != 3:
             raise ValueError("This method is only available for 3D diagnostics.")
-
+
         if boundaries is None:
             boundaries = self._grid
 
         if not isinstance(boundaries, np.ndarray):
-            try
+            try:
                 boundaries = np.array(boundaries)
             except:
-                boundaries = self._grid
-                warnings.warn(
+                boundaries = self._grid
+                warnings.warn(
+                    "boundaries cannot be accessed as a numpy array with shape (3, 2), using default instead"
+                )
 
         if boundaries.shape != (3, 2):
             warnings.warn("boundaries should have shape (3, 2), using default instead")
-            boundaries = self._grid
+            boundaries = self._grid
 
         # Load data
         if self._all_loaded:
@@ -795,12 +1193,32 @@ class Diagnostic:
         X, Y, Z = np.meshgrid(self._x[0], self._x[1], self._x[2], indexing="ij")
 
         # Flatten arrays for scatter plot
-
+        (
+            X_flat,
+            Y_flat,
+            Z_flat,
+        ) = (
+            X.ravel(),
+            Y.ravel(),
+            Z.ravel(),
+        )
         data_flat = data.ravel()
 
         # Apply filter: Keep only chosen points
-        mask = (
-
+        mask = (
+            (X_flat > boundaries[0][0])
+            & (X_flat < boundaries[0][1])
+            & (Y_flat > boundaries[1][0])
+            & (Y_flat < boundaries[1][1])
+            & (Z_flat > boundaries[2][0])
+            & (Z_flat < boundaries[2][1])
+        )
+        X_cut, Y_cut, Z_cut, data_cut = (
+            X_flat[mask],
+            Y_flat[mask],
+            Z_flat[mask],
+            data_flat[mask],
+        )
 
         if scale_type == "zero_centered":
             # Center colormap around zero
@@ -841,10 +1259,22 @@ class Diagnostic:
         # Labels
         # TODO try to use a latex label instaead of _name
         cbar.set_label(r"${}$".format(self._name) + r"$\ [{}]$".format(self._units))
-        ax.set_title(
-
-
-
+        ax.set_title(
+            r"$t={:.2f}$".format(self.time(idx)[0])
+            + r"$\ [{}]$".format(self.time(idx)[1])
+        )
+        ax.set_xlabel(
+            r"${}$".format(self.axis[0]["long_name"])
+            + r"$\ [{}]$".format(self.axis[0]["units"])
+        )
+        ax.set_ylabel(
+            r"${}$".format(self.axis[1]["long_name"])
+            + r"$\ [{}]$".format(self.axis[1]["units"])
+        )
+        ax.set_zlabel(
+            r"${}$".format(self.axis[2]["long_name"])
+            + r"$\ [{}]$".format(self.axis[2]["units"])
+        )
 
         return fig, ax
 
@@ -852,84 +1282,108 @@ class Diagnostic:
     @property
     def data(self):
         if self._data is None:
-            raise ValueError(
+            raise ValueError(
+                "Data not loaded into memory. Use get_* method with load_all=True or access via generator/index."
+            )
         return self._data
 
     @property
     def dx(self):
         return self._dx
-
+
     @property
     def nx(self):
         return self._nx
-
+
     @property
     def x(self):
         return self._x
-
+
     @property
     def dt(self):
         return self._dt
-
+
     @property
     def grid(self):
         return self._grid
-
+
     @property
     def axis(self):
         return self._axis
-
+
     @property
     def units(self):
         return self._units
-
+
     @property
     def tunits(self):
         return self._tunits
-
+
     @property
     def name(self):
         return self._name
-
+
     @property
     def dim(self):
         return self._dim
-
+
     @property
     def path(self):
         return self
-
+
     @property
     def simulation_folder(self):
         return self._simulation_folder
-
+
     @property
     def ndump(self):
         return self._ndump
 
+    # @property
+    # def iter(self):
+    # return self._iter
+
     @property
     def all_loaded(self):
         return self._all_loaded
-
+
     @property
     def maxiter(self):
         return self._maxiter
-
+
     @property
     def label(self):
         return self._label
+
+    @property
+    def type(self):
+        return self._type
 
     @property
     def quantity(self):
         return self._quantity
-
+
     def time(self, index):
         return [index * self._dt * self._ndump, self._tunits]
-
+
+    def attributes_to_save(self, index):
+        """
+        Prints the attributes of the diagnostic.
+        """
+        print(f"dt: {self._dt}\n"
+            f"dim: {self._dim}\n"
+            f"time: {self.time(index)[0]}\n"
+            f"tunits: {self.time(index)[1]}\n"
+            f"iter: {self._ndump * index}\n"
+            f"name: {self._name}\n"
+            f"type: {self._type}\n"
+            f"label: {self._label}\n"
+            f"units: {self._units}")
+
     @dx.setter
     def dx(self, value):
         self._dx = value
-
+
     @nx.setter
     def nx(self, value):
         self._nx = value
@@ -957,7 +1411,7 @@ class Diagnostic:
     @tunits.setter
     def tunits(self, value):
         self._tunits = value
-
+
     @name.setter
     def name(self, value):
         self._name = value
@@ -969,11 +1423,15 @@ class Diagnostic:
     @ndump.setter
    def ndump(self, value):
        self._ndump = value
-
+
    @data.setter
    def data(self, value):
-        self._data = value
+        self._data = value
 
    @quantity.setter
    def quantity(self, key):
-        self._quantity = key
+        self._quantity = key
+
+    @label.setter
+    def label(self, value):
+        self._label = value