osiris-utils 1.1.3__py3-none-any.whl → 1.1.6__py3-none-any.whl
- osiris_utils/__init__.py +25 -6
- osiris_utils/data/__init__.py +0 -0
- osiris_utils/data/data.py +692 -0
- osiris_utils/data/diagnostic.py +1437 -0
- osiris_utils/data/simulation.py +216 -0
- osiris_utils/decks/__init__.py +0 -0
- osiris_utils/decks/decks.py +288 -0
- osiris_utils/decks/species.py +55 -0
- osiris_utils/gui/__init__.py +0 -0
- osiris_utils/gui/gui.py +266 -0
- osiris_utils/postprocessing/__init__.py +0 -0
- osiris_utils/postprocessing/derivative.py +223 -0
- osiris_utils/postprocessing/fft.py +234 -0
- osiris_utils/postprocessing/field_centering.py +168 -0
- osiris_utils/postprocessing/heatflux_correction.py +193 -0
- osiris_utils/postprocessing/mft.py +334 -0
- osiris_utils/postprocessing/mft_for_gridfile.py +52 -0
- osiris_utils/postprocessing/postprocess.py +42 -0
- osiris_utils/postprocessing/pressure_correction.py +171 -0
- osiris_utils/utils.py +141 -41
- {osiris_utils-1.1.3.dist-info → osiris_utils-1.1.6.dist-info}/METADATA +20 -2
- osiris_utils-1.1.6.dist-info/RECORD +25 -0
- {osiris_utils-1.1.3.dist-info → osiris_utils-1.1.6.dist-info}/WHEEL +1 -1
- osiris_utils-1.1.3.dist-info/RECORD +0 -7
- {osiris_utils-1.1.3.dist-info → osiris_utils-1.1.6.dist-info}/licenses/LICENSE.txt +0 -0
- {osiris_utils-1.1.3.dist-info → osiris_utils-1.1.6.dist-info}/top_level.txt +0 -0
osiris_utils/data/diagnostic.py
@@ -0,0 +1,1437 @@
"""
The utilities in data.py are handy, but not when you want to work with the whole data of a simulation instead
of just a single file. That is what this module is for - dealing with "folders" of data.

Took some inspiration from Diogo and Madox's work.

This would be awesome for computing time derivatives.
"""

from torch import isin  # NOTE: unused import (likely accidental); it pulls in torch as a dependency
import numpy as np
import os
import glob
import h5py

from .data import OsirisGridFile
import tqdm
import matplotlib.pyplot as plt
import warnings
from typing import Literal
from ..decks.decks import InputDeckIO, deval


def get_dimension_from_deck(deck: InputDeckIO) -> int:
    # Probe nx_p(1:1), nx_p(1:2), nx_p(1:3) until one parses; the first match is the grid dimension.
    for dim in range(1, 4):
        try:
            deck.get_param(section="grid", param=f"nx_p(1:{dim})")
            return dim
        except:
            continue

    raise Exception("Error parsing grid dimension")


OSIRIS_DENSITY = ["n"]
OSIRIS_SPECIE_REPORTS = ["charge", "q1", "q2", "q3", "j1", "j2", "j3"]
OSIRIS_SPECIE_REP_UDIST = [
    "vfl1", "vfl2", "vfl3",
    "ufl1", "ufl2", "ufl3",
    "P11", "P12", "P13", "P22", "P23", "P33",
    "T11", "T12", "T13", "T22", "T23", "T33",
]
OSIRIS_FLD = [
    "e1", "e2", "e3",
    "b1", "b2", "b3",
    "part_e1", "part_e2", "epart_3",  # "epart_3" appears in the source; likely intended as "part_e3"
    "part_b1", "part_b2", "part_b3",
    "ext_e1", "ext_e2", "ext_e3",
    "ext_b1", "ext_b2", "ext_b3",
]
OSIRIS_PHA = [
    "p1x1", "p1x2", "p1x3",
    "p2x1", "p2x2", "p2x3",
    "p3x1", "p3x2", "p3x3",
    "gammax1", "gammax2", "gammax3",
]  # there may be more that I don't know
OSIRIS_ALL = (
    OSIRIS_DENSITY
    + OSIRIS_SPECIE_REPORTS
    + OSIRIS_SPECIE_REP_UDIST
    + OSIRIS_FLD
    + OSIRIS_PHA
)


def which_quantities():
    print("Available quantities:")
    print(OSIRIS_ALL)
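
# A minimal sketch (not from the package docs) of how these lists are used: a quantity string is validated
# against OSIRIS_ALL before a Diagnostic resolves it to a folder under the simulation's MS/ tree, e.g.
# "e1" -> MS/FLD/e1/, "vfl1" -> MS/UDIST/<species>/vfl1/, "charge" -> MS/DENSITY/<species>/charge/.
#
#     which_quantities()                        # prints OSIRIS_ALL
#     assert "e1" in OSIRIS_FLD
#     assert "vfl1" in OSIRIS_SPECIE_REP_UDIST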


class Diagnostic:
    """
    Class to handle diagnostics. This is the "base" class of the code. Diagnostics can be loaded from OSIRIS output files, but they are also created when performing operations with other diagnostics.
    Post-processed quantities are also considered diagnostics, so operations can be performed with them as well.

    Parameters
    ----------
    species : str
        The species to handle the diagnostics.
    simulation_folder : str
        The path to the simulation folder. This is the folder where the input deck is located.

    Attributes
    ----------
    species : str
        The species to handle the diagnostics.
    dx : np.ndarray(float) or float
        The grid spacing in each direction. If the dimension is 1, this is a float. If the dimension is 2 or 3, this is a np.ndarray.
    nx : np.ndarray(int) or int
        The number of grid points in each direction. If the dimension is 1, this is an int. If the dimension is 2 or 3, this is a np.ndarray.
    x : np.ndarray
        The grid points.
    dt : float
        The time step.
    grid : np.ndarray
        The grid boundaries.
    axis : dict
        The axis information. Each key is a direction and the value is a dictionary with the keys "name", "long_name", "units" and "plot_label".
    units : str
        The units of the diagnostic. This info may not be available for all diagnostics, i.e., diagnostics resulting from operations and postprocessing.
    name : str
        The name of the diagnostic. This info may not be available for all diagnostics, i.e., diagnostics resulting from operations and postprocessing.
    label : str
        The label of the diagnostic. This info may not be available for all diagnostics, i.e., diagnostics resulting from operations and postprocessing.
    dim : int
        The dimension of the diagnostic.
    ndump : int
        The number of steps between dumps.
    maxiter : int
        The maximum number of iterations.
    tunits : str
        The time units.
    path : str
        The path to the diagnostic.
    simulation_folder : str
        The path to the simulation folder.
    all_loaded : bool
        Whether the data is already loaded into memory. This is useful to avoid loading the data multiple times.
    data : np.ndarray
        The diagnostic data. This is created only when the data is loaded into memory.

    Methods
    -------
    get_quantity(quantity)
        Get the data for a given quantity.
    load_all()
        Load all data into memory.
    load(index)
        Load data for a given index.
    __getitem__(index)
        Get data for a given index. Does not load the data into memory.
    __iter__()
        Iterate over the data. Does not load the data into memory.
    __add__(other)
        Add two diagnostics.
    __sub__(other)
        Subtract two diagnostics.
    __mul__(other)
        Multiply two diagnostics.
    __truediv__(other)
        Divide two diagnostics.
    __pow__(other)
        Power of a diagnostic.
    plot_3d(idx, scale_type="default", boundaries=None)
        Plot a 3D scatter plot of the diagnostic data.
    time(index)
        Get the time for a given index.
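
    Examples
    --------
    A minimal usage sketch (the folder and species names here are hypothetical, not from the package docs;
    `electrons` stands for a species object with a ``.name`` attribute):

    >>> diag = Diagnostic(simulation_folder="path/to/simulation", species=electrons)
    >>> diag.get_quantity("e1")        # point the diagnostic at the e1 field dumps
    >>> first_dump = diag[0]           # read a single iteration without loading everything
    >>> all_dumps = diag.load_all()    # or load every iteration into memory at once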

    """

    def __init__(self, simulation_folder=None, species=None, input_deck=None):
        self._species = species if species else None

        self._dx = None
        self._nx = None
        self._x = None
        self._dt = None
        self._grid = None
        self._axis = None
        self._units = None
        self._name = None
        self._label = None
        self._dim = None
        self._ndump = None
        self._maxiter = None
        self._tunits = None

        if simulation_folder:
            self._simulation_folder = simulation_folder
            if not os.path.isdir(simulation_folder):
                raise FileNotFoundError(
                    f"Simulation folder {simulation_folder} not found."
                )
        else:
            self._simulation_folder = None

        # load input deck if available
        if input_deck:
            self._input_deck = input_deck
        else:
            self._input_deck = None

        self._all_loaded = False
        self._quantity = None

    def get_quantity(self, quantity):
        """
        Get the data for a given quantity.

        Parameters
        ----------
        quantity : str
            The quantity to get the data for.
        """
        self._quantity = quantity

        if self._quantity not in OSIRIS_ALL:
            raise ValueError(
                f"Invalid quantity {self._quantity}. Use which_quantities() to see the available quantities."
            )
        if self._quantity in OSIRIS_SPECIE_REP_UDIST:
            if self._species is None:
                raise ValueError("Species not set.")
            self._get_moment(self._species.name, self._quantity)
        elif self._quantity in OSIRIS_SPECIE_REPORTS:
            if self._species is None:
                raise ValueError("Species not set.")
            self._get_density(self._species.name, self._quantity)
        elif self._quantity in OSIRIS_FLD:
            self._get_field(self._quantity)
        elif self._quantity in OSIRIS_PHA:
            if self._species is None:
                raise ValueError("Species not set.")
            self._get_phase_space(self._species.name, self._quantity)
        elif self._quantity == "n":
            if self._species is None:
                raise ValueError("Species not set.")
            self._get_density(self._species.name, "charge")
        else:
            raise ValueError(
                f"Invalid quantity {self._quantity}. Or it's not implemented yet (this may happen for phase space quantities)."
            )
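
    # A short map of the dispatch above (folder layout taken from the _get_* methods below):
    # "e1"     -> _get_field("e1")                        -> MS/FLD/e1/*.h5
    # "vfl1"   -> _get_moment(species.name, "vfl1")       -> MS/UDIST/<species>/vfl1/*.h5
    # "charge" -> _get_density(species.name, "charge")    -> MS/DENSITY/<species>/charge/*.h5
    # "p1x1"   -> _get_phase_space(species.name, "p1x1")  -> MS/PHA/p1x1/<species>/*.h5
    # "n"      -> _get_density(species.name, "charge"), later rescaled by species.rqm in _data_generator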

    def _get_moment(self, species, moment):
        if self._simulation_folder is None:
            raise ValueError(
                "Simulation folder not set. If you're using CustomDiagnostic, this method is not available."
            )
        self._path = f"{self._simulation_folder}/MS/UDIST/{species}/{moment}/"
        self._file_template = glob.glob(f"{self._path}/*.h5")[0][:-9]
        self._maxiter = len(glob.glob(f"{self._path}/*.h5"))
        self._load_attributes(self._file_template, self._input_deck)

    def _get_field(self, field):
        if self._simulation_folder is None:
            raise ValueError(
                "Simulation folder not set. If you're using CustomDiagnostic, this method is not available."
            )
        self._path = f"{self._simulation_folder}/MS/FLD/{field}/"
        self._file_template = glob.glob(f"{self._path}/*.h5")[0][:-9]
        self._maxiter = len(glob.glob(f"{self._path}/*.h5"))
        self._load_attributes(self._file_template, self._input_deck)

    def _get_density(self, species, quantity):
        if self._simulation_folder is None:
            raise ValueError(
                "Simulation folder not set. If you're using CustomDiagnostic, this method is not available."
            )
        self._path = f"{self._simulation_folder}/MS/DENSITY/{species}/{quantity}/"
        self._file_template = glob.glob(f"{self._path}/*.h5")[0][:-9]
        self._maxiter = len(glob.glob(f"{self._path}/*.h5"))
        self._load_attributes(self._file_template, self._input_deck)

    def _get_phase_space(self, species, type):
        if self._simulation_folder is None:
            raise ValueError(
                "Simulation folder not set. If you're using CustomDiagnostic, this method is not available."
            )
        self._path = f"{self._simulation_folder}/MS/PHA/{type}/{species}/"
        self._file_template = glob.glob(f"{self._path}/*.h5")[0][:-9]
        self._maxiter = len(glob.glob(f"{self._path}/*.h5"))
        self._load_attributes(self._file_template, self._input_deck)

    def _load_attributes(
        self, file_template, input_deck
    ):  # this will be replaced by reading the input deck
        # This can go wrong! NDUMP
        # if input_deck is not None:
        #     self._dt = float(input_deck["time_step"][0]["dt"])
        #     self._dim = get_dimension_from_deck(input_deck)
        #     self._nx = np.array(list(map(int, input_deck["grid"][0][f"nx_p(1:{self._dim})"].split(','))))
        #     xmin = [deval(input_deck["space"][0][f"xmin(1:{self._dim})"].split(',')[i]) for i in range(self._dim)]
        #     xmax = [deval(input_deck["space"][0][f"xmax(1:{self._dim})"].split(',')[i]) for i in range(self._dim)]
        #     self._grid = np.array([[xmin[i], xmax[i]] for i in range(self._dim)])
        #     self._dx = (self._grid[:,1] - self._grid[:,0])/self._nx
        #     self._x = [np.arange(self._grid[i,0], self._grid[i,1], self._dx[i]) for i in range(self._dim)]

        self._ndump = int(input_deck["time_step"][0]["ndump"])

        try:
            # Try files 000001, 000002, etc. until one is found
            found_file = False
            for file_num in range(1, self._maxiter + 1):
                path_file = os.path.join(file_template + f"{file_num:06d}.h5")
                if os.path.exists(path_file):
                    dump = OsirisGridFile(path_file)
                    self._dx = dump.dx
                    self._nx = dump.nx
                    self._x = dump.x
                    self._dt = dump.dt
                    self._grid = dump.grid
                    self._axis = dump.axis
                    self._units = dump.units
                    self._name = dump.name
                    self._label = dump.label
                    self._dim = dump.dim
                    # self._iter = dump.iter
                    self._tunits = dump.time[1]
                    self._type = dump.type
                    found_file = True
                    break

            if not found_file:
                warnings.warn(f"No valid data files found in {self._path} to read metadata from.")
        except Exception as e:
            warnings.warn(f"Error loading diagnostic attributes: {str(e)}. Please verify that there are files in the folder.")

    def _data_generator(self, index):
        if self._simulation_folder is None:
            raise ValueError("Simulation folder not set.")
        file = os.path.join(self._file_template + f"{index:06d}.h5")
        data_object = OsirisGridFile(file)
        yield (
            data_object.data
            if self._quantity not in OSIRIS_DENSITY
            else self._species.rqm * data_object.data
        )

    def load_all(self):
        """
        Load all data into memory (all iterations).

        Returns
        -------
        data : np.ndarray
            The data for all iterations. Also stored in the attribute data.
        """
        # If data is already loaded, don't do anything
        if self._all_loaded and self._data is not None:
            print("Data already loaded.")
            return self._data

        # If this is a derived diagnostic without files
        if hasattr(self, "postprocess_name") or hasattr(self, "created_diagnostic_name"):
            # If it has a data generator but no direct files
            try:
                print(
                    "This appears to be a derived diagnostic. Loading data from generators..."
                )
                # Get the maximum size from the diagnostic attributes
                if hasattr(self, "_maxiter") and self._maxiter is not None:
                    size = self._maxiter
                else:
                    # Try to infer from a related diagnostic
                    if hasattr(self, "_diag") and hasattr(self._diag, "_maxiter"):
                        size = self._diag._maxiter
                    else:
                        # Default to a reasonable number if we can't determine it
                        size = 100
                        print(
                            f"Warning: Could not determine timestep count, using {size}."
                        )

                # Load data for all timesteps using the generator - this may take a while
                self._data = np.stack(
                    [self[i] for i in tqdm.tqdm(range(size), desc="Loading data")]
                )
                self._all_loaded = True
                return self._data

            except Exception as e:
                raise ValueError(f"Could not load derived diagnostic data: {str(e)}")

        # Original implementation for file-based diagnostics
        print("Loading all data from files. This may take a while.")
        size = len(sorted(glob.glob(f"{self._path}/*.h5")))
        self._data = np.stack(
            [self[i] for i in tqdm.tqdm(range(size), desc="Loading data")]
        )
        self._all_loaded = True
        return self._data

    def unload(self):
        """
        Unload data from memory. This is useful to free memory when the data is not needed anymore.
        """
        print("Unloading data from memory.")
        if not self._all_loaded:
            print("Data is not loaded.")
            return
        self._data = None
        self._all_loaded = False
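
    # A short usage sketch (hypothetical diagnostic `diag` already pointed at a quantity):
    #
    #     data = diag.load_all()   # array stacking every iteration, also cached on diag.data
    #     ...                      # work with the full array
    #     diag.unload()            # drop the cache and free memory; lazy indexing still works afterwards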

    def load(self, index):
        """
        Load data for a given index into memory. Not recommended: use load_all for all data, or access via generator or index for better performance.
        """
        self._data = next(self._data_generator(index))

    def __getitem__(self, index):
        # For derived diagnostics with cached data
        if self._all_loaded and self._data is not None:
            return self._data[index]

        # For standard diagnostics with files
        if isinstance(index, int):
            if self._simulation_folder is not None and hasattr(self, "_data_generator"):
                return next(self._data_generator(index))

            # For derived diagnostics with custom generators
            if hasattr(self, "_data_generator") and callable(self._data_generator):
                return next(self._data_generator(index))

        elif isinstance(index, slice):
            start = 0 if index.start is None else index.start
            step = 1 if index.step is None else index.step

            if index.stop is None:
                if hasattr(self, "_maxiter") and self._maxiter is not None:
                    stop = self._maxiter
                elif self._simulation_folder is not None and hasattr(self, "_path"):
                    stop = len(sorted(glob.glob(f"{self._path}/*.h5")))
                else:
                    stop = 100  # Default if we can't determine it
                    print(
                        f"Warning: Could not determine iteration count, using {stop}."
                    )
            else:
                stop = index.stop

            indices = range(start, stop, step)
            if self._simulation_folder is not None and hasattr(self, "_data_generator"):
                return np.stack([next(self._data_generator(i)) for i in indices])
            elif hasattr(self, "_data_generator") and callable(self._data_generator):
                return np.stack([next(self._data_generator(i)) for i in indices])

        # If we get here, we don't know how to get data for this index
        raise ValueError(
            f"Cannot retrieve data for this diagnostic at index {index}. No data loaded and no generator available."
        )
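
    # Indexing sketch (hypothetical `diag`): integer indices read one dump lazily, slices stack several.
    #
    #     frame = diag[10]         # data of iteration 10, read from file on demand
    #     block = diag[0:50:5]     # np.stack of iterations 0, 5, ..., 45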

    def __iter__(self):
        # If this is a file-based diagnostic
        if self._simulation_folder is not None:
            for i in range(len(sorted(glob.glob(f"{self._path}/*.h5")))):
                yield next(self._data_generator(i))

        # If this is a derived diagnostic and data is already loaded
        elif self._all_loaded and self._data is not None:
            for i in range(self._data.shape[0]):
                yield self._data[i]

        # If this is a derived diagnostic with a custom generator but no loaded data
        elif hasattr(self, "_data_generator") and callable(self._data_generator):
            # Determine how many iterations to go through
            max_iter = self._maxiter
            if max_iter is None:
                if hasattr(self, "_diag") and hasattr(self._diag, "_maxiter"):
                    max_iter = self._diag._maxiter
                else:
                    max_iter = 100  # Default if we can't determine it
                    print(
                        f"Warning: Could not determine iteration count, using {max_iter}."
                    )

            for i in range(max_iter):
                yield next(self._data_generator(i))

        # If we don't know how to handle this
        else:
            raise ValueError(
                "Cannot iterate over this diagnostic. No data loaded and no generator available."
            )
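
    # Iteration sketch (hypothetical `diag`): frames are yielded one at a time, without caching.
    #
    #     for i, frame in enumerate(diag):
    #         print(i, frame.mean())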

    def __add__(self, other):
        if isinstance(other, (int, float, np.ndarray)):
            result = Diagnostic(species=self._species)

            for attr in [
                "_dx", "_nx", "_x", "_dt", "_grid", "_axis", "_dim",
                "_ndump", "_maxiter", "_tunits", "_type", "_simulation_folder",
            ]:
                if hasattr(self, attr):
                    setattr(result, attr, getattr(self, attr))

            # Make sure _maxiter is set even for derived diagnostics
            if not hasattr(result, "_maxiter") or result._maxiter is None:
                if hasattr(self, "_maxiter") and self._maxiter is not None:
                    result._maxiter = self._maxiter

            # result._name = self._name + " + " + str(other) if isinstance(other, (int, float)) else self._name + " + np.ndarray"

            if self._all_loaded:
                result._data = self._data + other
                result._all_loaded = True
            else:

                def gen_scalar_add(original_gen, scalar):
                    for val in original_gen:
                        yield val + scalar

                original_generator = self._data_generator
                result._data_generator = lambda index: gen_scalar_add(
                    original_generator(index), other
                )

            result.created_diagnostic_name = "MISC"

            return result

        elif isinstance(other, Diagnostic):
            result = Diagnostic(species=self._species)

            for attr in [
                "_dx", "_nx", "_x", "_dt", "_grid", "_axis", "_dim",
                "_ndump", "_maxiter", "_tunits", "_type", "_simulation_folder",
            ]:
                if hasattr(self, attr):
                    setattr(result, attr, getattr(self, attr))

            if not hasattr(result, "_maxiter") or result._maxiter is None:
                if hasattr(self, "_maxiter") and self._maxiter is not None:
                    result._maxiter = self._maxiter

            # result._name = self._name + " + " + str(other._name)

            if self._all_loaded:
                other.load_all()
                result._data = self._data + other._data
                result._all_loaded = True
            else:

                def gen_diag_add(original_gen1, original_gen2):
                    for val1, val2 in zip(original_gen1, original_gen2):
                        yield val1 + val2

                original_generator = self._data_generator
                other_generator = other._data_generator
                result._data_generator = lambda index: gen_diag_add(
                    original_generator(index), other_generator(index)
                )

            result.created_diagnostic_name = "MISC"

            return result

    def __sub__(self, other):
        if isinstance(other, (int, float, np.ndarray)):
            result = Diagnostic(species=self._species)

            for attr in [
                "_dx", "_nx", "_x", "_dt", "_grid", "_axis", "_dim",
                "_ndump", "_maxiter", "_tunits", "_type", "_simulation_folder",
            ]:
                if hasattr(self, attr):
                    setattr(result, attr, getattr(self, attr))

            if not hasattr(result, "_maxiter") or result._maxiter is None:
                if hasattr(self, "_maxiter") and self._maxiter is not None:
                    result._maxiter = self._maxiter

            # result._name = self._name + " - " + str(other) if isinstance(other, (int, float)) else self._name + " - np.ndarray"

            if self._all_loaded:
                result._data = self._data - other
                result._all_loaded = True
            else:

                def gen_scalar_sub(original_gen, scalar):
                    for val in original_gen:
                        yield val - scalar

                original_generator = self._data_generator
                result._data_generator = lambda index: gen_scalar_sub(
                    original_generator(index), other
                )

            result.created_diagnostic_name = "MISC"

            return result

        elif isinstance(other, Diagnostic):

            result = Diagnostic(species=self._species)

            for attr in [
                "_dx", "_nx", "_x", "_dt", "_grid", "_axis", "_dim",
                "_ndump", "_maxiter", "_tunits", "_type", "_simulation_folder",
            ]:
                if hasattr(self, attr):
                    setattr(result, attr, getattr(self, attr))

            if not hasattr(result, "_maxiter") or result._maxiter is None:
                if hasattr(self, "_maxiter") and self._maxiter is not None:
                    result._maxiter = self._maxiter

            # result._name = self._name + " - " + str(other._name)

            if self._all_loaded:
                other.load_all()
                result._data = self._data - other._data
                result._all_loaded = True
            else:

                def gen_diag_sub(original_gen1, original_gen2):
                    for val1, val2 in zip(original_gen1, original_gen2):
                        yield val1 - val2

                original_generator = self._data_generator
                other_generator = other._data_generator
                result._data_generator = lambda index: gen_diag_sub(
                    original_generator(index), other_generator(index)
                )

            result.created_diagnostic_name = "MISC"

            return result

    def __mul__(self, other):
        if isinstance(other, (int, float, np.ndarray)):
            result = Diagnostic(species=self._species)

            for attr in [
                "_dx", "_nx", "_x", "_dt", "_grid", "_axis", "_dim",
                "_ndump", "_maxiter", "_tunits", "_type", "_simulation_folder",
            ]:
                if hasattr(self, attr):
                    setattr(result, attr, getattr(self, attr))

            if not hasattr(result, "_maxiter") or result._maxiter is None:
                if hasattr(self, "_maxiter") and self._maxiter is not None:
                    result._maxiter = self._maxiter

            # result._name = self._name + " * " + str(other) if isinstance(other, (int, float)) else self._name + " * np.ndarray"

            if self._all_loaded:
                result._data = self._data * other
                result._all_loaded = True
            else:

                def gen_scalar_mul(original_gen, scalar):
                    for val in original_gen:
                        yield val * scalar

                original_generator = self._data_generator
                result._data_generator = lambda index: gen_scalar_mul(
                    original_generator(index), other
                )

            result.created_diagnostic_name = "MISC"

            return result

        elif isinstance(other, Diagnostic):
            result = Diagnostic(species=self._species)

            for attr in [
                "_dx", "_nx", "_x", "_dt", "_grid", "_axis", "_dim",
                "_ndump", "_maxiter", "_tunits", "_type", "_simulation_folder",
            ]:
                if hasattr(self, attr):
                    setattr(result, attr, getattr(self, attr))

            if not hasattr(result, "_maxiter") or result._maxiter is None:
                if hasattr(self, "_maxiter") and self._maxiter is not None:
                    result._maxiter = self._maxiter

            # result._name = self._name + " * " + str(other._name)

            if self._all_loaded:
                other.load_all()
                result._data = self._data * other._data
                result._all_loaded = True
            else:

                def gen_diag_mul(original_gen1, original_gen2):
                    for val1, val2 in zip(original_gen1, original_gen2):
                        yield val1 * val2

                original_generator = self._data_generator
                other_generator = other._data_generator
                result._data_generator = lambda index: gen_diag_mul(
                    original_generator(index), other_generator(index)
                )

            result.created_diagnostic_name = "MISC"

            return result

    def __truediv__(self, other):
        if isinstance(other, (int, float, np.ndarray)):
            result = Diagnostic(species=self._species)

            for attr in [
                "_dx", "_nx", "_x", "_dt", "_grid", "_axis", "_dim",
                "_ndump", "_maxiter", "_tunits", "_type", "_simulation_folder",
            ]:
                if hasattr(self, attr):
                    setattr(result, attr, getattr(self, attr))

            if not hasattr(result, "_maxiter") or result._maxiter is None:
                if hasattr(self, "_maxiter") and self._maxiter is not None:
                    result._maxiter = self._maxiter

            # result._name = self._name + " / " + str(other) if isinstance(other, (int, float)) else self._name + " / np.ndarray"

            if self._all_loaded:
                result._data = self._data / other
                result._all_loaded = True
            else:

                def gen_scalar_div(original_gen, scalar):
                    for val in original_gen:
                        yield val / scalar

                original_generator = self._data_generator
                result._data_generator = lambda index: gen_scalar_div(
                    original_generator(index), other
                )

            result.created_diagnostic_name = "MISC"

            return result

        elif isinstance(other, Diagnostic):

            result = Diagnostic(species=self._species)

            for attr in [
                "_dx", "_nx", "_x", "_dt", "_grid", "_axis", "_dim",
                "_ndump", "_maxiter", "_tunits", "_type", "_simulation_folder",
            ]:
                if hasattr(self, attr):
                    setattr(result, attr, getattr(self, attr))

            if not hasattr(result, "_maxiter") or result._maxiter is None:
                if hasattr(self, "_maxiter") and self._maxiter is not None:
                    result._maxiter = self._maxiter

            # result._name = self._name + " / " + str(other._name)

            if self._all_loaded:
                other.load_all()
                result._data = self._data / other._data
                result._all_loaded = True
            else:

                def gen_diag_div(original_gen1, original_gen2):
                    for val1, val2 in zip(original_gen1, original_gen2):
                        yield val1 / val2

                original_generator = self._data_generator
                other_generator = other._data_generator
                result._data_generator = lambda index: gen_diag_div(
                    original_generator(index), other_generator(index)
                )

            result.created_diagnostic_name = "MISC"

            return result

    def __pow__(self, other):
        # power by scalar
        if isinstance(other, (int, float)):
            result = Diagnostic(species=self._species)

            for attr in [
                "_dx", "_nx", "_x", "_dt", "_grid", "_axis", "_dim",
                "_ndump", "_maxiter", "_tunits", "_type", "_simulation_folder",
            ]:
                if hasattr(self, attr):
                    setattr(result, attr, getattr(self, attr))

            if not hasattr(result, "_maxiter") or result._maxiter is None:
                if hasattr(self, "_maxiter") and self._maxiter is not None:
                    result._maxiter = self._maxiter

            # result._name = self._name + " ^(" + str(other) + ")"
            # result._label = self._label + rf"$ ^{other}$"

            if self._all_loaded:
                result._data = self._data**other
                result._all_loaded = True
            else:

                def gen_scalar_pow(original_gen, scalar):
                    for val in original_gen:
                        yield val**scalar

                original_generator = self._data_generator
                result._data_generator = lambda index: gen_scalar_pow(
                    original_generator(index), other
                )

            result.created_diagnostic_name = "MISC"

            return result

        # power by another diagnostic
        elif isinstance(other, Diagnostic):
            raise ValueError(
                "Power by another diagnostic is not supported. Why would you do that?"
            )

    def __radd__(self, other):
        return self + other

    def __rsub__(
        self, other
    ):  # I don't know if this is correct because I'm not sure if the order of the subtraction is correct
        # Note: `-self` relies on __neg__, which is not defined in this class.
        return -self + other

    def __rmul__(self, other):
        return self * other

    def __rtruediv__(self, other):  # division is not commutative
        if isinstance(other, (int, float, np.ndarray)):
            result = Diagnostic(species=self._species)

            for attr in [
                "_dx", "_nx", "_x", "_dt", "_grid", "_axis", "_dim",
                "_ndump", "_maxiter", "_tunits", "_type", "_simulation_folder",
            ]:
                if hasattr(self, attr):
                    setattr(result, attr, getattr(self, attr))

            if not hasattr(result, "_maxiter") or result._maxiter is None:
                if hasattr(self, "_maxiter") and self._maxiter is not None:
                    result._maxiter = self._maxiter

            # result._name = str(other) + " / " + self._name if isinstance(other, (int, float)) else "np.ndarray / " + self._name

            if self._all_loaded:
                result._data = other / self._data
                result._all_loaded = True
            else:

                def gen_scalar_rdiv(scalar, original_gen):
                    for val in original_gen:
                        yield scalar / val

                original_generator = self._data_generator
                result._data_generator = lambda index: gen_scalar_rdiv(
                    other, original_generator(index)
                )

            result.created_diagnostic_name = "MISC"

            return result

        elif isinstance(other, Diagnostic):

            result = Diagnostic(species=self._species)

            for attr in [
                "_dx", "_nx", "_x", "_dt", "_grid", "_axis", "_dim",
                "_ndump", "_maxiter", "_tunits", "_type", "_simulation_folder",
            ]:
                if hasattr(self, attr):
                    setattr(result, attr, getattr(self, attr))

            if not hasattr(result, "_maxiter") or result._maxiter is None:
                if hasattr(self, "_maxiter") and self._maxiter is not None:
                    result._maxiter = self._maxiter

            # result._name = str(other._name) + " / " + self._name

            if self._all_loaded:
                other.load_all()
                result._data = other._data / self._data
                result._all_loaded = True
            else:

                def gen_diag_div(original_gen1, original_gen2):
                    for val1, val2 in zip(original_gen1, original_gen2):
                        yield val2 / val1

                original_generator = self._data_generator
                other_generator = other._data_generator
                result._data_generator = lambda index: gen_diag_div(
                    original_generator(index), other_generator(index)
                )

            result.created_diagnostic_name = "MISC"

            return result
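
    # Arithmetic sketch (hypothetical diagnostics ufl1, ufl2 for the same species): the operators above
    # return new Diagnostic objects that either combine cached arrays or wrap the generators lazily.
    #
    #     u_perp_sq = ufl1**2 + ufl2**2      # still a Diagnostic; nothing is read yet if nothing was loaded
    #     frame = u_perp_sq[3]               # evaluates just iteration 3 through the chained generators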

    def to_h5(self, savename=None, index=None, all=False, verbose=False, path=None):
        """
        Save the diagnostic data to HDF5 files.

        Parameters
        ----------
        savename : str, optional
            The name of the HDF5 file. If None, uses the diagnostic name.
        index : int, or list of ints, optional
            The index or indices of the data to save.
        all : bool, optional
            If True, save all data. Default is False.
        verbose : bool, optional
            If True, print messages about the saving process.
        path : str, optional
            The path to save the HDF5 files. If None, uses the default save path (in the simulation folder).
        """
        if self._name is None:
            raise ValueError("Diagnostic name is not set. Cannot save to HDF5.")

        # Resolve the save name and default sub-folder before building the save path
        if savename is None:
            print(f"No savename provided. Using {self._name}.")
            savename = self._name

        if hasattr(self, "created_diagnostic_name"):
            self._default_save = self.created_diagnostic_name
        elif hasattr(self, "postprocess_name"):
            self._default_save = self.postprocess_name
        else:
            self._default_save = "DIR_" + self._name

        if path is None:
            path = self._simulation_folder
            self._save_path = path + f"/MS/MISC/{self._default_save}/{savename}"
        else:
            self._save_path = path

        if not os.path.exists(self._save_path):
            os.makedirs(self._save_path)
            if verbose:
                print(f"Created folder {self._save_path}")

        if verbose:
            print(f"Save Path: {self._save_path}")

        def savefile(filename, i):
            with h5py.File(filename, 'w') as f:
                # Create SIMULATION group with attributes
                sim_group = f.create_group("SIMULATION")
                sim_group.attrs.create("DT", [self._dt])
                sim_group.attrs.create("NDIMS", [self._dim])

                # Set file attributes
                f.attrs.create("TIME", [self.time(i)[0]])
                f.attrs.create("TIME UNITS", [np.bytes_(self.time(i)[1].encode()) if self.time(i)[1] else np.bytes_(b"")])
                f.attrs.create("ITER", [self._ndump * i])
                f.attrs.create("NAME", [np.bytes_(self._name.encode())])
                f.attrs.create("TYPE", [np.bytes_(self._type.encode())])
                f.attrs.create("UNITS", [np.bytes_(self._units.encode()) if self._units else np.bytes_(b"")])
                f.attrs.create("LABEL", [np.bytes_(self._label.encode()) if self._label else np.bytes_(b"")])

                # Create dataset with data (transposed to match convention)
                f.create_dataset(savename, data=self[i].T)

                # Create AXIS group
                axis_group = f.create_group("AXIS")

                # Create axis datasets
                axis_names = ["AXIS1", "AXIS2", "AXIS3"][:self._dim]
                axis_shortnames = [self._axis[i]["name"] for i in range(self._dim)]
                axis_longnames = [self._axis[i]["long_name"] for i in range(self._dim)]
                axis_units = [self._axis[i]["units"] for i in range(self._dim)]

                for i, axis_name in enumerate(axis_names):
                    # Create axis dataset
                    axis_dataset = axis_group.create_dataset(axis_name, data=np.array(self._grid[i]))

                    # Set axis attributes
                    axis_dataset.attrs.create("NAME", [np.bytes_(axis_shortnames[i].encode())])
                    axis_dataset.attrs.create("UNITS", [np.bytes_(axis_units[i].encode())])
                    axis_dataset.attrs.create("LONG_NAME", [np.bytes_(axis_longnames[i].encode())])
                    axis_dataset.attrs.create("TYPE", [np.bytes_("linear".encode())])

                if verbose:
                    print(f"File created: {filename}")

        print(f"The savename of the diagnostic is {savename}. Files will be saved as {savename}-000001.h5, {savename}-000002.h5, etc.")
        print("If you desire a different name, please set it with the 'name' setter.")

        if not os.path.exists(path):
            print(f"Creating folder {path}...")
            os.makedirs(path)
        if not os.path.isdir(path):
            raise ValueError(f"{path} is not a directory.")

        if all == False:
            if isinstance(index, int):
                filename = self._save_path + f"/{savename}-{index:06d}.h5"
                savefile(filename, index)
            elif isinstance(index, list) or isinstance(index, tuple):
                for i in index:
                    filename = self._save_path + f"/{savename}-{i:06d}.h5"
                    savefile(filename, i)
        elif all == True:
            for i in range(self._maxiter):
                filename = self._save_path + f"/{savename}-{i:06d}.h5"
                savefile(filename, i)
        else:
            raise ValueError("index should be an int, slice, or list of ints, or all should be True")
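
    # Saving sketch (hypothetical `diag`): files go under <simulation>/MS/MISC/<default_save>/<savename>/
    # unless an explicit path is given, one HDF5 file per saved iteration.
    #
    #     diag.to_h5(savename="my_quantity", index=0)       # a single iteration
    #     diag.to_h5(savename="my_quantity", all=True)      # every iteration up to maxiter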

    def plot_3d(
        self,
        idx,
        scale_type: Literal["zero_centered", "pos", "neg", "default"] = "default",
        boundaries: np.ndarray = None,
    ):
        """
        Plots a 3D scatter plot of the diagnostic data (grid data).

        Parameters
        ----------
        idx : int
            Index of the data to plot.
        scale_type : Literal["zero_centered", "pos", "neg", "default"], optional
            Type of scaling for the colormap:
            - "zero_centered": Center colormap around zero.
            - "pos": Colormap for positive values.
            - "neg": Colormap for negative values.
            - "default": Standard colormap.
        boundaries : np.ndarray, optional
            Boundaries, with shape (3, 2), to plot only part of the data. If None, uses the default grid boundaries.

        Returns
        -------
        fig : matplotlib.figure.Figure
            The figure object containing the plot.
        ax : matplotlib.axes._subplots.Axes3DSubplot
            The 3D axes object of the plot.

        Example
        -------
        sim = ou.Simulation("electrons", "path/to/simulation")
        fig, ax = sim["b3"].plot_3d(55, scale_type="zero_centered", boundaries=[[0, 40], [0, 40], [0, 20]])
        plt.show()
        """

        if self._dim != 3:
            raise ValueError("This method is only available for 3D diagnostics.")

        if boundaries is None:
            boundaries = self._grid

        if not isinstance(boundaries, np.ndarray):
            try:
                boundaries = np.array(boundaries)
            except:
                boundaries = self._grid
                warnings.warn(
                    "boundaries cannot be accessed as a numpy array with shape (3, 2), using default instead"
                )

        if boundaries.shape != (3, 2):
            warnings.warn("boundaries should have shape (3, 2), using default instead")
            boundaries = self._grid

        # Load data
        if self._all_loaded:
            data = self._data[idx]
        else:
            data = self[idx]

        X, Y, Z = np.meshgrid(self._x[0], self._x[1], self._x[2], indexing="ij")

        # Flatten arrays for scatter plot
        X_flat, Y_flat, Z_flat = X.ravel(), Y.ravel(), Z.ravel()
        data_flat = data.ravel()

        # Apply filter: keep only the chosen points
        mask = (
            (X_flat > boundaries[0][0])
            & (X_flat < boundaries[0][1])
            & (Y_flat > boundaries[1][0])
            & (Y_flat < boundaries[1][1])
            & (Z_flat > boundaries[2][0])
            & (Z_flat < boundaries[2][1])
        )
        X_cut, Y_cut, Z_cut, data_cut = (
            X_flat[mask],
            Y_flat[mask],
            Z_flat[mask],
            data_flat[mask],
        )

        if scale_type == "zero_centered":
            # Center colormap around zero
            cmap = "seismic"
            vmax = np.max(np.abs(data_flat))  # Find max absolute value
            vmin = -vmax
        elif scale_type == "pos":
            cmap = "plasma"
            vmax = np.max(data_flat)
            vmin = 0
        elif scale_type == "neg":
            cmap = "plasma"
            vmax = 0
            vmin = np.min(data_flat)
        else:
            cmap = "plasma"
            vmax = np.max(data_flat)
            vmin = np.min(data_flat)

        norm = plt.Normalize(vmin=vmin, vmax=vmax)

        # Plot
        fig = plt.figure(figsize=(10, 7))
        ax = fig.add_subplot(111, projection="3d")

        # Scatter plot with the chosen colormap
        sc = ax.scatter(X_cut, Y_cut, Z_cut, c=data_cut, cmap=cmap, norm=norm, alpha=1)

        # Set limits to maintain the full background
        ax.set_xlim(*self._grid[0])
        ax.set_ylim(*self._grid[1])
        ax.set_zlim(*self._grid[2])

        # Colorbar
        cbar = plt.colorbar(sc, ax=ax, shrink=0.6)

        # Labels
        # TODO try to use a latex label instead of _name
        cbar.set_label(r"${}$".format(self._name) + r"$\ [{}]$".format(self._units))
        ax.set_title(
            r"$t={:.2f}$".format(self.time(idx)[0])
            + r"$\ [{}]$".format(self.time(idx)[1])
        )
        ax.set_xlabel(
            r"${}$".format(self.axis[0]["long_name"])
            + r"$\ [{}]$".format(self.axis[0]["units"])
        )
        ax.set_ylabel(
            r"${}$".format(self.axis[1]["long_name"])
            + r"$\ [{}]$".format(self.axis[1]["units"])
        )
        ax.set_zlabel(
            r"${}$".format(self.axis[2]["long_name"])
            + r"$\ [{}]$".format(self.axis[2]["units"])
        )

        return fig, ax

    # Getters
    @property
    def data(self):
        if self._data is None:
            raise ValueError(
                "Data not loaded into memory. Use get_* method with load_all=True or access via generator/index."
            )
        return self._data

    @property
    def dx(self):
        return self._dx

    @property
    def nx(self):
        return self._nx

    @property
    def x(self):
        return self._x

    @property
    def dt(self):
        return self._dt

    @property
    def grid(self):
        return self._grid

    @property
    def axis(self):
        return self._axis

    @property
    def units(self):
        return self._units

    @property
    def tunits(self):
        return self._tunits

    @property
    def name(self):
        return self._name

    @property
    def dim(self):
        return self._dim

    @property
    def path(self):
        return self._path

    @property
    def simulation_folder(self):
        return self._simulation_folder

    @property
    def ndump(self):
        return self._ndump

    # @property
    # def iter(self):
    #     return self._iter

    @property
    def all_loaded(self):
        return self._all_loaded

    @property
    def maxiter(self):
        return self._maxiter

    @property
    def label(self):
        return self._label

    @property
    def type(self):
        return self._type

    @property
    def quantity(self):
        return self._quantity

    def time(self, index):
        # Time of dump `index`: iteration number (index * ndump) times the simulation time step.
        return [index * self._dt * self._ndump, self._tunits]
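
    # Worked example (hypothetical numbers): with dt = 0.07 and ndump = 10, dump index 5 corresponds to
    # iteration 50, so time(5) returns [5 * 0.07 * 10, self._tunits] = [3.5, self._tunits].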

    def attributes_to_save(self, index):
        """
        Prints the attributes of the diagnostic.
        """
        print(f"dt: {self._dt}\n"
              f"dim: {self._dim}\n"
              f"time: {self.time(index)[0]}\n"
              f"tunits: {self.time(index)[1]}\n"
              f"iter: {self._ndump * index}\n"
              f"name: {self._name}\n"
              f"type: {self._type}\n"
              f"label: {self._label}\n"
              f"units: {self._units}")

    @dx.setter
    def dx(self, value):
        self._dx = value

    @nx.setter
    def nx(self, value):
        self._nx = value

    @x.setter
    def x(self, value):
        self._x = value

    @dt.setter
    def dt(self, value):
        self._dt = value

    @grid.setter
    def grid(self, value):
        self._grid = value

    @axis.setter
    def axis(self, value):
        self._axis = value

    @units.setter
    def units(self, value):
        self._units = value

    @tunits.setter
    def tunits(self, value):
        self._tunits = value

    @name.setter
    def name(self, value):
        self._name = value

    @dim.setter
    def dim(self, value):
        self._dim = value

    @ndump.setter
    def ndump(self, value):
        self._ndump = value

    @data.setter
    def data(self, value):
        self._data = value

    @quantity.setter
    def quantity(self, key):
        self._quantity = key

    @label.setter
    def label(self, value):
        self._label = value