turbx 1.0.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
turbx/ztmd.py ADDED
@@ -0,0 +1,2224 @@
1
+ import io
2
+ import os
3
+ import re
4
+ import shutil
5
+ import struct
6
+ import subprocess
7
+ import sys
8
+ import textwrap
9
+ import time
10
+ import timeit
11
+ from pathlib import Path
12
+
13
+ import h5py
14
+ import numpy as np
15
+ from tqdm import tqdm
16
+
17
+ from .eas3 import eas3
18
+ from .eas4 import eas4
19
+ from .utils import even_print, format_time_string
20
+ from .ztmd_analysis import (
21
+ _calc_bl_edge,
22
+ _calc_bl_edge_quantities,
23
+ _calc_bl_integral_quantities,
24
+ _calc_d99,
25
+ _calc_d99_inc,
26
+ _calc_d99_quantities,
27
+ _calc_gradients,
28
+ _calc_peak_tauI,
29
+ _calc_psvel,
30
+ _calc_u_inc,
31
+ _calc_VDII,
32
+ _calc_wake_parameter,
33
+ _calc_wall_quantities,
34
+ )
35
+
36
+ # ======================================================================
37
+
38
+ class ztmd(h5py.File):
39
+ '''
40
+ Span (z) & temporal (t) mean data (md)
41
+ -----
42
+ --> mean_flow_mpi.eas
43
+ --> favre_mean_flow_mpi.eas
44
+ --> ext_rms_fluctuation_mpi.eas
45
+ --> ext_favre_fluctuation_mpi.eas
46
+ --> turbulent_budget_mpi.eas
47
+ -----
48
+ '''
49
+
50
+ def __init__(self, *args, **kwargs):
51
+
52
+ self.fname, self.open_mode = args
53
+
54
+ self.fname_path = os.path.dirname(self.fname)
55
+ self.fname_base = os.path.basename(self.fname)
56
+ self.fname_root, self.fname_ext = os.path.splitext(self.fname_base)
57
+
58
+ ## default to libver='latest' if none provided
59
+ if ('libver' not in kwargs):
60
+ kwargs['libver'] = 'latest'
61
+
62
+ ## catch possible user error --> could prevent accidental EAS overwrites
63
+ if (self.fname_ext=='.eas'):
64
+ raise ValueError('EAS4 files should not be opened with turbx.ztmd()')
65
+
66
+ ## mpio driver for ZTMD currently not supported
67
+ if ('driver' in kwargs) and (kwargs['driver']=='mpio'):
68
+ raise ValueError('ZTMD class is currently not set up to be used with MPI')
69
+
70
+ ## determine if using mpi
71
+ if ('driver' in kwargs) and (kwargs['driver']=='mpio'):
72
+ self.usingmpi = True
73
+ else:
74
+ self.usingmpi = False
75
+
76
+ ## determine communicator & rank info
77
+ if self.usingmpi:
78
+ self.comm = kwargs['comm']
79
+ self.n_ranks = self.comm.Get_size()
80
+ self.rank = self.comm.Get_rank()
81
+ else:
82
+ self.comm = None
83
+ self.n_ranks = 1
84
+ self.rank = 0
85
+
86
+ ## ztmd() unique kwargs (not h5py.File kwargs) --> pop() rather than get()
87
+ stripe_count = kwargs.pop('stripe_count' , 16 )
88
+ stripe_size_mb = kwargs.pop('stripe_size_mb' , 8 )
89
+ perms = kwargs.pop('perms' , '640' )
90
+
91
+ if not isinstance(stripe_count, int):
92
+ raise ValueError
93
+ if not isinstance(stripe_size_mb, int):
94
+ raise ValueError
95
+ if not isinstance(perms, str):
96
+ raise ValueError
97
+ if not len(perms)==3:
98
+ raise ValueError
99
+ if not re.fullmatch(r'\d{3}',perms):
100
+ raise ValueError
101
+
102
+ ## if not using MPI, remove 'driver' and 'comm' from kwargs
103
+ if ( not self.usingmpi ) and ('driver' in kwargs):
104
+ kwargs.pop('driver')
105
+ if ( not self.usingmpi ) and ('comm' in kwargs):
106
+ kwargs.pop('comm')
107
+
108
+ ## ztmd() unique kwargs (not h5py.File kwargs) --> pop() rather than get()
109
+ verbose = kwargs.pop( 'verbose' , False )
110
+ force = kwargs.pop( 'force' , False )
111
+
112
+ if not isinstance(verbose, bool):
113
+ raise ValueError
114
+ if not isinstance(force, bool):
115
+ raise ValueError
116
+
117
+ # === initialize file on FS
118
+
119
+ ## if file open mode is 'w', the file exists, and force is False
120
+ ## --> raise error
121
+ if (self.open_mode == 'w') and (force is False) and os.path.isfile(self.fname):
122
+ if (self.rank==0):
123
+ print('\n'+72*'-')
124
+ print(self.fname+' already exists! opening with \'w\' would overwrite.\n')
125
+ openModeInfoStr = '''
126
+ r --> Read only, file must exist
127
+ r+ --> Read/write, file must exist
128
+ w --> Create file, truncate if exists
129
+ w- or x --> Create file, fail if exists
130
+ a --> Read/write if exists, create otherwise
131
+
132
+ or use force=True arg:
133
+
134
+ >>> with ztmd(<<fname>>,'w',force=True) as f:
135
+ >>> ...
136
+ '''
137
+ print(textwrap.indent(textwrap.dedent(openModeInfoStr), 2*' ').strip('\n'))
138
+ print(72*'-'+'\n')
139
+ sys.stdout.flush()
140
+
141
+ if (self.comm is not None):
142
+ self.comm.Barrier()
143
+ raise FileExistsError()
144
+
145
+ ## if file open mode is 'w'
146
+ ## --> <delete>, touch, chmod, stripe
147
+ if (self.open_mode == 'w'):
148
+ if (self.rank==0):
149
+ if os.path.isfile(self.fname): ## if the file exists, delete it
150
+ os.remove(self.fname)
151
+ time.sleep(0.1)
152
+ Path(self.fname).touch() ## touch a new file
153
+ os.chmod(self.fname, int(perms, base=8)) ## change permissions
154
+ if shutil.which('lfs') is not None: ## set stripe if on Lustre
155
+ cmd_str_lfs_migrate = f'lfs migrate --stripe-count {stripe_count:d} --stripe-size {stripe_size_mb:d}M {self.fname} > /dev/null 2>&1'
156
+ return_code = subprocess.call(cmd_str_lfs_migrate, shell=True)
157
+ if (return_code != 0):
158
+ raise ValueError('lfs migrate failed')
159
+ else:
160
+ #print('striping with lfs not permitted on this filesystem')
161
+ pass
162
+
163
+ if (self.comm is not None):
164
+ self.comm.Barrier()
165
+
166
+ self.mod_avail_tqdm = ('tqdm' in sys.modules)
167
+
168
+ ## call actual h5py.File.__init__()
169
+ super(ztmd, self).__init__(*args, **kwargs)
170
+ self.get_header(verbose=verbose)
171
+
172
+ def get_header(self,**kwargs):
173
+ '''
174
+ initialize header attributes of ZTMD class instance
175
+ '''
176
+
177
+ verbose = kwargs.get('verbose',True)
178
+
179
+ if (self.rank!=0):
180
+ verbose=False
181
+
182
+ # === udef (header vector dset based) --> the 'old' way
183
+
184
+ if ('header' in self):
185
+
186
+ udef_real = np.copy(self['header/udef_real'][:])
187
+ udef_char = np.copy(self['header/udef_char'][:]) ## the unpacked numpy array of |S128 encoded fixed-length character objects
188
+ udef_char = [s.decode('utf-8') for s in udef_char] ## convert it to a python list of utf-8 strings
189
+ self.udef = dict(zip(udef_char, udef_real)) ## make dict where keys are udef_char and values are udef_real
190
+
191
+ # === characteristic values
192
+
193
+ self.Ma = self.udef['Ma']
194
+ self.M_inf = self.Ma
195
+ self.Re = self.udef['Re']
196
+ self.Pr = self.udef['Pr']
197
+ self.kappa = self.udef['kappa']
198
+ self.R = self.udef['R']
199
+ self.p_inf = self.udef['p_inf']
200
+ self.T_inf = self.udef['T_inf']
201
+ self.mu_Suth_ref = self.udef['mu_Suth_ref']
202
+ self.T_Suth_ref = self.udef['T_Suth_ref']
203
+ self.S_Suth = self.udef['S_Suth']
204
+ #self.C_Suth = self.udef['C_Suth']
205
+
206
+ self.C_Suth = self.mu_Suth_ref/(self.T_Suth_ref**(3/2))*(self.T_Suth_ref + self.S_Suth) ## [kg/(m·s·√K)]
207
+ self.udef['C_Suth'] = self.C_Suth
208
+
209
+ if verbose: print(72*'-')
210
+ if verbose: even_print('Ma' , '%0.2f [-]' % self.Ma )
211
+ if verbose: even_print('Re' , '%0.1f [-]' % self.Re )
212
+ if verbose: even_print('Pr' , '%0.3f [-]' % self.Pr )
213
+ if verbose: even_print('T_inf' , '%0.3f [K]' % self.T_inf )
214
+ if verbose: even_print('p_inf' , '%0.1f [Pa]' % self.p_inf )
215
+ if verbose: even_print('kappa' , '%0.3f [-]' % self.kappa )
216
+ if verbose: even_print('R' , '%0.3f [J/(kg·K)]' % self.R )
217
+ if verbose: even_print('mu_Suth_ref' , '%0.6E [kg/(m·s)]' % self.mu_Suth_ref )
218
+ if verbose: even_print('T_Suth_ref' , '%0.2f [K]' % self.T_Suth_ref )
219
+ if verbose: even_print('S_Suth' , '%0.2f [K]' % self.S_Suth )
220
+ if verbose: even_print('C_Suth' , '%0.5e [kg/(m·s·√K)]' % self.C_Suth )
221
+
222
+ # === characteristic values : derived
223
+
224
+ self.mu_inf = self.mu_Suth_ref*(self.T_inf/self.T_Suth_ref)**(3/2) * ((self.T_Suth_ref+self.S_Suth)/(self.T_inf+self.S_Suth))
225
+ self.rho_inf = self.p_inf/(self.R*self.T_inf)
226
+ self.nu_inf = self.mu_inf/self.rho_inf
227
+ self.a_inf = np.sqrt(self.kappa*self.R*self.T_inf)
228
+ self.U_inf = self.Ma*self.a_inf
229
+ self.cp = self.R*self.kappa/(self.kappa-1.)
230
+ self.cv = self.cp/self.kappa
231
+ self.recov_fac = self.Pr**(1/3)
232
+ self.Taw = self.T_inf + self.recov_fac*self.U_inf**2/(2*self.cp)
233
+ self.lchar = self.Re*self.nu_inf/self.U_inf
234
+
235
+ self.tchar = self.lchar / self.U_inf
236
+ self.uchar = self.U_inf
237
+
238
+ #self.p_tot_inf = self.p_inf * (1 + (self.kappa-1)/2 * self.U_inf**2 / (self.kappa*self.R*self.T_inf))**(self.kappa/(self.kappa-1))
239
+ self.p_tot_inf = self.p_inf * (1 + (self.kappa-1)/2 * self.M_inf**2)**(self.kappa/(self.kappa-1))
240
+ self.T_tot_inf = self.T_inf * (1 + (self.kappa-1)/2 * self.M_inf**2)
241
+ self.rho_tot_inf = self.rho_inf * (1 + (self.kappa-1)/2 * self.M_inf**2)**(1/(self.kappa-1))
242
+
243
+ if verbose: print(72*'-')
244
+ if verbose: even_print('rho_inf' , '%0.3f [kg/m³]' % self.rho_inf )
245
+ if verbose: even_print('mu_inf' , '%0.6E [kg/(m·s)]' % self.mu_inf )
246
+ if verbose: even_print('nu_inf' , '%0.6E [m²/s]' % self.nu_inf )
247
+ if verbose: even_print('a_inf' , '%0.6f [m/s]' % self.a_inf )
248
+ if verbose: even_print('U_inf' , '%0.6f [m/s]' % self.U_inf )
249
+ if verbose: even_print('cp' , '%0.3f [J/(kg·K)]' % self.cp )
250
+ if verbose: even_print('cv' , '%0.3f [J/(kg·K)]' % self.cv )
251
+ if verbose: even_print('recovery factor' , '%0.6f [-]' % self.recov_fac )
252
+ if verbose: even_print('Taw' , '%0.3f [K]' % self.Taw )
253
+ if verbose: even_print('lchar' , '%0.6E [m]' % self.lchar )
254
+ if verbose: even_print('tchar' , '%0.6E [s]' % self.tchar )
255
+ if verbose: print(72*'-'+'\n')
256
+
257
+ # === write the 'derived' udef variables to a dict attribute of the CGD instance
258
+ self.udef_deriv = { 'rho_inf':self.rho_inf,
259
+ 'mu_inf':self.mu_inf,
260
+ 'nu_inf':self.nu_inf,
261
+ 'a_inf':self.a_inf,
262
+ 'U_inf':self.U_inf,
263
+ 'cp':self.cp,
264
+ 'cv':self.cv,
265
+ 'recov_fac':self.recov_fac,
266
+ 'Taw':self.Taw,
267
+ 'lchar':self.lchar,
268
+ }
269
+
270
+ else:
271
+ #print("dset 'header' not in ZTMD")
272
+ pass
273
+
274
+ # === udef (attr based)
275
+
276
+ header_attr_keys = [
277
+ 'Ma','Re','Pr',
278
+ 'kappa','R',
279
+ 'p_inf','T_inf',
280
+ 'S_Suth','mu_Suth_ref','T_Suth_ref',
281
+ ]
282
+
283
+ header_attr_keys_derived = [
284
+ 'C_Suth','mu_inf','rho_inf','nu_inf',
285
+ 'a_inf','U_inf',
286
+ 'cp','cv',
287
+ 'recov_fac','Taw',
288
+ 'lchar','tchar',
289
+ 'uchar','M_inf',
290
+ ]
291
+
292
+ if all([ key in self.attrs.keys() for key in header_attr_keys ]):
293
+ header_attr_based = True
294
+ else:
295
+ header_attr_based = False
296
+
297
+ if header_attr_based:
298
+
299
+ ## set all attributes
300
+ for key in header_attr_keys:
301
+ setattr( self, key, self.attrs[key] )
302
+
303
+ if hasattr(self,'Ma') and not hasattr(self,'M_inf'):
304
+ setattr(self,'M_inf',self.Ma)
305
+
306
+ # mu_inf_1 = 14.58e-7*self.T_inf**1.5/(self.T_inf+110.4)
307
+ # mu_inf_2 = self.mu_Suth_ref*(self.T_inf/self.T_Suth_ref)**(3/2) * ((self.T_Suth_ref+self.S_Suth)/(self.T_inf+self.S_Suth))
308
+ # mu_inf_3 = self.C_Suth*self.T_inf**(3/2)/(self.T_inf+self.S_Suth)
309
+
310
+ ## characteristic values : derived
311
+ self.C_Suth = self.mu_Suth_ref/(self.T_Suth_ref**(3/2))*(self.T_Suth_ref + self.S_Suth) ## [kg/(m·s·√K)]
312
+ self.mu_inf = self.mu_Suth_ref*(self.T_inf/self.T_Suth_ref)**(3/2) * ((self.T_Suth_ref+self.S_Suth)/(self.T_inf+self.S_Suth))
313
+ self.rho_inf = self.p_inf/(self.R*self.T_inf)
314
+ self.nu_inf = self.mu_inf/self.rho_inf
315
+ self.a_inf = np.sqrt(self.kappa*self.R*self.T_inf)
316
+ self.U_inf = self.Ma*self.a_inf
317
+ self.cp = self.R*self.kappa/(self.kappa-1.)
318
+ self.cv = self.cp/self.kappa
319
+ self.recov_fac = self.Pr**(1/3)
320
+ self.Taw = self.T_inf + self.recov_fac*self.U_inf**2/(2*self.cp)
321
+ self.lchar = self.Re*self.nu_inf/self.U_inf
322
+
323
+ self.tchar = self.lchar / self.U_inf
324
+ self.uchar = self.U_inf
325
+
326
+ #self.p_tot_inf = self.p_inf * (1 + (self.kappa-1)/2 * self.U_inf**2 / (self.kappa*self.R*self.T_inf))**(self.kappa/(self.kappa-1))
327
+ self.p_tot_inf = self.p_inf * (1 + (self.kappa-1)/2 * self.M_inf**2)**(self.kappa/(self.kappa-1))
328
+ self.T_tot_inf = self.T_inf * (1 + (self.kappa-1)/2 * self.M_inf**2)
329
+ self.rho_tot_inf = self.rho_inf * (1 + (self.kappa-1)/2 * self.M_inf**2)**(1/(self.kappa-1))
330
+
331
+ #self.udef['C_Suth'] = self.C_Suth
332
+
333
+ if verbose: print(72*'-')
334
+ if verbose: even_print('Ma' , '%0.2f [-]' % self.Ma )
335
+ if verbose: even_print('Re' , '%0.1f [-]' % self.Re )
336
+ if verbose: even_print('Pr' , '%0.3f [-]' % self.Pr )
337
+ if verbose: even_print('T_inf' , '%0.3f [K]' % self.T_inf )
338
+ if verbose: even_print('p_inf' , '%0.1f [Pa]' % self.p_inf )
339
+ if verbose: even_print('kappa' , '%0.3f [-]' % self.kappa )
340
+ if verbose: even_print('R' , '%0.3f [J/(kg·K)]' % self.R )
341
+ if verbose: even_print('mu_Suth_ref' , '%0.6E [kg/(m·s)]' % self.mu_Suth_ref )
342
+ if verbose: even_print('T_Suth_ref' , '%0.2f [K]' % self.T_Suth_ref )
343
+ if verbose: even_print('S_Suth' , '%0.2f [K]' % self.S_Suth )
344
+ if verbose: even_print('C_Suth' , '%0.5e [kg/(m·s·√K)]' % self.C_Suth )
345
+
346
+ if verbose: print(72*'-')
347
+ if verbose: even_print('rho_inf' , '%0.3f [kg/m³]' % self.rho_inf )
348
+ if verbose: even_print('mu_inf' , '%0.6E [kg/(m·s)]' % self.mu_inf )
349
+ if verbose: even_print('nu_inf' , '%0.6E [m²/s]' % self.nu_inf )
350
+ if verbose: even_print('a_inf' , '%0.6f [m/s]' % self.a_inf )
351
+ if verbose: even_print('U_inf' , '%0.6f [m/s]' % self.U_inf )
352
+ if verbose: even_print('cp' , '%0.3f [J/(kg·K)]' % self.cp )
353
+ if verbose: even_print('cv' , '%0.3f [J/(kg·K)]' % self.cv )
354
+ if verbose: even_print('recovery factor' , '%0.6f [-]' % self.recov_fac )
355
+ if verbose: even_print('Taw' , '%0.3f [K]' % self.Taw )
356
+ if verbose: even_print('lchar' , '%0.6E [m]' % self.lchar )
357
+ if verbose: even_print('tchar' , '%0.6E [s]' % self.tchar )
358
+ #if verbose: print(72*'-'+'\n')
359
+
360
+ ## assert that the derived values are equal to any HDF5 top-level attributes
361
+ for key in header_attr_keys_derived:
362
+ if (key in self.attrs): ## if is in HDF5 as top-level attribute
363
+ np.testing.assert_allclose( getattr(self,key), self.attrs[key], rtol=1e-10, atol=1e-10 )
364
+
365
+ ## assign udef dict as instance attribute for convenience
366
+ self.udef = {
367
+ 'Ma':self.Ma,
368
+ 'Re':self.Re,
369
+ 'Pr':self.Pr,
370
+ 'kappa':self.kappa,
371
+ 'R':self.R,
372
+ 'p_inf':self.p_inf,
373
+ 'T_inf':self.T_inf,
374
+ 'S_Suth':self.S_Suth,
375
+ 'mu_Suth_ref':self.mu_Suth_ref,
376
+ 'T_Suth_ref':self.T_Suth_ref,
377
+
378
+ 'C_Suth':self.C_Suth,
379
+ 'mu_inf':self.mu_inf,
380
+ 'rho_inf':self.rho_inf,
381
+ 'nu_inf':self.nu_inf,
382
+ 'a_inf':self.a_inf,
383
+ 'U_inf':self.U_inf,
384
+ 'cp':self.cp,
385
+ 'cv':self.cv,
386
+ 'recov_fac':self.recov_fac,
387
+ 'Taw':self.Taw,
388
+ 'lchar':self.lchar,
389
+ 'tchar':self.tchar,
390
+
391
+ 'uchar':self.uchar,
392
+ 'M_inf':self.M_inf,
393
+ }
394
+
395
+ if ('duration_avg' in self.attrs.keys()):
396
+ self.duration_avg = self.attrs['duration_avg']
397
+ if ('nx' in self.attrs.keys()):
398
+ self.nx = self.attrs['nx']
399
+ if ('ny' in self.attrs.keys()):
400
+ self.ny = self.attrs['ny']
401
+
402
+ if ('dims/x' in self):
403
+ self.x = np.copy( self['dims/x'][()] ) ## dont transpose yet
404
+ if ('dims/y' in self):
405
+ self.y = np.copy( self['dims/y'][()] ) ## dont transpose yet
406
+ if ('dims/t' in self):
407
+ self.t = np.copy( self['dims/t'][()] )
408
+
409
+ if hasattr(self,'x') and hasattr(self,'y'):
410
+ if (self.x.ndim==1) and (self.y.ndim==1):
411
+ self.xx, self.yy = np.meshgrid( self.x, self.y, indexing='ij' )
412
+ elif (self.x.ndim==2) and (self.y.ndim==2):
413
+ self.x = np.copy( self.x.T )
414
+ self.y = np.copy( self.y.T )
415
+ self.xx = np.copy( self.x )
416
+ self.yy = np.copy( self.y )
417
+ else:
418
+ raise ValueError
419
+
420
+ if ('dz' in self.attrs.keys()):
421
+ self.dz = self.attrs['dz']
422
+
423
+ if ('dims/stang' in self):
424
+ self.stang = np.copy( self['dims/stang'][()] )
425
+ if ('dims/snorm' in self):
426
+ self.snorm = np.copy( self['dims/snorm'][()] )
427
+
428
+ if ('csys/vtang' in self):
429
+ self.vtang = np.copy( self['csys/vtang'][()] )
430
+ if ('csys/vnorm' in self):
431
+ self.vnorm = np.copy( self['csys/vnorm'][()] )
432
+
433
+ if ('dims/crv_R' in self):
434
+ self.crv_R = np.copy( self['dims/crv_R'][()] )
435
+ if ('dims/R_min' in self):
436
+ self.R_min = self['dims/R_min'][()]
437
+
438
+ if verbose: print(72*'-')
439
+ if verbose and hasattr(self,'duration_avg'): even_print('duration_avg', '%0.5f'%self.duration_avg)
440
+ if verbose: even_print('nx', '%i'%self.nx)
441
+ if verbose: even_print('ny', '%i'%self.ny)
442
+ #if verbose: print(72*'-')
443
+
444
+ # ===
445
+
446
+ if ('rectilinear' in self.attrs.keys()):
447
+ self.rectilinear = self.attrs['rectilinear']
448
+
449
+ if ('curvilinear' in self.attrs.keys()):
450
+ self.curvilinear = self.attrs['curvilinear']
451
+
452
+ ## check
453
+ if hasattr(self,'rectilinear') and not hasattr(self,'curvilinear'):
454
+ raise ValueError
455
+ if hasattr(self,'curvilinear') and not hasattr(self,'rectilinear'):
456
+ raise ValueError
457
+ if hasattr(self,'rectilinear') or hasattr(self,'curvilinear'):
458
+ if self.rectilinear and self.curvilinear:
459
+ raise ValueError
460
+ if not self.rectilinear and not self.curvilinear:
461
+ raise ValueError
462
+
463
+ if ('requires_wall_norm_interp' in self.attrs.keys()):
464
+ self.requires_wall_norm_interp = self.attrs['requires_wall_norm_interp']
465
+ else:
466
+ self.requires_wall_norm_interp = False
467
+
468
+ # === ts group names & scalars
469
+
470
+ if ('data' in self):
471
+ self.scalars = list(self['data'].keys())
472
+ self.n_scalars = len(self.scalars)
473
+ self.scalars_dtypes = []
474
+ for scalar in self.scalars:
475
+ self.scalars_dtypes.append(self['data/%s'%scalar].dtype)
476
+ self.scalars_dtypes_dict = dict(zip(self.scalars, self.scalars_dtypes)) ## dict {<<scalar>>: <<dtype>>}
477
+ else:
478
+ self.scalars = []
479
+ self.n_scalars = 0
480
+ self.scalars_dtypes = []
481
+ self.scalars_dtypes_dict = dict(zip(self.scalars, self.scalars_dtypes))
482
+
483
+ return
484
+
485
+ def import_data_eas4(self, path, **kwargs):
486
+ '''
487
+ Copy data from 2D EAS4 containers (output from NS3D) to a ZTMD container
488
+ -----
489
+
490
+ The 'path' directory should contain one or more of the following files:
491
+
492
+ --> mean_flow_mpi.eas
493
+ --> favre_mean_flow_mpi.eas
494
+ --> ext_rms_fluctuation_mpi.eas
495
+ --> ext_favre_fluctuation_mpi.eas
496
+ --> turbulent_budget_mpi.eas
497
+
498
+ /dims : 2D dimension datasets (x,y,..) and possibly 1D dimension datasets (s_wall,..)
499
+ /data : 2D datasets (u,uIuI,..)
500
+
501
+ Datasets are dimensionalized to SI units upon import!
502
+
503
+ /dimless : copy the dimless datasets as a reference
504
+
505
+ Curvilinear cases may have the following additional HDF5 groups
506
+
507
+ /data_1Dx : 1D datsets in streamwise (x/s1) direction (μ_wall,ρ_wall,u_τ,..)
508
+ /csys : coordinate system transformation arrays (projection vectors, transform tensors, etc.)
509
+ -----
510
+ /dims_2Dw : alternate grid (e.g. wall-normal projected/interpolation grid)
511
+ /data_2Dw : data interpolated onto alternate grid
512
+ '''
513
+
514
+ verbose = kwargs.get( 'verbose', True)
515
+ recalc_mu = kwargs.get( 'recalc_mu', False)
516
+
517
+ if verbose: print('\n'+'turbx.ztmd.import_data_eas4()'+'\n'+72*'-')
518
+ t_start_func = timeit.default_timer()
519
+
520
+ even_print('ztmd',str(self.fname))
521
+
522
+ ## dz,dt should be input as dimless (characteristic/inlet) (output from tgg)
523
+ ## --> dz & dt get re-dimensionalized during this func!
524
+ dz = kwargs.get('dz',None)
525
+ nz = kwargs.get('nz',None)
526
+ dt = kwargs.get('dt',None)
527
+
528
+ path_ztmean = Path(path)
529
+ if not path_ztmean.is_dir():
530
+ raise FileNotFoundError('%s does not exist.'%str(path_ztmean))
531
+ fn_Re_mean = Path(path_ztmean, 'mean_flow_mpi.eas')
532
+ fn_Fv_mean = Path(path_ztmean, 'favre_mean_flow_mpi.eas')
533
+ fn_Re_fluct = Path(path_ztmean, 'ext_rms_fluctuation_mpi.eas')
534
+ fn_Fv_fluct = Path(path_ztmean, 'ext_favre_fluctuation_mpi.eas')
535
+ fn_turb_budget = Path(path_ztmean, 'turbulent_budget_mpi.eas')
536
+
537
+ self.attrs['fn_Re_mean'] = str( fn_Re_mean.relative_to(Path()) )
538
+ self.attrs['fn_Fv_mean'] = str( fn_Fv_mean.relative_to(Path()) )
539
+ self.attrs['fn_Re_fluct'] = str( fn_Re_fluct.relative_to(Path()) )
540
+ self.attrs['fn_Fv_fluct'] = str( fn_Fv_fluct.relative_to(Path()) )
541
+ self.attrs['fn_turb_budget'] = str( fn_turb_budget.relative_to(Path()) )
542
+
543
+ ## the simulation timestep dt is not known from the averaged files
544
+ if (dt is not None):
545
+ self.attrs['dt'] = dt
546
+ if (nz is not None):
547
+ self.attrs['nz'] = nz
548
+ if (dz is not None):
549
+ self.attrs['dz'] = dz
550
+
551
+ if verbose:
552
+ if (nz is not None):
553
+ even_print('nz' , '%i'%nz )
554
+ if (dz is not None):
555
+ even_print('dz' , '%0.6e'%dz )
556
+ if (dt is not None):
557
+ even_print('dt' , '%0.6e'%dt )
558
+ print(72*'-')
559
+
560
+ # ===
561
+
562
+ if fn_Re_mean.exists():
563
+ even_print('eas4 Re mean',str(fn_Re_mean.relative_to(Path())))
564
+ with eas4(str(fn_Re_mean),'r',verbose=False) as f1:
565
+
566
+ ## the EAS4 data is still organized by rank in [z], so perform average across ranks
567
+ data_mean = f1.get_mean()
568
+
569
+ ## assert mean data shape
570
+ for i, key in enumerate(data_mean.dtype.names):
571
+ if (data_mean[key].shape[0]!=f1.nx):
572
+ raise AssertionError('mean data dim1 shape != nx')
573
+ if (data_mean[key].shape[1]!=f1.ny):
574
+ raise AssertionError('mean data dim2 shape != ny')
575
+ if (data_mean[key].ndim!=2):
576
+ raise AssertionError('mean data ndim != 2')
577
+
578
+ nx = f1.nx ; self.attrs['nx'] = nx
579
+ ny = f1.ny ; self.attrs['ny'] = ny
580
+
581
+ ## primary
582
+ Ma = f1.Ma ; self.attrs['Ma'] = Ma
583
+ Re = f1.Re ; self.attrs['Re'] = Re
584
+ Pr = f1.Pr ; self.attrs['Pr'] = Pr
585
+ T_inf = f1.T_inf ; self.attrs['T_inf'] = T_inf
586
+ p_inf = f1.p_inf ; self.attrs['p_inf'] = p_inf
587
+ kappa = f1.kappa ; self.attrs['kappa'] = kappa
588
+ R = f1.R ; self.attrs['R'] = R
589
+ mu_Suth_ref = f1.mu_Suth_ref ; self.attrs['mu_Suth_ref'] = mu_Suth_ref
590
+ T_Suth_ref = f1.T_Suth_ref ; self.attrs['T_Suth_ref'] = T_Suth_ref
591
+ C_Suth = f1.C_Suth ; self.attrs['C_Suth'] = C_Suth
592
+ S_Suth = f1.S_Suth ; self.attrs['S_Suth'] = S_Suth
593
+
594
+ ## derived
595
+ rho_inf = f1.rho_inf ; self.attrs['rho_inf'] = rho_inf
596
+ mu_inf = f1.mu_inf ; self.attrs['mu_inf'] = mu_inf
597
+ nu_inf = f1.nu_inf ; self.attrs['nu_inf'] = nu_inf
598
+ a_inf = f1.a_inf ; self.attrs['a_inf'] = a_inf
599
+ U_inf = f1.U_inf ; self.attrs['U_inf'] = U_inf
600
+ cp = f1.cp ; self.attrs['cp'] = cp
601
+ cv = f1.cv ; self.attrs['cv'] = cv
602
+ recov_fac = f1.recov_fac ; self.attrs['recov_fac'] = recov_fac
603
+ Taw = f1.Taw ; self.attrs['Taw'] = Taw
604
+ lchar = f1.lchar ; self.attrs['lchar'] = lchar
605
+
606
+ tchar = f1.lchar/f1.U_inf ; self.attrs['tchar'] = tchar
607
+ uchar = f1.U_inf ; self.attrs['uchar'] = uchar
608
+ M_inf = f1.Ma ; self.attrs['M_inf'] = M_inf
609
+
610
+ ## duration over which avg was performed, iteration count and the sampling period of the avg
611
+ Re_mean_total_avg_time = f1.total_avg_time * tchar
612
+ Re_mean_total_avg_iter_count = f1.total_avg_iter_count
613
+ Re_mean_dt = Re_mean_total_avg_time / Re_mean_total_avg_iter_count
614
+
615
+ self.attrs['Re_mean_total_avg_time'] = Re_mean_total_avg_time
616
+ self.attrs['Re_mean_total_avg_iter_count'] = Re_mean_total_avg_iter_count
617
+ self.attrs['Re_mean_dt'] = Re_mean_dt
618
+
619
+ t_meas = f1.total_avg_time * (f1.lchar/f1.U_inf) ## dimensional [s]
620
+ #t_meas = f1.total_avg_time ## dimless (char)
621
+ self.attrs['t_meas'] = t_meas
622
+ dset = self.create_dataset('dims/t', data=np.array([t_meas],dtype=np.float64), chunks=None)
623
+
624
+ # ===
625
+
626
+ ## from EAS4, dimless (char)
627
+ x = np.copy(f1.x)
628
+ y = np.copy(f1.y)
629
+
630
+ if (f1.x.ndim==1) and (f1.y.ndim==1): ## rectilinear in [x,y]
631
+
632
+ self.attrs['rectilinear'] = True
633
+ self.attrs['curvilinear'] = False
634
+
635
+ ## dimensionalize & write
636
+ x *= lchar
637
+ y *= lchar
638
+ dz *= lchar
639
+ dt *= tchar
640
+ self.create_dataset('dims/x', data=x, chunks=None, dtype=np.float64)
641
+ self.create_dataset('dims/y', data=y, chunks=None, dtype=np.float64)
642
+ self.attrs['dz'] = dz
643
+ self.attrs['dt'] = dt
644
+
645
+ self.attrs['nx'] = nx
646
+ self.attrs['ny'] = ny
647
+ #self.attrs['nz'] = 1 ## NO
648
+ if verbose:
649
+ even_print('nx' , '%i'%nx )
650
+ even_print('ny' , '%i'%ny )
651
+
652
+ elif (f1.x.ndim==3) and (f1.y.ndim==3): ## curvilinear in [x,y]
653
+
654
+ self.attrs['rectilinear'] = False
655
+ self.attrs['curvilinear'] = True
656
+
657
+ ## 3D coords: confirm that x,y coords are same in [z] direction
658
+ np.testing.assert_allclose( x[-1,-1,:] , x[-1,-1,0] , rtol=1e-14 , atol=1e-14 )
659
+ np.testing.assert_allclose( y[-1,-1,:] , y[-1,-1,0] , rtol=1e-14 , atol=1e-14 )
660
+
661
+ ## 3D coords: take only 1 layer in [z]
662
+ x = np.squeeze( np.copy( x[:,:,0] ) ) ## dimless (char)
663
+ y = np.squeeze( np.copy( y[:,:,0] ) )
664
+
665
+ # if True: ## check against tgg data wall distance file (if it exists)
666
+ # fn_dat = '../tgg/wall_distance.dat'
667
+ # if os.path.isfile(fn_dat):
668
+ # with open(fn_dat,'rb') as f:
669
+ # d_ = pickle.load(f)
670
+ # xy2d_tmp = d_['xy2d']
671
+ # np.testing.assert_allclose(xy2d_tmp[:,:,0], x[:,:,0], rtol=1e-14, atol=1e-14)
672
+ # np.testing.assert_allclose(xy2d_tmp[:,:,1], y[:,:,0], rtol=1e-14, atol=1e-14)
673
+ # if verbose: even_print('check passed' , 'x grid' )
674
+ # if verbose: even_print('check passed' , 'y grid' )
675
+ # d_ = None; del d_
676
+ # xy2d_tmp = None; del xy2d_tmp
677
+
678
+ # ## backup non-dimensional coordinate arrays
679
+ # dset = self.create_dataset('/dimless/dims/x', data=x.T, chunks=None)
680
+ # dset = self.create_dataset('/dimless/dims/y', data=y.T, chunks=None)
681
+
682
+ ## dimensionalize & write
683
+ x *= lchar
684
+ y *= lchar
685
+ dz *= lchar
686
+ dt *= tchar
687
+
688
+ self.create_dataset('dims/x', data=x.T, chunks=None, dtype=np.float64)
689
+ self.create_dataset('dims/y', data=y.T, chunks=None, dtype=np.float64)
690
+ self.attrs['dz'] = dz
691
+ self.attrs['dt'] = dt
692
+
693
+ self.attrs['nx'] = nx
694
+ self.attrs['ny'] = ny
695
+ #self.attrs['nz'] = 1 ## NO
696
+ if verbose:
697
+ even_print('nx' , '%i'%nx )
698
+ even_print('ny' , '%i'%ny )
699
+
700
+ else:
701
+ raise ValueError('case x.ndim=%i , y.ndim=%i not yet accounted for'%(f1.x.ndim,f1.y.ndim))
702
+
703
+ # === redimensionalize quantities (by sim characteristic quantities)
704
+
705
+ u = np.copy( data_mean['u'] ) * U_inf
706
+ v = np.copy( data_mean['v'] ) * U_inf
707
+ w = np.copy( data_mean['w'] ) * U_inf
708
+ rho = np.copy( data_mean['rho'] ) * rho_inf
709
+ p = np.copy( data_mean['p'] ) * (rho_inf * U_inf**2)
710
+ T = np.copy( data_mean['T'] ) * T_inf
711
+ mu = np.copy( data_mean['mu'] ) * mu_inf
712
+
713
+ # === check μ
714
+ #
715
+ # the O(1%) discrepancies are due to [z,t] averaging
716
+ # μ=f(T) BUT this no longer holds exactly once averaged
717
+ # the user should decide if <μ> should be re-calculated from <T>
718
+
719
+ T_NS3D = np.copy( data_mean['T'] ) ## dimless
720
+
721
+ ## Non-dimensional Suth Temp 'Ts'
722
+ ## 'equations.F90', subroutines calc_viscosity() & initialize_viscosity()
723
+ Ts = S_Suth/T_inf
724
+ mu_NS3D = np.copy( T_NS3D**1.5 * ( 1 + Ts ) / ( T_NS3D + Ts ) ) ## dimless
725
+ np.testing.assert_allclose(mu/mu_inf, mu_NS3D, rtol=0.003)
726
+
727
+ mu_A = np.copy( mu_Suth_ref*(T/T_Suth_ref)**(3/2) * ((T_Suth_ref+S_Suth)/(T+S_Suth)) )
728
+ mu_B = np.copy( C_Suth * T**(3/2) / (T + S_Suth) )
729
+ np.testing.assert_allclose(mu_A, mu_B, rtol=1e-6) ## single precision
730
+
731
+ np.testing.assert_allclose(mu, mu_A, rtol=0.003)
732
+ np.testing.assert_allclose(mu, mu_B, rtol=0.003)
733
+
734
+ ## !!! replace μ !!!
735
+ if recalc_mu:
736
+ mu = np.copy( C_Suth * T**(3/2) / (T + S_Suth) )
737
+
738
+ ## clear structured array from memory
739
+ data_mean = None; del data_mean
740
+
741
+ ## derived values from base scalars
742
+ a = np.copy( np.sqrt( kappa * R * T ) )
743
+ nu = np.copy( mu / rho )
744
+ umag = np.copy( np.sqrt( u**2 + v**2 + w**2 ) )
745
+ M = np.copy( umag / np.sqrt(kappa * R * T) )
746
+
747
+ T_tot = np.copy( T * (1 + (kappa-1)/2 * M**2) )
748
+ p_tot = np.copy( p * (1 + (kappa-1)/2 * M**2)**(kappa/(kappa-1)) )
749
+ rho_tot = np.copy( rho * (1 + (kappa-1)/2 * M**2)**(1/(kappa-1)) )
750
+
751
+ ## base scalars [u,v,w,ρ,p,T]
752
+ self.create_dataset('data/u' , data=u.T , chunks=None)
753
+ self.create_dataset('data/v' , data=v.T , chunks=None)
754
+ self.create_dataset('data/w' , data=w.T , chunks=None)
755
+ self.create_dataset('data/rho' , data=rho.T , chunks=None)
756
+ self.create_dataset('data/p' , data=p.T , chunks=None)
757
+ self.create_dataset('data/T' , data=T.T , chunks=None)
758
+
759
+ ## derived fields
760
+ self.create_dataset('data/a' , data=a.T , chunks=None) #; dset.attrs['dimensional'] = True ; dset.attrs['unit'] = '[m/s]'
761
+ self.create_dataset('data/mu' , data=mu.T , chunks=None) #; dset.attrs['dimensional'] = True ; dset.attrs['unit'] = '[kg/(m·s)]'
762
+ self.create_dataset('data/nu' , data=nu.T , chunks=None) #; dset.attrs['dimensional'] = True ; dset.attrs['unit'] = '[m²/s]'
763
+ self.create_dataset('data/umag' , data=umag.T , chunks=None) #; dset.attrs['dimensional'] = True ; dset.attrs['unit'] = '[m/s]'
764
+ self.create_dataset('data/M' , data=M.T , chunks=None) #; dset.attrs['dimensional'] = False
765
+ self.create_dataset('data/T_tot' , data=T_tot.T , chunks=None)
766
+ self.create_dataset('data/p_tot' , data=p_tot.T , chunks=None)
767
+ self.create_dataset('data/rho_tot' , data=rho_tot.T , chunks=None)
768
+
769
+ if fn_Re_fluct.exists():
770
+ even_print('eas4 Re fluct',str(fn_Re_fluct.relative_to(Path())))
771
+ with eas4(str(fn_Re_fluct),'r',verbose=False) as f1:
772
+
773
+ data_mean = f1.get_mean()
774
+
775
+ ## assert mean data shape
776
+ for i, key in enumerate(data_mean.dtype.names):
777
+ if (data_mean[key].shape[0]!=f1.nx):
778
+ raise AssertionError('mean data dim1 shape != nx')
779
+ if (data_mean[key].shape[1]!=f1.ny):
780
+ raise AssertionError('mean data dim2 shape != ny')
781
+ if (data_mean[key].ndim!=2):
782
+ raise AssertionError('mean data ndim != 2')
783
+
784
+ Re_fluct_total_avg_time = f1.total_avg_time
785
+ Re_fluct_total_avg_iter_count = f1.total_avg_iter_count
786
+ Re_fluct_dt = Re_fluct_total_avg_time/Re_fluct_total_avg_iter_count
787
+
788
+ self.attrs['Re_fluct_total_avg_time'] = Re_fluct_total_avg_time
789
+ self.attrs['Re_fluct_total_avg_iter_count'] = Re_fluct_total_avg_iter_count
790
+ self.attrs['Re_fluct_dt'] = Re_fluct_dt
791
+
792
+ uI_uI = data_mean["u'u'"] * U_inf**2
793
+ vI_vI = data_mean["v'v'"] * U_inf**2
794
+ wI_wI = data_mean["w'w'"] * U_inf**2
795
+ uI_vI = data_mean["u'v'"] * U_inf**2
796
+ uI_wI = data_mean["u'w'"] * U_inf**2
797
+ vI_wI = data_mean["v'w'"] * U_inf**2
798
+
799
+ self.create_dataset('data/uI_uI', data=uI_uI.T, chunks=None)
800
+ self.create_dataset('data/vI_vI', data=vI_vI.T, chunks=None)
801
+ self.create_dataset('data/wI_wI', data=wI_wI.T, chunks=None)
802
+ self.create_dataset('data/uI_vI', data=uI_vI.T, chunks=None)
803
+ self.create_dataset('data/uI_wI', data=uI_wI.T, chunks=None)
804
+ self.create_dataset('data/vI_wI', data=vI_wI.T, chunks=None)
805
+
806
+ uI_TI = data_mean["u'T'"] * (U_inf*T_inf)
807
+ vI_TI = data_mean["v'T'"] * (U_inf*T_inf)
808
+ wI_TI = data_mean["w'T'"] * (U_inf*T_inf)
809
+
810
+ self.create_dataset('data/uI_TI', data=uI_TI.T, chunks=None)
811
+ self.create_dataset('data/vI_TI', data=vI_TI.T, chunks=None)
812
+ self.create_dataset('data/wI_TI', data=wI_TI.T, chunks=None)
813
+
814
+ TI_TI = data_mean["T'T'"] * T_inf**2
815
+ pI_pI = data_mean["p'p'"] * (rho_inf * U_inf**2)**2
816
+ rI_rI = data_mean["r'r'"] * rho_inf**2
817
+ muI_muI = data_mean["mu'mu'"] * mu_inf**2
818
+
819
+ self.create_dataset('data/TI_TI', data=TI_TI.T, chunks=None)
820
+ self.create_dataset('data/pI_pI', data=pI_pI.T, chunks=None)
821
+ self.create_dataset('data/rI_rI', data=rI_rI.T, chunks=None)
822
+ self.create_dataset('data/muI_muI', data=muI_muI.T, chunks=None)
823
+
824
+ tauI_xx = data_mean["tau'_xx"] * mu_inf * U_inf / lchar
825
+ tauI_yy = data_mean["tau'_yy"] * mu_inf * U_inf / lchar
826
+ tauI_zz = data_mean["tau'_zz"] * mu_inf * U_inf / lchar
827
+ tauI_xy = data_mean["tau'_xy"] * mu_inf * U_inf / lchar
828
+ tauI_xz = data_mean["tau'_xz"] * mu_inf * U_inf / lchar
829
+ tauI_yz = data_mean["tau'_yz"] * mu_inf * U_inf / lchar
830
+
831
+ self.create_dataset('data/tauI_xx', data=tauI_xx.T, chunks=None)
832
+ self.create_dataset('data/tauI_yy', data=tauI_yy.T, chunks=None)
833
+ self.create_dataset('data/tauI_zz', data=tauI_zz.T, chunks=None)
834
+ self.create_dataset('data/tauI_xy', data=tauI_xy.T, chunks=None)
835
+ self.create_dataset('data/tauI_xz', data=tauI_xz.T, chunks=None)
836
+ self.create_dataset('data/tauI_yz', data=tauI_yz.T, chunks=None)
837
+
838
+ # === RMS values
839
+
840
+ if True: ## dimensional
841
+
842
+ uI_uI_rms = np.sqrt( data_mean["u'u'"] * U_inf**2 )
843
+ vI_vI_rms = np.sqrt( data_mean["v'v'"] * U_inf**2 )
844
+ wI_wI_rms = np.sqrt( data_mean["w'w'"] * U_inf**2 )
845
+ uI_vI_rms = np.sqrt(np.abs(data_mean["u'v'"]) * U_inf**2 ) * np.sign(data_mean["u'v'"])
846
+ uI_wI_rms = np.sqrt(np.abs(data_mean["u'w'"]) * U_inf**2 ) * np.sign(data_mean["u'w'"])
847
+ vI_wI_rms = np.sqrt(np.abs(data_mean["v'w'"]) * U_inf**2 ) * np.sign(data_mean["v'w'"])
848
+
849
+ uI_TI_rms = np.sqrt(np.abs(data_mean["u'T'"]) * U_inf*T_inf) * np.sign(data_mean["u'T'"])
850
+ vI_TI_rms = np.sqrt(np.abs(data_mean["v'T'"]) * U_inf*T_inf) * np.sign(data_mean["v'T'"])
851
+ wI_TI_rms = np.sqrt(np.abs(data_mean["w'T'"]) * U_inf*T_inf) * np.sign(data_mean["w'T'"])
852
+
853
+ rI_rI_rms = np.sqrt( data_mean["r'r'"] * rho_inf**2 )
854
+ TI_TI_rms = np.sqrt( data_mean["T'T'"] * T_inf**2 )
855
+ pI_pI_rms = np.sqrt( data_mean["p'p'"] * (rho_inf * U_inf**2)**2 )
856
+ muI_muI_rms = np.sqrt( data_mean["mu'mu'"] * mu_inf**2 )
857
+
858
+ M_rms = uI_uI_rms / np.sqrt(kappa * R * T)
859
+
860
+ # if False: ## dimless
861
+ #
862
+ # uI_uI_rms = np.sqrt( data_mean["u'u'"] )
863
+ # vI_vI_rms = np.sqrt( data_mean["v'v'"] )
864
+ # wI_wI_rms = np.sqrt( data_mean["w'w'"] )
865
+ # uI_vI_rms = np.sqrt( np.abs(data_mean["u'v'"]) ) * np.sign(data_mean["u'v'"])
866
+ # uI_wI_rms = np.sqrt( np.abs(data_mean["u'w'"]) ) * np.sign(data_mean["u'w'"])
867
+ # vI_wI_rms = np.sqrt( np.abs(data_mean["v'w'"]) ) * np.sign(data_mean["v'w'"])
868
+ #
869
+ # uI_TI_rms = np.sqrt( np.abs(data_mean["u'T'"]) ) * np.sign(data_mean["u'T'"])
870
+ # vI_TI_rms = np.sqrt( np.abs(data_mean["v'T'"]) ) * np.sign(data_mean["v'T'"])
871
+ # wI_TI_rms = np.sqrt( np.abs(data_mean["w'T'"]) ) * np.sign(data_mean["w'T'"])
872
+ #
873
+ # rI_rI_rms = np.sqrt( data_mean["r'r'"] )
874
+ # TI_TI_rms = np.sqrt( data_mean["T'T'"] )
875
+ # pI_pI_rms = np.sqrt( data_mean["p'p'"] )
876
+ # muI_muI_rms = np.sqrt( data_mean["mu'mu'"] )
877
+ #
878
+ # # ...
879
+ # M_rms = np.sqrt( data_mean["u'u'"] * U_inf**2 ) / np.sqrt(kappa * R * (T*T_inf) )
880
+
881
+ # ===
882
+
883
+ self.create_dataset( 'data/uI_uI_rms' , data=uI_uI_rms.T , chunks=None )
884
+ self.create_dataset( 'data/vI_vI_rms' , data=vI_vI_rms.T , chunks=None )
885
+ self.create_dataset( 'data/wI_wI_rms' , data=wI_wI_rms.T , chunks=None )
886
+ self.create_dataset( 'data/uI_vI_rms' , data=uI_vI_rms.T , chunks=None )
887
+ self.create_dataset( 'data/uI_wI_rms' , data=uI_wI_rms.T , chunks=None )
888
+ self.create_dataset( 'data/vI_wI_rms' , data=vI_wI_rms.T , chunks=None )
889
+ ##
890
+ self.create_dataset( 'data/uI_TI_rms' , data=uI_TI_rms.T , chunks=None )
891
+ self.create_dataset( 'data/vI_TI_rms' , data=vI_TI_rms.T , chunks=None )
892
+ self.create_dataset( 'data/wI_TI_rms' , data=wI_TI_rms.T , chunks=None )
893
+ ##
894
+ self.create_dataset( 'data/rI_rI_rms' , data=rI_rI_rms.T , chunks=None )
895
+ self.create_dataset( 'data/TI_TI_rms' , data=TI_TI_rms.T , chunks=None )
896
+ self.create_dataset( 'data/pI_pI_rms' , data=pI_pI_rms.T , chunks=None )
897
+ self.create_dataset( 'data/muI_muI_rms' , data=muI_muI_rms.T , chunks=None )
898
+ ##
899
+ self.create_dataset( 'data/M_rms' , data=M_rms.T , chunks=None )
900
+
901
+ if fn_Fv_mean.exists():
902
+ #print('--r-> %s'%fn_Fv_mean.relative_to(Path()) )
903
+ even_print('eas4 Fv mean',str(fn_Fv_mean.relative_to(Path())))
904
+ with eas4(str(fn_Fv_mean),'r',verbose=False) as f1:
905
+
906
+ ## the EAS4 data is still organized by rank in [z], so perform average across ranks
907
+ data_mean = f1.get_mean()
908
+
909
+ ## assert mean data shape
910
+ for i, key in enumerate(data_mean.dtype.names):
911
+ if (data_mean[key].shape[0]!=f1.nx):
912
+ raise AssertionError('mean data dim1 shape != nx')
913
+ if (data_mean[key].shape[1]!=f1.ny):
914
+ raise AssertionError('mean data dim2 shape != ny')
915
+ if (data_mean[key].ndim!=2):
916
+ raise AssertionError('mean data ndim != 2')
917
+
918
+ ## duration over which avg was performed, iteration count and the sampling period of the avg
919
+ Fv_mean_total_avg_time = f1.total_avg_time * tchar
920
+ Fv_mean_total_avg_iter_count = f1.total_avg_iter_count
921
+ Fv_mean_dt = Fv_mean_total_avg_time / Fv_mean_total_avg_iter_count
922
+
923
+ self.attrs['Fv_mean_total_avg_time'] = Fv_mean_total_avg_time
924
+ self.attrs['Fv_mean_total_avg_iter_count'] = Fv_mean_total_avg_iter_count
925
+ self.attrs['Fv_mean_dt'] = Fv_mean_dt
926
+
927
+ u_Fv = np.copy( data_mean['u'] ) * U_inf
928
+ v_Fv = np.copy( data_mean['v'] ) * U_inf
929
+ w_Fv = np.copy( data_mean['w'] ) * U_inf
930
+ rho_Fv = np.copy( data_mean['rho'] ) * rho_inf
931
+ p_Fv = np.copy( data_mean['p'] ) * (rho_inf * U_inf**2)
932
+ T_Fv = np.copy( data_mean['T'] ) * T_inf
933
+ mu_Fv = np.copy( data_mean['mu'] ) * mu_inf
934
+
935
+ uu_Fv = np.copy( data_mean['uu'] ) * U_inf**2
936
+ uv_Fv = np.copy( data_mean['uv'] ) * U_inf**2
937
+
938
+ data_mean = None; del data_mean
939
+
940
+ self.create_dataset('data/u_Fv' , data=u_Fv.T , chunks=None)
941
+ self.create_dataset('data/v_Fv' , data=v_Fv.T , chunks=None)
942
+ self.create_dataset('data/w_Fv' , data=w_Fv.T , chunks=None)
943
+ self.create_dataset('data/rho_Fv' , data=rho_Fv.T , chunks=None)
944
+ self.create_dataset('data/p_Fv' , data=p_Fv.T , chunks=None)
945
+ self.create_dataset('data/T_Fv' , data=T_Fv.T , chunks=None)
946
+ self.create_dataset('data/mu_Fv' , data=mu_Fv.T , chunks=None)
947
+
948
+ self.create_dataset('data/uu_Fv' , data=uu_Fv.T , chunks=None)
949
+ self.create_dataset('data/uv_Fv' , data=uv_Fv.T , chunks=None)
950
+
951
+ if fn_Fv_fluct.exists():
952
+ #print('--r-> %s'%fn_Fv_fluct.relative_to(Path()) )
953
+ even_print('eas4 Fv fluct',str(fn_Fv_fluct.relative_to(Path())))
954
+ with eas4(str(fn_Fv_fluct),'r',verbose=False) as f1:
955
+
956
+ data_mean = f1.get_mean()
957
+
958
+ ## assert mean data shape
959
+ for i, key in enumerate(data_mean.dtype.names):
960
+ if (data_mean[key].shape[0]!=f1.nx):
961
+ raise AssertionError('mean data dim1 shape != nx')
962
+ if (data_mean[key].shape[1]!=f1.ny):
963
+ raise AssertionError('mean data dim2 shape != ny')
964
+ if (data_mean[key].ndim!=2):
965
+ raise AssertionError('mean data ndim != 2')
966
+
967
+ Fv_fluct_total_avg_time = f1.total_avg_time
968
+ Fv_fluct_total_avg_iter_count = f1.total_avg_iter_count
969
+ Fv_fluct_dt = Fv_fluct_total_avg_time/Fv_fluct_total_avg_iter_count
970
+
971
+ self.attrs['Fv_fluct_total_avg_time'] = Fv_fluct_total_avg_time
972
+ self.attrs['Fv_fluct_total_avg_iter_count'] = Fv_fluct_total_avg_iter_count
973
+ self.attrs['Fv_fluct_dt'] = Fv_fluct_dt
974
+
975
+ r_uII_uII = data_mean["r u''u''"] * rho_inf * U_inf**2
976
+ r_vII_vII = data_mean["r v''v''"] * rho_inf * U_inf**2
977
+ r_wII_wII = data_mean["r w''_w''"] * rho_inf * U_inf**2
978
+ r_uII_vII = data_mean["r u''v''"] * rho_inf * U_inf**2
979
+ r_uII_wII = data_mean["r u''w''"] * rho_inf * U_inf**2
980
+ r_vII_wII = data_mean["r w''v''"] * rho_inf * U_inf**2
981
+
982
+ self.create_dataset('data/r_uII_uII', data=r_uII_uII.T, chunks=None)
983
+ self.create_dataset('data/r_vII_vII', data=r_vII_vII.T, chunks=None)
984
+ self.create_dataset('data/r_wII_wII', data=r_wII_wII.T, chunks=None)
985
+ self.create_dataset('data/r_uII_vII', data=r_uII_vII.T, chunks=None)
986
+ self.create_dataset('data/r_uII_wII', data=r_uII_wII.T, chunks=None)
987
+ self.create_dataset('data/r_vII_wII', data=r_vII_wII.T, chunks=None)
988
+
989
+ r_uII_TII = data_mean["r u''T''"] * rho_inf * U_inf * T_inf
990
+ r_vII_TII = data_mean["r v''T''"] * rho_inf * U_inf * T_inf
991
+ r_wII_TII = data_mean["r w''T''"] * rho_inf * U_inf * T_inf
992
+
993
+ self.create_dataset('data/r_uII_TII', data=r_uII_TII.T, chunks=None)
994
+ self.create_dataset('data/r_vII_TII', data=r_vII_TII.T, chunks=None)
995
+ self.create_dataset('data/r_wII_TII', data=r_wII_TII.T, chunks=None)
996
+
997
+ r_TII_TII = data_mean["r T''T''"] * rho_inf * T_inf**2
998
+ r_pII_pII = data_mean["r p''p''"] * rho_inf * (rho_inf * U_inf**2)**2
999
+ r_rII_rII = data_mean["r r''r''"] * rho_inf * rho_inf**2
1000
+ r_muII_muII = data_mean["r mu''mu''"] * rho_inf * mu_inf**2
1001
+
1002
+ self.create_dataset('data/r_TII_TII', data=r_TII_TII.T, chunks=None)
1003
+ self.create_dataset('data/r_pII_pII', data=r_pII_pII.T, chunks=None)
1004
+ self.create_dataset('data/r_rII_rII', data=r_rII_rII.T, chunks=None)
1005
+ self.create_dataset('data/r_muII_muII', data=r_muII_muII.T, chunks=None)
1006
+
1007
+ # === RMS
1008
+
1009
+ if True:
1010
+
1011
+ r_uII_uII_rms = np.sqrt( data_mean["r u''u''"] * rho_inf * U_inf**2 )
1012
+ r_vII_vII_rms = np.sqrt( data_mean["r v''v''"] * rho_inf * U_inf**2 )
1013
+ r_wII_wII_rms = np.sqrt( data_mean["r w''_w''"] * rho_inf * U_inf**2 )
1014
+ r_uII_vII_rms = np.sqrt(np.abs(data_mean["r u''v''"]) * rho_inf * U_inf**2 ) * np.sign(data_mean["r u''v''"])
1015
+ r_uII_wII_rms = np.sqrt(np.abs(data_mean["r u''w''"]) * rho_inf * U_inf**2 ) * np.sign(data_mean["r u''w''"])
1016
+ r_vII_wII_rms = np.sqrt(np.abs(data_mean["r w''v''"]) * rho_inf * U_inf**2 ) * np.sign(data_mean["r w''v''"])
1017
+ ## ... ρ·u″T″
1018
+
1019
+ self.create_dataset( 'data/r_uII_uII_rms' , data=r_uII_uII_rms.T , chunks=None )
1020
+ self.create_dataset( 'data/r_vII_vII_rms' , data=r_vII_vII_rms.T , chunks=None )
1021
+ self.create_dataset( 'data/r_wII_wII_rms' , data=r_wII_wII_rms.T , chunks=None )
1022
+ self.create_dataset( 'data/r_uII_vII_rms' , data=r_uII_vII_rms.T , chunks=None )
1023
+ self.create_dataset( 'data/r_uII_wII_rms' , data=r_uII_wII_rms.T , chunks=None )
1024
+ self.create_dataset( 'data/r_vII_wII_rms' , data=r_vII_wII_rms.T , chunks=None )
1025
+ ## ... ρ·u″T″
1026
+
1027
+ if fn_turb_budget.exists():
1028
+ #print('--r-> %s'%fn_turb_budget.relative_to(Path()) )
1029
+ even_print('eas4 turb budget',str(fn_turb_budget.relative_to(Path())))
1030
+ with eas4(str(fn_turb_budget),'r',verbose=False) as f1:
1031
+
1032
+ data_mean = f1.get_mean() ## numpy structured array
1033
+
1034
+ ## assert mean data shape
1035
+ for i, key in enumerate(data_mean.dtype.names):
1036
+ if (data_mean[key].shape[0]!=f1.nx):
1037
+ raise AssertionError('mean data dim1 shape != nx')
1038
+ if (data_mean[key].shape[1]!=f1.ny):
1039
+ raise AssertionError('mean data dim2 shape != ny')
1040
+ if (data_mean[key].ndim!=2):
1041
+ raise AssertionError('mean data ndim != 2')
1042
+
1043
+ turb_budget_total_avg_time = f1.total_avg_time
1044
+ turb_budget_total_avg_iter_count = f1.total_avg_iter_count
1045
+ turb_budget_dt = turb_budget_total_avg_time/turb_budget_total_avg_iter_count
1046
+
1047
+ self.attrs['turb_budget_total_avg_time'] = turb_budget_total_avg_time
1048
+ self.attrs['turb_budget_total_avg_iter_count'] = turb_budget_total_avg_iter_count
1049
+ self.attrs['turb_budget_dt'] = turb_budget_dt
1050
+
1051
+ production = data_mean['prod.'] * U_inf**3 * rho_inf / lchar
1052
+ dissipation = data_mean['dis.'] * U_inf**2 * mu_inf / lchar**2
1053
+ turb_transport = data_mean['t-transp.'] * U_inf**3 * rho_inf / lchar
1054
+ visc_diffusion = data_mean['v-diff.'] * U_inf**2 * mu_inf / lchar**2
1055
+ p_diffusion = data_mean['p-diff.'] * U_inf**3 * rho_inf / lchar
1056
+ p_dilatation = data_mean['p-dilat.'] * U_inf**3 * rho_inf / lchar
1057
+ rho_terms = data_mean['rho-terms'] * U_inf**3 * rho_inf / lchar
1058
+ ##
1059
+ dset = self.create_dataset('data/production' , data=production.T , chunks=None) #; dset.attrs['dimensional'] = True
1060
+ dset = self.create_dataset('data/dissipation' , data=dissipation.T , chunks=None) #; dset.attrs['dimensional'] = True
1061
+ dset = self.create_dataset('data/turb_transport' , data=turb_transport.T , chunks=None) #; dset.attrs['dimensional'] = True
1062
+ dset = self.create_dataset('data/visc_diffusion' , data=visc_diffusion.T , chunks=None) #; dset.attrs['dimensional'] = True
1063
+ dset = self.create_dataset('data/p_diffusion' , data=p_diffusion.T , chunks=None) #; dset.attrs['dimensional'] = True
1064
+ dset = self.create_dataset('data/p_dilatation' , data=p_dilatation.T , chunks=None) #; dset.attrs['dimensional'] = True
1065
+ dset = self.create_dataset('data/rho_terms' , data=rho_terms.T , chunks=None) #; dset.attrs['dimensional'] = True
1066
+
1067
+ if 'dissipation' in locals():
1068
+
1069
+ #if not self.get('data/nu').attrs['dimensional']:
1070
+ # raise ValueError('nu is not dimensional')
1071
+
1072
+ Kolm_len = (nu**3 / np.abs(dissipation))**(1/4)
1073
+ self.create_dataset('data/Kolm_len', data=Kolm_len.T, chunks=None) # ; dset.attrs['dimensional'] = True
1074
+
1075
+ # ===
1076
+
1077
+ self.get_header(verbose=True)
1078
+ if verbose: print(72*'-')
1079
+ if verbose: print('total time : turbx.ztmd.import_data_eas4() : %s'%format_time_string((timeit.default_timer() - t_start_func)))
1080
+ if verbose: print(72*'-')
1081
+
1082
+ return
1083
+
1084
+ def import_data_eas3(self, path, **kwargs):
1085
+ '''
1086
+ Copy data from legacy EAS3 containers (output from NS3D) to a ZTMD container
1087
+ -----
1088
+
1089
+ The 'path' directory should contain one or more of the following files:
1090
+
1091
+ --> mean_flow_all_mittel-z.eas
1092
+ --> favre_mean_flow_all_mittel-z.eas
1093
+ --> ext_rms_fluctuation_all_mittel-z.eas
1094
+ --> ext_favre_fluctuation_all_mittel-z.eas
1095
+ --> turbulent_budget_all_mittel-z.eas
1096
+
1097
+ /dims : 2D dimension datasets (x,y,..)
1098
+ /data : 2D datasets (u,uIuI,..)
1099
+
1100
+ Datasets are dimensionalized to SI units upon import!
1101
+ '''
1102
+
1103
+ verbose = kwargs.get('verbose',True)
1104
+ recalc_mu = kwargs.get( 'recalc_mu', False)
1105
+
1106
+ if verbose: print('\n'+'turbx.ztmd.import_data_eas3()'+'\n'+72*'-')
1107
+ t_start_func = timeit.default_timer()
1108
+
1109
+ even_print('ztmd',str(self.fname))
1110
+
1111
+ ## dz,dt should be input as dimless (characteristic/inlet) (output from tgg)
1112
+ ## --> dz & dt get re-dimensionalized during this func!
1113
+ dz = kwargs.get('dz',None)
1114
+ nz = kwargs.get('nz',None)
1115
+ dt = kwargs.get('dt',None)
1116
+
1117
+ path_ztmean = Path(path)
1118
+ if not path_ztmean.is_dir():
1119
+ raise FileNotFoundError('%s does not exist.'%str(path_ztmean))
1120
+ fn_Re_mean = Path(path_ztmean, 'mean_flow_all_mittel-z.eas')
1121
+ fn_Fv_mean = Path(path_ztmean, 'favre_mean_flow_all_mittel-z.eas')
1122
+ fn_Re_fluct = Path(path_ztmean, 'ext_rms_fluctuation_all_mittel-z.eas')
1123
+ fn_Fv_fluct = Path(path_ztmean, 'ext_favre_fluctuation_all_mittel-z.eas')
1124
+ fn_turb_budget = Path(path_ztmean, 'turbulent_budget_all_mittel-z.eas')
1125
+
1126
+ self.attrs['fn_Re_mean'] = str( fn_Re_mean.relative_to(Path()) )
1127
+ self.attrs['fn_Fv_mean'] = str( fn_Fv_mean.relative_to(Path()) )
1128
+ self.attrs['fn_Re_fluct'] = str( fn_Re_fluct.relative_to(Path()) )
1129
+ self.attrs['fn_Fv_fluct'] = str( fn_Fv_fluct.relative_to(Path()) )
1130
+ self.attrs['fn_turb_budget'] = str( fn_turb_budget.relative_to(Path()) )
1131
+
1132
+ ## the simulation timestep dt is not known from the averaged files
1133
+ if (dt is not None):
1134
+ self.attrs['dt'] = dt
1135
+ setattr(self,'dt',dt)
1136
+ if (nz is not None):
1137
+ self.attrs['nz'] = nz
1138
+ setattr(self,'nz',nz)
1139
+ if (dz is not None):
1140
+ self.attrs['dz'] = dz
1141
+ setattr(self,'dz',dz)
1142
+
1143
+ if verbose:
1144
+ if (nz is not None):
1145
+ even_print('nz' , '%i'%nz )
1146
+ if (dz is not None):
1147
+ even_print('dz' , '%0.6e'%dz )
1148
+ if (dt is not None):
1149
+ even_print('dt' , '%0.6e'%dt )
1150
+ print(72*'-')
1151
+
1152
+ # ===
1153
+
1154
+ ## this function currently requires that the Reynolds mean file exists
1155
+ if not fn_Re_mean.exists():
1156
+ raise FileNotFoundError(str(fn_Re_mean))
1157
+
1158
+ if fn_Re_mean.exists(): ## Reynolds mean (must exist!)
1159
+ even_print('eas3 Re mean',str(fn_Re_mean.relative_to(Path())))
1160
+ with eas3(fname=str(fn_Re_mean),verbose=False) as f1:
1161
+
1162
+ nx = f1.nx ; self.attrs['nx'] = nx
1163
+ ny = f1.ny ; self.attrs['ny'] = ny
1164
+
1165
+ Ma = f1.Ma ; self.attrs['Ma'] = Ma
1166
+ Re = f1.Re ; self.attrs['Re'] = Re
1167
+ Pr = f1.Pr ; self.attrs['Pr'] = Pr
1168
+ T_inf = f1.T_inf ; self.attrs['T_inf'] = T_inf
1169
+ p_inf = f1.p_inf ; self.attrs['p_inf'] = p_inf
1170
+ kappa = f1.kappa ; self.attrs['kappa'] = kappa
1171
+ R = f1.R ; self.attrs['R'] = R
1172
+ mu_Suth_ref = f1.mu_Suth_ref ; self.attrs['mu_Suth_ref'] = mu_Suth_ref
1173
+ T_Suth_ref = f1.T_Suth_ref ; self.attrs['T_Suth_ref'] = T_Suth_ref
1174
+ C_Suth = f1.C_Suth ; self.attrs['C_Suth'] = C_Suth
1175
+ S_Suth = f1.S_Suth ; self.attrs['S_Suth'] = S_Suth
1176
+
1177
+ rho_inf = f1.rho_inf # ; self.attrs['rho_inf'] = rho_inf
1178
+ mu_inf = f1.mu_inf # ; self.attrs['mu_inf'] = mu_inf
1179
+ #nu_inf = f1.nu_inf # ; self.attrs['nu_inf'] = nu_inf
1180
+ #a_inf = f1.a_inf # ; self.attrs['a_inf'] = a_inf
1181
+ U_inf = f1.U_inf # ; self.attrs['U_inf'] = U_inf
1182
+ #cp = f1.cp # ; self.attrs['cp'] = cp
1183
+ #cv = f1.cv # ; self.attrs['cv'] = cv
1184
+ #recov_fac = f1.recov_fac # ; self.attrs['recov_fac'] = recov_fac
1185
+ #Taw = f1.Taw # ; self.attrs['Taw'] = Taw
1186
+ lchar = f1.lchar # ; self.attrs['lchar'] = lchar
1187
+
1188
+ tchar = f1.lchar/f1.U_inf # ; self.attrs['tchar'] = tchar
1189
+
1190
+ setattr(self,'lchar',lchar)
1191
+ setattr(self,'tchar',tchar)
1192
+ setattr(self,'U_inf',U_inf)
1193
+
1194
+ # ===
1195
+
1196
+ if (f1.t.shape[0]==1):
1197
+ f1.total_avg_time = float(f1.t[0])
1198
+ else:
1199
+ raise NotImplementedError
1200
+
1201
+ ## duration over which avg was performed, iteration count and the sampling period of the avg
1202
+ Re_mean_total_avg_time = f1.total_avg_time * tchar
1203
+ Re_mean_total_avg_iter_count = f1.total_avg_iter_count
1204
+ Re_mean_dt = Re_mean_total_avg_time / Re_mean_total_avg_iter_count
1205
+
1206
+ self.attrs['Re_mean_total_avg_time'] = Re_mean_total_avg_time
1207
+ self.attrs['Re_mean_total_avg_iter_count'] = Re_mean_total_avg_iter_count
1208
+ self.attrs['Re_mean_dt'] = Re_mean_dt
1209
+
1210
+ t_meas = f1.total_avg_time * (f1.lchar/f1.U_inf) ## dimensional [s]
1211
+ #t_meas = f1.total_avg_time ## dimless (char)
1212
+ self.attrs['t_meas'] = t_meas
1213
+ self.create_dataset('dims/t', data=np.array([t_meas,],dtype=np.float64), chunks=None)
1214
+
1215
+ # ===
1216
+
1217
+ ## from EAS3, dimless (char)
1218
+ x = np.copy(f1.x)
1219
+ y = np.copy(f1.y)
1220
+
1221
+ if (f1.x.ndim==1) and (f1.y.ndim==1): ## rectilinear in [x,y]
1222
+
1223
+ self.attrs['rectilinear'] = True
1224
+ self.attrs['curvilinear'] = False
1225
+
1226
+ ## dimensionalize
1227
+ x *= lchar
1228
+ y *= lchar
1229
+ dz *= lchar
1230
+ dt *= tchar
1231
+
1232
+ ## write
1233
+ self.create_dataset('dims/x', data=x, chunks=None, dtype=np.float64)
1234
+ self.create_dataset('dims/y', data=y, chunks=None, dtype=np.float64)
1235
+ self.attrs['dz'] = dz
1236
+ self.attrs['dt'] = dt
1237
+ self.attrs['nx'] = nx
1238
+ self.attrs['ny'] = ny
1239
+ #self.attrs['nz'] = 1 ## NO
1240
+
1241
+ if verbose:
1242
+ even_print('nx' , '%i'%nx )
1243
+ even_print('ny' , '%i'%ny )
1244
+
1245
+ setattr(self,'x',x)
1246
+ setattr(self,'y',y)
1247
+
1248
+ setattr(self,'dz',dz)
1249
+ setattr(self,'dt',dt)
1250
+ setattr(self,'nx',nx)
1251
+ setattr(self,'ny',ny)
1252
+
1253
+ else:
1254
+ raise NotImplementedError
1255
+
1256
+ # ===
1257
+
1258
+ if (f1.scalars != f1.attr_param):
1259
+ raise AssertionError
1260
+ if (f1.ndim3!=1):
1261
+ raise AssertionError
1262
+
1263
+ if (f1.accuracy == f1.IEEES):
1264
+ dtypes = [ np.float32 for _ in f1.scalars ]
1265
+ if (f1.accuracy == f1.IEEED):
1266
+ dtypes = [ np.float64 for _ in f1.scalars ]
1267
+ else:
1268
+ raise ValueError
1269
+
1270
+ ## numpy structured array
1271
+ data = np.zeros( shape=(nx,ny), dtype={'names':f1.scalars,'formats':dtypes} )
1272
+
1273
+ ## populate structured array from EAS3 binary data file
1274
+ progress_bar = tqdm(total=f1.nt*f1.npar, ncols=100, desc='import eas3 Re mean', leave=False)
1275
+ for scalar in f1.attr_param:
1276
+ tqdm.write(even_print(scalar,f'({nx},{ny})',s=True))
1277
+ for jj in range(f1.ndim2):
1278
+ if f1.accuracy == f1.IEEES:
1279
+ packet = struct.unpack('!'+str(f1.ndim1)+'f',f1.f.read(4*f1.ndim1))[:]
1280
+ else:
1281
+ packet = struct.unpack('!'+str(f1.ndim1)+'d',f1.f.read(8*f1.ndim1))[:]
1282
+ data[scalar][:,jj] = packet
1283
+ progress_bar.update()
1284
+ progress_bar.close()
1285
+
1286
+ ## re-dimensionalize by characteristic freestream quantities
1287
+ for scalar in data.dtype.names:
1288
+ if scalar in ['u','v','w', 'uI','vI','wI', 'uII','vII','wII',]:
1289
+ data[scalar] *= U_inf
1290
+ elif scalar in ['r_uII','r_vII','r_wII']:
1291
+ data[scalar] *= (U_inf*rho_inf)
1292
+ elif scalar in ['T','TI','TII']:
1293
+ data[scalar] *= T_inf
1294
+ elif scalar in ['r_TII']:
1295
+ data[scalar] *= (T_inf*rho_inf)
1296
+ elif scalar in ['rho','rhoI']:
1297
+ data[scalar] *= rho_inf
1298
+ elif scalar in ['p','pI','pII']:
1299
+ data[scalar] *= (rho_inf * U_inf**2)
1300
+ elif scalar in ['mu']:
1301
+ data[scalar] *= mu_inf
1302
+ else:
1303
+ raise ValueError(f"condition needed for redimensionalizing '{scalar}'")
1304
+
1305
+ ## write structured array to ZTMD
1306
+ for scalar in data.dtype.names:
1307
+ self.create_dataset(f'data/{scalar}', data=data[scalar].T, chunks=None)
1308
+
1309
+ ## already made dimensional
1310
+ u = np.copy( data['u'] )
1311
+ v = np.copy( data['v'] )
1312
+ w = np.copy( data['w'] )
1313
+ rho = np.copy( data['rho'] )
1314
+ p = np.copy( data['p'] )
1315
+ T = np.copy( data['T'] )
1316
+ mu = np.copy( data['mu'] )
1317
+
1318
+ # === check μ
1319
+ #
1320
+ # the O(1%) discrepancies are due to [z,t] averaging
1321
+ # μ=f(T) BUT this no longer holds exactly once averaged
1322
+ # the user should decide if <μ> should be re-calculated from <T>
1323
+
1324
+ T_NS3D = np.copy( data['T']/T_inf ) ## dimless
1325
+
1326
+ ## Non-dimensional Suth Temp 'Ts'
1327
+ ## 'equations.F90', subroutine initialize_viscosity()
1328
+ Ts = S_Suth/T_inf
1329
+ mu_NS3D = np.copy( T_NS3D**1.5 * ( 1 + Ts ) / ( T_NS3D + Ts ) ) ## dimless
1330
+ np.testing.assert_allclose(mu/mu_inf, mu_NS3D, rtol=0.003)
1331
+
1332
+ mu_A = np.copy( mu_Suth_ref*(T/T_Suth_ref)**(3/2) * ((T_Suth_ref+S_Suth)/(T+S_Suth)) )
1333
+ mu_B = np.copy( C_Suth * T**(3/2) / (T + S_Suth) )
1334
+ np.testing.assert_allclose(mu_A, mu_B, rtol=1e-6) ## single precision
1335
+
1336
+ np.testing.assert_allclose(mu, mu_A, rtol=0.003)
1337
+ np.testing.assert_allclose(mu, mu_B, rtol=0.003)
1338
+
1339
+ ## !!! replace μ !!!
1340
+ if recalc_mu:
1341
+ mu = np.copy( C_Suth * T**(3/2) / (T + S_Suth) )
1342
+ if ('data/mu' in self):
1343
+ del self['data/mu']
1344
+ self.create_dataset('data/mu', data=mu.T, chunks=None)
1345
+
1346
+ ## clear structured array from memory
1347
+ data = None ; del data
1348
+
1349
+ ## derived values from base scalars
1350
+ a = np.copy( np.sqrt( kappa * R * T ) )
1351
+ nu = np.copy( mu / rho )
1352
+ umag = np.copy( np.sqrt( u**2 + v**2 + w**2 ) )
1353
+ M = np.copy( umag / np.sqrt(kappa * R * T) )
1354
+
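+ ## isentropic total (stagnation) relations for a calorically perfect gas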
1355
+ T_tot = np.copy( T * (1 + (kappa-1)/2 * M**2) )
1356
+ p_tot = np.copy( p * (1 + (kappa-1)/2 * M**2)**(kappa/(kappa-1)) )
1357
+ rho_tot = np.copy( rho * (1 + (kappa-1)/2 * M**2)**(1/(kappa-1)) )
1358
+
1359
+ ## write derived scalars
1360
+ self.create_dataset('data/a' , data=a.T , chunks=None)
1361
+ self.create_dataset('data/nu' , data=nu.T , chunks=None)
1362
+ self.create_dataset('data/umag' , data=umag.T , chunks=None)
1363
+ self.create_dataset('data/M' , data=M.T , chunks=None)
1364
+ self.create_dataset('data/T_tot' , data=T_tot.T , chunks=None)
1365
+ self.create_dataset('data/p_tot' , data=p_tot.T , chunks=None)
1366
+ self.create_dataset('data/rho_tot' , data=rho_tot.T , chunks=None)
1367
+
1368
+ if fn_Re_fluct.exists(): ## Reynolds turbulent
1369
+ even_print('eas3 Re fluct',str(fn_Re_fluct.relative_to(Path())))
1370
+ with eas3(str(fn_Re_fluct),verbose=False) as f1:
1371
+
1372
+ if (f1.t.shape[0]==1):
1373
+ f1.total_avg_time = float(f1.t[0])
1374
+ else:
1375
+ raise NotImplementedError
1376
+
1377
+ ## duration over which the avg was performed, the iteration count, and the avg sampling period
1378
+ Re_fluct_total_avg_time = f1.total_avg_time
1379
+ Re_fluct_total_avg_iter_count = f1.total_avg_iter_count
1380
+ Re_fluct_dt = Re_fluct_total_avg_time/Re_fluct_total_avg_iter_count
1381
+
1382
+ self.attrs['Re_fluct_total_avg_time'] = Re_fluct_total_avg_time
1383
+ self.attrs['Re_fluct_total_avg_iter_count'] = Re_fluct_total_avg_iter_count
1384
+ self.attrs['Re_fluct_dt'] = Re_fluct_dt
1385
+
1386
+ ## assert grid,lchar are same
1387
+ if hasattr(self,'lchar') and hasattr(f1,'lchar'):
1388
+ np.testing.assert_allclose(self.lchar, f1.lchar, rtol=1e-12)
1389
+ if hasattr(self,'x') and hasattr(f1,'x'):
1390
+ np.testing.assert_allclose(self.x, f1.x*lchar, rtol=1e-12)
1391
+ if hasattr(self,'y') and hasattr(f1,'y'):
1392
+ np.testing.assert_allclose(self.y, f1.y*lchar, rtol=1e-12)
1393
+
1394
+ # ===
1395
+
1396
+ if (f1.scalars != f1.attr_param):
1397
+ raise AssertionError
1398
+ if (f1.ndim3!=1):
1399
+ raise AssertionError
1400
+
1401
+ if (f1.accuracy == f1.IEEES):
1402
+ dtypes = [ np.float32 for _ in f1.scalars ]
1403
+ elif (f1.accuracy == f1.IEEED):
1404
+ dtypes = [ np.float64 for _ in f1.scalars ]
1405
+ else:
1406
+ raise ValueError('EAS3 accuracy flag not recognized')
1407
+
1408
+ ## numpy structured array
1409
+ data = np.zeros( shape=(nx,ny), dtype={'names':f1.scalars,'formats':dtypes} )
1410
+
1411
+ ## dict for name change
1412
+ dnc = {
1413
+ "r'r'":'rI_rI',
1414
+ "u'u'":'uI_uI',
1415
+ "v'v'":'vI_vI',
1416
+ "w'w'":'wI_wI',
1417
+ "T'T'":'TI_TI',
1418
+ "p'p'":'pI_pI',
1419
+ "mu'mu'":'muI_muI',
1420
+ "u'v'":'uI_vI',
1421
+ "u'w'":'uI_wI',
1422
+ "v'w'":'vI_wI',
1423
+ "u'T'":'uI_TI',
1424
+ "v'T'":'vI_TI',
1425
+ "w'T'":'wI_TI',
1426
+ "tau'_xx":'tauI_xx',
1427
+ "tau'_xy":'tauI_xy',
1428
+ "tau'_xz":'tauI_xz',
1429
+ "tau'_yy":'tauI_yy',
1430
+ "tau'_yz":'tauI_yz',
1431
+ "tau'_zz":'tauI_zz',
1432
+ }
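+ ## (single prime --> 'I' : Reynolds fluctuation)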
1433
+
1434
+ ## populate structured array from EAS3 binary data file
1435
+ progress_bar = tqdm(total=f1.nt*f1.npar, ncols=100, desc='import eas3 Re fluct', leave=False)
1436
+ for scalar in f1.attr_param:
1437
+ if (scalar in dnc):
1438
+ s_ = dnc[scalar]
1439
+ else:
1440
+ s_ = scalar
1441
+ tqdm.write(even_print(f"{scalar} --> {s_}",f'({nx},{ny})',s=True))
1442
+ for jj in range(f1.ndim2):
1443
+ if f1.accuracy == f1.IEEES:
1444
+ packet = struct.unpack('!'+str(f1.ndim1)+'f',f1.f.read(4*f1.ndim1))[:]
1445
+ else:
1446
+ packet = struct.unpack('!'+str(f1.ndim1)+'d',f1.f.read(8*f1.ndim1))[:]
1447
+ data[scalar][:,jj] = packet
1448
+ progress_bar.update()
1449
+ progress_bar.close()
1450
+
1451
+ ## re-dimensionalize by characteristic freestream quantities
1452
+ for scalar in data.dtype.names:
1453
+ if scalar in ["u'u'","v'v'","w'w'","u'v'","u'w'","v'w'",]:
1454
+ data[scalar] *= U_inf**2
1455
+ elif scalar in ["r'r'",]:
1456
+ data[scalar] *= rho_inf**2
1457
+ elif scalar in ["T'T'",]:
1458
+ data[scalar] *= T_inf**2
1459
+ elif scalar in ["p'p'",]:
1460
+ data[scalar] *= rho_inf**2 * U_inf**4
1461
+ elif scalar in ["mu'mu'",]:
1462
+ data[scalar] *= mu_inf**2
1463
+ elif scalar in ["u'T'","v'T'","w'T'",]:
1464
+ data[scalar] *= U_inf * T_inf
1465
+ elif scalar in ["tau'_xx","tau'_xy","tau'_xz","tau'_yy","tau'_yz","tau'_zz",]:
1466
+ data[scalar] *= mu_inf * U_inf / lchar
1467
+ else:
1468
+ raise ValueError(f"condition needed for redimensionalizing '{scalar}'")
1469
+
1470
+ ## write structured array to ZTMD
1471
+ for scalar in data.dtype.names:
1472
+ self.create_dataset(f'data/{dnc.get(scalar,scalar)}', data=data[scalar].T, chunks=None)
1473
+
1474
+ ## derived
1475
+ uI_uI_rms = np.copy( np.sqrt( data["u'u'"] ) )
1476
+ vI_vI_rms = np.copy( np.sqrt( data["v'v'"] ) )
1477
+ wI_wI_rms = np.copy( np.sqrt( data["w'w'"] ) )
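+ ## signed square root for the cross-correlations: magnitude from |·|, sign restored,
+ ## so that e.g. uI_vI_rms keeps the sign of <u'v'>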
1478
+ uI_vI_rms = np.copy( np.sqrt(np.abs(data["u'v'"]) ) * np.sign(data["u'v'"]) )
1479
+ uI_wI_rms = np.copy( np.sqrt(np.abs(data["u'w'"]) ) * np.sign(data["u'w'"]) )
1480
+ vI_wI_rms = np.copy( np.sqrt(np.abs(data["v'w'"]) ) * np.sign(data["v'w'"]) )
1481
+
1482
+ uI_TI_rms = np.copy( np.sqrt(np.abs(data["u'T'"]) ) * np.sign(data["u'T'"]) )
1483
+ vI_TI_rms = np.copy( np.sqrt(np.abs(data["v'T'"]) ) * np.sign(data["v'T'"]) )
1484
+ wI_TI_rms = np.copy( np.sqrt(np.abs(data["w'T'"]) ) * np.sign(data["w'T'"]) )
1485
+
1486
+ rI_rI_rms = np.copy( np.sqrt( data["r'r'"] ) )
1487
+ TI_TI_rms = np.copy( np.sqrt( data["T'T'"] ) )
1488
+ pI_pI_rms = np.copy( np.sqrt( data["p'p'"] ) )
1489
+ muI_muI_rms = np.copy( np.sqrt( data["mu'mu'"] ) )
1490
+
1491
+ M_rms = np.copy( uI_uI_rms / np.sqrt(kappa * R * T) )
1492
+
1493
+ ## clear structured array from memory
1494
+ data = None ; del data
1495
+
1496
+ self.create_dataset( 'data/uI_uI_rms' , data=uI_uI_rms.T , chunks=None )
1497
+ self.create_dataset( 'data/vI_vI_rms' , data=vI_vI_rms.T , chunks=None )
1498
+ self.create_dataset( 'data/wI_wI_rms' , data=wI_wI_rms.T , chunks=None )
1499
+ self.create_dataset( 'data/uI_vI_rms' , data=uI_vI_rms.T , chunks=None )
1500
+ self.create_dataset( 'data/uI_wI_rms' , data=uI_wI_rms.T , chunks=None )
1501
+ self.create_dataset( 'data/vI_wI_rms' , data=vI_wI_rms.T , chunks=None )
1502
+
1503
+ self.create_dataset( 'data/uI_TI_rms' , data=uI_TI_rms.T , chunks=None )
1504
+ self.create_dataset( 'data/vI_TI_rms' , data=vI_TI_rms.T , chunks=None )
1505
+ self.create_dataset( 'data/wI_TI_rms' , data=wI_TI_rms.T , chunks=None )
1506
+
1507
+ self.create_dataset( 'data/rI_rI_rms' , data=rI_rI_rms.T , chunks=None )
1508
+ self.create_dataset( 'data/TI_TI_rms' , data=TI_TI_rms.T , chunks=None )
1509
+ self.create_dataset( 'data/pI_pI_rms' , data=pI_pI_rms.T , chunks=None )
1510
+ self.create_dataset( 'data/muI_muI_rms' , data=muI_muI_rms.T , chunks=None )
1511
+
1512
+ self.create_dataset( 'data/M_rms' , data=M_rms.T , chunks=None )
1513
+
1514
+ if fn_Fv_mean.exists(): ## Favre mean
1515
+ even_print('eas3 Fv mean',str(fn_Fv_mean.relative_to(Path())))
1516
+ with eas3(str(fn_Fv_mean),verbose=False) as f1:
1517
+
1518
+ if (f1.t.shape[0]==1):
1519
+ f1.total_avg_time = float(f1.t[0])
1520
+ else:
1521
+ raise NotImplementedError
1522
+
1523
+ ## duration over which the avg was performed, the iteration count, and the avg sampling period
1524
+ Fv_mean_total_avg_time = f1.total_avg_time
1525
+ Fv_mean_total_avg_iter_count = f1.total_avg_iter_count
1526
+ Fv_mean_dt = Fv_mean_total_avg_time/Fv_mean_total_avg_iter_count
1527
+
1528
+ self.attrs['Fv_mean_total_avg_time'] = Fv_mean_total_avg_time
1529
+ self.attrs['Fv_mean_total_avg_iter_count'] = Fv_mean_total_avg_iter_count
1530
+ self.attrs['Fv_mean_dt'] = Fv_mean_dt
1531
+
1532
+ ## assert grid,lchar are same
1533
+ if hasattr(self,'lchar') and hasattr(f1,'lchar'):
1534
+ np.testing.assert_allclose(self.lchar, f1.lchar, rtol=1e-12)
1535
+ if hasattr(self,'x') and hasattr(f1,'x'):
1536
+ np.testing.assert_allclose(self.x, f1.x*lchar, rtol=1e-12)
1537
+ if hasattr(self,'y') and hasattr(f1,'y'):
1538
+ np.testing.assert_allclose(self.y, f1.y*lchar, rtol=1e-12)
1539
+
1540
+ # ===
1541
+
1542
+ if (f1.scalars != f1.attr_param):
1543
+ raise AssertionError
1544
+ if (f1.ndim3!=1):
1545
+ raise AssertionError
1546
+
1547
+ if (f1.accuracy == f1.IEEES):
1548
+ dtypes = [ np.float32 for _ in f1.scalars ]
1549
+ elif (f1.accuracy == f1.IEEED):
1550
+ dtypes = [ np.float64 for _ in f1.scalars ]
1551
+ else:
1552
+ raise ValueError('EAS3 accuracy flag not recognized')
1553
+
1554
+ ## numpy structured array
1555
+ data = np.zeros( shape=(nx,ny), dtype={'names':f1.scalars,'formats':dtypes} )
1556
+
1557
+ ## dict for name change
1558
+ dnc = {
1559
+ 'rho':'rho_Fv',
1560
+ 'u':'u_Fv',
1561
+ 'v':'v_Fv',
1562
+ 'w':'w_Fv',
1563
+ 'T':'T_Fv',
1564
+ 'p':'p_Fv',
1565
+ 'mu':'mu_Fv',
1566
+ 'uu':'uu_Fv',
1567
+ 'uv':'uv_Fv',
1568
+ }
1569
+
1570
+ ## populate structured array from EAS3 binary data file
1571
+ progress_bar = tqdm(total=f1.nt*f1.npar, ncols=100, desc='import eas3 Fv mean', leave=False)
1572
+ for scalar in f1.attr_param:
1573
+ if (scalar in dnc):
1574
+ s_ = dnc[scalar]
1575
+ else:
1576
+ s_ = scalar
1577
+ tqdm.write(even_print(f"{scalar} --> {s_}",f'({nx},{ny})',s=True))
1578
+ for jj in range(f1.ndim2):
1579
+ if f1.accuracy == f1.IEEES:
1580
+ packet = struct.unpack('!'+str(f1.ndim1)+'f',f1.f.read(4*f1.ndim1))[:]
1581
+ else:
1582
+ packet = struct.unpack('!'+str(f1.ndim1)+'d',f1.f.read(8*f1.ndim1))[:]
1583
+ data[scalar][:,jj] = packet
1584
+ progress_bar.update()
1585
+ progress_bar.close()
1586
+
1587
+ ## re-dimensionalize by characteristic freestream quantities
1588
+ for scalar in data.dtype.names:
1589
+ if scalar in ["u","v","w",]:
1590
+ data[scalar] *= U_inf
1591
+ elif scalar in ["uu","uv",]:
1592
+ data[scalar] *= U_inf**2
1593
+ elif scalar in ["rho",]:
1594
+ data[scalar] *= rho_inf
1595
+ elif scalar in ["T",]:
1596
+ data[scalar] *= T_inf
1597
+ elif scalar in ["p",]:
1598
+ data[scalar] *= rho_inf * U_inf**2
1599
+ elif scalar in ["mu",]:
1600
+ data[scalar] *= mu_inf
1601
+ else:
1602
+ raise ValueError(f"condition needed for redimensionalizing '{scalar}'")
1603
+
1604
+ ## write
1605
+ for scalar in f1.attr_param:
1606
+ self.create_dataset( f'data/{dnc.get(scalar,scalar)}' , data=data[scalar].T , chunks=None )
1607
+
1608
+ if fn_Fv_fluct.exists(): ## Favre turbulent
1609
+ even_print('eas3 Fv fluct',str(fn_Fv_fluct.relative_to(Path())))
1610
+ with eas3(str(fn_Fv_fluct),verbose=False) as f1:
1611
+
1612
+ if (f1.t.shape[0]==1):
1613
+ f1.total_avg_time = float(f1.t[0])
1614
+ else:
1615
+ raise NotImplementedError
1616
+
1617
+ ## duration over which the avg was performed, the iteration count, and the avg sampling period
1618
+ Fv_fluct_total_avg_time = f1.total_avg_time
1619
+ Fv_fluct_total_avg_iter_count = f1.total_avg_iter_count
1620
+ Fv_fluct_dt = Fv_fluct_total_avg_time/Fv_fluct_total_avg_iter_count
1621
+
1622
+ self.attrs['Fv_fluct_total_avg_time'] = Fv_fluct_total_avg_time
1623
+ self.attrs['Fv_fluct_total_avg_iter_count'] = Fv_fluct_total_avg_iter_count
1624
+ self.attrs['Fv_fluct_dt'] = Fv_fluct_dt
1625
+
1626
+ ## assert grid,lchar are same
1627
+ if hasattr(self,'lchar') and hasattr(f1,'lchar'):
1628
+ np.testing.assert_allclose(self.lchar, f1.lchar, rtol=1e-12)
1629
+ if hasattr(self,'x') and hasattr(f1,'x'):
1630
+ np.testing.assert_allclose(self.x, f1.x*lchar, rtol=1e-12)
1631
+ if hasattr(self,'y') and hasattr(f1,'y'):
1632
+ np.testing.assert_allclose(self.y, f1.y*lchar, rtol=1e-12)
1633
+
1634
+ # ===
1635
+
1636
+ if (f1.scalars != f1.attr_param):
1637
+ raise AssertionError
1638
+ if (f1.ndim3!=1):
1639
+ raise AssertionError
1640
+
1641
+ if (f1.accuracy == f1.IEEES):
1642
+ dtypes = [ np.float32 for _ in f1.scalars ]
1643
+ if (f1.accuracy == f1.IEEED):
1644
+ dtypes = [ np.float64 for _ in f1.scalars ]
1645
+ else:
1646
+ raise ValueError
1647
+
1648
+ ## numpy structured array
1649
+ data = np.zeros( shape=(nx,ny), dtype={'names':f1.scalars,'formats':dtypes} )
1650
+
1651
+ ## dict for name change
1652
+ dnc = {
1653
+ "r r''r''":'r_rII_rII',
1654
+ "r u''u''":'r_uII_uII',
1655
+ "r v''v''":'r_vII_vII',
1656
+ "r w''_w''":'r_wII_wII',
1657
+ "r T''T''":'r_TII_TII',
1658
+ "r p''p''":'r_pII_pII',
1659
+ "r mu''mu''":'r_muII_muII',
1660
+ "r u''v''":'r_uII_vII',
1661
+ "r u''w''":'r_uII_wII',
1662
+ "r w''v''":'r_vII_wII',
1663
+ "r u''T''":'r_uII_TII',
1664
+ "r v''T''":'r_vII_TII',
1665
+ "r w''T''":'r_wII_TII',
1666
+ }
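+ ## (double prime --> 'II' : Favre fluctuation, density-weighted)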
1667
+
1668
+ ## populate structured array from EAS3 binary data file
1669
+ progress_bar = tqdm(total=f1.nt*f1.npar, ncols=100, desc='import eas3 Fv fluct', leave=False)
1670
+ for scalar in f1.attr_param:
1671
+ if (scalar in dnc):
1672
+ s_ = dnc[scalar]
1673
+ else:
1674
+ s_ = scalar
1675
+ tqdm.write(even_print(f"{scalar} --> {s_}",f'({nx},{ny})',s=True))
1676
+ for jj in range(f1.ndim2):
1677
+ if f1.accuracy == f1.IEEES:
1678
+ packet = struct.unpack('!'+str(f1.ndim1)+'f',f1.f.read(4*f1.ndim1))[:]
1679
+ else:
1680
+ packet = struct.unpack('!'+str(f1.ndim1)+'d',f1.f.read(8*f1.ndim1))[:]
1681
+ data[scalar][:,jj] = packet
1682
+ progress_bar.update()
1683
+ progress_bar.close()
1684
+
1685
+ ## re-dimensionalize by characteristic freestream quantities
1686
+ for scalar in data.dtype.names:
1687
+ if scalar in ["r r''r''",]:
1688
+ data[scalar] *= rho_inf**3
1689
+ elif scalar in ["r u''u''","r v''v''","r w''w''","r w''_w''","r u''v''","r u''w''","r w''v''",]:
1690
+ data[scalar] *= rho_inf * U_inf**2
1691
+ elif scalar in ["r u''T''","r v''T''","r w''T''",]:
1692
+ data[scalar] *= rho_inf * U_inf * T_inf
1693
+ elif scalar in ["r mu''mu''",]:
1694
+ data[scalar] *= rho_inf * mu_inf**2
1695
+ elif scalar in ["r p''p''",]:
1696
+ data[scalar] *= rho_inf * (rho_inf * U_inf**2)**2
1697
+ elif scalar in ["r T''T''",]:
1698
+ data[scalar] *= rho_inf * T_inf**2
1699
+ else:
1700
+ raise ValueError(f"condition needed for redimensionalizing '{scalar}'")
1701
+
1702
+ ## write structured array to ZTMD
1703
+ for scalar in data.dtype.names:
1704
+ self.create_dataset(f'data/{dnc.get(scalar,scalar)}', data=data[scalar].T, chunks=None)
1705
+
1706
+ ## derived
1707
+ r_uII_uII_rms = np.copy( np.sqrt( data["r u''u''"] ) )
1708
+ r_vII_vII_rms = np.copy( np.sqrt( data["r v''v''"] ) )
1709
+ r_wII_wII_rms = np.copy( np.sqrt( data["r w''_w''"] ) )
1710
+ r_uII_vII_rms = np.copy( np.sqrt(np.abs(data["r u''v''"]) ) * np.sign(data["r u''v''"]) )
1711
+ r_uII_wII_rms = np.copy( np.sqrt(np.abs(data["r u''w''"]) ) * np.sign(data["r u''w''"]) )
1712
+ r_vII_wII_rms = np.copy( np.sqrt(np.abs(data["r w''v''"]) ) * np.sign(data["r w''v''"]) )
1713
+ ## ... ρ·u″T″
1714
+
1715
+ self.create_dataset( 'data/r_uII_uII_rms' , data=r_uII_uII_rms.T , chunks=None )
1716
+ self.create_dataset( 'data/r_vII_vII_rms' , data=r_vII_vII_rms.T , chunks=None )
1717
+ self.create_dataset( 'data/r_wII_wII_rms' , data=r_wII_wII_rms.T , chunks=None )
1718
+ self.create_dataset( 'data/r_uII_vII_rms' , data=r_uII_vII_rms.T , chunks=None )
1719
+ self.create_dataset( 'data/r_uII_wII_rms' , data=r_uII_wII_rms.T , chunks=None )
1720
+ self.create_dataset( 'data/r_vII_wII_rms' , data=r_vII_wII_rms.T , chunks=None )
1721
+ ## ... ρ·u″T″
1722
+
1723
+ ## clear structured array from memory
1724
+ data = None ; del data
1725
+
1726
+ if fn_turb_budget.exists(): ## turbulent budget
1727
+ even_print('eas3 turb budget',str(fn_turb_budget.relative_to(Path())))
1728
+ with eas3(str(fn_turb_budget),verbose=False) as f1:
1729
+
1730
+ if (f1.t.shape[0]==1):
1731
+ f1.total_avg_time = float(f1.t[0])
1732
+ else:
1733
+ raise NotImplementedError
1734
+
1735
+ ## duration over which the avg was performed, the iteration count, and the avg sampling period
1736
+ turb_budget_total_avg_time = f1.total_avg_time
1737
+ turb_budget_total_avg_iter_count = f1.total_avg_iter_count
1738
+ turb_budget_dt = turb_budget_total_avg_time/turb_budget_total_avg_iter_count
1739
+
1740
+ self.attrs['turb_budget_total_avg_time'] = turb_budget_total_avg_time
1741
+ self.attrs['turb_budget_total_avg_iter_count'] = turb_budget_total_avg_iter_count
1742
+ self.attrs['turb_budget_dt'] = turb_budget_dt
1743
+
1744
+ ## assert grid,lchar are same
1745
+ if hasattr(self,'lchar') and hasattr(f1,'lchar'):
1746
+ np.testing.assert_allclose(self.lchar, f1.lchar, rtol=1e-12)
1747
+ if hasattr(self,'x') and hasattr(f1,'x'):
1748
+ np.testing.assert_allclose(self.x, f1.x*lchar, rtol=1e-12)
1749
+ if hasattr(self,'y') and hasattr(f1,'y'):
1750
+ np.testing.assert_allclose(self.y, f1.y*lchar, rtol=1e-12)
1751
+
1752
+ # ===
1753
+
1754
+ if (f1.scalars != f1.attr_param):
1755
+ raise AssertionError
1756
+ if (f1.ndim3!=1):
1757
+ raise AssertionError
1758
+
1759
+ if (f1.accuracy == f1.IEEES):
1760
+ dtypes = [ np.float32 for _ in f1.scalars ]
1761
+ elif (f1.accuracy == f1.IEEED):
1762
+ dtypes = [ np.float64 for _ in f1.scalars ]
1763
+ else:
1764
+ raise ValueError('EAS3 accuracy flag not recognized')
1765
+
1766
+ ## numpy structured array
1767
+ names_ = [ s for s in f1.scalars if ( 'restart' not in s ) ]
1768
+ data = np.zeros( shape=(nx,ny), dtype={'names':names_,'formats':dtypes[:len(names_)]} ) ## formats matched to filtered names
1769
+
1770
+ ## dict for name change
1771
+ dnc = {
1772
+ "prod.":'production',
1773
+ "dis.":'dissipation',
1774
+ "t-transp.":'turb_transport',
1775
+ "v-diff.":'visc_diffusion',
1776
+ "p-diff.":'p_diffusion',
1777
+ "p-dilat.":'p_dilatation',
1778
+ "rho-terms":'rho_terms',
1779
+ }
1780
+
1781
+ ## populate structured array from EAS3 binary data file
1782
+ progress_bar = tqdm(total=7, ncols=100, desc='import eas3 turb budget', leave=False)
1783
+ for scalar in ["prod.","dis.","t-transp.","v-diff.","p-diff.","p-dilat.","rho-terms",]:
1784
+ if (scalar in dnc):
1785
+ s_ = dnc[scalar]
1786
+ else:
1787
+ s_ = scalar
1788
+ tqdm.write(even_print(f"{scalar} --> {s_}",f'({nx},{ny})',s=True))
1789
+ for jj in range(f1.ndim2):
1790
+ if f1.accuracy == f1.IEEES:
1791
+ packet = struct.unpack('!'+str(f1.ndim1)+'f',f1.f.read(4*f1.ndim1))[:]
1792
+ else:
1793
+ packet = struct.unpack('!'+str(f1.ndim1)+'d',f1.f.read(8*f1.ndim1))[:]
1794
+ data[scalar][:,jj] = packet
1795
+ progress_bar.update()
1796
+ progress_bar.close()
1797
+
1798
+ ## re-dimensionalize by characteristic freestream quantities
1799
+ for scalar in data.dtype.names:
1800
+ if scalar in ["prod.","t-transp.","p-diff.","p-dilat.","rho-terms",]:
1801
+ data[scalar] *= U_inf**3 * rho_inf / lchar
1802
+ elif scalar in ["dis.","v-diff.",]:
1803
+ data[scalar] *= U_inf**2 * mu_inf / lchar**2
1804
+ else:
1805
+ raise ValueError(f"condition needed for redimensionalizing '{scalar}'")
1806
+
1807
+ ## write structured array to ZTMD
1808
+ for scalar in data.dtype.names:
1809
+ self.create_dataset(f'data/{dnc.get(scalar,scalar)}', data=data[scalar].T, chunks=None)
1810
+
1811
+ ## derived
1812
+ dissipation = np.copy( data['dis.'] )
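+ ## Kolmogorov length scale: eta = ( nu^3 / eps )^(1/4)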
1813
+ Kolm_len = (nu**3 / np.abs(dissipation))**(1/4)
1814
+ self.create_dataset('data/Kolm_len', data=Kolm_len.T, chunks=None)
1815
+
1816
+ ## clear structured array from memory
1817
+ data = None ; del data
1818
+
1819
+ self.get_header(verbose=True)
1820
+ if verbose: print(72*'-')
1821
+ if verbose: print('total time : turbx.ztmd.import_data_eas3() : %s'%format_time_string((timeit.default_timer() - t_start_func)))
1822
+ if verbose: print(72*'-')
1823
+
1824
+ return
1825
+
1826
+ def export_dict(self,**kwargs):
1827
+ '''
1828
+ pull all data from HDF5 container into memory and pack it into a dictionary
1829
+ - convenient for multi-case plotting scripts
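+ 
+ example (sketch -- the file name is hypothetical):
+ 
+ >>> with ztmd('case_ztmd.h5','r') as f:
+ ...     dd = f.export_dict(dsets=['u','T'])
+ >>> dd['u'].shape ## --> (nx,ny)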
1830
+ '''
1831
+ verbose = kwargs.get('verbose',True)
1832
+ dsets = kwargs.get('dsets',None)
1833
+
1834
+ dd = {} ## the dict to return
1835
+
1836
+ ## class-level ZTMD attrs
1837
+ attr_exclude_list = ['rank','comm','n_ranks','usingmpi','open_mode',
1838
+ '_libver','_id','requires_wall_norm_interp',
1839
+ 'mod_avail_tqdm']
1840
+ for attr, val in self.__dict__.items():
1841
+ if (attr not in attr_exclude_list):
1842
+
1843
+ ## raise an error if the value is not a plain data type
1844
+ if isinstance(val, (int,np.int64,np.int32,float,str,dict,list,tuple,np.ndarray,bool,np.bool_,)):
1845
+ pass
1846
+ elif (val is None):
1847
+ pass
1848
+ else:
1849
+ print(attr)
1850
+ print(type(val))
1851
+ raise TypeError(f"attribute '{attr}' has unexpected type {type(val)}")
1852
+
1853
+ dd[attr] = val
1854
+
1855
+ ## HDF5 File Group attrs
1856
+ for attr, val in self.attrs.items():
1857
+ if (attr not in dd.keys()):
1858
+ dd[attr] = val
1859
+
1860
+ ## Group: dims/
1861
+ for dsn in self['dims']:
1862
+ if (dsn not in dd.keys()):
1863
+ ds = self[f'dims/{dsn}']
1864
+ if (ds.ndim==0):
1865
+ dd[dsn] = ds[()]
1866
+ elif (ds.ndim>0):
1867
+ dd[dsn] = np.copy(ds[()])
1868
+ else:
1869
+ raise ValueError
1870
+ else:
1871
+ #print(dsn)
1872
+ pass
1873
+
1874
+ ## Group: data/
1875
+ for dsn in self['data']:
1876
+ if (dsn not in dd.keys()):
1877
+ if (dsets is None) or (dsn in dsets):
1878
+ ds = self[f'data/{dsn}']
1879
+ if (ds.ndim==2):
1880
+ dd[dsn] = np.copy(ds[()].T)
1881
+ else:
1882
+ raise ValueError
1883
+ else:
1884
+ #print(dsn)
1885
+ pass
1886
+
1887
+ ## Group: data_1Dx/
1888
+ for dsn in self['data_1Dx']:
1889
+ if (dsn not in dd.keys()):
1890
+ #if (dsets is not None) and (dsn in dsets):
1891
+ ds = self[f'data_1Dx/{dsn}']
1892
+ if (ds.ndim==1) or (ds.ndim==2):
1893
+ dd[dsn] = np.copy(ds[()])
1894
+ else:
1895
+ raise ValueError
1896
+ else:
1897
+ #print(dsn)
1898
+ pass
1899
+
1900
+ if verbose:
1901
+ print(f'>>> {self.fname}')
1902
+
1903
+ return dd
1904
+
1905
+ def make_xdmf(self, **kwargs):
1906
+ '''
1907
+ generate an XDMF/XMF2 from ZTMD for processing with Paraview
1908
+ -----
1909
+ --> https://www.xdmf.org/index.php/XDMF_Model_and_Format
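+ 
+ the output file '<fname_root>.xmf2' is written next to the ZTMD (.h5) file;
+ open the .xmf2 (not the .h5 directly) in Paraview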
1910
+ '''
1911
+
1912
+ if (self.rank==0):
1913
+ verbose = True
1914
+ else:
1915
+ verbose = False
1916
+
1917
+ #makeVectors = kwargs.get('makeVectors',True) ## write vectors (e.g. velocity, vorticity) to XDMF
1918
+ #makeTensors = kwargs.get('makeTensors',True) ## write 3x3 tensors (e.g. stress, strain) to XDMF
1919
+
1920
+ fname_path = os.path.dirname(self.fname)
1921
+ fname_base = os.path.basename(self.fname)
1922
+ fname_root, fname_ext = os.path.splitext(fname_base)
1923
+ fname_xdmf_base = fname_root+'.xmf2'
1924
+ fname_xdmf = os.path.join(fname_path, fname_xdmf_base)
1925
+
1926
+ if verbose: print('\n'+'ztmd.make_xdmf()'+'\n'+72*'-')
1927
+
1928
+ dataset_precision_dict = {} ## holds dtype.itemsize ints i.e. 4,8
1929
+ dataset_numbertype_dict = {} ## holds string description of dtypes i.e. 'Float','Integer'
1930
+
1931
+ # === 1D coordinate dimension vectors --> get dtype.name
1932
+ for scalar in ['x','y','r','theta']:
1933
+ if ('dims/'+scalar in self):
1934
+ data = self['dims/'+scalar]
1935
+ dataset_precision_dict[scalar] = data.dtype.itemsize
1936
+ if (data.dtype.name=='float32') or (data.dtype.name=='float64'):
1937
+ dataset_numbertype_dict[scalar] = 'Float'
1938
+ elif (data.dtype.name=='int8') or (data.dtype.name=='int16') or (data.dtype.name=='int32') or (data.dtype.name=='int64'):
1939
+ dataset_numbertype_dict[scalar] = 'Integer'
1940
+ else:
1941
+ raise TypeError('dtype not recognized, please update script accordingly')
1942
+
1943
+ ## refresh header
1944
+ self.get_header(verbose=False)
1945
+
1946
+ for scalar in self.scalars:
1947
+ data = self['data/%s'%scalar]
1948
+
1949
+ dataset_precision_dict[scalar] = data.dtype.itemsize
1950
+ txt = '%s%s%s%s%s'%(data.dtype.itemsize, ' '*(4-len(str(data.dtype.itemsize))), data.dtype.name, ' '*(10-len(str(data.dtype.name))), data.dtype.byteorder)
1951
+ if verbose: even_print(scalar, txt)
1952
+
1953
+ if (data.dtype.name=='float32') or (data.dtype.name=='float64'):
1954
+ dataset_numbertype_dict[scalar] = 'Float'
1955
+ elif (data.dtype.name=='int8') or (data.dtype.name=='int16') or (data.dtype.name=='int32') or (data.dtype.name=='int64'):
1956
+ dataset_numbertype_dict[scalar] = 'Integer'
1957
+ else:
1958
+ raise TypeError('dtype not recognized, please update script accordingly')
1959
+
1960
+ if verbose: print(72*'-')
1961
+
1962
+ # === write to .xdmf/.xmf2 file
1963
+ if (self.rank==0):
1964
+
1965
+ if not os.path.isfile(fname_xdmf): ## if it doesn't exist...
1966
+ Path(fname_xdmf).touch() ## touch XDMF file
1967
+ perms_h5 = oct(os.stat(self.fname).st_mode)[-3:] ## get permissions of ZTMD file
1968
+ os.chmod(fname_xdmf, int(perms_h5, base=8)) ## change permissions of XDMF file
1969
+
1970
+ #with open(fname_xdmf,'w') as xdmf:
1971
+ with io.open(fname_xdmf,'w',newline='\n') as xdmf:
1972
+
1973
+ xdmf_str='''
1974
+ <?xml version="1.0" encoding="utf-8"?>
1975
+ <!DOCTYPE Xdmf SYSTEM "Xdmf.dtd" []>
1976
+ <Xdmf xmlns:xi="http://www.w3.org/2001/XInclude" Version="2.0">
1977
+ <Domain>
1978
+ '''
1979
+
1980
+ xdmf.write(textwrap.indent(textwrap.dedent(xdmf_str.strip('\n')), 0*' '))
1981
+
1982
+ if self.curvilinear:
1983
+ xdmf_str=f'''
1984
+ <Topology TopologyType="3DSMesh" NumberOfElements="{self.ny:d} {self.nx:d}"/>
1985
+ <Geometry GeometryType="X_Y_Z">
1986
+ <DataItem Dimensions="{self.nx:d} {self.ny:d}" NumberType="{dataset_numbertype_dict['x']}" Precision="{dataset_precision_dict['x']:d}" Format="HDF">
1987
+ {fname_base}:/dims/{'x'}
1988
+ </DataItem>
1989
+ <DataItem Dimensions="{self.nx:d} {self.ny:d}" NumberType="{dataset_numbertype_dict['y']}" Precision="{dataset_precision_dict['y']:d}" Format="HDF">
1990
+ {fname_base}:/dims/{'y'}
1991
+ </DataItem>
1992
+ </Geometry>
1993
+ '''
1994
+ else:
1995
+ xdmf_str=f'''
1996
+ <Topology TopologyType="3DRectMesh" NumberOfElements="1 {self.ny:d} {self.nx:d}"/>
1997
+ <Geometry GeometryType="VxVyVz">
1998
+ <DataItem Dimensions="{self.nx:d}" NumberType="{dataset_numbertype_dict['x']}" Precision="{dataset_precision_dict['x']:d}" Format="HDF">
1999
+ {fname_base}:/dims/{'x'}
2000
+ </DataItem>
2001
+ <DataItem Dimensions="{self.ny:d}" NumberType="{dataset_numbertype_dict['y']}" Precision="{dataset_precision_dict['y']:d}" Format="HDF">
2002
+ {fname_base}:/dims/{'y'}
2003
+ </DataItem>
2004
+ <DataItem Dimensions="1" Format="XML">
2005
+ 0.0
2006
+ </DataItem>
2007
+ </Geometry>
2008
+ '''
2009
+
2010
+ xdmf.write(textwrap.indent(textwrap.dedent(xdmf_str.strip('\n')), 4*' '))
2011
+
2012
+ # ===
2013
+
2014
+ xdmf_str='''
2015
+ <!-- ==================== time series ==================== -->
2016
+ '''
2017
+
2018
+ xdmf.write(textwrap.indent(textwrap.dedent(xdmf_str.strip('\n')), 4*' '))
2019
+
2020
+ # === the time series
2021
+
2022
+ xdmf_str='''
2023
+ <Grid Name="TimeSeries" GridType="Collection" CollectionType="Temporal">
2024
+ '''
2025
+
2026
+ xdmf.write(textwrap.indent(textwrap.dedent(xdmf_str.strip('\n')), 4*' '))
2027
+
2028
+ for ti in range(len(self.t)):
2029
+
2030
+ dset_name = 'ts_%08d'%ti
2031
+
2032
+ xdmf_str='''
2033
+ <!-- ============================================================ -->
2034
+ '''
2035
+
2036
+ xdmf.write(textwrap.indent(textwrap.dedent(xdmf_str.strip('\n')), 6*' '))
2037
+
2038
+ # ===
2039
+
2040
+ xdmf_str=f'''
2041
+ <Grid Name="{dset_name}" GridType="Uniform">
2042
+ <Time TimeType="Single" Value="{self.t[ti]:0.8E}"/>
2043
+ <Topology Reference="/Xdmf/Domain/Topology[1]" />
2044
+ <Geometry Reference="/Xdmf/Domain/Geometry[1]" />
2045
+ '''
2046
+
2047
+ xdmf.write(textwrap.indent(textwrap.dedent(xdmf_str.strip('\n')), 6*' '))
2048
+
2049
+ # === .xdmf : <Grid> per 2D coordinate array
2050
+
2051
+ if self.curvilinear:
2052
+
2053
+ for scalar in ['x','y','r','theta']:
2054
+
2055
+ dset_hf_path = 'dims/%s'%scalar
2056
+
2057
+ if (dset_hf_path in self):
2058
+
2059
+ scalar_name = scalar
2060
+
2061
+ xdmf_str=f'''
2062
+ <!-- ===== scalar : {scalar} ===== -->
2063
+ <Attribute Name="{scalar_name}" AttributeType="Scalar" Center="Node">
2064
+ <DataItem Dimensions="{self.ny:d} {self.nx:d}" NumberType="{dataset_numbertype_dict[scalar]}" Precision="{dataset_precision_dict[scalar]:d}" Format="HDF">
2065
+ {fname_base}:/{dset_hf_path}
2066
+ </DataItem>
2067
+ </Attribute>
2068
+ '''
2069
+
2070
+ xdmf.write(textwrap.indent(textwrap.dedent(xdmf_str.strip('\n')), 8*' '))
2071
+
2072
+ # === .xdmf : <Grid> per scalar
2073
+
2074
+ for scalar in self.scalars:
2075
+
2076
+ dset_hf_path = 'data/%s'%scalar
2077
+
2078
+ scalar_name = scalar
2079
+
2080
+ if self.curvilinear:
2081
+ xdmf_str=f'''
2082
+ <!-- ===== scalar : {scalar} ===== -->
2083
+ <Attribute Name="{scalar_name}" AttributeType="Scalar" Center="Node">
2084
+ <DataItem Dimensions="{self.ny:d} {self.nx:d}" NumberType="{dataset_numbertype_dict[scalar]}" Precision="{dataset_precision_dict[scalar]:d}" Format="HDF">
2085
+ {fname_base}:/{dset_hf_path}
2086
+ </DataItem>
2087
+ </Attribute>
2088
+ '''
2089
+ else:
2090
+ xdmf_str=f'''
2091
+ <!-- ===== scalar : {scalar} ===== -->
2092
+ <Attribute Name="{scalar_name}" AttributeType="Scalar" Center="Node">
2093
+ <DataItem Dimensions="1 {self.ny:d} {self.nx:d}" NumberType="{dataset_numbertype_dict[scalar]}" Precision="{dataset_precision_dict[scalar]:d}" Format="HDF">
2094
+ {fname_base}:/{dset_hf_path}
2095
+ </DataItem>
2096
+ </Attribute>
2097
+ '''
2098
+
2099
+ xdmf.write(textwrap.indent(textwrap.dedent(xdmf_str.strip('\n')), 8*' '))
2100
+
2101
+ # === .xdmf : <Grid> per scalar (cell-centered values)
2102
+
2103
+ if ('data_cells' in self):
2104
+ scalars_cells = list(self['data_cells'].keys())
2105
+ for scalar in scalars_cells:
2106
+
2107
+ dset_hf_path = 'data_cells/%s'%scalar
2108
+ dset = self[dset_hf_path]
2109
+ dset_precision = dset.dtype.itemsize
2110
+ scalar_name = scalar
2111
+
2112
+ if (dset.dtype.name=='float32') or (dset.dtype.name=='float64'):
2113
+ dset_numbertype = 'Float'
2114
+ elif (dset.dtype.name=='int8') or (dset.dtype.name=='int16') or (dset.dtype.name=='int32') or (dset.dtype.name=='int64'):
2115
+ dset_numbertype = 'Integer'
2116
+ else:
2117
+ raise TypeError('dtype not recognized, please update script accordingly')
2118
+
2119
+ xdmf_str=f'''
2120
+ <!-- ===== scalar : {scalar} ===== -->
2121
+ <Attribute Name="{scalar_name}" AttributeType="Scalar" Center="Cell">
2122
+ <DataItem Dimensions="{(self.ny-1):d} {(self.nx-1):d}" NumberType="{dset_numbertype}" Precision="{dset_precision:d}" Format="HDF">
2123
+ {fname_base}:/{dset_hf_path}
2124
+ </DataItem>
2125
+ </Attribute>
2126
+ '''
2127
+
2128
+ xdmf.write(textwrap.indent(textwrap.dedent(xdmf_str.strip('\n')), 8*' '))
2129
+
2130
+ xdmf_str='''
2131
+ <!-- ===== end scalars ===== -->
2132
+ '''
2133
+ xdmf.write(textwrap.indent(textwrap.dedent(xdmf_str.strip('\n')), 8*' '))
2134
+
2135
+ # === .xdmf : end Grid for this timestep
2136
+
2137
+ xdmf_str='''
2138
+ </Grid>
2139
+ '''
2140
+ xdmf.write(textwrap.indent(textwrap.dedent(xdmf_str.strip('\n')), 6*' '))
2141
+
2142
+ # ===
2143
+
2144
+ xdmf_str='''
2145
+ </Grid>
2146
+ </Domain>
2147
+ </Xdmf>
2148
+ '''
2149
+ xdmf.write(textwrap.indent(textwrap.dedent(xdmf_str.strip('\n')), 0*' '))
2150
+
2151
+ if verbose: print('--w-> %s'%fname_xdmf_base)
2152
+ return
2153
+
2154
+ # ==================================================================
2155
+ # External attachments
2156
+ # ==================================================================
2157
+
2158
+ def calc_gradients(self, acc=6, edge_stencil='full', **kwargs):
2159
+ return _calc_gradients(self, acc=acc, edge_stencil=edge_stencil, **kwargs)
2160
+
2161
+ def calc_psvel(self, **kwargs):
2162
+ return _calc_psvel(self, **kwargs)
2163
+
2164
+ def calc_wall_quantities(self, acc=6, edge_stencil='full', **kwargs):
2165
+ return _calc_wall_quantities(self, acc=acc, edge_stencil=edge_stencil, **kwargs)
2166
+
2167
+ def calc_bl_edge(self, **kwargs):
2168
+ return _calc_bl_edge(self, **kwargs)
2169
+
2170
+ def calc_bl_edge_quantities(self, **kwargs):
2171
+ return _calc_bl_edge_quantities(self, **kwargs)
2172
+
2173
+ def calc_d99(self, **kwargs):
2174
+ return _calc_d99(self, **kwargs)
2175
+
2176
+ def calc_d99_quantities(self, **kwargs):
2177
+ return _calc_d99_quantities(self, **kwargs)
2178
+
2179
+ def calc_bl_integral_quantities(self, **kwargs):
2180
+ return _calc_bl_integral_quantities(self, **kwargs)
2181
+
2182
+ def calc_u_inc(self, method='rho', **kwargs):
2183
+ return _calc_u_inc(self, method=method, **kwargs)
2184
+
2185
+ def calc_d99_inc(self, **kwargs):
2186
+ return _calc_d99_inc(self, **kwargs)
2187
+
2188
+ def calc_wake_parameter(self, **kwargs):
2189
+ return _calc_wake_parameter(self, **kwargs)
2190
+
2191
+ def calc_VDII(self, **kwargs):
2192
+ return _calc_VDII(self, **kwargs)
2193
+
2194
+ def calc_peak_tauI(self, **kwargs):
2195
+ return _calc_peak_tauI(self, **kwargs)
2196
+
2197
+ # ==================================================================
2198
+
2199
+ def post_TBL(self,acc=6,edge_stencil='full'):
2200
+ '''
2201
+ A general-purpose post-processing wrapper for turbulent boundary layer (TBL) ZTMD files
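+ 
+ runs, in order: calc_gradients() --> calc_psvel() --> calc_wall_quantities() -->
+ calc_bl_edge() & calc_bl_edge_quantities() --> calc_d99() & calc_d99_quantities() -->
+ calc_bl_integral_quantities() --> calc_u_inc() & calc_d99_inc() -->
+ calc_wake_parameter(), calc_VDII(), calc_peak_tauI() --> make_xdmf()
+ 
+ example (sketch -- the file name is hypothetical):
+ 
+ >>> with ztmd('case_ztmd.h5','a') as f:
+ ...     f.post_TBL(acc=6, edge_stencil='full')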
2202
+ '''
2203
+
2204
+ self.calc_gradients(acc=acc, edge_stencil=edge_stencil, favre=True)
2205
+ self.calc_psvel()
2206
+ self.calc_wall_quantities(acc=acc, edge_stencil=edge_stencil)
2207
+
2208
+ self.calc_bl_edge(method='vorticity', epsilon=5e-5, ongrid=True, acc=acc)
2209
+ self.calc_bl_edge_quantities()
2210
+ self.calc_d99(method='psvel', interp_kind='cubic')
2211
+ self.calc_d99_quantities(interp_kind='cubic')
2212
+ self.calc_bl_integral_quantities(interp_kind='cubic')
2213
+
2214
+ self.calc_u_inc(method='rho')
2215
+ self.calc_bl_edge_quantities() ## run again to get u_inc_edge
2216
+ self.calc_d99_inc(interp_kind='cubic')
2217
+
2218
+ self.calc_wake_parameter(k=0.41,B=5.2)
2219
+ self.calc_VDII(adiabatic=True)
2220
+ self.calc_peak_tauI()
2221
+
2222
+ self.make_xdmf()
2223
+
2224
+ return