toughanimator 0.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
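The file added below defines a vis_reader class that is driven by a config.json placed in the case directory. A minimal usage sketch follows (the file names and the import path are assumptions for illustration; only config keys that the code below actually reads are shown):

    # config.json placed in the case directory (file names are hypothetical)
    # {
    #     "input_files": ["INFILE"],
    #     "output_files": ["OUTPUT_DATA.csv"],
    #     "EOS": "ECO2N",
    #     "debug": false
    # }
    from toughanimator import vis_reader  # assumed import path for this module

    reader = vis_reader("path/to/case_dir")  # case directory containing config.json
    reader.write_all()  # elements/connections, geometry, initial conditions, per-timestep results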
@@ -0,0 +1,2336 @@
import os
import io
import sys
import re
import shutil
import numpy as np
import pandas as pd
from vtkmodules.all import *
import pathlib
import pyvista as pv
import json
from enum import Enum

class MeshType(Enum):
    RegularGrid = 1
    StructuredGridOrth = 2
    StructuredGridFree = 3
    PolygonalMesh = 4

class MeshPlane():
    unknown = -1
    X = 1
    Y = 2
    Z = 3
    XZ = 4
    XY = 5
    YZ = 6
    XYZ = 7

class OutType():
    Unknown = 0
    TEC = 1
    CSV = 2

class VisType():
    Tecplot = 1
    ParaView = 2
    MatplotLib = 3

class ToughVersion(Enum):
    Unknown = 0
    TOUGH2 = 2
    TOUGH3 = 3
    TOUGHReact = 4

class ValueType(Enum):
    Unknown = 0
    Scalar = 1
    Vector = 3

class VisTimeStep:
    def __init__(self, time_step, iteration, time):
        self.selected = True
        self.time_step = time_step
        self.iter = iteration
        self.time = time
        self.vtu_file_name = ""

class VisVariable:
    def __init__(self, name, value_type, number_of_components):
        self.variable_name = name
        self.value_type = value_type
        self.number_of_components = number_of_components

    def to_dict(self):
        return {
            "variable_name": self.variable_name,
            "value_type": self.value_type.name,  # or .value if you prefer
            "number_of_components": self.number_of_components
        }

class VisSetting:
    def __init__(self, input_file_paths, out_file_paths, vis_dir, corners_file="unknown", out_format_type=OutType.Unknown, tough_version=ToughVersion.Unknown, vis_types=[VisType.ParaView, VisType.Tecplot], mesh_type=MeshType.RegularGrid, debug=False, eos="ECO2N"):
        self.mesh_type = mesh_type
        self.out_format_type = out_format_type
        self.vis_types = vis_types
        self.input_file_paths = input_file_paths
        self.out_file_paths = out_file_paths
        self.vis_dir = vis_dir
        self.known_bounds = False
        self.debug = debug
        self.tough_version = tough_version
        self.mesh_plane = MeshPlane.unknown
        self.isReverse = False
        self.corners_file = corners_file
        self.eos = eos
        self.minc = False

    def setBounds(self, x_bounds, y_bounds, z_bounds):
        self.bounds = np.concatenate((x_bounds, y_bounds, z_bounds))
        self.known_bounds = True

class vis_reader:
    def __init__(self, case_dir):
        self.setting = None
        self.main_geometry = None
        self.incon_path = None
        #self.variable_list = []
        self.variable_list = {}
        self.time_steps_list = []
        self.rock_dict = []
        if os.path.isdir(case_dir):
            config_path = os.path.join(case_dir, "config.json")
            if os.path.exists(config_path):
                with open(config_path, "r") as config_file:
                    config = json.load(config_file)
            else:
                print(f"Config file:({config_path}) not found. Please create it.")
                sys.exit(1)
        else:
            print(f"Case directory:({case_dir}) not found. Please check it.")
            sys.exit(1)

        if "input_files" not in config:
            print(f"Input files not found in config.json. Please check it.")
            sys.exit(1)
        if "output_files" not in config:
            print(f"Output files not found in config.json. Please check it.")
            sys.exit(1)

        setting = VisSetting(
            input_file_paths = [os.path.join(case_dir, f) for f in config["input_files"]],
            out_file_paths = [os.path.join(case_dir, f) for f in config["output_files"]],
            vis_dir = config["vis_dir"] if "vis_dir" in config else case_dir,
            corners_file = os.path.join(case_dir, config["corners_file"] if "corners_file" in config else "None"),
            debug = config['debug'] if 'debug' in config else False,
            eos = config['EOS'] if 'EOS' in config else "ECO2N",
            #minc = config['MINC'] if 'MINC' in config else False,
        )

        for input_file_path in setting.input_file_paths:
            if not os.path.exists(input_file_path):
                print(f'Can\'t find input file: ({input_file_path}). Please check the path or remove it from the config.json.')
                sys.exit(1)
            # check if this project uses MINC
            elif input_file_path.endswith('MINC'):
                setting.minc = True
        for out_file_path in setting.out_file_paths:
            if not os.path.exists(out_file_path):
                print(f'Can\'t find output file: ({out_file_path}). Please check the path or remove it from the config.json.')
                sys.exit(1)

        if not os.path.isdir(setting.vis_dir):
            print(f'Can\'t find directory: ({setting.vis_dir}). Please check the path or remove it from the config.json.')
            sys.exit(1)
        else:
            vis_path = os.path.join(setting.vis_dir, "tough_vis")
            # delete the directory if it exists
            if os.path.isdir(vis_path):
                shutil.rmtree(vis_path)
            os.mkdir(vis_path)
            paraview_path = os.path.join(vis_path, 'paraview')
            os.mkdir(paraview_path)
            print(f"Visualization folder created: {vis_path}")
            setting.vis_dir = vis_path

        self.setting = setting

    def write_eleme_conne(self):
        if self.setting == None:
            print(f'Please initialize the vis_reader class with the case directory.')
            sys.exit(1)
        print(f'Reading input files ...')
        self.__write_elem_buffer()
        self.__write_conne_buffer()
        self.__write_rocks_buffer()
        print(f'Creating elements and connections ...')
        self.__create_elem_conne()

    def write_geometry(self):
        if self.setting == None:
            print(f'Please initialize the vis_reader class with the case directory.')
            sys.exit(1)

        if self.elem_conne_path == None:
            print(f'Can\'t find element and connection file. Please create it first. (write_eleme_conne)')
            sys.exit(1)

        print(f'Creating main geometry ...')
        self.__create_main_geometry()

    def write_incon(self):
        if self.setting == None:
            print(f'Please initialize the vis_reader class with the case directory.')
            sys.exit(1)
        if self.main_geometry == None:
            print(f'Can not find main geometry. Please create the main geometry first. (write_geometry)')
            sys.exit(0)
        print(f'Reading input files ...')
        self.__write_incon_buffer()
        print(f'Creating initial condition ...')
        self.__write_initial_conditions()

    def write_result(self):
        if self.setting == None:
            print(f'Please initialize the vis_reader class with the case directory.')
            sys.exit(1)
        if self.main_geometry == None:
            print(f'Can not find main geometry. Please create the main geometry first. (write_geometry)')
            sys.exit(0)

        for output_file_path in self.setting.out_file_paths:
            print(f'Reading output ({output_file_path}) ... ')

            self.current_out_file = output_file_path
            self.__check_TOUGH_version()
            print(f' Version: {self.setting.tough_version.name}')
            if self.setting.tough_version == ToughVersion.TOUGH2:
                self.__read_TOUGH2_CSV_outfile()
            elif self.setting.tough_version == ToughVersion.TOUGH3:
                self.__read_TOUGH3_CSV_outfile()
            elif self.setting.tough_version == ToughVersion.TOUGHReact:
                self.__read_tough_TEC_outfile()
        # add post calculation
        for timestep in self.time_steps_list:
            self.__post_process(timestep)
        self.__write_json()
        print(f'All files have been created in {self.setting.vis_dir}.')

    def write_all(self):
        self.write_eleme_conne()
        self.write_geometry()
        self.write_incon()
        self.write_result()

    # TODO: need to clean up the code
    def __check_bounds(self):
        if not self.setting.bounds[1] > self.setting.bounds[0]:
            print(
                f'Max X {self.setting.bounds[1]} must be greater than Min X {self.setting.bounds[0]}.')
            sys.exit(1)
        if not self.setting.bounds[3] > self.setting.bounds[2]:
            print(
                f'Max Y {self.setting.bounds[3]} must be greater than Min Y {self.setting.bounds[2]}.')
            sys.exit(1)
        if not self.setting.bounds[5] > self.setting.bounds[4]:
            print(
                f'Max Z {self.setting.bounds[5]} must be greater than Min Z {self.setting.bounds[4]}.')
            sys.exit(1)
        # print(f'Can\'t find input file: {self.setting.input_path}.')

    def __read_input(self):
        self.__write_elem_buffer()
        self.__write_conne_buffer()
        self.__write_rocks_buffer()
        self.__write_incon_buffer()

    def __write_elem_buffer(self):
        self.eleme_buffer = io.StringIO()
        # write temp element txt
        has_elem = False
        for input_file_path in self.setting.input_file_paths:
            line_counter = 0
            with open(input_file_path) as f:
                reading_elem = False

                for line in f:
                    if line.startswith('ELEME-') or line.startswith('ELEME'):
                        reading_elem = True
                        has_elem = True
                        find_elem = True
                        continue
                    if reading_elem:
                        line_counter += 1
                        if self.__check_if_block_end(line, line_counter):
                            reading_elem = False
                            found_path = input_file_path
                            break
                        else:
                            self.eleme_buffer.write(line)

        if has_elem == False:
            print(f'Can\'t find ELEME block in input_file_paths.')
            sys.exit(1)
        else:
            print(f' Found ELEME block in {found_path}')

    def __write_conne_buffer(self):
        self.conne_buffer = io.StringIO()
        # write temp element txt
        has_conne = False
        for input_file_path in self.setting.input_file_paths:
            line_counter = 0
            with open(input_file_path) as f:

                reading_conne = False
                for line in f:
                    if line.startswith('CONNE-') or line.startswith('CONNE'):
                        reading_conne = True
                        has_conne = True
                        continue
                    if reading_conne:
                        line_counter += 1
                        if self.__check_if_block_end(line, line_counter):
                            reading_conne = False
                            found_path = input_file_path
                            break
                        else:
                            self.conne_buffer.write(line)

        if has_conne == False:
            print(f'Can\'t find CONNE block in input_file_paths.')
            sys.exit(1)
        else:
            print(f' Found CONNE block in {found_path}')

    def __write_rocks_buffer(self):
        self.rocks_buffer = io.StringIO()
        #self.rocks_sgr_buffer = io.StringIO()
        has_rocks = False

        for input_file_path in self.setting.input_file_paths:
            line_counter = 0
            with open(input_file_path) as f:
                reading_rocks = False
                for line in f:
                    if line.startswith('ROCKS-'):
                        reading_rocks = True
                        has_rocks = True
                        continue

                    if reading_rocks:
                        line_counter += 1
                        if self.__check_if_block_end(line, line_counter):
                            reading_rocks = False
                            found_path = input_file_path
                            break
                        else:
                            if 'SEED' in line:
                                continue
                            first_line = line.replace('\n', '').rstrip()

                            # skip 1 line
                            f.readline()
                            third_line = f.readline()
                            new_line = f'{first_line}{third_line}'
                            self.rocks_buffer.write(new_line)
                            f.readline()

        if has_rocks == False:
            print(f'Can\'t find ROCKS block in all input_file_paths.')
            sys.exit(1)
        else:
            print(f' Found ROCKS block in {found_path}')

    def __write_incon_buffer(self):
        self.incon_buffer = io.StringIO()
        has_incon = False
        # write temp element txt

        for input_file_path in self.setting.input_file_paths:
            line_counter = 0
            with open(input_file_path) as f:

                reading_incon = False
                for line in f:
                    if line.startswith('INCON-'):
                        reading_incon = True
                        has_incon = True
                        find_incon = True
                        continue

                    if reading_incon:
                        line_counter += 1
                        if self.__check_if_block_end(line, line_counter):
                            found_path = input_file_path
                            break
                        #line = f.readline() # skip first line #self.number_of_elements
                        eos = self.setting.eos
                        num = len(line.split())
                        if self.setting.eos == "ECO2N" and len(line.split()) == 4:
                            self.incon_buffer.write(line)
                        elif self.setting.eos == "EOS1":
                            line = f.readline() # skip first line #self.number_of_elements
                            if len(line.split()) == 2:
                                self.incon_buffer.write(line)

        if has_incon == False:
            print(f'Can\'t find INCON block in input_file_paths.')
            #sys.exit(1)
        else:
            print(f' Found INCON block in {found_path}')

    def __write_initial_conditions(self):

        self.incon_vtk = self.__read_vtk_file(self.main_geometry)

        self.incon_buffer.seek(0)
        incon_df = pd.DataFrame()
        if self.setting.eos == "ECO2N":
            # read incon
            incon_colspecs = [(0, 20), (20, 40), (40, 60), (60, 80)] # define column widths
            incon_names = ['Pressure', 'NaCl', 'CO2', 'Temperature']
            incon_df = pd.read_fwf(self.incon_buffer, colspecs=incon_colspecs, header=None,
                                   names=incon_names,
                                   dtype={'Pressure':float, 'NaCl':float, 'CO2':float, 'Temperature':float})
        elif self.setting.eos == "EOS1":
            # read incon
            incon_colspecs = [(0, 20), (20, 40)]
            incon_names = ['Temperature', 'Pressure']
            incon_df = pd.read_fwf(self.incon_buffer, colspecs=incon_colspecs, header=None,
                                   names=incon_names,
                                   dtype={'Temperature':float, 'Pressure':float})
        if len(incon_df) == 0:
            print(f' It is empty in INCON block.')
            return

        for header in incon_names:
            array = vtkDoubleArray()
            array.SetName(header)
            self.incon_vtk.GetCellData().AddArray(array)

        for i in range(0, self.incon_vtk.GetNumberOfCells()):
            for header in incon_names:
                index = self.sequence_dist[i]
                value = self.__parse_float(incon_df[header][index])
                self.incon_vtk.GetCellData().GetArray(header).InsertNextValue(value)

        extension = os.path.splitext(self.main_geometry)[1]
        self.incon_path = os.path.join(self.setting.vis_dir, f'incon{extension}')
        self.__write_vtk_file(self.incon_vtk, self.incon_path)
        print(f' ✓ Initial condition file created: {self.incon_path}')

    def __read_TOUGH2_CSV_outfile(self):
        output_buffer = io.StringIO()
        current_time_step = None
        tim_step_counter = 1
        csv_headers = []
        line_number = -1
        reading_number = 0
        value_type = ValueType.Unknown
        start_index = -1
        self.time_steps_list = []
        with open(self.current_out_file) as f:
            for line in f:
                line_number = line_number + 1
                values = line.strip().split(',')
                if line_number == 0:
                    csv_headers = [x.strip() for x in values]
                    if 'ELEM' in csv_headers and 'INDEX' in csv_headers:
                        value_type = ValueType.Scalar
                        start_index = 3

                    elif 'ELEM1' in csv_headers and 'ELEM2' in csv_headers:
                        value_type = ValueType.Vector
                        start_index = 4

                    csv_headers = csv_headers[start_index:]
                    print(f' Value type: {value_type.name}')
                    continue

                time = self.__parse_float(values[0].strip())

                # next time step
                if current_time_step == None or time != current_time_step.time:

                    # when all items in this timestep have been read
                    if value_type == ValueType.Scalar and reading_number == self.number_of_elements:
                        output_buffer.seek(0)
                        df = pd.read_csv(output_buffer, sep=',', header=0)

                        self.__write_scalar_result(
                            current_time_step, df, csv_headers)
                        output_buffer.flush()
                        output_buffer.close()
                        output_buffer = io.StringIO()
                        reading_number = 0
                    if value_type == ValueType.Vector and reading_number == self.number_of_connections:
                        output_buffer.seek(0)
                        df = pd.read_csv(output_buffer, sep=',', header=0)
                        #df.to_csv(os.path.join(self.setting.vis_dir, "timestep.csv"), index=False)
                        self.__write_vector_result(
                            current_time_step, df, csv_headers)
                        output_buffer.flush()
                        output_buffer.close()
                        output_buffer = io.StringIO()
                        reading_number = 0

                    current_time_step = VisTimeStep(
                        time=float(time),
                        time_step=tim_step_counter,
                        iteration=1
                    )
                    #print(f' Reading step {current_time_step.time_step_id} : {current_time_step.time} ...')

                    # Initialize buffer
                    header_string = ','.join(csv_headers)
                    output_buffer.write(header_string + '\n')
                    self.time_steps_list.append(current_time_step)
                    tim_step_counter = tim_step_counter + 1

                    output_buffer.write(','.join(values[start_index:]) + '\n')
                    reading_number = reading_number + 1
                else:
                    output_buffer.write(','.join(values[start_index:]) + '\n')
                    reading_number = reading_number + 1
            else:
                # write the last time step
                if value_type == ValueType.Scalar:
                    output_buffer.seek(0)
                    df = pd.read_csv(output_buffer, sep=',', header=0)
                    self.__write_scalar_result(current_time_step, df, csv_headers)
                if value_type == ValueType.Vector:
                    output_buffer.seek(0)
                    df = pd.read_csv(output_buffer, sep=',', header=0)
                    self.__write_vector_result(current_time_step, df, csv_headers)
                output_buffer.close()

    # TODO: find which case is TOUGH3
    def __read_TOUGH3_CSV_outfile(self):
        scalar_buffer = io.StringIO()
        current_time_step = None
        tim_step_counter = 1
        csv_headers = []
        line_number = -1
        reading_number = 0
        value_type = ValueType.Unknown
        start_index = -1
        self.time_steps_list = []

        with open(self.current_out_file) as f:
            for line in f:
                line_number = line_number + 1
                values = line.strip().split(',')
                if line_number == 0:
                    values = [x.replace('"', '') for x in values]
                    csv_headers = [x.strip() for x in values]

                    if 'ELEM' in csv_headers:
                        value_type = ValueType.Scalar
                        start_index = 5

                    elif 'ELEM1' in csv_headers and 'ELEM2' in csv_headers:
                        value_type = ValueType.Vector
                        start_index = 5

                    csv_headers = csv_headers[start_index:]
                    f.readline() # skip next line
                    print(f' Value type: {value_type.name}')
                    continue

                # Find time item
                if len(values) == 1:
                    time_string = values[0].replace('"', '').strip()
                    time_string = time_string.split()[-1]
                    time = self.__parse_float(time_string)

                    # if not the first time step
                    if value_type == ValueType.Scalar and reading_number == self.number_of_elements:
                        scalar_buffer.seek(0)
                        df = pd.read_csv(scalar_buffer, sep=',', header=0)
                        self.__write_scalar_result(
                            current_time_step, df, csv_headers)
                        scalar_buffer.flush()
                        scalar_buffer.close()
                        scalar_buffer = io.StringIO()
                        reading_number = 0

                    if value_type == ValueType.Vector and reading_number == self.number_of_connections:
                        scalar_buffer.seek(0)
                        df = pd.read_csv(scalar_buffer, sep=',', header=0)
                        self.__write_vector_result(
                            current_time_step, df, csv_headers)
                        scalar_buffer.flush()
                        scalar_buffer.close()
                        scalar_buffer = io.StringIO()
                        reading_number = 0

                    current_time_step = VisTimeStep(
                        time=float(time),
                        time_step=tim_step_counter,
                        iteration=1
                    )

                    # Initialize buffer
                    header_string = ','.join(csv_headers)
                    scalar_buffer.write(header_string + '\n')
                    self.time_steps_list.append(current_time_step)
                    tim_step_counter = tim_step_counter + 1

                else:
                    scalar_buffer.write(','.join(values[start_index:]) + '\n')
                    reading_number = reading_number + 1

            else:
                # write the last time step
                if value_type == ValueType.Scalar:
                    scalar_buffer.seek(0)
                    df = pd.read_csv(scalar_buffer, sep=',', header=0)
                    self.__write_scalar_result(current_time_step, df, csv_headers)
                if value_type == ValueType.Vector:
                    scalar_buffer.seek(0)
                    df = pd.read_csv(scalar_buffer, sep=',', header=0)
                    self.__write_vector_result(current_time_step, df, csv_headers)
                scalar_buffer.close()

    def __read_tough_TEC_outfile(self):

        scalar_buffer = io.StringIO()
        #vector_buffer = io.StringIO()
        current_time_step = None
        tim_step_counter = 1
        reading_scalar = False
        scalar_headers = []
        self.time_steps_list = []
        with open(self.current_out_file) as f:
            for line in f:
                if line.strip().lower().startswith('Variables'.lower()):
                    headers_value = line.strip().split('=')[1]
                    #scalar_headers = headers_value.replace('"', '')

                    scalar_headers = re.split(' |,', headers_value.replace('"', '').strip())
                    scalar_headers = [x for x in scalar_headers if x]
                    scalar_headers.pop(0)
                    scalar_headers.pop(0)
                    scalar_headers.pop(0)

                    continue
                if line.strip().lower().startswith('Zone T'.lower()):
                    if reading_scalar:
                        scalar_buffer.seek(0)
                        df = pd.read_csv(scalar_buffer, sep=',', header=0)
                        self.__write_scalar_result(
                            current_time_step, df, scalar_headers)

                        scalar_buffer.flush()
                        scalar_buffer.close()
                        scalar_buffer = io.StringIO()

                    time_values = line.split('"')
                    time = time_values[1].split()[0]
                    current_time_step = VisTimeStep(
                        time=float(time),
                        time_step=tim_step_counter,
                        iteration=1
                    )
                    reading_scalar = True
                    header_string = ','.join(scalar_headers)
                    scalar_buffer.write(header_string + '\n')
                    self.time_steps_list.append(current_time_step)
                    tim_step_counter = tim_step_counter + 1
                    continue

                if reading_scalar and len(line.split()) == len(scalar_headers)+3:
                    csv_line = ','.join(line.split()[3:]) + '\n'
                    scalar_buffer.write(csv_line)

            # if process to the end of file
            else:
                if len(scalar_buffer.getvalue()) > 0:
                    #df = self.prepare_fixed_length_scalar_dataframe(scalar_headers, scalar_buffer)
                    scalar_buffer.seek(0)
                    df = pd.read_csv(scalar_buffer, sep=',', header=0)
                    self.__write_scalar_result(
                        current_time_step, df, scalar_headers)
                    scalar_buffer.flush()
                    scalar_buffer.close()
                    scalar_buffer = io.StringIO()
                    tim_step_counter = tim_step_counter + 1

    def __post_process(self, vis_time_step):
        time_index = self.time_steps_list.index(vis_time_step)
        #vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}.vtr')

        extension = os.path.splitext(self.main_geometry)[1]
        vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}{extension}')
        self.time_steps_list[time_index].vtu_file_name = vtr_path
        scalar_vtr = self.__read_vtk_file(vtr_path)
        vtr = scalar_vtr

        post_variable_list = []
        # p
        p_name = "P"
        if vtr.GetCellData().GetArray("P (Pa)") is not None:
            p_name = "P (Pa)"
        if vtr.GetCellData().GetArray("P(bar)") is not None:
            p_name = "P(bar)"
        if vtr.GetCellData().GetArray("PRES") is not None:
            p_name = "PRES"

        if vtr.GetCellData().GetArray(p_name) is not None:
            delPArray = vtkDoubleArray()
            delPArray.SetName(f'del_{p_name}')
            for i in range(0, vtr.GetNumberOfCells()):
                p_value = vtr.GetCellData().GetArray(p_name).GetValue(i)
                incon_p = self.incon_vtk.GetCellData().GetArray('Pressure').GetValue(i)
                delP = p_value - incon_p
                delPArray.InsertNextValue(delP)

            vtr.GetCellData().AddArray(delPArray)
            post_variable_list.append(VisVariable(f'del_{p_name}', ValueType.Scalar, 1))

        # add toughreact variables
        if self.setting.tough_version == ToughVersion.TOUGHReact or self.setting.tough_version == ToughVersion.TOUGH3:
            trapHCO2_array = vtkDoubleArray()
            trapHCO2_array.SetName('trapHCO2')

            trapRCO2_array = vtkDoubleArray()
            trapRCO2_array.SetName('trapRCO2')

            trapDCO2_array = vtkDoubleArray()
            trapDCO2_array.SetName('trapDCO2')

            trapMCO2_array = vtkDoubleArray()
            trapMCO2_array.SetName('trapMCO2')

            for index in range(0, vtr.GetNumberOfCells()):
                trapHCO2 = 0
                trapRCO2 = 0
                trapDCO2 = 0
                trapMCO2 = 0
                VOLX = 0
                if vtr.GetCellData().GetArray("VOLX") is not None:
                    VOLX = vtr.GetCellData().GetArray("VOLX").GetValue(index)

                SatGas = 0
                if vtr.GetCellData().GetArray("SatGas") is not None:
                    SatGas = vtr.GetCellData().GetArray("SatGas").GetValue(index)
                elif vtr.GetCellData().GetArray("SAT_G") is not None:
                    SatGas = vtr.GetCellData().GetArray("SAT_G").GetValue(index)

                Porosity = 0
                if vtr.GetCellData().GetArray("Porosity") is not None:
                    Porosity = vtr.GetCellData().GetArray("Porosity").GetValue(index)
                elif vtr.GetCellData().GetArray("POR") is not None:
                    Porosity = vtr.GetCellData().GetArray("POR").GetValue(index)

                DGas_kg_m3 = 0
                if vtr.GetCellData().GetArray("DGas_kg/m3") is not None:
                    DGas_kg_m3 = vtr.GetCellData().GetArray("DGas_kg/m3").GetValue(index)
                elif vtr.GetCellData().GetArray("DEN_G") is not None:
                    DGas_kg_m3 = vtr.GetCellData().GetArray("DEN_G").GetValue(index)

                SatLiq = 0
                if vtr.GetCellData().GetArray("SatLiq") is not None:
                    SatLiq = vtr.GetCellData().GetArray("SatLiq").GetValue(index)
                elif vtr.GetCellData().GetArray("SAT_L") is not None:
                    SatLiq = vtr.GetCellData().GetArray("SAT_L").GetValue(index)

                XCO2Liq = 0
                if vtr.GetCellData().GetArray("XCO2Liq") is not None:
                    XCO2Liq = vtr.GetCellData().GetArray("XCO2Liq").GetValue(index)
                elif vtr.GetCellData().GetArray("X_CO2_L") is not None:
                    XCO2Liq = vtr.GetCellData().GetArray("X_CO2_L").GetValue(index)

                sgr = 0
                if vtr.GetCellData().GetArray("sgr") is not None:
                    sgr = vtr.GetCellData().GetArray("sgr").GetValue(index)

                trapHCO2 = (SatGas-sgr)*VOLX*Porosity*DGas_kg_m3*(SatGas > sgr)
                trapRCO2 = 0.05*VOLX*Porosity*DGas_kg_m3*(SatGas > sgr) + SatGas*VOLX*Porosity*DGas_kg_m3*(SatGas <= sgr)
                trapDCO2 = SatLiq*VOLX*Porosity*DGas_kg_m3*XCO2Liq
                trapHCO2_array.InsertNextValue(self.__fix_negative_zero(trapHCO2))
                trapRCO2_array.InsertNextValue(self.__fix_negative_zero(trapRCO2))
                trapDCO2_array.InsertNextValue(self.__fix_negative_zero(trapDCO2))

                trapMCO2 = 0
                if vtr.GetCellData().GetArray("calcite") is not None:
                    calcite = vtr.GetCellData().GetArray("calcite").GetValue(index)
                    ankerite_2 = vtr.GetCellData().GetArray("ankerite-2").GetValue(index)
                    dawsonite = vtr.GetCellData().GetArray("dawsonite").GetValue(index)
                    dolomite_2 = vtr.GetCellData().GetArray("dolomite-2").GetValue(index)
                    magnesite = vtr.GetCellData().GetArray("magnesite").GetValue(index)
                    siderite_2 = vtr.GetCellData().GetArray("siderite-2").GetValue(index)
                    trapMCO2 = (calcite*1 + ankerite_2*2 + dawsonite*1 + dolomite_2*2 + magnesite*1 + siderite_2*1)*VOLX*Porosity*0.012
                trapMCO2_array.InsertNextValue(self.__fix_negative_zero(trapMCO2))

            vtr.GetCellData().AddArray(trapHCO2_array)
            vtr.GetCellData().AddArray(trapRCO2_array)
            vtr.GetCellData().AddArray(trapDCO2_array)
            vtr.GetCellData().AddArray(trapMCO2_array)

            post_variable_list.append(VisVariable('trapHCO2', ValueType.Scalar, 1))
            post_variable_list.append(VisVariable('trapRCO2', ValueType.Scalar, 1))
            post_variable_list.append(VisVariable('trapDCO2', ValueType.Scalar, 1))
            post_variable_list.append(VisVariable('trapMCO2', ValueType.Scalar, 1))

            # Put cell-centered data into points
            filter = vtkCellDataToPointData()
            filter.SetInputData(vtr)
            filter.Update()
            vtr_cell_to_points = filter.GetOutput()

            for variabl_name in ['trapHCO2', 'trapRCO2', 'trapDCO2', 'trapMCO2']:
                vtr.GetPointData().AddArray(vtr_cell_to_points.GetPointData().GetArray(variabl_name))

        if len(post_variable_list) > 0:
            self.variable_list["psot"] = post_variable_list
        self.__write_vtk_file(vtr, vtr_path)

    def __write_scalar_result(self, vis_time_step, dataframe, headers):
        #print(f' Writing step {vis_time_step.time_step}: {vis_time_step.time} ...')
        index = self.time_steps_list.index(vis_time_step)
        #vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}.vtr')

        extension = os.path.splitext(self.main_geometry)[1]
        vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}{extension}')
        self.time_steps_list[index].vtu_file_name = vtr_path
        #scalar_vtr = vtkRectilinearGrid()

        if not os.path.exists(vtr_path):
            #vtr_reader = vtkXMLRectilinearGridReader()
            #vtr_reader.SetFileName(self.main_geometry)
            #vtr_reader.Update()
            #scalar_vtr = vtr_reader.GetOutput()
            scalar_vtr = self.__read_vtk_file(self.main_geometry)

            # add time step data
            timesteps = vtkDoubleArray()
            timesteps.SetName("TimeValue")
            timesteps.SetNumberOfTuples(1)
            timesteps.SetNumberOfComponents(1)
            timesteps.SetTuple1(0, vis_time_step.time)
            scalar_vtr.SetFieldData(vtkFieldData())
            scalar_vtr.GetFieldData().AddArray(timesteps)

        else:
            scalar_vtr = self.__read_vtk_file(vtr_path)

        vtr = scalar_vtr

        variable_list = []
        for header in headers:
            array = vtkDoubleArray()
            array.SetName(header)
            vtr.GetCellData().AddArray(array)
            variable_list.append(VisVariable(header, ValueType.Scalar, 1))

        for i in range(0, vtr.GetNumberOfCells()):
            index = self.sequence_dist[i]
            for header in headers:
                value = float(self.__parse_float(dataframe[header][index]))
                vtr.GetCellData().GetArray(header).InsertNextValue(value)

        # update the variable list
        if self.current_out_file not in self.variable_list:
            self.variable_list[self.current_out_file] = variable_list

        # Put cell-centered data into points
        filter = vtkCellDataToPointData()
        filter.SetInputData(vtr)
        filter.Update()
        vtr_cell_to_points = filter.GetOutput()

        for i in range(0, vtr_cell_to_points.GetPointData().GetNumberOfArrays()):
            vtr.GetPointData().AddArray(vtr_cell_to_points.GetPointData().GetArray(i))

        self.__write_vtk_file(vtr, vtr_path)
        print(f' ✓ Timestep {vis_time_step.time_step}:{vis_time_step.time} created: {vtr_path}')

        if VisType.Tecplot not in self.setting.vis_types:
            return

        # Start Tecplot generating
        tec_name = pathlib.Path(self.setting.input_file_paths[0]).stem
        self.tec_scalar_path = os.path.join(self.setting.vis_dir, f'{tec_name}_scalar.dat')
        firstFile = True
        if os.path.isfile(self.tec_scalar_path):
            firstFile = False
        file = open(self.tec_scalar_path, "a")
        if firstFile:
            file.write('TITLE = TECPLOT PLOT \n')
            header_string = '"'+'", "'.join(headers) + '"'
            file.write(f'VARIABLES = "X", "Y", "Z", {header_string}\n')

        tecplot_cell_type = 'BRICK'

        #time_statement = f'ZONE T ="{vis_time_step.time_step}, Time = {vis_time_step.time}", N = {vtu_cell_to_points.GetNumberOfPoints()}, E = {vtu_cell_to_points.GetNumberOfCells()}, F = FEPOINT, ET = {tecplot_cell_type}, SOLUTIONTIME = {vis_time_step.time}\n'

        time_statement = f'ZONE T="{vis_time_step.time_step}, Time = {vis_time_step.time}", I={self.xyz_elem[0] + 1}, J={self.xyz_elem[1] + 1}, K={self.xyz_elem[2] + 1}, SOLUTIONTIME={vis_time_step.time}, DATAPACKING=BLOCK, VARLOCATION=({self.__get_varlocarion_string(headers)})'
        if not firstFile:
            time_statement = f'{time_statement}, D=(1,2,3,FECONNECT)'
        #if self.setting.debug:
        #time_statement = f'ZONE T ="{vis_time_step.time_step}, Time = {vis_time_step.time}", N = {vtu_cell_to_points.GetNumberOfPoints()}, E = {vtu_cell_to_points.GetNumberOfCells()}, F = FEPOINT, ET = {tecplot_cell_type}\n'
        file.write(f'{time_statement}\n')

        # X, Y, Z
        if firstFile:
            for i in range(0, vtr.GetNumberOfPoints()):
                point = vtr.GetPoint(i)
                file.write(str(point[0]) + " ")
            file.write(" \n")

            for i in range(0, vtr.GetNumberOfPoints()):
                point = vtr.GetPoint(i)
                file.write(str(point[1]) + " ")
            file.write(" \n")

            for i in range(0, vtr.GetNumberOfPoints()):
                point = vtr.GetPoint(i)
                file.write(str(point[2]) + " ")
            file.write(" \n")

        # Other data
        for header in headers:
            array = vtr.GetCellData().GetArray(header)
            for e in range(0, vtr.GetNumberOfCells()):
                file.write(str(array.GetValue(e)) + " ")
            file.write(" \n")

        file.close()

    def __get_tec_vector_headers(self, headers):
        vector_headers = []
        for header in headers:
            vector_headers.append(f'{header}_X')
            vector_headers.append(f'{header}_Y')
            vector_headers.append(f'{header}_Z')
        return vector_headers

    def __get_varlocarion_string(self, headers):
        var_string = ''
        for i in range(0, len(headers)):
            if i == len(headers)-1:
                var_string = f'{var_string}{str(i+4)}=CELLCENTERED'
            else:
                var_string = f'{var_string}{str(i+4)}=CELLCENTERED,'
        return var_string

    # write the vector result for one timestep
    def __write_vector_result(self, vis_time_step, dataframe, headers):

        index = self.time_steps_list.index(vis_time_step)
        extension = os.path.splitext(self.main_geometry)[1]
        vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}{extension}')
        self.time_steps_list[index].vtu_file_name = vtr_path

        if not os.path.exists(vtr_path):
            vector_vtr = self.__read_vtk_file(self.main_geometry)
            # add time step data
            timesteps = vtkDoubleArray()
            timesteps.SetName("TimeValue")
            timesteps.SetNumberOfTuples(1)
            timesteps.SetNumberOfComponents(1)
            timesteps.SetTuple1(0, vis_time_step.time)
            vector_vtr.SetFieldData(vtkFieldData())
            vector_vtr.GetFieldData().AddArray(timesteps)

        else:
            vtr_reader = vtkXMLRectilinearGridReader()
            vtr_reader.SetFileName(vtr_path)
            vtr_reader.Update()
            vector_vtr = vtr_reader.GetOutput()

        vtu_reader = vtkXMLUnstructuredGridReader()
        vtu_reader.SetFileName(self.elem_conne_path)
        vtu_reader.Update()
        conne_vtu = vtu_reader.GetOutput()

        variable_list = []
        for header in headers:
            #if not header == 'ELEM1' and not header == 'ELEM2' and not header == 'INDEX':
            array = vtkDoubleArray()
            array.SetName(header)
            array.SetNumberOfComponents(3)
            array.SetNumberOfTuples(vector_vtr.GetNumberOfCells())
            array.FillComponent(0, 0)
            array.FillComponent(1, 0)
            array.FillComponent(2, 0)
            vector_vtr.GetCellData().AddArray(array)
            variable_list.append(VisVariable(header, ValueType.Vector, 3))

        if self.current_out_file not in self.variable_list:
            self.variable_list[self.current_out_file] = variable_list

        for elem_id in range(0, conne_vtu.GetNumberOfPoints()):
            point = conne_vtu.GetPoint(elem_id)
            cellIDs = vtkIdList()
            conne_vtu.GetPointCells(elem_id, cellIDs)
            next_x = -1
            next_y = -1
            next_z = -1
            for i in range(0, cellIDs.GetNumberOfIds()):
                cellID = cellIDs.GetId(i)
                cell = conne_vtu.GetCell(cellID)
                next_id = cell.GetPointId(1)

                if next_id - elem_id == 1:
                    next_x = cellID

                elif next_id - elem_id == self.xyz_elem[0]:
                    if self.xyz_elem[1] == 1:
                        next_z = cellID
                        next_y = -1
                        break

                    else:
                        next_y = cellID

                elif next_id - elem_id == self.xyz_elem[0] * self.xyz_elem[1]:
                    next_z = cellID

            for header in headers:
                #if not header == 'ELEM1' and not header == 'ELEM2' and not header == 'INDEX':
                x_value = 0 if next_x == -1 else self.__parse_float(dataframe[header][next_x])
                y_value = 0 if next_y == -1 else self.__parse_float(dataframe[header][next_y])
                z_value = 0 if next_z == -1 else self.__parse_float(dataframe[header][next_z])
                vector_vtr.GetCellData().GetArray(header).SetComponent(elem_id, 0, x_value)
                vector_vtr.GetCellData().GetArray(header).SetComponent(elem_id, 1, y_value)
                vector_vtr.GetCellData().GetArray(header).SetComponent(elem_id, 2, z_value)

        self.__write_vtk_file(
            vector_vtr, self.time_steps_list[index].vtu_file_name)
        print(f' ✓ Timestep {vis_time_step.time_step}:{vis_time_step.time} created: {vtr_path}')

        if VisType.Tecplot not in self.setting.vis_types:
            return

        # Start Tecplot generating
        tec_name = pathlib.Path(self.setting.input_file_paths[0]).stem
        self.tec_vector_path = os.path.join(self.setting.vis_dir, f'{tec_name}_vector.dat')
        firstFile = True
        if os.path.isfile(self.tec_vector_path):
            firstFile = False
        file = open(self.tec_vector_path, "a")

        vector_headers = self.__get_tec_vector_headers(headers)
        # add header
        if firstFile:
            file.write('TITLE = TECPLOT PLOT \n')
            header_string = '"'+'", "'.join(vector_headers) + '"'
            file.write(f'VARIABLES = "X", "Y", "Z", {header_string}\n')

        time_statement = f'ZONE T="{vis_time_step.time_step}, Time = {vis_time_step.time}", I={self.xyz_elem[0] + 1}, J={self.xyz_elem[1] + 1}, K={self.xyz_elem[2] + 1}, SOLUTIONTIME={vis_time_step.time}, DATAPACKING=BLOCK, VARLOCATION=({self.__get_varlocarion_string(vector_headers)})'
        if not firstFile:
            time_statement = f'{time_statement}, D=(1,2,3,FECONNECT)'
        #if self.setting.debug:
        #time_statement = f'ZONE T ="{vis_time_step.time_step}, Time = {vis_time_step.time}", N = {vtu_cell_to_points.GetNumberOfPoints()}, E = {vtu_cell_to_points.GetNumberOfCells()}, F = FEPOINT, ET = {tecplot_cell_type}\n'
        file.write(f'{time_statement}\n')

        # X, Y, Z
        if firstFile:
            for i in range(0, vector_vtr.GetNumberOfPoints()):
                point = vector_vtr.GetPoint(i)
                file.write(str(point[0]) + " ")
            file.write(" \n")

            for i in range(0, vector_vtr.GetNumberOfPoints()):
                point = vector_vtr.GetPoint(i)
                file.write(str(point[1]) + " ")
            file.write(" \n")

            for i in range(0, vector_vtr.GetNumberOfPoints()):
                point = vector_vtr.GetPoint(i)
                file.write(str(point[2]) + " ")
            file.write(" \n")

        # Other data
        for header in headers:
            array = vector_vtr.GetCellData().GetArray(header)

            for e in range(0, vector_vtr.GetNumberOfCells()):
                file.write(f'{str(array.GetComponent(e, 0))} ')
            file.write(" \n")

            for e in range(0, vector_vtr.GetNumberOfCells()):
                file.write(f'{str(array.GetComponent(e, 1))} ')
            file.write(" \n")

            for e in range(0, vector_vtr.GetNumberOfCells()):
                file.write(f'{str(array.GetComponent(e, 2))} ')
            file.write(" \n")

        file.close()

    def __create_elem_conne(self):

        '''
        read elem and conn files into dataframe
        '''

        elem_colspecs = [(0, 5), (5, 10), (10, 15), (15, 20), (20, 30), (30, 40),
                         (40, 50), (50, 60), (60, 70), (70, 80)] # define column widths
        self.eleme_buffer.seek(0)
        elem_df = pd.read_fwf(self.eleme_buffer, colspecs=elem_colspecs, header=None,
                              names=['ELEME', 'NSEQ', 'NADD', 'MA12',
                                     'VOLX', 'AHTX', 'PMX', 'X', 'Y', 'Z'],
                              dtype={'ELEME': str, 'NSEQ': float, 'NADD': float, 'MA12': str, 'VOLX': float, 'AHTX': float, 'PMX': float, 'X': float, 'Y': float, 'Z': float})

        conn_colspecs = [(0, 5), (5, 10), (10, 15), (15, 20), (20, 25), (25, 30),
                         (30, 40), (40, 50), (50, 60), (60, 70), (70, 80)] # define column widths

        self.conne_buffer.seek(0)
        conn_df = pd.read_fwf(self.conne_buffer, colspecs=conn_colspecs, header=None,
                              names=['ELEM_1', 'ELEM_2', 'NSEQ', 'NAD1', 'NAD2',
                                     'ISOT', 'D1', 'D2', 'AREAX', 'BETAX', 'SIGX'],
                              dtype={'ELEM_1': str, 'ELEM_2': str, 'NSEQ': float, 'NAD1': float, 'NAD2': float, 'ISOT': float, 'D1': float, 'D2': float, 'AREAX': float, 'BETAX': float, 'SIGX': float})

        elem_df['original_index'] = range(0, len(elem_df))
        if self.__check_isReverse(elem_df):
            # Sort all dataframes
            elem_df = elem_df.sort_values(['Z', 'Y', 'X'], ascending = [True, True, True])
            elem_df = elem_df.reset_index(drop=True)
            elem_df.reset_index()

        # create material map
        unique_mats = elem_df['MA12'].unique()
        # Create a dictionary mapping index -> MAT value
        mat_mapping = {mat: i for i, mat in enumerate(unique_mats)}

        '''
        create vtk points from elem
        '''
        vtk_points = vtkPoints()
        elemIDArray = vtkStringArray()
        elemIDArray.SetName('ELEME')
        matArray = vtkStringArray()
        matArray.SetName('Material')
        matIDArray = vtkIntArray()
        matIDArray.SetName('Material_ID')
        pmxArray = vtkDoubleArray()
        pmxArray.SetName('PMX')
        volxArray = vtkDoubleArray()
        volxArray.SetName('VOLX')
        elem_id_dist = {}
        self.sequence_dist = {}
        self.number_of_elements = len(elem_df.values)
        length = len(elem_df.values)
        for i in range(0, len(elem_df.values)):
            # y = elem_df['Y'][i]
            #[x, y, z] = [elem_df['X'][i], elem_df['Y'][i], elem_df['Z'][i]]
            vtk_points.InsertNextPoint(self.__parse_float(elem_df['X'][i]), self.__parse_float(
                elem_df['Y'][i]), self.__parse_float(elem_df['Z'][i]))
            elem_id = elem_df['ELEME'][i].strip()
            elemIDArray.InsertNextValue(elem_id)
            elem_id_dist[elem_id] = i
            original_index = int(elem_df['original_index'][i])
            self.sequence_dist[i] = original_index
            volxArray.InsertNextValue(self.__parse_float(elem_df['VOLX'][i]))
            matArray.InsertNextValue(elem_df['MA12'][i])
            matIDArray.InsertNextValue(mat_mapping[elem_df['MA12'][i]])
            pmxArray.InsertNextValue(self.__parse_float(elem_df['PMX'][i]))

        '''
        compute permeability
        '''
        rock_colspecs = [(0, 5), (30, 40), (40, 50), (50, 60), (100, 110)] # define column widths
        rock_names = ['MAT', 'PER_1', 'PER_2', 'PER_3', 'SGR']
        self.rocks_buffer.seek(0)
        rocks_df = pd.read_fwf(self.rocks_buffer, colspecs=rock_colspecs, header=None,
                               names=rock_names,
                               dtype={'MAT':str, 'PER_1':str, 'PER_2':str, 'PER_3':str, 'SGR':str})

        #sgr_dict = {}
        self.rock_dict = None
        if len(rocks_df) > 0:
            #rock_dict = {}
            rock_dict = []
            for i in range(0, len(rocks_df)):
                #rock_dict[rocks_df['MAT'][i]] = [self.parse_float(rocks_df['PER_1'][i]), self.parse_float(rocks_df['PER_2'][i]), self.parse_float(rocks_df['PER_3'][i])]
                rock_dict.append({
                    'id': i,
                    'rock_name': rocks_df['MAT'][i],
                    'per_1': self.__parse_float(rocks_df['PER_1'][i]),
                    'per_2': self.__parse_float(rocks_df['PER_2'][i]),
                    'per_3': self.__parse_float(rocks_df['PER_3'][i]),
                    'sgr': self.__parse_float(rocks_df['SGR'][i])
                })
                #sgr_dict[rocks_df['MAT'][i]] = self.__parse_float(rocks_df['SGR'][i])
            # compute per
            per_array = vtkDoubleArray()
            per_array.SetName('Permeability')

            sgr_array = vtkDoubleArray()
            sgr_array.SetName('sgr')
            for i in range(0, len(elem_df.values)):
                value = 0

                mat = matArray.GetValue(i)
                if self.__isInt(mat):
                    mat_id = int(mat)
                    value = pmxArray.GetValue(i) * rock_dict[mat_id-1]['per_1']
                    sgr = rock_dict[mat_id-1]['sgr']
                else:
                    rock_item = [rock for rock in rock_dict if rock['rock_name'] == mat]
                    #if mat in rock_dict:
                    value = pmxArray.GetValue(i) * rock_item[0]['per_1']
                    sgr = rock_item[0]['sgr']

                per_array.InsertNextValue(value)
                sgr_array.InsertNextValue(sgr)
            self.rock_dict = rock_dict

        '''
        create connection cell array from conne
        '''
        d1_array = vtkDoubleArray()
        d1_array.SetName('D1')
        d2_array = vtkDoubleArray()
        d2_array.SetName('D2')
        area_array = vtkDoubleArray()
        area_array.SetName('AREAX')

        line_cell_array = vtkCellArray()
        for i in range(0, len(conn_df.values)):
            elem_1_id = conn_df['ELEM_1'][i].strip()
            point_1_id = elem_id_dist[elem_1_id]
            elem_2_id = conn_df['ELEM_2'][i].strip()
            point_2_id = elem_id_dist[elem_2_id]
            # elem_conne_dist[point_id]
            cell = vtkLine()
            cell.GetPointIds().SetNumberOfIds(2)
            cell.GetPointIds().SetId(0, point_1_id)
            cell.GetPointIds().SetId(1, point_2_id)
            line_cell_array.InsertNextCell(cell)
            d1_array.InsertNextValue(conn_df['D1'][i])
            d2_array.InsertNextValue(conn_df['D2'][i])
            area_array.InsertNextValue(conn_df['AREAX'][i])

        '''
        create vtu to display elem and conne
        '''
        self.elem_conne_path = os.path.join(
            self.setting.vis_dir, "elem_conne.vtu")
        elem_conne_vtu = vtkUnstructuredGrid()

        elem_conne_vtu.SetPoints(vtk_points)
        elem_conne_vtu.GetPointData().AddArray(elemIDArray)
        elem_conne_vtu.GetPointData().AddArray(volxArray)
        elem_conne_vtu.GetPointData().AddArray(matArray)
        elem_conne_vtu.GetPointData().AddArray(matIDArray)

        elem_conne_vtu.SetCells(4, line_cell_array)
        elem_conne_vtu.GetCellData().AddArray(d1_array)
        elem_conne_vtu.GetCellData().AddArray(d2_array)
        elem_conne_vtu.GetCellData().AddArray(area_array)
        vtu_writer = vtkXMLUnstructuredGridWriter()
        vtu_writer.SetFileName(self.elem_conne_path)
        vtu_writer.SetInputData(elem_conne_vtu)
        vtu_writer.Write()

        if os.path.exists(self.elem_conne_path):
            print(f' ✓ Elements and connections created: {self.elem_conne_path}')

        self.number_of_connections = elem_conne_vtu.GetNumberOfCells()
|
|
1260
|
+
|
|
1261
|
+
def __create_main_geometry(self):
|
|
1262
|
+
|
|
1263
|
+
|
|
1264
|
+
'''
|
|
1265
|
+
find number of elements in x, y, z directions
|
|
1266
|
+
TODO: check if vtu bound is inside user input bounds
|
|
1267
|
+
'''
|
|
1268
|
+
|
|
1269
|
+
|
|
1270
|
+
# prepare the vtu file
|
|
1271
|
+
vtu_reader = vtkXMLUnstructuredGridReader()
|
|
1272
|
+
vtu_reader.SetFileName(self.elem_conne_path)
|
|
1273
|
+
vtu_reader.Update()
|
|
1274
|
+
elem_conne_vtu = vtu_reader.GetOutput()
|
|
1275
|
+
|
|
1276
|
+
d1_array = elem_conne_vtu.GetCellData().GetArray('D1')
|
|
1277
|
+
d2_array = elem_conne_vtu.GetCellData().GetArray('D2')
|
|
1278
|
+
elemIDArray = elem_conne_vtu.GetPointData().GetArray('ELEME')
|
|
1279
|
+
volxArray = elem_conne_vtu.GetPointData().GetArray('VOLX')
|
|
1280
|
+
matArray = elem_conne_vtu.GetPointData().GetArray('Material')
|
|
1281
|
+
matIDArray = elem_conne_vtu.GetPointData().GetArray('Material_ID')
|
|
1282
|
+
pmxArray = elem_conne_vtu.GetPointData().GetArray('PMX')
|
|
1283
|
+
if self.rock_dict is not None:
|
|
1284
|
+
per_array = elem_conne_vtu.GetCellData().GetArray('Permeability')
|
|
1285
|
+
sgr_array = elem_conne_vtu.GetCellData().GetArray('sgr')
|
|
1286
|
+
|
|
1287
|
+
|
|
1288
|
+
|
|
1289
|
+
# get connection bound
|
|
1290
|
+
vtu_bounds = elem_conne_vtu.GetBounds()
|
|
1291
|
+
|
|
1292
|
+
# create array to keep x, y, z elements
|
|
1293
|
+
xyz_elem = []
|
|
1294
|
+
xyz_elem.append(0) # add x eleme
|
|
1295
|
+
xyz_elem.append(0) # add y eleme
|
|
1296
|
+
xyz_elem.append(0) # add z eleme
|
|
1297
|
+
|
|
1298
|
+
if vtu_bounds[0] == vtu_bounds[1]:
|
|
1299
|
+
xyz_elem[0] = 1
|
|
1300
|
+
if vtu_bounds[2] == vtu_bounds[3]:
|
|
1301
|
+
xyz_elem[1] = 1
|
|
1302
|
+
if vtu_bounds[4] == vtu_bounds[5]:
|
|
1303
|
+
xyz_elem[2] = 1
|
|
1304
|
+
|
|
1305
|
+
# find x, y , z number of structured points
|
|
1306
|
+
for i in range(0, elem_conne_vtu.GetNumberOfPoints()):
|
|
1307
|
+
point = elem_conne_vtu.GetPoint(i)
|
|
1308
|
+
if xyz_elem[0] == 0 and point[0] == vtu_bounds[1]:
|
|
1309
|
+
xyz_elem[0] = i+1
|
|
1310
|
+
if xyz_elem[1] == 0 and point[0] == vtu_bounds[1] and point[1] == vtu_bounds[3]:
|
|
1311
|
+
if xyz_elem[0] != 0:
|
|
1312
|
+
xyz_elem[1] = int((i+1)/xyz_elem[0])
|
|
1313
|
+
break
|
|
1314
|
+
|
|
1315
|
+
if xyz_elem[1] != 0 and xyz_elem[0] != 0:
|
|
1316
|
+
xyz_elem[2] = int(elem_conne_vtu.GetNumberOfPoints() / xyz_elem[0] / xyz_elem[1])
|
|
1317
|
+
self.xyz_elem = xyz_elem
|
|
1318
|
+
|
|
1319
|
+
range_ratio = 1
|
|
1320
|
+
np_xyz_elem = np.array(xyz_elem)
|
|
1321
|
+
if xyz_elem[0] != 0 and xyz_elem[1] != 0 and xyz_elem[2] != 0:
|
|
1322
|
+
range_ratio = np.max(np_xyz_elem)/np.min(np_xyz_elem)
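# Heuristic used below to pick the mesh type (a sketch, not a formal rule):
# if element counts were recovered in all three directions and their ratio is
# reasonable (< 1000), the mesh is treated as a regular grid, or as an
# orthogonal structured grid when a corners file is supplied. Otherwise,
# parallel connection polylines plus a corners file indicate a free-form
# structured grid, and non-parallel ones a polygonal (Voronoi-style) mesh.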
|
|
1323
|
+
|
|
1324
|
+
|
|
1325
|
+
|
|
1326
|
+
if all(xyz_elem) and range_ratio < 1000:
|
|
1327
|
+
if os.path.exists(self.setting.corners_file):
|
|
1328
|
+
self.setting.mesh_type = MeshType.StructuredGridOrth
|
|
1329
|
+
else:
|
|
1330
|
+
self.setting.mesh_type = MeshType.RegularGrid
|
|
1331
|
+
|
|
1332
|
+
else:
|
|
1333
|
+
is_parallel = self.__checkParallel(elem_conne_vtu)
|
|
1334
|
+
# check if polygonal mesh
|
|
1335
|
+
|
|
1336
|
+
if os.path.exists(self.setting.corners_file):
|
|
1337
|
+
if is_parallel:
|
|
1338
|
+
self.setting.mesh_type = MeshType.StructuredGridFree
|
|
1339
|
+
else:
|
|
1340
|
+
self.setting.mesh_type = MeshType.PolygonalMesh
|
|
1341
|
+
else:
|
|
1342
|
+
print('Error: Your mesh type is not supported')
|
|
1343
|
+
sys.exit(1)
|
|
1344
|
+
|
|
1345
|
+
print(f' Mesh type: {self.setting.mesh_type.name}')
|
|
1346
|
+
|
|
1347
|
+
# Read corners file to dataframe
|
|
1348
|
+
if os.path.exists(self.setting.corners_file):
|
|
1349
|
+
corners_buffer = io.StringIO()
|
|
1350
|
+
csv_headers = []
|
|
1351
|
+
line_number = -1
|
|
1352
|
+
with open(self.setting.corners_file) as f:
|
|
1353
|
+
for line in f:
|
|
1354
|
+
line_number = line_number + 1
|
|
1355
|
+
values = line.strip().split(',')
|
|
1356
|
+
values = [x.replace('"', '') for x in values]
|
|
1357
|
+
if line_number == 0:
|
|
1358
|
+
|
|
1359
|
+
csv_headers = [x.strip() for x in values]
|
|
1360
|
+
csv_headers = csv_headers[:3]
|
|
1361
|
+
header_string = ','.join(csv_headers)
|
|
1362
|
+
corners_buffer.write(header_string + '\n')
|
|
1363
|
+
else:
|
|
1364
|
+
corners_buffer.write(','.join(values[:3]) + '\n')
|
|
1365
|
+
corners_buffer.seek(0)
|
|
1366
|
+
corners_df = pd.read_csv(corners_buffer, sep=',', header=0)
|
|
1367
|
+
|
|
1368
|
+
# Write four corners to vtu
|
|
1369
|
+
all_points = vtkPoints()
|
|
1370
|
+
all_cells = vtkCellArray()
|
|
1371
|
+
|
|
1372
|
+
for index, row in corners_df.iterrows():
|
|
1373
|
+
all_points.InsertNextPoint(row["X"], row["Y"], row["Z"])
|
|
1374
|
+
cell = vtkVertex()
|
|
1375
|
+
cell.GetPointIds().SetNumberOfIds(1)
|
|
1376
|
+
cell.GetPointIds().SetId(0, index)
|
|
1377
|
+
all_cells.InsertNextCell(cell)
|
|
1378
|
+
|
|
1379
|
+
corners_vtu = vtkUnstructuredGrid()
|
|
1380
|
+
corners_vtu.SetPoints(all_points)
|
|
1381
|
+
corners_vtu.SetCells(1, all_cells)
|
|
1382
|
+
print(f' Read corners from {self.setting.corners_file}')
|
|
1383
|
+
|
|
1384
|
+
# write four corners to vtu (debugging)
|
|
1385
|
+
if self.setting.debug:
|
|
1386
|
+
corners_vtu_writer = vtkXMLUnstructuredGridWriter()
|
|
1387
|
+
corners_vtu_writer.SetFileName(os.path.join(self.setting.vis_dir, "corners.vtu"))
|
|
1388
|
+
corners_vtu_writer.SetInputData(corners_vtu)
|
|
1389
|
+
corners_vtu_writer.Write()
|
|
1390
|
+
|
|
1391
|
+
|
|
1392
|
+
|
|
1393
|
+
if self.setting.mesh_type == MeshType.RegularGrid:
|
|
1394
|
+
'''
|
|
1395
|
+
* for RGrid from MeshMaker
|
|
1396
|
+
find index for determining x, y, z index
|
|
1397
|
+
'''
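# With x-fastest ordering, representative point indices along each axis are
# strided: x -> i, y -> i * nx, z -> i * nx * ny (nx, ny taken from xyz_elem).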
|
|
1398
|
+
xyz_index = []
|
|
1399
|
+
xyz_index.append([]) # add x index
|
|
1400
|
+
xyz_index.append([]) # add y index
|
|
1401
|
+
xyz_index.append([]) # add z index
|
|
1402
|
+
|
|
1403
|
+
for i in range(0, xyz_elem[0]):
|
|
1404
|
+
xyz_index[0].append(i)
|
|
1405
|
+
for i in range(0, xyz_elem[1]):
|
|
1406
|
+
xyz_index[1].append(i * xyz_elem[0])
|
|
1407
|
+
for i in range(0, xyz_elem[2]):
|
|
1408
|
+
xyz_index[2].append(i * xyz_elem[0] * xyz_elem[1])
|
|
1409
|
+
xyz_coordinates = []
|
|
1410
|
+
xyz_coordinates.append([]) # add x coordinates
|
|
1411
|
+
xyz_coordinates.append([]) # add y coordinates
|
|
1412
|
+
xyz_coordinates.append([]) # add z coordinates
|
|
1413
|
+
|
|
1414
|
+
for key in range(0, len(xyz_index)):
|
|
1415
|
+
for index in xyz_index[key]:
|
|
1416
|
+
point = elem_conne_vtu.GetPoint(index)
|
|
1417
|
+
cellIDs = vtkIdList()
|
|
1418
|
+
# find all cells connected to this point
|
|
1419
|
+
elem_conne_vtu.GetPointCells(index, cellIDs)
|
|
1420
|
+
d1 = 0
|
|
1421
|
+
d2 = 0
|
|
1422
|
+
find_next = False
|
|
1423
|
+
|
|
1424
|
+
for i in range(0, cellIDs.GetNumberOfIds()):
|
|
1425
|
+
cellID = cellIDs.GetId(i)
|
|
1426
|
+
cell = elem_conne_vtu.GetCell(cellID)
|
|
1427
|
+
# find next id in line element
|
|
1428
|
+
|
|
1429
|
+
next_id = cell.GetPointId(1)
|
|
1430
|
+
if next_id == index:
|
|
1431
|
+
next_id = cell.GetPointId(0)
|
|
1432
|
+
if next_id in xyz_index[key]:
|
|
1433
|
+
d1 = d1_array.GetValue(cellID)
|
|
1434
|
+
d2 = d2_array.GetValue(cellID)
|
|
1435
|
+
find_next = True
|
|
1436
|
+
break
|
|
1437
|
+
|
|
1438
|
+
# if it has any connection to other node
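# D1 and D2 are the CONNE distances from the two element centers to their
# common interface, so the node coordinates along this axis can be rebuilt
# from the centers: first node = center - d1, interior nodes = center + d1,
# and the last node adds 2*d2 to step across the final element. User-supplied
# bounds, when known, override the first and last values.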
|
|
1439
|
+
if find_next:
|
|
1440
|
+
# add first node
|
|
1441
|
+
if len(xyz_coordinates[key]) == 0:
|
|
1442
|
+
first_value = point[key] - d1
|
|
1443
|
+
if self.setting.known_bounds:
|
|
1444
|
+
first_value = self.setting.bounds[key*2]
|
|
1445
|
+
xyz_coordinates[key].append(first_value)
|
|
1446
|
+
|
|
1447
|
+
# add current node
|
|
1448
|
+
xyz_coordinates[key].append(point[key] + d1)
|
|
1449
|
+
|
|
1450
|
+
# add last node
|
|
1451
|
+
if len(xyz_coordinates[key]) == xyz_elem[key]:
|
|
1452
|
+
last_value = point[key] + d1 + (2*d2)
|
|
1453
|
+
if self.setting.known_bounds:
|
|
1454
|
+
last_value = self.setting.bounds[key*2+1]
|
|
1455
|
+
xyz_coordinates[key].append(last_value)
|
|
1456
|
+
|
|
1457
|
+
# if there is only one element in this dimension
|
|
1458
|
+
elif len(xyz_index[key]) == 1:
|
|
1459
|
+
if self.setting.known_bounds:
|
|
1460
|
+
xyz_coordinates[key].append(self.setting.bounds[key*2])
|
|
1461
|
+
xyz_coordinates[key].append(
|
|
1462
|
+
self.setting.bounds[key*2+1])
|
|
1463
|
+
else:
|
|
1464
|
+
if point[key] == 0:
|
|
1465
|
+
# find max elem
|
|
1466
|
+
xyz_elem_np_array = np.array(
|
|
1467
|
+
(xyz_elem[0], xyz_elem[1], xyz_elem[2]))
|
|
1468
|
+
max_elem = xyz_elem_np_array.max()
|
|
1469
|
+
index = xyz_elem.index(max_elem)
|
|
1470
|
+
guess_value = (
|
|
1471
|
+
vtu_bounds[index*2+1] - vtu_bounds[index*2]) / max_elem
|
|
1472
|
+
xyz_coordinates[key].append(-1 * guess_value)
|
|
1473
|
+
xyz_coordinates[key].append(guess_value)
|
|
1474
|
+
else:
|
|
1475
|
+
xyz_coordinates[key].append(
|
|
1476
|
+
point[key] - abs(point[key]))
|
|
1477
|
+
xyz_coordinates[key].append(
|
|
1478
|
+
point[key] + abs(point[key]))
|
|
1479
|
+
|
|
1480
|
+
'''
|
|
1481
|
+
create a vtk rectilinear grid (rgrid) from the recovered coordinates and write it out
|
|
1482
|
+
'''
|
|
1483
|
+
# self.rgrid_vtr = os.path.join(self.setting.vis_dir, "temp_rgrid.vtr")
|
|
1484
|
+
xyz_coords_array = []
|
|
1485
|
+
xyz_coords_array.append(vtkDoubleArray())
|
|
1486
|
+
xyz_coords_array.append(vtkDoubleArray())
|
|
1487
|
+
xyz_coords_array.append(vtkDoubleArray())
|
|
1488
|
+
|
|
1489
|
+
for key in range(0, len(xyz_coords_array)):
|
|
1490
|
+
for value in xyz_coordinates[key]:
|
|
1491
|
+
xyz_coords_array[key].InsertNextValue(value)
|
|
1492
|
+
|
|
1493
|
+
rGrid = vtkRectilinearGrid()
|
|
1494
|
+
rGrid.SetDimensions(xyz_elem[0]+1, xyz_elem[1]+1, xyz_elem[2]+1)
|
|
1495
|
+
|
|
1496
|
+
rGrid.SetXCoordinates(xyz_coords_array[0])
|
|
1497
|
+
rGrid.SetYCoordinates(xyz_coords_array[1])
|
|
1498
|
+
rGrid.SetZCoordinates(xyz_coords_array[2])
|
|
1499
|
+
rGrid.GetCellData().AddArray(elemIDArray)
|
|
1500
|
+
rGrid.GetCellData().AddArray(volxArray)
|
|
1501
|
+
rGrid.GetCellData().AddArray(matArray)
|
|
1502
|
+
|
|
1503
|
+
rGrid.GetCellData().AddArray(matIDArray)
|
|
1504
|
+
if self.rock_dict is not None:
|
|
1505
|
+
rGrid.GetCellData().AddArray(per_array)
|
|
1506
|
+
rGrid.GetCellData().AddArray(sgr_array)
|
|
1507
|
+
rGrid.GetCellData().AddArray(pmxArray)
|
|
1508
|
+
|
|
1509
|
+
self.main_geometry = os.path.join(
|
|
1510
|
+
self.setting.vis_dir, "main_geometry.vtr")
|
|
1511
|
+
self.__write_vtk_file(rGrid, self.main_geometry)
|
|
1512
|
+
|
|
1513
|
+
|
|
1514
|
+
if self.setting.mesh_type == MeshType.StructuredGridOrth:
|
|
1515
|
+
|
|
1516
|
+
#corners_df.to_csv(os.path.join(self.setting.vis_dir, "corners.csv"), index=False)
|
|
1517
|
+
# Step 1: Group by `x` and `y`, and sort `z` within each group
|
|
1518
|
+
corners_df = corners_df.groupby(['X', 'Y'], group_keys=False).apply(lambda group: group.sort_values(by='Z'))
|
|
1519
|
+
# Step 2: Set the new index using the `X` and `Y` columns
|
|
1520
|
+
corners_df = corners_df.set_index(['X', 'Y']).sort_index()
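# Each distinct (X, Y) column of the corners file is expected to carry one Z
# value per layer interface; the structured-grid points are rebuilt layer by
# layer, sweeping X fastest within each Y row.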
|
|
1521
|
+
|
|
1522
|
+
distinct_x = corners_df.index.get_level_values('X').unique()
|
|
1523
|
+
distinct_y = corners_df.index.get_level_values('Y').unique()
|
|
1524
|
+
|
|
1525
|
+
vts = vtkStructuredGrid()
|
|
1526
|
+
vts.SetDimensions(self.xyz_elem[0]+1, self.xyz_elem[1]+1, self.xyz_elem[2]+1)
|
|
1527
|
+
vts_points = vtkPoints()
|
|
1528
|
+
|
|
1529
|
+
for z_index in range(0, self.xyz_elem[2]+1):
|
|
1530
|
+
for y_index in range(0, len(distinct_y)):
|
|
1531
|
+
for x_index in range(0, len(distinct_x)):
|
|
1532
|
+
x = distinct_x[x_index]
|
|
1533
|
+
y = distinct_y[y_index]
|
|
1534
|
+
z_value = corners_df.loc[(x, y), 'Z'].iloc[z_index]
|
|
1535
|
+
vts_points.InsertNextPoint(x, y, z_value)
|
|
1536
|
+
|
|
1537
|
+
vts.SetPoints(vts_points)
|
|
1538
|
+
vts.GetCellData().AddArray(elemIDArray)
|
|
1539
|
+
vts.GetCellData().AddArray(volxArray)
|
|
1540
|
+
vts.GetCellData().AddArray(matArray)
|
|
1541
|
+
|
|
1542
|
+
vts.GetCellData().AddArray(matIDArray)
|
|
1543
|
+
if self.rock_dict is not None:
|
|
1544
|
+
vts.GetCellData().AddArray(per_array)
|
|
1545
|
+
vts.GetCellData().AddArray(sgr_array)
|
|
1546
|
+
self.main_geometry = os.path.join(self.setting.vis_dir, "main_geometry.vts")
|
|
1547
|
+
self.__write_vtk_file(vts, self.main_geometry)
|
|
1548
|
+
|
|
1549
|
+
|
|
1550
|
+
if self.setting.mesh_type == MeshType.StructuredGridFree:
|
|
1551
|
+
#initialize the locator
|
|
1552
|
+
pointTree = vtkPointLocator()
|
|
1553
|
+
pointTree.SetDataSet(corners_vtu)
|
|
1554
|
+
pointTree.BuildLocator()
|
|
1555
|
+
|
|
1556
|
+
cell_array = vtkCellArray()
|
|
1557
|
+
|
|
1558
|
+
for i in range(0, elem_conne_vtu.GetNumberOfPoints()):
|
|
1559
|
+
point = elem_conne_vtu.GetPoint(i)
|
|
1560
|
+
result = vtkIdList()
|
|
1561
|
+
# find the closest corner points to the center of each element
|
|
1562
|
+
pointTree.FindClosestNPoints(8, point, result)
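# Assumption: the 8 corner points nearest to an element center are that
# element's corners. They come back in arbitrary order, so they are reordered
# into VTK hexahedron ordering below before the cell is built.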
|
|
1563
|
+
result.Sort()
|
|
1564
|
+
|
|
1565
|
+
cell = vtkHexahedron()
|
|
1566
|
+
cell.GetPointIds().SetNumberOfIds(8)
|
|
1567
|
+
# need to sort the points into the right (VTK hexahedron) order
|
|
1568
|
+
points_array = []
|
|
1569
|
+
|
|
1570
|
+
for j in range(0, 8):
|
|
1571
|
+
points_array.append(corners_vtu.GetPoint(result.GetId(j)))
|
|
1572
|
+
|
|
1573
|
+
points_array = np.array(points_array)
|
|
1574
|
+
result_index = self.__reorder_hexahedron(points_array)
|
|
1575
|
+
|
|
1576
|
+
for j in range(0, 8):
|
|
1577
|
+
cell.GetPointIds().SetId(j, result.GetId(result_index[j]))
|
|
1578
|
+
cell_array.InsertNextCell(cell)
|
|
1579
|
+
|
|
1580
|
+
auto_corner_vtu = vtkUnstructuredGrid()
|
|
1581
|
+
auto_corner_vtu.SetPoints(all_points)
|
|
1582
|
+
auto_corner_vtu.SetCells(12, cell_array)
|
|
1583
|
+
|
|
1584
|
+
# TODO: compute mesh quality and fix bad cells
|
|
1585
|
+
|
|
1586
|
+
|
|
1587
|
+
auto_corner_vtu.GetCellData().AddArray(elemIDArray)
|
|
1588
|
+
auto_corner_vtu.GetCellData().AddArray(volxArray)
|
|
1589
|
+
auto_corner_vtu.GetCellData().AddArray(matArray)
|
|
1590
|
+
if self.rock_dict is not None:
|
|
1591
|
+
auto_corner_vtu.GetCellData().AddArray(per_array)
|
|
1592
|
+
auto_corner_vtu.GetCellData().AddArray(sgr_array)
|
|
1593
|
+
auto_corner_vtu.GetCellData().AddArray(matIDArray)
|
|
1594
|
+
self.main_geometry = os.path.join(self.setting.vis_dir, "main_geometry.vtu")
|
|
1595
|
+
self.__write_vtk_file(auto_corner_vtu, self.main_geometry)
|
|
1596
|
+
|
|
1597
|
+
|
|
1598
|
+
if self.setting.mesh_type == MeshType.PolygonalMesh:
|
|
1599
|
+
|
|
1600
|
+
# == Create `distinct_points` and `labeled_temp_elem` ==
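# Overview of the polygonal-mesh path: collapse the element centers to their
# distinct (x, y) locations, build a 2D Voronoi diagram from them, snap the
# Voronoi vertices onto the user-supplied corner points, then extrude each 2D
# polygon through the layers into VTK polyhedra.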
|
|
1601
|
+
# Extract points from the grid
|
|
1602
|
+
points = np.array([elem_conne_vtu.GetPoint(i) for i in range(elem_conne_vtu.GetNumberOfPoints())])
|
|
1603
|
+
# Convert to Pandas DataFrame for easier manipulation
|
|
1604
|
+
df = pd.DataFrame(points, columns=["x", "y", "z"])
|
|
1605
|
+
# Round (x, y) values to avoid floating-point precision issues
|
|
1606
|
+
df[["x", "y"]] = df[["x", "y"]].round(10)
|
|
1607
|
+
|
|
1608
|
+
#df.to_csv(os.path.join(self.setting.vis_dir, 'all_points.csv'))
|
|
1609
|
+
# Find distinct (x, y) sets and create an index mapping
|
|
1610
|
+
distinct_xy = df[["x", "y"]].drop_duplicates().reset_index(drop=True)
|
|
1611
|
+
distinct_xy["Elem_Index"] = distinct_xy.index # Assign an index to each unique (x, y)
|
|
1612
|
+
#distinct_xy.to_csv(os.path.join(self.setting.vis_dir, 'distinct_xy.csv'))
|
|
1613
|
+
|
|
1614
|
+
# add distinct_xy to a vtkpoints object
|
|
1615
|
+
all_points = vtkPoints()
|
|
1616
|
+
all_points.SetDataTypeToDouble()
|
|
1617
|
+
for i in range(len(distinct_xy)):
|
|
1618
|
+
values = distinct_xy.iloc[i].values
|
|
1619
|
+
all_points.InsertNextPoint(values[0], values[1], 0)
|
|
1620
|
+
|
|
1621
|
+
# create a polydata object
|
|
1622
|
+
distinct_points = vtkPolyData()
|
|
1623
|
+
distinct_points.SetPoints(all_points)
|
|
1624
|
+
|
|
1625
|
+
cell_array = vtkCellArray()
|
|
1626
|
+
for i in range(all_points.GetNumberOfPoints()):
|
|
1627
|
+
vertex = vtkVertex()
|
|
1628
|
+
vertex.GetPointIds().SetId(0, i)
|
|
1629
|
+
cell_array.InsertNextCell(vertex)
|
|
1630
|
+
|
|
1631
|
+
distinct_points.SetVerts(cell_array)
|
|
1632
|
+
xy_index_labels = distinct_xy["Elem_Index"].to_numpy()
|
|
1633
|
+
elem_id_array = vtkIntArray()
|
|
1634
|
+
elem_id_array.SetName("Elem_Index")
|
|
1635
|
+
for i in range(len(xy_index_labels)):
|
|
1636
|
+
elem_id_array.InsertNextValue(int(xy_index_labels[i]))
|
|
1637
|
+
distinct_points.GetPointData().AddArray(elem_id_array)
|
|
1638
|
+
|
|
1639
|
+
if self.setting.debug:
|
|
1640
|
+
# Write the distinct points to a VTU file for debugging
|
|
1641
|
+
writer = vtkXMLPolyDataWriter()
|
|
1642
|
+
writer.SetInputData(distinct_points)
|
|
1643
|
+
writer.SetFileName(os.path.join(self.setting.vis_dir, 'distinct_points.vtp'))
|
|
1644
|
+
writer.Write()
|
|
1645
|
+
|
|
1646
|
+
|
|
1647
|
+
# Merge the index back to the original DataFrame
|
|
1648
|
+
df = df.merge(distinct_xy, on=["x", "y"], how="left")
|
|
1649
|
+
|
|
1650
|
+
# Rank Z in ascending order within each (x, y) group (lowest Z gets Horizon_ID 0)
|
|
1651
|
+
df["Horizon_ID"] = df.groupby(["x", "y"])["z"].rank(method="first", ascending=True).astype(int) - 1
|
|
1652
|
+
|
|
1653
|
+
# Convert the labels to NumPy arrays
|
|
1654
|
+
|
|
1655
|
+
z_order_labels = df["Horizon_ID"].to_numpy()
|
|
1656
|
+
xy_index_labels = df["Elem_Index"].to_numpy()
|
|
1657
|
+
|
|
1658
|
+
# Add labels to the VTU file
|
|
1659
|
+
xy_index_array = vtkIntArray()
|
|
1660
|
+
xy_index_array.SetName("Elem_Index")
|
|
1661
|
+
xy_index_array.SetNumberOfComponents(1)
|
|
1662
|
+
xy_index_array.SetNumberOfTuples(len(xy_index_labels))
|
|
1663
|
+
|
|
1664
|
+
z_order_array = vtkIntArray()
|
|
1665
|
+
z_order_array.SetName("Horizon_ID")
|
|
1666
|
+
z_order_array.SetNumberOfComponents(1)
|
|
1667
|
+
z_order_array.SetNumberOfTuples(len(z_order_labels))
|
|
1668
|
+
|
|
1669
|
+
for i, (xy_idx, z_ord) in enumerate(zip(xy_index_labels, z_order_labels)):
|
|
1670
|
+
xy_index_array.SetValue(i, int(xy_idx))
|
|
1671
|
+
z_order_array.SetValue(i, int(z_ord))
|
|
1672
|
+
|
|
1673
|
+
labeled_temp_elem = vtkUnstructuredGrid()
|
|
1674
|
+
labeled_temp_elem.DeepCopy(elem_conne_vtu)
|
|
1675
|
+
labeled_temp_elem.GetPointData().AddArray(xy_index_array)
|
|
1676
|
+
labeled_temp_elem.GetPointData().AddArray(z_order_array)
|
|
1677
|
+
# Write the modified VTU file with labels
|
|
1678
|
+
if self.setting.debug:
|
|
1679
|
+
writer = vtkXMLUnstructuredGridWriter()
|
|
1680
|
+
writer.SetFileName(os.path.join(self.setting.vis_dir, "labeled_temp_elem.vtu"))
|
|
1681
|
+
writer.SetInputData(labeled_temp_elem)
|
|
1682
|
+
writer.Write()
|
|
1683
|
+
|
|
1684
|
+
|
|
1685
|
+
# == Create a VTK 2D `voronoi` mesh ==
|
|
1686
|
+
# Create a Voronoi diagram from the cell centers
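# vtkVoronoi2D tessellates the points in a plane; the distinct element centers
# were flattened to z = 0 above so the tessellation happens in the XY plane.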
|
|
1687
|
+
print("crating voronoi...")
|
|
1688
|
+
voro = vtkVoronoi2D()
|
|
1689
|
+
voro.SetInputData(distinct_points)
|
|
1690
|
+
voro.SetMaximumNumberOfTileClips(distinct_points.GetNumberOfPoints())
|
|
1691
|
+
#voro.set
|
|
1692
|
+
voro.Update()
|
|
1693
|
+
voronoi = voro.GetOutput()
|
|
1694
|
+
|
|
1695
|
+
if self.setting.debug:
|
|
1696
|
+
# Write the Voronoi mesh to a VTU file for debugging
|
|
1697
|
+
voronoi_writer = vtkXMLPolyDataWriter()
|
|
1698
|
+
voronoi_writer.SetInputData(voronoi)
|
|
1699
|
+
voronoi_writer.SetFileName(os.path.join(self.setting.vis_dir, 'voronoi.vtp'))
|
|
1700
|
+
voronoi_writer.Write()
|
|
1701
|
+
print(f' ✓ Main voronoi created: {os.path.join(self.setting.vis_dir, "voronoi.vtp")}')
|
|
1702
|
+
|
|
1703
|
+
|
|
1704
|
+
# Count Layer
|
|
1705
|
+
|
|
1706
|
+
number_of_layers = elem_conne_vtu.GetNumberOfPoints() // distinct_points.GetNumberOfPoints()
|
|
1707
|
+
print(f'number_of_layers: {number_of_layers}')
|
|
1708
|
+
|
|
1709
|
+
# == Create `distinct_corners_points` and `labeled_corners` ==
|
|
1710
|
+
# Clean the grid
|
|
1711
|
+
clean_filter = vtkCleanUnstructuredGrid()
|
|
1712
|
+
clean_filter.SetInputData(corners_vtu)
|
|
1713
|
+
clean_filter.Update()
|
|
1714
|
+
labeled_corners = clean_filter.GetOutput()
|
|
1715
|
+
|
|
1716
|
+
# Extract points from the grid
|
|
1717
|
+
points = np.array([labeled_corners.GetPoint(i) for i in range(labeled_corners.GetNumberOfPoints())])
|
|
1718
|
+
|
|
1719
|
+
# Convert to Pandas DataFrame for easier manipulation
|
|
1720
|
+
df = pd.DataFrame(points, columns=["x", "y", "z"])
|
|
1721
|
+
|
|
1722
|
+
# Round (x, y) values to avoid floating-point precision issues
|
|
1723
|
+
df[["x", "y"]] = df[["x", "y"]].round(6)
|
|
1724
|
+
|
|
1725
|
+
# Find distinct (x, y) sets and create an index mapping
|
|
1726
|
+
distinct_xy = df[["x", "y"]].drop_duplicates().reset_index(drop=True)
|
|
1727
|
+
|
|
1728
|
+
distinct_xy["XY_Index"] = distinct_xy.index # Assign an index to each unique (x, y)
|
|
1729
|
+
|
|
1730
|
+
|
|
1731
|
+
# add distinct_xy to a vtkpoints object
|
|
1732
|
+
index_array = vtkIntArray()
|
|
1733
|
+
index_array.SetName("XY_Index")
|
|
1734
|
+
all_points = vtkPoints()
|
|
1735
|
+
all_points.SetDataTypeToDouble()
|
|
1736
|
+
for i in range(len(distinct_xy)):
|
|
1737
|
+
values = distinct_xy.iloc[i].values
|
|
1738
|
+
all_points.InsertNextPoint(values[0], values[1], 0)
|
|
1739
|
+
index_array.InsertNextValue(int(values[2]))
|
|
1740
|
+
# create a polydata object
|
|
1741
|
+
distinct_corners_points = vtkPolyData()
|
|
1742
|
+
distinct_corners_points.SetPoints(all_points)
|
|
1743
|
+
distinct_corners_points.GetPointData().AddArray(index_array)
|
|
1744
|
+
|
|
1745
|
+
cell_array = vtkCellArray()
|
|
1746
|
+
for i in range(all_points.GetNumberOfPoints()):
|
|
1747
|
+
vertex = vtkVertex()
|
|
1748
|
+
vertex.GetPointIds().SetId(0, i)
|
|
1749
|
+
cell_array.InsertNextCell(vertex)
|
|
1750
|
+
|
|
1751
|
+
distinct_corners_points.SetVerts(cell_array)
|
|
1752
|
+
|
|
1753
|
+
if self.setting.debug:
|
|
1754
|
+
# Write the distinct points to a VTU file for debugging
|
|
1755
|
+
writer = vtkXMLPolyDataWriter()
|
|
1756
|
+
writer.SetInputData(distinct_corners_points)
|
|
1757
|
+
writer.SetFileName(os.path.join(self.setting.vis_dir, 'distinct_corners_points.vtp'))
|
|
1758
|
+
writer.Write()
|
|
1759
|
+
|
|
1760
|
+
|
|
1761
|
+
|
|
1762
|
+
# Merge the index back to the original DataFrame
|
|
1763
|
+
df = df.merge(distinct_xy, on=["x", "y"], how="left")
|
|
1764
|
+
|
|
1765
|
+
# Rank Z in ascending order within each (x, y) group (lowest Z gets Z_Order 0)
|
|
1766
|
+
df["Z_Order"] = df.groupby(["x", "y"])["z"].rank(method="first", ascending=True).astype(int) - 1
|
|
1767
|
+
|
|
1768
|
+
# Convert the labels to NumPy arrays
|
|
1769
|
+
xy_index_labels = df["XY_Index"].to_numpy()
|
|
1770
|
+
z_order_labels = df["Z_Order"].to_numpy()
|
|
1771
|
+
|
|
1772
|
+
# Add labels to the VTU file
|
|
1773
|
+
xy_index_array = vtkIntArray()
|
|
1774
|
+
xy_index_array.SetName("XY_Index")
|
|
1775
|
+
xy_index_array.SetNumberOfComponents(1)
|
|
1776
|
+
xy_index_array.SetNumberOfTuples(len(xy_index_labels))
|
|
1777
|
+
|
|
1778
|
+
z_order_array = vtkIntArray()
|
|
1779
|
+
z_order_array.SetName("Layer_ID")
|
|
1780
|
+
z_order_array.SetNumberOfComponents(1)
|
|
1781
|
+
z_order_array.SetNumberOfTuples(len(z_order_labels))
|
|
1782
|
+
|
|
1783
|
+
for i, (xy_idx, z_ord) in enumerate(zip(xy_index_labels, z_order_labels)):
|
|
1784
|
+
xy_index_array.SetValue(i, int(xy_idx))
|
|
1785
|
+
z_order_array.SetValue(i, int(z_ord))
|
|
1786
|
+
|
|
1787
|
+
# Attach the label arrays to the unstructured grid
|
|
1788
|
+
labeled_corners.GetPointData().AddArray(xy_index_array)
|
|
1789
|
+
labeled_corners.GetPointData().AddArray(z_order_array)
|
|
1790
|
+
# Write the modified VTU file with labels
|
|
1791
|
+
if self.setting.debug:
|
|
1792
|
+
writer = vtkXMLUnstructuredGridWriter()
|
|
1793
|
+
writer.SetFileName(os.path.join(self.setting.vis_dir, "labeled_corners.vtu"))
|
|
1794
|
+
writer.SetInputData(labeled_corners)
|
|
1795
|
+
writer.Write()
|
|
1796
|
+
|
|
1797
|
+
|
|
1798
|
+
# == Create `distinct_corners_points_voronoi` ==
|
|
1799
|
+
#- use `distinct_points` as index, `voronoi` as map, search `distinct_corners_points` to recreate the correct voronoi mesh
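#- each raw Voronoi vertex is snapped to the supplied corner point found within
#  a hard-coded 1-unit radius; vertices with no such corner (cells clipped at
#  the domain boundary) fall back to the nearest corner point, with the cell's
#  own center inserted once so the boundary polygon stays closed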
|
|
1800
|
+
|
|
1801
|
+
distinct_corners_points_locator = vtkPointLocator()
|
|
1802
|
+
distinct_corners_points_locator.SetDataSet(distinct_corners_points)
|
|
1803
|
+
distinct_corners_points_locator.BuildLocator()
|
|
1804
|
+
|
|
1805
|
+
correct_voronoi_cell_array = vtkCellArray()
|
|
1806
|
+
elem_id_array = vtkIntArray()
|
|
1807
|
+
elem_id_array.SetName("Elem_Index")
|
|
1808
|
+
# go through each point in distinct_points
|
|
1809
|
+
for i in range(distinct_points.GetNumberOfPoints()):
|
|
1810
|
+
point = distinct_points.GetPoint(i)
|
|
1811
|
+
voronoi_cell = voronoi.GetCell(i)
|
|
1812
|
+
# go through each point in the voronoi cell
|
|
1813
|
+
polygon = vtkPolygon()
|
|
1814
|
+
center_point_inserted = False
|
|
1815
|
+
center_point_id = distinct_corners_points_locator.FindClosestPoint(point)
|
|
1816
|
+
for j in range(voronoi_cell.GetNumberOfPoints()):
|
|
1817
|
+
cell_point_id = voronoi_cell.GetPointId(j)
|
|
1818
|
+
cell_point = voronoi.GetPoint(cell_point_id)
|
|
1819
|
+
dist2 = reference(0)
|
|
1820
|
+
corners_point_id = distinct_corners_points_locator.FindClosestPointWithinRadius(1, [cell_point[0], cell_point[1], cell_point[2]], dist2)
|
|
1821
|
+
|
|
1822
|
+
if corners_point_id == -1:
|
|
1823
|
+
closest_corners_point_id = distinct_corners_points_locator.FindClosestPoint(cell_point)
|
|
1824
|
+
if (not center_point_inserted) and j+1 < voronoi_cell.GetNumberOfPoints():
|
|
1825
|
+
next_cell_point_id = voronoi_cell.GetPointId(j+1)
|
|
1826
|
+
next_cell_point = voronoi.GetPoint(next_cell_point_id)
|
|
1827
|
+
dist2 = reference(0)
|
|
1828
|
+
next_corners_point_id = distinct_corners_points_locator.FindClosestPointWithinRadius(1, [next_cell_point[0], next_cell_point[1], next_cell_point[2]], dist2)
|
|
1829
|
+
# next point on boundary, so insert the center point after this point
|
|
1830
|
+
if next_corners_point_id == -1:
|
|
1831
|
+
polygon.GetPointIds().InsertNextId(closest_corners_point_id)
|
|
1832
|
+
polygon.GetPointIds().InsertNextId(center_point_id)
|
|
1833
|
+
# next point is not on boundary, so insert the center point before this point
|
|
1834
|
+
else:
|
|
1835
|
+
polygon.GetPointIds().InsertNextId(center_point_id)
|
|
1836
|
+
polygon.GetPointIds().InsertNextId(closest_corners_point_id)
|
|
1837
|
+
center_point_inserted = True
|
|
1838
|
+
|
|
1839
|
+
else:
|
|
1840
|
+
polygon.GetPointIds().InsertNextId(closest_corners_point_id)
|
|
1841
|
+
|
|
1842
|
+
else:
|
|
1843
|
+
polygon.GetPointIds().InsertNextId(corners_point_id)
|
|
1844
|
+
correct_voronoi_cell_array.InsertNextCell(polygon)
|
|
1845
|
+
elem_id_array.InsertNextValue(i)
|
|
1846
|
+
|
|
1847
|
+
distinct_corners_points_voronoi = vtkPolyData()
|
|
1848
|
+
distinct_corners_points_voronoi.SetPoints(distinct_corners_points.GetPoints())
|
|
1849
|
+
distinct_corners_points_voronoi.SetPolys(correct_voronoi_cell_array)
|
|
1850
|
+
distinct_corners_points_voronoi.GetCellData().AddArray(elem_id_array)
|
|
1851
|
+
distinct_corners_points_voronoi.GetPointData().AddArray(distinct_corners_points.GetPointData().GetArray("XY_Index"))
|
|
1852
|
+
|
|
1853
|
+
if self.setting.debug:
|
|
1854
|
+
# Write the Voronoi mesh to a VTU file for debugging
|
|
1855
|
+
voronoi_writer = vtkXMLPolyDataWriter()
|
|
1856
|
+
voronoi_writer.SetInputData(distinct_corners_points_voronoi)
|
|
1857
|
+
voronoi_writer.SetFileName(os.path.join(self.setting.vis_dir, 'distinct_corners_points_voronoi.vtp'))
|
|
1858
|
+
voronoi_writer.Write()
|
|
1859
|
+
|
|
1860
|
+
# == create the geometry ==
|
|
1861
|
+
#- use `labeled_temp_elem` points as index (cell sequence)
|
|
1862
|
+
#- from layer 0 to layer max, horizon 0 to horizon max-1
|
|
1863
|
+
#- use `distinct_corners_points_voronoi` as 2D polygon map to find the actual points from `labeled_corners`
|
|
1864
|
+
#- create each polyhedron by adding top, bottom and side surfaces
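#- VTK polyhedra take a flat face stream of ids: [number_of_faces,
#  n_points_face_0, id, id, ..., n_points_face_1, id, ...]; that stream is
#  assembled per cell below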
|
|
1865
|
+
|
|
1866
|
+
main_geometray = vtkUnstructuredGrid()
|
|
1867
|
+
main_geometray.SetPoints(labeled_corners.GetPoints())
|
|
1868
|
+
|
|
1869
|
+
labeled_corners_points = np.array([labeled_corners.GetPoint(i) for i in range(labeled_corners.GetNumberOfPoints())])
|
|
1870
|
+
labeled_corners_df = pd.DataFrame(labeled_corners_points, columns=["x", "y", "z"])
|
|
1871
|
+
for i in range(labeled_corners.GetPointData().GetNumberOfArrays()):
|
|
1872
|
+
array_name = labeled_corners.GetPointData().GetArrayName(i)
|
|
1873
|
+
array = labeled_corners.GetPointData().GetArray(i)
|
|
1874
|
+
array_values = np.array([array.GetValue(j) for j in range(array.GetNumberOfTuples())])
|
|
1875
|
+
labeled_corners_df[array_name] = array_values
|
|
1876
|
+
#labeled_corners_df.describe()
|
|
1877
|
+
|
|
1878
|
+
horizon_id_array = labeled_temp_elem.GetPointData().GetArray("Horizon_ID")
|
|
1879
|
+
elem_id_array = labeled_temp_elem.GetPointData().GetArray("Elem_Index")
|
|
1880
|
+
|
|
1881
|
+
for index in range(labeled_temp_elem.GetNumberOfPoints()):
|
|
1882
|
+
#for index in range(10):
|
|
1883
|
+
# get voronoi cell
|
|
1884
|
+
voronoi_cell = distinct_corners_points_voronoi.GetCell(elem_id_array.GetValue(index))
|
|
1885
|
+
cell_xy_index = [ voronoi_cell.GetPointId(i) for i in range(voronoi_cell.GetNumberOfPoints())]
|
|
1886
|
+
|
|
1887
|
+
horizon_id = horizon_id_array.GetValue(index)
|
|
1888
|
+
# top face
|
|
1889
|
+
|
|
1890
|
+
polyhedron_faces = [] # top, bottom, sides
|
|
1891
|
+
top_face = []
|
|
1892
|
+
for xy_index in cell_xy_index:
|
|
1893
|
+
# find point in labeled_corners_df
|
|
1894
|
+
matching_indexes = labeled_corners_df.index[(labeled_corners_df["Layer_ID"] == horizon_id + 1) & (labeled_corners_df["XY_Index"] == xy_index)]
|
|
1895
|
+
top_face.append(matching_indexes[0])
|
|
1896
|
+
polyhedron_faces.append(top_face)
|
|
1897
|
+
|
|
1898
|
+
bottom_face = []
|
|
1899
|
+
for xy_index in cell_xy_index:
|
|
1900
|
+
# find point in labeled_corners_df
|
|
1901
|
+
matching_indexes = labeled_corners_df.index[(labeled_corners_df["Layer_ID"] == horizon_id) & (labeled_corners_df["XY_Index"] == xy_index)]
|
|
1902
|
+
bottom_face.append(matching_indexes[0])
|
|
1903
|
+
polyhedron_faces.append(bottom_face)
|
|
1904
|
+
|
|
1905
|
+
for j in range(len(cell_xy_index)):
|
|
1906
|
+
if j == len(cell_xy_index) -1:
|
|
1907
|
+
side_face = [top_face[0], top_face[j], bottom_face[j], bottom_face[0]]
|
|
1908
|
+
else:
|
|
1909
|
+
side_face = [top_face[j+1], top_face[j], bottom_face[j], bottom_face[j+1]]
|
|
1910
|
+
polyhedron_faces.append(side_face)
|
|
1911
|
+
|
|
1912
|
+
# create a polyhedron
|
|
1913
|
+
number_of_faces = voronoi_cell.GetNumberOfPoints() + 2
|
|
1914
|
+
polyhedron_faces_idlist = vtkIdList()
|
|
1915
|
+
# Number of faces that make up the cell.
|
|
1916
|
+
polyhedron_faces_idlist.InsertNextId(number_of_faces)
|
|
1917
|
+
for face in polyhedron_faces:
|
|
1918
|
+
# Number of points in the face == numberOfFaceVertices
|
|
1919
|
+
polyhedron_faces_idlist.InsertNextId(len(face))
|
|
1920
|
+
# Insert the pointIds for that face.
|
|
1921
|
+
for point_id in face:
    polyhedron_faces_idlist.InsertNextId(point_id)
|
|
1922
|
+
|
|
1923
|
+
main_geometray.InsertNextCell(VTK_POLYHEDRON, polyhedron_faces_idlist)
|
|
1924
|
+
|
|
1925
|
+
|
|
1926
|
+
main_geometray.GetCellData().AddArray(elemIDArray)
|
|
1927
|
+
main_geometray.GetCellData().AddArray(volxArray)
|
|
1928
|
+
main_geometray.GetCellData().AddArray(matArray)
|
|
1929
|
+
if self.rock_dict is not None:
|
|
1930
|
+
main_geometray.GetCellData().AddArray(per_array)
|
|
1931
|
+
main_geometray.GetCellData().AddArray(sgr_array)
|
|
1932
|
+
main_geometray.GetCellData().AddArray(matIDArray)
|
|
1934
|
+
main_geometray.GetCellData().AddArray(horizon_id_array)
|
|
1935
|
+
self.main_geometry = os.path.join(
|
|
1936
|
+
self.setting.vis_dir, "main_geometry.vtu")
|
|
1937
|
+
self.__write_vtk_file(main_geometray, self.main_geometry)
|
|
1938
|
+
|
|
1939
|
+
if os.path.exists(self.main_geometry):
|
|
1940
|
+
print(f' ✓ Main geometry created: {self.main_geometry}')
|
|
1941
|
+
|
|
1942
|
+
def __reorder_hexahedron(self, points):
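"""
Reorder 8 corner points into VTK_HEXAHEDRON ordering: the four lowest-Z points
form the bottom face and the four highest-Z points the top face, each sorted
counterclockwise by angle around the bottom face's centroid. Returns the
reordering as indices into the input array.
"""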
|
|
1943
|
+
# Step 1: Sort points by Z coordinate to separate bottom and top layers
|
|
1944
|
+
sorted_indices = np.argsort(points[:, 2]) # Sort indices by z-coordinate
|
|
1945
|
+
bottom_indices = sorted_indices[:4] # 4 lowest Z values
|
|
1946
|
+
top_indices = sorted_indices[4:] # 4 highest Z values
|
|
1947
|
+
|
|
1948
|
+
# Step 2: Compute centroid of bottom plane for angle sorting
|
|
1949
|
+
centroid = np.mean(points[bottom_indices, :2], axis=0) # Only x and y
|
|
1950
|
+
|
|
1951
|
+
# Step 3: Compute angles from centroid and sort counterclockwise
|
|
1952
|
+
def angle_from_centroid(idx):
|
|
1953
|
+
p = points[idx]
|
|
1954
|
+
return np.arctan2(p[1] - centroid[1], p[0] - centroid[0])
|
|
1955
|
+
|
|
1956
|
+
bottom_order = sorted(bottom_indices, key=angle_from_centroid)
|
|
1957
|
+
top_order = sorted(top_indices, key=angle_from_centroid)
|
|
1958
|
+
|
|
1959
|
+
# Step 4: Assign correct VTK order
|
|
1960
|
+
reordered_indices = np.array(bottom_order + top_order)
|
|
1961
|
+
|
|
1962
|
+
return reordered_indices
|
|
1963
|
+
|
|
1964
|
+
def __checkParallel(self, elem_conne):
|
|
1965
|
+
dir1 = self.__get_direction_from_polyline(elem_conne.GetCell(0))
|
|
1966
|
+
dir2 = self.__get_direction_from_polyline(elem_conne.GetCell(1))
|
|
1967
|
+
if self.__are_parallel(dir1, dir2):
|
|
1968
|
+
return True
|
|
1969
|
+
return False
|
|
1970
|
+
|
|
1971
|
+
def __get_direction_from_polyline(self, polyline):
|
|
1972
|
+
"""
|
|
1973
|
+
Given a vtkPolyLine, compute a representative normalized direction vector.
|
|
1974
|
+
Here we use the vector from the first point to the last point.
|
|
1975
|
+
"""
|
|
1976
|
+
points = polyline.GetPoints()
|
|
1977
|
+
num_points = points.GetNumberOfPoints()
|
|
1978
|
+
if num_points < 2:
|
|
1979
|
+
return None # Not enough points to define a direction.
|
|
1980
|
+
|
|
1981
|
+
first_pt = np.array(points.GetPoint(0))
|
|
1982
|
+
first_pt[2] = 0
|
|
1983
|
+
last_pt = np.array(points.GetPoint(num_points - 1))
|
|
1984
|
+
last_pt[2] = 0
|
|
1985
|
+
direction = last_pt - first_pt
|
|
1986
|
+
norm = np.linalg.norm(direction)
|
|
1987
|
+
if norm == 0:
|
|
1988
|
+
return None # Degenerate polyline
|
|
1989
|
+
return direction / norm
|
|
1990
|
+
|
|
1991
|
+
def __are_parallel(self, dir1, dir2, tol=1e-6):
|
|
1992
|
+
"""
|
|
1993
|
+
Two vectors are parallel if their cross product is nearly zero
|
|
1994
|
+
(i.e., the magnitude of the cross product is less than a tolerance)
|
|
1995
|
+
"""
|
|
1996
|
+
cross_prod = np.cross(dir1, dir2)
|
|
1997
|
+
return np.linalg.norm(cross_prod) < tol
|
|
1998
|
+
|
|
1999
|
+
def __check_isReverse(self, elem_df):
|
|
2000
|
+
"""
|
|
2001
|
+
Checks if the given DataFrame has a reverse sequence and determines the mesh plane.
|
|
2002
|
+
This method performs the following steps:
|
|
2003
|
+
1. Checks the real plane by analyzing the standard deviation of the 'X', 'Y', and 'Z' columns.
|
|
2004
|
+
2. Determines if the increasing sequence is in the order of X -> Y -> Z.
|
|
2005
|
+
3. If the sequence is not in that order, the data is flagged as reversed.
|
|
2006
|
+
Parameters:
|
|
2007
|
+
elem_df (pandas.DataFrame): The DataFrame containing the elements to be checked.
|
|
2008
|
+
Returns:
|
|
2009
|
+
bool: True if the sequence is reversed, False otherwise.
|
|
2010
|
+
"""
|
|
2011
|
+
describe = elem_df.describe()
|
|
2012
|
+
|
|
2013
|
+
# 1. check real plane
|
|
2014
|
+
mesh_plane = MeshPlane.unknown
|
|
2015
|
+
still_col = [1, 1, 1]
|
|
2016
|
+
if describe['X']['count'] == 0 or describe['X']['std'] == 0:
|
|
2017
|
+
still_col[0] = 0
|
|
2018
|
+
if describe['Y']['count'] == 0 or describe['Y']['std'] == 0:
|
|
2019
|
+
still_col[1] = 0
|
|
2020
|
+
if describe['Z']['count'] == 0 or describe['Z']['std'] == 0:
|
|
2021
|
+
still_col[2] = 0
|
|
2022
|
+
|
|
2023
|
+
|
|
2024
|
+
if still_col == [1, 1, 1]:
|
|
2025
|
+
mesh_plane = MeshPlane.XYZ
|
|
2026
|
+
elif still_col == [1, 0, 1]:
|
|
2027
|
+
mesh_plane = MeshPlane.XZ
|
|
2028
|
+
elif still_col == [1, 0, 0]:
|
|
2029
|
+
mesh_plane = MeshPlane.X
|
|
2030
|
+
elif still_col == [1, 1, 0]:
|
|
2031
|
+
mesh_plane = MeshPlane.XY
|
|
2032
|
+
elif still_col == [0, 1, 1]:
|
|
2033
|
+
mesh_plane = MeshPlane.YZ
|
|
2034
|
+
elif still_col == [0, 0, 1]:
|
|
2035
|
+
mesh_plane = MeshPlane.Z
|
|
2036
|
+
elif still_col == [0, 1, 0]:
|
|
2037
|
+
mesh_plane = MeshPlane.Y
|
|
2038
|
+
|
|
2039
|
+
self.setting.mesh_plane = mesh_plane
|
|
2040
|
+
|
|
2041
|
+
# check whether the first varying axis changes within the first few rows (x-fastest ordering)
|
|
2042
|
+
axis_cols = ['X', 'Y', 'Z']
|
|
2043
|
+
head = elem_df.head()
|
|
2044
|
+
head_describe = head.describe()
|
|
2045
|
+
|
|
2046
|
+
|
|
2047
|
+
is_reverse = False
|
|
2048
|
+
# check for an increase in the first varying column
|
|
2049
|
+
for i in range(0, 3):
|
|
2050
|
+
if still_col[i] == 1:
|
|
2051
|
+
if head_describe[axis_cols[i]]['std'] == 0:
|
|
2052
|
+
is_reverse = True
|
|
2053
|
+
break
|
|
2054
|
+
else:
|
|
2055
|
+
break
|
|
2056
|
+
|
|
2057
|
+
#self.setting.isReverse = is_reverse
|
|
2058
|
+
return is_reverse
|
|
2059
|
+
|
|
2060
|
+
def __write_vtk_file(self, file, file_path):
|
|
2061
|
+
|
|
2062
|
+
extension = os.path.splitext(file_path)[1]
|
|
2063
|
+
writer = None
|
|
2064
|
+
if extension == '.vtr':
|
|
2065
|
+
writer = vtkXMLRectilinearGridWriter()
|
|
2066
|
+
|
|
2067
|
+
elif extension == '.vts':
|
|
2068
|
+
writer = vtkXMLStructuredGridWriter()
|
|
2069
|
+
|
|
2070
|
+
elif extension == '.vtu':
|
|
2071
|
+
writer = vtkXMLUnstructuredGridWriter()
|
|
2072
|
+
|
|
2073
|
+
writer.SetFileName(file_path)
|
|
2074
|
+
writer.SetInputData(file)
|
|
2075
|
+
writer.SetDataModeToBinary()
|
|
2076
|
+
writer.Write()
|
|
2077
|
+
|
|
2078
|
+
def __read_vtk_file(self, file_path):
|
|
2079
|
+
|
|
2080
|
+
extension = os.path.splitext(file_path)[1]
|
|
2081
|
+
if extension == '.vtr':
|
|
2082
|
+
reader = vtkXMLRectilinearGridReader()
|
|
2083
|
+
reader.SetFileName(file_path)
|
|
2084
|
+
reader.Update()
|
|
2085
|
+
return reader.GetOutput()
|
|
2086
|
+
elif extension == '.vts':
|
|
2087
|
+
reader = vtkXMLStructuredGridReader()
|
|
2088
|
+
reader.SetFileName(file_path)
|
|
2089
|
+
reader.Update()
|
|
2090
|
+
return reader.GetOutput()
|
|
2091
|
+
elif extension == '.vtu':
|
|
2092
|
+
reader = vtkXMLUnstructuredGridReader()
|
|
2093
|
+
reader.SetFileName(file_path)
|
|
2094
|
+
reader.Update()
|
|
2095
|
+
return reader.GetOutput()
|
|
2096
|
+
|
|
2097
|
+
def __parse_float(self, s):
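"""
Parse a float, tolerating Fortran-style fields where the exponent marker is
dropped: for example '0.1234-10' is read as 0.1234E-10 and '0.1234+10' as
0.1234E+10. NaN and unparseable values fall back to 0.
"""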
|
|
2098
|
+
try:
|
|
2099
|
+
value = float(s)
|
|
2100
|
+
if np.isnan(value):
|
|
2101
|
+
return 0
|
|
2102
|
+
return value
|
|
2103
|
+
|
|
2104
|
+
except ValueError:
|
|
2105
|
+
if '-' in s:
|
|
2106
|
+
segments = s.split('-')
|
|
2107
|
+
if len(segments) == 2:
|
|
2108
|
+
return float(segments[0] + 'E-' + segments[1])
|
|
2109
|
+
if len(segments) == 3:
|
|
2110
|
+
return float('-0' + segments[1] + 'E-' + segments[2])
|
|
2111
|
+
if '+' in s:
|
|
2112
|
+
segments = s.split('+')
|
|
2113
|
+
if len(segments) == 2:
|
|
2114
|
+
return float(segments[0] + 'E+' + segments[1])
|
|
2115
|
+
if len(segments) == 3:
|
|
2116
|
+
return float('0' + segments[1] + 'E+' + segments[2])
|
|
2117
|
+
else:
|
|
2118
|
+
print(f'{s} cannot be parsed as a float.')
|
|
2119
|
+
return 0
|
|
2120
|
+
|
|
2121
|
+
def __isInt(self, s):
|
|
2122
|
+
try:
|
|
2123
|
+
value = int(s)
|
|
2124
|
+
if np.isnan(value):
|
|
2125
|
+
return False
|
|
2126
|
+
return True
|
|
2127
|
+
|
|
2128
|
+
except ValueError:
|
|
2129
|
+
return False
|
|
2130
|
+
|
|
2131
|
+
def __check_TOUGH_version(self):
|
|
2132
|
+
out_file_path = self.current_out_file
|
|
2133
|
+
extension = os.path.splitext(out_file_path)[1].lower()
|
|
2134
|
+
if extension == '.tec':
|
|
2135
|
+
self.setting.out_format_type = OutType.TEC
|
|
2136
|
+
self.setting.tough_version = ToughVersion.TOUGHReact
|
|
2137
|
+
return
|
|
2138
|
+
#return OutType.TEC
|
|
2139
|
+
elif extension == '.csv':
|
|
2140
|
+
self.setting.out_format_type = OutType.CSV
|
|
2141
|
+
line_number = 0
|
|
2142
|
+
with open(out_file_path) as f:
|
|
2143
|
+
for line in f:
|
|
2144
|
+
if line_number == 0:
|
|
2145
|
+
first_col = line.split(',')[0].strip().lower()
|
|
2146
|
+
if 'time' in first_col:
|
|
2147
|
+
self.setting.tough_version = ToughVersion.TOUGH2
|
|
2148
|
+
return
|
|
2149
|
+
if line_number == 2:
|
|
2150
|
+
values = line.strip().split(',')
|
|
2151
|
+
if len(values) == 1 and 'time' in values[0].strip().lower():
|
|
2152
|
+
self.setting.tough_version = ToughVersion.TOUGH3
|
|
2153
|
+
return
|
|
2154
|
+
print(f'The format of your output file, {out_file_path}, is not correct. Please double-check your file.')
|
|
2155
|
+
sys.exit(1)
|
|
2156
|
+
line_number = line_number + 1
|
|
2157
|
+
|
|
2158
|
+
else:
|
|
2159
|
+
print(f'The format of your output file, {out_file_path}, is not supported.\nPlease use either .csv or .tec file format instead.')
|
|
2160
|
+
sys.exit(1)
|
|
2161
|
+
|
|
2162
|
+
def __check_if_block_end(self, line, line_number):
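# Returns True when a line marks the end of a TOUGH input block: an 'ENDCY'
# record, a blank or very short line (tolerated on the first line after the
# block keyword), or a record carrying '--' in columns 6-7.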
|
|
2163
|
+
|
|
2164
|
+
if 'ENDCY' in line:
|
|
2165
|
+
return True
|
|
2166
|
+
|
|
2167
|
+
if len(line) < 10 and line_number == 1:
|
|
2168
|
+
return False
|
|
2169
|
+
|
|
2170
|
+
if len(line) < 10:
|
|
2171
|
+
return True
|
|
2172
|
+
if line.startswith('\n') or line.startswith(' \n'):
|
|
2173
|
+
return True
|
|
2174
|
+
if line.startswith('\r') or line.startswith(' \r'):
|
|
2175
|
+
return True
|
|
2176
|
+
|
|
2177
|
+
trimmed = line.lstrip()
|
|
2178
|
+
if trimmed[5] == "-" and trimmed[6] == "-":
|
|
2179
|
+
return True
|
|
2180
|
+
return False
|
|
2181
|
+
|
|
2182
|
+
def __write_json(self):
|
|
2183
|
+
# Convert each object to a dict
|
|
2184
|
+
# Assume self.variable_list maps each key to a list of VisVariable objects
|
|
2185
|
+
variable_list_dicts = []
|
|
2186
|
+
for key in self.variable_list:
|
|
2187
|
+
for variable in self.variable_list[key]:
|
|
2188
|
+
variable_list_dicts.append(variable.to_dict())
|
|
2189
|
+
|
|
2190
|
+
|
|
2191
|
+
# Write to JSON file
|
|
2192
|
+
path = os.path.join(self.setting.vis_dir, "variable_list.json")
|
|
2193
|
+
with open(path, "w") as f:
|
|
2194
|
+
json.dump(variable_list_dicts, f, indent=2)
|
|
2195
|
+
|
|
2196
|
+
timestep_list_dicts = [timestep.__dict__ for timestep in self.time_steps_list]
|
|
2197
|
+
|
|
2198
|
+
|
|
2199
|
+
# Write to JSON file
|
|
2200
|
+
path = os.path.join(self.setting.vis_dir, "timestep_list.json")
|
|
2201
|
+
with open(path, "w") as f:
|
|
2202
|
+
json.dump(timestep_list_dicts, f, indent=2)
|
|
2203
|
+
|
|
2204
|
+
def __fix_negative_zero(self, x):
|
|
2205
|
+
return 0.0 if x == 0 else x
|
|
2206
|
+
|
|
2207
|
+
class vis_charting:
|
|
2208
|
+
def __init__(self, case_dir):
|
|
2209
|
+
self.vis_dir = None
|
|
2210
|
+
self.variable_list = None
|
|
2211
|
+
vis_dir = os.path.join(case_dir, "tough_vis", "paraview")
|
|
2212
|
+
|
|
2213
|
+
|
|
2214
|
+
if os.path.isdir(vis_dir):
|
|
2215
|
+
print("vis_dir:", vis_dir)
|
|
2216
|
+
else:
|
|
2217
|
+
print(f'Case vis_dir({vis_dir}) not found.')
|
|
2218
|
+
sys.exit(0)
|
|
2219
|
+
|
|
2220
|
+
variable_list_path = os.path.join(case_dir, "tough_vis", "variable_list.json")
|
|
2221
|
+
if os.path.isfile(variable_list_path):
|
|
2222
|
+
with open(variable_list_path, "r") as f:
|
|
2223
|
+
self.variable_list = json.load(f)
|
|
2224
|
+
else:
|
|
2225
|
+
print(f'Case variable_list.json({variable_list_path}) not found.')
|
|
2226
|
+
sys.exit(0)
|
|
2227
|
+
|
|
2228
|
+
timestep_list_path = os.path.join(case_dir, "tough_vis", "timestep_list.json")
|
|
2229
|
+
if os.path.isfile(timestep_list_path):
|
|
2230
|
+
with open(timestep_list_path, "r") as f:
|
|
2231
|
+
self.time_steps_list = json.load(f)
|
|
2232
|
+
else:
|
|
2233
|
+
print(f'Case timestep_list.json({timestep_list_path}) not found.')
|
|
2234
|
+
sys.exit(0)
|
|
2235
|
+
|
|
2236
|
+
self.vis_dir = vis_dir
|
|
2237
|
+
|
|
2238
|
+
|
|
2239
|
+
|
|
2240
|
+
def return_dataframe(self, element_id_list = None, scalar_variable_list = None, time_step_id_list = None, vector_variable_list = []):
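"""
Collect cell values from the per-timestep VTK files into one pandas DataFrame
with columns element_id, time_step_id and time, plus one column per scalar
variable and _x/_y/_z columns per vector variable. Passing None for
element_id_list, scalar_variable_list or time_step_id_list selects everything;
vector_variable_list defaults to no vector variables.

Hypothetical usage (variable names depend on your TOUGH output):
    chart = vis_charting("/path/to/case")
    df = chart.return_dataframe(element_id_list=[0, 1],
                                scalar_variable_list=["PRES", "TEMP"])
"""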
|
|
2241
|
+
if self.vis_dir is None:
|
|
2242
|
+
print("vis_dir not set.")
|
|
2243
|
+
sys.exit(0)
|
|
2244
|
+
if self.variable_list is None:
|
|
2245
|
+
print("variable_list not set.")
|
|
2246
|
+
sys.exit(0)
|
|
2247
|
+
if self.time_steps_list is None:
|
|
2248
|
+
print("time_steps_list not set.")
|
|
2249
|
+
sys.exit(0)
|
|
2250
|
+
|
|
2251
|
+
# 1. find timesteps
|
|
2252
|
+
query_time_step_list = []
|
|
2253
|
+
if time_step_id_list is None:
|
|
2254
|
+
query_time_step_list = self.time_steps_list
|
|
2255
|
+
else:
|
|
2256
|
+
for time_step in self.time_steps_list:
|
|
2257
|
+
if time_step["time_step"] in time_step_id_list:
|
|
2258
|
+
query_time_step_list.append(time_step)
|
|
2259
|
+
|
|
2260
|
+
# 2. find scalar variables
|
|
2261
|
+
query_variables = []
|
|
2262
|
+
if scalar_variable_list is None:
|
|
2263
|
+
query_variables = self.variable_list
|
|
2264
|
+
else:
|
|
2265
|
+
for variable in self.variable_list:
|
|
2266
|
+
if variable["variable_name"] in scalar_variable_list:
|
|
2267
|
+
if variable["value_type"] == "Scalar":
|
|
2268
|
+
query_variables.append(variable)
|
|
2269
|
+
|
|
2270
|
+
# 3. find vector variables
|
|
2271
|
+
|
|
2272
|
+
for variable in self.variable_list:
|
|
2273
|
+
if variable["variable_name"] in vector_variable_list:
|
|
2274
|
+
if variable["value_type"] == "Vector":
|
|
2275
|
+
query_variables.append(variable)
|
|
2276
|
+
|
|
2277
|
+
|
|
2278
|
+
# create dataframe that contains all variables and timesteps
|
|
2279
|
+
df = pd.DataFrame()
|
|
2280
|
+
rows = []
|
|
2281
|
+
for time_step in query_time_step_list:
|
|
2282
|
+
time_step_id = time_step["time_step"]
|
|
2283
|
+
time = time_step["time"]
|
|
2284
|
+
vtk = self.__read_vtk_file(time_step["vtu_file_name"])
|
|
2285
|
+
#print(f"Reading time: {time_step['time']}")
|
|
2286
|
+
|
|
2287
|
+
for i in (range(vtk.GetNumberOfCells()) if element_id_list is None else element_id_list):
|
|
2288
|
+
if i >= vtk.GetNumberOfCells():
|
|
2289
|
+
print(f"Element ID {i} is out of range for the current VTK file.")
|
|
2290
|
+
sys.exit(1)
|
|
2291
|
+
|
|
2292
|
+
new_row = {"element_id": i, "time_step_id": time_step_id, "time": time}
|
|
2293
|
+
for variable in query_variables:
|
|
2294
|
+
variable_name = variable["variable_name"]
|
|
2295
|
+
vtk_array = vtk.GetCellData().GetArray(variable_name)
|
|
2296
|
+
if vtk_array is None:
|
|
2297
|
+
print(f"Variable '{variable_name}' not found in the VTK file.")
|
|
2298
|
+
continue
|
|
2299
|
+
if variable["value_type"] == "Scalar":
|
|
2300
|
+
new_row[variable_name] = vtk_array.GetValue(i)
|
|
2301
|
+
elif variable["value_type"] == "Vector":
|
|
2302
|
+
new_row[variable_name + "_x"] = vtk_array.GetComponent(i, 0)
|
|
2303
|
+
new_row[variable_name + "_y"] = vtk_array.GetComponent(i, 1)
|
|
2304
|
+
new_row[variable_name + "_z"] = vtk_array.GetComponent(i, 2)
|
|
2305
|
+
|
|
2306
|
+
#df = pd.concat([df, pd.DataFrame([new_row])], ignore_index=True)
|
|
2307
|
+
rows.append(new_row)
|
|
2308
|
+
|
|
2309
|
+
df = pd.DataFrame(rows)
|
|
2310
|
+
return df
|
|
2311
|
+
|
|
2312
|
+
|
|
2313
|
+
|
|
2314
|
+
|
|
2315
|
+
|
|
2316
|
+
def __read_vtk_file(self, file_path):
|
|
2317
|
+
|
|
2318
|
+
extension = os.path.splitext(file_path)[1]
|
|
2319
|
+
if extension == '.vtr':
|
|
2320
|
+
reader = vtkXMLRectilinearGridReader()
|
|
2321
|
+
reader.SetFileName(file_path)
|
|
2322
|
+
reader.Update()
|
|
2323
|
+
return reader.GetOutput()
|
|
2324
|
+
elif extension == '.vts':
|
|
2325
|
+
reader = vtkXMLStructuredGridReader()
|
|
2326
|
+
reader.SetFileName(file_path)
|
|
2327
|
+
reader.Update()
|
|
2328
|
+
return reader.GetOutput()
|
|
2329
|
+
elif extension == '.vtu':
|
|
2330
|
+
reader = vtkXMLUnstructuredGridReader()
|
|
2331
|
+
reader.SetFileName(file_path)
|
|
2332
|
+
reader.Update()
|
|
2333
|
+
return reader.GetOutput()
|
|
2334
|
+
|
|
2335
|
+
|
|
2336
|
+
|