toughanimator 0.1.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,2740 @@
1
+ import math
2
+ import os
3
+ import io
4
+ import sys
5
+ import re
6
+ import shutil
7
+ import numpy as np
8
+ import pandas as pd
9
+ from vtkmodules.all import *
10
+ import pathlib
11
+ import json
12
+
13
+ from enum import Enum
14
+
15
+ class MeshType(Enum):
16
+ RegularGrid = 1
17
+ StructuredGridOrth = 2
18
+ StructuredGridFree = 3
19
+ PolygonalMesh = 4
20
+
21
+ class MeshPlane():
22
+ unknown = -1
23
+ X = 1
24
+ Y = 2
25
+ Z = 3
26
+ XZ = 4
27
+ XY = 5
28
+ YZ = 6
29
+ XYZ = 7
30
+
31
+ class OutType():
32
+ Unknown = 0
33
+ TEC = 1
34
+ CSV = 2
35
+
36
+ class VisType():
37
+ Tecplot = 1
38
+ ParaView = 2
39
+ MatplotLib = 3
40
+
41
+ class ToughVersion(Enum):
42
+ Unknown = 0
43
+ TOUGH2 = 2
44
+ TOUGH3 = 3
45
+ TOUGHReact = 4
46
+
47
+ class ValueType(Enum):
48
+ Unknown = 0
49
+ Scalar = 1
50
+ Vector = 3
51
+
52
+ class VisTimeStep:
53
+ def __init__(self, time_step, iteration, time):
54
+ self.selected = True
55
+ self.time_step = time_step
56
+ self.iter = iteration
57
+ self.time = time
58
+ self.vtu_file_name = ""
59
+
60
+ class VisVariable:
61
+ def __init__(self, name, value_type, number_of_components):
62
+ self.variable_name = name
63
+ self.value_type = value_type
64
+ self.number_of_components = number_of_components
65
+ def to_dict(self):
66
+ return {
67
+ "variable_name": self.variable_name,
68
+ "value_type": self.value_type.name, # or .value if you prefer
69
+ "number_of_components": self.number_of_components # or .value if you prefer
70
+ }
71
+
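Since to_dict reduces the enum to its name, a list of VisVariable objects round-trips to JSON directly; a minimal sketch (the variable name is hypothetical):

# Illustrative sketch: serializing VisVariable metadata via to_dict.
variables = [VisVariable("Pressure", ValueType.Scalar, 1)]
print(json.dumps([v.to_dict() for v in variables], indent=2))
# [{"variable_name": "Pressure", "value_type": "Scalar", "number_of_components": 1}]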
72
+ class VisSetting:
73
+ def __init__(self, input_file_paths, out_file_paths, vis_dir, corners_file="unknown", out_format_type=OutType.Unknown, tough_version = ToughVersion.Unknown, vis_types=[VisType.ParaView, VisType.Tecplot], mesh_type=MeshType.RegularGrid, debug=False, eos="ECO2N", minc=False, selected_variables_scalar = [], selected_variables_vector = [], ngv=False):
74
+ self.mesh_type = mesh_type
75
+ self.out_format_type = out_format_type
76
+ self.vis_types = vis_types
77
+ self.input_file_paths = input_file_paths
78
+ self.out_file_paths = out_file_paths
79
+ self.vis_dir = vis_dir
80
+ self.known_bounds = False
81
+ self.debug = debug
82
+ self.tough_version = tough_version
83
+ self.mesh_plane = MeshPlane.unknown
84
+ self.isReverse = False
85
+ self.corners_file = corners_file
86
+ self.debug = debug
87
+ self.eos = eos
88
+ self.minc = minc
89
+ self.selected_variables_scalar = selected_variables_scalar
90
+ self.selected_variables_vector = selected_variables_vector
91
+ self.ngv = ngv
92
+
93
+
94
+ def setBounds(self, x_bounds, y_bounds, z_bounds):
95
+ self.bounds = np.concatenate((x_bounds, y_bounds, z_bounds))
96
+ self.known_bounds = True
97
+
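setBounds packs the three (min, max) pairs into one array, which __check_bounds later indexes as [x_min, x_max, y_min, y_max, z_min, z_max]; a minimal sketch with hypothetical values:

# Illustrative sketch: the bounds layout produced by setBounds.
s = VisSetting(input_file_paths=[], out_file_paths=[], vis_dir=".")
s.setBounds([0.0, 1000.0], [0.0, 500.0], [-2000.0, 0.0])
# s.bounds -> array([0., 1000., 0., 500., -2000., 0.])  (X min/max, Y min/max, Z min/max)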
98
+ class vis_reader:
99
+ def __init__(self, case_dir):
100
+ self.setting = None
101
+ self.main_geometry = None
102
+ self.incon_path = None
103
+ #self.variable_list = []
104
+ self.variable_list = {}
105
+ self.time_steps_list = []
106
+ self.rock_dict = []
107
+ if os.path.isdir(case_dir):
108
+ config_path = os.path.join(case_dir, "config.json")
109
+ if os.path.exists(config_path):
110
+ with open(config_path, "r", encoding="utf-8") as config_file:
111
+ config = json.load(config_file)
112
+ else:
113
+ print(f"Config file:({config_path}) not found. Please create it.")
114
+ sys.exit(1)
115
+ else:
116
+ print(f"Case directory:({case_dir}) not found. Please check it.")
117
+ sys.exit(1)
118
+
119
+
120
+ if "input_files" not in config:
121
+ print(f"Input files not found in config.json. Please check it.")
122
+ sys.exit(1)
123
+ if "output_files" not in config:
124
+ print(f"Output files not found in config.json. Please check it.")
125
+ sys.exit(1)
126
+
127
+
128
+
129
+ setting = VisSetting(
130
+ input_file_paths = [os.path.join(case_dir, f) for f in config["input_files"]],
131
+ out_file_paths = [os.path.join(case_dir, f) for f in config["output_files"]],
132
+ vis_dir = config["vis_dir"] if "vis_dir" in config else case_dir,
133
+ corners_file = os.path.join(case_dir, config["corners_file"] if "corners_file" in config else "None"),
134
+ debug = config['debug'] if 'debug' in config else False,
135
+ #eos = config['EOS'] if 'EOS' in config else "ECO2N",
136
+ eos = next((v for k, v in config.items() if k.lower() == "eos"),"ECO2N"),
137
+ #minc = config['MINC'] if 'MINC' in config else False,
138
+ minc = next((v for k, v in config.items() if k.lower() == "minc"),False),
139
+ selected_variables_scalar = config['selected_variables_scalar'] if 'selected_variables_scalar' in config else [],
140
+ selected_variables_vector = config['selected_variables_vector'] if 'selected_variables_vector' in config else [],
141
+ #ngv= config['NGV'] if 'NGV' in config else False
142
+ ngv = next((v for k, v in config.items() if k.lower() == "ngv"),False)
143
+ )
144
+
145
+ # check if the project is using MINC
146
+ minc_file = os.path.join(case_dir, 'MINC')
147
+ if os.path.exists(minc_file):
148
+ setting.minc = True
149
+ self.minc_file = minc_file
150
+ self.__check_num_of_minc()
151
+ if minc_file in setting.input_file_paths:
152
+ setting.input_file_paths.remove(minc_file)
153
+
154
+ for input_file_path in setting.input_file_paths:
155
+ if not os.path.exists(input_file_path):
156
+ print(f'Can\'t find input file: ({input_file_path}). Please check the path or remove it from the config.json.')
157
+ sys.exit(1)
158
+
159
+ for out_file_path in setting.out_file_paths:
160
+ if not os.path.exists(out_file_path):
161
+ print(f'Can\'t find output file: ({out_file_path}). Please check the path or remove it from the config.json.')
162
+ sys.exit(1)
163
+
164
+ if not os.path.isdir(setting.vis_dir):
165
+ print(f'Can\'t find directory: ({setting.vis_dir}). Please check the path or remove it from the config.json.')
166
+ sys.exit(1)
167
+ else:
168
+ vis_path = os.path.join(setting.vis_dir, "tough_vis")
169
+ # delete the directory if it exists
170
+ if os.path.isdir(vis_path):
171
+ shutil.rmtree(vis_path)
172
+ os.mkdir(vis_path)
173
+ paraview_path = os.path.join(vis_path, 'paraview')
174
+ os.mkdir(paraview_path)
175
+ print(f"Visualization folder created: {vis_path}")
176
+ setting.vis_dir = vis_path
177
+
178
+ self.setting = setting
179
+
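Only input_files and output_files are required in config.json; vis_dir falls back to the case directory, and the EOS/MINC/NGV keys are matched case-insensitively. A hypothetical config.json showing the recognized keys (file names are placeholders):

{
  "input_files": ["flow.inp", "MESH"],
  "output_files": ["OUTPUT_ELEME.csv"],
  "vis_dir": ".",
  "debug": false,
  "EOS": "ECO2N",
  "MINC": false,
  "NGV": false,
  "selected_variables_scalar": [],
  "selected_variables_vector": []
}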
180
+ def write_eleme_conne(self):
181
+ if self.setting is None:
182
+ print(f'Please initialize the vis_reader class with the case directory.')
183
+ sys.exit(1)
184
+ print(f'Reading input files ...')
185
+ self.__write_elem_buffer()
186
+ self.__write_conne_buffer()
187
+ self.__write_rocks_buffer()
188
+ print(f'Creating elements and connections ...')
189
+ self.__create_elem_conne()
190
+
191
+ def write_geometry(self):
192
+ if self.setting is None:
193
+ print(f'Please initialize the vis_reader class with the case directory.')
194
+ sys.exit(1)
195
+
196
+
197
+ if getattr(self, 'elem_conne_path', None) is None:
198
+ print(f'Can\'t find element and connection file. Please create it first. (write_eleme_conne)')
199
+ sys.exit(1)
200
+
201
+ print(f'Creating main geometry ...')
202
+ self.__create_main_geometry()
203
+
204
+ def write_incon(self):
205
+ if self.setting is None:
206
+ print(f'Please initialize the vis_reader class with the case directory.')
207
+ sys.exit(1)
208
+ if self.main_geometry is None:
209
+ print(f'Cannot find main geometry. Please create the main geometry first. (write_geometry)')
210
+ sys.exit(1)
211
+ print(f'Reading input files ...')
212
+ self.__write_incon_buffer()
213
+ print(f'Creating initial condition ...')
214
+ self.__write_initial_conditions()
215
+
216
+ def write_result(self):
217
+ if self.setting is None:
218
+ print(f'Please initialize the vis_reader class with the case directory.')
219
+ sys.exit(1)
220
+ if self.main_geometry is None:
221
+ print(f'Cannot find main geometry. Please create the main geometry first. (write_geometry)')
222
+ sys.exit(1)
223
+
224
+ for output_file_path in self.setting.out_file_paths:
225
+ print(f'Reading output ({output_file_path}) ... ')
226
+
227
+ self.current_out_file = output_file_path
228
+ self.__check_TOUGH_version()
229
+ print(f' Version: {self.setting.tough_version.name}')
230
+ print(f' EOS: {self.setting.eos}')
231
+ if self.setting.tough_version == ToughVersion.TOUGH2:
232
+ self.__read_TOUGH2_CSV_outfile()
233
+ elif self.setting.tough_version == ToughVersion.TOUGH3:
234
+ self.__read_TOUGH3_CSV_outfile()
235
+ elif self.setting.tough_version == ToughVersion.TOUGHReact:
236
+ self.__read_tough_TEC_outfile()
237
+ # add post calculation
238
+ for timestep in self.time_steps_list:
239
+ self.__post_process(timestep)
240
+ if self.setting.ngv:
241
+ self.__post_process_ngv(timestep)
242
+ self.__write_json()
243
+ print(f'All files have been created in {self.setting.vis_dir}.')
244
+
245
+ def write_all(self):
246
+ self.write_eleme_conne()
247
+ self.write_geometry()
248
+ self.write_incon()
249
+ self.write_result()
250
+
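The four public methods are meant to run in the order shown (elements/connections, then geometry, then initial conditions, then results), which write_all wraps; a minimal driver, with a hypothetical case path:

# Illustrative sketch: running the full pipeline for one case directory.
reader = vis_reader("/path/to/case")  # loads and validates config.json in that directory
reader.write_all()                    # ELEME/CONNE -> geometry -> INCON -> per-timestep results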
251
+ # TODO: need to clean up the code
252
+ def __check_bounds(self):
253
+ if not self.setting.bounds[1] > self.setting.bounds[0]:
254
+ print(
255
+ f'Max X {self.setting.bounds[1]} must be greater than Min X {self.setting.bounds[0]}.')
256
+ sys.exit(1)
257
+ if not self.setting.bounds[3] > self.setting.bounds[2]:
258
+ print(
259
+ f'Max Y {self.setting.bounds[3]} must be greater than Min Y {self.setting.bounds[2]}.')
260
+ sys.exit(1)
261
+ if not self.setting.bounds[5] > self.setting.bounds[4]:
262
+ print(
263
+ f'Max Z {self.setting.bounds[5]} must be greater than Min Z {self.setting.bounds[4]}.')
264
+ sys.exit(1)
265
+ # print(f'Can\'t find input file: {self.setting.input_path}.')
266
+
267
+ def __read_input(self):
268
+ self.__write_elem_buffer()
269
+ self.__write_conne_buffer()
270
+ self.__write_rocks_buffer()
271
+ self.__write_incon_buffer()
272
+
273
+ def __write_elem_buffer(self):
274
+ self.eleme_buffer = io.StringIO()
275
+ # write temp element txt
276
+ has_elem = False
277
+ for input_file_path in self.setting.input_file_paths:
278
+ line_counter = 0
279
+ with open(input_file_path, encoding="utf-8") as f:
280
+ reading_elem = False
281
+
282
+ for line in f:
283
+ if line.startswith('ELEME-') or line.startswith('ELEME'):
284
+ reading_elem = True
285
+ has_elem = True
286
+ find_elem = True
287
+ continue
288
+ if reading_elem:
289
+ line_counter += 1
290
+ if self.__check_if_block_end(line, line_counter):
291
+ reading_elem = False
292
+ found_path = input_file_path
293
+ break
294
+ else:
295
+ self.eleme_buffer.write(line)
296
+
297
+ if has_elem == False:
298
+ print(f'Can\'t find ELEME block in input_file_paths.')
299
+ sys.exit(1)
300
+ else:
301
+ print(f' Found ELEME block in {found_path}')
302
+ def __check_num_of_minc(self):
303
+ #self.minc_buffer = io.StringIO()
304
+ minc_num = 0
305
+ with open(self.minc_file, encoding="utf-8") as f:
306
+ reading_minc = False
307
+ for line in f:
308
+ if line.startswith('ELEME-') or line.startswith('ELEME'):
309
+ reading_minc = True
310
+ #has_minc = True
311
+ continue
312
+ if reading_minc:
313
+
314
+ if self.__check_if_block_end(line, minc_num):
315
+ reading_minc = False
316
+ #found_path = input_file_path
317
+ break
318
+ else:
319
+ minc_num += 1
320
+ #self.minc_buffer.write(line)
321
+ self.num_of_minc = minc_num
322
+
323
+ def __write_conne_buffer(self):
324
+ self.conne_buffer = io.StringIO()
325
+ # write temp element txt
326
+ has_conne = False
327
+ for input_file_path in self.setting.input_file_paths:
328
+ line_counter = 0
329
+ with open(input_file_path, encoding="utf-8") as f:
330
+
331
+ reading_conne = False
332
+ for line in f:
333
+ if line.startswith('CONNE-') or line.startswith('CONNE'):
334
+ reading_conne = True
335
+ has_conne = True
336
+ continue
337
+ if reading_conne:
338
+ line_counter += 1
339
+ if self.__check_if_block_end(line, line_counter):
340
+ reading_conne = False
341
+ found_path = input_file_path
342
+ break
343
+ else:
344
+ self.conne_buffer.write(line)
345
+
346
+ if has_conne == False:
347
+ print(f'Can\'t find CONNE block in input_file_paths.')
348
+ sys.exit(1)
349
+ else:
350
+ print(f' Found CONNE block in {found_path}')
351
+
352
+ def __write_rocks_buffer(self):
353
+ self.rocks_buffer = io.StringIO()
354
+ #self.rocks_sgr_buffer = io.StringIO()
355
+ has_rocks = False
356
+
357
+ for input_file_path in self.setting.input_file_paths:
358
+ line_counter = 0
359
+ with open(input_file_path, encoding="utf-8") as f:
360
+ reading_rocks = False
361
+ for line in f:
362
+ if line.startswith('ROCKS-'):
363
+ reading_rocks = True
364
+ has_rocks = True
365
+ continue
366
+
367
+ if reading_rocks:
368
+ line_counter+= 1
369
+ if self.__check_if_block_end(line, line_counter):
370
+ reading_rocks = False
371
+ found_path = input_file_path
372
+ break
373
+ else:
374
+ if 'SEED' in line:
375
+ continue
376
+ first_line = line.replace('\n', '').rstrip()
377
+
378
+ # skip one line
379
+ f.readline()
380
+ third_line = f.readline()
381
+ new_line = f'{first_line}{third_line}'
382
+ self.rocks_buffer.write(new_line)
383
+ f.readline()
384
+
385
+
386
+ if has_rocks == False:
387
+ print(f'Can\'t find ROCKS block in any of the input_file_paths.')
388
+ sys.exit(1)
389
+ else:
390
+ print(f' Found ROCKS block in {found_path}')
391
+
392
+ def __write_incon_buffer(self):
393
+ self.incon_buffer = io.StringIO()
394
+ has_incon = False
395
+ self.global_incon = False
396
+ # write temp element txt
397
+
398
+ for input_file_path in self.setting.input_file_paths:
399
+ line_counter = 0
400
+ with open(input_file_path, encoding="utf-8") as f:
401
+
402
+ reading_incon = False
403
+ for line in f:
404
+ if line.startswith('INCON-'):
405
+ reading_incon = True
406
+ has_incon = True
407
+ #find_incon = True
408
+ continue
409
+
410
+ if reading_incon:
411
+ line_counter += 1
412
+ if self.__check_if_block_end(line, line_counter):
413
+ found_path = input_file_path
414
+ break
415
+ #line = f.readline() # skip first line #self.number_of_elements
416
+ eos = self.setting.eos
417
+ num = len(line.split())
418
+ if self.setting.eos.upper() == "ECO2N" and len(line.split()) == 4:
419
+ self.incon_buffer.write(line)
420
+ elif self.setting.eos.upper() == "EOS1":
421
+ line = f.readline() # skip first line #self.number_of_elements
422
+ if len(line.split()) == 2:
423
+ self.incon_buffer.write(line)
424
+
425
+
426
+ if has_incon == False or self.incon_buffer.tell() == 0:
427
+ print(f'Can\'t find INCON block in input_file_paths (or length of INCON is zero).')
428
+
429
+ # find the fifth line of the "PARAM" block
430
+ reading_pram = False
431
+ for input_file_path in self.setting.input_file_paths:
432
+ line_counter = 0
433
+ with open(input_file_path, encoding="utf-8") as f:
434
+ reading_pram = False
435
+ for line in f:
436
+ if line.startswith('PARAM-'):
437
+ reading_pram = True
438
+ continue
439
+ if reading_pram:
440
+ line_counter += 1
441
+ if self.__check_if_block_end(line, line_counter):
442
+ if line_counter <5:
443
+ print(f' Can\'t find Global INCON line in PARAM block of {input_file_path}. Please check the PARAM block.')
444
+ break
445
+
446
+ if line_counter == 5:
447
+ self.global_incon = True
448
+ print(f' Found Global INCON line in PARAM block of {input_file_path}')
449
+ self.incon_buffer.write(line)
450
+ break
451
+
452
+ else:
453
+ print(f' Found INCON block in {found_path}')
454
+
455
+ def __write_initial_conditions(self):
456
+
457
+ self.incon_vtk = self.__read_vtk_file(self.main_geometry)
458
+
459
+
460
+
461
+ self.incon_buffer.seek(0)
462
+ incon_df = pd.DataFrame()
463
+ if self.setting.eos.upper() == "ECO2N":
464
+ # read incon
465
+ incon_colspecs = [(0, 20), (20, 40), (40, 60), (60, 80)] # define column widths
466
+ incon_names = ['Pressure', 'NaCl', 'CO2', 'Temperature']
467
+ incon_df = pd.read_fwf(self.incon_buffer, colspecs=incon_colspecs, header=None,
468
+ names=incon_names,
469
+ dtype={'Pressure':float, 'NaCl':float, 'CO2':float, 'Temperature':float})
470
+ elif self.setting.eos.upper() == "EOS1":
471
+ # read incon
472
+ incon_colspecs = [(0, 20), (20, 40)]
473
+ incon_names = ['Temperature', 'Pressure']
474
+ incon_df = pd.read_fwf(self.incon_buffer, colspecs=incon_colspecs, header=None,
475
+ names=incon_names,
476
+ dtype={'Temperature':float, 'Pressure':float})
477
+ if len(incon_df) == 0:
478
+ print(f' The INCON block is empty.')
479
+ return
480
+
481
+ else:
482
+ for header in incon_names:
483
+ array = vtkDoubleArray()
484
+ array.SetName(header)
485
+ self.incon_vtk.GetCellData().AddArray(array)
486
+
487
+ for i in range(0, self.incon_vtk.GetNumberOfCells()):
488
+ for header in incon_names:
489
+ if self.global_incon:
490
+ # if global incon, use the first row
491
+ value = self.__parse_float(incon_df[header][0])
492
+ else:
493
+ index = self.sequence_dist[i]
494
+ value = self.__parse_float(incon_df[header][index])
495
+ self.incon_vtk.GetCellData().GetArray(header).InsertNextValue(value)
496
+
497
+ extension = os.path.splitext(self.main_geometry)[1]
498
+ self.incon_path = os.path.join(self.setting.vis_dir, f'incon{extension}')
499
+ self.__write_vtk_file(self.incon_vtk, self.incon_path)
500
+ print(f' ✓ Initial condition file created: {self.incon_path}')
501
+
502
+
503
+
504
+ def __read_TOUGH2_CSV_outfile(self):
505
+ self.time_steps_list = []
506
+ value_type = ValueType.Unknown
507
+ current_time = None
508
+ buffer = io.StringIO()
509
+ csv_headers = []
510
+ line_number = -1
511
+ start_index = -1
512
+ def process_chunk():
513
+ """Define what to do with each flushed chunk."""
514
+ buffer.seek(0)
515
+ df = pd.read_csv(buffer)
516
+ #print(f"Processing time group:\n{df.head()}")
517
+ time_step = VisTimeStep(
518
+ time=float(current_time),
519
+ time_step=len(self.time_steps_list)+1,
520
+ iteration=1
521
+ )
522
+ self.time_steps_list.append(time_step)
523
+ if value_type == ValueType.Scalar:
524
+ self.__write_scalar_result(time_step, df, csv_headers)
525
+ elif value_type == ValueType.Vector:
526
+ self.__write_vector_result(time_step, df, csv_headers)
527
+ else:
528
+ print('Error: Your value type is not supported')
529
+ sys.exit(1)
530
+
531
+ buffer.flush()
532
+ buffer.close()
533
+
534
+ with open(self.current_out_file, encoding="utf-8") as f:
535
+ for line in f:
536
+ line_number = line_number + 1
537
+ values = line.strip().split(',')
538
+
539
+ if line_number == 0:
540
+ csv_headers = [x.strip() for x in values]
541
+ #replace all " with ''"
542
+ csv_headers = [x.replace('"', '') for x in csv_headers]
543
+ if 'ELEM' in csv_headers and 'INDEX' in csv_headers:
544
+ value_type = ValueType.Scalar
545
+ #start_index = 1 # remove the first item
546
+
547
+ elif 'ELEM1' in csv_headers and 'ELEM2' in csv_headers:
548
+ value_type = ValueType.Vector
549
+ #start_index = 1
550
+
551
+ start_index = 1
552
+ # remove the first "TIME" header (to reduce the number of columns)
553
+ csv_headers = csv_headers[start_index:]
554
+
555
+ # Write header once
556
+ buffer.write(','.join(csv_headers) + '\n')
557
+ continue
558
+
559
+ row_time = self.__parse_float(values[0].strip())
560
+
561
+ if current_time is None:
562
+ current_time = row_time
563
+
564
+ if row_time != current_time:
565
+ # Time changed → flush and reset
566
+ process_chunk()
567
+ buffer = io.StringIO()
568
+ buffer.write(','.join(csv_headers) + '\n') # Write header
569
+ current_time = row_time
570
+
571
+ # Write current row
572
+ buffer.write(','.join(values[start_index:]) + '\n')
573
+
574
+ # Flush the last group
575
+ if buffer.tell() > 0:
576
+ process_chunk()
577
+
578
+
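The TOUGH2 reader above streams the CSV and flushes a chunk whenever the leading TIME column changes, so memory stays bounded even for long runs. If the file fits in memory, the same grouping can be sketched with pandas directly (file name hypothetical; this is not the code path used here):

# Illustrative sketch: grouping a TOUGH2-style CSV by its leading time column with pandas.
df = pd.read_csv("tough2_output.csv", skipinitialspace=True)
time_col = df.columns[0]
for t, group in df.groupby(time_col, sort=False):
    chunk = group.drop(columns=[time_col])  # analogous to the per-time DataFrame built by process_chunk()
    # ... hand `chunk` to the scalar/vector writers, as process_chunk() does above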
579
+ def __read_TOUGH3_CSV_outfile(self):
580
+ scalar_buffer = io.StringIO()
581
+ current_time_step = None
582
+ tim_step_counter = 1
583
+ csv_headers = []
584
+ line_number = -1
585
+ reading_number = 0
586
+ value_type = ValueType.Unknown
587
+ start_index = -1
588
+ self.time_steps_list = []
589
+
590
+ with open(self.current_out_file, encoding="utf-8") as f:
591
+ for line in f:
592
+ line_number = line_number + 1
593
+ values = line.strip().split(',')
594
+ if line_number == 0:
595
+ values = [x.replace('"', '') for x in values]
596
+ csv_headers = [x.strip() for x in values]
597
+
598
+ if 'ELEM' in csv_headers:
599
+ value_type = ValueType.Scalar
600
+ start_index = 5
601
+
602
+ elif 'ELEM1' in csv_headers and 'ELEM2' in csv_headers:
603
+ value_type = ValueType.Vector
604
+ start_index = 5
605
+
606
+ csv_headers = csv_headers[start_index:]
607
+ f.readline() # skip next line
608
+ print(f' Value type: {value_type.name}')
609
+ continue
610
+
611
+ # Find time item
612
+ if len(values) == 1:
613
+ time_string = values[0].replace('"', '').strip()
614
+ time_string = time_string.split()[-1]
615
+ time = self.__parse_float(time_string)
616
+
617
+ # if not the first time step
618
+ if value_type == ValueType.Scalar and reading_number == self.number_of_elements:
619
+ scalar_buffer.seek(0)
620
+ df = pd.read_csv(scalar_buffer, sep=',', header=0)
621
+ self.__write_scalar_result(
622
+ current_time_step, df, csv_headers)
623
+ scalar_buffer.flush()
624
+ scalar_buffer.close()
625
+ scalar_buffer = io.StringIO()
626
+ reading_number = 0
627
+
628
+ if value_type == ValueType.Vector and reading_number == self.number_of_connections:
629
+ scalar_buffer.seek(0)
630
+ df = pd.read_csv(scalar_buffer, sep=',', header=0)
631
+ self.__write_vector_result(
632
+ current_time_step, df, csv_headers)
633
+ scalar_buffer.flush()
634
+ scalar_buffer.close()
635
+ scalar_buffer = io.StringIO()
636
+ reading_number = 0
637
+
638
+ current_time_step = VisTimeStep(
639
+ time=float(time),
640
+ time_step=tim_step_counter,
641
+ iteration=1
642
+ )
643
+
644
+ # Initialize buffer
645
+ header_string = ','.join(csv_headers)
646
+ scalar_buffer.write(header_string + '\n')
647
+ self.time_steps_list.append(current_time_step)
648
+ tim_step_counter = tim_step_counter + 1
649
+
650
+ else:
651
+ scalar_buffer.write(','.join(values[start_index:]) + '\n')
652
+ reading_number = reading_number + 1
653
+
654
+ else:
655
+ # write the last time step
656
+ if value_type == ValueType.Scalar:
657
+ scalar_buffer.seek(0)
658
+ df = pd.read_csv(scalar_buffer, sep=',', header=0)
659
+ self.__write_scalar_result(current_time_step, df, csv_headers)
660
+ if value_type == ValueType.Vector:
661
+ scalar_buffer.seek(0)
662
+ df = pd.read_csv(scalar_buffer, sep=',', header=0)
663
+ self.__write_vector_result(current_time_step, df, csv_headers)
664
+ scalar_buffer.close()
665
+
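For orientation, the layout this TOUGH3 parser assumes, inferred purely from the logic above (names and values hypothetical): a header row whose first start_index columns are discarded, one skipped row, then blocks of data rows, each block introduced by a single-column time line whose last whitespace-separated token is read as the time:

"ELEM", ..., "PRES", "TEMP", ...        <- header row; first 5 columns dropped (start_index = 5)
<one skipped line>
"TIME [sec]     8.64000E+04"            <- single-column row: starts a new time step
<number_of_elements rows for scalar output, or number_of_connections rows for vector output>
"TIME [sec]     1.72800E+05"
...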
666
+ def __old_read_TOUGH3_CSV_outfile(self):
667
+ scalar_buffer = io.StringIO()
668
+ current_time_step = None
669
+ tim_step_counter = 1
670
+ csv_headers = []
671
+ line_number = -1
672
+ reading_number = 0
673
+ value_type = ValueType.Unknown
674
+ start_index = -1
675
+ self.time_steps_list = []
676
+
677
+ with open(self.current_out_file, encoding="utf-8") as f:
678
+ for line in f:
679
+ line_number = line_number + 1
680
+ values = line.strip().split(',')
681
+ if line_number == 0:
682
+ values = [x.replace('"', '') for x in values]
683
+ csv_headers = [x.strip() for x in values]
684
+
685
+ if 'ELEM' in csv_headers:
686
+ value_type = ValueType.Scalar
687
+ start_index = 5
688
+
689
+ elif 'ELEM1' in csv_headers and 'ELEM2' in csv_headers:
690
+ value_type = ValueType.Vector
691
+ start_index = 5
692
+
693
+ csv_headers = csv_headers[start_index:]
694
+ f.readline() # skip next line
695
+ print(f' Value type: {value_type.name}')
696
+ continue
697
+
698
+ # Find time item
699
+ if len(values) == 1:
700
+ time_string = values[0].replace('"', '').strip()
701
+ time_string = time_string.split()[-1]
702
+ time = self.__parse_float(time_string)
703
+
704
+ # if not the first time step
705
+ if value_type == ValueType.Scalar and reading_number == self.number_of_elements:
706
+ scalar_buffer.seek(0)
707
+ df = pd.read_csv(scalar_buffer, sep=',', header=0)
708
+ self.__write_scalar_result(
709
+ current_time_step, df, csv_headers)
710
+ scalar_buffer.flush()
711
+ scalar_buffer.close()
712
+ scalar_buffer = io.StringIO()
713
+ reading_number = 0
714
+
715
+ if value_type == ValueType.Vector and reading_number == self.number_of_connections:
716
+ scalar_buffer.seek(0)
717
+ df = pd.read_csv(scalar_buffer, sep=',', header=0)
718
+ self.__write_vector_result(
719
+ current_time_step, df, csv_headers)
720
+ scalar_buffer.flush()
721
+ scalar_buffer.close()
722
+ scalar_buffer = io.StringIO()
723
+ reading_number = 0
724
+
725
+ current_time_step = VisTimeStep(
726
+ time=float(time),
727
+ time_step=tim_step_counter,
728
+ iteration=1
729
+ )
730
+
731
+ # Initialize buffer
732
+ header_string = ','.join(csv_headers)
733
+ scalar_buffer.write(header_string + '\n')
734
+ self.time_steps_list.append(current_time_step)
735
+ tim_step_counter = tim_step_counter + 1
736
+
737
+ else:
738
+ scalar_buffer.write(','.join(values[start_index:]) + '\n')
739
+ reading_number = reading_number + 1
740
+
741
+ else:
742
+ # write the last time step
743
+ if value_type == ValueType.Scalar:
744
+ scalar_buffer.seek(0)
745
+ df = pd.read_csv(scalar_buffer, sep=',', header=0)
746
+ self.__write_scalar_result(current_time_step, df, csv_headers)
747
+ if value_type == ValueType.Vector:
748
+ scalar_buffer.seek(0)
749
+ df = pd.read_csv(scalar_buffer, sep=',', header=0)
750
+ self.__write_vector_result(current_time_step, df, csv_headers)
751
+ scalar_buffer.close()
752
+
753
+ def __read_tough_TEC_outfile(self):
754
+
755
+ scalar_buffer = io.StringIO()
756
+ #vector_buffer = io.StringIO()
757
+ current_time_step = None
758
+ tim_step_counter = 1
759
+ reading_scalar = False
760
+ scalar_headers = []
761
+ self.time_steps_list = []
762
+
763
+ with open(self.current_out_file, encoding="utf-8") as f:
764
+ for line in f:
765
+ if line.strip().lower().startswith('Variables'.lower()):
766
+ headers_value = line.strip().split('=')[1]
767
+ #scalar_headers = headers_value.replace('"', '')
768
+
769
+ scalar_headers = re.split(' |,', headers_value.replace('"', '').strip())
770
+ scalar_headers = [x for x in scalar_headers if x]
771
+ # drop the first three header names (normally the X, Y, Z coordinate columns)
+ scalar_headers.pop(0)
772
+ scalar_headers.pop(0)
773
+ scalar_headers.pop(0)
774
+
775
+ continue
776
+ if line.strip().lower().startswith('Zone T'.lower()):
777
+ if reading_scalar:
778
+ scalar_buffer.seek(0)
779
+ df = pd.read_csv(scalar_buffer, sep=',', header=0)
780
+ self.__write_scalar_result(
781
+ current_time_step, df, scalar_headers)
782
+
783
+ scalar_buffer.flush()
784
+ scalar_buffer.close()
785
+ scalar_buffer = io.StringIO()
786
+
787
+ time_values = line.split('"')
788
+ time = time_values[1].split()[0]
789
+ current_time_step = VisTimeStep(
790
+ time=float(time),
791
+ time_step=tim_step_counter,
792
+ iteration=1
793
+ )
794
+ reading_scalar = True
795
+ header_string = ','.join(scalar_headers)
796
+ scalar_buffer.write(header_string + '\n')
797
+ self.time_steps_list.append(current_time_step)
798
+ tim_step_counter = tim_step_counter + 1
799
+ continue
800
+
801
+ if reading_scalar and len(line.split()) == len(scalar_headers)+3:
802
+ csv_line = ','.join(line.split()[3:]) + '\n'
803
+ scalar_buffer.write(csv_line)
804
+
805
+ # if process to the end of file
806
+ else:
807
+ if len(scalar_buffer.getvalue()) > 0:
808
+ #df = self.prepare_fixed_length_scalar_dataframe(scalar_headers, scalar_buffer)
809
+ scalar_buffer.seek(0)
810
+ df = pd.read_csv(scalar_buffer, sep=',', header=0)
811
+ self.__write_scalar_result(
812
+ current_time_step, df, scalar_headers)
813
+ scalar_buffer.flush()
814
+ scalar_buffer.close()
815
+ scalar_buffer = io.StringIO()
816
+ tim_step_counter = tim_step_counter + 1
817
+
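Similarly, the TOUGHREACT TEC reader above keys off two kinds of lines, sketched here with hypothetical names and values: a VARIABLES line (the first three names, the coordinates, are dropped) and ZONE T lines whose first quoted token is the solution time; every data row carries three coordinates followed by one value per remaining header:

Variables = "X(m)   Y(m)   Z(m)   Pressure   Temperature   ..."
Zone T = "3.15360E+07 sec", ...
  1.0   2.0   -10.0   1.013E+05   25.0   ...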
818
+ def __post_process(self, vis_time_step):
819
+ time_index = self.time_steps_list.index(vis_time_step)
820
+ #vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}.vtr')
821
+
822
+ extension = os.path.splitext(self.main_geometry)[1]
823
+ vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}{extension}')
824
+ self.time_steps_list[time_index].vtu_file_name = vtr_path
825
+ scalar_vtr = self.__read_vtk_file(vtr_path)
826
+ vtr = scalar_vtr
827
+
828
+ post_variable_list = []
829
+ # p
830
+ p_name = "P"
831
+ if vtr.GetCellData().GetArray("P (Pa)") is not None:
832
+ p_name = "P (Pa)"
833
+ if vtr.GetCellData().GetArray("P(Pa)") is not None:
834
+ p_name = "P(Pa)"
835
+ if vtr.GetCellData().GetArray("P(bar)") is not None:
836
+ p_name = "P(bar)"
837
+ if vtr.GetCellData().GetArray("PRES") is not None:
838
+ p_name = "PRES"
839
+
840
+ if vtr.GetCellData().GetArray(p_name) is not None and self.incon_vtk.GetCellData().GetArray('Pressure') is not None:
841
+ delPArray = vtkDoubleArray()
842
+ delPArray.SetName(f'del_{p_name}')
843
+ for i in range(0, vtr.GetNumberOfCells()):
844
+ p_value = vtr.GetCellData().GetArray(p_name).GetValue(i)
845
+ incon_p = self.incon_vtk.GetCellData().GetArray('Pressure').GetValue(i)
846
+ delP = p_value - incon_p
847
+ delPArray.InsertNextValue(delP)
848
+
849
+ vtr.GetCellData().AddArray(delPArray)
850
+ post_variable_list.append(VisVariable(f'del_{p_name}', ValueType.Scalar, 1))
851
+
852
+ # Put cell-centered data into points
853
+ filter = vtkCellDataToPointData()
854
+ filter.SetInputData(vtr)
855
+ filter.Update()
856
+ vtr_cell_to_points = filter.GetOutput()
857
+
858
+ if vtr.GetCellData().GetArray(f'del_{p_name}') is not None:
859
+ vtr.GetPointData().AddArray(vtr_cell_to_points.GetPointData().GetArray(f'del_{p_name}'))
860
+
861
+
862
+ # add toughreact variables
863
+ if self.setting.tough_version == ToughVersion.TOUGHReact or self.setting.tough_version == ToughVersion.TOUGH3:
864
+ trapHCO2_array = vtkDoubleArray()
865
+ trapHCO2_array.SetName('trapHCO2')
866
+
867
+ trapRCO2_array = vtkDoubleArray()
868
+ trapRCO2_array.SetName('trapRCO2')
869
+
870
+ trapDCO2_array = vtkDoubleArray()
871
+ trapDCO2_array.SetName('trapDCO2')
872
+
873
+ trapMCO2_array = vtkDoubleArray()
874
+ trapMCO2_array.SetName('trapMCO2')
875
+
876
+ for index in range(0, vtr.GetNumberOfCells()):
877
+ trapHCO2 = 0
878
+ trapRCO2 = 0
879
+ trapDCO2 = 0
880
+ trapMCO2 = 0
881
+ VOLX = 0
882
+ if vtr.GetCellData().GetArray("VOLX") is not None:
883
+ VOLX = vtr.GetCellData().GetArray("VOLX").GetValue(index)
884
+
885
+ SatGas = 0
886
+ if vtr.GetCellData().GetArray("SatGas") is not None:
887
+ SatGas = vtr.GetCellData().GetArray("SatGas").GetValue(index)
888
+ elif vtr.GetCellData().GetArray("SAT_G") is not None:
889
+ SatGas = vtr.GetCellData().GetArray("SAT_G").GetValue(index)
890
+
891
+ Porosity = 0
892
+ if vtr.GetCellData().GetArray("Porosity") is not None:
893
+ Porosity = vtr.GetCellData().GetArray("Porosity").GetValue(index)
894
+ elif vtr.GetCellData().GetArray("POR") is not None:
895
+ Porosity = vtr.GetCellData().GetArray("POR").GetValue(index)
896
+
897
+ DGas_kg_m3 = 0
898
+ if vtr.GetCellData().GetArray("DGas_kg/m3") is not None:
899
+ DGas_kg_m3 = vtr.GetCellData().GetArray("DGas_kg/m3").GetValue(index)
900
+ elif vtr.GetCellData().GetArray("DEN_G") is not None:
901
+ DGas_kg_m3 = vtr.GetCellData().GetArray("DEN_G").GetValue(index)
902
+
903
+ SatLiq = 0
904
+ if vtr.GetCellData().GetArray("SatLiq") is not None:
905
+ SatLiq = vtr.GetCellData().GetArray("SatLiq").GetValue(index)
906
+ elif vtr.GetCellData().GetArray("SAT_L") is not None:
907
+ SatLiq = vtr.GetCellData().GetArray("SAT_L").GetValue(index)
908
+
909
+ XCO2Liq = 0
910
+ if vtr.GetCellData().GetArray("XCO2Liq") is not None:
911
+ XCO2Liq = vtr.GetCellData().GetArray("XCO2Liq").GetValue(index)
912
+ elif vtr.GetCellData().GetArray("X_CO2_L") is not None:
913
+ XCO2Liq = vtr.GetCellData().GetArray("X_CO2_L").GetValue(index)
914
+
915
+ sgr = 0
916
+ if vtr.GetCellData().GetArray("sgr") is not None:
917
+ sgr = vtr.GetCellData().GetArray("sgr").GetValue(index)
918
+
919
+
920
+ # the boolean factors (SatGas > sgr) and (SatGas <= sgr) act as 0/1 switches
+ trapHCO2 = (SatGas - sgr) * VOLX * Porosity * DGas_kg_m3 * (SatGas > sgr)
921
+ trapRCO2 = 0.05 * VOLX * Porosity * DGas_kg_m3 * (SatGas > sgr) + SatGas * VOLX * Porosity * DGas_kg_m3 * (SatGas <= sgr)
922
+ trapDCO2 = SatLiq * VOLX * Porosity * DGas_kg_m3 * XCO2Liq
923
+ trapHCO2_array.InsertNextValue(self.__fix_negative_zero(trapHCO2))
924
+ trapRCO2_array.InsertNextValue(self.__fix_negative_zero(trapRCO2))
925
+ trapDCO2_array.InsertNextValue(self.__fix_negative_zero(trapDCO2))
926
+
927
+ trapMCO2 = 0
928
+ if vtr.GetCellData().GetArray("calcite") is not None:
929
+ calcite = vtr.GetCellData().GetArray("calcite").GetValue(index)
930
+ ankerite_2 = vtr.GetCellData().GetArray("ankerite-2").GetValue(index)
931
+ dawsonite = vtr.GetCellData().GetArray("dawsonite").GetValue(index)
932
+ dolomite_2 = vtr.GetCellData().GetArray("dolomite-2").GetValue(index)
933
+ magnesite = vtr.GetCellData().GetArray("magnesite").GetValue(index)
934
+ siderite_2 = vtr.GetCellData().GetArray("siderite-2").GetValue(index)
935
+ trapMCO2 =(calcite*1 + ankerite_2*2 + dawsonite*1 + dolomite_2*2 + magnesite*1 + siderite_2*1)*VOLX*Porosity*0.012
936
+ trapMCO2_array.InsertNextValue(self.__fix_negative_zero(trapMCO2))
937
+
938
+
939
+ vtr.GetCellData().AddArray(trapHCO2_array)
940
+ vtr.GetCellData().AddArray(trapRCO2_array)
941
+ vtr.GetCellData().AddArray(trapDCO2_array)
942
+ vtr.GetCellData().AddArray(trapMCO2_array)
943
+
944
+ post_variable_list.append(VisVariable('trapHCO2', ValueType.Scalar, 1))
945
+ post_variable_list.append(VisVariable('trapRCO2', ValueType.Scalar, 1))
946
+ post_variable_list.append(VisVariable('trapDCO2', ValueType.Scalar, 1))
947
+ post_variable_list.append(VisVariable('trapMCO2', ValueType.Scalar, 1))
948
+
949
+
950
+ # Put cell-centered data into points
951
+ filter = vtkCellDataToPointData()
952
+ filter.SetInputData(vtr)
953
+ filter.Update()
954
+ vtr_cell_to_points = filter.GetOutput()
955
+
956
+ for variabl_name in ['trapHCO2', 'trapRCO2', 'trapDCO2', 'trapMCO2']:
957
+ vtr.GetPointData().AddArray(vtr_cell_to_points.GetPointData().GetArray(variabl_name))
958
+
959
+ if len(post_variable_list) > 0:
960
+ self.variable_list["post"] = post_variable_list
961
+ self.__write_vtk_file(vtr, vtr_path)
962
+
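In symbols, the per-cell trapped-CO2 terms filled in above reduce to the following (with V = VOLX, \phi = Porosity, \rho_g = gas density, S_g, S_l = gas and liquid saturation, S_{gr} = sgr, X_{CO_2} = XCO2Liq, and [\,\cdot\,] the 0/1 indicator used in the code):

\mathrm{trapHCO2} = (S_g - S_{gr})\, V \phi \rho_g \,[S_g > S_{gr}]
\mathrm{trapRCO2} = 0.05\, V \phi \rho_g \,[S_g > S_{gr}] + S_g V \phi \rho_g \,[S_g \le S_{gr}]
\mathrm{trapDCO2} = S_l\, V \phi \rho_g\, X_{CO_2}
\mathrm{trapMCO2} = (\mathrm{calcite} + 2\,\mathrm{ankerite\text{-}2} + \mathrm{dawsonite} + 2\,\mathrm{dolomite\text{-}2} + \mathrm{magnesite} + \mathrm{siderite\text{-}2})\, V \phi \times 0.012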
963
+ def __post_process_ngv(self, vis_time_step):
964
+
965
+ #self.rock_dict
966
+ post_variable_list = []
967
+ if self.setting.mesh_type != MeshType.RegularGrid:
968
+ print(' NGV post-processing is only available for RegularGrid mesh.')
969
+ return
970
+
971
+
972
+ time_index = self.time_steps_list.index(vis_time_step)
973
+ #vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}.vtr')
974
+
975
+ extension = os.path.splitext(self.main_geometry)[1]
976
+ vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}{extension}')
977
+ self.time_steps_list[time_index].vtu_file_name = vtr_path
978
+ scalar_vtr = self.__read_vtk_file(vtr_path)
979
+ vtr = scalar_vtr
980
+
981
+
982
+ vtr_dimension = scalar_vtr.GetDimensions()
983
+ cell_index = 0
984
+ matIDArray = vtr.GetCellData().GetArray('Material_ID')
985
+
986
+ G = 9.81 # gravitational acceleration [m/s^2]
987
+ Pc = 3000 # characteristic capillary pressure used in the Ncv and Nb expressions below
988
+ # create vtk double arrays: 'Ncv_k1','Ncv_k2','Ncv_k3','Ngv_k1','Ngv_k2','Ngv_k3','Nb','R1'
989
+
990
+ Ncv_k1_array = vtkDoubleArray()
991
+ Ncv_k1_array.SetName('Ncv_k1')
992
+ vtr.GetCellData().AddArray(Ncv_k1_array)
993
+ Ncv_k2_array = vtkDoubleArray()
994
+ Ncv_k2_array.SetName('Ncv_k2')
995
+ vtr.GetCellData().AddArray(Ncv_k2_array)
996
+ Ncv_k3_array = vtkDoubleArray()
997
+ Ncv_k3_array.SetName('Ncv_k3')
998
+ vtr.GetCellData().AddArray(Ncv_k3_array)
999
+ Ngv_k1_array = vtkDoubleArray()
1000
+ Ngv_k1_array.SetName('Ngv_k1')
1001
+ vtr.GetCellData().AddArray(Ngv_k1_array)
1002
+ Ngv_k2_array = vtkDoubleArray()
1003
+ Ngv_k2_array.SetName('Ngv_k2')
1004
+ vtr.GetCellData().AddArray(Ngv_k2_array)
1005
+ Ngv_k3_array = vtkDoubleArray()
1006
+ Ngv_k3_array.SetName('Ngv_k3')
1007
+ vtr.GetCellData().AddArray(Ngv_k3_array)
1008
+ Nb_array = vtkDoubleArray()
1009
+ Nb_array.SetName('Nb')
1010
+ vtr.GetCellData().AddArray(Nb_array)
1011
+ R1_array = vtkDoubleArray()
1012
+ R1_array.SetName('R1')
1013
+ vtr.GetCellData().AddArray(R1_array)
1014
+
1015
+ post_variable_list.append(VisVariable('Ncv_k1', ValueType.Scalar, 1))
1016
+ post_variable_list.append(VisVariable('Ncv_k2', ValueType.Scalar, 1))
1017
+ post_variable_list.append(VisVariable('Ncv_k3', ValueType.Scalar, 1))
1018
+ post_variable_list.append(VisVariable('Ngv_k1', ValueType.Scalar, 1))
1019
+ post_variable_list.append(VisVariable('Ngv_k2', ValueType.Scalar, 1))
1020
+ post_variable_list.append(VisVariable('Ngv_k3', ValueType.Scalar, 1))
1021
+ post_variable_list.append(VisVariable('Nb', ValueType.Scalar, 1))
1022
+ post_variable_list.append(VisVariable('R1', ValueType.Scalar, 1))
1023
+
1024
+
1025
+ # check if the required arrays are in the vtk file
1026
+ vis_gas_array = vtkDoubleArray()
1027
+ vis_gas_name = 'VIS(gas)'
1028
+ if vtr.GetCellData().GetArray(vis_gas_name) is not None:
1029
+ vis_gas_array = vtr.GetCellData().GetArray(vis_gas_name)
1030
+ else:
1031
+ print(f' Can\'t find {vis_gas_name} array in the vtk file for NGV post-processing.')
1032
+ return
1033
+
1034
+ dl_array = vtkDoubleArray()
1035
+ dl_name = 'DL (kg/m^3)'
1036
+ if vtr.GetCellData().GetArray(dl_name) is not None:
1037
+ dl_array = vtr.GetCellData().GetArray(dl_name)
1038
+ else:
1039
+ print(f' Can\'t find {dl_name} array in the vtk file for NGV post-processing.')
1040
+ return
1041
+
1042
+ dg_array = vtkDoubleArray()
1043
+ dg_name = 'DG (kg/m^3)'
1044
+ if vtr.GetCellData().GetArray(dg_name) is not None:
1045
+ dg_array = vtr.GetCellData().GetArray(dg_name)
1046
+ else:
1047
+ print(f' Can\'t find {dg_name} array in the vtk file for NGV post-processing.')
1048
+ return
1049
+
1050
+ flof_array = vtkDoubleArray()
1051
+ flof_name = 'FLOF (kg/s)'
1052
+ if vtr.GetCellData().GetArray(flof_name) is not None:
1053
+ flof_array = vtr.GetCellData().GetArray(flof_name)
1054
+ else:
1055
+ print(f' Can\'t find {flof_name} array in the vtk file for NGV post-processing.')
1056
+ return
1057
+
1058
+
1059
+
1060
+ for z_index in range(0, vtr_dimension[2]-1):
1061
+ for y_index in range(0, vtr_dimension[1]-1):
1062
+ for x_index in range(0, vtr_dimension[0]-1):
1063
+ dx = vtr.GetXCoordinates().GetValue(x_index+1) - vtr.GetXCoordinates().GetValue(x_index)
1064
+ dy = vtr.GetYCoordinates().GetValue(y_index+1) - vtr.GetYCoordinates().GetValue(y_index)
1065
+ #dz = vtr.GetZCoordinates().GetValue(z_index+1) - vtr.GetZCoordinates().GetValue(z_index)
1066
+
1067
+
1068
+ #elemID = self..GetValue(cell_index)
1069
+ matID = matIDArray.GetValue(cell_index)
1070
+ # find rock from self.rock_dict with id = matID
1071
+ #rock = [obj for obj in self.rock_dict if obj.id == matID]
1072
+
1073
+ rock = next((o for o in self.rock_dict if o["id"] == matID), None)
1074
+ per_1 = rock["per_1"] if rock else 0
1075
+ per_2 = rock["per_2"] if rock else 0
1076
+ per_3 = rock["per_3"] if rock else 0
1077
+
1078
+ #df['μCO2'] = df['VIS(gas)']
1079
+ μCO2 = vis_gas_array.GetValue(cell_index)
1080
+ #df['delta_p'] = df['DL (kg/m^3)'] - df['DG (kg/m^3)']
1081
+ delta_p = dl_array.GetValue(cell_index) - dg_array.GetValue(cell_index)
1082
+ #df['ut'] = np.sqrt(df['FLOF (kg/s)_x']**2 + df['FLOF (kg/s)_y']**2 + df['FLOF (kg/s)_z']**2)
1083
+ FLOF = flof_array.GetTuple(cell_index)
1084
+
1085
+ ut = math.sqrt(FLOF[0]**2 + FLOF[1]**2 + FLOF[2]**2)
1086
+
1087
+ #df['Ncv_k1'] = (df['k1'] * df[L] * df['Pc'] )/(df[H]**2 * df['μCO2'] * df['ut'])
1088
+ #df['Ncv_k2'] = (df['k2'] * df[L] * df['Pc'] )/(df[H]**2 * df['μCO2'] * df['ut'])
1089
+ #df['Ncv_k3'] = (df['k3'] * df[L] * df['Pc'] )/(df[H]**2 * df['μCO2'] * df['ut'])
1090
+ #df['Ngv_k1'] = (df['delta_p'] * df['G'] * df['k1'] * df['d_x'])/(df[H] * df['μCO2'] * df['ut'])
1091
+ #df['Ngv_k2'] = (df['delta_p'] * df['G'] * df['k2'] * df['d_x'])/(df[H] * df['μCO2'] * df['ut'])
1092
+ #df['Ngv_k3'] = (df['delta_p'] * df['G'] * df['k3'] * df['d_x'])/(df[H] * df['μCO2'] * df['ut'])
1093
+
1094
+ L = dx
1095
+ H = dy
1096
+ k1 = per_1
1097
+ k2 = per_2
1098
+ k3 = per_3
1099
+ Ncv_k1 = (k1 * L * Pc )/(H**2 * μCO2 * ut) if (H**2 * μCO2 * ut) !=0 else 0
1100
+ Ncv_k2 = (k2 * L * Pc )/(H**2 * μCO2 * ut) if (H**2 * μCO2 * ut) !=0 else 0
1101
+ Ncv_k3 = (k3 * L * Pc )/(H**2 * μCO2 * ut) if (H**2 * μCO2 * ut) !=0 else 0
1102
+ Ngv_k1 = (delta_p * G * k1 * dx)/(H * μCO2 * ut) if (H * μCO2 * ut) !=0 else 0
1103
+ Ngv_k2 = (delta_p * G * k2 * dx)/(H * μCO2 * ut) if (H * μCO2 * ut) !=0 else 0
1104
+ Ngv_k3 = (delta_p * G * k3 * dx)/(H * μCO2 * ut) if (H * μCO2 * ut) !=0 else 0
1105
+
1106
+ #df['Nb'] =(df['delta_p'] * df['G'] * df[H])/df['Pc']
1107
+ Nb =(delta_p * G * H)/Pc if Pc !=0 else 0
1108
+ #df['R1'] = df[L]/df[H]
1109
+ R1 = L/H if H !=0 else 0
1110
+ Ncv_k1_array.InsertNextValue(Ncv_k1)
1111
+ Ncv_k2_array.InsertNextValue(Ncv_k2)
1112
+ Ncv_k3_array.InsertNextValue(Ncv_k3)
1113
+ Ngv_k1_array.InsertNextValue(Ngv_k1)
1114
+ Ngv_k2_array.InsertNextValue(Ngv_k2)
1115
+ Ngv_k3_array.InsertNextValue(Ngv_k3)
1116
+ Nb_array.InsertNextValue(Nb)
1117
+ R1_array.InsertNextValue(R1)
1118
+ cell_index += 1
1119
+
1120
+ #for z_index in range(0, scalar_vtr.GetZCoordinates().GetNumberOfTuples()):
1121
+
1122
+
1123
+ #if len(post_variable_list) > 0:
1124
+ #self.variable_list["post"].append(post_variable_list)
1125
+ self.__write_vtk_file(vtr, vtr_path)
1126
+
1127
+
1128
+
1129
+
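In symbols, with L = dx, H = dy, k_1, k_2, k_3 the rock permeabilities from ROCKS, \mu = VIS(gas), \Delta\rho = DL - DG, g = G, and u_t = |FLOF|, the dimensionless groups written above are (a vanishing denominator is mapped to 0):

N_{cv,k_i} = \frac{k_i\, L\, P_c}{H^2 \mu\, u_t}, \qquad
N_{gv,k_i} = \frac{\Delta\rho\, g\, k_i\, \Delta x}{H \mu\, u_t}, \qquad
N_b = \frac{\Delta\rho\, g\, H}{P_c}, \qquad
R_1 = \frac{L}{H}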
1130
+ def __write_scalar_result(self, vis_time_step, dataframe, csv_headers):
1131
+
1132
+ headers = csv_headers.copy()
1133
+ index = self.time_steps_list.index(vis_time_step)
1134
+ #vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}.vtr')
1135
+
1136
+ extension = os.path.splitext(self.main_geometry)[1]
1137
+ vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}{extension}')
1138
+ self.time_steps_list[index].vtu_file_name = vtr_path
1139
+ #scalar_vtr = vtkRectilinearGrid()
1140
+
1141
+ if not os.path.exists(vtr_path):
1142
+ scalar_vtr = self.__read_vtk_file(self.main_geometry)
1143
+
1144
+ # add time step data
1145
+ timesteps = vtkDoubleArray()
1146
+ timesteps.SetName("TimeValue")
1147
+ timesteps.SetNumberOfTuples(1)
1148
+ timesteps.SetNumberOfComponents(1)
1149
+ timesteps.SetTuple1(0, vis_time_step.time)
1150
+ scalar_vtr.SetFieldData(vtkFieldData())
1151
+ scalar_vtr.GetFieldData().AddArray(timesteps)
1152
+
1153
+ else:
1154
+ scalar_vtr = self.__read_vtk_file(vtr_path)
1155
+
1156
+ vtr = scalar_vtr
1157
+
1158
+ variable_list = []
1159
+
1160
+ # make sure to drop TIME and INDEX columns if they exist
1161
+ if 'INDEX' in dataframe.columns:
1162
+ dataframe = dataframe.drop(columns=['INDEX'])
1163
+ headers.remove('INDEX')
1164
+ if 'ELEM' in dataframe.columns:
1165
+ # change the data type of ELEM to string
1166
+ dataframe['ELEM'] = dataframe['ELEM'].astype(str)
1167
+ # remove leading spaces from ELEM column
1168
+ dataframe['ELEM'] = dataframe['ELEM'].str.lstrip()
1169
+ headers.remove('ELEM')
1170
+
1171
+ # create vtkDoubleArray for each header
1172
+ for header in headers:
1173
+ array = vtkDoubleArray()
1174
+ array.SetName(header)
1175
+ vtr.GetCellData().AddArray(array)
1176
+ variable_list.append(VisVariable(header, ValueType.Scalar, 1))
1177
+
1178
+
1179
+ #if self.setting.minc:
1180
+ #print(f' MinC is enabled. Adding MinC values to the result.')
1181
+ minc_ratio = 1
1182
+ if self.setting.minc:
1183
+ minc_ratio = self.num_of_minc / self.number_of_elements
1184
+
1185
+ for i in range(0, vtr.GetNumberOfCells()):
1186
+ elemID = self.elemIDArray.GetValue(i)
1187
+
1188
+ index = self.sequence_dist[i]
1189
+ if 'ELEM' in dataframe.columns:
1190
+ index = dataframe['ELEM'].tolist().index(elemID)
1191
+ #elem_string = dataframe['ELEM'].iloc[index]
1192
+ #target_row = dataframe.iloc[index]
1193
+ #print(f' Processing ELEM {elem_string} at index {index}')
1194
+ for header in headers:
1195
+ minc_index = int(index * minc_ratio)
1196
+ value = float(self.__parse_float(dataframe[header].iloc[minc_index]))
1197
+ vtr.GetCellData().GetArray(header).InsertNextValue(value)
1198
+
1199
+
1200
+
1201
+ # update the variable list
1202
+ if self.current_out_file not in self.variable_list:
1203
+ self.variable_list[self.current_out_file] = variable_list
1204
+
1205
+
1206
+ # Put cell-centered data into points
1207
+ filter = vtkCellDataToPointData()
1208
+ filter.SetInputData(vtr)
1209
+ filter.Update()
1210
+ vtr_cell_to_points = filter.GetOutput()
1211
+
1212
+ for i in range(0, vtr_cell_to_points.GetPointData().GetNumberOfArrays()):
1213
+ vtr.GetPointData().AddArray(vtr_cell_to_points.GetPointData().GetArray(i))
1214
+
1215
+ self.__write_vtk_file(vtr, vtr_path)
1216
+ print(f' ✓ Timestep {vis_time_step.time_step}:{vis_time_step.time} created: {vtr_path}')
1217
+
1218
+ if VisType.Tecplot not in self.setting.vis_types:
1219
+ return
1220
+
1221
+
1222
+ if self.setting.mesh_type == MeshType.PolygonalMesh:
1223
+ print(f' Tecplot output for polygonal mesh is not supported yet.')
1224
+ return
1225
+
1226
+ # Start Tecplot generating
1227
+ tec_name = pathlib.Path(self.setting.input_file_paths[0]).stem
1228
+ self.tec_scalar_path = os.path.join(self.setting.vis_dir, f'{tec_name}_scalar.dat')
1229
+ firstFile = True
1230
+ if os.path.isfile(self.tec_scalar_path):
1231
+ firstFile = False
1232
+ file = open(self.tec_scalar_path, "a", encoding="utf-8")
1233
+ if len(self.setting.selected_variables_scalar) == 0:
1234
+ self.setting.selected_variables_scalar = headers
1235
+
1236
+ if firstFile:
1237
+ file.write('TITLE = TECPLOT PLOT \n')
1238
+ selected_header_string = '"'+'", "'.join(self.setting.selected_variables_scalar) + '"'
1239
+ #header_string = '"'+'", "'.join(headers) + '"'
1240
+ file.write(f'VARIABLES = "X", "Y", "Z", {selected_header_string}\n')
1241
+
1242
+ #tecplot_cell_type = 'BRICK'
1243
+
1244
+ #time_statement = f'ZONE T ="{vis_time_step.time_step}, Time = {vis_time_step.time}", N = {vtu_cell_to_points.GetNumberOfPoints()}, E = {vtu_cell_to_points.GetNumberOfCells()}, F = FEPOINT, ET = {tecplot_cell_type}, SOLUTIONTIME = {vis_time_step.time}\n'
1245
+
1246
+ time_statement = f'ZONE T="{vis_time_step.time_step}, Time = {vis_time_step.time}", I={self.xyz_elem[0] + 1}, J={self.xyz_elem[1] + 1}, K={self.xyz_elem[2] + 1}, SOLUTIONTIME={vis_time_step.time}, DATAPACKING=BLOCK, VARLOCATION=({self.__get_varlocarion_string(self.setting.selected_variables_scalar)})'
1247
+ if not firstFile:
1248
+ time_statement = f'{time_statement}, D=(1,2,3,FECONNECT)'
1249
+ #if self.setting.debug:
1250
+ #time_statement = f'ZONE T ="{vis_time_step.time_step}, Time = {vis_time_step.time}", N = {vtu_cell_to_points.GetNumberOfPoints()}, E = {vtu_cell_to_points.GetNumberOfCells()}, F = FEPOINT, ET = {tecplot_cell_type}\n'
1251
+ file.write(f'{time_statement}\n')
1252
+ max_line_length = 20000
1253
+ # X, Y, Z
1254
+ if firstFile:
1255
+ for point_idx in range(0, 3):
1256
+ line_string = ''
1257
+ for i in range(0, vtr.GetNumberOfPoints()):
1258
+ point = vtr.GetPoint(i)
1259
+ #file.write(str(point[0]) + " ")
1260
+ if len(line_string) + len(str(point[point_idx])) + 1 > max_line_length:
1261
+ # write the current line to file
1262
+ file.write(f'{line_string}\n')
1263
+ # reset the line string
1264
+ line_string = ''
1265
+ line_string = f'{line_string}{str(point[point_idx])} '
1266
+
1267
+ file.write(f'{line_string}\n')
1268
+
1269
+ # Other data
1270
+ for header in self.setting.selected_variables_scalar:
1271
+ array = vtr.GetCellData().GetArray(header)
1272
+ line_string = ''
1273
+ for e in range(0, vtr.GetNumberOfCells()):
1274
+ #file.write(f'{str(array.GetComponent(e, 0))} ')
1275
+ if len(line_string) + len(str(array.GetValue(e))) + 1 > max_line_length:
1276
+ # write the current line to file
1277
+ file.write(f'{line_string}\n')
1278
+ # reset the line string
1279
+ line_string = ''
1280
+ line_string = f'{line_string}{str(array.GetValue(e))} '
1281
+ file.write(f'{line_string}\n')
1282
+
1283
+ file.close()
1284
+
1285
+ def __get_tec_vector_headers(self, headers):
1286
+ vector_headers = []
1287
+ for header in headers:
1288
+ vector_headers.append(f'{header}_X')
1289
+ vector_headers.append(f'{header}_Y')
1290
+ vector_headers.append(f'{header}_Z')
1291
+ return vector_headers
1292
+
1293
+ def __get_varlocarion_string(self, headers):
1294
+ var_string = ''
1295
+ for i in range(0, len(headers)):
1296
+ if i == len(headers)-1:
1297
+ var_string = f'{var_string}{str(i+4)}=CELLCENTERED'
1298
+ else:
1299
+ var_string = f'{var_string}{str(i+4)}=CELLCENTERED,'
1300
+ return var_string
1301
+
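The helper above offsets the variable indices by 4 because slots 1-3 in each Tecplot zone are the nodal X, Y, Z coordinates; an equivalent one-line construction (variable names hypothetical):

# Illustrative sketch: VARLOCATION body for two cell-centered variables.
headers = ["SAT_G", "PRES"]
var_string = ",".join(f"{i + 4}=CELLCENTERED" for i in range(len(headers)))
# var_string == "4=CELLCENTERED,5=CELLCENTERED"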
1302
+ # write the vector result for one timestep
1303
+ def __write_vector_result(self, vis_time_step, dataframe, csv_headers):
1304
+
1305
+ headers = csv_headers.copy()
1306
+ index = self.time_steps_list.index(vis_time_step)
1307
+ extension = os.path.splitext(self.main_geometry)[1]
1308
+ vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}{extension}')
1309
+ self.time_steps_list[index].vtu_file_name = vtr_path
1310
+
1311
+ if not os.path.exists(vtr_path):
1312
+ vector_vtr = self.__read_vtk_file(self.main_geometry)
1313
+ # add time step data
1314
+ timesteps = vtkDoubleArray()
1315
+ timesteps.SetName("TimeValue")
1316
+ timesteps.SetNumberOfTuples(1)
1317
+ timesteps.SetNumberOfComponents(1)
1318
+ timesteps.SetTuple1(0, vis_time_step.time)
1319
+ #timesteps.SetTuple2(1, 100000)
1320
+ vector_vtr.SetFieldData(vtkFieldData())
1321
+ vector_vtr.GetFieldData().AddArray(timesteps)
1322
+
1323
+ else:
1324
+ vector_vtr = self.__read_vtk_file(vtr_path)
1325
+
1326
+ vtu_reader = vtkXMLUnstructuredGridReader()
1327
+ vtu_reader.SetFileName(self.elem_conne_path)
1328
+ vtu_reader.Update()
1329
+ conne_vtu = vtu_reader.GetOutput()
1330
+
1331
+ # make sure to drop TIME and INDEX columns if they exist
1332
+ if 'INDEX' in dataframe.columns:
1333
+ dataframe = dataframe.drop(columns=['INDEX'])
1334
+ headers.remove('INDEX')
1335
+ if 'ELEM1' in dataframe.columns:
1336
+ # remove leading spaces from ELEM column
1337
+ dataframe['ELEM1'] = dataframe['ELEM1'].astype(str)
1338
+ dataframe['ELEM1'] = dataframe['ELEM1'].str.lstrip()
1339
+ headers.remove('ELEM1')
1340
+ if 'ELEM2' in dataframe.columns:
1341
+ # remove leading spaces from ELEM column
1342
+ dataframe['ELEM2'] = dataframe['ELEM2'].astype(str)
1343
+ dataframe['ELEM2'] = dataframe['ELEM2'].str.lstrip()
1344
+ headers.remove('ELEM2')
1345
+
1346
+ variable_list = []
1347
+
1348
+ # find max number of cell connections of a element
1349
+ num_of_components = 3
1350
+ for elem_id in range(0, conne_vtu.GetNumberOfPoints()):
1351
+ cellIDs = vtkIdList()
1352
+ conne_vtu.GetPointCells(elem_id, cellIDs)
1353
+ if cellIDs.GetNumberOfIds() > num_of_components:
1354
+ num_of_components = cellIDs.GetNumberOfIds()
1355
+
1356
+
1357
+ # create double array for each header
1358
+ for header in headers:
1359
+ #if not header == 'ELEM1' and not header == 'ELEM2' and not header == 'INDEX':
1360
+ array = vtkDoubleArray()
1361
+ array.SetName(header)
1362
+ array.SetNumberOfComponents(num_of_components)
1363
+ array.SetNumberOfTuples(vector_vtr.GetNumberOfCells())
1364
+ for i in range(0, num_of_components):
1365
+ # set the default value to 0
1366
+ array.FillComponent(i, 0)
1367
+ vector_vtr.GetCellData().AddArray(array)
1368
+
1369
+ variable_list.append(VisVariable(header, ValueType.Vector, 3))
1370
+
1371
+ if self.current_out_file not in self.variable_list:
1372
+ self.variable_list[self.current_out_file] = variable_list
1373
+
1374
+
1375
+ # prepare cell data array for cells in conne_vtu
1376
+ for header in headers:
1377
+ array = vtkDoubleArray()
1378
+ array.SetName(header)
1379
+ conne_vtu.GetCellData().AddArray(array)
1380
+
1381
+ # add celldata to cells in elem_conn
1382
+ for cell_id in range(0, conne_vtu.GetNumberOfCells()):
1383
+ for header in headers:
1384
+ value = dataframe.loc[cell_id, header]
1385
+ conne_vtu.GetCellData().GetArray(header).InsertNextValue(value)
1386
+ #self.__write_vtk_file(
1387
+ #conne_vtu, self.elem_conne_path)
1388
+
1389
+ # create the vector data
1390
+ for elem_id in range(0, conne_vtu.GetNumberOfPoints()):
1391
+ cellIDs = vtkIdList()
1392
+ conne_vtu.GetPointCells(elem_id, cellIDs)
1393
+ for i in range(0, cellIDs.GetNumberOfIds()):
1394
+ cellID = cellIDs.GetId(i)
1395
+
1396
+ for header in headers:
1397
+ #value = dataframe.loc[next_id, header]
1398
+ value = conne_vtu.GetCellData().GetArray(header).GetValue(cellID)
1399
+ vector_vtr.GetCellData().GetArray(header).SetComponent(elem_id, i, value)
1400
+
1401
+ # Put cell-centered data into points
1402
+ filter = vtkCellDataToPointData()
1403
+ filter.SetInputData(vector_vtr)
1404
+ filter.Update()
1405
+ vtr_cell_to_points = filter.GetOutput()
1406
+
1407
+ for i in range(0, vtr_cell_to_points.GetPointData().GetNumberOfArrays()):
1408
+ vector_vtr.GetPointData().AddArray(vtr_cell_to_points.GetPointData().GetArray(i))
1409
+
1410
+ self.__write_vtk_file(
1411
+ vector_vtr, self.time_steps_list[index].vtu_file_name)
1412
+ print(f' ✓ Timestep {vis_time_step.time_step}:{vis_time_step.time} created: {vtr_path}')
1413
+
1414
+ if VisType.Tecplot not in self.setting.vis_types:
1415
+ return
1416
+
1417
+
1418
+ if self.setting.mesh_type == MeshType.PolygonalMesh:
1419
+ print(f' Tecplot output for polygonal mesh is not supported yet.')
1420
+ return
1421
+
1422
+ # Start Tecplot generating
1423
+ tec_name = pathlib.Path(self.setting.input_file_paths[0]).stem
1424
+ self.tec_vector_path = os.path.join(self.setting.vis_dir, f'{tec_name}_vector.dat')
1425
+ firstFile = True
1426
+ if os.path.isfile(self.tec_vector_path):
1427
+ firstFile = False
1428
+ file = open(self.tec_vector_path, "a", encoding="utf-8")
1429
+
1430
+ #selected_header_string = '"'+'", "'.join(self.setting.selected_variables_scalar) + '"'
1431
+ if len(self.setting.selected_variables_vector) == 0:
1432
+ self.setting.selected_variables_vector = headers
1433
+ vector_headers = self.__get_tec_vector_headers(self.setting.selected_variables_vector)
1434
+
1435
+ # add header
1436
+ if firstFile:
1437
+ file.write('TITLE = TECPLOT PLOT \n')
1438
+ header_string = '"'+'", "'.join(vector_headers) + '"'
1439
+
1440
+ file.write(f'VARIABLES = "X", "Y", "Z", {header_string}\n')
1441
+
1442
+ time_statement = f'ZONE T="{vis_time_step.time_step}, Time = {vis_time_step.time}", I={self.xyz_elem[0] + 1}, J={self.xyz_elem[1] + 1}, K={self.xyz_elem[2] + 1}, SOLUTIONTIME={vis_time_step.time}, DATAPACKING=BLOCK, VARLOCATION=({self.__get_varlocarion_string(vector_headers)})'
1443
+ if not firstFile:
1444
+ time_statement = f'{time_statement}, D=(1,2,3,FECONNECT)'
1445
+ #if self.setting.debug:
1446
+ #time_statement = f'ZONE T ="{vis_time_step.time_step}, Time = {vis_time_step.time}", N = {vtu_cell_to_points.GetNumberOfPoints()}, E = {vtu_cell_to_points.GetNumberOfCells()}, F = FEPOINT, ET = {tecplot_cell_type}\n'
1447
+ file.write(f'{time_statement}\n')
1448
+
1449
+ max_line_length = 20000
1450
+ # X, Y, Z
1451
+ if firstFile:
1452
+ for point_idx in range(0, 3):
1453
+ line_string = ''
1454
+ for i in range(0, vector_vtr.GetNumberOfPoints()):
1455
+ point = vector_vtr.GetPoint(i)
1456
+ #file.write(str(point[0]) + " ")
1457
+ if len(line_string) + len(str(point[point_idx])) + 1 > max_line_length:
1458
+ # write the current line to file
1459
+ file.write(f'{line_string}\n')
1460
+ # reset the line string
1461
+ line_string = ''
1462
+ line_string = f'{line_string}{str(point[point_idx])} '
1463
+ file.write(f'{line_string}\n')
1464
+
1465
+
1466
+
1467
+ # Other data
1468
+ for header in self.setting.selected_variables_vector:
1469
+
1470
+ array = vector_vtr.GetCellData().GetArray(header)
1471
+
1472
+ for dim_idx in range(0, 3):
1473
+ line_string = ''
1474
+ for e in range(0, vector_vtr.GetNumberOfCells()):
1475
+ #file.write(f'{str(array.GetComponent(e, 0))} ')
1476
+ if len(line_string) + len(str(array.GetComponent(e, dim_idx))) + 1 > max_line_length:
1477
+ # write the current line to file
1478
+ file.write(f'{line_string}\n')
1479
+ # reset the line string
1480
+ line_string = ''
1481
+ line_string = f'{line_string}{str(array.GetComponent(e, dim_idx))} '
1482
+ file.write(f'{line_string}\n')
1483
+
1484
+ file.close()
1485
+
1486
+
1487
+ def __create_elem_conne(self):
1488
+
1489
+ '''
1490
+ read elem and conn files into dataframe
1491
+ '''
1492
+
1493
+ elem_colspecs = [(0, 5), (5, 10), (10, 15), (15, 20), (20, 30), (30, 40),
1494
+ (40, 50), (50, 60), (60, 70), (70, 80)] # define column widths
1495
+ self.eleme_buffer.seek(0)
1496
+ elem_df = pd.read_fwf(self.eleme_buffer, colspecs=elem_colspecs, header=None,
1497
+ names=['ELEME', 'NSEQ', 'NADD', 'MA12',
1498
+ 'VOLX', 'AHTX', 'PMX', 'X', 'Y', 'Z'],
1499
+ dtype={'ELEME': str, 'NSEQ': float, 'NADD': float, 'MA12': str, 'VOLX': float, 'AHTX': float, 'PMX': float, 'X': float, 'Y': float, 'Z': float})
1500
+
1501
+ conn_colspecs = [(0, 5), (5, 10), (10, 15), (15, 20), (20, 25), (25, 30),
1502
+ (30, 40), (40, 50), (50, 60), (60, 70), (70, 80)] # define column widths
1503
+
1504
+ self.conne_buffer.seek(0)
1505
+ conn_df = pd.read_fwf(self.conne_buffer, colspecs=conn_colspecs, header=None,
1506
+ names=['ELEM_1', 'ELEM_2', 'NSEQ', 'NAD1', 'NAD2',
1507
+ 'ISOT', 'D1', 'D2', 'AREAX', 'BETAX', 'SIGX'],
1508
+ dtype={'ELEM_1': str, 'ELEM_2': str, 'NSEQ': float, 'NAD1': float, 'NAD2': float, 'ISOT': float, 'D1': float, 'D2': float, 'AREAX': float, 'BETAX': float, 'SIGX': float})
1509
+
1510
+
1511
+ elem_df['original_index'] = range(0, len(elem_df))
1512
+ if self.__check_isReverse(elem_df):
1513
+ # sort the element dataframe by Z, Y, X
1514
+ elem_df = elem_df.sort_values(['Z', 'Y', 'X'], ascending = [True, True, True])
1515
+ elem_df = elem_df.reset_index(drop=True)
1516
+ elem_df.reset_index()
1517
+
1518
+
1519
+ # create material map
1520
+ unique_mats = elem_df['MA12'].unique()
1521
+ # Create a dictionary mapping each MAT value -> integer id
1522
+ mat_mapping = {mat: i for i, mat in enumerate(unique_mats)}
1523
+
1524
+ '''
1525
+ create vtk points from elem
1526
+ '''
1527
+ vtk_points = vtkPoints()
1528
+ elemIDArray = vtkStringArray()
1529
+ elemIDArray.SetName('ELEME')
1530
+ matArray = vtkStringArray()
1531
+ matArray.SetName('Material')
1532
+ matIDArray = vtkIntArray()
1533
+ matIDArray.SetName('Material_ID')
1534
+ pmxArray = vtkDoubleArray()
1535
+ pmxArray.SetName('PMX')
1536
+ volxArray = vtkDoubleArray()
1537
+ volxArray.SetName('VOLX')
1538
+ elem_id_dist = {}
1539
+ self.sequence_dist = {}
1540
+ self.number_of_elements = len(elem_df.values)
1541
+ length = len(elem_df.values)
1542
+ for i in range(0, len(elem_df.values)):
1543
+ # y = elem_df['Y'][i]
1544
+ #[x, y, z] = [elem_df['X'][i], elem_df['Y'][i], elem_df['Z'][i]]
1545
+ vtk_points.InsertNextPoint(self.__parse_float(elem_df['X'][i]), self.__parse_float(
1546
+ elem_df['Y'][i]), self.__parse_float(elem_df['Z'][i]))
1547
+ elem_id = elem_df['ELEME'][i].strip()
1548
+ elemIDArray.InsertNextValue(elem_id)
1549
+ elem_id_dist[elem_id] = i
1550
+ original_index = int(elem_df['original_index'][i])
1551
+ self.sequence_dist[i] = original_index
1552
+ volxArray.InsertNextValue(self.__parse_float(elem_df['VOLX'][i]))
1553
+ matArray.InsertNextValue(elem_df['MA12'][i])
1554
+ matIDArray.InsertNextValue(mat_mapping[elem_df['MA12'][i]])
1555
+ pmxArray.InsertNextValue(self.__parse_float(elem_df['PMX'][i]))
1556
+
1557
+ self.elemIDArray = elemIDArray
1558
+
1559
+ '''
1560
+ compute permeability
1561
+ '''
1562
+ rock_colspecs = [(0, 5), (30, 40), (40, 50), (50, 60), (100, 110)] # define column widths
1563
+ rock_names = ['MAT', 'PER_1', 'PER_2', 'PER_3', 'SGR']
1564
+ self.rocks_buffer.seek(0)
1565
+ rocks_df = pd.read_fwf(self.rocks_buffer, colspecs=rock_colspecs, header=None,
1566
+ names=rock_names,
1567
+ dtype={'MAT':str, 'PER_1':str, 'PER_2':str, 'PER_3':str, 'SGR':str})
1568
+
1569
+ #sgr_dict = {}
1570
+ self.rock_dict = None
1571
+ if len(rocks_df) > 0:
1572
+ #rock_dict = {}
1573
+ rock_dict = []
1574
+ for i in range(0, len(rocks_df)):
1575
+ #rock_dict[rocks_df['MAT'][i]] = [self.parse_float(rocks_df['PER_1'][i]), self.parse_float(rocks_df['PER_2'][i]), self.parse_float(rocks_df['PER_3'][i])]
1576
+ rock_dict.append({
1577
+ 'id': i,
1578
+ 'rock_name': rocks_df['MAT'][i],
1579
+ 'per_1':self.__parse_float(rocks_df['PER_1'][i]),
1580
+ 'per_2':self.__parse_float(rocks_df['PER_2'][i]),
1581
+ 'per_3':self.__parse_float(rocks_df['PER_3'][i]),
1582
+ 'sgr':self.__parse_float(rocks_df['SGR'][i])
1583
+ })
1584
+ #sgr_dict[rocks_df['MAT'][i]] = self.__parse_float(rocks_df['SGR'][i])
1585
+ # compute per
1586
+ per_array = vtkDoubleArray()
1587
+ per_array.SetName('Permeability')
1588
+
1589
+ sgr_array = vtkDoubleArray()
1590
+ sgr_array.SetName('sgr')
1591
+ for i in range(0, len(elem_df.values)):
1592
+ value = 0
1593
+
1594
+ mat = matArray.GetValue(i)
1595
+ if self.__isInt(mat):
1596
+ mat_id = int(mat)
1597
+ value = pmxArray.GetValue(i) * rock_dict[mat_id-1]['per_1']
1598
+ sgr = rock_dict[mat_id-1]['sgr']
1599
+ else:
1600
+ rock_item = [rock for rock in rock_dict if rock['rock_name'] == mat]
1601
+ #if mat in rock_dict:
1602
+ value = pmxArray.GetValue(i) * rock_item[0]['per_1']
1603
+ sgr = rock_item[0]['sgr']
1604
+
1605
+ per_array.InsertNextValue(value)
1606
+ sgr_array.InsertNextValue(sgr)
1607
+ self.rock_dict = rock_dict
1608
+
1609
+
1610
+
1611
+ '''
1612
+ create connection cell array from conne
1613
+ '''
1614
+ d1_array = vtkDoubleArray()
1615
+ d1_array.SetName('D1')
1616
+ d2_array = vtkDoubleArray()
1617
+ d2_array.SetName('D2')
1618
+ area_array = vtkDoubleArray()
1619
+ area_array.SetName('AREAX')
1620
+
1621
+ line_cell_array = vtkCellArray()
1622
+ for i in range(0, len(conn_df.values)):
1623
+ elem_1_id = conn_df['ELEM_1'][i].strip()
1624
+ point_1_id = elem_id_dist[elem_1_id]
1625
+ elem_2_id = conn_df['ELEM_2'][i].strip()
1626
+ point_2_id = elem_id_dist[elem_2_id]
1627
+ # elem_conne_dist[point_id]
1628
+ cell = vtkLine()
1629
+ cell.GetPointIds().SetNumberOfIds(2)
1630
+ cell.GetPointIds().SetId(0, point_1_id)
1631
+ cell.GetPointIds().SetId(1, point_2_id)
1632
+ line_cell_array.InsertNextCell(cell)
1633
+ d1_array.InsertNextValue(conn_df['D1'][i])
1634
+ d2_array.InsertNextValue(conn_df['D2'][i])
1635
+ area_array.InsertNextValue(conn_df['AREAX'][i])
1636
+
1637
+ '''
1638
+ create vtu to display elem and conne
1639
+ '''
1640
+ self.elem_conne_path = os.path.join(
1641
+ self.setting.vis_dir, "elem_conne.vtu")
1642
+ elem_conne_vtu = vtkUnstructuredGrid()
1643
+
1644
+ elem_conne_vtu.SetPoints(vtk_points)
1645
+ elem_conne_vtu.GetPointData().AddArray(elemIDArray)
1646
+ elem_conne_vtu.GetPointData().AddArray(volxArray)
1647
+ elem_conne_vtu.GetPointData().AddArray(matArray)
1648
+ elem_conne_vtu.GetPointData().AddArray(matIDArray)
1649
+
1650
+ elem_conne_vtu.SetCells(4, line_cell_array)
1651
+ elem_conne_vtu.GetCellData().AddArray(d1_array)
1652
+ elem_conne_vtu.GetCellData().AddArray(d2_array)
1653
+ elem_conne_vtu.GetCellData().AddArray(area_array)
1654
+ vtu_writer = vtkXMLUnstructuredGridWriter()
1655
+ vtu_writer.SetFileName(self.elem_conne_path)
1656
+ vtu_writer.SetInputData(elem_conne_vtu)
1657
+ vtu_writer.Write()
1658
+
1659
+ if os.path.exists(self.elem_conne_path):
1660
+ print(f' ✓ Elements and connections created: {self.elem_conne_path}')
1661
+
1662
+ self.number_of_connections = elem_conne_vtu.GetNumberOfCells()
1663
+
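+ # --- Illustrative sketch (added for clarity; not part of the package source) ---
+ # __create_elem_conne above parses the fixed-width ELEME and CONNE blocks with
+ # pandas read_fwf and explicit column spans. The same mechanism on made-up data:
+ demo_buffer = io.StringIO("AAA110.25\nBBB120.50\n")
+ demo_df = pd.read_fwf(demo_buffer, colspecs=[(0, 5), (5, 9)], header=None, names=['ELEME', 'VOLX'])
+ # demo_df['ELEME'] -> ['AAA11', 'BBB12'];  demo_df['VOLX'] -> [0.25, 0.50]
+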
1664
+ def __create_main_geometry(self):
1665
+
1666
+
1667
+ '''
1668
+ find number of elements in x, y, z directions
1669
+ TODO: check if vtu bound is inside user input bounds
1670
+ '''
1671
+
1672
+
1673
+ # prepare the vtu file
1674
+ vtu_reader = vtkXMLUnstructuredGridReader()
1675
+ vtu_reader.SetFileName(self.elem_conne_path)
1676
+ vtu_reader.Update()
1677
+ elem_conne_vtu = vtu_reader.GetOutput()
1678
+
1679
+ d1_array = elem_conne_vtu.GetCellData().GetArray('D1')
1680
+ d2_array = elem_conne_vtu.GetCellData().GetArray('D2')
1681
+ #elemIDArray = elem_conne_vtu.GetPointData().GetArray('ELEME')
1682
+ volxArray = elem_conne_vtu.GetPointData().GetArray('VOLX')
1683
+ matArray = elem_conne_vtu.GetPointData().GetArray('Material')
1684
+ matIDArray = elem_conne_vtu.GetPointData().GetArray('Material_ID')
1685
+ pmxArray = elem_conne_vtu.GetPointData().GetArray('PMX')
1686
+ if self.rock_dict is not None:
1687
+ per_array = elem_conne_vtu.GetCellData().GetArray('Permeability')
1688
+ sgr_array = elem_conne_vtu.GetCellData().GetArray('sgr')
1689
+
1690
+
1691
+
1692
+ # get connection bound
1693
+ vtu_bounds = elem_conne_vtu.GetBounds()
1694
+
1695
+ # create array to keep x, y, z elements
1696
+ xyz_elem = []
1697
+ xyz_elem.append(0) # add x eleme
1698
+ xyz_elem.append(0) # add y eleme
1699
+ xyz_elem.append(0) # add z eleme
1700
+
1701
+ if vtu_bounds[0] == vtu_bounds[1]:
1702
+ xyz_elem[0] = 1
1703
+ if vtu_bounds[2] == vtu_bounds[3]:
1704
+ xyz_elem[1] = 1
1705
+ if vtu_bounds[4] == vtu_bounds[5]:
1706
+ xyz_elem[2] = 1
1707
+
1708
+ # find the number of structured points in the x, y, z directions
1709
+ for i in range(0, elem_conne_vtu.GetNumberOfPoints()):
1710
+ point = elem_conne_vtu.GetPoint(i)
1711
+ if xyz_elem[0] == 0 and point[0] == vtu_bounds[1]:
1712
+ xyz_elem[0] = i+1
1713
+ if xyz_elem[1] == 0 and point[0] == vtu_bounds[1] and point[1] == vtu_bounds[3]:
1714
+ if xyz_elem[0] != 0:
1715
+ xyz_elem[1] = int((i+1)/xyz_elem[0])
1716
+ break
1717
+
1718
+ if xyz_elem[1] != 0 and xyz_elem[0] != 0:
1719
+ xyz_elem[2] = int(elem_conne_vtu.GetNumberOfPoints() / xyz_elem[0] / xyz_elem[1])
1720
+ self.xyz_elem = xyz_elem
1721
+
1722
+ range_ratio = 1
1723
+ np_xyz_elem = np.array(xyz_elem)
1724
+ if xyz_elem[0] != 0 and xyz_elem[1] != 0 and xyz_elem[2] != 0:
1725
+ range_ratio = np.max(np_xyz_elem)/np.min(np_xyz_elem)
1726
+
1727
+
1728
+
1729
+ if all(xyz_elem) and range_ratio < 1000:
1730
+ if os.path.exists(self.setting.corners_file):
1731
+ self.setting.mesh_type = MeshType.StructuredGridOrth
1732
+ else:
1733
+ self.setting.mesh_type = MeshType.RegularGrid
1734
+
1735
+ else:
1736
+ is_parallel = self.__checkParallel(elem_conne_vtu)
1737
+ # check if polygonal mesh
1738
+ if os.path.exists(self.setting.corners_file):
1739
+
1740
+ if is_parallel:
1741
+ self.setting.mesh_type = MeshType.StructuredGridFree
1742
+ else:
1743
+ self.setting.mesh_type = MeshType.PolygonalMesh
1744
+ else:
1745
+ print('Error: Your mesh type is not supported')
1746
+ sys.exit(1)
1747
+
1748
+ print(f' Mesh type: {self.setting.mesh_type.name}')
1749
+
1750
+ # Read corners file to dataframe
1751
+ if os.path.exists(self.setting.corners_file):
1752
+ corners_buffer = io.StringIO()
1753
+ csv_headers = []
1754
+ line_number = -1
1755
+ with open(self.setting.corners_file, encoding="utf-8") as f:
1756
+ for line in f:
1757
+ line_number = line_number + 1
1758
+ values = line.strip().split(',')
1759
+ values = [x.replace('"', '') for x in values]
1760
+ if line_number == 0:
1761
+
1762
+ csv_headers = [x.strip() for x in values]
1763
+ csv_headers = csv_headers[:3]
1764
+ header_string = ','.join(csv_headers)
1765
+ corners_buffer.write(header_string + '\n')
1766
+ else:
1767
+ corners_buffer.write(','.join(values[:3]) + '\n')
1768
+ corners_buffer.seek(0)
1769
+ corners_df = pd.read_csv(corners_buffer, sep=',', header=0)
1770
+
1771
+ # Write four corners to vtu
1772
+ all_points = vtkPoints()
1773
+ all_cells = vtkCellArray()
1774
+
1775
+ for index, row in corners_df.iterrows():
1776
+ all_points.InsertNextPoint(row["X"], row["Y"], row["Z"])
1777
+ cell = vtkVertex()
1778
+ cell.GetPointIds().SetNumberOfIds(1)
1779
+ cell.GetPointIds().SetId(0, index)
1780
+ all_cells.InsertNextCell(cell)
1781
+
1782
+ corners_vtu = vtkUnstructuredGrid()
1783
+ corners_vtu.SetPoints(all_points)
1784
+ corners_vtu.SetCells(1, all_cells)
1785
+ print(f' Read corners from {self.setting.corners_file}')
1786
+
1787
+ # write four corners to vtu (debugging)
1788
+ if self.setting.debug:
1789
+ corners_vtu_writer = vtkXMLUnstructuredGridWriter()
1790
+ corners_vtu_writer.SetFileName(os.path.join(self.setting.vis_dir, "corners.vtu"))
1791
+ corners_vtu_writer.SetInputData(corners_vtu)
1792
+ corners_vtu_writer.Write()
1793
+
1794
+
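+ # --- Illustrative note (added for clarity; not part of the package source) ---
+ # The corners reader above keeps only the first three columns and later
+ # addresses them as row["X"], row["Y"], row["Z"], so a corners file is expected
+ # to look roughly like this (values are made up):
+ #   "X","Y","Z"
+ #   0.0,0.0,-1000.0
+ #   50.0,0.0,-1000.0
+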
1795
+
1796
+ if self.setting.mesh_type == MeshType.RegularGrid:
1797
+ '''
1798
+ * for RGrid from MeshMaker
1799
+ find the point indices that step along the x, y, z directions
1800
+ '''
1801
+ xyz_index = []
1802
+ xyz_index.append([]) # add x index
1803
+ xyz_index.append([]) # add y index
1804
+ xyz_index.append([]) # add z index
1805
+
1806
+ for i in range(0, xyz_elem[0]):
1807
+ xyz_index[0].append(i)
1808
+ for i in range(0, xyz_elem[1]):
1809
+ xyz_index[1].append(i * xyz_elem[0])
1810
+ for i in range(0, xyz_elem[2]):
1811
+ xyz_index[2].append(i * xyz_elem[0] * xyz_elem[1])
1812
+ xyz_coordinates = []
1813
+ xyz_coordinates.append([]) # add x coordinates
1814
+ xyz_coordinates.append([]) # add y coordinates
1815
+ xyz_coordinates.append([]) # add z coordinates
1816
+
1817
+ for key in range(0, len(xyz_index)):
1818
+ for index in xyz_index[key]:
1819
+ point = elem_conne_vtu.GetPoint(index)
1820
+ cellIDs = vtkIdList()
1821
+ # find all cells connect to this point
1822
+ elem_conne_vtu.GetPointCells(index, cellIDs)
1823
+ d1 = 0
1824
+ d2 = 0
1825
+ find_next = False
1826
+
1827
+ for i in range(0, cellIDs.GetNumberOfIds()):
1828
+ cellID = cellIDs.GetId(i)
1829
+ cell = elem_conne_vtu.GetCell(cellID)
1830
+ # find next id in line element
1831
+
1832
+ next_id = cell.GetPointId(1)
1833
+ if next_id == index:
1834
+ next_id = cell.GetPointId(0)
1835
+ if next_id in xyz_index[key]:
1836
+ d1 = d1_array.GetValue(cellID)
1837
+ d2 = d2_array.GetValue(cellID)
1838
+ find_next = True
1839
+ break
1840
+
1841
+ # if it has any connection to other node
1842
+ if find_next:
1843
+ # add first node
1844
+ if len(xyz_coordinates[key]) == 0:
1845
+ first_value = point[key] - d1
1846
+ if self.setting.known_bounds:
1847
+ first_value = self.setting.bounds[key*2]
1848
+ xyz_coordinates[key].append(first_value)
1849
+
1850
+ # add current node
1851
+ xyz_coordinates[key].append(point[key] + d1)
1852
+
1853
+ # add last node
1854
+ if len(xyz_coordinates[key]) == xyz_elem[key]:
1855
+ last_value = point[key] + d1 + (2*d2)
1856
+ if self.setting.known_bounds:
1857
+ last_value = self.setting.bounds[key*2+1]
1858
+ xyz_coordinates[key].append(last_value)
1859
+
1860
+ # if there is only one element in this dimension
1861
+ elif len(xyz_index[key]) == 1:
1862
+ if self.setting.known_bounds:
1863
+ xyz_coordinates[key].append(self.setting.bounds[key*2])
1864
+ xyz_coordinates[key].append(
1865
+ self.setting.bounds[key*2+1])
1866
+ else:
1867
+ if point[key] == 0:
1868
+ # find max elem
1869
+ xyz_elem_np_array = np.array(
1870
+ (xyz_elem[0], xyz_elem[1], xyz_elem[2]))
1871
+ max = xyz_elem_np_array.max()
1872
+ index = xyz_elem.index(max)
1873
+ guess_value = (
1874
+ vtu_bounds[index*2+1] - vtu_bounds[index*2]) / max
1875
+ xyz_coordinates[key].append(-1 * guess_value)
1876
+ xyz_coordinates[key].append(guess_value)
1877
+ else:
1878
+ xyz_coordinates[key].append(
1879
+ point[key] - abs(point[key]))
1880
+ xyz_coordinates[key].append(
1881
+ point[key] + abs(point[key]))
1882
+
1883
+ '''
1884
+ create a vtk rectilinear grid (rgrid) and write it to the main geometry file
1885
+ '''
1886
+ # self.rgrid_vtr = os.path.join(self.setting.vis_dir, "temp_rgrid.vtr")
1887
+ xyz_coords_array = []
1888
+ xyz_coords_array.append(vtkDoubleArray())
1889
+ xyz_coords_array.append(vtkDoubleArray())
1890
+ xyz_coords_array.append(vtkDoubleArray())
1891
+
1892
+ for key in range(0, len(xyz_coords_array)):
1893
+ for value in xyz_coordinates[key]:
1894
+ xyz_coords_array[key].InsertNextValue(value)
1895
+
1896
+ rGrid = vtkRectilinearGrid()
1897
+ rGrid.SetDimensions(xyz_elem[0]+1, xyz_elem[1]+1, xyz_elem[2]+1)
1898
+
1899
+ rGrid.SetXCoordinates(xyz_coords_array[0])
1900
+ rGrid.SetYCoordinates(xyz_coords_array[1])
1901
+ rGrid.SetZCoordinates(xyz_coords_array[2])
1902
+ rGrid.GetCellData().AddArray(self.elemIDArray)
1903
+ rGrid.GetCellData().AddArray(volxArray)
1904
+ rGrid.GetCellData().AddArray(matArray)
1905
+
1906
+ rGrid.GetCellData().AddArray(matIDArray)
1907
+ if self.rock_dict is not None:
1908
+ rGrid.GetCellData().AddArray(per_array)
1909
+ rGrid.GetCellData().AddArray(sgr_array)
1910
+ rGrid.GetCellData().AddArray(pmxArray)
1911
+
1912
+ self.main_geometry = os.path.join(
1913
+ self.setting.vis_dir, "main_geometry.vtr")
1914
+ self.__write_vtk_file(rGrid, self.main_geometry)
1915
+
1916
+
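+ # --- Illustrative note (added for clarity; not part of the package source) ---
+ # The RegularGrid branch above relies on a vtkRectilinearGrid needing one more
+ # coordinate than cells along every axis, e.g. for a made-up 2 x 1 x 1 cell grid:
+ #   rgrid.SetDimensions(3, 2, 2)                      # point dimensions = cells + 1
+ #   x coordinates (0.0, 0.5, 1.0), y (0.0, 1.0), z (0.0, 1.0)
+ #   rgrid.GetNumberOfCells() -> 2
+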
1917
+ if self.setting.mesh_type == MeshType.StructuredGridOrth:
1918
+
1919
+ #corners_df.to_csv(os.path.join(self.setting.vis_dir, "corners.csv"), index=False)
1920
+ # Step 1: Group by `x` and `y`, and sort `z` within each group
1921
+ corners_df = corners_df.groupby(['X', 'Y'], group_keys=False).apply(lambda group: group.sort_values(by='Z'))
1922
+ # Step 2: Set the new index using `x` and `y` columns
1923
+ corners_df = corners_df.set_index(['X', 'Y']).sort_index()
1924
+
1925
+ distinct_x = corners_df.index.get_level_values('X').unique()
1926
+ distinct_y = corners_df.index.get_level_values('Y').unique()
1927
+
1928
+ vts = vtkStructuredGrid()
1929
+ vts.SetDimensions(self.xyz_elem[0]+1, self.xyz_elem[1]+1, self.xyz_elem[2]+1)
1930
+ vts_points = vtkPoints()
1931
+
1932
+ for z_index in range(0, self.xyz_elem[2]+1):
1933
+ for y_index in range(0, len(distinct_y)):
1934
+ for x_index in range(0, len(distinct_x)):
1935
+ x = distinct_x[x_index]
1936
+ y = distinct_y[y_index]
1937
+ z_value = corners_df.loc[(x, y), 'Z'].iloc[z_index]
1938
+ vts_points.InsertNextPoint(x, y, z_value)
1939
+
1940
+ vts.SetPoints(vts_points)
1941
+ vts.GetCellData().AddArray(self.elemIDArray)
1942
+ vts.GetCellData().AddArray(volxArray)
1943
+ vts.GetCellData().AddArray(matArray)
1944
+
1945
+ vts.GetCellData().AddArray(matIDArray)
1946
+ if self.rock_dict is not None:
1947
+ vts.GetCellData().AddArray(per_array)
1948
+ vts.GetCellData().AddArray(sgr_array)
1949
+ self.main_geometry = os.path.join(self.setting.vis_dir, "main_geometry.vts")
1950
+ self.__write_vtk_file(vts, self.main_geometry)
1951
+
1952
+
1953
+ if self.setting.mesh_type == MeshType.StructuredGridFree:
1954
+ #initialize the locator
1955
+ pointTree = vtkPointLocator()
1956
+ pointTree.SetDataSet(corners_vtu)
1957
+ pointTree.BuildLocator()
1958
+
1959
+ cell_array = vtkCellArray()
1960
+
1961
+ for i in range(0, elem_conne_vtu.GetNumberOfPoints()):
1962
+ point = elem_conne_vtu.GetPoint(i)
1963
+ result = vtkIdList()
1964
+ # find the 8 corner points closest to the center of each element
1965
+ pointTree.FindClosestNPoints(8, point, result)
1966
+ result.Sort()
1967
+
1968
+ cell = vtkHexahedron()
1969
+ cell.GetPointIds().SetNumberOfIds(8)
1970
+ # need to sort the points in the right order
1971
+ points_array = []
1972
+
1973
+ for j in range(0, 8):
1974
+ points_array.append(corners_vtu.GetPoint(result.GetId(j)))
1975
+
1976
+ points_array = np.array(points_array)
1977
+ result_index = self.__reorder_hexahedron(points_array)
1978
+
1979
+ for j in range(0, 8):
1980
+ cell.GetPointIds().SetId(j, result.GetId(result_index[j]))
1981
+ cell_array.InsertNextCell(cell)
1982
+
1983
+ auto_corner_vtu = vtkUnstructuredGrid()
1984
+ auto_corner_vtu.SetPoints(all_points)
1985
+ auto_corner_vtu.SetCells(12, cell_array)
1986
+
1987
+ # TODO: compute mesh quality and fix bad cells
1988
+
1989
+
1990
+ auto_corner_vtu.GetCellData().AddArray(self.elemIDArray)
1991
+ auto_corner_vtu.GetCellData().AddArray(volxArray)
1992
+ auto_corner_vtu.GetCellData().AddArray(matArray)
1993
+ if self.rock_dict is not None:
1994
+ auto_corner_vtu.GetCellData().AddArray(per_array)
1995
+ auto_corner_vtu.GetCellData().AddArray(sgr_array)
1996
+ auto_corner_vtu.GetCellData().AddArray(matIDArray)
1997
+ self.main_geometry = os.path.join(self.setting.vis_dir, "main_geometry.vtu")
1998
+ self.__write_vtk_file(auto_corner_vtu, self.main_geometry)
1999
+
2000
+
2001
+ if self.setting.mesh_type == MeshType.PolygonalMesh:
2002
+
2003
+ # == Create `distinct_points` and `labeled_temp_elem` ==
2004
+ # Extract points from the grid
2005
+ points = np.array([elem_conne_vtu.GetPoint(i) for i in range(elem_conne_vtu.GetNumberOfPoints())])
2006
+ # Convert to Pandas DataFrame for easier manipulation
2007
+ df = pd.DataFrame(points, columns=["x", "y", "z"])
2008
+ # Round (x, y) values to avoid floating-point precision issues
2009
+ df[["x", "y"]] = df[["x", "y"]].round(10)
2010
+
2011
+ #df.to_csv(os.path.join(self.setting.vis_dir, 'all_points.csv'))
2012
+ # Find distinct (x, y) sets and create an index mapping
2013
+ distinct_xy = df[["x", "y"]].drop_duplicates().reset_index(drop=True)
2014
+ distinct_xy["Elem_Index"] = distinct_xy.index # Assign an index to each unique (x, y)
2015
+ #distinct_xy.to_csv(os.path.join(self.setting.vis_dir, 'distinct_xy.csv'))
2016
+
2017
+ # add distinct_xy to a vtkpoints object
2018
+ all_points = vtkPoints()
2019
+ all_points.SetDataTypeToDouble()
2020
+ for i in range(len(distinct_xy)):
2021
+ values = distinct_xy.iloc[i].values
2022
+ all_points.InsertNextPoint(values[0], values[1], 0)
2023
+
2024
+ # create a polydata object
2025
+ distinct_points = vtkPolyData()
2026
+ distinct_points.SetPoints(all_points)
2027
+
2028
+ cell_array = vtkCellArray()
2029
+ for i in range(all_points.GetNumberOfPoints()):
2030
+ vertex = vtkVertex()
2031
+ vertex.GetPointIds().SetId(0, i)
2032
+ cell_array.InsertNextCell(vertex)
2033
+
2034
+ distinct_points.SetVerts(cell_array)
2035
+ xy_index_labels = distinct_xy["Elem_Index"].to_numpy()
2036
+ elem_id_array = vtkIntArray()
2037
+ elem_id_array.SetName("Elem_Index")
2038
+ for i in range(len(xy_index_labels)):
2039
+ elem_id_array.InsertNextValue(int(xy_index_labels[i]))
2040
+ distinct_points.GetPointData().AddArray(elem_id_array)
2041
+
2042
+ if self.setting.debug:
2043
+ # Write the distinct points to a VTU file for debugging
2044
+ writer = vtkXMLPolyDataWriter()
2045
+ writer.SetInputData(distinct_points)
2046
+ writer.SetFileName(os.path.join(self.setting.vis_dir, 'distinct_points.vtp'))
2047
+ writer.Write()
2048
+
2049
+
2050
+ # Merge the index back to the original DataFrame
2051
+ df = df.merge(distinct_xy, on=["x", "y"], how="left")
2052
+
2053
+ # Rank Z in ascending order within each (x, y) group (0 = lowest Z)
2054
+ df["Horizon_ID"] = df.groupby(["x", "y"])["z"].rank(method="first", ascending=True).astype(int) - 1
2055
+
2056
+ # Convert the labels to NumPy arrays
2057
+
2058
+ z_order_labels = df["Horizon_ID"].to_numpy()
2059
+ xy_index_labels = df["Elem_Index"].to_numpy()
2060
+
2061
+ # Add labels to the VTU file
2062
+ xy_index_array = vtkIntArray()
2063
+ xy_index_array.SetName("Elem_Index")
2064
+ xy_index_array.SetNumberOfComponents(1)
2065
+ xy_index_array.SetNumberOfTuples(len(xy_index_labels))
2066
+
2067
+ z_order_array = vtkIntArray()
2068
+ z_order_array.SetName("Horizon_ID")
2069
+ z_order_array.SetNumberOfComponents(1)
2070
+ z_order_array.SetNumberOfTuples(len(z_order_labels))
2071
+
2072
+ for i, (xy_idx, z_ord) in enumerate(zip(xy_index_labels, z_order_labels)):
2073
+ xy_index_array.SetValue(i, int(xy_idx))
2074
+ z_order_array.SetValue(i, int(z_ord))
2075
+
2076
+ labeled_temp_elem = vtkUnstructuredGrid()
2077
+ labeled_temp_elem.DeepCopy(elem_conne_vtu)
2078
+ labeled_temp_elem.GetPointData().AddArray(xy_index_array)
2079
+ labeled_temp_elem.GetPointData().AddArray(z_order_array)
2080
+ # Write the modified VTU file with labels
2081
+ if self.setting.debug:
2082
+ writer = vtkXMLUnstructuredGridWriter()
2083
+ writer.SetFileName(os.path.join(self.setting.vis_dir, "labeled_temp_elem.vtu"))
2084
+ writer.SetInputData(labeled_temp_elem)
2085
+ writer.Write()
2086
+
2087
+
2088
+ # == Create a VTK 2D `voronoi` mesh ==
2089
+ # Create a Voronoi diagram from the cell centers
2090
+ print("crating voronoi...")
2091
+ voro = vtkVoronoi2D()
2092
+ voro.SetInputData(distinct_points)
2093
+ voro.SetMaximumNumberOfTileClips(distinct_points.GetNumberOfPoints())
2094
+ #voro.set
2095
+ voro.Update()
2096
+ voronoi = voro.GetOutput()
2097
+
2098
+ if self.setting.debug:
2099
+ # Write the Voronoi mesh to a VTU file for debugging
2100
+ voronoi_writer = vtkXMLPolyDataWriter()
2101
+ voronoi_writer.SetInputData(voronoi)
2102
+ voronoi_writer.SetFileName(os.path.join(self.setting.vis_dir, 'voronoi.vtp'))
2103
+ voronoi_writer.Write()
2104
+ print(f' ✓ Main voronoi created: {os.path.join(self.setting.vis_dir, "voronoi.vtp")}')
2105
+
2106
+
2107
+ # Count the number of layers
2108
+
2109
+ number_of_layers = elem_conne_vtu.GetNumberOfPoints() // distinct_points.GetNumberOfPoints()
2110
+ print(f'number_of_layers: {number_of_layers}')
2111
+
2112
+ # == Create `distinct_corners_points` and `labeled_corners` ==
2113
+ # Clean the grid
2114
+ clean_filter = vtkCleanUnstructuredGrid()
2115
+ clean_filter.SetInputData(corners_vtu)
2116
+ clean_filter.Update()
2117
+ labeled_corners = clean_filter.GetOutput()
2118
+
2119
+ # Extract points from the grid
2120
+ points = np.array([labeled_corners.GetPoint(i) for i in range(labeled_corners.GetNumberOfPoints())])
2121
+
2122
+ # Convert to Pandas DataFrame for easier manipulation
2123
+ df = pd.DataFrame(points, columns=["x", "y", "z"])
2124
+
2125
+ # Round (x, y) values to avoid floating-point precision issues
2126
+ df[["x", "y"]] = df[["x", "y"]].round(6)
2127
+
2128
+ # Find distinct (x, y) sets and create an index mapping
2129
+ distinct_xy = df[["x", "y"]].drop_duplicates().reset_index(drop=True)
2130
+
2131
+ distinct_xy["XY_Index"] = distinct_xy.index # Assign an index to each unique (x, y)
2132
+
2133
+
2134
+ # add distinct_xy to a vtkpoints object
2135
+ index_array = vtkIntArray()
2136
+ index_array.SetName("XY_Index")
2137
+ all_points = vtkPoints()
2138
+ all_points.SetDataTypeToDouble()
2139
+ for i in range(len(distinct_xy)):
2140
+ values = distinct_xy.iloc[i].values
2141
+ all_points.InsertNextPoint(values[0], values[1], 0)
2142
+ index_array.InsertNextValue(int(values[2]))
2143
+ # create a polydata object
2144
+ distinct_corners_points = vtkPolyData()
2145
+ distinct_corners_points.SetPoints(all_points)
2146
+ distinct_corners_points.GetPointData().AddArray(index_array)
2147
+
2148
+ cell_array = vtkCellArray()
2149
+ for i in range(all_points.GetNumberOfPoints()):
2150
+ vertex = vtkVertex()
2151
+ vertex.GetPointIds().SetId(0, i)
2152
+ cell_array.InsertNextCell(vertex)
2153
+
2154
+ distinct_corners_points.SetVerts(cell_array)
2155
+
2156
+ if self.setting.debug:
2157
+ # Write the distinct points to a VTU file for debugging
2158
+ writer = vtkXMLPolyDataWriter()
2159
+ writer.SetInputData(distinct_corners_points)
2160
+ writer.SetFileName(os.path.join(self.setting.vis_dir, 'distinct_corners_points.vtp'))
2161
+ writer.Write()
2162
+
2163
+
2164
+
2165
+ # Merge the index back to the original DataFrame
2166
+ df = df.merge(distinct_xy, on=["x", "y"], how="left")
2167
+
2168
+ # Rank Z in ascending order within each (x, y) group (0 = lowest Z)
2169
+ df["Z_Order"] = df.groupby(["x", "y"])["z"].rank(method="first", ascending=True).astype(int) - 1
2170
+
2171
+ # Convert the labels to NumPy arrays
2172
+ xy_index_labels = df["XY_Index"].to_numpy()
2173
+ z_order_labels = df["Z_Order"].to_numpy()
2174
+
2175
+ # Add labels to the VTU file
2176
+ xy_index_array = vtkIntArray()
2177
+ xy_index_array.SetName("XY_Index")
2178
+ xy_index_array.SetNumberOfComponents(1)
2179
+ xy_index_array.SetNumberOfTuples(len(xy_index_labels))
2180
+
2181
+ z_order_array = vtkIntArray()
2182
+ z_order_array.SetName("Layer_ID")
2183
+ z_order_array.SetNumberOfComponents(1)
2184
+ z_order_array.SetNumberOfTuples(len(z_order_labels))
2185
+
2186
+ for i, (xy_idx, z_ord) in enumerate(zip(xy_index_labels, z_order_labels)):
2187
+ xy_index_array.SetValue(i, int(xy_idx))
2188
+ z_order_array.SetValue(i, int(z_ord))
2189
+
2190
+ # Attach the label arrays to the unstructured grid
2191
+ labeled_corners.GetPointData().AddArray(xy_index_array)
2192
+ labeled_corners.GetPointData().AddArray(z_order_array)
2193
+ # Write the modified VTU file with labels
2194
+ if self.setting.debug:
2195
+ writer = vtkXMLUnstructuredGridWriter()
2196
+ writer.SetFileName(os.path.join(self.setting.vis_dir, "labeled_corners.vtu"))
2197
+ writer.SetInputData(labeled_corners)
2198
+ writer.Write()
2199
+
2200
+
2201
+ # == Create `distinct_corners_points_voronoi` ==
2202
+ #- use `distinct_points` as index, `voronoi` as map, search `distinct_corners_points` to recreate the correct voronoi mesh
2203
+
2204
+ distinct_corners_points_locator = vtkPointLocator()
2205
+ distinct_corners_points_locator.SetDataSet(distinct_corners_points)
2206
+ distinct_corners_points_locator.BuildLocator()
2207
+
2208
+ correct_voronoi_cell_array = vtkCellArray()
2209
+ elem_id_array = vtkIntArray()
2210
+ elem_id_array.SetName("Elem_Index")
2211
+ # go through each points in distinct_points
2212
+ for i in range(distinct_points.GetNumberOfPoints()):
2213
+ point = distinct_points.GetPoint(i)
2214
+ voronoi_cell = voronoi.GetCell(i)
2215
+ # go through each points in voronoi cell
2216
+ polygon = vtkPolygon()
2217
+ center_point_inserted = False
2218
+ center_point_id = distinct_corners_points_locator.FindClosestPoint(point)
2219
+
2220
+ for j in range(voronoi_cell.GetNumberOfPoints()):
2221
+ cell_point_id = voronoi_cell.GetPointId(j)
2222
+ cell_point = voronoi.GetPoint(cell_point_id)
2223
+ dist2 = reference(0)
2224
+ corners_point_id = distinct_corners_points_locator.FindClosestPointWithinRadius(1, [cell_point[0], cell_point[1], cell_point[2]], dist2)
2225
+
2226
+ if corners_point_id == -1:
2227
+ closest_corners_point_id = distinct_corners_points_locator.FindClosestPoint(cell_point)
2228
+ if (not center_point_inserted) and j+1 < voronoi_cell.GetNumberOfPoints():
2229
+ next_cell_point_id = voronoi_cell.GetPointId(j+1)
2230
+ next_cell_point = voronoi.GetPoint(next_cell_point_id)
2231
+ dist2 = reference(0)
2232
+ next_corners_point_id = distinct_corners_points_locator.FindClosestPointWithinRadius(1, [next_cell_point[0], next_cell_point[1], next_cell_point[2]], dist2)
2233
+ # next point on boundary, so insert the center point after this point
2234
+ if next_corners_point_id == -1:
2235
+ polygon.GetPointIds().InsertNextId(closest_corners_point_id)
2236
+ polygon.GetPointIds().InsertNextId(center_point_id)
2237
+ # next point is not on boundary, so insert the center point before this point
2238
+ else:
2239
+ polygon.GetPointIds().InsertNextId(center_point_id)
2240
+ polygon.GetPointIds().InsertNextId(closest_corners_point_id)
2241
+ center_point_inserted = True
2242
+
2243
+ else:
2244
+ polygon.GetPointIds().InsertNextId(closest_corners_point_id)
2245
+
2246
+ else:
2247
+ polygon.GetPointIds().InsertNextId(corners_point_id)
2248
+ correct_voronoi_cell_array.InsertNextCell(polygon)
2249
+ elem_id_array.InsertNextValue(i)
2250
+
2251
+ distinct_corners_points_voronoi = vtkPolyData()
2252
+ distinct_corners_points_voronoi.SetPoints(distinct_corners_points.GetPoints())
2253
+ distinct_corners_points_voronoi.SetPolys(correct_voronoi_cell_array)
2254
+ distinct_corners_points_voronoi.GetCellData().AddArray(elem_id_array)
2255
+ distinct_corners_points_voronoi.GetPointData().AddArray(distinct_corners_points.GetPointData().GetArray("XY_Index"))
2256
+
2257
+ if self.setting.debug:
2258
+ # Write the Voronoi mesh to a VTU file for debugging
2259
+ voronoi_writer = vtkXMLPolyDataWriter()
2260
+ voronoi_writer.SetInputData(distinct_corners_points_voronoi)
2261
+ voronoi_writer.SetFileName(os.path.join(self.setting.vis_dir, 'distinct_corners_points_voronoi.vtp'))
2262
+ voronoi_writer.Write()
2263
+
2264
+ # == create the geometry ==
2265
+ #- use `labeled_temp_elem` points as index (cell sequence)
2266
+ #- from layer 0 to layer max, horizon 0 to horizon max-1
2267
+ #- use `distinct_corners_points_voronoi` as 2D polygon map to find the actual points from `labeled_corners`
2268
+ #- create each polyhedron by adding top, bottom and side surfaces
2269
+
2270
+ main_geometray = vtkUnstructuredGrid()
2271
+ main_geometray.SetPoints(labeled_corners.GetPoints())
2272
+
2273
+ labeled_corners_points = np.array([labeled_corners.GetPoint(i) for i in range(labeled_corners.GetNumberOfPoints())])
2274
+ labeled_corners_df = pd.DataFrame(labeled_corners_points, columns=["x", "y", "z"])
2275
+ for i in range(labeled_corners.GetPointData().GetNumberOfArrays()):
2276
+ array_name = labeled_corners.GetPointData().GetArrayName(i)
2277
+ array = labeled_corners.GetPointData().GetArray(i)
2278
+ array_values = np.array([array.GetValue(j) for j in range(array.GetNumberOfTuples())])
2279
+ labeled_corners_df[array_name] = array_values
2280
+ #labeled_corners_df.describe()
2281
+
2282
+ horizon_id_array = labeled_temp_elem.GetPointData().GetArray("Horizon_ID")
2283
+ elem_id_array = labeled_temp_elem.GetPointData().GetArray("Elem_Index")
2284
+
2285
+ for index in range(labeled_temp_elem.GetNumberOfPoints()):
2286
+ #for index in range(10):
2287
+ # get voronoi cell
2288
+ voronoi_cell = distinct_corners_points_voronoi.GetCell(elem_id_array.GetValue(index))
2289
+ cell_xy_index = [ voronoi_cell.GetPointId(i) for i in range(voronoi_cell.GetNumberOfPoints())]
2290
+
2291
+ horizon_id = horizon_id_array.GetValue(index)
2292
+ # top face
2293
+
2294
+ polyhedron_faces = [] # top, bottom, sides
2295
+ top_face = []
2296
+ for xy_index in cell_xy_index:
2297
+ # find point in labeled_corners_df
2298
+ matching_indexes = labeled_corners_df.index[(labeled_corners_df["Layer_ID"] == horizon_id + 1) & (labeled_corners_df["XY_Index"] == xy_index)]
2299
+ top_face.append(matching_indexes[0])
2300
+ polyhedron_faces.append(top_face)
2301
+
2302
+ bottom_face = []
2303
+ for xy_index in cell_xy_index:
2304
+ # find point in labeled_corners_df
2305
+ matching_indexes = labeled_corners_df.index[(labeled_corners_df["Layer_ID"] == horizon_id) & (labeled_corners_df["XY_Index"] == xy_index)]
2306
+ bottom_face.append(matching_indexes[0])
2307
+ polyhedron_faces.append(bottom_face)
2308
+
2309
+ for j in range(len(cell_xy_index)):
2310
+ if j == len(cell_xy_index) -1:
2311
+ side_face = [top_face[0], top_face[j], bottom_face[j], bottom_face[0]]
2312
+ else:
2313
+ side_face = [top_face[j+1], top_face[j], bottom_face[j], bottom_face[j+1]]
2314
+ polyhedron_faces.append(side_face)
2315
+
2316
+ # create a polyhedron
2317
+ number_of_faces = voronoi_cell.GetNumberOfPoints() + 2
2318
+ polyhedron_faces_idlist = vtkIdList()
2319
+ # Number of faces that make up the cell.
2320
+ polyhedron_faces_idlist.InsertNextId(number_of_faces)
2321
+ for face in polyhedron_faces:
2322
+ # Number of points in the face == numberOfFaceVertices
2323
+ polyhedron_faces_idlist.InsertNextId(len(face))
2324
+ # Insert the pointIds for that face.
2325
+ [polyhedron_faces_idlist.InsertNextId(i) for i in face]
2326
+
2327
+ main_geometray.InsertNextCell(VTK_POLYHEDRON, polyhedron_faces_idlist)
2328
+
2329
+
2330
+ main_geometray.GetCellData().AddArray(self.elemIDArray)
2331
+ main_geometray.GetCellData().AddArray(volxArray)
2332
+ main_geometray.GetCellData().AddArray(matArray)
2333
+ if self.rock_dict is not None:
2334
+ main_geometray.GetCellData().AddArray(per_array)
2335
+ main_geometray.GetCellData().AddArray(sgr_array)
2336
+
2337
+ main_geometray.GetCellData().AddArray(matIDArray)
2338
+ main_geometray.GetCellData().AddArray(horizon_id_array)
2339
+ self.main_geometry = os.path.join(
2340
+ self.setting.vis_dir, "main_geometry.vtu")
2341
+ self.__write_vtk_file(main_geometray, self.main_geometry)
2342
+
2343
+ if os.path.exists(self.main_geometry):
2344
+ print(f' ✓ Main geometry created: {self.main_geometry}')
2345
+
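+ # --- Illustrative sketch (added for clarity; not part of the package source) ---
+ # The polygonal-mesh branch above inserts VTK_POLYHEDRON cells through a face
+ # stream: the total number of faces, then for each face its vertex count
+ # followed by its point ids. The same layout for a made-up cube (point ids 0-7):
+ demo_faces = [[0, 1, 2, 3], [4, 5, 6, 7], [0, 1, 5, 4], [1, 2, 6, 5], [2, 3, 7, 6], [3, 0, 4, 7]]
+ demo_face_stream = vtkIdList()
+ demo_face_stream.InsertNextId(len(demo_faces))
+ for demo_face in demo_faces:
+ demo_face_stream.InsertNextId(len(demo_face))
+ for demo_pid in demo_face:
+ demo_face_stream.InsertNextId(demo_pid)
+ # demo_face_stream can then be passed to InsertNextCell(VTK_POLYHEDRON, demo_face_stream).
+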
2346
+ def __reorder_hexahedron(self, points):
2347
+ # Step 1: Sort points by Z coordinate to separate bottom and top layers
2348
+ sorted_indices = np.argsort(points[:, 2]) # Sort indices by z-coordinate
2349
+ bottom_indices = sorted_indices[:4] # 4 lowest Z values
2350
+ top_indices = sorted_indices[4:] # 4 highest Z values
2351
+
2352
+ # Step 2: Compute centroid of bottom plane for angle sorting
2353
+ centroid = np.mean(points[bottom_indices, :2], axis=0) # Only x and y
2354
+
2355
+ # Step 3: Compute angles from centroid and sort counterclockwise
2356
+ def angle_from_centroid(idx):
2357
+ p = points[idx]
2358
+ return np.arctan2(p[1] - centroid[1], p[0] - centroid[0])
2359
+
2360
+ bottom_order = sorted(bottom_indices, key=angle_from_centroid)
2361
+ top_order = sorted(top_indices, key=angle_from_centroid)
2362
+
2363
+ # Step 4: Assign correct VTK order
2364
+ reordered_indices = np.array(bottom_order + top_order)
2365
+
2366
+ return reordered_indices
2367
+
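+ # --- Illustrative sketch (added for clarity; not part of the package source) ---
+ # __reorder_hexahedron above splits the 8 corners into a bottom and a top quad
+ # by Z, then orders each quad counter-clockwise around its centroid. The same
+ # steps on a made-up unit cube whose corners arrive in arbitrary order:
+ demo_cube = np.array([[1., 1., 1.], [0., 0., 0.], [1., 0., 0.], [0., 1., 1.],
+ [0., 1., 0.], [1., 1., 0.], [1., 0., 1.], [0., 0., 1.]])
+ demo_sorted = np.argsort(demo_cube[:, 2])
+ demo_bottom, demo_top = demo_sorted[:4], demo_sorted[4:]
+ demo_centroid = demo_cube[demo_bottom, :2].mean(axis=0)
+ def demo_angle(idx):
+ return np.arctan2(demo_cube[idx, 1] - demo_centroid[1], demo_cube[idx, 0] - demo_centroid[0])
+ demo_order = sorted(demo_bottom, key=demo_angle) + sorted(demo_top, key=demo_angle)
+ # demo_order lists the four z=0 corners counter-clockwise, then the four z=1 corners.
+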
2368
+ def __checkParallel(self, elem_conne):
2369
+ dir1 = self.__get_direction_from_polyline(elem_conne.GetCell(0))
2370
+ dir2 = self.__get_direction_from_polyline(elem_conne.GetCell(1))
2371
+ if self.__are_parallel(dir1, dir2):
2372
+ return True
2373
+ return False
2374
+
2375
+ def __get_direction_from_polyline(self, polyline):
2376
+ """
2377
+ Given a vtkPolyLine, compute a representative normalized direction vector.
2378
+ Here we use the vector from the first point to the last point.
2379
+ """
2380
+ points = polyline.GetPoints()
2381
+ num_points = points.GetNumberOfPoints()
2382
+ if num_points < 2:
2383
+ return None # Not enough points to define a direction.
2384
+
2385
+ first_pt = np.array(points.GetPoint(0))
2386
+ first_pt[2] = 0
2387
+ last_pt = np.array(points.GetPoint(num_points - 1))
2388
+ last_pt[2] = 0
2389
+ direction = last_pt - first_pt
2390
+ norm = np.linalg.norm(direction)
2391
+ if norm == 0:
2392
+ return None # Degenerate polyline
2393
+ return direction / norm
2394
+
2395
+ def __are_parallel(self, dir1, dir2, tol=1e-6):
2396
+ """
2397
+ Two vectors are parallel if their cross product is nearly zero
2398
+ (i.e., the magnitude of the cross product is less than a tolerance)
2399
+ """
2400
+ cross_prod = np.cross(dir1, dir2)
2401
+ return np.linalg.norm(cross_prod) < tol
2402
+
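+ # --- Illustrative check (added for clarity; not part of the package source) ---
+ # The cross-product test above on made-up direction vectors:
+ #   np.linalg.norm(np.cross([1., 0., 0.], [2., 0., 0.])) < 1e-6  -> True  (parallel)
+ #   np.linalg.norm(np.cross([1., 0., 0.], [0., 1., 0.])) < 1e-6  -> False (not parallel)
+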
2403
+ def __check_isReverse(self, elem_df):
2404
+ """
2405
+ Checks if the given DataFrame has a reverse sequence and determines the mesh plane.
2406
+ This method performs the following steps:
2407
+ 1. Checks the real plane by analyzing the standard deviation of the 'X', 'Y', and 'Z' columns.
2408
+ 2. Determines whether the element sequence increases in the order X -> Y -> Z.
2409
+ 3. If the sequence is not in that order, reports it so the caller can re-sort (re-index) the elements.
2410
+ Parameters:
2411
+ elem_df (pandas.DataFrame): The DataFrame containing the elements to be checked.
2412
+ Returns:
2413
+ bool: True if the sequence is reversed, False otherwise.
2414
+ """
2415
+ describe = elem_df.describe()
2416
+
2417
+ # 1. check real plane
2418
+ mesh_plane = MeshPlane.unknown
2419
+ still_col = [1, 1, 1]
2420
+ if describe['X']['count'] == 0 or describe['X']['std'] == 0:
2421
+ still_col[0] = 0
2422
+ if describe['Y']['count'] == 0 or describe['Y']['std'] == 0:
2423
+ still_col[1] = 0
2424
+ if describe['Z']['count'] == 0 or describe['Z']['std'] == 0:
2425
+ still_col[2] = 0
2426
+
2427
+
2428
+ if still_col == [1, 1, 1]:
2429
+ mesh_plane = MeshPlane.XYZ
2430
+ elif still_col == [1, 0, 1]:
2431
+ mesh_plane = MeshPlane.XZ
2432
+ elif still_col == [1, 0, 0]:
2433
+ mesh_plane = MeshPlane.X
2434
+ elif still_col == [1, 1, 0]:
2435
+ mesh_plane = MeshPlane.XY
2436
+ elif still_col == [0, 1, 1]:
2437
+ mesh_plane = MeshPlane.YZ
2438
+ elif still_col == [0, 0, 1]:
2439
+ mesh_plane = MeshPlane.Z
2440
+ elif still_col == [0, 1, 0]:
2441
+ mesh_plane = MeshPlane.Y
2442
+
2443
+ self.setting.mesh_plane = mesh_plane
2444
+
2445
+ # check the order in which the coordinates increase
2446
+ map = ['X', 'Y', 'Z']
2447
+ head = elem_df.head()
2448
+ head_describe = head.describe()
2449
+
2450
+
2451
+ is_reverse = False
2452
+ # check whether the first varying column increases within the first rows
2453
+ for i in range(0, 3):
2454
+ if still_col[i] == 1:
2455
+ if head_describe[map[i]]['std'] == 0:
2456
+ is_reverse = True
2457
+ break
2458
+ else:
2459
+ break
2460
+
2461
+ #self.setting.isReverse = is_reverse
2462
+ return is_reverse
2463
+
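+ # --- Illustrative sketch (added for clarity; not part of the package source) ---
+ # What __check_isReverse above reacts to: if the first varying coordinate
+ # (checked in X, Y, Z order) is constant over the first few elements, the
+ # listing is not X-fastest and the caller re-sorts it. A made-up 2 x 1 x 6
+ # grid written Z-fastest:
+ demo_elem = pd.DataFrame({'X': np.repeat([0., 1.], 6),
+ 'Y': np.zeros(12),
+ 'Z': np.tile(np.arange(6, dtype=float), 2)})
+ # demo_elem['X'] varies overall but is constant over demo_elem.head(), so the
+ # check reports a reversed (Z-fastest) element sequence.
+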
2464
+ def __write_vtk_file(self, file, file_path):
2465
+
2466
+ extension = os.path.splitext(self.main_geometry)[1]
2467
+ writer = None
2468
+ if extension == '.vtr':
2469
+ writer = vtkXMLRectilinearGridWriter()
2470
+
2471
+ elif extension == '.vts':
2472
+ writer = vtkXMLStructuredGridWriter()
2473
+
2474
+ elif extension == '.vtu':
2475
+ writer = vtkXMLUnstructuredGridWriter()
2476
+
2477
+ writer.SetFileName(file_path)
2478
+ writer.SetInputData(file)
2479
+ writer.SetDataModeToBinary()
2480
+ writer.Write()
2481
+
2482
+ def __read_vtk_file(self, file_path):
2483
+
2484
+ extension = os.path.splitext(self.main_geometry)[1]
2485
+ if extension == '.vtr':
2486
+ reader = vtkXMLRectilinearGridReader()
2487
+ reader.SetFileName(file_path)
2488
+ reader.Update()
2489
+ return reader.GetOutput()
2490
+ elif extension == '.vts':
2491
+ reader = vtkXMLStructuredGridReader()
2492
+ reader.SetFileName(file_path)
2493
+ reader.Update()
2494
+ return reader.GetOutput()
2495
+ elif extension == '.vtu':
2496
+ reader = vtkXMLUnstructuredGridReader()
2497
+ reader.SetFileName(file_path)
2498
+ reader.Update()
2499
+ return reader.GetOutput()
2500
+
2501
+ def __parse_float(self, s):
2502
+ try:
2503
+ value = float(s)
2504
+ if np.isnan(value):
2505
+ return 0
2506
+ return value
2507
+
2508
+ except ValueError:
2509
+ if '-' in s:
2510
+ segments = s.split('-')
2511
+ if len(segments) == 2:
2512
+ return float(segments[0] + 'E-' + segments[1])
2513
+ if len(segments) == 3:
2514
+ return float('-0' + segments[1] + 'E-' + segments[2])
2515
+ if '+' in s:
2516
+ segments = s.split('+')
2517
+ if len(segments) == 2:
2518
+ return float(segments[0] + 'E+' + segments[1])
2519
+ if len(segments) == 3:
2520
+ return float('0' + segments[1] + 'E+' + segments[2])
2521
+ else:
2522
+ print(f'{s} can\'t parse to float.')
2523
+ return 0
2524
+
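+ # --- Illustrative note (added for clarity; not part of the package source) ---
+ # __parse_float above repairs exponent strings written without the 'E', as can
+ # happen in fixed-width Fortran/TOUGH output. Made-up inputs and the values the
+ # branches above produce:
+ #   '0.1234-5' -> 0.1234E-5  ->  1.234e-06
+ #   '-0.50-3'  -> -00.50E-3  -> -5.0e-04
+ #   '0.50+2'   -> 0.50E+2    ->  50.0
+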
2525
+ def __isInt(self, s):
2526
+ try:
2527
+ value = int(s)
2528
+ if np.isnan(value):
2529
+ return False
2530
+ return True
2531
+
2532
+ except ValueError:
2533
+ return False
2534
+
2535
+ def __check_TOUGH_version(self):
2536
+ out_file_path = self.current_out_file
2537
+ extension = os.path.splitext(out_file_path)[1].lower()
2538
+ if extension == '.tec':
2539
+ self.setting.out_format_type = OutType.TEC
2540
+ self.setting.tough_version = ToughVersion.TOUGHReact
2541
+ return
2542
+ #return OutType.TEC
2543
+ elif extension == '.csv':
2544
+ self.setting.out_format_type = OutType.CSV
2545
+ line_number = 0
2546
+ with open(out_file_path, encoding="utf-8") as f:
2547
+ for line in f:
2548
+ if line_number == 0:
2549
+ first_col = line.split(',')[0].strip().lower()
2550
+ if 'time' in first_col:
2551
+ self.setting.tough_version = ToughVersion.TOUGH2
2552
+ return
2553
+ if line_number == 2:
2554
+ values = line.strip().split(',')
2555
+ if len(values) == 1 and 'time' in values[0].strip().lower():
2556
+ self.setting.tough_version = ToughVersion.TOUGH3
2557
+ return
2558
+ print(f'The format of your output file, {out_file_path}, is not correct. Please double-check your file.')
2559
+ sys.exit(1)
2560
+ line_number = line_number + 1
2561
+
2562
+ else:
2563
+ print(f'The format of your output file, {out_file_path}, is not supported.\nPlease use either .csv or .tec file format instead.')
2564
+ sys.exit(1)
2565
+
2566
+ def __check_if_block_end(self, line, line_number):
2567
+
2568
+ if 'ENDCY' in line:
2569
+ return True
2570
+
2571
+ if len(line) < 10 and line_number == 1:
2572
+ return False
2573
+
2574
+ if len(line) < 10:
2575
+ return True
2576
+ if line.startswith('\n') or line.startswith(' \n'):
2577
+ return True
2578
+ if line.startswith('\r') or line.startswith(' \r'):
2579
+ return True
2580
+
2581
+ trimmed = line.lstrip()
2582
+ if len(trimmed)>6 and trimmed[5] == "-" and trimmed[6] == "-":
2583
+ return True
2584
+ return False
2585
+
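+ # --- Illustrative note (added for clarity; not part of the package source) ---
+ # Lines the heuristic above treats as the end of a block (made-up samples):
+ #   a line containing 'ENDCY'                                  -> True
+ #   a blank or very short line (when it is not the first line) -> True
+ #   a line whose 6th and 7th non-blank characters are '--'     -> True
+ #   an ordinary full-width data record                         -> False
+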
2586
+ def __write_json(self):
2587
+ # Convert each object to a dict
2588
+ # Assume variable_list is a list of visVariable objects
2589
+ variable_list_dicts = []
2590
+ for key in self.variable_list:
2591
+ for variable in self.variable_list[key]:
2592
+ variable_list_dicts.append(variable.to_dict())
2593
+
2594
+
2595
+ # Write to JSON file
2596
+ path = os.path.join(self.setting.vis_dir, "variable_list.json")
2597
+ with open(path, "w", encoding="utf-8") as f:
2598
+ json.dump(variable_list_dicts, f, indent=2)
2599
+
2600
+ timestep_list_dicts = [timestep.__dict__ for timestep in self.time_steps_list]
2601
+
2602
+
2603
+ # Write to JSON file
2604
+ path = os.path.join(self.setting.vis_dir, "timestep_list.json")
2605
+ with open(path, "w", encoding="utf-8") as f:
2606
+ json.dump(timestep_list_dicts, f, indent=2)
2607
+
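+ # --- Illustrative note (added for clarity; not part of the package source) ---
+ # Shape of the variable_list.json written above (entries are made up):
+ # [
+ #   {"variable_name": "PRES", "value_type": "Scalar", "number_of_components": 1},
+ #   {"variable_name": "FLOW", "value_type": "Vector", "number_of_components": 3}
+ # ]
+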
2608
+ def __fix_negative_zero(self, x):
2609
+ return 0.0 if x == 0 else x
2610
+
2611
+ class vis_charting:
2612
+ def __init__(self, case_dir):
2613
+ self.vis_dir = None
2614
+ self.variable_list = None
2615
+ vis_dir = os.path.join(case_dir, "tough_vis", "paraview")
2616
+
2617
+
2618
+ if os.path.isdir(vis_dir):
2619
+ print("vis_dir:", vis_dir)
2620
+ else:
2621
+ print(f'Case vis_dir({vis_dir}) not found.')
2622
+ sys.exit(0)
2623
+
2624
+ variable_list_path = os.path.join(case_dir, "tough_vis", "variable_list.json")
2625
+ if os.path.isfile(variable_list_path):
2626
+ with open(variable_list_path, "r", encoding="utf-8") as f:
2627
+ self.variable_list = json.load(f)
2628
+ else:
2629
+ print(f'Case variable_list.json({variable_list_path}) not found.')
2630
+ sys.exit(0)
2631
+
2632
+ timestep_list_path = os.path.join(case_dir, "tough_vis", "timestep_list.json")
2633
+ if os.path.isfile(timestep_list_path):
2634
+ with open(timestep_list_path, "r", encoding="utf-8") as f:
2635
+ self.time_steps_list = json.load(f)
2636
+ else:
2637
+ print(f'Case timestep_list.json({timestep_list_path}) not found.')
2638
+ sys.exit(0)
2639
+
2640
+ self.vis_dir = vis_dir
2641
+
2642
+
2643
+
2644
+ def return_dataframe(self, element_id_list = None, scalar_variable_list = None, time_step_id_list = None, vector_variable_list = []):
2645
+ if self.vis_dir is None:
2646
+ print("vis_dir not set.")
2647
+ sys.exit(0)
2648
+ if self.variable_list is None:
2649
+ print("variable_list not set.")
2650
+ sys.exit(0)
2651
+ if self.time_steps_list is None:
2652
+ print("time_steps_list not set.")
2653
+ sys.exit(0)
2654
+
2655
+ # 1. find timesteps
2656
+ query_time_step_list = []
2657
+ if time_step_id_list is None:
2658
+ query_time_step_list = self.time_steps_list
2659
+ else:
2660
+ for time_step in self.time_steps_list:
2661
+ if time_step["time_step"] in time_step_id_list:
2662
+ query_time_step_list.append(time_step)
2663
+
2664
+ # 2. find scalar variables
2665
+ query_variables = []
2666
+ if scalar_variable_list is None:
2667
+ query_variables = self.variable_list
2668
+ else:
2669
+ for variable in self.variable_list:
2670
+ if variable["variable_name"] in scalar_variable_list:
2671
+ if variable["value_type"] == "Scalar":
2672
+ query_variables.append(variable)
2673
+
2674
+ # 3. find vector variables
2675
+
2676
+ for variable in self.variable_list:
2677
+ if variable["variable_name"] in vector_variable_list:
2678
+ if variable["value_type"] == "Vector":
2679
+ query_variables.append(variable)
2680
+
2681
+
2682
+ # create dataframe that contains all variables and timesteps
2683
+ df = pd.DataFrame()
2684
+ rows = []
2685
+ for time_step in query_time_step_list:
2686
+ time_step_id = time_step["time_step"]
2687
+ time = time_step["time"]
2688
+ vtk = self.__read_vtk_file(time_step["vtu_file_name"])
2689
+ #print(f"Reading time: {time_step['time']}")
2690
+
2691
+ for i in (range(vtk.GetNumberOfCells()) if element_id_list is None else element_id_list):
2692
+ if i >= vtk.GetNumberOfCells():
2693
+ print(f"Element ID {i} is out of range for the current VTK file.")
2694
+ sys.exit(1)
2695
+
2696
+ new_row = {"element_id": i, "time_step_id": time_step_id, "time": time}
2697
+ for variable in query_variables:
2698
+ variable_name = variable["variable_name"]
2699
+ vtk_array = vtk.GetCellData().GetArray(variable_name)
2700
+ if vtk_array is None:
2701
+ print(f"Variable '{variable_name}' not found in the VTK file.")
2702
+ continue
2703
+ if variable["value_type"] == "Scalar":
2704
+ new_row[variable_name] = vtk_array.GetValue(i)
2705
+ elif variable["value_type"] == "Vector":
2706
+ new_row[variable_name + "_x"] = vtk_array.GetComponent(i, 0)
2707
+ new_row[variable_name + "_y"] = vtk_array.GetComponent(i, 1)
2708
+ new_row[variable_name + "_z"] = vtk_array.GetComponent(i, 2)
2709
+
2710
+ #df = pd.concat([df, pd.DataFrame([new_row])], ignore_index=True)
2711
+ rows.append(new_row)
2712
+
2713
+ df = pd.DataFrame(rows)
2714
+ return df
2715
+
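+ # --- Hypothetical usage sketch (added for clarity; names and paths are placeholders) ---
+ # chart = vis_charting("path/to/case")
+ # df = chart.return_dataframe(element_id_list=[0, 10],
+ #                             scalar_variable_list=["PRES"],
+ #                             time_step_id_list=None)   # None selects every time step
+ # df then holds one row per (element, time step) with columns
+ # ['element_id', 'time_step_id', 'time', 'PRES'].
+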
2716
+
2717
+
2718
+
2719
+
2720
+ def __read_vtk_file(self, file_path):
2721
+
2722
+ extension = os.path.splitext(file_path)[1]
2723
+ if extension == '.vtr':
2724
+ reader = vtkXMLRectilinearGridReader()
2725
+ reader.SetFileName(file_path)
2726
+ reader.Update()
2727
+ return reader.GetOutput()
2728
+ elif extension == '.vts':
2729
+ reader = vtkXMLStructuredGridReader()
2730
+ reader.SetFileName(file_path)
2731
+ reader.Update()
2732
+ return reader.GetOutput()
2733
+ elif extension == '.vtu':
2734
+ reader = vtkXMLUnstructuredGridReader()
2735
+ reader.SetFileName(file_path)
2736
+ reader.Update()
2737
+ return reader.GetOutput()
2738
+
2739
+
2740
+