toughanimator 0.1.8__tar.gz → 0.1.10__tar.gz

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: toughanimator
- Version: 0.1.8
+ Version: 0.1.10
  Summary: A tool for visualizing TOUGH simulation outputs.
  Home-page: https://github.com/scarletref/toughanimator
  Author: scarletref
@@ -8,7 +8,7 @@ Author-email: scarletreflection@gmail.com
  Classifier: Programming Language :: Python :: 3
  Classifier: License :: OSI Approved :: MIT License
  Classifier: Operating System :: OS Independent
- Requires-Python: >=3.7
+ Requires-Python: >=3.11,<3.14
  Description-Content-Type: text/markdown
  Requires-Dist: numpy
  Requires-Dist: pandas
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
  setup(
  name='toughanimator', # Package name on PyPI
- version='0.1.8',
+ version='0.1.10',
  description='A tool for visualizing TOUGH simulation outputs.',
  long_description=open('README.md').read(),
  long_description_content_type='text/markdown',
@@ -16,7 +16,7 @@ setup(
  'pandas',
  'vtk',
  ],
- python_requires='>=3.7',
+ python_requires='>=3.11,<3.14',
  classifiers=[
  'Programming Language :: Python :: 3',
  'License :: OSI Approved :: MIT License',
@@ -3,18 +3,20 @@ import tough_classes as ta
  import pandas as pd
  import matplotlib.pyplot as plt
 
- #dir_name = "unresolved"
- dir_name = "test_cases"
- case_name = "PetraSim_2D_Conceptual"
+ dir_name = "unresolved"
+ #dir_name = "test_cases"
+ #case_name = "PetraSim_2D_Conceptual"
+ case_name = "2D_Utransport_MINC"
  test_case_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), dir_name)
 
  case_dir = os.path.join(test_case_dir, case_name)
 
-
+ #case_dir = r"D:\Projects\202511\ta-post\0 Base_res_k"
  #case_dir = r"D:\Projects\202504\polygonal\poly_test"
  #case_dir = r"D:\Projects\202507\tough系列output\tough output format\TR_MINC_exe"
  #case_dir = r"D:\Projects\202508\tough_cases\WW\7_TR_MINC_petrasim2025__5spot"
- #case_dir = r"D:\Projects\202508\tough_cases\WW\6_TR_MINC_exe"
+ #case_dir = r"D:\Projects\202511\case_TRv4\uwc"
+ #case_dir = r"D:\Projects\202501\toughanimator\test_cases\2DCCS 100yrs_RC"
  reader = ta.vis_reader(case_dir)
  #reader.write_eleme_conne()
  #reader.write_geometry()
@@ -1,3 +1,4 @@
+ import math
  import os
  import io
  import sys
@@ -69,7 +70,7 @@ class VisVariable:
  }
 
  class VisSetting:
- def __init__(self, input_file_paths, out_file_paths, vis_dir, corners_file="unkown", out_format_type=OutType.Unknown, tough_version = ToughVersion.Unknown, vis_types=[VisType.ParaView, VisType.Tecplot], mesh_type=MeshType.RegularGrid, debug=False, eos="ECO2N", minc=False, selected_variables_scalar = [], selected_variables_vector = [] ):
+ def __init__(self, input_file_paths, out_file_paths, vis_dir, corners_file="unkown", out_format_type=OutType.Unknown, tough_version = ToughVersion.Unknown, vis_types=[VisType.ParaView, VisType.Tecplot], mesh_type=MeshType.RegularGrid, debug=False, eos="ECO2N", minc=False, selected_variables_scalar = [], selected_variables_vector = [], ngv=False):
  self.mesh_type = mesh_type
  self.out_format_type = out_format_type
  self.vis_types = vis_types
@@ -87,6 +88,7 @@ class VisSetting:
  self.minc = minc
  self.selected_variables_scalar = selected_variables_scalar
  self.selected_variables_vector = selected_variables_vector
+ self.ngv = ngv
 
 
  def setBounds(self, x_bounds, y_bounds, z_bounds):
@@ -105,7 +107,7 @@ class vis_reader:
  if os.path.isdir(case_dir):
  config_path = os.path.join(case_dir, "config.json")
  if os.path.exists(config_path):
- with open(config_path, "r") as config_file:
+ with open(config_path, "r", encoding="utf-8") as config_file:
  config = json.load(config_file)
  else:
  print(f"Config file:({config_path}) not found. Please create it.")
@@ -130,16 +132,22 @@ class vis_reader:
  vis_dir = config["vis_dir"] if "vis_dir" in config else case_dir,
  corners_file = os.path.join(case_dir, config["corners_file"] if "corners_file" in config else "None"),
  debug = config['debug'] if 'debug' in config else False,
- eos = config['EOS'] if 'EOS' in config else "ECO2N",
- minc = config['MINC'] if 'MINC' in config else False,
+ #eos = config['EOS'] if 'EOS' in config else "ECO2N",
+ eos = next((v for k, v in config.items() if k.lower() == "eos"),"ECO2N"),
+ #minc = config['MINC'] if 'MINC' in config else False,
+ minc = next((v for k, v in config.items() if k.lower() == "minc"),False),
  selected_variables_scalar = config['selected_variables_scalar'] if 'selected_variables_scalar' in config else [],
- selected_variables_vector = config['selected_variables_vector'] if 'selected_variables_vector' in config else []
+ selected_variables_vector = config['selected_variables_vector'] if 'selected_variables_vector' in config else [],
+ #ngv= config['NGV'] if 'NGV' in config else False
+ ngv = next((v for k, v in config.items() if k.lower() == "ngv"),False)
  )
 
  # check if the project is using MINC
  minc_file = os.path.join(case_dir, 'MINC')
  if os.path.exists(minc_file):
  setting.minc = True
+ self.minc_file = minc_file
+ self.__check_num_of_minc()
  if minc_file in setting.input_file_paths:
  setting.input_file_paths.remove(minc_file)
 
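Note on the config change above: the EOS, MINC, and the new NGV keys in config.json are now matched case-insensitively, and NGV is the switch that enables the __post_process_ngv pass added further down. A minimal sketch of the lookup pattern, using a hypothetical config.json fragment (the values shown are examples only, not a complete schema):

    import json

    # Hypothetical fragment -- key casing no longer matters:
    #   { "eos": "EOS1", "Minc": true, "NGV": true }
    with open("config.json", "r", encoding="utf-8") as config_file:
        config = json.load(config_file)

    # Take the value of the first key that matches case-insensitively,
    # falling back to the stated default when no such key exists.
    eos = next((v for k, v in config.items() if k.lower() == "eos"), "ECO2N")
    minc = next((v for k, v in config.items() if k.lower() == "minc"), False)
    ngv = next((v for k, v in config.items() if k.lower() == "ngv"), False)

The EOS comparisons later in the file are normalized with .upper() for the same reason, so "eco2n" and "ECO2N" select the same INCON layout.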
@@ -229,6 +237,8 @@ class vis_reader:
  # add post calculation
  for timestep in self.time_steps_list:
  self.__post_process(timestep)
+ if self.setting.ngv:
+ self.__post_process_ngv(timestep)
  self.__write_json()
  print(f'All files have been created in {self.setting.vis_dir}.')
 
@@ -266,7 +276,7 @@ class vis_reader:
  has_elem = False
  for input_file_path in self.setting.input_file_paths:
  line_counter = 0
- with open(input_file_path) as f:
+ with open(input_file_path, encoding="utf-8") as f:
  reading_elem = False
 
  for line in f:
@@ -289,6 +299,26 @@ class vis_reader:
  sys.exit(1)
  else:
  print(f' Found ELEME block in {found_path}')
+ def __check_num_of_minc(self):
+ #self.minc_buffer = io.StringIO()
+ minc_num = 0
+ with open(self.minc_file, encoding="utf-8") as f:
+ reading_minc = False
+ for line in f:
+ if line.startswith('ELEME-') or line.startswith('ELEME'):
+ reading_minc = True
+ #has_minc = True
+ continue
+ if reading_minc:
+
+ if self.__check_if_block_end(line, minc_num):
+ reading_minc = False
+ #found_path = input_file_path
+ break
+ else:
+ minc_num += 1
+ #self.minc_buffer.write(line)
+ self.num_of_minc = minc_num
 
  def __write_conne_buffer(self):
  self.conne_buffer = io.StringIO()
@@ -296,7 +326,7 @@ class vis_reader:
  has_conne = False
  for input_file_path in self.setting.input_file_paths:
  line_counter = 0
- with open(input_file_path) as f:
+ with open(input_file_path, encoding="utf-8") as f:
 
  reading_conne = False
  for line in f:
@@ -326,7 +356,7 @@ class vis_reader:
 
  for input_file_path in self.setting.input_file_paths:
  line_counter = 0
- with open(input_file_path) as f:
+ with open(input_file_path, encoding="utf-8") as f:
  reading_rocks = False
  for line in f:
  if line.startswith('ROCKS-'):
@@ -367,7 +397,7 @@ class vis_reader:
 
  for input_file_path in self.setting.input_file_paths:
  line_counter = 0
- with open(input_file_path) as f:
+ with open(input_file_path, encoding="utf-8") as f:
 
  reading_incon = False
  for line in f:
@@ -385,9 +415,9 @@ class vis_reader:
  #line = f.readline() # skip first line #self.number_of_elements
  eos = self.setting.eos
  num = len(line.split())
- if self.setting.eos == "ECO2N" and len(line.split()) == 4:
+ if self.setting.eos.upper() == "ECO2N" and len(line.split()) == 4:
  self.incon_buffer.write(line)
- elif self.setting.eos == "EOS1":
+ elif self.setting.eos.upper() == "EOS1":
  line = f.readline() # skip first line #self.number_of_elements
  if len(line.split()) == 2:
  self.incon_buffer.write(line)
@@ -400,7 +430,7 @@ class vis_reader:
  reading_pram = False
  for input_file_path in self.setting.input_file_paths:
  line_counter = 0
- with open(input_file_path) as f:
+ with open(input_file_path, encoding="utf-8") as f:
  reading_pram = False
  for line in f:
  if line.startswith('PARAM-'):
@@ -430,14 +460,14 @@ class vis_reader:
 
  self.incon_buffer.seek(0)
  incon_df = pd.DataFrame()
- if self.setting.eos == "ECO2N":
+ if self.setting.eos.upper() == "ECO2N":
  # read incon
  incon_colspecs = [(0, 20), (20, 40), (40, 60), (60, 80)] # define column widths
  incon_names = ['Pressure', 'NaCl', 'CO2', 'Temperature']
  incon_df = pd.read_fwf(self.incon_buffer, colspecs=incon_colspecs, header=None,
  names=incon_names,
  dtype={'Pressure':float, 'NaCl':float, 'CO2':float, 'Temperature':float})
- elif self.setting.eos == "EOS1":
+ elif self.setting.eos.upper() == "EOS1":
  # read incon
  incon_colspecs = [(0, 20), (20, 40)]
  incon_names = ['Temperature', 'Pressure']
@@ -501,7 +531,7 @@ class vis_reader:
  buffer.flush()
  buffer.close()
 
- with open(self.current_out_file) as f:
+ with open(self.current_out_file, encoding="utf-8") as f:
  for line in f:
  line_number = line_number + 1
  values = line.strip().split(',')
@@ -557,7 +587,7 @@ class vis_reader:
  start_index = -1
  self.time_steps_list = []
 
- with open(self.current_out_file) as f:
+ with open(self.current_out_file, encoding="utf-8") as f:
  for line in f:
  line_number = line_number + 1
  values = line.strip().split(',')
@@ -644,7 +674,7 @@ class vis_reader:
  start_index = -1
  self.time_steps_list = []
 
- with open(self.current_out_file) as f:
+ with open(self.current_out_file, encoding="utf-8") as f:
  for line in f:
  line_number = line_number + 1
  values = line.strip().split(',')
@@ -729,7 +759,8 @@ class vis_reader:
  reading_scalar = False
  scalar_headers = []
  self.time_steps_list = []
- with open(self.current_out_file) as f:
+
+ with open(self.current_out_file, encoding="utf-8") as f:
  for line in f:
  if line.strip().lower().startswith('Variables'.lower()):
  headers_value = line.strip().split('=')[1]
@@ -929,6 +960,172 @@ class vis_reader:
  self.variable_list["post"] = post_variable_list
  self.__write_vtk_file(vtr, vtr_path)
 
+ def __post_process_ngv(self, vis_time_step):
+
+ #self.rock_dict
+ post_variable_list = []
+ if self.setting.mesh_type != MeshType.RegularGrid:
+ print(' NGV post-processing is only available for RegularGrid mesh.')
+ return
+
+
+ time_index = self.time_steps_list.index(vis_time_step)
+ #vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}.vtr')
+
+ extension = os.path.splitext(self.main_geometry)[1]
+ vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}{extension}')
+ self.time_steps_list[time_index].vtu_file_name = vtr_path
+ scalar_vtr = self.__read_vtk_file(vtr_path)
+ vtr = scalar_vtr
+
+
+ vtr_dimemsion = scalar_vtr.GetDimensions()
+ cell_index = 0
+ matIDArray = vtr.GetCellData().GetArray('Material_ID')
+
+ G = 9.81
+ Pc = 3000
+ # creare vtk double array 'ut','delta_p','Ncv_k1','Ncv_k2','Ncv_k3','Ngv_k1','Ngv_k2','Ngv_k3','Nb','R1'
+
+ Ncv_k1_array = vtkDoubleArray()
+ Ncv_k1_array.SetName('Ncv_k1')
+ vtr.GetCellData().AddArray(Ncv_k1_array)
+ Ncv_k2_array = vtkDoubleArray()
+ Ncv_k2_array.SetName('Ncv_k2')
+ vtr.GetCellData().AddArray(Ncv_k2_array)
+ Ncv_k3_array = vtkDoubleArray()
+ Ncv_k3_array.SetName('Ncv_k3')
+ vtr.GetCellData().AddArray(Ncv_k3_array)
+ Ngv_k1_array = vtkDoubleArray()
+ Ngv_k1_array.SetName('Ngv_k1')
+ vtr.GetCellData().AddArray(Ngv_k1_array)
+ Ngv_k2_array = vtkDoubleArray()
+ Ngv_k2_array.SetName('Ngv_k2')
+ vtr.GetCellData().AddArray(Ngv_k2_array)
+ Ngv_k3_array = vtkDoubleArray()
+ Ngv_k3_array.SetName('Ngv_k3')
+ vtr.GetCellData().AddArray(Ngv_k3_array)
+ Nb_array = vtkDoubleArray()
+ Nb_array.SetName('Nb')
+ vtr.GetCellData().AddArray(Nb_array)
+ R1_array = vtkDoubleArray()
+ R1_array.SetName('R1')
+ vtr.GetCellData().AddArray(R1_array)
+
+ post_variable_list.append(VisVariable('Ncv_k1', ValueType.Scalar, 1))
+ post_variable_list.append(VisVariable('Ncv_k2', ValueType.Scalar, 1))
+ post_variable_list.append(VisVariable('Ncv_k3', ValueType.Scalar, 1))
+ post_variable_list.append(VisVariable('Ngv_k1', ValueType.Scalar, 1))
+ post_variable_list.append(VisVariable('Ngv_k2', ValueType.Scalar, 1))
+ post_variable_list.append(VisVariable('Ngv_k3', ValueType.Scalar, 1))
+ post_variable_list.append(VisVariable('Nb', ValueType.Scalar, 1))
+ post_variable_list.append(VisVariable('R1', ValueType.Scalar, 1))
+
+
+ # check if the required arrays are in the vtk file
+ vis_gas_array = vtkDoubleArray()
+ vis_gas_name = 'VIS(gas)'
+ if vtr.GetCellData().GetArray(vis_gas_name) is not None:
+ vis_gas_array = vtr.GetCellData().GetArray(vis_gas_name)
+ else:
+ print(f' Can\'t find {vis_gas_name} array in the vtk file for NGV post-processing.')
+ return
+
+ dl_array = vtkDoubleArray()
+ dl_name = 'DL (kg/m^3)'
+ if vtr.GetCellData().GetArray(dl_name) is not None:
+ dl_array = vtr.GetCellData().GetArray(dl_name)
+ else:
+ print(f' Can\'t find {dl_name} array in the vtk file for NGV post-processing.')
+ return
+
+ dg_array = vtkDoubleArray()
+ dg_name = 'DG (kg/m^3)'
+ if vtr.GetCellData().GetArray(dg_name) is not None:
+ dg_array = vtr.GetCellData().GetArray(dg_name)
+ else:
+ print(f' Can\'t find {dg_name} array in the vtk file for NGV post-processing.')
+ return
+
+ flof_array = vtkDoubleArray()
+ flof_name = 'FLOF (kg/s)'
+ if vtr.GetCellData().GetArray(flof_name) is not None:
+ flof_array = vtr.GetCellData().GetArray(flof_name)
+ else:
+ print(f' Can\'t find {flof_name} array in the vtk file for NGV post-processing.')
+ return
+
+
+
+ for z_index in range(0, vtr_dimemsion[2]-1):
+ for y_index in range(0, vtr_dimemsion[1]-1):
+ for x_index in range(0, vtr_dimemsion[0]-1):
+ dx = vtr.GetXCoordinates().GetValue(x_index+1) - vtr.GetXCoordinates().GetValue(x_index)
+ dy = vtr.GetYCoordinates().GetValue(y_index+1) - vtr.GetYCoordinates().GetValue(y_index)
+ #dz = vtr.GetZCoordinates().GetValue(z_index+1) - vtr.GetZCoordinates().GetValue(z_index)
+
+
+ #elemID = self..GetValue(cell_index)
+ matID = matIDArray.GetValue(cell_index)
+ # find rock from self.rock_dict with id = matID
+ #rock = [obj for obj in self.rock_dict if obj.id == matID]
+
+ rock = next((o for o in self.rock_dict if o["id"] == matID), None)
+ per_1 = rock["per_1"] if rock else 0
+ per_2 = rock["per_2"] if rock else 0
+ per_3 = rock["per_3"] if rock else 0
+
+ #df['μCO2'] = df['VIS(gas)']
+ μCO2 = vis_gas_array.GetValue(cell_index)
+ #df['delta_p'] = df['DL (kg/m^3)'] - df['DG (kg/m^3)']
+ delta_p = dl_array.GetValue(cell_index) - dg_array.GetValue(cell_index)
+ #df['ut'] = np.sqrt(df['FLOF (kg/s)_x']**2 + df['FLOF (kg/s)_y']**2 + df['FLOF (kg/s)_z']**2)
+ FLOF = flof_array.GetTuple(cell_index)
+
+ ut = math.sqrt(FLOF[0]**2 + FLOF[1]**2 + FLOF[2]**2)
+
+ #df['Ncv_k1'] = (df['k1'] * df[L] * df['Pc'] )/(df[H]**2 * df['μCO2'] * df['ut'])
+ #df['Ncv_k2'] = (df['k2'] * df[L] * df['Pc'] )/(df[H]**2 * df['μCO2'] * df['ut'])
+ #df['Ncv_k3'] = (df['k3'] * df[L] * df['Pc'] )/(df[H]**2 * df['μCO2'] * df['ut'])
+ #df['Ngv_k1'] = (df['delta_p'] * df['G'] * df['k1'] * df['d_x'])/(df[H] * df['μCO2'] * df['ut'])
+ #df['Ngv_k2'] = (df['delta_p'] * df['G'] * df['k2'] * df['d_x'])/(df[H] * df['μCO2'] * df['ut'])
+ #df['Ngv_k3'] = (df['delta_p'] * df['G'] * df['k3'] * df['d_x'])/(df[H] * df['μCO2'] * df['ut'])
+
+ L = dx
+ H = dy
+ k1 = per_1
+ k2 = per_2
+ k3 = per_3
+ Ncv_k1 = (k1 * L * Pc )/(H**2 * μCO2 * ut) if (H**2 * μCO2 * ut) !=0 else 0
+ Ncv_k2 = (k2 * L * Pc )/(H**2 * μCO2 * ut) if (H**2 * μCO2 * ut) !=0 else 0
+ Ncv_k3 = (k3 * L * Pc )/(H**2 * μCO2 * ut) if (H**2 * μCO2 * ut) !=0 else 0
+ Ngv_k1 = (delta_p * G * k1 * dx)/(H * μCO2 * ut) if (H * μCO2 * ut) !=0 else 0
+ Ngv_k2 = (delta_p * G * k2 * dx)/(H * μCO2 * ut) if (H * μCO2 * ut) !=0 else 0
+ Ngv_k3 = (delta_p * G * k3 * dx)/(H * μCO2 * ut) if (H * μCO2 * ut) !=0 else 0
+
+ #df['Nb'] =(df['delta_p'] * df['G'] * df[H])/df['Pc']
+ Nb =(delta_p * G * H)/Pc if Pc !=0 else 0
+ #df['R1'] = df[L]/df[H]
+ R1 = L/H if H !=0 else 0
+ Ncv_k1_array.InsertNextValue(Ncv_k1)
+ Ncv_k2_array.InsertNextValue(Ncv_k2)
+ Ncv_k3_array.InsertNextValue(Ncv_k3)
+ Ngv_k1_array.InsertNextValue(Ngv_k1)
+ Ngv_k2_array.InsertNextValue(Ngv_k2)
+ Ngv_k3_array.InsertNextValue(Ngv_k3)
+ Nb_array.InsertNextValue(Nb)
+ R1_array.InsertNextValue(R1)
+ cell_index += 1
+
+ #for z_index in range(0, scalar_vtr.GetZCoordinates().GetNumberOfTuples()):
+
+
+ #if len(post_variable_list) > 0:
+ #self.variable_list["post"].append(post_variable_list)
+ self.__write_vtk_file(vtr, vtr_path)
+
+
+
 
  def __write_scalar_result(self, vis_time_step, dataframe, csv_headers):
 
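For reference, the per-cell quantities computed by the new __post_process_ngv pass above (with G = 9.81, a hard-coded Pc = 3000, L = dx, H = dy, and each ratio falling back to 0 when its denominator is 0) work out to

\[
u_t = \sqrt{F_x^2 + F_y^2 + F_z^2}, \qquad \Delta\rho = \rho_{\mathrm{DL}} - \rho_{\mathrm{DG}},
\]
\[
N_{cv,k_i} = \frac{k_i\,L\,P_c}{H^{2}\,\mu\,u_t}, \qquad
N_{gv,k_i} = \frac{\Delta\rho\,G\,k_i\,L}{H\,\mu\,u_t}, \qquad
N_b = \frac{\Delta\rho\,G\,H}{P_c}, \qquad
R_1 = \frac{L}{H},
\]

where F is the 'FLOF (kg/s)' tuple, μ is 'VIS(gas)', Δρ is 'DL (kg/m^3)' minus 'DG (kg/m^3)', and k1, k2, k3 are the per_1/per_2/per_3 permeabilities of the cell's rock type. These read like the usual capillary-to-viscous and gravity-to-viscous ratios, but only the arithmetic above is taken from the code.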
@@ -981,9 +1178,13 @@ class vis_reader:
 
  #if self.setting.minc:
  #print(f' MinC is enabled. Adding MinC values to the result.')
+ minc_ratio = 1
+ if self.setting.minc:
+ minc_ratio = self.num_of_minc / self.number_of_elements
 
  for i in range(0, vtr.GetNumberOfCells()):
  elemID = self.elemIDArray.GetValue(i)
+
  index = self.sequence_dist[i]
  if 'ELEM' in dataframe.columns:
  index = dataframe['ELEM'].tolist().index(elemID)
@@ -991,7 +1192,8 @@ class vis_reader:
  #target_row = dataframe.iloc[index]
  #print(f' Processing ELEM {elem_string} at index {index}')
  for header in headers:
- value = float(self.__parse_float(dataframe[header].iloc[index]))
+ minc_index = int(index * minc_ratio)
+ value = float(self.__parse_float(dataframe[header].iloc[minc_index]))
  vtr.GetCellData().GetArray(header).InsertNextValue(value)
 
 
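The minc_index mapping above is where the element count gathered by the new __check_num_of_minc ends up. A worked example with hypothetical numbers (the per-parent-cell row grouping is my assumption; the arithmetic is exactly what the code does):

    # Hypothetical case: 100 grid cells, 2 MINC continua per cell -> MINC file lists 200 elements.
    num_of_minc, number_of_elements = 200, 100
    minc_ratio = num_of_minc / number_of_elements   # 2.0 (stays 1 when MINC is off)

    index = 37                                      # regular-grid cell index
    minc_index = int(index * minc_ratio)            # 74 -> row of cell 37's first continuum
                                                    # in the MINC-expanded output table

With MINC disabled, minc_ratio is 1 and minc_index equals index, so the behaviour of earlier releases is unchanged.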
@@ -1027,7 +1229,10 @@ class vis_reader:
  firstFile = True
  if os.path.isfile(self.tec_scalar_path):
  firstFile = False
- file = open(self.tec_scalar_path, "a")
+ file = open(self.tec_scalar_path, "a", encoding="utf-8")
+ if len(self.setting.selected_variables_scalar) == 0:
+ self.setting.selected_variables_scalar = headers
+
  if firstFile:
  file.write('TITLE = TECPLOT PLOT \n')
  selected_header_string = '"'+'", "'.join(self.setting.selected_variables_scalar) + '"'
@@ -1038,37 +1243,42 @@ class vis_reader:
 
  #time_statement = f'ZONE T ="{vis_time_step.time_step}, Time = {vis_time_step.time}", N = {vtu_cell_to_points.GetNumberOfPoints()}, E = {vtu_cell_to_points.GetNumberOfCells()}, F = FEPOINT, ET = {tecplot_cell_type}, SOLUTIONTIME = {vis_time_step.time}\n'
 
- time_statement = f'ZONE T="{vis_time_step.time_step}, Time = {vis_time_step.time}", I={self.xyz_elem[0] + 1}, J={self.xyz_elem[1] + 1}, K={self.xyz_elem[2] + 1}, SOLUTIONTIME={vis_time_step.time}, DATAPACKING=BLOCK, VARLOCATION=({self.__get_varlocarion_string(headers)})'
+ time_statement = f'ZONE T="{vis_time_step.time_step}, Time = {vis_time_step.time}", I={self.xyz_elem[0] + 1}, J={self.xyz_elem[1] + 1}, K={self.xyz_elem[2] + 1}, SOLUTIONTIME={vis_time_step.time}, DATAPACKING=BLOCK, VARLOCATION=({self.__get_varlocarion_string(self.setting.selected_variables_scalar)})'
  if not firstFile:
  time_statement = f'{time_statement}, D=(1,2,3,FECONNECT)'
  #if self.setting.debug:
  #time_statement = f'ZONE T ="{vis_time_step.time_step}, Time = {vis_time_step.time}", N = {vtu_cell_to_points.GetNumberOfPoints()}, E = {vtu_cell_to_points.GetNumberOfCells()}, F = FEPOINT, ET = {tecplot_cell_type}\n'
  file.write(f'{time_statement}\n')
-
+ max_line_length = 20000
  # X, Y, Z
  if firstFile:
- for i in range(0, vtr.GetNumberOfPoints()):
- point = vtr.GetPoint(i)
- file.write(str(point[0]) + " ")
- file.write(" \n")
-
- for i in range(0, vtr.GetNumberOfPoints()):
- point = vtr.GetPoint(i)
- file.write(str(point[1]) + " ")
- file.write(" \n")
-
- for i in range(0, vtr.GetNumberOfPoints()):
- point = vtr.GetPoint(i)
- file.write(str(point[2]) + " ")
- file.write(" \n")
+ for point_idx in range(0, 3):
+ line_string = ''
+ for i in range(0, vtr.GetNumberOfPoints()):
+ point = vtr.GetPoint(i)
+ #file.write(str(point[0]) + " ")
+ if len(line_string) + len(str(point[point_idx])) + 1 > max_line_length:
+ # write the current line to file
+ file.write(f'{line_string}\n')
+ # reset the line string
+ line_string = ''
+ line_string = f'{line_string}{str(point[point_idx])} '
+
+ file.write(f'{line_string}\n')
 
  # Other data
  for header in self.setting.selected_variables_scalar:
  array = vtr.GetCellData().GetArray(header)
-
+ line_string = ''
  for e in range(0, vtr.GetNumberOfCells()):
- file.write(str(array.GetValue(e)) + " ")
- file.write(" \n")
+ #file.write(f'{str(array.GetComponent(e, 0))} ')
+ if len(line_string) + len(str(array.GetValue(e))) + 1 > max_line_length:
+ # write the current line to file
+ file.write(f'{line_string}\n')
+ # reset the line string
+ line_string = ''
+ line_string = f'{line_string}{str(array.GetValue(e))} '
+ file.write(f'{line_string}\n')
 
  file.close()
 
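Both Tecplot writers (this scalar one and the vector one below) now accumulate values in line_string and flush whenever the next token would push the line past max_line_length = 20000 characters, rather than writing one very long line per variable; presumably this keeps the ASCII output within Tecplot's line-length limits. The same pattern, pulled out as a standalone sketch (the helper name is mine, not part of the package):

    def write_block_values(file, values, max_line_length=20000):
        """Write values space-separated, wrapping before a line would exceed max_line_length."""
        line_string = ''
        for value in values:
            token = f'{value} '
            if len(line_string) + len(token) > max_line_length:
                file.write(f'{line_string}\n')   # flush the full line
                line_string = ''
            line_string += token
        file.write(f'{line_string}\n')           # flush whatever is left

The diff applies this to the X/Y/Z point coordinates, to each selected scalar variable, and to each of the three components of every selected vector variable.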
@@ -1203,6 +1413,11 @@ class vis_reader:
 
  if VisType.Tecplot not in self.setting.vis_types:
  return
+
+
+ if self.setting.mesh_type == MeshType.PolygonalMesh:
+ print(f' Tecplot output for polygonal mesh is not supported yet.')
+ return
 
  # Start Tecplot generating
  tec_name = pathlib.Path(self.setting.input_file_paths[0]).stem
@@ -1210,13 +1425,18 @@ class vis_reader:
  firstFile = True
  if os.path.isfile(self.tec_vector_path):
  firstFile = False
- file = open(self.tec_vector_path, "a")
+ file = open(self.tec_vector_path, "a", encoding="utf-8")
+
+ #selected_header_string = '"'+'", "'.join(self.setting.selected_variables_scalar) + '"'
+ if len(self.setting.selected_variables_vector) == 0:
+ self.setting.selected_variables_vector = headers
+ vector_headers = self.__get_tec_vector_headers(self.setting.selected_variables_vector)
 
- vector_headers = self.__get_tec_vector_headers(headers)
  # add header
  if firstFile:
  file.write('TITLE = TECPLOT PLOT \n')
  header_string = '"'+'", "'.join(vector_headers) + '"'
+
  file.write(f'VARIABLES = "X", "Y", "Z", {header_string}\n')
 
  time_statement = f'ZONE T="{vis_time_step.time_step}, Time = {vis_time_step.time}", I={self.xyz_elem[0] + 1}, J={self.xyz_elem[1] + 1}, K={self.xyz_elem[2] + 1}, SOLUTIONTIME={vis_time_step.time}, DATAPACKING=BLOCK, VARLOCATION=({self.__get_varlocarion_string(vector_headers)})'
@@ -1226,38 +1446,40 @@ class vis_reader:
  #time_statement = f'ZONE T ="{vis_time_step.time_step}, Time = {vis_time_step.time}", N = {vtu_cell_to_points.GetNumberOfPoints()}, E = {vtu_cell_to_points.GetNumberOfCells()}, F = FEPOINT, ET = {tecplot_cell_type}\n'
  file.write(f'{time_statement}\n')
 
+ max_line_length = 20000
  # X, Y, Z
- if firstFile:
- for i in range(0, vector_vtr.GetNumberOfPoints()):
- point = vector_vtr.GetPoint(i)
- file.write(str(point[0]) + " ")
- file.write(" \n")
+ if firstFile:
+ for point_idx in range(0, 3):
+ line_string = ''
+ for i in range(0, vector_vtr.GetNumberOfPoints()):
+ point = vector_vtr.GetPoint(i)
+ #file.write(str(point[0]) + " ")
+ if len(line_string) + len(str(point[point_idx])) + 1 > max_line_length:
+ # write the current line to file
+ file.write(f'{line_string}\n')
+ # reset the line string
+ line_string = ''
+ line_string = f'{line_string}{str(point[point_idx])} '
+ file.write(f'{line_string}\n')
 
- for i in range(0, vector_vtr.GetNumberOfPoints()):
- point = vector_vtr.GetPoint(i)
- file.write(str(point[1]) + " ")
- file.write(" \n")
 
- for i in range(0, vector_vtr.GetNumberOfPoints()):
- point = vector_vtr.GetPoint(i)
- file.write(str(point[2]) + " ")
- file.write(" \n")
 
  # Other data
- for header in headers:
- array = vector_vtr.GetCellData().GetArray(header)
-
- for e in range(0, vector_vtr.GetNumberOfCells()):
- file.write(f'{str(array.GetComponent(e, 0))} ')
- file.write(" \n")
+ for header in self.setting.selected_variables_vector:
 
- for e in range(0, vector_vtr.GetNumberOfCells()):
- file.write(f'{str(array.GetComponent(e, 1))} ')
- file.write(" \n")
+ array = vector_vtr.GetCellData().GetArray(header)
 
- for e in range(0, vector_vtr.GetNumberOfCells()):
- file.write(f'{str(array.GetComponent(e, 2))} ')
- file.write(" \n")
+ for dim_idx in range(0, 3):
+ line_string = ''
+ for e in range(0, vector_vtr.GetNumberOfCells()):
+ #file.write(f'{str(array.GetComponent(e, 0))} ')
+ if len(line_string) + len(str(array.GetComponent(e, dim_idx))) + 1 > max_line_length:
+ # write the current line to file
+ file.write(f'{line_string}\n')
+ # reset the line string
+ line_string = ''
+ line_string = f'{line_string}{str(array.GetComponent(e, dim_idx))} '
+ file.write(f'{line_string}\n')
 
  file.close()
 
@@ -1530,7 +1752,7 @@ class vis_reader:
  corners_buffer = io.StringIO()
  csv_headers = []
  line_number = -1
- with open(self.setting.corners_file) as f:
+ with open(self.setting.corners_file, encoding="utf-8") as f:
  for line in f:
  line_number = line_number + 1
  values = line.strip().split(',')
@@ -2321,7 +2543,7 @@ class vis_reader:
  elif extension == '.csv':
  self.setting.out_format_type = OutType.CSV
  line_number = 0
- with open(out_file_path) as f:
+ with open(out_file_path, encoding="utf-8") as f:
  for line in f:
  if line_number == 0:
  first_col = line.split(',')[0].strip().lower()
@@ -2372,7 +2594,7 @@ class vis_reader:
 
  # Write to JSON file
  path = os.path.join(self.setting.vis_dir, "variable_list.json")
- with open(path, "w") as f:
+ with open(path, "w", encoding="utf-8") as f:
  json.dump(variable_list_dicts, f, indent=2)
 
  timestep_list_dicts = [timestep.__dict__ for timestep in self.time_steps_list]
@@ -2380,7 +2602,7 @@ class vis_reader:
 
  # Write to JSON file
  path = os.path.join(self.setting.vis_dir, "timestep_list.json")
- with open(path, "w") as f:
+ with open(path, "w", encoding="utf-8") as f:
  json.dump(timestep_list_dicts, f, indent=2)
 
  def __fix_negative_zero(self, x):
@@ -2401,7 +2623,7 @@ class vis_charting:
 
  variable_list_path = os.path.join(case_dir, "tough_vis", "variable_list.json")
  if os.path.isfile(variable_list_path):
- with open(variable_list_path, "r") as f:
+ with open(variable_list_path, "r", encoding="utf-8") as f:
  self.variable_list = json.load(f)
  else:
  print(f'Case variable_list.json({variable_list_path}) not found.')
@@ -2409,7 +2631,7 @@ class vis_charting:
 
  timestep_list_path = os.path.join(case_dir, "tough_vis", "timestep_list.json")
  if os.path.isfile(timestep_list_path):
- with open(timestep_list_path, "r") as f:
+ with open(timestep_list_path, "r", encoding="utf-8") as f:
  self.time_steps_list = json.load(f)
  else:
  print(f'Case timestep_list.json({timestep_list_path}) not found.')
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: toughanimator
- Version: 0.1.8
+ Version: 0.1.10
  Summary: A tool for visualizing TOUGH simulation outputs.
  Home-page: https://github.com/scarletref/toughanimator
  Author: scarletref
@@ -8,7 +8,7 @@ Author-email: scarletreflection@gmail.com
  Classifier: Programming Language :: Python :: 3
  Classifier: License :: OSI Approved :: MIT License
  Classifier: Operating System :: OS Independent
- Requires-Python: >=3.7
+ Requires-Python: >=3.11,<3.14
  Description-Content-Type: text/markdown
  Requires-Dist: numpy
  Requires-Dist: pandas