toughanimator 0.1.9__tar.gz → 0.1.10__tar.gz

This diff compares publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: toughanimator
- Version: 0.1.9
+ Version: 0.1.10
  Summary: A tool for visualizing TOUGH simulation outputs.
  Home-page: https://github.com/scarletref/toughanimator
  Author: scarletref
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages

  setup(
  name='toughanimator', # Package name on PyPI
- version='0.1.9',
+ version='0.1.10',
  description='A tool for visualizing TOUGH simulation outputs.',
  long_description=open('README.md').read(),
  long_description_content_type='text/markdown',
@@ -3,18 +3,20 @@ import tough_classes as ta
  import pandas as pd
  import matplotlib.pyplot as plt

- #dir_name = "unresolved"
- dir_name = "test_cases"
- case_name = "PetraSim_2D_Conceptual"
+ dir_name = "unresolved"
+ #dir_name = "test_cases"
+ #case_name = "PetraSim_2D_Conceptual"
+ case_name = "2D_Utransport_MINC"
  test_case_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), dir_name)

  case_dir = os.path.join(test_case_dir, case_name)

- case_dir = r"D:\Projects\202511\dip_left\TRv4"
+ #case_dir = r"D:\Projects\202511\ta-post\0 Base_res_k"
  #case_dir = r"D:\Projects\202504\polygonal\poly_test"
  #case_dir = r"D:\Projects\202507\tough系列output\tough output format\TR_MINC_exe"
  #case_dir = r"D:\Projects\202508\tough_cases\WW\7_TR_MINC_petrasim2025__5spot"
- #case_dir = r"D:\Projects\202508\tough_cases\WW\6_TR_MINC_exe"
+ #case_dir = r"D:\Projects\202511\case_TRv4\uwc"
+ #case_dir = r"D:\Projects\202501\toughanimator\test_cases\2DCCS 100yrs_RC"
  reader = ta.vis_reader(case_dir)
  #reader.write_eleme_conne()
  #reader.write_geometry()
@@ -1,3 +1,4 @@
+ import math
  import os
  import io
  import sys
@@ -69,7 +70,7 @@ class VisVariable:
  }

  class VisSetting:
- def __init__(self, input_file_paths, out_file_paths, vis_dir, corners_file="unkown", out_format_type=OutType.Unknown, tough_version = ToughVersion.Unknown, vis_types=[VisType.ParaView, VisType.Tecplot], mesh_type=MeshType.RegularGrid, debug=False, eos="ECO2N", minc=False, selected_variables_scalar = [], selected_variables_vector = [] ):
+ def __init__(self, input_file_paths, out_file_paths, vis_dir, corners_file="unkown", out_format_type=OutType.Unknown, tough_version = ToughVersion.Unknown, vis_types=[VisType.ParaView, VisType.Tecplot], mesh_type=MeshType.RegularGrid, debug=False, eos="ECO2N", minc=False, selected_variables_scalar = [], selected_variables_vector = [], ngv=False):
  self.mesh_type = mesh_type
  self.out_format_type = out_format_type
  self.vis_types = vis_types
@@ -87,6 +88,7 @@ class VisSetting:
  self.minc = minc
  self.selected_variables_scalar = selected_variables_scalar
  self.selected_variables_vector = selected_variables_vector
+ self.ngv = ngv


  def setBounds(self, x_bounds, y_bounds, z_bounds):
@@ -105,7 +107,7 @@ class vis_reader:
  if os.path.isdir(case_dir):
  config_path = os.path.join(case_dir, "config.json")
  if os.path.exists(config_path):
- with open(config_path, "r") as config_file:
+ with open(config_path, "r", encoding="utf-8") as config_file:
  config = json.load(config_file)
  else:
  print(f"Config file:({config_path}) not found. Please create it.")
@@ -130,16 +132,22 @@ class vis_reader:
  vis_dir = config["vis_dir"] if "vis_dir" in config else case_dir,
  corners_file = os.path.join(case_dir, config["corners_file"] if "corners_file" in config else "None"),
  debug = config['debug'] if 'debug' in config else False,
- eos = config['EOS'] if 'EOS' in config else "ECO2N",
- minc = config['MINC'] if 'MINC' in config else False,
+ #eos = config['EOS'] if 'EOS' in config else "ECO2N",
+ eos = next((v for k, v in config.items() if k.lower() == "eos"),"ECO2N"),
+ #minc = config['MINC'] if 'MINC' in config else False,
+ minc = next((v for k, v in config.items() if k.lower() == "minc"),False),
  selected_variables_scalar = config['selected_variables_scalar'] if 'selected_variables_scalar' in config else [],
- selected_variables_vector = config['selected_variables_vector'] if 'selected_variables_vector' in config else []
+ selected_variables_vector = config['selected_variables_vector'] if 'selected_variables_vector' in config else [],
+ #ngv= config['NGV'] if 'NGV' in config else False
+ ngv = next((v for k, v in config.items() if k.lower() == "ngv"),False)
  )

  # check if the project is using MINC
  minc_file = os.path.join(case_dir, 'MINC')
  if os.path.exists(minc_file):
  setting.minc = True
+ self.minc_file = minc_file
+ self.__check_num_of_minc()
  if minc_file in setting.input_file_paths:
  setting.input_file_paths.remove(minc_file)
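
In the hunk above, the EOS, MINC, and NGV options are now matched case-insensitively by scanning config.items() instead of indexing a fixed key, keeping the same defaults ("ECO2N", False, False). A minimal standalone sketch of the pattern (the helper name config_get and the sample JSON are illustrative only, not part of the package):

import json

def config_get(config, key, default):
    # Return the value whose key equals `key` ignoring case, else `default`.
    return next((v for k, v in config.items() if k.lower() == key.lower()), default)

config = json.loads('{"eos": "EOS1", "Minc": true, "NGV": false}')
eos = config_get(config, "EOS", "ECO2N")    # -> "EOS1"
minc = config_get(config, "MINC", False)    # -> True
ngv = config_get(config, "NGV", False)      # -> False
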
@@ -229,6 +237,8 @@ class vis_reader:
  # add post calculation
  for timestep in self.time_steps_list:
  self.__post_process(timestep)
+ if self.setting.ngv:
+ self.__post_process_ngv(timestep)
  self.__write_json()
  print(f'All files have been created in {self.setting.vis_dir}.')

@@ -266,7 +276,7 @@ class vis_reader:
  has_elem = False
  for input_file_path in self.setting.input_file_paths:
  line_counter = 0
- with open(input_file_path) as f:
+ with open(input_file_path, encoding="utf-8") as f:
  reading_elem = False

  for line in f:
@@ -289,6 +299,26 @@ class vis_reader:
  sys.exit(1)
  else:
  print(f' Found ELEME block in {found_path}')
+ def __check_num_of_minc(self):
+ #self.minc_buffer = io.StringIO()
+ minc_num = 0
+ with open(self.minc_file, encoding="utf-8") as f:
+ reading_minc = False
+ for line in f:
+ if line.startswith('ELEME-') or line.startswith('ELEME'):
+ reading_minc = True
+ #has_minc = True
+ continue
+ if reading_minc:
+
+ if self.__check_if_block_end(line, minc_num):
+ reading_minc = False
+ #found_path = input_file_path
+ break
+ else:
+ minc_num += 1
+ #self.minc_buffer.write(line)
+ self.num_of_minc = minc_num

  def __write_conne_buffer(self):
  self.conne_buffer = io.StringIO()
@@ -296,7 +326,7 @@ class vis_reader:
  has_conne = False
  for input_file_path in self.setting.input_file_paths:
  line_counter = 0
- with open(input_file_path) as f:
+ with open(input_file_path, encoding="utf-8") as f:

  reading_conne = False
  for line in f:
@@ -326,7 +356,7 @@ class vis_reader:

  for input_file_path in self.setting.input_file_paths:
  line_counter = 0
- with open(input_file_path) as f:
+ with open(input_file_path, encoding="utf-8") as f:
  reading_rocks = False
  for line in f:
  if line.startswith('ROCKS-'):
@@ -367,7 +397,7 @@ class vis_reader:

  for input_file_path in self.setting.input_file_paths:
  line_counter = 0
- with open(input_file_path) as f:
+ with open(input_file_path, encoding="utf-8") as f:

  reading_incon = False
  for line in f:
@@ -385,9 +415,9 @@ class vis_reader:
  #line = f.readline() # skip first line #self.number_of_elements
  eos = self.setting.eos
  num = len(line.split())
- if self.setting.eos == "ECO2N" and len(line.split()) == 4:
+ if self.setting.eos.upper() == "ECO2N" and len(line.split()) == 4:
  self.incon_buffer.write(line)
- elif self.setting.eos == "EOS1":
+ elif self.setting.eos.upper() == "EOS1":
  line = f.readline() # skip first line #self.number_of_elements
  if len(line.split()) == 2:
  self.incon_buffer.write(line)
@@ -400,7 +430,7 @@ class vis_reader:
  reading_pram = False
  for input_file_path in self.setting.input_file_paths:
  line_counter = 0
- with open(input_file_path) as f:
+ with open(input_file_path, encoding="utf-8") as f:
  reading_pram = False
  for line in f:
  if line.startswith('PARAM-'):
@@ -430,14 +460,14 @@ class vis_reader:

  self.incon_buffer.seek(0)
  incon_df = pd.DataFrame()
- if self.setting.eos == "ECO2N":
+ if self.setting.eos.upper() == "ECO2N":
  # read incon
  incon_colspecs = [(0, 20), (20, 40), (40, 60), (60, 80)] # define column widths
  incon_names = ['Pressure', 'NaCl', 'CO2', 'Temperature']
  incon_df = pd.read_fwf(self.incon_buffer, colspecs=incon_colspecs, header=None,
  names=incon_names,
  dtype={'Pressure':float, 'NaCl':float, 'CO2':float, 'Temperature':float})
- elif self.setting.eos == "EOS1":
+ elif self.setting.eos.upper() == "EOS1":
  # read incon
  incon_colspecs = [(0, 20), (20, 40)]
  incon_names = ['Temperature', 'Pressure']
@@ -501,7 +531,7 @@ class vis_reader:
  buffer.flush()
  buffer.close()

- with open(self.current_out_file) as f:
+ with open(self.current_out_file, encoding="utf-8") as f:
  for line in f:
  line_number = line_number + 1
  values = line.strip().split(',')
@@ -557,7 +587,7 @@ class vis_reader:
  start_index = -1
  self.time_steps_list = []

- with open(self.current_out_file) as f:
+ with open(self.current_out_file, encoding="utf-8") as f:
  for line in f:
  line_number = line_number + 1
  values = line.strip().split(',')
@@ -644,7 +674,7 @@ class vis_reader:
  start_index = -1
  self.time_steps_list = []

- with open(self.current_out_file) as f:
+ with open(self.current_out_file, encoding="utf-8") as f:
  for line in f:
  line_number = line_number + 1
  values = line.strip().split(',')
@@ -729,7 +759,8 @@ class vis_reader:
  reading_scalar = False
  scalar_headers = []
  self.time_steps_list = []
- with open(self.current_out_file) as f:
+
+ with open(self.current_out_file, encoding="utf-8") as f:
  for line in f:
  if line.strip().lower().startswith('Variables'.lower()):
  headers_value = line.strip().split('=')[1]
@@ -929,6 +960,172 @@ class vis_reader:
  self.variable_list["post"] = post_variable_list
  self.__write_vtk_file(vtr, vtr_path)

+ def __post_process_ngv(self, vis_time_step):
+
+ #self.rock_dict
+ post_variable_list = []
+ if self.setting.mesh_type != MeshType.RegularGrid:
+ print(' NGV post-processing is only available for RegularGrid mesh.')
+ return
+
+
+ time_index = self.time_steps_list.index(vis_time_step)
+ #vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}.vtr')
+
+ extension = os.path.splitext(self.main_geometry)[1]
+ vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}{extension}')
+ self.time_steps_list[time_index].vtu_file_name = vtr_path
+ scalar_vtr = self.__read_vtk_file(vtr_path)
+ vtr = scalar_vtr
+
+
+ vtr_dimemsion = scalar_vtr.GetDimensions()
+ cell_index = 0
+ matIDArray = vtr.GetCellData().GetArray('Material_ID')
+
+ G = 9.81
+ Pc = 3000
+ # creare vtk double array 'ut','delta_p','Ncv_k1','Ncv_k2','Ncv_k3','Ngv_k1','Ngv_k2','Ngv_k3','Nb','R1'
+
+ Ncv_k1_array = vtkDoubleArray()
+ Ncv_k1_array.SetName('Ncv_k1')
+ vtr.GetCellData().AddArray(Ncv_k1_array)
+ Ncv_k2_array = vtkDoubleArray()
+ Ncv_k2_array.SetName('Ncv_k2')
+ vtr.GetCellData().AddArray(Ncv_k2_array)
+ Ncv_k3_array = vtkDoubleArray()
+ Ncv_k3_array.SetName('Ncv_k3')
+ vtr.GetCellData().AddArray(Ncv_k3_array)
+ Ngv_k1_array = vtkDoubleArray()
+ Ngv_k1_array.SetName('Ngv_k1')
+ vtr.GetCellData().AddArray(Ngv_k1_array)
+ Ngv_k2_array = vtkDoubleArray()
+ Ngv_k2_array.SetName('Ngv_k2')
+ vtr.GetCellData().AddArray(Ngv_k2_array)
+ Ngv_k3_array = vtkDoubleArray()
+ Ngv_k3_array.SetName('Ngv_k3')
+ vtr.GetCellData().AddArray(Ngv_k3_array)
+ Nb_array = vtkDoubleArray()
+ Nb_array.SetName('Nb')
+ vtr.GetCellData().AddArray(Nb_array)
+ R1_array = vtkDoubleArray()
+ R1_array.SetName('R1')
+ vtr.GetCellData().AddArray(R1_array)
+
+ post_variable_list.append(VisVariable('Ncv_k1', ValueType.Scalar, 1))
+ post_variable_list.append(VisVariable('Ncv_k2', ValueType.Scalar, 1))
+ post_variable_list.append(VisVariable('Ncv_k3', ValueType.Scalar, 1))
+ post_variable_list.append(VisVariable('Ngv_k1', ValueType.Scalar, 1))
+ post_variable_list.append(VisVariable('Ngv_k2', ValueType.Scalar, 1))
+ post_variable_list.append(VisVariable('Ngv_k3', ValueType.Scalar, 1))
+ post_variable_list.append(VisVariable('Nb', ValueType.Scalar, 1))
+ post_variable_list.append(VisVariable('R1', ValueType.Scalar, 1))
+
+
+ # check if the required arrays are in the vtk file
+ vis_gas_array = vtkDoubleArray()
+ vis_gas_name = 'VIS(gas)'
+ if vtr.GetCellData().GetArray(vis_gas_name) is not None:
+ vis_gas_array = vtr.GetCellData().GetArray(vis_gas_name)
+ else:
+ print(f' Can\'t find {vis_gas_name} array in the vtk file for NGV post-processing.')
+ return
+
+ dl_array = vtkDoubleArray()
+ dl_name = 'DL (kg/m^3)'
+ if vtr.GetCellData().GetArray(dl_name) is not None:
+ dl_array = vtr.GetCellData().GetArray(dl_name)
+ else:
+ print(f' Can\'t find {dl_name} array in the vtk file for NGV post-processing.')
+ return
+
+ dg_array = vtkDoubleArray()
+ dg_name = 'DG (kg/m^3)'
+ if vtr.GetCellData().GetArray(dg_name) is not None:
+ dg_array = vtr.GetCellData().GetArray(dg_name)
+ else:
+ print(f' Can\'t find {dg_name} array in the vtk file for NGV post-processing.')
+ return
+
+ flof_array = vtkDoubleArray()
+ flof_name = 'FLOF (kg/s)'
+ if vtr.GetCellData().GetArray(flof_name) is not None:
+ flof_array = vtr.GetCellData().GetArray(flof_name)
+ else:
+ print(f' Can\'t find {flof_name} array in the vtk file for NGV post-processing.')
+ return
+
+
+
+ for z_index in range(0, vtr_dimemsion[2]-1):
+ for y_index in range(0, vtr_dimemsion[1]-1):
+ for x_index in range(0, vtr_dimemsion[0]-1):
+ dx = vtr.GetXCoordinates().GetValue(x_index+1) - vtr.GetXCoordinates().GetValue(x_index)
+ dy = vtr.GetYCoordinates().GetValue(y_index+1) - vtr.GetYCoordinates().GetValue(y_index)
+ #dz = vtr.GetZCoordinates().GetValue(z_index+1) - vtr.GetZCoordinates().GetValue(z_index)
+
+
+ #elemID = self..GetValue(cell_index)
+ matID = matIDArray.GetValue(cell_index)
+ # find rock from self.rock_dict with id = matID
+ #rock = [obj for obj in self.rock_dict if obj.id == matID]
+
+ rock = next((o for o in self.rock_dict if o["id"] == matID), None)
+ per_1 = rock["per_1"] if rock else 0
+ per_2 = rock["per_2"] if rock else 0
+ per_3 = rock["per_3"] if rock else 0
+
+ #df['μCO2'] = df['VIS(gas)']
+ μCO2 = vis_gas_array.GetValue(cell_index)
+ #df['delta_p'] = df['DL (kg/m^3)'] - df['DG (kg/m^3)']
+ delta_p = dl_array.GetValue(cell_index) - dg_array.GetValue(cell_index)
+ #df['ut'] = np.sqrt(df['FLOF (kg/s)_x']**2 + df['FLOF (kg/s)_y']**2 + df['FLOF (kg/s)_z']**2)
+ FLOF = flof_array.GetTuple(cell_index)
+
+ ut = math.sqrt(FLOF[0]**2 + FLOF[1]**2 + FLOF[2]**2)
+
+ #df['Ncv_k1'] = (df['k1'] * df[L] * df['Pc'] )/(df[H]**2 * df['μCO2'] * df['ut'])
+ #df['Ncv_k2'] = (df['k2'] * df[L] * df['Pc'] )/(df[H]**2 * df['μCO2'] * df['ut'])
+ #df['Ncv_k3'] = (df['k3'] * df[L] * df['Pc'] )/(df[H]**2 * df['μCO2'] * df['ut'])
+ #df['Ngv_k1'] = (df['delta_p'] * df['G'] * df['k1'] * df['d_x'])/(df[H] * df['μCO2'] * df['ut'])
+ #df['Ngv_k2'] = (df['delta_p'] * df['G'] * df['k2'] * df['d_x'])/(df[H] * df['μCO2'] * df['ut'])
+ #df['Ngv_k3'] = (df['delta_p'] * df['G'] * df['k3'] * df['d_x'])/(df[H] * df['μCO2'] * df['ut'])
+
+ L = dx
+ H = dy
+ k1 = per_1
+ k2 = per_2
+ k3 = per_3
+ Ncv_k1 = (k1 * L * Pc )/(H**2 * μCO2 * ut) if (H**2 * μCO2 * ut) !=0 else 0
+ Ncv_k2 = (k2 * L * Pc )/(H**2 * μCO2 * ut) if (H**2 * μCO2 * ut) !=0 else 0
+ Ncv_k3 = (k3 * L * Pc )/(H**2 * μCO2 * ut) if (H**2 * μCO2 * ut) !=0 else 0
+ Ngv_k1 = (delta_p * G * k1 * dx)/(H * μCO2 * ut) if (H * μCO2 * ut) !=0 else 0
+ Ngv_k2 = (delta_p * G * k2 * dx)/(H * μCO2 * ut) if (H * μCO2 * ut) !=0 else 0
+ Ngv_k3 = (delta_p * G * k3 * dx)/(H * μCO2 * ut) if (H * μCO2 * ut) !=0 else 0
+
+ #df['Nb'] =(df['delta_p'] * df['G'] * df[H])/df['Pc']
+ Nb =(delta_p * G * H)/Pc if Pc !=0 else 0
+ #df['R1'] = df[L]/df[H]
+ R1 = L/H if H !=0 else 0
+ Ncv_k1_array.InsertNextValue(Ncv_k1)
+ Ncv_k2_array.InsertNextValue(Ncv_k2)
+ Ncv_k3_array.InsertNextValue(Ncv_k3)
+ Ngv_k1_array.InsertNextValue(Ngv_k1)
+ Ngv_k2_array.InsertNextValue(Ngv_k2)
+ Ngv_k3_array.InsertNextValue(Ngv_k3)
+ Nb_array.InsertNextValue(Nb)
+ R1_array.InsertNextValue(R1)
+ cell_index += 1
+
+ #for z_index in range(0, scalar_vtr.GetZCoordinates().GetNumberOfTuples()):
+
+
+ #if len(post_variable_list) > 0:
+ #self.variable_list["post"].append(post_variable_list)
+ self.__write_vtk_file(vtr, vtr_path)
+
+
+

  def __write_scalar_result(self, vis_time_step, dataframe, csv_headers):
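
The per-cell arithmetic in the new __post_process_ngv above reduces to four dimensionless groups built from the rock permeabilities (k1, k2, k3), the cell sizes L = dx and H = dy, the gas viscosity μCO2, the density difference delta_p (DL - DG, kg/m^3), and the total-flow magnitude ut, with G = 9.81 and Pc = 3000 hard-coded in the method; Ncv, Ngv, and Nb appear to be capillary/viscous, gravity/viscous, and gravity/capillary ratios, and R1 is the cell aspect ratio. A condensed sketch of the same formulas, applied to one permeability component at a time (the function and argument names here are illustrative, not part of the package):

import math

def flux_magnitude(flof):
    # ut: magnitude of the FLOF (kg/s) vector, as computed in the loop above.
    return math.sqrt(flof[0] ** 2 + flof[1] ** 2 + flof[2] ** 2)

def ngv_numbers(k, L, H, mu_co2, delta_rho, ut, G=9.81, Pc=3000.0):
    # Ncv = k * L * Pc / (H^2 * mu * ut)
    # Ngv = delta_rho * G * k * L / (H * mu * ut)
    # Nb  = delta_rho * G * H / Pc
    # R1  = L / H
    # Zero denominators fall back to 0, mirroring the guards in the method.
    visc = H ** 2 * mu_co2 * ut
    grav = H * mu_co2 * ut
    ncv = k * L * Pc / visc if visc != 0 else 0.0
    ngv = delta_rho * G * k * L / grav if grav != 0 else 0.0
    nb = delta_rho * G * H / Pc if Pc != 0 else 0.0
    r1 = L / H if H != 0 else 0.0
    return ncv, ngv, nb, r1
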
@@ -981,9 +1178,13 @@ class vis_reader:

  #if self.setting.minc:
  #print(f' MinC is enabled. Adding MinC values to the result.')
+ minc_ratio = 1
+ if self.setting.minc:
+ minc_ratio = self.num_of_minc / self.number_of_elements

  for i in range(0, vtr.GetNumberOfCells()):
  elemID = self.elemIDArray.GetValue(i)
+
  index = self.sequence_dist[i]
  if 'ELEM' in dataframe.columns:
  index = dataframe['ELEM'].tolist().index(elemID)
@@ -991,7 +1192,8 @@ class vis_reader:
  #target_row = dataframe.iloc[index]
  #print(f' Processing ELEM {elem_string} at index {index}')
  for header in headers:
- value = float(self.__parse_float(dataframe[header].iloc[index]))
+ minc_index = int(index * minc_ratio)
+ value = float(self.__parse_float(dataframe[header].iloc[minc_index]))
  vtr.GetCellData().GetArray(header).InsertNextValue(value)
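
The two hunks above scale the row index when MINC is enabled: minc_ratio = num_of_minc / number_of_elements, and each cell reads row int(index * minc_ratio) of the result table. A worked example with illustrative numbers only (assuming, as the surrounding code suggests, that num_of_minc counts the rows of the MINC ELEME block while number_of_elements counts the visualized grid cells):

# Hypothetical case: 3000 MINC rows over 1000 grid cells.
num_of_minc = 3000
number_of_elements = 1000
minc_ratio = num_of_minc / number_of_elements   # 3.0

for index in (0, 1, 2):
    minc_index = int(index * minc_ratio)        # 0, 3, 6: every third data row
    print(index, "->", minc_index)
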
@@ -1027,7 +1229,7 @@ class vis_reader:
  firstFile = True
  if os.path.isfile(self.tec_scalar_path):
  firstFile = False
- file = open(self.tec_scalar_path, "a")
+ file = open(self.tec_scalar_path, "a", encoding="utf-8")
  if len(self.setting.selected_variables_scalar) == 0:
  self.setting.selected_variables_scalar = headers

@@ -1223,7 +1425,7 @@ class vis_reader:
  firstFile = True
  if os.path.isfile(self.tec_vector_path):
  firstFile = False
- file = open(self.tec_vector_path, "a")
+ file = open(self.tec_vector_path, "a", encoding="utf-8")

  #selected_header_string = '"'+'", "'.join(self.setting.selected_variables_scalar) + '"'
  if len(self.setting.selected_variables_vector) == 0:
@@ -1550,7 +1752,7 @@ class vis_reader:
  corners_buffer = io.StringIO()
  csv_headers = []
  line_number = -1
- with open(self.setting.corners_file) as f:
+ with open(self.setting.corners_file, encoding="utf-8") as f:
  for line in f:
  line_number = line_number + 1
  values = line.strip().split(',')
@@ -2341,7 +2543,7 @@ class vis_reader:
  elif extension == '.csv':
  self.setting.out_format_type = OutType.CSV
  line_number = 0
- with open(out_file_path) as f:
+ with open(out_file_path, encoding="utf-8") as f:
  for line in f:
  if line_number == 0:
  first_col = line.split(',')[0].strip().lower()
@@ -2392,7 +2594,7 @@ class vis_reader:

  # Write to JSON file
  path = os.path.join(self.setting.vis_dir, "variable_list.json")
- with open(path, "w") as f:
+ with open(path, "w", encoding="utf-8") as f:
  json.dump(variable_list_dicts, f, indent=2)

  timestep_list_dicts = [timestep.__dict__ for timestep in self.time_steps_list]
@@ -2400,7 +2602,7 @@ class vis_reader:

  # Write to JSON file
  path = os.path.join(self.setting.vis_dir, "timestep_list.json")
- with open(path, "w") as f:
+ with open(path, "w", encoding="utf-8") as f:
  json.dump(timestep_list_dicts, f, indent=2)

  def __fix_negative_zero(self, x):
@@ -2421,7 +2623,7 @@ class vis_charting:

  variable_list_path = os.path.join(case_dir, "tough_vis", "variable_list.json")
  if os.path.isfile(variable_list_path):
- with open(variable_list_path, "r") as f:
+ with open(variable_list_path, "r", encoding="utf-8") as f:
  self.variable_list = json.load(f)
  else:
  print(f'Case variable_list.json({variable_list_path}) not found.')
@@ -2429,7 +2631,7 @@ class vis_charting:

  timestep_list_path = os.path.join(case_dir, "tough_vis", "timestep_list.json")
  if os.path.isfile(timestep_list_path):
- with open(timestep_list_path, "r") as f:
+ with open(timestep_list_path, "r", encoding="utf-8") as f:
  self.time_steps_list = json.load(f)
  else:
  print(f'Case timestep_list.json({timestep_list_path}) not found.')
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: toughanimator
- Version: 0.1.9
+ Version: 0.1.10
  Summary: A tool for visualizing TOUGH simulation outputs.
  Home-page: https://github.com/scarletref/toughanimator
  Author: scarletref
File without changes
File without changes