toughanimator 0.1.3__tar.gz → 0.1.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: toughanimator
- Version: 0.1.3
+ Version: 0.1.5
  Summary: A tool for visualizing TOUGH simulation outputs.
  Home-page: https://github.com/scarletref/toughanimator
  Author: Your Name
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
 
  setup(
  name='toughanimator', # Package name on PyPI
- version='0.1.3',
+ version='0.1.5',
  description='A tool for visualizing TOUGH simulation outputs.',
  long_description=open('README.md').read(),
  long_description_content_type='text/markdown',
@@ -7,8 +7,8 @@ dir_name = "unresolved" #"test_cases
  case_name = "3D five spot MINC"
  test_case_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), dir_name)
 
- case_dir = os.path.join(test_case_dir, case_name)
- #case_dir = r"D:\Projects\202506\cake"
+ #case_dir = os.path.join(test_case_dir, case_name)
+ case_dir = r"D:\Projects\202507\intern\P5_eco2n_1D-radial"
 
  #case_dir = r"D:\Projects\202504\polygonal\poly_test"
  #case_dir = r"D:\Projects\202501\toughanimator\test_cases\P5_eco2n_1D-radial"
@@ -8,6 +8,7 @@ import pandas as pd
  from vtkmodules.all import *
  import pathlib
  import json
+
  from enum import Enum
 
  class MeshType(Enum):
@@ -68,7 +69,7 @@ class VisVariable:
  }
 
  class VisSetting:
- def __init__(self, input_file_paths, out_file_paths, vis_dir, corners_file="unkown", out_format_type=OutType.Unknown, tough_version = ToughVersion.Unknown, vis_types=[VisType.ParaView, VisType.Tecplot], mesh_type=MeshType.RegularGrid, debug=False, eos="ECO2N"):
+ def __init__(self, input_file_paths, out_file_paths, vis_dir, corners_file="unkown", out_format_type=OutType.Unknown, tough_version = ToughVersion.Unknown, vis_types=[VisType.ParaView, VisType.Tecplot], mesh_type=MeshType.RegularGrid, debug=False, eos="ECO2N", minc=False):
  self.mesh_type = mesh_type
  self.out_format_type = out_format_type
  self.vis_types = vis_types
@@ -83,7 +84,7 @@ class VisSetting:
  self.corners_file = corners_file
  self.debug = debug
  self.eos = eos
- self.minc = False
+ self.minc = minc
 
 
  def setBounds(self, x_bounds, y_bounds, z_bounds):
@@ -128,16 +129,21 @@ class vis_reader:
  corners_file = os.path.join(case_dir, config["corners_file"] if "corners_file" in config else "None"),
  debug = config['debug'] if 'debug' in config else False,
  eos = config['EOS'] if 'EOS' in config else "ECO2N",
- #minc = config['MINC'] if 'MINC' in config else False,
+ minc = config['MINC'] if 'MINC' in config else False,
  )
 
+ # check if the project is using MINC
+ minc_file = os.path.join(case_dir, 'MINC')
+ if os.path.exists(minc_file):
+ setting.minc = True
+ if minc_file in setting.input_file_paths:
+ setting.input_file_paths.remove(minc_file)
+
  for input_file_path in setting.input_file_paths:
  if not os.path.exists(input_file_path):
  print(f'Can\'t find input file: ({input_file_path}). please check the path or remove it from the config.json.')
  sys.exit(1)
- # check if this project use MINC
- elif input_file_path.endswith('MINC'):
- setting.minc = True
+
  for out_file_path in setting.out_file_paths:
  if not os.path.exists(input_file_path):
  print(f'Can\'t find output file: ({out_file_path}). please check the path or remove it from the config.json.')
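The hunk above moves MINC detection out of the input-file loop: instead of flagging setting.minc when a listed input path ends in 'MINC', the reader now probes the case directory for a MINC file directly, enables the flag (or takes it from config), and drops that file from the inputs that will be parsed. A minimal standalone sketch of the same probe-and-filter pattern (function and variable names here are illustrative, not the package's API):

    import os

    def detect_minc(case_dir, input_file_paths):
        # Probe for the optional MINC file in the case directory.
        minc_file = os.path.join(case_dir, 'MINC')
        minc_enabled = os.path.exists(minc_file)
        # If present, keep it out of the regular input list so it is not parsed as an ordinary input.
        filtered = [p for p in input_file_paths if p != minc_file] if minc_enabled else input_file_paths
        return minc_enabled, filtered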
@@ -428,92 +434,78 @@ class vis_reader:
  self.__write_vtk_file(self.incon_vtk, self.incon_path)
  print(f' ✓ Initial condition file created: {self.incon_path}')
 
+
+
  def __read_TOUGH2_CSV_outfile(self):
- output_buffer = io.StringIO()
- current_time_step = None
- tim_step_counter = 1
+ self.time_steps_list = []
+ value_type = ValueType.Unknown
+ current_time = None
+ buffer = io.StringIO()
  csv_headers = []
  line_number = -1
- reading_number = 0
- value_type = ValueType.Unknown
  start_index = -1
- self.time_steps_list = []
+ def process_chunk():
+ """Define what to do with each flushed chunk."""
+ buffer.seek(0)
+ df = pd.read_csv(buffer)
+ #print(f"Processing time group:\n{df.head()}")
+ time_step = VisTimeStep(
+ time=float(current_time),
+ time_step=len(self.time_steps_list)+1,
+ iteration=1
+ )
+ self.time_steps_list.append(time_step)
+ if value_type == ValueType.Scalar:
+ self.__write_scalar_result(time_step, df, csv_headers)
+ elif value_type == ValueType.Vector:
+ self.__write_vector_result(time_step, df, csv_headers)
+
+ buffer.flush()
+ buffer.close()
+
  with open(self.current_out_file) as f:
  for line in f:
  line_number = line_number + 1
  values = line.strip().split(',')
+
  if line_number == 0:
  csv_headers = [x.strip() for x in values]
  if 'ELEM' in csv_headers and 'INDEX' in csv_headers:
  value_type = ValueType.Scalar
- start_index = 3
-
+ start_index = 1 # remove the first item
+
  elif 'ELEM1' in csv_headers and 'ELEM2' in csv_headers:
  value_type = ValueType.Vector
  start_index = 4
-
+
+ # remove the first "TIME" header (to reduce the number of columns)
  csv_headers = csv_headers[start_index:]
- print(f' Value type: {value_type.name}')
+
+ # Write header once
+ buffer.write(','.join(csv_headers) + '\n')
  continue
 
- time = self.__parse_float(values[0].strip())
+ row_time = self.__parse_float(values[0].strip())
 
- # next time step
- if current_time_step == None or time != current_time_step.time:
-
- # when all items in this timestep have been read
- if value_type == ValueType.Scalar and reading_number == self.number_of_elements:
- output_buffer.seek(0)
- df = pd.read_csv(output_buffer, sep=',', header=0)
-
- self.__write_scalar_result(
- current_time_step, df, csv_headers)
- output_buffer.flush()
- output_buffer.close()
- output_buffer = io.StringIO()
- reading_number = 0
- if value_type == ValueType.Vector and reading_number == self.number_of_connections:
- output_buffer.seek(0)
- df = pd.read_csv(output_buffer, sep=',', header=0)
- #df.to_csv(os.path.join(self.setting.vis_dir, "timestep.csv"), index=False)
- self.__write_vector_result(
- current_time_step, df, csv_headers)
- output_buffer.flush()
- output_buffer.close()
- output_buffer = io.StringIO()
- reading_number = 0
+ if current_time is None:
+ current_time = row_time
 
- current_time_step = VisTimeStep(
- time=float(time),
- time_step=tim_step_counter,
- iteration=1
- )
- #print(f' Reading step {current_time_step.time_step_id} : {current_time_step.time} ...')
+ if row_time != current_time:
+ # Time changed → flush and reset
+ process_chunk()
+ buffer = io.StringIO()
+ buffer.write(','.join(csv_headers) + '\n') # Write header
+ current_time = row_time
+
+ # Write current row
+ buffer.write(','.join(values[start_index:]) + '\n')
+
+ # Flush the last group
+ if buffer.tell() > 0:
+ process_chunk()
+
 
- # Initialize buffer
- header_string = ','.join(csv_headers)
- output_buffer.write(header_string + '\n')
- self.time_steps_list.append(current_time_step)
- tim_step_counter = tim_step_counter + 1
-
- output_buffer.write(','.join(values[start_index:]) + '\n')
- reading_number = reading_number + 1
- else:
- output_buffer.write(','.join(values[start_index:]) + '\n')
- reading_number = reading_number + 1
- else:
- # write the last time step
- if value_type == ValueType.Scalar:
- output_buffer.seek(0)
- df = pd.read_csv(output_buffer, sep=',', header=0)
- self.__write_scalar_result(current_time_step, df, csv_headers)
- if value_type == ValueType.Vector:
- output_buffer.seek(0)
- df = pd.read_csv(output_buffer, sep=',', header=0)
- self.__write_vector_result(current_time_step, df, csv_headers)
- output_buffer.close()
 
- # TODO: find whick case is TOUGH3
  def __read_TOUGH3_CSV_outfile(self):
  scalar_buffer = io.StringIO()
  current_time_step = None
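The rewrite above changes how __read_TOUGH2_CSV_outfile splits the CSV into time steps: rather than counting rows against self.number_of_elements or self.number_of_connections, it buffers rows until the value in the TIME column changes, then hands the buffered chunk to pandas via the process_chunk closure and starts a new buffer. A self-contained sketch of that group-by-time streaming pattern, with illustrative names and without the package's writer methods (plain float() stands in for the package's __parse_float helper):

    import io
    import pandas as pd

    def iter_time_chunks(csv_path, start_index=1):
        # Yield (time_value, DataFrame) for each run of rows sharing the same TIME value.
        with open(csv_path) as f:
            header = [h.strip() for h in next(f).strip().split(',')]
            kept_header = ','.join(header[start_index:])
            buffer, current_time = io.StringIO(), None
            buffer.write(kept_header + '\n')
            for line in f:
                values = line.strip().split(',')
                row_time = float(values[0])
                if current_time is None:
                    current_time = row_time
                elif row_time != current_time:
                    buffer.seek(0)
                    yield current_time, pd.read_csv(buffer)   # flush the finished group
                    buffer = io.StringIO()
                    buffer.write(kept_header + '\n')
                    current_time = row_time
                buffer.write(','.join(values[start_index:]) + '\n')
            if current_time is not None:
                buffer.seek(0)
                yield current_time, pd.read_csv(buffer)       # flush the last group

Each yielded chunk corresponds to one VisTimeStep, which is what process_chunk builds before calling the scalar or vector writer.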
@@ -685,7 +677,7 @@ class vis_reader:
  if vtr.GetCellData().GetArray("PRES") is not None:
  p_name = "PRES"
 
- if vtr.GetCellData().GetArray(p_name) is not None:
+ if vtr.GetCellData().GetArray(p_name) is not None and self.incon_vtk.GetCellData().GetArray('Pressure') is not None:
  delPArray = vtkDoubleArray()
  delPArray.SetName(f'del_{p_name}')
  for i in range(0, vtr.GetNumberOfCells()):
@@ -795,12 +787,13 @@ class vis_reader:
  vtr.GetPointData().AddArray(vtr_cell_to_points.GetPointData().GetArray(variabl_name))
 
  if len(post_variable_list) > 0:
- self.variable_list["psot"] = post_variable_list
+ self.variable_list["post"] = post_variable_list
  self.__write_vtk_file(vtr, vtr_path)
 
 
- def __write_scalar_result(self, vis_time_step, dataframe, headers):
- #print(f' Writting step {vis_time_step.time_step}: {vis_time_step.time} ...')
+ def __write_scalar_result(self, vis_time_step, dataframe, csv_headers):
+
+ headers = csv_headers.copy()
  index = self.time_steps_list.index(vis_time_step)
  #vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}.vtr')
 
@@ -810,10 +803,6 @@
  #scalar_vtr = vtkRectilinearGrid()
 
  if not os.path.exists(vtr_path):
- #vtr_reader = vtkXMLRectilinearGridReader()
- #vtr_reader.SetFileName(self.main_geometry)
- #vtr_reader.Update()
- #scalar_vtr = vtr_reader.GetOutput()
  scalar_vtr = self.__read_vtk_file(self.main_geometry)
 
  # add time step data
@@ -828,10 +817,20 @@
  else:
  scalar_vtr = self.__read_vtk_file(vtr_path)
 
-
  vtr = scalar_vtr
 
  variable_list = []
+
+ # make sure to drop TIME and INDEX columns if they exist
+ if 'INDEX' in dataframe.columns:
+ dataframe = dataframe.drop(columns=['INDEX'])
+ headers.remove('INDEX')
+ if 'ELEM' in dataframe.columns:
+ # remove leading spaces from ELEM column
+ dataframe['ELEM'] = dataframe['ELEM'].str.lstrip()
+ headers.remove('ELEM')
+
+ # create vtkDoubleArray for each header
  for header in headers:
  array = vtkDoubleArray()
  array.SetName(header)
@@ -839,13 +838,23 @@ class vis_reader:
  variable_list.append(VisVariable(header, ValueType.Scalar, 1))
 
 
+ #if self.setting.minc:
+ #print(f' MinC is enabled. Adding MinC values to the result.')
+
  for i in range(0, vtr.GetNumberOfCells()):
+ elemID = self.elemIDArray.GetValue(i)
  index = self.sequence_dist[i]
+ if 'ELEM' in dataframe.columns:
+ index = dataframe['ELEM'].tolist().index(elemID)
+ #elem_string = dataframe['ELEM'].iloc[index]
+ #target_row = dataframe.iloc[index]
+ #print(f' Processing ELEM {elem_string} at index {index}')
  for header in headers:
- value = float(self.__parse_float(dataframe[header][index]))
+ value = float(self.__parse_float(dataframe[header].iloc[index]))
  vtr.GetCellData().GetArray(header).InsertNextValue(value)
 
 
+
  # update the variable list
  if self.current_out_file not in self.variable_list:
  self.variable_list[self.current_out_file] = variable_list
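The additions above switch the per-cell row lookup in __write_scalar_result from the positional sequence_dist mapping to matching each cell's ELEM id against the CSV's ELEM column. As written, dataframe['ELEM'].tolist().index(elemID) rebuilds the list and scans it linearly for every cell; building the id-to-row map once per time step yields the same rows in a single pass. A hedged sketch of that equivalent lookup (illustrative only, not what the package ships):

    import pandas as pd

    def build_elem_row_map(dataframe: pd.DataFrame) -> dict:
        # Map each stripped ELEM id to its row position, computed once per time step.
        return {elem: row for row, elem in enumerate(dataframe['ELEM'].str.strip())}

    # Inside the cell loop (names follow the diff above):
    # elem_rows = build_elem_row_map(dataframe)        # hoisted out of the loop
    # index = elem_rows[self.elemIDArray.GetValue(i)]
    # value = float(dataframe[header].iloc[index])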
@@ -909,6 +918,7 @@ class vis_reader:
  # Other data
  for header in headers:
  array = vtr.GetCellData().GetArray(header)
+
  for e in range(0, vtr.GetNumberOfCells()):
  file.write(str(array.GetValue(e)) + " ")
  file.write(" \n")
@@ -1151,6 +1161,8 @@ class vis_reader:
  matArray.InsertNextValue(elem_df['MA12'][i])
  matIDArray.InsertNextValue(mat_mapping[elem_df['MA12'][i]])
  pmxArray.InsertNextValue(self.__parse_float(elem_df['PMX'][i]))
+
+ self.elemIDArray = elemIDArray
 
  '''
  compute permeability
@@ -1274,7 +1286,7 @@ class vis_reader:
 
  d1_array = elem_conne_vtu.GetCellData().GetArray('D1')
  d2_array = elem_conne_vtu.GetCellData().GetArray('D2')
- elemIDArray = elem_conne_vtu.GetPointData().GetArray('ELEME')
+ #elemIDArray = elem_conne_vtu.GetPointData().GetArray('ELEME')
  volxArray = elem_conne_vtu.GetPointData().GetArray('VOLX')
  matArray = elem_conne_vtu.GetPointData().GetArray('Material')
  matIDArray = elem_conne_vtu.GetPointData().GetArray('Material_ID')
@@ -1495,7 +1507,7 @@ class vis_reader:
  rGrid.SetXCoordinates(xyz_coords_array[0])
  rGrid.SetYCoordinates(xyz_coords_array[1])
  rGrid.SetZCoordinates(xyz_coords_array[2])
- rGrid.GetCellData().AddArray(elemIDArray)
+ rGrid.GetCellData().AddArray(self.elemIDArray)
  rGrid.GetCellData().AddArray(volxArray)
  rGrid.GetCellData().AddArray(matArray)
 
@@ -1534,7 +1546,7 @@ class vis_reader:
  vts_points.InsertNextPoint(x, y, z_value)
 
  vts.SetPoints(vts_points)
- vts.GetCellData().AddArray(elemIDArray)
+ vts.GetCellData().AddArray(self.elemIDArray)
  vts.GetCellData().AddArray(volxArray)
  vts.GetCellData().AddArray(matArray)
 
@@ -1583,7 +1595,7 @@ class vis_reader:
  # TODO: compute mesh quality and fix bad cells
 
 
- auto_corner_vtu.GetCellData().AddArray(elemIDArray)
+ auto_corner_vtu.GetCellData().AddArray(self.elemIDArray)
  auto_corner_vtu.GetCellData().AddArray(volxArray)
  auto_corner_vtu.GetCellData().AddArray(matArray)
  if self.rock_dict is not None:
@@ -1922,13 +1934,13 @@ class vis_reader:
  main_geometray.InsertNextCell(VTK_POLYHEDRON, polyhedron_faces_idlist)
 
 
- main_geometray.GetCellData().AddArray(elemIDArray)
+ main_geometray.GetCellData().AddArray(self.elemIDArray)
  main_geometray.GetCellData().AddArray(volxArray)
  main_geometray.GetCellData().AddArray(matArray)
  if self.rock_dict is not None:
  main_geometray.GetCellData().AddArray(per_array)
  main_geometray.GetCellData().AddArray(sgr_array)
- main_geometray.GetCellData().AddArray(matIDArray)
+
  main_geometray.GetCellData().AddArray(matIDArray)
  main_geometray.GetCellData().AddArray(horizon_id_array)
  self.main_geometry = os.path.join(
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: toughanimator
- Version: 0.1.3
+ Version: 0.1.5
  Summary: A tool for visualizing TOUGH simulation outputs.
  Home-page: https://github.com/scarletref/toughanimator
  Author: Your Name
File without changes
File without changes