toughanimator 0.1.5__py3-none-any.whl → 0.1.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
toughanimator/run.py CHANGED
@@ -3,21 +3,23 @@ import tough_classes as ta
  import pandas as pd
  import matplotlib.pyplot as plt
 
- dir_name = "unresolved" #"test_cases
- case_name = "3D five spot MINC"
+ #dir_name = "unresolved"
+ dir_name = "test_cases"
+ case_name = "PetraSim_2D_Conceptual"
  test_case_dir = os.path.join(os.path.dirname(os.path.dirname(__file__)), dir_name)
 
- #case_dir = os.path.join(test_case_dir, case_name)
- case_dir = r"D:\Projects\202507\intern\P5_eco2n_1D-radial"
+ case_dir = os.path.join(test_case_dir, case_name)
+
 
  #case_dir = r"D:\Projects\202504\polygonal\poly_test"
- #case_dir = r"D:\Projects\202501\toughanimator\test_cases\P5_eco2n_1D-radial"
+ #case_dir = r"D:\Projects\202507\tough系列output\tough output format\TR_MINC_exe"
+ #case_dir = r"D:\Projects\202508\tough_cases\WW\7_TR_MINC_petrasim2025__5spot"
+ #case_dir = r"D:\Projects\202508\tough_cases\WW\6_TR_MINC_exe"
  reader = ta.vis_reader(case_dir)
  #reader.write_eleme_conne()
  #reader.write_geometry()
  #reader.write_incon()
  #reader.write_result()
- #reader.
  reader.write_all()
 
 
toughanimator/tough_classes.py CHANGED
@@ -69,7 +69,7 @@ class VisVariable:
  }
 
  class VisSetting:
- def __init__(self, input_file_paths, out_file_paths, vis_dir, corners_file="unkown", out_format_type=OutType.Unknown, tough_version = ToughVersion.Unknown, vis_types=[VisType.ParaView, VisType.Tecplot], mesh_type=MeshType.RegularGrid, debug=False, eos="ECO2N", minc=False):
+ def __init__(self, input_file_paths, out_file_paths, vis_dir, corners_file="unkown", out_format_type=OutType.Unknown, tough_version = ToughVersion.Unknown, vis_types=[VisType.ParaView, VisType.Tecplot], mesh_type=MeshType.RegularGrid, debug=False, eos="ECO2N", minc=False, selected_variables_scalar = [], selected_variables_vector = [] ):
  self.mesh_type = mesh_type
  self.out_format_type = out_format_type
  self.vis_types = vis_types
@@ -85,6 +85,8 @@ class VisSetting:
  self.debug = debug
  self.eos = eos
  self.minc = minc
+ self.selected_variables_scalar = selected_variables_scalar
+ self.selected_variables_vector = selected_variables_vector
 
 
  def setBounds(self, x_bounds, y_bounds, z_bounds):
@@ -130,6 +132,8 @@ class vis_reader:
  debug = config['debug'] if 'debug' in config else False,
  eos = config['EOS'] if 'EOS' in config else "ECO2N",
  minc = config['MINC'] if 'MINC' in config else False,
+ selected_variables_scalar = config['selected_variables_scalar'] if 'selected_variables_scalar' in config else [],
+ selected_variables_vector = config['selected_variables_vector'] if 'selected_variables_vector' in config else []
  )
 
  # check if the project is using MINC
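Note on the new keys: they are read from the same config mapping as 'EOS', 'MINC', and 'debug' and default to empty lists when absent. A minimal sketch of a config dict with them set — the key names come from the hunk above, while the surrounding values and variable names are illustrative assumptions:

    # hypothetical config mapping handed to vis_reader; only the key names are taken from the diff
    config = {
        "EOS": "ECO2N",
        "MINC": False,
        "debug": False,
        "selected_variables_scalar": ["PRES", "TEMP"],   # assumed scalar column names
        "selected_variables_vector": ["FLOW"],           # assumed connection column name
    }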
@@ -215,6 +219,7 @@ class vis_reader:
  self.current_out_file = output_file_path
  self.__check_TOUGH_version()
  print(f' Version: {self.setting.tough_version.name}')
+ print(f' EOS: {self.setting.eos}')
  if self.setting.tough_version == ToughVersion.TOUGH2:
  self.__read_TOUGH2_CSV_outfile()
  elif self.setting.tough_version == ToughVersion.TOUGH3:
@@ -357,6 +362,7 @@ class vis_reader:
  def __write_incon_buffer(self):
  self.incon_buffer = io.StringIO()
  has_incon = False
+ self.global_incon = False
  # write temp element txt
 
  for input_file_path in self.setting.input_file_paths:
@@ -368,7 +374,7 @@ class vis_reader:
  if line.startswith('INCON-'):
  reading_incon = True
  has_incon = True
- find_incon = True
+ #find_incon = True
  continue
 
  if reading_incon:
@@ -386,9 +392,33 @@ class vis_reader:
  if len(line.split()) == 2:
  self.incon_buffer.write(line)
 
- if has_incon == False:
- print(f'Can\'t find INCON block in input_file_paths.')
- #sys.exit(1)
+
+ if has_incon == False or self.incon_buffer.tell() == 0:
+ print(f'Can\'t find INCON block in input_file_paths (or length of INCON is zero).')
+
+ # find the fifth line of the "PARAM" block
+ reading_pram = False
+ for input_file_path in self.setting.input_file_paths:
+ line_counter = 0
+ with open(input_file_path) as f:
+ reading_pram = False
+ for line in f:
+ if line.startswith('PARAM-'):
+ reading_pram = True
+ continue
+ if reading_pram:
+ line_counter += 1
+ if self.__check_if_block_end(line, line_counter):
+ if line_counter <5:
+ print(f' Can\'t find Global INCON line in PARAM block of {input_file_path}. Please check the PARAM block.')
+ break
+
+ if line_counter == 5:
+ self.global_incon = True
+ print(f' Found Global INCON line in PARAM block of {input_file_path}')
+ self.incon_buffer.write(line)
+ break
+
  else:
  print(f' Found INCON block in {found_path}')
 
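The fallback above kicks in when no INCON block is found (or it is empty): it scans the PARAM block and, if that block reaches a fifth line, treats that line as the global initial conditions and writes it into the INCON buffer. A simplified standalone sketch of the same scan-keyword-then-count-lines pattern, with a caller-supplied is_block_end() standing in for the class's private __check_if_block_end():

    def find_nth_line_of_block(path, keyword, n, is_block_end):
        """Return the n-th line after a line starting with keyword, or None if the block ends first."""
        reading = False
        counter = 0
        with open(path) as f:
            for line in f:
                if line.startswith(keyword):
                    reading = True
                    continue
                if reading:
                    counter += 1
                    if is_block_end(line, counter):
                        return None   # block closed before reaching line n
                    if counter == n:
                        return line
        return None

    # e.g. global_incon_line = find_nth_line_of_block('flow.inp', 'PARAM-', 5, is_block_end)  # path is hypothetical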
@@ -418,16 +448,21 @@ class vis_reader:
  print(f' It is empty in INCON block.')
  return
 
- for header in incon_names:
- array = vtkDoubleArray()
- array.SetName(header)
- self.incon_vtk.GetCellData().AddArray(array)
-
- for i in range(0, self.incon_vtk.GetNumberOfCells()):
+ else:
  for header in incon_names:
- index = self.sequence_dist[i]
- value = self.__parse_float(incon_df[header][index])
- self.incon_vtk.GetCellData().GetArray(header).InsertNextValue(value)
+ array = vtkDoubleArray()
+ array.SetName(header)
+ self.incon_vtk.GetCellData().AddArray(array)
+
+ for i in range(0, self.incon_vtk.GetNumberOfCells()):
+ for header in incon_names:
+ if self.global_incon:
+ # if global incon, use the first row
+ value = self.__parse_float(incon_df[header][0])
+ else:
+ index = self.sequence_dist[i]
+ value = self.__parse_float(incon_df[header][index])
+ self.incon_vtk.GetCellData().GetArray(header).InsertNextValue(value)
 
  extension = os.path.splitext(self.main_geometry)[1]
  self.incon_path = os.path.join(self.setting.vis_dir, f'incon{extension}')
@@ -459,6 +494,9 @@ class vis_reader:
  self.__write_scalar_result(time_step, df, csv_headers)
  elif value_type == ValueType.Vector:
  self.__write_vector_result(time_step, df, csv_headers)
+ else:
+ print('Error: Your value type is not supported')
+ sys.exit(1)
 
  buffer.flush()
  buffer.close()
@@ -470,14 +508,17 @@ class vis_reader:
 
  if line_number == 0:
  csv_headers = [x.strip() for x in values]
+ #replace all " with ''"
+ csv_headers = [x.replace('"', '') for x in csv_headers]
  if 'ELEM' in csv_headers and 'INDEX' in csv_headers:
  value_type = ValueType.Scalar
- start_index = 1 # remove the first item
+ #start_index = 1 # remove the first item
 
  elif 'ELEM1' in csv_headers and 'ELEM2' in csv_headers:
  value_type = ValueType.Vector
- start_index = 4
+ #start_index = 1
 
+ start_index = 1
  # remove the first "TIME" header (to reduce the number of columns)
  csv_headers = csv_headers[start_index:]
 
@@ -505,7 +546,6 @@ class vis_reader:
  process_chunk()
 
 
-
  def __read_TOUGH3_CSV_outfile(self):
  scalar_buffer = io.StringIO()
  current_time_step = None
@@ -517,6 +557,93 @@ class vis_reader:
  start_index = -1
  self.time_steps_list = []
 
+ with open(self.current_out_file) as f:
+ for line in f:
+ line_number = line_number + 1
+ values = line.strip().split(',')
+ if line_number == 0:
+ values = [x.replace('"', '') for x in values]
+ csv_headers = [x.strip() for x in values]
+
+ if 'ELEM' in csv_headers:
+ value_type = ValueType.Scalar
+ start_index = 5
+
+ elif 'ELEM1' in csv_headers and 'ELEM2' in csv_headers:
+ value_type = ValueType.Vector
+ start_index = 5
+
+ csv_headers = csv_headers[start_index:]
+ f.readline() # skip next line
+ print(f' Value type: {value_type.name}')
+ continue
+
+ # Find time item
+ if len(values) == 1:
+ time_string = values[0].replace('"', '').strip()
+ time_string = time_string.split()[-1]
+ time = self.__parse_float(time_string)
+
+ # if not the first time step
+ if value_type == ValueType.Scalar and reading_number == self.number_of_elements:
+ scalar_buffer.seek(0)
+ df = pd.read_csv(scalar_buffer, sep=',', header=0)
+ self.__write_scalar_result(
+ current_time_step, df, csv_headers)
+ scalar_buffer.flush()
+ scalar_buffer.close()
+ scalar_buffer = io.StringIO()
+ reading_number = 0
+
+ if value_type == ValueType.Vector and reading_number == self.number_of_connections:
+ scalar_buffer.seek(0)
+ df = pd.read_csv(scalar_buffer, sep=',', header=0)
+ self.__write_vector_result(
+ current_time_step, df, csv_headers)
+ scalar_buffer.flush()
+ scalar_buffer.close()
+ scalar_buffer = io.StringIO()
+ reading_number = 0
+
+ current_time_step = VisTimeStep(
+ time=float(time),
+ time_step=tim_step_counter,
+ iteration=1
+ )
+
+ # Initialize buffer
+ header_string = ','.join(csv_headers)
+ scalar_buffer.write(header_string + '\n')
+ self.time_steps_list.append(current_time_step)
+ tim_step_counter = tim_step_counter + 1
+
+ else:
+ scalar_buffer.write(','.join(values[start_index:]) + '\n')
+ reading_number = reading_number + 1
+
+ else:
+ # write the last time step
+ if value_type == ValueType.Scalar:
+ scalar_buffer.seek(0)
+ df = pd.read_csv(scalar_buffer, sep=',', header=0)
+ self.__write_scalar_result(current_time_step, df, csv_headers)
+ if value_type == ValueType.Vector:
+ scalar_buffer.seek(0)
+ df = pd.read_csv(scalar_buffer, sep=',', header=0)
+ self.__write_vector_result(current_time_step, df, csv_headers)
+ scalar_buffer.close()
+
+ def __old_read_TOUGH3_CSV_outfile(self):
+ scalar_buffer = io.StringIO()
+ current_time_step = None
+ tim_step_counter = 1
+ csv_headers = []
+ line_number = -1
+ reading_number = 0
+ value_type = ValueType.Unknown
+ start_index = -1
+ self.time_steps_list = []
+
  with open(self.current_out_file) as f:
  for line in f:
  line_number = line_number + 1
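In the rewritten reader the file layout is inferred from the rows themselves: the first row carries the quoted column names, any row with a single field carries the output time, and every other row is a data record whose first five columns are skipped (start_index = 5). For the time rows, the last whitespace-separated token of that single field is parsed as the time; a small illustration of that branch (the sample row text is an assumption, not an excerpt from a real TOUGH3 output):

    # mirrors the len(values) == 1 branch above; the sample row is assumed
    values = '"TIME [sec]  0.86400E+05"'.strip().split(',')
    if len(values) == 1:
        time_string = values[0].replace('"', '').strip().split()[-1]
        time = float(time_string)   # 86400.0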
@@ -672,6 +799,8 @@ class vis_reader:
  p_name = "P"
  if vtr.GetCellData().GetArray("P (Pa)") is not None:
  p_name = "P (Pa)"
+ if vtr.GetCellData().GetArray("P(Pa)") is not None:
+ p_name = "P(Pa)"
  if vtr.GetCellData().GetArray("P(bar)") is not None:
  p_name = "P(bar)"
  if vtr.GetCellData().GetArray("PRES") is not None:
@@ -826,6 +955,8 @@ class vis_reader:
  dataframe = dataframe.drop(columns=['INDEX'])
  headers.remove('INDEX')
  if 'ELEM' in dataframe.columns:
+ # change the data type of ELEM to string
+ dataframe['ELEM'] = dataframe['ELEM'].astype(str)
  # remove leading spaces from ELEM column
  dataframe['ELEM'] = dataframe['ELEM'].str.lstrip()
  headers.remove('ELEM')
@@ -875,6 +1006,11 @@ class vis_reader:
  if VisType.Tecplot not in self.setting.vis_types:
  return
 
+
+ if self.setting.mesh_type == MeshType.PolygonalMesh:
+ print(f' Tecplot output for polygonal mesh is not supported yet.')
+ return
+
  # Start Tecplot generating
  tec_name = pathlib.Path(self.setting.input_file_paths[0]).stem
  self.tec_scalar_path = os.path.join(self.setting.vis_dir, f'{tec_name}_scalar.dat')
@@ -884,10 +1020,11 @@ class vis_reader:
  file = open(self.tec_scalar_path, "a")
  if firstFile:
  file.write('TITLE = TECPLOT PLOT \n')
- header_string = '"'+'", "'.join(headers) + '"'
- file.write(f'VARIABLES = "X", "Y", "Z", {header_string}\n')
+ selected_header_string = '"'+'", "'.join(self.setting.selected_variables_scalar) + '"'
+ #header_string = '"'+'", "'.join(headers) + '"'
+ file.write(f'VARIABLES = "X", "Y", "Z", {selected_header_string}\n')
 
- tecplot_cell_type = 'BRICK'
+ #tecplot_cell_type = 'BRICK'
 
  #time_statement = f'ZONE T ="{vis_time_step.time_step}, Time = {vis_time_step.time}", N = {vtu_cell_to_points.GetNumberOfPoints()}, E = {vtu_cell_to_points.GetNumberOfCells()}, F = FEPOINT, ET = {tecplot_cell_type}, SOLUTIONTIME = {vis_time_step.time}\n'
 
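With this change the Tecplot VARIABLES record is built from the user-selected scalar names instead of every column in the output. A quick illustration of the string it produces (the variable names are assumed):

    selected_variables_scalar = ["PRES", "TEMP"]   # assumed selection
    selected_header_string = '"' + '", "'.join(selected_variables_scalar) + '"'
    print(f'VARIABLES = "X", "Y", "Z", {selected_header_string}')
    # -> VARIABLES = "X", "Y", "Z", "PRES", "TEMP"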
@@ -916,7 +1053,7 @@ class vis_reader:
  file.write(" \n")
 
  # Other data
- for header in headers:
+ for header in self.setting.selected_variables_scalar:
  array = vtr.GetCellData().GetArray(header)
 
  for e in range(0, vtr.GetNumberOfCells()):
@@ -943,8 +1080,9 @@ class vis_reader:
  return var_string
 
  # write the vector result for one timestep
- def __write_vector_result(self, vis_time_step, dataframe, headers):
+ def __write_vector_result(self, vis_time_step, dataframe, csv_headers):
 
+ headers = csv_headers.copy()
  index = self.time_steps_list.index(vis_time_step)
  extension = os.path.splitext(self.main_geometry)[1]
  vtr_path = os.path.join(self.setting.vis_dir, 'paraview', f'time_step_{vis_time_step.time_step}{extension}')
@@ -958,74 +1096,96 @@ class vis_reader:
  timesteps.SetNumberOfTuples(1)
  timesteps.SetNumberOfComponents(1)
  timesteps.SetTuple1(0, vis_time_step.time)
+ #timesteps.SetTuple2(1, 100000)
  vector_vtr.SetFieldData(vtkFieldData())
  vector_vtr.GetFieldData().AddArray(timesteps)
 
  else:
- vtr_reader = vtkXMLRectilinearGridReader()
- vtr_reader.SetFileName(vtr_path)
- vtr_reader.Update()
- vector_vtr = vtr_reader.GetOutput()
+ vector_vtr = self.__read_vtk_file(vtr_path)
 
  vtu_reader = vtkXMLUnstructuredGridReader()
  vtu_reader.SetFileName(self.elem_conne_path)
  vtu_reader.Update()
  conne_vtu = vtu_reader.GetOutput()
 
+ # make sure to drop TIME and INDEX columns if they exist
+ if 'INDEX' in dataframe.columns:
+ dataframe = dataframe.drop(columns=['INDEX'])
+ headers.remove('INDEX')
+ if 'ELEM1' in dataframe.columns:
+ # remove leading spaces from ELEM column
+ dataframe['ELEM1'] = dataframe['ELEM1'].astype(str)
+ dataframe['ELEM1'] = dataframe['ELEM1'].str.lstrip()
+ headers.remove('ELEM1')
+ if 'ELEM2' in dataframe.columns:
+ # remove leading spaces from ELEM column
+ dataframe['ELEM2'] = dataframe['ELEM2'].astype(str)
+ dataframe['ELEM2'] = dataframe['ELEM2'].str.lstrip()
+ headers.remove('ELEM2')
 
  variable_list = []
+
+ # find max number of cell connections of a element
+ num_of_components = 3
+ for elem_id in range(0, conne_vtu.GetNumberOfPoints()):
+ cellIDs = vtkIdList()
+ conne_vtu.GetPointCells(elem_id, cellIDs)
+ if cellIDs.GetNumberOfIds() > num_of_components:
+ num_of_components = cellIDs.GetNumberOfIds()
+
+
+ # create double array for each header
  for header in headers:
  #if not header == 'ELEM1' and not header == 'ELEM2' and not header == 'INDEX':
  array = vtkDoubleArray()
  array.SetName(header)
- array.SetNumberOfComponents(3)
+ array.SetNumberOfComponents(num_of_components)
  array.SetNumberOfTuples(vector_vtr.GetNumberOfCells())
- array.FillComponent(0, 0)
- array.FillComponent(1, 0)
- array.FillComponent(2, 0)
+ for i in range(0, num_of_components):
+ # set the default value to 0
+ array.FillComponent(i, 0)
  vector_vtr.GetCellData().AddArray(array)
+
  variable_list.append(VisVariable(header, ValueType.Vector, 3))
 
  if self.current_out_file not in self.variable_list:
  self.variable_list[self.current_out_file] = variable_list
-
+
+
+ # prepare cell data array for cells in conne_vtu
+ for header in headers:
+ array = vtkDoubleArray()
+ array.SetName(header)
+ conne_vtu.GetCellData().AddArray(array)
 
+ # add celldata to cells in elem_conn
+ for cell_id in range(0, conne_vtu.GetNumberOfCells()):
+ for header in headers:
+ value = dataframe.loc[cell_id, header]
+ conne_vtu.GetCellData().GetArray(header).InsertNextValue(value)
+ #self.__write_vtk_file(
+ #conne_vtu, self.elem_conne_path)
+
+ # create the vector data
  for elem_id in range(0, conne_vtu.GetNumberOfPoints()):
- point = conne_vtu.GetPoint(elem_id)
  cellIDs = vtkIdList()
  conne_vtu.GetPointCells(elem_id, cellIDs)
- next_x = -1
- next_y = -1
- next_z = -1
  for i in range(0, cellIDs.GetNumberOfIds()):
  cellID = cellIDs.GetId(i)
- cell = conne_vtu.GetCell(cellID)
- next_id = cell.GetPointId(1)
-
- if next_id - elem_id == 1:
- next_x = cellID
 
- elif next_id - elem_id == self.xyz_elem[0]:
- if self.xyz_elem[1] == 1:
- next_z = cellID
- next_y = -1
- break
+ for header in headers:
+ #value = dataframe.loc[next_id, header]
+ value = conne_vtu.GetCellData().GetArray(header).GetValue(cellID)
+ vector_vtr.GetCellData().GetArray(header).SetComponent(elem_id, i, value)
 
+ # Put cell-centered data into points
+ filter = vtkCellDataToPointData()
+ filter.SetInputData(vector_vtr)
+ filter.Update()
+ vtr_cell_to_points = filter.GetOutput()
 
- else:
- next_y = cellID
-
- elif next_id - elem_id == self.xyz_elem[0] * self.xyz_elem[1]:
- next_z = cellID
-
- for header in headers:
- #if not header == 'ELEM1' and not header == 'ELEM2' and not header == 'INDEX':
- x_value = 0 if next_x == -1 else self.__parse_float(dataframe[header][next_x])
- y_value = 0 if next_y == -1 else self.__parse_float(dataframe[header][next_y])
- z_value = 0 if next_z == -1 else self.__parse_float(dataframe[header][next_z])
- vector_vtr.GetCellData().GetArray(header).SetComponent(elem_id, 0, x_value)
- vector_vtr.GetCellData().GetArray(header).SetComponent(elem_id, 1, y_value)
- vector_vtr.GetCellData().GetArray(header).SetComponent(elem_id, 2, z_value)
+ for i in range(0, vtr_cell_to_points.GetPointData().GetNumberOfArrays()):
+ vector_vtr.GetPointData().AddArray(vtr_cell_to_points.GetPointData().GetArray(i))
 
  self.__write_vtk_file(
  vector_vtr, self.time_steps_list[index].vtu_file_name)
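The reworked vector path sizes each vtkDoubleArray to the largest number of connections found at any element (rather than a fixed three components) and then runs vtkCellDataToPointData to interpolate the cell-centred values onto points. A minimal standalone sketch of that VTK pattern on a toy grid, independent of the toughanimator classes (the array name and component count are arbitrary):

    from vtkmodules.vtkCommonCore import vtkDoubleArray
    from vtkmodules.vtkCommonDataModel import vtkImageData
    from vtkmodules.vtkFiltersCore import vtkCellDataToPointData

    grid = vtkImageData()
    grid.SetDimensions(3, 3, 1)              # 2 x 2 cells, 9 points

    num_of_components = 4                    # e.g. the largest connection count found
    array = vtkDoubleArray()
    array.SetName("FLOW")                    # hypothetical variable name
    array.SetNumberOfComponents(num_of_components)
    array.SetNumberOfTuples(grid.GetNumberOfCells())
    for i in range(num_of_components):
        array.FillComponent(i, 0.0)          # default every component to zero
    grid.GetCellData().AddArray(array)

    # interpolate the cell-centred array onto the grid points
    cell_to_point = vtkCellDataToPointData()
    cell_to_point.SetInputData(grid)
    cell_to_point.Update()
    point_flow = cell_to_point.GetOutput().GetPointData().GetArray("FLOW")
    print(point_flow.GetNumberOfTuples())    # 9, one tuple per point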
@@ -1343,8 +1503,8 @@ class vis_reader:
  else:
  is_parallel = self.__checkParallel(elem_conne_vtu)
  # check if polygonal mesh
-
  if os.path.exists(self.setting.corners_file):
+
  if is_parallel:
  self.setting.mesh_type = MeshType.StructuredGridFree
  else:
@@ -1824,6 +1984,7 @@ class vis_reader:
  polygon = vtkPolygon()
  center_point_inserted = False
  center_point_id = distinct_corners_points_locator.FindClosestPoint(point)
+
  for j in range(voronoi_cell.GetNumberOfPoints()):
  cell_point_id = voronoi_cell.GetPointId(j)
  cell_point = voronoi.GetPoint(cell_point_id)
@@ -2186,7 +2347,7 @@ class vis_reader:
  return True
 
  trimmed = line.lstrip()
- if trimmed[5] == "-" and trimmed[6] == "-":
+ if len(trimmed)>6 and trimmed[5] == "-" and trimmed[6] == "-":
  return True
  return False
 
{toughanimator-0.1.5.dist-info → toughanimator-0.1.7.dist-info}/METADATA CHANGED
@@ -1,10 +1,10 @@
  Metadata-Version: 2.4
  Name: toughanimator
- Version: 0.1.5
+ Version: 0.1.7
  Summary: A tool for visualizing TOUGH simulation outputs.
  Home-page: https://github.com/scarletref/toughanimator
- Author: Your Name
- Author-email: your.email@example.com
+ Author: scarletref
+ Author-email: scarletreflection@gmail.com
  Classifier: Programming Language :: Python :: 3
  Classifier: License :: OSI Approved :: MIT License
  Classifier: Operating System :: OS Independent
toughanimator-0.1.7.dist-info/RECORD ADDED
@@ -0,0 +1,7 @@
+ toughanimator/__init__.py,sha256=m1f3fJ1SNGLNKxHlS0pLNHwaHlN6UUTh5GpS_8hC8kw,30
+ toughanimator/run.py,sha256=YmoiVElHRCo9BzMmBuB53zGy_h8AEQddllQJsqN3tV4,771
+ toughanimator/tough_classes.py,sha256=xb6iOPd-xnY-Lv3WLI8mip38pMP3_uOBm_rLjT57h38,111206
+ toughanimator-0.1.7.dist-info/METADATA,sha256=RL1UNzvbpB-zEiGx7ekAbukcFYdbYdyscKllq2UB5Mg,4563
+ toughanimator-0.1.7.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ toughanimator-0.1.7.dist-info/top_level.txt,sha256=pAT-UflvbjT4lTmBdzHqApZGWbywkSM3Y_qsHyLi4pU,14
+ toughanimator-0.1.7.dist-info/RECORD,,
toughanimator-0.1.5.dist-info/RECORD DELETED
@@ -1,7 +0,0 @@
- toughanimator/__init__.py,sha256=m1f3fJ1SNGLNKxHlS0pLNHwaHlN6UUTh5GpS_8hC8kw,30
- toughanimator/run.py,sha256=EayWtDf_PrpYg8vfsyEm38XLb2-E4nF_y57TT_cdGzQ,671
- toughanimator/tough_classes.py,sha256=1SX1CttjeBkAlk6Ynw4EyIWfOKd15uWCpFvqUQo_wp8,103275
- toughanimator-0.1.5.dist-info/METADATA,sha256=EvR_7WeGK7hE8oD8h6QlFjcyBRfQ7svAS-G5AnbEcD4,4557
- toughanimator-0.1.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- toughanimator-0.1.5.dist-info/top_level.txt,sha256=pAT-UflvbjT4lTmBdzHqApZGWbywkSM3Y_qsHyLi4pU,14
- toughanimator-0.1.5.dist-info/RECORD,,