subsurface-terra 2025.1.0rc15__py3-none-any.whl → 2025.1.0rc17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82) hide show
  1. subsurface/__init__.py +31 -31
  2. subsurface/_version.py +34 -21
  3. subsurface/api/__init__.py +13 -13
  4. subsurface/api/interfaces/__init__.py +3 -3
  5. subsurface/api/interfaces/stream.py +136 -136
  6. subsurface/api/reader/read_wells.py +78 -78
  7. subsurface/core/geological_formats/boreholes/_combine_trajectories.py +117 -117
  8. subsurface/core/geological_formats/boreholes/_map_attrs_to_survey.py +236 -234
  9. subsurface/core/geological_formats/boreholes/_survey_to_unstruct.py +163 -163
  10. subsurface/core/geological_formats/boreholes/boreholes.py +140 -140
  11. subsurface/core/geological_formats/boreholes/collars.py +26 -26
  12. subsurface/core/geological_formats/boreholes/survey.py +86 -86
  13. subsurface/core/geological_formats/fault.py +47 -47
  14. subsurface/core/reader_helpers/reader_unstruct.py +11 -11
  15. subsurface/core/reader_helpers/readers_data.py +130 -130
  16. subsurface/core/reader_helpers/readers_wells.py +13 -13
  17. subsurface/core/structs/__init__.py +3 -3
  18. subsurface/core/structs/base_structures/__init__.py +2 -2
  19. subsurface/core/structs/base_structures/_aux.py +69 -0
  20. subsurface/core/structs/base_structures/_liquid_earth_mesh.py +121 -121
  21. subsurface/core/structs/base_structures/_unstructured_data_constructor.py +70 -70
  22. subsurface/core/structs/base_structures/base_structures_enum.py +6 -6
  23. subsurface/core/structs/base_structures/structured_data.py +282 -282
  24. subsurface/core/structs/base_structures/unstructured_data.py +338 -319
  25. subsurface/core/structs/structured_elements/octree_mesh.py +10 -10
  26. subsurface/core/structs/structured_elements/structured_grid.py +59 -59
  27. subsurface/core/structs/structured_elements/structured_mesh.py +9 -9
  28. subsurface/core/structs/unstructured_elements/__init__.py +3 -3
  29. subsurface/core/structs/unstructured_elements/line_set.py +72 -72
  30. subsurface/core/structs/unstructured_elements/point_set.py +43 -43
  31. subsurface/core/structs/unstructured_elements/tetrahedron_mesh.py +35 -35
  32. subsurface/core/structs/unstructured_elements/triangular_surface.py +62 -62
  33. subsurface/core/utils/utils_core.py +38 -38
  34. subsurface/modules/reader/__init__.py +13 -13
  35. subsurface/modules/reader/faults/faults.py +80 -80
  36. subsurface/modules/reader/from_binary.py +46 -46
  37. subsurface/modules/reader/mesh/_GOCAD_mesh.py +82 -82
  38. subsurface/modules/reader/mesh/_trimesh_reader.py +447 -447
  39. subsurface/modules/reader/mesh/csv_mesh_reader.py +53 -53
  40. subsurface/modules/reader/mesh/dxf_reader.py +177 -177
  41. subsurface/modules/reader/mesh/glb_reader.py +30 -30
  42. subsurface/modules/reader/mesh/mx_reader.py +232 -232
  43. subsurface/modules/reader/mesh/obj_reader.py +53 -53
  44. subsurface/modules/reader/mesh/omf_mesh_reader.py +43 -43
  45. subsurface/modules/reader/mesh/surface_reader.py +56 -56
  46. subsurface/modules/reader/mesh/surfaces_api.py +41 -41
  47. subsurface/modules/reader/profiles/__init__.py +3 -3
  48. subsurface/modules/reader/profiles/profiles_core.py +197 -197
  49. subsurface/modules/reader/read_netcdf.py +38 -38
  50. subsurface/modules/reader/topography/__init__.py +7 -7
  51. subsurface/modules/reader/topography/topo_core.py +100 -100
  52. subsurface/modules/reader/volume/read_grav3d.py +447 -428
  53. subsurface/modules/reader/volume/read_volume.py +327 -230
  54. subsurface/modules/reader/volume/segy_reader.py +105 -105
  55. subsurface/modules/reader/volume/seismic.py +173 -173
  56. subsurface/modules/reader/volume/volume_utils.py +43 -43
  57. subsurface/modules/reader/wells/DEP/__init__.py +43 -43
  58. subsurface/modules/reader/wells/DEP/_well_files_reader.py +167 -167
  59. subsurface/modules/reader/wells/DEP/_wells_api.py +61 -61
  60. subsurface/modules/reader/wells/DEP/_welly_reader.py +180 -180
  61. subsurface/modules/reader/wells/DEP/pandas_to_welly.py +212 -212
  62. subsurface/modules/reader/wells/_read_to_df.py +57 -57
  63. subsurface/modules/reader/wells/read_borehole_interface.py +148 -148
  64. subsurface/modules/reader/wells/wells_utils.py +68 -68
  65. subsurface/modules/tools/mocking_aux.py +104 -104
  66. subsurface/modules/visualization/__init__.py +2 -2
  67. subsurface/modules/visualization/to_pyvista.py +320 -320
  68. subsurface/modules/writer/to_binary.py +12 -12
  69. subsurface/modules/writer/to_rex/common.py +78 -78
  70. subsurface/modules/writer/to_rex/data_struct.py +74 -74
  71. subsurface/modules/writer/to_rex/gempy_to_rexfile.py +791 -791
  72. subsurface/modules/writer/to_rex/material_encoder.py +44 -44
  73. subsurface/modules/writer/to_rex/mesh_encoder.py +152 -152
  74. subsurface/modules/writer/to_rex/to_rex.py +115 -115
  75. subsurface/modules/writer/to_rex/utils.py +15 -15
  76. subsurface/optional_requirements.py +116 -116
  77. {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc17.dist-info}/METADATA +194 -194
  78. subsurface_terra-2025.1.0rc17.dist-info/RECORD +99 -0
  79. {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc17.dist-info}/WHEEL +1 -1
  80. {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc17.dist-info}/licenses/LICENSE +203 -203
  81. subsurface_terra-2025.1.0rc15.dist-info/RECORD +0 -98
  82. {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc17.dist-info}/top_level.txt +0 -0
@@ -1,38 +1,38 @@
1
- from typing import Union
2
-
3
- from pathlib import Path
4
-
5
- from ..structs import StructuredData, UnstructuredData
6
-
7
-
8
- def get_extension(path):
9
- try:
10
- p = Path(path)
11
- return p.suffix
12
- except TypeError:
13
- return False
14
-
15
-
16
- def replace_outliers(base_data: Union[StructuredData, UnstructuredData], dim=0, perc=0.99, replace_for=None):
17
- """@Edoardo Guerreiro https://stackoverflow.com/questions/60816533/
18
- is-there-a-built-in-function-in-xarray-to-remove-outliers-from-a-dataset"""
19
-
20
- data = base_data.data
21
- # calculate percentile
22
- threshold = data[dim].quantile(perc)
23
-
24
- # find outliers and replace them with max among remaining values
25
- mask = data[dim].where(abs(data[dim]) <= threshold)
26
- if replace_for == 'max':
27
- max_value = mask.max().values
28
- # .where replace outliers with nan
29
- mask = mask.fillna(max_value)
30
- elif replace_for == 'min':
31
- min_value = mask.min().values
32
- # .where replace outliers with nan
33
- mask = mask.fillna(min_value)
34
-
35
- print(mask)
36
- data[dim] = mask
37
-
38
- return data
1
+ from typing import Union
2
+
3
+ from pathlib import Path
4
+
5
+ from ..structs import StructuredData, UnstructuredData
6
+
7
+
8
def get_extension(path):
    """Return the file suffix of *path* (e.g. ``'.csv'``).

    Args:
        path: Anything :class:`pathlib.Path` accepts (str, PathLike).

    Returns:
        str | bool: The suffix (empty string when there is none), or
        ``False`` when *path* cannot be interpreted as a path at all.
    """
    try:
        return Path(path).suffix
    except TypeError:
        return False
14
+
15
+
16
def replace_outliers(base_data: Union[StructuredData, UnstructuredData], dim=0, perc=0.99, replace_for=None):
    """Replace outliers of one data variable, mutating ``base_data.data``.

    Values whose absolute magnitude exceeds the ``perc`` quantile become
    NaN; when ``replace_for`` is ``'max'``/``'min'`` those NaNs are filled
    with the max/min of the remaining values, otherwise they stay NaN.

    @Edoardo Guerreiro https://stackoverflow.com/questions/60816533/
    is-there-a-built-in-function-in-xarray-to-remove-outliers-from-a-dataset

    Args:
        base_data: Container whose ``.data`` is an xarray object
            (assumes ``data[dim]`` supports quantile/where/fillna —
            TODO confirm against callers).
        dim: Key of the data variable to filter.
        perc: Quantile in (0, 1) used as the outlier threshold.
        replace_for: ``'max'``, ``'min'``, or ``None`` (leave NaN).

    Returns:
        The mutated ``base_data.data``.
    """
    data = base_data.data
    # Threshold on the requested quantile of the variable itself.
    threshold = data[dim].quantile(perc)

    # .where() replaces values above the threshold with NaN.
    mask = data[dim].where(abs(data[dim]) <= threshold)
    if replace_for == 'max':
        max_value = mask.max().values
        mask = mask.fillna(max_value)
    elif replace_for == 'min':
        min_value = mask.min().values
        mask = mask.fillna(min_value)

    # BUG FIX: removed stray debug `print(mask)` left in library code.
    data[dim] = mask

    return data
@@ -1,13 +1,13 @@
1
- # import warnings
2
-
3
- from .profiles import *
4
-
5
- from .topography.topo_core import read_structured_topography, read_unstructured_topography
6
-
7
- from .mesh.omf_mesh_reader import omf_stream_to_unstructs
8
- from .mesh.dxf_reader import dxf_stream_to_unstruct_input, dxf_file_to_unstruct_input
9
- from .mesh.mx_reader import mx_to_unstruc_from_binary
10
- from .mesh.obj_reader import load_obj_with_trimesh, load_obj_with_trimesh_from_binary
11
- from .mesh.glb_reader import load_gltf_with_trimesh
12
-
13
- from .volume.read_grav3d import read_msh_structured_grid
1
+ # import warnings
2
+
3
+ from .profiles import *
4
+
5
+ from .topography.topo_core import read_structured_topography, read_unstructured_topography
6
+
7
+ from .mesh.omf_mesh_reader import omf_stream_to_unstructs
8
+ from .mesh.dxf_reader import dxf_stream_to_unstruct_input, dxf_file_to_unstruct_input
9
+ from .mesh.mx_reader import mx_to_unstruc_from_binary
10
+ from .mesh.obj_reader import load_obj_with_trimesh, load_obj_with_trimesh_from_binary
11
+ from .mesh.glb_reader import load_gltf_with_trimesh
12
+
13
+ from .volume.read_grav3d import read_msh_structured_grid
@@ -1,80 +1,80 @@
1
- """
2
- TODO: This is legacy code waiting to be updated to the new ideas
3
-
4
-
5
- """
6
-
7
- import pandas as pd
8
-
9
-
10
- __all__ = ['read_faultsticks_kingdom', 'read_faultsticks_charisma']
11
-
12
-
13
- def read_faultsticks_kingdom(fp: str, name=None):
14
- """
15
- Reads in Kingdom fault stick files (kingdom) exported from Petrel (tested
16
- with Petrel 2017) and returns pandas DataFrame.
17
-
18
- Args:
19
- fp (str): Filepath.
20
- name (str, optional): Default: None.
21
-
22
- Returns:
23
- (pandas.DataFrame) Fault stick information stored in dataframe with
24
- ["X", "Y", "Z", "formation", "stick id"] columns.
25
-
26
- """
27
- storage = []
28
- with open(fp, "r") as file:
29
- lines = file.readlines()
30
- for line in lines:
31
- line = line.split(" ")
32
- X = float(line[6])
33
- Y = float(line[7])
34
- Z = float(line[9])
35
- if name is None:
36
- name = line[10]
37
- else:
38
- name = name
39
- stick = int(line[-2])
40
- storage.append([X, Y, Z, name, stick])
41
-
42
- df = pd.DataFrame(storage)
43
- df.columns = ["X", "Y", "Z", "name", "stick id"]
44
- return df
45
-
46
-
47
- def read_faultsticks_charisma(fp:str, name=None):
48
- """
49
- Reads in charisma fault stick files exported from Petrel (tested with
50
- Petrel 2017) and returns pandas DataFrame.
51
-
52
- Args:
53
- fp (str): Filepath.
54
- name (str, optional): Default: None.
55
-
56
- Returns:
57
- (pandas.DataFrame) Fault stick information stored in dataframe with
58
- ["X", "Y", "Z", "formation", "stick id"] columns.
59
-
60
- """
61
- storage = []
62
- with open(fp, "r") as file: # due to the variable delimiter length its
63
- # easier to just manually read this in
64
- lines = file.readlines()
65
- for line in lines:
66
- line = line.split(" ")
67
- line = [l for l in line if len(l) >= 1]
68
- X = float(line[3])
69
- Y = float(line[4])
70
- Z = float(line[5])
71
- if name is None:
72
- name = line[6]
73
- else:
74
- name = name
75
- stick = int(line[-1])
76
- storage.append([X, Y, Z, name, stick])
77
-
78
- df = pd.DataFrame(storage)
79
- df.columns = ["X", "Y", "Z", "name", "stick id"]
80
- return df
1
+ """
2
+ TODO: This is legacy code waiting to be updated to the new ideas
3
+
4
+
5
+ """
6
+
7
+ import pandas as pd
8
+
9
+
10
+ __all__ = ['read_faultsticks_kingdom', 'read_faultsticks_charisma']
11
+
12
+
13
def read_faultsticks_kingdom(fp: str, name=None):
    """
    Reads in Kingdom fault stick files (kingdom) exported from Petrel (tested
    with Petrel 2017) and returns pandas DataFrame.

    Args:
        fp (str): Filepath.
        name (str, optional): Fault name override; when None it is taken
            from the first stick line of the file. Default: None.

    Returns:
        (pandas.DataFrame) Fault stick information stored in dataframe with
        ["X", "Y", "Z", "name", "stick id"] columns.
    """
    rows = []
    with open(fp, "r") as handle:
        for raw_line in handle.readlines():
            fields = raw_line.split(" ")
            x, y, z = float(fields[6]), float(fields[7]), float(fields[9])
            if name is None:
                name = fields[10]
            rows.append([x, y, z, name, int(fields[-2])])

    frame = pd.DataFrame(rows)
    frame.columns = ["X", "Y", "Z", "name", "stick id"]
    return frame
45
+
46
+
47
def read_faultsticks_charisma(fp:str, name=None):
    """
    Reads in charisma fault stick files exported from Petrel (tested with
    Petrel 2017) and returns pandas DataFrame.

    Args:
        fp (str): Filepath.
        name (str, optional): Fault name override; when None it is taken
            from the first stick line of the file. Default: None.

    Returns:
        (pandas.DataFrame) Fault stick information stored in dataframe with
        ["X", "Y", "Z", "name", "stick id"] columns.
    """
    rows = []
    # Delimiter width varies, so read manually and drop empty tokens.
    with open(fp, "r") as handle:
        for raw_line in handle.readlines():
            fields = [tok for tok in raw_line.split(" ") if len(tok) >= 1]
            x, y, z = float(fields[3]), float(fields[4]), float(fields[5])
            if name is None:
                name = fields[6]
            rows.append([x, y, z, name, int(fields[-1])])

    frame = pd.DataFrame(rows)
    frame.columns = ["X", "Y", "Z", "name", "stick id"]
    return frame
@@ -1,46 +1,46 @@
1
- import json
2
-
3
- import numpy as np
4
- import xarray as xr
5
-
6
-
7
- def read_data(json_file_path, binary_file_path):
8
- header = _read_json_header(json_file_path)
9
- data_dict = _read_binary_data(binary_file_path, header)
10
- return data_dict, header
11
-
12
-
13
- def _read_json_header(json_file_path):
14
- with open(json_file_path, 'r') as f:
15
- header = json.load(f)
16
- return header
17
-
18
-
19
- def _read_binary_data(binary_file_path, header, order='F'):
20
- # Initialize offset to 0
21
- offset = 0
22
- # Create a dictionary to hold the arrays
23
- data_dict = {}
24
-
25
- # Loop over the arrays
26
- for array_name in ['vertex', 'cell', 'cell_attr', 'vertex_attr']:
27
- # Get the shape and type of the array from the header
28
- array_shape = header.get(f'{array_name}_shape')
29
- array_type = header.get(f'{array_name}_types', 'float32') # default to float32 if not found
30
-
31
- # Calculate the number of elements in the array
32
- num_elements = np.prod(array_shape)
33
-
34
- # Read the data
35
- data = np.fromfile(binary_file_path, dtype="int32", count=num_elements, offset=offset)
36
- data = data.reshape(array_shape, order=order)
37
-
38
- # Store the data in the dictionary
39
- data_dict[array_name] = data
40
-
41
- # Update the offset
42
- offset += data.nbytes
43
-
44
- return data_dict
45
-
46
-
1
+ import json
2
+
3
+ import numpy as np
4
+ import xarray as xr
5
+
6
+
7
def read_data(json_file_path, binary_file_path):
    """Load a mesh payload: the JSON header plus the binary arrays it describes.

    Returns:
        tuple: (dict of numpy arrays keyed by array name, header dict).
    """
    header = _read_json_header(json_file_path)
    return _read_binary_data(binary_file_path, header), header
11
+
12
+
13
+ def _read_json_header(json_file_path):
14
+ with open(json_file_path, 'r') as f:
15
+ header = json.load(f)
16
+ return header
17
+
18
+
19
+ def _read_binary_data(binary_file_path, header, order='F'):
20
+ # Initialize offset to 0
21
+ offset = 0
22
+ # Create a dictionary to hold the arrays
23
+ data_dict = {}
24
+
25
+ # Loop over the arrays
26
+ for array_name in ['vertex', 'cell', 'cell_attr', 'vertex_attr']:
27
+ # Get the shape and type of the array from the header
28
+ array_shape = header.get(f'{array_name}_shape')
29
+ array_type = header.get(f'{array_name}_types', 'float32') # default to float32 if not found
30
+
31
+ # Calculate the number of elements in the array
32
+ num_elements = np.prod(array_shape)
33
+
34
+ # Read the data
35
+ data = np.fromfile(binary_file_path, dtype="int32", count=num_elements, offset=offset)
36
+ data = data.reshape(array_shape, order=order)
37
+
38
+ # Store the data in the dictionary
39
+ data_dict[array_name] = data
40
+
41
+ # Update the offset
42
+ offset += data.nbytes
43
+
44
+ return data_dict
45
+
46
+
@@ -1,82 +1,82 @@
1
- from dataclasses import dataclass, field
2
-
3
- import numpy as np
4
-
5
-
6
- @dataclass
7
- class GOCADMesh:
8
- header: dict = field(default_factory=dict)
9
- coordinate_system: dict = field(default_factory=dict)
10
- property_class_headers: list = field(default_factory=list)
11
- vertices: np.ndarray = field(default_factory=lambda: np.empty((0, 3)))
12
- vertex_indices: np.ndarray = field(default_factory=lambda: np.array([]))
13
- edges: np.ndarray = field(default_factory=lambda: np.empty((0, 3), dtype=int))
14
- bstones: list = field(default_factory=list)
15
- borders: list = field(default_factory=list)
16
- metadata: dict = field(default_factory=dict)
17
-
18
- @property
19
- def vectorized_edges(self):
20
- # Create index mapping from original to zero-based indices
21
- idx_map = {old_idx: new_idx for new_idx, old_idx in enumerate(self.vertex_indices)}
22
- # Map triangle indices
23
- try:
24
- triangles_mapped = np.vectorize(idx_map.get)(self.edges)
25
- except TypeError as e:
26
- self._verbose_debugging()
27
- raise f"Error mapping indices for mesh: {e}"
28
-
29
- return triangles_mapped
30
-
31
- @property
32
- def color(self):
33
- """Try to get the color from the metadata. Can be in the form of:
34
- *solid*color: #87ceeb or *solid*color: 0 0 1 1 (rgba).
35
- Returns a color in the format acceptable by PyVista.
36
- """
37
- color = None
38
-
39
- # First try to find a color value from the header
40
- for key, value in self.header.items():
41
- if 'color' in key.lower():
42
- color = value.strip()
43
- break
44
-
45
- # If no color was found, return None
46
- if not color:
47
- return None
48
-
49
- # Handle hexadecimal color string (e.g., #87ceeb)
50
- if color.startswith('#') and len(color) == 7:
51
- return color # already valid as a hex string
52
-
53
- # Handle space-separated RGBA or RGB values (e.g., "0 0 1 1")
54
- if ' ' in color:
55
- color_vals = [float(c) for c in color.split()]
56
- if len(color_vals) == 4: # RGBA
57
- return color_vals[:3] # ignore the alpha channel for now, as PyVista handles RGB
58
- elif len(color_vals) == 3: # RGB
59
- return color_vals # already in the right format
60
-
61
- # Fallback: if none of the formats match, return None
62
- return None
63
-
64
- def _verbose_debugging(self):
65
- # Create index mapping from original to zero-based indices
66
- idx_map = {old_idx: new_idx for new_idx, old_idx in enumerate(self.vertex_indices)}
67
-
68
- # Check for missing indices
69
- unique_edge_indices = np.unique(self.edges)
70
- missing_indices = set(unique_edge_indices) - set(idx_map.keys())
71
- if missing_indices:
72
- raise ValueError(f"Edges contain indices not found in vertex_indices: {missing_indices}")
73
-
74
- # Map triangle indices using a list comprehension
75
- try:
76
- edges_flat = self.edges.flatten()
77
- mapped_flat = [idx_map[idx] for idx in edges_flat]
78
- triangles_mapped = np.array(mapped_flat).reshape(self.edges.shape)
79
- except Exception as e:
80
- raise Exception(f"Error mapping indices for mesh: {e}")
81
-
82
- return triangles_mapped
1
+ from dataclasses import dataclass, field
2
+
3
+ import numpy as np
4
+
5
+
6
@dataclass
class GOCADMesh:
    """In-memory representation of a GOCAD surface mesh.

    Holds the raw header/metadata sections plus vertex coordinates,
    the original (possibly non-contiguous) vertex ids, and the edge
    (triangle) connectivity expressed in those original ids.
    """
    header: dict = field(default_factory=dict)                                     # raw header key/value pairs
    coordinate_system: dict = field(default_factory=dict)                          # CRS section of the file
    property_class_headers: list = field(default_factory=list)
    vertices: np.ndarray = field(default_factory=lambda: np.empty((0, 3)))         # (n, 3) coordinates
    vertex_indices: np.ndarray = field(default_factory=lambda: np.array([]))       # original vertex ids
    edges: np.ndarray = field(default_factory=lambda: np.empty((0, 3), dtype=int)) # connectivity in original ids
    bstones: list = field(default_factory=list)
    borders: list = field(default_factory=list)
    metadata: dict = field(default_factory=dict)

    @property
    def vectorized_edges(self):
        """Edge connectivity remapped to zero-based contiguous indices.

        Raises:
            ValueError: when edge indices cannot be mapped (e.g. they
                reference ids missing from ``vertex_indices``).
        """
        # Create index mapping from original to zero-based indices
        idx_map = {old_idx: new_idx for new_idx, old_idx in enumerate(self.vertex_indices)}
        # Map triangle indices
        try:
            triangles_mapped = np.vectorize(idx_map.get)(self.edges)
        except TypeError as e:
            # _verbose_debugging raises a detailed ValueError when edges
            # reference ids missing from vertex_indices.
            self._verbose_debugging()
            # BUG FIX: was `raise f"..."` — raising a plain string is itself
            # a TypeError at runtime; raise a proper exception instead.
            raise ValueError(f"Error mapping indices for mesh: {e}") from e

        return triangles_mapped

    @property
    def color(self):
        """Try to get the color from the metadata. Can be in the form of:
        *solid*color: #87ceeb or *solid*color: 0 0 1 1 (rgba).
        Returns a color in the format acceptable by PyVista.
        """
        color = None

        # First try to find a color value from the header
        for key, value in self.header.items():
            if 'color' in key.lower():
                color = value.strip()
                break

        # If no color was found, return None
        if not color:
            return None

        # Handle hexadecimal color string (e.g., #87ceeb)
        if color.startswith('#') and len(color) == 7:
            return color  # already valid as a hex string

        # Handle space-separated RGBA or RGB values (e.g., "0 0 1 1")
        if ' ' in color:
            color_vals = [float(c) for c in color.split()]
            if len(color_vals) == 4:  # RGBA
                return color_vals[:3]  # ignore the alpha channel for now, as PyVista handles RGB
            elif len(color_vals) == 3:  # RGB
                return color_vals  # already in the right format

        # Fallback: if none of the formats match, return None
        return None

    def _verbose_debugging(self):
        """Re-run the edge remapping verbosely to pinpoint the failure.

        Raises:
            ValueError: when edges reference ids not in ``vertex_indices``.
            Exception: for any other mapping failure.
        """
        # Create index mapping from original to zero-based indices
        idx_map = {old_idx: new_idx for new_idx, old_idx in enumerate(self.vertex_indices)}

        # Check for missing indices
        unique_edge_indices = np.unique(self.edges)
        missing_indices = set(unique_edge_indices) - set(idx_map.keys())
        if missing_indices:
            raise ValueError(f"Edges contain indices not found in vertex_indices: {missing_indices}")

        # Map triangle indices using a list comprehension
        try:
            edges_flat = self.edges.flatten()
            mapped_flat = [idx_map[idx] for idx in edges_flat]
            triangles_mapped = np.array(mapped_flat).reshape(self.edges.shape)
        except Exception as e:
            raise Exception(f"Error mapping indices for mesh: {e}")

        return triangles_mapped