subsurface-terra 2025.1.0rc7__py3-none-any.whl → 2025.1.0rc10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- subsurface/_version.py +1 -1
- subsurface/api/__init__.py +2 -0
- subsurface/api/interfaces/stream.py +19 -1
- subsurface/modules/reader/__init__.py +2 -0
- subsurface/modules/reader/mesh/_trimesh_reader.py +411 -207
- subsurface/modules/reader/mesh/glb_reader.py +7 -4
- subsurface/modules/reader/mesh/mx_reader.py +2 -1
- subsurface/modules/reader/mesh/obj_reader.py +19 -5
- subsurface/modules/reader/volume/read_volume.py +70 -1
- subsurface/modules/reader/wells/read_borehole_interface.py +12 -6
- {subsurface_terra-2025.1.0rc7.dist-info → subsurface_terra-2025.1.0rc10.dist-info}/METADATA +1 -1
- {subsurface_terra-2025.1.0rc7.dist-info → subsurface_terra-2025.1.0rc10.dist-info}/RECORD +15 -15
- {subsurface_terra-2025.1.0rc7.dist-info → subsurface_terra-2025.1.0rc10.dist-info}/WHEEL +0 -0
- {subsurface_terra-2025.1.0rc7.dist-info → subsurface_terra-2025.1.0rc10.dist-info}/licenses/LICENSE +0 -0
- {subsurface_terra-2025.1.0rc7.dist-info → subsurface_terra-2025.1.0rc10.dist-info}/top_level.txt +0 -0
subsurface/_version.py
CHANGED
subsurface/api/__init__.py
CHANGED

subsurface/api/interfaces/stream.py
CHANGED

@@ -1,17 +1,20 @@
+import io
 from io import BytesIO
 from typing import TextIO

 import pandas
-from subsurface.modules.reader.volume.volume_utils import interpolate_unstructured_data_to_structured_data

+from ...core.structs import TriSurf
 from ...core.reader_helpers.reader_unstruct import ReaderUnstructuredHelper
 from ...core.reader_helpers.readers_data import GenericReaderFilesHelper
 from ...core.geological_formats import BoreholeSet
 from ...core.structs.base_structures import UnstructuredData, StructuredData

 from ...modules import reader
+from ...modules.reader.mesh._trimesh_reader import TriMeshTransformations
 from ...modules.reader.volume.read_volume import read_volumetric_mesh_to_subsurface, read_VTK_structured_grid
 from ...modules.reader.mesh.surfaces_api import read_2d_mesh_to_unstruct
+from ...modules.reader.volume.volume_utils import interpolate_unstructured_data_to_structured_data

 from ..reader.read_wells import read_wells

@@ -39,6 +42,21 @@ def MX_stream_to_unstruc(stream: TextIO) -> list[UnstructuredData]:
     return list_unstruct


+def OBJ_stream_to_trisurf(obj_stream: TextIO, mtl_stream: list[TextIO],
+                          texture_stream: list[io.BytesIO], coordinate_system: TriMeshTransformations) -> TriSurf:
+    tri_mesh: TriSurf = reader.load_obj_with_trimesh_from_binary(
+        obj_stream=obj_stream,
+        mtl_stream=mtl_stream,
+        texture_stream=texture_stream,
+        coord_system=coordinate_system
+    )
+    return tri_mesh
+
+
+def GLTF_stream_to_trisurf(gltf_stream: io.BytesIO, coordinate_system: TriMeshTransformations) -> TriSurf:
+    tri_mesh: TriSurf = reader.load_gltf_with_trimesh(gltf_stream, coordinate_system)
+    return tri_mesh
+
 def VTK_stream_to_struct(stream: BytesIO, attribute_name: str) -> list[StructuredData]:
     struct = read_VTK_structured_grid(stream, attribute_name)
     return [struct]
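
For orientation, a minimal usage sketch of the new stream entry points added above (illustrative only, not part of the diff; the local file path and the coordinate-system choice are assumptions):

import io

from subsurface.api.interfaces.stream import GLTF_stream_to_trisurf
from subsurface.modules.reader.mesh._trimesh_reader import TriMeshTransformations

# Hypothetical GLB payload already held in memory (e.g. fetched from blob storage).
with open("model.glb", "rb") as f:
    glb_bytes = io.BytesIO(f.read())

# Convert the binary stream straight into a TriSurf, rotating Y-up assets to Z-up.
tri_surf = GLTF_stream_to_trisurf(glb_bytes, TriMeshTransformations.RIGHT_HANDED_Z_UP)
print(tri_surf.mesh)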

subsurface/modules/reader/__init__.py
CHANGED

@@ -7,3 +7,5 @@ from .topography.topo_core import read_structured_topography, read_unstructured_
 from .mesh.omf_mesh_reader import omf_stream_to_unstructs
 from .mesh.dxf_reader import dxf_stream_to_unstruct_input, dxf_file_to_unstruct_input
 from .mesh.mx_reader import mx_to_unstruc_from_binary
+from .mesh.obj_reader import load_obj_with_trimesh, load_obj_with_trimesh_from_binary
+from .mesh.glb_reader import load_gltf_with_trimesh

subsurface/modules/reader/mesh/_trimesh_reader.py
CHANGED

@@ -1,229 +1,433 @@
-
+import enum
+from typing import Union, TextIO, Optional
+import io
+import os

 import numpy as np
-from
+from ....core.structs import UnstructuredData
+from .... import optional_requirements
+from ....core.structs import TriSurf, StructuredData

-import subsurface
-from subsurface import optional_requirements, StructuredData, TriSurf

+class TriMeshTransformations(enum.Enum):
+    RIGHT_HANDED_Z_UP = "right_handed_z_up"
+    ORIGINAL = "original"

-def _load_with_trimesh(path_to_obj, plot=False):
-    trimesh = optional_requirements.require_trimesh()
-    # Load the OBJ with Trimesh using the specified options
-    scene_or_mesh = trimesh.load(path_to_obj)
-    # Process single mesh vs. scene
-    if isinstance(scene_or_mesh, trimesh.Scene):
-        print("Loaded a Scene with multiple geometries.")
-        _process_scene(scene_or_mesh)
-        if plot:
-            scene_or_mesh.show()
-    else:
-        print("Loaded a single Trimesh object.")
-        print(f" - Vertices: {len(scene_or_mesh.vertices)}")
-        print(f" - Faces: {len(scene_or_mesh.faces)}")
-        _handle_material_info(scene_or_mesh)
-        if plot:
-            scene_or_mesh.show()
-    return scene_or_mesh

-
-
+def load_with_trimesh(path_to_file_or_buffer, file_type: Optional[str] = None,
+                      coordinate_system: TriMeshTransformations = TriMeshTransformations.RIGHT_HANDED_Z_UP, *, plot=False):
     """
-
-
-    This function takes either a `trimesh.Trimesh` object or a `trimesh.Scene`
-    object and converts it to a `subsurface.TriSurf` object. If the input is
-    a scene containing multiple geometries, it processes all geometries and
-    combines them into a single TriSurf object. If the input is a single
-    Trimesh object, it directly converts it to a TriSurf object. An error
-    is raised if the input is neither a `trimesh.Trimesh` nor a `trimesh.Scene`
-    object.
-
-    Parameters:
-        scene_or_mesh (Union[trimesh.Trimesh, trimesh.Scene]):
-            Input geometry data, either as a Trimesh object representing
-            a single mesh or a Scene object containing multiple geometries.
-
-    Note:
-        ! Multimesh with multiple materials will read the uvs but not the textures since in that case is better
-        ! to read directly the multiple images (compressed) whenever the user wants to work with them.
-
-    Returns:
-        subsurface.TriSurf: Converted subsurface representation of the
-        provided geometry data.
-
-    Raises:
-        ValueError: If the input is neither a `trimesh.Trimesh` object nor
-        a `trimesh.Scene` object.
+    Load a mesh with trimesh and convert to the specified coordinate system.
+
     """
     trimesh = optional_requirements.require_trimesh()
- [... removed old lines 60-89: content not shown in this diff view ...]
-        np.array(tri.vertices),
-        np.array(tri.faces),
-        cells_attr=frame,
-        vertex_attr=vertex_attr,
-        xarray_attributes={
-            "bounds": tri.bounds.tolist(),
-        },
-    )
-
-    texture = _extract_texture_from_material(tri)
-
-    ts = TriSurf(
-        mesh=unstruct,
-        texture=texture,
-    )
-    return ts
-
-
-def _trisurf_from_scene(scene_or_mesh: 'Scene', trimesh: 'trimesh') -> subsurface.TriSurf:
-    pandas = optional_requirements.require_pandas()
-    geometries = scene_or_mesh.geometry
-    assert len(geometries) > 0, "No geometries found in the scene."
-    all_vertex = []
-    all_cells = []
-    cell_attr = []
-    all_vertex_attr = []
-    _last_cell = 0
-    texture = None
-    for i, (geom_name, geom) in enumerate(geometries.items()):
-        geom: trimesh.Trimesh
-        _handle_material_info(geom)
-
-        # Append vertices
-        all_vertex.append(np.array(geom.vertices))
-
-        # Adjust cell indices and append
-        cells = np.array(geom.faces)
-        if len(all_cells) > 0:
-            cells = cells + _last_cell
-        all_cells.append(cells)
-
-        # Create attribute array for this geometry
-        cell_attr.append(np.ones(len(cells)) * i)
-
-        _last_cell = cells.max() + 1
+    scene_or_mesh = LoadWithTrimesh.load_with_trimesh(path_to_file_or_buffer, file_type, plot)
+
+    # Compute a -90° rotation around the X axis
+    angle_rad = np.deg2rad(-90)
+    transform = trimesh.transformations.rotation_matrix(angle_rad, [1, 0, 0])
+
+    match coordinate_system:
+        case TriMeshTransformations.ORIGINAL:
+            return scene_or_mesh
+        case TriMeshTransformations.RIGHT_HANDED_Z_UP:
+            # Transform from Y-up (modeling software) to Z-up (scientific)
+            # This rotates the model so that:
+            # Old Y axis → New Z axis (pointing up)
+            # Old Z axis → New -Y axis
+            # Old X axis → Remains as X axis
+            transform = np.array([
+                [1, 0, 0, 0],  # X → X
+                [0, 0, 1, 0],  # Y → Z
+                [0, 1, 0, 0],  # Z → -Y
+                [0, 0, 0, 1]
+            ])
+
+            # Apply the coordinate transformation
+            if isinstance(scene_or_mesh, trimesh.Scene):
+                for geometry in scene_or_mesh.geometry.values():
+                    geometry.apply_transform(transform)
+            else:
+                scene_or_mesh.apply_transform(transform)
+        case _:
+            raise ValueError(f"Invalid coordinate system: {coordinate_system}")

-
-        if hasattr(geom.visual, 'uv') and geom.visual.uv is not None:
-            vertex_attr = pandas.DataFrame(
-                geom.visual.uv,
-                columns=['u', 'v']
-            )
-            all_vertex_attr.append(vertex_attr)
-
-        # Extract texture from material if it is only one geometry
-        if len(geometries) == 1:
-            texture = _extract_texture_from_material(geom)
-
-    # Create the combined UnstructuredData
-    unstruct = UnstructuredData.from_array(
-        vertex=np.vstack(all_vertex),
-        cells=np.vstack(all_cells),
-        vertex_attr=pandas.concat(all_vertex_attr, ignore_index=True) if len(all_vertex_attr) > 0 else None,
-        cells_attr=pandas.DataFrame(np.hstack(cell_attr), columns=["Geometry id"]),
-        xarray_attributes={
-            "bounds": scene_or_mesh.bounds.tolist(),
-        },
-    )
-
-    # If there is a texture
-    ts = TriSurf(
-        mesh=unstruct,
-        texture=texture,
-    )
-
-    return ts
-
-
-def _extract_texture_from_material(geom):
-    from PIL.JpegImagePlugin import JpegImageFile
-    from PIL.PngImagePlugin import PngImageFile
-    import trimesh
-
-    array = np.empty(0)
-    if isinstance(geom.visual.material, trimesh.visual.material.SimpleMaterial):
-        image: JpegImageFile = geom.visual.material.image
-        if image is None:
-            return None
-        array = np.array(image)
-    elif isinstance(geom.visual.material, trimesh.visual.material.PBRMaterial):
-        image: PngImageFile = geom.visual.material.baseColorTexture
-        array = np.array(image.convert('RGBA'))
+    return scene_or_mesh

-        if image is None:
-            return None
-    else:
-        raise ValueError(f"Unsupported material type: {type(geom.visual.material)}")

-
-
-    texture = StructuredData.from_numpy(array)
-    return texture
+def trimesh_to_unstruct(scene_or_mesh: Union["trimesh.Trimesh", "trimesh.Scene"]) -> TriSurf:
+    return TrimeshToSubsurface.trimesh_to_unstruct(scene_or_mesh)


-
-
-
-
+class LoadWithTrimesh:
+    @classmethod
+    def load_with_trimesh(cls, path_to_file_or_buffer, file_type: Optional[str] = None, plot=False):
+        trimesh = optional_requirements.require_trimesh()
+        # Load the OBJ with Trimesh using the specified options
+        scene_or_mesh = trimesh.load(
+            file_obj=path_to_file_or_buffer,
+            file_type=file_type,
+            force="mesh"
+        )
+        # Process single mesh vs. scene
+        if isinstance(scene_or_mesh, trimesh.Scene):
+            print("Loaded a Scene with multiple geometries.")
+            cls._process_scene(scene_or_mesh)
+            if plot:
+                scene_or_mesh.show()
+        else:
+            print("Loaded a single Trimesh object.")
+            print(f" - Vertices: {len(scene_or_mesh.vertices)}")
+            print(f" - Faces: {len(scene_or_mesh.faces)}")
+            cls.handle_material_info(scene_or_mesh)
+            if plot:
+                scene_or_mesh.show()
+
+        return scene_or_mesh
+
+    @classmethod
+    def handle_material_info(cls, geometry):
+        """
+        Handle and print material information for a single geometry,
+        explicitly injecting the PIL image if provided.
+        """
+        if geometry.visual and hasattr(geometry.visual, 'material'):
+            material = geometry.visual.material
+
+            print("Trimesh material:", material)
+
+            # If there's already an image reference in the material, let the user know
+            if hasattr(material, 'image') and material.image is not None:
+                print(" -> Material already has an image:", material.image)
+        else:
+            print("No material found or no 'material' attribute on this geometry.")
+
+    @classmethod
+    def _process_scene(cls, scene):
+        """Process a scene with multiple geometries."""
+        geometries = scene.geometry
+        assert len(geometries) > 0, "No geometries found in the scene."
+
+        print(f"Loaded a Scene with {len(scene.geometry)} geometry object(s).")
+        for geom_name, geom in geometries.items():
+            print(f"  Submesh: {geom_name}")
+            print(f"   - Vertices: {len(geom.vertices)}")
+            print(f"   - Faces: {len(geom.faces)}")
+
+            print(f"Geometry '{geom_name}':")
+            cls.handle_material_info(geom)
+
+
+class TrimeshToSubsurface:
+    @classmethod
+    def trimesh_to_unstruct(cls, scene_or_mesh: Union["trimesh.Trimesh", "trimesh.Scene"]) -> TriSurf:
+        """
+        Convert a Trimesh or Scene object to a subsurface TriSurf object.
+
+        This function takes either a `trimesh.Trimesh` object or a `trimesh.Scene`
+        object and converts it to a `subsurface.TriSurf` object. If the input is
+        a scene containing multiple geometries, it processes all geometries and
+        combines them into a single TriSurf object. If the input is a single
+        Trimesh object, it directly converts it to a TriSurf object. An error
+        is raised if the input is neither a `trimesh.Trimesh` nor a `trimesh.Scene`
+        object.
+
+        Parameters:
+            scene_or_mesh (Union[trimesh.Trimesh, trimesh.Scene]):
+                Input geometry data, either as a Trimesh object representing
+                a single mesh or a Scene object containing multiple geometries.
+
+        Note:
+            ! Multimesh with multiple materials will read the uvs but not the textures since in that case is better
+            ! to read directly the multiple images (compressed) whenever the user wants to work with them.
+
+        Returns:
+            subsurface.TriSurf: Converted subsurface representation of the
+            provided geometry data.
+
+        Raises:
+            ValueError: If the input is neither a `trimesh.Trimesh` object nor
+            a `trimesh.Scene` object.
+        """
+        trimesh = optional_requirements.require_trimesh()
+        if isinstance(scene_or_mesh, trimesh.Scene):
+            # Process scene with multiple geometries
+            ts = cls._trisurf_from_scene(scene_or_mesh, trimesh)
+
+        elif isinstance(scene_or_mesh, trimesh.Trimesh):
+            ts = cls._trisurf_from_trimesh(scene_or_mesh)
+
+        else:
+            raise ValueError("Input must be a Trimesh object or a Scene with multiple geometries.")
+
+        return ts
+
+    @classmethod
+    def _trisurf_from_trimesh(cls, scene_or_mesh):
+        # Process single mesh
+        tri = scene_or_mesh
+        pandas = optional_requirements.require_pandas()
+        frame = pandas.DataFrame(tri.face_attributes)
+        # Check frame has a valid shape for cells_attr if not make None
+        if frame.shape[0] != tri.faces.shape[0]:
+            frame = None
+        # Get UV coordinates if they exist
+        vertex_attr = None
+        if hasattr(tri.visual, 'uv') and tri.visual.uv is not None:
+            vertex_attr = pandas.DataFrame(
+                tri.visual.uv,
+                columns=['u', 'v']
+            )
+        unstruct = UnstructuredData.from_array(
+            np.array(tri.vertices),
+            np.array(tri.faces),
+            cells_attr=frame,
+            vertex_attr=vertex_attr,
+            xarray_attributes={
+                "bounds": tri.bounds.tolist(),
+            },
+        )

+        texture = cls._extract_texture_from_material(tri)

- [... removed old lines 200-206: content not shown in this diff view ...]
+        ts = TriSurf(
+            mesh=unstruct,
+            texture=texture,
+        )
+        return ts
+
+    @classmethod
+    def _trisurf_from_scene(cls, scene_or_mesh: 'Scene', trimesh: 'trimesh') -> TriSurf:
+        pandas = optional_requirements.require_pandas()
+        geometries = scene_or_mesh.geometry
+        assert len(geometries) > 0, "No geometries found in the scene."
+        all_vertex = []
+        all_cells = []
+        cell_attr = []
+        all_vertex_attr = []
+        _last_cell = 0
+        texture = None
+        for i, (geom_name, geom) in enumerate(geometries.items()):
+            geom: trimesh.Trimesh
+            LoadWithTrimesh.handle_material_info(geom)
+
+            # Append vertices
+            all_vertex.append(np.array(geom.vertices))
+
+            # Adjust cell indices and append
+            cells = np.array(geom.faces)
+            if len(all_cells) > 0:
+                cells = cells + _last_cell
+            all_cells.append(cells)
+
+            # Create attribute array for this geometry
+            cell_attr.append(np.ones(len(cells)) * i)
+
+            _last_cell = cells.max() + 1
+
+            # Get UV coordinates if they exist
+            if hasattr(geom.visual, 'uv') and geom.visual.uv is not None:
+                vertex_attr = pandas.DataFrame(
+                    geom.visual.uv,
+                    columns=['u', 'v']
+                )
+                all_vertex_attr.append(vertex_attr)
+
+            # Extract texture from material if it is only one geometry
+            if len(geometries) == 1:
+                texture = cls._extract_texture_from_material(geom)
+
+        # Create the combined UnstructuredData
+        unstruct = UnstructuredData.from_array(
+            vertex=np.vstack(all_vertex),
+            cells=np.vstack(all_cells),
+            vertex_attr=pandas.concat(all_vertex_attr, ignore_index=True) if len(all_vertex_attr) > 0 else None,
+            cells_attr=pandas.DataFrame(np.hstack(cell_attr), columns=["Geometry id"]),
+            xarray_attributes={
+                "bounds": scene_or_mesh.bounds.tolist(),
+            },
+        )

-
+        # If there is a texture
+        ts = TriSurf(
+            mesh=unstruct,
+            texture=texture,
+        )

-
-        if hasattr(material, 'image') and material.image is not None:
-            print(" -> Material already has an image:", material.image)
-    else:
-        print("No material found or no 'material' attribute on this geometry.")
+        return ts

+    @classmethod
+    def _extract_texture_from_material(cls, geom):
+        from PIL.JpegImagePlugin import JpegImageFile
+        from PIL.PngImagePlugin import PngImageFile
+        import trimesh

-
-
-    geometries = scene.geometry
-    assert len(geometries) > 0, "No geometries found in the scene."
+        if geom.visual is None or getattr(geom.visual, 'material', None) is None:
+            return None

- [... removed old lines 222-226: content not shown in this diff view ...]
+        array = np.empty(0)
+        if isinstance(geom.visual.material, trimesh.visual.material.SimpleMaterial):
+            image: JpegImageFile = geom.visual.material.image
+            if image is None:
+                return None
+            array = np.array(image)
+        elif isinstance(geom.visual.material, trimesh.visual.material.PBRMaterial):
+            image: PngImageFile = geom.visual.material.baseColorTexture
+            array = np.array(image.convert('RGBA'))
+
+            if image is None:
+                return None
+        else:
+            raise ValueError(f"Unsupported material type: {type(geom.visual.material)}")
+
+        # Asser that image has 3 channels assert array.shape[2] == 3 from PIL.PngImagePlugin import PngImageFile
+        assert array.shape[2] == 3 or array.shape[2] == 4
+        texture = StructuredData.from_numpy(array)
+        return texture
+
+    @classmethod
+    def _validate_texture_path(cls, texture_path):
+        """Validate the texture file path."""
+        if texture_path and not texture_path.lower().endswith(('.png', '.jpg', '.jpeg')):
+            raise ValueError("Texture path must be a PNG or JPEG file")
+
+
+class TriMeshReaderFromBlob:
+    @classmethod
+    def OBJ_stream_to_trisurf(cls, obj_stream: TextIO, mtl_stream: list[TextIO],
+                              texture_stream: list[io.BytesIO], coord_system: TriMeshTransformations) -> TriSurf:
+        """
+        Load an OBJ file from a stream and convert it to a TriSurf object.
+
+        Parameters:
+            obj_stream: TextIO containing the OBJ file data (text format)
+            mtl_stream: TextIO containing the MTL file data (text format)
+            texture_stream: BytesIO containing the texture file data (binary format)
+
+        Returns:
+            TriSurf: The loaded mesh with textures if available
+        """
+        trimesh = optional_requirements.require_trimesh()
+        import tempfile
+
+        path_in = "file.obj"
+
+        # Create a temporary directory to store associated files
+        with tempfile.TemporaryDirectory() as temp_dir:
+            # Write the OBJ content to a temp file
+            obj_path = os.path.join(temp_dir, os.path.basename(path_in))
+            with open(obj_path, 'w') as f:  # Use text mode 'w' for text files
+                obj_stream.seek(0)
+                f.write(obj_stream.read())
+            obj_stream.seek(0)
+
+            if mtl_stream is not None:
+                cls.write_material_files(
+                    mtl_streams=mtl_stream,
+                    obj_stream=obj_stream,
+                    temp_dir=temp_dir,
+                    texture_streams=texture_stream
+                )
+
+            # Now load the OBJ with all associated files available
+            scene_or_mesh = load_with_trimesh(
+                path_to_file_or_buffer=obj_path,
+                file_type="obj",
+                coordinate_system=coord_system
+            )

-
-
+            # Convert to a TriSurf object
+            tri_surf = TrimeshToSubsurface.trimesh_to_unstruct(scene_or_mesh)
+
+            return tri_surf
+
+    @classmethod
+    def write_material_files(cls, mtl_streams: list[TextIO], obj_stream: TextIO, temp_dir, texture_streams: list[io.BytesIO]):
+        # Extract mtl references from the OBJ file
+        mtl_files = cls._extract_mtl_references(obj_stream)
+        # Download and save MTL files
+        for e, mtl_file in enumerate(mtl_files):
+            mtl_path = f"{temp_dir}/{mtl_file}" if temp_dir else mtl_file
+            mtl_stream = mtl_streams[e] if mtl_streams else None
+            try:
+                # Save the MTL file to temp directory
+                mtl_temp_path = os.path.join(temp_dir, mtl_file)
+                with open(mtl_temp_path, 'w') as f:  # Use text mode 'w' for text files
+                    mtl_stream.seek(0)
+                    f.write(mtl_stream.read())
+
+                # Extract texture references from MTL
+                mtl_stream.seek(0)
+                texture_files = cls._extract_texture_references(mtl_stream)
+
+                if texture_streams is None:
+                    continue
+
+                # Download texture files
+                for ee, texture_file in enumerate(texture_files):
+                    texture_path = f"{temp_dir}/{texture_file}" if temp_dir else texture_file
+                    texture_stream = texture_streams[ee] if texture_streams else None
+                    try:
+                        # Save the texture file to temp directory
+                        with open(os.path.join(temp_dir, texture_file), 'wb') as f:  # Binary mode for textures
+                            texture_stream.seek(0)
+                            f.write(texture_stream.read())
+                    except Exception as e:
+                        print(f"Failed to load texture {texture_file}: {e}")
+            except Exception as e:
+                print(f"Failed to load MTL file {mtl_file}: {e}")
+
+    @classmethod
+    def _extract_mtl_references(cls, obj_stream):
+        """Extract MTL file references from an OBJ file."""
+        obj_stream.seek(0)
+        mtl_files = []
+
+        # TextIO stream already contains decoded text, so no need to decode
+        obj_text = obj_stream.read()
+        obj_stream.seek(0)
+
+        for line in obj_text.splitlines():
+            if line.startswith('mtllib '):
+                mtl_name = line.split(None, 1)[1].strip()
+                mtl_files.append(mtl_name)
+
+        return mtl_files
+
+    @classmethod
+    def _extract_texture_references(cls, mtl_stream):
+        """
+        Extract texture file references from an MTL file.
+        Works with both TextIO and BytesIO streams.
+
+        Parameters:
+            mtl_stream: TextIO or BytesIO containing the MTL file data
+
+        Returns:
+            list[str]: List of texture file names referenced in the MTL
+        """
+        mtl_stream.seek(0)
+        texture_files = []
+
+        # Handle both TextIO and BytesIO
+        if isinstance(mtl_stream, io.TextIOWrapper):
+            # TextIO stream already contains decoded text
+            mtl_text = mtl_stream.read()
+        else:
+            # BytesIO stream needs to be decoded
+            mtl_text = mtl_stream.read().decode('utf-8', errors='replace')
+
+        mtl_stream.seek(0)
+
+        for line in mtl_text.splitlines():
+            # Check for texture map definitions
+            for prefix in ['map_Kd ', 'map_Ka ', 'map_Ks ', 'map_Bump ', 'map_d ']:
+                if line.startswith(prefix):
+                    parts = line.split(None, 1)
+                    if len(parts) > 1:
+                        texture_name = parts[1].strip()
+                        texture_files.append(texture_name)
+                    break
+
+        return texture_files
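
To make the Y-up to Z-up convention concrete, here is a small stand-alone check (not part of the diff) of the 4x4 matrix applied in the RIGHT_HANDED_Z_UP branch of load_with_trimesh; the sample vertex is an arbitrary assumption:

import numpy as np

# Same axis permutation as in the RIGHT_HANDED_Z_UP branch above.
transform = np.array([
    [1, 0, 0, 0],  # X stays X
    [0, 0, 1, 0],  # new Y takes the old Z
    [0, 1, 0, 0],  # new Z takes the old Y
    [0, 0, 0, 1],
])

# A vertex at x=1, y=2 (the "up" direction in Y-up modeling tools), z=3, in homogeneous coordinates.
v = np.array([1.0, 2.0, 3.0, 1.0])
print(transform @ v)  # [1. 3. 2. 1.] -> the old Y value now sits on the Z axis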

subsurface/modules/reader/mesh/glb_reader.py
CHANGED

@@ -1,8 +1,11 @@
-import
-from
+import io
+from typing import Union

+from ....core.structs import TriSurf
+from ._trimesh_reader import load_with_trimesh, trimesh_to_unstruct, TriMeshTransformations

-
+
+def load_gltf_with_trimesh(path_to_glb: Union[str | io.BytesIO], coordinate_system: TriMeshTransformations) -> TriSurf:
     """
     load_obj_with_trimesh(path_to_glb, plot=False)

@@ -22,6 +25,6 @@ def load_glb_with_trimesh(path_to_glb: str, plot: bool = False) -> subsurface.Tr
     subsurface.TriSurf
         A TriSurf object representing the processed 3D surface geometry.
     """
-    trimesh =
+    trimesh = load_with_trimesh(path_to_glb, file_type="glb", coordinate_system=coordinate_system, plot=False)
     trisurf = trimesh_to_unstruct(trimesh)
     return trisurf

subsurface/modules/reader/mesh/mx_reader.py
CHANGED

@@ -157,7 +157,7 @@ def _process_mesh(mesh_lines) -> Optional[GOCADMesh]:
             continue

         if in_tface:
-            if line.startswith('VRTX'):
+            if line.startswith('VRTX') or line.startswith('PVRTX'):
                 # Parse vertex line
                 parts = line.split()
                 if len(parts) >= 5:
@@ -167,6 +167,7 @@ def _process_mesh(mesh_lines) -> Optional[GOCADMesh]:
                     vertex_indices.append(vid)
                     vertex_list.append([x, y, z])
                     vid_to_index[vid] = len(vertex_list) - 1
+                    # If PVRTX then there could be more columns with property values. For now, we are just parsing the vertex coordinates.
                 continue
             elif line.startswith('ATOM'):
                 # Parse ATOM line
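
For context, GOCAD TSurf sections mark vertices with VRTX or PVRTX records; PVRTX rows carry extra per-vertex property columns after the coordinates, which the parser above now accepts but ignores. A small sketch with made-up coordinates (illustrative only, not part of the diff):

sample = """\
VRTX 1 451234.5 6789123.0 -350.25
PVRTX 2 451240.1 6789130.7 -352.80 0.173 12.5
"""
for line in sample.splitlines():
    if line.startswith('VRTX') or line.startswith('PVRTX'):
        parts = line.split()
        vid = int(parts[1])
        x, y, z = map(float, parts[2:5])  # any trailing property columns are ignored
        print(vid, x, y, z)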

subsurface/modules/reader/mesh/obj_reader.py
CHANGED

@@ -1,10 +1,24 @@
-from typing import Union
+from typing import Union, TextIO
+import io

-import
-from
+from ._trimesh_reader import load_with_trimesh, trimesh_to_unstruct, TriMeshReaderFromBlob, TriMeshTransformations
+from ....core.structs import TriSurf


-
+
+def load_obj_with_trimesh_from_binary(obj_stream: TextIO, mtl_stream: list[TextIO],
+                                      texture_stream: list[io.BytesIO], coord_system: TriMeshTransformations) -> TriSurf:
+    tri_surf: TriSurf = TriMeshReaderFromBlob.OBJ_stream_to_trisurf(
+        obj_stream=obj_stream,
+        mtl_stream=mtl_stream,
+        texture_stream=texture_stream,
+        coord_system=coord_system
+    )
+
+    return tri_surf
+
+
+def load_obj_with_trimesh(path_to_obj: str, plot: bool = False) -> TriSurf:
     """
     Load and process an OBJ file, returning trimesh-compatible objects.

@@ -34,6 +48,6 @@ def load_obj_with_trimesh(path_to_obj: str, plot: bool = False) -> subsurface.Tr
         `ValueError`: If the OBJ file could not be properly processed.

     """
-    trimesh =
+    trimesh = load_with_trimesh(path_to_obj, file_type="obj", plot=plot)
     trisurf = trimesh_to_unstruct(trimesh)
     return trisurf

subsurface/modules/reader/volume/read_volume.py
CHANGED

@@ -7,6 +7,7 @@ from subsurface.core.structs import StructuredData
 from .... import optional_requirements
 from ....core.structs import UnstructuredData
 from subsurface.core.reader_helpers.readers_data import GenericReaderFilesHelper
+import numpy as np
 import pandas as pd


@@ -32,7 +33,7 @@ def read_VTK_structured_grid(file_or_buffer: Union[str, BytesIO], active_scalars
     # If it's a file path, read directly
     pyvista_obj = pv.read(file_or_buffer)
     try:
-        pyvista_struct: pv.ExplicitStructuredGrid = pyvista_obj
+        pyvista_struct: pv.ExplicitStructuredGrid = pv_cast_to_explicit_structured_grid(pyvista_obj)
     except Exception as e:
         raise f"The file is not a structured grid: {e}"

@@ -99,3 +100,71 @@ def read_volumetric_mesh_attr_file(reader_helper: GenericReaderFilesHelper) -> p
     df = pd.read_table(reader_helper.file_or_buffer, **reader_helper.pandas_reader_kwargs)
     df.columns = df.columns.astype(str).str.strip()
     return df
+
+
+def pv_cast_to_explicit_structured_grid(pyvista_object):
+
+    pv = optional_requirements.require_pyvista()
+
+    match pyvista_object:
+
+        case pv.RectilinearGrid() as rectl_grid:
+
+            return __pv_convert_rectilinear_to_explicit(rectl_grid)
+
+        case _:
+
+            return pyvista_object.cast_to_explicit_structured_grid()
+
+
+def __pv_convert_rectilinear_to_explicit(rectl_grid):
+
+    pv = optional_requirements.require_pyvista()
+
+    # Extract the coordinate arrays from the input RectilinearGrid.
+    x = np.asarray(rectl_grid.x)
+    y = np.asarray(rectl_grid.y)
+    z = np.asarray(rectl_grid.z)
+
+    # Helper function: "double" the coordinates to produce an expanded set
+    # that, when processed internally via np.unique, returns the original nodal values.
+    def doubled_coords(arr):
+        return np.repeat(arr, 2)[1:-1]
+
+    # Double the coordinate arrays.
+    xcorn = doubled_coords(x)
+    ycorn = doubled_coords(y)
+    zcorn = doubled_coords(z)
+
+    # Build a complete grid of corner points via meshgrid. Fortran ('F') order ensures
+    # the connectivity ordering aligns with VTK's expectations.
+    xx, yy, zz = np.meshgrid(xcorn, ycorn, zcorn, indexing='ij')
+    corners = np.column_stack((xx.ravel(order='F'),
+                               yy.ravel(order='F'),
+                               zz.ravel(order='F')))
+
+    # The dimensions to pass to the ExplicitStructuredGrid constructor should be
+    # the counts of unique coordinates in each direction.
+    dims = (len(np.unique(xcorn)),
+            len(np.unique(ycorn)),
+            len(np.unique(zcorn)))
+
+    # Create the ExplicitStructuredGrid.
+    explicit_grid = pv.ExplicitStructuredGrid(dims, corners)
+    explicit_grid.compute_connectivity()
+
+    # --- Copy associated data arrays ---
+
+    # Transfer all cell data arrays.
+    for name, array in rectl_grid.cell_data.items():
+        explicit_grid.cell_data[name] = array.copy()
+
+    # Transfer all point data arrays.
+    for name, array in rectl_grid.point_data.items():
+        explicit_grid.point_data[name] = array.copy()
+
+    # (Optional) Transfer field data as well.
+    for name, array in rectl_grid.field_data.items():
+        explicit_grid.field_data[name] = array.copy()
+
+    return explicit_grid
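
A quick illustration (not part of the diff) of what the doubled_coords helper in __pv_convert_rectilinear_to_explicit produces: interior nodes are repeated so that consecutive pairs form cell corners, while np.unique still recovers the original nodal coordinates. The sample axis values are arbitrary:

import numpy as np

def doubled_coords(arr):
    return np.repeat(arr, 2)[1:-1]

x = np.array([0.0, 1.0, 2.5, 4.0])      # 4 nodes -> 3 cells along this axis
print(doubled_coords(x))                 # [0.  1.  1.  2.5 2.5 4. ]  (two corners per cell)
print(np.unique(doubled_coords(x)))      # [0.  1.  2.5 4. ]  -> the original node positions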

subsurface/modules/reader/wells/read_borehole_interface.py
CHANGED

@@ -9,7 +9,6 @@ from subsurface.modules.reader.wells.wells_utils import add_tops_from_base_and_a


 def read_collar(reader_helper: GenericReaderFilesHelper) -> pd.DataFrame:
-    if reader_helper.usecols is None: reader_helper.usecols = [0, 1, 2, 3]
     if reader_helper.index_col is False: reader_helper.index_col = 0

     # Check file_or_buffer type
@@ -22,13 +21,16 @@ def read_collar(reader_helper: GenericReaderFilesHelper) -> pd.DataFrame:
     return data_df


-def read_survey(reader_helper: GenericReaderFilesHelper):
+def read_survey(reader_helper: GenericReaderFilesHelper, validate_survey: bool = True) -> pd.DataFrame:
     if reader_helper.index_col is False: reader_helper.index_col = 0

     d = check_format_and_read_to_df(reader_helper)
     _map_rows_and_cols_inplace(d, reader_helper)

-
+    if validate_survey:
+        d_no_singles = _validate_survey_data(d)
+    else:
+        d_no_singles = d

     return d_no_singles

@@ -37,12 +39,16 @@ def read_lith(reader_helper: GenericReaderFilesHelper) -> pd.DataFrame:
     return read_attributes(reader_helper, is_lith=True)


-def read_attributes(reader_helper: GenericReaderFilesHelper, is_lith: bool = False) -> pd.DataFrame:
-    if reader_helper.index_col is False:
-
+def read_attributes(reader_helper: GenericReaderFilesHelper, is_lith: bool = False, validate_attr: bool = True) -> pd.DataFrame:
+    if reader_helper.index_col is False:
+        reader_helper.index_col = 0
+
     d = check_format_and_read_to_df(reader_helper)

     _map_rows_and_cols_inplace(d, reader_helper)
+    if validate_attr is False:
+        return d
+
     if is_lith:
         d = _validate_lith_data(d, reader_helper)
     else:
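
A hedged sketch of how the new validation switches might be used (not part of the diff; the CSV path and the GenericReaderFilesHelper constructor arguments are assumptions for illustration only):

from subsurface.core.reader_helpers.readers_data import GenericReaderFilesHelper
from subsurface.modules.reader.wells.read_borehole_interface import read_attributes, read_survey

helper = GenericReaderFilesHelper(file_or_buffer="survey.csv")  # hypothetical survey table

validated = read_survey(helper)                       # default: runs _validate_survey_data
raw = read_survey(helper, validate_survey=False)      # new: keep the table exactly as read

attrs = read_attributes(helper, validate_attr=False)  # new: skip attribute validation as well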

{subsurface_terra-2025.1.0rc7.dist-info → subsurface_terra-2025.1.0rc10.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: subsurface_terra
-Version: 2025.1.0rc7
+Version: 2025.1.0rc10
 Summary: Subsurface data types and utilities. This version is the one used by Terranigma Solutions. Please feel free to take anything in this repository for the original one.
 Home-page: https://softwareunderground.github.io/subsurface
 Author: Software Underground

{subsurface_terra-2025.1.0rc7.dist-info → subsurface_terra-2025.1.0rc10.dist-info}/RECORD
CHANGED

@@ -1,9 +1,9 @@
 subsurface/__init__.py,sha256=0D2rCUem3fiHsXFXXSmwheLiPS4cXxEdfWdFBj0b-cY,930
-subsurface/_version.py,sha256=
+subsurface/_version.py,sha256=FcQPFS21SJ4lyiZc0tXWa590ajfkakU3g4UuBjnhZoI,542
 subsurface/optional_requirements.py,sha256=Wg36RqxzPiLtN-3qSg5K9QVEeXCB0-EjSzHERAoO8EE,2883
-subsurface/api/__init__.py,sha256=
+subsurface/api/__init__.py,sha256=UiOBKQcZJGMeh_5ZNhXqT2iEdiIk721djLX30aFxEa4,341
 subsurface/api/interfaces/__init__.py,sha256=rqUtJyMLicobcyhmr74TepjmUQAEmlazKT3vjV_n3aA,6
-subsurface/api/interfaces/stream.py,sha256=
+subsurface/api/interfaces/stream.py,sha256=i-ieGJbf5a2dskmiOezHMifPoUXf4VQESqEIChHWuoM,4621
 subsurface/api/reader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 subsurface/api/reader/read_wells.py,sha256=bu6fAdj8WTMPWYVZwBdn7vNt00vfdJ3nqJsZ4o1V25U,2731
 subsurface/core/__init__.py,sha256=73SzsVXHiiWt_215bgIqWWy2t0XgvyOxDPxuq0NyktM,32
@@ -37,7 +37,7 @@ subsurface/core/structs/unstructured_elements/triangular_surface.py,sha256=58Cpu
 subsurface/core/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 subsurface/core/utils/utils_core.py,sha256=lRzLlE11JCH-GCjhbIoKZ-WfARSmI_yhmJkqLMMNmTo,1110
 subsurface/modules/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-subsurface/modules/reader/__init__.py,sha256=
+subsurface/modules/reader/__init__.py,sha256=bD5l951Zzw6xDuzyMSKKqiX5SBpm85iv4QJPwqIbdbQ,483
 subsurface/modules/reader/from_binary.py,sha256=AtBzLCJJRZ0exgfUfv1Rfp1tVMUCfSLuAwTCb_Ieqfs,1341
 subsurface/modules/reader/read_netcdf.py,sha256=zAqRhckeGjlh95Ar-IPbSlUPbbLVignXu-DC9FY7XUk,1019
 subsurface/modules/reader/faults/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -45,12 +45,12 @@ subsurface/modules/reader/faults/faults.py,sha256=s144Gq0tFvjSUkB66dB86kYjW2l2e1
 subsurface/modules/reader/geo_object/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 subsurface/modules/reader/mesh/_GOCAD_mesh.py,sha256=_MwNy4iVMGnemvT01hhB-nbCc8a1r8ETEAkD-AWcBz4,3304
 subsurface/modules/reader/mesh/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-subsurface/modules/reader/mesh/_trimesh_reader.py,sha256
+subsurface/modules/reader/mesh/_trimesh_reader.py,sha256=-cmm-BYIUC9x6tqABNgKzNMSrDfuoTdZ7YpfB1nNPh0,17260
 subsurface/modules/reader/mesh/csv_mesh_reader.py,sha256=0iXYg-JOLUg7yH6Rw6qCoxXvKh0hOUTwjYxbhSlGfGM,1969
 subsurface/modules/reader/mesh/dxf_reader.py,sha256=JDhzFRE46sdwMGBB8enHNluH07ohqt6LhgLHiSQRL-I,6525
-subsurface/modules/reader/mesh/glb_reader.py,sha256=
-subsurface/modules/reader/mesh/mx_reader.py,sha256=
-subsurface/modules/reader/mesh/obj_reader.py,sha256=
+subsurface/modules/reader/mesh/glb_reader.py,sha256=dierR9AYM5Q2szLuemfLlM_JcPRNtDrD5fpF8zNjBS8,1118
+subsurface/modules/reader/mesh/mx_reader.py,sha256=YQqvOJ4FuPrlz_3bHIdqT-9YKkirF_Vbjnv6cVQnwAw,8587
+subsurface/modules/reader/mesh/obj_reader.py,sha256=LXf-N-So5xWhnZ6uHJPjcCfQM71a_mqJa3hQEikOGzU,2207
 subsurface/modules/reader/mesh/omf_mesh_reader.py,sha256=0gewosxlLVTQQoUyvYB91AOZ_SByQqxw53coSwCKeMI,1436
 subsurface/modules/reader/mesh/surface_reader.py,sha256=EcRjr3sAJbwZpqm7WHHe1bnMZyGO5MSgF6qm5bSBLtQ,2420
 subsurface/modules/reader/mesh/surfaces_api.py,sha256=3mwB0aU2FNNhIXD_yMyjBR7k2YW-NTXKkbXMg5UfGy0,1557
@@ -60,13 +60,13 @@ subsurface/modules/reader/profiles/profiles_core.py,sha256=kqlt79hjdWWQNBjWqLGlu
 subsurface/modules/reader/topography/__init__.py,sha256=zkaTX5JxsNfjF-dFeEbHfUB58vhPMjm6Iiqx9HgJOrY,14
 subsurface/modules/reader/topography/topo_core.py,sha256=6rkDp9XrUSif8ZuraDrUK2I8-yqEp8CRm4r4l2lQuw0,3542
 subsurface/modules/reader/volume/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-subsurface/modules/reader/volume/read_volume.py,sha256=
+subsurface/modules/reader/volume/read_volume.py,sha256=nn0C047jsty_kusZZ-nfZ2bWbyfkaakLsDy_ADxb5sw,6368
 subsurface/modules/reader/volume/segy_reader.py,sha256=oBS1FwwzFTMBmAR3odJMvW-as_0YMudPcFmndpcApW4,3958
 subsurface/modules/reader/volume/seismic.py,sha256=dRA7YKw9fkrkAYS7Bnfm7GfCPdfxVsDyfM7frQK56V4,4950
 subsurface/modules/reader/volume/volume_utils.py,sha256=7ToIdVwq04lMyYGJE9PzYVQt9xl9mjbXXrzvMfM6wGw,1367
 subsurface/modules/reader/wells/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 subsurface/modules/reader/wells/_read_to_df.py,sha256=PFs5ottnm7LtzmJgPh8UN3cdR4j2y734RTDPoks16ls,2294
-subsurface/modules/reader/wells/read_borehole_interface.py,sha256=
+subsurface/modules/reader/wells/read_borehole_interface.py,sha256=iyoaee00vfSzqF4sWy9WA4a92VtEb16ERbrkdWgWszE,5548
 subsurface/modules/reader/wells/wells_utils.py,sha256=CoVF9Qtba8Qu42JcVmtsyaSS_PA80lcJ6mzh6-TQt2Q,2206
 subsurface/modules/reader/wells/DEP/__init__.py,sha256=8PES2m_HqZtZ-jFhPynJWpk8dovKAh1UyVnfAxg_NXY,1887
 subsurface/modules/reader/wells/DEP/_well_files_reader.py,sha256=QYZ9p7iyGt1JDlkrFpYPFaMnXwLj7wdFZmTYiO2niSY,6216
@@ -86,8 +86,8 @@ subsurface/modules/writer/to_rex/material_encoder.py,sha256=zGlqF9X_Civ9VvtGwo-I
 subsurface/modules/writer/to_rex/mesh_encoder.py,sha256=6TBtJhYJEAMEBHxQkbweXrJO1jIUx1ClM8l5ajVCrLc,6443
 subsurface/modules/writer/to_rex/to_rex.py,sha256=njsm2d3e69pRVfF_TOC_hexvXPmgNTZdJvhbnXcvyIo,3800
 subsurface/modules/writer/to_rex/utils.py,sha256=HEpJ95LjHOK24ePpmLpPP5uFyv6i_kN3AWh031q-1Uc,379
-subsurface_terra-2025.1.
-subsurface_terra-2025.1.
-subsurface_terra-2025.1.
-subsurface_terra-2025.1.
-subsurface_terra-2025.1.
+subsurface_terra-2025.1.0rc10.dist-info/licenses/LICENSE,sha256=GSXh9K5TZauM89BeGbYg07oST_HMhOTiZoEGaUeKBtA,11606
+subsurface_terra-2025.1.0rc10.dist-info/METADATA,sha256=jBJ-O1dvs0h0JLz8AC8pa_DvztAmPpJk5f_CivFG0n4,7094
+subsurface_terra-2025.1.0rc10.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+subsurface_terra-2025.1.0rc10.dist-info/top_level.txt,sha256=f32R_tUSf83CfkpB4vjv5m2XcD8TmDX9h7F4rnEXt5A,11
+subsurface_terra-2025.1.0rc10.dist-info/RECORD,,

{subsurface_terra-2025.1.0rc7.dist-info → subsurface_terra-2025.1.0rc10.dist-info}/WHEEL
RENAMED
File without changes

{subsurface_terra-2025.1.0rc7.dist-info → subsurface_terra-2025.1.0rc10.dist-info}/licenses/LICENSE
RENAMED
File without changes

{subsurface_terra-2025.1.0rc7.dist-info → subsurface_terra-2025.1.0rc10.dist-info}/top_level.txt
RENAMED
File without changes