subsurface-terra 2025.1.0rc10__py3-none-any.whl → 2025.1.0rc12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- subsurface/_version.py +2 -2
- subsurface/api/__init__.py +2 -1
- subsurface/api/interfaces/stream.py +7 -1
- subsurface/core/geological_formats/boreholes/survey.py +4 -2
- subsurface/core/structs/structured_elements/structured_grid.py +4 -0
- subsurface/modules/reader/__init__.py +2 -0
- subsurface/modules/reader/mesh/_trimesh_reader.py +26 -16
- subsurface/modules/reader/volume/read_grav3d.py +428 -0
- subsurface/modules/reader/volume/read_volume.py +66 -6
- subsurface/modules/reader/wells/read_borehole_interface.py +10 -0
- subsurface/modules/tools/__init__.py +0 -0
- subsurface/modules/tools/mocking_aux.py +104 -0
- {subsurface_terra-2025.1.0rc10.dist-info → subsurface_terra-2025.1.0rc12.dist-info}/METADATA +1 -1
- {subsurface_terra-2025.1.0rc10.dist-info → subsurface_terra-2025.1.0rc12.dist-info}/RECORD +17 -14
- {subsurface_terra-2025.1.0rc10.dist-info → subsurface_terra-2025.1.0rc12.dist-info}/WHEEL +1 -1
- {subsurface_terra-2025.1.0rc10.dist-info → subsurface_terra-2025.1.0rc12.dist-info}/licenses/LICENSE +0 -0
- {subsurface_terra-2025.1.0rc10.dist-info → subsurface_terra-2025.1.0rc12.dist-info}/top_level.txt +0 -0
subsurface/_version.py
CHANGED
|
@@ -17,5 +17,5 @@ __version__: str
|
|
|
17
17
|
__version_tuple__: VERSION_TUPLE
|
|
18
18
|
version_tuple: VERSION_TUPLE
|
|
19
19
|
|
|
20
|
-
__version__ = version = '2025.1.
|
|
21
|
-
__version_tuple__ = version_tuple = (2025, 1, 0)
|
|
20
|
+
__version__ = version = '2025.1.0rc12'
|
|
21
|
+
__version_tuple__ = version_tuple = (2025, 1, 0, 'rc12')
|
subsurface/api/__init__.py
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import io
|
|
2
2
|
from io import BytesIO
|
|
3
|
-
from typing import TextIO
|
|
3
|
+
from typing import TextIO, Optional
|
|
4
4
|
|
|
5
5
|
import pandas
|
|
6
6
|
|
|
@@ -57,11 +57,17 @@ def GLTF_stream_to_trisurf(gltf_stream: io.BytesIO, coordinate_system: TriMeshTr
|
|
|
57
57
|
tri_mesh: TriSurf = reader.load_gltf_with_trimesh(gltf_stream, coordinate_system)
|
|
58
58
|
return tri_mesh
|
|
59
59
|
|
|
60
|
+
|
|
60
61
|
def VTK_stream_to_struct(stream: BytesIO, attribute_name: str) -> list[StructuredData]:
|
|
61
62
|
struct = read_VTK_structured_grid(stream, attribute_name)
|
|
62
63
|
return [struct]
|
|
63
64
|
|
|
64
65
|
|
|
66
|
+
def MSH_stream_to_struct(grid_stream: TextIO, values_stream: TextIO, missing_value: Optional[float], attr_name: Optional[str]) -> list[StructuredData]:
|
|
67
|
+
struct = reader.read_msh_structured_grid(grid_stream, values_stream, missing_value, attr_name)
|
|
68
|
+
return [struct]
|
|
69
|
+
|
|
70
|
+
|
|
65
71
|
def CSV_wells_stream_to_unstruc(
|
|
66
72
|
collars_reader: GenericReaderFilesHelper,
|
|
67
73
|
surveys_reader: GenericReaderFilesHelper,
|
|
@@ -111,9 +111,11 @@ def _map_attrs_to_measured_depths(attrs: pd.DataFrame, survey: Survey) -> pd.Dat
|
|
|
111
111
|
|
|
112
112
|
# Start with a copy of the existing attributes DataFrame
|
|
113
113
|
new_attrs = survey.survey_trajectory.data.points_attributes.copy()
|
|
114
|
-
if 'component lith' in attrs.columns:
|
|
114
|
+
if 'component lith' in attrs.columns and 'lith_ids' not in attrs.columns:
|
|
115
115
|
# Factorize lith components directly in-place
|
|
116
116
|
attrs['lith_ids'], _ = pd.factorize(attrs['component lith'], use_na_sentinel=True)
|
|
117
|
+
else:
|
|
118
|
+
pass
|
|
117
119
|
|
|
118
120
|
# Add missing columns from attrs, preserving their dtypes
|
|
119
121
|
for col in attrs.columns.difference(new_attrs.columns):
|
|
@@ -152,7 +154,7 @@ def _map_attrs_to_measured_depths(attrs: pd.DataFrame, survey: Survey) -> pd.Dat
|
|
|
152
154
|
continue
|
|
153
155
|
attr_to_interpolate = attrs_well[col]
|
|
154
156
|
# make sure the attr_to_interpolate is not a string
|
|
155
|
-
if attr_to_interpolate.dtype == 'O':
|
|
157
|
+
if attr_to_interpolate.dtype == 'O' or isinstance(attr_to_interpolate.dtype, pd.CategoricalDtype):
|
|
156
158
|
continue
|
|
157
159
|
if col in ['lith_ids', 'component lith']:
|
|
158
160
|
interp_kind = 'nearest'
|
|
@@ -33,6 +33,10 @@ class StructuredGrid:
|
|
|
33
33
|
grid_3d = np.meshgrid(*cart_coord, indexing='ij')
|
|
34
34
|
return grid_3d
|
|
35
35
|
|
|
36
|
+
@property
|
|
37
|
+
def active_attributes(self) -> np.ndarray:
|
|
38
|
+
return self.ds.data[self.ds.active_data_array_name].values
|
|
39
|
+
|
|
36
40
|
def meshgrid_2d(self, attribute_name_coord_name: str = None) -> list:
|
|
37
41
|
"""
|
|
38
42
|
|
|
@@ -9,3 +9,5 @@ from .mesh.dxf_reader import dxf_stream_to_unstruct_input, dxf_file_to_unstruct_
|
|
|
9
9
|
from .mesh.mx_reader import mx_to_unstruc_from_binary
|
|
10
10
|
from .mesh.obj_reader import load_obj_with_trimesh, load_obj_with_trimesh_from_binary
|
|
11
11
|
from .mesh.glb_reader import load_gltf_with_trimesh
|
|
12
|
+
|
|
13
|
+
from .volume.read_grav3d import read_msh_structured_grid
|
|
@@ -9,9 +9,14 @@ from .... import optional_requirements
|
|
|
9
9
|
from ....core.structs import TriSurf, StructuredData
|
|
10
10
|
|
|
11
11
|
|
|
12
|
-
class TriMeshTransformations(enum.
|
|
13
|
-
|
|
14
|
-
|
|
12
|
+
class TriMeshTransformations(enum.Flag):
|
|
13
|
+
UP_Z = 2**1
|
|
14
|
+
UP_Y = 2**2
|
|
15
|
+
FORWARD_MINUS_Z = 2**3
|
|
16
|
+
FORWARD_PLUS_Z = 2**4
|
|
17
|
+
RIGHT_HANDED_Z_UP_Y_REVERSED = UP_Y | FORWARD_MINUS_Z
|
|
18
|
+
RIGHT_HANDED_Z_UP = UP_Y | FORWARD_PLUS_Z
|
|
19
|
+
ORIGINAL = UP_Z | FORWARD_MINUS_Z
|
|
15
20
|
|
|
16
21
|
|
|
17
22
|
def load_with_trimesh(path_to_file_or_buffer, file_type: Optional[str] = None,
|
|
@@ -23,13 +28,10 @@ def load_with_trimesh(path_to_file_or_buffer, file_type: Optional[str] = None,
|
|
|
23
28
|
trimesh = optional_requirements.require_trimesh()
|
|
24
29
|
scene_or_mesh = LoadWithTrimesh.load_with_trimesh(path_to_file_or_buffer, file_type, plot)
|
|
25
30
|
|
|
26
|
-
# Compute a -90° rotation around the X axis
|
|
27
|
-
angle_rad = np.deg2rad(-90)
|
|
28
|
-
transform = trimesh.transformations.rotation_matrix(angle_rad, [1, 0, 0])
|
|
29
|
-
|
|
30
31
|
match coordinate_system:
|
|
31
32
|
case TriMeshTransformations.ORIGINAL:
|
|
32
33
|
return scene_or_mesh
|
|
34
|
+
# * Forward -Z up Y
|
|
33
35
|
case TriMeshTransformations.RIGHT_HANDED_Z_UP:
|
|
34
36
|
# Transform from Y-up (modeling software) to Z-up (scientific)
|
|
35
37
|
# This rotates the model so that:
|
|
@@ -37,21 +39,29 @@ def load_with_trimesh(path_to_file_or_buffer, file_type: Optional[str] = None,
|
|
|
37
39
|
# Old Z axis → New -Y axis
|
|
38
40
|
# Old X axis → Remains as X axis
|
|
39
41
|
transform = np.array([
|
|
40
|
-
[1, 0, 0, 0],
|
|
41
|
-
[0, 0, 1, 0],
|
|
42
|
-
[0, 1, 0, 0],
|
|
42
|
+
[1, 0, 0, 0],
|
|
43
|
+
[0, 0, -1, 0],
|
|
44
|
+
[0, 1, 0, 0],
|
|
43
45
|
[0, 0, 0, 1]
|
|
44
46
|
])
|
|
45
|
-
|
|
47
|
+
case TriMeshTransformations.RIGHT_HANDED_Z_UP_Y_REVERSED:
|
|
48
|
+
# * Forward Z Up Y
|
|
49
|
+
transform=np.array([
|
|
50
|
+
[1, 0, 0, 0],
|
|
51
|
+
[0, 0, 1, 0],
|
|
52
|
+
[0, 1, 0, 0],
|
|
53
|
+
[0, 0, 0, 1],
|
|
54
|
+
])
|
|
46
55
|
# Apply the coordinate transformation
|
|
47
|
-
|
|
48
|
-
for geometry in scene_or_mesh.geometry.values():
|
|
49
|
-
geometry.apply_transform(transform)
|
|
50
|
-
else:
|
|
51
|
-
scene_or_mesh.apply_transform(transform)
|
|
56
|
+
# TODO: Add all the options of blender
|
|
52
57
|
case _:
|
|
53
58
|
raise ValueError(f"Invalid coordinate system: {coordinate_system}")
|
|
54
59
|
|
|
60
|
+
if isinstance(scene_or_mesh, trimesh.Scene):
|
|
61
|
+
for geometry in scene_or_mesh.geometry.values():
|
|
62
|
+
geometry.apply_transform(transform)
|
|
63
|
+
else:
|
|
64
|
+
scene_or_mesh.apply_transform(transform)
|
|
55
65
|
return scene_or_mesh
|
|
56
66
|
|
|
57
67
|
|
|
@@ -0,0 +1,428 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from typing import List, Dict, Any, Tuple, Optional, Union, TextIO
|
|
3
|
+
import numpy as np
|
|
4
|
+
import xarray as xr
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
|
|
7
|
+
from ....core.structs import StructuredData
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@dataclass
|
|
11
|
+
class GridDimensions:
|
|
12
|
+
"""
|
|
13
|
+
Represents the dimensions of a 3D grid.
|
|
14
|
+
|
|
15
|
+
Attributes:
|
|
16
|
+
nx (int): Number of cells in the x-direction
|
|
17
|
+
ny (int): Number of cells in the y-direction
|
|
18
|
+
nz (int): Number of cells in the z-direction
|
|
19
|
+
"""
|
|
20
|
+
nx: int
|
|
21
|
+
ny: int
|
|
22
|
+
nz: int
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
@dataclass
|
|
26
|
+
class GridOrigin:
|
|
27
|
+
"""
|
|
28
|
+
Represents the origin point of a 3D grid.
|
|
29
|
+
|
|
30
|
+
Attributes:
|
|
31
|
+
x (float): X-coordinate of the origin
|
|
32
|
+
y (float): Y-coordinate of the origin
|
|
33
|
+
z (float): Z-coordinate of the origin
|
|
34
|
+
"""
|
|
35
|
+
x: float
|
|
36
|
+
y: float
|
|
37
|
+
z: float
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
@dataclass
|
|
41
|
+
class GridCellSizes:
|
|
42
|
+
"""
|
|
43
|
+
Represents the cell sizes in each direction of a 3D grid.
|
|
44
|
+
|
|
45
|
+
Attributes:
|
|
46
|
+
x (List[float]): Cell sizes in the x-direction
|
|
47
|
+
y (List[float]): Cell sizes in the y-direction
|
|
48
|
+
z (List[float]): Cell sizes in the z-direction
|
|
49
|
+
"""
|
|
50
|
+
x: List[float]
|
|
51
|
+
y: List[float]
|
|
52
|
+
z: List[float]
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
@dataclass
|
|
56
|
+
class GridData:
|
|
57
|
+
"""
|
|
58
|
+
Represents a 3D grid with dimensions, origin, and cell sizes.
|
|
59
|
+
|
|
60
|
+
Attributes:
|
|
61
|
+
dimensions (GridDimensions): The dimensions of the grid
|
|
62
|
+
origin (GridOrigin): The origin point of the grid
|
|
63
|
+
cell_sizes (GridCellSizes): The cell sizes in each direction
|
|
64
|
+
metadata (Dict[str, Any]): Optional metadata about the grid
|
|
65
|
+
"""
|
|
66
|
+
dimensions: GridDimensions
|
|
67
|
+
origin: GridOrigin
|
|
68
|
+
cell_sizes: GridCellSizes
|
|
69
|
+
metadata: Dict[str, Any] = field(default_factory=dict)
|
|
70
|
+
|
|
71
|
+
@classmethod
|
|
72
|
+
def from_dict(cls, grid_dict: Dict[str, Any]) -> 'GridData':
|
|
73
|
+
"""
|
|
74
|
+
Converts a dictionary containing grid information into a GridData instance.
|
|
75
|
+
|
|
76
|
+
Args:
|
|
77
|
+
grid_dict: Dictionary with grid information
|
|
78
|
+
|
|
79
|
+
Returns:
|
|
80
|
+
GridData: A new GridData instance
|
|
81
|
+
"""
|
|
82
|
+
dims = grid_dict["dimensions"]
|
|
83
|
+
origin_dict = grid_dict["origin"]
|
|
84
|
+
cell_sizes_dict = grid_dict["cell_sizes"]
|
|
85
|
+
|
|
86
|
+
# Handle both new and legacy key names
|
|
87
|
+
nx = dims.get("nx", dims.get("ne"))
|
|
88
|
+
ny = dims.get("ny", dims.get("nn"))
|
|
89
|
+
nz = dims.get("nz", dims.get("nz"))
|
|
90
|
+
|
|
91
|
+
x = origin_dict.get("x", origin_dict.get("x0"))
|
|
92
|
+
y = origin_dict.get("y", origin_dict.get("y0"))
|
|
93
|
+
z = origin_dict.get("z", origin_dict.get("z0"))
|
|
94
|
+
|
|
95
|
+
x_sizes = cell_sizes_dict.get("x", cell_sizes_dict.get("easting"))
|
|
96
|
+
y_sizes = cell_sizes_dict.get("y", cell_sizes_dict.get("northing"))
|
|
97
|
+
z_sizes = cell_sizes_dict.get("z", cell_sizes_dict.get("vertical"))
|
|
98
|
+
|
|
99
|
+
metadata = grid_dict.get("metadata", {})
|
|
100
|
+
|
|
101
|
+
return cls(
|
|
102
|
+
dimensions=GridDimensions(nx=nx, ny=ny, nz=nz),
|
|
103
|
+
origin=GridOrigin(x=x, y=y, z=z),
|
|
104
|
+
cell_sizes=GridCellSizes(x=x_sizes, y=y_sizes, z=z_sizes),
|
|
105
|
+
metadata=metadata
|
|
106
|
+
)
|
|
107
|
+
|
|
108
|
+
|
|
109
|
+
def read_msh_structured_grid(grid_stream: TextIO, values_stream: TextIO, missing_value: Optional[float],
|
|
110
|
+
attr_name: Optional[str]) -> StructuredData:
|
|
111
|
+
"""
|
|
112
|
+
Read a structured grid mesh and values from streams and return a StructuredData object.
|
|
113
|
+
|
|
114
|
+
This function is designed to work with streams (e.g., from Azure blob storage)
|
|
115
|
+
rather than file paths.
|
|
116
|
+
|
|
117
|
+
Args:
|
|
118
|
+
grid_stream: TextIO stream containing the grid definition (.msh format)
|
|
119
|
+
values_stream: TextIO stream containing the property values (.mod format)
|
|
120
|
+
|
|
121
|
+
Returns:
|
|
122
|
+
StructuredData object containing the grid and property values
|
|
123
|
+
|
|
124
|
+
Raises:
|
|
125
|
+
ValueError: If the stream format is invalid
|
|
126
|
+
"""
|
|
127
|
+
# Read all lines from the grid stream
|
|
128
|
+
lines = [line.strip() for line in grid_stream if line.strip()]
|
|
129
|
+
|
|
130
|
+
# Create metadata for the grid
|
|
131
|
+
metadata = {
|
|
132
|
+
'file_format': 'grav3d',
|
|
133
|
+
'source' : 'stream'
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
# Parse grid information from lines
|
|
137
|
+
try:
|
|
138
|
+
grid = _parse_grid_from_lines(lines, metadata)
|
|
139
|
+
except ValueError as e:
|
|
140
|
+
# Add context about the stream to the error message
|
|
141
|
+
raise ValueError(f"Error parsing grid stream: {e}") from e
|
|
142
|
+
|
|
143
|
+
# Read values from the values stream
|
|
144
|
+
try:
|
|
145
|
+
# Read all values from the stream
|
|
146
|
+
lines = [line.strip() for line in values_stream if line.strip()]
|
|
147
|
+
|
|
148
|
+
model_array = _parse_mod_file(grid, lines, missing_value=missing_value)
|
|
149
|
+
|
|
150
|
+
except Exception as e:
|
|
151
|
+
# Add context to any errors
|
|
152
|
+
raise ValueError(f"Error reading model stream: {str(e)}") from e
|
|
153
|
+
|
|
154
|
+
# Create and return a StructuredData object
|
|
155
|
+
return structured_data_from(model_array, grid, data_name=attr_name)
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
def read_msh_file(filepath: Union[str, Path]) -> GridData:
|
|
159
|
+
"""
|
|
160
|
+
Read a structured grid mesh file and return a GridData object.
|
|
161
|
+
|
|
162
|
+
Currently supports Grav3D mesh file format (.msh):
|
|
163
|
+
- First line: NX NY NZ (number of cells in X, Y, Z directions)
|
|
164
|
+
- Second line: X Y Z (coordinates of origin in meters)
|
|
165
|
+
- Next section: X cell widths (either expanded or using N*value notation)
|
|
166
|
+
- Next section: Y cell widths (either expanded or using N*value notation)
|
|
167
|
+
- Next section: Z cell thicknesses (either expanded or using N*value notation)
|
|
168
|
+
|
|
169
|
+
Args:
|
|
170
|
+
filepath: Path to the mesh file
|
|
171
|
+
|
|
172
|
+
Returns:
|
|
173
|
+
GridData object containing the mesh information
|
|
174
|
+
|
|
175
|
+
Raises:
|
|
176
|
+
FileNotFoundError: If the file doesn't exist
|
|
177
|
+
ValueError: If the file format is invalid
|
|
178
|
+
"""
|
|
179
|
+
filepath = Path(filepath)
|
|
180
|
+
if not filepath.exists():
|
|
181
|
+
raise FileNotFoundError(f"Mesh file not found: {filepath}")
|
|
182
|
+
|
|
183
|
+
with open(filepath, 'r') as f:
|
|
184
|
+
lines = [line.strip() for line in f.readlines() if line.strip()]
|
|
185
|
+
|
|
186
|
+
metadata = {
|
|
187
|
+
'file_format': 'grav3d',
|
|
188
|
+
'filepath' : str(filepath)
|
|
189
|
+
}
|
|
190
|
+
|
|
191
|
+
try:
|
|
192
|
+
return _parse_grid_from_lines(lines, metadata)
|
|
193
|
+
except ValueError as e:
|
|
194
|
+
# Add context about the file to the error message
|
|
195
|
+
raise ValueError(f"Error parsing mesh file {filepath}: {e}") from e
|
|
196
|
+
|
|
197
|
+
|
|
198
|
+
def read_mod_file(filepath: Union[str, Path], grid: GridData,
|
|
199
|
+
missing_value: float = -99_999.0) -> np.ndarray:
|
|
200
|
+
"""
|
|
201
|
+
Read a model file containing property values for a 3D grid.
|
|
202
|
+
|
|
203
|
+
Currently supports Grav3D model file format (.mod) where each line contains
|
|
204
|
+
a single property value. The values are ordered with the z-direction changing
|
|
205
|
+
fastest, then x, then y.
|
|
206
|
+
|
|
207
|
+
Args:
|
|
208
|
+
filepath: Path to the model file
|
|
209
|
+
grid: GridData object containing the grid dimensions
|
|
210
|
+
missing_value: Value to replace with NaN in the output array (default: -99_999.0)
|
|
211
|
+
|
|
212
|
+
Returns:
|
|
213
|
+
3D numpy array of property values with shape (ny, nx, nz)
|
|
214
|
+
|
|
215
|
+
Raises:
|
|
216
|
+
FileNotFoundError: If the file doesn't exist
|
|
217
|
+
ValueError: If the number of values doesn't match the grid dimensions
|
|
218
|
+
"""
|
|
219
|
+
filepath = Path(filepath)
|
|
220
|
+
if not filepath.exists():
|
|
221
|
+
raise FileNotFoundError(f"Model file not found: {filepath}")
|
|
222
|
+
|
|
223
|
+
try:
|
|
224
|
+
# Read all values from the file
|
|
225
|
+
with open(filepath, 'r') as f:
|
|
226
|
+
lines = [line.strip() for line in f if line.strip()]
|
|
227
|
+
|
|
228
|
+
model_array = _parse_mod_file(grid, lines, missing_value)
|
|
229
|
+
|
|
230
|
+
return model_array
|
|
231
|
+
|
|
232
|
+
except Exception as e:
|
|
233
|
+
# Add context to any errors
|
|
234
|
+
raise ValueError(f"Error reading model file {filepath}: {str(e)}") from e
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
def _parse_mod_file(grid: GridData, lines: List[str], missing_value: Optional[float]) -> np.ndarray:
|
|
238
|
+
# Convert each line to a float
|
|
239
|
+
values = np.array([float(line) for line in lines], dtype=float)
|
|
240
|
+
# Calculate expected number of values based on grid dimensions
|
|
241
|
+
nx, ny, nz = grid.dimensions.nx, grid.dimensions.ny, grid.dimensions.nz
|
|
242
|
+
expected_count = nx * ny * nz
|
|
243
|
+
if len(values) != expected_count:
|
|
244
|
+
raise ValueError(
|
|
245
|
+
f"Invalid model file: expected {expected_count} values, got {len(values)}"
|
|
246
|
+
)
|
|
247
|
+
# Reshape to (ny, nx, nz) with z changing fastest
|
|
248
|
+
model_array = values.reshape((ny, nx, nz))
|
|
249
|
+
# Replace missing values with NaN
|
|
250
|
+
if missing_value is not None:
|
|
251
|
+
model_array[model_array == missing_value] = np.nan
|
|
252
|
+
return model_array
|
|
253
|
+
|
|
254
|
+
|
|
255
|
+
def structured_data_from(array: np.ndarray, grid: GridData,
|
|
256
|
+
data_name: str = 'model') -> StructuredData:
|
|
257
|
+
"""
|
|
258
|
+
Convert a 3D numpy array and grid information into a StructuredData object.
|
|
259
|
+
|
|
260
|
+
Args:
|
|
261
|
+
array: 3D numpy array of property values with shape (ny, nx, nz)
|
|
262
|
+
grid: GridData object containing grid dimensions, origin, and cell sizes
|
|
263
|
+
data_name: Name for the data array (default: 'model')
|
|
264
|
+
|
|
265
|
+
Returns:
|
|
266
|
+
StructuredData object containing the data array with proper coordinates
|
|
267
|
+
|
|
268
|
+
Raises:
|
|
269
|
+
ValueError: If array shape doesn't match grid dimensions
|
|
270
|
+
"""
|
|
271
|
+
# Verify array shape matches grid dimensions
|
|
272
|
+
expected_shape = (grid.dimensions.ny, grid.dimensions.nx, grid.dimensions.nz)
|
|
273
|
+
if array.shape != expected_shape:
|
|
274
|
+
raise ValueError(
|
|
275
|
+
f"Array shape {array.shape} doesn't match grid dimensions {expected_shape}"
|
|
276
|
+
)
|
|
277
|
+
|
|
278
|
+
# Calculate cell center coordinates
|
|
279
|
+
centers = _calculate_cell_centers(grid)
|
|
280
|
+
|
|
281
|
+
# Create the xarray DataArray with proper coordinates
|
|
282
|
+
xr_data_array = xr.DataArray(
|
|
283
|
+
data=array,
|
|
284
|
+
dims=['y', 'x', 'z'], # Dimensions in the order they appear in the array
|
|
285
|
+
coords={
|
|
286
|
+
'x': centers['x'],
|
|
287
|
+
'y': centers['y'],
|
|
288
|
+
'z': centers['z'],
|
|
289
|
+
},
|
|
290
|
+
name=data_name,
|
|
291
|
+
attrs=grid.metadata # Include grid metadata in the data array
|
|
292
|
+
)
|
|
293
|
+
|
|
294
|
+
# Create a StructuredData instance from the xarray DataArray
|
|
295
|
+
struct = StructuredData.from_data_array(
|
|
296
|
+
data_array=xr_data_array,
|
|
297
|
+
data_array_name=data_name
|
|
298
|
+
)
|
|
299
|
+
|
|
300
|
+
return struct
|
|
301
|
+
|
|
302
|
+
|
|
303
|
+
def _parse_grid_from_lines(lines: List[str], metadata: Dict[str, Any] = None) -> GridData:
|
|
304
|
+
"""
|
|
305
|
+
Parse grid information from a list of lines.
|
|
306
|
+
|
|
307
|
+
Args:
|
|
308
|
+
lines: List of lines containing grid information
|
|
309
|
+
metadata: Optional metadata to include in the GridData object
|
|
310
|
+
|
|
311
|
+
Returns:
|
|
312
|
+
GridData object containing the parsed grid information
|
|
313
|
+
|
|
314
|
+
Raises:
|
|
315
|
+
ValueError: If the lines format is invalid
|
|
316
|
+
"""
|
|
317
|
+
if len(lines) < 2:
|
|
318
|
+
raise ValueError("Invalid format: insufficient data")
|
|
319
|
+
|
|
320
|
+
# Parse dimensions (first line)
|
|
321
|
+
try:
|
|
322
|
+
dims = lines[0].split()
|
|
323
|
+
nx, ny, nz = int(dims[0]), int(dims[1]), int(dims[2])
|
|
324
|
+
except (IndexError, ValueError) as e:
|
|
325
|
+
raise ValueError(f"Invalid dimensions: {e}")
|
|
326
|
+
|
|
327
|
+
# Parse origin coordinates (second line)
|
|
328
|
+
try:
|
|
329
|
+
origin = lines[1].split()
|
|
330
|
+
x, y, z = float(origin[0]), float(origin[1]), float(origin[2])
|
|
331
|
+
except (IndexError, ValueError) as e:
|
|
332
|
+
raise ValueError(f"Invalid origin: {e}")
|
|
333
|
+
|
|
334
|
+
# Parse cell sizes
|
|
335
|
+
try:
|
|
336
|
+
current_line = 2
|
|
337
|
+
x_sizes, current_line = _parse_cell_sizes(lines, current_line, nx)
|
|
338
|
+
y_sizes, current_line = _parse_cell_sizes(lines, current_line, ny)
|
|
339
|
+
z_sizes, _ = _parse_cell_sizes(lines, current_line, nz)
|
|
340
|
+
except (IndexError, ValueError) as e:
|
|
341
|
+
raise ValueError(f"Error parsing cell sizes: {e}")
|
|
342
|
+
|
|
343
|
+
# Create a GridData object with the parsed information
|
|
344
|
+
grid_data_dict = {
|
|
345
|
+
'dimensions': {'nx': nx, 'ny': ny, 'nz': nz},
|
|
346
|
+
'origin' : {'x': x, 'y': y, 'z': z},
|
|
347
|
+
'cell_sizes': {
|
|
348
|
+
'x': x_sizes,
|
|
349
|
+
'y': y_sizes,
|
|
350
|
+
'z': z_sizes
|
|
351
|
+
},
|
|
352
|
+
'metadata' : metadata or {}
|
|
353
|
+
}
|
|
354
|
+
|
|
355
|
+
return GridData.from_dict(grid_data_dict)
|
|
356
|
+
|
|
357
|
+
|
|
358
|
+
def _parse_cell_sizes(lines: List[str], start_index: int, count: int) -> Tuple[List[float], int]:
|
|
359
|
+
"""
|
|
360
|
+
Parse cell sizes from file lines, handling both compact (N*value) and expanded notation.
|
|
361
|
+
|
|
362
|
+
Args:
|
|
363
|
+
lines: List of lines from the file
|
|
364
|
+
start_index: Index to start parsing from
|
|
365
|
+
count: Number of values to parse
|
|
366
|
+
|
|
367
|
+
Returns:
|
|
368
|
+
Tuple containing:
|
|
369
|
+
- List of parsed values
|
|
370
|
+
- Next line index after parsing
|
|
371
|
+
"""
|
|
372
|
+
line = lines[start_index]
|
|
373
|
+
|
|
374
|
+
# Check for compact notation (N*value)
|
|
375
|
+
if '*' in line:
|
|
376
|
+
parts = line.split('*')
|
|
377
|
+
repetition = int(parts[0])
|
|
378
|
+
value = float(parts[1])
|
|
379
|
+
values = [value] * repetition
|
|
380
|
+
return values, start_index + 1
|
|
381
|
+
|
|
382
|
+
# Handle expanded notation across multiple lines
|
|
383
|
+
values = []
|
|
384
|
+
line_index = start_index
|
|
385
|
+
|
|
386
|
+
while len(values) < count and line_index < len(lines):
|
|
387
|
+
current_line = lines[line_index]
|
|
388
|
+
|
|
389
|
+
# If we encounter a line with compact notation while parsing expanded,
|
|
390
|
+
# it's likely the next section
|
|
391
|
+
if '*' in current_line and len(values) > 0:
|
|
392
|
+
break
|
|
393
|
+
|
|
394
|
+
# Add all numbers from the current line
|
|
395
|
+
values.extend([float(x) for x in current_line.split()])
|
|
396
|
+
line_index += 1
|
|
397
|
+
|
|
398
|
+
# Take only the required number of values
|
|
399
|
+
return values[:count], line_index
|
|
400
|
+
|
|
401
|
+
|
|
402
|
+
def _calculate_cell_centers(grid: GridData) -> Dict[str, np.ndarray]:
|
|
403
|
+
"""
|
|
404
|
+
Calculate the center coordinates of each cell in the grid.
|
|
405
|
+
|
|
406
|
+
Args:
|
|
407
|
+
grid: GridData object containing grid dimensions, origin, and cell sizes
|
|
408
|
+
|
|
409
|
+
Returns:
|
|
410
|
+
Dictionary with 'x', 'y', and 'z' keys containing arrays of cell center coordinates
|
|
411
|
+
"""
|
|
412
|
+
# Convert cell sizes to numpy arrays for vectorized operations
|
|
413
|
+
x_sizes = np.array(grid.cell_sizes.x)
|
|
414
|
+
y_sizes = np.array(grid.cell_sizes.y)
|
|
415
|
+
z_sizes = np.array(grid.cell_sizes.z)
|
|
416
|
+
|
|
417
|
+
# Calculate cell centers by adding cumulative sizes and offsetting by half the first cell size
|
|
418
|
+
x_centers = grid.origin.x + np.cumsum(x_sizes) - x_sizes[0] / 2
|
|
419
|
+
y_centers = grid.origin.y + np.cumsum(y_sizes) - y_sizes[0] / 2
|
|
420
|
+
|
|
421
|
+
# For z, cells typically extend downward from the origin
|
|
422
|
+
z_centers = grid.origin.z - (np.cumsum(z_sizes) - z_sizes[0] / 2)
|
|
423
|
+
|
|
424
|
+
return {
|
|
425
|
+
'x': x_centers,
|
|
426
|
+
'y': y_centers,
|
|
427
|
+
'z': z_centers
|
|
428
|
+
}
|
|
@@ -102,21 +102,81 @@ def read_volumetric_mesh_attr_file(reader_helper: GenericReaderFilesHelper) -> p
|
|
|
102
102
|
return df
|
|
103
103
|
|
|
104
104
|
|
|
105
|
-
def pv_cast_to_explicit_structured_grid(pyvista_object):
|
|
106
|
-
|
|
105
|
+
def pv_cast_to_explicit_structured_grid(pyvista_object: 'pv.DataSet') -> 'pv.ExplicitStructuredGrid':
|
|
107
106
|
pv = optional_requirements.require_pyvista()
|
|
108
107
|
|
|
109
108
|
match pyvista_object:
|
|
110
|
-
|
|
111
109
|
case pv.RectilinearGrid() as rectl_grid:
|
|
112
|
-
|
|
113
110
|
return __pv_convert_rectilinear_to_explicit(rectl_grid)
|
|
114
|
-
|
|
111
|
+
case pv.UnstructuredGrid() as unstr_grid:
|
|
112
|
+
return __pv_convert_unstructured_to_explicit(unstr_grid)
|
|
115
113
|
case _:
|
|
116
|
-
|
|
117
114
|
return pyvista_object.cast_to_explicit_structured_grid()
|
|
118
115
|
|
|
119
116
|
|
|
117
|
+
def __pv_convert_unstructured_to_explicit(unstr_grid):
|
|
118
|
+
"""
|
|
119
|
+
Convert a PyVista UnstructuredGrid to an ExplicitStructuredGrid if possible.
|
|
120
|
+
"""
|
|
121
|
+
pv = optional_requirements.require_pyvista()
|
|
122
|
+
|
|
123
|
+
# First check if the grid has the necessary attributes to be treated as structured
|
|
124
|
+
if not hasattr(unstr_grid, 'n_cells') or unstr_grid.n_cells == 0:
|
|
125
|
+
raise ValueError("The unstructured grid has no cells.")
|
|
126
|
+
|
|
127
|
+
# Try to detect if the grid has a structured topology
|
|
128
|
+
# Check if the grid has cell type 11 (VTK_VOXEL) or 12 (VTK_HEXAHEDRON)
|
|
129
|
+
cell_types = unstr_grid.celltypes
|
|
130
|
+
|
|
131
|
+
# Voxels (11) and hexahedra (12) are the cell types used in structured grids
|
|
132
|
+
if not all(ct in [11, 12] for ct in cell_types):
|
|
133
|
+
raise ValueError("The unstructured grid contains non-hexahedral cells and cannot be converted to explicit structured.")
|
|
134
|
+
|
|
135
|
+
# Try to infer dimensions from the grid
|
|
136
|
+
try:
|
|
137
|
+
# Method 1: Try PyVista's built-in conversion if available
|
|
138
|
+
return unstr_grid.cast_to_explicit_structured_grid()
|
|
139
|
+
except (AttributeError, TypeError):
|
|
140
|
+
pass
|
|
141
|
+
|
|
142
|
+
try:
|
|
143
|
+
# Method 2: If the grid has dimensions stored as field data
|
|
144
|
+
if "dimensions" in unstr_grid.field_data:
|
|
145
|
+
dims = unstr_grid.field_data["dimensions"]
|
|
146
|
+
if len(dims) == 3:
|
|
147
|
+
nx, ny, nz = dims
|
|
148
|
+
# Verify that dimensions match the number of cells
|
|
149
|
+
if (nx-1)*(ny-1)*(nz-1) != unstr_grid.n_cells:
|
|
150
|
+
raise ValueError("Stored dimensions do not match the number of cells.")
|
|
151
|
+
|
|
152
|
+
# Extract points and reorder if needed
|
|
153
|
+
points = unstr_grid.points.reshape((nx, ny, nz, 3))
|
|
154
|
+
|
|
155
|
+
# Create explicit structured grid
|
|
156
|
+
explicit_grid = pv.ExplicitStructuredGrid((nx, ny, nz), points.reshape((-1, 3)))
|
|
157
|
+
explicit_grid.compute_connectivity()
|
|
158
|
+
|
|
159
|
+
# Transfer data arrays
|
|
160
|
+
for name, array in unstr_grid.cell_data.items():
|
|
161
|
+
explicit_grid.cell_data[name] = array.copy()
|
|
162
|
+
for name, array in unstr_grid.point_data.items():
|
|
163
|
+
explicit_grid.point_data[name] = array.copy()
|
|
164
|
+
for name, array in unstr_grid.field_data.items():
|
|
165
|
+
if name != "dimensions": # Skip dimensions field
|
|
166
|
+
explicit_grid.field_data[name] = array.copy()
|
|
167
|
+
|
|
168
|
+
return explicit_grid
|
|
169
|
+
except (ValueError, KeyError):
|
|
170
|
+
pass
|
|
171
|
+
|
|
172
|
+
# If none of the above methods work, use PyVista's extract_cells function
|
|
173
|
+
# to reconstruct the structured grid if possible
|
|
174
|
+
try:
|
|
175
|
+
# This is a best-effort approach that tries multiple strategies
|
|
176
|
+
return pv.core.filters.convert_unstructured_to_structured_grid(unstr_grid)
|
|
177
|
+
except Exception as e:
|
|
178
|
+
raise ValueError(f"Failed to convert unstructured grid to explicit structured grid: {e}")
|
|
179
|
+
|
|
120
180
|
def __pv_convert_rectilinear_to_explicit(rectl_grid):
|
|
121
181
|
|
|
122
182
|
pv = optional_requirements.require_pyvista()
|
|
@@ -100,6 +100,16 @@ def _validate_lith_data(d: pd.DataFrame, reader_helper: GenericReaderFilesHelper
|
|
|
100
100
|
raise AttributeError('If wells attributes represent lithology, `component lith` column must be present in the file. '
|
|
101
101
|
'Use columns_map to assign column names to these fields. Maybe you are marking as lithology'
|
|
102
102
|
'the wrong file?')
|
|
103
|
+
else:
|
|
104
|
+
# TODO: Add categories to reader helper
|
|
105
|
+
categories = sorted(d['component lith'].dropna().unique())
|
|
106
|
+
d['component lith'] = pd.Categorical(
|
|
107
|
+
d['component lith'],
|
|
108
|
+
categories=categories,
|
|
109
|
+
ordered=True
|
|
110
|
+
)
|
|
111
|
+
|
|
112
|
+
d['lith_ids'] = d['component lith'].cat.codes + 1
|
|
103
113
|
|
|
104
114
|
given_top = np.isin(['top', 'base'], d.columns).all()
|
|
105
115
|
given_altitude_and_base = np.isin(['altitude', 'base'], d.columns).all()
|
|
File without changes
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
import pyvista as pv
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def transform_gaussian_blur(grid, sigma=20.0):
|
|
6
|
+
"""
|
|
7
|
+
Applies a Gaussian blur to the 'model_name' field of the structured grid.
|
|
8
|
+
|
|
9
|
+
Parameters:
|
|
10
|
+
grid - PyVista grid with 'model_name' field
|
|
11
|
+
sigma - Standard deviation for the Gaussian kernel
|
|
12
|
+
"""
|
|
13
|
+
from scipy.ndimage import gaussian_filter
|
|
14
|
+
|
|
15
|
+
# Get the original dimensions of the grid
|
|
16
|
+
dims = grid.dimensions
|
|
17
|
+
|
|
18
|
+
# Reshape the data to 3D array matching grid dimensions
|
|
19
|
+
values = np.array(grid['model_name'])
|
|
20
|
+
values_3d = values.reshape(dims[2] - 1, dims[1] - 1, dims[0] - 1).transpose(2, 1, 0)
|
|
21
|
+
|
|
22
|
+
# Apply Gaussian filter
|
|
23
|
+
blurred_values = gaussian_filter(values_3d, sigma=sigma, axes=(2,))
|
|
24
|
+
|
|
25
|
+
# Reshape back to 1D array
|
|
26
|
+
grid['model_name'] = blurred_values.transpose(2, 1, 0).flatten()
|
|
27
|
+
return grid
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def transform_sinusoidal(values, amplitude=1.0, frequency=0.01, phase=0):
|
|
31
|
+
"""
|
|
32
|
+
Apply a sinusoidal transformation to the values.
|
|
33
|
+
"""
|
|
34
|
+
return values + amplitude * np.sin(frequency * values + phase)
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def obfuscate_model_name(grid, transform_functions, attr):
|
|
38
|
+
"""
|
|
39
|
+
Applies transformation functions to the 'model_name' field.
|
|
40
|
+
Functions can operate on either the grid or the values array.
|
|
41
|
+
"""
|
|
42
|
+
for func in transform_functions:
|
|
43
|
+
if 'grid' in func.__code__.co_varnames:
|
|
44
|
+
# Function expects the full grid
|
|
45
|
+
grid = func(grid)
|
|
46
|
+
else:
|
|
47
|
+
# Function expects just the values array
|
|
48
|
+
values = np.array(grid[attr])
|
|
49
|
+
grid[attr] = func(values)
|
|
50
|
+
|
|
51
|
+
return grid
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
# pyvista_struct = transform_xy_to_z_propagation(pyvista_struct, z_factor=0.3, noise_level=0.1)
|
|
55
|
+
def transform_subtract_mean(values):
    """
    Center the values by subtracting their arithmetic mean.
    """
    mean_value = np.mean(values)
    return values - mean_value
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def transform_scale(values, scale_factor=0.003):
    """
    Scale every value by `scale_factor`.
    """
    scaled = values * scale_factor
    return scaled
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def update_extent(pyvista_grid, new_extent):
    """
    Rescale a grid's points so its bounding box matches `new_extent`.

    Parameters:
        pyvista_grid - grid exposing `.bounds`, `.points`, and `.Modified()`
        new_extent   - array-like [xmin, xmax, ymin, ymax, zmin, zmax]

    Raises:
        ValueError - if any min >= max in the new extent, or if the grid is
                     degenerate (zero length) along any axis.
    """
    # Current bounding box, laid out as [xmin, xmax, ymin, ymax, zmin, zmax].
    bounds = np.array(pyvista_grid.bounds)

    # Reject extents where any axis is empty or inverted.
    if any(new_extent[i] >= new_extent[i + 1] for i in (0, 2, 4)):
        raise ValueError("Each min value must be less than the corresponding max value in the new extent.")

    # Per-axis lengths of the current and requested boxes.
    current_spans = bounds[1::2] - bounds[0::2]
    target_spans = np.array([new_extent[1] - new_extent[0],
                             new_extent[3] - new_extent[2],
                             new_extent[5] - new_extent[4]])

    # A degenerate axis would make the normalization below divide by zero.
    if np.any(current_spans == 0):
        raise ValueError("One of the dimensions in the current grid has zero length.")

    # Map each point into [0, 1]^3 relative to the old box, then stretch
    # it into the new box.
    unit_coords = (pyvista_grid.points - bounds[0::2]) / current_spans
    target_mins = np.array([new_extent[0], new_extent[2], new_extent[4]])
    pyvista_grid.points = target_mins + unit_coords * target_spans

    # Notify VTK of the geometry change; bounds follow from the new points.
    pyvista_grid.Modified()
    return pyvista_grid
|
{subsurface_terra-2025.1.0rc10.dist-info → subsurface_terra-2025.1.0rc12.dist-info}/METADATA
RENAMED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: subsurface_terra
|
|
3
|
-
Version: 2025.1.
|
|
3
|
+
Version: 2025.1.0rc12
|
|
4
4
|
Summary: Subsurface data types and utilities. This version is the one used by Terranigma Solutions. Please feel free to take anything in this repository for the original one.
|
|
5
5
|
Home-page: https://softwareunderground.github.io/subsurface
|
|
6
6
|
Author: Software Underground
|
|
@@ -1,9 +1,9 @@
|
|
|
1
1
|
subsurface/__init__.py,sha256=0D2rCUem3fiHsXFXXSmwheLiPS4cXxEdfWdFBj0b-cY,930
|
|
2
|
-
subsurface/_version.py,sha256=
|
|
2
|
+
subsurface/_version.py,sha256=D_1QY4RtsEm2Eew8rPYqG7jASO3oDCF7p_cXgYN2Qzg,550
|
|
3
3
|
subsurface/optional_requirements.py,sha256=Wg36RqxzPiLtN-3qSg5K9QVEeXCB0-EjSzHERAoO8EE,2883
|
|
4
|
-
subsurface/api/__init__.py,sha256=
|
|
4
|
+
subsurface/api/__init__.py,sha256=E3R1fNn_d5XQdlZyjtzBcH_bYRzAvOc3xV38qFLqbZY,369
|
|
5
5
|
subsurface/api/interfaces/__init__.py,sha256=rqUtJyMLicobcyhmr74TepjmUQAEmlazKT3vjV_n3aA,6
|
|
6
|
-
subsurface/api/interfaces/stream.py,sha256=
|
|
6
|
+
subsurface/api/interfaces/stream.py,sha256=fCoroYYIzbHbCi-JlThwEtw-JegKnEacYdD11QCO-bA,4911
|
|
7
7
|
subsurface/api/reader/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
8
8
|
subsurface/api/reader/read_wells.py,sha256=bu6fAdj8WTMPWYVZwBdn7vNt00vfdJ3nqJsZ4o1V25U,2731
|
|
9
9
|
subsurface/core/__init__.py,sha256=73SzsVXHiiWt_215bgIqWWy2t0XgvyOxDPxuq0NyktM,32
|
|
@@ -13,7 +13,7 @@ subsurface/core/geological_formats/boreholes/__init__.py,sha256=47DEQpj8HBSa-_TI
|
|
|
13
13
|
subsurface/core/geological_formats/boreholes/_combine_trajectories.py,sha256=U5VribebcMAag0DOKnna983g1BXAGLKCddGra2g3Nos,5246
|
|
14
14
|
subsurface/core/geological_formats/boreholes/boreholes.py,sha256=Q7KBYIk9M4-SZjDOVx5dMp8DnHBo4VTFrJ4jAKCmrJ8,5256
|
|
15
15
|
subsurface/core/geological_formats/boreholes/collars.py,sha256=o1I8bS0XqWa2fS0n6XZVKXsuBHknXO2Z_5sdlFc_GAE,750
|
|
16
|
-
subsurface/core/geological_formats/boreholes/survey.py,sha256=
|
|
16
|
+
subsurface/core/geological_formats/boreholes/survey.py,sha256=aaENR0ksT_lK0BYR7HL73KNZ44eo3Ag353xQdVcLOz0,16219
|
|
17
17
|
subsurface/core/reader_helpers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
18
18
|
subsurface/core/reader_helpers/reader_unstruct.py,sha256=Lme1ano-dQrKhLCwrokcpKqa28DFxDaGAs3ub8MAHtY,397
|
|
19
19
|
subsurface/core/reader_helpers/readers_data.py,sha256=Vewi8pqv-zooRIhffTM52eWZeP6l9MnHkD9LZj6c1LU,4995
|
|
@@ -27,7 +27,7 @@ subsurface/core/structs/base_structures/structured_data.py,sha256=iaVmCMIwkxiGXt
|
|
|
27
27
|
subsurface/core/structs/base_structures/unstructured_data.py,sha256=_cB64U0d8VlWgod0eHNluLmtLQZKLOwJYkmdtqYocXs,12904
|
|
28
28
|
subsurface/core/structs/structured_elements/__init__.py,sha256=wQ8z6vioC98TNwnqMHlDMVFJ1yOKsmlVa6kHL1SOfZQ,43
|
|
29
29
|
subsurface/core/structs/structured_elements/octree_mesh.py,sha256=vI30DW46oGP_h_3v85MiFmwFDoKh1HWdVBATdTmC-yA,240
|
|
30
|
-
subsurface/core/structs/structured_elements/structured_grid.py,sha256=
|
|
30
|
+
subsurface/core/structs/structured_elements/structured_grid.py,sha256=hSIBkLjEF-_INVUA7w66CoTDWYISmbwjLOP43zL5r10,1802
|
|
31
31
|
subsurface/core/structs/structured_elements/structured_mesh.py,sha256=GfFKwmIqxVe9qSxepxK5PdSNf320c14r13WYLYVwaEo,265
|
|
32
32
|
subsurface/core/structs/unstructured_elements/__init__.py,sha256=BcrnAs207rFTl0hZp8D_LdmpUJ_8G_hoK4b7EMFTKJw,144
|
|
33
33
|
subsurface/core/structs/unstructured_elements/line_set.py,sha256=cXLje8ep7dJnzGiFVgLzCazbYocqY5z1t-jyDCYHInY,2214
|
|
@@ -37,7 +37,7 @@ subsurface/core/structs/unstructured_elements/triangular_surface.py,sha256=58Cpu
|
|
|
37
37
|
subsurface/core/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
38
38
|
subsurface/core/utils/utils_core.py,sha256=lRzLlE11JCH-GCjhbIoKZ-WfARSmI_yhmJkqLMMNmTo,1110
|
|
39
39
|
subsurface/modules/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
40
|
-
subsurface/modules/reader/__init__.py,sha256=
|
|
40
|
+
subsurface/modules/reader/__init__.py,sha256=H51ReyQbadZ4813AZ_W6yZmfg_Osp7Tbn7VdzJ1YsnM,543
|
|
41
41
|
subsurface/modules/reader/from_binary.py,sha256=AtBzLCJJRZ0exgfUfv1Rfp1tVMUCfSLuAwTCb_Ieqfs,1341
|
|
42
42
|
subsurface/modules/reader/read_netcdf.py,sha256=zAqRhckeGjlh95Ar-IPbSlUPbbLVignXu-DC9FY7XUk,1019
|
|
43
43
|
subsurface/modules/reader/faults/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
@@ -45,7 +45,7 @@ subsurface/modules/reader/faults/faults.py,sha256=s144Gq0tFvjSUkB66dB86kYjW2l2e1
|
|
|
45
45
|
subsurface/modules/reader/geo_object/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
46
46
|
subsurface/modules/reader/mesh/_GOCAD_mesh.py,sha256=_MwNy4iVMGnemvT01hhB-nbCc8a1r8ETEAkD-AWcBz4,3304
|
|
47
47
|
subsurface/modules/reader/mesh/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
48
|
-
subsurface/modules/reader/mesh/_trimesh_reader.py,sha256
|
|
48
|
+
subsurface/modules/reader/mesh/_trimesh_reader.py,sha256=D28DrNuC8QDZpmmctjf6KRHWeHPidvavn2V8NIF9roo,17549
|
|
49
49
|
subsurface/modules/reader/mesh/csv_mesh_reader.py,sha256=0iXYg-JOLUg7yH6Rw6qCoxXvKh0hOUTwjYxbhSlGfGM,1969
|
|
50
50
|
subsurface/modules/reader/mesh/dxf_reader.py,sha256=JDhzFRE46sdwMGBB8enHNluH07ohqt6LhgLHiSQRL-I,6525
|
|
51
51
|
subsurface/modules/reader/mesh/glb_reader.py,sha256=dierR9AYM5Q2szLuemfLlM_JcPRNtDrD5fpF8zNjBS8,1118
|
|
@@ -60,19 +60,22 @@ subsurface/modules/reader/profiles/profiles_core.py,sha256=kqlt79hjdWWQNBjWqLGlu
|
|
|
60
60
|
subsurface/modules/reader/topography/__init__.py,sha256=zkaTX5JxsNfjF-dFeEbHfUB58vhPMjm6Iiqx9HgJOrY,14
|
|
61
61
|
subsurface/modules/reader/topography/topo_core.py,sha256=6rkDp9XrUSif8ZuraDrUK2I8-yqEp8CRm4r4l2lQuw0,3542
|
|
62
62
|
subsurface/modules/reader/volume/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
63
|
-
subsurface/modules/reader/volume/
|
|
63
|
+
subsurface/modules/reader/volume/read_grav3d.py,sha256=Zg5zCnSbcNLNOL-Yj_lePpnueFFLBPJQUyp3B7TD99E,14513
|
|
64
|
+
subsurface/modules/reader/volume/read_volume.py,sha256=GvlYIROefskHkqpwvSu60jQIzev54jBfbrL74CYT7TU,9580
|
|
64
65
|
subsurface/modules/reader/volume/segy_reader.py,sha256=oBS1FwwzFTMBmAR3odJMvW-as_0YMudPcFmndpcApW4,3958
|
|
65
66
|
subsurface/modules/reader/volume/seismic.py,sha256=dRA7YKw9fkrkAYS7Bnfm7GfCPdfxVsDyfM7frQK56V4,4950
|
|
66
67
|
subsurface/modules/reader/volume/volume_utils.py,sha256=7ToIdVwq04lMyYGJE9PzYVQt9xl9mjbXXrzvMfM6wGw,1367
|
|
67
68
|
subsurface/modules/reader/wells/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
68
69
|
subsurface/modules/reader/wells/_read_to_df.py,sha256=PFs5ottnm7LtzmJgPh8UN3cdR4j2y734RTDPoks16ls,2294
|
|
69
|
-
subsurface/modules/reader/wells/read_borehole_interface.py,sha256=
|
|
70
|
+
subsurface/modules/reader/wells/read_borehole_interface.py,sha256=1Kq_IjHhhPgxQKjk5B655CT9wCYCYz5T4UjggF8Pdfg,5899
|
|
70
71
|
subsurface/modules/reader/wells/wells_utils.py,sha256=CoVF9Qtba8Qu42JcVmtsyaSS_PA80lcJ6mzh6-TQt2Q,2206
|
|
71
72
|
subsurface/modules/reader/wells/DEP/__init__.py,sha256=8PES2m_HqZtZ-jFhPynJWpk8dovKAh1UyVnfAxg_NXY,1887
|
|
72
73
|
subsurface/modules/reader/wells/DEP/_well_files_reader.py,sha256=QYZ9p7iyGt1JDlkrFpYPFaMnXwLj7wdFZmTYiO2niSY,6216
|
|
73
74
|
subsurface/modules/reader/wells/DEP/_wells_api.py,sha256=SZU0cwfbn-bqKaAIG94be9Azxyx0pnvZrE2yXxIIJD4,2334
|
|
74
75
|
subsurface/modules/reader/wells/DEP/_welly_reader.py,sha256=ktBEswaQSj0mzubFME8oTP7sTPDI1r65zVfT91LORMI,6468
|
|
75
76
|
subsurface/modules/reader/wells/DEP/pandas_to_welly.py,sha256=Z67LE6K4Dy5LkR6ZrAmQx9u0UbLJj8oWxAALVx_eyDc,6925
|
|
77
|
+
subsurface/modules/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
78
|
+
subsurface/modules/tools/mocking_aux.py,sha256=DcavlyWV5pFl31MHUgOxAXW2I_y8bKqbyesBfCDWnLs,3581
|
|
76
79
|
subsurface/modules/visualization/__init__.py,sha256=Y9SUj2cflk0ulCj1F1cJAeoCQJOD0eN4E98wxZ5Zhj0,65
|
|
77
80
|
subsurface/modules/visualization/to_pyvista.py,sha256=_xM4Xx3PD0wyxtUrMHS5k6xbiDQ5k_Kis8dE-wnM77M,10514
|
|
78
81
|
subsurface/modules/writer/__init__.py,sha256=1oDGj2X1G-R2ZRi_8sMBfq1QHkUarbycmMVvSdSTk-g,50
|
|
@@ -86,8 +89,8 @@ subsurface/modules/writer/to_rex/material_encoder.py,sha256=zGlqF9X_Civ9VvtGwo-I
|
|
|
86
89
|
subsurface/modules/writer/to_rex/mesh_encoder.py,sha256=6TBtJhYJEAMEBHxQkbweXrJO1jIUx1ClM8l5ajVCrLc,6443
|
|
87
90
|
subsurface/modules/writer/to_rex/to_rex.py,sha256=njsm2d3e69pRVfF_TOC_hexvXPmgNTZdJvhbnXcvyIo,3800
|
|
88
91
|
subsurface/modules/writer/to_rex/utils.py,sha256=HEpJ95LjHOK24ePpmLpPP5uFyv6i_kN3AWh031q-1Uc,379
|
|
89
|
-
subsurface_terra-2025.1.
|
|
90
|
-
subsurface_terra-2025.1.
|
|
91
|
-
subsurface_terra-2025.1.
|
|
92
|
-
subsurface_terra-2025.1.
|
|
93
|
-
subsurface_terra-2025.1.
|
|
92
|
+
subsurface_terra-2025.1.0rc12.dist-info/licenses/LICENSE,sha256=GSXh9K5TZauM89BeGbYg07oST_HMhOTiZoEGaUeKBtA,11606
|
|
93
|
+
subsurface_terra-2025.1.0rc12.dist-info/METADATA,sha256=-JXdGryaR_k9plZ3-3GoFQKbYYS2kVZfe_4oH6haLkw,7094
|
|
94
|
+
subsurface_terra-2025.1.0rc12.dist-info/WHEEL,sha256=SmOxYU7pzNKBqASvQJ7DjX3XGUF92lrGhMb3R6_iiqI,91
|
|
95
|
+
subsurface_terra-2025.1.0rc12.dist-info/top_level.txt,sha256=f32R_tUSf83CfkpB4vjv5m2XcD8TmDX9h7F4rnEXt5A,11
|
|
96
|
+
subsurface_terra-2025.1.0rc12.dist-info/RECORD,,
|
{subsurface_terra-2025.1.0rc10.dist-info → subsurface_terra-2025.1.0rc12.dist-info}/licenses/LICENSE
RENAMED
|
File without changes
|
{subsurface_terra-2025.1.0rc10.dist-info → subsurface_terra-2025.1.0rc12.dist-info}/top_level.txt
RENAMED
|
File without changes
|