subsurface-terra 2025.1.0rc15-py3-none-any.whl → 2025.1.0rc17-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- subsurface/__init__.py +31 -31
- subsurface/_version.py +34 -21
- subsurface/api/__init__.py +13 -13
- subsurface/api/interfaces/__init__.py +3 -3
- subsurface/api/interfaces/stream.py +136 -136
- subsurface/api/reader/read_wells.py +78 -78
- subsurface/core/geological_formats/boreholes/_combine_trajectories.py +117 -117
- subsurface/core/geological_formats/boreholes/_map_attrs_to_survey.py +236 -234
- subsurface/core/geological_formats/boreholes/_survey_to_unstruct.py +163 -163
- subsurface/core/geological_formats/boreholes/boreholes.py +140 -140
- subsurface/core/geological_formats/boreholes/collars.py +26 -26
- subsurface/core/geological_formats/boreholes/survey.py +86 -86
- subsurface/core/geological_formats/fault.py +47 -47
- subsurface/core/reader_helpers/reader_unstruct.py +11 -11
- subsurface/core/reader_helpers/readers_data.py +130 -130
- subsurface/core/reader_helpers/readers_wells.py +13 -13
- subsurface/core/structs/__init__.py +3 -3
- subsurface/core/structs/base_structures/__init__.py +2 -2
- subsurface/core/structs/base_structures/_aux.py +69 -0
- subsurface/core/structs/base_structures/_liquid_earth_mesh.py +121 -121
- subsurface/core/structs/base_structures/_unstructured_data_constructor.py +70 -70
- subsurface/core/structs/base_structures/base_structures_enum.py +6 -6
- subsurface/core/structs/base_structures/structured_data.py +282 -282
- subsurface/core/structs/base_structures/unstructured_data.py +338 -319
- subsurface/core/structs/structured_elements/octree_mesh.py +10 -10
- subsurface/core/structs/structured_elements/structured_grid.py +59 -59
- subsurface/core/structs/structured_elements/structured_mesh.py +9 -9
- subsurface/core/structs/unstructured_elements/__init__.py +3 -3
- subsurface/core/structs/unstructured_elements/line_set.py +72 -72
- subsurface/core/structs/unstructured_elements/point_set.py +43 -43
- subsurface/core/structs/unstructured_elements/tetrahedron_mesh.py +35 -35
- subsurface/core/structs/unstructured_elements/triangular_surface.py +62 -62
- subsurface/core/utils/utils_core.py +38 -38
- subsurface/modules/reader/__init__.py +13 -13
- subsurface/modules/reader/faults/faults.py +80 -80
- subsurface/modules/reader/from_binary.py +46 -46
- subsurface/modules/reader/mesh/_GOCAD_mesh.py +82 -82
- subsurface/modules/reader/mesh/_trimesh_reader.py +447 -447
- subsurface/modules/reader/mesh/csv_mesh_reader.py +53 -53
- subsurface/modules/reader/mesh/dxf_reader.py +177 -177
- subsurface/modules/reader/mesh/glb_reader.py +30 -30
- subsurface/modules/reader/mesh/mx_reader.py +232 -232
- subsurface/modules/reader/mesh/obj_reader.py +53 -53
- subsurface/modules/reader/mesh/omf_mesh_reader.py +43 -43
- subsurface/modules/reader/mesh/surface_reader.py +56 -56
- subsurface/modules/reader/mesh/surfaces_api.py +41 -41
- subsurface/modules/reader/profiles/__init__.py +3 -3
- subsurface/modules/reader/profiles/profiles_core.py +197 -197
- subsurface/modules/reader/read_netcdf.py +38 -38
- subsurface/modules/reader/topography/__init__.py +7 -7
- subsurface/modules/reader/topography/topo_core.py +100 -100
- subsurface/modules/reader/volume/read_grav3d.py +447 -428
- subsurface/modules/reader/volume/read_volume.py +327 -230
- subsurface/modules/reader/volume/segy_reader.py +105 -105
- subsurface/modules/reader/volume/seismic.py +173 -173
- subsurface/modules/reader/volume/volume_utils.py +43 -43
- subsurface/modules/reader/wells/DEP/__init__.py +43 -43
- subsurface/modules/reader/wells/DEP/_well_files_reader.py +167 -167
- subsurface/modules/reader/wells/DEP/_wells_api.py +61 -61
- subsurface/modules/reader/wells/DEP/_welly_reader.py +180 -180
- subsurface/modules/reader/wells/DEP/pandas_to_welly.py +212 -212
- subsurface/modules/reader/wells/_read_to_df.py +57 -57
- subsurface/modules/reader/wells/read_borehole_interface.py +148 -148
- subsurface/modules/reader/wells/wells_utils.py +68 -68
- subsurface/modules/tools/mocking_aux.py +104 -104
- subsurface/modules/visualization/__init__.py +2 -2
- subsurface/modules/visualization/to_pyvista.py +320 -320
- subsurface/modules/writer/to_binary.py +12 -12
- subsurface/modules/writer/to_rex/common.py +78 -78
- subsurface/modules/writer/to_rex/data_struct.py +74 -74
- subsurface/modules/writer/to_rex/gempy_to_rexfile.py +791 -791
- subsurface/modules/writer/to_rex/material_encoder.py +44 -44
- subsurface/modules/writer/to_rex/mesh_encoder.py +152 -152
- subsurface/modules/writer/to_rex/to_rex.py +115 -115
- subsurface/modules/writer/to_rex/utils.py +15 -15
- subsurface/optional_requirements.py +116 -116
- {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc17.dist-info}/METADATA +194 -194
- subsurface_terra-2025.1.0rc17.dist-info/RECORD +99 -0
- {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc17.dist-info}/WHEEL +1 -1
- {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc17.dist-info}/licenses/LICENSE +203 -203
- subsurface_terra-2025.1.0rc15.dist-info/RECORD +0 -98
- {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc17.dist-info}/top_level.txt +0 -0
subsurface/core/structs/base_structures/_aux.py (new file)
@@ -0,0 +1,69 @@

import pandas as pd


def safe_convert_to_float32(df: pd.DataFrame, error_handling: str = 'raise') -> pd.DataFrame:
    """
    Convert DataFrame columns to float32, handling non-convertible columns.

    Args:
        df: DataFrame to convert
        error_handling: How to handle non-convertible columns:
            - 'raise': Raise an error
            - 'skip': Skip non-convertible columns
            - 'drop': Drop non-convertible columns

    Returns:
        DataFrame with converted columns
    """
    convertible, non_convertible = _check_convertible_to_float32(df)

    if non_convertible:
        if error_handling == 'raise':
            raise ValueError(
                f"Cannot convert columns to float32: {non_convertible}. "
                f"These columns contain non-numeric data."
            )
        elif error_handling == 'skip':
            # Only convert the convertible columns
            result = df.copy()
            for col in convertible:
                result[col] = df[col].astype('float32')
            return result
        elif error_handling == 'drop':
            # Drop non-convertible columns
            return df[convertible].astype('float32')
        else:
            raise ValueError(f"Invalid error_handling: {error_handling}")

    return df.astype('float32')


def _check_convertible_to_float32(df: pd.DataFrame) -> tuple[list[str], list[str]]:
    """
    Check which columns in a DataFrame can be safely converted to float32.

    Returns:
        tuple: (convertible_columns, non_convertible_columns)
    """
    convertible = []
    non_convertible = []

    for col in df.columns:
        if pd.api.types.is_numeric_dtype(df[col]):
            # Already numeric, can convert
            convertible.append(col)
        elif pd.api.types.is_bool_dtype(df[col]):
            # Boolean can be converted (True->1.0, False->0.0)
            convertible.append(col)
        elif pd.api.types.is_string_dtype(df[col]) or pd.api.types.is_object_dtype(df[col]):
            # Try to convert to numeric
            try:
                pd.to_numeric(df[col], errors='raise')
                convertible.append(col)
            except (ValueError, TypeError):
                non_convertible.append(col)
        else:
            # Other types (datetime, timedelta, categorical, etc.)
            non_convertible.append(col)

    return convertible, non_convertible
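For orientation, a minimal usage sketch of the new helper; the sample DataFrame is invented for illustration, only the import path and function come from the diff:

import pandas as pd
from subsurface.core.structs.base_structures._aux import safe_convert_to_float32

# Invented example data: one numeric column and one text column that cannot be parsed as numbers.
df = pd.DataFrame({"depth": [10.0, 20.5], "lithology": ["shale", "sand"]})

safe_convert_to_float32(df, error_handling="drop")  # keeps only 'depth', cast to float32
safe_convert_to_float32(df, error_handling="skip")  # casts 'depth', leaves 'lithology' untouched
# safe_convert_to_float32(df)                       # default 'raise' -> ValueError naming 'lithology'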
subsurface/core/structs/base_structures/_liquid_earth_mesh.py
@@ -1,121 +1,121 @@
All 121 lines are removed and re-added without textual change, so the file content (as of rc17) is listed once:

import json
import numpy as np
import pandas as pd


class LiquidEarthMesh:
    def __init__(self, vertex=None, cells=None, attributes=None, points_attributes=None, data_attrs=None):
        self.vertex = vertex  # Expected to be a numpy array of shape (N, 3)
        self.cells = cells  # Expected to be a numpy array of shape (M, K)
        self.attributes = attributes
        self.points_attributes = points_attributes
        self.data_attrs = data_attrs if data_attrs is not None else {}

    def to_binary(self, order='F') -> bytes:
        body_ = self._to_bytearray(order)
        header_ = self._set_binary_header()
        header_json = json.dumps(header_)
        header_json_bytes = header_json.encode('utf-8')
        header_json_length = len(header_json_bytes)
        header_json_length_bytes = header_json_length.to_bytes(4, byteorder='little')
        file = header_json_length_bytes + header_json_bytes + body_
        return file

    def _set_binary_header(self):
        header = {
            "vertex_shape"     : self.vertex.shape if self.vertex is not None else [0, 0],
            "cell_shape"       : self.cells.shape if self.cells is not None else [0, 0],
            "cell_attr_shape"  : self.attributes.shape if not self.attributes.empty else [0, 0],
            "vertex_attr_shape": self.points_attributes.shape if not self.points_attributes.empty else [0, 0],
            "cell_attr_names"  : self.attributes.columns.tolist() if not self.attributes.empty else [],
            "cell_attr_types"  : self.attributes.dtypes.astype(str).tolist() if not self.attributes.empty else [],
            "vertex_attr_names": self.points_attributes.columns.tolist() if not self.points_attributes.empty else [],
            "vertex_attr_types": self.points_attributes.dtypes.astype(str).tolist() if not self.points_attributes.empty else [],
            "xarray_attrs"     : self.data_attrs
        }
        return header

    def _to_bytearray(self, order):
        parts = []
        if self.vertex is not None:
            vertex_bytes = self.vertex.astype('float32').tobytes(order)
            parts.append(vertex_bytes)
        if self.cells is not None:
            cells_bytes = self.cells.astype('int32').tobytes(order)
            parts.append(cells_bytes)
        if not self.attributes.empty:
            cell_attr_bytes = self.attributes.values.astype('float32').tobytes(order)
            parts.append(cell_attr_bytes)
        if not self.points_attributes.empty:
            vertex_attr_bytes = self.points_attributes.values.astype('float32').tobytes(order)
            parts.append(vertex_attr_bytes)
        bytearray_le = b''.join(parts)
        return bytearray_le

    @classmethod
    def from_binary(cls, binary_data, order='F'):
        # Read header length
        header_length_bytes = binary_data[:4]
        header_length = int.from_bytes(header_length_bytes, byteorder='little')
        # Read header
        header_json_bytes = binary_data[4:4 + header_length]
        header_json = header_json_bytes.decode('utf-8')
        header = json.loads(header_json)
        # Read body
        body = binary_data[4 + header_length:]
        offset = 0

        # Parse vertices
        vertex_shape = header['vertex_shape']
        if vertex_shape[0] > 0 and vertex_shape[1] > 0:
            num_vertices = np.prod(vertex_shape)
            num_bytes = num_vertices * 4  # float32
            vertex = np.frombuffer(body[offset:offset + num_bytes], dtype=np.float32, count=num_vertices)
            offset += num_bytes
            vertex = vertex.reshape(vertex_shape, order=order)
        else:
            vertex = None

        # Parse cells
        cell_shape = header['cell_shape']
        if cell_shape[0] > 0 and cell_shape[1] > 0:
            num_cells = np.prod(cell_shape)
            num_bytes = num_cells * 4  # int32
            cells = np.frombuffer(body[offset:offset + num_bytes], dtype=np.int32, count=num_cells)
            offset += num_bytes
            cells = cells.reshape(cell_shape, order=order)
        else:
            cells = None

        # Parse cell attributes
        attributes = pd.DataFrame()
        cell_attr_shape = header['cell_attr_shape']
        if cell_attr_shape[0] > 0 and cell_attr_shape[1] > 0:
            num_attrs = np.prod(cell_attr_shape)
            num_bytes = num_attrs * 4  # float32
            cell_attr_values = np.frombuffer(body[offset:offset + num_bytes], dtype=np.float32, count=num_attrs)
            offset += num_bytes
            cell_attr_values = cell_attr_values.reshape(cell_attr_shape, order=order)
            attr_names = header['cell_attr_names']
            attributes = pd.DataFrame(cell_attr_values, columns=attr_names)
        else:
            attributes = None

        # Parse vertex attributes
        points_attributes = pd.DataFrame()
        vertex_attr_shape = header['vertex_attr_shape']
        if vertex_attr_shape[0] > 0 and vertex_attr_shape[1] > 0:
            num_attrs = np.prod(vertex_attr_shape)
            num_bytes = num_attrs * 4  # float32
            vertex_attr_values = np.frombuffer(body[offset:offset + num_bytes], dtype=np.float32, count=num_attrs)
            offset += num_bytes
            vertex_attr_values = vertex_attr_values.reshape(vertex_attr_shape, order=order)
            attr_names = header['vertex_attr_names']
            points_attributes = pd.DataFrame(vertex_attr_values, columns=attr_names)
        else:
            points_attributes = None

        data_attrs = header.get('xarray_attrs', {})

        return cls(vertex=vertex, cells=cells, attributes=attributes, points_attributes=points_attributes, data_attrs=data_attrs)
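The binary layout written by to_binary (a 4-byte little-endian header length, a UTF-8 JSON header, then the raw float32/int32 arrays) can be exercised with a small round trip; the mesh values below are invented for illustration:

import numpy as np
import pandas as pd
from subsurface.core.structs.base_structures._liquid_earth_mesh import LiquidEarthMesh

# Invented triangle mesh: 3 vertices, 1 triangular cell, one attribute per cell and per vertex.
mesh = LiquidEarthMesh(
    vertex=np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0]], dtype="float32"),
    cells=np.array([[0, 1, 2]], dtype="int32"),
    attributes=pd.DataFrame({"density": [2.7]}),
    points_attributes=pd.DataFrame({"temperature": [10.0, 11.0, 12.0]}),
    data_attrs={"crs": "EPSG:32633"},
)

blob = mesh.to_binary()                       # header length + JSON header + packed arrays
restored = LiquidEarthMesh.from_binary(blob)  # header shapes drive how the body is sliced back into arrays
assert np.allclose(restored.vertex, mesh.vertex)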
subsurface/core/structs/base_structures/_unstructured_data_constructor.py
@@ -1,70 +1,70 @@
As above, the removed and re-added lines are identical; listed once:

from typing import Union, Dict, Literal, List

import numpy as np
import pandas as pd
import xarray as xr

from subsurface.core.structs.base_structures.base_structures_enum import SpecialCellCase


def vertex_and_cells_arrays_to_data_array(cells: Union[np.ndarray, Literal["lines", "points"], SpecialCellCase],
                                          vertex: np.ndarray):
    n_vertex = vertex.shape[0]
    if type(cells) is not np.ndarray:
        cells: np.ndarray = _create_default_cells_arg(
            cells=cells,
            n_vertex=n_vertex
        )
    n_cells = cells.shape[0]

    vertex_data_array = xr.DataArray(
        data=vertex,
        dims=['points', 'XYZ'],
        coords={'XYZ': ['X', 'Y', 'Z']}
    )
    cells_data_array = xr.DataArray(cells, dims=['cell', 'nodes'])
    return cells_data_array, n_cells, n_vertex, vertex_data_array


def raw_attributes_to_dict_data_arrays(
        default_attributes_name: str, n_items: int, dims: List[str],
        raw_attributes: Union[None, pd.DataFrame, Dict[str, xr.DataArray]]) \
        -> Dict[str, xr.DataArray]:

    if raw_attributes is None or type(raw_attributes) is pd.DataFrame:
        points_attributes_xarray_dict = {
            default_attributes_name: _data_array_attributes_from_raw_data(
                raw_data=raw_attributes,
                dims=dims,
                n_rows=n_items
            )
        }
    else:
        points_attributes_xarray_dict = raw_attributes
    return points_attributes_xarray_dict


def _create_default_cells_arg(cells: Union[Literal["points", "lines"], SpecialCellCase],
                              n_vertex: int) -> np.ndarray:
    if cells is None or cells == 'points' or cells == SpecialCellCase.POINTS:
        cells_array = np.arange(0, n_vertex).reshape(-1, 1)
    elif cells == 'lines' or cells == SpecialCellCase.LINES:
        a = np.arange(0, n_vertex - 1, dtype=np.int_)
        b = np.arange(1, n_vertex, dtype=np.int_)
        cells_array = np.vstack([a, b]).T
    else:
        raise ValueError("cells must be either None (will default to 'points'),"
                         "'points', 'lines' or a 2D ndarray.")
    return cells_array


def _data_array_attributes_from_raw_data(raw_data: Union[None, pd.DataFrame],
                                         dims: List[str], n_rows: int) -> xr.DataArray:
    if raw_data is None:
        raw_data = pd.DataFrame(np.zeros((n_rows, 0)))

    if type(raw_data) is pd.DataFrame:
        data_array = xr.DataArray(raw_data, dims=dims)
    else:
        raise ValueError("cells_attributes must be either pd.DataFrame or " "None/default.")
    return data_array
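As a quick illustration of the default-connectivity path, passing "lines" instead of an explicit cell array builds consecutive two-node segments; the vertex coordinates are invented:

import numpy as np
from subsurface.core.structs.base_structures._unstructured_data_constructor import (
    vertex_and_cells_arrays_to_data_array,
)

# Invented 4-point polyline along the x axis.
vertex = np.array([[0, 0, 0], [1, 0, 0], [2, 0, 0], [3, 0, 0]], dtype=float)
cells_da, n_cells, n_vertex, vertex_da = vertex_and_cells_arrays_to_data_array("lines", vertex)
print(n_cells, n_vertex)         # 3 4
print(cells_da.values.tolist())  # [[0, 1], [1, 2], [2, 3]]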
subsurface/core/structs/base_structures/base_structures_enum.py
@@ -1,6 +1,6 @@
Again identical on both sides; listed once:

import enum


class SpecialCellCase(enum.Enum):
    POINTS = "points"
    LINES = "lines"