subsurface-terra 2025.1.0rc15-py3-none-any.whl → 2025.1.0rc16-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (81)
  1. subsurface/__init__.py +31 -31
  2. subsurface/_version.py +34 -21
  3. subsurface/api/__init__.py +13 -13
  4. subsurface/api/interfaces/__init__.py +3 -3
  5. subsurface/api/interfaces/stream.py +136 -136
  6. subsurface/api/reader/read_wells.py +78 -78
  7. subsurface/core/geological_formats/boreholes/_combine_trajectories.py +117 -117
  8. subsurface/core/geological_formats/boreholes/_map_attrs_to_survey.py +236 -234
  9. subsurface/core/geological_formats/boreholes/_survey_to_unstruct.py +163 -163
  10. subsurface/core/geological_formats/boreholes/boreholes.py +140 -140
  11. subsurface/core/geological_formats/boreholes/collars.py +26 -26
  12. subsurface/core/geological_formats/boreholes/survey.py +86 -86
  13. subsurface/core/geological_formats/fault.py +47 -47
  14. subsurface/core/reader_helpers/reader_unstruct.py +11 -11
  15. subsurface/core/reader_helpers/readers_data.py +130 -130
  16. subsurface/core/reader_helpers/readers_wells.py +13 -13
  17. subsurface/core/structs/__init__.py +3 -3
  18. subsurface/core/structs/base_structures/__init__.py +2 -2
  19. subsurface/core/structs/base_structures/_liquid_earth_mesh.py +121 -121
  20. subsurface/core/structs/base_structures/_unstructured_data_constructor.py +70 -70
  21. subsurface/core/structs/base_structures/base_structures_enum.py +6 -6
  22. subsurface/core/structs/base_structures/structured_data.py +282 -282
  23. subsurface/core/structs/base_structures/unstructured_data.py +319 -319
  24. subsurface/core/structs/structured_elements/octree_mesh.py +10 -10
  25. subsurface/core/structs/structured_elements/structured_grid.py +59 -59
  26. subsurface/core/structs/structured_elements/structured_mesh.py +9 -9
  27. subsurface/core/structs/unstructured_elements/__init__.py +3 -3
  28. subsurface/core/structs/unstructured_elements/line_set.py +72 -72
  29. subsurface/core/structs/unstructured_elements/point_set.py +43 -43
  30. subsurface/core/structs/unstructured_elements/tetrahedron_mesh.py +35 -35
  31. subsurface/core/structs/unstructured_elements/triangular_surface.py +62 -62
  32. subsurface/core/utils/utils_core.py +38 -38
  33. subsurface/modules/reader/__init__.py +13 -13
  34. subsurface/modules/reader/faults/faults.py +80 -80
  35. subsurface/modules/reader/from_binary.py +46 -46
  36. subsurface/modules/reader/mesh/_GOCAD_mesh.py +82 -82
  37. subsurface/modules/reader/mesh/_trimesh_reader.py +447 -447
  38. subsurface/modules/reader/mesh/csv_mesh_reader.py +53 -53
  39. subsurface/modules/reader/mesh/dxf_reader.py +177 -177
  40. subsurface/modules/reader/mesh/glb_reader.py +30 -30
  41. subsurface/modules/reader/mesh/mx_reader.py +232 -232
  42. subsurface/modules/reader/mesh/obj_reader.py +53 -53
  43. subsurface/modules/reader/mesh/omf_mesh_reader.py +43 -43
  44. subsurface/modules/reader/mesh/surface_reader.py +56 -56
  45. subsurface/modules/reader/mesh/surfaces_api.py +41 -41
  46. subsurface/modules/reader/profiles/__init__.py +3 -3
  47. subsurface/modules/reader/profiles/profiles_core.py +197 -197
  48. subsurface/modules/reader/read_netcdf.py +38 -38
  49. subsurface/modules/reader/topography/__init__.py +7 -7
  50. subsurface/modules/reader/topography/topo_core.py +100 -100
  51. subsurface/modules/reader/volume/read_grav3d.py +478 -428
  52. subsurface/modules/reader/volume/read_volume.py +327 -230
  53. subsurface/modules/reader/volume/segy_reader.py +105 -105
  54. subsurface/modules/reader/volume/seismic.py +173 -173
  55. subsurface/modules/reader/volume/volume_utils.py +43 -43
  56. subsurface/modules/reader/wells/DEP/__init__.py +43 -43
  57. subsurface/modules/reader/wells/DEP/_well_files_reader.py +167 -167
  58. subsurface/modules/reader/wells/DEP/_wells_api.py +61 -61
  59. subsurface/modules/reader/wells/DEP/_welly_reader.py +180 -180
  60. subsurface/modules/reader/wells/DEP/pandas_to_welly.py +212 -212
  61. subsurface/modules/reader/wells/_read_to_df.py +57 -57
  62. subsurface/modules/reader/wells/read_borehole_interface.py +148 -148
  63. subsurface/modules/reader/wells/wells_utils.py +68 -68
  64. subsurface/modules/tools/mocking_aux.py +104 -104
  65. subsurface/modules/visualization/__init__.py +2 -2
  66. subsurface/modules/visualization/to_pyvista.py +320 -320
  67. subsurface/modules/writer/to_binary.py +12 -12
  68. subsurface/modules/writer/to_rex/common.py +78 -78
  69. subsurface/modules/writer/to_rex/data_struct.py +74 -74
  70. subsurface/modules/writer/to_rex/gempy_to_rexfile.py +791 -791
  71. subsurface/modules/writer/to_rex/material_encoder.py +44 -44
  72. subsurface/modules/writer/to_rex/mesh_encoder.py +152 -152
  73. subsurface/modules/writer/to_rex/to_rex.py +115 -115
  74. subsurface/modules/writer/to_rex/utils.py +15 -15
  75. subsurface/optional_requirements.py +116 -116
  76. {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc16.dist-info}/METADATA +194 -194
  77. subsurface_terra-2025.1.0rc16.dist-info/RECORD +98 -0
  78. {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc16.dist-info}/WHEEL +1 -1
  79. {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc16.dist-info}/licenses/LICENSE +203 -203
  80. subsurface_terra-2025.1.0rc15.dist-info/RECORD +0 -98
  81. {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc16.dist-info}/top_level.txt +0 -0
@@ -1,43 +1,43 @@
-def borehole_location_to_unstruct(reader_helper: ReaderFilesHelper,
-                                  add_number_segments: bool = True) -> UnstructuredData:
-    from . import _wells_api
-    return _wells_api.borehole_location_to_unstruct(reader_helper, add_number_segments)
-
-
-def read_survey_df_from_las(reader_helper: ReaderFilesHelper, well_name: str) -> 'pd.DataFrame':
-    from .DEP import _well_files_reader
-    return _well_files_reader.read_survey_df_from_las(reader_helper, well_name)
-
-
-def read_assay_df_from_las(reader_helper: ReaderFilesHelper, well_name: str) -> 'pd.DataFrame':
-    from .DEP import _well_files_reader
-    return _well_files_reader.read_assay_df_from_las(reader_helper, well_name)
-
-
-def welly_to_subsurface(wts: WellyToSubsurfaceHelper,
-                        elev=True,
-                        n_vertex_per_well=50,
-                        convert_lith=True,
-                        table: list['striplog.Component'] = None,
-                        **kwargs) -> UnstructuredData:
-    """Method to convert well data to `subsurface.UnstructuredData`
-
-    Args:
-        elev (bool): In general the (x, y, z) array of positions will have
-            z as TVD, which is positive down. If `elev` is True, positive
-            will be upwards.
-        n_vertex_per_well (int): Number of vertex used to describe the geometry of the
-            well.
-        return_element (bool): if True return a `subsurface.LineSet` instead
-        convert_lith (bool): if True convert lith from stiplog to curve
-        table (List[Striplog.Component]): List of components to map lithologies
-            to value.
-        **kwargs:
-            `Well.location.trajectory` kwargs
-
-    Returns:
-
-    """
-    from . import _welly_reader
-    return _welly_reader.welly_to_subsurface(wts, elev, n_vertex_per_well, convert_lith, table, **kwargs)
-
+def borehole_location_to_unstruct(reader_helper: ReaderFilesHelper,
+                                  add_number_segments: bool = True) -> UnstructuredData:
+    from . import _wells_api
+    return _wells_api.borehole_location_to_unstruct(reader_helper, add_number_segments)
+
+
+def read_survey_df_from_las(reader_helper: ReaderFilesHelper, well_name: str) -> 'pd.DataFrame':
+    from .DEP import _well_files_reader
+    return _well_files_reader.read_survey_df_from_las(reader_helper, well_name)
+
+
+def read_assay_df_from_las(reader_helper: ReaderFilesHelper, well_name: str) -> 'pd.DataFrame':
+    from .DEP import _well_files_reader
+    return _well_files_reader.read_assay_df_from_las(reader_helper, well_name)
+
+
+def welly_to_subsurface(wts: WellyToSubsurfaceHelper,
+                        elev=True,
+                        n_vertex_per_well=50,
+                        convert_lith=True,
+                        table: list['striplog.Component'] = None,
+                        **kwargs) -> UnstructuredData:
+    """Method to convert well data to `subsurface.UnstructuredData`
+
+    Args:
+        elev (bool): In general the (x, y, z) array of positions will have
+            z as TVD, which is positive down. If `elev` is True, positive
+            will be upwards.
+        n_vertex_per_well (int): Number of vertex used to describe the geometry of the
+            well.
+        return_element (bool): if True return a `subsurface.LineSet` instead
+        convert_lith (bool): if True convert lith from stiplog to curve
+        table (List[Striplog.Component]): List of components to map lithologies
+            to value.
+        **kwargs:
+            `Well.location.trajectory` kwargs
+
+    Returns:
+
+    """
+    from . import _welly_reader
+    return _welly_reader.welly_to_subsurface(wts, elev, n_vertex_per_well, convert_lith, table, **kwargs)
+
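
The `welly_to_subsurface` wrapper above only forwards to the private `_welly_reader` implementation, so a caller just needs a populated `WellyToSubsurfaceHelper`. A minimal call sketch, assuming `wts` has already been built (for instance from the dataframes returned by `read_borehole_files` shown further down in this diff):

    # Sketch only: `wts` is assumed to be a WellyToSubsurfaceHelper already populated
    # with collar/survey/lith dataframes; building it is not shown in this hunk.
    unstruct = welly_to_subsurface(
        wts,
        elev=True,              # keep z positive upwards rather than TVD positive-down
        n_vertex_per_well=50,   # resampling resolution of each well trajectory
        convert_lith=True,      # map striplog intervals onto the trajectory as a curve
        table=None,             # optional list of striplog.Component lithology mappings
    )
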
@@ -1,167 +1,167 @@
-import pandas as pd
-import warnings
-from typing import Dict
-
-from subsurface.reader.readers_data import ReaderFilesHelper, ReaderWellsHelper, SupportedFormats
-from subsurface.reader.wells.wells_utils import add_tops_from_base_and_altitude_in_place
-
-
-def read_borehole_files(reader_wells_helper: ReaderWellsHelper) -> Dict[str, pd.DataFrame]:
-    data_frames = dict()
-
-    data_frames['collar_df'] = read_collar(reader_wells_helper.reader_collars_args)
-
-    data_frames['survey_df'] = read_survey(reader_wells_helper.reader_survey_args)
-
-    if reader_wells_helper.reader_lith_args is not None:
-        data_frames['lith_df'] = read_lith(reader_wells_helper.reader_lith_args)
-
-    if reader_wells_helper.reader_attr_args is not None:
-        attributes_ = list()
-        for e in reader_wells_helper.reader_attr_args:
-            attributes_.append(read_attributes(e))
-        data_frames['attrib_dfs'] = attributes_
-
-    return data_frames
-
-
-def read_collar(reader_helper: ReaderFilesHelper) -> pd.DataFrame:
-    if reader_helper.usecols is None: reader_helper.usecols = [0, 1, 2, 3]
-    if reader_helper.index_col is False: reader_helper.index_col = 0
-
-    # Check file_or_buffer type
-    d = check_format_and_read_to_df(reader_helper)
-    map_rows_and_cols_inplace(d, reader_helper)
-
-    return d
-
-
-def read_survey(reader_helper: ReaderFilesHelper):
-    if reader_helper.index_col is False: reader_helper.index_col = 0
-
-    d = check_format_and_read_to_df(reader_helper)
-    map_rows_and_cols_inplace(d, reader_helper)
-
-    d_no_singles = _validate_survey_data(d)
-
-    return d_no_singles
-
-
-def read_lith(reader_helper: ReaderFilesHelper):
-    """Columns MUST contain:
-    - top
-    - base
-    - component lith
-    """
-    if reader_helper.index_col is False: reader_helper.index_col = 0
-
-    d = check_format_and_read_to_df(reader_helper)
-    map_rows_and_cols_inplace(d, reader_helper)
-    lith_df = _validate_lith_data(d, reader_helper)
-
-    return lith_df
-
-
-def read_attributes(reader_helper: ReaderFilesHelper) -> pd.DataFrame:
-    if reader_helper.index_col is False: reader_helper.index_col = 0
-
-    d = check_format_and_read_to_df(reader_helper)
-
-    if reader_helper.columns_map is not None: d.rename(reader_helper.columns_map, axis=1, inplace=True)
-    if reader_helper.drop_cols is not None: d.drop(reader_helper.drop_cols, axis=1, inplace=True)
-
-    _validate_attr_data(d)
-    return d
-
-
-def read_survey_df_from_las(reader_helper: ReaderFilesHelper, well_name: str) -> pd.DataFrame:
-    """
-    Reads a las file and returns a dataframe.
-
-    """
-    from subsurface.reader.wells._welly_reader import _create_welly_well_from_las
-    welly_well = _create_welly_well_from_las(well_name, reader_helper.file_or_buffer)
-    survey_df = welly_well.df()[reader_helper.usecols]
-    map_rows_and_cols_inplace(survey_df, reader_helper)
-    survey_df["well_name"] = well_name
-    survey_df.set_index("well_name", inplace=True)
-    return survey_df
-
-
-def read_assay_df_from_las(reader_helper: ReaderFilesHelper, well_name: str) -> pd.DataFrame:
-    from subsurface.reader.wells._welly_reader import _create_welly_well_from_las
-    welly_well = _create_welly_well_from_las(well_name, reader_helper.file_or_buffer)
-    assay_df = welly_well.df()
-    assay_df["well_name"] = well_name
-    assay_df.set_index("well_name", inplace=True)
-    return assay_df
-
-
-def check_format_and_read_to_df(reader_helper: ReaderFilesHelper) -> pd.DataFrame:
-    if reader_helper.format == ".json":
-        d = pd.read_json(reader_helper.file_or_buffer, orient='split')
-    elif reader_helper.is_file_in_disk:
-        reader = _get_reader(reader_helper.format)
-        d = reader(reader_helper.file_or_buffer, **reader_helper.pandas_reader_kwargs)
-    elif reader_helper.is_bytes_string:
-        reader = _get_reader('.csv')
-        d = reader(reader_helper.file_or_buffer, **reader_helper.pandas_reader_kwargs)
-    elif reader_helper.is_python_dict:
-        reader = _get_reader('dict')
-        d = reader(reader_helper.file_or_buffer)
-    else:
-        raise AttributeError('file_or_buffer must be either a path or a dict')
-
-    if type(d.columns) is str: d.columns = d.columns.str.strip() # Remove spaces at the beginning and end
-    if type(d.index) is str: d.index = d.index.str.strip() # Remove spaces at the beginning and end
-    return d
-
-
-
-
-def _get_reader(file_format):
-    if file_format == SupportedFormats.XLXS:
-        reader = pd.read_excel
-    elif file_format == 'dict':
-        reader = _dict_reader
-    elif file_format == SupportedFormats.CSV:
-        reader = pd.read_csv
-    elif file_format == SupportedFormats.JSON:
-        reader = _dict_reader
-    else:
-        raise ValueError(f"Subsurface is not able to read the following extension: {file_format}")
-    return reader
-
-
-def _dict_reader(dict_):
-    """
-
-    Args:
-        dict_: data, index, columns
-
-    """
-    return pd.DataFrame(data=dict_['data'],
-                        columns=dict_['columns'],
-                        index=dict_['index'])
-
-
-
-
-def _validate_lith_data(d: pd.DataFrame, reader_helper: ReaderFilesHelper) -> pd.DataFrame:
-    given_top = pd.np.isin(['top', 'base', 'component lith'], d.columns).all()
-    given_altitude_and_base = pd.np.isin(['altitude', 'base', 'component lith'], d.columns).all()
-
-    if given_altitude_and_base and not given_top:
-        d = add_tops_from_base_and_altitude_in_place(d, reader_helper.index_col, 'base', 'altitude')
-    elif not given_top and not given_altitude_and_base:
-        raise ValueError('basis column must be present in the file. Use '
-                         'columns_map to assign column names to these fields.')
-    lith_df = d[['top', 'base', 'component lith']]
-    return lith_df
-
-
-def _validate_attr_data(d):
-    assert d.columns.isin(['basis']).any(), 'basis column' \
-                                            'must be present in the file.' \
-                                            'Use columns_map to assign' \
-                                            'column names to these fields.'
+import pandas as pd
+import warnings
+from typing import Dict
+
+from subsurface.reader.readers_data import ReaderFilesHelper, ReaderWellsHelper, SupportedFormats
+from subsurface.reader.wells.wells_utils import add_tops_from_base_and_altitude_in_place
+
+
+def read_borehole_files(reader_wells_helper: ReaderWellsHelper) -> Dict[str, pd.DataFrame]:
+    data_frames = dict()
+
+    data_frames['collar_df'] = read_collar(reader_wells_helper.reader_collars_args)
+
+    data_frames['survey_df'] = read_survey(reader_wells_helper.reader_survey_args)
+
+    if reader_wells_helper.reader_lith_args is not None:
+        data_frames['lith_df'] = read_lith(reader_wells_helper.reader_lith_args)
+
+    if reader_wells_helper.reader_attr_args is not None:
+        attributes_ = list()
+        for e in reader_wells_helper.reader_attr_args:
+            attributes_.append(read_attributes(e))
+        data_frames['attrib_dfs'] = attributes_
+
+    return data_frames
+
+
+def read_collar(reader_helper: ReaderFilesHelper) -> pd.DataFrame:
+    if reader_helper.usecols is None: reader_helper.usecols = [0, 1, 2, 3]
+    if reader_helper.index_col is False: reader_helper.index_col = 0
+
+    # Check file_or_buffer type
+    d = check_format_and_read_to_df(reader_helper)
+    map_rows_and_cols_inplace(d, reader_helper)
+
+    return d
+
+
+def read_survey(reader_helper: ReaderFilesHelper):
+    if reader_helper.index_col is False: reader_helper.index_col = 0
+
+    d = check_format_and_read_to_df(reader_helper)
+    map_rows_and_cols_inplace(d, reader_helper)
+
+    d_no_singles = _validate_survey_data(d)
+
+    return d_no_singles
+
+
+def read_lith(reader_helper: ReaderFilesHelper):
+    """Columns MUST contain:
+    - top
+    - base
+    - component lith
+    """
+    if reader_helper.index_col is False: reader_helper.index_col = 0
+
+    d = check_format_and_read_to_df(reader_helper)
+    map_rows_and_cols_inplace(d, reader_helper)
+    lith_df = _validate_lith_data(d, reader_helper)
+
+    return lith_df
+
+
+def read_attributes(reader_helper: ReaderFilesHelper) -> pd.DataFrame:
+    if reader_helper.index_col is False: reader_helper.index_col = 0
+
+    d = check_format_and_read_to_df(reader_helper)
+
+    if reader_helper.columns_map is not None: d.rename(reader_helper.columns_map, axis=1, inplace=True)
+    if reader_helper.drop_cols is not None: d.drop(reader_helper.drop_cols, axis=1, inplace=True)
+
+    _validate_attr_data(d)
+    return d
+
+
+def read_survey_df_from_las(reader_helper: ReaderFilesHelper, well_name: str) -> pd.DataFrame:
+    """
+    Reads a las file and returns a dataframe.
+
+    """
+    from subsurface.reader.wells._welly_reader import _create_welly_well_from_las
+    welly_well = _create_welly_well_from_las(well_name, reader_helper.file_or_buffer)
+    survey_df = welly_well.df()[reader_helper.usecols]
+    map_rows_and_cols_inplace(survey_df, reader_helper)
+    survey_df["well_name"] = well_name
+    survey_df.set_index("well_name", inplace=True)
+    return survey_df
+
+
+def read_assay_df_from_las(reader_helper: ReaderFilesHelper, well_name: str) -> pd.DataFrame:
+    from subsurface.reader.wells._welly_reader import _create_welly_well_from_las
+    welly_well = _create_welly_well_from_las(well_name, reader_helper.file_or_buffer)
+    assay_df = welly_well.df()
+    assay_df["well_name"] = well_name
+    assay_df.set_index("well_name", inplace=True)
+    return assay_df
+
+
+def check_format_and_read_to_df(reader_helper: ReaderFilesHelper) -> pd.DataFrame:
+    if reader_helper.format == ".json":
+        d = pd.read_json(reader_helper.file_or_buffer, orient='split')
+    elif reader_helper.is_file_in_disk:
+        reader = _get_reader(reader_helper.format)
+        d = reader(reader_helper.file_or_buffer, **reader_helper.pandas_reader_kwargs)
+    elif reader_helper.is_bytes_string:
+        reader = _get_reader('.csv')
+        d = reader(reader_helper.file_or_buffer, **reader_helper.pandas_reader_kwargs)
+    elif reader_helper.is_python_dict:
+        reader = _get_reader('dict')
+        d = reader(reader_helper.file_or_buffer)
+    else:
+        raise AttributeError('file_or_buffer must be either a path or a dict')
+
+    if type(d.columns) is str: d.columns = d.columns.str.strip() # Remove spaces at the beginning and end
+    if type(d.index) is str: d.index = d.index.str.strip() # Remove spaces at the beginning and end
+    return d
+
+
+
+
+def _get_reader(file_format):
+    if file_format == SupportedFormats.XLXS:
+        reader = pd.read_excel
+    elif file_format == 'dict':
+        reader = _dict_reader
+    elif file_format == SupportedFormats.CSV:
+        reader = pd.read_csv
+    elif file_format == SupportedFormats.JSON:
+        reader = _dict_reader
+    else:
+        raise ValueError(f"Subsurface is not able to read the following extension: {file_format}")
+    return reader
+
+
+def _dict_reader(dict_):
+    """
+
+    Args:
+        dict_: data, index, columns
+
+    """
+    return pd.DataFrame(data=dict_['data'],
+                        columns=dict_['columns'],
+                        index=dict_['index'])
+
+
+
+
+def _validate_lith_data(d: pd.DataFrame, reader_helper: ReaderFilesHelper) -> pd.DataFrame:
+    given_top = pd.np.isin(['top', 'base', 'component lith'], d.columns).all()
+    given_altitude_and_base = pd.np.isin(['altitude', 'base', 'component lith'], d.columns).all()
+
+    if given_altitude_and_base and not given_top:
+        d = add_tops_from_base_and_altitude_in_place(d, reader_helper.index_col, 'base', 'altitude')
+    elif not given_top and not given_altitude_and_base:
+        raise ValueError('basis column must be present in the file. Use '
+                         'columns_map to assign column names to these fields.')
+    lith_df = d[['top', 'base', 'component lith']]
+    return lith_df
+
+
+def _validate_attr_data(d):
+    assert d.columns.isin(['basis']).any(), 'basis column' \
+                                            'must be present in the file.' \
+                                            'Use columns_map to assign' \
+                                            'column names to these fields.'
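
For reference, `check_format_and_read_to_df` dispatches on the helper's format flags, and plain Python dicts are routed through `_dict_reader`, which expects the pandas 'split' orientation keys named in its docstring (`data`, `index`, `columns`). A standalone sketch of that payload shape, with hypothetical collar values:

    import pandas as pd

    # 'split'-style payload: the keys _dict_reader reads (data, index, columns).
    collar_payload = {
        "data": [[451200.0, 5769800.0, 512.3],
                 [451340.0, 5769910.0, 509.8]],
        "index": ["well_a", "well_b"],          # hypothetical well names
        "columns": ["x", "y", "altitude"],
    }

    collars = pd.DataFrame(data=collar_payload["data"],
                           columns=collar_payload["columns"],
                           index=collar_payload["index"])
    print(collars)
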
@@ -1,61 +1,61 @@
-from typing import List
-
-import pandas as pd
-
-from .pandas_to_welly import WellyToSubsurfaceHelper
-from ._well_files_reader import read_borehole_files, read_collar
-from ._welly_reader import welly_to_subsurface
-
-from subsurface.reader.readers_data import ReaderWellsHelper, ReaderFilesHelper
-from subsurface.structs import UnstructuredData
-
-
-def read_wells_to_unstruct(reader_wells_helper: ReaderWellsHelper,
-                           backend='welly', n_vertex_per_well=80,
-                           table: List['welly.Component'] = None) -> UnstructuredData:
-    """Read from csv files (or excel) to `subsurface.Unstructured` object.
-
-    Args:
-        backend (string): Which library use for reading and processing of data.
-            So far: welly
-        table (List[Striplog.Component]): List of components to map lithologies
-            to value.
-        n_vertex_per_well (int): Number of vertex used to describe the geometry of the
-            well.
-
-    Returns:
-        `subsurface.UnstructuredData`: if `return_welly` also the
-        welly object
-
-    """
-    pandas_dict = read_borehole_files(reader_wells_helper)
-
-    if backend == 'welly':
-        wts = WellyToSubsurfaceHelper(**pandas_dict)
-        unstruct = welly_to_subsurface(wts, n_vertex_per_well=n_vertex_per_well, table=table)
-    else:
-        raise AttributeError('Only welly is available at the moment')
-
-    return unstruct
-
-
-def borehole_location_to_unstruct(reader_helper: ReaderFilesHelper,
-                                  add_number_segments: bool = True) -> UnstructuredData:
-    collars = read_collar(reader_helper)
-    collars_attributes = pd.DataFrame()
-
-    # Remove duplicates
-    collars_single_well = collars[~collars.index.duplicated()]
-    wells_names = collars_single_well.index
-
-    if add_number_segments is True:
-        number_of_segments = collars.index.value_counts(sort=False).values
-        collars_attributes['number_segments'] = number_of_segments
-
-    ud = UnstructuredData.from_array(
-        vertex=collars_single_well[['x', 'y', 'altitude']].values.astype('float32'),
-        cells="points",
-        cells_attr=collars_attributes.astype('float32'),
-        xarray_attributes={"wells_names": wells_names.values.tolist()}) # TODO: This should be int16!
-
-    return ud
+from typing import List
+
+import pandas as pd
+
+from .pandas_to_welly import WellyToSubsurfaceHelper
+from ._well_files_reader import read_borehole_files, read_collar
+from ._welly_reader import welly_to_subsurface
+
+from subsurface.reader.readers_data import ReaderWellsHelper, ReaderFilesHelper
+from subsurface.structs import UnstructuredData
+
+
+def read_wells_to_unstruct(reader_wells_helper: ReaderWellsHelper,
+                           backend='welly', n_vertex_per_well=80,
+                           table: List['welly.Component'] = None) -> UnstructuredData:
+    """Read from csv files (or excel) to `subsurface.Unstructured` object.
+
+    Args:
+        backend (string): Which library use for reading and processing of data.
+            So far: welly
+        table (List[Striplog.Component]): List of components to map lithologies
+            to value.
+        n_vertex_per_well (int): Number of vertex used to describe the geometry of the
+            well.
+
+    Returns:
+        `subsurface.UnstructuredData`: if `return_welly` also the
+        welly object
+
+    """
+    pandas_dict = read_borehole_files(reader_wells_helper)
+
+    if backend == 'welly':
+        wts = WellyToSubsurfaceHelper(**pandas_dict)
+        unstruct = welly_to_subsurface(wts, n_vertex_per_well=n_vertex_per_well, table=table)
+    else:
+        raise AttributeError('Only welly is available at the moment')
+
+    return unstruct
+
+
+def borehole_location_to_unstruct(reader_helper: ReaderFilesHelper,
+                                  add_number_segments: bool = True) -> UnstructuredData:
+    collars = read_collar(reader_helper)
+    collars_attributes = pd.DataFrame()
+
+    # Remove duplicates
+    collars_single_well = collars[~collars.index.duplicated()]
+    wells_names = collars_single_well.index
+
+    if add_number_segments is True:
+        number_of_segments = collars.index.value_counts(sort=False).values
+        collars_attributes['number_segments'] = number_of_segments
+
+    ud = UnstructuredData.from_array(
+        vertex=collars_single_well[['x', 'y', 'altitude']].values.astype('float32'),
+        cells="points",
+        cells_attr=collars_attributes.astype('float32'),
+        xarray_attributes={"wells_names": wells_names.values.tolist()}) # TODO: This should be int16!
+
+    return ud
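
`borehole_location_to_unstruct` turns the collar table into a point cloud with one vertex per well, optionally attaching the number of survey segments per well as a cell attribute. A usage sketch, assuming `ReaderFilesHelper` can be constructed from just the collar file path (its full constructor is not part of this diff) and that the import paths follow the deprecated `subsurface.reader` layout used in this hunk:

    # Assumed import paths and constructor; only the attribute names used above
    # (file_or_buffer, usecols, index_col, ...) are confirmed by this diff.
    from subsurface.reader.readers_data import ReaderFilesHelper
    from subsurface.reader.wells import borehole_location_to_unstruct

    collar_helper = ReaderFilesHelper(file_or_buffer="collars.csv")  # hypothetical file

    # read_collar defaults usecols to [0, 1, 2, 3] and index_col to 0, so the first
    # four CSV columns are expected to be well name, x, y and altitude.
    ud = borehole_location_to_unstruct(collar_helper, add_number_segments=True)
    print(ud)  # UnstructuredData with one "points" cell per unique collar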