subsurface-terra 2025.1.0rc15-py3-none-any.whl → 2025.1.0rc17-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. subsurface/__init__.py +31 -31
  2. subsurface/_version.py +34 -21
  3. subsurface/api/__init__.py +13 -13
  4. subsurface/api/interfaces/__init__.py +3 -3
  5. subsurface/api/interfaces/stream.py +136 -136
  6. subsurface/api/reader/read_wells.py +78 -78
  7. subsurface/core/geological_formats/boreholes/_combine_trajectories.py +117 -117
  8. subsurface/core/geological_formats/boreholes/_map_attrs_to_survey.py +236 -234
  9. subsurface/core/geological_formats/boreholes/_survey_to_unstruct.py +163 -163
  10. subsurface/core/geological_formats/boreholes/boreholes.py +140 -140
  11. subsurface/core/geological_formats/boreholes/collars.py +26 -26
  12. subsurface/core/geological_formats/boreholes/survey.py +86 -86
  13. subsurface/core/geological_formats/fault.py +47 -47
  14. subsurface/core/reader_helpers/reader_unstruct.py +11 -11
  15. subsurface/core/reader_helpers/readers_data.py +130 -130
  16. subsurface/core/reader_helpers/readers_wells.py +13 -13
  17. subsurface/core/structs/__init__.py +3 -3
  18. subsurface/core/structs/base_structures/__init__.py +2 -2
  19. subsurface/core/structs/base_structures/_aux.py +69 -0
  20. subsurface/core/structs/base_structures/_liquid_earth_mesh.py +121 -121
  21. subsurface/core/structs/base_structures/_unstructured_data_constructor.py +70 -70
  22. subsurface/core/structs/base_structures/base_structures_enum.py +6 -6
  23. subsurface/core/structs/base_structures/structured_data.py +282 -282
  24. subsurface/core/structs/base_structures/unstructured_data.py +338 -319
  25. subsurface/core/structs/structured_elements/octree_mesh.py +10 -10
  26. subsurface/core/structs/structured_elements/structured_grid.py +59 -59
  27. subsurface/core/structs/structured_elements/structured_mesh.py +9 -9
  28. subsurface/core/structs/unstructured_elements/__init__.py +3 -3
  29. subsurface/core/structs/unstructured_elements/line_set.py +72 -72
  30. subsurface/core/structs/unstructured_elements/point_set.py +43 -43
  31. subsurface/core/structs/unstructured_elements/tetrahedron_mesh.py +35 -35
  32. subsurface/core/structs/unstructured_elements/triangular_surface.py +62 -62
  33. subsurface/core/utils/utils_core.py +38 -38
  34. subsurface/modules/reader/__init__.py +13 -13
  35. subsurface/modules/reader/faults/faults.py +80 -80
  36. subsurface/modules/reader/from_binary.py +46 -46
  37. subsurface/modules/reader/mesh/_GOCAD_mesh.py +82 -82
  38. subsurface/modules/reader/mesh/_trimesh_reader.py +447 -447
  39. subsurface/modules/reader/mesh/csv_mesh_reader.py +53 -53
  40. subsurface/modules/reader/mesh/dxf_reader.py +177 -177
  41. subsurface/modules/reader/mesh/glb_reader.py +30 -30
  42. subsurface/modules/reader/mesh/mx_reader.py +232 -232
  43. subsurface/modules/reader/mesh/obj_reader.py +53 -53
  44. subsurface/modules/reader/mesh/omf_mesh_reader.py +43 -43
  45. subsurface/modules/reader/mesh/surface_reader.py +56 -56
  46. subsurface/modules/reader/mesh/surfaces_api.py +41 -41
  47. subsurface/modules/reader/profiles/__init__.py +3 -3
  48. subsurface/modules/reader/profiles/profiles_core.py +197 -197
  49. subsurface/modules/reader/read_netcdf.py +38 -38
  50. subsurface/modules/reader/topography/__init__.py +7 -7
  51. subsurface/modules/reader/topography/topo_core.py +100 -100
  52. subsurface/modules/reader/volume/read_grav3d.py +447 -428
  53. subsurface/modules/reader/volume/read_volume.py +327 -230
  54. subsurface/modules/reader/volume/segy_reader.py +105 -105
  55. subsurface/modules/reader/volume/seismic.py +173 -173
  56. subsurface/modules/reader/volume/volume_utils.py +43 -43
  57. subsurface/modules/reader/wells/DEP/__init__.py +43 -43
  58. subsurface/modules/reader/wells/DEP/_well_files_reader.py +167 -167
  59. subsurface/modules/reader/wells/DEP/_wells_api.py +61 -61
  60. subsurface/modules/reader/wells/DEP/_welly_reader.py +180 -180
  61. subsurface/modules/reader/wells/DEP/pandas_to_welly.py +212 -212
  62. subsurface/modules/reader/wells/_read_to_df.py +57 -57
  63. subsurface/modules/reader/wells/read_borehole_interface.py +148 -148
  64. subsurface/modules/reader/wells/wells_utils.py +68 -68
  65. subsurface/modules/tools/mocking_aux.py +104 -104
  66. subsurface/modules/visualization/__init__.py +2 -2
  67. subsurface/modules/visualization/to_pyvista.py +320 -320
  68. subsurface/modules/writer/to_binary.py +12 -12
  69. subsurface/modules/writer/to_rex/common.py +78 -78
  70. subsurface/modules/writer/to_rex/data_struct.py +74 -74
  71. subsurface/modules/writer/to_rex/gempy_to_rexfile.py +791 -791
  72. subsurface/modules/writer/to_rex/material_encoder.py +44 -44
  73. subsurface/modules/writer/to_rex/mesh_encoder.py +152 -152
  74. subsurface/modules/writer/to_rex/to_rex.py +115 -115
  75. subsurface/modules/writer/to_rex/utils.py +15 -15
  76. subsurface/optional_requirements.py +116 -116
  77. {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc17.dist-info}/METADATA +194 -194
  78. subsurface_terra-2025.1.0rc17.dist-info/RECORD +99 -0
  79. {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc17.dist-info}/WHEEL +1 -1
  80. {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc17.dist-info}/licenses/LICENSE +203 -203
  81. subsurface_terra-2025.1.0rc15.dist-info/RECORD +0 -98
  82. {subsurface_terra-2025.1.0rc15.dist-info → subsurface_terra-2025.1.0rc17.dist-info}/top_level.txt +0 -0
@@ -1,234 +1,236 @@
- import numpy as np
- import pandas as pd
- import xarray as xr
- from scipy.interpolate import interp1d
- from typing import Tuple, Optional, Union, List, Any
-
- from ...structs.base_structures import UnstructuredData
- from ...structs.base_structures._unstructured_data_constructor import raw_attributes_to_dict_data_arrays
- from ...structs.unstructured_elements import LineSet
-
-
- def combine_survey_and_attrs(attrs: pd.DataFrame, survey_trajectory: LineSet,well_id_mapper: dict[str, int]) -> UnstructuredData:
-     # Import moved to top for clarity and possibly avoiding repeated imports if called multiple times
-
-     # Ensure all columns in lith exist in new_attrs, if not, add them as NaN
-     new_attrs = _map_attrs_to_measured_depths(attrs, survey_trajectory, well_id_mapper)
-
-     # Construct the final xarray dict without intermediate variable
-     points_attributes_xarray_dict: dict[str, xr.DataArray] = raw_attributes_to_dict_data_arrays(
-         default_attributes_name="vertex_attrs",
-         n_items=survey_trajectory.data.data["vertex_attrs"].shape[0], # TODO: Can I look this on new_attrs to remove line 11?
-         dims=["points", "vertex_attr"],
-         raw_attributes=new_attrs
-     )
-
-     # Inline construction of UnstructuredData
-     return UnstructuredData.from_data_arrays_dict(
-         xarray_dict={
-             "vertex" : survey_trajectory.data.data["vertex"],
-             "cells" : survey_trajectory.data.data["cells"],
-             "vertex_attrs": points_attributes_xarray_dict["vertex_attrs"],
-             "cell_attrs" : survey_trajectory.data.data["cell_attrs"]
-         },
-         xarray_attributes=survey_trajectory.data.data.attrs,
-         default_cells_attributes_name=survey_trajectory.data.cells_attr_name,
-         default_points_attributes_name=survey_trajectory.data.vertex_attr_name
-     )
-
- def _prepare_categorical_data(attrs: pd.DataFrame) -> pd.DataFrame:
-     """
-     Prepare categorical data for interpolation by converting categorical columns to numeric IDs.
-
-     Args:
-         attrs: DataFrame containing attribute data
-
-     Returns:
-         Modified DataFrame with categorical data prepared for interpolation
-     """
-     # Create a copy to avoid modifying the original
-     attrs_copy = attrs.copy()
-
-     # If component lith exists but lith_ids doesn't, create lith_ids
-     if 'component lith' in attrs_copy.columns and 'lith_ids' not in attrs_copy.columns:
-         attrs_copy['lith_ids'], _ = pd.factorize(attrs_copy['component lith'], use_na_sentinel=True)
-
-     return attrs_copy
-
-
- def _prepare_new_attributes(attrs: pd.DataFrame, survey_trajectory: LineSet) -> pd.DataFrame:
-     """
-     Prepare the new attributes DataFrame by adding missing columns from attrs.
-
-     Args:
-         attrs: DataFrame containing attribute data
-         survey_trajectory: LineSet containing trajectory data
-
-     Returns:
-         New attributes DataFrame with all necessary columns
-     """
-     # Start with a copy of the existing attributes DataFrame
-     new_attrs = survey_trajectory.data.points_attributes.copy()
-
-     # Add missing columns from attrs, preserving their dtypes
-     for col in attrs.columns.difference(new_attrs.columns):
-         new_attrs[col] = np.nan if pd.api.types.is_numeric_dtype(attrs[col]) else None
-
-     return new_attrs
-
-
- def _get_interpolation_locations(attrs_well: pd.DataFrame, well_name: str) -> np.ndarray:
-     """
-     Determine the locations to use for interpolation based on top and base values.
-
-     Args:
-         attrs_well: DataFrame containing well attribute data
-         well_name: Name of the current well
-
-     Returns:
-         Array of location values to use for interpolation
-     """
-     if "base" not in attrs_well.columns:
-         raise ValueError(f"Base column must be present in the file for well '{well_name}'.")
-     elif "top" not in attrs_well.columns:
-         return attrs_well['base'].values
-     else:
-         return ((attrs_well['top'] + attrs_well['base']) / 2).values
-
-
- def _nearest_neighbor_categorical_interpolation(
-         x_locations: np.ndarray,
-         y_values: np.ndarray,
-         target_depths: np.ndarray
- ) -> np.ndarray:
-     """
-     Custom nearest neighbor interpolation for categorical data.
-
-     This function finds the nearest source point for each target point
-     and assigns the corresponding categorical value.
-
-     Args:
-         x_locations: Array of source locations
-         y_values: Array of categorical values at source locations
-         target_depths: Array of target depths for interpolation
-
-     Returns:
-         Array of interpolated categorical values
-     """
-     # Initialize output array with NaN or None values
-     result = np.full(target_depths.shape, np.nan, dtype=object)
-
-     # For each target depth, find the nearest source location
-     for i, depth in enumerate(target_depths):
-         # Calculate distances to all source locations
-         distances = np.abs(x_locations - depth)
-
-         # Find the index of the minimum distance
-         if len(distances) > 0:
-             nearest_idx = np.argmin(distances)
-             result[i] = y_values[nearest_idx]
-
-     return result
-
-
- def _interpolate_attribute(
-         attr_values: pd.Series,
-         x_locations: np.ndarray,
-         target_depths: np.ndarray,
-         column_name: str,
-         is_categorical: bool
- ) -> np.ndarray:
-     """
-     Interpolate attribute values to target depths.
-
-     Args:
-         attr_values: Series containing attribute values
-         x_locations: Array of source locations for interpolation
-         target_depths: Array of target depths for interpolation
-         column_name: Name of the column being interpolated
-         is_categorical: Whether the attribute is categorical
-
-     Returns:
-         Array of interpolated values
-     """
-     # For categorical data or specific columns, use custom nearest neighbor interpolation
-     if is_categorical or column_name in ['lith_ids', 'component lith']:
-         return _nearest_neighbor_categorical_interpolation(
-             x_locations=x_locations,
-             y_values=attr_values.values,
-             target_depths=target_depths
-         )
-     else:
-         # For numerical data, use scipy's interp1d with linear interpolation
-         interp_func = interp1d(
-             x=x_locations,
-             y=attr_values.values,
-             bounds_error=False,
-             fill_value=np.nan,
-             kind='linear'
-         )
-         return interp_func(target_depths)
-
-
- def _map_attrs_to_measured_depths(attrs: pd.DataFrame, survey_trajectory: LineSet, well_id_mapper: dict[str, int]) -> pd.DataFrame:
-     """
-     Map attributes to measured depths for each well.
-
-     Args:
-         attrs: DataFrame containing attribute data
-         survey_trajectory: LineSet containing trajectory data
-         well_id_mapper: Dictionary mapping well names to IDs
-
-     Returns:
-         DataFrame with attributes mapped to measured depths
-     """
-     # Extract trajectory data
-     trajectory: xr.DataArray = survey_trajectory.data.data["vertex_attrs"]
-     trajectory_well_id: xr.DataArray = trajectory.sel({'vertex_attr': 'well_id'})
-     measured_depths: np.ndarray = trajectory.sel({'vertex_attr': 'measured_depths'}).values.astype(np.float64)
-
-     # Prepare data
-     attrs: pd.DataFrame = _prepare_categorical_data(attrs)
-     new_attrs: pd.DataFrame = _prepare_new_attributes(attrs, survey_trajectory)
-
-     # Process each well
-     for well_name in well_id_mapper:
-         # Skip wells not in the attributes DataFrame
-         if well_name not in attrs.index:
-             print(f"Well '{well_name}' does not exist in the attributes DataFrame.")
-             continue
-
-         # Get well data
-         attrs_well = attrs.loc[[well_name]]
-         well_id = well_id_mapper.get(well_name)
-         well_mask = (trajectory_well_id == well_id).values
-         well_depths = measured_depths[well_mask]
-
-         # Get interpolation locations
-         interp_locations = _get_interpolation_locations(attrs_well, well_name)
-
-         # Interpolate each attribute
-         for col in attrs_well.columns:
-             # Skip location and ID columns
-             if col in ['top', 'base', 'well_id']:
-                 continue
-
-             attr_values = attrs_well[col]
-             is_categorical = attr_values.dtype == 'O' or isinstance(attr_values.dtype, pd.CategoricalDtype)
-
-             # Skip columns that can't be interpolated and aren't categorical
-             if is_categorical and col not in ['lith_ids', 'component lith']:
-                 continue
-
-             # Interpolate and assign values
-             interpolated_values = _interpolate_attribute(
-                 attr_values,
-                 interp_locations,
-                 well_depths,
-                 col,
-                 is_categorical
-             )
-
-             new_attrs.loc[well_mask, col] = interpolated_values
-
-     return new_attrs
+ import numpy as np
+ import pandas as pd
+ import xarray as xr
+ from typing import Tuple, Optional, Union, List, Any
+
+ from subsurface import optional_requirements
+ from ...structs.base_structures import UnstructuredData
+ from ...structs.base_structures._unstructured_data_constructor import raw_attributes_to_dict_data_arrays
+ from ...structs.unstructured_elements import LineSet
+
+
+ def combine_survey_and_attrs(attrs: pd.DataFrame, survey_trajectory: LineSet,well_id_mapper: dict[str, int]) -> UnstructuredData:
+     # Import moved to top for clarity and possibly avoiding repeated imports if called multiple times
+
+     # Ensure all columns in lith exist in new_attrs, if not, add them as NaN
+     new_attrs = _map_attrs_to_measured_depths(attrs, survey_trajectory, well_id_mapper)
+
+     # Construct the final xarray dict without intermediate variable
+     points_attributes_xarray_dict: dict[str, xr.DataArray] = raw_attributes_to_dict_data_arrays(
+         default_attributes_name="vertex_attrs",
+         n_items=survey_trajectory.data.data["vertex_attrs"].shape[0], # TODO: Can I look this on new_attrs to remove line 11?
+         dims=["points", "vertex_attr"],
+         raw_attributes=new_attrs
+     )
+
+     # Inline construction of UnstructuredData
+     return UnstructuredData.from_data_arrays_dict(
+         xarray_dict={
+             "vertex" : survey_trajectory.data.data["vertex"],
+             "cells" : survey_trajectory.data.data["cells"],
+             "vertex_attrs": points_attributes_xarray_dict["vertex_attrs"],
+             "cell_attrs" : survey_trajectory.data.data["cell_attrs"]
+         },
+         xarray_attributes=survey_trajectory.data.data.attrs,
+         default_cells_attributes_name=survey_trajectory.data.cells_attr_name,
+         default_points_attributes_name=survey_trajectory.data.vertex_attr_name
+     )
+
+
+ def _prepare_categorical_data(attrs: pd.DataFrame) -> pd.DataFrame:
+     """
+     Prepare categorical data for interpolation by converting categorical columns to numeric IDs.
+
+     Args:
+         attrs: DataFrame containing attribute data
+
+     Returns:
+         Modified DataFrame with categorical data prepared for interpolation
+     """
+     # Create a copy to avoid modifying the original
+     attrs_copy = attrs.copy()
+
+     # If component lith exists but lith_ids doesn't, create lith_ids
+     if 'component lith' in attrs_copy.columns and 'lith_ids' not in attrs_copy.columns:
+         attrs_copy['lith_ids'], _ = pd.factorize(attrs_copy['component lith'], use_na_sentinel=True)
+
+     return attrs_copy
+
+
+ def _prepare_new_attributes(attrs: pd.DataFrame, survey_trajectory: LineSet) -> pd.DataFrame:
+     """
+     Prepare the new attributes DataFrame by adding missing columns from attrs.
+
+     Args:
+         attrs: DataFrame containing attribute data
+         survey_trajectory: LineSet containing trajectory data
+
+     Returns:
+         New attributes DataFrame with all necessary columns
+     """
+     # Start with a copy of the existing attributes DataFrame
+     new_attrs = survey_trajectory.data.points_attributes.copy()
+
+     # Add missing columns from attrs, preserving their dtypes
+     for col in attrs.columns.difference(new_attrs.columns):
+         new_attrs[col] = np.nan if pd.api.types.is_numeric_dtype(attrs[col]) else None
+
+     return new_attrs
+
+
+ def _get_interpolation_locations(attrs_well: pd.DataFrame, well_name: str) -> np.ndarray:
+     """
+     Determine the locations to use for interpolation based on top and base values.
+
+     Args:
+         attrs_well: DataFrame containing well attribute data
+         well_name: Name of the current well
+
+     Returns:
+         Array of location values to use for interpolation
+     """
+     if "base" not in attrs_well.columns:
+         raise ValueError(f"Base column must be present in the file for well '{well_name}'.")
+     elif "top" not in attrs_well.columns:
+         return attrs_well['base'].values
+     else:
+         return ((attrs_well['top'] + attrs_well['base']) / 2).values
+
+
+ def _nearest_neighbor_categorical_interpolation(
+         x_locations: np.ndarray,
+         y_values: np.ndarray,
+         target_depths: np.ndarray
+ ) -> np.ndarray:
+     """
+     Custom nearest neighbor interpolation for categorical data.
+
+     This function finds the nearest source point for each target point
+     and assigns the corresponding categorical value.
+
+     Args:
+         x_locations: Array of source locations
+         y_values: Array of categorical values at source locations
+         target_depths: Array of target depths for interpolation
+
+     Returns:
+         Array of interpolated categorical values
+     """
+     # Initialize output array with NaN or None values
+     result = np.full(target_depths.shape, np.nan, dtype=object)
+
+     # For each target depth, find the nearest source location
+     for i, depth in enumerate(target_depths):
+         # Calculate distances to all source locations
+         distances = np.abs(x_locations - depth)
+
+         # Find the index of the minimum distance
+         if len(distances) > 0:
+             nearest_idx = np.argmin(distances)
+             result[i] = y_values[nearest_idx]
+
+     return result
+
+
+ def _interpolate_attribute(
+         attr_values: pd.Series,
+         x_locations: np.ndarray,
+         target_depths: np.ndarray,
+         column_name: str,
+         is_categorical: bool
+ ) -> np.ndarray:
+     """
+     Interpolate attribute values to target depths.
+
+     Args:
+         attr_values: Series containing attribute values
+         x_locations: Array of source locations for interpolation
+         target_depths: Array of target depths for interpolation
+         column_name: Name of the column being interpolated
+         is_categorical: Whether the attribute is categorical
+
+     Returns:
+         Array of interpolated values
+     """
+     # For categorical data or specific columns, use custom nearest neighbor interpolation
+     if is_categorical or column_name in ['lith_ids', 'component lith']:
+         return _nearest_neighbor_categorical_interpolation(
+             x_locations=x_locations,
+             y_values=attr_values.values,
+             target_depths=target_depths
+         )
+     else:
+         # For numerical data, use scipy's interp1d with linear interpolation
+         scipy = optional_requirements.require_scipy()
+         interp_func = scipy.interpolate.interp1d(
+             x=x_locations,
+             y=attr_values.values,
+             bounds_error=False,
+             fill_value=np.nan,
+             kind='linear'
+         )
+         return interp_func(target_depths)
+
+
+ def _map_attrs_to_measured_depths(attrs: pd.DataFrame, survey_trajectory: LineSet, well_id_mapper: dict[str, int]) -> pd.DataFrame:
+     """
+     Map attributes to measured depths for each well.
+
+     Args:
+         attrs: DataFrame containing attribute data
+         survey_trajectory: LineSet containing trajectory data
+         well_id_mapper: Dictionary mapping well names to IDs
+
+     Returns:
+         DataFrame with attributes mapped to measured depths
+     """
+     # Extract trajectory data
+     trajectory: xr.DataArray = survey_trajectory.data.data["vertex_attrs"]
+     trajectory_well_id: xr.DataArray = trajectory.sel({'vertex_attr': 'well_id'})
+     measured_depths: np.ndarray = trajectory.sel({'vertex_attr': 'measured_depths'}).values.astype(np.float64)
+
+     # Prepare data
+     attrs: pd.DataFrame = _prepare_categorical_data(attrs)
+     new_attrs: pd.DataFrame = _prepare_new_attributes(attrs, survey_trajectory)
+
+     # Process each well
+     for well_name in well_id_mapper:
+         # Skip wells not in the attributes DataFrame
+         if well_name not in attrs.index:
+             print(f"Well '{well_name}' does not exist in the attributes DataFrame.")
+             continue
+
+         # Get well data
+         attrs_well = attrs.loc[[well_name]]
+         well_id = well_id_mapper.get(well_name)
+         well_mask = (trajectory_well_id == well_id).values
+         well_depths = measured_depths[well_mask]
+
+         # Get interpolation locations
+         interp_locations = _get_interpolation_locations(attrs_well, well_name)
+
+         # Interpolate each attribute
+         for col in attrs_well.columns:
+             # Skip location and ID columns
+             if col in ['top', 'base', 'well_id']:
+                 continue
+
+             attr_values = attrs_well[col]
+             is_categorical = attr_values.dtype == 'O' or isinstance(attr_values.dtype, pd.CategoricalDtype)
+
+             # Skip columns that can't be interpolated and aren't categorical
+             if is_categorical and col not in ['lith_ids', 'component lith']:
+                 continue
+
+             # Interpolate and assign values
+             interpolated_values = _interpolate_attribute(
+                 attr_values,
+                 interp_locations,
+                 well_depths,
+                 col,
+                 is_categorical
+             )
+
+             new_attrs.loc[well_mask, col] = interpolated_values
+
+     return new_attrs
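
The functional change in this module between rc15 and rc17 is the interpolation backend import: rc15 imported interp1d from scipy.interpolate at module load, while rc17 resolves SciPy lazily through subsurface.optional_requirements.require_scipy() inside _interpolate_attribute. The helper itself is defined in subsurface/optional_requirements.py, which this diff does not show, so the following is only a minimal sketch of that lazy-import pattern under that assumption, not the package's actual implementation:

    # Illustrative sketch only; the real require_scipy() lives in
    # subsurface/optional_requirements.py and is not shown in this diff.
    def require_scipy():
        """Return the scipy package, raising a readable error if it is not installed."""
        try:
            import scipy
            import scipy.interpolate  # load the subpackage so scipy.interpolate.interp1d resolves
            return scipy
        except ImportError as exc:
            raise ImportError(
                "SciPy is required to interpolate numerical well attributes; "
                "install it with `pip install scipy`."
            ) from exc

    # Usage mirroring the rc17 code path in _interpolate_attribute, with made-up sample values:
    scipy = require_scipy()
    interp_func = scipy.interpolate.interp1d(
        x=[0.0, 10.0, 20.0],
        y=[2.1, 2.4, 2.7],
        bounds_error=False,
        fill_value=float("nan"),
        kind='linear'
    )
    print(interp_func([5.0, 25.0]))  # ~ [2.25, nan]; out-of-range depths fall back to NaN

Deferring the import this way keeps the module importable without SciPy installed; only the numerical branch of _interpolate_attribute needs it, while categorical attributes continue to use the package's own NumPy-based nearest-neighbor routine.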