geost 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- geost/__init__.py +14 -0
- geost/_borehole.py +86 -0
- geost/abstract_classes.py +251 -0
- geost/analysis/__init__.py +6 -0
- geost/analysis/interpret_cpt.py +50 -0
- geost/analysis/layer_analysis.py +90 -0
- geost/base.py +2040 -0
- geost/bro/__init__.py +1 -0
- geost/bro/api.py +182 -0
- geost/bro/bro_utils.py +13 -0
- geost/enums.py +7 -0
- geost/export/__init__.py +2 -0
- geost/export/geodataclass.py +119 -0
- geost/export/vtk.py +70 -0
- geost/header_factory.py +29 -0
- geost/io/__init__.py +1 -0
- geost/io/gef.py +40 -0
- geost/io/parsers/__init__.py +2 -0
- geost/io/parsers/gef_parsers.py +451 -0
- geost/io/parsers/parser_utils.py +13 -0
- geost/io/parsers/xml_parsers.py +264 -0
- geost/mixins.py +105 -0
- geost/projections.py +32 -0
- geost/read.py +550 -0
- geost/spatial.py +346 -0
- geost/utils.py +156 -0
- geost/validate/__init__.py +1 -0
- geost/validate/decorators.py +44 -0
- geost/validate/validate.py +159 -0
- geost/validate/validation_schemes.py +43 -0
- geost-0.2.0.dist-info/LICENSE +21 -0
- geost-0.2.0.dist-info/METADATA +142 -0
- geost-0.2.0.dist-info/RECORD +35 -0
- geost-0.2.0.dist-info/WHEEL +5 -0
- geost-0.2.0.dist-info/top_level.txt +1 -0
geost/__init__.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
from geost.read import (
|
|
2
|
+
get_bro_objects_from_bbox,
|
|
3
|
+
get_bro_objects_from_geometry,
|
|
4
|
+
read_borehole_table,
|
|
5
|
+
read_gef_cores,
|
|
6
|
+
read_gef_cpts,
|
|
7
|
+
read_nlog_cores,
|
|
8
|
+
read_sst_cpts,
|
|
9
|
+
read_xml_cpts,
|
|
10
|
+
read_xml_geological_cores,
|
|
11
|
+
read_xml_geotechnical_cores,
|
|
12
|
+
read_xml_soil_cores,
|
|
13
|
+
)
|
|
14
|
+
from geost.utils import csv_to_parquet, excel_to_parquet
|
geost/_borehole.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
# class CptCollection(PointDataCollection):
|
|
2
|
+
# """
|
|
3
|
+
# Class for collections of CPT data.
|
|
4
|
+
|
|
5
|
+
# Users must use the reader functions in
|
|
6
|
+
# :py:mod:`~geost.read` to create collections. The following readers generate CPT
|
|
7
|
+
# objects:
|
|
8
|
+
|
|
9
|
+
# :func:`~geost.read.read_sst_cpts`, :func:`~geost.read.read_gef_cpts`
|
|
10
|
+
|
|
11
|
+
# Args:
|
|
12
|
+
# data (pd.DataFrame): Dataframe containing borehole/CPT data.
|
|
13
|
+
|
|
14
|
+
# vertical_reference (str): Vertical reference, see
|
|
15
|
+
# :py:attr:`~geost.base.PointDataCollection.vertical_reference`
|
|
16
|
+
|
|
17
|
+
# horizontal_reference (int): Horizontal reference, see
|
|
18
|
+
# :py:attr:`~geost.base.PointDataCollection.horizontal_reference`
|
|
19
|
+
|
|
20
|
+
# header (pd.DataFrame): Header used for construction. see
|
|
21
|
+
# :py:attr:`~geost.base.PointDataCollection.header`
|
|
22
|
+
# """
|
|
23
|
+
|
|
24
|
+
# def __init__(
|
|
25
|
+
# self,
|
|
26
|
+
# data: pd.DataFrame,
|
|
27
|
+
# vertical_reference: str = "NAP",
|
|
28
|
+
# horizontal_reference: int = 28992,
|
|
29
|
+
# header: Optional[pd.DataFrame] = None,
|
|
30
|
+
# is_inclined: bool = False,
|
|
31
|
+
# ):
|
|
32
|
+
# super().__init__(
|
|
33
|
+
# data,
|
|
34
|
+
# vertical_reference,
|
|
35
|
+
# horizontal_reference,
|
|
36
|
+
# header=header,
|
|
37
|
+
# is_inclined=is_inclined,
|
|
38
|
+
# )
|
|
39
|
+
|
|
40
|
+
# def add_ic(
|
|
41
|
+
# self,
|
|
42
|
+
# ): # Move to cpt analysis functions, use something like 'apply' function in classes
|
|
43
|
+
# """
|
|
44
|
+
# Calculate soil behaviour type index (Ic) for all CPT's in the collection.
|
|
45
|
+
|
|
46
|
+
# The data is added to :py:attr:`~geost.base.PointDataCollection.header`.
|
|
47
|
+
# """
|
|
48
|
+
# self.data["ic"] = calc_ic(self.data["qc"], self.data["friction_number"])
|
|
49
|
+
|
|
50
|
+
# def add_lithology(
|
|
51
|
+
# self,
|
|
52
|
+
# ): # Move to cpt analysis functions, use something like 'apply' function in classes
|
|
53
|
+
# """
|
|
54
|
+
# Interpret lithoclass for all CPT's in the collection.
|
|
55
|
+
|
|
56
|
+
# The data is added to :py:attr:`~geost.base.PointDataCollection.header`.
|
|
57
|
+
# """
|
|
58
|
+
# if "ic" not in self.data.columns:
|
|
59
|
+
# self.add_ic()
|
|
60
|
+
# self.data["lith"] = calc_lithology(
|
|
61
|
+
# self.data["ic"], self.data["qc"], self.data["friction_number"]
|
|
62
|
+
# )
|
|
63
|
+
|
|
64
|
+
# def as_boreholecollection(self): # No change
|
|
65
|
+
# """
|
|
66
|
+
# Export CptCollection to BoreholeCollection. Requires the "lith" column to be
|
|
67
|
+
# present. Use the method :py:meth:`~geost.borehole.CptCollection.add_lithology`
|
|
68
|
+
|
|
69
|
+
# Returns
|
|
70
|
+
# -------
|
|
71
|
+
# Instance of :class:`~geost.borehole.BoreholeCollection`
|
|
72
|
+
# """
|
|
73
|
+
# if "lith" not in self.data.columns:
|
|
74
|
+
# raise IndexError(
|
|
75
|
+
# r"The column \"lith\" is required to convert to BoreholeCollection"
|
|
76
|
+
# )
|
|
77
|
+
|
|
78
|
+
# borehole_converted_dataframe = self.data[
|
|
79
|
+
# ["nr", "x", "y", "surface", "end", "top", "bottom", "lith"]
|
|
80
|
+
# ]
|
|
81
|
+
# cptcollection_as_bhcollection = BoreholeCollection(
|
|
82
|
+
# borehole_converted_dataframe,
|
|
83
|
+
# vertical_reference=self.vertical_reference,
|
|
84
|
+
# header=self.header,
|
|
85
|
+
# )
|
|
86
|
+
# return cptcollection_as_bhcollection
|
|
@@ -0,0 +1,251 @@
|
|
|
1
|
+
from abc import ABC, abstractmethod
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
class AbstractHeader(ABC):
    """
    Interface for header objects: per-location metadata of a collection.
    """

    @property
    @abstractmethod
    def gdf(self):
        """Header table; the name suggests a (Geo)DataFrame — see implementations."""
        ...

    @property
    @abstractmethod
    def horizontal_reference(self):
        """Horizontal coordinate reference of the header."""
        ...

    @property
    @abstractmethod
    def vertical_reference(self):
        """Vertical reference (datum) of the header."""
        ...

    @gdf.setter
    @abstractmethod
    def gdf(self, gdf):
        ...

    @abstractmethod
    def change_horizontal_reference(self):
        """Convert the header to a different horizontal reference."""
        ...

    @abstractmethod
    def change_vertical_reference(self):
        """Convert the header to a different vertical reference."""
        ...

    @abstractmethod
    def get(self):
        """Retrieve a selection of header entries."""
        ...

    @abstractmethod
    def select_within_bbox(self):
        """Select header entries inside a bounding box."""
        ...

    @abstractmethod
    def select_with_points(self):
        """Select header entries using point geometries."""
        ...

    @abstractmethod
    def select_with_lines(self):
        """Select header entries using line geometries."""
        ...

    @abstractmethod
    def select_within_polygons(self):
        """Select header entries inside polygon geometries."""
        ...

    @abstractmethod
    def select_by_depth(self):
        """Select header entries by depth criteria."""
        ...

    @abstractmethod
    def select_by_length(self):
        """Select header entries by object length criteria."""
        ...

    @abstractmethod
    def get_area_labels(self):
        """Label header entries with the areas (polygons) they fall in."""
        ...
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
class AbstractData(ABC):
    """
    Interface for data objects holding the layer/measurement records of a
    collection.
    """

    @property
    @abstractmethod
    def df(self):
        """Underlying dataframe holding the data records."""
        ...

    @property
    @abstractmethod
    def datatype(self):
        """Type of the data (implementation-defined)."""
        ...

    @df.setter
    @abstractmethod
    def df(self, df):
        ...

    @datatype.setter
    @abstractmethod
    def datatype(self, datatype):
        # Fixed: a property setter must accept the assigned value; the original
        # signature was `def datatype(self):`, inconsistent with the `df` setter
        # and guaranteed to raise TypeError if ever invoked via assignment.
        ...

    @abstractmethod
    def to_header(self):
        """Derive a header object from the data."""
        ...

    @abstractmethod
    def to_collection(self):
        """Combine data (and a derived header) into a collection object."""
        ...

    @abstractmethod
    def select_by_values(self):
        """Select records by column values."""
        ...

    @abstractmethod
    def slice_depth_interval(self):
        """Slice the data to a depth interval."""
        ...

    @abstractmethod
    def slice_by_values(self):
        """Slice the data by column values."""
        ...

    @abstractmethod
    def get_cumulative_layer_thickness(self):
        # Not sure if this should be here, potentially unsuitable with DiscreteData
        ...

    @abstractmethod
    def get_layer_top(self):
        """Find the top of layers matching some criterion."""
        ...

    @abstractmethod
    def to_multiblock(self):
        """Export the data to a (PyVista-style) multiblock representation."""
        ...

    @abstractmethod
    def to_vtm(self):
        """Export the data to a VTM file."""
        ...

    @abstractmethod
    def to_datafusiontools(self):
        # supporting this is low priority, perhaps even deprecate
        ...
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
class AbstractCollection(ABC):
    """
    Interface for collection objects combining a header and a data object.
    """

    @property
    @abstractmethod
    def header(self):
        """Header object of the collection."""
        ...

    @property
    @abstractmethod
    def data(self):
        """Data object of the collection."""
        ...

    @property
    def n_points(self):
        # NOTE(review): unlike its siblings this property is not marked
        # @abstractmethod and, as written, returns None — confirm intentional.
        ...

    @property
    @abstractmethod
    def horizontal_reference(self):  # Move to header class in future refactor
        ...

    @property
    @abstractmethod
    def vertical_reference(self):  # move to data class in future refactor
        ...

    @header.setter
    @abstractmethod
    def header(self, header):
        ...

    @data.setter
    @abstractmethod
    def data(self, data):
        ...

    @horizontal_reference.setter
    @abstractmethod
    def horizontal_reference(self, to_epsg: int):
        ...

    @vertical_reference.setter
    @abstractmethod
    def vertical_reference(self, to_epsg: str):  # will use epsg after refactor
        ...

    @abstractmethod
    def get(self):
        """Retrieve a selection of the collection."""
        ...

    @abstractmethod
    def reset_header(self):
        """Rebuild the header from the data."""
        ...

    @abstractmethod
    def check_header_to_data_alignment(self):
        """Verify that header entries and data records refer to the same objects."""
        ...

    @abstractmethod
    def select_within_bbox(self):
        """Select collection entries inside a bounding box."""
        ...

    @abstractmethod
    def select_with_points(self):
        """Select collection entries using point geometries."""
        ...

    @abstractmethod
    def select_with_lines(self):
        """Select collection entries using line geometries."""
        ...

    @abstractmethod
    def select_within_polygons(self):
        """Select collection entries inside polygon geometries."""
        ...

    @abstractmethod
    def select_by_depth(self):
        """Select collection entries by depth criteria."""
        ...

    @abstractmethod
    def select_by_length(self):
        """Select collection entries by object length criteria."""
        ...

    @abstractmethod
    def get_area_labels(self):
        """Label collection entries with the areas (polygons) they fall in."""
        ...

    @abstractmethod
    def select_by_values(self):
        """Select data records by column values."""
        ...

    @abstractmethod
    def slice_depth_interval(self):
        """Slice the data to a depth interval."""
        ...

    @abstractmethod
    def slice_by_values(self):
        """Slice the data by column values."""
        ...

    @abstractmethod
    def get_cumulative_layer_thickness(self):
        # Not sure if this should be here, potentially unsuitable with DiscreteData.
        # These kinds of methods should go to a separate layer_analysis module
        # with functions to cover such analyses.
        ...

    @abstractmethod
    def get_layer_top(self):
        # These kinds of methods should go to a separate layer_analysis module
        # with functions to cover such analyses.
        ...

    @abstractmethod
    def to_multiblock(self):
        """Export the data to a (PyVista-style) multiblock representation."""
        ...

    @abstractmethod
    def to_vtm(self):
        """Export the data to a VTM file."""
        ...

    @abstractmethod
    def to_datafusiontools(self):
        # supporting this is low priority, perhaps even deprecate
        ...
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
|
|
3
|
+
# NOTE: temporary functions ripped from old code. To be made compatible with Geolib
|
|
4
|
+
# functions
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def calc_ic(qc, rf) -> np.ndarray:
    """
    Compute the non-normalized soil behaviour type index (I_SBT, Robertson 2010).

    The non-normalized variant needs no stress calculations to normalize with,
    and hence no pore water pressure data is required.

    Note when using non-normalized IC values: "The non-normalized SBT index
    (ISBT) is essentially the same as the normalized SBTn index (Ic) but only
    uses the basic CPT measurements. In general, the normalized Ic provides
    more reliable identification of SBT than the non-normalized ISBT, but when
    the insitu vertical effective stress is between 50 kPa to 150 kPa there is
    often little difference between normalized and non-normalized SBT."

    Parameters
    ----------
    qc : np.ndarray
        Cone resistance values.
    rf : np.ndarray
        Friction number.

    Returns
    -------
    np.ndarray
        Non-normalized IC values.
    """
    # Same expression as sqrt((3.47 - log10(qc/0.1))^2 + (log10(rf) + 1.22)^2),
    # split into named terms for readability.
    cone_term = 3.47 - np.log10(qc / 0.1)
    friction_term = np.log10(rf) + 1.22
    return np.sqrt(cone_term**2 + friction_term**2)
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
# TODO numpy searchsort
def calc_lithology(ic, qc, rf) -> np.ndarray:
    """
    Interpret a lithology class per element from Ic, qc and rf.

    Codes presumably follow Dutch lithology conventions (Z = sand, Kz = sandy
    clay, K = clay, Kh/V = organic clay/peat, NBE = undetermined) — confirm
    against the rest of the package.

    Parameters
    ----------
    ic : np.ndarray
        Non-normalized soil behaviour type index, see :func:`calc_ic`.
    qc : np.ndarray
        Cone resistance values.
    rf : np.ndarray
        Friction number.

    Returns
    -------
    np.ndarray
        Lithology class (string code) per element. Elements matching no mask
        (e.g. NaN ic with low rf) stay "NBE".
    """
    boundaries = [1.6, 2.0, 2.2, 2.6, 2.95, 3.6]
    lith = np.full_like(ic, "NBE", dtype="<U3")
    # The original assigned "Z" in four separate piecewise ranges
    # (< 1.6, 1.6-2.0, 2.0-2.2, 2.2-2.6); collapsed into one equivalent mask.
    lith[ic < boundaries[3]] = "Z"
    lith[(ic >= boundaries[3]) & (ic < boundaries[4])] = "Kz"
    lith[(ic >= boundaries[4]) & (ic < boundaries[5])] = "K"
    # Order matters below: later assignments overwrite earlier ones, so for
    # ic >= 3.6 with rf <= 8 the final class is "Kh" even if the middle mask
    # set it to "V".
    lith[(ic >= boundaries[5]) & (rf > 8)] = "V"
    lith[((rf > 5) & (qc < 1.5)) | (rf > 6)] = "V"
    lith[(ic >= boundaries[5]) & (rf <= 8)] = "Kh"
    return lith
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def find_top_sand(
    lith: np.ndarray,
    top: np.ndarray,
    bottom: np.ndarray,
    min_sand_frac: float,
    min_sand_thickness: float,
) -> float:
    """
    Find the top of sand depth in a borehole. The top of sand is defined by the
    first layer of a specified thickness that contains a minimum percentage of
    sand. By default: when the first layer of sand is detected, the next 1 meter
    is scanned. Within this meter, if more than 50% of the length has a main
    lithology of sand, the initially detected layer of sand is regarded as the
    top of sand. If not, continue downward until the next layer of sand is
    detected and repeat.

    Parameters
    ----------
    lith : ndarray
        Numpy array containing the lithology of the borehole. "Z" and "G" count
        as sand; "GM" and "NBE" count as unknown.
    top : ndarray
        Numpy array containing the top depth of the layers of the borehole.
        NOTE(review): depths are assumed to decrease downward (e.g. NAP
        reference), since the search window is ``top_sand -
        min_sand_thickness`` — confirm against callers.
    bottom : ndarray
        Numpy array containing the bottom depth of the layers of the borehole.
    min_sand_frac : float
        Minimum fraction required to be sand within the search window.
    min_sand_thickness : float
        Minimum thickness of the sand to search for (size of the search
        window).

    Returns
    -------
    top_sand : float
        Top depth of the sand layer that meets the requirements. NaN when the
        borehole contains no sand at all or contains any unknown lithology.
    """
    # "+" on boolean arrays acts as element-wise OR.
    is_sand = ("Z" == lith) + ("G" == lith)
    is_unknown = ("GM" == lith) + ("NBE" == lith)

    if np.any(is_sand) and not np.any(is_unknown):
        idx_sand = np.where(is_sand)[0]
        for idx in idx_sand:
            top_sand = top[idx]
            search_depth = top_sand - min_sand_thickness

            # Layers whose top falls inside the window (top_sand, search_depth].
            search_mask = (top <= top_sand) & (top > search_depth)

            # Boolean indexing returns copies, so clipping the deepest bottom
            # to the window edge does not mutate the caller's array.
            tmp_top = top[search_mask]
            tmp_bottom = bottom[search_mask]
            tmp_bottom[-1] = search_depth

            length = tmp_top - tmp_bottom

            sand_frac = length[is_sand[search_mask]].sum() / min_sand_thickness

            if sand_frac >= min_sand_frac:
                break
            # NOTE(review): if no window satisfies min_sand_frac, the loop ends
            # without break and top_sand keeps the LAST candidate's top rather
            # than NaN — confirm this fallback is intended.

    else:
        top_sand = np.nan

    return top_sand
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def top_of_sand(boreholes, ids="nr", min_sand_frac=0.5, min_sand_thickness=1):
    """
    Yield (id, top-of-sand depth) for every borehole in a layer table.

    Parameters
    ----------
    boreholes : pd.DataFrame
        Layer data with at least the ``ids``, "lith", "top" and "bottom"
        columns.
    ids : str, optional
        Column to group boreholes by, by default "nr".
    min_sand_frac : float, optional
        See :func:`find_top_sand`, by default 0.5.
    min_sand_thickness : float, optional
        See :func:`find_top_sand`, by default 1.

    Yields
    ------
    tuple
        (borehole id, top of sand depth) per borehole.
    """
    for borehole_id, layers in boreholes.groupby(ids):
        top_sand = find_top_sand(
            layers["lith"].values,
            layers["top"].values,
            layers["bottom"].values,
            min_sand_frac,
            min_sand_thickness,
        )
        yield (borehole_id, top_sand)
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def cumulative_thickness(data, top: str = "top", bottom: str = "bottom"):
    """
    Return the absolute summed thickness of all layers in ``data``.

    Parameters
    ----------
    data
        Table-like object (e.g. pd.DataFrame) holding layer tops and bottoms.
    top : str, optional
        Name of the column with layer tops, by default "top".
    bottom : str, optional
        Name of the column with layer bottoms, by default "bottom".
    """
    thickness_per_layer = data[top] - data[bottom]
    return np.abs(np.sum(thickness_per_layer))
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def layer_top(data, column: str, value: str):  # TODO
    """
    Yield, per borehole, the top depth of the first layer matching a value.

    Parameters
    ----------
    data : pd.DataFrame
        Layer data with at least the "nr", "top" and ``column`` columns.
    column : str
        Column to match against.
    value : str
        Value to look for in ``column``.

    Yields
    ------
    tuple
        (borehole id, top of first matching layer); NaN when no layer matches.
    """
    for borehole_id, borehole in data.groupby("nr"):
        matches = borehole[borehole[column] == value]
        if matches.empty:
            yield (borehole_id, np.nan)
        else:
            yield (borehole_id, matches.iloc[0]["top"])
|