bedrock-ge 0.2.4-py3-none-any.whl → 0.3.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- bedrock_ge/__init__.py +1 -1
- bedrock_ge/gi/ags.py +103 -0
- bedrock_ge/gi/ags3.py +275 -0
- bedrock_ge/gi/ags4.py +29 -0
- bedrock_ge/gi/{ags/schemas.py → ags_schemas.py} +29 -8
- bedrock_ge/gi/db_operations.py +128 -0
- bedrock_ge/gi/geospatial.py +349 -0
- bedrock_ge/gi/io_utils.py +271 -0
- bedrock_ge/gi/mapper.py +221 -0
- bedrock_ge/gi/mapping_models.py +69 -0
- bedrock_ge/gi/schemas.py +136 -36
- bedrock_ge/gi/validate.py +45 -108
- bedrock_ge/gi/write.py +54 -37
- {bedrock_ge-0.2.4.dist-info → bedrock_ge-0.3.0.dist-info}/METADATA +2 -3
- bedrock_ge-0.3.0.dist-info/RECORD +22 -0
- bedrock_ge/gi/ags/__init__.py +0 -0
- bedrock_ge/gi/ags/read.py +0 -192
- bedrock_ge/gi/ags/transform.py +0 -264
- bedrock_ge/gi/ags/validate.py +0 -25
- bedrock_ge/gi/brgi-schema.json +0 -36
- bedrock_ge/gi/concatenate.py +0 -38
- bedrock_ge/gi/gis_geometry.py +0 -282
- bedrock_ge-0.2.4.dist-info/RECORD +0 -21
- /bedrock_ge/gi/{ags/ags3_data_dictionary.json → ags3_data_dictionary.json} +0 -0
- /bedrock_ge/gi/{ags/ags4_data_dictionary.json → ags4_data_dictionary.json} +0 -0
- {bedrock_ge-0.2.4.dist-info → bedrock_ge-0.3.0.dist-info}/WHEEL +0 -0
- {bedrock_ge-0.2.4.dist-info → bedrock_ge-0.3.0.dist-info}/licenses/LICENSE +0 -0
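Migration note: the bedrock_ge/gi/ags/ subpackage is flattened into modules directly under bedrock_ge/gi/ (ags.py, ags3.py, ags4.py, ags_schemas.py), while transform.py, concatenate.py and gis_geometry.py appear to be superseded by the new mapper.py, db_operations.py and geospatial.py modules. A minimal sketch of the resulting import-path change, assuming the schema class names (e.g. Ags3HOLE, visible in the deleted transform.py below) are kept in 0.3.0:

# bedrock-ge 0.2.4: AGS schemas lived in the bedrock_ge.gi.ags subpackage
from bedrock_ge.gi.ags.schemas import Ags3HOLE

# bedrock-ge 0.3.0: bedrock_ge/gi/ags/schemas.py moved to bedrock_ge/gi/ags_schemas.py,
# so the "ags." package level disappears from the import path
# (assumes the Ags3HOLE class name is unchanged in the new module).
from bedrock_ge.gi.ags_schemas import Ags3HOLE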
bedrock_ge/gi/ags/transform.py
DELETED
@@ -1,264 +0,0 @@
"""Transforms, i.e. maps, AGS data to Bedrock's schema."""

from typing import Dict

import pandas as pd
import pandera as pa
from pandera.typing import DataFrame
from pyproj import CRS

from bedrock_ge.gi.ags.schemas import Ags3HOLE, Ags3SAMP, BaseSAMP
from bedrock_ge.gi.schemas import BaseInSitu, BaseLocation, BaseSample, Project
from bedrock_ge.gi.validate import check_foreign_key


# What this function really does, is add the CRS and Bedrock columns:
# - `project_uid`
# - `location_uid`
# - `sample_id`
# - `sample_uid`
# - `depth_to_`
# There really isn't any mapping going on here...
# TODO: Make sure that the name of the function and docstrings reflect this.
def ags3_db_to_no_gis_brgi_db(
    ags3_db: Dict[str, pd.DataFrame], crs: CRS
) -> Dict[str, pd.DataFrame]:
    """Maps a database with GI data from a single AGS 3 file to a database with Bedrock's schema.

    This function converts an AGS 3 formatted geotechnical database into Bedrock's
    internal database format, maintaining data relationships and structure. It handles
    various types of geotechnical data including project information, locations,
    samples, lab tests, and in-situ measurements.

    The mapping process:
    1. Project Data: Converts AGS 3 'PROJ' group to Bedrock's 'Project' table
    2. Location Data: Converts AGS 3 'HOLE' group to Bedrock's 'Location' table
    3. Sample Data: Converts AGS 3 'SAMP' group to Bedrock's 'Sample' table
    4. Other Data: Handles lab tests, in-situ measurements, and miscellaneous tables

    Args:
        ags3_db (Dict[str, pd.DataFrame]): A dictionary containing AGS 3 data tables,
            where keys are table names and values are pandas DataFrames.
        crs (CRS): Coordinate Reference System for the project data.

    Returns:
        Dict[str, pd.DataFrame]: A dictionary containing Bedrock GI database tables,
            where keys are table names and values are transformed pandas DataFrames.

    Note:
        The function creates a copy of the input database to avoid modifying the original data.
        It performs foreign key checks to maintain data integrity during the mapping.
    """
    # Make sure that the AGS 3 database is not changed outside this function.
    ags3_db = ags3_db.copy()

    print("Transforming AGS 3 groups to Bedrock tables...")

    # Instantiate Bedrock dictionary of pd.DataFrames
    brgi_db = {}

    # Project
    print("Transforming AGS 3 group 'PROJ' to Bedrock GI 'Project' table...")
    brgi_db["Project"] = ags_proj_to_brgi_project(ags3_db["PROJ"], crs)
    project_uid = brgi_db["Project"]["project_uid"].item()
    del ags3_db["PROJ"]

    # Locations
    if "HOLE" in ags3_db.keys():
        print("Transforming AGS 3 group 'HOLE' to Bedrock GI 'Location' table...")
        brgi_db["Location"] = ags3_hole_to_brgi_location(ags3_db["HOLE"], project_uid)  # type: ignore
        del ags3_db["HOLE"]
    else:
        print(
            "Your AGS 3 data doesn't contain a HOLE group, i.e. Ground Investigation locations."
        )

    # Samples
    if "SAMP" in ags3_db.keys():
        print("Transforming AGS 3 group 'SAMP' to Bedrock GI 'Sample' table...")
        check_foreign_key("HOLE_ID", brgi_db["Location"], ags3_db["SAMP"])
        ags3_db["SAMP"] = generate_sample_ids_for_ags3(ags3_db["SAMP"])  # type: ignore
        brgi_db["Sample"] = ags3_samp_to_brgi_sample(ags3_db["SAMP"], project_uid)  # type: ignore
        del ags3_db["SAMP"]
    else:
        print("Your AGS 3 data doesn't contain a SAMP group, i.e. samples.")

    # The rest of the tables: 1. Lab Tests 2. In-Situ Measurements 3. Other tables
    for group, group_df in ags3_db.items():
        if "SAMP_REF" in ags3_db[group].columns:
            print(f"Project {project_uid} has lab test data: {group}.")
            brgi_db[group] = group_df  # type: ignore
        elif "HOLE_ID" in ags3_db[group].columns:
            print(
                f"Transforming AGS 3 group '{group}' to Bedrock GI 'InSitu_{group}' table..."
            )
            check_foreign_key("HOLE_ID", brgi_db["Location"], group_df)
            brgi_db[f"InSitu_{group}"] = ags3_in_situ_to_brgi_in_situ(  # type: ignore
                group, group_df, project_uid
            )
        else:
            brgi_db[group] = ags3_db[group]  # type: ignore

    print(
        "Done",
        "The Bedrock database contains the following tables:",
        list(brgi_db.keys()),
        sep="\n",
        end="\n\n",
    )
    return brgi_db  # type: ignore


@pa.check_types(lazy=True)
def ags_proj_to_brgi_project(ags_proj: pd.DataFrame, crs: CRS) -> DataFrame[Project]:
    """Maps the AGS 3 'PROJ' group to a Bedrock GI 'Project' table.

    Args:
        ags_proj (pd.DataFrame): The AGS 3 'PROJ' group.
        crs (CRS): The coordinate reference system of the project.

    Returns:
        DataFrame[Project]: The Bedrock GI 'Project' table.
    """
    if "project_uid" not in ags_proj.columns:
        ags_proj["project_uid"] = ags_proj["PROJ_ID"]

    ags_proj["crs_wkt"] = crs.to_wkt()

    return ags_proj  # type: ignore


@pa.check_types(lazy=True)
def ags3_hole_to_brgi_location(
    ags3_hole: DataFrame[Ags3HOLE], project_uid: str
) -> DataFrame[BaseLocation]:
    brgi_location = ags3_hole
    brgi_location["project_uid"] = project_uid
    brgi_location["location_source_id"] = ags3_hole["HOLE_ID"]
    brgi_location["location_uid"] = (
        ags3_hole["HOLE_ID"] + "_" + ags3_hole["project_uid"]
    )
    brgi_location["location_type"] = ags3_hole["HOLE_TYPE"]
    brgi_location["easting"] = ags3_hole["HOLE_NATE"]
    brgi_location["northing"] = ags3_hole["HOLE_NATN"]
    brgi_location["ground_level_elevation"] = ags3_hole["HOLE_GL"]
    brgi_location["depth_to_base"] = ags3_hole["HOLE_FDEP"]

    return ags3_hole  # type: ignore


@pa.check_types(lazy=True)
def ags3_samp_to_brgi_sample(
    ags3_samp: DataFrame[Ags3SAMP],
    project_uid: str,
) -> DataFrame[BaseSample]:
    brgi_sample = ags3_samp
    brgi_sample["project_uid"] = project_uid
    brgi_sample["location_source_id"] = ags3_samp["HOLE_ID"]
    brgi_sample["location_uid"] = ags3_samp["HOLE_ID"] + "_" + ags3_samp["project_uid"]
    brgi_sample["sample_source_id"] = ags3_samp["sample_id"]
    brgi_sample["sample_uid"] = ags3_samp["sample_id"] + "_" + ags3_samp["project_uid"]
    brgi_sample["depth_to_top"] = ags3_samp["SAMP_TOP"]
    brgi_sample["depth_to_base"] = ags3_samp["SAMP_BASE"]

    return brgi_sample  # type: ignore


@pa.check_types(lazy=True)
def ags3_in_situ_to_brgi_in_situ(
    group_name: str, ags3_in_situ: pd.DataFrame, project_uid: str
) -> DataFrame[BaseInSitu]:
    """Maps AGS 3 in-situ measurement data to Bedrock's in-situ data schema.

    Args:
        group_name (str): The AGS 3 group name.
        ags3_data (pd.DataFrame): The AGS 3 data.
        project_uid (str): The project uid.

    Returns:
        DataFrame[BaseInSitu]: The Bedrock in-situ data.
    """
    brgi_in_situ = ags3_in_situ
    brgi_in_situ["project_uid"] = project_uid
    brgi_in_situ["location_uid"] = ags3_in_situ["HOLE_ID"] + "_" + project_uid

    top_depth = f"{group_name}_TOP"
    base_depth = f"{group_name}_BASE"

    if group_name == "CDIA":
        top_depth = "CDIA_CDEP"
    elif group_name == "FLSH":
        top_depth = "FLSH_FROM"
        base_depth = "FLSH_TO"
    elif group_name == "CORE":
        base_depth = "CORE_BOT"
    elif group_name == "HDIA":
        top_depth = "HDIA_HDEP"
    elif group_name == "PTIM":
        top_depth = "PTIM_DEP"
    elif group_name == "IVAN":
        top_depth = "IVAN_DPTH"
    elif group_name == "STCN":
        top_depth = "STCN_DPTH"
    elif group_name == "POBS" or group_name == "PREF":
        top_depth = "PREF_TDEP"
    elif group_name == "DREM":
        top_depth = "DREM_DPTH"
    elif group_name == "PRTD" or group_name == "PRTG" or group_name == "PRTL":
        top_depth = "PRTD_DPTH"
    elif group_name == "IPRM":
        if top_depth not in ags3_in_situ.columns:
            print(
                "\n🚨 CAUTION: The IPRM group in this AGS 3 file does not contain a 'IPRM_TOP' heading!",
                "🚨 CAUTION: Making the 'IPRM_BASE' heading the 'depth_to_top'...",
                sep="\n",
                end="\n\n",
            )
            top_depth = "IPRM_BASE"
            base_depth = "None"

    brgi_in_situ["depth_to_top"] = ags3_in_situ[top_depth]
    brgi_in_situ["depth_to_base"] = ags3_in_situ.get(base_depth)

    return brgi_in_situ  # type: ignore


@pa.check_types(lazy=True)
def generate_sample_ids_for_ags3(
    ags3_with_samp: DataFrame[BaseSAMP],
) -> DataFrame[Ags3SAMP]:
    ags3_with_samp["sample_id"] = (
        ags3_with_samp["SAMP_REF"].astype(str)
        + "_"
        + ags3_with_samp["SAMP_TYPE"].astype(str)
        + "_"
        + ags3_with_samp["SAMP_TOP"].astype(str)
        + "_"
        + ags3_with_samp["HOLE_ID"].astype(str)
    )
    # try:
    #     # SAMP_REF really should not be able to be null... Right?
    #     # Maybe SAMP_REF can be null when the
    #     Ags3SAMP_REF.validate(ags3_samp)
    #     print(
    #         "Generating unique sample IDs for AGS 3 data: 'sample_id'='{SAMP_REF}_{HOLE_ID}'"
    #     )
    #     ags3_samp["sample_id"] = (
    #         ags3_samp["SAMP_REF"].astype(str) + "_" + ags3_samp["HOLE_ID"].astype(str)
    #     )
    # except pa.errors.SchemaError as exc:
    #     print(f"🚨 CAUTION: The AGS 3 SAMP group contains rows without SAMP_REF:\n{exc}")

    #     if "non-nullable series 'SAMP_REF'" in str(exc):
    #         print(
    #             "\nTo ensure unique sample IDs: 'sample_id'='{SAMP_REF}_{SAMP_TOP}_{HOLE_ID}'\n"
    #         )
    #         ags3_samp["sample_id"] = (
    #             ags3_samp["SAMP_REF"].astype(str)
    #             + "_"
    #             + ags3_samp["SAMP_TOP"].astype(str)
    #             + "_"
    #             + ags3_samp["HOLE_ID"].astype(str)
    #         )

    return ags3_with_samp  # type: ignore
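For reference, a sketch of how the removed ags3_db_to_no_gis_brgi_db() was called in 0.2.4. The toy PROJ and HOLE groups below are made up for illustration; whether they pass the pandera schema checks depends on bedrock_ge.gi.ags.schemas and bedrock_ge.gi.schemas, which are not shown in this hunk.

import pandas as pd
from pyproj import CRS

from bedrock_ge.gi.ags.transform import ags3_db_to_no_gis_brgi_db

# A minimal AGS 3 "database": one PROJ row and one HOLE (GI location).
ags3_db = {
    "PROJ": pd.DataFrame({"PROJ_ID": ["P001"]}),
    "HOLE": pd.DataFrame(
        {
            "HOLE_ID": ["BH-1"],
            "HOLE_TYPE": ["CP"],
            "HOLE_NATE": [836000.0],  # easting
            "HOLE_NATN": [817000.0],  # northing
            "HOLE_GL": [5.0],         # ground level elevation
            "HOLE_FDEP": [20.0],      # final depth
        }
    ),
}

# Returns a dict of Bedrock tables without GIS geometry, e.g. 'Project' and 'Location'.
brgi_db = ags3_db_to_no_gis_brgi_db(ags3_db, crs=CRS("EPSG:2326"))
print(list(brgi_db.keys()))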
bedrock_ge/gi/ags/validate.py
DELETED
@@ -1,25 +0,0 @@
import pandas as pd


def check_ags_proj_group(ags_proj: pd.DataFrame) -> bool:
    """Checks if the AGS 3 or AGS 4 PROJ group is correct.

    Args:
        ags_proj (pd.DataFrame): The DataFrame with the PROJ group.

    Raises:
        ValueError: If AGS 3 of AGS 4 PROJ group is not correct.

    Returns:
        bool: Returns True if the AGS 3 or AGS 4 PROJ group is correct.
    """
    if len(ags_proj) != 1:
        raise ValueError("The PROJ group must contain exactly one row.")

    project_id = ags_proj["PROJ_ID"].iloc[0]
    if not project_id:
        raise ValueError(
            'The project ID ("PROJ_ID" in the "PROJ" group) is missing from the AGS data.'
        )

    return True
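The removed check_ags_proj_group() is a small guard around the PROJ group; a usage sketch based on the deleted source above (the single-row PROJ frame is a made-up example):

import pandas as pd

from bedrock_ge.gi.ags.validate import check_ags_proj_group

# Exactly one row with a non-empty PROJ_ID: returns True.
assert check_ags_proj_group(pd.DataFrame({"PROJ_ID": ["P001"]}))

# Anything else raises a ValueError, e.g. an empty PROJ group:
try:
    check_ags_proj_group(pd.DataFrame({"PROJ_ID": []}))
except ValueError as err:
    print(err)  # The PROJ group must contain exactly one row.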
bedrock_ge/gi/brgi-schema.json
DELETED
@@ -1,36 +0,0 @@
{
  "Location": {
    "attributes": {},
    "geometry_type": "Point / 3D LineString",
    "children": {
      "MaterialClassification": {
        "attributes": {},
        "geometry_type": "3D LineString"
      },
      "SPT": {
        "attributes": {},
        "geometry_type": "3D Point"
      },
      "RQD": {
        "attributes": {},
        "geometry_type": "3D LineString"
      },
      "OtherInSituTests": {
        "attributes": {},
        "geometry_type": "3D Point or 3D LineString"
      },
      "Sample": {
        "attributes": {},
        "geometry_type": "3D Point",
        "children": {
          "grainSizeDistribution": {},
          "atterbergLimits": {},
          "oedometerTest": {},
          "triaxialTest": {},
          "unconfinedCompressiveStrength": {},
          "otherLabTests": {}
        }
      }
    }
  }
}
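The deleted brgi-schema.json sketches the intended Bedrock GI table hierarchy (Location with in-situ children such as SPT and RQD, and lab tests nested under Sample). A minimal sketch of reading that hierarchy from the 0.2.4 wheel using only the standard library:

import json
from importlib.resources import files

# Load the JSON that shipped inside bedrock_ge/gi/ in the 0.2.4 wheel.
schema = json.loads(files("bedrock_ge.gi").joinpath("brgi-schema.json").read_text())

# Tables nested under 'Location': MaterialClassification, SPT, RQD, OtherInSituTests, Sample
print(list(schema["Location"]["children"].keys()))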
bedrock_ge/gi/concatenate.py
DELETED
@@ -1,38 +0,0 @@
from typing import Dict, Union

import geopandas as gpd
import pandas as pd


def concatenate_databases(
    db1: Dict[str, Union[pd.DataFrame, gpd.GeoDataFrame]],
    db2: Dict[str, Union[pd.DataFrame, gpd.GeoDataFrame]],
) -> Dict[str, pd.DataFrame]:
    """Concatenates two dictionaries of DataFrames into one dict of DataFrames.

    The function concatenates the pandas DataFrames of the second dict of
    DataFrames to the first dict of DataFrames for the keys they have in common.
    Keys that are unique to either dictionary will be included in the final
    concatenated dictionary.

    Args:
        db1 (Dict[str, pd.DataFrame]): A dictionary of pandas DataFrames, i.e. a database.
        db2 (Dict[str, pd.DataFrame]): A dictionary of pandas DataFrames, i.e. a database.

    Returns:
        concatenated_dict (Dict[str, pd.DataFrame]): A dictionary of concatenated pandas DataFrames.
    """
    # Create a new dict to store the concatenated dataframes
    concatenated_dict = {key: df.dropna(axis=1, how="all") for key, df in db1.items()}

    # Iterate over the keys in the second dict
    for key, df in db2.items():
        df = df.dropna(axis=1, how="all")
        # If the key is also in the first dict, concatenate the dataframes
        if key in db1:
            concatenated_dict[key] = pd.concat([db1[key], df], ignore_index=True)
        # If the key is not in the first dict, just add it to the new dict
        else:
            concatenated_dict[key] = df

    return concatenated_dict
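A small illustration of the removed concatenate_databases(), which merged two dicts of (Geo)DataFrames table by table; the toy single-column tables are made up for this example:

import pandas as pd

from bedrock_ge.gi.concatenate import concatenate_databases

db1 = {"Location": pd.DataFrame({"location_uid": ["BH-1_P001"]})}
db2 = {
    "Location": pd.DataFrame({"location_uid": ["BH-2_P002"]}),
    "Sample": pd.DataFrame({"sample_uid": ["S1_BH-2_P002"]}),
}

merged = concatenate_databases(db1, db2)
print(list(merged.keys()))      # ['Location', 'Sample']
print(len(merged["Location"]))  # 2: the shared 'Location' tables are stacked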
bedrock_ge/gi/gis_geometry.py
DELETED
@@ -1,282 +0,0 @@
from typing import Dict, Tuple, Union

import geopandas as gpd
import numpy as np
import pandas as pd
from pyproj import Transformer
from pyproj.crs import CRS
from shapely.geometry import LineString, Point

# TODO: change function type hints, such that pandera checks the dataframes against the Bedrock schemas


def calculate_gis_geometry(
    no_gis_brgi_db: Dict[str, Union[pd.DataFrame, gpd.GeoDataFrame]],
    verbose: bool = True,
) -> Dict[str, gpd.GeoDataFrame]:
    """Calculates GIS geometry for tables in a Bedrock Ground Investigation database.

    This function processes a dictionary of DataFrames containing Ground Investigation (GI) data,
    adding appropriate GIS geometry to each table. It handles both 2D and 3D geometries,
    including vertical boreholes and sampling locations.

    Args:
        no_gis_brgi_db (Dict[str, Union[pd.DataFrame, gpd.GeoDataFrame]]): Dictionary containing
            the Bedrock GI database tables without GIS geometry. Keys are table names,
            values are either pandas DataFrames or GeoDataFrames.
        verbose (bool, optional): Whether to print progress information. Defaults to True.

    Returns:
        Dict[str, gpd.GeoDataFrame]: Dictionary containing the Bedrock GI database tables
            with added GIS geometry. All tables are converted to GeoDataFrames with
            appropriate CRS and geometry columns.

    Raises:
        ValueError: If the projects in the database use different Coordinate Reference Systems (CRS).

    Note:
        The function performs the following operations:

        1. Verifies all projects use the same CRS
        2. Calculates GIS geometry for the 'Location' table
        3. Creates a 'LonLatHeight' table for 2D visualization
        4. Processes 'Sample' table if present
        5. Processes all tables starting with "InSitu_"
    """
    # Make sure that the Bedrock database is not changed outside this function.
    brgi_db = no_gis_brgi_db.copy()

    if verbose:
        print("Calculating GIS geometry for the Bedrock GI database tables...")

    # Check if all projects have the same CRS
    if not brgi_db["Project"]["crs_wkt"].nunique() == 1:
        raise ValueError(
            "All projects must have the same CRS (Coordinate Reference System).\n"
            "Raise an issue on GitHub in case you need to be able to combine GI data that was acquired in multiple different CRS's."
        )

    crs = CRS.from_wkt(brgi_db["Project"]["crs_wkt"].iloc[0])

    # Calculate GIS geometry for the 'Location' table
    if verbose:
        print("Calculating GIS geometry for the Bedrock GI 'Location' table...")
    brgi_db["Location"] = calculate_location_gis_geometry(brgi_db["Location"], crs)

    # Create the 'LonLatHeight' table.
    # The 'LonLatHeight' table makes it easier to visualize the GIS geometry on 2D maps,
    # because vertical lines are often very small or completely hidden in 2D.
    # This table only contains the 3D of the GI locations at ground level,
    # in WGS84 (Longitude, Latitude, Height) coordinates.
    if verbose:
        print(
            "Creating 'LonLatHeight' table with GI locations in WGS84 geodetic coordinates...",
            "  WGS84 geodetic coordinates: (Longitude, Latitude, Ground Level Ellipsoidal Height)",
            sep="\n",
        )
    brgi_db["LonLatHeight"] = create_lon_lat_height_table(brgi_db["Location"], crs)

    # Create GIS geometry for tables that have In-Situ GIS geometry.
    # These are the 'Sample' table and 'InSitu_...' tables.
    # These tables are children of the Location table,
    # i.e. have the 'Location' table as the parent table.
    if "Sample" in brgi_db.keys():
        if verbose:
            print("Calculating GIS geometry for the Bedrock GI 'Sample' table...")
        brgi_db["Sample"] = calculate_in_situ_gis_geometry(
            brgi_db["Sample"], brgi_db["Location"], crs
        )

    for table_name, table in brgi_db.items():
        if table_name.startswith("InSitu_"):
            if verbose:
                print(
                    f"Calculating GIS geometry for the Bedrock GI '{table_name}' table..."
                )
            brgi_db[table_name] = calculate_in_situ_gis_geometry(
                table, brgi_db["Location"], crs
            )

    return brgi_db


def calculate_location_gis_geometry(
    brgi_location: Union[pd.DataFrame, gpd.GeoDataFrame], crs: CRS
) -> gpd.GeoDataFrame:
    """Calculates GIS geometry for a set of Ground Investigation locations.

    Args:
        brgi_location (Union[pd.DataFrame, gpd.GeoDataFrame]): The GI locations to calculate GIS geometry for.
        crs (pyproj.CRS): The Coordinate Reference System (CRS) to use for the GIS geometry.

    Returns:
        gpd.GeoDataFrame: The GIS geometry for the given GI locations, with additional columns:
            - longitude: The longitude of the location in the WGS84 CRS.
            - latitude: The latitude of the location in the WGS84 CRS.
            - wgs84_ground_level_height: The height of the ground level of the location in the WGS84 CRS.
            - elevation_at_base: The elevation at the base of the location.
            - geometry: The GIS geometry of the location.
    """
    # Calculate Elevation at base of GI location
    brgi_location["elevation_at_base"] = (
        brgi_location["ground_level_elevation"] - brgi_location["depth_to_base"]
    )

    # Make a gpd.GeoDataFrame from the pd.DataFrame by creating GIS geometry
    brgi_location = gpd.GeoDataFrame(
        brgi_location,
        geometry=brgi_location.apply(
            lambda row: LineString(
                [
                    (row["easting"], row["northing"], row["ground_level_elevation"]),
                    (row["easting"], row["northing"], row["elevation_at_base"]),
                ]
            ),
            axis=1,
        ),
        crs=crs,
    )

    # Calculate WGS84 geodetic coordinates
    brgi_location[["longitude", "latitude", "wgs84_ground_level_height"]] = (
        brgi_location.apply(
            lambda row: calculate_wgs84_coordinates(
                from_crs=crs,
                easting=row["easting"],
                northing=row["northing"],
                elevation=row["ground_level_elevation"],
            ),
            axis=1,
            result_type="expand",
        )
    )

    return brgi_location


def calculate_wgs84_coordinates(
    from_crs: CRS, easting: float, northing: float, elevation: Union[float, None] = None
) -> Tuple[float, float, (float | None)]:
    """Transforms coordinates from an arbitrary Coordinate Reference System (CRS) to the WGS84 CRS, which is the standard for geodetic coordinates.

    Args:
        from_crs (pyproj.CRS): The pyproj.CRS object of the CRS to transform from.
        easting (float): The easting coordinate of the point to transform.
        northing (float): The northing coordinate of the point to transform.
        elevation (float or None, optional): The elevation of the point to
            transform. Defaults to None.

    Returns:
        Tuple[float, float, (float | None)]: A tuple containing the longitude, latitude
            and WGS84 height of the transformed point, in that order.
            The height is None if no elevation was given, or if the provided CRS doesn't
            have a proper datum defined.
    """
    transformer = Transformer.from_crs(from_crs, 4326, always_xy=True)
    if elevation:
        lon, lat, wgs84_height = transformer.transform(easting, northing, elevation)
    else:
        lon, lat = transformer.transform(easting, northing)
        wgs84_height = None

    return (lon, lat, wgs84_height)


def create_lon_lat_height_table(
    brgi_location: gpd.GeoDataFrame, crs: CRS
) -> gpd.GeoDataFrame:
    """Creates a GeoDataFrame with GI locations in WGS84 (lon, lat, height) coordinates.

    The 'LonLatHeight' table makes it easier to visualize the GIS geometry on 2D maps,
    because vertical lines are often very small or completely hidden in 2D. This table
    only contains the 3D point of the GI locations at ground level, in WGS84 (Longitude,
    Latitude, Height) coordinates. Other attributes, such as the location type, sample
    type, geology description, etc., can be attached to this table by joining, i.e.
    merging those tables on the location_uid key.

    Args:
        brgi_location (GeoDataFrame): The GeoDataFrame with the GI locations.
        crs (CRS): The Coordinate Reference System of the GI locations.

    Returns:
        gpd.GeoDataFrame: The 'LonLatHeight' GeoDataFrame.
    """
    lon_lat_height = gpd.GeoDataFrame(
        brgi_location[
            [
                "project_uid",
                "location_uid",
            ]
        ],
        geometry=brgi_location.apply(
            lambda row: Point(
                row["longitude"], row["latitude"], row["wgs84_ground_level_height"]
            ),
            axis=1,
        ),
        crs=4326,
    )
    return lon_lat_height


def calculate_in_situ_gis_geometry(
    brgi_in_situ: Union[pd.DataFrame, gpd.GeoDataFrame],
    brgi_location: Union[pd.DataFrame, gpd.GeoDataFrame],
    crs: CRS,
) -> gpd.GeoDataFrame:
    """Calculates GIS geometry for a set of Ground Investigation in-situ data.

    Args:
        brgi_in_situ (Union[pd.DataFrame, gpd.GeoDataFrame]): The in-situ data to calculate GIS geometry for.
        brgi_location (Union[pd.DataFrame, gpd.GeoDataFrame]): The location data to merge with the in-situ data.
        crs (CRS): The Coordinate Reference System of the in-situ data.

    Returns:
        gpd.GeoDataFrame: The GIS geometry for the given in-situ data, with additional columns:
            - elevation_at_top: The elevation at the top of the in-situ data.
            - elevation_at_base: The elevation at the base of the in-situ data.
            - geometry: The GIS geometry of the in-situ data.
    """
    location_child = brgi_in_situ.copy()

    # Merge the location data into the in-situ data to get the location coordinates
    location_child = pd.merge(
        location_child,
        brgi_location[
            ["location_uid", "easting", "northing", "ground_level_elevation"]
        ],
        on="location_uid",
        how="left",
    )

    # Calculate the elevation at the top of the Sample or in-situ test
    location_child["elevation_at_top"] = (
        location_child["ground_level_elevation"] - location_child["depth_to_top"]
    )
    brgi_in_situ["elevation_at_top"] = location_child["elevation_at_top"]

    # Calculate the elevation at the base of the Sample or in-situ test
    if "depth_to_base" in location_child.columns:
        location_child["elevation_at_base"] = (
            location_child["ground_level_elevation"] - location_child["depth_to_base"]
        )
        brgi_in_situ["elevation_at_base"] = location_child["elevation_at_base"]

    # Create the in-situ data as a GeoDataFrame with LineString GIS geometry for
    # Samples or in-situ tests that have an elevation at the base of the Sample or in-situ test.
    brgi_in_situ = gpd.GeoDataFrame(
        brgi_in_situ,
        geometry=location_child.apply(
            lambda row: LineString(
                [
                    (row["easting"], row["northing"], row["elevation_at_top"]),
                    (row["easting"], row["northing"], row["elevation_at_base"]),
                ]
            )
            if "elevation_at_base" in row and not np.isnan(row["elevation_at_base"])
            else Point((row["easting"], row["northing"], row["elevation_at_top"])),
            axis=1,
        ),
        crs=crs,
    )
    return brgi_in_situ
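For reference, the removed calculate_wgs84_coordinates() helper wrapped pyproj's Transformer to express a projected point as WGS84 longitude/latitude/height; a sketch with arbitrary example values in the Hong Kong 1980 Grid (EPSG:2326):

from pyproj.crs import CRS

from bedrock_ge.gi.gis_geometry import calculate_wgs84_coordinates

lon, lat, height = calculate_wgs84_coordinates(
    from_crs=CRS("EPSG:2326"),  # Hong Kong 1980 Grid, arbitrary example CRS
    easting=836000.0,
    northing=817000.0,
    elevation=5.0,
)
print(lon, lat, height)  # WGS84 longitude, latitude and (possibly approximate) height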
bedrock_ge-0.2.4.dist-info/RECORD
DELETED
@@ -1,21 +0,0 @@
bedrock_ge/__init__.py,sha256=9zw1B8qBymbOqc4p2OJ3e8chmXEihHSBt1Fs1BtnvkQ,89
bedrock_ge/plot.py,sha256=C95aj8CXjFVZRGYYBssJMm5MyljLbdt_TKyvmQyWZBE,149
bedrock_ge/gi/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
bedrock_ge/gi/brgi-schema.json,sha256=XaumYqouiflu4Nc8ChyFSHpmpJW4YPG0hsyeSxkuIWQ,850
bedrock_ge/gi/concatenate.py,sha256=ewPXau86yDTGE92DlbNlQ7gprBHkqBottzHsPbXbEg0,1519
bedrock_ge/gi/gis_geometry.py,sha256=QXNjWIGANh_M035Lbc94eOZnNry9f778E8wg_7KgAQo,11669
bedrock_ge/gi/schemas.py,sha256=ZA2wFQOevXtN57XglY-M70TzbZY2RyLHJDRUkmz47_M,2871
bedrock_ge/gi/sqlmodels.py,sha256=_h3H9UP91I_1Ya_SZuL6gZbqL7uNCd5Y-u-yTf7CNto,2253
bedrock_ge/gi/validate.py,sha256=riUa9AaHngJDX0VtUrOEKKfr-sZxybrGqJFmq4kRFnA,7080
bedrock_ge/gi/write.py,sha256=eCfijylreJ5oNMJqqx_k-Y30FNIHys2NccHilrFm0X4,4319
bedrock_ge/gi/ags/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
bedrock_ge/gi/ags/ags3_data_dictionary.json,sha256=Wx20_oJRdAlzEo-cKD6FgN9B9zOMDTcsp5dgc8QWofI,188588
bedrock_ge/gi/ags/ags4_data_dictionary.json,sha256=XE5XJNo8GBPZTUPgvVr3QgO1UfEIAxzlSeXi-P1VLTs,609670
bedrock_ge/gi/ags/read.py,sha256=FCeMX1R7IGcZY0heqrHPzs13rytelkmmFaJRklPB7EM,7114
bedrock_ge/gi/ags/schemas.py,sha256=y36n9SCKqFfoIQ_7-MTEdfArA5vAqZdRpY3wC4fdjy4,7451
bedrock_ge/gi/ags/transform.py,sha256=NeeR6e3awDf3ITWSxAUW1bXspUXaj-dM7xi1g-Ppxhc,9988
bedrock_ge/gi/ags/validate.py,sha256=ZfBS7AP_CTguLQCbGeyy4Krz32th2yCmtdG9rvX0MOU,703
bedrock_ge-0.2.4.dist-info/METADATA,sha256=T2dwdFOQ2OLLEN_vNY_HQ_UVfX_iq1ATlbgDLVmXk7E,11727
bedrock_ge-0.2.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
bedrock_ge-0.2.4.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
bedrock_ge-0.2.4.dist-info/RECORD,,
/bedrock_ge/gi/{ags/ags3_data_dictionary.json → ags3_data_dictionary.json}
File without changes

/bedrock_ge/gi/{ags/ags4_data_dictionary.json → ags4_data_dictionary.json}
File without changes

{bedrock_ge-0.2.4.dist-info → bedrock_ge-0.3.0.dist-info}/WHEEL
File without changes

{bedrock_ge-0.2.4.dist-info → bedrock_ge-0.3.0.dist-info}/licenses/LICENSE
File without changes