forgeo-gmlib 0.6.2__cp312-cp312-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- forgeo/gmlib/GeologicalModel3D.py +758 -0
- forgeo/gmlib/__init__.py +9 -0
- forgeo/gmlib/_version.py +34 -0
- forgeo/gmlib/architecture/__init__.py +1 -0
- forgeo/gmlib/architecture/core.py +130 -0
- forgeo/gmlib/common.pyd +0 -0
- forgeo/gmlib/fault_network.py +171 -0
- forgeo/gmlib/geomodeller_data.py +101 -0
- forgeo/gmlib/geomodeller_project.py +396 -0
- forgeo/gmlib/myxmltools.py +30 -0
- forgeo/gmlib/pypotential2D.pyd +0 -0
- forgeo/gmlib/pypotential3D.pyd +0 -0
- forgeo/gmlib/tesselate.py +236 -0
- forgeo/gmlib/tesselate_deprecated.py +249 -0
- forgeo/gmlib/topography_reader.py +198 -0
- forgeo/gmlib/utils/__init__.py +0 -0
- forgeo/gmlib/utils/append_data.py +508 -0
- forgeo/gmlib/utils/export.py +45 -0
- forgeo/gmlib/utils/normalized_gradient.py +40 -0
- forgeo/gmlib/utils/tools.py +35 -0
- forgeo_gmlib-0.6.2.dist-info/METADATA +23 -0
- forgeo_gmlib-0.6.2.dist-info/RECORD +24 -0
- forgeo_gmlib-0.6.2.dist-info/WHEEL +5 -0
- forgeo_gmlib-0.6.2.dist-info/licenses/LICENSE +661 -0
|
@@ -0,0 +1,508 @@
|
|
|
1
|
+
#
|
|
2
|
+
# This file is part of gmlib. It is free software.
|
|
3
|
+
# You can redistribute it and/or modify it under the terms of the GNU Affero General Public License version 3.
|
|
4
|
+
#
|
|
5
|
+
|
|
6
|
+
import os
import shutil
import sys
from collections import defaultdict, namedtuple
from optparse import OptionParser

import numpy as np
from lxml import etree

from forgeo.gmlib import geomodeller_project as gmp
from forgeo.gmlib import myxmltools as mx
from forgeo.gmlib.geomodeller_project import geo, gml
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
def data_attribs(uid, provenance=None):
    """Build the XML attribute dict shared by contact and orientation nodes.

    :param uid: observation identifier (string)
    :param provenance: optional integer provenance rank, stored as a string
    """
    result = {"ObservationID": uid}
    if provenance is None:
        return result
    result["Provenance"] = "%d" % provenance
    return result
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def coordinates_node(pt):
    """Wrap a 2D point into a gml ``coordinates`` element (comma separated)."""
    x, y = pt[0], pt[1]
    return gml.coordinates(f"{x:f},{y:f}", cs=",", decimal=".", ts=" ")
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def create_contact_node(uid, provenance, pt, formation):
    """Build a ``geo.Interface`` element for a single contact point.

    :param uid: observation identifier
    :param provenance: optional integer provenance rank
    :param pt: (x, y) coordinates of the contact
    :param formation: formation (or fault) name the point belongs to
    """
    payload = (
        geo.Data(Name=formation),
        gml.LineString(coordinates_node(pt)),
    )
    return geo.Interface(*payload, **data_attribs(uid, provenance))
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def create_orientation_node(uid, provenance, pt, orientation, formation):
    """Build a ``geo.Foliation`` element for one orientation measurement.

    :param uid: observation identifier
    :param provenance: optional integer provenance rank
    :param pt: (x, y) coordinates of the measurement
    :param orientation: (dip, direction, polarity) with polarity one of
        "normal" / "reverse"
    :param formation: formation (or fault) name the measurement belongs to
    """
    dip, direction, polarity = orientation
    assert polarity in ("normal", "reverse")
    observation = geo.FoliationObservation(
        gml.Point(coordinates_node(pt)),
        Azimuth=f"{direction:f}",
        Dip=f"{dip:f}",
        NormalPolarity="true" if polarity == "normal" else "false",
    )
    return geo.Foliation(
        geo.Data(Name=formation),
        observation,
        **data_attribs(uid, provenance),
    )
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
# Lightweight records for the structural observations handled in this module.
Contact = namedtuple("Contact", ["uid", "provenance", "point", "attribution"])
# FIX: the typename used to be "Contact" (copy-paste error), which made
# repr() misleading and broke pickling round-trips for Orientation instances.
Orientation = namedtuple(
    "Orientation", ["uid", "provenance", "point", "orientation", "attribution"]
)
OrientationMeasurement = namedtuple(
    "OrientationMeasurement", ["dip", "direction", "polarity"]
)
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def extract_structural_roots(root):
    """Return the (interfaces, foliations) containers of the topography section.

    The project must hold exactly one topography section; missing containers
    are created on the fly.
    """
    sections = gmp.find_topography_sections(root)
    assert len(sections) == 1, "one and only one topography section"
    structural = mx.create_child_if_needed(sections[0], "Structural2DData")
    return (
        mx.create_child_if_needed(structural, "Interfaces"),
        mx.create_child_if_needed(structural, "Foliations"),
    )
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def select_data_to_remove(root, uids):
    """Collect (uid, parent, element) triples whose ObservationID is in *uids*.

    Both contact (Interface) and orientation (Foliation) elements are scanned;
    the caller is responsible for actually detaching the elements.
    """
    interfaces, foliations = extract_structural_roots(root)
    containers = (
        (interfaces, geo.Interface().tag),
        (foliations, geo.Foliation().tag),
    )
    selected = []
    for container, tag in containers:
        for node in container.findall(tag):
            obsid = node.attrib.get("ObservationID")
            if obsid is not None and obsid in uids:
                selected.append((obsid, container, node))
    return selected
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def locate_superimposed_data(root):
    """Group structural data sharing the same (formation, x, y) location.

    Returns a list of ``(key, [(nature, element), ...])`` pairs where *key* is
    ``(formation, x, y)`` and *nature* is the element's local tag name
    ("Interface" or "Foliation"), for every location holding more than one
    datum. At most one contact point per location is allowed.
    """
    interfaces, foliations = extract_structural_roots(root)
    # (formation, x, y) -> list of elements found at that exact location
    data = defaultdict(list)
    for dataset, tag in [
        (interfaces, geo.Interface().tag),
        (foliations, geo.Foliation().tag),
    ]:
        for child in dataset.findall(tag):
            formation = child.find(geo.Data().tag).attrib["Name"]
            # the coordinates element may be nested (LineString / Point)
            xy = child.find(".//" + gml.coordinates().tag)
            assert xy is not None
            # "cs" is the coordinate separator declared on the element itself
            x, y = [float(s.strip()) for s in xy.text.split(xy.attrib["cs"])]
            data[(formation, x, y)].append(child)
    superimposed = []
    for key, elts in data.items():
        if len(elts) > 1:
            # local tag name distinguishes contacts from orientations
            natures = [etree.QName(elt).localname for elt in elts]
            # NOTE(review): message wording has a typo ("for at") - kept as-is
            assert natures.count("Interface") <= 1, (
                "several superimposed contact points for at {1:f}, {2:f} for "
                "formation {0}"
            ).format(*key)
            superimposed.append(
                (key, [(nature, elt) for nature, elt in zip(natures, elts)])
            )
    return superimposed
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def update_provenances(provenances, new):
    """Register provenance names in *provenances*, ranking them after the max.

    Each distinct name in *new* receives a fresh rank starting right after the
    current maximum rank; names already present are re-ranked (original
    behaviour, kept for compatibility). The mapping is updated in place and
    also returned for convenience.

    FIX: ``max()`` used to raise ValueError on an empty mapping; ranks now
    start at 0 in that case. Names are also sorted before ranking so rank
    assignment is deterministic (set iteration order is not).
    """
    rankmax = max(provenances.values(), default=-1) + 1
    provenances.update({p: rankmax + k for k, p in enumerate(sorted(set(new)))})
    return provenances
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def filter_superimposed_orientations(superimposed, average_provenance, joinsep="+"):
    """Merge orientations measured at the same location into their average.

    :param superimposed: output of ``locate_superimposed_data``
    :param average_provenance: provenance rank assigned to averaged nodes
    :param joinsep: separator used to join the merged observation ids
    :returns: (new_orientations, removed_orientations) - replacement Foliation
        elements and the original elements they supersede; the caller performs
        the actual tree surgery.
    """
    new_orientations = []
    removed_orientations = []
    for key, elts in superimposed:
        natures = [elt[0] for elt in elts]
        # only act when several orientation measurements coincide
        if natures.count("Foliation") > 1:
            uids = []
            dipdir = []  # (dip, azimuth) tuples to be averaged component-wise
            polarities = set()
            for i, elt in enumerate([v[1] for v in elts]):
                if natures[i] == "Foliation":
                    uids.append(elt.attrib["ObservationID"])
                    obs = elt.find(geo.FoliationObservation().tag)
                    dipdir.append(
                        tuple(float(obs.attrib[s]) for s in ["Dip", "Azimuth"])
                    )
                    polarities.add(obs.attrib["NormalPolarity"])
                    removed_orientations.append(elt)
            # averaging only makes sense if all measurements agree on polarity
            assert len(polarities) == 1, "inconsistent polarities for data {}".format(
                " ".join(uids)
            )
            # NOTE(review): plain arithmetic mean of (dip, azimuth) - assumes
            # azimuths do not straddle the 0/360 wrap-around; confirm upstream
            dipdir = np.mean(dipdir, axis=0)
            polarity = {"true": "normal", "false": "reverse"}[polarities.pop().lower()]
            formation, x, y = key
            new_orientations.append(
                create_orientation_node(
                    joinsep.join(uids),
                    average_provenance,
                    (x, y),
                    (*dipdir, polarity),
                    formation,
                )
            )
    return new_orientations, removed_orientations
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
def append_data(root, data, provenances):
    """Append Contact and Orientation items to the project tree.

    :param root: project xml root element
    :param data: iterable of Contact / Orientation records
    :param provenances: provenance name -> rank mapping used to translate
        each item's provenance into its integer rank
    :returns: *root*, modified in place
    """
    formation_names = gmp.read_formation_colors(root).keys()
    fault_names = gmp.extract_raw_faults_data(root).keys()
    # a name must unambiguously designate either a formation or a fault
    assert not formation_names & fault_names
    attributions = formation_names | fault_names
    interfaces, foliations = extract_structural_roots(root)
    for item in data:
        attribution = item.attribution
        assert attribution in attributions, (
            f"not a valid attribution: {attribution} for {item.uid}"
        )
        rank = None
        if item.provenance is not None:
            rank = provenances[item.provenance]
        if type(item) is Contact:
            node = create_contact_node(item.uid, rank, item.point, attribution)
            interfaces.append(node)
        else:
            assert type(item) is Orientation
            node = create_orientation_node(
                item.uid, rank, item.point, item.orientation, attribution
            )
            foliations.append(node)
    return root
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def collect_data(datafile, idsep, idcols, skip, has_provenance, csvsep=";"):
    """Parse a csv data file into Contact / Orientation records.

    Expected columns: *idcols* id columns, an optional provenance column
    (when *has_provenance*), x, y, a nature column ("interface" or
    "orientation"), an attribution column, and - for orientations only -
    dip, direction and polarity (empty polarity defaults to "normal").

    :param datafile: path to the csv file
    :param idsep: separator used to join the id columns into a unique id
    :param idcols: number of leading id columns (> 0)
    :param skip: number of header rows to skip (>= 0)
    :param has_provenance: whether a provenance column follows the id columns
    :param csvsep: column separator
    :returns: (uids, data) - the set of unique ids and the list of records
    :raises AssertionError: on duplicate ids, unknown nature, or out-of-range
        dip/direction values
    :raises ValueError: when dip/direction cannot be parsed as floats
    """
    assert skip >= 0
    assert idcols > 0
    with open(datafile) as f:
        # keep non-empty lines only, already split into columns
        rows = [
            stripped.split(csvsep)
            for stripped in (line.strip() for line in f)
            if stripped
        ]
    data = []
    uids = set()
    provenance = None
    for item in rows[skip:]:
        uid = idsep.join(item[:idcols])
        assert uid not in uids, "duplicate item: " + uid
        uids.add(uid)
        pos = idcols
        if has_provenance:
            provenance = item[pos]
            pos += 1
        point = tuple(float(s) for s in item[pos : pos + 2])
        pos += 2
        nature = item[pos]
        assert nature in ("interface", "orientation"), (
            f"unknown nature: {nature} for {uid}"
        )
        pos += 1
        attribution = item[pos]
        if nature == "interface":
            data.append(Contact(uid, provenance, point, attribution))
        else:
            pos += 1
            dip, direction, polarity = item[pos:]
            # FIX: dropped a pointless try/except that only re-raised the
            # same ValueError; conversion errors propagate unchanged
            dip, direction = float(dip), float(direction)
            assert 0 <= dip <= 90, f"{uid}: not a dip value: {dip:f}"
            assert 0 <= direction < 360, f"{uid}: not a dir value: {direction:f}"
            if not polarity:
                polarity = "normal"
            data.append(
                Orientation(
                    uid,
                    provenance,
                    point,
                    OrientationMeasurement(dip, direction, polarity),
                    attribution,
                )
            )
    return uids, data
|
|
239
|
+
|
|
240
|
+
|
|
241
|
+
def setup_parser():
    """Build the command line parser for this script.

    Returns an ``optparse.OptionParser`` configured with the data-file,
    output, logging, csv-layout, provenance and correction options.
    (FIX: several typos in the user-facing help strings were corrected.)
    """
    parser = OptionParser(
        usage="""usage: %prog path_to_model_file

This scripts modifies a geomodeller xml project file."""
    )

    parser.add_option(
        "-i",
        "-d",
        "--data",
        action="store",
        type="string",
        dest="datafile",
        default="",
        help="path to the data file as csv",
    )
    parser.add_option(
        "--csv",
        action="store",
        type="string",
        dest="csvsep",
        default=";",
        help="symbol used to separate columns in csv (defaults to ;)",
    )
    parser.add_option(
        "-o",
        "--output",
        action="store",
        type="string",
        dest="output_file",
        default="",
        help="path to the new geomodeller file",
    )
    parser.add_option(
        "-l",
        "--log",
        action="store",
        type="string",
        dest="logfilename",
        default="",
        help="path to a file to log warnings - this deactivates quiet",
    )
    parser.add_option(
        "--skip",
        action="store",
        type="int",
        dest="header_rows",
        default=1,
        help="number of header lines (to be skipped): defaults to 1",
    )
    parser.add_option(
        "--id-sep",
        action="store",
        type="string",
        dest="idsep",
        default="-",
        help="separator used to generate unique id from id columns (defaults to -)",
    )
    parser.add_option(
        "--id-cols",
        action="store",
        type="int",
        dest="idcols",
        default=4,
        help="number of columns to be used to generate unique id (defaults to 4)",
    )
    parser.add_option(
        "-q",
        "--quiet",
        action="store_true",
        dest="quiet",
        default=False,
        help="does not output warnings and information",
    )
    parser.add_option(
        "--no-provenance",
        action="store_true",
        dest="no_provenance",
        default=False,
        help=(
            "provenances are given in the column, after the "
            "first idcols columns, "
            "if no provenance is given, use this flag"
        ),
    )
    parser.add_option(
        "-p",
        "--provenance",
        action="append",
        type="string",
        dest="provenances",
        default=[],
        help=(
            "adds a blank provenance field, "
            "other provenance fields can be collected from provenance "
            "column (cf. --no-provenance)"
        ),
    )
    parser.add_option(
        "-c",
        "--correct",
        action="store_true",
        dest="correct",
        default=False,
        help=(
            "correct superimposed contact and orientation data by slightly offsetting "
            "orientation data (this is due to a bug in GeoModeller that affects "
            "potential fields with one contact data with superimposed orientation data "
            "and only one formation... typically such as planar faults - the correction "
            "will affect all superimposed data and orientation, regardless of these "
            "conditions)"
        ),
    )

    return parser
|
|
357
|
+
|
|
358
|
+
|
|
359
|
+
def load_project_xml(projectfile):
    """Parse a GeoModeller xml project file and return its (tree, root)."""
    assert os.path.exists(projectfile), "could not find file: " + projectfile
    tree = gmp.extract_tree(projectfile)
    root = tree.getroot()
    return tree, root
|
|
363
|
+
|
|
364
|
+
|
|
365
|
+
def load_project(projectfile, additional_provenances):
    """Load a project and its provenance ranks.

    The returned mapping contains the ranks read from the project's
    ProvenanceGroup, extended with *additional_provenances* and the reserved
    "Average" provenance (used for averaged orientations).
    """
    tree, root = load_project_xml(projectfile)
    provenance_group = root.find(geo.ProvenanceGroup().tag)
    provenances = {}
    for item in provenance_group.findall(geo.Provenance().tag):
        provenances[item.attrib["id"]] = int(item.attrib["rank"])
    update_provenances(provenances, [*additional_provenances, "Average"])
    return (tree, root), provenances
|
|
374
|
+
|
|
375
|
+
|
|
376
|
+
def process(project, provenances, options, projectfile=None):
    """Apply the csv data file to the project and write the result.

    :param project: (tree, root) pair as returned by ``load_project``
    :param provenances: provenance name -> rank mapping, updated in place and
        written back into the project's ProvenanceGroup
    :param options: parsed command line options (see ``setup_parser``)
    :param projectfile: path of the original project file; required to make a
        backup when ``options.output_file`` is empty (new, optional parameter)

    Fixes over the original version:
    - ``provenance_group`` was a local of ``load_project`` and raised
      NameError here; it is now looked up again from *root*.
    - the output condition was inverted: the project file was backed up and
      overwritten precisely when an explicit output file WAS given.
    - warnings written to the log file lacked newlines.
    - warnings are actually printed when no log file is given and not quiet
      (the original branch was an empty ``pass``).
    """
    warnings = []

    def warn(message):
        warnings.append("WARNING: " + message)

    tree, root = project

    datafile = options.datafile
    if not datafile.strip():
        warn("no data file provided!")
    else:
        assert os.path.exists(datafile), "could not find file: " + datafile
        uids, data = collect_data(
            datafile,
            options.idsep,
            options.idcols,
            options.header_rows,
            not options.no_provenance,
            options.csvsep,
        )
        # drop any pre-existing observation that is redefined in the data file
        for uid, parent, child in select_data_to_remove(root, uids):
            warn(f"{uid} is overwritten")
            parent.remove(child)
        update_provenances(
            provenances,
            [item.provenance for item in data if item.provenance is not None],
        )
        append_data(root, data, provenances)

    # Average duplicate orientation data
    joinsep = "+"
    new_orientations, removed_orientations = filter_superimposed_orientations(
        locate_superimposed_data(root), provenances["Average"], joinsep=joinsep
    )
    orientations = root.find(".//" + geo.Foliations().tag)
    assert orientations is not None
    for elt in removed_orientations:
        orientations.remove(elt)
    for elt in new_orientations:
        warn(
            "average orientation replacing: "
            + " and ".join(elt.attrib["ObservationID"].split(joinsep))
        )
        orientations.append(elt)

    if options.correct:
        _offset_superimposed_orientations(root, warnings)

    # rewrite the provenance group with the updated ranks
    provenance_group = root.find(geo.ProvenanceGroup().tag)
    assert provenance_group is not None
    provenance_group.clear()
    for key, rank in provenances.items():
        provenance_group.append(geo.Provenance(id=key, rank="%d" % rank))

    outputfile = options.output_file.strip()
    if not outputfile:
        # no explicit output: overwrite the project in place, keeping a backup
        if projectfile is None:
            raise ValueError(
                "no output file given and the original project path is unknown"
            )
        backup = projectfile + ".bak"
        warn(f"overwriting existing project file (backup in: {backup})")
        shutil.copy(projectfile, backup)
        outputfile = projectfile
    if not outputfile.endswith(".xml"):
        outputfile += ".xml"
    tree.write(
        outputfile,
        xml_declaration=True,
        encoding="utf-8",
        method="xml",
        pretty_print=True,
    )

    logfilename = options.logfilename.strip()
    if logfilename:
        with open(logfilename, "w") as logfile:
            logfile.writelines(w + "\n" for w in warnings)
    elif not options.quiet:
        for w in warnings:
            print(w)


def _offset_superimposed_orientations(root, warnings):
    """Slightly offset orientations superimposed on a contact point.

    Works around a GeoModeller bug affecting potential fields built from one
    contact with a superimposed orientation (typically planar faults). The
    orientation is moved 1 mm along its azimuth direction.
    """
    epsilon = 1e-3  # 1 mm !!!
    for key, elts in locate_superimposed_data(root):
        assert len(elts) == 2
        natures = [elt[0] for elt in elts]
        assert natures.count("Interface") == 1
        assert natures.count("Foliation") == 1
        orientation = elts[1][1] if natures[0] == "Interface" else elts[0][1]
        observation = orientation.find(geo.FoliationObservation().tag)
        assert observation is not None
        # azimuth converted to trigonometric convention (radians, East = 0)
        azimuth = (90.0 - float(observation.attrib["Azimuth"])) * 2 * np.pi / 360.0
        u = np.array([np.cos(azimuth), np.sin(azimuth)])
        coordinates = observation.find(".//" + gml.coordinates().tag)
        formation, x, y = key
        cs = coordinates.attrib["cs"]
        assert (x, y) == tuple(float(s) for s in coordinates.text.split(cs))
        P = np.array([x, y]) + epsilon * u
        coordinates.text = f"{P[0]:f}{cs}{P[1]:f}"
        warnings.append(
            "slightly offsetting {} orientation for data {} at ({:f}, {:f})".format(
                formation, orientation.attrib["ObservationID"], x, y
            )
        )
|
|
474
|
+
|
|
475
|
+
|
|
476
|
+
def append_crs_info(projectfile, crsinfo):
|
|
477
|
+
tree, root = load_project_xml(projectfile)
|
|
478
|
+
gmcrs, qgiscrs = crsinfo
|
|
479
|
+
if gmcrs is not None:
|
|
480
|
+
assert "CoordSystem" in root.attrib
|
|
481
|
+
assert root.attrib["CoordSystem"] == gmcrs
|
|
482
|
+
if qgiscrs is not None:
|
|
483
|
+
assert "QGisCRS" not in root.attrib or root.attrib["QGisCRS"] == qgiscrs
|
|
484
|
+
if gmcrs is None:
|
|
485
|
+
root.attrib["CoordSystem"] = "local"
|
|
486
|
+
root.attrib["QGisCRS"] = qgiscrs
|
|
487
|
+
# Overwriting file
|
|
488
|
+
tree.write(
|
|
489
|
+
projectfile,
|
|
490
|
+
xml_declaration=True,
|
|
491
|
+
encoding="utf-8",
|
|
492
|
+
method="xml",
|
|
493
|
+
pretty_print=True,
|
|
494
|
+
)
|
|
495
|
+
|
|
496
|
+
|
|
497
|
+
if __name__ == "__main__":
    parser = setup_parser()
    options, args = parser.parse_args()
    # FIX: exit with a usage message instead of an IndexError on args[0]
    if not args:
        parser.error("missing path to the geomodeller project file")
    projectfile = args[0]
    if len(args) > 1:
        # FIX: sys was used here without being imported (NameError)
        print(
            "WARNING: The following arguments will not be processed: "
            + " ".join(args[1:]),
            file=sys.stderr,
        )
    project, provenances = load_project(projectfile, options.provenances)
    process(project, provenances, options)
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
#
|
|
2
|
+
# This file is part of gmlib. It is free software.
|
|
3
|
+
# You can redistribute it and/or modify it under the terms of the GNU Affero General Public License version 3.
|
|
4
|
+
#
|
|
5
|
+
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from forgeo.gmlib.GeologicalModel3D import GeologicalModel
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
class Data_dumper:
    """Write ';'-separated rows to a stream; the header row is written on
    construction and fixes the number of columns for padding."""

    def __init__(self, f, *headers):
        # target stream and expected column count
        self.f = f
        self.line_length = len(headers)
        self(*headers)

    def _line(self, items):
        """Format one row, padding short rows with empty trailing fields."""
        missing = self.line_length - len(items)
        if missing > 0:
            items = items + ("",) * missing
        cells = [item if isinstance(item, str) else f"{item:.18e}" for item in items]
        return ";".join(cells)

    def __call__(self, *items):
        # one row per call; print appends the newline
        print(self._line(items), file=self.f)
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def fault_data_from_model(filepath):
    """Dump fault potential-field data of a model to '<stem>-faults-data.csv'.

    For each fault, interface points are written as (name, x, y, z) rows and
    gradient data as (name, x, y, z, gx, gy, gz) rows.
    """
    filename = Path(filepath)
    if not filename.exists():
        raise OSError(str(filename) + " not found!")
    faults_data = GeologicalModel.extract_data(str(filename))["faults_data"]

    with Path(f"{filename.stem}-faults-data.csv").open("w") as f:
        dump = Data_dumper(f, "name", "x", "y", "z", "gx", "gy", "gz")
        for name, fdata in faults_data.items():
            potdata = fdata.potential_data
            interfaces = potdata.interfaces
            # a fault is described by a single interface
            assert len(interfaces) == 1
            for point in interfaces[0]:
                dump(name, *point)
            gradients = potdata.gradients
            for point, gradient in zip(gradients.locations, gradients.values):
                dump(name, *point, *gradient)
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import numpy as np
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def normalized_gradient(azimuth, dip, polarity, degrees=False):
    """
    Compute a GeoModeller unit gradient that corresponds to dip dir measures.

    :param azimuth: dip directions (clockwise from North), 1d array-like
    :param dip: dip angles in [0, pi/2] (or [0, 90] with *degrees*)
    :param polarity: +1 / -1 per measurement
    :param degrees: boolean to tell if angular data are measured in degrees,
        defaults to False
    :returns: (n, 3) array of unit gradients, oriented by polarity
    """
    azimuth = np.asarray(azimuth, dtype=np.double)
    dip = np.asarray(dip, dtype=np.double)
    polarity = np.asarray(polarity, np.double)
    if degrees:
        deg2rad = np.pi / 180.0
        azimuth = azimuth * deg2rad
        dip = dip * deg2rad
    assert np.all((azimuth >= 0) & (azimuth <= 2 * np.pi))
    # switch to trigonometric convention: North at pi/2, counter clockwise
    trig_az = 0.5 * np.pi - azimuth
    assert np.all((dip >= 0) & (dip <= 0.5 * np.pi))
    assert np.all((polarity == -1) | (polarity == 1))
    assert azimuth.shape == dip.shape == polarity.shape
    assert azimuth.ndim == 1
    cos_az, sin_az = np.cos(trig_az), np.sin(trig_az)
    cos_dip, sin_dip = np.cos(dip), np.sin(dip)
    # unit dip vector (down-slope)
    u = np.column_stack([cos_dip * cos_az, cos_dip * sin_az, -sin_dip])
    # unit horizontal vector normal to the dip direction
    v = np.column_stack([sin_az, -cos_az, np.zeros_like(sin_az)])
    assert np.allclose(np.sum(u * v, axis=1), 0)
    # the gradient is normal to the measured plane, oriented by polarity
    return polarity[:, None] * np.cross(u, v, axis=1)
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
if __name__ == "__main__":
    # smoke test: four representative measurements given in degrees
    dip = [45, 90, 0, 45]
    azimuth = [0, 90, 45, 45]
    polarity = [1, 1, 1, -1]
    # result is discarded: this only checks the call runs without assertions
    g = normalized_gradient(azimuth, dip, polarity, degrees=True)
|
|
@@ -0,0 +1,35 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
@dataclass
class BBox3:
    """Axis-aligned 3D bounding box; a None bound means 'not set'."""

    xmin: float = None
    xmax: float = None
    ymin: float = None
    ymax: float = None
    zmin: float = None
    zmax: float = None

    # backward compatibility
    def __getitem__(self, name):
        """Dict-like, case-insensitive access to the bounds (e.g. box["XMIN"]).

        :raises KeyError: when *name* is not a bound field
        """
        try:
            return getattr(self, name.lower())
        except AttributeError:
            msg = f"BBox3 has no {name} field"
            # 'from None': the AttributeError is an implementation detail
            raise KeyError(msg) from None

    @staticmethod
    def _axis_is_consistent(low, high):
        """One axis is consistent when a bound is unset or min <= max."""
        return low is None or high is None or low <= high

    @property
    def is_consistent(self):
        """True when every axis is unset or ordered (min <= max).

        FIX: the original compared a None bound with a float when only one
        side of an axis was set, raising TypeError; half-open axes are now
        accepted as consistent.
        """
        return (
            self._axis_is_consistent(self.xmin, self.xmax)
            and self._axis_is_consistent(self.ymin, self.ymax)
            and self._axis_is_consistent(self.zmin, self.zmax)
        )

    @property
    def origin(self):
        """Corner with minimal coordinates: (xmin, ymin, zmin)."""
        return self.xmin, self.ymin, self.zmin

    @property
    def extent(self):
        """Size along each axis; requires all six bounds to be set."""
        return self.xmax - self.xmin, self.ymax - self.ymin, self.zmax - self.zmin
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: forgeo-gmlib
|
|
3
|
+
Version: 0.6.2
|
|
4
|
+
Summary: A python robust implicit geological surfaces meshing algorithm.
|
|
5
|
+
Author-Email: BRGM <forgeo@brgm.fr>
|
|
6
|
+
Maintainer-Email: Simon LOPEZ <s.lopez@brgm.fr>
|
|
7
|
+
License-File: LICENSE
|
|
8
|
+
Project-URL: repository, https://gitlab.com/brgm/geomodelling/model/gmlib
|
|
9
|
+
Requires-Python: >=3.9
|
|
10
|
+
Requires-Dist: numpy>=1.17
|
|
11
|
+
Requires-Dist: pyyaml
|
|
12
|
+
Requires-Dist: lxml
|
|
13
|
+
Requires-Dist: verstr>=0.1.2
|
|
14
|
+
Requires-Dist: forgeo>0.6
|
|
15
|
+
Provides-Extra: test
|
|
16
|
+
Requires-Dist: pytest; extra == "test"
|
|
17
|
+
Requires-Dist: matplotlib; extra == "test"
|
|
18
|
+
Requires-Dist: pypng==0.0.19; extra == "test"
|
|
19
|
+
Requires-Dist: vtkwriters>=0.0.3; extra == "test"
|
|
20
|
+
Requires-Dist: pycgal>=0.2.5; extra == "test"
|
|
21
|
+
Description-Content-Type: text/markdown
|
|
22
|
+
|
|
23
|
+
You may start with the [installation instructions](INSTALL.md).
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
forgeo/gmlib/__init__.py,sha256=sCO79cvFzrModr9rQU1YUCthE1k7D-up8Kt5c0V9t2A,270
|
|
2
|
+
forgeo/gmlib/_version.py,sha256=zNq0oieKkEzMOS9fgjJHGtz2NLxFbWoSAzfaIwmw_b4,746
|
|
3
|
+
forgeo/gmlib/architecture/__init__.py,sha256=nE8jJjGJaKuJKCyuf6QLfrj8xPj8IqGKeH2VXP3FMzY,21
|
|
4
|
+
forgeo/gmlib/architecture/core.py,sha256=hemZUQvt1CNSuHHQil4NsNn3Dwrif-magDpmdwHTDCw,4124
|
|
5
|
+
forgeo/gmlib/common.pyd,sha256=Gye1ADtY0m09R7kr9SpnF2jmZRWX7ZkKmRzGx61RIuY,304640
|
|
6
|
+
forgeo/gmlib/fault_network.py,sha256=J7-uM2_x9UlG3gkdBqwGYb5rikrFwbzFiL09oLkAQMI,6452
|
|
7
|
+
forgeo/gmlib/GeologicalModel3D.py,sha256=90J_2D007ddnRKU9E-Ge6xFiphVhFPETxYJulQCByjs,28022
|
|
8
|
+
forgeo/gmlib/geomodeller_data.py,sha256=MouDrnEt7EC90iTdEGJFeJEhDrXzdTjTXtbKIeEVmQs,3145
|
|
9
|
+
forgeo/gmlib/geomodeller_project.py,sha256=Gs2ZDsLNWDSrYy0Cq6eorM7g8q826vQj-LHYiS5ay1A,14650
|
|
10
|
+
forgeo/gmlib/myxmltools.py,sha256=mKq_9zWDsqFvXjZybx88Ez0NSUx-LBPJnbWiD8pg50E,805
|
|
11
|
+
forgeo/gmlib/pypotential2D.pyd,sha256=gM-P2BS2DLjqzbOWfpofrUjXcOjo-VBXIhfvFnvXnhY,1200128
|
|
12
|
+
forgeo/gmlib/pypotential3D.pyd,sha256=gnp2NV1CWuFsHiSGTIX5jtr4gxTWlcRc4pOk-FO2jSc,1236992
|
|
13
|
+
forgeo/gmlib/tesselate.py,sha256=pYPZwMA27doPuRwhpP8yJklmC8xDvA1QyhuJIj4Np-s,8416
|
|
14
|
+
forgeo/gmlib/tesselate_deprecated.py,sha256=TznC3o6f42BMXeyHDioy52VLu6KSHUIIwpUYwA_zZaM,9643
|
|
15
|
+
forgeo/gmlib/topography_reader.py,sha256=LdKjv6gWX8rRyHOUL5YTubOIFczFCQsTwTbHKqPHuAw,6555
|
|
16
|
+
forgeo/gmlib/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
17
|
+
forgeo/gmlib/utils/append_data.py,sha256=4kpJW_5Uz6hKLcN7436GJVlmeF0c2BvmWuj7QZz7Zts,17317
|
|
18
|
+
forgeo/gmlib/utils/export.py,sha256=eG0sRkvRjXH6SjqbrTi5SHxAyxFBJh3pxZPF5GHvQAo,1505
|
|
19
|
+
forgeo/gmlib/utils/normalized_gradient.py,sha256=t2GqZx4d6Z8Z_8QEKxEGp1NfTydiCzUkOfFR5rniiPg,1486
|
|
20
|
+
forgeo/gmlib/utils/tools.py,sha256=eHrXkUZJfLGAjnm3dCCeMh5g64nsO6r8vbPOUB71IcE,1001
|
|
21
|
+
forgeo_gmlib-0.6.2.dist-info/METADATA,sha256=V3oEjwQhyWINs4XnO5-e4vpPei-763go8J7XMuo0tjA,806
|
|
22
|
+
forgeo_gmlib-0.6.2.dist-info/WHEEL,sha256=chqeLhPBtPdrOoreR34YMcofSk3yWDQhkrsDJ2n48LU,106
|
|
23
|
+
forgeo_gmlib-0.6.2.dist-info/licenses/LICENSE,sha256=4O7bphXVzRuYavtsWzpLGuM3E-fp3HTRna7F4yIfnS4,35184
|
|
24
|
+
forgeo_gmlib-0.6.2.dist-info/RECORD,,
|