forgeo-gmlib 0.6.2 (cp312-cp312-win_amd64.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- forgeo/gmlib/GeologicalModel3D.py +758 -0
- forgeo/gmlib/__init__.py +9 -0
- forgeo/gmlib/_version.py +34 -0
- forgeo/gmlib/architecture/__init__.py +1 -0
- forgeo/gmlib/architecture/core.py +130 -0
- forgeo/gmlib/common.pyd +0 -0
- forgeo/gmlib/fault_network.py +171 -0
- forgeo/gmlib/geomodeller_data.py +101 -0
- forgeo/gmlib/geomodeller_project.py +396 -0
- forgeo/gmlib/myxmltools.py +30 -0
- forgeo/gmlib/pypotential2D.pyd +0 -0
- forgeo/gmlib/pypotential3D.pyd +0 -0
- forgeo/gmlib/tesselate.py +236 -0
- forgeo/gmlib/tesselate_deprecated.py +249 -0
- forgeo/gmlib/topography_reader.py +198 -0
- forgeo/gmlib/utils/__init__.py +0 -0
- forgeo/gmlib/utils/append_data.py +508 -0
- forgeo/gmlib/utils/export.py +45 -0
- forgeo/gmlib/utils/normalized_gradient.py +40 -0
- forgeo/gmlib/utils/tools.py +35 -0
- forgeo_gmlib-0.6.2.dist-info/METADATA +23 -0
- forgeo_gmlib-0.6.2.dist-info/RECORD +24 -0
- forgeo_gmlib-0.6.2.dist-info/WHEEL +5 -0
- forgeo_gmlib-0.6.2.dist-info/licenses/LICENSE +661 -0
@@ -0,0 +1,758 @@
#
# This file is part of gmlib. It is free software.
# You can redistribute it and/or modify it under the terms of the GNU Affero General Public License version 3.
#

from collections import namedtuple
from dataclasses import dataclass
from pathlib import Path
from typing import Any

import numpy as np
import yaml

from forgeo.gmlib import geomodeller_project
from forgeo.gmlib import pypotential3D as pypotential
from forgeo.gmlib.common import CovarianceData
from forgeo.gmlib.geomodeller_data import GradientData
from forgeo.gmlib.pypotential3D import (
    ConstantElevationSurface,
    ElevationRaster,
    ImplicitTopography,
    Polyline,
    VerticalSection,
)
from forgeo.gmlib.topography_reader import ImplicitDTM, ImplicitHorizontalPlane
from forgeo.gmlib.utils.tools import BBox3


@dataclass
class Intersection:
    point: Any
    field: Any
    value: float = 0.0
    fault: str = None


PileInfo = namedtuple("PileInfo", ["name", "color", "relation"])
Box = namedtuple("Box", ["xmin", "ymin", "zmin", "xmax", "ymax", "zmax"])
CRSInfo = namedtuple("CRSInfo", ["geomodeller", "qgis"])
SeriesInfo = namedtuple(
    "SeriesInfo", ["field", "drifts", "active_faults", "interfaces"]
)

scalardt = pypotential.scalar_type()


# FIXME: to be put elsewhere
def image_ratio(section, nu):
    assert section.umax > section.umin
    nv = int(((section.vmax - section.vmin) / (section.umax - section.umin)) * nu)
    assert nv > 0, (
        f"z ratio is too small: {(section.vmax - section.vmin) / (section.umax - section.umin)}"
    )
    return nu, nv


def covariance_data(potdata):
    covmodel = potdata.covariance_model
    return CovarianceData(
        covmodel.gradient_variance,
        covmodel.range,
        covmodel.gradient_nugget,
        covmodel.potential_nugget,
    )


def gradient_data(potdata):
    graddata = potdata.gradients
    return pypotential.gradient_data(graddata.locations, graddata.values)


def interface_data(potdata):
    return pypotential.interface_data(potdata.interfaces)


def drift_basis(potdata):
    drift_order = potdata.covariance_model.drift_order
    return pypotential.drift_basis(drift_order)


# FIXME this should be set elsewhere
# Version with np.array here is suboptimal
def point_between(p1, p2, field, v, precision=0.01):
    squared_precision = precision**2
    # Point = pypotential.Point
    # p1 = pypotential.Point(p1)
    # p2 = pypotential.Point(p2)
    p1 = np.asarray(p1, dtype=scalardt)
    p2 = np.asarray(p2, dtype=scalardt)
    assert len(p1.shape) == 1
    assert len(p2.shape) == 1
    # print('-> point between', p1, p2)
    v1, v2 = field(p1), field(p2)
    if v1 == v:
        return p1
    if v2 == v:
        return p2
    # print('field values:', v1, v2, 'looking for', v)
    # FIXME: minimum can't be found (might scan between potentials)
    if (v - v1) * (v - v2) > 0:
        return None
    # previous = Point(p1)  # copy is mandatory here
    previous = np.copy(p1)  # copy is mandatory here
    while True:
        # (v - v1)/(v2 - v1) < 1 so p is in [p1, p2] and we have convergence
        # print('-> TEST:', p1)
        # print('-> TEST:', v1, v2, p2 - p1)
        # if v1 == v:
        #     return p1
        # if v2 == v:
        #     return p2
        # if v1 == v2 == v:
        #     return None
        p = p1 + float((v - v1) / (v2 - v1)) * (p2 - p1)
        # squared_length = (p - previous).squared_length
        squared_length = np.sum((p - previous) ** 2)
        if squared_length < squared_precision:
            # print('-> FOUND', p)
            return p
        vp = field(p)
        # print('field value:', vp)
        # previous = Point(p)  # copy is mandatory here
        previous = np.copy(p)  # copy is mandatory here
        if (v - vp) * (v - v2) <= 0:
            # Look for between vp and v2
            v1 = vp
            p1 = p
        elif (v - vp) * (v - v1) <= 0:
            # Look for between v1 and vp
            v2 = vp
            p2 = p
        else:
            raise AssertionError()
            return None
    return None


def distance(p1, p2):
    x = p1[0] - p2[0]
    y = p1[1] - p2[1]
    z = p1[2] - p2[2]
    return np.sqrt(x**2 + y**2 + z**2)


def extract_data_from_legacy_geomodeller_file(
    filename, scalardt, skip_topography=False
):
    return geomodeller_project.extract_project_data(
        filename, scalardt, skip_topography=skip_topography
    )


def make_potential(cache, name, potdata, drifts=None):
    params = (
        covariance_data(potdata),
        gradient_data(potdata),
        interface_data(potdata),
        drifts or drift_basis(potdata),
    )
    cached = cache[name]
    if cached is None:
        field = pypotential.potential_field(*params)
        cache[name] = field.data().beta()
    else:
        field = pypotential.potential_field(*params, cached)
    return field


def fault_potential(cache, name, data):
    potdata = data.potential_data
    assert len(potdata.interfaces) == 1, "inconsistent fault potential field"
    return make_potential(cache, name, potdata)

def finite_fault(cache, name, data):
    assert not data.infinite
    potdata = data.potential_data
    assert len(potdata.interfaces) == 1, "inconsistent fault potential field"
    points = np.reshape(potdata.interfaces[0], (-1, 3))
    center = None
    if data.center_type == "mean_center":
        center = np.mean(points, axis=0)
    elif data.center_type == "databox_center":
        center = np.array(
            [0.5 * (points[:, axis].min() + points[:, axis].max()) for axis in range(3)]
        )
    else:
        assert type(data.center_type) is tuple, "unknown fault center type " + str(
            data.center_type
        )
        center = np.array(data.center_type)
    center = np.asarray(center, dtype=scalardt)
    assert center is not None
    field = fault_potential(cache, name, data)
    # The following is as in GeoModeller: might be improved?
    # It is based on the idea that the fault geometry is close
    # to a (planar!) disk.
    g = pypotential.gradient(field)(center)
    g.shape = (3,)
    assert np.linalg.norm(g) > 0
    g /= np.linalg.norm(g)
    u = v = None
    if g[0] == g[1] == 0:  # tangent plane is horizontal
        u = np.array([1, 0, 0], dtype=scalardt)  # somewhat arbitrary
    else:
        vertical = np.array([0, 0, 1], dtype=scalardt)
        u = np.cross(vertical, g)
    u /= np.linalg.norm(u)
    v = np.cross(u, g)  # the basis is not direct!
    v /= np.linalg.norm(v)
    g *= data.influence_radius
    u *= data.lateral_extent
    v *= data.vertical_extent
    ellipsoid = pypotential.Ellipsoid(
        pypotential.Point(*center),
        (pypotential.Vector(*g), pypotential.Vector(*u), pypotential.Vector(*v)),
    )
    return field, ellipsoid

def recast(data):
    box, pile, faults_data, topography, formations = data
    box_center = np.array(
        [0.5 * (box[s + "min"] + box[s + "max"]) for s in ("X", "Y", "Z")],
        dtype=scalardt,
    )
    L = max([box[s + "max"] - box[s + "min"] for s in ("X", "Y", "Z")])
    assert L > 0

    def recast_axis(i, x):
        return (x - box_center[i]) / L

    def recast_z(z):
        return recast_axis(2, z)

    def recast_P(P):
        return (np.asarray(P, dtype=scalardt) - box_center) / L

    if type(topography) is ImplicitHorizontalPlane:
        topography = ImplicitHorizontalPlane(recast_z(topography.z))
    else:
        assert type(topography) is ImplicitDTM
        topography = ImplicitDTM(
            recast_P(topography.origin), topography.steps / L, recast_z(topography.z)
        )
    new_box = {}
    for i, s in enumerate(("X", "Y", "Z")):
        new_box[s + "min"] = recast_axis(i, box[s + "min"])
        new_box[s + "max"] = recast_axis(i, box[s + "max"])

    def rescale_potential_data(handle):
        if handle.potential_data is not None:
            potdata = handle.potential_data
            potdata.interfaces = [
                recast_P(interface) for interface in potdata.interfaces
            ]
            gradients = potdata.gradients
            gradients = GradientData(
                recast_P(gradients.locations),
                gradients.values,  # * L, # FIXME: scale effect *L
            )
            potdata.covariance_model.range /= L

    for serie in pile.all_series:
        if serie.potential_data is not None:
            rescale_potential_data(serie)
    for _name, fault in faults_data.items():
        rescale_potential_data(fault)
    return box, pile, faults_data, topography, formations

def compute_fault_stops_on(model):
    relations = {}
    for fault, data in model.faults_data.items():
        assert len(data.potential_data.interfaces) == 1
        points = data.potential_data.interfaces[0]
        limits = []
        for limit in data.stops_on:
            # the potential field associated with the limit is used to determine
            # on which side of it the points defining the fault lie
            v = np.mean(model.faults[limit](points))
            assert v != 0, f"inconsistent limit potential in {fault} stops on {limit}"
            limits.append((limit, -1 if v < 0 else 1))
        relations[fault] = limits
    return relations

class CacheDir:
    def __init__(self, data):
        filepath = data["filepath"]
        path = filepath.parent / (filepath.stem + ".cache")
        path.mkdir(exist_ok=True)
        origin = path / "origin"
        valid = origin.exists()
        if valid:
            assert origin.is_file()
            with origin.open() as f:
                try:
                    _path = Path(f.readline().strip())
                    _timestamp = int(f.readline().strip())
                    valid = _path == filepath and _timestamp == data["timestamp"]
                except Exception:
                    valid = False
        if not valid:
            with origin.open("w") as f:
                print(data["filepath"].as_posix(), file=f)
                print(data["timestamp"], file=f)
        self.path = path
        self.valid = valid

    def _source_from_key(self, key):
        return self.path / f"{key}.npy"

    def __getitem__(self, key):
        if not self.valid:
            return None
        source = self._source_from_key(key)
        if not source.is_file():
            return None
        return np.load(source)

    def __setitem__(self, key, a):
        np.save(self._source_from_key(key), a)

class GeologicalModel:
    @staticmethod
    def extract_data(path, *, skip_topography=False, convert_to_yaml=None):
        # print()
        # print('Loading model from:', filename)
        # print()
        path = Path(path)
        assert path.is_file(), f"could not find {path.as_posix()}"
        if path.suffix == ".xml":
            data = extract_data_from_legacy_geomodeller_file(
                path, scalardt, skip_topography=skip_topography
            )
            if convert_to_yaml is not None:
                with open(convert_to_yaml, "w") as f:
                    print(yaml.dump(data), file=f)
        elif path.suffix == ".yaml":
            with path.open() as f:
                data = yaml.load(f)
        else:
            msg = "Unknown file extension."
            raise OSError(msg)
        assert "path" not in data
        data["filepath"] = path
        assert "timestamp" not in data
        # last modification time in nanoseconds
        data["timestamp"] = path.stat().st_mtime_ns
        return data

    def __init__(self, data, **kwargs):
        if type(data) is str:
            data = GeologicalModel.extract_data(data, **kwargs)
        cache = CacheDir(data)
        self.box = data["box"]
        self.crs = CRSInfo(*data["crs"])
        self.pile = data["pile"]
        assert self.pile.reference in ("top", "base")
        self.faults_data = data["faults_data"]
        self.topography = data["topography"]
        formations = data["formations"]
        faults = {}
        fault_ellipsoids = {}
        fault_drifts = {}
        faults_data = data["faults_data"]
        for name, data in faults_data.items():
            # print('Registering fault:', name)
            field = None
            ellipsoid = None
            if data.infinite:
                field = fault_potential(cache, f"fault-{name}", data)
            else:
                field, ellipsoid = finite_fault(cache, f"fault-{name}", data)
            faults[name] = pypotential.Fault(field)
            if ellipsoid is not None:
                faults[name].stops_on(ellipsoid)
                fault_ellipsoids[name] = ellipsoid
        for name, data in faults_data.items():
            fault = faults[name]
            filtered_limits = []
            for limit_name in data.stops_on:
                if limit_name not in faults:
                    pass
                else:
                    filtered_limits.append(limit_name)
            data.stops_on = filtered_limits
            for limit_name in data.stops_on:
                fault.stops_on(faults[limit_name])
        for name, fault in faults.items():
            if name in fault_ellipsoids:
                fault_drifts[name] = pypotential.make_finite_drift(
                    fault, fault_ellipsoids[name]
                )
            else:
                fault_drifts[name] = pypotential.make_drift(fault)
        fields = []
        values = []
        relations = []
        formation_names = [f.name for f in formations]
        pile_formations = []
        series_info = {}

        def register_pile_formation(formation):
            assert formation in formation_names, (
                "Unknown formation " + formation + " in stratigraphic column!"
            )
            pile_formations.append(formation)

        found_dummy_formation = False
        pile = self.pile
        for Sk, serie in enumerate(pile.all_series):
            potdata = serie.potential_data
            if potdata:
                active_faults = {}
                drifts = drift_basis(potdata)
                if serie.influenced_by_fault:
                    for name in serie.influenced_by_fault:
                        try:
                            drifts.append(fault_drifts[name])
                            active_faults[name] = len(drifts) - 1
                        except KeyError:
                            pass
                # print('Field for serie', serie.name, 'has', len(drifts), 'drifts.')
                # print('with covariance:', covariance_data(potdata))
                field = make_potential(cache, serie.name, potdata, drifts)
                interfaces = []
                for i, interface in enumerate(potdata.interfaces):
                    if len(interface) == 0:
                        pass
                    else:
                        mean_field_value = np.mean(field(interface))
                        values.append(mean_field_value)
                        formation = serie.formations[i]
                        interfaces.append(
                            (pile.reference + "-" + formation, mean_field_value)
                        )
                        fields.append(field)
                        register_pile_formation(formation)
                        relations.append(serie.relation)
                series_info[serie.name] = SeriesInfo(
                    field, drifts, active_faults, interfaces
                )
            else:

                def register_single_formation():
                    assert len(serie.formations) == 1
                    # print('registering dummy formation', serie.formations)
                    register_pile_formation(serie.formations[0])

                if (pile.reference == "base" and Sk == 0) or (
                    pile.reference == "top" and Sk == len(pile.all_series) - 1
                ):
                    register_single_formation()
                    found_dummy_formation = True
                else:
                    pass
        if not found_dummy_formation:
            dummy_formations = [f for f in formations if f.is_dummy]
            # when importing a GeoModeller project the DefaultCover dummy formation is not exported
            if len(dummy_formations) == 0:
                assert pile.reference in {"top", "base"}, "Unknown pile reference"
                dummy = geomodeller_project.Formation(
                    name="Default" + {"top": "Cover", "base": "Base"}[pile.reference],
                    color=(0.3,) * 3,
                    is_dummy=True,
                )
                formations.append(dummy)
                dummy_formations = [dummy]
            assert len(dummy_formations) == 1, "A dummy formation is needed!"
            if pile.reference == "base":
                pile_formations.insert(0, dummy_formations[0].name)
            else:
                pile_formations.append(dummy_formations[0].name)
        assert len(fields) == len(relations)
        assert len(fields) + 1 == len(pile_formations)
        self.fields = fields
        self.formations = formations
        self.formation_colors = {f.name: f.color for f in formations}
        self.pile_formations = pile_formations
        self.series_info = series_info
        self.values = values
        self.relations = relations
        self.faults = faults
        self.fault_ellipsoids = fault_ellipsoids
        self.fault_drifts = fault_drifts
        self.fault_stops_on = compute_fault_stops_on(self)

    def collect_pile_information(self):
        relations = list(self.relations)
        if self.pile.reference == "base":
            relations.insert(0, "onlap")
        else:
            relations.append("onlap")
        return [
            PileInfo(formation, self.formation_colors[formation], relation)
            for formation, relation in zip(self.pile_formations, relations)
        ]

    def nbformations(self):
        return len(self.pile_formations)

    def nbcontacts(self):
        # FIXME: this should be defined rigorously...
        return len(self.fields)

    def getbox(self):
        xmin, xmax = (self.box["Xmin"], self.box["Xmax"])
        ymin, ymax = (self.box["Ymin"], self.box["Ymax"])
        zmin, zmax = (self.box["Zmin"], self.box["Zmax"])
        return Box(xmin, ymin, zmin, xmax, ymax, zmax)

    def bbox(self):
        xmin, xmax = (self.box["Xmin"], self.box["Xmax"])
        ymin, ymax = (self.box["Ymin"], self.box["Ymax"])
        zmin, zmax = (self.box["Zmin"], self.box["Zmax"])
        return BBox3(xmin, xmax, ymin, ymax, zmin, zmax)

    def domain(self, x, y, z):
        return self.rank((x, y, z))

    def rank_without_topography(self, p):
        n = len(self.fields)
        j1 = 0
        j2 = n
        # walk the erode surfaces in decreasing order
        erosion_surfaces = [i for i in range(n) if self.relations[i] == "erode"]
        for i in reversed(erosion_surfaces):
            field = self.fields[i]
            vp = field(p)
            vi = self.values[i]
            if vp > vi:
                j1 = i
                break
            j2 = i
        # walk the onlap surfaces in increasing order within the interval [j1, j2]
        rank = j1
        assert j1 == 0 or (
            self.relations[j1] == "erode" and self.fields[j1](p) > self.values[j1]
        )
        rank = j1
        for i in range(j1, j2):
            field = self.fields[i]
            vp = field(p)
            vi = self.values[i]
            # when i = j1 > 0 the previous test guarantees vp > vi
            if vp < vi:
                break
            rank += 1
        # print("Which domain:", p[0], p[1], p[2], rank + 1)
        return rank + 1

    def rank(self, p, consider_topography=True):
        if consider_topography and self.topography(p) > 0:
            return 0
        return self.rank_without_topography(p)

    def intersect(
        self,
        p1,
        p2,
        consider_formations=True,
        consider_faults=False,
        consider_topography=True,
        precision=0.01,
    ):
        # print('-> intersect', p1, p2)
        p = point_between(p1, p2, self.topography, 0, precision)
        if p is not None:
            return Intersection(p, self.topography)
        # print('  no topo')
        # test fault potential fields
        if consider_faults:
            for name, fault in self.faults.items():
                p = point_between(p1, p2, fault, 0, precision)
                if p is not None:
                    if self.is_fault_point_valid(p, name, consider_topography):
                        return Intersection(p, fault, fault=name)
        # test potential fields
        # print('  no faults')
        if consider_formations:
            n = len(self.fields)
            for i in range(n):
                p = point_between(p1, p2, self.fields[i], self.values[i], precision)
                if p is not None:
                    if self.is_valid(p, i, consider_topography):
                        # print('  Found:', n, p, 'shape:', p.shape)
                        return Intersection(p, self.fields[i], self.values[i])
        # print('  Nothing!')
        return None

    def is_valid(self, p, rank, with_topography=True):
        if with_topography and self.topography(p) > 0:
            return False
        n = len(self.fields)
        for i in range(rank + 1, n):
            Ri = self.relations[i]
            if Ri == "erode":
                field = self.fields[i]
                vp = field(p)
                vi = self.values[i]
                if vp > vi:
                    return False
        R = self.relations[rank]
        if R == "onlap":
            for i in reversed(range(rank)):
                field = self.fields[i]
                vp = field(p)
                vi = self.values[i]
                if vp < vi:
                    return False
                Ri = self.relations[i]
                if Ri == "erode":
                    break
        return True

    def is_fault_point_valid(self, p, fault, with_topography=True):
        if with_topography and self.topography(p) > 0:
            return False
        if self.is_finite_fault(fault) and self.fault_ellipsoids[fault](p) >= 1:
            return False
        for limit_info in self.fault_stops_on[fault]:
            limit, side = limit_info
            if self.faults[limit](p) * side < 0:
                return False
        return True

    def rank_colors(self):
        result = []
        for formation in self.pile_formations:
            result.append(self.formation_colors[formation])
        return result

    def stats(self):
        result = []
        minimum_distances = []

        def register(name, data):
            ng = data.nb_gradients
            nc = data.nb_contact_points
            result.append(
                f"{name} has {ng:d} gradient values "
                f"and {nc:d} contact points which make {ng + 3 * nc} dof"
            )
            mdg, mdc = data.minimum_distances()
            result.append(f"\tminimum distances {mdg:f} {mdc:f}")
            minimum_distances.append((mdg, mdc))

        for serie, info in self.series_info.items():
            if info.field is None:
                result.append(serie + " has no potential field")
            else:
                register(f"serie {serie}", info.field.data())
        for fault, info in self.faults.items():
            register(f"fault {fault}", info.potential_field.data())
        md = np.array(minimum_distances)
        mdg = md[:, 0]
        mdc = md[:, 1]
        if np.any(mdg >= 0):
            result.append(
                f"Minimum distance between gradients: {np.min(mdg[mdg >= 0]):f}"
            )
        if np.any(mdg < 0):
            result.append(
                f"There are {np.sum(mdg < 0):d} series with a single gradient value!"
            )
        if np.any(mdg == 0):  # This shall not happen (kriging matrix would be singular)
            result.append(
                f"There are {np.sum(mdg == 0):d} series with coincident gradient data!"
            )
        if np.any(mdc >= 0):
            result.append(
                f"Minimum distance between contacts: {np.min(mdc[mdc >= 0]):f}"
            )
        if np.any(mdc < 0):
            result.append(
                f"There are {np.sum(mdc < 0):d} series with a single contact point!"
            )
        if np.any(mdc == 0):  # This shall not happen (kriging matrix would be singular)
            result.append(
                f"There are {np.sum(mdc == 0):d} series with coincident contact data!"
            )
        return "\n".join(result)

    def diagonal_section(self, flip=False):
        box = self.getbox()
        if flip:
            diagonal = Polyline([[box.xmin, box.ymax, 0], [box.xmax, box.ymin, 0]])
        else:
            diagonal = Polyline([[box.xmin, box.ymin, 0], [box.xmax, box.ymax, 0]])
        return VerticalSection(diagonal, box.zmin, box.zmax)

    def x_section(self, x=None, flip=False):
        box = self.getbox()
        x = x or 0.5 * (box.xmin + box.xmax)
        if flip:
            path = Polyline([[x, box.ymax, 0], [x, box.ymin, 0]])
        else:
            path = Polyline([[x, box.ymin, 0], [x, box.ymax, 0]])
        return VerticalSection(path, box.zmin, box.zmax)

    def y_section(self, y=None, flip=False):
        box = self.getbox()
        y = y or 0.5 * (box.ymin + box.ymax)
        if flip:
            path = Polyline([[box.xmax, y, 0], [box.xmin, y, 0]])
        else:
            path = Polyline([[box.xmin, y, 0], [box.xmax, y, 0]])
        return VerticalSection(path, box.zmin, box.zmax)

    def ranks_to_rgb_picture(self, ranks, atmopshere_color=(0, 0, 0)):
        picture = np.zeros((*ranks.shape, 3), dtype=np.uint8)
        pile = self.collect_pile_information()
        rank_color = [atmopshere_color] + [
            [int(col * 255) for col in formation.color] for formation in pile
        ]
        assert np.all((ranks >= 0) & (ranks < len(rank_color))), (
            "Inconsistency in domain indexing!"
        )
        for fi, color in enumerate(rank_color):
            picture[ranks == fi] = color
        return picture

    def is_finite_fault(self, name):
        return name in self.fault_ellipsoids

    @property
    def has_finite_faults(self):
        return any(name in self.fault_ellipsoids for name in self.faults)

    def rgb_picture(
        self,
        section,
        width,
        height=None,
        atmopshere_color=(0, 0, 0),
        return_ranks=False,
    ):
        if height is None:
            width, height = image_ratio(section, width)
        ranks = np.array([self.rank(p) for p in section.grid(width, height)])
        ranks.shape = width, height
        ranks = np.transpose(ranks)[::-1]
        picture = self.ranks_to_rgb_picture(ranks, atmopshere_color)
        if return_ranks:
            return picture, ranks
        return picture

    def topography_as_elevation_surface(self):
        topo = self.topography
        if hasattr(topo, "origin"):
            zmap = np.transpose(topo.z)[::-1]
            return ElevationRaster(topo.origin, topo.steps, zmap)
        return ConstantElevationSurface(topo.z)

    def implicit_topography(self):
        return ImplicitTopography(self.topography_as_elevation_surface())
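For orientation, here is a minimal usage sketch of the GeologicalModel class added above. It is not part of the package; the project filename is a placeholder, and it only calls methods visible in this diff (extract_data accepts a legacy GeoModeller .xml project or a .yaml dump).

# Minimal sketch; "my_project.xml" is a hypothetical path, not shipped with the wheel.
from forgeo.gmlib.GeologicalModel3D import GeologicalModel

model = GeologicalModel("my_project.xml")   # parses the project and builds the potential fields
box = model.getbox()                        # model extent as a Box namedtuple
# formation rank at the box center (0 means above the topography)
rank = model.domain(
    0.5 * (box.xmin + box.xmax),
    0.5 * (box.ymin + box.ymax),
    0.5 * (box.zmin + box.zmax),
)
section = model.x_section()                 # vertical section at mid-x
picture = model.rgb_picture(section, 400)   # (height, width, 3) uint8 array of formation colors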