LoopStructural 1.6.7__py3-none-any.whl → 1.6.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of LoopStructural might be problematic; see the package registry's advisory page for more details.
- LoopStructural/__init__.py +1 -0
- LoopStructural/datatypes/_bounding_box.py +69 -10
- LoopStructural/datatypes/_point.py +18 -11
- LoopStructural/datatypes/_structured_grid.py +37 -9
- LoopStructural/datatypes/_surface.py +3 -3
- LoopStructural/export/geoh5.py +4 -2
- LoopStructural/interpolators/__init__.py +1 -0
- LoopStructural/interpolators/_discrete_interpolator.py +18 -0
- LoopStructural/interpolators/_finite_difference_interpolator.py +64 -11
- LoopStructural/interpolators/_geological_interpolator.py +9 -0
- LoopStructural/interpolators/_interpolator_builder.py +98 -19
- LoopStructural/interpolators/_interpolator_factory.py +2 -3
- LoopStructural/interpolators/_surfe_wrapper.py +3 -0
- LoopStructural/interpolators/supports/_2d_base_unstructured.py +3 -0
- LoopStructural/interpolators/supports/_2d_structured_grid.py +3 -0
- LoopStructural/interpolators/supports/_3d_base_structured.py +28 -5
- LoopStructural/interpolators/supports/_3d_structured_grid.py +2 -0
- LoopStructural/interpolators/supports/_3d_unstructured_tetra.py +21 -13
- LoopStructural/interpolators/supports/_base_support.py +4 -0
- LoopStructural/interpolators/supports/_support_factory.py +12 -4
- LoopStructural/modelling/core/geological_model.py +5 -6
- LoopStructural/modelling/features/_base_geological_feature.py +9 -3
- LoopStructural/modelling/features/_cross_product_geological_feature.py +1 -2
- LoopStructural/modelling/features/_geological_feature.py +3 -5
- LoopStructural/modelling/features/_lambda_geological_feature.py +11 -1
- LoopStructural/modelling/features/_projected_vector_feature.py +1 -2
- LoopStructural/modelling/features/_unconformity_feature.py +0 -1
- LoopStructural/modelling/features/builders/_base_builder.py +4 -2
- LoopStructural/modelling/features/builders/_geological_feature_builder.py +21 -25
- LoopStructural/modelling/features/builders/_structural_frame_builder.py +9 -4
- LoopStructural/modelling/features/fault/_fault_segment.py +1 -1
- LoopStructural/modelling/features/fold/__init__.py +1 -3
- LoopStructural/modelling/features/fold/fold_function/_base_fold_rotation_angle.py +0 -1
- LoopStructural/modelling/features/fold/fold_function/_lambda_fold_rotation_angle.py +0 -1
- LoopStructural/modelling/features/fold/fold_function/_trigo_fold_rotation_angle.py +0 -1
- LoopStructural/modelling/input/process_data.py +4 -2
- LoopStructural/utils/_surface.py +2 -2
- LoopStructural/utils/_transformation.py +28 -13
- LoopStructural/utils/colours.py +3 -1
- LoopStructural/version.py +1 -1
- {LoopStructural-1.6.7.dist-info → loopstructural-1.6.9.dist-info}/METADATA +7 -6
- {LoopStructural-1.6.7.dist-info → loopstructural-1.6.9.dist-info}/RECORD +45 -45
- {LoopStructural-1.6.7.dist-info → loopstructural-1.6.9.dist-info}/WHEEL +1 -1
- {LoopStructural-1.6.7.dist-info → loopstructural-1.6.9.dist-info/licenses}/LICENSE +0 -0
- {LoopStructural-1.6.7.dist-info → loopstructural-1.6.9.dist-info}/top_level.txt +0 -0
LoopStructural/__init__.py
CHANGED
|
@@ -20,6 +20,7 @@ ch.setLevel(logging.WARNING)
|
|
|
20
20
|
loggers = {}
|
|
21
21
|
from .modelling.core.geological_model import GeologicalModel
|
|
22
22
|
from .interpolators._api import LoopInterpolator
|
|
23
|
+
from .interpolators import InterpolatorBuilder
|
|
23
24
|
from .datatypes import BoundingBox
|
|
24
25
|
from .utils import log_to_console, log_to_file, getLogger, rng, get_levels
|
|
25
26
|
|
|
@@ -43,7 +43,7 @@ class BoundingBox:
|
|
|
43
43
|
if maximum is None and nsteps is not None and step_vector is not None:
|
|
44
44
|
maximum = origin + nsteps * step_vector
|
|
45
45
|
if origin is not None and global_origin is None:
|
|
46
|
-
global_origin =
|
|
46
|
+
global_origin = np.zeros(3)
|
|
47
47
|
self._origin = np.array(origin)
|
|
48
48
|
self._maximum = np.array(maximum)
|
|
49
49
|
self.dimensions = dimensions
|
|
@@ -90,7 +90,7 @@ class BoundingBox:
|
|
|
90
90
|
|
|
91
91
|
@property
|
|
92
92
|
def global_maximum(self):
|
|
93
|
-
return self.maximum
|
|
93
|
+
return self.maximum + self.global_origin
|
|
94
94
|
|
|
95
95
|
@property
|
|
96
96
|
def valid(self):
|
|
@@ -242,6 +242,8 @@ class BoundingBox:
|
|
|
242
242
|
)
|
|
243
243
|
origin = locations.min(axis=0)
|
|
244
244
|
maximum = locations.max(axis=0)
|
|
245
|
+
origin = np.array(origin)
|
|
246
|
+
maximum = np.array(maximum)
|
|
245
247
|
if local_coordinate:
|
|
246
248
|
self.global_origin = origin
|
|
247
249
|
self.origin = np.zeros(3)
|
|
@@ -273,15 +275,50 @@ class BoundingBox:
|
|
|
273
275
|
if self.origin is None or self.maximum is None:
|
|
274
276
|
raise LoopValueError("Cannot create bounding box with buffer, no origin or maximum")
|
|
275
277
|
# local coordinates, rescale into the original bounding boxes global coordinates
|
|
276
|
-
origin = self.origin - buffer * (self.maximum - self.origin)
|
|
277
|
-
maximum = self.maximum + buffer * (self.maximum - self.origin)
|
|
278
|
+
origin = self.origin - buffer * np.max(self.maximum - self.origin)
|
|
279
|
+
maximum = self.maximum + buffer * np.max(self.maximum - self.origin)
|
|
278
280
|
return BoundingBox(
|
|
279
281
|
origin=origin,
|
|
280
282
|
maximum=maximum,
|
|
281
|
-
global_origin=self.global_origin
|
|
283
|
+
global_origin=self.global_origin,
|
|
282
284
|
dimensions=self.dimensions,
|
|
283
285
|
)
|
|
284
286
|
|
|
287
|
+
# def __call__(self, xyz):
|
|
288
|
+
# xyz = np.array(xyz)
|
|
289
|
+
# if len(xyz.shape) == 1:
|
|
290
|
+
# xyz = xyz.reshape((1, -1))
|
|
291
|
+
|
|
292
|
+
# distances = np.maximum(0,
|
|
293
|
+
# np.maximum(self.global_origin+self.origin - xyz,
|
|
294
|
+
# xyz - self.global_maximum))
|
|
295
|
+
# distance = np.linalg.norm(distances, axis=1)
|
|
296
|
+
# distance[self.is_inside(xyz)] = -1
|
|
297
|
+
# return distance
|
|
298
|
+
|
|
299
|
+
def __call__(self, xyz):
|
|
300
|
+
# Calculate center and half-extents of the box
|
|
301
|
+
center = (self.maximum + self.global_origin + self.origin) / 2
|
|
302
|
+
half_extents = (self.maximum - self.global_origin + self.origin) / 2
|
|
303
|
+
|
|
304
|
+
# Calculate the distance from point to center
|
|
305
|
+
offset = np.abs(xyz - center) - half_extents
|
|
306
|
+
|
|
307
|
+
# Inside distance: negative value based on the smallest penetration
|
|
308
|
+
inside_distance = np.min(half_extents - np.abs(xyz - center), axis=1)
|
|
309
|
+
|
|
310
|
+
# Outside distance: length of the positive components of offset
|
|
311
|
+
outside_distance = np.linalg.norm(np.maximum(offset, 0))
|
|
312
|
+
|
|
313
|
+
# If any component of offset is positive, we're outside
|
|
314
|
+
# Otherwise, we're inside and return the negative penetration distance
|
|
315
|
+
distance = np.zeros(xyz.shape[0])
|
|
316
|
+
mask = np.any(offset > 0, axis=1)
|
|
317
|
+
distance[mask] = outside_distance
|
|
318
|
+
distance[~mask] = -inside_distance[~mask]
|
|
319
|
+
return distance
|
|
320
|
+
# return outside_distance if np.any(offset > 0) else -inside_distance
|
|
321
|
+
|
|
285
322
|
def get_value(self, name):
|
|
286
323
|
ix, iy = self.name_map.get(name, (-1, -1))
|
|
287
324
|
if ix == -1 and iy == -1:
|
|
@@ -319,7 +356,7 @@ class BoundingBox:
|
|
|
319
356
|
self,
|
|
320
357
|
nsteps: Optional[Union[list, np.ndarray]] = None,
|
|
321
358
|
shuffle: bool = False,
|
|
322
|
-
order: str = "
|
|
359
|
+
order: str = "F",
|
|
323
360
|
local: bool = True,
|
|
324
361
|
) -> np.ndarray:
|
|
325
362
|
"""Get the grid of points from the bounding box
|
|
@@ -361,8 +398,8 @@ class BoundingBox:
|
|
|
361
398
|
rng.shuffle(locs)
|
|
362
399
|
return locs
|
|
363
400
|
|
|
364
|
-
def
|
|
365
|
-
"""Get the cell
|
|
401
|
+
def cell_centres(self, order: str = "F") -> np.ndarray:
|
|
402
|
+
"""Get the cell centres of a regular grid
|
|
366
403
|
|
|
367
404
|
Parameters
|
|
368
405
|
----------
|
|
@@ -372,7 +409,7 @@ class BoundingBox:
|
|
|
372
409
|
Returns
|
|
373
410
|
-------
|
|
374
411
|
np.ndarray
|
|
375
|
-
array of cell
|
|
412
|
+
array of cell centres
|
|
376
413
|
"""
|
|
377
414
|
locs = self.regular_grid(order=order, nsteps=self.nsteps - 1)
|
|
378
415
|
|
|
@@ -392,6 +429,25 @@ class BoundingBox:
|
|
|
392
429
|
"maximum": self.maximum.tolist(),
|
|
393
430
|
"nsteps": self.nsteps.tolist(),
|
|
394
431
|
}
|
|
432
|
+
@classmethod
|
|
433
|
+
def from_dict(cls, data: dict) -> 'BoundingBox':
|
|
434
|
+
"""Create a bounding box from a dictionary
|
|
435
|
+
|
|
436
|
+
Parameters
|
|
437
|
+
----------
|
|
438
|
+
data : dict
|
|
439
|
+
dictionary with origin, maximum and nsteps
|
|
440
|
+
|
|
441
|
+
Returns
|
|
442
|
+
-------
|
|
443
|
+
BoundingBox
|
|
444
|
+
bounding box object
|
|
445
|
+
"""
|
|
446
|
+
return cls(
|
|
447
|
+
origin=np.array(data["origin"]),
|
|
448
|
+
maximum=np.array(data["maximum"]),
|
|
449
|
+
nsteps=np.array(data["nsteps"]),
|
|
450
|
+
)
|
|
395
451
|
|
|
396
452
|
def vtk(self):
|
|
397
453
|
"""Export the model as a pyvista RectilinearGrid
|
|
@@ -434,7 +490,7 @@ class BoundingBox:
|
|
|
434
490
|
_cell_data = copy.deepcopy(cell_data)
|
|
435
491
|
_vertex_data = copy.deepcopy(vertex_data)
|
|
436
492
|
return StructuredGrid(
|
|
437
|
-
origin=self.global_origin,
|
|
493
|
+
origin=self.global_origin + self.origin,
|
|
438
494
|
step_vector=self.step_vector,
|
|
439
495
|
nsteps=self.nsteps,
|
|
440
496
|
cell_properties=_cell_data,
|
|
@@ -460,6 +516,9 @@ class BoundingBox:
|
|
|
460
516
|
(self.global_maximum - self.global_origin)
|
|
461
517
|
) # np.clip(xyz, self.origin, self.maximum)
|
|
462
518
|
|
|
519
|
+
def scale_by_projection_factor(self, value):
|
|
520
|
+
return value / np.max((self.global_maximum - self.global_origin))
|
|
521
|
+
|
|
463
522
|
def reproject(self, xyz):
|
|
464
523
|
"""Reproject a point from the bounding box to the global space
|
|
465
524
|
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
from dataclasses import dataclass
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
2
|
import numpy as np
|
|
3
3
|
|
|
4
4
|
from typing import Optional, Union
|
|
@@ -10,9 +10,9 @@ logger = getLogger(__name__)
|
|
|
10
10
|
|
|
11
11
|
@dataclass
|
|
12
12
|
class ValuePoints:
|
|
13
|
-
locations: np.ndarray
|
|
14
|
-
values: np.ndarray
|
|
15
|
-
name: str
|
|
13
|
+
locations: np.ndarray = field(default_factory=lambda: np.array([[0, 0, 0]]))
|
|
14
|
+
values: np.ndarray = field(default_factory=lambda: np.array([0]))
|
|
15
|
+
name: str = "unnamed"
|
|
16
16
|
properties: Optional[dict] = None
|
|
17
17
|
|
|
18
18
|
def to_dict(self):
|
|
@@ -108,9 +108,9 @@ class ValuePoints:
|
|
|
108
108
|
|
|
109
109
|
@dataclass
|
|
110
110
|
class VectorPoints:
|
|
111
|
-
locations: np.ndarray
|
|
112
|
-
vectors: np.ndarray
|
|
113
|
-
name: str
|
|
111
|
+
locations: np.ndarray = field(default_factory=lambda: np.array([[0, 0, 0]]))
|
|
112
|
+
vectors: np.ndarray = field(default_factory=lambda: np.array([[0, 0, 0]]))
|
|
113
|
+
name: str = "unnamed"
|
|
114
114
|
properties: Optional[dict] = None
|
|
115
115
|
|
|
116
116
|
def to_dict(self):
|
|
@@ -129,9 +129,9 @@ class VectorPoints:
|
|
|
129
129
|
def vtk(
|
|
130
130
|
self,
|
|
131
131
|
geom='arrow',
|
|
132
|
-
scale=0
|
|
132
|
+
scale=1.0,
|
|
133
133
|
scale_function=None,
|
|
134
|
-
normalise=
|
|
134
|
+
normalise=False,
|
|
135
135
|
tolerance=0.05,
|
|
136
136
|
bb=None,
|
|
137
137
|
scalars=None,
|
|
@@ -140,9 +140,15 @@ class VectorPoints:
|
|
|
140
140
|
|
|
141
141
|
_projected = False
|
|
142
142
|
vectors = np.copy(self.vectors)
|
|
143
|
+
|
|
143
144
|
if normalise:
|
|
144
145
|
norm = np.linalg.norm(vectors, axis=1)
|
|
145
146
|
vectors[norm > 0, :] /= norm[norm > 0][:, None]
|
|
147
|
+
else:
|
|
148
|
+
norm = np.linalg.norm(vectors, axis=1)
|
|
149
|
+
vectors[norm > 0, :] /= norm[norm > 0][:, None]
|
|
150
|
+
norm = norm[norm > 0] / norm[norm > 0].max()
|
|
151
|
+
vectors *= norm[:, None]
|
|
146
152
|
if scale_function is not None:
|
|
147
153
|
# vectors /= np.linalg.norm(vectors, axis=1)[:, None]
|
|
148
154
|
vectors *= scale_function(self.locations)[:, None]
|
|
@@ -151,6 +157,7 @@ class VectorPoints:
|
|
|
151
157
|
try:
|
|
152
158
|
locations = bb.project(locations)
|
|
153
159
|
_projected = True
|
|
160
|
+
scale = bb.scale_by_projection_factor(scale)
|
|
154
161
|
except Exception as e:
|
|
155
162
|
logger.error(f'Failed to project points to bounding box: {e}')
|
|
156
163
|
logger.error('Using unprojected points, this may cause issues with the glyphing')
|
|
@@ -161,10 +168,10 @@ class VectorPoints:
|
|
|
161
168
|
if geom == 'arrow':
|
|
162
169
|
geom = pv.Arrow(scale=scale)
|
|
163
170
|
elif geom == 'disc':
|
|
164
|
-
geom = pv.Disc(inner=0, outer=scale).rotate_y(90)
|
|
171
|
+
geom = pv.Disc(inner=0, outer=scale * 0.5, c_res=50).rotate_y(90)
|
|
165
172
|
|
|
166
173
|
# Perform the glyph
|
|
167
|
-
glyphed = points.glyph(orient="vectors", geom=geom, tolerance=tolerance
|
|
174
|
+
glyphed = points.glyph(orient="vectors", geom=geom, tolerance=tolerance)
|
|
168
175
|
if _projected:
|
|
169
176
|
glyphed.points = bb.reproject(glyphed.points)
|
|
170
177
|
return glyphed
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
from typing import Dict
|
|
2
2
|
import numpy as np
|
|
3
|
-
from dataclasses import dataclass
|
|
3
|
+
from dataclasses import dataclass, field
|
|
4
4
|
from LoopStructural.utils import getLogger
|
|
5
5
|
|
|
6
6
|
logger = getLogger(__name__)
|
|
@@ -8,12 +8,12 @@ logger = getLogger(__name__)
|
|
|
8
8
|
|
|
9
9
|
@dataclass
|
|
10
10
|
class StructuredGrid:
|
|
11
|
-
origin: np.ndarray
|
|
12
|
-
step_vector: np.ndarray
|
|
13
|
-
nsteps: np.ndarray
|
|
14
|
-
cell_properties: Dict[str, np.ndarray]
|
|
15
|
-
properties: Dict[str, np.ndarray]
|
|
16
|
-
name: str
|
|
11
|
+
origin: np.ndarray = field(default_factory=lambda: np.array([0, 0, 0]))
|
|
12
|
+
step_vector: np.ndarray = field(default_factory=lambda: np.array([1, 1, 1]))
|
|
13
|
+
nsteps: np.ndarray = field(default_factory=lambda: np.array([10, 10, 10]))
|
|
14
|
+
cell_properties: Dict[str, np.ndarray] = field(default_factory=dict)
|
|
15
|
+
properties: Dict[str, np.ndarray] = field(default_factory=dict)
|
|
16
|
+
name: str = "default_grid"
|
|
17
17
|
|
|
18
18
|
def to_dict(self):
|
|
19
19
|
return {
|
|
@@ -44,9 +44,9 @@ class StructuredGrid:
|
|
|
44
44
|
z,
|
|
45
45
|
)
|
|
46
46
|
for name, data in self.properties.items():
|
|
47
|
-
grid[name] = data.
|
|
47
|
+
grid[name] = data.reshape((grid.n_points, -1), order="F")
|
|
48
48
|
for name, data in self.cell_properties.items():
|
|
49
|
-
grid.cell_data[name] = data.
|
|
49
|
+
grid.cell_data[name] = data.reshape((grid.n_cells, -1), order="F")
|
|
50
50
|
return grid
|
|
51
51
|
|
|
52
52
|
def plot(self, pyvista_kwargs={}):
|
|
@@ -63,6 +63,34 @@ class StructuredGrid:
|
|
|
63
63
|
except ImportError:
|
|
64
64
|
logger.error("pyvista is required for vtk")
|
|
65
65
|
|
|
66
|
+
@property
|
|
67
|
+
def cell_centres(self):
|
|
68
|
+
x = np.linspace(
|
|
69
|
+
self.origin[0] + self.step_vector[0] * 0.5,
|
|
70
|
+
self.maximum[0] + self.step_vector[0] * 0.5,
|
|
71
|
+
self.nsteps[0] - 1,
|
|
72
|
+
)
|
|
73
|
+
y = np.linspace(
|
|
74
|
+
self.origin[1] + self.step_vector[1] * 0.5,
|
|
75
|
+
self.maximum[1] - self.step_vector[1] * 0.5,
|
|
76
|
+
self.nsteps[1] - 1,
|
|
77
|
+
)
|
|
78
|
+
z = np.linspace(
|
|
79
|
+
self.origin[2] + self.step_vector[2] * 0.5,
|
|
80
|
+
self.maximum[2] - self.step_vector[2] * 0.5,
|
|
81
|
+
self.nsteps[2] - 1,
|
|
82
|
+
)
|
|
83
|
+
x, y, z = np.meshgrid(x, y, z, indexing="ij")
|
|
84
|
+
return np.vstack([x.flatten(order='f'), y.flatten(order='f'), z.flatten(order='f')]).T
|
|
85
|
+
|
|
86
|
+
@property
|
|
87
|
+
def nodes(self):
|
|
88
|
+
x = np.linspace(self.origin[0], self.maximum[0], self.nsteps[0])
|
|
89
|
+
y = np.linspace(self.origin[1], self.maximum[1], self.nsteps[1])
|
|
90
|
+
z = np.linspace(self.origin[2], self.maximum[2], self.nsteps[2])
|
|
91
|
+
x, y, z = np.meshgrid(x, y, z, indexing="ij")
|
|
92
|
+
return np.vstack([x.flatten(order='f'), y.flatten(order='f'), z.flatten(order='f')]).T
|
|
93
|
+
|
|
66
94
|
def merge(self, other):
|
|
67
95
|
if not np.all(np.isclose(self.origin, other.origin)):
|
|
68
96
|
raise ValueError("Origin of grids must be the same")
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
from dataclasses import dataclass
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
2
|
from typing import Optional
|
|
3
3
|
import numpy as np
|
|
4
4
|
import io
|
|
@@ -9,8 +9,8 @@ logger = getLogger(__name__)
|
|
|
9
9
|
|
|
10
10
|
@dataclass
|
|
11
11
|
class Surface:
|
|
12
|
-
vertices: np.ndarray
|
|
13
|
-
triangles: np.ndarray
|
|
12
|
+
vertices: np.ndarray = field(default_factory=lambda: np.array([[0, 0, 0]]))
|
|
13
|
+
triangles: np.ndarray = field(default_factory=lambda: np.array([[0, 0, 0]]))
|
|
14
14
|
normals: Optional[np.ndarray] = None
|
|
15
15
|
name: str = 'surface'
|
|
16
16
|
values: Optional[np.ndarray] = None
|
LoopStructural/export/geoh5.py
CHANGED
|
@@ -78,8 +78,10 @@ def add_structured_grid_to_geoh5(filename, structured_grid, overwrite=True, grou
|
|
|
78
78
|
if structured_grid.cell_properties is not None:
|
|
79
79
|
for k, v in structured_grid.cell_properties.items():
|
|
80
80
|
data[k] = {
|
|
81
|
-
|
|
82
|
-
"values": np.
|
|
81
|
+
"association": "CELL",
|
|
82
|
+
"values": np.flipud(
|
|
83
|
+
np.rot90(v.reshape(structured_grid.nsteps - 1, order="F"), 1)
|
|
84
|
+
).flatten(),
|
|
83
85
|
}
|
|
84
86
|
block = geoh5py.objects.BlockModel.create(
|
|
85
87
|
workspace,
|
|
@@ -63,6 +63,20 @@ class DiscreteInterpolator(GeologicalInterpolator):
|
|
|
63
63
|
logger.info("Creating discrete interpolator with {} degrees of freedom".format(self.nx))
|
|
64
64
|
self.type = InterpolatorType.BASE_DISCRETE
|
|
65
65
|
|
|
66
|
+
def set_nelements(self, nelements: int) -> int:
|
|
67
|
+
return self.support.set_nelements(nelements)
|
|
68
|
+
|
|
69
|
+
@property
|
|
70
|
+
def n_elements(self) -> int:
|
|
71
|
+
"""Number of elements in the interpolator
|
|
72
|
+
|
|
73
|
+
Returns
|
|
74
|
+
-------
|
|
75
|
+
int
|
|
76
|
+
number of elements, positive
|
|
77
|
+
"""
|
|
78
|
+
return self.support.n_elements
|
|
79
|
+
|
|
66
80
|
@property
|
|
67
81
|
def nx(self) -> int:
|
|
68
82
|
"""Number of degrees of freedom for the interpolator
|
|
@@ -161,6 +175,7 @@ class DiscreteInterpolator(GeologicalInterpolator):
|
|
|
161
175
|
"""
|
|
162
176
|
self.constraints = {}
|
|
163
177
|
self.c_ = 0
|
|
178
|
+
self.regularisation_scale = np.ones(self.nx)
|
|
164
179
|
logger.info("Resetting interpolation constraints")
|
|
165
180
|
|
|
166
181
|
def add_constraints_to_least_squares(self, A, B, idc, w=1.0, name="undefined"):
|
|
@@ -737,3 +752,6 @@ class DiscreteInterpolator(GeologicalInterpolator):
|
|
|
737
752
|
**super().to_dict(),
|
|
738
753
|
# 'region_function':self.region_function,
|
|
739
754
|
}
|
|
755
|
+
|
|
756
|
+
def vtk(self):
|
|
757
|
+
return self.support.vtk({'c': self.c})
|
|
@@ -7,12 +7,37 @@ import numpy as np
|
|
|
7
7
|
from ..utils import get_vectors
|
|
8
8
|
from ._discrete_interpolator import DiscreteInterpolator
|
|
9
9
|
from ..interpolators import InterpolatorType
|
|
10
|
-
|
|
10
|
+
from scipy.spatial import KDTree
|
|
11
11
|
from LoopStructural.utils import getLogger
|
|
12
12
|
|
|
13
13
|
logger = getLogger(__name__)
|
|
14
14
|
|
|
15
15
|
|
|
16
|
+
def compute_weighting(grid_points, gradient_constraint_points, alpha=10.0, sigma=1.0):
|
|
17
|
+
"""
|
|
18
|
+
Compute weights for second derivative regularization based on proximity to gradient constraints.
|
|
19
|
+
|
|
20
|
+
Parameters:
|
|
21
|
+
grid_points (ndarray): (N, 3) array of 3D coordinates for grid cells.
|
|
22
|
+
gradient_constraint_points (ndarray): (M, 3) array of 3D coordinates for gradient constraints.
|
|
23
|
+
alpha (float): Strength of weighting increase.
|
|
24
|
+
sigma (float): Decay parameter for Gaussian-like influence.
|
|
25
|
+
|
|
26
|
+
Returns:
|
|
27
|
+
weights (ndarray): (N,) array of weights for each grid point.
|
|
28
|
+
"""
|
|
29
|
+
# Build a KDTree with the gradient constraint locations
|
|
30
|
+
tree = KDTree(gradient_constraint_points)
|
|
31
|
+
|
|
32
|
+
# Find the distance from each grid point to the nearest gradient constraint
|
|
33
|
+
distances, _ = tree.query(grid_points, k=1)
|
|
34
|
+
|
|
35
|
+
# Compute weighting function (higher weight for nearby points)
|
|
36
|
+
weights = 1 + alpha * np.exp(-(distances**2) / (2 * sigma**2))
|
|
37
|
+
|
|
38
|
+
return weights
|
|
39
|
+
|
|
40
|
+
|
|
16
41
|
class FiniteDifferenceInterpolator(DiscreteInterpolator):
|
|
17
42
|
def __init__(self, grid, data={}):
|
|
18
43
|
"""
|
|
@@ -44,6 +69,7 @@ class FiniteDifferenceInterpolator(DiscreteInterpolator):
|
|
|
44
69
|
)
|
|
45
70
|
|
|
46
71
|
self.type = InterpolatorType.FINITE_DIFFERENCE
|
|
72
|
+
self.use_regularisation_weight_scale = False
|
|
47
73
|
|
|
48
74
|
def setup_interpolator(self, **kwargs):
|
|
49
75
|
"""
|
|
@@ -76,20 +102,19 @@ class FiniteDifferenceInterpolator(DiscreteInterpolator):
|
|
|
76
102
|
for key in kwargs:
|
|
77
103
|
self.up_to_date = False
|
|
78
104
|
if "regularisation" in kwargs:
|
|
79
|
-
self.interpolation_weights["dxy"] =
|
|
80
|
-
self.interpolation_weights["dyz"] =
|
|
81
|
-
self.interpolation_weights["dxz"] =
|
|
82
|
-
self.interpolation_weights["dxx"] =
|
|
83
|
-
self.interpolation_weights["dyy"] =
|
|
84
|
-
self.interpolation_weights["dzz"] =
|
|
105
|
+
self.interpolation_weights["dxy"] = kwargs["regularisation"]
|
|
106
|
+
self.interpolation_weights["dyz"] = kwargs["regularisation"]
|
|
107
|
+
self.interpolation_weights["dxz"] = kwargs["regularisation"]
|
|
108
|
+
self.interpolation_weights["dxx"] = kwargs["regularisation"]
|
|
109
|
+
self.interpolation_weights["dyy"] = kwargs["regularisation"]
|
|
110
|
+
self.interpolation_weights["dzz"] = kwargs["regularisation"]
|
|
85
111
|
self.interpolation_weights[key] = kwargs[key]
|
|
86
112
|
# either use the default operators or the ones passed to the function
|
|
87
113
|
operators = kwargs.get(
|
|
88
114
|
"operators", self.support.get_operators(weights=self.interpolation_weights)
|
|
89
115
|
)
|
|
90
|
-
for k, o in operators.items():
|
|
91
|
-
self.assemble_inner(o[0], o[1], name=k)
|
|
92
116
|
|
|
117
|
+
self.use_regularisation_weight_scale = kwargs.get('use_regularisation_weight_scale', False)
|
|
93
118
|
self.add_norm_constraints(self.interpolation_weights["npw"])
|
|
94
119
|
self.add_gradient_constraints(self.interpolation_weights["gpw"])
|
|
95
120
|
self.add_value_constraints(self.interpolation_weights["cpw"])
|
|
@@ -101,6 +126,8 @@ class FiniteDifferenceInterpolator(DiscreteInterpolator):
|
|
|
101
126
|
upper_bound=kwargs.get('inequality_pair_upper_bound', np.finfo(float).eps),
|
|
102
127
|
lower_bound=kwargs.get('inequality_pair_lower_bound', -np.inf),
|
|
103
128
|
)
|
|
129
|
+
for k, o in operators.items():
|
|
130
|
+
self.assemble_inner(o[0], o[1], name=k)
|
|
104
131
|
|
|
105
132
|
def copy(self):
|
|
106
133
|
"""
|
|
@@ -271,6 +298,11 @@ class FiniteDifferenceInterpolator(DiscreteInterpolator):
|
|
|
271
298
|
self.add_constraints_to_least_squares(A, B, idc[inside, :], w=w, name="gradient")
|
|
272
299
|
A = np.einsum("ij,ijk->ik", dip_vector.T, T)
|
|
273
300
|
self.add_constraints_to_least_squares(A, B, idc[inside, :], w=w, name="gradient")
|
|
301
|
+
# self.regularisation_scale += compute_weighting(
|
|
302
|
+
# self.support.nodes,
|
|
303
|
+
# points[inside, : self.support.dimension],
|
|
304
|
+
# sigma=self.support.nsteps[0] * 10,
|
|
305
|
+
# )
|
|
274
306
|
if np.sum(inside) <= 0:
|
|
275
307
|
logger.warning(
|
|
276
308
|
f" {np.sum(~inside)} \
|
|
@@ -318,7 +350,24 @@ class FiniteDifferenceInterpolator(DiscreteInterpolator):
|
|
|
318
350
|
)
|
|
319
351
|
# T*=np.product(self.support.step_vector)
|
|
320
352
|
# T/=self.support.step_vector[0]
|
|
321
|
-
|
|
353
|
+
# indexes, inside2 = self.support.position_to_nearby_cell_indexes(
|
|
354
|
+
# points[inside, : self.support.dimension]
|
|
355
|
+
# )
|
|
356
|
+
# indexes = indexes[inside2, :]
|
|
357
|
+
|
|
358
|
+
# corners = self.support.cell_corner_indexes(indexes)
|
|
359
|
+
# node_indexes = corners.reshape(-1, 3)
|
|
360
|
+
# indexes = self.support.global_node_indices(indexes)
|
|
361
|
+
# self.regularisation_scale[indexes] =10
|
|
362
|
+
|
|
363
|
+
self.regularisation_scale += compute_weighting(
|
|
364
|
+
self.support.nodes,
|
|
365
|
+
points[inside, : self.support.dimension],
|
|
366
|
+
sigma=self.support.nsteps[0] * 10,
|
|
367
|
+
)
|
|
368
|
+
# global_indexes = self.support.neighbour_global_indexes().T.astype(int)
|
|
369
|
+
# close_indexes =
|
|
370
|
+
# self.regularisation_scale[global_indexes[idc[inside,:].astype(int),]]=10
|
|
322
371
|
w /= 3
|
|
323
372
|
for d in range(self.support.dimension):
|
|
324
373
|
|
|
@@ -454,7 +503,11 @@ class FiniteDifferenceInterpolator(DiscreteInterpolator):
|
|
|
454
503
|
a[inside, :],
|
|
455
504
|
B[inside],
|
|
456
505
|
idc[inside, :],
|
|
457
|
-
w=
|
|
506
|
+
w=(
|
|
507
|
+
self.regularisation_scale[idc[inside, 13].astype(int)] * w
|
|
508
|
+
if self.use_regularisation_weight_scale
|
|
509
|
+
else w
|
|
510
|
+
),
|
|
458
511
|
name=name,
|
|
459
512
|
)
|
|
460
513
|
return
|
|
@@ -46,6 +46,15 @@ class GeologicalInterpolator(metaclass=ABCMeta):
|
|
|
46
46
|
self.dimensions = 3 # default to 3d
|
|
47
47
|
self.support = None
|
|
48
48
|
|
|
49
|
+
@abstractmethod
|
|
50
|
+
def set_nelements(self, nelements: int) -> int:
|
|
51
|
+
pass
|
|
52
|
+
|
|
53
|
+
@property
|
|
54
|
+
@abstractmethod
|
|
55
|
+
def n_elements(self) -> int:
|
|
56
|
+
pass
|
|
57
|
+
|
|
49
58
|
@property
|
|
50
59
|
def data(self):
|
|
51
60
|
return self._data
|