LoopStructural-1.6.12-py3-none-any.whl → LoopStructural-1.6.14-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of LoopStructural might be problematic.

@@ -281,6 +281,7 @@ class BoundingBox:
  origin=origin,
  maximum=maximum,
  global_origin=self.global_origin,
+ nsteps=self.nsteps,
  dimensions=self.dimensions,
  )

@@ -387,7 +388,7 @@ class BoundingBox:

  if not local:
  coordinates = [
- np.linspace(self.global_origin[i], self.global_maximum[i], nsteps[i])
+ np.linspace(self.global_origin[i]+self.origin[i], self.global_maximum[i], nsteps[i])
  for i in range(self.dimensions)
  ]
  coordinate_grid = np.meshgrid(*coordinates, indexing="ij")
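With the fix above, the regular grid of a buffered bounding box starts at global_origin + origin rather than at the unbuffered global origin. A minimal numpy sketch of the same grid construction, using made-up origin, maximum and nsteps values rather than a real BoundingBox:

import numpy as np

# Hypothetical stand-ins for BoundingBox attributes (not values from the package).
global_origin = np.array([500000.0, 7000000.0, -1000.0])   # geographic offset of the model
origin = np.array([-10.0, -10.0, -10.0])                    # local origin of a buffered box
global_maximum = np.array([500100.0, 7000100.0, 0.0])
nsteps = np.array([11, 11, 6])

# One axis per dimension, starting at global_origin + origin as in the fix above.
coordinates = [
    np.linspace(global_origin[i] + origin[i], global_maximum[i], nsteps[i])
    for i in range(3)
]
coordinate_grid = np.meshgrid(*coordinates, indexing="ij")
points = np.stack([c.ravel() for c in coordinate_grid], axis=1)
print(points.shape)  # (11 * 11 * 6, 3) grid points in global coordinates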
@@ -83,8 +83,16 @@ from ..interpolators._p1interpolator import P1Interpolator
  try:
  from ..interpolators._surfe_wrapper import SurfeRBFInterpolator
  except ImportError:
- logger.warning("Surfe is not installed, SurfeRBFInterpolator will not be available")
- SurfeRBFInterpolator = None
+ class SurfeRBFInterpolator(GeologicalInterpolator):
+ """
+ Dummy class to handle the case where Surfe is not installed.
+ This will raise a warning when used.
+ """
+
+ def __init__(self, *args, **kwargs):
+ raise ImportError(
+ "Surfe cannot be imported. Please install Surfe. pip install surfe/ conda install -c loop3d surfe"
+ )
  interpolator_map = {
  InterpolatorType.BASE: GeologicalInterpolator,
  InterpolatorType.BASE_DISCRETE: DiscreteInterpolator,
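Instead of binding SurfeRBFInterpolator to None, the release now registers a placeholder class, so the interpolator map can always be built and the ImportError is only raised when the missing backend is actually instantiated. A small sketch of the same deferred-failure pattern, written around a hypothetical optional_backend dependency rather than Surfe:

try:
    from optional_backend import BackendInterpolator  # hypothetical optional dependency
except ImportError:

    class BackendInterpolator:
        """Placeholder used when the optional backend is not installed.

        The module import still succeeds; the error is only raised when the
        interpolator is actually constructed.
        """

        def __init__(self, *args, **kwargs):
            raise ImportError(
                "optional_backend is not installed; install it to use BackendInterpolator"
            )


# The name can be registered either way, so lookup tables stay complete.
INTERPOLATORS = {"backend_rbf": BackendInterpolator}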
@@ -454,22 +454,28 @@ class FiniteDifferenceInterpolator(DiscreteInterpolator):
  )
  self.up_to_date = False

- def add_regularisation(self, operator, w=0.1):
- """

- Parameters
- ----------
- operator
- w

- Returns
- -------
+ # def assemble_borders(self, operator, w, name='regularisation'):
+ # """
+ # Adds a constraint to the border of the model to force the value to be equal to the value at the border

- """
- self.assemble_inner(operator, w)
- # self.assemble_borders()
+ # Parameters
+ # ----------
+ # operator : Operator
+ # operator to use for the regularisation
+ # w : double
+ # weight of the regularisation
+
+ # Returns
+ # -------
+
+ # """
+ # # First get the global indicies of the pairs of neighbours this should be an
+ # # Nx27 array for 3d and an Nx9 array for 2d
+
+ # global_indexes = self.support.neighbour_global_indexes()

- # def assemble_borders(self, operator, w):

  def assemble_inner(self, operator, w, name='regularisation'):
  """
@@ -146,7 +146,7 @@ class P1Interpolator(DiscreteInterpolator):
  self.reset()
  for key in kwargs:
  if "regularisation" in kwargs:
- self.interpolation_weights["cgw"] = 0.1 * kwargs["regularisation"]
+ self.interpolation_weights["cgw"] = kwargs["regularisation"]
  self.up_to_date = False
  self.interpolation_weights[key] = kwargs[key]
  if self.interpolation_weights["cgw"] > 0.0:
@@ -437,8 +437,9 @@ class StructuredGrid(BaseStructuredSupport):
  T[:, 2, 6] = (1 - local_coords[:, 0]) * local_coords[:, 1]
  T[:, 2, 3] = -local_coords[:, 0] * local_coords[:, 1]
  T[:, 2, 7] = local_coords[:, 0] * local_coords[:, 1]
- T /= self.step_vector[0]
-
+ T[:, 0, :] /= self.step_vector[None, 0]
+ T[:, 1, :] /= self.step_vector[None, 1]
+ T[:, 2, :] /= self.step_vector[None, 2]
  return vertices, T, elements, inside

  def get_element_for_location(self, pos: np.ndarray):
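The earlier code divided the whole derivative matrix T by step_vector[0], which is only correct when the grid cells are cubes; the replacement scales the x, y and z derivative rows by their own step sizes. A standalone numpy sketch of the per-axis scaling, with an arbitrary T and step vector standing in for the support's shape-function derivatives:

import numpy as np

rng = np.random.default_rng(0)
T = rng.normal(size=(5, 3, 8))             # (points, xyz derivative, 8 hexahedron nodes)
step_vector = np.array([10.0, 20.0, 5.0])  # anisotropic cell size

# Old behaviour: every derivative divided by the x step only.
T_old = T / step_vector[0]

# New behaviour: d/dx, d/dy, d/dz each divided by their own step.
T_new = T / step_vector[None, :, None]

# Equivalent to the explicit per-row form used in the diff.
T_rows = T.copy()
for axis in range(3):
    T_rows[:, axis, :] /= step_vector[axis]
assert np.allclose(T_new, T_rows)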
@@ -22,6 +22,14 @@ from ..modelling.input import (
  try:
  from ..modelling.input.project_file import LoopProjectfileProcessor
  except (LoopImportError, ImportError):
- logger.warning(
- "Cannot use LoopProjectfileProcessor: Loop project file cannot be imported, try installing LoopProjectFile"
- )
+ class LoopProjectfileProcessor(ProcessInputData):
+ """
+ Dummy class to handle the case where LoopProjectFile is not installed.
+ This will raise a warning when used.
+ """
+
+ def __init__(self, *args, **kwargs):
+ raise LoopImportError(
+ "LoopProjectFile cannot be imported. Please install LoopProjectFile."
+ )
+
@@ -229,7 +229,7 @@ class BaseFeature(metaclass=ABCMeta):
  """
  Evaluate the gradient of the feature at a given position.
  """
-
+
  raise NotImplementedError

  def min(self):
@@ -1,12 +1,13 @@
  """
  Geological features
  """
-
+ from LoopStructural.utils.maths import regular_tetraherdron_for_points, gradient_from_tetrahedron
  from ...modelling.features import BaseFeature
  from ...utils import getLogger
  from ...modelling.features import FeatureType
  import numpy as np
  from typing import Callable, Optional
+ from ...utils import LoopValueError

  logger = getLogger(__name__)

@@ -18,8 +19,8 @@ class LambdaGeologicalFeature(BaseFeature):
  name: str = "unnamed_lambda",
  gradient_function: Optional[Callable[[np.ndarray], np.ndarray]] = None,
  model=None,
- regions: list = [],
- faults: list = [],
+ regions: Optional[list] = None,
+ faults: Optional[list] = None,
  builder=None,
  ):
  """A lambda geological feature is a wrapper for a geological
@@ -43,10 +44,11 @@ class LambdaGeologicalFeature(BaseFeature):
  builder : _type_, optional
  _description_, by default None
  """
- BaseFeature.__init__(self, name, model, faults, regions, builder)
+ BaseFeature.__init__(self, name, model, faults if faults is not None else [], regions if regions is not None else [], builder)
  self.type = FeatureType.LAMBDA
  self.function = function
  self.gradient_function = gradient_function
+ self.regions = regions if regions is not None else []

  def evaluate_value(self, pos: np.ndarray, ignore_regions=False) -> np.ndarray:
  """_summary_
@@ -62,13 +64,16 @@ class LambdaGeologicalFeature(BaseFeature):
  _description_
  """
  v = np.zeros((pos.shape[0]))
- if self.function is None:
- v[:] = np.nan
- else:
- v[:] = self.function(pos)
+ v[:] = np.nan
+
+ mask = self._calculate_mask(pos, ignore_regions=ignore_regions)
+ pos = self._apply_faults(pos)
+ if self.function is not None:
+
+ v[mask] = self.function(pos[mask,:])
  return v

- def evaluate_gradient(self, pos: np.ndarray, ignore_regions=False) -> np.ndarray:
+ def evaluate_gradient(self, pos: np.ndarray, ignore_regions=False,element_scale_parameter=None) -> np.ndarray:
  """_summary_

  Parameters
@@ -81,7 +86,60 @@ class LambdaGeologicalFeature(BaseFeature):
  np.ndarray
  _description_
  """
+ if pos.shape[1] != 3:
+ raise LoopValueError("Need Nx3 array of xyz points to evaluate gradient")
+ logger.info(f'Calculating gradient for {self.name}')
+ if element_scale_parameter is None:
+ if self.model is not None:
+ element_scale_parameter = np.min(self.model.bounding_box.step_vector) / 10
+ else:
+ element_scale_parameter = 1
+ else:
+ try:
+ element_scale_parameter = float(element_scale_parameter)
+ except ValueError:
+ logger.error("element_scale_parameter must be a float")
+ element_scale_parameter = 1
  v = np.zeros((pos.shape[0], 3))
+ v = np.zeros(pos.shape)
+ v[:] = np.nan
+ mask = self._calculate_mask(pos, ignore_regions=ignore_regions)
+ # evaluate the faults on the nodes of the faulted feature support
+ # then evaluate the gradient at these points
+ if len(self.faults) > 0:
+ # generate a regular tetrahedron for each point
+ # we will then move these points by the fault and then recalculate the gradient.
+ # this should work...
+ resolved = False
+ tetrahedron = regular_tetraherdron_for_points(pos, element_scale_parameter)
+
+ while resolved:
+ for f in self.faults:
+ v = (
+ f[0]
+ .evaluate_value(tetrahedron.reshape(-1, 3), fillnan='nearest')
+ .reshape(tetrahedron.shape[0], 4)
+ )
+ flag = np.logical_or(np.all(v > 0, axis=1), np.all(v < 0, axis=1))
+ if np.any(~flag):
+ logger.warning(
+ f"Points are too close to fault {f[0].name}. Refining the tetrahedron"
+ )
+ element_scale_parameter *= 0.5
+ tetrahedron = regular_tetraherdron_for_points(pos, element_scale_parameter)
+
+ resolved = True
+
+ tetrahedron_faulted = self._apply_faults(np.array(tetrahedron.reshape(-1, 3))).reshape(
+ tetrahedron.shape
+ )
+
+ values = self.function(tetrahedron_faulted.reshape(-1, 3)).reshape(
+ (-1, 4)
+ )
+ v[mask, :] = gradient_from_tetrahedron(tetrahedron[mask, :, :], values[mask])
+
+ return v
  if self.gradient_function is None:
  v[:, :] = np.nan
  else:
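evaluate_gradient is now computed numerically: a small regular tetrahedron is built around each query point, the (optionally fault-displaced) function is sampled at its four vertices, and the gradient is recovered from those samples via gradient_from_tetrahedron. A self-contained sketch of the idea, using a least-squares fit instead of the package helper and a simple linear scalar field as a stand-in for the feature:

import numpy as np

def tetra_around_points(xyz, scale):
    """Four vertices of a regular tetrahedron centred on each point."""
    tet = np.array(
        [
            [np.sqrt(8 / 9), 0.0, -1 / 3],
            [-np.sqrt(2 / 9), np.sqrt(2 / 3), -1 / 3],
            [-np.sqrt(2 / 9), -np.sqrt(2 / 3), -1 / 3],
            [0.0, 0.0, 1.0],
        ]
    ) * scale
    return xyz[:, None, :] + tet[None, :, :]          # (n, 4, 3)

def gradient_from_samples(vertices, values):
    """Least-squares gradient of a scalar sampled on 4 vertices per point."""
    centre = vertices.mean(axis=1, keepdims=True)
    dX = vertices - centre                             # (n, 4, 3) vertex offsets
    dv = values - values.mean(axis=1, keepdims=True)   # (n, 4) value offsets
    grads = np.empty((vertices.shape[0], 3))
    for i in range(vertices.shape[0]):
        grads[i], *_ = np.linalg.lstsq(dX[i], dv[i], rcond=None)
    return grads

def field(p):
    # Linear scalar field with known gradient (2, -3, 0.5).
    return 2.0 * p[:, 0] - 3.0 * p[:, 1] + 0.5 * p[:, 2]

pts = np.random.default_rng(1).uniform(0, 10, size=(6, 3))
tet = tetra_around_points(pts, scale=0.01)
vals = field(tet.reshape(-1, 3)).reshape(-1, 4)
print(gradient_from_samples(tet, vals))                # ~[2, -3, 0.5] for every point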
@@ -3,7 +3,6 @@ from typing import Union
  from LoopStructural.utils.maths import rotation
  from ._structural_frame_builder import StructuralFrameBuilder
  from .. import AnalyticalGeologicalFeature
- from LoopStructural.utils import get_vectors
  import numpy as np
  import pandas as pd
  from ....utils import getLogger
@@ -52,8 +51,11 @@ class FaultBuilder(StructuralFrameBuilder):
  self.frame.model = model
  self.origin = np.array([np.nan, np.nan, np.nan])
  self.maximum = np.array([np.nan, np.nan, np.nan]) # self.model.bounding_box[1, :]
+
+ if bounding_box is None:
+ raise ValueError("BoundingBox cannot be None")
+
  # define a maximum area to mesh adding buffer to model
- # buffer = .2
  self.minimum_origin = bounding_box.with_buffer(fault_bounding_box_buffer).origin
  self.maximum_maximum = bounding_box.with_buffer(fault_bounding_box_buffer).maximum

@@ -66,8 +68,23 @@ class FaultBuilder(StructuralFrameBuilder):
  self.fault_centre = None

  def update_geometry(self, points):
+ """
+ Update the geometry of the fault by adjusting the origin and maximum bounds
+ based on the provided points.
+
+ Parameters
+ ----------
+ points : numpy.ndarray
+ Array of points used to update the fault geometry.
+ """
+ if self.origin is None or self.maximum is None:
+ raise ValueError("Origin and maximum must be initialized before updating geometry.")
+
  self.origin = np.nanmin(np.array([np.min(points, axis=0), self.origin]), axis=0)
  self.maximum = np.nanmax(np.array([np.max(points, axis=0), self.maximum]), axis=0)
+ # add a small buffer 10% of current length to the origin and maximum
+ self.origin = self.origin - 0.1 * (self.maximum - self.origin)
+ self.maximum = self.maximum + 0.1 * (self.maximum - self.origin)
  self.origin[self.origin < self.minimum_origin] = self.minimum_origin[
  self.origin < self.minimum_origin
  ]
@@ -93,144 +110,109 @@ class FaultBuilder(StructuralFrameBuilder):
  fault_dip_anisotropy=1.0,
  fault_pitch=None,
  ):
- """Generate the required data for building a fault frame for a fault with the
- specified parameters
+ """
+ Generate the required data for building a fault frame with the specified parameters.

  Parameters
  ----------
- data : DataFrame,
- model data
- fault_center : np.array(3)
- x,y,z coordinates of the fault center
- normal_vector : np.array(3)
- x,y,z components of normal vector to fault, single observation usually
- average direction
- slip_vector : np.array(3)
- x,y,z components of slip vector for the fault, single observation usually
- average direction
- minor_axis : double
- distance away from fault for the fault volume
- major_axis : double
- fault extent
- intermediate_axis : double
- fault volume radius in the slip direction
+ fault_frame_data : pandas.DataFrame
+ DataFrame containing fault frame data.
+ fault_center : array-like, optional
+ Coordinates of the fault center.
+ fault_normal_vector : array-like, optional
+ Normal vector of the fault.
+ fault_slip_vector : array-like, optional
+ Slip vector of the fault.
+ minor_axis : float, optional
+ Minor axis length of the fault.
+ major_axis : float, optional
+ Major axis length of the fault.
+ intermediate_axis : float, optional
+ Intermediate axis length of the fault.
+ w : float, default=1.0
+ Weighting factor for the fault data.
+ points : bool, default=False
+ Whether to include points in the fault data.
+ force_mesh_geometry : bool, default=False
+ Whether to force the use of mesh geometry.
+ fault_buffer : float, default=0.2
+ Buffer size around the fault.
+ fault_trace_anisotropy : float, default=1.0
+ Anisotropy factor for the fault trace.
+ fault_dip : float, default=90
+ Dip angle of the fault in degrees.
+ fault_dip_anisotropy : float, default=1.0
+ Anisotropy factor for the fault dip.
+ fault_pitch : float, optional
+ Pitch angle of the fault.
  """
- trace_mask = np.logical_and(fault_frame_data["coord"] == 0, fault_frame_data["val"] == 0)
- logger.info(f"There are {np.sum(trace_mask)} points on the fault trace")
- if np.sum(trace_mask) == 0 and fault_center is None:
- logger.error("You cannot model a fault without defining the location of the fault")
- raise ValueError("There are no points on the fault trace")
- # find the middle point on the fault trace if center is not provided
- if fault_center is None:
- trace_mask = np.logical_and(
- fault_frame_data["coord"] == 0, fault_frame_data["val"] == 0
- )
- fault_center = fault_frame_data.loc[trace_mask, ["X", "Y", "Z"]].mean(axis=0).to_numpy()
- dist = np.linalg.norm(
- fault_center - fault_frame_data.loc[trace_mask, ["X", "Y", "Z"]].to_numpy(), axis=1
- )
- # make the nan points greater than the max dist 10 is arbitrary and doesn't matter
- dist[np.isnan(dist)] = np.nanmax(dist) + 10
- fault_center = fault_frame_data.loc[trace_mask, ["X", "Y", "Z"]].to_numpy()[
- np.argmin(dist), :
- ]
- # get all of the gradient data associated with the fault trace
+ fault_trace = fault_frame_data.loc[
+ np.logical_and(fault_frame_data["coord"] == 0, fault_frame_data["val"] == 0),
+ ["X", "Y"],
+ ].to_numpy()
  if fault_normal_vector is None:
- gradient_mask = np.logical_and(
- fault_frame_data["coord"] == 0, ~np.isnan(fault_frame_data["gz"])
- )
- vector_data = fault_frame_data.loc[gradient_mask, ["gx", "gy", "gz"]].to_numpy()
- normal_mask = np.logical_and(
- fault_frame_data["coord"] == 0, ~np.isnan(fault_frame_data["nz"])
- )
- vector_data = np.vstack(
- [
- vector_data,
- fault_frame_data.loc[normal_mask, ["nx", "ny", "nz"]].to_numpy(),
- ]
- )
+ if fault_frame_data.loc[
+ np.logical_and(fault_frame_data["coord"] == 0, fault_frame_data["nx"].notna())].shape[0]>0:
+ fault_normal_vector = fault_frame_data.loc[
+ np.logical_and(fault_frame_data["coord"] == 0, fault_frame_data["nx"].notna()),
+ ["nx", "ny", "nz"],
+ ].to_numpy().mean(axis=0)

- if len(vector_data) == 0:
- logger.warning(
- f"No orientation data for fault\n\
- Defaulting to a dip of {fault_dip}vertical fault"
- )
- # if the line is long enough, estimate the normal vector
- # by finding the centre point of the line and calculating the tangnent
- # of the two points
- if fault_frame_data.loc[trace_mask, :].shape[0] > 3:
-
- pts = fault_frame_data.loc[trace_mask, ["X", "Y", "Z"]].to_numpy()
- dist = np.abs(np.linalg.norm(fault_center - pts, axis=1))
- # any nans just make them max distance + a bit
- dist[np.isnan(dist)] = np.nanmax(dist) + 10
- # idx = np.argsort(dist)
- # direction_vector = pts[idx[-1]] - pts[idx[-2]]
- # coefficients = np.polyfit(
- # fault_frame_data.loc[trace_mask, "X"],
- # fault_frame_data.loc[trace_mask, "Y"],
- # 1,
- # )
- # slope, intercept = coefficients
- slope, intercept = np.polyfit(
- pts[dist < 0.25 * np.nanmax(dist), 0],
- pts[dist < 0.25 * np.nanmax(dist), 1],
- 1,
- )
-
- # # Create a direction vector using the slope
- direction_vector = np.array([1, slope, 0])
- direction_vector /= np.linalg.norm(direction_vector)
- rotation_matrix = rotation(direction_vector[None, :], [90 - fault_dip])
- vector_data = np.array(
- [
- [
- direction_vector[1],
- -direction_vector[0],
- 0,
- ]
- ]
- )
- vector_data /= np.linalg.norm(vector_data, axis=1)
- vector_data = np.einsum("ijk,ik->ij", rotation_matrix, vector_data)
-
- vector_data /= np.linalg.norm(vector_data, axis=1)
- fault_normal_vector = np.mean(vector_data, axis=0)
+ else:

- logger.info(f"Fault normal vector: {fault_normal_vector}")
+ # Calculate fault strike using eigenvectors
+ pts = fault_trace - fault_trace.mean(axis=0)
+ # Calculate covariance matrix
+ cov_matrix = pts.T @ pts
+ # Get eigenvectors and eigenvalues
+ eigenvalues, eigenvectors = np.linalg.eigh(cov_matrix)
+ # Use eigenvector with largest eigenvalue as strike direction
+ strike_vector = eigenvectors[:, np.argmax(eigenvalues)]
+ strike_vector = np.append(strike_vector, 0) # Add z component
+ strike_vector /= np.linalg.norm(strike_vector)
+
+ fault_normal_vector = np.cross(strike_vector, [0, 0, 1])
+ # Rotate the fault normal vector according to the fault dip
+ rotation_matrix = rotation(strike_vector[None, :], np.array([90 - fault_dip]))
+ fault_normal_vector = np.einsum("ijk,ik->ij", rotation_matrix, fault_normal_vector[None, :])[0]
+
+ if not isinstance(fault_normal_vector, np.ndarray):
+ fault_normal_vector = np.array(fault_normal_vector)
+
+ if fault_pitch is not None:
+ rotation_matrix = rotation(fault_normal_vector[None, :], np.array([fault_pitch]))
+ fault_slip_vector = np.einsum("ijk,ik->ij", rotation_matrix, fault_normal_vector[None, :])[0]

- # estimate the fault slip vector
  if fault_slip_vector is None:
- slip_mask = np.logical_and(
- fault_frame_data["coord"] == 1, ~np.isnan(fault_frame_data["gz"])
- )
- fault_slip_data = fault_frame_data.loc[slip_mask, ["gx", "gy", "gz"]]
-
- if len(fault_slip_data) == 0:
- logger.warning(
- "There is no slip vector data for the fault, using vertical slip vector\n\
- projected onto fault surface estimating from fault normal"
- )
- strike_vector, dip_vector = get_vectors(fault_normal_vector[None, :])
- fault_slip_vector = dip_vector[:, 0]
- if fault_pitch is not None:
- print('using pitch')
- rotm = rotation(fault_normal_vector[None,:],[fault_pitch])
- print(rotm.shape,fault_slip_vector.shape)
- fault_slip_vector = np.einsum("ijk,k->ij", rotm, fault_slip_vector)[0,:]
- logger.info(f"Estimated fault slip vector: {fault_slip_vector}")
- else:
- fault_slip_vector = fault_slip_data.mean(axis=0).to_numpy()
+ if fault_frame_data.loc[
+ np.logical_and(fault_frame_data["coord"] == 1, fault_frame_data["nx"].notna())].shape[0]>0:
+ fault_slip_vector = fault_frame_data.loc[
+ np.logical_and(fault_frame_data["coord"] == 1, fault_frame_data["nx"].notna()),
+ ["nx", "ny", "nz"],
+ ].to_numpy().mean(axis=0)

- self.fault_normal_vector = fault_normal_vector
- self.fault_slip_vector = fault_slip_vector
-
- self.fault_centre = fault_center
- if major_axis is None:
+ else:
+ fault_slip_vector = np.cross(fault_normal_vector, [1., 0., 0.])
+ if np.linalg.norm(fault_slip_vector) == 0:
+ fault_slip_vector = np.cross(fault_normal_vector, [0., 1., 0.])
+ fault_slip_vector /= np.linalg.norm(fault_slip_vector)
+ if fault_center is None:
  fault_trace = fault_frame_data.loc[
  np.logical_and(fault_frame_data["coord"] == 0, fault_frame_data["val"] == 0),
  ["X", "Y"],
  ].to_numpy()
+ fault_center = fault_trace.mean(axis=0)
+ fault_center = np.array([fault_center[0], fault_center[1], 0.0])
+ if not isinstance(fault_center, np.ndarray):
+ fault_center = np.array(fault_center)
+ if fault_center.shape[0] != 3:
+ raise ValueError("fault_center must be a 3 element array")
+ self.fault_normal_vector = fault_normal_vector / np.linalg.norm(fault_normal_vector)
+ self.fault_slip_vector = fault_slip_vector / np.linalg.norm(fault_slip_vector)
+
+ self.fault_centre = fault_center
+ if major_axis is None:
+
  distance = np.linalg.norm(fault_trace[:, None, :] - fault_trace[None, :, :], axis=2)
  if len(distance) == 0 or np.sum(distance) == 0:
  logger.warning("There is no fault trace for {}".format(self.name))
@@ -382,6 +364,7 @@ class FaultBuilder(StructuralFrameBuilder):
  0,
  w,
  ]
+
  if major_axis is not None:
  fault_tips[0, :] = fault_center[:3] + strike_vector * 0.5 * major_axis
  fault_tips[1, :] = fault_center[:3] - strike_vector * 0.5 * major_axis
@@ -503,16 +486,14 @@ class FaultBuilder(StructuralFrameBuilder):
  self.origin - length * buffer, self.maximum + length * buffer, rotation
  )

- def add_splay(self, splay, splayregion=None):
- if splayregion is None:
+ def add_splay(self, splay, splay_region=None):
+ if splay_region is None:

- def splayregion(xyz):
+ def default_splay_region(xyz):
  pts = (
- self.builders[0].data[["X", "Y", "Z", "val"]].to_numpy()
+ self.builders[0].data["X", "Y", "Z", "val"].to_numpy()
  ) # get_value_constraints()
  pts = pts[pts[:, 3] == 0, :]
- # check whether the fault is on the hanging wall or footwall of splay fault
-
  ext_field = splay[2].evaluate_value(pts[:, :3])
  surf_field = splay[0].evaluate_value(pts[:, :3])
  intersection_value = ext_field[np.nanargmin(np.abs(surf_field))]
@@ -531,19 +512,24 @@ class FaultBuilder(StructuralFrameBuilder):
  )
  return mask

+ splay_region = default_splay_region
+
  scalefactor = splay.fault_major_axis / self.fault_major_axis
- self.builders[0].add_equality_constraints(splay, splayregion, scalefactor)
- return splayregion
+ self.builders[0].add_equality_constraints(splay, splay_region, scalefactor)
+ return splay_region

  def add_fault_trace_anisotropy(self, w: float = 1.0):
- """_summary_
+ """
+ Add fault trace anisotropy to the model.

  Parameters
  ----------
  w : float, optional
- _description_, by default 1.0
+ Weighting factor for the anisotropy, by default 1.0
  """
  if w > 0:
+ if self.fault_normal_vector is None:
+ raise ValueError("fault_normal_vector must be initialized before adding anisotropy.")

  plane = np.array([0, 0, 1])
  strike_vector = (
@@ -553,24 +539,25 @@ class FaultBuilder(StructuralFrameBuilder):
  strike_vector = np.array([strike_vector[1], -strike_vector[0], 0])

  anisotropy_feature = AnalyticalGeologicalFeature(
- vector=strike_vector, origin=[0, 0, 0], name="fault_trace_anisotropy"
+ vector=strike_vector, origin=np.array([0, 0, 0]), name="fault_trace_anisotropy"
  )
- # print('adding fault trace anisotropy')
  self.builders[0].add_orthogonal_feature(
  anisotropy_feature, w=w, region=None, step=1, B=0
  )

  def add_fault_dip_anisotropy(self, w: float = 1.0):
- """_summary_
+ """
+ Add fault dip anisotropy to the model.

  Parameters
  ----------
- dip : np.ndarray
- _description_
  w : float, optional
- _description_, by default 1.0
+ Weighting factor for the anisotropy, by default 1.0
  """
  if w > 0:
+ if self.fault_normal_vector is None:
+ raise ValueError("fault_normal_vector must be initialized before adding anisotropy.")
+
  plane = np.array([0, 0, 1])
  strike_vector = (
  self.fault_normal_vector - np.dot(self.fault_normal_vector, plane) * plane
@@ -579,11 +566,11 @@ class FaultBuilder(StructuralFrameBuilder):
  strike_vector = np.array([strike_vector[1], -strike_vector[0], 0])

  dip_vector = np.cross(strike_vector, self.fault_normal_vector)
+ dip_vector /= np.linalg.norm(dip_vector)

  anisotropy_feature = AnalyticalGeologicalFeature(
- vector=dip_vector, origin=[0, 0, 0], name="fault_dip_anisotropy"
+ vector=dip_vector, origin=np.array([0, 0, 0]), name="fault_dip_anisotropy"
  )
- # print(f'adding fault dip anisotropy {anisotropy_feature.name}')
  self.builders[0].add_orthogonal_feature(
  anisotropy_feature, w=w, region=None, step=1, B=0
  )
@@ -86,7 +86,7 @@ class GeologicalFeatureBuilder(BaseBuilder):
  self._orthogonal_features = {}
  self._equality_constraints = {}
  # add default parameters
- self.update_build_arguments({'cpw':1.0,'npw':1.0,'regularisation':1.0,'nelements':self.interpolator.n_elements})
+ self.update_build_arguments({'cpw':1.0,'npw':1.0,'regularisation':.10,'nelements':self.interpolator.n_elements})
  def set_not_up_to_date(self, caller):
  logger.info(
  f"Setting {self.name} to not up to date from an instance of {caller.__class__.__name__}"
@@ -10,8 +10,10 @@ logger = getLogger(__name__)


  def smooth_peak(x):
- return 0.25 * x**6 + 0.5 * x**4 - 1.75 * x**2 + 1
-
+ v = np.zeros(x.shape)
+ mask = np.logical_and(x >= -1, x <= 1)
+ v[mask] = x[mask] ** 4 - 2 * x[mask] ** 2 + 1
+ return v

  class FaultProfileFunction(metaclass=ABCMeta):
  def __init__(self):
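The old smooth_peak polynomial, 0.25*x**6 + 0.5*x**4 - 1.75*x**2 + 1, is zero at x = ±1 but grows without bound beyond that range; the replacement evaluates x**4 - 2*x**2 + 1, i.e. (x**2 - 1)**2, only inside [-1, 1] and returns zero elsewhere, giving a compactly supported peak that is 1 at x = 0 and tapers to 0 at x = ±1. A quick standalone check of the new behaviour:

import numpy as np

def smooth_peak(x):
    # Compact-support bump: (x**2 - 1)**2 inside [-1, 1], zero outside.
    v = np.zeros(x.shape)
    mask = np.logical_and(x >= -1, x <= 1)
    v[mask] = x[mask] ** 4 - 2 * x[mask] ** 2 + 1
    return v

x = np.linspace(-2, 2, 9)
print(np.round(smooth_peak(x), 3))  # zero outside [-1, 1], 1 at x = 0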
@@ -467,7 +467,7 @@ class FaultSegment(StructuralFrame):
  )
  self.abut[abutting_fault_feature.name] = abutting_region
  self.__getitem__(0).add_region(abutting_region)
-
+ return abutting_region
  def save(self, filename, scalar_field=True, slip_vector=True, surface=True):
  """
  Save the fault to a file
@@ -23,6 +23,7 @@ from .maths import (
  get_strike_vector,
  get_vectors,
  strikedip2vector,
+ plungeazimuth2vector,
  azimuthplunge2vector,
  normal_vector_to_strike_and_dip,
  rotate,
@@ -145,7 +145,7 @@ class LoopIsosurfacer:
  values = np.zeros(verts.shape[0]) + isovalue
  # need to add both global and local origin. If the bb is a buffer the local
  # origin may not be 0
- verts += self.bounding_box.global_origin
+ verts += self.bounding_box.global_origin+self.bounding_box.origin
  surfaces.append(
  Surface(
  vertices=verts,
@@ -107,30 +107,7 @@ def get_data_bounding_box(xyz, buffer):
  return bb, region


- # def azimuthplunge2vector(
- # plunge: Union[np.ndarray, list], plunge_dir: Union[np.ndarray, list]
- # ) -> np.ndarray:
- # """Convert plunge and plunge direction to a vector
-
- # Parameters
- # ----------
- # plunge : Union[np.ndarray, list]
- # array or array like of plunge values
- # plunge_dir : Union[np.ndarray, list]
- # array or array like of plunge direction values
-
- # Returns
- # -------
- # np.array
- # nx3 vector
- # """
- # plunge = np.deg2rad(plunge)
- # plunge_dir = np.deg2rad(plunge_dir)
- # vec = np.zeros(3)
- # vec[0] = np.sin(plunge_dir) * np.cos(plunge)
- # vec[1] = np.cos(plunge_dir) * np.cos(plunge)
- # vec[2] = -np.sin(plunge)
- # return vec
+


  def create_surface(bounding_box, nstep):
@@ -28,20 +28,28 @@ def strikedip2vector(strike: NumericInput, dip: NumericInput) -> np.ndarray:
  vec /= np.linalg.norm(vec, axis=1)[:, None]
  return vec

-
  def azimuthplunge2vector(
+ plunge: NumericInput,
+ azimuth: NumericInput,
+ degrees: bool = True,
+ ) -> np.ndarray:
+ raise DeprecationWarning(
+ "azimuthplunge2vector is deprecated, use plungeazimuth2vector instead"
+ )
+
+ def plungeazimuth2vector(
  plunge: NumericInput,
- plunge_dir: NumericInput,
+ azimuth: NumericInput,
  degrees: bool = True,
  ) -> np.ndarray:
  """Convert plunge and plunge direction to a vector

  Parameters
  ----------
+ azimuth : Union[np.ndarray, list]
+ array or array like of plunge direction values
  plunge : Union[np.ndarray, list]
  array or array like of plunge values
- plunge_dir : Union[np.ndarray, list]
- array or array like of plunge direction values

  Returns
  -------
@@ -52,16 +60,16 @@ def azimuthplunge2vector(
  plunge = np.array([plunge], dtype=float)
  else:
  plunge = np.array(plunge, dtype=float)
- if isinstance(plunge_dir, numbers.Number):
- plunge_dir = np.array([plunge_dir], dtype=float)
+ if isinstance(azimuth, numbers.Number):
+ azimuth = np.array([azimuth], dtype=float)
  else:
- plunge_dir = np.array(plunge_dir, dtype=float)
+ azimuth = np.array(azimuth, dtype=float)
  if degrees:
  plunge = np.deg2rad(plunge)
- plunge_dir = np.deg2rad(plunge_dir)
+ azimuth = np.deg2rad(azimuth)
  vec = np.zeros((len(plunge), 3))
- vec[:, 0] = np.sin(plunge_dir) * np.cos(plunge)
- vec[:, 1] = np.cos(plunge_dir) * np.cos(plunge)
+ vec[:, 0] = np.sin(azimuth) * np.cos(plunge)
+ vec[:, 1] = np.cos(azimuth) * np.cos(plunge)
  vec[:, 2] = -np.sin(plunge)
  return vec

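plungeazimuth2vector keeps the same trigonometry as the renamed arguments suggest: x = sin(azimuth)·cos(plunge), y = cos(azimuth)·cos(plunge), z = -sin(plunge), with azimuth measured clockwise from north and plunge positive downwards. A standalone check of the formula (hypothetical helper name, not an import from LoopStructural):

import numpy as np

def plunge_azimuth_to_vector(plunge_deg, azimuth_deg):
    plunge = np.deg2rad(np.atleast_1d(plunge_deg).astype(float))
    azimuth = np.deg2rad(np.atleast_1d(azimuth_deg).astype(float))
    vec = np.zeros((len(plunge), 3))
    vec[:, 0] = np.sin(azimuth) * np.cos(plunge)   # east component
    vec[:, 1] = np.cos(azimuth) * np.cos(plunge)   # north component
    vec[:, 2] = -np.sin(plunge)                    # down is negative z
    return vec

# A horizontal line trending due east, and a vertical plunge.
print(plunge_azimuth_to_vector([0, 90], [90, 0]))
# ≈ [[1, 0, 0], [0, 0, -1]] (up to floating point noise)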
@@ -204,19 +212,21 @@ def get_vectors(normal: NumericInput) -> Tuple[np.ndarray, np.ndarray]:


  def get_strike_vector(strike: NumericInput, degrees: bool = True) -> np.ndarray:
- """Return the vector aligned with the strike direction
+ """Return strike direction vector(s) from strike angle(s).

  Parameters
  ----------
- strike : np.ndarray
- strike direction
+ strike : NumericInput
+ Single strike angle or array-like of strike angles, measured clockwise from North.
  degrees : bool, optional
- whether to return in degrees or radians, by default True
+ Whether the input angles are in degrees. If False, angles are assumed to be in radians.
+ Default is True.

  Returns
  -------
  np.ndarray
- vector aligned with strike direction
+ Array of shape (3, n) where each column is a 3D unit vector (x, y, z) representing
+ the horizontal strike direction. The z-component is always 0.

  """
  if isinstance(strike, numbers.Number):
@@ -236,6 +246,21 @@ def get_strike_vector(strike: NumericInput, degrees: bool = True) -> np.ndarray:


  def get_dip_vector(strike, dip):
+ """Return the dip vector based on strike and dip angles.
+
+ Parameters
+ ----------
+ strike : float
+ Strike angle in degrees, measured clockwise from North.
+ dip : float
+ Dip angle in degrees, measured from the horizontal plane.
+
+ Returns
+ -------
+ np.ndarray
+ Unit vector (length 3) representing the dip direction in 3D space.
+
+ """
  v = np.array(
  [
  -np.cos(np.deg2rad(-strike)) * np.cos(-np.deg2rad(dip)),
@@ -247,6 +272,23 @@ def get_dip_vector(strike, dip):


  def regular_tetraherdron_for_points(xyz, scale_parameter):
+ """Generate regular tetrahedrons centered at given 3D points.
+
+ Parameters
+ ----------
+ xyz : np.ndarray
+ Array of shape (n, 3) representing the coordinates of n points in 3D space,
+ which will serve as the centers of the generated tetrahedrons.
+ scale_parameter : float
+ Scaling factor controlling the size of the regular tetrahedrons.
+
+ Returns
+ -------
+ np.ndarray
+ Array of shape (n, 4, 3) representing n regular tetrahedrons, where each
+ tetrahedron has 4 vertices in 3D space, positioned relative to the corresponding center point.
+
+ """
  regular_tetrahedron = np.array(
  [
  [np.sqrt(8 / 9), 0, -1 / 3],
@@ -264,8 +306,23 @@ def regular_tetraherdron_for_points(xyz, scale_parameter):


  def gradient_from_tetrahedron(tetrahedron, value):
- """
- Calculate the gradient from a tetrahedron
+ """Compute the gradient of values within tetrahedral elements
+
+ Parameters
+ ----------
+ tetrahedron : np.ndarray
+ Array of shape (n, 4, 3) representing the coordinates of tetrahedral elements,
+ where each tetrahedron is defined by 4 vertices in 3D space.
+ value : np.ndarray
+ Array of shape (n, 4) representing the scalar values at the 4 vertices
+ of each tetrahedron.
+
+ Returns
+ -------
+ np.ndarray
+ Array of shape (n, 3) representing the gradient vector of the scalar field
+ inside each tetrahedral element.
+
  """
  tetrahedron = tetrahedron.reshape(-1, 4, 3)
  m = np.array(
@@ -1,9 +1,20 @@
  import numpy as np
+ from abc import ABC, abstractmethod
+ from typing import Tuple

+ class BaseRegion(ABC):
+ @abstractmethod
+ def __init__(self, feature, vector=None, point=None):
+ self.feature = feature
+ self.vector = vector
+ self.point = point
+ self.name = None
+ self.parent = None

- class BaseRegion:
- def __init__(self):
- self.type = "BaseRegion"
+ @abstractmethod
+ def __call__(self, xyz) -> np.ndarray:
+ """Evaluate the region based on the input coordinates."""
+ pass


  class RegionEverywhere(BaseRegion):
@@ -23,19 +34,18 @@ class RegionFunction(BaseRegion):
  def __call__(self, xyz):
  return self.function(xyz)

-
- class PositiveRegion:
+ class BaseSignRegion(BaseRegion):
  """Helper class for evaluating whether you are in the positive region of a scalar field.
  If its outside of the support it will interpolate the average gradient at a point on the 0 isovalue
  and calculate the distance from this. Alternatively, a point and vector can be used to save computational time
  """

  def __init__(self, feature, vector=None, point=None):
- self.feature = feature
- self.vector = vector
- self.point = point
+ super().__init__(feature, vector, point)
+ self.name = 'PositiveRegion'
+ self.parent = feature

- def __call__(self, xyz):
+ def _calculate_value_and_distance(self, xyz)-> Tuple[np.ndarray, np.ndarray]:
  val = self.feature.evaluate_value(xyz)
  # find a point on/near 0 isosurface
  if self.point is None:
@@ -52,51 +62,45 @@ class PositiveRegion:
  average_gradient /= np.linalg.norm(average_gradient)
  else:
  average_gradient = self.vector
- # distance = ((xyz[:,0] - centre[None,0])*average_gradient[0] +
- # (xyz[:,1] - centre[None,1])*average_gradient[1] +
- # ( xyz[:,2] - centre[None,2])*average_gradient[2])
- distance = np.einsum("ij,j->i", centre[None, :] - xyz, average_gradient)
+
+ distance = np.einsum(
+ "ij,j->i", centre[None, :] - xyz, average_gradient.reshape(-1, 3)[0, :]
+ )
+ return val, distance
+
+
+ class PositiveRegion(BaseSignRegion):
+ """Helper class for evaluating whether you are in the positive region of a scalar field.
+ If its outside of the support it will interpolate the average gradient at a point on the 0 isovalue
+ and calculate the distance from this. Alternatively, a point and vector can be used to save computational time
+ """
+
+ def __init__(self, feature, vector=None, point=None):
+ super().__init__(feature, vector, point)
+ self.name = 'PositiveRegion'
+ self.parent = feature
+
+ def __call__(self, xyz) -> np.ndarray:
+ val, distance = self._calculate_value_and_distance(xyz)
  return np.logical_or(
  np.logical_and(~np.isnan(val), val > 0),
  np.logical_and(np.isnan(val), distance > 0),
  )


- class NegativeRegion:
+ class NegativeRegion(BaseSignRegion):
  """Helper class for evaluating whether you are in the positive region of a scalar field.
  If its outside of the support it will interpolate the average gradient at a point on the 0 isovalue
  and calculate the distance from this. Alternatively, a point and vector can be used to save computational time
  """

  def __init__(self, feature, vector=None, point=None):
- self.feature = feature
- self.vector = vector
- self.point = point
+ super().__init__(feature, vector, point)
+ self.name = 'NegativeRegion'
+ self.parent = feature

- def __call__(self, xyz):
- val = self.feature.evaluate_value(xyz)
- # find a point on/near 0 isosurface
- if self.point is None:
- mask = np.zeros(xyz.shape[0], dtype="bool")
- mask[:] = val < 0
- if np.sum(mask) == 0:
- raise ValueError("Cannot find point on surface")
- centre = xyz[mask, :][0, :]
- else:
- centre = self.point
- if self.vector is None:
- average_gradient = self.feature.evaluate_gradient(np.array([centre]))[0]
- average_gradient[2] = 0
- average_gradient /= np.linalg.norm(average_gradient)
- else:
- average_gradient = self.vector
- distance = np.einsum("ij,j->i", xyz - centre[None, :], average_gradient)
- # distance = ((xyz[:,0] - centre[None,0])*average_gradient[0] +
- # (xyz[:,1] - centre[None,1])*average_gradient[1] +
- # ( xyz[:,2] - centre[None,2])*average_gradient[2])
- # return np.logical_or(np.logical_and(~np.isnan(val),val
- # < 0),
- # np.logical_and(np.isnan(val),distance>0))
+ def __call__(self, xyz) -> np.ndarray:
+ val, distance = self._calculate_value_and_distance(xyz)
  return np.logical_or(
  np.logical_and(~np.isnan(val), val < 0),
  np.logical_and(np.isnan(val), distance < 0),
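PositiveRegion and NegativeRegion now share BaseSignRegion._calculate_value_and_distance: where the scalar field is defined, the sign of the value decides membership; where it evaluates to NaN (outside the support), a signed distance from a reference point along an average gradient direction is used instead. A standalone numpy sketch of that decision rule with a plain function in place of a GeologicalFeature; the names and the exact sign convention here are illustrative, not the package API:

import numpy as np

def sign_region(xyz, value_fn, centre, direction, positive=True):
    """Sketch: membership test for the positive/negative side of a scalar field.

    Where value_fn is defined, the sign of the value decides; where it returns NaN
    (outside the support), a half-space test along `direction` from `centre` decides.
    The sign conventions in LoopStructural's PositiveRegion/NegativeRegion may differ.
    """
    val = value_fn(xyz)
    distance = np.einsum("ij,j->i", xyz - centre[None, :], direction)
    if positive:
        return np.logical_or(
            np.logical_and(~np.isnan(val), val > 0),
            np.logical_and(np.isnan(val), distance > 0),
        )
    return np.logical_or(
        np.logical_and(~np.isnan(val), val < 0),
        np.logical_and(np.isnan(val), distance < 0),
    )

# Scalar field x - 5, only defined for 0 <= x <= 10; NaN elsewhere.
def value_fn(xyz):
    v = xyz[:, 0] - 5.0
    v[(xyz[:, 0] < 0) | (xyz[:, 0] > 10)] = np.nan
    return v

pts = np.array([[6.0, 0, 0], [4.0, 0, 0], [20.0, 0, 0], [-20.0, 0, 0]])
print(sign_region(pts, value_fn, np.array([5.0, 0, 0]), np.array([1.0, 0, 0])))
# [ True False  True False]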
LoopStructural/version.py CHANGED
@@ -1 +1 @@
- __version__ = "1.6.12"
+ __version__ = "1.6.14"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: LoopStructural
- Version: 1.6.12
+ Version: 1.6.14
  Summary: 3D geological modelling
  Author-email: Lachlan Grose <lachlan.grose@monash.edu>
  License: MIT
@@ -1,5 +1,5 @@
  LoopStructural/__init__.py,sha256=fg_Vm1aMDYIf_CffTFopLsTx21u6deLaI7JMVpRYdOI,1378
- LoopStructural/version.py,sha256=1ThuVeMS5KlxvVTyorbbzJi1pftu6GNdFZ7WRJWlGVc,23
+ LoopStructural/version.py,sha256=wWsaf5Mb6Lxn5-XiXSzm8MwgO3zmyC4OHxecUfU1mC8,23
  LoopStructural/datasets/__init__.py,sha256=ylb7fzJU_DyQ73LlwQos7VamqkDSGITbbnoKg7KAOmE,677
  LoopStructural/datasets/_base.py,sha256=FB_D5ybBYHoaNbycdkpZcRffzjrrL1xp9X0k-pyob9Y,7618
  LoopStructural/datasets/_example_models.py,sha256=Zg33IeUyh4C-lC0DRMLqCDP2IrX8L-gNV1WxJwBGjzM,113
@@ -29,7 +29,7 @@ LoopStructural/datasets/data/geological_map_data/stratigraphic_order.csv,sha256=
  LoopStructural/datasets/data/geological_map_data/stratigraphic_orientations.csv,sha256=RysyqUAIjY6iIDUfTh11n9QUQWXB_qxKnZeN_DqNzlY,26745
  LoopStructural/datasets/data/geological_map_data/stratigraphic_thickness.csv,sha256=pnSmG-wL8-kxuoHo_pgpJrfTmsZOzc8L0vxpBRh3r8A,355
  LoopStructural/datatypes/__init__.py,sha256=lVg64DnynMm58qvYTjLrcyWH7vk2ngr9JGMo5FaiALI,160
- LoopStructural/datatypes/_bounding_box.py,sha256=gXNKwhM3tCNuw_2PY92EoKJjJTI93fN-QQzlUl5Wjd0,18353
+ LoopStructural/datatypes/_bounding_box.py,sha256=lSiROmnaRPANMjMTjy_LoOxF8olfoEkArrQ0Cq4DVmk,18400
  LoopStructural/datatypes/_point.py,sha256=qg3lXUA1rnu1N1cEWG0WvhvJuENfDgpEDIeYldWBaG8,7740
  LoopStructural/datatypes/_structured_grid.py,sha256=mc-UM1Gh_BjHFItuPE4FF5wvGzJnSqF2MTx_xvrwcTk,5088
  LoopStructural/datatypes/_surface.py,sha256=5BpPKVS4X3Kq1k3YxxAofKMgxdXhnOIcDi6NzKn2p2Q,6652
@@ -38,17 +38,17 @@ LoopStructural/export/file_formats.py,sha256=0xKyYSW4Jv_4jsXwusg-WO6PNUhZKd6HdWS
  LoopStructural/export/geoh5.py,sha256=jLFKC5EB0azT3PgJPtkJzi3_CG28RLgP2FuENAGCQMI,4313
  LoopStructural/export/gocad.py,sha256=cQ6v7ZD0CVubt3c2f9EwAYrziu5bEFSWBtx0uade5mg,3370
  LoopStructural/export/omf_wrapper.py,sha256=4vcF4WOQIVEYsWrfATgKCDh8nUybLTbrlXnCxZ_3fkU,3392
- LoopStructural/interpolators/__init__.py,sha256=8uFSHumlOuHzs_3i26DCW1QWO93en_AzAmUEJy5iUXA,3451
+ LoopStructural/interpolators/__init__.py,sha256=BcCGY_9Hz_pNKibsdSKNn44U1RCFQ2yNfYCtJOXv8vU,3731
  LoopStructural/interpolators/_api.py,sha256=EC4ogG2uPq-z_pgNGd_eTieTl92eaZ-rjyoFwXiHL_s,7839
  LoopStructural/interpolators/_builders.py,sha256=B49KsxB8RRN6IHDfGT43nXWe_Av1SVVT8vm2Nh1oEiQ,6758
  LoopStructural/interpolators/_discrete_fold_interpolator.py,sha256=eDe0R1lcQ0AuMcv7zlpu5c-soCv7AybIqQAuN2vFE3M,6542
  LoopStructural/interpolators/_discrete_interpolator.py,sha256=i_joZ8HOf_s6Q2L8gHFnhkdtgyED1SjATxRsRd1HxRU,26038
- LoopStructural/interpolators/_finite_difference_interpolator.py,sha256=mZ89FWQZ5RbzhL9UYH8VzWME-28dy331KXtYtpqepHo,18351
+ LoopStructural/interpolators/_finite_difference_interpolator.py,sha256=rF9hhIcJTTMmy3IAcv3DPfPpgOMn8Arxj8FtBM7UuvA,18704
  LoopStructural/interpolators/_geological_interpolator.py,sha256=hcQuyv1zYakJ7mcDFlLj-YarjnMQvlP6pVbK1KuxBWs,11195
  LoopStructural/interpolators/_interpolator_builder.py,sha256=Z8bhmco5aSQX19A8It2SB_rG61wnlyshWfp3ivm8rU0,4586
  LoopStructural/interpolators/_interpolator_factory.py,sha256=fbjebXSe5IgTol1tnBlnsw9gD426v-TGkX3gquIg7LI,2782
  LoopStructural/interpolators/_operator.py,sha256=PZOUzq9OMaJdG151dSLIo7AxRuhTj6-zEAzFZo-EOJU,1114
- LoopStructural/interpolators/_p1interpolator.py,sha256=4rjj4iaw2c8hOfBS9u8ycxzijYdmvpeijvhYRwUwZg0,8736
+ LoopStructural/interpolators/_p1interpolator.py,sha256=-xTUMDe5slKpgMT-_7RB5voavLs-OmqN2qyd6RKu-gc,8730
  LoopStructural/interpolators/_p2interpolator.py,sha256=UT-As5RNsmOwHOzO_6FiRcAwlNHfi4ILbJw2LGpwKAw,10274
  LoopStructural/interpolators/_surfe_wrapper.py,sha256=Qdz-SuPBVh7gIw4-ZLdDqHkpezmq-Y3IhtRsbW417Xk,6837
  LoopStructural/interpolators/_cython/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -59,7 +59,7 @@ LoopStructural/interpolators/supports/_2d_structured_grid.py,sha256=Pt9fiXyTS-RT
  LoopStructural/interpolators/supports/_2d_structured_tetra.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  LoopStructural/interpolators/supports/_3d_base_structured.py,sha256=PYIkgWCTHk0OHn-T9wpF4ZFYnKyvXNZgUyqO7YcoFjU,16481
  LoopStructural/interpolators/supports/_3d_p2_tetra.py,sha256=CqGVJRUMxbPQZDhhopNt_s9gVhMqh4YbjQyDZonoyxc,11574
- LoopStructural/interpolators/supports/_3d_structured_grid.py,sha256=x9NoZRsl58iowcObavgb0nY_C335BmcIYgec9REsFpU,17366
+ LoopStructural/interpolators/supports/_3d_structured_grid.py,sha256=ha32EmyN4EouxEeEPIMiQL3yq8zDHQZRcg2yOkiDPmY,17476
  LoopStructural/interpolators/supports/_3d_structured_tetra.py,sha256=5zUNtvEXDvbCHZCu6Fz9WjGbnrMaq-sYJqNUufyLcq8,26505
  LoopStructural/interpolators/supports/_3d_unstructured_tetra.py,sha256=_peXMTMxctuWNOL74AHxzw0b_1sP5glvbJigIvIkK9I,23867
  LoopStructural/interpolators/supports/__init__.py,sha256=V0JjixoBIUZVAo5MmqARR67xDOoQwnb4G3SXeOMRSyQ,1603
@@ -67,29 +67,29 @@ LoopStructural/interpolators/supports/_aabb.py,sha256=Z-kH_u6c6izak0aHG3Uo14PEKQ
  LoopStructural/interpolators/supports/_base_support.py,sha256=pYzsmeBu4kLaD9ZKsz_dfjVpfuAd00xENqOQC9Xw5QY,2501
  LoopStructural/interpolators/supports/_face_table.py,sha256=Hyj4Io63NkPRN8ab9uDHyec-2Kb8BLY_xBF6STNlvBw,3095
  LoopStructural/interpolators/supports/_support_factory.py,sha256=XNAxnr-JS3KEhdsoZeJ-VaLTJwlvxgBuRMCqYrCDW18,1485
- LoopStructural/modelling/__init__.py,sha256=oW7dz6c8K1A0VcW7-mVcyqcENUrtybCb3eVUNXFvMfA,656
+ LoopStructural/modelling/__init__.py,sha256=a-bq2gDhyUlcky5l9kl_IP3ExMdohkgYjQz2V8madQE,902
  LoopStructural/modelling/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  LoopStructural/modelling/core/geological_model.py,sha256=okbEwjL8-wGKeWhvYE-1XSOE2C1nHX7M2AR__42pyAk,66464
  LoopStructural/modelling/features/__init__.py,sha256=Vf-qd5EDBtJ1DpuXXyCcw2-wf6LWPRW5wzxDEO3vOc8,939
  LoopStructural/modelling/features/_analytical_feature.py,sha256=U_g86LgQhYY2359rdsDqpvziYwqrWkc5EdvhJARiUWo,3597
- LoopStructural/modelling/features/_base_geological_feature.py,sha256=985aHq8z5riS7uS04mGBPd8UcPXFv0l_fPAdlE6Ydcc,12175
+ LoopStructural/modelling/features/_base_geological_feature.py,sha256=hx1EmDGWCE8YhypWuet6SUfpoGQCSecXj6zTF-CLRRg,12179
  LoopStructural/modelling/features/_cross_product_geological_feature.py,sha256=GIyCHUdE6F-bse2e4puG9V2f7qRtDVfby5PRe2BboD4,3021
  LoopStructural/modelling/features/_geological_feature.py,sha256=u6pbKj9BujX1Ijj5eVdhwGDNjrIAI16CpiAn5n8g3RY,11279
- LoopStructural/modelling/features/_lambda_geological_feature.py,sha256=ieZiiMd8HCeVEIl3B406tsysT7iUEGcEUJXALjhjkeM,2924
+ LoopStructural/modelling/features/_lambda_geological_feature.py,sha256=GiB19l6v5WvvR8CitATZvCwaOfRyLuzchoXzpNupsfM,5743
  LoopStructural/modelling/features/_projected_vector_feature.py,sha256=aifVLgn2spmK7GGlO0iHDewf1pFL-QoRzZEePTZwX1s,3017
  LoopStructural/modelling/features/_region.py,sha256=TB4qnoTDQM2VgRjgyODN839fKe3kuRYLllJj0xnDKXo,478
  LoopStructural/modelling/features/_structural_frame.py,sha256=e3QmNHLwuZc5PX3rLafocmBLNTclO90AXB4BRILCFC4,5044
  LoopStructural/modelling/features/_unconformity_feature.py,sha256=2Bx0BI38YLdcNvDWuP9E1pKFN4orEUq9aC8b5xG1UVk,2362
  LoopStructural/modelling/features/builders/__init__.py,sha256=Gqld1C-PcaXfJ8vpkWMDCmehmd3hZNYQk1knPtl59Bk,266
  LoopStructural/modelling/features/builders/_base_builder.py,sha256=N3txGC98V08A8-k2TLdoIWgWLfblZ91kaTvciPq_QVM,3750
- LoopStructural/modelling/features/builders/_fault_builder.py,sha256=3rNfRmMcMCrXMyc_hjltU30wTh041NWD6rShNVI2lCE,25999
+ LoopStructural/modelling/features/builders/_fault_builder.py,sha256=CeQnvgDrgMIbyPV6nB0qnpY5PJG1OYTJIukRXv4df1E,25324
  LoopStructural/modelling/features/builders/_folded_feature_builder.py,sha256=1_0BVTzcvmFl6K3_lX-jF0tiMFPmS8j6vPeSLn9MbrE,6607
- LoopStructural/modelling/features/builders/_geological_feature_builder.py,sha256=jn2BiZlzXyWl0_TrsajpFR2wegGOpbuO5yFu2FamuYA,22014
+ LoopStructural/modelling/features/builders/_geological_feature_builder.py,sha256=kWj8zjyWBpYoPU9Xih95UWzh0ta_oKga0fO45KnUeR0,22014
  LoopStructural/modelling/features/builders/_structural_frame_builder.py,sha256=ms3-fuFpDEarjzYU5W499TquOIlTwHPUibVxIypfmWY,8019
  LoopStructural/modelling/features/fault/__init__.py,sha256=4u0KfYzmoO-ddFGo9qd9ov0gBoLqBiPAUsaw5zhEOAQ,189
- LoopStructural/modelling/features/fault/_fault_function.py,sha256=Zr54uScKshNBalOFC2cZc1Lxh3dldTPZA-zOdMC0HkM,12578
+ LoopStructural/modelling/features/fault/_fault_function.py,sha256=QEPh2jIvgD68hEJc5SM5xuMzZw-93V1me1ZbK9G2TB0,12655
  LoopStructural/modelling/features/fault/_fault_function_feature.py,sha256=4m0jVNx7ewrVI0pECI1wNciv8Cy8FzhZrYDjKJ_e2GU,2558
- LoopStructural/modelling/features/fault/_fault_segment.py,sha256=f3zohJsQ3h5KnlSPYL4eulRZN_Wl6e7IeVASfXE7lWA,18275
+ LoopStructural/modelling/features/fault/_fault_segment.py,sha256=dNTCY0ZyC8krrL1suSnhywSE_i5V_VZ4DJ2BieirkhI,18305
  LoopStructural/modelling/features/fold/__init__.py,sha256=pOv20yQvshZozvmO_YFw2E7Prp9DExlm855N-0SnxbQ,175
  LoopStructural/modelling/features/fold/_fold.py,sha256=bPnnLUSiF4uoMRg8aHoOSTPRgaM0JyLoRQPu5_A-J3w,5448
  LoopStructural/modelling/features/fold/_fold_rotation_angle_feature.py,sha256=CXLbFRQ3CrTMAcHmfdbKcmSvvLs9_6TLe0Wqi1pK2tg,892
@@ -112,25 +112,25 @@ LoopStructural/modelling/intrusions/intrusion_builder.py,sha256=1cJjPyRUf3ZDkpwg
  LoopStructural/modelling/intrusions/intrusion_feature.py,sha256=ESjtikHFJQzUnowbYiY7UZ_kYdV2QHobQoRJ2far9Vc,15489
  LoopStructural/modelling/intrusions/intrusion_frame_builder.py,sha256=Q1TPHxREcrO7Rw71nUfACZHfYnISLjqlgkUNTPT324k,40143
  LoopStructural/modelling/intrusions/intrusion_support_functions.py,sha256=wodakheMD62WJyoKnyX8UO-C1pje0I-5kHQEoDqShzo,13951
- LoopStructural/utils/__init__.py,sha256=OJqNSu40SYJeC26IhoBBXDqQOogWjMGA-YokKVRrwMs,924
- LoopStructural/utils/_surface.py,sha256=vTVIkhjYC85CPyirtA4aMPi295fpo40QHzYOqeUS354,6140
+ LoopStructural/utils/__init__.py,sha256=t-vJQ0cF2DrjSRtAfuPEL4hc73XJyQno7PucBnd-fu8,950
+ LoopStructural/utils/_surface.py,sha256=Eg7x1GGfELl7bPe21_wU96Dn4JWJNReEFxwq-aIV4A4,6165
  LoopStructural/utils/_transformation.py,sha256=peuLPH3BJ5DxnPbOuNKcqK4eXhAXdbT540L1OIsO3v0,5404
  LoopStructural/utils/colours.py,sha256=-KRf1MXKx4L8TXnwyiunmKAX4tfy0qG68fRadyfn_bM,1163
  LoopStructural/utils/config.py,sha256=ITGOtZTo2_QBwXkG_0AFANfE90J9siCXLzxypVmg9QA,414
  LoopStructural/utils/dtm_creator.py,sha256=-yqGG0wyEJfTCCDghz058wull1q3zGFASjeu8oDgYnk,535
  LoopStructural/utils/exceptions.py,sha256=SJboJ7ncMqVX-ib7MMizClwMrFZRHQhjZr2eCnVwnQE,500
  LoopStructural/utils/features.py,sha256=WCatS4lYBrURNvWvWwhOsDVUod9KIPNq3x0OHPbWctU,241
- LoopStructural/utils/helper.py,sha256=An9NuRH16cASUWq2ZakHc1tZt_AvUpgx8tv4cyWZEQk,6581
+ LoopStructural/utils/helper.py,sha256=2yt_kdQIhRfik2zSAu4UhPigAM7eif1OB093ddAYtXQ,5857
  LoopStructural/utils/json_encoder.py,sha256=5YNouf1TlhjEqOYgthd07MRXc0JLgxern-nyKSZ__ws,403
  LoopStructural/utils/linalg.py,sha256=tBXyu6NXcG2AcPuzUMnkVI4ncZWtE_MPHGj2PLXRwfY,123
  LoopStructural/utils/logging.py,sha256=dIUWEsS2lT4G1dsf4ZYXknTR7eQkrgvGA4b_E0vMIRU,2402
- LoopStructural/utils/maths.py,sha256=8iqdQdB2-bf14SzIzfFxvjWbzmPknqK9DI7CWEcW6XU,8402
- LoopStructural/utils/regions.py,sha256=LvcOCPudF4u95-GKBOZqXVxOEcR3cOFgFpcs5x43sMk,3914
+ LoopStructural/utils/maths.py,sha256=KaLj9RHsxdaSkEHm4t0JEzykhiuETAV14KpjL6lknWY,10374
+ LoopStructural/utils/regions.py,sha256=SjCC40GI7_n03G4mlcmvyrBgJFbxnvB3leBzXWco37o,3891
  LoopStructural/utils/typing.py,sha256=29uVSTZdzXXH-jdlaYyBWZ1gQ2-nlZ2-XoVgG_PXNFY,157
  LoopStructural/utils/utils.py,sha256=2Z4zVE6G752-SPmM29zebk82bROJxEwi_YiiJjcVED4,2438
  LoopStructural/visualisation/__init__.py,sha256=5BDgKor8-ae6DrS7IZybJ3Wq_pTnCchxuY4EgzA7v1M,318
- loopstructural-1.6.12.dist-info/licenses/LICENSE,sha256=ZqGeNFOgmYevj7Ld7Q-kR4lAxWXuBRUdUmPC6XM_py8,1071
- loopstructural-1.6.12.dist-info/METADATA,sha256=6zcpH_9vavQAKqe0iOdCrDLy4eFLdFy8m9kK6xTunvs,6454
- loopstructural-1.6.12.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
- loopstructural-1.6.12.dist-info/top_level.txt,sha256=QtQErKzYHfg6ddxTQ1NyaTxXBVM6qAqrM_vxEPyXZLg,15
- loopstructural-1.6.12.dist-info/RECORD,,
+ loopstructural-1.6.14.dist-info/licenses/LICENSE,sha256=ZqGeNFOgmYevj7Ld7Q-kR4lAxWXuBRUdUmPC6XM_py8,1071
+ loopstructural-1.6.14.dist-info/METADATA,sha256=UVSv5pVjzLwcs6Wy5_0FSnis03MWuh9GseEH9dH4X44,6454
+ loopstructural-1.6.14.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ loopstructural-1.6.14.dist-info/top_level.txt,sha256=QtQErKzYHfg6ddxTQ1NyaTxXBVM6qAqrM_vxEPyXZLg,15
+ loopstructural-1.6.14.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (78.1.0)
+ Generator: setuptools (80.9.0)
  Root-Is-Purelib: true
  Tag: py3-none-any