xtgeo 4.14.1__cp313-cp313-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122) hide show
  1. cxtgeo.py +558 -0
  2. cxtgeoPYTHON_wrap.c +19537 -0
  3. xtgeo/__init__.py +248 -0
  4. xtgeo/_cxtgeo.cp313-win_amd64.pyd +0 -0
  5. xtgeo/_internal.cp313-win_amd64.pyd +0 -0
  6. xtgeo/common/__init__.py +19 -0
  7. xtgeo/common/_angles.py +29 -0
  8. xtgeo/common/_xyz_enum.py +50 -0
  9. xtgeo/common/calc.py +396 -0
  10. xtgeo/common/constants.py +30 -0
  11. xtgeo/common/exceptions.py +42 -0
  12. xtgeo/common/log.py +93 -0
  13. xtgeo/common/sys.py +166 -0
  14. xtgeo/common/types.py +18 -0
  15. xtgeo/common/version.py +34 -0
  16. xtgeo/common/xtgeo_dialog.py +604 -0
  17. xtgeo/cube/__init__.py +9 -0
  18. xtgeo/cube/_cube_export.py +214 -0
  19. xtgeo/cube/_cube_import.py +532 -0
  20. xtgeo/cube/_cube_roxapi.py +180 -0
  21. xtgeo/cube/_cube_utils.py +287 -0
  22. xtgeo/cube/_cube_window_attributes.py +273 -0
  23. xtgeo/cube/cube1.py +1023 -0
  24. xtgeo/grid3d/__init__.py +15 -0
  25. xtgeo/grid3d/_ecl_grid.py +778 -0
  26. xtgeo/grid3d/_ecl_inte_head.py +152 -0
  27. xtgeo/grid3d/_ecl_logi_head.py +71 -0
  28. xtgeo/grid3d/_ecl_output_file.py +81 -0
  29. xtgeo/grid3d/_egrid.py +1004 -0
  30. xtgeo/grid3d/_find_gridprop_in_eclrun.py +625 -0
  31. xtgeo/grid3d/_grdecl_format.py +309 -0
  32. xtgeo/grid3d/_grdecl_grid.py +400 -0
  33. xtgeo/grid3d/_grid3d.py +29 -0
  34. xtgeo/grid3d/_grid3d_fence.py +284 -0
  35. xtgeo/grid3d/_grid3d_utils.py +228 -0
  36. xtgeo/grid3d/_grid_boundary.py +76 -0
  37. xtgeo/grid3d/_grid_etc1.py +1683 -0
  38. xtgeo/grid3d/_grid_export.py +222 -0
  39. xtgeo/grid3d/_grid_hybrid.py +50 -0
  40. xtgeo/grid3d/_grid_import.py +79 -0
  41. xtgeo/grid3d/_grid_import_ecl.py +101 -0
  42. xtgeo/grid3d/_grid_import_roff.py +135 -0
  43. xtgeo/grid3d/_grid_import_xtgcpgeom.py +375 -0
  44. xtgeo/grid3d/_grid_refine.py +258 -0
  45. xtgeo/grid3d/_grid_roxapi.py +292 -0
  46. xtgeo/grid3d/_grid_translate_coords.py +154 -0
  47. xtgeo/grid3d/_grid_wellzone.py +165 -0
  48. xtgeo/grid3d/_gridprop_export.py +202 -0
  49. xtgeo/grid3d/_gridprop_import_eclrun.py +164 -0
  50. xtgeo/grid3d/_gridprop_import_grdecl.py +132 -0
  51. xtgeo/grid3d/_gridprop_import_roff.py +52 -0
  52. xtgeo/grid3d/_gridprop_import_xtgcpprop.py +168 -0
  53. xtgeo/grid3d/_gridprop_lowlevel.py +171 -0
  54. xtgeo/grid3d/_gridprop_op1.py +272 -0
  55. xtgeo/grid3d/_gridprop_roxapi.py +301 -0
  56. xtgeo/grid3d/_gridprop_value_init.py +140 -0
  57. xtgeo/grid3d/_gridprops_import_eclrun.py +344 -0
  58. xtgeo/grid3d/_gridprops_import_roff.py +83 -0
  59. xtgeo/grid3d/_roff_grid.py +470 -0
  60. xtgeo/grid3d/_roff_parameter.py +303 -0
  61. xtgeo/grid3d/grid.py +3010 -0
  62. xtgeo/grid3d/grid_properties.py +699 -0
  63. xtgeo/grid3d/grid_property.py +1313 -0
  64. xtgeo/grid3d/types.py +15 -0
  65. xtgeo/interfaces/rms/__init__.py +18 -0
  66. xtgeo/interfaces/rms/_regular_surface.py +460 -0
  67. xtgeo/interfaces/rms/_rms_base.py +100 -0
  68. xtgeo/interfaces/rms/_rmsapi_package.py +69 -0
  69. xtgeo/interfaces/rms/rmsapi_utils.py +438 -0
  70. xtgeo/io/__init__.py +1 -0
  71. xtgeo/io/_file.py +603 -0
  72. xtgeo/metadata/__init__.py +17 -0
  73. xtgeo/metadata/metadata.py +435 -0
  74. xtgeo/roxutils/__init__.py +7 -0
  75. xtgeo/roxutils/_roxar_loader.py +54 -0
  76. xtgeo/roxutils/_roxutils_etc.py +122 -0
  77. xtgeo/roxutils/roxutils.py +207 -0
  78. xtgeo/surface/__init__.py +20 -0
  79. xtgeo/surface/_regsurf_boundary.py +26 -0
  80. xtgeo/surface/_regsurf_cube.py +210 -0
  81. xtgeo/surface/_regsurf_cube_window.py +391 -0
  82. xtgeo/surface/_regsurf_cube_window_v2.py +297 -0
  83. xtgeo/surface/_regsurf_cube_window_v3.py +360 -0
  84. xtgeo/surface/_regsurf_export.py +388 -0
  85. xtgeo/surface/_regsurf_grid3d.py +275 -0
  86. xtgeo/surface/_regsurf_gridding.py +347 -0
  87. xtgeo/surface/_regsurf_ijxyz_parser.py +278 -0
  88. xtgeo/surface/_regsurf_import.py +347 -0
  89. xtgeo/surface/_regsurf_lowlevel.py +122 -0
  90. xtgeo/surface/_regsurf_oper.py +538 -0
  91. xtgeo/surface/_regsurf_utils.py +81 -0
  92. xtgeo/surface/_surfs_import.py +43 -0
  93. xtgeo/surface/_zmap_parser.py +138 -0
  94. xtgeo/surface/regular_surface.py +3043 -0
  95. xtgeo/surface/surfaces.py +276 -0
  96. xtgeo/well/__init__.py +24 -0
  97. xtgeo/well/_blockedwell_roxapi.py +241 -0
  98. xtgeo/well/_blockedwells_roxapi.py +68 -0
  99. xtgeo/well/_well_aux.py +30 -0
  100. xtgeo/well/_well_io.py +327 -0
  101. xtgeo/well/_well_oper.py +483 -0
  102. xtgeo/well/_well_roxapi.py +304 -0
  103. xtgeo/well/_wellmarkers.py +486 -0
  104. xtgeo/well/_wells_utils.py +158 -0
  105. xtgeo/well/blocked_well.py +220 -0
  106. xtgeo/well/blocked_wells.py +134 -0
  107. xtgeo/well/well1.py +1516 -0
  108. xtgeo/well/wells.py +211 -0
  109. xtgeo/xyz/__init__.py +6 -0
  110. xtgeo/xyz/_polygons_oper.py +272 -0
  111. xtgeo/xyz/_xyz.py +758 -0
  112. xtgeo/xyz/_xyz_data.py +646 -0
  113. xtgeo/xyz/_xyz_io.py +737 -0
  114. xtgeo/xyz/_xyz_lowlevel.py +42 -0
  115. xtgeo/xyz/_xyz_oper.py +613 -0
  116. xtgeo/xyz/_xyz_roxapi.py +766 -0
  117. xtgeo/xyz/points.py +698 -0
  118. xtgeo/xyz/polygons.py +827 -0
  119. xtgeo-4.14.1.dist-info/METADATA +146 -0
  120. xtgeo-4.14.1.dist-info/RECORD +122 -0
  121. xtgeo-4.14.1.dist-info/WHEEL +5 -0
  122. xtgeo-4.14.1.dist-info/licenses/LICENSE.md +165 -0
@@ -0,0 +1,42 @@
1
+ """Private low level routines (SWIG vs C)"""
2
+
3
+ import numpy as np
4
+
5
+ from xtgeo import _cxtgeo
6
+ from xtgeo.common.log import null_logger
7
+
8
+ logger = null_logger(__name__)
9
+
10
+
11
def convert_np_carr_int(xyz, np_array):  # pragma: no cover
    """Convert a numpy 1D array into a SWIG C array, assuming int type.

    The incoming numpy data are typically float64, so they are cast to
    int32 before being handed over to the C layer.
    """
    target = _cxtgeo.new_intarray(xyz.nrow)
    _cxtgeo.swig_numpy_to_carr_i1d(np_array.astype(np.int32), target)
    return target
24
+
25
+
26
def convert_np_carr_double(xyz, np_array):  # pragma: no cover
    """Convert a numpy 1D array into a SWIG C array, assuming double type."""
    target = _cxtgeo.new_doublearray(xyz.nrow)
    _cxtgeo.swig_numpy_to_carr_1d(np_array, target)
    return target
34
+
35
+
36
def convert_carr_double_np(xyz, carray, nlen=None):  # pragma: no cover
    """Convert a SWIG C array into a numpy 1D array, assuming double type.

    If nlen is not given, the length is taken from the object's dataframe.
    """
    count = len(xyz._df.index) if nlen is None else nlen
    return _cxtgeo.swig_carr_to_numpy_1d(count, carray)
xtgeo/xyz/_xyz_oper.py ADDED
@@ -0,0 +1,613 @@
1
+ # coding: utf-8
2
+ """Various operations on XYZ data"""
3
+
4
+ import numpy as np
5
+ import pandas as pd
6
+ import shapely.geometry as sg
7
+ from scipy.interpolate import UnivariateSpline
8
+
9
+ import xtgeo
10
+ from xtgeo import _cxtgeo
11
+ from xtgeo.common.constants import UNDEF_LIMIT
12
+ from xtgeo.common.log import null_logger
13
+ from xtgeo.common.xtgeo_dialog import XTGeoDialog
14
+
15
+ xtg = XTGeoDialog()
16
+ logger = null_logger(__name__)
17
+
18
+
19
def mark_in_polygons_mpl(self, poly, name, inside_value, outside_value):
    """Make a column marking whether XYZ rows are inside/outside polygons.

    Uses matplotlib.path for the point-in-polygon test. The column ``name``
    is set to ``inside_value`` for points inside any of the polygons, and
    ``outside_value`` otherwise.

    Raises ValueError if ``name`` is a protected column name, or if ``poly``
    is neither a Polygons instance nor a list of Polygons instances.
    """
    # validate the proposed column name first (fail fast, before any work)
    if name in self.protected_columns():
        # BUGFIX: this message was a plain string missing the f-prefix, so it
        # printed the literal text "{name}" instead of the actual column name
        raise ValueError(f"The proposed name: {name}, is protected and cannot be used")

    # allow a single Polygons instance or a list of Polygons instances
    if isinstance(poly, xtgeo.Polygons):
        usepolys = [poly]
    elif isinstance(poly, list) and all(
        isinstance(pol, xtgeo.Polygons) for pol in poly
    ):
        usepolys = poly
    else:
        raise ValueError("The poly values is not a Polygons or a list of Polygons")

    points = np.array(
        [
            self.get_dataframe(copy=False)[self.xname].values,
            self.get_dataframe(copy=False)[self.yname].values,
        ]
    ).T

    dataframe = self.get_dataframe()
    dataframe[name] = outside_value

    # local import: keep matplotlib optional at module import time
    import matplotlib.path as mplpath

    for pol in usepolys:
        idgroups = pol.get_dataframe().groupby(pol.pname)
        for _, grp in idgroups:
            singlepoly = np.array([grp[pol.xname].values, grp[pol.yname].values]).T
            poly_path = mplpath.Path(singlepoly)
            is_inside = poly_path.contains_points(points)
            dataframe.loc[is_inside, name] = inside_value

    self.set_dataframe(dataframe)
55
+
56
+
57
def operation_polygons_v1(self, poly, value, opname="add", inside=True, where=True):
    """
    Operations are restricted to closed polygons, for points or polyline points.

    If value is not float but 'poly', then the avg of each polygon Z value will
    be used instead.

    'Inside' several polygons will become a union, while 'outside' polygons
    will be the intersection.

    The "where" filter is reserved for future use.

    This is now the legacy version, which will be deprecated later.
    """
    logger.warning("Where is not implemented: %s", where)

    # operation codes as understood by the C routine pol_do_points_inside()
    oper = {"set": 1, "add": 2, "sub": 3, "mul": 4, "div": 5, "eli": 11}

    insidevalue = 1 if inside else 0

    logger.info("Operations of points inside polygon(s)...")
    if not isinstance(poly, xtgeo.xyz.Polygons):
        raise ValueError("The poly input is not a single Polygons instance")

    idgroups = poly.get_dataframe(copy=False).groupby(poly.pname)

    xcor = self.get_dataframe(copy=False)[self.xname].values
    ycor = self.get_dataframe(copy=False)[self.yname].values
    zcor = self.get_dataframe(copy=False)[self.zname].values

    usepoly = isinstance(value, str) and value == "poly"

    for id_, grp in idgroups:
        pxcor = grp[poly.xname].values
        pycor = grp[poly.yname].values
        # BUGFIX (cleanup): removed a dead `pvalue = value` assignment that was
        # immediately overwritten by the line below
        pvalue = grp[poly.zname].values.mean() if usepoly else value

        logger.info("C function for polygon %s...", id_)

        ies = _cxtgeo.pol_do_points_inside(
            xcor, ycor, zcor, pxcor, pycor, pvalue, oper[opname], insidevalue
        )
        logger.info("C function for polygon %s... done", id_)

        if ies != 0:
            raise RuntimeError(f"Something went wrong, code {ies}")

    # the C routine flags eliminated points with a huge UNDEF value
    zcor[zcor > UNDEF_LIMIT] = np.nan
    dataframe = self.get_dataframe()
    dataframe[self.zname] = zcor
    # removing rows where Z column is undefined
    dataframe.dropna(how="any", subset=[self.zname], inplace=True)
    dataframe.reset_index(inplace=True, drop=True)
    self.set_dataframe(dataframe)
    logger.info("Operations of points inside polygon(s)... done")
117
+
118
+
119
def operation_polygons_v2(self, poly, value, opname="add", inside=True, where=True):
    """Apply an operation to points inside (or outside) closed polygons.

    In contrast to v1, the special value 'poly' is NOT supported here.

    NOTE! Both 'inside' and 'outside' of several polygons become a union,
    so outside points are treated DIFFERENTLY than in v1!

    The "where" filter is reserved for future use.

    This version builds on mark_in_polygons_mpl() and should be much faster.
    """
    logger.info("Operations of points inside polygon(s), version 2")

    allowed_opname = ("set", "add", "sub", "mul", "div", "eli")

    logger.warning("Where is not implemented: %s", where)
    if not isinstance(poly, xtgeo.xyz.Polygons):
        raise ValueError("The poly input is not a Polygons instance")

    # mark in/out membership on a working copy, then apply the operation
    work = self.copy()
    mark_in_polygons_mpl(
        work,
        poly,
        "_TMP",
        inside_value=1 if inside else 0,
        outside_value=0 if inside else 1,
    )
    marked = work.get_dataframe()

    dataframe = self.get_dataframe()
    selected = marked._TMP == 1

    if opname == "add":
        dataframe.loc[selected, self.zname] += value
    elif opname == "sub":
        dataframe.loc[selected, self.zname] -= value
    elif opname == "mul":
        dataframe.loc[selected, self.zname] *= value
    elif opname == "div":
        # division by zero maps the selected Z values to 0.0 by convention
        if value != 0.0:
            dataframe.loc[selected, self.zname] /= value
        else:
            dataframe.loc[selected, self.zname] = 0.0
    elif opname == "set":
        dataframe.loc[selected, self.zname] = value
    elif opname == "eli":
        # eliminate the marked rows entirely
        dataframe = dataframe[marked._TMP == 0]
        dataframe.reset_index(inplace=True, drop=True)
    else:
        raise KeyError(f"The opname={opname} is not one of {allowed_opname}")

    self.set_dataframe(dataframe)

    logger.info("Operations of points inside polygon(s)... done")
171
+
172
+
173
def rescale_polygons(self, distance=10, addlen=False, kind="simple", mode2d=False):
    """Rescale (resample) a polygons segment.

    Default settings keep backwards compatibility with version 2.0; the
    options addlen, kind and mode2d were added in 2.1.
    """
    kinds_allowed = ("simple", "slinear", "cubic")

    if kind is None:
        kind = "simple"

    if kind not in kinds_allowed:
        raise ValueError(
            f"The input 'kind' {kind} is not valid. Allowed: {kinds_allowed}"
        )

    # "simple" uses the Shapely-based v1 resampler; the spline kinds use v2
    if kind == "simple":
        _rescale_v1(self, distance, addlen, mode2d=mode2d)
    else:
        _rescale_v2(self, distance, addlen, kind=kind, mode2d=mode2d)
195
+
196
+
197
def _rescale_v1(self, distance, addlen, mode2d):
    """Simple rescaling (v1); effectively 2D since Shapely lengths are 2D."""
    if not mode2d:
        raise KeyError("Cannot combine 'simple' with mode2d False")

    frames = []
    for pid, grp in self.get_dataframe(copy=False).groupby(self.pname):
        if len(grp.index) < 2:
            logger.warning("Cannot rescale polygons with less than two points. Skip")
            continue

        coords = np.stack(
            [grp[self.xname].values, grp[self.yname].values, grp[self.zname].values],
            axis=1,
        )
        resampled = _redistribute_vertices(sg.LineString(coords), distance)

        onedf = pd.DataFrame(
            np.array(resampled.coords),
            columns=[self.xname, self.yname, self.zname],
        )
        onedf[self.pname] = pid
        frames.append(onedf)

    self.set_dataframe(pd.concat(frames).reset_index(drop=True))

    if addlen:
        self.hlen()
        self.tlen()
229
+
230
+
231
def _redistribute_vertices(geom, distance):
    """Resample vertices along a polyline using Shapely interpolation.

    Parameters:
        geom: a shapely LineString or MultiLineString
        distance: approximate spacing between resampled vertices

    Returns a new geometry of the same type; raises ValueError for any
    other geometry type.
    """
    if geom.geom_type == "LineString":
        num_vert = int(round(geom.length / distance))
        # guarantee at least one segment even for very short lines
        if num_vert == 0:
            num_vert = 1
        return sg.LineString(
            [
                geom.interpolate(float(n) / num_vert, normalized=True)
                for n in range(num_vert + 1)
            ]
        )

    if geom.geom_type == "MultiLineString":
        # BUGFIX: iterate .geoms explicitly; direct iteration over multi-part
        # geometries was deprecated in Shapely 1.8 and removed in 2.0
        parts = [_redistribute_vertices(part, distance) for part in geom.geoms]
        return type(geom)([p for p in parts if not p.is_empty])

    raise ValueError(f"Unhandled geometry {geom.geom_type}")
249
+
250
+
251
def _handle_vertical_input(self, inframe):
    """Treat the special (near) vertical case.

    A 100% vertical well gives numerical problems downstream; the trick is
    to distort the first and last row X coordinate with a relatively small
    number so the horizontal length becomes nonzero.
    """
    frame = inframe.copy().reset_index()

    # e.g. if mean tvd is 2000, tolerance becomes 0.002 and the edit is 2
    tolerance = self.get_dataframe(copy=False)[self.zname].mean() * 0.000001

    if inframe[self.dhname].max() < tolerance:
        edit = tolerance * 1000
        frame.at[0, self.xname] -= edit
        frame.at[frame.index[-1], self.xname] += edit

        # recompute the length columns on a pseudo copy of self
        helper = self.copy()
        helper.set_dataframe(frame)
        helper.hlen()
        helper.tlen()
        frame = helper.get_dataframe()

    return frame
275
+
276
+
277
def _rescale_v2(self, distance, addlen, kind="slinear", mode2d=True):
    """Rescale polygons per ID group using scipy splines over cumulative length.

    Fits one UnivariateSpline per coordinate (X, Y, Z) parameterized by the
    horizontal (mode2d) or true 3D cumulative length, then samples it at a
    near-constant increment 'distance'. kind is 'slinear' (piecewise linear)
    or 'cubic' (smoothing spline); anything else raises ValueError.
    """
    # Rescaling to constant increment is perhaps impossible, but this is
    # perhaps quite close

    # ensure H_*/T_* cumulative and delta length columns exist
    self.hlen()
    self.tlen()

    idgroups = self.get_dataframe(copy=False).groupby(self.pname)

    dfrlist = []
    for idx, grp in idgroups:
        # distort endpoints slightly for purely vertical segments
        grp = _handle_vertical_input(self, grp)

        # avoid duplicates when *_DELTALEN are 0.0 (makes scipy interp1d() fail
        # when scipy >= 1.9)
        grp = grp.drop(grp[(grp[self.dhname] == 0.0) | (grp[self.dtname] == 0.0)].index)

        if len(grp.index) < 2:
            logger.warning("Cannot rescale polygons with less than two points. Skip")
            continue

        points = [grp[self.xname], grp[self.yname], grp[self.zname]]

        # spline parameter: cumulative 2D length by default, 3D if not mode2d
        leng = grp[self.hname].iloc[-1]
        gname = self.hname

        if not mode2d:
            leng = grp[self.tname].iloc[-1]
            gname = self.tname

        # to avoid numerical trouble of pure vertical sections
        leng = leng - 0.001 * leng

        nstep = int(leng / distance)
        alpha = np.linspace(0, leng, num=nstep, endpoint=True)

        if kind == "slinear":
            # k=1, s=0: exact piecewise-linear interpolation through the points
            splines = [UnivariateSpline(grp[gname], crd, k=1, s=0) for crd in points]
        elif kind == "cubic":
            # default smoothing spline (may not pass exactly through the points)
            splines = [UnivariateSpline(grp[gname], crd) for crd in points]

        else:
            raise ValueError(f"Invalid kind chosen: {kind}")

        # evaluate all three coordinate splines at the resampled parameter
        ip = np.vstack([spl(alpha) for spl in splines]).T

        dfr = pd.DataFrame(np.array(ip), columns=[self.xname, self.yname, self.zname])

        dfr[self.pname] = idx
        dfrlist.append(dfr)

    dfr = pd.concat(dfrlist)
    self.set_dataframe(dfr.reset_index(drop=True))

    if addlen:
        # recompute lengths on the resampled data
        self.tlen()
        self.hlen()
    else:
        # drop the helper columns added at the top of this function
        self.delete_columns([self.hname, self.dhname, self.tname, self.dtname])
336
+
337
+
338
def get_fence(
    self, distance=20, atleast=5, nextend=2, name=None, asnumpy=True, polyid=None
):
    """Get a fence suitable for plotting xsections, either as a numpy or as a
    new Polygons instance.

    The atleast parameter will win over the distance, meaning that if total length
    horizontally is 50, and distance is set to 20, the actual length will be 50/5=10
    In such cases, nextend will be modified automatically also to fulfill the original
    intention of nextend*distance (approx).

    The routine is still not perfect for "close to very vertical polygon"
    but assumed to be sufficient for all practical cases

    Returns False if the polygons have too few points; otherwise a numpy
    array of shape (nrow, 5) with columns X, Y, Z, H_CUMLEN, H_DELTALEN
    (asnumpy=True) or a Polygons instance (asnumpy=False).
    Raises ValueError if atleast < 3.
    """
    if atleast < 3:
        raise ValueError("The atleast key must be 3 or greater")

    # remember the requested extension/spacing before they get adjusted
    orig_extend = nextend * distance
    orig_distance = distance

    fence = self.copy()

    fence.hlen()

    if len(fence.get_dataframe(copy=False)) < 2:
        xtg.warn(f"Too few points in polygons for fence, return False (name: {name})")
        return False

    fence.filter_byid(polyid)

    # horizontal (2D) length of the first shapely object
    hxlen = fence.get_shapely_objects()[0].length

    # perhaps a way to treat very vertical polys from e.g. wells:
    if hxlen < 0.1 * orig_distance:
        hxlen = 0.1 * orig_distance

    # the "atleast" requirement shrinks the spacing if needed
    if hxlen / (atleast + 1) < orig_distance:
        distance = hxlen / (atleast + 1)

    # keep a fallback in case the rescale degenerates to < 2 points
    fence_keep = fence.copy()
    fence.rescale(distance, kind="slinear", mode2d=True)

    if len(fence.get_dataframe(copy=False)) < 2:
        fence = fence_keep

    fence.hlen()
    updated_distance = fence.get_dataframe(copy=False)[fence.dhname].median()

    if updated_distance < 0.5 * distance:
        updated_distance = 0.5 * distance

    # re-derive nextend so the extension stays ~ nextend*distance as requested
    newnextend = int(round(orig_extend / updated_distance))
    fence.extend(updated_distance, nsamples=newnextend)

    df = fence.get_dataframe()
    df0 = df.drop(df.index[1:])  # keep always first which has per def H_DELTALEN=0
    df2 = df[updated_distance * 0.01 < df.H_DELTALEN]  # skip very close points
    df = pd.concat([df0, df2], axis=0, ignore_index=True)

    # duplicates may still exist; skip those
    df.drop_duplicates(subset=[fence.xname, fence.yname], keep="first", inplace=True)
    df.reset_index(inplace=True, drop=True)
    fence.set_dataframe(df)

    if name:
        fence.name = name

    if asnumpy is True:
        # stack the five columns and reshape Fortran-order to (nrow, 5)
        rval = np.concatenate(
            (
                df[fence.xname].values,
                df[fence.yname].values,
                df[fence.zname].values,
                df[fence.hname].values,
                df[fence.dhname].values,
            ),
            axis=0,
        )
        return np.reshape(rval, (fence.nrow, 5), order="F")

    return fence
420
+
421
+
422
def snap_surface(self, surf, activeonly=True):
    """Snap (or transfer) Z values to a RegularSurface.

    Points that fall outside the surface get UNDEF; with activeonly these
    rows are removed, otherwise their original Z values are kept.
    """
    if not isinstance(surf, xtgeo.RegularSurface):
        raise ValueError("Input object of wrong data type, must be RegularSurface")

    dataframe = self.get_dataframe()
    newz = dataframe[self.zname].values

    # the C routine updates newz in place with the surface Z values
    ier = _cxtgeo.surf_get_zv_from_xyv(
        dataframe[self.xname].values,
        dataframe[self.yname].values,
        newz,
        surf.ncol,
        surf.nrow,
        surf.xori,
        surf.yori,
        surf.xinc,
        surf.yinc,
        surf.yflip,
        surf.rotation,
        surf.get_values1d(),
        0,
    )
    if ier != 0:
        raise RuntimeError(f"Error code from C routine surf_get_zv_from_xyv is {ier}")

    if activeonly:
        # drop points flagged as undefined (outside the surface)
        dataframe[self.zname] = newz
        dataframe = dataframe[dataframe[self.zname] < xtgeo.UNDEF_LIMIT]
        dataframe.reset_index(inplace=True, drop=True)
    else:
        # keep the original Z where the snap yielded UNDEF
        dataframe[self.zname] = np.where(
            newz < xtgeo.UNDEF_LIMIT, newz, self.get_dataframe()[self.zname].values
        )

    self.set_dataframe(dataframe)
464
+
465
+
466
def hlen(self, hname="H_CUMLEN", dhname="H_DELTALEN", atindex=0):
    """Compute cumulative and delta horizontal (2D) distances per polygon."""
    _generic_length(self, hname, dhname, atindex, mode2d=True)
470
+
471
+
472
def tlen(self, tname="T_CUMLEN", dtname="T_DELTALEN", atindex=0):
    """Compute cumulative and delta true 3D distances per polygon."""
    _generic_length(self, tname, dtname, atindex, mode2d=False)
476
+
477
+
478
def _generic_length(
    self, gname="G_CUMLEN", dgname="G_DELTALEN", atindex=0, mode2d=True
):
    """Get the true or horizontal distance (cum/delta) between points in polygons.

    The properties gname and ghname will be updated.

    Note that Dxx at first location will be set equal to that of location 1
    """

    # Potential todo: Add an option that dH never gets 0.0 to avoid numerical trouble
    # for e.g. rescale?

    if not isinstance(self, xtgeo.Polygons):
        raise ValueError("Input object of wrong data type, must be Polygons")

    # delete existing self.hname and self.dhname columns
    self.delete_columns([gname, dgname])

    idgroups = self.get_dataframe(copy=False).groupby(self.pname)

    # accumulated cumulative / delta distances across all polygon ID groups
    gdist = np.array([])
    dgdist = np.array([])
    for _id, grp in idgroups:
        # the C routine returns 3D (tlen) and 2D (hlen) cumulative and delta
        # lengths; the repeated len(grp) args presumably size the four output
        # arrays (SWIG convention) -- TODO confirm against the C signature
        ier, tlenv, dtlenv, hlenv, dhlenv = _cxtgeo.pol_geometrics(
            grp[self.xname].values.astype(np.float64),
            grp[self.yname].values.astype(np.float64),
            grp[self.zname].values.astype(np.float64),
            len(grp),
            len(grp),
            len(grp),
            len(grp),
        )
        if ier != 0:
            raise RuntimeError(f"Error code from _cxtgeo.pol_geometrics is {ier}")

        if mode2d:
            # first delta is undefined; mirror the second value (see docstring)
            dhlenv[0] = dhlenv[1]
            # shift cumulative lengths so the zero point sits at 'atindex'
            if atindex > 0:
                cumval = hlenv[atindex]
                hlenv -= cumval

            gdist = np.append(gdist, hlenv)
            dgdist = np.append(dgdist, dhlenv)

        else:
            # same treatment for the true 3D lengths
            dtlenv[0] = dtlenv[1]
            if atindex > 0:
                cumval = tlenv[atindex]
                tlenv -= cumval

            gdist = np.append(gdist, tlenv)
            dgdist = np.append(dgdist, dtlenv)

    # write the new columns back to the dataframe
    dataframe = self.get_dataframe()
    dataframe[gname] = gdist
    dataframe[dgname] = dgdist
    self.set_dataframe(dataframe)

    # record the active column names on the instance
    if mode2d:
        self._hname = gname
        self._dhname = dgname
    else:
        self._tname = gname
        self._dtname = dgname
543
+
544
+
545
def extend(self, distance, nsamples, addhlen=True):
    """Extend a polygon by 'distance' at both ends, 'nsamples' times.

    Each sample prepends one extrapolated point at the beginning and appends
    one at the end of the polygon. With addhlen (default) the H_CUMLEN /
    H_DELTALEN columns are recomputed afterwards, anchored at 'nsamples'.

    Raises ValueError if self is not a Polygons instance, and RuntimeError
    on a nonzero error code from the C extrapolation routine.
    """
    if not isinstance(self, xtgeo.Polygons):
        raise ValueError("Input object of wrong data type, must be Polygons")

    dataframe = self.get_dataframe()
    for _ in range(nsamples):
        # beginning of poly
        row0 = dataframe.iloc[0]
        row1 = dataframe.iloc[1]

        rown = row0.copy()

        # setting row0[2] as row1[2] is intentional, as this shall be a 2D length!
        ier, newx, newy, _ = _cxtgeo.x_vector_linint2(
            row1.iloc[0],
            row1.iloc[1],
            row1.iloc[2],
            row0.iloc[0],
            row0.iloc[1],
            row1.iloc[2],
            distance,
            12,
        )

        if ier != 0:
            raise RuntimeError(f"Error code from _cxtgeo.x_vector_linint2 is {ier}")

        rown[self.xname] = newx
        rown[self.yname] = newy

        df_to_add = rown.to_frame().T
        df_to_add = df_to_add.astype(dataframe.dtypes.to_dict())  # ensure same dtypes

        dataframe = pd.concat([df_to_add, dataframe]).reset_index(drop=True)

        # end of poly
        row0 = dataframe.iloc[-2]
        row1 = dataframe.iloc[-1]

        rown = row1.copy()

        # setting row1[2] as row0[2] is intentional, as this shall be a 2D length!
        ier, newx, newy, _ = _cxtgeo.x_vector_linint2(
            row0.iloc[0],
            row0.iloc[1],
            row0.iloc[2],
            row1.iloc[0],
            row1.iloc[1],
            row0.iloc[2],
            distance,
            11,
        )

        # BUGFIX: the error code from this second (end-of-poly) call was
        # previously ignored, unlike the symmetric begin-of-poly call above
        if ier != 0:
            raise RuntimeError(f"Error code from _cxtgeo.x_vector_linint2 is {ier}")

        rown[self.xname] = newx
        rown[self.yname] = newy

        df_to_add = rown.to_frame().T
        df_to_add = df_to_add.astype(dataframe.dtypes.to_dict())  # ensure same dtypes
        dataframe = pd.concat([dataframe, df_to_add]).reset_index(drop=True)

    self.set_dataframe(dataframe)

    if addhlen:
        self.hlen(atindex=nsamples)