xtgeo-4.14.1-cp313-cp313-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. cxtgeo.py +558 -0
  2. cxtgeoPYTHON_wrap.c +19537 -0
  3. xtgeo/__init__.py +248 -0
  4. xtgeo/_cxtgeo.cp313-win_amd64.pyd +0 -0
  5. xtgeo/_internal.cp313-win_amd64.pyd +0 -0
  6. xtgeo/common/__init__.py +19 -0
  7. xtgeo/common/_angles.py +29 -0
  8. xtgeo/common/_xyz_enum.py +50 -0
  9. xtgeo/common/calc.py +396 -0
  10. xtgeo/common/constants.py +30 -0
  11. xtgeo/common/exceptions.py +42 -0
  12. xtgeo/common/log.py +93 -0
  13. xtgeo/common/sys.py +166 -0
  14. xtgeo/common/types.py +18 -0
  15. xtgeo/common/version.py +34 -0
  16. xtgeo/common/xtgeo_dialog.py +604 -0
  17. xtgeo/cube/__init__.py +9 -0
  18. xtgeo/cube/_cube_export.py +214 -0
  19. xtgeo/cube/_cube_import.py +532 -0
  20. xtgeo/cube/_cube_roxapi.py +180 -0
  21. xtgeo/cube/_cube_utils.py +287 -0
  22. xtgeo/cube/_cube_window_attributes.py +273 -0
  23. xtgeo/cube/cube1.py +1023 -0
  24. xtgeo/grid3d/__init__.py +15 -0
  25. xtgeo/grid3d/_ecl_grid.py +778 -0
  26. xtgeo/grid3d/_ecl_inte_head.py +152 -0
  27. xtgeo/grid3d/_ecl_logi_head.py +71 -0
  28. xtgeo/grid3d/_ecl_output_file.py +81 -0
  29. xtgeo/grid3d/_egrid.py +1004 -0
  30. xtgeo/grid3d/_find_gridprop_in_eclrun.py +625 -0
  31. xtgeo/grid3d/_grdecl_format.py +309 -0
  32. xtgeo/grid3d/_grdecl_grid.py +400 -0
  33. xtgeo/grid3d/_grid3d.py +29 -0
  34. xtgeo/grid3d/_grid3d_fence.py +284 -0
  35. xtgeo/grid3d/_grid3d_utils.py +228 -0
  36. xtgeo/grid3d/_grid_boundary.py +76 -0
  37. xtgeo/grid3d/_grid_etc1.py +1683 -0
  38. xtgeo/grid3d/_grid_export.py +222 -0
  39. xtgeo/grid3d/_grid_hybrid.py +50 -0
  40. xtgeo/grid3d/_grid_import.py +79 -0
  41. xtgeo/grid3d/_grid_import_ecl.py +101 -0
  42. xtgeo/grid3d/_grid_import_roff.py +135 -0
  43. xtgeo/grid3d/_grid_import_xtgcpgeom.py +375 -0
  44. xtgeo/grid3d/_grid_refine.py +258 -0
  45. xtgeo/grid3d/_grid_roxapi.py +292 -0
  46. xtgeo/grid3d/_grid_translate_coords.py +154 -0
  47. xtgeo/grid3d/_grid_wellzone.py +165 -0
  48. xtgeo/grid3d/_gridprop_export.py +202 -0
  49. xtgeo/grid3d/_gridprop_import_eclrun.py +164 -0
  50. xtgeo/grid3d/_gridprop_import_grdecl.py +132 -0
  51. xtgeo/grid3d/_gridprop_import_roff.py +52 -0
  52. xtgeo/grid3d/_gridprop_import_xtgcpprop.py +168 -0
  53. xtgeo/grid3d/_gridprop_lowlevel.py +171 -0
  54. xtgeo/grid3d/_gridprop_op1.py +272 -0
  55. xtgeo/grid3d/_gridprop_roxapi.py +301 -0
  56. xtgeo/grid3d/_gridprop_value_init.py +140 -0
  57. xtgeo/grid3d/_gridprops_import_eclrun.py +344 -0
  58. xtgeo/grid3d/_gridprops_import_roff.py +83 -0
  59. xtgeo/grid3d/_roff_grid.py +470 -0
  60. xtgeo/grid3d/_roff_parameter.py +303 -0
  61. xtgeo/grid3d/grid.py +3010 -0
  62. xtgeo/grid3d/grid_properties.py +699 -0
  63. xtgeo/grid3d/grid_property.py +1313 -0
  64. xtgeo/grid3d/types.py +15 -0
  65. xtgeo/interfaces/rms/__init__.py +18 -0
  66. xtgeo/interfaces/rms/_regular_surface.py +460 -0
  67. xtgeo/interfaces/rms/_rms_base.py +100 -0
  68. xtgeo/interfaces/rms/_rmsapi_package.py +69 -0
  69. xtgeo/interfaces/rms/rmsapi_utils.py +438 -0
  70. xtgeo/io/__init__.py +1 -0
  71. xtgeo/io/_file.py +603 -0
  72. xtgeo/metadata/__init__.py +17 -0
  73. xtgeo/metadata/metadata.py +435 -0
  74. xtgeo/roxutils/__init__.py +7 -0
  75. xtgeo/roxutils/_roxar_loader.py +54 -0
  76. xtgeo/roxutils/_roxutils_etc.py +122 -0
  77. xtgeo/roxutils/roxutils.py +207 -0
  78. xtgeo/surface/__init__.py +20 -0
  79. xtgeo/surface/_regsurf_boundary.py +26 -0
  80. xtgeo/surface/_regsurf_cube.py +210 -0
  81. xtgeo/surface/_regsurf_cube_window.py +391 -0
  82. xtgeo/surface/_regsurf_cube_window_v2.py +297 -0
  83. xtgeo/surface/_regsurf_cube_window_v3.py +360 -0
  84. xtgeo/surface/_regsurf_export.py +388 -0
  85. xtgeo/surface/_regsurf_grid3d.py +275 -0
  86. xtgeo/surface/_regsurf_gridding.py +347 -0
  87. xtgeo/surface/_regsurf_ijxyz_parser.py +278 -0
  88. xtgeo/surface/_regsurf_import.py +347 -0
  89. xtgeo/surface/_regsurf_lowlevel.py +122 -0
  90. xtgeo/surface/_regsurf_oper.py +538 -0
  91. xtgeo/surface/_regsurf_utils.py +81 -0
  92. xtgeo/surface/_surfs_import.py +43 -0
  93. xtgeo/surface/_zmap_parser.py +138 -0
  94. xtgeo/surface/regular_surface.py +3043 -0
  95. xtgeo/surface/surfaces.py +276 -0
  96. xtgeo/well/__init__.py +24 -0
  97. xtgeo/well/_blockedwell_roxapi.py +241 -0
  98. xtgeo/well/_blockedwells_roxapi.py +68 -0
  99. xtgeo/well/_well_aux.py +30 -0
  100. xtgeo/well/_well_io.py +327 -0
  101. xtgeo/well/_well_oper.py +483 -0
  102. xtgeo/well/_well_roxapi.py +304 -0
  103. xtgeo/well/_wellmarkers.py +486 -0
  104. xtgeo/well/_wells_utils.py +158 -0
  105. xtgeo/well/blocked_well.py +220 -0
  106. xtgeo/well/blocked_wells.py +134 -0
  107. xtgeo/well/well1.py +1516 -0
  108. xtgeo/well/wells.py +211 -0
  109. xtgeo/xyz/__init__.py +6 -0
  110. xtgeo/xyz/_polygons_oper.py +272 -0
  111. xtgeo/xyz/_xyz.py +758 -0
  112. xtgeo/xyz/_xyz_data.py +646 -0
  113. xtgeo/xyz/_xyz_io.py +737 -0
  114. xtgeo/xyz/_xyz_lowlevel.py +42 -0
  115. xtgeo/xyz/_xyz_oper.py +613 -0
  116. xtgeo/xyz/_xyz_roxapi.py +766 -0
  117. xtgeo/xyz/points.py +698 -0
  118. xtgeo/xyz/polygons.py +827 -0
  119. xtgeo-4.14.1.dist-info/METADATA +146 -0
  120. xtgeo-4.14.1.dist-info/RECORD +122 -0
  121. xtgeo-4.14.1.dist-info/WHEEL +5 -0
  122. xtgeo-4.14.1.dist-info/licenses/LICENSE.md +165 -0
xtgeo/grid3d/_grid3d.py
@@ -0,0 +1,29 @@
+"""Private baseclass for Grid and GridProperties, not to be used by itself."""
+
+from xtgeo.common import XTGeoDialog
+
+xtg = XTGeoDialog()
+
+
+class _Grid3D:
+    """Abstract base class for Grid3D."""
+
+    def __init__(self, ncol: int = 4, nrow: int = 3, nlay: int = 5):
+        self._ncol = ncol
+        self._nrow = nrow
+        self._nlay = nlay
+
+    @property
+    def ncol(self) -> int:
+        """Returns the NCOL (NX or Ncolumns) number of cells."""
+        return self._ncol
+
+    @property
+    def nrow(self) -> int:
+        """Returns the NROW (NY or Nrows) number of cells."""
+        return self._nrow
+
+    @property
+    def nlay(self) -> int:
+        """Returns the NLAY (NZ or Nlayers) number of cells."""
+        return self._nlay
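A minimal usage sketch of how these dimension properties surface on the public Grid class, which inherits from _Grid3D. The create_box_grid factory and its dimension argument are assumptions about the public xtgeo API, not part of this diff:

# Hedged sketch, not part of the diff: assumes xtgeo.create_box_grid exists
# with a "dimension" argument (public factory in recent xtgeo releases).
import xtgeo

grid = xtgeo.create_box_grid(dimension=(30, 20, 10))
# ncol/nrow/nlay are the read-only properties defined on the _Grid3D base class
print(grid.ncol, grid.nrow, grid.nlay)  # expected: 30 20 10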
xtgeo/grid3d/_grid3d_fence.py
@@ -0,0 +1,284 @@
+"""Some grid utilities, file scanning etc."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import numpy as np
+
+from xtgeo import _cxtgeo
+from xtgeo.common import null_logger
+from xtgeo.common.constants import UNDEF_LIMIT
+from xtgeo.grid3d import _gridprop_lowlevel as gl
+from xtgeo.surface import _regsurf_lowlevel as rl
+from xtgeo.surface.regular_surface import surface_from_grid3d
+from xtgeo.xyz import Polygons
+
+if TYPE_CHECKING:
+    from xtgeo.grid3d import Grid, GridProperty
+
+
+logger = null_logger(__name__)
+
+
+def _process_fencespec(
+    self: Grid,
+    fencespec: np.ndarray | Polygons,
+    hincrement: float | int | None,
+    atleast: int,
+    nextend: int,
+) -> np.ndarray:
+    if isinstance(fencespec, np.ndarray):
+        return fencespec
+
+    if hincrement is not None and not isinstance(hincrement, (float, int)):
+        raise TypeError("'hincrement' can only be a number or None")
+
+    if isinstance(fencespec, Polygons):
+        return _get_randomline_fence(self, fencespec, hincrement, atleast, nextend)
+
+    raise RuntimeError(
+        "The 'fencespec' argument must be a numpy array or a Polygons instance."
+    )
+
+
+def get_randomline(
+    self: Grid,
+    fencespec: np.ndarray | Polygons,
+    prop: str | GridProperty,
+    zmin: float | int | None = None,
+    zmax: float | int | None = None,
+    zincrement: float | int = 1.0,
+    hincrement: float | int | None = None,
+    atleast: int = 5,
+    nextend: int = 2,
+) -> tuple[float, float, float, float, np.ndarray]:
+    import os
+
+    # keep the old method as hidden for reference since it proves to be faster in some
+    # real cases. Need to fully understand this to optimise the new, but will postpone
+    # this for now. TODO: remove this after a while when the new version is fully tested
+    if os.environ.get("XTGEO_RANDOMLINE_OLD", None):
+        return _get_randomline_old(
+            self, fencespec, prop, zmin, zmax, zincrement, hincrement, atleast, nextend
+        )
+
+    logger.info("New version")
+    return _get_randomline_v2(
+        self, fencespec, prop, zmin, zmax, zincrement, hincrement, atleast, nextend
+    )
+
+
+def _get_randomline_v2(
+    self: Grid,
+    fencespec: np.ndarray | Polygons,
+    prop: str | GridProperty,
+    zmin: float | int | None = None,
+    zmax: float | int | None = None,
+    zincrement: float | int = 1.0,
+    hincrement: float | int | None = None,
+    atleast: int = 5,
+    nextend: int = 2,
+) -> tuple[float, float, float, float, np.ndarray]:
+    #
+    fencespec = _process_fencespec(self, fencespec, hincrement, atleast, nextend)
+
+    cache = self._get_cache()
+
+    logger.info("Get property...")
+    if isinstance(prop, str):
+        grid_prop = self.get_prop_by_name(prop)
+        if grid_prop is None:
+            raise ValueError(f"No property with name {prop} was found in grid")
+        prop = grid_prop
+
+    assert isinstance(fencespec, np.ndarray)
+
+    assert cache.top_depth is not None  # mypy
+    assert cache.base_depth is not None  # mypy
+    if zmin is None:
+        zmin = cache.top_depth.values.min()
+    if zmax is None:
+        zmax = cache.base_depth.values.max()
+
+    nzsam = int((zmax - zmin) / float(zincrement)) + 1
+    z_vector = np.linspace(zmin, zmax, nzsam)
+
+    arr = self._get_grid_cpp().get_grid_fence(
+        cache.onegrid_cpp,
+        fencespec,
+        prop.values,
+        z_vector,
+        cache.top_i_index_cpp,
+        cache.top_j_index_cpp,
+        cache.base_i_index_cpp,
+        cache.base_j_index_cpp,
+        cache.top_depth_cpp,
+        cache.base_depth_cpp,
+        cache.threshold_magic_1,
+    )
+
+    return (fencespec[3, 0], fencespec[3, -1], zmin, zmax, arr.T)
+
+
+def _get_randomline_fence(
+    self: Grid,
+    polygon: Polygons,
+    distance: float | int | None,
+    atleast: int,
+    nextend: int,
+) -> np.ndarray:
+    """Compute a resampled fence from a Polygons instance."""
+    if distance is None:
+        logger.debug("Estimate fence distance from grid resolution...")
+        geom = self.get_geometrics()
+
+        avgdxdy = 0.5 * (geom[10] + geom[11])
+        distance = 0.5 * avgdxdy
+        logger.debug("Estimate fence distance from grid resolution... DONE")
+
+    logger.debug("Getting fence from a Polygons instance...")
+    fspec = polygon.get_fence(
+        distance=distance, atleast=atleast, nextend=nextend, asnumpy=True
+    )
+    logger.debug("Getting fence from a Polygons instance... DONE")
+    return fspec
+
+
+# keep a while for benchmarking
+def _get_randomline_old(
+    self: Grid,
+    fencespec: np.ndarray | Polygons,
+    prop: str | GridProperty,
+    zmin: float | int | None = None,
+    zmax: float | int | None = None,
+    zincrement: float | int = 1.0,
+    hincrement: float | int | None = None,
+    atleast: int = 5,
+    nextend: int = 2,
+) -> tuple[float, float, float, float, np.ndarray]:
+    """Extract a randomline from a 3D grid.
+
+    This is a difficult task, in particular in terms of acceptable speed.
+    """
+
+    logger.info("Enter get_randomline OLD from Grid...")
+
+    _update_tmpvars(self, force=True)
+
+    if hincrement is not None and not isinstance(hincrement, (float, int)):
+        raise TypeError("'hincrement' can only be a number or None")
+
+    if isinstance(fencespec, Polygons):
+        fencespec = _get_randomline_fence(self, fencespec, hincrement, atleast, nextend)
+
+    logger.info("Get property...")
+    if isinstance(prop, str):
+        grid_prop = self.get_prop_by_name(prop)
+        if grid_prop is None:
+            raise ValueError(f"No property with name {prop} was found in grid")
+        prop = grid_prop
+
+    assert isinstance(fencespec, np.ndarray)
+    xcoords = fencespec[:, 0]
+    ycoords = fencespec[:, 1]
+    hcoords = fencespec[:, 3]
+
+    if zmin is None:
+        zmin = self._tmp["topd"].values.min()
+    if zmax is None:
+        zmax = self._tmp["basd"].values.max()
+
+    print("ZICREMENT", zincrement)
+
+    nzsam = int((zmax - zmin) / float(zincrement)) + 1
+    nsamples = xcoords.shape[0] * nzsam
+
+    logger.info("Running C routine to get randomline...")
+    self._set_xtgformat1()
+    self._tmp["onegrid"]._set_xtgformat1()
+    _ier, values = _cxtgeo.grd3d_get_randomline(
+        xcoords,
+        ycoords,
+        zmin,
+        zmax,
+        nzsam,
+        self._tmp["topd"].ncol,
+        self._tmp["topd"].nrow,
+        self._tmp["topd"].xori,
+        self._tmp["topd"].yori,
+        self._tmp["topd"].xinc,
+        self._tmp["topd"].yinc,
+        self._tmp["topd"].rotation,
+        self._tmp["topd"].yflip,
+        self._tmp["topi_carr"],
+        self._tmp["topj_carr"],
+        self._tmp["basi_carr"],
+        self._tmp["basj_carr"],
+        self.ncol,
+        self.nrow,
+        self.nlay,
+        self._coordsv,
+        self._zcornsv,
+        self._actnumsv,
+        gl.update_carray(prop, dtype=np.float64),
+        self._tmp["onegrid"]._zcornsv,
+        self._tmp["onegrid"]._actnumsv,
+        nsamples,
+    )
+
+    logger.info("Running C routine to get randomline... DONE")
+
+    values[values > UNDEF_LIMIT] = np.nan
+    arr = values.reshape((xcoords.shape[0], nzsam)).T
+
+    logger.info("Getting randomline... DONE")
+    return (hcoords[0], hcoords[-1], zmin, zmax, arr)
+
+
+def _update_tmpvars(self: Grid, force: bool = False) -> None:
+    """The self._tmp variables are needed to speed up calculations.
+
+    If they are already created, there is no need to recreate them.
+    """
+    if "onegrid" not in self._tmp or force:
+        logger.info("Make a tmp onegrid instance...")
+        self._tmp["onegrid"] = self.copy()
+        self._tmp["onegrid"]._set_xtgformat1()
+        self._tmp["onegrid"].reduce_to_one_layer()
+        one = self._tmp["onegrid"]
+        logger.info("Make a tmp onegrid instance... DONE")
+        logger.info("Make a set of tmp surfaces for I J locations + depth...")
+        self._tmp["topd"] = surface_from_grid3d(
+            one, where="top", property="depth", rfactor=4, index_position="top"
+        )
+        self._tmp["topi"] = surface_from_grid3d(
+            one, where="top", property="i", rfactor=4, index_position="top"
+        )
+        self._tmp["topj"] = surface_from_grid3d(
+            one, where="top", property="j", rfactor=4, index_position="top"
+        )
+        self._tmp["basd"] = surface_from_grid3d(
+            one, where="base", property="depth", rfactor=4, index_position="base"
+        )
+        self._tmp["basi"] = surface_from_grid3d(
+            one, where="base", property="i", rfactor=4, index_position="base"
+        )
+        self._tmp["basj"] = surface_from_grid3d(
+            one, where="base", property="j", rfactor=4, index_position="base"
+        )
+
+        self._tmp["topi"].fill()
+        self._tmp["topj"].fill()
+        self._tmp["basi"].fill()
+        self._tmp["basj"].fill()
+
+        self._tmp["topi_carr"] = rl.get_carr_double(self._tmp["topi"])
+        self._tmp["topj_carr"] = rl.get_carr_double(self._tmp["topj"])
+        self._tmp["basi_carr"] = rl.get_carr_double(self._tmp["basi"])
+        self._tmp["basj_carr"] = rl.get_carr_double(self._tmp["basj"])
+
+        self._tmp["onegrid"]._set_xtgformat1()
+        logger.info("Make a set of tmp surfaces for I J locations + depth... DONE")
+    else:
+        logger.info("Re-use existing onegrid and tmp surfaces for I J")
xtgeo/grid3d/_grid3d_utils.py
@@ -0,0 +1,228 @@
+"""Some grid utilities, file scanning etc (methods with no class)"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Literal
+
+import numpy as np
+import pandas as pd
+import resfo
+import roffio
+
+from xtgeo.common import null_logger
+from xtgeo.common.constants import MAXDATES, MAXKEYWORDS
+
+if TYPE_CHECKING:
+    from xtgeo.io._file import FileWrapper
+
+    from .grid_properties import GridPropertiesKeywords, KeywordDateTuple, KeywordTuple
+
+logger = null_logger(__name__)
+
+
+def scan_keywords(
+    pfile: FileWrapper,
+    fformat: Literal["xecl", "roff"] = "xecl",
+    maxkeys: int = MAXKEYWORDS,
+    dataframe: bool = False,
+    dates: bool = False,
+) -> GridPropertiesKeywords:
+    """Quick scan of keywords in Eclipse binary restart/init/... file,
+    or ROFF binary files.
+
+    Cf. grid_properties.py description
+    """
+    if fformat not in ("xecl", "roff"):
+        raise ValueError(f"File format can be either `roff` or `xecl`, given {fformat}")
+
+    if fformat == "roff":
+        return _scan_roff_keywords(pfile, maxkeys=maxkeys, dataframe=dataframe)
+    return (
+        _scan_ecl_keywords_w_dates(pfile, maxkeys=maxkeys, dataframe=dataframe)
+        if dates
+        else _scan_ecl_keywords(pfile, maxkeys=maxkeys, dataframe=dataframe)
+    )
+
+
+def scan_dates(
+    pfile: FileWrapper,
+    maxdates: int = MAXDATES,
+    dataframe: bool = False,
+) -> list | pd.DataFrame:
+    """Quick scan dates in a simulation restart file.
+
+    Cf. grid_properties.py description
+    """
+    dates = []
+    seqnum = -1
+    for item in resfo.lazy_read(pfile.file):
+        kw = item.read_keyword().strip()
+        data = item.read_array()
+
+        if kw == "SEQNUM":
+            seqnum = data[0]
+            continue
+
+        # With LGRs multiple INTEHEADs may occur. Ensure we get the date
+        # from the first INTEHEAD after a SEQNUM.
+        if kw == "INTEHEAD" and seqnum != -1:
+            # Index 66 = year, 65 = month, 64 = day
+            date = int(f"{data[66]}{data[65]:02d}{data[64]:02d}")
+            dates.append((seqnum, date))
+            seqnum = -1
+
+    return (
+        pd.DataFrame.from_records(dates, columns=["SEQNUM", "DATE"])
+        if dataframe
+        else dates
+    )
+
+
+def _scan_ecl_keywords(
+    pfile: FileWrapper,
+    maxkeys: int = MAXKEYWORDS,
+    dataframe: bool = False,
+) -> list[KeywordTuple] | pd.DataFrame:
+    keywords = []
+    for item in resfo.lazy_read(pfile.file):
+        keywords.append(
+            (
+                item.read_keyword().strip(),
+                item.read_type().strip(),
+                item.read_length(),
+                item.stream.tell(),
+            )
+        )
+
+    return (
+        pd.DataFrame.from_records(
+            keywords,
+            columns=["KEYWORD", "TYPE", "NITEMS", "BYTESTART"],
+        )
+        if dataframe
+        else keywords
+    )
+
+
+def _scan_ecl_keywords_w_dates(
+    pfile: FileWrapper,
+    maxkeys: int = MAXKEYWORDS,
+    dataframe: bool = False,
+) -> list[KeywordDateTuple] | pd.DataFrame:
+    """Add a date column to the keyword"""
+    xkeys = _scan_ecl_keywords(pfile, maxkeys=maxkeys, dataframe=False)
+    assert isinstance(xkeys, list)
+    xdates = scan_dates(pfile, maxdates=MAXDATES, dataframe=False)
+    assert isinstance(xdates, list)
+
+    result = []
+    # now merge these two:
+    nv = -1
+    date = 0
+    for item in xkeys:
+        name, dtype, reclen, bytepos = item
+        if name == "SEQNUM":
+            nv += 1
+            date = xdates[nv][1]
+
+        entry = (name, dtype, reclen, bytepos, date)
+        result.append(entry)
+
+    return (
+        pd.DataFrame.from_records(
+            result,
+            columns=["KEYWORD", "TYPE", "NITEMS", "BYTESTART", "DATE"],
+        )
+        if dataframe
+        else result
+    )
+
+
+def _scan_roff_keywords(
+    pfile: FileWrapper,
+    maxkeys: int = MAXKEYWORDS,
+    dataframe: bool = False,
+) -> list[KeywordTuple] | pd.DataFrame:
+    with open(pfile.file, "rb") as fin:
+        is_binary = fin.read(8) == b"roff-bin"
+
+    keywords = []
+    with roffio.lazy_read(pfile.file) as roff_iter:
+        SPACE_OR_NUL = 1
+        TAG = 3 + SPACE_OR_NUL  # "tag"
+        ENDTAG = 6 + SPACE_OR_NUL  # "endtag"
+        ARRAY_AND_SIZE = 5 + SPACE_OR_NUL + 4  # "array", 4 byte int
+
+        count = 0
+        done = False
+        # 81 is where the standard RMS exported header size ends.
+        # This offset won't be correct for non-RMS exported roff files,
+        # but it is a compromise to keep the old functionality of byte
+        # counting _close enough_ because this data is not made available
+        # from roffio.
+        byte_pos = 81
+
+        for tag_name, tag_group in roff_iter:
+            byte_pos += TAG
+            byte_pos += len(tag_name) + SPACE_OR_NUL
+
+            for keyword, value in tag_group:
+                if isinstance(value, (np.ndarray, bytes)):
+                    byte_pos += ARRAY_AND_SIZE
+                dtype, size, offset = _get_roff_type_and_size(value, is_binary)
+
+                byte_pos += len(dtype) + SPACE_OR_NUL
+                byte_pos += len(keyword) + SPACE_OR_NUL
+
+                keyword = f"{tag_name}!{keyword}"
+                if tag_name == "parameter" and keyword == "name":
+                    keyword += f"!{value}"
+                keywords.append((keyword, dtype, size, byte_pos))
+
+                byte_pos += offset
+                count += 1
+                if count == maxkeys:
+                    done = True
+                    break
+
+            byte_pos += ENDTAG
+            if done:
+                break
+
+    return (
+        pd.DataFrame.from_records(
+            keywords,
+            columns=["KEYWORD", "TYPE", "NITEMS", "BYTESTARTDATA"],
+        )
+        if dataframe
+        else keywords
+    )
+
+
+def _get_roff_type_and_size(
+    value: str | bool | bytes | np.ndarray, is_binary: bool
+) -> tuple[str, int, int]:
+    # If is_binary is False add a multiplier because values will
+    # be separated by spaces in the case of numerical/boolean
+    # data, as opposed to buffer packed, while strings will be
+    # quoted and not just NUL delimited
+    if isinstance(value, str):
+        return "char", 1, len(value) + (1 if is_binary else 3)
+    if isinstance(value, bool):
+        return "bool", 1, 1 if is_binary else 2
+    if isinstance(value, bytes):
+        return "byte", len(value), len(value) * (1 if is_binary else 2)
+    if np.issubdtype(value.dtype, np.bool_):
+        return "bool", value.size, value.size * (1 if is_binary else 2)
+    if np.issubdtype(value.dtype, np.int8) or np.issubdtype(value.dtype, np.uint8):
+        return "byte", value.size, value.size * (1 if is_binary else 2)
+    if np.issubdtype(value.dtype, np.integer):
+        return "int", value.size, value.size * (4 if is_binary else 5)
+    if np.issubdtype(value.dtype, np.float32):
+        return "float", value.size, value.size * (4 if is_binary else 5)
+    if np.issubdtype(value.dtype, np.double):
+        return "double", value.size, value.size * (8 if is_binary else 9)
+    if np.issubdtype(value.dtype, np.str_):
+        total_bytes = sum(len(val) + (1 if is_binary else 3) for val in value)
+        return "char", value.size, total_bytes
+    raise ValueError(f"Could not find suitable roff type for {type(value)}")
xtgeo/grid3d/_grid_boundary.py
@@ -0,0 +1,76 @@
+from __future__ import annotations
+
+import math
+from typing import TYPE_CHECKING, Any
+
+import numpy as np
+
+from xtgeo.xyz.points import Points
+from xtgeo.xyz.polygons import Polygons
+
+if TYPE_CHECKING:
+    from xtgeo.grid3d import Grid
+
+
+def create_boundary(
+    grid: Grid,
+    alpha_factor: float = 1.0,
+    convex: bool = False,
+    simplify: bool | dict[str, Any] = True,
+    filter_array: np.ndarray | None = None,
+) -> Polygons:
+    """Create boundary polygons for a grid."""
+
+    xval, yval, zval = (prop.values for prop in grid.get_xyz())
+
+    if filter_array is not None:
+        if filter_array.shape != grid.dimensions:
+            raise ValueError(
+                "The filter_array needs to have the same dimensions as the grid. "
+                f"Found: {filter_array.shape=} {grid.dimensions=}"
+            )
+        xval = np.ma.masked_where(~filter_array, xval)
+        yval = np.ma.masked_where(~filter_array, yval)
+        zval = np.ma.masked_where(~filter_array, zval)
+
+    # for performance create average points along layers
+    xval = np.ma.mean(xval, axis=2)
+    yval = np.ma.mean(yval, axis=2)
+    zval = np.ma.mean(zval, axis=2)
+
+    xyz_values = np.column_stack(
+        (
+            xval[~xval.mask].ravel(),
+            yval[~yval.mask].ravel(),
+            zval[~zval.mask].ravel(),
+        )
+    )
+
+    pol = Polygons.boundary_from_points(
+        points=Points(xyz_values),
+        alpha_factor=alpha_factor,
+        alpha=_estimate_alpha_for_grid(grid),
+        convex=convex,
+    )
+
+    if simplify:
+        if isinstance(simplify, bool):
+            pol.simplify(tolerance=0.1)
+        elif isinstance(simplify, dict) and "tolerance" in simplify:
+            pol.simplify(**simplify)
+        else:
+            raise ValueError("Invalid values for simplify keyword")
+
+    return pol
+
+
+def _estimate_alpha_for_grid(grid: Grid) -> float:
+    """
+    Estimate an alpha based on grid resolution.
+    Max dx and dy is used as basis for calculation to ensure that the alpha
+    computed is always high enough to prevent polygons appearing around areas
+    of the grid where cells have larger than average dx/dy increments.
+    """
+    dx, dy = grid.get_dx(), grid.get_dy()
+    xinc, yinc = dx.values.max(), dy.values.max()
+    return math.ceil(math.sqrt(xinc**2 + yinc**2) / 2)
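A hedged sketch of driving create_boundary directly from the private module added above. The grid file is a hypothetical placeholder, and using ACTNUM as the filter_array is only one way to restrict the boundary to active cells; the public Grid API may expose this through a wrapper method rather than the private function:

# Hedged sketch, not part of the diff. The import path matches the file added
# above; "MYCASE.EGRID" is hypothetical.
import xtgeo
from xtgeo.grid3d._grid_boundary import create_boundary

grid = xtgeo.grid_from_file("MYCASE.EGRID")
active = grid.get_actnum().values > 0  # boolean mask with shape grid.dimensions

boundary = create_boundary(
    grid,
    alpha_factor=1.2,             # loosen the concave hull slightly
    convex=False,
    simplify={"tolerance": 5.0},  # forwarded to Polygons.simplify
    filter_array=active,
)
print(boundary.get_dataframe().head())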