xtgeo 4.14.1__cp313-cp313-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122) hide show
  1. cxtgeo.py +558 -0
  2. cxtgeoPYTHON_wrap.c +19537 -0
  3. xtgeo/__init__.py +248 -0
  4. xtgeo/_cxtgeo.cp313-win_amd64.pyd +0 -0
  5. xtgeo/_internal.cp313-win_amd64.pyd +0 -0
  6. xtgeo/common/__init__.py +19 -0
  7. xtgeo/common/_angles.py +29 -0
  8. xtgeo/common/_xyz_enum.py +50 -0
  9. xtgeo/common/calc.py +396 -0
  10. xtgeo/common/constants.py +30 -0
  11. xtgeo/common/exceptions.py +42 -0
  12. xtgeo/common/log.py +93 -0
  13. xtgeo/common/sys.py +166 -0
  14. xtgeo/common/types.py +18 -0
  15. xtgeo/common/version.py +34 -0
  16. xtgeo/common/xtgeo_dialog.py +604 -0
  17. xtgeo/cube/__init__.py +9 -0
  18. xtgeo/cube/_cube_export.py +214 -0
  19. xtgeo/cube/_cube_import.py +532 -0
  20. xtgeo/cube/_cube_roxapi.py +180 -0
  21. xtgeo/cube/_cube_utils.py +287 -0
  22. xtgeo/cube/_cube_window_attributes.py +273 -0
  23. xtgeo/cube/cube1.py +1023 -0
  24. xtgeo/grid3d/__init__.py +15 -0
  25. xtgeo/grid3d/_ecl_grid.py +778 -0
  26. xtgeo/grid3d/_ecl_inte_head.py +152 -0
  27. xtgeo/grid3d/_ecl_logi_head.py +71 -0
  28. xtgeo/grid3d/_ecl_output_file.py +81 -0
  29. xtgeo/grid3d/_egrid.py +1004 -0
  30. xtgeo/grid3d/_find_gridprop_in_eclrun.py +625 -0
  31. xtgeo/grid3d/_grdecl_format.py +309 -0
  32. xtgeo/grid3d/_grdecl_grid.py +400 -0
  33. xtgeo/grid3d/_grid3d.py +29 -0
  34. xtgeo/grid3d/_grid3d_fence.py +284 -0
  35. xtgeo/grid3d/_grid3d_utils.py +228 -0
  36. xtgeo/grid3d/_grid_boundary.py +76 -0
  37. xtgeo/grid3d/_grid_etc1.py +1683 -0
  38. xtgeo/grid3d/_grid_export.py +222 -0
  39. xtgeo/grid3d/_grid_hybrid.py +50 -0
  40. xtgeo/grid3d/_grid_import.py +79 -0
  41. xtgeo/grid3d/_grid_import_ecl.py +101 -0
  42. xtgeo/grid3d/_grid_import_roff.py +135 -0
  43. xtgeo/grid3d/_grid_import_xtgcpgeom.py +375 -0
  44. xtgeo/grid3d/_grid_refine.py +258 -0
  45. xtgeo/grid3d/_grid_roxapi.py +292 -0
  46. xtgeo/grid3d/_grid_translate_coords.py +154 -0
  47. xtgeo/grid3d/_grid_wellzone.py +165 -0
  48. xtgeo/grid3d/_gridprop_export.py +202 -0
  49. xtgeo/grid3d/_gridprop_import_eclrun.py +164 -0
  50. xtgeo/grid3d/_gridprop_import_grdecl.py +132 -0
  51. xtgeo/grid3d/_gridprop_import_roff.py +52 -0
  52. xtgeo/grid3d/_gridprop_import_xtgcpprop.py +168 -0
  53. xtgeo/grid3d/_gridprop_lowlevel.py +171 -0
  54. xtgeo/grid3d/_gridprop_op1.py +272 -0
  55. xtgeo/grid3d/_gridprop_roxapi.py +301 -0
  56. xtgeo/grid3d/_gridprop_value_init.py +140 -0
  57. xtgeo/grid3d/_gridprops_import_eclrun.py +344 -0
  58. xtgeo/grid3d/_gridprops_import_roff.py +83 -0
  59. xtgeo/grid3d/_roff_grid.py +470 -0
  60. xtgeo/grid3d/_roff_parameter.py +303 -0
  61. xtgeo/grid3d/grid.py +3010 -0
  62. xtgeo/grid3d/grid_properties.py +699 -0
  63. xtgeo/grid3d/grid_property.py +1313 -0
  64. xtgeo/grid3d/types.py +15 -0
  65. xtgeo/interfaces/rms/__init__.py +18 -0
  66. xtgeo/interfaces/rms/_regular_surface.py +460 -0
  67. xtgeo/interfaces/rms/_rms_base.py +100 -0
  68. xtgeo/interfaces/rms/_rmsapi_package.py +69 -0
  69. xtgeo/interfaces/rms/rmsapi_utils.py +438 -0
  70. xtgeo/io/__init__.py +1 -0
  71. xtgeo/io/_file.py +603 -0
  72. xtgeo/metadata/__init__.py +17 -0
  73. xtgeo/metadata/metadata.py +435 -0
  74. xtgeo/roxutils/__init__.py +7 -0
  75. xtgeo/roxutils/_roxar_loader.py +54 -0
  76. xtgeo/roxutils/_roxutils_etc.py +122 -0
  77. xtgeo/roxutils/roxutils.py +207 -0
  78. xtgeo/surface/__init__.py +20 -0
  79. xtgeo/surface/_regsurf_boundary.py +26 -0
  80. xtgeo/surface/_regsurf_cube.py +210 -0
  81. xtgeo/surface/_regsurf_cube_window.py +391 -0
  82. xtgeo/surface/_regsurf_cube_window_v2.py +297 -0
  83. xtgeo/surface/_regsurf_cube_window_v3.py +360 -0
  84. xtgeo/surface/_regsurf_export.py +388 -0
  85. xtgeo/surface/_regsurf_grid3d.py +275 -0
  86. xtgeo/surface/_regsurf_gridding.py +347 -0
  87. xtgeo/surface/_regsurf_ijxyz_parser.py +278 -0
  88. xtgeo/surface/_regsurf_import.py +347 -0
  89. xtgeo/surface/_regsurf_lowlevel.py +122 -0
  90. xtgeo/surface/_regsurf_oper.py +538 -0
  91. xtgeo/surface/_regsurf_utils.py +81 -0
  92. xtgeo/surface/_surfs_import.py +43 -0
  93. xtgeo/surface/_zmap_parser.py +138 -0
  94. xtgeo/surface/regular_surface.py +3043 -0
  95. xtgeo/surface/surfaces.py +276 -0
  96. xtgeo/well/__init__.py +24 -0
  97. xtgeo/well/_blockedwell_roxapi.py +241 -0
  98. xtgeo/well/_blockedwells_roxapi.py +68 -0
  99. xtgeo/well/_well_aux.py +30 -0
  100. xtgeo/well/_well_io.py +327 -0
  101. xtgeo/well/_well_oper.py +483 -0
  102. xtgeo/well/_well_roxapi.py +304 -0
  103. xtgeo/well/_wellmarkers.py +486 -0
  104. xtgeo/well/_wells_utils.py +158 -0
  105. xtgeo/well/blocked_well.py +220 -0
  106. xtgeo/well/blocked_wells.py +134 -0
  107. xtgeo/well/well1.py +1516 -0
  108. xtgeo/well/wells.py +211 -0
  109. xtgeo/xyz/__init__.py +6 -0
  110. xtgeo/xyz/_polygons_oper.py +272 -0
  111. xtgeo/xyz/_xyz.py +758 -0
  112. xtgeo/xyz/_xyz_data.py +646 -0
  113. xtgeo/xyz/_xyz_io.py +737 -0
  114. xtgeo/xyz/_xyz_lowlevel.py +42 -0
  115. xtgeo/xyz/_xyz_oper.py +613 -0
  116. xtgeo/xyz/_xyz_roxapi.py +766 -0
  117. xtgeo/xyz/points.py +698 -0
  118. xtgeo/xyz/polygons.py +827 -0
  119. xtgeo-4.14.1.dist-info/METADATA +146 -0
  120. xtgeo-4.14.1.dist-info/RECORD +122 -0
  121. xtgeo-4.14.1.dist-info/WHEEL +5 -0
  122. xtgeo-4.14.1.dist-info/licenses/LICENSE.md +165 -0
xtgeo/well/_well_io.py ADDED
@@ -0,0 +1,327 @@
1
+ """Well input and ouput, private module"""
2
+
3
+ import json
4
+ from copy import deepcopy
5
+
6
+ import numpy as np
7
+ import pandas as pd
8
+
9
+ from xtgeo.common._xyz_enum import _AttrName, _AttrType
10
+ from xtgeo.common.log import null_logger
11
+ from xtgeo.metadata.metadata import MetaDataWell
12
+
13
+ logger = null_logger(__name__)
14
+
15
+
16
def import_rms_ascii(
    wfile,
    mdlogname=None,
    zonelogname=None,
    strict=False,
    lognames="all",
    lognames_strict=False,
):
    """Import a well from the RMS ascii table format.

    Args:
        wfile: File wrapper object; ``wfile.file`` is the path that is read.
        mdlogname: Name of a measured-depth log to look for, or None.
        zonelogname: Name of a zone log to look for, or None.
        strict: If True, raise when mdlogname/zonelogname are requested
            but not present in the file.
        lognames: "all", a single log name, or a list of log names to keep.
        lognames_strict: If True, raise when a requested logname is missing.

    Returns:
        dict with keys: wlogtypes, wlogrecords, rkb, xpos, ypos, wname,
        df (pandas DataFrame with the log data), mdlogname, zonelogname.
    """
    wlogtype = {}
    wlogrecords = {}

    # X/Y/Z columns are implicit; log columns are appended while the
    # header section is parsed below
    xlognames_all = [
        _AttrName.XNAME.value,
        _AttrName.YNAME.value,
        _AttrName.ZNAME.value,
    ]
    xlognames = []

    lnum = 1
    with open(wfile.file, "r", encoding="UTF-8") as fwell:
        for line in fwell:
            if lnum == 1:
                _ffver = line.strip()  # noqa, file version
            elif lnum == 2:
                _wtype = line.strip()  # noqa, well type
            elif lnum == 3:
                # usually 4 fields, but last (rkb) can be missing. A
                # complication is that first field (well name) may have spaces,
                # hence some clever guessing is needed. However, this cannot be
                # 100% foolproof... if Ycoord < 1000 and last item of a well
                # name with spaces is a number, then this may fail.
                assume_rkb = False
                row = line.strip().split()
                newrow = []
                if len(row) > 3:
                    for item in row:
                        try:
                            item = float(item)
                        except ValueError:
                            item = str(item)
                        newrow.append(item)

                    # BUGFIX: abs() must wrap the value, not the comparison.
                    # The previous abs(newrow[-1] < 1000.0) took abs() of a
                    # boolean, so a large negative trailing number (e.g.
                    # -2000.0) was wrongly treated as an RKB value.
                    if (
                        all(isinstance(var, float) for var in newrow[-3:])
                        and abs(newrow[-1]) < 1000.0
                    ):
                        assume_rkb = True

                rkb = float(row.pop()) if assume_rkb else None
                ypos = float(row.pop())
                xpos = float(row.pop())
                wname = " ".join(map(str, row))

            elif lnum == 4:
                nlogs = int(line)
                nlogread = 1
                logger.debug("Number of logs: %s", nlogs)

            else:
                row = line.strip().split()
                lname = row[0]

                # if i_index etc, make uppercase to I_INDEX
                # however it is most practical to treat indexes as CONT logs
                if "_index" in lname:
                    lname = lname.upper()

                ltype = row[1].upper()

                rxv = row[2:]

                xlognames_all.append(lname)
                xlognames.append(lname)

                wlogtype[lname] = ltype

                logger.debug("Reading log name %s of type %s", lname, ltype)

                if ltype == _AttrType.DISC.value:
                    # DISC records are "code label" pairs: code1 lab1 code2 lab2 ...
                    xdict = {int(rxv[i]): rxv[i + 1] for i in range(0, len(rxv), 2)}
                    wlogrecords[lname] = xdict
                else:
                    wlogrecords[lname] = tuple(row[1:])

                nlogread += 1

                if nlogread > nlogs:
                    break

            lnum += 1

    # now import all logs as pandas framework; skiprows points past the header
    dfr = pd.read_csv(
        wfile.file,
        sep=r"\s+",
        skiprows=lnum,
        header=None,
        names=xlognames_all,
        dtype=np.float64,
        na_values=-999,
    )

    # undef values have a high float number? or keep Nan?
    # df.fillna(Well.UNDEF, inplace=True)

    dfr = _trim_on_lognames(dfr, lognames, lognames_strict, wname)
    mdlogname, zonelogname = _check_special_logs(
        dfr, mdlogname, zonelogname, strict, wname
    )

    return {
        "wlogtypes": wlogtype,
        "wlogrecords": wlogrecords,
        "rkb": rkb,
        "xpos": xpos,
        "ypos": ypos,
        "wname": wname,
        "df": dfr,
        "mdlogname": mdlogname,
        "zonelogname": zonelogname,
    }
139
+
140
+
141
def _trim_on_lognames(dfr, lognames, lognames_strict, wname):
    """Reduce the dataframe based on provided list of lognames"""
    if lognames == "all":
        return dfr

    # the coordinate columns are always retained
    keep = [_AttrName.XNAME.value, _AttrName.YNAME.value, _AttrName.ZNAME.value]
    if isinstance(lognames, str):
        keep.append(lognames)
    elif isinstance(lognames, list):
        keep.extend(lognames)

    trimmed = pd.DataFrame()
    for col in keep:
        if col not in dfr.columns:
            if lognames_strict:
                msg = f"Logname <{col}> is not present for <{wname}>"
                msg += " (required log under condition lognames_strict=True)"
                raise ValueError(msg)
            continue
        trimmed[col] = dfr[col]

    return trimmed
163
+
164
+
165
def _check_special_logs(dfr, mdlogname, zonelogname, strict, wname):
    """Check for MD log and Zonelog, if requested"""

    def _validate(attr, logname):
        # a requested log that is absent is an error under strict,
        # otherwise it is dropped (returned as None) with a warning
        if logname is None or logname in dfr.columns:
            return logname
        msg = (
            f"{attr}={logname} was requested but no such log found for "
            f"well {wname}"
        )
        if strict:
            raise ValueError(msg)
        logger.warning(msg)
        return None

    return (
        _validate("mdlogname", mdlogname),
        _validate("zonelogname", zonelogname),
    )
195
+
196
+
197
def export_rms_ascii(self, wfile, precision=4):
    """Export to RMS well format."""
    with open(wfile, "w", encoding="utf-8") as fwell:
        # fixed two-line preamble: file version and well type
        print("1.0", file=fwell)
        print("Unknown", file=fwell)

        header = f"{self._wname} {self._xpos} {self._ypos}"
        if self._rkb is not None:
            header += f" {self._rkb}"
        print(header, file=fwell)

        print(f"{len(self.lognames)}", file=fwell)
        for lname in self.lognames:
            rec = self.wlogrecords[lname]
            if isinstance(rec, dict):
                # flatten {code: label} mapping to "code label code label ..."
                usewrec = " ".join(
                    str(tok) for pair in rec.items() for tok in pair
                )
            else:
                usewrec = "linear"

            print(f"{lname} {self.get_logtype(lname)} {usewrec}", file=fwell)

    # now export all logs as pandas framework
    tmpdf = self._wdata.data.copy().fillna(value=-999)

    # make the disc as is np.int
    for lname in self.wlogtypes:
        if self.wlogtypes[lname] == _AttrType.DISC.value:
            tmpdf[[lname]] = tmpdf[[lname]].fillna(-999).astype(int)

    cformat = "%-." + str(precision) + "f"
    tmpdf.to_csv(
        wfile,
        sep=" ",
        header=False,
        index=False,
        float_format=cformat,
        escapechar="\\",
        mode="a",
    )
239
+
240
+
241
def export_hdf5_well(self, wfile, compression="lzf"):
    """Save to HDF5 format."""
    logger.debug("Export to hdf5 format...")

    self._ensure_consistency()
    self.metadata.required = self
    jmeta = json.dumps(self.metadata.get_metadata())

    # pandas HDFStore compression: "blosc" compresses at level 5,
    # anything else effectively disables compression (zlib, level 0)
    if compression == "blosc":
        complib, complevel = "blosc", 5
    else:
        complib, complevel = "zlib", 0

    with pd.HDFStore(wfile.file, "w", complevel=complevel, complib=complib) as store:
        logger.debug("export to HDF5 %s", wfile.name)
        store.put("Well", self._wdata.data)
        attrs = store.get_storer("Well").attrs
        attrs["metadata"] = jmeta
        attrs["provider"] = "xtgeo"
        attrs["format_idcode"] = 1401

    logger.debug("Export to hdf5 format... done!")
267
+
268
+
269
def import_wlogs(wlogs: dict):
    """
    This converts joined wlogtypes/wlogrecords such as found in
    the hdf5 format to the format used in the Well object.

    >>> import_wlogs(dict())
    {'wlogtypes': {}, 'wlogrecords': {}}
    >>> import_wlogs(dict([("X_UTME", ("CONT", None))]))
    {'wlogtypes': {'X_UTME': 'CONT'}, 'wlogrecords': {'X_UTME': None}}

    Returns:
        dictionary with "wlogtypes" and "wlogrecords" as keys
        and corresponding values.
    """
    valid_types = {_AttrType.DISC.value, _AttrType.CONT.value}
    wlogtypes = {}
    wlogrecords = {}
    for name, (typ, rec) in wlogs.items():
        if typ not in valid_types:
            raise ValueError(f"Invalid log type found in input: {typ}")
        wlogtypes[name] = deepcopy(typ)

        # a record is either a {code: label} dict (DISC) or None
        if rec is not None and not isinstance(rec, dict):
            raise ValueError(f"Invalid log record found in input: {rec}")
        wlogrecords[name] = deepcopy(rec)

    return {"wlogtypes": wlogtypes, "wlogrecords": wlogrecords}
298
+
299
+
300
def import_hdf5_well(wfile, **kwargs):
    """Load from HDF5 format."""
    logger.debug("The kwargs may be unused: %s", kwargs)

    with pd.HDFStore(wfile.file, "r") as store:
        data = store.get("Well")
        wstore = store.get_storer("Well")
        jmeta = wstore.attrs["metadata"]
        # provider = wstore.attrs["provider"]
        # format_idcode = wstore.attrs["format_idcode"]

    if isinstance(jmeta, bytes):
        jmeta = jmeta.decode()

    # the "_required_" section carries the attributes needed to rebuild a Well
    req = json.loads(jmeta, object_pairs_hook=dict)["_required_"]

    result = {}
    for attrname in MetaDataWell.REQUIRED:
        if attrname == "wlogs":
            result.update(import_wlogs(req[attrname]))
        elif attrname == "name":
            result["wname"] = req[attrname]
        else:
            result[attrname] = req[attrname]

    result["df"] = data
    return result