resfo-utilities 0.0.1__py3-none-any.whl

@@ -0,0 +1,3 @@
1
+ from ._cornerpoint_grid import CornerpointGrid, InvalidEgridFileError, MapAxes
2
+
3
+ __all__ = ["CornerpointGrid", "InvalidEgridFileError", "MapAxes"]
@@ -0,0 +1,461 @@
1
+ from __future__ import annotations
2
+ import os
3
+ from typing import Self, Any, IO, TypeVar
4
+ from dataclasses import dataclass
5
+ from numpy import typing as npt
6
+ import numpy as np
7
+ import resfo
8
+ import trimesh
9
+ import warnings
10
+ from matplotlib.path import Path
11
+ import heapq
12
+ from functools import cached_property
13
+
14
+
15
+ class InvalidEgridFileError(ValueError):
16
+ pass
17
+
18
+
19
+ @dataclass
20
+ class MapAxes:
21
+ """The axes of the map coordinate system.
22
+
23
+ Usually, a corner-point grid contains x,y values that need to be transformed
24
+ into a map coordinate system (which could be :term:`UTM-coordinates`). That
25
+ coordinate system is represented by MapAxes.
26
+
27
+ Note that regardless of the size of the axes, when transforming from the grid
28
+ coordinate system to the map coordinate system, scaling is not applied.
29
+
30
+ Attributes:
31
+ y_axis:
32
+ A point along the map y axis.
33
+ origin:
34
+ The origin of the map coordinate system.
35
+ x_axis:
36
+ A point along the map x axis.
37
+ """
38
+
39
+ y_axis: tuple[np.float32, np.float32]
40
+ origin: tuple[np.float32, np.float32]
41
+ x_axis: tuple[np.float32, np.float32]
42
+
43
+ def transform_map_points(
44
+ self, points: npt.NDArray[np.float32]
45
+ ) -> npt.NDArray[np.float32]:
46
+ """Transforms points from map coordinates to grid coordinates.
47
+
48
+ Scaling according to the length of the axes is not applied.
49
+
50
+ Returns:
51
+ The given map points in the grid coordinate system.
52
+ """
53
+ translated = points - np.array([*self.origin, 0])
54
+ tx = translated[:, 0]
55
+ ty = translated[:, 1]
56
+ x_vec = (self.x_axis[0] - self.origin[0], self.x_axis[1] - self.origin[1])
57
+ y_vec = (self.y_axis[0] - self.origin[0], self.y_axis[1] - self.origin[1])
58
+ x_norm = np.sqrt(x_vec[0] ** 2 + x_vec[1] ** 2)
59
+ x_unit = (x_vec[0] / x_norm, x_vec[1] / x_norm)
60
+ y_norm = np.sqrt(y_vec[0] ** 2 + y_vec[1] ** 2)
61
+ y_unit = (y_vec[0] / y_norm, y_vec[1] / y_norm)
62
+ norm = 1.0 / (x_unit[0] * y_unit[1] - x_unit[1] * y_unit[0])
63
+ return np.column_stack(
64
+ [
65
+ (tx * y_unit[1] - ty * y_unit[0]) * norm,
66
+ (-tx * x_unit[1] + ty * x_unit[0]) * norm,
67
+ translated[:, 2],
68
+ ]
69
+ )
70
+
71
+
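As a worked illustration of the transform implemented above, the following minimal sketch (all axis points and coordinates are made up) maps one point from map coordinates to grid coordinates; only translation and rotation are applied, never scaling:

```python
import numpy as np
from resfo_utilities import MapAxes

# Hypothetical axes: origin at (100, 100), x axis towards east, y axis towards north.
axes = MapAxes(y_axis=(100.0, 200.0), origin=(100.0, 100.0), x_axis=(200.0, 100.0))

# One (x, y, z) point in map coordinates; z is passed through untouched.
points = np.array([[150.0, 125.0, 2000.0]], dtype=np.float32)
print(axes.transform_map_points(points))  # grid coordinates (50, 25, 2000)
```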
72
+ @dataclass
73
+ class CornerpointGrid:
74
+ """
75
+ A :term:`corner-point grid` is a tessellation of a 3D volume where
76
+ each cell is a hexahedron.
77
+
78
+ Each cell is identified by an integer coordinate (i,j,k).
79
+ For each i,j there is a straight line, defined by its end-points,
80
+ called a :term:`pillar`. The end-points form two surfaces, one
81
+ for the top end-points and one for the bottom end-points, which
82
+ are in the CornerpointGrid.coord array.
83
+
84
+ For the cell at position i,j,k, its eight corner vertices are defined by
85
+ giving the z values along the pillars at [(i,j), (i+1, j), (i, j+1), (i+1, j+1)]
86
+ which are in the CornerpointGrid.zcorn array.
87
+
88
+
89
+ Attributes:
90
+ coord:
91
+ A (ni+1, nj+1, 2, 3) array where coord[i,j,0] is the top end point
92
+ of the i,j pillar and coord[i,j,1] is the corresponding bottom end point.
93
+ zcorn:
94
+ A (ni, nj, nk, 8) array where zcorn[i,j,k] contains the z values of
95
+ the 8 corners of the cell at i,j,k. The order of the corner z values
96
+ is [TSW, TSE, TNW, TNE, BSW, BSE, BNW, BNE], where N(orth) means higher y,
97
+ E(ast) means higher x and T(op) means lower z (when z is interpreted as depth).
98
+
99
+ map_axes:
100
+ Optionally, each point is interpreted to be relative to some map
101
+ coordinate system. Defaults to the unit coordinate system with
102
+ origin at (0,0).
103
+
104
+ """
105
+
106
+ coord: npt.NDArray[np.float32]
107
+ zcorn: npt.NDArray[np.float32]
108
+ map_axes: MapAxes | None = None
109
+
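To make the documented layout concrete, here is a minimal sketch that assembles a single-cell grid by hand, a unit cube with vertical pillars; all values are made up:

```python
import numpy as np
from resfo_utilities import CornerpointGrid

# coord: (ni+1, nj+1, 2, 3) -- top and bottom end point of each pillar.
coord = np.array(
    [[[[i, j, 0.0], [i, j, 1.0]] for j in range(2)] for i in range(2)],
    dtype=np.float32,
)
# zcorn: (ni, nj, nk, 8) -- corner depths in [TSW, TSE, TNW, TNE, BSW, BSE, BNW, BNE] order.
zcorn = np.array([[[[0, 0, 0, 0, 1, 1, 1, 1]]]], dtype=np.float32)

grid = CornerpointGrid(coord, zcorn)
print(grid.coord.shape, grid.zcorn.shape)  # (2, 2, 2, 3) (1, 1, 1, 8)
```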
110
+ @classmethod
111
+ def read_egrid(cls, file_like: str | os.PathLike[str] | IO[Any]) -> Self:
112
+ """Read the global grid from an .EGRID or .FEGRID file.
113
+
114
+ If the EGRID contains Local Grid Refinements or Coarsening Groups,
115
+ they are silently ignored and only the host grid is read. Radial grids
116
+ are not supported and will cause InvalidEgridFileError to be raised.
117
+
118
+ Args:
119
+ file_like:
120
+ The EGRID file; can be a filename, a path-like object, or an opened
121
+ EGRID file. The function also handles formatted EGRID files (.FEGRID).
122
+ Whether the file is formatted is determined by the file extension when
123
+ a filepath is given, and by whether the stream is a byte stream
124
+ (unformatted) or a text stream when an opened file is given.
125
+ Raises:
126
+ InvalidEgridFileError:
127
+ When the egrid file is not valid, or contains a radial grid.
128
+ OSError:
129
+ If the given filepath cannot be opened.
130
+
131
+ """
132
+ coord = None
133
+ dims = None
134
+ zcorn = None
135
+ opened = False
136
+ stream = None
137
+ map_axes = None
138
+
139
+ try:
140
+ if isinstance(file_like, str):
141
+ filename = file_like
142
+ mode = "rt" if filename.lower().endswith("fegrid") else "rb"
143
+ stream = open(filename, mode=mode)
144
+ opened = True
145
+ elif isinstance(file_like, os.PathLike):
146
+ filename = str(file_like)
147
+ mode = "rt" if filename.lower().endswith("fegrid") else "rb"
148
+ stream = open(filename, mode=mode)
149
+ opened = True
150
+ else:
151
+ filename = getattr(file_like, "name", "unknown stream")
152
+ stream = file_like
153
+
154
+ T = TypeVar("T", bound=np.generic)
155
+
156
+ def validate_array(
157
+ name: str,
158
+ array: npt.NDArray[T] | resfo.MESS,
159
+ min_length: int | None = None,
160
+ ) -> npt.NDArray[T]:
161
+ if array is resfo.MESS or isinstance(array, resfo.MESS):
162
+ raise InvalidEgridFileError(
163
+ f"Expected Array for keyword {name} in {filename} but got MESS"
164
+ )
165
+ if min_length is not None and len(array) < min_length:
166
+ raise InvalidEgridFileError(
167
+ f"{name} in EGRID file {filename} contained too few elements"
168
+ )
169
+
170
+ return array
171
+
172
+ def optional_get(array: npt.NDArray[T] | None, index: int) -> T | None:
173
+ if array is None:
174
+ return None
175
+ if len(array) <= index:
176
+ return None
177
+ return array[index]
178
+
179
+ for entry in resfo.lazy_read(stream):
180
+ kw = entry.read_keyword()
181
+ match kw:
182
+ case "ZCORN ":
183
+ zcorn = validate_array(kw, entry.read_array())
184
+ case "COORD ":
185
+ coord = validate_array(kw, entry.read_array())
186
+ case "GRIDHEAD":
187
+ array = validate_array(kw, entry.read_array(), 4)
188
+ if (reference_number := optional_get(array, 4)) != 0:
189
+ warnings.warn(
190
+ f"The global grid in {filename} had "
191
+ f"reference number {reference_number}, expected 0."
192
+ " This could indicate that the grid being read"
193
+ " is actually an LGR grid."
194
+ )
195
+ if optional_get(array, 26) not in {0, None}:
196
+ raise InvalidEgridFileError(
197
+ f"EGRID file {filename} contains a radial grid"
198
+ " which is not supported by resfo-utilities."
199
+ )
200
+
201
+ dims = tuple(array[1:4])
202
+ case "MAPAXES ":
203
+ array = validate_array(kw, entry.read_array(), 6)
204
+ map_axes = MapAxes(
205
+ (array[0], array[1]),
206
+ (array[2], array[3]),
207
+ (array[4], array[5]),
208
+ )
209
+ case "ENDGRID ":
210
+ break
211
+
212
+ if coord is None:
213
+ raise InvalidEgridFileError(
214
+ f"EGRID file {filename} did not contain COORD"
215
+ )
216
+ if zcorn is None:
217
+ raise InvalidEgridFileError(
218
+ f"EGRID file {filename} did not contain ZCORN"
219
+ )
220
+ if dims is None:
221
+ raise InvalidEgridFileError(
222
+ f"EGRID file {filename} did not contain dimensions"
223
+ )
224
+ except resfo.ResfoParsingError as err:
225
+ raise InvalidEgridFileError(f"Could not parse EGRID file: {err}") from err
226
+ finally:
227
+ if opened and stream is not None:
228
+ stream.close()
229
+ try:
230
+ coord = np.swapaxes(coord.reshape((dims[1] + 1, dims[0] + 1, 2, 3)), 0, 1)
231
+ except ValueError as err:
232
+ raise InvalidEgridFileError(
233
+ f"COORD size {len(coord)} did not match"
234
+ f" grid dimensions {dims} in {filename}"
235
+ ) from err
236
+ try:
237
+ zcorn = zcorn.reshape(2, dims[0], 2, dims[1], 2, dims[2], order="F")
238
+ zcorn = np.moveaxis(zcorn, [1, 3, 5, 4, 2], [0, 1, 2, 3, 4])
239
+ zcorn = zcorn.reshape((dims[0], dims[1], dims[2], 8))
240
+ except ValueError as err:
241
+ raise InvalidEgridFileError(
242
+ f"ZCORN size {len(zcorn)} did not match"
243
+ f" grid dimensions {dims} in {filename}"
244
+ ) from err
245
+ return cls(coord, zcorn, map_axes)
246
+
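A minimal sketch of reading a grid with the classmethod above; the path `CASE.EGRID` is a placeholder, not shipped test data:

```python
from resfo_utilities import CornerpointGrid, InvalidEgridFileError

try:
    # "CASE.EGRID" is a placeholder; formatted files (.FEGRID) are detected by extension.
    grid = CornerpointGrid.read_egrid("CASE.EGRID")
except InvalidEgridFileError as err:
    print(f"Not a usable EGRID file: {err}")
else:
    ni, nj, nk, _ = grid.zcorn.shape
    print(f"Grid dimensions: {ni} x {nj} x {nk}")
```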
247
+ def find_cell_containing_point(
248
+ self, points: npt.ArrayLike, map_coordinates: bool = True
249
+ ) -> list[tuple[int, int, int] | None]:
250
+ """Find a cell in the grid which contains the given point.
251
+
252
+ Args:
253
+ points:
254
+ The points to find cells for.
255
+ map_coordinates:
256
+ Whether points are in the map coordinate system.
257
+ Defaults to True.
258
+
259
+ Returns:
260
+ List of (i, j, k) indices for each point (or None if the
261
+ point is not contained in any cell).
262
+ """
263
+ points = np.asarray(points)
264
+ result: list[tuple[int, int, int] | None] = []
265
+ if map_coordinates and self.map_axes is not None:
266
+ points = self.map_axes.transform_map_points(points)
267
+
268
+ # For each point p, this algorithm calculates the mesh surface that
269
+ # is the intersection of the pillars with the plane z=p[2]. Then it searches
270
+ # through the quads with a heuristic search that orders each neighbour by
271
+ # the point's Manhattan distance to its bounding box.
272
+ found = False
273
+ # The use case that the previous point is close to the
274
+ # next point is very common, so we optimize for that.
275
+ prev_ij = None # The i,j index the previous point was found at
276
+
277
+ @dataclass
278
+ class Quad:
279
+ """The quad at index i,j"""
280
+
281
+ mesh: npt.NDArray[np.float32]
282
+ i: int
283
+ j: int
284
+ p: npt.NDArray[np.float32]
285
+
286
+ @cached_property
287
+ def vertices(self) -> npt.NDArray[np.float32]:
288
+ return np.array(
289
+ [
290
+ self.mesh[self.i, self.j],
291
+ self.mesh[self.i + 1, self.j],
292
+ self.mesh[self.i + 1, self.j + 1],
293
+ self.mesh[self.i, self.j + 1],
294
+ ],
295
+ dtype=np.float32,
296
+ )
297
+
298
+ @cached_property
299
+ def min_x(self) -> np.float32:
300
+ return self.vertices[:, 0].min()
301
+
302
+ @cached_property
303
+ def min_y(self) -> np.float32:
304
+ return self.vertices[:, 1].min()
305
+
306
+ @cached_property
307
+ def max_x(self) -> np.float32:
308
+ return self.vertices[:, 0].max()
309
+
310
+ @cached_property
311
+ def max_y(self) -> np.float32:
312
+ return self.vertices[:, 1].max()
313
+
314
+ @cached_property
315
+ def distance_from_bounds(self) -> np.float32:
316
+ """Manhattan distance from the point to the quad bounding box."""
317
+ x_dist = max(self.min_x - self.p[0], self.p[0] - self.max_x, 0)
318
+ y_dist = max(self.min_y - self.p[1], self.p[1] - self.max_y, 0)
319
+ return x_dist + y_dist
320
+
321
+ def __lt__(self, other: object) -> bool:
322
+ """Used to order elements in the search queue.
323
+
324
+ The Quads are ordered by distance_from_bounds.
325
+ """
326
+ if not isinstance(other, Quad):
327
+ return False
328
+ return bool(self.distance_from_bounds < other.distance_from_bounds)
329
+
330
+ if self.zcorn.shape[0] <= 0 or self.zcorn.shape[1] <= 0:
331
+ return [None] * len(points)
332
+
333
+ for p in points:
334
+ found = False
335
+ mesh = self._pillars_z_plane_intersection(p[2])
336
+ if prev_ij is None:
337
+ queue = [Quad(mesh, 0, 0, p)]
338
+ else:
339
+ queue = [Quad(mesh, *prev_ij, p)]
340
+ visited = set([(queue[0].i, queue[0].j)])
341
+ while queue:
342
+ node = heapq.heappop(queue)
343
+ vertices = node.vertices
344
+ i = node.i
345
+ j = node.j
346
+
347
+ # If the quad contains the point then search through each k index
348
+ # for that quad
349
+ if node.distance_from_bounds <= 0 and Path(vertices).contains_points(
350
+ [p[0:2]]
351
+ ):
352
+ for k in range(self.zcorn.shape[2]):
353
+ zcorn = self.zcorn[i, j, k]
354
+ # Prune by bounding box first then check whether point_in_cell
355
+ if zcorn.min() <= p[2] <= zcorn.max() and self.point_in_cell(
356
+ p, i, j, k, map_coordinates=False
357
+ ):
358
+ prev_ij = (i, j)
359
+ result.append((i, j, k))
360
+ found = True
361
+ break
362
+ break
363
+
364
+ # Add each neighbour to the queue if not visited
365
+ for di in (-1, 0, 1):
366
+ ni = i + di
367
+ if ni < 0 or ni >= self.zcorn.shape[0]:
368
+ continue
369
+ for dj in (-1, 0, 1):
370
+ nj = j + dj
371
+ if nj < 0 or nj >= self.zcorn.shape[1]:
372
+ continue
373
+ if (ni, nj) not in visited:
374
+ heapq.heappush(queue, Quad(mesh, ni, nj, p))
375
+ visited.add((ni, nj))
376
+ if not found:
377
+ result.append(None)
378
+
379
+ return result
380
+
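A self-contained sketch of the lookup above, reusing the hand-built one-cell unit cube from the earlier sketch; the expected output is indicative:

```python
import numpy as np
from resfo_utilities import CornerpointGrid

# One-cell unit cube with vertical pillars (made-up grid).
coord = np.array(
    [[[[i, j, 0.0], [i, j, 1.0]] for j in range(2)] for i in range(2)],
    dtype=np.float32,
)
zcorn = np.array([[[[0, 0, 0, 0, 1, 1, 1, 1]]]], dtype=np.float32)
grid = CornerpointGrid(coord, zcorn)

# One point inside the only cell, one point far outside the grid.
print(grid.find_cell_containing_point([(0.5, 0.5, 0.5), (5.0, 5.0, 0.5)]))
# Expected: [(0, 0, 0), None]
```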
381
+ def point_in_cell(
382
+ self,
383
+ points: npt.ArrayLike,
384
+ i: int,
385
+ j: int,
386
+ k: int,
387
+ tolerance: float = 1e-6,
388
+ map_coordinates: bool = True,
389
+ ) -> npt.NDArray[np.bool_]:
390
+ """Whether the points (x,y,z) is in the cell at (i,j,k).
391
+
392
+ Args:
393
+ points:
394
+ x,y,z triple or array of x,y,z triples to be tested for containment.
395
+ tolerance:
396
+ The maximum distance to the cell boundary a point can have to
397
+ be considered to be contained in the cell.
398
+ map_coordinates:
399
+ Whether the given points are in the map axes coordinate system.
400
+ Defaults to True.
401
+
402
+ Returns:
403
+ Array of boolean values for each triplet describing whether
404
+ it is contained in the cell.
405
+ """
406
+ points = np.asarray(points)
407
+ if len(points.shape) == 1:
408
+ points = points[np.newaxis, :]
409
+ if map_coordinates and self.map_axes is not None:
410
+ points = self.map_axes.transform_map_points(points)
411
+ pillar_vertices = np.concatenate(
412
+ [
413
+ self.coord[i, j, :],
414
+ self.coord[i, j + 1, :],
415
+ self.coord[i + 1, j, :],
416
+ self.coord[i + 1, j + 1, :],
417
+ ]
418
+ )
419
+ top = pillar_vertices[::2][[0, 2, 1, 3]]
420
+ bot = pillar_vertices[1::2][[0, 2, 1, 3]]
421
+ top_z = top[:, 2]
422
+ bot_z = bot[:, 2]
423
+
424
+ def twice(a: npt.NDArray[Any]) -> npt.NDArray[Any]:
425
+ return np.concatenate([a, a])
426
+
427
+ t = (self.zcorn[i, j, k] - twice(top_z)) / twice(bot_z - top_z)
428
+ mesh = trimesh.Trimesh(
429
+ vertices=twice(top) + t[:, np.newaxis] * twice(bot - top),
430
+ faces=np.array(
431
+ [
432
+ [0, 1, 2],
433
+ [1, 2, 3],
434
+ [0, 1, 5],
435
+ [0, 4, 5],
436
+ [0, 2, 6],
437
+ [0, 4, 6],
438
+ [4, 6, 5],
439
+ [5, 6, 7],
440
+ [1, 3, 7],
441
+ [1, 5, 7],
442
+ [2, 3, 7],
443
+ [2, 3, 6],
444
+ ]
445
+ ),
446
+ )
447
+ return mesh.contains(points) | (mesh.nearest.on_surface(points)[1] < tolerance)
448
+
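The cell corners used in point_in_cell are obtained by linear interpolation along each pillar between its top and bottom end points (the `t` factor in the code above). The same arithmetic can be followed by hand with made-up numbers:

```python
import numpy as np

# One pillar: top end point at z=1000, bottom end point at z=2000 (made-up values).
top = np.array([100.0, 200.0, 1000.0])
bot = np.array([110.0, 205.0, 2000.0])

# A corner with zcorn = 1250 sits a quarter of the way down the pillar.
t = (1250.0 - top[2]) / (bot[2] - top[2])  # 0.25
corner = top + t * (bot - top)
print(corner)  # approximately (102.5, 201.25, 1250.0)
```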
449
+ def _pillars_z_plane_intersection(self, z: np.float32) -> npt.NDArray[np.float32]:
450
+ shape = self.coord.shape
451
+ coord = self.coord.reshape(shape[0] * shape[1], shape[2] * shape[3])
452
+ x1, y1, z1, x2, y2, z2 = coord.T
453
+ t = (z - z1) / (z2 - z1)
454
+
455
+ # Compute x and y for all lines
456
+ x = x1 + t * (x2 - x1)
457
+ y = y1 + t * (y2 - y1)
458
+
459
+ # Result: (x, y) coordinates for all lines at z
460
+ result = np.column_stack((x, y))
461
+ return result.reshape(shape[0], shape[1], 2)
@@ -0,0 +1,70 @@
1
+ Metadata-Version: 2.4
2
+ Name: resfo-utilities
3
+ Version: 0.0.1
4
+ Summary: A utility library for working with the output of reservoir simulators.
5
+ Author-email: Equinor <fg_sib-scout@equinor.com>
6
+ Maintainer-email: Eivind Jahren <ejah@equinor.com>, Håkon Steinkopf Søhoel <hsoho@equinor.com>
7
+ License: LGPL-3.0
8
+ Project-URL: Homepage, https://github.com/equinor/resfo-utilities
9
+ Project-URL: Repository, https://github.com/equinor/resfo-utilities
10
+ Project-URL: Bug Tracker, https://github.com/equinor/resfo-utilities/issues
11
+ Classifier: Development Status :: 1 - Planning
12
+ Classifier: License :: OSI Approved :: GNU Lesser General Public License v3 or later (LGPLv3+)
13
+ Classifier: Programming Language :: Python
14
+ Classifier: Programming Language :: Python :: 3.11
15
+ Classifier: Programming Language :: Python :: 3.12
16
+ Classifier: Programming Language :: Python :: 3.13
17
+ Requires-Python: <3.14,>=3.11
18
+ Description-Content-Type: text/markdown
19
+ Requires-Dist: numpy
20
+ Requires-Dist: resfo
21
+ Requires-Dist: shapely
22
+ Requires-Dist: trimesh[easy]
23
+ Requires-Dist: matplotlib
24
+ Provides-Extra: doc
25
+ Requires-Dist: sphinx; extra == "doc"
26
+ Requires-Dist: sphinx-rtd-theme; extra == "doc"
27
+ Provides-Extra: dev
28
+ Requires-Dist: pytest; extra == "dev"
29
+ Requires-Dist: hypothesis; extra == "dev"
30
+ Requires-Dist: pre-commit; extra == "dev"
31
+ Requires-Dist: mypy; extra == "dev"
32
+ Requires-Dist: types-shapely; extra == "dev"
33
+ Requires-Dist: pytest-benchmark; extra == "dev"
34
+
35
+ resfo-utilities
36
+ ===============
37
+
38
+
39
+ resfo-utilities is a library for working with output from
40
+ several reservoir simulators such as [opm
41
+ flow](https://github.com/OPM/opm-simulators).
42
+
43
+ Installation
44
+ ============
45
+
46
+ `pip install resfo-utilities`
47
+
48
+
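A small usage sketch; the file name and coordinates below are placeholders, not shipped test data:

```python
from resfo_utilities import CornerpointGrid

# "CASE.EGRID" and the map coordinates are placeholders.
grid = CornerpointGrid.read_egrid("CASE.EGRID")
cells = grid.find_cell_containing_point([(456200.0, 6785300.0, 1820.0)])
print(cells)  # [(i, j, k)] for the containing cell, or [None]
```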
49
+ How to contribute
50
+ =================
51
+
52
+ We use uv to have one synchronized development environment for all packages.
53
+ See [installing uv](https://docs.astral.sh/uv/getting-started/installation/). We
54
+ recommend either installing uv using your system's package manager, or creating
55
+ a small virtual environment you install base packages into (such as `uv` and `pre-commit`).
56
+
57
+ Once uv is installed, you can get a development environment by running:
58
+
59
+ ```sh
60
+ git clone https://github.com/equinor/resfo-utilities
61
+ cd resfo-utilities
62
+ uv sync --all-extras
63
+ ```
64
+
65
+
66
+ You should set up `pre-commit` so that style checks are run before you push:
67
+
68
+ ```bash
69
+ uv run pre-commit install --hook-type pre-push
70
+ ```
@@ -0,0 +1,6 @@
1
+ resfo_utilities/__init__.py,sha256=_O7IbN2VxBuoWsSeVlQl6LSi_N0Tnb1YAOwiPNcy2i0,146
2
+ resfo_utilities/_cornerpoint_grid.py,sha256=yZjPVyctdo9fqXmRjFBCtwmC6whEdokKLHB5FqCnosI,17893
3
+ resfo_utilities-0.0.1.dist-info/METADATA,sha256=d1osGa2k453dePtqlXueGF8y8E_AM_qvgPF9O1W3cf0,2328
4
+ resfo_utilities-0.0.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
5
+ resfo_utilities-0.0.1.dist-info/top_level.txt,sha256=VjItoaJHqsDLhHEvCjEI5bN2sZy55tA-zlkl-CtggEU,16
6
+ resfo_utilities-0.0.1.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (80.9.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1 @@
1
+ resfo_utilities