res2df 1.3.7__py3-none-any.whl → 1.3.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- res2df/__init__.py +2 -3
- res2df/common.py +79 -75
- res2df/compdat.py +27 -32
- res2df/csv2res.py +5 -9
- res2df/equil.py +24 -29
- res2df/faults.py +2 -7
- res2df/fipreports.py +10 -14
- res2df/grid.py +58 -63
- res2df/gruptree.py +33 -35
- res2df/inferdims.py +6 -9
- res2df/nnc.py +5 -10
- res2df/opmkeywords/__init__.py +0 -0
- res2df/parameters.py +12 -12
- res2df/pillars.py +24 -31
- res2df/pvt.py +29 -34
- res2df/res2csv.py +10 -15
- res2df/res2csvlogger.py +1 -3
- res2df/resdatafiles.py +8 -8
- res2df/rft.py +36 -42
- res2df/satfunc.py +22 -28
- res2df/summary.py +57 -60
- res2df/trans.py +16 -38
- res2df/version.py +2 -2
- res2df/vfp/__init__.py +1 -1
- res2df/vfp/_vfp.py +28 -33
- res2df/vfp/_vfpcommon.py +18 -19
- res2df/vfp/_vfpdefs.py +2 -3
- res2df/vfp/_vfpinj.py +23 -58
- res2df/vfp/_vfpprod.py +28 -64
- res2df/wcon.py +4 -11
- res2df/wellcompletiondata.py +26 -26
- res2df/wellconnstatus.py +4 -5
- {res2df-1.3.7.dist-info → res2df-1.3.8.dist-info}/METADATA +4 -2
- {res2df-1.3.7.dist-info → res2df-1.3.8.dist-info}/RECORD +38 -37
- {res2df-1.3.7.dist-info → res2df-1.3.8.dist-info}/WHEEL +0 -0
- {res2df-1.3.7.dist-info → res2df-1.3.8.dist-info}/entry_points.txt +0 -0
- {res2df-1.3.7.dist-info → res2df-1.3.8.dist-info}/licenses/LICENSE +0 -0
- {res2df-1.3.7.dist-info → res2df-1.3.8.dist-info}/top_level.txt +0 -0
res2df/vfp/_vfp.py
CHANGED
|
@@ -7,7 +7,7 @@ output both in csv format as a pandas DataFrame or in pyarrow and pyarrow.table
|
|
|
7
7
|
import argparse
|
|
8
8
|
import logging
|
|
9
9
|
import sys
|
|
10
|
-
from typing import Any
|
|
10
|
+
from typing import Any
|
|
11
11
|
|
|
12
12
|
import numpy as np
|
|
13
13
|
import pandas as pd
|
|
@@ -16,12 +16,11 @@ import pyarrow as pa
|
|
|
16
16
|
try:
|
|
17
17
|
# Needed for mypy
|
|
18
18
|
|
|
19
|
-
# pylint: disable=unused-import
|
|
20
19
|
import opm.io
|
|
21
20
|
|
|
22
21
|
# This import is seemingly not used, but necessary for some attributes
|
|
23
22
|
# to be included in DeckItem objects.
|
|
24
|
-
from opm.io.deck import DeckKeyword # noqa
|
|
23
|
+
from opm.io.deck import DeckKeyword # noqa: F401
|
|
25
24
|
except ImportError:
|
|
26
25
|
pass
|
|
27
26
|
|
|
@@ -37,10 +36,10 @@ logger = logging.getLogger(__name__)
|
|
|
37
36
|
|
|
38
37
|
|
|
39
38
|
def basic_data(
|
|
40
|
-
deck:
|
|
39
|
+
deck: "str | ResdataFiles | opm.opmcommon_python.Deck",
|
|
41
40
|
keyword: str = "VFPPROD",
|
|
42
|
-
vfpnumbers_str:
|
|
43
|
-
) -> List[Dict[str, Any]]:
|
|
41
|
+
vfpnumbers_str: str | None = None,
|
|
42
|
+
) -> list[dict[str, Any]]:
|
|
44
43
|
"""Produce a dictionary with basic data for an Eclipe VFPPROD/VFPINJ.
|
|
45
44
|
Dictionary returned contains items for liftcuve tables as simple datatypes
|
|
46
45
|
Required keys in dictionary for VFPPROD and VFPINJ can be found in
|
|
@@ -75,13 +74,13 @@ def basic_data(
|
|
|
75
74
|
basic_data_vfps.append(basic_data_vfpprod)
|
|
76
75
|
elif deck_keyword.name == "VFPINJ":
|
|
77
76
|
basic_data_vfpinj = vfpinj.basic_data(deck_keyword, vfpnumbers_str)
|
|
78
|
-
if basic_data_vfpinj:
|
|
77
|
+
if len(basic_data_vfpinj) > 0:
|
|
79
78
|
basic_data_vfps.append(basic_data_vfpinj)
|
|
80
79
|
|
|
81
80
|
return basic_data_vfps
|
|
82
81
|
|
|
83
82
|
|
|
84
|
-
def basic_data2df(data: Dict[str, Any]) -> pd.DataFrame:
|
|
83
|
+
def basic_data2df(data: dict[str, Any]) -> pd.DataFrame:
|
|
85
84
|
"""Convert basic_data representation of VFPPROD/VFPINF
|
|
86
85
|
(see function basic_data for defintion of data) into
|
|
87
86
|
pandas DataFrame representation
|
|
@@ -141,7 +140,7 @@ def basic_data2df(data: Dict[str, Any]) -> pd.DataFrame:
|
|
|
141
140
|
raise ValueError("VFP_TYPE not found in basic data")
|
|
142
141
|
|
|
143
142
|
|
|
144
|
-
def basic_data2pyarrow(data: Dict[str, Any], /) -> pa.Table:
|
|
143
|
+
def basic_data2pyarrow(data: dict[str, Any], /) -> pa.Table:
|
|
145
144
|
"""Convert basic_data representation of VFPPROD/VFPINF
|
|
146
145
|
(see function basic_data for defintion of data) into
|
|
147
146
|
pyarrow.Table representation
|
|
@@ -201,7 +200,7 @@ def basic_data2pyarrow(data: Dict[str, Any], /) -> pa.Table:
|
|
|
201
200
|
raise ValueError("VFP_TYPE not found in basic data")
|
|
202
201
|
|
|
203
202
|
|
|
204
|
-
def df2basic_data(dframe: pd.DataFrame, /) -> Union[Dict[str, Any], None]:
|
|
203
|
+
def df2basic_data(dframe: pd.DataFrame, /) -> dict[str, Any] | None:
|
|
205
204
|
"""Produce a dictionary with basic data types for a VFPPROD/VFPINJ
|
|
206
205
|
liftcurve table represented as a Pandas DataFrame
|
|
207
206
|
|
|
@@ -222,7 +221,7 @@ def df2basic_data(dframe: pd.DataFrame, /) -> Union[Dict[str, Any], None]:
|
|
|
222
221
|
return None
|
|
223
222
|
|
|
224
223
|
|
|
225
|
-
def pyarrow2basic_data(pa_table: pa.Table) -> Union[Dict[str, Any], None]:
|
|
224
|
+
def pyarrow2basic_data(pa_table: pa.Table) -> dict[str, Any] | None:
|
|
226
225
|
"""Produce a dictionary with basic data types for a VFPPROD/VFPINJ
|
|
227
226
|
liftcurve table represented as a pyarrow Table
|
|
228
227
|
|
|
@@ -243,10 +242,10 @@ def pyarrow2basic_data(pa_table: pa.Table) -> Union[Dict[str, Any], None]:
|
|
|
243
242
|
|
|
244
243
|
|
|
245
244
|
def dfs(
|
|
246
|
-
deck:
|
|
245
|
+
deck: "str | ResdataFiles | opm.opmcommon_python.Deck",
|
|
247
246
|
keyword: str = "VFPPROD",
|
|
248
|
-
vfpnumbers_str:
|
|
249
|
-
) -> List[pd.DataFrame]:
|
|
247
|
+
vfpnumbers_str: str | None = None,
|
|
248
|
+
) -> list[pd.DataFrame]:
|
|
250
249
|
"""Produce a list of dataframes of vfp tables from a :term:`deck`
|
|
251
250
|
|
|
252
251
|
Data for the keyword VFPPROD or VFPINJ will be returned as separate item in list
|
|
@@ -286,10 +285,10 @@ def dfs(
|
|
|
286
285
|
|
|
287
286
|
|
|
288
287
|
def pyarrow_tables(
|
|
289
|
-
deck:
|
|
288
|
+
deck: "str | ResdataFiles | opm.opmcommon_python.Deck",
|
|
290
289
|
keyword: str = "VFPPROD",
|
|
291
|
-
vfpnumbers_str:
|
|
292
|
-
) -> List[pa.Table]:
|
|
290
|
+
vfpnumbers_str: str | None = None,
|
|
291
|
+
) -> list[pa.Table]:
|
|
293
292
|
"""Produce a list of pyarrow.Table of vfp tables from a :term:`deck`
|
|
294
293
|
|
|
295
294
|
Data for the keyword VFPPROD or VFPINJ will be returned as separate item in list
|
|
@@ -331,8 +330,8 @@ def pyarrow_tables(
|
|
|
331
330
|
def df2ress(
|
|
332
331
|
dframe: pd.DataFrame,
|
|
333
332
|
keyword: str = "VFPPROD",
|
|
334
|
-
comments:
|
|
335
|
-
) -> List[str]:
|
|
333
|
+
comments: dict[str, str] | None = None,
|
|
334
|
+
) -> list[str]:
|
|
336
335
|
"""Produce a list of strings defining VFPPROD/VFPINJ Eclipse
|
|
337
336
|
:term:`include file` contents from a dataframe
|
|
338
337
|
|
|
@@ -377,8 +376,8 @@ def df2ress(
|
|
|
377
376
|
def df2res(
|
|
378
377
|
dframe: pd.DataFrame,
|
|
379
378
|
keyword: str = "VFPPROD",
|
|
380
|
-
comments:
|
|
381
|
-
filename:
|
|
379
|
+
comments: dict[str, str] | None = None,
|
|
380
|
+
filename: str | None = None,
|
|
382
381
|
) -> str:
|
|
383
382
|
"""Create a string defining all VFPPROD/VFPINJ Eclipse
|
|
384
383
|
:term:`include file` contents from a dataframe
|
|
@@ -412,9 +411,9 @@ def df2res(
|
|
|
412
411
|
|
|
413
412
|
|
|
414
413
|
def df(
|
|
415
|
-
deck:
|
|
414
|
+
deck: "str | ResdataFiles | opm.opmcommon_python.Deck",
|
|
416
415
|
keyword: str = "VFPPROD",
|
|
417
|
-
vfpnumbers_str:
|
|
416
|
+
vfpnumbers_str: str | None = None,
|
|
418
417
|
) -> pd.DataFrame:
|
|
419
418
|
"""Produce a dataframes of all vfp tables from a deck
|
|
420
419
|
|
|
@@ -487,9 +486,7 @@ def fill_reverse_parser(parser: argparse.ArgumentParser) -> argparse.ArgumentPar
|
|
|
487
486
|
|
|
488
487
|
def vfp_main(args) -> None:
|
|
489
488
|
"""Entry-point for module, for command line utility."""
|
|
490
|
-
logger = getLogger_res2csv(
|
|
491
|
-
__name__, vars(args)
|
|
492
|
-
)
|
|
489
|
+
logger = getLogger_res2csv(__name__, vars(args))
|
|
493
490
|
if args.keyword and args.keyword not in SUPPORTED_KEYWORDS:
|
|
494
491
|
raise ValueError(f"Keyword argument {args.keyword} not supported")
|
|
495
492
|
if not args.output:
|
|
@@ -510,11 +507,11 @@ def vfp_main(args) -> None:
|
|
|
510
507
|
table_number = int(
|
|
511
508
|
vfp_table.schema.metadata[b"TABLE_NUMBER"].decode("utf-8")
|
|
512
509
|
)
|
|
513
|
-
vfp_filename = f"{outputfile}_{
|
|
510
|
+
vfp_filename = f"{outputfile}_{table_number!s}.arrow"
|
|
514
511
|
write_dframe_stdout_file(
|
|
515
512
|
vfp_table, vfp_filename, index=False, caller_logger=logger
|
|
516
513
|
)
|
|
517
|
-
logger.info(
|
|
514
|
+
logger.info("Parsed file %s for vfp.dfs_arrow", args.DATAFILE)
|
|
518
515
|
else:
|
|
519
516
|
dframe = df(
|
|
520
517
|
resdatafiles.get_deck(), keyword=args.keyword, vfpnumbers_str=vfpnumbers
|
|
@@ -523,16 +520,14 @@ def vfp_main(args) -> None:
|
|
|
523
520
|
write_dframe_stdout_file(
|
|
524
521
|
dframe, args.output, index=False, caller_logger=logger
|
|
525
522
|
)
|
|
526
|
-
logger.info(
|
|
523
|
+
logger.info("Parsed file %s for vfp.df", args.DATAFILE)
|
|
527
524
|
|
|
528
525
|
|
|
529
526
|
def vfp_reverse_main(args) -> None:
|
|
530
527
|
"""Entry-point for module, for command line utility for CSV to Eclipse"""
|
|
531
|
-
logger = getLogger_res2csv(
|
|
532
|
-
__name__, vars(args)
|
|
533
|
-
)
|
|
528
|
+
logger = getLogger_res2csv(__name__, vars(args))
|
|
534
529
|
vfp_df = pd.read_csv(args.csvfile)
|
|
535
|
-
logger.info("Parsed
|
|
530
|
+
logger.info("Parsed %s", args.csvfile)
|
|
536
531
|
inc_string = df2res(vfp_df, args.keyword)
|
|
537
532
|
if args.output:
|
|
538
533
|
write_inc_stdout_file(inc_string, args.output)
|
res2df/vfp/_vfpcommon.py
CHANGED
|
@@ -8,7 +8,7 @@ output both in csv format as a pandas DataFrame or in pyarrow and pyarrow.table
|
|
|
8
8
|
|
|
9
9
|
import logging
|
|
10
10
|
import numbers
|
|
11
|
-
from typing import Any
|
|
11
|
+
from typing import Any
|
|
12
12
|
|
|
13
13
|
import numpy as np
|
|
14
14
|
import pandas as pd
|
|
@@ -16,12 +16,11 @@ import pandas as pd
|
|
|
16
16
|
try:
|
|
17
17
|
# Needed for mypy
|
|
18
18
|
|
|
19
|
-
# pylint: disable=unused-import
|
|
20
19
|
import opm.io
|
|
21
20
|
|
|
22
21
|
# This import is seemingly not used, but necessary for some attributes
|
|
23
22
|
# to be included in DeckItem objects.
|
|
24
|
-
from opm.io.deck import DeckKeyword # noqa
|
|
23
|
+
from opm.io.deck import DeckKeyword # noqa: F401
|
|
25
24
|
except ImportError:
|
|
26
25
|
pass
|
|
27
26
|
|
|
@@ -30,7 +29,7 @@ from ..common import parse_opmio_deckrecord
|
|
|
30
29
|
logger = logging.getLogger(__name__)
|
|
31
30
|
|
|
32
31
|
|
|
33
|
-
def _string2intlist(list_def_str: str) -> List[int]:
|
|
32
|
+
def _string2intlist(list_def_str: str) -> list[int]:
|
|
34
33
|
"""Produce a list of int from input string
|
|
35
34
|
|
|
36
35
|
Args:
|
|
@@ -53,13 +52,13 @@ def _string2intlist(list_def_str: str) -> List[int]:
|
|
|
53
52
|
|
|
54
53
|
|
|
55
54
|
def _deckrecord2list(
|
|
56
|
-
record: "opm.
|
|
55
|
+
record: "opm.opmcommon_python.DeckRecord",
|
|
57
56
|
keyword: str,
|
|
58
57
|
recordindex: int,
|
|
59
58
|
recordname: str,
|
|
60
|
-
) -> Union[Any, List[float]]:
|
|
59
|
+
) -> Any | list[float]:
|
|
61
60
|
"""
|
|
62
|
-
Parse an opm.
|
|
61
|
+
Parse an opm.opmcommon_python.DeckRecord belonging to a certain keyword
|
|
63
62
|
and return as list of numbers
|
|
64
63
|
|
|
65
64
|
Args:
|
|
@@ -71,7 +70,7 @@ def _deckrecord2list(
|
|
|
71
70
|
"""
|
|
72
71
|
record = parse_opmio_deckrecord(record, keyword, "records", recordindex)
|
|
73
72
|
|
|
74
|
-
values: Union[Any, List[float]]
|
|
73
|
+
values: Any | list[float]
|
|
75
74
|
# Extract interpolation ranges into lists
|
|
76
75
|
if isinstance(record.get(recordname), list):
|
|
77
76
|
values = record.get(recordname)
|
|
@@ -87,10 +86,10 @@ def _deckrecord2list(
|
|
|
87
86
|
|
|
88
87
|
|
|
89
88
|
def _stack_vfptable2df(
|
|
90
|
-
index_names_list:
|
|
91
|
-
index_values_list:
|
|
92
|
-
flow_values_list:
|
|
93
|
-
table_values_list:
|
|
89
|
+
index_names_list: list[str],
|
|
90
|
+
index_values_list: np.ndarray | list[list[float]],
|
|
91
|
+
flow_values_list: np.ndarray | list[float],
|
|
92
|
+
table_values_list: np.ndarray | list[list[float]],
|
|
94
93
|
) -> pd.DataFrame:
|
|
95
94
|
"""Return a dataframe from a list of interpolation ranges and tabulated values
|
|
96
95
|
|
|
@@ -137,16 +136,16 @@ def _stack_vfptable2df(
|
|
|
137
136
|
df_vfptable.columns = pd.MultiIndex.from_tuples(indextuples)
|
|
138
137
|
|
|
139
138
|
# Now stack
|
|
140
|
-
df_vfptable_stacked = df_vfptable.stack()
|
|
139
|
+
df_vfptable_stacked = df_vfptable.stack(future_stack=True).sort_index()
|
|
141
140
|
|
|
142
141
|
# In order to propagate the gfr, thp, wct values after
|
|
143
142
|
# stacking to the correct rows, we should either understand
|
|
144
143
|
# how to do that properly using pandas, but for now, we try a
|
|
145
144
|
# backwards fill, hopefully that is robust enough
|
|
146
|
-
df_vfptable_stacked.bfill(inplace=True)
|
|
147
145
|
# Also reset the index:
|
|
148
|
-
df_vfptable_stacked
|
|
149
|
-
|
|
146
|
+
df_vfptable_stacked = (
|
|
147
|
+
df_vfptable_stacked.bfill().reset_index().drop("level_0", axis="columns")
|
|
148
|
+
)
|
|
150
149
|
# This column is not meaningful (it is the old index)
|
|
151
150
|
|
|
152
151
|
# Delete rows that does not belong to any flow rate (this is
|
|
@@ -163,15 +162,15 @@ def _stack_vfptable2df(
|
|
|
163
162
|
df_vfptable_stacked["RATE"] = df_vfptable_stacked["RATE"].astype(float)
|
|
164
163
|
|
|
165
164
|
# Sort values in correct order
|
|
166
|
-
df_vfptable_stacked.sort_values(
|
|
167
|
-
by=index_names_list
|
|
165
|
+
df_vfptable_stacked = df_vfptable_stacked.sort_values(
|
|
166
|
+
by=[*index_names_list, "RATE"], ascending=True, ignore_index=True
|
|
168
167
|
)
|
|
169
168
|
|
|
170
169
|
return df_vfptable_stacked
|
|
171
170
|
|
|
172
171
|
|
|
173
172
|
def _write_vfp_range(
|
|
174
|
-
values: List[float],
|
|
173
|
+
values: list[float],
|
|
175
174
|
var_type: str,
|
|
176
175
|
unit_type: str,
|
|
177
176
|
format: str = "%10.6g",
|
res2df/vfp/_vfpdefs.py
CHANGED
|
@@ -6,10 +6,9 @@ routines for VFPPROD and VFPINJ keywords in res2df.
|
|
|
6
6
|
"""
|
|
7
7
|
|
|
8
8
|
from enum import Enum
|
|
9
|
-
from typing import Dict, List, Union
|
|
10
9
|
|
|
11
10
|
# Supported types of VFP keywords
|
|
12
|
-
SUPPORTED_KEYWORDS: List[str] = [
|
|
11
|
+
SUPPORTED_KEYWORDS: list[str] = [
|
|
13
12
|
"VFPPROD",
|
|
14
13
|
"VFPINJ",
|
|
15
14
|
]
|
|
@@ -18,7 +17,7 @@ SUPPORTED_KEYWORDS: List[str] = [
|
|
|
18
17
|
# desired column names in produced dataframes. They also to a certain
|
|
19
18
|
# extent determine the structure of the dataframe, in particular
|
|
20
19
|
# for keywords with arbitrary data amount pr. record (GAS, THP, WGR, GOR f.ex)
|
|
21
|
-
RENAMERS: Dict[str, Dict[str, Union[str, List[str]]]] = {}
|
|
20
|
+
RENAMERS: dict[str, dict[str, str | list[str]]] = {}
|
|
22
21
|
|
|
23
22
|
|
|
24
23
|
# Type of VFP curve
|
res2df/vfp/_vfpinj.py
CHANGED
|
@@ -11,7 +11,7 @@ pyarrow.Table to file as Eclipse .Ecl format
|
|
|
11
11
|
|
|
12
12
|
import logging
|
|
13
13
|
import numbers
|
|
14
|
-
from typing import Any
|
|
14
|
+
from typing import Any
|
|
15
15
|
|
|
16
16
|
import numpy as np
|
|
17
17
|
import pandas as pd
|
|
@@ -20,12 +20,11 @@ import pyarrow as pa
|
|
|
20
20
|
try:
|
|
21
21
|
# Needed for mypy
|
|
22
22
|
|
|
23
|
-
# pylint: disable=unused-import
|
|
24
23
|
import opm.io
|
|
25
24
|
|
|
26
25
|
# This import is seemingly not used, but necessary for some attributes
|
|
27
26
|
# to be included in DeckItem objects.
|
|
28
|
-
from opm.io.deck import DeckKeyword # noqa
|
|
27
|
+
from opm.io.deck import DeckKeyword # noqa: F401
|
|
29
28
|
except ImportError:
|
|
30
29
|
pass
|
|
31
30
|
|
|
@@ -65,9 +64,9 @@ logger = logging.getLogger(__name__)
|
|
|
65
64
|
|
|
66
65
|
|
|
67
66
|
def basic_data(
|
|
68
|
-
keyword: "opm.
|
|
69
|
-
vfpnumbers_str:
|
|
70
|
-
) -> Dict[str, Any]:
|
|
67
|
+
keyword: "opm.opmcommon_python.DeckKeyword",
|
|
68
|
+
vfpnumbers_str: str | None = None,
|
|
69
|
+
) -> dict[str, Any]:
|
|
71
70
|
"""Read and return all data for Eclipse VFPINJ keyword as basic data types
|
|
72
71
|
|
|
73
72
|
Empty string returned if vfp table number does not match any number in list
|
|
@@ -89,7 +88,7 @@ def basic_data(
|
|
|
89
88
|
if vfpnumbers_str:
|
|
90
89
|
vfpnumbers = _string2intlist(vfpnumbers_str)
|
|
91
90
|
if tableno not in vfpnumbers:
|
|
92
|
-
return
|
|
91
|
+
return {}
|
|
93
92
|
datum = basic_record["DATUM_DEPTH"]
|
|
94
93
|
rate_type = VFPINJ_FLO.GAS
|
|
95
94
|
if basic_record["RATE_TYPE"]:
|
|
@@ -121,11 +120,11 @@ def basic_data(
|
|
|
121
120
|
)
|
|
122
121
|
|
|
123
122
|
# Extract interpolation values and tabulated values (BHP values)
|
|
124
|
-
bhp_table: List[List[float]] = []
|
|
125
|
-
thp_indices: List[float] = []
|
|
123
|
+
bhp_table: list[list[float]] = []
|
|
124
|
+
thp_indices: list[float] = []
|
|
126
125
|
for n in range(3, num_rec):
|
|
127
126
|
bhp_record = parse_opmio_deckrecord(keyword[n], "VFPINJ", "records", 3)
|
|
128
|
-
bhp_values: Union[Any, List[float]]
|
|
127
|
+
bhp_values: Any | list[float]
|
|
129
128
|
if isinstance(bhp_record.get("VALUES"), list):
|
|
130
129
|
bhp_values = bhp_record.get("VALUES")
|
|
131
130
|
elif isinstance(bhp_record.get("VALUES"), numbers.Number):
|
|
@@ -224,7 +223,7 @@ def basic_data2df(
|
|
|
224
223
|
]
|
|
225
224
|
|
|
226
225
|
# reset index (not used other than tests)
|
|
227
|
-
df_bhp_stacked.reset_index(
|
|
226
|
+
df_bhp_stacked = df_bhp_stacked.reset_index(drop=True)
|
|
228
227
|
return df_bhp_stacked
|
|
229
228
|
|
|
230
229
|
|
|
@@ -295,7 +294,7 @@ def basic_data2pyarrow(
|
|
|
295
294
|
return pa_table
|
|
296
295
|
|
|
297
296
|
|
|
298
|
-
def df2basic_data(dframe: pd.DataFrame) -> Dict[str, Any]:
|
|
297
|
+
def df2basic_data(dframe: pd.DataFrame) -> dict[str, Any]:
|
|
299
298
|
"""Return basic data type for VFPINJ from a pandas dataframe.
|
|
300
299
|
|
|
301
300
|
Return format is a dictionary all data in VFPINJ in basic data types
|
|
@@ -348,12 +347,12 @@ def df2basic_data(dframe: pd.DataFrame) -> Dict[str, Any]:
|
|
|
348
347
|
if no_tab_values % no_flow_values != 0:
|
|
349
348
|
raise ValueError(
|
|
350
349
|
f"Number of unique rate values {no_flow_values} not consistent "
|
|
351
|
-
"with number of tabulated values {no_tab_values}"
|
|
350
|
+
f"with number of tabulated values {no_tab_values}"
|
|
352
351
|
)
|
|
353
352
|
if no_tab_values % no_thp_values != 0:
|
|
354
353
|
raise ValueError(
|
|
355
354
|
f"Number of unique thp values {no_thp_values} not consistent "
|
|
356
|
-
"with number of tabulated values {no_tab_values}"
|
|
355
|
+
f"with number of tabulated values {no_tab_values}"
|
|
357
356
|
)
|
|
358
357
|
|
|
359
358
|
# Replace interpolation values with index in dataframe
|
|
@@ -385,7 +384,7 @@ def df2basic_data(dframe: pd.DataFrame) -> Dict[str, Any]:
|
|
|
385
384
|
return vfpinj_data
|
|
386
385
|
|
|
387
386
|
|
|
388
|
-
def pyarrow2basic_data(pa_table: pa.Table) -> Dict[str, Any]:
|
|
387
|
+
def pyarrow2basic_data(pa_table: pa.Table) -> dict[str, Any]:
|
|
389
388
|
"""Return basic data type for VFPINJ from a pyarrow.Table.
|
|
390
389
|
|
|
391
390
|
Return format is a dictionary all data in VFPINJ in basic data types
|
|
@@ -422,7 +421,7 @@ def pyarrow2basic_data(pa_table: pa.Table) -> Dict[str, Any]:
|
|
|
422
421
|
return vfpinj_data
|
|
423
422
|
|
|
424
423
|
|
|
425
|
-
def _check_basic_data(vfp_data: Dict[str, Any]) -> bool:
|
|
424
|
+
def _check_basic_data(vfp_data: dict[str, Any]) -> bool:
|
|
426
425
|
"""Perform a check of the VFPINJ data contained in the dictionary.
|
|
427
426
|
Checks if all data is present and if the dimensions of the arrays
|
|
428
427
|
are consisitent.
|
|
@@ -471,9 +470,9 @@ def _check_basic_data(vfp_data: Dict[str, Any]) -> bool:
|
|
|
471
470
|
|
|
472
471
|
|
|
473
472
|
def df(
|
|
474
|
-
keyword: "opm.
|
|
475
|
-
vfpnumbers_str:
|
|
476
|
-
) ->
|
|
473
|
+
keyword: "opm.opmcommon_python.DeckKeyword",
|
|
474
|
+
vfpnumbers_str: str | None = None,
|
|
475
|
+
) -> pd.DataFrame | None:
|
|
477
476
|
"""Return a dataframes of a single VFPINJ table from a :term:`.DATA file`
|
|
478
477
|
|
|
479
478
|
Data from the VFPINJ keyword are stacked into a Pandas Dataframe
|
|
@@ -508,9 +507,9 @@ def df(
|
|
|
508
507
|
|
|
509
508
|
|
|
510
509
|
def pyarrow(
|
|
511
|
-
keyword: "opm.
|
|
512
|
-
vfpnumbers_str:
|
|
513
|
-
) ->
|
|
510
|
+
keyword: "opm.opmcommon_python.DeckKeyword",
|
|
511
|
+
vfpnumbers_str: str | None = None,
|
|
512
|
+
) -> pa.Table | None:
|
|
514
513
|
"""Return a pyarrow Table of a single VFPINJ table from a :term:`.DATA file`
|
|
515
514
|
If no VFPINJ table found, return None
|
|
516
515
|
|
|
@@ -577,43 +576,9 @@ def _write_basic_record(
|
|
|
577
576
|
return deck_str
|
|
578
577
|
|
|
579
578
|
|
|
580
|
-
def _write_table(
|
|
581
|
-
table: pd.DataFrame,
|
|
582
|
-
format: str = "%10.6g",
|
|
583
|
-
values_per_line: int = 5,
|
|
584
|
-
) -> str:
|
|
585
|
-
"""Creates a :term:`include file` content string representing
|
|
586
|
-
a resdata record for a VFPINJ table (BHP part)
|
|
587
|
-
|
|
588
|
-
Args:
|
|
589
|
-
table: DataFrame with multiindex for table ranges and colums
|
|
590
|
-
for tabulated values (BHP)
|
|
591
|
-
format: Format string for values
|
|
592
|
-
values_per_line: Number of values per line in output
|
|
593
|
-
"""
|
|
594
|
-
|
|
595
|
-
deck_str = ""
|
|
596
|
-
for idx, _row in table.iterrows():
|
|
597
|
-
deck_str += f"{idx:2d}"
|
|
598
|
-
no_flo = len(table.loc[idx].to_list())
|
|
599
|
-
for n, value in enumerate(table.loc[idx].to_list()):
|
|
600
|
-
deck_str += format % value
|
|
601
|
-
if (n + 1) % values_per_line == 0:
|
|
602
|
-
if n < no_flo - 1:
|
|
603
|
-
deck_str += "\n"
|
|
604
|
-
deck_str += " " * 2
|
|
605
|
-
else:
|
|
606
|
-
deck_str += "\n"
|
|
607
|
-
elif n == no_flo - 1:
|
|
608
|
-
deck_str += "\n"
|
|
609
|
-
deck_str += "/\n"
|
|
610
|
-
|
|
611
|
-
return deck_str
|
|
612
|
-
|
|
613
|
-
|
|
614
579
|
def _write_table_records(
|
|
615
580
|
thp_indices: np.ndarray,
|
|
616
|
-
table:
|
|
581
|
+
table: np.ndarray,
|
|
617
582
|
format: str = "%10.6g",
|
|
618
583
|
values_per_line: int = 5,
|
|
619
584
|
) -> str:
|
|
@@ -657,7 +622,7 @@ def _write_table_records(
|
|
|
657
622
|
return deck_str
|
|
658
623
|
|
|
659
624
|
|
|
660
|
-
def df2res(dframe: pd.DataFrame, comment:
|
|
625
|
+
def df2res(dframe: pd.DataFrame, comment: str | None = None) -> str:
|
|
661
626
|
"""Creates a :term:`include file` content string
|
|
662
627
|
representing single VFPINJ Eclipse input from a dataframe
|
|
663
628
|
|